Merge "Add end effector to superstructure"
diff --git a/aos/configuration.cc b/aos/configuration.cc
index 1d5e60a..ad0a489 100644
--- a/aos/configuration.cc
+++ b/aos/configuration.cc
@@ -1355,8 +1355,10 @@
     case LoggerConfig::LOCAL_LOGGER:
       return channel->source_node()->string_view() == node_name;
     case LoggerConfig::LOCAL_AND_REMOTE_LOGGER:
-      CHECK(channel->has_logger_nodes());
-      CHECK_GT(channel->logger_nodes()->size(), 0u);
+      CHECK(channel->has_logger_nodes())
+          << "Missing logger nodes on " << StrippedChannelToString(channel);
+      CHECK_GT(channel->logger_nodes()->size(), 0u)
+          << "Missing logger nodes on " << StrippedChannelToString(channel);
 
       if (channel->source_node()->string_view() == node_name) {
         return true;
@@ -1364,8 +1366,10 @@
 
       [[fallthrough]];
     case LoggerConfig::REMOTE_LOGGER:
-      CHECK(channel->has_logger_nodes());
-      CHECK_GT(channel->logger_nodes()->size(), 0u);
+      CHECK(channel->has_logger_nodes())
+          << "Missing logger nodes on " << StrippedChannelToString(channel);
+      CHECK_GT(channel->logger_nodes()->size(), 0u)
+          << "Missing logger nodes on " << StrippedChannelToString(channel);
       for (const flatbuffers::String *logger_node : *channel->logger_nodes()) {
         if (logger_node->string_view() == node_name) {
           return true;
diff --git a/frc971/analysis/foxglove.md b/frc971/analysis/foxglove.md
new file mode 100644
index 0000000..8276584
--- /dev/null
+++ b/frc971/analysis/foxglove.md
@@ -0,0 +1,36 @@
+We have some support for using [Foxglove Studio](https://studio.foxglove.dev)
+for visualizing robot data.
+
+# Accessing Foxglove
+
+You have three main options for using Foxglove Studio:
+1. Go to https://studio.foxglove.dev and use the most up-to-date studio. This
+   is convenient, but it won't work when you do not have Internet access and
+   it has some limitations when it comes to accessing unsecured websockets.
+2. Download the Foxglove desktop application.
+3. Run our local copy with `bazel run //frc971/analysis:local_foxglove`.
+   This will work offline, and serves Foxglove at http://localhost:8000 by
+   default.
+
+# Log Visualization
+
+If you are looking at data from a log, you will first need to convert the AOS
+log to MCAP so that it can be viewed in Foxglove. To do so, run
+`bazel run -c opt //aos/util:log_to_mcap -- /path/to/log --output_path /tmp/log.mcap`.
+This will create an MCAP file at the specified path, which you can then open
+in any of the Foxglove options above.
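+
+For example, a minimal offline workflow (with a placeholder log path) is to
+convert the log and then serve the local copy of Foxglove:
+
+```bash
+# Convert an AOS log to MCAP (replace /path/to/log with your log directory).
+bazel run -c opt //aos/util:log_to_mcap -- /path/to/log --output_path /tmp/log.mcap
+# Serve the local Foxglove at http://localhost:8000, then open /tmp/log.mcap there.
+bazel run //frc971/analysis:local_foxglove
+```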
+
+# Live Visualization
+
+On the pis, we run a `foxglove_websocket` application by default. This exposes
+a websocket on port 8765. How you connect to it varies depending on which
+method you are using to run Foxglove.
+
+If using https://studio.foxglove.dev, you cannot directly access
+ws://10.9.71.10X:8765 due to security constraints. Instead, you will have to
+port forward with something like `ssh -L 8765:localhost:8765 pi@10.9.71.101`
+to expose the port locally, and then use the `ws://localhost:8765` websocket.
+
+If using the local Foxglove, you can just use the pi's IP address directly.
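+For example, you can connect straight to the `ws://10.9.71.10X:8765` websocket
+(e.g., `ws://10.9.71.101:8765`).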
+
+I have not tried using the desktop Foxglove application for this.
diff --git a/frc971/vision/calibration_accumulator.cc b/frc971/vision/calibration_accumulator.cc
index 711ed7d..f01af3c 100644
--- a/frc971/vision/calibration_accumulator.cc
+++ b/frc971/vision/calibration_accumulator.cc
@@ -8,11 +8,11 @@
 #include "aos/events/simulated_event_loop.h"
 #include "aos/network/team_number.h"
 #include "aos/time/time.h"
+#include "external/com_github_foxglove_schemas/CompressedImage_schema.h"
+#include "external/com_github_foxglove_schemas/ImageAnnotations_schema.h"
 #include "frc971/control_loops/quaternion_utils.h"
 #include "frc971/vision/charuco_lib.h"
 #include "frc971/wpilib/imu_batch_generated.h"
-#include "external/com_github_foxglove_schemas/ImageAnnotations_schema.h"
-#include "external/com_github_foxglove_schemas/CompressedImage_schema.h"
 
 DEFINE_bool(display_undistorted, false,
             "If true, display the undistorted image.");
@@ -116,10 +116,10 @@
 CalibrationFoxgloveVisualizer::CalibrationFoxgloveVisualizer(
     aos::EventLoop *event_loop)
     : event_loop_(event_loop),
-      image_converter_(event_loop_, "/camera", "/visualization",
+      image_converter_(event_loop_, "/camera", "/camera",
                        ImageCompression::kJpeg),
       annotations_sender_(
-          event_loop_->MakeSender<foxglove::ImageAnnotations>("/visualization")) {}
+          event_loop_->MakeSender<foxglove::ImageAnnotations>("/camera")) {}
 
 aos::FlatbufferDetachedBuffer<aos::Configuration>
 CalibrationFoxgloveVisualizer::AddVisualizationChannels(
diff --git a/frc971/vision/calibration_accumulator.h b/frc971/vision/calibration_accumulator.h
index 93d68a5..4b59406 100644
--- a/frc971/vision/calibration_accumulator.h
+++ b/frc971/vision/calibration_accumulator.h
@@ -88,8 +88,8 @@
   void HandleCharuco(const aos::monotonic_clock::time_point eof,
                      std::vector<std::vector<cv::Point2f>> charuco_corners) {
     auto builder = annotations_sender_.MakeBuilder();
-    builder.CheckOk(
-        builder.Send(BuildAnnotations(eof, charuco_corners, builder.fbb())));
+    builder.CheckOk(builder.Send(
+        BuildAnnotations(eof, charuco_corners, 2.0, builder.fbb())));
   }
 
  private:
diff --git a/frc971/vision/charuco_lib.cc b/frc971/vision/charuco_lib.cc
index dbff4db..2f9e81a 100644
--- a/frc971/vision/charuco_lib.cc
+++ b/frc971/vision/charuco_lib.cc
@@ -457,14 +457,15 @@
 
 flatbuffers::Offset<foxglove::ImageAnnotations> BuildAnnotations(
     const aos::monotonic_clock::time_point monotonic_now,
-    const std::vector<std::vector<cv::Point2f>> &corners,
+    const std::vector<std::vector<cv::Point2f>> &corners, double thickness,
     flatbuffers::FlatBufferBuilder *fbb) {
   std::vector<flatbuffers::Offset<foxglove::PointsAnnotation>> rectangles;
   const struct timespec now_t = aos::time::to_timespec(monotonic_now);
   foxglove::Time time{static_cast<uint32_t>(now_t.tv_sec),
                       static_cast<uint32_t>(now_t.tv_nsec)};
+  // Draw the points in pink
   const flatbuffers::Offset<foxglove::Color> color_offset =
-      foxglove::CreateColor(*fbb, 0.0, 1.0, 0.0, 1.0);
+      foxglove::CreateColor(*fbb, 1.0, 0.75, 0.8, 1.0);
   for (const std::vector<cv::Point2f> &rectangle : corners) {
     std::vector<flatbuffers::Offset<foxglove::Point2>> points_offsets;
     for (const cv::Point2f &point : rectangle) {
@@ -482,7 +483,7 @@
     points_builder.add_points(points_offset);
     points_builder.add_outline_color(color_offset);
     points_builder.add_outline_colors(colors_offset);
-    points_builder.add_thickness(2.0);
+    points_builder.add_thickness(thickness);
     rectangles.push_back(points_builder.Finish());
   }
 
diff --git a/frc971/vision/charuco_lib.h b/frc971/vision/charuco_lib.h
index 40b1601..f1846b5 100644
--- a/frc971/vision/charuco_lib.h
+++ b/frc971/vision/charuco_lib.h
@@ -175,7 +175,7 @@
 // visualization purposes.
 flatbuffers::Offset<foxglove::ImageAnnotations> BuildAnnotations(
     const aos::monotonic_clock::time_point monotonic_now,
-    const std::vector<std::vector<cv::Point2f>> &corners,
+    const std::vector<std::vector<cv::Point2f>> &corners, double thickness,
     flatbuffers::FlatBufferBuilder *fbb);
 
 }  // namespace vision
diff --git a/frc971/vision/extrinsics_calibration.cc b/frc971/vision/extrinsics_calibration.cc
index d563fb0..daa371e 100644
--- a/frc971/vision/extrinsics_calibration.cc
+++ b/frc971/vision/extrinsics_calibration.cc
@@ -488,38 +488,6 @@
     vis_robot.SetCameraParameters(camera_mat);
     vis_robot.SetDistortionCoefficients(dist_coeffs);
 
-    /*
-    // Draw an initial visualization
-    Eigen::Vector3d T_world_imu_vec =
-        calibration_parameters.initial_state.block<3, 1>(0, 0);
-    Eigen::Translation3d T_world_imu(T_world_imu_vec);
-    Eigen::Affine3d H_world_imu =
-        T_world_imu * calibration_parameters.initial_orientation;
-
-    vis_robot.DrawFrameAxes(H_world_imu, "imu");
-
-    Eigen::Quaterniond R_imu_pivot(calibration_parameters.pivot_to_imu);
-    Eigen::Translation3d T_imu_pivot(
-        calibration_parameters.pivot_to_imu_translation);
-    Eigen::Affine3d H_imu_pivot = T_imu_pivot * R_imu_pivot;
-    Eigen::Affine3d H_world_pivot = H_world_imu * H_imu_pivot;
-    vis_robot.DrawFrameAxes(H_world_pivot, "pivot");
-
-    Eigen::Affine3d H_imupivot_camerapivot(
-        Eigen::AngleAxisd(1.5, Eigen::Vector3d::UnitZ()));
-    Eigen::Quaterniond R_camera_pivot(calibration_parameters.pivot_to_camera);
-    Eigen::Translation3d T_camera_pivot(
-        calibration_parameters.pivot_to_camera_translation);
-    Eigen::Affine3d H_camera_pivot = T_camera_pivot * R_camera_pivot;
-    Eigen::Affine3d H_world_camera = H_world_imu * H_imu_pivot *
-                                     H_imupivot_camerapivot *
-                                     H_camera_pivot.inverse();
-    vis_robot.DrawFrameAxes(H_world_camera, "camera");
-
-    cv::imshow("Original poses", image_mat);
-    cv::waitKey();
-    */
-
     uint current_state_index = 0;
     uint current_turret_index = 0;
     for (uint i = 0; i < camera_times_.size() - 1; i++) {
@@ -623,11 +591,6 @@
 
       cv::imshow("Live", image_mat);
       cv::waitKey(50);
-
-      if (i % 200 == 0) {
-        LOG(INFO) << "Pausing at step " << i;
-        cv::waitKey();
-      }
     }
     LOG(INFO) << "Finished visualizing robot.  Press any key to continue";
     cv::waitKey();
@@ -849,11 +812,11 @@
           trans_error_scale * filter.errorpz(i);
     }
 
-    LOG(INFO) << "Cost function calc took "
-              << chrono::duration<double>(aos::monotonic_clock::now() -
-                                          start_time)
-                     .count()
-              << " seconds";
+    VLOG(2) << "Cost function calc took "
+            << chrono::duration<double>(aos::monotonic_clock::now() -
+                                        start_time)
+                   .count()
+            << " seconds";
 
     return true;
   }
@@ -898,7 +861,7 @@
         calibration_parameters->accelerometer_bias.data());
   }
 
-  {
+  if (calibration_parameters->has_pivot) {
     // The turret's Z rotation is redundant with the camera's mounting z
     // rotation since it's along the rotation axis.
     ceres::CostFunction *turret_z_cost_function =
@@ -922,6 +885,17 @@
         calibration_parameters->pivot_to_imu_translation.data());
   }
 
+  {
+    // The board rotation in z is a bit arbitrary, so penalize it to
+    // increase repeatability.
+    ceres::CostFunction *board_z_cost_function =
+        new ceres::AutoDiffCostFunction<PenalizeQuaternionZ, 1, 4>(
+            new PenalizeQuaternionZ());
+    problem.AddResidualBlock(
+        board_z_cost_function, nullptr,
+        calibration_parameters->board_to_world.coeffs().data());
+  }
+
   problem.SetParameterization(
       calibration_parameters->initial_orientation.coeffs().data(),
       quaternion_local_parameterization);
@@ -952,9 +926,9 @@
   // Run the solver!
   ceres::Solver::Options options;
   options.minimizer_progress_to_stdout = true;
-  options.gradient_tolerance = 1e-12;
+  options.gradient_tolerance = 1e-6;
   options.function_tolerance = 1e-6;
-  options.parameter_tolerance = 1e-12;
+  options.parameter_tolerance = 1e-6;
   ceres::Solver::Summary summary;
   Solve(options, &problem, &summary);
   LOG(INFO) << summary.FullReport();
diff --git a/frc971/vision/target_map.json b/frc971/vision/target_map.json
index 3f6eb54..8d971f3 100644
--- a/frc971/vision/target_map.json
+++ b/frc971/vision/target_map.json
@@ -7,12 +7,11 @@
                 "y": -2.938,
                 "z": 0.463
             },
-            /* yaw of pi */
             "orientation": {
-                "w": 0.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 1.0
+                "w": -0.5,
+                "x": 0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -22,12 +21,11 @@
                 "y": -1.262,
                 "z": 0.463
             },
-            /* yaw of pi */
             "orientation": {
-                "w": 0.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 1.0
+                "w": -0.5,
+                "x": 0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -37,12 +35,11 @@
                 "y": 0.414,
                 "z": 0.463
             },
-            /* yaw of pi */
             "orientation": {
-                "w": 0.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 1.0
+                "w": -0.5,
+                "x": 0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -52,12 +49,11 @@
                 "y": 2.740,
                 "z": 0.695
             },
-            /* yaw of pi */
             "orientation": {
-                "w": 0.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 1.0
+                "w": -0.5,
+                "x": 0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -68,11 +64,10 @@
                 "z": 0.695
             },
             "orientation": {
-            /* yaw of 0 */
-                "w": 1.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 0.0
+                "w": 0.5,
+                "x": -0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -82,12 +77,11 @@
                 "y": 0.414,
                 "z": 0.463
             },
-            /* yaw of 0 */
             "orientation": {
-                "w": 1.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 0.0
+                "w": 0.5,
+                "x": -0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -97,12 +91,11 @@
                 "y": -1.262,
                 "z": 0.463
             },
-            /* yaw of 0 */
             "orientation": {
-                "w": 1.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 0.0
+                "w": 0.5,
+                "x": -0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -112,12 +105,11 @@
                 "y": -2.938,
                 "z": 0.463
             },
-            /* yaw of 0 */
             "orientation": {
-                "w": 1.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 0.0
+                "w": 0.5,
+                "x": -0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         }
     ]
diff --git a/y2023/BUILD b/y2023/BUILD
index b9842aa..3824ba1 100644
--- a/y2023/BUILD
+++ b/y2023/BUILD
@@ -47,15 +47,18 @@
         "//aos/network:web_proxy_main",
         "//aos/events/logging:log_cat",
         "//y2023/rockpi:imu_main",
+        "//frc971/image_streamer:image_streamer",
     ],
     data = [
         ":aos_config",
         "//frc971/rockpi:rockpi_config.json",
         "//y2023/constants:constants.json",
+        "//y2023/vision:image_streamer_start",
         "//y2023/www:www_files",
     ],
     dirs = [
         "//y2023/www:www_files",
+        "//frc971/image_streamer/www:www_files",
     ],
     start_binaries = [
         "//aos/network:message_bridge_client",
@@ -107,7 +110,6 @@
             "//frc971/vision:calibration_fbs",
             "//frc971/vision:target_map_fbs",
             "//frc971/vision:vision_fbs",
-            "//y2023/vision:april_debug_fbs",
             "@com_github_foxglove_schemas//:schemas",
         ],
         target_compatible_with = ["@platforms//os:linux"],
diff --git a/y2023/constants/971.json b/y2023/constants/971.json
index f00cc1c..9ac8c8b 100644
--- a/y2023/constants/971.json
+++ b/y2023/constants/971.json
@@ -1,16 +1,17 @@
 {
   "cameras": [
     {
-      "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-1_cam-23-05_2013-01-18_09-38-22.915096335.json' %}
+      "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-1_cam-23-05_ext_2023-02-22.json' %}
     },
     {
-      "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-2_cam-23-06_2013-01-18_09-32-06.409252911.json' %}
+      "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-2_cam-23-06_ext_2023-02-22.json' %}
     },
     {
-      "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-3_cam-23-07_2013-01-18_09-11-56.768663953.json' %}
+      "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-3_cam-23-07_ext_2023-02-22.json' %}
     },
     {
-      "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-4_cam-23-08_2013-01-18_09-27-45.150551614.json' %}
+      "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-4_cam-23-08_ext_2023-02-22.json' %}
     }
-  ]
+  ],
+  "target_map": {% include 'y2023/vision/maps/target_map.json' %}
 }
diff --git a/y2023/constants/9971.json b/y2023/constants/9971.json
index beb81e1..6d5cbf9 100644
--- a/y2023/constants/9971.json
+++ b/y2023/constants/9971.json
@@ -12,5 +12,6 @@
     {
       "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-4_cam-23-08_2013-01-18_09-27-45.150551614.json' %}
     }
-  ]
+  ],
+  "target_map": {% include 'y2023/vision/maps/target_map.json' %}
 }
diff --git a/y2023/control_loops/python/graph_edit.py b/y2023/control_loops/python/graph_edit.py
index b9e29d0..689401a 100644
--- a/y2023/control_loops/python/graph_edit.py
+++ b/y2023/control_loops/python/graph_edit.py
@@ -162,6 +162,8 @@
         self.ax = self.fig.add_subplot(111)
         plt.show(block=False)
 
+        self.index = 0
+
     def do_key_press(self, event):
         pass
 
@@ -330,8 +332,7 @@
 
         set_color(cr, Color(0.0, 0.5, 1.0))
         for segment in self.segments:
-            color = [0, random.random(), 1]
-            random.shuffle(color)
+            color = [0.2, random.random(), random.random()]
             set_color(cr, Color(color[0], color[1], color[2]))
             segment.DrawTo(cr, self.theta_version)
             with px(cr):
@@ -411,13 +412,16 @@
         elif keyval == Gdk.KEY_r:
             self.prev_segment_pt = self.now_segment_pt
 
+        elif keyval == Gdk.KEY_o:
+            # Only print the current segment.
+            print(repr(self.segments[self.index]))
         elif keyval == Gdk.KEY_p:
             # Print out the segments.
             print(repr(self.segments))
         elif keyval == Gdk.KEY_g:
             # Generate theta points.
             if self.segments:
-                print(repr(self.segments[0].ToThetaPoints()))
+                print(repr(self.segments[self.index].ToThetaPoints()))
         elif keyval == Gdk.KEY_e:
             best_pt = self.now_segment_pt
             best_dist = 1e10
@@ -432,6 +436,10 @@
                     best_dist = d
             self.now_segment_pt = best_pt
 
+        elif keyval == Gdk.KEY_k:
+            self.index += 1
+            self.index = self.index % len(self.segments)
+
         elif keyval == Gdk.KEY_t:
             # Toggle between theta and xy renderings
             if self.theta_version:
@@ -449,9 +457,9 @@
         elif keyval == Gdk.KEY_z:
             self.edit_control1 = not self.edit_control1
             if self.edit_control1:
-                self.now_segment_pt = self.segments[0].control1
+                self.now_segment_pt = self.segments[self.index].control1
             else:
-                self.now_segment_pt = self.segments[0].control2
+                self.now_segment_pt = self.segments[self.index].control2
             if not self.theta_version:
                 data = to_xy(self.now_segment_pt[0], self.now_segment_pt[1])
                 self.last_pos = (data[0], data[1])
@@ -474,9 +482,9 @@
         self.now_segment_pt = np.array(shift_angles(pt_theta))
 
         if self.edit_control1:
-            self.segments[0].control1 = self.now_segment_pt
+            self.segments[self.index].control1 = self.now_segment_pt
         else:
-            self.segments[0].control2 = self.now_segment_pt
+            self.segments[self.index].control2 = self.now_segment_pt
 
         print('Clicked at theta: %s' % (repr(self.now_segment_pt, )))
         if not self.theta_version:
@@ -485,9 +493,11 @@
                    self.circular_index_select))
 
         print('c1: np.array([%f, %f])' %
-              (self.segments[0].control1[0], self.segments[0].control1[1]))
+              (self.segments[self.index].control1[0],
+               self.segments[self.index].control1[1]))
         print('c2: np.array([%f, %f])' %
-              (self.segments[0].control2[0], self.segments[0].control2[1]))
+              (self.segments[self.index].control2[0],
+               self.segments[self.index].control2[1]))
 
         self.redraw()
 
diff --git a/y2023/rockpi/BUILD b/y2023/rockpi/BUILD
index 91e8729..701383b 100644
--- a/y2023/rockpi/BUILD
+++ b/y2023/rockpi/BUILD
@@ -7,5 +7,6 @@
         "//aos:init",
         "//aos/events:shm_event_loop",
         "//frc971/imu_reader:imu",
+        "//y2023:constants",
     ],
 )
diff --git a/y2023/rockpi/imu_main.cc b/y2023/rockpi/imu_main.cc
index ac0c141..d53b3fb 100644
--- a/y2023/rockpi/imu_main.cc
+++ b/y2023/rockpi/imu_main.cc
@@ -2,6 +2,7 @@
 #include "aos/init.h"
 #include "aos/realtime.h"
 #include "frc971/imu_reader/imu.h"
+#include "y2023/constants.h"
 
 DEFINE_string(config, "aos_config.json", "Path to the config file to use.");
 
@@ -12,8 +13,8 @@
       aos::configuration::ReadConfig(FLAGS_config);
 
   aos::ShmEventLoop event_loop(&config.message());
-  // TODO(austin): Set the ratio...
-  frc971::imu::Imu imu(&event_loop, 1.0);
+  frc971::imu::Imu imu(&event_loop,
+                       y2023::constants::Values::DrivetrainEncoderToMeters(1));
 
   event_loop.SetRuntimeAffinity(aos::MakeCpusetFromCpus({0}));
   event_loop.SetRuntimeRealtimePriority(55);
diff --git a/y2023/vision/BUILD b/y2023/vision/BUILD
index 839f11e..a0b0ce5 100644
--- a/y2023/vision/BUILD
+++ b/y2023/vision/BUILD
@@ -1,5 +1,3 @@
-load("@com_github_google_flatbuffers//:build_defs.bzl", "flatbuffer_cc_library")
-
 cc_binary(
     name = "camera_reader",
     srcs = [
@@ -33,7 +31,6 @@
         "//frc971/constants:constants_sender_lib",
         "//frc971/vision:vision_fbs",
         "//third_party:opencv",
-        "//y2023/vision:april_debug_fbs",
         "//y2023/vision:vision_util",
         "@com_google_absl//absl/strings",
     ],
@@ -75,14 +72,6 @@
     ],
 )
 
-flatbuffer_cc_library(
-    name = "april_debug_fbs",
-    srcs = ["april_debug.fbs"],
-    gen_reflections = 1,
-    target_compatible_with = ["@platforms//os:linux"],
-    visibility = ["//visibility:public"],
-)
-
 cc_library(
     name = "aprilrobotics_lib",
     srcs = [
@@ -92,7 +81,6 @@
     target_compatible_with = ["@platforms//os:linux"],
     visibility = ["//y2023:__subpackages__"],
     deps = [
-        ":april_debug_fbs",
         ":vision_util",
         "//aos:init",
         "//aos/events:shm_event_loop",
@@ -122,6 +110,12 @@
     ],
 )
 
+filegroup(
+    name = "image_streamer_start",
+    srcs = ["image_streamer_start.sh"],
+    visibility = ["//visibility:public"],
+)
+
 cc_binary(
     name = "foxglove_image_converter",
     srcs = ["foxglove_image_converter.cc"],
diff --git a/y2023/vision/april_debug.fbs b/y2023/vision/april_debug.fbs
deleted file mode 100644
index 4442448..0000000
--- a/y2023/vision/april_debug.fbs
+++ /dev/null
@@ -1,21 +0,0 @@
-namespace y2023.vision;
-
-// Stores image xy pixel coordinates of apriltag corners
-struct Point {
-     x:double (id: 0);
-     y:double (id: 1);
-}
-
-// Corner locations for each apriltag
-table AprilCorners {
-  id:uint64 (id: 0);
-  // Will always have 4 values, one for each corner
-  points: [Point] (id: 1);
-}
-
-// List of positions of all apriltags detected in current frame
-table AprilDebug {
-  corners: [AprilCorners] (id: 0);
-}
-
-root_type AprilDebug;
diff --git a/y2023/vision/aprilrobotics.cc b/y2023/vision/aprilrobotics.cc
index 07eee64..b132f85 100644
--- a/y2023/vision/aprilrobotics.cc
+++ b/y2023/vision/aprilrobotics.cc
@@ -27,8 +27,8 @@
           chrono::milliseconds(5)),
       target_map_sender_(
           event_loop->MakeSender<frc971::vision::TargetMap>("/camera")),
-      april_debug_sender_(
-          event_loop->MakeSender<y2023::vision::AprilDebug>("/camera")) {
+      image_annotations_sender_(
+          event_loop->MakeSender<foxglove::ImageAnnotations>("/camera")) {
   tag_family_ = tag16h5_create();
   tag_detector_ = apriltag_detector_create();
 
@@ -80,7 +80,7 @@
 void AprilRoboticsDetector::HandleImage(cv::Mat image_grayscale,
                                         aos::monotonic_clock::time_point eof) {
   std::vector<std::pair<apriltag_detection_t, apriltag_pose_t>> detections =
-      DetectTags(image_grayscale);
+      DetectTags(image_grayscale, eof);
 
   auto builder = target_map_sender_.MakeBuilder();
   std::vector<flatbuffers::Offset<frc971::vision::TargetPoseFbs>> target_poses;
@@ -136,7 +136,8 @@
 }
 
 std::vector<std::pair<apriltag_detection_t, apriltag_pose_t>>
-AprilRoboticsDetector::DetectTags(cv::Mat image) {
+AprilRoboticsDetector::DetectTags(cv::Mat image,
+                                  aos::monotonic_clock::time_point eof) {
   const aos::monotonic_clock::time_point start_time =
       aos::monotonic_clock::now();
 
@@ -153,9 +154,9 @@
 
   std::vector<std::pair<apriltag_detection_t, apriltag_pose_t>> results;
 
-  std::vector<flatbuffers::Offset<AprilCorners>> corners_vector;
+  std::vector<std::vector<cv::Point2f>> corners_vector;
 
-  auto builder = april_debug_sender_.MakeBuilder();
+  auto builder = image_annotations_sender_.MakeBuilder();
 
   for (int i = 0; i < zarray_size(detections); i++) {
     apriltag_detection_t *det;
@@ -195,33 +196,19 @@
                      .count()
               << " seconds for pose estimation";
 
-      std::vector<Point> corner_points;
-
+      std::vector<cv::Point2f> corner_points;
       corner_points.emplace_back(det->p[0][0], det->p[0][1]);
       corner_points.emplace_back(det->p[1][0], det->p[1][1]);
       corner_points.emplace_back(det->p[2][0], det->p[2][1]);
       corner_points.emplace_back(det->p[3][0], det->p[3][1]);
 
-      auto corner_points_fbs =
-          builder.fbb()->CreateVectorOfStructs(corner_points);
-
-      AprilCorners::Builder april_corners_builder =
-          builder.MakeBuilder<AprilCorners>();
-
-      april_corners_builder.add_id(det->id);
-      april_corners_builder.add_points(corner_points_fbs);
-
-      corners_vector.emplace_back(april_corners_builder.Finish());
+      corners_vector.emplace_back(corner_points);
     }
   }
 
-  auto corners_vector_fbs = builder.fbb()->CreateVector(corners_vector);
-
-  AprilDebug::Builder april_debug_builder = builder.MakeBuilder<AprilDebug>();
-
-  april_debug_builder.add_corners(corners_vector_fbs);
-
-  builder.CheckOk(builder.Send(april_debug_builder.Finish()));
+  const auto annotations_offset =
+      frc971::vision::BuildAnnotations(eof, corners_vector, 5.0, builder.fbb());
+  builder.CheckOk(builder.Send(annotations_offset));
 
   apriltag_detections_destroy(detections);
 
diff --git a/y2023/vision/aprilrobotics.h b/y2023/vision/aprilrobotics.h
index 4477856..bbc1661 100644
--- a/y2023/vision/aprilrobotics.h
+++ b/y2023/vision/aprilrobotics.h
@@ -17,7 +17,6 @@
 #include "third_party/apriltag/apriltag_pose.h"
 #include "third_party/apriltag/tag16h5.h"
 #include "y2023/constants/constants_generated.h"
-#include "y2023/vision/april_debug_generated.h"
 
 DECLARE_int32(team_number);
 
@@ -36,9 +35,9 @@
   void UndistortDetection(apriltag_detection_t *det) const;
 
   std::vector<std::pair<apriltag_detection_t, apriltag_pose_t>> DetectTags(
-      cv::Mat image);
+      cv::Mat image, aos::monotonic_clock::time_point eof);
 
-  const cv::Mat extrinsics() const { return extrinsics_; }
+  const std::optional<cv::Mat> extrinsics() const { return extrinsics_; }
   const cv::Mat intrinsics() const { return intrinsics_; }
   const cv::Mat dist_coeffs() const { return dist_coeffs_; }
 
@@ -57,14 +56,14 @@
   const frc971::vision::calibration::CameraCalibration *calibration_;
   cv::Mat intrinsics_;
   cv::Mat projection_matrix_;
-  cv::Mat extrinsics_;
+  std::optional<cv::Mat> extrinsics_;
   cv::Mat dist_coeffs_;
 
   aos::Ftrace ftrace_;
 
   frc971::vision::ImageCallback image_callback_;
   aos::Sender<frc971::vision::TargetMap> target_map_sender_;
-  aos::Sender<y2023::vision::AprilDebug> april_debug_sender_;
+  aos::Sender<foxglove::ImageAnnotations> image_annotations_sender_;
 };
 
 }  // namespace vision
diff --git a/y2023/vision/calib_files/calibration_pi-971-1_cam-23-01_2013-01-18_08-56-17.194305339.json b/y2023/vision/calib_files/calibration_pi-971-1_cam-23-01_2013-01-18_08-56-17.194305339.json
index 1f30579..c9d960a 100755
--- a/y2023/vision/calib_files/calibration_pi-971-1_cam-23-01_2013-01-18_08-56-17.194305339.json
+++ b/y2023/vision/calib_files/calibration_pi-971-1_cam-23-01_2013-01-18_08-56-17.194305339.json
@@ -19,24 +19,6 @@
   0.008949,
   -0.079813
  ],
- "fixed_extrinsics": [
-  1.0,
-  0.0,
-  0.0,
-  0.0,
-  0.0,
-  1.0,
-  0.0,
-  0.0,
-  0.0,
-  0.0,
-  1.0,
-  0.0,
-  0.0,
-  0.0,
-  0.0,
-  1.0
- ],
  "calibration_timestamp": 1358499377194305339,
  "camera_id": "23-01"
 }
diff --git a/y2023/vision/calib_files/calibration_pi-971-1_cam-23-05_ext_2023-02-22.json b/y2023/vision/calib_files/calibration_pi-971-1_cam-23-05_ext_2023-02-22.json
new file mode 100644
index 0000000..b21da40
--- /dev/null
+++ b/y2023/vision/calib_files/calibration_pi-971-1_cam-23-05_ext_2023-02-22.json
@@ -0,0 +1 @@
+{ "node_name": "pi1", "team_number": 971, "intrinsics": [ 890.980713, 0.0, 619.298645, 0.0, 890.668762, 364.009766, 0.0, 0.0, 1.0 ], "fixed_extrinsics": { "data": [ -0.487722, 0.222354, 0.844207, 0.025116, 0.864934, -0.008067, 0.501821, -0.246003, 0.118392, 0.974933, -0.188387, 0.532497, 0.0, 0.0, 0.0, 1.0 ] }, "dist_coeffs": [ -0.449172, 0.252318, 0.000881, -0.000615, -0.082208 ], "calibration_timestamp": 1358501902915096335, "camera_id": "23-05" }
\ No newline at end of file
diff --git a/y2023/vision/calib_files/calibration_pi-971-2_cam-23-06_ext_2023-02-22.json b/y2023/vision/calib_files/calibration_pi-971-2_cam-23-06_ext_2023-02-22.json
new file mode 100644
index 0000000..4759fda
--- /dev/null
+++ b/y2023/vision/calib_files/calibration_pi-971-2_cam-23-06_ext_2023-02-22.json
@@ -0,0 +1 @@
+{ "node_name": "pi2", "team_number": 971, "intrinsics": [ 893.242981, 0.0, 639.796692, 0.0, 892.498718, 354.109344, 0.0, 0.0, 1.0 ], "fixed_extrinsics": { "data": [ 0.852213, 0.227336, 0.471224, 0.220072, 0.485092, -0.005909, -0.874443, -0.175232, -0.196008, 0.973799, -0.115315, 0.61409, 0.0, 0.0, 0.0, 1.0 ] }, "dist_coeffs": [ -0.451751, 0.252422, 0.000531, 0.000079, -0.079369 ], "calibration_timestamp": 1358501526409252911, "camera_id": "23-06" }
\ No newline at end of file
diff --git a/y2023/vision/calib_files/calibration_pi-971-3_cam-23-07_ext_2023-02-22.json b/y2023/vision/calib_files/calibration_pi-971-3_cam-23-07_ext_2023-02-22.json
new file mode 100644
index 0000000..8c86d20
--- /dev/null
+++ b/y2023/vision/calib_files/calibration_pi-971-3_cam-23-07_ext_2023-02-22.json
@@ -0,0 +1 @@
+{ "node_name": "pi3", "team_number": 971, "intrinsics": [ 892.627869, 0.0, 629.289978, 0.0, 891.73761, 373.299896, 0.0, 0.0, 1.0 ], "fixed_extrinsics": { "data": [ 0.492232, -0.163335, -0.855002, 0.020122, -0.866067, 0.006706, -0.499883, -0.174518, 0.087382, 0.986548, -0.138158, 0.645307, 0.0, 0.0, 0.0, 1.0 ] }, "dist_coeffs": [ -0.454901, 0.266778, -0.000316, -0.000469, -0.091357 ], "calibration_timestamp": 1358500316768663953, "camera_id": "23-07" }
\ No newline at end of file
diff --git a/y2023/vision/calib_files/calibration_pi-971-4_cam-23-08_ext_2023-02-22.json b/y2023/vision/calib_files/calibration_pi-971-4_cam-23-08_ext_2023-02-22.json
new file mode 100644
index 0000000..92ab69c
--- /dev/null
+++ b/y2023/vision/calib_files/calibration_pi-971-4_cam-23-08_ext_2023-02-22.json
@@ -0,0 +1 @@
+{ "node_name": "pi4", "team_number": 971, "intrinsics": [ 891.88385, 0.0, 642.268616, 0.0, 890.626465, 353.272919, 0.0, 0.0, 1.0 ], "fixed_extrinsics": { "data": [ -0.865915, -0.186983, -0.463928, -0.014873, -0.473362, 0.006652, 0.880843, -0.215738, -0.161617, 0.982341, -0.094271, 0.676433, 0.0, 0.0, 0.0, 1.0 ] }, "dist_coeffs": [ -0.448426, 0.246817, 0.000002, 0.000948, -0.076717 ], "calibration_timestamp": 1358501265150551614, "camera_id": "23-08" }
\ No newline at end of file
diff --git a/y2023/vision/calibrate_extrinsics.cc b/y2023/vision/calibrate_extrinsics.cc
index eab8724..10e1639 100644
--- a/y2023/vision/calibrate_extrinsics.cc
+++ b/y2023/vision/calibrate_extrinsics.cc
@@ -7,8 +7,8 @@
 #include "aos/network/team_number.h"
 #include "aos/time/time.h"
 #include "aos/util/file.h"
-#include "frc971/control_loops/quaternion_utils.h"
 #include "frc971/constants/constants_sender_lib.h"
+#include "frc971/control_loops/quaternion_utils.h"
 #include "frc971/vision/extrinsics_calibration.h"
 #include "frc971/vision/vision_generated.h"
 #include "frc971/wpilib/imu_batch_generated.h"
@@ -17,7 +17,7 @@
 
 DEFINE_string(pi, "pi-7971-2", "Pi name to calibrate.");
 DEFINE_bool(plot, false, "Whether to plot the resulting data.");
-DEFINE_string(target_type, "charuco",
+DEFINE_string(target_type, "charuco_diamond",
               "Type of target: aruco|charuco|charuco_diamond");
 DEFINE_string(image_channel, "/camera", "Channel to listen for images on");
 DEFINE_string(output_logs, "/tmp/calibration/",
@@ -51,16 +51,17 @@
       TargetType target_type = TargetTypeFromString(FLAGS_target_type);
       std::unique_ptr<aos::EventLoop> constants_event_loop =
           factory->MakeEventLoop("constants_fetcher", pi_event_loop->node());
-      frc971::constants::ConstantsFetcher<y2023::Constants> constants_fetcher(
-          constants_event_loop.get());
-      *intrinsics =
-          FLAGS_base_intrinsics.empty()
-              ? aos::RecursiveCopyFlatBuffer(
-                    y2023::vision::FindCameraCalibration(
-                        constants_fetcher.constants(),
-                        pi_event_loop->node()->name()->string_view()))
-              : aos::JsonFileToFlatbuffer<calibration::CameraCalibration>(
-                    FLAGS_base_intrinsics);
+      if (FLAGS_base_intrinsics.empty()) {
+        frc971::constants::ConstantsFetcher<y2023::Constants> constants_fetcher(
+            constants_event_loop.get());
+        *intrinsics =
+            aos::RecursiveCopyFlatBuffer(y2023::vision::FindCameraCalibration(
+                constants_fetcher.constants(),
+                pi_event_loop->node()->name()->string_view()));
+      } else {
+        *intrinsics = aos::JsonFileToFlatbuffer<calibration::CameraCalibration>(
+            FLAGS_base_intrinsics);
+      }
       extractor = std::make_unique<Calibration>(
           factory, pi_event_loop.get(), imu_event_loop.get(), FLAGS_pi,
           &intrinsics->message(), target_type, FLAGS_image_channel, data);
@@ -80,6 +81,14 @@
     // Now, accumulate all the data into the data object.
     aos::logger::LogReader reader(
         aos::logger::SortParts(aos::logger::FindLogs(argc, argv)));
+    reader.RemapLoggedChannel<foxglove::CompressedImage>("/pi1/camera");
+    reader.RemapLoggedChannel<foxglove::CompressedImage>("/pi2/camera");
+    reader.RemapLoggedChannel<foxglove::CompressedImage>("/pi3/camera");
+    reader.RemapLoggedChannel<foxglove::CompressedImage>("/pi4/camera");
+    reader.RemapLoggedChannel<foxglove::ImageAnnotations>("/pi1/camera");
+    reader.RemapLoggedChannel<foxglove::ImageAnnotations>("/pi2/camera");
+    reader.RemapLoggedChannel<foxglove::ImageAnnotations>("/pi3/camera");
+    reader.RemapLoggedChannel<foxglove::ImageAnnotations>("/pi4/camera");
 
     aos::SimulatedEventLoopFactory factory(reader.configuration());
     reader.RegisterWithoutStarting(&factory);
@@ -122,29 +131,40 @@
   CHECK(data.camera_samples_size() > 0) << "Didn't get any camera observations";
 
   // And now we have it, we can start processing it.
-  const Eigen::Quaternion<double> nominal_initial_orientation(
-      frc971::controls::ToQuaternionFromRotationVector(
-          Eigen::Vector3d(0.0, 0.0, M_PI)));
-  const Eigen::Quaternion<double> nominal_pivot_to_camera(
+  // NOTE: For y2023, with no turret, pivot == imu
+
+  // Define the mapping that takes imu frame (with z up) to camera frame (with z
+  // pointing out)
+  const Eigen::Quaterniond R_precam_cam(
+      Eigen::AngleAxisd(-0.5 * M_PI, Eigen::Vector3d::UnitX()) *
+      Eigen::AngleAxisd(-0.5 * M_PI, Eigen::Vector3d::UnitZ()));
+  // Set up reasonable initial conditions for the pis.
+  Eigen::Quaternion<double> nominal_initial_orientation(
+      Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitZ()));
+  Eigen::Quaternion<double> nominal_pivot_to_camera(
+      Eigen::AngleAxisd(0.5 * M_PI, Eigen::Vector3d::UnitX()) *
+      Eigen::AngleAxisd(-0.75 * M_PI, Eigen::Vector3d::UnitY()));
+  Eigen::Vector3d nominal_pivot_to_camera_translation(8.0, 8.0, 0.0);
+  Eigen::Quaternion<double> nominal_board_to_world(
       Eigen::AngleAxisd(-0.5 * M_PI, Eigen::Vector3d::UnitX()));
-  const Eigen::Quaternion<double> nominal_board_to_world(
-      Eigen::AngleAxisd(0.5 * M_PI, Eigen::Vector3d::UnitX()));
   Eigen::Matrix<double, 6, 1> nominal_initial_state =
       Eigen::Matrix<double, 6, 1>::Zero();
-  // Set x value to 0.5 m (center view on the board)
-  // nominal_initial_state(0, 0) = 0.5;
-  // Set y value to -1 m (approx distance from imu to board/world)
-  nominal_initial_state(1, 0) = -1.0;
+  // Set y value to 1 m (approx distance from imu to the board/world)
+  nominal_initial_state(1, 0) = 1.0;
 
   CalibrationParameters calibration_parameters;
   calibration_parameters.initial_orientation = nominal_initial_orientation;
   calibration_parameters.pivot_to_camera = nominal_pivot_to_camera;
+  calibration_parameters.pivot_to_camera_translation =
+      nominal_pivot_to_camera_translation;
+  // Board to world rotation
   calibration_parameters.board_to_world = nominal_board_to_world;
+  // Initial imu location (and velocity)
   calibration_parameters.initial_state = nominal_initial_state;
   calibration_parameters.has_pivot = false;
 
-  // Show the inverse of pivot_to_camera, since camera_to_pivot tells where the
-  // camera is with respect to the pivot frame
+  // Show the inverse of pivot_to_camera, since camera_to_pivot tells where
+  // the camera is with respect to the pivot frame
   const Eigen::Affine3d nominal_affine_pivot_to_camera =
       Eigen::Translation3d(calibration_parameters.pivot_to_camera_translation) *
       nominal_pivot_to_camera;
@@ -156,22 +176,20 @@
   LOG(INFO) << "Initial Conditions for solver.  Assumes:\n"
             << "1) board origin is same as world, but rotated pi/2 about "
                "x-axis, so z points out\n"
-            << "2) pivot origin matches imu origin\n"
+            << "2) pivot origin matches imu origin (since there's no turret)\n"
             << "3) camera is offset from pivot (depends on which camera)";
 
-  LOG(INFO)
-      << "Nominal initial_orientation of imu w.r.t. world (angle-axis vector): "
-      << frc971::controls::ToRotationVectorFromQuaternion(
-             nominal_initial_orientation)
-             .transpose();
+  LOG(INFO) << "Nominal initial_orientation of imu w.r.t. world "
+               "(angle-axis vector): "
+            << frc971::controls::ToRotationVectorFromQuaternion(
+                   nominal_initial_orientation)
+                   .transpose();
   LOG(INFO) << "Nominal initial_state: \n"
             << "Position: "
             << nominal_initial_state.block<3, 1>(0, 0).transpose() << "\n"
             << "Velocity: "
             << nominal_initial_state.block<3, 1>(3, 0).transpose();
-  // TODO<Jim>: Might be nice to take out the rotation component that maps into
-  // camera image coordinates (with x right, y down, z forward)
-  LOG(INFO) << "Nominal camera_to_pivot (angle-axis vector): "
+  LOG(INFO) << "Nominal camera_to_pivot rotation (angle-axis vector): "
             << frc971::controls::ToRotationVectorFromQuaternion(
                    nominal_camera_to_pivot_rotation)
                    .transpose();
@@ -188,17 +206,20 @@
 
   if (!FLAGS_output_calibration.empty()) {
     aos::WriteFlatbufferToJson(FLAGS_output_calibration, merged_calibration);
+  } else {
+    LOG(WARNING) << "Calibration filename not provided, so not saving it";
   }
 
   LOG(INFO) << "RESULTS OF CALIBRATION SOLVER:";
   std::cout << aos::FlatbufferToJson(&merged_calibration.message())
             << std::endl;
+
   LOG(INFO) << "initial_orientation of imu w.r.t. world (angle-axis vector): "
             << frc971::controls::ToRotationVectorFromQuaternion(
                    calibration_parameters.initial_orientation)
                    .transpose();
   LOG(INFO)
-      << "initial_state: \n"
+      << "initial_state (imu): \n"
       << "Position: "
       << calibration_parameters.initial_state.block<3, 1>(0, 0).transpose()
       << "\n"
@@ -208,15 +229,17 @@
   const Eigen::Affine3d affine_pivot_to_camera =
       Eigen::Translation3d(calibration_parameters.pivot_to_camera_translation) *
       calibration_parameters.pivot_to_camera;
+  const Eigen::Affine3d affine_camera_to_pivot =
+      affine_pivot_to_camera.inverse();
   const Eigen::Quaterniond camera_to_pivot_rotation(
-      affine_pivot_to_camera.inverse().rotation());
+      affine_camera_to_pivot.rotation());
   const Eigen::Vector3d camera_to_pivot_translation(
-      affine_pivot_to_camera.inverse().translation());
-  LOG(INFO) << "camera to pivot (angle-axis vec): "
+      affine_camera_to_pivot.translation());
+  LOG(INFO) << "camera to pivot(imu) rotation (angle-axis vec): "
             << frc971::controls::ToRotationVectorFromQuaternion(
                    camera_to_pivot_rotation)
                    .transpose();
-  LOG(INFO) << "camera to pivot translation: "
+  LOG(INFO) << "camera to pivot(imu) translation: "
             << camera_to_pivot_translation.transpose();
   LOG(INFO) << "board_to_world (rotation) "
             << frc971::controls::ToRotationVectorFromQuaternion(
@@ -227,12 +250,24 @@
   LOG(INFO) << "gyro_bias " << calibration_parameters.gyro_bias.transpose();
   LOG(INFO) << "gravity " << 9.81 * calibration_parameters.gravity_scalar;
 
-  LOG(INFO) << "pivot_to_camera change "
+  LOG(INFO) << "Checking delta from nominal (initial condition) to solved "
+               "values:";
+  LOG(INFO) << "nominal_pivot_to_camera rotation: "
+            << frc971::controls::ToRotationVectorFromQuaternion(
+                   R_precam_cam * nominal_pivot_to_camera)
+                   .transpose();
+  LOG(INFO) << "solved pivot_to_camera rotation: "
+            << frc971::controls::ToRotationVectorFromQuaternion(
+                   R_precam_cam * calibration_parameters.pivot_to_camera)
+                   .transpose();
+
+  LOG(INFO) << "pivot_to_camera rotation delta (zero if the IC's match the "
+               "solved value) "
             << frc971::controls::ToRotationVectorFromQuaternion(
                    calibration_parameters.pivot_to_camera *
                    nominal_pivot_to_camera.inverse())
                    .transpose();
-  LOG(INFO) << "board_to_world delta "
+  LOG(INFO) << "board_to_world rotation change "
             << frc971::controls::ToRotationVectorFromQuaternion(
                    calibration_parameters.board_to_world *
                    nominal_board_to_world.inverse())
diff --git a/y2023/vision/image_streamer_start.sh b/y2023/vision/image_streamer_start.sh
new file mode 100755
index 0000000..48d9da7
--- /dev/null
+++ b/y2023/vision/image_streamer_start.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+# Some configurations to avoid dropping frames
+# 640x480@30fps, 400x300@60fps.
+# Bitrate 500000-1500000
+WIDTH=640
+HEIGHT=480
+BITRATE=1500000
+LISTEN_ON="/camera/downsized"
+# Don't interfere with field webpage
+STREAMING_PORT=1181
+
+# Handle weirdness with openssl and gstreamer
+export OPENSSL_CONF=""
+
+# Enable for verbose logging
+#export GST_DEBUG=4
+
+export LD_LIBRARY_PATH=/usr/lib/aarch64-linux-gnu/gstreamer-1.0
+
+exec ./image_streamer --width=$WIDTH --height=$HEIGHT --bitrate=$BITRATE --listen_on=$LISTEN_ON --config=aos_config.json --streaming_port=$STREAMING_PORT
+
diff --git a/y2023/vision/maps/target_map.json b/y2023/vision/maps/target_map.json
index 3f6eb54..2aa1995 100644
--- a/y2023/vision/maps/target_map.json
+++ b/y2023/vision/maps/target_map.json
@@ -1,3 +1,7 @@
+/* Targets have positive Z axis pointing into the board, positive X to the right
+   when looking at the board, and positive Y is down when looking at the board.
+   This means that you will get an identity rotation from the camera to target
+   frame when the target is upright, flat, and centered in the camera's view. */
 {
     "target_poses": [
         {
@@ -7,12 +11,11 @@
                 "y": -2.938,
                 "z": 0.463
             },
-            /* yaw of pi */
             "orientation": {
-                "w": 0.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 1.0
+                "w": -0.5,
+                "x": 0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -22,12 +25,11 @@
                 "y": -1.262,
                 "z": 0.463
             },
-            /* yaw of pi */
             "orientation": {
-                "w": 0.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 1.0
+                "w": -0.5,
+                "x": 0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -37,12 +39,11 @@
                 "y": 0.414,
                 "z": 0.463
             },
-            /* yaw of pi */
             "orientation": {
-                "w": 0.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 1.0
+                "w": -0.5,
+                "x": 0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -52,12 +53,11 @@
                 "y": 2.740,
                 "z": 0.695
             },
-            /* yaw of pi */
             "orientation": {
-                "w": 0.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 1.0
+                "w": -0.5,
+                "x": 0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -68,11 +68,10 @@
                 "z": 0.695
             },
             "orientation": {
-            /* yaw of 0 */
-                "w": 1.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 0.0
+                "w": 0.5,
+                "x": -0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -82,12 +81,11 @@
                 "y": 0.414,
                 "z": 0.463
             },
-            /* yaw of 0 */
             "orientation": {
-                "w": 1.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 0.0
+                "w": 0.5,
+                "x": -0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -97,12 +95,11 @@
                 "y": -1.262,
                 "z": 0.463
             },
-            /* yaw of 0 */
             "orientation": {
-                "w": 1.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 0.0
+                "w": 0.5,
+                "x": -0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         },
         {
@@ -112,12 +109,11 @@
                 "y": -2.938,
                 "z": 0.463
             },
-            /* yaw of 0 */
             "orientation": {
-                "w": 1.0,
-                "x": 0.0,
-                "y": 0.0,
-                "z": 0.0
+                "w": 0.5,
+                "x": -0.5,
+                "y": -0.5,
+                "z": 0.5
             }
         }
     ]
diff --git a/y2023/vision/target_mapping.cc b/y2023/vision/target_mapping.cc
index 921c172..b41d7d5 100644
--- a/y2023/vision/target_mapping.cc
+++ b/y2023/vision/target_mapping.cc
@@ -29,10 +29,10 @@
 namespace calibration = frc971::vision::calibration;
 
 // Change reference frame from camera to robot
-Eigen::Affine3d CameraToRobotDetection(Eigen::Affine3d H_camrob_target,
+Eigen::Affine3d CameraToRobotDetection(Eigen::Affine3d H_camera_target,
                                        Eigen::Affine3d extrinsics) {
-  const Eigen::Affine3d H_robot_camrob = extrinsics;
-  const Eigen::Affine3d H_robot_target = H_robot_camrob * H_camrob_target;
+  const Eigen::Affine3d H_robot_camera = extrinsics;
+  const Eigen::Affine3d H_robot_target = H_robot_camera * H_camera_target;
   return H_robot_target;
 }
 
@@ -47,20 +47,13 @@
     const TargetMapper::TargetPose target_pose =
         PoseUtils::TargetPoseFromFbs(*target_pose_fbs);
 
-    Eigen::Affine3d H_camcv_target =
+    Eigen::Affine3d H_camera_target =
         Eigen::Translation3d(target_pose.pose.p) * target_pose.pose.q;
-    // With X, Y, Z being robot axes and x, y, z being camera axes,
-    // x = -Y, y = -Z, z = X
-    static const Eigen::Affine3d H_camcv_camrob =
-        Eigen::Affine3d((Eigen::Matrix4d() << 0.0, -1.0, 0.0, 0.0, 0.0, 0.0,
-                         -1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0)
-                            .finished());
-    Eigen::Affine3d H_camrob_target = H_camcv_camrob.inverse() * H_camcv_target;
     Eigen::Affine3d H_robot_target =
-        CameraToRobotDetection(H_camrob_target, extrinsics);
+        CameraToRobotDetection(H_camera_target, extrinsics);
 
     ceres::examples::Pose3d target_pose_camera =
-        PoseUtils::Affine3dToPose3d(H_camrob_target);
+        PoseUtils::Affine3dToPose3d(H_camera_target);
     double distance_from_camera = target_pose_camera.p.norm();
 
     CHECK(map.has_monotonic_timestamp_ns())
@@ -86,7 +79,7 @@
   auto detector_ptr = std::make_unique<AprilRoboticsDetector>(
       detection_event_loop, kImageChannel);
   // Get the camera extrinsics
-  cv::Mat extrinsics_cv = detector_ptr->extrinsics();
+  cv::Mat extrinsics_cv = detector_ptr->extrinsics().value();
   Eigen::Matrix4d extrinsics_matrix;
   cv::cv2eigen(extrinsics_cv, extrinsics_matrix);
   const auto extrinsics = Eigen::Affine3d(extrinsics_matrix);
diff --git a/y2023/vision/viewer.cc b/y2023/vision/viewer.cc
index 7877a57..4fb96bc 100644
--- a/y2023/vision/viewer.cc
+++ b/y2023/vision/viewer.cc
@@ -10,7 +10,6 @@
 #include "frc971/vision/vision_generated.h"
 #include "opencv2/calib3d.hpp"
 #include "opencv2/imgproc.hpp"
-#include "y2023/vision/april_debug_generated.h"
 #include "y2023/vision/vision_util.h"
 
 DEFINE_string(config, "aos_config.json", "Path to the config file to use.");
@@ -28,10 +27,8 @@
 using frc971::vision::CameraImage;
 
 bool DisplayLoop(const cv::Mat intrinsics, const cv::Mat dist_coeffs,
-                 aos::Fetcher<CameraImage> *image_fetcher,
-                 aos::Fetcher<AprilDebug> *april_debug_fetcher) {
+                 aos::Fetcher<CameraImage> *image_fetcher) {
   const CameraImage *image;
-  std::optional<const AprilDebug *> april_debug = std::nullopt;
 
   // Read next image
   if (!image_fetcher->Fetch()) {
@@ -41,12 +38,6 @@
   image = image_fetcher->get();
   CHECK(image != nullptr) << "Couldn't read image";
 
-  if (april_debug_fetcher->Fetch()) {
-    april_debug = april_debug_fetcher->get();
-  } else {
-    VLOG(2) << "Couldn't fetch next target map";
-  }
-
   // Create color image:
   cv::Mat image_color_mat(cv::Size(image->cols(), image->rows()), CV_8UC2,
                           (void *)image->data()->data());
@@ -66,17 +57,6 @@
 
   cv::Mat undistorted_image;
   cv::undistort(bgr_image, undistorted_image, intrinsics, dist_coeffs);
-
-  if (april_debug.has_value() && april_debug.value()->corners()->size() > 0) {
-    for (const auto *corners : *april_debug.value()->corners()) {
-      std::vector<cv::Point> points;
-      for (const auto *point_fbs : *corners->points()) {
-        points.emplace_back(point_fbs->x(), point_fbs->y());
-      }
-      cv::polylines(undistorted_image, points, true, cv::Scalar(255, 0, 0), 10);
-    }
-  }
-
   cv::imshow("Display", undistorted_image);
 
   int keystroke = cv::waitKey(1);
@@ -109,14 +89,11 @@
 
   aos::Fetcher<CameraImage> image_fetcher =
       event_loop.MakeFetcher<CameraImage>(FLAGS_channel);
-  aos::Fetcher<AprilDebug> april_debug_fetcher =
-      event_loop.MakeFetcher<AprilDebug>("/camera");
 
   // Run the display loop
   event_loop.AddPhasedLoop(
       [&](int) {
-        if (!DisplayLoop(intrinsics, dist_coeffs, &image_fetcher,
-                         &april_debug_fetcher)) {
+        if (!DisplayLoop(intrinsics, dist_coeffs, &image_fetcher)) {
           LOG(INFO) << "Calling event_loop Exit";
           event_loop.Exit();
         };
diff --git a/y2023/vision/vision_util.cc b/y2023/vision/vision_util.cc
index f4937e5..ca5ad89 100644
--- a/y2023/vision/vision_util.cc
+++ b/y2023/vision/vision_util.cc
@@ -18,11 +18,15 @@
   LOG(FATAL) << ": Failed to find camera calibration for " << node_name;
 }
 
-cv::Mat CameraExtrinsics(
+std::optional<cv::Mat> CameraExtrinsics(
     const frc971::vision::calibration::CameraCalibration *camera_calibration) {
   CHECK(!camera_calibration->has_turret_extrinsics())
       << "No turret on 2023 robot";
 
+  if (!camera_calibration->has_fixed_extrinsics()) {
+    return std::nullopt;
+  }
+  CHECK(camera_calibration->fixed_extrinsics()->has_data());
   cv::Mat result(4, 4, CV_32F,
                  const_cast<void *>(static_cast<const void *>(
                      camera_calibration->fixed_extrinsics()->data()->data())));
diff --git a/y2023/vision/vision_util.h b/y2023/vision/vision_util.h
index ce1a69d..79f5c92 100644
--- a/y2023/vision/vision_util.h
+++ b/y2023/vision/vision_util.h
@@ -10,7 +10,7 @@
 const frc971::vision::calibration::CameraCalibration *FindCameraCalibration(
     const y2023::Constants &calibration_data, std::string_view node_name);
 
-cv::Mat CameraExtrinsics(
+std::optional<cv::Mat> CameraExtrinsics(
     const frc971::vision::calibration::CameraCalibration *camera_calibration);
 
 cv::Mat CameraIntrinsics(
diff --git a/y2023/y2023_logger.json b/y2023/y2023_logger.json
index e5d62b0..bb78bc5 100644
--- a/y2023/y2023_logger.json
+++ b/y2023/y2023_logger.json
@@ -373,7 +373,7 @@
       "max_size": 1843456,
       "num_readers": 4,
       "read_method": "PIN",
-      "num_senders": 1
+      "num_senders": 18
     },
     {
       "name": "/logger/camera/downsized",
@@ -442,24 +442,17 @@
   ],
   "applications": [
     {
-      "name": "logger_message_bridge_client",
-      "autostart": false,
-      "executable_name": "message_bridge_client.sh",
-      "args": [
-        "--rmem=8388608",
-        "--rt_priority=16"
-      ],
+      "name": "message_bridge_client",
+      "executable_name": "message_bridge_client",
+      "user": "pi",
       "nodes": [
         "logger"
       ]
     },
     {
-      "name": "logger_message_bridge_server",
+      "name": "message_bridge_server",
       "executable_name": "message_bridge_server",
-      "autostart": false,
-      "args": [
-        "--rt_priority=16"
-      ],
+      "user": "pi",
       "nodes": [
         "logger"
       ]
@@ -467,6 +460,7 @@
     {
       "name": "logger_camera_reader",
       "executable_name": "camera_reader",
+      "user": "pi",
       "args": ["--enable_ftrace", "--send_downsized_images"],
       "nodes": [
         "logger"
@@ -476,6 +470,7 @@
       "name": "image_logger",
       "executable_name": "logger_main",
       "autostart": false,
+      "user": "pi",
       "args": [
         "--logging_folder",
         "",
@@ -491,7 +486,8 @@
     {
       "name": "image_streamer",
       "executable_name": "image_streamer_start.sh",
-      "autostart": false,
+      "autostart": true,
+      "user": "pi",
       "nodes": [
         "logger"
       ]
@@ -499,6 +495,7 @@
     {
       "name": "constants_sender",
       "autorestart": false,
+      "user": "pi",
       "nodes": [
         "logger"
       ]
diff --git a/y2023/y2023_pi_template.json b/y2023/y2023_pi_template.json
index b6eaf04..9abc08d 100644
--- a/y2023/y2023_pi_template.json
+++ b/y2023/y2023_pi_template.json
@@ -217,15 +217,6 @@
       ]
     },
     {
-      "name": "/pi{{ NUM }}/camera",
-      "type": "y2023.vision.AprilDebug",
-      "source_node": "pi{{ NUM }}",
-      "frequency": 40,
-      "num_senders": 2,
-      "max_size": 40000,
-      "logger": "LOCAL_LOGGER"
-    },
-    {
       "name": "/pi{{ NUM }}/aos/remote_timestamps/imu/pi{{ NUM }}/camera/frc971-vision-TargetMap",
       "type": "aos.message_bridge.RemoteMessage",
       "frequency": 80,