Merge "Revert "Make it so fadvise works in the presence of ResetStatistics""
diff --git a/WORKSPACE b/WORKSPACE
index 698c066..f411682 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -769,9 +769,9 @@
     deps = ["@//third_party/allwpilib/wpimath"],
 )
 """,
-    sha256 = "decff0a28fa4a167696cc2e1122b6a5acd2fef01d3bfd356d8cad25bb487a191",
+    sha256 = "340a9c8e726e2eb365b7a40a722df05fe7c7072c5c4a617fa0218eb6d074ad9f",
     urls = [
-        "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/api-cpp/23.0.10/api-cpp-23.0.10-headers.zip",
+        "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/api-cpp/23.0.11/api-cpp-23.0.11-headers.zip",
     ],
 )
 
@@ -793,9 +793,9 @@
     target_compatible_with = ['@//tools/platforms/hardware:roborio'],
 )
 """,
-    sha256 = "00aea02c583d109056e2716e73b7d70e84d5c56a6daebd1dc9f4612c430894f8",
+    sha256 = "11f392bebfe54f512be9ef59809e1a10c4497e0ce92970645f054e7e04fe7ef6",
     urls = [
-        "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/api-cpp/23.0.10/api-cpp-23.0.10-linuxathena.zip",
+        "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/api-cpp/23.0.11/api-cpp-23.0.11-linuxathena.zip",
     ],
 )
 
@@ -808,9 +808,9 @@
     hdrs = glob(['ctre/**/*.h', 'ctre/**/*.hpp']),
 )
 """,
-    sha256 = "3d503df97b711c150c0b50238f644c528e55d5b82418c8e3970c79faa14b749c",
+    sha256 = "7585e1bd9e581dd745e7f040ab521b966b40a04d05bc7fa82d6dafe2fb65764e",
     urls = [
-        "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/tools/23.0.10/tools-23.0.10-headers.zip",
+        "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/tools/23.0.11/tools-23.0.11-headers.zip",
     ],
 )
 
@@ -832,9 +832,9 @@
     target_compatible_with = ['@//tools/platforms/hardware:roborio'],
 )
 """,
-    sha256 = "4ada1ed9e11c208da9e8a8e8a648a0fe426e6717121ebc2f1392ae3ddc7f2b8c",
+    sha256 = "b1daadfe782c43ed32c2e1a3956998f9604a3fc9282ef866fd8dc1482f3b8cc9",
     urls = [
-        "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/tools/23.0.10/tools-23.0.10-linuxathena.zip",
+        "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/tools/23.0.11/tools-23.0.11-linuxathena.zip",
     ],
 )
 
diff --git a/aos/events/logging/lzma_encoder.cc b/aos/events/logging/lzma_encoder.cc
index f354638..d348b70 100644
--- a/aos/events/logging/lzma_encoder.cc
+++ b/aos/events/logging/lzma_encoder.cc
@@ -84,6 +84,13 @@
   // efficient to allocate if we go over a threshold to keep the static memory
   // in use smaller, or just allocate the worst case like we are doing here?
   input_buffer_.resize(max_message_size);
+
+  // Start our queues out with a reasonable space allocation.  The cost of this
+  // is negligible compared to the buffer cost, but removes a bunch of
+  // allocations at runtime.
+  queue_.reserve(4);
+  free_queue_.reserve(4);
+  return_queue_.reserve(4);
 }
 
 LzmaEncoder::~LzmaEncoder() { lzma_end(&stream_); }
@@ -111,6 +118,9 @@
 void LzmaEncoder::Clear(const int n) {
   CHECK_GE(n, 0);
   CHECK_LE(static_cast<size_t>(n), queue_size());
+  for (int i = 0; i < n; ++i) {
+    free_queue_.emplace_back(std::move(queue_[i]));
+  }
   queue_.erase(queue_.begin(), queue_.begin() + n);
   if (queue_.empty()) {
     stream_.next_out = nullptr;
@@ -155,8 +165,13 @@
     // construction or a Reset, or when an input buffer is large enough to fill
     // more than one output buffer.
     if (stream_.avail_out == 0) {
-      queue_.emplace_back();
-      queue_.back().resize(kEncodedBufferSizeBytes);
+      if (!free_queue_.empty()) {
+        queue_.emplace_back(std::move(free_queue_.back()));
+        free_queue_.pop_back();
+      } else {
+        queue_.emplace_back();
+        queue_.back().resize(kEncodedBufferSizeBytes);
+      }
       stream_.next_out = queue_.back().data();
       stream_.avail_out = kEncodedBufferSizeBytes;
       // Update the byte count.
diff --git a/aos/events/logging/lzma_encoder.h b/aos/events/logging/lzma_encoder.h
index b4964fb..3136d93 100644
--- a/aos/events/logging/lzma_encoder.h
+++ b/aos/events/logging/lzma_encoder.h
@@ -54,6 +54,11 @@
   lzma_stream stream_;
   uint32_t compression_preset_;
   std::vector<ResizeableBuffer> queue_;
+  // Since we pretty much just allocate a couple of buffers, then allocate and
+  // release them over and over with very similar memory usage and without much
+  // variation in the peak usage, put the allocated chunks in a free queue to
+  // reduce fragmentation.
+  std::vector<ResizeableBuffer> free_queue_;
   bool finished_ = false;
   // Total bytes that resulted from encoding raw data since the last call to
   // Reset.
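
The free_queue_ added above is essentially a small buffer pool: Clear() now parks retired output buffers instead of freeing them, and the encode loop reuses a parked buffer before allocating a fresh one. Below is a minimal standalone sketch of that recycling pattern, using std::vector<uint8_t> in place of ResizeableBuffer and illustrative names (BufferPool, Acquire, Release) that are not part of the AOS codebase.

#include <cstddef>
#include <cstdint>
#include <utility>
#include <vector>

class BufferPool {
 public:
  // Take a buffer off the free list if one is available, otherwise allocate.
  std::vector<uint8_t> Acquire(std::size_t size) {
    if (!free_queue_.empty()) {
      std::vector<uint8_t> buffer = std::move(free_queue_.back());
      free_queue_.pop_back();
      buffer.resize(size);
      return buffer;
    }
    return std::vector<uint8_t>(size);
  }

  // Hand a buffer back so its allocation is reused rather than released.
  void Release(std::vector<uint8_t> buffer) {
    free_queue_.emplace_back(std::move(buffer));
  }

 private:
  std::vector<std::vector<uint8_t>> free_queue_;
};
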
diff --git a/aos/network/sctp_client.cc b/aos/network/sctp_client.cc
index a2c0439..e3da03a 100644
--- a/aos/network/sctp_client.cc
+++ b/aos/network/sctp_client.cc
@@ -13,6 +13,10 @@
 #include "aos/unique_malloc_ptr.h"
 #include "glog/logging.h"
 
+DEFINE_int32(sinit_max_init_timeout, 0,
+             "Timeout in milliseconds for retrying the INIT packet when "
+             "connecting to the message bridge server");
+
 namespace aos {
 namespace message_bridge {
 
@@ -29,6 +33,8 @@
     memset(&initmsg, 0, sizeof(struct sctp_initmsg));
     initmsg.sinit_num_ostreams = streams;
     initmsg.sinit_max_instreams = streams;
+    // Max timeout in milliseconds for the INIT packet.
+    initmsg.sinit_max_init_timeo = FLAGS_sinit_max_init_timeout;
     PCHECK(setsockopt(fd(), IPPROTO_SCTP, SCTP_INITMSG, &initmsg,
                       sizeof(struct sctp_initmsg)) == 0);
   }
diff --git a/frc971/can_logger/BUILD b/frc971/can_logger/BUILD
new file mode 100644
index 0000000..a3b002a
--- /dev/null
+++ b/frc971/can_logger/BUILD
@@ -0,0 +1,39 @@
+load("@com_github_google_flatbuffers//:build_defs.bzl", "flatbuffer_cc_library")
+
+cc_binary(
+    name = "can_logger",
+    srcs = [
+        "can_logger_main.cc",
+    ],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":can_logger_lib",
+        "//aos:init",
+        "//aos/events:shm_event_loop",
+        "//aos/time",
+        "@com_github_google_glog//:glog",
+    ],
+)
+
+cc_library(
+    name = "can_logger_lib",
+    srcs = [
+        "can_logger.cc",
+        "can_logger.h",
+    ],
+    deps = [
+        ":can_logging_fbs",
+        "//aos/events:event_loop",
+        "//aos/scoped:scoped_fd",
+        "@com_github_google_glog//:glog",
+    ],
+)
+
+flatbuffer_cc_library(
+    name = "can_logging_fbs",
+    srcs = [
+        "can_logging.fbs",
+    ],
+    gen_reflections = 1,
+    visibility = ["//visibility:public"],
+)
diff --git a/frc971/can_logger/can_logger.cc b/frc971/can_logger/can_logger.cc
new file mode 100644
index 0000000..6b4258a
--- /dev/null
+++ b/frc971/can_logger/can_logger.cc
@@ -0,0 +1,88 @@
+#include "frc971/can_logger/can_logger.h"
+
+namespace frc971 {
+namespace can_logger {
+
+CanLogger::CanLogger(aos::EventLoop *event_loop,
+                     std::string_view interface_name)
+    : fd_(socket(PF_CAN, SOCK_RAW | SOCK_NONBLOCK, CAN_RAW)),
+      frames_sender_(event_loop->MakeSender<CanFrame>("/can")) {
+  struct ifreq ifr;
+  strcpy(ifr.ifr_name, interface_name.data());
+  PCHECK(ioctl(fd_.get(), SIOCGIFINDEX, &ifr) == 0)
+      << "Failed to get index for interface " << interface_name;
+
+  int enable_canfd = true;
+  PCHECK(setsockopt(fd_.get(), SOL_CAN_RAW, CAN_RAW_FD_FRAMES, &enable_canfd,
+                    sizeof(enable_canfd)) == 0)
+      << "Failed to enable CAN FD";
+
+  struct sockaddr_can addr;
+  addr.can_family = AF_CAN;
+  addr.can_ifindex = ifr.ifr_ifindex;
+
+  PCHECK(bind(fd_.get(), reinterpret_cast<struct sockaddr *>(&addr),
+              sizeof(addr)) == 0)
+      << "Failed to bind socket to interface " << interface_name;
+
+  int receive_buffer_size;
+  socklen_t opt_size = sizeof(receive_buffer_size);
+  PCHECK(getsockopt(fd_.get(), SOL_SOCKET, SO_RCVBUF, &receive_buffer_size,
+                    &opt_size) == 0);
+  CHECK_EQ(opt_size, sizeof(receive_buffer_size));
+  VLOG(0) << "CAN receive buffer is " << receive_buffer_size << " bytes";
+
+  aos::TimerHandler *timer_handler = event_loop->AddTimer([this]() { Poll(); });
+  timer_handler->set_name("CAN logging Loop");
+  timer_handler->Setup(event_loop->monotonic_now(), kPollPeriod);
+}
+
+void CanLogger::Poll() {
+  VLOG(2) << "Polling";
+  int frames_read = 0;
+  while (ReadFrame()) {
+    frames_read++;
+  }
+  VLOG(1) << "Read " << frames_read << " frames to end of buffer";
+}
+
+bool CanLogger::ReadFrame() {
+  errno = 0;
+  struct canfd_frame frame;
+  ssize_t bytes_read = read(fd_.get(), &frame, sizeof(struct canfd_frame));
+
+  if (bytes_read < 0 && (errno == EAGAIN || errno == EWOULDBLOCK)) {
+    // There are no more frames sitting in the receive buffer.
+    return false;
+  }
+
+  VLOG(2) << "Read " << bytes_read << " bytes";
+  PCHECK(bytes_read > 0);
+  PCHECK(bytes_read == static_cast<ssize_t>(CAN_MTU) ||
+         bytes_read == static_cast<ssize_t>(CANFD_MTU))
+      << "Incomplete can frame";
+
+  struct timeval tv;
+  PCHECK(ioctl(fd_.get(), SIOCGSTAMP, &tv) == 0)
+      << "Failed to get timestamp of CAN frame";
+
+  aos::Sender<CanFrame>::Builder builder = frames_sender_.MakeBuilder();
+
+  auto frame_data = builder.fbb()->CreateVector<uint8_t>(frame.data, frame.len);
+
+  CanFrame::Builder can_frame_builder = builder.MakeBuilder<CanFrame>();
+  can_frame_builder.add_can_id(frame.can_id);
+  can_frame_builder.add_data(frame_data);
+  can_frame_builder.add_monotonic_timestamp_ns(
+      static_cast<std::chrono::nanoseconds>(
+          std::chrono::seconds(tv.tv_sec) +
+          std::chrono::microseconds(tv.tv_usec))
+          .count());
+
+  builder.CheckOk(builder.Send(can_frame_builder.Finish()));
+
+  return true;
+}
+
+}  // namespace can_logger
+}  // namespace frc971
diff --git a/frc971/can_logger/can_logger.h b/frc971/can_logger/can_logger.h
new file mode 100644
index 0000000..cf37841
--- /dev/null
+++ b/frc971/can_logger/can_logger.h
@@ -0,0 +1,50 @@
+#ifndef FRC971_CAN_LOGGER_CAN_LOGGER_H_
+#define FRC971_CAN_LOGGER_CAN_LOGGER_H_
+
+#include <linux/can.h>
+#include <linux/can/raw.h>
+#include <linux/sockios.h>
+#include <net/if.h>
+#include <sys/ioctl.h>
+#include <sys/socket.h>
+#include <sys/time.h>
+
+#include <chrono>
+
+#include "aos/events/event_loop.h"
+#include "aos/realtime.h"
+#include "aos/scoped/scoped_fd.h"
+#include "frc971/can_logger/can_logging_generated.h"
+
+namespace frc971 {
+namespace can_logger {
+
+// This class listens to all the traffic on a SocketCAN interface and sends it
+// on the aos event loop so it can be logged with the aos logging
+// infrastructure.
+class CanLogger {
+ public:
+  static constexpr std::chrono::milliseconds kPollPeriod =
+      std::chrono::milliseconds(100);
+
+  CanLogger(aos::EventLoop *event_loop,
+            std::string_view interface_name = "can0");
+
+  CanLogger(const CanLogger &) = delete;
+  CanLogger &operator=(const CanLogger &) = delete;
+
+ private:
+  void Poll();
+
+  // Read a CAN frame from the socket and send it out on the event loop.
+  // Returns true if successful and false if the receive buffer is empty.
+  bool ReadFrame();
+
+  aos::ScopedFD fd_;
+  aos::Sender<CanFrame> frames_sender_;
+};
+
+}  // namespace can_logger
+}  // namespace frc971
+
+#endif  // FRC971_CAN_LOGGER_CAN_LOGGER_H_
diff --git a/frc971/can_logger/can_logger_main.cc b/frc971/can_logger/can_logger_main.cc
new file mode 100644
index 0000000..42c7162
--- /dev/null
+++ b/frc971/can_logger/can_logger_main.cc
@@ -0,0 +1,18 @@
+#include "aos/events/shm_event_loop.h"
+#include "aos/init.h"
+#include "frc971/can_logger/can_logger.h"
+
+int main(int argc, char **argv) {
+  ::aos::InitGoogle(&argc, &argv);
+
+  aos::FlatbufferDetachedBuffer<aos::Configuration> config =
+      aos::configuration::ReadConfig("aos_config.json");
+
+  ::aos::ShmEventLoop event_loop(&config.message());
+
+  frc971::can_logger::CanLogger can_logger(&event_loop, "can0");
+
+  event_loop.Run();
+
+  return 0;
+}
diff --git a/frc971/can_logger/can_logging.fbs b/frc971/can_logger/can_logging.fbs
new file mode 100644
index 0000000..d6ec8b9
--- /dev/null
+++ b/frc971/can_logger/can_logging.fbs
@@ -0,0 +1,13 @@
+namespace frc971.can_logger;
+
+// A message to represent a single CAN or CAN FD frame.
+table CanFrame {
+  // CAN id + flags
+  can_id:uint32 (id: 0);
+  // The body of the CAN frame up to 64 bytes long.
+  data:[ubyte] (id: 1);
+  // The hardware timestamp of when the frame came in
+  monotonic_timestamp_ns:uint64 (id: 2);
+}
+
+root_type CanFrame;
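
As a rough illustration of consuming the schema above, the sketch below rebuilds a kernel canfd_frame from a logged CanFrame. The accessor names follow the usual flatc-generated C++ conventions for this schema; the event-loop plumbing around it is assumed rather than taken from this change.

#include <linux/can.h>

#include <cstddef>
#include <cstring>

#include "frc971/can_logger/can_logging_generated.h"
#include "glog/logging.h"

namespace frc971 {
namespace can_logger {

// Rebuild a kernel-style CAN FD frame from a logged CanFrame message.
canfd_frame ToCanfdFrame(const CanFrame &message) {
  canfd_frame frame;
  std::memset(&frame, 0, sizeof(frame));
  frame.can_id = message.can_id();
  CHECK(message.data() != nullptr) << "Logged frame is missing its data";
  const std::size_t data_size = message.data()->size();
  CHECK_LE(data_size, static_cast<std::size_t>(CANFD_MAX_DLEN));
  frame.len = data_size;
  std::memcpy(frame.data, message.data()->data(), data_size);
  return frame;
}

}  // namespace can_logger
}  // namespace frc971
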
diff --git a/frc971/control_loops/drivetrain/drivetrain.h b/frc971/control_loops/drivetrain/drivetrain.h
index 0d100ed..f448760 100644
--- a/frc971/control_loops/drivetrain/drivetrain.h
+++ b/frc971/control_loops/drivetrain/drivetrain.h
@@ -67,9 +67,10 @@
 
   // Returns the current localizer state.
   Eigen::Matrix<double, 5, 1> trajectory_state() {
+    // Use the regular Kalman filter's left/right velocities because they are
+    // generally smoother.
     return (Eigen::Matrix<double, 5, 1>() << localizer_->x(), localizer_->y(),
-            localizer_->theta(), localizer_->left_velocity(),
-            localizer_->right_velocity())
+            localizer_->theta(), DrivetrainXHat()(1), DrivetrainXHat()(3))
         .finished();
   }
 
diff --git a/frc971/control_loops/drivetrain/drivetrain_config.h b/frc971/control_loops/drivetrain/drivetrain_config.h
index 51f92b1..9c17fc8 100644
--- a/frc971/control_loops/drivetrain/drivetrain_config.h
+++ b/frc971/control_loops/drivetrain/drivetrain_config.h
@@ -13,6 +13,28 @@
 namespace control_loops {
 namespace drivetrain {
 
+// What to use the top two buttons for on the pistol grip.
+enum class PistolTopButtonUse {
+  // Normal shifting.
+  kShift,
+  // Line following (currently just uses top button).
+  kLineFollow,
+  // Don't use the top button.
+  kNone,
+};
+
+enum class PistolSecondButtonUse {
+  kTurn1,
+  kShiftLow,
+  kNone,
+};
+
+enum class PistolBottomButtonUse {
+  kControlLoopDriving,
+  kSlowDown,
+  kNone,
+};
+
 enum class ShifterType : int32_t {
   HALL_EFFECT_SHIFTER = 0,  // Detect when inbetween gears.
   SIMPLE_SHIFTER = 1,       // Switch gears without speedmatch logic.
@@ -145,6 +167,10 @@
 
   LineFollowConfig line_follow_config{};
 
+  PistolTopButtonUse top_button_use = PistolTopButtonUse::kShift;
+  PistolSecondButtonUse second_button_use = PistolSecondButtonUse::kShiftLow;
+  PistolBottomButtonUse bottom_button_use = PistolBottomButtonUse::kSlowDown;
+
   // Converts the robot state to a linear distance position, velocity.
   static Eigen::Matrix<Scalar, 2, 1> LeftRightToLinear(
       const Eigen::Matrix<Scalar, 7, 1> &left_right) {
diff --git a/frc971/control_loops/drivetrain/drivetrain_lib_test.cc b/frc971/control_loops/drivetrain/drivetrain_lib_test.cc
index 824f0a8..edda034 100644
--- a/frc971/control_loops/drivetrain/drivetrain_lib_test.cc
+++ b/frc971/control_loops/drivetrain/drivetrain_lib_test.cc
@@ -657,8 +657,8 @@
       CHECK_NOTNULL(drivetrain_status_fetcher_->trajectory_logging())->theta();
   // As a sanity check, compare both against absolute angle and the spline's
   // goal angle.
-  EXPECT_NEAR(0.0, ::aos::math::DiffAngle(actual(2), 0.0), 2e-2);
-  EXPECT_NEAR(0.0, ::aos::math::DiffAngle(actual(2), expected_theta), 2e-2);
+  EXPECT_NEAR(0.0, ::aos::math::DiffAngle(actual(2), 0.0), 5e-2);
+  EXPECT_NEAR(0.0, ::aos::math::DiffAngle(actual(2), expected_theta), 5e-2);
 }
 
 // Tests that simple spline with a single goal message.
@@ -1005,7 +1005,7 @@
   const ::Eigen::Vector2d actual = drivetrain_plant_.GetPosition();
   // Expect the x position comparison to fail; everything else to succeed.
   spline_estimate_tolerance_ = 0.11;
-  spline_control_tolerance_ = 0.11;
+  spline_control_tolerance_ = 0.09;
   EXPECT_GT(std::abs(estimated_x - expected_x), spline_control_tolerance_);
   EXPECT_NEAR(estimated_y, expected_y, spline_control_tolerance_);
   EXPECT_NEAR(actual(0), estimated_x, spline_estimate_tolerance_);
diff --git a/frc971/control_loops/drivetrain/trajectory.cc b/frc971/control_loops/drivetrain/trajectory.cc
index 27b31a0..33a8577 100644
--- a/frc971/control_loops/drivetrain/trajectory.cc
+++ b/frc971/control_loops/drivetrain/trajectory.cc
@@ -765,7 +765,7 @@
   // TODO(james): Pull these out into a config.
   Eigen::Matrix<double, 5, 5> Q;
   Q.setIdentity();
-  Q.diagonal() << 20.0, 20.0, 20.0, 10.0, 10.0;
+  Q.diagonal() << 30.0, 30.0, 20.0, 15.0, 15.0;
   Q *= 2.0;
   Q = (Q * Q).eval();
 
diff --git a/frc971/imu_reader/imu_watcher.cc b/frc971/imu_reader/imu_watcher.cc
index ed9c65f..e4e100a 100644
--- a/frc971/imu_reader/imu_watcher.cc
+++ b/frc971/imu_reader/imu_watcher.cc
@@ -18,7 +18,8 @@
     std::function<
         void(aos::monotonic_clock::time_point, aos::monotonic_clock::time_point,
              std::optional<Eigen::Vector2d>, Eigen::Vector3d, Eigen::Vector3d)>
-        callback)
+        callback,
+    TimestampSource timestamp_source)
     : dt_config_(dt_config),
       callback_(std::move(callback)),
       zeroer_(zeroing::ImuZeroer::FaultBehavior::kTemporary),
@@ -28,7 +29,8 @@
       right_encoder_(
           -EncoderWrapDistance(drivetrain_distance_per_encoder_tick) / 2.0,
           EncoderWrapDistance(drivetrain_distance_per_encoder_tick)) {
-  event_loop->MakeWatcher("/localizer", [this](const IMUValuesBatch &values) {
+  event_loop->MakeWatcher("/localizer", [this, timestamp_source](
+                                            const IMUValuesBatch &values) {
     CHECK(values.has_readings());
     for (const IMUValues *value : *values.readings()) {
       zeroer_.InsertAndProcessMeasurement(*value);
@@ -69,18 +71,21 @@
                     left_encoder_.Unwrap(value->left_encoder()),
                     right_encoder_.Unwrap(value->right_encoder())});
       {
-        // If we can't trust the imu reading, just naively increment the
-        // pico timestamp.
-        const aos::monotonic_clock::time_point pico_timestamp =
-            zeroer_.Faulted()
-                ? (last_pico_timestamp_.has_value()
-                       ? last_pico_timestamp_.value() + kNominalDt
-                       : aos::monotonic_clock::epoch())
-                : aos::monotonic_clock::time_point(
-                      std::chrono::microseconds(value->pico_timestamp_us()));
         const aos::monotonic_clock::time_point pi_read_timestamp =
             aos::monotonic_clock::time_point(
                 std::chrono::nanoseconds(value->monotonic_timestamp_ns()));
+        // If we can't trust the imu reading, just naively increment the
+        // pico timestamp.
+        const aos::monotonic_clock::time_point pico_timestamp =
+            timestamp_source == TimestampSource::kPi
+                ? pi_read_timestamp
+                : (zeroer_.Faulted()
+                       ? (last_pico_timestamp_.has_value()
+                              ? last_pico_timestamp_.value() + kNominalDt
+                              : aos::monotonic_clock::epoch())
+                       : aos::monotonic_clock::time_point(
+                             std::chrono::microseconds(
+                                 value->pico_timestamp_us())));
         // TODO(james): If we get large enough drift off of the pico,
         // actually do something about it.
         if (!pico_offset_.has_value()) {
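
Unrolled for readability, the nested ternaries added above pick the pico timestamp as follows (a paraphrase of the same logic, not code from this patch):

  aos::monotonic_clock::time_point pico_timestamp;
  if (timestamp_source == TimestampSource::kPi) {
    // Trust the pi-side read time of the message directly.
    pico_timestamp = pi_read_timestamp;
  } else if (zeroer_.Faulted()) {
    // The IMU reading can't be trusted, so naively advance the previous pico
    // timestamp by one nominal period.
    pico_timestamp = last_pico_timestamp_.has_value()
                         ? last_pico_timestamp_.value() + kNominalDt
                         : aos::monotonic_clock::epoch();
  } else {
    pico_timestamp = aos::monotonic_clock::time_point(
        std::chrono::microseconds(value->pico_timestamp_us()));
  }
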
diff --git a/frc971/imu_reader/imu_watcher.h b/frc971/imu_reader/imu_watcher.h
index 8867266..fe1f77d 100644
--- a/frc971/imu_reader/imu_watcher.h
+++ b/frc971/imu_reader/imu_watcher.h
@@ -20,6 +20,16 @@
   // Expected frequency of messages from the pico-based IMU.
   static constexpr std::chrono::microseconds kNominalDt{500};
 
+  enum class TimestampSource {
+    // Use the pico's timestamp to provide timestamps to the callbacks.
+    kPico,
+    // Use pi-based timestamps--this can result in a clock with marginally
+    // more jitter relative to the sample times than the pico's clock, but it
+    // is less likely to run into major problems when something goes wrong
+    // with the pico <-> pi interface.
+    kPi,
+  };
+
   // The callback specified by the user will take:
   // sample_time_pico: The pico-based timestamp corresponding to the measurement
   //   time. This will be offset by roughly pico_offset_error from the pi's
@@ -40,7 +50,8 @@
       std::function<void(
           aos::monotonic_clock::time_point, aos::monotonic_clock::time_point,
           std::optional<Eigen::Vector2d>, Eigen::Vector3d, Eigen::Vector3d)>
-          callback);
+          callback,
+      TimestampSource timestamp_source = TimestampSource::kPico);
 
   const zeroing::ImuZeroer &zeroer() const { return zeroer_; }
 
diff --git a/frc971/input/drivetrain_input.cc b/frc971/input/drivetrain_input.cc
index 69795dd..239eddf 100644
--- a/frc971/input/drivetrain_input.cc
+++ b/frc971/input/drivetrain_input.cc
@@ -262,7 +262,8 @@
 
 std::unique_ptr<PistolDrivetrainInputReader> PistolDrivetrainInputReader::Make(
     ::aos::EventLoop *event_loop, bool default_high_gear,
-    TopButtonUse top_button_use) {
+    PistolTopButtonUse top_button_use, PistolSecondButtonUse second_button_use,
+    PistolBottomButtonUse bottom_button_use) {
   // Pistol Grip controller
   const JoystickAxis kTriggerHigh(1, 1), kTriggerLow(1, 4),
       kTriggerVelocityHigh(1, 2), kTriggerVelocityLow(1, 5),
@@ -275,6 +276,7 @@
   const ButtonLocation kQuickTurn(1, 3);
 
   const ButtonLocation kTopButton(1, 1);
+
   const ButtonLocation kSecondButton(1, 2);
   const ButtonLocation kBottomButton(1, 4);
   // Non-existant button for nops.
@@ -282,17 +284,20 @@
 
   // TODO(james): Make a copy assignment operator for ButtonLocation so we don't
   // have to shoehorn in these ternary operators.
-  const ButtonLocation kTurn1 = (top_button_use == TopButtonUse::kLineFollow)
-                                    ? kSecondButton
-                                    : kDummyButton;
+  const ButtonLocation kTurn1 =
+      (second_button_use == PistolSecondButtonUse::kTurn1) ? kSecondButton
+                                                           : kDummyButton;
   // Turn2 does closed loop driving.
   const ButtonLocation kTurn2 =
-      (top_button_use == TopButtonUse::kLineFollow) ? kTopButton : kDummyButton;
+      (top_button_use == PistolTopButtonUse::kLineFollow) ? kTopButton
+                                                          : kDummyButton;
 
   const ButtonLocation kShiftHigh =
-      (top_button_use == TopButtonUse::kShift) ? kTopButton : kDummyButton;
+      (top_button_use == PistolTopButtonUse::kShift) ? kTopButton
+                                                     : kDummyButton;
   const ButtonLocation kShiftLow =
-      (top_button_use == TopButtonUse::kShift) ? kSecondButton : kDummyButton;
+      (second_button_use == PistolSecondButtonUse::kShiftLow) ? kSecondButton
+                                                              : kDummyButton;
 
   std::unique_ptr<PistolDrivetrainInputReader> result(
       new PistolDrivetrainInputReader(
@@ -300,7 +305,12 @@
           kTriggerVelocityLow, kTriggerTorqueHigh, kTriggerTorqueLow,
           kTriggerHigh, kTriggerLow, kWheelVelocityHigh, kWheelVelocityLow,
           kWheelTorqueHigh, kWheelTorqueLow, kQuickTurn, kShiftHigh, kShiftLow,
-          kTurn1, kTurn2, kBottomButton));
+          kTurn1,
+          (bottom_button_use == PistolBottomButtonUse::kControlLoopDriving)
+              ? kBottomButton
+              : kTurn2,
+          (top_button_use == PistolTopButtonUse::kNone) ? kTopButton
+                                                        : kBottomButton));
 
   result->set_default_high_gear(default_high_gear);
   return result;
@@ -335,13 +345,25 @@
       drivetrain_input_reader = SteeringWheelDrivetrainInputReader::Make(
           event_loop, dt_config.default_high_gear);
       break;
-    case InputType::kPistol:
-      drivetrain_input_reader = PistolDrivetrainInputReader::Make(
-          event_loop, dt_config.default_high_gear,
+    case InputType::kPistol: {
+      // For backwards compatibility with pistol_grip_shift_enables_line_follow.
+      PistolTopButtonUse top_button_use =
           dt_config.pistol_grip_shift_enables_line_follow
-              ? PistolDrivetrainInputReader::TopButtonUse::kLineFollow
-              : PistolDrivetrainInputReader::TopButtonUse::kShift);
+              ? PistolTopButtonUse::kLineFollow
+              : dt_config.top_button_use;
+
+      PistolSecondButtonUse second_button_use = dt_config.second_button_use;
+      PistolBottomButtonUse bottom_button_use = dt_config.bottom_button_use;
+
+      if (top_button_use == PistolTopButtonUse::kLineFollow) {
+        second_button_use = PistolSecondButtonUse::kTurn1;
+      }
+
+      drivetrain_input_reader = PistolDrivetrainInputReader::Make(
+          event_loop, dt_config.default_high_gear, top_button_use,
+          second_button_use, bottom_button_use);
       break;
+    }
     case InputType::kXbox:
       drivetrain_input_reader = XboxDrivetrainInputReader::Make(event_loop);
       break;
diff --git a/frc971/input/drivetrain_input.h b/frc971/input/drivetrain_input.h
index a33d449..0cb724d 100644
--- a/frc971/input/drivetrain_input.h
+++ b/frc971/input/drivetrain_input.h
@@ -16,6 +16,10 @@
 namespace frc971 {
 namespace input {
 
+using control_loops::drivetrain::PistolBottomButtonUse;
+using control_loops::drivetrain::PistolSecondButtonUse;
+using control_loops::drivetrain::PistolTopButtonUse;
+
 // We have a couple different joystick configurations used to drive our skid
 // steer robots.  These configurations don't change very often, and there are a
 // small, discrete, set of them.  The interface to the drivetrain is the same
@@ -179,19 +183,13 @@
  public:
   using DrivetrainInputReader::DrivetrainInputReader;
 
-  // What to use the top two buttons for on the pistol grip.
-  enum class TopButtonUse {
-    // Normal shifting.
-    kShift,
-    // Line following (currently just uses top button).
-    kLineFollow,
-  };
-
   // Creates a DrivetrainInputReader with the corresponding joystick ports and
   // axis for the (cheap) pistol grip controller.
   static std::unique_ptr<PistolDrivetrainInputReader> Make(
       ::aos::EventLoop *event_loop, bool default_high_gear,
-      TopButtonUse top_button_use);
+      PistolTopButtonUse top_button_use,
+      PistolSecondButtonUse second_button_use,
+      PistolBottomButtonUse bottom_button_use);
 
  private:
   PistolDrivetrainInputReader(
diff --git a/frc971/vision/BUILD b/frc971/vision/BUILD
index f3ebecb..133281b 100644
--- a/frc971/vision/BUILD
+++ b/frc971/vision/BUILD
@@ -223,6 +223,7 @@
     hdrs = [
         "visualize_robot.h",
     ],
+    visibility = ["//visibility:public"],
     deps = [
         "//aos:init",
         "//third_party:opencv",
diff --git a/frc971/vision/target_map.fbs b/frc971/vision/target_map.fbs
index de79744..10f48cd 100644
--- a/frc971/vision/target_map.fbs
+++ b/frc971/vision/target_map.fbs
@@ -48,6 +48,15 @@
   // NOTE: not filtered by aprilrobotics.cc so that we can log
   // more detections.
   distortion_factor:double (id: 5);
+
+  // Ratio of the pose_error of the best estimate to the pose_error of the
+  // second-best estimate.
+  // Only filled out if this pose represents a live detection.
+  // This should be significantly less than 1; otherwise, this pose may be
+  // the wrong solution.
+  // NOTE: not filtered by aprilrobotics.cc so that we can log
+  // more detections.
+  pose_error_ratio:double (id: 6);
 }
 
 // Map of all target poses on a field.
diff --git a/frc971/vision/target_mapper.cc b/frc971/vision/target_mapper.cc
index dd332fc..dea883b 100644
--- a/frc971/vision/target_mapper.cc
+++ b/frc971/vision/target_mapper.cc
@@ -10,9 +10,11 @@
 DEFINE_double(distortion_noise_scalar, 1.0,
               "Scale the target pose distortion factor by this when computing "
               "the noise.");
+DEFINE_uint64(
+    frozen_target_id, 1,
+    "Freeze the pose of this target so the map can have one fixed point.");
 
 namespace frc971::vision {
-
 Eigen::Affine3d PoseUtils::Pose3dToAffine3d(
     const ceres::examples::Pose3d &pose3d) {
   Eigen::Affine3d H_world_pose =
@@ -160,8 +162,8 @@
 
   {
     // Noise for vision-based target localizations. Multiplying this matrix by
-    // the distance from camera to target squared results in the uncertainty in
-    // that measurement
+    // the distance from camera to target squared results in the uncertainty
+    // in that measurement
     TargetMapper::ConfidenceMatrix Q_vision =
         TargetMapper::ConfidenceMatrix::Zero();
     Q_vision(kX, kX) = std::pow(0.045, 2);
@@ -232,6 +234,15 @@
   return std::nullopt;
 }
 
+std::optional<TargetMapper::TargetPose> TargetMapper::GetTargetPoseById(
+    TargetId target_id) {
+  if (target_poses_.count(target_id) > 0) {
+    return TargetMapper::TargetPose{target_id, target_poses_[target_id]};
+  }
+
+  return std::nullopt;
+}
+
 // Taken from ceres/examples/slam/pose_graph_3d/pose_graph_3d.cc
 // Constructs the nonlinear least squares optimization problem from the pose
 // graph constraints.
@@ -289,7 +300,10 @@
   // algorithm has internal damping which mitigates this issue, but it is
   // better to properly constrain the gauge freedom. This can be done by
   // setting one of the poses as constant so the optimizer cannot change it.
-  ceres::examples::MapOfPoses::iterator pose_start_iter = poses->begin();
+  CHECK_NE(poses->count(FLAGS_frozen_target_id), 0ul)
+      << "Got no poses for frozen target id " << FLAGS_frozen_target_id;
+  ceres::examples::MapOfPoses::iterator pose_start_iter =
+      poses->find(FLAGS_frozen_target_id);
   CHECK(pose_start_iter != poses->end()) << "There are no poses.";
   problem->SetParameterBlockConstant(pose_start_iter->second.p.data());
   problem->SetParameterBlockConstant(pose_start_iter->second.q.coeffs().data());
@@ -302,6 +316,7 @@
   ceres::Solver::Options options;
   options.max_num_iterations = FLAGS_max_num_iterations;
   options.linear_solver_type = ceres::SPARSE_NORMAL_CHOLESKY;
+  options.minimizer_progress_to_stdout = true;
 
   ceres::Solver::Summary summary;
   ceres::Solve(options, problem, &summary);
diff --git a/frc971/vision/target_mapper.h b/frc971/vision/target_mapper.h
index 3724832..35c0977 100644
--- a/frc971/vision/target_mapper.h
+++ b/frc971/vision/target_mapper.h
@@ -46,6 +46,9 @@
   static std::optional<TargetPose> GetTargetPoseById(
       std::vector<TargetPose> target_poses, TargetId target_id);
 
+  // Version that looks up the pose in the internal target_poses_ map.
+  std::optional<TargetPose> GetTargetPoseById(TargetId target_id);
+
   ceres::examples::MapOfPoses target_poses() { return target_poses_; }
 
  private:
diff --git a/frc971/vision/visualize_robot.cc b/frc971/vision/visualize_robot.cc
index 0bbe507..55932ef 100644
--- a/frc971/vision/visualize_robot.cc
+++ b/frc971/vision/visualize_robot.cc
@@ -1,14 +1,33 @@
 #include "frc971/vision/visualize_robot.h"
-#include "glog/logging.h"
 
 #include <opencv2/calib3d.hpp>
 #include <opencv2/core/eigen.hpp>
 #include <opencv2/highgui/highgui.hpp>
 #include <opencv2/imgproc.hpp>
 
+#include "glog/logging.h"
+
 namespace frc971 {
 namespace vision {
 
+void VisualizeRobot::SetDefaultViewpoint(int image_width, double focal_length) {
+  // 10 meters above the origin, rotated so the camera faces straight down
+  Eigen::Translation3d camera_trans(0, 0, 10.0);
+  Eigen::AngleAxisd camera_rot(M_PI, Eigen::Vector3d::UnitX());
+  Eigen::Affine3d camera_viewpoint = camera_trans * camera_rot;
+  SetViewpoint(camera_viewpoint);
+
+  cv::Mat camera_mat;
+  double half_width = static_cast<double>(image_width) / 2.0;
+  double intr[] = {focal_length, 0.0, half_width, 0.0, focal_length,
+                   half_width,   0.0, 0.0,        1.0};
+  camera_mat = cv::Mat(3, 3, CV_64FC1, intr);
+  SetCameraParameters(camera_mat);
+
+  cv::Mat dist_coeffs = cv::Mat(1, 5, CV_64F, 0.0);
+  SetDistortionCoefficients(dist_coeffs);
+}
+
 cv::Point VisualizeRobot::ProjectPoint(Eigen::Vector3d T_world_point) {
   // Map 3D point in world coordinates to camera frame
   Eigen::Vector3d T_camera_point = H_world_viewpoint_.inverse() * T_world_point;
@@ -24,15 +43,17 @@
   return projected_point;
 }
 
-void VisualizeRobot::DrawLine(Eigen::Vector3d start3d, Eigen::Vector3d end3d) {
+void VisualizeRobot::DrawLine(Eigen::Vector3d start3d, Eigen::Vector3d end3d,
+                              cv::Scalar color) {
   cv::Point start2d = ProjectPoint(start3d);
   cv::Point end2d = ProjectPoint(end3d);
 
-  cv::line(image_, start2d, end2d, cv::Scalar(0, 0, 255));
+  cv::line(image_, start2d, end2d, color);
 }
 
 void VisualizeRobot::DrawFrameAxes(Eigen::Affine3d H_world_target,
-                                   std::string label, double axis_scale) {
+                                   std::string label, cv::Scalar label_color,
+                                   double axis_scale) {
   // Map origin to display from global (world) frame to camera frame
   Eigen::Affine3d H_viewpoint_target =
       H_world_viewpoint_.inverse() * H_world_target;
@@ -53,21 +74,37 @@
     // Grab x axis direction
     cv::Vec3d label_offset = r_mat.col(0);
 
-    // Find 3D coordinate of point at the end of the x-axis, and project it
+    // Find the 3D coordinate of the point at the end of the x-axis and put the
+    // label there, bumped a few (5) pixels to the right to make it easier to read.
     cv::Mat label_coord_res =
         camera_mat_ * cv::Mat(tvec + label_offset * axis_scale);
     cv::Vec3d label_coord = label_coord_res.col(0);
-    label_coord[0] = label_coord[0] / label_coord[2];
+    label_coord[0] = label_coord[0] / label_coord[2] + 5;
     label_coord[1] = label_coord[1] / label_coord[2];
     cv::putText(image_, label, cv::Point(label_coord[0], label_coord[1]),
-                cv::FONT_HERSHEY_PLAIN, 1.0, cv::Scalar(0, 0, 255));
+                cv::FONT_HERSHEY_PLAIN, 1.0, label_color);
   }
 }
 
-void VisualizeRobot::DrawBoardOutline(Eigen::Affine3d H_world_board,
-                                      std::string label) {
-  LOG(INFO) << "Not yet implemented; drawing axes only";
-  DrawFrameAxes(H_world_board, label);
+void VisualizeRobot::DrawRobotOutline(Eigen::Affine3d H_world_robot,
+                                      std::string label, cv::Scalar color) {
+  DrawFrameAxes(H_world_robot, label, color);
+  const double kBotHalfWidth = 0.75 / 2.0;
+  const double kBotHalfLength = 1.0 / 2.0;
+  // Compute coordinates for front/rear and right/left corners
+  Eigen::Vector3d fr_corner =
+      H_world_robot * Eigen::Vector3d(kBotHalfLength, kBotHalfWidth, 0);
+  Eigen::Vector3d fl_corner =
+      H_world_robot * Eigen::Vector3d(kBotHalfLength, -kBotHalfWidth, 0);
+  Eigen::Vector3d rl_corner =
+      H_world_robot * Eigen::Vector3d(-kBotHalfLength, -kBotHalfWidth, 0);
+  Eigen::Vector3d rr_corner =
+      H_world_robot * Eigen::Vector3d(-kBotHalfLength, kBotHalfWidth, 0);
+
+  DrawLine(fr_corner, fl_corner, color);
+  DrawLine(fl_corner, rl_corner, color);
+  DrawLine(rl_corner, rr_corner, color);
+  DrawLine(rr_corner, fr_corner, color);
 }
 
 }  // namespace vision
diff --git a/frc971/vision/visualize_robot.h b/frc971/vision/visualize_robot.h
index 391a030..cd8b4d0 100644
--- a/frc971/vision/visualize_robot.h
+++ b/frc971/vision/visualize_robot.h
@@ -24,6 +24,14 @@
   // Set image on which to draw
   void SetImage(cv::Mat image) { image_ = image; }
 
+  // Sets image to all black.
+  // Uses default_size if no image has been set yet, else image_.size()
+  void ClearImage(cv::Size default_size = cv::Size(1280, 720)) {
+    auto image_size = (image_.data == nullptr ? default_size : image_.size());
+    cv::Mat black_image_mat = cv::Mat::zeros(image_size, CV_8UC3);
+    SetImage(black_image_mat);
+  }
+
   // Set the viewpoint of the camera relative to a global origin
   void SetViewpoint(Eigen::Affine3d view_origin) {
     H_world_viewpoint_ = view_origin;
@@ -31,27 +39,37 @@
 
   // Set camera parameters (for projection into a virtual view)
   void SetCameraParameters(cv::Mat camera_intrinsics) {
-    camera_mat_ = camera_intrinsics;
+    camera_mat_ = camera_intrinsics.clone();
   }
 
   // Set distortion coefficients (defaults to 0)
   void SetDistortionCoefficients(cv::Mat dist_coeffs) {
-    dist_coeffs_ = dist_coeffs;
+    dist_coeffs_ = dist_coeffs.clone();
   }
 
+  // Sets up a default camera view 10 m above the origin, pointing down
+  // Uses image_width and focal_length to define a default camera projection
+  // matrix
+  void SetDefaultViewpoint(int image_width = 1000,
+                           double focal_length = 1000.0);
+
   // Helper function to project a point in 3D to the virtual image coordinates
   cv::Point ProjectPoint(Eigen::Vector3d point3d);
 
   // Draw a line connecting two 3D points
-  void DrawLine(Eigen::Vector3d start, Eigen::Vector3d end);
+  void DrawLine(Eigen::Vector3d start, Eigen::Vector3d end,
+                cv::Scalar color = cv::Scalar(0, 200, 0));
 
   // Draw coordinate frame for a target frame relative to the world frame
   // Axes are drawn (x,y,z) -> (red, green, blue)
   void DrawFrameAxes(Eigen::Affine3d H_world_target, std::string label = "",
+                     cv::Scalar label_color = cv::Scalar(0, 0, 255),
                      double axis_scale = 0.25);
 
-  // TODO<Jim>: Need to implement this, and maybe DrawRobotOutline
-  void DrawBoardOutline(Eigen::Affine3d H_world_board, std::string label = "");
+  // TODO<Jim>: Also implement DrawBoardOutline?  Maybe one function w/
+  // parameters?
+  void DrawRobotOutline(Eigen::Affine3d H_world_robot, std::string label = "",
+                        cv::Scalar color = cv::Scalar(0, 200, 0));
 
   Eigen::Affine3d H_world_viewpoint_;  // Where to view the world from
   cv::Mat image_;                      // Image to draw on
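
A hypothetical snippet (not part of this change) tying the new VisualizeRobot helpers together, in the spirit of visualize_robot_sample.cc below:

#include <opencv2/highgui/highgui.hpp>

#include "Eigen/Dense"
#include "frc971/vision/visualize_robot.h"

void ShowRobotAtOrigin() {
  frc971::vision::VisualizeRobot vis_robot;
  // Draw onto our own image so we can display it afterwards.
  cv::Mat image_mat = cv::Mat::zeros(cv::Size(1280, 720), CV_8UC3);
  vis_robot.SetImage(image_mat);
  // Default view: 10 m above the origin, looking straight down.
  vis_robot.SetDefaultViewpoint(1280);
  // World axes with the default label color, robot outline in red.
  vis_robot.DrawFrameAxes(Eigen::Affine3d::Identity(), "world");
  vis_robot.DrawRobotOutline(Eigen::Affine3d::Identity(), "robot",
                             cv::Scalar(0, 0, 255));
  cv::imshow("VisualizeRobot", image_mat);
  cv::waitKey(0);
}
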
diff --git a/frc971/vision/visualize_robot_sample.cc b/frc971/vision/visualize_robot_sample.cc
index dc38352..59acba0 100644
--- a/frc971/vision/visualize_robot_sample.cc
+++ b/frc971/vision/visualize_robot_sample.cc
@@ -26,25 +26,8 @@
   cv::Mat image_mat =
       cv::Mat::zeros(cv::Size(image_width, image_width), CV_8UC3);
   vis_robot.SetImage(image_mat);
-
-  // 10 meters above the origin, rotated so the camera faces straight down
-  Eigen::Translation3d camera_trans(0, 0, 10.0);
-  Eigen::AngleAxisd camera_rot(M_PI, Eigen::Vector3d::UnitX());
-  Eigen::Affine3d camera_viewpoint = camera_trans * camera_rot;
-  vis_robot.SetViewpoint(camera_viewpoint);
-
-  cv::Mat camera_mat;
   double focal_length = 1000.0;
-  double intr[] = {focal_length, 0.0,          image_width / 2.0,
-                   0.0,          focal_length, image_width / 2.0,
-                   0.0,          0.0,          1.0};
-  camera_mat = cv::Mat(3, 3, CV_64FC1, intr);
-  vis_robot.SetCameraParameters(camera_mat);
-
-  Eigen::Affine3d offset_rotate_origin(Eigen::Affine3d::Identity());
-
-  cv::Mat dist_coeffs = cv::Mat(1, 5, CV_64F, 0.0);
-  vis_robot.SetDistortionCoefficients(dist_coeffs);
+  vis_robot.SetDefaultViewpoint(image_width, focal_length);
 
   // Go around the clock and plot the coordinate frame at different rotations
   for (int i = 0; i < 12; i++) {
@@ -52,8 +35,9 @@
     Eigen::Vector3d trans;
     trans << 1.0 * cos(angle), 1.0 * sin(angle), 0.0;
 
-    offset_rotate_origin = Eigen::Translation3d(trans) *
-                           Eigen::AngleAxisd(angle, Eigen::Vector3d::UnitX());
+    Eigen::Affine3d offset_rotate_origin =
+        Eigen::Translation3d(trans) *
+        Eigen::AngleAxisd(angle, Eigen::Vector3d::UnitX());
 
     vis_robot.DrawFrameAxes(offset_rotate_origin, std::to_string(i));
   }
diff --git a/y2023/BUILD b/y2023/BUILD
index 095b856..08713b7 100644
--- a/y2023/BUILD
+++ b/y2023/BUILD
@@ -37,6 +37,7 @@
         "//y2023/autonomous:binaries",
         ":joystick_reader",
         ":wpilib_interface",
+        "//frc971/can_logger",
         "//aos/network:message_bridge_client",
         "//aos/network:message_bridge_server",
         "//y2023/control_loops/drivetrain:drivetrain",
@@ -211,6 +212,7 @@
         "//y2023/control_loops/superstructure:superstructure_output_fbs",
         "//y2023/control_loops/superstructure:superstructure_position_fbs",
         "//y2023/control_loops/superstructure:superstructure_status_fbs",
+        "//frc971/can_logger:can_logging_fbs",
     ],
     target_compatible_with = ["@platforms//os:linux"],
     deps = [
diff --git a/y2023/autonomous/auto_splines.cc b/y2023/autonomous/auto_splines.cc
index 09cf4ea..c99e417 100644
--- a/y2023/autonomous/auto_splines.cc
+++ b/y2023/autonomous/auto_splines.cc
@@ -142,16 +142,6 @@
       alliance);
 }
 
-flatbuffers::Offset<frc971::MultiSpline> AutonomousSplines::Spline5(
-    aos::Sender<frc971::control_loops::drivetrain::SplineGoal>::Builder
-        *builder,
-    aos::Alliance alliance) {
-  return FixSpline(
-      builder,
-      aos::CopyFlatBuffer<frc971::MultiSpline>(spline_5_, builder->fbb()),
-      alliance);
-}
-
 flatbuffers::Offset<frc971::MultiSpline> AutonomousSplines::StraightLine(
     aos::Sender<frc971::control_loops::drivetrain::SplineGoal>::Builder
         *builder,
diff --git a/y2023/autonomous/auto_splines.h b/y2023/autonomous/auto_splines.h
index 2847957..27442f3 100644
--- a/y2023/autonomous/auto_splines.h
+++ b/y2023/autonomous/auto_splines.h
@@ -29,9 +29,7 @@
         spline_3_(aos::JsonFileToFlatbuffer<frc971::MultiSpline>(
             "splines/spline.2.json")),
         spline_4_(aos::JsonFileToFlatbuffer<frc971::MultiSpline>(
-            "splines/spline.3.json")),
-        spline_5_(aos::JsonFileToFlatbuffer<frc971::MultiSpline>(
-            "splines/spline.4.json")) {}
+            "splines/spline.3.json")) {}
   static flatbuffers::Offset<frc971::MultiSpline> BasicSSpline(
       aos::Sender<frc971::control_loops::drivetrain::SplineGoal>::Builder
           *builder,
@@ -61,10 +59,6 @@
       aos::Sender<frc971::control_loops::drivetrain::SplineGoal>::Builder
           *builder,
       aos::Alliance alliance);
-  flatbuffers::Offset<frc971::MultiSpline> Spline5(
-      aos::Sender<frc971::control_loops::drivetrain::SplineGoal>::Builder
-          *builder,
-      aos::Alliance alliance);
 
  private:
   aos::FlatbufferDetachedBuffer<frc971::MultiSpline> test_spline_;
@@ -72,7 +66,6 @@
   aos::FlatbufferDetachedBuffer<frc971::MultiSpline> spline_2_;
   aos::FlatbufferDetachedBuffer<frc971::MultiSpline> spline_3_;
   aos::FlatbufferDetachedBuffer<frc971::MultiSpline> spline_4_;
-  aos::FlatbufferDetachedBuffer<frc971::MultiSpline> spline_5_;
 };
 
 }  // namespace autonomous
diff --git a/y2023/autonomous/autonomous_actor.cc b/y2023/autonomous/autonomous_actor.cc
index 77965e4..c3ba65b 100644
--- a/y2023/autonomous/autonomous_actor.cc
+++ b/y2023/autonomous/autonomous_actor.cc
@@ -12,14 +12,27 @@
 #include "y2023/control_loops/drivetrain/drivetrain_base.h"
 #include "y2023/control_loops/superstructure/arm/generated_graph.h"
 
-DEFINE_bool(spline_auto, true, "Run simple test S-spline auto mode.");
+DEFINE_bool(spline_auto, false, "Run simple test S-spline auto mode.");
 DEFINE_bool(charged_up, true, "If true run charged up autonomous mode");
 
 namespace y2023 {
 namespace autonomous {
 
-using ::aos::monotonic_clock;
 using ::frc971::ProfileParametersT;
+
+ProfileParametersT MakeProfileParameters(float max_velocity,
+                                         float max_acceleration) {
+  ProfileParametersT result;
+  result.max_velocity = max_velocity;
+  result.max_acceleration = max_acceleration;
+  return result;
+}
+
+using ::aos::monotonic_clock;
+using frc971::CreateProfileParameters;
+using ::frc971::ProfileParametersT;
+using frc971::control_loops::CreateStaticZeroingSingleDOFProfiledSubsystemGoal;
+using frc971::control_loops::StaticZeroingSingleDOFProfiledSubsystemGoal;
 using frc971::control_loops::drivetrain::LocalizerControl;
 namespace chrono = ::std::chrono;
 
@@ -34,8 +47,16 @@
           event_loop->MakeFetcher<aos::JoystickState>("/aos")),
       robot_state_fetcher_(event_loop->MakeFetcher<aos::RobotState>("/aos")),
       auto_splines_(),
-      arm_goal_position_(control_loops::superstructure::arm::NeutralIndex()),
+      arm_goal_position_(control_loops::superstructure::arm::StartingIndex()),
+      superstructure_goal_sender_(
+          event_loop->MakeSender<::y2023::control_loops::superstructure::Goal>(
+              "/superstructure")),
+      superstructure_status_fetcher_(
+          event_loop
+              ->MakeFetcher<::y2023::control_loops::superstructure::Status>(
+                  "/superstructure")),
       points_(control_loops::superstructure::arm::PointList()) {
+  drivetrain_status_fetcher_.Fetch();
   replan_timer_ = event_loop->AddTimer([this]() { Replan(); });
 
   event_loop->OnRun([this, event_loop]() {
@@ -74,6 +95,12 @@
 }
 
 void AutonomousActor::Replan() {
+  if (!drivetrain_status_fetcher_.Fetch()) {
+    replan_timer_->Setup(event_loop()->monotonic_now() + chrono::seconds(1));
+    AOS_LOG(INFO, "Drivetrain not up, replanning in 1 second");
+    return;
+  }
+
   if (alliance_ == aos::Alliance::kInvalid) {
     return;
   }
@@ -86,6 +113,7 @@
 
     starting_position_ = test_spline_->starting_position();
   } else if (FLAGS_charged_up) {
+    AOS_LOG(INFO, "Charged up replanning!");
     charged_up_splines_ = {
         PlanSpline(std::bind(&AutonomousSplines::Spline1, &auto_splines_,
                              std::placeholders::_1, alliance_),
@@ -99,9 +127,6 @@
         PlanSpline(std::bind(&AutonomousSplines::Spline4, &auto_splines_,
                              std::placeholders::_1, alliance_),
                    SplineDirection::kForward),
-        PlanSpline(std::bind(&AutonomousSplines::Spline5, &auto_splines_,
-                             std::placeholders::_1, alliance_),
-                   SplineDirection::kBackward),
     };
 
     starting_position_ = charged_up_splines_.value()[0].starting_position();
@@ -129,6 +154,12 @@
   CHECK(joystick_state_fetcher_.get() != nullptr)
       << "Expect at least one JoystickState message before running auto...";
   alliance_ = joystick_state_fetcher_->alliance();
+
+  wrist_goal_ = 0.0;
+  roller_goal_ = control_loops::superstructure::RollerGoal::IDLE;
+  arm_goal_position_ = control_loops::superstructure::arm::StartingIndex();
+  preloaded_ = false;
+  SendSuperstructureGoal();
 }
 
 bool AutonomousActor::RunAction(
@@ -155,6 +186,8 @@
   }
   if (FLAGS_spline_auto) {
     SplineAuto();
+  } else if (FLAGS_charged_up) {
+    ChargedUp();
   } else {
     AOS_LOG(WARNING, "No auto mode selected.");
   }
@@ -197,15 +230,21 @@
 
   auto &splines = *charged_up_splines_;
 
+  AOS_LOG(INFO, "Going to preload");
+
   // Tell the superstructure a cone was preloaded
   if (!WaitForPreloaded()) return;
+  AOS_LOG(INFO, "Moving arm");
 
   // Place first cone on mid level
   MidConeScore();
 
   // Wait until the arm is at the goal to spit
-  if (!WaitForArmGoal()) return;
+  if (!WaitForArmGoal(0.10)) return;
   Spit();
+  if (!WaitForArmGoal(0.01)) return;
+
+  std::this_thread::sleep_for(chrono::milliseconds(100));
 
   AOS_LOG(
       INFO, "Placed first cone %lf s\n",
@@ -217,21 +256,49 @@
 
   // Move arm into position to pickup a cube and start cube intake
   PickupCube();
+
+  std::this_thread::sleep_for(chrono::milliseconds(500));
+
   IntakeCube();
 
+  AOS_LOG(
+      INFO, "Turning on rollers %lf s\n",
+      aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+
   if (!splines[0].WaitForSplineDistanceRemaining(0.02)) return;
 
+  AOS_LOG(
+      INFO, "Got there %lf s\n",
+      aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+
   // Drive back to grid and place cube on high level
   if (!splines[1].WaitForPlan()) return;
   splines[1].Start();
 
+  std::this_thread::sleep_for(chrono::milliseconds(300));
   HighCubeScore();
 
-  if (!splines[1].WaitForSplineDistanceRemaining(0.02)) return;
+  if (!splines[1].WaitForSplineDistanceRemaining(0.08)) return;
+  AOS_LOG(
+      INFO, "Back for first cube %lf s\n",
+      aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
 
-  if (!WaitForArmGoal()) return;
+  if (!WaitForArmGoal(0.10)) return;
+
+  AOS_LOG(
+      INFO, "Arm in place for first cube %lf s\n",
+      aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+
   Spit();
 
+  if (!splines[1].WaitForSplineDistanceRemaining(0.08)) return;
+
+  AOS_LOG(
+      INFO, "Finished spline back %lf s\n",
+      aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+
+  if (!WaitForArmGoal(0.05)) return;
+
   AOS_LOG(
       INFO, "Placed first cube %lf s\n",
       aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
@@ -240,43 +307,110 @@
   if (!splines[2].WaitForPlan()) return;
   splines[2].Start();
 
+  std::this_thread::sleep_for(chrono::milliseconds(200));
   PickupCube();
+
+  std::this_thread::sleep_for(chrono::milliseconds(500));
   IntakeCube();
 
-  if (!splines[2].WaitForSplineDistanceRemaining(0.02)) return;
+  if (!splines[2].WaitForSplineDistanceRemaining(0.05)) return;
+  AOS_LOG(
+      INFO, "Picked up second cube %lf s\n",
+      aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
 
   // Drive back to grid and place object on mid level
   if (!splines[3].WaitForPlan()) return;
   splines[3].Start();
 
+  AOS_LOG(
+      INFO, "Driving back %lf s\n",
+      aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+
   MidCubeScore();
 
-  if (!splines[3].WaitForSplineDistanceRemaining(0.02)) return;
+  if (!splines[3].WaitForSplineDistanceRemaining(0.05)) return;
+  AOS_LOG(
+      INFO, "Got back from second cube at %lf s\n",
+      aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
 
-  if (!WaitForArmGoal()) return;
+  if (!WaitForArmGoal(0.05)) return;
+
+  if (!splines[3].WaitForSplineDistanceRemaining(0.05)) return;
   Spit();
 
+  if (!splines[3].WaitForSplineDistanceRemaining(0.02)) return;
+
   AOS_LOG(
       INFO, "Placed second cube %lf s\n",
       aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+  InitializeEncoders();
 
-  // Drive onto charging station
-  if (!splines[4].WaitForPlan()) return;
-  splines[4].Start();
+  const ProfileParametersT kDrive = MakeProfileParameters(2.0, 4.0);
+  const ProfileParametersT kTurn = MakeProfileParameters(3.0, 4.5);
+  StartDrive(0.0, 0.0, kDrive, kTurn);
 
-  if (!splines[4].WaitForSplineDistanceRemaining(0.02)) return;
+  {
+    double side_scalar = (alliance_ == aos::Alliance::kRed) ? 1.0 : -1.0;
+    StartDrive(6.33 - std::abs(X()), 0.0, kDrive, kTurn);
+    if (!WaitForDriveProfileNear(0.01)) return;
+
+    AOS_LOG(
+        INFO, "Done backing up %lf s\n",
+        aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+
+    const ProfileParametersT kInPlaceTurn = MakeProfileParameters(2.0, 4.5);
+    StartDrive(0.0, aos::math::NormalizeAngle(M_PI / 2.0 - Theta()), kDrive,
+               kInPlaceTurn);
+
+    std::this_thread::sleep_for(chrono::milliseconds(400));
+    StopSpitting();
+
+    AOS_LOG(
+        INFO, "Roller off %lf s\n",
+        aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+
+    Balance();
+    if (!WaitForTurnProfileNear(0.03)) return;
+
+    AOS_LOG(
+        INFO, "Done turning %lf s\n",
+        aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+
+    const ProfileParametersT kDrive = MakeProfileParameters(1.4, 3.0);
+    const ProfileParametersT kFinalTurn = MakeProfileParameters(3.0, 4.5);
+    const double kDriveDistance = 3.12;
+    StartDrive(-kDriveDistance, 0.0, kDrive, kFinalTurn);
+
+    const ProfileParametersT kFastTurn = MakeProfileParameters(5.0, 8.0);
+    if (!WaitForDriveProfileNear(kDriveDistance - 0.4)) return;
+
+    AOS_LOG(
+        INFO, "Turning %lf s\n",
+        aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+    StartDrive(0.0, -side_scalar * M_PI / 2.0, kDrive, kFastTurn);
+    if (!WaitForDriveProfileDone()) return;
+    if (!WaitForTurnProfileDone()) return;
+    AOS_LOG(
+        INFO, "Done %lf s\n",
+        aos::time::DurationInSeconds(aos::monotonic_clock::now() - start_time));
+  }
 }
 
 void AutonomousActor::SendSuperstructureGoal() {
   auto builder = superstructure_goal_sender_.MakeBuilder();
 
+  flatbuffers::Offset<StaticZeroingSingleDOFProfiledSubsystemGoal>
+      wrist_offset = CreateStaticZeroingSingleDOFProfiledSubsystemGoal(
+          *builder.fbb(), wrist_goal_,
+          CreateProfileParameters(*builder.fbb(), 12.0, 90.0));
+
   control_loops::superstructure::Goal::Builder superstructure_builder =
       builder.MakeBuilder<control_loops::superstructure::Goal>();
 
   superstructure_builder.add_arm_goal_position(arm_goal_position_);
   superstructure_builder.add_preloaded_with_cone(preloaded_);
   superstructure_builder.add_roller_goal(roller_goal_);
-  superstructure_builder.add_wrist(wrist_goal_);
+  superstructure_builder.add_wrist(wrist_offset);
 
   if (builder.Send(superstructure_builder.Finish()) !=
       aos::RawSender::Error::kOk) {
@@ -314,8 +448,21 @@
 
 void AutonomousActor::MidConeScore() {
   set_arm_goal_position(
-      control_loops::superstructure::arm::ScoreFrontMidConeUpIndex());
-  set_wrist_goal(0.05);
+      control_loops::superstructure::arm::ScoreFrontMidConeUpAutoIndex());
+  set_wrist_goal(0.0);
+  SendSuperstructureGoal();
+}
+
+void AutonomousActor::Neutral() {
+  set_arm_goal_position(control_loops::superstructure::arm::NeutralIndex());
+  set_wrist_goal(0.0);
+  SendSuperstructureGoal();
+}
+
+void AutonomousActor::Balance() {
+  set_arm_goal_position(
+      control_loops::superstructure::arm::GroundPickupFrontConeUpIndex());
+  set_wrist_goal(0.0);
   SendSuperstructureGoal();
 }
 
@@ -329,14 +476,14 @@
 void AutonomousActor::MidCubeScore() {
   set_arm_goal_position(
       control_loops::superstructure::arm::ScoreFrontMidCubeIndex());
-  set_wrist_goal(0.6);
+  set_wrist_goal(1.0);
   SendSuperstructureGoal();
 }
 
 void AutonomousActor::PickupCube() {
   set_arm_goal_position(
       control_loops::superstructure::arm::GroundPickupBackCubeIndex());
-  set_wrist_goal(0.6);
+  set_wrist_goal(1.0);
   SendSuperstructureGoal();
 }
 
@@ -345,13 +492,18 @@
   SendSuperstructureGoal();
 }
 
+void AutonomousActor::StopSpitting() {
+  set_roller_goal(control_loops::superstructure::RollerGoal::IDLE);
+  SendSuperstructureGoal();
+}
+
 void AutonomousActor::IntakeCube() {
   set_roller_goal(control_loops::superstructure::RollerGoal::INTAKE_CUBE);
   SendSuperstructureGoal();
 }
 
-[[nodiscard]] bool AutonomousActor::WaitForArmGoal() {
-  constexpr double kEpsTheta = 0.01;
+[[nodiscard]] bool AutonomousActor::WaitForArmGoal(double distance_to_go) {
+  constexpr double kEpsTheta = 0.10;
 
   ::aos::time::PhasedLoop phased_loop(frc971::controls::kLoopFrequency,
                                       event_loop()->monotonic_now(),
@@ -367,24 +519,15 @@
     superstructure_status_fetcher_.Fetch();
     CHECK(superstructure_status_fetcher_.get() != nullptr);
 
-    // Check that the status had the right goal
-    at_goal = (std::abs(points_[arm_goal_position_](0) -
-                        superstructure_status_fetcher_->arm()->theta0()) <
-                   kEpsTheta &&
-               std::abs(points_[arm_goal_position_](1) -
-                        superstructure_status_fetcher_->arm()->theta1()) <
-                   kEpsTheta &&
-               std::abs(points_[arm_goal_position_](2) -
-                        superstructure_status_fetcher_->arm()->theta2()) <
-                   kEpsTheta) &&
+    at_goal = (arm_goal_position_ ==
+                   superstructure_status_fetcher_->arm()->current_node() &&
+               superstructure_status_fetcher_->arm()->path_distance_to_go() <
+                   distance_to_go) &&
               (std::abs(wrist_goal_ -
                         superstructure_status_fetcher_->wrist()->position()) <
                kEpsTheta);
   }
 
-  set_preloaded(false);
-  SendSuperstructureGoal();
-
   return true;
 }
 
diff --git a/y2023/autonomous/autonomous_actor.h b/y2023/autonomous/autonomous_actor.h
index d13003d..46c8a78 100644
--- a/y2023/autonomous/autonomous_actor.h
+++ b/y2023/autonomous/autonomous_actor.h
@@ -41,11 +41,14 @@
   void HighCubeScore();
   void MidCubeScore();
   void MidConeScore();
+  void Neutral();
   void PickupCube();
   void Spit();
+  void StopSpitting();
   void IntakeCube();
+  void Balance();
 
-  [[nodiscard]] bool WaitForArmGoal();
+  [[nodiscard]] bool WaitForArmGoal(double distance_to_go = 0.01);
 
   [[nodiscard]] bool WaitForPreloaded();
 
@@ -62,7 +65,7 @@
   aos::Fetcher<aos::JoystickState> joystick_state_fetcher_;
   aos::Fetcher<aos::RobotState> robot_state_fetcher_;
 
-  double wrist_goal_ = 0.0;
+  double wrist_goal_;
   control_loops::superstructure::RollerGoal roller_goal_ =
       control_loops::superstructure::RollerGoal::IDLE;
 
@@ -86,7 +89,7 @@
       superstructure_status_fetcher_;
 
   std::optional<SplineHandle> test_spline_;
-  std::optional<std::array<SplineHandle, 5>> charged_up_splines_;
+  std::optional<std::array<SplineHandle, 4>> charged_up_splines_;
 
   // List of arm angles from arm::PointsList
   const ::std::vector<::Eigen::Matrix<double, 3, 1>> points_;
diff --git a/y2023/autonomous/splines/spline.0.json b/y2023/autonomous/splines/spline.0.json
index e6e24ed..b547160 100644
--- a/y2023/autonomous/splines/spline.0.json
+++ b/y2023/autonomous/splines/spline.0.json
@@ -1 +1 @@
-{"spline_count": 1, "spline_x": [-6.450466090790975, -6.021160733648118, -5.591855376505261, -3.3652785421474576, -2.7749836760760287, -1.7732711760760287], "spline_y": [0.9493418961252269, 0.9493418961252269, 0.9314541729109411, 0.5975544198946889, 0.5975544198946889, 0.5796666966804032], "constraints": [{"constraint_type": "LONGITUDINAL_ACCELERATION", "value": 3}, {"constraint_type": "LATERAL_ACCELERATION", "value": 2}, {"constraint_type": "VOLTAGE", "value": 10}]}
\ No newline at end of file
+{"spline_count": 1, "spline_x": [6.530466090790975, 6.021160733648118, 5.591855376505261, 3.197458857773618, 2.607163991702189, 1.6230056318540846], "spline_y": [0.9493418961252269, 0.9493418961252269, 0.9314541729109411, 0.5106030443873093, 0.5106030443873093, 0.4224994345862587], "constraints": [{"constraint_type": "LONGITUDINAL_ACCELERATION", "value": 4.0}, {"constraint_type": "LATERAL_ACCELERATION", "value": 2}, {"constraint_type": "VOLTAGE", "value": 12.0}]}
\ No newline at end of file
diff --git a/y2023/autonomous/splines/spline.1.json b/y2023/autonomous/splines/spline.1.json
index 032a081..ada494a 100644
--- a/y2023/autonomous/splines/spline.1.json
+++ b/y2023/autonomous/splines/spline.1.json
@@ -1 +1 @@
-{"spline_count": 1, "spline_x": [-1.7732711760760287, -2.7749836760760287, -3.3652785421474576, -5.591855376505261, -6.021160733648118, -6.450466090790975], "spline_y": [0.5796666966804032, 0.5975544198946889, 0.5975544198946889, 0.40105062588141127, 0.41893834909569705, 0.41893834909569705], "constraints": [{"constraint_type": "LONGITUDINAL_ACCELERATION", "value": 3}, {"constraint_type": "LATERAL_ACCELERATION", "value": 2}, {"constraint_type": "VOLTAGE", "value": 10}]}
\ No newline at end of file
+{"spline_count": 1, "spline_x": [1.6230056318540846, 2.595606822913154, 3.185901688984583, 5.530672564287547, 5.959977921430404, 6.389283278573261], "spline_y": [0.4224994345862587, 0.509568426046465, 0.509568426046465, 0.28192333408803366, 0.29981105730231944, 0.29981105730231944], "constraints": [{"constraint_type": "LONGITUDINAL_ACCELERATION", "value": 3.5}, {"constraint_type": "LATERAL_ACCELERATION", "value": 2}, {"constraint_type": "VOLTAGE", "value": 12.0}]}
\ No newline at end of file
diff --git a/y2023/autonomous/splines/spline.2.json b/y2023/autonomous/splines/spline.2.json
index 4ca06a8..bbefb4e 100644
--- a/y2023/autonomous/splines/spline.2.json
+++ b/y2023/autonomous/splines/spline.2.json
@@ -1 +1 @@
-{"spline_count": 1, "spline_x": [-6.450466090790975, -6.021160733648118, -5.591855376505261, -3.605574338541678, -3.0269522872367363, -1.6929070022836754], "spline_y": [0.41893834909569705, 0.41893834909569705, 0.40105062588141127, 0.5475210271634618, 0.515375357646521, -0.3364848845524211], "constraints": [{"constraint_type": "LONGITUDINAL_ACCELERATION", "value": 3}, {"constraint_type": "LATERAL_ACCELERATION", "value": 2}, {"constraint_type": "VOLTAGE", "value": 10}]}
\ No newline at end of file
+{"spline_count": 1, "spline_x": [6.389283278573261, 5.998153290606758, 4.236658486151713, 2.789187490908938, 2.3614674084109164, 1.571420503593167], "spline_y": [0.29981105730231944, 0.29981105730231944, 0.6546640312799279, 0.8695373532912908, -0.061240797065554964, -0.6670628799510845], "constraints": [{"constraint_type": "LONGITUDINAL_ACCELERATION", "value": 3.5}, {"constraint_type": "LATERAL_ACCELERATION", "value": 2.5}, {"constraint_type": "VOLTAGE", "value": 12.0}]}
\ No newline at end of file
diff --git a/y2023/autonomous/splines/spline.3.json b/y2023/autonomous/splines/spline.3.json
index a56d24e..d888718 100644
--- a/y2023/autonomous/splines/spline.3.json
+++ b/y2023/autonomous/splines/spline.3.json
@@ -1 +1 @@
-{"spline_count": 1, "spline_x": [-1.6929070022836754, -3.0269522872367363, -3.605574338541678, -5.591855376505261, -6.021160733648118, -6.450466090790975], "spline_y": [-0.3364848845524211, 0.515375357646521, 0.5475210271634618, 0.40105062588141127, 0.41893834909569705, 0.41893834909569705], "constraints": [{"constraint_type": "LONGITUDINAL_ACCELERATION", "value": 3}, {"constraint_type": "LATERAL_ACCELERATION", "value": 2}, {"constraint_type": "VOLTAGE", "value": 10}]}
\ No newline at end of file
+{"spline_count": 1, "spline_x": [1.571420503593167, 3.182907251060901, 3.1860855438536575, 5.26674552341491, 4.916110025955861, 6.379956328994636], "spline_y": [-0.6670628799510845, 0.568653951467349, 0.5015288933650817, 0.5325195989567781, 0.24498490138963108, 0.24723266038664643], "constraints": [{"constraint_type": "LONGITUDINAL_ACCELERATION", "value": 3.5}, {"constraint_type": "LATERAL_ACCELERATION", "value": 2.5}, {"constraint_type": "VOLTAGE", "value": 12.0}, {"constraint_type": "VELOCITY", "value": 1.5, "start_distance": 4.1, "end_distance": 10.0}]}
\ No newline at end of file
diff --git a/y2023/autonomous/splines/spline.4.json b/y2023/autonomous/splines/spline.4.json
deleted file mode 100644
index 4eee822..0000000
--- a/y2023/autonomous/splines/spline.4.json
+++ /dev/null
@@ -1 +0,0 @@
-{"spline_count": 1, "spline_x": [-6.450466090790975, -6.448323209188465, -6.468936183333308, -5.63485982210851, -5.224861021501398, -4.383040925048516], "spline_y": [0.41893834909569705, -0.2089748700255587, -1.0435424455861884, -0.6390449134590346, -0.8779649804883709, -0.8766708249234052], "constraints": [{"constraint_type": "LONGITUDINAL_ACCELERATION", "value": 3}, {"constraint_type": "LATERAL_ACCELERATION", "value": 2}, {"constraint_type": "VOLTAGE", "value": 10}]}
\ No newline at end of file
diff --git a/y2023/constants.cc b/y2023/constants.cc
index f11c68c..c8b75d1 100644
--- a/y2023/constants.cc
+++ b/y2023/constants.cc
@@ -106,28 +106,29 @@
           0.0201047336425017 - 1.0173426655158 - 0.186085272847293 - 0.0317706563397807;
 
       wrist->subsystem_params.zeroing_constants.measured_absolute_position =
-          1.00731305518279;
+          2.27068625283861;
 
       break;
 
     case kPracticeTeamNumber:
-      arm_proximal->zeroing.measured_absolute_position = 0.261970010788946;
+      arm_proximal->zeroing.measured_absolute_position = 0.24572544970387;
       arm_proximal->potentiometer_offset =
-          10.5178592988554 + 0.0944609125285876 - 0.00826532984625095;
+          10.5178592988554 + 0.0944609125285876 - 0.00826532984625095 +
+          0.167359305216504;
 
-      arm_distal->zeroing.measured_absolute_position = 0.507166003869875;
+      arm_distal->zeroing.measured_absolute_position = 0.0915283983599425;
       arm_distal->potentiometer_offset =
           7.673132586937 - 0.0799284644472573 - 0.0323574039310657 +
-          0.0143810684138064 + 0.00945555248207735;
+          0.0143810684138064 + 0.00945555248207735 + 0.452446388633863;
 
-      roll_joint->zeroing.measured_absolute_position = 1.85482286175059;
+      roll_joint->zeroing.measured_absolute_position = 0.0722321007692069;
       roll_joint->potentiometer_offset =
           0.624713611895747 + 3.10458504917251 - 0.0966407797407789 +
           0.0257708772364788 - 0.0395076737853459 - 6.87914956118006 -
-          0.097581301615046;
+          0.097581301615046 + 3.3424421683095 - 3.97605190912604;
 
       wrist->subsystem_params.zeroing_constants.measured_absolute_position =
-          6.04062267812154;
+          1.04410369068834;
 
       break;
 
diff --git a/y2023/constants/971.json b/y2023/constants/971.json
index 2f91d1b..9f783fa 100644
--- a/y2023/constants/971.json
+++ b/y2023/constants/971.json
@@ -4,7 +4,7 @@
       "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-1_cam-23-05_ext_2023-03-05.json' %}
     },
     {
-      "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-2_cam-23-06_ext_2023-02-22.json' %}
+      "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-2_cam-23-06_ext_2023-03-25.json' %}
     },
     {
       "calibration": {% include 'y2023/vision/calib_files/calibration_pi-971-3_cam-23-07_ext_2023-02-22.json' %}
diff --git a/y2023/control_loops/drivetrain/drivetrain_base.cc b/y2023/control_loops/drivetrain/drivetrain_base.cc
index 4efba61..a11a896 100644
--- a/y2023/control_loops/drivetrain/drivetrain_base.cc
+++ b/y2023/control_loops/drivetrain/drivetrain_base.cc
@@ -76,6 +76,10 @@
                                    .finished()
                                    .asDiagonal()),
           .max_controllable_offset = 0.5},
+      frc971::control_loops::drivetrain::PistolTopButtonUse::kNone,
+      frc971::control_loops::drivetrain::PistolSecondButtonUse::kTurn1,
+      frc971::control_loops::drivetrain::PistolBottomButtonUse::
+          kControlLoopDriving,
   };
 
   return kDrivetrainConfig;
diff --git a/y2023/control_loops/python/graph_edit.py b/y2023/control_loops/python/graph_edit.py
index e47128a..bbc7a1b 100644
--- a/y2023/control_loops/python/graph_edit.py
+++ b/y2023/control_loops/python/graph_edit.py
@@ -3,6 +3,8 @@
 from __future__ import print_function
 # matplotlib overrides fontconfig locations, so it needs to be imported before gtk.
 import matplotlib.pyplot as plt
+from matplotlib.backends.backend_gtk3agg import (FigureCanvasGTK3Agg as
+                                                 FigureCanvas)
 import os
 from frc971.control_loops.python import basic_window
 from frc971.control_loops.python.color import Color, palette
@@ -23,6 +25,8 @@
 import shapely
 from shapely.geometry import Polygon
 
+from frc971.control_loops.python.constants import *
+
 
 def px(cr):
     return OverrideMatrix(cr, identity)
@@ -227,37 +231,30 @@
             self.current_path_index = id
 
 
+ARM_AREA_WIDTH = 2 * (SCREEN_SIZE - 200)
+ARM_AREA_HEIGHT = SCREEN_SIZE
+
+
 # Create a GTK+ widget on which we will draw using Cairo
-class ArmUi(basic_window.BaseWindow):
+class ArmUi(Gtk.DrawingArea):
 
     def __init__(self, segments):
         super(ArmUi, self).__init__()
 
-        self.window = Gtk.Window()
-        self.window.set_title("DrawingArea")
-
-        self.window.set_events(Gdk.EventMask.BUTTON_PRESS_MASK
-                               | Gdk.EventMask.BUTTON_RELEASE_MASK
-                               | Gdk.EventMask.POINTER_MOTION_MASK
-                               | Gdk.EventMask.SCROLL_MASK
-                               | Gdk.EventMask.KEY_PRESS_MASK)
-        self.method_connect("key-press-event", self.do_key_press)
-        self.method_connect("motion-notify-event", self.do_motion)
-        self.method_connect("button-press-event",
-                            self._do_button_press_internal)
-        self.method_connect("configure-event", self._do_configure)
-        self.window.add(self)
-        self.window.show_all()
-
+        self.set_size_request(ARM_AREA_WIDTH, ARM_AREA_HEIGHT)
+        self.center = (0, 0)
+        self.shape = (ARM_AREA_WIDTH, ARM_AREA_HEIGHT)
         self.theta_version = False
-        self.reinit_extents()
+
+        self.init_extents()
+
+        self.connect('draw', self.on_draw)
 
         self.last_pos = to_xy(*graph_paths.points['Neutral'][:2])
         self.circular_index_select = 1
 
         # Extra stuff for drawing lines.
         self.segments = segments
-        self.prev_segment_pt = None
         self.now_segment_pt = None
         self.spline_edit = 0
         self.edit_control1 = True
@@ -271,7 +268,6 @@
             self.fig.add_subplot(3, 1, 3)
         ]
         self.fig.subplots_adjust(hspace=1.0)
-        plt.show(block=False)
 
         self.index = 0
 
@@ -282,16 +278,20 @@
                                     [DRIVER_CAM_X, DRIVER_CAM_Y],
                                     DRIVER_CAM_WIDTH, DRIVER_CAM_HEIGHT)
 
-        self.segment_selector = SegmentSelector(self.segments)
-        self.segment_selector.show()
-
         self.show_indicators = True
         # Lets you only view selected path
         self.view_current = False
 
+        self.editing = True
+
+        self.x_offset = 0
+        self.y_offset = 0
+
     def _do_button_press_internal(self, event):
         o_x = event.x
         o_y = event.y
+        event.y -= self.y_offset
+        event.x -= self.x_offset
         x = event.x - self.window_shape[0] / 2
         y = self.window_shape[1] / 2 - event.y
         scale = self.get_current_scale()
@@ -301,22 +301,7 @@
         event.x = o_x
         event.y = o_y
 
-    def _do_configure(self, event):
-        self.window_shape = (event.width, event.height)
-
-    def redraw(self):
-        if not self.needs_redraw:
-            self.needs_redraw = True
-            self.window.queue_draw()
-
-    def method_connect(self, event, cb):
-
-        def handler(obj, *args):
-            cb(*args)
-
-        self.window.connect(event, handler)
-
-    def reinit_extents(self):
+    def init_extents(self):
         if self.theta_version:
             self.extents_x_min = -np.pi * 2
             self.extents_x_max = np.pi * 2
@@ -328,18 +313,41 @@
             self.extents_y_min = -4.0 * 0.0254
             self.extents_y_max = 110.0 * 0.0254
 
-        self.init_extents(
-            (0.5 * (self.extents_x_min + self.extents_x_max), 0.5 *
-             (self.extents_y_max + self.extents_y_min)),
-            (1.0 * (self.extents_x_max - self.extents_x_min), 1.0 *
-             (self.extents_y_max - self.extents_y_min)))
+        self.center = (0.5 * (self.extents_x_min + self.extents_x_max),
+                       0.5 * (self.extents_y_max + self.extents_y_min))
+        self.shape = (1.0 * (self.extents_x_max - self.extents_x_min),
+                      1.0 * (self.extents_y_max - self.extents_y_min))
+
+    def get_current_scale(self):
+        w_w, w_h = self.window_shape
+        w, h = self.shape
+        return min((w_w / w), (w_h / h))
+
+    def on_draw(self, widget, event):
+        cr = self.get_window().cairo_create()
+
+        self.window_shape = (self.get_window().get_geometry().width,
+                             self.get_window().get_geometry().height)
+
+        cr.save()
+        cr.set_font_size(20)
+        cr.translate(self.window_shape[0] / 2, self.window_shape[1] / 2)
+        scale = self.get_current_scale()
+        cr.scale(scale, -scale)
+        cr.translate(-self.center[0], -self.center[1])
+        cr.reset_clip()
+        self.handle_draw(cr)
+        cr.restore()
+
+    def method_connect(self, event, cb):
+
+        def handler(obj, *args):
+            cb(*args)
+
+        self.window.connect(event, handler)
 
     # Handle the expose-event by drawing
     def handle_draw(self, cr):
-        # use "with px(cr): blah;" to transform to pixel coordinates.
-        if self.segment_selector.current_path_index is not None:
-            self.index = self.segment_selector.current_path_index
-
         # Fill the background color of the window with grey
         set_color(cr, palette["GREY"])
         cr.paint()
@@ -485,6 +493,8 @@
     def do_motion(self, event):
         o_x = event.x
         o_y = event.y
+        event.x -= self.x_offset
+        event.y -= self.y_offset
         x = event.x - self.window_shape[0] / 2
         y = self.window_shape[1] / 2 - event.y
         scale = self.get_current_scale()
@@ -515,7 +525,21 @@
         event.x = o_x
         event.y = o_y
 
-        self.redraw()
+        self.queue_draw()
+
+    def switch_theta(self):
+        # Toggle between theta and xy renderings
+        if self.theta_version:
+            theta1, theta2 = self.last_pos
+            data = to_xy(theta1, theta2)
+            self.circular_index_select = int(
+                np.floor((theta2 - theta1) / np.pi))
+            self.last_pos = (data[0], data[1])
+        else:
+            self.last_pos = self.cur_pt_in_theta()
+
+        self.theta_version = not self.theta_version
+        self.init_extents()
 
     def do_key_press(self, event):
         keyval = Gdk.keyval_to_lower(event.keyval)
@@ -532,9 +556,6 @@
             self.circular_index_select -= 1
             print(self.circular_index_select)
 
-        elif keyval == Gdk.KEY_r:
-            self.prev_segment_pt = self.now_segment_pt
-
         elif keyval == Gdk.KEY_o:
             # Only prints current segment
             print(repr(self.segments[self.index]))
@@ -542,19 +563,6 @@
             # Generate theta points.
             if self.segments:
                 print(repr(self.segments[self.index].ToThetaPoints()))
-        elif keyval == Gdk.KEY_e:
-            best_pt = self.now_segment_pt
-            best_dist = 1e10
-            for segment in self.segments:
-                d = angle_dist_sqr(segment.start, self.now_segment_pt)
-                if (d < best_dist):
-                    best_pt = segment.start
-                    best_dist = d
-                d = angle_dist_sqr(segment.end, self.now_segment_pt)
-                if (d < best_dist):
-                    best_pt = segment.end
-                    best_dist = d
-            self.now_segment_pt = best_pt
 
         elif keyval == Gdk.KEY_p:
             if self.index > 0:
@@ -567,28 +575,33 @@
         elif keyval == Gdk.KEY_i:
             self.show_indicators = not self.show_indicators
 
+        elif keyval == Gdk.KEY_h:
+            print("q: Quit the program")
+            print("c: Incriment which arm solution we render")
+            print("v: Decrement which arm solution we render")
+            print("o: Print the current segment")
+            print("g: Generate theta points")
+            print("p: Move to the previous segment")
+            print("n: Move to the next segment")
+            print("i: Switch on or off the control point indicators")
+            print("l: Switch on or off viewing only the selected spline")
+            print("t: Toggle between xy or theta renderings")
+            print("z: Switch between editing control point 1 and 2")
+
         elif keyval == Gdk.KEY_n:
             self.index += 1
             self.index = self.index % len(self.segments)
             print("Switched to segment:", self.segments[self.index].name)
             self.segments[self.index].Print(graph_paths.points)
 
+        elif keyval == Gdk.KEY_d:
+            self.editing = not self.editing
+
         elif keyval == Gdk.KEY_l:
             self.view_current = not self.view_current
 
         elif keyval == Gdk.KEY_t:
-            # Toggle between theta and xy renderings
-            if self.theta_version:
-                theta1, theta2 = self.last_pos
-                data = to_xy(theta1, theta2)
-                self.circular_index_select = int(
-                    np.floor((theta2 - theta1) / np.pi))
-                self.last_pos = (data[0], data[1])
-            else:
-                self.last_pos = self.cur_pt_in_theta()
-
-            self.theta_version = not self.theta_version
-            self.reinit_extents()
+            self.switch_theta()
 
         elif keyval == Gdk.KEY_z:
             self.edit_control1 = not self.edit_control1
@@ -605,9 +618,10 @@
             print("self.last_pos: ", self.last_pos, " ci: ",
                   self.circular_index_select)
 
-        self.redraw()
+        self.queue_draw()
 
     def do_button_press(self, event):
+
         last_pos = self.last_pos
         self.last_pos = (event.x, event.y)
         pt_theta = self.cur_pt_in_theta()
@@ -617,10 +631,11 @@
 
         self.now_segment_pt = np.array(shift_angles(pt_theta))
 
-        if self.edit_control1:
-            self.segments[self.index].control1 = self.now_segment_pt
-        else:
-            self.segments[self.index].control2 = self.now_segment_pt
+        if self.editing:
+            if self.edit_control1:
+                self.segments[self.index].control1 = self.now_segment_pt
+            else:
+                self.segments[self.index].control2 = self.now_segment_pt
 
         print('Clicked at theta: np.array([%s, %s])' %
               (self.now_segment_pt[0], self.now_segment_pt[1]))
@@ -632,10 +647,107 @@
 
         self.segments[self.index].Print(graph_paths.points)
 
-        self.redraw()
+        self.queue_draw()
 
 
-arm_ui = ArmUi(graph_paths.segments)
-print('Starting with segment: ', arm_ui.segments[arm_ui.index].name)
-arm_ui.segments[arm_ui.index].Print(graph_paths.points)
+class Window(Gtk.Window):
+
+    def __init__(self, segments):
+        super().__init__(title="Drawing Area")
+
+        self.segment_store = Gtk.ListStore(int, str)
+
+        for i, segment in enumerate(segments):
+            self.segment_store.append([i, segment.name])
+
+        self.segment_box = Gtk.ComboBox.new_with_model_and_entry(
+            self.segment_store)
+        self.segment_box.connect("changed", self.on_combo_changed)
+        self.segment_box.set_entry_text_column(1)
+
+        self.arm_draw = ArmUi(segments)
+
+        self.arm_draw.y_offset = self.segment_box.get_allocation().width
+
+        print('Starting with segment: ',
+              self.arm_draw.segments[self.arm_draw.index].name)
+        self.arm_draw.segments[self.arm_draw.index].Print(graph_paths.points)
+
+        self.set_events(Gdk.EventMask.BUTTON_PRESS_MASK
+                        | Gdk.EventMask.BUTTON_RELEASE_MASK
+                        | Gdk.EventMask.POINTER_MOTION_MASK
+                        | Gdk.EventMask.SCROLL_MASK
+                        | Gdk.EventMask.KEY_PRESS_MASK)
+        self.method_connect('map-event', self.do_map_event)
+        self.method_connect("key-press-event", self.arm_draw.do_key_press)
+        self.method_connect("motion-notify-event", self.arm_draw.do_motion)
+        self.method_connect("button-press-event",
+                            self.arm_draw._do_button_press_internal)
+
+        self.grid = Gtk.Grid()
+        self.add(self.grid)
+
+        self.grid.attach(self.arm_draw, 0, 1, 1, 1)
+
+        self.isolate_button = Gtk.Button(label="Toggle Path Isolation")
+        self.isolate_button.connect('clicked', self.on_button_click)
+
+        self.theta_button = Gtk.Button(label="Toggle Theta Mode")
+        self.theta_button.connect('clicked', self.on_button_click)
+
+        self.editing_button = Gtk.Button(label="Toggle Editing Mode")
+        self.editing_button.connect('clicked', self.on_button_click)
+
+        self.indicator_button = Gtk.Button(
+            label="Toggle Control Point Indicators")
+        self.indicator_button.connect('clicked', self.on_button_click)
+
+        self.box = Gtk.Box(spacing=6)
+        self.grid.attach(self.box, 0, 0, 1, 1)
+
+        self.figure_canvas = FigureCanvas(self.arm_draw.fig)
+        self.figure_canvas.set_size_request(500, 300)
+
+        self.grid.attach(self.figure_canvas, 1, 1, 1, 1)
+
+        self.box.pack_start(self.segment_box, False, False, 0)
+        self.box.pack_start(self.isolate_button, False, False, 0)
+        self.box.pack_start(self.theta_button, False, False, 0)
+        self.box.pack_start(self.editing_button, False, False, 0)
+        self.box.pack_start(self.indicator_button, False, False, 0)
+
+    def on_combo_changed(self, combo):
+        iter = combo.get_active_iter()
+
+        if iter is not None:
+            model = combo.get_model()
+            id, name = model[iter][:2]
+            print("Selected: ID=%d, name=%s" % (id, name))
+            self.arm_draw.index = id
+            self.arm_draw.queue_draw()
+
+    def method_connect(self, event, cb):
+
+        def handler(obj, *args):
+            cb(*args)
+
+        self.connect(event, handler)
+
+    def do_map_event(self, event):
+        self.arm_draw.y_offset = self.box.get_allocation().height
+
+    def on_button_click(self, button):
+        if self.isolate_button == button:
+            self.arm_draw.view_current = not self.arm_draw.view_current
+        elif self.theta_button == button:
+            self.arm_draw.switch_theta()
+        elif self.editing_button == button:
+            self.arm_draw.editing = not self.arm_draw.editing
+        elif self.indicator_button == button:
+            self.arm_draw.show_indicators = not self.arm_draw.show_indicators
+        self.arm_draw.queue_draw()
+
+
+window = Window(graph_paths.segments)
+window.show_all()
 basic_window.RunApp()
diff --git a/y2023/control_loops/python/graph_paths.py b/y2023/control_loops/python/graph_paths.py
index 24b9805..19e5a34 100644
--- a/y2023/control_loops/python/graph_paths.py
+++ b/y2023/control_loops/python/graph_paths.py
@@ -31,7 +31,7 @@
         control1=np.array([3.170156, -0.561227]),
         control2=np.array([2.972776, -1.026820]),
         end=points['GroundPickupBackConeUp'],
-        control_alpha_rolls=[(0.30, 0.0), (.95, np.pi / 2.0)],
+        control_alpha_rolls=[(.95, np.pi / 2.0)],
     ))
 
 points['GroundPickupBackConeDownBase'] = to_theta_with_circular_index_and_roll(
@@ -44,7 +44,7 @@
         control1=np.array([3.170156, -0.561227]),
         control2=np.array([2.972776, -1.026820]),
         end=points['GroundPickupBackConeDownBase'],
-        control_alpha_rolls=[(0.30, 0.0), (.95, np.pi / 2.0)],
+        control_alpha_rolls=[(.95, np.pi / 2.0)],
     ))
 
 points[
@@ -58,7 +58,7 @@
         control1=np.array([3.495221564200401, 0.4737763579250964]),
         control2=np.array([4.110392601248856, 1.0424853539638115]),
         end=points['GroundPickupFrontConeDownBase'],
-        control_alpha_rolls=[(0.30, 0.0), (.95, -np.pi / 2.0)],
+        control_alpha_rolls=[(.95, -np.pi / 2.0)],
     ))
 
 points['ScoreFrontLowConeDownBase'] = to_theta_with_circular_index_and_roll(
@@ -110,7 +110,7 @@
         control1=np.array([3.6217558044411176, 0.6335548380532725]),
         control2=np.array([4.2557660430407935, 1.0411926555706872]),
         end=points['ScoreFrontLowConeDownTip'],
-        control_alpha_rolls=[(0.20, 0.0), (.95, np.pi / 2.0)],
+        control_alpha_rolls=[(0.30, 0.0), (.95, np.pi / 2.0)],
     ))
 
 points['ScoreBackMidConeDownTip'] = to_theta_with_circular_index_and_roll(
@@ -240,7 +240,7 @@
         control1=np.array([3.153228, -0.497009]),
         control2=np.array([2.972776, -1.026820]),
         end=points['GroundPickupBackCube'],
-        control_alpha_rolls=[(0.4, 0.0), (.9, -np.pi / 2.0)],
+        control_alpha_rolls=[(.9, -np.pi / 2.0)],
     ))
 
 points['GroundPickupFrontCube'] = to_theta_with_circular_index_and_roll(
@@ -253,7 +253,7 @@
         control1=np.array([3.338852196583635, 0.34968650009090885]),
         control2=np.array([4.28246270189025, 1.492916470137478]),
         end=points['GroundPickupFrontCube'],
-        control_alpha_rolls=[(0.4, 0.0), (.9, np.pi / 2.0)],
+        control_alpha_rolls=[(.9, np.pi / 2.0)],
     ))
 
 points['ScoreBackMidConeUp'] = to_theta_with_circular_index_and_roll(
@@ -266,11 +266,11 @@
         control1=np.array([3.6130298244820453, -0.2781204657180023]),
         control2=np.array([3.804763224169111, -0.5179424890517237]),
         end=points['ScoreBackMidConeUp'],
-        control_alpha_rolls=[(0.40, 0.0), (.95, np.pi / 2.0)],
+        control_alpha_rolls=[(.95, np.pi / 2.0)],
     ))
 
 points['ScoreBackLowConeUp'] = to_theta_with_circular_index_and_roll(
-    -1.00472, 0.672615, np.pi / 2.0, circular_index=1)
+    -1.00472, 0.472615, np.pi / 2.0, circular_index=1)
 
 named_segments.append(
     ThetaSplineSegment(
@@ -279,7 +279,7 @@
         control1=np.array([3.260123029490386, -0.5296803702636037]),
         control2=np.array([3.1249665389044283, -0.7810758529482493]),
         end=points['ScoreBackLowConeUp'],
-        control_alpha_rolls=[(0.40, 0.0), (.95, np.pi / 2.0)],
+        control_alpha_rolls=[(.95, np.pi / 2.0)],
     ))
 
 named_segments.append(
@@ -329,20 +329,22 @@
     ))
 
 points['HPPickupBackConeUp'] = to_theta_with_circular_index_and_roll(
-    -1.1200539, 1.335, np.pi / 2.0, circular_index=0)
+    -1.1200539, 1.345, np.pi / 2.0, circular_index=0)
 
 named_segments.append(
     ThetaSplineSegment(
         name="NeutralToHPPickupBackConeUp",
         start=points['Neutral'],
-        control1=np.array([2.0, -0.239378]),
-        control2=np.array([1.6, -0.626835]),
+        control1=np.array([2.7014360412658567, -0.32490272351246796]),
+        control2=np.array([0.8282769211604871, -1.8026615176254461]),
         end=points['HPPickupBackConeUp'],
-        control_alpha_rolls=[(0.3, 0.0), (.9, np.pi / 2.0)],
+        control_alpha_rolls=[(.9, np.pi / 2.0)],
+        alpha_unitizer=np.matrix(
+            f"{1.0 / 15.0} 0 0; 0 {1.0 / 15.0} 0; 0 0 {1.0 / 100.0}"),
     ))
 
 points['HPPickupFrontConeUp'] = np.array(
-    (5.16514378449353, 1.23, -np.pi / 2.0))
+    (5.16514378449353, 1.26, -np.pi / 2.0))
 #        to_theta_with_circular_index_and_roll(
 #    0.265749, 1.28332, -np.pi / 2.0, circular_index=1)
 
@@ -350,10 +352,12 @@
     ThetaSplineSegment(
         name="NeutralToHPPickupFrontConeUp",
         start=points['Neutral'],
-        control1=np.array([4.068204933788692, -0.05440997896697275]),
-        control2=np.array([4.861911360838861, -0.03790410600482508]),
+        control1=np.array([3.7428100581067785, 0.3957215032198551]),
+        control2=np.array([4.349242198354247, 0.8724403878333801]),
         end=points['HPPickupFrontConeUp'],
-        control_alpha_rolls=[(0.3, 0.0), (.9, -np.pi / 2.0)],
+        control_alpha_rolls=[(.8, -np.pi / 2.0)],
+        alpha_unitizer=np.matrix(
+            f"{1.0 / 15.0} 0 0; 0 {1.0 / 15.0} 0; 0 0 {1.0 / 70.0}"),
     ))
 
 points['ScoreFrontHighConeUp'] = to_theta_with_circular_index_and_roll(
@@ -366,7 +370,7 @@
         control1=np.array([2.594244, 0.417442]),
         control2=np.array([1.51325, 0.679748]),
         end=points['ScoreFrontHighConeUp'],
-        control_alpha_rolls=[(0.20, 0.0), (.95, -np.pi / 2.0)],
+        control_alpha_rolls=[(.95, -np.pi / 2.0)],
     ))
 
 points['ScoreFrontMidConeUp'] = to_theta_with_circular_index_and_roll(
@@ -379,20 +383,22 @@
         control1=np.array([3.0, 0.317442]),
         control2=np.array([2.9, 0.479748]),
         end=points['ScoreFrontMidConeUp'],
-        control_alpha_rolls=[(0.40, 0.0), (.95, -np.pi / 2.0)],
+        control_alpha_rolls=[(.95, -np.pi / 2.0)],
     ))
 
+points['Starting'] = np.array((np.pi, -0.125053863467887, 0.0))
+
 points['ScoreFrontMidConeUpAuto'] = to_theta_with_circular_index_and_roll(
     0.58, 0.97, -np.pi / 2.0, circular_index=0)
 
 named_segments.append(
     ThetaSplineSegment(
-        name="NeutralToScoreFrontMidConeUpAuto",
-        start=points['Neutral'],
+        name="StartingToScoreFrontMidConeUpAuto",
+        start=points['Starting'],
         control1=np.array([2.99620794024176, 0.23620211875551145]),
         control2=np.array([2.728197531599509, 0.5677148040671784]),
         end=points['ScoreFrontMidConeUpAuto'],
-        control_alpha_rolls=[(0.20, 0.0), (.90, -np.pi / 2.0)],
+        control_alpha_rolls=[(0.20, 0.0), (.85, -np.pi / 2.0)],
         vmax=10.0,
         alpha_unitizer=np.matrix(
             f"{1.0 / 20.0} 0 0; 0 {1.0 / 25.0} 0; 0 0 {1.0 / 100.0}"),
@@ -418,7 +424,7 @@
         control1=np.array([3.338852196583635, 0.34968650009090885]),
         control2=np.array([4.28246270189025, 1.492916470137478]),
         end=points['ScoreFrontLowCube'],
-        control_alpha_rolls=[(0.4, 0.0), (.9, np.pi / 2.0)],
+        control_alpha_rolls=[(.9, np.pi / 2.0)],
     ))
 
 points['ScoreFrontMidCube'] = to_theta_with_circular_index_and_roll(
@@ -431,7 +437,7 @@
         control1=np.array([3.1310824883477952, 0.23591705727105095]),
         control2=np.array([3.0320025094685965, 0.43674789928668933]),
         end=points["ScoreFrontMidCube"],
-        control_alpha_rolls=[(0.4, np.pi * 0.0), (0.95, np.pi * 0.5)],
+        control_alpha_rolls=[(0.95, np.pi * 0.5)],
     ))
 
 named_segments.append(
@@ -444,17 +450,6 @@
         control_alpha_rolls=[],
     ))
 
-# Auto express spline...
-named_segments.append(
-    ThetaSplineSegment(
-        name="GroundPickupBackCubeToScoreFrontMidCube",
-        start=points['ScoreFrontMidCube'],
-        control1=np.array([3.2345111429709847, 0.45338639767112277]),
-        control2=np.array([3.098240119468829, -0.46161157069783254]),
-        end=points['GroundPickupBackCube'],
-        control_alpha_rolls=[(0.40, 0.0), (0.60, 0.0)],
-    ))
-
 points['ScoreFrontHighCube'] = to_theta_with_circular_index_and_roll(
     0.901437, 1.16, np.pi / 2.0, circular_index=0)
 
@@ -465,7 +460,7 @@
         control1=np.array([2.537484161662287, 0.059700523547219]),
         control2=np.array([2.449391812539668, 0.4141564369176016]),
         end=points["ScoreFrontHighCube"],
-        control_alpha_rolls=[(0.4, np.pi * 0.0), (0.95, np.pi * 0.5)],
+        control_alpha_rolls=[(0.95, np.pi * 0.5)],
     ))
 
 named_segments.append(
@@ -479,7 +474,7 @@
     ))
 
 points['ScoreBackLowCube'] = to_theta_with_circular_index_and_roll(
-    -1.102, 0.3212121, -np.pi / 2.0, circular_index=1)
+    -1.102, 0.3712121, -np.pi / 2.0, circular_index=1)
 
 named_segments.append(
     ThetaSplineSegment(
@@ -501,7 +496,7 @@
         control1=np.array([3.3485646154655404, -0.4369603013926491]),
         control2=np.array([3.2653593368256995, -0.789587049476034]),
         end=points["ScoreBackMidCube"],
-        control_alpha_rolls=[(0.3, -np.pi * 0.0), (0.95, -np.pi * 0.5)],
+        control_alpha_rolls=[(0.95, -np.pi * 0.5)],
     ))
 
 named_segments.append(
@@ -527,7 +522,7 @@
         control1=np.array([3.6804854484103684, -0.3494541095053125]),
         control2=np.array([3.9889380578509517, -0.6637934755748516]),
         end=points["ScoreBackHighCube"],
-        control_alpha_rolls=[(0.3, -np.pi * 0.0), (0.95, -np.pi * 0.5)],
+        control_alpha_rolls=[(0.95, -np.pi * 0.5)],
     ))
 
 named_segments.append(
@@ -550,11 +545,11 @@
         control1=np.array([3.153481004695907, 0.4827717171390571]),
         control2=np.array([4.107487625131798, 0.9935705415901082]),
         end=points['GroundPickupFrontConeUp'],
-        control_alpha_rolls=[(0.30, 0.0), (.95, -np.pi / 2.0)],
+        control_alpha_rolls=[(.95, -np.pi / 2.0)],
     ))
 
 points['ScoreFrontLowConeUp'] = to_theta_with_circular_index_and_roll(
-    0.349687, 0.468804, -np.pi / 2.0, circular_index=0)
+    0.33, 0.42, -np.pi / 2.0, circular_index=0)
 
 named_segments.append(
     ThetaSplineSegment(
@@ -600,8 +595,8 @@
     ThetaSplineSegment(
         name="GroundPickupBackConeUpToGroundPickupBackConeDownBase",
         start=points["GroundPickupBackConeUp"],
-        control1=np.array([2.984750907058771, -1.4175755629898785]),
-        control2=np.array([2.9834020302847164, -1.4959006770007095]),
+        control1=np.array([2.9926247819374154, -1.4933529586884973]),
+        control2=np.array([3.0109584409452284, -1.545647945665206]),
         end=points["GroundPickupBackConeDownBase"],
         control_alpha_rolls=[],
     ))
@@ -610,8 +605,8 @@
     ThetaSplineSegment(
         name="GroundPickupBackCubeToGroundPickupBackConeUp",
         start=points["GroundPickupBackCube"],
-        control1=np.array([2.9814712671305497, -1.4752615794585657]),
-        control2=np.array([2.9814712671305497, -1.4752615794585657]),
+        control1=np.array([3.0505709159476315, -1.2367645274118955]),
+        control2=np.array([3.092073954938234, -1.2212460452853597]),
         end=points["GroundPickupBackConeUp"],
         control_alpha_rolls=[],
     ))
@@ -620,8 +615,8 @@
     ThetaSplineSegment(
         name="GroundPickupBackCubeToGroundPickupBackConeDownBase",
         start=points["GroundPickupBackCube"],
-        control1=np.array([2.9915062872070943, -1.5453319249912183]),
-        control2=np.array([3.0113316601735853, -1.5625220857410058]),
+        control1=np.array([3.1162986756987365, -1.277140627447758]),
+        control2=np.array([3.040845013474888, -1.3101069219600996]),
         end=points["GroundPickupBackConeDownBase"],
         control_alpha_rolls=[],
     ))
@@ -630,8 +625,8 @@
     ThetaSplineSegment(
         name="GroundPickupFrontConeUpToGroundPickupFrontConeDownBase",
         start=points["GroundPickupFrontConeUp"],
-        control1=np.array([4.178303420953318, 1.7954892324947347]),
-        control2=np.array([4.198694185882847, 1.8611851211658763]),
+        control1=np.array([4.165611255924869, 1.776661759504955]),
+        control2=np.array([4.158617821533932, 1.8192037605481532]),
         end=points["GroundPickupFrontConeDownBase"],
         control_alpha_rolls=[],
     ))
@@ -640,8 +635,8 @@
     ThetaSplineSegment(
         name="GroundPickupFrontCubeToGroundPickupFrontConeUp",
         start=points["GroundPickupFrontCube"],
-        control1=np.array([4.152678427672294, 1.755703782155648]),
-        control2=np.array([4.115445030197163, 1.77599054062196]),
+        control1=np.array([4.24343254606187, 1.5218945203478058]),
+        control2=np.array([4.264613405238473, 1.4226736369442823]),
         end=points["GroundPickupFrontConeUp"],
         control_alpha_rolls=[],
     ))
@@ -650,12 +645,33 @@
     ThetaSplineSegment(
         name="GroundPickupFrontCubeToGroundPickFrontCubeDownBase",
         start=points["GroundPickupFrontCube"],
-        control1=np.array([4.126486425254001, 1.838621758570565]),
-        control2=np.array([4.1585708953556, 1.8633805468551703]),
+        control1=np.array([4.291217306429481, 1.6211752281452299]),
+        control2=np.array([4.329997918670176, 1.6541325057673302]),
         end=points["GroundPickupFrontConeDownBase"],
         control_alpha_rolls=[],
     ))
 
+# Auto express spline...
+named_segments.append(
+    ThetaSplineSegment(
+        name="GroundPickupBackCubeToScoreFrontMidCube",
+        start=points['ScoreFrontMidCube'],
+        control1=np.array([3.2345111429709847, 0.45338639767112277]),
+        control2=np.array([3.098240119468829, -0.46161157069783254]),
+        end=points['GroundPickupBackCube'],
+        control_alpha_rolls=[(0.40, 0.0), (0.60, 0.0)],
+    ))
+
+named_segments.append(
+    ThetaSplineSegment(
+        name="GroundPickupBackCubeToScoreFrontHighCube",
+        start=points['ScoreFrontHighCube'],
+        control1=np.array([2.7074513232200186, 0.20154350392334375]),
+        control2=np.array([3.01714846217257, -0.6310437434614364]),
+        end=points['GroundPickupBackCube'],
+        control_alpha_rolls=[(0.40, 0.0), (0.60, 0.0)],
+    ))
+
 front_points = []
 back_points = []
 unnamed_segments = []
diff --git a/y2023/control_loops/python/graph_tools.py b/y2023/control_loops/python/graph_tools.py
index b4d047a..c15b49d 100644
--- a/y2023/control_loops/python/graph_tools.py
+++ b/y2023/control_loops/python/graph_tools.py
@@ -143,8 +143,8 @@
 
 DRIVER_CAM_Z_OFFSET = 3.225 * IN_TO_M
 DRIVER_CAM_POINTS = rect_points(
-    (-5.126 * IN_TO_M + joint_center[0], 0.393 * IN_TO_M + joint_center[0]),
-    (5.125 * IN_TO_M + joint_center[1], 17.375 * IN_TO_M + joint_center[1]),
+    (-0.252, -0.252 + 0.098),
+    (-3.0 * IN_TO_M + joint_center[1], -8.0 * IN_TO_M + joint_center[1]),
     (-8.475 * IN_TO_M - DRIVER_CAM_Z_OFFSET,
      -4.350 * IN_TO_M - DRIVER_CAM_Z_OFFSET))
 
diff --git a/y2023/control_loops/superstructure/BUILD b/y2023/control_loops/superstructure/BUILD
index e5bc830..afca18b 100644
--- a/y2023/control_loops/superstructure/BUILD
+++ b/y2023/control_loops/superstructure/BUILD
@@ -166,6 +166,7 @@
     ],
     target_compatible_with = ["//tools/platforms/hardware:roborio"],
     deps = [
+        ":superstructure_goal_fbs",
         ":superstructure_output_fbs",
         ":superstructure_position_fbs",
         ":superstructure_status_fbs",
diff --git a/y2023/control_loops/superstructure/led_indicator.cc b/y2023/control_loops/superstructure/led_indicator.cc
index 48bca1b..5cee266 100644
--- a/y2023/control_loops/superstructure/led_indicator.cc
+++ b/y2023/control_loops/superstructure/led_indicator.cc
@@ -1,6 +1,7 @@
 #include "y2023/control_loops/superstructure/led_indicator.h"
 
 namespace led = ctre::phoenix::led;
+namespace chrono = std::chrono;
 
 namespace y2023::control_loops::superstructure {
 
@@ -13,6 +14,8 @@
           event_loop_->MakeFetcher<Status>("/superstructure")),
       superstructure_position_fetcher_(
           event_loop_->MakeFetcher<Position>("/superstructure")),
+      superstructure_goal_fetcher_(
+          event_loop_->MakeFetcher<Goal>("/superstructure")),
       server_statistics_fetcher_(
           event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
               "/roborio/aos")),
@@ -34,8 +37,10 @@
   config.brightnessScalar = 1.0;
   candle_.ConfigAllSettings(config, 0);
 
-  event_loop_->AddPhasedLoop([&](int) { DecideColor(); },
-                             std::chrono::milliseconds(20));
+  event_loop_->AddPhasedLoop([this](int) { DecideColor(); },
+                             chrono::milliseconds(20));
+  event_loop_->OnRun(
+      [this]() { startup_time_ = event_loop_->monotonic_now(); });
 }
 
 // This method will be called once per scheduler run
@@ -75,6 +80,7 @@
   drivetrain_output_fetcher_.Fetch();
   drivetrain_status_fetcher_.Fetch();
   client_statistics_fetcher_.Fetch();
+  superstructure_goal_fetcher_.Fetch();
   gyro_reading_fetcher_.Fetch();
   localizer_output_fetcher_.Fetch();
 
@@ -96,15 +102,18 @@
   // Not zeroed
   if (superstructure_status_fetcher_.get() &&
       !superstructure_status_fetcher_->zeroed()) {
-    DisplayLed(255, 0, 255);
+    DisplayLed(255, 255, 0);
     return;
   }
 
-  // If the imu gyro readings are not being sent/updated recently
-  if (!gyro_reading_fetcher_.get() ||
-      gyro_reading_fetcher_.context().monotonic_event_time <
-          event_loop_->monotonic_now() -
-              frc971::controls::kLoopFrequency * 10) {
+  // If the imu gyro readings are not being sent/updated recently.  Only do this
+  // after we've been on for a bit.
+  if (event_loop_->context().monotonic_event_time >
+          startup_time_ + chrono::seconds(5) &&
+      (!gyro_reading_fetcher_.get() ||
+       gyro_reading_fetcher_.context().monotonic_event_time +
+               frc971::controls::kLoopFrequency * 10 <
+           event_loop_->monotonic_now())) {
     if (flash_counter_.Flash()) {
       DisplayLed(255, 0, 0);
     } else {
@@ -127,59 +136,49 @@
     return;
   }
 
-  if (superstructure_status_fetcher_.get()) {
+  if (superstructure_status_fetcher_.get() &&
+      superstructure_goal_fetcher_.get()) {
+    const bool cone = (superstructure_status_fetcher_->game_piece() ==
+                           vision::Class::CONE_UP ||
+                       superstructure_status_fetcher_->game_piece() ==
+                           vision::Class::CONE_DOWN);
+    const bool intaking = (superstructure_goal_fetcher_->roller_goal() ==
+                               RollerGoal::INTAKE_CONE_UP ||
+                           superstructure_goal_fetcher_->roller_goal() ==
+                               RollerGoal::INTAKE_CUBE ||
+                           superstructure_goal_fetcher_->roller_goal() ==
+                               RollerGoal::INTAKE_LAST ||
+                           superstructure_goal_fetcher_->roller_goal() ==
+                               RollerGoal::INTAKE_CONE_DOWN);
     // Check if end effector is intaking.
     if (superstructure_status_fetcher_->end_effector_state() ==
-        EndEffectorState::INTAKING) {
+            EndEffectorState::LOADED &&
+        intaking) {
       if (flash_counter_.Flash()) {
-        DisplayLed(255, 165, 0);
-      } else {
-        DisplayLed(0, 0, 0);
+        if (cone) {
+          DisplayLed(255, 165, 0);
+        } else {
+          DisplayLed(138, 43, 226);
+        }
+        return;
       }
-
-      return;
-    }
-    // Check if end effector is spitting.
-    if (superstructure_status_fetcher_->end_effector_state() ==
-        EndEffectorState::SPITTING) {
-      if (flash_counter_.Flash()) {
-        DisplayLed(0, 255, 0);
-      } else {
-        DisplayLed(0, 0, 0);
-      }
-
-      return;
-    }
-
-    // Check the if there is a cone in the end effector.
-    if (superstructure_status_fetcher_->game_piece() ==
-            vision::Class::CONE_UP ||
-        superstructure_status_fetcher_->game_piece() ==
-            vision::Class::CONE_DOWN) {
-      DisplayLed(255, 255, 0);
-      return;
-    }
-    // Check if the cube beam break is triggered.
-    if (superstructure_status_fetcher_->game_piece() == vision::Class::CUBE) {
-      DisplayLed(138, 43, 226);
-      return;
     }
 
     // Check if there is a target that is in sight
-    if (drivetrain_status_fetcher_.get() != nullptr &&
-        drivetrain_status_fetcher_->line_follow_logging()->have_target()) {
-      DisplayLed(255, 165, 0);
-      return;
-    }
-
     if (event_loop_->monotonic_now() <
-        last_accepted_time_ + std::chrono::milliseconds(500)) {
-      DisplayLed(0, 0, 255);
+        last_accepted_time_ + chrono::milliseconds(100)) {
+      if (drivetrain_status_fetcher_.get() != nullptr &&
+          drivetrain_status_fetcher_->line_follow_logging()->have_target()) {
+        DisplayLed(0, 255, 0);
+        return;
+      } else {
+        DisplayLed(0, 0, 255);
+      }
       return;
     }
-
-    return;
   }
+
+  DisplayLed(0, 0, 0);
 }
 
 }  // namespace y2023::control_loops::superstructure
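A minimal sketch of the staleness gate introduced above, assuming only that the caller supplies the current time, the startup time, and the timestamp of the last gyro reading. The five-second grace period and the ten-loop-period threshold mirror the diff; the steady_clock and 20 ms loop period here are stand-ins for the event-loop clock.

#include <chrono>
#include <iostream>
#include <optional>

using Clock = std::chrono::steady_clock;

// Returns true when the gyro should be flagged as stale: no reading at all, or
// the last reading is older than ten loop periods -- but only once the process
// has been up for more than five seconds, so we don't flash red during boot.
bool GyroStale(Clock::time_point now, Clock::time_point startup_time,
               std::optional<Clock::time_point> last_gyro_reading,
               Clock::duration loop_period = std::chrono::milliseconds(20)) {
  if (now <= startup_time + std::chrono::seconds(5)) {
    return false;
  }
  return !last_gyro_reading.has_value() ||
         *last_gyro_reading + loop_period * 10 < now;
}

int main() {
  const auto start = Clock::now();
  std::cout << std::boolalpha
            << GyroStale(start + std::chrono::seconds(1), start, std::nullopt)
            << " "  // false: still inside the startup grace period.
            << GyroStale(start + std::chrono::seconds(6), start, std::nullopt)
            << "\n";  // true: grace period over and no reading yet.
}
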
diff --git a/y2023/control_loops/superstructure/led_indicator.h b/y2023/control_loops/superstructure/led_indicator.h
index d88650d..dfdf4b1 100644
--- a/y2023/control_loops/superstructure/led_indicator.h
+++ b/y2023/control_loops/superstructure/led_indicator.h
@@ -12,6 +12,7 @@
 #include "frc971/control_loops/drivetrain/localization/localizer_output_generated.h"
 #include "frc971/control_loops/profiled_subsystem_generated.h"
 #include "frc971/queues/gyro_generated.h"
+#include "y2023/control_loops/superstructure/superstructure_goal_generated.h"
 #include "y2023/control_loops/superstructure/superstructure_output_generated.h"
 #include "y2023/control_loops/superstructure/superstructure_position_generated.h"
 #include "y2023/control_loops/superstructure/superstructure_status_generated.h"
@@ -69,13 +70,14 @@
 
   void DisplayLed(uint8_t r, uint8_t g, uint8_t b);
 
-  ctre::phoenix::led::CANdle candle_{0, ""};
+  ctre::phoenix::led::CANdle candle_{8, "rio"};
 
   aos::EventLoop *event_loop_;
   aos::Fetcher<frc971::control_loops::drivetrain::Output>
       drivetrain_output_fetcher_;
   aos::Fetcher<Status> superstructure_status_fetcher_;
   aos::Fetcher<Position> superstructure_position_fetcher_;
+  aos::Fetcher<Goal> superstructure_goal_fetcher_;
   aos::Fetcher<aos::message_bridge::ServerStatistics>
       server_statistics_fetcher_;
   aos::Fetcher<aos::message_bridge::ClientStatistics>
@@ -89,6 +91,9 @@
   aos::monotonic_clock::time_point last_accepted_time_ =
       aos::monotonic_clock::min_time;
 
+  aos::monotonic_clock::time_point startup_time_ =
+      aos::monotonic_clock::min_time;
+
   FlashCounter flash_counter_{kFlashIterations};
 };
 
diff --git a/y2023/control_loops/superstructure/superstructure_goal.fbs b/y2023/control_loops/superstructure/superstructure_goal.fbs
index 7ac7000..2ee2d2b 100644
--- a/y2023/control_loops/superstructure/superstructure_goal.fbs
+++ b/y2023/control_loops/superstructure/superstructure_goal.fbs
@@ -28,5 +28,4 @@
 }
 
 
-
 root_type Goal;
diff --git a/y2023/localizer/localizer.cc b/y2023/localizer/localizer.cc
index 32f5604..efffca3 100644
--- a/y2023/localizer/localizer.cc
+++ b/y2023/localizer/localizer.cc
@@ -9,16 +9,26 @@
 
 DEFINE_double(max_pose_error, 1e-6,
               "Throw out target poses with a higher pose error than this");
+DEFINE_double(
+    max_pose_error_ratio, 0.4,
+    "Throw out target poses with a higher pose error ratio than this");
 DEFINE_double(distortion_noise_scalar, 1.0,
               "Scale the target pose distortion factor by this when computing "
               "the noise.");
 DEFINE_double(
-    max_implied_yaw_error, 30.0,
+    max_implied_yaw_error, 3.0,
     "Reject target poses that imply a robot yaw of more than this many degrees "
     "off from our estimate.");
-DEFINE_double(max_distance_to_target, 6.0,
+DEFINE_double(
+    max_implied_teleop_yaw_error, 30.0,
+    "Reject target poses that imply a robot yaw of more than this many degrees "
+    "off from our estimate.");
+DEFINE_double(max_distance_to_target, 3.5,
               "Reject target poses that have a 3d distance of more than this "
               "many meters.");
+DEFINE_double(max_auto_image_robot_speed, 2.0,
+              "Reject target poses when the robot is travelling faster than "
+              "this speed in auto.");
 
 namespace y2023::localizer {
 namespace {
@@ -97,7 +107,8 @@
                    y2023::constants::Values::DrivetrainEncoderToMeters(1),
                    std::bind(&Localizer::HandleImu, this, std::placeholders::_1,
                              std::placeholders::_2, std::placeholders::_3,
-                             std::placeholders::_4, std::placeholders::_5)),
+                             std::placeholders::_4, std::placeholders::_5),
+                   frc971::controls::ImuWatcher::TimestampSource::kPi),
       utils_(event_loop),
       status_sender_(event_loop->MakeSender<Status>("/localizer")),
       output_sender_(event_loop->MakeSender<frc971::controls::LocalizerOutput>(
@@ -329,14 +340,30 @@
             << target.pose_error();
     return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR,
                        &builder);
+  } else if (target.pose_error_ratio() > FLAGS_max_pose_error_ratio) {
+    VLOG(1) << "Rejecting target due to high pose error ratio "
+            << target.pose_error_ratio();
+    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR_RATIO,
+                       &builder);
   }
 
   double theta_at_capture = state_at_capture.value()(StateIdx::kTheta);
   double camera_implied_theta = Z(Corrector::kTheta);
   constexpr double kDegToRad = M_PI / 180.0;
 
-  if (std::abs(camera_implied_theta - theta_at_capture) >
-             FLAGS_max_implied_yaw_error * kDegToRad) {
+  const double robot_speed =
+      (state_at_capture.value()(StateIdx::kLeftVelocity) +
+       state_at_capture.value()(StateIdx::kRightVelocity)) /
+      2.0;
+  const double yaw_threshold =
+      (utils_.MaybeInAutonomous() ? FLAGS_max_implied_yaw_error
+                                  : FLAGS_max_implied_teleop_yaw_error) *
+      kDegToRad;
+  if (utils_.MaybeInAutonomous() &&
+      (std::abs(robot_speed) > FLAGS_max_auto_image_robot_speed)) {
+    return RejectImage(camera_index, RejectionReason::ROBOT_TOO_FAST, &builder);
+  } else if (std::abs(aos::math::NormalizeAngle(
+                 camera_implied_theta - theta_at_capture)) > yaw_threshold) {
     return RejectImage(camera_index, RejectionReason::HIGH_IMPLIED_YAW_ERROR,
                        &builder);
   } else if (distance_to_target > FLAGS_max_distance_to_target) {
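As a companion to the hunk above, a standalone sketch of the rejection gating it adds (ignoring the pose-error and pose-error-ratio checks): in autonomous a robot-speed gate applies and the tight 3-degree yaw threshold is used, teleop falls back to the 30-degree threshold, and the yaw error is wrapped into [-pi, pi] before comparison. The enum and function names are illustrative, not the flatbuffer RejectionReason or the Localizer API; thresholds mirror the flag defaults in the diff.

#include <cmath>
#include <iostream>
#include <optional>

enum class RejectionReason {
  ROBOT_TOO_FAST,
  HIGH_IMPLIED_YAW_ERROR,
  HIGH_DISTANCE_TO_TARGET,
};

constexpr double kPi = 3.14159265358979323846;

// Wrap an angle into [-pi, pi] so, e.g., +179 deg vs -179 deg is a 2 deg error.
double NormalizeAngle(double theta) { return std::remainder(theta, 2.0 * kPi); }

std::optional<RejectionReason> MaybeReject(bool in_autonomous,
                                           double robot_speed,
                                           double camera_implied_theta,
                                           double theta_estimate,
                                           double distance_to_target) {
  constexpr double kDegToRad = kPi / 180.0;
  const double yaw_threshold = (in_autonomous ? 3.0 : 30.0) * kDegToRad;
  if (in_autonomous && std::abs(robot_speed) > 2.0) {
    return RejectionReason::ROBOT_TOO_FAST;
  }
  if (std::abs(NormalizeAngle(camera_implied_theta - theta_estimate)) >
      yaw_threshold) {
    return RejectionReason::HIGH_IMPLIED_YAW_ERROR;
  }
  if (distance_to_target > 3.5) {
    return RejectionReason::HIGH_DISTANCE_TO_TARGET;
  }
  return std::nullopt;  // Accept the detection.
}

int main() {
  // Rejected: moving 2.5 m/s in auto exceeds the 2.0 m/s image-speed limit.
  std::cout << MaybeReject(true, 2.5, 0.0, 0.0, 2.0).has_value() << "\n";
}
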
diff --git a/y2023/localizer/localizer_test.cc b/y2023/localizer/localizer_test.cc
index 7248b33..e8b7d56 100644
--- a/y2023/localizer/localizer_test.cc
+++ b/y2023/localizer/localizer_test.cc
@@ -155,6 +155,7 @@
             target_builder.add_position(position_offset);
             target_builder.add_orientation(quat_offset);
             target_builder.add_pose_error(pose_error_);
+            target_builder.add_pose_error_ratio(pose_error_ratio_);
             auto target_offset = target_builder.Finish();
 
             auto targets_offset = builder.fbb()->CreateVector({target_offset});
@@ -277,6 +278,7 @@
 
   uint64_t send_target_id_ = kTargetId;
   double pose_error_ = 1e-7;
+  double pose_error_ratio_ = 0.1;
   double implied_yaw_error_ = 0.0;
 
   gflags::FlagSaver flag_saver_;
@@ -507,4 +509,27 @@
       status_fetcher_->statistics()->Get(0)->total_candidates());
 }
 
+// Tests that we correctly reject a detection with a high pose error ratio.
+TEST_F(LocalizerTest, HighPoseErrorRatio) {
+  output_voltages_ << 0.0, 0.0;
+  send_targets_ = true;
+  // Send the minimum pose error ratio to be rejected
+  constexpr double kEps = 1e-9;
+  pose_error_ratio_ = 0.4 + kEps;
+
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(status_fetcher_.Fetch());
+  ASSERT_TRUE(status_fetcher_->has_statistics());
+  ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+  ASSERT_EQ(0, status_fetcher_->statistics()->Get(0)->total_accepted());
+  ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+  ASSERT_EQ(
+      status_fetcher_->statistics()
+          ->Get(0)
+          ->rejection_reasons()
+          ->Get(static_cast<size_t>(RejectionReason::HIGH_POSE_ERROR_RATIO))
+          ->count(),
+      status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
 }  // namespace y2023::localizer::testing
diff --git a/y2023/localizer/status.fbs b/y2023/localizer/status.fbs
index ded492e..854603e 100644
--- a/y2023/localizer/status.fbs
+++ b/y2023/localizer/status.fbs
@@ -22,6 +22,10 @@
   // Pose estimate had a high distance to target.
   // We don't trust estimates very far out.
   HIGH_DISTANCE_TO_TARGET = 6,
+  // The robot was travelling too fast; we don't trust the target.
+  ROBOT_TOO_FAST = 7,
+  // Pose estimation error ratio was higher than any normal detection.
+  HIGH_POSE_ERROR_RATIO = 8,
 }
 
 table RejectionCount {
diff --git a/y2023/vision/BUILD b/y2023/vision/BUILD
index 1cdbe36..0f75aba 100644
--- a/y2023/vision/BUILD
+++ b/y2023/vision/BUILD
@@ -117,6 +117,7 @@
         "//frc971/vision:target_map_fbs",
         "//frc971/vision:target_mapper",
         "//frc971/vision:vision_fbs",
+        "//frc971/vision:visualize_robot",
         "//third_party:opencv",
         "//third_party/apriltag",
         "//y2023/constants:constants_fbs",
diff --git a/y2023/vision/aprilrobotics.cc b/y2023/vision/aprilrobotics.cc
index 95ad541..45ab7db 100644
--- a/y2023/vision/aprilrobotics.cc
+++ b/y2023/vision/aprilrobotics.cc
@@ -1,12 +1,14 @@
 #include "y2023/vision/aprilrobotics.h"
 
+#include <opencv2/highgui.hpp>
+
 #include "y2023/vision/vision_util.h"
 
 DEFINE_bool(
     debug, false,
     "If true, dump a ton of debug and crash on the first valid detection.");
 
-DEFINE_double(min_decision_margin, 75.0,
+DEFINE_double(min_decision_margin, 50.0,
               "Minimum decision margin (confidence) for an apriltag detection");
 DEFINE_int32(pixel_border, 10,
              "Size of image border within which to reject detected corners");
@@ -15,6 +17,8 @@
     "Maximum expected value for unscaled distortion factors. Will scale "
     "distortion factors so that this value (and a higher distortion) maps to "
     "1.0.");
+DEFINE_uint64(pose_estimation_iterations, 50,
+              "Number of iterations for apriltag pose estimation.");
 
 namespace y2023 {
 namespace vision {
@@ -22,17 +26,19 @@
 namespace chrono = std::chrono;
 
 AprilRoboticsDetector::AprilRoboticsDetector(aos::EventLoop *event_loop,
-                                             std::string_view channel_name)
+                                             std::string_view channel_name,
+                                             bool flip_image)
     : calibration_data_(event_loop),
       image_size_(0, 0),
+      flip_image_(flip_image),
+      node_name_(event_loop->node()->name()->string_view()),
       ftrace_(),
-      image_callback_(
-          event_loop, channel_name,
-          [&](cv::Mat image_color_mat,
-              const aos::monotonic_clock::time_point eof) {
-            HandleImage(image_color_mat, eof);
-          },
-          chrono::milliseconds(5)),
+      image_callback_(event_loop, channel_name,
+                      [&](cv::Mat image_color_mat,
+                          const aos::monotonic_clock::time_point eof) {
+                        HandleImage(image_color_mat, eof);
+                      },
+                      chrono::milliseconds(5)),
       target_map_sender_(
           event_loop->MakeSender<frc971::vision::TargetMap>("/camera")),
       image_annotations_sender_(
@@ -54,7 +60,6 @@
       calibration_data_.constants(), event_loop->node()->name()->string_view());
 
   extrinsics_ = CameraExtrinsics(calibration_);
-
   intrinsics_ = CameraIntrinsics(calibration_);
   // Create an undistort projection matrix using the intrinsics
   projection_matrix_ = cv::Mat::zeros(3, 4, CV_64F);
@@ -94,8 +99,11 @@
 
   auto builder = target_map_sender_.MakeBuilder();
   std::vector<flatbuffers::Offset<frc971::vision::TargetPoseFbs>> target_poses;
-  for (const auto &detection : result.detections) {
-    target_poses.emplace_back(BuildTargetPose(detection, builder.fbb()));
+  for (auto &detection : result.detections) {
+    auto *fbb = builder.fbb();
+    auto pose = BuildTargetPose(detection, fbb);
+    DestroyPose(&detection.pose);
+    target_poses.emplace_back(pose);
   }
   const auto target_poses_offset = builder.fbb()->CreateVector(target_poses);
   auto target_map_builder = builder.MakeBuilder<frc971::vision::TargetMap>();
@@ -123,7 +131,7 @@
   return frc971::vision::CreateTargetPoseFbs(
       *fbb, detection.det.id, position_offset, orientation_offset,
       detection.det.decision_margin, detection.pose_error,
-      detection.distortion_factor);
+      detection.distortion_factor, detection.pose_error_ratio);
 }
 
 void AprilRoboticsDetector::UndistortDetection(
@@ -179,8 +187,15 @@
   return corner_points;
 }
 
+void AprilRoboticsDetector::DestroyPose(apriltag_pose_t *pose) const {
+  matd_destroy(pose->R);
+  matd_destroy(pose->t);
+}
+
 AprilRoboticsDetector::DetectionResult AprilRoboticsDetector::DetectTags(
     cv::Mat image, aos::monotonic_clock::time_point eof) {
+  // Color copy of the gray input for the visualization code to draw on.
+  cv::Mat color_image;
+  cv::cvtColor(image, color_image, cv::COLOR_GRAY2RGB);
   const aos::monotonic_clock::time_point start_time =
       aos::monotonic_clock::now();
 
@@ -250,8 +265,13 @@
       const aos::monotonic_clock::time_point before_pose_estimation =
           aos::monotonic_clock::now();
 
-      apriltag_pose_t pose;
-      double pose_error = estimate_tag_pose(&info, &pose);
+      apriltag_pose_t pose_1;
+      apriltag_pose_t pose_2;
+      double pose_error_1;
+      double pose_error_2;
+      estimate_tag_pose_orthogonal_iteration(&info, &pose_error_1, &pose_1,
+                                             &pose_error_2, &pose_2,
+                                             FLAGS_pose_estimation_iterations);
 
       const aos::monotonic_clock::time_point after_pose_estimation =
           aos::monotonic_clock::now();
@@ -260,7 +280,8 @@
                                           before_pose_estimation)
                      .count()
               << " seconds for pose estimation";
-      VLOG(1) << "Pose err: " << pose_error;
+      VLOG(1) << "Pose err 1: " << pose_error_1;
+      VLOG(1) << "Pose err 2: " << pose_error_2;
 
       // Send undistorted corner points in pink
       std::vector<cv::Point2f> corner_points = MakeCornerVector(det);
@@ -271,14 +292,67 @@
       double distortion_factor =
           ComputeDistortionFactor(orig_corner_points, corner_points);
 
+      // The solver returns two candidate pose estimates.  Choose the one
+      // with the lower estimation error.
+      bool use_pose_1 = (pose_error_1 < pose_error_2);
+      auto best_pose = (use_pose_1 ? pose_1 : pose_2);
+      auto secondary_pose = (use_pose_1 ? pose_2 : pose_1);
+      double best_pose_error = (use_pose_1 ? pose_error_1 : pose_error_2);
+      double secondary_pose_error = (use_pose_1 ? pose_error_2 : pose_error_1);
+
+      CHECK_NE(best_pose_error, std::numeric_limits<double>::infinity())
+          << "Got no valid pose estimations; this should not be possible.";
+      double pose_error_ratio = best_pose_error / secondary_pose_error;
+
+      // Destroy the secondary pose if we got one
+      if (secondary_pose_error != std::numeric_limits<double>::infinity()) {
+        DestroyPose(&secondary_pose);
+      }
+
       results.emplace_back(Detection{.det = *det,
-                                     .pose = pose,
-                                     .pose_error = pose_error,
-                                     .distortion_factor = distortion_factor});
+                                     .pose = best_pose,
+                                     .pose_error = best_pose_error,
+                                     .distortion_factor = distortion_factor,
+                                     .pose_error_ratio = pose_error_ratio});
+
+      if (FLAGS_visualize) {
+        // Draw raw (distorted) corner points in green
+        cv::line(color_image, orig_corner_points[0], orig_corner_points[1],
+                 cv::Scalar(0, 255, 0), 2);
+        cv::line(color_image, orig_corner_points[1], orig_corner_points[2],
+                 cv::Scalar(0, 255, 0), 2);
+        cv::line(color_image, orig_corner_points[2], orig_corner_points[3],
+                 cv::Scalar(0, 255, 0), 2);
+        cv::line(color_image, orig_corner_points[3], orig_corner_points[0],
+                 cv::Scalar(0, 255, 0), 2);
+
+        // Draw undistorted corner points in red
+        cv::line(color_image, corner_points[0], corner_points[1],
+                 cv::Scalar(0, 0, 255), 2);
+        cv::line(color_image, corner_points[1], corner_points[2],
+                 cv::Scalar(0, 0, 255), 2);
+        cv::line(color_image, corner_points[2], corner_points[3],
+                 cv::Scalar(0, 0, 255), 2);
+        cv::line(color_image, corner_points[3], corner_points[0],
+                 cv::Scalar(0, 0, 255), 2);
+      }
+
+      VLOG(1) << "Found tag number " << det->id << " hamming: " << det->hamming
+              << " margin: " << det->decision_margin;
+
     } else {
       rejections_++;
     }
   }
+  if (FLAGS_visualize) {
+    // Display the result.  Rotate the image by 180 degrees first when the
+    // camera is mounted rolled over, so it shows upright.
+    if (flip_image_) {
+      cv::rotate(color_image, color_image, cv::ROTATE_180);
+    }
+    cv::imshow(absl::StrCat("AprilRoboticsDetector Image ", node_name_),
+               color_image);
+  }
 
   const auto corners_offset = builder.fbb()->CreateVector(foxglove_corners);
   foxglove::ImageAnnotations::Builder annotation_builder(*builder.fbb());
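
For a planar tag, estimate_tag_pose_orthogonal_iteration() can converge to two local minima, which is why the code above keeps the lower-error hypothesis and records the ratio of the two errors: a ratio near 1.0 means the hypotheses are nearly indistinguishable. Below is a small standalone sketch of that selection (not the committed code), using the same convention that a missing second solution reports an error of +infinity.

#include <limits>

struct PoseChoice {
  bool use_pose_1;     // True if the first hypothesis has the lower error.
  double best_error;   // Error of the chosen hypothesis.
  double error_ratio;  // best_error / other_error; 0.0 if only one solution.
};

// Mirrors the selection above, factored out for clarity.
PoseChoice ChoosePose(double pose_error_1, double pose_error_2) {
  const bool use_pose_1 = pose_error_1 < pose_error_2;
  const double best = use_pose_1 ? pose_error_1 : pose_error_2;
  const double secondary = use_pose_1 ? pose_error_2 : pose_error_1;
  // When the solver found only one solution, secondary is +infinity and the
  // ratio collapses to 0.0, i.e. completely unambiguous.
  return PoseChoice{use_pose_1, best, best / secondary};
}
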
diff --git a/y2023/vision/aprilrobotics.h b/y2023/vision/aprilrobotics.h
index fab2d30..01e3138 100644
--- a/y2023/vision/aprilrobotics.h
+++ b/y2023/vision/aprilrobotics.h
@@ -11,6 +11,7 @@
 #include "frc971/vision/target_map_generated.h"
 #include "frc971/vision/target_mapper.h"
 #include "frc971/vision/vision_generated.h"
+#include "frc971/vision/visualize_robot.h"
 #include "opencv2/core/eigen.hpp"
 #include "opencv2/imgproc.hpp"
 #include "third_party/apriltag/apriltag.h"
@@ -29,6 +30,7 @@
     apriltag_pose_t pose;
     double pose_error;
     double distortion_factor;
+    double pose_error_ratio;
   };
 
   struct DetectionResult {
@@ -37,11 +39,15 @@
   };
 
   AprilRoboticsDetector(aos::EventLoop *event_loop,
-                        std::string_view channel_name);
+                        std::string_view channel_name, bool flip_image = true);
   ~AprilRoboticsDetector();
 
   void SetWorkerpoolAffinities();
 
+  // Deletes the heap-allocated rotation and translation matrices owned by the
+  // given pose.
+  void DestroyPose(apriltag_pose_t *pose) const;
+
   // Undistorts the april tag corners using the camera calibration
   void UndistortDetection(apriltag_detection_t *det) const;
 
@@ -77,14 +83,16 @@
   std::optional<cv::Mat> extrinsics_;
   cv::Mat dist_coeffs_;
   cv::Size image_size_;
+  bool flip_image_;
+  std::string_view node_name_;
 
   aos::Ftrace ftrace_;
 
   frc971::vision::ImageCallback image_callback_;
   aos::Sender<frc971::vision::TargetMap> target_map_sender_;
   aos::Sender<foxglove::ImageAnnotations> image_annotations_sender_;
-
   size_t rejections_;
+  frc971::vision::VisualizeRobot vis_robot_;
 };
 
 }  // namespace vision
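
DestroyPose() must be called by hand on every pose the detector keeps, as well as on the discarded secondary hypothesis. If those manual calls ever become hard to track, one alternative worth considering is a small RAII owner for apriltag_pose_t; the sketch below is only that idea, not part of this change, and the include path assumes the same third_party layout used above.

#include "third_party/apriltag/apriltag_pose.h"

// Owns the matd_t rotation/translation allocated by the apriltag pose
// estimator and frees them when it goes out of scope.
class ScopedApriltagPose {
 public:
  explicit ScopedApriltagPose(apriltag_pose_t pose) : pose_(pose) {}
  ~ScopedApriltagPose() {
    if (pose_.R != nullptr) matd_destroy(pose_.R);
    if (pose_.t != nullptr) matd_destroy(pose_.t);
  }
  // Non-copyable so the underlying matrices are freed exactly once.
  ScopedApriltagPose(const ScopedApriltagPose &) = delete;
  ScopedApriltagPose &operator=(const ScopedApriltagPose &) = delete;

  const apriltag_pose_t &get() const { return pose_; }

 private:
  apriltag_pose_t pose_;
};
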
diff --git a/y2023/vision/calib_files/calibration_pi-971-2_cam-23-06_ext_2023-03-25.json b/y2023/vision/calib_files/calibration_pi-971-2_cam-23-06_ext_2023-03-25.json
new file mode 100644
index 0000000..d8b5227
--- /dev/null
+++ b/y2023/vision/calib_files/calibration_pi-971-2_cam-23-06_ext_2023-03-25.json
@@ -0,0 +1 @@
+{ "node_name": "pi2", "team_number": 971, "intrinsics": [ 893.242981, 0.0, 639.796692, 0.0, 892.498718, 354.109344, 0.0, 0.0, 1.0 ], "fixed_extrinsics": { "data": [ 0.858536,    0.224804,    0.460847,    0.198133, 0.473832, -0.00434901,   -0.880604,   -0.221657, -0.195959,    0.974394,   -0.110253,    0.593406, 0.0, 0.0, 0.0, 1.0 ] }, "dist_coeffs": [ -0.451751, 0.252422, 0.000531, 0.000079, -0.079369 ], "calibration_timestamp": 1358501526409252911, "camera_id": "23-06" }
diff --git a/y2023/vision/target_mapping.cc b/y2023/vision/target_mapping.cc
index 9352bdd..e9ef662 100644
--- a/y2023/vision/target_mapping.cc
+++ b/y2023/vision/target_mapping.cc
@@ -7,6 +7,7 @@
 #include "frc971/vision/calibration_generated.h"
 #include "frc971/vision/charuco_lib.h"
 #include "frc971/vision/target_mapper.h"
+#include "frc971/vision/visualize_robot.h"
 #include "opencv2/aruco.hpp"
 #include "opencv2/calib3d.hpp"
 #include "opencv2/core/eigen.hpp"
@@ -28,12 +29,21 @@
               "Directory to write solved target map to");
 DEFINE_string(field_name, "charged_up",
               "Field name, for the output json filename and flatbuffer field");
-DEFINE_int32(team_number, 7971,
-             "Use the calibration for a node with this team number");
+DEFINE_int32(team_number, 0,
+             "Required: Use the calibration for a node with this team number");
 DEFINE_string(mcap_output_path, "/tmp/log.mcap", "Log to output.");
 DEFINE_string(pi, "pi1", "Pi name to generate mcap log for; defaults to pi1.");
 DEFINE_double(max_pose_error, 1e-6,
               "Throw out target poses with a higher pose error than this");
+DEFINE_double(
+    max_pose_error_ratio, 0.4,
+    "Throw out target poses with a higher pose error ratio than this");
+DEFINE_uint64(wait_key, 0,
+              "Time in ms to wait between images, if no click (0 to wait "
+              "indefinitely until click).");
+DEFINE_uint64(skip_to, 1,
+              "Start at combined image of this number (1 is the first image)");
+DEFINE_bool(solve, true, "Whether to solve for the field's target map.");
 
 namespace y2023 {
 namespace vision {
@@ -42,6 +52,7 @@
 using frc971::vision::PoseUtils;
 using frc971::vision::TargetMap;
 using frc971::vision::TargetMapper;
+using frc971::vision::VisualizeRobot;
 namespace calibration = frc971::vision::calibration;
 
 // Change reference frame from camera to robot
@@ -52,16 +63,41 @@
   return H_robot_target;
 }
 
+const int kImageWidth = 1000;
+// Map from pi node name to color for drawing
+const std::map<std::string, cv::Scalar> kPiColors = {
+    {"pi1", cv::Scalar(255, 0, 255)},
+    {"pi2", cv::Scalar(255, 255, 0)},
+    {"pi3", cv::Scalar(0, 255, 255)},
+    {"pi4", cv::Scalar(255, 165, 0)},
+};
+
 // Add detected apriltag poses relative to the robot to
 // timestamped_target_detections
 void HandleAprilTags(const TargetMap &map,
                      aos::distributed_clock::time_point pi_distributed_time,
                      std::vector<DataAdapter::TimestampedDetection>
                          *timestamped_target_detections,
-                     Eigen::Affine3d extrinsics) {
+                     Eigen::Affine3d extrinsics, std::string node_name,
+                     frc971::vision::TargetMapper target_loc_mapper,
+                     std::set<std::string> *drawn_nodes,
+                     VisualizeRobot *vis_robot, size_t *display_count) {
+  bool drew = false;
+  std::stringstream label;
+  label << node_name << " - ";
+
   for (const auto *target_pose_fbs : *map.target_poses()) {
     // Skip detections with high pose errors
     if (target_pose_fbs->pose_error() > FLAGS_max_pose_error) {
+      VLOG(1) << " Skipping tag " << target_pose_fbs->id()
+              << " due to pose error of " << target_pose_fbs->pose_error();
+      continue;
+    }
+    // Skip detections with high pose error ratios
+    if (target_pose_fbs->pose_error_ratio() > FLAGS_max_pose_error_ratio) {
+      VLOG(1) << " Skipping tag " << target_pose_fbs->id()
+              << " due to pose error ratio of "
+              << target_pose_fbs->pose_error_ratio();
       continue;
     }
 
@@ -88,6 +124,59 @@
             .distance_from_camera = distance_from_camera,
             .distortion_factor = distortion_factor,
             .id = static_cast<TargetMapper::TargetId>(target_pose.id)});
+
+    if (FLAGS_visualize) {
+      // If we've already drawn in the current image,
+      // display it before clearing and adding the new poses
+      if (drawn_nodes->count(node_name) != 0) {
+        (*display_count)++;
+        cv::putText(vis_robot->image_,
+                    "Poses #" + std::to_string(*display_count),
+                    cv::Point(600, 10), cv::FONT_HERSHEY_PLAIN, 1.0,
+                    cv::Scalar(255, 255, 255));
+
+        if (*display_count >= FLAGS_skip_to) {
+          cv::imshow("View", vis_robot->image_);
+          cv::waitKey(FLAGS_wait_key);
+        } else {
+          VLOG(1) << "At poses #" << std::to_string(*display_count);
+        }
+        vis_robot->ClearImage();
+        drawn_nodes->clear();
+      }
+
+      Eigen::Affine3d H_world_target = PoseUtils::Pose3dToAffine3d(
+          target_loc_mapper.GetTargetPoseById(target_pose_fbs->id())->pose);
+      Eigen::Affine3d H_world_robot = H_world_target * H_robot_target.inverse();
+      VLOG(2) << node_name << ", " << target_pose_fbs->id()
+              << ", t = " << pi_distributed_time
+              << ", pose_error = " << target_pose_fbs->pose_error()
+              << ", pose_error_ratio = " << target_pose_fbs->pose_error_ratio()
+              << ", robot_pos (x,y,z) + "
+              << H_world_robot.translation().transpose();
+
+      label << "id " << target_pose_fbs->id() << ": err "
+            << (target_pose_fbs->pose_error() / FLAGS_max_pose_error)
+            << " ratio " << target_pose_fbs->pose_error_ratio() << " ";
+
+      vis_robot->DrawRobotOutline(H_world_robot,
+                                  std::to_string(target_pose_fbs->id()),
+                                  kPiColors.at(node_name));
+
+      vis_robot->DrawFrameAxes(H_world_target,
+                               std::to_string(target_pose_fbs->id()),
+                               kPiColors.at(node_name));
+      drew = true;
+    }
+  }
+  if (drew) {
+    size_t pi_number =
+        static_cast<size_t>(node_name[node_name.size() - 1] - '0');
+    cv::putText(vis_robot->image_, label.str(),
+                cv::Point(10, 10 + 20 * pi_number), cv::FONT_HERSHEY_PLAIN, 1.0,
+                kPiColors.at(node_name));
+
+    drawn_nodes->emplace(node_name);
   }
 }
 
@@ -97,10 +186,15 @@
     aos::logger::LogReader *reader,
     std::vector<DataAdapter::TimestampedDetection>
         *timestamped_target_detections,
-    std::vector<std::unique_ptr<AprilRoboticsDetector>> *detectors) {
+    std::vector<std::unique_ptr<AprilRoboticsDetector>> *detectors,
+    std::set<std::string> *drawn_nodes, VisualizeRobot *vis_robot,
+    size_t *display_count) {
   static constexpr std::string_view kImageChannel = "/camera";
+  // For team numbers other than 7971, the cameras are rolled by 180 degrees,
+  // so flip the image to display it upright.
+  bool flip_image = (FLAGS_team_number != 7971);
   auto detector_ptr = std::make_unique<AprilRoboticsDetector>(
-      detection_event_loop, kImageChannel);
+      detection_event_loop, kImageChannel, flip_image);
   // Get the camera extrinsics
   cv::Mat extrinsics_cv = detector_ptr->extrinsics().value();
   Eigen::Matrix4d extrinsics_matrix;
@@ -109,6 +203,10 @@
 
   detectors->emplace_back(std::move(detector_ptr));
 
+  ceres::examples::VectorOfConstraints target_constraints;
+  frc971::vision::TargetMapper target_loc_mapper(FLAGS_json_path,
+                                                 target_constraints);
+
   mapping_event_loop->MakeWatcher("/camera", [=](const TargetMap &map) {
     aos::distributed_clock::time_point pi_distributed_time =
         reader->event_loop_factory()
@@ -116,8 +214,10 @@
             ->ToDistributedClock(aos::monotonic_clock::time_point(
                 aos::monotonic_clock::duration(map.monotonic_timestamp_ns())));
 
+    std::string node_name = detection_event_loop->node()->name()->str();
     HandleAprilTags(map, pi_distributed_time, timestamped_target_detections,
-                    extrinsics);
+                    extrinsics, node_name, target_loc_mapper, drawn_nodes,
+                    vis_robot, display_count);
   });
 }
 
@@ -136,8 +236,9 @@
   aos::logger::LogReader reader(
       aos::logger::SortParts(unsorted_logfiles),
       config.has_value() ? &config->message() : nullptr);
-  // Send new april tag poses. This allows us to take a log of images, then play
-  // with the april detection code and see those changes take effect in mapping
+  // Send new april tag poses. This allows us to take a log of images, then
+  // play with the april detection code and see those changes take effect in
+  // mapping
   constexpr size_t kNumPis = 4;
   for (size_t i = 1; i <= kNumPis; i++) {
     reader.RemapLoggedChannel(absl::StrFormat("/pi%u/camera", i),
@@ -156,6 +257,9 @@
                           FLAGS_constants_path);
 
   std::vector<std::unique_ptr<AprilRoboticsDetector>> detectors;
+  VisualizeRobot vis_robot;
+  std::set<std::string> drawn_nodes;
+  size_t display_count = 0;
 
   const aos::Node *pi1 =
       aos::configuration::GetNode(reader.configuration(), "pi1");
@@ -164,7 +268,8 @@
   std::unique_ptr<aos::EventLoop> pi1_mapping_event_loop =
       reader.event_loop_factory()->MakeEventLoop("pi1_mapping", pi1);
   HandlePiCaptures(pi1_detection_event_loop.get(), pi1_mapping_event_loop.get(),
-                   &reader, &timestamped_target_detections, &detectors);
+                   &reader, &timestamped_target_detections, &detectors,
+                   &drawn_nodes, &vis_robot, &display_count);
 
   const aos::Node *pi2 =
       aos::configuration::GetNode(reader.configuration(), "pi2");
@@ -173,7 +278,8 @@
   std::unique_ptr<aos::EventLoop> pi2_mapping_event_loop =
       reader.event_loop_factory()->MakeEventLoop("pi2_mapping", pi2);
   HandlePiCaptures(pi2_detection_event_loop.get(), pi2_mapping_event_loop.get(),
-                   &reader, &timestamped_target_detections, &detectors);
+                   &reader, &timestamped_target_detections, &detectors,
+                   &drawn_nodes, &vis_robot, &display_count);
 
   const aos::Node *pi3 =
       aos::configuration::GetNode(reader.configuration(), "pi3");
@@ -182,7 +288,8 @@
   std::unique_ptr<aos::EventLoop> pi3_mapping_event_loop =
       reader.event_loop_factory()->MakeEventLoop("pi3_mapping", pi3);
   HandlePiCaptures(pi3_detection_event_loop.get(), pi3_mapping_event_loop.get(),
-                   &reader, &timestamped_target_detections, &detectors);
+                   &reader, &timestamped_target_detections, &detectors,
+                   &drawn_nodes, &vis_robot, &display_count);
 
   const aos::Node *pi4 =
       aos::configuration::GetNode(reader.configuration(), "pi4");
@@ -191,13 +298,13 @@
   std::unique_ptr<aos::EventLoop> pi4_mapping_event_loop =
       reader.event_loop_factory()->MakeEventLoop("pi4_mapping", pi4);
   HandlePiCaptures(pi4_detection_event_loop.get(), pi4_mapping_event_loop.get(),
-                   &reader, &timestamped_target_detections, &detectors);
+                   &reader, &timestamped_target_detections, &detectors,
+                   &drawn_nodes, &vis_robot, &display_count);
 
   std::unique_ptr<aos::EventLoop> mcap_event_loop;
   std::unique_ptr<aos::McapLogger> relogger;
   if (!FLAGS_mcap_output_path.empty()) {
     LOG(INFO) << "Writing out mcap file to " << FLAGS_mcap_output_path;
-    // TODO: Should make this work for any pi
     const aos::Node *node =
         aos::configuration::GetNode(reader.configuration(), FLAGS_pi);
     reader.event_loop_factory()->GetNodeEventLoopFactory(node)->OnStartup(
@@ -212,13 +319,21 @@
         });
   }
 
+  if (FLAGS_visualize) {
+    vis_robot.ClearImage();
+    const double kFocalLength = 500.0;
+    vis_robot.SetDefaultViewpoint(kImageWidth, kFocalLength);
+  }
+
   reader.event_loop_factory()->Run();
 
-  auto target_constraints =
-      DataAdapter::MatchTargetDetections(timestamped_target_detections);
+  if (FLAGS_solve) {
+    auto target_constraints =
+        DataAdapter::MatchTargetDetections(timestamped_target_detections);
 
-  frc971::vision::TargetMapper mapper(FLAGS_json_path, target_constraints);
-  mapper.Solve(FLAGS_field_name, FLAGS_output_dir);
+    frc971::vision::TargetMapper mapper(FLAGS_json_path, target_constraints);
+    mapper.Solve(FLAGS_field_name, FLAGS_output_dir);
+  }
 
   // Clean up all the pointers
   for (auto &detector_ptr : detectors) {
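
The two early-continue checks in HandleAprilTags() amount to a single acceptance predicate. The sketch below restates them with the same flags and TargetPoseFbs accessors purely for clarity, assuming it lives in this file so the flags are visible.

// Returns true if a detection is trustworthy enough to feed to the mapper.
bool AcceptDetection(const frc971::vision::TargetPoseFbs &target_pose_fbs) {
  // The solver residual itself is too large to trust.
  if (target_pose_fbs.pose_error() > FLAGS_max_pose_error) {
    return false;
  }
  // The two pose hypotheses are nearly indistinguishable, so the chosen pose
  // may well be the wrong one.
  if (target_pose_fbs.pose_error_ratio() > FLAGS_max_pose_error_ratio) {
    return false;
  }
  return true;
}
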
diff --git a/y2023/wpilib_interface.cc b/y2023/wpilib_interface.cc
index cd52cfd..2f41c91 100644
--- a/y2023/wpilib_interface.cc
+++ b/y2023/wpilib_interface.cc
@@ -114,16 +114,12 @@
 }  // namespace
 
 static constexpr int kCANFalconCount = 6;
-static constexpr int kCANSignalsCount = 4;
-static constexpr int kRollerSignalsCount = 4;
 static constexpr units::frequency::hertz_t kCANUpdateFreqHz = 200_Hz;
 
 class Falcon {
  public:
   Falcon(int device_id, std::string canbus,
-         aos::SizedArray<ctre::phoenixpro::BaseStatusSignalValue *,
-                         kCANFalconCount * kCANSignalsCount +
-                             kRollerSignalsCount> *signals)
+         std::vector<ctre::phoenixpro::BaseStatusSignalValue *> *signals)
       : talon_(device_id, canbus),
         device_id_(device_id),
         device_temp_(talon_.GetDeviceTemp()),
@@ -134,9 +130,7 @@
     // device temp is not timesynced so don't add it to the list of signals
     device_temp_.SetUpdateFrequency(kCANUpdateFreqHz);
 
-    CHECK_EQ(kCANSignalsCount, 4);
     CHECK_NOTNULL(signals);
-    CHECK_LE(signals->size() + 4u, signals->capacity());
 
     supply_voltage_.SetUpdateFrequency(kCANUpdateFreqHz);
     signals->push_back(&supply_voltage_);
@@ -279,8 +273,11 @@
 
 class CANSensorReader {
  public:
-  CANSensorReader(aos::EventLoop *event_loop)
+  CANSensorReader(
+      aos::EventLoop *event_loop,
+      std::vector<ctre::phoenixpro::BaseStatusSignalValue *> signals_registry)
       : event_loop_(event_loop),
+        signals_(signals_registry.begin(), signals_registry.end()),
         can_position_sender_(
             event_loop->MakeSender<drivetrain::CANPosition>("/drivetrain")),
         roller_falcon_data_(std::nullopt) {
@@ -294,12 +291,6 @@
     });
   }
 
-  aos::SizedArray<ctre::phoenixpro::BaseStatusSignalValue *,
-                  kCANFalconCount * kCANSignalsCount + kRollerSignalsCount>
-      *get_signals_registry() {
-    return &signals_;
-  };
-
   void set_falcons(std::shared_ptr<Falcon> right_front,
                    std::shared_ptr<Falcon> right_back,
                    std::shared_ptr<Falcon> right_under,
@@ -323,16 +314,8 @@
 
  private:
   void Loop() {
-    CHECK_EQ(signals_.size(), 28u);
     ctre::phoenix::StatusCode status =
-        ctre::phoenixpro::BaseStatusSignalValue::WaitForAll(
-            2000_ms, {signals_[0],  signals_[1],  signals_[2],  signals_[3],
-                      signals_[4],  signals_[5],  signals_[6],  signals_[7],
-                      signals_[8],  signals_[9],  signals_[10], signals_[11],
-                      signals_[12], signals_[13], signals_[14], signals_[15],
-                      signals_[16], signals_[17], signals_[18], signals_[19],
-                      signals_[20], signals_[21], signals_[22], signals_[23],
-                      signals_[24], signals_[25], signals_[26], signals_[27]});
+        ctre::phoenixpro::BaseStatusSignalValue::WaitForAll(2000_ms, signals_);
 
     if (!status.IsOK()) {
       AOS_LOG(ERROR, "Failed to read signals from falcons: %s: %s",
@@ -383,9 +366,7 @@
 
   aos::EventLoop *event_loop_;
 
-  aos::SizedArray<ctre::phoenixpro::BaseStatusSignalValue *,
-                  kCANFalconCount * kCANSignalsCount + kRollerSignalsCount>
-      signals_;
+  const std::vector<ctre::phoenixpro::BaseStatusSignalValue *> signals_;
   aos::Sender<drivetrain::CANPosition> can_position_sender_;
 
   std::shared_ptr<Falcon> right_front_, right_back_, right_under_, left_front_,
@@ -934,25 +915,27 @@
     std::shared_ptr<frc::DigitalOutput> superstructure_reading =
         make_unique<frc::DigitalOutput>(25);
 
+    std::vector<ctre::phoenixpro::BaseStatusSignalValue *> signals_registry;
+    std::shared_ptr<Falcon> right_front =
+        std::make_shared<Falcon>(1, "Drivetrain Bus", &signals_registry);
+    std::shared_ptr<Falcon> right_back =
+        std::make_shared<Falcon>(2, "Drivetrain Bus", &signals_registry);
+    std::shared_ptr<Falcon> right_under =
+        std::make_shared<Falcon>(3, "Drivetrain Bus", &signals_registry);
+    std::shared_ptr<Falcon> left_front =
+        std::make_shared<Falcon>(4, "Drivetrain Bus", &signals_registry);
+    std::shared_ptr<Falcon> left_back =
+        std::make_shared<Falcon>(5, "Drivetrain Bus", &signals_registry);
+    std::shared_ptr<Falcon> left_under =
+        std::make_shared<Falcon>(6, "Drivetrain Bus", &signals_registry);
+    std::shared_ptr<Falcon> roller =
+        std::make_shared<Falcon>(13, "Drivetrain Bus", &signals_registry);
+
     // Thread 3.
     ::aos::ShmEventLoop can_sensor_reader_event_loop(&config.message());
     can_sensor_reader_event_loop.set_name("CANSensorReader");
-    CANSensorReader can_sensor_reader(&can_sensor_reader_event_loop);
-
-    std::shared_ptr<Falcon> right_front = std::make_shared<Falcon>(
-        1, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
-    std::shared_ptr<Falcon> right_back = std::make_shared<Falcon>(
-        2, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
-    std::shared_ptr<Falcon> right_under = std::make_shared<Falcon>(
-        3, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
-    std::shared_ptr<Falcon> left_front = std::make_shared<Falcon>(
-        4, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
-    std::shared_ptr<Falcon> left_back = std::make_shared<Falcon>(
-        5, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
-    std::shared_ptr<Falcon> left_under = std::make_shared<Falcon>(
-        6, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
-    std::shared_ptr<Falcon> roller = std::make_shared<Falcon>(
-        13, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
+    CANSensorReader can_sensor_reader(&can_sensor_reader_event_loop,
+                                      std::move(signals_registry));
 
     can_sensor_reader.set_falcons(right_front, right_back, right_under,
                                   left_front, left_back, left_under, roller);
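
The construction-order change above is the heart of the refactor: each Falcon appends its status signals to a plain std::vector before the CANSensorReader is constructed, so the reader can hand the whole container to WaitForAll() without a compile-time signal count. Here is a stripped-down sketch of that pattern using stand-in types, not the phoenixpro API.

#include <cstddef>
#include <utility>
#include <vector>

struct Signal {};  // Stand-in for a status signal.

class Device {
 public:
  // Each device registers its signals with the shared registry when it is
  // constructed.  Devices must stay at a stable address afterwards (the real
  // code keeps them in shared_ptrs).
  explicit Device(std::vector<Signal *> *registry) {
    registry->push_back(&signal_);
  }

 private:
  Signal signal_;
};

class Reader {
 public:
  // Takes the completed registry, so its size never needs to be known at
  // compile time.
  explicit Reader(std::vector<Signal *> signals)
      : signals_(std::move(signals)) {}

  std::size_t size() const { return signals_.size(); }

 private:
  const std::vector<Signal *> signals_;
};

// Usage mirrors the wiring above: registry first, then devices, then reader.
int main() {
  std::vector<Signal *> registry;
  Device right_front(&registry);
  Device roller(&registry);
  Reader reader(std::move(registry));
  return reader.size() == 2 ? 0 : 1;
}
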
diff --git a/y2023/y2023_imu.json b/y2023/y2023_imu.json
index 416552b..535d97f 100644
--- a/y2023/y2023_imu.json
+++ b/y2023/y2023_imu.json
@@ -452,8 +452,6 @@
   "applications": [
     {
       "name": "message_bridge_client",
-      "executable_name": "message_bridge_client",
-      "user": "pi",
       "nodes": [
         "imu"
       ]
diff --git a/y2023/y2023_logger.json b/y2023/y2023_logger.json
index 55d201d..9b0ba55 100644
--- a/y2023/y2023_logger.json
+++ b/y2023/y2023_logger.json
@@ -417,8 +417,6 @@
   "applications": [
     {
       "name": "message_bridge_client",
-      "executable_name": "message_bridge_client",
-      "user": "pi",
       "nodes": [
         "logger"
       ]
diff --git a/y2023/y2023_pi_template.json b/y2023/y2023_pi_template.json
index 8d9fdaa..31f4045 100644
--- a/y2023/y2023_pi_template.json
+++ b/y2023/y2023_pi_template.json
@@ -350,7 +350,11 @@
   "applications": [
     {
       "name": "message_bridge_client",
-      "args": ["--rt_priority=16"],
+      "executable_name": "message_bridge_client",
+      "args": [
+        "--rt_priority=16",
+        "--sinit_max_init_timeout=5000"
+      ],
       "user": "pi",
       "nodes": [
         "pi{{ NUM }}"
diff --git a/y2023/y2023_roborio.json b/y2023/y2023_roborio.json
index 3114ac9..452a570 100644
--- a/y2023/y2023_roborio.json
+++ b/y2023/y2023_roborio.json
@@ -268,6 +268,14 @@
       "max_size": 448
     },
     {
+      "name": "/can",
+      "type": "frc971.can_logger.CanFrame",
+      "source_node": "roborio",
+      "frequency": 6000,
+      "num_senders": 2,
+      "max_size": 200
+    },
+    {
       "name": "/drivetrain",
       "type": "y2023.control_loops.drivetrain.CANPosition",
       "source_node": "roborio",
@@ -530,7 +538,8 @@
       "name": "roborio_message_bridge_client",
       "executable_name": "message_bridge_client",
       "args": [
-        "--rt_priority=16"
+        "--rt_priority=16",
+        "--sinit_max_init_timeout=5000"
       ],
       "nodes": [
         "roborio"
@@ -565,6 +574,13 @@
       "nodes": [
         "roborio"
       ]
+    },
+    {
+      "name": "can_logger",
+      "executable_name": "can_logger",
+      "nodes": [
+        "roborio"
+      ]
     }
   ],
   "maps": [