Merge "Fix TS compile errors with images."
diff --git a/aos/logging/log_namer.cc b/aos/logging/log_namer.cc
index 83a39c8..f025054 100644
--- a/aos/logging/log_namer.cc
+++ b/aos/logging/log_namer.cc
@@ -144,21 +144,25 @@
 
   char *tmp;
   AllocateLogName(&tmp, folder, basename);
+
+  std::string log_base_name = tmp;
+  std::string log_roborio_name = log_base_name + "_roborio_data.bfbs";
+  free(tmp);
+
   char *tmp2;
-  if (asprintf(&tmp2, "%s/%s-current", folder, basename) == -1) {
+  if (asprintf(&tmp2, "%s/%s-current.bfbs", folder, basename) == -1) {
     PLOG(WARNING) << "couldn't create current symlink name";
   } else {
     if (unlink(tmp2) == -1 && (errno != EROFS && errno != ENOENT)) {
       LOG(WARNING) << "unlink('" << tmp2 << "') failed";
     }
-    if (symlink(tmp, tmp2) == -1) {
-      PLOG(WARNING) << "symlink('" << tmp << "', '" << tmp2 << "') failed";
+    if (symlink(log_roborio_name.c_str(), tmp2) == -1) {
+      PLOG(WARNING) << "symlink('" << log_roborio_name.c_str() << "', '" << tmp2
+                    << "') failed";
     }
     free(tmp2);
   }
-  std::string result = tmp;
-  free(tmp);
-  return result;
+  return log_base_name;
 }
 
 }  // namespace logging
diff --git a/aos/network/BUILD b/aos/network/BUILD
index 89853f0..8f3f394 100644
--- a/aos/network/BUILD
+++ b/aos/network/BUILD
@@ -70,6 +70,7 @@
     ],
     deps = [
         ":team_number",
+        "//aos:configuration",
         "//aos/testing:googletest",
     ],
 )
diff --git a/aos/network/team_number.cc b/aos/network/team_number.cc
index 14fadab..d2fbc8e 100644
--- a/aos/network/team_number.cc
+++ b/aos/network/team_number.cc
@@ -9,6 +9,8 @@
 
 #include "aos/util/string_to_num.h"
 
+DECLARE_string(override_hostname);
+
 namespace aos {
 namespace network {
 namespace team_number_internal {
@@ -101,10 +103,14 @@
 }  // namespace
 
 ::std::string GetHostname() {
-  char buf[256];
-  buf[sizeof(buf) - 1] = '\0';
-  PCHECK(gethostname(buf, sizeof(buf) - 1) == 0);
-  return buf;
+  if (FLAGS_override_hostname.empty()) {
+    char buf[256];
+    buf[sizeof(buf) - 1] = '\0';
+    PCHECK(gethostname(buf, sizeof(buf) - 1) == 0);
+    return buf;
+  } else {
+    return FLAGS_override_hostname;
+  }
 }
 
 uint16_t GetTeamNumber() {
diff --git a/frc971/analysis/plot_configs/localizer.pb b/frc971/analysis/plot_configs/localizer.pb
index f81ec89..644b516 100644
--- a/frc971/analysis/plot_configs/localizer.pb
+++ b/frc971/analysis/plot_configs/localizer.pb
@@ -23,6 +23,16 @@
   axes {
     line {
       y_signal {
+        channel: "DrivetrainStatus"
+        field: "trajectory_logging.y"
+      }
+      x_signal {
+        channel: "DrivetrainStatus"
+        field: "trajectory_logging.x"
+      }
+    }
+    line {
+      y_signal {
         channel: "DrivetrainTruthStatus"
         field: "y"
       }
@@ -31,7 +41,6 @@
         field: "x"
       }
     }
-    share_x_axis: false
     line {
       y_signal {
         channel: "DrivetrainStatus"
diff --git a/frc971/analysis/py_log_reader.cc b/frc971/analysis/py_log_reader.cc
index a9a1db1..08f3707 100644
--- a/frc971/analysis/py_log_reader.cc
+++ b/frc971/analysis/py_log_reader.cc
@@ -87,8 +87,14 @@
   tools->reader = std::make_unique<aos::logger::LogReader>(log_file_name);
   tools->reader->Register();
 
-  tools->event_loop =
-      tools->reader->event_loop_factory()->MakeEventLoop("data_fetcher");
+  if (aos::configuration::MultiNode(tools->reader->configuration())) {
+    tools->event_loop = tools->reader->event_loop_factory()->MakeEventLoop(
+        "data_fetcher",
+        aos::configuration::GetNode(tools->reader->configuration(), "roborio"));
+  } else {
+    tools->event_loop =
+        tools->reader->event_loop_factory()->MakeEventLoop("data_fetcher");
+  }
   tools->event_loop->SkipTimingReport();
   tools->event_loop->SkipAosLog();
 
diff --git a/frc971/control_loops/drivetrain/BUILD b/frc971/control_loops/drivetrain/BUILD
index 9fef8f9..30abc87 100644
--- a/frc971/control_loops/drivetrain/BUILD
+++ b/frc971/control_loops/drivetrain/BUILD
@@ -691,6 +691,7 @@
         "improved_down_estimator.h",
     ],
     deps = [
+        ":drivetrain_config",
         ":drivetrain_status_fbs",
         "//aos/events:event_loop",
         "//frc971/control_loops:control_loops_fbs",
@@ -707,6 +708,7 @@
         "improved_down_estimator_test.cc",
     ],
     deps = [
+        ":drivetrain_test_lib",
         "//aos/testing:googletest",
         "//aos/testing:random_seed",
         "//frc971/control_loops/drivetrain:improved_down_estimator",
diff --git a/frc971/control_loops/drivetrain/drivetrain.cc b/frc971/control_loops/drivetrain/drivetrain.cc
index 77c52c4..1c0ac4f 100644
--- a/frc971/control_loops/drivetrain/drivetrain.cc
+++ b/frc971/control_loops/drivetrain/drivetrain.cc
@@ -41,6 +41,7 @@
       gyro_reading_fetcher_(
           event_loop->MakeFetcher<::frc971::sensors::GyroReading>(
               "/drivetrain")),
+      down_estimator_(dt_config),
       localizer_(localizer),
       kf_(dt_config_.make_kf_drivetrain_loop()),
       dt_openloop_(dt_config_, &kf_),
@@ -177,6 +178,9 @@
       case IMUType::IMU_Y:
         last_accel_ = -imu_values_fetcher_->accelerometer_y();
         break;
+      case IMUType::IMU_Z:
+        last_accel_ = imu_values_fetcher_->accelerometer_z();
+        break;
     }
   }
 
diff --git a/frc971/control_loops/drivetrain/drivetrain_config.h b/frc971/control_loops/drivetrain/drivetrain_config.h
index 5a29008..a33ff22 100644
--- a/frc971/control_loops/drivetrain/drivetrain_config.h
+++ b/frc971/control_loops/drivetrain/drivetrain_config.h
@@ -37,6 +37,7 @@
   IMU_X = 0,          // Use the x-axis of the IMU.
   IMU_Y = 1,          // Use the y-axis of the IMU.
   IMU_FLIPPED_X = 2,  // Use the flipped x-axis of the IMU.
+  IMU_Z = 3,          // Use the z-axis of the IMU.
 };
 
 template <typename Scalar = double>
diff --git a/frc971/control_loops/drivetrain/drivetrain_test_lib.cc b/frc971/control_loops/drivetrain/drivetrain_test_lib.cc
index 336025a..6a90122 100644
--- a/frc971/control_loops/drivetrain/drivetrain_test_lib.cc
+++ b/frc971/control_loops/drivetrain/drivetrain_test_lib.cc
@@ -190,16 +190,23 @@
   auto builder = imu_sender_.MakeBuilder();
   frc971::IMUValues::Builder imu_builder =
       builder.MakeBuilder<frc971::IMUValues>();
-  imu_builder.add_gyro_x(0.0);
-  imu_builder.add_gyro_y(0.0);
-  imu_builder.add_gyro_z(
-      (drivetrain_plant_.X(3, 0) - drivetrain_plant_.X(1, 0)) /
-      (dt_config_.robot_radius * 2.0));
+  const Eigen::Vector3d gyro =
+      dt_config_.imu_transform.inverse() *
+      Eigen::Vector3d(0.0, 0.0,
+                      (drivetrain_plant_.X(3, 0) - drivetrain_plant_.X(1, 0)) /
+                          (dt_config_.robot_radius * 2.0));
+  imu_builder.add_gyro_x(gyro.x());
+  imu_builder.add_gyro_y(gyro.y());
+  imu_builder.add_gyro_z(gyro.z());
   // Acceleration due to gravity, in m/s/s.
   constexpr double kG = 9.807;
-  imu_builder.add_accelerometer_x(last_acceleration_.x() / kG);
-  imu_builder.add_accelerometer_y(last_acceleration_.y() / kG);
-  imu_builder.add_accelerometer_z(1.0);
+  const Eigen::Vector3d accel =
+      dt_config_.imu_transform.inverse() *
+      Eigen::Vector3d(last_acceleration_.x() / kG, last_acceleration_.y() / kG,
+                      1.0);
+  imu_builder.add_accelerometer_x(accel.x());
+  imu_builder.add_accelerometer_y(accel.y());
+  imu_builder.add_accelerometer_z(accel.z());
   imu_builder.add_monotonic_timestamp_ns(
       std::chrono::duration_cast<std::chrono::nanoseconds>(
           event_loop_->monotonic_now().time_since_epoch())
diff --git a/frc971/control_loops/drivetrain/improved_down_estimator.h b/frc971/control_loops/drivetrain/improved_down_estimator.h
index 27de867..c1c8e1b 100644
--- a/frc971/control_loops/drivetrain/improved_down_estimator.h
+++ b/frc971/control_loops/drivetrain/improved_down_estimator.h
@@ -6,6 +6,7 @@
 
 #include "aos/events/event_loop.h"
 #include "aos/time/time.h"
+#include "frc971/control_loops/drivetrain/drivetrain_config.h"
 #include "frc971/control_loops/drivetrain/drivetrain_status_generated.h"
 #include "frc971/control_loops/runge_kutta.h"
 #include "glog/logging.h"
@@ -276,6 +277,8 @@
 // accelerometer calibration).
 class DrivetrainUkf : public QuaternionUkf {
  public:
+  DrivetrainUkf(const DrivetrainConfig<double> &dt_config)
+      : QuaternionUkf(dt_config.imu_transform) {}
   // UKF for http://kodlab.seas.upenn.edu/uploads/Arun/UKFpaper.pdf
   // Reference in case the link is dead:
   // Kraft, Edgar. "A quaternion-based unscented Kalman filter for orientation
diff --git a/frc971/control_loops/drivetrain/improved_down_estimator_test.cc b/frc971/control_loops/drivetrain/improved_down_estimator_test.cc
index 86ef5e9..2edce5a 100644
--- a/frc971/control_loops/drivetrain/improved_down_estimator_test.cc
+++ b/frc971/control_loops/drivetrain/improved_down_estimator_test.cc
@@ -4,6 +4,7 @@
 #include <random>
 
 #include "aos/testing/random_seed.h"
+#include "frc971/control_loops/drivetrain/drivetrain_test_lib.h"
 #include "frc971/control_loops/drivetrain/improved_down_estimator.h"
 #include "glog/logging.h"
 #include "gtest/gtest.h"
@@ -91,7 +92,8 @@
 }
 
 TEST(DownEstimatorTest, UkfConstantRotation) {
-  drivetrain::DrivetrainUkf dtukf;
+  drivetrain::DrivetrainUkf dtukf(
+      drivetrain::testing::GetTestDrivetrainConfig());
   const Eigen::Vector3d ux = Eigen::Vector3d::UnitX();
   EXPECT_EQ(0.0,
             (Eigen::Vector3d(0.0, 0.0, 1.0) - dtukf.H(dtukf.X_hat().coeffs()))
@@ -112,7 +114,8 @@
 
 // Tests that the euler angles in the status message are correct.
 TEST(DownEstimatorTest, UkfEulerStatus) {
-  drivetrain::DrivetrainUkf dtukf;
+  drivetrain::DrivetrainUkf dtukf(
+      drivetrain::testing::GetTestDrivetrainConfig());
   const Eigen::Vector3d ux = Eigen::Vector3d::UnitX();
   const Eigen::Vector3d uy = Eigen::Vector3d::UnitY();
   const Eigen::Vector3d uz = Eigen::Vector3d::UnitZ();
@@ -167,7 +170,8 @@
 // that we are slightly rotated, that we eventually adjust our estimate to be
 // correct.
 TEST(DownEstimatorTest, UkfAccelCorrectsBias) {
-  drivetrain::DrivetrainUkf dtukf;
+  drivetrain::DrivetrainUkf dtukf(
+      drivetrain::testing::GetTestDrivetrainConfig());
   const Eigen::Vector3d ux = Eigen::Vector3d::UnitX();
   Eigen::Matrix<double, 3, 1> measurement;
   // Supply the accelerometer with a slightly off reading to ensure that we
@@ -192,7 +196,8 @@
 // that we are slightly rotated, that we eventually adjust our estimate to be
 // correct.
 TEST(DownEstimatorTest, UkfIgnoreBadAccel) {
-  drivetrain::DrivetrainUkf dtukf;
+  drivetrain::DrivetrainUkf dtukf(
+      drivetrain::testing::GetTestDrivetrainConfig());
   const Eigen::Vector3d uy = Eigen::Vector3d::UnitY();
   Eigen::Matrix<double, 3, 1> measurement;
   // Set up a scenario where, if we naively took the accelerometer readings, we
diff --git a/frc971/control_loops/hybrid_state_feedback_loop.h b/frc971/control_loops/hybrid_state_feedback_loop.h
index a6160e7..d769541 100644
--- a/frc971/control_loops/hybrid_state_feedback_loop.h
+++ b/frc971/control_loops/hybrid_state_feedback_loop.h
@@ -75,6 +75,9 @@
     ::std::swap(coefficients_, other.coefficients_);
     X_.swap(other.X_);
     Y_.swap(other.Y_);
+    A_.swap(other.A_);
+    B_.swap(other.B_);
+    DelayedU_.swap(other.DelayedU_);
   }
 
   virtual ~StateFeedbackHybridPlant() {}
@@ -167,8 +170,6 @@
     DelayedU_ = U;
   }
 
-  Eigen::Matrix<Scalar, number_of_inputs, 1> DelayedU_;
-
   Eigen::Matrix<Scalar, number_of_states, 1> Update(
       const Eigen::Matrix<Scalar, number_of_states, 1> X,
       const Eigen::Matrix<Scalar, number_of_inputs, 1> &U,
@@ -202,6 +203,8 @@
       number_of_states, number_of_inputs, number_of_outputs>>>
       coefficients_;
 
+  Eigen::Matrix<Scalar, number_of_inputs, 1> DelayedU_;
+
   int index_;
 
   DISALLOW_COPY_AND_ASSIGN(StateFeedbackHybridPlant);
@@ -243,9 +246,13 @@
           *observers)
       : coefficients_(::std::move(*observers)) {}
 
-  HybridKalman(HybridKalman &&other)
-      : X_hat_(other.X_hat_), index_(other.index_) {
+  HybridKalman(HybridKalman &&other) : index_(other.index_) {
     ::std::swap(coefficients_, other.coefficients_);
+
+    X_hat_.swap(other.X_hat_);
+    P_.swap(other.P_);
+    Q_.swap(other.Q_);
+    R_.swap(other.R_);
   }
 
   // Getters for Q
diff --git a/frc971/control_loops/python/path_edit.py b/frc971/control_loops/python/path_edit.py
index 0d1b263..d04089b 100755
--- a/frc971/control_loops/python/path_edit.py
+++ b/frc971/control_loops/python/path_edit.py
@@ -393,97 +393,7 @@
                             self.held_x = self.x
 
     def do_button_press(self, event):
-        print("button press activated")
         # Be consistent with the scaling in the drawing_area
         self.x = event.x * 2
         self.y = event.y * 2
         self.button_press_action()
-
-
-class GridWindow(Gtk.Window):
-    def method_connect(self, event, cb):
-        def handler(obj, *args):
-            cb(*args)
-
-        print("Method_connect ran")
-        self.connect(event, handler)
-
-    def mouse_move(self, event):
-        #Changes event.x and event.y to be relative to the center.
-        x = event.x - self.drawing_area.window_shape[0] / 2
-        y = self.drawing_area.window_shape[1] / 2 - event.y
-        scale = self.drawing_area.get_current_scale()
-        event.x = x / scale + self.drawing_area.center[0]
-        event.y = y / scale + self.drawing_area.center[1]
-        self.drawing_area.mouse_move(event)
-        self.queue_draw()
-
-    def button_press(self, event):
-        print("button press activated")
-        o_x = event.x
-        o_y = event.y
-        x = event.x - self.drawing_area.window_shape[0] / 2
-        y = self.drawing_area.window_shape[1] / 2 - event.y
-        scale = 2 * self.drawing_area.get_current_scale()
-        event.x = x / scale + self.drawing_area.center[0]
-        event.y = y / scale + self.drawing_area.center[1]
-        self.drawing_area.do_button_press(event)
-        event.x = o_x
-        event.y = o_y
-
-    def key_press(self, event):
-        print("key press activated")
-        self.drawing_area.do_key_press(event, self.file_name_box.get_text())
-        self.queue_draw()
-
-    def configure(self, event):
-        print("configure activated")
-        self.drawing_area.window_shape = (event.width, event.height)
-
-    def __init__(self):
-        Gtk.Window.__init__(self)
-
-        self.set_default_size(1.5 * SCREEN_SIZE, SCREEN_SIZE)
-
-        flowBox = Gtk.FlowBox()
-        flowBox.set_valign(Gtk.Align.START)
-        flowBox.set_selection_mode(Gtk.SelectionMode.NONE)
-
-        flowBox.set_valign(Gtk.Align.START)
-
-        self.add(flowBox)
-
-        container = Gtk.Fixed()
-        flowBox.add(container)
-
-        self.eventBox = Gtk.EventBox()
-        container.add(self.eventBox)
-
-        self.eventBox.set_events(Gdk.EventMask.BUTTON_PRESS_MASK
-                                 | Gdk.EventMask.BUTTON_RELEASE_MASK
-                                 | Gdk.EventMask.POINTER_MOTION_MASK
-                                 | Gdk.EventMask.SCROLL_MASK
-                                 | Gdk.EventMask.KEY_PRESS_MASK)
-
-        #add the graph box
-        self.drawing_area = GTK_Widget()
-        self.eventBox.add(self.drawing_area)
-
-        self.method_connect("key-release-event", self.key_press)
-        self.method_connect("button-release-event", self.button_press)
-        self.method_connect("configure-event", self.configure)
-        self.method_connect("motion_notify_event", self.mouse_move)
-
-        self.file_name_box = Gtk.Entry()
-        self.file_name_box.set_size_request(200, 40)
-
-        self.file_name_box.set_text("File")
-        self.file_name_box.set_editable(True)
-
-        container.put(self.file_name_box, 0, 0)
-
-        self.show_all()
-
-
-window = GridWindow()
-RunApp()
diff --git a/frc971/control_loops/python/spline_graph.py b/frc971/control_loops/python/spline_graph.py
index 2fd7964..94ee683 100755
--- a/frc971/control_loops/python/spline_graph.py
+++ b/frc971/control_loops/python/spline_graph.py
@@ -11,11 +11,10 @@
         def handler(obj, *args):
             cb(*args)
 
-        print("Method_connect ran")
         self.connect(event, handler)
 
     def mouse_move(self, event):
-        #Changes event.x and event.y to be relative to the center.
+        # Changes event.x and event.y to be relative to the center.
         x = event.x - self.drawing_area.window_shape[0] / 2
         y = self.drawing_area.window_shape[1] / 2 - event.y
         scale = self.drawing_area.get_current_scale()
@@ -25,25 +24,22 @@
         self.queue_draw()
 
     def button_press(self, event):
-        print("button press activated")
-        o_x = event.x
-        o_y = event.y
+        original_x = event.x
+        original_y = event.y
         x = event.x - self.drawing_area.window_shape[0] / 2
         y = self.drawing_area.window_shape[1] / 2 - event.y
         scale = 2 * self.drawing_area.get_current_scale()
         event.x = x / scale + self.drawing_area.center[0]
         event.y = y / scale + self.drawing_area.center[1]
         self.drawing_area.do_button_press(event)
-        event.x = o_x
-        event.y = o_y
+        event.x = original_x
+        event.y = original_y
 
     def key_press(self, event):
-        print("key press activated")
         self.drawing_area.do_key_press(event, self.file_name_box.get_text())
         self.queue_draw()
 
     def configure(self, event):
-        print("configure activated")
         self.drawing_area.window_shape = (event.width, event.height)
 
     # handle submitting a constraint
@@ -88,7 +84,7 @@
         self.file_name_box = Gtk.Entry()
         self.file_name_box.set_size_request(200, 40)
 
-        self.file_name_box.set_text("File")
+        self.file_name_box.set_text("output_file_name.json")
         self.file_name_box.set_editable(True)
 
         container.put(self.file_name_box, 0, 0)
diff --git a/frc971/control_loops/state_feedback_loop.h b/frc971/control_loops/state_feedback_loop.h
index 83a54f0..d63bca7 100644
--- a/frc971/control_loops/state_feedback_loop.h
+++ b/frc971/control_loops/state_feedback_loop.h
@@ -397,11 +397,11 @@
       : plant_(::std::move(other.plant_)),
         controller_(::std::move(other.controller_)),
         observer_(::std::move(other.observer_)) {
+    ff_U_.swap(other.ff_U_);
     R_.swap(other.R_);
     next_R_.swap(other.next_R_);
     U_.swap(other.U_);
     U_uncapped_.swap(other.U_uncapped_);
-    ff_U_.swap(other.ff_U_);
   }
 
   virtual ~StateFeedbackLoop() {}
diff --git a/y2020/control_loops/drivetrain/drivetrain_base.cc b/y2020/control_loops/drivetrain/drivetrain_base.cc
index 62679bc..b919902 100644
--- a/y2020/control_loops/drivetrain/drivetrain_base.cc
+++ b/y2020/control_loops/drivetrain/drivetrain_base.cc
@@ -25,8 +25,8 @@
   static DrivetrainConfig<double> kDrivetrainConfig{
       ::frc971::control_loops::drivetrain::ShifterType::SIMPLE_SHIFTER,
       ::frc971::control_loops::drivetrain::LoopType::CLOSED_LOOP,
-      ::frc971::control_loops::drivetrain::GyroType::IMU_Z_GYRO,
-      ::frc971::control_loops::drivetrain::IMUType::IMU_FLIPPED_X,
+      ::frc971::control_loops::drivetrain::GyroType::IMU_X_GYRO,
+      ::frc971::control_loops::drivetrain::IMUType::IMU_Z,
 
       drivetrain::MakeDrivetrainLoop,
       drivetrain::MakeVelocityDrivetrainLoop,
@@ -53,8 +53,8 @@
       1.2 /* quickturn_wheel_multiplier */,
       1.2 /* wheel_multiplier */,
       true /*pistol_grip_shift_enables_line_follow*/,
-      (Eigen::Matrix<double, 3, 3>() << 0.0, -1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0,
-       1.0)
+      (Eigen::Matrix<double, 3, 3>() << 0.0, 0.0, 1.0, 0.0, -1.0, 0.0, 1.0, 0.0,
+       0.0)
           .finished() /*imu_transform*/,
   };
 
diff --git a/y2020/control_loops/drivetrain/drivetrain_replay_test.cc b/y2020/control_loops/drivetrain/drivetrain_replay_test.cc
index 3e391c8..6033184 100644
--- a/y2020/control_loops/drivetrain/drivetrain_replay_test.cc
+++ b/y2020/control_loops/drivetrain/drivetrain_replay_test.cc
@@ -52,13 +52,19 @@
         reader_.event_loop_factory()->MakeEventLoop("drivetrain", roborio_);
     drivetrain_event_loop_->SkipTimingReport();
 
+    frc971::control_loops::drivetrain::DrivetrainConfig<double> config =
+        GetDrivetrainConfig();
+    // Make the modification required to the imu transform to work with the 2016
+    // logs...
+    config.imu_transform << 0.0, -1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0;
+    config.gyro_type = frc971::control_loops::drivetrain::GyroType::IMU_Z_GYRO;
+
     localizer_ =
         std::make_unique<frc971::control_loops::drivetrain::DeadReckonEkf>(
-            drivetrain_event_loop_.get(), GetDrivetrainConfig());
+            drivetrain_event_loop_.get(), config);
     drivetrain_ =
         std::make_unique<frc971::control_loops::drivetrain::DrivetrainLoop>(
-            GetDrivetrainConfig(), drivetrain_event_loop_.get(),
-            localizer_.get());
+            config, drivetrain_event_loop_.get(), localizer_.get());
 
     test_event_loop_ =
         reader_.event_loop_factory()->MakeEventLoop("drivetrain", roborio_);
@@ -88,7 +94,7 @@
   ASSERT_TRUE(status_fetcher_->has_x());
   ASSERT_TRUE(status_fetcher_->has_y());
   ASSERT_TRUE(status_fetcher_->has_theta());
-  EXPECT_LT(std::abs(status_fetcher_->x()), 0.1);
+  EXPECT_LT(std::abs(status_fetcher_->x()), 0.25);
   // Because the encoders should not be affecting the y or yaw axes, expect a
   // reasonably precise result (although, since this is a real worl dtest, the
   // robot probably did actually move be some non-zero amount).
diff --git a/y2020/control_loops/drivetrain/localizer_test.cc b/y2020/control_loops/drivetrain/localizer_test.cc
index c438af5..c47faa7 100644
--- a/y2020/control_loops/drivetrain/localizer_test.cc
+++ b/y2020/control_loops/drivetrain/localizer_test.cc
@@ -92,7 +92,6 @@
 namespace chrono = std::chrono;
 using frc971::control_loops::drivetrain::testing::DrivetrainSimulation;
 using frc971::control_loops::drivetrain::DrivetrainLoop;
-using frc971::control_loops::drivetrain::testing::GetTestDrivetrainConfig;
 using aos::monotonic_clock;
 
 class LocalizedDrivetrainTest : public aos::testing::ControlLoopTest {
@@ -418,6 +417,22 @@
   EXPECT_TRUE(VerifyEstimatorAccurate(5e-4));
 }
 
+// Tests that we can drive in a straight line and have things estimate
+// correctly.
+TEST_F(LocalizedDrivetrainTest, NoCameraUpdateStraightLine) {
+  SetEnabled(true);
+  set_enable_cameras(false);
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-7));
+
+  SendGoal(1.0, 1.0);
+
+  RunFor(chrono::seconds(1));
+  VerifyNearGoal();
+  // Due to accelerometer drift, the straight-line driving tends to be less
+  // accurate...
+  EXPECT_TRUE(VerifyEstimatorAccurate(0.1));
+}
+
 // Tests that camera updates with a perfect models results in no errors.
 TEST_F(LocalizedDrivetrainTest, PerfectCameraUpdate) {
   SetEnabled(true);
diff --git a/y2020/vision/BUILD b/y2020/vision/BUILD
index 60276c9..b643fcb 100644
--- a/y2020/vision/BUILD
+++ b/y2020/vision/BUILD
@@ -36,6 +36,9 @@
         "//tools:armhf-debian",
     ],
     visibility = ["//y2020:__subpackages__"],
+    data = [
+        "//y2020:config.json",
+    ],
     deps = [
         ":v4l2_reader",
         ":vision_fbs",
diff --git a/y2020/vision/camera_reader.cc b/y2020/vision/camera_reader.cc
index 23986bf..d45ec3f 100644
--- a/y2020/vision/camera_reader.cc
+++ b/y2020/vision/camera_reader.cc
@@ -14,6 +14,11 @@
 #include "y2020/vision/v4l2_reader.h"
 #include "y2020/vision/vision_generated.h"
 
+// Config used to allow running camera_reader independently.  E.g.,
+// bazel run //y2020/vision:camera_reader -- --config y2020/config.json
+//   --override_hostname pi-7971-1  --ignore_timestamps true
+DEFINE_string(config, "config.json", "Path to the config file to use.");
+
 namespace frc971 {
 namespace vision {
 namespace {
@@ -70,9 +75,26 @@
                             const cv::Mat &descriptors,
                             const std::vector<std::vector<cv::DMatch>> &matches,
                             const std::vector<cv::Mat> &camera_target_list,
+                            const std::vector<cv::Mat> &field_camera_list,
                             aos::Sender<sift::ImageMatchResult> *result_sender,
                             bool send_details);
 
+  // Returns the 2D (image) location for the specified training feature.
+  cv::Point2f Training2dPoint(int training_image_index,
+                              int feature_index) const {
+    const float x = training_data_->images()
+                        ->Get(training_image_index)
+                        ->features()
+                        ->Get(feature_index)
+                        ->x();
+    const float y = training_data_->images()
+                        ->Get(training_image_index)
+                        ->features()
+                        ->Get(feature_index)
+                        ->y();
+    return cv::Point2f(x, y);
+  }
+
   // Returns the 3D location for the specified training feature.
   cv::Point3f Training3dPoint(int training_image_index,
                               int feature_index) const {
@@ -170,6 +192,7 @@
     const cv::Mat &descriptors,
     const std::vector<std::vector<cv::DMatch>> &matches,
     const std::vector<cv::Mat> &camera_target_list,
+    const std::vector<cv::Mat> &field_camera_list,
     aos::Sender<sift::ImageMatchResult> *result_sender, bool send_details) {
   auto builder = result_sender->MakeBuilder();
   const auto camera_calibration_offset =
@@ -187,6 +210,7 @@
 
   std::vector<flatbuffers::Offset<sift::CameraPose>> camera_poses;
 
+  CHECK_EQ(camera_target_list.size(), field_camera_list.size());
   for (size_t i = 0; i < camera_target_list.size(); ++i) {
     cv::Mat camera_target = camera_target_list[i];
     CHECK(camera_target.isContinuous());
@@ -194,12 +218,21 @@
         reinterpret_cast<float *>(camera_target.data), camera_target.total());
     const flatbuffers::Offset<sift::TransformationMatrix> transform_offset =
         sift::CreateTransformationMatrix(*builder.fbb(), data_offset);
+
+    cv::Mat field_camera = field_camera_list[i];
+    CHECK(field_camera.isContinuous());
+    const auto fc_data_offset = builder.fbb()->CreateVector<float>(
+        reinterpret_cast<float *>(field_camera.data), field_camera.total());
+    const flatbuffers::Offset<sift::TransformationMatrix> fc_transform_offset =
+        sift::CreateTransformationMatrix(*builder.fbb(), fc_data_offset);
+
     const flatbuffers::Offset<sift::TransformationMatrix>
         field_to_target_offset =
             aos::CopyFlatBuffer(FieldToTarget(i), builder.fbb());
 
     sift::CameraPose::Builder pose_builder(*builder.fbb());
     pose_builder.add_camera_to_target(transform_offset);
+    pose_builder.add_field_to_camera(fc_transform_offset);
     pose_builder.add_field_to_target(field_to_target_offset);
     camera_poses.emplace_back(pose_builder.Finish());
   }
@@ -246,6 +279,8 @@
     std::vector<const std::vector<cv::DMatch> *> matches;
     std::vector<cv::Point3f> training_points_3d;
     std::vector<cv::Point2f> query_points;
+    std::vector<cv::Point2f> training_points;
+    cv::Mat homography;
   };
   std::vector<PerImageMatches> per_image_matches(number_training_images());
 
@@ -268,6 +303,8 @@
     CHECK_LT(training_image, static_cast<int>(per_image_matches.size()));
     PerImageMatches *const per_image = &per_image_matches[training_image];
     per_image->matches.push_back(&match);
+    per_image->training_points.push_back(
+        Training2dPoint(training_image, match[0].trainIdx));
     per_image->training_points_3d.push_back(
         Training3dPoint(training_image, match[0].trainIdx));
 
@@ -278,39 +315,139 @@
   // The minimum number of matches in a training image for us to use it.
   static constexpr int kMinimumMatchCount = 10;
   std::vector<cv::Mat> camera_target_list;
+  std::vector<cv::Mat> field_camera_list;
 
+  std::vector<PerImageMatches> per_image_good_matches;
+  std::vector<std::vector<cv::DMatch>> all_good_matches;
+  // Iterate through matches for each training image
   for (size_t i = 0; i < per_image_matches.size(); ++i) {
     const PerImageMatches &per_image = per_image_matches[i];
+
+    VLOG(2) << "Number of matches to start: " << per_image_matches.size()
+            << "\n";
+    // If we don't have enough matches to start, skip this set of matches
     if (per_image.matches.size() < kMinimumMatchCount) {
       continue;
     }
 
-    cv::Mat R_camera_target_vec, R_camera_target, T_camera_target;
-    // Compute the pose of the camera (global origin relative to camera)
-    cv::solvePnPRansac(per_image.training_points_3d, per_image.query_points,
-                       CameraIntrinsics(), CameraDistCoeffs(),
-                       R_camera_target_vec, T_camera_target);
-    T_camera_target = T_camera_target.t();
-    // Convert Camera from angle-axis (3x1) to homogenous (3x3) representation
-    cv::Rodrigues(R_camera_target_vec, R_camera_target);
+    // Use homography to determine which matches make sense physically
+    cv::Mat mask;
+    cv::Mat homography =
+        cv::findHomography(per_image.training_points, per_image.query_points,
+                           CV_RANSAC, 3.0, mask);
 
+    // If mask doesn't have enough leftover matches, skip these matches
+    if (cv::countNonZero(mask) < kMinimumMatchCount) {
+      continue;
+    }
+
+    VLOG(2) << "Number of matches after homography: " << cv::countNonZero(mask)
+            << "\n";
+    // Fill our match info for each good match
+    // TODO<Jim>: Could probably optimize some of the copies here
+    PerImageMatches per_image_good_match;
+    CHECK_EQ(per_image.training_points.size(),
+             (unsigned long)mask.size().height);
+    for (size_t j = 0; j < per_image.matches.size(); j++) {
+      // Skip if we masked out by homography
+      if (mask.at<uchar>(0, j) != 1) {
+        continue;
+      }
+
+      // Add this to our collection of all matches that passed our criteria
+      all_good_matches.push_back(
+          static_cast<std::vector<cv::DMatch>>(*per_image.matches[j]));
+
+      // Fill out the data for matches per image that made it past
+      // homography check
+      per_image_good_match.matches.push_back(per_image.matches[j]);
+      per_image_good_match.training_points.push_back(
+          per_image.training_points[j]);
+      per_image_good_match.training_points_3d.push_back(
+          per_image.training_points_3d[j]);
+      per_image_good_match.query_points.push_back(per_image.query_points[j]);
+      per_image_good_match.homography = homography;
+    }
+    per_image_good_matches.push_back(per_image_good_match);
+
+    // TODO: Use homography to compute target point on query image
+
+    // Pose transformations (rotations and translations) for various
+    // coordinate frames.  R_X_Y_vec is the Rodrigues (angle-axis)
+    // representation of the 3x3 rotation R_X_Y from frame X to frame Y
+
+    // Transform from camera to target frame
+    cv::Mat R_camera_target_vec, R_camera_target, T_camera_target;
+    // Transform from camera to field origin (global) reference frame
+    cv::Mat R_camera_field_vec, R_camera_field, T_camera_field;
+    // Inverse of camera to field-- defines location of camera in
+    // global (field) reference frame
+    cv::Mat R_field_camera_vec, R_field_camera, T_field_camera;
+
+    // Compute the pose of the camera (global origin relative to camera)
+    cv::solvePnPRansac(per_image_good_match.training_points_3d,
+                       per_image_good_match.query_points, CameraIntrinsics(),
+                       CameraDistCoeffs(), R_camera_field_vec, T_camera_field);
+    CHECK_EQ(cv::Size(1, 3), T_camera_field.size());
+
+    // Convert to float32's (from float64) to be compatible with the rest
+    R_camera_field_vec.convertTo(R_camera_field_vec, CV_32F);
+    T_camera_field.convertTo(T_camera_field, CV_32F);
+
+    // Get matrix version of R_camera_field
+    cv::Rodrigues(R_camera_field_vec, R_camera_field);
+    CHECK_EQ(cv::Size(3, 3), R_camera_field.size());
+
+    // Compute H_field_camera = H_camera_field^-1
+    R_field_camera = R_camera_field.t();
+    T_field_camera = -R_field_camera * (T_camera_field);
+
+    // Extract the field_target transformation
+    const cv::Mat H_field_target(4, 4, CV_32F,
+                                 const_cast<void *>(static_cast<const void *>(
+                                     FieldToTarget(i)->data()->data())));
+
+    const cv::Mat R_field_target =
+        H_field_target(cv::Range(0, 3), cv::Range(0, 3));
+    const cv::Mat T_field_target =
+        H_field_target(cv::Range(0, 3), cv::Range(3, 4));
+
+    // Use it to get the relative pose from camera to target
+    R_camera_target = R_camera_field * (R_field_target);
+    T_camera_target = R_camera_field * (T_field_target) + T_camera_field;
+
+    // Set H_camera_target
     {
       CHECK_EQ(cv::Size(3, 3), R_camera_target.size());
-      CHECK_EQ(cv::Size(3, 1), T_camera_target.size());
-      cv::Mat camera_target = cv::Mat::zeros(4, 4, CV_32F);
-      R_camera_target.copyTo(camera_target(cv::Range(0, 3), cv::Range(0, 3)));
-      T_camera_target.copyTo(camera_target(cv::Range(3, 4), cv::Range(0, 3)));
-      camera_target.at<float>(3, 3) = 1;
-      CHECK(camera_target.isContinuous());
-      camera_target_list.push_back(camera_target);
+      CHECK_EQ(cv::Size(1, 3), T_camera_target.size());
+      cv::Mat H_camera_target = cv::Mat::zeros(4, 4, CV_32F);
+      R_camera_target.copyTo(H_camera_target(cv::Range(0, 3), cv::Range(0, 3)));
+      T_camera_target.copyTo(H_camera_target(cv::Range(0, 3), cv::Range(3, 4)));
+      H_camera_target.at<float>(3, 3) = 1;
+      CHECK(H_camera_target.isContinuous());
+      camera_target_list.push_back(H_camera_target.clone());
+    }
+
+    // Set H_field_camera
+    {
+      CHECK_EQ(cv::Size(3, 3), R_field_camera.size());
+      CHECK_EQ(cv::Size(1, 3), T_field_camera.size());
+      cv::Mat H_field_camera = cv::Mat::zeros(4, 4, CV_32F);
+      R_field_camera.copyTo(H_field_camera(cv::Range(0, 3), cv::Range(0, 3)));
+      T_field_camera.copyTo(H_field_camera(cv::Range(0, 3), cv::Range(3, 4)));
+      H_field_camera.at<float>(3, 3) = 1;
+      CHECK(H_field_camera.isContinuous());
+      field_camera_list.push_back(H_field_camera.clone());
     }
   }
   // Now, send our two messages-- one large, with details for remote
   // debugging(features), and one smaller
-  SendImageMatchResult(image, keypoints, descriptors, matches,
-                       camera_target_list, &detailed_result_sender_, true);
-  SendImageMatchResult(image, keypoints, descriptors, matches,
-                       camera_target_list, &result_sender_, false);
+  SendImageMatchResult(image, keypoints, descriptors, all_good_matches,
+                       camera_target_list, field_camera_list,
+                       &detailed_result_sender_, true);
+  SendImageMatchResult(image, keypoints, descriptors, all_good_matches,
+                       camera_target_list, field_camera_list, &result_sender_,
+                       false);
 }
 
 void CameraReader::ReadImage() {
@@ -391,7 +528,7 @@
 
 void CameraReaderMain() {
   aos::FlatbufferDetachedBuffer<aos::Configuration> config =
-      aos::configuration::ReadConfig("config.json");
+      aos::configuration::ReadConfig(FLAGS_config);
 
   const auto training_data_bfbs = SiftTrainingData();
   const sift::TrainingData *const training_data =
diff --git a/y2020/vision/sift/sift.fbs b/y2020/vision/sift/sift.fbs
index 8806957..43ea152 100644
--- a/y2020/vision/sift/sift.fbs
+++ b/y2020/vision/sift/sift.fbs
@@ -105,16 +105,21 @@
 // Contains the information the EKF wants from an image matched against a single
 // training image.
 //
-// This is represented as a transformation to a target in field coordinates.
+// This is represented as a transformation from the camera to the target
+// (camera_to_target) and a transformation from the field to the target
+// (field_to_target).
+//
+// We also send the map from the field to the camera, which can be computed
+// with the first two, to make it easier to display.
 table CameraPose {
-  // Transformation matrix from the target to the camera's origin.
+  // Transformation matrix from the camera to the target.
   // (0, 0, 0) is the aperture of the camera (we pretend it's an ideal pinhole
   // camera). Positive Z is out of the camera. Positive X and Y are right
   // handed, but which way they face depends on the camera extrinsics.
   camera_to_target:TransformationMatrix;
 
   // Field coordinates of the target, represented as a transformation matrix
-  // from the target to the field.
+  // from the field to the target.
   // (0, 0, 0) is the center of the field, on the level of most of the field
   // (not the region under the truss). Positive X is towards the red alliance's
   // PLAYER STATION. Positive Z is up. The coordinate system is right-handed.
@@ -125,6 +130,9 @@
   // The value here will be selected from a small, static set of targets we
   // train images on.
   field_to_target:TransformationMatrix;
+
+  // The pose of the camera in the field coordinate frame.
+  field_to_camera:TransformationMatrix;
 }
 
 table ImageMatchResult {
diff --git a/y2020/vision/v4l2_reader.cc b/y2020/vision/v4l2_reader.cc
index f1944c1..91777c7 100644
--- a/y2020/vision/v4l2_reader.cc
+++ b/y2020/vision/v4l2_reader.cc
@@ -6,6 +6,9 @@
 #include <sys/stat.h>
 #include <sys/types.h>
 
+DEFINE_bool(ignore_timestamps, false,
+            "Don't require timestamps on images.  Used to allow webcams");
+
 namespace frc971 {
 namespace vision {
 
@@ -137,8 +140,11 @@
            buffer.m.userptr);
   CHECK_EQ(ImageSize(), buffer.length);
   CHECK(buffer.flags & V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC);
-  CHECK_EQ(buffer.flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK,
-           static_cast<uint32_t>(V4L2_BUF_FLAG_TSTAMP_SRC_EOF));
+  if (!FLAGS_ignore_timestamps) {
+    // Require that we have a good timestamp on images.
+    CHECK_EQ(buffer.flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK,
+             static_cast<uint32_t>(V4L2_BUF_FLAG_TSTAMP_SRC_EOF));
+  }
   return {static_cast<int>(buffer.index),
           aos::time::from_timeval(buffer.timestamp)};
 }
diff --git a/y2020/wpilib_interface.cc b/y2020/wpilib_interface.cc
index 8c30918..4f39acf 100644
--- a/y2020/wpilib_interface.cc
+++ b/y2020/wpilib_interface.cc
@@ -218,8 +218,7 @@
 
     {
       auto builder = superstructure_position_sender_.MakeBuilder();
-      superstructure::Position::Builder position_builder =
-          builder.MakeBuilder<superstructure::Position>();
+
       // TODO(alex): check new absolute encoder api.
       // Hood
       frc971::AbsolutePositionT hood;
@@ -246,6 +245,14 @@
       flatbuffers::Offset<frc971::PotAndAbsolutePosition> turret_offset =
           frc971::PotAndAbsolutePosition::Pack(*builder.fbb(), &turret);
 
+      // Control Panel
+      frc971::RelativePositionT control_panel;
+      CopyPosition(*control_panel_encoder_, &control_panel,
+                   Values::kControlPanelEncoderCountsPerRevolution(),
+                   Values::kControlPanelEncoderRatio(), false);
+      flatbuffers::Offset<frc971::RelativePosition> control_panel_offset =
+          frc971::RelativePosition::Pack(*builder.fbb(), &control_panel);
+
       // Shooter
       y2020::control_loops::superstructure::ShooterPositionT shooter;
       shooter.theta_finisher =
@@ -266,14 +273,8 @@
               y2020::control_loops::superstructure::ShooterPosition::Pack(
                   *builder.fbb(), &shooter);
 
-      // Control Panel
-      frc971::RelativePositionT control_panel;
-      CopyPosition(*control_panel_encoder_, &control_panel,
-                   Values::kControlPanelEncoderCountsPerRevolution(),
-                   Values::kControlPanelEncoderRatio(), false);
-      flatbuffers::Offset<frc971::RelativePosition> control_panel_offset =
-          frc971::RelativePosition::Pack(*builder.fbb(), &control_panel);
-
+      superstructure::Position::Builder position_builder =
+          builder.MakeBuilder<superstructure::Position>();
       position_builder.add_hood(hood_offset);
       position_builder.add_intake_joint(intake_joint_offset);
       position_builder.add_turret(turret_offset);
@@ -481,6 +482,7 @@
     // Thread 3.
     ::aos::ShmEventLoop sensor_reader_event_loop(&config.message());
     SensorReader sensor_reader(&sensor_reader_event_loop);
+    sensor_reader.set_pwm_trigger(true);
     sensor_reader.set_drivetrain_left_encoder(make_encoder(0));
     sensor_reader.set_drivetrain_right_encoder(make_encoder(1));
     // TODO: pin numbers