Make 2020 Localizer process ImageMatchResults

This should provide a localizer that can correctly consume the vision
processing results coming from the Pis. It does not actually start up
this localizer yet, although that should just be a 1-line change in
y2020/control_loops/drivetrain/drivetrain_main.cc.
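
For reference, the eventual swap in drivetrain_main.cc should end up
looking roughly like the sketch below. This is only a sketch under
assumptions this change does not confirm: that the new class is exposed
as y2020::control_loops::drivetrain::Localizer and that its constructor
mirrors DeadReckonEkf's (an event loop plus the drivetrain config).

    // Before (dead reckoning only):
    //   ::frc971::control_loops::drivetrain::DeadReckonEkf localizer(
    //       &event_loop, GetDrivetrainConfig());
    // After (vision-aware; class name is an assumption):
    ::y2020::control_loops::drivetrain::Localizer localizer(
        &event_loop, ::y2020::control_loops::drivetrain::GetDrivetrainConfig());
    // The localizer is handed to the drivetrain loop exactly as before:
    ::frc971::control_loops::drivetrain::DrivetrainLoop drivetrain(
        ::y2020::control_loops::drivetrain::GetDrivetrainConfig(), &event_loop,
        &localizer);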

Change-Id: Iea8aa50774932ebf19d89ca3a3f4b9cd12dfe446
diff --git a/frc971/control_loops/drivetrain/BUILD b/frc971/control_loops/drivetrain/BUILD
index 35aebc7..9fef8f9 100644
--- a/frc971/control_loops/drivetrain/BUILD
+++ b/frc971/control_loops/drivetrain/BUILD
@@ -187,6 +187,7 @@
         ":drivetrain_config",
         ":drivetrain_status_fbs",
         ":hybrid_ekf",
+        "//frc971/control_loops:control_loops_fbs",
         "//frc971/control_loops:pose",
     ],
 )
diff --git a/frc971/control_loops/drivetrain/hybrid_ekf.h b/frc971/control_loops/drivetrain/hybrid_ekf.h
index fa723e0..d2c0be3 100644
--- a/frc971/control_loops/drivetrain/hybrid_ekf.h
+++ b/frc971/control_loops/drivetrain/hybrid_ekf.h
@@ -660,14 +660,18 @@
   Q_continuous_(kAngularError, kAngularError) = std::pow(2.0, 2.0);
   // This noise value largely governs whether we will trust the encoders or
   // accelerometer more for estimating the robot position.
+  // Note that this also affects how we interpret camera measurements,
+  // particularly when using a heading/distance/skew measurement: if the
+  // noise on those numbers is particularly high, we can end up with weird
+  // dynamics where a camera update both shifts our X/Y position and
+  // substantially adjusts our velocity estimates, causing the camera updates
+  // to create "momentum". If we don't trust the encoders enough, we then have
+  // no way of determining that those velocity updates are bogus. This also
+  // interacts with kVelocityOffsetTimeConstant.
   Q_continuous_(kLongitudinalVelocityOffset, kLongitudinalVelocityOffset) =
       std::pow(1.1, 2.0);
   Q_continuous_(kLateralVelocity, kLateralVelocity) = std::pow(0.1, 2.0);
 
-  P_.setZero();
-  P_.diagonal() << 0.01, 0.01, 0.01, 0.02, 0.01, 0.02, 0.01, 1, 1, 0.03, 0.01,
-      0.01;
-
   H_encoders_and_gyro_.setZero();
   // Encoders are stored directly in the state matrix, so are a minor
   // transform away.
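
The new comment about Q_continuous_ above is easiest to see on a toy
filter: when the velocity process noise is large, a position-only
correction (which is roughly what a camera X/Y update amounts to) gets
partially absorbed into the velocity state. The sketch below is not the
frc971 HybridEkf; it is a self-contained two-state Kalman filter using
Eigen, and all names and numbers in it are made up for illustration.

    #include <iostream>

    #include <Eigen/Dense>

    // Toy two-state filter over x = [position, velocity].  It only exists to
    // show how the velocity process noise decides how much of a position-only
    // correction leaks into the velocity estimate.
    void RunCorrection(double velocity_process_noise) {
      constexpr double kDt = 0.02;
      Eigen::Matrix2d F;
      F << 1.0, kDt,
           0.0, 1.0;
      Eigen::Matrix2d Q =
          Eigen::Vector2d(1e-4, velocity_process_noise).asDiagonal();

      Eigen::Vector2d x = Eigen::Vector2d::Zero();
      Eigen::Matrix2d P = Eigen::Matrix2d::Identity() * 1e-3;
      // Dead-reckon for one second so the covariance reflects the process noise.
      for (int i = 0; i < 50; ++i) {
        x = F * x;
        P = F * P * F.transpose() + Q;
      }

      // Apply a single position measurement 0.5 m away from where we thought we
      // were.  The measurement matrix is H = [1 0] (position only), so H P H^T
      // is just P(0, 0) and P H^T is the first column of P.
      constexpr double kR = 1e-2;
      const double innovation = 0.5 - x(0);
      const Eigen::Vector2d K = P.col(0) / (P(0, 0) + kR);
      x += K * innovation;

      std::cout << "velocity process noise " << velocity_process_noise
                << ": position moved " << x(0) << " m, velocity moved " << x(1)
                << " m/s\n";
    }

    int main() {
      RunCorrection(1e-4);  // Trust the encoders: the velocity barely moves.
      RunCorrection(10.0);  // Noisy velocity model: the fix drags velocity too.
      return 0;
    }

With the small velocity process noise, the printed velocity correction
stays near zero; with the large one, a sizable chunk of the 0.5 m
innovation shows up as a velocity change, which is the "momentum" effect
the comment warns about.
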
diff --git a/frc971/control_loops/drivetrain/localizer.h b/frc971/control_loops/drivetrain/localizer.h
index d1778b6..43462b8 100644
--- a/frc971/control_loops/drivetrain/localizer.h
+++ b/frc971/control_loops/drivetrain/localizer.h
@@ -36,9 +36,10 @@
 
 // Defines an interface for classes that provide field-global localization.
 class LocalizerInterface {
+ public:
   typedef HybridEkf<double> Ekf;
   typedef typename Ekf::StateIdx StateIdx;
- public:
+
   // Perform a single step of the filter, using the information that is
   // available on every drivetrain iteration.
   // The user should pass in the U that the real system experienced from the
@@ -124,9 +125,6 @@
 // This provides no method for using cameras or the such to get global
 // measurements and just assumes that you can dead-reckon perfectly.
 class DeadReckonEkf : public LocalizerInterface {
-  typedef HybridEkf<double> Ekf;
-  typedef typename Ekf::StateIdx StateIdx;
-
  public:
   DeadReckonEkf(::aos::EventLoop *event_loop,
                 const DrivetrainConfig<double> &dt_config)