Convert extrinsics_calibration to an observer model

Rather than coupling the estimation logic with the data extraction, add
an observer interface and put the filter behind it.  This makes it
conceptually much easier to reprocess the data multiple times.
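
For reference, the observer interface added in
y2020/vision/calibration_accumulator.h is assumed to look roughly like
the sketch below, based on the overrides and the ReviewData(&filter)
call in this diff.  The exact signatures, the virtual destructor, and
the ReviewData parameter name are assumptions, not the actual header.

    // Sketch of the observer interface (assumed from this diff).
    // Receives camera and IMU samples replayed in time order.
    class CalibrationDataObserver {
     public:
      virtual ~CalibrationDataObserver() = default;

      // Called with a camera/charuco sample as (rotation, translation) vectors.
      virtual void UpdateCamera(
          aos::distributed_clock::time_point t,
          std::pair<Eigen::Vector3d, Eigen::Vector3d> rt) = 0;

      // Called with an IMU sample as (gyro, accelerometer) vectors.
      virtual void UpdateIMU(
          aos::distributed_clock::time_point t,
          std::pair<Eigen::Vector3d, Eigen::Vector3d> wa) = 0;
    };

    // CalibrationData::ReviewData(CalibrationDataObserver *observer) then
    // replays the accumulated samples through the observer, so the same log
    // data can be reprocessed any number of times with different filters.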

Change-Id: I70c6f36d2c2069ce86a45a4859894188a470d003
Signed-off-by: Austin Schuh <austin.linux@gmail.com>
Signed-off-by: milind-u <milind.upadhyay@gmail.com>
diff --git a/y2020/vision/extrinsics_calibration.cc b/y2020/vision/extrinsics_calibration.cc
index e775e3e..be64efb 100644
--- a/y2020/vision/extrinsics_calibration.cc
+++ b/y2020/vision/extrinsics_calibration.cc
@@ -6,10 +6,7 @@
 #include "Eigen/Dense"
 #include "Eigen/Geometry"
 
-#include "frc971/wpilib/imu_batch_generated.h"
 #include "absl/strings/str_format.h"
-#include "frc971/control_loops/quaternion_utils.h"
-#include "y2020/vision/charuco_lib.h"
 #include "aos/events/logging/log_reader.h"
 #include "aos/events/shm_event_loop.h"
 #include "aos/init.h"
@@ -17,16 +14,17 @@
 #include "aos/time/time.h"
 #include "aos/util/file.h"
 #include "frc971/control_loops/drivetrain/improved_down_estimator.h"
+#include "frc971/control_loops/quaternion_utils.h"
+#include "frc971/wpilib/imu_batch_generated.h"
+#include "y2020/vision/calibration_accumulator.h"
+#include "y2020/vision/charuco_lib.h"
 #include "y2020/vision/sift/sift_generated.h"
 #include "y2020/vision/sift/sift_training_generated.h"
 #include "y2020/vision/tools/python_code/sift_training_data.h"
 #include "y2020/vision/vision_generated.h"
-#include "y2020/vision/charuco_lib.h"
 
 DEFINE_string(config, "config.json", "Path to the config file to use.");
 DEFINE_string(pi, "pi-7971-2", "Pi name to calibrate.");
-DEFINE_bool(display_undistorted, false,
-            "If true, display the undistorted image.");
 
 namespace frc971 {
 namespace vision {
@@ -34,74 +32,37 @@
 using aos::distributed_clock;
 using aos::monotonic_clock;
 
-// Class to both accumulate and replay camera and IMU data in time order.
-class CalibrationData {
+class PoseFilter : public CalibrationDataObserver {
  public:
-  CalibrationData()
-      : x_hat_(Eigen::Matrix<double, 9, 1>::Zero()),
+  PoseFilter()
+      : accel_(Eigen::Matrix<double, 3, 1>::Zero()),
+        omega_(Eigen::Matrix<double, 3, 1>::Zero()),
+        x_hat_(Eigen::Matrix<double, 9, 1>::Zero()),
         q_(Eigen::Matrix<double, 9, 9>::Zero()) {}
 
-  // Adds an IMU point to the list at the provided time.
-  void AddImu(distributed_clock::time_point distributed_now,
-              Eigen::Vector3d gyro, Eigen::Vector3d accel) {
-    imu_points_.emplace_back(distributed_now, std::make_pair(gyro, accel));
-  }
-
-  // Adds a camera/charuco detection to the list at the provided time.
-  void AddCharuco(distributed_clock::time_point distributed_now,
-                  Eigen::Vector3d rvec, Eigen::Vector3d tvec) {
-    rot_trans_points_.emplace_back(distributed_now, std::make_pair(rvec, tvec));
-  }
-
-  // Processes the data points by calling UpdateCamera and UpdateIMU in time
-  // order.
-  void ReviewData() {
-    size_t next_imu_point = 0;
-    size_t next_camera_point = 0;
-    while (true) {
-      if (next_imu_point != imu_points_.size()) {
-        // There aren't that many combinations, so just brute force them all
-        // rather than being too clever.
-        if (next_camera_point != rot_trans_points_.size()) {
-          if (imu_points_[next_imu_point].first >
-              rot_trans_points_[next_camera_point].first) {
-            // Camera!
-            UpdateCamera(rot_trans_points_[next_camera_point].first,
-                         rot_trans_points_[next_camera_point].second);
-            ++next_camera_point;
-          } else {
-            // IMU!
-            UpdateIMU(imu_points_[next_imu_point].first,
-                      imu_points_[next_imu_point].second);
-            ++next_imu_point;
-          }
-        } else {
-          if (next_camera_point != rot_trans_points_.size()) {
-            // Camera!
-            UpdateCamera(rot_trans_points_[next_camera_point].first,
-                         rot_trans_points_[next_camera_point].second);
-            ++next_camera_point;
-          } else {
-            // Nothing left for either list of points, so we are done.
-            break;
-          }
-        }
-      }
-    }
-  }
-
   void UpdateCamera(distributed_clock::time_point t,
-                    std::pair<Eigen::Vector3d, Eigen::Vector3d> rt) {
+                    std::pair<Eigen::Vector3d, Eigen::Vector3d> rt) override {
+    Integrate(t);
+    // TODO(austin): take the sample.
     LOG(INFO) << t << " Camera " << rt.second.transpose();
   }
 
   void UpdateIMU(distributed_clock::time_point t,
-                 std::pair<Eigen::Vector3d, Eigen::Vector3d> wa) {
+                 std::pair<Eigen::Vector3d, Eigen::Vector3d> wa) override {
+    Integrate(t);
+    omega_ = wa.first;
+    accel_ = wa.second;
     LOG(INFO) << t << " IMU " << wa.first.transpose();
   }
 
  private:
-  // TODO(austin): Actually use these.  Or make a new "callback" object which has these.
+  void Integrate(distributed_clock::time_point t) { LOG(INFO) << t; }
+
+  Eigen::Matrix<double, 3, 1> accel_;
+  Eigen::Matrix<double, 3, 1> omega_;
+
+  // TODO(austin): Actually use these.  Or make a new "callback" object which
+  // has these.
   Eigen::Matrix<double, 9, 1> x_hat_;
   Eigen::Matrix<double, 9, 9> q_;
 
@@ -118,160 +79,6 @@
   // Measurement:
   //   xyz position
   //   orientation rotation vector
-
-  std::vector<std::pair<distributed_clock::time_point,
-                        std::pair<Eigen::Vector3d, Eigen::Vector3d>>>
-      imu_points_;
-
-  // Store pose samples as timestamp, along with
-  // pair of rotation, translation vectors
-  std::vector<std::pair<distributed_clock::time_point,
-                        std::pair<Eigen::Vector3d, Eigen::Vector3d>>>
-      rot_trans_points_;
-};
-
-// Class to register image and IMU callbacks in AOS and route them to the
-// corresponding CalibrationData class.
-class Calibration {
- public:
-  Calibration(aos::SimulatedEventLoopFactory *event_loop_factory,
-              aos::EventLoop *image_event_loop, aos::EventLoop *imu_event_loop,
-              std::string_view pi, CalibrationData *data)
-      : image_event_loop_(image_event_loop),
-        image_factory_(event_loop_factory->GetNodeEventLoopFactory(
-            image_event_loop_->node())),
-        imu_event_loop_(imu_event_loop),
-        imu_factory_(event_loop_factory->GetNodeEventLoopFactory(
-            imu_event_loop_->node())),
-        charuco_extractor_(
-            image_event_loop_, pi,
-            [this](cv::Mat rgb_image, monotonic_clock::time_point eof,
-                   std::vector<int> charuco_ids,
-                   std::vector<cv::Point2f> charuco_corners, bool valid,
-                   Eigen::Vector3d rvec_eigen, Eigen::Vector3d tvec_eigen) {
-              HandleCharuco(rgb_image, eof, charuco_ids, charuco_corners, valid,
-                            rvec_eigen, tvec_eigen);
-            }),
-        data_(data) {
-    imu_event_loop_->MakeWatcher(
-        "/drivetrain", [this](const frc971::IMUValuesBatch &imu) {
-          if (!imu.has_readings()) {
-            return;
-          }
-          for (const frc971::IMUValues *value : *imu.readings()) {
-            HandleIMU(value);
-          }
-        });
-  }
-
-  // Processes a charuco detection.
-  void HandleCharuco(cv::Mat rgb_image, const monotonic_clock::time_point eof,
-                     std::vector<int> /*charuco_ids*/,
-                     std::vector<cv::Point2f> /*charuco_corners*/, bool valid,
-                     Eigen::Vector3d rvec_eigen, Eigen::Vector3d tvec_eigen) {
-    if (valid) {
-      Eigen::Quaternion<double> rotation(
-          frc971::controls::ToQuaternionFromRotationVector(rvec_eigen));
-      Eigen::Translation3d translation(tvec_eigen);
-
-      const Eigen::Affine3d board_to_camera = translation * rotation;
-      (void)board_to_camera;
-
-      // TODO(austin): Need a gravity vector input.
-      //
-      // TODO(austin): Need a state, covariance, and model.
-      //
-      // TODO(austin): Need to record all the values out of a log and run it
-      // as a batch run so we can feed it into ceres.
-
-      // LOG(INFO) << "rotation " << rotation.matrix();
-      // LOG(INFO) << "translation " << translation.matrix();
-      // Z -> up
-      // Y -> away from cameras 2 and 3
-      // X -> left
-      Eigen::Vector3d imu(last_value_.accelerometer_x,
-                          last_value_.accelerometer_y,
-                          last_value_.accelerometer_z);
-
-      // For cameras 2 and 3...
-      Eigen::Quaternion<double> imu_to_camera(
-          Eigen::AngleAxisd(-0.5 * M_PI, Eigen::Vector3d::UnitX()));
-
-      Eigen::Quaternion<double> board_to_world(
-          Eigen::AngleAxisd(0.5 * M_PI, Eigen::Vector3d::UnitX()));
-
-      Eigen::IOFormat HeavyFmt(Eigen::FullPrecision, 0, ", ", ",\n", "[", "]",
-                               "[", "]");
-
-      LOG(INFO);
-      LOG(INFO) << "World Gravity "
-                << (board_to_world * rotation.inverse() * imu_to_camera * imu)
-                       .transpose()
-                       .format(HeavyFmt);
-      LOG(INFO) << "Board Gravity "
-                << (rotation.inverse() * imu_to_camera * imu)
-                       .transpose()
-                       .format(HeavyFmt);
-      LOG(INFO) << "Camera Gravity "
-                << (imu_to_camera * imu).transpose().format(HeavyFmt);
-      LOG(INFO) << "IMU Gravity " << imu.transpose().format(HeavyFmt);
-
-      const double age_double =
-          std::chrono::duration_cast<std::chrono::duration<double>>(
-              image_event_loop_->monotonic_now() - eof)
-              .count();
-      LOG(INFO) << std::fixed << std::setprecision(6) << "Age: " << age_double
-                << ", Pose is R:" << rvec_eigen.transpose().format(HeavyFmt)
-                << " T:" << tvec_eigen.transpose().format(HeavyFmt);
-
-      data_->AddCharuco(image_factory_->ToDistributedClock(eof), rvec_eigen,
-                        tvec_eigen);
-    }
-
-    cv::imshow("Display", rgb_image);
-
-    if (FLAGS_display_undistorted) {
-      const cv::Size image_size(rgb_image.cols, rgb_image.rows);
-      cv::Mat undistorted_rgb_image(image_size, CV_8UC3);
-      cv::undistort(rgb_image, undistorted_rgb_image,
-                    charuco_extractor_.camera_matrix(),
-                    charuco_extractor_.dist_coeffs());
-
-      cv::imshow("Display undist", undistorted_rgb_image);
-    }
-
-    int keystroke = cv::waitKey(1);
-    if ((keystroke & 0xFF) == static_cast<int>('q')) {
-      // image_event_loop_->Exit();
-    }
-  }
-
-  // Processes an IMU reading.
-  void HandleIMU(const frc971::IMUValues *imu) {
-    VLOG(1) << "IMU " << imu;
-    imu->UnPackTo(&last_value_);
-    Eigen::Vector3d gyro(last_value_.gyro_x, last_value_.gyro_y,
-                         last_value_.gyro_z);
-    Eigen::Vector3d accel(last_value_.accelerometer_x,
-                          last_value_.accelerometer_y,
-                          last_value_.accelerometer_z);
-
-    data_->AddImu(imu_factory_->ToDistributedClock(monotonic_clock::time_point(
-                      chrono::nanoseconds(imu->monotonic_timestamp_ns()))),
-                  gyro, accel);
-  }
-
-  frc971::IMUValuesT last_value_;
-
- private:
-  aos::EventLoop *image_event_loop_;
-  aos::NodeEventLoopFactory *image_factory_;
-  aos::EventLoop *imu_event_loop_;
-  aos::NodeEventLoopFactory *imu_factory_;
-
-  CharucoExtractor charuco_extractor_;
-
-  CalibrationData *data_;
 };
 
 void Main(int argc, char **argv) {
@@ -316,7 +123,11 @@
 
   LOG(INFO) << "Done with event_loop running";
   // And now we have it, we can start processing it.
-  data.ReviewData();
+
+  {
+    PoseFilter filter;
+    data.ReviewData(&filter);
+  }
 }
 
 }  // namespace vision