Add target_finder.{cc,h}. This filters blob lists down to the best target.

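Rough sketch of the intended use, assuming the caller wires these pieces
together roughly in this order (the real pipeline lives outside this file,
and the blob-extraction step between Threshold() and PreFilter() is not part
of this change):

  TargetFinder finder;
  aos::vision::RangeImage thresholded = finder.Threshold(image);
  BlobList blobs = /* blob extraction, not in this file */;
  finder.PreFilter(blobs);
  std::vector<TargetComponent> components =
      finder.FillTargetComponentList(blobs);
  Target target;
  if (finder.FindTargetFromComponents(components, &target)) {
    // target.screen_coord now holds the estimated target center.
  }
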
Change-Id: Ib796fd2c5ba075150d009789801395dfcfc84a18
diff --git a/y2017/vision/target_finder.cc b/y2017/vision/target_finder.cc
new file mode 100644
index 0000000..e39fe73
--- /dev/null
+++ b/y2017/vision/target_finder.cc
@@ -0,0 +1,238 @@
+#include "y2017/vision/target_finder.h"
+
+#include <algorithm>
+#include <cmath>
+#include <cstdint>
+#include <cstdio>
+
+// Eigen may already be pulled in through target_finder.h, but this file uses
+// Eigen::MatrixXf/VectorXf directly, so include it explicitly.
+#include "Eigen/Dense"
+
+namespace y2017 {
+namespace vision {
+
+// Blobs now come in three types:
+//  0) normal blob.
+//  1) first part of a split blob.
+//  2) second part of a split blob.
+void ComputeXShiftPolynomial(int type, const RangeImage &img,
+                             TargetComponent *target) {
+  target->img = &img;
+  RangeImage t_img = Transpose(img);
+  int spacing = 10;
+  int n = t_img.size() - spacing * 2;
+  target->n = n;
+  if (n <= 0) {
+    fprintf(stderr, "Empty blob, aborting (n = %d).\n", n);
+    return;
+  }
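+  // Least-squares setup: each retained row i contributes two equations,
+  //   row_start(i) ~= a * i^2 + b * i + c_0
+  //   row_end(i)   ~= a * i^2 + b * i + c_1
+  // so both edges share the same quadratic shape (a, b) but get independent
+  // constant offsets (c_0, c_1).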
+  Eigen::MatrixXf A = Eigen::MatrixXf::Zero(n * 2, 4);
+  Eigen::VectorXf b = Eigen::VectorXf::Zero(n * 2);
+  int i = 0;
+  for (const auto &row : t_img) {
+    // Skip the first and last `spacing` rows: even in a blob we decided was
+    // split, those end rows may not actually contain two ranges.
+    if (i >= spacing && i - spacing < n) {
+      int j = (i - spacing) * 2;
+      // normal blob or the first part of a split.
+      if (type == 0 || type == 1) {
+        b(j) = row[0].st;
+      } else {
+        b(j) = row[1].st;
+      }
+      A(j, 0) = i * i;
+      A(j, 1) = i;
+      A(j, 2) = 1;
+      ++j;
+      // normal target or the second part of a split.
+      if (type == 0 || type == 2) {
+        b(j) = row[row.size() - 1].ed;
+      } else {
+        b(j) = row[0].ed;
+      }
+      A(j, 0) = i * i;
+      A(j, 1) = i;
+      A(j, 3) = 1;
+    }
+    ++i;
+  }
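+  // Solve the overdetermined system A * sol = b in the least-squares sense.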
+  Eigen::VectorXf sol =
+      A.jacobiSvd(Eigen::ComputeThinU | Eigen::ComputeThinV).solve(b);
+  target->a = sol(0);
+  target->b = sol(1);
+  target->c_0 = sol(2);
+  target->c_1 = sol(3);
+
+  target->mini = t_img.min_y();
+
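+  // fit_error is the sum of squared residuals of the fit.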
+  Eigen::VectorXf base = A * sol;
+  Eigen::VectorXf error_v = b - base;
+  target->fit_error = error_v.dot(error_v);
+}
+
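+// Returns the ratio of rows (in the transposed image) containing two ranges
+// to rows containing exactly one.  A high ratio means the blob gets treated
+// as a split blob and fit as two components.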
+double TargetFinder::DetectConnectedTarget(const RangeImage &img) {
+  using namespace aos::vision;
+  RangeImage t_img = Transpose(img);
+  int total = 0;
+  int split = 0;
+  for (const auto &row : t_img) {
+    if (row.size() == 1) {
+      total++;
+    } else if (row.size() == 2) {
+      split++;
+    }
+  }
+  return static_cast<double>(split) / total;
+}
+
+std::vector<TargetComponent> TargetFinder::FillTargetComponentList(
+    const BlobList &blobs) {
+  std::vector<TargetComponent> list;
+  TargetComponent new_target;
+  for (std::size_t i = 0; i < blobs.size(); ++i) {
+    double split_ratio = DetectConnectedTarget(blobs[i]);
+    if (split_ratio > 0.50) {
+      // Split-type blob: fit it as two separate components.
+      ComputeXShiftPolynomial(1, blobs[i], &new_target);
+      list.emplace_back(new_target);
+      ComputeXShiftPolynomial(2, blobs[i], &new_target);
+      list.emplace_back(new_target);
+    } else {
+      // Normal-type blob.
+      ComputeXShiftPolynomial(0, blobs[i], &new_target);
+      list.emplace_back(new_target);
+    }
+  }
+
+  return list;
+}
+
+aos::vision::RangeImage TargetFinder::Threshold(aos::vision::ImagePtr image) {
+  return aos::vision::DoThreshold(image, [&](aos::vision::PixelRef &px) {
+    if (px.g > 88) {
+      // Quick hack: accept any pixel with enough green.  The stricter
+      // green-dominance check below is currently unreachable because of this
+      // early return.
+      return true;
+      uint8_t min = std::min(px.b, px.r);
+      uint8_t max = std::max(px.b, px.r);
+      if (min >= px.g || max >= px.g) return false;
+      uint8_t a = px.g - min;
+      uint8_t b = px.g - max;
+      return (a > 10 && b > 10);
+    }
+    return false;
+  });
+}
+
+void TargetFinder::PreFilter(BlobList &imgs) {
+  imgs.erase(std::remove_if(imgs.begin(), imgs.end(),
+                            [](RangeImage &img) {
+                              // Drop blobs that are too small: the fit math
+                              // needs a reasonable number of pixels and
+                              // enough rows to be stable.
+                              return (img.npixels() < 100 || img.height() < 25);
+                            }),
+             imgs.end());
+}
+
+bool TargetFinder::FindTargetFromComponents(
+    std::vector<TargetComponent> component_list, Target *final_target) {
+  using namespace aos::vision;
+  if (component_list.size() < 2 || final_target == nullptr) {
+    // We don't have enough parts for a target.
+    return false;
+  }
+
+  // A0 * c + A1*s = b
+  Eigen::MatrixXf A = Eigen::MatrixXf::Zero(4, 2);
+  // A0: Offset component will be constant across all equations.
+  A(0, 0) = 1;
+  A(1, 0) = 1;
+  A(2, 0) = 1;
+  A(3, 0) = 1;
+
+  // A1: s defines the scaling of the expected target geometry.
+  // These are the center heights of the top and bottom of the two targets.
+  A(0, 1) = -1;
+  A(1, 1) = 0;
+  A(2, 1) = 2;
+  A(3, 1) = 4;
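+  //
+  // For a candidate pair of components, the model being solved is
+  //   b ~= c * [1, 1, 1, 1]^T + s * [-1, 0, 2, 4]^T
+  // where b stacks the measured top/bottom heights of the two components,
+  // c is a shared offset, and s scales the expected target geometry.  The
+  // pair with the smallest least-squares residual (plus the x-offset penalty
+  // added below) wins.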
+
+  // Track which pair is the best fit.
+  double best_error = -1;
+  double best_offset = -1;
+  Eigen::VectorXf best_v;
+  // Write down the two selected indices.
+  std::pair<int, int> selected;
+  // The regressed combined center estimate for the selected pair.
+  double regressed_y_center = 0.0;
+
+  Eigen::VectorXf b = Eigen::VectorXf::Zero(4);
+  for (size_t i = 0; i < component_list.size(); i++) {
+    for (size_t j = 0; j < component_list.size(); j++) {
+      if (i == j) {
+        continue;
+      } else {
+        if (component_list[i].a < 0.0 || component_list[j].a < 0.0) {
+          // One of the components is curved the wrong way (upside down),
+          // which can't happen for a real target.
+          continue;
+        }
+        // b is the target offsets.
+        b(0) = component_list[j].EvalMinTop();
+        b(1) = component_list[j].EvalMinBot();
+        b(2) = component_list[i].EvalMinTop();
+        b(3) = component_list[i].EvalMinBot();
+      }
+
+      Eigen::VectorXf sol =
+          A.jacobiSvd(Eigen::ComputeThinU | Eigen::ComputeThinV).solve(b);
+
+      Eigen::VectorXf base = A * sol;
+      Eigen::VectorXf error_v = b - base;
+      double error = error_v.dot(error_v);
+      // Offset in screen x between the two targets.
+      double offset = std::abs(component_list[i].CenterPolyOne() -
+                               component_list[j].CenterPolyOne());
+      // How much we care about offset: error is typically 5-20 and offsets
+      // are around 10.  The weight was chosen for the worst garbage-can image.
+      const double kOffsetWeight = 2.1;
+      error += kOffsetWeight * offset;
+      if ((best_error < 0 || error < best_error) && !std::isnan(error)) {
+        best_error = error;
+        best_offset = offset;
+        best_v = error_v;
+        selected.first = i;
+        selected.second = j;
+        regressed_y_center = sol(0);
+      }
+    }
+  }
+
+  // If we missed, or the error is ridiculous, just give up here.
+  if (best_error < 0 || best_error > 300.0 || std::isnan(best_error)) {
+    fprintf(stderr, "Bogus target dude (%f).\n", best_error);
+    return false;
+  }
+
+  fprintf(stderr,
+          "Selected (%d, %d):\n\t"
+          "err(%.2f, %.2f, %.2f, %.2f)(%.2f)(%.2f).\n\t"
+          "c00(%.2f, %.2f)(%.2f)\n",
+          selected.first, selected.second, best_v(0), best_v(1), best_v(2),
+          best_v(3), best_error, best_offset,
+          component_list[selected.first].CenterPolyOne(),
+          component_list[selected.second].CenterPolyOne(),
+          component_list[selected.first].CenterPolyOne() -
+              component_list[selected.second].CenterPolyOne());
+
+  double avg_off = (component_list[selected.first].mini +
+                    component_list[selected.second].mini) /
+                   2.0;
+  double avg_one = (component_list[selected.first].CenterPolyOne() +
+                    component_list[selected.second].CenterPolyOne()) /
+                   2.0;
+
+  final_target->screen_coord.x(avg_one + avg_off);
+  final_target->screen_coord.y(regressed_y_center);
+  final_target->comp1 = component_list[selected.first];
+  final_target->comp2 = component_list[selected.second];
+
+  return true;
+}
+
+}  // namespace vision
+}  // namespace y2017