Count decision margin rejections in aprilrobotics

That way we'll easily be able to tell whether we need to adjust the
decision margin threshold because of lighting. The next step is adding
this output from each pi to the webpage.

Signed-off-by: milind-u <milind.upadhyay@gmail.com>
Change-Id: I20491dab30850c0a2115bcdf6ef1dbec8153e673
diff --git a/y2023/vision/aprilrobotics.h b/y2023/vision/aprilrobotics.h
index bf9265b..fab2d30 100644
--- a/y2023/vision/aprilrobotics.h
+++ b/y2023/vision/aprilrobotics.h
@@ -31,6 +31,11 @@
     double distortion_factor;
   };
 
+  struct DetectionResult {
+    std::vector<Detection> detections;
+    size_t rejections;
+  };
+
   AprilRoboticsDetector(aos::EventLoop *event_loop,
                         std::string_view channel_name);
   ~AprilRoboticsDetector();
@@ -43,8 +48,8 @@
   // Helper function to store detection points in vector of Point2f's
   std::vector<cv::Point2f> MakeCornerVector(const apriltag_detection_t *det);
 
-  std::vector<Detection> DetectTags(cv::Mat image,
-                                    aos::monotonic_clock::time_point eof);
+  DetectionResult DetectTags(cv::Mat image,
+                             aos::monotonic_clock::time_point eof);
 
   const std::optional<cv::Mat> extrinsics() const { return extrinsics_; }
   const cv::Mat intrinsics() const { return intrinsics_; }
@@ -78,6 +83,8 @@
   frc971::vision::ImageCallback image_callback_;
   aos::Sender<frc971::vision::TargetMap> target_map_sender_;
   aos::Sender<foxglove::ImageAnnotations> image_annotations_sender_;
+
+  size_t rejections_;
 };
 
 }  // namespace vision
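
For context, a minimal sketch of how the rejection count could be produced
on the .cc side, assuming the detector compares each raw detection's
decision_margin against a minimum-threshold value. The names FilterResult,
FilterByDecisionMargin, and min_decision_margin are illustrative only and
are not taken from the actual aprilrobotics.cc; only decision_margin,
zarray_size(), and zarray_get() come from the AprilTag library itself.

#include <cstddef>
#include <vector>

#include "apriltag.h"  // AprilTag library header; the include path depends on the build setup.

// Illustrative split of raw AprilTag detections into accepted detections and
// a rejection count, based on each detection's decision_margin.
struct FilterResult {
  std::vector<apriltag_detection_t *> accepted;
  size_t rejections = 0;
};

FilterResult FilterByDecisionMargin(zarray_t *detections,
                                    double min_decision_margin) {
  FilterResult result;
  for (int i = 0; i < zarray_size(detections); ++i) {
    apriltag_detection_t *det;
    zarray_get(detections, i, &det);
    if (det->decision_margin < min_decision_margin) {
      // Low-confidence detection: count it so the threshold can be tuned
      // for lighting conditions, but don't keep it.
      ++result.rejections;
    } else {
      result.accepted.push_back(det);
    }
  }
  return result;
}

DetectTags() could then stash result.rejections into rejections_ and return
it in DetectionResult alongside the converted detections, so the count is
available for logging or publishing later.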