#include "y2022/vision/camera_reader.h"

#include <math.h>

#include <opencv2/imgproc.hpp>

#include "aos/events/event_loop.h"
#include "aos/flatbuffer_merge.h"
#include "aos/network/team_number.h"
#include "frc971/vision/v4l2_reader.h"
#include "frc971/vision/vision_generated.h"
#include "y2020/vision/sift/sift_generated.h"
#include "y2020/vision/sift/sift_training_generated.h"
#include "y2020/vision/tools/python_code/sift_training_data.h"
#include "y2022/vision/blob_detector.h"
#include "y2022/vision/target_estimator.h"

namespace y2022 {
namespace vision {

using namespace frc971::vision;

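// Returns the calibration entry from the training data whose node name and
// team number match this host; LOG(FATAL)s if no entry matches.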
const sift::CameraCalibration *CameraReader::FindCameraCalibration() const {
  const std::string_view node_name = event_loop_->node()->name()->string_view();
  const int team_number = aos::network::GetTeamNumber();
  for (const sift::CameraCalibration *candidate :
       *training_data_->camera_calibrations()) {
    if (candidate->node_name()->string_view() != node_name) {
      continue;
    }
    if (candidate->team_number() != team_number) {
      continue;
    }
    return candidate;
  }
  LOG(FATAL) << ": Failed to find camera calibration for " << node_name
             << " on " << team_number;
}

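// Converts the YUYV frame to grayscale, runs blob detection on it, estimates
// the target location from the blobs, and sends out a TargetEstimate message.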
void CameraReader::ProcessImage(const CameraImage &image) {
  // Remember, we're getting YUYV images, so we start by extracting the Y
  // (luma) channel to get a grayscale image to work with.

  // TODO: Need to code this up for blob detection
  cv::Mat image_mat(image.rows(), image.cols(), CV_8U);
  CHECK(image_mat.isContinuous());
  // In YUYV, every other byte is a luma sample, so copy bytes 0, 2, 4, ...
  // into the single-channel Mat.
  const int number_pixels = image.rows() * image.cols();
  for (int i = 0; i < number_pixels; ++i) {
    reinterpret_cast<uint8_t *>(image_mat.data)[i] =
        image.data()->data()[i * 2];
  }

  // Now, send our two messages: one large one with details for remote
  // debugging, and one smaller one with just the target estimate.
  // TODO: Send debugging message as well
  std::vector<std::vector<cv::Point> > filtered_blobs, unfiltered_blobs;
  std::vector<BlobDetector::BlobStats> blob_stats;
  cv::Mat binarized_image =
      cv::Mat::zeros(cv::Size(image_mat.cols, image_mat.rows), CV_8UC1);
  cv::Mat ret_image =
      cv::Mat::zeros(cv::Size(image_mat.cols, image_mat.rows), CV_8UC3);
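  // Run blob detection on the grayscale image.  ExtractBlobs fills in the
  // binarized image, the ret_image visualization, the filtered and unfiltered
  // blob contours, and the per-blob statistics.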
  BlobDetector::ExtractBlobs(image_mat, binarized_image, ret_image,
                             filtered_blobs, unfiltered_blobs, blob_stats);
  // TODO(milind): use actual centroid
  if (blob_stats.empty()) {
    return;  // No blobs detected in this frame; nothing to estimate.
  }
  TargetEstimateT target = TargetEstimator::EstimateTargetLocation(
      blob_stats[0].centroid, CameraIntrinsics(), CameraExtrinsics());

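  // Pack the estimate into a TargetEstimate flatbuffer and send it out.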
  auto builder = target_estimate_sender_.MakeBuilder();
  builder.CheckOk(builder.Send(TargetEstimate::Pack(*builder.fbb(), &target)));
}

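// Grabs the latest frame from the V4L2 reader.  If one is available, it gets
// processed and re-sent; otherwise we check again in 10 ms.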
void CameraReader::ReadImage() {
  if (!reader_->ReadLatestImage()) {
    read_image_timer_->Setup(event_loop_->monotonic_now() +
                             std::chrono::milliseconds(10));
    return;
  }

  ProcessImage(reader_->LatestImage());

  reader_->SendLatestImage();
  read_image_timer_->Setup(event_loop_->monotonic_now());
}

}  // namespace vision
}  // namespace y2022