Merge "Add explode on combined timestamp flag"
diff --git a/y2022/BUILD b/y2022/BUILD
index 2e2f26d..5d29f04 100644
--- a/y2022/BUILD
+++ b/y2022/BUILD
@@ -7,6 +7,7 @@
binaries = [
":setpoint_setter",
"//aos/network:web_proxy_main",
+ "//aos/events/logging:log_cat",
],
data = [
":aos_config",
@@ -58,6 +59,7 @@
"//aos/network:message_bridge_server",
"//aos/network:web_proxy_main",
"//y2022/vision:camera_reader",
+ "//y2022/vision:ball_color_detector",
],
target_compatible_with = ["//tools/platforms/hardware:raspberry_pi"],
target_type = "pi",
@@ -100,6 +102,7 @@
"//y2022/localizer:localizer_output_fbs",
"//y2022/vision:calibration_fbs",
"//y2022/vision:target_estimate_fbs",
+ "//y2022/vision:ball_color_fbs",
],
target_compatible_with = ["@platforms//os:linux"],
visibility = ["//visibility:public"],
@@ -147,6 +150,7 @@
"//aos/network:remote_message_fbs",
"//frc971/vision:vision_fbs",
"//y2022/vision:calibration_fbs",
+ "//y2022/vision:ball_color_fbs",
],
target_compatible_with = ["@platforms//os:linux"],
visibility = ["//visibility:public"],
@@ -166,6 +170,7 @@
"//aos/network:message_bridge_client_fbs",
"//aos/network:message_bridge_server_fbs",
"//aos/network:timestamp_fbs",
+ "//y2022/vision:ball_color_fbs",
"//y2019/control_loops/drivetrain:target_selector_fbs",
"//y2022/control_loops/superstructure:superstructure_goal_fbs",
"//y2022/control_loops/superstructure:superstructure_output_fbs",
diff --git a/y2022/constants.cc b/y2022/constants.cc
index f72dfe3..cb85e94 100644
--- a/y2022/constants.cc
+++ b/y2022/constants.cc
@@ -132,15 +132,19 @@
// Interpolation table for comp and practice robots
r.shot_interpolation_table = InterpolationTable<Values::ShotParams>({
- {1, {0.1, 19.0}},
- {1.9, {0.1, 19.0}}, // 1.7 in reality
- {2.12, {0.15, 18.8}}, // 2.006 in reality
- {2.9, {0.25, 19.2}}, // 2.92 in reality
- {3.8, {0.30, 20.8}}, // 3.8 in reality
- {4.9, {0.32, 22.8}}, // 4.97 in reality
- {6.9, {0.40, 24.0}}, // 6.1 in reality
- {7.9, {0.40, 25.0}}, // 6.5 in reality
- {10, {0.40, 24.0}},
+ {1.0, {0.0, 19.0}},
+ {1.6, {0.0, 19.0}},
+ {1.9, {0.1, 19.0}},
+ {2.12, {0.15, 18.8}},
+ {2.9, {0.25, 19.2}},
+
+ {3.8, {0.35, 20.6}},
+ {4.9, {0.4, 21.9}},
+ {6.0, {0.40, 24.0}},
+ {7.0, {0.40, 25.5}},
+
+ {7.8, {0.35, 26.9}},
+ {10.0, {0.35, 26.9}},
});
switch (team) {
diff --git a/y2022/control_loops/superstructure/superstructure.cc b/y2022/control_loops/superstructure/superstructure.cc
index 3d5284e..b21dd0d 100644
--- a/y2022/control_loops/superstructure/superstructure.cc
+++ b/y2022/control_loops/superstructure/superstructure.cc
@@ -243,7 +243,7 @@
.shooting = true});
// Dont shoot if the robot is moving faster than this
- constexpr double kMaxShootSpeed = 1.0;
+ constexpr double kMaxShootSpeed = 1.7;
const bool moving_too_fast = std::abs(robot_velocity()) > kMaxShootSpeed;
switch (state_) {
diff --git a/y2022/joystick_reader.cc b/y2022/joystick_reader.cc
index 7e7a735..baaa2ee 100644
--- a/y2022/joystick_reader.cc
+++ b/y2022/joystick_reader.cc
@@ -64,7 +64,7 @@
const ButtonLocation kIntakeFrontOut(4, 10);
const ButtonLocation kIntakeBackOut(4, 9);
const ButtonLocation kSpitFront(3, 3);
-const ButtonLocation kSpitBack(2, 3);
+const ButtonLocation kSpitBack(3, 1);
const ButtonLocation kRedLocalizerReset(4, 14);
const ButtonLocation kBlueLocalizerReset(4, 13);
@@ -226,14 +226,18 @@
constexpr double kIntakePosition = -0.02;
constexpr size_t kIntakeCounterIterations = 25;
- // Extend the intakes and spin the rollers
- if (data.IsPressed(kIntakeFrontOut)) {
+ // Extend the intakes and spin the rollers.
+ // Don't let this happen if there is a ball in the other intake, because
+ // that would spit this one out.
+ if (data.IsPressed(kIntakeFrontOut) &&
+ !superstructure_status_fetcher_->back_intake_has_ball()) {
intake_front_pos = kIntakePosition;
transfer_roller_speed = kTransferRollerSpeed;
intake_front_counter_ = kIntakeCounterIterations;
intake_back_counter_ = 0;
- } else if (data.IsPressed(kIntakeBackOut)) {
+ } else if (data.IsPressed(kIntakeBackOut) &&
+ !superstructure_status_fetcher_->front_intake_has_ball()) {
intake_back_pos = kIntakePosition;
transfer_roller_speed = -kTransferRollerSpeed;
diff --git a/y2022/vision/BUILD b/y2022/vision/BUILD
index a26c507..65ab20c 100644
--- a/y2022/vision/BUILD
+++ b/y2022/vision/BUILD
@@ -149,6 +149,73 @@
],
)
+cc_binary(
+ name = "ball_color_detector",
+ srcs = [
+ "ball_color_main.cc",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//y2022:__subpackages__"],
+ deps = [
+ ":ball_color_lib",
+ "//aos:init",
+ "//aos/events:shm_event_loop",
+ ],
+)
+
+cc_test(
+ name = "ball_color_test",
+ srcs = [
+ "ball_color_test.cc",
+ ],
+ data = [
+ "test_ball_color_image.jpg",
+ ],
+ deps = [
+ ":ball_color_lib",
+ "//aos:json_to_flatbuffer",
+ "//aos/events:simulated_event_loop",
+ "//aos/testing:googletest",
+ "//aos/testing:test_logging",
+ "//y2022:constants",
+ ],
+)
+
+cc_library(
+ name = "ball_color_lib",
+ srcs = [
+ "ball_color.cc",
+ ],
+ hdrs = [
+ "ball_color.h",
+ ],
+ data = [
+ "//y2022:aos_config",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//y2022:__subpackages__"],
+ deps = [
+ ":ball_color_fbs",
+ "//aos/events:event_loop",
+ "//aos/events:shm_event_loop",
+ "//aos/network:team_number",
+ "//frc971/input:joystick_state_fbs",
+ "//frc971/vision:vision_fbs",
+ "//third_party:opencv",
+ ],
+)
+
+flatbuffer_cc_library(
+ name = "ball_color_fbs",
+ srcs = ["ball_color.fbs"],
+ gen_reflections = 1,
+ includes = [
+ "//frc971/input:joystick_state_fbs_includes",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//y2022:__subpackages__"],
+)
+
cc_library(
name = "geometry_lib",
hdrs = [
diff --git a/y2022/vision/ball_color.cc b/y2022/vision/ball_color.cc
new file mode 100644
index 0000000..e896da5
--- /dev/null
+++ b/y2022/vision/ball_color.cc
@@ -0,0 +1,138 @@
+#include "y2022/vision/ball_color.h"
+
+#include <chrono>
+#include <cmath>
+#include <opencv2/highgui/highgui.hpp>
+#include <thread>
+
+#include "aos/events/event_loop.h"
+#include "aos/events/shm_event_loop.h"
+#include "frc971/input/joystick_state_generated.h"
+#include "frc971/vision/vision_generated.h"
+#include "glog/logging.h"
+#include "opencv2/imgproc.hpp"
+
+namespace y2022 {
+namespace vision {
+
+BallColorDetector::BallColorDetector(aos::EventLoop *event_loop)
+ : ball_color_sender_(event_loop->MakeSender<BallColor>("/superstructure")) {
+ event_loop->MakeWatcher("/camera", [this](const CameraImage &camera_image) {
+ this->ProcessImage(camera_image);
+ });
+}
+
+void BallColorDetector::ProcessImage(const CameraImage &image) {
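+ // The camera publishes YUYV (4:2:2) data, which packs two bytes per pixel,
+ // so wrap it in a 2-channel 8-bit Mat before converting to BGR.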
+ cv::Mat image_color_mat(cv::Size(image.cols(), image.rows()), CV_8UC2,
+ (void *)image.data()->data());
+ cv::Mat image_mat(cv::Size(image.cols(), image.rows()), CV_8UC3);
+ cv::cvtColor(image_color_mat, image_mat, cv::COLOR_YUV2BGR_YUYV);
+
+ aos::Alliance detected_color = DetectColor(image_mat);
+
+ auto builder = ball_color_sender_.MakeBuilder();
+ auto ball_color_builder = builder.MakeBuilder<BallColor>();
+ ball_color_builder.add_ball_color(detected_color);
+ builder.CheckOk(builder.Send(ball_color_builder.Finish()));
+}
+
+aos::Alliance BallColorDetector::DetectColor(cv::Mat image) {
+ cv::Mat hsv(cv::Size(image.cols, image.rows), CV_8UC3);
+
+ cv::cvtColor(image, hsv, cv::COLOR_BGR2HSV);
+
+ // Look at 3 chunks of the image
+ cv::Mat reference_red =
+ BallColorDetector::SubImage(hsv, BallColorDetector::kReferenceRed());
+
+ cv::Mat reference_blue =
+ BallColorDetector::SubImage(hsv, BallColorDetector::kReferenceBlue());
+ cv::Mat ball_location =
+ BallColorDetector::SubImage(hsv, BallColorDetector::kBallLocation());
+
+ // OpenCV HSV hues go from 0 to 179.
+ // Compute the mean hue of each patch, rejecting pixels whose saturation is
+ // too low or whose value is too bright or too dark, and deal with the
+ // wrapping of red hues by shifting the wrap point to be around 90 instead
+ // of 180. 90 is a hue we don't care about.
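+ // (In OpenCV's scale, blue sits near a hue of 120 while red straddles the
+ // 0/179 wrap point, which is what mean_hue's shifted wrap handles.)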
+ double red = BallColorDetector::mean_hue(reference_red);
+ double blue = BallColorDetector::mean_hue(reference_blue);
+ double ball = BallColorDetector::mean_hue(ball_location);
+
+ // Just look at the hue values for distance
+ const double distance_to_blue = std::abs(ball - blue);
+ const double distance_to_red = std::abs(ball - red);
+
+ VLOG(1) << "\n"
+ << "Red: " << red << " deg\n"
+ << "Blue: " << blue << " deg\n"
+ << "Ball: " << ball << " deg\n"
+ << "distance to blue: " << distance_to_blue << " "
+ << "distance_to_red: " << distance_to_red;
+
+ // Is the ball location close enough to being the same hue as the blue
+ // reference or the red reference?
+
+ if (distance_to_blue < distance_to_red &&
+ distance_to_blue < kMaxHueDistance) {
+ return aos::Alliance::kBlue;
+ } else if (distance_to_red < distance_to_blue &&
+ distance_to_red < kMaxHueDistance) {
+ return aos::Alliance::kRed;
+ }
+
+ return aos::Alliance::kInvalid;
+}
+
+cv::Mat BallColorDetector::SubImage(cv::Mat image, cv::Rect location) {
+ cv::Rect new_location = BallColorDetector::RescaleRect(
+ image, location, BallColorDetector::kMeasurementsImageSize());
+ return image(new_location);
+}
+
+// Handle images of varying sizes by scaling our constant rectangles
+cv::Rect BallColorDetector::RescaleRect(cv::Mat image, cv::Rect location,
+ cv::Size original_size) {
+ const double x_scale = static_cast<double>(image.cols) / original_size.width;
+ const double y_scale = static_cast<double>(image.rows) / original_size.height;
+
+ cv::Rect new_location(location.x * x_scale, location.y * y_scale,
+ location.width * x_scale, location.height * y_scale);
+
+ return new_location;
+}
+
+double BallColorDetector::mean_hue(cv::Mat hsv_image) {
+ double num_pixels_selected = 0;
+ double sum = 0;
+
+ for (int i = 0; i < hsv_image.rows; ++i) {
+ for (int j = 0; j < hsv_image.cols; ++j) {
+ const cv::Vec3b &color = hsv_image.at<cv::Vec3b>(i, j);
+ double value = static_cast<double>(color(2));
+ double saturation = static_cast<double>(color(1));
+
+ if (value < kMinValue || value > kMaxValue ||
+ saturation < kMinSaturation) {
+ continue;
+ }
+
+ // Unwrap the hue so that the break is around 90 instead of 180.
+ // E.g., a hue of 180 goes to 0 and a hue of 120 goes to -60, but there is
+ // still a break around 90 where the result will be either +90 or -90
+ // depending on which side it is on.
+ double hue = static_cast<double>(color(0));
+ if (hue > 90) {
+ hue = hue - 180;
+ }
+
+ num_pixels_selected++;
+ sum += hue;
+ }
+ }
+
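+ // If every pixel was rejected this divides 0 by 0 and returns NaN; since
+ // NaN comparisons are false, DetectColor will then report kInvalid.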
+ return sum / num_pixels_selected;
+}
+
+} // namespace vision
+} // namespace y2022
diff --git a/y2022/vision/ball_color.fbs b/y2022/vision/ball_color.fbs
new file mode 100644
index 0000000..7eb93e0
--- /dev/null
+++ b/y2022/vision/ball_color.fbs
@@ -0,0 +1,12 @@
+include "frc971/input/joystick_state.fbs";
+
+namespace y2022.vision;
+
+table BallColor {
+ // The color of the ball, represented as the alliance it belongs to. It will
+ // be unpredictable when there is no ball, and kInvalid if the color is not
+ // close enough to either of the two references.
+ ball_color:aos.Alliance (id: 0);
+}
+
+root_type BallColor;
diff --git a/y2022/vision/ball_color.h b/y2022/vision/ball_color.h
new file mode 100644
index 0000000..ef3bdd2
--- /dev/null
+++ b/y2022/vision/ball_color.h
@@ -0,0 +1,58 @@
+#ifndef Y2022_VISION_BALL_COLOR_H_
+#define Y2022_VISION_BALL_COLOR_H_
+
+#include <opencv2/imgproc.hpp>
+
+#include "aos/events/shm_event_loop.h"
+#include "frc971/input/joystick_state_generated.h"
+#include "frc971/vision/vision_generated.h"
+#include "y2022/vision/ball_color_generated.h"
+
+namespace y2022 {
+namespace vision {
+
+using namespace frc971::vision;
+
+// Takes in camera images and detects what color the loaded ball is.
+// Does not detect whether there is a ball, and will output bad measurements
+// when there is not one.
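+//
+// A minimal usage sketch (mirroring ball_color_main.cc):
+//   aos::ShmEventLoop event_loop(&config.message());
+//   BallColorDetector detector(&event_loop);
+//   event_loop.Run();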
+class BallColorDetector {
+ public:
+ // The image size that the reference rectangles were measured with.
+ // These constants will be scaled if the image sent is not the same size.
+ static const cv::Size kMeasurementsImageSize() { return {640, 480}; };
+ static const cv::Rect kReferenceRed() { return {440, 150, 50, 130}; };
+ static const cv::Rect kReferenceBlue() { return {440, 350, 30, 100}; };
+ static const cv::Rect kBallLocation() { return {100, 400, 140, 50}; };
+
+ // Constants used to filter out pixels that don't have good color information
+ static constexpr double kMinSaturation = 128;
+ static constexpr double kMinValue = 25;
+ static constexpr double kMaxValue = 230;
+
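+ // Maximum difference in OpenCV hue units (degrees / 2) between the ball
+ // patch and a reference patch for the ball to be considered that color.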
+ static constexpr double kMaxHueDistance = 10;
+
+ BallColorDetector(aos::EventLoop *event_loop);
+
+ void ProcessImage(const CameraImage &camera_image);
+
+ // We look at three parts of the image: two reference locations where there
+ // will be red and blue markers that should match the ball, and then the
+ // location in the catapult where we expect to see the ball. We then compute
+ // the average hue of each patch but discard pixels that we deem not colorful
+ // enough. Then we decide whether the ball color looks close enough to either
+ // of the reference colors. If no good color is detected, outputs kInvalid.
+ static aos::Alliance DetectColor(cv::Mat image);
+
+ static cv::Mat SubImage(cv::Mat image, cv::Rect location);
+
+ static cv::Rect RescaleRect(cv::Mat image, cv::Rect location,
+ cv::Size original_size);
+ static double mean_hue(cv::Mat hsv_image);
+
+ private:
+ aos::Sender<BallColor> ball_color_sender_;
+};
+} // namespace vision
+} // namespace y2022
+#endif  // Y2022_VISION_BALL_COLOR_H_
diff --git a/y2022/vision/ball_color_main.cc b/y2022/vision/ball_color_main.cc
new file mode 100644
index 0000000..63f9d06
--- /dev/null
+++ b/y2022/vision/ball_color_main.cc
@@ -0,0 +1,35 @@
+#include "aos/events/shm_event_loop.h"
+#include "aos/init.h"
+#include "y2022/vision/ball_color.h"
+
+// Config flag used to allow running ball_color_detector independently. E.g.,
+// bazel run //y2022/vision:ball_color_detector -- --config
+// y2022/aos_config.json
+// --override_hostname pi-7971-1 --ignore_timestamps true
+DEFINE_string(config, "aos_config.json", "Path to the config file to use.");
+
+namespace y2022 {
+namespace vision {
+namespace {
+
+using namespace frc971::vision;
+
+void BallColorDetectorMain() {
+ aos::FlatbufferDetachedBuffer<aos::Configuration> config =
+ aos::configuration::ReadConfig(FLAGS_config);
+
+ aos::ShmEventLoop event_loop(&config.message());
+
+ BallColorDetector ball_color_detector(&event_loop);
+
+ event_loop.Run();
+}
+
+} // namespace
+} // namespace vision
+} // namespace y2022
+
+int main(int argc, char **argv) {
+ aos::InitGoogle(&argc, &argv);
+ y2022::vision::BallColorDetectorMain();
+}
diff --git a/y2022/vision/ball_color_test.cc b/y2022/vision/ball_color_test.cc
new file mode 100644
index 0000000..695791b
--- /dev/null
+++ b/y2022/vision/ball_color_test.cc
@@ -0,0 +1,147 @@
+#include "y2022/vision/ball_color.h"
+
+#include <cmath>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/imgproc.hpp>
+
+#include "aos/events/simulated_event_loop.h"
+#include "aos/json_to_flatbuffer.h"
+#include "aos/testing/test_logging.h"
+#include "glog/logging.h"
+#include "gtest/gtest.h"
+#include "y2022/constants.h"
+
+DEFINE_string(output_folder, "",
+ "If set, logs all channels to the provided logfile.");
+
+namespace y2022::vision::testing {
+
+class BallColorTest : public ::testing::Test {
+ public:
+ BallColorTest()
+ : config_(aos::configuration::ReadConfig("y2022/aos_config.json")),
+ event_loop_factory_(&config_.message()),
+ logger_pi_(aos::configuration::GetNode(
+ event_loop_factory_.configuration(), "logger")),
+ roborio_(aos::configuration::GetNode(
+ event_loop_factory_.configuration(), "roborio")),
+ camera_event_loop_(
+ event_loop_factory_.MakeEventLoop("Camera", logger_pi_)),
+ color_detector_event_loop_(event_loop_factory_.MakeEventLoop(
+ "Ball color detector", logger_pi_)),
+ superstructure_event_loop_(
+ event_loop_factory_.MakeEventLoop("Superstructure", roborio_)),
+ ball_color_fetcher_(superstructure_event_loop_->MakeFetcher<BallColor>(
+ "/superstructure")),
+ image_sender_(camera_event_loop_->MakeSender<CameraImage>("/camera"))
+
+ {}
+
+ // copied from camera_reader.cc
+ void SendImage(cv::Mat bgr_image) {
+ cv::Mat image_color_mat;
+ cv::cvtColor(bgr_image, image_color_mat, cv::COLOR_BGR2YUV);
+
+ // Convert YUV (3 channels) to YUYV (stacked format)
+ std::vector<uint8_t> yuyv;
+ for (int i = 0; i < image_color_mat.rows; i++) {
+ for (int j = 0; j < image_color_mat.cols; j++) {
+ // Always push a Y value
+ yuyv.emplace_back(image_color_mat.at<cv::Vec3b>(i, j)[0]);
+ if ((j % 2) == 0) {
+ // If column # is even, push a U value.
+ yuyv.emplace_back(image_color_mat.at<cv::Vec3b>(i, j)[1]);
+ } else {
+ // If column # is odd, push a V value.
+ yuyv.emplace_back(image_color_mat.at<cv::Vec3b>(i, j)[2]);
+ }
+ }
+ }
+
+ CHECK_EQ(static_cast<int>(yuyv.size()),
+ image_color_mat.rows * image_color_mat.cols * 2);
+
+ auto builder = image_sender_.MakeBuilder();
+ auto image_offset = builder.fbb()->CreateVector(yuyv);
+ auto image_builder = builder.MakeBuilder<CameraImage>();
+
+ int64_t timestamp = aos::monotonic_clock::now().time_since_epoch().count();
+
+ image_builder.add_rows(image_color_mat.rows);
+ image_builder.add_cols(image_color_mat.cols);
+ image_builder.add_data(image_offset);
+ image_builder.add_monotonic_timestamp_ns(timestamp);
+
+ builder.CheckOk(builder.Send(image_builder.Finish()));
+ }
+
+ aos::FlatbufferDetachedBuffer<aos::Configuration> config_;
+ aos::SimulatedEventLoopFactory event_loop_factory_;
+ const aos::Node *const logger_pi_;
+ const aos::Node *const roborio_;
+ ::std::unique_ptr<::aos::EventLoop> camera_event_loop_;
+ ::std::unique_ptr<::aos::EventLoop> color_detector_event_loop_;
+ ::std::unique_ptr<::aos::EventLoop> superstructure_event_loop_;
+ aos::Fetcher<BallColor> ball_color_fetcher_;
+ aos::Sender<CameraImage> image_sender_;
+};
+
+TEST_F(BallColorTest, DetectColorFromTestImage) {
+ cv::Mat bgr_image =
+ cv::imread("y2022/vision/test_ball_color_image.jpg", cv::IMREAD_COLOR);
+
+ ASSERT_TRUE(bgr_image.data != nullptr);
+
+ aos::Alliance detected_color = BallColorDetector::DetectColor(bgr_image);
+
+ EXPECT_EQ(detected_color, aos::Alliance::kRed);
+}
+
+TEST_F(BallColorTest, DetectColorFromTestImageInEventLoop) {
+ cv::Mat bgr_image =
+ cv::imread("y2022/vision/test_ball_color_image.jpg", cv::IMREAD_COLOR);
+ ASSERT_TRUE(bgr_image.data != nullptr);
+
+ BallColorDetector detector(color_detector_event_loop_.get());
+
+ camera_event_loop_->OnRun([this, bgr_image]() { SendImage(bgr_image); });
+
+ event_loop_factory_.RunFor(std::chrono::milliseconds(5));
+
+ ASSERT_TRUE(ball_color_fetcher_.Fetch());
+
+ EXPECT_TRUE(ball_color_fetcher_->has_ball_color());
+ EXPECT_EQ(ball_color_fetcher_->ball_color(), aos::Alliance::kRed);
+}
+
+TEST_F(BallColorTest, TestRescaling) {
+ cv::Mat mat(cv::Size(320, 240), CV_8UC3);
+ cv::Rect new_rect = BallColorDetector::RescaleRect(
+ mat, cv::Rect(30, 30, 30, 30), cv::Size(1920, 1080));
+
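+ // x and width scale by 320 / 1920 = 1 / 6, so 30 -> 5; y and height scale
+ // by 240 / 1080 = 2 / 9, so 30 -> 6.67, truncated to 6 by the integer Rect.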
+ EXPECT_EQ(new_rect, cv::Rect(5, 6, 5, 6));
+}
+
+TEST_F(BallColorTest, TestAreas) {
+ cv::Mat bgr_image =
+ cv::imread("y2022/vision/test_ball_color_image.jpg", cv::IMREAD_COLOR);
+ ASSERT_TRUE(bgr_image.data != nullptr);
+
+ cv::Rect reference_red = BallColorDetector::RescaleRect(
+ bgr_image, BallColorDetector::kReferenceRed(),
+ BallColorDetector::kMeasurementsImageSize());
+ cv::Rect reference_blue = BallColorDetector::RescaleRect(
+ bgr_image, BallColorDetector::kReferenceBlue(),
+ BallColorDetector::kMeasurementsImageSize());
+ cv::Rect ball_location = BallColorDetector::RescaleRect(
+ bgr_image, BallColorDetector::kBallLocation(),
+ BallColorDetector::kMeasurementsImageSize());
+
+ cv::rectangle(bgr_image, reference_red, cv::Scalar(0, 0, 255));
+ cv::rectangle(bgr_image, reference_blue, cv::Scalar(255, 0, 0));
+ cv::rectangle(bgr_image, ball_location, cv::Scalar(0, 255, 0));
+
+ cv::imwrite("/tmp/rectangles.jpg", bgr_image);
+}
+
+} // namespace y2022::vision::testing
diff --git a/y2022/vision/camera_definition.py b/y2022/vision/camera_definition.py
index 219a41f..61789cb 100644
--- a/y2022/vision/camera_definition.py
+++ b/y2022/vision/camera_definition.py
@@ -100,13 +100,13 @@
if pi_number == "pi1":
camera_yaw = 90.0 * np.pi / 180.0
- T = np.array([-7.0 * 0.0254, 3.5 * 0.0254, 32.0 * 0.0254])
+ T = np.array([-8.25 * 0.0254, 3.25 * 0.0254, 32.0 * 0.0254])
elif pi_number == "pi2":
camera_yaw = 0.0
- T = np.array([-7.0 * 0.0254, -3.0 * 0.0254, 34.0 * 0.0254])
+ T = np.array([-7.5 * 0.0254, -3.5 * 0.0254, 34.0 * 0.0254])
elif pi_number == "pi3":
camera_yaw = 179.0 * np.pi / 180.0
- T = np.array([-1.0 * 0.0254, 8.5 * 0.0254, 34.0 * 0.0254])
+ T = np.array([-1.0 * 0.0254, 8.5 * 0.0254, 34.25 * 0.0254])
elif pi_number == "pi4":
camera_yaw = -90.0 * np.pi / 180.0
T = np.array([-9.0 * 0.0254, -5 * 0.0254, 27.5 * 0.0254])
diff --git a/y2022/vision/target_estimator.cc b/y2022/vision/target_estimator.cc
index 2526355..9eef390 100644
--- a/y2022/vision/target_estimator.cc
+++ b/y2022/vision/target_estimator.cc
@@ -98,6 +98,12 @@
const std::array<cv::Point3d, 4> TargetEstimator::kMiddleTapePiecePoints =
ComputeMiddleTapePiecePoints();
+namespace {
+constexpr double kDefaultDistance = 3.0;
+constexpr double kDefaultYaw = M_PI;
+constexpr double kDefaultAngleToCamera = 0.0;
+} // namespace
+
TargetEstimator::TargetEstimator(cv::Mat intrinsics, cv::Mat extrinsics)
: blob_stats_(),
middle_blob_index_(0),
@@ -105,9 +111,9 @@
image_(std::nullopt),
roll_(0.0),
pitch_(0.0),
- yaw_(M_PI),
- distance_(3.0),
- angle_to_camera_(0.0),
+ yaw_(kDefaultYaw),
+ distance_(kDefaultDistance),
+ angle_to_camera_(kDefaultAngleToCamera),
// Seed camera height
camera_height_(extrinsics.at<double>(2, 3) +
constants::Values::kImuHeight()) {
@@ -209,6 +215,19 @@
// TODO(milind): seed with localizer output as well
+ // If we didn't solve well last time, seed everything at the defaults so we
+ // don't get stuck in a bad state.
+ // Copied from localizer.cc
+ constexpr double kMinConfidence = 0.75;
+ if (confidence_ < kMinConfidence) {
+ roll_ = roll_seed;
+ pitch_ = pitch_seed;
+ yaw_ = kDefaultYaw;
+ distance_ = kDefaultDistance;
+ angle_to_camera_ = kDefaultAngleToCamera;
+ camera_height_ = extrinsics_(2, 3) + constants::Values::kImuHeight();
+ }
+
// Constrain the rotation to be around the localizer's, otherwise there can be
// multiple solutions. There shouldn't be too much roll or pitch
if (FLAGS_freeze_roll) {
@@ -607,7 +626,9 @@
const auto kTextColor = cv::Scalar(0, 255, 255);
constexpr double kFontScale = 0.6;
- cv::putText(view_image, absl::StrFormat("Distance: %.3f", distance_),
+ cv::putText(view_image,
+ absl::StrFormat("Distance: %.3f m (%.3f in)", distance_,
+ distance_ / 0.0254),
cv::Point(kTextX, text_y += kTextSpacing),
cv::FONT_HERSHEY_DUPLEX, kFontScale, kTextColor, 2);
cv::putText(view_image,
diff --git a/y2022/vision/test_ball_color_image.jpg b/y2022/vision/test_ball_color_image.jpg
new file mode 100644
index 0000000..8750460
--- /dev/null
+++ b/y2022/vision/test_ball_color_image.jpg
Binary files differ
diff --git a/y2022/vision/viewer.cc b/y2022/vision/viewer.cc
index a21c09f..446f1f6 100644
--- a/y2022/vision/viewer.cc
+++ b/y2022/vision/viewer.cc
@@ -274,15 +274,14 @@
blob_result.filtered_blobs.size()
<< ")";
+ estimator.Solve(blob_result.filtered_stats,
+ FLAGS_display_estimation ? std::make_optional(ret_image)
+ : std::nullopt);
if (blob_result.filtered_blobs.size() > 0) {
- estimator.Solve(blob_result.filtered_stats,
- FLAGS_display_estimation ? std::make_optional(ret_image)
- : std::nullopt);
estimator.DrawEstimate(ret_image);
LOG(INFO) << "Read file " << (it - file_list.begin()) << ": " << *it;
}
-
cv::imshow("image", image_mat);
cv::imshow("mask", blob_result.binarized_image);
cv::imshow("blobs", ret_image);
diff --git a/y2022/www/field_handler.ts b/y2022/www/field_handler.ts
index bda6828..6e0f08d 100644
--- a/y2022/www/field_handler.ts
+++ b/y2022/www/field_handler.ts
@@ -278,34 +278,6 @@
// Draw the matches with debugging information from the localizer.
const now = Date.now() / 1000.0;
- for (const [time, value] of this.localizerImageMatches) {
- const age = now - time;
- const kRemovalAge = 2.0;
- if (age > kRemovalAge) {
- this.localizerImageMatches.delete(time);
- continue;
- }
- const ageAlpha = (kRemovalAge - age) / kRemovalAge
- for (let i = 0; i < value.targetsLength(); i++) {
- const imageDebug = value.targets(i);
- const x = imageDebug.impliedRobotX();
- const y = imageDebug.impliedRobotY();
- const theta = imageDebug.impliedRobotTheta();
- const cameraX = imageDebug.cameraX();
- const cameraY = imageDebug.cameraY();
- const cameraTheta = imageDebug.cameraTheta();
- const accepted = imageDebug.accepted();
- // Make camera readings fade over time.
- const alpha = Math.round(255 * ageAlpha).toString(16).padStart(2, '0');
- const dashed = false;
- const acceptedRgb = accepted ? '#00FF00' : '#FF0000';
- const acceptedRgba = acceptedRgb + alpha;
- const cameraRgb = PI_COLORS[imageDebug.camera()];
- const cameraRgba = cameraRgb + alpha;
- this.drawRobot(x, y, theta, null, acceptedRgba, dashed, false);
- this.drawCamera(cameraX, cameraY, cameraTheta, cameraRgba, false);
- }
- }
if (this.superstructureStatus) {
this.shotDistance.innerHTML = this.superstructureStatus.aimer() ?
(this.superstructureStatus.aimer().shotDistance() /
@@ -422,6 +394,36 @@
null);
}
+ for (const [time, value] of this.localizerImageMatches) {
+ const age = now - time;
+ const kRemovalAge = 1.0;
+ if (age > kRemovalAge) {
+ this.localizerImageMatches.delete(time);
+ continue;
+ }
+ const kMaxImageAlpha = 0.5;
+ const ageAlpha = kMaxImageAlpha * (kRemovalAge - age) / kRemovalAge
+ for (let i = 0; i < value.targetsLength(); i++) {
+ const imageDebug = value.targets(i);
+ const x = imageDebug.impliedRobotX();
+ const y = imageDebug.impliedRobotY();
+ const theta = imageDebug.impliedRobotTheta();
+ const cameraX = imageDebug.cameraX();
+ const cameraY = imageDebug.cameraY();
+ const cameraTheta = imageDebug.cameraTheta();
+ const accepted = imageDebug.accepted();
+ // Make camera readings fade over time.
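+ // ageAlpha is in [0, kMaxImageAlpha], so this yields a two-digit hex byte
+ // that gets appended to the '#RRGGBB' color strings below.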
+ const alpha = Math.round(255 * ageAlpha).toString(16).padStart(2, '0');
+ const dashed = false;
+ const acceptedRgb = accepted ? '#00FF00' : '#FF0000';
+ const acceptedRgba = acceptedRgb + alpha;
+ const cameraRgb = PI_COLORS[imageDebug.camera()];
+ const cameraRgba = cameraRgb + alpha;
+ this.drawRobot(x, y, theta, null, acceptedRgba, dashed, false);
+ this.drawCamera(cameraX, cameraY, cameraTheta, cameraRgba, false);
+ }
+ }
+
window.requestAnimationFrame(() => this.draw());
}
diff --git a/y2022/y2022_logger.json b/y2022/y2022_logger.json
index f811dc8..f54ccd7 100644
--- a/y2022/y2022_logger.json
+++ b/y2022/y2022_logger.json
@@ -19,6 +19,38 @@
]
},
{
+ "name": "/superstructure",
+ "type": "y2022.vision.BallColor",
+ "source_node": "logger",
+ "logger": "LOCAL_AND_REMOTE_LOGGER",
+ "logger_nodes": [
+ "roborio"
+ ],
+ "frequency": 200,
+ "num_senders": 2,
+ "max_size": 72,
+ "destination_nodes": [
+ {
+ "name": "roborio",
+ "priority": 2,
+ "timestamp_logger": "LOCAL_AND_REMOTE_LOGGER",
+ "timestamp_logger_nodes": [
+ "roborio"
+ ],
+ "time_to_live": 5000000
+ }
+ ]
+ },
+ {
+ "name": "/aos/remote_timestamps/roborio/superstructure/y2022-vision-BallColor",
+ "type": "aos.message_bridge.RemoteMessage",
+ "source_node": "logger",
+ "logger": "NOT_LOGGED",
+ "frequency": 20,
+ "num_senders": 2,
+ "max_size": 200
+ },
+ {
"name": "/drivetrain",
"type": "frc971.control_loops.drivetrain.Position",
"source_node": "roborio",
@@ -72,7 +104,7 @@
"type": "aos.message_bridge.RemoteMessage",
"source_node": "roborio",
"logger": "NOT_LOGGED",
- "frequency": 200,
+ "frequency": 400,
"num_senders": 2,
"max_size": 400
},
@@ -481,6 +513,13 @@
"nodes": [
"logger"
]
+ },
+ {
+ "name": "ball_color_detector",
+ "executable_name": "ball_color_detector",
+ "nodes": [
+ "logger"
+ ]
}
],
"nodes": [
diff --git a/y2022/y2022_pi_template.json b/y2022/y2022_pi_template.json
index 6eddf9a..bcd3f6b 100644
--- a/y2022/y2022_pi_template.json
+++ b/y2022/y2022_pi_template.json
@@ -181,7 +181,7 @@
"source_node": "pi{{ NUM }}",
"frequency": 25,
"num_senders": 2,
- "max_size": 20000,
+ "max_size": 40000,
"logger": "LOCAL_AND_REMOTE_LOGGER",
"logger_nodes": [
"imu",
@@ -211,14 +211,14 @@
{
"name": "/pi{{ NUM }}/aos/remote_timestamps/imu/pi{{ NUM }}/camera/y2022-vision-TargetEstimate",
"type": "aos.message_bridge.RemoteMessage",
- "frequency": 20,
+ "frequency": 40,
"source_node": "pi{{ NUM }}",
"max_size": 208
},
{
"name": "/pi{{ NUM }}/aos/remote_timestamps/logger/pi{{ NUM }}/camera/y2022-vision-TargetEstimate",
"type": "aos.message_bridge.RemoteMessage",
- "frequency": 20,
+ "frequency": 40,
"source_node": "pi{{ NUM }}",
"max_size": 208
},
diff --git a/y2022/y2022_roborio.json b/y2022/y2022_roborio.json
index 93e0483..8651923 100644
--- a/y2022/y2022_roborio.json
+++ b/y2022/y2022_roborio.json
@@ -240,7 +240,7 @@
"name": "/superstructure",
"type": "y2022.control_loops.superstructure.Status",
"source_node": "roborio",
- "frequency": 200,
+ "frequency": 400,
"num_senders": 2,
"logger": "LOCAL_AND_REMOTE_LOGGER",
"logger_nodes": [
@@ -317,6 +317,21 @@
"num_senders": 2
},
{
+ "name": "/superstructure",
+ "type": "y2022.vision.BallColor",
+ "source_node": "logger",
+ "frequency": 200,
+ "num_senders": 2,
+ "max_size": 72,
+ "destination_nodes": [
+ {
+ "name": "roborio",
+ "priority": 2,
+ "time_to_live": 500000000
+ }
+ ]
+ },
+ {
"name": "/drivetrain",
"type": "frc971.sensors.GyroReading",
"source_node": "roborio",