Create y2024_bot3 folder
Change-Id: Iee232bde58c0425920a49eee145a0f93c7485391
Signed-off-by: Yash Maheshwari <yashmahe2018@gmail.com>
Signed-off-by: James (Peilun) Li <jamespeilunli@gmail.com>
diff --git a/y2024_bot3/vision/BUILD b/y2024_bot3/vision/BUILD
new file mode 100644
index 0000000..b55d35d
--- /dev/null
+++ b/y2024_bot3/vision/BUILD
@@ -0,0 +1,54 @@
+filegroup(
+ name = "image_streamer_start",
+ srcs = ["image_streamer_start.sh"],
+ visibility = ["//visibility:public"],
+)
+
+cc_binary(
+ name = "apriltag_detector",
+ srcs = [
+ "apriltag_detector.cc",
+ "vision_util.cc",
+ "vision_util.h",
+ ],
+ features = ["cuda"],
+ target_compatible_with = ["@platforms//cpu:arm64"],
+ visibility = ["//visibility:public"],
+ deps = [
+ "//aos:configuration",
+ "//aos:init",
+ "//aos/events:shm_event_loop",
+ "//frc971/orin:gpu_apriltag_lib",
+ "//third_party:cudart",
+ "//third_party/apriltag",
+ "//y2024_bot3/constants:constants_fbs",
+ "@com_github_nvidia_cccl//:cccl",
+ "@com_google_absl//absl/flags:flag",
+ "@com_google_absl//absl/log",
+ "@com_google_absl//absl/log:check",
+ ],
+)
+
+cc_binary(
+ name = "viewer",
+ srcs = [
+ "viewer.cc",
+ "vision_util.cc",
+ "vision_util.h",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = [
+ "//y2024_bot3:__subpackages__",
+ ],
+ deps = [
+ "//aos:init",
+ "//aos:json_to_flatbuffer",
+ "//aos/events:shm_event_loop",
+ "//frc971/constants:constants_sender_lib",
+ "//frc971/vision:vision_fbs",
+ "//frc971/vision:vision_util_lib",
+ "//third_party:opencv",
+ "//y2024_bot3/constants:constants_fbs",
+ "@com_google_absl//absl/strings",
+ ],
+)
diff --git a/y2024_bot3/vision/apriltag_detector.cc b/y2024_bot3/vision/apriltag_detector.cc
new file mode 100644
index 0000000..2c73c58
--- /dev/null
+++ b/y2024_bot3/vision/apriltag_detector.cc
@@ -0,0 +1,54 @@
+
+#include <string>
+
+#include "absl/flags/flag.h"
+
+#include "aos/init.h"
+#include "frc971/orin/gpu_apriltag.h"
+#include "y2024_bot3/constants/constants_generated.h"
+#include "y2024_bot3/vision/vision_util.h"
+
+ABSL_FLAG(std::string, channel, "/camera", "Channel name");
+ABSL_FLAG(std::string, config, "aos_config.json",
+ "Path to the config file to use.");
+
+void GpuApriltagDetector() {
+ aos::FlatbufferDetachedBuffer<aos::Configuration> config =
+ aos::configuration::ReadConfig(absl::GetFlag(FLAGS_config));
+
+ frc971::constants::WaitForConstants<y2024_bot3::Constants>(&config.message());
+
+ aos::ShmEventLoop event_loop(&config.message());
+
+ const frc971::constants::ConstantsFetcher<y2024_bot3::Constants>
+ calibration_data(&event_loop);
+
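+ // The channel is expected to look like "/cameraN"; the camera number is the trailing digit.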
+ CHECK(absl::GetFlag(FLAGS_channel).length() == 8);
+ int camera_id = std::stoi(absl::GetFlag(FLAGS_channel).substr(7, 1));
+ const frc971::vision::calibration::CameraCalibration *calibration =
+ y2024_bot3::vision::FindCameraCalibration(
+ calibration_data.constants(),
+ event_loop.node()->name()->string_view(), camera_id);
+
+ frc971::apriltag::ApriltagDetector detector(
+ &event_loop, absl::GetFlag(FLAGS_channel), calibration);
+
+ // TODO(austin): Figure out our core pinning strategy.
+ // event_loop.SetRuntimeAffinity(aos::MakeCpusetFromCpus({5}));
+
+ LOG(INFO) << "Setting scheduler priority";
+ struct sched_param param;
+ param.sched_priority = 21;
+ PCHECK(sched_setscheduler(0, SCHED_FIFO, &param) == 0);
+
+ LOG(INFO) << "Running event loop";
+ // TODO(austin): Pre-warm it...
+ event_loop.Run();
+} // GpuApriltagDetector
+
+int main(int argc, char **argv) {
+ aos::InitGoogle(&argc, &argv);
+ GpuApriltagDetector();
+
+ return 0;
+}
diff --git a/y2024_bot3/vision/image_streamer_start.sh b/y2024_bot3/vision/image_streamer_start.sh
new file mode 100755
index 0000000..48d9da7
--- /dev/null
+++ b/y2024_bot3/vision/image_streamer_start.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+# Some configurations to avoid dropping frames
+# 640x480@30fps, 400x300@60fps.
+# Bitrate 500000-1500000
+WIDTH=640
+HEIGHT=480
+BITRATE=1500000
+LISTEN_ON="/camera/downsized"
+# Don't interfere with field webpage
+STREAMING_PORT=1181
+
+# Handle weirdness with openssl and gstreamer
+export OPENSSL_CONF=""
+
+# Enable for verbose logging
+#export GST_DEBUG=4
+
+export LD_LIBRARY_PATH=/usr/lib/aarch64-linux-gnu/gstreamer-1.0
+
+exec ./image_streamer --width=$WIDTH --height=$HEIGHT --bitrate=$BITRATE --listen_on=$LISTEN_ON --config=aos_config.json --streaming_port=$STREAMING_PORT
+
diff --git a/y2024_bot3/vision/maps/BUILD b/y2024_bot3/vision/maps/BUILD
new file mode 100644
index 0000000..38191a4
--- /dev/null
+++ b/y2024_bot3/vision/maps/BUILD
@@ -0,0 +1,7 @@
+filegroup(
+ name = "maps",
+ srcs = glob([
+ "*.json",
+ ]),
+ visibility = ["//visibility:public"],
+)
diff --git a/y2024_bot3/vision/maps/target_map.json b/y2024_bot3/vision/maps/target_map.json
new file mode 100644
index 0000000..2a8dfef
--- /dev/null
+++ b/y2024_bot3/vision/maps/target_map.json
@@ -0,0 +1,236 @@
+{
+/* Targets have positive Z axis pointing into the board, positive X to the right
+ when looking at the board, and positive Y is down when looking at the board.
+ This means that you will get an identity rotation from the camera to target
+ frame when the target is upright, flat, and centered in the camera's view.
+
+ The global frame has its origin at the center of the field, positive X points
+ at the red driver's station, and positive Z points straight up. (A short
+ sanity check of this convention follows this file.)
+ */
+ "target_poses": [
+ {
+ "id": 1,
+ "position": {
+ "x": 6.809,
+ "y": -3.860,
+ "z": 1.361
+ },
+ "orientation": {
+ "w": 0.1830127,
+ "x": -0.1830127,
+ "y": 0.6830127,
+ "z": -0.6830127
+ }
+ },
+ {
+ "id": 2,
+ "position": {
+ "x": 7.915,
+ "y": -3.223,
+ "z": 1.361
+ },
+ "orientation": {
+ "w": 0.1830127,
+ "x": -0.1830127,
+ "y": 0.6830127,
+ "z": -0.6830127
+ }
+ },
+ {
+ "id": 3,
+ "position": {
+ "x": 8.309,
+ "y": 0.877,
+ "z": 1.456
+ },
+ "orientation": {
+ "w": 0.5,
+ "x": -0.5,
+ "y": 0.5,
+ "z": -0.5
+ }
+ },
+ {
+ "id": 4,
+ "position": {
+ "x": 8.309,
+ "y": 1.442,
+ "z": 1.456
+ },
+ "orientation": {
+ "w": 0.5,
+ "x": -0.5,
+ "y": 0.5,
+ "z": -0.5
+ }
+ },
+ {
+ "id": 5,
+ "position": {
+ "x": 6.428,
+ "y": 4.099,
+ "z": 1.361
+ },
+ "orientation": {
+ "w": 0.7071068,
+ "x": -0.7071068,
+ "y": 0.0,
+ "z": 0.0
+ }
+ },
+ {
+ "id": 6,
+ "position": {
+ "x": -6.430,
+ "y": 4.099,
+ "z": 1.361
+ },
+ "orientation": {
+ "w": 0.7071068,
+ "x": -0.7071068,
+ "y": 0.0,
+ "z": 0.0
+ }
+ },
+ {
+ "id": 7,
+ "position": {
+ "x": -8.309,
+ "y": 1.442,
+ "z": 1.474
+ },
+ "orientation": {
+ "w": 0.5,
+ "x": -0.5,
+ "y": -0.5,
+ "z": 0.5
+ }
+ },
+ {
+ "id": 8,
+ "position": {
+ "x": -8.309,
+ "y": 0.877,
+ "z": 1.474
+ },
+ "orientation": {
+ "w": 0.5,
+ "x": -0.5,
+ "y": -0.5,
+ "z": 0.5
+ }
+ },
+ {
+ "id": 9,
+ "position": {
+ "x": -7.915,
+ "y": -3.223,
+ "z": 1.361
+ },
+ "orientation": {
+ "w": 0.1830127,
+ "x": -0.1830127,
+ "y": -0.6830127,
+ "z": 0.6830127
+ }
+ },
+ {
+ "id": 10,
+ "position": {
+ "x": -6.809,
+ "y": -3.860,
+ "z": 1.361
+ },
+ "orientation": {
+ "w": 0.1830127,
+ "x": -0.1830127,
+ "y": -0.6830127,
+ "z": 0.6830127
+ }
+ },
+ {
+ "id": 11,
+ "position": {
+ "x": 3.629,
+ "y": -0.393,
+ "z": 1.326
+ },
+ "orientation": {
+ "w": 0.6830127,
+ "x": -0.6830127,
+ "y": -0.1830127,
+ "z": 0.1830127
+ }
+ },
+ {
+ "id": 12,
+ "position": {
+ "x": 3.630,
+ "y": 0.392,
+ "z": 1.326
+ },
+ "orientation": {
+ "w": 0.1830127,
+ "x": -0.1830127,
+ "y": -0.6830127,
+ "z": 0.6830127
+ }
+ },
+ {
+ "id": 13,
+ "position": {
+ "x": 2.949,
+ "y": -0.000,
+ "z": 1.326
+ },
+ "orientation": {
+ "w": 0.5,
+ "x": -0.5,
+ "y": 0.5,
+ "z": -0.5
+ }
+ },
+ {
+ "id": 14,
+ "position": {
+ "x": -2.949,
+ "y": -0.000,
+ "z": 1.326
+ },
+ "orientation": {
+ "w": 0.5,
+ "x": -0.5,
+ "y": -0.5,
+ "z": 0.5
+ }
+ },
+ {
+ "id": 15,
+ "position": {
+ "x": -3.629,
+ "y": 0.393,
+ "z": 1.326
+ },
+ "orientation": {
+ "w": 0.1830127,
+ "x": -0.1830127,
+ "y": 0.6830127,
+ "z": -0.6830127
+ }
+ },
+ {
+ "id": 16,
+ "position": {
+ "x": -3.630,
+ "y": -0.392,
+ "z": 1.326
+ },
+ "orientation": {
+ "w": 0.6830127,
+ "x": -0.6830127,
+ "y": 0.1830127,
+ "z": -0.1830127
+ }
+ }
+ ]
+}
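
As a quick sanity check of the frame convention described at the top of target_map.json: reading each orientation as the rotation from the target frame to the global frame, the quaternion listed for tag 3 (w 0.5, x -0.5, y 0.5, z -0.5) should send the target's +Z axis ("into the board") to global +X, i.e. a tag on the red alliance wall faces back toward the center of the field. A minimal sketch of that check, assuming Eigen is available; this block is illustrative and not part of the patch:

#include <iostream>

#include <Eigen/Geometry>

int main() {
  // Quaternion for tag 3, copied from target_map.json above: (w, x, y, z).
  Eigen::Quaterniond target_to_field(0.5, -0.5, 0.5, -0.5);
  // Column 2 of the rotation matrix is the target's +Z axis ("into the
  // board") expressed in the global frame.  It prints as roughly (1, 0, 0):
  // the board normal points at the red driver's station, so the tag face
  // looks back toward the center of the field.
  std::cout << target_to_field.toRotationMatrix().col(2).transpose()
            << std::endl;
  return 0;
}
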
diff --git a/y2024_bot3/vision/viewer.cc b/y2024_bot3/vision/viewer.cc
new file mode 100644
index 0000000..2d99fd5
--- /dev/null
+++ b/y2024_bot3/vision/viewer.cc
@@ -0,0 +1,122 @@
+#include "absl/strings/match.h"
+#include <opencv2/calib3d.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/imgproc.hpp>
+
+#include "aos/events/shm_event_loop.h"
+#include "aos/init.h"
+#include "aos/json_to_flatbuffer.h"
+#include "aos/time/time.h"
+#include "frc971/constants/constants_sender_lib.h"
+#include "frc971/vision/vision_generated.h"
+#include "frc971/vision/vision_util_lib.h"
+#include "y2024_bot3/vision/vision_util.h"
+
+ABSL_FLAG(std::string, capture, "",
+ "If set, capture a single image and save it to this filename.");
+ABSL_FLAG(std::string, channel, "/camera", "Channel name for the image.");
+ABSL_FLAG(std::string, config, "aos_config.json",
+ "Path to the config file to use.");
+ABSL_FLAG(int32_t, rate, 100, "Time in milliseconds to wait between images");
+ABSL_FLAG(double, scale, 1.0, "Scale factor for images being displayed");
+
+namespace y2024_bot3::vision {
+namespace {
+
+using frc971::vision::CameraImage;
+
+bool DisplayLoop(const cv::Mat intrinsics, const cv::Mat dist_coeffs,
+ aos::Fetcher<CameraImage> *image_fetcher) {
+ const CameraImage *image;
+
+ // Read next image
+ if (!image_fetcher->Fetch()) {
+ VLOG(2) << "Couldn't fetch next image";
+ return true;
+ }
+ image = image_fetcher->get();
+ CHECK(image != nullptr) << "Couldn't read image";
+
+ // Create color image:
+ cv::Mat image_color_mat(cv::Size(image->cols(), image->rows()), CV_8UC2,
+ (void *)image->data()->data());
+ cv::Mat bgr_image(cv::Size(image->cols(), image->rows()), CV_8UC3);
+ cv::cvtColor(image_color_mat, bgr_image, cv::COLOR_YUV2BGR_YUYV);
+
+ if (!absl::GetFlag(FLAGS_capture).empty()) {
+ if (absl::EndsWith(absl::GetFlag(FLAGS_capture), ".bfbs")) {
+ aos::WriteFlatbufferToFile(absl::GetFlag(FLAGS_capture),
+ image_fetcher->CopyFlatBuffer());
+ } else {
+ cv::imwrite(absl::GetFlag(FLAGS_capture), bgr_image);
+ }
+
+ return false;
+ }
+
+ cv::Mat undistorted_image;
+ cv::undistort(bgr_image, undistorted_image, intrinsics, dist_coeffs);
+ if (absl::GetFlag(FLAGS_scale) != 1.0) {
+ cv::resize(undistorted_image, undistorted_image, cv::Size(),
+ absl::GetFlag(FLAGS_scale), absl::GetFlag(FLAGS_scale));
+ }
+ cv::imshow("Display", undistorted_image);
+
+ int keystroke = cv::waitKey(1);
+ if ((keystroke & 0xFF) == static_cast<int>('c')) {
+ // Convert again, to get clean image
+ cv::cvtColor(image_color_mat, bgr_image, cv::COLOR_YUV2BGR_YUYV);
+ std::stringstream name;
+ name << "capture-" << aos::realtime_clock::now() << ".png";
+ cv::imwrite(name.str(), bgr_image);
+ LOG(INFO) << "Saved image file: " << name.str();
+ } else if ((keystroke & 0xFF) == static_cast<int>('q')) {
+ return false;
+ }
+ return true;
+}
+
+void ViewerMain() {
+ aos::FlatbufferDetachedBuffer<aos::Configuration> config =
+ aos::configuration::ReadConfig(absl::GetFlag(FLAGS_config));
+
+ frc971::constants::WaitForConstants<y2024_bot3::Constants>(&config.message());
+
+ aos::ShmEventLoop event_loop(&config.message());
+
+ frc971::constants::ConstantsFetcher<y2024_bot3::Constants> constants_fetcher(
+ &event_loop);
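+ // As in apriltag_detector.cc, pull the camera number out of a "/cameraN" channel name.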
+ CHECK(absl::GetFlag(FLAGS_channel).length() == 8);
+ int camera_id = std::stoi(absl::GetFlag(FLAGS_channel).substr(7, 1));
+ const auto *calibration_data = FindCameraCalibration(
+ constants_fetcher.constants(), event_loop.node()->name()->string_view(),
+ camera_id);
+ const cv::Mat intrinsics = frc971::vision::CameraIntrinsics(calibration_data);
+ const cv::Mat dist_coeffs =
+ frc971::vision::CameraDistCoeffs(calibration_data);
+
+ aos::Fetcher<CameraImage> image_fetcher =
+ event_loop.MakeFetcher<CameraImage>(absl::GetFlag(FLAGS_channel));
+
+ // Run the display loop
+ event_loop.AddPhasedLoop(
+ [&](int) {
+ if (!DisplayLoop(intrinsics, dist_coeffs, &image_fetcher)) {
+ LOG(INFO) << "Calling event_loop Exit";
+ event_loop.Exit();
+ }
+ },
+ ::std::chrono::milliseconds(absl::GetFlag(FLAGS_rate)));
+
+ event_loop.Run();
+
+ image_fetcher = aos::Fetcher<CameraImage>();
+}
+
+} // namespace
+} // namespace y2024_bot3::vision
+
+int main(int argc, char **argv) {
+ aos::InitGoogle(&argc, &argv);
+ y2024_bot3::vision::ViewerMain();
+}
diff --git a/y2024_bot3/vision/vision_util.cc b/y2024_bot3/vision/vision_util.cc
new file mode 100644
index 0000000..0f99849
--- /dev/null
+++ b/y2024_bot3/vision/vision_util.cc
@@ -0,0 +1,49 @@
+#include "y2024_bot3/vision/vision_util.h"
+
+#include "absl/log/check.h"
+#include "absl/log/log.h"
+
+namespace y2024_bot3::vision {
+
+// Returns the cameras in order, progressing around the robot / box of Orins.
+std::vector<CameraNode> CreateNodeList() {
+ std::vector<CameraNode> list;
+
+ list.push_back({.node_name = "imu", .camera_number = 0});
+ list.push_back({.node_name = "imu", .camera_number = 1});
+ list.push_back({.node_name = "orin1", .camera_number = 1});
+ list.push_back({.node_name = "orin1", .camera_number = 0});
+
+ return list;
+}
+
+// From the node list, build a map from each camera name to an index from 0 to 3.
+std::map<std::string, int> CreateOrderingMap(
+ std::vector<CameraNode> &node_list) {
+ std::map<std::string, int> map;
+
+ for (uint i = 0; i < node_list.size(); i++) {
+ map.insert({node_list.at(i).camera_name(), i});
+ }
+
+ return map;
+}
+
+const frc971::vision::calibration::CameraCalibration *FindCameraCalibration(
+ const y2024_bot3::Constants &calibration_data, std::string_view node_name,
+ int camera_number) {
+ CHECK(calibration_data.has_cameras());
+ for (const y2024_bot3::CameraConfiguration *candidate :
+ *calibration_data.cameras()) {
+ CHECK(candidate->has_calibration());
+ if (candidate->calibration()->node_name()->string_view() != node_name ||
+ candidate->calibration()->camera_number() != camera_number) {
+ continue;
+ }
+ return candidate->calibration();
+ }
+ LOG(FATAL) << ": Failed to find camera calibration for " << node_name
+ << " and camera number " << camera_number;
+}
+
+} // namespace y2024_bot3::vision
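
For reference, a hypothetical snippet (not part of the patch) showing how the helpers above fit together: CreateNodeList() fixes the physical ordering of the cameras around the robot, and CreateOrderingMap() keys that ordering by camera_name():

#include <iostream>
#include <map>
#include <string>
#include <vector>

#include "y2024_bot3/vision/vision_util.h"

int main() {
  std::vector<y2024_bot3::vision::CameraNode> nodes =
      y2024_bot3::vision::CreateNodeList();
  std::map<std::string, int> ordering =
      y2024_bot3::vision::CreateOrderingMap(nodes);
  // Resulting indices: /imu/camera0 -> 0, /imu/camera1 -> 1,
  // /orin1/camera1 -> 2, /orin1/camera0 -> 3.
  for (const auto &[name, index] : ordering) {
    std::cout << name << " -> " << index << std::endl;
  }
  return 0;
}
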
diff --git a/y2024_bot3/vision/vision_util.h b/y2024_bot3/vision/vision_util.h
new file mode 100644
index 0000000..fc0f29c
--- /dev/null
+++ b/y2024_bot3/vision/vision_util.h
@@ -0,0 +1,41 @@
+#ifndef Y2024_BOT3_VISION_VISION_UTIL_H_
+#define Y2024_BOT3_VISION_VISION_UTIL_H_
+#include <map>
+#include <string>
+#include <string_view>
+#include <vector>
+
+#include "opencv2/imgproc.hpp"
+
+#include "y2024_bot3/constants/constants_generated.h"
+
+namespace y2024_bot3::vision {
+
+// Generate unique colors for each camera
+const auto kOrinColors = std::map<std::string, cv::Scalar>{
+ {"/orin1/camera0", cv::Scalar(255, 0, 255)},
+ {"/orin1/camera1", cv::Scalar(255, 255, 0)},
+ {"/imu/camera0", cv::Scalar(0, 255, 255)},
+ {"/imu/camera1", cv::Scalar(255, 165, 0)},
+};
+
+// Structure to store node name (e.g., orin1, imu), number, and a usable string
+struct CameraNode {
+ std::string node_name;
+ int camera_number;
+
+ inline const std::string camera_name() const {
+ return "/" + node_name + "/camera" + std::to_string(camera_number);
+ }
+};
+
+std::vector<CameraNode> CreateNodeList();
+
+std::map<std::string, int> CreateOrderingMap(
+ std::vector<CameraNode> &node_list);
+
+const frc971::vision::calibration::CameraCalibration *FindCameraCalibration(
+ const y2024_bot3::Constants &calibration_data, std::string_view node_name,
+ int camera_number);
+
+} // namespace y2024_bot3::vision
+
+#endif // Y2024_BOT3_VISION_VISION_UTIL_H_