Merge "Updating 971 robot calibration and some filtering parameters for april tags"
diff --git a/WORKSPACE b/WORKSPACE
index 698c066..f411682 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -769,9 +769,9 @@
deps = ["@//third_party/allwpilib/wpimath"],
)
""",
- sha256 = "decff0a28fa4a167696cc2e1122b6a5acd2fef01d3bfd356d8cad25bb487a191",
+ sha256 = "340a9c8e726e2eb365b7a40a722df05fe7c7072c5c4a617fa0218eb6d074ad9f",
urls = [
- "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/api-cpp/23.0.10/api-cpp-23.0.10-headers.zip",
+ "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/api-cpp/23.0.11/api-cpp-23.0.11-headers.zip",
],
)
@@ -793,9 +793,9 @@
target_compatible_with = ['@//tools/platforms/hardware:roborio'],
)
""",
- sha256 = "00aea02c583d109056e2716e73b7d70e84d5c56a6daebd1dc9f4612c430894f8",
+ sha256 = "11f392bebfe54f512be9ef59809e1a10c4497e0ce92970645f054e7e04fe7ef6",
urls = [
- "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/api-cpp/23.0.10/api-cpp-23.0.10-linuxathena.zip",
+ "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/api-cpp/23.0.11/api-cpp-23.0.11-linuxathena.zip",
],
)
@@ -808,9 +808,9 @@
hdrs = glob(['ctre/**/*.h', 'ctre/**/*.hpp']),
)
""",
- sha256 = "3d503df97b711c150c0b50238f644c528e55d5b82418c8e3970c79faa14b749c",
+ sha256 = "7585e1bd9e581dd745e7f040ab521b966b40a04d05bc7fa82d6dafe2fb65764e",
urls = [
- "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/tools/23.0.10/tools-23.0.10-headers.zip",
+ "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/tools/23.0.11/tools-23.0.11-headers.zip",
],
)
@@ -832,9 +832,9 @@
target_compatible_with = ['@//tools/platforms/hardware:roborio'],
)
""",
- sha256 = "4ada1ed9e11c208da9e8a8e8a648a0fe426e6717121ebc2f1392ae3ddc7f2b8c",
+ sha256 = "b1daadfe782c43ed32c2e1a3956998f9604a3fc9282ef866fd8dc1482f3b8cc9",
urls = [
- "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/tools/23.0.10/tools-23.0.10-linuxathena.zip",
+ "https://maven.ctr-electronics.com/release/com/ctre/phoenixpro/tools/23.0.11/tools-23.0.11-linuxathena.zip",
],
)
diff --git a/frc971/can_logger/BUILD b/frc971/can_logger/BUILD
new file mode 100644
index 0000000..a3b002a
--- /dev/null
+++ b/frc971/can_logger/BUILD
@@ -0,0 +1,39 @@
+load("@com_github_google_flatbuffers//:build_defs.bzl", "flatbuffer_cc_library")
+
+cc_binary(
+ name = "can_logger",
+ srcs = [
+ "can_logger_main.cc",
+ ],
+ visibility = ["//visibility:public"],
+ deps = [
+ ":can_logger_lib",
+ "//aos:init",
+ "//aos/events:shm_event_loop",
+ "//aos/time",
+ "@com_github_google_glog//:glog",
+ ],
+)
+
+cc_library(
+ name = "can_logger_lib",
+ srcs = [
+ "can_logger.cc",
+ "can_logger.h",
+ ],
+ deps = [
+ ":can_logging_fbs",
+ "//aos/events:event_loop",
+ "//aos/scoped:scoped_fd",
+ "@com_github_google_glog//:glog",
+ ],
+)
+
+flatbuffer_cc_library(
+ name = "can_logging_fbs",
+ srcs = [
+ "can_logging.fbs",
+ ],
+ gen_reflections = 1,
+ visibility = ["//visibility:public"],
+)
diff --git a/frc971/can_logger/can_logger.cc b/frc971/can_logger/can_logger.cc
new file mode 100644
index 0000000..6b4258a
--- /dev/null
+++ b/frc971/can_logger/can_logger.cc
@@ -0,0 +1,88 @@
+#include "frc971/can_logger/can_logger.h"
+
+namespace frc971 {
+namespace can_logger {
+
+CanLogger::CanLogger(aos::EventLoop *event_loop,
+ std::string_view interface_name)
+ : fd_(socket(PF_CAN, SOCK_RAW | SOCK_NONBLOCK, CAN_RAW)),
+ frames_sender_(event_loop->MakeSender<CanFrame>("/can")) {
+ struct ifreq ifr = {};
+ // std::string_view is not null-terminated, so bound the copy explicitly.
+ CHECK_LT(interface_name.size(), sizeof(ifr.ifr_name));
+ memcpy(ifr.ifr_name, interface_name.data(), interface_name.size());
+ PCHECK(ioctl(fd_.get(), SIOCGIFINDEX, &ifr) == 0)
+ << "Failed to get index for interface " << interface_name;
+
+ int enable_canfd = true;
+ PCHECK(setsockopt(fd_.get(), SOL_CAN_RAW, CAN_RAW_FD_FRAMES, &enable_canfd,
+ sizeof(enable_canfd)) == 0)
+ << "Failed to enable CAN FD";
+
+ struct sockaddr_can addr;
+ addr.can_family = AF_CAN;
+ addr.can_ifindex = ifr.ifr_ifindex;
+
+ PCHECK(bind(fd_.get(), reinterpret_cast<struct sockaddr *>(&addr),
+ sizeof(addr)) == 0)
+ << "Failed to bind socket to interface " << interface_name;
+
+ int receive_buffer_size;
+ socklen_t opt_size = sizeof(receive_buffer_size);
+ PCHECK(getsockopt(fd_.get(), SOL_SOCKET, SO_RCVBUF, &receive_buffer_size,
+ &opt_size) == 0);
+ CHECK_EQ(opt_size, sizeof(receive_buffer_size));
+ VLOG(0) << "CAN receive buffer is " << receive_buffer_size << " bytes";
+
+ aos::TimerHandler *timer_handler = event_loop->AddTimer([this]() { Poll(); });
+ timer_handler->set_name("CAN logging Loop");
+ timer_handler->Setup(event_loop->monotonic_now(), kPollPeriod);
+}
+
+void CanLogger::Poll() {
+ VLOG(2) << "Polling";
+ int frames_read = 0;
+ while (ReadFrame()) {
+ frames_read++;
+ }
+ VLOG(1) << "Read " << frames_read << " frames to end of buffer";
+}
+
+bool CanLogger::ReadFrame() {
+ errno = 0;
+ struct canfd_frame frame;
+ ssize_t bytes_read = read(fd_.get(), &frame, sizeof(struct canfd_frame));
+
+ if (bytes_read < 0 && (errno == EAGAIN || errno == EWOULDBLOCK)) {
+ // There are no more frames sitting in the receive buffer.
+ return false;
+ }
+
+ VLOG(2) << "Read " << bytes_read << " bytes";
+ PCHECK(bytes_read > 0);
+ CHECK(bytes_read == static_cast<ssize_t>(CAN_MTU) ||
+ bytes_read == static_cast<ssize_t>(CANFD_MTU))
+ << "Incomplete CAN frame";
+
+ struct timeval tv;
+ PCHECK(ioctl(fd_.get(), SIOCGSTAMP, &tv) == 0)
+ << "Failed to get timestamp of CAN frame";
+
+ aos::Sender<CanFrame>::Builder builder = frames_sender_.MakeBuilder();
+
+ auto frame_data = builder.fbb()->CreateVector<uint8_t>(frame.data, frame.len);
+
+ CanFrame::Builder can_frame_builder = builder.MakeBuilder<CanFrame>();
+ can_frame_builder.add_can_id(frame.can_id);
+ can_frame_builder.add_data(frame_data);
+ can_frame_builder.add_monotonic_timestamp_ns(
+ static_cast<std::chrono::nanoseconds>(
+ std::chrono::seconds(tv.tv_sec) +
+ std::chrono::microseconds(tv.tv_usec))
+ .count());
+
+ builder.CheckOk(builder.Send(can_frame_builder.Finish()));
+
+ return true;
+}
+
+} // namespace can_logger
+} // namespace frc971
diff --git a/frc971/can_logger/can_logger.h b/frc971/can_logger/can_logger.h
new file mode 100644
index 0000000..cf37841
--- /dev/null
+++ b/frc971/can_logger/can_logger.h
@@ -0,0 +1,50 @@
+#ifndef FRC971_CAN_LOGGER_CAN_LOGGER_H_
+#define FRC971_CAN_LOGGER_CAN_LOGGER_H_
+
+#include <linux/can.h>
+#include <linux/can/raw.h>
+#include <linux/sockios.h>
+#include <net/if.h>
+#include <sys/ioctl.h>
+#include <sys/socket.h>
+#include <sys/time.h>
+
+#include <chrono>
+
+#include "aos/events/event_loop.h"
+#include "aos/realtime.h"
+#include "aos/scoped/scoped_fd.h"
+#include "frc971/can_logger/can_logging_generated.h"
+
+namespace frc971 {
+namespace can_logger {
+
+// This class listens to all the traffic on a SocketCAN interface and sends it
+// on the aos event loop so it can be logged with the aos logging
+// infrastructure.
+class CanLogger {
+ public:
+ static constexpr std::chrono::milliseconds kPollPeriod =
+ std::chrono::milliseconds(100);
+
+ CanLogger(aos::EventLoop *event_loop,
+ std::string_view interface_name = "can0");
+
+ CanLogger(const CanLogger &) = delete;
+ CanLogger &operator=(const CanLogger &) = delete;
+
+ private:
+ void Poll();
+
+ // Reads a CAN frame from the socket and sends it on the event loop.
+ // Returns true if successful and false if the receive buffer is empty.
+ bool ReadFrame();
+
+ aos::ScopedFD fd_;
+ aos::Sender<CanFrame> frames_sender_;
+};
+
+} // namespace can_logger
+} // namespace frc971
+
+#endif // FRC971_CAN_LOGGER_CAN_LOGGER_H_
diff --git a/frc971/can_logger/can_logger_main.cc b/frc971/can_logger/can_logger_main.cc
new file mode 100644
index 0000000..42c7162
--- /dev/null
+++ b/frc971/can_logger/can_logger_main.cc
@@ -0,0 +1,18 @@
+#include "aos/events/shm_event_loop.h"
+#include "aos/init.h"
+#include "frc971/can_logger/can_logger.h"
+
+int main(int argc, char **argv) {
+ ::aos::InitGoogle(&argc, &argv);
+
+ aos::FlatbufferDetachedBuffer<aos::Configuration> config =
+ aos::configuration::ReadConfig("aos_config.json");
+
+ ::aos::ShmEventLoop event_loop(&config.message());
+
+ frc971::can_logger::CanLogger can_logger(&event_loop, "can0");
+
+ event_loop.Run();
+
+ return 0;
+}
diff --git a/frc971/can_logger/can_logging.fbs b/frc971/can_logger/can_logging.fbs
new file mode 100644
index 0000000..d6ec8b9
--- /dev/null
+++ b/frc971/can_logger/can_logging.fbs
@@ -0,0 +1,13 @@
+namespace frc971.can_logger;
+
+// A message to represent a single CAN or CAN FD frame.
+table CanFrame {
+ // CAN id + flags
+ can_id:uint32 (id: 0);
+ // The body of the CAN frame, up to 64 bytes long.
+ data:[ubyte] (id: 1);
+ // The hardware timestamp of when the frame came in.
+ monotonic_timestamp_ns:uint64 (id: 2);
+}
+
+root_type CanFrame;
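A minimal sketch of a consumer of this table on the aos side, assuming the
generated C++ accessors and the "/can" channel configured later in this
change (hypothetical function, not part of the patch):

  #include "aos/events/event_loop.h"
  #include "frc971/can_logger/can_logging_generated.h"
  #include "glog/logging.h"

  // Log every frame published on /can.
  void WatchCanFrames(aos::EventLoop *event_loop) {
    event_loop->MakeWatcher(
        "/can", [](const frc971::can_logger::CanFrame &frame) {
          const size_t length =
              frame.data() == nullptr ? 0 : frame.data()->size();
          LOG(INFO) << "can_id: " << frame.can_id() << " len: " << length
                    << " t_ns: " << frame.monotonic_timestamp_ns();
        });
  }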
diff --git a/frc971/control_loops/drivetrain/drivetrain_config.h b/frc971/control_loops/drivetrain/drivetrain_config.h
index 51f92b1..9c17fc8 100644
--- a/frc971/control_loops/drivetrain/drivetrain_config.h
+++ b/frc971/control_loops/drivetrain/drivetrain_config.h
@@ -13,6 +13,28 @@
namespace control_loops {
namespace drivetrain {
+// What to use the top button for on the pistol grip.
+enum class PistolTopButtonUse {
+ // Normal shifting.
+ kShift,
+ // Line following (currently just uses top button).
+ kLineFollow,
+ // Don't use the top button.
+ kNone,
+};
+
+enum class PistolSecondButtonUse {
+ kTurn1,
+ kShiftLow,
+ kNone,
+};
+
+enum class PistolBottomButtonUse {
+ kControlLoopDriving,
+ kSlowDown,
+ kNone,
+};
+
enum class ShifterType : int32_t {
HALL_EFFECT_SHIFTER = 0, // Detect when in between gears.
SIMPLE_SHIFTER = 1, // Switch gears without speedmatch logic.
@@ -145,6 +167,10 @@
LineFollowConfig line_follow_config{};
+ PistolTopButtonUse top_button_use = PistolTopButtonUse::kShift;
+ PistolSecondButtonUse second_button_use = PistolSecondButtonUse::kShiftLow;
+ PistolBottomButtonUse bottom_button_use = PistolBottomButtonUse::kSlowDown;
+
// Converts the robot state to a linear distance position, velocity.
static Eigen::Matrix<Scalar, 2, 1> LeftRightToLinear(
const Eigen::Matrix<Scalar, 7, 1> &left_right) {
diff --git a/frc971/input/drivetrain_input.cc b/frc971/input/drivetrain_input.cc
index 69795dd..239eddf 100644
--- a/frc971/input/drivetrain_input.cc
+++ b/frc971/input/drivetrain_input.cc
@@ -262,7 +262,8 @@
std::unique_ptr<PistolDrivetrainInputReader> PistolDrivetrainInputReader::Make(
::aos::EventLoop *event_loop, bool default_high_gear,
- TopButtonUse top_button_use) {
+ PistolTopButtonUse top_button_use, PistolSecondButtonUse second_button_use,
+ PistolBottomButtonUse bottom_button_use) {
// Pistol Grip controller
const JoystickAxis kTriggerHigh(1, 1), kTriggerLow(1, 4),
kTriggerVelocityHigh(1, 2), kTriggerVelocityLow(1, 5),
@@ -275,6 +276,7 @@
const ButtonLocation kQuickTurn(1, 3);
const ButtonLocation kTopButton(1, 1);
+
const ButtonLocation kSecondButton(1, 2);
const ButtonLocation kBottomButton(1, 4);
// Non-existent button for nops.
@@ -282,17 +284,20 @@
// TODO(james): Make a copy assignment operator for ButtonLocation so we don't
// have to shoehorn in these ternary operators.
- const ButtonLocation kTurn1 = (top_button_use == TopButtonUse::kLineFollow)
- ? kSecondButton
- : kDummyButton;
+ const ButtonLocation kTurn1 =
+ (second_button_use == PistolSecondButtonUse::kTurn1) ? kSecondButton
+ : kDummyButton;
// Turn2 does closed loop driving.
const ButtonLocation kTurn2 =
- (top_button_use == TopButtonUse::kLineFollow) ? kTopButton : kDummyButton;
+ (top_button_use == PistolTopButtonUse::kLineFollow) ? kTopButton
+ : kDummyButton;
const ButtonLocation kShiftHigh =
- (top_button_use == TopButtonUse::kShift) ? kTopButton : kDummyButton;
+ (top_button_use == PistolTopButtonUse::kShift) ? kTopButton
+ : kDummyButton;
const ButtonLocation kShiftLow =
- (top_button_use == TopButtonUse::kShift) ? kSecondButton : kDummyButton;
+ (second_button_use == PistolSecondButtonUse::kShiftLow) ? kSecondButton
+ : kDummyButton;
std::unique_ptr<PistolDrivetrainInputReader> result(
new PistolDrivetrainInputReader(
@@ -300,7 +305,12 @@
kTriggerVelocityLow, kTriggerTorqueHigh, kTriggerTorqueLow,
kTriggerHigh, kTriggerLow, kWheelVelocityHigh, kWheelVelocityLow,
kWheelTorqueHigh, kWheelTorqueLow, kQuickTurn, kShiftHigh, kShiftLow,
- kTurn1, kTurn2, kBottomButton));
+ kTurn1,
+ (bottom_button_use == PistolBottomButtonUse::kControlLoopDriving)
+ ? kBottomButton
+ : kTurn2,
+ (top_button_use == PistolTopButtonUse::kNone) ? kTopButton
+ : kBottomButton));
result->set_default_high_gear(default_high_gear);
return result;
@@ -335,13 +345,25 @@
drivetrain_input_reader = SteeringWheelDrivetrainInputReader::Make(
event_loop, dt_config.default_high_gear);
break;
- case InputType::kPistol:
- drivetrain_input_reader = PistolDrivetrainInputReader::Make(
- event_loop, dt_config.default_high_gear,
+ case InputType::kPistol: {
+ // For backwards compatibility
+ PistolTopButtonUse top_button_use =
dt_config.pistol_grip_shift_enables_line_follow
- ? PistolDrivetrainInputReader::TopButtonUse::kLineFollow
- : PistolDrivetrainInputReader::TopButtonUse::kShift);
+ ? PistolTopButtonUse::kLineFollow
+ : dt_config.top_button_use;
+
+ PistolSecondButtonUse second_button_use = dt_config.second_button_use;
+ PistolBottomButtonUse bottom_button_use = dt_config.bottom_button_use;
+
+ if (top_button_use == PistolTopButtonUse::kLineFollow) {
+ second_button_use = PistolSecondButtonUse::kTurn1;
+ }
+
+ drivetrain_input_reader = PistolDrivetrainInputReader::Make(
+ event_loop, dt_config.default_high_gear, top_button_use,
+ second_button_use, bottom_button_use);
break;
+ }
case InputType::kXbox:
drivetrain_input_reader = XboxDrivetrainInputReader::Make(event_loop);
break;
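The backwards-compatibility mapping above is equivalent to the following
standalone sketch (hypothetical helper name; the real logic is inline in the
kPistol case):

  #include <utility>

  std::pair<PistolTopButtonUse, PistolSecondButtonUse> ResolveButtonUses(
      bool legacy_line_follow, PistolTopButtonUse top,
      PistolSecondButtonUse second) {
    // The old boolean flag wins, for backwards compatibility.
    if (legacy_line_follow) {
      top = PistolTopButtonUse::kLineFollow;
    }
    // Line following claims the second button for Turn1.
    if (top == PistolTopButtonUse::kLineFollow) {
      second = PistolSecondButtonUse::kTurn1;
    }
    return {top, second};
  }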
diff --git a/frc971/input/drivetrain_input.h b/frc971/input/drivetrain_input.h
index a33d449..0cb724d 100644
--- a/frc971/input/drivetrain_input.h
+++ b/frc971/input/drivetrain_input.h
@@ -16,6 +16,10 @@
namespace frc971 {
namespace input {
+using control_loops::drivetrain::PistolBottomButtonUse;
+using control_loops::drivetrain::PistolSecondButtonUse;
+using control_loops::drivetrain::PistolTopButtonUse;
+
// We have a couple different joystick configurations used to drive our skid
// steer robots. These configurations don't change very often, and there are a
// small, discrete, set of them. The interface to the drivetrain is the same
@@ -179,19 +183,13 @@
public:
using DrivetrainInputReader::DrivetrainInputReader;
- // What to use the top two buttons for on the pistol grip.
- enum class TopButtonUse {
- // Normal shifting.
- kShift,
- // Line following (currently just uses top button).
- kLineFollow,
- };
-
// Creates a DrivetrainInputReader with the corresponding joystick ports and
// axis for the (cheap) pistol grip controller.
static std::unique_ptr<PistolDrivetrainInputReader> Make(
::aos::EventLoop *event_loop, bool default_high_gear,
- TopButtonUse top_button_use);
+ PistolTopButtonUse top_button_use,
+ PistolSecondButtonUse second_button_use,
+ PistolBottomButtonUse bottom_button_use);
private:
PistolDrivetrainInputReader(
diff --git a/frc971/vision/BUILD b/frc971/vision/BUILD
index f3ebecb..133281b 100644
--- a/frc971/vision/BUILD
+++ b/frc971/vision/BUILD
@@ -223,6 +223,7 @@
hdrs = [
"visualize_robot.h",
],
+ visibility = ["//visibility:public"],
deps = [
"//aos:init",
"//third_party:opencv",
diff --git a/frc971/vision/target_map.fbs b/frc971/vision/target_map.fbs
index de79744..10f48cd 100644
--- a/frc971/vision/target_map.fbs
+++ b/frc971/vision/target_map.fbs
@@ -48,6 +48,15 @@
// NOTE: not filtered by aprilrobotics.cc so that we can log
// more detections.
distortion_factor:double (id: 5);
+
+ // Ratio of the pose error of the best estimate to the
+ // pose error of the second-best estimate.
+ // Only filled out if this pose represents a live detection.
+ // This should be significantly less than 1;
+ // otherwise, this pose may be a wrong solution.
+ // NOTE: not filtered by aprilrobotics.cc so that we can log
+ // more detections.
+ pose_error_ratio:double (id: 6);
}
// Map of all target poses on a field.
diff --git a/frc971/vision/target_mapper.cc b/frc971/vision/target_mapper.cc
index dd332fc..dea883b 100644
--- a/frc971/vision/target_mapper.cc
+++ b/frc971/vision/target_mapper.cc
@@ -10,9 +10,11 @@
DEFINE_double(distortion_noise_scalar, 1.0,
"Scale the target pose distortion factor by this when computing "
"the noise.");
+DEFINE_uint64(
+ frozen_target_id, 1,
+ "Freeze the pose of this target so the map can have one fixed point.");
namespace frc971::vision {
-
Eigen::Affine3d PoseUtils::Pose3dToAffine3d(
const ceres::examples::Pose3d &pose3d) {
Eigen::Affine3d H_world_pose =
@@ -160,8 +162,8 @@
{
// Noise for vision-based target localizations. Multiplying this matrix by
- // the distance from camera to target squared results in the uncertainty in
- // that measurement
+ // the distance from camera to target squared results in the uncertainty
+ // in that measurement
TargetMapper::ConfidenceMatrix Q_vision =
TargetMapper::ConfidenceMatrix::Zero();
Q_vision(kX, kX) = std::pow(0.045, 2);
@@ -232,6 +234,15 @@
return std::nullopt;
}
+std::optional<TargetMapper::TargetPose> TargetMapper::GetTargetPoseById(
+ TargetId target_id) {
+ if (target_poses_.count(target_id) > 0) {
+ return TargetMapper::TargetPose{target_id, target_poses_[target_id]};
+ }
+
+ return std::nullopt;
+}
+
// Taken from ceres/examples/slam/pose_graph_3d/pose_graph_3d.cc
// Constructs the nonlinear least squares optimization problem from the pose
// graph constraints.
@@ -289,7 +300,10 @@
// algorithm has internal damping which mitigates this issue, but it is
// better to properly constrain the gauge freedom. This can be done by
// setting one of the poses as constant so the optimizer cannot change it.
- ceres::examples::MapOfPoses::iterator pose_start_iter = poses->begin();
+ CHECK_NE(poses->count(FLAGS_frozen_target_id), 0ul)
+ << "Got no poses for frozen target id " << FLAGS_frozen_target_id;
+ ceres::examples::MapOfPoses::iterator pose_start_iter =
+ poses->find(FLAGS_frozen_target_id);
CHECK(pose_start_iter != poses->end()) << "There are no poses.";
problem->SetParameterBlockConstant(pose_start_iter->second.p.data());
problem->SetParameterBlockConstant(pose_start_iter->second.q.coeffs().data());
@@ -302,6 +316,7 @@
ceres::Solver::Options options;
options.max_num_iterations = FLAGS_max_num_iterations;
options.linear_solver_type = ceres::SPARSE_NORMAL_CHOLESKY;
+ options.minimizer_progress_to_stdout = true;
ceres::Solver::Summary summary;
ceres::Solve(options, problem, &summary);
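The gauge-freedom fix above generalizes to any least-squares pose problem; a
self-contained Ceres sketch (toy residual, assuming only the standard Ceres
API):

  #include "ceres/ceres.h"

  // One scalar constraint between two parameters: b should sit 1.0 past a.
  struct OffsetResidual {
    template <typename T>
    bool operator()(const T *a, const T *b, T *residual) const {
      residual[0] = b[0] - a[0] - T(1.0);
      return true;
    }
  };

  int main() {
    double a = 0.0, b = 0.0;
    ceres::Problem problem;
    problem.AddResidualBlock(
        new ceres::AutoDiffCostFunction<OffsetResidual, 1, 1, 1>(
            new OffsetResidual),
        nullptr, &a, &b);
    // Without this, (a, b) can slide together without changing the cost;
    // freezing one block anchors the solution, just like freezing
    // FLAGS_frozen_target_id anchors the target map.
    problem.SetParameterBlockConstant(&a);
    ceres::Solver::Options options;
    ceres::Solver::Summary summary;
    ceres::Solve(options, &problem, &summary);
    return 0;
  }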
diff --git a/frc971/vision/target_mapper.h b/frc971/vision/target_mapper.h
index 3724832..35c0977 100644
--- a/frc971/vision/target_mapper.h
+++ b/frc971/vision/target_mapper.h
@@ -46,6 +46,9 @@
static std::optional<TargetPose> GetTargetPoseById(
std::vector<TargetPose> target_poses, TargetId target_id);
+ // Version that looks up the pose in the internal target_poses_ map.
+ std::optional<TargetPose> GetTargetPoseById(TargetId target_id);
+
ceres::examples::MapOfPoses target_poses() { return target_poses_; }
private:
diff --git a/frc971/vision/visualize_robot.cc b/frc971/vision/visualize_robot.cc
index 0bbe507..55932ef 100644
--- a/frc971/vision/visualize_robot.cc
+++ b/frc971/vision/visualize_robot.cc
@@ -1,14 +1,33 @@
#include "frc971/vision/visualize_robot.h"
-#include "glog/logging.h"
#include <opencv2/calib3d.hpp>
#include <opencv2/core/eigen.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc.hpp>
+#include "glog/logging.h"
+
namespace frc971 {
namespace vision {
+void VisualizeRobot::SetDefaultViewpoint(int image_width, double focal_length) {
+ // 10 meters above the origin, rotated so the camera faces straight down
+ Eigen::Translation3d camera_trans(0, 0, 10.0);
+ Eigen::AngleAxisd camera_rot(M_PI, Eigen::Vector3d::UnitX());
+ Eigen::Affine3d camera_viewpoint = camera_trans * camera_rot;
+ SetViewpoint(camera_viewpoint);
+
+ cv::Mat camera_mat;
+ double half_width = static_cast<double>(image_width) / 2.0;
+ double intr[] = {focal_length, 0.0, half_width, 0.0, focal_length,
+ half_width, 0.0, 0.0, 1.0};
+ camera_mat = cv::Mat(3, 3, CV_64FC1, intr);
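+ // Note: this cv::Mat wraps the stack-allocated intr array without copying;
+ // SetCameraParameters() clones it, so the matrix stays valid after return.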
+ SetCameraParameters(camera_mat);
+
+ cv::Mat dist_coeffs = cv::Mat(1, 5, CV_64F, 0.0);
+ SetDistortionCoefficients(dist_coeffs);
+}
+
cv::Point VisualizeRobot::ProjectPoint(Eigen::Vector3d T_world_point) {
// Map 3D point in world coordinates to camera frame
Eigen::Vector3d T_camera_point = H_world_viewpoint_.inverse() * T_world_point;
@@ -24,15 +43,17 @@
return projected_point;
}
-void VisualizeRobot::DrawLine(Eigen::Vector3d start3d, Eigen::Vector3d end3d) {
+void VisualizeRobot::DrawLine(Eigen::Vector3d start3d, Eigen::Vector3d end3d,
+ cv::Scalar color) {
cv::Point start2d = ProjectPoint(start3d);
cv::Point end2d = ProjectPoint(end3d);
- cv::line(image_, start2d, end2d, cv::Scalar(0, 0, 255));
+ cv::line(image_, start2d, end2d, color);
}
void VisualizeRobot::DrawFrameAxes(Eigen::Affine3d H_world_target,
- std::string label, double axis_scale) {
+ std::string label, cv::Scalar label_color,
+ double axis_scale) {
// Map origin to display from global (world) frame to camera frame
Eigen::Affine3d H_viewpoint_target =
H_world_viewpoint_.inverse() * H_world_target;
@@ -53,21 +74,37 @@
// Grab x axis direction
cv::Vec3d label_offset = r_mat.col(0);
- // Find 3D coordinate of point at the end of the x-axis, and project it
+ // Find the 3D coordinate of the point at the end of the x-axis, and put the
+ // label there, bumped a few (5) pixels right to make it easier to read.
cv::Mat label_coord_res =
camera_mat_ * cv::Mat(tvec + label_offset * axis_scale);
cv::Vec3d label_coord = label_coord_res.col(0);
- label_coord[0] = label_coord[0] / label_coord[2];
+ label_coord[0] = label_coord[0] / label_coord[2] + 5;
label_coord[1] = label_coord[1] / label_coord[2];
cv::putText(image_, label, cv::Point(label_coord[0], label_coord[1]),
- cv::FONT_HERSHEY_PLAIN, 1.0, cv::Scalar(0, 0, 255));
+ cv::FONT_HERSHEY_PLAIN, 1.0, label_color);
}
}
-void VisualizeRobot::DrawBoardOutline(Eigen::Affine3d H_world_board,
- std::string label) {
- LOG(INFO) << "Not yet implemented; drawing axes only";
- DrawFrameAxes(H_world_board, label);
+void VisualizeRobot::DrawRobotOutline(Eigen::Affine3d H_world_robot,
+ std::string label, cv::Scalar color) {
+ DrawFrameAxes(H_world_robot, label, color);
+ const double kBotHalfWidth = 0.75 / 2.0;
+ const double kBotHalfLength = 1.0 / 2.0;
+ // Compute coordinates for front/rear and right/left corners
+ Eigen::Vector3d fr_corner =
+ H_world_robot * Eigen::Vector3d(kBotHalfLength, kBotHalfWidth, 0);
+ Eigen::Vector3d fl_corner =
+ H_world_robot * Eigen::Vector3d(kBotHalfLength, -kBotHalfWidth, 0);
+ Eigen::Vector3d rl_corner =
+ H_world_robot * Eigen::Vector3d(-kBotHalfLength, -kBotHalfWidth, 0);
+ Eigen::Vector3d rr_corner =
+ H_world_robot * Eigen::Vector3d(-kBotHalfLength, kBotHalfWidth, 0);
+
+ DrawLine(fr_corner, fl_corner, color);
+ DrawLine(fl_corner, rl_corner, color);
+ DrawLine(rl_corner, rr_corner, color);
+ DrawLine(rr_corner, fr_corner, color);
}
} // namespace vision
diff --git a/frc971/vision/visualize_robot.h b/frc971/vision/visualize_robot.h
index 391a030..cd8b4d0 100644
--- a/frc971/vision/visualize_robot.h
+++ b/frc971/vision/visualize_robot.h
@@ -24,6 +24,14 @@
// Set image on which to draw
void SetImage(cv::Mat image) { image_ = image; }
+ // Sets the image to all black.
+ // Uses default_size if no image has been set yet, else image_.size().
+ void ClearImage(cv::Size default_size = cv::Size(1280, 720)) {
+ auto image_size = (image_.data == nullptr ? default_size : image_.size());
+ cv::Mat black_image_mat = cv::Mat::zeros(image_size, CV_8UC3);
+ SetImage(black_image_mat);
+ }
+
// Set the viewpoint of the camera relative to a global origin
void SetViewpoint(Eigen::Affine3d view_origin) {
H_world_viewpoint_ = view_origin;
@@ -31,27 +39,37 @@
// Set camera parameters (for projection into a virtual view)
void SetCameraParameters(cv::Mat camera_intrinsics) {
- camera_mat_ = camera_intrinsics;
+ camera_mat_ = camera_intrinsics.clone();
}
// Set distortion coefficients (defaults to 0)
void SetDistortionCoefficients(cv::Mat dist_coeffs) {
- dist_coeffs_ = dist_coeffs;
+ dist_coeffs_ = dist_coeffs.clone();
}
+ // Sets up a default camera view 10 m above the origin, pointing down.
+ // Uses image_width and focal_length to define a default camera projection
+ // matrix.
+ void SetDefaultViewpoint(int image_width = 1000,
+ double focal_length = 1000.0);
+
// Helper function to project a point in 3D to the virtual image coordinates
cv::Point ProjectPoint(Eigen::Vector3d point3d);
// Draw a line connecting two 3D points
- void DrawLine(Eigen::Vector3d start, Eigen::Vector3d end);
+ void DrawLine(Eigen::Vector3d start, Eigen::Vector3d end,
+ cv::Scalar color = cv::Scalar(0, 200, 0));
// Draw coordinate frame for a target frame relative to the world frame
// Axes are drawn (x,y,z) -> (red, green, blue)
void DrawFrameAxes(Eigen::Affine3d H_world_target, std::string label = "",
+ cv::Scalar label_color = cv::Scalar(0, 0, 255),
double axis_scale = 0.25);
- // TODO<Jim>: Need to implement this, and maybe DrawRobotOutline
- void DrawBoardOutline(Eigen::Affine3d H_world_board, std::string label = "");
+ // TODO<Jim>: Also implement DrawBoardOutline? Maybe one function w/
+ // parameters?
+ void DrawRobotOutline(Eigen::Affine3d H_world_robot, std::string label = "",
+ cv::Scalar color = cv::Scalar(0, 200, 0));
Eigen::Affine3d H_world_viewpoint_; // Where to view the world from
cv::Mat image_; // Image to draw on
diff --git a/frc971/vision/visualize_robot_sample.cc b/frc971/vision/visualize_robot_sample.cc
index dc38352..59acba0 100644
--- a/frc971/vision/visualize_robot_sample.cc
+++ b/frc971/vision/visualize_robot_sample.cc
@@ -26,25 +26,8 @@
cv::Mat image_mat =
cv::Mat::zeros(cv::Size(image_width, image_width), CV_8UC3);
vis_robot.SetImage(image_mat);
-
- // 10 meters above the origin, rotated so the camera faces straight down
- Eigen::Translation3d camera_trans(0, 0, 10.0);
- Eigen::AngleAxisd camera_rot(M_PI, Eigen::Vector3d::UnitX());
- Eigen::Affine3d camera_viewpoint = camera_trans * camera_rot;
- vis_robot.SetViewpoint(camera_viewpoint);
-
- cv::Mat camera_mat;
double focal_length = 1000.0;
- double intr[] = {focal_length, 0.0, image_width / 2.0,
- 0.0, focal_length, image_width / 2.0,
- 0.0, 0.0, 1.0};
- camera_mat = cv::Mat(3, 3, CV_64FC1, intr);
- vis_robot.SetCameraParameters(camera_mat);
-
- Eigen::Affine3d offset_rotate_origin(Eigen::Affine3d::Identity());
-
- cv::Mat dist_coeffs = cv::Mat(1, 5, CV_64F, 0.0);
- vis_robot.SetDistortionCoefficients(dist_coeffs);
+ vis_robot.SetDefaultViewpoint(image_width, focal_length);
// Go around the clock and plot the coordinate frame at different rotations
for (int i = 0; i < 12; i++) {
@@ -52,8 +35,9 @@
Eigen::Vector3d trans;
trans << 1.0 * cos(angle), 1.0 * sin(angle), 0.0;
- offset_rotate_origin = Eigen::Translation3d(trans) *
- Eigen::AngleAxisd(angle, Eigen::Vector3d::UnitX());
+ Eigen::Affine3d offset_rotate_origin =
+ Eigen::Translation3d(trans) *
+ Eigen::AngleAxisd(angle, Eigen::Vector3d::UnitX());
vis_robot.DrawFrameAxes(offset_rotate_origin, std::to_string(i));
}
diff --git a/y2023/BUILD b/y2023/BUILD
index 095b856..08713b7 100644
--- a/y2023/BUILD
+++ b/y2023/BUILD
@@ -37,6 +37,7 @@
"//y2023/autonomous:binaries",
":joystick_reader",
":wpilib_interface",
+ "//frc971/can_logger",
"//aos/network:message_bridge_client",
"//aos/network:message_bridge_server",
"//y2023/control_loops/drivetrain:drivetrain",
@@ -211,6 +212,7 @@
"//y2023/control_loops/superstructure:superstructure_output_fbs",
"//y2023/control_loops/superstructure:superstructure_position_fbs",
"//y2023/control_loops/superstructure:superstructure_status_fbs",
+ "//frc971/can_logger:can_logging_fbs",
],
target_compatible_with = ["@platforms//os:linux"],
deps = [
diff --git a/y2023/constants.cc b/y2023/constants.cc
index f11c68c..c8b75d1 100644
--- a/y2023/constants.cc
+++ b/y2023/constants.cc
@@ -106,28 +106,29 @@
0.0201047336425017 - 1.0173426655158 - 0.186085272847293 - 0.0317706563397807;
wrist->subsystem_params.zeroing_constants.measured_absolute_position =
- 1.00731305518279;
+ 2.27068625283861;
break;
case kPracticeTeamNumber:
- arm_proximal->zeroing.measured_absolute_position = 0.261970010788946;
+ arm_proximal->zeroing.measured_absolute_position = 0.24572544970387;
arm_proximal->potentiometer_offset =
- 10.5178592988554 + 0.0944609125285876 - 0.00826532984625095;
+ 10.5178592988554 + 0.0944609125285876 - 0.00826532984625095 +
+ 0.167359305216504;
- arm_distal->zeroing.measured_absolute_position = 0.507166003869875;
+ arm_distal->zeroing.measured_absolute_position = 0.0915283983599425;
arm_distal->potentiometer_offset =
7.673132586937 - 0.0799284644472573 - 0.0323574039310657 +
- 0.0143810684138064 + 0.00945555248207735;
+ 0.0143810684138064 + 0.00945555248207735 + 0.452446388633863;
- roll_joint->zeroing.measured_absolute_position = 1.85482286175059;
+ roll_joint->zeroing.measured_absolute_position = 0.0722321007692069;
roll_joint->potentiometer_offset =
0.624713611895747 + 3.10458504917251 - 0.0966407797407789 +
0.0257708772364788 - 0.0395076737853459 - 6.87914956118006 -
- 0.097581301615046;
+ 0.097581301615046 + 3.3424421683095 - 3.97605190912604;
wrist->subsystem_params.zeroing_constants.measured_absolute_position =
- 6.04062267812154;
+ 1.04410369068834;
break;
diff --git a/y2023/control_loops/drivetrain/drivetrain_base.cc b/y2023/control_loops/drivetrain/drivetrain_base.cc
index 4efba61..a11a896 100644
--- a/y2023/control_loops/drivetrain/drivetrain_base.cc
+++ b/y2023/control_loops/drivetrain/drivetrain_base.cc
@@ -76,6 +76,10 @@
.finished()
.asDiagonal()),
.max_controllable_offset = 0.5},
+ frc971::control_loops::drivetrain::PistolTopButtonUse::kNone,
+ frc971::control_loops::drivetrain::PistolSecondButtonUse::kTurn1,
+ frc971::control_loops::drivetrain::PistolBottomButtonUse::
+ kControlLoopDriving,
};
return kDrivetrainConfig;
diff --git a/y2023/control_loops/superstructure/led_indicator.cc b/y2023/control_loops/superstructure/led_indicator.cc
index 48bca1b..afd0201 100644
--- a/y2023/control_loops/superstructure/led_indicator.cc
+++ b/y2023/control_loops/superstructure/led_indicator.cc
@@ -34,7 +34,7 @@
config.brightnessScalar = 1.0;
candle_.ConfigAllSettings(config, 0);
- event_loop_->AddPhasedLoop([&](int) { DecideColor(); },
+ event_loop_->AddPhasedLoop([this](int) { DecideColor(); },
std::chrono::milliseconds(20));
}
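A note on the capture change: with [&], any accidental use of a constructor
local in this long-lived callback would dangle once the constructor returns;
[this] makes the single intended capture explicit. A generic sketch of the
failure mode (not from this codebase):

  #include <functional>

  std::function<int()> MakeCounter() {
    int start = 10;
    // BAD: captures the local 'start' by reference; the lambda outlives
    // MakeCounter, so calling the result is undefined behavior.
    return [&]() { return ++start; };
    // GOOD: capture by value instead, e.g. [start]() mutable { ... }, or
    // [this] in a member function that only touches members.
  }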
@@ -177,9 +177,9 @@
DisplayLed(0, 0, 255);
return;
}
-
- return;
}
+
+ DisplayLed(0, 0, 0);
}
} // namespace y2023::control_loops::superstructure
diff --git a/y2023/control_loops/superstructure/led_indicator.h b/y2023/control_loops/superstructure/led_indicator.h
index d88650d..256de30 100644
--- a/y2023/control_loops/superstructure/led_indicator.h
+++ b/y2023/control_loops/superstructure/led_indicator.h
@@ -69,7 +69,7 @@
void DisplayLed(uint8_t r, uint8_t g, uint8_t b);
- ctre::phoenix::led::CANdle candle_{0, ""};
+ ctre::phoenix::led::CANdle candle_{8, "rio"};
aos::EventLoop *event_loop_;
aos::Fetcher<frc971::control_loops::drivetrain::Output>
diff --git a/y2023/localizer/localizer.cc b/y2023/localizer/localizer.cc
index 4bb07e2..efffca3 100644
--- a/y2023/localizer/localizer.cc
+++ b/y2023/localizer/localizer.cc
@@ -9,6 +9,9 @@
DEFINE_double(max_pose_error, 1e-6,
"Throw out target poses with a higher pose error than this");
+DEFINE_double(
+ max_pose_error_ratio, 0.4,
+ "Throw out target poses with a higher pose error ratio than this");
DEFINE_double(distortion_noise_scalar, 1.0,
"Scale the target pose distortion factor by this when computing "
"the noise.");
@@ -337,6 +340,11 @@
<< target.pose_error();
return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR,
&builder);
+ } else if (target.pose_error_ratio() > FLAGS_max_pose_error_ratio) {
+ VLOG(1) << "Rejecting target due to high pose error ratio "
+ << target.pose_error_ratio();
+ return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR_RATIO,
+ &builder);
}
double theta_at_capture = state_at_capture.value()(StateIdx::kTheta);
diff --git a/y2023/localizer/localizer_test.cc b/y2023/localizer/localizer_test.cc
index 7248b33..e8b7d56 100644
--- a/y2023/localizer/localizer_test.cc
+++ b/y2023/localizer/localizer_test.cc
@@ -155,6 +155,7 @@
target_builder.add_position(position_offset);
target_builder.add_orientation(quat_offset);
target_builder.add_pose_error(pose_error_);
+ target_builder.add_pose_error_ratio(pose_error_ratio_);
auto target_offset = target_builder.Finish();
auto targets_offset = builder.fbb()->CreateVector({target_offset});
@@ -277,6 +278,7 @@
uint64_t send_target_id_ = kTargetId;
double pose_error_ = 1e-7;
+ double pose_error_ratio_ = 0.1;
double implied_yaw_error_ = 0.0;
gflags::FlagSaver flag_saver_;
@@ -507,4 +509,27 @@
status_fetcher_->statistics()->Get(0)->total_candidates());
}
+// Tests that we correctly reject a detection with a high pose error ratio.
+TEST_F(LocalizerTest, HighPoseErrorRatio) {
+ output_voltages_ << 0.0, 0.0;
+ send_targets_ = true;
+ // Send the minimum pose error ratio that will be rejected.
+ constexpr double kEps = 1e-9;
+ pose_error_ratio_ = 0.4 + kEps;
+
+ event_loop_factory_.RunFor(std::chrono::seconds(4));
+ CHECK(status_fetcher_.Fetch());
+ ASSERT_TRUE(status_fetcher_->has_statistics());
+ ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+ ASSERT_EQ(0, status_fetcher_->statistics()->Get(0)->total_accepted());
+ ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+ ASSERT_EQ(
+ status_fetcher_->statistics()
+ ->Get(0)
+ ->rejection_reasons()
+ ->Get(static_cast<size_t>(RejectionReason::HIGH_POSE_ERROR_RATIO))
+ ->count(),
+ status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
} // namespace y2023::localizer::testing
diff --git a/y2023/localizer/status.fbs b/y2023/localizer/status.fbs
index 0ea779f..854603e 100644
--- a/y2023/localizer/status.fbs
+++ b/y2023/localizer/status.fbs
@@ -24,6 +24,8 @@
HIGH_DISTANCE_TO_TARGET = 6,
// The robot was travelling too fast; we don't trust the target.
ROBOT_TOO_FAST = 7,
+ // Pose estimation error ratio was higher than any normal detection.
+ HIGH_POSE_ERROR_RATIO = 8,
}
table RejectionCount {
diff --git a/y2023/vision/BUILD b/y2023/vision/BUILD
index 1cdbe36..0f75aba 100644
--- a/y2023/vision/BUILD
+++ b/y2023/vision/BUILD
@@ -117,6 +117,7 @@
"//frc971/vision:target_map_fbs",
"//frc971/vision:target_mapper",
"//frc971/vision:vision_fbs",
+ "//frc971/vision:visualize_robot",
"//third_party:opencv",
"//third_party/apriltag",
"//y2023/constants:constants_fbs",
diff --git a/y2023/vision/aprilrobotics.cc b/y2023/vision/aprilrobotics.cc
index 317720c..45ab7db 100644
--- a/y2023/vision/aprilrobotics.cc
+++ b/y2023/vision/aprilrobotics.cc
@@ -1,5 +1,7 @@
#include "y2023/vision/aprilrobotics.h"
+#include <opencv2/highgui.hpp>
+
#include "y2023/vision/vision_util.h"
DEFINE_bool(
@@ -15,6 +17,8 @@
"Maximum expected value for unscaled distortion factors. Will scale "
"distortion factors so that this value (and a higher distortion) maps to "
"1.0.");
+DEFINE_uint64(pose_estimation_iterations, 50,
+ "Number of iterations for apriltag pose estimation.");
namespace y2023 {
namespace vision {
@@ -22,9 +26,12 @@
namespace chrono = std::chrono;
AprilRoboticsDetector::AprilRoboticsDetector(aos::EventLoop *event_loop,
- std::string_view channel_name)
+ std::string_view channel_name,
+ bool flip_image)
: calibration_data_(event_loop),
image_size_(0, 0),
+ flip_image_(flip_image),
+ node_name_(event_loop->node()->name()->string_view()),
ftrace_(),
image_callback_(event_loop, channel_name,
[&](cv::Mat image_color_mat,
@@ -53,7 +60,6 @@
calibration_data_.constants(), event_loop->node()->name()->string_view());
extrinsics_ = CameraExtrinsics(calibration_);
-
intrinsics_ = CameraIntrinsics(calibration_);
// Create an undistort projection matrix using the intrinsics
projection_matrix_ = cv::Mat::zeros(3, 4, CV_64F);
@@ -93,8 +99,11 @@
auto builder = target_map_sender_.MakeBuilder();
std::vector<flatbuffers::Offset<frc971::vision::TargetPoseFbs>> target_poses;
- for (const auto &detection : result.detections) {
- target_poses.emplace_back(BuildTargetPose(detection, builder.fbb()));
+ for (auto &detection : result.detections) {
+ auto *fbb = builder.fbb();
+ auto pose = BuildTargetPose(detection, fbb);
+ DestroyPose(&detection.pose);
+ target_poses.emplace_back(pose);
}
const auto target_poses_offset = builder.fbb()->CreateVector(target_poses);
auto target_map_builder = builder.MakeBuilder<frc971::vision::TargetMap>();
@@ -122,7 +131,7 @@
return frc971::vision::CreateTargetPoseFbs(
*fbb, detection.det.id, position_offset, orientation_offset,
detection.det.decision_margin, detection.pose_error,
- detection.distortion_factor);
+ detection.distortion_factor, detection.pose_error_ratio);
}
void AprilRoboticsDetector::UndistortDetection(
@@ -178,8 +187,15 @@
return corner_points;
}
+void AprilRoboticsDetector::DestroyPose(apriltag_pose_t *pose) const {
+ matd_destroy(pose->R);
+ matd_destroy(pose->t);
+}
+
AprilRoboticsDetector::DetectionResult AprilRoboticsDetector::DetectTags(
cv::Mat image, aos::monotonic_clock::time_point eof) {
+ cv::Mat color_image;
+ cvtColor(image, color_image, cv::COLOR_GRAY2RGB);
const aos::monotonic_clock::time_point start_time =
aos::monotonic_clock::now();
@@ -249,8 +265,13 @@
const aos::monotonic_clock::time_point before_pose_estimation =
aos::monotonic_clock::now();
- apriltag_pose_t pose;
- double pose_error = estimate_tag_pose(&info, &pose);
+ apriltag_pose_t pose_1;
+ apriltag_pose_t pose_2;
+ double pose_error_1;
+ double pose_error_2;
+ estimate_tag_pose_orthogonal_iteration(&info, &pose_error_1, &pose_1,
+ &pose_error_2, &pose_2,
+ FLAGS_pose_estimation_iterations);
const aos::monotonic_clock::time_point after_pose_estimation =
aos::monotonic_clock::now();
@@ -259,7 +280,8 @@
before_pose_estimation)
.count()
<< " seconds for pose estimation";
- VLOG(1) << "Pose err: " << pose_error;
+ VLOG(1) << "Pose err 1: " << pose_error_1;
+ VLOG(1) << "Pose err 2: " << pose_error_2;
// Send undistorted corner points in pink
std::vector<cv::Point2f> corner_points = MakeCornerVector(det);
@@ -270,14 +292,67 @@
double distortion_factor =
ComputeDistortionFactor(orig_corner_points, corner_points);
+ // Pose estimation produces two candidate poses;
+ // choose the one with the lower estimation error.
+ bool use_pose_1 = (pose_error_1 < pose_error_2);
+ auto best_pose = (use_pose_1 ? pose_1 : pose_2);
+ auto secondary_pose = (use_pose_1 ? pose_2 : pose_1);
+ double best_pose_error = (use_pose_1 ? pose_error_1 : pose_error_2);
+ double secondary_pose_error = (use_pose_1 ? pose_error_2 : pose_error_1);
+
+ CHECK_NE(best_pose_error, std::numeric_limits<double>::infinity())
+ << "Got no valid pose estimations, this should not be possible.";
+ double pose_error_ratio = best_pose_error / secondary_pose_error;
+
+ // Destroy the secondary pose if we got one
+ if (secondary_pose_error != std::numeric_limits<double>::infinity()) {
+ DestroyPose(&secondary_pose);
+ }
+
results.emplace_back(Detection{.det = *det,
- .pose = pose,
- .pose_error = pose_error,
- .distortion_factor = distortion_factor});
+ .pose = best_pose,
+ .pose_error = best_pose_error,
+ .distortion_factor = distortion_factor,
+ .pose_error_ratio = pose_error_ratio});
+
+ if (FLAGS_visualize) {
+ // Draw raw (distorted) corner points in green
+ cv::line(color_image, orig_corner_points[0], orig_corner_points[1],
+ cv::Scalar(0, 255, 0), 2);
+ cv::line(color_image, orig_corner_points[1], orig_corner_points[2],
+ cv::Scalar(0, 255, 0), 2);
+ cv::line(color_image, orig_corner_points[2], orig_corner_points[3],
+ cv::Scalar(0, 255, 0), 2);
+ cv::line(color_image, orig_corner_points[3], orig_corner_points[0],
+ cv::Scalar(0, 255, 0), 2);
+
+ // Draw undistorted corner points in red
+ cv::line(color_image, corner_points[0], corner_points[1],
+ cv::Scalar(0, 0, 255), 2);
+ cv::line(color_image, corner_points[2], corner_points[1],
+ cv::Scalar(0, 0, 255), 2);
+ cv::line(color_image, corner_points[2], corner_points[3],
+ cv::Scalar(0, 0, 255), 2);
+ cv::line(color_image, corner_points[0], corner_points[3],
+ cv::Scalar(0, 0, 255), 2);
+ }
+
+ VLOG(1) << "Found tag number " << det->id << " hamming: " << det->hamming
+ << " margin: " << det->decision_margin;
+
} else {
rejections_++;
}
}
+ if (FLAGS_visualize) {
+ // Display the result
+ // Rotate by 180 degrees to make it upright
+ if (flip_image_) {
+ cv::rotate(color_image, color_image, cv::ROTATE_180);
+ }
+ cv::imshow(absl::StrCat("AprilRoboticsDetector Image ", node_name_),
+ color_image);
+ }
const auto corners_offset = builder.fbb()->CreateVector(foxglove_corners);
foxglove::ImageAnnotations::Builder annotation_builder(*builder.fbb());
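The two-solution selection above, in isolation (hypothetical helper; the
real code also keeps the winning apriltag_pose_t):

  #include <algorithm>

  struct PoseChoice {
    double best_error;
    double error_ratio;  // best / second-best; well below 1.0 is unambiguous
  };

  PoseChoice ChoosePose(double pose_error_1, double pose_error_2) {
    const double best = std::min(pose_error_1, pose_error_2);
    const double second = std::max(pose_error_1, pose_error_2);
    // apriltag reports infinity when no second solution exists, which
    // drives the ratio to 0, i.e. maximally unambiguous.
    return {best, best / second};
  }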
diff --git a/y2023/vision/aprilrobotics.h b/y2023/vision/aprilrobotics.h
index fab2d30..01e3138 100644
--- a/y2023/vision/aprilrobotics.h
+++ b/y2023/vision/aprilrobotics.h
@@ -11,6 +11,7 @@
#include "frc971/vision/target_map_generated.h"
#include "frc971/vision/target_mapper.h"
#include "frc971/vision/vision_generated.h"
+#include "frc971/vision/visualize_robot.h"
#include "opencv2/core/eigen.hpp"
#include "opencv2/imgproc.hpp"
#include "third_party/apriltag/apriltag.h"
@@ -29,6 +30,7 @@
apriltag_pose_t pose;
double pose_error;
double distortion_factor;
+ double pose_error_ratio;
};
struct DetectionResult {
@@ -37,11 +39,15 @@
};
AprilRoboticsDetector(aos::EventLoop *event_loop,
- std::string_view channel_name);
+ std::string_view channel_name, bool flip_image = true);
~AprilRoboticsDetector();
void SetWorkerpoolAffinities();
+ // Frees the heap-allocated rotation and translation matrices in the given
+ // pose.
+ void DestroyPose(apriltag_pose_t *pose) const;
+
// Undistorts the april tag corners using the camera calibration
void UndistortDetection(apriltag_detection_t *det) const;
@@ -77,14 +83,16 @@
std::optional<cv::Mat> extrinsics_;
cv::Mat dist_coeffs_;
cv::Size image_size_;
+ bool flip_image_;
+ std::string_view node_name_;
aos::Ftrace ftrace_;
frc971::vision::ImageCallback image_callback_;
aos::Sender<frc971::vision::TargetMap> target_map_sender_;
aos::Sender<foxglove::ImageAnnotations> image_annotations_sender_;
-
size_t rejections_;
+ frc971::vision::VisualizeRobot vis_robot_;
};
} // namespace vision
diff --git a/y2023/vision/target_mapping.cc b/y2023/vision/target_mapping.cc
index 9352bdd..e9ef662 100644
--- a/y2023/vision/target_mapping.cc
+++ b/y2023/vision/target_mapping.cc
@@ -7,6 +7,7 @@
#include "frc971/vision/calibration_generated.h"
#include "frc971/vision/charuco_lib.h"
#include "frc971/vision/target_mapper.h"
+#include "frc971/vision/visualize_robot.h"
#include "opencv2/aruco.hpp"
#include "opencv2/calib3d.hpp"
#include "opencv2/core/eigen.hpp"
@@ -28,12 +29,21 @@
"Directory to write solved target map to");
DEFINE_string(field_name, "charged_up",
"Field name, for the output json filename and flatbuffer field");
-DEFINE_int32(team_number, 7971,
- "Use the calibration for a node with this team number");
+DEFINE_int32(team_number, 0,
+ "Required: Use the calibration for a node with this team number");
DEFINE_string(mcap_output_path, "/tmp/log.mcap", "Log to output.");
DEFINE_string(pi, "pi1", "Pi name to generate mcap log for; defaults to pi1.");
DEFINE_double(max_pose_error, 1e-6,
"Throw out target poses with a higher pose error than this");
+DEFINE_double(
+ max_pose_error_ratio, 0.4,
+ "Throw out target poses with a higher pose error ratio than this");
+DEFINE_uint64(wait_key, 0,
+ "Time in ms to wait between images, if no click (0 to wait "
+ "indefinitely until click).");
+DEFINE_uint64(skip_to, 1,
+ "Start at combined image of this number (1 is the first image)");
+DEFINE_bool(solve, true, "Whether to solve for the field's target map.");
namespace y2023 {
namespace vision {
@@ -42,6 +52,7 @@
using frc971::vision::PoseUtils;
using frc971::vision::TargetMap;
using frc971::vision::TargetMapper;
+using frc971::vision::VisualizeRobot;
namespace calibration = frc971::vision::calibration;
// Change reference frame from camera to robot
@@ -52,16 +63,41 @@
return H_robot_target;
}
+const int kImageWidth = 1000;
+// Map from pi node name to color for drawing
+const std::map<std::string, cv::Scalar> kPiColors = {
+ {"pi1", cv::Scalar(255, 0, 255)},
+ {"pi2", cv::Scalar(255, 255, 0)},
+ {"pi3", cv::Scalar(0, 255, 255)},
+ {"pi4", cv::Scalar(255, 165, 0)},
+};
+
// Add detected apriltag poses relative to the robot to
// timestamped_target_detections
void HandleAprilTags(const TargetMap &map,
aos::distributed_clock::time_point pi_distributed_time,
std::vector<DataAdapter::TimestampedDetection>
*timestamped_target_detections,
- Eigen::Affine3d extrinsics) {
+ Eigen::Affine3d extrinsics, std::string node_name,
+ frc971::vision::TargetMapper target_loc_mapper,
+ std::set<std::string> *drawn_nodes,
+ VisualizeRobot *vis_robot, size_t *display_count) {
+ bool drew = false;
+ std::stringstream label;
+ label << node_name << " - ";
+
for (const auto *target_pose_fbs : *map.target_poses()) {
// Skip detections with high pose errors
if (target_pose_fbs->pose_error() > FLAGS_max_pose_error) {
+ VLOG(1) << " Skipping tag " << target_pose_fbs->id()
+ << " due to pose error of " << target_pose_fbs->pose_error();
+ continue;
+ }
+ // Skip detections with high pose error ratios
+ if (target_pose_fbs->pose_error_ratio() > FLAGS_max_pose_error_ratio) {
+ VLOG(1) << " Skipping tag " << target_pose_fbs->id()
+ << " due to pose error ratio of "
+ << target_pose_fbs->pose_error_ratio();
continue;
}
@@ -88,6 +124,59 @@
.distance_from_camera = distance_from_camera,
.distortion_factor = distortion_factor,
.id = static_cast<TargetMapper::TargetId>(target_pose.id)});
+
+ if (FLAGS_visualize) {
+ // If we've already drawn in the current image,
+ // display it before clearing and adding the new poses
+ if (drawn_nodes->count(node_name) != 0) {
+ (*display_count)++;
+ cv::putText(vis_robot->image_,
+ "Poses #" + std::to_string(*display_count),
+ cv::Point(600, 10), cv::FONT_HERSHEY_PLAIN, 1.0,
+ cv::Scalar(255, 255, 255));
+
+ if (*display_count >= FLAGS_skip_to) {
+ cv::imshow("View", vis_robot->image_);
+ cv::waitKey(FLAGS_wait_key);
+ } else {
+ VLOG(1) << "At poses #" << std::to_string(*display_count);
+ }
+ vis_robot->ClearImage();
+ drawn_nodes->clear();
+ }
+
+ Eigen::Affine3d H_world_target = PoseUtils::Pose3dToAffine3d(
+ target_loc_mapper.GetTargetPoseById(target_pose_fbs->id())->pose);
+ Eigen::Affine3d H_world_robot = H_world_target * H_robot_target.inverse();
+ VLOG(2) << node_name << ", " << target_pose_fbs->id()
+ << ", t = " << pi_distributed_time
+ << ", pose_error = " << target_pose_fbs->pose_error()
+ << ", pose_error_ratio = " << target_pose_fbs->pose_error_ratio()
+ << ", robot_pos (x,y,z) = "
+ << H_world_robot.translation().transpose();
+
+ label << "id " << target_pose_fbs->id() << ": err "
+ << (target_pose_fbs->pose_error() / FLAGS_max_pose_error)
+ << " ratio " << target_pose_fbs->pose_error_ratio() << " ";
+
+ vis_robot->DrawRobotOutline(H_world_robot,
+ std::to_string(target_pose_fbs->id()),
+ kPiColors.at(node_name));
+
+ vis_robot->DrawFrameAxes(H_world_target,
+ std::to_string(target_pose_fbs->id()),
+ kPiColors.at(node_name));
+ drew = true;
+ }
+ }
+ if (drew) {
+ size_t pi_number =
+ static_cast<size_t>(node_name[node_name.size() - 1] - '0');
+ cv::putText(vis_robot->image_, label.str(),
+ cv::Point(10, 10 + 20 * pi_number), cv::FONT_HERSHEY_PLAIN, 1.0,
+ kPiColors.at(node_name));
+
+ drawn_nodes->emplace(node_name);
}
}
@@ -97,10 +186,15 @@
aos::logger::LogReader *reader,
std::vector<DataAdapter::TimestampedDetection>
*timestamped_target_detections,
- std::vector<std::unique_ptr<AprilRoboticsDetector>> *detectors) {
+ std::vector<std::unique_ptr<AprilRoboticsDetector>> *detectors,
+ std::set<std::string> *drawn_nodes, VisualizeRobot *vis_robot,
+ size_t *display_count) {
static constexpr std::string_view kImageChannel = "/camera";
+ // For the robots, we need to flip the image since the cameras are rolled by
+ // 180 degrees
+ bool flip_image = (FLAGS_team_number != 7971);
auto detector_ptr = std::make_unique<AprilRoboticsDetector>(
- detection_event_loop, kImageChannel);
+ detection_event_loop, kImageChannel, flip_image);
// Get the camera extrinsics
cv::Mat extrinsics_cv = detector_ptr->extrinsics().value();
Eigen::Matrix4d extrinsics_matrix;
@@ -109,6 +203,10 @@
detectors->emplace_back(std::move(detector_ptr));
+ ceres::examples::VectorOfConstraints target_constraints;
+ frc971::vision::TargetMapper target_loc_mapper(FLAGS_json_path,
+ target_constraints);
+
mapping_event_loop->MakeWatcher("/camera", [=](const TargetMap &map) {
aos::distributed_clock::time_point pi_distributed_time =
reader->event_loop_factory()
@@ -116,8 +214,10 @@
->ToDistributedClock(aos::monotonic_clock::time_point(
aos::monotonic_clock::duration(map.monotonic_timestamp_ns())));
+ std::string node_name = detection_event_loop->node()->name()->str();
HandleAprilTags(map, pi_distributed_time, timestamped_target_detections,
- extrinsics);
+ extrinsics, node_name, target_loc_mapper, drawn_nodes,
+ vis_robot, display_count);
});
}
@@ -136,8 +236,9 @@
aos::logger::LogReader reader(
aos::logger::SortParts(unsorted_logfiles),
config.has_value() ? &config->message() : nullptr);
- // Send new april tag poses. This allows us to take a log of images, then play
- // with the april detection code and see those changes take effect in mapping
+ // Send new april tag poses. This allows us to take a log of images, then
+ // play with the april detection code and see those changes take effect in
+ // mapping
constexpr size_t kNumPis = 4;
for (size_t i = 1; i <= kNumPis; i++) {
reader.RemapLoggedChannel(absl::StrFormat("/pi%u/camera", i),
@@ -156,6 +257,9 @@
FLAGS_constants_path);
std::vector<std::unique_ptr<AprilRoboticsDetector>> detectors;
+ VisualizeRobot vis_robot;
+ std::set<std::string> drawn_nodes;
+ size_t display_count = 0;
const aos::Node *pi1 =
aos::configuration::GetNode(reader.configuration(), "pi1");
@@ -164,7 +268,8 @@
std::unique_ptr<aos::EventLoop> pi1_mapping_event_loop =
reader.event_loop_factory()->MakeEventLoop("pi1_mapping", pi1);
HandlePiCaptures(pi1_detection_event_loop.get(), pi1_mapping_event_loop.get(),
- &reader, ×tamped_target_detections, &detectors);
+ &reader, ×tamped_target_detections, &detectors,
+ &drawn_nodes, &vis_robot, &display_count);
const aos::Node *pi2 =
aos::configuration::GetNode(reader.configuration(), "pi2");
@@ -173,7 +278,8 @@
std::unique_ptr<aos::EventLoop> pi2_mapping_event_loop =
reader.event_loop_factory()->MakeEventLoop("pi2_mapping", pi2);
HandlePiCaptures(pi2_detection_event_loop.get(), pi2_mapping_event_loop.get(),
- &reader, ×tamped_target_detections, &detectors);
+ &reader, ×tamped_target_detections, &detectors,
+ &drawn_nodes, &vis_robot, &display_count);
const aos::Node *pi3 =
aos::configuration::GetNode(reader.configuration(), "pi3");
@@ -182,7 +288,8 @@
std::unique_ptr<aos::EventLoop> pi3_mapping_event_loop =
reader.event_loop_factory()->MakeEventLoop("pi3_mapping", pi3);
HandlePiCaptures(pi3_detection_event_loop.get(), pi3_mapping_event_loop.get(),
- &reader, ×tamped_target_detections, &detectors);
+ &reader, ×tamped_target_detections, &detectors,
+ &drawn_nodes, &vis_robot, &display_count);
const aos::Node *pi4 =
aos::configuration::GetNode(reader.configuration(), "pi4");
@@ -191,13 +298,13 @@
std::unique_ptr<aos::EventLoop> pi4_mapping_event_loop =
reader.event_loop_factory()->MakeEventLoop("pi4_mapping", pi4);
HandlePiCaptures(pi4_detection_event_loop.get(), pi4_mapping_event_loop.get(),
- &reader, ×tamped_target_detections, &detectors);
+ &reader, ×tamped_target_detections, &detectors,
+ &drawn_nodes, &vis_robot, &display_count);
std::unique_ptr<aos::EventLoop> mcap_event_loop;
std::unique_ptr<aos::McapLogger> relogger;
if (!FLAGS_mcap_output_path.empty()) {
LOG(INFO) << "Writing out mcap file to " << FLAGS_mcap_output_path;
- // TODO: Should make this work for any pi
const aos::Node *node =
aos::configuration::GetNode(reader.configuration(), FLAGS_pi);
reader.event_loop_factory()->GetNodeEventLoopFactory(node)->OnStartup(
@@ -212,13 +319,21 @@
});
}
+ if (FLAGS_visualize) {
+ vis_robot.ClearImage();
+ const double kFocalLength = 500.0;
+ vis_robot.SetDefaultViewpoint(kImageWidth, kFocalLength);
+ }
+
reader.event_loop_factory()->Run();
- auto target_constraints =
- DataAdapter::MatchTargetDetections(timestamped_target_detections);
+ if (FLAGS_solve) {
+ auto target_constraints =
+ DataAdapter::MatchTargetDetections(timestamped_target_detections);
- frc971::vision::TargetMapper mapper(FLAGS_json_path, target_constraints);
- mapper.Solve(FLAGS_field_name, FLAGS_output_dir);
+ frc971::vision::TargetMapper mapper(FLAGS_json_path, target_constraints);
+ mapper.Solve(FLAGS_field_name, FLAGS_output_dir);
+ }
// Clean up all the pointers
for (auto &detector_ptr : detectors) {
diff --git a/y2023/wpilib_interface.cc b/y2023/wpilib_interface.cc
index cd52cfd..2f41c91 100644
--- a/y2023/wpilib_interface.cc
+++ b/y2023/wpilib_interface.cc
@@ -114,16 +114,12 @@
} // namespace
static constexpr int kCANFalconCount = 6;
-static constexpr int kCANSignalsCount = 4;
-static constexpr int kRollerSignalsCount = 4;
static constexpr units::frequency::hertz_t kCANUpdateFreqHz = 200_Hz;
class Falcon {
public:
Falcon(int device_id, std::string canbus,
- aos::SizedArray<ctre::phoenixpro::BaseStatusSignalValue *,
- kCANFalconCount * kCANSignalsCount +
- kRollerSignalsCount> *signals)
+ std::vector<ctre::phoenixpro::BaseStatusSignalValue *> *signals)
: talon_(device_id, canbus),
device_id_(device_id),
device_temp_(talon_.GetDeviceTemp()),
@@ -134,9 +130,7 @@
// device temp is not timesynced so don't add it to the list of signals
device_temp_.SetUpdateFrequency(kCANUpdateFreqHz);
- CHECK_EQ(kCANSignalsCount, 4);
CHECK_NOTNULL(signals);
- CHECK_LE(signals->size() + 4u, signals->capacity());
supply_voltage_.SetUpdateFrequency(kCANUpdateFreqHz);
signals->push_back(&supply_voltage_);
@@ -279,8 +273,11 @@
class CANSensorReader {
public:
- CANSensorReader(aos::EventLoop *event_loop)
+ CANSensorReader(
+ aos::EventLoop *event_loop,
+ std::vector<ctre::phoenixpro::BaseStatusSignalValue *> signals_registry)
: event_loop_(event_loop),
+ signals_(signals_registry.begin(), signals_registry.end()),
can_position_sender_(
event_loop->MakeSender<drivetrain::CANPosition>("/drivetrain")),
roller_falcon_data_(std::nullopt) {
@@ -294,12 +291,6 @@
});
}
- aos::SizedArray<ctre::phoenixpro::BaseStatusSignalValue *,
- kCANFalconCount * kCANSignalsCount + kRollerSignalsCount>
- *get_signals_registry() {
- return &signals_;
- };
-
void set_falcons(std::shared_ptr<Falcon> right_front,
std::shared_ptr<Falcon> right_back,
std::shared_ptr<Falcon> right_under,
@@ -323,16 +314,8 @@
private:
void Loop() {
- CHECK_EQ(signals_.size(), 28u);
ctre::phoenix::StatusCode status =
- ctre::phoenixpro::BaseStatusSignalValue::WaitForAll(
- 2000_ms, {signals_[0], signals_[1], signals_[2], signals_[3],
- signals_[4], signals_[5], signals_[6], signals_[7],
- signals_[8], signals_[9], signals_[10], signals_[11],
- signals_[12], signals_[13], signals_[14], signals_[15],
- signals_[16], signals_[17], signals_[18], signals_[19],
- signals_[20], signals_[21], signals_[22], signals_[23],
- signals_[24], signals_[25], signals_[26], signals_[27]});
+ ctre::phoenixpro::BaseStatusSignalValue::WaitForAll(2000_ms, signals_);
if (!status.IsOK()) {
AOS_LOG(ERROR, "Failed to read signals from falcons: %s: %s",
@@ -383,9 +366,7 @@
aos::EventLoop *event_loop_;
- aos::SizedArray<ctre::phoenixpro::BaseStatusSignalValue *,
- kCANFalconCount * kCANSignalsCount + kRollerSignalsCount>
- signals_;
+ const std::vector<ctre::phoenixpro::BaseStatusSignalValue *> signals_;
aos::Sender<drivetrain::CANPosition> can_position_sender_;
std::shared_ptr<Falcon> right_front_, right_back_, right_under_, left_front_,
@@ -934,25 +915,27 @@
std::shared_ptr<frc::DigitalOutput> superstructure_reading =
make_unique<frc::DigitalOutput>(25);
+ std::vector<ctre::phoenixpro::BaseStatusSignalValue *> signals_registry;
+ std::shared_ptr<Falcon> right_front =
+ std::make_shared<Falcon>(1, "Drivetrain Bus", &signals_registry);
+ std::shared_ptr<Falcon> right_back =
+ std::make_shared<Falcon>(2, "Drivetrain Bus", &signals_registry);
+ std::shared_ptr<Falcon> right_under =
+ std::make_shared<Falcon>(3, "Drivetrain Bus", &signals_registry);
+ std::shared_ptr<Falcon> left_front =
+ std::make_shared<Falcon>(4, "Drivetrain Bus", &signals_registry);
+ std::shared_ptr<Falcon> left_back =
+ std::make_shared<Falcon>(5, "Drivetrain Bus", &signals_registry);
+ std::shared_ptr<Falcon> left_under =
+ std::make_shared<Falcon>(6, "Drivetrain Bus", &signals_registry);
+ std::shared_ptr<Falcon> roller =
+ std::make_shared<Falcon>(13, "Drivetrain Bus", &signals_registry);
+
// Thread 3.
::aos::ShmEventLoop can_sensor_reader_event_loop(&config.message());
can_sensor_reader_event_loop.set_name("CANSensorReader");
- CANSensorReader can_sensor_reader(&can_sensor_reader_event_loop);
-
- std::shared_ptr<Falcon> right_front = std::make_shared<Falcon>(
- 1, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
- std::shared_ptr<Falcon> right_back = std::make_shared<Falcon>(
- 2, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
- std::shared_ptr<Falcon> right_under = std::make_shared<Falcon>(
- 3, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
- std::shared_ptr<Falcon> left_front = std::make_shared<Falcon>(
- 4, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
- std::shared_ptr<Falcon> left_back = std::make_shared<Falcon>(
- 5, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
- std::shared_ptr<Falcon> left_under = std::make_shared<Falcon>(
- 6, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
- std::shared_ptr<Falcon> roller = std::make_shared<Falcon>(
- 13, "Drivetrain Bus", can_sensor_reader.get_signals_registry());
+ CANSensorReader can_sensor_reader(&can_sensor_reader_event_loop,
+ std::move(signals_registry));
can_sensor_reader.set_falcons(right_front, right_back, right_under,
left_front, left_back, left_under, roller);
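The shape of this refactor: each Falcon appends its status signals to a
caller-owned vector, which is then moved into the single reader that waits
on all of them, so the signal count no longer needs to be known at compile
time. A minimal sketch of the pattern (toy types, not the CTRE API):

  #include <utility>
  #include <vector>

  struct Signal {};

  // Each producer registers its signals at construction time.
  struct Producer {
    explicit Producer(std::vector<Signal *> *registry) {
      registry->push_back(&signal_);
    }
    Signal signal_;
  };

  // The reader takes ownership of the completed registry.
  class Reader {
   public:
    explicit Reader(std::vector<Signal *> signals)
        : signals_(std::move(signals)) {}

   private:
    const std::vector<Signal *> signals_;
  };

  void Example() {
    std::vector<Signal *> registry;
    Producer p1(&registry), p2(&registry);
    Reader reader(std::move(registry));
  }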
diff --git a/y2023/y2023_roborio.json b/y2023/y2023_roborio.json
index cf3a14e..452a570 100644
--- a/y2023/y2023_roborio.json
+++ b/y2023/y2023_roborio.json
@@ -268,6 +268,14 @@
"max_size": 448
},
{
+ "name": "/can",
+ "type": "frc971.can_logger.CanFrame",
+ "source_node": "roborio",
+ "frequency": 6000,
+ "num_senders": 2,
+ "max_size": 200
+ },
+ {
"name": "/drivetrain",
"type": "y2023.control_loops.drivetrain.CANPosition",
"source_node": "roborio",
@@ -566,6 +574,13 @@
"nodes": [
"roborio"
]
+ },
+ {
+ "name": "can_logger",
+ "executable_name": "can_logger",
+ "nodes": [
+ "roborio"
+ ]
}
],
"maps": [