Solver with turret fixes and visualization
Generally converges well, but is still sensitive to some parameters
In addition to the turret fixes and visualization, significant changes include:
-- Added quaternion parameterization for pivot_to_imu (see sketch below)
-- Filtering out bad IMU readings (zero or missing gyro/accel values)
-- Scaling of the rotation error (residual) to balance the translation error
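A condensed sketch of these two solver changes, using the names from
extrinsics_calibration.cc in this patch (quaternion_local_parameterization
is assumed to be the ceres::EigenQuaternionParameterization already used
for the other quaternion parameters):

  // Keep pivot_to_imu on the unit-quaternion manifold while solving
  problem.SetParameterization(
      calibration_parameters->pivot_to_imu.coeffs().data(),
      quaternion_local_parameterization);

  // Rotation residuals are bounded (unit-quaternion vector part), so scale
  // them up to balance against the translation residuals
  double ang_error_scale = 5.0;
  for (size_t i = 0; i < filter.num_errors(); ++i) {
    residual[3 * i + 0] = ang_error_scale * filter.errorx(i);
    residual[3 * i + 1] = ang_error_scale * filter.errory(i);
    residual[3 * i + 2] = ang_error_scale * filter.errorz(i);
  }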
The visualization tool is really just a set of helper routines that plot
coordinate axes from 3D space, projected into a 2D virtual camera image
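Minimal usage of the helpers, condensed from visualize_robot_sample.cc in
this patch (camera_mat is the synthetic 3x3 intrinsic matrix and
dist_coeffs the zero-distortion vector built there):

  VisualizeRobot vis_robot;
  cv::Mat image_mat = cv::Mat::zeros(cv::Size(500, 500), CV_8UC3);
  vis_robot.SetImage(image_mat);

  // Virtual viewpoint 10 m above the origin, looking straight down
  Eigen::Affine3d camera_viewpoint =
      Eigen::Translation3d(0, 0, 10.0) *
      Eigen::AngleAxisd(M_PI, Eigen::Vector3d::UnitX());
  vis_robot.SetViewpoint(camera_viewpoint);
  vis_robot.SetCameraParameters(camera_mat);
  vis_robot.SetDistortionCoefficients(dist_coeffs);

  // Axes are drawn (x, y, z) -> (red, green, blue)
  vis_robot.DrawFrameAxes(Eigen::Affine3d::Identity(), "world");
  cv::imshow("Display", image_mat);
  cv::waitKey();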
Change-Id: I03a6939b6b12df4b8116c30c617a1595781fe635
Signed-off-by: Jim Ostrowski <yimmy13@gmail.com>
diff --git a/frc971/vision/BUILD b/frc971/vision/BUILD
index 7872926..8c35711 100644
--- a/frc971/vision/BUILD
+++ b/frc971/vision/BUILD
@@ -79,6 +79,7 @@
"//aos/events/logging:log_reader",
"//frc971/analysis:in_process_plotter",
"//frc971/control_loops/drivetrain:improved_down_estimator",
+ "//frc971/vision:visualize_robot",
"//frc971/wpilib:imu_batch_fbs",
"//frc971/wpilib:imu_fbs",
"//third_party:opencv",
@@ -143,3 +144,36 @@
"//aos/testing:googletest",
],
)
+
+cc_library(
+ name = "visualize_robot",
+ srcs = [
+ "visualize_robot.cc",
+ ],
+ hdrs = [
+ "visualize_robot.h",
+ ],
+ deps = [
+ "//aos:init",
+ "//third_party:opencv",
+ "@com_google_absl//absl/strings:str_format",
+ "@org_tuxfamily_eigen//:eigen",
+ ],
+)
+
+cc_binary(
+ name = "visualize_robot_sample",
+ srcs = [
+ "visualize_robot_sample.cc",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//visibility:public"],
+ deps = [
+ "//aos:init",
+ "//frc971/vision:visualize_robot",
+ "//third_party:opencv",
+ "@com_github_google_glog//:glog",
+ "@com_google_ceres_solver//:ceres",
+ "@org_tuxfamily_eigen//:eigen",
+ ],
+)
diff --git a/frc971/vision/calibration_accumulator.cc b/frc971/vision/calibration_accumulator.cc
index ac1946c..8125e5e 100644
--- a/frc971/vision/calibration_accumulator.cc
+++ b/frc971/vision/calibration_accumulator.cc
@@ -1,20 +1,21 @@
#include "frc971/vision/calibration_accumulator.h"
-#include <opencv2/aruco/charuco.hpp>
-#include <opencv2/calib3d.hpp>
-#include <opencv2/features2d.hpp>
-#include <opencv2/highgui/highgui.hpp>
-#include <opencv2/imgproc.hpp>
-
#include "Eigen/Dense"
#include "aos/events/simulated_event_loop.h"
#include "aos/time/time.h"
#include "frc971/control_loops/quaternion_utils.h"
-#include "frc971/wpilib/imu_batch_generated.h"
#include "frc971/vision/charuco_lib.h"
+#include "frc971/wpilib/imu_batch_generated.h"
+
+#include <algorithm>
+#include <limits>
+#include <opencv2/highgui/highgui.hpp>
DEFINE_bool(display_undistorted, false,
"If true, display the undistorted image.");
+DEFINE_string(save_path, "", "Where to store annotated images");
+DEFINE_bool(save_valid_only, false,
+ "If true, only save images with valid pose estimates");
namespace frc971 {
namespace vision {
@@ -27,59 +28,83 @@
void CalibrationData::AddCameraPose(
distributed_clock::time_point distributed_now, Eigen::Vector3d rvec,
Eigen::Vector3d tvec) {
- // Always start with IMU reading...
- if (!imu_points_.empty() && imu_points_[0].first < distributed_now) {
+ // Always start with IMU (or turret) reading...
+ // Note that we may not have a turret, so we need to handle that case.
+ // If we later get a turret point, AddTurret handles removing the camera
+ // points from before it
+ if ((!imu_points_.empty() && imu_points_[0].first < distributed_now) &&
+ (turret_points_.empty() || turret_points_[0].first < distributed_now)) {
rot_trans_points_.emplace_back(distributed_now, std::make_pair(rvec, tvec));
}
}
void CalibrationData::AddImu(distributed_clock::time_point distributed_now,
Eigen::Vector3d gyro, Eigen::Vector3d accel) {
- imu_points_.emplace_back(distributed_now, std::make_pair(gyro, accel));
+ double zero_threshold = 1e-12;
+ // We seem to be getting 0 readings from the IMU, so treat them as fatal
+ // TODO<Jim>: I think this has been resolved in HandleIMU, but want to leave
+ // this here just in case there are other ways this could happen
+ if ((fabs(accel(0)) < zero_threshold) && (fabs(accel(1)) < zero_threshold) &&
+ (fabs(accel(2)) < zero_threshold)) {
+ LOG(FATAL) << "Ignoring zero value from IMU accelerometer: " << accel
+ << " (gyro is " << gyro << ")";
+ } else {
+ imu_points_.emplace_back(distributed_now, std::make_pair(gyro, accel));
+ }
}
void CalibrationData::AddTurret(
aos::distributed_clock::time_point distributed_now, Eigen::Vector2d state) {
- // We want the turret to be known too when solving. But, we don't know if we
- // are going to have a turret until we get the first reading. In that case,
- // blow away any camera readings from before.
- while (!rot_trans_points_.empty() &&
- rot_trans_points_[0].first < distributed_now) {
- rot_trans_points_.erase(rot_trans_points_.begin());
+ // We want the turret to be known too when solving. But, we don't know if
+ // we are going to have a turret until we get the first reading. In that
+ // case, blow away any camera readings from before.
+ // NOTE: Since the IMU motion is independent of the turret position, we don't
+ // need to remove the IMU readings before the turret
+ if (turret_points_.empty()) {
+ while (!rot_trans_points_.empty() &&
+ rot_trans_points_[0].first < distributed_now) {
+ LOG(INFO) << "Erasing, distributed " << distributed_now;
+ rot_trans_points_.erase(rot_trans_points_.begin());
+ }
}
turret_points_.emplace_back(distributed_now, state);
}
void CalibrationData::ReviewData(CalibrationDataObserver *observer) const {
- size_t next_imu_point = 0;
size_t next_camera_point = 0;
- while (true) {
- if (next_imu_point != imu_points_.size()) {
- // There aren't that many combinations, so just brute force them all
- // rather than being too clever.
- if (next_camera_point != rot_trans_points_.size()) {
- if (imu_points_[next_imu_point].first >
- rot_trans_points_[next_camera_point].first) {
- // Camera!
- observer->UpdateCamera(rot_trans_points_[next_camera_point].first,
- rot_trans_points_[next_camera_point].second);
- ++next_camera_point;
- } else {
- // IMU!
- observer->UpdateIMU(imu_points_[next_imu_point].first,
- imu_points_[next_imu_point].second);
- ++next_imu_point;
- }
+ size_t next_imu_point = 0;
+ size_t next_turret_point = 0;
+
+ // Just go until one of the data streams runs out. We lose a few points, but
+ // it makes the logic much easier
+ while (
+ next_camera_point != rot_trans_points_.size() &&
+ next_imu_point != imu_points_.size() &&
+ (turret_points_.empty() || next_turret_point != turret_points_.size())) {
+ // If camera_point is next, update it
+ if ((rot_trans_points_[next_camera_point].first <=
+ imu_points_[next_imu_point].first) &&
+ (turret_points_.empty() ||
+ (rot_trans_points_[next_camera_point].first <=
+ turret_points_[next_turret_point].first))) {
+ // Camera!
+ observer->UpdateCamera(rot_trans_points_[next_camera_point].first,
+ rot_trans_points_[next_camera_point].second);
+ ++next_camera_point;
+ } else {
+ // If it's not the camera, check if IMU is next
+ if (turret_points_.empty() || (imu_points_[next_imu_point].first <=
+ turret_points_[next_turret_point].first)) {
+ // IMU!
+ observer->UpdateIMU(imu_points_[next_imu_point].first,
+ imu_points_[next_imu_point].second);
+ ++next_imu_point;
} else {
- if (next_camera_point != rot_trans_points_.size()) {
- // Camera!
- observer->UpdateCamera(rot_trans_points_[next_camera_point].first,
- rot_trans_points_[next_camera_point].second);
- ++next_camera_point;
- } else {
- // Nothing left for either list of points, so we are done.
- break;
- }
+ // If it's not IMU or camera, and turret_points is not empty, it must be
+ // the turret!
+ observer->UpdateTurret(turret_points_[next_turret_point].first,
+ turret_points_[next_turret_point].second);
+ ++next_turret_point;
}
}
}
@@ -107,8 +132,24 @@
data_(data) {
imu_factory_->OnShutdown([]() { cv::destroyAllWindows(); });
+ // Check for the IMUValuesBatch topic on either the /localizer or
+ // /drivetrain channel, since both are valid/possible
+ std::string imu_channel;
+ if (imu_event_loop->HasChannel<frc971::IMUValuesBatch>("/localizer")) {
+ imu_channel = "/localizer";
+ } else if (imu_event_loop->HasChannel<frc971::IMUValuesBatch>(
+ "/drivetrain")) {
+ imu_channel = "/drivetrain";
+ } else {
+ LOG(FATAL) << "Couldn't find channel with IMU data for either localizer or "
+ "drivtrain";
+ }
+
+ VLOG(2) << "Listening for " << frc971::IMUValuesBatch::GetFullyQualifiedName()
+ << " on channel: " << imu_channel;
+
imu_event_loop_->MakeWatcher(
- "/drivetrain", [this](const frc971::IMUValuesBatch &imu) {
+ imu_channel, [this](const frc971::IMUValuesBatch &imu) {
if (!imu.has_readings()) {
return;
}
@@ -128,19 +169,6 @@
data_->AddCameraPose(image_factory_->ToDistributedClock(eof), rvec_eigen,
tvec_eigen);
- // Z -> up
- // Y -> away from cameras 2 and 3
- // X -> left
- Eigen::Vector3d imu(last_value_.accelerometer_x,
- last_value_.accelerometer_y,
- last_value_.accelerometer_z);
-
- Eigen::Quaternion<double> imu_to_camera(
- Eigen::AngleAxisd(-0.5 * M_PI, Eigen::Vector3d::UnitX()));
-
- Eigen::Quaternion<double> board_to_world(
- Eigen::AngleAxisd(0.5 * M_PI, Eigen::Vector3d::UnitX()));
-
Eigen::IOFormat HeavyFmt(Eigen::FullPrecision, 0, ", ", ",\n", "[", "]",
"[", "]");
@@ -148,9 +176,9 @@
std::chrono::duration_cast<std::chrono::duration<double>>(
image_event_loop_->monotonic_now() - eof)
.count();
- LOG(INFO) << std::fixed << std::setprecision(6) << "Age: " << age_double
- << ", Pose is R:" << rvec_eigen.transpose().format(HeavyFmt)
- << " T:" << tvec_eigen.transpose().format(HeavyFmt);
+ VLOG(1) << std::fixed << std::setprecision(6) << "Age: " << age_double
+ << ", Pose is R:" << rvec_eigen.transpose().format(HeavyFmt)
+ << "\nT:" << tvec_eigen.transpose().format(HeavyFmt);
}
cv::imshow("Display", rgb_image);
@@ -164,10 +192,28 @@
cv::imshow("Display undist", undistorted_rgb_image);
}
+
+ if (FLAGS_save_path != "") {
+ if (!FLAGS_save_valid_only || valid) {
+ static int img_count = 0;
+ std::string image_name = absl::StrFormat("/img_%06d.png", img_count);
+ std::string path = FLAGS_save_path + image_name;
+ VLOG(2) << "Saving image to " << path;
+ cv::imwrite(path, rgb_image);
+ img_count++;
+ }
+ }
}
void Calibration::HandleIMU(const frc971::IMUValues *imu) {
- VLOG(1) << "IMU " << imu;
+ // Need to check for valid values, since we sometimes don't get them
+ if (!imu->has_gyro_x() || !imu->has_gyro_y() || !imu->has_gyro_z() ||
+ !imu->has_accelerometer_x() || !imu->has_accelerometer_y() ||
+ !imu->has_accelerometer_z()) {
+ return;
+ }
+
+ VLOG(2) << "IMU " << imu;
imu->UnPackTo(&last_value_);
Eigen::Vector3d gyro(last_value_.gyro_x, last_value_.gyro_y,
last_value_.gyro_z);
diff --git a/frc971/vision/calibration_accumulator.h b/frc971/vision/calibration_accumulator.h
index e4f9c8a..cb5609b 100644
--- a/frc971/vision/calibration_accumulator.h
+++ b/frc971/vision/calibration_accumulator.h
@@ -56,7 +56,9 @@
size_t camera_samples_size() const { return rot_trans_points_.size(); }
- size_t turret_samples() const { return turret_points_.size(); }
+ size_t imu_samples_size() const { return imu_points_.size(); }
+
+ size_t turret_samples_size() const { return turret_points_.size(); }
private:
std::vector<std::pair<aos::distributed_clock::time_point,
diff --git a/frc971/vision/charuco_lib.cc b/frc971/vision/charuco_lib.cc
index fde5394..1398af5 100644
--- a/frc971/vision/charuco_lib.cc
+++ b/frc971/vision/charuco_lib.cc
@@ -277,8 +277,16 @@
cv::aruco::drawAxis(rgb_image, camera_matrix_, dist_coeffs_, rvec,
tvec, 0.1);
}
+ std::stringstream ss;
+ ss << "tvec = " << tvec << "; |t| = " << tvec_eigen.norm();
+ cv::putText(rgb_image, ss.str(), cv::Point(10, 25),
+ cv::FONT_HERSHEY_PLAIN, 1.0, cv::Scalar(255, 255, 255));
+ ss.str("");
+ ss << "rvec = " << rvec;
+ cv::putText(rgb_image, ss.str(), cv::Point(10, 50),
+ cv::FONT_HERSHEY_PLAIN, 1.0, cv::Scalar(255, 255, 255));
} else {
- LOG(INFO) << "Age: " << age_double << ", invalid pose";
+ VLOG(2) << "Age: " << age_double << ", invalid pose";
}
} else {
VLOG(2) << "Age: " << age_double << ", not enough charuco IDs, got "
diff --git a/frc971/vision/extrinsics_calibration.cc b/frc971/vision/extrinsics_calibration.cc
index a71f14d..cd6d183 100644
--- a/frc971/vision/extrinsics_calibration.cc
+++ b/frc971/vision/extrinsics_calibration.cc
@@ -6,6 +6,13 @@
#include "frc971/control_loops/runge_kutta.h"
#include "frc971/vision/calibration_accumulator.h"
#include "frc971/vision/charuco_lib.h"
+#include "frc971/vision/visualize_robot.h"
+
+#include <opencv2/core.hpp>
+#include <opencv2/core/eigen.hpp>
+#include <opencv2/highgui.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/imgproc.hpp>
namespace frc971 {
namespace vision {
@@ -79,17 +86,9 @@
virtual void ObserveCameraUpdate(
distributed_clock::time_point /*t*/,
Eigen::Vector3d /*board_to_camera_rotation*/,
+ Eigen::Vector3d /*board_to_camera_translation*/,
Eigen::Quaternion<Scalar> /*imu_to_world_rotation*/,
- Affine3s /*imu_to_world*/) {}
-
- void UpdateTurret(distributed_clock::time_point t,
- Eigen::Vector2d state) override {
- state_ = state;
- state_time_ = t;
- }
-
- Eigen::Vector2d state_ = Eigen::Vector2d::Zero();
- distributed_clock::time_point state_time_ = distributed_clock::min_time;
+ Affine3s /*imu_to_world*/, double /*turret_angle*/) {}
// Observes a camera measurement by applying a kalman filter correction and
// accumulating up the error associated with the step.
@@ -100,8 +99,9 @@
const double pivot_angle =
state_time_ == distributed_clock::min_time
? 0.0
- : state_(0) +
- state_(1) * chrono::duration<double>(t - state_time_).count();
+ : turret_state_(0) +
+ turret_state_(1) *
+ chrono::duration<double>(t - state_time_).count();
const Eigen::Quaternion<Scalar> board_to_camera_rotation(
frc971::controls::ToQuaternionFromRotationVector(rt.first)
@@ -143,9 +143,12 @@
H(2, 2) = static_cast<Scalar>(1.0);
const Eigen::Matrix<Scalar, 3, 1> y = z - H * x_hat_;
+ // TODO<Jim>: Need to understand dependence on this; solutions vary by 20cm
+ // when changing from 0.01 -> 0.1
+ double obs_noise_var = ::std::pow(0.01, 2);
const Eigen::Matrix<double, 3, 3> R =
- (::Eigen::DiagonalMatrix<double, 3>().diagonal() << ::std::pow(0.01, 2),
- ::std::pow(0.01, 2), ::std::pow(0.01, 2))
+ (::Eigen::DiagonalMatrix<double, 3>().diagonal() << obs_noise_var,
+ obs_noise_var, obs_noise_var)
.finished()
.asDiagonal();
@@ -163,7 +166,8 @@
Eigen::Matrix<Scalar, 3, 1>(error.x(), error.y(), error.z()));
position_errors_.emplace_back(y);
- ObserveCameraUpdate(t, rt.first, imu_to_world_rotation, imu_to_world);
+ ObserveCameraUpdate(t, rt.first, rt.second, imu_to_world_rotation,
+ imu_to_world, pivot_angle);
}
virtual void ObserveIMUUpdate(
@@ -179,7 +183,22 @@
ObserveIMUUpdate(t, wa);
}
+ virtual void ObserveTurretUpdate(distributed_clock::time_point /*t*/,
+ Eigen::Vector2d /*turret_state*/) {}
+
+ void UpdateTurret(distributed_clock::time_point t,
+ Eigen::Vector2d state) override {
+ turret_state_ = state;
+ state_time_ = t;
+
+ ObserveTurretUpdate(t, state);
+ }
+
+ Eigen::Vector2d turret_state_ = Eigen::Vector2d::Zero();
+ distributed_clock::time_point state_time_ = distributed_clock::min_time;
+
const Eigen::Quaternion<Scalar> &orientation() const { return orientation_; }
+ const Eigen::Matrix<Scalar, 6, 1> &get_x_hat() const { return x_hat_; }
size_t num_errors() const { return errors_.size(); }
Scalar errorx(size_t i) const { return errors_[i].x(); }
@@ -373,73 +392,253 @@
vz.emplace_back(x_hat(5));
}
+ // TODO<Jim>: Could probably still do a bit more work on naming
+ // conventions and what is being shown here
frc971::analysis::Plotter plotter;
- plotter.AddFigure("position");
- plotter.AddLine(times_, rx, "x_hat(0)");
- plotter.AddLine(times_, ry, "x_hat(1)");
- plotter.AddLine(times_, rz, "x_hat(2)");
- plotter.AddLine(camera_times_, camera_x_, "Camera x");
- plotter.AddLine(camera_times_, camera_y_, "Camera y");
- plotter.AddLine(camera_times_, camera_z_, "Camera z");
- plotter.AddLine(camera_times_, camera_error_x_, "Camera error x");
- plotter.AddLine(camera_times_, camera_error_y_, "Camera error y");
- plotter.AddLine(camera_times_, camera_error_z_, "Camera error z");
+ plotter.AddFigure("bot (imu) position");
+ plotter.AddLine(times_, x, "x_hat(0)");
+ plotter.AddLine(times_, y, "x_hat(1)");
+ plotter.AddLine(times_, z, "x_hat(2)");
plotter.Publish();
- plotter.AddFigure("error");
- plotter.AddLine(times_, rx, "x_hat(0)");
- plotter.AddLine(times_, ry, "x_hat(1)");
- plotter.AddLine(times_, rz, "x_hat(2)");
- plotter.AddLine(camera_times_, camera_error_x_, "Camera error x");
- plotter.AddLine(camera_times_, camera_error_y_, "Camera error y");
- plotter.AddLine(camera_times_, camera_error_z_, "Camera error z");
+ plotter.AddFigure("bot (imu) rotation");
+ plotter.AddLine(camera_times_, imu_rot_x_, "bot (imu) rot x");
+ plotter.AddLine(camera_times_, imu_rot_y_, "bot (imu) rot y");
+ plotter.AddLine(camera_times_, imu_rot_z_, "bot (imu) rot z");
+ plotter.Publish();
+
+ plotter.AddFigure("rotation error");
+ plotter.AddLine(camera_times_, rotation_error_x_, "Error x");
+ plotter.AddLine(camera_times_, rotation_error_y_, "Error y");
+ plotter.AddLine(camera_times_, rotation_error_z_, "Error z");
+ plotter.Publish();
+
+ plotter.AddFigure("translation error");
+ plotter.AddLine(camera_times_, translation_error_x_, "Error x");
+ plotter.AddLine(camera_times_, translation_error_y_, "Error y");
+ plotter.AddLine(camera_times_, translation_error_z_, "Error z");
plotter.Publish();
plotter.AddFigure("imu");
- plotter.AddLine(camera_times_, world_gravity_x_, "world_gravity(0)");
- plotter.AddLine(camera_times_, world_gravity_y_, "world_gravity(1)");
- plotter.AddLine(camera_times_, world_gravity_z_, "world_gravity(2)");
- plotter.AddLine(imu_times_, imu_x_, "imu x");
- plotter.AddLine(imu_times_, imu_y_, "imu y");
- plotter.AddLine(imu_times_, imu_z_, "imu z");
- plotter.AddLine(times_, rx, "rotation x");
- plotter.AddLine(times_, ry, "rotation y");
- plotter.AddLine(times_, rz, "rotation z");
+ plotter.AddLine(imu_times_, imu_rate_x_, "imu gyro x");
+ plotter.AddLine(imu_times_, imu_rate_y_, "imu gyro y");
+ plotter.AddLine(imu_times_, imu_rate_z_, "imu gyro z");
+ plotter.AddLine(imu_times_, imu_accel_x_, "imu accel x");
+ plotter.AddLine(imu_times_, imu_accel_y_, "imu accel y");
+ plotter.AddLine(imu_times_, imu_accel_z_, "imu accel z");
+ plotter.AddLine(camera_times_, accel_minus_gravity_x_,
+ "accel_minus_gravity(0)");
+ plotter.AddLine(camera_times_, accel_minus_gravity_y_,
+ "accel_minus_gravity(1)");
+ plotter.AddLine(camera_times_, accel_minus_gravity_z_,
+ "accel_minus_gravity(2)");
plotter.Publish();
- plotter.AddFigure("raw");
- plotter.AddLine(imu_times_, imu_x_, "imu x");
- plotter.AddLine(imu_times_, imu_y_, "imu y");
- plotter.AddLine(imu_times_, imu_z_, "imu z");
- plotter.AddLine(imu_times_, imu_ratex_, "omega x");
- plotter.AddLine(imu_times_, imu_ratey_, "omega y");
- plotter.AddLine(imu_times_, imu_ratez_, "omega z");
- plotter.AddLine(camera_times_, raw_camera_x_, "Camera x");
- plotter.AddLine(camera_times_, raw_camera_y_, "Camera y");
- plotter.AddLine(camera_times_, raw_camera_z_, "Camera z");
+ plotter.AddFigure("raw camera observations");
+ plotter.AddLine(camera_times_, raw_camera_rot_x_, "Camera rot x");
+ plotter.AddLine(camera_times_, raw_camera_rot_y_, "Camera rot y");
+ plotter.AddLine(camera_times_, raw_camera_rot_z_, "Camera rot z");
+ plotter.AddLine(camera_times_, raw_camera_trans_x_, "Camera trans x");
+ plotter.AddLine(camera_times_, raw_camera_trans_y_, "Camera trans y");
+ plotter.AddLine(camera_times_, raw_camera_trans_z_, "Camera trans z");
plotter.Publish();
- plotter.AddFigure("xyz vel");
- plotter.AddLine(times_, x, "x");
- plotter.AddLine(times_, y, "y");
- plotter.AddLine(times_, z, "z");
+ plotter.AddFigure("xyz pos, vel estimates");
+ plotter.AddLine(times_, x, "x (x_hat(0))");
+ plotter.AddLine(times_, y, "y (x_hat(1))");
+ plotter.AddLine(times_, z, "z (x_hat(2))");
plotter.AddLine(times_, vx, "vx");
plotter.AddLine(times_, vy, "vy");
plotter.AddLine(times_, vz, "vz");
- plotter.AddLine(camera_times_, camera_position_x_, "Camera x");
- plotter.AddLine(camera_times_, camera_position_y_, "Camera y");
- plotter.AddLine(camera_times_, camera_position_z_, "Camera z");
+ plotter.AddLine(camera_times_, imu_position_x_, "x pos from board");
+ plotter.AddLine(camera_times_, imu_position_y_, "y pos from board");
+ plotter.AddLine(camera_times_, imu_position_z_, "z pos from board");
plotter.Publish();
+ // If we've got 'em, plot 'em
+ if (turret_times_.size() > 0) {
+ plotter.AddFigure("Turret angle");
+ plotter.AddLine(turret_times_, turret_angles_, "turret angle");
+ plotter.Publish();
+ }
+
plotter.Spin();
}
+ void Visualize(const CalibrationParameters &calibration_parameters) {
+ // Set up virtual camera for visualization
+ VisualizeRobot vis_robot;
+
+ // Set virtual viewing point 10 meters above the origin, rotated so the
+ // camera faces straight down
+ Eigen::Translation3d camera_trans(0, 0, 10.0);
+ Eigen::AngleAxisd camera_rot(M_PI, Eigen::Vector3d::UnitX());
+ Eigen::Affine3d camera_viewpoint = camera_trans * camera_rot;
+ vis_robot.SetViewpoint(camera_viewpoint);
+
+ // Create virtual camera with principal point at the image center, and a
+ // focal length suitable to fit the robot visualization fully in view
+ int image_width = 500;
+ double focal_length = 1000.0;
+ double intr[] = {focal_length, 0.0, image_width / 2.0,
+ 0.0, focal_length, image_width / 2.0,
+ 0.0, 0.0, 1.0};
+ cv::Mat camera_mat = cv::Mat(3, 3, CV_64FC1, intr);
+ cv::Mat dist_coeffs = cv::Mat(1, 5, CV_64F, 0.0);
+ vis_robot.SetCameraParameters(camera_mat);
+ vis_robot.SetDistortionCoefficients(dist_coeffs);
+
+ /*
+ // Draw an initial visualization
+ Eigen::Vector3d T_world_imu_vec =
+ calibration_parameters.initial_state.block<3, 1>(0, 0);
+ Eigen::Translation3d T_world_imu(T_world_imu_vec);
+ Eigen::Affine3d H_world_imu =
+ T_world_imu * calibration_parameters.initial_orientation;
+
+ vis_robot.DrawFrameAxes(H_world_imu, "imu");
+
+ Eigen::Quaterniond R_imu_pivot(calibration_parameters.pivot_to_imu);
+ Eigen::Translation3d T_imu_pivot(
+ calibration_parameters.pivot_to_imu_translation);
+ Eigen::Affine3d H_imu_pivot = T_imu_pivot * R_imu_pivot;
+ Eigen::Affine3d H_world_pivot = H_world_imu * H_imu_pivot;
+ vis_robot.DrawFrameAxes(H_world_pivot, "pivot");
+
+ Eigen::Affine3d H_imupivot_camerapivot(
+ Eigen::AngleAxisd(1.5, Eigen::Vector3d::UnitZ()));
+ Eigen::Quaterniond R_camera_pivot(calibration_parameters.pivot_to_camera);
+ Eigen::Translation3d T_camera_pivot(
+ calibration_parameters.pivot_to_camera_translation);
+ Eigen::Affine3d H_camera_pivot = T_camera_pivot * R_camera_pivot;
+ Eigen::Affine3d H_world_camera = H_world_imu * H_imu_pivot *
+ H_imupivot_camerapivot *
+ H_camera_pivot.inverse();
+ vis_robot.DrawFrameAxes(H_world_camera, "camera");
+
+ cv::imshow("Original poses", image_mat);
+ cv::waitKey();
+ */
+
+ uint current_state_index = 0;
+ uint current_turret_index = 0;
+ for (size_t i = 0; i + 1 < camera_times_.size(); i++) {
+ // reset image each frame
+ cv::Mat image_mat =
+ cv::Mat::zeros(cv::Size(image_width, image_width), CV_8UC3);
+ vis_robot.SetImage(image_mat);
+
+ // Jump to state closest to current camera_time
+ while (current_state_index < times_.size() &&
+ camera_times_[i] > times_[current_state_index]) {
+ current_state_index++;
+ }
+
+ // H_world_imu: map from world origin to imu (robot) frame
+ Eigen::Vector3d T_world_imu_vec =
+ x_hats_[current_state_index].block<3, 1>(0, 0);
+ Eigen::Translation3d T_world_imu(T_world_imu_vec);
+ Eigen::Affine3d H_world_imu =
+ T_world_imu * orientations_[current_state_index];
+
+ vis_robot.DrawFrameAxes(H_world_imu, "imu_kf");
+
+ // H_world_pivot: map from world origin to pivot point
+ // Do this via the imu (using H_world_pivot = H_world_imu * H_imu_pivot)
+ Eigen::Quaterniond R_imu_pivot(calibration_parameters.pivot_to_imu);
+ Eigen::Translation3d T_imu_pivot(
+ calibration_parameters.pivot_to_imu_translation);
+ Eigen::Affine3d H_imu_pivot = T_imu_pivot * R_imu_pivot;
+ Eigen::Affine3d H_world_pivot = H_world_imu * H_imu_pivot;
+ vis_robot.DrawFrameAxes(H_world_pivot, "pivot");
+
+ // Jump to turret sample closest to current camera_time
+ while (turret_times_.size() > 0 &&
+ current_turret_index < turret_times_.size() &&
+ camera_times_[i] > turret_times_[current_turret_index]) {
+ current_turret_index++;
+ }
+
+ // Draw the camera frame
+
+ Eigen::Affine3d H_imupivot_camerapivot(Eigen::Matrix4d::Identity());
+ if (turret_angles_.size() > 0) {
+ // Need to rotate by the turret angle in the middle of all this
+ H_imupivot_camerapivot = Eigen::Affine3d(Eigen::AngleAxisd(
+ turret_angles_[current_turret_index], Eigen::Vector3d::UnitZ()));
+ }
+
+ // H_world_camera: map from world origin to camera frame
+ // via imu -> pivot -> turret-rotated pivot -> camera
+ Eigen::Quaterniond R_camera_pivot(calibration_parameters.pivot_to_camera);
+ Eigen::Translation3d T_camera_pivot(
+ calibration_parameters.pivot_to_camera_translation);
+ Eigen::Affine3d H_camera_pivot = T_camera_pivot * R_camera_pivot;
+ Eigen::Affine3d H_world_camera = H_world_imu * H_imu_pivot *
+ H_imupivot_camerapivot *
+ H_camera_pivot.inverse();
+ vis_robot.DrawFrameAxes(H_world_camera, "camera");
+
+ // H_world_board: board location from world reference frame
+ // Uses the estimate from camera -> board, on top of H_world_camera
+ Eigen::Quaterniond R_camera_board(
+ frc971::controls::ToQuaternionFromRotationVector(
+ board_to_camera_rotations_[i]));
+ Eigen::Translation3d T_camera_board(board_to_camera_translations_[i]);
+ Eigen::Affine3d H_camera_board = T_camera_board * R_camera_board;
+ Eigen::Affine3d H_world_board = H_world_camera * H_camera_board;
+
+ vis_robot.DrawFrameAxes(H_world_board, "board est");
+
+ // H_world_board_solve: board in world frame based on solver
+ // Find world -> board via solved parameter of H_world_board
+ // (parameter "board_to_world" and assuming origin of board frame is
+ // coincident with origin of world frame, i.e., T_world_board == 0)
+ Eigen::Quaterniond R_world_board_solve(
+ calibration_parameters.board_to_world);
+ Eigen::Translation3d T_world_board_solve(Eigen::Vector3d(0, 0, 0));
+ Eigen::Affine3d H_world_board_solve =
+ T_world_board_solve * R_world_board_solve;
+
+ vis_robot.DrawFrameAxes(H_world_board_solve, "board_solve");
+
+ // H_world_imu_from_board: imu location in world frame, via the board
+ // Determine the imu location via the board_to_world solved
+ // transformation
+ Eigen::Affine3d H_world_imu_from_board =
+ H_world_board_solve * H_camera_board.inverse() * H_camera_pivot *
+ H_imupivot_camerapivot.inverse() * H_imu_pivot.inverse();
+
+ vis_robot.DrawFrameAxes(H_world_imu_from_board, "imu_board");
+
+ // These errors should match up with the residuals in the optimizer
+ // (Note: rotation seems to differ by sign, but that's OK in residual)
+ Eigen::Affine3d error = H_world_imu_from_board.inverse() * H_world_imu;
+ Eigen::Vector3d trans_error =
+ H_world_imu_from_board.translation() - H_world_imu.translation();
+ Eigen::Quaterniond error_rot(error.rotation());
+ VLOG(1) << "Error: \n"
+ << "Rotation: " << error_rot.coeffs().transpose() << "\n"
+ << "Translation: " << trans_error.transpose();
+
+ cv::imshow("Live", image_mat);
+ cv::waitKey(50);
+
+ if (i % 200 == 0) {
+ LOG(INFO) << "Pausing at step " << i;
+ cv::waitKey();
+ }
+ }
+ LOG(INFO) << "Finished visualizing robot. Press any key to continue";
+ cv::waitKey();
+ }
+
void ObserveIntegrated(distributed_clock::time_point t,
Eigen::Matrix<double, 6, 1> x_hat,
Eigen::Quaternion<double> orientation,
Eigen::Matrix<double, 6, 6> p) override {
- VLOG(1) << t << " -> " << p;
- VLOG(1) << t << " xhat -> " << x_hat.transpose();
+ VLOG(2) << t << " -> " << p;
+ VLOG(2) << t << " xhat -> " << x_hat.transpose();
times_.emplace_back(chrono::duration<double>(t.time_since_epoch()).count());
x_hats_.emplace_back(x_hat);
orientations_.emplace_back(orientation);
@@ -448,83 +647,126 @@
void ObserveIMUUpdate(
distributed_clock::time_point t,
std::pair<Eigen::Vector3d, Eigen::Vector3d> wa) override {
- imu_times_.emplace_back(chrono::duration<double>(t.time_since_epoch()).count());
- imu_ratex_.emplace_back(wa.first.x());
- imu_ratey_.emplace_back(wa.first.y());
- imu_ratez_.emplace_back(wa.first.z());
- imu_x_.emplace_back(wa.second.x());
- imu_y_.emplace_back(wa.second.y());
- imu_z_.emplace_back(wa.second.z());
+ imu_times_.emplace_back(
+ chrono::duration<double>(t.time_since_epoch()).count());
+ imu_rate_x_.emplace_back(wa.first.x());
+ imu_rate_y_.emplace_back(wa.first.y());
+ imu_rate_z_.emplace_back(wa.first.z());
+ imu_accel_x_.emplace_back(wa.second.x());
+ imu_accel_y_.emplace_back(wa.second.y());
+ imu_accel_z_.emplace_back(wa.second.z());
last_accel_ = wa.second;
}
void ObserveCameraUpdate(distributed_clock::time_point t,
Eigen::Vector3d board_to_camera_rotation,
+ Eigen::Vector3d board_to_camera_translation,
Eigen::Quaternion<double> imu_to_world_rotation,
- Eigen::Affine3d imu_to_world) override {
- raw_camera_x_.emplace_back(board_to_camera_rotation(0, 0));
- raw_camera_y_.emplace_back(board_to_camera_rotation(1, 0));
- raw_camera_z_.emplace_back(board_to_camera_rotation(2, 0));
+ Eigen::Affine3d imu_to_world,
+ double turret_angle) override {
+ board_to_camera_rotations_.emplace_back(board_to_camera_rotation);
+ board_to_camera_translations_.emplace_back(board_to_camera_translation);
- Eigen::Matrix<double, 3, 1> rotation_vector =
- frc971::controls::ToRotationVectorFromQuaternion(imu_to_world_rotation);
camera_times_.emplace_back(
chrono::duration<double>(t.time_since_epoch()).count());
- Eigen::Matrix<double, 3, 1> camera_error =
+ raw_camera_rot_x_.emplace_back(board_to_camera_rotation(0, 0));
+ raw_camera_rot_y_.emplace_back(board_to_camera_rotation(1, 0));
+ raw_camera_rot_z_.emplace_back(board_to_camera_rotation(2, 0));
+
+ raw_camera_trans_x_.emplace_back(board_to_camera_translation(0, 0));
+ raw_camera_trans_y_.emplace_back(board_to_camera_translation(1, 0));
+ raw_camera_trans_z_.emplace_back(board_to_camera_translation(2, 0));
+
+ Eigen::Matrix<double, 3, 1> rotation_vector =
+ frc971::controls::ToRotationVectorFromQuaternion(imu_to_world_rotation);
+ imu_rot_x_.emplace_back(rotation_vector(0, 0));
+ imu_rot_y_.emplace_back(rotation_vector(1, 0));
+ imu_rot_z_.emplace_back(rotation_vector(2, 0));
+
+ Eigen::Matrix<double, 3, 1> rotation_error =
frc971::controls::ToRotationVectorFromQuaternion(
imu_to_world_rotation.inverse() * orientation());
- camera_x_.emplace_back(rotation_vector(0, 0));
- camera_y_.emplace_back(rotation_vector(1, 0));
- camera_z_.emplace_back(rotation_vector(2, 0));
+ rotation_error_x_.emplace_back(rotation_error(0, 0));
+ rotation_error_y_.emplace_back(rotation_error(1, 0));
+ rotation_error_z_.emplace_back(rotation_error(2, 0));
- camera_error_x_.emplace_back(camera_error(0, 0));
- camera_error_y_.emplace_back(camera_error(1, 0));
- camera_error_z_.emplace_back(camera_error(2, 0));
+ Eigen::Matrix<double, 3, 1> imu_pos = get_x_hat().block<3, 1>(0, 0);
+ Eigen::Translation3d T_world_imu(imu_pos);
+ Eigen::Affine3d H_world_imu = T_world_imu * orientation();
+ Eigen::Affine3d H_error = imu_to_world.inverse() * H_world_imu;
- const Eigen::Vector3d world_gravity =
+ Eigen::Matrix<double, 3, 1> translation_error = H_error.translation();
+ translation_error_x_.emplace_back(translation_error(0, 0));
+ translation_error_y_.emplace_back(translation_error(1, 0));
+ translation_error_z_.emplace_back(translation_error(2, 0));
+
+ const Eigen::Vector3d accel_minus_gravity =
imu_to_world_rotation * last_accel_ -
Eigen::Vector3d(0, 0, kGravity) * gravity_scalar();
- const Eigen::Vector3d camera_position =
- imu_to_world * Eigen::Vector3d::Zero();
+ accel_minus_gravity_x_.emplace_back(accel_minus_gravity.x());
+ accel_minus_gravity_y_.emplace_back(accel_minus_gravity.y());
+ accel_minus_gravity_z_.emplace_back(accel_minus_gravity.z());
- world_gravity_x_.emplace_back(world_gravity.x());
- world_gravity_y_.emplace_back(world_gravity.y());
- world_gravity_z_.emplace_back(world_gravity.z());
+ const Eigen::Vector3d imu_position = imu_to_world * Eigen::Vector3d::Zero();
- camera_position_x_.emplace_back(camera_position.x());
- camera_position_y_.emplace_back(camera_position.y());
- camera_position_z_.emplace_back(camera_position.z());
+ imu_position_x_.emplace_back(imu_position.x());
+ imu_position_y_.emplace_back(imu_position.y());
+ imu_position_z_.emplace_back(imu_position.z());
+
+ turret_angles_from_camera_.emplace_back(turret_angle);
+ imu_to_world_save_.emplace_back(imu_to_world);
+ }
+
+ void ObserveTurretUpdate(distributed_clock::time_point t,
+ Eigen::Vector2d turret_state) override {
+ turret_times_.emplace_back(
+ chrono::duration<double>(t.time_since_epoch()).count());
+ turret_angles_.emplace_back(turret_state(0));
}
std::vector<double> camera_times_;
- std::vector<double> camera_x_;
- std::vector<double> camera_y_;
- std::vector<double> camera_z_;
- std::vector<double> raw_camera_x_;
- std::vector<double> raw_camera_y_;
- std::vector<double> raw_camera_z_;
- std::vector<double> camera_error_x_;
- std::vector<double> camera_error_y_;
- std::vector<double> camera_error_z_;
+ std::vector<double> imu_rot_x_;
+ std::vector<double> imu_rot_y_;
+ std::vector<double> imu_rot_z_;
+ std::vector<double> raw_camera_rot_x_;
+ std::vector<double> raw_camera_rot_y_;
+ std::vector<double> raw_camera_rot_z_;
+ std::vector<double> raw_camera_trans_x_;
+ std::vector<double> raw_camera_trans_y_;
+ std::vector<double> raw_camera_trans_z_;
+ std::vector<double> rotation_error_x_;
+ std::vector<double> rotation_error_y_;
+ std::vector<double> rotation_error_z_;
+ std::vector<double> translation_error_x_;
+ std::vector<double> translation_error_y_;
+ std::vector<double> translation_error_z_;
+ std::vector<Eigen::Vector3d> board_to_camera_rotations_;
+ std::vector<Eigen::Vector3d> board_to_camera_translations_;
- std::vector<double> world_gravity_x_;
- std::vector<double> world_gravity_y_;
- std::vector<double> world_gravity_z_;
- std::vector<double> imu_x_;
- std::vector<double> imu_y_;
- std::vector<double> imu_z_;
- std::vector<double> camera_position_x_;
- std::vector<double> camera_position_y_;
- std::vector<double> camera_position_z_;
+ std::vector<double> turret_angles_from_camera_;
+ std::vector<Eigen::Affine3d> imu_to_world_save_;
+
+ std::vector<double> imu_position_x_;
+ std::vector<double> imu_position_y_;
+ std::vector<double> imu_position_z_;
std::vector<double> imu_times_;
- std::vector<double> imu_ratex_;
- std::vector<double> imu_ratey_;
- std::vector<double> imu_ratez_;
+ std::vector<double> imu_rate_x_;
+ std::vector<double> imu_rate_y_;
+ std::vector<double> imu_rate_z_;
+ std::vector<double> accel_minus_gravity_x_;
+ std::vector<double> accel_minus_gravity_y_;
+ std::vector<double> accel_minus_gravity_z_;
+ std::vector<double> imu_accel_x_;
+ std::vector<double> imu_accel_y_;
+ std::vector<double> imu_accel_z_;
+
+ std::vector<double> turret_times_;
+ std::vector<double> turret_angles_;
std::vector<double> times_;
std::vector<Eigen::Matrix<double, 6, 1>> x_hats_;
@@ -549,6 +791,8 @@
const S *const pivot_to_imu_translation_ptr,
const S *const gravity_scalar_ptr,
const S *const accelerometer_bias_ptr, S *residual) const {
+ const aos::monotonic_clock::time_point start_time =
+ aos::monotonic_clock::now();
Eigen::Quaternion<S> initial_orientation(
initial_orientation_ptr[3], initial_orientation_ptr[0],
initial_orientation_ptr[1], initial_orientation_ptr[2]);
@@ -585,18 +829,32 @@
pivot_to_imu_translation, *gravity_scalar_ptr, accelerometer_bias);
data->ReviewData(&filter);
+ // Since the angular error is bounded by 1 (the vector part of a unit
+ // quaternion, scaled by sin(alpha/2)), I found it necessary to scale the
+ // angular error to have it properly balance with the translational error
+ double ang_error_scale = 5.0;
for (size_t i = 0; i < filter.num_errors(); ++i) {
- residual[3 * i + 0] = filter.errorx(i);
- residual[3 * i + 1] = filter.errory(i);
- residual[3 * i + 2] = filter.errorz(i);
+ residual[3 * i + 0] = ang_error_scale * filter.errorx(i);
+ residual[3 * i + 1] = ang_error_scale * filter.errory(i);
+ residual[3 * i + 2] = ang_error_scale * filter.errorz(i);
}
+ double trans_error_scale = 1.0;
for (size_t i = 0; i < filter.num_perrors(); ++i) {
- residual[3 * filter.num_errors() + 3 * i + 0] = filter.errorpx(i);
- residual[3 * filter.num_errors() + 3 * i + 1] = filter.errorpy(i);
- residual[3 * filter.num_errors() + 3 * i + 2] = filter.errorpz(i);
+ residual[3 * filter.num_errors() + 3 * i + 0] =
+ trans_error_scale * filter.errorpx(i);
+ residual[3 * filter.num_errors() + 3 * i + 1] =
+ trans_error_scale * filter.errorpy(i);
+ residual[3 * filter.num_errors() + 3 * i + 2] =
+ trans_error_scale * filter.errorpz(i);
}
+ LOG(INFO) << "Cost function calc took "
+ << chrono::duration<double>(aos::monotonic_clock::now() -
+ start_time)
+ .count()
+ << " seconds";
+
return true;
}
};
@@ -630,6 +888,8 @@
}
{
+ // The turret's Z rotation is redundant with the camera's mounting z
+ // rotation since it's along the rotation axis.
ceres::CostFunction *turret_z_cost_function =
new ceres::AutoDiffCostFunction<PenalizeQuaternionZ, 1, 4>(
new PenalizeQuaternionZ());
@@ -639,7 +899,8 @@
}
if (calibration_parameters->has_pivot) {
- // Constrain Z.
+ // Constrain Z since it's along the rotation axis and therefore
+ // redundant.
problem.SetParameterization(
calibration_parameters->pivot_to_imu_translation.data(),
new ceres::SubsetParameterization(3, {2}));
@@ -657,6 +918,9 @@
calibration_parameters->pivot_to_camera.coeffs().data(),
quaternion_local_parameterization);
problem.SetParameterization(
+ calibration_parameters->pivot_to_imu.coeffs().data(),
+ quaternion_local_parameterization);
+ problem.SetParameterization(
calibration_parameters->board_to_world.coeffs().data(),
quaternion_local_parameterization);
for (int i = 0; i < 3; ++i) {
@@ -678,40 +942,13 @@
ceres::Solver::Options options;
options.minimizer_progress_to_stdout = true;
options.gradient_tolerance = 1e-12;
- options.function_tolerance = 1e-16;
+ options.function_tolerance = 1e-6;
options.parameter_tolerance = 1e-12;
ceres::Solver::Summary summary;
Solve(options, &problem, &summary);
LOG(INFO) << summary.FullReport();
-
- LOG(INFO) << "initial_orientation "
- << calibration_parameters->initial_orientation.coeffs().transpose();
- LOG(INFO) << "pivot_to_imu "
- << calibration_parameters->pivot_to_imu.coeffs().transpose();
- LOG(INFO) << "pivot_to_imu(rotation) "
- << frc971::controls::ToRotationVectorFromQuaternion(
- calibration_parameters->pivot_to_imu)
- .transpose();
- LOG(INFO) << "pivot_to_camera "
- << calibration_parameters->pivot_to_camera.coeffs().transpose();
- LOG(INFO) << "pivot_to_camera(rotation) "
- << frc971::controls::ToRotationVectorFromQuaternion(
- calibration_parameters->pivot_to_camera)
- .transpose();
- LOG(INFO) << "gyro_bias " << calibration_parameters->gyro_bias.transpose();
- LOG(INFO) << "board_to_world "
- << calibration_parameters->board_to_world.coeffs().transpose();
- LOG(INFO) << "board_to_world(rotation) "
- << frc971::controls::ToRotationVectorFromQuaternion(
- calibration_parameters->board_to_world)
- .transpose();
- LOG(INFO) << "pivot_to_imu_translation "
- << calibration_parameters->pivot_to_imu_translation.transpose();
- LOG(INFO) << "pivot_to_camera_translation "
- << calibration_parameters->pivot_to_camera_translation.transpose();
- LOG(INFO) << "gravity " << kGravity * calibration_parameters->gravity_scalar;
- LOG(INFO) << "accelerometer bias "
- << calibration_parameters->accelerometer_bias.transpose();
+ LOG(INFO) << "Solution is " << (summary.IsSolutionUsable() ? "" : "NOT ")
+ << "usable";
}
void Plot(const CalibrationData &data,
@@ -730,5 +967,21 @@
filter.Plot();
}
+void Visualize(const CalibrationData &data,
+ const CalibrationParameters &calibration_parameters) {
+ PoseFilter filter(calibration_parameters.initial_orientation,
+ calibration_parameters.pivot_to_camera,
+ calibration_parameters.pivot_to_imu,
+ calibration_parameters.gyro_bias,
+ calibration_parameters.initial_state,
+ calibration_parameters.board_to_world,
+ calibration_parameters.pivot_to_camera_translation,
+ calibration_parameters.pivot_to_imu_translation,
+ calibration_parameters.gravity_scalar,
+ calibration_parameters.accelerometer_bias);
+ data.ReviewData(&filter);
+ filter.Visualize(calibration_parameters);
+}
+
} // namespace vision
} // namespace frc971
diff --git a/frc971/vision/extrinsics_calibration.h b/frc971/vision/extrinsics_calibration.h
index b24f13f..eb35482 100644
--- a/frc971/vision/extrinsics_calibration.h
+++ b/frc971/vision/extrinsics_calibration.h
@@ -42,6 +42,11 @@
void Plot(const CalibrationData &data,
const CalibrationParameters &calibration_parameters);
+// Shows the evolution of the calibration over time by visualizing relevant
+// coordinate frames in a virtual camera view
+void Visualize(const CalibrationData &data,
+ const CalibrationParameters &calibration_parameters);
+
} // namespace vision
} // namespace frc971
diff --git a/frc971/vision/visualize_robot.cc b/frc971/vision/visualize_robot.cc
new file mode 100644
index 0000000..0bbe507
--- /dev/null
+++ b/frc971/vision/visualize_robot.cc
@@ -0,0 +1,74 @@
+#include "frc971/vision/visualize_robot.h"
+#include "glog/logging.h"
+
+#include <opencv2/calib3d.hpp>
+#include <opencv2/core/eigen.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/imgproc.hpp>
+
+namespace frc971 {
+namespace vision {
+
+cv::Point VisualizeRobot::ProjectPoint(Eigen::Vector3d T_world_point) {
+ // Map 3D point in world coordinates to camera frame
+ Eigen::Vector3d T_camera_point = H_world_viewpoint_.inverse() * T_world_point;
+
+ cv::Vec3d T_camera_point_cv;
+ cv::eigen2cv(T_camera_point, T_camera_point_cv);
+
+ // Project 3d point in camera frame via camera intrinsics
+ cv::Mat proj_point = camera_mat_ * cv::Mat(T_camera_point_cv);
+ // Perspective divide to get pixel coordinates
+ cv::Point projected_point(
+ proj_point.at<double>(0, 0) / proj_point.at<double>(2, 0),
+ proj_point.at<double>(1, 0) / proj_point.at<double>(2, 0));
+ return projected_point;
+}
+
+void VisualizeRobot::DrawLine(Eigen::Vector3d start3d, Eigen::Vector3d end3d) {
+ cv::Point start2d = ProjectPoint(start3d);
+ cv::Point end2d = ProjectPoint(end3d);
+
+ cv::line(image_, start2d, end2d, cv::Scalar(0, 0, 255));
+}
+
+void VisualizeRobot::DrawFrameAxes(Eigen::Affine3d H_world_target,
+ std::string label, double axis_scale) {
+ // Map the target frame from the global (world) frame into the camera
+ // (viewpoint) frame
+ Eigen::Affine3d H_viewpoint_target =
+ H_world_viewpoint_.inverse() * H_world_target;
+
+ // Extract into OpenCV vectors
+ cv::Mat H_viewpoint_target_mat;
+ cv::eigen2cv(H_viewpoint_target.matrix(), H_viewpoint_target_mat);
+
+ // Convert to opencv R, T for using drawFrameAxes
+ cv::Vec3d rvec, tvec;
+ tvec = H_viewpoint_target_mat(cv::Rect(3, 0, 1, 3));
+ cv::Mat r_mat = H_viewpoint_target_mat(cv::Rect(0, 0, 3, 3));
+ cv::Rodrigues(r_mat, rvec);
+
+ cv::drawFrameAxes(image_, camera_mat_, dist_coeffs_, rvec, tvec, axis_scale);
+
+ if (label != "") {
+ // Grab x axis direction
+ cv::Vec3d label_offset = r_mat.col(0);
+
+ // Find 3D coordinate of point at the end of the x-axis, and project it
+ cv::Mat label_coord_res =
+ camera_mat_ * cv::Mat(tvec + label_offset * axis_scale);
+ cv::Vec3d label_coord = label_coord_res.col(0);
+ label_coord[0] = label_coord[0] / label_coord[2];
+ label_coord[1] = label_coord[1] / label_coord[2];
+ cv::putText(image_, label, cv::Point(label_coord[0], label_coord[1]),
+ cv::FONT_HERSHEY_PLAIN, 1.0, cv::Scalar(0, 0, 255));
+ }
+}
+
+void VisualizeRobot::DrawBoardOutline(Eigen::Affine3d H_world_board,
+ std::string label) {
+ LOG(INFO) << "Not yet implemented; drawing axes only";
+ DrawFrameAxes(H_world_board, label);
+}
+
+} // namespace vision
+} // namespace frc971
diff --git a/frc971/vision/visualize_robot.h b/frc971/vision/visualize_robot.h
new file mode 100644
index 0000000..391a030
--- /dev/null
+++ b/frc971/vision/visualize_robot.h
@@ -0,0 +1,65 @@
+#ifndef FRC971_VISION_VISUALIZE_ROBOT_H_
+#define FRC971_VISION_VISUALIZE_ROBOT_H_
+
+#include <opencv2/core.hpp>
+#include <opencv2/highgui.hpp>
+#include <opencv2/imgproc.hpp>
+
+#include "Eigen/Dense"
+#include "Eigen/Geometry"
+
+namespace frc971 {
+namespace vision {
+
+// Helper class to visualize the coordinate frames associated with the
+// robot. Based on a virtual camera viewpoint and camera model, this class
+// can be used to draw 3D coordinate frames in a virtual camera view.
+//
+// Mostly useful just for doing all the projection calculations.
+// Leverages Eigen for transforms and OpenCV for drawing axes.
+
+class VisualizeRobot {
+ public:
+ // Set image on which to draw
+ void SetImage(cv::Mat image) { image_ = image; }
+
+ // Set the viewpoint of the camera relative to a global origin
+ void SetViewpoint(Eigen::Affine3d view_origin) {
+ H_world_viewpoint_ = view_origin;
+ }
+
+ // Set camera parameters (for projection into a virtual view)
+ void SetCameraParameters(cv::Mat camera_intrinsics) {
+ camera_mat_ = camera_intrinsics;
+ }
+
+ // Set distortion coefficients (defaults to 0)
+ void SetDistortionCoefficients(cv::Mat dist_coeffs) {
+ dist_coeffs_ = dist_coeffs;
+ }
+
+ // Helper function to project a point in 3D to the virtual image coordinates
+ cv::Point ProjectPoint(Eigen::Vector3d point3d);
+
+ // Draw a line connecting two 3D points
+ void DrawLine(Eigen::Vector3d start, Eigen::Vector3d end);
+
+ // Draw coordinate frame for a target frame relative to the world frame
+ // Axes are drawn (x,y,z) -> (red, green, blue)
+ void DrawFrameAxes(Eigen::Affine3d H_world_target, std::string label = "",
+ double axis_scale = 0.25);
+
+ // TODO<Jim>: Need to implement this, and maybe DrawRobotOutline
+ void DrawBoardOutline(Eigen::Affine3d H_world_board, std::string label = "");
+
+ Eigen::Affine3d H_world_viewpoint_; // Where to view the world from
+ cv::Mat image_; // Image to draw on
+ cv::Mat camera_mat_; // Virtual camera intrinsics (defines fov, center)
+ cv::Mat dist_coeffs_; // Distortion coefficients, if desired (only used in
+ // DrawFrameAxes)
+};
+} // namespace vision
+} // namespace frc971
+
+#endif // FRC971_VISION_VISUALIZE_ROBOT_H_
diff --git a/frc971/vision/visualize_robot_sample.cc b/frc971/vision/visualize_robot_sample.cc
new file mode 100644
index 0000000..dc38352
--- /dev/null
+++ b/frc971/vision/visualize_robot_sample.cc
@@ -0,0 +1,72 @@
+#include "frc971/vision/visualize_robot.h"
+
+#include "aos/init.h"
+#include "aos/logging/logging.h"
+#include "glog/logging.h"
+
+#include "Eigen/Dense"
+
+#include <math.h>
+#include <opencv2/aruco.hpp>
+#include <opencv2/aruco/charuco.hpp>
+#include <opencv2/calib3d.hpp>
+#include <opencv2/core/eigen.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/imgproc.hpp>
+#include "aos/time/time.h"
+
+namespace frc971 {
+namespace vision {
+
+// Show / test the basics of visualizing the robot frames
+void Main(int /*argc*/, char ** /* argv */) {
+ VisualizeRobot vis_robot;
+
+ int image_width = 500;
+ cv::Mat image_mat =
+ cv::Mat::zeros(cv::Size(image_width, image_width), CV_8UC3);
+ vis_robot.SetImage(image_mat);
+
+ // 10 meters above the origin, rotated so the camera faces straight down
+ Eigen::Translation3d camera_trans(0, 0, 10.0);
+ Eigen::AngleAxisd camera_rot(M_PI, Eigen::Vector3d::UnitX());
+ Eigen::Affine3d camera_viewpoint = camera_trans * camera_rot;
+ vis_robot.SetViewpoint(camera_viewpoint);
+
+ cv::Mat camera_mat;
+ double focal_length = 1000.0;
+ double intr[] = {focal_length, 0.0, image_width / 2.0,
+ 0.0, focal_length, image_width / 2.0,
+ 0.0, 0.0, 1.0};
+ camera_mat = cv::Mat(3, 3, CV_64FC1, intr);
+ vis_robot.SetCameraParameters(camera_mat);
+
+ Eigen::Affine3d offset_rotate_origin(Eigen::Affine3d::Identity());
+
+ cv::Mat dist_coeffs = cv::Mat(1, 5, CV_64F, 0.0);
+ vis_robot.SetDistortionCoefficients(dist_coeffs);
+
+ // Go around the clock and plot the coordinate frame at different rotations
+ for (int i = 0; i < 12; i++) {
+ double angle = M_PI * double(i) / 6.0;
+ Eigen::Vector3d trans;
+ trans << 1.0 * cos(angle), 1.0 * sin(angle), 0.0;
+
+ offset_rotate_origin = Eigen::Translation3d(trans) *
+ Eigen::AngleAxisd(angle, Eigen::Vector3d::UnitX());
+
+ vis_robot.DrawFrameAxes(offset_rotate_origin, std::to_string(i));
+ }
+
+ // Display the result
+ cv::imshow("Display", image_mat);
+ cv::waitKey();
+}
+} // namespace vision
+} // namespace frc971
+
+int main(int argc, char **argv) {
+ aos::InitGoogle(&argc, &argv);
+
+ frc971::vision::Main(argc, argv);
+}
diff --git a/y2020/vision/extrinsics_calibration.cc b/y2020/vision/extrinsics_calibration.cc
index c561652..83d8760 100644
--- a/y2020/vision/extrinsics_calibration.cc
+++ b/y2020/vision/extrinsics_calibration.cc
@@ -40,6 +40,8 @@
CHECK(aos::configuration::MultiNode(reader.configuration()));
// Find the nodes we care about.
+ const aos::Node *const imu_node =
+ aos::configuration::GetNode(factory.configuration(), "imu");
const aos::Node *const roborio_node =
aos::configuration::GetNode(factory.configuration(), "roborio");
@@ -49,17 +51,20 @@
const aos::Node *const pi_node = aos::configuration::GetNode(
factory.configuration(), absl::StrCat("pi", *pi_number));
+ LOG(INFO) << "imu " << aos::FlatbufferToJson(imu_node);
LOG(INFO) << "roboRIO " << aos::FlatbufferToJson(roborio_node);
LOG(INFO) << "Pi " << aos::FlatbufferToJson(pi_node);
+ std::unique_ptr<aos::EventLoop> imu_event_loop =
+ factory.MakeEventLoop("calibration", imu_node);
std::unique_ptr<aos::EventLoop> roborio_event_loop =
factory.MakeEventLoop("calibration", roborio_node);
std::unique_ptr<aos::EventLoop> pi_event_loop =
factory.MakeEventLoop("calibration", pi_node);
// Now, hook Calibration up to everything.
- Calibration extractor(&factory, pi_event_loop.get(),
- roborio_event_loop.get(), FLAGS_pi, &data);
+ Calibration extractor(&factory, pi_event_loop.get(), imu_event_loop.get(),
+ FLAGS_pi, &data);
if (FLAGS_turret) {
aos::NodeEventLoopFactory *roborio_factory =
@@ -89,25 +94,42 @@
Eigen::Vector3d(0.0, 0.0, M_PI)));
const Eigen::Quaternion<double> nominal_pivot_to_camera(
Eigen::AngleAxisd(-0.5 * M_PI, Eigen::Vector3d::UnitX()));
+ const Eigen::Quaternion<double> nominal_pivot_to_imu(
+ Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitX()));
const Eigen::Quaternion<double> nominal_board_to_world(
Eigen::AngleAxisd(0.5 * M_PI, Eigen::Vector3d::UnitX()));
+ Eigen::Matrix<double, 6, 1> nominal_initial_state =
+ Eigen::Matrix<double, 6, 1>::Zero();
+ // Set y value to -1 m (approx distance from imu to board/world)
+ nominal_initial_state(1, 0) = -1.0;
CalibrationParameters calibration_parameters;
calibration_parameters.initial_orientation = nominal_initial_orientation;
calibration_parameters.pivot_to_camera = nominal_pivot_to_camera;
+ calibration_parameters.pivot_to_imu = nominal_pivot_to_imu;
calibration_parameters.board_to_world = nominal_board_to_world;
+ calibration_parameters.initial_state = nominal_initial_state;
+ if (data.turret_samples_size() > 0) {
+ LOG(INFO) << "Have turret, so using pivot setup";
+ calibration_parameters.has_pivot = true;
+ }
Solve(data, &calibration_parameters);
LOG(INFO) << "Nominal initial_orientation "
<< nominal_initial_orientation.coeffs().transpose();
LOG(INFO) << "Nominal pivot_to_camera "
<< nominal_pivot_to_camera.coeffs().transpose();
-
- LOG(INFO) << "pivot_to_camera delta "
+ LOG(INFO) << "Nominal pivot_to_camera (rot-xyz) "
+ << frc971::controls::ToRotationVectorFromQuaternion(
+ nominal_pivot_to_camera)
+ .transpose();
+ LOG(INFO) << "pivot_to_camera change "
<< frc971::controls::ToRotationVectorFromQuaternion(
calibration_parameters.pivot_to_camera *
nominal_pivot_to_camera.inverse())
.transpose();
+ LOG(INFO) << "Nominal pivot_to_imu "
+ << nominal_pivot_to_imu.coeffs().transpose();
LOG(INFO) << "board_to_world delta "
<< frc971::controls::ToRotationVectorFromQuaternion(
calibration_parameters.board_to_world *
diff --git a/y2022/vision/BUILD b/y2022/vision/BUILD
index 4fda1ad..9726542 100644
--- a/y2022/vision/BUILD
+++ b/y2022/vision/BUILD
@@ -328,9 +328,9 @@
)
cc_binary(
- name = "extrinsics_calibration",
+ name = "calibrate_extrinsics",
srcs = [
- "extrinsics_calibration.cc",
+ "calibrate_extrinsics.cc",
],
target_compatible_with = ["@platforms//os:linux"],
visibility = ["//y2022:__subpackages__"],
@@ -339,6 +339,7 @@
"//aos/events/logging:log_reader",
"//frc971/control_loops:profiled_subsystem_fbs",
"//frc971/vision:extrinsics_calibration",
+ "//third_party:opencv",
"//y2022/control_loops/superstructure:superstructure_status_fbs",
],
)
diff --git a/y2022/vision/calibrate_extrinsics.cc b/y2022/vision/calibrate_extrinsics.cc
new file mode 100644
index 0000000..8ae97c2
--- /dev/null
+++ b/y2022/vision/calibrate_extrinsics.cc
@@ -0,0 +1,237 @@
+#include "Eigen/Dense"
+#include "Eigen/Geometry"
+#include "absl/strings/str_format.h"
+#include "aos/events/logging/log_reader.h"
+#include "aos/init.h"
+#include "aos/network/team_number.h"
+#include "aos/time/time.h"
+#include "aos/util/file.h"
+#include "frc971/control_loops/quaternion_utils.h"
+#include "frc971/vision/extrinsics_calibration.h"
+#include "frc971/vision/vision_generated.h"
+#include "frc971/wpilib/imu_batch_generated.h"
+#include "y2020/vision/sift/sift_generated.h"
+#include "y2020/vision/sift/sift_training_generated.h"
+#include "y2020/vision/tools/python_code/sift_training_data.h"
+#include "y2022/control_loops/superstructure/superstructure_status_generated.h"
+
+DEFINE_string(pi, "pi-7971-2", "Pi name to calibrate.");
+DEFINE_bool(plot, false, "Whether to plot the resulting data.");
+DEFINE_bool(visualize, false, "Whether to visualize the resulting data.");
+DEFINE_bool(turret, true, "If true, the camera is on the turret");
+
+namespace frc971 {
+namespace vision {
+namespace chrono = std::chrono;
+using aos::distributed_clock;
+using aos::monotonic_clock;
+
+// TODO(austin): Source of IMU data? Is it the same?
+// TODO(austin): Intrinsics data?
+
+void Main(int argc, char **argv) {
+ CalibrationData data;
+
+ {
+ // Now, accumulate all the data into the data object.
+ aos::logger::LogReader reader(
+ aos::logger::SortParts(aos::logger::FindLogs(argc, argv)));
+
+ aos::SimulatedEventLoopFactory factory(reader.configuration());
+ reader.Register(&factory);
+
+ CHECK(aos::configuration::MultiNode(reader.configuration()));
+
+ // Find the nodes we care about.
+ const aos::Node *const imu_node =
+ aos::configuration::GetNode(factory.configuration(), "imu");
+ const aos::Node *const roborio_node =
+ aos::configuration::GetNode(factory.configuration(), "roborio");
+
+ std::optional<uint16_t> pi_number = aos::network::ParsePiNumber(FLAGS_pi);
+ CHECK(pi_number);
+ LOG(INFO) << "Pi " << *pi_number;
+ const aos::Node *const pi_node = aos::configuration::GetNode(
+ factory.configuration(), absl::StrCat("pi", *pi_number));
+
+ LOG(INFO) << "imu " << aos::FlatbufferToJson(imu_node);
+ LOG(INFO) << "roboRIO " << aos::FlatbufferToJson(roborio_node);
+ LOG(INFO) << "Pi " << aos::FlatbufferToJson(pi_node);
+
+ std::unique_ptr<aos::EventLoop> imu_event_loop =
+ factory.MakeEventLoop("calibration", imu_node);
+ std::unique_ptr<aos::EventLoop> roborio_event_loop =
+ factory.MakeEventLoop("calibration", roborio_node);
+ std::unique_ptr<aos::EventLoop> pi_event_loop =
+ factory.MakeEventLoop("calibration", pi_node);
+
+ // Now, hook Calibration up to everything.
+ Calibration extractor(&factory, pi_event_loop.get(), imu_event_loop.get(),
+ FLAGS_pi, &data);
+
+ if (FLAGS_turret) {
+ aos::NodeEventLoopFactory *roborio_factory =
+ factory.GetNodeEventLoopFactory(roborio_node->name()->string_view());
+ roborio_event_loop->MakeWatcher(
+ "/superstructure",
+ [roborio_factory, roborio_event_loop = roborio_event_loop.get(),
+ &data](const y2022::control_loops::superstructure::Status &status) {
+ data.AddTurret(
+ roborio_factory->ToDistributedClock(
+ roborio_event_loop->context().monotonic_event_time),
+ Eigen::Vector2d(status.turret()->position(),
+ status.turret()->velocity()));
+ });
+ }
+
+ factory.Run();
+
+ reader.Deregister();
+ }
+
+ LOG(INFO) << "Done with event_loop running";
+ CHECK(data.imu_samples_size() > 0) << "Didn't get any IMU data";
+ CHECK(data.camera_samples_size() > 0) << "Didn't get any camera observations";
+
+ // And now we have it, we can start processing it.
+ const Eigen::Quaternion<double> nominal_initial_orientation(
+ frc971::controls::ToQuaternionFromRotationVector(
+ Eigen::Vector3d(0.0, 0.0, M_PI)));
+ const Eigen::Quaternion<double> nominal_pivot_to_camera(
+ Eigen::AngleAxisd(-0.5 * M_PI, Eigen::Vector3d::UnitX()));
+ const Eigen::Quaternion<double> nominal_pivot_to_imu(
+ Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitX()));
+ const Eigen::Quaternion<double> nominal_board_to_world(
+ Eigen::AngleAxisd(0.5 * M_PI, Eigen::Vector3d::UnitX()));
+ Eigen::Matrix<double, 6, 1> nominal_initial_state =
+ Eigen::Matrix<double, 6, 1>::Zero();
+ // Set x value to 0.5 m (center view on the board)
+ // nominal_initial_state(0, 0) = 0.5;
+ // Set y value to -1 m (approx distance from imu to board/world)
+ nominal_initial_state(1, 0) = -1.0;
+
+ CalibrationParameters calibration_parameters;
+ calibration_parameters.initial_orientation = nominal_initial_orientation;
+ calibration_parameters.pivot_to_camera = nominal_pivot_to_camera;
+ calibration_parameters.pivot_to_imu = nominal_pivot_to_imu;
+ calibration_parameters.board_to_world = nominal_board_to_world;
+ calibration_parameters.initial_state = nominal_initial_state;
+
+  // Show the inverse of pivot_to_camera, since camera_to_pivot tells us where
+  // the camera is with respect to the pivot frame.
+ const Eigen::Affine3d nominal_affine_pivot_to_camera =
+ Eigen::Translation3d(calibration_parameters.pivot_to_camera_translation) *
+ nominal_pivot_to_camera;
+ const Eigen::Quaterniond nominal_camera_to_pivot_rotation(
+ nominal_affine_pivot_to_camera.inverse().rotation());
+ const Eigen::Vector3d nominal_camera_to_pivot_translation(
+ nominal_affine_pivot_to_camera.inverse().translation());
+
+ if (data.turret_samples_size() > 0) {
+ LOG(INFO) << "Have turret, so using pivot setup";
+ calibration_parameters.has_pivot = true;
+ }
+
+ LOG(INFO) << "Initial Conditions for solver. Assumes:\n"
+ << "1) board origin is same as world, but rotated pi/2 about "
+ "x-axis, so z points out\n"
+ << "2) pivot origin matches imu origin\n"
+ << "3) camera is offset from pivot (depends on which camera)";
+
+ LOG(INFO)
+ << "Nominal initial_orientation of imu w.r.t. world (angle-axis vector): "
+ << frc971::controls::ToRotationVectorFromQuaternion(
+ nominal_initial_orientation)
+ .transpose();
+ LOG(INFO) << "Nominal initial_state: \n"
+ << "Position: "
+ << nominal_initial_state.block<3, 1>(0, 0).transpose() << "\n"
+ << "Velocity: "
+ << nominal_initial_state.block<3, 1>(3, 0).transpose();
+ LOG(INFO) << "Nominal pivot_to_imu (angle-axis vector) "
+ << frc971::controls::ToRotationVectorFromQuaternion(
+ calibration_parameters.pivot_to_imu)
+ .transpose();
+ LOG(INFO) << "Nominal pivot_to_imu translation: "
+ << calibration_parameters.pivot_to_imu_translation.transpose();
+  // TODO(Jim): It might be nice to take out the rotation component that maps
+  // into camera image coordinates (with x right, y down, z forward).
+ LOG(INFO) << "Nominal camera_to_pivot (angle-axis vector): "
+ << frc971::controls::ToRotationVectorFromQuaternion(
+ nominal_camera_to_pivot_rotation)
+ .transpose();
+ LOG(INFO) << "Nominal camera_to_pivot translation: "
+ << nominal_camera_to_pivot_translation.transpose();
+
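+  // Run the solver; calibration_parameters is updated in place with the
+  // optimized values, which are logged below.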
+ Solve(data, &calibration_parameters);
+
+ LOG(INFO) << "RESULTS OF CALIBRATION SOLVER:";
+ LOG(INFO) << "initial_orientation of imu w.r.t. world (angle-axis vector): "
+ << frc971::controls::ToRotationVectorFromQuaternion(
+ calibration_parameters.initial_orientation)
+ .transpose();
+ LOG(INFO)
+ << "initial_state: \n"
+ << "Position: "
+ << calibration_parameters.initial_state.block<3, 1>(0, 0).transpose()
+ << "\n"
+ << "Velocity: "
+ << calibration_parameters.initial_state.block<3, 1>(3, 0).transpose();
+
+  LOG(INFO) << "pivot_to_imu rotation (angle-axis vector) "
+ << frc971::controls::ToRotationVectorFromQuaternion(
+ calibration_parameters.pivot_to_imu)
+ .transpose();
+  LOG(INFO) << "pivot_to_imu translation: "
+ << calibration_parameters.pivot_to_imu_translation.transpose();
+ const Eigen::Affine3d affine_pivot_to_camera =
+ Eigen::Translation3d(calibration_parameters.pivot_to_camera_translation) *
+ calibration_parameters.pivot_to_camera;
+ const Eigen::Quaterniond camera_to_pivot_rotation(
+ affine_pivot_to_camera.inverse().rotation());
+ const Eigen::Vector3d camera_to_pivot_translation(
+ affine_pivot_to_camera.inverse().translation());
+  LOG(INFO) << "camera_to_pivot (angle-axis vector): "
+ << frc971::controls::ToRotationVectorFromQuaternion(
+ camera_to_pivot_rotation)
+ .transpose();
+  LOG(INFO) << "camera_to_pivot translation: "
+ << camera_to_pivot_translation.transpose();
+  LOG(INFO) << "board_to_world rotation (angle-axis vector) "
+ << frc971::controls::ToRotationVectorFromQuaternion(
+ calibration_parameters.board_to_world)
+ .transpose();
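+  // The solver also estimates sensor biases and a scale factor on gravity
+  // (nominally 9.81 m/s^2); a gravity_scalar far from 1.0 is a hint that the
+  // accelerometer scale or the data set is suspect.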
+ LOG(INFO) << "accelerometer bias "
+ << calibration_parameters.accelerometer_bias.transpose();
+ LOG(INFO) << "gyro_bias " << calibration_parameters.gyro_bias.transpose();
+ LOG(INFO) << "gravity " << 9.81 * calibration_parameters.gravity_scalar;
+
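+  // Report the solved rotations relative to their nominal values (as
+  // angle-axis vectors) as a quick sanity check on how far the solution moved.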
+  LOG(INFO) << "pivot_to_camera delta "
+ << frc971::controls::ToRotationVectorFromQuaternion(
+ calibration_parameters.pivot_to_camera *
+ nominal_pivot_to_camera.inverse())
+ .transpose();
+ LOG(INFO) << "board_to_world delta "
+ << frc971::controls::ToRotationVectorFromQuaternion(
+ calibration_parameters.board_to_world *
+ nominal_board_to_world.inverse())
+ .transpose();
+
+ if (FLAGS_visualize) {
+ LOG(INFO) << "Showing visualization";
+ Visualize(data, calibration_parameters);
+ }
+
+ if (FLAGS_plot) {
+ Plot(data, calibration_parameters);
+ }
+}
+
+} // namespace vision
+} // namespace frc971
+
+int main(int argc, char **argv) {
+ aos::InitGoogle(&argc, &argv);
+
+ frc971::vision::Main(argc, argv);
+}
diff --git a/y2022/vision/extrinsics_calibration.cc b/y2022/vision/extrinsics_calibration.cc
deleted file mode 100644
index 49f2ca3..0000000
--- a/y2022/vision/extrinsics_calibration.cc
+++ /dev/null
@@ -1,129 +0,0 @@
-#include "frc971/vision/extrinsics_calibration.h"
-
-#include "Eigen/Dense"
-#include "Eigen/Geometry"
-#include "absl/strings/str_format.h"
-#include "aos/events/logging/log_reader.h"
-#include "aos/init.h"
-#include "aos/network/team_number.h"
-#include "aos/time/time.h"
-#include "aos/util/file.h"
-#include "frc971/control_loops/quaternion_utils.h"
-#include "frc971/vision/vision_generated.h"
-#include "frc971/wpilib/imu_batch_generated.h"
-#include "y2022/control_loops/superstructure/superstructure_status_generated.h"
-#include "y2020/vision/sift/sift_generated.h"
-#include "y2020/vision/sift/sift_training_generated.h"
-#include "y2020/vision/tools/python_code/sift_training_data.h"
-
-DEFINE_string(pi, "pi-7971-2", "Pi name to calibrate.");
-DEFINE_bool(plot, false, "Whether to plot the resulting data.");
-
-namespace frc971 {
-namespace vision {
-namespace chrono = std::chrono;
-using aos::distributed_clock;
-using aos::monotonic_clock;
-
-// TODO(austin): Source of IMU data? Is it the same?
-// TODO(austin): Intrinsics data?
-
-void Main(int argc, char **argv) {
- CalibrationData data;
-
- {
- // Now, accumulate all the data into the data object.
- aos::logger::LogReader reader(
- aos::logger::SortParts(aos::logger::FindLogs(argc, argv)));
-
- aos::SimulatedEventLoopFactory factory(reader.configuration());
- reader.Register(&factory);
-
- CHECK(aos::configuration::MultiNode(reader.configuration()));
-
- // Find the nodes we care about.
- const aos::Node *const roborio_node =
- aos::configuration::GetNode(factory.configuration(), "roborio");
-
- std::optional<uint16_t> pi_number = aos::network::ParsePiNumber(FLAGS_pi);
- CHECK(pi_number);
- LOG(INFO) << "Pi " << *pi_number;
- const aos::Node *const pi_node = aos::configuration::GetNode(
- factory.configuration(), absl::StrCat("pi", *pi_number));
-
- LOG(INFO) << "roboRIO " << aos::FlatbufferToJson(roborio_node);
- LOG(INFO) << "Pi " << aos::FlatbufferToJson(pi_node);
-
- std::unique_ptr<aos::EventLoop> roborio_event_loop =
- factory.MakeEventLoop("calibration", roborio_node);
- std::unique_ptr<aos::EventLoop> pi_event_loop =
- factory.MakeEventLoop("calibration", pi_node);
-
- // Now, hook Calibration up to everything.
- Calibration extractor(&factory, pi_event_loop.get(),
- roborio_event_loop.get(), FLAGS_pi, &data);
-
- aos::NodeEventLoopFactory *roborio_factory =
- factory.GetNodeEventLoopFactory(roborio_node->name()->string_view());
- roborio_event_loop->MakeWatcher(
- "/superstructure",
- [roborio_factory, roborio_event_loop = roborio_event_loop.get(),
- &data](const y2022::control_loops::superstructure::Status &status) {
- data.AddTurret(
- roborio_factory->ToDistributedClock(
- roborio_event_loop->context().monotonic_event_time),
- Eigen::Vector2d(status.turret()->position(),
- status.turret()->velocity()));
- });
-
- factory.Run();
-
- reader.Deregister();
- }
-
- LOG(INFO) << "Done with event_loop running";
- // And now we have it, we can start processing it.
-
- const Eigen::Quaternion<double> nominal_initial_orientation(
- frc971::controls::ToQuaternionFromRotationVector(
- Eigen::Vector3d(0.0, 0.0, M_PI)));
- const Eigen::Quaternion<double> nominal_pivot_to_camera(
- Eigen::AngleAxisd(-0.5 * M_PI, Eigen::Vector3d::UnitX()));
- const Eigen::Quaternion<double> nominal_board_to_world(
- Eigen::AngleAxisd(0.5 * M_PI, Eigen::Vector3d::UnitX()));
-
- CalibrationParameters calibration_parameters;
- calibration_parameters.initial_orientation = nominal_initial_orientation;
- calibration_parameters.pivot_to_camera = nominal_pivot_to_camera;
- calibration_parameters.board_to_world = nominal_board_to_world;
-
- Solve(data, &calibration_parameters);
- LOG(INFO) << "Nominal initial_orientation "
- << nominal_initial_orientation.coeffs().transpose();
- LOG(INFO) << "Nominal pivot_to_camera "
- << nominal_pivot_to_camera.coeffs().transpose();
-
- LOG(INFO) << "pivot_to_camera delta "
- << frc971::controls::ToRotationVectorFromQuaternion(
- calibration_parameters.pivot_to_camera *
- nominal_pivot_to_camera.inverse())
- .transpose();
- LOG(INFO) << "board_to_world delta "
- << frc971::controls::ToRotationVectorFromQuaternion(
- calibration_parameters.board_to_world *
- nominal_board_to_world.inverse())
- .transpose();
-
- if (FLAGS_plot) {
- Plot(data, calibration_parameters);
- }
-}
-
-} // namespace vision
-} // namespace frc971
-
-int main(int argc, char **argv) {
- aos::InitGoogle(&argc, &argv);
-
- frc971::vision::Main(argc, argv);
-}
diff --git a/y2022/y2022_logger.json b/y2022/y2022_logger.json
index 4024bf6..0f790c7 100644
--- a/y2022/y2022_logger.json
+++ b/y2022/y2022_logger.json
@@ -462,6 +462,38 @@
]
},
{
+ "name": "/pi3/camera",
+ "type": "frc971.vision.CameraImage",
+ "source_node": "pi3",
+ "logger": "LOCAL_AND_REMOTE_LOGGER",
+ "logger_nodes": [
+ "logger"
+ ],
+ "destination_nodes": [
+ {
+ "name": "logger",
+ "priority": 3,
+ "time_to_live": 500000000
+ }
+ ]
+ },
+ {
+ "name": "/localizer",
+ "type": "frc971.IMUValuesBatch",
+ "source_node": "imu",
+ "logger": "LOCAL_AND_REMOTE_LOGGER",
+ "logger_nodes": [
+ "logger"
+ ],
+ "destination_nodes": [
+ {
+ "name": "logger",
+ "priority": 3,
+ "time_to_live": 500000000
+ }
+ ]
+ },
+ {
"name": "/pi4/camera/decimated",
"type": "frc971.vision.CameraImage",
"source_node": "pi4",