#include "Eigen/Dense"
#include "Eigen/Geometry"
#include "absl/strings/str_format.h"
#include "aos/events/logging/log_reader.h"
#include "aos/events/logging/log_writer.h"
#include "aos/init.h"
#include "aos/network/team_number.h"
#include "aos/time/time.h"
#include "aos/util/file.h"
#include "frc971/control_loops/quaternion_utils.h"
#include "frc971/vision/extrinsics_calibration.h"
#include "frc971/vision/vision_generated.h"
#include "frc971/wpilib/imu_batch_generated.h"
#include "y2020/vision/sift/sift_generated.h"
#include "y2020/vision/sift/sift_training_generated.h"
#include "y2020/vision/tools/python_code/sift_training_data.h"
#include "y2022/control_loops/superstructure/superstructure_status_generated.h"

DEFINE_string(pi, "pi-7971-2", "Pi name to calibrate.");
DEFINE_bool(plot, false, "Whether to plot the resulting data.");
// Needed by the FLAGS_visualize check at the end of Main().
DEFINE_bool(visualize, false, "Whether to visualize the resulting data.");
DEFINE_bool(turret, true, "If true, the camera is on the turret");
DEFINE_string(target_type, "charuco",
              "Type of target: april_tag|aruco|charuco|charuco_diamond");
DEFINE_string(image_channel, "/camera", "Channel to listen for images on");
DEFINE_string(output_logs, "/tmp/calibration/",
              "Output folder for visualization logs.");

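// Illustrative usage (binary name and log path are examples only, not taken
// from the build files):
//   calibrate_extrinsics --pi=pi-7971-1 --target_type=charuco \
//       /path/to/calibration_logfiles
// The positional command-line arguments are the log parts to replay;
// FindLogs() below pulls them out of argv.
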
namespace frc971 {
namespace vision {
namespace chrono = std::chrono;
using aos::distributed_clock;
using aos::monotonic_clock;

// TODO(austin): Source of IMU data? Is it the same?
// TODO(austin): Intrinsics data?

void Main(int argc, char **argv) {
  CalibrationData data;
  std::optional<uint16_t> pi_number = aos::network::ParsePiNumber(FLAGS_pi);
  CHECK(pi_number);
  const std::string pi_name = absl::StrCat("pi", *pi_number);
  LOG(INFO) << "Pi " << *pi_number;
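  // Rebuild the logged configuration with the Foxglove visualization channels
  // added for this pi, so the replay below can also publish debug data.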
  aos::FlatbufferDetachedBuffer<aos::Configuration> config = [argc, argv,
                                                              pi_name]() {
    aos::logger::LogReader reader(
        aos::logger::SortParts(aos::logger::FindLogs(argc, argv)));
    return CalibrationFoxgloveVisualizer::AddVisualizationChannels(
        reader.logged_configuration(),
        aos::configuration::GetNode(reader.logged_configuration(), pi_name));
  }();

  {
    // Now, accumulate all the data into the data object.
    aos::logger::LogReader reader(
        aos::logger::SortParts(aos::logger::FindLogs(argc, argv)),
        &config.message());

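    // Replay the log inside a simulated event loop so we can attach our own
    // event loops on each node and observe the logged messages.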
    aos::SimulatedEventLoopFactory factory(reader.configuration());
    reader.Register(&factory);

    CHECK(aos::configuration::MultiNode(reader.configuration()));

    // Find the nodes we care about.
    const aos::Node *const imu_node =
        aos::configuration::GetNode(factory.configuration(), "imu");
    const aos::Node *const roborio_node =
        aos::configuration::GetNode(factory.configuration(), "roborio");

    const aos::Node *const pi_node =
        aos::configuration::GetNode(factory.configuration(), pi_name);

    LOG(INFO) << "imu " << aos::FlatbufferToJson(imu_node);
    LOG(INFO) << "roboRIO " << aos::FlatbufferToJson(roborio_node);
    LOG(INFO) << "Pi " << aos::FlatbufferToJson(pi_node);

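    // Make an event loop on each node of interest: the pi and imu loops feed
    // the Calibration extractor, and the roborio loop is only used to watch
    // turret status.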
    std::unique_ptr<aos::EventLoop> imu_event_loop =
        factory.MakeEventLoop("calibration", imu_node);
    std::unique_ptr<aos::EventLoop> roborio_event_loop =
        factory.MakeEventLoop("calibration", roborio_node);
    std::unique_ptr<aos::EventLoop> pi_event_loop =
        factory.MakeEventLoop("calibration", pi_node);

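    // Also write a log on the pi node to FLAGS_output_logs so the
    // visualization channels can be reviewed afterwards.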
    std::unique_ptr<aos::EventLoop> logger_loop =
        factory.MakeEventLoop("logger", pi_node);
    aos::logger::Logger logger(logger_loop.get());
    logger.StartLoggingOnRun(FLAGS_output_logs);

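    // Translate the --target_type flag into the corresponding TargetType enum.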
    TargetType target_type = TargetType::kCharuco;
    if (FLAGS_target_type == "april_tag") {
      target_type = TargetType::kAprilTag;
    } else if (FLAGS_target_type == "aruco") {
      target_type = TargetType::kAruco;
    } else if (FLAGS_target_type == "charuco") {
      target_type = TargetType::kCharuco;
    } else if (FLAGS_target_type == "charuco_diamond") {
      target_type = TargetType::kCharucoDiamond;
    } else {
      LOG(FATAL) << "Unknown target type: " << FLAGS_target_type
                 << ", expected: april_tag|aruco|charuco|charuco_diamond";
    }

    // Now, hook Calibration up to everything.
    Calibration extractor(&factory, pi_event_loop.get(), imu_event_loop.get(),
                          FLAGS_pi, target_type, FLAGS_image_channel, &data);

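    // When the camera rides on the turret, record turret position and velocity
    // samples (timestamped on the distributed clock) so the solver can model
    // the moving pivot.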
    if (FLAGS_turret) {
      aos::NodeEventLoopFactory *roborio_factory =
          factory.GetNodeEventLoopFactory(roborio_node->name()->string_view());
      roborio_event_loop->MakeWatcher(
          "/superstructure",
          [roborio_factory, roborio_event_loop = roborio_event_loop.get(),
           &data](const y2022::control_loops::superstructure::Status &status) {
            data.AddTurret(
                roborio_factory->ToDistributedClock(
                    roborio_event_loop->context().monotonic_event_time),
                Eigen::Vector2d(status.turret()->position(),
                                status.turret()->velocity()));
          });
    }

    factory.Run();

    reader.Deregister();
  }

  LOG(INFO) << "Done with event_loop running";
  CHECK(data.imu_samples_size() > 0) << "Didn't get any IMU data";
  CHECK(data.camera_samples_size() > 0) << "Didn't get any camera observations";

  // Now that we have the data, we can start processing it.
  const Eigen::Quaternion<double> nominal_initial_orientation(
      frc971::controls::ToQuaternionFromRotationVector(
          Eigen::Vector3d(0.0, 0.0, M_PI)));
  const Eigen::Quaternion<double> nominal_pivot_to_camera(
      Eigen::AngleAxisd(-0.5 * M_PI, Eigen::Vector3d::UnitX()));
  const Eigen::Quaternion<double> nominal_pivot_to_imu(
      Eigen::AngleAxisd(0.0, Eigen::Vector3d::UnitX()));
  const Eigen::Quaternion<double> nominal_board_to_world(
      Eigen::AngleAxisd(0.5 * M_PI, Eigen::Vector3d::UnitX()));
  Eigen::Matrix<double, 6, 1> nominal_initial_state =
      Eigen::Matrix<double, 6, 1>::Zero();
  // Set the x value to 0.5 m (center view on the board):
  // nominal_initial_state(0, 0) = 0.5;
  // Set the y value to -1 m (approximate distance from imu to board/world).
  nominal_initial_state(1, 0) = -1.0;

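  // Seed the solver with the nominal transforms as its initial guess.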
  CalibrationParameters calibration_parameters;
  calibration_parameters.initial_orientation = nominal_initial_orientation;
  calibration_parameters.pivot_to_camera = nominal_pivot_to_camera;
  calibration_parameters.pivot_to_imu = nominal_pivot_to_imu;
  calibration_parameters.board_to_world = nominal_board_to_world;
  calibration_parameters.initial_state = nominal_initial_state;

  // Show the inverse of pivot_to_camera, since camera_to_pivot tells where the
  // camera is with respect to the pivot frame.
  const Eigen::Affine3d nominal_affine_pivot_to_camera =
      Eigen::Translation3d(calibration_parameters.pivot_to_camera_translation) *
      nominal_pivot_to_camera;
  const Eigen::Quaterniond nominal_camera_to_pivot_rotation(
      nominal_affine_pivot_to_camera.inverse().rotation());
  const Eigen::Vector3d nominal_camera_to_pivot_translation(
      nominal_affine_pivot_to_camera.inverse().translation());

  if (data.turret_samples_size() > 0) {
    LOG(INFO) << "Have turret, so using pivot setup";
    calibration_parameters.has_pivot = true;
  }

  LOG(INFO) << "Initial Conditions for solver. Assumes:\n"
            << "1) board origin is same as world, but rotated pi/2 about "
               "x-axis, so z points out\n"
            << "2) pivot origin matches imu origin\n"
            << "3) camera is offset from pivot (depends on which camera)";

  LOG(INFO)
      << "Nominal initial_orientation of imu w.r.t. world (angle-axis vector): "
      << frc971::controls::ToRotationVectorFromQuaternion(
             nominal_initial_orientation)
             .transpose();
  LOG(INFO) << "Nominal initial_state: \n"
            << "Position: "
            << nominal_initial_state.block<3, 1>(0, 0).transpose() << "\n"
            << "Velocity: "
            << nominal_initial_state.block<3, 1>(3, 0).transpose();
  LOG(INFO) << "Nominal pivot_to_imu (angle-axis vector) "
            << frc971::controls::ToRotationVectorFromQuaternion(
                   calibration_parameters.pivot_to_imu)
                   .transpose();
  LOG(INFO) << "Nominal pivot_to_imu translation: "
            << calibration_parameters.pivot_to_imu_translation.transpose();
  // TODO<Jim>: Might be nice to take out the rotation component that maps into
  // camera image coordinates (with x right, y down, z forward).
  LOG(INFO) << "Nominal camera_to_pivot (angle-axis vector): "
            << frc971::controls::ToRotationVectorFromQuaternion(
                   nominal_camera_to_pivot_rotation)
                   .transpose();
  LOG(INFO) << "Nominal camera_to_pivot translation: "
            << nominal_camera_to_pivot_translation.transpose();

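  // Run the solver over the accumulated IMU, camera, and (optional) turret
  // samples.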
  Solve(data, &calibration_parameters);

  LOG(INFO) << "RESULTS OF CALIBRATION SOLVER:";
  LOG(INFO) << "initial_orientation of imu w.r.t. world (angle-axis vector): "
            << frc971::controls::ToRotationVectorFromQuaternion(
                   calibration_parameters.initial_orientation)
                   .transpose();
  LOG(INFO)
      << "initial_state: \n"
      << "Position: "
      << calibration_parameters.initial_state.block<3, 1>(0, 0).transpose()
      << "\n"
      << "Velocity: "
      << calibration_parameters.initial_state.block<3, 1>(3, 0).transpose();

  LOG(INFO) << "pivot_to_imu rotation (angle-axis vec) "
            << frc971::controls::ToRotationVectorFromQuaternion(
                   calibration_parameters.pivot_to_imu)
                   .transpose();
  LOG(INFO) << "pivot_to_imu_translation "
            << calibration_parameters.pivot_to_imu_translation.transpose();
  const Eigen::Affine3d affine_pivot_to_camera =
      Eigen::Translation3d(calibration_parameters.pivot_to_camera_translation) *
      calibration_parameters.pivot_to_camera;
  const Eigen::Quaterniond camera_to_pivot_rotation(
      affine_pivot_to_camera.inverse().rotation());
  const Eigen::Vector3d camera_to_pivot_translation(
      affine_pivot_to_camera.inverse().translation());
  LOG(INFO) << "camera to pivot (angle-axis vec): "
            << frc971::controls::ToRotationVectorFromQuaternion(
                   camera_to_pivot_rotation)
                   .transpose();
  LOG(INFO) << "camera to pivot translation: "
            << camera_to_pivot_translation.transpose();
  LOG(INFO) << "board_to_world (rotation) "
            << frc971::controls::ToRotationVectorFromQuaternion(
                   calibration_parameters.board_to_world)
                   .transpose();
  LOG(INFO) << "accelerometer bias "
            << calibration_parameters.accelerometer_bias.transpose();
  LOG(INFO) << "gyro_bias " << calibration_parameters.gyro_bias.transpose();
  LOG(INFO) << "gravity " << 9.81 * calibration_parameters.gravity_scalar;

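  // Report how far the solved rotations moved from the nominal seeds.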
  LOG(INFO) << "pivot_to_camera change "
            << frc971::controls::ToRotationVectorFromQuaternion(
                   calibration_parameters.pivot_to_camera *
                   nominal_pivot_to_camera.inverse())
                   .transpose();
  LOG(INFO) << "board_to_world delta "
            << frc971::controls::ToRotationVectorFromQuaternion(
                   calibration_parameters.board_to_world *
                   nominal_board_to_world.inverse())
                   .transpose();

  if (FLAGS_visualize) {
    LOG(INFO) << "Showing visualization";
    Visualize(data, calibration_parameters);
  }

  if (FLAGS_plot) {
    Plot(data, calibration_parameters);
  }
}

}  // namespace vision
}  // namespace frc971

int main(int argc, char **argv) {
  aos::InitGoogle(&argc, &argv);

  frc971::vision::Main(argc, argv);
}