#include "y2023/localizer/localizer.h"

#include "absl/flags/flag.h"

#include "aos/containers/sized_array.h"
#include "frc971/control_loops/drivetrain/localizer_generated.h"
#include "frc971/control_loops/pose.h"
#include "frc971/vision/target_map_utils.h"
#include "y2023/constants.h"

ABSL_FLAG(double, max_pose_error, 1e-6,
          "Throw out target poses with a higher pose error than this");
ABSL_FLAG(double, max_pose_error_ratio, 0.4,
          "Throw out target poses with a higher pose error ratio than this");
ABSL_FLAG(double, distortion_noise_scalar, 1.0,
          "Scale the target pose distortion factor by this when computing "
          "the noise.");
ABSL_FLAG(
    double, max_implied_yaw_error, 3.0,
    "Reject target poses that imply a robot yaw of more than this many degrees "
    "off from our estimate.");
ABSL_FLAG(
    double, max_implied_teleop_yaw_error, 30.0,
    "Reject target poses that imply a robot yaw of more than this many degrees "
    "off from our estimate.");
ABSL_FLAG(double, max_distance_to_target, 5.0,
          "Reject target poses that have a 3d distance of more than this "
          "many meters.");
ABSL_FLAG(double, max_auto_image_robot_speed, 2.0,
          "Reject target poses when the robot is travelling faster than "
          "this speed in auto.");

namespace y2023::localizer {
namespace {
constexpr std::array<std::string_view, Localizer::kNumCameras> kPisToUse{
    "pi1", "pi2", "pi3", "pi4"};

size_t CameraIndexForName(std::string_view name) {
  for (size_t index = 0; index < kPisToUse.size(); ++index) {
    if (name == kPisToUse.at(index)) {
      return index;
    }
  }
  LOG(FATAL) << "No camera named " << name;
}

std::map<uint64_t, Localizer::Transform> GetTargetLocations(
    const Constants &constants) {
  CHECK(constants.has_target_map());
  CHECK(constants.target_map()->has_target_poses());
  std::map<uint64_t, Localizer::Transform> transforms;
  for (const frc971::vision::TargetPoseFbs *target :
       *constants.target_map()->target_poses()) {
    CHECK(target->has_id());
    CHECK(target->has_position());
    CHECK(target->has_orientation());
    CHECK_EQ(0u, transforms.count(target->id()));
    transforms[target->id()] = PoseToTransform(target);
  }
  return transforms;
}
}  // namespace

std::array<Localizer::CameraState, Localizer::kNumCameras>
Localizer::MakeCameras(const Constants &constants, aos::EventLoop *event_loop) {
  CHECK(constants.has_cameras());
  std::array<Localizer::CameraState, Localizer::kNumCameras> cameras;
  for (const CameraConfiguration *camera : *constants.cameras()) {
    CHECK(camera->has_calibration());
    const frc971::vision::calibration::CameraCalibration *calibration =
        camera->calibration();
    CHECK(!calibration->has_turret_extrinsics())
        << "The 2023 robot does not have a turret.";
    CHECK(calibration->has_node_name());
    const size_t index =
        CameraIndexForName(calibration->node_name()->string_view());
    // We default-construct the extrinsics matrix to all-zeros; use that to
    // sanity-check whether we have populated the matrix yet or not.
    CHECK(cameras.at(index).extrinsics.norm() == 0)
        << "Got multiple calibrations for "
        << calibration->node_name()->string_view();
    CHECK(calibration->has_fixed_extrinsics());
    cameras.at(index).extrinsics =
        frc971::control_loops::drivetrain::FlatbufferToTransformationMatrix(
            *calibration->fixed_extrinsics());
    cameras.at(index).debug_sender = event_loop->MakeSender<Visualization>(
        absl::StrCat("/", calibration->node_name()->string_view(), "/camera"));
  }
  for (const CameraState &camera : cameras) {
    CHECK(camera.extrinsics.norm() != 0) << "Missing a camera calibration.";
  }
  return cameras;
}

Localizer::Localizer(
    aos::EventLoop *event_loop,
    const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config)
    : event_loop_(event_loop),
      dt_config_(dt_config),
      constants_fetcher_(event_loop),
      cameras_(MakeCameras(constants_fetcher_.constants(), event_loop)),
      target_poses_(GetTargetLocations(constants_fetcher_.constants())),
      down_estimator_(dt_config),
      ekf_(dt_config),
      observations_(&ekf_),
      imu_watcher_(event_loop, dt_config,
                   y2023::constants::Values::DrivetrainEncoderToMeters(1),
                   std::bind(&Localizer::HandleImu, this, std::placeholders::_1,
                             std::placeholders::_2, std::placeholders::_3,
                             std::placeholders::_4, std::placeholders::_5),
                   frc971::controls::ImuWatcher::TimestampSource::kPi),
      utils_(event_loop),
      status_sender_(event_loop->MakeSender<Status>("/localizer")),
      output_sender_(event_loop->MakeSender<frc971::controls::LocalizerOutput>(
          "/localizer")),
      server_statistics_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
              "/aos")),
      client_statistics_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ClientStatistics>(
              "/aos")) {
  if (dt_config_.is_simulated) {
    down_estimator_.assume_perfect_gravity();
  }

  for (size_t camera_index = 0; camera_index < kNumCameras; ++camera_index) {
    const std::string_view pi_name = kPisToUse.at(camera_index);
    event_loop->MakeWatcher(
        absl::StrCat("/", pi_name, "/camera"),
        [this, pi_name,
         camera_index](const frc971::vision::TargetMap &targets) {
          CHECK(targets.has_target_poses());
          CHECK(targets.has_monotonic_timestamp_ns());
          const std::optional<aos::monotonic_clock::duration> clock_offset =
              utils_.ClockOffset(pi_name);
          if (!clock_offset.has_value()) {
            VLOG(1) << "Rejecting image due to disconnected message bridge at "
                    << event_loop_->monotonic_now();
            cameras_.at(camera_index)
                .rejection_counter.IncrementError(
                    RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
            return;
          }
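          // The target timestamp is in the pi's monotonic clock; subtract the
          // measured message bridge clock offset to express it in our local
          // clock, then correct by the pico offset error so that the capture
          // time lines up with the timebase used for the IMU samples.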
          const aos::monotonic_clock::time_point pi_capture_time(
              std::chrono::nanoseconds(targets.monotonic_timestamp_ns()) -
              clock_offset.value());
          const aos::monotonic_clock::time_point capture_time =
              pi_capture_time - imu_watcher_.pico_offset_error();
          VLOG(2) << "Capture time of "
                  << targets.monotonic_timestamp_ns() * 1e-9
                  << " clock offset of "
                  << aos::time::DurationInSeconds(clock_offset.value())
                  << " pico error "
                  << aos::time::DurationInSeconds(
                         imu_watcher_.pico_offset_error());
          if (pi_capture_time > event_loop_->context().monotonic_event_time) {
            VLOG(1) << "Rejecting image due to being from the future at "
                    << event_loop_->monotonic_now() << " with timestamp of "
                    << pi_capture_time << " and event time of "
                    << event_loop_->context().monotonic_event_time;
            cameras_.at(camera_index)
                .rejection_counter.IncrementError(
                    RejectionReason::IMAGE_FROM_FUTURE);
            return;
          }
          auto builder = cameras_.at(camera_index).debug_sender.MakeBuilder();
          aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, 20>
              debug_offsets;
          for (const frc971::vision::TargetPoseFbs *target :
               *targets.target_poses()) {
            VLOG(1) << "Handling target from " << camera_index;
            auto offset = HandleTarget(camera_index, capture_time, *target,
                                       builder.fbb());
            if (debug_offsets.size() < debug_offsets.capacity()) {
              debug_offsets.push_back(offset);
            } else {
              AOS_LOG(ERROR, "Dropped message from debug vector.");
            }
          }
          auto vector_offset = builder.fbb()->CreateVector(
              debug_offsets.data(), debug_offsets.size());
          auto stats_offset =
              StatisticsForCamera(cameras_.at(camera_index), builder.fbb());
          Visualization::Builder visualize_builder(*builder.fbb());
          visualize_builder.add_targets(vector_offset);
          visualize_builder.add_statistics(stats_offset);
          builder.CheckOk(builder.Send(visualize_builder.Finish()));
          SendStatus();
        });
  }

  event_loop_->AddPhasedLoop([this](int) { SendOutput(); },
                             std::chrono::milliseconds(20));

  event_loop_->MakeWatcher(
      "/drivetrain",
      [this](
          const frc971::control_loops::drivetrain::LocalizerControl &control) {
        const double theta = control.keep_current_theta()
                                 ? ekf_.X_hat(StateIdx::kTheta)
                                 : control.theta();
        const double left_encoder = ekf_.X_hat(StateIdx::kLeftEncoder);
        const double right_encoder = ekf_.X_hat(StateIdx::kRightEncoder);
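        // Reset the pose to the commanded position and heading while carrying
        // over the current encoder estimates; the remaining velocity and
        // error states are zeroed and the existing covariance P is kept.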
        ekf_.ResetInitialState(
            t_,
            (HybridEkf::State() << control.x(), control.y(), theta,
             left_encoder, 0, right_encoder, 0, 0, 0, 0, 0, 0)
                .finished(),
            ekf_.P());
      });

  ekf_.set_ignore_accel(true);
  // Priority should be lower than the imu reading process, but non-zero.
  event_loop->SetRuntimeRealtimePriority(10);
  event_loop->OnRun([this, event_loop]() {
    ekf_.ResetInitialState(event_loop->monotonic_now(),
                           HybridEkf::State::Zero(), ekf_.P());
  });
}

void Localizer::HandleImu(aos::monotonic_clock::time_point sample_time_pico,
                          aos::monotonic_clock::time_point sample_time_pi,
                          std::optional<Eigen::Vector2d> encoders,
                          Eigen::Vector3d gyro, Eigen::Vector3d accel) {
  last_encoder_readings_ = encoders;
  // Ignore invalid readings; the HybridEkf will handle it reasonably.
  if (!encoders.has_value()) {
    return;
  }
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = sample_time_pico;
  }
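  // If the sample time jumped forward by more than ten nominal IMU periods,
  // assume our time base was reset (e.g., the pico rebooted) and restart time
  // tracking from the new sample instead of integrating across the gap.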
  if (t_ + 10 * frc971::controls::ImuWatcher::kNominalDt < sample_time_pico) {
    t_ = sample_time_pico;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = sample_time_pico - t_;
  t_ = sample_time_pico;
  // We don't actually use the down estimator currently, but it's really
  // convenient for debugging.
  down_estimator_.Predict(gyro, accel, dt);
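  // Rotate the gyro reading into the robot frame and use its z component as
  // the yaw rate.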
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  ekf_.UpdateEncodersAndGyro(encoders.value()(0), encoders.value()(1), yaw_rate,
                             utils_.VoltageOrZero(sample_time_pi), accel, t_);
  SendStatus();
}

flatbuffers::Offset<TargetEstimateDebug> Localizer::RejectImage(
    int camera_index, RejectionReason reason,
    TargetEstimateDebug::Builder *builder) {
  builder->add_accepted(false);
  builder->add_rejection_reason(reason);
  cameras_.at(camera_index).rejection_counter.IncrementError(reason);
  return builder->Finish();
}

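// Returns true if we should attempt to use the given april tag: the tag must
// be present in the target map and must not be on the ignore list for our
// current alliance (if the alliance is unknown, any tag in the map is used).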
bool Localizer::UseAprilTag(uint64_t target_id) {
  if (target_poses_.count(target_id) == 0) {
    return false;
  }

  const flatbuffers::Vector<uint64_t> *ignore_tags = nullptr;

  switch (utils_.Alliance()) {
    case aos::Alliance::kRed:
      ignore_tags = constants_fetcher_.constants().ignore_targets()->red();
      CHECK(ignore_tags != nullptr);
      break;
    case aos::Alliance::kBlue:
      ignore_tags = constants_fetcher_.constants().ignore_targets()->blue();
      CHECK(ignore_tags != nullptr);
      break;
    case aos::Alliance::kInvalid:
      return true;
  }
  return std::find(ignore_tags->begin(), ignore_tags->end(), target_id) ==
         ignore_tags->end();
}

flatbuffers::Offset<TargetEstimateDebug> Localizer::HandleTarget(
    int camera_index, const aos::monotonic_clock::time_point capture_time,
    const frc971::vision::TargetPoseFbs &target,
    flatbuffers::FlatBufferBuilder *debug_fbb) {
  ++total_candidate_targets_;
  ++cameras_.at(camera_index).total_candidate_targets;

  TargetEstimateDebug::Builder builder(*debug_fbb);
  builder.add_camera(camera_index);
  builder.add_image_age_sec(aos::time::DurationInSeconds(
      event_loop_->monotonic_now() - capture_time));
  builder.add_image_monotonic_timestamp_ns(
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          capture_time.time_since_epoch())
          .count());

  const uint64_t target_id = target.id();
  builder.add_april_tag(target_id);
  VLOG(2) << aos::FlatbufferToJson(&target);
  if (!UseAprilTag(target_id)) {
    VLOG(1) << "Rejecting target due to invalid ID " << target_id;
    return RejectImage(camera_index, RejectionReason::NO_SUCH_TARGET, &builder);
  }

  const Transform &H_field_target = target_poses_.at(target_id);
  const Transform &H_robot_camera = cameras_.at(camera_index).extrinsics;

  const Transform H_camera_target = PoseToTransform(&target);

  const Transform H_field_camera = H_field_target * H_camera_target.inverse();
  // Back out the robot position that is implied by the current camera
  // reading. Note that the Pose object ignores any roll/pitch components, so
  // if the camera's extrinsics for pitch/roll are off, this should just
  // ignore it.
  const frc971::control_loops::Pose measured_camera_pose(H_field_camera);
  builder.add_camera_x(measured_camera_pose.rel_pos().x());
  builder.add_camera_y(measured_camera_pose.rel_pos().y());
  // Because the camera uses Z as forwards rather than X, just calculate the
  // debugging theta value using the transformation matrix directly.
  builder.add_camera_theta(
      std::atan2(H_field_camera(1, 2), H_field_camera(0, 2)));
  // Calculate the camera-to-robot transformation matrix ignoring the
  // pitch/roll of the camera.
  const Transform H_camera_robot_stripped =
      frc971::control_loops::Pose(H_robot_camera)
          .AsTransformationMatrix()
          .inverse();
  const frc971::control_loops::Pose measured_pose(
      measured_camera_pose.AsTransformationMatrix() * H_camera_robot_stripped);
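  // Net result: the implied robot pose is approximately
  //   H_field_robot = H_field_target * H_camera_target^-1 * H_robot_camera^-1,
  // with the camera's pitch/roll stripped out by the Pose conversions above.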
  // This "Z" is the robot pose directly implied by the camera results.
  const Eigen::Matrix<double, 3, 1> Z(measured_pose.rel_pos().x(),
                                      measured_pose.rel_pos().y(),
                                      measured_pose.rel_theta());
  builder.add_implied_robot_x(Z(Corrector::kX));
  builder.add_implied_robot_y(Z(Corrector::kY));
  builder.add_implied_robot_theta(Z(Corrector::kTheta));

  Eigen::AngleAxisd rvec_camera_target(
      Eigen::Affine3d(H_camera_target).rotation());
  // Use y angle (around vertical axis) to compute skew
  double skew = rvec_camera_target.axis().y() * rvec_camera_target.angle();
  builder.add_skew(skew);

  double distance_to_target =
      Eigen::Affine3d(H_camera_target).translation().norm();

  // TODO(james): Tune this. Also, gain schedule for auto mode?
  Eigen::Matrix<double, 3, 1> noises(1.0, 1.0, 0.5);
  noises /= 4.0;
  // Scale noise by the distortion factor for this detection
  noises *=
      (1.0 + absl::GetFlag(FLAGS_distortion_noise_scalar) *
                 target.distortion_factor() * std::exp(distance_to_target));
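  // Net measurement standard deviations are (1.0, 1.0, 0.5) / 4 for
  // (x, y, theta), inflated by
  //   1 + distortion_noise_scalar * distortion_factor * exp(distance),
  // so detections that are more distorted or farther away are trusted less.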

  Eigen::Matrix3d R = Eigen::Matrix3d::Zero();
  R.diagonal() = noises.cwiseAbs2();
  // In order to do the EKF correction, we determine the expected state based
  // on the state at the time the image was captured; however, we insert the
  // correction update itself at the current time. This is technically not
  // quite correct, but saves substantial CPU usage & code complexity by making
  // it so that we don't have to constantly rewind the entire EKF history.
  const std::optional<State> state_at_capture =
      ekf_.LastStateBeforeTime(capture_time);

  if (!state_at_capture.has_value()) {
    VLOG(1) << "Rejecting image due to being too old.";
    return RejectImage(camera_index, RejectionReason::IMAGE_TOO_OLD, &builder);
  } else if (target.pose_error() > absl::GetFlag(FLAGS_max_pose_error)) {
    VLOG(1) << "Rejecting target due to high pose error "
            << target.pose_error();
    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR,
                       &builder);
  } else if (target.pose_error_ratio() >
             absl::GetFlag(FLAGS_max_pose_error_ratio)) {
    VLOG(1) << "Rejecting target due to high pose error ratio "
            << target.pose_error_ratio();
    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR_RATIO,
                       &builder);
  }

  double theta_at_capture = state_at_capture.value()(StateIdx::kTheta);
  double camera_implied_theta = Z(Corrector::kTheta);
  constexpr double kDegToRad = M_PI / 180.0;

  const double robot_speed =
      (state_at_capture.value()(StateIdx::kLeftVelocity) +
       state_at_capture.value()(StateIdx::kRightVelocity)) /
      2.0;
  const double yaw_threshold =
      (utils_.MaybeInAutonomous()
           ? absl::GetFlag(FLAGS_max_implied_yaw_error)
           : absl::GetFlag(FLAGS_max_implied_teleop_yaw_error)) *
      kDegToRad;
  if (utils_.MaybeInAutonomous() &&
      (std::abs(robot_speed) >
       absl::GetFlag(FLAGS_max_auto_image_robot_speed))) {
    return RejectImage(camera_index, RejectionReason::ROBOT_TOO_FAST, &builder);
  } else if (std::abs(aos::math::NormalizeAngle(
                 camera_implied_theta - theta_at_capture)) > yaw_threshold) {
    return RejectImage(camera_index, RejectionReason::HIGH_IMPLIED_YAW_ERROR,
                       &builder);
  } else if (distance_to_target > absl::GetFlag(FLAGS_max_distance_to_target)) {
    return RejectImage(camera_index, RejectionReason::HIGH_DISTANCE_TO_TARGET,
                       &builder);
  }

  const Input U = ekf_.MostRecentInput();
  VLOG(1) << "previous state " << ekf_.X_hat().topRows<3>().transpose();
  const State prior_state = ekf_.X_hat();
  // For the correction step, instead of passing in the measurement directly,
  // we pass in (0, 0, 0) as the measurement and then for the expected
  // measurement (Zhat) we calculate the error between the pose implied by the
  // camera measurement and the current estimate of the pose. This doesn't
  // affect any of the math, it just makes the code a bit more convenient to
  // write given the Correct() interface we already have.
  observations_.CorrectKnownH(Eigen::Vector3d::Zero(), &U,
                              Corrector(state_at_capture.value(), Z), R, t_);
  ++total_accepted_targets_;
  ++cameras_.at(camera_index).total_accepted_targets;
  VLOG(1) << "new state " << ekf_.X_hat().topRows<3>().transpose();
  builder.add_correction_x(ekf_.X_hat()(StateIdx::kX) -
                           prior_state(StateIdx::kX));
  builder.add_correction_y(ekf_.X_hat()(StateIdx::kY) -
                           prior_state(StateIdx::kY));
  builder.add_correction_theta(ekf_.X_hat()(StateIdx::kTheta) -
                               prior_state(StateIdx::kTheta));
  builder.add_accepted(true);
  return builder.Finish();
}

Localizer::Output Localizer::Corrector::H(const State &, const Input &) {
  CHECK(Z_.allFinite());
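  // Because the "measurement" passed to Correct() is (0, 0, 0), returning
  //   Zhat = H * x_at_capture - Z
  // makes the innovation 0 - Zhat = Z - H * x_at_capture, which pulls the
  // estimate toward the camera-implied pose Z.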
  Eigen::Vector3d Zhat = H_ * state_at_capture_ - Z_;
  // Rewrap angle difference to put it back in range.
  Zhat(2) = aos::math::NormalizeAngle(Zhat(2));
  VLOG(1) << "Zhat " << Zhat.transpose() << " Z_ " << Z_.transpose()
          << " state " << (H_ * state_at_capture_).transpose();
  return Zhat;
}

void Localizer::SendOutput() {
  auto builder = output_sender_.MakeBuilder();
  frc971::controls::LocalizerOutput::Builder output_builder =
      builder.MakeBuilder<frc971::controls::LocalizerOutput>();
  output_builder.add_monotonic_timestamp_ns(
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          event_loop_->context().monotonic_event_time.time_since_epoch())
          .count());
  output_builder.add_x(ekf_.X_hat(StateIdx::kX));
  output_builder.add_y(ekf_.X_hat(StateIdx::kY));
  output_builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
  output_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  output_builder.add_image_accepted_count(total_accepted_targets_);
  const Eigen::Quaterniond &orientation =
      Eigen::AngleAxis<double>(ekf_.X_hat(StateIdx::kTheta),
                               Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat();
  frc971::controls::Quaternion quaternion;
  quaternion.mutate_x(orientation.x());
  quaternion.mutate_y(orientation.y());
  quaternion.mutate_z(orientation.z());
  quaternion.mutate_w(orientation.w());
  output_builder.add_orientation(&quaternion);
  server_statistics_fetcher_.Fetch();
  client_statistics_fetcher_.Fetch();

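  // Report the pis as connected only if every non-logger node shows as
  // connected in both the server and client message bridge statistics.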
  bool pis_connected = true;

  if (server_statistics_fetcher_.get()) {
    for (const auto *pi_server_status :
         *server_statistics_fetcher_->connections()) {
      if (pi_server_status->state() ==
              aos::message_bridge::State::DISCONNECTED &&
          pi_server_status->node()->name()->string_view() != "logger") {
        pis_connected = false;
      }
    }
  }

  if (client_statistics_fetcher_.get()) {
    for (const auto *pi_client_status :
         *client_statistics_fetcher_->connections()) {
      if (pi_client_status->state() ==
              aos::message_bridge::State::DISCONNECTED &&
          pi_client_status->node()->name()->string_view() != "logger") {
        pis_connected = false;
      }
    }
  }

  output_builder.add_all_pis_connected(pis_connected);
  builder.CheckOk(builder.Send(output_builder.Finish()));
}

flatbuffers::Offset<frc971::control_loops::drivetrain::LocalizerState>
Localizer::PopulateState(const State &X_hat,
                         flatbuffers::FlatBufferBuilder *fbb) {
  frc971::control_loops::drivetrain::LocalizerState::Builder builder(*fbb);
  builder.add_x(X_hat(StateIdx::kX));
  builder.add_y(X_hat(StateIdx::kY));
  builder.add_theta(X_hat(StateIdx::kTheta));
  builder.add_left_velocity(X_hat(StateIdx::kLeftVelocity));
  builder.add_right_velocity(X_hat(StateIdx::kRightVelocity));
  builder.add_left_encoder(X_hat(StateIdx::kLeftEncoder));
  builder.add_right_encoder(X_hat(StateIdx::kRightEncoder));
  builder.add_left_voltage_error(X_hat(StateIdx::kLeftVoltageError));
  builder.add_right_voltage_error(X_hat(StateIdx::kRightVoltageError));
  builder.add_angular_error(X_hat(StateIdx::kAngularError));
  builder.add_longitudinal_velocity_offset(
      X_hat(StateIdx::kLongitudinalVelocityOffset));
  builder.add_lateral_velocity(X_hat(StateIdx::kLateralVelocity));
  return builder.Finish();
}

flatbuffers::Offset<ImuStatus> Localizer::PopulateImu(
    flatbuffers::FlatBufferBuilder *fbb) const {
  const auto zeroer_offset = imu_watcher_.zeroer().PopulateStatus(fbb);
  const auto failures_offset = imu_watcher_.PopulateImuFailures(fbb);
  ImuStatus::Builder builder(*fbb);
  builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  builder.add_faulted_zero(imu_watcher_.zeroer().Faulted());
  builder.add_zeroing(zeroer_offset);
  if (imu_watcher_.pico_offset().has_value()) {
    builder.add_pico_offset_ns(imu_watcher_.pico_offset().value().count());
    builder.add_pico_offset_error_ns(imu_watcher_.pico_offset_error().count());
  }
  if (last_encoder_readings_.has_value()) {
    builder.add_left_encoder(last_encoder_readings_.value()(0));
    builder.add_right_encoder(last_encoder_readings_.value()(1));
  }
  builder.add_imu_failures(failures_offset);
  return builder.Finish();
}

flatbuffers::Offset<CumulativeStatistics> Localizer::StatisticsForCamera(
    const CameraState &camera, flatbuffers::FlatBufferBuilder *fbb) {
  const auto counts_offset = camera.rejection_counter.PopulateCounts(fbb);
  CumulativeStatistics::Builder stats_builder(*fbb);
  stats_builder.add_total_accepted(camera.total_accepted_targets);
  stats_builder.add_total_candidates(camera.total_candidate_targets);
  stats_builder.add_rejection_reasons(counts_offset);
  return stats_builder.Finish();
}

void Localizer::SendStatus() {
  auto builder = status_sender_.MakeBuilder();
  std::array<flatbuffers::Offset<CumulativeStatistics>, kNumCameras>
      stats_offsets;
  for (size_t ii = 0; ii < kNumCameras; ++ii) {
    stats_offsets.at(ii) = StatisticsForCamera(cameras_.at(ii), builder.fbb());
  }
  auto stats_offset =
      builder.fbb()->CreateVector(stats_offsets.data(), stats_offsets.size());
  auto down_estimator_offset =
      down_estimator_.PopulateStatus(builder.fbb(), t_);
  auto imu_offset = PopulateImu(builder.fbb());
  auto state_offset = PopulateState(ekf_.X_hat(), builder.fbb());
  Status::Builder status_builder = builder.MakeBuilder<Status>();
  status_builder.add_state(state_offset);
  status_builder.add_down_estimator(down_estimator_offset);
  status_builder.add_imu(imu_offset);
  status_builder.add_statistics(stats_offset);
  builder.CheckOk(builder.Send(status_builder.Finish()));
}

}  // namespace y2023::localizer