Add ability to read image results in localizer
Change-Id: I958734e35944c5e8a5141f58b2b26af917dc43fc
Signed-off-by: James Kuszmaul <jabukuszmaul+collab@gmail.com>
diff --git a/y2022/localizer/BUILD b/y2022/localizer/BUILD
index e13159f..9deab1a 100644
--- a/y2022/localizer/BUILD
+++ b/y2022/localizer/BUILD
@@ -56,6 +56,7 @@
":localizer_status_fbs",
"//aos/containers:ring_buffer",
"//aos/events:event_loop",
+ "//aos/network:message_bridge_server_fbs",
"//aos/time",
"//frc971/control_loops:c2d",
"//frc971/control_loops:control_loops_fbs",
@@ -67,8 +68,10 @@
"//frc971/wpilib:imu_fbs",
"//frc971/zeroing:imu_zeroer",
"//frc971/zeroing:wrap",
- "//y2020/vision/sift:sift_fbs",
"//y2022:constants",
+ "//y2022/control_loops/superstructure:superstructure_status_fbs",
+ "//y2022/vision:calibration_fbs",
+ "//y2022/vision:target_estimate_fbs",
"@org_tuxfamily_eigen//:eigen",
],
)
@@ -91,12 +94,14 @@
data = [
"//y2022:aos_config",
],
- shard_count = 10,
+ shard_count = 13,
deps = [
":localizer",
"//aos/events:simulated_event_loop",
+ "//aos/events/logging:log_writer",
"//aos/testing:googletest",
"//frc971/control_loops/drivetrain:drivetrain_test_lib",
+ "//y2022/control_loops/drivetrain:drivetrain_base",
],
)
diff --git a/y2022/localizer/localizer.cc b/y2022/localizer/localizer.cc
index 8d4f908..d0e9704 100644
--- a/y2022/localizer/localizer.cc
+++ b/y2022/localizer/localizer.cc
@@ -3,6 +3,7 @@
#include "frc971/control_loops/c2d.h"
#include "frc971/wpilib/imu_batch_generated.h"
#include "y2022/constants.h"
+#include "aos/json_to_flatbuffer.h"
namespace frc971::controls {
@@ -10,6 +11,11 @@
constexpr double kG = 9.80665;
constexpr std::chrono::microseconds kNominalDt(500);
+// Field position of the target (the 2022 target is conveniently in the middle
+// of the field).
+constexpr double kVisionTargetX = 0.0;
+constexpr double kVisionTargetY = 0.0;
+
template <int N>
Eigen::Matrix<double, N, 1> MakeState(std::vector<double> values) {
CHECK_EQ(static_cast<size_t>(N), values.size());
@@ -29,6 +35,7 @@
.plant()
.coefficients()),
down_estimator_(dt_config) {
+ statistics_.rejection_counts.fill(0);
CHECK_EQ(branches_.capacity(), static_cast<size_t>(std::chrono::seconds(1) /
kNominalDt / kBranchPeriod));
if (dt_config_.is_simulated) {
@@ -77,10 +84,20 @@
B_continuous_accel_(kTheta, kThetaRate) = 1.0;
Q_continuous_model_.setZero();
- Q_continuous_model_.diagonal() << 1e-4, 1e-4, 1e-4, 1e-2, 1e-0, 1e-0, 1e-2,
+ Q_continuous_model_.diagonal() << 1e-2, 1e-2, 1e-8, 1e-2, 1e-0, 1e-0, 1e-2,
1e-0, 1e-0;
+ Q_continuous_accel_.setZero();
+ Q_continuous_accel_.diagonal() << 1e-2, 1e-2, 1e-20, 1e-4, 1e-4;
+
P_model_ = Q_continuous_model_ * aos::time::DurationInSeconds(kNominalDt);
+
+ // We can precalculate the discretizations of the accel model because it is
+ // actually LTI.
+
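+  // As a sketch of what the discretization produces (up to the approximations
+  // DiscretizeQAFast makes internally):
+  //   A_d = exp(A_c * dt)
+  //   Q_d = integral from 0 to dt of exp(A_c * tau) Q_c exp(A_c^T * tau) dtau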
+ DiscretizeQAFast(Q_continuous_accel_, A_continuous_accel_, kNominalDt,
+ &Q_discrete_accel_, &A_discrete_accel_);
+ P_accel_ = Q_discrete_accel_;
}
Eigen::Matrix<double, ModelBasedLocalizer::kNModelStates,
@@ -281,6 +298,8 @@
&A_discrete);
P_model_ = A_discrete * P_model_ * A_discrete.transpose() + Q_discrete;
+ P_accel_ = A_discrete_accel_ * P_accel_ * A_discrete_accel_.transpose() +
+ Q_discrete_accel_;
Eigen::Matrix<double, kNModelOutputs, kNModelStates> H;
Eigen::Matrix<double, kNModelOutputs, kNModelOutputs> R;
@@ -300,8 +319,6 @@
if (branches_.empty()) {
VLOG(2) << "Initializing";
- current_state_.model_state.setZero();
- current_state_.accel_state.setZero();
current_state_.model_state(kLeftEncoder) = encoders(0);
current_state_.model_state(kRightEncoder) = encoders(1);
current_state_.branch_time = t;
@@ -404,6 +421,8 @@
++branch_counter_;
if (branch_counter_ % kBranchPeriod == 0) {
branches_.Push(new_branch);
+ old_positions_.Push(OldPosition{t, xytheta(), latest_turret_position_,
+ latest_turret_velocity_});
branch_counter_ = 0;
}
@@ -426,6 +445,219 @@
CHECK(std::isfinite(last_residual_));
}
+const ModelBasedLocalizer::OldPosition ModelBasedLocalizer::GetStateForTime(
+ aos::monotonic_clock::time_point time) {
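+  // Returns the stored sample closest in time to the requested time via a
+  // linear scan; the buffer only holds a fraction of a second of history, so
+  // this stays cheap.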
+ if (old_positions_.empty()) {
+ return OldPosition{};
+ }
+
+ aos::monotonic_clock::duration lowest_time_error =
+ aos::monotonic_clock::duration::max();
+ const OldPosition *best_match = nullptr;
+ for (const OldPosition &sample : old_positions_) {
+ const aos::monotonic_clock::duration time_error =
+ std::chrono::abs(sample.sample_time - time);
+ if (time_error < lowest_time_error) {
+ lowest_time_error = time_error;
+ best_match = &sample;
+ }
+ }
+ return *best_match;
+}
+
+namespace {
+// Converts a flatbuffer TransformationMatrix to an Eigen matrix. Technically,
+// this should be able to do a single memcpy, but the extra verbosity here seems
+// appropriate.
+Eigen::Matrix<double, 4, 4> FlatbufferToTransformationMatrix(
+ const frc971::vision::calibration::TransformationMatrix &flatbuffer) {
+ CHECK_EQ(16u, CHECK_NOTNULL(flatbuffer.data())->size());
+ Eigen::Matrix<double, 4, 4> result;
+ result.setIdentity();
+ for (int row = 0; row < 4; ++row) {
+ for (int col = 0; col < 4; ++col) {
+ result(row, col) = (*flatbuffer.data())[row * 4 + col];
+ }
+ }
+ return result;
+}
+
+// Node names of the pis to listen for cameras from.
+const std::array<std::string, 4> kPisToUse{"pi1", "pi2", "pi3", "pi4"};
+}  // namespace
+
+const Eigen::Matrix<double, 4, 4> ModelBasedLocalizer::CameraTransform(
+ const OldPosition &state,
+ const frc971::vision::calibration::CameraCalibration *calibration,
+ std::optional<RejectionReason> *rejection_reason) const {
+ CHECK_NOTNULL(rejection_reason);
+ CHECK_NOTNULL(calibration);
+ // Per the CameraCalibration specification, we can actually determine whether
+ // the camera is the turret camera just from the presence of the
+ // turret_extrinsics member.
+ const bool is_turret = calibration->has_turret_extrinsics();
+ // Ignore readings when the turret is spinning too fast, on the assumption
+ // that the odds of screwing up the time compensation are higher.
+  // Note that the current number here is chosen pretty arbitrarily: 1 rad /
+  // sec seems reasonable, but may be unnecessarily low or high.
+ constexpr double kMaxTurretVelocity = 1.0;
+ if (is_turret && std::abs(state.turret_velocity) > kMaxTurretVelocity &&
+ !rejection_reason->has_value()) {
+ *rejection_reason = RejectionReason::TURRET_TOO_FAST;
+ }
+ CHECK(calibration->has_fixed_extrinsics());
+ const Eigen::Matrix<double, 4, 4> fixed_extrinsics =
+ FlatbufferToTransformationMatrix(*calibration->fixed_extrinsics());
+
+ // Calculate the pose of the camera relative to the robot origin.
+ Eigen::Matrix<double, 4, 4> H_robot_camera = fixed_extrinsics;
+ if (is_turret) {
+ H_robot_camera =
+ H_robot_camera *
+ frc971::control_loops::TransformationMatrixForYaw<double>(
+ state.turret_position) *
+ FlatbufferToTransformationMatrix(*calibration->turret_extrinsics());
+ }
+ return H_robot_camera;
+}
+
+const std::optional<Eigen::Vector2d>
+ModelBasedLocalizer::CameraMeasuredRobotPosition(
+ const OldPosition &state, const y2022::vision::TargetEstimate *target,
+ std::optional<RejectionReason> *rejection_reason) const {
+ if (!target->has_camera_calibration()) {
+ *rejection_reason = RejectionReason::NO_CALIBRATION;
+ return std::nullopt;
+ }
+ const Eigen::Matrix<double, 4, 4> H_robot_camera =
+ CameraTransform(state, target->camera_calibration(), rejection_reason);
+ const control_loops::Pose robot_pose(
+ {state.xytheta(0), state.xytheta(1), 0.0}, state.xytheta(2));
+ const Eigen::Matrix<double, 4, 4> H_field_robot =
+ robot_pose.AsTransformationMatrix();
+ // Current estimated pose of the camera in the global frame.
+ // Note that this is all really just an elaborate way of extracting the
+ // current estimated camera yaw, and nothing else.
+ const Eigen::Matrix<double, 4, 4> H_field_camera =
+ H_field_robot * H_robot_camera;
+ // Grab the implied yaw of the camera (the +Z axis is coming out of the front
+ // of the cameras).
+ const Eigen::Vector3d rotated_camera_z =
+ H_field_camera.block<3, 3>(0, 0) * Eigen::Vector3d(0, 0, 1);
+ const double camera_yaw =
+ std::atan2(rotated_camera_z.y(), rotated_camera_z.x());
+ // All right, now we need to use the heading and distance from the
+ // TargetEstimate, plus the yaw embedded in the camera_pose, to determine what
+ // the implied X/Y position of the robot is. To do this, we calculate the
+ // heading/distance from the target to the robot. The distance is easy, since
+ // that's the same as the distance from the robot to the target. The heading
+ // isn't too hard, but is obnoxious to think about, since the heading from the
+ // target to the robot is distinct from the heading from the robot to the
+ // target.
+
+ // Just to walk through examples to confirm that the below calculation is
+ // correct:
+ // * If yaw = 0, and angle_to_target = 0, we are at 180 deg relative to the
+ // target.
+ // * If yaw = 90 deg, and angle_to_target = 0, we are at -90 deg relative to
+ // the target.
+ // * If yaw = 0, and angle_to_target = 90 deg, we are at -90 deg relative to
+ // the target.
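+  //   * As a concrete check: yaw = 0, angle_to_target = 0, and distance = d
+  //     give heading_from_target = pi, placing the camera at
+  //     (kVisionTargetX - d, kVisionTargetY), i.e. d meters behind the target
+  //     along -X, facing it.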
+ const double heading_from_target =
+ aos::math::NormalizeAngle(M_PI + camera_yaw + target->angle_to_target());
+ const double distance_from_target = target->distance();
+ // Extract the implied camera position on the field.
+ Eigen::Matrix<double, 4, 4> H_field_camera_measured = H_field_camera;
+ // TODO(james): Are we going to need to evict the roll/pitch components of the
+ // camera extrinsics this year as well?
+ H_field_camera_measured(0, 3) =
+ distance_from_target * std::cos(heading_from_target) + kVisionTargetX;
+ H_field_camera_measured(1, 3) =
+ distance_from_target * std::sin(heading_from_target) + kVisionTargetY;
+ const Eigen::Matrix<double, 4, 4> H_field_robot_measured =
+ H_field_camera_measured * H_robot_camera.inverse();
+ return H_field_robot_measured.block<2, 1>(0, 3);
+}
+
+void ModelBasedLocalizer::HandleImageMatch(
+ aos::monotonic_clock::time_point sample_time,
+ const y2022::vision::TargetEstimate *target) {
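+  // Look up where we thought we were when the image was taken, use that to
+  // turn the camera measurement into an implied field x/y for the robot, and
+  // then apply that x/y as a Kalman correction to both filters and to every
+  // stored branch.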
+ std::optional<RejectionReason> rejection_reason;
+
+ const OldPosition &state = GetStateForTime(sample_time);
+ const std::optional<Eigen::Vector2d> measured_robot_position =
+ CameraMeasuredRobotPosition(state, target, &rejection_reason);
+ // Technically, rejection_reason should always be set if
+ // measured_robot_position is nullopt, but in the future we may have more
+ // recoverable rejection reasons that we wish to allow to propagate further
+ // into the process.
+ if (!measured_robot_position || rejection_reason.has_value()) {
+ CHECK(rejection_reason.has_value());
+ TallyRejection(rejection_reason.value());
+ return;
+ }
+
+ // Next, go through and do the actual Kalman corrections for the x/y
+ // measurement, for both the accel state and the model-based state.
+ const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous_model =
+ AModel(current_state_.model_state);
+
+ Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete_model;
+ Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete_model;
+
+ DiscretizeQAFast(Q_continuous_model_, A_continuous_model, kNominalDt,
+ &Q_discrete_model, &A_discrete_model);
+
+ Eigen::Matrix<double, 2, kNModelStates> H_model;
+ H_model.setZero();
+ Eigen::Matrix<double, 2, kNAccelStates> H_accel;
+ H_accel.setZero();
+ Eigen::Matrix<double, 2, 2> R;
+ R.setZero();
+ H_model(0, kX) = 1.0;
+ H_model(1, kY) = 1.0;
+ H_accel(0, kX) = 1.0;
+ H_accel(1, kY) = 1.0;
+ R.diagonal() << 1e-2, 1e-2;
+
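+  // This is the standard Kalman measurement update, applied independently to
+  // the model filter and the accel filter (a sketch of the algebra, not new
+  // behavior):
+  //   K = P H^T (H P H^T + R)^-1
+  //   P <- (I - K H) P
+  //   x <- x + K (z - H x)
+  // where z is the camera-implied x/y position and R is the assumed
+  // measurement covariance, in meters^2.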
+ const Eigen::Matrix<double, kNModelStates, 2> K_model =
+ P_model_ * H_model.transpose() *
+ (H_model * P_model_ * H_model.transpose() + R).inverse();
+ const Eigen::Matrix<double, kNAccelStates, 2> K_accel =
+ P_accel_ * H_accel.transpose() *
+ (H_accel * P_accel_ * H_accel.transpose() + R).inverse();
+ P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
+ K_model * H_model) *
+ P_model_;
+ P_accel_ = (Eigen::Matrix<double, kNAccelStates, kNAccelStates>::Identity() -
+ K_accel * H_accel) *
+ P_accel_;
+ // And now we have to correct *everything* on all the branches:
+ for (CombinedState &state : branches_) {
+ state.model_state += K_model * (measured_robot_position.value() -
+ H_model * state.model_state);
+ state.accel_state += K_accel * (measured_robot_position.value() -
+ H_accel * state.accel_state);
+ }
+ current_state_.model_state +=
+ K_model *
+ (measured_robot_position.value() - H_model * current_state_.model_state);
+ current_state_.accel_state +=
+ K_accel *
+ (measured_robot_position.value() - H_accel * current_state_.accel_state);
+
+ statistics_.total_accepted++;
+ statistics_.total_candidates++;
+}
+
+void ModelBasedLocalizer::HandleTurret(
+ aos::monotonic_clock::time_point sample_time, double turret_position,
+ double turret_velocity) {
+ last_turret_update_ = sample_time;
+ latest_turret_position_ = turret_position;
+ latest_turret_velocity_ = turret_velocity;
+}
+
void ModelBasedLocalizer::HandleReset(aos::monotonic_clock::time_point now,
const Eigen::Vector3d &xytheta) {
branches_.Reset();
@@ -470,6 +702,16 @@
flatbuffers::Offset<ModelBasedStatus> ModelBasedLocalizer::PopulateStatus(
flatbuffers::FlatBufferBuilder *fbb) {
+ const auto rejections_offset = fbb->CreateVector(
+ statistics_.rejection_counts.data(), statistics_.rejection_counts.size());
+
+ CumulativeStatistics::Builder stats_builder(*fbb);
+ stats_builder.add_total_accepted(statistics_.total_accepted);
+ stats_builder.add_total_candidates(statistics_.total_candidates);
+ stats_builder.add_rejection_reason_count(rejections_offset);
+ const flatbuffers::Offset<CumulativeStatistics> stats_offset =
+ stats_builder.Finish();
+
const flatbuffers::Offset<control_loops::drivetrain::DownEstimatorState>
down_estimator_offset = down_estimator_.PopulateStatus(fbb, t_);
@@ -508,6 +750,7 @@
builder.add_implied_accel_y(abs_accel_(1));
builder.add_implied_accel_z(abs_accel_(2));
builder.add_clock_resets(clock_resets_);
+ builder.add_statistics(stats_offset);
return builder.Finish();
}
@@ -528,6 +771,9 @@
output_fetcher_(
event_loop_->MakeFetcher<frc971::control_loops::drivetrain::Output>(
"/drivetrain")),
+ clock_offset_fetcher_(
+ event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
+ "/aos")),
left_encoder_(-DrivetrainWrapPeriod() / 2.0, DrivetrainWrapPeriod()),
right_encoder_(-DrivetrainWrapPeriod() / 2.0, DrivetrainWrapPeriod()) {
event_loop_->MakeWatcher(
@@ -538,9 +784,40 @@
? model_based_.xytheta()(2)
: control.theta();
model_based_.HandleReset(event_loop_->monotonic_now(),
- {control.x(), control.y(), theta});
+ {control.x(), control.y(), theta});
});
event_loop_->MakeWatcher(
+ "/superstructure",
+ [this](const y2022::control_loops::superstructure::Status &status) {
+        if (!status.has_turret()) {
+          return;
+        }
+ model_based_.HandleTurret(event_loop_->context().monotonic_event_time,
+ status.turret()->position(),
+ status.turret()->velocity());
+ });
+
+ for (const auto &pi : kPisToUse) {
+ event_loop_->MakeWatcher(
+ "/" + pi + "/camera",
+ [this, pi](const y2022::vision::TargetEstimate &target) {
+ const std::optional<aos::monotonic_clock::duration> monotonic_offset =
+ ClockOffset(pi);
+ if (!monotonic_offset.has_value()) {
+ return;
+ }
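+          // Convert the image's capture time from the pi's monotonic clock
+          // to our own using the message-bridge offset estimate.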
+ // TODO(james): Get timestamp from message contents.
+          aos::monotonic_clock::time_point capture_time(
+              event_loop_->context().monotonic_remote_time -
+              monotonic_offset.value());
+ if (capture_time > event_loop_->context().monotonic_event_time) {
+ model_based_.TallyRejection(RejectionReason::IMAGE_FROM_FUTURE);
+ return;
+ }
+ model_based_.HandleImageMatch(capture_time, &target);
+ });
+ }
+ event_loop_->MakeWatcher(
"/localizer", [this](const frc971::IMUValuesBatch &values) {
CHECK(values.has_readings());
output_fetcher_.Fetch();
@@ -626,4 +903,30 @@
});
}
+std::optional<aos::monotonic_clock::duration> EventLoopLocalizer::ClockOffset(
+ std::string_view pi) {
+ std::optional<aos::monotonic_clock::duration> monotonic_offset;
+ clock_offset_fetcher_.Fetch();
+ if (clock_offset_fetcher_.get() != nullptr) {
+    for (const auto *connection : *clock_offset_fetcher_->connections()) {
+ if (connection->has_node() && connection->node()->has_name() &&
+ connection->node()->name()->string_view() == pi) {
+ if (connection->has_monotonic_offset()) {
+ monotonic_offset =
+ std::chrono::nanoseconds(connection->monotonic_offset());
+ } else {
+ // If we don't have a monotonic offset, that means we aren't
+ // connected.
+ model_based_.TallyRejection(
+ RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
+ return std::nullopt;
+ }
+ break;
+ }
+ }
+ }
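+  // Getting here with no offset means either that no ServerStatistics
+  // message has arrived yet or that the pi was missing from it entirely;
+  // both are treated as fatal.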
+ CHECK(monotonic_offset.has_value());
+ return monotonic_offset;
+}
+
} // namespace frc971::controls
diff --git a/y2022/localizer/localizer.h b/y2022/localizer/localizer.h
index 34f4c7a..978eb15 100644
--- a/y2022/localizer/localizer.h
+++ b/y2022/localizer/localizer.h
@@ -3,18 +3,19 @@
#include "Eigen/Dense"
#include "Eigen/Geometry"
-
-#include "aos/events/event_loop.h"
#include "aos/containers/ring_buffer.h"
+#include "aos/events/event_loop.h"
+#include "aos/network/message_bridge_server_generated.h"
#include "aos/time/time.h"
-#include "y2020/vision/sift/sift_generated.h"
-#include "y2022/localizer/localizer_status_generated.h"
-#include "y2022/localizer/localizer_output_generated.h"
-#include "frc971/control_loops/drivetrain/improved_down_estimator.h"
#include "frc971/control_loops/drivetrain/drivetrain_output_generated.h"
+#include "frc971/control_loops/drivetrain/improved_down_estimator.h"
#include "frc971/control_loops/drivetrain/localizer_generated.h"
#include "frc971/zeroing/imu_zeroer.h"
#include "frc971/zeroing/wrap.h"
+#include "y2022/control_loops/superstructure/superstructure_status_generated.h"
+#include "y2022/localizer/localizer_output_generated.h"
+#include "y2022/localizer/localizer_status_generated.h"
+#include "y2022/vision/target_estimate_generated.h"
namespace frc971::controls {
@@ -104,10 +105,10 @@
void HandleImu(aos::monotonic_clock::time_point t,
const Eigen::Vector3d &gyro, const Eigen::Vector3d &accel,
const Eigen::Vector2d encoders, const Eigen::Vector2d voltage);
- void HandleImageMatch(aos::monotonic_clock::time_point,
- const vision::sift::ImageMatchResult *) {
- LOG(FATAL) << "Unimplemented.";
- }
+ void HandleTurret(aos::monotonic_clock::time_point sample_time,
+ double turret_position, double turret_velocity);
+ void HandleImageMatch(aos::monotonic_clock::time_point sample_time,
+ const y2022::vision::TargetEstimate *target);
void HandleReset(aos::monotonic_clock::time_point,
const Eigen::Vector3d &xytheta);
@@ -128,12 +129,28 @@
void set_longitudinal_offset(double offset) { long_offset_ = offset; }
+ void TallyRejection(const RejectionReason reason) {
+ statistics_.total_candidates++;
+ statistics_.rejection_counts[static_cast<size_t>(reason)]++;
+ }
+
private:
struct CombinedState {
- AccelState accel_state;
- ModelState model_state;
- aos::monotonic_clock::time_point branch_time;
- double accumulated_divergence;
+ AccelState accel_state = AccelState::Zero();
+ ModelState model_state = ModelState::Zero();
+ aos::monotonic_clock::time_point branch_time =
+ aos::monotonic_clock::min_time;
+ double accumulated_divergence = 0.0;
+ };
+
+ // Struct to store state data needed to perform a camera correction, since
+ // camera corrections require looking back in time.
+ struct OldPosition {
+ aos::monotonic_clock::time_point sample_time =
+ aos::monotonic_clock::min_time;
+ Eigen::Vector3d xytheta = Eigen::Vector3d::Zero();
+ double turret_position = 0.0;
+ double turret_velocity = 0.0;
};
static flatbuffers::Offset<AccelBasedState> BuildAccelState(
@@ -169,22 +186,45 @@
const AccelInput &accel_input, const ModelInput &model_input,
aos::monotonic_clock::duration dt);
+ const OldPosition GetStateForTime(aos::monotonic_clock::time_point time);
+
+ // Returns the transformation to get from the camera frame to the robot frame
+ // for the specified state.
+ const Eigen::Matrix<double, 4, 4> CameraTransform(
+ const OldPosition &state,
+ const frc971::vision::calibration::CameraCalibration *calibration,
+ std::optional<RejectionReason> *rejection_reason) const;
+
+ // Returns the robot x/y position implied by the specified camera data and
+ // estimated state from that time.
+ const std::optional<Eigen::Vector2d> CameraMeasuredRobotPosition(
+ const OldPosition &state, const y2022::vision::TargetEstimate *target,
+ std::optional<RejectionReason> *rejection_reason) const;
+
const control_loops::drivetrain::DrivetrainConfig<double> dt_config_;
const StateFeedbackHybridPlantCoefficients<2, 2, 2, double>
velocity_drivetrain_coefficients_;
Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous_model_;
Eigen::Matrix<double, kNAccelStates, kNAccelStates> A_continuous_accel_;
+ Eigen::Matrix<double, kNAccelStates, kNAccelStates> A_discrete_accel_;
Eigen::Matrix<double, kNModelStates, kNModelInputs> B_continuous_model_;
Eigen::Matrix<double, kNAccelStates, kNAccelInputs> B_continuous_accel_;
Eigen::Matrix<double, kNModelStates, kNModelStates> Q_continuous_model_;
Eigen::Matrix<double, kNModelStates, kNModelStates> P_model_;
+
+ Eigen::Matrix<double, kNAccelStates, kNAccelStates> Q_continuous_accel_;
+ Eigen::Matrix<double, kNAccelStates, kNAccelStates> Q_discrete_accel_;
+
+ Eigen::Matrix<double, kNAccelStates, kNAccelStates> P_accel_;
+
+ control_loops::drivetrain::DrivetrainUkf down_estimator_;
+
// When we are following the model, we will, on each iteration:
// 1) Perform a model-based update of a single state.
// 2) Add a hypothetical non-model-based entry based on the current state.
// 3) Evict too-old non-model-based entries.
- control_loops::drivetrain::DrivetrainUkf down_estimator_;
// Buffer of old branches; these are all created by initializing a new
// model-based state based on the current state, and then initializing a new
@@ -195,6 +235,10 @@
aos::RingBuffer<CombinedState, 2000 / kBranchPeriod> branches_;
int branch_counter_ = 0;
+ // Buffer of old x/y/theta positions. This is used so that we can have a
+ // reference for exactly where we thought a camera was when it took an image.
+ aos::RingBuffer<OldPosition, 500 / kBranchPeriod> old_positions_;
+
CombinedState current_state_;
aos::monotonic_clock::time_point t_ = aos::monotonic_clock::min_time;
bool using_model_;
@@ -215,6 +259,29 @@
int clock_resets_ = 0;
+ std::optional<aos::monotonic_clock::time_point> last_turret_update_;
+ double latest_turret_position_ = 0.0;
+ double latest_turret_velocity_ = 0.0;
+
+  // Bookkeeping for how many camera measurements we have accepted or
+  // rejected, and why.
+ static constexpr size_t kNumRejectionReasons =
+ static_cast<int>(RejectionReason::MAX) -
+ static_cast<int>(RejectionReason::MIN) + 1;
+
+ struct Statistics {
+ int total_accepted = 0;
+ int total_candidates = 0;
+ static_assert(0 == static_cast<int>(RejectionReason::MIN));
+ static_assert(
+ kNumRejectionReasons ==
+ sizeof(
+ std::invoke_result<decltype(EnumValuesRejectionReason)>::type) /
+ sizeof(RejectionReason),
+ "RejectionReason has non-contiguous error values.");
+ std::array<int, kNumRejectionReasons> rejection_counts;
+ };
+ Statistics statistics_;
+
friend class testing::LocalizerTest;
};
@@ -227,11 +294,14 @@
ModelBasedLocalizer *localizer() { return &model_based_; }
private:
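+  // Returns the estimated offset of the given pi's monotonic clock relative
+  // to ours (such that remote_time - offset lands in our clock), or nullopt
+  // (after tallying a rejection) if message bridge reports it disconnected.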
+ std::optional<aos::monotonic_clock::duration> ClockOffset(
+ std::string_view pi);
aos::EventLoop *event_loop_;
ModelBasedLocalizer model_based_;
aos::Sender<LocalizerStatus> status_sender_;
aos::Sender<LocalizerOutput> output_sender_;
aos::Fetcher<frc971::control_loops::drivetrain::Output> output_fetcher_;
+ aos::Fetcher<aos::message_bridge::ServerStatistics> clock_offset_fetcher_;
zeroing::ImuZeroer zeroer_;
aos::monotonic_clock::time_point last_output_send_ =
aos::monotonic_clock::min_time;
diff --git a/y2022/localizer/localizer_status.fbs b/y2022/localizer/localizer_status.fbs
index 80cee0b..8be770e 100644
--- a/y2022/localizer/localizer_status.fbs
+++ b/y2022/localizer/localizer_status.fbs
@@ -2,6 +2,20 @@
namespace frc971.controls;
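+// Reasons that the localizer may discard a camera measurement. The localizer
+// statically asserts that these values are contiguous so that it can index
+// rejection counts by enum value.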
+enum RejectionReason : byte {
+ IMAGE_FROM_FUTURE = 0,
+ NO_CALIBRATION = 1,
+ TURRET_TOO_FAST = 2,
+ MESSAGE_BRIDGE_DISCONNECTED = 3,
+}
+
+table CumulativeStatistics {
+ total_accepted:int (id: 0);
+ total_candidates:int (id: 1);
+ // Indexed by integer value of RejectionReason enum.
+ rejection_reason_count:[int] (id: 2);
+}
+
// Stores the state associated with the acceleration-based modelling.
table AccelBasedState {
// x/y position, in meters.
@@ -74,6 +88,7 @@
// Number of times we have missed an IMU reading. Should never increase except
// *maybe* during startup.
clock_resets:int (id: 17);
+ statistics:CumulativeStatistics (id: 18);
}
table LocalizerStatus {
diff --git a/y2022/localizer/localizer_test.cc b/y2022/localizer/localizer_test.cc
index 1997b80..58df869 100644
--- a/y2022/localizer/localizer_test.cc
+++ b/y2022/localizer/localizer_test.cc
@@ -1,14 +1,31 @@
#include "y2022/localizer/localizer.h"
+#include "aos/events/logging/log_writer.h"
#include "aos/events/simulated_event_loop.h"
#include "gtest/gtest.h"
#include "frc971/control_loops/drivetrain/drivetrain_test_lib.h"
+#include "frc971/control_loops/pose.h"
+#include "y2022/vision/target_estimate_generated.h"
+#include "y2022/control_loops/superstructure/superstructure_status_generated.h"
+#include "y2022/control_loops/drivetrain/drivetrain_base.h"
+
+DEFINE_string(output_folder, "",
+ "If set, logs all channels to the provided logfile.");
namespace frc971::controls::testing {
typedef ModelBasedLocalizer::ModelState ModelState;
typedef ModelBasedLocalizer::AccelState AccelState;
typedef ModelBasedLocalizer::ModelInput ModelInput;
typedef ModelBasedLocalizer::AccelInput AccelInput;
+
+using frc971::vision::calibration::CameraCalibrationT;
+using frc971::vision::calibration::TransformationMatrixT;
+using frc971::control_loops::drivetrain::DrivetrainConfig;
+using frc971::control_loops::drivetrain::LocalizerControl;
+using frc971::control_loops::Pose;
+using y2022::vision::TargetEstimate;
+using y2022::vision::TargetEstimateT;
+
namespace {
constexpr size_t kX = ModelBasedLocalizer::kX;
constexpr size_t kY = ModelBasedLocalizer::kY;
@@ -26,13 +43,52 @@
constexpr size_t kRightVoltageError = ModelBasedLocalizer::kRightVoltageError;
constexpr size_t kLeftVoltage = ModelBasedLocalizer::kLeftVoltage;
constexpr size_t kRightVoltage = ModelBasedLocalizer::kRightVoltage;
+
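+// Provides the location of the turret on the robot (H_robot_turret); used as
+// the fixed extrinsics in these tests.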
+Eigen::Matrix<double, 4, 4> TurretRobotTransformation() {
+ Eigen::Matrix<double, 4, 4> H;
+ H.setIdentity();
+ H.block<3, 1>(0, 3) << 1, 1.1, 0.9;
+ return H;
+}
+
+// Provides the location of the camera on the turret.
+Eigen::Matrix<double, 4, 4> CameraTurretTransformation() {
+ Eigen::Matrix<double, 4, 4> H;
+ H.setIdentity();
+ H.block<3, 1>(0, 3) << 0.1, 0, 0;
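+  // Rotation mapping the camera frame onto the turret frame, assuming the
+  // usual camera convention: camera +Z (out of the lens) -> turret +X, camera
+  // +X (image right) -> turret -Y, camera +Y (image down) -> turret -Z.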
+ H.block<3, 3>(0, 0) << 0, 0, 1, -1, 0, 0, 0, -1, 0;
+
+ // Introduce a bit of pitch to make sure that we're exercising all the code.
+ H.block<3, 3>(0, 0) =
+ Eigen::AngleAxis<double>(0.1, Eigen::Vector3d::UnitY()) *
+ H.block<3, 3>(0, 0);
+ return H;
+}
+
+// Copies an Eigen matrix into a row-major vector of the data.
+std::vector<float> MatrixToVector(const Eigen::Matrix<double, 4, 4> &H) {
+ std::vector<float> data;
+ for (int row = 0; row < 4; ++row) {
+ for (int col = 0; col < 4; ++col) {
+ data.push_back(H(row, col));
+ }
+ }
+ return data;
+}
+
+DrivetrainConfig<double> GetTest2022DrivetrainConfig() {
+ DrivetrainConfig<double> config =
+ y2022::control_loops::drivetrain::GetDrivetrainConfig();
+ config.is_simulated = true;
+ return config;
+}
}
class LocalizerTest : public ::testing::Test {
protected:
LocalizerTest()
: dt_config_(
- control_loops::drivetrain::testing::GetTestDrivetrainConfig()),
+ GetTest2022DrivetrainConfig()),
localizer_(dt_config_) {
localizer_.set_longitudinal_offset(0.0);
}
@@ -177,7 +233,7 @@
const Eigen::Vector2d encoders{0.0, 0.0};
const Eigen::Vector2d voltages{0.0, 0.0};
Eigen::Vector3d accel{5.0, 2.0, 9.80665};
- Eigen::Vector3d accel_gs = accel / 9.80665;
+  Eigen::Vector3d accel_gs =
+      dt_config_.imu_transform.inverse() * accel / 9.80665;
while (t < start) {
// Spin to fill up the buffer.
localizer_.HandleImu(t, gyro, Eigen::Vector3d::UnitZ(), encoders, voltages);
@@ -215,7 +271,7 @@
// and then leave them constant, which should make it look like we are going
// around in a circle.
accel = Eigen::Vector3d{-accel(1), accel(0), 9.80665};
- accel_gs = accel / 9.80665;
+ accel_gs = dt_config_.imu_transform.inverse() * accel / 9.80665;
// v^2 / r = a
// w * r = v
// v^2 / v * w = a
@@ -292,6 +348,8 @@
aos::configuration::GetNode(&configuration_.message(), "roborio")),
imu_node_(
aos::configuration::GetNode(&configuration_.message(), "imu")),
+ camera_node_(
+ aos::configuration::GetNode(&configuration_.message(), "pi1")),
dt_config_(
control_loops::drivetrain::testing::GetTestDrivetrainConfig()),
localizer_event_loop_(
@@ -308,37 +366,177 @@
event_loop_factory_.MakeEventLoop("test", roborio_node_)),
imu_test_event_loop_(
event_loop_factory_.MakeEventLoop("test", imu_node_)),
+ camera_test_event_loop_(
+ event_loop_factory_.MakeEventLoop("test", camera_node_)),
logger_test_event_loop_(
event_loop_factory_.GetNodeEventLoopFactory("logger")
->MakeEventLoop("test")),
output_sender_(
roborio_test_event_loop_->MakeSender<Output>("/drivetrain")),
+ turret_sender_(
+ roborio_test_event_loop_
+ ->MakeSender<y2022::control_loops::superstructure::Status>(
+ "/superstructure")),
+ target_sender_(
+ camera_test_event_loop_->MakeSender<y2022::vision::TargetEstimate>(
+ "/camera")),
+ control_sender_(roborio_test_event_loop_->MakeSender<LocalizerControl>(
+ "/drivetrain")),
output_fetcher_(roborio_test_event_loop_->MakeFetcher<LocalizerOutput>(
"/localizer")),
status_fetcher_(
imu_test_event_loop_->MakeFetcher<LocalizerStatus>("/localizer")) {
localizer_.localizer()->set_longitudinal_offset(0.0);
- aos::TimerHandler *timer = roborio_test_event_loop_->AddTimer([this]() {
- auto builder = output_sender_.MakeBuilder();
- auto output_builder = builder.MakeBuilder<Output>();
- output_builder.add_left_voltage(output_voltages_(0));
- output_builder.add_right_voltage(output_voltages_(1));
- builder.CheckOk(builder.Send(output_builder.Finish()));
- });
- roborio_test_event_loop_->OnRun([timer, this]() {
- timer->Setup(roborio_test_event_loop_->monotonic_now(),
- std::chrono::milliseconds(5));
- });
+ {
+ aos::TimerHandler *timer = roborio_test_event_loop_->AddTimer([this]() {
+ {
+ auto builder = output_sender_.MakeBuilder();
+ auto output_builder = builder.MakeBuilder<Output>();
+ output_builder.add_left_voltage(output_voltages_(0));
+ output_builder.add_right_voltage(output_voltages_(1));
+ builder.CheckOk(builder.Send(output_builder.Finish()));
+ }
+ {
+ auto builder = turret_sender_.MakeBuilder();
+ auto turret_builder =
+ builder
+ .MakeBuilder<frc971::control_loops::
+ PotAndAbsoluteEncoderProfiledJointStatus>();
+ turret_builder.add_position(turret_position_);
+ turret_builder.add_velocity(turret_velocity_);
+ const auto turret_offset = turret_builder.Finish();
+ auto status_builder =
+ builder
+ .MakeBuilder<y2022::control_loops::superstructure::Status>();
+ status_builder.add_turret(turret_offset);
+ builder.CheckOk(builder.Send(status_builder.Finish()));
+ }
+ });
+ roborio_test_event_loop_->OnRun([timer, this]() {
+ timer->Setup(roborio_test_event_loop_->monotonic_now(),
+ std::chrono::milliseconds(5));
+ });
+ }
+ {
+ aos::TimerHandler *timer = camera_test_event_loop_->AddTimer([this]() {
+ if (!send_targets_) {
+ return;
+ }
+ const frc971::control_loops::Pose robot_pose(
+ {drivetrain_plant_.GetPosition().x(),
+ drivetrain_plant_.GetPosition().y(), 0.0},
+ drivetrain_plant_.state()(2, 0));
+ const Eigen::Matrix<double, 4, 4> H_turret_camera =
+ frc971::control_loops::TransformationMatrixForYaw(
+ turret_position_) *
+ CameraTurretTransformation();
+
+ const Eigen::Matrix<double, 4, 4> H_field_camera =
+ robot_pose.AsTransformationMatrix() * TurretRobotTransformation() *
+ H_turret_camera;
+ const Eigen::Matrix<double, 4, 4> target_transform =
+ Eigen::Matrix<double, 4, 4>::Identity();
+ const Eigen::Matrix<double, 4, 4> H_camera_target =
+ H_field_camera.inverse() * target_transform;
+ const Eigen::Matrix<double, 4, 4> H_target_camera =
+ H_camera_target.inverse();
+
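+        // Populate the TargetEstimate the way a camera would: distance is
+        // the horizontal range from the target to the camera, and
+        // angle_to_target is the CCW-positive bearing of the target in the
+        // camera frame (+Z forward).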
+ std::unique_ptr<y2022::vision::TargetEstimateT> estimate(
+ new y2022::vision::TargetEstimateT());
+ estimate->distance = H_target_camera.block<2, 1>(0, 3).norm();
+ estimate->angle_to_target =
+ std::atan2(-H_camera_target(0, 3), H_camera_target(2, 3));
+ estimate->camera_calibration.reset(new CameraCalibrationT());
+ {
+ estimate->camera_calibration->fixed_extrinsics.reset(
+ new TransformationMatrixT());
+ TransformationMatrixT *H_robot_turret =
+ estimate->camera_calibration->fixed_extrinsics.get();
+ H_robot_turret->data = MatrixToVector(TurretRobotTransformation());
+ }
+
+ estimate->camera_calibration->turret_extrinsics.reset(
+ new TransformationMatrixT());
+ estimate->camera_calibration->turret_extrinsics->data =
+ MatrixToVector(CameraTurretTransformation());
+
+ auto builder = target_sender_.MakeBuilder();
+ builder.CheckOk(
+ builder.Send(TargetEstimate::Pack(*builder.fbb(), estimate.get())));
+ });
+ camera_test_event_loop_->OnRun([timer, this]() {
+ timer->Setup(camera_test_event_loop_->monotonic_now(),
+ std::chrono::milliseconds(50));
+ });
+ }
+
+ localizer_control_send_timer_ =
+ roborio_test_event_loop_->AddTimer([this]() {
+ auto builder = control_sender_.MakeBuilder();
+ auto control_builder = builder.MakeBuilder<LocalizerControl>();
+ control_builder.add_x(localizer_control_x_);
+ control_builder.add_y(localizer_control_y_);
+ control_builder.add_theta(localizer_control_theta_);
+ control_builder.add_theta_uncertainty(0.01);
+ control_builder.add_keep_current_theta(false);
+ builder.CheckOk(builder.Send(control_builder.Finish()));
+ });
+
// Get things zeroed.
event_loop_factory_.RunFor(std::chrono::seconds(10));
CHECK(status_fetcher_.Fetch());
CHECK(status_fetcher_->zeroed());
+
+ if (!FLAGS_output_folder.empty()) {
+ logger_event_loop_ =
+ event_loop_factory_.MakeEventLoop("logger", imu_node_);
+ logger_ = std::make_unique<aos::logger::Logger>(logger_event_loop_.get());
+ logger_->StartLoggingOnRun(FLAGS_output_folder);
+ }
+ }
+
+ void SendLocalizerControl(double x, double y, double theta) {
+ localizer_control_x_ = x;
+ localizer_control_y_ = y;
+ localizer_control_theta_ = theta;
+ localizer_control_send_timer_->Setup(
+ roborio_test_event_loop_->monotonic_now());
+ }
+ ::testing::AssertionResult IsNear(double expected, double actual,
+ double epsilon) {
+ if (std::abs(expected - actual) < epsilon) {
+ return ::testing::AssertionSuccess();
+ } else {
+ return ::testing::AssertionFailure()
+ << "Expected " << expected << " but got " << actual
+ << " with a max difference of " << epsilon
+ << " and an actual difference of " << std::abs(expected - actual);
+ }
+ }
+ ::testing::AssertionResult VerifyEstimatorAccurate(double eps) {
+ const Eigen::Matrix<double, 5, 1> true_state = drivetrain_plant_.state();
+ ::testing::AssertionResult result(true);
+ status_fetcher_.Fetch();
+ if (!(result = IsNear(status_fetcher_->model_based()->x(), true_state(0),
+ eps))) {
+ return result;
+ }
+ if (!(result = IsNear(status_fetcher_->model_based()->y(), true_state(1),
+ eps))) {
+ return result;
+ }
+ if (!(result = IsNear(status_fetcher_->model_based()->theta(),
+ true_state(2), eps))) {
+ return result;
+ }
+ return result;
}
aos::FlatbufferDetachedBuffer<aos::Configuration> configuration_;
aos::SimulatedEventLoopFactory event_loop_factory_;
const aos::Node *const roborio_node_;
const aos::Node *const imu_node_;
+ const aos::Node *const camera_node_;
const control_loops::drivetrain::DrivetrainConfig<double> dt_config_;
std::unique_ptr<aos::EventLoop> localizer_event_loop_;
EventLoopLocalizer localizer_;
@@ -349,13 +547,30 @@
std::unique_ptr<aos::EventLoop> roborio_test_event_loop_;
std::unique_ptr<aos::EventLoop> imu_test_event_loop_;
+ std::unique_ptr<aos::EventLoop> camera_test_event_loop_;
std::unique_ptr<aos::EventLoop> logger_test_event_loop_;
aos::Sender<Output> output_sender_;
+ aos::Sender<y2022::control_loops::superstructure::Status> turret_sender_;
+ aos::Sender<y2022::vision::TargetEstimate> target_sender_;
+ aos::Sender<LocalizerControl> control_sender_;
aos::Fetcher<LocalizerOutput> output_fetcher_;
aos::Fetcher<LocalizerStatus> status_fetcher_;
Eigen::Vector2d output_voltages_ = Eigen::Vector2d::Zero();
+
+ aos::TimerHandler *localizer_control_send_timer_;
+
+ bool send_targets_ = false;
+ double turret_position_ = 0.0;
+ double turret_velocity_ = 0.0;
+
+ double localizer_control_x_ = 0.0;
+ double localizer_control_y_ = 0.0;
+ double localizer_control_theta_ = 0.0;
+
+ std::unique_ptr<aos::EventLoop> logger_event_loop_;
+ std::unique_ptr<aos::logger::Logger> logger_;
};
TEST_F(EventLoopLocalizerTest, Nominal) {
@@ -525,4 +740,79 @@
status_fetcher_->model_based()->y(), 1e-6);
}
+// Tests that image corrections in the nominal case (no errors) cause no
+// issues.
+TEST_F(EventLoopLocalizerTest, NominalImageCorrections) {
+ output_voltages_ << 3.0, 2.0;
+ drivetrain_plant_.set_accel_sin_magnitude(0.01);
+ send_targets_ = true;
+
+ event_loop_factory_.RunFor(std::chrono::seconds(4));
+ CHECK(status_fetcher_.Fetch());
+ ASSERT_TRUE(status_fetcher_->model_based()->using_model());
+ EXPECT_TRUE(VerifyEstimatorAccurate(1e-1));
+ ASSERT_TRUE(status_fetcher_->model_based()->has_statistics());
+ ASSERT_LT(10,
+ status_fetcher_->model_based()->statistics()->total_candidates());
+ ASSERT_EQ(status_fetcher_->model_based()->statistics()->total_candidates(),
+ status_fetcher_->model_based()->statistics()->total_accepted());
+}
+
+// Tests that image corrections when there is an error at the start result
+// in us actually getting corrected over time.
+TEST_F(EventLoopLocalizerTest, ImageCorrections) {
+ output_voltages_ << 0.0, 0.0;
+ drivetrain_plant_.mutable_state()->x() = 2.0;
+ drivetrain_plant_.mutable_state()->y() = 2.0;
+ SendLocalizerControl(5.0, 3.0, 0.0);
+ event_loop_factory_.RunFor(std::chrono::seconds(4));
+ CHECK(output_fetcher_.Fetch());
+ ASSERT_NEAR(5.0, output_fetcher_->x(), 1e-5);
+ ASSERT_NEAR(3.0, output_fetcher_->y(), 1e-5);
+ ASSERT_NEAR(0.0, output_fetcher_->theta(), 1e-5);
+
+ send_targets_ = true;
+
+ event_loop_factory_.RunFor(std::chrono::seconds(4));
+ CHECK(status_fetcher_.Fetch());
+ ASSERT_TRUE(status_fetcher_->model_based()->using_model());
+ EXPECT_TRUE(VerifyEstimatorAccurate(1e-1));
+ ASSERT_TRUE(status_fetcher_->model_based()->has_statistics());
+ ASSERT_LT(10,
+ status_fetcher_->model_based()->statistics()->total_candidates());
+ ASSERT_EQ(status_fetcher_->model_based()->statistics()->total_candidates(),
+ status_fetcher_->model_based()->statistics()->total_accepted());
+}
+
+// Tests that image corrections work when we are in accel mode.
+TEST_F(EventLoopLocalizerTest, ImageCorrectionsInAccel) {
+ output_voltages_ << 0.0, 0.0;
+ drivetrain_plant_.set_left_voltage_offset(200.0);
+ drivetrain_plant_.set_right_voltage_offset(200.0);
+ drivetrain_plant_.set_accel_sin_magnitude(0.01);
+ drivetrain_plant_.mutable_state()->x() = 2.0;
+ drivetrain_plant_.mutable_state()->y() = 2.0;
+ SendLocalizerControl(5.0, 3.0, 0.0);
+ event_loop_factory_.RunFor(std::chrono::seconds(1));
+ CHECK(output_fetcher_.Fetch());
+ CHECK(status_fetcher_.Fetch());
+ ASSERT_FALSE(status_fetcher_->model_based()->using_model());
+ EXPECT_FALSE(VerifyEstimatorAccurate(0.5));
+
+ send_targets_ = true;
+
+ event_loop_factory_.RunFor(std::chrono::seconds(4));
+ CHECK(status_fetcher_.Fetch());
+ ASSERT_FALSE(status_fetcher_->model_based()->using_model());
+ EXPECT_TRUE(VerifyEstimatorAccurate(0.5));
+ // y should be noticeably more accurate than x, since we are just driving
+ // straight.
+  ASSERT_NEAR(drivetrain_plant_.state()(1),
+              status_fetcher_->model_based()->y(), 0.1);
+ ASSERT_TRUE(status_fetcher_->model_based()->has_statistics());
+ ASSERT_LT(10,
+ status_fetcher_->model_based()->statistics()->total_candidates());
+ ASSERT_EQ(status_fetcher_->model_based()->statistics()->total_candidates(),
+ status_fetcher_->model_based()->statistics()->total_accepted());
+}
+
}  // namespace frc971::controls::testing