Add more functionality to year-agnostic localization utils
* Allow retrieving encoder values from the roborio.
* Move a utility for calculating some H matrices from y2019
into the year-agnostic code.
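
A minimal usage sketch of the new pieces (the event_loop, target_pose, and
camera_pose variables are assumed to be set up elsewhere and are only
illustrative):

  frc971::control_loops::drivetrain::LocalizationUtils utils(event_loop);
  // nullopt if no Position message has been seen within the last 10 ms.
  std::optional<Eigen::Vector2d> encoders =
      utils.Encoders(event_loop->monotonic_now());

  // 3 x kNStates Jacobian of a [heading, distance, skew] observation of
  // target_pose as seen from camera_pose, for the EKF correction step.
  const auto H = frc971::control_loops::drivetrain::
      HMatrixForCameraHeadingDistanceSkew(target_pose, camera_pose);
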
Change-Id: I2ef0e391fc615ccceeb2b0fc035233bb995d6e1e
Signed-off-by: James Kuszmaul <jabukuszmaul+collab@gmail.com>
diff --git a/frc971/control_loops/drivetrain/localization/BUILD b/frc971/control_loops/drivetrain/localization/BUILD
index 7b86f09..747b16c 100644
--- a/frc971/control_loops/drivetrain/localization/BUILD
+++ b/frc971/control_loops/drivetrain/localization/BUILD
@@ -9,7 +9,10 @@
deps = [
"//aos/events:event_loop",
"//aos/network:message_bridge_server_fbs",
+ "//frc971/control_loops:pose",
"//frc971/control_loops/drivetrain:drivetrain_output_fbs",
+ "//frc971/control_loops/drivetrain:drivetrain_position_fbs",
+ "//frc971/control_loops/drivetrain:hybrid_ekf",
"//frc971/input:joystick_state_fbs",
"//frc971/vision:calibration_fbs",
"@org_tuxfamily_eigen//:eigen",
diff --git a/frc971/control_loops/drivetrain/localization/utils.cc b/frc971/control_loops/drivetrain/localization/utils.cc
index 10ee445..fb46219 100644
--- a/frc971/control_loops/drivetrain/localization/utils.cc
+++ b/frc971/control_loops/drivetrain/localization/utils.cc
@@ -4,6 +4,7 @@
LocalizationUtils::LocalizationUtils(aos::EventLoop *event_loop)
: output_fetcher_(event_loop->MakeFetcher<Output>("/drivetrain")),
+ position_fetcher_(event_loop->TryMakeFetcher<Position>("/drivetrain")),
clock_offset_fetcher_(
event_loop->MakeFetcher<aos::message_bridge::ServerStatistics>(
"/aos")),
@@ -23,6 +24,20 @@
output_fetcher_->right_voltage()};
}
+std::optional<Eigen::Vector2d> LocalizationUtils::Encoders(
+ aos::monotonic_clock::time_point now) {
+ CHECK(position_fetcher_.valid());
+ position_fetcher_.Fetch();
+ const bool stale = (position_fetcher_.get() == nullptr) ||
+ (position_fetcher_.context().monotonic_event_time +
+ std::chrono::milliseconds(10) <
+ now);
+ return stale ? std::nullopt
+ : std::make_optional<Eigen::Vector2d>(
+ position_fetcher_->left_encoder(),
+ position_fetcher_->right_encoder());
+}
+
bool LocalizationUtils::MaybeInAutonomous() {
joystick_state_fetcher_.Fetch();
return (joystick_state_fetcher_.get() != nullptr)
diff --git a/frc971/control_loops/drivetrain/localization/utils.h b/frc971/control_loops/drivetrain/localization/utils.h
index cfd443a..e2985af 100644
--- a/frc971/control_loops/drivetrain/localization/utils.h
+++ b/frc971/control_loops/drivetrain/localization/utils.h
@@ -5,6 +5,9 @@
#include "aos/events/event_loop.h"
#include "aos/network/message_bridge_server_generated.h"
#include "frc971/control_loops/drivetrain/drivetrain_output_generated.h"
+#include "frc971/control_loops/drivetrain/drivetrain_position_generated.h"
+#include "frc971/control_loops/drivetrain/hybrid_ekf.h"
+#include "frc971/control_loops/pose.h"
#include "frc971/input/joystick_state_generated.h"
#include "frc971/vision/calibration_generated.h"
@@ -16,6 +19,7 @@
// has timed out.
// * Offsets between monotonic clocks on different devices.
// * Whether we are in autonomous mode.
+// * Drivetrain encoder positions, as reported by the roborio.
class LocalizationUtils {
public:
LocalizationUtils(aos::EventLoop *event_loop);
@@ -26,6 +30,11 @@
// [left_voltage, right_voltage]
Eigen::Vector2d VoltageOrZero(aos::monotonic_clock::time_point now);
+ // Returns the latest drivetrain encoder values (in meters), or nullopt if no
+ // position message is available (or if the message is stale).
+ // Returns encoders as [left_position, right_position]
+ std::optional<Eigen::Vector2d> Encoders(aos::monotonic_clock::time_point now);
+
// Returns true if either there is no JoystickState message available or if
// we are currently in autonomous mode.
bool MaybeInAutonomous();
@@ -39,6 +48,7 @@
private:
aos::Fetcher<frc971::control_loops::drivetrain::Output> output_fetcher_;
+ aos::Fetcher<frc971::control_loops::drivetrain::Position> position_fetcher_;
aos::Fetcher<aos::message_bridge::ServerStatistics> clock_offset_fetcher_;
aos::Fetcher<aos::JoystickState> joystick_state_fetcher_;
};
@@ -47,6 +57,63 @@
Eigen::Matrix<double, 4, 4> FlatbufferToTransformationMatrix(
const frc971::vision::calibration::TransformationMatrix &flatbuffer);
+// This approximates the Jacobian of a [heading, distance, skew] measurement
+// of a target with respect to the full state of a drivetrain EKF.
+// Note that the only nonzero values in the returned matrix will be in the
+// columns corresponding to the X, Y, and Theta components of the state.
+// This is suitable for use as the H matrix in the Kalman updates of the EKF,
+// although due to the approximation it should not be used to actually
+// calculate the expected measurement.
+// target_pose is the global pose of the target that we have identified.
+// camera_pose is the current estimate of the global pose of
+// the camera that can see the target.
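+//
+// Concretely, for the measurement h(X) = [heading; distance; skew] with
+//   heading  = atan2(target_y - camera_y, target_x - camera_x) - camera_theta
+//   distance = sqrt((target_x - camera_x)^2 + (target_y - camera_y)^2)
+//   skew     = target_theta - camera_theta - heading
+// this returns dh/dX with the camera x/y/theta treated as moving one-for-one
+// with the robot state, per the approximation described in the body.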
+template <typename Scalar>
+Eigen::Matrix<Scalar, 3, HybridEkf<Scalar>::kNStates>
+HMatrixForCameraHeadingDistanceSkew(const TypedPose<Scalar> &target_pose,
+ const TypedPose<Scalar> &camera_pose) {
+ // For all of the below calculations, we will assume to a first
+ // approximation that:
+ //
+ // dcamera_theta / dtheta ~= 1
+ // dcamera_x / dx ~= 1
+ // dcamera_y / dy ~= 1
+ //
+ // For cameras sufficiently far from the robot's origin, or if the robot were
+ // spinning extremely rapidly, this would not hold.
+
+ // To calculate dheading/d{x,y,theta}:
+ // heading = arctan2(target_pos - camera_pos) - camera_theta
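+ // Since diffx = target_x - camera_x, d(diffx)/dx = -1.  Combined with
+ // d(atan2(diffy, diffx)) = (diffx * d(diffy) - diffy * d(diffx)) / norm2,
+ // this gives the partials below.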
+ Eigen::Matrix<Scalar, 3, 1> target_pos = target_pose.abs_pos();
+ Eigen::Matrix<Scalar, 3, 1> camera_pos = camera_pose.abs_pos();
+ Scalar diffx = target_pos.x() - camera_pos.x();
+ Scalar diffy = target_pos.y() - camera_pos.y();
+ Scalar norm2 = diffx * diffx + diffy * diffy;
+ Scalar dheadingdx = diffy / norm2;
+ Scalar dheadingdy = -diffx / norm2;
+ Scalar dheadingdtheta = -1.0;
+
+ // To calculate ddistance/d{x,y}:
+ // distance = sqrt(diffx^2 + diffy^2)
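+ // As above, d(diffx)/dx = -1, so ddistance/dx = -diffx / distance (and
+ // similarly for y).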
+ Scalar distance = ::std::sqrt(norm2);
+ Scalar ddistdx = -diffx / distance;
+ Scalar ddistdy = -diffy / distance;
+
+ // Skew = target.theta - camera.theta - heading
+ // = target.theta - arctan2(target_pos - camera_pos)
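+ // The camera heading cancels out of the skew, so dskew/dtheta = 0 and the
+ // x/y partials are just the negatives of the heading partials.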
+ Scalar dskewdx = -dheadingdx;
+ Scalar dskewdy = -dheadingdy;
+ Eigen::Matrix<Scalar, 3, HybridEkf<Scalar>::kNStates> H;
+ H.setZero();
+ H(0, HybridEkf<Scalar>::kX) = dheadingdx;
+ H(0, HybridEkf<Scalar>::kY) = dheadingdy;
+ H(0, HybridEkf<Scalar>::kTheta) = dheadingdtheta;
+ H(1, HybridEkf<Scalar>::kX) = ddistdx;
+ H(1, HybridEkf<Scalar>::kY) = ddistdy;
+ H(2, HybridEkf<Scalar>::kX) = dskewdx;
+ H(2, HybridEkf<Scalar>::kY) = dskewdy;
+ return H;
+}
+
} // namespace frc971::control_loops::drivetrain
#endif // FRC971_CONTROL_LOOPS_DRIVETRAIN_LOCALIZATION_UTILS_H_
diff --git a/y2019/control_loops/drivetrain/BUILD b/y2019/control_loops/drivetrain/BUILD
index 65857c6..53cb777 100644
--- a/y2019/control_loops/drivetrain/BUILD
+++ b/y2019/control_loops/drivetrain/BUILD
@@ -133,6 +133,7 @@
"//frc971/control_loops:pose",
"//frc971/control_loops/drivetrain:camera",
"//frc971/control_loops/drivetrain:hybrid_ekf",
+ "//frc971/control_loops/drivetrain/localization:utils",
],
)
diff --git a/y2019/control_loops/drivetrain/localizer.h b/y2019/control_loops/drivetrain/localizer.h
index f4c7a8a..3494536 100644
--- a/y2019/control_loops/drivetrain/localizer.h
+++ b/y2019/control_loops/drivetrain/localizer.h
@@ -6,6 +6,7 @@
#include "frc971/control_loops/drivetrain/camera.h"
#include "frc971/control_loops/drivetrain/hybrid_ekf.h"
+#include "frc971/control_loops/drivetrain/localization/utils.h"
#include "frc971/control_loops/pose.h"
#if !defined(__clang__) && defined(__GNUC__)
@@ -437,37 +438,8 @@
Eigen::Matrix<Scalar, kNOutputs, kNStates> HMatrix(const Target &target,
const Pose &camera_pose) {
- // To calculate dheading/d{x,y,theta}:
- // heading = arctan2(target_pos - camera_pos) - camera_theta
- Eigen::Matrix<Scalar, 3, 1> target_pos = target.pose().abs_pos();
- Eigen::Matrix<Scalar, 3, 1> camera_pos = camera_pose.abs_pos();
- Scalar diffx = target_pos.x() - camera_pos.x();
- Scalar diffy = target_pos.y() - camera_pos.y();
- Scalar norm2 = diffx * diffx + diffy * diffy;
- Scalar dheadingdx = diffy / norm2;
- Scalar dheadingdy = -diffx / norm2;
- Scalar dheadingdtheta = -1.0;
-
- // To calculate ddistance/d{x,y}:
- // distance = sqrt(diffx^2 + diffy^2)
- Scalar distance = ::std::sqrt(norm2);
- Scalar ddistdx = -diffx / distance;
- Scalar ddistdy = -diffy / distance;
-
- // Skew = target.theta - camera.theta - heading
- // = target.theta - arctan2(target_pos - camera_pos)
- Scalar dskewdx = -dheadingdx;
- Scalar dskewdy = -dheadingdy;
- Eigen::Matrix<Scalar, kNOutputs, kNStates> H;
- H.setZero();
- H(0, 0) = dheadingdx;
- H(0, 1) = dheadingdy;
- H(0, 2) = dheadingdtheta;
- H(1, 0) = ddistdx;
- H(1, 1) = ddistdy;
- H(2, 0) = dskewdx;
- H(2, 1) = dskewdy;
- return H;
+ return frc971::control_loops::drivetrain::
+ HMatrixForCameraHeadingDistanceSkew(target.pose(), camera_pose);
}
// A helper function for the fuller version of MatchFrames; this just