Improve localizer update debugging
* Only send LocalizerDebug messages when something interesting happens.
* On the debug field, draw both the robot position implied by each update
and the corresponding camera position.
* Indicate accepted vs. rejected images via green vs. red colors.
* Leave old corrections on the display for 2 seconds and gradually fade
them out.
* Add aggregate statistics about why we are rejecting image corrections
to the readouts table.
Change-Id: Ibc3397cb5654aacbc6cce4e5f3eb71f0371692cc
Signed-off-by: James Kuszmaul <jabukuszmaul@gmail.com>
diff --git a/y2020/control_loops/drivetrain/BUILD b/y2020/control_loops/drivetrain/BUILD
index edec5c2..b12cdc3 100644
--- a/y2020/control_loops/drivetrain/BUILD
+++ b/y2020/control_loops/drivetrain/BUILD
@@ -1,4 +1,4 @@
-load("@com_github_google_flatbuffers//:build_defs.bzl", "flatbuffer_cc_library")
+load("@com_github_google_flatbuffers//:build_defs.bzl", "flatbuffer_cc_library", "flatbuffer_ts_library")
load("//aos:config.bzl", "aos_config")
load("//tools/build_rules:select.bzl", "compiler_select", "cpu_select")
load("@npm_bazel_typescript//:defs.bzl", "ts_library")
@@ -64,6 +64,13 @@
visibility = ["//visibility:public"],
)
+flatbuffer_ts_library(
+ name = "localizer_debug_ts_fbs",
+ srcs = ["localizer_debug.fbs"],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//visibility:public"],
+)
+
cc_library(
name = "localizer",
srcs = ["localizer.cc"],
diff --git a/y2020/control_loops/drivetrain/localizer.cc b/y2020/control_loops/drivetrain/localizer.cc
index 937d8c4..650a0c5 100644
--- a/y2020/control_loops/drivetrain/localizer.cc
+++ b/y2020/control_loops/drivetrain/localizer.cc
@@ -2,6 +2,10 @@
#include "y2020/constants.h"
+DEFINE_bool(send_empty_debug, false,
+ "If true, send LocalizerDebug messages on every tick, even if "
+ "they would be empty.");
+
namespace y2020 {
namespace control_loops {
namespace drivetrain {
@@ -92,6 +96,7 @@
event_loop_
->MakeSender<y2020::control_loops::drivetrain::LocalizerDebug>(
"/drivetrain")) {
+ statistics_.rejection_counts.fill(0);
// TODO(james): The down estimator has trouble handling situations where the
// robot is constantly wiggling but not actually moving much, and can cause
// drift when using accelerometer readings.
@@ -176,11 +181,26 @@
}
}
}
- const auto vector_offset =
- builder.fbb()->CreateVector(debug_offsets.data(), debug_offsets.size());
- LocalizerDebug::Builder debug_builder = builder.MakeBuilder<LocalizerDebug>();
- debug_builder.add_matches(vector_offset);
- CHECK(builder.Send(debug_builder.Finish()));
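+  // Only send a LocalizerDebug message when there is something to report;
+  // --send_empty_debug can be set to force a message every tick.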
+ if (FLAGS_send_empty_debug || !debug_offsets.empty()) {
+ const auto vector_offset =
+ builder.fbb()->CreateVector(debug_offsets.data(), debug_offsets.size());
+ const auto rejections_offset =
+ builder.fbb()->CreateVector(statistics_.rejection_counts.data(),
+ statistics_.rejection_counts.size());
+
+ CumulativeStatistics::Builder stats_builder =
+ builder.MakeBuilder<CumulativeStatistics>();
+ stats_builder.add_total_accepted(statistics_.total_accepted);
+ stats_builder.add_total_candidates(statistics_.total_candidates);
+ stats_builder.add_rejection_reason_count(rejections_offset);
+ const auto stats_offset = stats_builder.Finish();
+
+ LocalizerDebug::Builder debug_builder =
+ builder.MakeBuilder<LocalizerDebug>();
+ debug_builder.add_matches(vector_offset);
+ debug_builder.add_statistics(stats_offset);
+ CHECK(builder.Send(debug_builder.Finish()));
+ }
}
aos::SizedArray<flatbuffers::Offset<ImageMatchDebug>, 5>
@@ -209,14 +229,10 @@
<< aos::time::DurationInSeconds(monotonic_offset)
<< " when at time of " << now << " and capture time estimate of "
<< capture_time;
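+  // Rather than bailing out immediately on some rejections, record the reason
+  // and keep going so that the debug message can still include the implied
+  // robot/camera poses for the match.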
+ std::optional<RejectionReason> rejection_reason;
if (capture_time > now) {
LOG(WARNING) << "Got camera frame from the future.";
- ImageMatchDebug::Builder builder(*fbb);
- builder.add_camera(camera_index);
- builder.add_accepted(false);
- builder.add_rejection_reason(RejectionReason::IMAGE_FROM_FUTURE);
- debug_offsets.push_back(builder.Finish());
- return debug_offsets;
+ rejection_reason = RejectionReason::IMAGE_FROM_FUTURE;
}
if (!result.has_camera_calibration()) {
LOG(WARNING) << "Got camera frame without calibration data.";
@@ -225,6 +241,8 @@
builder.add_accepted(false);
builder.add_rejection_reason(RejectionReason::NO_CALIBRATION);
debug_offsets.push_back(builder.Finish());
+ statistics_.rejection_counts[static_cast<size_t>(
+ RejectionReason::NO_CALIBRATION)]++;
return debug_offsets;
}
// Per the ImageMatchResult specification, we can actually determine whether
@@ -238,12 +256,7 @@
// seems reasonable, but may be unnecessarily low or high.
constexpr float kMaxTurretVelocity = 1.0;
if (is_turret && std::abs(turret_data.velocity) > kMaxTurretVelocity) {
- ImageMatchDebug::Builder builder(*fbb);
- builder.add_camera(camera_index);
- builder.add_accepted(false);
- builder.add_rejection_reason(RejectionReason::TURRET_TOO_FAST);
- debug_offsets.push_back(builder.Finish());
- return debug_offsets;
+ rejection_reason = RejectionReason::TURRET_TOO_FAST;
}
CHECK(result.camera_calibration()->has_fixed_extrinsics());
const Eigen::Matrix<float, 4, 4> fixed_extrinsics =
@@ -266,12 +279,15 @@
builder.add_accepted(false);
builder.add_rejection_reason(RejectionReason::NO_RESULTS);
debug_offsets.push_back(builder.Finish());
+ statistics_
+ .rejection_counts[static_cast<size_t>(RejectionReason::NO_RESULTS)]++;
return debug_offsets;
}
int index = -1;
for (const frc971::vision::sift::CameraPose *vision_result :
*result.camera_poses()) {
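+    // Every pose in the image match counts as a candidate, whether or not it
+    // ends up being accepted.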
+ ++statistics_.total_candidates;
++index;
ImageMatchDebug::Builder builder(*fbb);
@@ -286,6 +302,8 @@
!vision_result->has_field_to_target()) {
builder.add_accepted(false);
builder.add_rejection_reason(RejectionReason::NO_TRANSFORMS);
+ statistics_.rejection_counts[static_cast<size_t>(
+ RejectionReason::NO_TRANSFORMS)]++;
debug_offsets.push_back(builder.Finish());
continue;
}
@@ -294,11 +312,21 @@
const Eigen::Matrix<float, 4, 4> H_field_target =
FlatbufferToTransformationMatrix(*vision_result->field_to_target());
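+    // Pose of the camera in the field frame, used only for debugging output.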
+ const Eigen::Matrix<float, 4, 4> H_field_camera =
+ H_field_target * H_camera_target.inverse();
// Back out the robot position that is implied by the current camera
// reading. Note that the Pose object ignores any roll/pitch components, so
// if the camera's extrinsics for pitch/roll are off, this should just
// ignore it.
- const Pose measured_camera_pose(H_field_target * H_camera_target.inverse());
+ const Pose measured_camera_pose(H_field_camera);
+ builder.add_camera_x(measured_camera_pose.rel_pos().x());
+ builder.add_camera_y(measured_camera_pose.rel_pos().y());
+ // Because the camera uses Z as forwards rather than X, just calculate the
+ // debugging theta value using the transformation matrix directly (note that
+ // the rest of this file deliberately does not care what convention the
+ // camera uses, since that is encoded in the extrinsics themselves).
+ builder.add_camera_theta(
+ std::atan2(H_field_camera(1, 2), H_field_camera(0, 2)));
// Calculate the camera-to-robot transformation matrix ignoring the
// pitch/roll of the camera.
// TODO(james): This could probably be made a bit more efficient, but I
@@ -313,12 +341,30 @@
const Eigen::Matrix<float, 3, 1> Z(measured_pose.rel_pos().x(),
measured_pose.rel_pos().y(),
measured_pose.rel_theta());
+ builder.add_implied_robot_x(Z(0));
+ builder.add_implied_robot_y(Z(1));
+ builder.add_implied_robot_theta(Z(2));
// Pose of the target in the robot frame.
// Note that we use measured_pose's transformation matrix rather than just
// doing H_robot_camera * H_camera_target because measured_pose ignores
// pitch/roll.
Pose pose_robot_target(measured_pose.AsTransformationMatrix().inverse() *
H_field_target);
+
+ // Turret is zero when pointed backwards.
+ builder.add_implied_turret_goal(
+ aos::math::NormalizeAngle(M_PI + pose_robot_target.heading()));
+
+ // Since we've now built up all the information that is useful to include in
+ // the debug message, bail if we have reason to do so.
+ if (rejection_reason) {
+ builder.add_rejection_reason(*rejection_reason);
+ statistics_.rejection_counts[static_cast<size_t>(*rejection_reason)]++;
+ builder.add_accepted(false);
+ debug_offsets.push_back(builder.Finish());
+ continue;
+ }
+
// TODO(james): Figure out how to properly handle calculating the
// noise. Currently, the values are deliberately tuned so that image updates
// will not be trusted overly much. In theory, we should probably also be
@@ -357,6 +403,8 @@
AOS_LOG(WARNING, "Dropped image match due to heading mismatch.\n");
builder.add_accepted(false);
builder.add_rejection_reason(RejectionReason::HIGH_THETA_DIFFERENCE);
+ statistics_.rejection_counts[static_cast<size_t>(
+ RejectionReason::HIGH_THETA_DIFFERENCE)]++;
debug_offsets.push_back(builder.Finish());
continue;
}
@@ -371,18 +419,12 @@
AOS_LOG(WARNING, "Dropped image match due to age of image.\n");
builder.add_accepted(false);
builder.add_rejection_reason(RejectionReason::IMAGE_TOO_OLD);
+ statistics_.rejection_counts[static_cast<size_t>(
+ RejectionReason::IMAGE_TOO_OLD)]++;
debug_offsets.push_back(builder.Finish());
continue;
}
- builder.add_implied_robot_x(Z(0));
- builder.add_implied_robot_y(Z(1));
- builder.add_implied_robot_theta(Z(2));
-
- // Turret is zero when pointed backwards.
- builder.add_implied_turret_goal(
- aos::math::NormalizeAngle(M_PI + pose_robot_target.heading()));
-
std::optional<RejectionReason> correction_rejection;
const Input U = ekf_.MostRecentInput();
// For the correction step, instead of passing in the measurement directly,
@@ -436,8 +478,11 @@
if (correction_rejection) {
builder.add_accepted(false);
builder.add_rejection_reason(*correction_rejection);
+ statistics_
+ .rejection_counts[static_cast<size_t>(*correction_rejection)]++;
} else {
builder.add_accepted(true);
+ statistics_.total_accepted++;
}
debug_offsets.push_back(builder.Finish());
}
diff --git a/y2020/control_loops/drivetrain/localizer.h b/y2020/control_loops/drivetrain/localizer.h
index 19ee4ad..5458797 100644
--- a/y2020/control_loops/drivetrain/localizer.h
+++ b/y2020/control_loops/drivetrain/localizer.h
@@ -78,6 +78,23 @@
double velocity = 0.0; // rad/sec
};
+ static constexpr size_t kNumRejectionReasons =
+ static_cast<int>(RejectionReason::MAX) -
+ static_cast<int>(RejectionReason::MIN) + 1;
+
+ struct Statistics {
+ int total_accepted = 0;
+ int total_candidates = 0;
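+    // rejection_counts is indexed by the integer value of RejectionReason,
+    // so the enum must be zero-based and contiguous.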
+ static_assert(0 == static_cast<int>(RejectionReason::MIN));
+ static_assert(
+ kNumRejectionReasons ==
+ sizeof(
+ std::invoke_result<decltype(EnumValuesRejectionReason)>::type) /
+ sizeof(RejectionReason),
+ "RejectionReason has non-contiguous error values.");
+ std::array<int, kNumRejectionReasons> rejection_counts;
+ };
+
// Processes new image data from the given pi and updates the EKF.
aos::SizedArray<flatbuffers::Offset<ImageMatchDebug>, 5> HandleImageMatch(
size_t camera_index, std::string_view pi,
@@ -111,6 +128,8 @@
// Target selector to allow us to satisfy the LocalizerInterface requirements.
frc971::control_loops::drivetrain::TrivialTargetSelector target_selector_;
+
+ Statistics statistics_;
};
} // namespace drivetrain
diff --git a/y2020/control_loops/drivetrain/localizer_debug.fbs b/y2020/control_loops/drivetrain/localizer_debug.fbs
index 89aea68..e94d251 100644
--- a/y2020/control_loops/drivetrain/localizer_debug.fbs
+++ b/y2020/control_loops/drivetrain/localizer_debug.fbs
@@ -17,6 +17,9 @@
pose_index: uint8 (id: 1);
local_image_capture_time_ns:long (id: 2);
roborio_image_capture_time_ns:long (id: 3);
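+  // Field-relative pose of the camera itself, for debugging extrinsics.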
+ camera_x:float (id: 11);
+ camera_y:float (id: 12);
+ camera_theta:float (id: 13);
implied_robot_x:float (id: 4);
implied_robot_y:float (id: 5);
implied_robot_theta:float (id: 6);
@@ -27,8 +30,16 @@
image_age_sec:float (id: 10);
}
+table CumulativeStatistics {
+ total_accepted:int (id: 0);
+ total_candidates:int (id: 1);
+ // Indexed by integer value of RejectionReason enum.
+ rejection_reason_count:[int] (id: 2);
+}
+
table LocalizerDebug {
matches:[ImageMatchDebug] (id: 0);
+ statistics:CumulativeStatistics (id: 1);
}
root_type LocalizerDebug;
diff --git a/y2020/www/BUILD b/y2020/www/BUILD
index be3bfac..cdfd9d7 100644
--- a/y2020/www/BUILD
+++ b/y2020/www/BUILD
@@ -35,6 +35,7 @@
"//aos/network:web_proxy_ts_fbs",
"//aos/network/www:proxy",
"//frc971/control_loops/drivetrain:drivetrain_status_ts_fbs",
+ "//y2020/control_loops/drivetrain:localizer_debug_ts_fbs",
"//y2020/control_loops/superstructure:superstructure_status_ts_fbs",
"//y2020/vision/sift:sift_ts_fbs",
"@com_github_google_flatbuffers//ts:flatbuffers_ts",
diff --git a/y2020/www/field.html b/y2020/www/field.html
index 14ebfb0..5b486ea 100644
--- a/y2020/www/field.html
+++ b/y2020/www/field.html
@@ -50,6 +50,14 @@
<div> Intake </div>
<div id="intake"> NA </div>
</div>
+ <div>
+ <div> Images Accepted </div>
+ <div id="images_accepted"> NA </div>
+ </div>
+ <div>
+ <div> Images Rejected </div>
+ <div id="images_rejected"> NA </div>
+ </div>
</div>
</body>
</html>
diff --git a/y2020/www/field_handler.ts b/y2020/www/field_handler.ts
index 1cba7eb..dec4b34 100644
--- a/y2020/www/field_handler.ts
+++ b/y2020/www/field_handler.ts
@@ -3,11 +3,15 @@
import * as flatbuffers_builder from 'org_frc971/external/com_github_google_flatbuffers/ts/builder';
import {ByteBuffer} from 'org_frc971/external/com_github_google_flatbuffers/ts/byte-buffer';
import * as drivetrain from 'org_frc971/frc971/control_loops/drivetrain/drivetrain_status_generated';
+import * as localizer from 'org_frc971/y2020/control_loops/drivetrain/localizer_debug_generated';
import * as sift from 'org_frc971/y2020/vision/sift/sift_generated';
import * as web_proxy from 'org_frc971/aos/network/web_proxy_generated';
import * as ss from 'org_frc971/y2020/control_loops/superstructure/superstructure_status_generated'
import DrivetrainStatus = drivetrain.frc971.control_loops.drivetrain.Status;
+import LocalizerDebug = localizer.y2020.control_loops.drivetrain.LocalizerDebug;
+import RejectionReason = localizer.y2020.control_loops.drivetrain.RejectionReason;
+import ImageMatchDebug = localizer.y2020.control_loops.drivetrain.ImageMatchDebug;
import SuperstructureStatus = ss.y2020.control_loops.superstructure.Status;
import ImageMatchResult = sift.frc971.vision.sift.ImageMatchResult;
import Channel = configuration.aos.Channel;
@@ -67,6 +71,9 @@
private imageMatchResult = new Map<string, ImageMatchResult>();
private drivetrainStatus: DrivetrainStatus|null = null;
private superstructureStatus: SuperstructureStatus|null = null;
+ // Image information indexed by timestamp (seconds since the epoch), so that
+ // we can stop displaying images after a certain amount of time.
+ private localizerImageMatches = new Map<number, LocalizerDebug>();
private x: HTMLDivElement = (document.getElementById('x') as HTMLDivElement);
private y: HTMLDivElement = (document.getElementById('y') as HTMLDivElement);
private theta: HTMLDivElement = (document.getElementById('theta') as HTMLDivElement);
@@ -78,10 +85,30 @@
private hood: HTMLDivElement = (document.getElementById('hood') as HTMLDivElement);
private turret: HTMLDivElement = (document.getElementById('turret') as HTMLDivElement);
private intake: HTMLDivElement = (document.getElementById('intake') as HTMLDivElement);
+ private imagesAcceptedCounter: HTMLDivElement = (document.getElementById('images_accepted') as HTMLDivElement);
+ private imagesRejectedCounter: HTMLDivElement = (document.getElementById('images_rejected') as HTMLDivElement);
+ private rejectionReasonCells: HTMLDivElement[] = [];
constructor(private readonly connection: Connection) {
(document.getElementById('field') as HTMLElement).appendChild(this.canvas);
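+    // Build one row per RejectionReason in the readouts table so that
+    // aggregate rejection statistics can be displayed.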
+ for (const value in RejectionReason) {
+      // TypeScript numeric enums include a reverse mapping, so for..in
+      // yields both the numeric values (as strings) and the enum names;
+      // skip the name entries.
+ if (isNaN(Number(value))) {
+ continue;
+ }
+ const row = document.createElement("div");
+ const nameCell = document.createElement("div");
+ nameCell.innerHTML = RejectionReason[value];
+ row.appendChild(nameCell);
+ const valueCell = document.createElement("div");
+ valueCell.innerHTML = "NA";
+ this.rejectionReasonCells.push(valueCell);
+ row.appendChild(valueCell);
+ document.getElementById('readouts').appendChild(row);
+ }
+
this.connection.addConfigHandler(() => {
// Go through and register handlers for both all the individual pis as
// well as the local pi. Depending on the node that we are running on,
@@ -93,6 +120,10 @@
});
}
this.connection.addHandler(
+ '/drivetrain', LocalizerDebug.getFullyQualifiedName(), (data) => {
+ this.handleLocalizerDebug(data);
+ });
+ this.connection.addHandler(
'/drivetrain', DrivetrainStatus.getFullyQualifiedName(), (data) => {
this.handleDrivetrainStatus(data);
});
@@ -112,6 +143,41 @@
fbBuffer as unknown as flatbuffers.ByteBuffer));
}
+ private handleLocalizerDebug(data: Uint8Array): void {
+ const now = Date.now() / 1000.0;
+
+    const fbBuffer = new ByteBuffer(data);
+    const debug = LocalizerDebug.getRootAsLocalizerDebug(
+        fbBuffer as unknown as flatbuffers.ByteBuffer);
+    this.localizerImageMatches.set(now, debug);
+
+ if (debug.statistics()) {
+ this.imagesAcceptedCounter.innerHTML =
+ debug.statistics().totalAccepted().toString();
+ this.imagesRejectedCounter.innerHTML =
+ (debug.statistics().totalCandidates() -
+ debug.statistics().totalAccepted())
+ .toString();
+ if (debug.statistics().rejectionReasonCountLength() ==
+ this.rejectionReasonCells.length) {
+ for (let ii = 0; ii < debug.statistics().rejectionReasonCountLength();
+ ++ii) {
+ this.rejectionReasonCells[ii].innerHTML =
+ debug.statistics().rejectionReasonCount(ii).toString();
+ }
+ } else {
+ console.error("Unexpected number of rejection reasons in counter.");
+ }
+ this.imagesRejectedCounter.innerHTML =
+ (debug.statistics().totalCandidates() -
+ debug.statistics().totalAccepted())
+ .toString();
+ }
+ }
+
private handleDrivetrainStatus(data: Uint8Array): void {
const fbBuffer = new ByteBuffer(data);
this.drivetrainStatus = DrivetrainStatus.getRootAsStatus(
@@ -194,16 +260,21 @@
ctx.stroke();
}
- drawCamera(x: number, y: number, theta: number): void {
+ drawCamera(
+ x: number, y: number, theta: number, color: string = 'blue',
+ extendLines: boolean = true): void {
const ctx = this.canvas.getContext('2d');
ctx.save();
ctx.translate(x, y);
ctx.rotate(theta);
+ ctx.strokeStyle = color;
ctx.beginPath();
ctx.moveTo(0.5, 0.5);
ctx.lineTo(0, 0);
- ctx.lineTo(100.0, 0);
- ctx.lineTo(0, 0);
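+    // Optionally extend the sight line well past the camera icon to make it
+    // obvious what the camera is looking at; historical debug corrections
+    // skip this to reduce clutter.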
+ if (extendLines) {
+ ctx.lineTo(100.0, 0);
+ ctx.lineTo(0, 0);
+ }
ctx.lineTo(0.5, -0.5);
ctx.stroke();
ctx.beginPath();
@@ -212,13 +283,34 @@
ctx.restore();
}
- drawRobot(x: number, y: number, theta: number, turret: number|null): void {
+ drawRobot(
+ x: number, y: number, theta: number, turret: number|null,
+ color: string = 'blue', dashed: boolean = false,
+ extendLines: boolean = true): void {
const ctx = this.canvas.getContext('2d');
ctx.save();
ctx.translate(x, y);
ctx.rotate(theta);
+ ctx.strokeStyle = color;
+ if (dashed) {
+ ctx.setLineDash([0.05, 0.05]);
+ } else {
+ // Empty array = solid line.
+ ctx.setLineDash([]);
+ }
ctx.rect(-ROBOT_LENGTH / 2, -ROBOT_WIDTH / 2, ROBOT_LENGTH, ROBOT_WIDTH);
ctx.stroke();
+
+ // Draw line indicating which direction is forwards on the robot.
+ ctx.beginPath();
+ ctx.moveTo(0, 0);
+ if (extendLines) {
+ ctx.lineTo(1000.0, 0);
+ } else {
+ ctx.lineTo(ROBOT_LENGTH / 2.0, 0);
+ }
+ ctx.stroke();
+
if (turret) {
ctx.save();
ctx.rotate(turret + Math.PI);
@@ -231,14 +323,14 @@
// Draw line in circle to show forwards.
ctx.beginPath();
ctx.moveTo(0, 0);
- ctx.lineTo(1000.0 * turretRadius, 0);
+ if (extendLines) {
+ ctx.lineTo(1000.0, 0);
+ } else {
+ ctx.lineTo(turretRadius, 0);
+ }
ctx.stroke();
ctx.restore();
}
- ctx.beginPath();
- ctx.moveTo(0, 0);
- ctx.lineTo(100.0 * ROBOT_LENGTH / 2, 0);
- ctx.stroke();
ctx.restore();
}
@@ -261,7 +353,38 @@
draw(): void {
this.reset();
this.drawField();
- // draw cameras
+
+ // Draw the matches with debugging information from the localizer.
+ const now = Date.now() / 1000.0;
+ for (const [time, value] of this.localizerImageMatches) {
+ const age = now - time;
+ const kRemovalAge = 2.0;
+ if (age > kRemovalAge) {
+ this.localizerImageMatches.delete(time);
+ continue;
+ }
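+      // Fade the correction linearly from fully opaque to fully transparent
+      // as its age approaches kRemovalAge.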
+      const ageAlpha = (kRemovalAge - age) / kRemovalAge;
+ for (let i = 0; i < value.matchesLength(); i++) {
+ const imageDebug = value.matches(i);
+ const x = imageDebug.impliedRobotX();
+ const y = imageDebug.impliedRobotY();
+ const theta = imageDebug.impliedRobotTheta();
+ const cameraX = imageDebug.cameraX();
+ const cameraY = imageDebug.cameraY();
+ const cameraTheta = imageDebug.cameraTheta();
+ const accepted = imageDebug.accepted();
+ // Make camera readings fade over time.
+ const alpha = Math.round(255 * ageAlpha).toString(16).padStart(2, '0');
+ const dashed = false;
+ const rgb = accepted ? "#00FF00" : "#FF0000";
+ const rgba = rgb + alpha;
+ this.drawRobot(x, y, theta, null, rgba, dashed, false);
+ this.drawCamera(cameraX, cameraY, cameraTheta, rgba, false);
+ }
+ }
+
+    // Draw cameras from ImageMatchResults directly (helpful when viewing the
+    // page on the pis individually).
for (const keyPair of this.imageMatchResult) {
const value = keyPair[1];
for (let i = 0; i < value.cameraPosesLength(); i++) {
@@ -294,35 +417,53 @@
this.setValue(this.theta, this.drivetrainStatus.theta());
}
- this.shotDistance.innerHTML = this.superstructureStatus.aimer().shotDistance().toFixed(2);
- this.finisher.innerHTML = this.superstructureStatus.shooter().finisher().angularVelocity().toFixed(2);
- this.leftAccelerator.innerHTML = this.superstructureStatus.shooter().acceleratorLeft().angularVelocity().toFixed(2);
- this.rightAccelerator.innerHTML = this.superstructureStatus.shooter().acceleratorRight().angularVelocity().toFixed(2);
- if (this.superstructureStatus.aimer().aimingForInnerPort()) {
- this.innerPort.innerHTML = "true";
- } else {
- this.innerPort.innerHTML = "false";
- }
- if (!this.superstructureStatus.hood().zeroed()) {
- this.setZeroing(this.hood);
- } else if (this.superstructureStatus.hood().estopped()) {
- this.setEstopped(this.hood);
- } else {
- this.setValue(this.hood, this.superstructureStatus.hood().estimatorState().position());
- }
- if (!this.superstructureStatus.turret().zeroed()) {
- this.setZeroing(this.turret);
- } else if (this.superstructureStatus.turret().estopped()) {
- this.setEstopped(this.turret);
- } else {
- this.setValue(this.turret, this.superstructureStatus.turret().estimatorState().position());
- }
- if (!this.superstructureStatus.intake().zeroed()) {
- this.setZeroing(this.intake);
- } else if (this.superstructureStatus.intake().estopped()) {
- this.setEstopped(this.intake);
- } else {
- this.setValue(this.intake, this.superstructureStatus.intake().estimatorState().position());
+ if (this.superstructureStatus) {
+ this.shotDistance.innerHTML =
+ this.superstructureStatus.aimer().shotDistance().toFixed(2);
+ this.finisher.innerHTML = this.superstructureStatus.shooter()
+ .finisher()
+ .angularVelocity()
+ .toFixed(2);
+ this.leftAccelerator.innerHTML = this.superstructureStatus.shooter()
+ .acceleratorLeft()
+ .angularVelocity()
+ .toFixed(2);
+ this.rightAccelerator.innerHTML = this.superstructureStatus.shooter()
+ .acceleratorRight()
+ .angularVelocity()
+ .toFixed(2);
+ if (this.superstructureStatus.aimer().aimingForInnerPort()) {
+ this.innerPort.innerHTML = 'true';
+ } else {
+ this.innerPort.innerHTML = 'false';
+ }
+ if (!this.superstructureStatus.hood().zeroed()) {
+ this.setZeroing(this.hood);
+ } else if (this.superstructureStatus.hood().estopped()) {
+ this.setEstopped(this.hood);
+ } else {
+ this.setValue(
+ this.hood,
+ this.superstructureStatus.hood().estimatorState().position());
+ }
+ if (!this.superstructureStatus.turret().zeroed()) {
+ this.setZeroing(this.turret);
+ } else if (this.superstructureStatus.turret().estopped()) {
+ this.setEstopped(this.turret);
+ } else {
+ this.setValue(
+ this.turret,
+ this.superstructureStatus.turret().estimatorState().position());
+ }
+ if (!this.superstructureStatus.intake().zeroed()) {
+ this.setZeroing(this.intake);
+ } else if (this.superstructureStatus.intake().estopped()) {
+ this.setEstopped(this.intake);
+ } else {
+ this.setValue(
+ this.intake,
+ this.superstructureStatus.intake().estimatorState().position());
+ }
}
this.drawRobot(
this.drivetrainStatus.x(), this.drivetrainStatus.y(),