Update field visualization for 2023
The plotting is still fairly rudimentary, but this preserves the core
functionality we had in 2022 for understanding the localizer: the robot
pose, the per-camera target debug drawings, and the image rejection counts.
Change-Id: I566d6ebc2dead414b11e92d41fde78dd70f34e92
Signed-off-by: James Kuszmaul <jabukuszmaul@gmail.com>
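
For reviewers who want a condensed picture of the new web-side flow, here is
a minimal standalone sketch of the rejection-reason readout. It is not part
of the patch and only uses the generated flatbuffer accessors and the
vision_readouts div that appear in the diff below; the real wiring lives in
FieldHandler.

  import {ByteBuffer} from 'flatbuffers';
  import {RejectionReason} from '../localizer/status_generated';
  import {Visualization} from '../localizer/visualization_generated';

  // One readout cell per rejection reason, in enum order.  TypeScript
  // numeric enums carry a reverse mapping, so only the numeric keys are
  // real enum values.
  const rejectionCells: HTMLElement[] = [];
  const container = document.getElementById('vision_readouts') as HTMLElement;
  for (const value in RejectionReason) {
    if (isNaN(Number(value))) {
      continue;
    }
    const row = document.createElement('div');
    const nameCell = document.createElement('div');
    nameCell.innerHTML = RejectionReason[Number(value)];
    const countCell = document.createElement('div');
    countCell.innerHTML = 'NA';
    row.appendChild(nameCell);
    row.appendChild(countCell);
    container.appendChild(row);
    rejectionCells.push(countCell);
  }

  // Update the table from the raw bytes of one Visualization message.
  function updateRejectionCounts(data: Uint8Array): void {
    const vis = Visualization.getRootAsVisualization(new ByteBuffer(data));
    const stats = vis.statistics();
    if (stats === null ||
        stats.rejectionReasonsLength() !== rejectionCells.length) {
      return;
    }
    for (let ii = 0; ii < rejectionCells.length; ++ii) {
      const reason = stats.rejectionReasons(ii);
      if (reason !== null) {
        rejectionCells[ii].innerHTML = reason.count().toString();
      }
    }
  }
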
diff --git a/aos/util/error_counter.h b/aos/util/error_counter.h
index 9fbe242..3e69a71 100644
--- a/aos/util/error_counter.h
+++ b/aos/util/error_counter.h
@@ -80,7 +80,7 @@
ArrayErrorCounter() { ResetCounts(); }
flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Count>>>
- PopulateCounts(flatbuffers::FlatBufferBuilder *fbb) {
+ PopulateCounts(flatbuffers::FlatBufferBuilder *fbb) const {
const flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Count>>>
offset = ErrorCounter<Error, Count>::Initialize(fbb);
flatbuffers::Vector<flatbuffers::Offset<Count>> *vector =
diff --git a/y2023/localizer/localizer.cc b/y2023/localizer/localizer.cc
index 87063ad..1d78528 100644
--- a/y2023/localizer/localizer.cc
+++ b/y2023/localizer/localizer.cc
@@ -158,8 +158,11 @@
}
auto vector_offset = builder.fbb()->CreateVector(
debug_offsets.data(), debug_offsets.size());
+ auto stats_offset =
+ StatisticsForCamera(cameras_.at(camera_index), builder.fbb());
Visualization::Builder visualize_builder(*builder.fbb());
visualize_builder.add_targets(vector_offset);
+ visualize_builder.add_statistics(stats_offset);
builder.CheckOk(builder.Send(visualize_builder.Finish()));
SendStatus();
});
@@ -392,19 +395,22 @@
return builder.Finish();
}
+flatbuffers::Offset<CumulativeStatistics> Localizer::StatisticsForCamera(
+ const CameraState &camera, flatbuffers::FlatBufferBuilder *fbb) {
+ const auto counts_offset = camera.rejection_counter.PopulateCounts(fbb);
+ CumulativeStatistics::Builder stats_builder(*fbb);
+ stats_builder.add_total_accepted(camera.total_accepted_targets);
+ stats_builder.add_total_candidates(camera.total_candidate_targets);
+ stats_builder.add_rejection_reasons(counts_offset);
+ return stats_builder.Finish();
+}
+
void Localizer::SendStatus() {
auto builder = status_sender_.MakeBuilder();
std::array<flatbuffers::Offset<CumulativeStatistics>, kNumCameras>
stats_offsets;
for (size_t ii = 0; ii < kNumCameras; ++ii) {
- const auto counts_offset =
- cameras_.at(ii).rejection_counter.PopulateCounts(builder.fbb());
- CumulativeStatistics::Builder stats_builder =
- builder.MakeBuilder<CumulativeStatistics>();
- stats_builder.add_total_accepted(cameras_.at(ii).total_accepted_targets);
- stats_builder.add_total_candidates(cameras_.at(ii).total_candidate_targets);
- stats_builder.add_rejection_reasons(counts_offset);
- stats_offsets.at(ii) = stats_builder.Finish();
+ stats_offsets.at(ii) = StatisticsForCamera(cameras_.at(ii), builder.fbb());
}
auto stats_offset =
builder.fbb()->CreateVector(stats_offsets.data(), stats_offsets.size());
diff --git a/y2023/localizer/localizer.h b/y2023/localizer/localizer.h
index b89d222..8c53467 100644
--- a/y2023/localizer/localizer.h
+++ b/y2023/localizer/localizer.h
@@ -88,6 +88,8 @@
flatbuffers::Offset<ImuStatus> PopulateImu(
flatbuffers::FlatBufferBuilder *fbb) const;
void SendStatus();
+ static flatbuffers::Offset<CumulativeStatistics> StatisticsForCamera(
+ const CameraState &camera, flatbuffers::FlatBufferBuilder *fbb);
aos::EventLoop *const event_loop_;
const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config_;
diff --git a/y2023/www/2022.png b/y2023/www/2022.png
deleted file mode 100644
index 68087bd..0000000
--- a/y2023/www/2022.png
+++ /dev/null
Binary files differ
diff --git a/y2023/www/2023.png b/y2023/www/2023.png
new file mode 100644
index 0000000..d3bffd1
--- /dev/null
+++ b/y2023/www/2023.png
Binary files differ
diff --git a/y2023/www/BUILD b/y2023/www/BUILD
index 539fa6d..f8b706c 100644
--- a/y2023/www/BUILD
+++ b/y2023/www/BUILD
@@ -24,7 +24,9 @@
"//aos/network:web_proxy_ts_fbs",
"//aos/network/www:proxy",
"//frc971/control_loops/drivetrain:drivetrain_status_ts_fbs",
- "//y2023/control_loops/superstructure:superstructure_status_ts_fbs",
+ "//frc971/control_loops/drivetrain/localization:localizer_output_ts_fbs",
+ "//y2023/localizer:status_ts_fbs",
+ "//y2023/localizer:visualization_ts_fbs",
"@com_github_google_flatbuffers//ts:flatbuffers_ts",
],
)
diff --git a/y2023/www/field.html b/y2023/www/field.html
index f39c1a4..72d8f54 100644
--- a/y2023/www/field.html
+++ b/y2023/www/field.html
@@ -27,96 +27,12 @@
<table>
<tr>
- <th colspan="2">Aiming</th>
- </tr>
- <tr>
- <td>Shot distance</td>
- <td id="shot_distance"> NA </td>
- </tr>
- <tr>
- <td>Turret</td>
- <td id="turret"> NA </td>
- </tr>
- </table>
-
- <table>
- <tr>
- <th colspan="2">Catapult</th>
- </tr>
- <tr>
- <td>Fire</td>
- <td id="fire"> NA </td>
- </tr>
- <tr>
- <td>Solve Time</td>
- <td id="mpc_solve_time"> NA </td>
- </tr>
- <tr>
- <td>MPC Active</td>
- <td id="mpc_horizon"> NA </td>
- </tr>
- <tr>
- <td>Shot Count</td>
- <td id="shot_count"> NA </td>
- </tr>
- <tr>
- <td>Position</td>
- <td id="catapult"> NA </td>
- </tr>
- </table>
-
- <table>
- <tr>
- <th colspan="2">Superstructure</th>
- </tr>
- <tr>
- <td>State</td>
- <td id="superstructure_state"> NA </td>
- </tr>
- <tr>
- <td>Intake State</td>
- <td id="intake_state"> NA </td>
- </tr>
- <tr>
- <td>Reseating</td>
- <td id="reseating_in_catapult"> NA </td>
- </tr>
- <tr>
- <td>Flippers Open</td>
- <td id="flippers_open"> NA </td>
- </tr>
- <tr>
- <td>Climber</td>
- <td id="climber"> NA </td>
- </tr>
- </table>
-
- <table>
- <tr>
- <th colspan="2">Intakes</th>
- </tr>
- <tr>
- <td>Front Intake</td>
- <td id="front_intake"> NA </td>
- </tr>
- <tr>
- <td>Back Intake</td>
- <td id="back_intake"> NA </td>
- </tr>
- </table>
-
- <table>
- <tr>
<th colspan="2">Images</th>
</tr>
<tr>
<td> Images Accepted </td>
<td id="images_accepted"> NA </td>
</tr>
- <tr>
- <td> Images Rejected </td>
- <td id="images_rejected"> NA </td>
- </tr>
</table>
</div>
<div id="vision_readouts">
diff --git a/y2023/www/field_handler.ts b/y2023/www/field_handler.ts
index 67566ea..0f5a975 100644
--- a/y2023/www/field_handler.ts
+++ b/y2023/www/field_handler.ts
@@ -1,7 +1,9 @@
import {ByteBuffer} from 'flatbuffers';
import {Connection} from '../../aos/network/www/proxy';
-import {Status as SuperstructureStatus} from '../control_loops/superstructure/superstructure_status_generated'
+import {LocalizerOutput} from '../../frc971/control_loops/drivetrain/localization/localizer_output_generated';
+import {RejectionReason} from '../localizer/status_generated';
import {Status as DrivetrainStatus} from '../../frc971/control_loops/drivetrain/drivetrain_status_generated';
+import {Visualization, TargetEstimateDebug} from '../localizer/visualization_generated';
import {FIELD_LENGTH, FIELD_WIDTH, FT_TO_M, IN_TO_M} from './constants';
@@ -9,34 +11,50 @@
const FIELD_SIDE_Y = FIELD_WIDTH / 2;
const FIELD_EDGE_X = FIELD_LENGTH / 2;
-const ROBOT_WIDTH = 34 * IN_TO_M;
-const ROBOT_LENGTH = 36 * IN_TO_M;
+const ROBOT_WIDTH = 25 * IN_TO_M;
+const ROBOT_LENGTH = 32 * IN_TO_M;
const PI_COLORS = ['#ff00ff', '#ffff00', '#00ffff', '#ffa500'];
+const PIS = ['pi1', 'pi2', 'pi3', 'pi4'];
export class FieldHandler {
private canvas = document.createElement('canvas');
+ private localizerOutput: LocalizerOutput|null = null;
private drivetrainStatus: DrivetrainStatus|null = null;
- private superstructureStatus: SuperstructureStatus|null = null;
// Image information indexed by timestamp (seconds since the epoch), so that
// we can stop displaying images after a certain amount of time.
- private x: HTMLElement = (document.getElementById('x') as HTMLElement);
+ private localizerImageMatches = new Map<number, Visualization>();
+ private x: HTMLElement = (document.getElementById('x') as HTMLElement);
private y: HTMLElement = (document.getElementById('y') as HTMLElement);
private theta: HTMLElement =
(document.getElementById('theta') as HTMLElement);
- private superstructureState: HTMLElement =
- (document.getElementById('superstructure_state') as HTMLElement);
private imagesAcceptedCounter: HTMLElement =
(document.getElementById('images_accepted') as HTMLElement);
- private imagesRejectedCounter: HTMLElement =
- (document.getElementById('images_rejected') as HTMLElement);
+ private rejectionReasonCells: HTMLElement[] = [];
private fieldImage: HTMLImageElement = new Image();
constructor(private readonly connection: Connection) {
(document.getElementById('field') as HTMLElement).appendChild(this.canvas);
- this.fieldImage.src = "2022.png";
+ this.fieldImage.src = "2023.png";
+
+ for (const value in RejectionReason) {
+ // TypeScript numeric enums include a reverse mapping, so iterating
+ // yields both the names and the numeric values (as strings); skip
+ // the name entries.
+ if (isNaN(Number(value))) {
+ continue;
+ }
+ const row = document.createElement('div');
+ const nameCell = document.createElement('div');
+ nameCell.innerHTML = RejectionReason[value];
+ row.appendChild(nameCell);
+ const valueCell = document.createElement('div');
+ valueCell.innerHTML = 'NA';
+ this.rejectionReasonCells.push(valueCell);
+ row.appendChild(valueCell);
+ document.getElementById('vision_readouts').appendChild(row);
+ }
for (let ii = 0; ii < PI_COLORS.length; ++ii) {
const legendEntry = document.createElement('div');
@@ -48,32 +66,61 @@
this.connection.addConfigHandler(() => {
// Visualization message is reliable so that we can see *all* the vision
// matches.
+ for (const pi of PIS) {
+ this.connection.addReliableHandler(
+ '/' + pi + '/camera', "y2023.localizer.Visualization",
+ (data) => {
+ this.handleLocalizerDebug(pi, data);
+ });
+ }
this.connection.addHandler(
'/drivetrain', "frc971.control_loops.drivetrain.Status", (data) => {
this.handleDrivetrainStatus(data);
});
this.connection.addHandler(
- '/superstructure', "y2023.control_loops.superstructure.Status",
- (data) => {
- this.handleSuperstructureStatus(data);
+ '/localizer', "frc971.controls.LocalizerOutput", (data) => {
+ this.handleLocalizerOutput(data);
});
});
}
+ private handleLocalizerDebug(pi: string, data: Uint8Array): void {
+ const now = Date.now() / 1000.0;
+
+ const fbBuffer = new ByteBuffer(data);
+ this.localizerImageMatches.set(
+ now, Visualization.getRootAsVisualization(fbBuffer));
+
+ const debug = this.localizerImageMatches.get(now);
+
+ if (debug.statistics()) {
+ if (debug.statistics().rejectionReasonsLength() ==
+ this.rejectionReasonCells.length) {
+ for (let ii = 0; ii < debug.statistics().rejectionReasonsLength();
+ ++ii) {
+ this.rejectionReasonCells[ii].innerHTML =
+ debug.statistics().rejectionReasons(ii).count().toString();
+ }
+ } else {
+ console.error('Unexpected number of rejection reasons in counter.');
+ }
+ }
+ }
+
+ private handleLocalizerOutput(data: Uint8Array): void {
+ const fbBuffer = new ByteBuffer(data);
+ this.localizerOutput = LocalizerOutput.getRootAsLocalizerOutput(fbBuffer);
+ }
+
private handleDrivetrainStatus(data: Uint8Array): void {
const fbBuffer = new ByteBuffer(data);
this.drivetrainStatus = DrivetrainStatus.getRootAsStatus(fbBuffer);
}
- private handleSuperstructureStatus(data: Uint8Array): void {
- const fbBuffer = new ByteBuffer(data);
- this.superstructureStatus = SuperstructureStatus.getRootAsStatus(fbBuffer);
- }
-
drawField(): void {
const ctx = this.canvas.getContext('2d');
ctx.save();
- ctx.scale(-1.0, 1.0);
+ ctx.scale(1.0, -1.0);
ctx.drawImage(
this.fieldImage, 0, 0, this.fieldImage.width, this.fieldImage.height,
-FIELD_EDGE_X, -FIELD_SIDE_Y, FIELD_LENGTH, FIELD_WIDTH);
@@ -81,8 +128,7 @@
}
drawCamera(
- x: number, y: number, theta: number, color: string = 'blue',
- extendLines: boolean = true): void {
+ x: number, y: number, theta: number, color: string = 'blue'): void {
const ctx = this.canvas.getContext('2d');
ctx.save();
ctx.translate(x, y);
@@ -91,10 +137,6 @@
ctx.beginPath();
ctx.moveTo(0.5, 0.5);
ctx.lineTo(0, 0);
- if (extendLines) {
- ctx.lineTo(100.0, 0);
- ctx.lineTo(0, 0);
- }
ctx.lineTo(0.5, -0.5);
ctx.stroke();
ctx.beginPath();
@@ -104,9 +146,8 @@
}
drawRobot(
- x: number, y: number, theta: number, turret: number|null,
- color: string = 'blue', dashed: boolean = false,
- extendLines: boolean = true): void {
+ x: number, y: number, theta: number,
+ color: string = 'blue', dashed: boolean = false): void {
const ctx = this.canvas.getContext('2d');
ctx.save();
ctx.translate(x, y);
@@ -125,11 +166,7 @@
// Draw line indicating which direction is forwards on the robot.
ctx.beginPath();
ctx.moveTo(0, 0);
- if (extendLines) {
- ctx.lineTo(1000.0, 0);
- } else {
- ctx.lineTo(ROBOT_LENGTH / 2.0, 0);
- }
+ ctx.lineTo(ROBOT_LENGTH / 2.0, 0);
ctx.stroke();
ctx.restore();
@@ -142,25 +179,6 @@
div.classList.remove('near');
}
- setEstopped(div: HTMLElement): void {
- div.innerHTML = 'estopped';
- div.classList.add('faulted');
- div.classList.remove('zeroing');
- div.classList.remove('near');
- }
-
- setTargetValue(
- div: HTMLElement, target: number, val: number, tolerance: number): void {
- div.innerHTML = val.toFixed(4);
- div.classList.remove('faulted');
- div.classList.remove('zeroing');
- if (Math.abs(target - val) < tolerance) {
- div.classList.add('near');
- } else {
- div.classList.remove('near');
- }
- }
-
setValue(div: HTMLElement, val: number): void {
div.innerHTML = val.toFixed(4);
div.classList.remove('faulted');
@@ -174,15 +192,64 @@
// Draw the matches with debugging information from the localizer.
const now = Date.now() / 1000.0;
-
+
if (this.drivetrainStatus && this.drivetrainStatus.trajectoryLogging()) {
this.drawRobot(
this.drivetrainStatus.trajectoryLogging().x(),
this.drivetrainStatus.trajectoryLogging().y(),
- this.drivetrainStatus.trajectoryLogging().theta(), null, "#000000FF",
+ this.drivetrainStatus.trajectoryLogging().theta(), "#000000FF",
false);
}
+ if (this.localizerOutput) {
+ if (!this.localizerOutput.zeroed()) {
+ this.setZeroing(this.x);
+ this.setZeroing(this.y);
+ this.setZeroing(this.theta);
+ } else {
+ this.setValue(this.x, this.localizerOutput.x());
+ this.setValue(this.y, this.localizerOutput.y());
+ this.setValue(this.theta, this.localizerOutput.theta());
+ }
+
+ this.drawRobot(
+ this.localizerOutput.x(), this.localizerOutput.y(),
+ this.localizerOutput.theta());
+
+ this.imagesAcceptedCounter.innerHTML =
+ this.localizerOutput.imageAcceptedCount().toString();
+ }
+
+ for (const [time, value] of this.localizerImageMatches) {
+ const age = now - time;
+ const kRemovalAge = 1.0;
+ if (age > kRemovalAge) {
+ this.localizerImageMatches.delete(time);
+ continue;
+ }
+ const kMaxImageAlpha = 0.5;
+ const ageAlpha = kMaxImageAlpha * (kRemovalAge - age) / kRemovalAge;
+ for (let i = 0; i < value.targetsLength(); i++) {
+ const imageDebug = value.targets(i);
+ const x = imageDebug.impliedRobotX();
+ const y = imageDebug.impliedRobotY();
+ const theta = imageDebug.impliedRobotTheta();
+ const cameraX = imageDebug.cameraX();
+ const cameraY = imageDebug.cameraY();
+ const cameraTheta = imageDebug.cameraTheta();
+ const accepted = imageDebug.accepted();
+ // Make camera readings fade over time.
+ const alpha = Math.round(255 * ageAlpha).toString(16).padStart(2, '0');
+ const dashed = false;
+ const acceptedRgb = accepted ? '#00FF00' : '#FF0000';
+ const acceptedRgba = acceptedRgb + alpha;
+ const cameraRgb = PI_COLORS[imageDebug.camera()];
+ const cameraRgba = cameraRgb + alpha;
+ this.drawRobot(x, y, theta, acceptedRgba, dashed);
+ this.drawCamera(cameraX, cameraY, cameraTheta, cameraRgba);
+ }
+ }
+
window.requestAnimationFrame(() => this.draw());
}