Draw most recent targets on webpage
Draws the most recently received targets from each camera in yellow.
Change-Id: I32b31d1b350cc5579dfdd202adeb661fd16c1332
diff --git a/y2019/vision/server/BUILD b/y2019/vision/server/BUILD
index 420731a..5ee83f6 100644
--- a/y2019/vision/server/BUILD
+++ b/y2019/vision/server/BUILD
@@ -51,7 +51,9 @@
"//aos/logging",
"//aos/time",
"//frc971/control_loops/drivetrain:drivetrain_queue",
+ "//frc971/control_loops:pose",
"//third_party/seasocks",
+ "//y2019:constants",
"//y2019/control_loops/drivetrain:camera_queue",
],
)
diff --git a/y2019/vision/server/server.cc b/y2019/vision/server/server.cc
index 95ecd8d..6f2e26a 100644
--- a/y2019/vision/server/server.cc
+++ b/y2019/vision/server/server.cc
@@ -2,6 +2,7 @@
#include <sys/types.h>
#include <unistd.h>
#include <array>
+#include <cmath>
#include <memory>
#include <set>
#include <sstream>
@@ -10,12 +11,14 @@
#include "aos/logging/logging.h"
#include "aos/time/time.h"
#include "frc971/control_loops/drivetrain/drivetrain.q.h"
+#include "frc971/control_loops/pose.h"
#include "internal/Embedded.h"
#include "google/protobuf/util/json_util.h"
#include "seasocks/PrintfLogger.h"
#include "seasocks/Server.h"
#include "seasocks/StringUtil.h"
#include "seasocks/WebSocket.h"
+#include "y2019/constants.h"
#include "y2019/control_loops/drivetrain/camera.q.h"
#include "y2019/vision/server/server_data.pb.h"
@@ -142,6 +145,8 @@
{
const auto &new_frame = *camera_frames;
+ // TODO(james): Maybe only fill out a new frame if it has targets or the
+ // saved target is > 0.1 sec old? Unsure; for now, always update the frame.
if (new_frame.camera < latest_frames.size()) {
latest_frames[new_frame.camera].capture_time =
aos::monotonic_clock::time_point(
@@ -153,7 +158,14 @@
}
for (int target = 0; target < new_frame.num_targets; ++target) {
latest_frames[new_frame.camera].targets.emplace_back();
- // TODO: Do something useful.
+ const float heading = new_frame.targets[target].heading;
+ const float distance = new_frame.targets[target].distance;
+ latest_frames[new_frame.camera].targets.back().x =
+ ::std::cos(heading) * distance;
+ latest_frames[new_frame.camera].targets.back().y =
+ ::std::sin(heading) * distance;
+ latest_frames[new_frame.camera].targets.back().theta =
+ new_frame.targets[target].skew;
}
}
}
@@ -166,6 +178,9 @@
debug_data.mutable_robot_pose()->set_x(drivetrain_status->x);
debug_data.mutable_robot_pose()->set_y(drivetrain_status->y);
debug_data.mutable_robot_pose()->set_theta(drivetrain_status->theta);
+ frc971::control_loops::TypedPose<double> robot_pose(
+ {drivetrain_status->x, drivetrain_status->y, 0},
+ drivetrain_status->theta);
{
LineFollowDebug *line_debug = debug_data.mutable_line_follow_debug();
line_debug->set_frozen(drivetrain_status->line_follow_logging.frozen);
@@ -181,6 +196,10 @@
for (size_t ii = 0; ii < latest_frames.size(); ++ii) {
CameraDebug *camera_debug = debug_data.add_camera_debug();
LocalCameraFrame cur_frame = latest_frames[ii];
+ constants::Values::CameraCalibration camera_info =
+ constants::GetValues().cameras[ii];
+ frc971::control_loops::TypedPose<double> camera_pose =
+ camera_info.pose.Rebase(&robot_pose);
camera_debug->set_current_frame_age(
::std::chrono::duration_cast<::std::chrono::duration<double>>(
@@ -189,10 +208,12 @@
::std::chrono::duration_cast<::std::chrono::duration<double>>(
now - last_target_time[ii]).count());
for (const auto &target : cur_frame.targets) {
+ frc971::control_loops::TypedPose<double> target_pose(
+ &camera_pose, {target.x, target.y, 0}, target.theta);
Pose *pose = camera_debug->add_targets();
- pose->set_x(target.x);
- pose->set_y(target.y);
- pose->set_theta(target.theta);
+ pose->set_x(target_pose.abs_pos().x());
+ pose->set_y(target_pose.abs_pos().y());
+ pose->set_theta(target_pose.abs_theta());
}
}
::std::string json;
diff --git a/y2019/vision/server/www/main.ts b/y2019/vision/server/www/main.ts
index a260787..0d88a43 100644
--- a/y2019/vision/server/www/main.ts
+++ b/y2019/vision/server/www/main.ts
@@ -1,6 +1,6 @@
import {FIELD_WIDTH, FT_TO_M} from './constants';
import {drawField, drawTarget} from './field';
-import {drawRobot} from './robot';
+import {drawRobot, Frame} from './robot';
function main(): void {
const vis = new Visualiser();
@@ -16,7 +16,7 @@
private targetX = 0;
private targetY = 0;
private targetTheta = 0;
- private cameraColors = ['red', 'red', 'red', 'red', 'red'];
+ private cameraFrames : Frame[];
constructor() {
const canvas = <HTMLCanvasElement>document.getElementById('field');
@@ -30,6 +30,7 @@
initWebSocket(server: string): void {
const socket = new WebSocket(`ws://${server}/ws`);
const reader = new FileReader();
+ this.cameraFrames = [];
reader.addEventListener('loadend', (e) => {
const text = e.srcElement.result;
const j = JSON.parse(text);
@@ -44,13 +45,7 @@
this.targetY = j.lineFollowDebug.goalTarget.y;
this.targetTheta = j.lineFollowDebug.goalTarget.theta;
}
- for (let ii of [0, 1, 2, 3, 4]) {
- if (j.cameraDebug[ii].timeSinceLastTarget > 0.25) {
- this.cameraColors[ii] = 'red';
- } else {
- this.cameraColors[ii] = 'green';
- }
- }
+ this.cameraFrames = j.cameraDebug;
});
socket.addEventListener('message', (event) => {
reader.readAsText(event.data);
@@ -81,7 +76,7 @@
this.reset(ctx);
drawField(ctx);
- drawRobot(ctx, this.x, this.y, this.theta, this.cameraColors);
+ drawRobot(ctx, this.x, this.y, this.theta, this.cameraFrames);
ctx.save();
ctx.lineWidth = 2.0 * ctx.lineWidth;
if (this.targetLocked) {
diff --git a/y2019/vision/server/www/robot.ts b/y2019/vision/server/www/robot.ts
index 6329fb3..71defb6 100644
--- a/y2019/vision/server/www/robot.ts
+++ b/y2019/vision/server/www/robot.ts
@@ -1,28 +1,45 @@
import {CAMERA_POSES} from './camera_constants';
import {FT_TO_M, IN_TO_M} from './constants';
+import {drawTarget} from './field';
const ROBOT_WIDTH = 25 * IN_TO_M;
const ROBOT_LENGTH = 31 * IN_TO_M;
-const CAMERA_SCALE = 0.3;
+const CAMERA_SCALE = 0.2;
+
+interface Pose {
+ x : number;
+ y : number;
+ theta: number;
+}
+
+export interface Frame {
+ timeSinceLastTarget : number;
+ currentFrameAge : number;
+ targets : Pose[];
+}
function drawCamera(
- ctx: CanvasRenderingContext2D,
- pose: {x: number, y: number, theta: number}): void {
+ ctx: CanvasRenderingContext2D, pose: Pose, frame: Frame): void {
+ ctx.save();
+ ctx.translate(pose.x, pose.y);
+ ctx.rotate(pose.theta);
+ if (frame.timeSinceLastTarget > 0.25) {
+ ctx.strokeStyle = 'red';
+ } else {
+ ctx.strokeStyle = 'green';
+ }
ctx.beginPath();
- ctx.moveTo(pose.x, pose.y);
- ctx.lineTo(
- pose.x + CAMERA_SCALE * Math.cos(pose.theta + Math.PI / 4.0),
- pose.y + CAMERA_SCALE * Math.sin(pose.theta + Math.PI / 4.0));
- ctx.lineTo(
- pose.x + CAMERA_SCALE * Math.cos(pose.theta - Math.PI / 4.0),
- pose.y + CAMERA_SCALE * Math.sin(pose.theta - Math.PI / 4.0));
+ ctx.moveTo(0, 0);
+ ctx.lineTo(CAMERA_SCALE, CAMERA_SCALE);
+ ctx.lineTo(CAMERA_SCALE, -CAMERA_SCALE);
ctx.closePath();
ctx.stroke();
+ ctx.restore();
}
export function drawRobot(
ctx: CanvasRenderingContext2D, x: number, y: number, theta: number,
- camera_colors: string[]): void {
+ cameraFrames: Frame[]): void {
ctx.save();
ctx.translate(x, y);
ctx.rotate(theta);
@@ -39,9 +56,19 @@
ctx.stroke();
ctx.lineWidth = 3.0 * ctx.lineWidth;
for (let ii of [0, 1, 2, 3, 4]) {
- ctx.strokeStyle = camera_colors[ii];
- drawCamera(ctx, CAMERA_POSES[ii]);
+ if (ii < cameraFrames.length) {
+ drawCamera(ctx, CAMERA_POSES[ii], cameraFrames[ii]);
+ }
}
ctx.restore();
+
+ for (let frame of cameraFrames) {
+ if (frame.targets) {
+ for (let target of frame.targets) {
+ ctx.strokeStyle = 'yellow';
+ drawTarget(ctx, target.x, target.y, target.theta);
+ }
+ }
+ }
}