Merge "Enable deploying code for the raspberry pis"
diff --git a/aos/network/BUILD b/aos/network/BUILD
index 89853f0..8f3f394 100644
--- a/aos/network/BUILD
+++ b/aos/network/BUILD
@@ -70,6 +70,7 @@
],
deps = [
":team_number",
+ "//aos:configuration",
"//aos/testing:googletest",
],
)
diff --git a/aos/network/team_number.cc b/aos/network/team_number.cc
index 14fadab..d2fbc8e 100644
--- a/aos/network/team_number.cc
+++ b/aos/network/team_number.cc
@@ -9,6 +9,8 @@
#include "aos/util/string_to_num.h"
+DECLARE_string(override_hostname);
+
namespace aos {
namespace network {
namespace team_number_internal {
@@ -101,10 +103,14 @@
} // namespace
::std::string GetHostname() {
- char buf[256];
- buf[sizeof(buf) - 1] = '\0';
- PCHECK(gethostname(buf, sizeof(buf) - 1) == 0);
- return buf;
+ if (FLAGS_override_hostname.empty()) {
+ char buf[256];
+ buf[sizeof(buf) - 1] = '\0';
+ PCHECK(gethostname(buf, sizeof(buf) - 1) == 0);
+ return buf;
+ } else {
+ return FLAGS_override_hostname;
+ }
}
uint16_t GetTeamNumber() {
diff --git a/aos/network/web_proxy.cc b/aos/network/web_proxy.cc
index f4a8ce8..f4da7d9 100644
--- a/aos/network/web_proxy.cc
+++ b/aos/network/web_proxy.cc
@@ -53,6 +53,8 @@
}
fetcher_->Fetch();
+ VLOG(2) << "Sending a message with " << GetPacketCount(fetcher_->context())
+ << "packets";
for (int packet_index = 0; packet_index < GetPacketCount(fetcher_->context());
++packet_index) {
flatbuffers::Offset<MessageHeader> message =
@@ -71,8 +73,10 @@
}
bool Subscriber::Compare(const Channel *channel) const {
- return channel->name() == fetcher_->channel()->name() &&
- channel->type() == fetcher_->channel()->type();
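+ // Compare by string contents; comparing the flatbuffers::String pointers
+ // would only check addresses.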
+ return channel->name()->string_view() ==
+ fetcher_->channel()->name()->string_view() &&
+ channel->type()->string_view() ==
+ fetcher_->channel()->type()->string_view();
}
Connection::Connection(
@@ -156,6 +160,7 @@
webrtc::DataBuffer data_buffer(
rtc::CopyOnWriteBuffer(buffer.data(), buffer.size()),
true /* binary array */);
+ VLOG(2) << "Sending " << buffer.size() << "bytes to a client";
data_channel_->Send(data_buffer);
}
@@ -211,9 +216,11 @@
void Connection::OnMessage(const webrtc::DataBuffer &buffer) {
const message_bridge::Connect *message =
flatbuffers::GetRoot<message_bridge::Connect>(buffer.data.data());
+ VLOG(2) << "Got a connect message " << aos::FlatbufferToJson(message);
for (auto &subscriber : subscribers_) {
// Make sure the subscriber is for a channel on this node.
if (subscriber.get() == nullptr) {
+ VLOG(2) << ": Null subscriber";
continue;
}
bool found_match = false;
diff --git a/aos/network/www/proxy.ts b/aos/network/www/proxy.ts
index 13f4636..704fc85 100644
--- a/aos/network/www/proxy.ts
+++ b/aos/network/www/proxy.ts
@@ -19,13 +19,17 @@
const messageHeader =
WebProxy.MessageHeader.getRootAsMessageHeader(fbBuffer);
// Short circuit if only one packet
- if (messageHeader.packetCount === 1) {
+ if (messageHeader.packetCount() === 1) {
this.handlerFunc(messageHeader.dataArray());
return;
}
if (messageHeader.packetIndex() === 0) {
this.dataBuffer = new Uint8Array(messageHeader.length());
+ this.receivedMessageLength = 0;
+ }
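+ // Nothing to buffer if this packet carries no data.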
+ if (!messageHeader.dataLength()) {
+ return;
}
this.dataBuffer.set(
messageHeader.dataArray(),
@@ -128,7 +132,7 @@
onWebSocketOpen(): void {
this.rtcPeerConnection = new RTCPeerConnection({});
this.rtcPeerConnection.addEventListener(
- 'datachannel', (e) => this.onDataCnannel(e));
+ 'datachannel', (e) => this.onDataChannel(e));
this.dataChannel = this.rtcPeerConnection.createDataChannel('signalling');
this.dataChannel.addEventListener(
'message', (e) => this.onDataChannelMessage(e));
diff --git a/y2020/vision/BUILD b/y2020/vision/BUILD
index 60276c9..b643fcb 100644
--- a/y2020/vision/BUILD
+++ b/y2020/vision/BUILD
@@ -36,6 +36,9 @@
"//tools:armhf-debian",
],
visibility = ["//y2020:__subpackages__"],
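+ # Ship the config so camera_reader can be run standalone (see camera_reader.cc).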
+ data = [
+ "//y2020:config.json",
+ ],
deps = [
":v4l2_reader",
":vision_fbs",
diff --git a/y2020/vision/camera_reader.cc b/y2020/vision/camera_reader.cc
index 2615bca..d45ec3f 100644
--- a/y2020/vision/camera_reader.cc
+++ b/y2020/vision/camera_reader.cc
@@ -14,6 +14,11 @@
#include "y2020/vision/v4l2_reader.h"
#include "y2020/vision/vision_generated.h"
+// Config used to allow running camera_reader independently. E.g.,
+// bazel run //y2020/vision:camera_reader -- --config y2020/config.json
+// --override_hostname pi-7971-1 --ignore_timestamps true
+DEFINE_string(config, "config.json", "Path to the config file to use.");
+
namespace frc971 {
namespace vision {
namespace {
@@ -523,7 +528,7 @@
void CameraReaderMain() {
aos::FlatbufferDetachedBuffer<aos::Configuration> config =
- aos::configuration::ReadConfig("config.json");
+ aos::configuration::ReadConfig(FLAGS_config);
const auto training_data_bfbs = SiftTrainingData();
const sift::TrainingData *const training_data =
diff --git a/y2020/vision/v4l2_reader.cc b/y2020/vision/v4l2_reader.cc
index f1944c1..91777c7 100644
--- a/y2020/vision/v4l2_reader.cc
+++ b/y2020/vision/v4l2_reader.cc
@@ -6,6 +6,9 @@
#include <sys/stat.h>
#include <sys/types.h>
+DEFINE_bool(ignore_timestamps, false,
+ "Don't require timestamps on images. Used to allow webcams");
+
namespace frc971 {
namespace vision {
@@ -137,8 +140,11 @@
buffer.m.userptr);
CHECK_EQ(ImageSize(), buffer.length);
CHECK(buffer.flags & V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC);
- CHECK_EQ(buffer.flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK,
- static_cast<uint32_t>(V4L2_BUF_FLAG_TSTAMP_SRC_EOF));
+ if (!FLAGS_ignore_timestamps) {
+ // Require that we have a good timestamp on images.
+ CHECK_EQ(buffer.flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK,
+ static_cast<uint32_t>(V4L2_BUF_FLAG_TSTAMP_SRC_EOF));
+ }
return {static_cast<int>(buffer.index),
aos::time::from_timeval(buffer.timestamp)};
}
diff --git a/y2020/www/image_handler.ts b/y2020/www/image_handler.ts
index 6ad83a3..ae530ef 100644
--- a/y2020/www/image_handler.ts
+++ b/y2020/www/image_handler.ts
@@ -7,31 +7,41 @@
private imageTimestamp: flatbuffers.Long|null = null;
private result: ImageMatchResult|null = null;
private resultTimestamp: flatbuffers.Long|null = null;
+ private width = 0;
+ private height = 0;
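+ // Number of images to skip between rendered frames; only every fourth
+ // image is drawn.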
+ private imageSkipCount = 3;
constructor() {
document.body.appendChild(this.canvas);
}
handleImage(data: Uint8Array): void {
+ if (this.imageSkipCount !== 0) {
+ this.imageSkipCount--;
+ return;
+ } else {
+ this.imageSkipCount = 3;
+ }
+
const fbBuffer = new flatbuffers.ByteBuffer(data);
const image = CameraImage.getRootAsCameraImage(fbBuffer);
this.imageTimestamp = image.monotonicTimestampNs();
- const width = image.cols();
- const height = image.rows();
- if (width === 0 || height === 0) {
+ this.width = image.cols();
+ this.height = image.rows();
+ if (this.width === 0 || this.height === 0) {
return;
}
- this.imageBuffer = new Uint8ClampedArray(width * height * 4); // RGBA
+ this.imageBuffer = new Uint8ClampedArray(this.width * this.height * 4); // RGBA
// Read four bytes (YUYV) from the data and transform into two pixels of
// RGBA for canvas
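+ // Each four-byte YUYV group is [Y1, U, Y2, V]; two horizontal pixels
+ // share one U and one V sample.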
- for (const j = 0; j < height; j++) {
- for (const i = 0; i < width; i += 2) {
- const y1 = image.data((j * width + i) * 2);
- const u = image.data((j * width + i) * 2 + 1);
- const y2 = image.data((j * width + i + 1) * 2);
- const v = image.data((j * width + i + 1) * 2 + 1);
+ for (let j = 0; j < this.height; j++) {
+ for (let i = 0; i < this.width; i += 2) {
+ const y1 = image.data((j * this.width + i) * 2);
+ const u = image.data((j * this.width + i) * 2 + 1);
+ const y2 = image.data((j * this.width + i + 1) * 2);
+ const v = image.data((j * this.width + i + 1) * 2 + 1);
// Based on https://en.wikipedia.org/wiki/YUV#Converting_between_Y%E2%80%B2UV_and_RGB
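+ // Integer approximation of the BT.601 conversion: C = Y - 16,
+ // D = U - 128, E = V - 128.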
const c1 = y1 - 16;
@@ -39,53 +49,55 @@
const d = u - 128;
const e = v - 128;
- imageBuffer[(j * width + i) * 4 + 0] = (298 * c1 + 409 * e + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 1] =
- (298 * c1 - 100 * d - 208 * e + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 2] = (298 * c1 + 516 * d + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 3] = 255;
- imageBuffer[(j * width + i) * 4 + 4] = (298 * c2 + 409 * e + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 5] =
- (298 * c2 - 100 * d - 208 * e + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 6] = (298 * c2 + 516 * d + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 7] = 255;
+ this.imageBuffer[(j * this.width + i) * 4 + 0] = (298 * c1 + 409 * e + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 1] = (298 * c1 - 100 * d - 208 * e + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 2] = (298 * c1 + 516 * d + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 3] = 255;
+ this.imageBuffer[(j * this.width + i) * 4 + 4] = (298 * c2 + 409 * e + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 5] = (298 * c2 - 100 * d - 208 * e + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 6] = (298 * c2 + 516 * d + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 7] = 255;
}
}
- draw();
+ this.draw();
}
handleImageMetadata(data: Uint8Array): void {
const fbBuffer = new flatbuffers.ByteBuffer(data);
this.result = ImageMatchResult.getRootAsImageMatchResult(fbBuffer);
- this.resultTimestamp = result.imageMonotonicTimestampNs();
- draw();
+ this.resultTimestamp = this.result.imageMonotonicTimestampNs();
+ this.draw();
}
draw(): void {
- if (imageTimestamp.low !== resultTimestamp.low ||
- imageTimestamp.high !== resultTimestamp.high) {
+ if (!this.imageTimestamp || !this.resultTimestamp ||
+ this.imageTimestamp.low !== this.resultTimestamp.low ||
+ this.imageTimestamp.high !== this.resultTimestamp.high) {
return;
}
const ctx = this.canvas.getContext('2d');
- this.canvas.width = width;
- this.canvas.height = height;
- const idata = ctx.createImageData(width, height);
+ this.canvas.width = this.width;
+ this.canvas.height = this.height;
+ const idata = ctx.createImageData(this.width, this.height);
idata.data.set(this.imageBuffer);
ctx.putImageData(idata, 0, 0);
- ctx.beginPath();
- for (const feature of this.result.getFeatures()) {
+ for (let i = 0; i < this.result.featuresLength(); i++) {
+ const feature = this.result.features(i);
// Based on OpenCV drawKeypoint.
- ctx.arc(feature.x, feature.y, feature.size, 0, 2 * Math.PI);
- ctx.moveTo(feature.x, feature.y);
- // TODO(alex): check that angle is correct (0?, direction?)
- const angle = feature.angle * Math.PI / 180;
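+ // Draw each keypoint as a circle of its size() plus a line showing its
+ // orientation.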
+ ctx.beginPath();
+ ctx.arc(feature.x(), feature.y(), feature.size(), 0, 2 * Math.PI);
+ ctx.stroke();
+
+ ctx.beginPath();
+ ctx.moveTo(feature.x(), feature.y());
+ const angle = feature.angle() * Math.PI / 180;
ctx.lineTo(
- feature.x + feature.radius * cos(angle),
- feature.y + feature.radius * sin(angle));
+ feature.x() + feature.size() * Math.cos(angle),
+ feature.y() + feature.size() * Math.sin(angle));
+ ctx.stroke();
}
- ctx.stroke();
}
getId(): string {