Merge "Fix TS compile errors with images."
diff --git a/aos/network/web_proxy.cc b/aos/network/web_proxy.cc
index f4a8ce8..f4da7d9 100644
--- a/aos/network/web_proxy.cc
+++ b/aos/network/web_proxy.cc
@@ -53,6 +53,8 @@
}
fetcher_->Fetch();
+ VLOG(2) << "Sending a message with " << GetPacketCount(fetcher_->context())
+ << "packets";
for (int packet_index = 0; packet_index < GetPacketCount(fetcher_->context());
++packet_index) {
flatbuffers::Offset<MessageHeader> message =
@@ -71,8 +73,10 @@
}
bool Subscriber::Compare(const Channel *channel) const {
- return channel->name() == fetcher_->channel()->name() &&
- channel->type() == fetcher_->channel()->type();
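+ // Compare the channel name and type by contents; the previous code compared
+ // the flatbuffers::String pointers, which only tested object identity.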
+ return channel->name()->string_view() ==
+ fetcher_->channel()->name()->string_view() &&
+ channel->type()->string_view() ==
+ fetcher_->channel()->type()->string_view();
}
Connection::Connection(
@@ -156,6 +160,7 @@
webrtc::DataBuffer data_buffer(
rtc::CopyOnWriteBuffer(buffer.data(), buffer.size()),
true /* binary array */);
+ VLOG(2) << "Sending " << buffer.size() << "bytes to a client";
data_channel_->Send(data_buffer);
}
@@ -211,9 +216,11 @@
void Connection::OnMessage(const webrtc::DataBuffer &buffer) {
const message_bridge::Connect *message =
flatbuffers::GetRoot<message_bridge::Connect>(buffer.data.data());
+ VLOG(2) << "Got a connect message " << aos::FlatbufferToJson(message);
for (auto &subscriber : subscribers_) {
// Make sure the subscriber is for a channel on this node.
if (subscriber.get() == nullptr) {
+ VLOG(2) << ": Null subscriber";
continue;
}
bool found_match = false;
diff --git a/aos/network/www/proxy.ts b/aos/network/www/proxy.ts
index 13f4636..704fc85 100644
--- a/aos/network/www/proxy.ts
+++ b/aos/network/www/proxy.ts
@@ -19,13 +19,17 @@
const messageHeader =
WebProxy.MessageHeader.getRootAsMessageHeader(fbBuffer);
// Short circuit if only one packet
- if (messageHeader.packetCount === 1) {
+ if (messageHeader.packetCount() === 1) {
this.handlerFunc(messageHeader.dataArray());
return;
}
if (messageHeader.packetIndex() === 0) {
this.dataBuffer = new Uint8Array(messageHeader.length());
+ this.receivedMessageLength = 0;
+ }
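+ // A packet with no data payload has nothing to append, so stop here.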
+ if (!messageHeader.dataLength()) {
+ return;
}
this.dataBuffer.set(
messageHeader.dataArray(),
@@ -128,7 +132,7 @@
onWebSocketOpen(): void {
this.rtcPeerConnection = new RTCPeerConnection({});
this.rtcPeerConnection.addEventListener(
- 'datachannel', (e) => this.onDataCnannel(e));
+ 'datachannel', (e) => this.onDataChannel(e));
this.dataChannel = this.rtcPeerConnection.createDataChannel('signalling');
this.dataChannel.addEventListener(
'message', (e) => this.onDataChannelMessage(e));
diff --git a/y2020/www/image_handler.ts b/y2020/www/image_handler.ts
index 6ad83a3..ae530ef 100644
--- a/y2020/www/image_handler.ts
+++ b/y2020/www/image_handler.ts
@@ -7,31 +7,41 @@
private imageTimestamp: flatbuffers.Long|null = null;
private result: ImageMatchResult|null = null;
private resultTimestamp: flatbuffers.Long|null = null;
+ private width = 0;
+ private height = 0;
+ private imageSkipCount = 3;
constructor() {
document.body.appendChild(this.canvas);
}
handleImage(data: Uint8Array): void {
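+ // Only process every fourth image; the other three are dropped, which
+ // throttles how often the canvas is redrawn.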
+ if (this.imageSkipCount !== 0) {
+ this.imageSkipCount--;
+ return;
+ } else {
+ this.imageSkipCount = 3;
+ }
+
const fbBuffer = new flatbuffers.ByteBuffer(data);
const image = CameraImage.getRootAsCameraImage(fbBuffer);
this.imageTimestamp = image.monotonicTimestampNs();
- const width = image.cols();
- const height = image.rows();
- if (width === 0 || height === 0) {
+ this.width = image.cols();
+ this.height = image.rows();
+ if (this.width === 0 || this.height === 0) {
return;
}
- this.imageBuffer = new Uint8ClampedArray(width * height * 4); // RGBA
+ this.imageBuffer = new Uint8ClampedArray(this.width * this.height * 4); // RGBA
// Read four bytes (YUYV) from the data and transform into two pixels of
// RGBA for canvas
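+ // Each 4-byte YUYV group (Y0 U Y1 V) covers two horizontal pixels, which is
+ // why i steps by 2 and the byte offsets are scaled by 2.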
- for (const j = 0; j < height; j++) {
- for (const i = 0; i < width; i += 2) {
- const y1 = image.data((j * width + i) * 2);
- const u = image.data((j * width + i) * 2 + 1);
- const y2 = image.data((j * width + i + 1) * 2);
- const v = image.data((j * width + i + 1) * 2 + 1);
+ for (let j = 0; j < this.height; j++) {
+ for (let i = 0; i < this.width; i += 2) {
+ const y1 = image.data((j * this.width + i) * 2);
+ const u = image.data((j * this.width + i) * 2 + 1);
+ const y2 = image.data((j * this.width + i + 1) * 2);
+ const v = image.data((j * this.width + i + 1) * 2 + 1);
// Based on https://en.wikipedia.org/wiki/YUV#Converting_between_Y%E2%80%B2UV_and_RGB
const c1 = y1 - 16;
@@ -39,53 +49,55 @@
const d = u - 128;
const e = v - 128;
- imageBuffer[(j * width + i) * 4 + 0] = (298 * c1 + 409 * e + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 1] =
- (298 * c1 - 100 * d - 208 * e + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 2] = (298 * c1 + 516 * d + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 3] = 255;
- imageBuffer[(j * width + i) * 4 + 4] = (298 * c2 + 409 * e + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 5] =
- (298 * c2 - 100 * d - 208 * e + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 6] = (298 * c2 + 516 * d + 128) >> 8;
- imageBuffer[(j * width + i) * 4 + 7] = 255;
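+ // Fixed-point form of the BT.601 YCbCr-to-RGB conversion: coefficients are
+ // scaled by 256, and the result is rounded (+128) and shifted back down.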
+ this.imageBuffer[(j * this.width + i) * 4 + 0] =
+     (298 * c1 + 409 * e + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 1] =
+     (298 * c1 - 100 * d - 208 * e + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 2] =
+     (298 * c1 + 516 * d + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 3] = 255;
+ this.imageBuffer[(j * this.width + i) * 4 + 4] =
+     (298 * c2 + 409 * e + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 5] =
+     (298 * c2 - 100 * d - 208 * e + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 6] =
+     (298 * c2 + 516 * d + 128) >> 8;
+ this.imageBuffer[(j * this.width + i) * 4 + 7] = 255;
}
}
- draw();
+ this.draw();
}
handleImageMetadata(data: Uint8Array): void {
const fbBuffer = new flatbuffers.ByteBuffer(data);
this.result = ImageMatchResult.getRootAsImageMatchResult(fbBuffer);
- this.resultTimestamp = result.imageMonotonicTimestampNs();
- draw();
+ this.resultTimestamp = this.result.imageMonotonicTimestampNs();
+ this.draw();
}
draw(): void {
- if (imageTimestamp.low !== resultTimestamp.low ||
- imageTimestamp.high !== resultTimestamp.high) {
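+ // Wait until both an image and a vision result with matching timestamps
+ // (i.e. for the same frame) have arrived before drawing.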
+ if (!this.imageTimestamp || !this.resultTimestamp ||
+ this.imageTimestamp.low !== this.resultTimestamp.low ||
+ this.imageTimestamp.high !== this.resultTimestamp.high) {
return;
}
const ctx = this.canvas.getContext('2d');
- this.canvas.width = width;
- this.canvas.height = height;
- const idata = ctx.createImageData(width, height);
+ this.canvas.width = this.width;
+ this.canvas.height = this.height;
+ const idata = ctx.createImageData(this.width, this.height);
idata.data.set(this.imageBuffer);
ctx.putImageData(idata, 0, 0);
- ctx.beginPath();
- for (const feature of this.result.getFeatures()) {
+ for (let i = 0; i < this.result.featuresLength(); i++) {
+ const feature = this.result.features(i);
// Based on OpenCV drawKeypoint.
- ctx.arc(feature.x, feature.y, feature.size, 0, 2 * Math.PI);
- ctx.moveTo(feature.x, feature.y);
- // TODO(alex): check that angle is correct (0?, direction?)
- const angle = feature.angle * Math.PI / 180;
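+ // Draw each keypoint as a circle scaled by feature.size() plus a line
+ // indicating its orientation, mirroring OpenCV's rich-keypoint style.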
+ ctx.beginPath();
+ ctx.arc(feature.x(), feature.y(), feature.size(), 0, 2 * Math.PI);
+ ctx.stroke();
+
+ ctx.beginPath();
+ ctx.moveTo(feature.x(), feature.y());
+ const angle = feature.angle() * Math.PI / 180;
ctx.lineTo(
- feature.x + feature.radius * cos(angle),
- feature.y + feature.radius * sin(angle));
+ feature.x() + feature.size() * Math.cos(angle),
+ feature.y() + feature.size() * Math.sin(angle));
+ ctx.stroke();
}
- ctx.stroke();
}
getId(): string {