Merge "Adding save and load functionality for camera intrinsic calibration"
diff --git a/aos/network/web_proxy.cc b/aos/network/web_proxy.cc
index f4da7d9..5b50072 100644
--- a/aos/network/web_proxy.cc
+++ b/aos/network/web_proxy.cc
@@ -67,6 +67,10 @@
         rtc::CopyOnWriteBuffer(buffer.data(), buffer.size()),
         true /* binary array */);
     for (auto conn : channels_) {
+      if (conn->buffered_amount() > 14000000) {
+        VLOG(1) << "skipping a send because buffered amount is too high";
+        continue;
+      }
       conn->Send(data_buffer);
     }
   }
@@ -160,7 +164,7 @@
   webrtc::DataBuffer data_buffer(
       rtc::CopyOnWriteBuffer(buffer.data(), buffer.size()),
       true /* binary array */);
-  VLOG(2) << "Sending " << buffer.size() << "bytes to a client";
+  VLOG(1) << "Sending " << buffer.size() << "bytes to a client";
   data_channel_->Send(data_buffer);
 }
 
diff --git a/aos/network/web_proxy_main.cc b/aos/network/web_proxy_main.cc
index 0e49760..c674520 100644
--- a/aos/network/web_proxy_main.cc
+++ b/aos/network/web_proxy_main.cc
@@ -24,7 +24,6 @@
 
   LOG(INFO) << "My node is " << aos::FlatbufferToJson(self);
 
-  // TODO(alex): skip fetchers on the wrong node.
   for (uint i = 0; i < config.message().channels()->size(); ++i) {
     auto channel = config.message().channels()->Get(i);
     if (aos::configuration::ChannelIsReadableOnNode(channel, self)) {
diff --git a/aos/network/www/config_handler.ts b/aos/network/www/config_handler.ts
index 3d20f0d..6615f39 100644
--- a/aos/network/www/config_handler.ts
+++ b/aos/network/www/config_handler.ts
@@ -3,13 +3,14 @@
 import {Connection} from './proxy';
 
 export class ConfigHandler {
-  private readonly root_div = document.getElementById('config');
+  private readonly root_div = document.createElement('div');
   private readonly tree_div;
   private config: Configuration|null = null
 
   constructor(private readonly connection: Connection) {
     this.connection.addConfigHandler((config) => this.handleConfig(config));
 
+    document.body.appendChild(this.root_div);
     const show_button = document.createElement('button');
     show_button.addEventListener('click', () => this.toggleConfig());
     const show_text = document.createTextNode('Show/Hide Config');
diff --git a/aos/network/www/proxy.ts b/aos/network/www/proxy.ts
index 9eb0bb6..7bcf575 100644
--- a/aos/network/www/proxy.ts
+++ b/aos/network/www/proxy.ts
@@ -66,6 +66,11 @@
     this.configHandlers.add(handler);
   }
 
+  /**
+   * Add a handler for a specific message type. Until we need to handle
+   * different channel names with the same type differently, this is good
+   * enough.
+   */
   addHandler(id: string, handler: (data: Uint8Array) => void): void {
     this.handlerFuncs.set(id, handler);
   }
@@ -79,7 +84,7 @@
         'message', (e) => this.onWebSocketMessage(e));
   }
 
-  get config() {
+  getConfig() {
     return this.config_internal;
   }
 
@@ -88,7 +93,7 @@
   onDataChannelMessage(e: MessageEvent): void {
     const fbBuffer = new flatbuffers.ByteBuffer(new Uint8Array(e.data));
     this.configInternal = Configuration.getRootAsConfiguration(fbBuffer);
-    for (handler of this.configHandlers) {
+    for (const handler of Array.from(this.configHandlers)) {
       handler(this.configInternal);
     }
   }
@@ -183,11 +188,12 @@
   }
 
   /**
-   * Subscribes to messages.
+   * Subscribes to messages. Only the most recent connect message is in use. Any
+   * channels not specified in the message are implicitly unsubscribed.
    * @param a Finished flatbuffer.Builder containing a Connect message to send.
    */
   sendConnectMessage(builder: any) {
-    const array = builder.assUint8Array();
+    const array = builder.asUint8Array();
     this.dataChannel.send(array.buffer.slice(array.byteOffset));
   }
 }
diff --git a/frc971/control_loops/python/path_edit.py b/frc971/control_loops/python/path_edit.py
index ddc38ac..c186711 100755
--- a/frc971/control_loops/python/path_edit.py
+++ b/frc971/control_loops/python/path_edit.py
@@ -72,6 +72,8 @@
         self.inValue = None
         self.startSet = False
 
+        self.module_path = os.path.dirname(os.path.realpath(sys.argv[0]))
+
     """set extents on images"""
 
     def reinit_extents(self):
@@ -320,35 +322,45 @@
             self.spline_edit = self.points.updates_for_mouse_move(
                 self.index_of_edit, self.spline_edit, self.x, self.y, difs)
 
+    def export_json(self, file_name):
+        self.path_to_export = os.path.join(self.module_path,
+                                           "spline_jsons/" + file_name)
+        if file_name[-5:] != ".json":
+            print("Error: Filename doesn't end in .json")
+        else:
+            # Will export to json file
+            self.mode = Mode.kEditing
+            exportList = [l.tolist() for l in self.points.getSplines()]
+            with open(self.path_to_export, mode='w') as points_file:
+                json.dump(exportList, points_file)
+
+    def import_json(self, file_name):
+        self.path_to_export = os.path.join(self.module_path,
+                                           "spline_jsons/" + file_name)
+        if file_name[-5:] != ".json":
+            print("Error: Filename doesn't end in .json")
+        else:
+            # import from json file
+            self.mode = Mode.kEditing
+            self.points.resetPoints()
+            self.points.resetSplines()
+            print("LOADING SPLINES FROM " + file_name) # Load takes a few seconds
+            with open(self.path_to_export) as points_file:
+                self.points.setUpSplines(json.load(points_file))
+
+            self.points.update_lib_spline()
+            print("SPLINES LOADED")
+
     def do_key_press(self, event, file_name):
         keyval = Gdk.keyval_to_lower(event.keyval)
-        module_path = os.path.dirname(os.path.realpath(sys.argv[0]))
-        self.path_to_export = os.path.join(module_path,
-                                           "spline_jsons/" + file_name)
         if keyval == Gdk.KEY_q:
             print("Found q key and exiting.")
             quit_main_loop()
-        file_name_end = file_name[-5:]
-        if file_name_end != ".json":
-            print("Error: Filename doesn't end in .json")
-        else:
-            if keyval == Gdk.KEY_e:
-                # Will export to json file
-                self.mode = Mode.kEditing
-                print('out to: ', self.path_to_export)
-                exportList = [l.tolist() for l in self.points.getSplines()]
-                with open(self.path_to_export, mode='w') as points_file:
-                    json.dump(exportList, points_file)
+        if keyval == Gdk.KEY_e:
+            self.export_json(file_name)
 
-            if keyval == Gdk.KEY_i:
-                # import from json file
-                self.mode = Mode.kEditing
-                self.points.resetPoints()
-                self.points.resetSplines()
-                with open(self.path_to_export) as points_file:
-                    self.points.setUpSplines(json.load(points_file))
-
-                self.points.update_lib_spline()
+        if keyval == Gdk.KEY_i:
+            self.import_json(file_name)
 
         if keyval == Gdk.KEY_p:
             self.mode = Mode.kPlacing
diff --git a/frc971/control_loops/python/spline_graph.py b/frc971/control_loops/python/spline_graph.py
index 94ee683..7885ec1 100755
--- a/frc971/control_loops/python/spline_graph.py
+++ b/frc971/control_loops/python/spline_graph.py
@@ -42,10 +42,13 @@
     def configure(self, event):
         self.drawing_area.window_shape = (event.width, event.height)
 
-    # handle submitting a constraint
-    def on_submit_click(self, widget):
-        self.drawing_area.inConstraint = int(self.constraint_box.get_text())
-        self.drawing_area.inValue = int(self.value_box.get_text())
+    def output_json_clicked(self, button):
+        print("OUTPUT JSON CLICKED")
+        self.drawing_area.export_json(self.file_name_box.get_text())
+
+    def input_json_clicked(self, button):
+        print("INPUT JSON CLICKED")
+        self.drawing_area.import_json(self.file_name_box.get_text())
 
     def __init__(self):
         Gtk.Window.__init__(self)
@@ -89,6 +92,17 @@
 
         container.put(self.file_name_box, 0, 0)
 
+        self.output_json = Gtk.Button.new_with_label("Output")
+        self.output_json.set_size_request(100, 40)
+        self.output_json.connect("clicked", self.output_json_clicked)
+
+        self.input_json = Gtk.Button.new_with_label("Import")
+        self.input_json.set_size_request(100, 40)
+        self.input_json.connect("clicked", self.input_json_clicked)
+
+        container.put(self.output_json, 210, 0)
+        container.put(self.input_json, 320, 0)
+
         self.show_all()
 
 
diff --git a/y2020/vision/BUILD b/y2020/vision/BUILD
index b95bca3..3baa61b 100644
--- a/y2020/vision/BUILD
+++ b/y2020/vision/BUILD
@@ -80,3 +80,25 @@
         "//third_party:opencv",
     ],
 )
+
+cc_binary(
+    name = "viewer_replay",
+    srcs = [
+        "viewer_replay.cc",
+    ],
+    data = [
+        "//y2020:config.json",
+    ],
+    restricted_to = [
+        "//tools:k8",
+        "//tools:armhf-debian",
+    ],
+    visibility = ["//y2020:__subpackages__"],
+    deps = [
+        ":vision_fbs",
+        "//aos:init",
+        "//aos/events:simulated_event_loop",
+        "//aos/events/logging:logger",
+        "//third_party:opencv",
+    ],
+)
diff --git a/y2020/vision/viewer_replay.cc b/y2020/vision/viewer_replay.cc
new file mode 100644
index 0000000..82ab11a
--- /dev/null
+++ b/y2020/vision/viewer_replay.cc
@@ -0,0 +1,65 @@
+#include <opencv2/calib3d.hpp>
+#include <opencv2/features2d.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/imgproc.hpp>
+
+#include "aos/events/logging/logger.h"
+#include "aos/events/simulated_event_loop.h"
+#include "aos/init.h"
+#include "y2020/vision/vision_generated.h"
+
+DEFINE_string(config, "y2020/config.json", "Path to the config file to use.");
+DEFINE_string(logfile, "", "Path to the log file to use.");
+DEFINE_string(node, "pi1", "Node name to replay.");
+DEFINE_string(image_save_prefix, "/tmp/img",
+              "Prefix to use for saving images from the logfile.");
+
+namespace frc971 {
+namespace vision {
+namespace {
+
+void ViewerMain() {
+  CHECK(!FLAGS_logfile.empty()) << "You forgot to specify a logfile.";
+
+  aos::FlatbufferDetachedBuffer<aos::Configuration> config =
+      aos::configuration::ReadConfig(FLAGS_config);
+
+  aos::logger::LogReader reader(FLAGS_logfile, &config.message());
+  reader.Register();
+  const aos::Node *node = nullptr;
+  if (aos::configuration::MultiNode(reader.configuration())) {
+    node = aos::configuration::GetNode(reader.configuration(), FLAGS_node);
+  }
+  std::unique_ptr<aos::EventLoop> event_loop =
+      reader.event_loop_factory()->MakeEventLoop("player", node);
+
+  int image_count = 0;
+  event_loop->MakeWatcher("/camera", [&image_count](const CameraImage &image) {
+    cv::Mat image_mat(image.rows(), image.cols(), CV_8U);
+    CHECK(image_mat.isContinuous());
+    const int number_pixels = image.rows() * image.cols();
+    for (int i = 0; i < number_pixels; ++i) {
+      reinterpret_cast<uint8_t *>(image_mat.data)[i] =
+          image.data()->data()[i * 2];
+    }
+
+    cv::imshow("Display", image_mat);
+    if (!FLAGS_image_save_prefix.empty()) {
+      cv::imwrite(FLAGS_image_save_prefix + std::to_string(image_count++) +
+                      ".png",
+                  image_mat);
+    }
+    cv::waitKey(1);
+  });
+
+  reader.event_loop_factory()->Run();
+}
+
+}  // namespace
+}  // namespace vision
+}  // namespace frc971
+
+// Quick and lightweight grayscale viewer for images
+int main(int argc, char **argv) {
+  aos::InitGoogle(&argc, &argv);
+  frc971::vision::ViewerMain();
+}
diff --git a/y2020/www/BUILD b/y2020/www/BUILD
index e8c227d..a035d84 100644
--- a/y2020/www/BUILD
+++ b/y2020/www/BUILD
@@ -25,6 +25,7 @@
     ],
     deps = [
         "//aos/network/www:proxy",
+        "//y2020/vision/sift:sift_ts_fbs",
     ],
 )
 
diff --git a/y2020/www/field.html b/y2020/www/field.html
index 37452a3..ad449db 100644
--- a/y2020/www/field.html
+++ b/y2020/www/field.html
@@ -5,8 +5,6 @@
     <link rel="stylesheet" href="styles.css">
   </head>
   <body>
-    <div id="config">
-    </div>
   </body>
 </html>
 
diff --git a/y2020/www/field_handler.ts b/y2020/www/field_handler.ts
index a960a63..f05f5d5 100644
--- a/y2020/www/field_handler.ts
+++ b/y2020/www/field_handler.ts
@@ -1,33 +1,36 @@
+import {Configuration, Channel} from 'aos/configuration_generated';
+import {Connection} from 'aos/network/www/proxy';
+import {Connect} from 'aos/network/connect_generated';
 import {FIELD_LENGTH, FIELD_WIDTH, FT_TO_M, IN_TO_M} from './constants';
+import {ImageMatchResult} from 'y2020/vision/sift/sift_generated'
 
+// (0,0) is field center, +X is toward red DS
 const FIELD_SIDE_Y = FIELD_WIDTH / 2;
-const FIELD_CENTER_X = (198.75 + 116) * IN_TO_M;
+const FIELD_EDGE_X = FIELD_LENGTH / 2;
 
 const DS_WIDTH = 69 * IN_TO_M;
 const DS_ANGLE = 20 * Math.PI / 180;
-const DS_END_X = DS_WIDTH * Math.sin(DS_ANGLE);
-const OTHER_DS_X = FIELD_LENGTH - DS_END_X;
+const DS_END_X = FIELD_EDGE_X - DS_WIDTH * Math.sin(DS_ANGLE);
 const DS_INSIDE_Y = FIELD_SIDE_Y - DS_WIDTH * Math.cos(DS_ANGLE);
 
-const TRENCH_START_X = 206.57 * IN_TO_M;
-const TRENCH_END_X = FIELD_LENGTH - TRENCH_START_X;
+const TRENCH_X = 108 * IN_TO_M;
 const TRENCH_WIDTH = 55.5 * IN_TO_M;
 const TRENCH_INSIDE = FIELD_SIDE_Y - TRENCH_WIDTH;
 
 const SPINNER_LENGTH = 30 * IN_TO_M;
-const SPINNER_TOP_X = 374.59 * IN_TO_M;
+const SPINNER_TOP_X = 374.59 * IN_TO_M - FIELD_EDGE_X;
 const SPINNER_BOTTOM_X = SPINNER_TOP_X - SPINNER_LENGTH;
 
-const SHIELD_BOTTOM_X = FIELD_CENTER_X - 116 * IN_TO_M;
+const SHIELD_BOTTOM_X = -116 * IN_TO_M;
 const SHIELD_BOTTOM_Y = 43.75 * IN_TO_M;
 
-const SHIELD_TOP_X = FIELD_CENTER_X + 116 * IN_TO_M;
+const SHIELD_TOP_X = 116 * IN_TO_M;
 const SHIELD_TOP_Y = -43.75 * IN_TO_M;
 
-const SHIELD_RIGHT_X = FIELD_CENTER_X - 51.06 * IN_TO_M;
+const SHIELD_RIGHT_X = -51.06 * IN_TO_M;
 const SHIELD_RIGHT_Y = -112.88 * IN_TO_M;
 
-const SHIELD_LEFT_X = FIELD_CENTER_X + 51.06 * IN_TO_M;
+const SHIELD_LEFT_X = 51.06 * IN_TO_M;
 const SHIELD_LEFT_Y = 112.88 * IN_TO_M;
 
 const SHIELD_CENTER_TOP_X = (SHIELD_TOP_X + SHIELD_LEFT_X) / 2
@@ -36,18 +39,78 @@
 const SHIELD_CENTER_BOTTOM_X = (SHIELD_BOTTOM_X + SHIELD_RIGHT_X) / 2
 const SHIELD_CENTER_BOTTOM_Y = (SHIELD_BOTTOM_Y + SHIELD_RIGHT_Y) / 2
 
-const INITIATION_X = 120 * IN_TO_M;
-const FAR_INITIATION_X = FIELD_LENGTH - 120 * IN_TO_M;
+const INITIATION_X = FIELD_EDGE_X - 120 * IN_TO_M;
 
-const TARGET_ZONE_TIP_X = 30 * IN_TO_M;
+const TARGET_ZONE_TIP_X = FIELD_EDGE_X - 30 * IN_TO_M;
 const TARGET_ZONE_WIDTH = 48 * IN_TO_M;
 const LOADING_ZONE_WIDTH = 60 * IN_TO_M;
 
+/**
+ * All the messages that are required to display camera information on the field.
+ * Messages not readable on the server node are ignored.
+ */
+const REQUIRED_CHANNELS = [
+  {
+    name: '/pi1/camera',
+    type: 'frc971.vision.sift.ImageMatchResult',
+  },
+  {
+    name: '/pi2/camera',
+    type: 'frc971.vision.sift.ImageMatchResult',
+  },
+  {
+    name: '/pi3/camera',
+    type: 'frc971.vision.sift.ImageMatchResult',
+  },
+  {
+    name: '/pi4/camera',
+    type: 'frc971.vision.sift.ImageMatchResult',
+  },
+  {
+    name: '/pi5/camera',
+    type: 'frc971.vision.sift.ImageMatchResult',
+  },
+];
+
 export class FieldHandler {
   private canvas = document.createElement('canvas');
+  private imageMatchResult :ImageMatchResult|null = null
 
-  constructor() {
+  constructor(private readonly connection: Connection) {
     document.body.appendChild(this.canvas);
+
+    this.connection.addConfigHandler(() => {
+      this.sendConnect();
+    });
+    this.connection.addHandler(ImageMatchResult.getFullyQualifiedName(), (res) => {
+      this.handleImageMatchResult(res);
+    });
+  }
+
+  private handleImageMatchResult(data: Uint8Array): void {
+    const fbBuffer = new flatbuffers.ByteBuffer(data);
+    this.imageMatchResult = ImageMatchResult.getRootAsImageMatchResult(fbBuffer);
+  }
+
+  private sendConnect(): void {
+    const builder = new flatbuffers.Builder(512);
+    const channels: flatbuffers.Offset[] = [];
+    for (const channel of REQUIRED_CHANNELS) {
+      const nameFb = builder.createString(channel.name);
+      const typeFb = builder.createString(channel.type);
+      Channel.startChannel(builder);
+      Channel.addName(builder, nameFb);
+      Channel.addType(builder, typeFb);
+      const channelFb = Channel.endChannel(builder);
+      channels.push(channelFb);
+    }
+
+    const channelsFb = Connect.createChannelsToTransferVector(builder, channels);
+    Connect.startConnect(builder);
+    Connect.addChannelsToTransfer(builder, channelsFb);
+    const connect = Connect.endConnect(builder);
+    builder.finish(connect);
+    this.connection.sendConnectMessage(builder);
   }
 
   drawField(): void {
@@ -56,15 +119,15 @@
     const ctx = this.canvas.getContext('2d');
     // draw perimiter
     ctx.beginPath();
-    ctx.moveTo(0, DS_INSIDE_Y);
+    ctx.moveTo(FIELD_EDGE_X, DS_INSIDE_Y);
     ctx.lineTo(DS_END_X, FIELD_SIDE_Y);
-    ctx.lineTo(OTHER_DS_X, FIELD_SIDE_Y);
-    ctx.lineTo(FIELD_LENGTH, DS_INSIDE_Y);
-    ctx.lineTo(FIELD_LENGTH, -DS_INSIDE_Y);
-    ctx.lineTo(OTHER_DS_X, -FIELD_SIDE_Y);
+    ctx.lineTo(-DS_END_X, FIELD_SIDE_Y);
+    ctx.lineTo(-FIELD_EDGE_X, DS_INSIDE_Y);
+    ctx.lineTo(-FIELD_EDGE_X, -DS_INSIDE_Y);
+    ctx.lineTo(-DS_END_X, -FIELD_SIDE_Y);
     ctx.lineTo(DS_END_X, -FIELD_SIDE_Y);
-    ctx.lineTo(0, -DS_INSIDE_Y);
-    ctx.lineTo(0, DS_INSIDE_Y);
+    ctx.lineTo(FIELD_EDGE_X, -DS_INSIDE_Y);
+    ctx.lineTo(FIELD_EDGE_X, DS_INSIDE_Y);
     ctx.stroke();
 
     // draw shield generator
@@ -78,21 +141,20 @@
     ctx.lineTo(SHIELD_CENTER_BOTTOM_X, SHIELD_CENTER_BOTTOM_Y);
     ctx.stroke();
 
-    // draw trenches
-    ctx.strokeStyle = MY_COLOR;
-    ctx.beginPath();
-    ctx.moveTo(TRENCH_START_X, FIELD_SIDE_Y);
-    ctx.lineTo(TRENCH_START_X, TRENCH_INSIDE);
-    ctx.lineTo(TRENCH_END_X, TRENCH_INSIDE);
-    ctx.lineTo(TRENCH_END_X, FIELD_SIDE_Y);
-    ctx.stroke();
+    this.drawHalfField(ctx, 'red');
+    ctx.rotate(Math.PI);
+    this.drawHalfField(ctx, 'blue');
+    ctx.rotate(Math.PI);
+  }
 
-    ctx.strokeStyle = OTHER_COLOR;
+  drawHalfField(ctx, color: string): void {
+    // trenches
+    ctx.strokeStyle = color;
     ctx.beginPath();
-    ctx.moveTo(TRENCH_START_X, -FIELD_SIDE_Y);
-    ctx.lineTo(TRENCH_START_X, -TRENCH_INSIDE);
-    ctx.lineTo(TRENCH_END_X, -TRENCH_INSIDE);
-    ctx.lineTo(TRENCH_END_X, -FIELD_SIDE_Y);
+    ctx.moveTo(TRENCH_X, FIELD_SIDE_Y);
+    ctx.lineTo(TRENCH_X, TRENCH_INSIDE);
+    ctx.lineTo(-TRENCH_X, TRENCH_INSIDE);
+    ctx.lineTo(-TRENCH_X, FIELD_SIDE_Y);
     ctx.stroke();
 
     ctx.strokeStyle = 'black';
@@ -101,46 +163,59 @@
     ctx.lineTo(SPINNER_TOP_X, TRENCH_INSIDE);
     ctx.lineTo(SPINNER_BOTTOM_X, TRENCH_INSIDE);
     ctx.lineTo(SPINNER_BOTTOM_X, FIELD_SIDE_Y);
-    ctx.moveTo(FIELD_LENGTH - SPINNER_TOP_X, -FIELD_SIDE_Y);
-    ctx.lineTo(FIELD_LENGTH - SPINNER_TOP_X, -TRENCH_INSIDE);
-    ctx.lineTo(FIELD_LENGTH - SPINNER_BOTTOM_X, -TRENCH_INSIDE);
-    ctx.lineTo(FIELD_LENGTH - SPINNER_BOTTOM_X, -FIELD_SIDE_Y);
     ctx.stroke();
 
-    // draw initiation lines
     ctx.beginPath();
     ctx.moveTo(INITIATION_X, FIELD_SIDE_Y);
     ctx.lineTo(INITIATION_X, -FIELD_SIDE_Y);
-    ctx.moveTo(FAR_INITIATION_X, FIELD_SIDE_Y);
-    ctx.lineTo(FAR_INITIATION_X, -FIELD_SIDE_Y);
     ctx.stroke();
 
-    // draw target/loading zones
-    ctx.strokeStyle = MY_COLOR;
+    // target/loading
+    ctx.strokeStyle = color;
     ctx.beginPath();
-    ctx.moveTo(0, DS_INSIDE_Y);
+    ctx.moveTo(FIELD_EDGE_X, DS_INSIDE_Y);
     ctx.lineTo(TARGET_ZONE_TIP_X, DS_INSIDE_Y - 0.5 * TARGET_ZONE_WIDTH);
-    ctx.lineTo(0, DS_INSIDE_Y - TARGET_ZONE_WIDTH);
+    ctx.lineTo(FIELD_EDGE_X, DS_INSIDE_Y - TARGET_ZONE_WIDTH);
 
-    ctx.moveTo(FIELD_LENGTH, DS_INSIDE_Y);
-    ctx.lineTo(
-        FIELD_LENGTH - TARGET_ZONE_TIP_X,
-        DS_INSIDE_Y - 0.5 * LOADING_ZONE_WIDTH);
-    ctx.lineTo(FIELD_LENGTH, DS_INSIDE_Y - LOADING_ZONE_WIDTH);
+    ctx.moveTo(-FIELD_EDGE_X, DS_INSIDE_Y);
+    ctx.lineTo(-TARGET_ZONE_TIP_X, DS_INSIDE_Y - 0.5 * LOADING_ZONE_WIDTH);
+    ctx.lineTo(-FIELD_EDGE_X, DS_INSIDE_Y - LOADING_ZONE_WIDTH);
     ctx.stroke();
+  }
 
-    ctx.strokeStyle = OTHER_COLOR;
+  drawCamera(x: number, y: number, theta: number): void {
+    const ctx = this.canvas.getContext('2d');
+    ctx.save();
+    ctx.translate(x, y);
+    ctx.rotate(theta);
     ctx.beginPath();
-    ctx.moveTo(0, -DS_INSIDE_Y);
-    ctx.lineTo(TARGET_ZONE_TIP_X, -(DS_INSIDE_Y - 0.5 * LOADING_ZONE_WIDTH));
-    ctx.lineTo(0, -(DS_INSIDE_Y - LOADING_ZONE_WIDTH));
-
-    ctx.moveTo(FIELD_LENGTH, -DS_INSIDE_Y);
-    ctx.lineTo(
-        FIELD_LENGTH - TARGET_ZONE_TIP_X,
-        -(DS_INSIDE_Y - 0.5 * TARGET_ZONE_WIDTH));
-    ctx.lineTo(FIELD_LENGTH, -(DS_INSIDE_Y - TARGET_ZONE_WIDTH));
+    ctx.moveTo(0.5, 0.5);
+    ctx.lineTo(0, 0);
+    ctx.lineTo(0.5, -0.5);
     ctx.stroke();
+    ctx.beginPath();
+    ctx.arc(0, 0, 0.25, -Math.PI/4, Math.PI/4);
+    ctx.stroke();
+    ctx.restore();
+  }
+
+  draw(): void  {
+    this.reset();
+    this.drawField();
+    //draw cameras
+    if (this.imageMatchResult) {
+      console.log(this.imageMatchResult.cameraPosesLength());
+      for (let i = 0; i < this.imageMatchResult.cameraPosesLength(); i++) {
+        const pose = this.imageMatchResult.cameraPoses(i);
+        const mat = pose.fieldToCamera();
+        const x = mat.data(3);
+        const y = mat.data(7);
+        this.drawCamera(x, y, 0);
+        console.log(x, y);
+      }
+    }
+
+    window.requestAnimationFrame(() => this.draw());
   }
 
   reset(): void {
@@ -148,17 +223,17 @@
     ctx.setTransform(1, 0, 0, 1, 0, 0);
     const size = window.innerHeight * 0.9;
     ctx.canvas.height = size;
-    ctx.canvas.width = size / 2 + 10;
-    ctx.clearRect(0, 0, size, size / 2 + 10);
+    const width = size / 2 + 20;
+    ctx.canvas.width = width;
+    ctx.clearRect(0, 0, width, size);
 
-    // Translate to center of bottom of display.
-    ctx.translate(size / 4, size);
+    // Translate to center of display.
+    ctx.translate(width / 2, size / 2);
     // Coordinate system is:
     // x -> forward.
     // y -> to the left.
     ctx.rotate(-Math.PI / 2);
     ctx.scale(1, -1);
-    ctx.translate(5, 0);
 
     const M_TO_PX = (size - 10) / FIELD_LENGTH;
     ctx.scale(M_TO_PX, M_TO_PX);
diff --git a/y2020/www/field_main.ts b/y2020/www/field_main.ts
index adcaa27..163c817 100644
--- a/y2020/www/field_main.ts
+++ b/y2020/www/field_main.ts
@@ -6,8 +6,7 @@
 
 conn.connect();
 
-const fieldHandler = new FieldHandler();
+const fieldHandler = new FieldHandler(conn);
 
-fieldHandler.reset();
-fieldHandler.drawField();
+fieldHandler.draw();
 
diff --git a/y2020/www/image_handler.ts b/y2020/www/image_handler.ts
index ae530ef..e2ba0b9 100644
--- a/y2020/www/image_handler.ts
+++ b/y2020/www/image_handler.ts
@@ -4,6 +4,7 @@
 export class ImageHandler {
   private canvas = document.createElement('canvas');
   private imageBuffer: Uint8ClampedArray|null = null;
+  private image: CameraImage|null = null;
   private imageTimestamp: flatbuffers.Long|null = null;
   private result: ImageMatchResult|null = null;
   private resultTimestamp: flatbuffers.Long|null = null;
@@ -16,6 +17,7 @@
   }
 
   handleImage(data: Uint8Array): void {
+    console.log('got an image to process');
     if (this.imageSkipCount != 0) {
       this.imageSkipCount--;
       return;
@@ -24,24 +26,28 @@
     }
 
     const fbBuffer = new flatbuffers.ByteBuffer(data);
-    const image = CameraImage.getRootAsCameraImage(fbBuffer);
-    this.imageTimestamp = image.monotonicTimestampNs();
+    this.image = CameraImage.getRootAsCameraImage(fbBuffer);
+    this.imageTimestamp = this.image.monotonicTimestampNs();
 
-    this.width = image.cols();
-    this.height = image.rows();
+    this.width = this.image.cols();
+    this.height = this.image.rows();
     if (this.width === 0 || this.height === 0) {
       return;
     }
-    this.imageBuffer = new Uint8ClampedArray(this.width * this.height * 4); // RGBA
 
+    this.draw();
+  }
+
+  convertImage(): void {
+    this.imageBuffer = new Uint8ClampedArray(this.width * this.height * 4); // RGBA
     // Read four bytes (YUYV) from the data and transform into two pixels of
     // RGBA for canvas
     for (const j = 0; j < this.height; j++) {
       for (const i = 0; i < this.width; i += 2) {
-        const y1 = image.data((j * this.width + i) * 2);
-        const u = image.data((j * this.width + i) * 2 + 1);
-        const y2 = image.data((j * this.width + i + 1) * 2);
-        const v = image.data((j * this.width + i + 1) * 2 + 1);
+        const y1 = this.image.data((j * this.width + i) * 2);
+        const u = this.image.data((j * this.width + i) * 2 + 1);
+        const y2 = this.image.data((j * this.width + i + 1) * 2);
+        const v = this.image.data((j * this.width + i + 1) * 2 + 1);
 
         // Based on https://en.wikipedia.org/wiki/YUV#Converting_between_Y%E2%80%B2UV_and_RGB
         const c1 = y1 - 16;
@@ -59,11 +65,10 @@
         this.imageBuffer[(j * this.width + i) * 4 + 7] = 255;
       }
     }
-
-    this.draw();
   }
 
   handleImageMetadata(data: Uint8Array): void {
+    console.log('got an image match result to process');
     const fbBuffer = new flatbuffers.ByteBuffer(data);
     this.result = ImageMatchResult.getRootAsImageMatchResult(fbBuffer);
     this.resultTimestamp = this.result.imageMonotonicTimestampNs();
@@ -74,8 +79,12 @@
   if (!this.imageTimestamp || !this.resultTimestamp ||
         this.imageTimestamp.low !== this.resultTimestamp.low ||
         this.imageTimestamp.high !== this.resultTimestamp.high) {
+      console.log('image and result do not match');
+      console.log(this.imageTimestamp?.low, this.resultTimestamp?.low);
+      console.log(this.imageTimestamp?.high, this.resultTimestamp?.high);
       return;
     }
+    this.convertImage();
     const ctx = this.canvas.getContext('2d');
 
     this.canvas.width = this.width;
diff --git a/y2020/www/index.html b/y2020/www/index.html
index 97e16d4..20b9785 100644
--- a/y2020/www/index.html
+++ b/y2020/www/index.html
@@ -5,7 +5,5 @@
     <link rel="stylesheet" href="styles.css">
   </head>
   <body>
-    <div id="config">
-    </div>
   </body>
 </html>