Copied everything over from 2012 and removed all of the actual robot code except the drivetrain code.


git-svn-id: https://robotics.mvla.net/svn/frc971/2013/trunk/src@4078 f308d9b7-e957-4cde-b6ac-9a88185e7312
diff --git a/aos/atom_code/camera/Buffers.cpp b/aos/atom_code/camera/Buffers.cpp
new file mode 100644
index 0000000..22b7337
--- /dev/null
+++ b/aos/atom_code/camera/Buffers.cpp
@@ -0,0 +1,160 @@
+#include "Buffers.h"
+#include "V4L2.h"
+
+#include <sys/mman.h>
+
+namespace aos {
+namespace camera {
+
+// Represents an actual v4l2 buffer.
+struct Buffers::Buffer {
+  void *start;  // mmaped address of this driver buffer in our process
+  size_t length; // for munmap
+};
+const std::string Buffers::kFDServerName("/tmp/aos_fd_server");  // AF_UNIX path the Reader serves the camera fd on
+const std::string Buffers::kQueueName("CameraBufferQueue");  // queue carrying one Message per captured frame
+const aos_type_sig Buffers::kSignature{sizeof(Message), 971, 1};  // type signature for kQueueName
+
+int Buffers::CreateSocket(int (*bind_connect)(int, const sockaddr *, socklen_t)) {  // makes an AF_UNIX stream socket and binds or connects it to kFDServerName
+  union af_unix_sockaddr {  // lets us pass a sockaddr_un where the API wants a sockaddr
+    sockaddr_un un;
+    sockaddr addr;
+  } addr;
+  const int r = socket(AF_UNIX, SOCK_STREAM, 0);
+  if (r == -1) {
+    LOG(ERROR, "socket(AF_UNIX, SOCK_STREAM, 0) failed with %d: %s\n",
+        errno, strerror(errno));
+    return -1;
+  }
+  addr.un.sun_family = AF_UNIX;
+  memset(addr.un.sun_path, 0, sizeof(addr.un.sun_path));
+  strncpy(addr.un.sun_path, kFDServerName.c_str(), sizeof(addr.un.sun_path) - 1);  // bounded copy; the memset above guarantees NUL termination
+  if (bind_connect(r, &addr.addr, sizeof(addr.un)) == -1) {
+    LOG(ERROR, "bind_connect(=%p)(%d, %p, %zd) failed with %d: %s\n",
+        bind_connect, r, &addr.addr, sizeof(addr.un), errno, strerror(errno));
+    close(r); // what are we going to do about an error?
+    return -1;
+  }
+  return r;
+}
+
+void Buffers::MMap() {  // maps all kNumBuffers driver buffers into this process
+  buffers_ = new Buffer[kNumBuffers];
+  v4l2_buffer buf;
+  for (unsigned int n = 0; n < kNumBuffers; ++n) {
+    memset(&buf, 0, sizeof(buf));
+    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    buf.memory = V4L2_MEMORY_MMAP;
+    buf.index = n;
+    if (xioctl(fd_, VIDIOC_QUERYBUF, &buf) == -1) {  // ask the driver where buffer n lives
+      LOG(FATAL, "ioctl VIDIOC_QUERYBUF(%d, %p) failed with %d: %s\n",
+          fd_, &buf, errno, strerror(errno));
+    }
+    buffers_[n].length = buf.length;  // remembered for munmap in the destructor
+    buffers_[n].start = mmap(NULL, buf.length,
+                             PROT_READ | PROT_WRITE, MAP_SHARED,
+                             fd_, buf.m.offset);
+    if (buffers_[n].start == MAP_FAILED) {
+      LOG(FATAL, "mmap(NULL, %zd, PROT_READ | PROT_WRITE, MAP_SHARED, %d, %jd)"
+          " failed with %d: %s\n", buf.length, fd_, static_cast<intmax_t>(buf.m.offset),
+          errno, strerror(errno));
+    }
+  }
+}
+
+void Buffers::Release() {  // gives the current frame's message back to the queue system
+  if (message_ != NULL) {
+    aos_queue_free_msg(queue_, message_);
+    message_ = NULL;  // so a second Release() (or the destructor) is a no-op
+  }
+}
+const void *Buffers::GetNext(bool block,
+                       uint32_t *bytesused, timeval *timestamp, uint32_t *sequence) {
+  Release();  // drop our reference to the previous frame first
+
+  // TODO(brians) make sure the camera reader process hasn't died
+  do {
+    if (block) {
+      message_ = static_cast<const Message *>(aos_queue_read_msg(queue_, PEEK | BLOCK));
+    } else {
+      static int index = 0;  // NOTE(review): function-static, so shared by ALL Buffers instances in this process — confirm that is intended
+      message_ = static_cast<const Message *>(aos_queue_read_msg_index(queue_, BLOCK,
+                                                                       &index));  // NOTE(review): passes BLOCK even though block is false; HTTPStreamer currently does not NULL-check this path, so confirm before "fixing"
+    }
+  } while (block && message_ == NULL);
+  if (message_ != NULL) {
+    if (bytesused != NULL) memcpy(bytesused, &message_->bytesused, sizeof(*bytesused));
+    if (timestamp != NULL) memcpy(timestamp, &message_->timestamp, sizeof(*timestamp));
+    if (sequence != NULL) memcpy(sequence, &message_->sequence, sizeof(*sequence));
+    return buffers_[message_->index].start;  // the mmaped frame data itself
+  } else {
+    return NULL;
+  }
+}
+
+int Buffers::FetchFD() {  // receives the camera fd from the Reader process over server_
+  int myfds[Buffers::kNumFDs]; // where to retrieve the fds into
+  char buf[CMSG_SPACE(sizeof(myfds))]; // ancillary data buffer
+
+  iovec data;
+  memset(&data, 0, sizeof(data));
+  char dummy;  // SCM_RIGHTS has to ride along with at least 1 byte of real data
+  data.iov_base = &dummy;
+  data.iov_len = sizeof(dummy);
+  msghdr msg;
+  memset(&msg, 0, sizeof(msg));
+  msg.msg_iov = &data;
+  msg.msg_iovlen = 1;
+  msg.msg_control = buf;
+  msg.msg_controllen = sizeof(buf);
+
+  switch (recvmsg(server_, &msg, 0)) {
+    case 0: // "the peer has performed an orderly shutdown"
+      LOG(FATAL, "the fd server shut down (connected on %d)\n", server_);  // NOTE(review): no break — relies on LOG(FATAL) not returning
+    case -1:
+      LOG(FATAL, "recvmsg(server_(=%d), %p, 0) failed with %d: %s\n",
+          server_, &msg, errno, strerror(errno));
+  }
+  const cmsghdr *const cmsg = CMSG_FIRSTHDR(&msg);
+  if (cmsg == NULL) {
+    LOG(FATAL, "no headers in message\n");
+  }
+  if (cmsg->cmsg_len != CMSG_LEN(sizeof(myfds))) {
+    LOG(FATAL, "got wrong size. got %d but expected %zd\n",
+        cmsg->cmsg_len, CMSG_LEN(sizeof(myfds)));
+  }
+  if (cmsg->cmsg_level != SOL_SOCKET) {
+    LOG(FATAL, "cmsg_level=%d. expected SOL_SOCKET(=%d)\n", cmsg->cmsg_level, SOL_SOCKET);
+  }
+  if (cmsg->cmsg_type != SCM_RIGHTS) {
+    LOG(FATAL, "cmsg_type=%d. expected SCM_RIGHTS(=%d)\n", cmsg->cmsg_type, SCM_RIGHTS);
+  }
+  memcpy(myfds, CMSG_DATA(cmsg), sizeof(myfds));
+  
+  return myfds[0];
+}
+Buffers::Buffers() : server_(CreateSocket(connect)), fd_(FetchFD()), message_(NULL) {  // server_ is initialized first, so FetchFD() can use it
+  MMap();
+  queue_ = aos_fetch_queue(kQueueName.c_str(), &kSignature);
+}
+
+Buffers::~Buffers() {
+  Release();
+
+  for (unsigned i = 0; i < kNumBuffers; ++i) {
+    if (munmap(buffers_[i].start, buffers_[i].length) == -1) {
+      LOG(WARNING, "munmap(%p, %zd) for destruction failed with %d: %s\n",
+          buffers_[i].start, buffers_[i].length, errno, strerror(errno));
+    }
+  }
+  delete[] buffers_;
+
+  if (close(fd_) == -1) {  // NOTE(review): server_ is never closed here — confirm whether leaking that fd is intentional
+    LOG(WARNING, "close(%d) for destruction failed with %d: %s\n",
+        fd_, errno, strerror(errno));
+  }
+}
+
+} // namespace camera
+} // namespace aos
+
diff --git a/aos/atom_code/camera/Buffers.h b/aos/atom_code/camera/Buffers.h
new file mode 100644
index 0000000..7f1206d
--- /dev/null
+++ b/aos/atom_code/camera/Buffers.h
@@ -0,0 +1,93 @@
+#ifndef AOS_ATOM_CODE_CAMERA_CAMERA_BUFFERS_H_
+#define AOS_ATOM_CODE_CAMERA_CAMERA_BUFFERS_H_
+
+#include <sys/socket.h>
+#include <sys/un.h>
+
+#include <string>
+
+#include "aos/aos_core.h"
+#include "aos/common/type_traits.h"
+
+namespace aos {
+namespace camera {
+
+class Reader;
+class Buffers {
+  // It has to do a lot of the same things as all the other ones, but it gets
+  // the information from different places (some of it gets sent out by it).
+  friend class Reader;
+  // Not an abstract name so that an existing one can just be unlinked without
+  // disturbing it if necessary (like with shm_link).
+  static const std::string kFDServerName;
+  // How many v4l2 buffers and other things that depend on that.
+  static const unsigned int kNumBuffers = 10;
+  // How many fds to transfer from the fd server.
+  // Used to make it clear which literal 1s mean "one fd" (ie this constant).
+  static const size_t kNumFDs = 1;
+  // Creates a socket and calls either bind or connect.
+  // Returns a bound/connected socket or -1 (the reason for which will already
+  // have been logged at ERROR).
+  static int CreateSocket(int (*bind_connect)(int, const sockaddr *, socklen_t));
+
+  // File descriptor connected to the fd server. Used for detecting if the
+  // camera reading process has died.
+  // A value of -2 means don't check.
+  const int server_;
+  // Gets the fd (using server_).
+  int FetchFD();
+  // File descriptor for the v4l2 device (that's valid in this process of
+  // course).
+  const int fd_;
+
+  struct Buffer;
+  // Buffer[kNumBuffers]
+  Buffer *buffers_;
+  struct Message {  // what travels over queue_ for each captured frame
+    uint32_t index;  // which element of buffers_ holds the image data
+    uint32_t bytesused;
+    timeval timestamp;
+    uint32_t sequence;
+  };
+  static_assert(shm_ok<Message>::value, "it's going through queues");
+  // The current one. Sometimes NULL.
+  const Message *message_;
+  static const std::string kQueueName;
+  static const aos_type_sig kSignature;
+  // NULL for the Reader one.
+  aos_queue *queue_;
+  // Make the actual mmap calls.
+  // Called by Buffers() automatically.
+  void MMap();
+ public:
+  Buffers();
+  // Will clean everything up.
+  // So that HTTPStreamer can create/destroy one for each client to make it
+  // simpler.
+  ~Buffers();
+
+  // Retrieves the next image. Will return the current one if it hasn't yet.
+  // Calls Release() at the beginning.
+  // NOTE: this means that the caller can't keep using references to the old
+  // return value after calling this function again
+  // block is whether to return NULL or wait for a new one
+  // the last 3 output parameters will be filled in straight from the
+  // v4l2_buffer if they're not NULL
+  // (see <http://v4l2spec.bytesex.org/spec/x5953.htm#V4L2-BUFFER> for details)
+  // NOTE: guaranteed to return a valid pointer if block is true
+  const void *GetNext(bool block,
+                uint32_t *bytesused, timeval *timestamp, uint32_t *sequence);
+  // Releases the most recent frame buffer. Automatically called by GetNext and
+  // the destructor. Safe to call multiple times without getting frames in
+  // between.
+  void Release();
+
+  // How big images are.
+  static const int32_t kWidth = 640, kHeight = 480;
+};
+
+} // namespace camera
+} // namespace aos
+
+#endif
+
diff --git a/aos/atom_code/camera/HTTPStreamer.cpp b/aos/atom_code/camera/HTTPStreamer.cpp
new file mode 100644
index 0000000..ad339a8
--- /dev/null
+++ b/aos/atom_code/camera/HTTPStreamer.cpp
@@ -0,0 +1,388 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <errno.h>
+#include <malloc.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <sys/time.h>
+#include <sys/mman.h>
+#include <netinet/ip.h>
+#include <sys/socket.h>
+#include <inttypes.h>
+
+#include <vector>
+
+#include "aos/common/Configuration.h"
+#include "aos/aos_core.h"
+#include "aos/atom_code/camera/Buffers.h"
+
+namespace aos {
+namespace camera {
+
+namespace {
+
+// Apparently the standard JPEG DHT (Huffman-table) segment that kWriteDHT splices into frames; file-scope because it doesn't like being a static class member.
+static const unsigned char dht_table[] = {
+  0xff, 0xc4, 0x01, 0xa2, 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01,
+  0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02,
+  0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x01, 0x00, 0x03,
+  0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00,
+  0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
+  0x0a, 0x0b, 0x10, 0x00, 0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x03, 0x05,
+  0x05, 0x04, 0x04, 0x00, 0x00, 0x01, 0x7d, 0x01, 0x02, 0x03, 0x00, 0x04,
+  0x11, 0x05, 0x12, 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07, 0x22,
+  0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08, 0x23, 0x42, 0xb1, 0xc1, 0x15,
+  0x52, 0xd1, 0xf0, 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16, 0x17,
+  0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x34, 0x35, 0x36,
+  0x37, 0x38, 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a,
+  0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66,
+  0x67, 0x68, 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a,
+  0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95,
+  0x96, 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8,
+  0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2,
+  0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5,
+  0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
+  0xe8, 0xe9, 0xea, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9,
+  0xfa, 0x11, 0x00, 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x07, 0x05,
+  0x04, 0x04, 0x00, 0x01, 0x02, 0x77, 0x00, 0x01, 0x02, 0x03, 0x11, 0x04,
+  0x05, 0x21, 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71, 0x13, 0x22,
+  0x32, 0x81, 0x08, 0x14, 0x42, 0x91, 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33,
+  0x52, 0xf0, 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34, 0xe1, 0x25,
+  0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36,
+  0x37, 0x38, 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a,
+  0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66,
+  0x67, 0x68, 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a,
+  0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94,
+  0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
+  0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba,
+  0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
+  0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
+  0xe8, 0xe9, 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa
+};
+
+const char kFirstHeader[] = "HTTP/1.0 200 OK\r\n"  // sent once per client before the multipart stream starts
+"Connection: close\r\n"
+"Server: AOS/0.0 Camera\r\n"
+"Cache-Control: no-store, no-cache, must-revalidate, pre-check=0, "
+"post-check=0, max-age=0\r\n"  // this and above from mjpg-streamer
+"Pragma: no-cache\r\n"
+"Expires: Mon, 3 Jan 2000 12:34:56 GMT\r\n"  // also from mjpg-streamer
+"Content-Type: multipart/x-mixed-replace; boundary=boundarydonotcross\r\n";
+
+}  // namespace
+
+class HTTPStreamer {
+  // Represents a single client. Handles all reading and writing of sockets and
+  // queues.
+  class Client {
+    enum class State {
+      kReadHeaders,  // waiting for the end of the client's HTTP request headers
+      kWriteHeaders,  // sending kFirstHeader
+      kWriteBoundary,  // these last 2 loop to each other
+      kWriteImage,
+      kWriteDHT,  // happens in the middle of kWriteImage
+    };
+    const int sock_;
+    State state_;
+    inline fd_set *GetFDSetForCurrentState(fd_set *read_fds,  // read_fds for kReadHeaders, write_fds for all other states
+                                           fd_set *write_fds) {
+      if (state_ == State::kReadHeaders) {
+        return read_fds;
+      } else {
+        return write_fds;
+      }
+    }
+    // MUST BE LONG ENOUGH TO HOLD kBoundaryText WITH A BIGGISH # IN IT
+    char scratch_[4096];  // request-header buffer, then per-frame part-header buffer
+    int to_write_;  // number of valid bytes in scratch_ while writing a part header
+    int zero_reads_;  // consecutive 0-byte reads seen in kReadHeaders
+    static const int kMaxZeroReads = 2000;
+    size_t pos_, dht_pos_, dht_start_;  // progress in current buffer / in dht_table / image offset to splice the DHT at (0 = none)
+    Buffers buffers_;  // this client's own view of the camera frames
+    const void *current_;  // current frame data (NULL means we need a new one)
+    uint32_t size_;  // byte count of the current frame
+
+    Client(const Client &);  // copying disabled (pre-C++11 style)
+    void operator=(const Client &);
+
+   public:
+    explicit Client(int sock) : sock_(sock), state_(State::kReadHeaders),
+        zero_reads_(0), pos_(0) {}
+    ~Client() {  // takes ownership of sock and closes it when dropped
+      LOG(DEBUG, "closing socket %d\n", sock_);
+      if (close(sock_) == -1) {
+        LOG(INFO, "closing socket %d for destruction failed with %d: %s\n",
+            sock_, errno, strerror(errno));
+      }
+    }
+    // Set any fds necessary into the 2 arguments.
+    void FDSet(fd_set *read_fds, fd_set *write_fds) {
+      FD_SET(sock_, GetFDSetForCurrentState(read_fds, write_fds));
+    }
+    // The arguments are the same as the last FDSet call (after a successful
+    // select).
+    // Return value is whether or not to keep this one around.
+    bool Process(fd_set *read_fds, fd_set *write_fds) {
+      // if the socket we're waiting on isn't ready
+      if (!FD_ISSET(sock_, GetFDSetForCurrentState(read_fds, write_fds))) {
+        return true;
+      }
+
+      ssize_t ret;
+      switch (state_) {
+        case State::kReadHeaders:  // accumulate the request until \r\n\r\n arrives
+          if (pos_ >= sizeof(scratch_)) {
+            LOG(WARNING, "read too many bytes of headers on sock %d."
+                " somebody should increase the size of scratch_\n", sock_);
+            return false;
+          }
+          if (zero_reads_ > kMaxZeroReads) {
+            LOG(WARNING, "read 0 bytes %d times on sock %d. giving up\n",
+                zero_reads_, sock_);
+            return false;
+          }
+          ret = read(sock_, scratch_ + pos_, sizeof(scratch_) - pos_);
+          if (ret == -1) {
+            LOG(WARNING, "read(%d, %p, %zd) failed with %d: %s\n",
+                sock_, scratch_ + pos_, sizeof(scratch_) - pos_,
+                errno, strerror(errno));
+            return false;
+          }
+          pos_ += ret;
+          // if we just received \r\n\r\n (the end of the headers)
+          if (pos_ >= 4 && scratch_[pos_ - 4] == '\r' && scratch_[pos_ - 3] == '\n' &&
+              scratch_[pos_ - 2] == '\r' && scratch_[pos_ - 1] == '\n') {
+            LOG(INFO, "entering state kWriteHeaders"
+                " after %zd bytes of headers read\n", pos_ - 1);
+            pos_ = 0;
+            state_ = State::kWriteHeaders;
+          }
+          scratch_[(pos_ < sizeof(scratch_)) ? pos_ : (sizeof(scratch_) - 1)] = '\0';  // terminate for the LOG below without ever writing past the end
+          if (ret == 0) {
+            ++zero_reads_;
+          } else {
+            zero_reads_ = 0;
+            LOG(DEBUG, "read %zd bytes of headers scratch_=%s\n",
+                ret, scratch_);
+          }
+          break;
+        case State::kWriteHeaders:
+          // this intentionally doesn't write the terminating \0 on the string
+          ret = write(sock_, kFirstHeader + pos_, sizeof(kFirstHeader) - 1 - pos_);  // - 1 because sizeof includes the '\0'
+          if (ret == -1) {
+            LOG(WARNING, "write(%d, %p, %zd) failed with %d: %s\n",
+                sock_, kFirstHeader + pos_, sizeof(kFirstHeader) - 1 - pos_,
+                errno, strerror(errno));
+          } else {
+            pos_ += ret;
+            if (pos_ >= sizeof(kFirstHeader) - 1) {
+              current_ = NULL;
+              state_ = State::kWriteBoundary;
+            }
+          }
+          break;
+        case State::kWriteBoundary:
+          if (current_ == NULL) {  // then grab a new frame first
+            timeval timestamp;
+            current_ = buffers_.GetNext(false, &size_, &timestamp, NULL);  // NOTE(review): result is not NULL-checked; currently relies on GetNext(false) effectively blocking
+
+            /*static int skip = 0;
+            if (current_ != NULL) skip = (skip + 1) % 30;
+            if (!skip) current_ = NULL;
+            if (current_ == NULL) break;*/
+
+#if 0
+            // set pos_ to where the first header starts
+            for (pos_ = 0; static_cast<const uint8_t *>(current_)[pos_] != 0xFF;
+                 ++pos_);
+#else
+            pos_ = 0;
+#endif
+#if 0
+            // go through the frame looking for the start of frame marker
+            for (dht_start_ = 0;
+                 static_cast<const uint8_t *>(current_)[dht_start_ + 0] !=
+                 0xFF &&
+                 static_cast<const uint8_t *>(current_)[dht_start_ + 1] !=
+                 0xC0 &&
+                 dht_start_ < size_; ++dht_start_)
+              printf("[%zd]=%"PRIx8" ", dht_start_,
+                     static_cast<const uint8_t *>(current_)[dht_start_]);
+            if (dht_start_ >= size_) {
+              LOG(WARNING, "couldn't find start of frame marker\n");
+              return false;
+            }
+#else
+            dht_start_ = 0;  // 0 means "send the whole image, no DHT insertion"
+#endif
+            dht_pos_ = 0;
+
+            // aos.ChannelImageGetter depends on the exact format of this
+            to_write_ = snprintf(scratch_, sizeof(scratch_),
+                                "\r\n--boundarydonotcross\r\n"
+                                "Content-Type: image/jpeg\r\n"
+                                "Content-Length: %"PRId32"\r\n"
+                                "X-Timestamp: %ld.%06ld\r\n"
+                                "\r\n",
+                                size_,
+                                timestamp.tv_sec, timestamp.tv_usec);
+          }
+          ret = write(sock_, scratch_ + pos_, to_write_ - pos_);
+          if (ret == -1) {
+            LOG(WARNING, "write(%d, %p, %zd) failed with %d: %s\n",
+                sock_, scratch_ + pos_, to_write_ - pos_,
+                errno, strerror(errno));
+            return false;
+          } else {
+            pos_ += ret;
+            if (static_cast<ssize_t>(pos_) >= to_write_) {
+              pos_ = 0;
+              state_ = State::kWriteImage;
+            }
+          }
+          break;
+        case State::kWriteImage:  // write up to dht_start_ (or the whole image when it is 0)
+          ret = write(sock_, static_cast<const char *>(current_) + pos_,
+                      ((dht_start_ == 0) ? size_ : dht_start_) - pos_);
+          if (ret == -1) {
+            LOG(WARNING, "write(%d, %p, %zd) failed with %d: %s\n",
+                sock_, static_cast<const char *>(current_) + pos_,
+                ((dht_start_ == 0) ? size_ : dht_start_) - pos_,
+                errno, strerror(errno));
+            return false;
+          } else {
+            pos_ += ret;
+            if (dht_start_ == 0) {
+              if (pos_ >= size_) {  // finished this frame; go get another
+                buffers_.Release();
+                current_ = NULL;
+                state_ = State::kWriteBoundary;
+              }
+            } else {
+              if (pos_ >= dht_start_) {  // reached the splice point
+                dht_start_ = 0;
+                state_ = State::kWriteDHT;
+              }
+            }
+          }
+          break;
+        case State::kWriteDHT:  // splice in dht_table, then resume the image
+          ret = write(sock_, dht_table + dht_pos_,
+                      sizeof(dht_table) - dht_pos_);
+          if (ret == -1) {
+            LOG(WARNING, "write(%d, %p, %zd) failed with %d: %s\n",
+                sock_, dht_table + dht_pos_, sizeof(dht_table) - dht_pos_,
+                errno, strerror(errno));
+            return false;
+          } else {
+            dht_pos_ += ret;
+            if (dht_pos_ >= sizeof(dht_table)) {
+              state_ = State::kWriteImage;
+            }
+          }
+          break;
+        default:
+          LOG(FATAL, "something weird happened\n");
+      }
+      return true;
+    }
+  };
+
+  const int bind_socket_;  // non-blocking listening socket for new stream clients
+
+ public:
+  HTTPStreamer() : bind_socket_(socket(AF_INET, SOCK_STREAM, 0)) {
+    if (bind_socket_ < 0) {
+      LOG(FATAL, "socket(AF_INET, SOCK_STREAM, 0) failed with %d: %s\n",
+          errno, strerror(errno));
+    }
+
+    union {  // so we can pass a sockaddr_in where bind wants a sockaddr
+      sockaddr_in in;
+      sockaddr addr;
+    } bind_sockaddr;
+    memset(&bind_sockaddr, 0, sizeof(bind_sockaddr));
+    bind_sockaddr.in.sin_family = AF_INET;
+    bind_sockaddr.in.sin_port =
+        htons(static_cast<uint16_t>(aos::NetworkPort::kCameraStreamer));
+    bind_sockaddr.in.sin_addr.s_addr = htonl(INADDR_ANY);
+    int optval = 1;
+    setsockopt(bind_socket_, SOL_SOCKET, SO_REUSEADDR, &optval, sizeof(optval));  // NOTE(review): return value unchecked; failure only slows restarts
+    if (bind(bind_socket_, &bind_sockaddr.addr,
+             sizeof(bind_sockaddr.addr)) == -1) {
+      LOG(FATAL, "bind(%d, %p) failed because of %d: %s\n",
+          bind_socket_, &bind_sockaddr.addr, errno, strerror(errno));
+    }
+
+    if (listen(bind_socket_, 10) == -1) {
+      LOG(FATAL, "listen(%d, 10) failed because of %d: %s\n", bind_socket_,
+          errno, strerror(errno));
+    }
+    const int flags = fcntl(bind_socket_, F_GETFL, 0);  // make accepting non-blocking so Run()'s loop never stalls here
+    if (flags == -1) {
+      LOG(FATAL, "fcntl(%d, F_GETFL, 0) failed because of %d: %s\n",
+          bind_socket_, errno, strerror(errno));
+    }
+    if (fcntl(bind_socket_, F_SETFL, flags | O_NONBLOCK) == -1) {
+      LOG(FATAL, "fcntl(%d, F_SETFL, %x) failed because of %d: %s\n",
+          bind_socket_, flags | O_NONBLOCK, errno, strerror(errno));
+    }
+  }
+  void Run() {
+    signal(SIGPIPE, SIG_IGN);  // so writes to closed connections fail with EPIPE instead of killing the process
+
+    std::vector<Client *> clients;  // owned; deleted when Process() says to drop one
+    fd_set read_fds, write_fds;
+    while (true) {
+      FD_ZERO(&read_fds);
+      FD_ZERO(&write_fds);
+      FD_SET(bind_socket_, &read_fds);  // always watch for new connections too
+      for (auto it = clients.begin(); it != clients.end(); ++it) {
+        (*it)->FDSet(&read_fds, &write_fds);
+      }
+      switch (select(FD_SETSIZE, &read_fds, &write_fds,
+                     NULL,  // err
+                     NULL)) {  // timeout
+        case -1:
+          LOG(ERROR, "select(FD_SETSIZE(=%d), %p, %p, NULL, NULL) failed"
+              " because of %d: %s\n", FD_SETSIZE, &read_fds, &write_fds,
+              errno, strerror(errno));
+          continue;
+        case 0:
+          LOG(ERROR, "select with NULL timeout timed out...\n");
+          continue;
+      }
+
+      if (FD_ISSET(bind_socket_, &read_fds)) {
+        const int sock = accept4(bind_socket_, NULL, NULL, SOCK_NONBLOCK);
+        if (sock == -1) {
+          LOG(ERROR, "accept4(%d, NULL, NULL, SOCK_NONBLOCK(=%d) failed"
+              " because of %d: %s\n",
+              bind_socket_, SOCK_NONBLOCK, errno, strerror(errno));
+        } else {
+          clients.push_back(new Client(sock));
+        }
+      }
+
+      std::vector<std::vector<Client *>::iterator> to_remove;
+      for (auto it = clients.begin(); it != clients.end(); ++it) {
+        if (!(*it)->Process(&read_fds, &write_fds)) {
+          to_remove.push_back(it);
+          delete *it;  // the iterator itself stays valid until the erase below
+        }
+      }
+      for (auto it = to_remove.rbegin(); it != to_remove.rend(); ++it) {  // reverse order so earlier stored iterators stay valid
+        LOG(INFO, "removing client\n");
+        clients.erase(*it);
+      }
+    }
+  }
+};
+
+}  // namespace camera
+}  // namespace aos
+
+AOS_RUN_NRT(aos::camera::HTTPStreamer)  // entry-point macro from aos_core; presumably constructs an HTTPStreamer and calls Run() (non-realtime) — confirm against the macro definition
diff --git a/aos/atom_code/camera/Reader.cpp b/aos/atom_code/camera/Reader.cpp
new file mode 100644
index 0000000..9cfee2a
--- /dev/null
+++ b/aos/atom_code/camera/Reader.cpp
@@ -0,0 +1,362 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <errno.h>
+#include <malloc.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <sys/time.h>
+#include <sys/mman.h>
+
+#include <string>
+#include <inttypes.h>
+
+#include "aos/aos_core.h"
+#include "aos/atom_code/camera/V4L2.h"
+#include "aos/atom_code/camera/Buffers.h"
+
+#define CLEAR(x) memset(&(x), 0, sizeof(x))
+
+namespace aos {
+namespace camera {
+
+class Reader {
+  static const char *const dev_name;  // filesystem path of the V4L2 device node (defined out of view)
+
+  // of the camera
+  int fd_;
+  // the bound socket listening for fd requests
+  int server_fd_;
+
+  static const aos_type_sig kRecycleSignature;
+  aos_queue *queue_, *recycle_queue_;  // filled frames out; consumed frames back for re-queueing
+  // the number of buffers currently queued in v4l2
+  uint32_t queued_;
+ public:
+  Reader() {
+    struct stat st;  // only used for the device-node sanity checks below
+    if (stat(dev_name, &st) == -1) {
+      LOG(FATAL, "Cannot identify '%s' because of %d: %s\n",
+              dev_name, errno, strerror(errno));
+    }
+    if (!S_ISCHR(st.st_mode)) {  // V4L2 devices are character devices
+      LOG(FATAL, "%s is no device\n", dev_name);
+    }
+
+    fd_ = open(dev_name, O_RDWR /* required */ | O_NONBLOCK, 0);
+    if (fd_ == -1) {
+      LOG(FATAL, "Cannot open '%s' because of %d: %s\n",
+              dev_name, errno, strerror(errno));
+    }
+
+    queue_ = aos_fetch_queue_recycle(Buffers::kQueueName.c_str(), &Buffers::kSignature,
+                                     &kRecycleSignature, &recycle_queue_);
+    // read off any existing recycled messages
+    while (aos_queue_read_msg(recycle_queue_, NON_BLOCK) != NULL);
+    queued_ = 0;
+
+    InitServer();
+    Init();
+  }
+ private:
+  void InitServer() {  // sets up the AF_UNIX server that hands fd_ to Buffers instances
+    if (unlink(Buffers::kFDServerName.c_str()) == -1 && errno != ENOENT) {  // remove a stale socket from a previous run; ENOENT just means there wasn't one
+      LOG(WARNING, "unlink(kFDServerName(='%s')) failed with %d: %s\n",
+          Buffers::kFDServerName.c_str(), errno, strerror(errno));
+    }
+    if ((server_fd_ = Buffers::CreateSocket(bind)) == -1) {
+      LOG(FATAL, "creating the IPC socket failed\n");
+    }
+    if (listen(server_fd_, 10) == -1) {
+      LOG(FATAL, "listen(%d, 10) failed with %d: %s\n",
+          server_fd_, errno, strerror(errno));
+    }
+  }
+  void SendFD(const int sock) {  // passes the camera fd_ to one connected client via SCM_RIGHTS
+    int myfds[Buffers::kNumFDs]; /* Contains the file descriptors to pass. */
+    myfds[0] = fd_;
+    char buf[CMSG_SPACE(sizeof(myfds))];  /* ancillary data buffer */
+
+    iovec data;
+    memset(&data, 0, sizeof(data));
+    char dummy = 'A';  /* SCM_RIGHTS needs at least 1 byte of real payload */
+    data.iov_base = &dummy;
+    data.iov_len = sizeof(dummy);
+    msghdr msg;
+    memset(&msg, 0, sizeof(msg));
+    msg.msg_iov = &data;
+    msg.msg_iovlen = 1;
+    msg.msg_control = buf;
+    msg.msg_controllen = sizeof(buf);
+    cmsghdr *const cmsg = CMSG_FIRSTHDR(&msg);
+    cmsg->cmsg_level = SOL_SOCKET;
+    cmsg->cmsg_type = SCM_RIGHTS;
+    cmsg->cmsg_len = CMSG_LEN(sizeof(myfds));
+    /* Initialize the payload: */
+    memcpy(CMSG_DATA(cmsg), myfds, sizeof(myfds));
+    if (sendmsg(sock, &msg, 0) == -1) {
+      LOG(ERROR, "sendmsg(%d, %p, 0) failed with %d: %s\n",
+          sock, &msg, errno, strerror(errno));
+    }
+    // leave it open so that the other end can tell if this process dies
+  }
+
+#if 0
+  // if we ever do want to do any of these things, this is how
+  void Stop() {  // currently compiled out: stops v4l2 streaming
+    const v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (xioctl(fd_, VIDIOC_STREAMOFF, &type) == -1) {
+      errno_exit("VIDIOC_STREAMOFF");
+    }
+  }
+  void Close() {  // currently compiled out: closes the camera fd
+    if (close(fd_) == -1)
+      errno_exit("close");
+    fd_ = -1;
+  }
+#endif
+
+  void QueueBuffer(v4l2_buffer *buf) {
+    if (xioctl(fd_, VIDIOC_QBUF, buf) == -1) {
+      LOG(WARNING, "ioctl VIDIOC_QBUF(%d, %p) failed with %d: %s. losing buf #%"PRIu32"\n",
+          fd_, &buf, errno, strerror(errno), buf->index);
+    } else {
+      LOG(DEBUG, "put buf #%"PRIu32" into driver's queue\n", buf->index);
+      ++queued_;
+    }
+  }
+  void ReadFrame() {  // recycles finished buffers, then dequeues + publishes one new frame
+    v4l2_buffer buf;
+    CLEAR(buf);
+    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    buf.memory = V4L2_MEMORY_MMAP;
+    
+    const Buffers::Message *read;
+    do {
+      read = static_cast<const Buffers::Message *>(
+          // we block waiting for one if we can't dequeue one without leaving
+          // the driver <= 2 (to be safe)
+          aos_queue_read_msg(recycle_queue_, (queued_ <= 2) ? BLOCK : NON_BLOCK));
+      if (read != NULL) {
+        buf.index = read->index;  // this buffer is done being read; give it back
+        aos_queue_free_msg(recycle_queue_, read);
+        QueueBuffer(&buf);
+      }
+    } while (read != NULL);
+
+    if (xioctl(fd_, VIDIOC_DQBUF, &buf) == -1) {
+      if (errno != EAGAIN) {  // EAGAIN just means no frame is ready yet (fd_ is O_NONBLOCK)
+        LOG(ERROR, "ioctl VIDIOC_DQBUF(%d, %p) failed with %d: %s\n",
+            fd_, &buf, errno, strerror(errno));
+      }
+      return;
+    }
+    --queued_;
+    if (buf.index >= Buffers::kNumBuffers) {  // paranoia: the index comes from the driver
+      LOG(ERROR, "buf.index (%"PRIu32") is >= kNumBuffers (%u)\n",
+          buf.index, Buffers::kNumBuffers);
+      return;
+    }
+
+    Buffers::Message *const msg = static_cast<Buffers::Message *>(
+        aos_queue_get_msg(queue_));
+    if (msg == NULL) {
+      LOG(WARNING, "couldn't get a message to send buf #%"PRIu32" from queue %p."
+          " re-queueing now\n", buf.index, queue_);
+      QueueBuffer(&buf);  // don't strand the buffer if we can't publish it
+      return;
+    }
+    msg->index = buf.index;
+    msg->bytesused = buf.bytesused;
+    memcpy(&msg->timestamp, &buf.timestamp, sizeof(msg->timestamp));
+    msg->sequence = buf.sequence;
+    if (aos_queue_write_msg_free(queue_, msg, OVERRIDE) == -1) {
+      LOG(WARNING, "sending message %p with buf #%"PRIu32" to queue %p failed."
+          " re-queueing now\n", msg, buf.index, queue_);
+      QueueBuffer(&buf);
+      return;
+    } else {
+      LOG(DEBUG, "sent message off to queue %p with buffer #%"PRIu32"\n",
+          queue_, buf.index);
+    }
+  }
+
+  void init_mmap() {  // asks the driver to allocate kNumBuffers mmap-able buffers
+    v4l2_requestbuffers req;
+    CLEAR(req);
+    req.count = Buffers::kNumBuffers;
+    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    req.memory = V4L2_MEMORY_MMAP;
+    if (xioctl(fd_, VIDIOC_REQBUFS, &req) == -1) {
+      if (EINVAL == errno) {
+        LOG(FATAL, "%s does not support memory mapping\n", dev_name);
+      } else {
+        LOG(FATAL, "ioctl VIDIOC_REQBUFS(%d, %p) failed with %d: %s\n",
+            fd_, &req, errno, strerror(errno));
+      }
+    }
+    queued_ = Buffers::kNumBuffers;  // NOTE(review): assumes all buffers get QBUFed right after this — confirm against the (out-of-view) startup code
+    if (req.count < Buffers::kNumBuffers) {  // the driver may grant fewer buffers than requested
+      LOG(FATAL, "Insufficient buffer memory on %s\n", dev_name);
+    }
+  }
+
+  // Verifies that fd_ is a V4L2 streaming video-capture device, resets
+  // cropping to the default rectangle, configures the capture format
+  // (Buffers::kWidth x Buffers::kHeight MJPEG), and sets up the mmap-ed
+  // buffers. LOG(FATAL)s on any unrecoverable setup failure.
+  void Init() {
+    v4l2_capability cap;
+    if (xioctl(fd_, VIDIOC_QUERYCAP, &cap) == -1) {
+      if (EINVAL == errno) {
+        LOG(FATAL, "%s is no V4L2 device\n",
+                dev_name);
+      } else {
+        LOG(FATAL, "ioctl VIDIOC_QUERYCAP(%d, %p) failed with %d: %s\n",
+            fd_, &cap, errno, strerror(errno));
+      }
+    }
+    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
+      LOG(FATAL, "%s is no video capture device\n",
+              dev_name);
+    }
+    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
+      LOG(FATAL, "%s does not support streaming i/o\n",
+              dev_name);
+    }
+
+    /* Select video input, video standard and tune here. */
+
+    v4l2_cropcap cropcap;
+    CLEAR(cropcap);
+    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (xioctl(fd_, VIDIOC_CROPCAP, &cropcap) == 0) {
+      v4l2_crop crop;
+      crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+      crop.c = cropcap.defrect; /* reset to default */
+
+      if (xioctl(fd_, VIDIOC_S_CROP, &crop) == -1) {
+        switch (errno) {
+          case EINVAL:
+            /* Cropping not supported. */
+            break;
+          default:
+            /* Errors ignored. */
+            break;
+        }
+      }
+    } else {
+      /* Errors ignored. */
+    }
+
+    v4l2_format fmt;
+    CLEAR(fmt);
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    fmt.fmt.pix.width = Buffers::kWidth;
+    fmt.fmt.pix.height = Buffers::kHeight;
+    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
+    fmt.fmt.pix.field = V4L2_FIELD_ANY;
+    //fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
+    //fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
+    if (xioctl(fd_, VIDIOC_S_FMT, &fmt) == -1) {
+      // Fixed log message typo: was "VIDIC_S_FMT".
+      LOG(FATAL, "ioctl VIDIOC_S_FMT(%d, %p) failed with %d: %s\n",
+          fd_, &fmt, errno, strerror(errno));
+    }
+    /* Note VIDIOC_S_FMT may change width and height. */
+
+    /* Buggy driver paranoia. */
+    unsigned int min = fmt.fmt.pix.width * 2;
+    if (fmt.fmt.pix.bytesperline < min)
+      fmt.fmt.pix.bytesperline = min;
+    min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height;
+    if (fmt.fmt.pix.sizeimage < min)
+      fmt.fmt.pix.sizeimage = min;
+
+#if 0
+    // set framerate
+    struct v4l2_streamparm *setfps;
+    setfps = (struct v4l2_streamparm *) calloc(1, sizeof(struct v4l2_streamparm));
+    memset(setfps, 0, sizeof(struct v4l2_streamparm));
+    setfps->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    setfps->parm.capture.timeperframe.numerator = 1;
+    setfps->parm.capture.timeperframe.denominator = 20;
+    if (xioctl(fd_, VIDIOC_S_PARM, setfps) == -1) {
+      LOG(ERROR, "ioctl VIDIOC_S_PARM(%d, %p) failed with %d: %s\n",
+          fd_, setfps, errno, strerror(errno));
+      exit(EXIT_FAILURE);
+    }
+    LOG(INFO, "framerate ended up at %d/%d\n",
+        setfps->parm.capture.timeperframe.numerator,
+        setfps->parm.capture.timeperframe.denominator);
+#endif
+
+    init_mmap();
+  }
+
+  // Queues all of the buffers with the driver for the first time and then
+  // turns on streaming. LOG(FATAL)s (via QueueBuffer's ioctl failing or
+  // VIDIOC_STREAMON failing) if the device can't start capturing.
+  void Start() {
+    LOG(DEBUG, "queueing buffers for the first time\n");
+    v4l2_buffer buf;
+    for (unsigned int i = 0; i < Buffers::kNumBuffers; ++i) {
+      CLEAR(buf);
+      buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+      buf.memory = V4L2_MEMORY_MMAP;
+      buf.index = i;
+      QueueBuffer(&buf);
+    }
+    LOG(DEBUG, "done with first queue\n");
+
+    v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (xioctl(fd_, VIDIOC_STREAMON, &type) == -1) {
+      LOG(FATAL, "ioctl VIDIOC_STREAMON(%d, %p) failed with %d: %s\n",
+          fd_, &type, errno, strerror(errno));
+    }
+  }
+
+ public:
+  // Main loop: multiplexes between new frames arriving on the camera fd and
+  // new clients connecting to the fd-server socket. Never returns.
+  void Run() {
+    Start();
+
+    fd_set fds;
+    timeval tv;
+    while (true) {
+      // HAVE TO DO THIS EACH TIME THROUGH THE LOOP
+      // (Linux select(2) modifies the timeout to reflect time not slept.)
+      tv.tv_sec = 2;
+      tv.tv_usec = 0;
+
+      FD_ZERO(&fds);
+      FD_SET(fd_, &fds);
+      FD_SET(server_fd_, &fds);
+      switch (select(std::max(fd_, server_fd_) + 1, &fds, NULL, NULL, &tv)) {
+        case -1:
+          // EINTR just means a signal interrupted us; retry silently.
+          if (errno != EINTR) {
+            LOG(ERROR, "select(%d, %p, NULL, NULL, %p) failed with %d: %s\n",
+                std::max(fd_, server_fd_) + 1, &fds, &tv, errno, strerror(errno));
+          }
+          continue;
+        case 0:
+          LOG(WARNING, "select timed out\n");
+          continue;
+      }
+
+      // fd_ readable: a captured frame is ready to be dequeued.
+      if (FD_ISSET(fd_, &fds)) {
+        ReadFrame();
+      }
+      // server_fd_ readable: a new client wants the camera fd sent over.
+      if (FD_ISSET(server_fd_, &fds)) {
+        const int sock = accept4(server_fd_, NULL, NULL, SOCK_NONBLOCK);
+        if (sock == -1) {
+          LOG(ERROR, "accept4(%d, NULL, NULL, SOCK_NONBLOCK(=%d) failed with %d: %s\n",
+              server_fd_, SOCK_NONBLOCK, errno, strerror(errno));
+        } else {
+          SendFD(sock);
+        }
+      }
+    }
+  }
+};
+// Static member definitions for Reader.
+const char *const Reader::dev_name = "/dev/video0";
+// Queue signature for the recycle queue that hands used buffers back.
+const aos_type_sig Reader::kRecycleSignature{
+  sizeof(Buffers::Message), 1, Buffers::kNumBuffers};
+
+} // namespace camera
+} // namespace aos
+
+// Entry point: runs Reader as a non-realtime AOS process.
+AOS_RUN_NRT(aos::camera::Reader)
+
diff --git a/aos/atom_code/camera/V4L2.h b/aos/atom_code/camera/V4L2.h
new file mode 100644
index 0000000..bbafe3e
--- /dev/null
+++ b/aos/atom_code/camera/V4L2.h
@@ -0,0 +1,27 @@
+#ifndef AOS_ATOM_CODE_CAMERA_V4L2_H_
+#define AOS_ATOM_CODE_CAMERA_V4L2_H_
+
+// This file handles including everything needed to use V4L2 and has some
+// utility functions.
+
+#include <sys/ioctl.h>
+#include <errno.h>   // for errno/EINTR below
+#include <stdint.h>  // for uintptr_t below
+
+#include <asm/types.h>          /* for videodev2.h */
+#include <linux/videodev2.h>
+
+namespace aos {
+namespace camera {
+
+// Wraps ioctl(2), retrying as long as it fails with EINTR (ie the call was
+// interrupted by a signal before doing anything).
+static inline int xioctl(int fd, int request, void *arg) {
+  int r;
+  do {
+    r = ioctl(fd, request, reinterpret_cast<uintptr_t>(arg));
+  } while (r == -1 && errno == EINTR);
+  return r;
+}
+
+} // namespace camera
+} // namespace aos
+
+#endif
+
+
diff --git a/aos/atom_code/camera/aos.jar_manifest b/aos/atom_code/camera/aos.jar_manifest
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/aos/atom_code/camera/aos.jar_manifest
@@ -0,0 +1 @@
+
diff --git a/aos/atom_code/camera/camera.gyp b/aos/atom_code/camera/camera.gyp
new file mode 100644
index 0000000..e94a6ac
--- /dev/null
+++ b/aos/atom_code/camera/camera.gyp
@@ -0,0 +1,66 @@
+{
+  'targets': [
+    # Native JNI library that the Java camera code loads at runtime
+    # (see NativeLoader.load("aos_camera")).
+    {
+      'target_name': 'aos_camera',
+      'type': 'loadable_module',
+      'sources': [
+        'jni.cpp',
+      ],
+      'dependencies': [
+        '<(AOS)/build/aos.gyp:aos_shared_lib',
+        '<(AOS)/common/network/network.gyp:socket_so',
+        '<(AOS)/common/common.gyp:timing_so',
+        '<(AOS)/atom_code/messages/messages.gyp:messages_so',
+        'private_aos_camera_jar',
+        '<(EXTERNALS):libjpeg',
+      ],
+      'export_dependent_settings': [
+        '<(AOS)/build/aos.gyp:aos_shared_lib',
+        '<(AOS)/common/network/network.gyp:socket_so',
+        '<(AOS)/common/common.gyp:timing_so',
+        '<(AOS)/atom_code/messages/messages.gyp:messages_so',
+        'private_aos_camera_jar',
+      ],
+    },
+    # Jar containing the java/ sources; generates JNI headers for aos.Natives.
+    {
+      'target_name': 'private_aos_camera_jar',
+      'dependencies': [
+        '<(EXTERNALS):javacv',
+      ],
+      'variables': {
+        'srcdirs': ['java'],
+        'gen_headers': ['aos.Natives'],
+      },
+      'export_dependent_settings': [
+        '<(EXTERNALS):javacv',
+      ],
+      'direct_dependent_settings': {
+        'variables': {
+          'jni_libs': ['aos_camera'],
+        },
+      },
+      'includes': ['../../build/java.gypi'],
+      'hard_dependency': 1,
+    },
+    # Standalone binaries for serving and reading camera images.
+    {
+      'target_name': 'CameraHTTPStreamer',
+      'type': 'executable',
+      'sources': [
+        'HTTPStreamer.cpp',
+      ],
+      'dependencies': [
+        '<(AOS)/build/aos.gyp:libaos',
+      ],
+    },
+    {
+      'target_name': 'CameraReader',
+      'type': 'executable',
+      'sources': [
+        'Reader.cpp',
+      ],
+      'dependencies': [
+        '<(AOS)/build/aos.gyp:libaos',
+      ],
+    },
+  ],
+}
diff --git a/aos/atom_code/camera/java/aos/CameraProcessor.java b/aos/atom_code/camera/java/aos/CameraProcessor.java
new file mode 100644
index 0000000..4f6c68d
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/CameraProcessor.java
@@ -0,0 +1,67 @@
+package aos;
+
+import java.io.IOException;
+import java.net.Inet4Address;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.nio.ByteBuffer;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.SocketChannel;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import com.googlecode.javacv.cpp.opencv_core;
+
+/**
+ * Makes implementing code that processes frames from a camera easy.
+ */
+public abstract class CameraProcessor {
+	private static final Logger LOG = Logger.getLogger(CameraProcessor.class.getName());
+	protected final ImageGetter getter;
+	protected final ServableImage start = new ServableImage(ImageGetter.width, ImageGetter.height, opencv_core.IPL_DEPTH_8U, 3);
+	
+	/**
+	 * Parses any arguments it recognizes out of {@code args} and initializes stuff appropriately.
+	 * This includes using {@link QueueLogHandler} for all exceptions and {@link Thread#setDefaultUncaughtExceptionHandler}ing.
+	 * @param args from {@code main}
+	 */
+	protected CameraProcessor(String[] args) throws UnknownHostException, IOException {
+		QueueLogHandler.UseForAll();
+		ReadableByteChannel channel = null;
+		for (int i = 0; i < args.length; ++i) {
+			final String c = args[i];
+			if (c.equals("--host")) {
+				String host = args[++i];
+				final SocketChannel socketChannel = SocketChannel.open(new InetSocketAddress(Inet4Address.getByName(host), 9714));
+				socketChannel.write(ByteBuffer.wrap(new byte[] {'\r', '\n', '\r', '\n'})); // get it past the read headers stage
+				channel = socketChannel;
+			} else {
+				System.err.println("CameraProcessor: warning: unrecognized argument '" + c + "'. ignoring");
+			}
+		}
+		
+		if (channel != null) {
+			getter = new ChannelImageGetter(channel);
+		} else {
+			System.out.println("creating QueueImageGetter");
+			getter = new QueueImageGetter();
+			System.out.println("done");
+		}
+
+		LOG.log(Level.INFO, "CameraProcessor is up");
+		System.err.println("CameraProcessor is up (on stderr)");
+	}
+	
+	protected abstract void RunIteration();
+	
+	protected void Run() {
+		while (true) {
+			if (!getter.get(start.getImage())) {
+				LOG.log(Level.WARNING, "getting image failed");
+				continue;
+			}
+			RunIteration();
+			start.releaseImage();
+		}
+	}
+}
diff --git a/aos/atom_code/camera/java/aos/ChannelImageGetter.java b/aos/atom_code/camera/java/aos/ChannelImageGetter.java
new file mode 100644
index 0000000..511b55b
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/ChannelImageGetter.java
@@ -0,0 +1,158 @@
+package aos;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.SelectableChannel;
+import java.nio.channels.SelectionKey;
+import java.nio.channels.Selector;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * Retrieves images from a {@link InputChannel}. Expects the images in mjpg form.
+ * For now, only accepts streams formatted pretty closely to how aos::camera::HTTPStreamer does it.
+ */
+public class ChannelImageGetter extends JPEGImageGetter {
+	/**
+	 * What to multiply each length by when it needs to allocate a larger buffer to fit an image.
+	 */
+	private static final double extraLength = 1.2;
+	
+	private static final Logger LOG = Logger.getLogger(ChannelImageGetter.class
+			.getName());
+	private final ReadableByteChannel channel;
+	private final Selector selector = Selector.open();
+	private String separator = "--boundarydonotcross\r\n";
+	private ByteBuffer current;
+	private final ByteBuffer headerBuffer = ByteBuffer.allocateDirect(30);
+	private final Map<String, String> headers = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
+	
+	public ChannelImageGetter(ReadableByteChannel channel) throws IOException {
+		this.channel = channel;
+		if (channel instanceof SelectableChannel) {
+			((SelectableChannel)channel).configureBlocking(false);
+		}
+	}
+
+	@Override
+	public ByteBuffer getJPEG() {
+		try {
+			if (!parseHeaders()) {
+				return null;
+			}
+			LOG.log(Level.FINE, "parsed headers " + headers.toString());
+			try {
+				final int length = Integer.parseInt(headers.get("Content-Length"));
+				if (current == null || current.capacity() < length) {
+					LOG.log(Level.INFO, "allocating a new direct buffer of length " + length * extraLength);
+					current = ByteBuffer.allocateDirect((int) (length * extraLength));
+				} else {
+					current.rewind();
+					current.limit(length);
+				}
+			} catch (NumberFormatException e) {
+				LOG.log(Level.WARNING, "couldn't parse '" + headers.get("Content-Length") + "' as a number");
+				return null;
+			}
+			current.put(headerBuffer); // copy out any of the image that got buffered with the headers
+			while (current.hasRemaining()) {
+				channel.read(current);
+			}
+			current.flip();
+		} catch (IOException e) {
+			LOG.log(Level.WARNING, "reading the headers and/or image failed", e);
+			return null;
+		}
+		return current;
+	}
+	// returns success
+	private boolean parseHeaders() throws IOException {
+		// Reads chunks into headerBuffer and parses out headers.
+		// Looks for separator first.
+		
+		headerBuffer.clear();
+		headers.clear();
+		final byte[] separatorBytes = separator.getBytes();
+		int separatorIndex = 0; // how much of the separator has been matched
+		while (headerBuffer.hasRemaining() || headerBuffer.limit() < headerBuffer.capacity()) {
+			if (channel instanceof SelectableChannel) {
+				((SelectableChannel)channel).register(selector, SelectionKey.OP_READ);
+				selector.select();
+			}
+			headerBuffer.limit(headerBuffer.capacity());
+			channel.read(headerBuffer);
+			headerBuffer.flip();
+			if (separatorIndex < separatorBytes.length) {
+				// make sure we don't get part of the way through
+				while (headerBuffer.remaining() >= (separatorBytes.length - separatorIndex)) {
+					final byte c = headerBuffer.get();
+					if (separatorBytes[separatorIndex++] != c) {
+						separatorIndex = 0;
+					}
+					if (separatorIndex == separatorBytes.length) {
+						break;
+					}
+				}
+				headerBuffer.compact();
+			} else {
+				int keyEnd = 0, valueStart = 0;
+				boolean foundEndR = false; // found the end \r
+				while (headerBuffer.hasRemaining()) {
+					final byte c = headerBuffer.get();
+					if (foundEndR) {
+						if (c != '\n') {
+							LOG.log(Level.WARNING, "found \r\n\r but no \n afterwards");
+						} else {
+							return true;
+						}
+					} else if (keyEnd == 0) {
+						if (c == ':') {
+							keyEnd = headerBuffer.position() - 1;
+						} else if (c == '\r') {
+							foundEndR = true;
+						}
+					} else if (valueStart == 0) {
+						if (c != ' ') {
+							valueStart = headerBuffer.position() - 1;
+						}
+					} else {
+						if (c == '\r') {
+							final int valueEnd = headerBuffer.position();
+							final byte[] key = new byte[keyEnd];
+							headerBuffer.position(0);
+							headerBuffer.get(key);
+							final byte[] value = new byte[valueEnd - valueStart - 1];
+							headerBuffer.position(valueStart);
+							headerBuffer.get(value);
+							headers.put(new String(key), new String(value));
+							
+							headerBuffer.get(); // get the \r
+							headerBuffer.get(); // get the \n
+							
+							headerBuffer.compact();
+							headerBuffer.flip();
+							
+							keyEnd = valueStart = 0;
+						}
+					}
+				}
+			}
+		}
+		// if we got here, then it doesn't have space left and we haven't finished
+		LOG.log(Level.WARNING, "got a header that was too long. headerBuffer should be made bigger");
+		return false;
+	}
+
+	@Override
+	public double getTimestamp() {
+		if (headers.containsKey("X-Timestamp")) {
+			return Double.parseDouble(headers.get("X-Timestamp"));
+		} else {
+			throw new UnsupportedOperationException("source stream doesn't have X-Timestamp headers");
+		}
+	}
+
+}
diff --git a/aos/atom_code/camera/java/aos/DebugServer.java b/aos/atom_code/camera/java/aos/DebugServer.java
new file mode 100644
index 0000000..398cb11
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/DebugServer.java
@@ -0,0 +1,357 @@
+package aos;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.channels.ClosedChannelException;
+import java.nio.channels.SelectionKey;
+import java.nio.channels.Selector;
+import java.nio.channels.ServerSocketChannel;
+import java.nio.channels.SocketChannel;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import com.googlecode.javacv.cpp.opencv_core.IplImage;
+
+/**
+ * A server that serves {@link ServableImage}s.
+ */
+public class DebugServer {
+	private static final String initialHeaderString = "HTTP/1.0 200 OK\r\n"
+			+ "Connection: close\r\n"
+			+ "Server: AOS/0.0 Vision Code Debug\r\n"
+			+ "Cache-Control: no-store, no-cache, must-revalidate, pre-check=0, post-check=0, max-age=0\r\n"
+			+ "Pragma: no-cache\r\n"
+			+ "Expires: Mon, 3 Jan 2000 12:34:56 GMT\r\n"
+			+ "Content-Type: multipart/x-mixed-replace; boundary=boundarydonotcross\r\n";
+	private static final String intermediateHeaderFormat = "\r\n--boundarydonotcross\r\n"
+			+ "Content-Type: image/bmp\r\n"
+			+ "Content-Length: %d\r\n"
+			+ "X-Timestamp: %f\r\n"
+			+ "\r\n";
+	private static final ByteBuffer initialHeader;
+	private static final Pattern headerPattern;
+	static {
+		initialHeader = ByteBuffer.wrap(initialHeaderString.getBytes())
+				.asReadOnlyBuffer();
+		// Matches "GET /path[?i=N] HTTP/..." request lines.
+		headerPattern = Pattern.compile("^GET ([^? ]+)(?:\\?i=(\\S*))? HTTP/.*\r\n.*$",
+				Pattern.DOTALL);
+	}
+
+	private static final Logger LOG = Logger.getLogger(DebugServer.class
+			.getName());
+	private final ServerSocketChannel server;
+	private final Collection<Client> clients = new ArrayList<Client>();
+	// NOTE(review): images/palettes are written by addImage() on the caller's
+	// thread and read by the server thread without synchronization --
+	// presumably all addImage() calls happen before clients connect; confirm.
+	private final Map<String, ServableImage> images = new HashMap<String, ServableImage>();
+	private final Map<String, Palette> palettes = new HashMap<String, Palette>();
+	private double timestamp;
+
+	// A BMP color table (BGRA entries) to serve along with 8-bit images.
+	public static class Palette {
+		private final ByteArrayOutputStream out = new ByteArrayOutputStream();
+		private int entries = 0;
+
+		/**
+		 * Adds a new color. All 4 arguments are unsigned bytes.
+		 * @param r red
+		 * @param g green
+		 * @param b blue
+		 * @param a alpha (doesn't seem to work)
+		 * @return this
+		 */
+		public Palette add(int r, int g, int b, int a) {
+			out.write(b);
+			out.write(g);
+			out.write(r);
+			out.write(a);
+			++entries;
+			return this;
+		}
+
+		private int entries() {
+			return entries;
+		}
+		private int bytes() {
+			return entries * 4;
+		}
+		private void writeTo(ByteBuffer buffer) {
+			buffer.put(out.toByteArray());
+		}
+	}
+
+	// One connected HTTP client. Reads the request, then repeatedly streams
+	// BMP-encoded snapshots of the requested image.
+	private class Client {
+		private final int bmpHeaderSize;
+		// NOTE(review): bmpType is read in the constructor before run() ever
+		// assigns it, so it is always 0 here and bmpHeaderSize is always 54.
+		private int bmpType;
+		private Palette palette;
+
+		private final SocketChannel channel;
+		// null until the request has been parsed; doubles as the state flag.
+		private ServableImage image = null;
+		private IplImage img;
+		private int index;
+		private final ByteBuffer[] buffers;
+		private final ByteBuffer initial = initialHeader.duplicate();
+		private final ByteBuffer read = ByteBuffer.allocate(1024);
+
+		public Client(SocketChannel channel) throws IOException {
+			this.channel = channel;
+			channel.configureBlocking(false);
+
+			if (bmpType == 3) {
+				bmpHeaderSize = 122;
+			} else if (bmpType == 0) {
+				bmpHeaderSize = 54;
+			} else {
+				throw new AssertionError("unknown bmpType value " + bmpType);
+			}
+			// [0] gets filled in by createBmpHeader which writes the header into [1]
+			// [2] gets set to the image buffer
+			buffers = new ByteBuffer[] { null, ByteBuffer.allocate(2048), null };
+		}
+
+		public void register(Selector selector) throws ClosedChannelException {
+			// Wait for the request while parsing; wait for writability after.
+			channel.register(
+					selector,
+					(image == null) ? SelectionKey.OP_READ
+							: SelectionKey.OP_WRITE, this);
+		}
+
+		public void close() {
+			if (image != null) {
+				image.setDebugging(false);
+			}
+			if (channel != null) {
+				try {
+					channel.close();
+				} catch (IOException e) {
+					LOG.log(Level.WARNING,
+							"encountered error when closing channel", e);
+				}
+			}
+		}
+
+		// Writes the per-frame HTTP header into buffers[0] and the BMP file
+		// header (+ optional palette) into buffer (which is buffers[1]).
+		private void createBmpHeader(ByteBuffer buffer) {
+			// <http://en.wikipedia.org/wiki/BMP_file_format#File_structure>
+			// explains what these all are
+			// signed/unsigned numbers don't matter because they'd better not be
+			// that big
+			final int paletteSize = (palette == null) ? 0 : palette.bytes();
+			buffers[0] = ByteBuffer.wrap(String.format(intermediateHeaderFormat,
+					bmpHeaderSize + paletteSize + image.imageSize(), timestamp).getBytes());
+			buffer.order(ByteOrder.LITTLE_ENDIAN);
+			buffer.put((byte) 'B').put((byte) 'M');
+			buffer.putInt(bmpHeaderSize + paletteSize + image.imageSize());
+			buffer.putInt(0); // skip ahead 4 bytes
+			buffer.putInt(bmpHeaderSize + paletteSize); // offset to start of image data
+			buffer.putInt(bmpHeaderSize - 14); // size of the rest of the header
+			// BMPs expect image data bottom to top, so -height to fix that
+			buffer.putInt(ImageGetter.width).putInt(-ImageGetter.height);
+			buffer.putShort((short) 1).putShort(image.bitsPerPixel());
+			buffer.putInt(bmpType);
+			buffer.putInt(image.imageSize()); // number of bytes in the actual
+												// image
+			buffer.putInt(2835).putInt(2835); // physical resolution; don't
+												// think it matters
+			buffer.putInt((palette == null) ? 0 : palette.entries());
+			buffer.putInt(0); // # of important colors (0 means all)
+			if (palette != null) {
+				palette.writeTo(buffer);
+			}
+			final int expected;
+			if (bmpType == 0) { // BI_RGB
+				expected = bmpHeaderSize + paletteSize;
+			} else if (bmpType == 3) { // BI_BITFIELDS
+				buffer.putInt(0x0000FF00).putInt(0x00FF0000).putInt(0xFF000000)
+						.putInt(0); // RGBA bitmasks
+				buffer.putInt(0x57696E20); // LCS_WINDOWS_COLOR_SPACE
+				expected = bmpHeaderSize - 48;
+			} else {
+				throw new AssertionError("unknown bmpType value " + bmpType);
+			}
+			if (buffer.position() != expected) {
+				throw new AssertionError(
+						"header ended up the wrong size. expected "
+								+ expected + " but got "
+								+ buffer.position());
+			}
+			buffer.limit(bmpHeaderSize + paletteSize);
+			buffer.rewind();
+		}
+
+		/**
+		 * Does anything that this one can right now.
+		 * 
+		 * @return whether or not to {@link #close()} and remove this one
+		 */
+		public boolean run() throws IOException {
+			// Three states: parsing the request (image == null), sending the
+			// initial HTTP header, then streaming frames.
+			if (image == null) {
+				final int bytesRead = channel.read(read);
+				final String readString = new String(read.array(), 0,
+						read.position());
+				LOG.log(Level.INFO, "read " + bytesRead
+						+ " header bytes position=" + read.position()
+						+ " string='" + readString + "'");
+
+				final Matcher matcher = headerPattern.matcher(readString);
+				if (matcher.matches()) {
+					final String url = matcher.group(1);
+					image = images.get(url);
+					if (image == null) {
+						LOG.log(Level.INFO, "couldn't find an image for url '"
+								+ url + "'. dropping client");
+						return true;
+					} else {
+						LOG.log(Level.INFO, "found an image for url '"
+								+ url + "'");
+					}
+					palette = palettes.get(url);
+					bmpType = 0; // could change this in the future
+					createBmpHeader(buffers[1]);
+					image.setDebugging(true);
+					final String indexString = matcher.group(2);
+					if (indexString != null) {
+						index = Integer.parseInt(indexString);
+					} else {
+						index = 0;
+					}
+					LOG.log(Level.INFO, "using index " + index);
+				} else if (!read.hasRemaining()) {
+					read.flip();
+					LOG.log(Level.WARNING,
+							"ran out of buffer space reading the header. currently have '"
+									+ readString + "'. dropping connection");
+					return true;
+				} else if (bytesRead == -1) {
+					read.flip();
+					LOG.log(Level.WARNING,
+							"reached end of stream for headers without getting anything valid. currently have "
+									+ read.limit()
+									+ " bytes ='"
+									+ readString
+									+ "'. dropping connection");
+					return true;
+				}
+			} else if (initial.hasRemaining()) {
+				channel.write(initial);
+			} else {
+				if (buffers[2] == null) {
+					img = image.getSnapshot(index);
+					if (img == null) {
+						// No new frame yet; try again next time around.
+						return false;
+					} else {
+						buffers[2] = img.getByteBuffer();
+						LOG.log(Level.FINE, "got " + buffers[2]
+								+ " from the image");
+					}
+				}
+				channel.write(buffers);
+				boolean remaining = false;
+				for (ByteBuffer c : buffers) {
+					if (c.hasRemaining()) {
+						remaining = true;
+					}
+				}
+				// Whole frame written: reset for the next one.
+				if (!remaining) {
+					for (ByteBuffer c : buffers) {
+						c.rewind();
+					}
+					buffers[2] = null;
+					image.releaseSnapshot(index, img);
+				}
+			}
+			return false;
+		}
+	}
+
+	public DebugServer(int port) throws IOException {
+		server = ServerSocketChannel.open();
+		server.configureBlocking(false);
+		server.socket().bind(new InetSocketAddress(port), 10);
+		new Thread("DebugServer") {
+			@Override
+			public void run() {
+				try {
+					loop();
+				} catch (Throwable e) {
+					LOG.log(Level.SEVERE,
+							"trouble running the server loop", e);
+					System.exit(1);
+				}
+			}
+		}.start();
+	}
+
+	public void addImage(String name, ServableImage image) {
+		if (image.bitsPerPixel() != 24) {
+			throw new IllegalArgumentException("only 24-bit images are supported");
+			// could support 16 and 32 bpp images by using bmpType of 3
+		}
+		images.put(name, image);
+	}
+	public void addImage(String name, ServableImage image, Palette palette) {
+		if (image.bitsPerPixel() != 8) {
+			throw new IllegalArgumentException("only 8-bit images are supported");
+			// everything should work for 1, 2, and 4 bpp ones too except for padding etc
+		}
+		if (palette.entries() > (1 << image.bitsPerPixel())) {
+			throw new IllegalArgumentException("too many colors in the palette");
+		}
+		images.put(name, image);
+		palettes.put(name, palette);
+	}
+	/**
+	 * This timestamp is written out in the debugging images.
+	 * @param timestamp the current timestamp
+	 */
+	public void setTimestamp(double timestamp) {
+		this.timestamp = timestamp;
+	}
+
+	// Selector loop run on the dedicated thread: accepts new connections and
+	// drives each Client's run(), dropping clients that finish or throw.
+	private void loop() throws IOException {
+		final Selector selector = Selector.open();
+		server.register(selector, SelectionKey.OP_ACCEPT);
+		while (true) {
+			try {
+				for (Client c : clients) {
+					c.register(selector);
+				}
+				selector.select();
+				for (final Iterator<SelectionKey> i = selector.selectedKeys()
+						.iterator(); i.hasNext();) {
+					final SelectionKey c = i.next();
+					if (c.isAcceptable()) {
+						// there's only 1 socket there that can accept
+						final SocketChannel channel = server.accept();
+						if (channel != null) {
+							clients.add(new Client(channel));
+						}
+					} else {
+						final Client client = (Client) c.attachment();
+						try {
+							if (client.run()) {
+								client.close();
+								clients.remove(client);
+							}
+						} catch (Exception e) {
+							LOG.log(Level.INFO, "dropping client " + client
+									+ " because it's run() threw an exception",
+									e);
+							client.close();
+							clients.remove(client);
+						}
+					}
+					i.remove();
+				}
+			} catch (IOException e) {
+				LOG.log(Level.WARNING, "trouble running the server loop", e);
+			}
+		}
+	}
+}
diff --git a/aos/atom_code/camera/java/aos/ImageGetter.java b/aos/atom_code/camera/java/aos/ImageGetter.java
new file mode 100644
index 0000000..f0f1063
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/ImageGetter.java
@@ -0,0 +1,24 @@
+package aos;
+
+import com.googlecode.javacv.cpp.opencv_core;
+import com.googlecode.javacv.cpp.opencv_core.IplImage;
+
+/**
+ * An object that can retrieve images from somewhere.
+ */
+public interface ImageGetter {
+	// Dimensions (in pixels) that every image handled through this interface
+	// is expected to have.
+	public static int width = 640, height = 480;
+
+	/**
+	 * Gets an image.
+	 * @param out Where to write the image to. Must be a 3-channel {@link opencv_core#IPL_DEPTH_8U} image.
+	 * @return whether it succeeded or not
+	 */
+	public boolean get(IplImage out);
+	/**
+	 * Only valid after a successful {@link #get()}.
+	 * @return The timestamp from the most recent frame. Will be in seconds with at least ms accuracy.
+	 */
+	public double getTimestamp();
+}
+
diff --git a/aos/atom_code/camera/java/aos/JPEGDecoder.java b/aos/atom_code/camera/java/aos/JPEGDecoder.java
new file mode 100644
index 0000000..63d12fd
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/JPEGDecoder.java
@@ -0,0 +1,29 @@
+package aos;
+
+import java.nio.ByteBuffer;
+
+import com.googlecode.javacv.cpp.opencv_core;
+import com.googlecode.javacv.cpp.opencv_core.IplImage;
+
+/**
+ * Efficiently decodes a JPEG image from a @{class ByteBuffer} into an @{class IplImage}.
+ * Instances are not safe for use from multiple threads.
+ * The first use of an instance allocates some largish buffers which are never freed. 
+ */
+public class JPEGDecoder {
+	// Opaque handle to the native decoder state; created on first use by the
+	// native side and reused (not freed) for the lifetime of this instance.
+	private final long[] state = new long[1];
+
+	/**
+	 * @param in Must be direct. The {@link ByteBuffer#limit()} of it will be respected.
+	 * @param out Where to write the decoded image to. Must be a 3-channel {@link opencv_core#IPL_DEPTH_8U} image.
+	 * Will be written in the RGB color space.
+	 * @return Whether or not it succeeded. If not, {@code out} is undefined.
+	 */
+	public boolean decode(ByteBuffer in, IplImage out) {
+		if (out.nChannels() != 3 || out.depth() != opencv_core.IPL_DEPTH_8U) {
+			throw new IllegalArgumentException("out is invalid");
+		}
+		return Natives.decodeJPEG(state, in, in.limit(), out.getByteBuffer());
+	}
+}
+
diff --git a/aos/atom_code/camera/java/aos/JPEGImageGetter.java b/aos/atom_code/camera/java/aos/JPEGImageGetter.java
new file mode 100644
index 0000000..84296e7
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/JPEGImageGetter.java
@@ -0,0 +1,26 @@
+package aos;
+
+import java.nio.ByteBuffer;
+
+import com.googlecode.javacv.cpp.opencv_core.IplImage;
+
+/**
+ * Helper class for {@link ImageGetter}s that return JPEG images.
+ */
+public abstract class JPEGImageGetter implements ImageGetter {
+
+	// Decoder instance reused across frames (its internal buffers are
+	// allocated once and kept).
+	private final JPEGDecoder decoder = new JPEGDecoder();
+
+	/**
+	 * Fetches the next JPEG from the subclass and decodes it into {@code out}.
+	 * @return false if no JPEG was available or decoding failed
+	 */
+	@Override
+	public boolean get(IplImage out) {
+		final ByteBuffer encoded = getJPEG();
+		if (encoded == null) {
+			return false;
+		}
+		final boolean success = decoder.decode(encoded, out);
+		release();
+		return success;
+	}
+
+	/** @return the next JPEG image, or null on failure */
+	protected abstract ByteBuffer getJPEG();
+	/** Hook called after each decode so subclasses can recycle the buffer. */
+	protected void release() {}
+
+}
diff --git a/aos/atom_code/camera/java/aos/JavaCVImageGetter.java b/aos/atom_code/camera/java/aos/JavaCVImageGetter.java
new file mode 100644
index 0000000..67398f0
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/JavaCVImageGetter.java
@@ -0,0 +1,56 @@
+package aos;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import com.googlecode.javacv.FrameGrabber;
+import com.googlecode.javacv.cpp.opencv_core.IplImage;
+
+
+/**
+ * Adapts between the JavaCV {@link FrameGrabber} and {@link ImageGetter}.
+ * There is (at least) 1 extra copy involved, so this shouldn't be used if you care about speed.
+ */
+public class JavaCVImageGetter implements ImageGetter {
+	private static final Logger LOG = Logger.getLogger(JavaCVImageGetter.class
+			.getName());
+	private final FrameGrabber grabber;
+
+	public JavaCVImageGetter(FrameGrabber grabber) {
+		this.grabber = grabber;
+		if (grabber.getImageWidth() != width || grabber.getImageHeight() != height) {
+			if (grabber.getImageWidth() == 0 && grabber.getImageHeight() == 0) {
+				LOG.log(Level.WARNING, "grabber says it will give 0x0 images at the start. ignoring");
+			} else {
+				throw new IllegalArgumentException("grabber says it will give images that are the wrong size!!");
+			}
+		}
+	}
+
+	@Override
+	public boolean get(IplImage out) {
+		try {
+			final IplImage frame = grabber.grab();
+			// grab() can return null (e.g. end of stream); previously that
+			// caused a NullPointerException below.
+			if (frame == null) {
+				LOG.log(Level.WARNING, "grabber.grab() returned null");
+				return false;
+			}
+			if (grabber.getImageWidth() != width || grabber.getImageHeight() != height) {
+				LOG.log(Level.SEVERE, "grabber says it will give the wrong size images");
+				return false;
+			}
+			if (out.imageSize() != frame.imageSize()) {
+				// Fixed missing space: message used to read "...-byte imagebut a ...".
+				LOG.log(Level.SEVERE, "the grabber gave a " + frame.imageSize() + "-byte image" +
+						" but a " + out.imageSize() + "-byte image was passed in");
+				return false;
+			}
+			out.getByteBuffer().put(frame.getByteBuffer());
+			return true;
+		} catch (FrameGrabber.Exception e) {
+			LOG.log(Level.WARNING, "grabber.grab() threw an exception", e);
+			return false;
+		}
+	}
+
+	@Override
+	public double getTimestamp() {
+		// grabber.getTimestamp seems to be in ms (all the implementations are at least)
+		return grabber.getTimestamp() / 1000.0;
+	}
+}
diff --git a/aos/atom_code/camera/java/aos/NativeBufferError.java b/aos/atom_code/camera/java/aos/NativeBufferError.java
new file mode 100644
index 0000000..41c794d
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/NativeBufferError.java
@@ -0,0 +1,22 @@
+package aos;
+
+public class NativeBufferError extends NativeError {
+	private static final long serialVersionUID = -5298480149664213316L;
+
+	public NativeBufferError() {
+		super();
+	}
+
+	public NativeBufferError(String message) {
+		super(message);
+	}
+
+	public NativeBufferError(Throwable cause) {
+		super(cause);
+	}
+
+	public NativeBufferError(String message, Throwable cause) {
+		super(message, cause);
+	}
+
+}
diff --git a/aos/atom_code/camera/java/aos/NativeError.java b/aos/atom_code/camera/java/aos/NativeError.java
new file mode 100644
index 0000000..d410234
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/NativeError.java
@@ -0,0 +1,25 @@
+package aos;
+
+/**
+ * An {@link Error} used to report that something went wrong in native code.
+ */
+public class NativeError extends Error {
+	private static final long serialVersionUID = 7394872852984037261L;
+
+	public NativeError() {
+		super();
+	}
+
+	public NativeError(String message) {
+		super(message);
+	}
+
+	public NativeError(Throwable cause) {
+		super(cause);
+	}
+
+	public NativeError(String message, Throwable cause) {
+		super(message, cause);
+	}
+
+}
diff --git a/aos/atom_code/camera/java/aos/NativeLoader.java b/aos/atom_code/camera/java/aos/NativeLoader.java
new file mode 100644
index 0000000..d4fe7a8
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/NativeLoader.java
@@ -0,0 +1,14 @@
+package aos;
+
+/**
+ * Provides support for dealing with loading native code.
+ */
+public class NativeLoader {
+	/**
+	 * Loads a native library out of the one-jar expansion directory.
+	 * @param name the name of the gyp shared_library or loadable_module target to load
+	 */
+	public static void load(String name) {
+		final String expandDir = System.getProperty("one-jar.expand.dir");
+		System.load(expandDir + "/so_libs/lib" + name + ".so");
+	}
+}
diff --git a/aos/atom_code/camera/java/aos/Natives.java b/aos/atom_code/camera/java/aos/Natives.java
new file mode 100644
index 0000000..8ea4e01
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/Natives.java
@@ -0,0 +1,82 @@
+package aos;
+
+import com.googlecode.javacv.cpp.opencv_core;
+
+import java.nio.ByteBuffer;
+import java.util.logging.Level;
+
+/**
+ * <p>Package-private class that has all of the native functions in it to make them easier to implement.</p>
+ * <p>WARNING: The raw native functions are <b>NOT</b> thread-safe!!!!! Any java functions that need to be thread safe MUST be synchronized in JAVA!</p>
+ */
+class Natives {
+	static {
+		// Loading the library also lets nativeInit verify that both sides were
+		// built with the same image dimensions.
+		NativeLoader.load("aos_camera");
+		nativeInit(ImageGetter.width, ImageGetter.height);
+	}
+	private static native void nativeInit(int width, int height);
+	/**
+	 * Empty function to make sure the class gets loaded (which means loading the native library).
+	 */
+	public static void ensureLoaded() {}
+	
+	/**
+	 * Decodes a JPEG from in into out. Both buffers must be direct.
+	 * @param state a long[1] for storing thread-local state
+	 * @param in the JPEG to decode
+	 * @param inBytes how many bytes long the JPEG is
+	 * @param out the buffer to write the decoded image into
+	 * @return Whether or not it succeeded. If not, {@code out} is undefined.
+	 */
+	public static native boolean decodeJPEG(long[] state, ByteBuffer in, int inBytes, ByteBuffer out);
+
+	/**
+	 * Thresholds in into out. Both buffers must be direct.
+	 * All of the short arguments should be unsigned bytes. The min and max parameters specify what colors to accept.
+	 * @param in The image to threshold. Must be a 3-channel {@link opencv_core#IPL_DEPTH_8U} image buffer in the HSV color space.
+	 * @param out Where to write the thresholded image to. Must be a 1-channel {@link opencv_core#IPL_DEPTH_8U} image buffer.
+	 * @param hoffset An offset to be added to the hue value before comparing it to {@code hmin} and {@code hmax}.
+	 * The addition will be performed to a {@code uint8_t}, which will wrap around. This means that it must be positive.
+	 * Useful for finding red values.
+	 */
+	public static native void threshold(ByteBuffer in, ByteBuffer out, short hoffset, char hmin, char hmax,
+			char smin, char smax, char vmin, char vmax);
+	
+	/**
+	 * Converts the colors from in to the format required for dumping them into a BMP image.
+	 * @param in The image to convert. Must be a 3-channel {@link opencv_core#IPL_DEPTH_8U} image buffer in the regular (BGR I think...) color space.
+	 * @param out Where to write the converted image to. Must be a 3-channel {@link opencv_core#IPL_DEPTH_8U} image buffer.
+	 */
+	public static native void convertBGR2BMP(ByteBuffer in, ByteBuffer out);
+
+	/**
+	 * Retrieves a JPEG image from the queue system. Will block until a new one is ready.
+	 * @param id from {@link #queueInit()}
+	 * @return Will be direct. This buffer <b>must not EVER</b> be written to.
+	 */
+	public static native ByteBuffer queueGetJPEG(long id);
+	/**
+	 * Retrieves the latest frame timestamp from the queue system. Must only be called between {@link #queueGetJPEG} and {@link #queueReleaseJPEG}.
+	 * @param id from {@link #queueInit()}
+	 * @return a timestamp
+	 */
+	public static native double queueGetTimestamp(long id);
+	/**
+	 * Releases the last image retrieved from the queue system. The result of the last {@link #queueGetJPEG} will now be invalid.
+	 * @param id from {@link #queueInit()}
+	 */
+	public static native void queueReleaseJPEG(long id);
+	/**
+	 * Prepares to start retrieving JPEGs from the queues.
+	 * @return the ID to pass to the other queue functions
+	 */
+	public static native long queueInit();
+	
+	/**
+	 * Puts the given message into the logging framework.
+	 * @param message the complete log message
+	 * @param level the level (from {@link Level#intValue()})
+	 */
+	public static native void LOG(String message, int level);
+}
+
diff --git a/aos/atom_code/camera/java/aos/QueueImageGetter.java b/aos/atom_code/camera/java/aos/QueueImageGetter.java
new file mode 100644
index 0000000..7ed2f65
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/QueueImageGetter.java
@@ -0,0 +1,34 @@
+package aos;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Retrieves images from the queue system.<br>
+ * {@link #getTimestamp()} returns the value from the v4l2 driver,
+ * which is a CLOCK_MONOTONIC time for most (all?) of them and definitely uvcvideo.
+ */
+public class QueueImageGetter extends JPEGImageGetter {
+	private final long nativeID;
+
+	public QueueImageGetter() {
+		nativeID = Natives.queueInit();
+	}
+
+	@Override
+	public ByteBuffer getJPEG() {
+		final ByteBuffer raw = Natives.queueGetJPEG(nativeID);
+		// Hand out a read-only view so callers can't write to the shared buffer.
+		return (raw == null) ? null : raw.asReadOnlyBuffer();
+	}
+
+	@Override
+	public void release() {
+		Natives.queueReleaseJPEG(nativeID);
+	}
+
+	@Override
+	public double getTimestamp() {
+		return Natives.queueGetTimestamp(nativeID);
+	}
+}
diff --git a/aos/atom_code/camera/java/aos/QueueLogHandler.java b/aos/atom_code/camera/java/aos/QueueLogHandler.java
new file mode 100644
index 0000000..3eb8938
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/QueueLogHandler.java
@@ -0,0 +1,119 @@
+package aos;
+
+import java.lang.Thread.UncaughtExceptionHandler;
+import java.util.logging.Formatter;
+import java.util.logging.Handler;
+import java.util.logging.Level;
+import java.util.logging.LogRecord;
+import java.util.logging.Logger;
+import java.util.logging.SimpleFormatter;
+
+/**
+ * <p>Sends messages to the AOS queue-based logging system. Also sends anything that's at least a {@link Level#WARNING} to {@link System#err}.</p>
+ * <p>Writes out each stack frame of exceptions as a separate line after the first one with a \t at the beginning.</p>
+ */
+public class QueueLogHandler extends Handler {
+	// Lazily-created fallback used when no Formatter has been configured.
+	private Formatter defaultFormatter;
+	
+	/**
+	 * Sets up the logging framework to use an instance of this class for all logging and returns the newly created instance.
+	 */
+	public static QueueLogHandler UseForAll() {
+		Natives.ensureLoaded();
+		final Logger top = Logger.getLogger("");
+		for (Handler c : top.getHandlers()) {
+			top.removeHandler(c);
+		}
+		QueueLogHandler instance = new QueueLogHandler();
+		top.addHandler(instance);
+		top.setLevel(Level.ALL);
+		
+		Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler() {
+			@Override
+			public void uncaughtException(Thread thread, Throwable e) {
+				top.log(Level.SEVERE, "uncaught exception in thread " + thread, e);
+				System.exit(1);
+			}
+		});
+		
+		return instance;
+	}
+
+	@Override
+	public void close() throws SecurityException {
+	}
+	@Override
+	public void flush() {
+	}
+
+	// Returns the configured Formatter if there is one, otherwise a shared
+	// lazily-created SimpleFormatter.
+	private Formatter findFormatter() {
+		final Formatter r = getFormatter();
+		if (r != null) {
+			return r;
+		}
+		if (defaultFormatter != null) {
+			return defaultFormatter;
+		}
+		return defaultFormatter = new SimpleFormatter();
+	}
+	@Override
+	public void publish(LogRecord record) {
+		// getThrown() is null for plain log messages; guard before calling
+		// getCause() on it (previously this NPEd on every record without a
+		// Throwable attached).
+		final Throwable thrown = record.getThrown();
+		if (thrown instanceof UnsatisfiedLinkError ||
+				(thrown != null && thrown.getCause() instanceof UnsatisfiedLinkError)) {
+			record.setThrown(new UnsatisfiedLinkError("are you running a JVM of the correct bitness?").initCause(thrown));
+		}
+		Natives.LOG(record.getSourceClassName() + ": " + record.getSourceMethodName() + ": " +
+				findFormatter().formatMessage(record), record.getLevel().intValue());
+		if (record.getThrown() != null) {
+			logException(record.getThrown(), record.getLevel().intValue(), false);
+		}
+		
+		if (record.getLevel().intValue() >= Level.WARNING.intValue()) {
+			System.err.println(findFormatter().format(record));
+		}
+	}
+	// Logs e (and, recursively, its causes) one line per stack frame, matching
+	// the format documented on the class.
+	private void logException(Throwable e, int level, boolean caused_by) {
+		final StringBuilder thrownString = new StringBuilder();
+		if (caused_by) {
+			thrownString.append("Caused by: ");
+		}
+		thrownString.append(e.getClass().getName());
+		thrownString.append(": ");
+		thrownString.append(e.getLocalizedMessage());
+		Natives.LOG(thrownString.toString(), level);
+		for (StackTraceElement c : e.getStackTrace()) {
+			thrownString.setLength(0);
+			thrownString.append("\t");
+			thrownString.append(c.getClassName());
+			thrownString.append('.');
+			thrownString.append(c.getMethodName());
+			thrownString.append('(');
+			thrownString.append(c.getFileName());
+			thrownString.append(':');
+			thrownString.append(c.getLineNumber());
+			thrownString.append(')');
+			Natives.LOG(thrownString.toString(), level);
+		}
+		if (e.getCause() != null) {
+			logException(e.getCause(), level, true);
+		}
+	}
+
+}
diff --git a/aos/atom_code/camera/java/aos/ServableImage.java b/aos/atom_code/camera/java/aos/ServableImage.java
new file mode 100644
index 0000000..c5db252
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/ServableImage.java
@@ -0,0 +1,145 @@
+package aos;
+
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Queue;
+import java.util.logging.Logger;
+
+import com.googlecode.javacv.cpp.opencv_core.IplImage;
+
+/**
+ * Provides {@link IplImage}s that can be used with a {@link DebugServer}.
+ */
+public class ServableImage {
+	@SuppressWarnings("unused")
+	private static final Logger LOG = Logger.getLogger(ServableImage.class
+			.getName());
+	private final int width, height, depth, channels;
+	// Per-snapshot-index free lists of images available for reuse.
+	private final ArrayList<Queue<IplImage>> queues = new ArrayList<Queue<IplImage>>();
+	// Per-snapshot-index most recent image (null = none or already consumed).
+	private final ArrayList<IplImage> snapshots = new ArrayList<IplImage>();
+	private final IplImage current;
+	// Count of outstanding debugging requests; debug work happens while > 0.
+	private int debugging = 0;
+
+	public ServableImage(int width, int height, int depth, int channels) {
+		this.width = width;
+		this.height = height;
+		this.depth = depth;
+		this.channels = channels;
+
+		current = IplImage.create(width, height, depth, channels);
+	}
+
+	/**
+	 * @return the number of bytes in each image
+	 */
+	public int imageSize() {
+		return width * height * depth * channels / 8;
+	}
+
+	/**
+	 * @return the number of bits in each pixel
+	 */
+	public short bitsPerPixel() {
+		return (short) (depth * channels);
+	}
+
+	/**
+	 * Retrieves an image that should be used for debugging. It clears the value
+	 * when called. {@link #releaseSnapshot} MUST be called with the result.
+	 * 
+	 * @param i
+	 *            Which snapshot to retrieve. 0 means the most recent final
+	 *            image.
+	 * @return The most recent image at this index. {@code null} if there isn't
+	 *         a new one since last time this function was called. Will be in
+	 *         the correct color space to dump into a BMP image if this is a
+	 *         3-channel image.
+	 */
+	public synchronized IplImage getSnapshot(int i) {
+		if (snapshots.size() <= i) {
+			return null;
+		}
+		// Clear the slot (as documented above) so the same image can't be
+		// handed out, and then released into the free-list queue, twice.
+		final IplImage r = snapshots.get(i);
+		snapshots.set(i, null);
+		return r;
+	}
+
+	public synchronized void releaseSnapshot(int i, IplImage image) {
+		queues.get(i).add(image);
+	}
+
+	/**
+	 * This function will return the same image if called repeatedly until
+	 * {@link #releaseImage} is called.
+	 * 
+	 * @return the current image
+	 */
+	public synchronized IplImage getImage() {
+		return current;
+	}
+
+	/**
+	 * Releases the current image (to be potentially sent out for debugging and
+	 * then reused).
+	 */
+	public synchronized void releaseImage() {
+		recordSnapshot(0);
+	}
+
+	/**
+	 * Records a copy of the current image for debugging. It will be accessible
+	 * at <{@code <base path>?i=<the value>}>. Does nothing unless
+	 * {@link #isDebugging()}. This method <i>should</i> get called regardless
+	 * of {@link #isDebugging()} to avoid outputting old debugging images. Note:
+	 * 0 is not a valid snapshot number (it is reserved for the final image
+	 * recorded by {@link #releaseImage}).
+	 * 
+	 * @param i
+	 *            which snapshot this is
+	 */
+	public synchronized void recordSnapshot(int i) {
+		while (queues.size() <= i) {
+			queues.add(null);
+		}
+		if (queues.get(i) == null) {
+			queues.set(i, new ArrayDeque<IplImage>());
+		}
+		while (snapshots.size() <= i) {
+			snapshots.add(null);
+		}
+		// If nobody consumed the previous snapshot, recycle it ourselves.
+		if (snapshots.get(i) != null) {
+			releaseSnapshot(i, snapshots.get(i));
+		}
+		if (isDebugging()) {
+			IplImage snapshot = queues.get(i).poll();
+			if (snapshot == null) {
+				snapshot = IplImage.create(width, height, depth, channels);
+			}
+			if (channels == 3) {
+				Natives.convertBGR2BMP(current.getByteBuffer(),
+						snapshot.getByteBuffer());
+			} else {
+				snapshot.getByteBuffer().put(current.getByteBuffer());
+			}
+			snapshots.set(i, snapshot);
+		} else {
+			snapshots.set(i, null);
+		}
+	}
+
+	/**
+	 * @return whether or not to do extra debug work with the current image
+	 */
+	public synchronized boolean isDebugging() {
+		return debugging > 0;
+	}
+
+	/**
+	 * Whoever turns this on should turn it off when they're done.
+	 */
+	synchronized void setDebugging(boolean debugging) {
+		if (debugging) {
+			++this.debugging;
+		} else {
+			--this.debugging;
+		}
+	}
+}
diff --git a/aos/atom_code/camera/java/aos/Thresholder.java b/aos/atom_code/camera/java/aos/Thresholder.java
new file mode 100644
index 0000000..adf1b49
--- /dev/null
+++ b/aos/atom_code/camera/java/aos/Thresholder.java
@@ -0,0 +1,28 @@
+package aos;
+
+import com.googlecode.javacv.cpp.opencv_core;
+import com.googlecode.javacv.cpp.opencv_core.IplImage;
+
+public class Thresholder {
+	/**
+	 * Thresholds in into out.
+	 * All of the int arguments should be unsigned bytes except hoffset, which should be a signed byte.
+	 * The min and max parameters specify what colors to accept.
+	 * @param in The image to threshold. Must be a 3-channel {@link opencv_core#IPL_DEPTH_8U} image in the HSV color space.
+	 * @param out Where to write the thresholded image to. Must be a 1-channel {@link opencv_core#IPL_DEPTH_8U} image.
+	 * @param hoffset An offset to be added to the hue value before comparing it to {@code hmin} and {@code hmax}.
+	 * The addition will be performed to a {@code uint8_t}, which will wrap around. This means that it must be positive.
+	 * Useful for finding red values.
+	 */
+	public static void threshold(IplImage in, IplImage out, int hoffset, int hmin, int hmax,
+			int smin, int smax, int vmin, int vmax) {
+		// Validate the image formats before handing the raw buffers to native code.
+		final boolean inValid = in.nChannels() == 3 && in.depth() == opencv_core.IPL_DEPTH_8U;
+		if (!inValid) {
+			throw new IllegalArgumentException("in is invalid");
+		}
+		final boolean outValid = out.nChannels() == 1 && out.depth() == opencv_core.IPL_DEPTH_8U;
+		if (!outValid) {
+			throw new IllegalArgumentException("out is invalid");
+		}
+		Natives.threshold(in.getByteBuffer(), out.getByteBuffer(), (short)hoffset,
+				(char)hmin, (char)hmax, (char)smin, (char)smax, (char)vmin, (char)vmax);
+	}
+}
diff --git a/aos/atom_code/camera/jni.cpp b/aos/atom_code/camera/jni.cpp
new file mode 100644
index 0000000..7b7eafa
--- /dev/null
+++ b/aos/atom_code/camera/jni.cpp
@@ -0,0 +1,257 @@
+#include <setjmp.h>
+
+#include "jni/aos_Natives.h"
+#include "aos/atom_code/camera/Buffers.h"
+#include "aos/externals/libjpeg/include/jpeglib.h"
+
+using aos::camera::Buffers;
+
+namespace {
+
+jclass nativeError, bufferError, outOfMemoryError;
+// Looks up a class and stores a new global reference to it in *out.
+// Returns true on failure (a java exception will already be pending).
+bool findClass(JNIEnv *env, const char *name, jclass *out) {
+  jclass local = env->FindClass(name);
+  // Check the results of the JNI calls, not the out-pointer itself (the old
+  // code tested `out` twice, so lookup failures were silently ignored).
+  if (local == NULL) return true;
+  *out = static_cast<jclass>(env->NewGlobalRef(local));
+  if (*out == NULL) return true;
+  env->DeleteLocalRef(local);
+  return false;
+}
+
+// Checks that the size is correct and retrieves the address.
+// An expected_size of 0 means don't check it.
+// If this function returns NULL, a java exception will already have been
+// thrown.
+void *getBufferAddress(JNIEnv *env, jobject obj, jlong expected_size) {
+  if (obj == NULL) {
+    env->ThrowNew(nativeError, "null buffer");
+    return NULL;
+  }
+  if (expected_size != 0) {
+    const jlong capacity = env->GetDirectBufferCapacity(obj);
+    if (expected_size != capacity) {
+      char *message;
+      if (asprintf(&message, "wrong size. expected %lld but got %lld",
+                   expected_size, capacity) < 0) {
+        env->ThrowNew(bufferError, "creating message failed");
+        return NULL;
+      }
+      env->ThrowNew(bufferError, message);
+      free(message);
+      return NULL;
+    }
+  }
+  void *const address = env->GetDirectBufferAddress(obj);
+  if (address == NULL) {
+    env->ThrowNew(bufferError, "couldn't get address");
+  }
+  return address;
+}
+
+const int kImagePixels = Buffers::kWidth * Buffers::kHeight;
+
+// Formats libjpeg's current message and forwards it to the aos logging system.
+void jpeg_log_message(jpeg_common_struct *cinfo, log_level level) {
+  char buf[LOG_MESSAGE_LEN];
+  cinfo->err->format_message(cinfo, buf);
+  log_do(level, "libjpeg: %s\n", buf);
+}
+// Fatal libjpeg error: log it, then longjmp back out of the decode call
+// (cinfo->client_data holds the jmp_buf set up by the caller).
+void jpeg_error_exit(jpeg_common_struct *cinfo) __attribute__((noreturn));
+void jpeg_error_exit(jpeg_common_struct *cinfo) {
+  jpeg_log_message(cinfo, ERROR);
+  longjmp(*static_cast<jmp_buf *>(cinfo->client_data), 1);
+}
+// Warnings (msg_level < 0) also abort the current decode via longjmp;
+// positive trace levels are intentionally dropped.
+void jpeg_emit_message(jpeg_common_struct *cinfo, int msg_level) {
+  if (msg_level < 0) {
+    jpeg_log_message(cinfo, WARNING);
+    longjmp(*static_cast<jmp_buf *>(cinfo->client_data), 2);
+  }
+  // this spews a lot of messages out
+  //jpeg_log_message(cinfo, DEBUG);
+}
+
+// The structure used to hold all of the state for the functions that deal with
+// a Buffers. A pointer to this structure is stored java-side.
+struct BuffersHolder {
+  Buffers buffers;
+  timeval timestamp; // timestamp of the most recently retrieved frame
+  BuffersHolder() : buffers() {}
+};
+
+} // namespace
+
+// JNI entry point: caches global refs to the exception classes, initializes
+// the AOS runtime, and checks that the java side was compiled with the same
+// image dimensions as the native side.
+void Java_aos_Natives_nativeInit(JNIEnv *env, jclass, jint width, jint height) {
+  if (findClass(env, "aos/NativeError", &nativeError)) return;
+  if (findClass(env, "aos/NativeBufferError", &bufferError)) return;
+  if (findClass(env, "java/lang/OutOfMemoryError", &outOfMemoryError)) return;
+
+  aos::InitNRT();
+
+  if (width != Buffers::kWidth || height != Buffers::kHeight) {
+    env->ThrowNew(nativeError, "dimensions mismatch");
+    return;
+  }
+
+  LOG(INFO, "nativeInit finished\n");
+}
+
+static_assert(sizeof(jlong) >= sizeof(void *),
+              "can't stick pointers into jlongs");
+
+// Decodes the JPEG in `inobj` into the RGB buffer `outobj`. The
+// jpeg_decompress_struct is allocated on first use per java-side state slot
+// and cached (via stateArray) for subsequent calls. Returns false on any
+// decode failure (libjpeg errors longjmp back here via the handlers above).
+jboolean Java_aos_Natives_decodeJPEG(JNIEnv *env, jclass, jlongArray stateArray,
+                                     jobject inobj, jint inLength,
+                                     jobject outobj) {
+  unsigned char *const in = static_cast<unsigned char *>(
+      getBufferAddress(env, inobj, 0));
+  if (in == NULL) return false;
+  if (env->GetDirectBufferCapacity(inobj) < inLength) {
+    env->ThrowNew(bufferError, "in is too small");
+    return false;
+  }
+  unsigned char *const out = static_cast<unsigned char *>(
+      getBufferAddress(env, outobj, kImagePixels * 3));
+  if (out == NULL) return false;
+
+  jpeg_decompress_struct *volatile cinfo; // volatile because of the setjmp call
+
+  jlong state;
+  env->GetLongArrayRegion(stateArray, 0, 1, &state);
+  if (env->ExceptionCheck()) return false;
+  if (state == 0) {
+    // First call with this state slot: allocate and initialize the decompress
+    // struct, its error manager, and the jmp_buf, then cache the pointer.
+    cinfo = static_cast<jpeg_decompress_struct *>(malloc(sizeof(*cinfo)));
+    if (cinfo == NULL) {
+      env->ThrowNew(outOfMemoryError, "malloc for jpeg_decompress_struct");
+      return false;
+    }
+    cinfo->err = jpeg_std_error(static_cast<jpeg_error_mgr *>(
+            malloc(sizeof(*cinfo->err))));
+    cinfo->client_data = malloc(sizeof(jmp_buf));
+    cinfo->err->error_exit = jpeg_error_exit;
+    cinfo->err->emit_message = jpeg_emit_message;
+    // if the error handler sees a failure, it needs to clean up
+    // (jpeg_abort_decompress) and then return the failure
+    // set cinfo->client_data to the jmp_buf
+    jpeg_create_decompress(cinfo);
+    state = reinterpret_cast<intptr_t>(cinfo);
+    env->SetLongArrayRegion(stateArray, 0, 1, &state);
+    if (env->ExceptionCheck()) return false;
+  } else {
+    cinfo = reinterpret_cast<jpeg_decompress_struct *>(state);
+  }
+
+  // set up the jump buffer
+  // this has to happen each time
+  if (setjmp(*static_cast<jmp_buf *>(cinfo->client_data))) {
+    jpeg_abort_decompress(cinfo);
+    return false;
+  }
+
+  jpeg_mem_src(cinfo, in, inLength);
+  jpeg_read_header(cinfo, TRUE);
+  if (cinfo->image_width != static_cast<unsigned int>(Buffers::kWidth) ||
+      cinfo->image_height != static_cast<unsigned int>(Buffers::kHeight)) {
+    LOG(WARNING, "got (%ux%u) image but expected (%dx%d)\n", cinfo->image_width,
+        cinfo->image_height, Buffers::kWidth, Buffers::kHeight);
+    jpeg_abort_decompress(cinfo);
+    return false;
+  }
+  cinfo->out_color_space = JCS_RGB;
+  jpeg_start_decompress(cinfo);
+  if (cinfo->output_components != 3) {
+    // Log the same field that was checked (it used to print
+    // out_color_components instead).
+    LOG(WARNING, "libjpeg wants to return %d color components instead of 3\n",
+        cinfo->output_components);
+    jpeg_abort_decompress(cinfo);
+    return false;
+  }
+  if (cinfo->output_width != static_cast<unsigned int>(Buffers::kWidth) ||
+      cinfo->output_height != static_cast<unsigned int>(Buffers::kHeight)) {
+    LOG(WARNING, "libjpeg wants to return a (%ux%u) image but need (%dx%d)\n",
+        cinfo->output_width, cinfo->output_height,
+        Buffers::kWidth, Buffers::kHeight);
+    jpeg_abort_decompress(cinfo);
+    return false;
+  }
+
+  // Row pointers into the contiguous output buffer for jpeg_read_scanlines.
+  unsigned char *buffers[Buffers::kHeight];
+  for (int i = 0; i < Buffers::kHeight; ++i) {
+    buffers[i] = &out[i * Buffers::kWidth * 3];
+  }
+  while (cinfo->output_scanline < cinfo->output_height) {
+    jpeg_read_scanlines(cinfo, &buffers[cinfo->output_scanline],
+                        Buffers::kHeight - cinfo->output_scanline);
+  }
+
+  jpeg_finish_decompress(cinfo);
+  return true;
+}
+
+// Writes a 1-byte-per-pixel mask into out: 1 where the HSV pixel in `in` is
+// strictly inside all three (min, max) ranges (after adding hoffset to hue,
+// with uint8_t wraparound), 0 otherwise.
+void Java_aos_Natives_threshold(JNIEnv *env, jclass, jobject inobj,
+                                jobject outobj, jshort hoffset, jchar hmin,
+                                jchar hmax, jchar smin, jchar smax, jchar vmin,
+                                jchar vmax) {
+  const unsigned char *__restrict__ const in = static_cast<unsigned char *>(
+      getBufferAddress(env, inobj, kImagePixels * 3));
+  if (in == NULL) return;
+  char *__restrict__ const out = static_cast<char *>(
+      getBufferAddress(env, outobj, kImagePixels));
+  if (out == NULL) return;
+
+  const unsigned char *__restrict__ pixel = in;
+  for (int i = 0; i < kImagePixels; ++i, pixel += 3) {
+    const uint8_t hue = pixel[0] + static_cast<uint8_t>(hoffset);
+    const bool accepted = hue > hmin && hue < hmax &&
+        pixel[1] > smin && pixel[1] < smax &&
+        pixel[2] > vmin && pixel[2] < vmax;
+    out[i] = accepted;
+  }
+}
+// Swaps the first and third channel of every 3-byte pixel (BGR <-> the byte
+// order needed for dumping into a BMP).
+void Java_aos_Natives_convertBGR2BMP(JNIEnv *env, jclass,
+                                     jobject inobj, jobject outobj) {
+  const char *__restrict__ const in = static_cast<char *>(
+      getBufferAddress(env, inobj, kImagePixels * 3));
+  if (in == NULL) return;
+  char *__restrict__ const out = static_cast<char *>(
+      getBufferAddress(env, outobj, kImagePixels * 3));
+  if (out == NULL) return;
+
+  for (int offset = 0; offset < kImagePixels * 3; offset += 3) {
+    out[offset] = in[offset + 2];
+    out[offset + 1] = in[offset + 1];
+    out[offset + 2] = in[offset];
+  }
+}
+
+// Allocates the per-caller state (a BuffersHolder) and returns it as the
+// opaque id the other queue* functions take.
+jlong Java_aos_Natives_queueInit(JNIEnv *, jclass) {
+  return reinterpret_cast<intptr_t>(new BuffersHolder());
+}
+// Releases the most recently retrieved frame back to the queue system.
+void Java_aos_Natives_queueReleaseJPEG(JNIEnv *, jclass, jlong ptr) {
+  reinterpret_cast<BuffersHolder *>(ptr)->buffers.Release();
+}
+// Retrieves the next frame (recording its timestamp in the holder) and wraps
+// it in a non-owning direct ByteBuffer; NULL if retrieval failed.
+jobject Java_aos_Natives_queueGetJPEG(JNIEnv *env, jclass, jlong ptr) {
+  uint32_t size;
+  BuffersHolder *const holder = reinterpret_cast<BuffersHolder *>(ptr);
+  const void *const r = holder->buffers.GetNext(true, &size,
+                                                &holder->timestamp, NULL);
+  if (r == NULL) return NULL;
+  return env->NewDirectByteBuffer(const_cast<void *>(r), size);
+}
+// Returns the timestamp saved by the last queueGetJPEG call, in seconds.
+jdouble Java_aos_Natives_queueGetTimestamp(JNIEnv *, jclass, jlong ptr) {
+  const BuffersHolder *const holder = reinterpret_cast<BuffersHolder *>(ptr);
+  return holder->timestamp.tv_sec + holder->timestamp.tv_usec / 1000000.0;
+}
+
+// Forwards a java.util.logging message into the aos logging system, mapping
+// the standard Level int values (SEVERE=1000, WARNING=900, INFO=800) onto
+// aos log levels.
+void Java_aos_Natives_LOG(JNIEnv *env, jclass, jstring message, jint jlevel) {
+  log_level level;
+  if (jlevel >= 1000) {
+    // Don't want to use FATAL because the uncaught java exception that is
+    // likely to come next will be useful.
+    level = ERROR;
+  } else if (jlevel >= 900) {
+    level = WARNING;
+  } else if (jlevel >= 800) {
+    level = INFO;
+  } else {
+    level = DEBUG;
+  }
+  // can't use Get/ReleaseStringCritical because log_do might block waiting to
+  // put its message into the queue
+  const char *const message_chars = env->GetStringUTFChars(message, NULL);
+  if (message_chars == NULL) return;
+  log_do(level, "%s\n", message_chars);
+  env->ReleaseStringUTFChars(message, message_chars);
+}