Send downsized images for streaming

Added another V4L2 reader, using the mainpath, to send downsized
640x480 images for driver cam streaming. Also made some config queue
size changes (channel storage duration and max sizes) to make room
for the additional image channel.
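
The new mainpath reader is gated behind --send_downsized_images and
publishes on /camera/downsized; a minimal sketch of the wiring added
in y2023/vision/camera_reader.cc (media device and selfpath reader
setup elided, see the full diff below):

    // Only construct the second reader when requested; the mainpath
    // resizer is configured for 640x480 YUYV upstream of this.
    std::unique_ptr<RockchipV4L2Reader> v4l2_reader_mainpath;
    if (FLAGS_send_downsized_images) {
      v4l2_reader_mainpath = std::make_unique<RockchipV4L2Reader>(
          &event_loop, event_loop.epoll(), rkisp1_mainpath->device(),
          camera->device(), "/camera/downsized");
    }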

Signed-off-by: milind-u <milind.upadhyay@gmail.com>
Change-Id: I5714ea774773e1c37ff2647cb75b3dda07f896fe
diff --git a/frc971/vision/v4l2_reader.cc b/frc971/vision/v4l2_reader.cc
index 2553003..1404ec5 100644
--- a/frc971/vision/v4l2_reader.cc
+++ b/frc971/vision/v4l2_reader.cc
@@ -13,9 +13,11 @@
 namespace vision {
 
 V4L2ReaderBase::V4L2ReaderBase(aos::EventLoop *event_loop,
-                               const std::string &device_name)
-    : fd_(open(device_name.c_str(), O_RDWR | O_NONBLOCK)),
-      event_loop_(event_loop) {
+                               std::string_view device_name,
+                               std::string_view image_channel)
+    : fd_(open(device_name.data(), O_RDWR | O_NONBLOCK)),
+      event_loop_(event_loop),
+      image_channel_(image_channel) {
   PCHECK(fd_.get() != -1) << " Failed to open device " << device_name;
 
   // Figure out if we are multi-planar or not.
@@ -84,7 +86,7 @@
   }
 
   for (size_t i = 0; i < buffers_.size(); ++i) {
-    buffers_[i].sender = event_loop_->MakeSender<CameraImage>("/camera");
+    buffers_[i].sender = event_loop_->MakeSender<CameraImage>(image_channel_);
     MarkBufferToBeEnqueued(i);
   }
   int type = multiplanar() ? V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
@@ -287,9 +289,9 @@
   PLOG(FATAL) << "VIDIOC_STREAMOFF failed";
 }
 
-V4L2Reader::V4L2Reader(aos::EventLoop *event_loop,
-                       const std::string &device_name)
-    : V4L2ReaderBase(event_loop, device_name) {
+V4L2Reader::V4L2Reader(aos::EventLoop *event_loop, std::string_view device_name,
+                       std::string_view image_channel)
+    : V4L2ReaderBase(event_loop, device_name, image_channel) {
   // Don't know why this magic call to SetExposure is required (before the
   // camera settings are configured) to make things work on boot of the pi, but
   // it seems to be-- without it, the image exposure is wrong (too dark). Note--
@@ -322,11 +324,12 @@
 
 RockchipV4L2Reader::RockchipV4L2Reader(aos::EventLoop *event_loop,
                                        aos::internal::EPoll *epoll,
-                                       const std::string &device_name,
-                                       const std::string &image_sensor_subdev)
-    : V4L2ReaderBase(event_loop, device_name),
+                                       std::string_view device_name,
+                                       std::string_view image_sensor_subdev,
+                                       std::string_view image_channel)
+    : V4L2ReaderBase(event_loop, device_name, image_channel),
       epoll_(epoll),
-      image_sensor_fd_(open(image_sensor_subdev.c_str(), O_RDWR | O_NONBLOCK)),
+      image_sensor_fd_(open(image_sensor_subdev.data(), O_RDWR | O_NONBLOCK)),
       buffer_requeuer_([this](int buffer) { EnqueueBuffer(buffer); },
                        kEnqueueFifoPriority) {
   PCHECK(image_sensor_fd_.get() != -1)
diff --git a/frc971/vision/v4l2_reader.h b/frc971/vision/v4l2_reader.h
index 22e4367..8764024 100644
--- a/frc971/vision/v4l2_reader.h
+++ b/frc971/vision/v4l2_reader.h
@@ -22,7 +22,9 @@
 class V4L2ReaderBase {
  public:
   // device_name is the name of the device file (like "/dev/video0").
-  V4L2ReaderBase(aos::EventLoop *event_loop, const std::string &device_name);
+  // image_channel is the channel to send images on.
+  V4L2ReaderBase(aos::EventLoop *event_loop, std::string_view device_name,
+                 std::string_view image_channel);
 
   V4L2ReaderBase(const V4L2ReaderBase &) = delete;
   V4L2ReaderBase &operator=(const V4L2ReaderBase &) = delete;
@@ -114,6 +116,8 @@
     flatbuffers::Offset<CameraImage> message_offset;
 
     uint8_t *data_pointer = nullptr;
+
+    std::string_view image_channel_;
   };
 
   struct BufferInfo {
@@ -149,12 +153,15 @@
 
   aos::EventLoop *event_loop_;
   aos::Ftrace ftrace_;
+
+  std::string_view image_channel_;
 };
 
 // Generic V4L2 reader for pi's and older.
 class V4L2Reader : public V4L2ReaderBase {
  public:
-  V4L2Reader(aos::EventLoop *event_loop, const std::string &device_name);
+  V4L2Reader(aos::EventLoop *event_loop, std::string_view device_name,
+             std::string_view image_channel = "/camera");
 };
 
 // Rockpi specific v4l2 reader.  This assumes that the media device has been
@@ -162,10 +169,11 @@
 class RockchipV4L2Reader : public V4L2ReaderBase {
  public:
   RockchipV4L2Reader(aos::EventLoop *event_loop, aos::internal::EPoll *epoll,
-                     const std::string &device_name,
-                     const std::string &image_sensor_subdev);
+                     std::string_view device_name,
+                     std::string_view image_sensor_subdev,
+                     std::string_view image_channel = "/camera");
 
-  ~RockchipV4L2Reader();
+  virtual ~RockchipV4L2Reader();
 
   void SetExposure(size_t duration) override;
 
diff --git a/y2023/vision/camera_reader.cc b/y2023/vision/camera_reader.cc
index b604516..7de3eb5 100644
--- a/y2023/vision/camera_reader.cc
+++ b/y2023/vision/camera_reader.cc
@@ -17,6 +17,8 @@
 DEFINE_double(green, 1, "Green gain");
 DEFINE_double(blue, 1.96, "Blue gain");
 DEFINE_double(exposure, 150, "Camera exposure");
+DEFINE_bool(send_downsized_images, false,
+            "Whether to send downsized image for driver cam streaming.");
 
 namespace y2023 {
 namespace vision {
@@ -31,59 +33,70 @@
     media_device->Log();
   }
 
-  int width = 1296;
-  int height = 972;
-  int color_format = MEDIA_BUS_FMT_SBGGR10_1X10;
-  std::string camera_device_string = "ov5647 4-0036";
-  if (FLAGS_lowlight_camera) {
-    width = 1920;
-    height = 1080;
-    color_format = MEDIA_BUS_FMT_SRGGB10_1X10;
-    camera_device_string = "arducam-pivariety 4-000c";
-  }
+  const int kWidth = (FLAGS_lowlight_camera ? 1920 : 1296);
+  const int kHeight = (FLAGS_lowlight_camera ? 1080 : 972);
+  const int kColorFormat = (FLAGS_lowlight_camera ? MEDIA_BUS_FMT_SRGGB10_1X10
+                                                  : MEDIA_BUS_FMT_SBGGR10_1X10);
+
+  const std::string_view kCameraDeviceString =
+      (FLAGS_lowlight_camera ? "arducam-pivariety 4-000c" : "ov5647 4-0036");
+
+  // Scale down the selfpath images so we can log at 30 Hz (but still detect
+  // AprilTags at a far enough distance).
+  const double kSelfpathScalar = 2.0 / 3.0;
+  const int kSelfpathWidth = kWidth * kSelfpathScalar;
+  const int kSelfpathHeight = kHeight * kSelfpathScalar;
+
+  // Send heavily downsized images to the driver cam. They go over the
+  // network, and in this case frame rate is more important than quality.
+  constexpr int kMainpathWidth = 640;
+  constexpr int kMainpathHeight = 480;
 
   media_device->Reset();
 
-  Entity *camera = media_device->FindEntity(camera_device_string);
-  camera->pads()[0]->SetSubdevFormat(width, height, color_format);
+  Entity *camera = media_device->FindEntity(kCameraDeviceString);
+  camera->pads()[0]->SetSubdevFormat(kWidth, kHeight, kColorFormat);
 
   Entity *rkisp1_csi = media_device->FindEntity("rkisp1_csi");
-  rkisp1_csi->pads()[0]->SetSubdevFormat(width, height, color_format);
-  rkisp1_csi->pads()[1]->SetSubdevFormat(width, height, color_format);
+  rkisp1_csi->pads()[0]->SetSubdevFormat(kWidth, kHeight, kColorFormat);
+  rkisp1_csi->pads()[1]->SetSubdevFormat(kWidth, kHeight, kColorFormat);
 
   // TODO(austin): Should we set this on the link?
   Entity *rkisp1_isp = media_device->FindEntity("rkisp1_isp");
-  rkisp1_isp->pads(0)->SetSubdevFormat(width, height, color_format);
-  rkisp1_isp->pads(0)->SetSubdevCrop(width, height);
+  rkisp1_isp->pads(0)->SetSubdevFormat(kWidth, kHeight, kColorFormat);
+  rkisp1_isp->pads(0)->SetSubdevCrop(kWidth, kHeight);
 
-  rkisp1_isp->pads(2)->SetSubdevFormat(width, height, MEDIA_BUS_FMT_YUYV8_2X8);
-  rkisp1_isp->pads(2)->SetSubdevCrop(width, height);
+  rkisp1_isp->pads(2)->SetSubdevFormat(kWidth, kHeight,
+                                       MEDIA_BUS_FMT_YUYV8_2X8);
+  rkisp1_isp->pads(2)->SetSubdevCrop(kWidth, kHeight);
 
   Entity *rkisp1_resizer_selfpath =
       media_device->FindEntity("rkisp1_resizer_selfpath");
-  rkisp1_resizer_selfpath->pads(0)->SetSubdevFormat(width, height,
+  rkisp1_resizer_selfpath->pads(0)->SetSubdevFormat(kWidth, kHeight,
                                                     MEDIA_BUS_FMT_YUYV8_2X8);
   rkisp1_resizer_selfpath->pads(1)->SetSubdevFormat(
-      width * 2 / 3, height * 2 / 3, MEDIA_BUS_FMT_YUYV8_2X8);
-  rkisp1_resizer_selfpath->pads(0)->SetSubdevCrop(width, height);
+      kSelfpathWidth, kSelfpathHeight, MEDIA_BUS_FMT_YUYV8_2X8);
+  rkisp1_resizer_selfpath->pads(0)->SetSubdevCrop(kWidth, kHeight);
 
   Entity *rkisp1_resizer_mainpath =
       media_device->FindEntity("rkisp1_resizer_mainpath");
-  rkisp1_resizer_mainpath->pads(0)->SetSubdevFormat(width, height,
+  rkisp1_resizer_mainpath->pads(0)->SetSubdevFormat(kWidth, kHeight,
                                                     MEDIA_BUS_FMT_YUYV8_2X8);
 
-  rkisp1_resizer_mainpath->pads(0)->SetSubdevCrop(width, height);
-  rkisp1_resizer_mainpath->pads(1)->SetSubdevFormat(width / 2, height / 2,
-                                                    MEDIA_BUS_FMT_YUYV8_2X8);
+  rkisp1_resizer_mainpath->pads(0)->SetSubdevCrop(kWidth, kHeight);
+  rkisp1_resizer_mainpath->pads(1)->SetSubdevFormat(
+      kMainpathWidth, kMainpathHeight, MEDIA_BUS_FMT_YUYV8_2X8);
 
   Entity *rkisp1_mainpath = media_device->FindEntity("rkisp1_mainpath");
-  rkisp1_mainpath->SetFormat(width / 2, height / 2, V4L2_PIX_FMT_YUV422P);
+  rkisp1_mainpath->SetFormat(kMainpathWidth, kMainpathHeight,
+                             V4L2_PIX_FMT_YUYV);
 
   Entity *rkisp1_selfpath = media_device->FindEntity("rkisp1_selfpath");
-  rkisp1_selfpath->SetFormat(width * 2 / 3, height * 2 / 3, V4L2_PIX_FMT_YUYV);
+  rkisp1_selfpath->SetFormat(kSelfpathWidth, kSelfpathHeight,
+                             V4L2_PIX_FMT_YUYV);
 
   media_device->Enable(
-      media_device->FindLink(camera_device_string, 0, "rkisp1_csi", 0));
+      media_device->FindLink(kCameraDeviceString, 0, "rkisp1_csi", 0));
   media_device->Enable(
       media_device->FindLink("rkisp1_csi", 1, "rkisp1_isp", 0));
   media_device->Enable(
@@ -99,16 +112,25 @@
   event_loop.SetRuntimeRealtimePriority(55);
   event_loop.SetRuntimeAffinity(aos::MakeCpusetFromCpus({2}));
 
-  RockchipV4L2Reader v4l2_reader(&event_loop, event_loop.epoll(),
-                                 rkisp1_selfpath->device(), camera->device());
-
+  // Reader for vision processing
+  RockchipV4L2Reader v4l2_reader_selfpath(&event_loop, event_loop.epoll(),
+                                          rkisp1_selfpath->device(),
+                                          camera->device());
   if (FLAGS_lowlight_camera) {
-    v4l2_reader.SetGainExt(100);
-    v4l2_reader.SetVerticalBlanking(1000);
-    v4l2_reader.SetExposure(FLAGS_exposure);
+    v4l2_reader_selfpath.SetGainExt(100);
+    v4l2_reader_selfpath.SetVerticalBlanking(1000);
+    v4l2_reader_selfpath.SetExposure(FLAGS_exposure);
   } else {
-    v4l2_reader.SetGainExt(1000);
-    v4l2_reader.SetExposure(1000);
+    v4l2_reader_selfpath.SetGainExt(1000);
+    v4l2_reader_selfpath.SetExposure(1000);
+  }
+
+  std::unique_ptr<RockchipV4L2Reader> v4l2_reader_mainpath;
+  if (FLAGS_send_downsized_images) {
+    // Reader for driver cam streaming on logger pi, sending lower res images
+    v4l2_reader_mainpath = std::make_unique<RockchipV4L2Reader>(
+        &event_loop, event_loop.epoll(), rkisp1_mainpath->device(),
+        camera->device(), "/camera/downsized");
   }
 
   {
diff --git a/y2023/y2023.json b/y2023/y2023.json
index d5f9462..cdac47c 100644
--- a/y2023/y2023.json
+++ b/y2023/y2023.json
@@ -1,5 +1,5 @@
 {
-  "channel_storage_duration": 10000000000,
+  "channel_storage_duration": 8000000000,
   "maps": [
     {
       "match": {
diff --git a/y2023/y2023_logger.json b/y2023/y2023_logger.json
index db313d2..cdbd8ec 100644
--- a/y2023/y2023_logger.json
+++ b/y2023/y2023_logger.json
@@ -377,6 +377,17 @@
       "num_senders": 1
     },
     {
+      "name": "/logger/camera/downsized",
+      "type": "frc971.vision.CameraImage",
+      "logger": "NOT_LOGGED",
+      "source_node": "logger",
+      "frequency": 40,
+      "max_size": 921744,
+      "num_readers": 4,
+      "read_method": "PIN",
+      "num_senders": 18
+    },
+    {
       "name": "/localizer",
       "type": "frc971.IMUValuesBatch",
       "source_node": "imu",
@@ -438,6 +449,14 @@
       ]
     },
     {
+      "name": "camera_reader",
+      "executable_name": "camera_reader",
+      "args": ["--enable_ftrace", "--send_downsized_images"],
+      "nodes": [
+        "logger"
+      ]
+    },
+    {
       "name": "image_logger",
       "executable_name": "logger_main",
       "autostart": false,
diff --git a/y2023/y2023_pi_template.json b/y2023/y2023_pi_template.json
index cf87798..e641d5d 100644
--- a/y2023/y2023_pi_template.json
+++ b/y2023/y2023_pi_template.json
@@ -175,7 +175,7 @@
       "source_node": "pi{{ NUM }}",
       "frequency": 40,
       "num_senders": 2,
-      "max_size": 40000,
+      "max_size": 1024,
       "logger": "LOCAL_AND_REMOTE_LOGGER",
       "logger_nodes": [
         "imu",