#include <chrono>
#include <filesystem>
#include <thread>

#include "glog/logging.h"

#include "Argus/Argus.h"
#include "Argus/EGLStream.h"
#include "Argus/Types.h"
#include "Argus/utils/Error.h"
#include "EGLStream/FrameConsumer.h"
#include "EGLStream/Image.h"
#include "EGLStream/NV/ImageNativeBuffer.h"
#include "HalideBuffer.h"
#include "HalideRuntime.h"
#include "aos/events/shm_event_loop.h"
#include "aos/init.h"
#include "aos/realtime.h"
#include "aos/time/time.h"
#include "aos/util/file.h"
#include "frc971/orin/ycbcr.h"
#include "frc971/orin/ycbcr422.h"
#include "frc971/vision/vision_generated.h"
#include "nvbufsurface.h"

DEFINE_string(config, "aos_config.json", "Path to the config file to use.");

DEFINE_int32(colorformat, NVBUF_COLOR_FORMAT_NV16,
             "Color format to use. Don't change unless you know what you are "
             "doing.");
DEFINE_int32(camera, 0, "Camera number");
DEFINE_int32(mode, 0, "Mode number to use.");
DEFINE_int32(exposure, 200000, "Exposure number to use.");
DEFINE_int32(gain, 5, "Gain number to use.");
DEFINE_int32(width, 1456, "Image width");
DEFINE_int32(height, 1088, "Image height");
DEFINE_double(rgain, 1.0, "R gain");
DEFINE_double(g1gain, 1.0, "G1 gain");
DEFINE_double(g2gain, 1.0, "G2 gain");
DEFINE_double(bgain, 1.0, "B gain");
DEFINE_string(channel, "/camera", "Channel name for the image.");

namespace frc971 {

namespace chrono = std::chrono;

// Converts a multiplanar 422 image into a single plane 422 image at the
// provided memory location suitable for putting in a flatbuffer.
void YCbCr422(NvBufSurface *nvbuf_surf, uint8_t *data_pointer) {
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.width[0],
           nvbuf_surf->surfaceList->planeParams.width[1] * 2);
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.height[0],
           nvbuf_surf->surfaceList->planeParams.height[1]);
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.pitch[0], 0x600u);
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.pitch[1], 0x600u);
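  // Wrap the Y plane in a 2D Halide buffer: full resolution, one byte per
  // pixel, with the row stride set to the hardware pitch rather than the
  // image width.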
  std::array<halide_dimension_t, 2> y_dimensions{{
      {
          /*.min =*/0,
          /*.extent =*/
          static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0]),
          /*.stride =*/1,
          /*.flags =*/0,
      },
      {
          /*.min =*/0,
          /*.extent =*/
          static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[0]),
          /*.stride =*/
          static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.pitch[0]),
          /*.flags =*/0,
      },
  }};

  Halide::Runtime::Buffer<uint8_t, 2> y(
      reinterpret_cast<uint8_t *>(nvbuf_surf->surfaceList->mappedAddr.addr[0]),
      y_dimensions.size(), y_dimensions.data());

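  // Wrap the interleaved Cb/Cr plane as a 3D buffer: half the horizontal
  // resolution of Y, with the innermost dimension selecting Cb vs Cr (hence
  // the stride of 2 in x).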
  std::array<halide_dimension_t, 3> cbcr_dimensions{
      {{
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[1]),
           /*.stride =*/2,
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[1]),
           /*.stride =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.pitch[1]),
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/2,
           /*.stride =*/1,
           /*.flags =*/0,
       }}};

  Halide::Runtime::Buffer<uint8_t, 3> cbcr(
      reinterpret_cast<uint8_t *>(nvbuf_surf->surfaceList->mappedAddr.addr[1]),
      cbcr_dimensions.size(), cbcr_dimensions.data());

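  // The output is packed YCbCr 4:2:2 (2 bytes per pixel) written straight
  // into the flatbuffer memory, so its row stride is exactly width * 2 with
  // no padding.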
  std::array<halide_dimension_t, 3> output_dimensions{
      {{
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0]),
           /*.stride =*/2,
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[0]),
           /*.stride =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0] *
                                2),
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/2,
           /*.stride =*/1,
           /*.flags =*/0,
       }}};

  Halide::Runtime::Buffer<uint8_t, 3> output(
      data_pointer, output_dimensions.size(), output_dimensions.data());
  ycbcr422(y, cbcr, output);
}

// Helper class to tie an NvBufSurface to an Argus::Buffer.
class DmaBuffer {
 public:
  // Creates a DmaBuffer. This is a static factory so the object always ends
  // up owned by a unique_ptr, keeping its address stable so the Argus::Buffer
  // client-data link doesn't break.
  static std::unique_ptr<DmaBuffer> Create(
      const Argus::Size2D<uint32_t> &size, NvBufSurfaceColorFormat color_format,
      NvBufSurfaceLayout layout = NVBUF_LAYOUT_PITCH) {
    std::unique_ptr<DmaBuffer> buffer(new DmaBuffer());

    NvBufSurfaceAllocateParams params;

    params.memtag = NvBufSurfaceTag_CAMERA;
    params.params.width = size.width();
    params.params.height = size.height();
    params.params.colorFormat = color_format;
    params.params.layout = layout;
    params.params.isContiguous = true;
    params.disablePitchPadding = true;
    params.params.memType = NVBUF_MEM_SURFACE_ARRAY;

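    // Allocate the surface and stash the dmabuf file descriptor; the
    // NvBufSurface is looked up again from that fd when it needs to be
    // destroyed.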
    NvBufSurface *nvbuf_surf = 0;
    CHECK_EQ(NvBufSurfaceAllocate(&nvbuf_surf, 1, &params), 0);
    buffer->fd_ = nvbuf_surf->surfaceList[0].bufferDesc;

    return buffer;
  }

  // Extracts the DmaBuffer from the Argus::Buffer.
  static DmaBuffer *FromArgusBuffer(Argus::Buffer *buffer) {
    aos::ScopedNotRealtime nrt;
    Argus::IBuffer *i_buffer = Argus::interface_cast<Argus::IBuffer>(buffer);
    const DmaBuffer *dmabuf =
        static_cast<const DmaBuffer *>(i_buffer->getClientData());

    return const_cast<DmaBuffer *>(dmabuf);
  }

  // Returns the DMA buffer handle.
  int fd() const { return fd_; }

  // Sets and gets the Argus::Buffer pointer.
  void set_argus_buffer(Argus::Buffer *buffer) { buffer_ = buffer; }
  Argus::Buffer *get_argus_buffer() const { return buffer_; }

  virtual ~DmaBuffer() {
    if (fd_ >= 0) {
      NvBufSurface *nvbuf_surf = 0;
      NvBufSurfaceFromFd(fd_, (void **)(&nvbuf_surf));
      if (nvbuf_surf != NULL) {
        NvBufSurfaceDestroy(nvbuf_surf);
      }
    }
  }

 private:
  // Private to force people to use Create() above.
  DmaBuffer() {}

  int fd_ = -1;
  Argus::Buffer *buffer_ = nullptr;
};

// Class to make it easy to interact with an Argus camera inside an event loop.
class ArgusCamera {
 public:
  ArgusCamera(Argus::ICameraProvider *i_camera_provider,
              Argus::CameraDevice *camera_device) {
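    // Enumerate the sensor modes so the mode selected by --mode can be
    // validated and its resolution used to size the capture buffers.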
    std::vector<Argus::SensorMode *> sensor_modes;
    Argus::ICameraProperties *i_camera_properties =
        Argus::interface_cast<Argus::ICameraProperties>(camera_device);
    CHECK(i_camera_properties) << "Failed to get ICameraProperties Interface";
    // Get available Sensor Modes
    i_camera_properties->getAllSensorModes(&sensor_modes);
    LOG(INFO) << "Found " << sensor_modes.size() << " modes";

    for (Argus::SensorMode *mode : sensor_modes) {
      Argus::ISensorMode *imode =
          Argus::interface_cast<Argus::ISensorMode>(mode);
      LOG(INFO) << imode->getResolution().width() << " x "
                << imode->getResolution().height();
      LOG(INFO) << "type " << imode->getSensorModeType().getName();
      LOG(INFO) << "exposure min " << imode->getExposureTimeRange().min();
      LOG(INFO) << "exposure max " << imode->getExposureTimeRange().max();
    }
    CHECK_GT(sensor_modes.size(), 0u);

    Argus::ISensorMode *i_sensor_mode =
        Argus::interface_cast<Argus::ISensorMode>(sensor_modes[FLAGS_mode]);
    CHECK(i_sensor_mode);

    {
      auto range = i_sensor_mode->getFrameDurationRange();
      LOG(INFO) << "Frame duration min: " << range.min()
                << ", max: " << range.max();
      LOG(INFO) << "type " << i_sensor_mode->getSensorModeType().getName();
    }

    // Create the capture session using the provided device and get the core
    // interface.
    capture_session_.reset(
        i_camera_provider->createCaptureSession(camera_device));
    i_capture_session_ =
        Argus::interface_cast<Argus::ICaptureSession>(capture_session_);
    CHECK(i_capture_session_);

    CHECK_NE(egl_display_, EGL_NO_DISPLAY) << ": Failed to open display";

    // Create the OutputStream.
    stream_settings_.reset(i_capture_session_->createOutputStreamSettings(
        Argus::STREAM_TYPE_BUFFER));

    Argus::IBufferOutputStreamSettings *i_buffer_output_stream_settings =
        Argus::interface_cast<Argus::IBufferOutputStreamSettings>(
            stream_settings_);
    CHECK(i_buffer_output_stream_settings != nullptr);
    i_buffer_output_stream_settings->setBufferType(
        Argus::BUFFER_TYPE_EGL_IMAGE);
    i_buffer_output_stream_settings->setMetadataEnable(true);
    LOG(INFO) << "Type: "
              << i_buffer_output_stream_settings->getBufferType().getName();

    output_stream_.reset(
        i_capture_session_->createOutputStream(stream_settings_.get()));
    LOG(INFO) << "Got stream";

    i_buffer_output_stream_ =
        Argus::interface_cast<Argus::IBufferOutputStream>(output_stream_);
    CHECK(i_buffer_output_stream_ != nullptr);

    // Build the DmaBuffers
    for (size_t i = 0; i < native_buffers_.size(); ++i) {
      native_buffers_[i] = DmaBuffer::Create(
          i_sensor_mode->getResolution(),
          static_cast<NvBufSurfaceColorFormat>(FLAGS_colorformat),
          NVBUF_LAYOUT_PITCH);
    }

    // Create EGLImages from the native buffers
    for (size_t i = 0; i < egl_images_.size(); i++) {
      int ret = 0;

      ret = NvBufSurfaceFromFd(native_buffers_[i]->fd(), (void **)(&surf_[i]));
      CHECK(ret == 0) << ": NvBufSurfaceFromFd failed";

      ret = NvBufSurfaceMapEglImage(surf_[i], 0);
      // This check typically fails from having X forwarding enabled.
      // Always call argus_camera without X forwarding.
      CHECK(ret == 0) << ": NvBufSurfaceMapEglImage failed";

      egl_images_[i] = surf_[i]->surfaceList[0].mappedAddr.eglImage;
      CHECK(egl_images_[i] != EGL_NO_IMAGE_KHR)
          << ": Failed to create EGLImage";
    }

    // Create the BufferSettings object to configure Buffer creation.
    buffer_settings_.reset(i_buffer_output_stream_->createBufferSettings());
    Argus::IEGLImageBufferSettings *i_buffer_settings =
        Argus::interface_cast<Argus::IEGLImageBufferSettings>(buffer_settings_);
    CHECK(i_buffer_settings);

    // Create the Buffers for each EGLImage (and release to the stream for
    // initial capture use)
    for (size_t i = 0; i < buffers_.size(); i++) {
      i_buffer_settings->setEGLImage(egl_images_[i]);
      i_buffer_settings->setEGLDisplay(egl_display_);
      buffers_[i].reset(
          i_buffer_output_stream_->createBuffer(buffer_settings_.get()));
      Argus::IBuffer *i_buffer =
          Argus::interface_cast<Argus::IBuffer>(buffers_[i]);

      // Ties Argus::Buffer and DmaBuffer together.
      i_buffer->setClientData(native_buffers_[i].get());
      native_buffers_[i]->set_argus_buffer(buffers_[i].get());

      CHECK(Argus::interface_cast<Argus::IEGLImageBuffer>(buffers_[i]) !=
            nullptr)
          << ": Failed to create Buffer";

      CHECK_EQ(i_buffer_output_stream_->releaseBuffer(buffers_[i].get()),
               Argus::STATUS_OK)
          << "Failed to release Buffer for capture use";
    }

    request_.reset(i_capture_session_->createRequest());
    Argus::IRequest *i_request =
        Argus::interface_cast<Argus::IRequest>(request_);
    CHECK(i_request);

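    // Turn off as much automatic processing as possible so frames are
    // repeatable: fixed white balance, unity ISP digital gain, and no edge
    // enhancement.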
    Argus::IAutoControlSettings *i_auto_control_settings =
        Argus::interface_cast<Argus::IAutoControlSettings>(
            i_request->getAutoControlSettings());
    CHECK(i_auto_control_settings != nullptr);
    i_auto_control_settings->setAwbMode(Argus::AWB_MODE_OFF);

    i_auto_control_settings->setAeLock(false);
    Argus::Range<float> isp_digital_gain_range;
    isp_digital_gain_range.min() = 1;
    isp_digital_gain_range.max() = 1;
    i_auto_control_settings->setIspDigitalGainRange(isp_digital_gain_range);

    Argus::IEdgeEnhanceSettings *i_ee_settings =
        Argus::interface_cast<Argus::IEdgeEnhanceSettings>(request_);
    CHECK(i_ee_settings != nullptr);

    i_ee_settings->setEdgeEnhanceStrength(0);

    i_request->enableOutputStream(output_stream_.get());

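    // Pin the sensor settings: run at the mode's fastest frame rate and use a
    // fixed analog gain and exposure time from the command line flags.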
    Argus::ISourceSettings *i_source_settings =
        Argus::interface_cast<Argus::ISourceSettings>(
            i_request->getSourceSettings());
    CHECK(i_source_settings != nullptr);

    i_source_settings->setFrameDurationRange(
        i_sensor_mode->getFrameDurationRange().min());
    i_source_settings->setSensorMode(sensor_modes[FLAGS_mode]);

    Argus::Range<float> sensor_mode_analog_gain_range;
    sensor_mode_analog_gain_range.min() = FLAGS_gain;
    sensor_mode_analog_gain_range.max() = FLAGS_gain;
    i_source_settings->setGainRange(sensor_mode_analog_gain_range);

    Argus::Range<uint64_t> limit_exposure_time_range;
    limit_exposure_time_range.min() = FLAGS_exposure;
    limit_exposure_time_range.max() = FLAGS_exposure;
    i_source_settings->setExposureTimeRange(limit_exposure_time_range);
  }

  void Start() {
    if (i_capture_session_->repeat(request_.get()) != Argus::STATUS_OK) {
      LOG(ERROR) << "Failed to submit repeat";
    }

    LOG(INFO) << "Session submitted";
  }

  // Class to manage an image buffer and return it when we are done.
  class MappedBuffer {
   public:
    MappedBuffer(Argus::IBufferOutputStream *i_buffer_output_stream,
                 Argus::Buffer *buffer)
        : i_buffer_output_stream_(i_buffer_output_stream), buffer_(buffer) {
      if (buffer_ == nullptr) {
        return;
      }

      start_time_ = aos::monotonic_clock::now();

      dmabuf_ = DmaBuffer::FromArgusBuffer(buffer_);

      int dmabuf_fd = dmabuf_->fd();

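      // Map the underlying dmabuf so the CPU can read the captured planes,
      // then sync the cache so the data is coherent with what the hardware
      // wrote.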
      CHECK_EQ(NvBufSurfaceFromFd(dmabuf_fd, (void **)(&nvbuf_surf_)), 0);

      CHECK_EQ(NvBufSurfaceMap(nvbuf_surf_, -1, -1, NVBUF_MAP_READ), 0);
      VLOG(1) << "Mapped";
      NvBufSurfaceSyncForCpu(nvbuf_surf_, -1, -1);

      VLOG(1) << "Planes " << nvbuf_surf_->surfaceList->planeParams.num_planes
              << " colorFormat " << nvbuf_surf_->surfaceList->colorFormat;
      for (size_t i = 0; i < nvbuf_surf_->surfaceList->planeParams.num_planes;
           ++i) {
        VLOG(1) << "Address "
                << static_cast<void *>(
                       nvbuf_surf_->surfaceList->mappedAddr.addr[i])
                << ", pitch " << nvbuf_surf_->surfaceList->planeParams.pitch[i]
                << " height " << nvbuf_surf_->surfaceList->planeParams.height[i]
                << " width " << nvbuf_surf_->surfaceList->planeParams.width[i]
                << " bytes per pixel "
                << nvbuf_surf_->surfaceList->planeParams.bytesPerPix[i];
      }
      CHECK_EQ(nvbuf_surf_->surfaceList->planeParams.width[0],
               static_cast<size_t>(FLAGS_width));
      CHECK_EQ(nvbuf_surf_->surfaceList->planeParams.height[0],
               static_cast<size_t>(FLAGS_height));
    }
    MappedBuffer(const MappedBuffer &other) = delete;
    MappedBuffer &operator=(const MappedBuffer &other) = delete;
    MappedBuffer(MappedBuffer &&other) noexcept {
      buffer_ = other.buffer_;
      dmabuf_ = other.dmabuf_;
      nvbuf_surf_ = other.nvbuf_surf_;
      i_buffer_output_stream_ = other.i_buffer_output_stream_;
      start_time_ = other.start_time_;
      other.buffer_ = nullptr;
      other.dmabuf_ = nullptr;
      other.nvbuf_surf_ = nullptr;
    }

    NvBufSurface *nvbuf_surf() { return nvbuf_surf_; }

    const Argus::ICaptureMetadata *imetadata() {
      Argus::IBuffer *ibuffer = Argus::interface_cast<Argus::IBuffer>(buffer_);
      CHECK(ibuffer != nullptr);

      aos::ScopedNotRealtime nrt;
      const Argus::CaptureMetadata *metadata = ibuffer->getMetadata();
      const Argus::ICaptureMetadata *imetadata =
          Argus::interface_cast<const Argus::ICaptureMetadata>(metadata);
      CHECK(imetadata);
      return imetadata;
    }

    aos::monotonic_clock::time_point start_time() const { return start_time_; }

    virtual ~MappedBuffer() {
      if (buffer_ != nullptr) {
        CHECK_EQ(NvBufSurfaceUnMap(nvbuf_surf_, -1, -1), 0);
        aos::ScopedNotRealtime nrt;
        i_buffer_output_stream_->releaseBuffer(buffer_);
      }
    }

   private:
    Argus::IBufferOutputStream *i_buffer_output_stream_;

    Argus::Buffer *buffer_;

    DmaBuffer *dmabuf_ = nullptr;

    NvBufSurface *nvbuf_surf_ = nullptr;

    aos::monotonic_clock::time_point start_time_;
  };

  MappedBuffer NextImageBlocking() {
    VLOG(1) << "Going for frame";

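    // acquireBuffer blocks until the next capture completes (up to 5
    // seconds); STATUS_END_OF_STREAM means the stream was shut down, so
    // return an empty buffer to signal the caller.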
    Argus::Buffer *buffer;
    {
      Argus::Status status;
      aos::ScopedNotRealtime nrt;

      buffer = i_buffer_output_stream_->acquireBuffer(
          std::chrono::nanoseconds(std::chrono::seconds(5)).count(), &status);

      if (status == Argus::STATUS_END_OF_STREAM) {
        return MappedBuffer(nullptr, nullptr);
      }
    }

    // const aos::monotonic_clock::time_point now = aos::monotonic_clock::now();
    return MappedBuffer(i_buffer_output_stream_, buffer);
  }

  void Stop() {
    i_capture_session_->stopRepeat();
    i_buffer_output_stream_->endOfStream();
    i_capture_session_->waitForIdle();
  }

  virtual ~ArgusCamera() {
    output_stream_.reset();

    for (uint32_t i = 0; i < surf_.size(); i++) {
      NvBufSurfaceUnMapEglImage(surf_[i], 0);
    }
    eglTerminate(egl_display_);
  }

 private:
  Argus::UniqueObj<Argus::CaptureSession> capture_session_;
  Argus::ICaptureSession *i_capture_session_;

  EGLDisplay egl_display_ = eglGetDisplay(EGL_DEFAULT_DISPLAY);

  Argus::UniqueObj<Argus::OutputStreamSettings> stream_settings_;

  Argus::UniqueObj<Argus::OutputStream> output_stream_;
  Argus::IBufferOutputStream *i_buffer_output_stream_;

  std::array<std::unique_ptr<DmaBuffer>, 10> native_buffers_;

  std::array<NvBufSurface *, 10> surf_;

  std::array<EGLImageKHR, 10> egl_images_;

  Argus::UniqueObj<Argus::BufferSettings> buffer_settings_;

  std::array<Argus::UniqueObj<Argus::Buffer>, 10> buffers_;

  Argus::UniqueObj<Argus::Request> request_;
};

int Main() {
  aos::FlatbufferDetachedBuffer<aos::Configuration> config =
      aos::configuration::ReadConfig(FLAGS_config);

  aos::ShmEventLoop event_loop(&config.message());

  event_loop.SetRuntimeRealtimePriority(55);

  aos::Sender<frc971::vision::CameraImage> sender =
      event_loop.MakeSender<frc971::vision::CameraImage>(FLAGS_channel);

  LOG(INFO) << "Started";
  // Initialize the Argus camera provider.
  Argus::UniqueObj<Argus::CameraProvider> camera_provider;
  camera_provider =
      Argus::UniqueObj<Argus::CameraProvider>(Argus::CameraProvider::create());

  // Get the ICameraProvider interface from the global CameraProvider
  Argus::ICameraProvider *i_camera_provider =
      Argus::interface_cast<Argus::ICameraProvider>(camera_provider);
  if (!i_camera_provider) {
    ORIGINATE_ERROR("Failed to get ICameraProvider interface");
  }

  // Get the camera devices.
  std::vector<Argus::CameraDevice *> camera_devices;
  i_camera_provider->getCameraDevices(&camera_devices);
  if (camera_devices.size() == 0) {
    ORIGINATE_ERROR("No cameras available");
  }

  LOG(INFO) << "Found " << camera_devices.size() << " cameras";
  for (Argus::CameraDevice *camera : camera_devices) {
    Argus::ICameraProperties *i_camera_properties =
        Argus::interface_cast<Argus::ICameraProperties>(camera);
    LOG(INFO) << "Camera " << i_camera_properties->getModelName();
  }

  {
    ArgusCamera camera(i_camera_provider, camera_devices[FLAGS_camera]);

    aos::monotonic_clock::time_point last_time = aos::monotonic_clock::epoch();

    aos::TimerHandler *timer = event_loop.AddTimer([&camera, &event_loop,
                                                    &sender, &last_time,
                                                    &timer]() {
      ArgusCamera::MappedBuffer buffer = camera.NextImageBlocking();

      if (buffer.nvbuf_surf() == nullptr) {
        // TODO(austin): Control-C isn't working for some reason, debug it...
        event_loop.Exit();
        return;
      }

      const Argus::ICaptureMetadata *imetadata = buffer.imetadata();

      aos::Sender<frc971::vision::CameraImage>::Builder builder =
          sender.MakeBuilder();

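      // Build the image message in place: reserve the data vector in shared
      // memory, convert the camera planes directly into it, and only then
      // finish the CameraImage table, avoiding an extra copy of the frame.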
      uint8_t *data_pointer = nullptr;
      builder.fbb()->StartIndeterminateVector(FLAGS_width * FLAGS_height * 2, 1,
                                              64, &data_pointer);

      YCbCr422(buffer.nvbuf_surf(), data_pointer);
      flatbuffers::Offset<flatbuffers::Vector<uint8_t>> data_offset =
          builder.fbb()->EndIndeterminateVector(FLAGS_width * FLAGS_height * 2,
                                                1);

      auto image_builder = builder.MakeBuilder<frc971::vision::CameraImage>();
      image_builder.add_data(data_offset);
      image_builder.add_rows(FLAGS_height);
      image_builder.add_cols(FLAGS_width);
      {
        aos::ScopedNotRealtime nrt;
        image_builder.add_monotonic_timestamp_ns(
            imetadata->getSensorTimestamp());
      }
      builder.CheckOk(builder.Send(image_builder.Finish()));

      const aos::monotonic_clock::time_point after_send =
          aos::monotonic_clock::now();

      VLOG(1)
          << "Got " << imetadata->getCaptureId() << " delay "
          << chrono::duration<double>(
                 chrono::nanoseconds(
                     (buffer.start_time().time_since_epoch().count() -
                      (imetadata->getSensorTimestamp() +
                       imetadata->getFrameReadoutTime()))))
                 .count()
          << " mmap "
          << chrono::duration<double>(after_send - buffer.start_time()).count()
          << "sec dt "
          << chrono::duration<double>(buffer.start_time() - last_time).count()
          << "sec, exposure " << imetadata->getSensorExposureTime();

      last_time = buffer.start_time();
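      // Re-arm the timer immediately; the callback blocks in
      // NextImageBlocking(), so this effectively polls for the next frame as
      // soon as the current one is sent.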
      timer->Schedule(event_loop.monotonic_now());
    });

    event_loop.OnRun([&event_loop, timer]() {
      timer->Schedule(event_loop.monotonic_now());
    });

    camera.Start();

    event_loop.Run();
    LOG(INFO) << "Event loop shutting down";

    camera.Stop();
  }

  return 0;
}

}  // namespace frc971

int main(int argc, char **argv) {
  aos::InitGoogle(&argc, &argv);
  return frc971::Main();
}

// I tried every different format option. Here's what worked and didn't work.
//
// NVBUF_COLOR_FORMAT_RGB,

// NVBUF_COLOR_FORMAT_YUYV,  // Failed
// NVBUF_COLOR_FORMAT_NV24,  // Works
// NVBUF_COLOR_FORMAT_UYVY,  // Failed
// NVBUF_COLOR_FORMAT_YUV420,  // Failed with error.

// NVBUF_COLOR_FORMAT_GRAY8,  // unsupported
// NVBUF_COLOR_FORMAT_YUV420,  // unsupported
// NVBUF_COLOR_FORMAT_YVU420,  // unsupported

// NVBUF_COLOR_FORMAT_YUV420_ER,  // unsupported
// NVBUF_COLOR_FORMAT_YVU420_ER,  // unsupported
//
///** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV12,  // Works!  pitch 2048 height 1080 width
// 1920 colorFormat 6 planes 2 bytes per pixel 1 delay 0.00203304
// mmap 0.000340288sec dt 0.0166379sec
//
///** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV12_ER,  // Works!  pitch 2048 height 1080
// width 1920 colorFormat 7 planes 2 bytes per pixel 1
///** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV21,  // Works!  pitch 2048 height 1080 width
// 1920 colorFormat 8 planes 2 bytes per pixel 1
///** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV21_ER,  // Works!  pitch 2048 height 1080
// width 1920 colorFormat 9 planes 2 bytes per pixel 1
//
//
// NVBUF_COLOR_FORMAT_UYVY,  // works with an error?!?
// NVBUF_COLOR_FORMAT_UYVY_ER,  // unsupported 11
// NVBUF_COLOR_FORMAT_VYUY,  // unsupported 12
// NVBUF_COLOR_FORMAT_VYUY_ER,  // unsupported 13
// NVBUF_COLOR_FORMAT_YUYV,  // unsupported 14
// NVBUF_COLOR_FORMAT_YUYV_ER,  // unsupported 15
// NVBUF_COLOR_FORMAT_YVYU,  // unsupported 16
// NVBUF_COLOR_FORMAT_YVYU_ER,  // unsupported 17
// NVBUF_COLOR_FORMAT_YUV444,  // unsupported 18
// NVBUF_COLOR_FORMAT_RGBA,  // unsupported 19
// NVBUF_COLOR_FORMAT_BGRA,  // unsupported 20
// NVBUF_COLOR_FORMAT_ARGB,  // unsupported 21
// NVBUF_COLOR_FORMAT_ABGR,  // unsupported 22
// NVBUF_COLOR_FORMAT_RGBx,  // unsupported 23
// NVBUF_COLOR_FORMAT_BGRx,  // unsupported 24
// NVBUF_COLOR_FORMAT_xRGB,  // unsupported 25
// NVBUF_COLOR_FORMAT_xBGR,  // unsupported 26
// NVBUF_COLOR_FORMAT_RGB,  // unsupported 27
// NVBUF_COLOR_FORMAT_BGR,  // unsupported 28
// NVBUF_COLOR_FORMAT_NV12_10LE,  // unsupported 29
// NVBUF_COLOR_FORMAT_NV12_12LE,  // unsupported 30
// NVBUF_COLOR_FORMAT_YUV420_709,  // unsupported 31
// NVBUF_COLOR_FORMAT_YUV420_709_ER,  // unsupported 32
// NVBUF_COLOR_FORMAT_NV12_709,  // works  pitch 2048 height 1080
// width 1920 colorFormat 33 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV12_709_ER,  // works  pitch 2048 height 1080
// width 1920 colorFormat 34 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_YUV420_2020,  // unsupported 35
// NVBUF_COLOR_FORMAT_NV12_2020,  // unsupported 36
// NVBUF_COLOR_FORMAT_NV12_10LE_ER,  // unsupported 37
// NVBUF_COLOR_FORMAT_NV12_10LE_709,  // unsupported 38
// NVBUF_COLOR_FORMAT_NV12_10LE_709_ER,  // unsupported 39
// NVBUF_COLOR_FORMAT_NV12_10LE_2020,  // unsupported 40
// NVBUF_COLOR_FORMAT_SIGNED_R16G16,  // unsupported 41
// NVBUF_COLOR_FORMAT_R8_G8_B8,  // unsupported 42
// NVBUF_COLOR_FORMAT_B8_G8_R8,  // unsupported 43
// NVBUF_COLOR_FORMAT_R32F_G32F_B32F,  // unsupported 44
// NVBUF_COLOR_FORMAT_B32F_G32F_R32F,  // unsupported 45
// NVBUF_COLOR_FORMAT_YUV422,  // unsupported 46
// NVBUF_COLOR_FORMAT_NV21_10LE,  // unsupported 47
// NVBUF_COLOR_FORMAT_NV21_12LE,  // unsupported 48
// NVBUF_COLOR_FORMAT_NV12_12LE_2020,  // unsupported 49
///** Specifies BT.601 colorspace - Y/CbCr 4:2:2 multi-planar. */
// NVBUF_COLOR_FORMAT_NV16,  // works  pitch 2048 height 1080 width
// 1920 colorFormat 50 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV16_10LE,  // unsupported 51
///** Specifies BT.601 colorspace - Y/CbCr 4:4:4 multi-planar. */
// NVBUF_COLOR_FORMAT_NV24,  // works  pitch 2048 height 1080
// width 1920 colorFormat 52 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV24_10LE,  // unsupported 53
//
// NVBUF_COLOR_FORMAT_NV16_ER,  // works  pitch 2048 height 1080
// width 1920 colorFormat 54 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV24_ER,  // works  pitch 2048 height 1080
// width 1920 colorFormat 55 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV16_709,  // unsupported 56
// NVBUF_COLOR_FORMAT_NV24_709,  // unsupported 57
// NVBUF_COLOR_FORMAT_NV16_709_ER,  // unsupported 58
// NVBUF_COLOR_FORMAT_NV24_709_ER,  // unsupported 59
// NVBUF_COLOR_FORMAT_NV24_10LE_709,  // unsupported 60
// NVBUF_COLOR_FORMAT_NV24_10LE_709_ER,  // unsupported 61
// NVBUF_COLOR_FORMAT_NV24_10LE_2020,  // unsupported 62
// NVBUF_COLOR_FORMAT_NV24_12LE_2020,  // unsupported 63
// NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_709,  // unsupported 64
// NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_2020,  // unsupported 65
// NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_709,  // unsupported 66
// NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_2020,  // unsupported 67
// NVBUF_COLOR_FORMAT_A32,  // unsupported 68
// NVBUF_COLOR_FORMAT_UYVP,  // unsupported 69
// NVBUF_COLOR_FORMAT_UYVP_ER  // unsupported 70

// NVBUF_COLOR_FORMAT_ABGR,
// NVBUF_COLOR_FORMAT_ARGB,