#include <dirent.h>

#include <chrono>
#include <filesystem>
#include <thread>

#include "glog/logging.h"

#include "Argus/Argus.h"
#include "Argus/EGLStream.h"
#include "Argus/Types.h"
#include "Argus/utils/Error.h"
#include "EGLStream/FrameConsumer.h"
#include "EGLStream/Image.h"
#include "EGLStream/NV/ImageNativeBuffer.h"
#include "HalideBuffer.h"
#include "HalideRuntime.h"
#include "aos/events/shm_event_loop.h"
#include "aos/init.h"
#include "aos/realtime.h"
#include "aos/time/time.h"
#include "aos/util/file.h"
#include "frc971/orin/ycbcr.h"
#include "frc971/orin/ycbcr422.h"
#include "frc971/vision/vision_generated.h"
#include "nvbufsurface.h"

DEFINE_string(config, "aos_config.json", "Path to the config file to use.");

DEFINE_int32(colorformat, NVBUF_COLOR_FORMAT_NV16,
             "Color format to use. Don't change unless you know what you "
             "are doing.");
DEFINE_int32(camera, 0, "Camera number");
DEFINE_int32(mode, 0, "Mode number to use.");
DEFINE_int32(exposure, 200000, "Exposure number to use.");
DEFINE_int32(gain, 5, "gain number to use.");
DEFINE_int32(width, 1456, "Image width");
DEFINE_int32(height, 1088, "Image height");
DEFINE_double(rgain, 1.0, "R gain");
DEFINE_double(g1gain, 1.0, "G1 gain");
DEFINE_double(g2gain, 1.0, "G2 gain");
DEFINE_double(bgain, 1.0, "B gain");
DEFINE_string(channel, "/camera", "Channel name for the image.");

namespace frc971 {

namespace chrono = std::chrono;

// Converts a multiplanar 422 image into a single-plane 422 image at the
// provided memory location suitable for putting in a flatbuffer.
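//
// The output is packed 4:2:2 at 2 bytes per pixel (width * height * 2 bytes
// total): each horizontal pair of pixels shares one Cb and one Cr sample, so a
// row looks roughly like Y0 Cb0 Y1 Cr0 Y2 Cb1 Y3 Cr1 ...; the exact chroma
// ordering is whatever the generated ycbcr422 Halide kernel produces.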
void YCbCr422(NvBufSurface *nvbuf_surf, uint8_t *data_pointer) {
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.width[0],
           nvbuf_surf->surfaceList->planeParams.width[1] * 2);
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.height[0],
           nvbuf_surf->surfaceList->planeParams.height[1]);
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.pitch[0], 0x600u);
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.pitch[1], 0x600u);
  std::array<halide_dimension_t, 2> y_dimensions{{
      {
          /*.min =*/0,
          /*.extent =*/
          static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0]),
          /*.stride =*/1,
          /*.flags =*/0,
      },
      {
          /*.min = */ 0,
          /*.extent =*/
          static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[0]),
          /*.stride =*/
          static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.pitch[0]),
          /*.flags =*/0,
      },
  }};

  Halide::Runtime::Buffer<uint8_t, 2> y(
      reinterpret_cast<uint8_t *>(nvbuf_surf->surfaceList->mappedAddr.addr[0]),
      y_dimensions.size(), y_dimensions.data());

  std::array<halide_dimension_t, 3> cbcr_dimensions{
      {{
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[1]),
           /*.stride =*/2,
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[1]),
           /*.stride =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.pitch[1]),
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/2,
           /*.stride =*/1,
           /*.flags =*/0,
       }}};

  Halide::Runtime::Buffer<uint8_t, 3> cbcr(
      reinterpret_cast<uint8_t *>(nvbuf_surf->surfaceList->mappedAddr.addr[1]),
      cbcr_dimensions.size(), cbcr_dimensions.data());

  std::array<halide_dimension_t, 3> output_dimensions{
      {{
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0]),
           /*.stride =*/2,
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[0]),
           /*.stride =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0] *
                                2),
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/2,
           /*.stride =*/1,
           /*.flags =*/0,
       }}};

  Halide::Runtime::Buffer<uint8_t, 3> output(
      data_pointer, output_dimensions.size(), output_dimensions.data());
  ycbcr422(y, cbcr, output);
}

// Helper class to tie a NvBufSurface to an Argus::Buffer.
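// The DmaBuffer owns the dmabuf fd allocated through NvBufSurfaceAllocate.
// The Argus::Buffer built on top of it stores a pointer back to the DmaBuffer
// via IBuffer::setClientData(), so FromArgusBuffer() can recover the DmaBuffer
// (and therefore the fd) when a capture completes.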
class DmaBuffer {
 public:
  // Creates a DmaBuffer.  This is a static method so the result always lives
  // in a unique_ptr; that keeps the pointer value stable so the links between
  // the DmaBuffer and its Argus::Buffer stay valid.
  static std::unique_ptr<DmaBuffer> Create(
      const Argus::Size2D<uint32_t> &size, NvBufSurfaceColorFormat color_format,
      NvBufSurfaceLayout layout = NVBUF_LAYOUT_PITCH) {
    std::unique_ptr<DmaBuffer> buffer(new DmaBuffer());

    NvBufSurfaceAllocateParams params;

    params.memtag = NvBufSurfaceTag_CAMERA;
    params.params.width = size.width();
    params.params.height = size.height();
    params.params.colorFormat = color_format;
    params.params.layout = layout;
    params.params.isContiguous = true;
    params.disablePitchPadding = true;
    params.params.memType = NVBUF_MEM_SURFACE_ARRAY;

    NvBufSurface *nvbuf_surf = 0;
    CHECK_EQ(NvBufSurfaceAllocate(&nvbuf_surf, 1, &params), 0);
    buffer->fd_ = nvbuf_surf->surfaceList[0].bufferDesc;

    return buffer;
  }

  // Extracts the DmaBuffer from the Argus::Buffer.
  static DmaBuffer *FromArgusBuffer(Argus::Buffer *buffer) {
    aos::ScopedNotRealtime nrt;
    Argus::IBuffer *i_buffer = Argus::interface_cast<Argus::IBuffer>(buffer);
    const DmaBuffer *dmabuf =
        static_cast<const DmaBuffer *>(i_buffer->getClientData());

    return const_cast<DmaBuffer *>(dmabuf);
  }

  // Returns the DMA buffer handle.
  int fd() const { return fd_; }

  // Sets and gets the Argus::Buffer pointer.
  void set_argus_buffer(Argus::Buffer *buffer) { buffer_ = buffer; }
  Argus::Buffer *get_argus_buffer() const { return buffer_; }

  virtual ~DmaBuffer() {
    if (fd_ >= 0) {
      NvBufSurface *nvbuf_surf = 0;
      NvBufSurfaceFromFd(fd_, (void **)(&nvbuf_surf));
      if (nvbuf_surf != NULL) {
        NvBufSurfaceDestroy(nvbuf_surf);
      }
    }
  }

 private:
  // Private to force people to use Create() above.
  DmaBuffer() {}

  int fd_ = -1;
  Argus::Buffer *buffer_ = nullptr;
};

// Class to make it easy to interact with an Argus camera inside an event loop.
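//
// Rough lifecycle, as used in Main() below: construct it with a CameraDevice,
// call Start() to submit the repeating capture request, call
// NextImageBlocking() once per frame to get a MappedBuffer, and call Stop()
// before destruction to tear the capture session down.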
class ArgusCamera {
 public:
  ArgusCamera(Argus::ICameraProvider *i_camera_provider,
              Argus::CameraDevice *camera_device) {
    std::vector<Argus::SensorMode *> sensor_modes;
    Argus::ICameraProperties *i_camera_properties =
        Argus::interface_cast<Argus::ICameraProperties>(camera_device);
    CHECK(i_camera_properties) << "Failed to get ICameraProperties Interface";
    // Get available Sensor Modes
    i_camera_properties->getAllSensorModes(&sensor_modes);
    LOG(INFO) << "Found " << sensor_modes.size() << " modes";

    for (Argus::SensorMode *mode : sensor_modes) {
      Argus::ISensorMode *imode =
          Argus::interface_cast<Argus::ISensorMode>(mode);
      LOG(INFO) << imode->getResolution().width() << " x "
                << imode->getResolution().height();
      LOG(INFO) << "type " << imode->getSensorModeType().getName();
      LOG(INFO) << "exposure min " << imode->getExposureTimeRange().min();
      LOG(INFO) << "exposure max " << imode->getExposureTimeRange().max();
    }
    CHECK_GT(sensor_modes.size(), 0u);

    Argus::ISensorMode *i_sensor_mode =
        Argus::interface_cast<Argus::ISensorMode>(sensor_modes[FLAGS_mode]);
    CHECK(i_sensor_mode);

    {
      auto range = i_sensor_mode->getFrameDurationRange();
      LOG(INFO) << "Frame duration min: " << range.min()
                << ", max: " << range.max();
      LOG(INFO) << "type " << i_sensor_mode->getSensorModeType().getName();
    }

    // Create the capture session using the first device and get the core
    // interface.
    capture_session_.reset(
        i_camera_provider->createCaptureSession(camera_device));
    i_capture_session_ =
        Argus::interface_cast<Argus::ICaptureSession>(capture_session_);
    CHECK(i_capture_session_);

    CHECK_NE(egl_display_, EGL_NO_DISPLAY) << ": Failed to open display";

    // Create the OutputStream.
    stream_settings_.reset(i_capture_session_->createOutputStreamSettings(
        Argus::STREAM_TYPE_BUFFER));

    Argus::IBufferOutputStreamSettings *i_buffer_output_stream_settings =
        Argus::interface_cast<Argus::IBufferOutputStreamSettings>(
            stream_settings_);
    CHECK(i_buffer_output_stream_settings != nullptr);
    i_buffer_output_stream_settings->setBufferType(
        Argus::BUFFER_TYPE_EGL_IMAGE);
    i_buffer_output_stream_settings->setMetadataEnable(true);
    LOG(INFO) << "Type: "
              << i_buffer_output_stream_settings->getBufferType().getName();

    output_stream_.reset(
        i_capture_session_->createOutputStream(stream_settings_.get()));
    LOG(INFO) << "Got image stream";

    i_buffer_output_stream_ =
        Argus::interface_cast<Argus::IBufferOutputStream>(output_stream_);
    CHECK(i_buffer_output_stream_ != nullptr);

    // Build the DmaBuffers
    for (size_t i = 0; i < native_buffers_.size(); ++i) {
      native_buffers_[i] = DmaBuffer::Create(
          i_sensor_mode->getResolution(),
          static_cast<NvBufSurfaceColorFormat>(FLAGS_colorformat),
          NVBUF_LAYOUT_PITCH);
    }

    // Create EGLImages from the native buffers
    for (size_t i = 0; i < egl_images_.size(); i++) {
      int ret = 0;

      ret = NvBufSurfaceFromFd(native_buffers_[i]->fd(), (void **)(&surf_[i]));
      CHECK(ret == 0) << ": NvBufSurfaceFromFd failed";

      ret = NvBufSurfaceMapEglImage(surf_[i], 0);
      // This check typically fails from having X forwarding enabled.
      // Always call argus_camera without X forwarding.
      CHECK(ret == 0) << ": NvBufSurfaceMapEglImage failed. Make sure X "
                         "forwarding is not enabled.";

      egl_images_[i] = surf_[i]->surfaceList[0].mappedAddr.eglImage;
      CHECK(egl_images_[i] != EGL_NO_IMAGE_KHR)
          << ": Failed to create EGLImage";
    }

    // Create the BufferSettings object to configure Buffer creation.
    buffer_settings_.reset(i_buffer_output_stream_->createBufferSettings());
    Argus::IEGLImageBufferSettings *i_buffer_settings =
        Argus::interface_cast<Argus::IEGLImageBufferSettings>(buffer_settings_);
    CHECK(i_buffer_settings);

    // Create the Buffers for each EGLImage (and release to the stream for
    // initial capture use)
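    // Releasing a Buffer hands it to libargus to fill with a capture;
    // acquireBuffer() in NextImageBlocking() returns it once a frame has been
    // written into the underlying EGLImage/dmabuf.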
    for (size_t i = 0; i < buffers_.size(); i++) {
      i_buffer_settings->setEGLImage(egl_images_[i]);
      i_buffer_settings->setEGLDisplay(egl_display_);
      buffers_[i].reset(
          i_buffer_output_stream_->createBuffer(buffer_settings_.get()));
      Argus::IBuffer *i_buffer =
          Argus::interface_cast<Argus::IBuffer>(buffers_[i]);

      // Ties Argus::Buffer and DmaBuffer together.
      i_buffer->setClientData(native_buffers_[i].get());
      native_buffers_[i]->set_argus_buffer(buffers_[i].get());

      CHECK(Argus::interface_cast<Argus::IEGLImageBuffer>(buffers_[i]) !=
            nullptr)
          << ": Failed to create Buffer";

      CHECK_EQ(i_buffer_output_stream_->releaseBuffer(buffers_[i].get()),
               Argus::STATUS_OK)
          << "Failed to release Buffer for capture use";
    }

    request_.reset(i_capture_session_->createRequest());
    Argus::IRequest *i_request =
        Argus::interface_cast<Argus::IRequest>(request_);
    CHECK(i_request);

    Argus::IAutoControlSettings *i_auto_control_settings =
        Argus::interface_cast<Argus::IAutoControlSettings>(
            i_request->getAutoControlSettings());
    CHECK(i_auto_control_settings != nullptr);
    i_auto_control_settings->setAwbMode(Argus::AWB_MODE_OFF);

    i_auto_control_settings->setAeLock(false);
    Argus::Range<float> isp_digital_gain_range;
    isp_digital_gain_range.min() = 1;
    isp_digital_gain_range.max() = 1;
    i_auto_control_settings->setIspDigitalGainRange(isp_digital_gain_range);

    Argus::IEdgeEnhanceSettings *i_ee_settings =
        Argus::interface_cast<Argus::IEdgeEnhanceSettings>(request_);
    CHECK(i_ee_settings != nullptr);

    i_ee_settings->setEdgeEnhanceStrength(0);

    i_request->enableOutputStream(output_stream_.get());

    Argus::ISourceSettings *i_source_settings =
        Argus::interface_cast<Argus::ISourceSettings>(
            i_request->getSourceSettings());
    CHECK(i_source_settings != nullptr);

    i_source_settings->setFrameDurationRange(
        i_sensor_mode->getFrameDurationRange().min());
    i_source_settings->setSensorMode(sensor_modes[FLAGS_mode]);

    Argus::Range<float> sensor_mode_analog_gain_range;
    sensor_mode_analog_gain_range.min() = FLAGS_gain;
    sensor_mode_analog_gain_range.max() = FLAGS_gain;
    i_source_settings->setGainRange(sensor_mode_analog_gain_range);

    Argus::Range<uint64_t> limit_exposure_time_range;
    limit_exposure_time_range.min() = FLAGS_exposure;
    limit_exposure_time_range.max() = FLAGS_exposure;
    i_source_settings->setExposureTimeRange(limit_exposure_time_range);
  }

  void Start() {
    if (i_capture_session_->repeat(request_.get()) != Argus::STATUS_OK) {
      LOG(ERROR) << "Failed to submit repeat";
    }

    LOG(INFO) << "Session submitted";
  }

  // Class to manage an image buffer and return it when we are done.
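  //
  // MappedBuffer is a move-only RAII wrapper: construction maps the frame's
  // dmabuf for CPU reads (NvBufSurfaceMap + NvBufSurfaceSyncForCpu), and
  // destruction unmaps it and releases the Argus::Buffer back to the output
  // stream so it can be reused for a later capture.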
  class MappedBuffer {
   public:
    MappedBuffer(Argus::IBufferOutputStream *i_buffer_output_stream,
                 Argus::Buffer *buffer)
        : i_buffer_output_stream_(i_buffer_output_stream), buffer_(buffer) {
      if (buffer_ == nullptr) {
        return;
      }

      start_time_ = aos::monotonic_clock::now();

      dmabuf_ = DmaBuffer::FromArgusBuffer(buffer_);

      int dmabuf_fd = dmabuf_->fd();

      CHECK_EQ(NvBufSurfaceFromFd(dmabuf_fd, (void **)(&nvbuf_surf_)), 0);

      CHECK_EQ(NvBufSurfaceMap(nvbuf_surf_, -1, -1, NVBUF_MAP_READ), 0);
      VLOG(1) << "Mapped";
      NvBufSurfaceSyncForCpu(nvbuf_surf_, -1, -1);

      VLOG(1) << "Planes " << nvbuf_surf_->surfaceList->planeParams.num_planes
              << " colorFormat " << nvbuf_surf_->surfaceList->colorFormat;
      for (size_t i = 0; i < nvbuf_surf_->surfaceList->planeParams.num_planes;
           ++i) {
        VLOG(1) << "Address "
                << static_cast<void *>(
                       nvbuf_surf_->surfaceList->mappedAddr.addr[i])
                << ", pitch " << nvbuf_surf_->surfaceList->planeParams.pitch[i]
                << " height " << nvbuf_surf_->surfaceList->planeParams.height[i]
                << " width " << nvbuf_surf_->surfaceList->planeParams.width[i]
                << " bytes per pixel "
                << nvbuf_surf_->surfaceList->planeParams.bytesPerPix[i];
      }
      CHECK_EQ(nvbuf_surf_->surfaceList->planeParams.width[0],
               static_cast<size_t>(FLAGS_width));
      CHECK_EQ(nvbuf_surf_->surfaceList->planeParams.height[0],
               static_cast<size_t>(FLAGS_height));
    }
    MappedBuffer(const MappedBuffer &other) = delete;
    MappedBuffer &operator=(const MappedBuffer &other) = delete;
    MappedBuffer(MappedBuffer &&other) noexcept {
      buffer_ = other.buffer_;
      dmabuf_ = other.dmabuf_;
      nvbuf_surf_ = other.nvbuf_surf_;
      i_buffer_output_stream_ = other.i_buffer_output_stream_;
      start_time_ = other.start_time_;
      other.buffer_ = nullptr;
      other.dmabuf_ = nullptr;
      other.nvbuf_surf_ = nullptr;
    }

    NvBufSurface *nvbuf_surf() { return nvbuf_surf_; }

    const Argus::ICaptureMetadata *imetadata() {
      Argus::IBuffer *ibuffer = Argus::interface_cast<Argus::IBuffer>(buffer_);
      CHECK(ibuffer != nullptr);

      aos::ScopedNotRealtime nrt;
      const Argus::CaptureMetadata *metadata = ibuffer->getMetadata();
      const Argus::ICaptureMetadata *imetadata =
          Argus::interface_cast<const Argus::ICaptureMetadata>(metadata);
      return imetadata;
    }

    aos::monotonic_clock::time_point start_time() const { return start_time_; }

    virtual ~MappedBuffer() {
      if (buffer_ != nullptr) {
        CHECK_EQ(NvBufSurfaceUnMap(nvbuf_surf_, -1, -1), 0);
        aos::ScopedNotRealtime nrt;
        i_buffer_output_stream_->releaseBuffer(buffer_);
      }
    }

   private:
    Argus::IBufferOutputStream *i_buffer_output_stream_;

    Argus::Buffer *buffer_;

    DmaBuffer *dmabuf_ = nullptr;

    NvBufSurface *nvbuf_surf_ = nullptr;

    aos::monotonic_clock::time_point start_time_;
  };

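  // Blocks (for up to 5 seconds) until libargus hands back a filled buffer,
  // and returns it mapped for CPU access.  The returned MappedBuffer has a
  // nullptr nvbuf_surf() if the stream has ended.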
  MappedBuffer NextImageBlocking() {
    VLOG(1) << "Going for frame";

    Argus::Buffer *buffer;
    {
      Argus::Status status;
      aos::ScopedNotRealtime nrt;

      buffer = i_buffer_output_stream_->acquireBuffer(
          std::chrono::nanoseconds(std::chrono::seconds(5)).count(), &status);

      if (status == Argus::STATUS_END_OF_STREAM) {
        return MappedBuffer(nullptr, nullptr);
      }
    }

    // const aos::monotonic_clock::time_point now = aos::monotonic_clock::now();
    return MappedBuffer(i_buffer_output_stream_, buffer);
  }

  void Stop() {
    i_capture_session_->stopRepeat();
    i_buffer_output_stream_->endOfStream();
    i_capture_session_->waitForIdle();
  }

  virtual ~ArgusCamera() {
    output_stream_.reset();

    for (uint32_t i = 0; i < surf_.size(); i++) {
      NvBufSurfaceUnMapEglImage(surf_[i], 0);
    }
    eglTerminate(egl_display_);
  }

 private:
  Argus::UniqueObj<Argus::CaptureSession> capture_session_;
  Argus::ICaptureSession *i_capture_session_;

  EGLDisplay egl_display_ = eglGetDisplay(EGL_DEFAULT_DISPLAY);

  Argus::UniqueObj<Argus::OutputStreamSettings> stream_settings_;

  Argus::UniqueObj<Argus::OutputStream> output_stream_;
  Argus::IBufferOutputStream *i_buffer_output_stream_;

  std::array<std::unique_ptr<DmaBuffer>, 10> native_buffers_;

  std::array<NvBufSurface *, 10> surf_;

  std::array<EGLImageKHR, 10> egl_images_;

  Argus::UniqueObj<Argus::BufferSettings> buffer_settings_;

  std::array<Argus::UniqueObj<Argus::Buffer>, 10> buffers_;

  Argus::UniqueObj<Argus::Request> request_;
};

int Main() {
  aos::FlatbufferDetachedBuffer<aos::Configuration> config =
      aos::configuration::ReadConfig(FLAGS_config);

  aos::ShmEventLoop event_loop(&config.message());

  event_loop.SetRuntimeRealtimePriority(55);
  event_loop.SetRuntimeAffinity(aos::MakeCpusetFromCpus({2, 3, 4}));

  aos::Sender<frc971::vision::CameraImage> sender =
      event_loop.MakeSender<frc971::vision::CameraImage>(FLAGS_channel);

  LOG(INFO) << "Started";
  // Initialize the Argus camera provider.
  Argus::UniqueObj<Argus::CameraProvider> camera_provider;
  camera_provider =
      Argus::UniqueObj<Argus::CameraProvider>(Argus::CameraProvider::create());

  // Get the ICameraProvider interface from the global CameraProvider
  Argus::ICameraProvider *i_camera_provider =
      Argus::interface_cast<Argus::ICameraProvider>(camera_provider);
  if (!i_camera_provider) {
    ORIGINATE_ERROR("Failed to get ICameraProvider interface");
  }

  // Get the camera devices.
  std::vector<Argus::CameraDevice *> camera_devices;
  i_camera_provider->getCameraDevices(&camera_devices);
  if (camera_devices.size() == 0) {
    ORIGINATE_ERROR("there are %u cameras", (unsigned)camera_devices.size());
  }

  LOG(INFO) << "Found " << camera_devices.size() << " cameras";
  for (Argus::CameraDevice *camera : camera_devices) {
    Argus::ICameraProperties *i_camera_properties =
        Argus::interface_cast<Argus::ICameraProperties>(camera);
    LOG(INFO) << "Camera " << i_camera_properties->getModelName();
  }

  {
    ArgusCamera camera(i_camera_provider, camera_devices[FLAGS_camera]);

    aos::monotonic_clock::time_point last_time = aos::monotonic_clock::epoch();

    aos::TimerHandler *timer = event_loop.AddTimer([&camera, &event_loop,
                                                    &sender, &last_time,
                                                    &timer]() {
      ArgusCamera::MappedBuffer buffer = camera.NextImageBlocking();

      if (buffer.nvbuf_surf() == nullptr) {
        // TODO(austin): Control-C isn't working for some reason, debug it...
        // We're restarting nvargus-daemon here because if we exit like this,
        // it's likely that nvargus-daemon has run into an error that it can't
        // recover from, which means that even if this program restarts, it
        // can't get new camera images.
        CHECK_EQ(std::system("sudo systemctl restart nvargus-daemon.service"),
                 0);
        event_loop.Exit();
        return;
      }

      const Argus::ICaptureMetadata *imetadata = buffer.imetadata();

      if (imetadata) {
        aos::Sender<frc971::vision::CameraImage>::Builder builder =
            sender.MakeBuilder();

        uint8_t *data_pointer = nullptr;
        builder.fbb()->StartIndeterminateVector(FLAGS_width * FLAGS_height * 2,
                                                1, 64, &data_pointer);

        YCbCr422(buffer.nvbuf_surf(), data_pointer);
        flatbuffers::Offset<flatbuffers::Vector<uint8_t>> data_offset =
            builder.fbb()->EndIndeterminateVector(
                FLAGS_width * FLAGS_height * 2, 1);

        auto image_builder = builder.MakeBuilder<frc971::vision::CameraImage>();
        image_builder.add_data(data_offset);
        image_builder.add_rows(FLAGS_height);
        image_builder.add_cols(FLAGS_width);
        {
          aos::ScopedNotRealtime nrt;
          image_builder.add_monotonic_timestamp_ns(
              imetadata->getSensorTimestamp());
        }
        builder.CheckOk(builder.Send(image_builder.Finish()));

        const aos::monotonic_clock::time_point after_send =
            aos::monotonic_clock::now();

        VLOG(1)
            << "Got " << imetadata->getCaptureId() << " delay "
            << chrono::duration<double>(
                   chrono::nanoseconds(
                       (buffer.start_time().time_since_epoch().count() -
                        (imetadata->getSensorTimestamp() +
                         imetadata->getFrameReadoutTime()))))
                   .count()
            << " mmap "
            << chrono::duration<double>(after_send - buffer.start_time())
                   .count()
            << "sec dt "
            << chrono::duration<double>(buffer.start_time() - last_time).count()
            << "sec, exposure " << imetadata->getSensorExposureTime();
      }

      last_time = buffer.start_time();
      timer->Schedule(event_loop.monotonic_now());
    });

    event_loop.OnRun([&event_loop, timer]() {
      timer->Schedule(event_loop.monotonic_now());
    });

    camera.Start();

    // Set the libargus threads which got spawned to RT priority.
    {
      DIR *dirp = opendir("/proc/self/task");
      PCHECK(dirp != nullptr);
      const int main_pid = getpid();
      struct dirent *directory_entry;
      while ((directory_entry = readdir(dirp)) != NULL) {
        const int thread_id = std::atoi(directory_entry->d_name);

        // std::atoi returns 0 for "." and "..", so skip those.  Also skip the
        // main thread, whose priority is managed by the event loop.
        if (thread_id != 0 && thread_id != main_pid) {
          struct sched_param param;
          param.sched_priority = 56;
          sched_setscheduler(thread_id, SCHED_FIFO, &param);
        }
      }
      closedir(dirp);
    }

    event_loop.Run();
    LOG(INFO) << "Event loop shutting down";

    camera.Stop();
  }

  return 0;
}

}  // namespace frc971

int main(int argc, char **argv) {
  aos::InitGoogle(&argc, &argv);
  return frc971::Main();
}

// I tried every different format option. Here's what worked and didn't work.
//
// NVBUF_COLOR_FORMAT_RGB,

// NVBUF_COLOR_FORMAT_YUYV,  // Failed
// NVBUF_COLOR_FORMAT_NV24,  // Works
// NVBUF_COLOR_FORMAT_UYVY,  // Failed
// NVBUF_COLOR_FORMAT_YUV420,  // Failed with error.

// NVBUF_COLOR_FORMAT_GRAY8,  // unsupported
// NVBUF_COLOR_FORMAT_YUV420,  // unsupported
// NVBUF_COLOR_FORMAT_YVU420,  // unsupported

// NVBUF_COLOR_FORMAT_YUV420_ER,  // unsupported
// NVBUF_COLOR_FORMAT_YVU420_ER,  // unsupported
//
///** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV12,  // Works! pitch 2048 height 1080 width
// 1920 colorFormat 6 planes 2 bytes per pixel 1 delay 0.00203304
// mmap 0.000340288sec dt 0.0166379sec
//
///** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV12_ER,  // Works! pitch 2048 height 1080
// width 1920 colorFormat 7 planes 2 bytes per pixel 1
///** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV21,  // Works! pitch 2048 height 1080 width
// 1920 colorFormat 8 planes 2 bytes per pixel 1
///** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV21_ER,  // Works! pitch 2048 height 1080
// width 1920 colorFormat 9 planes 2 bytes per pixel 1
//
//
// NVBUF_COLOR_FORMAT_UYVY,  // works with an error?!?
// NVBUF_COLOR_FORMAT_UYVY_ER,  // unsupported 11
// NVBUF_COLOR_FORMAT_VYUY,  // unsupported 12
// NVBUF_COLOR_FORMAT_VYUY_ER,  // unsupported 13
// NVBUF_COLOR_FORMAT_YUYV,  // unsupported 14
// NVBUF_COLOR_FORMAT_YUYV_ER,  // unsupported 15
// NVBUF_COLOR_FORMAT_YVYU,  // unsupported 16
// NVBUF_COLOR_FORMAT_YVYU_ER,  // unsupported 17
// NVBUF_COLOR_FORMAT_YUV444,  // unsupported 18
// NVBUF_COLOR_FORMAT_RGBA,  // unsupported 19
// NVBUF_COLOR_FORMAT_BGRA,  // unsupported 20
// NVBUF_COLOR_FORMAT_ARGB,  // unsupported 21
// NVBUF_COLOR_FORMAT_ABGR,  // unsupported 22
// NVBUF_COLOR_FORMAT_RGBx,  // unsupported 23
// NVBUF_COLOR_FORMAT_BGRx,  // unsupported 24
// NVBUF_COLOR_FORMAT_xRGB,  // unsupported 25
// NVBUF_COLOR_FORMAT_xBGR,  // unsupported 26
// NVBUF_COLOR_FORMAT_RGB,  // unsupported 27
// NVBUF_COLOR_FORMAT_BGR,  // unsupported 28
// NVBUF_COLOR_FORMAT_NV12_10LE,  // unsupported 29
// NVBUF_COLOR_FORMAT_NV12_12LE,  // unsupported 30
// NVBUF_COLOR_FORMAT_YUV420_709,  // unsupported 31
// NVBUF_COLOR_FORMAT_YUV420_709_ER,  // unsupported 32
// NVBUF_COLOR_FORMAT_NV12_709,  // works pitch 2048 height 1080
// width 1920 colorFormat 33 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV12_709_ER,  // works pitch 2048 height 1080
// width 1920 colorFormat 34 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_YUV420_2020,  // unsupported 35
// NVBUF_COLOR_FORMAT_NV12_2020,  // unsupported 36
// NVBUF_COLOR_FORMAT_NV12_10LE_ER,  // unsupported 37
// NVBUF_COLOR_FORMAT_NV12_10LE_709,  // unsupported 38
// NVBUF_COLOR_FORMAT_NV12_10LE_709_ER,  // unsupported 39
// NVBUF_COLOR_FORMAT_NV12_10LE_2020,  // unsupported 40
// NVBUF_COLOR_FORMAT_SIGNED_R16G16,  // unsupported 41
// NVBUF_COLOR_FORMAT_R8_G8_B8,  // unsupported 42
// NVBUF_COLOR_FORMAT_B8_G8_R8,  // unsupported 43
// NVBUF_COLOR_FORMAT_R32F_G32F_B32F,  // unsupported 44
// NVBUF_COLOR_FORMAT_B32F_G32F_R32F,  // unsupported 45
// NVBUF_COLOR_FORMAT_YUV422,  // unsupported 46
// NVBUF_COLOR_FORMAT_NV21_10LE,  // unsupported 47
// NVBUF_COLOR_FORMAT_NV21_12LE,  // unsupported 48
// NVBUF_COLOR_FORMAT_NV12_12LE_2020,  // unsupported 49
///** Specifies BT.601 colorspace - Y/CbCr 4:2:2 multi-planar. */
// NVBUF_COLOR_FORMAT_NV16,  // works pitch 2048 height 1080 width
// 1920 colorFormat 50 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV16_10LE,  // unsupported 51
///** Specifies BT.601 colorspace - Y/CbCr 4:4:4 multi-planar. */
// NVBUF_COLOR_FORMAT_NV24,  // works pitch 2048 height 1080
// width 1920 colorFormat 52 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV24_10LE,  // unsupported 53
//
// NVBUF_COLOR_FORMAT_NV16_ER,  // works pitch 2048 height 1080
// width 1920 colorFormat 54 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV24_ER,  // works pitch 2048 height 1080
// width 1920 colorFormat 55 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV16_709,  // unsupported 56
// NVBUF_COLOR_FORMAT_NV24_709,  // unsupported 57
// NVBUF_COLOR_FORMAT_NV16_709_ER,  // unsupported 58
// NVBUF_COLOR_FORMAT_NV24_709_ER,  // unsupported 59
// NVBUF_COLOR_FORMAT_NV24_10LE_709,  // unsupported 60
// NVBUF_COLOR_FORMAT_NV24_10LE_709_ER,  // unsupported 61
// NVBUF_COLOR_FORMAT_NV24_10LE_2020,  // unsupported 62
// NVBUF_COLOR_FORMAT_NV24_12LE_2020,  // unsupported 63
// NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_709,  // unsupported 64
// NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_2020,  // unsupported 65
// NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_709,  // unsupported 66
// NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_2020,  // unsupported 67
// NVBUF_COLOR_FORMAT_A32,  // unsupported 68
// NVBUF_COLOR_FORMAT_UYVP,  // unsupported 69
// NVBUF_COLOR_FORMAT_UYVP_ER  // unsupported 70

// NVBUF_COLOR_FORMAT_ABGR,
// NVBUF_COLOR_FORMAT_ARGB,