Austin Schuh | 6891344 | 2023-10-20 23:16:03 -0700 | [diff] [blame] | 1 | #include <chrono> |
| 2 | #include <filesystem> |
| 3 | #include <thread> |
| 4 | |
| 5 | #include "glog/logging.h" |
| 6 | |
| 7 | #include "Argus/Argus.h" |
| 8 | #include "Argus/EGLStream.h" |
| 9 | #include "Argus/Types.h" |
| 10 | #include "Argus/utils/Error.h" |
| 11 | #include "EGLStream/FrameConsumer.h" |
| 12 | #include "EGLStream/Image.h" |
| 13 | #include "EGLStream/NV/ImageNativeBuffer.h" |
| 14 | #include "HalideBuffer.h" |
| 15 | #include "HalideRuntime.h" |
| 16 | #include "aos/events/shm_event_loop.h" |
| 17 | #include "aos/init.h" |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 18 | #include "aos/realtime.h" |
Austin Schuh | 6891344 | 2023-10-20 23:16:03 -0700 | [diff] [blame] | 19 | #include "aos/time/time.h" |
| 20 | #include "aos/util/file.h" |
| 21 | #include "frc971/orin/ycbcr.h" |
| 22 | #include "frc971/orin/ycbcr422.h" |
| 23 | #include "frc971/vision/vision_generated.h" |
| 24 | #include "nvbufsurface.h" |
| 25 | |
// AOS configuration used to construct the ShmEventLoop in Main().
DEFINE_string(config, "aos_config.json", "Path to the config file to use.");

// NvBufSurface color format for the capture buffers. NV16 (4:2:2
// multi-planar) is what the YCbCr422 conversion below expects.
DEFINE_int32(colorformat, NVBUF_COLOR_FORMAT_NV16,
             "Mode to use. Don't change unless you know what you are doing.");
// Index into the camera device list returned by the Argus provider.
DEFINE_int32(camera, 0, "Camera number");
// Index into the sensor mode list reported by the camera.
DEFINE_int32(mode, 0, "Mode number to use.");
// Fixed exposure time (both ends of the exposure range are pinned to this).
DEFINE_int32(exposure, 200000, "Exposure number to use.");
// Fixed analog gain (both ends of the gain range are pinned to this).
DEFINE_int32(gain, 5, "gain number to use.");
// Expected image dimensions; the capture is CHECKed against these.
DEFINE_int32(width, 1456, "Image width");
DEFINE_int32(height, 1088, "Image height");
// Per-channel white-balance gains.
DEFINE_double(rgain, 1.0, "R gain");
DEFINE_double(g1gain, 1.0, "G gain");
DEFINE_double(g2gain, 1.0, "G gain");
DEFINE_double(bgain, 1.0, "B gain");
// AOS channel the CameraImage messages are published on.
DEFINE_string(channel, "/camera", "Channel name for the image.");
| 41 | |
| 42 | namespace frc971 { |
| 43 | |
| 44 | namespace chrono = std::chrono; |
| 45 | |
| 46 | // Converts a multiplanar 422 image into a single plane 422 image at the |
| 47 | // provided memory location sutable for putting in a flatbuffer. |
| 48 | void YCbCr422(NvBufSurface *nvbuf_surf, uint8_t *data_pointer) { |
| 49 | CHECK_EQ(nvbuf_surf->surfaceList->planeParams.width[0], |
| 50 | nvbuf_surf->surfaceList->planeParams.width[1] * 2); |
| 51 | CHECK_EQ(nvbuf_surf->surfaceList->planeParams.height[0], |
| 52 | nvbuf_surf->surfaceList->planeParams.height[1]); |
| 53 | CHECK_EQ(nvbuf_surf->surfaceList->planeParams.pitch[0], 0x600u); |
| 54 | CHECK_EQ(nvbuf_surf->surfaceList->planeParams.pitch[1], 0x600u); |
| 55 | std::array<halide_dimension_t, 2> y_dimensions{{ |
| 56 | { |
| 57 | /*.min =*/0, |
| 58 | /*.extent =*/ |
| 59 | static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0]), |
| 60 | /*.stride =*/1, |
| 61 | /*.flags =*/0, |
| 62 | }, |
| 63 | { |
| 64 | /*.min = */ 0, |
| 65 | /*.extent =*/ |
| 66 | static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[0]), |
| 67 | /*.stride =*/ |
| 68 | static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.pitch[0]), |
| 69 | /*.flags =*/0, |
| 70 | }, |
| 71 | }}; |
| 72 | |
| 73 | Halide::Runtime::Buffer<uint8_t, 2> y( |
| 74 | reinterpret_cast<uint8_t *>(nvbuf_surf->surfaceList->mappedAddr.addr[0]), |
| 75 | y_dimensions.size(), y_dimensions.data()); |
| 76 | |
| 77 | std::array<halide_dimension_t, 3> cbcr_dimensions{ |
| 78 | {{ |
| 79 | /*.min =*/0, |
| 80 | /*.extent =*/ |
| 81 | static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[1]), |
| 82 | /*.stride =*/2, |
| 83 | /*.flags =*/0, |
| 84 | }, |
| 85 | { |
| 86 | /*.min =*/0, |
| 87 | /*.extent =*/ |
| 88 | static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[1]), |
| 89 | /*.stride =*/ |
| 90 | static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.pitch[1]), |
| 91 | /*.flags =*/0, |
| 92 | }, |
| 93 | { |
| 94 | /*.min =*/0, |
| 95 | /*.extent =*/2, |
| 96 | /*.stride =*/1, |
| 97 | /*.flags =*/0, |
| 98 | }}}; |
| 99 | |
| 100 | Halide::Runtime::Buffer<uint8_t, 3> cbcr( |
| 101 | reinterpret_cast<uint8_t *>(nvbuf_surf->surfaceList->mappedAddr.addr[1]), |
| 102 | cbcr_dimensions.size(), cbcr_dimensions.data()); |
| 103 | |
| 104 | std::array<halide_dimension_t, 3> output_dimensions{ |
| 105 | {{ |
| 106 | /*.min =*/0, |
| 107 | /*.extent =*/ |
| 108 | static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0]), |
| 109 | /*.stride =*/2, |
| 110 | /*.flags =*/0, |
| 111 | }, |
| 112 | { |
| 113 | /*.min =*/0, |
| 114 | /*.extent =*/ |
| 115 | static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[0]), |
| 116 | /*.stride =*/ |
| 117 | static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0] * |
| 118 | 2), |
| 119 | /*.flags =*/0, |
| 120 | }, |
| 121 | { |
| 122 | /*.min =*/0, |
| 123 | /*.extent =*/2, |
| 124 | /*.stride =*/1, |
| 125 | /*.flags =*/0, |
| 126 | }}}; |
| 127 | |
| 128 | Halide::Runtime::Buffer<uint8_t, 3> output( |
| 129 | data_pointer, output_dimensions.size(), output_dimensions.data()); |
| 130 | ycbcr422(y, cbcr, output); |
| 131 | } |
| 132 | |
| 133 | // Helper class to tie a NvBufSurface to an Argus::Buffer. |
| 134 | class DmaBuffer { |
| 135 | public: |
| 136 | // Creates a DmaBuffer. This is a static method so we can make sure it ends |
| 137 | // up as a unique_ptr so the pointer value doesn't change and break all the |
| 138 | // links. |
| 139 | static std::unique_ptr<DmaBuffer> Create( |
| 140 | const Argus::Size2D<uint32_t> &size, NvBufSurfaceColorFormat color_format, |
| 141 | NvBufSurfaceLayout layout = NVBUF_LAYOUT_PITCH) { |
| 142 | std::unique_ptr<DmaBuffer> buffer(new DmaBuffer()); |
| 143 | |
| 144 | NvBufSurfaceAllocateParams params; |
| 145 | |
| 146 | params.memtag = NvBufSurfaceTag_CAMERA; |
| 147 | params.params.width = size.width(); |
| 148 | params.params.height = size.height(); |
| 149 | params.params.colorFormat = color_format; |
| 150 | params.params.layout = layout; |
| 151 | params.params.isContiguous = true; |
| 152 | params.disablePitchPadding = true; |
| 153 | params.params.memType = NVBUF_MEM_SURFACE_ARRAY; |
| 154 | |
| 155 | NvBufSurface *nvbuf_surf = 0; |
| 156 | CHECK_EQ(NvBufSurfaceAllocate(&nvbuf_surf, 1, ¶ms), 0); |
| 157 | buffer->fd_ = nvbuf_surf->surfaceList[0].bufferDesc; |
| 158 | |
| 159 | return buffer; |
| 160 | } |
| 161 | |
| 162 | // Extracts the DmaBuffer from the Argus::Buffer. |
| 163 | static DmaBuffer *FromArgusBuffer(Argus::Buffer *buffer) { |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 164 | aos::ScopedNotRealtime nrt; |
Austin Schuh | 6891344 | 2023-10-20 23:16:03 -0700 | [diff] [blame] | 165 | Argus::IBuffer *i_buffer = Argus::interface_cast<Argus::IBuffer>(buffer); |
| 166 | const DmaBuffer *dmabuf = |
| 167 | static_cast<const DmaBuffer *>(i_buffer->getClientData()); |
| 168 | |
| 169 | return const_cast<DmaBuffer *>(dmabuf); |
| 170 | } |
| 171 | |
| 172 | // Returns the DMA buffer handle. |
| 173 | int fd() const { return fd_; } |
| 174 | |
| 175 | // Sets and gets the Argus::Buffer pointer. |
| 176 | void set_argus_buffer(Argus::Buffer *buffer) { buffer_ = buffer; } |
| 177 | Argus::Buffer *get_argus_buffer() const { return buffer_; } |
| 178 | |
| 179 | virtual ~DmaBuffer() { |
| 180 | if (fd_ >= 0) { |
| 181 | NvBufSurface *nvbuf_surf = 0; |
| 182 | NvBufSurfaceFromFd(fd_, (void **)(&nvbuf_surf)); |
| 183 | if (nvbuf_surf != NULL) { |
| 184 | NvBufSurfaceDestroy(nvbuf_surf); |
| 185 | } |
| 186 | } |
| 187 | } |
| 188 | |
| 189 | private: |
| 190 | // Private to force people to use Create() above. |
| 191 | DmaBuffer() {} |
| 192 | |
| 193 | int fd_ = -1; |
| 194 | Argus::Buffer *buffer_ = nullptr; |
| 195 | }; |
| 196 | |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 197 | // Class to make it easy to interact with an Argus camera inside an event loop. |
| 198 | class ArgusCamera { |
| 199 | public: |
| 200 | ArgusCamera(Argus::ICameraProvider *i_camera_provider, |
| 201 | Argus::CameraDevice *camera_device) { |
| 202 | std::vector<Argus::SensorMode *> sensor_modes; |
| 203 | Argus::ICameraProperties *i_camera_properties = |
| 204 | Argus::interface_cast<Argus::ICameraProperties>(camera_device); |
| 205 | CHECK(i_camera_properties) << "Failed to get ICameraProperties Interface"; |
| 206 | // Get available Sensor Modes |
| 207 | i_camera_properties->getAllSensorModes(&sensor_modes); |
| 208 | LOG(INFO) << "Found " << sensor_modes.size() << " modes"; |
| 209 | |
| 210 | for (Argus::SensorMode *mode : sensor_modes) { |
| 211 | Argus::ISensorMode *imode = |
| 212 | Argus::interface_cast<Argus::ISensorMode>(mode); |
| 213 | LOG(INFO) << imode->getResolution().width() << " x " |
| 214 | << imode->getResolution().height(); |
| 215 | LOG(INFO) << "type " << imode->getSensorModeType().getName(); |
| 216 | LOG(INFO) << "exposure min " << imode->getExposureTimeRange().min(); |
| 217 | LOG(INFO) << "exposure max " << imode->getExposureTimeRange().max(); |
| 218 | } |
| 219 | CHECK_GT(sensor_modes.size(), 0u); |
| 220 | |
| 221 | Argus::ISensorMode *i_sensor_mode = |
| 222 | Argus::interface_cast<Argus::ISensorMode>(sensor_modes[FLAGS_mode]); |
| 223 | CHECK(i_sensor_mode); |
| 224 | |
| 225 | { |
| 226 | auto range = i_sensor_mode->getFrameDurationRange(); |
| 227 | LOG(INFO) << "Min: " << range.min() << ", " << range.max(); |
| 228 | LOG(INFO) << "type " << i_sensor_mode->getSensorModeType().getName(); |
| 229 | } |
| 230 | |
| 231 | // Create the capture session using the first device and get the core |
| 232 | // interface. |
| 233 | capture_session_.reset( |
| 234 | i_camera_provider->createCaptureSession(camera_device)); |
| 235 | i_capture_session_ = |
| 236 | Argus::interface_cast<Argus::ICaptureSession>(capture_session_); |
| 237 | CHECK(i_capture_session_); |
| 238 | |
| 239 | CHECK_NE(egl_display_, EGL_NO_DISPLAY) << ": Failed to open display"; |
| 240 | |
| 241 | // Create the OutputStream. |
| 242 | stream_settings_.reset(i_capture_session_->createOutputStreamSettings( |
| 243 | Argus::STREAM_TYPE_BUFFER)); |
| 244 | |
| 245 | Argus::IBufferOutputStreamSettings *i_buffer_output_stream_settings = |
| 246 | Argus::interface_cast<Argus::IBufferOutputStreamSettings>( |
| 247 | stream_settings_); |
| 248 | CHECK(i_buffer_output_stream_settings != nullptr); |
| 249 | i_buffer_output_stream_settings->setBufferType( |
| 250 | Argus::BUFFER_TYPE_EGL_IMAGE); |
| 251 | i_buffer_output_stream_settings->setMetadataEnable(true); |
| 252 | LOG(INFO) << "Type: " |
| 253 | << i_buffer_output_stream_settings->getBufferType().getName(); |
| 254 | |
| 255 | output_stream_.reset( |
| 256 | i_capture_session_->createOutputStream(stream_settings_.get())); |
Jim Ostrowski | 855b744 | 2024-01-20 18:03:09 -0800 | [diff] [blame] | 257 | LOG(INFO) << "Got image stream"; |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 258 | |
| 259 | i_buffer_output_stream_ = |
| 260 | Argus::interface_cast<Argus::IBufferOutputStream>(output_stream_); |
| 261 | CHECK(i_buffer_output_stream_ != nullptr); |
| 262 | |
| 263 | // Build the DmaBuffers |
| 264 | for (size_t i = 0; i < native_buffers_.size(); ++i) { |
| 265 | native_buffers_[i] = DmaBuffer::Create( |
| 266 | i_sensor_mode->getResolution(), |
| 267 | static_cast<NvBufSurfaceColorFormat>(FLAGS_colorformat), |
| 268 | NVBUF_LAYOUT_PITCH); |
| 269 | } |
| 270 | |
| 271 | // Create EGLImages from the native buffers |
| 272 | for (size_t i = 0; i < egl_images_.size(); i++) { |
| 273 | int ret = 0; |
| 274 | |
| 275 | ret = NvBufSurfaceFromFd(native_buffers_[i]->fd(), (void **)(&surf_[i])); |
| 276 | CHECK(ret == 0) << ": NvBufSurfaceFromFd failed"; |
| 277 | |
| 278 | ret = NvBufSurfaceMapEglImage(surf_[i], 0); |
Tushar Pankaj | 6d5eab8 | 2024-01-13 13:29:04 -0800 | [diff] [blame] | 279 | // This check typically fails from having X forwarding enabled. |
| 280 | // Always call argus_camera without X forwarding. |
Jim Ostrowski | cb8b408 | 2024-01-21 02:23:46 -0800 | [diff] [blame] | 281 | CHECK(ret == 0) << ": NvBufSurfaceMapEglImage failed. Make sure X " |
| 282 | "forwarding is not enabled."; |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 283 | |
| 284 | egl_images_[i] = surf_[i]->surfaceList[0].mappedAddr.eglImage; |
| 285 | CHECK(egl_images_[i] != EGL_NO_IMAGE_KHR) |
| 286 | << ": Failed to create EGLImage"; |
| 287 | } |
| 288 | |
| 289 | // Create the BufferSettings object to configure Buffer creation. |
| 290 | buffer_settings_.reset(i_buffer_output_stream_->createBufferSettings()); |
| 291 | Argus::IEGLImageBufferSettings *i_buffer_settings = |
| 292 | Argus::interface_cast<Argus::IEGLImageBufferSettings>(buffer_settings_); |
| 293 | CHECK(i_buffer_settings); |
| 294 | |
| 295 | // Create the Buffers for each EGLImage (and release to the stream for |
| 296 | // initial capture use) |
| 297 | for (size_t i = 0; i < buffers_.size(); i++) { |
| 298 | i_buffer_settings->setEGLImage(egl_images_[i]); |
| 299 | i_buffer_settings->setEGLDisplay(egl_display_); |
| 300 | buffers_[i].reset( |
| 301 | i_buffer_output_stream_->createBuffer(buffer_settings_.get())); |
| 302 | Argus::IBuffer *i_buffer = |
| 303 | Argus::interface_cast<Argus::IBuffer>(buffers_[i]); |
| 304 | |
| 305 | // Ties Argus::Buffer and DmaBuffer together. |
| 306 | i_buffer->setClientData(native_buffers_[i].get()); |
| 307 | native_buffers_[i]->set_argus_buffer(buffers_[i].get()); |
| 308 | |
| 309 | CHECK(Argus::interface_cast<Argus::IEGLImageBuffer>(buffers_[i]) != |
| 310 | nullptr) |
| 311 | << ": Failed to create Buffer"; |
| 312 | |
| 313 | CHECK_EQ(i_buffer_output_stream_->releaseBuffer(buffers_[i].get()), |
| 314 | Argus::STATUS_OK) |
| 315 | << "Failed to release Buffer for capture use"; |
| 316 | } |
| 317 | |
| 318 | request_.reset(i_capture_session_->createRequest()); |
| 319 | Argus::IRequest *i_request = |
| 320 | Argus::interface_cast<Argus::IRequest>(request_); |
| 321 | CHECK(i_request); |
| 322 | |
| 323 | Argus::IAutoControlSettings *i_auto_control_settings = |
| 324 | Argus::interface_cast<Argus::IAutoControlSettings>( |
| 325 | i_request->getAutoControlSettings()); |
| 326 | CHECK(i_auto_control_settings != nullptr); |
| 327 | i_auto_control_settings->setAwbMode(Argus::AWB_MODE_OFF); |
| 328 | |
| 329 | i_auto_control_settings->setAeLock(false); |
| 330 | Argus::Range<float> isp_digital_gain_range; |
| 331 | isp_digital_gain_range.min() = 1; |
| 332 | isp_digital_gain_range.max() = 1; |
| 333 | i_auto_control_settings->setIspDigitalGainRange(isp_digital_gain_range); |
| 334 | |
| 335 | Argus::IEdgeEnhanceSettings *i_ee_settings = |
| 336 | Argus::interface_cast<Argus::IEdgeEnhanceSettings>(request_); |
| 337 | CHECK(i_ee_settings != nullptr); |
| 338 | |
| 339 | i_ee_settings->setEdgeEnhanceStrength(0); |
| 340 | |
| 341 | i_request->enableOutputStream(output_stream_.get()); |
| 342 | |
| 343 | Argus::ISourceSettings *i_source_settings = |
| 344 | Argus::interface_cast<Argus::ISourceSettings>( |
| 345 | i_request->getSourceSettings()); |
| 346 | CHECK(i_source_settings != nullptr); |
| 347 | |
| 348 | i_source_settings->setFrameDurationRange( |
| 349 | i_sensor_mode->getFrameDurationRange().min()); |
| 350 | i_source_settings->setSensorMode(sensor_modes[FLAGS_mode]); |
| 351 | |
| 352 | Argus::Range<float> sensor_mode_analog_gain_range; |
| 353 | sensor_mode_analog_gain_range.min() = FLAGS_gain; |
| 354 | sensor_mode_analog_gain_range.max() = FLAGS_gain; |
| 355 | i_source_settings->setGainRange(sensor_mode_analog_gain_range); |
| 356 | |
| 357 | Argus::Range<uint64_t> limit_exposure_time_range; |
| 358 | limit_exposure_time_range.min() = FLAGS_exposure; |
| 359 | limit_exposure_time_range.max() = FLAGS_exposure; |
| 360 | i_source_settings->setExposureTimeRange(limit_exposure_time_range); |
| 361 | } |
| 362 | |
| 363 | void Start() { |
| 364 | if (i_capture_session_->repeat(request_.get()) != Argus::STATUS_OK) { |
| 365 | LOG(ERROR) << "Failed to submit repeat"; |
| 366 | } |
| 367 | |
| 368 | LOG(INFO) << "Session submitted"; |
| 369 | } |
| 370 | |
| 371 | // Class to manage an image buffer and return it when we are done. |
| 372 | class MappedBuffer { |
| 373 | public: |
| 374 | MappedBuffer(Argus::IBufferOutputStream *i_buffer_output_stream, |
| 375 | Argus::Buffer *buffer) |
| 376 | : i_buffer_output_stream_(i_buffer_output_stream), buffer_(buffer) { |
| 377 | if (buffer_ == nullptr) { |
| 378 | return; |
| 379 | } |
| 380 | |
| 381 | start_time_ = aos::monotonic_clock::now(); |
| 382 | |
| 383 | dmabuf_ = DmaBuffer::FromArgusBuffer(buffer_); |
| 384 | |
| 385 | int dmabuf_fd = dmabuf_->fd(); |
| 386 | |
| 387 | CHECK_EQ(NvBufSurfaceFromFd(dmabuf_fd, (void **)(&nvbuf_surf_)), 0); |
| 388 | |
| 389 | CHECK_EQ(NvBufSurfaceMap(nvbuf_surf_, -1, -1, NVBUF_MAP_READ), 0); |
| 390 | VLOG(1) << "Mapped"; |
| 391 | NvBufSurfaceSyncForCpu(nvbuf_surf_, -1, -1); |
| 392 | |
| 393 | VLOG(1) << "Planes " << nvbuf_surf_->surfaceList->planeParams.num_planes |
| 394 | << " colorFormat " << nvbuf_surf_->surfaceList->colorFormat; |
| 395 | for (size_t i = 0; i < nvbuf_surf_->surfaceList->planeParams.num_planes; |
| 396 | ++i) { |
| 397 | VLOG(1) << "Address " |
| 398 | << static_cast<void *>( |
| 399 | nvbuf_surf_->surfaceList->mappedAddr.addr[i]) |
| 400 | << ", pitch " << nvbuf_surf_->surfaceList->planeParams.pitch[i] |
| 401 | << " height " << nvbuf_surf_->surfaceList->planeParams.height[i] |
| 402 | << " width " << nvbuf_surf_->surfaceList->planeParams.width[i] |
| 403 | << " bytes per pixel " |
| 404 | << nvbuf_surf_->surfaceList->planeParams.bytesPerPix[i]; |
| 405 | } |
| 406 | CHECK_EQ(nvbuf_surf_->surfaceList->planeParams.width[0], |
| 407 | static_cast<size_t>(FLAGS_width)); |
| 408 | CHECK_EQ(nvbuf_surf_->surfaceList->planeParams.height[0], |
| 409 | static_cast<size_t>(FLAGS_height)); |
| 410 | } |
| 411 | MappedBuffer(const MappedBuffer &other) = delete; |
| 412 | MappedBuffer &operator=(const MappedBuffer &other) = delete; |
| 413 | MappedBuffer(MappedBuffer &&other) noexcept { |
| 414 | buffer_ = other.buffer_; |
| 415 | dmabuf_ = other.dmabuf_; |
| 416 | nvbuf_surf_ = other.nvbuf_surf_; |
| 417 | i_buffer_output_stream_ = other.i_buffer_output_stream_; |
| 418 | start_time_ = other.start_time_; |
| 419 | other.buffer_ = nullptr; |
| 420 | other.dmabuf_ = nullptr; |
| 421 | other.nvbuf_surf_ = nullptr; |
| 422 | } |
| 423 | |
| 424 | NvBufSurface *nvbuf_surf() { return nvbuf_surf_; } |
| 425 | |
| 426 | const Argus::ICaptureMetadata *imetadata() { |
| 427 | Argus::IBuffer *ibuffer = Argus::interface_cast<Argus::IBuffer>(buffer_); |
| 428 | CHECK(ibuffer != nullptr); |
| 429 | |
| 430 | aos::ScopedNotRealtime nrt; |
| 431 | const Argus::CaptureMetadata *metadata = ibuffer->getMetadata(); |
| 432 | const Argus::ICaptureMetadata *imetadata = |
| 433 | Argus::interface_cast<const Argus::ICaptureMetadata>(metadata); |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 434 | return imetadata; |
| 435 | } |
| 436 | |
| 437 | aos::monotonic_clock::time_point start_time() const { return start_time_; } |
| 438 | |
| 439 | virtual ~MappedBuffer() { |
| 440 | if (buffer_ != nullptr) { |
| 441 | CHECK_EQ(NvBufSurfaceUnMap(nvbuf_surf_, -1, -1), 0); |
| 442 | aos::ScopedNotRealtime nrt; |
| 443 | i_buffer_output_stream_->releaseBuffer(buffer_); |
| 444 | } |
| 445 | } |
| 446 | |
| 447 | private: |
| 448 | Argus::IBufferOutputStream *i_buffer_output_stream_; |
| 449 | |
| 450 | Argus::Buffer *buffer_; |
| 451 | |
| 452 | DmaBuffer *dmabuf_ = nullptr; |
| 453 | |
| 454 | NvBufSurface *nvbuf_surf_ = nullptr; |
| 455 | |
| 456 | aos::monotonic_clock::time_point start_time_; |
| 457 | }; |
| 458 | |
| 459 | MappedBuffer NextImageBlocking() { |
| 460 | VLOG(1) << "Going for frame"; |
| 461 | |
| 462 | Argus::Buffer *buffer; |
| 463 | { |
| 464 | Argus::Status status; |
| 465 | aos::ScopedNotRealtime nrt; |
| 466 | |
| 467 | buffer = i_buffer_output_stream_->acquireBuffer( |
| 468 | std::chrono::nanoseconds(std::chrono::seconds(5)).count(), &status); |
| 469 | |
| 470 | if (status == Argus::STATUS_END_OF_STREAM) { |
| 471 | return MappedBuffer(nullptr, nullptr); |
| 472 | } |
| 473 | } |
| 474 | |
| 475 | // const aos::monotonic_clock::time_point now = aos::monotonic_clock::now(); |
| 476 | return MappedBuffer(i_buffer_output_stream_, buffer); |
| 477 | } |
| 478 | |
| 479 | void Stop() { |
| 480 | i_capture_session_->stopRepeat(); |
| 481 | i_buffer_output_stream_->endOfStream(); |
| 482 | i_capture_session_->waitForIdle(); |
| 483 | } |
| 484 | |
| 485 | virtual ~ArgusCamera() { |
| 486 | output_stream_.reset(); |
| 487 | |
| 488 | for (uint32_t i = 0; i < surf_.size(); i++) { |
| 489 | NvBufSurfaceUnMapEglImage(surf_[i], 0); |
| 490 | } |
| 491 | eglTerminate(egl_display_); |
| 492 | } |
| 493 | |
| 494 | private: |
| 495 | Argus::UniqueObj<Argus::CaptureSession> capture_session_; |
| 496 | Argus::ICaptureSession *i_capture_session_; |
| 497 | |
| 498 | EGLDisplay egl_display_ = eglGetDisplay(EGL_DEFAULT_DISPLAY); |
| 499 | |
| 500 | Argus::UniqueObj<Argus::OutputStreamSettings> stream_settings_; |
| 501 | |
| 502 | Argus::UniqueObj<Argus::OutputStream> output_stream_; |
| 503 | Argus::IBufferOutputStream *i_buffer_output_stream_; |
| 504 | |
| 505 | std::array<std::unique_ptr<DmaBuffer>, 10> native_buffers_; |
| 506 | |
| 507 | std::array<NvBufSurface *, 10> surf_; |
| 508 | |
| 509 | std::array<EGLImageKHR, 10> egl_images_; |
| 510 | |
| 511 | Argus::UniqueObj<Argus::BufferSettings> buffer_settings_; |
| 512 | |
| 513 | std::array<Argus::UniqueObj<Argus::Buffer>, 10> buffers_; |
| 514 | |
| 515 | Argus::UniqueObj<Argus::Request> request_; |
| 516 | }; |
| 517 | |
Austin Schuh | 6891344 | 2023-10-20 23:16:03 -0700 | [diff] [blame] | 518 | int Main() { |
| 519 | aos::FlatbufferDetachedBuffer<aos::Configuration> config = |
| 520 | aos::configuration::ReadConfig(FLAGS_config); |
| 521 | |
| 522 | aos::ShmEventLoop event_loop(&config.message()); |
| 523 | |
| 524 | event_loop.SetRuntimeRealtimePriority(55); |
| 525 | |
| 526 | aos::Sender<frc971::vision::CameraImage> sender = |
| 527 | event_loop.MakeSender<frc971::vision::CameraImage>(FLAGS_channel); |
| 528 | |
| 529 | LOG(INFO) << "Started"; |
| 530 | // Initialize the Argus camera provider. |
| 531 | Argus::UniqueObj<Argus::CameraProvider> camera_provider; |
| 532 | camera_provider = |
| 533 | Argus::UniqueObj<Argus::CameraProvider>(Argus::CameraProvider::create()); |
| 534 | |
| 535 | // Get the ICameraProvider interface from the global CameraProvider |
| 536 | Argus::ICameraProvider *i_camera_provider = |
| 537 | Argus::interface_cast<Argus::ICameraProvider>(camera_provider); |
| 538 | if (!i_camera_provider) { |
| 539 | ORIGINATE_ERROR("Failed to get ICameraProvider interface"); |
| 540 | } |
| 541 | |
| 542 | // Get the camera devices. |
| 543 | std::vector<Argus::CameraDevice *> camera_devices; |
| 544 | i_camera_provider->getCameraDevices(&camera_devices); |
| 545 | if (camera_devices.size() == 0) { |
| 546 | ORIGINATE_ERROR("there are %d cameras", (unsigned)camera_devices.size()); |
| 547 | } |
| 548 | |
| 549 | LOG(INFO) << "Found " << camera_devices.size() << " cameras"; |
| 550 | for (Argus::CameraDevice *camera : camera_devices) { |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 551 | Argus::ICameraProperties *i_camera_properties = |
Austin Schuh | 6891344 | 2023-10-20 23:16:03 -0700 | [diff] [blame] | 552 | Argus::interface_cast<Argus::ICameraProperties>(camera); |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 553 | LOG(INFO) << "Camera " << i_camera_properties->getModelName(); |
Austin Schuh | 6891344 | 2023-10-20 23:16:03 -0700 | [diff] [blame] | 554 | } |
| 555 | |
| 556 | { |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 557 | ArgusCamera camera(i_camera_provider, camera_devices[FLAGS_camera]); |
| 558 | |
| 559 | aos::monotonic_clock::time_point last_time = aos::monotonic_clock::epoch(); |
| 560 | |
| 561 | aos::TimerHandler *timer = event_loop.AddTimer([&camera, &event_loop, |
| 562 | &sender, &last_time, |
| 563 | &timer]() { |
| 564 | ArgusCamera::MappedBuffer buffer = camera.NextImageBlocking(); |
| 565 | |
| 566 | if (buffer.nvbuf_surf() == nullptr) { |
| 567 | // TODO(austin): Control-C isn't working for some reason, debug it... |
Maxwell Henderson | ae74fa5 | 2024-02-18 11:11:59 -0800 | [diff] [blame] | 568 | // We're restarting nvargus-daemon here because if we exit like this its |
| 569 | // likely that nvargus-daemon has run into an error that it can't |
| 570 | // recover from. Which means even if this program restarts it can't get |
| 571 | // new camera images. |
| 572 | CHECK_EQ(std::system("sudo systemctl restart nvargus-daemon.service"), |
| 573 | 0); |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 574 | event_loop.Exit(); |
| 575 | return; |
| 576 | } |
| 577 | |
| 578 | const Argus::ICaptureMetadata *imetadata = buffer.imetadata(); |
| 579 | |
Tushar Pankaj | b39cffc | 2024-01-13 19:05:43 -0800 | [diff] [blame] | 580 | if (imetadata) { |
| 581 | aos::Sender<frc971::vision::CameraImage>::Builder builder = |
| 582 | sender.MakeBuilder(); |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 583 | |
Tushar Pankaj | b39cffc | 2024-01-13 19:05:43 -0800 | [diff] [blame] | 584 | uint8_t *data_pointer = nullptr; |
| 585 | builder.fbb()->StartIndeterminateVector(FLAGS_width * FLAGS_height * 2, |
| 586 | 1, 64, &data_pointer); |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 587 | |
Tushar Pankaj | b39cffc | 2024-01-13 19:05:43 -0800 | [diff] [blame] | 588 | YCbCr422(buffer.nvbuf_surf(), data_pointer); |
| 589 | flatbuffers::Offset<flatbuffers::Vector<uint8_t>> data_offset = |
| 590 | builder.fbb()->EndIndeterminateVector( |
| 591 | FLAGS_width * FLAGS_height * 2, 1); |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 592 | |
Tushar Pankaj | b39cffc | 2024-01-13 19:05:43 -0800 | [diff] [blame] | 593 | auto image_builder = builder.MakeBuilder<frc971::vision::CameraImage>(); |
| 594 | image_builder.add_data(data_offset); |
| 595 | image_builder.add_rows(FLAGS_height); |
| 596 | image_builder.add_cols(FLAGS_width); |
| 597 | { |
| 598 | aos::ScopedNotRealtime nrt; |
| 599 | image_builder.add_monotonic_timestamp_ns( |
| 600 | imetadata->getSensorTimestamp()); |
| 601 | } |
| 602 | builder.CheckOk(builder.Send(image_builder.Finish())); |
| 603 | |
| 604 | const aos::monotonic_clock::time_point after_send = |
| 605 | aos::monotonic_clock::now(); |
| 606 | |
| 607 | VLOG(1) |
| 608 | << "Got " << imetadata->getCaptureId() << " delay " |
| 609 | << chrono::duration<double>( |
| 610 | chrono::nanoseconds( |
| 611 | (buffer.start_time().time_since_epoch().count() - |
| 612 | (imetadata->getSensorTimestamp() + |
| 613 | imetadata->getFrameReadoutTime())))) |
| 614 | .count() |
| 615 | << " mmap " |
| 616 | << chrono::duration<double>(after_send - buffer.start_time()) |
| 617 | .count() |
| 618 | << "sec dt " |
| 619 | << chrono::duration<double>(buffer.start_time() - last_time).count() |
| 620 | << "sec, exposure " << imetadata->getSensorExposureTime(); |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 621 | } |
Austin Schuh | 71a69c5 | 2024-01-01 21:47:17 -0800 | [diff] [blame] | 622 | |
| 623 | last_time = buffer.start_time(); |
| 624 | timer->Schedule(event_loop.monotonic_now()); |
| 625 | }); |
| 626 | |
| 627 | event_loop.OnRun([&event_loop, timer]() { |
| 628 | timer->Schedule(event_loop.monotonic_now()); |
| 629 | }); |
| 630 | |
| 631 | camera.Start(); |
| 632 | |
| 633 | event_loop.Run(); |
| 634 | LOG(INFO) << "Event loop shutting down"; |
| 635 | |
| 636 | camera.Stop(); |
Austin Schuh | 6891344 | 2023-10-20 23:16:03 -0700 | [diff] [blame] | 637 | } |
| 638 | |
Austin Schuh | 6891344 | 2023-10-20 23:16:03 -0700 | [diff] [blame] | 639 | return 0; |
| 640 | } |
| 641 | |
| 642 | }; // namespace frc971 |
| 643 | |
| 644 | int main(int argc, char **argv) { |
| 645 | aos::InitGoogle(&argc, &argv); |
| 646 | return frc971::Main(); |
| 647 | } |
| 648 | |
| 649 | // I tried every different format option. Here's what worked and didn't work. |
| 650 | // |
| 651 | // NVBUF_COLOR_FORMAT_RGB, |
| 652 | |
| 653 | // NVBUF_COLOR_FORMAT_YUYV, // Failed |
| 654 | // NVBUF_COLOR_FORMAT_NV24, // Works |
| 655 | // NVBUF_COLOR_FORMAT_UYVY, // Failed |
| 656 | // NVBUF_COLOR_FORMAT_YUV420, // Failed with error. |
| 657 | |
| 658 | // NVBUF_COLOR_FORMAT_GRAY8, // unsupported |
| 659 | // NVBUF_COLOR_FORMAT_YUV420, // unsupported |
| 660 | // NVBUF_COLOR_FORMAT_YVU420, // unsupported |
| 661 | |
| 662 | // NVBUF_COLOR_FORMAT_YUV420_ER, // unsupported |
| 663 | // NVBUF_COLOR_FORMAT_YVU420_ER, // unsupported |
| 664 | // |
| 665 | ///** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */ |
| 666 | // NVBUF_COLOR_FORMAT_NV12, // Works! pitch 2048 height 1080 width |
| 667 | // 1920 colorFormat 6 planes 2 bytes per pixel 1 delay 0.00203304 |
| 668 | // mmap 0.000340288sec dt 0.0166379sec |
| 669 | // |
| 670 | ///** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */ |
| 671 | // NVBUF_COLOR_FORMAT_NV12_ER, // Works! pitch 2048 height 1080 |
| 672 | // width 1920 colorFormat 7 planes 2 bytes per pixel 1 |
| 673 | ///** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */ |
| 674 | // NVBUF_COLOR_FORMAT_NV21, // Works! pitch 2048 height 1080 width |
| 675 | // 1920 colorFormat 8 planes 2 bytes per pixel 1 |
| 676 | ///** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */ |
| 677 | // NVBUF_COLOR_FORMAT_NV21_ER, // Works! pitch 2048 height 1080 |
| 678 | // width 1920 colorFormat 9 planes 2 bytes per pixel 1 |
| 679 | // |
| 680 | // |
| 681 | // NVBUF_COLOR_FORMAT_UYVY, // works with an error?!? |
| 682 | // NVBUF_COLOR_FORMAT_UYVY_ER, // unsupported 11 |
| 683 | // NVBUF_COLOR_FORMAT_VYUY, // unsupported 12 |
| 684 | // NVBUF_COLOR_FORMAT_VYUY_ER, // unsupported 13 |
| 685 | // NVBUF_COLOR_FORMAT_YUYV, // unsupported 14 |
| 686 | // NVBUF_COLOR_FORMAT_YUYV_ER, // unsupported 15 |
| 687 | // NVBUF_COLOR_FORMAT_YVYU, // unsupported 16 |
| 688 | // NVBUF_COLOR_FORMAT_YVYU_ER, // unsupported 17 |
| 689 | // NVBUF_COLOR_FORMAT_YUV444, // unsupported 18 |
| 690 | // NVBUF_COLOR_FORMAT_RGBA, // unsupported 19 |
| 691 | // NVBUF_COLOR_FORMAT_BGRA, // unsupported 20 |
| 692 | // NVBUF_COLOR_FORMAT_ARGB, // unsupported 21 |
| 693 | // NVBUF_COLOR_FORMAT_ABGR, // unsupported 22 |
| 694 | // NVBUF_COLOR_FORMAT_RGBx, // unsupported 23 |
| 695 | // NVBUF_COLOR_FORMAT_BGRx, // unsupported 24 |
| 696 | // NVBUF_COLOR_FORMAT_xRGB, // unsupported 25 |
| 697 | // NVBUF_COLOR_FORMAT_xBGR, // unsupported 26 |
| 698 | // NVBUF_COLOR_FORMAT_RGB, // unsupported 27 |
| 699 | // NVBUF_COLOR_FORMAT_BGR, // unsupported 28 |
| 700 | // NVBUF_COLOR_FORMAT_NV12_10LE, // unsupported 29 |
| 701 | // NVBUF_COLOR_FORMAT_NV12_12LE, // unsupported 30 |
| 702 | // NVBUF_COLOR_FORMAT_YUV420_709, // unsupported 31 |
| 703 | // NVBUF_COLOR_FORMAT_YUV420_709_ER, // unsupported 32 |
| 704 | // NVBUF_COLOR_FORMAT_NV12_709, // works pitch 2048 height 1080 |
| 705 | // width 1920 colorFormat 33 planes 2 bytes per pixel 1 |
| 706 | // NVBUF_COLOR_FORMAT_NV12_709_ER, // works pitch 2048 height 1080 |
| 707 | // width 1920 colorFormat 34 planes 2 bytes per pixel 1 |
| 708 | // NVBUF_COLOR_FORMAT_YUV420_2020, // unsupported 35 |
| 709 | // NVBUF_COLOR_FORMAT_NV12_2020, // unsupported 36 |
| 710 | // NVBUF_COLOR_FORMAT_NV12_10LE_ER, // unsupported 37 |
| 711 | // NVBUF_COLOR_FORMAT_NV12_10LE_709, // unsupported 38 |
| 712 | // NVBUF_COLOR_FORMAT_NV12_10LE_709_ER, // unsupported 39 |
| 713 | // NVBUF_COLOR_FORMAT_NV12_10LE_2020, // unsupported 40 |
| 714 | // NVBUF_COLOR_FORMAT_SIGNED_R16G16, // unsupported 41 |
| 715 | // NVBUF_COLOR_FORMAT_R8_G8_B8, // unsupported 42 |
| 716 | // NVBUF_COLOR_FORMAT_B8_G8_R8, // unsupported 43 |
| 717 | // NVBUF_COLOR_FORMAT_R32F_G32F_B32F, // unsupported 44 |
| 718 | // NVBUF_COLOR_FORMAT_B32F_G32F_R32F, // unsupported 45 |
| 719 | // NVBUF_COLOR_FORMAT_YUV422, // unsupported 46 |
| 720 | // NVBUF_COLOR_FORMAT_NV21_10LE, // unsupported 47 |
| 721 | // NVBUF_COLOR_FORMAT_NV21_12LE, // unsupported 48 |
| 722 | // NVBUF_COLOR_FORMAT_NV12_12LE_2020, // unsupported 49 |
| 723 | ///** Specifies BT.601 colorspace - Y/CbCr 4:2:2 multi-planar. */ |
| 724 | // NVBUF_COLOR_FORMAT_NV16, // works pitch 2048 height 1080 width |
| 725 | // 1920 colorFormat 50 planes 2 bytes per pixel 1 |
| 726 | // NVBUF_COLOR_FORMAT_NV16_10LE, // unsupported 51 |
| 727 | ///** Specifies BT.601 colorspace - Y/CbCr 4:4:4 multi-planar. */ |
| 728 | // NVBUF_COLOR_FORMAT_NV24, // works pitch 2048 height 1080 |
| 729 | // width 1920 colorFormat 52 planes 2 bytes per pixel 1 |
| 730 | // NVBUF_COLOR_FORMAT_NV24_10LE, // unsupported 53 |
| 731 | // |
| 732 | // NVBUF_COLOR_FORMAT_NV16_ER, // works pitch 2048 height 1080 |
| 733 | // width 1920 colorFormat 54 planes 2 bytes per pixel 1 |
| 734 | // NVBUF_COLOR_FORMAT_NV24_ER, // works pitch 2048 height 1080 |
| 735 | // width 1920 colorFormat 55 planes 2 bytes per pixel 1 |
| 736 | // NVBUF_COLOR_FORMAT_NV16_709, // unsupported 56 |
| 737 | // NVBUF_COLOR_FORMAT_NV24_709, // unsupported 57 |
| 738 | // NVBUF_COLOR_FORMAT_NV16_709_ER, // unsupported 58 |
| 739 | // NVBUF_COLOR_FORMAT_NV24_709_ER, // unsupported 59 |
| 740 | // NVBUF_COLOR_FORMAT_NV24_10LE_709, // unsupported 60 |
| 741 | // NVBUF_COLOR_FORMAT_NV24_10LE_709_ER, // unsupported 61 |
| 742 | // NVBUF_COLOR_FORMAT_NV24_10LE_2020, // unsupported 62 |
| 743 | // NVBUF_COLOR_FORMAT_NV24_12LE_2020, // unsupported 63 |
| 744 | // NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_709, // unsupported 64 |
| 745 | // NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_2020, // unsupported 65 |
| 746 | // NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_709, // unsupported 66 |
| 747 | // NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_2020, // unsupported 67 |
| 748 | // NVBUF_COLOR_FORMAT_A32, // unsupported 68 |
| 749 | // NVBUF_COLOR_FORMAT_UYVP, // unsupported 69 |
| 750 | // NVBUF_COLOR_FORMAT_UYVP_ER // unsupported 70 |
| 751 | |
| 752 | // NVBUF_COLOR_FORMAT_ABGR, |
| 753 | // NVBUF_COLOR_FORMAT_ARGB, |