#include <chrono>
#include <filesystem>
#include <thread>

#include "glog/logging.h"

#include "Argus/Argus.h"
#include "Argus/EGLStream.h"
#include "Argus/Types.h"
#include "Argus/utils/Error.h"
#include "EGLStream/FrameConsumer.h"
#include "EGLStream/Image.h"
#include "EGLStream/NV/ImageNativeBuffer.h"
#include "HalideBuffer.h"
#include "HalideRuntime.h"
#include "aos/events/shm_event_loop.h"
#include "aos/init.h"
#include "aos/time/time.h"
#include "aos/util/file.h"
#include "frc971/orin/ycbcr.h"
#include "frc971/orin/ycbcr422.h"
#include "frc971/vision/vision_generated.h"
#include "nvbufsurface.h"

DEFINE_string(config, "aos_config.json", "Path to the config file to use.");

DEFINE_int32(colorformat, NVBUF_COLOR_FORMAT_NV16,
             "Color format to use. Don't change unless you know what you are "
             "doing.");
DEFINE_int32(camera, 0, "Camera number");
DEFINE_int32(mode, 0, "Mode number to use.");
DEFINE_int32(exposure, 200000, "Exposure time to use.");
DEFINE_int32(gain, 5, "Analog gain to use.");
DEFINE_int32(width, 1456, "Image width");
DEFINE_int32(height, 1088, "Image height");
DEFINE_double(rgain, 1.0, "R gain");
DEFINE_double(g1gain, 1.0, "G1 gain");
DEFINE_double(g2gain, 1.0, "G2 gain");
DEFINE_double(bgain, 1.0, "B gain");
DEFINE_string(channel, "/camera", "Channel name for the image.");

namespace frc971 {

namespace chrono = std::chrono;

// Converts a multi-planar 422 image into a single-plane 422 image at the
// provided memory location, suitable for putting in a flatbuffer.
void YCbCr422(NvBufSurface *nvbuf_surf, uint8_t *data_pointer) {
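  // The surface is assumed to be 4:2:2 semi-planar (e.g. NV16): plane 0 is
  // full-resolution Y, and plane 1 is interleaved CbCr at half the horizontal
  // resolution. The checks below encode that layout, along with the 0x600
  // (1536) byte pitch observed for the default 1456x1088 mode.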
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.width[0],
           nvbuf_surf->surfaceList->planeParams.width[1] * 2);
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.height[0],
           nvbuf_surf->surfaceList->planeParams.height[1]);
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.pitch[0], 0x600u);
  CHECK_EQ(nvbuf_surf->surfaceList->planeParams.pitch[1], 0x600u);
  std::array<halide_dimension_t, 2> y_dimensions{{
      {
          /*.min =*/0,
          /*.extent =*/
          static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0]),
          /*.stride =*/1,
          /*.flags =*/0,
      },
      {
          /*.min =*/0,
          /*.extent =*/
          static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[0]),
          /*.stride =*/
          static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.pitch[0]),
          /*.flags =*/0,
      },
  }};

  Halide::Runtime::Buffer<uint8_t, 2> y(
      reinterpret_cast<uint8_t *>(nvbuf_surf->surfaceList->mappedAddr.addr[0]),
      y_dimensions.size(), y_dimensions.data());
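  // The Halide buffers here wrap the mapped plane memory directly; strides
  // come straight from the NvBufSurface plane parameters and no pixel data is
  // copied until ycbcr422() runs.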

  std::array<halide_dimension_t, 3> cbcr_dimensions{
      {{
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[1]),
           /*.stride =*/2,
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[1]),
           /*.stride =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.pitch[1]),
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/2,
           /*.stride =*/1,
           /*.flags =*/0,
       }}};

  Halide::Runtime::Buffer<uint8_t, 3> cbcr(
      reinterpret_cast<uint8_t *>(nvbuf_surf->surfaceList->mappedAddr.addr[1]),
      cbcr_dimensions.size(), cbcr_dimensions.data());

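  // The output is a single interleaved 4:2:2 plane of width * height * 2
  // bytes, written straight into the flatbuffer's data vector via
  // data_pointer.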
  std::array<halide_dimension_t, 3> output_dimensions{
      {{
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0]),
           /*.stride =*/2,
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.height[0]),
           /*.stride =*/
           static_cast<int32_t>(nvbuf_surf->surfaceList->planeParams.width[0] *
                                2),
           /*.flags =*/0,
       },
       {
           /*.min =*/0,
           /*.extent =*/2,
           /*.stride =*/1,
           /*.flags =*/0,
       }}};

  Halide::Runtime::Buffer<uint8_t, 3> output(
      data_pointer, output_dimensions.size(), output_dimensions.data());
  ycbcr422(y, cbcr, output);
}

// Helper class to tie a NvBufSurface to an Argus::Buffer.
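// The lifecycle, as used in Main() below: Create() allocates the backing
// NvBufSurface and records its dmabuf fd, that fd is wrapped in an EGLImage
// and an Argus::Buffer, Argus::IBuffer::setClientData() points back at the
// DmaBuffer, and FromArgusBuffer() recovers it once a capture completes.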
class DmaBuffer {
 public:
  // Creates a DmaBuffer. This is a static factory so the buffer always ends
  // up owned by a unique_ptr; the address has to stay stable because the
  // Argus::Buffer holds a raw pointer back to it as client data.
  static std::unique_ptr<DmaBuffer> Create(
      const Argus::Size2D<uint32_t> &size, NvBufSurfaceColorFormat color_format,
      NvBufSurfaceLayout layout = NVBUF_LAYOUT_PITCH) {
    std::unique_ptr<DmaBuffer> buffer(new DmaBuffer());

    NvBufSurfaceAllocateParams params = {};

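    // Only the fields below are set explicitly; everything else is left
    // zero-initialized. This requests a pitch-linear, contiguous surface
    // tagged for camera use.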
    params.memtag = NvBufSurfaceTag_CAMERA;
    params.params.width = size.width();
    params.params.height = size.height();
    params.params.colorFormat = color_format;
    params.params.layout = layout;
    params.params.isContiguous = true;
    params.disablePitchPadding = true;
    params.params.memType = NVBUF_MEM_SURFACE_ARRAY;

    NvBufSurface *nvbuf_surf = nullptr;
    CHECK_EQ(NvBufSurfaceAllocate(&nvbuf_surf, 1, &params), 0);
    buffer->fd_ = nvbuf_surf->surfaceList[0].bufferDesc;

    return buffer;
  }

  // Extracts the DmaBuffer from the Argus::Buffer.
  static DmaBuffer *FromArgusBuffer(Argus::Buffer *buffer) {
    Argus::IBuffer *i_buffer = Argus::interface_cast<Argus::IBuffer>(buffer);
    const DmaBuffer *dmabuf =
        static_cast<const DmaBuffer *>(i_buffer->getClientData());

    return const_cast<DmaBuffer *>(dmabuf);
  }

  // Returns the DMA buffer handle.
  int fd() const { return fd_; }

  // Sets and gets the Argus::Buffer pointer.
  void set_argus_buffer(Argus::Buffer *buffer) { buffer_ = buffer; }
  Argus::Buffer *get_argus_buffer() const { return buffer_; }

  virtual ~DmaBuffer() {
    if (fd_ >= 0) {
      NvBufSurface *nvbuf_surf = nullptr;
      NvBufSurfaceFromFd(fd_, (void **)(&nvbuf_surf));
      if (nvbuf_surf != nullptr) {
        NvBufSurfaceDestroy(nvbuf_surf);
      }
    }
  }

 private:
  // Private to force people to use Create() above.
  DmaBuffer() {}

  int fd_ = -1;
  Argus::Buffer *buffer_ = nullptr;
};

int Main() {
  aos::FlatbufferDetachedBuffer<aos::Configuration> config =
      aos::configuration::ReadConfig(FLAGS_config);

  aos::ShmEventLoop event_loop(&config.message());

  event_loop.SetRuntimeRealtimePriority(55);

  aos::Sender<frc971::vision::CameraImage> sender =
      event_loop.MakeSender<frc971::vision::CameraImage>(FLAGS_channel);

  LOG(INFO) << "Started";
  // Initialize the Argus camera provider.
  Argus::UniqueObj<Argus::CameraProvider> camera_provider(
      Argus::CameraProvider::create());

  // Get the ICameraProvider interface from the global CameraProvider.
  Argus::ICameraProvider *i_camera_provider =
      Argus::interface_cast<Argus::ICameraProvider>(camera_provider);
  if (!i_camera_provider) {
    ORIGINATE_ERROR("Failed to get ICameraProvider interface");
  }

  // Get the camera devices.
  std::vector<Argus::CameraDevice *> camera_devices;
  i_camera_provider->getCameraDevices(&camera_devices);
  if (camera_devices.size() == 0) {
    ORIGINATE_ERROR("No cameras available");
  }

  LOG(INFO) << "Found " << camera_devices.size() << " cameras";
  for (Argus::CameraDevice *camera : camera_devices) {
    Argus::ICameraProperties *i_camera_properties =
        Argus::interface_cast<Argus::ICameraProperties>(camera);
    LOG(INFO) << "Camera " << i_camera_properties->getModelName();
  }

  std::vector<Argus::SensorMode *> sensor_modes;
  Argus::ICameraProperties *i_camera_properties =
      Argus::interface_cast<Argus::ICameraProperties>(
          camera_devices[FLAGS_camera]);
  if (!i_camera_properties) {
    ORIGINATE_ERROR("Failed to get ICameraProperties interface");
  }
  // Get the available sensor modes.
  i_camera_properties->getAllSensorModes(&sensor_modes);
  LOG(INFO) << "Found " << sensor_modes.size() << " modes";

  for (Argus::SensorMode *mode : sensor_modes) {
    Argus::ISensorMode *imode = Argus::interface_cast<Argus::ISensorMode>(mode);
    LOG(INFO) << imode->getResolution().width() << " x "
              << imode->getResolution().height();
    LOG(INFO) << "type " << imode->getSensorModeType().getName();
    LOG(INFO) << "exposure min " << imode->getExposureTimeRange().min();
    LOG(INFO) << "exposure max " << imode->getExposureTimeRange().max();
  }
  if (sensor_modes.empty()) {
    ORIGINATE_ERROR("No sensor modes are available");
  }

  Argus::ISensorMode *i_sensor_mode =
      Argus::interface_cast<Argus::ISensorMode>(sensor_modes[FLAGS_mode]);
  if (!i_sensor_mode) {
    ORIGINATE_ERROR("Failed to get SensorMode interface");
  }

  {
    auto range = i_sensor_mode->getFrameDurationRange();
    LOG(INFO) << "Frame duration min: " << range.min()
              << ", max: " << range.max();
    LOG(INFO) << "type " << i_sensor_mode->getSensorModeType().getName();
  }

  // Create the capture session using the selected device and get the core
  // interface.
  Argus::UniqueObj<Argus::CaptureSession> capture_session;
  capture_session.reset(
      i_camera_provider->createCaptureSession(camera_devices[FLAGS_camera]));
  Argus::ICaptureSession *i_capture_session =
      Argus::interface_cast<Argus::ICaptureSession>(capture_session);
  if (!i_capture_session) {
    ORIGINATE_ERROR("Failed to create CaptureSession");
  }

  EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
  CHECK_NE(egl_display, EGL_NO_DISPLAY) << ": Failed to open display";
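  // The default EGL display is what each EGLImage-backed stream buffer gets
  // bound to below via setEGLDisplay().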

  // Create the OutputStream.
  Argus::UniqueObj<Argus::OutputStreamSettings> stream_settings(
      i_capture_session->createOutputStreamSettings(Argus::STREAM_TYPE_BUFFER));

  Argus::IBufferOutputStreamSettings *i_buffer_output_stream_settings =
      Argus::interface_cast<Argus::IBufferOutputStreamSettings>(
          stream_settings);
  CHECK(i_buffer_output_stream_settings != nullptr);
  i_buffer_output_stream_settings->setBufferType(Argus::BUFFER_TYPE_EGL_IMAGE);
  i_buffer_output_stream_settings->setMetadataEnable(true);
  LOG(INFO) << "Type: "
            << i_buffer_output_stream_settings->getBufferType().getName();

  Argus::UniqueObj<Argus::OutputStream> output_stream(
      i_capture_session->createOutputStream(stream_settings.get()));
  LOG(INFO) << "Got stream";

  Argus::IBufferOutputStream *i_buffer_output_stream =
      Argus::interface_cast<Argus::IBufferOutputStream>(output_stream);
  CHECK(i_buffer_output_stream != nullptr);

  // Build the DmaBuffers.
  std::array<std::unique_ptr<DmaBuffer>, 10> native_buffers;
  for (size_t i = 0; i < native_buffers.size(); ++i) {
    native_buffers[i] = DmaBuffer::Create(
        i_sensor_mode->getResolution(),
        static_cast<NvBufSurfaceColorFormat>(FLAGS_colorformat),
        NVBUF_LAYOUT_PITCH);
  }

  std::array<NvBufSurface *, 10> surf;

  // Create EGLImages from the native buffers.
  std::array<EGLImageKHR, 10> egl_images;
  for (size_t i = 0; i < egl_images.size(); i++) {
    int ret = 0;

    ret = NvBufSurfaceFromFd(native_buffers[i]->fd(), (void **)(&surf[i]));
    CHECK(ret == 0) << ": NvBufSurfaceFromFd failed";

    ret = NvBufSurfaceMapEglImage(surf[i], 0);
    CHECK(ret == 0) << ": NvBufSurfaceMapEglImage failed";

    egl_images[i] = surf[i]->surfaceList[0].mappedAddr.eglImage;
    CHECK(egl_images[i] != EGL_NO_IMAGE_KHR) << ": Failed to create EGLImage";
  }

  // Create the BufferSettings object to configure Buffer creation.
  Argus::UniqueObj<Argus::BufferSettings> buffer_settings(
      i_buffer_output_stream->createBufferSettings());
  Argus::IEGLImageBufferSettings *i_buffer_settings =
      Argus::interface_cast<Argus::IEGLImageBufferSettings>(buffer_settings);
  if (!i_buffer_settings) {
    ORIGINATE_ERROR("Failed to create BufferSettings");
  }

  // Create the Buffers for each EGLImage (and release them to the stream for
  // initial capture use).
  std::array<Argus::UniqueObj<Argus::Buffer>, 10> buffers;
  for (size_t i = 0; i < buffers.size(); i++) {
    i_buffer_settings->setEGLImage(egl_images[i]);
    i_buffer_settings->setEGLDisplay(egl_display);
    buffers[i].reset(
        i_buffer_output_stream->createBuffer(buffer_settings.get()));
    Argus::IBuffer *i_buffer =
        Argus::interface_cast<Argus::IBuffer>(buffers[i]);

    // Ties the Argus::Buffer and DmaBuffer together.
    i_buffer->setClientData(native_buffers[i].get());
    native_buffers[i]->set_argus_buffer(buffers[i].get());

    CHECK(Argus::interface_cast<Argus::IEGLImageBuffer>(buffers[i]) != nullptr)
        << ": Failed to create Buffer";

    if (i_buffer_output_stream->releaseBuffer(buffers[i].get()) !=
        Argus::STATUS_OK) {
      ORIGINATE_ERROR("Failed to release Buffer for capture use");
    }
  }

  Argus::UniqueObj<Argus::Request> request(i_capture_session->createRequest());
  Argus::IRequest *i_request = Argus::interface_cast<Argus::IRequest>(request);
  CHECK(i_request);

  Argus::IAutoControlSettings *i_auto_control_settings =
      Argus::interface_cast<Argus::IAutoControlSettings>(
          i_request->getAutoControlSettings());
  CHECK(i_auto_control_settings != nullptr);
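  // Disable auto white balance and pin the ISP digital gain to 1.0 so the ISP
  // applies no automatic color or gain adjustment on top of the fixed sensor
  // settings configured below.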
  i_auto_control_settings->setAwbMode(Argus::AWB_MODE_OFF);

  i_auto_control_settings->setAeLock(false);
  Argus::Range<float> isp_digital_gain_range;
  isp_digital_gain_range.min() = 1;
  isp_digital_gain_range.max() = 1;
  i_auto_control_settings->setIspDigitalGainRange(isp_digital_gain_range);

  Argus::IEdgeEnhanceSettings *i_ee_settings =
      Argus::interface_cast<Argus::IEdgeEnhanceSettings>(request);
  CHECK(i_ee_settings != nullptr);

  i_ee_settings->setEdgeEnhanceStrength(0);

  i_request->enableOutputStream(output_stream.get());

  Argus::ISourceSettings *i_source_settings =
      Argus::interface_cast<Argus::ISourceSettings>(
          i_request->getSourceSettings());
  CHECK(i_source_settings != nullptr);

  i_source_settings->setFrameDurationRange(
      i_sensor_mode->getFrameDurationRange().min());
  i_source_settings->setSensorMode(sensor_modes[FLAGS_mode]);

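  // Setting min == max on the gain and exposure ranges pins them to the flag
  // values instead of letting auto-exposure pick something within a range.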
  Argus::Range<float> sensor_mode_analog_gain_range;
  sensor_mode_analog_gain_range.min() = FLAGS_gain;
  sensor_mode_analog_gain_range.max() = FLAGS_gain;
  i_source_settings->setGainRange(sensor_mode_analog_gain_range);

  Argus::Range<uint64_t> limit_exposure_time_range;
  limit_exposure_time_range.min() = FLAGS_exposure;
  limit_exposure_time_range.max() = FLAGS_exposure;
  i_source_settings->setExposureTimeRange(limit_exposure_time_range);

  if (i_capture_session->repeat(request.get()) != Argus::STATUS_OK) {
    LOG(ERROR) << "Failed to submit repeat";
  }

  LOG(INFO) << "Session submitted";

  // Run.
  //
  // TODO(austin): Use the event loop a bit better... That'll let us set
  // priority + get stats. Timer which always repeats "now"?
  aos::monotonic_clock::time_point last_time = aos::monotonic_clock::epoch();
  while (true) {
    VLOG(1) << "Going for frame";
    Argus::Status status;

    Argus::Buffer *buffer = i_buffer_output_stream->acquireBuffer(
        std::chrono::nanoseconds(std::chrono::seconds(5)).count(), &status);

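    // acquireBuffer() blocks until a capture completes or the 5 second timeout
    // expires. STATUS_END_OF_STREAM is returned once endOfStream() is called
    // during shutdown, which is the signal to exit the loop.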
    if (status == Argus::STATUS_END_OF_STREAM) {
      break;
    }

    const aos::monotonic_clock::time_point now = aos::monotonic_clock::now();

    DmaBuffer *dmabuf = DmaBuffer::FromArgusBuffer(buffer);
    int dmabuf_fd = dmabuf->fd();

    Argus::IBuffer *ibuffer = Argus::interface_cast<Argus::IBuffer>(buffer);
    CHECK(ibuffer != nullptr);

    const Argus::CaptureMetadata *metadata = ibuffer->getMetadata();
    const Argus::ICaptureMetadata *imetadata =
        Argus::interface_cast<const Argus::ICaptureMetadata>(metadata);
    CHECK(imetadata != nullptr);

    NvBufSurface *nvbuf_surf = nullptr;
    CHECK_EQ(NvBufSurfaceFromFd(dmabuf_fd, (void **)(&nvbuf_surf)), 0);

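    // Map the surface into CPU-addressable memory for reading, and sync the
    // caches so the CPU sees the pixels the camera just wrote.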
    CHECK_EQ(NvBufSurfaceMap(nvbuf_surf, -1, -1, NVBUF_MAP_READ), 0);
    VLOG(1) << "Mapped";
    NvBufSurfaceSyncForCpu(nvbuf_surf, -1, -1);

    VLOG(1) << "Planes " << nvbuf_surf->surfaceList->planeParams.num_planes
            << " colorFormat " << nvbuf_surf->surfaceList->colorFormat;
    for (size_t i = 0; i < nvbuf_surf->surfaceList->planeParams.num_planes;
         ++i) {
      VLOG(1) << "Address "
              << static_cast<void *>(
                     nvbuf_surf->surfaceList->mappedAddr.addr[i])
              << ", pitch " << nvbuf_surf->surfaceList->planeParams.pitch[i]
              << " height " << nvbuf_surf->surfaceList->planeParams.height[i]
              << " width " << nvbuf_surf->surfaceList->planeParams.width[i]
              << " bytes per pixel "
              << nvbuf_surf->surfaceList->planeParams.bytesPerPix[i];
    }
    CHECK_EQ(nvbuf_surf->surfaceList->planeParams.width[0],
             static_cast<size_t>(FLAGS_width));
    CHECK_EQ(nvbuf_surf->surfaceList->planeParams.height[0],
             static_cast<size_t>(FLAGS_height));

    aos::Sender<frc971::vision::CameraImage>::Builder builder =
        sender.MakeBuilder();

    uint8_t *data_pointer = nullptr;
    builder.fbb()->StartIndeterminateVector(FLAGS_width * FLAGS_height * 2, 1,
                                            64, &data_pointer);
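    // data_pointer now points at the width * height * 2 bytes reserved inside
    // the outgoing message; YCbCr422() packs the pixels into it in place, so
    // there is no intermediate copy of the frame.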
    YCbCr422(nvbuf_surf, data_pointer);
    flatbuffers::Offset<flatbuffers::Vector<uint8_t>> data_offset =
        builder.fbb()->EndIndeterminateVector(FLAGS_width * FLAGS_height * 2,
                                              1);

    auto image_builder = builder.MakeBuilder<frc971::vision::CameraImage>();
    image_builder.add_data(data_offset);
    image_builder.add_rows(FLAGS_height);
    image_builder.add_cols(FLAGS_width);
    image_builder.add_monotonic_timestamp_ns(imetadata->getFrameReadoutTime());
    builder.CheckOk(builder.Send(image_builder.Finish()));

    const aos::monotonic_clock::time_point after_send =
        aos::monotonic_clock::now();

    CHECK_EQ(NvBufSurfaceUnMap(nvbuf_surf, -1, -1), 0);

    VLOG(1) << "Got " << imetadata->getCaptureId() << " delay "
            << chrono::duration<double>(
                   chrono::nanoseconds((now.time_since_epoch().count() -
                                        (imetadata->getSensorTimestamp() +
                                         imetadata->getFrameReadoutTime()))))
                   .count()
            << " mmap " << chrono::duration<double>(after_send - now).count()
            << "sec dt " << chrono::duration<double>(now - last_time).count()
            << "sec " << dmabuf << " exposure "
            << imetadata->getSensorExposureTime();
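    // Hand the buffer back to the stream so Argus can reuse it for a future
    // capture.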
    i_buffer_output_stream->releaseBuffer(buffer);

    last_time = now;
  }

  i_capture_session->stopRepeat();
  i_buffer_output_stream->endOfStream();
  i_capture_session->waitForIdle();

  output_stream.reset();

  for (uint32_t i = 0; i < surf.size(); i++) {
    NvBufSurfaceUnMapEglImage(surf[i], 0);
  }

  eglTerminate(egl_display);
  return 0;
}

}  // namespace frc971

int main(int argc, char **argv) {
  aos::InitGoogle(&argc, &argv);
  return frc971::Main();
}

// I tried every different format option. Here's what worked and didn't work.
//
// NVBUF_COLOR_FORMAT_RGB,

// NVBUF_COLOR_FORMAT_YUYV,  // Failed
// NVBUF_COLOR_FORMAT_NV24,  // Works
// NVBUF_COLOR_FORMAT_UYVY,  // Failed
// NVBUF_COLOR_FORMAT_YUV420,  // Failed with error.

// NVBUF_COLOR_FORMAT_GRAY8,  // unsupported
// NVBUF_COLOR_FORMAT_YUV420,  // unsupported
// NVBUF_COLOR_FORMAT_YVU420,  // unsupported

// NVBUF_COLOR_FORMAT_YUV420_ER,  // unsupported
// NVBUF_COLOR_FORMAT_YVU420_ER,  // unsupported
//
// /** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV12,  // Works!  pitch 2048 height 1080 width
//     1920 colorFormat 6 planes 2 bytes per pixel 1 delay 0.00203304
//     mmap 0.000340288sec dt 0.0166379sec
//
// /** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV12_ER,  // Works!  pitch 2048 height 1080
//     width 1920 colorFormat 7 planes 2 bytes per pixel 1
// /** Specifies BT.601 colorspace - Y/CbCr 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV21,  // Works!  pitch 2048 height 1080 width
//     1920 colorFormat 8 planes 2 bytes per pixel 1
// /** Specifies BT.601 colorspace - Y/CbCr ER 4:2:0 multi-planar. */
// NVBUF_COLOR_FORMAT_NV21_ER,  // Works!  pitch 2048 height 1080
//     width 1920 colorFormat 9 planes 2 bytes per pixel 1
//
//
// NVBUF_COLOR_FORMAT_UYVY,  // works with an error?!?
// NVBUF_COLOR_FORMAT_UYVY_ER,  // unsupported 11
// NVBUF_COLOR_FORMAT_VYUY,  // unsupported 12
// NVBUF_COLOR_FORMAT_VYUY_ER,  // unsupported 13
// NVBUF_COLOR_FORMAT_YUYV,  // unsupported 14
// NVBUF_COLOR_FORMAT_YUYV_ER,  // unsupported 15
// NVBUF_COLOR_FORMAT_YVYU,  // unsupported 16
// NVBUF_COLOR_FORMAT_YVYU_ER,  // unsupported 17
// NVBUF_COLOR_FORMAT_YUV444,  // unsupported 18
// NVBUF_COLOR_FORMAT_RGBA,  // unsupported 19
// NVBUF_COLOR_FORMAT_BGRA,  // unsupported 20
// NVBUF_COLOR_FORMAT_ARGB,  // unsupported 21
// NVBUF_COLOR_FORMAT_ABGR,  // unsupported 22
// NVBUF_COLOR_FORMAT_RGBx,  // unsupported 23
// NVBUF_COLOR_FORMAT_BGRx,  // unsupported 24
// NVBUF_COLOR_FORMAT_xRGB,  // unsupported 25
// NVBUF_COLOR_FORMAT_xBGR,  // unsupported 26
// NVBUF_COLOR_FORMAT_RGB,  // unsupported 27
// NVBUF_COLOR_FORMAT_BGR,  // unsupported 28
// NVBUF_COLOR_FORMAT_NV12_10LE,  // unsupported 29
// NVBUF_COLOR_FORMAT_NV12_12LE,  // unsupported 30
// NVBUF_COLOR_FORMAT_YUV420_709,  // unsupported 31
// NVBUF_COLOR_FORMAT_YUV420_709_ER,  // unsupported 32
// NVBUF_COLOR_FORMAT_NV12_709,  // works pitch 2048 height 1080
//     width 1920 colorFormat 33 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV12_709_ER,  // works pitch 2048 height 1080
//     width 1920 colorFormat 34 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_YUV420_2020,  // unsupported 35
// NVBUF_COLOR_FORMAT_NV12_2020,  // unsupported 36
// NVBUF_COLOR_FORMAT_NV12_10LE_ER,  // unsupported 37
// NVBUF_COLOR_FORMAT_NV12_10LE_709,  // unsupported 38
// NVBUF_COLOR_FORMAT_NV12_10LE_709_ER,  // unsupported 39
// NVBUF_COLOR_FORMAT_NV12_10LE_2020,  // unsupported 40
// NVBUF_COLOR_FORMAT_SIGNED_R16G16,  // unsupported 41
// NVBUF_COLOR_FORMAT_R8_G8_B8,  // unsupported 42
// NVBUF_COLOR_FORMAT_B8_G8_R8,  // unsupported 43
// NVBUF_COLOR_FORMAT_R32F_G32F_B32F,  // unsupported 44
// NVBUF_COLOR_FORMAT_B32F_G32F_R32F,  // unsupported 45
// NVBUF_COLOR_FORMAT_YUV422,  // unsupported 46
// NVBUF_COLOR_FORMAT_NV21_10LE,  // unsupported 47
// NVBUF_COLOR_FORMAT_NV21_12LE,  // unsupported 48
// NVBUF_COLOR_FORMAT_NV12_12LE_2020,  // unsupported 49
// /** Specifies BT.601 colorspace - Y/CbCr 4:2:2 multi-planar. */
// NVBUF_COLOR_FORMAT_NV16,  // works pitch 2048 height 1080 width
//     1920 colorFormat 50 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV16_10LE,  // unsupported 51
// /** Specifies BT.601 colorspace - Y/CbCr 4:4:4 multi-planar. */
// NVBUF_COLOR_FORMAT_NV24,  // works pitch 2048 height 1080
//     width 1920 colorFormat 52 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV24_10LE,  // unsupported 53
//
// NVBUF_COLOR_FORMAT_NV16_ER,  // works pitch 2048 height 1080
//     width 1920 colorFormat 54 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV24_ER,  // works pitch 2048 height 1080
//     width 1920 colorFormat 55 planes 2 bytes per pixel 1
// NVBUF_COLOR_FORMAT_NV16_709,  // unsupported 56
// NVBUF_COLOR_FORMAT_NV24_709,  // unsupported 57
// NVBUF_COLOR_FORMAT_NV16_709_ER,  // unsupported 58
// NVBUF_COLOR_FORMAT_NV24_709_ER,  // unsupported 59
// NVBUF_COLOR_FORMAT_NV24_10LE_709,  // unsupported 60
// NVBUF_COLOR_FORMAT_NV24_10LE_709_ER,  // unsupported 61
// NVBUF_COLOR_FORMAT_NV24_10LE_2020,  // unsupported 62
// NVBUF_COLOR_FORMAT_NV24_12LE_2020,  // unsupported 63
// NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_709,  // unsupported 64
// NVBUF_COLOR_FORMAT_RGBA_10_10_10_2_2020,  // unsupported 65
// NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_709,  // unsupported 66
// NVBUF_COLOR_FORMAT_BGRA_10_10_10_2_2020,  // unsupported 67
// NVBUF_COLOR_FORMAT_A32,  // unsupported 68
// NVBUF_COLOR_FORMAT_UYVP,  // unsupported 69
// NVBUF_COLOR_FORMAT_UYVP_ER  // unsupported 70

// NVBUF_COLOR_FORMAT_ABGR,
// NVBUF_COLOR_FORMAT_ARGB,