/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2016-2017. All Rights Reserved.                        */
/* Open Source Software - may be modified and shared by FRC teams. The code   */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project.                                                               */
/*----------------------------------------------------------------------------*/

#include "CameraServer.h"

#include "Utility.h"
#include "WPIErrors.h"
#include "llvm/SmallString.h"
#include "llvm/raw_ostream.h"
#include "ntcore_cpp.h"

using namespace frc;

CameraServer* CameraServer::GetInstance() {
  static CameraServer instance;
  return &instance;
}
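
// The instance above is the usual entry point from robot code; a minimal
// usage sketch (the camera index and resolution are illustrative values, not
// defaults set by this file):
//
//   cs::UsbCamera camera =
//       frc::CameraServer::GetInstance()->StartAutomaticCapture(0);
//   camera.SetResolution(320, 240);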

static llvm::StringRef MakeSourceValue(CS_Source source,
                                       llvm::SmallVectorImpl<char>& buf) {
  CS_Status status = 0;
  buf.clear();
  switch (cs::GetSourceKind(source, &status)) {
    case cs::VideoSource::kUsb: {
      llvm::StringRef prefix{"usb:"};
      buf.append(prefix.begin(), prefix.end());
      auto path = cs::GetUsbCameraPath(source, &status);
      buf.append(path.begin(), path.end());
      break;
    }
    case cs::VideoSource::kHttp: {
      llvm::StringRef prefix{"ip:"};
      buf.append(prefix.begin(), prefix.end());
      auto urls = cs::GetHttpCameraUrls(source, &status);
      if (!urls.empty()) buf.append(urls[0].begin(), urls[0].end());
      break;
    }
    case cs::VideoSource::kCv:
      // FIXME: Should be "cv:", but LabVIEW dashboard requires "usb:".
      // https://github.com/wpilibsuite/allwpilib/issues/407
      return "usb:";
    default:
      return "unknown:";
  }

  return llvm::StringRef{buf.begin(), buf.size()};
}

static std::string MakeStreamValue(llvm::StringRef address, int port) {
  std::string rv;
  llvm::raw_string_ostream stream(rv);
  stream << "mjpg:http://" << address << ':' << port << "/?action=stream";
  stream.flush();
  return rv;
}
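
// For reference, a produced value looks like
// "mjpg:http://roborio-1234-frc.local:1181/?action=stream" (hostname and port
// here are illustrative; the actual port comes from the MjpegServer in use).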

std::shared_ptr<ITable> CameraServer::GetSourceTable(CS_Source source) {
  std::lock_guard<std::mutex> lock(m_mutex);
  return m_tables.lookup(source);
}

std::vector<std::string> CameraServer::GetSinkStreamValues(CS_Sink sink) {
  CS_Status status = 0;

  // Ignore all but MjpegServer
  if (cs::GetSinkKind(sink, &status) != CS_SINK_MJPEG)
    return std::vector<std::string>{};

  // Get port
  int port = cs::GetMjpegServerPort(sink, &status);

  // Generate values
  std::vector<std::string> values;
  auto listenAddress = cs::GetMjpegServerListenAddress(sink, &status);
  if (!listenAddress.empty()) {
    // If a listen address is specified, only use that
    values.emplace_back(MakeStreamValue(listenAddress, port));
  } else {
    // Otherwise generate for hostname and all interface addresses
    values.emplace_back(MakeStreamValue(cs::GetHostname() + ".local", port));

    for (const auto& addr : m_addresses) {
      if (addr == "127.0.0.1") continue;  // ignore localhost
      values.emplace_back(MakeStreamValue(addr, port));
    }
  }

  return values;
}

std::vector<std::string> CameraServer::GetSourceStreamValues(CS_Source source) {
  CS_Status status = 0;

  // Ignore all but HttpCamera
  if (cs::GetSourceKind(source, &status) != CS_SOURCE_HTTP)
    return std::vector<std::string>{};

  // Generate values
  auto values = cs::GetHttpCameraUrls(source, &status);
  for (auto& value : values) value = "mjpg:" + value;

  // Look to see if we have a passthrough server for this source
  for (const auto& i : m_sinks) {
    CS_Sink sink = i.second.GetHandle();
    CS_Source sinkSource = cs::GetSinkSource(sink, &status);
    if (source == sinkSource &&
        cs::GetSinkKind(sink, &status) == CS_SINK_MJPEG) {
      // Add USB-only passthrough
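      // (172.22.11.2 is the roboRIO's USB network interface, so this URL is
      // reachable from a computer tethered over USB)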
      int port = cs::GetMjpegServerPort(sink, &status);
      values.emplace_back(MakeStreamValue("172.22.11.2", port));
      break;
    }
  }

  // Return the stream values for the caller to publish
  return values;
}

void CameraServer::UpdateStreamValues() {
  std::lock_guard<std::mutex> lock(m_mutex);
  // Over all the sinks...
  for (const auto& i : m_sinks) {
    CS_Status status = 0;
    CS_Sink sink = i.second.GetHandle();

    // Get the source's subtable (if none exists, we're done)
    CS_Source source = cs::GetSinkSource(sink, &status);
    if (source == 0) continue;
    auto table = m_tables.lookup(source);
    if (table) {
      // Don't set stream values if this is an HttpCamera passthrough
      if (cs::GetSourceKind(source, &status) == CS_SOURCE_HTTP) continue;

      // Set table value
      auto values = GetSinkStreamValues(sink);
      if (!values.empty()) table->PutStringArray("streams", values);
    }
  }

  // Over all the sources...
  for (const auto& i : m_sources) {
    CS_Source source = i.second.GetHandle();

    // Get the source's subtable (if none exists, we're done)
    auto table = m_tables.lookup(source);
    if (table) {
      // Set table value
      auto values = GetSourceStreamValues(source);
      if (!values.empty()) table->PutStringArray("streams", values);
    }
  }
}

static std::string PixelFormatToString(int pixelFormat) {
  switch (pixelFormat) {
    case cs::VideoMode::PixelFormat::kMJPEG:
      return "MJPEG";
    case cs::VideoMode::PixelFormat::kYUYV:
      return "YUYV";
    case cs::VideoMode::PixelFormat::kRGB565:
      return "RGB565";
    case cs::VideoMode::PixelFormat::kBGR:
      return "BGR";
    case cs::VideoMode::PixelFormat::kGray:
      return "Gray";
    default:
      return "Unknown";
  }
}
#if 0
static cs::VideoMode::PixelFormat PixelFormatFromString(llvm::StringRef str) {
  if (str == "MJPEG" || str == "mjpeg" || str == "JPEG" || str == "jpeg")
    return cs::VideoMode::PixelFormat::kMJPEG;
  if (str == "YUYV" || str == "yuyv") return cs::VideoMode::PixelFormat::kYUYV;
  if (str == "RGB565" || str == "rgb565")
    return cs::VideoMode::PixelFormat::kRGB565;
  if (str == "BGR" || str == "bgr") return cs::VideoMode::PixelFormat::kBGR;
  if (str == "GRAY" || str == "Gray" || str == "gray")
    return cs::VideoMode::PixelFormat::kGray;
  return cs::VideoMode::PixelFormat::kUnknown;
}

static cs::VideoMode VideoModeFromString(llvm::StringRef modeStr) {
  cs::VideoMode mode;
  size_t pos;

  // width: [0-9]+
  pos = modeStr.find_first_not_of("0123456789");
  llvm::StringRef widthStr = modeStr.slice(0, pos);
  modeStr = modeStr.drop_front(pos).ltrim();  // drop whitespace too

  // 'x'
  if (modeStr.empty() || modeStr[0] != 'x') return mode;
  modeStr = modeStr.drop_front(1).ltrim();  // drop whitespace too

  // height: [0-9]+
  pos = modeStr.find_first_not_of("0123456789");
  llvm::StringRef heightStr = modeStr.slice(0, pos);
  modeStr = modeStr.drop_front(pos).ltrim();  // drop whitespace too

  // format: all characters until whitespace
  pos = modeStr.find_first_of(" \t\n\v\f\r");
  llvm::StringRef formatStr = modeStr.slice(0, pos);
  modeStr = modeStr.drop_front(pos).ltrim();  // drop whitespace too

  // fps: [0-9.]+
  pos = modeStr.find_first_not_of("0123456789.");
  llvm::StringRef fpsStr = modeStr.slice(0, pos);
  modeStr = modeStr.drop_front(pos).ltrim();  // drop whitespace too

  // "fps"
  if (!modeStr.startswith("fps")) return mode;

  // make fps an integer string by dropping after the decimal
  fpsStr = fpsStr.slice(0, fpsStr.find('.'));

  // convert width, height, and fps to integers
  if (widthStr.getAsInteger(10, mode.width)) return mode;
  if (heightStr.getAsInteger(10, mode.height)) return mode;
  if (fpsStr.getAsInteger(10, mode.fps)) return mode;

  // convert format to enum value
  mode.pixelFormat = PixelFormatFromString(formatStr);

  return mode;
}
#endif
static std::string VideoModeToString(const cs::VideoMode& mode) {
  std::string rv;
  llvm::raw_string_ostream oss{rv};
  oss << mode.width << "x" << mode.height;
  oss << " " << PixelFormatToString(mode.pixelFormat) << " ";
  oss << mode.fps << " fps";
  return oss.str();
}
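
// The resulting strings look like "320x240 MJPEG 30 fps" (values here are
// just an example); this is also the format the #if 0'd VideoModeFromString()
// above is written to parse.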

static std::vector<std::string> GetSourceModeValues(int source) {
  std::vector<std::string> rv;
  CS_Status status = 0;
  for (const auto& mode : cs::EnumerateSourceVideoModes(source, &status))
    rv.emplace_back(VideoModeToString(mode));
  return rv;
}

static inline llvm::StringRef Concatenate(llvm::StringRef lhs,
                                          llvm::StringRef rhs,
                                          llvm::SmallVectorImpl<char>& buf) {
  buf.clear();
  llvm::raw_svector_ostream oss{buf};
  oss << lhs << rhs;
  return oss.str();
}

static void PutSourcePropertyValue(ITable* table, const cs::VideoEvent& event,
                                   bool isNew) {
  llvm::SmallString<64> name;
  llvm::SmallString<64> infoName;
  if (llvm::StringRef{event.name}.startswith("raw_")) {
    name = "RawProperty/";
    name += event.name;
    infoName = "RawPropertyInfo/";
    infoName += event.name;
  } else {
    name = "Property/";
    name += event.name;
    infoName = "PropertyInfo/";
    infoName += event.name;
  }

  llvm::SmallString<64> buf;
  CS_Status status = 0;
  switch (event.propertyKind) {
    case cs::VideoProperty::kBoolean:
      if (isNew)
        table->SetDefaultBoolean(name, event.value != 0);
      else
        table->PutBoolean(name, event.value != 0);
      break;
    case cs::VideoProperty::kInteger:
    case cs::VideoProperty::kEnum:
      if (isNew) {
        table->SetDefaultNumber(name, event.value);
        table->PutNumber(Concatenate(infoName, "/min", buf),
                         cs::GetPropertyMin(event.propertyHandle, &status));
        table->PutNumber(Concatenate(infoName, "/max", buf),
                         cs::GetPropertyMax(event.propertyHandle, &status));
        table->PutNumber(Concatenate(infoName, "/step", buf),
                         cs::GetPropertyStep(event.propertyHandle, &status));
        table->PutNumber(Concatenate(infoName, "/default", buf),
                         cs::GetPropertyDefault(event.propertyHandle, &status));
      } else {
        table->PutNumber(name, event.value);
      }
      break;
    case cs::VideoProperty::kString:
      if (isNew)
        table->SetDefaultString(name, event.valueStr);
      else
        table->PutString(name, event.valueStr);
      break;
    default:
      break;
  }
}
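
// As an example (the property name is illustrative), a source property named
// "brightness" is published under "Property/brightness", and when first
// created its range metadata goes under "PropertyInfo/brightness/min",
// ".../max", ".../step", and ".../default"; "raw_"-prefixed properties land
// under "RawProperty/" and "RawPropertyInfo/" instead.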

CameraServer::CameraServer()
    : m_publishTable{NetworkTable::GetTable(kPublishName)},
      m_nextPort(kBasePort) {
  // We publish sources to NetworkTables using the following structure:
  // "/CameraPublisher/{Source.Name}/" - root
  // - "source" (string): Descriptive, prefixed with type (e.g. "usb:0")
  // - "streams" (string array): URLs that can be used to stream data
  // - "description" (string): Description of the source
  // - "connected" (boolean): Whether source is connected
  // - "mode" (string): Current video mode
  // - "modes" (string array): Available video modes
  // - "Property/{Property}" - Property values
  // - "PropertyInfo/{Property}" - Property supporting information

  // Listener for video events
  m_videoListener = cs::VideoListener{
      [=](const cs::VideoEvent& event) {
        CS_Status status = 0;
        switch (event.kind) {
          case cs::VideoEvent::kSourceCreated: {
            // Create subtable for the camera
            auto table = m_publishTable->GetSubTable(event.name);
            {
              std::lock_guard<std::mutex> lock(m_mutex);
              m_tables.insert(std::make_pair(event.sourceHandle, table));
            }
            llvm::SmallString<64> buf;
            table->PutString("source",
                             MakeSourceValue(event.sourceHandle, buf));
            llvm::SmallString<64> descBuf;
            table->PutString("description",
                             cs::GetSourceDescription(event.sourceHandle,
                                                      descBuf, &status));
            table->PutBoolean("connected",
                              cs::IsSourceConnected(event.sourceHandle,
                                                    &status));
            table->PutStringArray("streams",
                                  GetSourceStreamValues(event.sourceHandle));
            auto mode = cs::GetSourceVideoMode(event.sourceHandle, &status);
            table->SetDefaultString("mode", VideoModeToString(mode));
            table->PutStringArray("modes",
                                  GetSourceModeValues(event.sourceHandle));
            break;
          }
          case cs::VideoEvent::kSourceDestroyed: {
            auto table = GetSourceTable(event.sourceHandle);
            if (table) {
              table->PutString("source", "");
              table->PutStringArray("streams", std::vector<std::string>{});
              table->PutStringArray("modes", std::vector<std::string>{});
            }
            break;
          }
          case cs::VideoEvent::kSourceConnected: {
            auto table = GetSourceTable(event.sourceHandle);
            if (table) {
              // Update the description too (as it may have changed)
              llvm::SmallString<64> descBuf;
              table->PutString("description",
                               cs::GetSourceDescription(event.sourceHandle,
                                                        descBuf, &status));
              table->PutBoolean("connected", true);
            }
            break;
          }
          case cs::VideoEvent::kSourceDisconnected: {
            auto table = GetSourceTable(event.sourceHandle);
            if (table) table->PutBoolean("connected", false);
            break;
          }
          case cs::VideoEvent::kSourceVideoModesUpdated: {
            auto table = GetSourceTable(event.sourceHandle);
            if (table)
              table->PutStringArray("modes",
                                    GetSourceModeValues(event.sourceHandle));
            break;
          }
          case cs::VideoEvent::kSourceVideoModeChanged: {
            auto table = GetSourceTable(event.sourceHandle);
            if (table) table->PutString("mode", VideoModeToString(event.mode));
            break;
          }
          case cs::VideoEvent::kSourcePropertyCreated: {
            auto table = GetSourceTable(event.sourceHandle);
            if (table) PutSourcePropertyValue(table.get(), event, true);
            break;
          }
          case cs::VideoEvent::kSourcePropertyValueUpdated: {
            auto table = GetSourceTable(event.sourceHandle);
            if (table) PutSourcePropertyValue(table.get(), event, false);
            break;
          }
          case cs::VideoEvent::kSourcePropertyChoicesUpdated: {
            auto table = GetSourceTable(event.sourceHandle);
            if (table) {
              llvm::SmallString<64> name{"PropertyInfo/"};
              name += event.name;
              name += "/choices";
              auto choices =
                  cs::GetEnumPropertyChoices(event.propertyHandle, &status);
              table->PutStringArray(name, choices);
            }
            break;
          }
          case cs::VideoEvent::kSinkSourceChanged:
          case cs::VideoEvent::kSinkCreated:
          case cs::VideoEvent::kSinkDestroyed: {
            UpdateStreamValues();
            break;
          }
          case cs::VideoEvent::kNetworkInterfacesChanged: {
            m_addresses = cs::GetNetworkInterfaces();
            break;
          }
          default:
            break;
        }
      },
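      // The 0x4fff argument is a bitmask of event kinds to listen for, and
      // `true` requests immediate callbacks for already-existing sources and
      // sinks (mask interpretation is an assumption based on the cscore
      // VideoListener API, not documented in this file).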
      0x4fff, true};

  // Listener for NetworkTable events
  // We don't currently support changing settings via NT due to
  // synchronization issues, so just update to current setting if someone
  // else tries to change it.
  llvm::SmallString<64> buf;
  m_tableListener = nt::AddEntryListener(
      Concatenate(kPublishName, "/", buf),
      [=](unsigned int uid, llvm::StringRef key,
          std::shared_ptr<nt::Value> value, unsigned int flags) {
        llvm::StringRef relativeKey =
            key.substr(llvm::StringRef(kPublishName).size() + 1);

        // get source (sourceName/...)
        auto subKeyIndex = relativeKey.find('/');
        if (subKeyIndex == llvm::StringRef::npos) return;
        llvm::StringRef sourceName = relativeKey.slice(0, subKeyIndex);
        auto sourceIt = m_sources.find(sourceName);
        if (sourceIt == m_sources.end()) return;

        // get subkey
        relativeKey = relativeKey.substr(subKeyIndex + 1);

        // handle standard names
        llvm::StringRef propName;
        if (relativeKey == "mode") {
          // reset to current mode
          nt::SetEntryValue(key, nt::Value::MakeString(VideoModeToString(
                                     sourceIt->second.GetVideoMode())));
          return;
        } else if (relativeKey.startswith("Property/")) {
          propName = relativeKey.substr(9);
        } else if (relativeKey.startswith("RawProperty/")) {
          propName = relativeKey.substr(12);
        } else {
          return;  // ignore
        }

        // everything else is a property
        auto property = sourceIt->second.GetProperty(propName);
        switch (property.GetKind()) {
          case cs::VideoProperty::kNone:
            return;
          case cs::VideoProperty::kBoolean:
            nt::SetEntryValue(key, nt::Value::MakeBoolean(property.Get() != 0));
            return;
          case cs::VideoProperty::kInteger:
          case cs::VideoProperty::kEnum:
            nt::SetEntryValue(key, nt::Value::MakeDouble(property.Get()));
            return;
          case cs::VideoProperty::kString:
            nt::SetEntryValue(key, nt::Value::MakeString(property.GetString()));
            return;
          default:
            return;
        }
      },
      NT_NOTIFY_IMMEDIATE | NT_NOTIFY_UPDATE);
}

cs::UsbCamera CameraServer::StartAutomaticCapture() {
  return StartAutomaticCapture(m_defaultUsbDevice++);
}

cs::UsbCamera CameraServer::StartAutomaticCapture(int dev) {
  llvm::SmallString<64> buf;
  llvm::raw_svector_ostream name{buf};
  name << "USB Camera " << dev;

  cs::UsbCamera camera{name.str(), dev};
  StartAutomaticCapture(camera);
  return camera;
}

cs::UsbCamera CameraServer::StartAutomaticCapture(llvm::StringRef name,
                                                  int dev) {
  cs::UsbCamera camera{name, dev};
  StartAutomaticCapture(camera);
  return camera;
}

cs::UsbCamera CameraServer::StartAutomaticCapture(llvm::StringRef name,
                                                  llvm::StringRef path) {
  cs::UsbCamera camera{name, path};
  StartAutomaticCapture(camera);
  return camera;
}
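
// Typical calls from robot code (the camera names, device index, and device
// path below are illustrative):
//
//   frc::CameraServer::GetInstance()->StartAutomaticCapture("Front", 0);
//   frc::CameraServer::GetInstance()->StartAutomaticCapture("Rear",
//                                                           "/dev/video1");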

cs::AxisCamera CameraServer::AddAxisCamera(llvm::StringRef host) {
  return AddAxisCamera("Axis Camera", host);
}

cs::AxisCamera CameraServer::AddAxisCamera(const char* host) {
  return AddAxisCamera("Axis Camera", host);
}

cs::AxisCamera CameraServer::AddAxisCamera(const std::string& host) {
  return AddAxisCamera("Axis Camera", host);
}

cs::AxisCamera CameraServer::AddAxisCamera(llvm::ArrayRef<std::string> hosts) {
  return AddAxisCamera("Axis Camera", hosts);
}

cs::AxisCamera CameraServer::AddAxisCamera(llvm::StringRef name,
                                           llvm::StringRef host) {
  cs::AxisCamera camera{name, host};
  StartAutomaticCapture(camera);
  return camera;
}

cs::AxisCamera CameraServer::AddAxisCamera(llvm::StringRef name,
                                           const char* host) {
  cs::AxisCamera camera{name, host};
  StartAutomaticCapture(camera);
  return camera;
}

cs::AxisCamera CameraServer::AddAxisCamera(llvm::StringRef name,
                                           const std::string& host) {
  cs::AxisCamera camera{name, host};
  StartAutomaticCapture(camera);
  return camera;
}

cs::AxisCamera CameraServer::AddAxisCamera(llvm::StringRef name,
                                           llvm::ArrayRef<std::string> hosts) {
  cs::AxisCamera camera{name, hosts};
  StartAutomaticCapture(camera);
  return camera;
}
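
// Typical call from robot code (the address is illustrative; a hostname
// works as well):
//
//   frc::CameraServer::GetInstance()->AddAxisCamera("10.12.34.11");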

void CameraServer::StartAutomaticCapture(const cs::VideoSource& camera) {
  llvm::SmallString<64> name{"serve_"};
  name += camera.GetName();

  AddCamera(camera);
  auto server = AddServer(name);
  server.SetSource(camera);
}

cs::CvSink CameraServer::GetVideo() {
  cs::VideoSource source;
  {
    std::lock_guard<std::mutex> lock(m_mutex);
    if (m_primarySourceName.empty()) {
      wpi_setWPIErrorWithContext(CameraServerError, "no camera available");
      return cs::CvSink{};
    }
    auto it = m_sources.find(m_primarySourceName);
    if (it == m_sources.end()) {
      wpi_setWPIErrorWithContext(CameraServerError, "no camera available");
      return cs::CvSink{};
    }
    source = it->second;
  }
  return GetVideo(std::move(source));
}
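
// A minimal frame-grabbing sketch on the consuming side (the variable names
// and error handling are illustrative):
//
//   cs::CvSink sink = frc::CameraServer::GetInstance()->GetVideo();
//   cv::Mat frame;
//   if (sink.GrabFrame(frame) == 0) {
//     // 0 means the grab failed or timed out; sink.GetError() has details
//   }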

cs::CvSink CameraServer::GetVideo(const cs::VideoSource& camera) {
  llvm::SmallString<64> name{"opencv_"};
  name += camera.GetName();

  {
    std::lock_guard<std::mutex> lock(m_mutex);
    auto it = m_sinks.find(name);
    if (it != m_sinks.end()) {
      auto kind = it->second.GetKind();
      if (kind != cs::VideoSink::kCv) {
        llvm::SmallString<64> buf;
        llvm::raw_svector_ostream err{buf};
        err << "expected OpenCV sink, but got " << kind;
        wpi_setWPIErrorWithContext(CameraServerError, err.str());
        return cs::CvSink{};
      }
      return *static_cast<cs::CvSink*>(&it->second);
    }
  }

  cs::CvSink newsink{name};
  newsink.SetSource(camera);
  AddServer(newsink);
  return newsink;
}

cs::CvSink CameraServer::GetVideo(llvm::StringRef name) {
  cs::VideoSource source;
  {
    std::lock_guard<std::mutex> lock(m_mutex);
    auto it = m_sources.find(name);
    if (it == m_sources.end()) {
      llvm::SmallString<64> buf;
      llvm::raw_svector_ostream err{buf};
      err << "could not find camera " << name;
      wpi_setWPIErrorWithContext(CameraServerError, err.str());
      return cs::CvSink{};
    }
    source = it->second;
  }
  return GetVideo(source);
}

cs::CvSource CameraServer::PutVideo(llvm::StringRef name, int width,
                                    int height) {
  cs::CvSource source{name, cs::VideoMode::kMJPEG, width, height, 30};
  StartAutomaticCapture(source);
  return source;
}
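
// A minimal producer-side sketch (the stream name, size, and matrix are
// illustrative):
//
//   cs::CvSource output =
//       frc::CameraServer::GetInstance()->PutVideo("Processed", 320, 240);
//   cv::Mat processed;  // filled in by the user's vision code
//   output.PutFrame(processed);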

cs::MjpegServer CameraServer::AddServer(llvm::StringRef name) {
  int port;
  {
    std::lock_guard<std::mutex> lock(m_mutex);
    port = m_nextPort++;
  }
  return AddServer(name, port);
}

cs::MjpegServer CameraServer::AddServer(llvm::StringRef name, int port) {
  cs::MjpegServer server{name, port};
  AddServer(server);
  return server;
}

void CameraServer::AddServer(const cs::VideoSink& server) {
  std::lock_guard<std::mutex> lock(m_mutex);
  m_sinks.emplace_second(server.GetName(), server);
}

void CameraServer::RemoveServer(llvm::StringRef name) {
  std::lock_guard<std::mutex> lock(m_mutex);
  m_sinks.erase(name);
}

cs::VideoSink CameraServer::GetServer() {
  llvm::SmallString<64> name;
  {
    std::lock_guard<std::mutex> lock(m_mutex);
    if (m_primarySourceName.empty()) {
      wpi_setWPIErrorWithContext(CameraServerError, "no camera available");
      return cs::VideoSink{};
    }
    name = "serve_";
    name += m_primarySourceName;
  }
  return GetServer(name);
}

cs::VideoSink CameraServer::GetServer(llvm::StringRef name) {
  std::lock_guard<std::mutex> lock(m_mutex);
  auto it = m_sinks.find(name);
  if (it == m_sinks.end()) {
    llvm::SmallString<64> buf;
    llvm::raw_svector_ostream err{buf};
    err << "could not find server " << name;
    wpi_setWPIErrorWithContext(CameraServerError, err.str());
    return cs::VideoSink{};
  }
  return it->second;
}

void CameraServer::AddCamera(const cs::VideoSource& camera) {
  std::string name = camera.GetName();
  std::lock_guard<std::mutex> lock(m_mutex);
  if (m_primarySourceName.empty()) m_primarySourceName = name;
  m_sources.emplace_second(name, camera);
}

void CameraServer::RemoveCamera(llvm::StringRef name) {
  std::lock_guard<std::mutex> lock(m_mutex);
  m_sources.erase(name);
}

void CameraServer::SetSize(int size) {
  std::lock_guard<std::mutex> lock(m_mutex);
  if (m_primarySourceName.empty()) return;
  auto it = m_sources.find(m_primarySourceName);
  if (it == m_sources.end()) return;
  if (size == kSize160x120)
    it->second.SetResolution(160, 120);
  else if (size == kSize320x240)
    it->second.SetResolution(320, 240);
  else if (size == kSize640x480)
    it->second.SetResolution(640, 480);
}