Squashed 'third_party/allwpilib_2019/' content from commit bd05dfa1c
Change-Id: I2b1c2250cdb9b055133780c33593292098c375b7
git-subtree-dir: third_party/allwpilib_2019
git-subtree-split: bd05dfa1c7cca74c4fac451e7b9d6a37e7b53447
diff --git a/cameraserver/src/main/native/cpp/cameraserver/CameraServer.cpp b/cameraserver/src/main/native/cpp/cameraserver/CameraServer.cpp
new file mode 100644
index 0000000..3069d22
--- /dev/null
+++ b/cameraserver/src/main/native/cpp/cameraserver/CameraServer.cpp
@@ -0,0 +1,695 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#include "cameraserver/CameraServer.h"
+
+#include <atomic>
+#include <vector>
+
+#include <networktables/NetworkTable.h>
+#include <networktables/NetworkTableInstance.h>
+#include <wpi/DenseMap.h>
+#include <wpi/SmallString.h>
+#include <wpi/StringMap.h>
+#include <wpi/mutex.h>
+#include <wpi/raw_ostream.h>
+
+#include "cameraserver/CameraServerShared.h"
+#include "ntcore_cpp.h"
+
+using namespace frc;
+
+static constexpr char const* kPublishName = "/CameraPublisher";
+
// Private implementation (pImpl) for CameraServer.  Owns the registered
// sources/sinks, their NetworkTables subtables, and the cscore/NT listeners
// that keep the published camera data up to date.
struct CameraServer::Impl {
  Impl();
  // Returns the NT subtable for a source, or nullptr if none (locks m_mutex).
  std::shared_ptr<nt::NetworkTable> GetSourceTable(CS_Source source);
  // Stream URL lists published under the NT "streams" entries.
  std::vector<std::string> GetSinkStreamValues(CS_Sink sink);
  std::vector<std::string> GetSourceStreamValues(CS_Source source);
  // Recomputes the "streams" entry of every sink/source (locks m_mutex).
  void UpdateStreamValues();

  wpi::mutex m_mutex;  // guards the containers and names below
  std::atomic<int> m_defaultUsbDevice;  // next device for no-arg StartAutomaticCapture()
  std::string m_primarySourceName;  // first camera added; used by GetVideo()/GetServer()
  wpi::StringMap<cs::VideoSource> m_sources;
  wpi::StringMap<cs::VideoSink> m_sinks;
  wpi::DenseMap<CS_Source, std::shared_ptr<nt::NetworkTable>> m_tables;
  std::shared_ptr<nt::NetworkTable> m_publishTable;
  cs::VideoListener m_videoListener;
  int m_tableListener;  // ntcore entry-listener handle
  int m_nextPort;       // next MJPEG server port (starts at kBasePort)
  std::vector<std::string> m_addresses;  // cached network interface addresses
};
+
+CameraServer* CameraServer::GetInstance() {
+ static CameraServer instance;
+ return &instance;
+}
+
// Builds the NT "source" value for a camera: a descriptive string prefixed
// with the source kind, e.g. "usb:/dev/video0" or "ip:http://...".
// The returned StringRef may point into `buf`, so it is only valid while
// `buf` is alive and unmodified.
static wpi::StringRef MakeSourceValue(CS_Source source,
                                      wpi::SmallVectorImpl<char>& buf) {
  CS_Status status = 0;
  buf.clear();
  switch (cs::GetSourceKind(source, &status)) {
    case cs::VideoSource::kUsb: {
      wpi::StringRef prefix{"usb:"};
      buf.append(prefix.begin(), prefix.end());
      auto path = cs::GetUsbCameraPath(source, &status);
      buf.append(path.begin(), path.end());
      break;
    }
    case cs::VideoSource::kHttp: {
      wpi::StringRef prefix{"ip:"};
      buf.append(prefix.begin(), prefix.end());
      // Use the first advertised URL, if any.
      auto urls = cs::GetHttpCameraUrls(source, &status);
      if (!urls.empty()) buf.append(urls[0].begin(), urls[0].end());
      break;
    }
    case cs::VideoSource::kCv:
      // OpenCV sources have no underlying device or URL.
      return "cv:";
    default:
      return "unknown:";
  }

  return wpi::StringRef{buf.begin(), buf.size()};
}
+
// Formats one stream URL for the NT "streams" array, e.g.
// "mjpg:http://host:1181/?action=stream".
static std::string MakeStreamValue(const wpi::Twine& address, int port) {
  // Concatenate and materialize in a single expression: a wpi::Twine only
  // references its operands, so it must not outlive this full expression.
  return ("mjpg:http://" + address + wpi::Twine(':') + wpi::Twine(port) +
          "/?action=stream")
      .str();
}
+
+std::shared_ptr<nt::NetworkTable> CameraServer::Impl::GetSourceTable(
+ CS_Source source) {
+ std::lock_guard<wpi::mutex> lock(m_mutex);
+ return m_tables.lookup(source);
+}
+
// Computes the "streams" URLs advertised for a sink.  Only MJPEG server
// sinks produce values; one URL is generated per reachable address (or just
// the configured listen address, when one is set).
// Called from UpdateStreamValues() with m_mutex held, which protects the
// read of m_addresses.
std::vector<std::string> CameraServer::Impl::GetSinkStreamValues(CS_Sink sink) {
  CS_Status status = 0;

  // Ignore all but MjpegServer
  if (cs::GetSinkKind(sink, &status) != CS_SINK_MJPEG)
    return std::vector<std::string>{};

  // Get port
  int port = cs::GetMjpegServerPort(sink, &status);

  // Generate values
  std::vector<std::string> values;
  auto listenAddress = cs::GetMjpegServerListenAddress(sink, &status);
  if (!listenAddress.empty()) {
    // If a listen address is specified, only use that
    values.emplace_back(MakeStreamValue(listenAddress, port));
  } else {
    // Otherwise generate for hostname (mDNS ".local") and all interface
    // addresses
    values.emplace_back(MakeStreamValue(cs::GetHostname() + ".local", port));

    for (const auto& addr : m_addresses) {
      if (addr == "127.0.0.1") continue;  // ignore localhost
      values.emplace_back(MakeStreamValue(addr, port));
    }
  }

  return values;
}
+
// Computes the "streams" URLs for an HTTP camera source: the camera's own
// URLs (prefixed "mjpg:") plus, if a local MJPEG passthrough server exists
// for it, one additional fixed-address URL.
//
// NOTE(review): called both under m_mutex (UpdateStreamValues) and from the
// video-event listener without the lock while iterating m_sinks — this
// relies on listener callbacks being serialized; verify against cscore.
std::vector<std::string> CameraServer::Impl::GetSourceStreamValues(
    CS_Source source) {
  CS_Status status = 0;

  // Ignore all but HttpCamera
  if (cs::GetSourceKind(source, &status) != CS_SOURCE_HTTP)
    return std::vector<std::string>{};

  // Generate values
  auto values = cs::GetHttpCameraUrls(source, &status);
  for (auto& value : values) value = "mjpg:" + value;

  // Look to see if we have a passthrough server for this source
  for (const auto& i : m_sinks) {
    CS_Sink sink = i.second.GetHandle();
    CS_Source sinkSource = cs::GetSinkSource(sink, &status);
    if (source == sinkSource &&
        cs::GetSinkKind(sink, &status) == CS_SINK_MJPEG) {
      // Add USB-only passthrough (172.22.11.2 is presumably the roboRIO
      // USB-interface address — confirm)
      int port = cs::GetMjpegServerPort(sink, &status);
      values.emplace_back(MakeStreamValue("172.22.11.2", port));
      break;
    }
  }

  // Return the computed values; the caller publishes them to NT.
  return values;
}
+
// Recomputes and republishes the "streams" NT entry for every registered
// sink and source.  Invoked from the video listener whenever sinks,
// sources, or network interfaces change.
void CameraServer::Impl::UpdateStreamValues() {
  std::lock_guard<wpi::mutex> lock(m_mutex);
  // Over all the sinks...
  for (const auto& i : m_sinks) {
    CS_Status status = 0;
    CS_Sink sink = i.second.GetHandle();

    // Get the source's subtable (if none exists, we're done)
    CS_Source source = cs::GetSinkSource(sink, &status);
    if (source == 0) continue;
    auto table = m_tables.lookup(source);
    if (table) {
      // Don't set stream values if this is a HttpCamera passthrough;
      // those sources are covered by the loop below.
      if (cs::GetSourceKind(source, &status) == CS_SOURCE_HTTP) continue;

      // Set table value
      auto values = GetSinkStreamValues(sink);
      if (!values.empty()) table->GetEntry("streams").SetStringArray(values);
    }
  }

  // Over all the sources...
  for (const auto& i : m_sources) {
    CS_Source source = i.second.GetHandle();

    // Get the source's subtable (if none exists, we're done)
    auto table = m_tables.lookup(source);
    if (table) {
      // Set table value
      auto values = GetSourceStreamValues(source);
      if (!values.empty()) table->GetEntry("streams").SetStringArray(values);
    }
  }
}
+
+static std::string PixelFormatToString(int pixelFormat) {
+ switch (pixelFormat) {
+ case cs::VideoMode::PixelFormat::kMJPEG:
+ return "MJPEG";
+ case cs::VideoMode::PixelFormat::kYUYV:
+ return "YUYV";
+ case cs::VideoMode::PixelFormat::kRGB565:
+ return "RGB565";
+ case cs::VideoMode::PixelFormat::kBGR:
+ return "BGR";
+ case cs::VideoMode::PixelFormat::kGray:
+ return "Gray";
+ default:
+ return "Unknown";
+ }
+}
+
+static std::string VideoModeToString(const cs::VideoMode& mode) {
+ std::string rv;
+ wpi::raw_string_ostream oss{rv};
+ oss << mode.width << "x" << mode.height;
+ oss << " " << PixelFormatToString(mode.pixelFormat) << " ";
+ oss << mode.fps << " fps";
+ return oss.str();
+}
+
+static std::vector<std::string> GetSourceModeValues(int source) {
+ std::vector<std::string> rv;
+ CS_Status status = 0;
+ for (const auto& mode : cs::EnumerateSourceVideoModes(source, &status))
+ rv.emplace_back(VideoModeToString(mode));
+ return rv;
+}
+
+static void PutSourcePropertyValue(nt::NetworkTable* table,
+ const cs::VideoEvent& event, bool isNew) {
+ wpi::SmallString<64> name;
+ wpi::SmallString<64> infoName;
+ if (wpi::StringRef{event.name}.startswith("raw_")) {
+ name = "RawProperty/";
+ name += event.name;
+ infoName = "RawPropertyInfo/";
+ infoName += event.name;
+ } else {
+ name = "Property/";
+ name += event.name;
+ infoName = "PropertyInfo/";
+ infoName += event.name;
+ }
+
+ wpi::SmallString<64> buf;
+ CS_Status status = 0;
+ nt::NetworkTableEntry entry = table->GetEntry(name);
+ switch (event.propertyKind) {
+ case cs::VideoProperty::kBoolean:
+ if (isNew)
+ entry.SetDefaultBoolean(event.value != 0);
+ else
+ entry.SetBoolean(event.value != 0);
+ break;
+ case cs::VideoProperty::kInteger:
+ case cs::VideoProperty::kEnum:
+ if (isNew) {
+ entry.SetDefaultDouble(event.value);
+ table->GetEntry(infoName + "/min")
+ .SetDouble(cs::GetPropertyMin(event.propertyHandle, &status));
+ table->GetEntry(infoName + "/max")
+ .SetDouble(cs::GetPropertyMax(event.propertyHandle, &status));
+ table->GetEntry(infoName + "/step")
+ .SetDouble(cs::GetPropertyStep(event.propertyHandle, &status));
+ table->GetEntry(infoName + "/default")
+ .SetDouble(cs::GetPropertyDefault(event.propertyHandle, &status));
+ } else {
+ entry.SetDouble(event.value);
+ }
+ break;
+ case cs::VideoProperty::kString:
+ if (isNew)
+ entry.SetDefaultString(event.valueStr);
+ else
+ entry.SetString(event.valueStr);
+ break;
+ default:
+ break;
+ }
+}
+
+CameraServer::Impl::Impl()
+ : m_publishTable{nt::NetworkTableInstance::GetDefault().GetTable(
+ kPublishName)},
+ m_nextPort(kBasePort) {
+ // We publish sources to NetworkTables using the following structure:
+ // "/CameraPublisher/{Source.Name}/" - root
+ // - "source" (string): Descriptive, prefixed with type (e.g. "usb:0")
+ // - "streams" (string array): URLs that can be used to stream data
+ // - "description" (string): Description of the source
+ // - "connected" (boolean): Whether source is connected
+ // - "mode" (string): Current video mode
+ // - "modes" (string array): Available video modes
+ // - "Property/{Property}" - Property values
+ // - "PropertyInfo/{Property}" - Property supporting information
+
+ // Listener for video events
+ m_videoListener = cs::VideoListener{
+ [=](const cs::VideoEvent& event) {
+ CS_Status status = 0;
+ switch (event.kind) {
+ case cs::VideoEvent::kSourceCreated: {
+ // Create subtable for the camera
+ auto table = m_publishTable->GetSubTable(event.name);
+ {
+ std::lock_guard<wpi::mutex> lock(m_mutex);
+ m_tables.insert(std::make_pair(event.sourceHandle, table));
+ }
+ wpi::SmallString<64> buf;
+ table->GetEntry("source").SetString(
+ MakeSourceValue(event.sourceHandle, buf));
+ wpi::SmallString<64> descBuf;
+ table->GetEntry("description")
+ .SetString(cs::GetSourceDescription(event.sourceHandle, descBuf,
+ &status));
+ table->GetEntry("connected")
+ .SetBoolean(cs::IsSourceConnected(event.sourceHandle, &status));
+ table->GetEntry("streams").SetStringArray(
+ GetSourceStreamValues(event.sourceHandle));
+ auto mode = cs::GetSourceVideoMode(event.sourceHandle, &status);
+ table->GetEntry("mode").SetDefaultString(VideoModeToString(mode));
+ table->GetEntry("modes").SetStringArray(
+ GetSourceModeValues(event.sourceHandle));
+ break;
+ }
+ case cs::VideoEvent::kSourceDestroyed: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) {
+ table->GetEntry("source").SetString("");
+ table->GetEntry("streams").SetStringArray(
+ std::vector<std::string>{});
+ table->GetEntry("modes").SetStringArray(
+ std::vector<std::string>{});
+ }
+ break;
+ }
+ case cs::VideoEvent::kSourceConnected: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) {
+ // update the description too (as it may have changed)
+ wpi::SmallString<64> descBuf;
+ table->GetEntry("description")
+ .SetString(cs::GetSourceDescription(event.sourceHandle,
+ descBuf, &status));
+ table->GetEntry("connected").SetBoolean(true);
+ }
+ break;
+ }
+ case cs::VideoEvent::kSourceDisconnected: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) table->GetEntry("connected").SetBoolean(false);
+ break;
+ }
+ case cs::VideoEvent::kSourceVideoModesUpdated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table)
+ table->GetEntry("modes").SetStringArray(
+ GetSourceModeValues(event.sourceHandle));
+ break;
+ }
+ case cs::VideoEvent::kSourceVideoModeChanged: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table)
+ table->GetEntry("mode").SetString(VideoModeToString(event.mode));
+ break;
+ }
+ case cs::VideoEvent::kSourcePropertyCreated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) PutSourcePropertyValue(table.get(), event, true);
+ break;
+ }
+ case cs::VideoEvent::kSourcePropertyValueUpdated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) PutSourcePropertyValue(table.get(), event, false);
+ break;
+ }
+ case cs::VideoEvent::kSourcePropertyChoicesUpdated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) {
+ wpi::SmallString<64> name{"PropertyInfo/"};
+ name += event.name;
+ name += "/choices";
+ auto choices =
+ cs::GetEnumPropertyChoices(event.propertyHandle, &status);
+ table->GetEntry(name).SetStringArray(choices);
+ }
+ break;
+ }
+ case cs::VideoEvent::kSinkSourceChanged:
+ case cs::VideoEvent::kSinkCreated:
+ case cs::VideoEvent::kSinkDestroyed:
+ case cs::VideoEvent::kNetworkInterfacesChanged: {
+ m_addresses = cs::GetNetworkInterfaces();
+ UpdateStreamValues();
+ break;
+ }
+ default:
+ break;
+ }
+ },
+ 0x4fff, true};
+
+ // Listener for NetworkTable events
+ // We don't currently support changing settings via NT due to
+ // synchronization issues, so just update to current setting if someone
+ // else tries to change it.
+ wpi::SmallString<64> buf;
+ m_tableListener = nt::NetworkTableInstance::GetDefault().AddEntryListener(
+ kPublishName + wpi::Twine('/'),
+ [=](const nt::EntryNotification& event) {
+ wpi::StringRef relativeKey =
+ event.name.substr(wpi::StringRef(kPublishName).size() + 1);
+
+ // get source (sourceName/...)
+ auto subKeyIndex = relativeKey.find('/');
+ if (subKeyIndex == wpi::StringRef::npos) return;
+ wpi::StringRef sourceName = relativeKey.slice(0, subKeyIndex);
+ auto sourceIt = m_sources.find(sourceName);
+ if (sourceIt == m_sources.end()) return;
+
+ // get subkey
+ relativeKey = relativeKey.substr(subKeyIndex + 1);
+
+ // handle standard names
+ wpi::StringRef propName;
+ nt::NetworkTableEntry entry{event.entry};
+ if (relativeKey == "mode") {
+ // reset to current mode
+ entry.SetString(VideoModeToString(sourceIt->second.GetVideoMode()));
+ return;
+ } else if (relativeKey.startswith("Property/")) {
+ propName = relativeKey.substr(9);
+ } else if (relativeKey.startswith("RawProperty/")) {
+ propName = relativeKey.substr(12);
+ } else {
+ return; // ignore
+ }
+
+ // everything else is a property
+ auto property = sourceIt->second.GetProperty(propName);
+ switch (property.GetKind()) {
+ case cs::VideoProperty::kNone:
+ return;
+ case cs::VideoProperty::kBoolean:
+ entry.SetBoolean(property.Get() != 0);
+ return;
+ case cs::VideoProperty::kInteger:
+ case cs::VideoProperty::kEnum:
+ entry.SetDouble(property.Get());
+ return;
+ case cs::VideoProperty::kString:
+ entry.SetString(property.GetString());
+ return;
+ default:
+ return;
+ }
+ },
+ NT_NOTIFY_IMMEDIATE | NT_NOTIFY_UPDATE);
+}
+
// Constructor/destructor are out-of-line so std::unique_ptr<Impl> sees the
// complete Impl type (pImpl idiom).
CameraServer::CameraServer() : m_impl(new Impl) {}

CameraServer::~CameraServer() {}
+
// Starts capture from the next unclaimed USB device number; the camera is
// named "USB Camera {N}" by the int overload.  m_defaultUsbDevice is atomic
// so concurrent callers get distinct device numbers.
cs::UsbCamera CameraServer::StartAutomaticCapture() {
  cs::UsbCamera camera = StartAutomaticCapture(m_impl->m_defaultUsbDevice++);
  auto csShared = GetCameraServerShared();
  // NOTE(review): the int overload above already calls ReportUsbCamera, so
  // usage is reported twice for this path — confirm this is intentional.
  csShared->ReportUsbCamera(camera.GetHandle());
  return camera;
}
+
+cs::UsbCamera CameraServer::StartAutomaticCapture(int dev) {
+ cs::UsbCamera camera{"USB Camera " + wpi::Twine(dev), dev};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::UsbCamera CameraServer::StartAutomaticCapture(const wpi::Twine& name,
+ int dev) {
+ cs::UsbCamera camera{name, dev};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::UsbCamera CameraServer::StartAutomaticCapture(const wpi::Twine& name,
+ const wpi::Twine& path) {
+ cs::UsbCamera camera{name, path};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
// The four overloads below all delegate to the name+host(s) overloads,
// supplying the default camera name "Axis Camera".

cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& host) {
  return AddAxisCamera("Axis Camera", host);
}

cs::AxisCamera CameraServer::AddAxisCamera(const char* host) {
  return AddAxisCamera("Axis Camera", host);
}

cs::AxisCamera CameraServer::AddAxisCamera(const std::string& host) {
  return AddAxisCamera("Axis Camera", host);
}

cs::AxisCamera CameraServer::AddAxisCamera(wpi::ArrayRef<std::string> hosts) {
  return AddAxisCamera("Axis Camera", hosts);
}
+
// Each overload below creates an Axis HTTP camera, registers it (plus a
// "serve_<name>" MJPEG passthrough server) via StartAutomaticCapture, and
// reports usage.  They differ only in the host argument type accepted by
// the cs::AxisCamera constructor.

cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
                                           const wpi::Twine& host) {
  cs::AxisCamera camera{name, host};
  StartAutomaticCapture(camera);
  auto csShared = GetCameraServerShared();
  csShared->ReportAxisCamera(camera.GetHandle());
  return camera;
}

cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
                                           const char* host) {
  cs::AxisCamera camera{name, host};
  StartAutomaticCapture(camera);
  auto csShared = GetCameraServerShared();
  csShared->ReportAxisCamera(camera.GetHandle());
  return camera;
}

cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
                                           const std::string& host) {
  cs::AxisCamera camera{name, host};
  StartAutomaticCapture(camera);
  auto csShared = GetCameraServerShared();
  csShared->ReportAxisCamera(camera.GetHandle());
  return camera;
}

cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
                                           wpi::ArrayRef<std::string> hosts) {
  cs::AxisCamera camera{name, hosts};
  StartAutomaticCapture(camera);
  auto csShared = GetCameraServerShared();
  csShared->ReportAxisCamera(camera.GetHandle());
  return camera;
}
+
+cs::MjpegServer CameraServer::StartAutomaticCapture(
+ const cs::VideoSource& camera) {
+ AddCamera(camera);
+ auto server = AddServer(wpi::Twine("serve_") + camera.GetName());
+ server.SetSource(camera);
+ return server;
+}
+
// Returns a CvSink for the primary camera feed (the first camera registered
// via AddCamera).  On failure an error is reported through the shared
// implementation and an empty sink is returned.
cs::CvSink CameraServer::GetVideo() {
  cs::VideoSource source;
  {
    auto csShared = GetCameraServerShared();
    std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
    if (m_impl->m_primarySourceName.empty()) {
      csShared->SetCameraServerError("no camera available");
      return cs::CvSink{};
    }
    auto it = m_impl->m_sources.find(m_impl->m_primarySourceName);
    if (it == m_impl->m_sources.end()) {
      csShared->SetCameraServerError("no camera available");
      return cs::CvSink{};
    }
    source = it->second;
  }
  // Delegate outside the lock; GetVideo(source) re-locks m_mutex itself.
  return GetVideo(std::move(source));
}
+
// Returns (creating on first use) the "opencv_<camera>" CvSink bound to the
// given camera.  If a sink with that name exists but is not a CvSink, an
// error is reported and an empty sink is returned.
cs::CvSink CameraServer::GetVideo(const cs::VideoSource& camera) {
  wpi::SmallString<64> name{"opencv_"};
  name += camera.GetName();

  {
    std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
    // Reuse an existing sink of the same name when possible.
    auto it = m_impl->m_sinks.find(name);
    if (it != m_impl->m_sinks.end()) {
      auto kind = it->second.GetKind();
      if (kind != cs::VideoSink::kCv) {
        auto csShared = GetCameraServerShared();
        csShared->SetCameraServerError("expected OpenCV sink, but got " +
                                       wpi::Twine(kind));
        return cs::CvSink{};
      }
      // NOTE(review): downcast assumes CvSink adds no data members over
      // VideoSink (both wrap a handle) — confirm against cscore.
      return *static_cast<cs::CvSink*>(&it->second);
    }
  }

  cs::CvSink newsink{name};
  newsink.SetSource(camera);
  AddServer(newsink);
  return newsink;
}
+
+cs::CvSink CameraServer::GetVideo(const wpi::Twine& name) {
+ wpi::SmallString<64> nameBuf;
+ wpi::StringRef nameStr = name.toStringRef(nameBuf);
+ cs::VideoSource source;
+ {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ auto it = m_impl->m_sources.find(nameStr);
+ if (it == m_impl->m_sources.end()) {
+ auto csShared = GetCameraServerShared();
+ csShared->SetCameraServerError("could not find camera " + nameStr);
+ return cs::CvSink{};
+ }
+ source = it->second;
+ }
+ return GetVideo(source);
+}
+
+cs::CvSource CameraServer::PutVideo(const wpi::Twine& name, int width,
+ int height) {
+ cs::CvSource source{name, cs::VideoMode::kMJPEG, width, height, 30};
+ StartAutomaticCapture(source);
+ return source;
+}
+
+cs::MjpegServer CameraServer::AddServer(const wpi::Twine& name) {
+ int port;
+ {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ port = m_impl->m_nextPort++;
+ }
+ return AddServer(name, port);
+}
+
+cs::MjpegServer CameraServer::AddServer(const wpi::Twine& name, int port) {
+ cs::MjpegServer server{name, port};
+ AddServer(server);
+ return server;
+}
+
+void CameraServer::AddServer(const cs::VideoSink& server) {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ m_impl->m_sinks.try_emplace(server.GetName(), server);
+}
+
+void CameraServer::RemoveServer(const wpi::Twine& name) {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ wpi::SmallString<64> nameBuf;
+ m_impl->m_sinks.erase(name.toStringRef(nameBuf));
+}
+
// Returns the server for the primary camera feed, i.e. the
// "serve_<primary camera>" sink created by StartAutomaticCapture().
// Reports an error and returns an empty sink if no camera was added.
cs::VideoSink CameraServer::GetServer() {
  wpi::SmallString<64> name;
  {
    std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
    if (m_impl->m_primarySourceName.empty()) {
      auto csShared = GetCameraServerShared();
      csShared->SetCameraServerError("no camera available");
      return cs::VideoSink{};
    }
    name = "serve_";
    name += m_impl->m_primarySourceName;
  }
  // Look up outside the lock; GetServer(name) re-locks m_mutex.
  return GetServer(name);
}
+
+cs::VideoSink CameraServer::GetServer(const wpi::Twine& name) {
+ wpi::SmallString<64> nameBuf;
+ wpi::StringRef nameStr = name.toStringRef(nameBuf);
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ auto it = m_impl->m_sinks.find(nameStr);
+ if (it == m_impl->m_sinks.end()) {
+ auto csShared = GetCameraServerShared();
+ csShared->SetCameraServerError("could not find server " + nameStr);
+ return cs::VideoSink{};
+ }
+ return it->second;
+}
+
+void CameraServer::AddCamera(const cs::VideoSource& camera) {
+ std::string name = camera.GetName();
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ if (m_impl->m_primarySourceName.empty()) m_impl->m_primarySourceName = name;
+ m_impl->m_sources.try_emplace(name, camera);
+}
+
+void CameraServer::RemoveCamera(const wpi::Twine& name) {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ wpi::SmallString<64> nameBuf;
+ m_impl->m_sources.erase(name.toStringRef(nameBuf));
+}
+
+void CameraServer::SetSize(int size) {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ if (m_impl->m_primarySourceName.empty()) return;
+ auto it = m_impl->m_sources.find(m_impl->m_primarySourceName);
+ if (it == m_impl->m_sources.end()) return;
+ if (size == kSize160x120)
+ it->second.SetResolution(160, 120);
+ else if (size == kSize320x240)
+ it->second.SetResolution(320, 240);
+ else if (size == kSize640x480)
+ it->second.SetResolution(640, 480);
+}
diff --git a/cameraserver/src/main/native/cpp/cameraserver/CameraServerShared.cpp b/cameraserver/src/main/native/cpp/cameraserver/CameraServerShared.cpp
new file mode 100644
index 0000000..97ab090
--- /dev/null
+++ b/cameraserver/src/main/native/cpp/cameraserver/CameraServerShared.cpp
@@ -0,0 +1,43 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#include "cameraserver/CameraServerShared.h"
+
+#include <wpi/mutex.h>
+
namespace {
// No-op CameraServerShared used when the robot framework has not installed
// an implementation via frc::SetCameraServerShared(): usage reporting and
// error hooks do nothing, and no main robot thread id is available (the
// bool returned by GetRobotMainThreadId() is false).
class DefaultCameraServerShared : public frc::CameraServerShared {
 public:
  void ReportUsbCamera(int id) override {}
  void ReportAxisCamera(int id) override {}
  void ReportVideoServer(int id) override {}
  void SetCameraServerError(const wpi::Twine& error) override {}
  void SetVisionRunnerError(const wpi::Twine& error) override {}
  void ReportDriverStationError(const wpi::Twine& error) override {}
  std::pair<std::thread::id, bool> GetRobotMainThreadId() const override {
    return std::make_pair(std::thread::id(), false);
  }
};
}  // namespace
+
+namespace frc {
+
// Process-wide shared implementation: written by SetCameraServerShared()
// and lazily defaulted in GetCameraServerShared().  Guarded by setLock.
static std::unique_ptr<CameraServerShared> cameraServerShared = nullptr;
static wpi::mutex setLock;
+
+void SetCameraServerShared(std::unique_ptr<CameraServerShared> shared) {
+ std::unique_lock<wpi::mutex> lock(setLock);
+ cameraServerShared = std::move(shared);
+}
+CameraServerShared* GetCameraServerShared() {
+ std::unique_lock<wpi::mutex> lock(setLock);
+ if (!cameraServerShared) {
+ cameraServerShared = std::make_unique<DefaultCameraServerShared>();
+ }
+ return cameraServerShared.get();
+}
+} // namespace frc
diff --git a/cameraserver/src/main/native/cpp/vision/VisionRunner.cpp b/cameraserver/src/main/native/cpp/vision/VisionRunner.cpp
new file mode 100644
index 0000000..9896bbd
--- /dev/null
+++ b/cameraserver/src/main/native/cpp/vision/VisionRunner.cpp
@@ -0,0 +1,59 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#include "vision/VisionRunner.h"
+
+#include <thread>
+
+#include <opencv2/core/mat.hpp>
+
+#include "cameraserver/CameraServerShared.h"
+
+using namespace frc;
+
// Binds the runner's internal CvSink to the given source and allocates the
// cv::Mat that is reused for every grabbed frame.
VisionRunnerBase::VisionRunnerBase(cs::VideoSource videoSource)
    : m_image(std::make_unique<cv::Mat>()),
      m_cvSink("VisionRunner CvSink"),
      m_enabled(true) {
  m_cvSink.SetSource(videoSource);
}
+
// Located here and not in header due to cv::Mat forward declaration: the
// unique_ptr<cv::Mat> destructor needs the complete type.
VisionRunnerBase::~VisionRunnerBase() {}
+
// Grabs a single frame from the sink and runs the vision pipeline on it.
// Refuses to run on the main robot thread (when its id is known), since
// frame grabbing blocks.
void VisionRunnerBase::RunOnce() {
  auto csShared = frc::GetCameraServerShared();
  auto res = csShared->GetRobotMainThreadId();
  // res.second is true only when a main-thread id has been registered.
  if (res.second && (std::this_thread::get_id() == res.first)) {
    csShared->SetVisionRunnerError(
        "VisionRunner::RunOnce() cannot be called from the main robot thread");
    return;
  }
  auto frameTime = m_cvSink.GrabFrame(*m_image);
  if (frameTime == 0) {
    // A frame time of 0 signals a grab error; report it rather than running
    // the pipeline on a stale image.
    auto error = m_cvSink.GetError();
    csShared->ReportDriverStationError(error);
  } else {
    DoProcess(*m_image);
  }
}
+
// Repeatedly grabs and processes frames until Stop() is called.  Like
// RunOnce(), refuses to run on the main robot thread when its id is known.
void VisionRunnerBase::RunForever() {
  auto csShared = frc::GetCameraServerShared();
  auto res = csShared->GetRobotMainThreadId();
  if (res.second && (std::this_thread::get_id() == res.first)) {
    csShared->SetVisionRunnerError(
        "VisionRunner::RunForever() cannot be called from the main robot "
        "thread");
    return;
  }
  // m_enabled is re-checked each iteration; Stop() clears it.
  while (m_enabled) {
    RunOnce();
  }
}
+
+void VisionRunnerBase::Stop() { m_enabled = false; }
diff --git a/cameraserver/src/main/native/include/CameraServer.h b/cameraserver/src/main/native/include/CameraServer.h
new file mode 100644
index 0000000..2fcc0f2
--- /dev/null
+++ b/cameraserver/src/main/native/include/CameraServer.h
@@ -0,0 +1,19 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+// clang-format off
+#ifdef _MSC_VER
+#pragma message "warning: CameraServer.h is deprecated; include cameraserver/CameraServer.h instead"
+#else
+#warning "CameraServer.h is deprecated; include cameraserver/CameraServer.h instead"
+#endif
+
+// clang-format on
+
+#include "cameraserver/CameraServer.h"
diff --git a/cameraserver/src/main/native/include/cameraserver/CameraServer.h b/cameraserver/src/main/native/include/cameraserver/CameraServer.h
new file mode 100644
index 0000000..42a9f0c
--- /dev/null
+++ b/cameraserver/src/main/native/include/cameraserver/CameraServer.h
@@ -0,0 +1,289 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2014-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+
+#include <wpi/ArrayRef.h>
+#include <wpi/Twine.h>
+
+#include "cscore.h"
+
+namespace frc {
+
+/**
+ * Singleton class for creating and keeping camera servers.
+ *
+ * Also publishes camera information to NetworkTables.
+ */
class CameraServer {
 public:
  // First TCP port used by AddServer(name); each subsequent server gets the
  // next sequential port.
  static constexpr uint16_t kBasePort = 1181;
  // Resolution presets accepted by SetSize().
  static constexpr int kSize640x480 = 0;
  static constexpr int kSize320x240 = 1;
  static constexpr int kSize160x120 = 2;

  /**
   * Get the CameraServer instance.
   */
  static CameraServer* GetInstance();

  /**
   * Start automatically capturing images to send to the dashboard.
   *
   * You should call this method to see a camera feed on the dashboard. If you
   * also want to perform vision processing on the roboRIO, use getVideo() to
   * get access to the camera images.
   *
   * The first time this overload is called, it calls StartAutomaticCapture()
   * with device 0, creating a camera named "USB Camera 0". Subsequent calls
   * increment the device number (e.g. 1, 2, etc).
   */
  cs::UsbCamera StartAutomaticCapture();

  /**
   * Start automatically capturing images to send to the dashboard.
   *
   * This overload calls StartAutomaticCapture() with a name of "USB Camera
   * {dev}".
   *
   * @param dev The device number of the camera interface
   */
  cs::UsbCamera StartAutomaticCapture(int dev);

  /**
   * Start automatically capturing images to send to the dashboard.
   *
   * @param name The name to give the camera
   * @param dev The device number of the camera interface
   */
  cs::UsbCamera StartAutomaticCapture(const wpi::Twine& name, int dev);

  /**
   * Start automatically capturing images to send to the dashboard.
   *
   * @param name The name to give the camera
   * @param path The device path (e.g. "/dev/video0") of the camera
   */
  cs::UsbCamera StartAutomaticCapture(const wpi::Twine& name,
                                      const wpi::Twine& path);

  /**
   * Start automatically capturing images to send to the dashboard from
   * an existing camera.
   *
   * @param camera Camera
   */
  cs::MjpegServer StartAutomaticCapture(const cs::VideoSource& camera);

  /**
   * Adds an Axis IP camera.
   *
   * This overload calls AddAxisCamera() with name "Axis Camera".
   *
   * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
   */
  cs::AxisCamera AddAxisCamera(const wpi::Twine& host);

  /**
   * Adds an Axis IP camera.
   *
   * This overload calls AddAxisCamera() with name "Axis Camera".
   *
   * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
   */
  cs::AxisCamera AddAxisCamera(const char* host);

  /**
   * Adds an Axis IP camera.
   *
   * This overload calls AddAxisCamera() with name "Axis Camera".
   *
   * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
   */
  cs::AxisCamera AddAxisCamera(const std::string& host);

  /**
   * Adds an Axis IP camera.
   *
   * This overload calls AddAxisCamera() with name "Axis Camera".
   *
   * @param hosts Array of Camera host IPs/DNS names
   */
  cs::AxisCamera AddAxisCamera(wpi::ArrayRef<std::string> hosts);

  /**
   * Adds an Axis IP camera.
   *
   * This overload calls AddAxisCamera() with name "Axis Camera".
   *
   * @param hosts Array of Camera host IPs/DNS names
   */
  template <typename T>
  cs::AxisCamera AddAxisCamera(std::initializer_list<T> hosts);

  /**
   * Adds an Axis IP camera.
   *
   * @param name The name to give the camera
   * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
   */
  cs::AxisCamera AddAxisCamera(const wpi::Twine& name, const wpi::Twine& host);

  /**
   * Adds an Axis IP camera.
   *
   * @param name The name to give the camera
   * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
   */
  cs::AxisCamera AddAxisCamera(const wpi::Twine& name, const char* host);

  /**
   * Adds an Axis IP camera.
   *
   * @param name The name to give the camera
   * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
   */
  cs::AxisCamera AddAxisCamera(const wpi::Twine& name, const std::string& host);

  /**
   * Adds an Axis IP camera.
   *
   * @param name The name to give the camera
   * @param hosts Array of Camera host IPs/DNS names
   */
  cs::AxisCamera AddAxisCamera(const wpi::Twine& name,
                               wpi::ArrayRef<std::string> hosts);

  /**
   * Adds an Axis IP camera.
   *
   * @param name The name to give the camera
   * @param hosts Array of Camera host IPs/DNS names
   */
  template <typename T>
  cs::AxisCamera AddAxisCamera(const wpi::Twine& name,
                               std::initializer_list<T> hosts);

  /**
   * Get OpenCV access to the primary camera feed. This allows you to
   * get images from the camera for image processing on the roboRIO.
   *
   * <p>This is only valid to call after a camera feed has been added
   * with startAutomaticCapture() or addServer().
   */
  cs::CvSink GetVideo();

  /**
   * Get OpenCV access to the specified camera. This allows you to get
   * images from the camera for image processing on the roboRIO.
   *
   * @param camera Camera (e.g. as returned by startAutomaticCapture).
   */
  cs::CvSink GetVideo(const cs::VideoSource& camera);

  /**
   * Get OpenCV access to the specified camera. This allows you to get
   * images from the camera for image processing on the roboRIO.
   *
   * @param name Camera name
   */
  cs::CvSink GetVideo(const wpi::Twine& name);

  /**
   * Create a MJPEG stream with OpenCV input. This can be called to pass custom
   * annotated images to the dashboard.
   *
   * @param name Name to give the stream
   * @param width Width of the image being sent
   * @param height Height of the image being sent
   */
  cs::CvSource PutVideo(const wpi::Twine& name, int width, int height);

  /**
   * Adds a MJPEG server at the next available port.
   *
   * @param name Server name
   */
  cs::MjpegServer AddServer(const wpi::Twine& name);

  /**
   * Adds a MJPEG server.
   *
   * @param name Server name
   * @param port Port the server listens on
   */
  cs::MjpegServer AddServer(const wpi::Twine& name, int port);

  /**
   * Adds an already created server.
   *
   * @param server Server
   */
  void AddServer(const cs::VideoSink& server);

  /**
   * Removes a server by name.
   *
   * @param name Server name
   */
  void RemoveServer(const wpi::Twine& name);

  /**
   * Get server for the primary camera feed.
   *
   * This is only valid to call after a camera feed has been added with
   * StartAutomaticCapture() or AddServer().
   */
  cs::VideoSink GetServer();

  /**
   * Gets a server by name.
   *
   * @param name Server name
   */
  cs::VideoSink GetServer(const wpi::Twine& name);

  /**
   * Adds an already created camera.
   *
   * @param camera Camera
   */
  void AddCamera(const cs::VideoSource& camera);

  /**
   * Removes a camera by name.
   *
   * @param name Camera name
   */
  void RemoveCamera(const wpi::Twine& name);

  /**
   * Sets the size of the image to use. Use the public kSize constants to set
   * the correct mode, or set it directly on a camera and call the appropriate
   * StartAutomaticCapture method.
   *
   * @deprecated Use SetResolution on the UsbCamera returned by
   * StartAutomaticCapture() instead.
   * @param size The size to use
   */
  void SetSize(int size);

 private:
  // Singleton: only GetInstance() may create/destroy.
  CameraServer();
  ~CameraServer();

  // pImpl holding all state (see CameraServer.cpp).
  struct Impl;
  std::unique_ptr<Impl> m_impl;
};
+
+} // namespace frc
+
+#include "cameraserver/CameraServer.inc"
diff --git a/cameraserver/src/main/native/include/cameraserver/CameraServer.inc b/cameraserver/src/main/native/include/cameraserver/CameraServer.inc
new file mode 100644
index 0000000..5daf29f
--- /dev/null
+++ b/cameraserver/src/main/native/include/cameraserver/CameraServer.inc
@@ -0,0 +1,30 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <string>
+#include <vector>
+
+namespace frc {
+
+/**
+ * Adds an Axis camera under the default name "Axis Camera".
+ *
+ * Convenience overload that forwards to the name-taking overload below.
+ *
+ * @param hosts Camera host names or addresses
+ */
+template <typename T>
+inline cs::AxisCamera CameraServer::AddAxisCamera(
+ std::initializer_list<T> hosts) {
+ return AddAxisCamera("Axis Camera", hosts);
+}
+
+/**
+ * Adds an Axis camera with the given name, accepting the host list as a
+ * braced initializer and forwarding it as a vector of strings.
+ *
+ * @param name Camera name
+ * @param hosts Camera host names or addresses
+ */
+template <typename T>
+inline cs::AxisCamera CameraServer::AddAxisCamera(
+ const wpi::Twine& name, std::initializer_list<T> hosts) {
+ // Materialize the initializer list as std::string values in one shot.
+ std::vector<std::string> hostStrings(hosts.begin(), hosts.end());
+ return AddAxisCamera(name, hostStrings);
+}
+
+} // namespace frc
diff --git a/cameraserver/src/main/native/include/cameraserver/CameraServerShared.h b/cameraserver/src/main/native/include/cameraserver/CameraServerShared.h
new file mode 100644
index 0000000..72b784e
--- /dev/null
+++ b/cameraserver/src/main/native/include/cameraserver/CameraServerShared.h
@@ -0,0 +1,31 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <memory>
+#include <thread>
+#include <utility>
+
+#include <wpi/Twine.h>
+
+namespace frc {
+/**
+ * Interface through which the camera server reports usage and errors to the
+ * hosting environment. An implementation is installed with
+ * SetCameraServerShared() and retrieved with GetCameraServerShared().
+ */
+class CameraServerShared {
+ public:
+ virtual ~CameraServerShared() = default;
+ // Usage-reporting hooks; id identifies the camera/server instance.
+ virtual void ReportUsbCamera(int id) = 0;
+ virtual void ReportAxisCamera(int id) = 0;
+ virtual void ReportVideoServer(int id) = 0;
+ // Error sinks for the camera server and vision runner subsystems.
+ virtual void SetCameraServerError(const wpi::Twine& error) = 0;
+ virtual void SetVisionRunnerError(const wpi::Twine& error) = 0;
+ virtual void ReportDriverStationError(const wpi::Twine& error) = 0;
+ // Returns the robot main thread id; the bool presumably indicates whether
+ // the id is known/valid -- NOTE(review): confirm against implementations.
+ virtual std::pair<std::thread::id, bool> GetRobotMainThreadId() const = 0;
+};
+
+void SetCameraServerShared(std::unique_ptr<CameraServerShared> shared);
+CameraServerShared* GetCameraServerShared();
+} // namespace frc
diff --git a/cameraserver/src/main/native/include/vision/VisionPipeline.h b/cameraserver/src/main/native/include/vision/VisionPipeline.h
new file mode 100644
index 0000000..de8e54c
--- /dev/null
+++ b/cameraserver/src/main/native/include/vision/VisionPipeline.h
@@ -0,0 +1,32 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+namespace cv {
+class Mat;
+} // namespace cv
+
+namespace frc {
+
+/**
+ * A vision pipeline is responsible for running a group of OpenCV algorithms to
+ * extract data from an image.
+ *
+ * @see VisionRunner
+ */
+class VisionPipeline {
+ public:
+ virtual ~VisionPipeline() = default;
+
+ /**
+ * Processes the image input and sets the result objects. Implementations
+ * should make these objects accessible.
+ *
+ * @param mat The OpenCV image to process; taken by non-const reference, so
+ * implementations may modify it in place.
+ */
+ virtual void Process(cv::Mat& mat) = 0;
+};
+} // namespace frc
diff --git a/cameraserver/src/main/native/include/vision/VisionRunner.h b/cameraserver/src/main/native/include/vision/VisionRunner.h
new file mode 100644
index 0000000..a317f80
--- /dev/null
+++ b/cameraserver/src/main/native/include/vision/VisionRunner.h
@@ -0,0 +1,99 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <atomic>
+#include <functional>
+#include <memory>
+
+#include "cscore.h"
+#include "vision/VisionPipeline.h"
+
+namespace frc {
+
+/**
+ * Non-template base class for VisionRunner.
+ *
+ * Owns the image buffer, the cv sink that supplies frames, and the flag used
+ * to stop a RunForever() loop.
+ *
+ * NOTE(review): the public destructor is non-virtual even though DoProcess()
+ * is virtual; deleting a derived VisionRunner<T> through a VisionRunnerBase*
+ * would be undefined behavior -- confirm runners are never owned via a base
+ * pointer.
+ */
+class VisionRunnerBase {
+ public:
+ /**
+ * Creates a new vision runner. It will take images from the {@code
+ * videoSource}, and call the virtual DoProcess() method.
+ *
+ * @param videoSource the video source to use to supply images for the
+ * pipeline
+ */
+ explicit VisionRunnerBase(cs::VideoSource videoSource);
+
+ ~VisionRunnerBase();
+
+ // Non-copyable: the sink and image buffer are unique to this runner.
+ VisionRunnerBase(const VisionRunnerBase&) = delete;
+ VisionRunnerBase& operator=(const VisionRunnerBase&) = delete;
+
+ /**
+ * Runs the pipeline one time, giving it the next image from the video source
+ * specified in the constructor. This will block until the source either has
+ * an image or throws an error. If the source successfully supplied a frame,
+ * the pipeline's image input will be set, the pipeline will run, and the
+ * listener specified in the constructor will be called to notify it that the
+ * pipeline ran. This must be run in a dedicated thread, and cannot be used in
+ * the main robot thread because it will freeze the robot program.
+ *
+ * <p>This method is exposed to allow teams to add additional functionality or
+ * have their own ways to run the pipeline. Most teams, however, should just
+ * use RunForever() in its own thread using a std::thread.</p>
+ */
+ void RunOnce();
+
+ /**
+ * A convenience method that calls RunOnce() in an infinite loop.
+ * This must be run in a dedicated thread, and cannot be used in the main
+ * robot thread because it will freeze the robot program.
+ *
+ * <strong>Do not call this method directly from the main thread.</strong>
+ */
+ void RunForever();
+
+ /**
+ * Stop a RunForever() loop.
+ */
+ void Stop();
+
+ protected:
+ // Called with each captured frame; implemented by VisionRunner<T>.
+ virtual void DoProcess(cv::Mat& image) = 0;
+
+ private:
+ // Frame buffer handed to DoProcess(); heap-held so this header does not
+ // need the full cv::Mat definition.
+ std::unique_ptr<cv::Mat> m_image;
+ // Sink that pulls frames from the video source given to the constructor.
+ cs::CvSink m_cvSink;
+ // Cleared by Stop() to break out of a RunForever() loop.
+ std::atomic_bool m_enabled;
+};
+
+/**
+ * A vision runner is a convenient wrapper object to make it easy to run vision
+ * pipelines from robot code. The easiest way to use this is to run it in a
+ * std::thread and use the listener to take snapshots of the pipeline's outputs.
+ *
+ * @see VisionPipeline
+ */
+template <typename T>
+class VisionRunner : public VisionRunnerBase {
+ public:
+ /**
+ * Creates a runner that feeds frames from the source to the pipeline and
+ * invokes the listener after each frame is processed.
+ *
+ * @param videoSource Source supplying frames for the pipeline
+ * @param pipeline Pipeline to run; not owned, must outlive this runner
+ * @param listener Called with the pipeline after each Process() call
+ */
+ VisionRunner(cs::VideoSource videoSource, T* pipeline,
+ std::function<void(T&)> listener);
+ virtual ~VisionRunner() = default;
+
+ protected:
+ // Runs the pipeline on the captured frame, then notifies the listener.
+ void DoProcess(cv::Mat& image) override;
+
+ private:
+ T* m_pipeline; // non-owning
+ std::function<void(T&)> m_listener;
+};
+} // namespace frc
+
+#include "VisionRunner.inc"
diff --git a/cameraserver/src/main/native/include/vision/VisionRunner.inc b/cameraserver/src/main/native/include/vision/VisionRunner.inc
new file mode 100644
index 0000000..1a38048
--- /dev/null
+++ b/cameraserver/src/main/native/include/vision/VisionRunner.inc
@@ -0,0 +1,35 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <utility>
+
+namespace frc {
+
+/**
+ * Creates a new vision runner. It will take images from the {@code
+ * videoSource}, send them to the {@code pipeline}, and call the {@code
+ * listener} when the pipeline has finished to alert user code when it is safe
+ * to access the pipeline's outputs.
+ *
+ * @param videoSource The video source to use to supply images for the pipeline
+ * @param pipeline The vision pipeline to run; not owned, must outlive this
+ *                 runner
+ * @param listener A function to call after the pipeline has finished running
+ */
+template <typename T>
+VisionRunner<T>::VisionRunner(cs::VideoSource videoSource, T* pipeline,
+ std::function<void(T&)> listener)
+ // Move the by-value sink parameters into place instead of copying them:
+ // avoids duplicating the std::function's callable state and an extra
+ // source-handle copy.
+ : VisionRunnerBase(std::move(videoSource)),
+ m_pipeline(pipeline),
+ m_listener(std::move(listener)) {}
+
+/**
+ * Feeds one captured frame to the pipeline, then notifies the listener that
+ * the pipeline's outputs are safe to read.
+ *
+ * @param image Frame supplied by the base class capture loop
+ */
+template <typename T>
+void VisionRunner<T>::DoProcess(cv::Mat& image) {
+ m_pipeline->Process(image);
+ m_listener(*m_pipeline);
+}
+
+} // namespace frc