Rename our allwpilib (now 2020) so it no longer has 2019 in the name
Change-Id: I3c07f85ed32ab8b97db765a9b43f2a6ce7da964a
diff --git a/cameraserver/src/dev/java/edu/wpi/first/cameraserver/DevMain.java b/cameraserver/src/dev/java/edu/wpi/first/cameraserver/DevMain.java
new file mode 100644
index 0000000..1182ac4
--- /dev/null
+++ b/cameraserver/src/dev/java/edu/wpi/first/cameraserver/DevMain.java
@@ -0,0 +1,17 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.cameraserver;
+
+public final class DevMain {
+ public static void main(String[] args) {
+
+ }
+
+ private DevMain() {
+ }
+}
diff --git a/cameraserver/src/dev/native/cpp/main.cpp b/cameraserver/src/dev/native/cpp/main.cpp
new file mode 100644
index 0000000..e324b44
--- /dev/null
+++ b/cameraserver/src/dev/native/cpp/main.cpp
@@ -0,0 +1,8 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+int main() {}
diff --git a/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServer.java b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServer.java
new file mode 100644
index 0000000..3ef23c3
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServer.java
@@ -0,0 +1,802 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2019 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.cameraserver;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import edu.wpi.cscore.AxisCamera;
+import edu.wpi.cscore.CameraServerJNI;
+import edu.wpi.cscore.CvSink;
+import edu.wpi.cscore.CvSource;
+import edu.wpi.cscore.MjpegServer;
+import edu.wpi.cscore.UsbCamera;
+import edu.wpi.cscore.VideoEvent;
+import edu.wpi.cscore.VideoException;
+import edu.wpi.cscore.VideoListener;
+import edu.wpi.cscore.VideoMode;
+import edu.wpi.cscore.VideoMode.PixelFormat;
+import edu.wpi.cscore.VideoProperty;
+import edu.wpi.cscore.VideoSink;
+import edu.wpi.cscore.VideoSource;
+import edu.wpi.first.networktables.EntryListenerFlags;
+import edu.wpi.first.networktables.NetworkTable;
+import edu.wpi.first.networktables.NetworkTableEntry;
+import edu.wpi.first.networktables.NetworkTableInstance;
+
+/**
+ * Singleton class for creating and keeping camera servers.
+ * Also publishes camera information to NetworkTables.
+ */
+@SuppressWarnings("PMD.TooManyMethods")
+public final class CameraServer {
+ public static final int kBasePort = 1181;
+
+ @Deprecated
+ public static final int kSize640x480 = 0;
+ @Deprecated
+ public static final int kSize320x240 = 1;
+ @Deprecated
+ public static final int kSize160x120 = 2;
+
+ private static final String kPublishName = "/CameraPublisher";
+ private static CameraServer server;
+
+ /**
+ * Get the CameraServer instance.
+ */
+ public static synchronized CameraServer getInstance() {
+ if (server == null) {
+ server = new CameraServer();
+ }
+ return server;
+ }
+
+ private final AtomicInteger m_defaultUsbDevice;
+ private String m_primarySourceName;
+ private final Map<String, VideoSource> m_sources;
+ private final Map<String, VideoSink> m_sinks;
+ private final Map<Integer, NetworkTable> m_tables; // indexed by source handle
+ // source handle indexed by sink handle
+ private final Map<Integer, Integer> m_fixedSources;
+ private final NetworkTable m_publishTable;
+ private final VideoListener m_videoListener; //NOPMD
+ private final int m_tableListener; //NOPMD
+ private int m_nextPort;
+ private String[] m_addresses;
+
+ @SuppressWarnings("JavadocMethod")
+ private static String makeSourceValue(int source) {
+ switch (VideoSource.getKindFromInt(CameraServerJNI.getSourceKind(source))) {
+ case kUsb:
+ return "usb:" + CameraServerJNI.getUsbCameraPath(source);
+ case kHttp: {
+ String[] urls = CameraServerJNI.getHttpCameraUrls(source);
+ if (urls.length > 0) {
+ return "ip:" + urls[0];
+ } else {
+ return "ip:";
+ }
+ }
+ case kCv:
+ return "cv:";
+ default:
+ return "unknown:";
+ }
+ }
+
+ @SuppressWarnings("JavadocMethod")
+ private static String makeStreamValue(String address, int port) {
+ return "mjpg:http://" + address + ":" + port + "/?action=stream";
+ }
+
+ @SuppressWarnings({"JavadocMethod", "PMD.AvoidUsingHardCodedIP"})
+ private synchronized String[] getSinkStreamValues(int sink) {
+ // Ignore all but MjpegServer
+ if (VideoSink.getKindFromInt(CameraServerJNI.getSinkKind(sink)) != VideoSink.Kind.kMjpeg) {
+ return new String[0];
+ }
+
+ // Get port
+ int port = CameraServerJNI.getMjpegServerPort(sink);
+
+ // Generate values
+ ArrayList<String> values = new ArrayList<>(m_addresses.length + 1);
+ String listenAddress = CameraServerJNI.getMjpegServerListenAddress(sink);
+ if (!listenAddress.isEmpty()) {
+ // If a listen address is specified, only use that
+ values.add(makeStreamValue(listenAddress, port));
+ } else {
+ // Otherwise generate for hostname and all interface addresses
+ values.add(makeStreamValue(CameraServerJNI.getHostname() + ".local", port));
+ for (String addr : m_addresses) {
+ if ("127.0.0.1".equals(addr)) {
+ continue; // ignore localhost
+ }
+ values.add(makeStreamValue(addr, port));
+ }
+ }
+
+ return values.toArray(new String[0]);
+ }
+
+ @SuppressWarnings({"JavadocMethod", "PMD.AvoidUsingHardCodedIP"})
+ private synchronized String[] getSourceStreamValues(int source) {
+ // Ignore all but HttpCamera
+ if (VideoSource.getKindFromInt(CameraServerJNI.getSourceKind(source))
+ != VideoSource.Kind.kHttp) {
+ return new String[0];
+ }
+
+ // Generate values
+ String[] values = CameraServerJNI.getHttpCameraUrls(source);
+ for (int j = 0; j < values.length; j++) {
+ values[j] = "mjpg:" + values[j];
+ }
+
+ if (CameraServerSharedStore.getCameraServerShared().isRoboRIO()) {
+ // Look to see if we have a passthrough server for this source
+ // Only do this on the roboRIO
+ for (VideoSink i : m_sinks.values()) {
+ int sink = i.getHandle();
+ int sinkSource = CameraServerJNI.getSinkSource(sink);
+ if (source == sinkSource
+ && VideoSink.getKindFromInt(CameraServerJNI.getSinkKind(sink))
+ == VideoSink.Kind.kMjpeg) {
+ // Add USB-only passthrough
+ String[] finalValues = Arrays.copyOf(values, values.length + 1);
+ int port = CameraServerJNI.getMjpegServerPort(sink);
+ finalValues[values.length] = makeStreamValue("172.22.11.2", port);
+ return finalValues;
+ }
+ }
+ }
+
+ return values;
+ }
+
+ @SuppressWarnings({"JavadocMethod", "PMD.AvoidUsingHardCodedIP", "PMD.CyclomaticComplexity"})
+ private synchronized void updateStreamValues() {
+ // Over all the sinks...
+ for (VideoSink i : m_sinks.values()) {
+ int sink = i.getHandle();
+
+ // Get the source's subtable (if none exists, we're done)
+ int source = Objects.requireNonNullElseGet(m_fixedSources.get(sink),
+ () -> CameraServerJNI.getSinkSource(sink));
+
+ if (source == 0) {
+ continue;
+ }
+ NetworkTable table = m_tables.get(source);
+ if (table != null) {
+ // Don't set stream values if this is a HttpCamera passthrough
+ if (VideoSource.getKindFromInt(CameraServerJNI.getSourceKind(source))
+ == VideoSource.Kind.kHttp) {
+ continue;
+ }
+
+ // Set table value
+ String[] values = getSinkStreamValues(sink);
+ if (values.length > 0) {
+ table.getEntry("streams").setStringArray(values);
+ }
+ }
+ }
+
+ // Over all the sources...
+ for (VideoSource i : m_sources.values()) {
+ int source = i.getHandle();
+
+ // Get the source's subtable (if none exists, we're done)
+ NetworkTable table = m_tables.get(source);
+ if (table != null) {
+ // Set table value
+ String[] values = getSourceStreamValues(source);
+ if (values.length > 0) {
+ table.getEntry("streams").setStringArray(values);
+ }
+ }
+ }
+ }
+
+ @SuppressWarnings("JavadocMethod")
+ private static String pixelFormatToString(PixelFormat pixelFormat) {
+ switch (pixelFormat) {
+ case kMJPEG:
+ return "MJPEG";
+ case kYUYV:
+ return "YUYV";
+ case kRGB565:
+ return "RGB565";
+ case kBGR:
+ return "BGR";
+ case kGray:
+ return "Gray";
+ default:
+ return "Unknown";
+ }
+ }
+
+ // Provide string description of video mode.
+ // The returned string is "{width}x{height} {format} {fps} fps".
+ @SuppressWarnings("JavadocMethod")
+ private static String videoModeToString(VideoMode mode) {
+ return mode.width + "x" + mode.height + " " + pixelFormatToString(mode.pixelFormat)
+ + " " + mode.fps + " fps";
+ }
+
+ @SuppressWarnings("JavadocMethod")
+ private static String[] getSourceModeValues(int sourceHandle) {
+ VideoMode[] modes = CameraServerJNI.enumerateSourceVideoModes(sourceHandle);
+ String[] modeStrings = new String[modes.length];
+ for (int i = 0; i < modes.length; i++) {
+ modeStrings[i] = videoModeToString(modes[i]);
+ }
+ return modeStrings;
+ }
+
+ @SuppressWarnings({"JavadocMethod", "PMD.CyclomaticComplexity"})
+ private static void putSourcePropertyValue(NetworkTable table, VideoEvent event, boolean isNew) {
+ String name;
+ String infoName;
+ if (event.name.startsWith("raw_")) {
+ name = "RawProperty/" + event.name;
+ infoName = "RawPropertyInfo/" + event.name;
+ } else {
+ name = "Property/" + event.name;
+ infoName = "PropertyInfo/" + event.name;
+ }
+
+ NetworkTableEntry entry = table.getEntry(name);
+ try {
+ switch (event.propertyKind) {
+ case kBoolean:
+ if (isNew) {
+ entry.setDefaultBoolean(event.value != 0);
+ } else {
+ entry.setBoolean(event.value != 0);
+ }
+ break;
+ case kInteger:
+ case kEnum:
+ if (isNew) {
+ entry.setDefaultDouble(event.value);
+ table.getEntry(infoName + "/min").setDouble(
+ CameraServerJNI.getPropertyMin(event.propertyHandle));
+ table.getEntry(infoName + "/max").setDouble(
+ CameraServerJNI.getPropertyMax(event.propertyHandle));
+ table.getEntry(infoName + "/step").setDouble(
+ CameraServerJNI.getPropertyStep(event.propertyHandle));
+ table.getEntry(infoName + "/default").setDouble(
+ CameraServerJNI.getPropertyDefault(event.propertyHandle));
+ } else {
+ entry.setDouble(event.value);
+ }
+ break;
+ case kString:
+ if (isNew) {
+ entry.setDefaultString(event.valueStr);
+ } else {
+ entry.setString(event.valueStr);
+ }
+ break;
+ default:
+ break;
+ }
+ } catch (VideoException ignored) {
+ // ignore
+ }
+ }
+
+ @SuppressWarnings({"JavadocMethod", "PMD.UnusedLocalVariable", "PMD.ExcessiveMethodLength",
+ "PMD.NPathComplexity"})
+ private CameraServer() {
+ m_defaultUsbDevice = new AtomicInteger();
+ m_sources = new HashMap<>();
+ m_sinks = new HashMap<>();
+ m_fixedSources = new HashMap<>();
+ m_tables = new HashMap<>();
+ m_publishTable = NetworkTableInstance.getDefault().getTable(kPublishName);
+ m_nextPort = kBasePort;
+ m_addresses = new String[0];
+
+ // We publish sources to NetworkTables using the following structure:
+ // "/CameraPublisher/{Source.Name}/" - root
+ // - "source" (string): Descriptive, prefixed with type (e.g. "usb:0")
+ // - "streams" (string array): URLs that can be used to stream data
+ // - "description" (string): Description of the source
+ // - "connected" (boolean): Whether source is connected
+ // - "mode" (string): Current video mode
+ // - "modes" (string array): Available video modes
+ // - "Property/{Property}" - Property values
+ // - "PropertyInfo/{Property}" - Property supporting information
+
+ // Listener for video events
+ m_videoListener = new VideoListener(event -> {
+ switch (event.kind) {
+ case kSourceCreated: {
+ // Create subtable for the camera
+ NetworkTable table = m_publishTable.getSubTable(event.name);
+ m_tables.put(event.sourceHandle, table);
+ table.getEntry("source").setString(makeSourceValue(event.sourceHandle));
+ table.getEntry("description").setString(
+ CameraServerJNI.getSourceDescription(event.sourceHandle));
+ table.getEntry("connected").setBoolean(
+ CameraServerJNI.isSourceConnected(event.sourceHandle));
+ table.getEntry("streams").setStringArray(getSourceStreamValues(event.sourceHandle));
+ try {
+ VideoMode mode = CameraServerJNI.getSourceVideoMode(event.sourceHandle);
+ table.getEntry("mode").setDefaultString(videoModeToString(mode));
+ table.getEntry("modes").setStringArray(getSourceModeValues(event.sourceHandle));
+ } catch (VideoException ignored) {
+ // Do nothing. Let the other event handlers update this if there is an error.
+ }
+ break;
+ }
+ case kSourceDestroyed: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ table.getEntry("source").setString("");
+ table.getEntry("streams").setStringArray(new String[0]);
+ table.getEntry("modes").setStringArray(new String[0]);
+ }
+ break;
+ }
+ case kSourceConnected: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ // update the description too (as it may have changed)
+ table.getEntry("description").setString(
+ CameraServerJNI.getSourceDescription(event.sourceHandle));
+ table.getEntry("connected").setBoolean(true);
+ }
+ break;
+ }
+ case kSourceDisconnected: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ table.getEntry("connected").setBoolean(false);
+ }
+ break;
+ }
+ case kSourceVideoModesUpdated: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ table.getEntry("modes").setStringArray(getSourceModeValues(event.sourceHandle));
+ }
+ break;
+ }
+ case kSourceVideoModeChanged: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ table.getEntry("mode").setString(videoModeToString(event.mode));
+ }
+ break;
+ }
+ case kSourcePropertyCreated: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ putSourcePropertyValue(table, event, true);
+ }
+ break;
+ }
+ case kSourcePropertyValueUpdated: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ putSourcePropertyValue(table, event, false);
+ }
+ break;
+ }
+ case kSourcePropertyChoicesUpdated: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ try {
+ String[] choices = CameraServerJNI.getEnumPropertyChoices(event.propertyHandle);
+ table.getEntry("PropertyInfo/" + event.name + "/choices").setStringArray(choices);
+ } catch (VideoException ignored) {
+ // ignore
+ }
+ }
+ break;
+ }
+ case kSinkSourceChanged:
+ case kSinkCreated:
+ case kSinkDestroyed:
+ case kNetworkInterfacesChanged: {
+ m_addresses = CameraServerJNI.getNetworkInterfaces();
+ updateStreamValues();
+ break;
+ }
+ default:
+ break;
+ }
+ }, 0x4fff, true);
+
+ // Listener for NetworkTable events
+ // We don't currently support changing settings via NT due to
+ // synchronization issues, so just update to current setting if someone
+ // else tries to change it.
+ m_tableListener = NetworkTableInstance.getDefault().addEntryListener(kPublishName + "/",
+ event -> {
+ String relativeKey = event.name.substring(kPublishName.length() + 1);
+
+ // get source (sourceName/...)
+ int subKeyIndex = relativeKey.indexOf('/');
+ if (subKeyIndex == -1) {
+ return;
+ }
+ String sourceName = relativeKey.substring(0, subKeyIndex);
+ VideoSource source = m_sources.get(sourceName);
+ if (source == null) {
+ return;
+ }
+
+ // get subkey
+ relativeKey = relativeKey.substring(subKeyIndex + 1);
+
+ // handle standard names
+ String propName;
+ if ("mode".equals(relativeKey)) {
+ // reset to current mode
+ event.getEntry().setString(videoModeToString(source.getVideoMode()));
+ return;
+ } else if (relativeKey.startsWith("Property/")) {
+ propName = relativeKey.substring(9);
+ } else if (relativeKey.startsWith("RawProperty/")) {
+ propName = relativeKey.substring(12);
+ } else {
+ return; // ignore
+ }
+
+ // everything else is a property
+ VideoProperty property = source.getProperty(propName);
+ switch (property.getKind()) {
+ case kNone:
+ return;
+ case kBoolean:
+ // reset to current setting
+ event.getEntry().setBoolean(property.get() != 0);
+ return;
+ case kInteger:
+ case kEnum:
+ // reset to current setting
+ event.getEntry().setDouble(property.get());
+ return;
+ case kString:
+ // reset to current setting
+ event.getEntry().setString(property.getString());
+ return;
+ default:
+ return;
+ }
+ }, EntryListenerFlags.kImmediate | EntryListenerFlags.kUpdate);
+ }
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * <p>You should call this method to see a camera feed on the dashboard.
+ * If you also want to perform vision processing on the roboRIO, use
+ * getVideo() to get access to the camera images.
+ *
+ * <p>The first time this overload is called, it calls
+ * {@link #startAutomaticCapture(int)} with device 0, creating a camera
+ * named "USB Camera 0". Subsequent calls increment the device number
+ * (e.g. 1, 2, etc).
+ */
+ public UsbCamera startAutomaticCapture() {
+ UsbCamera camera = startAutomaticCapture(m_defaultUsbDevice.getAndIncrement());
+ CameraServerSharedStore.getCameraServerShared().reportUsbCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * <p>This overload calls {@link #startAutomaticCapture(String, int)} with
+ * a name of "USB Camera {dev}".
+ *
+ * @param dev The device number of the camera interface
+ */
+ public UsbCamera startAutomaticCapture(int dev) {
+ UsbCamera camera = new UsbCamera("USB Camera " + dev, dev);
+ startAutomaticCapture(camera);
+ CameraServerSharedStore.getCameraServerShared().reportUsbCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * @param name The name to give the camera
+ * @param dev The device number of the camera interface
+ */
+ public UsbCamera startAutomaticCapture(String name, int dev) {
+ UsbCamera camera = new UsbCamera(name, dev);
+ startAutomaticCapture(camera);
+ CameraServerSharedStore.getCameraServerShared().reportUsbCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * @param name The name to give the camera
+ * @param path The device path (e.g. "/dev/video0") of the camera
+ */
+ public UsbCamera startAutomaticCapture(String name, String path) {
+ UsbCamera camera = new UsbCamera(name, path);
+ startAutomaticCapture(camera);
+ CameraServerSharedStore.getCameraServerShared().reportUsbCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Start automatically capturing images to send to the dashboard from
+ * an existing camera.
+ *
+ * @param camera Camera
+ */
+ public MjpegServer startAutomaticCapture(VideoSource camera) {
+ addCamera(camera);
+ MjpegServer server = addServer("serve_" + camera.getName());
+ server.setSource(camera);
+ return server;
+ }
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * <p>This overload calls {@link #addAxisCamera(String, String)} with
+ * name "Axis Camera".
+ *
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ public AxisCamera addAxisCamera(String host) {
+ return addAxisCamera("Axis Camera", host);
+ }
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * <p>This overload calls {@link #addAxisCamera(String, String[])} with
+ * name "Axis Camera".
+ *
+ * @param hosts Array of Camera host IPs/DNS names
+ */
+ public AxisCamera addAxisCamera(String[] hosts) {
+ return addAxisCamera("Axis Camera", hosts);
+ }
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ public AxisCamera addAxisCamera(String name, String host) {
+ AxisCamera camera = new AxisCamera(name, host);
+ // Create a passthrough MJPEG server for USB access
+ startAutomaticCapture(camera);
+ CameraServerSharedStore.getCameraServerShared().reportAxisCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param hosts Array of Camera host IPs/DNS names
+ */
+ public AxisCamera addAxisCamera(String name, String[] hosts) {
+ AxisCamera camera = new AxisCamera(name, hosts);
+ // Create a passthrough MJPEG server for USB access
+ startAutomaticCapture(camera);
+ CameraServerSharedStore.getCameraServerShared().reportAxisCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Adds a virtual camera for switching between two streams. Unlike the
+ * other addCamera methods, this returns a VideoSink rather than a
+ * VideoSource. Calling setSource() on the returned object can be used
+ * to switch the actual source of the stream.
+ *
+ * @param name The name to give the camera
+ * @return The MJPEG server serving the switched stream
+ */
+ public MjpegServer addSwitchedCamera(String name) {
+ // create a dummy CvSource
+ CvSource source = new CvSource(name, VideoMode.PixelFormat.kMJPEG, 160, 120, 30);
+ MjpegServer server = startAutomaticCapture(source);
+ synchronized (this) {
+ m_fixedSources.put(server.getHandle(), source.getHandle());
+ }
+
+ return server;
+ }
+
+ /**
+ * Get OpenCV access to the primary camera feed. This allows you to
+ * get images from the camera for image processing on the roboRIO.
+ *
+ * <p>This is only valid to call after a camera feed has been added
+ * with startAutomaticCapture() or addServer().
+ */
+ public CvSink getVideo() {
+ VideoSource source;
+ synchronized (this) {
+ if (m_primarySourceName == null) {
+ throw new VideoException("no camera available");
+ }
+ source = m_sources.get(m_primarySourceName);
+ }
+ if (source == null) {
+ throw new VideoException("no camera available");
+ }
+ return getVideo(source);
+ }
+
+ /**
+ * Get OpenCV access to the specified camera. This allows you to get
+ * images from the camera for image processing on the roboRIO.
+ *
+ * @param camera Camera (e.g. as returned by startAutomaticCapture).
+ */
+ public CvSink getVideo(VideoSource camera) {
+ String name = "opencv_" + camera.getName();
+
+ synchronized (this) {
+ VideoSink sink = m_sinks.get(name);
+ if (sink != null) {
+ VideoSink.Kind kind = sink.getKind();
+ if (kind != VideoSink.Kind.kCv) {
+ throw new VideoException("expected OpenCV sink, but got " + kind);
+ }
+ return (CvSink) sink;
+ }
+ }
+
+ CvSink newsink = new CvSink(name);
+ newsink.setSource(camera);
+ addServer(newsink);
+ return newsink;
+ }
+
+ /**
+ * Get OpenCV access to the specified camera. This allows you to get
+ * images from the camera for image processing on the roboRIO.
+ *
+ * @param name Camera name
+ */
+ public CvSink getVideo(String name) {
+ VideoSource source;
+ synchronized (this) {
+ source = m_sources.get(name);
+ if (source == null) {
+ throw new VideoException("could not find camera " + name);
+ }
+ }
+ return getVideo(source);
+ }
+
+ /**
+ * Create a MJPEG stream with OpenCV input. This can be called to pass custom
+ * annotated images to the dashboard.
+ *
+ * @param name Name to give the stream
+ * @param width Width of the image being sent
+ * @param height Height of the image being sent
+ */
+ public CvSource putVideo(String name, int width, int height) {
+ CvSource source = new CvSource(name, VideoMode.PixelFormat.kMJPEG, width, height, 30);
+ startAutomaticCapture(source);
+ return source;
+ }
+
+ /**
+ * Adds a MJPEG server at the next available port.
+ *
+ * @param name Server name
+ */
+ public MjpegServer addServer(String name) {
+ int port;
+ synchronized (this) {
+ port = m_nextPort;
+ m_nextPort++;
+ }
+ return addServer(name, port);
+ }
+
+ /**
+ * Adds a MJPEG server.
+ *
+ * @param name Server name
+ * @param port Port number
+ */
+ public MjpegServer addServer(String name, int port) {
+ MjpegServer server = new MjpegServer(name, port);
+ addServer(server);
+ return server;
+ }
+
+ /**
+ * Adds an already created server.
+ *
+ * @param server Server
+ */
+ public void addServer(VideoSink server) {
+ synchronized (this) {
+ m_sinks.put(server.getName(), server);
+ }
+ }
+
+ /**
+ * Removes a server by name.
+ *
+ * @param name Server name
+ */
+ public void removeServer(String name) {
+ synchronized (this) {
+ m_sinks.remove(name);
+ }
+ }
+
+ /**
+ * Get server for the primary camera feed.
+ *
+ * <p>This is only valid to call after a camera feed has been added
+ * with startAutomaticCapture() or addServer().
+ */
+ public VideoSink getServer() {
+ synchronized (this) {
+ if (m_primarySourceName == null) {
+ throw new VideoException("no camera available");
+ }
+ return getServer("serve_" + m_primarySourceName);
+ }
+ }
+
+ /**
+ * Gets a server by name.
+ *
+ * @param name Server name
+ */
+ public VideoSink getServer(String name) {
+ synchronized (this) {
+ return m_sinks.get(name);
+ }
+ }
+
+ /**
+ * Adds an already created camera.
+ *
+ * @param camera Camera
+ */
+ public void addCamera(VideoSource camera) {
+ String name = camera.getName();
+ synchronized (this) {
+ if (m_primarySourceName == null) {
+ m_primarySourceName = name;
+ }
+ m_sources.put(name, camera);
+ }
+ }
+
+ /**
+ * Removes a camera by name.
+ *
+ * @param name Camera name
+ */
+ public void removeCamera(String name) {
+ synchronized (this) {
+ m_sources.remove(name);
+ }
+ }
+}
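
For reference, a minimal sketch of how the API added above is typically used
from robot code (the class name and the OpenCV processing step are
illustrative; running this outside a robot program would also require the
cscore and OpenCV native libraries to be loaded):

    import org.opencv.core.Mat;
    import org.opencv.imgproc.Imgproc;

    import edu.wpi.cscore.CvSink;
    import edu.wpi.cscore.CvSource;
    import edu.wpi.cscore.MjpegServer;
    import edu.wpi.cscore.UsbCamera;
    import edu.wpi.first.cameraserver.CameraServer;

    public final class CameraServerUsageSketch {
      public static void main(String[] args) {
        CameraServer cs = CameraServer.getInstance();

        // Serve USB camera 0 to the dashboard (the first server lands on kBasePort, 1181).
        UsbCamera camera = cs.startAutomaticCapture();

        // Grab frames for OpenCV processing and publish the processed stream.
        CvSink sink = cs.getVideo(camera);
        CvSource output = cs.putVideo("Processed", 320, 240);

        // A switched stream whose source can be changed at runtime via setSource().
        MjpegServer switched = cs.addSwitchedCamera("Switched");
        switched.setSource(camera);

        Mat frame = new Mat();
        while (!Thread.interrupted()) {
          if (sink.grabFrame(frame) == 0) {
            continue; // timeout or error; sink.getError() has the details
          }
          Imgproc.cvtColor(frame, frame, Imgproc.COLOR_BGR2GRAY);
          output.putFrame(frame);
        }
      }
    }
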
diff --git a/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerShared.java b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerShared.java
new file mode 100644
index 0000000..c9cbb8f
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerShared.java
@@ -0,0 +1,55 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018-2019 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.cameraserver;
+
+public interface CameraServerShared {
+ /**
+ * Get the ID of the main robot thread.
+ *
+ * @return the main robot thread ID, or null if it has not been set
+ */
+ Long getRobotMainThreadId();
+
+ /**
+ * Report an error to the driver station.
+ *
+ * @param error the error to set
+ */
+ void reportDriverStationError(String error);
+
+ /**
+ * Report video server usage.
+ *
+ * @param id the usage id
+ */
+ void reportVideoServer(int id);
+
+ /**
+ * Report USB camera usage.
+ *
+ * @param id the usage id
+ */
+ void reportUsbCamera(int id);
+
+ /**
+ * Report Axis camera usage.
+ *
+ * @param id the usage id
+ */
+ void reportAxisCamera(int id);
+
+ /**
+ * Return whether the code is running on a roboRIO.
+ *
+ * @return true if on roboRIO
+ */
+ default boolean isRoboRIO() {
+ return false;
+ }
+}
diff --git a/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerSharedStore.java b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerSharedStore.java
new file mode 100644
index 0000000..c0cf2bb
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerSharedStore.java
@@ -0,0 +1,58 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.cameraserver;
+
+public final class CameraServerSharedStore {
+ private static CameraServerShared cameraServerShared;
+
+ private CameraServerSharedStore() {
+ }
+
+ /**
+ * Get the CameraServerShared object. If no implementation has been set,
+ * a no-op default is created and returned.
+ */
+ public static synchronized CameraServerShared getCameraServerShared() {
+ if (cameraServerShared == null) {
+ cameraServerShared = new CameraServerShared() {
+
+ @Override
+ public void reportVideoServer(int id) {
+
+ }
+
+ @Override
+ public void reportUsbCamera(int id) {
+
+ }
+
+ @Override
+ public void reportDriverStationError(String error) {
+
+ }
+
+ @Override
+ public void reportAxisCamera(int id) {
+
+ }
+
+ @Override
+ public Long getRobotMainThreadId() {
+ return null;
+ }
+ };
+ }
+ return cameraServerShared;
+ }
+
+ /**
+ * Set the CameraServerShared object.
+ *
+ * @param shared the implementation to use
+ */
+ public static synchronized void setCameraServerShared(CameraServerShared shared) {
+ cameraServerShared = shared;
+ }
+}
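
A sketch of a concrete CameraServerShared for an application that only wants
errors logged to stderr (wpilibj installs its own implementation at robot
startup; the class name here is made up for illustration):

    import edu.wpi.first.cameraserver.CameraServerShared;
    import edu.wpi.first.cameraserver.CameraServerSharedStore;

    final class LoggingCameraServerShared implements CameraServerShared {
      // Assumes this object is constructed on the main robot thread.
      private final long m_mainThreadId = Thread.currentThread().getId();

      @Override
      public Long getRobotMainThreadId() {
        return m_mainThreadId;
      }

      @Override
      public void reportDriverStationError(String error) {
        System.err.println("CameraServer error: " + error);
      }

      // Usage reporting is a no-op outside the roboRIO environment.
      @Override
      public void reportVideoServer(int id) {}

      @Override
      public void reportUsbCamera(int id) {}

      @Override
      public void reportAxisCamera(int id) {}
    }

It would be installed once, before any CameraServer call, with
CameraServerSharedStore.setCameraServerShared(new LoggingCameraServerShared()).
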
diff --git a/cameraserver/src/main/java/edu/wpi/first/vision/VisionPipeline.java b/cameraserver/src/main/java/edu/wpi/first/vision/VisionPipeline.java
new file mode 100644
index 0000000..6df10e7
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/vision/VisionPipeline.java
@@ -0,0 +1,26 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.vision;
+
+import org.opencv.core.Mat;
+
+/**
+ * A vision pipeline is responsible for running a group of
+ * OpenCV algorithms to extract data from an image.
+ *
+ * @see VisionRunner
+ * @see VisionThread
+ */
+public interface VisionPipeline {
+ /**
+ * Processes the image input and sets the result objects.
+ * Implementations should make these result objects accessible so that
+ * they can be copied after each run (see {@link VisionRunner.Listener}).
+ */
+ void process(Mat image);
+
+}
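
To make the contract concrete, a sketch of a VisionPipeline implementation
(the threshold constants and contour extraction are illustrative, not part of
the interface):

    import java.util.ArrayList;
    import java.util.List;

    import org.opencv.core.Mat;
    import org.opencv.core.MatOfPoint;
    import org.opencv.imgproc.Imgproc;

    import edu.wpi.first.vision.VisionPipeline;

    public class ContourPipeline implements VisionPipeline {
      // Result object: read it only after process() returns,
      // e.g. from a VisionRunner.Listener callback.
      public final List<MatOfPoint> contours = new ArrayList<>();

      private final Mat m_gray = new Mat();
      private final Mat m_binary = new Mat();

      @Override
      public void process(Mat image) {
        contours.clear();
        Imgproc.cvtColor(image, m_gray, Imgproc.COLOR_BGR2GRAY);
        Imgproc.threshold(m_gray, m_binary, 128, 255, Imgproc.THRESH_BINARY);
        Imgproc.findContours(m_binary, contours, new Mat(),
            Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
      }
    }
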
diff --git a/cameraserver/src/main/java/edu/wpi/first/vision/VisionRunner.java b/cameraserver/src/main/java/edu/wpi/first/vision/VisionRunner.java
new file mode 100644
index 0000000..8d8b12e
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/vision/VisionRunner.java
@@ -0,0 +1,128 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2019 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.vision;
+
+import org.opencv.core.Mat;
+
+import edu.wpi.cscore.CvSink;
+import edu.wpi.cscore.VideoSource;
+import edu.wpi.first.cameraserver.CameraServerSharedStore;
+
+/**
+ * A vision runner is a convenient wrapper object to make it easy to run vision pipelines
+ * from robot code. The easiest way to use this is to run it in a {@link VisionThread}
+ * and use the listener to take snapshots of the pipeline's outputs.
+ *
+ * @see VisionPipeline
+ * @see VisionThread
+ * @see <a href="package-summary.html">vision</a>
+ */
+public class VisionRunner<P extends VisionPipeline> {
+ private final CvSink m_cvSink = new CvSink("VisionRunner CvSink");
+ private final P m_pipeline;
+ private final Mat m_image = new Mat();
+ private final Listener<? super P> m_listener;
+ private volatile boolean m_enabled = true;
+
+ /**
+ * Listener interface for a callback that should run after a pipeline has processed its input.
+ *
+ * @param <P> the type of the pipeline this listener is for
+ */
+ @FunctionalInterface
+ public interface Listener<P extends VisionPipeline> {
+ /**
+ * Called when the pipeline has run. This callback should return quickly, since it delays
+ * subsequent calls to the pipeline's {@link VisionPipeline#process process} method. Copying the
+ * outputs and code that uses the copies should be <i>synchronized</i> on the same mutex to
+ * prevent multiple threads from reading and writing to the same memory at the same time.
+ *
+ * @param pipeline the vision pipeline that ran
+ */
+ void copyPipelineOutputs(P pipeline);
+
+ }
+
+ /**
+ * Creates a new vision runner. It will take images from the {@code videoSource}, send them to
+ * the {@code pipeline}, and call the {@code listener} when the pipeline has finished to alert
+ * user code when it is safe to access the pipeline's outputs.
+ *
+ * @param videoSource the video source to use to supply images for the pipeline
+ * @param pipeline the vision pipeline to run
+ * @param listener a function to call after the pipeline has finished running
+ */
+ public VisionRunner(VideoSource videoSource, P pipeline, Listener<? super P> listener) {
+ this.m_pipeline = pipeline;
+ this.m_listener = listener;
+ m_cvSink.setSource(videoSource);
+ }
+
+ /**
+ * Runs the pipeline one time, giving it the next image from the video source specified
+ * in the constructor. This will block until the source either has an image or throws an error.
+ * If the source successfully supplied a frame, the pipeline's image input will be set,
+ * the pipeline will run, and the listener specified in the constructor will be called to notify
+ * it that the pipeline ran.
+ *
+ * <p>This method is exposed to allow teams to add additional functionality or have their own
+ * ways to run the pipeline. Most teams, however, should just use {@link #runForever} in its own
+ * thread using a {@link VisionThread}.</p>
+ */
+ public void runOnce() {
+ Long id = CameraServerSharedStore.getCameraServerShared().getRobotMainThreadId();
+
+ if (id != null && Thread.currentThread().getId() == id) {
+ throw new IllegalStateException(
+ "VisionRunner.runOnce() cannot be called from the main robot thread");
+ }
+ runOnceInternal();
+ }
+
+ private void runOnceInternal() {
+ long frameTime = m_cvSink.grabFrame(m_image);
+ if (frameTime == 0) {
+ // There was an error, report it
+ String error = m_cvSink.getError();
+ CameraServerSharedStore.getCameraServerShared().reportDriverStationError(error);
+ } else {
+ // No errors, process the image
+ m_pipeline.process(m_image);
+ m_listener.copyPipelineOutputs(m_pipeline);
+ }
+ }
+
+ /**
+ * A convenience method that calls {@link #runOnce()} in an infinite loop. This must
+ * be run in a dedicated thread, and cannot be used in the main robot thread because
+ * it will freeze the robot program.
+ *
+ * <p><strong>Do not call this method directly from the main thread.</strong></p>
+ *
+ * @throws IllegalStateException if this is called from the main robot thread
+ * @see VisionThread
+ */
+ public void runForever() {
+ Long id = CameraServerSharedStore.getCameraServerShared().getRobotMainThreadId();
+
+ if (id != null && Thread.currentThread().getId() == id) {
+ throw new IllegalStateException(
+ "VisionRunner.runForever() cannot be called from the main robot thread");
+ }
+ while (m_enabled && !Thread.interrupted()) {
+ runOnceInternal();
+ }
+ }
+
+ /**
+ * Stop a {@link #runForever()} loop.
+ */
+ public void stop() {
+ m_enabled = false;
+ }
+}
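
A sketch of wiring a VisionRunner to the ContourPipeline sketched earlier,
showing the synchronized-copy pattern the Listener javadoc describes (class
and field names are illustrative):

    import java.util.ArrayList;
    import java.util.List;

    import org.opencv.core.MatOfPoint;

    import edu.wpi.cscore.VideoSource;
    import edu.wpi.first.vision.VisionRunner;

    public class VisionRunnerSketch {
      private final Object m_visionLock = new Object();
      private final List<MatOfPoint> m_latestContours = new ArrayList<>();
      private final VisionRunner<ContourPipeline> m_runner;

      public VisionRunnerSketch(VideoSource camera) {
        m_runner = new VisionRunner<>(camera, new ContourPipeline(), pipeline -> {
          // Called after each process(); copy the outputs under the shared lock.
          synchronized (m_visionLock) {
            m_latestContours.clear();
            m_latestContours.addAll(pipeline.contours);
          }
        });
      }

      public void start() {
        // runForever() blocks (and throws if called on the main robot thread),
        // so give it a dedicated daemon thread.
        Thread thread = new Thread(m_runner::runForever, "Vision");
        thread.setDaemon(true);
        thread.start();
      }

      public int contourCount() {
        synchronized (m_visionLock) {
          return m_latestContours.size();
        }
      }
    }
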
diff --git a/cameraserver/src/main/java/edu/wpi/first/vision/VisionThread.java b/cameraserver/src/main/java/edu/wpi/first/vision/VisionThread.java
new file mode 100644
index 0000000..576cb96
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/vision/VisionThread.java
@@ -0,0 +1,47 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.vision;
+
+import edu.wpi.cscore.VideoSource;
+
+/**
+ * A vision thread is a special thread that runs a vision pipeline. It is a <i>daemon</i> thread;
+ * it does not prevent the program from exiting when all other non-daemon threads
+ * have finished running.
+ *
+ * @see VisionPipeline
+ * @see VisionRunner
+ * @see Thread#setDaemon(boolean)
+ */
+public class VisionThread extends Thread {
+ /**
+ * Creates a vision thread that continuously runs a {@link VisionPipeline}.
+ *
+ * @param visionRunner the runner for a vision pipeline
+ */
+ public VisionThread(VisionRunner<?> visionRunner) {
+ super(visionRunner::runForever, "WPILib Vision Thread");
+ setDaemon(true);
+ }
+
+ /**
+ * Creates a new vision thread that continuously runs the given vision pipeline. This is
+ * equivalent to {@code new VisionThread(new VisionRunner<>(videoSource, pipeline, listener))}.
+ *
+ * @param videoSource the source for images the pipeline should process
+ * @param pipeline the pipeline to run
+ * @param listener the listener to copy outputs from the pipeline after it runs
+ * @param <P> the type of the pipeline
+ */
+ public <P extends VisionPipeline> VisionThread(VideoSource videoSource,
+ P pipeline,
+ VisionRunner.Listener<? super P> listener) {
+ this(new VisionRunner<>(videoSource, pipeline, listener));
+ }
+
+}
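
The equivalent using VisionThread directly, which handles the daemon-thread
setup above for you (again a sketch; ContourPipeline is the illustrative
pipeline from before):

    import edu.wpi.cscore.UsbCamera;
    import edu.wpi.first.cameraserver.CameraServer;
    import edu.wpi.first.vision.VisionThread;

    public final class VisionThreadSketch {
      public static VisionThread startVision() {
        UsbCamera camera = CameraServer.getInstance().startAutomaticCapture();
        VisionThread thread = new VisionThread(camera, new ContourPipeline(),
            pipeline -> {
              // Copy pipeline outputs here, synchronized with the consumer.
            });
        thread.start(); // daemon thread: it will not keep the JVM alive by itself
        return thread;
      }
    }
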
diff --git a/cameraserver/src/main/java/edu/wpi/first/vision/package-info.java b/cameraserver/src/main/java/edu/wpi/first/vision/package-info.java
new file mode 100644
index 0000000..e2e5c62
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/vision/package-info.java
@@ -0,0 +1,88 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+/**
+ * Classes in the {@code edu.wpi.first.vision} package are designed to
+ * simplify using OpenCV vision processing code from a robot program.
+ *
+ * <p>An example use case, from the 2015 game, of autonomously grabbing a yellow tote:
+ * <br>
+ * <pre><code>
+ * public class Robot extends IterativeRobot
+ * implements VisionRunner.Listener<MyFindTotePipeline> {
+ *
+ * // A USB camera connected to the roboRIO.
+ * private {@link edu.wpi.cscore.VideoSource VideoSource} usbCamera;
+ *
+ * // A vision pipeline. This could be handwritten or generated by GRIP.
+ * // This has to implement {@link edu.wpi.first.vision.VisionPipeline}.
+ * // For this example, assume that it's perfect and will always see the tote.
+ * private MyFindTotePipeline findTotePipeline;
+ * private {@link edu.wpi.first.vision.VisionThread} findToteThread;
+ *
+ * // The object to synchronize on to make sure the vision thread doesn't
+ * // write to variables the main thread is using.
+ * private final Object visionLock = new Object();
+ *
+ * // The pipeline outputs we want
+ * private boolean pipelineRan = false; // lets us know when the pipeline has actually run
+ * private double angleToTote = 0;
+ * private double distanceToTote = 0;
+ *
+ * {@literal @}Override
+ * public void {@link edu.wpi.first.vision.VisionRunner.Listener#copyPipelineOutputs
+ * copyPipelineOutputs(MyFindTotePipeline pipeline)} {
+ * synchronized (visionLock) {
+ * // Take a snapshot of the pipeline's output because
+ * // it may have changed the next time this method is called!
+ * this.pipelineRan = true;
+ * this.angleToTote = pipeline.getAngleToTote();
+ * this.distanceToTote = pipeline.getDistanceToTote();
+ * }
+ * }
+ *
+ * {@literal @}Override
+ * public void robotInit() {
+ * usbCamera = CameraServer.getInstance().startAutomaticCapture(0);
+ * findTotePipeline = new MyFindTotePipeline();
+ * findToteThread = new VisionThread(usbCamera, findTotePipeline, this);
+ * }
+ *
+ * {@literal @}Override
+ * public void autonomousInit() {
+ * findToteThread.start();
+ * }
+ *
+ * {@literal @}Override
+ * public void autonomousPeriodic() {
+ * double angle;
+ * double distance;
+ * synchronized (visionLock) {
+ * if (!pipelineRan) {
+ * // Wait until the pipeline has run
+ * return;
+ * }
+ * // Copy the outputs to make sure they're all from the same run
+ * angle = this.angleToTote;
+ * distance = this.distanceToTote;
+ * }
+ * if (!aimedAtTote()) {
+ * turnToAngle(angle);
+ * } else if (!droveToTote()) {
+ * driveDistance(distance);
+ * } else if (!grabbedTote()) {
+ * grabTote();
+ * } else {
+ * // Tote was grabbed and we're done!
+ * return;
+ * }
+ * }
+ *
+ * }
+ * </code></pre>
+ */
+package edu.wpi.first.vision;
diff --git a/cameraserver/src/main/native/cpp/cameraserver/CameraServer.cpp b/cameraserver/src/main/native/cpp/cameraserver/CameraServer.cpp
new file mode 100644
index 0000000..f7647de
--- /dev/null
+++ b/cameraserver/src/main/native/cpp/cameraserver/CameraServer.cpp
@@ -0,0 +1,716 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2019 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#include "cameraserver/CameraServer.h"
+
+#include <atomic>
+#include <vector>
+
+#include <networktables/NetworkTable.h>
+#include <networktables/NetworkTableInstance.h>
+#include <wpi/DenseMap.h>
+#include <wpi/ManagedStatic.h>
+#include <wpi/SmallString.h>
+#include <wpi/StringMap.h>
+#include <wpi/mutex.h>
+#include <wpi/raw_ostream.h>
+
+#include "cameraserver/CameraServerShared.h"
+#include "ntcore_cpp.h"
+
+using namespace frc;
+
+static constexpr char const* kPublishName = "/CameraPublisher";
+
+struct CameraServer::Impl {
+ Impl();
+ std::shared_ptr<nt::NetworkTable> GetSourceTable(CS_Source source);
+ std::vector<std::string> GetSinkStreamValues(CS_Sink sink);
+ std::vector<std::string> GetSourceStreamValues(CS_Source source);
+ void UpdateStreamValues();
+
+ wpi::mutex m_mutex;
+ std::atomic<int> m_defaultUsbDevice{0};
+ std::string m_primarySourceName;
+ wpi::StringMap<cs::VideoSource> m_sources;
+ wpi::StringMap<cs::VideoSink> m_sinks;
+ wpi::DenseMap<CS_Sink, CS_Source> m_fixedSources;
+ wpi::DenseMap<CS_Source, std::shared_ptr<nt::NetworkTable>> m_tables;
+ std::shared_ptr<nt::NetworkTable> m_publishTable;
+ cs::VideoListener m_videoListener;
+ int m_tableListener;
+ int m_nextPort;
+ std::vector<std::string> m_addresses;
+};
+
+CameraServer* CameraServer::GetInstance() {
+ struct Creator {
+ static void* call() { return new CameraServer{}; }
+ };
+ struct Deleter {
+ static void call(void* ptr) { delete static_cast<CameraServer*>(ptr); }
+ };
+ static wpi::ManagedStatic<CameraServer, Creator, Deleter> instance;
+ return &(*instance);
+}
+
+static wpi::StringRef MakeSourceValue(CS_Source source,
+ wpi::SmallVectorImpl<char>& buf) {
+ CS_Status status = 0;
+ buf.clear();
+ switch (cs::GetSourceKind(source, &status)) {
+ case CS_SOURCE_USB: {
+ wpi::StringRef prefix{"usb:"};
+ buf.append(prefix.begin(), prefix.end());
+ auto path = cs::GetUsbCameraPath(source, &status);
+ buf.append(path.begin(), path.end());
+ break;
+ }
+ case CS_SOURCE_HTTP: {
+ wpi::StringRef prefix{"ip:"};
+ buf.append(prefix.begin(), prefix.end());
+ auto urls = cs::GetHttpCameraUrls(source, &status);
+ if (!urls.empty()) buf.append(urls[0].begin(), urls[0].end());
+ break;
+ }
+ case CS_SOURCE_CV:
+ return "cv:";
+ default:
+ return "unknown:";
+ }
+
+ return wpi::StringRef{buf.begin(), buf.size()};
+}
+
+static std::string MakeStreamValue(const wpi::Twine& address, int port) {
+ return ("mjpg:http://" + address + wpi::Twine(':') + wpi::Twine(port) +
+ "/?action=stream")
+ .str();
+}
+
+std::shared_ptr<nt::NetworkTable> CameraServer::Impl::GetSourceTable(
+ CS_Source source) {
+ std::scoped_lock lock(m_mutex);
+ return m_tables.lookup(source);
+}
+
+std::vector<std::string> CameraServer::Impl::GetSinkStreamValues(CS_Sink sink) {
+ CS_Status status = 0;
+
+ // Ignore all but MjpegServer
+ if (cs::GetSinkKind(sink, &status) != CS_SINK_MJPEG)
+ return std::vector<std::string>{};
+
+ // Get port
+ int port = cs::GetMjpegServerPort(sink, &status);
+
+ // Generate values
+ std::vector<std::string> values;
+ auto listenAddress = cs::GetMjpegServerListenAddress(sink, &status);
+ if (!listenAddress.empty()) {
+ // If a listen address is specified, only use that
+ values.emplace_back(MakeStreamValue(listenAddress, port));
+ } else {
+ // Otherwise generate for hostname and all interface addresses
+ values.emplace_back(MakeStreamValue(cs::GetHostname() + ".local", port));
+
+ for (const auto& addr : m_addresses) {
+ if (addr == "127.0.0.1") continue; // ignore localhost
+ values.emplace_back(MakeStreamValue(addr, port));
+ }
+ }
+
+ return values;
+}
+
+std::vector<std::string> CameraServer::Impl::GetSourceStreamValues(
+ CS_Source source) {
+ CS_Status status = 0;
+
+ // Ignore all but HttpCamera
+ if (cs::GetSourceKind(source, &status) != CS_SOURCE_HTTP)
+ return std::vector<std::string>{};
+
+ // Generate values
+ auto values = cs::GetHttpCameraUrls(source, &status);
+ for (auto& value : values) value = "mjpg:" + value;
+
+#ifdef __FRC_ROBORIO__
+ // Look to see if we have a passthrough server for this source
+ // Only do this on the roboRIO
+ for (const auto& i : m_sinks) {
+ CS_Sink sink = i.second.GetHandle();
+ CS_Source sinkSource = cs::GetSinkSource(sink, &status);
+ if (source == sinkSource &&
+ cs::GetSinkKind(sink, &status) == CS_SINK_MJPEG) {
+ // Add USB-only passthrough
+ int port = cs::GetMjpegServerPort(sink, &status);
+ values.emplace_back(MakeStreamValue("172.22.11.2", port));
+ break;
+ }
+ }
+#endif
+
+ return values;
+}
+
+void CameraServer::Impl::UpdateStreamValues() {
+ std::scoped_lock lock(m_mutex);
+ // Over all the sinks...
+ for (const auto& i : m_sinks) {
+ CS_Status status = 0;
+ CS_Sink sink = i.second.GetHandle();
+
+ // Get the source's subtable (if none exists, we're done)
+ CS_Source source = m_fixedSources.lookup(sink);
+ if (source == 0) source = cs::GetSinkSource(sink, &status);
+ if (source == 0) continue;
+ auto table = m_tables.lookup(source);
+ if (table) {
+ // Don't set stream values if this is a HttpCamera passthrough
+ if (cs::GetSourceKind(source, &status) == CS_SOURCE_HTTP) continue;
+
+ // Set table value
+ auto values = GetSinkStreamValues(sink);
+ if (!values.empty()) table->GetEntry("streams").SetStringArray(values);
+ }
+ }
+
+ // Over all the sources...
+ for (const auto& i : m_sources) {
+ CS_Source source = i.second.GetHandle();
+
+ // Get the source's subtable (if none exists, we're done)
+ auto table = m_tables.lookup(source);
+ if (table) {
+ // Set table value
+ auto values = GetSourceStreamValues(source);
+ if (!values.empty()) table->GetEntry("streams").SetStringArray(values);
+ }
+ }
+}
+
+static std::string PixelFormatToString(int pixelFormat) {
+ switch (pixelFormat) {
+ case cs::VideoMode::PixelFormat::kMJPEG:
+ return "MJPEG";
+ case cs::VideoMode::PixelFormat::kYUYV:
+ return "YUYV";
+ case cs::VideoMode::PixelFormat::kRGB565:
+ return "RGB565";
+ case cs::VideoMode::PixelFormat::kBGR:
+ return "BGR";
+ case cs::VideoMode::PixelFormat::kGray:
+ return "Gray";
+ default:
+ return "Unknown";
+ }
+}
+
+static std::string VideoModeToString(const cs::VideoMode& mode) {
+ std::string rv;
+ wpi::raw_string_ostream oss{rv};
+ oss << mode.width << "x" << mode.height;
+ oss << " " << PixelFormatToString(mode.pixelFormat) << " ";
+ oss << mode.fps << " fps";
+ return oss.str();
+}
+
+static std::vector<std::string> GetSourceModeValues(int source) {
+ std::vector<std::string> rv;
+ CS_Status status = 0;
+ for (const auto& mode : cs::EnumerateSourceVideoModes(source, &status))
+ rv.emplace_back(VideoModeToString(mode));
+ return rv;
+}
+
+static void PutSourcePropertyValue(nt::NetworkTable* table,
+ const cs::VideoEvent& event, bool isNew) {
+ wpi::SmallString<64> name;
+ wpi::SmallString<64> infoName;
+ if (wpi::StringRef{event.name}.startswith("raw_")) {
+ name = "RawProperty/";
+ name += event.name;
+ infoName = "RawPropertyInfo/";
+ infoName += event.name;
+ } else {
+ name = "Property/";
+ name += event.name;
+ infoName = "PropertyInfo/";
+ infoName += event.name;
+ }
+
+ wpi::SmallString<64> buf;
+ CS_Status status = 0;
+ nt::NetworkTableEntry entry = table->GetEntry(name);
+ switch (event.propertyKind) {
+ case CS_PROP_BOOLEAN:
+ if (isNew)
+ entry.SetDefaultBoolean(event.value != 0);
+ else
+ entry.SetBoolean(event.value != 0);
+ break;
+ case CS_PROP_INTEGER:
+ case CS_PROP_ENUM:
+ if (isNew) {
+ entry.SetDefaultDouble(event.value);
+ table->GetEntry(infoName + "/min")
+ .SetDouble(cs::GetPropertyMin(event.propertyHandle, &status));
+ table->GetEntry(infoName + "/max")
+ .SetDouble(cs::GetPropertyMax(event.propertyHandle, &status));
+ table->GetEntry(infoName + "/step")
+ .SetDouble(cs::GetPropertyStep(event.propertyHandle, &status));
+ table->GetEntry(infoName + "/default")
+ .SetDouble(cs::GetPropertyDefault(event.propertyHandle, &status));
+ } else {
+ entry.SetDouble(event.value);
+ }
+ break;
+ case CS_PROP_STRING:
+ if (isNew)
+ entry.SetDefaultString(event.valueStr);
+ else
+ entry.SetString(event.valueStr);
+ break;
+ default:
+ break;
+ }
+}
+
+CameraServer::Impl::Impl()
+ : m_publishTable{nt::NetworkTableInstance::GetDefault().GetTable(
+ kPublishName)},
+ m_nextPort(kBasePort) {
+ // We publish sources to NetworkTables using the following structure:
+ // "/CameraPublisher/{Source.Name}/" - root
+ // - "source" (string): Descriptive, prefixed with type (e.g. "usb:0")
+ // - "streams" (string array): URLs that can be used to stream data
+ // - "description" (string): Description of the source
+ // - "connected" (boolean): Whether source is connected
+ // - "mode" (string): Current video mode
+ // - "modes" (string array): Available video modes
+ // - "Property/{Property}" - Property values
+ // - "PropertyInfo/{Property}" - Property supporting information
+
+ // Listener for video events
+ m_videoListener = cs::VideoListener{
+ [=](const cs::VideoEvent& event) {
+ CS_Status status = 0;
+ switch (event.kind) {
+ case cs::VideoEvent::kSourceCreated: {
+ // Create subtable for the camera
+ auto table = m_publishTable->GetSubTable(event.name);
+ {
+ std::scoped_lock lock(m_mutex);
+ m_tables.insert(std::make_pair(event.sourceHandle, table));
+ }
+ wpi::SmallString<64> buf;
+ table->GetEntry("source").SetString(
+ MakeSourceValue(event.sourceHandle, buf));
+ wpi::SmallString<64> descBuf;
+ table->GetEntry("description")
+ .SetString(cs::GetSourceDescription(event.sourceHandle, descBuf,
+ &status));
+ table->GetEntry("connected")
+ .SetBoolean(cs::IsSourceConnected(event.sourceHandle, &status));
+ table->GetEntry("streams").SetStringArray(
+ GetSourceStreamValues(event.sourceHandle));
+ auto mode = cs::GetSourceVideoMode(event.sourceHandle, &status);
+ table->GetEntry("mode").SetDefaultString(VideoModeToString(mode));
+ table->GetEntry("modes").SetStringArray(
+ GetSourceModeValues(event.sourceHandle));
+ break;
+ }
+ case cs::VideoEvent::kSourceDestroyed: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) {
+ table->GetEntry("source").SetString("");
+ table->GetEntry("streams").SetStringArray(
+ std::vector<std::string>{});
+ table->GetEntry("modes").SetStringArray(
+ std::vector<std::string>{});
+ }
+ break;
+ }
+ case cs::VideoEvent::kSourceConnected: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) {
+ // update the description too (as it may have changed)
+ wpi::SmallString<64> descBuf;
+ table->GetEntry("description")
+ .SetString(cs::GetSourceDescription(event.sourceHandle,
+ descBuf, &status));
+ table->GetEntry("connected").SetBoolean(true);
+ }
+ break;
+ }
+ case cs::VideoEvent::kSourceDisconnected: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) table->GetEntry("connected").SetBoolean(false);
+ break;
+ }
+ case cs::VideoEvent::kSourceVideoModesUpdated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table)
+ table->GetEntry("modes").SetStringArray(
+ GetSourceModeValues(event.sourceHandle));
+ break;
+ }
+ case cs::VideoEvent::kSourceVideoModeChanged: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table)
+ table->GetEntry("mode").SetString(VideoModeToString(event.mode));
+ break;
+ }
+ case cs::VideoEvent::kSourcePropertyCreated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) PutSourcePropertyValue(table.get(), event, true);
+ break;
+ }
+ case cs::VideoEvent::kSourcePropertyValueUpdated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) PutSourcePropertyValue(table.get(), event, false);
+ break;
+ }
+ case cs::VideoEvent::kSourcePropertyChoicesUpdated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) {
+ wpi::SmallString<64> name{"PropertyInfo/"};
+ name += event.name;
+ name += "/choices";
+ auto choices =
+ cs::GetEnumPropertyChoices(event.propertyHandle, &status);
+ table->GetEntry(name).SetStringArray(choices);
+ }
+ break;
+ }
+ case cs::VideoEvent::kSinkSourceChanged:
+ case cs::VideoEvent::kSinkCreated:
+ case cs::VideoEvent::kSinkDestroyed:
+ case cs::VideoEvent::kNetworkInterfacesChanged: {
+ m_addresses = cs::GetNetworkInterfaces();
+ UpdateStreamValues();
+ break;
+ }
+ default:
+ break;
+ }
+ },
+ 0x4fff, true};
+
+ // Listener for NetworkTable events
+ // We don't currently support changing settings via NT due to
+ // synchronization issues, so just update to current setting if someone
+ // else tries to change it.
+ wpi::SmallString<64> buf;
+ m_tableListener = nt::NetworkTableInstance::GetDefault().AddEntryListener(
+ kPublishName + wpi::Twine('/'),
+ [=](const nt::EntryNotification& event) {
+ wpi::StringRef relativeKey =
+ event.name.substr(wpi::StringRef(kPublishName).size() + 1);
+
+ // get source (sourceName/...)
+ auto subKeyIndex = relativeKey.find('/');
+ if (subKeyIndex == wpi::StringRef::npos) return;
+ wpi::StringRef sourceName = relativeKey.slice(0, subKeyIndex);
+ auto sourceIt = m_sources.find(sourceName);
+ if (sourceIt == m_sources.end()) return;
+
+ // get subkey
+ relativeKey = relativeKey.substr(subKeyIndex + 1);
+
+ // handle standard names
+ wpi::StringRef propName;
+ nt::NetworkTableEntry entry{event.entry};
+ if (relativeKey == "mode") {
+ // reset to current mode
+ entry.SetString(VideoModeToString(sourceIt->second.GetVideoMode()));
+ return;
+ } else if (relativeKey.startswith("Property/")) {
+ propName = relativeKey.substr(9);
+ } else if (relativeKey.startswith("RawProperty/")) {
+ propName = relativeKey.substr(12);
+ } else {
+ return; // ignore
+ }
+
+ // everything else is a property
+ auto property = sourceIt->second.GetProperty(propName);
+ switch (property.GetKind()) {
+ case cs::VideoProperty::kNone:
+ return;
+ case cs::VideoProperty::kBoolean:
+ entry.SetBoolean(property.Get() != 0);
+ return;
+ case cs::VideoProperty::kInteger:
+ case cs::VideoProperty::kEnum:
+ entry.SetDouble(property.Get());
+ return;
+ case cs::VideoProperty::kString:
+ entry.SetString(property.GetString());
+ return;
+ default:
+ return;
+ }
+ },
+ NT_NOTIFY_IMMEDIATE | NT_NOTIFY_UPDATE);
+}
+
+CameraServer::CameraServer() : m_impl(new Impl) {}
+
+CameraServer::~CameraServer() {}
+
+cs::UsbCamera CameraServer::StartAutomaticCapture() {
+ cs::UsbCamera camera = StartAutomaticCapture(m_impl->m_defaultUsbDevice++);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::UsbCamera CameraServer::StartAutomaticCapture(int dev) {
+ cs::UsbCamera camera{"USB Camera " + wpi::Twine(dev), dev};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::UsbCamera CameraServer::StartAutomaticCapture(const wpi::Twine& name,
+ int dev) {
+ cs::UsbCamera camera{name, dev};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::UsbCamera CameraServer::StartAutomaticCapture(const wpi::Twine& name,
+ const wpi::Twine& path) {
+ cs::UsbCamera camera{name, path};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& host) {
+ return AddAxisCamera("Axis Camera", host);
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const char* host) {
+ return AddAxisCamera("Axis Camera", host);
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const std::string& host) {
+ return AddAxisCamera("Axis Camera", host);
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(wpi::ArrayRef<std::string> hosts) {
+ return AddAxisCamera("Axis Camera", hosts);
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
+ const wpi::Twine& host) {
+ cs::AxisCamera camera{name, host};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportAxisCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
+ const char* host) {
+ cs::AxisCamera camera{name, host};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportAxisCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
+ const std::string& host) {
+ cs::AxisCamera camera{name, host};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportAxisCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
+ wpi::ArrayRef<std::string> hosts) {
+ cs::AxisCamera camera{name, hosts};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportAxisCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::MjpegServer CameraServer::AddSwitchedCamera(const wpi::Twine& name) {
+ // create a dummy CvSource
+ cs::CvSource source{name, cs::VideoMode::PixelFormat::kMJPEG, 160, 120, 30};
+ cs::MjpegServer server = StartAutomaticCapture(source);
+ m_impl->m_fixedSources[server.GetHandle()] = source.GetHandle();
+
+ return server;
+}
+
+cs::MjpegServer CameraServer::StartAutomaticCapture(
+ const cs::VideoSource& camera) {
+ AddCamera(camera);
+ auto server = AddServer(wpi::Twine("serve_") + camera.GetName());
+ server.SetSource(camera);
+ return server;
+}
+
+cs::CvSink CameraServer::GetVideo() {
+ cs::VideoSource source;
+ {
+ auto csShared = GetCameraServerShared();
+ std::scoped_lock lock(m_impl->m_mutex);
+ if (m_impl->m_primarySourceName.empty()) {
+ csShared->SetCameraServerError("no camera available");
+ return cs::CvSink{};
+ }
+ auto it = m_impl->m_sources.find(m_impl->m_primarySourceName);
+ if (it == m_impl->m_sources.end()) {
+ csShared->SetCameraServerError("no camera available");
+ return cs::CvSink{};
+ }
+ source = it->second;
+ }
+ return GetVideo(source);
+}
+
+cs::CvSink CameraServer::GetVideo(const cs::VideoSource& camera) {
+ wpi::SmallString<64> name{"opencv_"};
+ name += camera.GetName();
+
+ {
+ std::scoped_lock lock(m_impl->m_mutex);
+ auto it = m_impl->m_sinks.find(name);
+ if (it != m_impl->m_sinks.end()) {
+ auto kind = it->second.GetKind();
+ if (kind != cs::VideoSink::kCv) {
+ auto csShared = GetCameraServerShared();
+ csShared->SetCameraServerError("expected OpenCV sink, but got " +
+ wpi::Twine(kind));
+ return cs::CvSink{};
+ }
+ return *static_cast<cs::CvSink*>(&it->second);
+ }
+ }
+
+ cs::CvSink newsink{name};
+ newsink.SetSource(camera);
+ AddServer(newsink);
+ return newsink;
+}
+
+cs::CvSink CameraServer::GetVideo(const wpi::Twine& name) {
+ wpi::SmallString<64> nameBuf;
+ wpi::StringRef nameStr = name.toStringRef(nameBuf);
+ cs::VideoSource source;
+ {
+ std::scoped_lock lock(m_impl->m_mutex);
+ auto it = m_impl->m_sources.find(nameStr);
+ if (it == m_impl->m_sources.end()) {
+ auto csShared = GetCameraServerShared();
+ csShared->SetCameraServerError("could not find camera " + nameStr);
+ return cs::CvSink{};
+ }
+ source = it->second;
+ }
+ return GetVideo(source);
+}
+
+cs::CvSource CameraServer::PutVideo(const wpi::Twine& name, int width,
+ int height) {
+ cs::CvSource source{name, cs::VideoMode::kMJPEG, width, height, 30};
+ StartAutomaticCapture(source);
+ return source;
+}
+
+cs::MjpegServer CameraServer::AddServer(const wpi::Twine& name) {
+ int port;
+ {
+ std::scoped_lock lock(m_impl->m_mutex);
+ port = m_impl->m_nextPort++;
+ }
+ return AddServer(name, port);
+}
+
+cs::MjpegServer CameraServer::AddServer(const wpi::Twine& name, int port) {
+ cs::MjpegServer server{name, port};
+ AddServer(server);
+ return server;
+}
+
+void CameraServer::AddServer(const cs::VideoSink& server) {
+ std::scoped_lock lock(m_impl->m_mutex);
+ m_impl->m_sinks.try_emplace(server.GetName(), server);
+}
+
+void CameraServer::RemoveServer(const wpi::Twine& name) {
+ std::scoped_lock lock(m_impl->m_mutex);
+ wpi::SmallString<64> nameBuf;
+ m_impl->m_sinks.erase(name.toStringRef(nameBuf));
+}
+
+cs::VideoSink CameraServer::GetServer() {
+ wpi::SmallString<64> name;
+ {
+ std::scoped_lock lock(m_impl->m_mutex);
+ if (m_impl->m_primarySourceName.empty()) {
+ auto csShared = GetCameraServerShared();
+ csShared->SetCameraServerError("no camera available");
+ return cs::VideoSink{};
+ }
+ name = "serve_";
+ name += m_impl->m_primarySourceName;
+ }
+ return GetServer(name);
+}
+
+cs::VideoSink CameraServer::GetServer(const wpi::Twine& name) {
+ wpi::SmallString<64> nameBuf;
+ wpi::StringRef nameStr = name.toStringRef(nameBuf);
+ std::scoped_lock lock(m_impl->m_mutex);
+ auto it = m_impl->m_sinks.find(nameStr);
+ if (it == m_impl->m_sinks.end()) {
+ auto csShared = GetCameraServerShared();
+ csShared->SetCameraServerError("could not find server " + nameStr);
+ return cs::VideoSink{};
+ }
+ return it->second;
+}
+
+void CameraServer::AddCamera(const cs::VideoSource& camera) {
+ std::string name = camera.GetName();
+ std::scoped_lock lock(m_impl->m_mutex);
+ if (m_impl->m_primarySourceName.empty()) m_impl->m_primarySourceName = name;
+ m_impl->m_sources.try_emplace(name, camera);
+}
+
+void CameraServer::RemoveCamera(const wpi::Twine& name) {
+ std::scoped_lock lock(m_impl->m_mutex);
+ wpi::SmallString<64> nameBuf;
+ m_impl->m_sources.erase(name.toStringRef(nameBuf));
+}
+
+void CameraServer::SetSize(int size) {
+ std::scoped_lock lock(m_impl->m_mutex);
+ if (m_impl->m_primarySourceName.empty()) return;
+ auto it = m_impl->m_sources.find(m_impl->m_primarySourceName);
+ if (it == m_impl->m_sources.end()) return;
+ if (size == kSize160x120)
+ it->second.SetResolution(160, 120);
+ else if (size == kSize320x240)
+ it->second.SetResolution(320, 240);
+ else if (size == kSize640x480)
+ it->second.SetResolution(640, 480);
+}
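
For orientation, here is a minimal sketch of how this API is typically consumed from robot code. The vision-thread pattern, the rectangle overlay, and all names below are illustrative assumptions, not part of this commit:

#include <opencv2/core/mat.hpp>
#include <opencv2/imgproc.hpp>

#include "cameraserver/CameraServer.h"

// Capture from USB device 0, process each frame, and publish an annotated
// stream back to the dashboard. Run this in a dedicated thread.
void VisionThread() {
  cs::UsbCamera camera =
      frc::CameraServer::GetInstance()->StartAutomaticCapture();
  camera.SetResolution(320, 240);

  cs::CvSink sink = frc::CameraServer::GetInstance()->GetVideo();
  cs::CvSource output =
      frc::CameraServer::GetInstance()->PutVideo("Processed", 320, 240);

  cv::Mat frame;
  while (true) {
    // GrabFrame() returns 0 on error (e.g. a timeout); skip that iteration.
    if (sink.GrabFrame(frame) == 0) continue;
    cv::rectangle(frame, {10, 10}, {100, 100}, {255, 255, 255}, 2);
    output.PutFrame(frame);
  }
}
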
diff --git a/cameraserver/src/main/native/cpp/cameraserver/CameraServerShared.cpp b/cameraserver/src/main/native/cpp/cameraserver/CameraServerShared.cpp
new file mode 100644
index 0000000..7d30d28
--- /dev/null
+++ b/cameraserver/src/main/native/cpp/cameraserver/CameraServerShared.cpp
@@ -0,0 +1,45 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018-2019 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#include "cameraserver/CameraServerShared.h"
+
+#include <wpi/mutex.h>
+
+namespace {
+class DefaultCameraServerShared : public frc::CameraServerShared {
+ public:
+ void ReportUsbCamera(int id) override {}
+ void ReportAxisCamera(int id) override {}
+ void ReportVideoServer(int id) override {}
+ void SetCameraServerError(const wpi::Twine& error) override {}
+ void SetVisionRunnerError(const wpi::Twine& error) override {}
+ void ReportDriverStationError(const wpi::Twine& error) override {}
+ std::pair<std::thread::id, bool> GetRobotMainThreadId() const override {
+ return std::make_pair(std::thread::id(), false);
+ }
+};
+} // namespace
+
+static std::unique_ptr<frc::CameraServerShared> cameraServerShared = nullptr;
+static wpi::mutex setLock;
+
+namespace frc {
+CameraServerShared* GetCameraServerShared() {
+ std::unique_lock lock(setLock);
+ if (!cameraServerShared) {
+ cameraServerShared = std::make_unique<DefaultCameraServerShared>();
+ }
+ return cameraServerShared.get();
+}
+} // namespace frc
+
+extern "C" {
+void CameraServer_SetCameraServerShared(frc::CameraServerShared* shared) {
+ std::unique_lock lock(setLock);
+ cameraServerShared.reset(shared);
+}
+} // extern "C"
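
The extern "C" setter above exists so a hosting framework can install its own implementation in place of the no-op default. Here is a sketch of a replacement that logs errors to stderr; the class name and logging behavior are assumptions for illustration:

#include <cstdio>

#include "cameraserver/CameraServerShared.h"

// Illustrative override: surface errors on stderr instead of dropping them.
class LoggingCameraServerShared : public frc::CameraServerShared {
 public:
  void ReportUsbCamera(int id) override {}
  void ReportAxisCamera(int id) override {}
  void ReportVideoServer(int id) override {}
  void SetCameraServerError(const wpi::Twine& error) override {
    std::fputs(("CameraServer: " + error + "\n").str().c_str(), stderr);
  }
  void SetVisionRunnerError(const wpi::Twine& error) override {
    std::fputs(("VisionRunner: " + error + "\n").str().c_str(), stderr);
  }
  void ReportDriverStationError(const wpi::Twine& error) override {}
  std::pair<std::thread::id, bool> GetRobotMainThreadId() const override {
    return {std::thread::id(), false};  // main thread id unknown
  }
};

// The setter takes ownership of the raw pointer.
void InstallLoggingShared() {
  CameraServer_SetCameraServerShared(new LoggingCameraServerShared);
}
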
diff --git a/cameraserver/src/main/native/cpp/vision/VisionRunner.cpp b/cameraserver/src/main/native/cpp/vision/VisionRunner.cpp
new file mode 100644
index 0000000..9896bbd
--- /dev/null
+++ b/cameraserver/src/main/native/cpp/vision/VisionRunner.cpp
@@ -0,0 +1,59 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#include "vision/VisionRunner.h"
+
+#include <thread>
+
+#include <opencv2/core/mat.hpp>
+
+#include "cameraserver/CameraServerShared.h"
+
+using namespace frc;
+
+VisionRunnerBase::VisionRunnerBase(cs::VideoSource videoSource)
+ : m_image(std::make_unique<cv::Mat>()),
+ m_cvSink("VisionRunner CvSink"),
+ m_enabled(true) {
+ m_cvSink.SetSource(videoSource);
+}
+
+// Located here and not in header due to cv::Mat forward declaration.
+VisionRunnerBase::~VisionRunnerBase() {}
+
+void VisionRunnerBase::RunOnce() {
+ auto csShared = frc::GetCameraServerShared();
+ auto res = csShared->GetRobotMainThreadId();
+ if (res.second && (std::this_thread::get_id() == res.first)) {
+ csShared->SetVisionRunnerError(
+ "VisionRunner::RunOnce() cannot be called from the main robot thread");
+ return;
+ }
+ auto frameTime = m_cvSink.GrabFrame(*m_image);
+ if (frameTime == 0) {
+ auto error = m_cvSink.GetError();
+ csShared->ReportDriverStationError(error);
+ } else {
+ DoProcess(*m_image);
+ }
+}
+
+void VisionRunnerBase::RunForever() {
+ auto csShared = frc::GetCameraServerShared();
+ auto res = csShared->GetRobotMainThreadId();
+ if (res.second && (std::this_thread::get_id() == res.first)) {
+ csShared->SetVisionRunnerError(
+ "VisionRunner::RunForever() cannot be called from the main robot "
+ "thread");
+ return;
+ }
+ while (m_enabled) {
+ RunOnce();
+ }
+}
+
+void VisionRunnerBase::Stop() { m_enabled = false; }
diff --git a/cameraserver/src/main/native/include/cameraserver/CameraServer.h b/cameraserver/src/main/native/include/cameraserver/CameraServer.h
new file mode 100644
index 0000000..8f384fd
--- /dev/null
+++ b/cameraserver/src/main/native/include/cameraserver/CameraServer.h
@@ -0,0 +1,298 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2014-2019 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+
+#include <wpi/ArrayRef.h>
+#include <wpi/Twine.h>
+
+#include "cscore.h"
+#include "cscore_cv.h"
+
+namespace frc {
+
+/**
+ * Singleton class for creating and keeping camera servers.
+ *
+ * Also publishes camera information to NetworkTables.
+ */
+class CameraServer {
+ public:
+ static constexpr uint16_t kBasePort = 1181;
+ static constexpr int kSize640x480 = 0;
+ static constexpr int kSize320x240 = 1;
+ static constexpr int kSize160x120 = 2;
+
+ /**
+ * Get the CameraServer instance.
+ */
+ static CameraServer* GetInstance();
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * You should call this method to see a camera feed on the dashboard. If you
+ * also want to perform vision processing on the roboRIO, use GetVideo() to
+ * get access to the camera images.
+ *
+ * The first time this overload is called, it calls StartAutomaticCapture()
+ * with device 0, creating a camera named "USB Camera 0". Subsequent calls
+ * increment the device number (e.g. 1, 2, etc.).
+ */
+ cs::UsbCamera StartAutomaticCapture();
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * This overload calls StartAutomaticCapture() with a name of "USB Camera
+ * {dev}".
+ *
+ * @param dev The device number of the camera interface
+ */
+ cs::UsbCamera StartAutomaticCapture(int dev);
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * @param name The name to give the camera
+ * @param dev The device number of the camera interface
+ */
+ cs::UsbCamera StartAutomaticCapture(const wpi::Twine& name, int dev);
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * @param name The name to give the camera
+ * @param path The device path (e.g. "/dev/video0") of the camera
+ */
+ cs::UsbCamera StartAutomaticCapture(const wpi::Twine& name,
+ const wpi::Twine& path);
+
+ /**
+ * Start automatically capturing images to send to the dashboard from
+ * an existing camera.
+ *
+ * @param camera Camera
+ */
+ cs::MjpegServer StartAutomaticCapture(const cs::VideoSource& camera);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * This overload calls AddAxisCamera() with name "Axis Camera".
+ *
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * This overload calls AddAxisCamera() with name "Axis Camera".
+ *
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const char* host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * This overload calls AddAxisCamera() with name "Axis Camera".
+ *
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const std::string& host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * This overload calls AddAxisCamera() with name "Axis Camera".
+ *
+ * @param hosts Array of Camera host IPs/DNS names
+ */
+ cs::AxisCamera AddAxisCamera(wpi::ArrayRef<std::string> hosts);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * This overload calls AddAxisCamera() with name "Axis Camera".
+ *
+ * @param hosts Array of Camera host IPs/DNS names
+ */
+ template <typename T>
+ cs::AxisCamera AddAxisCamera(std::initializer_list<T> hosts);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& name, const wpi::Twine& host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& name, const char* host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& name, const std::string& host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param hosts Array of Camera host IPs/DNS names
+ */
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& name,
+ wpi::ArrayRef<std::string> hosts);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param hosts Array of Camera host IPs/DNS names
+ */
+ template <typename T>
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& name,
+ std::initializer_list<T> hosts);
+
+ /**
+ * Adds a virtual camera for switching between two streams. Unlike the
+ * other AddCamera() methods, this returns a VideoSink rather than a
+ * VideoSource. Call SetSource() on the returned object to switch the
+ * actual source of the stream.
+ */
+ cs::MjpegServer AddSwitchedCamera(const wpi::Twine& name);
+
+ /**
+ * Get OpenCV access to the primary camera feed. This allows you to
+ * get images from the camera for image processing on the roboRIO.
+ *
+ * This is only valid to call after a camera feed has been added
+ * with StartAutomaticCapture() or AddServer().
+ */
+ cs::CvSink GetVideo();
+
+ /**
+ * Get OpenCV access to the specified camera. This allows you to get
+ * images from the camera for image processing on the roboRIO.
+ *
+ * @param camera Camera (e.g. as returned by StartAutomaticCapture()).
+ */
+ cs::CvSink GetVideo(const cs::VideoSource& camera);
+
+ /**
+ * Get OpenCV access to the specified camera. This allows you to get
+ * images from the camera for image processing on the roboRIO.
+ *
+ * @param name Camera name
+ */
+ cs::CvSink GetVideo(const wpi::Twine& name);
+
+ /**
+ * Creates an MJPEG stream with OpenCV input. This can be called to pass custom
+ * annotated images to the dashboard.
+ *
+ * @param name Name to give the stream
+ * @param width Width of the image being sent
+ * @param height Height of the image being sent
+ */
+ cs::CvSource PutVideo(const wpi::Twine& name, int width, int height);
+
+ /**
+ * Adds an MJPEG server at the next available port.
+ *
+ * @param name Server name
+ */
+ cs::MjpegServer AddServer(const wpi::Twine& name);
+
+ /**
+ * Adds an MJPEG server on the given port.
+ *
+ * @param name Server name
+ * @param port Port number
+ */
+ cs::MjpegServer AddServer(const wpi::Twine& name, int port);
+
+ /**
+ * Adds an already created server.
+ *
+ * @param server Server
+ */
+ void AddServer(const cs::VideoSink& server);
+
+ /**
+ * Removes a server by name.
+ *
+ * @param name Server name
+ */
+ void RemoveServer(const wpi::Twine& name);
+
+ /**
+ * Get server for the primary camera feed.
+ *
+ * This is only valid to call after a camera feed has been added with
+ * StartAutomaticCapture() or AddServer().
+ */
+ cs::VideoSink GetServer();
+
+ /**
+ * Gets a server by name.
+ *
+ * @param name Server name
+ */
+ cs::VideoSink GetServer(const wpi::Twine& name);
+
+ /**
+ * Adds an already created camera.
+ *
+ * @param camera Camera
+ */
+ void AddCamera(const cs::VideoSource& camera);
+
+ /**
+ * Removes a camera by name.
+ *
+ * @param name Camera name
+ */
+ void RemoveCamera(const wpi::Twine& name);
+
+ /**
+ * Sets the size of the image to use. Use the public kSize constants to set
+ * the correct mode, or set the resolution directly on the camera and call
+ * the appropriate StartAutomaticCapture() overload.
+ *
+ * @deprecated Use SetResolution on the UsbCamera returned by
+ * StartAutomaticCapture() instead.
+ * @param size The size to use
+ */
+ void SetSize(int size);
+
+ private:
+ CameraServer();
+ ~CameraServer();
+
+ struct Impl;
+ std::unique_ptr<Impl> m_impl;
+};
+
+} // namespace frc
+
+#include "cameraserver/CameraServer.inc"
diff --git a/cameraserver/src/main/native/include/cameraserver/CameraServer.inc b/cameraserver/src/main/native/include/cameraserver/CameraServer.inc
new file mode 100644
index 0000000..5daf29f
--- /dev/null
+++ b/cameraserver/src/main/native/include/cameraserver/CameraServer.inc
@@ -0,0 +1,30 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <string>
+#include <vector>
+
+namespace frc {
+
+template <typename T>
+inline cs::AxisCamera CameraServer::AddAxisCamera(
+ std::initializer_list<T> hosts) {
+ return AddAxisCamera("Axis Camera", hosts);
+}
+
+template <typename T>
+inline cs::AxisCamera CameraServer::AddAxisCamera(
+ const wpi::Twine& name, std::initializer_list<T> hosts) {
+ std::vector<std::string> vec;
+ vec.reserve(hosts.size());
+ for (const auto& host : hosts) vec.emplace_back(host);
+ return AddAxisCamera(name, vec);
+}
+
+} // namespace frc
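
The initializer_list overloads defined above let callers pass a brace-enclosed list of fallback hosts directly; a brief usage sketch (the camera name and host addresses are illustrative):

#include "cameraserver/CameraServer.h"

// Tries the mDNS name first, then the static IP, under one camera entry.
void AddCameraWithFallback() {
  frc::CameraServer::GetInstance()->AddAxisCamera(
      "Front Axis", {"axis-camera.local", "10.0.0.11"});
}
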
diff --git a/cameraserver/src/main/native/include/cameraserver/CameraServerShared.h b/cameraserver/src/main/native/include/cameraserver/CameraServerShared.h
new file mode 100644
index 0000000..cb72c9b
--- /dev/null
+++ b/cameraserver/src/main/native/include/cameraserver/CameraServerShared.h
@@ -0,0 +1,35 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018-2019 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <memory>
+#include <thread>
+#include <utility>
+
+#include <wpi/Twine.h>
+
+namespace frc {
+class CameraServerShared {
+ public:
+ virtual ~CameraServerShared() = default;
+ virtual void ReportUsbCamera(int id) = 0;
+ virtual void ReportAxisCamera(int id) = 0;
+ virtual void ReportVideoServer(int id) = 0;
+ virtual void SetCameraServerError(const wpi::Twine& error) = 0;
+ virtual void SetVisionRunnerError(const wpi::Twine& error) = 0;
+ virtual void ReportDriverStationError(const wpi::Twine& error) = 0;
+ virtual std::pair<std::thread::id, bool> GetRobotMainThreadId() const = 0;
+};
+
+CameraServerShared* GetCameraServerShared();
+} // namespace frc
+
+extern "C" {
+// Takes ownership
+void CameraServer_SetCameraServerShared(frc::CameraServerShared* shared);
+} // extern "C"
diff --git a/cameraserver/src/main/native/include/vision/VisionPipeline.h b/cameraserver/src/main/native/include/vision/VisionPipeline.h
new file mode 100644
index 0000000..de8e54c
--- /dev/null
+++ b/cameraserver/src/main/native/include/vision/VisionPipeline.h
@@ -0,0 +1,32 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+namespace cv {
+class Mat;
+} // namespace cv
+
+namespace frc {
+
+/**
+ * A vision pipeline is responsible for running a group of OpenCV algorithms to
+ * extract data from an image.
+ *
+ * @see VisionRunner
+ */
+class VisionPipeline {
+ public:
+ virtual ~VisionPipeline() = default;
+
+ /**
+ * Processes the image input and sets the result objects. Implementations
+ * should make these objects accessible.
+ */
+ virtual void Process(cv::Mat& mat) = 0;
+};
+} // namespace frc
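
A minimal implementation of this interface, to show the contract Process() is expected to fulfill; the grayscale conversion and the accessor are illustrative choices, not prescribed by the API:

#include <opencv2/core/mat.hpp>
#include <opencv2/imgproc.hpp>

#include "vision/VisionPipeline.h"

// Converts each frame to grayscale and exposes the result; a real pipeline
// would publish whatever data its algorithms extract from the image.
class GrayscalePipeline : public frc::VisionPipeline {
 public:
  void Process(cv::Mat& mat) override {
    cv::cvtColor(mat, m_gray, cv::COLOR_BGR2GRAY);
  }

  const cv::Mat& GetGray() const { return m_gray; }

 private:
  cv::Mat m_gray;
};
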
diff --git a/cameraserver/src/main/native/include/vision/VisionRunner.h b/cameraserver/src/main/native/include/vision/VisionRunner.h
new file mode 100644
index 0000000..610ac4d
--- /dev/null
+++ b/cameraserver/src/main/native/include/vision/VisionRunner.h
@@ -0,0 +1,100 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2019 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <atomic>
+#include <functional>
+#include <memory>
+
+#include "cscore.h"
+#include "cscore_cv.h"
+#include "vision/VisionPipeline.h"
+
+namespace frc {
+
+/**
+ * Non-template base class for VisionRunner.
+ */
+class VisionRunnerBase {
+ public:
+ /**
+ * Creates a new vision runner. It will take images from the videoSource
+ * and call the virtual DoProcess() method.
+ *
+ * @param videoSource the video source to use to supply images for the
+ * pipeline
+ */
+ explicit VisionRunnerBase(cs::VideoSource videoSource);
+
+ ~VisionRunnerBase();
+
+ VisionRunnerBase(const VisionRunnerBase&) = delete;
+ VisionRunnerBase& operator=(const VisionRunnerBase&) = delete;
+
+ /**
+ * Runs the pipeline one time, giving it the next image from the video source
+ * specified in the constructor. This will block until the source either
+ * produces an image or reports an error. If the source successfully supplied
+ * a frame, the pipeline's image input will be set, the pipeline will run, and
+ * the listener specified in the VisionRunner constructor will be called to
+ * notify it that the pipeline ran. This must be run in a dedicated thread,
+ * and cannot be used in the main robot thread because it will freeze the
+ * robot program.
+ *
+ * This method is exposed to allow teams to add additional functionality or
+ * have their own ways to run the pipeline. Most teams, however, should just
+ * run RunForever() in its own dedicated std::thread.
+ */
+ void RunOnce();
+
+ /**
+ * A convenience method that calls RunOnce() in an infinite loop. This must
+ * be run in a dedicated thread, and cannot be used in the main robot thread
+ * because it will freeze the robot program.
+ *
+ * Do not call this method directly from the main robot thread.
+ */
+ void RunForever();
+
+ /**
+ * Stop a RunForever() loop.
+ */
+ void Stop();
+
+ protected:
+ virtual void DoProcess(cv::Mat& image) = 0;
+
+ private:
+ std::unique_ptr<cv::Mat> m_image;
+ cs::CvSink m_cvSink;
+ std::atomic_bool m_enabled;
+};
+
+/**
+ * A vision runner is a convenient wrapper object to make it easy to run vision
+ * pipelines from robot code. The easiest way to use this is to run it in a
+ * std::thread and use the listener to take snapshots of the pipeline's outputs.
+ *
+ * @see VisionPipeline
+ */
+template <typename T>
+class VisionRunner : public VisionRunnerBase {
+ public:
+ VisionRunner(cs::VideoSource videoSource, T* pipeline,
+ std::function<void(T&)> listener);
+ virtual ~VisionRunner() = default;
+
+ protected:
+ void DoProcess(cv::Mat& image) override;
+
+ private:
+ T* m_pipeline;
+ std::function<void(T&)> m_listener;
+};
+} // namespace frc
+
+#include "VisionRunner.inc"
diff --git a/cameraserver/src/main/native/include/vision/VisionRunner.inc b/cameraserver/src/main/native/include/vision/VisionRunner.inc
new file mode 100644
index 0000000..1a38048
--- /dev/null
+++ b/cameraserver/src/main/native/include/vision/VisionRunner.inc
@@ -0,0 +1,35 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+namespace frc {
+
+/**
+ * Creates a new vision runner. It will take images from the videoSource,
+ * send them to the pipeline, and call the listener when the pipeline has
+ * finished, to alert user code when it is safe to access the pipeline's
+ * outputs.
+ *
+ * @param videoSource The video source to use to supply images for the pipeline
+ * @param pipeline The vision pipeline to run
+ * @param listener A function to call after the pipeline has finished running
+ */
+template <typename T>
+VisionRunner<T>::VisionRunner(cs::VideoSource videoSource, T* pipeline,
+ std::function<void(T&)> listener)
+ : VisionRunnerBase(videoSource),
+ m_pipeline(pipeline),
+ m_listener(listener) {}
+
+template <typename T>
+void VisionRunner<T>::DoProcess(cv::Mat& image) {
+ m_pipeline->Process(image);
+ m_listener(*m_pipeline);
+}
+
+} // namespace frc
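
Tying the pieces together, a sketch of the intended threading model for VisionRunner; the pipeline class and setup function below are illustrative assumptions:

#include <thread>

#include <opencv2/core/mat.hpp>

#include "cameraserver/CameraServer.h"
#include "vision/VisionRunner.h"

// Trivial pipeline for illustration; a real one would extract data.
class CountingPipeline : public frc::VisionPipeline {
 public:
  void Process(cv::Mat&) override { ++m_frames; }
  int GetFrameCount() const { return m_frames; }

 private:
  int m_frames = 0;
};

// Construct everything once, then drive the runner from its own thread;
// RunForever() blocks and must never run on the main robot thread.
void StartVision() {
  static cs::UsbCamera camera =
      frc::CameraServer::GetInstance()->StartAutomaticCapture();
  static CountingPipeline pipeline;
  static frc::VisionRunner<CountingPipeline> runner(
      camera, &pipeline, [](CountingPipeline&) {
        // Called after each frame; the safe point to read pipeline outputs.
      });

  std::thread([] { runner.RunForever(); }).detach();
}
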
diff --git a/cameraserver/src/test/native/cpp/main.cpp b/cameraserver/src/test/native/cpp/main.cpp
new file mode 100644
index 0000000..f07ede3
--- /dev/null
+++ b/cameraserver/src/test/native/cpp/main.cpp
@@ -0,0 +1,8 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+int main() { return 0; }