Squashed 'third_party/allwpilib_2019/' content from commit bd05dfa1c
Change-Id: I2b1c2250cdb9b055133780c33593292098c375b7
git-subtree-dir: third_party/allwpilib_2019
git-subtree-split: bd05dfa1c7cca74c4fac451e7b9d6a37e7b53447
diff --git a/cameraserver/.styleguide b/cameraserver/.styleguide
new file mode 100644
index 0000000..899d9c2
--- /dev/null
+++ b/cameraserver/.styleguide
@@ -0,0 +1,28 @@
+cppHeaderFileInclude {
+ \.h$
+ \.inc$
+}
+
+cppSrcFileInclude {
+ \.cpp$
+}
+
+modifiableFileExclude {
+ \.so$
+}
+
+repoRootNameOverride {
+ cameraserver
+}
+
+includeOtherLibs {
+ ^HAL/
+ ^networktables/
+ ^opencv2/
+ ^support/
+ ^wpi/
+}
+
+includeGuardRoots {
+ cameraserver/src/main/native/include/
+}
diff --git a/cameraserver/CMakeLists.txt b/cameraserver/CMakeLists.txt
new file mode 100644
index 0000000..a3a7460
--- /dev/null
+++ b/cameraserver/CMakeLists.txt
@@ -0,0 +1,57 @@
+project(cameraserver)
+
+find_package( OpenCV REQUIRED )
+
+# Java bindings
+if (NOT WITHOUT_JAVA)
+ find_package(Java REQUIRED)
+ include(UseJava)
+ set(CMAKE_JAVA_COMPILE_FLAGS "-Xlint:unchecked")
+
+ #find java files, copy them locally
+
+ set(OPENCV_JAVA_INSTALL_DIR ${OpenCV_INSTALL_PATH}/share/OpenCV/java/)
+
+ find_file(OPENCV_JAR_FILE NAMES opencv-${OpenCV_VERSION_MAJOR}${OpenCV_VERSION_MINOR}${OpenCV_VERSION_PATCH}.jar PATHS ${OPENCV_JAVA_INSTALL_DIR} ${OpenCV_INSTALL_PATH}/bin NO_DEFAULT_PATH)
+
+ file(GLOB_RECURSE JAVA_SOURCES src/main/java/*.java)
+
+ add_jar(cameraserver_jar ${JAVA_SOURCES} INCLUDE_JARS wpiutil_jar cscore_jar ntcore_jar ${OPENCV_JAR_FILE} OUTPUT_NAME cameraserver)
+
+ get_property(CAMERASERVER_JAR_FILE TARGET cameraserver_jar PROPERTY JAR_FILE)
+ install(FILES ${CAMERASERVER_JAR_FILE} DESTINATION "${java_lib_dest}")
+
+ set_property(TARGET cameraserver_jar PROPERTY FOLDER "java")
+
+endif()
+
+file(GLOB_RECURSE
+ cameraserver_native_src src/main/native/cpp/*.cpp)
+add_library(cameraserver ${cameraserver_native_src})
+set_target_properties(cameraserver PROPERTIES DEBUG_POSTFIX "d")
+target_include_directories(cameraserver PUBLIC
+ $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src/main/native/include>
+ $<INSTALL_INTERFACE:${include_dest}/cameraserver>)
+target_link_libraries(cameraserver PUBLIC ntcore cscore wpiutil ${OpenCV_LIBS})
+
+set_property(TARGET cameraserver PROPERTY FOLDER "libraries")
+
+install(TARGETS cameraserver EXPORT cameraserver DESTINATION "${main_lib_dest}")
+install(DIRECTORY src/main/native/include/ DESTINATION "${include_dest}/cameraserver")
+
+if (NOT WITHOUT_JAVA AND MSVC)
+ install(TARGETS cameraserver RUNTIME DESTINATION "${jni_lib_dest}" COMPONENT Runtime)
+endif()
+
+if (MSVC)
+ set (cameraserver_config_dir ${wpilib_dest})
+else()
+ set (cameraserver_config_dir share/cameraserver)
+endif()
+
+install(FILES cameraserver-config.cmake DESTINATION ${cameraserver_config_dir})
+install(EXPORT cameraserver DESTINATION ${cameraserver_config_dir})
+
+file(GLOB multiCameraServer_src multiCameraServer/src/main/native/cpp/*.cpp)
+add_executable(multiCameraServer ${multiCameraServer_src})
+target_link_libraries(multiCameraServer cameraserver)
diff --git a/cameraserver/build.gradle b/cameraserver/build.gradle
new file mode 100644
index 0000000..2b19454
--- /dev/null
+++ b/cameraserver/build.gradle
@@ -0,0 +1,80 @@
+ext {
+ nativeName = 'cameraserver'
+ devMain = 'edu.wpi.first.cameraserver.DevMain'
+}
+
+evaluationDependsOn(':ntcore')
+evaluationDependsOn(':cscore')
+evaluationDependsOn(':hal')
+
+apply from: "${rootDir}/shared/javacpp/setupBuild.gradle"
+
+dependencies {
+ compile project(':wpiutil')
+ compile project(':ntcore')
+ compile project(':cscore')
+ devCompile project(':wpiutil')
+ devCompile project(':ntcore')
+ devCompile project(':cscore')
+}
+
+ext {
+ sharedCvConfigs = [cameraserver : [],
+ cameraserverBase: [],
+ cameraserverDev : [],
+ cameraserverTest: []]
+ staticCvConfigs = [:]
+ useJava = true
+ useCpp = true
+}
+
+apply from: "${rootDir}/shared/opencv.gradle"
+
+model {
+ // Exports config is a utility to enable exporting all symbols in a C++ library on windows to a DLL.
+ // This removes the need for DllExport on a library. However, the gradle C++ builder has a bug
+ // where some extra symbols are added that cannot be resolved at link time. This configuration
+ // lets you specify specific symbols to exlude from exporting.
+ exportsConfigs {
+ cameraserver(ExportsConfig) {
+ x86ExcludeSymbols = ['_CT??_R0?AV_System_error', '_CT??_R0?AVexception', '_CT??_R0?AVfailure',
+ '_CT??_R0?AVruntime_error', '_CT??_R0?AVsystem_error', '_CTA5?AVfailure',
+ '_TI5?AVfailure', '_CT??_R0?AVout_of_range', '_CTA3?AVout_of_range',
+ '_TI3?AVout_of_range', '_CT??_R0?AVbad_cast']
+ x64ExcludeSymbols = ['_CT??_R0?AV_System_error', '_CT??_R0?AVexception', '_CT??_R0?AVfailure',
+ '_CT??_R0?AVruntime_error', '_CT??_R0?AVsystem_error', '_CTA5?AVfailure',
+ '_TI5?AVfailure', '_CT??_R0?AVout_of_range', '_CTA3?AVout_of_range',
+ '_TI3?AVout_of_range', '_CT??_R0?AVbad_cast']
+ }
+ }
+ components {}
+ binaries {
+ all {
+ if (!it.buildable || !(it instanceof NativeBinarySpec)) {
+ return
+ }
+ lib project: ':ntcore', library: 'ntcore', linkage: 'shared'
+ lib project: ':cscore', library: 'cscore', linkage: 'shared'
+ lib project: ':wpiutil', library: 'wpiutil', linkage: 'shared'
+ }
+ }
+ tasks {
+ def c = $.components
+ def found = false
+ def systemArch = getCurrentArch()
+ c.each {
+ if (it in NativeExecutableSpec && it.name == "${nativeName}Dev") {
+ it.binaries.each {
+ if (!found) {
+ def arch = it.targetPlatform.architecture.name
+ if (arch == systemArch) {
+ def filePath = it.tasks.install.installDirectory.get().toString() + File.separatorChar + 'lib'
+
+ found = true
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/cameraserver/cameraserver-config.cmake b/cameraserver/cameraserver-config.cmake
new file mode 100644
index 0000000..2c2b1cc
--- /dev/null
+++ b/cameraserver/cameraserver-config.cmake
@@ -0,0 +1,8 @@
+include(CMakeFindDependencyMacro)
+find_dependency(wpiutil)
+find_dependency(ntcore)
+find_dependency(cscore)
+find_dependency(OpenCV)
+
+get_filename_component(SELF_DIR "${CMAKE_CURRENT_LIST_FILE}" PATH)
+include(${SELF_DIR}/cameraserver.cmake)
diff --git a/cameraserver/multiCameraServer/build.gradle b/cameraserver/multiCameraServer/build.gradle
new file mode 100644
index 0000000..c49cfb0
--- /dev/null
+++ b/cameraserver/multiCameraServer/build.gradle
@@ -0,0 +1,62 @@
+plugins {
+ id 'java'
+ id 'application'
+ id 'cpp'
+ id 'visual-studio'
+}
+
+apply plugin: 'edu.wpi.first.NativeUtils'
+
+apply from: "${rootDir}/shared/config.gradle"
+
+ext {
+ staticCvConfigs = [multiCameraServerCpp: []]
+ useJava = true
+ useCpp = true
+ skipDev = true
+}
+
+apply from: "${rootDir}/shared/opencv.gradle"
+
+mainClassName = 'Main'
+
+apply plugin: 'com.github.johnrengelman.shadow'
+
+repositories {
+ mavenCentral()
+}
+
+dependencies {
+ compile 'com.google.code.gson:gson:2.8.5'
+
+ compile project(':wpiutil')
+ compile project(':ntcore')
+ compile project(':cscore')
+ compile project(':cameraserver')
+}
+
+model {
+ components {
+ multiCameraServerCpp(NativeExecutableSpec) {
+ targetBuildTypes 'release'
+ sources {
+ cpp {
+ source {
+ srcDirs = ['src/main/native/cpp']
+ includes = ['**/*.cpp']
+ }
+ exportedHeaders {
+ srcDirs = ['src/main/native/include']
+ includes = ['**/*.h']
+ }
+ }
+ }
+ binaries.all { binary ->
+ lib project: ':cameraserver', library: 'cameraserver', linkage: 'static'
+ lib project: ':ntcore', library: 'ntcore', linkage: 'static'
+ lib project: ':cscore', library: 'cscore', linkage: 'static'
+ lib project: ':wpiutil', library: 'wpiutil', linkage: 'static'
+ }
+ }
+ }
+}
diff --git a/cameraserver/multiCameraServer/src/main/java/Main.java b/cameraserver/multiCameraServer/src/main/java/Main.java
new file mode 100644
index 0000000..8bcc54d
--- /dev/null
+++ b/cameraserver/multiCameraServer/src/main/java/Main.java
@@ -0,0 +1,211 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+
+import edu.wpi.cscore.VideoSource;
+import edu.wpi.first.cameraserver.CameraServer;
+import edu.wpi.first.networktables.NetworkTableInstance;
+
+/*
+ JSON format:
+ {
+ "team": <team number>,
+ "ntmode": <"client" or "server", "client" if unspecified>
+ "cameras": [
+ {
+ "name": <camera name>
+ "path": <path, e.g. "/dev/video0">
+ "pixel format": <"MJPEG", "YUYV", etc> // optional
+ "width": <video mode width> // optional
+ "height": <video mode height> // optional
+ "fps": <video mode fps> // optional
+ "brightness": <percentage brightness> // optional
+ "white balance": <"auto", "hold", value> // optional
+ "exposure": <"auto", "hold", value> // optional
+ "properties": [ // optional
+ {
+ "name": <property name>
+ "value": <property value>
+ }
+ ]
+ }
+ ]
+ }
+ */
+
+public final class Main {
+ private static String configFile = "/boot/frc.json";
+
+ @SuppressWarnings("MemberName")
+ public static class CameraConfig {
+ public String name;
+ public String path;
+ public JsonObject config;
+ }
+
+ public static int team;
+ public static boolean server;
+ public static List<CameraConfig> cameras = new ArrayList<>();
+
+ private Main() {
+ }
+
+ /**
+ * Report parse error.
+ */
+ public static void parseError(String str) {
+ System.err.println("config error in '" + configFile + "': " + str);
+ }
+
+ /**
+ * Read single camera configuration.
+ */
+ public static boolean readCameraConfig(JsonObject config) {
+ CameraConfig cam = new CameraConfig();
+
+ // name
+ JsonElement nameElement = config.get("name");
+ if (nameElement == null) {
+ parseError("could not read camera name");
+ return false;
+ }
+ cam.name = nameElement.getAsString();
+
+ // path
+ JsonElement pathElement = config.get("path");
+ if (pathElement == null) {
+ parseError("camera '" + cam.name + "': could not read path");
+ return false;
+ }
+ cam.path = pathElement.getAsString();
+
+ cam.config = config;
+
+ cameras.add(cam);
+ return true;
+ }
+
+ /**
+ * Read configuration file.
+ */
+ @SuppressWarnings("PMD.CyclomaticComplexity")
+ public static boolean readConfig() {
+ // parse file
+ JsonElement top;
+ try {
+ top = new JsonParser().parse(Files.newBufferedReader(Paths.get(configFile)));
+ } catch (IOException ex) {
+ System.err.println("could not open '" + configFile + "': " + ex);
+ return false;
+ }
+
+ // top level must be an object
+ if (!top.isJsonObject()) {
+ parseError("must be JSON object");
+ return false;
+ }
+ JsonObject obj = top.getAsJsonObject();
+
+ // team number
+ JsonElement teamElement = obj.get("team");
+ if (teamElement == null) {
+ parseError("could not read team number");
+ return false;
+ }
+ team = teamElement.getAsInt();
+
+ // ntmode (optional)
+ if (obj.has("ntmode")) {
+ String str = obj.get("ntmode").getAsString();
+ if ("client".equalsIgnoreCase(str)) {
+ server = false;
+ } else if ("server".equalsIgnoreCase(str)) {
+ server = true;
+ } else {
+ parseError("could not understand ntmode value '" + str + "'");
+ }
+ }
+
+ // cameras
+ JsonElement camerasElement = obj.get("cameras");
+ if (camerasElement == null) {
+ parseError("could not read cameras");
+ return false;
+ }
+ JsonArray cameras = camerasElement.getAsJsonArray();
+ for (JsonElement camera : cameras) {
+ if (!readCameraConfig(camera.getAsJsonObject())) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ /**
+ * Start running the camera.
+ */
+ public static void startCamera(CameraConfig config) {
+ System.out.println("Starting camera '" + config.name + "' on " + config.path);
+ VideoSource camera = CameraServer.getInstance().startAutomaticCapture(
+ config.name, config.path);
+
+ Gson gson = new GsonBuilder().create();
+
+ camera.setConfigJson(gson.toJson(config.config));
+ }
+
+ /**
+ * Main.
+ */
+ public static void main(String... args) {
+ if (args.length > 0) {
+ configFile = args[0];
+ }
+
+ // read configuration
+ if (!readConfig()) {
+ return;
+ }
+
+ // start NetworkTables
+ NetworkTableInstance ntinst = NetworkTableInstance.getDefault();
+ if (server) {
+ System.out.println("Setting up NetworkTables server");
+ ntinst.startServer();
+ } else {
+ System.out.println("Setting up NetworkTables client for team " + team);
+ ntinst.startClientTeam(team);
+ }
+
+ // start cameras
+ for (CameraConfig camera : cameras) {
+ startCamera(camera);
+ }
+
+ // loop forever
+ for (;;) {
+ try {
+ Thread.sleep(10000);
+ } catch (InterruptedException ex) {
+ return;
+ }
+ }
+ }
+}
diff --git a/cameraserver/multiCameraServer/src/main/native/cpp/main.cpp b/cameraserver/multiCameraServer/src/main/native/cpp/main.cpp
new file mode 100644
index 0000000..dbfc7b6
--- /dev/null
+++ b/cameraserver/multiCameraServer/src/main/native/cpp/main.cpp
@@ -0,0 +1,190 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#include <cstdio>
+#include <string>
+#include <vector>
+
+#include <networktables/NetworkTableInstance.h>
+#include <wpi/StringRef.h>
+#include <wpi/json.h>
+#include <wpi/raw_istream.h>
+#include <wpi/raw_ostream.h>
+
+#include "cameraserver/CameraServer.h"
+
+/*
+ JSON format:
+ {
+ "team": <team number>,
+ "ntmode": <"client" or "server", "client" if unspecified>
+ "cameras": [
+ {
+ "name": <camera name>
+ "path": <path, e.g. "/dev/video0">
+ "pixel format": <"MJPEG", "YUYV", etc> // optional
+ "width": <video mode width> // optional
+ "height": <video mode height> // optional
+ "fps": <video mode fps> // optional
+ "brightness": <percentage brightness> // optional
+ "white balance": <"auto", "hold", value> // optional
+ "exposure": <"auto", "hold", value> // optional
+ "properties": [ // optional
+ {
+ "name": <property name>
+ "value": <property value>
+ }
+ ]
+ }
+ ]
+ }
+ */
+
+#ifdef __RASPBIAN__
+static const char* configFile = "/boot/frc.json";
+#else
+static const char* configFile = "frc.json";
+#endif
+
+namespace {
+
+unsigned int team;
+bool server = false;
+
+struct CameraConfig {
+ std::string name;
+ std::string path;
+ wpi::json config;
+};
+
+std::vector<CameraConfig> cameras;
+
+wpi::raw_ostream& ParseError() {
+ return wpi::errs() << "config error in '" << configFile << "': ";
+}
+
+bool ReadCameraConfig(const wpi::json& config) {
+ CameraConfig c;
+
+ // name
+ try {
+ c.name = config.at("name").get<std::string>();
+ } catch (const wpi::json::exception& e) {
+ ParseError() << "could not read camera name: " << e.what() << '\n';
+ return false;
+ }
+
+ // path
+ try {
+ c.path = config.at("path").get<std::string>();
+ } catch (const wpi::json::exception& e) {
+ ParseError() << "camera '" << c.name
+ << "': could not read path: " << e.what() << '\n';
+ return false;
+ }
+
+ c.config = config;
+
+ cameras.emplace_back(std::move(c));
+ return true;
+}
+
+bool ReadConfig() {
+ // open config file
+ std::error_code ec;
+ wpi::raw_fd_istream is(configFile, ec);
+ if (ec) {
+ wpi::errs() << "could not open '" << configFile << "': " << ec.message()
+ << '\n';
+ return false;
+ }
+
+ // parse file
+ wpi::json j;
+ try {
+ j = wpi::json::parse(is);
+ } catch (const wpi::json::parse_error& e) {
+ ParseError() << "byte " << e.byte << ": " << e.what() << '\n';
+ return false;
+ }
+
+ // top level must be an object
+ if (!j.is_object()) {
+ ParseError() << "must be JSON object\n";
+ return false;
+ }
+
+ // team number
+ try {
+ team = j.at("team").get<unsigned int>();
+ } catch (const wpi::json::exception& e) {
+ ParseError() << "could not read team number: " << e.what() << '\n';
+ return false;
+ }
+
+ // ntmode (optional)
+ if (j.count("ntmode") != 0) {
+ try {
+ auto str = j.at("ntmode").get<std::string>();
+ wpi::StringRef s(str);
+ if (s.equals_lower("client")) {
+ server = false;
+ } else if (s.equals_lower("server")) {
+ server = true;
+ } else {
+ ParseError() << "could not understand ntmode value '" << str << "'\n";
+ }
+ } catch (const wpi::json::exception& e) {
+ ParseError() << "could not read ntmode: " << e.what() << '\n';
+ }
+ }
+
+ // cameras
+ try {
+ for (auto&& camera : j.at("cameras")) {
+ if (!ReadCameraConfig(camera)) return false;
+ }
+ } catch (const wpi::json::exception& e) {
+ ParseError() << "could not read cameras: " << e.what() << '\n';
+ return false;
+ }
+
+ return true;
+}
+
+void StartCamera(const CameraConfig& config) {
+ wpi::outs() << "Starting camera '" << config.name << "' on " << config.path
+ << '\n';
+ auto camera = frc::CameraServer::GetInstance()->StartAutomaticCapture(
+ config.name, config.path);
+
+ camera.SetConfigJson(config.config);
+}
+} // namespace
+
+int main(int argc, char* argv[]) {
+ if (argc >= 2) configFile = argv[1];
+
+ // read configuration
+ if (!ReadConfig()) return EXIT_FAILURE;
+
+ // start NetworkTables
+ auto ntinst = nt::NetworkTableInstance::GetDefault();
+ if (server) {
+ wpi::outs() << "Setting up NetworkTables server\n";
+ ntinst.StartServer();
+ } else {
+ wpi::outs() << "Setting up NetworkTables client for team " << team << '\n';
+ ntinst.StartClientTeam(team);
+ }
+
+ // start cameras
+ for (auto&& camera : cameras) StartCamera(camera);
+
+ // loop forever
+ for (;;) std::this_thread::sleep_for(std::chrono::seconds(10));
+}
diff --git a/cameraserver/src/dev/java/edu/wpi/first/cameraserver/DevMain.java b/cameraserver/src/dev/java/edu/wpi/first/cameraserver/DevMain.java
new file mode 100644
index 0000000..1182ac4
--- /dev/null
+++ b/cameraserver/src/dev/java/edu/wpi/first/cameraserver/DevMain.java
@@ -0,0 +1,17 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.cameraserver;
+
+public final class DevMain {
+ public static void main(String[] args) {
+
+ }
+
+ private DevMain() {
+ }
+}
diff --git a/cameraserver/src/dev/native/cpp/main.cpp b/cameraserver/src/dev/native/cpp/main.cpp
new file mode 100644
index 0000000..e324b44
--- /dev/null
+++ b/cameraserver/src/dev/native/cpp/main.cpp
@@ -0,0 +1,8 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+int main() {}
diff --git a/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServer.java b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServer.java
new file mode 100644
index 0000000..58da593
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServer.java
@@ -0,0 +1,775 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.cameraserver;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Hashtable;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import edu.wpi.cscore.AxisCamera;
+import edu.wpi.cscore.CameraServerJNI;
+import edu.wpi.cscore.CvSink;
+import edu.wpi.cscore.CvSource;
+import edu.wpi.cscore.MjpegServer;
+import edu.wpi.cscore.UsbCamera;
+import edu.wpi.cscore.VideoEvent;
+import edu.wpi.cscore.VideoException;
+import edu.wpi.cscore.VideoListener;
+import edu.wpi.cscore.VideoMode;
+import edu.wpi.cscore.VideoMode.PixelFormat;
+import edu.wpi.cscore.VideoProperty;
+import edu.wpi.cscore.VideoSink;
+import edu.wpi.cscore.VideoSource;
+import edu.wpi.first.networktables.EntryListenerFlags;
+import edu.wpi.first.networktables.NetworkTable;
+import edu.wpi.first.networktables.NetworkTableEntry;
+import edu.wpi.first.networktables.NetworkTableInstance;
+
+/**
+ * Singleton class for creating and keeping camera servers.
+ * Also publishes camera information to NetworkTables.
+ */
+@SuppressWarnings("PMD.TooManyMethods")
+public final class CameraServer {
+ public static final int kBasePort = 1181;
+
+ @Deprecated
+ public static final int kSize640x480 = 0;
+ @Deprecated
+ public static final int kSize320x240 = 1;
+ @Deprecated
+ public static final int kSize160x120 = 2;
+
+ private static final String kPublishName = "/CameraPublisher";
+ private static CameraServer server;
+
+ /**
+ * Get the CameraServer instance.
+ */
+ public static synchronized CameraServer getInstance() {
+ if (server == null) {
+ server = new CameraServer();
+ }
+ return server;
+ }
+
+ private final AtomicInteger m_defaultUsbDevice;
+ private String m_primarySourceName;
+ private final Map<String, VideoSource> m_sources;
+ private final Map<String, VideoSink> m_sinks;
+ private final Map<Integer, NetworkTable> m_tables; // indexed by source handle
+ private final NetworkTable m_publishTable;
+ private final VideoListener m_videoListener; //NOPMD
+ private final int m_tableListener; //NOPMD
+ private int m_nextPort;
+ private String[] m_addresses;
+
+ @SuppressWarnings("JavadocMethod")
+ private static String makeSourceValue(int source) {
+ switch (VideoSource.getKindFromInt(CameraServerJNI.getSourceKind(source))) {
+ case kUsb:
+ return "usb:" + CameraServerJNI.getUsbCameraPath(source);
+ case kHttp: {
+ String[] urls = CameraServerJNI.getHttpCameraUrls(source);
+ if (urls.length > 0) {
+ return "ip:" + urls[0];
+ } else {
+ return "ip:";
+ }
+ }
+ case kCv:
+ return "cv:";
+ default:
+ return "unknown:";
+ }
+ }
+
+ @SuppressWarnings("JavadocMethod")
+ private static String makeStreamValue(String address, int port) {
+ return "mjpg:http://" + address + ":" + port + "/?action=stream";
+ }
+
+ @SuppressWarnings({"JavadocMethod", "PMD.AvoidUsingHardCodedIP"})
+ private synchronized String[] getSinkStreamValues(int sink) {
+ // Ignore all but MjpegServer
+ if (VideoSink.getKindFromInt(CameraServerJNI.getSinkKind(sink)) != VideoSink.Kind.kMjpeg) {
+ return new String[0];
+ }
+
+ // Get port
+ int port = CameraServerJNI.getMjpegServerPort(sink);
+
+ // Generate values
+ ArrayList<String> values = new ArrayList<>(m_addresses.length + 1);
+ String listenAddress = CameraServerJNI.getMjpegServerListenAddress(sink);
+ if (!listenAddress.isEmpty()) {
+ // If a listen address is specified, only use that
+ values.add(makeStreamValue(listenAddress, port));
+ } else {
+ // Otherwise generate for hostname and all interface addresses
+ values.add(makeStreamValue(CameraServerJNI.getHostname() + ".local", port));
+ for (String addr : m_addresses) {
+ if ("127.0.0.1".equals(addr)) {
+ continue; // ignore localhost
+ }
+ values.add(makeStreamValue(addr, port));
+ }
+ }
+
+ return values.toArray(new String[0]);
+ }
+
+ @SuppressWarnings({"JavadocMethod", "PMD.AvoidUsingHardCodedIP"})
+ private synchronized String[] getSourceStreamValues(int source) {
+ // Ignore all but HttpCamera
+ if (VideoSource.getKindFromInt(CameraServerJNI.getSourceKind(source))
+ != VideoSource.Kind.kHttp) {
+ return new String[0];
+ }
+
+ // Generate values
+ String[] values = CameraServerJNI.getHttpCameraUrls(source);
+ for (int j = 0; j < values.length; j++) {
+ values[j] = "mjpg:" + values[j];
+ }
+
+ // Look to see if we have a passthrough server for this source
+ for (VideoSink i : m_sinks.values()) {
+ int sink = i.getHandle();
+ int sinkSource = CameraServerJNI.getSinkSource(sink);
+ if (source == sinkSource
+ && VideoSink.getKindFromInt(CameraServerJNI.getSinkKind(sink)) == VideoSink.Kind.kMjpeg) {
+ // Add USB-only passthrough
+ String[] finalValues = Arrays.copyOf(values, values.length + 1);
+ int port = CameraServerJNI.getMjpegServerPort(sink);
+ finalValues[values.length] = makeStreamValue("172.22.11.2", port);
+ return finalValues;
+ }
+ }
+
+ return values;
+ }
+
+ @SuppressWarnings({"JavadocMethod", "PMD.AvoidUsingHardCodedIP"})
+ private synchronized void updateStreamValues() {
+ // Over all the sinks...
+ for (VideoSink i : m_sinks.values()) {
+ int sink = i.getHandle();
+
+ // Get the source's subtable (if none exists, we're done)
+ int source = CameraServerJNI.getSinkSource(sink);
+ if (source == 0) {
+ continue;
+ }
+ NetworkTable table = m_tables.get(source);
+ if (table != null) {
+ // Don't set stream values if this is a HttpCamera passthrough
+ if (VideoSource.getKindFromInt(CameraServerJNI.getSourceKind(source))
+ == VideoSource.Kind.kHttp) {
+ continue;
+ }
+
+ // Set table value
+ String[] values = getSinkStreamValues(sink);
+ if (values.length > 0) {
+ table.getEntry("streams").setStringArray(values);
+ }
+ }
+ }
+
+ // Over all the sources...
+ for (VideoSource i : m_sources.values()) {
+ int source = i.getHandle();
+
+ // Get the source's subtable (if none exists, we're done)
+ NetworkTable table = m_tables.get(source);
+ if (table != null) {
+ // Set table value
+ String[] values = getSourceStreamValues(source);
+ if (values.length > 0) {
+ table.getEntry("streams").setStringArray(values);
+ }
+ }
+ }
+ }
+
+ @SuppressWarnings("JavadocMethod")
+ private static String pixelFormatToString(PixelFormat pixelFormat) {
+ switch (pixelFormat) {
+ case kMJPEG:
+ return "MJPEG";
+ case kYUYV:
+ return "YUYV";
+ case kRGB565:
+ return "RGB565";
+ case kBGR:
+ return "BGR";
+ case kGray:
+ return "Gray";
+ default:
+ return "Unknown";
+ }
+ }
+
+ /// Provide string description of video mode.
+ /// The returned string is "{width}x{height} {format} {fps} fps".
+ @SuppressWarnings("JavadocMethod")
+ private static String videoModeToString(VideoMode mode) {
+ return mode.width + "x" + mode.height + " " + pixelFormatToString(mode.pixelFormat)
+ + " " + mode.fps + " fps";
+ }
+
+ @SuppressWarnings("JavadocMethod")
+ private static String[] getSourceModeValues(int sourceHandle) {
+ VideoMode[] modes = CameraServerJNI.enumerateSourceVideoModes(sourceHandle);
+ String[] modeStrings = new String[modes.length];
+ for (int i = 0; i < modes.length; i++) {
+ modeStrings[i] = videoModeToString(modes[i]);
+ }
+ return modeStrings;
+ }
+
+ @SuppressWarnings({"JavadocMethod", "PMD.CyclomaticComplexity"})
+ private static void putSourcePropertyValue(NetworkTable table, VideoEvent event, boolean isNew) {
+ String name;
+ String infoName;
+ if (event.name.startsWith("raw_")) {
+ name = "RawProperty/" + event.name;
+ infoName = "RawPropertyInfo/" + event.name;
+ } else {
+ name = "Property/" + event.name;
+ infoName = "PropertyInfo/" + event.name;
+ }
+
+ NetworkTableEntry entry = table.getEntry(name);
+ try {
+ switch (event.propertyKind) {
+ case kBoolean:
+ if (isNew) {
+ entry.setDefaultBoolean(event.value != 0);
+ } else {
+ entry.setBoolean(event.value != 0);
+ }
+ break;
+ case kInteger:
+ case kEnum:
+ if (isNew) {
+ entry.setDefaultDouble(event.value);
+ table.getEntry(infoName + "/min").setDouble(
+ CameraServerJNI.getPropertyMin(event.propertyHandle));
+ table.getEntry(infoName + "/max").setDouble(
+ CameraServerJNI.getPropertyMax(event.propertyHandle));
+ table.getEntry(infoName + "/step").setDouble(
+ CameraServerJNI.getPropertyStep(event.propertyHandle));
+ table.getEntry(infoName + "/default").setDouble(
+ CameraServerJNI.getPropertyDefault(event.propertyHandle));
+ } else {
+ entry.setDouble(event.value);
+ }
+ break;
+ case kString:
+ if (isNew) {
+ entry.setDefaultString(event.valueStr);
+ } else {
+ entry.setString(event.valueStr);
+ }
+ break;
+ default:
+ break;
+ }
+ } catch (VideoException ignored) {
+ // ignore
+ }
+ }
+
+ @SuppressWarnings({"JavadocMethod", "PMD.UnusedLocalVariable", "PMD.ExcessiveMethodLength",
+ "PMD.NPathComplexity"})
+ private CameraServer() {
+ m_defaultUsbDevice = new AtomicInteger();
+ m_sources = new Hashtable<>();
+ m_sinks = new Hashtable<>();
+ m_tables = new Hashtable<>();
+ m_publishTable = NetworkTableInstance.getDefault().getTable(kPublishName);
+ m_nextPort = kBasePort;
+ m_addresses = new String[0];
+
+ // We publish sources to NetworkTables using the following structure:
+ // "/CameraPublisher/{Source.Name}/" - root
+ // - "source" (string): Descriptive, prefixed with type (e.g. "usb:0")
+ // - "streams" (string array): URLs that can be used to stream data
+ // - "description" (string): Description of the source
+ // - "connected" (boolean): Whether source is connected
+ // - "mode" (string): Current video mode
+ // - "modes" (string array): Available video modes
+ // - "Property/{Property}" - Property values
+ // - "PropertyInfo/{Property}" - Property supporting information
+
+ // Listener for video events
+ m_videoListener = new VideoListener(event -> {
+ switch (event.kind) {
+ case kSourceCreated: {
+ // Create subtable for the camera
+ NetworkTable table = m_publishTable.getSubTable(event.name);
+ m_tables.put(event.sourceHandle, table);
+ table.getEntry("source").setString(makeSourceValue(event.sourceHandle));
+ table.getEntry("description").setString(
+ CameraServerJNI.getSourceDescription(event.sourceHandle));
+ table.getEntry("connected").setBoolean(
+ CameraServerJNI.isSourceConnected(event.sourceHandle));
+ table.getEntry("streams").setStringArray(getSourceStreamValues(event.sourceHandle));
+ try {
+ VideoMode mode = CameraServerJNI.getSourceVideoMode(event.sourceHandle);
+ table.getEntry("mode").setDefaultString(videoModeToString(mode));
+ table.getEntry("modes").setStringArray(getSourceModeValues(event.sourceHandle));
+ } catch (VideoException ignored) {
+ // Do nothing. Let the other event handlers update this if there is an error.
+ }
+ break;
+ }
+ case kSourceDestroyed: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ table.getEntry("source").setString("");
+ table.getEntry("streams").setStringArray(new String[0]);
+ table.getEntry("modes").setStringArray(new String[0]);
+ }
+ break;
+ }
+ case kSourceConnected: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ // update the description too (as it may have changed)
+ table.getEntry("description").setString(
+ CameraServerJNI.getSourceDescription(event.sourceHandle));
+ table.getEntry("connected").setBoolean(true);
+ }
+ break;
+ }
+ case kSourceDisconnected: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ table.getEntry("connected").setBoolean(false);
+ }
+ break;
+ }
+ case kSourceVideoModesUpdated: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ table.getEntry("modes").setStringArray(getSourceModeValues(event.sourceHandle));
+ }
+ break;
+ }
+ case kSourceVideoModeChanged: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ table.getEntry("mode").setString(videoModeToString(event.mode));
+ }
+ break;
+ }
+ case kSourcePropertyCreated: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ putSourcePropertyValue(table, event, true);
+ }
+ break;
+ }
+ case kSourcePropertyValueUpdated: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ putSourcePropertyValue(table, event, false);
+ }
+ break;
+ }
+ case kSourcePropertyChoicesUpdated: {
+ NetworkTable table = m_tables.get(event.sourceHandle);
+ if (table != null) {
+ try {
+ String[] choices = CameraServerJNI.getEnumPropertyChoices(event.propertyHandle);
+ table.getEntry("PropertyInfo/" + event.name + "/choices").setStringArray(choices);
+ } catch (VideoException ignored) {
+ // ignore
+ }
+ }
+ break;
+ }
+ case kSinkSourceChanged:
+ case kSinkCreated:
+ case kSinkDestroyed:
+ case kNetworkInterfacesChanged: {
+ m_addresses = CameraServerJNI.getNetworkInterfaces();
+ updateStreamValues();
+ break;
+ }
+ default:
+ break;
+ }
+ }, 0x4fff, true);
+
+ // Listener for NetworkTable events
+ // We don't currently support changing settings via NT due to
+ // synchronization issues, so just update to current setting if someone
+ // else tries to change it.
+ m_tableListener = NetworkTableInstance.getDefault().addEntryListener(kPublishName + "/",
+ event -> {
+ String relativeKey = event.name.substring(kPublishName.length() + 1);
+
+ // get source (sourceName/...)
+ int subKeyIndex = relativeKey.indexOf('/');
+ if (subKeyIndex == -1) {
+ return;
+ }
+ String sourceName = relativeKey.substring(0, subKeyIndex);
+ VideoSource source = m_sources.get(sourceName);
+ if (source == null) {
+ return;
+ }
+
+ // get subkey
+ relativeKey = relativeKey.substring(subKeyIndex + 1);
+
+ // handle standard names
+ String propName;
+ if ("mode".equals(relativeKey)) {
+ // reset to current mode
+ event.getEntry().setString(videoModeToString(source.getVideoMode()));
+ return;
+ } else if (relativeKey.startsWith("Property/")) {
+ propName = relativeKey.substring(9);
+ } else if (relativeKey.startsWith("RawProperty/")) {
+ propName = relativeKey.substring(12);
+ } else {
+ return; // ignore
+ }
+
+ // everything else is a property
+ VideoProperty property = source.getProperty(propName);
+ switch (property.getKind()) {
+ case kNone:
+ return;
+ case kBoolean:
+ // reset to current setting
+ event.getEntry().setBoolean(property.get() != 0);
+ return;
+ case kInteger:
+ case kEnum:
+ // reset to current setting
+ event.getEntry().setDouble(property.get());
+ return;
+ case kString:
+ // reset to current setting
+ event.getEntry().setString(property.getString());
+ return;
+ default:
+ return;
+ }
+ }, EntryListenerFlags.kImmediate | EntryListenerFlags.kUpdate);
+ }
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * <p>You should call this method to see a camera feed on the dashboard.
+ * If you also want to perform vision processing on the roboRIO, use
+ * getVideo() to get access to the camera images.
+ *
+ * <p>The first time this overload is called, it calls
+ * {@link #startAutomaticCapture(int)} with device 0, creating a camera
+ * named "USB Camera 0". Subsequent calls increment the device number
+ * (e.g. 1, 2, etc).
+ *
+ * @return The created USB camera.
+ */
+ public UsbCamera startAutomaticCapture() {
+ UsbCamera camera = startAutomaticCapture(m_defaultUsbDevice.getAndIncrement());
+ CameraServerSharedStore.getCameraServerShared().reportUsbCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * <p>This overload calls {@link #startAutomaticCapture(String, int)} with
+ * a name of "USB Camera {dev}".
+ *
+ * @param dev The device number of the camera interface
+ * @return The created USB camera.
+ */
+ public UsbCamera startAutomaticCapture(int dev) {
+ UsbCamera camera = new UsbCamera("USB Camera " + dev, dev);
+ startAutomaticCapture(camera);
+ CameraServerSharedStore.getCameraServerShared().reportUsbCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * @param name The name to give the camera
+ * @param dev The device number of the camera interface
+ * @return The created USB camera.
+ */
+ public UsbCamera startAutomaticCapture(String name, int dev) {
+ UsbCamera camera = new UsbCamera(name, dev);
+ startAutomaticCapture(camera);
+ CameraServerSharedStore.getCameraServerShared().reportUsbCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * @param name The name to give the camera
+ * @param path The device path (e.g. "/dev/video0") of the camera
+ * @return The created USB camera.
+ */
+ public UsbCamera startAutomaticCapture(String name, String path) {
+ UsbCamera camera = new UsbCamera(name, path);
+ startAutomaticCapture(camera);
+ CameraServerSharedStore.getCameraServerShared().reportUsbCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Start automatically capturing images to send to the dashboard from
+ * an existing camera.
+ *
+ * <p>The camera is registered via addCamera() and an MJPEG server named
+ * "serve_" + camera name is created to serve its feed.
+ *
+ * @param camera Camera
+ * @return The MJPEG server that serves the camera feed.
+ */
+ public MjpegServer startAutomaticCapture(VideoSource camera) {
+ addCamera(camera);
+ MjpegServer server = addServer("serve_" + camera.getName());
+ server.setSource(camera);
+ return server;
+ }
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * <p>This overload calls {@link #addAxisCamera(String, String)} with
+ * name "Axis Camera".
+ *
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ * @return The created Axis camera.
+ */
+ public AxisCamera addAxisCamera(String host) {
+ return addAxisCamera("Axis Camera", host);
+ }
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * <p>This overload calls {@link #addAxisCamera(String, String[])} with
+ * name "Axis Camera".
+ *
+ * @param hosts Array of Camera host IPs/DNS names
+ * @return The created Axis camera.
+ */
+ public AxisCamera addAxisCamera(String[] hosts) {
+ return addAxisCamera("Axis Camera", hosts);
+ }
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ * @return The created Axis camera.
+ */
+ public AxisCamera addAxisCamera(String name, String host) {
+ // Create a passthrough MJPEG server so the Axis stream is re-served
+ // locally like any other automatic capture.
+ startAutomaticCapture(camera);
+ CameraServerSharedStore.getCameraServerShared().reportAxisCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param hosts Array of Camera host IPs/DNS names
+ * @return The created Axis camera.
+ */
+ public AxisCamera addAxisCamera(String name, String[] hosts) {
+ AxisCamera camera = new AxisCamera(name, hosts);
+ // Create a passthrough MJPEG server so the Axis stream is re-served
+ // locally like any other automatic capture.
+ startAutomaticCapture(camera);
+ CameraServerSharedStore.getCameraServerShared().reportAxisCamera(camera.getHandle());
+ return camera;
+ }
+
+ /**
+ * Get OpenCV access to the primary camera feed. This allows you to
+ * get images from the camera for image processing on the roboRIO.
+ *
+ * <p>This is only valid to call after a camera feed has been added
+ * with startAutomaticCapture() or addServer().
+ *
+ * @return An OpenCV sink connected to the primary camera.
+ * @throws VideoException if no camera is available.
+ */
+ public CvSink getVideo() {
+ VideoSource source;
+ synchronized (this) {
+ if (m_primarySourceName == null) {
+ throw new VideoException("no camera available");
+ }
+ source = m_sources.get(m_primarySourceName);
+ }
+ // The primary name may be set while the source was removed; treat
+ // that the same as having no camera at all.
+ if (source == null) {
+ throw new VideoException("no camera available");
+ }
+ return getVideo(source);
+ }
+
+ /**
+ * Get OpenCV access to the specified camera. This allows you to get
+ * images from the camera for image processing on the roboRIO.
+ *
+ * <p>Sinks are cached per camera name ("opencv_" + name); repeated
+ * calls for the same camera return the same sink.
+ *
+ * @param camera Camera (e.g. as returned by startAutomaticCapture).
+ * @return An OpenCV sink for the camera.
+ * @throws VideoException if a non-OpenCV sink already exists under the
+ *         same name.
+ */
+ public CvSink getVideo(VideoSource camera) {
+ String name = "opencv_" + camera.getName();
+
+ synchronized (this) {
+ VideoSink sink = m_sinks.get(name);
+ if (sink != null) {
+ VideoSink.Kind kind = sink.getKind();
+ if (kind != VideoSink.Kind.kCv) {
+ throw new VideoException("expected OpenCV sink, but got " + kind);
+ }
+ return (CvSink) sink;
+ }
+ }
+
+ // NOTE(review): the new sink is created outside the lock, so two
+ // concurrent callers could each create a CvSink for the same camera;
+ // the later addServer() call replaces the earlier map entry. Confirm
+ // this race is acceptable.
+ CvSink newsink = new CvSink(name);
+ newsink.setSource(camera);
+ addServer(newsink);
+ return newsink;
+ }
+
+ /**
+ * Get OpenCV access to the specified camera. This allows you to get
+ * images from the camera for image processing on the roboRIO.
+ *
+ * @param name Camera name
+ * @return An OpenCV sink for the named camera.
+ * @throws VideoException if no camera with that name has been added.
+ */
+ public CvSink getVideo(String name) {
+ VideoSource source;
+ synchronized (this) {
+ source = m_sources.get(name);
+ if (source == null) {
+ throw new VideoException("could not find camera " + name);
+ }
+ }
+ return getVideo(source);
+ }
+
+ /**
+ * Create a MJPEG stream with OpenCV input. This can be called to pass custom
+ * annotated images to the dashboard.
+ *
+ * @param name Name to give the stream
+ * @param width Width of the image being sent
+ * @param height Height of the image being sent
+ * @return The created OpenCV source (MJPEG pixel format, 30 fps).
+ */
+ public CvSource putVideo(String name, int width, int height) {
+ CvSource source = new CvSource(name, VideoMode.PixelFormat.kMJPEG, width, height, 30);
+ startAutomaticCapture(source);
+ return source;
+ }
+
+ /**
+ * Adds a MJPEG server at the next available port.
+ *
+ * <p>Ports are handed out sequentially from an internal counter.
+ *
+ * @param name Server name
+ * @return The created MJPEG server.
+ */
+ public MjpegServer addServer(String name) {
+ int port;
+ synchronized (this) {
+ port = m_nextPort;
+ m_nextPort++;
+ }
+ return addServer(name, port);
+ }
+
+ /**
+ * Adds a MJPEG server.
+ *
+ * @param name Server name
+ * @param port Port the server listens on
+ * @return The created MJPEG server.
+ */
+ public MjpegServer addServer(String name, int port) {
+ MjpegServer server = new MjpegServer(name, port);
+ addServer(server);
+ return server;
+ }
+
+ /**
+ * Adds an already created server.
+ *
+ * <p>A server with the same name replaces any existing entry.
+ *
+ * @param server Server
+ */
+ public void addServer(VideoSink server) {
+ synchronized (this) {
+ m_sinks.put(server.getName(), server);
+ }
+ }
+
+ /**
+ * Removes a server by name.
+ *
+ * <p>Removing an unknown name is a no-op.
+ *
+ * @param name Server name
+ */
+ public void removeServer(String name) {
+ synchronized (this) {
+ m_sinks.remove(name);
+ }
+ }
+
+ /**
+ * Get server for the primary camera feed.
+ *
+ * <p>This is only valid to call after a camera feed has been added
+ * with startAutomaticCapture() or addServer().
+ *
+ * @return The server named "serve_" + primary camera name.
+ * @throws VideoException if no camera is available.
+ */
+ public VideoSink getServer() {
+ synchronized (this) {
+ if (m_primarySourceName == null) {
+ throw new VideoException("no camera available");
+ }
+ return getServer("serve_" + m_primarySourceName);
+ }
+ }
+
+ /**
+ * Gets a server by name.
+ *
+ * @param name Server name
+ * @return The server, or null if no server with that name exists.
+ */
+ public VideoSink getServer(String name) {
+ synchronized (this) {
+ return m_sinks.get(name);
+ }
+ }
+
+ /**
+ * Adds an already created camera.
+ *
+ * <p>The first camera added becomes the primary camera used by
+ * getVideo() and getServer(). A camera with the same name replaces any
+ * existing entry.
+ *
+ * @param camera Camera
+ */
+ public void addCamera(VideoSource camera) {
+ String name = camera.getName();
+ synchronized (this) {
+ if (m_primarySourceName == null) {
+ m_primarySourceName = name;
+ }
+ m_sources.put(name, camera);
+ }
+ }
+
+ /**
+ * Removes a camera by name.
+ *
+ * <p>Removing an unknown name is a no-op. The primary camera name is
+ * not reset, even if the primary camera is the one removed.
+ *
+ * @param name Camera name
+ */
+ public void removeCamera(String name) {
+ synchronized (this) {
+ m_sources.remove(name);
+ }
+ }
+}
diff --git a/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerShared.java b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerShared.java
new file mode 100644
index 0000000..32a4d01
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerShared.java
@@ -0,0 +1,46 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.cameraserver;
+
+
+public interface CameraServerShared {
+ /**
+ * Get the id of the main robot thread.
+ *
+ * @return the main robot thread id, or null if it is not known
+ */
+ Long getRobotMainThreadId();
+
+ /**
+ * Report an error to the driver station.
+ *
+ * @param error the error to set
+ */
+ void reportDriverStationError(String error);
+
+ /**
+ * Report a video server usage.
+ *
+ * @param id the usage id
+ */
+ void reportVideoServer(int id);
+
+ /**
+ * Report a USB camera usage.
+ *
+ * @param id the usage id
+ */
+ void reportUsbCamera(int id);
+
+ /**
+ * Report an Axis camera usage.
+ *
+ * @param id the usage id
+ */
+ void reportAxisCamera(int id);
+}
diff --git a/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerSharedStore.java b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerSharedStore.java
new file mode 100644
index 0000000..c0cf2bb
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/cameraserver/CameraServerSharedStore.java
@@ -0,0 +1,58 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.cameraserver;
+
+public final class CameraServerSharedStore {
+ // Lazily replaced by a no-op default if never set; guarded by the
+ // class lock via the synchronized accessors below.
+ private static CameraServerShared cameraServerShared;
+
+ private CameraServerSharedStore() {
+ }
+
+ /**
+ * Get the CameraServerShared object. If no implementation has been
+ * registered via {@link #setCameraServerShared}, a no-op default is
+ * created whose report methods do nothing and whose main thread id
+ * is null.
+ *
+ * @return the shared object (never null)
+ */
+ public static synchronized CameraServerShared getCameraServerShared() {
+ if (cameraServerShared == null) {
+ cameraServerShared = new CameraServerShared() {
+
+ @Override
+ public void reportVideoServer(int id) {
+
+ }
+
+ @Override
+ public void reportUsbCamera(int id) {
+
+ }
+
+ @Override
+ public void reportDriverStationError(String error) {
+
+ }
+
+ @Override
+ public void reportAxisCamera(int id) {
+
+ }
+
+ @Override
+ public Long getRobotMainThreadId() {
+ return null;
+ }
+ };
+ }
+ return cameraServerShared;
+ }
+
+ /**
+ * Set the CameraServerShared object, replacing any existing one.
+ *
+ * @param shared the implementation to use
+ */
+ public static synchronized void setCameraServerShared(CameraServerShared shared) {
+ cameraServerShared = shared;
+ }
+}
diff --git a/cameraserver/src/main/java/edu/wpi/first/vision/VisionPipeline.java b/cameraserver/src/main/java/edu/wpi/first/vision/VisionPipeline.java
new file mode 100644
index 0000000..6df10e7
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/vision/VisionPipeline.java
@@ -0,0 +1,26 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.vision;
+
+import org.opencv.core.Mat;
+
+/**
+ * A vision pipeline is responsible for running a group of
+ * OpenCV algorithms to extract data from an image.
+ *
+ * @see VisionRunner
+ * @see VisionThread
+ */
+public interface VisionPipeline {
+ /**
+ * Processes the image input and sets the result objects.
+ * Implementations should make these objects accessible.
+ *
+ * @param image the image to run the pipeline on
+ */
+ void process(Mat image);
+
+}
diff --git a/cameraserver/src/main/java/edu/wpi/first/vision/VisionRunner.java b/cameraserver/src/main/java/edu/wpi/first/vision/VisionRunner.java
new file mode 100644
index 0000000..24ffcda
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/vision/VisionRunner.java
@@ -0,0 +1,128 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.vision;
+
+import org.opencv.core.Mat;
+
+import edu.wpi.cscore.CvSink;
+import edu.wpi.cscore.VideoSource;
+import edu.wpi.first.cameraserver.CameraServerSharedStore;
+
+/**
+ * A vision runner is a convenient wrapper object to make it easy to run vision pipelines
+ * from robot code. The easiest way to use this is to run it in a {@link VisionThread}
+ * and use the listener to take snapshots of the pipeline's outputs.
+ *
+ * @see VisionPipeline
+ * @see VisionThread
+ * @see <a href="package-summary.html">vision</a>
+ */
+public class VisionRunner<P extends VisionPipeline> {
+ // Sink used to pull frames from the video source into m_image.
+ private final CvSink m_cvSink = new CvSink("VisionRunner CvSink");
+ private final P m_pipeline;
+ // Reused for every frame to avoid per-frame allocation.
+ private final Mat m_image = new Mat();
+ private final Listener<? super P> m_listener;
+ // volatile so a stop() call from another thread is seen by runForever().
+ private volatile boolean m_enabled = true;
+
+ /**
+ * Listener interface for a callback that should run after a pipeline has processed its input.
+ *
+ * @param <P> the type of the pipeline this listener is for
+ */
+ @FunctionalInterface
+ public interface Listener<P extends VisionPipeline> {
+ /**
+ * Called when the pipeline has run. This shouldn't take much time to run because it will delay
+ * later calls to the pipeline's {@link VisionPipeline#process process} method. Copying the
+ * outputs and code that uses the copies should be <i>synchronized</i> on the same mutex to
+ * prevent multiple threads from reading and writing to the same memory at the same time.
+ *
+ * @param pipeline the vision pipeline that ran
+ */
+ void copyPipelineOutputs(P pipeline);
+
+ }
+
+ /**
+ * Creates a new vision runner. It will take images from the {@code videoSource}, send them to
+ * the {@code pipeline}, and call the {@code listener} when the pipeline has finished to alert
+ * user code when it is safe to access the pipeline's outputs.
+ *
+ * @param videoSource the video source to use to supply images for the pipeline
+ * @param pipeline the vision pipeline to run
+ * @param listener a function to call after the pipeline has finished running
+ */
+ public VisionRunner(VideoSource videoSource, P pipeline, Listener<? super P> listener) {
+ this.m_pipeline = pipeline;
+ this.m_listener = listener;
+ m_cvSink.setSource(videoSource);
+ }
+
+ /**
+ * Runs the pipeline one time, giving it the next image from the video source specified
+ * in the constructor. This will block until the source either has an image or throws an error.
+ * If the source successfully supplied a frame, the pipeline's image input will be set,
+ * the pipeline will run, and the listener specified in the constructor will be called to notify
+ * it that the pipeline ran.
+ *
+ * <p>This method is exposed to allow teams to add additional functionality or have their own
+ * ways to run the pipeline. Most teams, however, should just use {@link #runForever} in its own
+ * thread using a {@link VisionThread}.</p>
+ *
+ * @throws IllegalStateException if this is called from the main robot thread
+ */
+ public void runOnce() {
+ Long id = CameraServerSharedStore.getCameraServerShared().getRobotMainThreadId();
+
+ // The main thread id may be unknown (null); only guard when it is set.
+ if (id != null && Thread.currentThread().getId() == id.longValue()) {
+ throw new IllegalStateException(
+ "VisionRunner.runOnce() cannot be called from the main robot thread");
+ }
+ runOnceInternal();
+ }
+
+ // Grabs one frame and runs the pipeline + listener; frame-grab errors are
+ // reported to the driver station instead of being thrown.
+ private void runOnceInternal() {
+ long frameTime = m_cvSink.grabFrame(m_image);
+ if (frameTime == 0) {
+ // There was an error, report it
+ String error = m_cvSink.getError();
+ CameraServerSharedStore.getCameraServerShared().reportDriverStationError(error);
+ } else {
+ // No errors, process the image
+ m_pipeline.process(m_image);
+ m_listener.copyPipelineOutputs(m_pipeline);
+ }
+ }
+
+ /**
+ * A convenience method that calls {@link #runOnce()} in an infinite loop. This must
+ * be run in a dedicated thread, and cannot be used in the main robot thread because
+ * it will freeze the robot program.
+ *
+ * <p><strong>Do not call this method directly from the main thread.</strong></p>
+ *
+ * @throws IllegalStateException if this is called from the main robot thread
+ * @see VisionThread
+ */
+ public void runForever() {
+ Long id = CameraServerSharedStore.getCameraServerShared().getRobotMainThreadId();
+
+ if (id != null && Thread.currentThread().getId() == id.longValue()) {
+ throw new IllegalStateException(
+ "VisionRunner.runForever() cannot be called from the main robot thread");
+ }
+ while (m_enabled && !Thread.interrupted()) {
+ runOnceInternal();
+ }
+ }
+
+ /**
+ * Stops a {@link #runForever()} loop.
+ */
+ public void stop() {
+ m_enabled = false;
+ }
+}
diff --git a/cameraserver/src/main/java/edu/wpi/first/vision/VisionThread.java b/cameraserver/src/main/java/edu/wpi/first/vision/VisionThread.java
new file mode 100644
index 0000000..576cb96
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/vision/VisionThread.java
@@ -0,0 +1,47 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+package edu.wpi.first.vision;
+
+import edu.wpi.cscore.VideoSource;
+
+/**
+ * A vision thread is a special thread that runs a vision pipeline. It is a <i>daemon</i> thread;
+ * it does not prevent the program from exiting when all other non-daemon threads
+ * have finished running.
+ *
+ * @see VisionPipeline
+ * @see VisionRunner
+ * @see Thread#setDaemon(boolean)
+ */
+public class VisionThread extends Thread {
+ /**
+ * Creates a vision thread that continuously runs a {@link VisionPipeline}.
+ *
+ * @param visionRunner the runner for a vision pipeline
+ */
+ public VisionThread(VisionRunner<?> visionRunner) {
+ super(visionRunner::runForever, "WPILib Vision Thread");
+ // Daemon: this thread does not keep the program alive on its own.
+ setDaemon(true);
+ }
+
+ /**
+ * Creates a new vision thread that continuously runs the given vision pipeline. This is
+ * equivalent to {@code new VisionThread(new VisionRunner<>(videoSource, pipeline, listener))}.
+ *
+ * @param videoSource the source for images the pipeline should process
+ * @param pipeline the pipeline to run
+ * @param listener the listener to copy outputs from the pipeline after it runs
+ * @param <P> the type of the pipeline
+ */
+ public <P extends VisionPipeline> VisionThread(VideoSource videoSource,
+ P pipeline,
+ VisionRunner.Listener<? super P> listener) {
+ this(new VisionRunner<>(videoSource, pipeline, listener));
+ }
+
+}
diff --git a/cameraserver/src/main/java/edu/wpi/first/vision/package-info.java b/cameraserver/src/main/java/edu/wpi/first/vision/package-info.java
new file mode 100644
index 0000000..e2e5c62
--- /dev/null
+++ b/cameraserver/src/main/java/edu/wpi/first/vision/package-info.java
@@ -0,0 +1,88 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+/**
+ * Classes in the {@code edu.wpi.first.vision} package are designed to
+ * simplify using OpenCV vision processing code from a robot program.
+ *
+ * <p>An example use case for grabbing a yellow tote from 2015 in autonomous:
+ * <br>
+ * <pre><code>
+ * public class Robot extends IterativeRobot
+ * implements VisionRunner.Listener<MyFindTotePipeline> {
+ *
+ * // A USB camera connected to the roboRIO.
+ * private {@link edu.wpi.cscore.VideoSource VideoSource} usbCamera;
+ *
+ * // A vision pipeline. This could be handwritten or generated by GRIP.
+ * // This has to implement {@link edu.wpi.first.vision.VisionPipeline}.
+ * // For this example, assume that it's perfect and will always see the tote.
+ * private MyFindTotePipeline findTotePipeline;
+ * private {@link edu.wpi.first.vision.VisionThread} findToteThread;
+ *
+ * // The object to synchronize on to make sure the vision thread doesn't
+ * // write to variables the main thread is using.
+ * private final Object visionLock = new Object();
+ *
+ * // The pipeline outputs we want
+ * private boolean pipelineRan = false; // lets us know when the pipeline has actually run
+ * private double angleToTote = 0;
+ * private double distanceToTote = 0;
+ *
+ * {@literal @}Override
+ * public void {@link edu.wpi.first.vision.VisionRunner.Listener#copyPipelineOutputs
+ * copyPipelineOutputs(MyFindTotePipeline pipeline)} {
+ * synchronized (visionLock) {
+ * // Take a snapshot of the pipeline's output because
+ * // it may have changed the next time this method is called!
+ * this.pipelineRan = true;
+ * this.angleToTote = pipeline.getAngleToTote();
+ * this.distanceToTote = pipeline.getDistanceToTote();
+ * }
+ * }
+ *
+ * {@literal @}Override
+ * public void robotInit() {
+ * usbCamera = CameraServer.getInstance().startAutomaticCapture(0);
+ * findTotePipeline = new MyFindTotePipeline();
+ * findToteThread = new VisionThread(usbCamera, findTotePipeline, this);
+ * }
+ *
+ * {@literal @}Override
+ * public void autonomousInit() {
+ * findToteThread.start();
+ * }
+ *
+ * {@literal @}Override
+ * public void autonomousPeriodic() {
+ * double angle;
+ * double distance;
+ * synchronized (visionLock) {
+ * if (!pipelineRan) {
+ * // Wait until the pipeline has run
+ * return;
+ * }
+ * // Copy the outputs to make sure they're all from the same run
+ * angle = this.angleToTote;
+ * distance = this.distanceToTote;
+ * }
+ * if (!aimedAtTote()) {
+ * turnToAngle(angle);
+ * } else if (!droveToTote()) {
+ * driveDistance(distance);
+ * } else if (!grabbedTote()) {
+ * grabTote();
+ * } else {
+ * // Tote was grabbed and we're done!
+ * return;
+ * }
+ * }
+ *
+ * }
+ * </code></pre>
+ */
+package edu.wpi.first.vision;
diff --git a/cameraserver/src/main/native/cpp/cameraserver/CameraServer.cpp b/cameraserver/src/main/native/cpp/cameraserver/CameraServer.cpp
new file mode 100644
index 0000000..3069d22
--- /dev/null
+++ b/cameraserver/src/main/native/cpp/cameraserver/CameraServer.cpp
@@ -0,0 +1,695 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#include "cameraserver/CameraServer.h"
+
+#include <atomic>
+#include <vector>
+
+#include <networktables/NetworkTable.h>
+#include <networktables/NetworkTableInstance.h>
+#include <wpi/DenseMap.h>
+#include <wpi/SmallString.h>
+#include <wpi/StringMap.h>
+#include <wpi/mutex.h>
+#include <wpi/raw_ostream.h>
+
+#include "cameraserver/CameraServerShared.h"
+#include "ntcore_cpp.h"
+
+using namespace frc;
+
+static constexpr char const* kPublishName = "/CameraPublisher";
+
+// Private implementation (pimpl) state for CameraServer.
+struct CameraServer::Impl {
+ Impl();
+ // Returns the NetworkTables subtable registered for a source handle, or a
+ // null shared_ptr if none exists.
+ std::shared_ptr<nt::NetworkTable> GetSourceTable(CS_Source source);
+ // Builds the "streams" URL values advertised for an MJPEG server sink.
+ std::vector<std::string> GetSinkStreamValues(CS_Sink sink);
+ // Builds the "streams" URL values advertised for an HTTP camera source.
+ std::vector<std::string> GetSourceStreamValues(CS_Source source);
+ // Republishes the "streams" entries for all registered sinks and sources.
+ void UpdateStreamValues();
+
+ wpi::mutex m_mutex;  // guards the containers and strings below
+ std::atomic<int> m_defaultUsbDevice;  // next device number for no-arg StartAutomaticCapture()
+ std::string m_primarySourceName;  // first camera added; used by GetVideo()/GetServer()
+ wpi::StringMap<cs::VideoSource> m_sources;  // registered cameras, by name
+ wpi::StringMap<cs::VideoSink> m_sinks;  // registered servers/sinks, by name
+ wpi::DenseMap<CS_Source, std::shared_ptr<nt::NetworkTable>> m_tables;  // per-source NT subtables
+ std::shared_ptr<nt::NetworkTable> m_publishTable;  // the "/CameraPublisher" root table
+ cs::VideoListener m_videoListener;  // receives cscore video events
+ int m_tableListener;  // ntcore entry-listener handle
+ int m_nextPort;  // next TCP port handed out by AddServer(name)
+ std::vector<std::string> m_addresses;  // cached local network interface addresses
+};
+
+// Returns the process-wide CameraServer singleton.  The function-local static
+// is lazily constructed on first call (thread-safe per C++11 magic statics).
+CameraServer* CameraServer::GetInstance() {
+ static CameraServer instance;
+ return &instance;
+}
+
+// Formats the NT "source" value for a camera: "usb:<device path>",
+// "ip:<first URL>", "cv:", or "unknown:".  The returned StringRef may point
+// into |buf|, so it is only valid while |buf| is alive and unmodified.
+static wpi::StringRef MakeSourceValue(CS_Source source,
+ wpi::SmallVectorImpl<char>& buf) {
+ CS_Status status = 0;
+ buf.clear();
+ switch (cs::GetSourceKind(source, &status)) {
+ case cs::VideoSource::kUsb: {
+ wpi::StringRef prefix{"usb:"};
+ buf.append(prefix.begin(), prefix.end());
+ auto path = cs::GetUsbCameraPath(source, &status);
+ buf.append(path.begin(), path.end());
+ break;
+ }
+ case cs::VideoSource::kHttp: {
+ wpi::StringRef prefix{"ip:"};
+ buf.append(prefix.begin(), prefix.end());
+ auto urls = cs::GetHttpCameraUrls(source, &status);
+ // Only the first URL is advertised; remaining URLs are ignored here.
+ if (!urls.empty()) buf.append(urls[0].begin(), urls[0].end());
+ break;
+ }
+ case cs::VideoSource::kCv:
+ return "cv:";
+ default:
+ return "unknown:";
+ }
+
+ return wpi::StringRef{buf.begin(), buf.size()};
+}
+
+// Formats a stream URL value: "mjpg:http://<address>:<port>/?action=stream".
+static std::string MakeStreamValue(const wpi::Twine& address, int port) {
+ return ("mjpg:http://" + address + wpi::Twine(':') + wpi::Twine(port) +
+ "/?action=stream")
+ .str();
+}
+
+// Thread-safe lookup of the NT subtable for a source.  DenseMap::lookup
+// returns a default-constructed (null) shared_ptr for unknown handles.
+std::shared_ptr<nt::NetworkTable> CameraServer::Impl::GetSourceTable(
+ CS_Source source) {
+ std::lock_guard<wpi::mutex> lock(m_mutex);
+ return m_tables.lookup(source);
+}
+
+// Builds the list of stream URLs to publish for an MJPEG server sink.
+// Non-MJPEG sinks yield an empty list.  Reads m_addresses, so callers should
+// hold m_mutex (the only caller, UpdateStreamValues, does).
+// NOTE(review): m_addresses is also written by the video-event callback
+// without the lock — confirm those callbacks are serialized.
+std::vector<std::string> CameraServer::Impl::GetSinkStreamValues(CS_Sink sink) {
+ CS_Status status = 0;
+
+ // Ignore all but MjpegServer
+ if (cs::GetSinkKind(sink, &status) != CS_SINK_MJPEG)
+ return std::vector<std::string>{};
+
+ // Get port
+ int port = cs::GetMjpegServerPort(sink, &status);
+
+ // Generate values
+ std::vector<std::string> values;
+ auto listenAddress = cs::GetMjpegServerListenAddress(sink, &status);
+ if (!listenAddress.empty()) {
+ // If a listen address is specified, only use that
+ values.emplace_back(MakeStreamValue(listenAddress, port));
+ } else {
+ // Otherwise generate for hostname and all interface addresses
+ values.emplace_back(MakeStreamValue(cs::GetHostname() + ".local", port));
+
+ for (const auto& addr : m_addresses) {
+ if (addr == "127.0.0.1") continue; // ignore localhost
+ values.emplace_back(MakeStreamValue(addr, port));
+ }
+ }
+
+ return values;
+}
+
+// Builds the list of stream URLs to publish for an HTTP camera source:
+// the camera's own URLs (prefixed "mjpg:"), plus a USB-interface passthrough
+// URL if an MJPEG server is re-serving this source.
+std::vector<std::string> CameraServer::Impl::GetSourceStreamValues(
+ CS_Source source) {
+ CS_Status status = 0;
+
+ // Ignore all but HttpCamera
+ if (cs::GetSourceKind(source, &status) != CS_SOURCE_HTTP)
+ return std::vector<std::string>{};
+
+ // Generate values
+ auto values = cs::GetHttpCameraUrls(source, &status);
+ for (auto& value : values) value = "mjpg:" + value;
+
+ // Look to see if we have a passthrough server for this source
+ for (const auto& i : m_sinks) {
+ CS_Sink sink = i.second.GetHandle();
+ CS_Source sinkSource = cs::GetSinkSource(sink, &status);
+ if (source == sinkSource &&
+ cs::GetSinkKind(sink, &status) == CS_SINK_MJPEG) {
+ // Add USB-only passthrough
+ // (172.22.11.2 is presumably the roboRIO's USB interface address —
+ // TODO(review) confirm)
+ int port = cs::GetMjpegServerPort(sink, &status);
+ values.emplace_back(MakeStreamValue("172.22.11.2", port));
+ break;
+ }
+ }
+
+ // The caller publishes these under the source's "streams" entry.
+ return values;
+}
+
+// Recomputes and republishes the "streams" NT entry for every registered
+// sink and source.  Called when sinks or network interfaces change.
+void CameraServer::Impl::UpdateStreamValues() {
+ std::lock_guard<wpi::mutex> lock(m_mutex);
+ // Over all the sinks...
+ for (const auto& i : m_sinks) {
+ CS_Status status = 0;
+ CS_Sink sink = i.second.GetHandle();
+
+ // Get the source's subtable (if none exists, skip this sink)
+ CS_Source source = cs::GetSinkSource(sink, &status);
+ if (source == 0) continue;  // sink has no source assigned
+ auto table = m_tables.lookup(source);
+ if (table) {
+ // Don't set stream values if this is a HttpCamera passthrough
+ if (cs::GetSourceKind(source, &status) == CS_SOURCE_HTTP) continue;
+
+ // Set table value
+ auto values = GetSinkStreamValues(sink);
+ if (!values.empty()) table->GetEntry("streams").SetStringArray(values);
+ }
+ }
+
+ // Over all the sources...
+ for (const auto& i : m_sources) {
+ CS_Source source = i.second.GetHandle();
+
+ // Get the source's subtable (if none exists, skip this source)
+ auto table = m_tables.lookup(source);
+ if (table) {
+ // Set table value
+ auto values = GetSourceStreamValues(source);
+ if (!values.empty()) table->GetEntry("streams").SetStringArray(values);
+ }
+ }
+}
+
+// Maps a cs::VideoMode::PixelFormat enumerator to its display name.
+static std::string PixelFormatToString(int pixelFormat) {
+ switch (pixelFormat) {
+ case cs::VideoMode::PixelFormat::kMJPEG:
+ return "MJPEG";
+ case cs::VideoMode::PixelFormat::kYUYV:
+ return "YUYV";
+ case cs::VideoMode::PixelFormat::kRGB565:
+ return "RGB565";
+ case cs::VideoMode::PixelFormat::kBGR:
+ return "BGR";
+ case cs::VideoMode::PixelFormat::kGray:
+ return "Gray";
+ default:
+ return "Unknown";
+ }
+}
+
+// Formats a video mode as "<width>x<height> <format> <fps> fps".
+static std::string VideoModeToString(const cs::VideoMode& mode) {
+ std::string rv;
+ wpi::raw_string_ostream oss{rv};
+ oss << mode.width << "x" << mode.height;
+ oss << " " << PixelFormatToString(mode.pixelFormat) << " ";
+ oss << mode.fps << " fps";
+ return oss.str();
+}
+
+// Enumerates a source's supported video modes as display strings for the
+// NT "modes" entry.
+static std::vector<std::string> GetSourceModeValues(int source) {
+ std::vector<std::string> rv;
+ CS_Status status = 0;
+ for (const auto& mode : cs::EnumerateSourceVideoModes(source, &status))
+ rv.emplace_back(VideoModeToString(mode));
+ return rv;
+}
+
+// Publishes a source property value (and, for new properties, its supporting
+// min/max/step/default info) to the source's NT subtable.  Properties named
+// "raw_*" go under RawProperty/RawPropertyInfo; all others under
+// Property/PropertyInfo.  When |isNew|, SetDefault* is used so an existing
+// NT value is not overwritten.
+static void PutSourcePropertyValue(nt::NetworkTable* table,
+ const cs::VideoEvent& event, bool isNew) {
+ wpi::SmallString<64> name;
+ wpi::SmallString<64> infoName;
+ if (wpi::StringRef{event.name}.startswith("raw_")) {
+ name = "RawProperty/";
+ name += event.name;
+ infoName = "RawPropertyInfo/";
+ infoName += event.name;
+ } else {
+ name = "Property/";
+ name += event.name;
+ infoName = "PropertyInfo/";
+ infoName += event.name;
+ }
+
+ wpi::SmallString<64> buf;  // NOTE(review): appears unused — candidate for removal
+ CS_Status status = 0;
+ nt::NetworkTableEntry entry = table->GetEntry(name);
+ switch (event.propertyKind) {
+ case cs::VideoProperty::kBoolean:
+ if (isNew)
+ entry.SetDefaultBoolean(event.value != 0);
+ else
+ entry.SetBoolean(event.value != 0);
+ break;
+ case cs::VideoProperty::kInteger:
+ case cs::VideoProperty::kEnum:
+ if (isNew) {
+ entry.SetDefaultDouble(event.value);
+ table->GetEntry(infoName + "/min")
+ .SetDouble(cs::GetPropertyMin(event.propertyHandle, &status));
+ table->GetEntry(infoName + "/max")
+ .SetDouble(cs::GetPropertyMax(event.propertyHandle, &status));
+ table->GetEntry(infoName + "/step")
+ .SetDouble(cs::GetPropertyStep(event.propertyHandle, &status));
+ table->GetEntry(infoName + "/default")
+ .SetDouble(cs::GetPropertyDefault(event.propertyHandle, &status));
+ } else {
+ entry.SetDouble(event.value);
+ }
+ break;
+ case cs::VideoProperty::kString:
+ if (isNew)
+ entry.SetDefaultString(event.valueStr);
+ else
+ entry.SetString(event.valueStr);
+ break;
+ default:
+ break;
+ }
+}
+
+// Constructs the pimpl state: creates the "/CameraPublisher" root table and
+// installs (1) a cscore video-event listener that mirrors source state into
+// NetworkTables and (2) an NT entry listener that reverts external edits.
+CameraServer::Impl::Impl()
+ : m_publishTable{nt::NetworkTableInstance::GetDefault().GetTable(
+ kPublishName)},
+ m_nextPort(kBasePort) {
+ // We publish sources to NetworkTables using the following structure:
+ // "/CameraPublisher/{Source.Name}/" - root
+ // - "source" (string): Descriptive, prefixed with type (e.g. "usb:0")
+ // - "streams" (string array): URLs that can be used to stream data
+ // - "description" (string): Description of the source
+ // - "connected" (boolean): Whether source is connected
+ // - "mode" (string): Current video mode
+ // - "modes" (string array): Available video modes
+ // - "Property/{Property}" - Property values
+ // - "PropertyInfo/{Property}" - Property supporting information
+
+ // Listener for video events
+ // ([=] captures `this`; safe because the listener is a member and cannot
+ // outlive this Impl.)
+ m_videoListener = cs::VideoListener{
+ [=](const cs::VideoEvent& event) {
+ CS_Status status = 0;
+ switch (event.kind) {
+ case cs::VideoEvent::kSourceCreated: {
+ // Create subtable for the camera
+ auto table = m_publishTable->GetSubTable(event.name);
+ {
+ std::lock_guard<wpi::mutex> lock(m_mutex);
+ m_tables.insert(std::make_pair(event.sourceHandle, table));
+ }
+ wpi::SmallString<64> buf;
+ table->GetEntry("source").SetString(
+ MakeSourceValue(event.sourceHandle, buf));
+ wpi::SmallString<64> descBuf;
+ table->GetEntry("description")
+ .SetString(cs::GetSourceDescription(event.sourceHandle, descBuf,
+ &status));
+ table->GetEntry("connected")
+ .SetBoolean(cs::IsSourceConnected(event.sourceHandle, &status));
+ table->GetEntry("streams").SetStringArray(
+ GetSourceStreamValues(event.sourceHandle));
+ auto mode = cs::GetSourceVideoMode(event.sourceHandle, &status);
+ table->GetEntry("mode").SetDefaultString(VideoModeToString(mode));
+ table->GetEntry("modes").SetStringArray(
+ GetSourceModeValues(event.sourceHandle));
+ break;
+ }
+ case cs::VideoEvent::kSourceDestroyed: {
+ // Clear published values but keep the subtable registered.
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) {
+ table->GetEntry("source").SetString("");
+ table->GetEntry("streams").SetStringArray(
+ std::vector<std::string>{});
+ table->GetEntry("modes").SetStringArray(
+ std::vector<std::string>{});
+ }
+ break;
+ }
+ case cs::VideoEvent::kSourceConnected: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) {
+ // update the description too (as it may have changed)
+ wpi::SmallString<64> descBuf;
+ table->GetEntry("description")
+ .SetString(cs::GetSourceDescription(event.sourceHandle,
+ descBuf, &status));
+ table->GetEntry("connected").SetBoolean(true);
+ }
+ break;
+ }
+ case cs::VideoEvent::kSourceDisconnected: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) table->GetEntry("connected").SetBoolean(false);
+ break;
+ }
+ case cs::VideoEvent::kSourceVideoModesUpdated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table)
+ table->GetEntry("modes").SetStringArray(
+ GetSourceModeValues(event.sourceHandle));
+ break;
+ }
+ case cs::VideoEvent::kSourceVideoModeChanged: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table)
+ table->GetEntry("mode").SetString(VideoModeToString(event.mode));
+ break;
+ }
+ case cs::VideoEvent::kSourcePropertyCreated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) PutSourcePropertyValue(table.get(), event, true);
+ break;
+ }
+ case cs::VideoEvent::kSourcePropertyValueUpdated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) PutSourcePropertyValue(table.get(), event, false);
+ break;
+ }
+ case cs::VideoEvent::kSourcePropertyChoicesUpdated: {
+ auto table = GetSourceTable(event.sourceHandle);
+ if (table) {
+ wpi::SmallString<64> name{"PropertyInfo/"};
+ name += event.name;
+ name += "/choices";
+ auto choices =
+ cs::GetEnumPropertyChoices(event.propertyHandle, &status);
+ table->GetEntry(name).SetStringArray(choices);
+ }
+ break;
+ }
+ case cs::VideoEvent::kSinkSourceChanged:
+ case cs::VideoEvent::kSinkCreated:
+ case cs::VideoEvent::kSinkDestroyed:
+ case cs::VideoEvent::kNetworkInterfacesChanged: {
+ // Sink topology or network changed: refresh the cached interface
+ // list and republish all stream URLs.
+ // NOTE(review): m_addresses is written here without holding
+ // m_mutex while GetSinkStreamValues() reads it under the lock —
+ // confirm these callbacks are serialized.
+ m_addresses = cs::GetNetworkInterfaces();
+ UpdateStreamValues();
+ break;
+ }
+ default:
+ break;
+ }
+ },
+ // 0x4fff: mask of event kinds to receive; true: also notify of items
+ // that already exist — TODO(review) confirm against VideoListener docs.
+ 0x4fff, true};
+
+ // Listener for NetworkTable events
+ // We don't currently support changing settings via NT due to
+ // synchronization issues, so just update to current setting if someone
+ // else tries to change it.
+ wpi::SmallString<64> buf;  // NOTE(review): appears unused — candidate for removal
+ m_tableListener = nt::NetworkTableInstance::GetDefault().AddEntryListener(
+ kPublishName + wpi::Twine('/'),
+ [=](const nt::EntryNotification& event) {
+ wpi::StringRef relativeKey =
+ event.name.substr(wpi::StringRef(kPublishName).size() + 1);
+
+ // get source (sourceName/...)
+ auto subKeyIndex = relativeKey.find('/');
+ if (subKeyIndex == wpi::StringRef::npos) return;
+ wpi::StringRef sourceName = relativeKey.slice(0, subKeyIndex);
+ auto sourceIt = m_sources.find(sourceName);
+ if (sourceIt == m_sources.end()) return;
+
+ // get subkey
+ relativeKey = relativeKey.substr(subKeyIndex + 1);
+
+ // handle standard names
+ wpi::StringRef propName;
+ nt::NetworkTableEntry entry{event.entry};
+ if (relativeKey == "mode") {
+ // reset to current mode
+ entry.SetString(VideoModeToString(sourceIt->second.GetVideoMode()));
+ return;
+ } else if (relativeKey.startswith("Property/")) {
+ propName = relativeKey.substr(9);
+ } else if (relativeKey.startswith("RawProperty/")) {
+ propName = relativeKey.substr(12);
+ } else {
+ return; // ignore
+ }
+
+ // everything else is a property: write the camera's current value
+ // back over whatever the external writer set.
+ auto property = sourceIt->second.GetProperty(propName);
+ switch (property.GetKind()) {
+ case cs::VideoProperty::kNone:
+ return;
+ case cs::VideoProperty::kBoolean:
+ entry.SetBoolean(property.Get() != 0);
+ return;
+ case cs::VideoProperty::kInteger:
+ case cs::VideoProperty::kEnum:
+ entry.SetDouble(property.Get());
+ return;
+ case cs::VideoProperty::kString:
+ entry.SetString(property.GetString());
+ return;
+ default:
+ return;
+ }
+ },
+ NT_NOTIFY_IMMEDIATE | NT_NOTIFY_UPDATE);
+}
+
+// Defined out-of-line so std::unique_ptr<Impl> sees the complete Impl type
+// here (pimpl idiom); the header only forward-declares Impl.
+CameraServer::CameraServer() : m_impl(new Impl) {}
+
+CameraServer::~CameraServer() {}
+
+// USB-camera StartAutomaticCapture overloads.  Each constructs the camera,
+// registers it via StartAutomaticCapture(const cs::VideoSource&), and reports
+// usage through CameraServerShared.
+cs::UsbCamera CameraServer::StartAutomaticCapture() {
+ // Uses and post-increments the default device counter (0, 1, 2, ...).
+ cs::UsbCamera camera = StartAutomaticCapture(m_impl->m_defaultUsbDevice++);
+ auto csShared = GetCameraServerShared();
+ // NOTE(review): StartAutomaticCapture(int) above already reported this
+ // camera, so the no-arg overload reports it twice.
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::UsbCamera CameraServer::StartAutomaticCapture(int dev) {
+ cs::UsbCamera camera{"USB Camera " + wpi::Twine(dev), dev};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::UsbCamera CameraServer::StartAutomaticCapture(const wpi::Twine& name,
+ int dev) {
+ cs::UsbCamera camera{name, dev};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::UsbCamera CameraServer::StartAutomaticCapture(const wpi::Twine& name,
+ const wpi::Twine& path) {
+ cs::UsbCamera camera{name, path};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportUsbCamera(camera.GetHandle());
+ return camera;
+}
+
+// AddAxisCamera overloads.  The host-only forms delegate to the matching
+// (name, host) form with the default name "Axis Camera"; each named form
+// constructs the camera, registers it for automatic capture, and reports
+// usage through CameraServerShared.
+cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& host) {
+ return AddAxisCamera("Axis Camera", host);
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const char* host) {
+ return AddAxisCamera("Axis Camera", host);
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const std::string& host) {
+ return AddAxisCamera("Axis Camera", host);
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(wpi::ArrayRef<std::string> hosts) {
+ return AddAxisCamera("Axis Camera", hosts);
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
+ const wpi::Twine& host) {
+ cs::AxisCamera camera{name, host};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportAxisCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
+ const char* host) {
+ cs::AxisCamera camera{name, host};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportAxisCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
+ const std::string& host) {
+ cs::AxisCamera camera{name, host};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportAxisCamera(camera.GetHandle());
+ return camera;
+}
+
+cs::AxisCamera CameraServer::AddAxisCamera(const wpi::Twine& name,
+ wpi::ArrayRef<std::string> hosts) {
+ cs::AxisCamera camera{name, hosts};
+ StartAutomaticCapture(camera);
+ auto csShared = GetCameraServerShared();
+ csShared->ReportAxisCamera(camera.GetHandle());
+ return camera;
+}
+
+// Registers an existing camera and serves it through an MJPEG server named
+// "serve_<camera name>" (GetServer() relies on this naming).
+cs::MjpegServer CameraServer::StartAutomaticCapture(
+ const cs::VideoSource& camera) {
+ AddCamera(camera);
+ auto server = AddServer(wpi::Twine("serve_") + camera.GetName());
+ server.SetSource(camera);
+ return server;
+}
+
+// Returns a CvSink on the primary (first-added) camera.  Reports an error
+// and returns an empty sink if no camera has been added.
+cs::CvSink CameraServer::GetVideo() {
+ cs::VideoSource source;
+ {
+ auto csShared = GetCameraServerShared();
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ if (m_impl->m_primarySourceName.empty()) {
+ csShared->SetCameraServerError("no camera available");
+ return cs::CvSink{};
+ }
+ auto it = m_impl->m_sources.find(m_impl->m_primarySourceName);
+ if (it == m_impl->m_sources.end()) {
+ csShared->SetCameraServerError("no camera available");
+ return cs::CvSink{};
+ }
+ source = it->second;
+ }
+ // NOTE(review): the overload takes const&, so std::move has no effect here.
+ return GetVideo(std::move(source));
+}
+
+// Returns a CvSink named "opencv_<camera name>" for the given camera,
+// reusing an existing sink of that name if present.  Reports an error and
+// returns an empty sink if the name is taken by a non-CV sink.
+cs::CvSink CameraServer::GetVideo(const cs::VideoSource& camera) {
+ wpi::SmallString<64> name{"opencv_"};
+ name += camera.GetName();
+
+ {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ auto it = m_impl->m_sinks.find(name);
+ if (it != m_impl->m_sinks.end()) {
+ auto kind = it->second.GetKind();
+ if (kind != cs::VideoSink::kCv) {
+ auto csShared = GetCameraServerShared();
+ csShared->SetCameraServerError("expected OpenCV sink, but got " +
+ wpi::Twine(kind));
+ return cs::CvSink{};
+ }
+ // Downcast is safe: the kind check above guarantees a CvSink.
+ return *static_cast<cs::CvSink*>(&it->second);
+ }
+ }
+
+ cs::CvSink newsink{name};
+ newsink.SetSource(camera);
+ AddServer(newsink);
+ return newsink;
+}
+
+// Looks up a registered camera by name and returns a CvSink on it.  Reports
+// an error and returns an empty sink if the camera is unknown.
+cs::CvSink CameraServer::GetVideo(const wpi::Twine& name) {
+ wpi::SmallString<64> nameBuf;
+ wpi::StringRef nameStr = name.toStringRef(nameBuf);
+ cs::VideoSource source;
+ {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ auto it = m_impl->m_sources.find(nameStr);
+ if (it == m_impl->m_sources.end()) {
+ auto csShared = GetCameraServerShared();
+ csShared->SetCameraServerError("could not find camera " + nameStr);
+ return cs::CvSink{};
+ }
+ source = it->second;
+ }
+ return GetVideo(source);
+}
+
+// Creates an OpenCV-fed MJPEG stream (fixed at MJPEG pixel format, 30 fps)
+// and serves it automatically.
+cs::CvSource CameraServer::PutVideo(const wpi::Twine& name, int width,
+ int height) {
+ cs::CvSource source{name, cs::VideoMode::kMJPEG, width, height, 30};
+ StartAutomaticCapture(source);
+ return source;
+}
+
+// Creates an MJPEG server on the next available port (starting at kBasePort).
+cs::MjpegServer CameraServer::AddServer(const wpi::Twine& name) {
+ int port;
+ {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ port = m_impl->m_nextPort++;
+ }
+ return AddServer(name, port);
+}
+
+// Creates an MJPEG server on an explicit port and registers it.
+cs::MjpegServer CameraServer::AddServer(const wpi::Twine& name, int port) {
+ cs::MjpegServer server{name, port};
+ AddServer(server);
+ return server;
+}
+
+// Registers an already-created sink under its own name.  try_emplace keeps
+// an existing entry with the same name unchanged rather than replacing it.
+void CameraServer::AddServer(const cs::VideoSink& server) {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ m_impl->m_sinks.try_emplace(server.GetName(), server);
+}
+
+// Unregisters a sink by name (no-op if the name is unknown).
+void CameraServer::RemoveServer(const wpi::Twine& name) {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ wpi::SmallString<64> nameBuf;
+ m_impl->m_sinks.erase(name.toStringRef(nameBuf));
+}
+
+// Returns the server for the primary camera, relying on the "serve_<name>"
+// naming used by StartAutomaticCapture(const cs::VideoSource&).  The lock is
+// released before delegating, since GetServer(name) re-acquires it.
+cs::VideoSink CameraServer::GetServer() {
+ wpi::SmallString<64> name;
+ {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ if (m_impl->m_primarySourceName.empty()) {
+ auto csShared = GetCameraServerShared();
+ csShared->SetCameraServerError("no camera available");
+ return cs::VideoSink{};
+ }
+ name = "serve_";
+ name += m_impl->m_primarySourceName;
+ }
+ return GetServer(name);
+}
+
+// Looks up a registered server by name; reports an error and returns an
+// empty sink if not found.
+cs::VideoSink CameraServer::GetServer(const wpi::Twine& name) {
+ wpi::SmallString<64> nameBuf;
+ wpi::StringRef nameStr = name.toStringRef(nameBuf);
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ auto it = m_impl->m_sinks.find(nameStr);
+ if (it == m_impl->m_sinks.end()) {
+ auto csShared = GetCameraServerShared();
+ csShared->SetCameraServerError("could not find server " + nameStr);
+ return cs::VideoSink{};
+ }
+ return it->second;
+}
+
+// Registers a camera.  The first camera ever added becomes the "primary"
+// source used by the no-arg GetVideo()/GetServer()/SetSize().
+void CameraServer::AddCamera(const cs::VideoSource& camera) {
+ std::string name = camera.GetName();
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ if (m_impl->m_primarySourceName.empty()) m_impl->m_primarySourceName = name;
+ m_impl->m_sources.try_emplace(name, camera);
+}
+
+// Unregisters a camera by name.
+// NOTE(review): does not reset m_primarySourceName, so removing the primary
+// camera leaves the no-arg GetVideo()/GetServer() reporting errors.
+void CameraServer::RemoveCamera(const wpi::Twine& name) {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ wpi::SmallString<64> nameBuf;
+ m_impl->m_sources.erase(name.toStringRef(nameBuf));
+}
+
+// Applies one of the kSize* resolution presets to the primary camera.
+// (Marked deprecated in the header; silently does nothing if the size code
+// is unrecognized or no primary camera exists.)
+void CameraServer::SetSize(int size) {
+ std::lock_guard<wpi::mutex> lock(m_impl->m_mutex);
+ if (m_impl->m_primarySourceName.empty()) return;
+ auto it = m_impl->m_sources.find(m_impl->m_primarySourceName);
+ if (it == m_impl->m_sources.end()) return;
+ if (size == kSize160x120)
+ it->second.SetResolution(160, 120);
+ else if (size == kSize320x240)
+ it->second.SetResolution(320, 240);
+ else if (size == kSize640x480)
+ it->second.SetResolution(640, 480);
+}
diff --git a/cameraserver/src/main/native/cpp/cameraserver/CameraServerShared.cpp b/cameraserver/src/main/native/cpp/cameraserver/CameraServerShared.cpp
new file mode 100644
index 0000000..97ab090
--- /dev/null
+++ b/cameraserver/src/main/native/cpp/cameraserver/CameraServerShared.cpp
@@ -0,0 +1,43 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#include "cameraserver/CameraServerShared.h"
+
+#include <wpi/mutex.h>
+
+namespace {
+// No-op fallback used by GetCameraServerShared() when no robot-framework
+// implementation has been injected via SetCameraServerShared(): all reports
+// and errors are ignored, and no main-thread id is available.
+class DefaultCameraServerShared : public frc::CameraServerShared {
+ public:
+ void ReportUsbCamera(int id) override {}
+ void ReportAxisCamera(int id) override {}
+ void ReportVideoServer(int id) override {}
+ void SetCameraServerError(const wpi::Twine& error) override {}
+ void SetVisionRunnerError(const wpi::Twine& error) override {}
+ void ReportDriverStationError(const wpi::Twine& error) override {}
+ std::pair<std::thread::id, bool> GetRobotMainThreadId() const override {
+ // false => "no main thread id known"
+ return std::make_pair(std::thread::id(), false);
+ }
+};
+} // namespace
+
+namespace frc {
+
+// Process-wide CameraServerShared instance; guarded by setLock.
+static std::unique_ptr<CameraServerShared> cameraServerShared = nullptr;
+static wpi::mutex setLock;
+
+// Installs a framework-specific implementation (replaces any existing one).
+void SetCameraServerShared(std::unique_ptr<CameraServerShared> shared) {
+ std::unique_lock<wpi::mutex> lock(setLock);
+ cameraServerShared = std::move(shared);
+}
+// Returns the installed implementation, lazily creating the no-op default.
+// NOTE(review): the returned raw pointer is not lock-protected; a concurrent
+// SetCameraServerShared() call would destroy the object it points to.
+CameraServerShared* GetCameraServerShared() {
+ std::unique_lock<wpi::mutex> lock(setLock);
+ if (!cameraServerShared) {
+ cameraServerShared = std::make_unique<DefaultCameraServerShared>();
+ }
+ return cameraServerShared.get();
+}
+} // namespace frc
diff --git a/cameraserver/src/main/native/cpp/vision/VisionRunner.cpp b/cameraserver/src/main/native/cpp/vision/VisionRunner.cpp
new file mode 100644
index 0000000..9896bbd
--- /dev/null
+++ b/cameraserver/src/main/native/cpp/vision/VisionRunner.cpp
@@ -0,0 +1,59 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#include "vision/VisionRunner.h"
+
+#include <thread>
+
+#include <opencv2/core/mat.hpp>
+
+#include "cameraserver/CameraServerShared.h"
+
+using namespace frc;
+
+// Creates the runner with a CvSink attached to the given source; the frame
+// buffer is heap-held so the header only needs a cv::Mat forward declaration.
+VisionRunnerBase::VisionRunnerBase(cs::VideoSource videoSource)
+ : m_image(std::make_unique<cv::Mat>()),
+ m_cvSink("VisionRunner CvSink"),
+ m_enabled(true) {
+ m_cvSink.SetSource(videoSource);
+}
+
+// Located here and not in header due to cv::Mat forward declaration.
+VisionRunnerBase::~VisionRunnerBase() {}
+
+// Grabs one frame and runs the pipeline on it.  Refuses to run on the robot
+// main thread (when the framework can identify it) to avoid blocking it.
+void VisionRunnerBase::RunOnce() {
+ auto csShared = frc::GetCameraServerShared();
+ auto res = csShared->GetRobotMainThreadId();
+ if (res.second && (std::this_thread::get_id() == res.first)) {
+ csShared->SetVisionRunnerError(
+ "VisionRunner::RunOnce() cannot be called from the main robot thread");
+ return;
+ }
+ // A frame time of 0 indicates no frame was grabbed; report the sink's
+ // error instead of processing.
+ auto frameTime = m_cvSink.GrabFrame(*m_image);
+ if (frameTime == 0) {
+ auto error = m_cvSink.GetError();
+ csShared->ReportDriverStationError(error);
+ } else {
+ DoProcess(*m_image);
+ }
+}
+
+// Repeatedly calls RunOnce() until Stop() clears m_enabled (grab errors are
+// reported but do not end the loop).  Like RunOnce(), refuses to run on the
+// robot main thread.
+// NOTE(review): m_enabled is read here and written by Stop(), presumably from
+// another thread — confirm its declaration is std::atomic<bool>.
+void VisionRunnerBase::RunForever() {
+ auto csShared = frc::GetCameraServerShared();
+ auto res = csShared->GetRobotMainThreadId();
+ if (res.second && (std::this_thread::get_id() == res.first)) {
+ csShared->SetVisionRunnerError(
+ "VisionRunner::RunForever() cannot be called from the main robot "
+ "thread");
+ return;
+ }
+ while (m_enabled) {
+ RunOnce();
+ }
+}
+
+void VisionRunnerBase::Stop() { m_enabled = false; }
diff --git a/cameraserver/src/main/native/include/CameraServer.h b/cameraserver/src/main/native/include/CameraServer.h
new file mode 100644
index 0000000..2fcc0f2
--- /dev/null
+++ b/cameraserver/src/main/native/include/CameraServer.h
@@ -0,0 +1,19 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+// clang-format off
+#ifdef _MSC_VER
+#pragma message "warning: CameraServer.h is deprecated; include cameraserver/CameraServer.h instead"
+#else
+#warning "CameraServer.h is deprecated; include cameraserver/CameraServer.h instead"
+#endif
+
+// clang-format on
+
+#include "cameraserver/CameraServer.h"
diff --git a/cameraserver/src/main/native/include/cameraserver/CameraServer.h b/cameraserver/src/main/native/include/cameraserver/CameraServer.h
new file mode 100644
index 0000000..42a9f0c
--- /dev/null
+++ b/cameraserver/src/main/native/include/cameraserver/CameraServer.h
@@ -0,0 +1,289 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2014-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+
+#include <wpi/ArrayRef.h>
+#include <wpi/Twine.h>
+
+#include "cscore.h"
+
+namespace frc {
+
+/**
+ * Singleton class for creating and keeping camera servers.
+ *
+ * Also publishes camera information to NetworkTables.
+ */
+class CameraServer {
+ public:
+ static constexpr uint16_t kBasePort = 1181;
+ static constexpr int kSize640x480 = 0;
+ static constexpr int kSize320x240 = 1;
+ static constexpr int kSize160x120 = 2;
+
+ /**
+ * Get the CameraServer instance.
+ */
+ static CameraServer* GetInstance();
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * You should call this method to see a camera feed on the dashboard. If you
+ * also want to perform vision processing on the roboRIO, use GetVideo() to
+ * get access to the camera images.
+ *
+ * The first time this overload is called, it calls StartAutomaticCapture()
+ * with device 0, creating a camera named "USB Camera 0". Subsequent calls
+ * increment the device number (e.g. 1, 2, etc).
+ */
+ cs::UsbCamera StartAutomaticCapture();
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * This overload calls StartAutomaticCapture() with a name of "USB Camera
+ * {dev}".
+ *
+ * @param dev The device number of the camera interface
+ */
+ cs::UsbCamera StartAutomaticCapture(int dev);
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * @param name The name to give the camera
+ * @param dev The device number of the camera interface
+ */
+ cs::UsbCamera StartAutomaticCapture(const wpi::Twine& name, int dev);
+
+ /**
+ * Start automatically capturing images to send to the dashboard.
+ *
+ * @param name The name to give the camera
+ * @param path The device path (e.g. "/dev/video0") of the camera
+ */
+ cs::UsbCamera StartAutomaticCapture(const wpi::Twine& name,
+ const wpi::Twine& path);
+
+ /**
+ * Start automatically capturing images to send to the dashboard from
+ * an existing camera.
+ *
+ * @param camera Camera
+ */
+ cs::MjpegServer StartAutomaticCapture(const cs::VideoSource& camera);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * This overload calls AddAxisCamera() with name "Axis Camera".
+ *
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * This overload calls AddAxisCamera() with name "Axis Camera".
+ *
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const char* host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * This overload calls AddAxisCamera() with name "Axis Camera".
+ *
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const std::string& host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * This overload calls AddAxisCamera() with name "Axis Camera".
+ *
+ * @param hosts Array of Camera host IPs/DNS names
+ */
+ cs::AxisCamera AddAxisCamera(wpi::ArrayRef<std::string> hosts);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * This overload calls AddAxisCamera() with name "Axis Camera".
+ *
+ * @param hosts Array of Camera host IPs/DNS names
+ */
+ template <typename T>
+ cs::AxisCamera AddAxisCamera(std::initializer_list<T> hosts);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& name, const wpi::Twine& host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& name, const char* host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param host Camera host IP or DNS name (e.g. "10.x.y.11")
+ */
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& name, const std::string& host);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param hosts Array of Camera host IPs/DNS names
+ */
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& name,
+ wpi::ArrayRef<std::string> hosts);
+
+ /**
+ * Adds an Axis IP camera.
+ *
+ * @param name The name to give the camera
+ * @param hosts Array of Camera host IPs/DNS names
+ */
+ template <typename T>
+ cs::AxisCamera AddAxisCamera(const wpi::Twine& name,
+ std::initializer_list<T> hosts);
+
+ /**
+ * Get OpenCV access to the primary camera feed. This allows you to
+ * get images from the camera for image processing on the roboRIO.
+ *
+ * This is only valid to call after a camera feed has been added
+ * with StartAutomaticCapture() or AddServer().
+ */
+ cs::CvSink GetVideo();
+
+ /**
+ * Get OpenCV access to the specified camera. This allows you to get
+ * images from the camera for image processing on the roboRIO.
+ *
+ * @param camera Camera (e.g. as returned by startAutomaticCapture).
+ */
+ cs::CvSink GetVideo(const cs::VideoSource& camera);
+
+ /**
+ * Get OpenCV access to the specified camera. This allows you to get
+ * images from the camera for image processing on the roboRIO.
+ *
+ * @param name Camera name
+ */
+ cs::CvSink GetVideo(const wpi::Twine& name);
+
+ /**
+ * Create a MJPEG stream with OpenCV input. This can be called to pass custom
+ * annotated images to the dashboard.
+ *
+ * @param name Name to give the stream
+ * @param width Width of the image being sent
+ * @param height Height of the image being sent
+ */
+ cs::CvSource PutVideo(const wpi::Twine& name, int width, int height);
+
+ /**
+ * Adds a MJPEG server at the next available port.
+ *
+ * @param name Server name
+ */
+ cs::MjpegServer AddServer(const wpi::Twine& name);
+
+ /**
+ * Adds a MJPEG server.
+ *
+ * @param name Server name
+ * @param port Port number
+ */
+ cs::MjpegServer AddServer(const wpi::Twine& name, int port);
+
+ /**
+ * Adds an already created server.
+ *
+ * @param server Server
+ */
+ void AddServer(const cs::VideoSink& server);
+
+ /**
+ * Removes a server by name.
+ *
+ * @param name Server name
+ */
+ void RemoveServer(const wpi::Twine& name);
+
+ /**
+ * Get server for the primary camera feed.
+ *
+ * This is only valid to call after a camera feed has been added with
+ * StartAutomaticCapture() or AddServer().
+ */
+ cs::VideoSink GetServer();
+
+ /**
+ * Gets a server by name.
+ *
+ * @param name Server name
+ */
+ cs::VideoSink GetServer(const wpi::Twine& name);
+
+ /**
+ * Adds an already created camera.
+ *
+ * @param camera Camera
+ */
+ void AddCamera(const cs::VideoSource& camera);
+
+ /**
+ * Removes a camera by name.
+ *
+ * @param name Camera name
+ */
+ void RemoveCamera(const wpi::Twine& name);
+
+ /**
+ * Sets the size of the image to use. Use the public kSize constants to set
+ * the correct mode, or set it directly on a camera and call the appropriate
+ * StartAutomaticCapture method.
+ *
+ * @deprecated Use SetResolution on the UsbCamera returned by
+ * StartAutomaticCapture() instead.
+ * @param size The size to use
+ */
+ void SetSize(int size);
+
+ private:
+ CameraServer();
+ ~CameraServer();
+
+ struct Impl;
+ std::unique_ptr<Impl> m_impl;
+};
+
+} // namespace frc
+
+#include "cameraserver/CameraServer.inc"
diff --git a/cameraserver/src/main/native/include/cameraserver/CameraServer.inc b/cameraserver/src/main/native/include/cameraserver/CameraServer.inc
new file mode 100644
index 0000000..5daf29f
--- /dev/null
+++ b/cameraserver/src/main/native/include/cameraserver/CameraServer.inc
@@ -0,0 +1,30 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <string>
+#include <vector>
+
+namespace frc {
+
+template <typename T>
+inline cs::AxisCamera CameraServer::AddAxisCamera(
+ std::initializer_list<T> hosts) {
+ return AddAxisCamera("Axis Camera", hosts);  // delegate to the named overload with the default camera name
+}
+
+template <typename T>
+inline cs::AxisCamera CameraServer::AddAxisCamera(
+ const wpi::Twine& name, std::initializer_list<T> hosts) {
+ std::vector<std::string> vec;  // materialize the host list as owned strings
+ vec.reserve(hosts.size());  // one allocation up front; size is known
+ for (const auto& host : hosts) vec.emplace_back(host);  // T must be convertible to std::string
+ return AddAxisCamera(name, vec);  // forward to the std::vector<std::string> overload
+}
+
+} // namespace frc
diff --git a/cameraserver/src/main/native/include/cameraserver/CameraServerShared.h b/cameraserver/src/main/native/include/cameraserver/CameraServerShared.h
new file mode 100644
index 0000000..72b784e
--- /dev/null
+++ b/cameraserver/src/main/native/include/cameraserver/CameraServerShared.h
@@ -0,0 +1,31 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <memory>
+#include <thread>
+#include <utility>
+
+#include <wpi/Twine.h>
+
+namespace frc {
+class CameraServerShared {  // abstract seam between cameraserver and the hosting robot framework; see SetCameraServerShared() below
+ public:
+ virtual ~CameraServerShared() = default;
+ virtual void ReportUsbCamera(int id) = 0;  // report a USB camera instance by id
+ virtual void ReportAxisCamera(int id) = 0;  // report an Axis camera instance by id
+ virtual void ReportVideoServer(int id) = 0;  // report a video server instance by id
+ virtual void SetCameraServerError(const wpi::Twine& error) = 0;  // surface a CameraServer error message
+ virtual void SetVisionRunnerError(const wpi::Twine& error) = 0;  // surface a VisionRunner error message
+ virtual void ReportDriverStationError(const wpi::Twine& error) = 0;  // surface an error to the driver station
+ virtual std::pair<std::thread::id, bool> GetRobotMainThreadId() const = 0;  // NOTE(review): bool presumably means "id is valid/known" — confirm in implementations
+};
+
+void SetCameraServerShared(std::unique_ptr<CameraServerShared> shared);
+CameraServerShared* GetCameraServerShared();
+} // namespace frc
diff --git a/cameraserver/src/main/native/include/vision/VisionPipeline.h b/cameraserver/src/main/native/include/vision/VisionPipeline.h
new file mode 100644
index 0000000..de8e54c
--- /dev/null
+++ b/cameraserver/src/main/native/include/vision/VisionPipeline.h
@@ -0,0 +1,32 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+namespace cv {
+class Mat;
+} // namespace cv
+
+namespace frc {
+
+/**
+ * A vision pipeline is responsible for running a group of OpenCV algorithms to
+ * extract data from an image.
+ *
+ * @see VisionRunner
+ */
+class VisionPipeline {  // abstract base: user code derives and implements Process()
+ public:
+ virtual ~VisionPipeline() = default;
+
+ /**
+ * Processes the image input and sets the result objects. Implementations
+ * should make these objects accessible.
+ */
+ virtual void Process(cv::Mat& mat) = 0;  // invoked once per captured frame (see VisionRunner::DoProcess)
+};
+} // namespace frc
diff --git a/cameraserver/src/main/native/include/vision/VisionRunner.h b/cameraserver/src/main/native/include/vision/VisionRunner.h
new file mode 100644
index 0000000..a317f80
--- /dev/null
+++ b/cameraserver/src/main/native/include/vision/VisionRunner.h
@@ -0,0 +1,99 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+#include <atomic>
+#include <functional>
+#include <memory>
+
+#include "cscore.h"
+#include "vision/VisionPipeline.h"
+
+namespace frc {
+
+/**
+ * Non-template base class for VisionRunner.
+ */
+class VisionRunnerBase {
+ public:
+ /**
+ * Creates a new vision runner. It will take images from the {@code
+ * videoSource}, and call the virtual DoProcess() method.
+ *
+ * @param videoSource the video source to use to supply images for the
+ * pipeline
+ */
+ explicit VisionRunnerBase(cs::VideoSource videoSource);
+
+ ~VisionRunnerBase();
+
+ VisionRunnerBase(const VisionRunnerBase&) = delete;  // non-copyable: owns a sink and an image buffer
+ VisionRunnerBase& operator=(const VisionRunnerBase&) = delete;
+
+ /**
+ * Runs the pipeline one time, giving it the next image from the video source
+ * specified in the constructor. This will block until the source either has
+ * an image or throws an error. If the source successfully supplied a frame,
+ * the pipeline's image input will be set, the pipeline will run, and the
+ * listener specified in the constructor will be called to notify it that the
+ * pipeline ran. This must be run in a dedicated thread, and cannot be used in
+ * the main robot thread because it will freeze the robot program.
+ *
+ * <p>This method is exposed to allow teams to add additional functionality or
+ * have their own ways to run the pipeline. Most teams, however, should just
+ * use RunForever() in its own thread using a std::thread.</p>
+ */
+ void RunOnce();
+
+ /**
+ * A convenience method that calls RunOnce() in an infinite loop.
+ * This must be run in a dedicated thread, and cannot be used in the main
+ * robot thread because it will freeze the robot program.
+ *
+ * <strong>Do not call this method directly from the main thread.</strong>
+ */
+ void RunForever();
+
+ /**
+ * Stop a RunForever() loop.
+ */
+ void Stop();
+
+ protected:
+ virtual void DoProcess(cv::Mat& image) = 0;  // implemented by VisionRunner<T>; receives each grabbed frame
+
+ private:
+ std::unique_ptr<cv::Mat> m_image;  // frame buffer handed to DoProcess (allocation/lifetime managed in the .cpp)
+ cs::CvSink m_cvSink;  // sink that grabs frames; presumably attached to the ctor's videoSource — see .cpp
+ std::atomic_bool m_enabled;  // NOTE(review): looks like the RunForever() loop flag cleared by Stop() — confirm in .cpp
+};
+
+/**
+ * A vision runner is a convenient wrapper object to make it easy to run vision
+ * pipelines from robot code. The easiest way to use this is to run it in a
+ * std::thread and use the listener to take snapshots of the pipeline's outputs.
+ *
+ * @see VisionPipeline
+ */
+template <typename T>
+class VisionRunner : public VisionRunnerBase {  // T is the user's VisionPipeline-derived type
+ public:
+ VisionRunner(cs::VideoSource videoSource, T* pipeline,
+ std::function<void(T&)> listener);
+ virtual ~VisionRunner() = default;
+
+ protected:
+ void DoProcess(cv::Mat& image) override;  // runs the pipeline, then the listener (see VisionRunner.inc)
+
+ private:
+ T* m_pipeline;  // non-owning; caller keeps the pipeline alive for the runner's lifetime
+ std::function<void(T&)> m_listener;  // called after each Process() with the pipeline, so outputs can be read
+};
+} // namespace frc
+
+#include "VisionRunner.inc"
diff --git a/cameraserver/src/main/native/include/vision/VisionRunner.inc b/cameraserver/src/main/native/include/vision/VisionRunner.inc
new file mode 100644
index 0000000..1a38048
--- /dev/null
+++ b/cameraserver/src/main/native/include/vision/VisionRunner.inc
@@ -0,0 +1,35 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2016-2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+#pragma once
+
+namespace frc {
+
+/**
+ * Creates a new vision runner. It will take images from the {@code
+ * videoSource}, send them to the {@code pipeline}, and call the {@code
+ * listener} when the pipeline has finished to alert user code when it is safe
+ * to access the pipeline's outputs.
+ *
+ * @param videoSource The video source to use to supply images for the pipeline
+ * @param pipeline The vision pipeline to run
+ * @param listener A function to call after the pipeline has finished running
+ */
+template <typename T>
+VisionRunner<T>::VisionRunner(cs::VideoSource videoSource, T* pipeline,
+ std::function<void(T&)> listener)
+ : VisionRunnerBase(videoSource),  // base owns the frame-grabbing machinery
+ m_pipeline(pipeline),  // non-owning pointer; must outlive this runner
+ m_listener(listener) {}  // copied; invoked after every pipeline run
+
+template <typename T>
+void VisionRunner<T>::DoProcess(cv::Mat& image) {
+ m_pipeline->Process(image);  // run the user pipeline on the grabbed frame
+ m_listener(*m_pipeline);  // then notify user code that outputs are safe to read
+}
+
+} // namespace frc
diff --git a/cameraserver/src/test/native/cpp/main.cpp b/cameraserver/src/test/native/cpp/main.cpp
new file mode 100644
index 0000000..f07ede3
--- /dev/null
+++ b/cameraserver/src/test/native/cpp/main.cpp
@@ -0,0 +1,8 @@
+/*----------------------------------------------------------------------------*/
+/* Copyright (c) 2018 FIRST. All Rights Reserved. */
+/* Open Source Software - may be modified and shared by FRC teams. The code */
+/* must be accompanied by the FIRST BSD license file in the root directory of */
+/* the project. */
+/*----------------------------------------------------------------------------*/
+
+int main() { return 0; }