Removed old frc971 domain. This should now be our current, reviewed code.
git-svn-id: https://robotics.mvla.net/svn/frc971/2013/trunk/src@4181 f308d9b7-e957-4cde-b6ac-9a88185e7312
diff --git a/971CV/src/org/frc971/AccepterThread.java b/971CV/src/org/frc971/AccepterThread.java
deleted file mode 100644
index 1cd71e8..0000000
--- a/971CV/src/org/frc971/AccepterThread.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/**
- *
- */
-package org.frc971;
-
-/**
- * @author daniel
- * Accepts clients for the data server.
- */
-
-import java.io.IOException;
-
-import java.nio.ByteBuffer;
-import java.nio.channels.ServerSocketChannel;
-import java.nio.channels.SocketChannel;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Logger;
-
-public class AccepterThread extends Thread {
-
- private final static Logger LOG = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
-
- private ServerSocketChannel sock;
-
- private List<SocketChannel> connected = new ArrayList<SocketChannel>();
-
- /* Holds overflow data when a socket's send buffer gets full, so that
- * the thread can continue running.
- */
- private Map<SocketChannel, ByteBuffer> toSend = new HashMap<SocketChannel, ByteBuffer>();
- /* Keeps track of how many times a write operation on a socket
- * has failed because its buffer was full.
- */
- private Map<SocketChannel, Integer> failedAttempts = new HashMap<SocketChannel, Integer>(); //Map can't hold primitive ints, so the count is boxed as an Integer
-
- /** Helper function to completely erase a peer from
- * the connection list and both bookkeeping maps that might contain it.
- */
- private void erasePeer(SocketChannel peer) {
- connected.remove(peer);
- toSend.remove(peer);
- failedAttempts.remove(peer);
- }
-
- /** Constructor
- *
- * @param sock is the ServerSocketChannel that you want to monitor
- */
- public AccepterThread(ServerSocketChannel sock) {
- super("Accepter Thread");
- setPriority(3); //lowish priority so the image processor takes priority over it
- this.sock = sock;
- start();
- }
-
- /** Runs in separate thread. Continually accepts new connections. */
- public void run() {
- SocketChannel clientSock;
- while (true) {
- try {
- clientSock = sock.accept();
- //our writes must not block
- clientSock.configureBlocking(false);
- connected.add(clientSock);
- }
- catch (IOException e) {
- LOG.warning("Cannot serve image processing results to client:" + e.getMessage());
- Messages.warning("Cannot serve image processing results to client:" + e.getMessage());
- }
- }
- }
-
- /** Sends a message to all currently connected clients.
- *
- * @param message is the message that you want to send.
- */
- public void sendtoAll(ByteBuffer message) {
- /* Copy our connected list, so we don't have
- * to hold our lock forever if the writes block.
- */
- List<SocketChannel> connectedTemp = new ArrayList<SocketChannel>();
- for (SocketChannel channel : connected) {
- connectedTemp.add(channel);
- }
-
- int result;
- for (SocketChannel conn : connectedTemp) {
- try {
-
- /** If this socket has data from the
- * last send operation still waiting to be
- * sent, send this instead of our original
- * message. Since we generally want only
- * current data, our original message will
- * not be missed. However, it is imperative
- * that we finish our pending transmission,
- * because an incomplete transmission could
- * leave a client thread somewhere blocking
- * indefinitely.
- */
- if (toSend.containsKey(conn)) {
- message = toSend.get(conn);
- }
-
- result = conn.write(message);
-
- /*if our send buffer is full, store our message away
- * so we can try again later without halting the thread.
- */
- if (message.remaining() > 0) {
- toSend.put(conn, message);
- //check and update our count of failed send attempts
- if (failedAttempts.containsKey(conn)) {
- int failures = failedAttempts.get(conn);
- ++failures;
- if (failures >= 100) {
- //Socket has become dysfunctional; give up on it
- LOG.info("Write would have blocked 100 times. Assuming peer disconnect.");
- erasePeer(conn);
- } else {
- failedAttempts.put(conn, failures);
- }
- }
- else {
- failedAttempts.put(conn, 1);
- }
- }
-
- if (result == -1) {
- //The write failed. This is probably because the client disconnected.
- LOG.info("Write returned -1. Client has probably disconnected.");
- erasePeer(conn);
- }
- }
- catch (IOException e) {
- //The write failed. This is probably because the client disconnected.
- LOG.info("Write threw IOException. Client has probably disconnected.");
- erasePeer(conn);
- }
- }
- }
-
- /** Overloaded sendtoAll method for byte arrays. */
- public void sendtoAll(byte[] message) {
- sendtoAll(ByteBuffer.wrap(message));
- }
-
- /** Overloaded sendtoAll method for Strings. */
- public void sendtoAll(String message) {
- sendtoAll(message.getBytes());
- }
-}
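
The partial-write bookkeeping above is the heart of AccepterThread: a non-blocking SocketChannel.write() may send only part of a buffer, so leftover bytes are parked per peer and retried on the next broadcast instead of blocking the image-processing thread. A minimal sketch of that pattern, using illustrative names that are not part of the class above:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.SocketChannel;
    import java.util.HashMap;
    import java.util.Map;

    class PartialWriteTracker {
        // Leftover bytes per peer, kept so a later pass can finish the transmission.
        private final Map<SocketChannel, ByteBuffer> pending = new HashMap<SocketChannel, ByteBuffer>();

        void writeOrPark(SocketChannel peer, ByteBuffer fresh) throws IOException {
            // Finish any pending buffer first; interleaving a new message would corrupt the stream.
            ByteBuffer msg = pending.containsKey(peer) ? pending.get(peer) : fresh;
            peer.write(msg); // non-blocking: may write fewer bytes than msg.remaining()
            if (msg.hasRemaining()) {
                pending.put(peer, msg); // retry on the next call
            } else {
                pending.remove(peer);
            }
        }
    }
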
diff --git a/971CV/src/org/frc971/DebugCanvas.java b/971CV/src/org/frc971/DebugCanvas.java
deleted file mode 100644
index 484620c..0000000
--- a/971CV/src/org/frc971/DebugCanvas.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package org.frc971;
-
-import com.googlecode.javacv.CanvasFrame;
-import com.googlecode.javacv.cpp.opencv_core.IplImage;
-
-public class DebugCanvas {
- public boolean show;
- private CanvasFrame canvasFrame;
- private String name;
-
- public DebugCanvas(String name) {
- this.name = name;
- }
-
- public void showImage(IplImage image) {
- if (show) {
- if (canvasFrame == null) {
- canvasFrame = new CanvasFrame(name);
- }
- canvasFrame.setName(name);
- canvasFrame.showImage(image.getBufferedImage());
- } else {
- if (canvasFrame != null) {
- canvasFrame.dispose();
- canvasFrame = null;
- }
- }
- }
-}
diff --git a/971CV/src/org/frc971/DebugServerRun.java b/971CV/src/org/frc971/DebugServerRun.java
deleted file mode 100644
index cccf241..0000000
--- a/971CV/src/org/frc971/DebugServerRun.java
+++ /dev/null
@@ -1,145 +0,0 @@
-package org.frc971;
-
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-
-import java.net.InetSocketAddress;
-import java.nio.ByteBuffer;
-import java.nio.channels.ServerSocketChannel;
-import java.nio.channels.SocketChannel;
-
-import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import com.googlecode.javacv.OpenCVFrameGrabber;
-import com.googlecode.javacv.cpp.opencv_core.*;
-import static com.googlecode.javacv.cpp.opencv_highgui.*;
-
-public class DebugServerRun {
-
- private final static Logger LOG = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
-
- final OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(-1);
-
- private ServerSocketChannel sock;
- private SocketChannel client;
-
- /** Constructs a formatted boundary header from a timestamp and content length. */
- private ByteBuffer CreateTransmission(long content_length, double timestamp) {
- StringBuilder ret = new StringBuilder();
- ret.append("\r\n--boundarydonotcross\r\n");
- ret.append("Content-Type: image/jpeg\r\n");
- ret.append("Content-Length: ");
- ret.append(content_length);
- ret.append("\r\n");
- ret.append("X-Timestamp: ");
- ret.append(timestamp);
- ret.append("\r\n\r\n");
- return ByteBuffer.wrap(ret.toString().getBytes());
- }
-
- /** Loop that pushes a data stream to the client. */
- private void push() {
- try {
- grabber.start();
- }
- catch (Exception e) {
- LOG.severe("Could not start frame grabber.");
- return;
- }
- IplImage img;
- long content_size;
- File buff_file;
- InputStream input;
- double timestamp;
- while (true) {
- //get some image data
- try {
- img = grabber.grab();
- timestamp = System.currentTimeMillis();
- /*We buffer through /dev/shm, just to make the conversion process easier.
- * I know this is really ugly, but it works a lot better than what
- * I was doing before, which segfaulted.
- */
- cvSaveImage("/dev/shm/DebugServerBuffer.jpg", img);
- buff_file = new File("/dev/shm/DebugServerBuffer.jpg");
- content_size = buff_file.length();
- int totalBytesRead = 0;
- input = new BufferedInputStream(new FileInputStream(buff_file));
- byte[] result = new byte[(int)content_size];
- while(totalBytesRead < result.length){
- int bytesRemaining = result.length - totalBytesRead;
- //input.read() returns -1 (EOF), 0, or the number of bytes read
- int bytesRead = input.read(result, totalBytesRead, bytesRemaining);
- if (bytesRead > 0){
- totalBytesRead = totalBytesRead + bytesRead;
- }
- }
- ByteBuffer header = CreateTransmission(content_size, timestamp);
- ByteBuffer bbuf = ByteBuffer.wrap(result);
- ByteBuffer to_send = ByteBuffer.allocate(header.capacity() + bbuf.capacity());
- to_send.put(header);
- to_send.put(bbuf);
- to_send.rewind();
- SocketCommon.sendAll(client, to_send);
- }
- catch (Exception e) {
- LOG.warning("Could not grab frame.");
- continue;
- }
- }
- }
-
- /** Constructor to start the server and bind it to a port. */
- public DebugServerRun(final int port) throws IOException {
- sock = ServerSocketChannel.open();
- sock.socket().bind(new InetSocketAddress(port));
- client = sock.accept();
- client.configureBlocking(false);
- //we are now connected to our client. Wait for them to send us a header.
- LOG.info("Reading headers...");
- SocketCommon.readtoBoundary(client, "\r\n\r\n");
- //send one back
- LOG.info("Writing headers...");
- SocketCommon.sendAll(client, "donotcross\r\n");
- }
-
- /** Runs the server and, if the -vision flag is given, concurrently starts the vision processor. */
- public static void main(final String args[]) throws IOException {
- //main function for server
-
- String atomIP = null;
- try {
- atomIP = args[0];
- }
- catch (ArrayIndexOutOfBoundsException e) {
- System.out.println("Usage: VisionTuner [atom ip]");
- System.exit(0);
- }
-
- //set logger to log everything
- LOG.setLevel(Level.ALL);
- try {
- LogHandler handler = new LogHandler("ds_vision.log");
- TimeFormatter formatter = new TimeFormatter();
- handler.setFormatter(formatter);
- LOG.addHandler(handler);
- }
- catch (FileNotFoundException e) {
- System.err.println("Warning: Logging initialization failed.");
- }
-
- if (Arrays.asList(args).contains("-vision")) {
- LOG.info("Starting vision processor.");
- new TestClient(atomIP);
- }
-
- DebugServerRun server = new DebugServerRun(9714);
- server.push();
- }
-}
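
DebugServerRun frames its stream MJPEG-style: each grabbed frame is preceded by the boundary header built in CreateTransmission(), then the raw JPEG bytes follow. Assuming an illustrative 12345-byte frame captured at timestamp 1.3621E12, one transmission on the wire looks roughly like:

    \r\n--boundarydonotcross\r\n
    Content-Type: image/jpeg\r\n
    Content-Length: 12345\r\n
    X-Timestamp: 1.3621E12\r\n
    \r\n
    <12345 bytes of JPEG data>

(The line breaks are only for readability here; on the wire the header is one contiguous byte sequence followed immediately by the image data.)
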
diff --git a/971CV/src/org/frc971/HTTPClient.java b/971CV/src/org/frc971/HTTPClient.java
deleted file mode 100644
index 96308a1..0000000
--- a/971CV/src/org/frc971/HTTPClient.java
+++ /dev/null
@@ -1,93 +0,0 @@
-package org.frc971;
-
-//@author: daniel
-
-import java.io.*;
-import java.net.*;
-
-import java.awt.image.BufferedImage;
-
-import java.nio.channels.SocketChannel;
-import java.nio.ByteBuffer;
-
-import java.util.logging.Logger;
-
-import javax.imageio.ImageIO;
-
-import aos.ChannelImageGetter;
-
-import edu.wpi.first.wpijavacv.WPIColorImage;
-
-public class HTTPClient {
- //Connects to HTTP Server on robot and receives images
-
- /** whether or not to print debug messages to stdout. */
- private final static boolean LOCAL_DEBUG = false;
-
- private String atomIP;
-
- private SocketChannel sock;
-
- private ChannelImageGetter cgetter;
-
- private final static Logger LOG = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
-
- /** Small helper method for printing debug messages to stdout. */
- private void WriteDebug(String message) {
- //small helper function to write debug messages
- if (LOCAL_DEBUG)
- LOG.info("LOCAL_DEBUG: " + message);
- }
-
- /** Constructor. Initializes the connection and sets up the aos image getter.
- * @throws IOException */
- public HTTPClient(String atomIP) throws IOException {
- //Initialize socket connection to robot
- this.atomIP = atomIP;
- sock = SocketChannel.open();
- WriteDebug("Connecting to server at " + atomIP);
- sock.connect(new InetSocketAddress(atomIP, 9714));
- sock.configureBlocking(false);
- //Write headers
- //HTTPStreamer does not actually use the headers, so we can just write terminating chars.
- WriteDebug("Writing headers...");
- SocketCommon.sendAll(sock, "\r\n\r\n");
- //Receive headers
- WriteDebug("Reading headers...");
- SocketCommon.readtoBoundary(sock, "donotcross\r\n");
- WriteDebug("Now receiving data.");
- cgetter = new ChannelImageGetter(sock);
- }
-
- /** Grabs the most current frame from the HTTPStreamer stream.
- * Returns a class instance with image and timestamp attributes. */
- public ImageWithTimestamp GetFrame() {
- ImageWithTimestamp final_image = new ImageWithTimestamp();
- //Use Brian's code to extract an image and timestamp from raw server data.
- ByteBuffer binaryImage = cgetter.getJPEG();
- if (binaryImage == null) {
- Messages.severe("Could not parse data from robot. See the log for details.");
- return null;
- }
- //Decode the ByteBuffer into a BufferedImage and wrap it in a WPIColorImage
- byte[] b = new byte[binaryImage.remaining()];
- binaryImage.get(b);
- try {
- InputStream iis = new ByteArrayInputStream(b);
- BufferedImage bImageFromConvert = ImageIO.read(iis);
- final_image.image = new WPIColorImage(bImageFromConvert);
- final_image.timestamp = cgetter.getTimestamp();
- WriteDebug("Image processing successful.");
- return final_image;
- }
- catch (IOException e) {
- LOG.warning("Image processing failed: " + e.getMessage());
- return null;
- }
- }
-
- /** Gets the name to display at the top of the image window. */
- public String GetName() {
- return atomIP;
- }
-}
diff --git a/971CV/src/org/frc971/ImageWithTimestamp.java b/971CV/src/org/frc971/ImageWithTimestamp.java
deleted file mode 100644
index 51b156f..0000000
--- a/971CV/src/org/frc971/ImageWithTimestamp.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package org.frc971;
-
-import edu.wpi.first.wpijavacv.WPIColorImage;
-
-/** Small helper class for associating images and timestamps. */
-public class ImageWithTimestamp {
- WPIColorImage image = null;
- double timestamp;
-}
diff --git a/971CV/src/org/frc971/LogHandler.java b/971CV/src/org/frc971/LogHandler.java
deleted file mode 100644
index fd486fb..0000000
--- a/971CV/src/org/frc971/LogHandler.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- *
- */
-package org.frc971;
-
-import java.io.FileOutputStream;
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-
-import java.util.logging.Handler;
-import java.util.logging.LogRecord;
-
-/**
- * @author daniel
- *
- */
-
-/** Logs data to custom files, using specific formatting. */
-public class LogHandler extends Handler {
-
- private FileOutputStream ofstream;
- PrintWriter writer;
-
- /** Constructor for log handler.
- *
- * @param filename is the name of the file you want to log to.
- * @throws FileNotFoundException if file cannot be opened or created.
- */
- public LogHandler (String filename) throws FileNotFoundException {
- super();
-
- if (filename == null || filename.isEmpty()) {
- filename = "logfile.log";
- }
-
- //open the file for writing, creating it if it does not exist
- ofstream = new FileOutputStream(filename);
- writer = new PrintWriter(ofstream);
- setFormatter(new TimeFormatter());
- }
-
- /*Required methods*/
-
- /** Required by the Handler API. Writes a new message to the log.
- * @param message is the message you want to log.
- */
- public void publish(LogRecord message) {
- //record a message
- if (!isLoggable(message)) {
- //ensure that this message should be logged by this handler
- return;
- }
- writer.print(getFormatter().format(message)); //Formatter adds trailing \n
- }
-
- /** Required by the Handler API. Flushes the writer. */
- public void flush() {
- writer.flush();
- }
-
- /** Required by the Handler API. Closes the logfile. */
- public void close() throws SecurityException {
- writer.close();
- }
-}
diff --git a/971CV/src/org/frc971/Recognizer.java b/971CV/src/org/frc971/Recognizer.java
deleted file mode 100644
index 9292357..0000000
--- a/971CV/src/org/frc971/Recognizer.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package org.frc971;
-
-import edu.wpi.first.wpijavacv.WPIColorImage;
-
-/**
- * Vision target recognizer.
- *
- * @author jerry
- */
-public interface Recognizer {
-
- /**
- * Sets the HSV filter to allow H in [minHue .. maxHue], S >= minSat,
- * V >= minVal.
- */
- void setHSVRange(int minHue, int maxHue, int minSat, int minVal);
-
- int getHueMin();
- int getHueMax();
- int getSatMin();
- int getValMin();
-
- /** Enables/disables windows to view intermediate stages, for tuning. */
- void showIntermediateStages(boolean enable);
-
- /**
- * Processes a camera image, returning an image to display for targeting
- * and debugging, e.g. with cross-hairs and marked targets.
- *<p>
- * SIDE EFFECTS: May modify cameraImage.
- */
- Target processImage(WPIColorImage cameraImage);
-}
diff --git a/971CV/src/org/frc971/Recognizer2013.java b/971CV/src/org/frc971/Recognizer2013.java
deleted file mode 100644
index 812c78b..0000000
--- a/971CV/src/org/frc971/Recognizer2013.java
+++ /dev/null
@@ -1,296 +0,0 @@
-package org.frc971;
-
-import java.util.ArrayList;
-import java.util.logging.Logger;
-
-import com.googlecode.javacv.cpp.opencv_core;
-import com.googlecode.javacv.cpp.opencv_core.CvSize;
-import com.googlecode.javacv.cpp.opencv_core.IplImage;
-import com.googlecode.javacv.cpp.opencv_imgproc;
-import com.googlecode.javacv.cpp.opencv_imgproc.IplConvKernel;
-
-import edu.wpi.first.wpijavacv.DaisyExtensions;
-import edu.wpi.first.wpijavacv.WPIBinaryImage;
-import edu.wpi.first.wpijavacv.WPIColor;
-import edu.wpi.first.wpijavacv.WPIColorImage;
-import edu.wpi.first.wpijavacv.WPIContour;
-import edu.wpi.first.wpijavacv.WPIPoint;
-import edu.wpi.first.wpijavacv.WPIPolygon;
-
-/**
- * Vision target recognizer for FRC 2013.
- *
- * @author jrussell
- * @author jerry
- */
-public class Recognizer2013 implements Recognizer {
-
- private final static Logger LOG = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
-
- // --- Tunable recognizer constants.
- static final double kRoughlyHorizontalSlope = Math.tan(Math.toRadians(30));
- static final double kRoughlyVerticalSlope = Math.tan(Math.toRadians(90 - 30));
- static final int kHoleClosingIterations = 2;
- static final double kPolygonPercentFit = 12;
- static final int kMinWidthAt320 = 35; // for high goal and middle goals
-
- // --- Field dimensions.
- // The target aspect ratios are for the midlines of the vision target tape.
- static final double kGoalWidthIn = 54; // of the high and middle targets
- static final double kTargetWidthIn = kGoalWidthIn + 4;
- static final double kHighGoalAspect = (21 + 4) / kTargetWidthIn;
- static final double kMiddleGoalAspect = (24 + 4) / kTargetWidthIn;
- static final double kMinAspect = kHighGoalAspect * 0.6;
- static final double kMaxAspect = kMiddleGoalAspect * 1.4;
- static final double kTopTargetHeightIn = 104.125 + 21.0/2; // center of target
-
- // --- Robot and camera dimensions.
- static final double kShooterOffsetDeg = 0; // azimuth offset from camera to shooter
- static final double kHorizontalFOVDeg = 44.0; // Logitech C210 camera
- static final double kVerticalFOVDeg = 480.0 / 640.0 * kHorizontalFOVDeg;
- static final double kCameraHeightIn = 24.0; // TODO
- static final double kCameraPitchDeg = 21.0; // TODO
- static final double kTanHFOV2 = Math.tan(Math.toRadians(kHorizontalFOVDeg / 2));
- static final double kTanVFOV2 = Math.tan(Math.toRadians(kVerticalFOVDeg / 2));
-
- // --- Colors for drawing indicators on the image.
- private static final WPIColor reject1Color = WPIColor.GRAY;
- private static final WPIColor reject2Color = WPIColor.YELLOW;
- private static final WPIColor candidateColor = WPIColor.BLUE;
- private static final WPIColor targetColor = WPIColor.RED;
-
- // --- Color thresholds, initialized in the constructor.
- private int min1Hue, max1Hue, min1Sat, min1Val;
-
- // Show intermediate images for parameter tuning.
- private final DebugCanvas thresholdedCanvas = new DebugCanvas("thresholded");
- private final DebugCanvas morphedCanvas = new DebugCanvas("morphed");
-
- // Data to reuse for each frame.
- private final DaisyExtensions daisyExtensions = new DaisyExtensions();
- private final IplConvKernel morphKernel = IplConvKernel.create(3, 3, 1, 1,
- opencv_imgproc.CV_SHAPE_RECT, null);
- private final ArrayList<WPIPolygon> polygons = new ArrayList<WPIPolygon>();
-
- // Frame-size-dependent data to reuse for each frame.
- private CvSize size = null;
- private WPIColorImage rawImage;
- private IplImage bin;
- private IplImage hsv;
- private IplImage hue;
- private IplImage sat;
- private IplImage val;
- private int minWidth;
- private WPIPoint linePt1, linePt2; // crosshair endpoints
-
- public Recognizer2013() {
- setHSVRange(70, 106, 137, 27);
- }
-
- @Override
- public void setHSVRange(int minHue, int maxHue, int minSat, int minVal) {
- min1Hue = minHue - 1; // - 1 because cvThreshold() does > instead of >=
- max1Hue = maxHue + 1;
- min1Sat = minSat - 1;
- min1Val = minVal - 1;
- }
- @Override
- public int getHueMin() { return min1Hue + 1; }
- @Override
- public int getHueMax() { return max1Hue - 1; }
- @Override
- public int getSatMin() { return min1Sat + 1; }
- @Override
- public int getValMin() { return min1Val + 1; }
-
- @Override
- public void showIntermediateStages(boolean enable) {
- thresholdedCanvas.show = enable;
- morphedCanvas.show = enable;
- }
-
- @Override
- public Target processImage(WPIColorImage cameraImage) {
- // (Re)allocate the intermediate images if the input is a different
- // size than the previous image.
- if (size == null || size.width() != cameraImage.getWidth()
- || size.height() != cameraImage.getHeight()) {
- size = opencv_core.cvSize(cameraImage.getWidth(),
- cameraImage.getHeight());
- rawImage = DaisyExtensions.makeWPIColorImage(
- DaisyExtensions.getIplImage(cameraImage));
- bin = IplImage.create(size, 8, 1);
- hsv = IplImage.create(size, 8, 3);
- hue = IplImage.create(size, 8, 1);
- sat = IplImage.create(size, 8, 1);
- val = IplImage.create(size, 8, 1);
- minWidth = (kMinWidthAt320 * cameraImage.getWidth() + 319) / 320;
-
- int horizontalOffsetPixels = (int)Math.round(
- kShooterOffsetDeg * size.width() / kHorizontalFOVDeg);
- int x = size.width() / 2 + horizontalOffsetPixels;
- linePt1 = new WPIPoint(x, size.height() - 1);
- linePt2 = new WPIPoint(x, 0);
- } else {
- // Copy the camera image so it's safe to draw on.
- opencv_core.cvCopy(DaisyExtensions.getIplImage(cameraImage),
- DaisyExtensions.getIplImage(rawImage));
- }
-
- IplImage input = DaisyExtensions.getIplImage(rawImage);
-
- // Threshold the pixels in HSV color space.
- // TODO(jerry): Do this in one pass of a pixel-processing loop.
- opencv_imgproc.cvCvtColor(input, hsv, opencv_imgproc.CV_BGR2HSV_FULL);
- opencv_core.cvSplit(hsv, hue, sat, val, null);
-
- // NOTE: Since red is at the end of the cyclic color space, you can OR
- // a threshold and an inverted threshold to match red pixels.
- opencv_imgproc.cvThreshold(hue, bin, min1Hue, 255, opencv_imgproc.CV_THRESH_BINARY);
- opencv_imgproc.cvThreshold(hue, hue, max1Hue, 255, opencv_imgproc.CV_THRESH_BINARY_INV);
- opencv_imgproc.cvThreshold(sat, sat, min1Sat, 255, opencv_imgproc.CV_THRESH_BINARY);
- opencv_imgproc.cvThreshold(val, val, min1Val, 255, opencv_imgproc.CV_THRESH_BINARY);
-
- // Combine the results to obtain a binary image which is mostly the
- // interesting pixels.
- opencv_core.cvAnd(hue, bin, bin, null);
- opencv_core.cvAnd(bin, sat, bin, null);
- opencv_core.cvAnd(bin, val, bin, null);
-
- thresholdedCanvas.showImage(bin);
-
- // Fill in gaps using binary morphology.
- opencv_imgproc.cvMorphologyEx(bin, bin, null, morphKernel,
- opencv_imgproc.CV_MOP_CLOSE, kHoleClosingIterations);
-
- morphedCanvas.showImage(bin);
-
- // Find contours.
- //
- // NOTE: If we distinguished between the inner and outer boundaries of
- // the vision target rectangles, we could apply a more accurate width
- // filter and more accurately compute the target range.
- WPIBinaryImage binWpi = DaisyExtensions.makeWPIBinaryImage(bin);
- WPIContour[] contours = daisyExtensions.findConvexContours(binWpi);
-
- // Simplify the contours to polygons and filter by size and aspect ratio.
- //
- // TODO(jerry): Also look for the two vertical stripe vision targets.
- // They'll greatly increase the precision of measuring the distance. If
- // both stripes are visible, they'll increase the accuracy for
- // identifying the high goal.
- polygons.clear();
- for (WPIContour c : contours) {
- if (c.getWidth() >= minWidth) {
- double ratio = ((double) c.getHeight()) / c.getWidth();
- if (ratio >= kMinAspect && ratio <= kMaxAspect) {
- polygons.add(c.approxPolygon(kPolygonPercentFit));
- // System.out.println(" Accepted aspect ratio " + ratio);
- } else {
- // System.out.println(" Rejected aspect ratio " + ratio);
- }
- }
- }
-
- // Pick the target with the highest center-point that matches yet more
- // filter criteria.
- WPIPolygon bestTarget = null;
- int highestY = Integer.MAX_VALUE;
-
- for (WPIPolygon p : polygons) {
- // TODO(jerry): Replace boolean filters with a scoring function?
- if (p.isConvex() && p.getNumVertices() == 4) { // quadrilateral
- WPIPoint[] points = p.getPoints();
- // Filter for polygons with 2 ~horizontal and 2 ~vertical sides.
- int numRoughlyHorizontal = 0;
- int numRoughlyVertical = 0;
- for (int i = 0; i < 4; ++i) {
- double dy = points[i].getY() - points[(i + 1) % 4].getY();
- double dx = points[i].getX() - points[(i + 1) % 4].getX();
- double slope = Double.MAX_VALUE;
- if (dx != 0) {
- slope = Math.abs(dy / dx);
- }
-
- if (slope < kRoughlyHorizontalSlope) {
- ++numRoughlyHorizontal;
- } else if (slope > kRoughlyVerticalSlope) {
- ++numRoughlyVertical;
- }
- }
-
- if (numRoughlyHorizontal >= 2 && numRoughlyVertical == 2) {
- int pCenterX = p.getX() + p.getWidth() / 2;
- int pCenterY = p.getY() + p.getHeight() / 2;
-
- rawImage.drawPolygon(p, candidateColor, 2);
- rawImage.drawPoint(new WPIPoint(pCenterX, pCenterY),
- targetColor, 2);
- if (pCenterY < highestY) {
- bestTarget = p;
- highestY = pCenterY;
- }
- } else {
- rawImage.drawPolygon(p, reject2Color, 1);
- }
- } else {
- rawImage.drawPolygon(p, reject1Color, 1);
- }
- }
-
- Target found = null;
- if (bestTarget != null) {
- rawImage.drawPolygon(bestTarget, targetColor, 2);
- found = measureTarget(bestTarget);
- } else {
- LOG.fine("No target found");
- }
-
- // Draw a crosshair
- rawImage.drawLine(linePt1, linePt2, targetColor, 1);
-
- if (found == null) {
- found = new Target();
- }
- found.editedPicture = rawImage;
-
- daisyExtensions.releaseMemory();
- //System.gc();
-
- return found;
- }
-
- /**
- * Uses the camera, field, and robot dimensions to compute targeting info.
- */
- private Target measureTarget(WPIPolygon target) {
- double w = target.getWidth();
- double h = target.getHeight();
- double x = target.getX() + w / 2; // target center in view coords
- double y = target.getY() + h / 2;
-
- double vw = size.width();
- double vh = size.height();
- double xc = x - vw / 2; // target center pos'n ±from view center
- double yc = vh / 2 - y; // ... in world coords on the viewing plane
-
- // Target angles relative to the camera.
- double azimuthCam = Math.atan2(xc * 2 * kTanHFOV2, vw);
- double elevationCam = Math.atan2(yc * 2 * kTanVFOV2, vh);
- double rangeIn = kTargetWidthIn * vw / (w * 2 * kTanHFOV2);
-
- //Put results in target
- Target data = new Target();
- data.azimuth = (Math.toDegrees(azimuthCam) - kShooterOffsetDeg);
- data.elevation = (Math.toDegrees(elevationCam));
- data.range = (rangeIn / 12);
-
- LOG.fine("Best target at (" + x + ", " + y + ") " + w +" x " + h
- + ", shot azimuth=" + (Math.toDegrees(azimuthCam) - kShooterOffsetDeg) +
- " elevation=" + (Math.toDegrees(elevationCam) + kCameraPitchDeg) +
- " range=" + (rangeIn / 12));
-
- return data;
- }
-
-}
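
measureTarget() converts the winning polygon's pixel geometry into shooting parameters using the camera's field of view. Restated as a stand-alone sketch (the method name is illustrative; the constants correspond to kTanHFOV2, kTanVFOV2, and kTargetWidthIn above, and the shooter azimuth offset, zero this year, is subtracted afterwards):

    // x, y: target center in pixels; w: target width in pixels; vw, vh: view size in pixels.
    static double[] solveTarget(double x, double y, double w, double vw, double vh,
                                double tanHFov2, double tanVFov2, double targetWidthIn) {
        double xc = x - vw / 2;  // pixels right of the view center
        double yc = vh / 2 - y;  // pixels above the view center
        double azimuthDeg = Math.toDegrees(Math.atan2(xc * 2 * tanHFov2, vw));
        double elevationDeg = Math.toDegrees(Math.atan2(yc * 2 * tanVFov2, vh));
        double rangeFt = targetWidthIn * vw / (w * 2 * tanHFov2) / 12; // inches to feet
        return new double[] { azimuthDeg, elevationDeg, rangeFt };
    }
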
diff --git a/971CV/src/org/frc971/ResultSender.java b/971CV/src/org/frc971/ResultSender.java
deleted file mode 100644
index 441c105..0000000
--- a/971CV/src/org/frc971/ResultSender.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- *
- */
-package org.frc971;
-
-import java.io.IOException;
-
-import java.net.InetSocketAddress;
-
-import java.nio.channels.ServerSocketChannel;
-
-/**
- * @author daniel
- *
- */
-
-/** Serves processing results back to the atom. */
-public class ResultSender {
- private static final int PORT = 9716;
-
- private ServerSocketChannel sock;
-
- AccepterThread acceptor;
-
- /** Constructor. Binds the server socket and starts the accepter thread. */
- public ResultSender() throws IOException {
- sock = ServerSocketChannel.open();
- sock.socket().bind(new InetSocketAddress(PORT));
-
- //start accepter thread
- acceptor = new AccepterThread(sock);
- }
-
- /** Sends a new message of calculated attributes to the clients.
- *
- * @param azimuth is the calculated optimum azimuth for the shot.
- * @param elevation is the calculated optimum elevation for the shot.
- * @param range is the calculated optimum range for the shot.
- */
- public void send(double azimuth, double elevation, double range) {
- //Formulate a message as a String similar to an HTTP header.
- if (azimuth != -1 && elevation != -1 && range != -1) {
- StringBuilder message = new StringBuilder();
- message.append("\r\n--boundarydonotcross\r\n");
- message.append("Azimuth: ");
- message.append(azimuth);
- message.append("\r\nElevation: ");
- message.append(elevation);
- message.append("\r\nRange: ");
- message.append(range);
-
- acceptor.sendtoAll(message.toString());
- }
- }
-}
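
The result message is plain text framed with the same boundary string as the image stream. With illustrative values, one broadcast from send() reads:

    \r\n--boundarydonotcross\r\n
    Azimuth: 1.5\r\nElevation: 24.0\r\nRange: 15.2

A client on port 9716 can split on the boundary string and read the three key/value pairs.
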
diff --git a/971CV/src/org/frc971/SocketCommon.java b/971CV/src/org/frc971/SocketCommon.java
deleted file mode 100644
index daf3a6c..0000000
--- a/971CV/src/org/frc971/SocketCommon.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- *
- */
-package org.frc971;
-
-import java.io.IOException;
-
-import java.nio.ByteBuffer;
-import java.nio.channels.SocketChannel;
-import java.util.logging.Logger;
-
-/**
- * @author daniel
- * Socket operations used by other classes
- */
-public class SocketCommon {
-
- private final static Logger LOG = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
-
- /** Reads from a SocketChannel until it finds a given character sequence. */
- public static String readtoBoundary(SocketChannel sock, String boundary) {
- //reads from socket until it encounters a specific character combination
- //if boundary is null, it reads until it runs out of data
- ByteBuffer recvd = ByteBuffer.allocate(1024);
- StringBuilder sb = new StringBuilder();
- String message = "";
- try {
- int ret = 0;
- while (ret != -1) {
- ret = sock.read(recvd);
- //System.out.println(ret);
- if (ret == 0) {
- //finished receiving
- message = sb.toString();
- if (boundary == null)
- break;
- }
- else {
- for (int i = 0; i < recvd.capacity() - recvd.remaining(); i++) {
- sb.append((char)recvd.get(i));
- }
- recvd.clear();
- if (boundary != null) {
- if (sb.toString().contains(boundary)) {
- message = sb.toString();
- break;
- }
- else {
- continue;
- }
- }
- }
- }
- }
- catch (IOException e) {
- LOG.severe("Socket read failed. Check your network configuration.");
- Messages.severe("Socket read failed. Check your network configuration.");
- return null;
- }
- return message;
- }
-
- /** Guarantees that large messages will be completely sent through a socket.
- * @return Returns 0 for success, -1 for failure.
- */
- public static int sendAll(SocketChannel sock, ByteBuffer message) {
- message.rewind();
- while (message.remaining() > 0) {
- try {
- sock.write(message);
- }
- catch (IOException e) {
- LOG.warning("Socket write failed. Check your network configuration.");
- Messages.severe("Socket write failed. Check your network configuration.");
- return -1;
- }
- }
- return 0;
- }
-
- /** Overloaded method for sending a byte array. */
- public static void sendAll(SocketChannel sock, byte[] message) {
- ByteBuffer buff = ByteBuffer.wrap(message);
- sendAll(sock, buff);
- }
-
- /** Overloaded method for sending a String. */
- public static void sendAll(SocketChannel sock, String message) {
- sendAll(sock, message.getBytes());
- }
-}
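
Together, sendAll() and readtoBoundary() implement the small plain-text handshake that HTTPClient and DebugServerRun perform before streaming. A usage sketch of the client side (the address is illustrative):

    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.nio.channels.SocketChannel;

    class HandshakeExample {
        public static void main(String[] args) throws IOException {
            SocketChannel sock = SocketChannel.open();
            sock.connect(new InetSocketAddress("10.9.71.2", 9714)); // robot address is illustrative
            sock.configureBlocking(false);
            // HTTPStreamer ignores the header contents, so an empty header block suffices.
            SocketCommon.sendAll(sock, "\r\n\r\n");
            // Polls until the server's reply arrives.
            SocketCommon.readtoBoundary(sock, "donotcross\r\n");
        }
    }
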
diff --git a/971CV/src/org/frc971/TestClient.java b/971CV/src/org/frc971/TestClient.java
deleted file mode 100644
index 227c929..0000000
--- a/971CV/src/org/frc971/TestClient.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- *
- */
-package org.frc971;
-
-/**
- * @author daniel
- *
- */
-
-/** Small thread for running vision code concurrently with debug server. */
-public class TestClient extends Thread {
-
- private String atomIP;
-
- /** Constructor to set up new thread. */
- public TestClient(String atomIP) {
- super("Test Client");
- this.atomIP = atomIP;
- start();
- }
-
- /** Simple thread, runs the vision code. */
- public void run() {
- String[] args = {atomIP};
- VisionTuner.main(args);
- }
-}
diff --git a/971CV/src/org/frc971/TestImageGetter.java b/971CV/src/org/frc971/TestImageGetter.java
deleted file mode 100644
index 2577fa2..0000000
--- a/971CV/src/org/frc971/TestImageGetter.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/**
- *
- */
-package org.frc971;
-
-/**
- * @author daniel
- *
- */
-
-import java.io.File;
-import java.io.IOException;
-
-import java.util.logging.Logger;
-
-import javax.imageio.ImageIO;
-
-import edu.wpi.first.wpijavacv.WPIColorImage;
-
-/** Get debug images for Java camera processor. */
-public class TestImageGetter {
-
- private String path_to_images;
-
- private final static Logger LOG = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
-
- /** The names of our debugging images, without paths.
- * The GetNext method should be used to get the first
- * image, and not GetCurrent. */
- final static String[] images = {"45in_DoubleGreen.jpg",
- "57inLargeTarget_DoubleGreenBK.jpg",
- "FullField_DoubleGreenBK3.jpg",
- "FullField_SmallGreen.jpg",
- "HybridLine_DoubleGreenBK2.jpg",
- "HybridLine_DoubleGreenBK3.jpg",
- "HybridLine_DoubleGreenBK4.jpg",
- "HybridLine_SmallGreen2.jpg",
- "HybridLine_SmallGreen3.jpg",
- "HybridLine_SmallGreen4.jpg",
- "Midfield_DoubleGreenBK2.jpg",
- "Midfield_SmallGreen2.jpg",
- "Midfield_SmallGreen3.jpg",
- "Midfield_SmallGreen4.jpg",
- "OppLine_DoubleGreenBK2.jpg",
- "OppLine_SmallGreen2.jpg",
- "PyramidRight_DoubleGreenBK2.jpg",
- "PyramidRight_SmallGreen2.jpg"
- };
-
- private int image_index = -1;
-
- private WPIColorImage current_image = null;
-
- /** Helper method to concatenate paths, similar to Python's os.path.join(). */
- private String concatenate_paths(String path1, String path2) {
- if (path1.charAt(path1.length() - 1) == '/')
- return path1 + path2;
- else
- return path1 + "/" + path2;
- }
-
- /** Gets the name to display at the top of the image window. */
- public String GetName() {
- return images[image_index];
- }
-
- /** Constructor
- *
- * @param path_to_images is the path to the directory where our images are.
- */
- public TestImageGetter(String path_to_images) {
- this.path_to_images = path_to_images;
- }
-
- /** Gets the next debugging image.
- *
- * @return Returns a WPIColorImage.
- */
- public WPIColorImage GetNext() {
- image_index++;
- if (image_index < images.length) {
- String image_to_get = images[image_index];
- try {
- current_image = new WPIColorImage(ImageIO.read(new File(concatenate_paths(path_to_images, image_to_get))));
- return current_image;
- }
- catch (IOException e) {
- LOG.warning("Could not open file.");
- return null;
- }
- }
- else
- image_index--;
- return null;
- }
-
- /** Gets the previous debugging image.
- *
- * @return Returns a WPIColorImage.
- */
- public WPIColorImage GetPrev() {
- image_index--;
- if (image_index >= 0) {
- String image_to_get = images[image_index];
- try {
- current_image = new WPIColorImage(ImageIO.read(new File(concatenate_paths(path_to_images, image_to_get))));
- return current_image;
- }
- catch (IOException e) {
- LOG.warning("Could not open file.");
- return null;
- }
- }
- else
- image_index++;
- return null;
- }
-
- /** Gets the current debugging image. This is vestigial; it is no longer used.
- *
- * @return Returns a WPIColorImage
- */
- public WPIColorImage GetCurrent() {
- return current_image;
- }
-}
diff --git a/971CV/src/org/frc971/TimeFormatter.java b/971CV/src/org/frc971/TimeFormatter.java
deleted file mode 100644
index 1bff96f..0000000
--- a/971CV/src/org/frc971/TimeFormatter.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- *
- */
-package org.frc971;
-
-import java.util.logging.Formatter;
-import java.util.logging.LogRecord;
-import java.util.Date;
-
-/**
- * @author daniel
- *
- */
-
- /** Formats log messages with a timestamp prefix. */
-public class TimeFormatter extends Formatter{
-
- /** Constructor, see Formatter. */
- public TimeFormatter() {
- super();
- }
-
- /** Formats a message in the proper way.
- * @return Includes time, name of logger, level and message.
- */
- public String format(LogRecord message) {
- //we need to include the date and time in our message
- StringBuffer out = new StringBuffer();
- out.append("@");
- Date date = new Date(message.getMillis());
- out.append(date.toString());
- out.append(" in [");
- //add our logger's name
- out.append(message.getLoggerName());
- out.append("]: (");
- //add message level
- out.append(message.getLevel().getName());
- out.append(") ");
- //add actual message
- out.append(formatMessage(message));
- out.append("\n");
- return out.toString();
- }
-
-}
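
A record run through this formatter therefore comes out as a single line, for example (the date, logger name, and message shown are illustrative):

    @Mon Apr 01 12:00:00 PDT 2013 in [global]: (INFO) Reading headers...
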
diff --git a/971CV/src/org/frc971/VisionTuner.java b/971CV/src/org/frc971/VisionTuner.java
deleted file mode 100644
index 357b58f..0000000
--- a/971CV/src/org/frc971/VisionTuner.java
+++ /dev/null
@@ -1,307 +0,0 @@
-package org.frc971;
-
-import java.awt.BorderLayout;
-import java.awt.GridLayout;
-import java.awt.event.ActionEvent;
-import java.awt.event.ActionListener;
-import java.awt.event.KeyEvent;
-
-import java.util.Arrays;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-
-import javax.swing.JButton;
-import javax.swing.JLabel;
-import javax.swing.JPanel;
-import javax.swing.JSlider;
-import javax.swing.WindowConstants;
-import javax.swing.event.ChangeEvent;
-import javax.swing.event.ChangeListener;
-
-import com.googlecode.javacv.CanvasFrame;
-import edu.wpi.first.wpijavacv.WPIColorImage;
-import edu.wpi.first.wpijavacv.WPIImage;
-
-/* REQUIRED JAVA LIBRARIES:
- * external_jars/
- * javacpp.jar
- * javacv-YOUR_OS.jar
- * javacv.jar
- * WPIJavaCV.jar
- *
- * REQUIRED NATIVE CODE LIBRARIES ON $PATH:
- * Program Files/WPIJavaCV/ [for example]
- * JavaCV_2.2.0/javacv-bin/javacv-YOUR_OS.jar
- * OpenCV_2.2.0/bin/*
- *
- * The native libraries and javacv-YOUR_OS.jar must match the 32 vs. 64-bit JVM.
- */
-/**
- * FRC 2013 vision-target recognizer tuner app.
- *
- * <p>
- * See {@link #processEvents()} for the keystroke commands.
- *
- * @author jerry
- * @author daniel
- */
-public class VisionTuner {
- private Recognizer recognizer = new Recognizer2013();
-
- private final static Logger LOG = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
-
- private final CanvasFrame cameraFrame = new CanvasFrame("Camera");
- private final JPanel panel = new JPanel();
- private final JSlider hueMinSlider = new JSlider();
- private final JSlider hueMaxSlider = new JSlider();
- private final JSlider satMinSlider = new JSlider();
- private final JSlider valMinSlider = new JSlider();
- private final JButton showCalibration = new JButton("Calibrate");
-
- private ResultSender sender = null;
-
- private int totalFrames = -1; // don't count the first (warm-up) frame
- private double totalMsec;
- private double minMsec = Double.MAX_VALUE;
- private double maxMsec;
-
- private TestImageGetter getter;
-
- private WPIColorImage current;
-
- private String currentWindowTitle;
-
- private boolean debug = false;
-
- public VisionTuner() {
- //set logger to log everything
- LOG.setLevel(Level.ALL);
- try {
- LogHandler handler = new LogHandler("ds_vision.log");
- TimeFormatter formatter = new TimeFormatter();
- handler.setFormatter(formatter);
- LOG.addHandler(handler);
- }
- catch (FileNotFoundException e) {
- Messages.warning("Logging initialization failed.");
- }
-
- //initialize result sender
- try {
- sender = new ResultSender();
- }
- catch (IOException e) {
- LOG.severe("Server initialization failed: " + e.getMessage() + ". Result reporting disabled.");
- }
- cameraFrame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
-
- cameraFrame.getContentPane().add(panel, BorderLayout.SOUTH);
- panel.setLayout(new GridLayout(0, 1, 0, 0));
-
- showCalibration.setToolTipText("Click here if the system is not finding targets well enough.");
- panel.add(showCalibration);
- showCalibration.addActionListener(new ActionListener() {
- public void actionPerformed(ActionEvent e) {
- showCalibrationWindow();
- }
- });
-
- LOG.fine("Initial HSV range ["
- + hueMinSlider.getValue() + " .. "
- + hueMaxSlider.getValue() + "] "
- + satMinSlider.getValue() + "+ "
- + valMinSlider.getValue() + "+");
- }
-
- /** Shows a calibration window when the user clicks the Calibrate button. */
- private void showCalibrationWindow() {
- final CanvasFrame calibrationWindow = new CanvasFrame("Calibration");
- calibrationWindow.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
-
- final JPanel panel = new JPanel();
- calibrationWindow.getContentPane().add(panel, BorderLayout.SOUTH);
- panel.setLayout(new GridLayout(3, 3, 0, 0));
-
- hueMinSlider.setToolTipText("minimum HSV hue");
- hueMinSlider.setMaximum(255);
- hueMinSlider.setValue(recognizer.getHueMin());
- panel.add(hueMinSlider);
-
- panel.add(new JLabel("min hue max hue"));
-
- hueMaxSlider.setToolTipText("maximum HSV hue");
- hueMaxSlider.setMaximum(255);
- hueMaxSlider.setValue(recognizer.getHueMax());
- panel.add(hueMaxSlider);
-
- satMinSlider.setToolTipText("minimum HSV color saturation");
- satMinSlider.setMaximum(255);
- satMinSlider.setValue(recognizer.getSatMin());
- panel.add(satMinSlider);
-
- panel.add(new JLabel("min saturation max saturation"));
-
- valMinSlider.setToolTipText("minimum HSV brightness value");
- valMinSlider.setMaximum(255);
- valMinSlider.setValue(recognizer.getValMin());
- panel.add(valMinSlider);
-
- panel.add(new JLabel("")); //empty cells can cause problems
-
- final JButton done = new JButton("Done");
- panel.add(done);
- done.addActionListener(new ActionListener() {
- public void actionPerformed(ActionEvent e) {
- calibrationWindow.dispose();
- }
- });
-
- panel.add(new JLabel("")); //empty cells can cause problems
-
- ChangeListener sliderListener = new ChangeListener() {
- @Override
- public void stateChanged(ChangeEvent e) {
- LOG.fine("New HSV range ["
- + hueMinSlider.getValue() + " .. "
- + hueMaxSlider.getValue() + "] "
- + satMinSlider.getValue() + "+ "
- + valMinSlider.getValue() + "+");
- recognizer.setHSVRange(
- hueMinSlider.getValue(), hueMaxSlider.getValue(),
- satMinSlider.getValue(),
- valMinSlider.getValue());
- if (debug) {
- processImage(current, null);
- }
- }
- };
-
- hueMinSlider.addChangeListener(sliderListener);
- hueMaxSlider.addChangeListener(sliderListener);
- satMinSlider.addChangeListener(sliderListener);
- valMinSlider.addChangeListener(sliderListener);
-
- calibrationWindow.pack();
-
- }
-
- /**
- * Runs the recognizer on one camera image, displays the result,
- * and sends the targeting data to any connected clients.
- */
- private void processImage(WPIColorImage cameraImage, String title) {
- current = cameraImage;
-
- //set window title if it needs to be changed
- if (title != null && !title.equals(currentWindowTitle)) {
- cameraFrame.setTitle(title);
- currentWindowTitle = title;
- }
-
- long startTime = System.nanoTime();
- Target target = recognizer.processImage(cameraImage);
- WPIImage processedImage = target.editedPicture;
- long endTime = System.nanoTime();
-
- cameraFrame.showImage(processedImage.getBufferedImage());
-
- double milliseconds = (endTime - startTime) / 1e6;
- if (++totalFrames > 0) {
- totalMsec += milliseconds;
- minMsec = Math.min(minMsec, milliseconds);
- maxMsec = Math.max(maxMsec, milliseconds);
- LOG.fine("The recognizer took " + milliseconds + " ms, " +
- (1000 * totalFrames / totalMsec) + " fps, %.2f avg");
- }
-
- //send results to atom. (and any connected clients)
- if (sender != null) {
- sender.send(target.azimuth, target.elevation, target.range);
- }
-
- }
-
- private void previousImage() {
- WPIColorImage to_process = getter.GetPrev();
- if (to_process != null)
- processImage(to_process, getter.GetName());
- }
-
- private void nextImage() {
- WPIColorImage to_process = getter.GetNext();
- if (to_process != null)
- processImage(to_process, getter.GetName());
- }
-
- private void processEvents() {
- KeyEvent e = cameraFrame.waitKey();
-
- switch (e.getKeyCode()) {
- case KeyEvent.VK_LEFT: // left arrow key: go to previous image
- previousImage();
- break;
- case KeyEvent.VK_RIGHT: // right arrow key: go to next image
- nextImage();
- break;
- case KeyEvent.VK_Q: // Q: print time measurements then quit
- LOG.fine("The recognizer took " + (totalMsec / totalFrames) + "ms avg, " + minMsec +" min,"
- + maxMsec + " max, " + (1000 * totalFrames / totalMsec) + " fps avg");
- System.exit(0);
- }
- }
-
- public static void main(final String[] args) {
- VisionTuner tuner = new VisionTuner();
- Messages.SetWindow(tuner.cameraFrame);
-
- String atomIP = null;
- try {
- atomIP = args[0];
- }
- catch (ArrayIndexOutOfBoundsException e) {
- System.out.println("Usage: VisionTuner [atom ip]");
- System.exit(0);
- }
-
- if (Arrays.asList(args).contains("-debug")) {
- //debug mode has been requested
- tuner.debug = true;
-
- //show debugging windows
- tuner.recognizer.showIntermediateStages(true);
-
- tuner.getter = new TestImageGetter(".");
- WPIColorImage to_process = tuner.getter.GetNext();
- if (to_process != null) {
- tuner.processImage(to_process, tuner.getter.GetName());
- for (;;) {
- tuner.processEvents();
- }
- }
- else {
- LOG.severe("Could not load test images.");
- Messages.severe("Could not load test images.");
- }
- }
- else {
- try {
- HTTPClient client = new HTTPClient(atomIP);
- for (;;) {
- ImageWithTimestamp to_process = client.GetFrame();
- if (to_process != null && to_process.image != null) {
- tuner.processImage(to_process.image, client.GetName());
- LOG.fine("Captured time: " + Double.toString(to_process.timestamp));
- }
- }
- }
- catch (IOException e) {
- LOG.severe("Client initialization failed: " + e.getMessage() + ".");
- Messages.severe("Client initialization failed: " + e.getMessage() + ".");
- }
- }
- }
-
-}
diff --git a/971CV/src/org/frc971/private_aos_camera_jar.jar b/971CV/src/org/frc971/private_aos_camera_jar.jar
deleted file mode 100644
index 3836b5b..0000000
--- a/971CV/src/org/frc971/private_aos_camera_jar.jar
+++ /dev/null
Binary files differ