971CV now recognizes 2012 targets.

Next: Recognize 2013 targets.

Later: Remove WPICV dependencies.

git-svn-id: https://robotics.mvla.net/svn/frc971/2013/trunk/src@4119 f308d9b7-e957-4cde-b6ac-9a88185e7312
diff --git a/971CV/.classpath b/971CV/.classpath
index 01eafa6..c97b987 100644
--- a/971CV/.classpath
+++ b/971CV/.classpath
@@ -2,13 +2,6 @@
 <classpath>

 	<classpathentry kind="src" path="src"/>

 	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>

-	<classpathentry kind="lib" path="C:/WindRiver/WPILib/desktop-lib/networktables-desktop.jar" sourcepath="C:/WindRiver/WPILib/desktop-lib/networktables-desktop.src.zip">

-		<attributes>

-			<attribute name="javadoc_location" value="jar:file:/C:/WindRiver/WPILib/desktop-lib/networktables-desktop.javadoc.zip!/"/>

-		</attributes>

-	</classpathentry>

-	<classpathentry kind="lib" path="C:/Program Files/SmartDashboard/SmartDashboard.jar" sourcepath="C:/sourcecontrolled/FIRSTforge/SmartDashboard/trunk/smartdashboard/src"/>

-	<classpathentry kind="lib" path="C:/Program Files/SmartDashboard/extensions/WPICameraExtension.jar" sourcepath="C:/sourcecontrolled/FIRSTforge/SmartDashboard/trunk/extensions"/>

 	<classpathentry kind="lib" path="C:/Program Files/SmartDashboard/extensions/lib/javacpp.jar"/>

 	<classpathentry kind="lib" path="C:/Program Files/SmartDashboard/extensions/lib/javacv.jar">

 		<attributes>

diff --git a/971CV/src/edu/wpi/first/wpijavacv/DaisyExtensions.java b/971CV/src/edu/wpi/first/wpijavacv/DaisyExtensions.java
new file mode 100644
index 0000000..f914f4d
--- /dev/null
+++ b/971CV/src/edu/wpi/first/wpijavacv/DaisyExtensions.java
@@ -0,0 +1,86 @@
package edu.wpi.first.wpijavacv;

import java.util.ArrayList;

import com.googlecode.javacv.cpp.opencv_core;
import com.googlecode.javacv.cpp.opencv_core.CvMemStorage;
import com.googlecode.javacv.cpp.opencv_core.CvSeq;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
import com.googlecode.javacv.cpp.opencv_imgproc;

/**
 * Team Miss Daisy extensions to wpijavacv, mostly to expose more methods.
 *<p>
 * TODO(jerry): Wean off of wpijavacv.
 *
 * @author jrussell
 * @author jerry -- Moved storage to an instance variable
 */
public class DaisyExtensions {
    // Scratch storage reused across frames; cleared via releaseMemory().
    private final CvMemStorage storage = CvMemStorage.create();

    public DaisyExtensions() {
    }

    /** Exposes the underlying CvSeq of a WPIContour. */
    public static CvSeq getCvSeq(WPIContour contour) {
        return contour.getCVSeq();
    }

    /** Wraps a raw CvSeq as a WPIContour. */
    public static WPIContour makeWPIContour(CvSeq seq) {
        return new WPIContour(seq);
    }

    // Copies 'src' into a freshly allocated image with the given channel
    // count so the returned wrapper owns its own pixel buffer.
    private static IplImage copyWithChannels(IplImage src, int nChannels) {
        IplImage copy = IplImage.create(src.cvSize(), src.depth(), nChannels);
        opencv_core.cvCopy(src, copy);
        return copy;
    }

    /** Wraps a copy of 'arr' as a single-channel grayscale image. */
    public static WPIGrayscaleImage makeWPIGrayscaleImage(IplImage arr) {
        return new WPIGrayscaleImage(copyWithChannels(arr, 1));
    }

    /** Wraps a copy of 'arr' as a 3-channel color image. */
    public static WPIColorImage makeWPIColorImage(IplImage arr) {
        return new WPIColorImage(copyWithChannels(arr, 3));
    }

    /** Wraps a copy of 'arr' as a single-channel binary image. */
    public static WPIBinaryImage makeWPIBinaryImage(IplImage arr) {
        return new WPIBinaryImage(copyWithChannels(arr, 1));
    }

    /** Exposes the underlying IplImage of a WPIImage. */
    public static IplImage getIplImage(WPIImage image) {
        return image.image;
    }

    /**
     * Finds the convex hulls of all contours in 'image'.
     *<p>
     * The returned contours reference this instance's storage; call
     * releaseMemory() when finished with them.
     */
    public WPIContour[] findConvexContours(WPIBinaryImage image) {
        image.validateDisposed();

        // cvFindContours modifies its input, so work on a scratch copy.
        IplImage scratch = IplImage.create(image.image.cvSize(),
            image.image.depth(), 1);
        opencv_core.cvCopy(image.image, scratch);

        CvSeq contourSeq = new CvSeq();
        opencv_imgproc.cvFindContours(scratch, storage, contourSeq, 256,
            opencv_imgproc.CV_RETR_LIST,
            opencv_imgproc.CV_CHAIN_APPROX_TC89_KCOS);

        ArrayList<WPIContour> hulls = new ArrayList<WPIContour>();
        for (; !WPIDisposable.isNull(contourSeq);
                contourSeq = contourSeq.h_next()) {
            CvSeq hull = opencv_imgproc.cvConvexHull2(contourSeq, storage,
                opencv_imgproc.CV_CLOCKWISE, 1);
            hulls.add(new WPIContour(opencv_core.cvCloneSeq(hull, storage)));
        }

        scratch.release();
        return hulls.toArray(new WPIContour[hulls.size()]);
    }

    /** Frees the contour storage allocated by findConvexContours(). */
    public void releaseMemory() {
        opencv_core.cvClearMemStorage(storage);
    }

}

diff --git a/971CV/src/org/frc971/DebugCanvas.java b/971CV/src/org/frc971/DebugCanvas.java
new file mode 100644
index 0000000..3be9a45
--- /dev/null
+++ b/971CV/src/org/frc971/DebugCanvas.java
@@ -0,0 +1,29 @@
+package org.frc971;

+

+import com.googlecode.javacv.CanvasFrame;

+import com.googlecode.javacv.cpp.opencv_core.IplImage;

+

+public class DebugCanvas {

+    public static boolean show = true;

+    private CanvasFrame canvasFrame;

+    private String name;

+

+    public DebugCanvas(String name) {

+	this.name = name;

+    }

+

+    public void showImage(IplImage image) {

+        if (show) {

+            if (canvasFrame == null) {

+        	canvasFrame = new CanvasFrame(name);

+            }

+            canvasFrame.setName(name);

+            canvasFrame.showImage(image.getBufferedImage());

+        } else {

+            if (canvasFrame != null) {

+        	canvasFrame.dispose();

+        	canvasFrame = null;

+            }

+        }

+    }

+}

diff --git a/971CV/src/org/frc971/Recognizer.java b/971CV/src/org/frc971/Recognizer.java
new file mode 100644
index 0000000..556cff0
--- /dev/null
+++ b/971CV/src/org/frc971/Recognizer.java
@@ -0,0 +1,19 @@
package org.frc971;

import edu.wpi.first.wpijavacv.WPIColorImage;
import edu.wpi.first.wpijavacv.WPIImage;

/**
 * Vision target recognizer.
 *
 * @author jerry
 */
public interface Recognizer {
    /**
     * Processes a camera image, returning an image to display for targeting
     * and debugging, e.g. with cross-hairs and marked targets.
     *<p>
     * SIDE EFFECTS: May modify cameraImage.
     *
     * @param cameraImage the camera frame to analyze; may be drawn on
     *     in place
     * @return an annotated image suitable for display
     */
    WPIImage processImage(WPIColorImage cameraImage);
}

diff --git a/971CV/src/org/frc971/Recognizer2013.java b/971CV/src/org/frc971/Recognizer2013.java
new file mode 100644
index 0000000..9654e3a
--- /dev/null
+++ b/971CV/src/org/frc971/Recognizer2013.java
@@ -0,0 +1,205 @@
package org.frc971;

import java.util.ArrayList;

import com.googlecode.javacv.cpp.opencv_core;
import com.googlecode.javacv.cpp.opencv_core.CvSize;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
import com.googlecode.javacv.cpp.opencv_imgproc;
import com.googlecode.javacv.cpp.opencv_imgproc.IplConvKernel;

import edu.wpi.first.wpijavacv.DaisyExtensions;
import edu.wpi.first.wpijavacv.WPIBinaryImage;
import edu.wpi.first.wpijavacv.WPIColor;
import edu.wpi.first.wpijavacv.WPIColorImage;
import edu.wpi.first.wpijavacv.WPIContour;
import edu.wpi.first.wpijavacv.WPIImage;
import edu.wpi.first.wpijavacv.WPIPoint;
import edu.wpi.first.wpijavacv.WPIPolygon;

/**
 * Vision target recognizer for FRC 2013.
 *<p>
 * Pipeline: HSV threshold -> morphological hole closing -> convex contours
 * -> polygon filtering -> pick the highest qualifying quadrilateral.
 *
 * @author jerry
 */
public class Recognizer2013 implements Recognizer {

    // Constants that need to be tuned
    static final double kRoughlyHorizontalSlope = Math.tan(Math.toRadians(20));
    static final double kRoughlyVerticalSlope = Math.tan(Math.toRadians(90 - 20));
    static final int kMinWidth = 20;
    static final int kMaxWidth = 400;
    static final int kHoleClosingIterations = 9;

    static final double kShooterOffsetDeg = 0;
    static final double kHorizontalFOVDeg = 47.0;
    // NOTE(review): not yet referenced by this class -- presumably for
    // upcoming distance/elevation math; confirm before removing.
    static final double kVerticalFOVDeg = 480.0 / 640.0 * kHorizontalFOVDeg;

    // HSV thresholds for the target color: a hue band plus saturation and
    // value floors. (Replaces the former inline literals 60 +/- 15, 200, 55.)
    static final int kMinHue = 60 - 15;
    static final int kMaxHue = 60 + 15;
    static final int kMinSaturation = 200;
    static final int kMinValue = 55;

    // Colors for drawing indicators on the image.
    private static final WPIColor reject1Color = WPIColor.GRAY;
    private static final WPIColor reject2Color = WPIColor.YELLOW;
    private static final WPIColor candidateColor = WPIColor.BLUE;
    private static final WPIColor targetColor = new WPIColor(255, 0, 0);

    // Show intermediate images for parameter tuning.
    private final DebugCanvas thresholdedCanvas = new DebugCanvas("thresholded");
    private final DebugCanvas morphedCanvas = new DebugCanvas("morphed");

    // JavaCV data to reuse for each frame.
    private final DaisyExtensions daisyExtensions = new DaisyExtensions();
    private final IplConvKernel morphKernel = IplConvKernel.create(3, 3, 1, 1,
            opencv_imgproc.CV_SHAPE_RECT, null);
    private CvSize size = null;        // size the buffers were allocated for
    private WPIContour[] contours;
    private final ArrayList<WPIPolygon> polygons = new ArrayList<WPIPolygon>();
    private WPIColorImage rawImage;    // working copy of the camera frame
    private IplImage bin;              // binary mask of candidate pixels
    private IplImage hsv;
    private IplImage hue;
    private IplImage sat;
    private IplImage val;
    private WPIPoint linePt1, linePt2; // crosshair endpoints

    public Recognizer2013() {
    }

    @Override
    public WPIImage processImage(WPIColorImage cameraImage) {
        prepareBuffers(cameraImage);

        IplImage input = DaisyExtensions.getIplImage(rawImage);
        thresholdTargetPixels(input);

        thresholdedCanvas.showImage(bin);

        // Fill in gaps using binary morphology.
        opencv_imgproc.cvMorphologyEx(bin, bin, null, morphKernel,
            opencv_imgproc.CV_MOP_CLOSE, kHoleClosingIterations);

        morphedCanvas.showImage(bin);

        // Find contours and reduce them to candidate polygons.
        WPIBinaryImage binWpi = DaisyExtensions.makeWPIBinaryImage(bin);
        contours = daisyExtensions.findConvexContours(binWpi);
        selectCandidatePolygons();

        WPIPolygon bestTarget = pickBestTarget();
        reportTarget(bestTarget);

        // Draw a crosshair
        rawImage.drawLine(linePt1, linePt2, targetColor, 1);

        daisyExtensions.releaseMemory();
        //System.gc();

        return rawImage;
    }

    /**
     * (Re)allocates the intermediate images if the input is a different size
     * than the previous image, then copies the camera frame into rawImage.
     */
    private void prepareBuffers(WPIColorImage cameraImage) {
        if (size == null || size.width() != cameraImage.getWidth()
                || size.height() != cameraImage.getHeight()) {
            // NOTE(review): on a size change, the previously allocated
            // images are dropped without an explicit release() and are left
            // to JavaCV's GC-time deallocators; acceptable while resizes
            // are rare -- confirm if frame sizes can change per-frame.
            size = opencv_core.cvSize(cameraImage.getWidth(),
                    cameraImage.getHeight());
            rawImage = DaisyExtensions.makeWPIColorImage(
                    DaisyExtensions.getIplImage(cameraImage));
            bin = IplImage.create(size, 8, 1);
            hsv = IplImage.create(size, 8, 3);
            hue = IplImage.create(size, 8, 1);
            sat = IplImage.create(size, 8, 1);
            val = IplImage.create(size, 8, 1);

            // Place the aiming crosshair, shifted by the shooter's
            // horizontal offset from the camera.
            int horizontalOffsetPixels = (int) Math.round(
                    kShooterOffsetDeg * size.width() / kHorizontalFOVDeg);
            int x = size.width() / 2 + horizontalOffsetPixels;
            linePt1 = new WPIPoint(x, size.height() - 1);
            linePt2 = new WPIPoint(x, 0);
        } else {
            opencv_core.cvCopy(DaisyExtensions.getIplImage(cameraImage),
                    DaisyExtensions.getIplImage(rawImage));
        }
    }

    /**
     * Thresholds the pixels of 'input' in HSV color space, leaving a binary
     * mask of likely target pixels in 'bin'.
     */
    private void thresholdTargetPixels(IplImage input) {
        // TODO(jerry): Do this in one pass of a pixel-processing loop.
        opencv_imgproc.cvCvtColor(input, hsv, opencv_imgproc.CV_BGR2HSV);
        opencv_core.cvSplit(hsv, hue, sat, val, null);

        // Select the hue band [kMinHue, kMaxHue] by ANDing a lower-bound
        // threshold with an inverted upper-bound threshold. (If the target
        // color were red -- at the ends of the cyclic hue space -- this
        // would instead OR a threshold with an inverted threshold.)
        opencv_imgproc.cvThreshold(hue, bin, kMinHue, 255,
            opencv_imgproc.CV_THRESH_BINARY);
        opencv_imgproc.cvThreshold(hue, hue, kMaxHue, 255,
            opencv_imgproc.CV_THRESH_BINARY_INV);
        opencv_imgproc.cvThreshold(sat, sat, kMinSaturation, 255,
            opencv_imgproc.CV_THRESH_BINARY);
        opencv_imgproc.cvThreshold(val, val, kMinValue, 255,
            opencv_imgproc.CV_THRESH_BINARY);

        // Combine the results to obtain a binary image which is mostly the
        // interesting pixels.
        opencv_core.cvAnd(hue, bin, bin, null);
        opencv_core.cvAnd(bin, sat, bin, null);
        opencv_core.cvAnd(bin, val, bin, null);
    }

    /**
     * Simplifies the found contours to polygons and filters them by size
     * and aspect ratio, leaving the survivors in 'polygons'.
     */
    private void selectCandidatePolygons() {
        // TODO(jerry): Use tunable constants for the aspect-ratio bounds
        // and the polygon-approximation tolerance.
        polygons.clear();
        for (WPIContour c : contours) {
            double ratio = ((double) c.getHeight()) / ((double) c.getWidth());
            if (ratio < 1.0 && ratio > 0.5 && c.getWidth() >= kMinWidth
                    && c.getWidth() <= kMaxWidth) {
                polygons.add(c.approxPolygon(20));
            }
        }
    }

    /**
     * Picks the highest polygon that looks like a vision target (a convex
     * quadrilateral with a roughly horizontal top and two roughly vertical
     * sides), drawing candidate/reject markers on rawImage as it goes.
     *
     * @return the best target polygon, or null if none qualifies
     */
    private WPIPolygon pickBestTarget() {
        WPIPolygon bestTarget = null;
        int highestY = Integer.MAX_VALUE; // smaller y is higher in the image

        for (WPIPolygon p : polygons) {
            if (p.isConvex() && p.getNumVertices() == 4) { // quadrilateral
                WPIPoint[] points = p.getPoints();
                // We expect the polygon to have a top line that is nearly
                // horizontal and two side lines that are nearly vertical.
                int numRoughlyHorizontal = 0;
                int numRoughlyVertical = 0;
                for (int i = 0; i < 4; ++i) {
                    double dy = points[i].getY() - points[(i + 1) % 4].getY();
                    double dx = points[i].getX() - points[(i + 1) % 4].getX();
                    double slope = Double.MAX_VALUE;
                    if (dx != 0) {
                        slope = Math.abs(dy / dx);
                    }

                    if (slope < kRoughlyHorizontalSlope) {
                        ++numRoughlyHorizontal;
                    } else if (slope > kRoughlyVerticalSlope) {
                        ++numRoughlyVertical;
                    }
                }

                if (numRoughlyHorizontal >= 1 && numRoughlyVertical == 2) {
                    rawImage.drawPolygon(p, candidateColor, 2);

                    int pCenterX = p.getX() + p.getWidth() / 2;
                    int pCenterY = p.getY() + p.getHeight() / 2;

                    rawImage.drawPoint(new WPIPoint(pCenterX, pCenterY),
                            candidateColor, 3);
                    if (pCenterY < highestY) {
                        bestTarget = p;
                        highestY = pCenterY;
                    }
                } else {
                    rawImage.drawPolygon(p, reject2Color, 1);
                }
            } else {
                rawImage.drawPolygon(p, reject1Color, 1);
            }
        }
        return bestTarget;
    }

    /** Highlights 'bestTarget' on rawImage and logs the result. */
    private void reportTarget(WPIPolygon bestTarget) {
        if (bestTarget != null) {
            double w = bestTarget.getWidth();
            double h = bestTarget.getHeight();
            double x = bestTarget.getX() + w / 2;
            double y = bestTarget.getY() + h / 2;

            rawImage.drawPolygon(bestTarget, targetColor, 2);

            System.out.println("Best target at (" + x + ", " + y + ") size "
                    + w + " x " + h);
        } else {
            System.out.println("No target found");
        }
    }

}

diff --git a/971CV/src/org/frc971/VisionTuner.java b/971CV/src/org/frc971/VisionTuner.java
index 7ecd98a..2d57ea5 100644
--- a/971CV/src/org/frc971/VisionTuner.java
+++ b/971CV/src/org/frc971/VisionTuner.java
@@ -10,6 +10,7 @@
 import com.googlecode.javacv.CanvasFrame;

 

 import edu.wpi.first.wpijavacv.WPIColorImage;

+import edu.wpi.first.wpijavacv.WPIImage;

 

 /* REQUIRED JAVA LIBRARIES:

  *   Program Files/SmartDashboard/

@@ -35,6 +36,7 @@
     private WPIColorImage[] testImages;

     private final CanvasFrame cameraFrame = new CanvasFrame("Camera");

     private int currentIndex = 0;

+    private Recognizer recognizer = new Recognizer2013();

 

     public VisionTuner(String[] imageFilenames) {

         cameraFrame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);

@@ -44,7 +46,6 @@
         loadTestImages(imageFilenames);

     }

 

-

     /**

      * Loads the named test image files.

      * Sets testImageFilenames and testImages.

@@ -59,9 +60,11 @@
             System.out.println("Loading image file: " + imageFilename);

             WPIColorImage rawImage = null;

             try {

-                rawImage = new WPIColorImage(ImageIO.read(new File(imageFilename)));

+                rawImage = new WPIColorImage(ImageIO.read(

+                	new File(imageFilename)));

             } catch (IOException e) {

-                System.err.println("Couldn't load image file: " + imageFilename + ": " + e.getMessage());

+                System.err.println("Couldn't load image file: " + imageFilename

+                	+ ": " + e.getMessage());

                 System.exit(1);

                 return;

             }

@@ -70,9 +73,11 @@
     }

 

     private void processCurrentImage() {

-        WPIColorImage rawImage = testImages[currentIndex];

-        cameraFrame.showImage(rawImage.getBufferedImage());

+        WPIColorImage cameraImage = testImages[currentIndex];

         cameraFrame.setTitle(testImageFilenames[currentIndex]);

+

+        WPIImage processedImage = recognizer.processImage(cameraImage);

+        cameraFrame.showImage(processedImage.getBufferedImage());

     }

 

     private void previousImage() {