A slightly modified version of the Miss Daisy 2012 robot vision code, updated for the 2013 libraries.

git-svn-id: https://robotics.mvla.net/svn/frc971/2013/trunk/src@4110 f308d9b7-e957-4cde-b6ac-9a88185e7312
diff --git a/DaisyCV/.classpath b/DaisyCV/.classpath
new file mode 100644
index 0000000..5d9405c
--- /dev/null
+++ b/DaisyCV/.classpath
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="src" path="src"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+	<classpathentry kind="lib" path="C:/Program Files/SmartDashboard/SmartDashboard.jar" sourcepath="C:/sourcecontrolled/FIRSTforge/SmartDashboard/trunk/smartdashboard/src">
+		<attributes>
+			<attribute name="javadoc_location" value="jar:file:/C:/WindRiver/WPILib/SmartDashboard.javadoc.zip!/"/>
+		</attributes>
+	</classpathentry>
+	<classpathentry kind="lib" path="C:/Program Files/SmartDashboard/extensions/WPICameraExtension.jar" sourcepath="C:/sourcecontrolled/FIRSTforge/SmartDashboard/trunk/extensions"/>
+	<classpathentry kind="lib" path="C:/Program Files/SmartDashboard/extensions/lib/javacpp.jar"/>
+	<classpathentry kind="lib" path="C:/Program Files/SmartDashboard/extensions/lib/javacv.jar">
+		<attributes>
+			<attribute name="org.eclipse.jdt.launching.CLASSPATH_ATTR_LIBRARY_PATH_ENTRY" value="C:/Program Files/WPIJavaCV/OpenCV_2.2.0/bin"/>
+		</attributes>
+	</classpathentry>
+	<classpathentry kind="lib" path="C:/Program Files/SmartDashboard/extensions/lib/javacv-windows-x86.jar">
+		<attributes>
+			<attribute name="org.eclipse.jdt.launching.CLASSPATH_ATTR_LIBRARY_PATH_ENTRY" value="C:/Program Files/WPIJavaCV/JavaCV_2.2.0/javacv-bin"/>
+		</attributes>
+	</classpathentry>
+	<classpathentry kind="lib" path="C:/Program Files/SmartDashboard/extensions/lib/WPIJavaCV.jar" sourcepath="C:/sourcecontrolled/FIRSTforge/SmartDashboard/trunk/WPIJavaCV/src"/>
+	<classpathentry kind="lib" path="C:/WindRiver/WPILib/desktop-lib/networktables-desktop.jar" sourcepath="C:/WindRiver/WPILib/desktop-lib/networktables-desktop.src.zip">
+		<attributes>
+			<attribute name="javadoc_location" value="jar:file:/C:/WindRiver/WPILib/desktop-lib/networktables-desktop.javadoc.zip!/"/>
+		</attributes>
+	</classpathentry>
+	<classpathentry kind="output" path="bin"/>
+</classpath>

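The .classpath above hard-codes the Windows install locations of SmartDashboard, WPIJavaCV and the OpenCV 2.2.0 binaries, so a missing native library only shows up as an UnsatisfiedLinkError at runtime. A minimal sanity check, assuming only the javacpp Loader API shipped in extensions/lib/javacpp.jar (the class name here is hypothetical):

    import com.googlecode.javacpp.Loader;
    import com.googlecode.javacv.cpp.opencv_core;

    // Hypothetical helper: fails fast if the OpenCV natives referenced by the
    // .classpath entries above cannot be found on PATH / java.library.path.
    public class NativeLibCheck {
        public static void main(String[] args) {
            try {
                Loader.load(opencv_core.class); // forces the native opencv_core libraries to load
                System.out.println("OpenCV native libraries loaded OK");
            } catch (UnsatisfiedLinkError e) {
                System.err.println("Could not load OpenCV natives: " + e.getMessage());
            }
        }
    }
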
diff --git a/DaisyCV/.project b/DaisyCV/.project
new file mode 100644
index 0000000..472e408
--- /dev/null
+++ b/DaisyCV/.project
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>DaisyCV</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+</projectDescription>
diff --git a/DaisyCV/green-test-images/image-01.jpg b/DaisyCV/green-test-images/image-01.jpg
new file mode 100644
index 0000000..3816924
--- /dev/null
+++ b/DaisyCV/green-test-images/image-01.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-02.jpg b/DaisyCV/green-test-images/image-02.jpg
new file mode 100644
index 0000000..818ebf1
--- /dev/null
+++ b/DaisyCV/green-test-images/image-02.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-03.jpg b/DaisyCV/green-test-images/image-03.jpg
new file mode 100644
index 0000000..382212a
--- /dev/null
+++ b/DaisyCV/green-test-images/image-03.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-04.jpg b/DaisyCV/green-test-images/image-04.jpg
new file mode 100644
index 0000000..408fb7a
--- /dev/null
+++ b/DaisyCV/green-test-images/image-04.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-05.jpg b/DaisyCV/green-test-images/image-05.jpg
new file mode 100644
index 0000000..ab21d5e
--- /dev/null
+++ b/DaisyCV/green-test-images/image-05.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-06.jpg b/DaisyCV/green-test-images/image-06.jpg
new file mode 100644
index 0000000..9e963da
--- /dev/null
+++ b/DaisyCV/green-test-images/image-06.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-07.jpg b/DaisyCV/green-test-images/image-07.jpg
new file mode 100644
index 0000000..e00b774
--- /dev/null
+++ b/DaisyCV/green-test-images/image-07.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-08.jpg b/DaisyCV/green-test-images/image-08.jpg
new file mode 100644
index 0000000..147498f
--- /dev/null
+++ b/DaisyCV/green-test-images/image-08.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-09.jpg b/DaisyCV/green-test-images/image-09.jpg
new file mode 100644
index 0000000..ad8dcbd
--- /dev/null
+++ b/DaisyCV/green-test-images/image-09.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-10.jpg b/DaisyCV/green-test-images/image-10.jpg
new file mode 100644
index 0000000..34465bf
--- /dev/null
+++ b/DaisyCV/green-test-images/image-10.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-11.jpg b/DaisyCV/green-test-images/image-11.jpg
new file mode 100644
index 0000000..03522e7
--- /dev/null
+++ b/DaisyCV/green-test-images/image-11.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-12.jpg b/DaisyCV/green-test-images/image-12.jpg
new file mode 100644
index 0000000..b0317f2
--- /dev/null
+++ b/DaisyCV/green-test-images/image-12.jpg
Binary files differ
diff --git a/DaisyCV/green-test-images/image-13.jpg b/DaisyCV/green-test-images/image-13.jpg
new file mode 100644
index 0000000..c329779
--- /dev/null
+++ b/DaisyCV/green-test-images/image-13.jpg
Binary files differ
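
The thirteen JPEGs above are the offline test set for the tracker: DaisyCVWidget.main (further down) takes image paths on the command line, loads each one with ImageIO, wraps it in a WPIColorImage, and times processImage over it. A small driver that feeds it the bundled images could look like the sketch below (the class name and the relative paths are assumptions):

    // Hypothetical driver for the offline harness defined in DaisyCVWidget below.
    // Paths assume the repository root as the working directory.
    public class RunGreenTestImages {
        public static void main(String[] args) {
            String[] paths = new String[13];
            for (int i = 0; i < paths.length; i++) {
                paths[i] = String.format("DaisyCV/green-test-images/image-%02d.jpg", i + 1);
            }
            edu.missdaisy.smartdashboard.daisycv.DaisyCVWidget.main(paths);
        }
    }
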
diff --git a/DaisyCV/src/edu/missdaisy/smartdashboard/daisycv/DaisyCVWidget.java b/DaisyCV/src/edu/missdaisy/smartdashboard/daisycv/DaisyCVWidget.java
new file mode 100644
index 0000000..ae555dd
--- /dev/null
+++ b/DaisyCV/src/edu/missdaisy/smartdashboard/daisycv/DaisyCVWidget.java
@@ -0,0 +1,412 @@
+package edu.missdaisy.smartdashboard.daisycv;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.NoSuchElementException;
+import java.util.Scanner;
+import java.util.TreeMap;
+
+import javax.imageio.ImageIO;
+
+import com.googlecode.javacv.CanvasFrame;
+import com.googlecode.javacv.cpp.opencv_core;
+import com.googlecode.javacv.cpp.opencv_core.CvSize;
+import com.googlecode.javacv.cpp.opencv_core.IplImage;
+import com.googlecode.javacv.cpp.opencv_imgproc;
+import com.googlecode.javacv.cpp.opencv_imgproc.IplConvKernel;
+
+import edu.wpi.first.smartdashboard.camera.WPICameraExtension;
+import edu.wpi.first.smartdashboard.gui.DashboardFrame;
+import edu.wpi.first.smartdashboard.robot.Robot;
+import edu.wpi.first.wpijavacv.DaisyExtensions;
+import edu.wpi.first.wpijavacv.WPIBinaryImage;
+import edu.wpi.first.wpijavacv.WPIColor;
+import edu.wpi.first.wpijavacv.WPIColorImage;
+import edu.wpi.first.wpijavacv.WPIContour;
+import edu.wpi.first.wpijavacv.WPIImage;
+import edu.wpi.first.wpijavacv.WPIPoint;
+import edu.wpi.first.wpijavacv.WPIPolygon;
+
+/* HOW TO GET THIS COMPILING:
+ *  1. Install the SmartDashboard using the installer (if on Windows)
+ *      1a. Verify that the OpenCV libraries are in your PATH (on Windows)
+ *  2. Add the following libraries to the project:
+ *     SmartDashboard.jar
+ *     extensions/WPICameraExtension.jar
+ *     lib/NetworkTable_Client.jar
+ *     extensions/lib/javacpp.jar
+ *     extensions/lib/javacv-*your environment*.jar
+ *     extensions/lib/javacv.jar
+ *     extensions/lib/WPIJavaCV.jar
+ *
+ */
+/**
+ * @author jrussell
+ */
+@SuppressWarnings("serial")
+public class DaisyCVWidget extends WPICameraExtension
+{
+    public static final String NAME = "DaisyCV Target Tracker";
+    private WPIColor targetColor = new WPIColor(255, 0, 0);
+
+    // Constants that need to be tuned
+    private static final double kNearlyHorizontalSlope = Math.tan(Math.toRadians(20));
+    private static final double kNearlyVerticalSlope = Math.tan(Math.toRadians(90-20));
+    private static final int kMinWidth = 20;
+    private static final int kMaxWidth = 200;
+    private static final double kRangeOffset = 0.0;
+    private static final int kHoleClosingIterations = 9;
+
+    private static final double kShooterOffsetDeg = -1.55;
+    private static final double kHorizontalFOVDeg = 47.0;
+
+    private static final double kVerticalFOVDeg = 480.0/640.0*kHorizontalFOVDeg;
+    private static final double kCameraHeightIn = 54.0;
+    private static final double kCameraPitchDeg = 21.0;
+    private static final double kTopTargetHeightIn = 98.0 + 2.0 + 9.0; // 98 to rim, +2 to bottom of target, +9 to center of target
+
+    private TreeMap<Double, Double> rangeTable;
+
+    private boolean m_debugMode = false;
+
+    // Store JavaCV temporaries as members to reduce memory management during processing
+    private CvSize size = null;
+    private WPIContour[] contours;
+    private ArrayList<WPIPolygon> polygons;
+    private IplConvKernel morphKernel;
+    private IplImage bin;
+    private IplImage hsv;
+    private IplImage hue;
+    private IplImage sat;
+    private IplImage val;
+    private WPIPoint linePt1;
+    private WPIPoint linePt2;
+    private int horizontalOffsetPixels;
+
+    public DaisyCVWidget()
+    {
+        this(false);
+    }
+
+    public DaisyCVWidget(boolean debug)
+    {
+        m_debugMode = debug;
+        morphKernel = IplConvKernel.create(3, 3, 1, 1, opencv_imgproc.CV_SHAPE_RECT, null);
+
+        rangeTable = new TreeMap<Double,Double>();
+        //rangeTable.put(110.0, 3800.0+kRangeOffset);
+        //rangeTable.put(120.0, 3900.0+kRangeOffset);
+        //rangeTable.put(130.0, 4000.0+kRangeOffset);
+        rangeTable.put(140.0, 3434.0+kRangeOffset);
+        rangeTable.put(150.0, 3499.0+kRangeOffset);
+        rangeTable.put(160.0, 3544.0+kRangeOffset);
+        rangeTable.put(170.0, 3574.0+kRangeOffset);
+        rangeTable.put(180.0, 3609.0+kRangeOffset);
+        rangeTable.put(190.0, 3664.0+kRangeOffset);
+        rangeTable.put(200.0, 3854.0+kRangeOffset);
+        rangeTable.put(210.0, 4034.0+kRangeOffset);
+        rangeTable.put(220.0, 4284.0+kRangeOffset);
+        rangeTable.put(230.0, 4434.0+kRangeOffset);
+        rangeTable.put(240.0, 4584.0+kRangeOffset);
+        rangeTable.put(250.0, 4794.0+kRangeOffset);
+        rangeTable.put(260.0, 5034.0+kRangeOffset);
+        rangeTable.put(270.0, 5234.0+kRangeOffset);
+
+        DaisyExtensions.init();
+    }
+
+
+    public double getRPMsForRange(double range)
+    {
+        double lowKey = -1.0;
+        double lowVal = -1.0;
+        for( double key : rangeTable.keySet() )
+        {
+            if( range < key )
+            {
+                double highVal = rangeTable.get(key);
+                if( lowKey > 0.0 )
+                {
+                    double m = (range-lowKey)/(key-lowKey);
+                    return lowVal+m*(highVal-lowVal);
+                }
+                else
+                    return highVal;
+            }
+            lowKey = key;
+            lowVal = rangeTable.get(key);
+        }
+
+        return 5234.0+kRangeOffset;
+    }
+
+    @Override
+    public WPIImage processImage(WPIColorImage rawImage)
+    {
+        double heading = 0.0;
+
+        // Get the current heading of the robot first
+        if( !m_debugMode )
+        {
+            try
+            {
+                heading = Robot.getTable().getNumber("Heading");
+            }
+            catch( NoSuchElementException e)
+            {
+            }
+            catch( IllegalArgumentException e )
+            {
+            }
+        }
+
+        if( size == null || size.width() != rawImage.getWidth() || size.height() != rawImage.getHeight() )
+        {
+            size = opencv_core.cvSize(rawImage.getWidth(),rawImage.getHeight());
+            bin = IplImage.create(size, 8, 1);
+            hsv = IplImage.create(size, 8, 3);
+            hue = IplImage.create(size, 8, 1);
+            sat = IplImage.create(size, 8, 1);
+            val = IplImage.create(size, 8, 1);
+            horizontalOffsetPixels =  (int)Math.round(kShooterOffsetDeg*(size.width()/kHorizontalFOVDeg));
+            linePt1 = new WPIPoint(size.width()/2+horizontalOffsetPixels,size.height()-1);
+            linePt2 = new WPIPoint(size.width()/2+horizontalOffsetPixels,0);
+        }
+        // Get the raw IplImages for OpenCV
+        IplImage input = DaisyExtensions.getIplImage(rawImage);
+
+        // Convert to HSV color space
+        opencv_imgproc.cvCvtColor(input, hsv, opencv_imgproc.CV_BGR2HSV);
+        opencv_core.cvSplit(hsv, hue, sat, val, null);
+
+        // Threshold each component separately
+        // Hue
+        // NOTE: the 2013 target is lit green (hue ~60 in OpenCV's 0-179 hue range), so we
+        // AND a lower-bound threshold with an inverted upper-bound threshold to keep 60 +/- 15
+        opencv_imgproc.cvThreshold(hue, bin, 60-15, 255, opencv_imgproc.CV_THRESH_BINARY);
+        opencv_imgproc.cvThreshold(hue, hue, 60+15, 255, opencv_imgproc.CV_THRESH_BINARY_INV);
+
+        // Saturation
+        opencv_imgproc.cvThreshold(sat, sat, 200, 255, opencv_imgproc.CV_THRESH_BINARY);
+
+        // Value
+        opencv_imgproc.cvThreshold(val, val, 55, 255, opencv_imgproc.CV_THRESH_BINARY);
+
+        // Combine the results to obtain our binary image which should for the most
+        // part only contain pixels that we care about
+        opencv_core.cvAnd(hue, bin, bin, null);
+        opencv_core.cvAnd(bin, sat, bin, null);
+        opencv_core.cvAnd(bin, val, bin, null);
+
+        // Uncomment the next two lines to see the raw binary image
+        //CanvasFrame result = new CanvasFrame("binary");
+        //result.showImage(bin.getBufferedImage());
+
+        // Fill in any gaps using binary morphology
+        opencv_imgproc.cvMorphologyEx(bin, bin, null, morphKernel, opencv_imgproc.CV_MOP_CLOSE, kHoleClosingIterations);
+
+        // Uncomment the next two lines to see the image post-morphology
+        //CanvasFrame result2 = new CanvasFrame("morph");
+        //result2.showImage(bin.getBufferedImage());
+
+        // Find contours
+        WPIBinaryImage binWpi = DaisyExtensions.makeWPIBinaryImage(bin);
+        contours = DaisyExtensions.findConvexContours(binWpi);
+
+        polygons = new ArrayList<WPIPolygon>();
+        for (WPIContour c : contours)
+        {
+            double ratio = ((double) c.getHeight()) / ((double) c.getWidth());
+            if (ratio < 1.0 && ratio > 0.5 && c.getWidth() > kMinWidth && c.getWidth() < kMaxWidth)
+            {
+                polygons.add(c.approxPolygon(20));
+            }
+        }
+
+        WPIPolygon square = null;
+        int highest = Integer.MAX_VALUE;
+
+        for (WPIPolygon p : polygons)
+        {
+            if (p.isConvex() && p.getNumVertices() == 4)
+            {
+                // We passed the first test...we fit a rectangle to the polygon
+                // Now do some more tests
+
+                WPIPoint[] points = p.getPoints();
+                // We expect to see a top line that is nearly horizontal, and two side lines that are nearly vertical
+                int numNearlyHorizontal = 0;
+                int numNearlyVertical = 0;
+                for( int i = 0; i < 4; i++ )
+                {
+                    double dy = points[i].getY() - points[(i+1) % 4].getY();
+                    double dx = points[i].getX() - points[(i+1) % 4].getX();
+                    double slope = Double.MAX_VALUE;
+                    if( dx != 0 )
+                        slope = Math.abs(dy/dx);
+
+                    if( slope < kNearlyHorizontalSlope )
+                        ++numNearlyHorizontal;
+                    else if( slope > kNearlyVerticalSlope )
+                        ++numNearlyVertical;
+                }
+
+                if(numNearlyHorizontal >= 1 && numNearlyVertical == 2)
+                {
+                    rawImage.drawPolygon(p, WPIColor.BLUE, 2);
+
+                    int pCenterX = (p.getX() + (p.getWidth() / 2));
+                    int pCenterY = (p.getY() + (p.getHeight() / 2));
+
+                    rawImage.drawPoint(new WPIPoint(pCenterX, pCenterY), targetColor, 5);
+                    if (pCenterY < highest) // image y grows downward, so the smallest y is the highest target
+                    {
+                        square = p;
+                        highest = pCenterY;
+                    }
+                }
+            }
+            else
+            {
+                rawImage.drawPolygon(p, WPIColor.YELLOW, 1);
+            }
+        }
+
+        if (square != null)
+        {
+            double x = square.getX() + (square.getWidth() / 2);
+            x = (2 * (x / size.width())) - 1;
+            double y = square.getY() + (square.getHeight() / 2);
+            y = -((2 * (y / size.height())) - 1);
+
+            double azimuth = this.boundAngle0to360Degrees(x*kHorizontalFOVDeg/2.0 + heading - kShooterOffsetDeg);
+            double range = (kTopTargetHeightIn-kCameraHeightIn)/Math.tan((y*kVerticalFOVDeg/2.0 + kCameraPitchDeg)*Math.PI/180.0);
+            double rpms = getRPMsForRange(range);
+
+            if (!m_debugMode)
+            {
+
+//                Robot.getTable().beginTransaction();
+                Robot.getTable().putBoolean("found", true);
+                Robot.getTable().putNumber("azimuth", azimuth);
+                Robot.getTable().putNumber("rpms", rpms);
+//                Robot.getTable().endTransaction();
+            } else
+            {
+                System.out.println("Target found");
+                System.out.println("x: " + x);
+                System.out.println("y: " + y);
+                System.out.println("azimuth: " + azimuth);
+                System.out.println("range: " + range);
+                System.out.println("rpms: " + rpms);
+            }
+            rawImage.drawPolygon(square, targetColor, 7);
+        } else
+        {
+
+            if (!m_debugMode)
+            {
+                Robot.getTable().putBoolean("found", false);
+            } else
+            {
+                System.out.println("Target not found");
+            }
+        }
+
+        // Draw a crosshair
+        rawImage.drawLine(linePt1, linePt2, targetColor, 2);
+
+        DaisyExtensions.releaseMemory();
+
+        //System.gc();
+
+        return rawImage;
+    }
+
+    private double boundAngle0to360Degrees(double angle)
+    {
+        // Naive algorithm
+        while(angle >= 360.0)
+        {
+            angle -= 360.0;
+        }
+        while(angle < 0.0)
+        {
+            angle += 360.0;
+        }
+        return angle;
+    }
+
+    public static void main(String[] args)
+    {
+        if (args.length == 0)
+        {
+            System.out.println("Usage: Arguments are paths to image files to test the program on");
+            return;
+        }
+
+        DashboardFrame frame = new DashboardFrame(false); // suggested 2013 adaptation
+        frame.getPrefs();
+
+        // Create the widget
+        DaisyCVWidget widget = new DaisyCVWidget(true);
+        CanvasFrame original = new CanvasFrame("Raw");
+        CanvasFrame result = new CanvasFrame("Result");
+
+        long totalTime = 0;
+        for (int i = 0; i < args.length; i++)
+        {
+            // Load the image
+            WPIColorImage rawImage = null;
+            try
+            {
+                rawImage = new WPIColorImage(ImageIO.read(new File(args[i%args.length])));
+            } catch (IOException e)
+            {
+                System.err.println("Could not find file!");
+                return;
+            }
+
+            // Show the raw image before processing so that the two windows
+            // cannot both end up displaying the already-processed frame.
+            original.showImage(rawImage.getBufferedImage());
+
+            WPIImage resultImage = null;
+
+            // Process image
+            long startTime, endTime;
+            startTime = System.nanoTime();
+            resultImage = widget.processImage(rawImage);
+            endTime = System.nanoTime();
+
+            // Display results
+            totalTime += (endTime - startTime);
+            double milliseconds = (double) (endTime - startTime) / 1000000.0;
+            System.out.format("Processing took %.2f milliseconds%n", milliseconds);
+            System.out.format("(%.2f frames per second)%n", 1000.0 / milliseconds);
+
+            result.showImage(resultImage.getBufferedImage());
+
+            System.out.println("Waiting for ENTER to continue to next image or exit...");
+            Scanner console = new Scanner(System.in);
+            console.nextLine();
+        }
+
+        if (original.isVisible())
+        {
+            original.setVisible(false);
+            original.dispose();
+        }
+        if (result.isVisible())
+        {
+            result.setVisible(false);
+            result.dispose();
+        }
+
+        double milliseconds = (double) (totalTime) / 1000000.0 / (args.length);
+        System.out.format("AVERAGE:%.2f milliseconds%n", milliseconds);
+        System.out.format("(%.2f frames per second)%n", 1000.0 / milliseconds);
+        System.exit(0);
+    }
+}
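
For reference, the aiming math in processImage boils down to a few lines of trigonometry plus a table lookup. The sketch below is a standalone worked example using the same constants; the frame size, detected target center and heading are made-up values:

    // Hypothetical worked example of the azimuth/range math in DaisyCVWidget.processImage.
    public class AimMathExample {
        static final double kShooterOffsetDeg = -1.55;
        static final double kHorizontalFOVDeg = 47.0;
        static final double kVerticalFOVDeg = 480.0 / 640.0 * kHorizontalFOVDeg;
        static final double kCameraHeightIn = 54.0;
        static final double kCameraPitchDeg = 21.0;
        static final double kTopTargetHeightIn = 98.0 + 2.0 + 9.0;

        public static void main(String[] args) {
            int width = 640, height = 480;
            int pCenterX = 400, pCenterY = 180; // assumed target center in pixels
            double heading = 0.0;               // assumed gyro heading in degrees

            // Normalize to [-1, 1] with +y pointing up, exactly as processImage does.
            double x = (2.0 * pCenterX / width) - 1.0;
            double y = -((2.0 * pCenterY / height) - 1.0);

            double azimuth = x * kHorizontalFOVDeg / 2.0 + heading - kShooterOffsetDeg;
            double range = (kTopTargetHeightIn - kCameraHeightIn)
                    / Math.tan(Math.toRadians(y * kVerticalFOVDeg / 2.0 + kCameraPitchDeg));

            System.out.printf("azimuth %.2f deg, range %.1f in%n", azimuth, range);
        }
    }

The resulting range then indexes the TreeMap in getRPMsForRange: a range of 165 inches, for example, falls between the 160 and 170 keys, so the method linearly interpolates to 3544 + 0.5 * (3574 - 3544) = 3559 RPM.
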
diff --git a/DaisyCV/src/edu/wpi/first/wpijavacv/DaisyExtensions.java b/DaisyCV/src/edu/wpi/first/wpijavacv/DaisyExtensions.java
new file mode 100644
index 0000000..41f12a1
--- /dev/null
+++ b/DaisyCV/src/edu/wpi/first/wpijavacv/DaisyExtensions.java
@@ -0,0 +1,89 @@
+package edu.wpi.first.wpijavacv;
+
+import java.util.ArrayList;
+
+import com.googlecode.javacv.cpp.opencv_core;
+import com.googlecode.javacv.cpp.opencv_core.CvMemStorage;
+import com.googlecode.javacv.cpp.opencv_core.CvSeq;
+import com.googlecode.javacv.cpp.opencv_core.IplImage;
+import com.googlecode.javacv.cpp.opencv_imgproc;
+
+/**
+ *
+ * @author jrussell
+ */
+public class DaisyExtensions
+{
+    public static CvSeq getCvSeq(WPIContour contour)
+    {
+        return contour.getCVSeq();
+    }
+
+    public static WPIContour makeWPIContour(CvSeq seq)
+    {
+        return new WPIContour(seq);
+    }
+
+    public static WPIGrayscaleImage makeWPIGrayscaleImage(IplImage arr)
+    {
+        IplImage tempImage = IplImage.create(arr.cvSize(), arr.depth(), 1);
+        opencv_core.cvCopy(arr, tempImage);
+        return new WPIGrayscaleImage(tempImage);
+    }
+
+    public static WPIColorImage makeWPIColorImage(IplImage arr)
+    {
+        IplImage tempImage = IplImage.create(arr.cvSize(), arr.depth(), arr.nChannels()); // match the source channel count so cvCopy works for 3-channel images
+        opencv_core.cvCopy(arr, tempImage);
+        return new WPIColorImage(tempImage);
+    }
+
+    public static WPIBinaryImage makeWPIBinaryImage(IplImage arr)
+    {
+        IplImage tempImage = IplImage.create(arr.cvSize(), arr.depth(), 1);
+        opencv_core.cvCopy(arr, tempImage);
+        return new WPIBinaryImage(tempImage);
+    }
+
+    public static IplImage getIplImage(WPIImage image)
+    {
+        return image.image;
+    }
+
+    private static CvMemStorage storage;
+//    private static ArrayList<CvSeq> thingsToDispose;
+
+    public static void init()
+    {
+        storage = CvMemStorage.create();
+    }
+
+    public static WPIContour[] findConvexContours(WPIBinaryImage image)
+    {
+        image.validateDisposed();
+
+        IplImage tempImage = IplImage.create(image.image.cvSize(), image.image.depth(), 1);
+
+        opencv_core.cvCopy(image.image, tempImage);
+
+        CvSeq contours = new CvSeq();
+        opencv_imgproc.cvFindContours(tempImage, storage, contours, 256, opencv_imgproc.CV_RETR_LIST, opencv_imgproc.CV_CHAIN_APPROX_TC89_KCOS);
+        ArrayList<WPIContour> results = new ArrayList<WPIContour>();
+        while (!WPIDisposable.isNull(contours)) {
+            CvSeq convexContour = opencv_imgproc.cvConvexHull2(contours, storage, opencv_imgproc.CV_CLOCKWISE, 1);
+            WPIContour contour = new WPIContour(opencv_core.cvCloneSeq(convexContour, storage));
+            results.add(contour);
+            contours = contours.h_next();
+        }
+
+        tempImage.release();
+        WPIContour[] array = new WPIContour[results.size()];
+        return results.toArray(array);
+    }
+
+    public static void releaseMemory()
+    {
+        opencv_core.cvClearMemStorage(storage);
+    }
+
+}
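
DaisyExtensions lives in the edu.wpi.first.wpijavacv package so it can reach the package-private WPIImage.image field and WPIContour constructor. The sketch below shows the call sequence the widget uses around it: a minimal band-pass-then-contours pipeline, assuming a WPIColorImage is already in hand (the widget additionally runs a morphological close before finding contours; the class and method names here are made up):

    import com.googlecode.javacv.cpp.opencv_core;
    import com.googlecode.javacv.cpp.opencv_core.IplImage;
    import com.googlecode.javacv.cpp.opencv_imgproc;

    import edu.wpi.first.wpijavacv.DaisyExtensions;
    import edu.wpi.first.wpijavacv.WPIBinaryImage;
    import edu.wpi.first.wpijavacv.WPIColorImage;
    import edu.wpi.first.wpijavacv.WPIContour;

    // Minimal sketch of the DaisyExtensions call sequence used by DaisyCVWidget.
    public class ContourSketch {
        public static WPIContour[] findGreenContours(WPIColorImage rawImage) {
            DaisyExtensions.init(); // allocate the shared CvMemStorage once

            IplImage input = DaisyExtensions.getIplImage(rawImage);
            IplImage bin = IplImage.create(input.cvSize(), 8, 1);
            IplImage hsv = IplImage.create(input.cvSize(), 8, 3);
            IplImage hue = IplImage.create(input.cvSize(), 8, 1);
            IplImage sat = IplImage.create(input.cvSize(), 8, 1);
            IplImage val = IplImage.create(input.cvSize(), 8, 1);

            // Same band-pass as the widget: hue in [45, 75], high saturation, bright pixels.
            opencv_imgproc.cvCvtColor(input, hsv, opencv_imgproc.CV_BGR2HSV);
            opencv_core.cvSplit(hsv, hue, sat, val, null);
            opencv_imgproc.cvThreshold(hue, bin, 45, 255, opencv_imgproc.CV_THRESH_BINARY);
            opencv_imgproc.cvThreshold(hue, hue, 75, 255, opencv_imgproc.CV_THRESH_BINARY_INV);
            opencv_imgproc.cvThreshold(sat, sat, 200, 255, opencv_imgproc.CV_THRESH_BINARY);
            opencv_imgproc.cvThreshold(val, val, 55, 255, opencv_imgproc.CV_THRESH_BINARY);
            opencv_core.cvAnd(hue, bin, bin, null);
            opencv_core.cvAnd(bin, sat, bin, null);
            opencv_core.cvAnd(bin, val, bin, null);

            WPIBinaryImage binWpi = DaisyExtensions.makeWPIBinaryImage(bin);
            WPIContour[] contours = DaisyExtensions.findConvexContours(binWpi);
            DaisyExtensions.releaseMemory(); // clear the CvMemStorage between frames
            return contours;
        }
    }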