-Changed application to display image source at the top of the window.
-Removed the extra windows that showed intermediate stages when not in debug mode. 
Interestingly, this improved performance significantly.
-Modified slider listener so that it (hopefully) doesn't cause any more segfaults.
-Hid all the calibration controls away in a separate calibration window.
They are accessed by a button on the main display. 
I also added labels to each of the sliders.
-Application now takes the IP address of the atom as a command-line argument.
-Code now actually uses the result sender, which I had forgotten to do last time.
-I made a small modification to Brian's code which reduced the application's 
average consumption of RAM from two gigabytes to eight hundred megabytes.


git-svn-id: https://robotics.mvla.net/svn/frc971/2013/trunk/src@4151 f308d9b7-e957-4cde-b6ac-9a88185e7312
diff --git a/971CV/src/org/frc971/Recognizer2013.java b/971CV/src/org/frc971/Recognizer2013.java
index 7fbf28e..812c78b 100644
--- a/971CV/src/org/frc971/Recognizer2013.java
+++ b/971CV/src/org/frc971/Recognizer2013.java
@@ -14,7 +14,6 @@
 import edu.wpi.first.wpijavacv.WPIColor;

 import edu.wpi.first.wpijavacv.WPIColorImage;

 import edu.wpi.first.wpijavacv.WPIContour;

-import edu.wpi.first.wpijavacv.WPIImage;

 import edu.wpi.first.wpijavacv.WPIPoint;

 import edu.wpi.first.wpijavacv.WPIPolygon;

 

@@ -111,7 +110,7 @@
     }

 

     @Override

-    public WPIImage processImage(WPIColorImage cameraImage) {

+    public Target processImage(WPIColorImage cameraImage) {

         // (Re)allocate the intermediate images if the input is a different

         // size than the previous image.

         if (size == null || size.width() != cameraImage.getWidth()

@@ -239,26 +238,32 @@
             }

         }

 

+        Target found = null;

         if (bestTarget != null) {

             rawImage.drawPolygon(bestTarget, targetColor, 2);

-            measureTarget(bestTarget);

+            found = measureTarget(bestTarget);

         } else {

             LOG.fine("No target found");

         }

 

         // Draw a crosshair

         rawImage.drawLine(linePt1, linePt2, targetColor, 1);

+        

+        if (found == null) {

+        	found = new Target();

+        }

+        found.editedPicture = rawImage;

 

         daisyExtensions.releaseMemory();

         //System.gc();

-

-        return rawImage;

+        

+        return found;

     }

 

     /**

      * Uses the camera, field, and robot dimensions to compute targeting info.

      */

-    private void measureTarget(WPIPolygon target) {

+    private Target measureTarget(WPIPolygon target) {

         double w = target.getWidth();

         double h = target.getHeight();

         double x = target.getX() + w / 2; // target center in view coords

@@ -274,10 +279,18 @@
         double elevationCam = Math.atan2(yc * 2 * kTanVFOV2, vh);

         double rangeIn = kTargetWidthIn * vw / (w * 2 * kTanHFOV2);

 

+        //Put results in target

+        Target data = new Target();

+        data.azimuth = (Math.toDegrees(azimuthCam) - kShooterOffsetDeg);

+        data.elevation = (Math.toDegrees(elevationCam));

+        data.range = (rangeIn / 12);

+        

         LOG.fine("Best target at (" + x + ", " + y + ") " + w +" x " + h

                 + ", shot azimuth=" + (Math.toDegrees(azimuthCam) - kShooterOffsetDeg) + 

                 " elevation=" + (Math.toDegrees(elevationCam) + kCameraPitchDeg) + 

                 " range=" + (rangeIn / 12));

+        

+        return data;

     }

 

 }