package org.frc971;

import java.util.ArrayList;

import com.googlecode.javacv.cpp.opencv_core;
import com.googlecode.javacv.cpp.opencv_core.CvSize;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
import com.googlecode.javacv.cpp.opencv_imgproc;
import com.googlecode.javacv.cpp.opencv_imgproc.IplConvKernel;

import edu.wpi.first.wpijavacv.DaisyExtensions;
import edu.wpi.first.wpijavacv.WPIBinaryImage;
import edu.wpi.first.wpijavacv.WPIColor;
import edu.wpi.first.wpijavacv.WPIColorImage;
import edu.wpi.first.wpijavacv.WPIContour;
import edu.wpi.first.wpijavacv.WPIImage;
import edu.wpi.first.wpijavacv.WPIPoint;
import edu.wpi.first.wpijavacv.WPIPolygon;

/**
 * Vision target recognizer for FRC 2013.
 *
 * Thresholds the camera image in HSV space, closes gaps with binary
 * morphology, finds convex contours, keeps the roughly rectangular
 * quadrilaterals, and selects the highest remaining candidate as the target.
 *
 * @author jerry
 */
public class Recognizer2013 implements Recognizer {

    // Constants that need to be tuned
    static final double kRoughlyHorizontalSlope = Math.tan(Math.toRadians(20));
    static final double kRoughlyVerticalSlope = Math.tan(Math.toRadians(90 - 20));
    static final int kMinWidth = 20;
    static final int kMaxWidth = 400;
    static final int kHoleClosingIterations = 9;

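    // kShooterOffsetDeg appears to be the shooter's horizontal angular offset
    // from the camera axis; it is converted to a pixel offset below to place
    // the aiming crosshair. kVerticalFOVDeg scales the horizontal FOV by
    // 480/640, i.e. it assumes a 640x480 camera frame.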
    static final double kShooterOffsetDeg = 0;
    static final double kHorizontalFOVDeg = 47.0;
    static final double kVerticalFOVDeg = 480.0 / 640.0 * kHorizontalFOVDeg;

    // Colors for drawing indicators on the image.
    private static final WPIColor reject1Color = WPIColor.GRAY;
    private static final WPIColor reject2Color = WPIColor.YELLOW;
    private static final WPIColor candidateColor = WPIColor.BLUE;
    private static final WPIColor targetColor = new WPIColor(255, 0, 0);

    // Show intermediate images for parameter tuning.
    private final DebugCanvas thresholdedCanvas = new DebugCanvas("thresholded");
    private final DebugCanvas morphedCanvas = new DebugCanvas("morphed");

    // JavaCV data to reuse for each frame.
    private final DaisyExtensions daisyExtensions = new DaisyExtensions();
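    // A 3x3 rectangular structuring element (anchor at 1,1) for the
    // hole-closing morphology pass in processImage().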
    private final IplConvKernel morphKernel = IplConvKernel.create(3, 3, 1, 1,
            opencv_imgproc.CV_SHAPE_RECT, null);
    private CvSize size = null;
    private WPIContour[] contours;
    private final ArrayList<WPIPolygon> polygons = new ArrayList<WPIPolygon>();
    private WPIColorImage rawImage;
    private IplImage bin;
    private IplImage hsv;
    private IplImage hue;
    private IplImage sat;
    private IplImage val;
    private WPIPoint linePt1, linePt2; // crosshair endpoints

    public Recognizer2013() {
    }

    @Override
    public WPIImage processImage(WPIColorImage cameraImage) {
        // (Re)allocate the intermediate images if the input is a different
        // size than the previous image.
        if (size == null || size.width() != cameraImage.getWidth()
                || size.height() != cameraImage.getHeight()) {
            size = opencv_core.cvSize(cameraImage.getWidth(),
                    cameraImage.getHeight());
            rawImage = DaisyExtensions.makeWPIColorImage(
                    DaisyExtensions.getIplImage(cameraImage));
            bin = IplImage.create(size, 8, 1);
            hsv = IplImage.create(size, 8, 3);
            hue = IplImage.create(size, 8, 1);
            sat = IplImage.create(size, 8, 1);
            val = IplImage.create(size, 8, 1);

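            // Convert the shooter's angular offset to pixels and set up the
            // endpoints of a vertical crosshair line at that x position.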
            int horizontalOffsetPixels = (int)Math.round(
                    kShooterOffsetDeg * size.width() / kHorizontalFOVDeg);
            int x = size.width() / 2 + horizontalOffsetPixels;
            linePt1 = new WPIPoint(x, size.height() - 1);
            linePt2 = new WPIPoint(x, 0);
        } else {
            opencv_core.cvCopy(DaisyExtensions.getIplImage(cameraImage),
                    DaisyExtensions.getIplImage(rawImage));
        }

        IplImage input = DaisyExtensions.getIplImage(rawImage);

        // Threshold the pixels in HSV color space.
        // TODO(jerry): Do this in one pass of a pixel-processing loop.
        opencv_imgproc.cvCvtColor(input, hsv, opencv_imgproc.CV_BGR2HSV);
        opencv_core.cvSplit(hsv, hue, sat, val, null);

        // NOTE: Since red is at the end of the cyclic color space, you can OR
        // a threshold and an inverted threshold to match red pixels.
        // TODO(jerry): Use tunable constants instead of literals.
        opencv_imgproc.cvThreshold(hue, bin, 60 - 15, 255, opencv_imgproc.CV_THRESH_BINARY);
        opencv_imgproc.cvThreshold(hue, hue, 60 + 15, 255, opencv_imgproc.CV_THRESH_BINARY_INV);
        opencv_imgproc.cvThreshold(sat, sat, 200, 255, opencv_imgproc.CV_THRESH_BINARY);
        opencv_imgproc.cvThreshold(val, val, 55, 255, opencv_imgproc.CV_THRESH_BINARY);
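        // Taken together (ANDed below), these masks keep pixels with hue
        // roughly in 45..75 (green on OpenCV's 0-179 hue scale), saturation
        // above 200, and value above 55; presumably the retroreflective tape
        // lit by a green ring light.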

        // Combine the results to obtain a binary image which is mostly the
        // interesting pixels.
        opencv_core.cvAnd(hue, bin, bin, null);
        opencv_core.cvAnd(bin, sat, bin, null);
        opencv_core.cvAnd(bin, val, bin, null);

        thresholdedCanvas.showImage(bin);

        // Fill in gaps using binary morphology.
        opencv_imgproc.cvMorphologyEx(bin, bin, null, morphKernel,
                opencv_imgproc.CV_MOP_CLOSE, kHoleClosingIterations);

        morphedCanvas.showImage(bin);

        // Find contours.
        WPIBinaryImage binWpi = DaisyExtensions.makeWPIBinaryImage(bin);
        contours = daisyExtensions.findConvexContours(binWpi);

        // Simplify the contours to polygons and filter by size and aspect ratio.
        // TODO(jerry): Use tunable constants instead of literals.
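        // (The ratio test keeps contours that are wider than tall:
        // height/width must fall strictly between 0.5 and 1.0.)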
        polygons.clear();
        for (WPIContour c : contours) {
            double ratio = ((double) c.getHeight()) / ((double) c.getWidth());
            if (ratio < 1.0 && ratio > 0.5 && c.getWidth() >= kMinWidth
                    && c.getWidth() <= kMaxWidth) {
                polygons.add(c.approxPolygon(20));
            }
        }

        // Pick the highest target that passes the remaining filter criteria.
        WPIPolygon bestTarget = null;
        int highestY = Integer.MAX_VALUE;
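        // Image y coordinates grow downward, so the highest target in the
        // image is the candidate with the smallest center y.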

        for (WPIPolygon p : polygons) {
            if (p.isConvex() && p.getNumVertices() == 4) { // quadrilateral
                WPIPoint[] points = p.getPoints();
                // We expect the polygon to have a top line that is nearly
                // horizontal and two side lines that are nearly vertical.
                int numRoughlyHorizontal = 0;
                int numRoughlyVertical = 0;
                for (int i = 0; i < 4; ++i) {
                    double dy = points[i].getY() - points[(i + 1) % 4].getY();
                    double dx = points[i].getX() - points[(i + 1) % 4].getX();
                    double slope = Double.MAX_VALUE;
                    if (dx != 0) {
                        slope = Math.abs(dy / dx);
                    }

                    if (slope < kRoughlyHorizontalSlope) {
                        ++numRoughlyHorizontal;
                    } else if (slope > kRoughlyVerticalSlope) {
                        ++numRoughlyVertical;
                    }
                }

                if (numRoughlyHorizontal >= 1 && numRoughlyVertical == 2) {
                    rawImage.drawPolygon(p, candidateColor, 2);

                    int pCenterX = p.getX() + p.getWidth() / 2;
                    int pCenterY = p.getY() + p.getHeight() / 2;

                    rawImage.drawPoint(new WPIPoint(pCenterX, pCenterY),
                            candidateColor, 3);
                    if (pCenterY < highestY) {
                        bestTarget = p;
                        highestY = pCenterY;
                    }
                } else {
                    rawImage.drawPolygon(p, reject2Color, 1);
                }
            } else {
                rawImage.drawPolygon(p, reject1Color, 1);
            }
        }

        if (bestTarget != null) {
            double w = bestTarget.getWidth();
            double h = bestTarget.getHeight();
            double x = bestTarget.getX() + w / 2;
            double y = bestTarget.getY() + h / 2;

            rawImage.drawPolygon(bestTarget, targetColor, 2);

            System.out.println("Best target at (" + x + ", " + y + ") size "
                    + w + " x " + h);
        } else {
            System.out.println("No target found");
        }

        // Draw a crosshair
        rawImage.drawLine(linePt1, linePt2, targetColor, 1);

        daisyExtensions.releaseMemory();
        //System.gc();

        return rawImage;
    }

}