blob: aafa6abdaa371570793a1aa6f2e7f9d34cdb427e [file] [log] [blame]
package org.spartanrobotics;
2
3import java.util.ArrayList;
4import java.util.logging.Logger;
5
6import com.googlecode.javacv.cpp.opencv_core;
7import com.googlecode.javacv.cpp.opencv_core.CvSize;
8import com.googlecode.javacv.cpp.opencv_core.IplImage;
9import com.googlecode.javacv.cpp.opencv_imgproc;
10import com.googlecode.javacv.cpp.opencv_imgproc.IplConvKernel;
11
12import edu.wpi.first.wpijavacv.DaisyExtensions;
13import edu.wpi.first.wpijavacv.WPIBinaryImage;
14import edu.wpi.first.wpijavacv.WPIColor;
15import edu.wpi.first.wpijavacv.WPIColorImage;
16import edu.wpi.first.wpijavacv.WPIContour;
17import edu.wpi.first.wpijavacv.WPIPoint;
18import edu.wpi.first.wpijavacv.WPIPolygon;
19
20/**
21 * Vision target recognizer for FRC 2013.
22 *
23 * @author jrussell
24 * @author jerry
25 */
26public class Recognizer2013 implements Recognizer {
27
28 private final static Logger LOG = Logger.getLogger(
29 Recognizer2013.class.getName());
30
31 // --- Tunable recognizer constants.
32 static final double kRoughlyHorizontalSlope = Math.tan(Math.toRadians(30));
33 static final double kRoughlyVerticalSlope = Math.tan(Math.toRadians(90 - 30));
34 static final int kHoleClosingIterations = 2;
35 static final double kPolygonPercentFit = 12;
36 static final int kMinWidthAt320 = 35; // for high goal and middle goals
37
38 // --- Field dimensions.
39 // The target aspect ratios are for the midlines of the vision target tape.
40 static final double kGoalWidthIn = 54; // of the high and middle targets
41 static final double kTargetWidthIn = kGoalWidthIn + 4;
42 static final double kHighGoalAspect = (21 + 4) / kTargetWidthIn;
43 static final double kMiddleGoalAspect = (24 + 4) / kTargetWidthIn;
44 static final double kMinAspect = kHighGoalAspect * 0.6;
45 static final double kMaxAspect = kMiddleGoalAspect * 1.4;
46 static final double kTopTargetHeightIn = 104.125 + 21.0/2; // center of target
47
48 // --- Robot and camera dimensions.
49 static final double kShooterOffsetDeg = 0; // azimuth offset from camera to shooter
50 static final double kHorizontalFOVDeg = 44.0; // Logitech C210 camera
51 static final double kVerticalFOVDeg = 480.0 / 640.0 * kHorizontalFOVDeg;
52 static final double kCameraHeightIn = 24.0; // TODO
53 static final double kCameraPitchDeg = 21.0; // TODO
54 static final double kTanHFOV2 = Math.tan(Math.toRadians(kHorizontalFOVDeg / 2));
55 static final double kTanVFOV2 = Math.tan(Math.toRadians(kVerticalFOVDeg / 2));
56
57 // --- Colors for drawing indicators on the image.
58 private static final WPIColor reject1Color = WPIColor.GRAY;
59 private static final WPIColor reject2Color = WPIColor.YELLOW;
60 private static final WPIColor candidateColor = WPIColor.BLUE;
61 private static final WPIColor targetColor = WPIColor.RED;
62
63 // --- Color thresholds, initialized in the constructor.
64 private int min1Hue, max1Hue, min1Sat, min1Val;
65
66 // Show intermediate images for parameter tuning.
67 private final DebugCanvas thresholdedCanvas = new DebugCanvas("thresholded");
68 private final DebugCanvas morphedCanvas = new DebugCanvas("morphed");
69
70 // Data to reuse for each frame.
71 private final DaisyExtensions daisyExtensions = new DaisyExtensions();
72 private final IplConvKernel morphKernel = IplConvKernel.create(3, 3, 1, 1,
73 opencv_imgproc.CV_SHAPE_RECT, null);
74 private final ArrayList<WPIPolygon> polygons = new ArrayList<WPIPolygon>();
75
76 // Frame-size-dependent data to reuse for each frame.
77 private CvSize size = null;
78 private WPIColorImage rawImage;
79 private IplImage bin;
80 private IplImage hsv;
81 private IplImage hue;
82 private IplImage sat;
83 private IplImage val;
84 private int minWidth;
85 private WPIPoint linePt1, linePt2; // crosshair endpoints
86
87 public Recognizer2013() {
88 setHSVRange(70, 106, 137, 27);
89 }
90
91 @Override
92 public void setHSVRange(int minHue, int maxHue, int minSat, int minVal) {
93 min1Hue = minHue - 1; // - 1 because cvThreshold() does > instead of >=
94 max1Hue = maxHue + 1;
95 min1Sat = minSat - 1;
96 min1Val = minVal - 1;
97 }
98 @Override
99 public int getHueMin() { return min1Hue + 1; }
100 @Override
101 public int getHueMax() { return max1Hue - 1; }
102 @Override
103 public int getSatMin() { return min1Sat + 1; }
104 @Override
105 public int getValMin() { return min1Val + 1; }
106
107 @Override
108 public void showIntermediateStages(boolean enable) {
109 thresholdedCanvas.show = enable;
110 morphedCanvas.show = enable;
111 }
112
113 @Override
114 public Target processImage(WPIColorImage cameraImage) {
115 // (Re)allocate the intermediate images if the input is a different
116 // size than the previous image.
117 if (size == null || size.width() != cameraImage.getWidth()
118 || size.height() != cameraImage.getHeight()) {
119 size = opencv_core.cvSize(cameraImage.getWidth(),
120 cameraImage.getHeight());
121 rawImage = DaisyExtensions.makeWPIColorImage(
122 DaisyExtensions.getIplImage(cameraImage));
123 bin = IplImage.create(size, 8, 1);
124 hsv = IplImage.create(size, 8, 3);
125 hue = IplImage.create(size, 8, 1);
126 sat = IplImage.create(size, 8, 1);
127 val = IplImage.create(size, 8, 1);
128 minWidth = (kMinWidthAt320 * cameraImage.getWidth() + 319) / 320;
129
130 int horizontalOffsetPixels = (int)Math.round(
131 kShooterOffsetDeg * size.width() / kHorizontalFOVDeg);
132 int x = size.width() / 2 + horizontalOffsetPixels;
133 linePt1 = new WPIPoint(x, size.height() - 1);
134 linePt2 = new WPIPoint(x, 0);
135 } else {
136 // Copy the camera image so it's safe to draw on.
137 opencv_core.cvCopy(DaisyExtensions.getIplImage(cameraImage),
138 DaisyExtensions.getIplImage(rawImage));
139 }
140
141 IplImage input = DaisyExtensions.getIplImage(rawImage);
142
143 // Threshold the pixels in HSV color space.
144 // TODO(jerry): Do this in one pass of a pixel-processing loop.
145 opencv_imgproc.cvCvtColor(input, hsv, opencv_imgproc.CV_BGR2HSV_FULL);
146 opencv_core.cvSplit(hsv, hue, sat, val, null);
147
148 // NOTE: Since red is at the end of the cyclic color space, you can OR
149 // a threshold and an inverted threshold to match red pixels.
150 opencv_imgproc.cvThreshold(hue, bin, min1Hue, 255, opencv_imgproc.CV_THRESH_BINARY);
151 opencv_imgproc.cvThreshold(hue, hue, max1Hue, 255, opencv_imgproc.CV_THRESH_BINARY_INV);
152 opencv_imgproc.cvThreshold(sat, sat, min1Sat, 255, opencv_imgproc.CV_THRESH_BINARY);
153 opencv_imgproc.cvThreshold(val, val, min1Val, 255, opencv_imgproc.CV_THRESH_BINARY);
154
155 // Combine the results to obtain a binary image which is mostly the
156 // interesting pixels.
157 opencv_core.cvAnd(hue, bin, bin, null);
158 opencv_core.cvAnd(bin, sat, bin, null);
159 opencv_core.cvAnd(bin, val, bin, null);
160
161 thresholdedCanvas.showImage(bin);
162
163 // Fill in gaps using binary morphology.
164 opencv_imgproc.cvMorphologyEx(bin, bin, null, morphKernel,
165 opencv_imgproc.CV_MOP_CLOSE, kHoleClosingIterations);
166
167 morphedCanvas.showImage(bin);
168
169 // Find contours.
170 //
171 // NOTE: If we distinguished between the inner and outer boundaries of
172 // the vision target rectangles, we could apply a more accurate width
173 // filter and more accurately compute the target range.
174 WPIBinaryImage binWpi = DaisyExtensions.makeWPIBinaryImage(bin);
175 WPIContour[] contours = daisyExtensions.findConvexContours(binWpi);
176
177 // Simplify the contours to polygons and filter by size and aspect ratio.
178 //
179 // TODO(jerry): Also look for the two vertical stripe vision targets.
180 // They'll greatly increase the precision of measuring the distance. If
181 // both stripes are visible, they'll increase the accuracy for
182 // identifying the high goal.
183 polygons.clear();
184 for (WPIContour c : contours) {
185 if (c.getWidth() >= minWidth) {
186 double ratio = ((double) c.getHeight()) / c.getWidth();
187 if (ratio >= kMinAspect && ratio <= kMaxAspect) {
188 polygons.add(c.approxPolygon(kPolygonPercentFit));
189 }
190 }
191 }
192
193 // Pick the target with the highest center-point that matches yet more
194 // filter criteria.
195 WPIPolygon bestTarget = null;
196 int highestY = Integer.MAX_VALUE;
197
198 for (WPIPolygon p : polygons) {
199 // TODO(jerry): Replace boolean filters with a scoring function?
200 if (p.isConvex() && p.getNumVertices() == 4) { // quadrilateral
201 WPIPoint[] points = p.getPoints();
202 // Filter for polygons with 2 ~horizontal and 2 ~vertical sides.
203 int numRoughlyHorizontal = 0;
204 int numRoughlyVertical = 0;
205 for (int i = 0; i < 4; ++i) {
206 double dy = points[i].getY() - points[(i + 1) % 4].getY();
207 double dx = points[i].getX() - points[(i + 1) % 4].getX();
208 double slope = Double.MAX_VALUE;
209 if (dx != 0) {
210 slope = Math.abs(dy / dx);
211 }
212
213 if (slope < kRoughlyHorizontalSlope) {
214 ++numRoughlyHorizontal;
215 } else if (slope > kRoughlyVerticalSlope) {
216 ++numRoughlyVertical;
217 }
218 }
219
220 if (numRoughlyHorizontal >= 2 && numRoughlyVertical == 2) {
221 int pCenterX = p.getX() + p.getWidth() / 2;
222 int pCenterY = p.getY() + p.getHeight() / 2;
223
224 rawImage.drawPolygon(p, candidateColor, 2);
225 rawImage.drawPoint(new WPIPoint(pCenterX, pCenterY),
226 targetColor, 2);
227 if (pCenterY < highestY) {
228 bestTarget = p;
229 highestY = pCenterY;
230 }
231 } else {
232 rawImage.drawPolygon(p, reject2Color, 1);
233 }
234 } else {
235 rawImage.drawPolygon(p, reject1Color, 1);
236 }
237 }
238
239 Target found = null;
240 if (bestTarget != null) {
241 rawImage.drawPolygon(bestTarget, targetColor, 2);
242 found = measureTarget(bestTarget);
243 } else {
244 LOG.fine("No target found");
245 }
246
247 // Draw a crosshair
248 rawImage.drawLine(linePt1, linePt2, targetColor, 1);
249
250 if (found == null) {
251 found = new Target();
252 }
253 found.editedPicture = rawImage;
254
255 daisyExtensions.releaseMemory();
256 //System.gc();
257
258 return found;
259 }
260
261 /**
262 * Uses the camera, field, and robot dimensions to compute targeting info.
263 */
264 private Target measureTarget(WPIPolygon target) {
265 double w = target.getWidth();
266 double h = target.getHeight();
267 double x = target.getX() + w / 2; // target center in view coords
268 double y = target.getY() + h / 2;
269
270 double vw = size.width();
271 double vh = size.height();
272 double xc = x - vw / 2; // target center pos'n ±from view center
273 double yc = vh / 2 - y; // ... in world coords on the viewing plane
274
275 // Target angles relative to the camera.
276 double azimuthCam = Math.atan2(xc * 2 * kTanHFOV2, vw);
277 double elevationCam = Math.atan2(yc * 2 * kTanVFOV2, vh);
278 double rangeIn = kTargetWidthIn * vw / (w * 2 * kTanHFOV2);
279
280 //Put results in target
281 Target data = new Target();
282 data.azimuth = (Math.toDegrees(azimuthCam) - kShooterOffsetDeg);
283 data.elevation = (Math.toDegrees(elevationCam));
284 data.range = (rangeIn / 12);
285
286 LOG.info(String.format("Best target at (%.2f, %.2f) %.2fx%.2f," +
287 " shot azimuth=%.2f," +
288 " elevation=%.2f," +
289 " range=%.2f",
290 x, y, w, h,
291 (Math.toDegrees(azimuthCam) - kShooterOffsetDeg),
292 (Math.toDegrees(elevationCam) + kCameraPitchDeg),
293 (rangeIn / 12)));
294
295 return data;
296 }
297
298}