blob: 812c78beb796720ebe0815bdda5c9704fd435e22 [file] [log] [blame]
package org.frc971;

import java.util.ArrayList;
import java.util.logging.Logger;

import com.googlecode.javacv.cpp.opencv_core;
import com.googlecode.javacv.cpp.opencv_core.CvSize;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
import com.googlecode.javacv.cpp.opencv_imgproc;
import com.googlecode.javacv.cpp.opencv_imgproc.IplConvKernel;

import edu.wpi.first.wpijavacv.DaisyExtensions;
import edu.wpi.first.wpijavacv.WPIBinaryImage;
import edu.wpi.first.wpijavacv.WPIColor;
import edu.wpi.first.wpijavacv.WPIColorImage;
import edu.wpi.first.wpijavacv.WPIContour;
import edu.wpi.first.wpijavacv.WPIPoint;
import edu.wpi.first.wpijavacv.WPIPolygon;

/**
 * Vision target recognizer for FRC 2013.
 *
 * <p>Pipeline: HSV threshold -> morphological hole closing -> convex contour
 * detection -> polygon approximation -> quadrilateral filtering -> pick the
 * highest candidate -> compute azimuth/elevation/range from camera geometry.
 *
 * @author jrussell
 * @author jerry
 */
public class Recognizer2013 implements Recognizer {

    private final static Logger LOG = Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);

    // --- Tunable recognizer constants.
    static final double kRoughlyHorizontalSlope = Math.tan(Math.toRadians(30));
    static final double kRoughlyVerticalSlope = Math.tan(Math.toRadians(90 - 30));
    static final int kHoleClosingIterations = 2;
    static final double kPolygonPercentFit = 12;
    static final int kMinWidthAt320 = 35; // for high goal and middle goals

    // --- Field dimensions.
    // The target aspect ratios are for the midlines of the vision target tape.
    static final double kGoalWidthIn = 54; // of the high and middle targets
    static final double kTargetWidthIn = kGoalWidthIn + 4;
    static final double kHighGoalAspect = (21 + 4) / kTargetWidthIn;
    static final double kMiddleGoalAspect = (24 + 4) / kTargetWidthIn;
    static final double kMinAspect = kHighGoalAspect * 0.6;
    static final double kMaxAspect = kMiddleGoalAspect * 1.4;
    static final double kTopTargetHeightIn = 104.125 + 21.0/2; // center of target

    // --- Robot and camera dimensions.
    static final double kShooterOffsetDeg = 0; // azimuth offset from camera to shooter
    static final double kHorizontalFOVDeg = 44.0; // Logitech C210 camera
    static final double kVerticalFOVDeg = 480.0 / 640.0 * kHorizontalFOVDeg;
    static final double kCameraHeightIn = 24.0; // TODO
    static final double kCameraPitchDeg = 21.0; // TODO
    static final double kTanHFOV2 = Math.tan(Math.toRadians(kHorizontalFOVDeg / 2));
    static final double kTanVFOV2 = Math.tan(Math.toRadians(kVerticalFOVDeg / 2));

    // --- Colors for drawing indicators on the image.
    private static final WPIColor reject1Color = WPIColor.GRAY;
    private static final WPIColor reject2Color = WPIColor.YELLOW;
    private static final WPIColor candidateColor = WPIColor.BLUE;
    private static final WPIColor targetColor = WPIColor.RED;

    // --- Color thresholds, initialized in the constructor.
    // Stored pre-offset by +/-1 because cvThreshold() compares with > not >=.
    private int min1Hue, max1Hue, min1Sat, min1Val;

    // Show intermediate images for parameter tuning.
    private final DebugCanvas thresholdedCanvas = new DebugCanvas("thresholded");
    private final DebugCanvas morphedCanvas = new DebugCanvas("morphed");

    // Data to reuse for each frame.
    private final DaisyExtensions daisyExtensions = new DaisyExtensions();
    private final IplConvKernel morphKernel = IplConvKernel.create(3, 3, 1, 1,
            opencv_imgproc.CV_SHAPE_RECT, null);
    private final ArrayList<WPIPolygon> polygons = new ArrayList<WPIPolygon>();

    // Frame-size-dependent data to reuse for each frame.
    private CvSize size = null;
    private WPIColorImage rawImage;
    private IplImage bin;
    private IplImage hsv;
    private IplImage hue;
    private IplImage sat;
    private IplImage val;
    private int minWidth;
    private WPIPoint linePt1, linePt2; // crosshair endpoints

    public Recognizer2013() {
        setHSVRange(70, 106, 137, 27);
    }

    /**
     * Sets the HSV color thresholds for target pixels. Values are stored
     * offset by one because cvThreshold() does > instead of >=.
     */
    @Override
    public void setHSVRange(int minHue, int maxHue, int minSat, int minVal) {
        min1Hue = minHue - 1; // - 1 because cvThreshold() does > instead of >=
        max1Hue = maxHue + 1;
        min1Sat = minSat - 1;
        min1Val = minVal - 1;
    }
    @Override
    public int getHueMin() { return min1Hue + 1; }
    @Override
    public int getHueMax() { return max1Hue - 1; }
    @Override
    public int getSatMin() { return min1Sat + 1; }
    @Override
    public int getValMin() { return min1Val + 1; }

    @Override
    public void showIntermediateStages(boolean enable) {
        thresholdedCanvas.show = enable;
        morphedCanvas.show = enable;
    }

    /**
     * Runs the full recognition pipeline on one camera frame.
     *
     * @param cameraImage the input frame; not modified (drawing happens on an
     *     internal copy exposed via {@code Target.editedPicture})
     * @return a Target with azimuth/elevation/range filled in when a target
     *     was found; otherwise a default Target carrying only the annotated
     *     picture. Never null.
     */
    @Override
    public Target processImage(WPIColorImage cameraImage) {
        // (Re)allocate the intermediate images if the input is a different
        // size than the previous image.
        if (size == null || size.width() != cameraImage.getWidth()
                || size.height() != cameraImage.getHeight()) {
            size = opencv_core.cvSize(cameraImage.getWidth(),
                    cameraImage.getHeight());
            rawImage = DaisyExtensions.makeWPIColorImage(
                    DaisyExtensions.getIplImage(cameraImage));
            bin = IplImage.create(size, 8, 1);
            hsv = IplImage.create(size, 8, 3);
            hue = IplImage.create(size, 8, 1);
            sat = IplImage.create(size, 8, 1);
            val = IplImage.create(size, 8, 1);
            // Scale the minimum-width filter with resolution, rounding up.
            minWidth = (kMinWidthAt320 * cameraImage.getWidth() + 319) / 320;

            int horizontalOffsetPixels = (int)Math.round(
                    kShooterOffsetDeg * size.width() / kHorizontalFOVDeg);
            int x = size.width() / 2 + horizontalOffsetPixels;
            linePt1 = new WPIPoint(x, size.height() - 1);
            linePt2 = new WPIPoint(x, 0);
        } else {
            // Copy the camera image so it's safe to draw on.
            opencv_core.cvCopy(DaisyExtensions.getIplImage(cameraImage),
                    DaisyExtensions.getIplImage(rawImage));
        }

        IplImage input = DaisyExtensions.getIplImage(rawImage);

        // Threshold the pixels in HSV color space.
        // TODO(jerry): Do this in one pass of a pixel-processing loop.
        opencv_imgproc.cvCvtColor(input, hsv, opencv_imgproc.CV_BGR2HSV_FULL);
        opencv_core.cvSplit(hsv, hue, sat, val, null);

        // NOTE: Since red is at the end of the cyclic color space, you can OR
        // a threshold and an inverted threshold to match red pixels.
        opencv_imgproc.cvThreshold(hue, bin, min1Hue, 255, opencv_imgproc.CV_THRESH_BINARY);
        opencv_imgproc.cvThreshold(hue, hue, max1Hue, 255, opencv_imgproc.CV_THRESH_BINARY_INV);
        opencv_imgproc.cvThreshold(sat, sat, min1Sat, 255, opencv_imgproc.CV_THRESH_BINARY);
        opencv_imgproc.cvThreshold(val, val, min1Val, 255, opencv_imgproc.CV_THRESH_BINARY);

        // Combine the results to obtain a binary image which is mostly the
        // interesting pixels.
        opencv_core.cvAnd(hue, bin, bin, null);
        opencv_core.cvAnd(bin, sat, bin, null);
        opencv_core.cvAnd(bin, val, bin, null);

        thresholdedCanvas.showImage(bin);

        // Fill in gaps using binary morphology.
        opencv_imgproc.cvMorphologyEx(bin, bin, null, morphKernel,
                opencv_imgproc.CV_MOP_CLOSE, kHoleClosingIterations);

        morphedCanvas.showImage(bin);

        // Find contours.
        //
        // NOTE: If we distinguished between the inner and outer boundaries of
        // the vision target rectangles, we could apply a more accurate width
        // filter and more accurately compute the target range.
        WPIBinaryImage binWpi = DaisyExtensions.makeWPIBinaryImage(bin);
        WPIContour[] contours = daisyExtensions.findConvexContours(binWpi);

        // Simplify the contours to polygons and filter by size and aspect ratio.
        //
        // TODO(jerry): Also look for the two vertical stripe vision targets.
        // They'll greatly increase the precision of measuring the distance. If
        // both stripes are visible, they'll increase the accuracy for
        // identifying the high goal.
        polygons.clear();
        for (WPIContour c : contours) {
            if (c.getWidth() >= minWidth) {
                double ratio = ((double) c.getHeight()) / c.getWidth();
                if (ratio >= kMinAspect && ratio <= kMaxAspect) {
                    polygons.add(c.approxPolygon(kPolygonPercentFit));
                    // System.out.println("  Accepted aspect ratio " + ratio);
                } else {
                    // System.out.println("  Rejected aspect ratio " + ratio);
                }
            }
        }

        // Pick the target with the highest center-point that matches yet more
        // filter criteria.
        WPIPolygon bestTarget = null;
        int highestY = Integer.MAX_VALUE;

        for (WPIPolygon p : polygons) {
            // TODO(jerry): Replace boolean filters with a scoring function?
            if (p.isConvex() && p.getNumVertices() == 4) { // quadrilateral
                WPIPoint[] points = p.getPoints();
                // Filter for polygons with 2 ~horizontal and 2 ~vertical sides.
                int numRoughlyHorizontal = 0;
                int numRoughlyVertical = 0;
                for (int i = 0; i < 4; ++i) {
                    double dy = points[i].getY() - points[(i + 1) % 4].getY();
                    double dx = points[i].getX() - points[(i + 1) % 4].getX();
                    double slope = Double.MAX_VALUE;
                    if (dx != 0) {
                        slope = Math.abs(dy / dx);
                    }

                    if (slope < kRoughlyHorizontalSlope) {
                        ++numRoughlyHorizontal;
                    } else if (slope > kRoughlyVerticalSlope) {
                        ++numRoughlyVertical;
                    }
                }

                if (numRoughlyHorizontal >= 2 && numRoughlyVertical == 2) {
                    int pCenterX = p.getX() + p.getWidth() / 2;
                    int pCenterY = p.getY() + p.getHeight() / 2;

                    rawImage.drawPolygon(p, candidateColor, 2);
                    rawImage.drawPoint(new WPIPoint(pCenterX, pCenterY),
                            targetColor, 2);
                    // Image y grows downward, so a smaller y is a higher target.
                    if (pCenterY < highestY) {
                        bestTarget = p;
                        highestY = pCenterY;
                    }
                } else {
                    rawImage.drawPolygon(p, reject2Color, 1);
                }
            } else {
                rawImage.drawPolygon(p, reject1Color, 1);
            }
        }

        Target found = null;
        if (bestTarget != null) {
            rawImage.drawPolygon(bestTarget, targetColor, 2);
            found = measureTarget(bestTarget);
        } else {
            LOG.fine("No target found");
        }

        // Draw a crosshair
        rawImage.drawLine(linePt1, linePt2, targetColor, 1);

        if (found == null) {
            found = new Target();
        }
        found.editedPicture = rawImage;

        daisyExtensions.releaseMemory();
        //System.gc();

        return found;
    }

    /**
     * Uses the camera, field, and robot dimensions to compute targeting info.
     *
     * @param target the polygon chosen as the best target, in image coords
     * @return a Target with azimuth (deg), elevation (deg), and range (ft)
     */
    private Target measureTarget(WPIPolygon target) {
        double w = target.getWidth();
        double h = target.getHeight();
        double x = target.getX() + w / 2; // target center in view coords
        double y = target.getY() + h / 2;

        double vw = size.width();
        double vh = size.height();
        double xc = x - vw / 2; // target center pos'n ±from view center
        double yc = vh / 2 - y; // ... in world coords on the viewing plane

        // Target angles relative to the camera, via the pinhole model:
        // tan(angle) = (offset on viewing plane) / (distance to viewing plane).
        double azimuthCam = Math.atan2(xc * 2 * kTanHFOV2, vw);
        double elevationCam = Math.atan2(yc * 2 * kTanVFOV2, vh);
        double rangeIn = kTargetWidthIn * vw / (w * 2 * kTanHFOV2);

        // Compute each result once and reuse it for both the Target and the
        // log message (the original recomputed these in the log statement).
        double azimuthDeg = Math.toDegrees(azimuthCam) - kShooterOffsetDeg;
        double elevationDeg = Math.toDegrees(elevationCam);
        double rangeFt = rangeIn / 12;

        // Put results in target.
        Target data = new Target();
        data.azimuth = azimuthDeg;
        // NOTE(review): the stored elevation is camera-relative, while the log
        // below adds kCameraPitchDeg to report a robot-relative elevation.
        // Confirm which one the consumer of Target.elevation expects.
        data.elevation = elevationDeg;
        data.range = rangeFt;

        LOG.fine("Best target at (" + x + ", " + y + ") " + w +" x " + h
                + ", shot azimuth=" + azimuthDeg +
                " elevation=" + (elevationDeg + kCameraPitchDeg) +
                " range=" + rangeFt);

        return data;
    }

}