| 1 | package org.usfirst.frc.team3501.robot.subsystems; |
| 2 | |
import java.util.ArrayList;
import java.util.Iterator;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.videoio.VideoCapture;

import edu.wpi.first.wpilibj.networktables.NetworkTable;
| 7 | |
| 8 | /** |
| 9 | * |
| 10 | */ |
| 11 | |
| 12 | /** |
| 13 | * |
| 14 | * @author Elijah Kaufman |
| 15 | * @version 1.0 |
| 16 | * @description Uses opencv and network table 3.0 to detect the vision targets |
| 17 | * |
| 18 | */ |
| 19 | public class TowerTracker { |
| 20 | |
| 21 | /** |
| 22 | * static method to load opencv and networkTables |
| 23 | */ |
| 24 | static { |
| 25 | System |
| 26 | .setProperty( |
| 27 | "java.library.path", |
| 28 | System.getProperty("java.library.path") |
| 29 | + ":/home/cindy/Robotics/Workspace/opencv-3.1.0/cmake/lib"); // path |
| 30 | // to |
| 31 | // opencv_java310 |
| 32 | // System.out.println(System.getProperty("java.library.path")); |
| 33 | System.loadLibrary(Core.NATIVE_LIBRARY_NAME); |
| 34 | NetworkTable.setClientMode(); |
| 35 | NetworkTable.setIPAddress("roborio-3502.local"); |
| 36 | } |
| 37 | // constants for the color rbg values |
| 38 | public static final Scalar |
| 39 | RED = new Scalar(0, 0, 255), |
| 40 | BLUE = new Scalar(255, 0, 0), |
| 41 | GREEN = new Scalar(0, 255, 0), |
| 42 | BLACK = new Scalar(0, 0, 0), |
| 43 | YELLOW = new Scalar(0, 255, 255), |
| 44 | // these are the threshold values in order |
| 45 | LOWER_BOUNDS = new Scalar(58, 0, 109), |
| 46 | UPPER_BOUNDS = new Scalar(93, 255, 240); |
| 47 | |
| 48 | // the size for resing the image |
| 49 | public static final Size resize = new Size(320, 240); |
| 50 | |
| 51 | // ignore these |
| 52 | public static VideoCapture videoCapture; |
| 53 | public static Mat matOriginal, matHSV, matThresh, clusters, matHeirarchy; |
| 54 | |
| 55 | // Constants for known variables |
| 56 | // the height to the top of the target in first stronghold is 97 inches |
| 57 | public static final int TOP_TARGET_HEIGHT = 97; |
| 58 | // the physical height of the camera lens |
| 59 | public static final int TOP_CAMERA_HEIGHT = 32; |
| 60 | |
| 61 | // camera details, can usually be found on the datasheets of the camera |
| 62 | public static final double VERTICAL_FOV = 51; |
| 63 | public static final double HORIZONTAL_FOV = 67; |
| 64 | public static final double CAMERA_ANGLE = 10; |
| 65 | |
| 66 | public static boolean shouldRun = true; |
| 67 | |
| 68 | /** |
| 69 | * |
| 70 | * @param args |
| 71 | * command line arguments |
| 72 | * just the main loop for the program and the entry points |
| 73 | */ |
| 74 | public static void main(String[] args) { |
| 75 | // TODO Auto-generated method stub |
| 76 | matOriginal = new Mat(); |
| 77 | matHSV = new Mat(); |
| 78 | matThresh = new Mat(); |
| 79 | clusters = new Mat(); |
| 80 | matHeirarchy = new Mat(); |
| 81 | // main loop of the program |
| 82 | while (shouldRun) { |
| 83 | try { |
| 84 | // opens up the camera stream and tries to load it |
| 85 | videoCapture = new VideoCapture(); |
| 86 | // replaces the ##.## with your team number |
| 87 | videoCapture.open("http://10.35.01.11/mjpg/video.mjpg"); |
| 88 | // Example |
| 89 | // cap.open("http://10.30.19.11/mjpg/video.mjpg"); |
| 90 | // wait until it is opened |
| 91 | while (!videoCapture.isOpened()) { |
| 92 | } |
| 93 | // time to actually process the acquired images |
| 94 | processImage(); |
| 95 | } catch (Exception e) { |
| 96 | e.printStackTrace(); |
| 97 | break; |
| 98 | } |
| 99 | } |
| 100 | // make sure the java process quits when the loop finishes |
| 101 | videoCapture.release(); |
| 102 | System.exit(0); |
| 103 | } |
| 104 | |
| 105 | /** |
| 106 | * |
| 107 | * reads an image from a live image capture and outputs information to the |
| 108 | * SmartDashboard or a file |
| 109 | */ |
| 110 | public static void processImage() { |
| 111 | ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>(); |
| 112 | double x, y, targetX, targetY, distance, azimuth; |
| 113 | // frame counter |
| 114 | int FrameCount = 0; |
| 115 | long before = System.currentTimeMillis(); |
| 116 | // only run for the specified time |
| 117 | while (FrameCount < 100) { |
| 118 | contours.clear(); |
| 119 | // capture from the axis camera |
| 120 | videoCapture.read(matOriginal); |
| 121 | // captures from a static file for testing |
| 122 | // matOriginal = Imgcodecs.imread("someFile.png"); |
| 123 | Imgproc.cvtColor(matOriginal, matHSV, Imgproc.COLOR_BGR2HSV); |
| 124 | Core.inRange(matHSV, LOWER_BOUNDS, UPPER_BOUNDS, matThresh); |
| 125 | Imgproc.findContours(matThresh, contours, matHeirarchy, |
| 126 | Imgproc.RETR_EXTERNAL, |
| 127 | Imgproc.CHAIN_APPROX_SIMPLE); |
| 128 | // make sure the contours that are detected are at least 20x20 |
| 129 | // pixels with an area of 400 and an aspect ration greater then 1 |
| 130 | for (Iterator<MatOfPoint> iterator = contours.iterator(); iterator |
| 131 | .hasNext();) { |
| 132 | MatOfPoint matOfPoint = iterator.next(); |
| 133 | Rect rec = Imgproc.boundingRect(matOfPoint); |
| 134 | if (rec.height < 25 || rec.width < 25) { |
| 135 | iterator.remove(); |
| 136 | continue; |
| 137 | } |
| 138 | float aspect = (float) rec.width / (float) rec.height; |
| 139 | if (aspect < 1.0) |
| 140 | iterator.remove(); // remove contour if width < height |
| 141 | } |
| 142 | for (MatOfPoint mop : contours) { |
| 143 | Rect rec = Imgproc.boundingRect(mop); |
| 144 | Imgproc.rectangle(matOriginal, rec.br(), rec.tl(), BLACK); |
| 145 | } |
| 146 | // if there is only 1 target, then we have found the target we want |
| 147 | if (contours.size() == 1) { |
| 148 | Rect rec = Imgproc.boundingRect(contours.get(0)); |
| 149 | // "fun" math brought to you by miss daisy (team 341)! |
| 150 | y = rec.br().y + rec.height / 2; |
| 151 | y = -((2 * (y / matOriginal.height())) - 1); |
| 152 | distance = (TOP_TARGET_HEIGHT - TOP_CAMERA_HEIGHT) / |
| 153 | Math.tan((y * VERTICAL_FOV / 2.0 + CAMERA_ANGLE) * Math.PI / 180); |
| 154 | // angle to target...would not rely on this |
| 155 | targetX = rec.tl().x + rec.width / 2; |
| 156 | targetX = (2 * (targetX / matOriginal.width())) - 1; |
| 157 | azimuth = normalize360(targetX * HORIZONTAL_FOV / 2.0 + 0); |
| 158 | // drawing info on target |
| 159 | Point center = new Point(rec.br().x - rec.width / 2 - 15, rec.br().y |
| 160 | - rec.height / 2); |
| 161 | Point centerw = new Point(rec.br().x - rec.width / 2 - 15, rec.br().y |
| 162 | - rec.height / 2 - 20); |
| 163 | // Imgproc.putText(matOriginal, "" + (int) distance, center, |
| 164 | // Core.FONT_HERSHEY_PLAIN, 1, BLACK); |
| 165 | // Imgproc.putText(matOriginal, "" + (int) azimuth, centerw, |
| 166 | // Core.FONT_HERSHEY_PLAIN, 1, BLACK); |
| 167 | System.out.println(distance + "m, " + azimuth + " degrees " |
| 168 | + center.x + ", " + center.y); |
| 169 | } |
| 170 | // output an image for debugging |
| 171 | // Imgcodecs.imwrite("output.png", matOriginal); |
| 172 | FrameCount++; |
| 173 | } |
| 174 | shouldRun = false; |
| 175 | } |
| 176 | |
| 177 | /** |
| 178 | * @param angle |
| 179 | * a nonnormalized angle |
| 180 | */ |
| 181 | public static double normalize360(double angle) { |
| 182 | while (angle >= 360.0) |
| 183 | { |
| 184 | angle -= 360.0; |
| 185 | } |
| 186 | while (angle < 0.0) |
| 187 | { |
| 188 | angle += 360.0; |
| 189 | } |
| 190 | return angle; |
| 191 | } |
| 192 | } |