Unverified Commit 779433a7 authored by Aditya Kamath's avatar Aditya Kamath Committed by GitHub
Browse files

Add files via upload

parent 32d42740
# Build configuration for the droneref_cv computer-vision executables.
# Every executable links against OpenCV, found via find_package.
cmake_minimum_required(VERSION 3.5)
project(droneref_cv)

# Only default to Debug when the user has not chosen a build type
# (the original unconditionally forced Debug at the end of the file).
if(NOT CMAKE_BUILD_TYPE)
  set(CMAKE_BUILD_TYPE Debug)
endif()

find_package(OpenCV REQUIRED)

# Emit all executables into <source>/bin.
set(EXECUTABLE_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/bin)

add_executable(ball_detect_red src/ball_detect_red.cpp)
add_executable(ball_detect_yellow src/ball_detect_yellow.cpp)
add_executable(aruco_detect src/aruco_detect.cpp)
add_executable(player_tracker src/player_tracker.cpp)
add_executable(aruco_test src/aruco_test.cpp)
add_executable(peters_code src/peters_code.cpp)

target_link_libraries(ball_detect_red PRIVATE ${OpenCV_LIBS})
target_link_libraries(ball_detect_yellow PRIVATE ${OpenCV_LIBS})
# BUG FIX: this line previously linked a non-existent target "ball_detect",
# which left the aruco_detect executable without the OpenCV libraries.
target_link_libraries(aruco_detect PRIVATE ${OpenCV_LIBS})
target_link_libraries(player_tracker PRIVATE ${OpenCV_LIBS})
target_link_libraries(aruco_test PRIVATE ${OpenCV_LIBS})
target_link_libraries(peters_code PRIVATE ${OpenCV_LIBS})
<?xml version="1.0"?>
<opencv_storage>
<calibration_time>"za 06 mei 2017 13:26:35 CEST"</calibration_time>
<image_width>1024</image_width>
<image_height>768</image_height>
<flags>0</flags>
<camera_matrix type_id="opencv-matrix">
<rows>3</rows>
<cols>3</cols>
<dt>d</dt>
<data>
3.7918575178239792e+02 0. 5.3356290001110233e+02 0.
3.7864655128944844e+02 3.7715032550746349e+02 0. 0. 1.</data></camera_matrix>
<distortion_coefficients type_id="opencv-matrix">
<rows>1</rows>
<cols>5</cols>
<dt>d</dt>
<data>
-2.8806240852107684e-01 7.0975791614964603e-02
-5.2679420299604274e-04 -4.4826680006297226e-04
-7.1377250376219838e-03</data></distortion_coefficients>
<avg_reprojection_error>1.2817100554014482e+00</avg_reprojection_error>
</opencv_storage>
<?xml version="1.0"?>
<opencv_storage>
<calibration_time>"vr 05 mei 2017 18:12:02 CEST"</calibration_time>
<image_width>640</image_width>
<image_height>480</image_height>
<flags>0</flags>
<camera_matrix type_id="opencv-matrix">
<rows>3</rows>
<cols>3</cols>
<dt>d</dt>
<data>
2.3670764887983177e+02 0. 3.1952981529956298e+02 0.
2.3529691756630183e+02 2.2554756882283064e+02 0. 0. 1.</data></camera_matrix>
<distortion_coefficients type_id="opencv-matrix">
<rows>1</rows>
<cols>5</cols>
<dt>d</dt>
<data>
-2.7086967831833159e-01 5.9345367785705518e-02
2.4027259472500081e-03 3.8852346420784989e-04
-5.1110468480417602e-03</data></distortion_coefficients>
<avg_reprojection_error>1.0620442014340550e+00</avg_reprojection_error>
</opencv_storage>
#include <opencv2/highgui.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/aruco.hpp>
#include <opencv2/aruco/charuco.hpp>
#include <opencv2/imgproc.hpp>
#include <vector>
#include <iostream>
#include <ctime>
#include<stdio.h> //printf
#include<string.h> //memset
#include<stdlib.h> //exit(0);
#include<arpa/inet.h>
#include<sys/socket.h>
#include <errno.h>
using namespace cv;
using namespace std;
int main(int argc, char* argv[]){
cv::VideoCapture inputVideo;
inputVideo.open(0);
int image_width, image_height;
Mat cameraMatrix, distCoeffs;
FileStorage fs("CameraCalibration_Data.xml", FileStorage::READ);
fs.open("../CameraCalibration_Data.xml", FileStorage::READ);
fs["image_width"] >> image_width;
fs["image_height"] >> image_height;
fs["camera_matrix"] >> cameraMatrix;
fs["distortion_coefficients"] >> distCoeffs;
fs.release();
Ptr<aruco::Dictionary> aruco_dictionary = aruco::getPredefinedDictionary(aruco::DICT_4X4_250); // load dictionary
while(inputVideo.grab()){
Mat image, imageCopy;
inputVideo.retrieve(image);
image.copyTo(imageCopy);
vector<int> ids;
vector<vector<Point2f> > corners;
aruco::detectMarkers(image, aruco_dictionary, corners, ids);
if(ids.size() > 0){
aruco::drawDetectedMarkers(imageCopy, corners, ids);
vector<Mat> rvecs, tvecs;
aruco::estimatePoseSingleMarkers(corners, 0.05, cameraMatrix, distCoeffs, rvecs, tvecs);
for(int i=0; i<ids.size(); i++){
aruco::drawAxis(imageCopy, cameraMatrix, distCoeffs, rvecs[i], tvecs[i], 0.1);
}
}
imshow("out", imageCopy);
}
return 0;
}
#include <vector>
#include <iostream>
#include <ctime>
#include<stdio.h>
#include<string.h>
#include<stdlib.h>
#include<arpa/inet.h>
#include<sys/socket.h>
#include <errno.h>
using namespace std;
#define UDP_BUFLEN 512
//can be changed to whatever is required
// 3-D ball position payload sent over UDP to the control software.
// NOTE(review): this struct is transmitted as raw bytes (memcpy into the
// datagram below), so sender and receiver must agree on double size,
// endianness and struct padding -- confirm against the receiving end.
struct STRUCT_ball_position {
double x; // presumably meters in the camera/world frame -- TODO confirm units
double y;
double z;
};
int main(int argc, char* argv[])
{
// -- UDP_CONNECTION --
//Structure for address of server
struct sockaddr_in myaddr;
myaddr.sin_addr.s_addr=0;//htonl(INADDR_ANY); // ip addres INADDR_ANY = local host
myaddr.sin_port=htons(51234); //port number
myaddr.sin_family = AF_INET; //http://stackoverflow.com/questions/337422/how-to-udp-broadcast-with-c-in-linux
// Create Socket
int sockfd; // file descriptor
sockfd = socket(AF_INET, SOCK_DGRAM, 0);
if (sockfd < 0) {
perror("ERROR opening socket");
return 0;
}
// -- VARIABLE TO STORE RESULTS --
STRUCT_ball_position ball_position; // variable to safe position of drone/camera
// -- INFINIT LOOP --
cout << "Start tx " << endl;
clock_t timer_begin = clock(); // timer to measure execution time
double timer_elapsed_time;
while (1)
{
// Show sample time
//if(DEBUG) {
timer_elapsed_time = double(clock() - timer_begin) / CLOCKS_PER_SEC;
//cout << "Tsample: "<< timer_elapsed_time << endl;
timer_begin = clock();
//}
ball_position.x = 1;
ball_position.y = 2;
ball_position.z = 3;
// Send to UDP port
char buf[UDP_BUFLEN]; // Create clear buffer
memcpy(buf, &ball_position, sizeof(ball_position)); // copy drone_position to buffer
int bytes_sent = sendto(sockfd, buf, sizeof(ball_position), MSG_DONTWAIT, (struct sockaddr *)&myaddr, sizeof(myaddr)); // send bytes to UDP port
if(bytes_sent == -1) { printf("Error sending msg: %s\n", strerror(errno)); } // error if bytes not sent
// Print results on terminal
cout <<"Bytes sent: "<< bytes_sent << " @ "<< 1/timer_elapsed_time <<" Hz"<< " x"<< ball_position.x << " y"<< ball_position.y << " z"<< ball_position.z << endl;
//WAITKEY IS AN OPENCV FUNCTION, SO UNCOMMENT IT WHEN YOU ADD THE BALL DETECTION CODE
// Wait some time to give operating system time to do other stuff
// waitKey(5);
} // end of while loop
// Close program
return 0;
}
// RedBallTracker.cpp
#include<opencv2/core/core.hpp>
#include<opencv2/highgui/highgui.hpp>
#include<opencv2/imgproc/imgproc.hpp>
#include<iostream>
///////////////////////////////////////////////////////////////////////////////////////////////////
int main() {
cv::VideoCapture capWebcam(0); // declare a VideoCapture object and associate to webcam, 0 => use 1st webcam
if (capWebcam.isOpened() == false) { // check if VideoCapture object was associated to webcam successfully
std::cout << "error: capWebcam not accessed successfully\n\n"; // if not, print error message to std out
return(0); // and exit program
}
cv::Mat imgOriginal; // input image
cv::Mat imgHSV;
cv::Mat imgThreshLow;
cv::Mat imgThreshHigh;
cv::Mat imgThresh;
std::vector<cv::Vec3f> v3fCircles; // 3 element vector of floats, this will be the pass by reference output of HoughCircles()
char charCheckForEscKey = 0;
while (charCheckForEscKey != 27 && capWebcam.isOpened()) { // until the Esc key is pressed or webcam connection is lost
bool blnFrameReadSuccessfully = capWebcam.read(imgOriginal); // get next frame
if (!blnFrameReadSuccessfully || imgOriginal.empty()) { // if frame not read successfully
std::cout << "error: frame not read from webcam\n"; // print error message to std out
break; // and jump out of while loop
}
cv::cvtColor(imgOriginal, imgHSV, CV_BGR2HSV);
cv::inRange(imgHSV, cv::Scalar(0, 155, 155), cv::Scalar(18, 255, 255), imgThreshLow);
cv::inRange(imgHSV, cv::Scalar(165, 155, 155), cv::Scalar(179, 255, 255), imgThreshHigh);
cv::add(imgThreshLow, imgThreshHigh, imgThresh);
cv::GaussianBlur(imgThresh, imgThresh, cv::Size(3, 3), 0);
cv::Mat structuringElement = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
cv::dilate(imgThresh, imgThresh, structuringElement);
cv::erode(imgThresh, imgThresh, structuringElement);
// fill circles vector with all circles in processed image
cv::HoughCircles(imgThresh, // input image
v3fCircles, // function output (must be a standard template library vector
CV_HOUGH_GRADIENT, // two-pass algorithm for detecting circles, this is the only choice available
2, // size of image / this value = "accumulator resolution", i.e. accum res = size of image / 2
imgThresh.rows / 4, // min distance in pixels between the centers of the detected circles
100, // high threshold of Canny edge detector (called by cvHoughCircles)
50, // low threshold of Canny edge detector (set at 1/2 previous value)
10, // min circle radius (any circles with smaller radius will not be returned)
400); // max circle radius (any circles with larger radius will not be returned)
for (int i = 0; i < v3fCircles.size(); i++) { // for each circle . . .
// show ball position x, y, and radius to command line
std::cout << "ball position x = " << v3fCircles[i][0] // x position of center point of circle
<< ", y = " << v3fCircles[i][1] // y position of center point of circle
<< ", radius = " << v3fCircles[i][2] << "\n"; // radius of circle
// draw small green circle at center of detected object
cv::circle(imgOriginal, // draw on original image
cv::Point((int)v3fCircles[i][0], (int)v3fCircles[i][1]), // center point of circle
3, // radius of circle in pixels
cv::Scalar(0, 255, 0), // draw pure green (remember, its BGR, not RGB)
CV_FILLED); // thickness, fill in the circle
// draw red circle around the detected object
cv::circle(imgOriginal, // draw on original image
cv::Point((int)v3fCircles[i][0], (int)v3fCircles[i][1]), // center point of circle
(int)v3fCircles[i][2], // radius of circle in pixels
cv::Scalar(0, 0, 255), // draw pure red (remember, its BGR, not RGB)
3); // thickness of circle in pixels
} // end for
// declare windows
cv::namedWindow("imgOriginal", CV_WINDOW_AUTOSIZE); // note: you can use CV_WINDOW_NORMAL which allows resizing the window
cv::namedWindow("imgThresh", CV_WINDOW_AUTOSIZE); // or CV_WINDOW_AUTOSIZE for a fixed size window matching the resolution of the image
// CV_WINDOW_AUTOSIZE is the default
cv::imshow("imgOriginal", imgOriginal); // show windows
cv::imshow("imgThresh", imgThresh);
charCheckForEscKey = cv::waitKey(1); // delay (in ms) and get key press, if any
} // end while
return(0);
}
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <iostream>
/*
TRACK A YELLOW BALL - OBJECT DETECTION METHOD USING COLOR SEPERATION OPEN CV 3.1.0
author - Rachit Gulati
*/
int main() {
cv::VideoCapture capWebcam(0); // declare a VideoCapture object to associate webcam, 0 means use 1st (default) webcam
if (capWebcam.isOpened() == false) // To check if object was associated to webcam successfully
{
std::cout << "error: Webcam connect unsuccessful\n"; // if not then print error message
return(0); // and exit program
}
cv::Mat imgOriginal; // Input image
cv::Mat hsvImg; // HSV Image
cv::Mat threshImg; // Thresh Image
std::vector<cv::Vec3f> v3fCircles; // 3 element vector of floats, this will be the pass by reference output of HoughCircles()
char charCheckForEscKey = 0;
int lowH = 21; // Set Hue
int highH = 30;
int lowS = 200; // Set Saturation
int highS = 255;
int lowV = 102; // Set Value
int highV = 225;
// HUE for YELLOW is 21-30.
// Adjust Saturation and Value depending on the lighting condition of the environment as well as the surface of the object.
while (charCheckForEscKey != 27 && capWebcam.isOpened()) { // until the Esc is pressed or webcam connection is lost
bool blnFrameReadSuccessfully = capWebcam.read(imgOriginal); // get next frame
if (!blnFrameReadSuccessfully || imgOriginal.empty()) { // if frame read unsuccessfully
std::cout << "error: frame can't read \n"; // print error message
break; // jump out of loop
}
cv::cvtColor(imgOriginal, hsvImg, CV_BGR2HSV); // Convert Original Image to HSV Thresh Image
cv::inRange(hsvImg, cv::Scalar(lowH, lowS, lowV), cv::Scalar(highH, highS, highV), threshImg);
cv::GaussianBlur(threshImg, threshImg, cv::Size(3, 3), 0); //Blur Effect
cv::dilate(threshImg, threshImg, 0); // Dilate Filter Effect
cv::erode(threshImg, threshImg, 0); // Erode Filter Effect
// fill circles vector with all circles in processed image
cv::HoughCircles(threshImg,v3fCircles,CV_HOUGH_GRADIENT,2,threshImg.rows / 4,100,50,10,800); // algorithm for detecting circles
for (int i = 0; i < v3fCircles.size(); i++) { // for each circle
std::cout << "Ball position X = "<< v3fCircles[i][0] // x position of center point of circle
<<",\tY = "<< v3fCircles[i][1] // y position of center point of circle
<<",\tRadius = "<< v3fCircles[i][2]<< "\n"; // radius of circle
// draw small green circle at center of object detected
cv::circle(imgOriginal, // draw on original image
cv::Point((int)v3fCircles[i][0], (int)v3fCircles[i][1]), // center point of circle
3, // radius of circle in pixels
cv::Scalar(0, 255, 0), // draw green
CV_FILLED); // thickness
// draw red circle around object detected
cv::circle(imgOriginal, // draw on original image
cv::Point((int)v3fCircles[i][0], (int)v3fCircles[i][1]), // center point of circle
(int)v3fCircles[i][2], // radius of circle in pixels
cv::Scalar(0, 0, 255), // draw red
3); // thickness
}
// declare windows
cv::namedWindow("imgOriginal", CV_WINDOW_AUTOSIZE);
cv::namedWindow("threshImg", CV_WINDOW_AUTOSIZE);
/* Create trackbars in "threshImg" window to adjust according to object and environment.*/
cv::createTrackbar("LowH", "threshImg", &lowH, 179); //Hue (0 - 179)
cv::createTrackbar("HighH", "threshImg", &highH, 179);
cv::createTrackbar("LowS", "threshImg", &lowS, 255); //Saturation (0 - 255)
cv::createTrackbar("HighS", "threshImg", &highS, 255);
cv::createTrackbar("LowV", "threshImg", &lowV, 255); //Value (0 - 255)
cv::createTrackbar("HighV", "threshImg", &highV, 255);
cv::imshow("imgOriginal", imgOriginal); // show windows
cv::imshow("threshImg", threshImg);
charCheckForEscKey = cv::waitKey(1); // delay and get key press
}
return(0);
}
#include <opencv2/highgui.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/aruco/charuco.hpp>
#include <opencv2/imgproc.hpp>
#include <vector>
#include <iostream>
#include <ctime>
#include<stdio.h> //printf
#include<string.h> //memset
#include<stdlib.h> //exit(0);
#include<arpa/inet.h>
#include<sys/socket.h>
#include <errno.h>
using namespace cv;
using namespace std;
// Changes:
// 5-5-2017 Peter Rooijakkers initial design
// 9-5-2017 Peter Rooijakkers add UDP connection
// 19-5-2017 Peter Rooijakkers add extra markers
// -- SETTINGS ! --
// #define DEBUG false // true to show the feed from the camera, false to hide imshow
#define CAM_DEV_NR 0 // 0 for drone and 1 for laptop because then the webcam is often already 0
#define PARAM_EXPOSURE_TIME 0.01 // keep as low as possible to avoid motion blur
#define UDP_BUFLEN 512
// 6-DOF drone pose payload sent over UDP to the control software.
// NOTE(review): transmitted as raw struct bytes, so sender and receiver must
// agree on double size, endianness and padding -- confirm against receiver.
struct STRUCT_drone_position {
double x; // presumably meters in the marker world frame -- TODO confirm units
double y;
double z;
double yaw; // orientation; radians vs degrees not established here -- verify
double pitch;
double roll;
};
int main(int argc, char* argv[])
{
// -- LOAD INPUT ARGUMENTS --
cout << "Parameters:"<< endl;
cout << " -e 0.01 exposure_time" << endl;
cout << " -d 1 DEBUG: ( 1 imshow) ( 0 hide)" << endl;
cout << " -c 0 cam_dev_nr (integer)" << endl;
double exposure_time = PARAM_EXPOSURE_TIME;
int DEBUG = 0;
int cam_dev_nr = CAM_DEV_NR;
for (int i = 1; i < argc; ++i){ // scan through all input arguments
if (string(argv[i]) == "-e" && i+1 <= argc){
exposure_time = atof( argv[i+1]);
cout << "exposure_time = " << exposure_time << endl;
}
if (string(argv[i]) == "-d" && i+1 <= argc){
DEBUG = atoi( argv[i+1] );
cout << "DEBUG = "<< DEBUG << endl;
}
if (string(argv[i]) == "-c" && i+1 <= argc){
cam_dev_nr = atoi( argv[i+1] );
cout << "cam_dev_nr = " << cam_dev_nr << endl;
}
}
// -- LOAD FROM FILE: Camera Calibration Parameters -- //
int image_width, image_height;
Mat cameraMatrix, distCoeffs;
FileStorage fs("CameraCalibration_Data.xml", FileStorage::READ);
fs.open("../CameraCalibration_Data.xml", FileStorage::READ);
fs["image_width"] >> image_width;
fs["image_height"] >> image_height;
fs["camera_matrix"] >> cameraMatrix;
fs["distortion_coefficients"] >> distCoeffs;
fs.release();
// -- VARIABLE DECLARATIONS -- //
// Mat opencv coordinates
// --> x
// |
// v y
Mat frame; // color
Mat im; // gray scale
// -- VARIABLES FOR ARUCO -- //
Ptr<aruco::Dictionary> aruco_dictionary = aruco::getPredefinedDictionary(aruco::DICT_4X4_250); // load dictionary
vector<int> aruco_ids; // to store found marker Id numbers
vector<vector<Point2f> > aruco_corners, aruco_rejectedCandidates; // to store found marker corners
Ptr<aruco::DetectorParameters> aruco_parameters = aruco::DetectorParameters::create(); // set aruco parameters
//aruco_parameters->adaptiveThreshWinSizeMin = 100;
//aruco_parameters->adaptiveThreshWinSizeMax = 200;
//aruco_parameters->adaptiveThreshWinSizeStep = 200;
//aruco_parameters->adaptiveThreshConstant = 128;
//aruco_parameters->doCornerRefinement = 0;
// World-points/object-points of Aruco markers currently hardcoded
// aruco corner coordinates (clock wise)
// 1 2
// 3 4
// Creates a vector of vectors where each element is a 3D position of one of the corners of the aruco markers
// VectorWithMarkers.VectorWith4MarkerCorners.3DPosOf1Corner
float m = 0.095; // Marker size in meter (rib length of black square)
float ox = 0.210; // Ofset of markers in x-dir
float oy = 0.297; // Ofset of markers in y-dir
vector<int> aruco_objIds; // store ID marker WF (world frame)
vector< vector<Point3f> > aruco_objPoints; // store all corner pos WF
vector<Point3f> aruco_objPoints_temp; // store corners of 1 marker temprary
// Hard coded world points
int idn = 100; // first marker ID
for(int i = 0; i < 4; ++i){ // i markers in X direction
for(int j = 0; j < 4; ++j){ // j markers in Y direction
aruco_objIds.push_back(idn); // add ID to vector
aruco_objPoints_temp.clear(); // clear temporary vector
aruco_objPoints_temp.push_back( Point3f(i*ox+0,j*oy+0,0) ); // add corner 1 to temporary vector
aruco_objPoints_temp.push_back( Point3f(i*ox+m,j*oy+0,0) ); // add corner 2 to temporary vector
aruco_objPoints_temp.push_back( Point3f(i*ox+m,j*oy+m,0) ); // add corner 3 to temporary vector
aruco_objPoints_temp.push_back( Point3f(i*ox+0,j*oy+m,0) ); // add corner 4 to temporary vector
aruco_objPoints.push_back( aruco_objPoints_temp ); // add temporary vector to vector with all corners
++idn; // increase aruco ID
}
}
// Second orgin of markers
m = 0.123;
ox = 0.75;
oy = 0.75;
idn = 50;
for(int i = 0; i < 4; ++i){ // i markers in X direction
for(int j = 0; j < 5; ++j){ // j markers in Y direction
aruco_objIds.push_back(idn); // add ID to vector
aruco_objPoints_temp.clear(); // clear temporary vector
aruco_objPoints_temp.push_back( Point3f(i*ox+0,j*oy+0,0) ); // add corner 1 to temporary vector
aruco_objPoints_temp.push_back( Point3f(i*ox+m,j*oy+0,0) ); // add corner 2 to temporary vector
aruco_objPoints_temp.push_back( Point3f(i*ox+m,j*oy+m,0) ); // add corner 3 to temporary vector
aruco_objPoints_temp.push_back( Point3f(i*ox+0,j*oy+m,0) ); // add corner 4 to temporary vector
aruco_objPoints.push_back( aruco_objPoints_temp ); // add temporary vector to vector with all corners
++idn; // increase aruco ID
}
}
// -- UDP_CONNECTION --
//Structure for address of server
struct sockaddr_in myaddr;
myaddr.sin_addr.s_addr=0;//htonl(INADDR_ANY); // ip addres INADDR_ANY = local host
myaddr.sin_port=htons(51234); //port number
myaddr.sin_family = AF_INET; //http://stackoverflow.com/questions/337422/how-to-udp-broadcast-with-c-in-linux
// Create Socket
int sockfd; // file descriptor
sockfd = socket(AF_INET, SOCK_DGRAM, 0);
if (sockfd < 0) {