// Ivan.cpp

/*

Singapore Polytechnic

Singapore Autonomous Underwater Vehicle Challenge 2017

Computer Vision System

Version : ALPHA

*/

//Include Libraries
// NOTE(review): the original include block was garbled by extraction; the ROS
// message headers below survive verbatim in the fragments, the OpenCV and C
// stdio headers are reconstructed from what the code uses — confirm against
// the original build.
#include <cstdio>
#include "opencv2/opencv.hpp"
#include "ros/ros.h"
#include "std_msgs/String.h"
#include "std_msgs/Int16.h"
#include "geometry_msgs/Point32.h"
#include "std_msgs/Float32.h" //added

//Declarations

// NOTE(review): `using namespace cv;` at file scope is generally discouraged,
// but every OpenCV call below relies on it.
using namespace cv;

int state = 0;        // not referenced in the visible code
int count = 0;        // incremented once per main-loop iteration
int hitflare = 0;     // not referenced in the visible code
bool ifgui = true;    // when true, show debug windows via imshow()
Mat hsvbottom;        // HSV conversion of the most recent camera frame
Mat hsvfront;         // never written in the visible code — presumably intended
                      // for the front camera's HSV frame; confirm
int H;                // H/S/V: not referenced in the visible code
int S;
int V;
int bot_L_r;          // bottom-camera lower BGR threshold (unused here)
int bot_L_g;
int bot_L_b;
int bot_U_r;          // bottom-camera upper BGR threshold (unused here)
int bot_U_g;
int bot_U_b;
int number = 0;       // not referenced in the visible code
int posX = 0;         // blob centroid X (shadowed by a local inside main)
int posY = 1;         // blob centroid Y (shadowed by a local inside main)
Moments oMoments;     // image moments of the latest thresholded frame
double dArea;         // m00 — blob area, used as the detection criterion
double dM01;          // m01 — numerator for centroid Y
double dM10;          // m10 — numerator for centroid X
int countF;           // not referenced in the visible code
int countB;           // not referenced in the visible code
int cam_calibration = 0 ;  // latest value from the "cam_calbration" topic
int front;            // /dev/video index used for the front camera
float curstate = 1;   // mission state from the "CurrentState" topic
int bottom;           // /dev/video index used for the bottom camera

void current_state(const std_msgs::Float32::ConstPtr& msg)

{

curstate = msg->data;

}

void camcalibration(const std_msgs::Int16::ConstPtr& msg)

{

cam_calibration = msg->data;

}

int main(int argc, char **argv)

{

//SETUP

ros::init(argc, argv, "auv_camera");

ros::NodeHandle n;

//ros::Publisher camera_pub_yl = n.advertise("cam_yellow", 1000);

ros::Subscriber sub_curstate = n.subscribe("CurrentState", 1000,current_state);

ros::Subscriber sub_cam_calibration = n.subscribe("cam_calbration", 1000, camcalibration);

ros::Publisher camera_pub_state = n.advertise("cam_state", 1000);

ros::Publisher camera_pub_angle = n.advertise("cam_angle", 1000);

ros::Publisher camera_pub_rd = n.advertise("cam_red", 1000);

ros::Rate loop_rate(10);

std_msgs::String cam_angle;

std_msgs::String cam_state;

//geometry_msgs::Point32 cam_black;

geometry_msgs::Point32 cam_red;

//geometry_msgs::Point32 cam_blue;

if(cam_calibration ==1)

{

front = 1;

bottom = 0;

}

if(cam_calibration ==0)

{

front = 0;

bottom = 1;

}

//Initialise Bottom Camera

FILE * pfile;

pfile = fopen("ivan.txt","w");

VideoCapture cambottom(bottom);

if(!cambottom.isOpened) //Checking sequence

{

return -1;

}

cambottom.set(CV_CAP_PROP_FRAME_WIDTH, 320);

cambottom.set(CV_CAP_PROP_FRAME_HEIGHT, 240);

//Initialise Front Camera

VideoCapture camfront(front);

if(!camfront.isOpened) //Checking sequence

{

return -1;

}

camfront.set(CV_CAP_PROP_FRAME_WIDTH, 320);

camfront.set(CV_CAP_PROP_FRAME_HEIGHT, 240);

char color = 'R';

int iLowH;

int iHighH;

int iLowS ;

int iHighS;

int iLowV;

int iHighV;

int RLowH = 14;

int RHighH = 69;

int RLowS = 11;

int RHighS = 74;

int RLowV = 38;

int RHighV = 89;

int GLowH =25;

int GHighH = 76;

int GLowS = 60;

int GHighS = 155;

int GLowV = 23;

int GHighV = 80;

int BLowH =83;

int BHighH = 179;

int BLowS = 30;

int BHighS = 122;

int BLowV = 0;

int BHighV = 29;

int YLowH = 22;

int YHighH = 38;

int YLowS = 102;

int YHighS = 128;

int YLowV = 90;

int YHighV = 105;

// if(ifgui) namedWindow("Original Frame (Bottom)",1);

//if(ifgui) namedWindow("Thresholded Frame (Bottom)",1);

//cam_state.data="black";

//camera_pub_state.publish(cam_state);

while(1)

{

count += 1;

//pfile = fopen("results.txt","w");

if(curstate == 1)

{

Mat framefront;

camfront >> framefront; // Get a new frame from camera

GaussianBlur(framefront, framefront, Size(7,7), 1.5, 1.5);

cvtColor(framefront, hsvbottom, CV_BGR2HSV);

Mat imgThresholdedF;

inRange(framefront, Scalar(RLowH,RLowS,RLowV), Scalar(RHighH,RHighS,RHighV), imgThresholdedF);

erode(imgThresholdedF, imgThresholdedF,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));                //Eliminate the small particles

dilate(imgThresholdedF, imgThresholdedF,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

erode(imgThresholdedF, imgThresholdedF,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

dilate(imgThresholdedF, imgThresholdedF,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

oMoments = moments(imgThresholdedF);

dM01 = oMoments.m01;

dM10 = oMoments.m10;

dArea = oMoments.m00;

if(dArea >2000)

{

int posX = dM10/dArea;

int posY = dM01/dArea;

cam_red.x=posX;

cam_red.y=posY;

camera_pub_rd.publish(cam_red);

//printf( " %d /n ", posX );

}else{

printf("Red not detected \n");

}

unsigned char *input = (unsigned char*)(imgThresholdedF.data);

if(ifgui) imshow("Original Frame (Front)", framefront);

if(ifgui) imshow("[Black] Thresholded Frame (Front)", imgThresholdedF);

if(waitKey(50) >= 0) break;

}

if(curstate == 3)

{

Mat framebottom;

cambottom >> framebottom; // Get a new frame from camera

GaussianBlur(framebottom, framebottom, Size(7,7), 1.5, 1.5);

cvtColor(framebottom, hsvbottom, CV_BGR2HSV);

Mat imgThresholdedB;

inRange(framebottom, Scalar(GLowH,GLowS,GLowV), Scalar(GHighH,GHighS,GHighV), imgThresholdedB);

erode(imgThresholdedB, imgThresholdedB,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));                //Eliminate the small particles

dilate(imgThresholdedB, imgThresholdedB,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

erode(imgThresholdedB, imgThresholdedB,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

dilate(imgThresholdedB, imgThresholdedB,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

oMoments = moments(imgThresholdedB);

dM01 = oMoments.m01;

dM10 = oMoments.m10;

dArea = oMoments.m00;

if(dArea >2000)

{

cam_state.data="detected";

camera_pub_state.publish(cam_state);

}else

{

cam_state.data="not detected";

camera_pub_state.publish(cam_state);

}

if(ifgui) imshow("Original Frame (Bottom)", framebottom);

if(ifgui) imshow("[Black] Thresholded Frame (Bottom)", imgThresholdedB);

if(waitKey(50) >= 0) break;

}

if(curstate == 5)

{

Mat framebottom;

cambottom >> framebottom; // Get a new frame from camera

GaussianBlur(framebottom, framebottom, Size(7,7), 1.5, 1.5);

cvtColor(framebottom, hsvbottom, CV_BGR2HSV);

Mat imgThresholdedB;

inRange(framebottom, Scalar(BLowH,BLowS,BLowV), Scalar(BHighH,BHighS,BHighV), imgThresholdedB);

erode(imgThresholdedB, imgThresholdedB,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));                //Eliminate the small particles

dilate(imgThresholdedB, imgThresholdedB,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

erode(imgThresholdedB, imgThresholdedB,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

dilate(imgThresholdedB, imgThresholdedB,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

oMoments = moments(imgThresholdedB);

dM01 = oMoments.m01;

dM10 = oMoments.m10;

dArea = oMoments.m00;

if(dArea >2000)

{

cam_state.data="detected";

camera_pub_state.publish(cam_state);

}else

{

cam_state.data="not detected";

camera_pub_state.publish(cam_state);

}

if(ifgui) imshow("Original Frame (Bottom)", framebottom);

if(ifgui) imshow("[Black] Thresholded Frame (Bottom)", imgThresholdedB);

if(waitKey(30) >= 0) break;

}

if(curstate == 7)

{

Mat framefront;

camfront >> framefront; // Get a new frame from camera

GaussianBlur(framefront, framefront, Size(7,7), 1.5, 1.5);

cvtColor(framefront, hsvbottom, CV_BGR2HSV);

Mat imgThresholdedF;

inRange(framefront, Scalar(YLowH,YLowS,YLowV), Scalar(YHighH,YHighS,YHighV), imgThresholdedF);

erode(imgThresholdedF, imgThresholdedF,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));                //Eliminate the small particles

dilate(imgThresholdedF, imgThresholdedF,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

erode(imgThresholdedF, imgThresholdedF,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

dilate(imgThresholdedF, imgThresholdedF,getStructuringElement(MORPH_ELLIPSE, Size(5,5)));

oMoments = moments(imgThresholdedF);

dM01 = oMoments.m01;

dM10 = oMoments.m10;

dArea = oMoments.m00;

if(dArea >2000)

{

cam_state.data="detected";

camera_pub_state.publish(cam_state);

}else

{

cam_state.data="not detected";

camera_pub_state.publish(cam_state);

}

unsigned char *input = (unsigned char*)(imgThresholdedF.data);

if(ifgui) imshow("Original Frame (Front)", framefront);

if(ifgui) imshow("[Black] Thresholded Frame (Front)", imgThresholdedF);

if(waitKey(50) >= 0) break;

} // case 7

}

// the camera will be deinitialized automatically in VideoCapture destructor

return 0;

}