// Autocalib.cpp
#include <Autocalib.h>

#include <algorithm>

vector<vector<cv::Point2d> > Autocalib::findFeaturePoint(vector<cv::Mat> img,double qualityLevel )
{
	// Detects AKAZE features in every image and matches each consecutive
	// pair (img[i-1], img[i]). For N images the result holds 2*(N-1) point
	// lists: [pts in img0, pts in img1, pts in img1, pts in img2, ...].
	// Blocks on a key press after displaying each pair's matches.
	vector<vector<cv::Point2d> > MatchVector; // measurement matrix (pairwise matches)

	// Step 1. Extract AKAZE features of the first image once; each loop
	// iteration reuses the previous image's keypoints/descriptors.
	vector<cv::KeyPoint> kpts1;
	cv::Mat desc1;
	cv::Ptr<cv::AKAZE> akaze = cv::AKAZE::create();
	akaze->setThreshold(qualityLevel); // detector response threshold
	akaze->detectAndCompute(img[0], cv::noArray(), kpts1, desc1);

	for(size_t i = 1; i < img.size(); i++)
	{
		vector<cv::KeyPoint> kpts2;
		cv::Mat desc2;
		akaze->detectAndCompute(img[i], cv::noArray(), kpts2, desc2);

		// Hamming norm: AKAZE's default MLDB descriptor is binary.
		cv::BFMatcher matcher(cv::NORM_HAMMING);
		vector<vector<cv::DMatch> > nn_matches;
		matcher.knnMatch(desc1, desc2, nn_matches, 2);

		// Lowe's ratio test: keep a match only when the best neighbour is
		// clearly better than the second best.
		vector<cv::KeyPoint> matched1, matched2;
		vector<cv::DMatch> good_matches;
		const double nnMatchRatio = 0.5;
		for(size_t j = 0; j < nn_matches.size(); j++)
		{
			// knnMatch may return fewer than 2 neighbours when the
			// descriptor set is small; indexing [1] unchecked was
			// out-of-bounds in that case.
			if (nn_matches[j].size() < 2)
				continue;
			const cv::DMatch match = nn_matches[j][0];
			const float dist1 = nn_matches[j][0].distance;
			const float dist2 = nn_matches[j][1].distance;
			if(dist1 < nnMatchRatio * dist2)
			{
				good_matches.push_back(match);
				matched1.push_back(kpts1[match.queryIdx]); // point in img[i-1]
				matched2.push_back(kpts2[match.trainIdx]); // point in img[i]
			}
		}

		// Step 2. Draw matches and display (waits for a key press).
		cv::Mat img_matches;
		cv::drawMatches( img[i-1], kpts1, img[i], kpts2, good_matches, img_matches );
		cv::namedWindow( "Matches", cv::WINDOW_NORMAL);
		cv::imshow("Matches", img_matches );
		cv::waitKey(0);

		// The current image becomes the "left" image of the next pair.
		desc1 = desc2.clone();
		kpts1.swap(kpts2);

		// Step 3. Populate the output with the matched point coordinates.
		vector<cv::Point2d> imgpts1, imgpts2;
		for(size_t k = 0; k < matched1.size(); k++)
		{
			imgpts1.push_back(matched1[k].pt); // queryIdx is the "left" image
			imgpts2.push_back(matched2[k].pt); // trainIdx is the "right" image
		}

		MatchVector.push_back(imgpts1);
		MatchVector.push_back(imgpts2);
	}

	return MatchVector;
}

// Keeps only the feature points that have a correspondence in ALL images.
// MatchVector holds the pairwise matches produced by findFeaturePoint
// (two rows per consecutive image pair); the result has one row per image,
// all rows the same length, with column j referring to the same tracked
// point in every image.
// new function
vector<vector<cv::Point2d> > Autocalib::getMeasureVector(vector<vector<cv::Point2d> > MatchVector)
{
	vector<vector<cv::Point2d> > measureVector;
	vector<vector<cv::Point2d> > tempMeasureVector;
	// Seed with the matches of the first pair (image 0 <-> image 1).
	measureVector.push_back(MatchVector[0]);
	measureVector.push_back(MatchVector[1]);
	

	// Fold in one image at a time; rows 2*i-2 / 2*i-1 of MatchVector hold
	// the matches between image i-1 and image i.
	for (int i=2;i<= MatchVector.size()/2;i++)
	{
		vector<cv::Point2d> ::iterator indxMatch;
		vector<cv::Point2d> tempVector;
		

		for (int j=0;j<measureVector[i-1].size();j++ )
		{
			// Look for the tracked point of image i-1 among the "left"
			// points of pair (i-1, i). NOTE(review): exact comparison of
			// double coordinates — relies on both lists storing
			// bit-identical values (true here, both copy the same pts).
			indxMatch = find(MatchVector[2*i-2].begin(),MatchVector[2*i-2].end(),measureVector[i-1][j] );
			if (indxMatch!=MatchVector[2*i-2].end())
			{	
				// Found: append the corresponding point of image i.
				int dis=distance(MatchVector[2*i-2].begin(),indxMatch);
				tempVector.push_back(MatchVector[2*i-1][dis]); 
					
			}
			else
			{		
				// Not tracked into image i: mark column j with the {0,0}
				// sentinel in every earlier row so searchAllMatch drops it.
				for(int n=i-1;n>=0;n--)
				{
					measureVector[n][j].x=0;
					measureVector[n][j].y=0;
				} 	
			}
			
		} 
		measureVector.push_back(tempVector);
		

		// Purge the sentinel entries so all rows keep equal length before
		// the next image is folded in.
		tempMeasureVector=searchAllMatch(measureVector);
		measureVector.clear();

		measureVector.swap(tempMeasureVector);
		tempMeasureVector.clear();
		
	}
	
	return measureVector;
}

// Removes the {0,0} sentinel points (features marked by getMeasureVector as
// having no correspondence across all images) from every row.
// new function
vector<vector<cv::Point2d> > Autocalib::searchAllMatch(vector<vector<cv::Point2d> > vector)
{
	const cv::Point2d pt{0,0}; // sentinel written by getMeasureVector

	for (size_t i = 0; i < vector.size(); i++)
	{
		// Erase-remove idiom: a single linear pass instead of erasing
		// elements one by one inside the loop (which was quadratic).
		vector[i].erase(std::remove(vector[i].begin(), vector[i].end(), pt),
		                vector[i].end());
	}

	return vector;
}

// Converts per-image point lists into a (2F x P) CV_64F measurement matrix:
// row 2*i holds the x coordinates of image i, row 2*i+1 the y coordinates.
// Assumes every inner vector has the same length as vector[0].
// new function
cv::Mat Autocalib::vectorPoint2Mat(vector<vector<cv::Point2d> > vector)
{
	// Guard: the original dereferenced vector[0] unconditionally, which is
	// undefined behaviour for empty input.
	if (vector.empty())
		return cv::Mat();

	cv::Mat M(vector.size()*2, vector[0].size(), CV_64F);

	for (size_t i = 0; i < vector.size(); i++)
	{
		for (size_t j = 0; j < vector[i].size(); j++)
		{
			M.at<double>(2*i,   j) = vector[i][j].x;
			M.at<double>(2*i+1, j) = vector[i][j].y;
		}
	}

	return M;
}


// Robustly estimates the fundamental matrix between every pair of
// consecutive point lists using LMedS; returns one 3x3 matrix per pair.
vector<cv::Mat> Autocalib::estimatFundMatVector(vector<vector<cv::Point2d> > MeasureVector)
{
	vector<cv::Mat> FundMatVector;

	for (size_t i = 1; i < MeasureVector.size(); i++)
	{
		// Stack-allocated: the original leaked both the estimator and the
		// solver (new without delete) on every iteration.
		FundMatFitting fmestim;
		fmestim.setData(MeasureVector[i-1], MeasureVector[i]);

		robest::LMedS ransacSolver;
		ransacSolver.solve(&fmestim);

		// clone() so the stored matrix owns its data and cannot alias a
		// buffer inside the (soon destroyed) estimator.
		FundMatVector.push_back(fmestim.getResult().clone());
	}

	return FundMatVector;
}




// Renders img1 and img2 side by side and draws, for every correspondence,
// the matched points and their epipolar lines (line of pts1[i] in image 2,
// line of pts2[i] in image 1). Requires both images to have identical size
// and type; returns the composed CV_8UC3 canvas.
cv::Mat Autocalib::plotStereoWithEpilines(cv::Mat img1,cv::Mat img2,cv::Mat F,vector<cv::Point2d> pts1,vector<cv::Point2d> pts2)
{
    // Epipolar lines of pts1 live in image 2 and vice versa.
    // NOTE(review): the lines are read back with at<double>, i.e. this
    // assumes computeCorrespondEpilines produced double-precision lines for
    // the Point2d input — confirm against the OpenCV version in use.
    cv::Mat epilines1;
    cv::Mat epilines2;
    cv::computeCorrespondEpilines(pts1,1, F,epilines2);
    cv::computeCorrespondEpilines(pts2,2, F,epilines1);

    CV_Assert(img1.size() == img2.size() && img1.type() == img2.type());

    // Side-by-side canvas: image 1 on the left, image 2 on the right.
    cv::Mat outImg(img1.rows, img1.cols*2, CV_8UC3);
    cv::Rect rect1(0,0, img1.cols, img1.rows);
    cv::Rect rect2(img1.cols, 0, img2.cols, img2.rows);

    if (img1.type() == CV_8U)
    {
        cv::cvtColor(img1, outImg(rect1), cv::COLOR_GRAY2BGR);
        cv::cvtColor(img2, outImg(rect2), cv::COLOR_GRAY2BGR);
    }
    else
    {
        img1.copyTo(outImg(rect1));
        img2.copyTo(outImg(rect2));
    }

    // Intersects the line a*x + b*y + c = 0 with the four image borders and
    // keeps only the intersections lying inside the image (normally two).
    auto clipLineToImage = [](double a, double b, double c, const cv::Size& sz)
    {
        std::vector<cv::Point> pts;
        pts.push_back(cv::Point(0, -c/b));                             // left border
        pts.push_back(cv::Point(sz.width, -(c + a*sz.width)/b));       // right border
        pts.push_back(cv::Point(-c/a, 0));                             // top border
        pts.push_back(cv::Point(-(c + b*sz.height)/a, sz.height));     // bottom border

        // Erase back to front so the earlier indices stay valid.
        if (pts[3].x < 0 || pts[3].x > sz.width)  pts.erase(pts.begin() + 3);
        if (pts[2].x < 0 || pts[2].x > sz.width)  pts.erase(pts.begin() + 2);
        if (pts[1].y < 0 || pts[1].y > sz.height) pts.erase(pts.begin() + 1);
        if (pts[0].y < 0 || pts[0].y > sz.height) pts.erase(pts.begin() + 0);
        return pts;
    };

    cv::RNG rng; // default seed: the line colors are deterministic across runs

    for(size_t i = 0; i < pts1.size(); i++)
    {
        cv::Scalar color = cv::Scalar(rng.uniform(0,255), rng.uniform(0, 255), rng.uniform(0, 255));

        // Epipolar line of pts1[i], drawn in the right half (image 2).
        std::vector<cv::Point> seg2 = clipLineToImage(epilines2.at<double>(i,0),
                                                      epilines2.at<double>(i,1),
                                                      epilines2.at<double>(i,2),
                                                      img2.size());
        // A degenerate line can cross the image in fewer than two points;
        // the original indexed [0]/[1] unconditionally (out-of-bounds read).
        if (seg2.size() >= 2)
            cv::line(outImg(rect2), seg2[0], seg2[1], color, 1, cv::LINE_AA);
        cv::circle(outImg(rect2), pts2[i], 3, color, -1, cv::LINE_AA);

        // Epipolar line of pts2[i], drawn in the left half (image 1).
        std::vector<cv::Point> seg1 = clipLineToImage(epilines1.at<double>(i,0),
                                                      epilines1.at<double>(i,1),
                                                      epilines1.at<double>(i,2),
                                                      img1.size());
        if (seg1.size() >= 2)
            cv::line(outImg(rect1), seg1[0], seg1[1], color, 1, cv::LINE_AA);
        cv::circle(outImg(rect1), pts1[i], 3, color, -1, cv::LINE_AA);
    }

    return outImg;
}


// Displays, for every consecutive image pair, a side-by-side view with the
// epipolar lines induced by the pair's fundamental matrix. Blocks on a key
// press after each pair.
void Autocalib::showEpipolarLines(vector<cv::Mat> image, vector<vector<cv::Point2d> > measureVector, vector<cv::Mat> fundMatrix)
{
	const char* windowName = "Epipolar lines";
	for (size_t idx = 1; idx < image.size(); ++idx)
	{
		cv::Mat canvas = plotStereoWithEpilines(
			image[idx-1], image[idx], fundMatrix[idx-1],
			measureVector[idx-1], measureVector[idx]);
		cv::namedWindow(windowName, cv::WINDOW_NORMAL);
		cv::imshow(windowName, canvas);
		cv::waitKey(0);
	}
}



// Runs the autocalibration optimisation on the measurement matrix, prints
// the recovered calibration matrix and rotation angles (degrees), and
// returns the per-view camera matrices.
vector <cv::Mat> Autocalib::estimatParameter(cv::Mat measureMatrix)
{
	Optimization opt;
	opt.setMeasurementMatrix(measureMatrix);
	opt.run();

	cv::Mat intrinsics = opt.getCalibrationMatrix();
	cout << "calibration matrix " << intrinsics << endl;

	cv::Mat rotationAngles = opt.getRotationAnglesDeg();
	cout << "parametres de camera "<<endl << rotationAngles << endl;

	return opt.getCameraMatrices();
}