Learning OpenCV: Simplified ORB & Faster Location

Date: 2023-03-09 00:43:31

Following the structure of the earlier simplified SURF version, I reworked the ORB detection code into a simplified form. The speed turns out to be the same, but it really does save many lines of code; the key is that BruteForceMatcher<HammingLUT> matcher does the matching, which saves writing an entire matching function by hand.
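To make concrete what the matcher saves us from writing, here is a minimal sketch of the equivalent hand-rolled Hamming brute-force match (the helper names popcount8 and bruteForceHammingMatch are mine, not OpenCV's): for every query descriptor, keep the training descriptor with the smallest Hamming distance over the binary descriptor bytes.

#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <climits>
#include <vector>
using namespace cv;

// Count the set bits in one byte (portable popcount).
static int popcount8(uchar v)
{
    int c = 0;
    for( ; v; v &= v - 1 ) c++;
    return c;
}

// For each query descriptor, keep the training descriptor with the smallest
// Hamming distance (number of differing bits across the descriptor bytes).
static void bruteForceHammingMatch( const Mat& queryDesc, const Mat& trainDesc,
                                    std::vector<DMatch>& matches )
{
    matches.clear();
    for( int q = 0; q < queryDesc.rows; q++ )
    {
        const uchar* qp = queryDesc.ptr(q);
        int bestIdx = -1, bestDist = INT_MAX;
        for( int t = 0; t < trainDesc.rows; t++ )
        {
            const uchar* tp = trainDesc.ptr(t);
            int d = 0;
            for( int b = 0; b < queryDesc.cols; b++ )   // 32 bytes per ORB descriptor
                d += popcount8( qp[b] ^ tp[b] );
            if( d < bestDist ) { bestDist = d; bestIdx = t; }
        }
        matches.push_back( DMatch( q, bestIdx, (float)bestDist ) );
    }
}

BruteForceMatcher<HammingLUT> does essentially this (with a lookup table for the bit counts), so the single matcher.match() call in the program below replaces all of it.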

There is also an impressive GPU counterpart: class gpu::BruteForceMatcher_GPU.

Adding findHomography and then perspectiveTransform makes it possible to locate the object, but doing it this way is quite slow.
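For reference, once findHomography has estimated the 3x3 matrix H = (h_ij), perspectiveTransform maps each template corner (x, y) into the scene with a projective division; this is what lets the projected quadrilateral follow scale and perspective changes:

\[
x' = \frac{h_{11}x + h_{12}y + h_{13}}{h_{31}x + h_{32}y + h_{33}}, \qquad
y' = \frac{h_{21}x + h_{22}y + h_{23}}{h_{31}x + h_{32}y + h_{33}}
\]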

So I changed the approach: take the average of the x and y coordinates of the matched keypoints; that point is basically the object center!

Drawing a rectangle of the same size as the original object centered on that point gives an approximate location. It is certainly less accurate than the perspective transform, and it is not scale invariant.

But the robustness should be better: as long as the matching succeeds, the center can almost always be located, whereas the perspective transform sometimes draws a completely unreliable box because of large scale changes and other factors.
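As a minimal standalone sketch of this centroid idea (the helper name locateByCentroid is mine, not from the original post): average the matched keypoints' coordinates in the scene image and return a rectangle of the template's size centred on that point.

#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <vector>
using namespace cv;

static Rect locateByCentroid( const std::vector<KeyPoint>& sceneKeys,
                              const std::vector<DMatch>& matches,
                              Size templateSize )
{
    float x = 0, y = 0;
    for( size_t i = 0; i < matches.size(); i++ )
    {
        // trainIdx indexes the scene (second) image's keypoints
        x += sceneKeys[matches[i].trainIdx].pt.x;
        y += sceneKeys[matches[i].trainIdx].pt.y;
    }
    x /= (float)matches.size();
    y /= (float)matches.size();
    // Rectangle of the original template size, centred on the match centroid
    return Rect( cvRound(x) - templateSize.width / 2,
                 cvRound(y) - templateSize.height / 2,
                 templateSize.width, templateSize.height );
}

The full program from the post is below; it draws both the homography quadrilateral (blue) and the centroid rectangle (red) so the two can be compared.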

#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include "opencv2/imgproc/imgproc_c.h"
#include "opencv2/imgproc/imgproc.hpp"
#include <string>
#include <vector>
#include <iostream>

using namespace cv;
using namespace std;

const char* image_filename1 = "D:/src.jpg";   // template (object) image
const char* image_filename2 = "D:/Demo.jpg";  // scene image to search in

int main()
{
    Mat img1 = imread( image_filename1, CV_LOAD_IMAGE_GRAYSCALE );
    Mat img2 = imread( image_filename2, CV_LOAD_IMAGE_GRAYSCALE );
    int64 st, et;

    // ORB detectors (OpenCV 2.x API): 30 features for the template,
    // 100 for the scene; scale factor 1.2, 1 pyramid level
    ORB orb1( 30, ORB::CommonParams(1.2, 1) );
    ORB orb2( 100, ORB::CommonParams(1.2, 1) );
    vector<KeyPoint> keys1, keys2;
    Mat descriptor1, descriptor2;

    orb1( img1, Mat(), keys1, descriptor1, false );
    st = getTickCount();
    orb2( img2, Mat(), keys2, descriptor2, false );
    et = getTickCount() - st;
    et = et * 1000 / (double)getTickFrequency();
    cout << "extract time:" << et << "ms" << endl;

    vector<DMatch> matches;
    // GPU version also available: class gpu::BruteForceMatcher_GPU
    BruteForceMatcher<HammingLUT> matcher; // BruteForceMatcher supports <Hamming>, <L1<float>> and <L2<float>>
    // FlannBasedMatcher matcher;   // not supported for these binary descriptors
    st = getTickCount();
    matcher.match( descriptor1, descriptor2, matches );
    et = getTickCount() - st;
    et = et * 1000 / getTickFrequency();
    cout << "match time:" << et << "ms" << endl;

    Mat img_matches;
    drawMatches( img1, keys1, img2, keys2,
                 matches, img_matches, Scalar::all(-1), Scalar::all(-1),
                 vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );
    imshow( "match", img_matches );
    cout << "match size:" << matches.size() << endl;
    /*
    Mat showImg;
    drawMatches( img1, keys1, img2, keys2, matches, showImg );
    imshow( "win", showImg );
    */
    waitKey(0);

    st = getTickCount();
    vector<Point2f> pt1;
    vector<Point2f> pt2;
    float x = 0, y = 0;
    // Collect matched point pairs and accumulate the scene coordinates
    for( size_t i = 0; i < matches.size(); i++ )
    {
        pt1.push_back( keys1[matches[i].queryIdx].pt );
        pt2.push_back( keys2[matches[i].trainIdx].pt );
        x += keys2[matches[i].trainIdx].pt.x;
        y += keys2[matches[i].trainIdx].pt.y;
    }
    // Centroid of the matched scene keypoints -- roughly the object center
    x = x / matches.size();
    y = y / matches.size();

    // Location via homography: project the template corners into the scene
    Mat homo;
    homo = findHomography( pt1, pt2, CV_RANSAC );
    vector<Point2f> src_cornor(4);
    vector<Point2f> dst_cornor(4);
    src_cornor[0] = cvPoint( 0, 0 );
    src_cornor[1] = cvPoint( img1.cols, 0 );
    src_cornor[2] = cvPoint( img1.cols, img1.rows );
    src_cornor[3] = cvPoint( 0, img1.rows );
    perspectiveTransform( src_cornor, dst_cornor, homo );

    Mat img = imread( image_filename2, 1 );
    // Blue quadrilateral: the projected template corners
    line( img, dst_cornor[0], dst_cornor[1], Scalar(255,0,0), 2 );
    line( img, dst_cornor[1], dst_cornor[2], Scalar(255,0,0), 2 );
    line( img, dst_cornor[2], dst_cornor[3], Scalar(255,0,0), 2 );
    line( img, dst_cornor[3], dst_cornor[0], Scalar(255,0,0), 2 );
    /*
    line( img, cvPoint((int)dst_cornor[0].x,(int)dst_cornor[0].y), cvPoint((int)dst_cornor[1].x,(int)dst_cornor[1].y), Scalar(255,0,0), 2 );
    line( img, cvPoint((int)dst_cornor[1].x,(int)dst_cornor[1].y), cvPoint((int)dst_cornor[2].x,(int)dst_cornor[2].y), Scalar(255,0,0), 2 );
    line( img, cvPoint((int)dst_cornor[2].x,(int)dst_cornor[2].y), cvPoint((int)dst_cornor[3].x,(int)dst_cornor[3].y), Scalar(255,0,0), 2 );
    line( img, cvPoint((int)dst_cornor[3].x,(int)dst_cornor[3].y), cvPoint((int)dst_cornor[0].x,(int)dst_cornor[0].y), Scalar(255,0,0), 2 );
    */
    // Red: filled dot at the match centroid plus a rectangle of the template size
    circle( img, Point(x, y), 10, Scalar(0,0,255), CV_FILLED );
    line( img, Point(x-img1.cols/2, y-img1.rows/2), Point(x+img1.cols/2, y-img1.rows/2), Scalar(0,0,255), 2 );
    line( img, Point(x+img1.cols/2, y-img1.rows/2), Point(x+img1.cols/2, y+img1.rows/2), Scalar(0,0,255), 2 );
    line( img, Point(x+img1.cols/2, y+img1.rows/2), Point(x-img1.cols/2, y+img1.rows/2), Scalar(0,0,255), 2 );
    line( img, Point(x-img1.cols/2, y+img1.rows/2), Point(x-img1.cols/2, y-img1.rows/2), Scalar(0,0,255), 2 );
    imshow( "location", img );
    et = getTickCount() - st;
    et = et * 1000 / getTickFrequency();
    cout << "location time:" << et << "ms" << endl;
    waitKey(0);
    return 0;
}
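The listing uses the old OpenCV 2.x API (ORB::CommonParams, BruteForceMatcher<HammingLUT>, CV_LOAD_IMAGE_GRAYSCALE), which later releases removed. As a rough sketch, the extract-and-match part would look like this under OpenCV 3.x/4.x (paths and feature counts kept from the post, everything else left at defaults):

#include <opencv2/core.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/imgcodecs.hpp>
#include <iostream>
#include <vector>
using namespace cv;
using namespace std;

int main()
{
    Mat img1 = imread( "D:/src.jpg",  IMREAD_GRAYSCALE );
    Mat img2 = imread( "D:/Demo.jpg", IMREAD_GRAYSCALE );

    // ORB::create(nfeatures, scaleFactor, nlevels, ...)
    Ptr<ORB> orb1 = ORB::create( 30,  1.2f, 1 );
    Ptr<ORB> orb2 = ORB::create( 100, 1.2f, 1 );

    vector<KeyPoint> keys1, keys2;
    Mat desc1, desc2;
    orb1->detectAndCompute( img1, noArray(), keys1, desc1 );
    orb2->detectAndCompute( img2, noArray(), keys2, desc2 );

    // BFMatcher with NORM_HAMMING replaces BruteForceMatcher<HammingLUT>
    BFMatcher matcher( NORM_HAMMING );
    vector<DMatch> matches;
    matcher.match( desc1, desc2, matches );
    cout << "match size:" << matches.size() << endl;
    return 0;
}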

from: http://blog.****.net/yangtrees/article/details/7545820