
Projecting points: using OpenCV, I want to

  1. do a ray-plane intersection for a few points given in world coordinates,
  2. take the resulting intersection points,
  3. and then project those points from world coordinates back into image coordinates.

However, the projected points I get are in the range of 0.4, 0.1, 0.5 and so on.

Here is what I am doing; hopefully you can spot the mistake:

    Mat cameraIntrinsics(3, 3, CV_32F);

    cameraIntrinsics.at<float>(0, 0) = 1.6003814935684204;
    cameraIntrinsics.at<float>(0, 1) = 0;
    cameraIntrinsics.at<float>(0, 2) = -0.0021958351135253906;
    cameraIntrinsics.at<float>(1, 0) = 0;
    cameraIntrinsics.at<float>(1, 1) = 1.6003814935684204;
    cameraIntrinsics.at<float>(1, 2) = -0.0044271680526435375;
    cameraIntrinsics.at<float>(2, 0) = 0;
    cameraIntrinsics.at<float>(2, 1) = 0;
    cameraIntrinsics.at<float>(2, 2) = 1;

    Mat invCameraIntrinsics = cameraIntrinsics.inv();

    // Back-project every detected corner into a viewing ray through the camera origin
    std::vector<cv::Point3f> points3D;
    std::vector<Ray> rays;
    for (int i = 0; i < corners.size(); i++)
    {
        cv::Point3f pt;

        pt.x = corners[i].x;
        pt.y = corners[i].y;
        pt.z = -1.0f;

        points3D.push_back(pt);

        Ray ray;
        ray.origin = Vec3f(0, 0, 0);
        ray.direction = Vec3f(pt.x, pt.y, pt.z);

        rays.push_back(ray);
    }

    std::vector<cv::Point3f> pointsTransformed3D;
    cv::transform(points3D, pointsTransformed3D, invCameraIntrinsics);

    // Intersect each ray with the plane with normal (0, 1, 0) through the transformed point
    std::vector<cv::Vec3f> contacts;

    for (int i = 0; i < pointsTransformed3D.size(); i++)
    {
        Vec3f pt(pointsTransformed3D[i].x, pointsTransformed3D[i].y, pointsTransformed3D[i].z);

        cv::Vec3f contact;
        std::pair<bool, double> test = linePlaneIntersection(contact, rays[i].direction, rays[i].origin, Vec3f(0, 1, 0), pt);
        if (test.first == true)
        {
            cv::Vec3f contact(rays[i].origin + (rays[i].direction) * test.second);
            contacts.push_back(contact);
        }
    }

    // Camera pose: rotation matrix (converted to a Rodrigues vector) plus translation
    Mat rotationMatrix(3, 3, CV_32F);

    rotationMatrix.at<float>(0, 0) = 0.9115078799790896;
    rotationMatrix.at<float>(0, 1) = -0.1883612409043686;
    rotationMatrix.at<float>(0, 2) = -0.3656137684237178;
    rotationMatrix.at<float>(1, 0) = -0.3046835686704949;
    rotationMatrix.at<float>(1, 1) = 0.2878667580409447;
    rotationMatrix.at<float>(1, 2) = -0.9079100465339108;
    rotationMatrix.at<float>(2, 0) = 0.2762631132059388;
    rotationMatrix.at<float>(2, 1) = 0.9389636694462479;
    rotationMatrix.at<float>(2, 2) = 0.2050022432604093;

    cv::Mat rVec(3, 1, CV_32F); // Rotation vector
    Rodrigues(rotationMatrix, rVec);
    double norm = cv::norm(rVec); // not used further in this excerpt

    float theta = (float)(sqrt(rVec.at<float>(0) * rVec.at<float>(0) +
                               rVec.at<float>(1) * rVec.at<float>(1) +
                               rVec.at<float>(2) * rVec.at<float>(2)) * 180 / 3.14567898726); // not used further

    cv::Mat tVec(3, 1, CV_32F); // Translation vector
    tVec.at<float>(0) = 21.408294677734375;
    tVec.at<float>(1) = 531.1319580078125;
    tVec.at<float>(2) = 705.74224853515625;

    cv::Mat distCoeffs(5, 1, CV_32F); // Distortion coefficients (all zero)
    distCoeffs.at<float>(0) = 0;
    distCoeffs.at<float>(1) = 0;
    distCoeffs.at<float>(2) = 0;
    distCoeffs.at<float>(3) = 0;
    distCoeffs.at<float>(4) = 0;

    // Project the intersection points back into the image
    std::vector<cv::Point2d> projectedPoints;
    std::vector<cv::Point3d> ContactPoints;

    for (int i = 0; i < contacts.size(); i++)
    {
        cv::Point3d pt;

        pt.x = contacts[i][0];
        pt.y = contacts[i][1];
        pt.z = contacts[i][2];

        ContactPoints.push_back(pt);
    }

    cv::projectPoints(ContactPoints, rVec, tVec, cameraIntrinsics, distCoeffs, projectedPoints);

    for (size_t i = 0; i < projectedPoints.size(); i++)
    {
        cv::Point2d pt;

        pt.x = projectedPoints[i].x;
        pt.y = projectedPoints[i].y;

        cv::circle(src, pt, 10, cv::Scalar(255, 0, 255), -1);
    }

    imshow("My window", src);
    } // closes scopes opened before this excerpt (see the full source linked in the comments below)
    }

    cv::waitKey(0);
    return 0;
    }
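The excerpt uses a `Ray` struct and a `linePlaneIntersection` helper that are not shown. Below is a minimal hypothetical sketch of what they could look like, inferred only from the call site above (the pair's second element is assumed to be the ray parameter `t`, since the loop multiplies the ray direction by it); it is not the poster's actual code.

    #include <opencv2/core.hpp>
    #include <cmath>
    #include <utility>

    // Hypothetical helper types, not taken from the question's source.
    struct Ray
    {
        cv::Vec3f origin;
        cv::Vec3f direction;
    };

    // Intersects the ray origin + t * direction with the plane that passes through
    // planeCoord and has normal planeNormal. Returns {hit, t}; contact is filled on a hit.
    std::pair<bool, double> linePlaneIntersection(cv::Vec3f& contact,
                                                  cv::Vec3f direction, cv::Vec3f origin,
                                                  cv::Vec3f planeNormal, cv::Vec3f planeCoord)
    {
        double denom = planeNormal.dot(direction);
        if (std::abs(denom) < 1e-6)              // ray is (nearly) parallel to the plane
            return std::make_pair(false, 0.0);

        double t = planeNormal.dot(planeCoord - origin) / denom;
        contact = origin + direction * t;
        return std::make_pair(true, t);
    }

With this convention, `test.second` is exactly the scale that the loop above applies to `rays[i].direction`.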

The cameraIntrinsics look really strange... the focal length is far too small (it is usually in the hundreds or even thousands), and the principal point is also far too small; normally it is near the image center, whereas this one would sit in a corner... How did you obtain it? – api55


@ api55我從ARKIT –


Hmm, that is strange... they look completely wrong, at least for OpenCV. Maybe they are scaled in some way, or use a different convention, or both. [Here](http://ksimek.github.io/2013/08/13/intrinsic/) you can find more information about the camera matrix. When the focal length is smaller than the real one, objects appear smaller... which is consistent with the behaviour you describe, and in your case the optical center is at 0. Try 1600 as the focal length and the middle of the image as the center (for example, for a 640x480 image that would be 319.5, 239.5) and you may get something that makes more sense – api55
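To illustrate that suggestion in code, here is a minimal sketch of an intrinsic matrix in pixel units; the focal length of 1600 and the 640x480 resolution are the example values from the comment, not values measured for this camera:

    // Example pixel-unit intrinsics following the comment above.
    // fx = fy = 1600 and a 640x480 image are assumed example values.
    cv::Mat cameraIntrinsics = (cv::Mat_<float>(3, 3) <<
        1600.0f,    0.0f, 319.5f,   // fx,  0, cx  (cx = (640 - 1) / 2)
           0.0f, 1600.0f, 239.5f,   //  0, fy, cy  (cy = (480 - 1) / 2)
           0.0f,    0.0f,   1.0f);

With values of this magnitude, the same `cv::projectPoints` call should place the reprojected points at plausible pixel coordinates rather than at fractions of a pixel.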

Answer


Your intrinsic parameters are simply far too small: a focal length of 1.6 pixels is nonsense for a physical, near-pinhole camera.
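A quick arithmetic sketch shows why such a focal length collapses everything to sub-pixel coordinates. The point at X = 50, Z = 400 and the principal point of 319.5 are arbitrary illustrative values, not taken from the question:

    #include <cstdio>

    int main()
    {
        // Pinhole projection u = fx * X / Z + cx, evaluated for an arbitrary example point.
        float X = 50.0f, Z = 400.0f;
        float uTiny  = 1.6f    * X / Z + 0.0f;    // ~0.2   -> the 0.1 - 0.5 range reported in the question
        float uPixel = 1600.0f * X / Z + 319.5f;  // ~519.5 -> a plausible pixel coordinate
        std::printf("u with posted fx: %.2f, u with pixel-unit fx: %.2f\n", uTiny, uPixel);
        return 0;
    }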


I was actually following this article: http://nghiaho.com/?page_id=363 If you want, the full source code is here: https://pastebin.com/9MKx0uCT and it uses this image: https://imgur.com/a/eFzbM –