I have OpenCV code that sets up a camera matrix and undistorts the image. How can I find the focal length from the camera matrix?
Here is the relevant part of my OpenCV/C++ code:
//default capture width and height
const int FRAME_WIDTH = 1288;
const int FRAME_HEIGHT = 964;
//max number of objects to be detected in frame
const int MAX_NUM_OBJECTS=50;
//minimum and maximum object area
const int MIN_OBJECT_AREA = 2*2;
const int MAX_OBJECT_AREA = FRAME_HEIGHT*FRAME_WIDTH/1.5;
Mat DistortedImg; //storage for a copy of the raw image
Mat UndistortedImg; //storage for the undistorted image
double cameraM[3][3] = {{1103.732864, 0.000000, 675.056365}, {0.000000, 1100.058630, 497.063376}, {0, 0, 1}}; //camera matrix to be edited
Mat CameraMatrix = Mat(3, 3, CV_64FC1, cameraM);
double distortionC[5] = {-0.346476, 0.142352, -0.000084, -0.001727, 0.000000}; //distortion coefficients to be edited
Mat DistortionCoef = Mat(1, 5, CV_64FC1, distortionC);
double rArray[3][3] = {{1, 0, 0}, {0, 1, 0}, {0, 0, 1}};
Mat RArray = Mat(3, 3, CV_64FC1, rArray); //originally CV_64F
double newCameraM[3][3] = {{963.436584, 0.000000, 680.157832}, {0.000000, 1021.688843, 498.825528}, {0, 0, 1}};
Mat NewCameraMatrix = Mat(3, 3, CV_64FC1, newCameraM);
Size UndistortedSize(1288,964);
Mat map1;
Mat map2;
string intToString(int number)
{
    std::stringstream ss;
    ss << number;
    return ss.str();
}
void imageCb(const sensor_msgs::ImageConstPtr& msg) //callback function definition
{
    cv_bridge::CvImagePtr cv_ptr;
    try
    {
        cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8); //convert the ROS image to a CV image and store the copy in cv_ptr (a pointer)
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }
    /* The image-processing work starts here inside this callback.
     * The purpose is to work out the coordinates of the detected object
     * in the incoming video, using a color-extraction technique.
     */
    bool trackObjects = true;
    bool useMorphOps = true;
    Mat cameraFeed;
    Mat HSV;
    Mat threshold;
    Mat ideal_image;
    //x and y values for the location of the object
    int x=0, y=0;
    createTrackbars();
    //store image to matrix
    cv_ptr->image.copyTo(DistortedImg); //=Tan= copy the image from the ardrone to DistortedImg for processing
    initUndistortRectifyMap(CameraMatrix, DistortionCoef, RArray, NewCameraMatrix, UndistortedSize, CV_32FC1, map1, map2);
    remap(DistortedImg, cameraFeed, map1, map2, INTER_LINEAR, BORDER_CONSTANT, Scalar(0,0,0));
    cvtColor(cameraFeed,HSV,COLOR_BGR2HSV); //convert frame from BGR to HSV colorspace
    //write the thresholded image to Mat threshold
    inRange(HSV,Scalar(iLowH_1, iLowS_1, iLowV_1),Scalar(iHighH_1, iHighS_1, iHighV_1),threshold);
    //inRange(HSV,Scalar(0, 87, 24),Scalar(9, 255, 255),threshold); //red
    morphOps(threshold);
    GaussianBlur(threshold, ideal_image, Size(9, 9), 2, 2);
    trackFilteredObject1(x,y,ideal_image,cameraFeed);
    namedWindow("Image with deal1", 0);
    namedWindow("Original Image", 0);
    imshow("Image with deal1",ideal_image);
    imshow("Original Image", cameraFeed);
    //delay 30ms so that the screen can refresh;
    //the image will not appear without this waitKey() call
    cv::waitKey(30);
}
I do not understand how to find the focal length from the camera matrix using this code. I know the camera matrix is a 3x3 matrix, but how do I get the focal length out of it, and how are those parameters calculated in the first place?
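From what I have read, the focal lengths might simply be the (0,0) and (1,1) entries of the camera matrix and the principal point the (0,2) and (1,2) entries, but I am not sure whether that is right, or whether I should read them from CameraMatrix or NewCameraMatrix. The snippet below is only my guess (the pixel-size value is a made-up number):

double fx = CameraMatrix.at<double>(0, 0); //focal length in pixels along x? (my guess)
double fy = CameraMatrix.at<double>(1, 1); //focal length in pixels along y? (my guess)
double cx = CameraMatrix.at<double>(0, 2); //principal point x?
double cy = CameraMatrix.at<double>(1, 2); //principal point y?
//if the physical pixel pitch of the sensor were known (0.00375 mm/pixel here is just a placeholder),
//the focal length in millimetres would presumably be:
double pixelSizeMm = 0.00375;
double focalLengthMm = fx * pixelSizeMm;
ROS_INFO("fx = %f px, fy = %f px, cx = %f, cy = %f, f = %f mm", fx, fy, cx, cy, focalLengthMm);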
Can anyone help?