The camera matrix is the product of an intrinsic matrix and an extrinsic matrix; decomposing the camera matrix (QR, strictly RQ, decomposition) recovers the intrinsic and extrinsic parts.

The intrinsic parameters include the focal length, the principal point, the skew coefficient, and the distortion coefficients.

(1)      | fx   s  x0 |
     K = |  0  fy  y0 |
         |  0   0   1 |

where fx and fy are the focal lengths (in pixel units), which in the usual case are equal; x0 and y0 are the coordinates of the principal point (relative to the imaging plane); and s is the axis skew parameter, ideally 0.

The extrinsic parameters are the rotation matrix R (3×3) and the translation vector T (3×1). Together they describe how a point is transformed from the world coordinate system into the camera coordinate system: the rotation matrix describes the orientation of the world coordinate axes relative to the camera axes, and the translation vector describes the position of the world origin expressed in the camera coordinate system.

After calibrating the stereo pair, the first step is to undistort the original images using the distortion coefficients; see
http://blog.csdn.net/qq_15947787/article/details/51471535

Mat jiaozheng( Mat image )
{
    Size image_size = image.size();
    //intrinsic matrix and distortion coefficients obtained from calibration
    float intrinsic[3][3] = {589.2526583947847,0,321.8607532099886,0,585.7784771038199,251.0338528599469,0,0,1};
    float distortion[1][5] = {-0.5284205687061442, 0.3373615384253201, -0.002133029981628697, 0.001511983002864886, -0.1598661778309496};
    Mat intrinsic_matrix = Mat(3,3,CV_32FC1,intrinsic);
    Mat distortion_coeffs = Mat(1,5,CV_32FC1,distortion);
    Mat R = Mat::eye(3,3,CV_32F);
    Mat mapx = Mat(image_size,CV_32FC1);
    Mat mapy = Mat(image_size,CV_32FC1);
    //build the undistortion map (no rectification, so R is identity and the new camera matrix equals the intrinsics)
    initUndistortRectifyMap(intrinsic_matrix,distortion_coeffs,R,intrinsic_matrix,image_size,CV_32FC1,mapx,mapy);
    Mat t = image.clone();
    cv::remap( image, t, mapx, mapy, INTER_LINEAR);
    return t;
}
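
A minimal usage sketch for the function above (the image file name "left01.jpg" is only an illustration, not from the original post):

#include <opencv2/opencv.hpp>
using namespace cv;

//jiaozheng() as defined above

int main()
{
    Mat src = imread("left01.jpg");            //hypothetical input image
    if (src.empty()) return -1;
    Mat dst = jiaozheng(src);                  //undistort using the calibrated intrinsics
    imwrite("left01_undistorted.jpg", dst);
    return 0;
}
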
After undistortion, coordinate computation can be carried out. There are two directions:

(1) World coordinate system -> image-plane coordinate system

First, world coordinate system -> camera coordinate system.
Given a point whose world coordinates are (Xw, Yw, Zw), the rotation matrix and translation vector relate the camera and world coordinate systems as

(2)  [Xc Yc Zc 1]^T = [R T; 0 1] * [Xw Yw Zw 1]^T

Then, camera coordinate system -> image-plane coordinate system:

(3)  Zc * [u v 1]^T = K * [I 0] * [Xc Yc Zc 1]^T

where [u v 1]^T are the point's coordinates in the image coordinate system, [Xc Yc Zc 1]^T are its coordinates in the camera coordinate system, and K is the camera intrinsic matrix.

Combining the two finally gives:

(4)  Zc * [u v 1]^T = K * [R T] * [Xw Yw Zw 1]^T = M * [Xw Yw Zw 1]^T,   where M = K*[R T] is the 3x4 projection matrix
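
As a cross-check of equation (4), OpenCV's cv::projectPoints performs the same world-to-image mapping. A standalone sketch (not from the original post), using the left-camera calibration values and the same test point that appear in the program below:

#include <opencv2/opencv.hpp>
#include <iostream>
using namespace cv;
using namespace std;

int main()
{
    vector<Point3f> obj;
    obj.push_back(Point3f(700, 220, 530));                  //world point, same test point as the code below
    Mat K = (Mat_<double>(3,3) << 4037.82450, 0, 947.65449,
                                  0, 3969.79038, 455.48718,
                                  0, 0, 1);
    Mat R = (Mat_<double>(3,3) << 0.912333, -0.211508,  0.350590,
                                  0.023249, -0.828105, -0.560091,
                                  0.408789,  0.519140, -0.750590);
    Mat T = (Mat_<double>(3,1) << -127.199992, 28.190639, 1471.356768);
    Mat rvec;
    Rodrigues(R, rvec);                                     //projectPoints expects a rotation vector
    vector<Point2f> img;
    projectPoints(obj, rvec, T, K, Mat(), img);             //empty distCoeffs: image assumed already undistorted
    cout << img[0] << endl;                                 //should match xyz2uv() below up to rounding
    return 0;
}
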
(2) Image-plane coordinate system -> world coordinate system

Converging-optical-axis model:

For the two cameras we have, respectively:

(5)  Zc1 * [u1 v1 1]^T = Ml * [X Y Z 1]^T
(6)  Zc2 * [u2 v2 1]^T = Mr * [X Y Z 1]^T

(the factor on the left-hand side is Zc1 in (5) and Zc2 in (6))

where

(7)  Ml = Kl * [Rl Tl],   Mr = Kr * [Rr Tr]

are the 3x4 projection matrices of the left and right cameras, with elements m_ij (left) and m'_ij (right).

Writing (5) and (6) out element by element gives

(8)  Zc1*u1 = m11*X + m12*Y + m13*Z + m14
     Zc1*v1 = m21*X + m22*Y + m23*Z + m24
     Zc1    = m31*X + m32*Y + m33*Z + m34

(9)  Zc2*u2 = m'11*X + m'12*Y + m'13*Z + m'14
     Zc2*v2 = m'21*X + m'22*Y + m'23*Z + m'24
     Zc2    = m'31*X + m'32*Y + m'33*Z + m'34

Eliminating Zc1 and Zc2 from (8) and (9) and rearranging yields an over-determined linear system

(10)  A * [X Y Z]^T = B

whose four rows have the form
      (u1*m31 - m11)*X + (u1*m32 - m12)*Y + (u1*m33 - m13)*Z = m14 - u1*m34
and analogously for v1, u2 and v2.

The formulas above follow the correction pointed out by commenter wisemanjack (comment #6).

X, Y, Z are solved by least squares; in OpenCV this can be done with solve(A, B, XYZ, DECOMP_SVD).

The code is as follows:

//opencv2.4.9 vs2012
#include <opencv2\opencv.hpp>
#include <iostream>

using namespace std;
using namespace cv;

Point2f xyz2uv(Point3f worldPoint,float intrinsic[3][3],float translation[1][3],float rotation[3][3]);
Point3f uv2xyz(Point2f uvLeft,Point2f uvRight);

//number of image pairs
int PicNum = 14;

//left camera intrinsic matrix
float leftIntrinsic[3][3] = {4037.82450,          0, 947.65449,
                                      0, 3969.79038, 455.48718,
                                      0,          0,         1};
//left camera distortion coefficients
float leftDistortion[1][5] = {0.18962, -4.05566, -0.00510, 0.02895, 0};
//left camera rotation matrix
float leftRotation[3][3] = {0.912333, -0.211508,  0.350590,
                            0.023249, -0.828105, -0.560091,
                            0.408789,  0.519140, -0.750590};
//left camera translation vector
float leftTranslation[1][3] = {-127.199992, 28.190639, 1471.356768};

//right camera intrinsic matrix
float rightIntrinsic[3][3] = {3765.83307,          0, 339.31958,
                                       0, 3808.08469, 660.05543,
                                       0,          0,         1};
//right camera distortion coefficients
float rightDistortion[1][5] = {-0.24195, 5.97763, -0.02057, -0.01429, 0};
//right camera rotation matrix
float rightRotation[3][3] = {-0.134947,  0.989568, -0.050442,
                              0.752355,  0.069205, -0.655113,
                             -0.644788, -0.126356, -0.753845};
//right camera translation vector
float rightTranslation[1][3] = {50.877397, -99.796492, 1507.312197};

int main()
{
    //given a world coordinate, compute the image coordinates
    Point3f point(700,220,530);
    cout<<"左相机中坐标:"<<endl;
    cout<<xyz2uv(point,leftIntrinsic,leftTranslation,leftRotation)<<endl;
    cout<<"右相机中坐标:"<<endl;
    cout<<xyz2uv(point,rightIntrinsic,rightTranslation,rightRotation)<<endl;

    //given the left/right image coordinates, compute the world coordinate
    Point2f l = xyz2uv(point,leftIntrinsic,leftTranslation,leftRotation);
    Point2f r = xyz2uv(point,rightIntrinsic,rightTranslation,rightRotation);
    Point3f worldPoint;
    worldPoint = uv2xyz(l,r);
    cout<<"空间坐标为:"<<endl<<uv2xyz(l,r)<<endl;
    system("pause");
    return 0;
}

//************************************
// Description: solve for the world coordinate from the image coordinates in the left and right cameras
// Method:    uv2xyz
// FullName:  uv2xyz
// Access:    public
// Parameter: Point2f uvLeft
// Parameter: Point2f uvRight
// Returns:   cv::Point3f
// Author:    小白
// Date:      2017/01/10
// History:
//************************************
Point3f uv2xyz(Point2f uvLeft,Point2f uvRight)
{
    //      [u1]      |X|         [u2]      |X|
    // Zc1*|v1| = Ml*|Y|     Zc2*|v2| = Mr*|Y|
    //      [ 1]      |Z|         [ 1]      |Z|
    //                |1|                   |1|
    Mat mLeftRotation = Mat(3,3,CV_32F,leftRotation);
    Mat mLeftTranslation = Mat(3,1,CV_32F,leftTranslation);
    Mat mLeftRT = Mat(3,4,CV_32F);//left camera [R|T]
    hconcat(mLeftRotation,mLeftTranslation,mLeftRT);
    Mat mLeftIntrinsic = Mat(3,3,CV_32F,leftIntrinsic);
    Mat mLeftM = mLeftIntrinsic * mLeftRT;
    //cout<<"left camera projection matrix M = "<<endl<<mLeftM<<endl;

    Mat mRightRotation = Mat(3,3,CV_32F,rightRotation);
    Mat mRightTranslation = Mat(3,1,CV_32F,rightTranslation);
    Mat mRightRT = Mat(3,4,CV_32F);//right camera [R|T]
    hconcat(mRightRotation,mRightTranslation,mRightRT);
    Mat mRightIntrinsic = Mat(3,3,CV_32F,rightIntrinsic);
    Mat mRightM = mRightIntrinsic * mRightRT;
    //cout<<"right camera projection matrix M = "<<endl<<mRightM<<endl;

    //least-squares matrix A
    Mat A = Mat(4,3,CV_32F);
    A.at<float>(0,0) = uvLeft.x * mLeftM.at<float>(2,0) - mLeftM.at<float>(0,0);
    A.at<float>(0,1) = uvLeft.x * mLeftM.at<float>(2,1) - mLeftM.at<float>(0,1);
    A.at<float>(0,2) = uvLeft.x * mLeftM.at<float>(2,2) - mLeftM.at<float>(0,2);
    A.at<float>(1,0) = uvLeft.y * mLeftM.at<float>(2,0) - mLeftM.at<float>(1,0);
    A.at<float>(1,1) = uvLeft.y * mLeftM.at<float>(2,1) - mLeftM.at<float>(1,1);
    A.at<float>(1,2) = uvLeft.y * mLeftM.at<float>(2,2) - mLeftM.at<float>(1,2);
    A.at<float>(2,0) = uvRight.x * mRightM.at<float>(2,0) - mRightM.at<float>(0,0);
    A.at<float>(2,1) = uvRight.x * mRightM.at<float>(2,1) - mRightM.at<float>(0,1);
    A.at<float>(2,2) = uvRight.x * mRightM.at<float>(2,2) - mRightM.at<float>(0,2);
    A.at<float>(3,0) = uvRight.y * mRightM.at<float>(2,0) - mRightM.at<float>(1,0);
    A.at<float>(3,1) = uvRight.y * mRightM.at<float>(2,1) - mRightM.at<float>(1,1);
    A.at<float>(3,2) = uvRight.y * mRightM.at<float>(2,2) - mRightM.at<float>(1,2);

    //least-squares vector B
    Mat B = Mat(4,1,CV_32F);
    B.at<float>(0,0) = mLeftM.at<float>(0,3) - uvLeft.x * mLeftM.at<float>(2,3);
    B.at<float>(1,0) = mLeftM.at<float>(1,3) - uvLeft.y * mLeftM.at<float>(2,3);
    B.at<float>(2,0) = mRightM.at<float>(0,3) - uvRight.x * mRightM.at<float>(2,3);
    B.at<float>(3,0) = mRightM.at<float>(1,3) - uvRight.y * mRightM.at<float>(2,3);

    Mat XYZ = Mat(3,1,CV_32F);
    //solve for XYZ by SVD least squares
    solve(A,B,XYZ,DECOMP_SVD);
    //cout<<"world coordinate = "<<endl<<XYZ<<endl;

    //coordinate in the world coordinate system
    Point3f world;
    world.x = XYZ.at<float>(0,0);
    world.y = XYZ.at<float>(1,0);
    world.z = XYZ.at<float>(2,0);
    return world;
}

//************************************
// Description: project a point from the world coordinate system into the left/right camera image coordinates
// Method:    xyz2uv
// FullName:  xyz2uv
// Access:    public
// Parameter: Point3f worldPoint
// Parameter: float intrinsic[3][3]
// Parameter: float translation[1][3]
// Parameter: float rotation[3][3]
// Returns:   cv::Point2f
// Author:    小白
// Date:      2017/01/10
// History:
//************************************
Point2f xyz2uv(Point3f worldPoint,float intrinsic[3][3],float translation[1][3],float rotation[3][3])
{
    //     [fx s x0]                [Xc]        [Xw]   [u]    1     [Xc]
    // K = |0 fy y0|   TEMP = [R T] |Yc| = TEMP*|Yw|   | | = ---*K *|Yc|
    //     [ 0 0 1 ]                [Zc]        |Zw|   [v]    Zc    [Zc]
    //                                          [1 ]
    Point3f c;
    c.x = rotation[0][0]*worldPoint.x + rotation[0][1]*worldPoint.y + rotation[0][2]*worldPoint.z + translation[0][0]*1;
    c.y = rotation[1][0]*worldPoint.x + rotation[1][1]*worldPoint.y + rotation[1][2]*worldPoint.z + translation[0][1]*1;
    c.z = rotation[2][0]*worldPoint.x + rotation[2][1]*worldPoint.y + rotation[2][2]*worldPoint.z + translation[0][2]*1;

    Point2f uv;
    uv.x = (intrinsic[0][0]*c.x + intrinsic[0][1]*c.y + intrinsic[0][2]*c.z)/c.z;
    uv.y = (intrinsic[1][0]*c.x + intrinsic[1][1]*c.y + intrinsic[1][2]*c.z)/c.z;
    return uv;
}

Addendum (2017/5/26)

Both MATLAB and OpenCV stereo calibration place the world coordinate system on the left camera, so the corresponding lines in the code above become:

//left camera rotation matrix
float leftRotation[3][3] = {1,0,0, 0,1,0, 0,0,1 };
//left camera translation vector
float leftTranslation[1][3] = {0,0,0};

Distortion matrix (by default five distortion coefficients are obtained: k1, k2, p1, p2, k3).
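
For context, a rough sketch of how R and T with the world frame fixed to the left camera are typically obtained in OpenCV. The sketch assumes objectPoints, imagePointsL and imagePointsR have already been filled from chessboard detections and that the per-camera intrinsics were calibrated beforehand (none of this appears in the original post):

//Sketch only: stereo calibration with the left camera as the world frame.
//objectPoints / imagePointsL / imagePointsR are assumed to be filled elsewhere
//(e.g. from findChessboardCorners on each calibration pair).
vector<vector<Point3f> > objectPoints;
vector<vector<Point2f> > imagePointsL, imagePointsR;
Mat cameraMatrixL, distCoeffsL, cameraMatrixR, distCoeffsR;  //assumed pre-calibrated (CALIB_FIX_INTRINSIC is the default flag)
Mat R, T, E, F;
Size imageSize(640, 480);                                     //illustrative resolution
double rms = stereoCalibrate(objectPoints, imagePointsL, imagePointsR,
                             cameraMatrixL, distCoeffsL,
                             cameraMatrixR, distCoeffsR,
                             imageSize, R, T, E, F);
//R and T map points from the left-camera frame into the right-camera frame,
//which is why the left camera ends up playing the role of the world frame.
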

——————————————————————————————————————————————————————————————

There is also another model for image-plane -> world coordinates: the parallel-optical-axis model.

Binocular stereo 3-D measurement is based on the disparity principle:

(11)  With the two image planes row-aligned, the disparity of a point is d = x_l - x_r, and its depth is
      Z = f * Tx / d
      where f is the focal length (in pixels) and Tx is the baseline between the two cameras.

(12)  The reprojection matrix produced by stereo rectification is

          | 1  0   0      -cx         |
      Q = | 0  1   0      -cy         |
          | 0  0   0       f          |
          | 0  0  -1/Tx  (cx-cx')/Tx  |

Here all parameters except cx' come from the left image; cx' is the x-coordinate of the principal point in the right image. If the principal rays intersect at infinity, then cx = cx' and the bottom-right entry is 0. Given a 2-D homogeneous point and its associated disparity d, the point can be projected into 3-D:

(13)  Q * [x y d 1]^T = [X Y Z W]^T

The 3-D coordinate is then (X/W, Y/W, Z/W).
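
As a rough worked example (purely illustrative, using the calibration values from the OpenCV 3.0 program further down, i.e. f of roughly 840 pixels and baseline |Tx| of roughly 39.7 mm): a disparity of d = 20 pixels corresponds to a depth of about Z = f*|Tx|/d = 840 * 39.7 / 20, roughly 1670 mm; halving the disparity doubles the depth.
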

To obtain the disparity map needed by the parallel-optical-axis model, see http://blog.csdn.net/wangchao7281/article/details/52506691?locationNum=7

You can also refer to the OpenCV sample; how to use the sample is described at http://blog.csdn.net/t247555529/article/details/48046859

Once the disparity map is available, cvReprojectImageTo3D (reprojectImageTo3D in the C++ API) can be called to output the 3-D coordinates.

Many people see z come out as 10000 when printing the 3-D coordinates; that is actually due to printing them the wrong way. It should be done like this:

Point p;
p.x = 294, p.y = 189;
cout << p << " in world coordinate: " << xyz.at<Vec3f>(p) * 16 << endl;
Why multiply by 16?

Because in OpenCV 2.0 the result of the BM function is stored as a 16-bit signed integer; for precision, all disparities are scaled up by a factor of 16 (2^4) on output. The relevant internal code is:

dptr[y*dstep] = (short)(((ndisp - mind - 1 + mindisp)*256 + (d != 0 ? (p-n)*128/d : 0) + 15) >> 4);

As can be seen, the raw disparity is left-shifted by 8 bits (×256), a correction term is added, and the result is then right-shifted by 4 bits; the net effect is a left shift of 4 bits (×16).

Therefore, when computing real distances, the X/W, Y/W, Z/W produced by cvReprojectImageTo3D must all be multiplied by 16 (equivalently, W divided by 16) to obtain the correct 3-D coordinates.
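
An equivalent alternative (just a sketch, not what the reposted code below does) is to undo the ×16 fixed-point scaling on the disparity itself before reprojecting, so the output needs no further scaling:

//disp is the CV_16S output of bm->compute(), Q comes from stereoRectify()
Mat dispFloat;
disp.convertTo(dispFloat, CV_32F, 1.0 / 16.0);   //stored disparity = true disparity * 16
reprojectImageTo3D(dispFloat, xyz, Q, true);     //xyz is now directly in calibration units
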

Below is an implementation of this method for distance measurement under OpenCV 3.0 (reposted):

/**********************************************/
/*  Stereo matching and distance measurement  */
/**********************************************/

#include <opencv2/opencv.hpp>
#include <iostream>

using namespace std;
using namespace cv;

const int imageWidth = 640;                          //camera resolution
const int imageHeight = 480;
Size imageSize = Size(imageWidth, imageHeight);

Mat rgbImageL, grayImageL;
Mat rgbImageR, grayImageR;
Mat rectifyImageL, rectifyImageR;

Rect validROIL;//after rectification the images are cropped; validROI is the cropped region
Rect validROIR;

Mat mapLx, mapLy, mapRx, mapRy;     //remap tables
Mat Rl, Rr, Pl, Pr, Q;              //rectification rotations R, projection matrices P, reprojection matrix Q
Mat xyz;                            //3-D coordinates

Point origin;                       //point where the mouse button was pressed
Rect selection;                     //rectangular selection
bool selectObject = false;          //whether an object is being selected

int blockSize = 0, uniquenessRatio = 0, numDisparities = 0;
Ptr<StereoBM> bm = StereoBM::create(16, 9);

/*
Camera parameters calibrated in advance
fx  0  cx
 0 fy  cy
 0  0   1
*/
Mat cameraMatrixL = (Mat_<double>(3, 3) << 836.771593170594, 0, 319.970748854743,
    0, 839.416501863912, 228.788913693256,
    0, 0, 1);
Mat distCoeffL = (Mat_<double>(5, 1) << 0, 0, 0, 0, 0);

Mat cameraMatrixR = (Mat_<double>(3, 3) << 838.101721655709, 0, 319.647150557935,
    0, 840.636812165056, 250.655818405938,
    0, 0, 1);
Mat distCoeffR = (Mat_<double>(5, 1) << 0, 0, 0, 0, 0);

Mat T = (Mat_<double>(3, 1) << -39.7389449993974, 0.0740619639178984, 0.411914303245886);//T translation vector
Mat rec = (Mat_<double>(3, 1) << -0.00306, -0.03207, 0.00206);//rec rotation vector
Mat R = (Mat_<double>(3, 3) << 0.999957725513956, -0.00103511880221423, 0.00913650447492805,
    0.00114462826834523, 0.999927476064641, -0.0119888463633882,
    -0.00912343197938050, 0.0119987974423658, 0.999886389470751);//R rotation matrix

/* stereo matching */
void stereo_match(int, void*)
{
    bm->setBlockSize(2 * blockSize + 5);     //SAD window size, 5~21 is a reasonable range
    bm->setROI1(validROIL);
    bm->setROI2(validROIR);
    bm->setPreFilterCap(31);
    bm->setMinDisparity(0);                  //minimum disparity, default 0, may be negative, int
    bm->setNumDisparities(numDisparities * 16 + 16);//disparity range, i.e. max disparity minus min disparity; must be a multiple of 16, int
    bm->setTextureThreshold(10);
    bm->setUniquenessRatio(uniquenessRatio); //uniquenessRatio mainly suppresses false matches
    bm->setSpeckleWindowSize(100);
    bm->setSpeckleRange(32);
    bm->setDisp12MaxDiff(-1);
    Mat disp, disp8;
    bm->compute(rectifyImageL, rectifyImageR, disp);//input images must be grayscale
    disp.convertTo(disp8, CV_8U, 255 / ((numDisparities * 16 + 16)*16.));//the computed disparity is CV_16S
    reprojectImageTo3D(disp, xyz, Q, true); //when computing real distances, the X/W, Y/W, Z/W from ReprojectTo3D must all be multiplied by 16 (i.e. W divided by 16) to obtain correct 3-D coordinates
    xyz = xyz * 16;
    imshow("disparity", disp8);
}

/* mouse callback */
static void onMouse(int event, int x, int y, int, void*)
{
    if (selectObject)
    {
        selection.x = MIN(x, origin.x);
        selection.y = MIN(y, origin.y);
        selection.width = std::abs(x - origin.x);
        selection.height = std::abs(y - origin.y);
    }

    switch (event)
    {
    case EVENT_LBUTTONDOWN:   //left mouse button pressed
        origin = Point(x, y);
        selection = Rect(x, y, 0, 0);
        selectObject = true;
        cout << origin << " in world coordinate is: " << xyz.at<Vec3f>(origin) << endl;
        break;
    case EVENT_LBUTTONUP:     //left mouse button released
        selectObject = false;
        if (selection.width > 0 && selection.height > 0)
            break;
    }
}

/* main function */
int main()
{
    /*
    Stereo rectification
    */
    //Rodrigues(rec, R); //Rodrigues transform (uncomment to build R from the rotation vector rec)
    stereoRectify(cameraMatrixL, distCoeffL, cameraMatrixR, distCoeffR, imageSize, R, T, Rl, Rr, Pl, Pr, Q, CALIB_ZERO_DISPARITY,
        0, imageSize, &validROIL, &validROIR);
    initUndistortRectifyMap(cameraMatrixL, distCoeffL, Rl, Pl, imageSize, CV_32FC1, mapLx, mapLy);//the left map uses the left projection matrix Pl
    initUndistortRectifyMap(cameraMatrixR, distCoeffR, Rr, Pr, imageSize, CV_32FC1, mapRx, mapRy);

    /*
    Read the images
    */
    rgbImageL = imread("左2.jpg", CV_LOAD_IMAGE_COLOR);
    cvtColor(rgbImageL, grayImageL, CV_BGR2GRAY);
    rgbImageR = imread("右2.jpg", CV_LOAD_IMAGE_COLOR);
    cvtColor(rgbImageR, grayImageR, CV_BGR2GRAY);

    imshow("ImageL Before Rectify", grayImageL);
    imshow("ImageR Before Rectify", grayImageR);

    /*
    After remap, the left and right images are coplanar and row-aligned
    */
    remap(grayImageL, rectifyImageL, mapLx, mapLy, INTER_LINEAR);
    remap(grayImageR, rectifyImageR, mapRx, mapRy, INTER_LINEAR);

    /*
    Display the rectification result
    */
    Mat rgbRectifyImageL, rgbRectifyImageR;
    cvtColor(rectifyImageL, rgbRectifyImageL, CV_GRAY2BGR);  //pseudo-color
    cvtColor(rectifyImageR, rgbRectifyImageR, CV_GRAY2BGR);

    //show separately
    //rectangle(rgbRectifyImageL, validROIL, Scalar(0, 0, 255), 3, 8);
    //rectangle(rgbRectifyImageR, validROIR, Scalar(0, 0, 255), 3, 8);
    imshow("ImageL After Rectify", rgbRectifyImageL);
    imshow("ImageR After Rectify", rgbRectifyImageR);

    //show both on the same canvas
    Mat canvas;
    double sf;
    int w, h;
    sf = 600. / MAX(imageSize.width, imageSize.height);
    w = cvRound(imageSize.width * sf);
    h = cvRound(imageSize.height * sf);
    canvas.create(h, w * 2, CV_8UC3);                                            //note the channel count

    //draw the left image onto the canvas
    Mat canvasPart = canvas(Rect(w * 0, 0, w, h));                               //take part of the canvas
    resize(rgbRectifyImageL, canvasPart, canvasPart.size(), 0, 0, INTER_AREA);   //scale the image to the size of canvasPart
    Rect vroiL(cvRound(validROIL.x*sf), cvRound(validROIL.y*sf),                 //the clipped region
        cvRound(validROIL.width*sf), cvRound(validROIL.height*sf));
    //rectangle(canvasPart, vroiL, Scalar(0, 0, 255), 3, 8);                     //draw a rectangle
    cout << "Painted ImageL" << endl;

    //draw the right image onto the canvas
    canvasPart = canvas(Rect(w, 0, w, h));                                       //the other part of the canvas
    resize(rgbRectifyImageR, canvasPart, canvasPart.size(), 0, 0, INTER_LINEAR);
    Rect vroiR(cvRound(validROIR.x * sf), cvRound(validROIR.y*sf),
        cvRound(validROIR.width * sf), cvRound(validROIR.height * sf));
    //rectangle(canvasPart, vroiR, Scalar(0, 0, 255), 3, 8);
    cout << "Painted ImageR" << endl;

    //draw the corresponding lines
    for (int i = 0; i < canvas.rows; i += 16)
        line(canvas, Point(0, i), Point(canvas.cols, i), Scalar(0, 255, 0), 1, 8);
    imshow("rectified", canvas);

    /*
    Stereo matching
    */
    namedWindow("disparity", CV_WINDOW_AUTOSIZE);
    //SAD window size trackbar
    createTrackbar("BlockSize:\n", "disparity", &blockSize, 8, stereo_match);
    //uniqueness ratio trackbar
    createTrackbar("UniquenessRatio:\n", "disparity", &uniquenessRatio, 50, stereo_match);
    //number-of-disparities trackbar
    createTrackbar("NumDisparities:\n", "disparity", &numDisparities, 16, stereo_match);
    //mouse callback: setMouseCallback(window name, callback, user data - usually 0)
    setMouseCallback("disparity", onMouse, 0);
    stereo_match(0, 0);
    waitKey(0);
    return 0;
}

————————————————————————————————————————

Finally, here is the complete code for the course project. I don't think it is written particularly well; it was done somewhat hastily.

When run, the program processes the images in the 素材 (source material) folder and generates three folders, "大球圆心" (large-ball centers), "畸变校正" (undistorted images) and "亮度对比度" (brightness/contrast), plus a CSV file named "三维坐标.csv" that records the computed image coordinates of the balls in the left and right cameras and the solved world coordinates. By default the program re-assigns all automatically computed circle-center coordinates: to change the center coordinates, edit the assignment statements in initPos() in the project; to let the program compute them automatically, comment out that call in main(), but the automatically computed centers are not accurate.

//opencv2.4.9 vs2012
#include <opencv2\opencv.hpp>
#include <iostream>
#include <fstream>
#include <sstream>

using namespace std;
using namespace cv;

//number of image pairs
#define PicNum 14

//left camera intrinsic matrix
float leftIntrinsic[3][3] = {4037.82450,          0, 947.65449,
                                      0, 3969.79038, 455.48718,
                                      0,          0,         1};
//left camera distortion coefficients
float leftDistortion[1][5] = {0.18962, -4.05566, -0.00510, 0.02895, 0};
//left camera rotation matrix
float leftRotation[3][3] = {0.912333, -0.211508,  0.350590,
                            0.023249, -0.828105, -0.560091,
                            0.408789,  0.519140, -0.750590};
//left camera translation vector
float leftTranslation[1][3] = {-127.199992, 28.190639, 1471.356768};

//right camera intrinsic matrix
float rightIntrinsic[3][3] = {3765.83307,          0, 339.31958,
                                       0, 3808.08469, 660.05543,
                                       0,          0,         1};
//right camera distortion coefficients
float rightDistortion[1][5] = {-0.24195, 5.97763, -0.02057, -0.01429, 0};
//right camera rotation matrix
float rightRotation[3][3] = {-0.134947,  0.989568, -0.050442,
                              0.752355,  0.069205, -0.655113,
                             -0.644788, -0.126356, -0.753845};
//right camera translation vector
float rightTranslation[1][3] = {50.877397, -99.796492, 1507.312197};

//ball coordinate arrays
//large ball
float rightDaqiu[PicNum][2] = {0};
float leftDaqiu[PicNum][2] = {0};
float worldDaqiu[PicNum][3] = {0};
//small ball
float rightXiaoqiu[PicNum][2] = {0};
float leftXiaoqiu[PicNum][2] = {0};
float worldXiaoqiu[PicNum][3] = {0};
//patterned ball
float rightHuaqiu[PicNum][2] = {0};
float leftHuaqiu[PicNum][2] = {0};
float worldHuaqiu[PicNum][3] = {0};

void ContrastAndBright(double alpha, double beta);//adjust brightness/contrast
void CorrectionProcess();//undistort the source images
void initPos();//manually assign the image coordinates of the balls
void Daqiu();//compute the image coordinates of the large ball
Mat PictureCorrection( Mat image ,float intrinsic[3][3],float distortion[1][5]);//undistort a single image
Point2f xyz2uv(Point3f worldPoint,float intrinsic[3][3],float translation[1][3],float rotation[3][3]);//world coordinates to image coordinates
Point3f uv2xyz(Point2f uvLeft,Point2f uvRight);//image coordinates to world coordinates

int main()
{
    CorrectionProcess();        //undistort the source images
    ContrastAndBright(2.5,50);  //adjust brightness/contrast

    Daqiu();                    //automatically compute the large-ball coordinates
    initPos();                  //manual correction; modify this function to verify your own data

    //solve for the world coordinates of the large ball
    cout<<"求解大球的世界坐标:"<<endl;
    for (int i=0; i<PicNum; i++)
    {
        Point2f l,r;
        Point3f worldPoint;
        l.x = leftDaqiu[i][0];  l.y = leftDaqiu[i][1];
        r.x = rightDaqiu[i][0]; r.y = rightDaqiu[i][1];
        worldPoint = uv2xyz(l,r);
        cout<< worldPoint <<endl;
        worldDaqiu[i][0] = worldPoint.x;
        worldDaqiu[i][1] = worldPoint.y;
        worldDaqiu[i][2] = worldPoint.z;
    }
    //solve for the world coordinates of the small ball
    cout<<"求解小球的世界坐标:"<<endl;
    for (int i=0; i<PicNum; i++)
    {
        Point2f l,r;
        Point3f worldPoint;
        l.x = leftXiaoqiu[i][0];  l.y = leftXiaoqiu[i][1];
        r.x = rightXiaoqiu[i][0]; r.y = rightXiaoqiu[i][1];
        worldPoint = uv2xyz(l,r);
        cout<< worldPoint <<endl;
        worldXiaoqiu[i][0] = worldPoint.x;
        worldXiaoqiu[i][1] = worldPoint.y;
        worldXiaoqiu[i][2] = worldPoint.z;
    }
    //solve for the world coordinates of the patterned ball
    cout<<"求解花球的世界坐标:"<<endl;
    for (int i=0; i<PicNum; i++)
    {
        Point2f l,r;
        Point3f worldPoint;
        l.x = leftHuaqiu[i][0];  l.y = leftHuaqiu[i][1];
        r.x = rightHuaqiu[i][0]; r.y = rightHuaqiu[i][1];
        worldPoint = uv2xyz(l,r);
        cout<< worldPoint <<endl;
        worldHuaqiu[i][0] = worldPoint.x;
        worldHuaqiu[i][1] = worldPoint.y;
        worldHuaqiu[i][2] = worldPoint.z;
    }

    //CSV file writing
    ofstream oFile;                                     //file output stream
    oFile.open("三维坐标.csv", ios::out | ios::trunc);  //open the output file; an easy way to produce a file Excel can read
    //write the large-ball data
    oFile << "大球坐标" << endl;
    oFile << "左相机坐标,,,右相机坐标,,,世界坐标" << endl;
    oFile << "x,y,,x,y,,x,y,z" << endl;
    for (int i=0; i<PicNum ;i++)
    {
        oFile << leftDaqiu[i][0] <<","<< leftDaqiu[i][1] << ",," << rightDaqiu[i][0] <<","<< rightDaqiu[i][1] << ",," << worldDaqiu[i][0] <<","<<  worldDaqiu[i][1] <<","<<  worldDaqiu[i][2] << endl;
    }
    //write the small-ball data
    oFile << "小球坐标" << endl;
    oFile << "左相机坐标,,,右相机坐标,,,世界坐标" << endl;
    oFile << "x,y,,x,y,,x,y,z" << endl;
    for (int i=0; i<PicNum ;i++)
    {
        oFile << leftXiaoqiu[i][0] <<","<< leftXiaoqiu[i][1] << ",," << rightXiaoqiu[i][0] <<","<< rightXiaoqiu[i][1] << ",," << worldXiaoqiu[i][0] <<","<<  worldXiaoqiu[i][1] <<","<<  worldXiaoqiu[i][2] << endl;
    }
    //write the patterned-ball data
    oFile << "花球坐标" << endl;
    oFile << "左相机坐标,,,右相机坐标,,,世界坐标" << endl;
    oFile << "x,y,,x,y,,x,y,z" << endl;
    for (int i=0; i<PicNum ;i++)
    {
        oFile << leftHuaqiu[i][0] <<","<< leftHuaqiu[i][1] << ",," << rightHuaqiu[i][0] <<","<< rightHuaqiu[i][1] << ",," << worldHuaqiu[i][0] <<","<<  worldHuaqiu[i][1] <<","<<  worldHuaqiu[i][2] << endl;
    }
    //close the file
    oFile.close();

    //test
    //given a world coordinate, compute the image coordinates
    //Point3f point(700,220,530);
    //cout<<"左相机中坐标:"<<endl;
    //cout<<xyz2uv(point,leftIntrinsic,leftTranslation,leftRotation)<<endl;
    //cout<<"右相机中坐标:"<<endl;
    //cout<<xyz2uv(point,rightIntrinsic,rightTranslation,rightRotation)<<endl;

    //given the left/right image coordinates, compute the world coordinate
    //Point2f l = xyz2uv(point,leftIntrinsic,leftTranslation,leftRotation);
    //Point2f r = xyz2uv(point,rightIntrinsic,rightTranslation,rightRotation);
    //Point3f worldPoint;
    //worldPoint = uv2xyz(l,r);
    //cout<<"空间坐标为:"<<endl<<uv2xyz(l,r)<<endl;

    /*
    //CSV file reading
    string value;                   //temporary string
    ifstream iFile("三维坐标.csv"); //open the file to read
    //read line by line
    while (iFile.good())
    {
        getline(iFile,value);
        //getline(iFile,value,','); //.csv files use "," as the delimiter
        cout<<value<<endl;
    }
    */
    system("pause");
    return 0;
}

//************************************
// Description: manually correct the circle-center coordinates
// Method:    initPos
// FullName:  initPos
// Access:    public
// Returns:   void
// Author:    bhy
// Date:      2016/12/25
// History:
//************************************
void initPos()
{
//manual correction
leftDaqiu[0][0] = 1175; leftDaqiu[0][1] = 7; rightDaqiu[0][0] = 256; rightDaqiu[0][1] = 1;
leftDaqiu[1][0] = 823; leftDaqiu[1][1] = 603; rightDaqiu[1][0] = 289; rightDaqiu[1][1] = 431;
leftDaqiu[2][0] = 963; leftDaqiu[2][1] = 360; rightDaqiu[2][0] = 283; rightDaqiu[2][1] = 169;
leftDaqiu[3][0] = 1065; leftDaqiu[3][1] = 180; rightDaqiu[3][0] = 294; rightDaqiu[3][1] = 1;
leftDaqiu[4][0] = 1039; leftDaqiu[4][1] = 217; rightDaqiu[4][0] = 314; rightDaqiu[4][1] = 68;
leftDaqiu[5][0] = 896; leftDaqiu[5][1] = 448; rightDaqiu[5][0] = 378; rightDaqiu[5][1] = 402;
leftDaqiu[6][0] = 933; leftDaqiu[6][1] = 376; rightDaqiu[6][0] = 398; rightDaqiu[6][1] = 347;
leftDaqiu[7][0] = 868; leftDaqiu[7][1] = 463; rightDaqiu[7][0] = 423; rightDaqiu[7][1] = 418;
leftDaqiu[8][0] = 878; leftDaqiu[8][1] = 417; rightDaqiu[8][0] = 458; rightDaqiu[8][1] = 466;
leftDaqiu[9][0] = 860; leftDaqiu[9][1] = 423; rightDaqiu[9][0] = 481; rightDaqiu[9][1] = 490;
leftDaqiu[10][0] = 840; leftDaqiu[10][1] = 442; rightDaqiu[10][0] = 499; rightDaqiu[10][1] = 500;
leftDaqiu[11][0] = 822; leftDaqiu[11][1] = 414; rightDaqiu[11][0] = 523; rightDaqiu[11][1] = 511;
leftDaqiu[12][0] = 805; leftDaqiu[12][1] = 406; rightDaqiu[12][0] = 538; rightDaqiu[12][1] = 516;
leftDaqiu[13][0] = 802; leftDaqiu[13][1] = 402; rightDaqiu[13][0] = 549; rightDaqiu[13][1] = 514;

leftXiaoqiu[0][0] = 1250;   leftXiaoqiu[0][1] = 120;   rightXiaoqiu[0][0] = 308;  rightXiaoqiu[0][1] = 313;
leftXiaoqiu[1][0] = 1034;   leftXiaoqiu[1][1] = 481;   rightXiaoqiu[1][0] = 314;  rightXiaoqiu[1][1] = 482;
leftXiaoqiu[2][0] = 1207;   leftXiaoqiu[2][1] = 228;   rightXiaoqiu[2][0] = 284;  rightXiaoqiu[2][1] = 186;
leftXiaoqiu[3][0] = 1343;   leftXiaoqiu[3][1] = 55;    rightXiaoqiu[3][0] = 252;  rightXiaoqiu[3][1] = -20;
leftXiaoqiu[4][0] = 1326;   leftXiaoqiu[4][1] = 102;   rightXiaoqiu[4][0] = 242;  rightXiaoqiu[4][1] = 23;
leftXiaoqiu[5][0] = 1021;   leftXiaoqiu[5][1] = 625;   rightXiaoqiu[5][0] = 269;  rightXiaoqiu[5][1] = 632;
leftXiaoqiu[6][0] = 1123;   leftXiaoqiu[6][1] = 489;   rightXiaoqiu[6][0] = 241;  rightXiaoqiu[6][1] = 458;
leftXiaoqiu[7][0] = 1147;   leftXiaoqiu[7][1] = 475;   rightXiaoqiu[7][0] = 224;  rightXiaoqiu[7][1] = 404;
leftXiaoqiu[8][0] = 1078;   leftXiaoqiu[8][1] = 595;   rightXiaoqiu[8][0] = 223;  rightXiaoqiu[8][1] = 558;
leftXiaoqiu[9][0] = 1062;   leftXiaoqiu[9][1] = 635;   rightXiaoqiu[9][0] = 216;  rightXiaoqiu[9][1] = 598;
leftXiaoqiu[10][0] = 1080;  leftXiaoqiu[10][1] = 619;  rightXiaoqiu[10][0] = 201; rightXiaoqiu[10][1] = 576;
leftXiaoqiu[11][0] = 1054;  leftXiaoqiu[11][1] = 690;  rightXiaoqiu[11][0] = 189; rightXiaoqiu[11][1] = 633;
leftXiaoqiu[12][0] = 1046;  leftXiaoqiu[12][1] = 724;  rightXiaoqiu[12][0] = 179; rightXiaoqiu[12][1] = 655;
leftXiaoqiu[13][0] = 1046;  leftXiaoqiu[13][1] = 726;  rightXiaoqiu[13][0] = 172; rightXiaoqiu[13][1] = 656;

leftHuaqiu[0][0] = 1075;  leftHuaqiu[0][1] = 111;   rightHuaqiu[0][0] = 120;  rightHuaqiu[0][1] = -30;//out of the field of view
leftHuaqiu[1][0] = 708;   leftHuaqiu[1][1] = 810;   rightHuaqiu[1][0] = 142;  rightHuaqiu[1][1] = 382;
leftHuaqiu[2][0] = 876;   leftHuaqiu[2][1] = 518;   rightHuaqiu[2][0] = 131;  rightHuaqiu[2][1] = 83;
leftHuaqiu[3][0] = 1021;  leftHuaqiu[3][1] = 253;   rightHuaqiu[3][0] = 180;   rightHuaqiu[3][1] = -60;//out of the field of view
leftHuaqiu[4][0] = 1019;  leftHuaqiu[4][1] = 248;   rightHuaqiu[4][0] = 183;   rightHuaqiu[4][1] = -30;//out of the field of view
leftHuaqiu[5][0] = 764;   leftHuaqiu[5][1] = 654;   rightHuaqiu[5][0] = 278;  rightHuaqiu[5][1] = 437;
leftHuaqiu[6][0] = 844;   leftHuaqiu[6][1] = 486;   rightHuaqiu[6][0] = 286;  rightHuaqiu[6][1] = 261;
leftHuaqiu[7][0] = 852;   leftHuaqiu[7][1] = 425;   rightHuaqiu[7][0] = 305;  rightHuaqiu[7][1] = 206;
leftHuaqiu[8][0] = 780;   leftHuaqiu[8][1] = 523;   rightHuaqiu[8][0] = 347;  rightHuaqiu[8][1] = 359;
leftHuaqiu[9][0] = 757;   leftHuaqiu[9][1] = 530;   rightHuaqiu[9][0] = 363;  rightHuaqiu[9][1] = 382;
leftHuaqiu[10][0] = 765;  leftHuaqiu[10][1] = 505;  rightHuaqiu[10][0] = 368; rightHuaqiu[10][1] = 345;
leftHuaqiu[11][0] = 702;  leftHuaqiu[11][1] = 554;  rightHuaqiu[11][0] = 382; rightHuaqiu[11][1] = 398;
leftHuaqiu[12][0] = 666;  leftHuaqiu[12][1] = 573;  rightHuaqiu[12][0] = 386; rightHuaqiu[12][1] = 408;
leftHuaqiu[13][0] = 656;  leftHuaqiu[13][1] = 581;  rightHuaqiu[13][0] = 384; rightHuaqiu[13][1] = 398;

}

//************************************
// 2016/12/2 by 小白
// Method:    uv2xyz
// FullName:  uv2xyz
// Access:    public
// Returns:   cv::Point3f  world coordinate
// Qualifier: solve for the world coordinate from the image coordinates in the left and right cameras
// Parameter: Point2f uvLeft   image coordinate in the left camera
// Parameter: Point2f uvRight  image coordinate in the right camera
//************************************
Point3f uv2xyz(Point2f uvLeft,Point2f uvRight)
{
    //      [u1]      |X|         [u2]      |X|
    // Zc1*|v1| = Ml*|Y|     Zc2*|v2| = Mr*|Y|
    //      [ 1]      |Z|         [ 1]      |Z|
    //                |1|                   |1|
    Mat mLeftRotation = Mat(3,3,CV_32F,leftRotation);
    Mat mLeftTranslation = Mat(3,1,CV_32F,leftTranslation);
    Mat mLeftRT = Mat(3,4,CV_32F);//left camera [R|T]
    hconcat(mLeftRotation,mLeftTranslation,mLeftRT);
    Mat mLeftIntrinsic = Mat(3,3,CV_32F,leftIntrinsic);
    Mat mLeftM = mLeftIntrinsic * mLeftRT;
    //cout<<"left camera projection matrix M = "<<endl<<mLeftM<<endl;

    Mat mRightRotation = Mat(3,3,CV_32F,rightRotation);
    Mat mRightTranslation = Mat(3,1,CV_32F,rightTranslation);
    Mat mRightRT = Mat(3,4,CV_32F);//right camera [R|T]
    hconcat(mRightRotation,mRightTranslation,mRightRT);
    Mat mRightIntrinsic = Mat(3,3,CV_32F,rightIntrinsic);
    Mat mRightM = mRightIntrinsic * mRightRT;
    //cout<<"right camera projection matrix M = "<<endl<<mRightM<<endl;

    //least-squares matrix A
    Mat A = Mat(4,3,CV_32F);
    A.at<float>(0,0) = uvLeft.x * mLeftM.at<float>(2,0) - mLeftM.at<float>(0,0);
    A.at<float>(0,1) = uvLeft.x * mLeftM.at<float>(2,1) - mLeftM.at<float>(0,1);
    A.at<float>(0,2) = uvLeft.x * mLeftM.at<float>(2,2) - mLeftM.at<float>(0,2);
    A.at<float>(1,0) = uvLeft.y * mLeftM.at<float>(2,0) - mLeftM.at<float>(1,0);
    A.at<float>(1,1) = uvLeft.y * mLeftM.at<float>(2,1) - mLeftM.at<float>(1,1);
    A.at<float>(1,2) = uvLeft.y * mLeftM.at<float>(2,2) - mLeftM.at<float>(1,2);
    A.at<float>(2,0) = uvRight.x * mRightM.at<float>(2,0) - mRightM.at<float>(0,0);
    A.at<float>(2,1) = uvRight.x * mRightM.at<float>(2,1) - mRightM.at<float>(0,1);
    A.at<float>(2,2) = uvRight.x * mRightM.at<float>(2,2) - mRightM.at<float>(0,2);
    A.at<float>(3,0) = uvRight.y * mRightM.at<float>(2,0) - mRightM.at<float>(1,0);
    A.at<float>(3,1) = uvRight.y * mRightM.at<float>(2,1) - mRightM.at<float>(1,1);
    A.at<float>(3,2) = uvRight.y * mRightM.at<float>(2,2) - mRightM.at<float>(1,2);

    //least-squares vector B
    Mat B = Mat(4,1,CV_32F);
    B.at<float>(0,0) = mLeftM.at<float>(0,3) - uvLeft.x * mLeftM.at<float>(2,3);
    B.at<float>(1,0) = mLeftM.at<float>(1,3) - uvLeft.y * mLeftM.at<float>(2,3);
    B.at<float>(2,0) = mRightM.at<float>(0,3) - uvRight.x * mRightM.at<float>(2,3);
    B.at<float>(3,0) = mRightM.at<float>(1,3) - uvRight.y * mRightM.at<float>(2,3);

    Mat XYZ = Mat(3,1,CV_32F);
    //solve for XYZ by SVD least squares
    solve(A,B,XYZ,DECOMP_SVD);
    //cout<<"world coordinate = "<<endl<<XYZ<<endl;

    //coordinate in the world coordinate system
    Point3f world;
    world.x = XYZ.at<float>(0,0);
    world.y = XYZ.at<float>(1,0);
    world.z = XYZ.at<float>(2,0);
    return world;
}

//************************************
// Description: convert a world coordinate to an image-plane coordinate, used for verification
// Method:    xyz2uv
// FullName:  xyz2uv
// Access:    public
// Parameter: Point3f worldPoint
// Parameter: float intrinsic[3][3]
// Parameter: float translation[1][3]
// Parameter: float rotation[3][3]
// Returns:   cv::Point2f
// Author:    bhy
// Date:      2016/12/28
// History:
//************************************
Point2f xyz2uv(Point3f worldPoint,float intrinsic[3][3],float translation[1][3],float rotation[3][3])
{
    //     [fx s x0]                [Xc]        [Xw]   [u]    1     [Xc]
    // K = |0 fy y0|   TEMP = [R T] |Yc| = TEMP*|Yw|   | | = ---*K *|Yc|
    //     [ 0 0 1 ]                [Zc]        |Zw|   [v]    Zc    [Zc]
    //                                          [1 ]
    Point3f c;
    c.x = rotation[0][0]*worldPoint.x + rotation[0][1]*worldPoint.y + rotation[0][2]*worldPoint.z + translation[0][0]*1;
    c.y = rotation[1][0]*worldPoint.x + rotation[1][1]*worldPoint.y + rotation[1][2]*worldPoint.z + translation[0][1]*1;
    c.z = rotation[2][0]*worldPoint.x + rotation[2][1]*worldPoint.y + rotation[2][2]*worldPoint.z + translation[0][2]*1;

    Point2f uv;
    uv.x = (intrinsic[0][0]*c.x + intrinsic[0][1]*c.y + intrinsic[0][2]*c.z)/c.z + 0.5;//add 0.5 before truncation == round to nearest
    uv.y = (intrinsic[1][0]*c.x + intrinsic[1][1]*c.y + intrinsic[1][2]*c.z)/c.z + 0.5;//add 0.5 before truncation == round to nearest
    return uv;
}

//************************************
// Description: batch-adjust brightness/contrast of the images in the undistorted-image folder
// Method:    ContrastAndBright
// FullName:  ContrastAndBright
// Access:    public
// Parameter: double alpha
// Parameter: double beta
// Returns:   void
// Author:    bhy
// Date:      2016/12/28
// History:
//************************************
void ContrastAndBright(double alpha, double beta)
{
    //double alpha =3;
    //double beta = 40;
    cout<<"**********************************************"<<endl;
    cout<<"              亮度/对比度调节                 "<<endl;
    cout<<"**********************************************"<<endl;

    cout<<"当前 alpha = "<<alpha<<endl;
    cout<<"当前 beta = "<<beta<<endl;

    Mat src,dst;

    //right camera adjustment
    //create the output directory if it does not exist
    system("md 亮度对比度\\rightky1");
    for (int ii=1; ii<=PicNum; ii++)
    {
        cout<<"右:第"<<ii<<"张图片"<<endl;
        char* filename = new char[100];
        sprintf(filename,"畸变校正/rightky1/r%d.bmp",ii);
        src = imread(filename);                  //read the images in order
        delete []filename;                       //free the string
        dst = Mat::zeros(src.size(),src.type()); //clear the destination matrix
        //recompute the pixel values from alpha and beta
        for (int i = 0;i<src.rows;++i)  for(int j= 0;j<src.cols;++j)  for(int k = 0;k<3;++k)  dst.at<Vec3b>(i,j)[k] = saturate_cast<uchar>(src.at<Vec3b>(i,j)[k]*alpha+beta);
        char* output = new char[100];
        sprintf(output,"亮度对比度/rightky1/r%d.bmp",ii);
        imwrite(output,dst);                     //save the adjusted images in order
        delete []output;                         //free the string
    }

    //left camera adjustment
    //create the output directory if it does not exist
    system("md 亮度对比度\\leftky1");
    for (int ii=1; ii<=PicNum; ii++)
    {
        cout<<"左:第"<<ii<<"张图片"<<endl;
        char* filename = new char[100];
        sprintf(filename,"亮度对比度/leftky1/l%d.bmp",ii);
        sprintf(filename,"畸变校正/leftky1/l%d.bmp",ii);
        src = imread(filename);                  //read the images in order
        delete []filename;                       //free the string
        dst = Mat::zeros(src.size(),src.type()); //clear the destination matrix
        //recompute the pixel values from alpha and beta
        for (int i = 0;i<src.rows;++i)  for(int j= 0;j<src.cols;++j)  for(int k = 0;k<3;++k)  dst.at<Vec3b>(i,j)[k] = saturate_cast<uchar>(src.at<Vec3b>(i,j)[k]*alpha+beta);
        char* output = new char[100];
        sprintf(output,"亮度对比度/leftky1/l%d.bmp",ii);
        imwrite(output,dst);                     //save the adjusted images in order
        delete []output;                         //free the string
    }

    cout<<"**********************************************"<<endl;
    cout<<"             亮度/对比度调节结束              "<<endl;
    cout<<"**********************************************"<<endl;
}

//************************************
// Description: batch-undistort the images in the source-material folder
// Method:    CorrectionProcess
// FullName:  CorrectionProcess
// Access:    public
// Returns:   void
// Author:    bhy
// Date:      2016/12/28
// History:
//************************************
void CorrectionProcess()
{
    cout<<"**********************************************"<<endl;
    cout<<"                   畸变校正                   "<<endl;
    cout<<"**********************************************"<<endl;

    Mat image;

    //undistort the right-camera images using the distortion coefficients and intrinsics
    //create the output directory if it does not exist
    system("md 畸变校正\\rightky1");
    for (int i=1; i<=PicNum; i++)
    {
        cout<<"右:校正第"<<i<<"张图片"<<endl;
        char* filename = new char[100];
        sprintf(filename,"素材/rightky1/r%d.bmp",i);
        image = imread(filename,IMREAD_GRAYSCALE);   //read the images in order
        delete []filename;                           //free the string
        char* output = new char[100];
        sprintf(output,"畸变校正/rightky1/r%d.bmp",i);
        imwrite(output,PictureCorrection(image,rightIntrinsic,rightDistortion)); //save the undistorted images in order
        delete []output;                             //free the string
    }

    //undistort the left-camera images using the distortion coefficients and intrinsics
    //create the output directory if it does not exist
    system("md 畸变校正\\leftky1");
    for (int i=1; i<=PicNum; i++)
    {
        cout<<"左:校正第"<<i<<"张图片"<<endl;
        char* filename = new char[100];
        sprintf(filename,"素材/leftky1/l%d.bmp",i);
        image = imread(filename,IMREAD_GRAYSCALE);   //read the images in order
        delete []filename;                           //free the string
        char* output = new char[100];
        sprintf(output,"畸变校正/leftky1/l%d.bmp",i);
        imwrite(output,PictureCorrection(image,leftIntrinsic,leftDistortion)); //save the undistorted images in order
        delete []output;                             //free the string
    }

    cout<<"**********************************************"<<endl;
    cout<<"                  畸变校正结束                "<<endl;
    cout<<"**********************************************"<<endl;
}

//************************************
// Method:    PictureCorrection
// FullName:  PictureCorrection
// Access:    public
// Returns:   cv::Mat  undistorted image
// Qualifier: undistort a single image using the distortion coefficients and intrinsics
// Parameter: Mat image
// Parameter: float intrinsic[3][3]   intrinsic matrix
// Parameter: float distortion[1][5]  distortion coefficients
//************************************
Mat PictureCorrection( Mat image ,float intrinsic[3][3],float distortion[1][5])
{
    Size image_size = image.size();

    Mat intrinsic_matrix = Mat(3,3,CV_32FC1,intrinsic);
    Mat distortion_coeffs = Mat(1,5,CV_32FC1,distortion);
    Mat R = Mat::eye(3,3,CV_32F);
    Mat mapx = Mat(image_size,CV_32FC1);
    Mat mapy = Mat(image_size,CV_32FC1);
    initUndistortRectifyMap(intrinsic_matrix,distortion_coeffs,R,intrinsic_matrix,image_size,CV_32FC1,mapx,mapy);
    Mat t = image.clone();
    cv::remap( image, t, mapx, mapy, INTER_LINEAR);
    return t;
}

//************************************
// Description: find the large-ball center using the Hough transform
// Method:    Daqiu
// FullName:  Daqiu
// Access:    public
// Returns:   void
// Author:    bhy
// Date:      2016/12/28
// History:
//************************************
void Daqiu()
{
    cout<<"**********************************************"<<endl;
    cout<<"                 计算大球圆心                 "<<endl;
    cout<<"**********************************************"<<endl;

    Mat img;

    //right camera   ContrastAndBright(2.5,50);
    system("md 大球圆心\\rightky1");
    cout<<"右图:"<<endl;
    for (int i=1; i<=PicNum; i++)
    {
        stringstream strStm;
        string strFileName;
        strStm << i;  strStm >> strFileName;
        strFileName = "亮度对比度/rightky1/r" + strFileName + ".bmp";
        img = imread(strFileName,IMREAD_GRAYSCALE);
        GaussianBlur(img,img,Size(5,5),0);
        vector<Vec3f> circles;
        HoughCircles( img, circles, CV_HOUGH_GRADIENT, 3 ,70, 70, 30, 95 ,100);//Hough circle transform
        Point2f center(0,0);
        float radius;
        for (int j = 0; j < circles.size(); j++)
        {
            if (circles[j][0] > center.x && circles[j][0] < img.cols/2)
            {
                center.x = circles[j][0];
                center.y = circles[j][1];
                radius = circles[j][2];//radius
            }
        }
        rightDaqiu[i-1][0] = center.x;
        rightDaqiu[i-1][1] = center.y;
        CvScalar color = CV_RGB(0,0,0);
        circle( img, (Point)center, radius, color, 3, 8, 0);//draw the circle
        circle( img, (Point)center, 3, color, 3, 8, 0);//draw the center
        cout<<"圆心:"<<center<<endl;//printed at full precision
        char* output = new char[100];
        sprintf(output,"大球圆心/rightky1/r%d.bmp",i);
        imwrite(output,img);       //save the annotated images in order
        delete []output;           //free the string
    }

    system("md 大球圆心\\leftky1");
    cout<<"左图:"<<endl;
    for (int i=1; i<=PicNum; i++)
    {
        stringstream strStm;
        string strFileName;
        strStm << i;  strStm >> strFileName;
        strFileName = "亮度对比度/leftky1/l" + strFileName + ".bmp";
        img = imread(strFileName,IMREAD_GRAYSCALE);
        GaussianBlur(img,img,Size(5,5),0);
        vector<Vec3f> circles;
        HoughCircles( img, circles, CV_HOUGH_GRADIENT, 3 ,70, 30, 50, 100 ,110);//Hough circle transform
        Point2f center(0,1024);
        float radius;
        for (int j = 0; j < circles.size(); j++)
        {
            if (circles[j][1]<center.y)
            {
                center.x = circles[j][0];
                center.y = circles[j][1];
                radius = circles[j][2];//radius
            }
        }
        leftDaqiu[i-1][0] = center.x;
        leftDaqiu[i-1][1] = center.y;
        CvScalar color = CV_RGB(0,0,0);
        circle( img, center, radius, color, 3, 8, 0);//draw the circle
        circle( img, center, 3, color, 3, 8, 0);//draw the center
        cout<<"圆心:"<<center<<endl;//printed at full precision
        char* output = new char[100];
        sprintf(output,"大球圆心/leftky1/l%d.bmp",i);
        imwrite(output,img);       //save the annotated images in order
        delete []output;           //free the string
    }

    cout<<"**********************************************"<<endl;
    cout<<"                  圆心计算结束                "<<endl;
    cout<<"**********************************************"<<endl;
}

Running the MATLAB script below extracts the world-coordinate data from the generated "三维坐标.csv" and plots the motion trajectories. Note that editing the circle-center coordinates directly in "三维坐标.csv" has no effect; they must be changed in initPos() in the C++ project.

clc;clear;
M = csvread('三维坐标.csv',3,6,[3,6,16,8]);
x = M(:,1);
y = M(:,2);
z = M(:,3);
plot3(x,y,z,'r');
%legend('大球');
hold on

M = csvread('三维坐标.csv',20,6,[20,6,33,8]);
x = M(:,1);
y = M(:,2);
z = M(:,3);
plot3(x,y,z,'g');
%legend('小球');
hold on

M = csvread('三维坐标.csv',37,6,[37,6,50,8]);
x = M(:,1);
y = M(:,2);
z = M(:,3);
plot3(x,y,z,'b');
%legend('花球');
hold on

legend('大球','小球','花球');
title('小球运动轨迹');
xlabel('x');
ylabel('y');
zlabel('z');
grid on
axis square
