Given a pixel point p(x, y) in the original image img, the source point array pts1 (before the transform) and the corresponding destination point array pts2 (after the transform), find the coordinates of p after the perspective transform.
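This is the standard homography mapping: writing the point in homogeneous coordinates,

    (x', y', w)^T = M (u, v, 1)^T,    x = x'/w,    y = y'/w,

which is why the code below divides both numerators by the same third-row expression.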

# Compute the transform matrix M
M = cv2.getPerspectiveTransform(pts1, pts2)

# Coordinate conversion: map a single point (u, v) through the 3x3 matrix
def cvt_pos(pos, cvt_mat_t):
    u = pos[0]
    v = pos[1]
    x = (cvt_mat_t[0][0]*u + cvt_mat_t[0][1]*v + cvt_mat_t[0][2]) / (cvt_mat_t[2][0]*u + cvt_mat_t[2][1]*v + cvt_mat_t[2][2])
    y = (cvt_mat_t[1][0]*u + cvt_mat_t[1][1]*v + cvt_mat_t[1][2]) / (cvt_mat_t[2][0]*u + cvt_mat_t[2][1]*v + cvt_mat_t[2][2])
    return (int(x), int(y))

# Call the function
newpoint = cvt_pos(p, M)
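Together with cvt_pos above, here is a minimal cross-check against OpenCV's own point-transform routine (the quadrilateral corners below are placeholder values for illustration, not from the original post):

import cv2
import numpy as np

pts1 = np.float32([[56, 65], [368, 52], [28, 387], [389, 390]])   # source quadrilateral
pts2 = np.float32([[0, 0], [300, 0], [0, 300], [300, 300]])       # destination rectangle
M = cv2.getPerspectiveTransform(pts1, pts2)

p = (100, 200)
print(cvt_pos(p, M))                              # manual mapping, truncated to int

# cv2.perspectiveTransform expects a float32 array of shape (N, 1, 2)
dst = cv2.perspectiveTransform(np.float32([[p]]), M)
print(dst[0, 0])                                  # same point, without the int() truncation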

Example 2: the OpenCV (C++) interfaces

// Computes the perspective transform matrix.
// src: the coordinates of the four vertices on the source image
// dst: the coordinates on the target image corresponding to src
// Return value: the 3x3 perspective transform matrix
// Role in the lane-detection code: obtains the matrix that maps the original image to the bird's-eye view
cv::Mat getPerspectiveTransform(const Point2f* src, const Point2f* dst)

// Computes the coordinates of a point / array of points after applying the transform matrix m.
// src: the points to transform, e.g. coordinates in the bird's-eye view
// m: the transform matrix from src to dst
// dst: the points corresponding to src after applying m
// Role in the lane-detection code: maps the lane-line coordinates in the bird's-eye view to pixel coordinates in the original view
void perspectiveTransform(InputArray src, OutputArray dst, InputArray m)

// Applies a perspective transform to an image.
// src: input image
// dst: output image
// M: the transform matrix, e.g. the matrix obtained from getPerspectiveTransform
// dsize: size of the output image
// flags: interpolation method for the output image
// borderMode: pixel extrapolation method
// borderValue: value used when the border is constant
// Role in the lane-detection code:
//   1. map the original image to the bird's-eye view, where lane detection is performed;
//   2. map the bird's-eye view back to the original view for displaying the results, etc.
void warpPerspective(InputArray src, OutputArray dst, InputArray M, Size dsize, int flags=INTER_LINEAR, int borderMode=BORDER_CONSTANT, const Scalar& borderValue=Scalar())
//====================================================================//
// Created by liheng on 19-2-12.
// Program: map coordinate points in the inverse-perspective (bird's-eye) image back to the original image
// Date: 2019.2.12
// Author: liheng
// Version: V1.0
//====================================================================//
#include <iostream>
#include <opencv2/opencv.hpp>
#include <opencv2/imgproc/types_c.h>

int main()
{
    // Read the input image
    cv::Mat srcImage = cv::imread("../pictures/000177.png", cv::IMREAD_GRAYSCALE);

    // Define the source and destination points: the source points lie on the image as read in,
    // the destination points are their counterparts in the bird's-eye view
    cv::Point2f srcPoints[4], dstPoints[4];
    srcPoints[0] = cv::Point2f(369, 375);
    srcPoints[1] = cv::Point2f(545, 221);
    srcPoints[2] = cv::Point2f(650, 221);
    srcPoints[3] = cv::Point2f(793, 375);

    dstPoints[0] = cv::Point2f(339, 375);
    dstPoints[1] = cv::Point2f(339, 211);
    dstPoints[2] = cv::Point2f(823, 211);
    dstPoints[3] = cv::Point2f(823, 375);

    // 1. Compute the transform matrices
    cv::Mat m_persctiveMat = cv::getPerspectiveTransform(srcPoints, dstPoints);   // input image -> bird's-eye view
    cv::Mat m_unPersctiveMat = cv::getPerspectiveTransform(dstPoints, srcPoints); // bird's-eye view -> original image

    // 2. Compute the bird's-eye view
    cv::Mat birdViewImage;
    cv::warpPerspective(srcImage, birdViewImage, m_persctiveMat,
                        cv::Size(srcImage.cols, srcImage.rows), cv::INTER_LINEAR);

    // Two points on each lane line in the bird's-eye view.
    // Note: for simplicity only 2 points per line are transformed here.
    std::vector<cv::Point2f> leftLine, rightLine;
    leftLine.push_back(cv::Point2f(661, 0));
    leftLine.push_back(cv::Point2f(366, 376));
    rightLine.push_back(cv::Point2f(1097, 0));
    rightLine.push_back(cv::Point2f(883, 376));

    // 3. Compute the corresponding coordinates on the original image
    std::vector<cv::Point2f> unWarpedLeftLine, unWarpedRightLine;
    cv::perspectiveTransform(leftLine, unWarpedLeftLine, m_unPersctiveMat);
    cv::perspectiveTransform(rightLine, unWarpedRightLine, m_unPersctiveMat);

    // Visualize the line segments
    cv::cvtColor(srcImage, srcImage, CV_GRAY2BGR);
    cv::line(srcImage, unWarpedLeftLine[0], unWarpedLeftLine[1], cv::Scalar(0, 255, 0), 2);
    cv::line(srcImage, unWarpedRightLine[0], unWarpedRightLine[1], cv::Scalar(0, 255, 0), 2);

    cv::cvtColor(birdViewImage, birdViewImage, CV_GRAY2BGR);
    cv::line(birdViewImage, leftLine[0], leftLine[1], cv::Scalar(0, 255, 0), 2);
    cv::line(birdViewImage, rightLine[0], rightLine[1], cv::Scalar(0, 255, 0), 2);

    cv::imshow("srcImage", srcImage);
    cv::imshow("birdViewImage", birdViewImage);
    cv::waitKey(0);

    return 0;
}
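A side note on the two matrices above: because the second getPerspectiveTransform call simply swaps the two point sets, m_unPersctiveMat is the inverse of m_persctiveMat (normalized so that its bottom-right element is 1). A minimal Python check of this, reusing the corner points from the C++ example:

import cv2
import numpy as np

src = np.float32([[369, 375], [545, 221], [650, 221], [793, 375]])   # original image
dst = np.float32([[339, 375], [339, 211], [823, 211], [823, 375]])   # bird's-eye view

M    = cv2.getPerspectiveTransform(src, dst)    # original -> bird's-eye
Minv = cv2.getPerspectiveTransform(dst, src)    # bird's-eye -> original

inv = np.linalg.inv(M)
print(np.allclose(Minv, inv / inv[2, 2]))       # True: inverses of each other, up to scale

Equivalently, warpPerspective accepts the WARP_INVERSE_MAP flag, so the forward matrix M can be reused directly when unwarping an image instead of computing Minv.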
Example 3: an OpenCV for Unity (C#) version of the point mapping

private void InitGetPerspectiveWarpMatrix()
{
    var tl = new Vector2(115, 807);
    var tr = new Vector2(1757, 795);
    var bl = new Vector2(265, 152);
    var br = new Vector2(1610, 153);
    var tm = new Vector2(270, 152);
    var bm = new Vector2(120, 800);
    warpMatrix = GetWarpMatrix(tl, tr, br, bl);
    TestPerspectiveMatrix(tl, bl, tm, tr, br, bm);
}

private void TestPerspectiveMatrix(Vector2 tl, Vector2 bl, Vector2 tm, Vector2 tr, Vector2 br, Vector2 bm)
{
    var testPointTl = GetDstPointFromSrcPoint(tl, warpMatrix);
    var testPointTr = GetDstPointFromSrcPoint(tr, warpMatrix);
    var testPointBl = GetDstPointFromSrcPoint(bl, warpMatrix);
    var testPointBr = GetDstPointFromSrcPoint(br, warpMatrix);
    Debug.Log("Test InputPointTl " + tl + " outPut = " + testPointTl);
    Debug.Log("Test InputPointTr " + tr + " outPut = " + testPointTr);
    Debug.Log("Test InputPointBl " + bl + " outPut = " + testPointBl);
    Debug.Log("Test InputPointBr " + br + " outPut = " + testPointBr);

    var testA = new Vector2(124, 964);
    var testB = new Vector2(50, 195);
    var testC = new Vector2(1680, 195);
    var testE = new Vector2(946, 156);
    var testF = new Vector2(946, 794);
    var testG = new Vector2(574, 156);
    var testH = new Vector2(485, 796);
    var testI = new Vector2(344, 195);
    var testJ = new Vector2(1459, 195);
    var testK = new Vector2(233, 738);
    var testL = new Vector2(1552, 735);
    var testPA = GetDstPointFromSrcPoint(testA, warpMatrix);
    var testPB = GetDstPointFromSrcPoint(testB, warpMatrix);
    var testPC = GetDstPointFromSrcPoint(testC, warpMatrix);
    var testPE = GetDstPointFromSrcPoint(testE, warpMatrix);
    var testPF = GetDstPointFromSrcPoint(testF, warpMatrix);
    var testPG = GetDstPointFromSrcPoint(testG, warpMatrix);
    var testPH = GetDstPointFromSrcPoint(testH, warpMatrix);
    var testPI = GetDstPointFromSrcPoint(testI, warpMatrix);
    var testPJ = GetDstPointFromSrcPoint(testJ, warpMatrix);
    var testPK = GetDstPointFromSrcPoint(testK, warpMatrix);
    var testPL = GetDstPointFromSrcPoint(testL, warpMatrix);
    Debug.Log("Test========== InputPointA " + testA + " outPut = " + testPA);
    Debug.Log("Test========= InputPointB " + testB + " outPut = " + testPB);
    Debug.Log("Test========= InputPointC " + testC + " outPut = " + testPC);
    Debug.Log("Test========== InputPointE " + testE + " outPut = " + testPE);
    Debug.Log("Test========= InputPointF " + testF + " outPut = " + testPF);
    Debug.Log("Test========= InputPointG " + testG + " outPut = " + testPG);
    Debug.Log("Test========= InputPointH " + testH + " outPut = " + testPH);
    Debug.Log("Test========= InputPointI " + testI + " outPut = " + testPI);
    Debug.Log("Test========= InputPointJ " + testJ + " outPut = " + testPJ);
    Debug.Log("Test========= InputPointK " + testK + " outPut = " + testPK);
    Debug.Log("Test========= InputPointL " + testL + " outPut = " + testPL);

    var inputMat = webcamTexToMat.rgbaMat;
    Texture2D inputTexture = Resources.Load("InputTexture") as Texture2D;
    Mat outputMat = new Mat(inputTexture.height, inputTexture.width, CvType.CV_8UC4);
    Imgproc.cvtColor(inputMat, outputMat, Imgproc.COLOR_RGBA2GRAY);   // rgbaMat is 4-channel
    // cv::Size takes (width, height), i.e. (cols, rows)
    Imgproc.warpPerspective(inputMat, outputMat, warpMatrix, new Size(inputMat.cols(), inputMat.rows()));
    Mat outputMat0 = inputMat.clone();
    Texture2D outputTexture = new Texture2D(outputMat0.cols(), outputMat0.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(outputMat0, outputTexture);
    imgPer.gameObject.GetComponent<RawImage>().texture = outputTexture;
}

private void ShowLittelScreen()
{
    var inputMat = webcamTexToMat.rgbaMat;
    Mat outputMat0 = inputMat.clone();
    Texture2D outputTexture = new Texture2D(outputMat0.cols(), outputMat0.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(outputMat0, outputTexture);
    Debug.Log("OutputTxture..............." + outputTexture.name);
    imgPer.gameObject.GetComponent<RawImage>().texture = outputTexture;
}

private void InitTrainData()
{
    //faceXml_path = Application.streamingAssetsPath + "/haarcascade_frontalface_alt2.xml";
    faceXml_path = Application.streamingAssetsPath + "/golfcascade.xml";
    classifier = new CascadeClassifier(faceXml_path);
}

private Mat GetWarpMatrix(Vector2 tl, Vector2 tr, Vector2 br, Vector2 bl)
{
    // Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2);
    // Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2);
    //
    // var srcCameraMat = webcamTexToMat.rgbaMat;
    // srcRectMat.put(0, 0, tl.x, tl.y, tr.x, tr.y, bl.x, bl.y, br.x, br.y);
    // //dstRectMat.put(0, 0, 0.0, srcCameraMat.rows(), srcCameraMat.cols(), srcCameraMat.rows(), 0.0, 0.0, srcCameraMat.rows(), 0);
    // dstRectMat.put(0, 0,
    //     0, 0,
    //     3800, 0,
    //     0, 1800,
    //     3800, 1800);
    //
    // Point[] pointArr = new Point[4];
    // pointArr[0] = new Point(tl.x, tl.y);
    // pointArr[1] = new Point(tr.x, tr.y);
    // pointArr[2] = new Point(bl.x, bl.y);
    // pointArr[3] = new Point(br.x, br.y);
    // var srcP = new MatOfPoint2f(pointArr);
    //
    // Point[] dstPointArr = new Point[4];
    // dstPointArr[0] = new Point(0, 1080);
    // dstPointArr[1] = new Point(1920, 1080);
    // dstPointArr[2] = new Point(0, 0);
    // dstPointArr[3] = new Point(1080, 0);
    // var dstP = new MatOfPoint2f(dstPointArr);

    Texture2D inputTexture = Resources.Load("InputTexture") as Texture2D;
    Mat inputMat = new Mat(inputTexture.height, inputTexture.width, CvType.CV_8UC4);
    //Mat outputMat = new Mat(inputTexture.height, inputTexture.width, CvType.CV_8UC4);
    Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2);
    Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2);
    srcRectMat.put(0, 0, tl.x, tl.y, tr.x, tr.y, bl.x, bl.y, br.x, br.y);
    dstRectMat.put(0, 0,
        // 0.0, inputMat.rows(),
        // inputMat.cols(), inputMat.rows(),
        // 0.0, 0.0,
        // inputMat.rows(), 0);
        0.0, 1690, 3690, 1690, 0.0, 0.0, 3690, 0);
    // Debug.Log("TargetPoint tl = " + 0 + ", " + inputMat.rows() +
    //           " tr = " + inputMat.cols() + " , " + inputMat.rows() +
    //           " bl = " + 0 + " , " + 0 +
    //           " br = " + inputMat.rows() + " , " + 0);

    //Mat perspectiveTransformHomo = Calib3d.findHomography(srcP, dstP);
    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(srcRectMat, dstRectMat);
    Debug.Log("PerspectiveMatrix: " + perspectiveTransform + " type = " + CvType.typeToString(perspectiveTransform.type()));
    Debug.Log("matrix = " + perspectiveTransform.dump());
    //Debug.Log("matrixHomo = " + perspectiveTransformHomo.dump());
    //perspectiveTransform.convertTo(perspectiveTransform, CvType.CV_32FC2);
    return perspectiveTransform;
    //return perspectiveTransformHomo;
    //return getPerspectiveTransform(target_pt, origin_pt); // invertible
}

private Vector2 GetDstPointFromSrcPoint(Vector2 inputPoint, Mat warpMatrix)
{
    var outPutPoint = new Vector2();
    // Earlier attempt kept for reference: multiplying warpMatrix by a 3x1 vector directly,
    // without dividing by the third homogeneous component.
    // Mat inputMat = new Mat(3, 1, CvType.CV_64FC1);
    // //Mat outPutMat = new Mat(3, 1, CvType.CV_64FC1);
    //
    // //Mat inputMat = new Mat(3, 1, CvType.CV_32FC2);
    // inputMat.put(0, 0, inputPoint.x, inputPoint.y);
    // Mat outMat = warpMatrix * inputMat;
    //
    // var p = new Point();
    // p.x = inputPoint.x;
    // p.y = inputPoint.y;
    //
    // //Core.perspectiveTransform(inputMat, outPutMat, warpMatrix);
    //
    // // Debug.Log("Matrix1: " + outMat +
    // //           " cols=" + outMat.cols() + " rows=" + outMat.rows() + " channels()=" + outMat.channels() +
    // //           " type = " + CvType.typeToString(outMat.type()));
    // Debug.Log("TestMatrix1: " + outMat.dump());
    // //Debug.Log("TestMatrix2: " + outPutMat.dump());
    //
    // var xx = (float)(outMat.get(0, 0)[0]);
    // Debug.Log("outMat float x = " + xx);
    //
    // var yy = (float)(outMat.get(1, 0)[0]);
    // Debug.Log("outMat double y = " + yy);
    //
    // outPutPoint.x = math.round(xx);
    // outPutPoint.y = math.round(yy);
    outPutPoint = cvt_pos(inputPoint, warpMatrix);
    return outPutPoint;
}

private Vector2 cvt_pos(Vector2 pos, Mat cvt_mat_t)
{
    var desPos = new Vector2();
    var u = pos[0];
    var v = pos[1];
    // Same formula as the Python cvt_pos above: divide by the third homogeneous coordinate
    var x = (cvt_mat_t.get(0, 0)[0] * u + cvt_mat_t.get(0, 1)[0] * v + cvt_mat_t.get(0, 2)[0]) /
            (cvt_mat_t.get(2, 0)[0] * u + cvt_mat_t.get(2, 1)[0] * v + cvt_mat_t.get(2, 2)[0]);
    var y = (cvt_mat_t.get(1, 0)[0] * u + cvt_mat_t.get(1, 1)[0] * v + cvt_mat_t.get(1, 2)[0]) /
            (cvt_mat_t.get(2, 0)[0] * u + cvt_mat_t.get(2, 1)[0] * v + cvt_mat_t.get(2, 2)[0]);
    desPos.x = (float)x;
    desPos.y = (float)y;
    return desPos;
}

References:

  1. OpenCV-透视变换及对二维点求透视变换之后的坐标 (leonardohaig, CSDN)
  2. OpenCV中二维点求取进行仿射和透视变换之后的坐标点方法 (雪山飞狐, CSDN)
  3. OpenCV实现点对点的透视变换(C++) (LankyBin, CSDN)
  4. 4.OpenCV之透视转换 (码农LEO&MOVE, CSDN)
  5. OpenCV利用透视变换矫正图像 (飘杨......, 博客园)
  6. 15-案例六 对象提取与测量 (bilibili)
