Example 1: given a pixel p(x, y) in the source image img, the four source points pts1 and their corresponding destination points pts2, find the coordinates of p after the perspective transform. getPerspectiveTransform returns a 3x3 homography M; a single point (u, v) is mapped by lifting it to homogeneous coordinates (u, v, 1), multiplying by M, and dividing by the resulting third component, which is exactly what cvt_pos below does.

# compute the perspective transform matrix M
M = cv2.getPerspectiveTransform(pts1, pts2)
 
# convert a single point's coordinates with the matrix
def cvt_pos(pos, cvt_mat_t):
    u = pos[0]
    v = pos[1]
    x = (cvt_mat_t[0][0]*u+cvt_mat_t[0][1]*v+cvt_mat_t[0][2])/(cvt_mat_t[2][0]*u+cvt_mat_t[2][1]*v+cvt_mat_t[2][2])
    y = (cvt_mat_t[1][0]*u+cvt_mat_t[1][1]*v+cvt_mat_t[1][2])/(cvt_mat_t[2][0]*u+cvt_mat_t[2][1]*v+cvt_mat_t[2][2])
    return (int(x), int(y))
 
# call the function
newpoint = cvt_pos(p, M)
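
The same mapping can also be cross-checked against OpenCV's own point-transform routine. A minimal sketch, assuming p, pts1, pts2, M and cvt_pos are defined as above (cv2.perspectiveTransform expects a float32 array of shape (N, 1, 2) and performs the same multiply-and-divide):

import numpy as np
import cv2

# cv2.perspectiveTransform applies M to each point and divides by the third row
pts = np.array([[p]], dtype=np.float32)          # shape (1, 1, 2)
newpoint_cv = cv2.perspectiveTransform(pts, M)[0][0]

print(newpoint_cv)   # should agree with cvt_pos(p, M) up to the int() truncation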

Example 2: the OpenCV C++ interfaces

//Computes the perspective transform matrix.
//src: the four vertex coordinates in the source image
//dst: the coordinates in the destination image corresponding to src
//return value: the 3x3 perspective transform matrix
//Role in the lane-detection code: obtains the matrix that maps the original image to the bird's-eye view
cv::Mat getPerspectiveTransform(const Point2f* src, const Point2f* dst)
//Computes the coordinates of a point / array of points after applying the transform matrix m.
//src: the points to transform, e.g. coordinates in the bird's-eye view
//m: the transform matrix from src to dst
//dst: the points corresponding to src after applying m
//Role in the lane-detection code: maps lane-line coordinates in the bird's-eye view back to pixel coordinates in the original view
void perspectiveTransform(InputArray src, OutputArray dst, InputArray m ) 
//Applies a perspective transform to an image.
//src: input image
//dst: output image
//M: the transform matrix, e.g. the one returned by getPerspectiveTransform
//dsize: size of the output image
//flags: interpolation method for the output image
//borderMode: pixel extrapolation method
//borderValue: value used with a constant border
//Role in the lane-detection code:
//   1. warp the original image to the bird's-eye view for lane detection;
//   2. warp the bird's-eye view back to the original view for displaying the results
void warpPerspective(InputArray src, OutputArray dst, InputArray M, Size dsize, int flags=INTER_LINEAR, int borderMode=BORDER_CONSTANT, const Scalar& borderValue=Scalar())
//====================================================================//
// Created by liheng on 19-2-12.
//Program: map coordinates from the inverse-perspective-transformed (bird's-eye) view back to the original image
//Date: 2019.2.12
//Author:liheng
//Version:V1.0
//====================================================================//
 
#include <iostream>
#include <opencv2/opencv.hpp>
#include <opencv2/imgproc/types_c.h>
 
int main()
{
    //read the input image first
    cv::Mat srcImage = cv::imread("../pictures/000177.png",cv::IMREAD_GRAYSCALE);
 
    //define the source and destination points: the source points lie in the image as read in, the destination points are their corresponding positions in the bird's-eye view
    cv::Point2f srcPoints[4],dstPoints[4];
    srcPoints[0] = cv::Point2f(369,375);
    srcPoints[1] = cv::Point2f(545,221);
    srcPoints[2] = cv::Point2f(650,221);
    srcPoints[3] = cv::Point2f(793,375);
 
    dstPoints[0] = cv::Point2f(339,375);
    dstPoints[1] = cv::Point2f(339,211);
    dstPoints[2] = cv::Point2f(823,211);
    dstPoints[3] = cv::Point2f(823,375);
 
    //1. compute the transform matrices
    cv::Mat m_persctiveMat = cv::getPerspectiveTransform(srcPoints,dstPoints);//original image -> bird's-eye view
    cv::Mat m_unPersctiveMat =cv::getPerspectiveTransform(dstPoints,srcPoints);//bird's-eye view -> original image
 
    //2. compute the bird's-eye view
    cv::Mat birdViewImage;
    cv::warpPerspective(srcImage,birdViewImage,m_persctiveMat,cv::Size(srcImage.cols,srcImage.rows),cv::INTER_LINEAR);
 
 
    //two points on each lane line in the bird's-eye view. Note: for simplicity, only two points per line are transformed
    std::vector<cv::Point2f> leftLine,rightLine;
    leftLine.push_back(cv::Point2f(661,0));
    leftLine.push_back(cv::Point2f(366,376));
 
    rightLine.push_back(cv::Point2f(1097,0));
    rightLine.push_back(cv::Point2f(883,376));
 
    //3. compute their corresponding coordinates in the original image
    std::vector<cv::Point2f> unWarpedLeftLine,unWarpedRightLine;
    cv::perspectiveTransform(leftLine,unWarpedLeftLine,m_unPersctiveMat);
    cv::perspectiveTransform(rightLine,unWarpedRightLine,m_unPersctiveMat);
 
    //visualize the line segments
    cv::cvtColor(srcImage,srcImage,CV_GRAY2BGR);
    cv::line(srcImage,unWarpedLeftLine[0],unWarpedLeftLine[1],cv::Scalar(0,255,0),2);
    cv::line(srcImage,unWarpedRightLine[0],unWarpedRightLine[1],cv::Scalar(0,255,0),2);
 
    cv::cvtColor(birdViewImage,birdViewImage,CV_GRAY2BGR);
    cv::line(birdViewImage,leftLine[0],leftLine[1],cv::Scalar(0,255,0),2);
    cv::line(birdViewImage,rightLine[0],rightLine[1],cv::Scalar(0,255,0),2);
 
    cv::imshow("srcImage",srcImage);
    cv::imshow("birdViewImage",birdViewImage);
    cv::waitKey(0);
 
 
    return 0;
}
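
A side note on step 1 above: since a perspective matrix is invertible, the bird's-eye-to-original matrix can also be obtained by inverting the forward matrix (or by passing cv::WARP_INVERSE_MAP to warpPerspective) instead of calling getPerspectiveTransform a second time. A minimal sketch of that relation in Python/NumPy (the two matrices agree up to an overall scale, which cancels out in the divide step):

import numpy as np

# M maps original -> bird's-eye view; its inverse maps bird's-eye view -> original
M_un = np.linalg.inv(M)
# perspectiveTransform / cvt_pos divide by the third row, so the overall scale of M_un does not matter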

 

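Example 3: the same point conversion in a Unity project, using the OpenCV for Unity plugin (C#).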
private void InitGetPerspectiveWarpMatrix()
        {
             
             var tl = new Vector2(115,807); 
             var tr = new Vector2(1757,795);
             var bl = new Vector2(265,152);
             var br = new Vector2(1610,153); 
            
            var tm = new Vector2(270,152);
            var bm = new Vector2(120,800);
            
            warpMatrix = GetWarpMatrix(tl,tr,br,bl);
            TestPerspectiveMatrix(tl, bl,tm,tr, br, bm);
        }

        private void TestPerspectiveMatrix(Vector2 tl,Vector2 bl, Vector2 tm,Vector2 tr, Vector2 br, Vector2 bm)
        {
            var testPointTl = GetDstPointFromSrcPoint(tl, warpMatrix);
            var testPointTr = GetDstPointFromSrcPoint(tr, warpMatrix);
            var testPointBl = GetDstPointFromSrcPoint(bl, warpMatrix);
            var testPointBr = GetDstPointFromSrcPoint(br, warpMatrix);

            Debug.Log("Test InputPointTl "+tl +" outPut = "+testPointTl);
            Debug.Log("Test InputPointTr "+tr +" outPut = "+testPointTr);
            Debug.Log("Test InputPointBl "+bl+" outPut = "+testPointBl);
            Debug.Log("Test InputPointBr "+br +" outPut = "+testPointBr);
            

            var testA = new Vector2(124,964);
            var testB = new Vector2(50,195);        
            var testC = new Vector2(1680,195);
            
            var testE = new Vector2(946,156);
            var testF = new Vector2(946,794);
            var testG = new Vector2(574,156);
            var testH = new Vector2(485,796);
            
            var testI = new Vector2(344,195);
            var testJ = new Vector2(1459,195);
            var testK = new Vector2(233,738);
            var testL = new Vector2(1552,735);
            
            var testPA = GetDstPointFromSrcPoint(testA, warpMatrix);
            var testPB = GetDstPointFromSrcPoint(testB, warpMatrix);
            var testPC = GetDstPointFromSrcPoint(testC, warpMatrix);
            
            var testPE = GetDstPointFromSrcPoint(testE, warpMatrix);
            var testPF = GetDstPointFromSrcPoint(testF, warpMatrix);
            var testPG = GetDstPointFromSrcPoint(testG, warpMatrix);
            var testPH = GetDstPointFromSrcPoint(testH, warpMatrix);
            
            var testPI = GetDstPointFromSrcPoint(testI, warpMatrix);
            var testPJ = GetDstPointFromSrcPoint(testJ, warpMatrix);
            var testPK = GetDstPointFromSrcPoint(testK, warpMatrix);
            var testPL = GetDstPointFromSrcPoint(testL, warpMatrix);
            
            Debug.Log("Test========== InputPointA "+testA+" outPut = "+testPA);
            Debug.Log("Test========= InputPointB "+testB +" outPut = "+testPB);
            Debug.Log("Test========= InputPointC "+testC +" outPut = "+testPC);
            
            Debug.Log("Test========== InputPointE "+testE+" outPut = "+testPE);
            Debug.Log("Test========= InputPointF "+testF +" outPut = "+testPF);
            Debug.Log("Test========= InputPointG "+testG +" outPut = "+testPG);
            Debug.Log("Test========= InputPointH "+testH +" outPut = "+testPH);
            
            Debug.Log("Test========= InputPointI "+testI +" outPut = "+testPI);
            Debug.Log("Test========= InputPointJ "+testJ +" outPut = "+testPJ);
            Debug.Log("Test========= InputPointK "+testK +" outPut = "+testPK); 
            Debug.Log("Test========= InputPointL "+testL +" outPut = "+testPL); 
            
            var inputMat = webcamTexToMat.rgbaMat;
            Texture2D inputTexture = Resources.Load("InputTexture") as Texture2D;
            Mat outputMat = new Mat(inputTexture.height, inputTexture.width, CvType.CV_8UC4);
            
            Imgproc.cvtColor(inputMat, outputMat, Imgproc.COLOR_RGB2GRAY);
            Imgproc.warpPerspective(inputMat, outputMat, warpMatrix, new Size(inputMat.cols(), inputMat.rows())); // Size is (width, height), i.e. (cols, rows)

            Mat outputMat0 = inputMat.clone();
            Texture2D outputTexture = new Texture2D(outputMat0.cols(), outputMat0.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(outputMat0, outputTexture);
            imgPer.gameObject.GetComponent<RawImage>().texture = outputTexture;
        }

        private void ShowLittelScreen()
        {
            var inputMat = webcamTexToMat.rgbaMat;
            Mat outputMat0 = inputMat.clone();
            Texture2D outputTexture = new Texture2D(outputMat0.cols(), outputMat0.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(outputMat0, outputTexture);
            Debug.Log("OutputTxture..............."+outputTexture.name);
            imgPer.gameObject.GetComponent<RawImage>().texture = outputTexture; 
        }

        private void InitTrainData()
        {
            //faceXml_path = Application.streamingAssetsPath + "/haarcascade_frontalface_alt2.xml"; 
            faceXml_path = Application.streamingAssetsPath + "/golfcascade.xml"; 
            classifier = new CascadeClassifier(faceXml_path); 
        }
        
        private Mat GetWarpMatrix(Vector2 tl, Vector2 tr, Vector2 br, Vector2 bl)
        {
            // Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2);
            // Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2);
            //
            // var srcCameraMat = webcamTexToMat.rgbaMat;
            // srcRectMat.put(0, 0, tl.x, tl.y, tr.x, tr.y, bl.x, bl.y, br.x, br.y);
            // //dstRectMat.put(0, 0, 0.0, srcCameraMat.rows(),srcCameraMat.cols(),srcCameraMat.rows(), 0.0, 0.0,srcCameraMat.rows(), 0);
            // dstRectMat.put(0, 0,
            //     0, 0,
            //     3800, 0,
            //     0,1800,
            //     3800,1800);
            //
            
            // Point[] pointArr = new Point[4];
            // pointArr[0] = new Point(tl.x, tl.y);
            // pointArr[1] = new Point(tr.x, tr.y);
            // pointArr[2] = new Point(bl.x, bl.y);
            // pointArr[3] = new Point(br.x, br.y);
            // var srcP = new MatOfPoint2f(pointArr);
            //
            // Point[] dstPointArr = new Point[4];
            // dstPointArr[0] = new Point(0,1080);
            // dstPointArr[1] = new Point(1920, 1080);
            // dstPointArr[2] = new Point(0,0);
            // dstPointArr[3] = new Point(1080,0);
            // var dstP = new MatOfPoint2f(dstPointArr);
            
            Texture2D inputTexture = Resources.Load("InputTexture") as Texture2D;  
            Mat inputMat = new Mat(inputTexture.height, inputTexture.width, CvType.CV_8UC4);  
            //Mat outputMat = new Mat(inputTexture.height, inputTexture.width, CvType.CV_8UC4);  
            Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2);  
            Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2);  
  
            srcRectMat.put(0, 0, 
                tl.x, tl.y, 
                tr.x, tr.y, 
                bl.x, bl.y, 
                br.x, br.y);  
        
            dstRectMat.put(0, 0, 
                // 0.0, inputMat.rows(), 
                // inputMat.cols(), inputMat.rows(), 
                // 0.0, 0.0, 
                // inputMat.rows(), 0);  
                0.0, 1690, 
                3690,1690, 
                0.0, 0.0, 
                3690, 0);  
            // Debug.Log("TargetPoint tl = "+0+", "+inputMat.rows() +
            //           " tr = "+inputMat.cols() + " , "+ inputMat.rows()+
            //           " bl = "+0+" , "+0 + 
            //           " br = "+inputMat.rows() + " , "+0);

            //Mat perspectiveTransformHomo =Calib3d.findHomography(srcP,dstP);
            Mat perspectiveTransform = Imgproc.getPerspectiveTransform(srcRectMat, dstRectMat);
            
            Debug.Log("PerspectiveMatrix: "+perspectiveTransform +" type = "+CvType.typeToString(perspectiveTransform.type()));
            Debug.Log("matrix = "+perspectiveTransform.dump());
            //Debug.Log("matrixHomo = "+perspectiveTransformHomo.dump());
            //perspectiveTransform.convertTo(perspectiveTransform, CvType.CV_32FC2);
            return perspectiveTransform;
            //return perspectiveTransformHomo;
            //return getPerspectiveTransform(target_pt, origin_pt); // invertible: the matrix can be computed in either direction
        }

        private Vector2 GetDstPointFromSrcPoint(Vector2 inputPoint, Mat warpMatrix)
        {
            var outPutPoint = new Vector2();
                     
            // Mat inputMat = new Mat(3,1, CvType.CV_64FC1);
            // //Mat outPutMat = new Mat(3,1, CvType.CV_64FC1);
            //
            // //Mat inputMat = new Mat(3,1, CvType.CV_32FC2);
            // inputMat.put(0, 0, inputPoint.x,inputPoint.y);
            // Mat outMat = warpMatrix * inputMat;
            //
            // var p = new Point();
            // p.x = inputPoint.x;
            // p.y = inputPoint.y;
            //
            // //Core.perspectiveTransform(inputMat, outPutMat,warpMatrix);
            //
            // // Debug.Log("Matrix1: "+outMat+
            // //           " cols="+outMat.cols()+ " rows="+outMat.rows()+" channels()="+outMat.channels()+
            // //           " type = "+CvType.typeToString(outMat.type()));
            // Debug.Log("TestMatrix1: "+outMat.dump());
            // //Debug.Log("TestMatrix2: "+outPutMat.dump());
            //
            //  var xx =  (float) (outMat.get(0,0)[0]);
            //  Debug.Log("outMat float x = "+xx);
            //  
            //  var yy = (float) (outMat.get(1,0)[0]);
            //  Debug.Log("outMat double y = "+yy);
            //  
            //  outPutPoint.x = math.round(xx) ;
            //  outPutPoint.y = math.round(yy);

            outPutPoint = cvt_pos(inputPoint, warpMatrix);
            return outPutPoint;
        }

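        // Same math as the Python cvt_pos above: multiply (u, v, 1) by the 3x3 matrix, then divide by the third row.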
        private Vector2 cvt_pos(Vector2 pos, Mat cvt_mat_t)
        {
            var desPos = new Vector2();
            var u = pos[0];
            var v = pos[1];
            var x = (cvt_mat_t.get(0,0)[0] * u +
                     cvt_mat_t.get(0,1)[0] * v + 
                     cvt_mat_t.get(0,2)[0]) /
                    (cvt_mat_t.get(2,0)[0] * u + 
                     cvt_mat_t.get(2,1)[0] * v + 
                     cvt_mat_t.get(2,2)[0]);
            
            var y = (cvt_mat_t.get(1,0)[0] * u + 
                     cvt_mat_t.get(1,1)[0] * v + 
                     cvt_mat_t.get(1,2)[0]) /
                    (cvt_mat_t.get(2,0)[0] * u + 
                     cvt_mat_t.get(2,1)[0] * v + 
                     cvt_mat_t.get(2,2)[0]);

            desPos.x = (float) x;
            desPos.y = (float) y;
            return desPos;
        }