  • SURF feature-based matching (Emgu CV)
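    The helper below follows Emgu CV's (the .NET wrapper for OpenCV) SURF feature-matching sample: it detects SURF keypoints and descriptors in a model image and an observed image, matches them with a brute-force k-nearest-neighbour matcher, filters the matches by uniqueness and by scale/rotation consistency, and finally estimates a homography that locates the model inside the observed image. The images are wrapped in UMat so OpenCV can transparently use OpenCL acceleration where available. The required namespaces are not shown in the original post; a minimal set of using directives, assuming Emgu CV 3.x or later with the xfeatures2d contrib module, would be roughly:

        using System;
        using System.Diagnostics;      // Stopwatch
        using System.Drawing;          // Point, PointF, Rectangle
        using Emgu.CV;
        using Emgu.CV.CvEnum;          // AccessType, DepthType
        using Emgu.CV.Features2D;      // BFMatcher, DistanceType, Features2DToolbox
        using Emgu.CV.Structure;       // MCvScalar
        using Emgu.CV.Util;            // VectorOfKeyPoint, VectorOfVectorOfDMatch, VectorOfPoint
        using Emgu.CV.XFeatures2D;     // SURF (contrib module)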

     public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
            {
                int k = 2;
                double uniquenessThreshold = 0.8;
                double hessianThresh = 300;
    
                Stopwatch watch;
                homography = null;
    
                modelKeyPoints = new VectorOfKeyPoint();
                observedKeyPoints = new VectorOfKeyPoint();
                using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh);
                    // extract features from the model (object) image
                    UMat modelDescriptors = new UMat();
                    surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    
                    watch = Stopwatch.StartNew();
    
                    // extract features from the observed image
                    UMat observedDescriptors = new UMat();
                    surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    // match each observed descriptor to its k = 2 nearest model descriptors
                    // using a brute-force matcher with L2 (Euclidean) distance
                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    // start with every match enabled, then disable ambiguous ones via the uniqueness (ratio) test
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
    
                    // a homography needs at least 4 surviving correspondences
                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        // drop matches whose keypoint scale/rotation disagree with the consensus
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                           matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                            // estimate the model-to-observed homography from the remaining matches
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                               observedKeyPoints, matches, mask, 2);
                    }
    
                    watch.Stop();
    
                }
                matchTime = watch.ElapsedMilliseconds;
            }
    
            /// <summary>
            /// Draw the model image and observed image, the matched features and homography projection.
            /// </summary>
            /// <param name="modelImage">The model image</param>
            /// <param name="observedImage">The observed image</param>
            /// <param name="matchTime">The output total time for computing the homography matrix.</param>
            /// <returns>The model image and observed image, the matched features and homography projection.</returns>
            public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
            {
                Mat homography;
                VectorOfKeyPoint modelKeyPoints;
                VectorOfKeyPoint observedKeyPoints;
                using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                {
                    Mat mask;
                    FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                       out mask, out homography);
    
                    //Draw the matched keypoints
                    Mat result = new Mat();
                    Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                       matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);
    
                    #region draw the projected region on the image
    
                    if (homography != null)
                    {
                        //draw a rectangle along the projected model
                        Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                        PointF[] pts = new PointF[]
                        {
                      new PointF(rect.Left, rect.Bottom),
                      new PointF(rect.Right, rect.Bottom),
                      new PointF(rect.Right, rect.Top),
                      new PointF(rect.Left, rect.Top)
                        };
                        pts = CvInvoke.PerspectiveTransform(pts, homography);
    
                        Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
                        using (VectorOfPoint vp = new VectorOfPoint(points))
                        {
                            CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                        }
    
                    }
    
                    #endregion
    
                    return result;
    
                }
            }
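
    A minimal usage sketch, assuming the two static methods above live in a class named DrawMatches (the name used in Emgu CV's sample) and that "box.png" / "box_in_scene.png" are placeholder image paths; note that the ImreadModes enum is called LoadImageType in some older Emgu CV 3.x releases:

        // load the model (template) image and the observed (scene) image as grayscale
        Mat modelImage = CvInvoke.Imread("box.png", ImreadModes.Grayscale);
        Mat observedImage = CvInvoke.Imread("box_in_scene.png", ImreadModes.Grayscale);

        // detect, match, estimate the homography and draw everything in one call
        long matchTime;
        Mat result = DrawMatches.Draw(modelImage, observedImage, out matchTime);

        CvInvoke.Imshow("SURF matches (" + matchTime + " ms)", result);
        CvInvoke.WaitKey(0);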
  • Original post: https://www.cnblogs.com/daxiongblog/p/5729957.html