// C# image geometric feature matching
//
// 2023-11-18

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenCvSharp;

namespace TargetMeasureObject
{
    class GeometricProcessor:NormalProcessor
    {

        /// <summary>
        /// Computes the orientation (in degrees, mapped into 0..360) of a contour's
        /// width direction. The direction of the longer edge of the minimum-area
        /// rectangle gives the base angle; triangle areas formed with the contour's
        /// barycenter resolve the 180-degree ambiguity.
        /// </summary>
        /// <param name="cnt">Contour points.</param>
        /// <returns>The disambiguated orientation angle in degrees.</returns>
        static public double ContourBaryCenterOrientationGet(Point[] cnt)
        {
            var minAreaRect = Cv2.MinAreaRect(cnt);
            Point2f[] minAreaRectPts = Cv2.BoxPoints(minAreaRect);
            var M = Cv2.Moments(cnt);
            // Barycenter from image moments (M00 is the contour area).
            Point baryCenter = new Point((int)(M.M10 / M.M00), (int)(M.M01 / M.M00));

            Func<Point2f, Point2f, double> PpDistance = (p1, p2) =>
            {
                return Math.Sqrt(Math.Pow((p1.X - p2.X), 2) + Math.Pow((p1.Y - p2.Y), 2));
            };
            Func<Point2f, Point2f, Point2f, Point2f, double> PpWidAngle = (p0, p1, p2, p3) =>
            {
                // Hoist the two adjacent edge lengths; they were recomputed up to
                // four times in the original code.
                double d01 = PpDistance(p0, p1);
                double d12 = PpDistance(p1, p2);
                // p1 is always the pivot ("down") point: the original contained a
                // dead `pd = p0` assignment that was immediately overwritten, and a
                // degenerate ternary whose both branches yielded p1.
                Point2f pd = p1;
                // The "up" point is the far end of the longer adjacent edge.
                Point2f pu = (d01 > d12) ? p0 : p2;
                Point2f pRemain = pu == p0 ? p2 : p0;
                // Triangle areas against the barycenter tell which half-plane holds
                // the contour's mass, resolving the 180-degree ambiguity below.
                double areaDown = Cv2.ContourArea(new Point[] { (Point)pd, (Point)pRemain, baryCenter });
                double areaUp = Cv2.ContourArea(new Point[] { (Point)pu, (Point)p3, baryCenter });
                double angle = Math.Atan((pu.Y - pd.Y) / (pu.X - pd.X)) * (180 / Math.PI);
                if ((int)d01 != (int)d12)
                {
                    if (angle == 0 || angle == 90)
                    {
                        // Axis-aligned edge: flip by 180 degrees when the mass lies "up".
                        angle = angle == 0 ? areaDown >= areaUp ? 0 : 180 : areaDown >= areaUp ? 90 : 270;
                    }
                    else
                    {
                        if (areaDown >= areaUp)
                        {
                            if (angle > 0 && angle < 90)
                            {
                                angle = 180 - angle;
                            }
                            else if (angle < 0)
                            {
                                angle = Math.Abs(angle);
                            }
                        }
                        else
                        {
                            if (angle > 0 && angle < 90)
                            {
                                angle = 360 - angle;
                            }
                            else if (angle < 0 && angle > -90)
                            {
                                angle = Math.Abs(angle) + 180;
                            }
                        }
                    }
                }
                else
                {
                    // Near-square rectangle (edge lengths equal to the pixel):
                    // orientation is ambiguous, collapse 90 to 0.
                    angle = angle == 90 ? 0 : angle;
                }
                return angle;
            };
            return PpWidAngle(minAreaRectPts[0], minAreaRectPts[1], minAreaRectPts[2], minAreaRectPts[3]);
        }
        /// <summary>
        /// Gets the orientation of the contour's maximum width.
        /// NOTE(review): the entire implementation is commented out, so this method
        /// is currently a no-op. The working logic now lives in
        /// ContourBaryCenterOrientationGet; consider deleting this method or
        /// restoring the implementation below.
        /// </summary>
        /// <param name="minAreaRectPts">BoxPoints of the contour's minimum-area rectangle.</param>
        static public void ContourWidthOrientionGet(Point2f[] minAreaRectPts)
        {
            // Original (disabled) implementation, kept for reference:
            //Func<Point2f, Point2f, double> PpDistance = (p1, p2) =>
            //{
            //    return Math.Sqrt(Math.Pow((p1.X - p2.X), 2) + Math.Pow((p1.Y - p2.Y), 2));
            //};
            //Func<Point2f, Point2f, Point2f, double> PpWidAngle = (p0, p1, p2) =>
            //{
            //    double angle = double.NaN;
            //    Point2f pd = new Point2f();
            //    Point2f pu = new Point2f();
            //    if (PpDistance(p0, p1) > PpDistance(p1, p2))
            //    {
            //        pd = p0;
            //    }
            //    pd = (PpDistance(p0, p1) > PpDistance(p1, p2)) ? p1 : p1;
            //    pu = (PpDistance(p0, p1) > PpDistance(p1, p2)) ? p0 : p2;
            //    angle = Math.Atan((pu.Y - pd.Y) / (pu.X - pd.X)) * (180 / Math.PI);
            //    return angle;
            //};
            //return PpWidAngle(minAreaRectPts[0], minAreaRectPts[1], minAreaRectPts[2]);
        }
        /// <summary>
        /// Rotates an image so that its contour-width orientation matches the
        /// template's, using the difference of the two recorded orientation angles.
        /// </summary>
        /// <param name="pattern">Geometric data of the template image.</param>
        /// <param name="other">Geometric data of the image being aligned.</param>
        /// <param name="deltImg">Image to rotate; the caller's Mat is not modified.</param>
        /// <param name="show">When true, displays the original and rotated images merged.</param>
        /// <returns>The rotated image.</returns>
        static public Mat ContourWidthOrientionRotation(ImageGeometricData pattern, ImageGeometricData other, Mat deltImg, bool show = false)
        {
            double angleDiff = pattern.ContourWidthOriention - other.ContourWidthOriention;
            // Rotate a clone about the image center by the negative angle difference.
            Point2f center = new Point2f(deltImg.Width / 2, deltImg.Height / 2);
            Mat rotated = ImageRotate(deltImg.Clone(), center, -angleDiff);
            if (show)
            {
                ImageShow("ContourWidthOrientionDealShowing", ImagesMerge(new Mat[] { deltImg }, rotated));
            }
            return rotated;
        }
        /// <summary>
        /// Uniformly rescales a binary image so its main blob's bounding rectangle
        /// approaches the template's contour bounding rectangle, scaling by the
        /// ratio along whichever axis shows the larger absolute size difference.
        /// </summary>
        /// <param name="pattern">Template geometric data providing the reference rectangle.</param>
        /// <param name="deltImg">Binary image to stretch; the caller's Mat is not modified.</param>
        /// <param name="show">When true, displays the before/after images merged.</param>
        /// <returns>The resized image.</returns>
        static public Mat ContourSizeStretch(ImageGeometricData pattern, Mat deltImg, bool show = false)
        {
            Mat resized = deltImg.Clone();
            Rect blobRect = BlobProcessor.ImageConnectedFieldSegment(resized, show: false).ConnectedFieldDatas.FieldRects[0];
            int widthGap = Math.Abs(pattern.ContourBoundRect.Width - blobRect.Width);
            int heightGap = Math.Abs(pattern.ContourBoundRect.Height - blobRect.Height);
            // Pick the scaling ratio from the axis with the larger gap.
            double scale = widthGap > heightGap
                ? (double)pattern.ContourBoundRect.Width / (double)blobRect.Width
                : (double)pattern.ContourBoundRect.Height / (double)blobRect.Height;
            Cv2.Resize(resized, resized, new Size(0, 0), scale, scale, InterpolationFlags.Cubic);
            if (show)
            {
                ImageShow("ContourSizeDealShowing", ImagesMerge(new Mat[] { deltImg }, resized));
            }
            return resized;
        }
        /// <summary>
        /// Calculates geometric-feature match scores between two same-scale contours.
        /// Contour count carries the highest weight (130), external point count the
        /// lowest (70); all other criteria are weighted 100.
        /// Mechanism: the switch forms a goto-case chain. When matchType is ALL,
        /// the ALL case sets `frog` and jumps to case 0; each criterion case then
        /// adds its score to scoreSum and jumps to the next case, and control finally
        /// returns to ALL (with `flag` cleared) to divide by the criterion count.
        /// NOTE(review): the averaging assumes the integer value of
        /// GEOMETRIC_MATCH_TYPE.ALL equals the number of criteria and that the
        /// criteria enum values start at 0 — verify against the enum declaration.
        /// </summary>
        /// <param name="geo1">Geometric data of the first contour.</param>
        /// <param name="geo2">Geometric data of the second contour.</param>
        /// <param name="matchType">Single criterion to score, or ALL for the weighted average.</param>
        /// <returns>The populated score object (AnverageScore only set for ALL).</returns>
        static public ImageGeometricScore GeometricsScoreCalc(ImageGeometricData geo1, ImageGeometricData geo2, ENUMS.GEOMETRIC_MATCH_TYPE matchType)
        {
            ImageGeometricScore gScore = new ImageGeometricScore();
            double score = 0;
            double scoreSum = 0;
            // frog: true while walking the full goto-chain triggered by ALL.
            bool frog = false;
            // flag: distinguishes the first entry into the ALL case from the final one.
            bool flag = matchType is ENUMS.GEOMETRIC_MATCH_TYPE.ALL;
            switch (matchType)
            {
                case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_AREA:
                    {
                        // Ratio of smaller area to larger, scaled to 0..100.
                        score = (geo1.ContourArea > geo2.ContourArea) ? geo2.ContourArea / geo1.ContourArea : geo1.ContourArea / geo2.ContourArea;
                        gScore.ContourAreaScore=score *= 100;
                        if (frog)
                        {
                            scoreSum += score;
                            goto case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_LENGTH;
                        }
                        break;
                    }
                case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_LENGTH:
                    {
                        score = (geo1.ContourLength > geo2.ContourLength) ? geo2.ContourLength / geo1.ContourLength : geo1.ContourLength / geo2.ContourLength;
                        gScore.ContourLengthScore = score *= 100;
                        if (frog)
                        {
                            scoreSum += score;
                            goto case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_AREA_LEN_RATIO;
                        }
                        break;
                    }
                case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_AREA_LEN_RATIO:
                    {
                        score = (geo1.ContourALRatio > geo2.ContourALRatio) ? geo2.ContourALRatio / geo1.ContourALRatio : geo1.ContourALRatio / geo2.ContourALRatio;
                        gScore.ContourALRatioScore = score *= 100;
                        if (frog)
                        {
                            scoreSum += score;
                            goto case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_NUMBER;
                        }
                        break;
                    }
                case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_NUMBER:
                    {
                        // When the contour counts differ, the score is 0.
                        // Highest weight (130): contour count is the strongest signal.
                        score = (geo1.ContourNumber != geo2.ContourNumber) ? 0: 1;
                        gScore.ContourNumberScore = score *= 130;
                        if (frog)
                        {
                            scoreSum += score;
                            goto case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_OUT_POINT;
                        }
                        break;
                    }
                case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_OUT_POINT:
                    {
                        /* Testing showed that when image processing quality is poor the
                           external contour point count contributes little to detection,
                           so its weight is reduced (70). */
                        score = (geo1.ContourOutPtsNumber > geo2.ContourOutPtsNumber) ? ((double)geo2.ContourOutPtsNumber / (double)geo1.ContourOutPtsNumber) : ((double)geo1.ContourOutPtsNumber / (double)geo2.ContourOutPtsNumber);
                        gScore.ContourOutPtsNumberScore = score *= 70;
                        if (frog)
                        {
                            scoreSum += score;
                            goto case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_MINAREARL_LEN_RATIO;
                        }
                        break;
                    }
                case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_MINAREARL_LEN_RATIO:
                    {
                        score = (geo1.ContourLMinRectRatio > geo2.ContourLMinRectRatio) ? geo2.ContourLMinRectRatio / geo1.ContourLMinRectRatio : geo1.ContourLMinRectRatio / geo2.ContourLMinRectRatio;
                        gScore.ContourLMinRectRatioScore = score *= 100;
                        if (frog)
                        {
                            scoreSum += score;
                            goto case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_MINAREARC_AREA_RATIO;
                        }
                        break;
                    }

                case ENUMS.GEOMETRIC_MATCH_TYPE.CONTOUR_MINAREARC_AREA_RATIO:
                    {
                        score = (geo1.ContourAMinCircleRatio > geo2.ContourAMinCircleRatio) ? geo2.ContourAMinCircleRatio / geo1.ContourAMinCircleRatio : geo1.ContourAMinCircleRatio / geo2.ContourAMinCircleRatio;
                        gScore.ContourAMinCircleRatioScore = score *= 100;
                        if (frog)
                        {
                            scoreSum += score;
                            // Clear flag so the ALL case now averages instead of re-entering the chain.
                            flag = false;
                            goto case ENUMS.GEOMETRIC_MATCH_TYPE.ALL;
                        }
                        break;
                    }

                case ENUMS.GEOMETRIC_MATCH_TYPE.ALL:
                    {
                        if (flag)
                        {
                            // First entry: start the accumulation chain at the first criterion.
                            frog = true;
                            goto case (ENUMS.GEOMETRIC_MATCH_TYPE)0;
                        }
                        // Final entry: average over the criterion count.
                        scoreSum /= (double)(ENUMS.GEOMETRIC_MATCH_TYPE.ALL);
                        gScore.AnverageScore = scoreSum;
                        break;
                    }
            }
            return gScore;
        }
        /// <summary>
        /// Extracts the geometric feature set of the largest external contour of a
        /// grayscale image (binarized internally), plus statistics over all valid
        /// contours found in the image.
        /// </summary>
        /// <param name="img">Input image; a clone is processed, the original is untouched.</param>
        /// <param name="thresh">Binarization threshold.</param>
        /// <param name="show">When true, displays the external contours drawn on the binary image.</param>
        /// <returns>Populated <see cref="ImageGeometricData"/>.</returns>
        static public ImageGeometricData GeometricInformGet(Mat img, double thresh = 120, bool show = true)
        {
            ImageGeometricData ImageGeometricData = new ImageGeometricData();
            using (var uImg = img.Clone())
            {
                Cv2.Threshold(uImg, uImg, thresh, 255, ThresholdTypes.Binary);
                Cv2.FindContours(uImg, out Point[][] cnts, out HierarchyIndex[] hIndexs, RetrievalModes.External, ContourApproximationModes.ApproxSimple);
                /* Sort so cnts[0] is the largest external contour (by area). */
                cnts = cnts.OrderByDescending(cnt => Cv2.ContourArea(cnt)).ToArray();
                double area = Cv2.ContourArea(cnts[0]);
                double length = Cv2.ArcLength(cnts[0], true);
                double ratio = area / length;
                double epsilon = 2; // polygon-approximation tolerance (pixels)
                Cv2.MinEnclosingCircle(cnts[0], out Point2f center, out float radius);
                double encloseCircleArea = Math.PI * (Math.Pow(radius, 2));
                Rect boundRect = Cv2.BoundingRect(cnts[0]);
                RotatedRect minARect = Cv2.MinAreaRect(cnts[0]);
                /* Orientation of the contour's width direction. */
                double widOtiention = ContourBaryCenterOrientationGet(cnts[0]);
                /* Perimeter of the minimum-area rectangle. */
                double minARectLen = (minARect.Size.Width + minARect.Size.Height) * 2;
                Point[] approxCnts = Cv2.ApproxPolyDP(cnts[0], epsilon, true);
                int ptsnum = approxCnts.Length;
                /* Keep only valid contours: more than 2 points and area > 5. */
                Cv2.FindContours(uImg, out Point[][] aCnts, out HierarchyIndex[] nIndexs, RetrievalModes.List, ContourApproximationModes.ApproxSimple);
                List<Point[]> nCnts = new List<Point[]>();
                for (int i = 0; i < aCnts.Length; i++)
                {
                    double _area = Math.Abs(Cv2.ContourArea(aCnts[i]));
                    if (aCnts[i].Length > 2 && _area > 5)
                    {
                        nCnts.Add(aCnts[i]);
                    }
                }
                int cntsNum = nCnts.Count;
                ImageGeometricData.ContourArea = area;
                ImageGeometricData.ContourLength = length;
                ImageGeometricData.ContourALRatio = ratio;
                // A contour covering more than 90% of its minimum enclosing circle is
                // treated as a circle: its point count is forced to int.MaxValue.
                // (Fix: the circle area was previously computed twice with the same formula.)
                ImageGeometricData.ContourOutPtsNumber = area / encloseCircleArea > 0.90 ? int.MaxValue : ptsnum;
                ImageGeometricData.ContourNumber = cntsNum;
                ImageGeometricData.ContourWidthOriention = widOtiention;
                ImageGeometricData.ContourTotalPoints = nCnts.ToArray();
                ImageGeometricData.ContourExternalPoints = cnts;
                ImageGeometricData.ContourBoundRect = boundRect;
                ImageGeometricData.ContourMinAreaRectWHRation = minARect.Size.Width > minARect.Size.Height ? (double)minARect.Size.Height / (double)minARect.Size.Width : (double)minARect.Size.Width / (double)minARect.Size.Height;
                ImageGeometricData.ContourMinAreaRect = minARect;
                ImageGeometricData.ContourLMinRectRatio = (minARectLen > length) ? length / minARectLen : minARectLen / length;
                ImageGeometricData.ContourAMinCircleRatio = (area > encloseCircleArea) ? encloseCircleArea / area : area / encloseCircleArea;
                ImageGeometricData.ContourApproxPoints = approxCnts;
                if (show)
                {
                    Mat _showImg = uImg.Clone();
                    Cv2.Merge(new Mat[] { uImg, uImg, uImg }, _showImg);
                    Cv2.DrawContours(_showImg, cnts, -1, Scalar.Blue, thickness: 2);
                    ImageShow("GeometricInformGet", _showImg);
                }
                return ImageGeometricData;
            }
        }
        /// <summary>
        /// Extracts the geometric feature set from a set of contours, using the
        /// largest contour (by area) as the primary shape.
        /// </summary>
        /// <param name="contour">Contours to analyse.</param>
        /// <param name="show">When true, draws the contours on a black canvas sized to the bounding rectangle.</param>
        /// <returns>Populated <see cref="ImageGeometricData"/>.</returns>
        static public ImageGeometricData GeometricInformGet(Point[][] contour, bool show = true)
        {
            ImageGeometricData ImageGeometricData = new ImageGeometricData();
            // Sort so contour[0] is the largest contour by area.
            contour = contour.OrderByDescending(cnt => Cv2.ContourArea(cnt)).ToArray();
            double area = Cv2.ContourArea(contour[0]);
            double length = Cv2.ArcLength(contour[0], true);
            double ratio = area / length;
            double epsilon = 2; // polygon-approximation tolerance (pixels)
            Cv2.MinEnclosingCircle(contour[0], out Point2f center, out float radius);
            double encloseCircleArea = Math.PI * (Math.Pow(radius, 2));
            Rect boundRect = Cv2.BoundingRect(contour[0]);
            RotatedRect minARect = Cv2.MinAreaRect(contour[0]);
            /* Orientation of the contour's width direction. */
            double widOtiention = ContourBaryCenterOrientationGet(contour[0]);
            /* Perimeter of the minimum-area rectangle. */
            double minARectLen = (minARect.Size.Width + minARect.Size.Height) * 2;
            Point[] approxCnts = Cv2.ApproxPolyDP(contour[0], epsilon, true);
            int ptsnum = approxCnts.Length;
            /* Keep only valid contours: more than 2 points and area > 5. */
            List<Point[]> nCnts = new List<Point[]>();
            for (int i = 0; i < contour.Length; i++)
            {
                double _area = Math.Abs(Cv2.ContourArea(contour[i]));
                if (contour[i].Length > 2 && _area > 5)
                {
                    nCnts.Add(contour[i]);
                }
            }
            int cntsNum = nCnts.Count;
            ImageGeometricData.ContourArea = area;
            ImageGeometricData.ContourLength = length;
            ImageGeometricData.ContourALRatio = ratio;
            // A contour covering more than 90% of its minimum enclosing circle is
            // treated as a circle: its point count is forced to int.MaxValue.
            // (Fix: the circle area was previously recomputed with the same formula.)
            ImageGeometricData.ContourOutPtsNumber = area / encloseCircleArea > 0.90 ? int.MaxValue : ptsnum;
            ImageGeometricData.ContourNumber = cntsNum;
            ImageGeometricData.ContourWidthOriention = widOtiention;
            ImageGeometricData.ContourTotalPoints = nCnts.ToArray();
            // NOTE: unlike the Mat overload, only the largest contour is recorded here.
            ImageGeometricData.ContourExternalPoints = new Point[][] { contour[0] };
            ImageGeometricData.ContourBoundRect = boundRect;
            ImageGeometricData.ContourMinAreaRectWHRation = minARect.Size.Width > minARect.Size.Height ? (double)minARect.Size.Height / (double)minARect.Size.Width : (double)minARect.Size.Width / (double)minARect.Size.Height;
            ImageGeometricData.ContourMinAreaRect = minARect;
            ImageGeometricData.ContourLMinRectRatio = (minARectLen > length) ? length / minARectLen : minARectLen / length;
            ImageGeometricData.ContourAMinCircleRatio = (area > encloseCircleArea) ? encloseCircleArea / area : area / encloseCircleArea;
            ImageGeometricData.ContourApproxPoints = approxCnts;
            if (show)
            {
                Mat _showImg = new Mat(boundRect.Size, MatType.CV_8UC3, Scalar.Black);
                Cv2.DrawContours(_showImg, contour, -1, Scalar.Blue, thickness: 2);
                ImageShow("GeometricInformGet", _showImg);
            }
            return ImageGeometricData;
        }
        /// <summary>
        /// Computes the geometric match score between a pattern image and a train
        /// image: binarizes both, aligns the train image's orientation and scale
        /// when the shapes are elongated and misaligned, then scores their
        /// geometric features via GeometricsScoreCalc.
        /// </summary>
        /// <param name="patternImg">Template image.</param>
        /// <param name="trainImg">Image to compare against the template.</param>
        /// <param name="thresh">Binarization threshold.</param>
        /// <param name="matchType">Criterion (or ALL) passed to GeometricsScoreCalc.</param>
        /// <param name="show">When true, displays both contoured images side by side.</param>
        /// <returns>The score object, or null when channel counts differ.</returns>
        static public ImageGeometricScore ImageGeometricMatcher(Mat patternImg,Mat trainImg,double thresh=110, ENUMS.GEOMETRIC_MATCH_TYPE matchType=ENUMS.GEOMETRIC_MATCH_TYPE.ALL,bool show=false)
        {
            ImageGeometricScore result = new ImageGeometricScore();
            // Guard: images must have matching channel counts.
            if (patternImg.Channels() != trainImg.Channels())
            {
                ImageWarningShow("GeometricMatcher:Channel xx");
                return null;
            }
            Mat _pImg = patternImg.Clone();
            Mat _tImg = trainImg.Clone();

            Cv2.Threshold(_pImg, _pImg, thresh, 255, ThresholdTypes.Binary);
            Cv2.Threshold(_tImg, _tImg, thresh, 255, ThresholdTypes.Binary);

            ImageGeometricData geo1 = GeometricInformGet(_pImg, show: false);
            ImageGeometricData geo2 = GeometricInformGet(_tImg, show: false);
            // Both shapes elongated (W/H ratio < 0.8) with > 5 degrees of
            // orientation difference: align before scoring.
            if (geo1.ContourMinAreaRectWHRation < 0.8 &&
                geo2.ContourMinAreaRectWHRation < 0.8
                && Math.Abs(geo1.ContourWidthOriention - geo2.ContourWidthOriention) > 5)
            {
                /* Elongated template: when template and train orientations differ,
                   the geometric result is strongly affected — rotate the train image
                   to the template's orientation, re-crop its main blob and stretch
                   it to the template's contour size. */
                _tImg = ContourWidthOrientionRotation(geo1, geo2, _tImg, show: false);
                ImageBlobData tempbbData = BlobProcessor.ImageConnectedFieldSegment(_tImg, show: false);
                _tImg = (Mat)tempbbData.ConnectedFieldDatas.SegmentedSubRegionImage[0].M2;
                _tImg = ContourSizeStretch(geo1, _tImg, show: false);
                ImagesMerge(new Mat[] { _pImg }, _tImg, show: false);
            }
            else
            {
                /* Small aspect ratio and small orientation difference: directly
                   stretch the binary images to the smaller image's size. */
                ImageCoupleStretch(ref _pImg, ref _tImg, _pImg.Size() != _tImg.Size(), false, _show: false);
            }
            /* Resizing introduces slight differences, so re-extract the features. */
            geo1 = GeometricInformGet(_pImg, show: false);
            geo2 = GeometricInformGet(_tImg, show: false);
            /* When template and train orientations differ, test results are strongly affected. */

            /* Contour shape-feature matching — results were poor, so it is disabled here. */
            {
                //var score1 = Cv2.MatchShapes(geo1.ContourExternalPoints[0], geo2.ContourExternalPoints[0], ShapeMatchModes.I1);
                //var score2 = Cv2.MatchShapes(geo1.ContourExternalPoints[0], geo2.ContourExternalPoints[0], ShapeMatchModes.I2);
                //var score3 = Cv2.MatchShapes(geo1.ContourExternalPoints[0], geo2.ContourExternalPoints[0], ShapeMatchModes.I3);
                //Mat _pCntImg = new Mat();
                //Mat _tCntImg = new Mat();
                //for (int i = 0; i < geo1.ContourExternalPoints[0].Length; i++)
                //{
                //    _pCntImg.PushBack(geo1.ContourExternalPoints[0][i]);
                //}
                //for (int i = 0; i < geo2.ContourExternalPoints[0].Length; i++)
                //{
                //    _tCntImg.PushBack(geo2.ContourExternalPoints[0][i]);
                //}
                //var extractor = ShapeContextDistanceExtractor.Create();
                //float escore1 = extractor.ComputeDistance(_pCntImg, _tCntImg);
                //var hauExtractor = HausdorffDistanceExtractor.Create();
                //float hauScore = hauExtractor.ComputeDistance(_pCntImg, _tCntImg);
            }
            /* End of contour shape-feature matching. */

            result = GeometricsScoreCalc(geo1, geo2, matchType);
            if (show)
            {
                Cv2.Merge(new Mat[] { _pImg, _pImg, _pImg }, _pImg);
                Cv2.Merge(new Mat[] { _tImg, _tImg, _tImg }, _tImg);
                Cv2.DrawContours(_pImg, geo1.ContourTotalPoints, -1, Scalar.Blue, thickness: 2);
                Cv2.DrawContours(_tImg, geo2.ContourTotalPoints, -1, Scalar.Green, thickness: 2);
                ImagesMerge(new Mat[] { _pImg, _tImg }, null, ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL, show:show);
            }
            return result;
        }
        /// <summary>
        /// Greedily groups images by geometric similarity: the first unassigned
        /// image seeds a group, and every remaining image whose geometric match
        /// score against that seed exceeds the threshold joins it. Each group is
        /// sorted by image area (ascending).
        /// </summary>
        /// <param name="pairs">Pairs whose M2 holds the image (Mat); images are cloned, not mutated.</param>
        /// <param name="qualifiedScore">Minimum average score required to join a group.</param>
        /// <param name="show">When true, shows all groups merged into one mosaic.</param>
        /// <returns>A List&lt;List&lt;Pair&gt;&gt; of groups (boxed as object).</returns>
        static public object ImageGeometricClassified(List<Pair> pairs, double qualifiedScore = 90, bool show = false)
        {
            // Work on clones so the caller's Mats are never modified.
            List<Pair> remaining = new List<Pair>();
            foreach (Pair p in pairs)
            {
                remaining.Add(new Pair(p.M1, ((Mat)p.M2).Clone()));
            }
            List<List<Pair>> groups = new List<List<Pair>>();
            while (remaining.Count > 0)
            {
                // Seed a new group with the first unassigned image...
                List<Pair> group = new List<Pair> { remaining[0] };
                remaining.RemoveAt(0);
                // ...then pull in every remaining image that matches the seed.
                remaining.RemoveAll(candidate =>
                {
                    double score = ImageGeometricMatcher((Mat)group[0].M2, (Mat)candidate.M2, show: false).AnverageScore;
                    if (score > qualifiedScore)
                    {
                        group.Add(candidate);
                        return true;
                    }
                    return false;
                });
                groups.Add(group.OrderBy(pic => ((Mat)pic.M2).Width * ((Mat)pic.M2).Height).ToList());
            }
            if (show)
            {
                Mat mosaic = new Mat();
                foreach (List<Pair> group in groups)
                {
                    Mat row = ImagesMerge(PairsMatQueues(group), null, ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL, ENUMS.PLACE_ORIENTATION.LEFT);
                    mosaic = ImagesMerge(new Mat[] { row }, mosaic, placeOiren: ENUMS.PLACE_ORIENTATION.DOWN);
                }
                ImageShow("ImagesHistgramClassified", mosaic);
            }
            return groups;
        }
        /// <summary>
        /// Full geometric matching pipeline: strips the background from both images
        /// via histogram features, segments their connected fields, classifies the
        /// template's sub-images into geometric groups, then matches every detected
        /// sub-image against each group's representative and annotates qualifying
        /// matches on the output image.
        /// </summary>
        /// <param name="patImg">Template image.</param>
        /// <param name="detImg">Image to search.</param>
        /// <param name="qualifiedScore">Minimum average score to accept a match.</param>
        /// <param name="show">When true, displays the annotated result with the template groups attached.</param>
        /// <returns>A List&lt;ImageGeometricMatchResult&gt; (boxed as object).</returns>
        static public object ImageGeometricMatchTool(Mat patImg, Mat detImg, double qualifiedScore = 90, bool show = false)
        {
            List<ImageGeometricMatchResult> result = new List<ImageGeometricMatchResult>();
            Mat detectImg = detImg.Clone(), patternImg = patImg.Clone();
            Mat _sImg = detectImg.Clone();
            // Promote a grayscale display image to 3 channels for colored annotations.
            if (_sImg.Type() == MatType.CV_8UC1)
            {
                Cv2.Merge(new Mat[] { _sImg, _sImg, _sImg }, _sImg);
            }
            ImageBlobData patBbData = new ImageBlobData();
            ImageBlobData detBbData = new ImageBlobData();
            /* Histogram data: preparation for background removal. */
            patBbData = BlobProcessor.ImageHisgramDataGet(patternImg);
            /* Remove the image background using its histogram features. */
            BlobProcessor.ImageBackgroundClear(patternImg, ref patBbData, blurSize: 5, thresh: 100);
            detBbData = BlobProcessor.ImageHisgramDataGet(detectImg);
            BlobProcessor.ImageBackgroundClear(detectImg, ref detBbData, blurSize: 5, thresh: 100);
            /* Connected-field information: preparation for image segmentation. */
            BlobProcessor.ImageConnectedFieldSegment(detectImg, ref detBbData, detBbData.HistgramPeakPixel[0] * 2.3, show: true);
            BlobProcessor.ImageConnectedFieldSegment(patternImg, ref patBbData, patBbData.HistgramPeakPixel[0] * 2.3, show: true);
            Dictionary<int, Scalar> colorPool = MatchColorMaker(patBbData.ConnectedFieldDatas.SegmentedSubRegionImage.Count);
            /* Classify the template's sub-images into geometric groups. */
            var patGroups = (List<List<Pair>>)GeometricProcessor.ImageGeometricClassified(patBbData.ConnectedFieldDatas.SegmentedSubRegionImage, show: false);
            for (int i = 0; i < detBbData.ConnectedFieldDatas.SegmentedSubRegionImage.Count; i++)
            {
                Mat _dImg = ((Mat)detBbData.ConnectedFieldDatas.SegmentedSubRegionImage[i].M2).Clone();
                Rect _dRect = detBbData.ConnectedFieldDatas.FieldRects[i];
                Rect _pRect = new Rect();
                ImageGeometricScore bestScore = new ImageGeometricScore();
                int bestID = -1;
                // Match this detected sub-image against each group's representative
                // (the first image of each group), keeping the best average score.
                // (Fix: removed a leftover empty debug branch `if (j==1 && i==2) {}`.)
                for (int j = 0; j < patGroups.Count; j++)
                {
                    Mat _pImg = ((Mat)patGroups[j][0].M2).Clone();
                    var score = ImageGeometricMatcher(_pImg, _dImg, show: true);
                    if (score.AnverageScore > bestScore.AnverageScore)
                    {
                        bestScore = score;
                        _pRect = patBbData.ConnectedFieldDatas.FieldRects[(int)patGroups[j][0].M1];
                        bestID = j;
                    }
                }
                if (bestScore.AnverageScore > qualifiedScore)
                {
                    ImageGeometricMatchResult _res = new ImageGeometricMatchResult();
                    _res.PatternID = bestID;
                    _res.MatchImages = new Pair(
                        ((Mat)patBbData.ConnectedFieldDatas.SegmentedSubRegionImage[(int)patGroups[bestID][0].M1].M2).Clone(),
                        ((Mat)detBbData.ConnectedFieldDatas.SegmentedSubRegionImage[i].M2).Clone());
                    _res.MatchPosition = new Pair(_pRect, _dRect);
                    _res.MatchScore = bestScore;
                    result.Add(_res);
                    // Annotate the accepted match on the display image.
                    _sImg.Rectangle(_dRect, colorPool[bestID], 2);
                    _sImg = ImageAnnotation(_sImg,
                        new List<List<string>>() { new List<string>() { "ModelID:" + bestID.ToString(),
                            "GeoScore:" + bestScore.AnverageScore.ToString("0.00"), } },
                        new List<Point>() { new Point(_dRect.X, _dRect.Y) });
                }
            }
            if (show)
            {
                // Attach each group's representative image beside the annotated result.
                Mat _patAttach = new Mat();
                for (int i = 0; i < patGroups.Count; i++)
                {
                    _patAttach = ImagesMerge(new Mat[] { (Mat)patGroups[i][0].M2 }, _patAttach, placeOiren: ENUMS.PLACE_ORIENTATION.DOWN);
                }
                _sImg = ImagesMerge(new Mat[] { _patAttach }, _sImg);
                ImageShow("sss", _sImg);
            }
            return result;
        }
        
        static public List<ImageEdgePtInform> ImageEdgeInformationGet(Mat img,double minContrast,double maxContrast,ref Point relaOrgPoint,ref Size edgeSize)
        {
            Mat _uImg = img.Clone();
            if (_uImg.Type() == MatType.CV_8UC3)
            {
                Cv2.CvtColor(_uImg, _uImg, ColorConversionCodes.BGR2GRAY);
            }
            List<ImageEdgePtInform> validEdgeInforms = new List<ImageEdgePtInform>();
            int Height = _uImg.Rows;
            int Width = _uImg.Cols;
            int edgePtCount = 0;
            bool flag = true;
            Size validSize = new Size();
            Mat xMagnitude = _uImg.Sobel(MatType.CV_64FC1, 1, 0, 3);//double-幅度y
            Mat yMagnitude = _uImg.Sobel(MatType.CV_64FC1, 0, 1, 3);//double-幅度x
            Mat normMagnitude = Mat.Zeros(_uImg.Size(), MatType.CV_64FC1);//double
            Mat orgMagnitude = Mat.Zeros(_uImg.Size(), MatType.CV_64FC1);//double-幅度
            Mat orientation = Mat.Zeros(_uImg.Size(), MatType.CV_32FC1);//int-方向
            double maxMagnitude = double.MinValue;
            relaOrgPoint = new Point(int.MaxValue,int.MaxValue);
            unsafe
            {
                // ---- Stage 1: per-pixel gradient magnitude and quantized orientation of the template ----
                // (Collect template magnitude and angle.)
                for(int row = 0; row < Height; row++)
                {
                    double* _xRowPtr = (double*)xMagnitude.Ptr(row);
                    double* _yRowPtr = (double*)yMagnitude.Ptr(row);
                    double* _magnitudeRowPtr = (double*)orgMagnitude.Ptr(row);
                    int* _orientRowPtr = (int*)orientation.Ptr(row);
                    for (int col = 0; col < Width; col++)
                    {
                        double dx = _xRowPtr[col];
                        double dy = _yRowPtr[col];
                        double _mag = Math.Sqrt(dx * dx + dy * dy);
                        // Atan (not Atan2) maps the gradient direction into (-90, 90] degrees.
                        // When dx == dy == 0, dy/dx is NaN, every comparison below is false,
                        // and the final else assigns 0.
                        double _orient = Math.Atan(dy / dx) * (180.0 / Math.PI);
                        // Quantize into the four NMS directions: 0, 45, 90, -45 degrees.
                        if(_orient > -22.5&& _orient <= 22.5)
                        {
                            _orient = 0;
                        }
                        else if(_orient>22.5 && _orient <= 67.5)
                        {
                            _orient = 45;
                        }
                        else if(_orient>67.5 || _orient <= -67.5)
                        {
                            _orient = 90;
                        }
                        else if(_orient>-67.5 && _orient < -22.5)
                        {
                            _orient = -45;
                        }
                        else
                        {
                            // Unreachable for finite angles; catches the NaN (flat-gradient) case.
                            _orient = 0;
                        }
                        _magnitudeRowPtr[col] = _mag;
                        _orientRowPtr[col] = (int)_orient;
                        maxMagnitude = Math.Max(maxMagnitude, _mag);
                    }
                }
                // ---- Stage 2: non-maximum suppression of the template magnitude ----
                // normMagnitude starts as a copy of orgMagnitude; suppressed pixels are zeroed,
                // surviving pixels rescaled to 0..255 below.
                normMagnitude = orgMagnitude.Clone();
                for (int row = 0; row < Height; row++)
                {
                    int* _oriRowPtr = (int*)orientation.Ptr(row);
                    double* _magGrayRowPtr = (double*)normMagnitude.Ptr(row);
                    double* _magRowPtr = (double*)orgMagnitude.Ptr(row);
                    for (int col = 0; col < Width; col++)
                    {
                        int cenOrient = _oriRowPtr[col];
                        double cenMag = _magRowPtr[col];
                        // Magnitudes of the two neighbours along the quantized gradient direction;
                        // they stay 0 when a neighbour would fall outside the image.
                        double leftMag = 0;
                        double rightMag = 0;
                        switch (cenOrient)
                        {
                            case 0:
                                {
                                    // Horizontal gradient: compare against left/right neighbours.
                                    if(col-1>=0 && col + 1 < Width)
                                    {
                                        leftMag = ((double*)orgMagnitude.Ptr(row))[col - 1];
                                        rightMag = ((double*)orgMagnitude.Ptr(row))[col + 1];
                                    }
                                    break;
                                }
                            case 45:
                                {
                                    // Diagonal gradient: compare along the (row+1,col-1)/(row-1,col+1) diagonal.
                                    if(col-1>=0 && col+1 <Width && row-1>=0 && row + 1 < Height)
                                    {
                                        leftMag = ((double*)orgMagnitude.Ptr(row + 1))[col - 1];
                                        rightMag = ((double*)orgMagnitude.Ptr(row - 1))[col + 1];
                                    }
                                    break;
                                }
                            case 90:
                                {
                                    // Vertical gradient: compare against up/down neighbours.
                                    if (row - 1 >= 0 && row + 1 < Height)
                                    {
                                        leftMag = ((double*)orgMagnitude.Ptr(row - 1))[col];
                                        rightMag = ((double*)orgMagnitude.Ptr(row + 1))[col];
                                    }
                                    break;
                                }
                            case -45:
                                {
                                    // Anti-diagonal gradient: compare along the (row-1,col-1)/(row+1,col+1) diagonal.
                                    if (col - 1 >= 0 && col + 1 < Width && row - 1 >= 0 && row + 1 < Height)
                                    {
                                        leftMag = ((double*)orgMagnitude.Ptr(row - 1))[col - 1];
                                        rightMag = ((double*)orgMagnitude.Ptr(row + 1))[col + 1];
                                    }
                                    break;
                                }
                            default:
                                break;
                        }
                        // Suppress non-maxima; image-border pixels are always suppressed.
                        if(cenMag<leftMag || cenMag < rightMag || row==0 || row==Height-1 || col==0 || col==Width-1 )
                        {
                            _magGrayRowPtr[col] = 0;
                        }
                        else
                        {
                            // Survivors are normalized to a 0..255 "gray" magnitude for thresholding.
                            _magGrayRowPtr[col] = 255.0 * (cenMag / maxMagnitude);
                        }

                    }
                }
                // ---- Stage 3: hysteresis thresholding + edge-point harvesting ----
                // (Template hysteresis threshold.)
                for (int row = 0; row < Height; row++)
                {
                    double* magRowPtr = (double*)orgMagnitude.Ptr(row);
                    for (int col = 0; col < Width; col++)
                    {
                        // flag marks whether this pixel survives the hysteresis test below.
                        flag = true;
                        double grayMag = ((double*)normMagnitude.Ptr(row))[col];
                        if (grayMag < maxContrast)
                        {
                            if (grayMag < minContrast)
                            {
                                // Below the low threshold: definitely not an edge.
                                magRowPtr[col] = 0;
                                flag = false;
                            }
                            else
                            {
                                // Between the thresholds: keep only if at least one 8-neighbour
                                // is a strong edge (>= maxContrast).
                                if (col - 1 >= 0 && col + 1 < Width && row - 1 >= 0 && row + 1 < Height)
                                {
                                    if(((double*)normMagnitude.Ptr(row-1))[col-1]<maxContrast&&
                                        ((double*)normMagnitude.Ptr(row))[col-1] < maxContrast&&
                                        ((double*)normMagnitude.Ptr(row+1))[col-1] < maxContrast&&
                                        ((double*)normMagnitude.Ptr(row-1))[col] < maxContrast&&
                                        ((double*)normMagnitude.Ptr(row+1))[col] < maxContrast&&
                                        ((double*)normMagnitude.Ptr(row-1))[col+1] < maxContrast&&
                                        ((double*)normMagnitude.Ptr(row))[col+1] < maxContrast&&
                                        ((double*)normMagnitude.Ptr(row+1))[col+1] < maxContrast)
                                    {
                                        magRowPtr[col] = 0;
                                        flag = false;
                                    }
                                }
                                else
                                {
                                    // Border pixels cannot have a full 8-neighbourhood: reject.
                                    magRowPtr[col] = 0;
                                    flag = false;
                                }
                            }
                        }
                        double dx = ((double*)xMagnitude.Ptr(row))[col];
                        double dy = ((double*)yMagnitude.Ptr(row))[col];
                        double mag = Math.Sqrt(dx * dx + dy * dy);
                        int* orient = (int*)orientation.Ptr(row);
                        // Record every surviving pixel with a non-zero gradient as a template edge point.
                        if (flag && (dx!=0 || dy!=0))
                        {
                            // Track the bounding extremes of all edge points (maxima here, minima in
                            // relaOrgPoint) so positions can be re-based after the scan.
                            validSize.Width = Math.Max(col, validSize.Width);
                            validSize.Height = Math.Max(row, validSize.Height);
                            relaOrgPoint.X = Math.Min(col, relaOrgPoint.X);
                            relaOrgPoint.Y = Math.Min(row, relaOrgPoint.Y);
                            ImageEdgePtInform edgeInform = new ImageEdgePtInform();
                            edgeInform.DerivativeX = dx;
                            edgeInform.DerivativeY = dy;
                            // Stored as the reciprocal so the matcher can multiply instead of divide.
                            edgeInform.Magnitude = 1.0/ mag;
                            edgeInform.Orientation = orient[col];
                            edgeInform.RelativePos = new Point(col, row);
                            validEdgeInforms.Add(edgeInform);
                            edgePtCount++;
                        }
                    }

                }
            }
            // Debug visualization of the harvested edge points (hard-coded window name).
            for (int i = 0; i < validEdgeInforms.Count; i++)
            {
                img.Circle(validEdgeInforms[i].RelativePos, 1, Scalar.Red);
            }
            ImageShow("ksdkskdks", img);
            // Re-base all edge-point positions onto the top-left corner of their bounding region.
            for (int i = 0; i < edgePtCount; i++)
            {
                int x = validEdgeInforms[i].RelativePos.X;
                int y = validEdgeInforms[i].RelativePos.Y;
                validEdgeInforms[i].RelativePos = new Point(x - relaOrgPoint.X, y - relaOrgPoint.Y);
            }
            // NOTE(review): size is computed as max - min without +1 — confirm the off-by-one is intended.
            edgeSize = new Size(validSize.Width-relaOrgPoint.X,validSize.Height-relaOrgPoint.Y);

            return validEdgeInforms;
        }

        /// <summary>
        /// Extracts edge-point gradient information from the dark blobs of <paramref name="img"/>.
        /// The image is converted to grayscale, blurred, inverse-binary thresholded at 120 and its
        /// contours traced; for every contour point the Sobel derivatives are sampled and packed into
        /// an <see cref="ImageEdgePtInform"/>. Positions are re-based to the top-left corner of the
        /// bounding region of all edge points.
        /// </summary>
        /// <param name="img">Input image (BGR or 8-bit grayscale); cloned, never modified.</param>
        /// <param name="margin">Currently unused; kept for interface compatibility.</param>
        /// <param name="validSize">In/out: updated to max − min coordinate of the edge region.</param>
        /// <param name="show">When true, draws contours, origin, bounding box and barycenter for debugging.</param>
        /// <returns>Edge-point records with re-based positions and reciprocal gradient magnitudes;
        /// empty when no usable contour is found.</returns>
        static public List<ImageEdgePtInform> ImageContourEdgeInfomGet(Mat img, int margin, ref Size validSize, bool show = false)
        {
            Mat _uImg = img.Clone();
            if (_uImg.Type() != MatType.CV_8UC1)
            {
                Cv2.CvtColor(_uImg, _uImg, ColorConversionCodes.BGR2GRAY);
            }
            Point relaOrgPt = new Point(int.MaxValue, int.MaxValue);
            List<ImageEdgePtInform> resultEdgeInforms = new List<ImageEdgePtInform>();
            Mat _blur = _uImg.GaussianBlur(new Size(3, 3), 0);
            Mat _threImg = _blur.Threshold(120, 255, ThresholdTypes.BinaryInv);
            _threImg.FindContours(out Point[][] cnts, out HierarchyIndex[] hids, RetrievalModes.List, ContourApproximationModes.ApproxNone);
            // Drop degenerate contours, then order by area so cnts[0] is the dominant blob.
            cnts = cnts.Where(cnt => Cv2.ContourArea(cnt) > 3 && Cv2.ArcLength(cnt, false) > 3 && cnt.Length > 3)
                       .OrderByDescending(cnt => Cv2.ContourArea(cnt))
                       .ToArray();
            // FIX: the original indexed cnts[0] unconditionally and threw on an empty contour set.
            if (cnts.Length == 0)
            {
                return resultEdgeInforms;
            }
            Mat xDerivative = _uImg.Sobel(MatType.CV_64FC1, 1, 0, 3);
            Mat yDerivative = _uImg.Sobel(MatType.CV_64FC1, 0, 1, 3);
            var M = Cv2.Moments(cnts[0]);
            Point bcenter = new Point((int)(M.M10 / M.M00), (int)(M.M01 / M.M00));
            // FIX: loop-invariant — the original recomputed this (moments + min-area rect)
            // once per contour point instead of once per call.
            double barycentOrient = ContourBaryCenterOrientationGet(cnts[0]);
            unsafe
            {
                foreach (var cnt in cnts)
                {
                    foreach (var pt in cnt)
                    {
                        int row = pt.Y, col = pt.X;
                        // Track bounding extremes so positions can be re-based after the scan.
                        relaOrgPt.X = Math.Min(relaOrgPt.X, col);
                        relaOrgPt.Y = Math.Min(relaOrgPt.Y, row);
                        validSize.Width = Math.Max(validSize.Width, col);
                        validSize.Height = Math.Max(validSize.Height, row);
                        double dx = ((double*)xDerivative.Ptr(row))[col];
                        double dy = ((double*)yDerivative.Ptr(row))[col];
                        double mag = Math.Sqrt(dx * dx + dy * dy);
                        ImageEdgePtInform ptInform = new ImageEdgePtInform();
                        ptInform.DerivativeX = dx;
                        ptInform.DerivativeY = dy;
                        // Reciprocal magnitude so the matcher can multiply instead of divide.
                        // FIX: guard mag == 0 — the original stored Infinity, which would
                        // poison the score accumulation in ImageEdgeMatch.
                        ptInform.Magnitude = mag > 0 ? 1.0 / mag : 0;
                        ptInform.RelativePos = pt;
                        ptInform.BarycentOrient = barycentOrient;
                        resultEdgeInforms.Add(ptInform);
                    }
                }
                // Re-base every position onto the top-left corner of the bounding region.
                foreach (var inf in resultEdgeInforms)
                {
                    inf.RelativePos = new Point(inf.RelativePos.X - relaOrgPt.X, inf.RelativePos.Y - relaOrgPt.Y);
                }
                validSize = new Size(validSize.Width - relaOrgPt.X, validSize.Height - relaOrgPt.Y);
                if (show)
                {
                    Mat _sImg = img.Clone();
                    _sImg.DrawContours(cnts, -1, Scalar.Green, 1);
                    _sImg.Circle(relaOrgPt, 2, Scalar.Red);
                    _sImg.Rectangle(new Rect(relaOrgPt, validSize), Scalar.Blue, 1);
                    _sImg = ImageBasicLineDrawing(_sImg, bcenter, orientation: ENUMS.IMAGE_PERMUTATION_TYPE.HORIZONTAL);
                    _sImg = ImageBasicLineDrawing(_sImg, bcenter, orientation: ENUMS.IMAGE_PERMUTATION_TYPE.VERTICAL);
                    _sImg.Circle(bcenter, 5, Scalar.Blue, -1);
                    ImageShow("asdsad", _sImg);
                }
            }
            return resultEdgeInforms;
        }
        /// <summary>
        /// Edge-based template matching: slides the query edge-point set over every position of
        /// <paramref name="img"/> and scores each position with the normalized dot product of
        /// gradient directions (template reciprocal magnitude × search-image magnitude), using a
        /// greediness-controlled early-termination threshold.
        /// </summary>
        /// <param name="img">Search image (BGR or 8-bit grayscale); cloned, never modified.</param>
        /// <param name="queryEdgeInforms">Template edge points with re-based positions and reciprocal magnitudes.</param>
        /// <param name="minScore">Minimum acceptable score in [0, 1]; drives the early-termination bound.</param>
        /// <param name="greediness">Early-termination aggressiveness in [0, 1); must not be exactly 1 (division by 1 − greediness).</param>
        /// <param name="validSize">Template bounding size, used to reject matches that overflow the image.</param>
        /// <param name="conformPoints">Top-left position of the best-scoring match.</param>
        /// <returns>Best score found (0 when no template points, or when the best match overflows the image).</returns>
        static public double ImageEdgeMatch(Mat img, List<ImageEdgePtInform> queryEdgeInforms,
            double minScore, double greediness, Size validSize, out Point conformPoints)
        {
            conformPoints = new Point();
            int queryCount = queryEdgeInforms.Count;
            // FIX: guard the empty template — the original divided by queryCount, producing
            // NaN/Infinity thresholds.
            if (queryCount == 0)
            {
                return 0;
            }
            Mat _uImg = img.Clone();
            if (_uImg.Type() == MatType.CV_8UC3)
            {
                Cv2.CvtColor(_uImg, _uImg, ColorConversionCodes.BGR2GRAY);
            }
            Cv2.GaussianBlur(_uImg, _uImg, new Size(3, 3), 0);
            int Width = _uImg.Width;
            int Height = _uImg.Height;
            double resultScore = 0;
            unsafe
            {
                Mat txMagnitude = _uImg.Sobel(MatType.CV_64FC1, 1, 0, 3);
                Mat tyMagnitude = _uImg.Sobel(MatType.CV_64FC1, 0, 1, 3);
                Mat torgMagnitude = Mat.Zeros(_uImg.Size(), MatType.CV_64FC1);
                // Per-point score floor and greediness-weighted early-termination coefficient.
                double normMinScore = minScore / (double)queryCount;
                double normGreediness = ((1 - greediness * minScore) / (1 - greediness)) / queryCount;

                // Precompute the gradient magnitude of every search-image pixel.
                for (int row = 0; row < Height; row++)
                {
                    double* xMag = (double*)txMagnitude.Ptr(row);
                    double* yMag = (double*)tyMagnitude.Ptr(row);
                    double* oMag = (double*)torgMagnitude.Ptr(row);
                    for (int col = 0; col < Width; col++)
                    {
                        double dx = xMag[col], dy = yMag[col];
                        oMag[col] = Math.Sqrt(dx * dx + dy * dy);
                    }
                }
                // Exhaustive scan over every candidate top-left position.
                for (int row = 0; row < Height; row++)
                {
                    for (int col = 0; col < Width; col++)
                    {
                        double sum = 0;
                        double corSum = 0;
                        // FIX: reset per candidate position — the original declared this outside
                        // the loops, so when every template point was skipped a stale score from
                        // the previous position could be recorded for the current one.
                        double partialScore = 0;
                        for (int cn = 0; cn < queryCount; cn++)
                        {
                            int relaX = queryEdgeInforms[cn].RelativePos.X + col;
                            int relaY = queryEdgeInforms[cn].RelativePos.Y + row;
                            if (relaY >= Height || relaX >= Width)
                            {
                                continue;
                            }
                            double txD = ((double*)txMagnitude.Ptr(relaY))[relaX];
                            double tyD = ((double*)tyMagnitude.Ptr(relaY))[relaX];
                            double tMag = ((double*)torgMagnitude.Ptr(relaY))[relaX];
                            double qxD = queryEdgeInforms[cn].DerivativeX;
                            double qyD = queryEdgeInforms[cn].DerivativeY;
                            double qMag = queryEdgeInforms[cn].Magnitude;
                            // Normalized direction dot product; qMag is the template's reciprocal
                            // magnitude, so each term lies in [-1, 1].
                            if ((txD != 0 || tyD != 0) && (qxD != 0 || qyD != 0))
                            {
                                sum += (txD * qxD + tyD * qyD) * qMag / tMag;
                            }
                            corSum += 1;
                            partialScore = sum / corSum;
                            // Greedy early termination: abandon this position once the running
                            // score can no longer reach minScore.
                            double curJudge = Math.Min((minScore - 1) + normGreediness * normMinScore, normMinScore * corSum);
                            if (partialScore < curJudge)
                            {
                                break;
                            }
                        }
                        if (partialScore > resultScore)
                        {
                            resultScore = partialScore;
                            conformPoints = new Point(col, row);
                        }
                    }
                }
                // Secondary filter: reject a match whose template footprint overflows the image.
                if (resultScore > 0.5)
                {
                    if (conformPoints.X + validSize.Width > Width ||
                        conformPoints.Y + validSize.Height > Height)
                    {
                        resultScore = 0;
                    }
                }
                return resultScore;
            }
        }
        
    }
}

本文内容由网友自发贡献,版权归原作者所有,本站不承担相应法律责任。如您发现有涉嫌抄袭侵权的内容,请联系:hwhale#tublm.com(使用前将#替换为@)

c#图像几何特征匹配 的相关文章

随机推荐