
OpenCvSharp Setup and Walkthrough: Using OpenCV in C#, Illustrated with OpenCV's Seven Major Tracking Algorithms

This post is something of a trailblazing effort, because articles on OpenCV for C# are hard to find.

This article explains, step by step, how to set up OpenCvSharp (OpenCV for C#) and provides three demos: the CamShift tracking algorithm and the Tracker API, both implemented in .NET C#, plus conversion back and forth between the OpenCV image class OpenCvSharp.Mat and the C# image class System.Drawing.Bitmap.
Create any new console project, open the NuGet package manager, search for OpenCvSharp, and choose the package whose icon is the monkey face. Why that one out of all the OpenCvSharp packages? Because I tested the others and they would not run.
As shown in the figure below:

First, to be clear: OpenCvSharp is a wrapper around OpenCV's C++ API that makes it usable from .NET.
Import the library:
using OpenCvSharp;
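
Before moving on to the tracking demos, a quick smoke test can confirm that the package is wired up correctly. This is only a minimal sketch; the image path below is a placeholder you will need to replace with a file on your own machine:

using System;
using OpenCvSharp;

namespace OpenCvSharpSmokeTest
{
    class Program
    {
        static void Main(string[] args)
        {
            // Load an image from disk; replace the path with one that exists on your machine.
            using (Mat img = Cv2.ImRead(@"C:\path\to\your\image.jpg"))
            {
                if (img.Empty())
                {
                    Console.WriteLine("Failed to load the image - check the path.");
                    return;
                }
                Cv2.ImShow("Smoke test", img);
                Cv2.WaitKey(0);
            }
        }
    }
}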

I will go straight to the source code here. I have rewritten some OpenCV C++ programs in C# with OpenCvSharp; below are two examples:
追蹤_OpenCVSharp_Tracker (the Tracker API demo);
Camshift互動式追蹤CSharp版 (the interactive CamShift demo);
To use either program, drag a rectangular ROI on the OpenCV window; as the object moves, the box follows it and the coordinates of its top-left corner are printed in real time.
For an explanation of the OpenCV image processing involved, please look it up yourself.

//追蹤_OpenCVSharp_Tracker;
using System;
using System.Drawing;
using System.Drawing.Imaging;
using OpenCvSharp;
using OpenCvSharp.Tracking;

namespace 追蹤_OpenCVSharp_Tracker
{
    class Program
    {
        private static Mat image = new Mat();
        private static OpenCvSharp.Point originPoint = new OpenCvSharp.Point();
        private static Rect2d selectedRect = new Rect2d();
        private static bool selectRegion = false;
        private static int trackingFlag = 0;

        // Mouse callback: drag with the left button to draw the ROI rectangle.
        private static void OnMouse(MouseEvent Event, int x, int y, MouseEvent Flags, IntPtr ptr)
        {
            if (selectRegion)
            {
                selectedRect.X = Math.Min(x, originPoint.X);
                selectedRect.Y = Math.Min(y, originPoint.Y);
                selectedRect.Width = Math.Abs(x - originPoint.X);
                selectedRect.Height = Math.Abs(y - originPoint.Y);
                // Clip the selection to the image bounds.
                selectedRect = selectedRect & new Rect2d(0, 0, image.Cols, image.Rows);
            }
            switch (Event)
            {
                case MouseEvent.LButtonDown:
                    originPoint = new OpenCvSharp.Point(x, y);
                    selectedRect = new Rect2d(x, y, 0, 0);
                    selectRegion = true;
                    break;
                case MouseEvent.LButtonUp:
                    selectRegion = false;
                    if (selectedRect.Width > 0 && selectedRect.Height > 0)
                    {
                        // A valid ROI was selected: (re)initialize the tracker on the next frame.
                        trackingFlag = -1;
                    }
                    break;
            }
        }

        static void Main(string[] args)
        {
            // Any of the contrib trackers can be created the same way; KCF is used below.
            TrackerKCF tracker_KCF = TrackerKCF.Create();
            TrackerMIL trackerMIL = TrackerMIL.Create();
            //cv::Ptr<cv::Tracker> tracker = TrackerCSRT.Create();
            TrackerMedianFlow trackerMedianFlow = TrackerMedianFlow.Create();
            TrackerMOSSE trackerMOSSE = TrackerMOSSE.Create();
            TrackerTLD trackerTLD = TrackerTLD.Create();

            VideoCapture cap = new VideoCapture();
            cap.Open(0);
            if (cap.IsOpened())
            {
                string windowName = "KCF Tracker";
                string windowName2 = "OriginFrame";
                Mat frame = new Mat();
                Mat outputMat = new Mat();
                Cv2.NamedWindow(windowName, 0);
                Cv2.NamedWindow(windowName2, 0);
                Cv2.SetMouseCallback(windowName, OnMouse, new IntPtr());

                while (true)
                {
                    cap.Read(frame);
                    // Check if 'frame' is empty
                    if (frame.Empty())
                    {
                        break;
                    }
                    frame.CopyTo(image);

                    if (trackingFlag != 0)
                    {
                        if (trackingFlag < 0)
                        {
                            // Initialize the tracker once with the freshly selected ROI.
                            tracker_KCF.Init(frame, selectedRect);
                            trackingFlag = 1;
                        }
                        // Update the tracked rectangle on each subsequent frame.
                        tracker_KCF.Update(frame, ref selectedRect);

                        frame.CopyTo(outputMat);
                        Rect rect = new Rect((int)selectedRect.X, (int)selectedRect.Y, (int)selectedRect.Width, (int)selectedRect.Height);
                        // Print the top-left corner of the tracked box in real time.
                        Console.WriteLine(rect.X + " " + rect.Y);
                        Cv2.Rectangle(outputMat, rect, new Scalar(255, 255, 0), 2);
                        Cv2.ImShow(windowName2, outputMat);
                    }

                    // Apply a 'negative' effect on the region while it is being selected.
                    if (selectRegion && selectedRect.Width > 0 && selectedRect.Height > 0)
                    {
                        Mat roi = new Mat(image, new Rect((int)selectedRect.X, (int)selectedRect.Y, (int)selectedRect.Width, (int)selectedRect.Height));
                        Cv2.BitwiseNot(roi, roi);
                    }

                    Cv2.ImShow(windowName, image);
                    // 27 -> ASCII value of the Esc key
                    int ch = Cv2.WaitKey(25);
                    if (ch == 27)
                    {
                        break;
                    }
                }
            }
        }
    }
}
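
The Tracker demo above hard-codes KCF, but the other contrib trackers are exposed through the same Tracker base class in OpenCvSharp.Tracking (at least in the versions I have used), so switching algorithms only means changing the Create() call. Below is a rough factory sketch; exactly which trackers are available depends on your OpenCvSharp / opencv_contrib version, so treat the list as an assumption rather than a guarantee:

using System;
using OpenCvSharp.Tracking;

static class TrackerFactory
{
    // Returns a tracker instance by name; they all share Init()/Update().
    public static Tracker Create(string name)
    {
        switch (name)
        {
            case "KCF":        return TrackerKCF.Create();
            case "MIL":        return TrackerMIL.Create();
            case "TLD":        return TrackerTLD.Create();
            case "MEDIANFLOW": return TrackerMedianFlow.Create();
            case "MOSSE":      return TrackerMOSSE.Create();
            case "BOOSTING":   return TrackerBoosting.Create();
            case "CSRT":       return TrackerCSRT.Create();
            default: throw new ArgumentException("Unknown tracker: " + name);
        }
    }
}

Usage would then be, for example, Tracker tracker = TrackerFactory.Create("CSRT"); followed by the same Init(frame, selectedRect) / Update(frame, ref selectedRect) calls as in the demo.
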
//Camshift互動式追蹤CSharp版;
using OpenCvSharp;
using System;

namespace 互動式追蹤CSharp版
{
    class Program
    {
        private static Mat image = new Mat();
        private static Point originPoint = new Point();
        private static Rect selectedRect = new Rect();
        private static bool selectRegion = false;
        private static int trackingFlag = 0;
        //private static CvMouseCallback callBackFunc = new CvMouseCallback(OnMouse);
        private static void OnMouse(MouseEvent Event, int x, int y, MouseEvent Flags, IntPtr ptr)
        {
            if (selectRegion)
            {
                selectedRect.X = Math.Min(x, originPoint.X);
                selectedRect.Y = Math.Min(y, originPoint.Y);
                selectedRect.Width = Math.Abs(x - originPoint.X);
                selectedRect.Height = Math.Abs(y - originPoint.Y);

                selectedRect = selectedRect & new Rect(0, 0, image.Cols, image.Rows);
            }

            switch (Event)
            {
                case MouseEvent.LButtonDown:
                    originPoint = new Point(x, y);
                    selectedRect = new Rect(x, y, 0, 0);
                    selectRegion = true;
                    break;

                case MouseEvent.LButtonUp:
                    selectRegion = false;
                    if (selectedRect.Width > 0 && selectedRect.Height > 0)
                    {
                        trackingFlag = -1;
                    }
                    break;
            }
        }
        static void Main(string[] args)
        {
            VideoCapture cap = new VideoCapture();
            cap.Open(0);
            if (cap.IsOpened())
            {
                int ch;
                Rect trackingRect = new Rect();

                // range of values for the 'H' channel in HSV ('H' stands for Hue)
                Rangef hist_range = new Rangef(0.0f, 180.0f);
                Rangef[] histRanges = { hist_range };
                //const float* histRanges = hueRanges;

                // min value for the 'S' channel in HSV ('S' stands for Saturation)
                int minSaturation = 40;

                // min and max values for the 'V' channel in HSV ('V' stands for Value)
                int minValue = 20, maxValue = 245;

                // size of the histogram bin
                int[] histSize = { 8 };

                string windowName = "CAMShift Tracker";
                //string windowNameTest = "Test";
                Cv2.NamedWindow(windowName, 0);
                //Cv2.NamedWindow(windowNameTest, 0);
                Cv2.SetMouseCallback(windowName, OnMouse, new IntPtr());
                Mat frame = new Mat();
                Mat hsvImage = new Mat();
                Mat hueImage = new Mat();
                Mat mask = new Mat();
                Mat hist = new Mat();
                Mat backproj = new Mat();

                // Image size scaling factor for the input frames from the webcam
                double scalingFactor = 1;

                // Iterate until the user presses the Esc key
                while (true)
                {
                    // Capture the current frame
                    cap.Read(frame);

                    // Check if 'frame' is empty
                    if (frame.Empty())
                        break;

                    // Resize the frame
                    Cv2.Resize(frame, frame, new Size(), scalingFactor, scalingFactor, InterpolationFlags.Area);
                    frame.CopyTo(image);

                    // Convert to HSV colorspace
                    Cv2.CvtColor(image, hsvImage, ColorConversionCodes.BGR2HSV);
                    if (trackingFlag != 0)
                    {
                        // Check for all the values in 'hsvimage' that are within the specified range
                        // and put the result in 'mask'
                        Cv2.InRange(hsvImage, new Scalar(0, minSaturation, minValue), new Scalar(180, 256, maxValue), mask);
                        /* Intuitively, InRange checks whether each pixel of the HSV image lies
                           within [lowerb, upperb] (mind which bounds are inclusive).
                           Where it does, the corresponding pixel of 'mask' is set to 255, otherwise 0,
                           which highlights the selected color range.
                           In other words, it tests whether array elements lie between the values of two
                           other arrays; the "arrays" here are usually Mats or vectors.
                           Note in particular that the 'mask' it outputs is a binary image. */
                        //imshow(windowNameTest, mask);
                        //waitKey(0);
                        // Mix the specified channels
                        int[] channels = { 0, 0 };
                        //cout << hsvImage.depth() << endl;
                        hueImage.Create(hsvImage.Size(), hsvImage.Depth());
                        //cout << hueImage.channels() << endl; ;
                        hueImage = hsvImage.ExtractChannel(0);
                        //Cv2.MixChannels(hsvImage, hueImage, channels);
                        /* mixChannels() copies the specified channels of the input arrays into the
                           specified channels of the output arrays:
                           void mixChannels(
                               const Mat* src,     // input array or vector of matrices; all must have the same size and depth
                               size_t nsrcs,       // number of input matrices
                               Mat* dst,           // output array or vector of matrices; size and depth must match src[0]
                               size_t ndsts,       // number of output matrices
                               const int* fromTo,  // index pairs of source channel -> destination channel
                               size_t npairs)      // number of index pairs in fromTo */
                        if (trackingFlag < 0)
                        {
                            // Create images based on selected regions of interest

                            Mat roi = new Mat(hueImage, selectedRect);
                            Mat maskroi = new Mat(mask, selectedRect);
                            Mat[] roi_source = { roi };
                            int[] channels_ = { 0 };
                            // Compute the histogram and normalize it
                            Cv2.CalcHist(roi_source, channels_, maskroi, hist, 1, histSize, histRanges);
                            Cv2.Normalize(hist, hist, 0, 255, NormTypes.MinMax);

                            trackingRect = selectedRect;
                            trackingFlag = 1;
                        }
                        Mat[] hueImgs = { hueImage };
                        int[] channels_back = { 0 };
                        // Compute the histogram back projection
                        Cv2.CalcBackProject(hueImgs, channels_back, hist, backproj, histRanges);
                        backproj &= mask;
                        //TermCriteria criteria = new TermCriteria(CriteriaTypes.Eps | CriteriaTypes.MaxIter, 10, 1);
                        RotatedRect rotatedTrackingRect = Cv2.CamShift(backproj, ref trackingRect, new TermCriteria(CriteriaType.Eps | CriteriaType.MaxIter, 10, 1));

                        // Check if the area of trackingRect is too small
                        if ((trackingRect.Width * trackingRect.Height) <= 1)
                        {
                            // Use an offset value to make sure the trackingRect has a minimum size
                            int cols = backproj.Cols, rows = backproj.Rows;
                            int offset = Math.Min(rows, cols) + 1;
                            trackingRect = new Rect(trackingRect.X - offset, trackingRect.Y - offset, trackingRect.X + offset, trackingRect.Y + offset) & new Rect(0, 0, cols, rows);
                        }

                        // Draw the ellipse on top of the image
                        Cv2.Ellipse(image, rotatedTrackingRect, new Scalar(0, 255, 0), 3, LineTypes.Link8);
                    }


                    // Apply the 'negative' effect on the selected region of interest
                    if (selectRegion && selectedRect.Width > 0 && selectedRect.Height > 0)
                    {
                        Mat roi = new Mat(image, selectedRect);
                        Cv2.BitwiseNot(roi, roi);
                    }
                    // Display the output image
                    Cv2.ImShow(windowName, image);

                    // Get the keyboard input and check if it's 'Esc'
                    // 27 -> ASCII value of 'Esc' key

                    ch = Cv2.WaitKey(25);
                    if (ch == 27)
                    {
                        break;
                    }
                }
            }
        }
    }
}
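
A short aside on the algorithm: CamShift is an adaptive variant of MeanShift that uses the same hue back-projection but lets the search window resize and rotate, which is why the demo draws a rotated ellipse. If a fixed-size, axis-aligned window is enough, the CamShift call can be swapped for MeanShift. A minimal sketch, reusing the backproj, trackingRect, and image variables from the demo above:

                        // MeanShift only shifts the window; its size stays constant.
                        int iterations = Cv2.MeanShift(backproj, ref trackingRect,
                            new TermCriteria(CriteriaType.Eps | CriteriaType.MaxIter, 10, 1));

                        // Draw the axis-aligned tracking window instead of the rotated ellipse.
                        Cv2.Rectangle(image, trackingRect, new Scalar(0, 255, 0), 3);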

In addition, there is a .NET Framework WinForms demo that displays the camera feed in real time and includes conversion between the OpenCvSharp.Mat and System.Drawing.Bitmap image classes.
The demo's form interface:

The demo code:

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using OpenCvSharp;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;

namespace OpenCVSharp_Mat與Bitmap的轉換
{
    public partial class Form1 : Form
    {
        VideoCapture cap;
        Mat frame = new Mat();
        Mat dstMat = new Mat();
        Bitmap bmp;
        public Form1()
        {
            InitializeComponent();
        }
        // Wraps the Mat's pixel buffer in a Bitmap (assumes a continuous 8-bit, 3-channel BGR Mat).
        public static Bitmap MatToBitmap(Mat dst)
        {
            return new Bitmap(dst.Cols, dst.Rows, (int)dst.Step(), PixelFormat.Format24bppRgb, dst.Data);
        }
        public static Mat BitmapToMat(Bitmap srcbit)
        {
            int iwidth = srcbit.Width;
            int iheight = srcbit.Height;
            int iByte = iwidth * iheight * 3;
            byte[] result = new byte[iByte];
            int step;
            Rectangle rect = new Rectangle(0, 0, iwidth, iheight);
            BitmapData bmpData = srcbit.LockBits(rect, ImageLockMode.ReadWrite, srcbit.PixelFormat);
            IntPtr iPtr = bmpData.Scan0;
            Marshal.Copy(iPtr, result, 0, iByte);
            step = bmpData.Stride;
            srcbit.UnlockBits(bmpData);
            return new Mat(srcbit.Height, srcbit.Width, new MatType(MatType.CV_8UC3), result, step);
        }
        private void btnRun_Click(object sender, EventArgs e)
        {
            timer1.Enabled = true;
        }

        private void timer1_Tick(object sender, EventArgs e)
        {
            if (cap.IsOpened())
            {
                cap.Read(frame);
                bmp = MatToBitmap(frame);
                pictureBox1.Image = bmp;
                dstMat = BitmapToMat(bmp);
                Cv2.ImShow("dstMat", dstMat);
                //Cv2.WaitKey();
           }
        }

        private void Form1_Load(object sender, EventArgs e)
        {
            try
            {
                cap = new VideoCapture();
                cap.Open(0);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
           
        }

        private void Form1_FormClosed(object sender, FormClosedEventArgs e)
        {
            timer1.Enabled = false;
            if (cap.IsOpened())
            {
                cap.Dispose();
            }
        }
    }
}
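
One more note: OpenCvSharp also provides ready-made converters in the OpenCvSharp.Extensions namespace (the BitmapConverter class, shipped as its own NuGet package in recent versions), so the manual MatToBitmap/BitmapToMat above can usually be replaced by library calls. A minimal sketch, assuming that extensions package is installed:

using System.Drawing;
using OpenCvSharp;
using OpenCvSharp.Extensions;   // BitmapConverter lives in the extensions package

static class ConversionDemo
{
    // Mat -> Bitmap (for example, to assign to a PictureBox)
    public static Bitmap ToBitmap(Mat frame)
    {
        return BitmapConverter.ToBitmap(frame);
    }

    // Bitmap -> Mat
    public static Mat ToMat(Bitmap bmp)
    {
        return BitmapConverter.ToMat(bmp);
    }
}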

Several digital image processing functions appear in these demos; if any of them are unfamiliar, consult a textbook or look them up online. Step through the code line by line and you will see why it is written the way it is. The underlying theory is too long to cover here in one evening, so please research it yourself.

If you have read this far, you presumably enjoy this post and think well of its author.
Although the author is not yet very old, he has already been through life's ups and downs:
At 18, when choosing a major, he was reassigned to biomaterials and never got to study control engineering and automation, computer science, or microelectronics.
At 23, he failed the cross-discipline graduate entrance exam in computer science while working full time.
Along the way came wall after wall, setback after setback.
But the author has always believed that as long as he keeps his reverence for knowledge and pushes himself to keep working,
then even if fate handed him the worst possible opening chapter, so what?
Perhaps changing one's own fate and refusing to submit to its arrangements is what people like us have always pursued.

So please follow, like, and bookmark.
The image class conversion part of this article draws on the following blog post:
https://blog.csdn.net/qq_34455723/article/details/90053593