Image Segmentation Based on OpenCV
1. Image Thresholding
Source code:
#include "opencv2/highgui/highgui.hpp" #include "opencv2/imgproc/imgproc.hpp" #include <iostream> using namespace std; using namespace cv; int thresholds=50; int model=2; Mat image,srcimage; void track(int ,void *) { Mat result; threshold(srcimage,result,thresholds,255,CV_THRESH_BINARY); //imshow("原圖",result); if(model==0) { threshold(srcimage,result,thresholds,255,CV_THRESH_BINARY); imshow("分割",result); } if(model==1) { threshold(srcimage,result,thresholds,255,THRESH_BINARY_INV); imshow("分割",result); } if(model==2) { threshold(srcimage,result,thresholds,255,THRESH_TRUNC); imshow("分割",result); } if(model==3) { threshold(srcimage,result,thresholds,255,THRESH_TOZERO); imshow("分割",result); } if(model==4) { threshold(srcimage,result,thresholds,255,THRESH_TOZERO_INV); imshow("分割",result); } } int main() { image=imread("2.2.tif"); if(!image.data) { return 0; } cvtColor(image,srcimage,CV_BGR2GRAY); namedWindow("分割",WINDOW_AUTOSIZE); cv::createTrackbar("閾a值:","分割",&thresholds,255,track); cv::createTrackbar("模式:","分割",&model,4,track); track(thresholds,0); track(model,0); waitKey(0); return 0; }
Result:
2. Threshold Processing (Otsu and Adaptive)
//Threshold processing
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
using namespace cv;
using namespace std;

int main()
{
    printf("Press ESC to exit\n");
    Mat g_srcImage = imread("1.tif", 0);
    if (!g_srcImage.data)
    {
        printf("Failed to read the image\n");
        return -1;
    }
    imshow("Original", g_srcImage);

    // Otsu thresholding
    /* Otsu's method (OTSU) splits the image into background and foreground
       according to its gray-level statistics. The larger the between-class
       variance of the two parts, the bigger the difference between them;
       misclassifying foreground as background (or vice versa) makes that
       variance smaller. */
    Mat OtsuImage;
    threshold(g_srcImage, OtsuImage, 0, 255, THRESH_OTSU); // the value 0 is ignored here; Otsu selects the threshold itself
    imshow("OtsuImage", OtsuImage);

    // Adaptive thresholding
    // ADAPTIVE_THRESH_MEAN_C (0): threshold = local 7x7 mean minus the constant 8
    // THRESH_BINARY_INV: inverted binarization
    Mat AdaptImage;
    adaptiveThreshold(g_srcImage, AdaptImage, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY_INV, 7, 8);
    imshow("AdaptImage", AdaptImage);

    while (1)
    {
        int key = waitKey(20);
        if ((char)key == 27)
        {
            break;
        }
    }
    return 0;
}
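To make the between-class-variance idea in the comment concrete, the following is a rough sketch (not the library's implementation) that computes Otsu's threshold by hand from the 256-bin histogram and compares it with the value returned by threshold(..., THRESH_OTSU); the file name "1.tif" is reused from the listing above:

#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include <cstdio>
using namespace cv;

int main()
{
    Mat gray = imread("1.tif", 0);
    if (gray.empty()) return 0;

    // Build the 256-bin grayscale histogram.
    double hist[256] = {0};
    for (int i = 0; i < gray.rows; i++)
        for (int j = 0; j < gray.cols; j++)
            hist[gray.at<uchar>(i, j)]++;
    double total = (double)gray.rows * gray.cols;

    // Search for the threshold t that maximizes the between-class variance
    // w0*w1*(mu0-mu1)^2, where w0/w1 are the class weights and mu0/mu1 the class means.
    double sumAll = 0;
    for (int t = 0; t < 256; t++) sumAll += t * hist[t];
    double w0 = 0, sum0 = 0, bestVar = 0;
    int bestT = 0;
    for (int t = 0; t < 256; t++)
    {
        w0 += hist[t];
        if (w0 == 0) continue;
        double w1 = total - w0;
        if (w1 == 0) break;
        sum0 += t * hist[t];
        double mu0 = sum0 / w0;
        double mu1 = (sumAll - sum0) / w1;
        double varBetween = w0 * w1 * (mu0 - mu1) * (mu0 - mu1);
        if (varBetween > bestVar) { bestVar = varBetween; bestT = t; }
    }

    // Compare with OpenCV: threshold() returns the Otsu threshold it selected.
    Mat dst;
    double cvT = threshold(gray, dst, 0, 255, THRESH_OTSU);
    printf("hand-computed Otsu threshold = %d, OpenCV Otsu threshold = %.0f\n", bestT, cvT);
    return 0;
}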
3. Laplacian Edge Detection
//Laplacian edge detection
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
using namespace cv;
using namespace std;
/* The Laplacian is a good choice when only the position of an edge matters and
   the gray-level difference around it does not. It responds more strongly to
   isolated pixels than to edges or lines, so it is really only suitable for
   noise-free images; when noise is present, apply a low-pass filter before
   Laplacian edge detection. */
int main()
{
    Mat src, src_gray, dst, abs_dst;
    src = imread("1.jpg");
    imshow("Original image", src);
    // Gaussian smoothing
    GaussianBlur(src, src, Size(3, 3), 0, 0, BORDER_DEFAULT);
    // convert to grayscale; the input here must be single-channel
    cvtColor(src, src_gray, CV_BGR2GRAY);
    Laplacian(src_gray, dst, CV_16S, 3, 1, 0, BORDER_DEFAULT);
    convertScaleAbs(dst, abs_dst);
    imshow("Laplacian result", abs_dst);
    waitKey();
    return 0;
}
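Laplacian() above is essentially a fixed second-derivative filter. As an illustrative sketch only, the basic 4-neighbour Laplacian kernel can be applied with filter2D (note that Laplacian() with ksize = 3 uses a slightly different Sobel-based kernel, so the output will not be pixel-identical); the file name "1.jpg" is reused from the listing above:

#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
using namespace cv;

int main()
{
    Mat src = imread("1.jpg");
    if (src.empty()) return 0;

    // Smooth first: the Laplacian is very sensitive to noise.
    GaussianBlur(src, src, Size(3, 3), 0, 0, BORDER_DEFAULT);
    Mat gray;
    cvtColor(src, gray, CV_BGR2GRAY);

    // 4-neighbour Laplacian kernel: sum of the second derivatives in x and y.
    Mat kernel = (Mat_<float>(3, 3) <<
                   0,  1, 0,
                   1, -4, 1,
                   0,  1, 0);
    Mat lap, absLap;
    filter2D(gray, lap, CV_16S, kernel);   // keep signed values, like Laplacian(..., CV_16S)
    convertScaleAbs(lap, absLap);          // back to 8-bit for display
    imshow("Laplacian via filter2D", absLap);
    waitKey(0);
    return 0;
}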
Result:
4. Canny Edge Detection
Source code:
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
using namespace cv;
using namespace std;
/* If a pixel's gradient magnitude is above the high threshold, it is kept as an
edge pixel. If it is below the low threshold, it is discarded. If it lies between
the two thresholds, it is kept only when it is connected to a pixel above the high threshold. */
int main()
{
Mat picture2=imread("1.jpg");
Mat new_picture2;
Mat picture2_1=picture2.clone();
Mat gray_picture2 , edge , new_edge;
imshow("【原始圖】Canny邊緣檢測" , picture2);
Canny(picture2_1 , new_picture2 ,150 , 100 ,3 );
imshow("【效果圖】Canny邊緣檢測", new_picture2 );
Mat dstImage,grayImage;
//dstImage與srcImage同大小型別
dstImage.create(picture2_1.size() , picture2_1.type());
cvtColor(picture2_1,gray_picture2,CV_BGR2GRAY);//轉化為灰度圖
blur(gray_picture2 , edge , Size(3,3));//用3x3的核心降噪
Canny(edge,edge,3,9,3);
dstImage = Scalar::all(0);//將dst內所有元素設定為0
//使用canny運算元的邊緣圖edge作為掩碼,將原圖拷貝到dst中
picture2_1.copyTo(dstImage,edge);
imshow("效果圖Canny邊緣檢測2",dstImage);
waitKey();
}
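The first Canny() call above passes 150 and 100; OpenCV effectively treats the larger value as the high threshold and the smaller as the low one. A common rule of thumb, not a requirement, is to derive the low threshold from the high one with a ratio around 1:2 to 1:3, as in this minimal sketch (the threshold values are illustrative and "1.jpg" is reused from the listing above):

#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
using namespace cv;

int main()
{
    Mat src = imread("1.jpg");
    if (src.empty()) return 0;
    Mat gray, edges;
    cvtColor(src, gray, CV_BGR2GRAY);
    blur(gray, gray, Size(3, 3));          // suppress noise before Canny, as above

    // Pick the high threshold, derive the low one with a 1:3 ratio (rule of thumb).
    double highThresh = 150;
    double lowThresh  = highThresh / 3;    // gradients between low and high survive only
                                           // when connected to a pixel above highThresh
    Canny(gray, edges, lowThresh, highThresh, 3);
    imshow("Canny with 1:3 thresholds", edges);
    waitKey(0);
    return 0;
}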
5. Watershed Algorithm
Source code:
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include <iostream>
using namespace cv;
using namespace std;
#define WINDOW_NAME1 "Display/Operation Window"
#define WINDOW_NAME2 "Watershed Result"
Mat g_maskImage,g_srcImage;
Point prevPt(-1,-1);
static void ShowHelpText();
static void on_Mouse(int event,int x,int y,int flags,void*);
//print some help text
static void ShowHelpText()
{
printf("Current OpenCV version: " CV_VERSION);
printf("\n");
printf("Watershed algorithm -- click in the image window and use the mouse or keyboard\n");
printf("First mark the rough regions in the image window with the mouse,\nthen press [1] or [space] to run the algorithm\n");
printf("\nKey controls:\n"
       "Press [1] or [space] -- run the watershed segmentation\n"
       "Press [2] -- restore the original image\n"
       "Press [ESC] -- exit the program\n");
}
static void on_Mouse(int event,int x,int y,int flags,void*)
{
if(x<0||x>=g_srcImage.cols||y<0||y>=g_srcImage.rows)
return;
if(event == CV_EVENT_LBUTTONUP||!(flags & CV_EVENT_FLAG_LBUTTON))
prevPt = Point(-1,-1);
else if(event == CV_EVENT_LBUTTONDOWN)
prevPt= Point(x,y);
else if(event == CV_EVENT_MOUSEMOVE && (flags & CV_EVENT_FLAG_LBUTTON))
{
Point pt(x,y);
if(prevPt.x<0)
prevPt = pt;
line(g_maskImage,prevPt,pt,Scalar::all(255),5,8,0);
line(g_srcImage,prevPt,pt,Scalar::all(255),5,8,0);
prevPt = pt;
imshow(WINDOW_NAME1,g_srcImage);
}
}
int main(int argc,char** argv)
{
system("color A5");
ShowHelpText();
g_srcImage = imread("1.jpg",1);
imshow(WINDOW_NAME1,g_srcImage);
Mat srcImage,grayImage;
g_srcImage.copyTo(srcImage);
cvtColor(g_srcImage,g_maskImage,CV_BGR2GRAY);
cvtColor(g_maskImage,grayImage,CV_GRAY2BGR);//convert the gray image to 3-channel BGR; each channel holds the same gray value, so it still displays as gray
g_maskImage = Scalar::all(0);//black
setMouseCallback(WINDOW_NAME1,on_Mouse,0);
while(1)
{
int c = waitKey(0);
if((char)c == 27)
break;
if((char)c == '2')
{
g_maskImage = Scalar::all(0);//black
srcImage.copyTo(g_srcImage);
imshow("image",g_srcImage);
}
if((char)c == '1'||(char)c == ' ')
{
int i,j,compCount = 0;
vector<vector<Point>> contours;//detected contours
vector<Vec4i> hierarchy;//contour hierarchy
findContours(g_maskImage,contours,hierarchy,RETR_CCOMP,CHAIN_APPROX_SIMPLE);
if(contours.empty())
continue;
Mat maskImage(g_maskImage.size(),CV_32S);
maskImage = Scalar::all(0);
for(int index = 0;index >= 0;index = hierarchy[index][0],compCount++)
drawContours(maskImage,contours,index,Scalar::all(compCount+1),-1,8,hierarchy,INT_MAX);
if(compCount == 0)
continue;
vector<Vec3b> colorTab;
for(i=0;i<compCount;i++)
{
int b = theRNG().uniform(0,255);
int g = theRNG().uniform(0,255);
int r = theRNG().uniform(0,255);
colorTab.push_back(Vec3b((uchar)b,(uchar)g,(uchar)r));
}
//measure the processing time and print it
double dTime = (double)getTickCount();
watershed(srcImage,maskImage);
dTime = (double)getTickCount()-dTime;
printf("\t處理時間=%gms\n",dTime*1000./getTickFrequency());
//double loop: map the watershed labels into watershedImage
Mat watershedImage(maskImage.size(),CV_8UC3);
for(i=0;i<maskImage.rows;i++)
for(j=0;j<maskImage.cols;j++)
{
int index = maskImage.at<int>(i,j);
if(index == -1)
watershedImage.at<Vec3b>(i,j) = Vec3b(255,255,255);
else if(index<=0||index>compCount)
watershedImage.at<Vec3b>(i,j) = Vec3b(0,0,0);
else
watershedImage.at<Vec3b>(i,j) = colorTab[index-1];
}
//blend the gray image with the watershed result and show the final window
watershedImage = watershedImage*0.5+grayImage*0.5;
imshow(WINDOW_NAME2,watershedImage);
}
}
waitKey();
return 0;
}
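For comparison, here is a hedged, non-interactive sketch of the same marker mechanics: instead of mouse strokes, two rectangular seed regions are filled with labels 1 and 2 in a CV_32S marker image, watershed() floods outward from them, and any pixel set to -1 afterwards lies on a watershed boundary. The seed coordinates are arbitrary placeholders, and "1.jpg" is reused from the listing above:

#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
using namespace cv;

int main()
{
    Mat src = imread("1.jpg", 1);
    if (src.empty()) return 0;

    // Marker image must be 32-bit signed; 0 = unknown, >0 = seed labels.
    Mat markers(src.size(), CV_32S, Scalar::all(0));
    // Two arbitrary seed rectangles standing in for the mouse strokes above
    // (assumes the image is larger than about 70x70 pixels).
    markers(Rect(10, 10, 50, 50)) = Scalar::all(1);
    markers(Rect(src.cols - 60, src.rows - 60, 50, 50)) = Scalar::all(2);

    watershed(src, markers);   // flood from the labelled seeds

    // Visualise: label 1 -> red, label 2 -> green, boundary (-1) -> white.
    Mat result(src.size(), CV_8UC3, Scalar::all(0));
    for (int i = 0; i < markers.rows; i++)
        for (int j = 0; j < markers.cols; j++)
        {
            int label = markers.at<int>(i, j);
            if (label == -1)      result.at<Vec3b>(i, j) = Vec3b(255, 255, 255);
            else if (label == 1)  result.at<Vec3b>(i, j) = Vec3b(0, 0, 255);
            else if (label == 2)  result.at<Vec3b>(i, j) = Vec3b(0, 255, 0);
        }
    imshow("watershed sketch", result);
    waitKey(0);
    return 0;
}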
Result:
![Watershed result](https://img-blog.csdnimg.cn/20181121123847750.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L3ZpY3RvX2NoYW8=,size_16,color_FFFFFF,t_70)