
OpenNI + OpenCV2: Second Experiment, Gesture Recognition

This again follows the article by the same expert blogger as before.

However, I personally prefer the OpenCV2 C++ API, so I rewrote the code and am posting it here again.

The callback registration for OpenNI's GestureGenerator has the following form:

XnStatus RegisterGestureCallbacks(      GestureRecognized RecognizedCB,
                                        GestureProgress   ProgressCB,
                                        void*   pCookie,
                                        XnCallbackHandle& hCallback)

The prototypes of the two callback functions are:

void (XN_CALLBACK_TYPE* GestureRecognized)(     GestureGenerator& generator,
                                                const XnChar*  strGesture,
                                                const XnPoint3D*  pIDPosition,
                                                const XnPoint3D*  pEndPosition,
                                                void* pCookie);

void (XN_CALLBACK_TYPE* GestureProgress)(       GestureGenerator& generator,
                                                const XnChar*   strGesture,
                                                const XnPoint3D*  pPosition,
                                                XnFloat  fProgress,
                                                void*  pCookie); 
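Whatever pointer is passed as pCookie at registration is handed back unchanged to every callback; the program below relies on this to share its OpenCV drawing buffer with the recognition callback. As a quick orientation, here is a minimal wiring sketch with error handling omitted (the names onRecognized, onProgress, and recognizedCount are my own, not from the original code):

// Sketch only: shows how pCookie ties user data to the callbacks.
void XN_CALLBACK_TYPE onRecognized( xn::GestureGenerator &generator,
                                    const XnChar *strGesture,
                                    const XnPoint3D *pIDPosition,
                                    const XnPoint3D *pEndPosition,
                                    void *pCookie)
{
	int *counter = static_cast<int *>(pCookie);   // same pointer given at registration
	++(*counter);
}

void XN_CALLBACK_TYPE onProgress( xn::GestureGenerator &generator,
                                  const XnChar *strGesture,
                                  const XnPoint3D *pPosition,
                                  XnFloat fProgress,
                                  void *pCookie)
{
	// progress updates can be ignored if only the final recognition matters
}

// ... after gestureGenerator.Create(context) in main():
int recognizedCount = 0;
XnCallbackHandle handle;
gestureGenerator.RegisterGestureCallbacks(onRecognized, onProgress,
                                          &recognizedCount, handle);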


The code for this experiment is as follows:

#include "stdafx.h"
#include "opencv2/opencv.hpp"
#include "XnCppWrapper.h"  

using namespace cv;
using namespace std;

// operator<< overload for printing an XnPoint3D
ostream &operator<<( ostream &out, const XnPoint3D &rPoint)
{
	out << "(" <<rPoint.X<< "," <<rPoint.Y<< ","<<rPoint.X<<")";
	return out;
}

// callback invoked when a gesture is recognized
void XN_CALLBACK_TYPE gestureRecog( xn::GestureGenerator &generator,
									const XnChar *strGesture,
									const XnPoint3D *pIDposition,
									const XnPoint3D *pEndPosition,
									void *pCookie)
{
	cout << strGesture << " from " << *pIDposition << " to " << *pEndPosition << endl;

	int imgStartX = 0;
	int imgStartY = 0;
	int imgEndX = 0;
	int imgEndY = 0;
	char locationInfo[100];

	// map the reported positions onto the 640x480 drawing pad (offset and flipped about the centre)
	imgStartX = (int)(640/2 - pIDposition->X );
	imgStartY = (int)(480/2 - pIDposition->Y );
	imgEndX = (int)(640/2 - pEndPosition->X );
	imgEndY = (int)(480/2 - pEndPosition->Y );

	// wrap the drawing-pad buffer passed in through pCookie (no data copy)
	Mat refimage(480, 640, CV_8UC3, (uchar *)pCookie);
	
	if(strcmp(strGesture, "RaiseHand") == 0)
	{
		circle(refimage, Point(imgStartX, imgStartY), 1, Scalar(255, 0, 0), 2 );
	}
	else if (strcmp(strGesture, "Wave") == 0)
	{
		line(refimage, Point(imgStartX, imgStartY), Point(imgEndX, imgEndY), Scalar(0, 255, 0), 4);
	}
	else if (strcmp(strGesture, "Click") == 0)
	{
		circle(refimage, Point(imgStartX, imgStartY), 6, Scalar(0, 0, 255), 2 );
	}

	// wipe the strip at the bottom of the pad where the location text is printed
	Mat imageROI(refimage, Rect(40, 420, 400, 60) );
	for(int row = 0; row < imageROI.rows; row++ )
	{
		uchar *dataPtr = imageROI.ptr<uchar>(row);
		for(int col = 0; col < imageROI.cols; col++)
		{
			*dataPtr++ = 255;
			*dataPtr++ = 255;
			*dataPtr++ = 255;
		}
	}

	sprintf_s(locationInfo, "From: %d,%d to %d,%d",(int)pIDposition->X,(int)pIDposition->Y,(int)(pEndPosition->X),(int)(pEndPosition->Y) );
	putText(imageROI,
			locationInfo,
			Point(30, 40),
			FONT_HERSHEY_DUPLEX,
			0.6,
			Scalar(255, 0, 255),
			2,
			4,
			false );
}

void clearImg(Mat &SrcImage)
{
	if(SrcImage.channels() == 3)
	{
		for(int row = 0; row < 480; row++)
		{
			uchar *dataPtr=  SrcImage.ptr<uchar>(row); 
			for(int col = 0; col < 640; col++)
			{
				*dataPtr++ = 255;
				*dataPtr++ = 255;
				*dataPtr++ = 255;
			}
		}
		string handString = "Hand Raise";
		putText(SrcImage,
				handString,
				Point(20, 20),
				FONT_HERSHEY_DUPLEX,
				1,
				Scalar(255, 0, 0),
				2,
				4,
				false );

		handString = "Hand Wave";
		putText(SrcImage,
				handString,
				Point(20, 50),
				FONT_HERSHEY_DUPLEX,
				1,
				Scalar(0, 255, 0),
				2,
				4,
				false );

		handString = "Hand Push";
		putText(SrcImage,
				handString,
				Point(20, 80),
				FONT_HERSHEY_DUPLEX,
				1,
				Scalar(0, 0, 255),
				2,
				4,
				false );


	}
	else if(SrcImage.channels() == 1)
	{
		for (int row = 0; row < 480; row++)
		{
			uchar *dataPtr = SrcImage.ptr<uchar>(row); 
			for(int col = 0; col < 640; col++)
			{
				*dataPtr++ = 255;				
			}
		}
	}
	
}


// callback invoked while a gesture is still in progress
void XN_CALLBACK_TYPE gestureProgress(  xn::GestureGenerator &generator,
										const XnChar *strGesture,
										const XnPoint3D *pPosition,
										XnFloat fProgress,
										void *pCookie)
{
	cout << strGesture << ":" << fProgress << " at " << *pPosition << endl;
}



int main( int argc, char **argv )
{
	Mat drawPadIMg(480, 640, CV_8UC3);
	Mat cameraImg(480, 640, CV_8UC3);

	namedWindow("Gesture", WINDOW_AUTOSIZE); 
	namedWindow("Camera", WINDOW_AUTOSIZE);
	
	clearImg(drawPadIMg);
	
	XnStatus res;
	char key = 0;

	xn::Context context;
	res = context.Init();

	xn::ImageMetaData imgMD;

	// create the image and gesture generators
	xn::ImageGenerator imageGenerator;
	res = imageGenerator.Create(context);

	xn::GestureGenerator gestureGenerator;
	res = gestureGenerator.Create(context);

	// Add the gestures to recognize
	gestureGenerator.AddGesture("Wave", NULL);
	gestureGenerator.AddGesture("Click", NULL);
	gestureGenerator.AddGesture("RaiseHand", NULL);

	// Register callback functions of gesture generator
	XnCallbackHandle handle;
	gestureGenerator.RegisterGestureCallbacks(gestureRecog, gestureProgress, (void *)drawPadIMg.data, handle);
	// Register the gesture callbacks: gestureRecog is called on recognition, gestureProgress on progress
	// updates; either may be NULL if not needed. pCookie is user data passed through to the callbacks,
	// and handle is used later to unregister them.
	context.StartGeneratingAll();	// start generating data
	res = context.WaitAndUpdateAll();

	// XN_STATUS_OK is 0, so the loop keeps running while WaitAndUpdateAll() succeeds
	while( (key != 27) && !(res = context.WaitAndUpdateAll()) )
	{
		if(key=='c')
		{
			clearImg(drawPadIMg);
		}
		imageGenerator.GetMetaData(imgMD);

		// convert the ImageMetaData buffer to a Mat (no data copy)
		uchar *imageMDPointer = (uchar *)imgMD.Data();  
		Mat imageRGB(480, 640, CV_8UC3, imageMDPointer);//Mat(int rows, int cols, int type, void* data, size_t step=AUTO_STEP);  
		cvtColor(imageRGB, cameraImg, CV_RGB2BGR);

		imshow("Gesture", drawPadIMg);
		imshow("Camera", cameraImg);

		key = waitKey(20);
	}
	gestureGenerator.UnregisterGestureCallbacks(handle);
	context.StopGeneratingAll();  
	context.Release();
	return 0;
}
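
The listing above ignores the XnStatus values returned by the OpenNI calls. In practice it is worth checking them; a small macro in the style of the OpenNI samples (a sketch of my own, not part of the code above) could look like this:

// Sketch of a status-check macro in the style of the OpenNI samples.
#define CHECK_RC(rc, what)                                            \
	if ((rc) != XN_STATUS_OK)                                         \
	{                                                                 \
		printf("%s failed: %s\n", (what), xnGetStatusString(rc));     \
		return (rc);                                                  \
	}

// Example usage inside main():
//   res = context.Init();
//   CHECK_RC(res, "Initialize context");
//   res = gestureGenerator.Create(context);
//   CHECK_RC(res, "Create GestureGenerator");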

That's it for today; I'll continue tomorrow.