在LCD上顯示攝像頭影象2__實現攝像頭模組
阿新 • • 發佈:2019-02-16
新建工程檔案,新建資料夾render、video、convert、include等,主要實現功能如下:
video – 對裝置的處理
convert – 視訊格式轉換
render – 縮放函式、合併函式
include – 標頭檔案
實現 video 檔案下的函式 (實現從攝像頭讀出資料)
video_manager.h ,按照流程圖實現
#ifndef _VIDEO_MANAGER__H
#define _VIDEO_MANAGER__H

#include <config.h>
#include <pic_operation.h> /* provides T_PixelDatas, filled in by GetFrame */

/* Number of capture buffers requested from the driver.
 * FIX: the original "#define NB_BUFFER 4;" carried a stray ';' which
 * expanded to "pucVideBuf[4;]" and broke every user of the macro. */
#define NB_BUFFER 4

/* Forward declarations so the two structures can reference each other.
 * FIX: the original typedefs were missing the 'struct' keyword and were
 * then illegally redefined at the struct definitions below. */
struct VideoDevice;
struct VideoOpr;
typedef struct VideoDevice T_VideoDevice, *PT_VideoDevice;
typedef struct VideoOpr T_VideoOpr, *PT_VideoOpr;

/* Describes one opened capture device. */
struct VideoDevice {
    int iFd;                /* file descriptor returned by open() */
    int iPixelFormat;       /* negotiated V4L2_PIX_FMT_* value */
    int iWidth;             /* frame width in pixels */
    int iHeight;            /* frame height in pixels */
    int iVideoBufCnt;       /* number of buffers actually in use */
    int iVideoBufMaxLen;    /* size in bytes of the largest buffer */
    int iVideoBufCurIndex;  /* index of the buffer last dequeued */
    unsigned char *pucVideBuf[NB_BUFFER]; /* mmap'ed (or malloc'ed) buffers */
    PT_VideoOpr ptOpr;      /* operations that drive this device */
};

/* One captured frame: pixel data plus its pixel format.
 * FIX: member renamed tpixelDatas -> tPixelDatas to match every use
 * in v4l2.c (ptVideoBuf->tPixelDatas.*). */
typedef struct VideoBuf {
    T_PixelDatas tPixelDatas; /* pixel data of the frame */
    int iPixelFormat;         /* T_PixelDatas carries no format, so add it here */
} T_VideoBuf, *PT_VideoBuf;

/* Operations a video module must provide.
 * FIX: added 'name' and 'ptNext', both required by video_manager.c
 * (printed by ShowVideoOpr, matched by GetVideoOpr, linked by
 * RegisterVideoOpr); renamed StoptDevice -> StopDevice to match the
 * ".StopDevice =" designated initializer in v4l2.c. */
struct VideoOpr {
    char *name;                                                        /* module name, e.g. "v4l2" */
    int (*InitDevice)(char *strDeviceName, PT_VideoDevice ptVideoDevice);
    int (*ExitDevice)(PT_VideoDevice ptVideoDevice);
    int (*GetFrame)(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf);
    int (*PutFrame)(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf);
    int (*StartDevice)(PT_VideoDevice ptVideoDevice);
    int (*StopDevice)(PT_VideoDevice ptVideoDevice);
    struct VideoOpr *ptNext;                                           /* next module in the registry list */
};

/* Public interface implemented in video_manager.c. */
int RegisterVideoOpr(PT_VideoOpr ptVideoOpr);
void ShowVideoOpr(void);
PT_VideoOpr GetVideoOpr(char *pcName);
int VideoInit(void);

#endif /* _VIDEO_MANAGER__H */
video_manager.c ,參考:fonts_manager.c
#include <config.h>
#include <video_manager.h>
#include <string.h>
static PT_VideoOpr g_ptVideoOprHead = NULL; /*定義連結串列頭*/
/**********************************************************************
 * Function   : RegisterVideoOpr
 * Description: Register a "video module" (a set of operations for
 *              grabbing frames from a capture device) by appending it
 *              to the global singly-linked registry list.
 * Input      : ptVideoOpr - operations structure to register
 * Output     : none
 * Return     : 0 on success
 **********************************************************************/
int RegisterVideoOpr(PT_VideoOpr ptVideoOpr)
{
    PT_VideoOpr *pptWalk = &g_ptVideoOprHead;

    /* Walk to the last ptNext slot; an empty list is handled by the
     * same loop (the head pointer itself is the slot). */
    while (*pptWalk)
    {
        pptWalk = &(*pptWalk)->ptNext;
    }

    ptVideoOpr->ptNext = NULL; /* new node always terminates the list */
    *pptWalk = ptVideoOpr;
    return 0;
}
/**********************************************************************
 * Function   : ShowVideoOpr
 * Description: List every registered video module: index and name,
 *              one per line.
 * Input      : none
 * Output     : none
 * Return     : none
 **********************************************************************/
void ShowVideoOpr(void)
{
    int iIndex;
    PT_VideoOpr ptWalk;

    /* Traverse the registry list, printing as we go. */
    for (iIndex = 0, ptWalk = g_ptVideoOprHead; ptWalk; ptWalk = ptWalk->ptNext)
    {
        printf("%02d %s\n", iIndex++, ptWalk->name);
    }
}
/**********************************************************************
 * Function   : GetVideoOpr
 * Description: Look up a registered video module by name.
 * Input      : pcName - module name to search for
 * Output     : none
 * Return     : NULL    - no module with that name is registered
 *              non-NULL - pointer to the matching PT_VideoOpr
 **********************************************************************/
PT_VideoOpr GetVideoOpr(char *pcName)
{
    PT_VideoOpr ptWalk;

    for (ptWalk = g_ptVideoOprHead; ptWalk != NULL; ptWalk = ptWalk->ptNext)
    {
        if (!strcmp(ptWalk->name, pcName))
        {
            return ptWalk; /* first match wins */
        }
    }
    return NULL;
}
/**********************************************************************
 * Function   : VideoInit
 * Description: Call the init function of every video module; currently
 *              only V4l2Init, which registers g_tV4l2VideoOpr.
 *              (Original header wrongly said "FontsInit"/"font modules"
 *              — copy-paste from fonts_manager.c.)
 * Input      : none
 * Output     : none
 * Return     : 0 - success, other values - failure
 **********************************************************************/
int VideoInit(void)
{
int iError;
iError = V4l2Init(); /* registers g_tV4l2VideoOpr */
return iError;
}
v4l2.c
首先構造並註冊結構體 g_tV4l2VideoOpr
/* Build the VideoOpr structure: the V4L2 implementation of the
 * video-operations interface, registered by V4l2Init() below. */
static T_VideoOpr g_tV4l2VideoOpr = {
.name = "v4l2", /* looked up via GetVideoOpr("v4l2") */
.InitDevice = V4l2InitDevice,
.ExitDevice = V4l2ExitDevice,
/* Streaming (mmap) pair by default; InitDevice swaps in the
 * ReadWrite pair for devices that only support read i/o. */
.GetFrame = V4l2GetFrameForStreaming,
.PutFrame = V4l2PutFrameForStreaming,
.StartDevice = V4l2StartDevice,
.StopDevice = V4l2StopDevice,
};
/* Register the V4L2 operations structure with the video manager.
 * Returns 0 on success (RegisterVideoOpr's return value). */
int V4l2Init(void)
{
return RegisterVideoOpr(&g_tV4l2VideoOpr);
}
接下來逐步實現如下功能函式
1.1 V4l2InitDevice,
1.2 V4l2ExitDevice,
1.3 V4l2GetFrameForStreaming,
1.4 V4l2PutFrameForStreaming,
1.5 V4l2StartDevice,
1.6 V4l2StopDevice
1.1 V4l2InitDevice
1.1.1 參考luvcview的v4l2uvc.c–init_v4l2
if ((vd->fd = open(vd->videodevice, O_RDWR)) == -1) {
perror("ERROR opening V4L interface \n");
exit(1);
}
memset(&vd->cap, 0, sizeof(struct v4l2_capability));
ret = ioctl(vd->fd, VIDIOC_QUERYCAP, &vd->cap);
if (ret < 0) {
printf("Error opening device %s: unable to query device.\n",
vd->videodevice);
goto fatal;
}
/*判斷是否為視訊捕獲裝置*/
if ((vd->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
printf("Error opening device %s: video capture not supported.\n",
vd->videodevice);
goto fatal;;
}
if (vd->grabmethod) {
/* 判斷視訊流是STREAMING還是READWRITE */
if (!(vd->cap.capabilities & V4L2_CAP_STREAMING)) {
printf("%s does not support streaming i/o\n", vd->videodevice);
goto fatal;
}
} else {
if (!(vd->cap.capabilities & V4L2_CAP_READWRITE)) {
printf("%s does not support read i/o\n", vd->videodevice);
goto fatal;
}
注:
關於VIDIOC_QUERYCAP
的用法,可以在核心中搜索VIDIOC_QUERYCAP
,找到Uvc_v4l2.c (drivers\media\video\uvc): case VIDIOC_QUERYCAP:
/* Query capabilities */
case VIDIOC_QUERYCAP:
{
struct v4l2_capability *cap = arg;/* 傳入的引數為v4l2_capability */
memset(cap, 0, sizeof *cap);
strlcpy(cap->driver, "uvcvideo", sizeof cap->driver);
strlcpy(cap->card, vdev->name, sizeof cap->card);
usb_make_path(stream->dev->udev,
cap->bus_info, sizeof(cap->bus_info));
cap->version = LINUX_VERSION_CODE;
if (stream->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
cap->capabilities = V4L2_CAP_VIDEO_CAPTURE
| V4L2_CAP_STREAMING;
else
cap->capabilities = V4L2_CAP_VIDEO_OUTPUT
| V4L2_CAP_STREAMING;
break;
}
實現攝像頭效能查詢,資料流查詢:
/**********************************************************************
 * Function   : V4l2InitDevice (stage 1: open device + query capabilities)
 * Description: Open the capture device and interrogate the driver with
 *              VIDIOC_QUERYCAP, then check that it is a video-capture
 *              device and report which i/o method it supports.
 * Input      : strDevName    - device path, e.g. "/dev/video0"
 *              ptVideoDevice - device descriptor to fill in
 * Return     : 0 - success, -1 - failure (device closed again)
 **********************************************************************/
static int V4l2InitDevice(char *strDevName, PT_VideoDevice ptVideoDevice)
{
    int iFd;
    int iError;
    struct v4l2_capability tV4l2Cap; /* device capability structure */

    iFd = open(strDevName, O_RDWR); /* open(device name, read/write) */
    if (iFd < 0)
    {
        DBG_PRINTF("can not open %s\n", strDevName);
        return -1;
    }
    ptVideoDevice->iFd = iFd; /* remember the handle for all later ioctls */

    /* BUG FIX: the original read tV4l2Cap.capabilities without ever
     * filling the structure in (uninitialized read); ask the driver
     * first, as luvcview's init_v4l2 does. */
    memset(&tV4l2Cap, 0, sizeof(struct v4l2_capability));
    iError = ioctl(iFd, VIDIOC_QUERYCAP, &tV4l2Cap);
    if (iError < 0)
    {
        DBG_PRINTF("Error opening device %s: unable to query device.\n", strDevName);
        goto err_exit;
    }

    /* Must be a capture device. */
    if (!(tV4l2Cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        DBG_PRINTF("%s is not a video capture device\n", strDevName);
        goto err_exit;
    }

    /* Report which i/o interface the driver offers. */
    if (tV4l2Cap.capabilities & V4L2_CAP_STREAMING)
    {
        DBG_PRINTF("%s supports streaming i/o\n", strDevName);
    }
    if (tV4l2Cap.capabilities & V4L2_CAP_READWRITE)
    {
        DBG_PRINTF("%s supports read i/o\n", strDevName);
    }
    return 0;

err_exit: /* FIX: label was referenced by the original but never defined */
    close(iFd);
    return -1;
}
1.1.2 參考luvcview的v4l2uvc.c – enum_frame_formats,列出了支援的格式
int enum_frame_formats(int dev)
{
int ret;
struct v4l2_fmtdesc fmt;
memset(&fmt, 0, sizeof(fmt));
fmt.index = 0;
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while ((ret = ioctl(dev, VIDIOC_ENUM_FMT, &fmt)) == 0) {
fmt.index++;
printf("{ pixelformat = '%c%c%c%c', description = '%s' }\n",
fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
(fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
fmt.description);
ret = enum_frame_sizes(dev, fmt.pixelformat);
if (ret != 0)
printf(" Unable to enumerate frame sizes.\n");
}
if (errno != EINVAL) {
printf("ERROR enumerating frame formats: %d\n", errno);
return errno;
}
return 0;
}
實現攝像頭格式查詢設定:
/* Pixel formats this program can handle (values from the kernel's
 * videodev2.h). */
static int g_aiSupportedFormats[] = {V4L2_PIX_FMT_YUYV, V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_RGB565};

/* Return 1 if iPixelFormat appears in the supported-format table,
 * otherwise 0. */
static int isSupportThisFormat(int iPixelFormat)
{
    size_t uIdx;
    size_t uCount = sizeof(g_aiSupportedFormats) / sizeof(g_aiSupportedFormats[0]);

    for (uIdx = 0; uIdx < uCount; uIdx++)
    {
        if (iPixelFormat == g_aiSupportedFormats[uIdx])
        {
            return 1;
        }
    }
    return 0;
}
static int V4l2InitDevice(char *strDevName, PT_VideoDevice ptVideoDevice)
{
...
struct v4l2_format tV4l2Fmt;/*定義格式設定結構體*/
memset(&tFmtDesc, 0, sizeof(tFmtDesc));
tFmtDesc.index = 0;
tFmtDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;/*視訊捕獲裝置*/
while ((iError = ioctl(iFd, VIDIOC_ENUM_FMT, &tFmtDesc)) == 0) {
if (isSupportThisFormat(tFmtDesc.pixelformat)) /*如果支援裝置的某種格式pixelformat*/
{
ptVideoDevice->iPixelFormat = tFmtDesc.pixelformat;
break;
}
tFmtDesc.index++;
}
if (!ptVideoDevice->iPixelFormat) /*不能支援攝像頭驅動的格式*/
{
DBG_PRINTF("can not support the format of this device\n");
goto err_exit;
}
}
1.1.3 參考:luvcview–v4l2uvc.c–init_v4l2–/* set format in */
/* set format in */
memset(&vd->fmt, 0, sizeof(struct v4l2_format));
vd->fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
vd->fmt.fmt.pix.width = vd->width;
vd->fmt.fmt.pix.height = vd->height;
vd->fmt.fmt.pix.pixelformat = vd->formatIn;
vd->fmt.fmt.pix.field = V4L2_FIELD_ANY;
ret = ioctl(vd->fd, VIDIOC_S_FMT, &vd->fmt);
if (ret < 0) {
printf("Unable to set format: %d.\n", errno);
goto fatal;
}
ret = ioctl(vd->fd, VIDIOC_S_FMT, &vd->fmt);
if (ret < 0) {
printf("Unable to set format: %d.\n", errno);
goto fatal;
}
if ((vd->fmt.fmt.pix.width != vd->width) ||
(vd->fmt.fmt.pix.height != vd->height)) {
printf(" format asked unavailable get width %d height %d \n",
vd->fmt.fmt.pix.width, vd->fmt.fmt.pix.height);
vd->width = vd->fmt.fmt.pix.width;
vd->height = vd->fmt.fmt.pix.height;
實現攝像頭視訊格式設定:
static int V4l2InitDevice(char *strDevName, PT_VideoDevice ptVideoDevice)
{
...
struct v4l2_format tV4l2Fmt;/*定義格式設定結構體*/
int iLcdWidth;
int iLcdHeigt;
int iLcdBpp;
/* set format in */
GetDispResolution(&iLcdWidth, &iLcdHeigt, &iLcdBpp);/*讀出LCD的解析度引數*/
memset(&tV4l2Fmt, 0, sizeof(struct v4l2_format));
tV4l2Fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2Fmt.fmt.pix.pixelformat = ptVideoDevice->iPixelFormat;/*設定為支援的格式*/
tV4l2Fmt.fmt.pix.width = iLcdWidth;
tV4l2Fmt.fmt.pix.height = iLcdHeigt;
tV4l2Fmt.fmt.pix.field = V4L2_FIELD_ANY;
/* 如果驅動程式發現無法支援某些引數(比如解析度),
* 它會調整這些引數, 並且返回給應用程式
*/
iError = ioctl(iFd, VIDIOC_S_FMT, &tV4l2Fmt);
if (iError)
{
DBG_PRINTF("Unable to set format\n");
goto err_exit;
}
ptVideoDevice->iWidth = tV4l2Fmt.fmt.pix.width;
ptVideoDevice->iHeight = tV4l2Fmt.fmt.pix.height;
}
注:disp_manage.c--GetDispResolution,獲取LCD解析度函式
1.1.4 參考:luvcview–v4l2uvc.c–init_v4l2 – /* request buffers */
/* request buffers */
memset(&vd->rb, 0, sizeof(struct v4l2_requestbuffers));
vd->rb.count = NB_BUFFER;
vd->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
vd->rb.memory = V4L2_MEMORY_MMAP;
ret = ioctl(vd->fd, VIDIOC_REQBUFS, &vd->rb);
if (ret < 0) {
printf("Unable to allocate buffers: %d.\n", errno);
goto fatal;
}
實現資料快取請求:
#define NB_BUFFER 4 /*申請了四個buf*/
static int V4l2InitDevice(char *strDevName, PT_VideoDevice ptVideoDevice)
{
...
struct v4l2_requestbuffers tV4l2ReqBuffs;/*STREAMING介面的buf*/
/* request buffers */
memset(&tV4l2ReqBuffs, 0, sizeof(struct v4l2_requestbuffers));
tV4l2ReqBuffs.count = NB_BUFFER; /* NB_BUFFER =4 */
tV4l2ReqBuffs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2ReqBuffs.memory = V4L2_MEMORY_MMAP;
iError = ioctl(iFd, VIDIOC_REQBUFS, &tV4l2ReqBuffs);
if (iError)
{
DBG_PRINTF("Unable to allocate buffers.\n");
goto err_exit;
}
}
1.1.5 參考:luvcview–v4l2uvc.c–init_v4l2 –/* map the buffers /、/ Queue the buffers. */
/* map the buffers */
for (i = 0; i < NB_BUFFER; i++) {
memset(&vd->buf, 0, sizeof(struct v4l2_buffer));
vd->buf.index = i;
vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
vd->buf.memory = V4L2_MEMORY_MMAP;
ret = ioctl(vd->fd, VIDIOC_QUERYBUF, &vd->buf);
if (ret < 0) {
printf("Unable to query buffer (%d).\n", errno);
goto fatal;
}
if (debug)
printf("length: %u offset: %u\n", vd->buf.length,
vd->buf.m.offset);
vd->mem[i] = mmap(0 /* start anywhere */ ,
vd->buf.length, PROT_READ, MAP_SHARED, vd->fd,
vd->buf.m.offset);
if (vd->mem[i] == MAP_FAILED) {
printf("Unable to map buffer (%d)\n", errno);
goto fatal;
}
if (debug)
printf("Buffer mapped at address %p.\n", vd->mem[i]);
}
/* Queue the buffers. */
for (i = 0; i < NB_BUFFER; ++i) {
memset(&vd->buf, 0, sizeof(struct v4l2_buffer));
vd->buf.index = i;
vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
vd->buf.memory = V4L2_MEMORY_MMAP;
ret = ioctl(vd->fd, VIDIOC_QBUF, &vd->buf);
if (ret < 0) {
printf("Unable to queue buffer (%d).\n", errno);
goto fatal;;
}
}
return 0;
fatal:
return -1;
實現buffers對映到應用程式的儲存空間,並將buffers放入佇列
struct v4l2_requestbuffers tV4l2ReqBuffs;/*STREAMING介面的buf*/
struct v4l2_buffer tV4l2Buf; /**READWRITE介面的buf*/
static int V4l2InitDevice(char *strDevName, PT_VideoDevice ptVideoDevice)
{
...
if (tV4l2Cap.capabilities & V4L2_CAP_STREAMING)/*STREAMING介面的裝置*/
{
/* map the buffers */
for (i = 0; i < ptVideoDevice->iVideoBufCnt; i++)
{
memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
tV4l2Buf.index = i;
tV4l2Buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2Buf.memory = V4L2_MEMORY_MMAP;
iError = ioctl(iFd, VIDIOC_QUERYBUF, &tV4l2Buf);
if (iError)
{
DBG_PRINTF("Unable to query buffer.\n");
goto err_exit;
}
ptVideoDevice->iVideoBufMaxLen = tV4l2Buf.length;
/*對於每個pucVideBuf呼叫MMAP,將地址對映到應用程式的空間*/
ptVideoDevice->pucVideBuf[i] = mmap(0 /* start anywhere */ ,
tV4l2Buf.length, PROT_READ, MAP_SHARED, iFd,
tV4l2Buf.m.offset);
if (ptVideoDevice->pucVideBuf[i] == MAP_FAILED)
{
DBG_PRINTF("Unable to map buffer\n");
goto err_exit;
}
}
/* Queue the buffers. */
for (i = 0; i < ptVideoDevice->iVideoBufCnt; i++)
{
memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
tV4l2Buf.index = i;
tV4l2Buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4l2Buf.memory = V4L2_MEMORY_MMAP;
iError = ioctl(iFd, VIDIOC_QBUF, &tV4l2Buf);
if (iError)
{
DBG_PRINTF("Unable to queue buffer.\n");
goto err_exit;
}
}
}
else if (tV4l2Cap.capabilities & V4L2_CAP_READWRITE)/*READWRITE介面的裝置*/
{
g_tV4l2VideoOpr.GetFrame = V4l2GetFrameForReadWrite;
g_tV4l2VideoOpr.PutFrame = V4l2PutFrameForReadWrite;
/* read(fd, buf, size) */
ptVideoDevice->iVideoBufCnt = 1;
/* 在這個程式所能支援的格式裡, 一個象素最多隻需要4位元組 */
ptVideoDevice->iVideoBufMaxLen = ptVideoDevice->iWidth * ptVideoDevice->iHeight * 4;
ptVideoDevice->pucVideBuf[0] = malloc(ptVideoDevice->iVideoBufMaxLen);
}
return 0;
err_exit:
close(iFd);
return -1;
}
1.2 V4l2ExitDevice
核心中執行 man mmap
知道 mmap 的包含標頭檔案 #include &lt;sys/mman.h&gt;
/* Release the resources acquired by V4l2InitDevice: unmap every
 * capture buffer and close the device file descriptor.
 * NOTE(review): for a read/write-interface device, pucVideBuf[0] was
 * allocated with malloc(), not mmap(), so munmap() here looks wrong
 * for that path — confirm and use free() for that case. */
static int V4l2ExitDevice(PT_VideoDevice ptVideoDevice)
{
int i;
for (i = 0; i < ptVideoDevice->iVideoBufCnt; i++)
{
if (ptVideoDevice->pucVideBuf[i])
{
munmap(ptVideoDevice->pucVideBuf[i], ptVideoDevice->iVideoBufMaxLen);/* mapping address and size */
ptVideoDevice->pucVideBuf[i] = NULL; /* avoid dangling pointer / double unmap */
}
}
close(ptVideoDevice->iFd);/* close the device handle */
return 0;
}
1.5 實現V4l2StartDevice 參考 v4l2uvc.c – video_enable
static int video_enable(struct vdIn *vd)
{
int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret;
ret = ioctl(vd->fd, VIDIOC_STREAMON, &type);
if (ret < 0) {
printf("Unable to %s capture: %d.\n", "start", errno);
return ret;
}
vd->isstreaming = 1;
return 0;
}
實現:
/* Start the capture stream (VIDIOC_STREAMON): the driver begins
 * filling the queued buffers. Returns 0 on success, -1 on error. */
static int V4l2StartDevice(PT_VideoDevice ptVideoDevice)
{
    /* VIDIOC_STREAMON takes a pointer to the buffer type. */
    int iBufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(ptVideoDevice->iFd, VIDIOC_STREAMON, &iBufType))
    {
        DBG_PRINTF("Unable to start capture.\n");
        return -1;
    }
    return 0;
}
1.6 實現 V4l2StopDevice 參考 v4l2uvc.c – video_disable
static int video_disable(struct vdIn *vd)
{
int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
int ret;
ret = ioctl(vd->fd, VIDIOC_STREAMOFF, &type);
if (ret < 0) {
printf("Unable to %s capture: %d.\n", "stop", errno);
return ret;
}
vd->isstreaming = 0;
return 0;
}
實現:
/* Stop the capture stream (VIDIOC_STREAMOFF): the driver stops
 * filling buffers. Returns 0 on success, -1 on error. */
static int V4l2StopDevice(PT_VideoDevice ptVideoDevice)
{
    /* VIDIOC_STREAMOFF takes a pointer to the buffer type. */
    int iBufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(ptVideoDevice->iFd, VIDIOC_STREAMOFF, &iBufType))
    {
        DBG_PRINTF("Unable to stop capture.\n");
        return -1;
    }
    return 0;
}
1.3 V4l2GetFrameForStreaming
參考:
① man poll 檢視poll的用法
② VIDIOC_DQBUF的用法:
memset(&vd->buf, 0, sizeof(struct v4l2_buffer));
vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
vd->buf.memory = V4L2_MEMORY_MMAP;
ret = ioctl(vd->fd, VIDIOC_DQBUF, &vd->buf);
if (ret < 0) {
printf("Unable to dequeue buffer (%d).\n", errno);
goto err;
/* Write the raw data to the file */
ret = fwrite(vd->mem[vd->buf.index], vd->buf.bytesused, 1, frame);
if(ret < 1) {
perror("Unable to write to file");
goto end_capture;
}
③ tPixelDatas設定可以參考 pic_opration.h中
/* 圖片的象素資料 */
typedef struct PixelDatas {
int iWidth; /* 寬度: 一行有多少個象素 */
int iHeight; /* 高度: 一列有多少個象素 */
int iBpp; /* 一個象素用多少位來表示 */
int iLineBytes; /* 一行資料有多少位元組 */
int iTotalBytes; /* 所有位元組數 */
unsigned char *aucPixelDatas; /* 象素資料儲存的地方 */
}T_PixelDatas, *PT_PixelDatas;
實現:
/**********************************************************************
 * Function   : V4l2GetFrameForStreaming
 * Description: Wait until the driver has a frame ready (poll), dequeue
 *              the filled buffer (VIDIOC_DQBUF) and describe it in
 *              *ptVideoBuf. The buffer index is remembered so
 *              V4l2PutFrameForStreaming can re-queue it later.
 * Input      : ptVideoDevice - initialized streaming device
 * Output     : ptVideoBuf    - filled with format/size/pointer of frame
 * Return     : 0 - success, -1 - failure
 **********************************************************************/
static int V4l2GetFrameForStreaming(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
    struct pollfd tFds[1]; /* poll descriptor array */
    int iRet;
    struct v4l2_buffer tV4l2Buf;

    /* Block until a frame is available. */
    tFds[0].fd     = ptVideoDevice->iFd;
    tFds[0].events = POLLIN;
    iRet = poll(tFds, 1, -1); /* poll(array, entries, timeout): -1 = wait forever */
    if (iRet <= 0)
    {
        DBG_PRINTF("poll error!\n");
        return -1;
    }

    /* VIDIOC_DQBUF: take a filled buffer off the outgoing queue. */
    memset(&tV4l2Buf, 0, sizeof(struct v4l2_buffer));
    tV4l2Buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    tV4l2Buf.memory = V4L2_MEMORY_MMAP;
    iRet = ioctl(ptVideoDevice->iFd, VIDIOC_DQBUF, &tV4l2Buf);
    if (iRet < 0)
    {
        DBG_PRINTF("Unable to dequeue buffer.\n");
        return -1;
    }

    /* Remember which buffer we took so PutFrame can re-queue it. */
    ptVideoDevice->iVideoBufCurIndex = tV4l2Buf.index;

    ptVideoBuf->iPixelFormat        = ptVideoDevice->iPixelFormat;
    ptVideoBuf->tPixelDatas.iWidth  = ptVideoDevice->iWidth;
    ptVideoBuf->tPixelDatas.iHeight = ptVideoDevice->iHeight;
    /* FIX: original ternary chain had mismatched '?'/':' and did not
     * compile. MJPEG is compressed, so "bits per pixel" is not
     * meaningful: use 0 for it. */
    ptVideoBuf->tPixelDatas.iBpp = (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_YUYV)   ? 16 :
                                   (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_MJPEG)  ? 0  :
                                   (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_RGB565) ? 16 : 0;
    ptVideoBuf->tPixelDatas.iLineBytes  = ptVideoDevice->iWidth * ptVideoBuf->tPixelDatas.iBpp / 8;
    /* FIX: tV4l2Buf is a struct, not a pointer ('.' not '->'). */
    ptVideoBuf->tPixelDatas.iTotalBytes = tV4l2Buf.bytesused;
    ptVideoBuf->tPixelDatas.aucPixelDatas = ptVideoDevice->pucVideBuf[tV4l2Buf.index];

    return 0;
}
1.4 V4l2PutFrameForStreaming
參考:v4l2uvc.c–init_v4l2
/* Queue the buffers. */
for (i = 0; i < NB_BUFFER; ++i) {
memset(&vd->buf, 0, sizeof(struct v4l2_buffer));
vd->buf.index = i;
vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
vd->buf.memory = V4L2_MEMORY_MMAP;
ret = ioctl(vd->fd, VIDIOC_QBUF, &vd->buf);
if (ret < 0) {
printf("Unable to queue buffer (%d).\n", errno);
goto fatal;;
}
}
實現:
/**********************************************************************
 * Function   : V4l2PutFrameForStreaming
 * Description: Hand the most recently dequeued buffer back to the
 *              driver's incoming queue (VIDIOC_QBUF) so it can be
 *              refilled with a new frame.
 * Input      : ptVideoDevice - device whose buffer is returned
 *              ptVideoBuf    - unused; kept for interface symmetry
 * Return     : 0 - success, -1 - failure
 **********************************************************************/
static int V4l2PutFrameForStreaming(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
    struct v4l2_buffer tBufToQueue;
    int iRet;

    memset(&tBufToQueue, 0, sizeof(struct v4l2_buffer));
    tBufToQueue.index  = ptVideoDevice->iVideoBufCurIndex; /* buffer taken by GetFrame */
    tBufToQueue.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    tBufToQueue.memory = V4L2_MEMORY_MMAP;

    iRet = ioctl(ptVideoDevice->iFd, VIDIOC_QBUF, &tBufToQueue);
    if (iRet)
    {
        DBG_PRINTF("Unable to queue buffer.\n");
        return -1;
    }
    return 0;
}
V4l2GetFrameForReadWrite和V4l2PutFrameForReadWrite
/**********************************************************************
 * Function   : V4l2GetFrameForReadWrite
 * Description: Grab one frame via blocking read() into the single
 *              pre-allocated buffer and describe it in *ptVideoBuf.
 * Input      : ptVideoDevice - initialized read/write-interface device
 * Output     : ptVideoBuf    - filled with format/size/pointer of frame
 * Return     : 0 - success, -1 - failure
 **********************************************************************/
static int V4l2GetFrameForReadWrite(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
    int iRet;

    iRet = read(ptVideoDevice->iFd, ptVideoDevice->pucVideBuf[0], ptVideoDevice->iVideoBufMaxLen);
    if (iRet <= 0)
    {
        return -1;
    }

    ptVideoBuf->iPixelFormat        = ptVideoDevice->iPixelFormat;
    ptVideoBuf->tPixelDatas.iWidth  = ptVideoDevice->iWidth;
    ptVideoBuf->tPixelDatas.iHeight = ptVideoDevice->iHeight;
    /* FIX: original ternary chain had mismatched '?'/':' and did not
     * compile. MJPEG is compressed, so "bits per pixel" is not
     * meaningful: use 0 for it. */
    ptVideoBuf->tPixelDatas.iBpp = (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_YUYV)   ? 16 :
                                   (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_MJPEG)  ? 0  :
                                   (ptVideoDevice->iPixelFormat == V4L2_PIX_FMT_RGB565) ? 16 : 0;
    ptVideoBuf->tPixelDatas.iLineBytes  = ptVideoDevice->iWidth * ptVideoBuf->tPixelDatas.iBpp / 8;
    ptVideoBuf->tPixelDatas.iTotalBytes = iRet; /* bytes actually read */
    ptVideoBuf->tPixelDatas.aucPixelDatas = ptVideoDevice->pucVideBuf[0];

    return 0;
}
/* Nothing to return for the read()-based interface: the single buffer
 * is owned by this module, so "putting back" a frame is a no-op. */
static int V4l2PutFrameForReadWrite(PT_VideoDevice ptVideoDevice, PT_VideoBuf ptVideoBuf)
{
return 0;
}
video資料夾函式實現。