Qt淺談之四十Centos下Qt結合v4l2實現的視訊顯示
阿新 • • 發佈:2019-02-17
一、簡介
v4l2是針對uvc免驅usb裝置的程式設計框架,主要用於採集usb攝像頭。 可從網上下載最新的原始碼(包括v4l2.c和v4l2.h兩個檔案),本文中修改過。Qt執行介面如下(動態變化的):
二、詳解
1、準備
(1)插入usb攝像頭,檢測裝置檔案/dev/video0
與程式碼中的pd.dev_name = "/dev/video0";保持一致。
(2)檢測顏色編碼
安裝包#yum install v4l-utils,然後執行命令#v4l2-ctl -d /dev/video0 --list-formats
顏色編碼格式為YUYV,與程式碼中的s->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;保持一致。
(3)遇到錯誤
在centos6.6中,遇到了錯誤:
VIDIOC_STREAMON error 28, 裝置上沒有空間
沒有查到原因,不清楚什麼問題,試了如下方法也不行:#rmmod uvcvideo
#modprobe uvcvideo quirks=128
於是切換到centos6.3上,能順利的執行(在虛擬機器中也是可以正常執行的)。2、主要點
(1)將YUYV轉換成rgb顯示在介面,以前使用MPEG沒有顯示(2)將視訊流儲存到本地檔案(最小單位為秒,需要更快的可以調整到毫秒)convert_yuv_to_rgb_buffer((unsigned char *)pd.buffers[pd.buf.index].start,bufrgb,640,480); QImage image(bufrgb,640,480,QImage::Format_RGB888); ui.displayLabel->setPixmap(QPixmap::fromImage(image));
if (bufrgb > 0 && strlen((char *)bufrgb) > 0) { tm_time = localtime(&now); char filename[30] = {0}; sprintf(filename,"%4d-%02d-%02d_%02d.%02d.%02d.png",1900+tm_time->tm_year,1+tm_time->tm_mon,tm_time->tm_mday, tm_time->tm_hour,tm_time->tm_min,tm_time->tm_sec); QImage image(bufrgb,640,480,QImage::Format_RGB888); image.save(filename); }
3、完整程式碼
(1)v4l2.h#ifndef __V4L2_H__
#define __V4L2_H__
#include <linux/types.h>
#include <linux/videodev2.h>
#ifdef __cplusplus
extern "C" {
#endif
/* One mmap'ed frame buffer shared with the driver. */
typedef struct
{
    void *start;    /* mapping address returned by mmap() */
    size_t length;  /* mapping length in bytes */
}buffer;
/* All per-device capture state threaded through the v4l2 helpers. */
typedef struct
{
    int fd;                  /* device file descriptor; set by open_device() */
    int n_buffers;           /* number of entries in buffers[] */
    char *dev_name;          /* device path, e.g. "/dev/video0"; set by caller */
    buffer *buffers;         /* mmap'ed frame buffers; set by init_mmap() */
    struct v4l2_buffer buf;  /* most recently dequeued buffer (read_frame) */
    struct v4l2_format fmt;  /* negotiated pixel format (init_device) */
}pass_data;
/* High-level lifecycle: open+configure+map+stream on, and full teardown. */
int init_dev (pass_data *s);
void stop_dev (pass_data *s);
/* Per-frame cycle: dequeue a filled buffer into s->buf / hand it back. */
void read_frame(pass_data *s);
void return_data (pass_data *s);
/* Individual setup/teardown steps used by init_dev()/stop_dev(). */
void init_mmap(pass_data *s);
void init_device(pass_data *s);
int open_device(pass_data *s);
void start_capturing(pass_data *s);
void close_device(pass_data *s);
void stop_capturing(pass_data *s);
void stop_device(pass_data *s);
/* Utilities: fatal-error exit, EINTR-safe ioctl, frame dump, YUYV->RGB888. */
void errno_exit(const char *s);
int xioctl(int fd, int request, void *arg);
void process_image(void *p, pass_data *s, int i);
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);
#ifdef __cplusplus
}
#endif
#endif
(2)v4l2.c#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include "v4l2.h"
/* Zero an lvalue of any type (pass the object itself, not a pointer). */
#define CLEAR(x) memset (&x, 0, sizeof(x))
/* Rotating set of output files process_image() dumps raw frames into. */
char *mpeg[] = {"./1.mpeg", "./2.mpeg", "./3.mpeg", "./4.mpeg", "./5.mpeg"};
/*
 * Bring the capture pipeline fully up: open the device node, negotiate the
 * format, map the driver buffers and start streaming.
 * Returns 0 on success, or open_device()'s negative error code; the later
 * steps abort the process themselves on failure.
 */
int init_dev (pass_data *s)
{
    const int rc = open_device(s);
    if (rc != 0)
        return rc;

    init_device(s);
    init_mmap(s);
    start_capturing(s);

    fprintf(stdout, "'%s' initialize finish ...\n", s->dev_name);
    return 0;
}
/*
 * Full teardown, in the reverse order of init_dev(): stop streaming first,
 * then unmap the buffers, then close the file descriptor. The order matters:
 * munmap of in-flight buffers or closing a streaming fd is driver-hostile.
 */
void stop_dev (pass_data *s)
{
    stop_capturing(s);
    stop_device(s);
    close_device(s);
    fprintf(stdout, "close '%s' ...\n", s->dev_name);
}
/*
 * Dump one raw captured frame to the output file mpeg[i].
 * p points at the start of the mmap'ed frame data, s supplies the negotiated
 * frame size (fmt.fmt.pix.sizeimage), i selects which of the rotating output
 * files to (re)create. Aborts the process on any I/O failure.
 * NOTE(review): i indexes the 5-element mpeg[] table unchecked -- callers
 * must keep it in [0,4].
 */
void process_image(void *p, pass_data *s, int i)
{
    fputc ('.', stdout);   /* progress tick */
    fflush (stdout);
    fprintf (stderr, "%s", mpeg[i]);

    int fd = open (mpeg[i], O_RDWR | O_CREAT | O_TRUNC, 0644);
    if (fd == -1)
        errno_exit("open");
    /* The buffer holds raw pixel data, not a v4l2_buffer struct; the old
     * cast to (struct v4l2_buffer *) was meaningless and has been dropped. */
    if (write (fd, p, s->fmt.fmt.pix.sizeimage) == -1)
        errno_exit("write");
    close (fd);
}
/*
 * Dequeue the next filled buffer from the driver into s->buf.
 * On success s->buf.index identifies which mmap'ed buffer holds the frame;
 * the caller must hand the buffer back via return_data() once done with it.
 * NOTE(review): EAGAIN is treated as fatal here (errno_exit kills the
 * process). The fd is opened without O_NONBLOCK in open_device(), so DQBUF
 * should block rather than return EAGAIN -- but a retry/return would be
 * friendlier if that ever changes; confirm before reusing this code.
 */
void read_frame(pass_data *s)
{
    CLEAR (s->buf);
    s->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    s->buf.memory = V4L2_MEMORY_MMAP;
    if (xioctl (s->fd, VIDIOC_DQBUF, &s->buf) == -1)
    {
        switch (errno)
        {
        case EAGAIN:
            errno_exit ("VIDIOC_DQBUF");
        case EIO:
            /* could ignore EIO, see spec. */
        default:
            errno_exit ("VIDIOC_DQBUF");
        }
    }
    /* index must refer to one of the buffers mapped by init_mmap() */
    assert (s->buf.index < s->n_buffers);
}
/*
 * Re-queue the buffer previously dequeued by read_frame() so the driver can
 * fill it again. Aborts the process if VIDIOC_QBUF fails.
 */
void return_data (pass_data *s)
{
    if (xioctl (s->fd, VIDIOC_QBUF, &s->buf) != -1)
        return;
    errno_exit ("VIDIOC_QBUF");
}
/*
 * Queue every mmap'ed buffer to the driver, then switch video streaming on.
 * Aborts the process on any ioctl failure.
 */
void start_capturing(pass_data *s)
{
    int idx;
    for (idx = 0; idx < s->n_buffers; ++idx)
    {
        struct v4l2_buffer vbuf;
        CLEAR (vbuf);
        vbuf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vbuf.memory = V4L2_MEMORY_MMAP;
        vbuf.index  = idx;
        if (xioctl (s->fd, VIDIOC_QBUF, &vbuf) == -1)
            errno_exit("VIDIOC_QBUF");
    }

    enum v4l2_buf_type stream_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl (s->fd, VIDIOC_STREAMON, &stream_type))
        errno_exit("VIDIOC_STREAMON");
}
/*
 * Switch video streaming off; in-flight buffers are implicitly dequeued by
 * the driver. Aborts the process if VIDIOC_STREAMOFF fails.
 */
void stop_capturing(pass_data *s)
{
    enum v4l2_buf_type stream_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl (s->fd, VIDIOC_STREAMOFF, &stream_type))
        errno_exit("VIDIOC_STREAMOFF");
}
/*
 * Ask the driver for 20 capture buffers and mmap each one into our address
 * space, filling s->buffers / s->n_buffers. Exits the process on failure.
 */
void init_mmap(pass_data *s)
{
    struct v4l2_requestbuffers req;
    CLEAR (req);
    req.count = 20;   /* request 20 buffers; the driver may grant fewer */
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (xioctl (s->fd, VIDIOC_REQBUFS, &req))
    {
        if (EINVAL == errno)
        {
            /* EINVAL here means mmap streaming I/O is unsupported */
            fprintf(stderr, "%s does not support 'memory mapping'\n", s->dev_name);
            exit (EXIT_FAILURE);
        }
        else
        {
            errno_exit ("VIDIOC_REQBUFS");
        }
    }
    /* fewer than 2 buffers cannot sustain continuous streaming */
    if (req.count < 2)
    {
        fprintf(stderr, "Insufficient buffer memory on %s\n", s->dev_name);
        exit (EXIT_FAILURE);
    }
    if ((s->buffers = (buffer *)calloc (req.count, sizeof (*s->buffers))) == NULL)
    {
        fprintf(stderr, "Out of memory\n");
        exit ( EXIT_FAILURE);
    }
    /* Query each buffer's offset/length and map it MAP_SHARED so the driver
     * writes frames directly into our address space.
     * (n_buffers is int vs req.count __u32 -- fine for these small counts.) */
    for (s->n_buffers = 0; s->n_buffers < req.count; ++ s->n_buffers)
    {
        struct v4l2_buffer buf;
        CLEAR (buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = s->n_buffers;
        if (xioctl (s->fd, VIDIOC_QUERYBUF, &buf) == -1)
            errno_exit("VIDIOC_QUERYBUF");
        s->buffers[s->n_buffers].length = buf.length;
        s->buffers[s->n_buffers].start =
            mmap( NULL,
                  buf.length,
                  PROT_READ | PROT_WRITE,
                  MAP_SHARED,
                  s->fd,
                  buf.m.offset
            );
        if (s->buffers[s->n_buffers].start == MAP_FAILED)
            errno_exit ("mmap");
#if _DEBUG_
        fprintf(stdout, "%d -> %p\n", s->n_buffers, s->buffers[s->n_buffers].start);
#endif
    }
}
/*
 * Query and configure the capture device:
 *   - verify it is a V4L2 video-capture device supporting streaming I/O,
 *   - reset cropping to the driver's default rectangle (errors ignored),
 *   - request 640x480 YUYV interlaced frames,
 *   - patch up bytesperline/sizeimage reported by buggy drivers.
 * Exits the process on any fatal error.
 */
void init_device(pass_data* s)
{
    struct v4l2_capability cap;
    struct v4l2_cropcap cropcap;
    struct v4l2_crop crop;
    unsigned int min;

    if (xioctl (s->fd, VIDIOC_QUERYCAP, &cap) == -1)
    {
        if (EINVAL == errno)
        {
            fprintf (stderr, "%s is no V4L2 device\n", s->dev_name);
            exit (EXIT_FAILURE);
        }
        else
        {
            errno_exit ("VIDIOC_QUERYCAP");
        }
    }
    if (! (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        /* fixed typo in user-visible message: "captrue" -> "capture" */
        fprintf(stderr, "%s is no video capture device\n", s->dev_name);
        exit(EXIT_FAILURE);
    }
    if (! (cap.capabilities & V4L2_CAP_STREAMING))
    {
        /* fixed typo in user-visible message: "straming" -> "streaming" */
        fprintf(stderr, "%s does not support streaming I/O\n", s->dev_name);
        exit(EXIT_FAILURE);
    }

    /* Reset cropping to the default rectangle. Failures (e.g. EINVAL when
     * cropping is unsupported) are deliberately ignored, per the V4L2
     * capture example; the old empty switch encoded the same intent. */
    CLEAR(cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (xioctl (s->fd, VIDIOC_CROPCAP, &cropcap) == 0)
    {
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect;
        xioctl (s->fd, VIDIOC_S_CROP, &crop);   /* errors ignored */
    }

    /* Must match the UI side: flushBuff() converts 640x480 YUYV to RGB888. */
    CLEAR (s->fmt);
    s->fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    s->fmt.fmt.pix.width = 640;
    s->fmt.fmt.pix.height = 480;
    s->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    s->fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    if (xioctl (s->fd, VIDIOC_S_FMT, &s->fmt) == -1)
        errno_exit("VIDIOC_S_FMT");

    /* Some drivers report bogus sizes; enforce the YUYV minimum of
     * 2 bytes per pixel per line. */
    min = s->fmt.fmt.pix.width * 2;
    if (s->fmt.fmt.pix.bytesperline < min)
        s->fmt.fmt.pix.bytesperline = min;
    min = s->fmt.fmt.pix.bytesperline * s->fmt.fmt.pix.height;
    if (s->fmt.fmt.pix.sizeimage < min)
        s->fmt.fmt.pix.sizeimage = min;
}
/*
 * Unmap every buffer mapped by init_mmap(). Aborts the process if any
 * munmap() call fails. Streaming must already be stopped.
 */
void stop_device(pass_data *s)
{
    int idx = 0;
    while (idx < s->n_buffers)
    {
        if (munmap (s->buffers[idx].start, s->buffers[idx].length) == -1)
            errno_exit("munmap");
        ++idx;
    }
}
/*
 * Open the capture device node named by s->dev_name and store the fd in s->fd.
 * Returns 0 on success,
 *        -1 when the path cannot be stat'ed (no device),
 *        -2 when the path is not a character device,
 *        -3 when open() itself fails.
 * These codes match the message-box branches in VideoDisplay::beginCapture().
 */
int open_device(pass_data *s)
{
    struct stat st;
    if (stat (s->dev_name, &st) == -1)
    {
        fprintf(stderr, "Can't identify '%s':[%d] %s\n", s->dev_name, errno, strerror(errno));
        return -1;
    }
    if (!S_ISCHR (st.st_mode))
    {
        fprintf(stderr, "%s is no device\n", s->dev_name);
        return -2;
    }
    if ((s->fd = open (s->dev_name, O_RDWR, 0)) == -1 )
    {
        /* fixed typo "oprn" -> "open" in the message */
        fprintf(stderr, "Can't open '%s': error %d, %s\n", s->dev_name, errno, strerror(errno));
        /* was -2, which made beginCapture's -3 "can not open device"
         * branch unreachable and misreported the failure */
        return -3;
    }
    return 0;
}
/* Release the file descriptor obtained by open_device(). */
void close_device(pass_data *s)
{
    close (s->fd);
}
/*
 * ioctl() wrapper that transparently retries when the call is interrupted
 * by a signal (EINTR). Returns the underlying ioctl() result.
 */
int xioctl(int fd, int request, void *arg)
{
    for (;;)
    {
        const int rc = ioctl(fd, request, arg);
        if (rc != -1 || errno != EINTR)
            return rc;
    }
}
/*
 * Print "<tag> error <errno>, <strerror>" to stderr and terminate the
 * process with EXIT_FAILURE. Never returns.
 */
void errno_exit(const char *tag)
{
    const int saved_errno = errno;   /* snapshot before any further calls */
    fprintf(stderr, "%s error %d, %s\n", tag, saved_errno, strerror(saved_errno));
    exit(EXIT_FAILURE);
}
/*
 * Convert one YUV sample triple to a packed RGB value.
 * Uses the fixed float coefficients, clamps each channel to [0,255], then
 * darkens by 220/256 before packing R into byte 0, G into byte 1 and
 * B into byte 2 of the result (byte order as laid out in host memory).
 */
static int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    int r = y + (1.370705 * (v-128));
    int g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
    int b = y + (1.732446 * (u-128));

    /* clamp each channel into the displayable range */
    r = r > 255 ? 255 : (r < 0 ? 0 : r);
    g = g > 255 ? 255 : (g < 0 ? 0 : g);
    b = b > 255 ? 255 : (b < 0 ? 0 : b);

    unsigned int packed = 0;
    unsigned char *channel = (unsigned char *)&packed;
    channel[0] = r * 220 / 256;   /* 220/256 scaling matches the original */
    channel[1] = g * 220 / 256;
    channel[2] = b * 220 / 256;
    return packed;
}
/*
 * Expand a packed YUYV (YUV 4:2:2) frame into RGB888.
 * Each 4 input bytes [Y0 U Y1 V] describe two horizontally adjacent pixels
 * sharing one U/V pair, producing 6 output bytes (R,G,B per pixel).
 * yuv must hold width*height*2 bytes; rgb must hold width*height*3 bytes.
 * Always returns 0.
 */
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
    const unsigned int total = width * height * 2;   /* YUYV input bytes */
    unsigned char *dst = rgb;
    unsigned int src;

    for (src = 0; src < total; src += 4)
    {
        const int y0 = yuv[src];
        const int u  = yuv[src + 1];
        const int y1 = yuv[src + 2];
        const int v  = yuv[src + 3];

        unsigned int packed = convert_yuv_to_rgb_pixel(y0, u, v);
        *dst++ = packed & 0xff;           /* R */
        *dst++ = (packed >> 8) & 0xff;    /* G */
        *dst++ = (packed >> 16) & 0xff;   /* B */

        packed = convert_yuv_to_rgb_pixel(y1, u, v);
        *dst++ = packed & 0xff;
        *dst++ = (packed >> 8) & 0xff;
        *dst++ = (packed >> 16) & 0xff;
    }
    return 0;
}
(3)videodisplay.h#ifndef VIDEODISPLAY_H
#define VIDEODISPLAY_H
#include <QtGui>
#include "ui_dialog.h"
#include "v4l2.h"
// Dialog that shows a live 640x480 feed from a V4L2 camera and can snapshot
// the current frame to a timestamped PNG file.
class VideoDisplay : public QDialog
{
Q_OBJECT
public:
VideoDisplay(QWidget *parent = 0);
~VideoDisplay();
private slots:
void beginCapture();   // BEGIN button: open/start the device, start the timer
void flushBuff();      // timer tick: dequeue a frame, convert, display
void savebmpData();    // SAVE button: write current frame as <timestamp>.png
private:
Ui::Dialog ui;         // designer-generated form (display label + buttons)
pass_data pd;          // V4L2 device state (fd, mmap buffers, format)
QTimer *timer;         // 10 ms interval driving flushBuff()
unsigned char *bufrgb; // lazily malloc'ed 640*480*3 RGB888 frame buffer
};
#endif // VIDEODISPLAY_H
(4)videodisplay.cpp#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include "videodisplay.h"
/*
 * Wire up the UI, create the 10 ms refresh timer and prepare (but do not
 * open) the V4L2 device state. The device itself is opened lazily in
 * beginCapture().
 */
VideoDisplay::VideoDisplay(QWidget *parent)
    : QDialog(parent)
    , bufrgb(NULL)
{
    ui.setupUi(this);
    connect(ui.beginButton, SIGNAL(clicked()), this, SLOT(beginCapture()));
    connect(ui.saveButton, SIGNAL(clicked()), this, SLOT(savebmpData()));
    connect(ui.exitButton, SIGNAL(clicked()), this, SLOT(reject()));
    timer = new QTimer(this);
    timer->setInterval(10);
    connect(timer, SIGNAL(timeout()), this, SLOT(flushBuff()));
    /* pd was previously left uninitialized (garbage fd/pointers); zero it
     * and mark the fd as not-yet-open. */
    memset(&pd, 0, sizeof(pd));
    pd.fd = -1;
    /* cast: assigning a string literal to char* is deprecated in C++ */
    pd.dev_name = (char *)"/dev/video0";
}
/*
 * Stop the refresh timer and release the RGB frame buffer.
 * bufrgb was previously leaked; free(NULL) is safe, so no guard is needed.
 */
VideoDisplay::~VideoDisplay()
{
    if (timer->isActive()) {
        timer->stop();
    }
    free(bufrgb);
    bufrgb = NULL;
}
/*
 * Slot for the BEGIN button: bring the V4L2 device up and start the 10 ms
 * refresh timer. On failure, shows a message box and terminates the process
 * (exit codes 1-3 mirror open_device()'s -1/-2/-3 error codes).
 */
void VideoDisplay::beginCapture()
{
    int flag = init_dev(&pd);
    if (flag == -1) {
        QMessageBox::information(this,tr("Tip"),tr("no device"));
        exit(1);
    }
    else if (flag == -2) {
        QMessageBox::information(this,tr("Tip"),tr("device is wrong"));
        exit(2);
    }
    else if (flag == -3) {
        QMessageBox::information(this,tr("Tip"),tr("can not open device"));
        exit(3);
    }
    timer->start();
    /* 'true' instead of the legacy TRUE macro, which newer Qt/glibc
     * headers no longer guarantee to define */
    ui.beginButton->setDisabled(true);
}
/*
 * Timer slot: dequeue one frame, convert YUYV -> RGB888 into bufrgb,
 * display it on the label, then hand the buffer back to the driver.
 * bufrgb (640*480*3 bytes) is allocated lazily on the first frame.
 */
void VideoDisplay::flushBuff()
{
    read_frame (&pd);
    if (!bufrgb) {
        bufrgb = (unsigned char *)malloc(640 * 480 * 3);
        if (!bufrgb) {
            /* malloc was previously unchecked -> NULL deref on OOM;
             * drop this frame but keep the stream alive */
            return_data(&pd);
            return;
        }
    }
    memset(bufrgb, 0, 640 * 480 * 3);
    convert_yuv_to_rgb_buffer((unsigned char *)pd.buffers[pd.buf.index].start,bufrgb,640,480);
    QImage image(bufrgb,640,480,QImage::Format_RGB888);
    ui.displayLabel->setPixmap(QPixmap::fromImage(image));
    return_data(&pd);   /* re-queue the buffer for the driver */
}
/*
 * Slot for the SAVE button: write the most recently displayed frame to
 * "<YYYY-MM-DD_HH.MM.SS>.png" in the working directory. Does nothing if no
 * frame has been captured yet.
 */
void VideoDisplay::savebmpData()
{
    /* The old check was `bufrgb > 0 && strlen((char *)bufrgb) > 0`:
     * an ordered pointer comparison plus strlen() on binary pixel data,
     * which wrongly skips saving whenever the frame starts with a 0 byte.
     * A plain NULL check is the correct "have we captured anything" test. */
    if (bufrgb == NULL)
        return;

    time_t now;
    time(&now);
    struct tm *tm_time = localtime(&now);

    char filename[32] = {0};
    /* snprintf instead of sprintf guards the fixed-size buffer */
    snprintf(filename, sizeof(filename), "%4d-%02d-%02d_%02d.%02d.%02d.png",
             1900+tm_time->tm_year, 1+tm_time->tm_mon, tm_time->tm_mday,
             tm_time->tm_hour, tm_time->tm_min, tm_time->tm_sec);

    QImage image(bufrgb,640,480,QImage::Format_RGB888);
    image.save(filename);
}
(5)編譯執行點選BEGIN開始顯示,點選SAVE會在當前的可執行程式目錄下生成介面顯示的視訊的png的檔案:2016-01-20_17.28.36.png、2016-01-20_17.36.08.png
三、總結
(1)不同的系統中v4l2的問題不一樣,通過調整引數可以解決部分,但有些因能力有限實在無法解決。
(2)該文僅是以前畢業設計的一個開頭,還有影象的轉化和處理等一系列的自動識別的功能,接著可以將圖片存放到資料庫。
(3)若有問題或建議,請留言,在此感謝!