從cimutils到核心-(1)VIDIOC_REQBUFS(申請buffer)
應用層:
全域性變數
/* Userspace bookkeeping record for one mmap()ed kernel buffer. */
struct buffer {
void * start;   /* start address returned by mmap() for this buffer */
size_t length;  /* length of the mapping in bytes */
};
/* Array of per-buffer records; allocated after VIDIOC_REQBUFS reports how many buffers were granted. */
struct buffer * buffers = NULL;
struct v4l2_requestbuffers req;
req.count = 2; //申請兩個buffer
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
ioctl(fd, VIDIOC_REQBUFS, &req)
buffers = malloc(req.count * sizeof(*buffers)); //使用者層開闢記憶體,用於儲存mmap對映核心返回的buffer的起始地址
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
核心:
(1)
drivers/media/v4l2-core/v4l2-ioctl.c
IOCTL_INFO_FNC(VIDIOC_REQBUFS, v4l_reqbufs, v4l_print_requestbuffers, INFO_FL_PRIO | INFO_FL_QUEUE)
/*
 * Generic VIDIOC_REQBUFS handler in the v4l2 core: validates the requested
 * buffer type against this device node, zeroes the struct fields after
 * 'memory' (reserved[]), then hands off to the driver's vidioc_reqbufs op
 * (soc_camera_reqbufs in this driver stack).
 */
static int v4l_reqbufs(const struct v4l2_ioctl_ops *ops, struct file *file, void *fh, void *arg)
{
struct v4l2_requestbuffers *p = arg;
int ret = check_fmt(file, p->type);  /* does this node support the requested buffer type? */
if (ret)
return ret;
CLEAR_AFTER_FIELD(p, memory);  /* zero everything after 'memory' so stale userspace data is ignored */
return ops->vidioc_reqbufs(file, fh, p);
}
(2)
drivers/media/platform/soc_camera/soc_camera.c
/*
 * soc_camera implementation of vidioc_reqbufs: forwards the request to either
 * the legacy videobuf queue or the videobuf2 queue, depending on which init
 * hook the host driver provides, and records this open file as the exclusive
 * streaming owner of the device.
 */
static int soc_camera_reqbufs(struct file *file, void *priv,
struct v4l2_requestbuffers *p)
{
int ret;
struct soc_camera_device *icd = file->private_data;
struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
WARN_ON(priv != file->private_data);
/* only one open file handle may stream at a time */
if (icd->streamer && icd->streamer != file)
return -EBUSY;
if (ici->ops->init_videobuf) {
/* legacy videobuf path, followed by a host-specific reqbufs hook */
ret = videobuf_reqbufs(&icd->vb_vidq, p);
if (ret < 0)
return ret;
ret = ici->ops->reqbufs(icd, p);
} else {
/* videobuf2 path (the path taken by the jz_camera host below) */
ret = vb2_reqbufs(&icd->vb2_vidq, p);
}
/* claim streaming ownership on first successful request */
if (!ret && !icd->streamer)
icd->streamer = file;
return ret;
}
(3)
drivers/media/v4l2-core/videobuf2-core.c
/*
 * videobuf2 core entry point for REQBUFS: verifies that the queue supports
 * the requested memory model (MMAP here) and buffer type, then performs the
 * actual allocation in __reqbufs().
 */
int vb2_reqbufs(struct vb2_queue *q, struct v4l2_requestbuffers *req)
{
int ret = __verify_memory_type(q, req->memory, req->type);
return ret ? ret : __reqbufs(q, req);
}
EXPORT_SYMBOL_GPL(vb2_reqbufs);
/*
 * NOTE(review): abbreviated walkthrough of __reqbufs(), NOT compilable code —
 * only the lines relevant to buffer allocation are kept, followed by example
 * offset values the code produces for two buffers.
 */
static int __reqbufs(struct vb2_queue *q, struct v4l2_requestbuffers *req)
{
ret = call_qop(q, queue_setup, q, NULL, &num_buffers, &num_planes,
q->plane_sizes, q->alloc_ctx); // calls down into our host/controller driver (jz_queue_setup)
ret = __vb2_queue_alloc(q, req->memory, num_buffers, num_planes); // allocates the struct vb2_buffer *vb objects in kernel space; also calls the driver's buf_init op, which initializes the list our driver maintains: INIT_LIST_HEAD(&buf->list); struct jz_buffer *buf
__setup_offsets(q, buffer); // computes each plane's offset cookie: vb->v4l2_planes[plane].m.mem_offset = off (each buffer's "start address" as seen by userspace)
//vb->v4l2_planes[plane].m.mem_offset = off;
off += vb->v4l2_planes[plane].length; // advance by the plane length, yielding the next buffer's offset
off = PAGE_ALIGN(off);
/* Example offsets for two buffers — mem_offset is the cookie userspace
 * passes as the last argument of mmap(): */
buffer[0]
Buffer 0, plane 0 offset 0x00000000 起始地址 這個地址傳遞給應用層,mmap最後一個引數用
Buffer 0, plane 0 offset 0x00096000 結束地址
buffer[1]
Buffer 1, plane 0 offset 0x00096000 起始地址 這個地址傳遞給應用層,mmap最後一個引數用
Buffer 1, plane 0 offset 0x0012c000 結束地址
}
(4)
drivers/media/platform/soc_camera/jz_camera_v13.c 我們的控制器驅動
/*
 * queue_setup op of the jz_camera host driver (fragment — error handling and
 * the nbuffers/nplanes/sizes setup are elided). Invoked from __reqbufs() via
 * call_qop(); allocates the DMA descriptor memory for the requested buffers.
 */
static int jz_queue_setup(struct vb2_queue *vq, const struct v4l2_format *fmt, unsigned int *nbuffers, unsigned int *nplanes, unsigned int sizes[], void *alloc_ctxs[])
{
if(jz_camera_alloc_desc(pcdev, *nbuffers))// allocate memory for the DMA descriptors (jz_camera_dma_desc)
}
乾的主要工作總結:
應用層把結構體(struct v4l2_requestbuffers)傳遞給核心,核心根據這個結構體填充結構體struct vb2_buffer *vb,並申請兩個(req.count個)buffer記憶體
/* UAPI structure userspace fills in for the VIDIOC_REQBUFS ioctl. */
struct v4l2_requestbuffers {
__u32 count;  /* in: number of buffers requested; out: number actually granted */
__u32 type; /* enum v4l2_buf_type */
__u32 memory; /* enum v4l2_memory */
__u32 reserved[2];  /* must be zero (the core's CLEAR_AFTER_FIELD clears it anyway) */
};
/* Per-buffer state the videobuf2 core keeps for each buffer granted by REQBUFS. */
struct vb2_buffer {
struct v4l2_buffer v4l2_buf;  /* the v4l2_buffer reported back to userspace */
struct v4l2_plane v4l2_planes[VIDEO_MAX_PLANES];  /* per-plane info, incl. m.mem_offset mmap cookie */
struct vb2_queue *vb2_queue;  /* queue this buffer belongs to */
unsigned int num_planes;  /* number of planes actually used by this buffer */
/* Private: internal use only */
enum vb2_buffer_state state;  /* dequeued / queued / active / done / error */
struct list_head queued_entry;  /* entry on the queue's queued-buffers list */
struct list_head done_entry;  /* entry on the done-buffers list */
struct vb2_plane planes[VIDEO_MAX_PLANES];  /* core-private per-plane state (mappings etc.) */
};