ffmpeg - v4l2影象格式轉換
阿新 · 發佈:2021-05-11
目標:攝像頭資料通過 v4l2 採集,然後進行影象格式轉換以及拉伸縮放。
程式碼:
#include <stdio.h> #include "libavcodec/avcodec.h" #include "libavformat/avformat.h" #include "libavdevice/avdevice.h" #include "libswscale/swscale.h" #define dev "/dev/video0" // 寬高由使用者自定義,格式為使用最廣泛的yuv420p void get_one_frame(const char *picfile, int w, int h) { // 初始化格式上下文 AVInputFormat *in_fmt = av_find_input_format("video4linux2"); if (in_fmt == NULL) { printf("can't find_input_format\n"); return ; } AVFormatContext *fmt_ctx = NULL; if (avformat_open_input(&fmt_ctx, dev, in_fmt, NULL) < 0) { printf("can't open_input_file\n"); return ; } // printf device info av_dump_format(fmt_ctx, 0, dev, 0); // 找到視訊流的索引 int videoindex = -1; for (int i = 0; i < fmt_ctx->nb_streams; i++) { if (fmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) { videoindex = i; break; } } if (videoindex == -1) { printf("can't find video stream index\n"); avformat_close_input(&fmt_ctx); return ; } AVCodecContext *cod_ctx = fmt_ctx->streams[videoindex]->codec; //printf("camera width:%d height:%d format:%d\n", cod_ctx->width, cod_ctx->height, cod_ctx->pix_fmt); struct SwsContext *sws_ctx; // 輸出影象的解析度和格式 sws_ctx = sws_getContext(cod_ctx->width, cod_ctx->height, cod_ctx->pix_fmt, \ w, h, AV_PIX_FMT_YUV420P, \ SWS_BILINEAR, NULL, NULL, NULL); // 縮放演算法, 輸入/輸出影象的濾波器資訊,縮放演算法的引數 // 讀取一幀資料 AVPacket *packet = (AVPacket *)av_malloc(sizeof(*packet)); av_read_frame(fmt_ctx, packet); printf("packet->data:%d\n", packet->size); // YUY2的儲存格式是packed的,[Y0,U0,Y1,V0] // packed格式的data[]陣列中只有一個維度, align為1時,跨度s = width char *yuy2buf[4]; int yuy2_linesize[4]; // 按照指定的解析度和影象格式以及指定的對齊方式,分析影象記憶體,返回申請的記憶體空間大小 int yuy2_size = av_image_alloc(yuy2buf, yuy2_linesize, cod_ctx->width, cod_ctx->height, cod_ctx->pix_fmt, 1); printf("yuy2_size:%d linesize: {%d}\n", yuy2_size, yuy2_linesize[0]); // YUV420P的儲存格式是planar的,[YYYY UU VV] // planar格式的data[]陣列中有多維,yuv420p_linesize[] = {s, s/2, s/2} char *yuv420pbuf[4]; int yuv420p_linesize[4]; int yuv420p_size = av_image_alloc(yuv420pbuf, yuv420p_linesize, w, h, AV_PIX_FMT_YUV420P, 
1); printf("yuv420p size:%d linesize: {%d %d %d}\n", yuv420p_size, yuv420p_linesize[0], yuv420p_linesize[1], yuv420p_linesize[2]); FILE *fp = fopen(picfile, "w+"); memcpy(yuy2buf[0], packet->data, packet->size); sws_scale(sws_ctx, \ (const uint8_t **)yuy2buf, yuy2_linesize, \ 0, cod_ctx->height, \ yuv420pbuf, yuv420p_linesize); //printf("sws_scale() ok\n"); fwrite(yuv420pbuf[0], 1, yuv420p_size, fp); fclose(fp); av_free_packet(packet); avformat_close_input(&fmt_ctx); sws_freeContext(sws_ctx); av_freep(yuy2buf); av_freep(yuv420pbuf); } int main(int argc, char const* argv[]) { avdevice_register_all(); get_one_frame("output.yuv", 640, 480); return 0; }
問題:stack smashing detected
原因:YUY2 是 packed 格式,它的 data[] 只用到一個維度,於是把緩衝區陣列定義成了少於 4 個元素(如 char *yuy2buf[3])。但這樣定義是錯誤的:av_image_alloc 的函式原型接收的是 uint8_t *pointers[4] 和 int linesizes[4],函式內部總是會寫滿全部 4 個指標和 4 個 linesize。陣列長度不足時,多出的寫入會越過陣列邊界、破壞棧上相鄰記憶體,從而觸發 stack smashing。
解決:將兩個陣列(指標陣列和 linesize 陣列)的長度都定義為 4 即可;元素型別也應使用 uint8_t * 以匹配函式原型。
執行結果:
參考: