FFMpeg+SDL2.0 播放本地AVI檔案練手示例程式
阿新 發佈:2019-01-06
FFMpeg版本: ffmpeg-3.4-win32-dev
SDL版本:SDL2-2.0.7
參考文章:
雷神的各種相關文章。。。
希望能有一定的參考幫助吧……
// FFmpeg_playTest.cpp : 定義控制檯應用程式的入口點。 // #include "stdafx.h" #ifdef __cplusplus extern "C" { #endif #include "include/libavcodec/avcodec.h" #include "include/libswscale/swscale.h" #include "include/libavutil/imgutils.h" #include "SDL2-2.0.7/include/SDL.h" #include <stdlib.h> #include <stdio.h> #include <string.h> #include <math.h> #ifdef __cplusplus } #endif #pragma comment(lib, "lib/avcodec.lib") #pragma comment(lib, "lib/avutil.lib") #pragma comment(lib, "lib/swscale.lib") #pragma comment(lib, "SDL2-2.0.7/lib/x86/SDL2.lib") #define INBUF_SIZE 4096 int ShowFrame(struct SwsContext *pSwsContext, SDL_Renderer* pRenderer, SDL_Texture* pSDLTexture, AVCodecContext* pCodecContext, AVFrame* pSrcFrame, AVFrame* pDestFrame); static void decode(AVCodecContext *dec_ctx, AVFrame *frame, AVPacket *pkt, AVFrame *pDestframe ) { char buf[1024]; int ret; ret = avcodec_send_packet(dec_ctx, pkt); if (ret < 0) { fprintf(stderr, "Error sending a packet for decoding\n"); exit(1); } while (ret >= 0) { ret = avcodec_receive_frame(dec_ctx, frame); if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) return; else if (ret < 0) { fprintf(stderr, "Error during decoding\n"); exit(1); } printf("saving frame %3d\n", dec_ctx->frame_number); fflush(stdout); } } int _tmain(int argc, _TCHAR* argv[]) { FILE* pFile = NULL; const char* chFileName = NULL; //chFileName = "./test.avi"; chFileName = "./bigbuckbunny_480x272.h264"; fopen_s(&pFile, chFileName, "rb"); if (!pFile) { fprintf(stderr, "Could not open %s\n", chFileName); exit(1); } const AVCodec *pCodec = NULL; AVCodecParserContext* pParserCtx = NULL; AVCodecContext* pCodecCtx = NULL; AVFrame* pFram = NULL; uint8_t inbuf[INBUF_SIZE + AV_INPUT_BUFFER_PADDING_SIZE] = {0}; uint8_t *pData = NULL; size_t data_size = 0; int iRet = 0; AVPacket *pPkt = NULL; avcodec_register_all(); pPkt = av_packet_alloc(); if (!pPkt) { printf("av_packet_alloc failed.\n"); exit(1); } memset(inbuf + INBUF_SIZE, 0, AV_INPUT_BUFFER_PADDING_SIZE); //find the video decoder 
//pCodec = avcodec_find_decoder(AV_CODEC_ID_MPEG4); pCodec = avcodec_find_decoder(AV_CODEC_ID_H264); if (!pCodec) { fprintf(stderr, "Codec not found.\n"); exit(1); } pParserCtx = av_parser_init(pCodec->id); if (pParserCtx == NULL) { fprintf(stderr, "parser not found.\n"); exit(1); } pCodecCtx = avcodec_alloc_context3(pCodec); if (pCodecCtx == NULL) { fprintf(stderr, "could not allocate video contex \n"); exit(1); } if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) { fprintf(stderr, "could not open codec\n"); exit(1); } pFram = av_frame_alloc(); if (pFram == NULL) { fprintf(stderr, "Could not allocate video frame\n"); exit(1); } //SDL初始化 if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { printf("Could not initialize SDL --%s\n", SDL_GetError()); exit(1); } SDL_Window* pScreen = NULL; SDL_Renderer* pSdlRenderer = NULL; SDL_Texture* pSdlTexture = NULL; SDL_Event sdlEvent; struct SwsContext *img_convert_ctx = NULL; AVFrame* pFrameYUV = NULL; uint8_t* out_buffer = NULL; while (!feof(pFile)) { data_size = fread(inbuf, 1, INBUF_SIZE, pFile); if (!data_size) { printf("read file failed.\n"); break; } pData = inbuf; while (data_size > 0) { iRet = av_parser_parse2(pParserCtx, pCodecCtx, &pPkt->data, &pPkt->size, pData, data_size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0); if (iRet < 0) { fprintf(stderr, "Error while parsing\n"); exit(1); } pData += iRet; data_size -= iRet; if (pPkt->size) { printf("[packet] size = %6d\n", pPkt->size); switch (pParserCtx->pict_type) { case AV_PICTURE_TYPE_I: printf("Type:I\t"); break; case AV_PICTURE_TYPE_P: printf("Type:P\t"); break; case AV_PICTURE_TYPE_B: printf("Type:B\t"); break; default: printf("Type:Other\t"); break; } //--------------------------------------------DECODE------------------------------ int ret; ret = avcodec_send_packet(pCodecCtx, pPkt); if (ret < 0) { fprintf(stderr, "Error sending a packet for decoding\n"); exit(1); } while (ret >= 0) { ret = avcodec_receive_frame(pCodecCtx, pFram); if (ret == AVERROR(EAGAIN) || 
ret == AVERROR_EOF) { //fprintf(stderr, "(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF\n"); break; } else if (ret < 0) { fprintf(stderr, "Error during decoding\n"); exit(1); } //由於需要先成功解析一幀才能獲取到影象的寬高等資訊,所以這些初始化的操作只能放到這裡來進行 if (pFrameYUV == NULL) { pFrameYUV = av_frame_alloc(); //儲存轉換後的AVFrame out_buffer = new uint8_t[av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1)]; av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1); } if (pScreen == NULL) { //SDL init----------------------------------------- pScreen = SDL_CreateWindow("RTSP Client Demo", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, pCodecCtx->width, pCodecCtx->height, SDL_WINDOW_RESIZABLE | SDL_WINDOW_OPENGL); if (pScreen == NULL) { printf("SDL: could not set video mode -exit.\n"); exit(1); } pSdlRenderer = SDL_CreateRenderer(pScreen, -1, 0); pSdlTexture = SDL_CreateTexture(pSdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height); } ShowFrame(img_convert_ctx, pSdlRenderer, pSdlTexture, pCodecCtx, pFram, pFrameYUV); //顯示一幀 printf("saving frame %3d, width = %d, height = %d\n", pCodecCtx->frame_number, pCodecCtx->width, pCodecCtx->height); fflush(stdout); } //-------------------------------------END Decode----------------------------------- } else { //printf("pPkt->size = 0\n"); } av_packet_unref(pPkt); SDL_PollEvent(&sdlEvent); switch (sdlEvent.type) { case SDL_QUIT: SDL_Quit(); exit(0); break; default: break; } } } decode(pCodecCtx, pFram, NULL, NULL); if (pFile) { fclose(pFile); pFile = NULL; } if (pSdlTexture) { SDL_DestroyTexture(pSdlTexture); pSdlTexture = NULL; } if (pSdlRenderer) { SDL_DestroyRenderer(pSdlRenderer); pSdlRenderer = NULL; } av_parser_close(pParserCtx); avcodec_free_context(&pCodecCtx); av_frame_free(&pFram); if (out_buffer) { delete[] out_buffer; out_buffer = NULL; } if (pFrameYUV) { av_frame_free(&pFrameYUV); } 
av_packet_unref(pPkt); return 0; } int ShowFrame(struct SwsContext *pSwsContext, SDL_Renderer* pRenderer, SDL_Texture* pSDLTexture, AVCodecContext* pCodecContext, AVFrame* pSrcFrame, AVFrame* pDestFrame) { if ( !pRenderer || !pSDLTexture || !pCodecContext || !pSrcFrame) { printf("Show frame failed, param is null.\n"); return -1; } //畫素格式轉換 prame 轉換成 pFrame Yuv pSwsContext = sws_getContext(pCodecContext->width, pCodecContext->height, pCodecContext->pix_fmt, pCodecContext->width, pCodecContext->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); sws_scale(pSwsContext, (const uint8_t* const*)pSrcFrame->data, pSrcFrame->linesize, 0, pSrcFrame->height, pDestFrame->data, pDestFrame->linesize); sws_freeContext(pSwsContext); //SDL 顯示---------------------------- SDL_Rect sdlRect; sdlRect.x = 0; sdlRect.y = 0; sdlRect.w = pCodecContext->width; sdlRect.h = pCodecContext->height; SDL_UpdateTexture(pSDLTexture, &sdlRect, pDestFrame->data[0], pDestFrame->linesize[0]); SDL_RenderClear(pRenderer); SDL_RenderCopy(pRenderer, pSDLTexture, &sdlRect, &sdlRect); SDL_RenderPresent(pRenderer); //延時20ms SDL_Delay(20); return 0; }