(八) 解碼mp4,匯出為YUV420P
阿新 • 發佈:2018-12-10
#include <unistd.h> #include "hjcommon.hpp" extern "C" { #include "libavutil/imgutils.h" } JNIEXPORT void JNICALL Java_hankin_hjmedia_mpeg_some_MP42YUVActivity_decode(JNIEnv *env, jobject instance, jstring src_, jstring dst_) { char src[128]; hjcpyJstr2char(env, src_, src); char dst[128]; hjcpyJstr2char(env, dst_, dst); AVFormatContext * avFormatContext = 0; int ret = avformat_open_input(&avFormatContext, src, 0, 0); if (ret!=0) { LOGE("avformat_open_input error."); return; } ret = avformat_find_stream_info(avFormatContext, 0); if (ret!=0) { LOGE("avformat_find_stream_info error."); return; } int videoStream = av_find_best_stream(avFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, 0, 0); AVStream * avs = avFormatContext->streams[videoStream]; int width = avs->codecpar->width; int height = avs->codecpar->height; // duration=10024000, fps=25.00, width=1920, height=1080 LOGD("duration=%lld, fps=%.2f, width=%d, height=%d", avFormatContext->duration, hj_r2d(avs->avg_frame_rate), width, height); AVCodecContext * videoCodec = avcodec_alloc_context3(0); int gvRet = hjgetAVDecoder6_1(videoCodec, avs->codecpar, true, false); // nexus上,硬解碼的效果沒有軟解碼好 if (gvRet!=0) return; int frameCount = 0; AVPacket * packet = av_packet_alloc(); AVFrame * frame = av_frame_alloc(); bool is = true; FILE * fp = fopen(dst, "wb+"); AVFrame * tmpFrame = 0; int format = -1; AVFrame * convertFrame = 0; SwsContext * swsContext = 0; while (true) { int re = av_read_frame(avFormatContext, packet); if (re!=0) break; if (packet->stream_index==videoStream) { int num = 0; while (true) { num++; ret = avcodec_send_packet(videoCodec, packet); // 硬解碼的時候,avcodec_send_packet可能會失敗,這裡迴圈嘗試傳送5次 if (ret!=0) { LOGW("avcodec_send_packet error"); usleep(1000*2); } if (ret==0 || num>=5) break; } while (true) { ret = avcodec_receive_frame(videoCodec, frame); if (ret!=0) break; frameCount++; tmpFrame = frame; format = frame->format; // mp4軟解碼的format為AV_PIX_FMT_YUV420P,硬解碼看硬體 LOGD("pts=%lld, size=%d, format=%d, 幀數=%d", frame->pts, 
frame->pkt_size, format, frameCount); if (is && format!=AV_PIX_FMT_YUV420P) // 視訊格式不為 AV_PIX_FMT_YUV420P ,需要轉換為yuv,然後儲存到sdcard { is = false; convertFrame = av_frame_alloc(); /* int av_image_get_buffer_size( // 返回使用給定引數儲存影象所需的資料量的大小(以位元組為單位)。 enum AVPixelFormat pix_fmt, // the pixel format of the image int width, // 寬度 int height, // 高度 int align // 一個畫素佔幾個位元組? ); */ int imgSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width, height, 1); uint8_t * buf = (uint8_t*) av_malloc(imgSize); /* int av_image_fill_arrays( // 為AVFrame的資料分配記憶體 uint8_t *dst_data[4], // framd的data int dst_linesize[4], // frame的linesize const uint8_t *src, // 申請好了的記憶體 enum AVPixelFormat pix_fmt, // 視訊格式 int width, // 寬 int height, // 高 int align // 一個畫素佔幾個位元組? ); */ int ss = av_image_fill_arrays(convertFrame->data, convertFrame->linesize, buf, AV_PIX_FMT_YUV420P, width, height, 1); // av_image_get_buffer_size=3110400, av_image_fill_arrays=3110400, format=23 LOGI("av_image_get_buffer_size=%d, av_image_fill_arrays=%d, format=%d", imgSize, ss, format); } if (format!=AV_PIX_FMT_YUV420P) // 視訊格式不為 AV_PIX_FMT_YUV420P ,需要轉換為yuv,然後儲存到sdcard { // 視訊格式轉換 swsContext = sws_getCachedContext(swsContext, width, height, (AVPixelFormat) frame->format, width, height, AV_PIX_FMT_YUV420P, SWS_FAST_BILINEAR, NULL, NULL, NULL); sws_scale(swsContext, (const uint8_t**)frame->data, frame->linesize, 0, height, convertFrame->data, convertFrame->linesize); tmpFrame = convertFrame; } // 儲存到sdcard int y_size = width * height; fwrite(tmpFrame->data[0], 1, y_size, fp); // 1920*1080 10秒鐘的視訊(共241幀)存為yuv,大小為 1920*1080*1.5*241 = 700多M 。。 fwrite(tmpFrame->data[1], 1, y_size / 4, fp); fwrite(tmpFrame->data[2], 1, y_size / 4, fp); } } av_packet_unref(packet); } end: av_packet_free(&packet); av_frame_free(&frame); avcodec_close(videoCodec); avcodec_free_context(&videoCodec); avformat_close_input(&avFormatContext); fflush(fp); fclose(fp); // 呼叫java函式 jclass clz = env->GetObjectClass(instance); jmethodID mid = env->GetMethodID(clz, 
"setStatus", "()V"); env->CallVoidMethod(instance, mid); }