Muxing H.264 and AAC into a TS stream with FFmpeg
阿新 • Published 2019-02-09
MuxerAndroid.c
#include <jni.h>
#include <string.h>
#include <stdio.h>
#include <android/log.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/frame.h>
#include <libavutil/pixfmt.h>

#define LOG_TAG "FFMPEGSample"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)

/*
 * FIX: H.264 in some container formats (FLV, MP4, MKV etc.) needs the
 * "h264_mp4toannexb" bitstream filter (BSF), which:
 *   - adds SPS/PPS in front of IDR frames
 *   - adds the start code ("0,0,0,1") in front of each NALU
 * H.264 in some containers (MPEG-2 TS) does not need this BSF.
 */
//'1': Use H.264 Bitstream Filter
#define USE_H264BSF 0

/*
 * FIX: AAC in some container formats (FLV, MP4, MKV etc.) needs the
 * "aac_adtstoasc" bitstream filter (BSF).
 */
//'1': Use AAC Bitstream Filter
#define USE_AACBSF 0

JNIEXPORT jint JNICALL Java_com_example_ffmpegmuxer_MuxerAndroid_Muxering(JNIEnv *env, jclass obj)
{
    AVOutputFormat *ofmt = NULL;
    //Input AVFormatContext and Output AVFormatContext
    AVFormatContext *ifmt_ctx_v = NULL, *ifmt_ctx_a = NULL, *ofmt_ctx = NULL;
    AVPacket pkt;
    int ret, i;
    int videoindex_v = -1, videoindex_out = -1;
    int audioindex_a = -1, audioindex_out = -1;
    int frame_index = 0;
    int64_t cur_pts_v = 0, cur_pts_a = 0;

    //const char *in_filename_v = "cuc_ieschool.ts";//Input file URL
    const char *in_filename_v = "/sdcard/Muxer/cuc_ieschool.h264";
    //const char *in_filename_a = "cuc_ieschool.mp3";
    //const char *in_filename_a = "gowest.m4a";
    const char *in_filename_a = "/sdcard/Muxer/gowest.aac";
    //const char *in_filename_a = "huoyuanjia.mp3";
    const char *out_filename = "/sdcard/Muxer/test.ts";//Output file URL

    av_register_all();

    //Input
    if ((ret = avformat_open_input(&ifmt_ctx_v, in_filename_v, 0, 0)) < 0) {
        LOGI("Could not open input file.");
        goto end;
    }
    if ((ret = avformat_find_stream_info(ifmt_ctx_v, 0)) < 0) {
        LOGI("Failed to retrieve input stream information");
        goto end;
    }
    if ((ret = avformat_open_input(&ifmt_ctx_a, in_filename_a, 0, 0)) < 0) {
        LOGI("Could not open input file.");
        goto end;
    }
    if ((ret = avformat_find_stream_info(ifmt_ctx_a, 0)) < 0) {
        LOGI("Failed to retrieve input stream information");
        goto end;
    }
    LOGI("===========Input Information==========\n");
    av_dump_format(ifmt_ctx_v, 0, in_filename_v, 0);
    av_dump_format(ifmt_ctx_a, 0, in_filename_a, 0);
    LOGI("======================================\n");

    //Output
    avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_filename);
    if (!ofmt_ctx) {
        LOGI("Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }
    ofmt = ofmt_ctx->oformat;

    for (i = 0; i < ifmt_ctx_v->nb_streams; i++) {
        //Create output AVStream according to input AVStream
        if (ifmt_ctx_v->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            AVStream *in_stream = ifmt_ctx_v->streams[i];
            AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
            videoindex_v = i;
            if (!out_stream) {
                LOGI("Failed allocating output stream\n");
                ret = AVERROR_UNKNOWN;
                goto end;
            }
            videoindex_out = out_stream->index;
            //Copy the settings of AVCodecContext
            if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
                LOGI("Failed to copy context from input to output stream codec context\n");
                goto end;
            }
            out_stream->codec->codec_tag = 0;
            if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
                out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
            break;
        }
    }

    for (i = 0; i < ifmt_ctx_a->nb_streams; i++) {
        //Create output AVStream according to input AVStream
        if (ifmt_ctx_a->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            AVStream *in_stream = ifmt_ctx_a->streams[i];
            AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
            audioindex_a = i;
            if (!out_stream) {
                LOGI("Failed allocating output stream\n");
                ret = AVERROR_UNKNOWN;
                goto end;
            }
            audioindex_out = out_stream->index;
            //Copy the settings of AVCodecContext
            if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
                LOGI("Failed to copy context from input to output stream codec context\n");
                goto end;
            }
            out_stream->codec->codec_tag = 0;
            if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
                out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
            break;
        }
    }

    LOGI("==========Output Information==========\n");
    av_dump_format(ofmt_ctx, 0, out_filename, 1);
    LOGI("======================================\n");

    //Open output file
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE) < 0) {
            LOGI("Could not open output file '%s'", out_filename);
            goto end;
        }
    }

    //Write file header
    if (avformat_write_header(ofmt_ctx, NULL) < 0) {
        LOGI("Error occurred when opening output file\n");
        goto end;
    }

    //FIX
#if USE_H264BSF
    AVBitStreamFilterContext *h264bsfc = av_bitstream_filter_init("h264_mp4toannexb");
#endif
#if USE_AACBSF
    AVBitStreamFilterContext *aacbsfc = av_bitstream_filter_init("aac_adtstoasc");
#endif

    while (1) {
        AVFormatContext *ifmt_ctx;
        int stream_index = 0;
        AVStream *in_stream, *out_stream;

        //Get an AVPacket: compare the current video and audio timestamps and
        //read the next packet from whichever input is behind, so the output
        //stays interleaved in time.
        if (av_compare_ts(cur_pts_v, ifmt_ctx_v->streams[videoindex_v]->time_base,
                          cur_pts_a, ifmt_ctx_a->streams[audioindex_a]->time_base) <= 0) {
            ifmt_ctx = ifmt_ctx_v;
            stream_index = videoindex_out;
            if (av_read_frame(ifmt_ctx, &pkt) >= 0) {
                do {
                    in_stream  = ifmt_ctx->streams[pkt.stream_index];
                    out_stream = ofmt_ctx->streams[stream_index];
                    if (pkt.stream_index == videoindex_v) {
                        //FIX: No PTS (Example: Raw H.264)
                        //Simple Write PTS
                        if (pkt.pts == AV_NOPTS_VALUE) {
                            //Write PTS
                            AVRational time_base1 = in_stream->time_base;
                            //Duration between 2 frames (us)
                            int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(in_stream->r_frame_rate);
                            //Parameters
                            pkt.pts = (double)(frame_index * calc_duration) / (double)(av_q2d(time_base1) * AV_TIME_BASE);
                            pkt.dts = pkt.pts;
                            pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1) * AV_TIME_BASE);
                            frame_index++;
                        }
                        cur_pts_v = pkt.pts;
                        break;
                    }
                } while (av_read_frame(ifmt_ctx, &pkt) >= 0);
            } else {
                break;
            }
        } else {
            ifmt_ctx = ifmt_ctx_a;
            stream_index = audioindex_out;
            if (av_read_frame(ifmt_ctx, &pkt) >= 0) {
                do {
                    in_stream  = ifmt_ctx->streams[pkt.stream_index];
                    out_stream = ofmt_ctx->streams[stream_index];
                    if (pkt.stream_index == audioindex_a) {
                        //FIX: No PTS
                        //Simple Write PTS
                        if (pkt.pts == AV_NOPTS_VALUE) {
                            //Write PTS
                            AVRational time_base1 = in_stream->time_base;
                            //Duration between 2 frames (us)
                            int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(in_stream->r_frame_rate);
                            //Parameters
                            pkt.pts = (double)(frame_index * calc_duration) / (double)(av_q2d(time_base1) * AV_TIME_BASE);
                            pkt.dts = pkt.pts;
                            pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1) * AV_TIME_BASE);
                            frame_index++;
                        }
                        cur_pts_a = pkt.pts;
                        break;
                    }
                } while (av_read_frame(ifmt_ctx, &pkt) >= 0);
            } else {
                break;
            }
        }

        //FIX: Bitstream Filter
#if USE_H264BSF
        av_bitstream_filter_filter(h264bsfc, in_stream->codec, NULL, &pkt.data, &pkt.size, pkt.data, pkt.size, 0);
#endif
#if USE_AACBSF
        av_bitstream_filter_filter(aacbsfc, out_stream->codec, NULL, &pkt.data, &pkt.size, pkt.data, pkt.size, 0);
#endif

        //Convert PTS/DTS from the input stream's time base to the output stream's
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base,
                                   (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base,
                                   (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
        pkt.stream_index = stream_index;

        LOGI("Write 1 Packet. size:%5d", pkt.size);
        LOGI("Write 1 Packet. pts--->%11lld", (long long)pkt.pts);
        //Write
        if (av_interleaved_write_frame(ofmt_ctx, &pkt) < 0) {
            LOGI("Error muxing packet\n");
            break;
        }
        av_free_packet(&pkt);
    }

    //Write file trailer
    av_write_trailer(ofmt_ctx);

#if USE_H264BSF
    av_bitstream_filter_close(h264bsfc);
#endif
#if USE_AACBSF
    av_bitstream_filter_close(aacbsfc);
#endif

end:
    avformat_close_input(&ifmt_ctx_v);
    avformat_close_input(&ifmt_ctx_a);
    /* close output */
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
        avio_close(ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    if (ret < 0 && ret != AVERROR_EOF) {
        LOGI("Error occurred.\n");
        return -1;
    }
    return 0;
}
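The exported symbol Java_com_example_ffmpegmuxer_MuxerAndroid_Muxering follows the JNI naming convention, so on the Java side there must be a class com.example.ffmpegmuxer.MuxerAndroid with a static native Muxering() method, and the FFmpeg shared libraries have to be loaded before it is called. The class below is only a minimal sketch: the class, package and method names are taken from the JNI symbol and Android.mk, while the library load order (dependencies first) is an assumption.

MuxerAndroid.java (sketch)

package com.example.ffmpegmuxer;

public class MuxerAndroid {

    static {
        // Load the prebuilt FFmpeg shared libraries before the JNI wrapper.
        // The names match the modules declared in Android.mk; loading the
        // dependencies (avutil, swresample, swscale, avcodec, avformat,
        // avfilter) before libffmpeg.so is an assumed but typical order.
        System.loadLibrary("avutil-52");
        System.loadLibrary("swresample-0");
        System.loadLibrary("swscale-2");
        System.loadLibrary("avcodec-55");
        System.loadLibrary("avformat-55");
        System.loadLibrary("avfilter-3");
        System.loadLibrary("ffmpeg"); // libffmpeg.so built from MuxerAndroid.c
    }

    // Maps to Java_com_example_ffmpegmuxer_MuxerAndroid_Muxering in MuxerAndroid.c.
    // Returns 0 on success and -1 on error, mirroring the native return value.
    public static native int Muxering();
}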
Android.mk
LOCAL_PATH := $(call my-dir)
$(warning $(LOCAL_PATH))

include $(CLEAR_VARS)
LOCAL_MODULE := avcodec-55
LOCAL_SRC_FILES := lib/libavcodec-55.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/libavcodec
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter-3
LOCAL_SRC_FILES := lib/libavfilter-3.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/libavfilter
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat-55
LOCAL_SRC_FILES := lib/libavformat-55.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/libavformat
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil-52
LOCAL_SRC_FILES := lib/libavutil-52.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/libavutil
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample-0
LOCAL_SRC_FILES := lib/libswresample-0.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/libswresample
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale-2
LOCAL_SRC_FILES := lib/libswscale-2.so
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/libswscale
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := MuxerAndroid.c
# LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
LOCAL_LDLIBS := -llog -ljnigraphics -lz -landroid
LOCAL_SHARED_LIBRARIES := libavcodec-55 libavfilter-3 libavformat-55 libavutil-52 libswscale-2 libswresample-0
include $(BUILD_SHARED_LIBRARY)
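Once ndk-build has produced libffmpeg.so from this Android.mk, the muxer can be triggered from an Activity. The snippet below is only an illustrative sketch: the Activity class is hypothetical, the inputs must already exist at the hard-coded paths in MuxerAndroid.c (/sdcard/Muxer/cuc_ieschool.h264 and /sdcard/Muxer/gowest.aac), and the app needs WRITE_EXTERNAL_STORAGE permission to create /sdcard/Muxer/test.ts. The native call blocks until muxing finishes, so it is run off the UI thread.

MainActivity.java (sketch)

package com.example.ffmpegmuxer;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;

public class MainActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Run the muxer on a worker thread so the blocking native call
        // does not stall the UI thread.
        new Thread(new Runnable() {
            @Override
            public void run() {
                int ret = MuxerAndroid.Muxering();
                Log.i("FFMPEGSample", "Muxering finished, ret=" + ret);
            }
        }).start();
    }
}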