
Ubuntu 18.04: A Demo Based on FFmpeg 4.3




Many thanks to Lei Xiaohua (雷神)

This post is only a record of my own learning process and is not meant for any other purpose. A tribute to Lei Xiaohua!

This demo is a Linux-only version, with minor modifications, of Lei Xiaohua's "Simplest FFmpeg Streamer" and its companion receiver:

Simplest FFmpeg Streamer (《最簡單的基於FFmpeg的推流器》)
Simplest FFmpeg Receiver (《最簡單的基於FFMPEG的推流器附件:收流器》)
For the underlying principles, see Lei Xiaohua's blog posts above.

I. Setting up Nginx with RTMP on Ubuntu 18.04

Install Nginx and the RTMP module:
See the post 《Ubuntu18.04搭建本地RTMP伺服器librtmp+nginx,推送flv檔案播放》 (setting up a local RTMP server with librtmp + nginx on Ubuntu 18.04 and streaming an FLV file).

Note:
./configure --prefix=/usr/local/nginx --add-module=../nginx-rtmp-module-master/
--add-module=../nginx-rtmp-module-master adds the RTMP module to the nginx build; do not skip it. (A typical build sequence is sketched below.)
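For reference, the build steps from the referenced tutorial look roughly like the following sketch. The directory names (nginx-1.x.x, ../nginx-rtmp-module-master/) are assumptions and must match wherever you unpacked the nginx and nginx-rtmp-module sources:

cd nginx-1.x.x                      # the unpacked nginx source directory (name is an assumption)
./configure --prefix=/usr/local/nginx --add-module=../nginx-rtmp-module-master/
make
sudo make install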

Commands for checking which ports are in use:
 1) sudo netstat -nultp
 2) sudo netstat -anp | grep 22

Common nginx commands

sudo service nginx start    # start
sudo service nginx reload   # reload
sudo service nginx restart  # restart
sudo service nginx stop     # stop

sudo /usr/local/nginx/sbin/nginx -v  # show version
sudo /usr/local/nginx/sbin/nginx     # start
sudo /usr/local/nginx/sbin/nginx -s stop   # stop
sudo /usr/local/nginx/sbin/nginx -s reload # reload the configuration

In nginx.conf, be sure to change the port numbers that were highlighted in the original screenshot (a minimal example of the relevant configuration is sketched below).

If you leave them unchanged, nginx may fail to start and report that the port is already in use.

cd /usr/local/nginx/conf
vim nginx.conf

(Figure: nginx.conf with the port numbers to change marked.)
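Since the screenshot is not reproduced here, below is a minimal sketch of the parts of nginx.conf that matter for this demo. The port numbers and the application name (live) are assumptions: they only need to be free on your machine and consistent with the rtmp:// URLs used later in the code.

worker_processes  1;

events {
    worker_connections  1024;
}

rtmp {
    server {
        listen 1935;              # RTMP port; change it if 1935 is already taken
        chunk_size 4096;

        application live {        # matches rtmp://localhost/live/room used in the code
            live on;
            record off;
        }
    }
}

http {
    server {
        listen 8080;              # pick an HTTP port that is not already in use
    }
}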

II. Modifying the source code

1. Sender source code (Lei Xiaohua)

/**
 * Simplest FFmpeg Streamer (Send RTMP)
 *
 * Lei Xiaohua (雷霄驊)
 * [email protected]
 * Communication University of China / Digital TV Technology
 * http://blog.csdn.net/leixiaohua1020
 *
 * This example streams a local media file to a streaming media
 * server (RTMP is used as the example).
 * It is the simplest FFmpeg streamer.
 *
 */
#include <stdio.h>
#define __STDC_CONSTANT_MACROS
#ifdef _WIN32
//Windows
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/mathematics.h"
#include "libavutil/time.h"
};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavformat/avformat.h>
#include <libavutil/mathematics.h>
#include <libavutil/time.h>
#ifdef __cplusplus
};
#endif
#endif
int main(int argc, char* argv[])
{
	AVOutputFormat *ofmt = NULL;
	//Input AVFormatContext and Output AVFormatContext
	AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
	AVPacket pkt;
	const char *in_filename, *out_filename;
	int ret, i;
	int videoindex=-1;
	int frame_index=0;
	int64_t start_time=0;
	//in_filename  = "cuc_ieschool.mov";
	//in_filename  = "cuc_ieschool.mkv";
	//in_filename  = "cuc_ieschool.ts";
	//in_filename  = "cuc_ieschool.mp4";
	//in_filename  = "cuc_ieschool.h264";
	in_filename  = "cuc_ieschool.flv";//Input file URL
	//in_filename  = "shanghai03_p.h264";
	//out_filename = "rtmp://localhost/publishlive/livestream";//Output URL [RTMP]
	out_filename = "rtmp://localhost/live/room";
	//out_filename = "rtp://233.233.233.233:6666";//Output URL [UDP]
	av_register_all();
	//Network
	avformat_network_init();
	//Input
	if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0) {
		printf( "Could not open input file.");
		goto end;
	}
	if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
		printf( "Failed to retrieve input stream information");
		goto end;
	}
	for(i=0; i<ifmt_ctx->nb_streams; i++) 
		if(ifmt_ctx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
			videoindex=i;
			break;
		}
	av_dump_format(ifmt_ctx, 0, in_filename, 0);
	//Output
	avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename); //RTMP
	//avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_filename);//UDP
	if (!ofmt_ctx) {
		printf( "Could not create output context\n");
		ret = AVERROR_UNKNOWN;
		goto end;
	}

	ofmt = ofmt_ctx->oformat;
	for (i = 0; i < ifmt_ctx->nb_streams; i++) {
		//Create output AVStream according to input AVStream
		AVStream *in_stream = ifmt_ctx->streams[i];
		AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
		if (!out_stream) {
			printf( "Failed allocating output stream\n");
			ret = AVERROR_UNKNOWN;
			goto end;
		}

		//Copy the settings of AVCodecContext
		ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
		if (ret < 0) {
			printf( "Failed to copy context from input to output stream codec context\n");
			goto end;
		}
		out_stream->codec->codec_tag = 0;
		//if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
		//	out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
	}

	//Dump Format------------------
	av_dump_format(ofmt_ctx, 0, out_filename, 1);
	//Open output URL
	if (!(ofmt->flags & AVFMT_NOFILE)) {
		ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
		if (ret < 0) {
			printf( "Could not open output URL '%s'", out_filename);
			goto end;
		}
	}

	//Write file header
	ret = avformat_write_header(ofmt_ctx, NULL);
	if (ret < 0) {
		printf( "Error occurred when opening output URL\n");
		goto end;
	}
	start_time=av_gettime();
	while (1) {
		AVStream *in_stream, *out_stream;
		//Get an AVPacket
		ret = av_read_frame(ifmt_ctx, &pkt);
		if (ret < 0)
			break;
		//FIX:No PTS (Example: Raw H.264)
		//Simple Write PTS
		if(pkt.pts==AV_NOPTS_VALUE){
			//Write PTS
			AVRational time_base1=ifmt_ctx->streams[videoindex]->time_base;
			//Duration between 2 frames (us)
			int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
			//Parameters
			pkt.pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
			pkt.dts=pkt.pts;
			pkt.duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
		}
		//Important:Delay
		if(pkt.stream_index==videoindex){
			AVRational time_base=ifmt_ctx->streams[videoindex]->time_base;
			AVRational time_base_q={1,AV_TIME_BASE};
			int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
			int64_t now_time = av_gettime() - start_time;
			if (pts_time > now_time)
				av_usleep(pts_time - now_time);
		}
		in_stream  = ifmt_ctx->streams[pkt.stream_index];
		out_stream = ofmt_ctx->streams[pkt.stream_index];
		/* copy packet */
		//Convert PTS/DTS
		pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));

		pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
		pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
		pkt.pos = -1;
		//Print to Screen
		if(pkt.stream_index==videoindex){
			printf("Send %8d video frames to output URL\n",frame_index);
			frame_index++;
		}

		//ret = av_write_frame(ofmt_ctx, &pkt);
		ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
		if (ret < 0) {
			printf( "Error muxing packet\n");
			break;
		}
		av_free_packet(&pkt);
	}

	//Write file trailer
	av_write_trailer(ofmt_ctx);
end:
	avformat_close_input(&ifmt_ctx);
	/* close output */
	if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
		avio_close(ofmt_ctx->pb);
	avformat_free_context(ofmt_ctx);
	if (ret < 0 && ret != AVERROR_EOF) {
		printf( "Error occurred.\n");
		return -1;
	}
	return 0;
}

Notes:

1. out_filename = "rtmp://localhost/live/room"; — the RTMP push URL must match the RTMP application configured in nginx.conf (here the application name is live and room is the stream key).
2. in_filename  = "cuc_ieschool.flv"; — make sure the path to the input video file is correct.
3. The two lines
	//if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
	//	out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
	are commented out because the macro CODEC_FLAG_GLOBAL_HEADER is not defined in this FFmpeg version (the Windows build used in the original post was presumably an older FFmpeg that still had it). The old CODEC_FLAG_* names were removed in newer FFmpeg releases; the replacement is AV_CODEC_FLAG_GLOBAL_HEADER (see the sketch below).
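If you do want the muxer to use global headers on FFmpeg 4.x, a minimal sketch using the current macro name (and the same ofmt_ctx/out_stream variables and deprecated AVStream->codec field as in the code above) would be:

		//Some muxers (e.g. FLV, MP4) expect codec parameters in a global header.
		//AV_CODEC_FLAG_GLOBAL_HEADER is the current name of the removed CODEC_FLAG_GLOBAL_HEADER.
		if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
			out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;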

Compile:

gcc simplest_ffmpeg_streamer.cpp -g -o simplest_ffmpeg_streamer.out \
-I /usr/local/ffmpeg/include -L /usr/local/ffmpeg/lib -lavformat -lavcodec -lavutil

Adjust /usr/local/ffmpeg/include and /usr/local/ffmpeg/lib to the actual install path of FFmpeg on your system. (An alternative build command using pkg-config, plus a note on the runtime library path, is sketched below.)
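Because /usr/local/ffmpeg is not a standard library path, the dynamic linker may not find the FFmpeg shared libraries at run time. One way to handle this is sketched here, under the assumption that FFmpeg was installed with that prefix and placed its pkg-config files in /usr/local/ffmpeg/lib/pkgconfig:

export PKG_CONFIG_PATH=/usr/local/ffmpeg/lib/pkgconfig:$PKG_CONFIG_PATH
gcc simplest_ffmpeg_streamer.cpp -g -o simplest_ffmpeg_streamer.out \
$(pkg-config --cflags --libs libavformat libavcodec libavutil)

# make the shared libraries visible when running the binary
export LD_LIBRARY_PATH=/usr/local/ffmpeg/lib:$LD_LIBRARY_PATH
./simplest_ffmpeg_streamer.out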

Run

Possible errors:

1. The RTMP push fails because the RTMP service has not been started.
   Fix: start the nginx service (you can then verify the listening port with the command shown after this list):

sudo /usr/local/nginx/sbin/nginx

2. After the FLV file has been pushed all the way to the end, FFmpeg complains about a missing FLV header.
   This error can be ignored; it does not affect the transfer. From what I have read it seems related to how FFmpeg finalizes FLV streams, but I have not worked out the details.
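To confirm that nginx is actually listening on the RTMP port (1935 in the configuration sketch above; adjust if you changed it), the netstat command from Part I can be reused:

sudo netstat -nltp | grep 1935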

2. Receiver source code (Lei Xiaohua)

/**
 * Simplest FFmpeg Receiver (Receive RTMP)
 *
 * Lei Xiaohua (雷霄驊)
 * [email protected]
 * Communication University of China / Digital TV Technology
 * http://blog.csdn.net/leixiaohua1020
 *
 * This example saves streaming media data (RTMP is used as the
 * example) to a local file.
 * It is the simplest FFmpeg stream receiver.
 *
 */

#include <stdio.h>
#define __STDC_CONSTANT_MACROS
#ifdef _WIN32
//Windows
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/mathematics.h"
#include "libavutil/time.h"
};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavformat/avformat.h>
#include <libavutil/mathematics.h>
#include <libavutil/time.h>
#ifdef __cplusplus
};
#endif
#endif

//'1': Use H.264 Bitstream Filter 
#define USE_H264BSF 0
int main(int argc, char* argv[])
{
	AVOutputFormat *ofmt = NULL;
	//Input AVFormatContext and Output AVFormatContext
	AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
	AVPacket pkt;
	const char *in_filename, *out_filename;
	int ret, i;
	int videoindex=-1;
	int frame_index=0;
	//in_filename  = "rtmp://live.hkstv.hk.lxdns.com/live/hks";
	in_filename  = "rtmp://localhost/live/room";
	//in_filename  = "rtp://233.233.233.233:6666";
	//out_filename = "receive.ts";
	//out_filename = "receive.mkv";
	out_filename = "receive.flv";

	av_register_all();
	//Network
	avformat_network_init();
	//Input
	if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0) {
		printf( "Could not open input file.");
		goto end;
	}
	if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
		printf( "Failed to retrieve input stream information");
		goto end;
	}

	for(i=0; i<ifmt_ctx->nb_streams; i++) 
		if(ifmt_ctx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
			videoindex=i;
			break;
		}

	av_dump_format(ifmt_ctx, 0, in_filename, 0);

	//Output
	avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_filename); //RTMP

	if (!ofmt_ctx) {
		printf( "Could not create output context\n");
		ret = AVERROR_UNKNOWN;
		goto end;
	}
	ofmt = ofmt_ctx->oformat;
	for (i = 0; i < ifmt_ctx->nb_streams; i++) {
		//Create output AVStream according to input AVStream
		AVStream *in_stream = ifmt_ctx->streams[i];
		AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
		if (!out_stream) {
			printf( "Failed allocating output stream\n");
			ret = AVERROR_UNKNOWN;
			goto end;
		}
		//Copy the settings of AVCodecContext
		ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
		if (ret < 0) {
			printf( "Failed to copy context from input to output stream codec context\n");
			goto end;
		}
		out_stream->codec->codec_tag = 0;
		//if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
			//out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
	}
	//Dump Format------------------
	av_dump_format(ofmt_ctx, 0, out_filename, 1);
	//Open output URL
	if (!(ofmt->flags & AVFMT_NOFILE)) {
		ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
		if (ret < 0) {
			printf( "Could not open output URL '%s'", out_filename);
			goto end;
		}
	}
	//Write file header
	ret = avformat_write_header(ofmt_ctx, NULL);
	if (ret < 0) {
		printf( "Error occurred when opening output URL\n");
		goto end;
	}

#if USE_H264BSF
	AVBitStreamFilterContext* h264bsfc =  av_bitstream_filter_init("h264_mp4toannexb"); 
#endif

	while (1) {
		AVStream *in_stream, *out_stream;
		//Get an AVPacket
		ret = av_read_frame(ifmt_ctx, &pkt);
		if (ret < 0)
			break;
		
		in_stream  = ifmt_ctx->streams[pkt.stream_index];
		out_stream = ofmt_ctx->streams[pkt.stream_index];
		/* copy packet */
		//Convert PTS/DTS
		pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
		pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
		pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
		pkt.pos = -1;
		//Print to Screen
		if(pkt.stream_index==videoindex){
			printf("Receive %8d video frames from input URL\n",frame_index);
			frame_index++;

#if USE_H264BSF
			av_bitstream_filter_filter(h264bsfc, in_stream->codec, NULL, &pkt.data, &pkt.size, pkt.data, pkt.size, 0);
#endif
		}
		//ret = av_write_frame(ofmt_ctx, &pkt);
		ret = av_interleaved_write_frame(ofmt_ctx, &pkt);

		if (ret < 0) {
			printf( "Error muxing packet\n");
			break;
		}
		
		av_free_packet(&pkt);
		
	}

#if USE_H264BSF
	av_bitstream_filter_close(h264bsfc);  
#endif

	//Write file trailer
	av_write_trailer(ofmt_ctx);
end:
	avformat_close_input(&ifmt_ctx);
	/* close output */
	if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
		avio_close(ofmt_ctx->pb);
	avformat_free_context(ofmt_ctx);
	if (ret < 0 && ret != AVERROR_EOF) {
		printf( "Error occurred.\n");
		return -1;
	}
	return 0;
}

Notes

  1. in_filename = "rtmp://localhost/live/room"; is the URL the receiver pulls from and must match the sender. Other RTMP URLs can be used as well (some of them may require small code changes).
  2. out_filename = "receive.flv"; writes the received stream to an "*.flv" file. Other containers are possible but require code changes, since that involves FFmpeg's format conversion.
  3. The two lines
    //if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
    //out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
    are commented out for the same reason as in the sender: CODEC_FLAG_GLOBAL_HEADER no longer exists in this FFmpeg version; AV_CODEC_FLAG_GLOBAL_HEADER is the replacement (see the sketch in the sender notes).

Compile

gcc simplest_ffmpeg_receiver.cpp -g -o simplest_ffmpeg_receiver.out \
-I /usr/local/ffmpeg/include -L /usr/local/ffmpeg/lib -lavformat -lavcodec -lavutil

Adjust /usr/local/ffmpeg/include and /usr/local/ffmpeg/lib to the actual install path of FFmpeg on your system.

Run

When it finishes, you will find the "*.flv" output file (receive.flv) in the current directory.

Summary

Streaming media player

smplayer
(Installation steps omitted.) A quick end-to-end playback check is sketched below.
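To check the whole chain, any player that understands RTMP and FLV will do. For example (assuming ffplay from the same FFmpeg install, or smplayer, is on your PATH):

ffplay rtmp://localhost/live/room     # watch the live stream while the sender is pushing
smplayer receive.flv                  # or open the file written by the receiver afterwards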

As a beginner with FFmpeg, I needed a set of runnable programs to help me understand it. The notes above are what I put together while exploring; I hope they are useful to you.

This post draws on blog posts by several other authors.

Thank you!