H264視訊通過RTMP傳送
視訊通過RTMP方式釋出需要一個RTMP Server(常見的有FMS、Wowza Media Server, 開源的有CRtmpServer、Red5等),原始視訊只要按照RTMP協議傳送給RTMP Server就可以實現RTMP視訊流的釋出了。為了便於視訊的打包釋出,封裝了一個RTMPStream,目前只支援傳送H264的視訊檔案。可以直接傳送H264資料幀或H264檔案,RTMPStream提供的介面如下。
class CRTMPStream { public: CRTMPStream(void); ~CRTMPStream(void); public: // 連線到RTMP Server bool Connect(const char* url); // 斷開連線 void Close(); // 傳送MetaData bool SendMetadata(LPRTMPMetadata lpMetaData); // 傳送H264資料幀 bool SendH264Packet(unsigned char *data,unsigned int size,bool bIsKeyFrame,unsigned int nTimeStamp); // 傳送H264檔案 bool SendH264File(const char *pFileName); //... }
呼叫示例:
#include <stdio.h> #include "RTMPStream\RTMPStream.h" int main(int argc,char* argv[]) { CRTMPStream rtmpSender; bool bRet = rtmpSender.Connect("rtmp://192.168.1.104/live/test"); rtmpSender.SendH264File("E:\\video\\test.264"); rtmpSender.Close(); }
通過JwPlayer播放效果如下:
最後附上RTMPStream完整的程式碼:
/********************************************************************
filename: RTMPStream.h
created: 2013-04-3
author: firehood
purpose: 傳送H264視訊到RTMP Server,使用libRtmp庫
*********************************************************************/
#pragma once
#include "rtmp.h"
#include "rtmp_sys.h"
#include "amf.h"
#include <stdio.h>
#define FILEBUFSIZE (1024 * 1024 * 10) // 10M
// NALU unit: one H.264 NAL unit located inside the file buffer.
typedef struct _NaluUnit
{
int type; // nal_unit_type: low 5 bits of the first NALU byte
int size; // payload size in bytes
unsigned char *data; // points into CRTMPStream::m_pFileBuf (not owned)
}NaluUnit;
// Stream metadata handed to CRTMPStream::SendMetadata().
typedef struct _RTMPMetadata
{
    // video, must be h264 type
    unsigned int nWidth;          // frame width in pixels
    unsigned int nHeight;         // frame height in pixels
    unsigned int nFrameRate;      // fps
    unsigned int nVideoDataRate;  // bps
    unsigned int nSpsLen;         // valid bytes in Sps[]
    unsigned char Sps[1024];      // H.264 sequence parameter set (no start code)
    unsigned int nPpsLen;         // valid bytes in Pps[]
    unsigned char Pps[1024];      // H.264 picture parameter set (no start code)
    // audio, must be aac type
    bool bHasAudio;
    unsigned int nAudioSampleRate;
    unsigned int nAudioSampleSize;
    unsigned int nAudioChannels;
    // FIX: was `char pAudioSpecCfg;` — a single char cannot hold the AAC
    // AudioSpecificConfig buffer that the `p` prefix and the companion
    // length field below imply; make it a pointer to the config bytes.
    char* pAudioSpecCfg;
    unsigned int nAudioSpecCfgLen; // length of pAudioSpecCfg in bytes
} RTMPMetadata,*LPRTMPMetadata;
// Publishes H.264 video to an RTMP server using libRtmp.
class CRTMPStream
{
public:
CRTMPStream(void);
~CRTMPStream(void);
public:
// Connect to the RTMP server at `url` (e.g. rtmp://host/app/stream)
bool Connect(const char* url);
// Disconnect and free the librtmp session
void Close();
// Send the stream MetaData (AMF onMetaData + AVC sequence header)
bool SendMetadata(LPRTMPMetadata lpMetaData);
// Send one H264 frame (NAL payload, without start code)
bool SendH264Packet(unsigned char *data,unsigned int size,bool bIsKeyFrame,unsigned int nTimeStamp);
// Send a whole H264 (annex-B) file
bool SendH264File(const char *pFileName);
private:
// Read one NALU from the file buffer (original comment "送快取" is a typo for "從快取")
bool ReadOneNaluFromBuf(NaluUnit &nalu);
// Send one packet of the given type over the RTMP connection
int SendPacket(unsigned int nPacketType,unsigned char *data,unsigned int size,unsigned int nTimestamp);
private:
RTMP* m_pRtmp; // librtmp session handle (owned)
unsigned char* m_pFileBuf; // FILEBUFSIZE (10MB) file buffer (owned)
unsigned int m_nFileBufSize; // valid bytes in m_pFileBuf
unsigned int m_nCurPos; // current parse offset in m_pFileBuf
};
原始檔:
/********************************************************************
filename: RTMPStream.cpp
created: 2013-04-3
author: firehood
modify: 2013-6-7
modify: godspeed513
purpose: 傳送H264視訊到RTMP Server,使用libRtmp庫
*********************************************************************/
#include "RTMPStream.h"
//#include "SpsDecode.h" 原作者沒有提供 modify by godspeed513
#ifdef WIN32
#include <windows.h>
#endif
#ifdef WIN32
#pragma comment(lib,"WS2_32.lib")
#pragma comment(lib,"winmm.lib")
#endif
// Legacy CIF buffer size (352x288). No longer referenced: the size check
// in ReadOneNaluFromBuf that used it was commented out (see godspeed513's
// modification below).
int nBufferSize = 352*288;
// FLV VIDEODATA CodecID value for AVC/H.264 (FLV specification: 7).
enum
{
FLV_CODECID_H264 = 7,
};
// Initialise the platform socket layer.
// Returns non-zero on success, 0 on failure.
int InitSockets()
{
#ifdef WIN32
    // FIX: request Winsock 2.2 (librtmp requires Winsock 2); the original
    // asked for the ancient 1.1 API.
    WORD version = MAKEWORD(2, 2);
    WSADATA wsaData;
    return (WSAStartup(version, &wsaData) == 0);
#else
    // FIX: the original returned TRUE, a macro only defined by the
    // Windows/rtmp_sys headers; use a plain literal so POSIX builds work.
    return 1;
#endif
}
// Tear down the platform socket layer; a no-op outside Windows.
inline void CleanupSockets()
{
#ifdef WIN32
    WSACleanup();
#endif
}
// Write one byte into the output buffer; returns the advanced pointer.
char * put_byte( char *output, uint8_t nVal )
{
    *output = (char)nVal;
    return output + 1;
}
// Write a 16-bit value big-endian; returns the advanced pointer.
char * put_be16(char *output, uint16_t nVal )
{
    *output++ = (char)(nVal >> 8);
    *output++ = (char)(nVal & 0xff);
    return output;
}
// Write the low 24 bits of a value big-endian; returns the advanced pointer.
char * put_be24(char *output,uint32_t nVal )
{
    for (int shift = 16; shift >= 0; shift -= 8)
        *output++ = (char)((nVal >> shift) & 0xff);
    return output;
}
// Write a 32-bit value big-endian; returns the advanced pointer.
char * put_be32(char *output, uint32_t nVal )
{
    for (int shift = 24; shift >= 0; shift -= 8)
        *output++ = (char)((nVal >> shift) & 0xff);
    return output;
}
// Write a 64-bit value big-endian (same byte stream the original produced
// via two put_be32 calls); returns the advanced pointer.
char * put_be64( char *output, uint64_t nVal )
{
    for (int shift = 56; shift >= 0; shift -= 8)
        *output++ = (char)((nVal >> shift) & 0xff);
    return output;
}
// Write an AMF string payload: 16-bit big-endian length followed by the raw
// characters (no type marker). Returns the advanced pointer.
char * put_amf_string( char *c, const char *str )
{
    const uint16_t len = (uint16_t)strlen( str );
    c = put_be16( c, len );
    memcpy( c, str, len );
    return c + len;
}
// Write an AMF Number: the AMF_NUMBER type marker followed by the IEEE-754
// double in big-endian byte order. Returns the advanced pointer.
char * put_amf_double( char *c, double d )
{
    *c++ = AMF_NUMBER; /* type: Number */
    const unsigned char *src = (const unsigned char *)&d;
    unsigned char *dst = (unsigned char *)c;
    for (int k = 0; k < 8; ++k)
        dst[k] = src[7 - k];   // reverse host (little-endian) byte order
    return c + 8;
}
// Allocate the 10MB file buffer, bring up the socket layer and create the
// librtmp session.
CRTMPStream::CRTMPStream(void):
m_pRtmp(NULL),
m_nFileBufSize(0),
m_nCurPos(0)
{
    m_pFileBuf = new unsigned char[FILEBUFSIZE];
    memset(m_pFileBuf, 0, FILEBUFSIZE);
    // FIX: the original discarded InitSockets()'s result; report failure so a
    // broken Winsock install is not discovered only when Connect() fails.
    if (!InitSockets())
    {
        printf("ERROR: InitSockets failed!\n");
    }
    m_pRtmp = RTMP_Alloc();
    RTMP_Init(m_pRtmp);
}
// Close the session, tear down the socket layer and free the file buffer.
CRTMPStream::~CRTMPStream(void)
{
    Close();
    // FIX: the original called WSACleanup() directly, which only exists on
    // Windows and broke every non-Windows build; use the guarded helper.
    CleanupSockets();
    delete[] m_pFileBuf;
}
// Connect to the RTMP server given by `url` (rtmp://host/app/stream) and
// open the publishing stream. Returns TRUE on success.
bool CRTMPStream::Connect(const char* url)
{
    // FIX: librtmp's RTMP_SetupURL / RTMP_Connect / RTMP_ConnectStream
    // return TRUE(1) or FALSE(0), never a negative value, so the original
    // `< 0` tests could never detect failure. Test for FALSE instead.
    if (!RTMP_SetupURL(m_pRtmp, (char*)url))
    {
        return FALSE;
    }
    RTMP_EnableWrite(m_pRtmp);  // must be enabled before connecting to publish
    if (!RTMP_Connect(m_pRtmp, NULL))
    {
        return FALSE;
    }
    if (!RTMP_ConnectStream(m_pRtmp, 0))
    {
        return FALSE;
    }
    return TRUE;
}
// Tear down the RTMP session and release the librtmp handle (idempotent).
void CRTMPStream::Close()
{
    if (m_pRtmp == NULL)
        return;
    RTMP_Close(m_pRtmp);
    RTMP_Free(m_pRtmp);
    m_pRtmp = NULL;
}
int CRTMPStream::SendPacket(unsigned int nPacketType,unsigned char *data,unsigned int size,unsigned int nTimestamp)
{
if(m_pRtmp == NULL)
{
return FALSE;
}
RTMPPacket packet;
RTMPPacket_Reset(&packet);
RTMPPacket_Alloc(&packet,size);
packet.m_packetType = nPacketType;
packet.m_nChannel = 0x04;
packet.m_headerType = RTMP_PACKET_SIZE_LARGE;
packet.m_nTimeStamp = nTimestamp;
packet.m_nInfoField2 = m_pRtmp->m_stream_id;
packet.m_nBodySize = size;
memcpy(packet.m_body,data,size);
int nRet = RTMP_SendPacket(m_pRtmp,&packet,0);
RTMPPacket_Free(&packet);
return nRet;
}
// Send the stream metadata: an AMF0 @setDataFrame("onMetaData", {...})
// object, followed by the FLV "AVC sequence header" video tag containing
// the AVCDecoderConfigurationRecord built from lpMetaData's SPS/PPS.
// Must be sent before any frame data or players cannot decode.
bool CRTMPStream::SendMetadata(LPRTMPMetadata lpMetaData)
{
    if(lpMetaData == NULL)
    {
        return false;
    }
    // FIX: guard against SPS/PPS that cannot fit into the 1024-byte body
    // below (5 FLV tag bytes + 8 config bytes + SPS + 3 bytes + PPS); the
    // original memcpy'd unchecked and could overflow the stack buffer.
    if(lpMetaData->nSpsLen + lpMetaData->nPpsLen + 16 > 1024)
    {
        return false;
    }
    char body[1024] = {0};
    char * p = (char *)body;
    // ---- AMF0: @setDataFrame("onMetaData", { ... }) ----
    p = put_byte(p, AMF_STRING );
    p = put_amf_string(p , "@setDataFrame" );
    p = put_byte( p, AMF_STRING );
    p = put_amf_string( p, "onMetaData" );
    p = put_byte(p, AMF_OBJECT );
    p = put_amf_string( p, "copyright" );
    p = put_byte(p, AMF_STRING );
    p = put_amf_string( p, "firehood" );
    p = put_amf_string( p, "width");
    p = put_amf_double( p, lpMetaData->nWidth);
    p = put_amf_string( p, "height");
    p = put_amf_double( p, lpMetaData->nHeight);
    p = put_amf_string( p, "framerate" );
    p = put_amf_double( p, lpMetaData->nFrameRate);
    p = put_amf_string( p, "videocodecid" );
    p = put_amf_double( p, FLV_CODECID_H264 );
    p = put_amf_string( p, "" );
    p = put_byte( p, AMF_OBJECT_END );
    // FIX: dropped the unused `index` local the original computed here.
    SendPacket(RTMP_PACKET_TYPE_INFO,(unsigned char*)body,p-body,0);
    // ---- FLV video tag: "AVC sequence header" ----
    int i = 0;
    body[i++] = 0x17;                // frame type 1 (keyframe) + codec id 7 (AVC)
    body[i++] = 0x00;                // AVCPacketType 0: sequence header
    body[i++] = 0x00;                // composition time: 0
    body[i++] = 0x00;
    body[i++] = 0x00;
    // AVCDecoderConfigurationRecord
    body[i++] = 0x01;                // configurationVersion
    body[i++] = lpMetaData->Sps[1];  // AVCProfileIndication
    body[i++] = lpMetaData->Sps[2];  // profile_compatibility
    body[i++] = lpMetaData->Sps[3];  // AVCLevelIndication
    body[i++] = 0xff;                // 6 reserved bits + lengthSizeMinusOne = 3 (4-byte NAL lengths)
    body[i++] = 0xE1;                // 3 reserved bits + numOfSequenceParameterSets = 1
    body[i++] = lpMetaData->nSpsLen>>8;    // SPS length, big-endian
    body[i++] = lpMetaData->nSpsLen&0xff;
    memcpy(&body[i],lpMetaData->Sps,lpMetaData->nSpsLen);
    i = i+lpMetaData->nSpsLen;
    body[i++] = 0x01;                // numOfPictureParameterSets
    body[i++] = lpMetaData->nPpsLen>>8;    // PPS length, big-endian
    body[i++] = lpMetaData->nPpsLen&0xff;
    memcpy(&body[i],lpMetaData->Pps,lpMetaData->nPpsLen);
    i = i+lpMetaData->nPpsLen;
    return SendPacket(RTMP_PACKET_TYPE_VIDEO,(unsigned char*)body,i,0);
}
// Send one H.264 NAL unit as an FLV "AVC NALU" video tag.
// data/size : NAL payload (no start code); bIsKeyFrame selects the FLV
// frame-type nibble; nTimeStamp is in milliseconds.
bool CRTMPStream::SendH264Packet(unsigned char *data,unsigned int size,bool bIsKeyFrame,unsigned int nTimeStamp)
{
    // FIX: the original tested `data == NULL && size < 11`, which let a
    // NULL pointer through whenever size >= 11 and then crashed in memcpy.
    if(data == NULL || size == 0)
    {
        return false;
    }
    unsigned char *body = new unsigned char[size+9];
    int i = 0;
    // FLV VideoTagHeader
    body[i++] = bIsKeyFrame ? 0x17 : 0x27;  // (1:I-frame | 2:P-frame)<<4 | 7:AVC
    body[i++] = 0x01;                       // AVCPacketType 1: NALU
    body[i++] = 0x00;                       // composition time: 0
    body[i++] = 0x00;
    body[i++] = 0x00;
    // 4-byte big-endian NAL length (matches lengthSizeMinusOne = 3)
    body[i++] = size>>24;
    body[i++] = size>>16;
    body[i++] = size>>8;
    body[i++] = size&0xff;
    // NAL payload
    memcpy(&body[i],data,size);
    bool bRet = SendPacket(RTMP_PACKET_TYPE_VIDEO,body,i+size,nTimeStamp);
    if(!bRet)
    {
        printf("SendH264Packet傳送失敗!\n");
    }
    delete[] body;
    return bRet;
}
// Read an annex-B H.264 file into the internal buffer and publish it:
// first onMetaData + AVC sequence header (from the file's SPS/PPS), then
// one video tag per selected NALU, paced at 25 fps.
// Returns TRUE once the whole buffered file has been consumed.
bool CRTMPStream::SendH264File(const char *pFileName)
{
    if(pFileName == NULL)
    {
        return FALSE;
    }
    FILE *fp = fopen(pFileName, "rb");
    if(!fp)
    {
        printf("ERROR:open file %s failed!",pFileName);
        // FIX: the original fell through here and dereferenced the NULL
        // FILE* in fseek/fread, crashing on a missing file.
        return FALSE;
    }
    fseek(fp, 0, SEEK_SET);
    // Read the whole file (up to FILEBUFSIZE bytes) into m_pFileBuf.
    m_nFileBufSize = fread(m_pFileBuf, sizeof(unsigned char), FILEBUFSIZE, fp);
    if(m_nFileBufSize >= FILEBUFSIZE)
    {
        printf("warning : File size is larger than BUFSIZE\n");
    }
    fclose(fp);
    RTMPMetadata metaData;
    memset(&metaData,0,sizeof(RTMPMetadata));
    NaluUnit naluUnit;
    // This file's NALU layout is SEI, SPS, PPS, IDR, ... so the leading
    // SEI must be skipped for the next reads to land on SPS/PPS.
    ReadOneNaluFromBuf(naluUnit);   // skip SEI (modify by godspeed513)
    // SPS
    ReadOneNaluFromBuf(naluUnit);
    metaData.nSpsLen = naluUnit.size;
    memcpy(metaData.Sps,naluUnit.data,naluUnit.size);
    // PPS
    ReadOneNaluFromBuf(naluUnit);
    metaData.nPpsLen = naluUnit.size;
    memcpy(metaData.Pps,naluUnit.data,naluUnit.size);
    // The SPS decoder (h264_decode_sps) is unavailable in this build, so
    // resolution and framerate are hard-coded — TODO: derive from the SPS.
    metaData.nWidth = 320;
    metaData.nHeight = 240;
    metaData.nFrameRate = 25;
    // Send MetaData + AVC sequence header before any frames.
    SendMetadata(&metaData);
    unsigned int tick = 0;
    while(ReadOneNaluFromBuf(naluUnit))
    {
        bool bKeyframe = (naluUnit.type == 0x05) ? TRUE : FALSE;
        // Only IDR (type 5) and PPS (type 8) NALUs are sent; SEI and SPS
        // are skipped (modify by godspeed513).
        if(naluUnit.type == 0x05 || naluUnit.type == 0x08)
        {
            SendH264Packet(naluUnit.data,naluUnit.size,bKeyframe,tick);
            msleep(40);   // 1000ms / 25fps = 40ms per frame
            tick += 40;
            printf("%d\n",tick);
        }
    }
    return TRUE;
}
// Scan the file buffer from m_nCurPos for the next 4-byte start code
// (00 00 00 01) and return the NALU that follows it in `nalu` (type, size
// and a pointer into m_pFileBuf — no copy). Returns false at end of buffer.
// FIXES vs original:
//  - the chained `m_pFileBuf[i++] == 0x00 && ...` scan consumed 1-4 bytes
//    per failed probe (short-circuit), so misaligned start codes could be
//    skipped entirely;
//  - nalu.size (= pos - i) included the NEXT start code's four bytes
//    whenever another NALU followed, corrupting the SPS/PPS copied into
//    the metadata and every NAL payload sent;
//  - the loop compared signed `i` against the unsigned m_nFileBufSize.
bool CRTMPStream::ReadOneNaluFromBuf(NaluUnit &nalu)
{
    unsigned int i = m_nCurPos;
    while(i + 4 <= m_nFileBufSize)
    {
        if(m_pFileBuf[i] == 0x00 && m_pFileBuf[i+1] == 0x00 &&
           m_pFileBuf[i+2] == 0x00 && m_pFileBuf[i+3] == 0x01)
        {
            unsigned int start = i + 4;   // first byte of the NALU payload
            unsigned int pos = start;
            // Advance to the next start code, or to the end of the buffer.
            while(pos + 4 <= m_nFileBufSize)
            {
                if(m_pFileBuf[pos] == 0x00 && m_pFileBuf[pos+1] == 0x00 &&
                   m_pFileBuf[pos+2] == 0x00 && m_pFileBuf[pos+3] == 0x01)
                {
                    break;
                }
                pos++;
            }
            if(pos + 4 > m_nFileBufSize)
            {
                pos = m_nFileBufSize;     // last NALU runs to end of buffer
            }
            nalu.type = m_pFileBuf[start] & 0x1f;  // nal_unit_type
            nalu.size = pos - start;
            nalu.data = &m_pFileBuf[start];
            m_nCurPos = pos;              // resume at the next start code
            return true;
        }
        i++;
    }
    return false;
}
相關推薦
H264視訊通過RTMP傳送
前面的文章中提到了通過RTSP(Real Time Streaming Protocol)的方式來實現視訊的直播,但RTSP方式的一個弊端是如果需要支援客戶端通過網頁來訪問,就需要在在頁面中嵌入一個ActiveX控制元件,而ActiveX一般都需要簽名才能正常
H264視訊通過RTMP直播
前面的文章中提到了通過RTSP(Real Time Streaming Protocol)的方式來實現視訊的直播,但RTSP方式的一個弊端是如果需要支援客戶端通過網頁來訪問,就需要在在頁面中嵌入一個ActiveX控制元件,而ActiveX一般都需要簽名才
通過RTMP傳送264流(檔案或幀)
H264視訊通過RTMP傳送 題外話,通過RTSP(Real Time Streaming Protocol)的方式來實現視訊的直播,但RTSP方式的一個弊端是如果需要支援客戶端通過網頁來訪問,就需要在在頁面中嵌入一個ActiveX控制元件,而ActiveX一
利用ffmpeg進行攝像頭提取視訊編碼為h264通過RTP傳送資料到指定的rtp地址
話不多說命令如下: ffmpeg -f dshow -i video="Logitech QuickCam Easy/Cool" -vcodec libx264 -preset:v ultrafast -tune:v zerolatency -f rtp rtp://127
rtmp傳送H264及aac的音視訊
RTMP推送的音視訊流的封裝形式和FLV格式相似,由此可知,向FMS推送H264和AAC直播流,需要首先發送"AVC sequence header"和"AAC sequence header",這兩項資料包含的是重要的編碼資訊,沒有它們,解碼器將無法解碼。 AVC
linux系統RTMPdump(libRTMP) 通過RTMP 釋出H264資料
關於RTMPdump的使用介紹,很多的都是在Windows平臺的應用,雷神有做一個系列的分析,但是雷神的也主要是以Windows平臺為主。本文主要的工作是將雷神《最簡單的基於librtmp的示例:釋出H.264(H.264通過RTMP釋出)》中的工程移植到linux系
通過LIBRTMP傳送H264和AAC
</pre><span style="font-family:KaiTi_GB2312;"><br style="margin: 0px; padding: 0px; font-size: 14px; line-height: 24px;" /><span style
RTMP資料流提取RTMP視訊流組成H264視訊檔案
首先我們獲得h264的流,在監聽裡,我們通過引數可以獲得RTMP包 IStreamPacket,呼叫getData()方法直接獲得包資料 放入IOBuffer。以下是提取並修改資料存成h264檔案的步驟 1. 新增監聽 IStreamListener 2. 通過IOBu
RTMP協議以及提取RTMP視訊流組成H264視訊檔案
首先我們獲得h264的流,在監聽裡,我們通過引數可以獲得RTMP包 IStreamPacket,呼叫getData()方法直接獲得包資料 放入IOBuffer。以下是提取並修改資料存成h264檔案的步驟 1. 新增監聽 IStreamListener 2. 通
telnet通過smtp傳送郵件
////////////////////////////////////CMD指令////////////////////////////////////////////// cmd telnet smtp.163.com 25 or &nb
通過javamailsenderimpl傳送郵件
參考文章:http://blog.csdn.net/qq_33556185/article/details/51028952 import javax.mail.internet.MimeMessage; import org.springframework.beans.factory.anno
FFmpeg 4.0.2編碼YUV序列為H264視訊檔案
/****************************** 功能:編碼YUV序列為h264視訊檔案 FFmpeg:4.0.2 ******************************/ #include <iostream> extern "C" { #include &
python 將視訊 通過視訊幀轉換成時間
def frames_to_timecode(framerate,frames): """ 視訊 通過視訊幀轉換成時間 :param framerate: 視訊幀率 :param frames: 當前視訊幀數 :return:時間(00:00:01:0
通過python傳送簡訊,樹莓派實現報警系統!
看到用python程式碼實現簡訊傳送的文章,感覺以後可以結合樹莓派進行監控報警資訊的傳送,所以就研究了一下,整個過程還是比較簡單的,註冊一個賬戶再寫幾行程式碼就可以了。 這裡用到的簡訊代理服務商是Twilio,國內也有不少這樣的服務商,比如阿里雲。
java通過SMTP傳送QQ郵件的完全步驟
轉自: https://blog.csdn.net/qq422733429/article/details/51280020 java通過SMTP傳送QQ郵件的完全步驟 首先,我們需要開啟QQ郵箱的SMTP服務,因為QQ郵箱對於一般的使用者都是預設關閉SM
Spring(非boot)通過ssl傳送郵件
參考:https://blog.csdn.net/liguo9860/article/details/6874040 我這裡就說一下使用Spring的JavaMailSender如何實現 1. 配置JavaMailSender @Bean public JavaM
js通過外掛傳送郵件
這個外掛為SmtpJS 官網地址為 https://www.smtpjs.com/ 方法很簡單 <script src="https://smtpjs.com/v2/smtp.js"> </script> Email.send( "[email
通過ajax傳送post請求向controller傳遞引數後,頁面無法跳轉問題
我的專案之前都是用location.href來向後臺傳遞引數,因為最近在學習ajax,所以將傳值方式改成了ajax傳送post請求,於是問題便接踵而來,我發現在controller中通過返回ModelAndView物件竟然不能實現頁面跳轉了,剛開始百思不得其解,現在總算明白這到底是怎麼一回事了,也想
h264視訊編解碼
KevinLib開發類庫說明本類庫為快速發視訊系統必備參考之一,實現介面簡單,開放原始碼,可以無限制的重複使用 開發工具 VC++7.0 實現了視訊採集,音訊採集,壓縮解壓編碼:H264,MPEG4,WMV9,DIVX,XVID等 另外類庫裡有一些檔案操作類,介面十分簡單,十分鐘就可以建
c++通過dispatchCustomEvent傳送事件,通知js層,進入前臺和後臺
// This function will be called when the app is inactive. When comes a phone call,it's be invoked too void AppDelegate::applicationDidEnterBackgroun