Camera capture, H.264 encoding, live555 streaming (1)
1. First, modify live555 by defining a class that obtains its source directly from memory, rather than reading the source from a file.
The class implemented here is named H264FramedLiveSource.
/*
 * Filename: H264FramedLiveSource.hh
 * Author: chenbin
 * Create date: 2013/ 1/22
 */
#ifndef _H264FRAMEDLIVESOURCE_HH
#define _H264FRAMEDLIVESOURCE_HH

#include <FramedSource.hh>
#include <stdio.h> // for FILE

class H264FramedLiveSource : public FramedSource
{
public:
    static H264FramedLiveSource* createNew(UsageEnvironment& env,
                                           char const* fileName,
                                           unsigned preferredFrameSize = 0,
                                           unsigned playTimePerFrame = 0);

protected:
    H264FramedLiveSource(UsageEnvironment& env,
                         char const* fileName,
                         unsigned preferredFrameSize,
                         unsigned playTimePerFrame); // called only by createNew()
    ~H264FramedLiveSource();

private:
    // redefined virtual functions:
    virtual void doGetNextFrame();
    int TransportData(unsigned char* to, unsigned maxSize);

protected:
    FILE* fp;
};

#endif
/*
 * Filename: H264FramedLiveSource.cpp
 * Author: mlj
 * Create date: 2013/ 1/22
 */
#include "H264FramedLiveSource.hh"

H264FramedLiveSource::H264FramedLiveSource(UsageEnvironment& env,
                                           char const* fileName,
                                           unsigned preferredFrameSize,
                                           unsigned playTimePerFrame)
    : FramedSource(env)
{
    fp = fopen(fileName, "rb");
}

H264FramedLiveSource* H264FramedLiveSource::createNew(UsageEnvironment& env,
                                                      char const* fileName,
                                                      unsigned preferredFrameSize /*= 0*/,
                                                      unsigned playTimePerFrame /*= 0*/)
{
    H264FramedLiveSource* newSource =
        new H264FramedLiveSource(env, fileName, preferredFrameSize, playTimePerFrame);
    return newSource;
}

H264FramedLiveSource::~H264FramedLiveSource()
{
    fclose(fp);
}

// Returns the total length of the file, preserving the current position:
long filesize(FILE* stream)
{
    long curpos, length;
    curpos = ftell(stream);
    fseek(stream, 0L, SEEK_END);
    length = ftell(stream);
    fseek(stream, curpos, SEEK_SET);
    return length;
}

void H264FramedLiveSource::doGetNextFrame()
{
    // Copy at most fMaxSize bytes into fTo; when the whole file fits in
    // one read, rewind so the test data loops:
    if (filesize(fp) > fMaxSize)
        fFrameSize = fread(fTo, 1, fMaxSize, fp);
    else {
        fFrameSize = fread(fTo, 1, filesize(fp), fp);
        fseek(fp, 0, SEEK_SET);
    }
    //fFrameSize = fMaxSize;

    // Run the afterGetting function after a delay of 0 seconds:
    nextTask() = envir().taskScheduler().scheduleDelayedTask(
        0, (TaskFunc*)FramedSource::afterGetting, this);
    return;
}
In H264FramedLiveSource::doGetNextFrame(), the data to be sent is copied into fTo (at most fMaxSize bytes), and fFrameSize is set to the number of bytes actually delivered. For now the data is still read from a file, as a test.
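Since the goal is to stream from memory, doGetNextFrame() would eventually pull encoded frames from a buffer filled by the camera/encoder thread instead. Below is a minimal sketch of such a variant, assuming a hypothetical queue g_frameQueue of complete H.264 NAL units; EncodedFrame, g_frameQueue, and retryGetFrame are illustrative names, not live555 APIs, and a real implementation also needs synchronization between the encoder thread and the live555 event loop (e.g. via TaskScheduler::triggerEvent), omitted here for brevity.

#include <deque>
#include <vector>
#include <string.h>

// Hypothetical queue of complete H.264 NAL units, filled by the encoder thread:
struct EncodedFrame { std::vector<unsigned char> data; };
extern std::deque<EncodedFrame> g_frameQueue;

// Assumes the class additionally declares: static void retryGetFrame(void* clientData);
void H264FramedLiveSource::retryGetFrame(void* clientData)
{
    ((H264FramedLiveSource*)clientData)->doGetNextFrame();
}

void H264FramedLiveSource::doGetNextFrame()
{
    if (g_frameQueue.empty()) {
        // No frame ready yet: poll again in 10 ms instead of blocking the event loop.
        nextTask() = envir().taskScheduler().scheduleDelayedTask(
            10000, (TaskFunc*)retryGetFrame, this);
        return;
    }
    EncodedFrame& frame = g_frameQueue.front();
    if (frame.data.size() > fMaxSize) {
        // Frame is larger than the sink's buffer: deliver what fits, report the rest.
        fFrameSize = fMaxSize;
        fNumTruncatedBytes = frame.data.size() - fMaxSize;
    } else {
        fFrameSize = frame.data.size();
        fNumTruncatedBytes = 0;
    }
    memcpy(fTo, &frame.data[0], fFrameSize);
    g_frameQueue.pop_front();

    // Hand the frame downstream, exactly as in the file-based version:
    nextTask() = envir().taskScheduler().scheduleDelayedTask(
        0, (TaskFunc*)FramedSource::afterGetting, this);
}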
2. Define your own ServerMedia subsession
/*
 * Filename: H264LiveVideoServerMediaSubssion.hh
 * Author: mlj
 * Create date: 2013/ 1/22
 */
#ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
#define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH

#include "H264VideoFileServerMediaSubsession.hh"

class H264LiveVideoServerMediaSubssion : public H264VideoFileServerMediaSubsession
{
public:
    static H264LiveVideoServerMediaSubssion* createNew(UsageEnvironment& env,
                                                       char const* fileName,
                                                       Boolean reuseFirstSource);

protected: // we're a virtual base class
    H264LiveVideoServerMediaSubssion(UsageEnvironment& env,
                                     char const* fileName,
                                     Boolean reuseFirstSource);
    ~H264LiveVideoServerMediaSubssion();

protected: // redefined virtual functions
    FramedSource* createNewStreamSource(unsigned clientSessionId,
                                        unsigned& estBitrate);

public:
    char fFileName[100];
};

#endif
/*
 * Filename: H264LiveVideoServerMediaSubssion.cpp
 * Author: chenbin
 * Create date: 2012/11/29
 */
#include "H264LiveVideoServerMediaSubssion.hh"
#include "H264FramedLiveSource.hh"
#include "H264VideoStreamFramer.hh"
#include <string.h> // for strcpy

H264LiveVideoServerMediaSubssion* H264LiveVideoServerMediaSubssion::createNew(
    UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource)
{
    return new H264LiveVideoServerMediaSubssion(env, fileName, reuseFirstSource);
}

H264LiveVideoServerMediaSubssion::H264LiveVideoServerMediaSubssion(
    UsageEnvironment& env, char const* fileName, Boolean reuseFirstSource)
    : H264VideoFileServerMediaSubsession(env, fileName, reuseFirstSource)
{
    strcpy(fFileName, fileName);
}

H264LiveVideoServerMediaSubssion::~H264LiveVideoServerMediaSubssion()
{
}

FramedSource* H264LiveVideoServerMediaSubssion::createNewStreamSource(
    unsigned clientSessionId, unsigned& estBitrate)
{
    /* TODO: assign estBitrate properly */
    estBitrate = 1000; // kbps, estimate

    // Create the video source:
    H264FramedLiveSource* liveSource = H264FramedLiveSource::createNew(envir(), fFileName);
    if (liveSource == NULL) {
        return NULL;
    }

    // Create a framer for the Video Elementary Stream:
    return H264VideoStreamFramer::createNew(envir(), liveSource);
}
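Note that createNewStreamSource() does not return the raw source directly: the H264VideoStreamFramer wrapped around it parses the incoming H.264 byte stream into NAL units and derives presentation times, which the RTP sink and the session's SDP description depend on. Swapping in our source class is therefore enough to change where the data comes from; the rest of the live555 pipeline stays unchanged.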
3. The main function
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
for more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// Copyright (c) 1996-2012, Live Networks, Inc. All rights reserved
// A test program that demonstrates how to stream - via unicast RTP
// - various kinds of file on demand, using a built-in RTSP server.
// main program

#include "H264LiveVideoServerMediaSubssion.hh"
#include "H264FramedLiveSource.hh"
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

#pragma comment (lib, "Ws2_32.lib")
#pragma comment (lib, "BasicUsageEnvironment.lib")
#pragma comment (lib, "groupsock.lib")
#pragma comment (lib, "liveMedia.lib")
#pragma comment (lib, "UsageEnvironment.lib")

UsageEnvironment* env;

// To make the second and subsequent client for each stream reuse the same
// input stream as the first client (rather than playing the file from the
// start for each client), change the following "False" to "True":
Boolean reuseFirstSource = False;

// To stream *only* MPEG-1 or 2 video "I" frames
// (e.g., to reduce network bandwidth),
// change the following "False" to "True":
Boolean iFramesOnly = False;

static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
                           char const* streamName, char const* inputFileName); // fwd

// (Leftover from testOnDemandRTSPServer; unused in this example:)
static char newMatroskaDemuxWatchVariable;
static MatroskaFileServerDemux* demux;
static void onMatroskaDemuxCreation(MatroskaFileServerDemux* newDemux, void* /*clientData*/) {
    demux = newDemux;
    newMatroskaDemuxWatchVariable = 1;
}

int main(int argc, char** argv) {
    // Begin by setting up our usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
    // To implement client access control to the RTSP server, do the following:
    authDB = new UserAuthenticationDatabase;
    authDB->addUserRecord("username1", "password1"); // replace these with real strings
    // Repeat the above with each <username>, <password> that you wish to allow
    // access to the server.
#endif

    // Create the RTSP server:
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
    if (rtspServer == NULL) {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }

    char const* descriptionString = "Session streamed by \"testOnDemandRTSPServer\"";

    // Set up each of the possible streams that can be served by the
    // RTSP server. Each such stream is implemented using a
    // "ServerMediaSession" object, plus one or more
    // "ServerMediaSubsession" objects for each audio/video substream.
    // An H.264 video elementary stream:
    {
        char const* streamName = "h264ESVideoTest";
        char const* inputFileName = "test.264";
        ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName,
                                                                descriptionString);
        // Use our own H264LiveVideoServerMediaSubssion instead of the
        // stock file-based subsession:
        sms->addSubsession(H264LiveVideoServerMediaSubssion
                           ::createNew(*env, inputFileName, reuseFirstSource));
        rtspServer->addServerMediaSession(sms);

        announceStream(rtspServer, sms, streamName, inputFileName);
    }

    // Also, attempt to create a HTTP server for RTSP-over-HTTP tunneling.
    // Try first with the default HTTP port (80), and then with the alternative HTTP
    // port numbers (8000 and 8080).
    //if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080)) {
    //    *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
    //} else {
    //    *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
    //}

    env->taskScheduler().doEventLoop(); // does not return

    return 0; // only to prevent compiler warning
}

static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
                           char const* streamName, char const* inputFileName) {
    char* url = rtspServer->rtspURL(sms);
    UsageEnvironment& env = rtspServer->envir();
    env << "\n\"" << streamName << "\" stream, from the file \"" << inputFileName << "\"\n";
    env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;
}
Running ffplay.exe rtsp://115.156.164.19:8554/h264ESVideoTest plays the test.264 video.
Project configuration: the four live555 libraries are placed under the lib folder.
Library directory: G:\workspace\avs\live555test\live555test\lib
Include directories: G:\workspace\avs\live555test\live555test\BasicUsageEnvironment\include;G:\workspace\avs\live555test\live555test\UsageEnvironment\include;G:\workspace\avs\live555test\live555test\liveMedia\include;G:\workspace\avs\live555test\live555test\groupsock\include
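For reference, when building outside Visual Studio the #pragma comment(lib, ...) lines are MSVC-only and are ignored, so the four libraries must be named explicitly at link time. A plausible Linux compile-and-link line, assuming the live555 sources and built static libraries sit under a live/ directory (an assumed layout, not from the original post), would be:

g++ main.cpp H264FramedLiveSource.cpp H264LiveVideoServerMediaSubssion.cpp -Ilive/BasicUsageEnvironment/include -Ilive/UsageEnvironment/include -Ilive/liveMedia/include -Ilive/groupsock/include -Llive/lib -lliveMedia -lgroupsock -lBasicUsageEnvironment -lUsageEnvironment -o live555test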
Source code:
svn checkout http://live555-send-test.googlecode.com/svn/trunk/ live555-send-test-read-only

FROM: http://www.cnblogs.com/mlj318/archive/2013/01/23/2872932.html