
Dissecting the Ox ----- Demystifying the Live555 Source Code (Explained via openRTSP)

Points to note when openRTSP writes an H.264 file:

The openRTSP client consists of two files: openRTSP.cpp and playCommon.cpp.

1) Receiving the frame buffer and writing it to the file

void H264VideoFileSink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime) {
  unsigned char const start_code[4] = {0x00, 0x00, 0x00, 0x01};

  if (!fHaveWrittenFirstFrame) {
    // If we have PPS/SPS NAL units encoded in a "sprop parameter string", prepend these to the file:
    unsigned numSPropRecords;
    SPropRecord* sPropRecords = parseSPropParameterSets(fSPropParameterSetsStr, numSPropRecords);
    for (unsigned i = 0; i < numSPropRecords; ++i) {
      addData(start_code, 4, presentationTime);
      addData(sPropRecords[i].sPropBytes, sPropRecords[i].sPropLength, presentationTime);
    }
    delete[] sPropRecords;
    fHaveWrittenFirstFrame = True; // for next time
  }

  // Write the input data to the file, with the start code in front:
  addData(start_code, 4, presentationTime);

  // Call the parent class to complete the normal file write with the input data:
  FileSink::afterGettingFrame(frameSize, numTruncatedBytes, presentationTime);
}
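The fSPropParameterSetsStr used above is the Base64-encoded "sprop-parameter-sets" value from the stream's SDP (the fmtp line of the H.264 subsession); parseSPropParameterSets(), declared in H264VideoRTPSource.hh, decodes it into the raw SPS/PPS NAL units that get prepended to the file. A minimal standalone sketch of that decoding step, using a made-up sprop value purely for illustration:

#include "H264VideoRTPSource.hh" // declares SPropRecord and parseSPropParameterSets()
#include <cstdio>

void dumpSpropExample() {
  // Hypothetical SDP value; in openRTSP it comes from the subsession's fmtp attributes.
  char const* sprop = "Z0IAKeKQFAe2AtwEBAaQeJEV,aM48gA==";

  unsigned numRecords;
  SPropRecord* records = parseSPropParameterSets(sprop, numRecords);
  for (unsigned i = 0; i < numRecords; ++i) {
    // Each record holds one decoded NAL unit (typically the SPS, then the PPS):
    printf("NAL unit %u: %u bytes\n", i, records[i].sPropLength);
  }
  delete[] records;
}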

2) Building the output file name

// Create an output file for each desired stream:
char outFileName[1000];
if (singleMedium == NULL) {
  // Output file name is
  //     "<filename-prefix><medium_name>-<codec_name>-<counter>"
  static unsigned streamCounter = 0;
  // outFileName holds the generated output file name
  snprintf(outFileName, sizeof outFileName, "%s%s-%s-%d",
           fileNamePrefix, subsession->mediumName(),
           subsession->codecName(), ++streamCounter);
} else {
  sprintf(outFileName, "stdout");
}
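Once outFileName has been built, playCommon.cpp creates the matching sink for the subsession and starts it playing. A condensed sketch of that wiring for the H.264 case (the exact code differs between live555 versions, e.g. in how the sprop string is fetched, so treat this as an outline rather than a verbatim quote):

// Condensed from playCommon.cpp; details vary by live555 version.
FileSink* fileSink;
if (strcmp(subsession->mediumName(), "video") == 0 &&
    strcmp(subsession->codecName(), "H264") == 0) {
  // H.264 video gets the special sink that prepends start codes and the SPS/PPS from the SDP:
  fileSink = H264VideoFileSink::createNew(*env, outFileName,
                                          subsession->fmtp_spropparametersets(),
                                          fileSinkBufferSize, oneFilePerFrame);
} else {
  fileSink = FileSink::createNew(*env, outFileName,
                                 fileSinkBufferSize, oneFilePerFrame);
}
subsession->sink = fileSink;
subsession->sink->startPlaying(*(subsession->readSource()),
                               subsessionAfterPlaying, subsession);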

3) Saving the data to the file

void FileSink::afterGettingFrame(unsigned frameSize,
                                 unsigned numTruncatedBytes,
                                 struct timeval presentationTime) {
  if (numTruncatedBytes > 0) {
    envir() << "FileSink::afterGettingFrame(): The input frame data was too large for our buffer size ("
            << fBufferSize << ").  "
            << numTruncatedBytes << " bytes of trailing data was dropped! Correct this by increasing the \"bufferSize\" parameter in the \"createNew()\" call to at least "
            << fBufferSize + numTruncatedBytes << "\n";
  }
  addData(fBuffer, frameSize, presentationTime);

  if (fOutFid == NULL || fflush(fOutFid) == EOF) {
    // The output file has closed.  Handle this the same way as if the input source had closed:
    if (fSource != NULL) fSource->stopGettingFrames();
    onSourceClosure(this);
    return;
  }

  if (fPerFrameFileNameBuffer != NULL) {
    if (fOutFid != NULL) { fclose(fOutFid); fOutFid = NULL; }
  }

  // Then try getting the next frame:
  continuePlaying();
}
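The continuePlaying() call at the end is what keeps the pipeline running: it asks the upstream source for the next frame and re-registers afterGettingFrame() as the completion callback, so the sink keeps pulling frames until the source closes. A simplified sketch of that loop (the real FileSink::continuePlaying goes through a static trampoline callback, and newer live555 versions add a durationInMicroseconds parameter):

// Simplified sketch of the sink-side pull loop; not a verbatim quote of the library.
Boolean FileSink::continuePlaying() {
  if (fSource == NULL) return False;

  // Ask the source to deliver the next frame into fBuffer; when it arrives,
  // afterGettingFrame() (shown above) writes it out and calls continuePlaying() again.
  fSource->getNextFrame(fBuffer, fBufferSize,
                        afterGettingFrame, this,
                        onSourceClosure, this);
  return True;
}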

4) Writing the data to the file

void FileSink::addData(unsigned char const* data, unsigned dataSize,
                       struct timeval presentationTime) {
  if (fPerFrameFileNameBuffer != NULL) {
    // Special case: Open a new file on-the-fly for this frame
    sprintf(fPerFrameFileNameBuffer, "%s-%lu.%06lu", fPerFrameFileNamePrefix,
            presentationTime.tv_sec, presentationTime.tv_usec);
    fOutFid = OpenOutputFile(envir(), fPerFrameFileNameBuffer);
  }

  // Write to our file:
#ifdef TEST_LOSS
  static unsigned const framesPerPacket = 10;
  static unsigned const frameCount = 0;
  static Boolean const packetIsLost;
  if ((frameCount++)%framesPerPacket == 0) {
    packetIsLost = (our_random()%10 == 0); // simulate 10% packet loss #####
  }

  if (!packetIsLost)
#endif
  if (fOutFid != NULL && data != NULL) {
    fwrite(data, 1, dataSize, fOutFid);
  }
}
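Note the per-frame path at the top of addData(): when oneFilePerFrame was requested in createNew(), each frame is written to its own file named "<prefix>-<seconds>.<microseconds>", taken from the frame's presentation time, and afterGettingFrame() above then closes that file so the next frame opens a fresh one. A trivial illustration of the naming scheme (illustration only, reusing the same format string):

#include <cstdio>
#include <sys/time.h>

void showPerFrameName(char const* prefix, struct timeval presentationTime) {
  char name[256];
  // Same "%s-%lu.%06lu" pattern that FileSink::addData() uses for per-frame files:
  snprintf(name, sizeof name, "%s-%lu.%06lu", prefix,
           (unsigned long)presentationTime.tv_sec,
           (unsigned long)presentationTime.tv_usec);
  printf("per-frame output file: %s\n", name);
}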

Here unsigned char* fBuffer; is the buffer used to hold the data before it is written to the file; it is allocated with fBuffer = new unsigned char[bufferSize];. What is the default buffer size? The class declaration below gives the answer:

#ifndef _H264_VIDEO_FILE_SINK_HH
#define _H264_VIDEO_FILE_SINK_HH

#ifndef _FILE_SINK_HH
#include "FileSink.hh"
#endif

class H264VideoFileSink: public FileSink {
public:
  static H264VideoFileSink* createNew(UsageEnvironment& env, char const* fileName,
                                      char const* sPropParameterSetsStr = NULL,
                                      // An optional 'SDP format' string (comma-separated Base64-encoded) representing SPS and/or PPS NAL-units to prepend to the output
                                      unsigned bufferSize = 100000,
                                      Boolean oneFilePerFrame = False);
  // See "FileSink.hh" for a description of these parameters.

protected:
  H264VideoFileSink(UsageEnvironment& env, FILE* fid,
                    char const* sPropParameterSetsStr,
                    unsigned bufferSize, char const* perFrameFileNamePrefix);
      // called only by createNew()
  virtual ~H264VideoFileSink();

protected: // redefined virtual functions:
  virtual void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime);

private:
  char const* fSPropParameterSetsStr;
  Boolean fHaveWrittenFirstFrame;
};

#endif

The default is 100 KB and can be changed; in openRTSP it was modified to: unsigned fileSinkBufferSize = 1000000; // modify by zh: 100000 to 1000000

openRTSP also needs the frame-rate information in order to record; the frame-rate parameter can likewise be modified in openRTSP:

unsigned movieFPS = 30; // default // frame rate; modify by zhongh: 15 to 30

A 1080p camera at 30 fps produces roughly 1.18 MB of H.264 data per second.
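That figure also explains the buffer-size change: at roughly 1.18 MB/s and 30 fps the average frame is only about 1.18 MB / 30 ≈ 40 KB, but H.264 I-frames can be several times the average size, so the 100 KB default can still truncate large keyframes, while the 1 MB value above leaves comfortable headroom.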

The code in openRTSP that writes the stream to an AVI file is as follows:

else if (outputAVIFile) {
  // Create an "AVIFileSink", to write to 'stdout':
  // Here "stdout" has been replaced so that an actual .avi file is written instead
  aviOut = AVIFileSink::createNew(*env, *session, "d:\\test1.avi",
                                  fileSinkBufferSize,
                                  movieWidth, movieHeight,
                                  movieFPS,              // frame rate
                                  packetLossCompensate); // packet-loss compensation
  if (aviOut == NULL) {
    *env << "Failed to create AVI file sink for stdout: " << env->getResultMsg();
    shutdown();
  }

  aviOut->startPlaying(sessionAfterPlaying, NULL);
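For reference, in the stock openRTSP the outputAVIFile path is enabled by the -i command-line option and the AVI is written to "stdout", so it is normally captured by redirecting standard output; movieWidth, movieHeight and movieFPS come from the -w, -h and -f options, and -b sets fileSinkBufferSize. The hard-coded "d:\\test1.avi" above is a local modification that writes the file directly instead. With the unmodified source, a typical invocation would look like (check your build's usage text for the exact flags): openRTSP -i -w 1920 -h 1080 -f 30 -b 1000000 rtsp://<camera-address>/<stream> > test1.avi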