ios 視頻流H264硬編碼---分解LFLiveKit
阿新 • 發佈:2018-10-19
標籤: header, count, enable, api, osc, center, dealloc, using, 默認
#import "LFHardwareVideoEncoder.h"
#import <VideoToolbox/VideoToolbox.h>

@interface LFHardwareVideoEncoder () {
    VTCompressionSessionRef compressionSession; // VideoToolbox encoder session (C API, manually managed)
    NSInteger frameCount;                       // running frame counter, used to force periodic keyframes
    NSData *sps;                                // cached sequence parameter set (first NALU of the stream)
    NSData *pps;                                // cached picture parameter set (second NALU of the stream)
    FILE *fp;                                   // optional debug dump of the raw Annex-B H.264 stream
    BOOL enabledWriteVideoFile;                 // YES => also write encoded NALUs to `fp`
}

@property (nonatomic, strong) LFLiveVideoConfiguration *configuration;
@property (nonatomic, weak) id<LFVideoEncodingDelegate> h264Delegate;
@property (nonatomic) NSInteger currentVideoBitRate;
@property (nonatomic) BOOL isBackGround;

@end

@implementation LFHardwareVideoEncoder

#pragma mark -- LifeCycle

/// Designated initializer: keeps the stream configuration, creates the
/// compression session and tracks app foreground/background transitions
/// (hardware encoding is unavailable in the background).
- (instancetype)initWithVideoStreamConfiguration:(LFLiveVideoConfiguration *)configuration {
    if (self = [super init]) {
        NSLog(@"USE LFHardwareVideoEncoder");
        _configuration = configuration;
        [self resetCompressionSession];
        // FIX: the original paste was missing the space before `object:`,
        // fusing it into a nonexistent symbol name.
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(willEnterBackground:)
                                                     name:UIApplicationWillResignActiveNotification
                                                   object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(willEnterForeground:)
                                                     name:UIApplicationDidBecomeActiveNotification
                                                   object:nil];
#ifdef DEBUG
        enabledWriteVideoFile = NO;
        [self initForFilePath];
#endif
    }
    return self;
}

/// Tears down any existing VTCompressionSession and creates a fresh one,
/// configured from `_configuration` (size, bitrate, fps, keyframe interval,
/// profile). Safe to call repeatedly (e.g. after an encode error or when
/// returning to the foreground).
- (void)resetCompressionSession {
    if (compressionSession) {
        // 1. flush any pending frames; 2. invalidate the session (like
        // invalidating a timer); 3. release it (C API, manual memory
        // management); 4. clear the pointer.
        VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
        VTCompressionSessionInvalidate(compressionSession);
        CFRelease(compressionSession);
        compressionSession = NULL;
    }

    /* Create the encoder.
       NULL allocator / encoder-spec / source-attributes / data-allocator mean
       "use defaults". VideoCompressonOutputCallback is invoked (asynchronously,
       woken by VTCompressionSessionEncodeFrame) with each encoded frame, and
       (__bridge void *)self is passed through as the callback refcon. */
    OSStatus status = VTCompressionSessionCreate(NULL,
                                                 _configuration.videoSize.width,
                                                 _configuration.videoSize.height,
                                                 kCMVideoCodecType_H264,
                                                 NULL, NULL, NULL,
                                                 VideoCompressonOutputCallback,
                                                 (__bridge void *)self,
                                                 &compressionSession);
    if (status != noErr) {
        return;
    }

    _currentVideoBitRate = _configuration.videoBitRate;
    // Keyframe interval: larger => better compression, bigger keyframes.
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval,
                         (__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval));
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration,
                         (__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval / _configuration.videoFrameRate));
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate,
                         (__bridge CFTypeRef)@(_configuration.videoFrameRate));
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AverageBitRate,
                         (__bridge CFTypeRef)@(_configuration.videoBitRate));
    // Hard data-rate cap: [bytes, seconds] — bytes allowed per 1-second window.
    NSArray *limit = @[@(_configuration.videoBitRate * 1.5 / 8), @(1)];
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_DataRateLimits,
                         (__bridge CFArrayRef)limit);
    // Real-time output lowers encoding latency.
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Main_AutoLevel);
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanTrue);
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_H264EntropyMode, kVTH264EntropyMode_CABAC);
    VTCompressionSessionPrepareToEncodeFrames(compressionSession);
}

/// Adjusts the target bitrate (and matching hard cap) on the live session.
/// Ignored while the app is in the background.
- (void)setVideoBitRate:(NSInteger)videoBitRate {
    if (_isBackGround) return;
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AverageBitRate,
                         (__bridge CFTypeRef)@(videoBitRate));
    NSArray *limit = @[@(videoBitRate * 1.5 / 8), @(1)];
    VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_DataRateLimits,
                         (__bridge CFArrayRef)limit);
    _currentVideoBitRate = videoBitRate;
}

- (NSInteger)videoBitRate {
    return _currentVideoBitRate;
}

- (void)dealloc {
    if (compressionSession != NULL) {
        VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
        VTCompressionSessionInvalidate(compressionSession);
        CFRelease(compressionSession);
        compressionSession = NULL;
    }
    // FIX: close the debug dump file if it was opened (the original leaked
    // the FILE* for the lifetime of the process).
    if (fp) {
        fclose(fp);
        fp = NULL;
    }
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

#pragma mark -- LFVideoEncoder

/// Submits one pixel buffer to the hardware encoder.
/// @param pixelBuffer the raw captured frame (must not be NULL).
/// @param timeStamp   caller-supplied timestamp, passed through to the
///                    callback via the frame refcon.
- (void)encodeVideoData:(CVPixelBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp {
    if (_isBackGround) return;
    frameCount++;
    // CMTimeMake(value, timescale): frame index at `videoFrameRate` fps.
    CMTime presentationTimeStamp = CMTimeMake(frameCount, (int32_t)_configuration.videoFrameRate);
    VTEncodeInfoFlags flags;
    CMTime duration = CMTimeMake(1, (int32_t)_configuration.videoFrameRate);

    // Force a keyframe every `videoMaxKeyframeInterval` frames (the config
    // presumably sets this to fps*2 — see LFLiveVideoConfiguration).
    NSDictionary *properties = nil;
    if (frameCount % (int32_t)_configuration.videoMaxKeyframeInterval == 0) {
        properties = @{(__bridge NSString *)kVTEncodeFrameOptionKey_ForceKeyFrame: @YES};
    }

    // __bridge_retained hands ownership to the C callback, which balances it
    // with __bridge_transfer (see VideoCompressonOutputCallback).
    NSNumber *timeNumber = @(timeStamp);
    OSStatus status = VTCompressionSessionEncodeFrame(compressionSession,
                                                      pixelBuffer,
                                                      presentationTimeStamp,
                                                      duration,
                                                      (__bridge CFDictionaryRef)properties,
                                                      (__bridge_retained void *)timeNumber,
                                                      &flags);
    if (status != noErr) {
        [self resetCompressionSession];
    }
}

/// Flushes every frame still queued inside the encoder.
- (void)stopEncoder {
    VTCompressionSessionCompleteFrames(compressionSession, kCMTimeIndefinite);
}

- (void)setDelegate:(id<LFVideoEncodingDelegate>)delegate {
    _h264Delegate = delegate;
}

#pragma mark -- Notification

- (void)willEnterBackground:(NSNotification *)notification {
    _isBackGround = YES;
}

- (void)willEnterForeground:(NSNotification *)notification {
    [self resetCompressionSession];
    _isBackGround = NO;
}

#pragma mark -- VideoCallBack

/// VTCompressionSession output callback.
/// @param VTref        the LFHardwareVideoEncoder instance (callback refcon).
/// @param VTFrameRef   per-frame refcon: the retained NSNumber timestamp.
/// @param status       encode result for this frame.
/// @param infoFlags    encode operation info.
/// @param sampleBuffer the encoded frame; NULL on failure or dropped frame.
static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer) {
    // FIX: take ownership of the retained per-frame refcon FIRST, so the
    // NSNumber is released on every path. The original early-returned on a
    // NULL sampleBuffer before the __bridge_transfer, leaking one NSNumber
    // per dropped frame.
    NSNumber *frameTimeNumber = (__bridge_transfer NSNumber *)VTFrameRef;
    uint64_t timeStamp = [frameTimeNumber longLongValue];
    if (status != noErr || !sampleBuffer) return; // encode failed or frame dropped

    // Per-sample attachments; the kCMSampleAttachmentKey_NotSync key is
    // absent exactly when this sample is a sync (key) frame.
    CFArrayRef array = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
    if (!array) return;
    CFDictionaryRef dic = (CFDictionaryRef)CFArrayGetValueAtIndex(array, 0);
    if (!dic) return;
    BOOL keyframe = !CFDictionaryContainsKey(dic, kCMSampleAttachmentKey_NotSync);

    LFHardwareVideoEncoder *videoEncoder = (__bridge LFHardwareVideoEncoder *)VTref;

    // On the first keyframe, pull SPS/PPS out of the format description.
    // SPS (index 0) and PPS (index 1) are the stream's global parameter sets;
    // when dumping to disk they go first, each behind a 4-byte start code.
    if (keyframe && !videoEncoder->sps) {
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);

        size_t sparameterSetSize, sparameterSetCount;
        const uint8_t *sparameterSet;
        OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
        if (statusCode == noErr) {
            size_t pparameterSetSize, pparameterSetCount;
            const uint8_t *pparameterSet;
            OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
            if (statusCode == noErr) {
                videoEncoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
                videoEncoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];

                if (videoEncoder->enabledWriteVideoFile) {
                    NSMutableData *data = [[NSMutableData alloc] init];
                    uint8_t header[] = {0x00, 0x00, 0x00, 0x01};
                    [data appendBytes:header length:4];
                    [data appendData:videoEncoder->sps];
                    [data appendBytes:header length:4];
                    [data appendData:videoEncoder->pps];
                    fwrite(data.bytes, 1, data.length, videoEncoder->fp);
                }
            }
        }
    }

    // Walk the AVCC-framed payload: each NALU is prefixed by a big-endian
    // 4-byte length field.
    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t length, totalLength;
    char *dataPointer;
    OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
    if (statusCodeRet == noErr) {
        size_t bufferOffset = 0;
        static const size_t AVCCHeaderLength = 4;
        // FIX: the original `bufferOffset < totalLength - AVCCHeaderLength`
        // underflows (size_t wrap) when totalLength < 4; the rearranged
        // comparison is equivalent otherwise and safe.
        while (bufferOffset + AVCCHeaderLength < totalLength) {
            uint32_t NALUnitLength = 0;
            memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
            NALUnitLength = CFSwapInt32BigToHost(NALUnitLength); // big-endian -> host
            // FIX: reject a corrupt length field that would read past the buffer.
            if (bufferOffset + AVCCHeaderLength + NALUnitLength > totalLength) break;

            LFVideoFrame *videoFrame = [LFVideoFrame new];
            videoFrame.timestamp = timeStamp;
            videoFrame.data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength)
                                                     length:NALUnitLength];
            videoFrame.isKeyFrame = keyframe;
            videoFrame.sps = videoEncoder->sps;
            videoFrame.pps = videoEncoder->pps;

            if (videoEncoder.h264Delegate && [videoEncoder.h264Delegate respondsToSelector:@selector(videoEncoder:videoFrame:)]) {
                [videoEncoder.h264Delegate videoEncoder:videoEncoder videoFrame:videoFrame];
            }

            if (videoEncoder->enabledWriteVideoFile) {
                NSMutableData *data = [[NSMutableData alloc] init];
                if (keyframe) {
                    uint8_t header[] = {0x00, 0x00, 0x00, 0x01};
                    [data appendBytes:header length:4];
                } else {
                    uint8_t header[] = {0x00, 0x00, 0x01};
                    [data appendBytes:header length:3];
                }
                [data appendData:videoFrame.data];
                fwrite(data.bytes, 1, data.length, videoEncoder->fp);
            }

            bufferOffset += AVCCHeaderLength + NALUnitLength;
        }
    }
}

/// Opens the DEBUG dump file in the Documents directory.
- (void)initForFilePath {
    NSString *path = [self GetFilePathByfileName:@"IOSCamDemo.h264"];
    NSLog(@"%@", path);
    self->fp = fopen([path cStringUsingEncoding:NSUTF8StringEncoding], "wb");
}

/// Returns <Documents>/<filename>.
- (NSString *)GetFilePathByfileName:(NSString *)filename {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *writablePath = [documentsDirectory stringByAppendingPathComponent:filename];
    return writablePath;
}

@end
ios 視頻流H264硬編碼---分解LFLiveKit