Custom Video Recording with AVFoundation
If this page doesn't display well on mobile, please visit my personal blog.
The system's built-in video recording UI rarely satisfies designers and project managers, so a custom recording implementation becomes essential. This post walks through building your own video recording interface.
Introduction
Custom video recording relies mainly on the AVFoundation and CoreMedia frameworks, covering video input, output, and file writing. The classes we will use are listed below:
- AVCaptureSession
- AVCaptureVideoPreviewLayer
- AVCaptureDeviceInput
- AVCaptureConnection
- AVCaptureVideoDataOutput
- AVCaptureAudioDataOutput
- AVAssetWriter
- AVAssetWriterInput
Each class is introduced in detail below, together with the corresponding code.
AVCaptureSession
AVCaptureSession is the central hub of AVFoundation's capture classes, so we start there. To capture video, a client instantiates an AVCaptureSession and adds appropriate inputs, such as AVCaptureDeviceInput, and outputs, such as AVCaptureMovieFileOutput. Data flows from the inputs to the outputs once you call [AVCaptureSession startRunning], and stops when you call [AVCaptureSession stopRunning].
// The capture session for recording video
- (AVCaptureSession *)recordSession {
    if (_recordSession == nil) {
        _recordSession = [[AVCaptureSession alloc] init];
        // Add the back-camera input
        if ([_recordSession canAddInput:self.backCameraInput]) {
            [_recordSession addInput:self.backCameraInput];
        }
        // Add the microphone input
        if ([_recordSession canAddInput:self.audioMicInput]) {
            [_recordSession addInput:self.audioMicInput];
        }
        // Add the video output
        if ([_recordSession canAddOutput:self.videoOutput]) {
            [_recordSession addOutput:self.videoOutput];
            // Read the output's actual resolution; width and height are
            // swapped because we record in portrait orientation
            NSDictionary *actual = self.videoOutput.videoSettings;
            _cx = [[actual objectForKey:@"Height"] integerValue];
            _cy = [[actual objectForKey:@"Width"] integerValue];
        }
        // Add the audio output
        if ([_recordSession canAddOutput:self.audioOutput]) {
            [_recordSession addOutput:self.audioOutput];
        }
        // Set the recording orientation
        self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }
    return _recordSession;
}
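The paragraph above mentions [AVCaptureSession startRunning] and [AVCaptureSession stopRunning], but neither appears in the getter. A minimal sketch of how the session might be started and stopped, assuming the serial self.captureQueue defined later in this post (startRunning blocks until the session is live, so it should stay off the main thread):
// Start the capture pipeline; -startRunning is synchronous and can take
// a moment, so dispatch it off the main thread.
- (void)startSession {
    dispatch_async(self.captureQueue, ^{
        if (!self.recordSession.isRunning) {
            [self.recordSession startRunning];
        }
    });
}
// Stop the capture pipeline when the recording UI goes away.
- (void)stopSession {
    dispatch_async(self.captureQueue, ^{
        if (self.recordSession.isRunning) {
            [self.recordSession stopRunning];
        }
    });
}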
AVCaptureDevice
Each instance of AVCaptureDevice corresponds to a physical device, such as a camera or a microphone. AVCaptureDevice instances cannot be created directly; the available devices are obtained with the class methods devicesWithMediaType: or defaultDeviceWithMediaType:, and a device can provide one or more streams of a given media type. An AVCaptureDevice instance is then wrapped in an AVCaptureDeviceInput, which serves as an input source for the AVCaptureSession.
// Returns the front camera
- (AVCaptureDevice *)frontCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionFront];
}
// Returns the back camera
- (AVCaptureDevice *)backCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionBack];
}
// Returns the camera at the given position (front or back)
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    // All default devices capable of capturing video
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    // Find the one at the requested position
    for (AVCaptureDevice *device in devices) {
        if ([device position] == position) {
            return device;
        }
    }
    return nil;
}
// Turn the torch (flashlight) on
- (void)openFlashLight {
    AVCaptureDevice *backCamera = [self backCamera];
    if (backCamera.torchMode == AVCaptureTorchModeOff) {
        [backCamera lockForConfiguration:nil];
        backCamera.torchMode = AVCaptureTorchModeOn;
        backCamera.flashMode = AVCaptureFlashModeOn;
        [backCamera unlockForConfiguration];
    }
}
// Turn the torch (flashlight) off
- (void)closeFlashLight {
    AVCaptureDevice *backCamera = [self backCamera];
    if (backCamera.torchMode == AVCaptureTorchModeOn) {
        [backCamera lockForConfiguration:nil];
        backCamera.torchMode = AVCaptureTorchModeOff;
        // Note: the original assigned AVCaptureTorchModeOff here, which is the wrong enum
        backCamera.flashMode = AVCaptureFlashModeOff;
        [backCamera unlockForConfiguration];
    }
}
AVCaptureDeviceInput
AVCaptureDeviceInput is an AVCaptureSession input source that feeds media data from a device into the session. You create one from an AVCaptureDevice instance; here those devices are the front and back cameras obtained above via [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo].
// Back-camera input
- (AVCaptureDeviceInput *)backCameraInput {
    if (_backCameraInput == nil) {
        NSError *error;
        _backCameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamera] error:&error];
        if (error) {
            [SVProgressHUD showErrorWithStatus:@"Failed to access the back camera"];
        }
    }
    return _backCameraInput;
}
// Front-camera input
- (AVCaptureDeviceInput *)frontCameraInput {
    if (_frontCameraInput == nil) {
        NSError *error;
        _frontCameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontCamera] error:&error];
        if (error) {
            [SVProgressHUD showErrorWithStatus:@"Failed to access the front camera"];
        }
    }
    return _frontCameraInput;
}
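The session getter at the top also adds self.audioMicInput, which hasn't been shown. A sketch of that getter, assuming it follows the same lazy pattern as the camera inputs (the property name comes from the session code; the body is my assumption):
// Microphone input (sketch; mirrors the camera-input getters above)
- (AVCaptureDeviceInput *)audioMicInput {
    if (_audioMicInput == nil) {
        // The default audio device is the built-in microphone
        AVCaptureDevice *mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        NSError *error;
        _audioMicInput = [[AVCaptureDeviceInput alloc] initWithDevice:mic error:&error];
        if (error) {
            [SVProgressHUD showErrorWithStatus:@"Failed to access the microphone"];
        }
    }
    return _audioMicInput;
}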
AVCaptureVideoPreviewLayer
AVCaptureVideoPreviewLayer is a CALayer subclass from Core Animation used to preview the video flowing through an AVCaptureSession; in short, it is the layer on which the camera feed is rendered.
// The layer that displays the captured video
- (AVCaptureVideoPreviewLayer *)previewLayer {
    if (_previewLayer == nil) {
        // Initialized from the AVCaptureSession
        AVCaptureVideoPreviewLayer *preview = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.recordSession];
        // Scale the video to fill the screen
        preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _previewLayer = preview;
    }
    return _previewLayer;
}
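To make the preview visible, the layer still has to be attached to the view hierarchy. A usage sketch, assuming a view controller that hosts the camera UI:
// In the recording view controller, e.g. in viewDidLoad:
// size the preview to the screen and put it beneath the record controls
self.previewLayer.frame = self.view.bounds;
[self.view.layer insertSublayer:self.previewLayer atIndex:0];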
AVCaptureMovieFileOutput
AVCaptureMovieFileOutput is a subclass of AVCaptureFileOutput that writes media to QuickTime movie files. On the iPhone, however, it cannot pause a recording, nor does it let you choose the output container format, so we skip it here and instead record with the more flexible AVCaptureVideoDataOutput and AVCaptureAudioDataOutput.
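For contrast, here is roughly what the simple route would look like (a sketch; the output URL is illustrative). This is what we give up in exchange for pause support and container control:
// The simple route: let AVFoundation write the file for us
AVCaptureMovieFileOutput *movieOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([self.recordSession canAddOutput:movieOutput]) {
    [self.recordSession addOutput:movieOutput];
}
NSURL *outputURL = [NSURL fileURLWithPath:
    [NSTemporaryDirectory() stringByAppendingPathComponent:@"movie.mov"]];
// self must conform to AVCaptureFileOutputRecordingDelegate
[movieOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];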
AVCaptureVideoDataOutput
AVCaptureVideoDataOutput is a subclass of AVCaptureOutput that outputs the captured video frames, compressed or uncompressed, so they can be processed with whatever frame-handling APIs you need. The application receives each frame through the captureOutput:didOutputSampleBuffer:fromConnection: delegate method.
// Video output
- (AVCaptureVideoDataOutput *)videoOutput {
    if (_videoOutput == nil) {
        _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        [_videoOutput setSampleBufferDelegate:self queue:self.captureQueue];
        // Ask for bi-planar YCbCr frames
        NSDictionary *setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                        [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
                                        (__bridge id)kCVPixelBufferPixelFormatTypeKey,
                                        nil];
        _videoOutput.videoSettings = setcapSettings;
    }
    return _videoOutput;
}
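Both data outputs deliver their sample buffers on self.captureQueue, which never appears in these snippets. It is assumed to be a lazily created serial dispatch queue, something like:
// Serial queue on which video and audio sample buffers are delivered;
// a serial queue guarantees buffers arrive in order.
- (dispatch_queue_t)captureQueue {
    if (_captureQueue == nil) {
        _captureQueue = dispatch_queue_create("com.example.recordCaptureQueue", DISPATCH_QUEUE_SERIAL);
    }
    return _captureQueue;
}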
AVCaptureAudioDataOutput
AVCaptureAudioDataOutput is a subclass of AVCaptureOutput that outputs the captured audio samples, compressed or uncompressed, for processing with whatever audio APIs you need. The application receives the audio data through the same captureOutput:didOutputSampleBuffer:fromConnection: delegate method.
// Audio output
- (AVCaptureAudioDataOutput *)audioOutput {
    if (_audioOutput == nil) {
        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        [_audioOutput setSampleBufferDelegate:self queue:self.captureQueue];
    }
    return _audioOutput;
}
AVCaptureConnection
An AVCaptureConnection represents the connection between one or more AVCaptureInputPort instances and a single AVCaptureOutput or AVCaptureVideoPreviewLayer within an AVCaptureSession.
// Video connection (re-fetched each time, since the connection changes when inputs change)
- (AVCaptureConnection *)videoConnection {
    _videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
    return _videoConnection;
}
// Audio connection
- (AVCaptureConnection *)audioConnection {
    if (_audioConnection == nil) {
        _audioConnection = [self.audioOutput connectionWithMediaType:AVMediaTypeAudio];
    }
    return _audioConnection;
}
AVAssetWriter
AVAssetWriter writes media data to a new file and lets you specify the container format, such as a QuickTime movie or an MPEG-4 file. It can write several parallel tracks of media data, most basically a video track and an audio track, which are covered below. A single AVAssetWriter instance can only be used to write one file; clients that want to write multiple files must create a new AVAssetWriter instance for each one.
// Initializer
- (instancetype)initPath:(NSString *)path Height:(NSInteger)cy width:(NSInteger)cx channels:(int)ch samples:(Float64)rate {
    self = [super init];
    if (self) {
        self.path = path;
        // Delete any file already at the path so the recording starts fresh
        [[NSFileManager defaultManager] removeItemAtPath:self.path error:nil];
        NSURL *url = [NSURL fileURLWithPath:self.path];
        // Write an MPEG-4 file
        _writer = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeMPEG4 error:nil];
        // Optimize the file layout for network playback
        _writer.shouldOptimizeForNetworkUse = YES;
        // Set up the video writer input
        [self initVideoInputHeight:cy width:cx];
        // Only set up the audio writer input once the sample rate and channel count are known
        if (rate != 0 && ch != 0) {
            [self initAudioInputChannels:ch samples:rate];
        }
    }
    return self;
}
AVAssetWriterInput
AVAssetWriterInput appends media samples, packaged as CMSampleBuffer instances, to a single track of the AVAssetWriter's output file. When there are several inputs, AVAssetWriter tries to interleave their media data in the pattern that is ideal for storage and playback efficiency. Whether an input can currently accept data is indicated by readyForMoreMediaData: if it is YES, the input can take more media data, and you should only append data while that is the case.
// Set up the video writer input
- (void)initVideoInputHeight:(NSInteger)cy width:(NSInteger)cx {
    // Video settings: H.264 encoding at the captured resolution
    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              AVVideoCodecH264, AVVideoCodecKey,
                              [NSNumber numberWithInteger:cx], AVVideoWidthKey,
                              [NSNumber numberWithInteger:cy], AVVideoHeightKey,
                              nil];
    // Create the video writer input
    _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
    // The input should tune its processing for a real-time source
    _videoInput.expectsMediaDataInRealTime = YES;
    // Attach the video input to the writer
    [_writer addInput:_videoInput];
}
// Set up the audio writer input
- (void)initAudioInputChannels:(int)ch samples:(Float64)rate {
    // Audio settings: AAC encoding with the captured channel count and sample rate, at 128 kbps
    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                              [NSNumber numberWithInt:ch], AVNumberOfChannelsKey,
                              [NSNumber numberWithFloat:rate], AVSampleRateKey,
                              [NSNumber numberWithInt:128000], AVEncoderBitRateKey,
                              nil];
    // Create the audio writer input
    _audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings];
    // The input should tune its processing for a real-time source
    _audioInput.expectsMediaDataInRealTime = YES;
    // Attach the audio input to the writer
    [_writer addInput:_audioInput];
}
The classes and configuration above are everything we need before recording. Next we look at how the captured data is handled and written to a file.
Writing the Data
#pragma mark - Writing data
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    BOOL isVideo = YES;
    @synchronized(self) {
        if (!self.isCapturing || self.isPaused) {
            return;
        }
        if (captureOutput != self.videoOutput) {
            isVideo = NO;
        }
        // Create the encoder on the first audio buffer, once both the video
        // parameters and the audio format are known
        if ((self.recordEncoder == nil) && !isVideo) {
            CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
            [self setAudioFormat:fmt];
            // getUploadFile_type:fileType: and getVideoCachePath are helper
            // methods from the demo project
            NSString *videoName = [NSString getUploadFile_type:@"video" fileType:@"mp4"];
            self.videoPath = [[self getVideoCachePath] stringByAppendingPathComponent:videoName];
            self.recordEncoder = [WCLRecordEncoder encoderForPath:self.videoPath Height:_cy width:_cx channels:_channels samples:_samplerate];
        }
        // Handle a recording that was paused and resumed
        if (self.discont) {
            if (isVideo) {
                return;
            }
            self.discont = NO;
            // Compute how long the recording was paused
            CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            CMTime last = isVideo ? _lastVideo : _lastAudio;
            if (last.flags & kCMTimeFlags_Valid) {
                if (_timeOffset.flags & kCMTimeFlags_Valid) {
                    pts = CMTimeSubtract(pts, _timeOffset);
                }
                CMTime offset = CMTimeSubtract(pts, last);
                if (_timeOffset.value == 0) {
                    _timeOffset = offset;
                } else {
                    _timeOffset = CMTimeAdd(_timeOffset, offset);
                }
            }
            _lastVideo.flags = 0;
            _lastAudio.flags = 0;
        }
        // Retain the sample buffer so it stays valid while we work with it
        CFRetain(sampleBuffer);
        if (_timeOffset.value > 0) {
            CFRelease(sampleBuffer);
            // Shift the buffer's timestamps back by the accumulated pause offset
            sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
        }
        // Remember the end time of this buffer so the next pause can be measured
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
        if (dur.value > 0) {
            pts = CMTimeAdd(pts, dur);
        }
        if (isVideo) {
            _lastVideo = pts;
        } else {
            _lastAudio = pts;
        }
    }
    // Track the total recording time
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (self.startTime.value == 0) {
        self.startTime = pts;
    }
    CMTime sub = CMTimeSubtract(pts, self.startTime);
    self.currentRecordTime = CMTimeGetSeconds(sub);
    if (self.currentRecordTime > self.maxRecordTime) {
        // Send one final progress update just past the limit, then drop frames
        if (self.currentRecordTime - self.maxRecordTime < 0.1) {
            if ([self.delegate respondsToSelector:@selector(recordProgress:)]) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self.delegate recordProgress:self.currentRecordTime / self.maxRecordTime];
                });
            }
        }
        // Balance the CFRetain above before dropping the frame
        // (the original code leaked the buffer here)
        CFRelease(sampleBuffer);
        return;
    }
    if ([self.delegate respondsToSelector:@selector(recordProgress:)]) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self.delegate recordProgress:self.currentRecordTime / self.maxRecordTime];
        });
    }
    // Hand the buffer to the encoder
    [self.recordEncoder encodeFrame:sampleBuffer isVideo:isVideo];
    CFRelease(sampleBuffer);
}
// Record the audio format (sample rate and channel count) from the stream
- (void)setAudioFormat:(CMFormatDescriptionRef)fmt {
    const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
    _samplerate = asbd->mSampleRate;
    _channels = asbd->mChannelsPerFrame;
}
// Return a copy of the sample buffer with its timing shifted back by offset
- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset {
    CMItemCount count;
    // The first call gets the number of timing entries, the second fills them in
    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
    CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
    for (CMItemCount i = 0; i < count; i++) {
        pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
        pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
    }
    CMSampleBufferRef sout;
    CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
    free(pInfo);
    return sout;
}
// Append a sample buffer to the writer; returns whether it was written
- (BOOL)encodeFrame:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo {
    // Only proceed once the buffer's data is ready
    if (CMSampleBufferDataIsReady(sampleBuffer)) {
        // While the writer status is still unknown, start the session on the
        // first video frame so the file begins with video
        if (_writer.status == AVAssetWriterStatusUnknown && isVideo) {
            // The source time at which writing starts
            CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            [_writer startWriting];
            [_writer startSessionAtSourceTime:startTime];
        }
        // Bail out if the writer has failed
        if (_writer.status == AVAssetWriterStatusFailed) {
            NSLog(@"writer error %@", _writer.error.localizedDescription);
            return NO;
        }
        if (isVideo) {
            // Append the frame if the video input can accept more data
            if (_videoInput.readyForMoreMediaData == YES) {
                [_videoInput appendSampleBuffer:sampleBuffer];
                return YES;
            }
        } else {
            // Append the samples if the audio input can accept more data
            if (_audioInput.readyForMoreMediaData) {
                [_audioInput appendSampleBuffer:sampleBuffer];
                return YES;
            }
        }
    }
    return NO;
}
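The delegate method earlier reads self.isCapturing, self.isPaused, and self.discont, which are driven by the recorder's control methods. Those methods aren't shown in this post; a sketch of what they plausibly look like (method names and bookkeeping are my assumptions, not the demo's verbatim code):
// Begin a new recording: reset the timing state and open the gate
- (void)startCapture {
    @synchronized(self) {
        if (!self.isCapturing) {
            self.recordEncoder = nil;
            self.isPaused = NO;
            self.discont = NO;
            _timeOffset = kCMTimeZero;
            self.startTime = kCMTimeZero;
            self.isCapturing = YES;
        }
    }
}
// Pause: stop consuming buffers and remember that a gap is coming
- (void)pauseCapture {
    @synchronized(self) {
        if (self.isCapturing) {
            self.isPaused = YES;
            self.discont = YES;
        }
    }
}
// Resume: buffers flow again; the next audio buffer measures the gap
- (void)resumeCapture {
    @synchronized(self) {
        if (self.isCapturing) {
            self.isPaused = NO;
        }
    }
}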
Finishing the Recording and Saving to the Photo Library
// Stop recording, save to the photo library, and hand back a thumbnail
- (void)stopCaptureHandler:(void (^)(UIImage *movieImage))handler {
    @synchronized(self) {
        if (self.isCapturing) {
            NSString *path = self.recordEncoder.path;
            NSURL *url = [NSURL fileURLWithPath:path];
            self.isCapturing = NO;
            dispatch_async(_captureQueue, ^{
                [self.recordEncoder finishWithCompletionHandler:^{
                    self.recordEncoder = nil;
                    // Save the finished file into the photo library
                    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:url];
                    } completionHandler:^(BOOL success, NSError * _Nullable error) {
                        // The original logged "saved" unconditionally; check the result instead
                        if (success) {
                            NSLog(@"video saved to the photo library");
                        } else {
                            NSLog(@"saving failed: %@", error);
                        }
                    }];
                    [self movieToImageHandler:handler];
                }];
            });
        }
    }
}
// Grab the first frame of the recorded video as a UIImage
- (void)movieToImageHandler:(void (^)(UIImage *movieImage))handler {
    NSURL *url = [NSURL fileURLWithPath:self.videoPath];
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES;
    CMTime thumbTime = CMTimeMakeWithSeconds(0, 60);
    generator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
    AVAssetImageGeneratorCompletionHandler generatorHandler =
        ^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error) {
            if (result == AVAssetImageGeneratorSucceeded) {
                UIImage *thumbImg = [UIImage imageWithCGImage:im];
                if (handler) {
                    // Deliver the thumbnail on the main queue
                    dispatch_async(dispatch_get_main_queue(), ^{
                        handler(thumbImg);
                    });
                }
            }
        };
    [generator generateCGImagesAsynchronouslyForTimes:
        [NSArray arrayWithObject:[NSValue valueWithCMTime:thumbTime]] completionHandler:generatorHandler];
}
// Called when the recording is complete
- (void)finishWithCompletionHandler:(void (^)(void))handler {
    [_writer finishWritingWithCompletionHandler:handler];
}
That's everything for this post. If you have any questions, feel free to ask. A demo project accompanies this article, so take a look to see exactly how it is used; if it helps you, a star would be appreciated. Thanks for reading~~