
Trimming video segments and adding background music on iOS

I've been looking into video editing recently, and so far I've worked out how to trim a video segment and how to add a background-music track. The code is fairly self-explanatory, so without further ado:

1 Trimming a video segment (invocation)

    // Trim the 3s-8s segment out of the source video.
    [PSJVideoEditor cropWithVideoUrlStr:_videoUrl audioUrl:_audioUrl start:3 end:8 isOrignalSound:isOrignalSound completion:^(NSString *outPath, BOOL isSuccess) {
        if (isSuccess) {
            // outPath is a local file path, so build a file URL from it.
            _videoUrl = [NSURL fileURLWithPath:outPath];
        }
    }];
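
A note on the callback: AVAssetExportSession does not guarantee that its completion handler runs on the main queue. If you want to preview the exported clip right away, a minimal sketch like the following could go inside the isSuccess branch (it assumes the calling code lives in a UIViewController and that AVKit is imported):

    // Requires: #import <AVKit/AVKit.h>
    dispatch_async(dispatch_get_main_queue(), ^{
        // outPath is a local file path, so wrap it in a file URL for AVPlayer.
        AVPlayerViewController *playerVC = [[AVPlayerViewController alloc] init];
        playerVC.player = [AVPlayer playerWithURL:[NSURL fileURLWithPath:outPath]];
        [self presentViewController:playerVC animated:YES completion:nil];
        [playerVC.player play];
    });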

2 Adding background music (invocation)

        // Insert the audio's 3s-8s range at the 3s mark of the video as background music.
        [PSJVideoEditor addBackgroundMiusicWithVideoUrlStr:_videoUrl audioUrl:_audioUrl start:3 end:8 isOrignalSound:isOrignalSound completion:^(NSString *outPath, BOOL isSuccess) {
            if (isSuccess) {
                // outPath is a local file path, so build a file URL from it.
                _videoUrl = [NSURL fileURLWithPath:outPath];
            }
        }];
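
For completeness, the two inputs above are plain file URLs. A hypothetical setup might look like the sketch below; the "demo.mov" and "bgm.mp3" resource names are placeholders for whatever local video and audio files you actually use:

    // Local video file bundled with the app (placeholder name).
    NSString *videoPath = [[NSBundle mainBundle] pathForResource:@"demo" ofType:@"mov"];
    _videoUrl = [NSURL fileURLWithPath:videoPath];
    // Background-music file bundled with the app (placeholder name).
    _audioUrl = [[NSBundle mainBundle] URLForResource:@"bgm" withExtension:@"mp3"];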


3 Full source code

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

@interface PSJVideoEditor : NSObject

/*!
 @method
 @brief  Add a background-music track to a video
 @discussion
 @param videoUrl  video URL
 @param audioUrl  audio URL
 @param startTime time (in seconds) at which the audio insertion starts
 @param endTime   time (in seconds) at which the audio insertion ends
 @param isOrignal whether to keep the video's original sound
 @param completionHandle completion callback
 */
+ (void)addBackgroundMiusicWithVideoUrlStr:(NSURL *)videoUrl
                                  audioUrl:(NSURL *)audioUrl
                                     start:(CGFloat)startTime
                                       end:(CGFloat)endTime
                            isOrignalSound:(BOOL)isOrignal
                                completion:(void (^)(NSString *outPath, BOOL isSuccess))completionHandle;
/*!
 @method
 @brief  Trim a video segment
 @discussion
 @param videoUrl  video URL
 @param audioUrl  audio URL
 @param startTime trim start time (in seconds)
 @param endTime   trim end time (in seconds)
 @param isOrignal whether to keep the video's original sound
 @param completionHandle completion callback
 */
+ (void)cropWithVideoUrlStr:(NSURL *)videoUrl
                   audioUrl:(NSURL *)audioUrl
                      start:(CGFloat)startTime
                        end:(CGFloat)endTime
             isOrignalSound:(BOOL)isOrignal
                 completion:(void (^)(NSString *outPath,BOOL isSuccess))completionHandle;


@end


#import "PSJVideoEditor.h"
#import <AVFoundation/AVFoundation.h>

#define MediaFileName @"MixVideo.MOV"

@implementation PSJVideoEditor

+ (void)addBackgroundMiusicWithVideoUrlStr:(NSURL *)videoUrl
                                  audioUrl:(NSURL *)audioUrl
                                     start:(CGFloat)startTime
                                       end:(CGFloat)endTime
                            isOrignalSound:(BOOL)isOrignal
                                completion:(void (^)(NSString *outPath,BOOL isSuccess))completionHandle
{
    // Audio source (the background music to mix in)
    NSURL   *audio_inputFileUrl = audioUrl;
    
    // Video source
    NSURL   *video_inputFileUrl = videoUrl;
    
    NSString *outputFilePath = [PSJVideoEditor fileSavePath];
    NSURL   *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    
    
    CMTime nextClipStartTime = kCMTimeZero;
    
    // Create a mutable audio/video composition
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    
    // Insert the video track
    AVURLAsset* videoAsset =[[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
    AVMutableCompositionTrack*a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange
                                     ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                      atTime:nextClipStartTime
                                       error:nil];
    
    
    // Convert the start/end seconds into the video's timescale
    CMTime start = CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale);
    CMTime duration = CMTimeMakeWithSeconds(endTime - startTime, videoAsset.duration.timescale);
    CMTimeRange audio_timeRange = CMTimeRangeMake(start, duration);
    
    
<span style="color:#3366ff;">    if (isOrignal) {
        //視訊聲音採集(也可不執行這段程式碼不採集視訊音軌,合併後的視訊檔案將沒有視訊原來的聲音)
        CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
        AVMutableCompositionTrack *compositionVoiceTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        
        [compositionVoiceTrack insertTimeRange:videoTimeRange ofTrack:([videoAsset tracksWithMediaType:AVMediaTypeAudio].count>0)?[videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject:nil atTime:kCMTimeZero error:nil];
    }</span>
    
   <span style="color:#ff0000;"> //音樂聲音採集
    AVURLAsset* audioAsset =[[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
    //CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);//聲音長度擷取範圍==視訊長度
    AVMutableCompositionTrack*b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    //3秒到8秒 則atTime從3秒開始
    [b_compositionAudioTrack insertTimeRange:audio_timeRange
                                     ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0]
                                      atTime:start
                                       error:nil];</span>
    
    // Create an export session and write out the mixed composition
    AVAssetExportSession* _assetExport =[[AVAssetExportSession alloc]initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;
    _assetExport.shouldOptimizeForNetworkUse= YES;
    
    [_assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void ) {
         switch ([_assetExport status]) {
             case AVAssetExportSessionStatusFailed: {
                 NSLog(@"合成失敗:%@",[[_assetExport error] description]);
                 completionHandle(outputFilePath,NO);
             } break;
             case AVAssetExportSessionStatusCancelled: {
                 completionHandle(outputFilePath,NO);
             } break;
             case AVAssetExportSessionStatusCompleted: {
                 completionHandle(outputFilePath,YES);
             } break;
             default: {
                 completionHandle(outputFilePath,NO);
             } break;
         }
         
     }
     ];
}

+ (void)cropWithVideoUrlStr:(NSURL *)videoUrl
                   audioUrl:(NSURL *)audioUrl
                      start:(CGFloat)startTime
                        end:(CGFloat)endTime
             isOrignalSound:(BOOL)isOrignal
                 completion:(void (^)(NSString *outPath,BOOL isSuccess))completionHandle
{
    // Audio source (the background music to mix in)
    NSURL   *audio_inputFileUrl = audioUrl;
    
    // Video source
    NSURL   *video_inputFileUrl = videoUrl;
    
    NSString *outputFilePath = [PSJVideoEditor fileSavePath];
    NSURL   *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    
    AVURLAsset* videoAsset =[[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
    
    
    CMTime nextClipStartTime = kCMTimeZero;
    
    // Create a mutable audio/video composition
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    
    // Insert the video track
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
    AVMutableCompositionTrack*a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange
                                     ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                      atTime:nextClipStartTime
                                       error:nil];
    
    
    // Convert the start/end seconds into the video's timescale
    CMTime start = CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale);
    CMTime duration = CMTimeMakeWithSeconds(endTime - startTime, videoAsset.duration.timescale);
    CMTimeRange audio_timeRange = CMTimeRangeMake(start, duration);
    
    
    if (isOrignal && [videoAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) {
        // Keep the video's own audio track. Skip this block to drop the
        // original sound from the merged file.
        CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
        AVMutableCompositionTrack *compositionVoiceTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        
        [compositionVoiceTrack insertTimeRange:videoTimeRange
                                       ofTrack:[videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject
                                        atTime:kCMTimeZero
                                         error:nil];
    }
    
    // Insert the background music track
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    //CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration); // audio range == full video length
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
    // For a 3s-8s range, the audio's 3s-8s segment is inserted at the 3s mark of the video
    [b_compositionAudioTrack insertTimeRange:audio_timeRange
                                     ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0]
                                      atTime:start
                                       error:nil];


    
    // Note: the export below works on the original videoAsset with the passthrough
    // preset (a direct copy of the trimmed range, no re-encoding), so the composition
    // built above does not affect the output file. The compatibility check still uses
    // the MediumQuality preset identifier, as in the original code.
    NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:videoAsset];
    if ([compatiblePresets containsObject:AVAssetExportPresetMediumQuality]) {
        
        AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
                                               initWithAsset:videoAsset presetName:AVAssetExportPresetPassthrough];
        NSURL *furl = outputFileUrl;
        
        exportSession.outputURL = furl;
        exportSession.outputFileType = AVFileTypeQuickTimeMovie;
        exportSession.shouldOptimizeForNetworkUse= YES;
        
        <span style="color:#ff0000;">//剪輯視訊片段 設定timeRange
        CMTime start = CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale);
        CMTime duration = CMTimeMakeWithSeconds(endTime - startTime,videoAsset.duration.timescale);
        CMTimeRange range = CMTimeRangeMake(start, duration);
        exportSession.timeRange = range;</span>
        
        [exportSession exportAsynchronouslyWithCompletionHandler:
         ^(void ) {
             switch ([exportSession status]) {
                 case AVAssetExportSessionStatusFailed: {
                     NSLog(@"合成失敗:%@",[[exportSession error] description]);
                     completionHandle(outputFilePath,NO);
                 } break;
                 case AVAssetExportSessionStatusCancelled: {
                     completionHandle(outputFilePath,NO);
                 } break;
                 case AVAssetExportSessionStatusCompleted: {
                     completionHandle(outputFilePath,YES);
                 } break;
                 default: {
                     completionHandle(outputFilePath,NO);
                 } break;
             }
             
         }
         ];
    }
}

+ (CGFloat)getMediaDurationWithMediaUrl:(NSString *)mediaUrlStr {
    
    NSURL *mediaUrl = [NSURL URLWithString:mediaUrlStr];
    AVURLAsset *mediaAsset = [[AVURLAsset alloc] initWithURL:mediaUrl options:nil];
    
    // CMTimeGetSeconds avoids the integer truncation of duration.value / duration.timescale
    return CMTimeGetSeconds(mediaAsset.duration);
}

+ (NSString *)getMediaFilePath {
    
    return [NSTemporaryDirectory() stringByAppendingPathComponent:MediaFileName];
    
}

+ (NSString *)fileSavePath
{
    NSDate *date = [NSDate date];
    NSInteger nowInter = (long)[date timeIntervalSince1970];
    // Use a .mov extension so the file name matches the AVFileTypeQuickTimeMovie output type used above
    NSString *fileName = [NSString stringWithFormat:@"output%ld.mov", (long)nowInter];
    
    NSString *documentsDirectory = [NSHomeDirectory()
                                    stringByAppendingPathComponent:@"Documents"];
    // Final output path for the exported file
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:fileName];
    // NSURL   *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    
    // Remove any stale file at the same path before exporting
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    }
    return outputFilePath;
}


@end
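
If you also want to keep the exported file, one option is to copy it into the photo library once the completion block reports success. This is only a sketch built on the standard UIKit functions UIVideoAtPathIsCompatibleWithSavedPhotosAlbum / UISaveVideoAtPathToSavedPhotosAlbum; the callback method follows the selector signature those functions require, and on recent iOS versions you may also need the photo-library usage keys in Info.plist:

    // Inside the completion block, after isSuccess == YES:
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outPath)) {
        UISaveVideoAtPathToSavedPhotosAlbum(outPath, self,
                                            @selector(video:didFinishSavingWithError:contextInfo:),
                                            NULL);
    }

    // Callback required by UISaveVideoAtPathToSavedPhotosAlbum:
    - (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
        NSLog(@"Saved to photo library, error: %@", error);
    }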