iOS Multimedia Programming (2): Video (AVPlayer)
阿新 · Published: 2019-01-31
As with audio playback, iOS offers several APIs for video: MPMoviePlayerController in MediaPlayer.framework, AVPlayer in AVFoundation.framework, and AVPlayerViewController in AVKit. MPMoviePlayerController has been deprecated since iOS 9.0, with AVPlayerViewController as its replacement.
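For reference, a minimal sketch of using the AVPlayerViewController replacement could look like the following. It is not part of this article's main code; it assumes the method lives in a UIViewController subclass, and the URL is just a placeholder.

#import <AVKit/AVKit.h>
#import <AVFoundation/AVFoundation.h>

// Minimal sketch: play a video with AVPlayerViewController instead of the
// deprecated MPMoviePlayerController. The URL is only a placeholder.
- (void)presentPlayerViewController
{
    NSURL *url = [NSURL URLWithString:@"https://example.com/sample.mp4"]; // placeholder URL
    AVPlayerViewController *playerVC = [[AVPlayerViewController alloc] init];
    playerVC.player = [AVPlayer playerWithURL:url];
    [self presentViewController:playerVC animated:YES completion:^{
        [playerVC.player play]; // start playback once the controller is on screen
    }];
}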
So below I will mainly introduce AVPlayer.
AVPlayer can play both audio and video. However, it cannot display video by itself: the player has to be attached to an AVPlayerLayer, which is then added to a layer that is actually on screen.
To use AVPlayer and AVAudioPlayer, you need to link AVFoundation.framework and import its header: #import <AVFoundation/AVFoundation.h>
The main code is as follows (continuing from the previous article).
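The code below refers to self.avPlayerItem and self.avPlayer. These are assumed to be properties declared on the view controller from the previous article; a rough sketch of the assumed declarations (the property names come from the code below, the class extension itself is my guess):

#import <AVFoundation/AVFoundation.h>

// Assumed declarations carried over from the previous article.
@interface ViewController ()
@property (nonatomic, strong) AVPlayer *avPlayer;
@property (nonatomic, strong) AVPlayerItem *avPlayerItem;
@end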
/*
 iOS 9 blocks plain HTTP by default in favor of HTTPS. Loading an http:// URL will otherwise fail with:
 "App Transport Security has blocked a cleartext HTTP (http://) resource load since it is insecure.
  Temporary exceptions can be configured via your app's Info.plist file."
 To allow it, add the following to Info.plist:
 <key>NSAppTransportSecurity</key>
 <dict>
     <key>NSAllowsArbitraryLoads</key>
     <true/>
 </dict>
 */
-(void)addAVPlayerLayer
{
    //NSString *path = [[NSBundle mainBundle] pathForResource:@"183" ofType:@"wav"];
    //NSURL *url = [NSURL fileURLWithPath:path];
    //NSURL *url = [NSURL URLWithString:@"http://v.jxvdy.com/sendfile/w5bgP3A8JgiQQo5l0hvoNGE2H16WbN09X-ONHPq3P3C1BISgf7C-qVs6_c8oaw3zKScO78I--b0BGFBRxlpw13sf2e54QA"];
    NSURL *url = [NSURL URLWithString:@"http://static.tripbe.com/videofiles/20121214/9533522808.f4v.mp4"];

    //'MPMoviePlayerController' is deprecated: first deprecated in iOS 9.0 - Use AVPlayerViewController in AVKit
    //[self.view addSubview:[MPMoviePlayerController new].view];

    self.avPlayerItem = [AVPlayerItem playerItemWithURL:url];
    self.avPlayer = [AVPlayer playerWithPlayerItem:self.avPlayerItem];
    AVPlayerLayer *avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
    CGRect screenRect = [UIScreen mainScreen].bounds;
    // Place the video in the bottom half of the screen
    avPlayerLayer.frame = CGRectMake(0, screenRect.size.height/2, screenRect.size.width, screenRect.size.height/2);
    /*
     AVF_EXPORT NSString *const AVLayerVideoGravityResizeAspect     NS_AVAILABLE(10_7, 4_0);
     AVF_EXPORT NSString *const AVLayerVideoGravityResizeAspectFill NS_AVAILABLE(10_7, 4_0);
     AVF_EXPORT NSString *const AVLayerVideoGravityResize           NS_AVAILABLE(10_7, 4_0);
     */
    avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    // Note: repeatCount comes from CAMediaTiming and affects layer animations, not video looping
    avPlayerLayer.repeatCount = 1;
    //avPlayerLayer.backgroundColor = [UIColor blueColor].CGColor;
    [self.view.layer addSublayer:avPlayerLayer];

    // Register for the playback-finished notification
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(avPlayerDidEnd)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:self.avPlayerItem];
    /*
     The item's status has three possible values:
     (1) AVPlayerItemStatusUnknown
     (2) AVPlayerItemStatusReadyToPlay
     (3) AVPlayerItemStatusFailed
     */
    [self.avPlayerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
    // loadedTimeRanges reflects how much has been buffered; observe it to update a buffering progress UI
    [self.avPlayerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil];

    NSLog(@"AVPlayerItemStatus enum = {%ld, %ld, %ld}",
          (long)AVPlayerItemStatusUnknown, (long)AVPlayerItemStatusReadyToPlay, (long)AVPlayerItemStatusFailed);
}

-(void)playAVPlayer
{
    NSLog(@"AVPlayer play");
    [self.avPlayerItem seekToTime:kCMTimeZero];
    [self.avPlayer play];
}

-(void)avPlayerDidEnd
{
    NSLog(@"AVPlayer end");
}

// Declared in NSKeyValueObserving.h:
// Given that the receiver has been registered as an observer of the value at a key path relative to an object,
// be notified of a change to that value.
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    NSLog(@"observeValueForKeyPath");
    AVPlayerItem *playerItem = (AVPlayerItem *)object;
    if ([keyPath isEqualToString:@"status"]) {
        switch (playerItem.status) {
            case AVPlayerItemStatusUnknown:
                NSLog(@"AVPlayerItemStatusUnknown");
                break;
            case AVPlayerItemStatusReadyToPlay:
            {
                NSLog(@"AVPlayerItemStatusReadyToPlay");
                CMTime duration = playerItem.duration;
                // CMTime: value/timescale = seconds (integer division here)
                CGFloat totalSecond = duration.value / duration.timescale;
                CGFloat totalSecond2 = CMTimeGetSeconds(duration);
                NSLog(@"AVPlayer getSeconds(value/timescale) = %f", totalSecond);
                NSLog(@"AVPlayer getSeconds(CMTimeGetSeconds) = %f", totalSecond2);
                NSString *totalTimeStr = [self convertTime:totalSecond];
                NSLog(@"AVPlayer totalTimeStr = %@", totalTimeStr);
                // Start observing the playback position
                [self monitoringPlayback:playerItem];
            }
                break;
            case AVPlayerItemStatusFailed:
                NSLog(@"AVPlayerItemStatusFailed");
                break;
            default:
                break;
        }
    } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) {
        // Compute the buffering progress
        NSTimeInterval timeInterval = [self aVPlayerLoadedEndTime];
        NSLog(@"AVPlayer timeInterval = %f", timeInterval);
        CMTime duration = playerItem.duration;
        CGFloat totalDuration = CMTimeGetSeconds(duration);
        CGFloat progress = timeInterval / totalDuration;
        NSLog(@"AVPlayer loadedTimeRanges = %f", progress);
    }
}

-(NSString *)convertTime:(CGFloat)second
{
    // Treat `second` as an offset from 1970 and format it as a clock-style duration
    NSDate *date = [NSDate dateWithTimeIntervalSince1970:second];
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    // Use GMT so the local time zone does not shift the hour field
    formatter.timeZone = [NSTimeZone timeZoneForSecondsFromGMT:0];
    if (second / 3600 >= 1) {
        [formatter setDateFormat:@"HH:mm:ss"];
    } else {
        [formatter setDateFormat:@"mm:ss"];
    }
    NSString *timeStr = [formatter stringFromDate:date];
    return timeStr;
}

-(void)monitoringPlayback:(AVPlayerItem *)playerItem
{
    // Capture self weakly; capturing it strongly inside the block would create a retain cycle
    __weak typeof(self) weakSelf = self;
    // CMTimeMake(a, b): a is the value (e.g. frame count), b the timescale (units per second); current time = a/b seconds
    // CMTimeMakeWithSeconds(a, b): a is the time in seconds, b the timescale; value = a*b
    // "Periodic" means the block fires repeatedly; passing NULL for the queue runs it on the main queue
    [self.avPlayer addPeriodicTimeObserverForInterval:CMTimeMake(1, 1) queue:NULL usingBlock:^(CMTime time) {
        // Current playback position in seconds
        CGFloat currentSecond = weakSelf.avPlayerItem.currentTime.value / weakSelf.avPlayerItem.currentTime.timescale;
        NSString *currentTimeStr = [weakSelf convertTime:currentSecond];
        NSLog(@"AVPlayer monitoringPlayback currentSecond = %f (%@)", currentSecond, currentTimeStr);
    }];
}

// Returns how far playback has been buffered, as an end position in seconds
-(NSTimeInterval)aVPlayerLoadedEndTime
{
    // NSValue is a simple container for a single C or Objective-C value (scalars, pointers, structs, ids);
    // it exists so that such values can be stored in collections.
    NSArray<NSValue *> *loadedTimeRanges = self.avPlayer.currentItem.loadedTimeRanges;
    NSLog(@"AVPlayer timeRanges = %@", loadedTimeRanges);
    /*
     The buffered region:
     typedef struct {
         CMTime start;    // The start time of the time range.
         CMTime duration; // The duration of the time range.
     } CMTimeRange;
     */
    CMTimeRange timeRange = [loadedTimeRanges.firstObject CMTimeRangeValue];
    // CMTimeGetSeconds converts a CMTime to seconds
    float startSeconds = CMTimeGetSeconds(timeRange.start);
    float durationSeconds = CMTimeGetSeconds(timeRange.duration);
    NSTimeInterval endSeconds = startSeconds + durationSeconds;
    return endSeconds;
}
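One thing the code above never does is unregister anything: the KVO observers, the notification observer, and the periodic time observer all stay registered. A minimal cleanup sketch (my addition, not from the original article):

-(void)dealloc
{
    // Remove the observers registered in -addAVPlayerLayer, otherwise KVO callbacks
    // may be delivered to a deallocated object.
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [self.avPlayerItem removeObserver:self forKeyPath:@"status"];
    [self.avPlayerItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
    // The token returned by addPeriodicTimeObserverForInterval:queue:usingBlock: would also
    // need to be stored somewhere and passed to -[AVPlayer removeTimeObserver:].
}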
Run log and screenshots
When the app is launched but the Play button has not been pressed yet, the first frame is displayed.
(Some log output omitted here ......)
As you can see, pressing the "Play" button before buffering has finished still plays back normally, which shows that AVPlayer is a network player (that is, it supports streaming playback while the file is still downloading); see the sketch after the screenshots.
The left screenshot shows pressing "Play" after everything has loaded; the right one shows pressing "Play" before buffering has completed.
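If you want to react to this streaming behaviour in the UI (for example, show a spinner while the player is stalled and hide it once enough data has buffered), one common approach is to also observe the item's buffering flags. This is not in the original code; a sketch of the extra observers you could register in -addAVPlayerLayer:

    // Hypothetical addition: observe the buffering flags to detect stalls while streaming
    [self.avPlayerItem addObserver:self forKeyPath:@"playbackBufferEmpty"
                           options:NSKeyValueObservingOptionNew context:nil];
    [self.avPlayerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp"
                           options:NSKeyValueObservingOptionNew context:nil];

    // ...and in -observeValueForKeyPath:..., alongside the existing branches:
    // if ([keyPath isEqualToString:@"playbackBufferEmpty"])    { /* stalled: show a loading indicator */ }
    // if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) { /* buffered enough: hide the indicator */ }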
(Some log output omitted here ......)
The current playback position is printed periodically (once per second, as configured by the periodic time observer).
After playback finishes, the video stops on the last frame.
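If you would rather jump back to the first frame (or loop) instead of freezing on the last one, the end-of-playback callback registered above can simply seek back to zero. A minimal sketch (the original -avPlayerDidEnd only logs a message):

-(void)avPlayerDidEnd
{
    NSLog(@"AVPlayer end");
    // Rewind to the start; also call [self.avPlayer play] here if the video should loop
    [self.avPlayerItem seekToTime:kCMTimeZero];
}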