Decoding and Displaying a TS Stream with FFmpeg
阿新 • Published: 2019-02-13
Work on company projects has been slow lately, so I took the time to tidy up something I built earlier: decoding a live stream with FFmpeg.
A previous project of mine was a smart-home control system with video surveillance. The monitoring side has multiple cameras, each of which must be displayed, and the same preview also has to appear at several positions at once, in sync. I originally used VLC for decoding and display, but with many preview sources every one of them had to be decoded separately, which wastes performance, and each time a source was dragged onto a controller it was decoded yet again just to show a preview. That approach clearly does not meet the requirement.
The ideal is to decode a given source only once; when the same source is shown in several places, the already-decoded data is simply rendered again.
Below is the result I ended up with.
Because of limitations of the screen-recording software I could not interact with it, but the three views in the recording come from a single decode displayed in different places. In other words, I decode one stream and render its data at multiple positions; the sketch below illustrates the idea.
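A minimal sketch of the "decode once, render many" pattern. Here `decoder` stands for an instance of the class below (or a decoder split out of it), `fps` is its frame rate, and `previewA`/`previewB`/`previewC` are UIImageViews already on screen; all of these names are placeholders of my own, not part of the class that follows.
// One decoder drives any number of UIImageViews; only the rendering is repeated.
NSArray<UIImageView *> *targets = @[previewA, previewB, previewC];
[NSTimer scheduledTimerWithTimeInterval:1.0 / fps repeats:YES block:^(NSTimer *timer) {
    if (![decoder stepFrame]) { [timer invalidate]; return; }
    UIImage *frame = decoder.currentImage;   // decoded exactly once per tick
    for (UIImageView *view in targets) {
        view.image = frame;                  // the same UIImage object is reused, no extra decode
    }
}];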
The code builds on work by others, with thread-related optimizations of my own; I wrapped it all in a single class, so calling it is very simple. A link to the source files is at the end.
@implementation CQMovieView
{
AVFormatContext *FormatCtx;
AVCodecContext *codecCtx;
AVFrame *avframe;
AVStream *stream;
AVPacket packet;
AVPicture picture;
int videoStram;
double fps;
BOOL isReleaseResources;
dispatch_queue_t queue;
UIImageView *imageView;
}
- (instancetype)initWithFrame:(CGRect)frame
{
if (self = [super initWithFrame:frame]) {
self.frame = frame;
}
return self;
}
-(void)Video:(NSString *)moviePath
{
queue = dispatch_queue_create("LABEL", DISPATCH_QUEUE_SERIAL);
self.cruutenPath = [moviePath copy];
imageView = [[UIImageView alloc]initWithFrame:self.bounds];
[self addSubview:imageView];
dispatch_async(queue, ^{
[self initializeResources:[moviePath UTF8String]];
});
}
-(void)displayNextFrame:(NSTimer *)timer {
NSTimeInterval startTime = [NSDate timeIntervalSinceReferenceDate];
// self.TimerLabel.text = [NSString stringWithFormat:@"%f s",video.currentTime];
if (![self stepFrame]) {
[timer invalidate];
return;
}
imageView.image = self.currentImage;
// _ImageSubView.image = video.currentImage;
float frameTime = 1.0 / ([NSDate timeIntervalSinceReferenceDate] - startTime);
if (_lastFrameTime < 0) {
_lastFrameTime = frameTime;
} else {
_lastFrameTime = LERP(frameTime, _lastFrameTime, 0.8);
}
}
- (BOOL)initializeResources:(const char *)filePath {
isReleaseResources = NO;
AVCodec *pCodec;
// Register all codecs and formats
avcodec_register_all();
av_register_all();
avformat_network_init();
// Open the input file / stream
if (avformat_open_input(&FormatCtx, filePath, NULL, NULL) != 0) {
NSLog(@"Failed to open the input");
return NO;
}
// Read stream information
if (avformat_find_stream_info(FormatCtx, NULL) < 0) {
NSLog(@"Failed to find stream information");
return NO;
}
// Pick the first (best) video stream
if ((videoStram = av_find_best_stream(FormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &pCodec, 0)) < 0) {
NSLog(@"No video stream found");
return NO;
}
// Get the codec context of the video stream
stream = FormatCtx->streams[videoStram];
codecCtx = stream->codec;
#if DEBUG
// Dump the video stream details (debug builds only)
av_dump_format(FormatCtx, videoStram, filePath, 0);
#endif
if (stream->avg_frame_rate.den && stream->avg_frame_rate.num) {
fps = av_q2d(stream->avg_frame_rate);
} else {
fps = 30;
}
// Find the decoder for the stream's codec
pCodec = avcodec_find_decoder(codecCtx->codec_id);
if (pCodec == NULL) {
NSLog(@"Decoder not found");
return NO;
}
// Open the decoder
if (avcodec_open2(codecCtx, pCodec, NULL) < 0) {
NSLog(@"Failed to open the decoder");
return NO;
}
// Allocate the frame that will hold decoded video
avframe = av_frame_alloc();
_outputWidth = codecCtx->width;
_outputHeight = codecCtx->height;
dispatch_async(dispatch_get_main_queue(), ^{
[self seekTime:0.0];
[NSTimer scheduledTimerWithTimeInterval: 1 / fps
target:self
selector:@selector(displayNextFrame:)
userInfo:nil
repeats:YES];
});
return YES;
}
- (void)seekTime:(double)seconds {
AVRational timeBase = FormatCtx->streams[videoStram]->time_base;
int64_t targetFrame = (int64_t)((double)timeBase.den / timeBase.num *seconds);
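// For example, with the 90 kHz time base typical of MPEG-TS (time_base = 1/90000),
// seconds = 2.0 gives targetFrame = 90000 / 1 * 2.0 = 180000 time-base units.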
avformat_seek_file(FormatCtx, videoStram, 0, targetFrame, targetFrame, AVSEEK_FLAG_FRAME);
avcodec_flush_buffers(codecCtx);
}
- (BOOL)stepFrame
{
int frameFinished = 0;
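// Keep pulling packets from the demuxer until one complete video frame has been decoded.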
while (!frameFinished && av_read_frame(FormatCtx, &packet) >= 0) {
if (packet.stream_index == videoStram) {
avcodec_decode_video2(codecCtx, avframe, &frameFinished, &packet);
}
}
if (frameFinished == 0 && isReleaseResources == NO) {
[self releaseResources];
}
return frameFinished != 0;
}
- (void)replaceTheResources:(NSString *)moviePath {
if (!isReleaseResources) {
[self releaseResources];
}
self.cruutenPath = [moviePath copy];
[self initializeResources:[moviePath UTF8String]];
}
-(void)redialPaly
{
[self initializeResources:[self.cruutenPath UTF8String]];
}
#pragma mark Property accessor overrides
-(void)setOutputWidth:(int)newValue {
if (_outputWidth == newValue)return;
_outputWidth = newValue;
}
-(void)setOutputHeight:(int)newValue {
if (_outputHeight == newValue) return;
_outputHeight = newValue;
}
-(UIImage *)currentImage {
if (!avframe->data[0]) return nil;
return [self imageFromAVPicture];
}
-(double)duration {
return (double)FormatCtx->duration / AV_TIME_BASE;
}
- (double)currentTime {
AVRational timeBase = FormatCtx->streams[videoStram]->time_base;
return packet.pts * (double)timeBase.num / timeBase.den;
}
- (int)sourceWidth {
return codecCtx->width;
}
- (int)sourceHeight {
return codecCtx->height;
}
- (double)fps {
return fps;
}
#pragma mark - Internal methods
- (UIImage *)imageFromAVPicture
{
avpicture_free(&picture);
avpicture_alloc(&picture, AV_PIX_FMT_RGB24, _outputWidth, _outputHeight);
struct SwsContext *imgConverCtx = sws_getContext(avframe->width,
avframe->height,
AV_PIX_FMT_YUV420P,
_outputWidth,
_outputHeight,
AV_PIX_FMT_RGB24,
SWS_FAST_BILINEAR,
NULL,
NULL,
NULL);
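// Converts the decoded frame, assumed here to be YUV420P (the usual output of an
// H.264 decoder in a TS), to RGB24 at the view's output size.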
if (imgConverCtx == NULL) return nil;
sws_scale(imgConverCtx,
avframe->data,
avframe->linesize,
0,
avframe->height,
picture.data,
picture.linesize);
sws_freeContext(imgConverCtx);
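// Wrap the RGB24 buffer in a CGImage: 8 bits per component, 24 bits per pixel,
// bytesPerRow = picture.linesize[0].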
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
CFDataRef data = CFDataCreate(kCFAllocatorDefault, picture.data[0], picture.linesize[0] * _outputHeight);
CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGImageRef cgImage = CGImageCreate(_outputWidth, _outputHeight, 8, 24, picture.linesize[0], colorSpace, bitmapInfo, provider, NULL, NO, kCGRenderingIntentDefault);
UIImage *image = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage);
CGColorSpaceRelease(colorSpace);
CGDataProviderRelease(provider);
CFRelease(data);
return image;
}
#pragma mark --------------------------
#pragma mark - Release resources
- (void)releaseResources {
NSLog(@"釋放資源");
// SJLogFunc
isReleaseResources = YES;
// 釋放RGB
avpicture_free(&picture);
// 釋放frame
av_packet_unref(&packet);
// 釋放YUV frame
av_free(avframe);
// 關閉解碼器
if (codecCtx) avcodec_close(codecCtx);
// 關閉檔案
if (FormatCtx) avformat_close_input(&FormatCtx);
avformat_network_deinit();
}
The complete source files are available for download here: code file download.