Extracting a Specified Region of an Image

Recently I've been working on something similar to a QR code scanner, and it likewise needs to grab the image inside a particular region of the frame. Let me go straight to the most important parts of the code.

The code below sets up the AVFoundation capture side so that the camera image can be displayed on the view; a condensed sketch of that setup follows this paragraph. After that, a quick rundown of the problems I ran into along the way and how I solved them.
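Condensed to its essentials, the capture setup used here looks roughly like the sketch below (error handling and the still-image output are omitted; the full initAVCapture method is in the listing further down):

AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:nil];

// Deliver BGRA frames to the sample buffer delegate on a serial queue
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
output.videoSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
[output setSampleBufferDelegate:self queue:dispatch_queue_create("com.scan.video.sample_queue", DISPATCH_QUEUE_SERIAL)];

AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
[session addInput:input];
[session addOutput:output];

// Show the camera feed on this view
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
previewLayer.frame = self.bounds;
[self.layer addSublayer:previewLayer];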

1. Punch a "hole" in a layer to expose the actual crop region. For this I used a CAShapeLayer with the even-odd fill rule, so it can be applied as a mask to the coverLayer that is laid over the previewLayer.

2. Getting the full frame is easy: it comes from the sampleBuffer in the delegate callback. I used AVCaptureVideoDataOutput here, so a stream of sampled frames arrives continuously.

3. Extract the crop region from the full image. I spent a lot of time on this without managing to get the cropped image right. First I tried CGImageCreateWithImageInRect, but the resulting image had the wrong position and size. Then I switched to a CGContext-based approach, which still wasn't right. After plenty of Googling and head-scratching, the cause became clear: the preview layer presents the video with an aspect-based gravity, so the actual image size is not the same as the screen size. Once I was sure this was the problem, the fix was to compute, for each videoGravity mode, where the crop region actually sits within the captured image. That is what the calcRect method does: it maps the "hole" punched in the screen onto the corresponding rect in the image. A short worked example follows.
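As a quick illustration of the mapping (with made-up numbers, assuming AVLayerVideoGravityResizeAspect): suppose the preview layer is 320×480 points and the captured, already-rotated image is 360×480 pixels. The screen ratio is 480/320 = 1.5 and the image ratio is 480/360 ≈ 1.33, so aspect-fit pins the width: the image is presented at 320×427 points, centered with about 27 points of letterboxing at the top and bottom, and the scale factor is 320/360 ≈ 0.89. A crop rect of (60, 200, 200, 200) in view coordinates therefore maps to roughly ((60 - 0)/0.89, (200 - 27)/0.89, 200/0.89, 200/0.89) ≈ (68, 195, 225, 225) in image coordinates, which is exactly the computation calcRect performs below.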

It finally works. Take a look if you're interested.

//
//  ScanView.m
//  xxoo
//
//  Created by Tommy on 13-11-6.
//  Copyright (c) 2013 Tommy. All rights reserved.
//

#import "ScanView.h"
#import <AVFoundation/AVFoundation.h>


static inline double radians (double degrees) {return degrees * M_PI/180;}

@interface ScanView()<AVCaptureVideoDataOutputSampleBufferDelegate>

@property AVCaptureVideoPreviewLayer* previewLayer;
@property AVCaptureSession* session;
@property AVCaptureDevice* videoDevice;
@property dispatch_queue_t camera_sample_queue;
@property CALayer* coverLayer;
@property CAShapeLayer* cropLayer;
@property CALayer* stillImageLayer;
@property  AVCaptureStillImageOutput* stillImageOutput;

@property UIImageView* stillImageView;
@property UIImage* cropImage;

@property BOOL hasSetFocus;



@end

@implementation ScanView

- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        // Initialization code
        self.hasSetFocus = NO;
        [self initAVCapture];
        [self initOtherLayers];
    }
    return self;
}

/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect
{
    // Drawing code
}
*/
-(void)layoutSubviews
{
    [self.previewLayer setFrame:self.bounds];
    [self.coverLayer setFrame:self.bounds];
    self.coverLayer.mask = self.cropLayer;
}

- (void) initAVCapture{
    
    self.cropRect = CGRectZero;
    
    self.videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput* input = [[AVCaptureDeviceInput alloc]initWithDevice:self.videoDevice error:nil];
    
    AVCaptureVideoDataOutput* output = [[AVCaptureVideoDataOutput alloc]init];
    output.alwaysDiscardsLateVideoFrames = YES;
    self.camera_sample_queue = dispatch_queue_create ("com.scan.video.sample_queue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:self.camera_sample_queue];
    
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [output setVideoSettings:videoSettings];
    
    
    self.stillImageOutput = [[AVCaptureStillImageOutput alloc]init];
    NSDictionary* outputSettings = @{AVVideoCodecKey:AVVideoCodecJPEG};
    [self.stillImageOutput setOutputSettings:outputSettings];
    
    self.session = [[AVCaptureSession alloc]init];
    self.session.sessionPreset = AVCaptureSessionPresetMedium;
    
    if ([self.session canAddInput:input])
    {
        [self.session addInput:input];
        
        if ([self.session canAddOutput:output])
        {
            [self.session addOutput:self.stillImageOutput];
            [self.session addOutput:output];
            
            self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
            self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
          
            [self.layer addSublayer: self.previewLayer];
            
            return; // success
        }
    }
    
    self.session = nil;
}

- (void)setCropRect:(CGRect)cropRect
{
    _cropRect = cropRect;
    if(!CGRectEqualToRect(CGRectZero, self.cropRect)){

        self.cropLayer = [[CAShapeLayer alloc] init];
        CGMutablePathRef path = CGPathCreateMutable();
        
        // Even-odd fill: the area between the two rects is filled, leaving a "hole" at cropRect
        CGPathAddRect(path, NULL, self.cropRect);
        CGPathAddRect(path, NULL, self.bounds);
        
        [self.cropLayer setFillRule:kCAFillRuleEvenOdd];
        [self.cropLayer setPath:path];
        [self.cropLayer setFillColor:[[UIColor whiteColor] CGColor]];
        CGPathRelease(path); // CGPath is a Core Foundation object and is not managed by ARC
        
        [self.cropLayer setNeedsDisplay];
        
        //[self setVideoFocus];
        
    }
    
    [self.stillImageLayer setFrame:CGRectMake(100, 450, CGRectGetWidth(cropRect), CGRectGetHeight(cropRect))];
}

- (void) setVideoFocus{
    
    NSError *error;
    CGPoint focusPoint = CGPointMake(CGRectGetMidX(self.cropRect), CGRectGetMidY(self.cropRect));
    if([self.videoDevice isFocusPointOfInterestSupported]
       && [self.videoDevice lockForConfiguration:&error] && !self.hasSetFocus){
        self.hasSetFocus = YES;
        [self.videoDevice setFocusPointOfInterest:[self convertToPointOfInterestFromViewCoordinates:focusPoint]];
        [self.videoDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
        [self.videoDevice unlockForConfiguration];
    }
//    [self.videoDevice setFocusMode:AVCaptureFocusModeAutoFocus];
    if (error) {
        NSLog(@"error:%@", error);
    }
}


- (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates
{
    CGPoint pointOfInterest = CGPointMake(.5f, .5f);
    CGSize frameSize = self.frame.size;
    
    AVCaptureVideoPreviewLayer *videoPreviewLayer = self.previewLayer;
    
    if ([self.previewLayer isMirrored]) {
        viewCoordinates.x = frameSize.width - viewCoordinates.x;
    }
    
    if ( [[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResize] ) {
        pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width));
    } else {
        CGRect cleanAperture;
        for (AVCaptureInputPort *port in [[[[self session] inputs] lastObject] ports]) {
            if ([[port mediaType] isEqualToString:AVMediaTypeVideo]) {
                cleanAperture = CMVideoFormatDescriptionGetCleanAperture([port formatDescription], YES);
                CGSize apertureSize = cleanAperture.size;
                CGPoint point = viewCoordinates;
                
                CGFloat apertureRatio = apertureSize.height / apertureSize.width;
                CGFloat viewRatio = frameSize.width / frameSize.height;
                CGFloat xc = .5f;
                CGFloat yc = .5f;
                
                if ( [[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspect] ) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = frameSize.height;
                        CGFloat x2 = frameSize.height * apertureRatio;
                        CGFloat x1 = frameSize.width;
                        CGFloat blackBar = (x1 - x2) / 2;
                        if (point.x >= blackBar && point.x <= blackBar + x2) {
                            xc = point.y / y2;
                            yc = 1.f - ((point.x - blackBar) / x2);
                        }
                    } else {
                        CGFloat y2 = frameSize.width / apertureRatio;
                        CGFloat y1 = frameSize.height;
                        CGFloat x2 = frameSize.width;
                        CGFloat blackBar = (y1 - y2) / 2;
                        if (point.y >= blackBar && point.y <= blackBar + y2) {
                            xc = ((point.y - blackBar) / y2);
                            yc = 1.f - (point.x / x2);
                        }
                    }
                } else if ([[videoPreviewLayer videoGravity] isEqualToString:AVLayerVideoGravityResizeAspectFill]) {
                    if (viewRatio > apertureRatio) {
                        CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height);
                        xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2;
                        yc = (frameSize.width - point.x) / frameSize.width;
                    } else {
                        CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width);
                        yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2);
                        xc = point.y / frameSize.height;
                    }
                    
                }
                
                pointOfInterest = CGPointMake(xc, yc);
                break;
            }
        }
    }
    
    return pointOfInterest;
}

- (void) initOtherLayers{
    self.coverLayer = [CALayer layer];
    
    self.coverLayer.backgroundColor = [[[UIColor blackColor] colorWithAlphaComponent:0.6] CGColor];
    [self.layer addSublayer:self.coverLayer];
    
    if(!CGRectEqualToRect(CGRectZero, self.cropRect)){
    
        self.cropLayer = [[CAShapeLayer alloc] init];
        CGMutablePathRef path = CGPathCreateMutable();
        
        CGPathAddRect(path, NULL, self.cropRect);
        CGPathAddRect(path, NULL, self.bounds);
        
        [self.cropLayer setFillRule:kCAFillRuleEvenOdd];
        [self.cropLayer setPath:path];
        [self.cropLayer setFillColor:[[UIColor redColor] CGColor]];
        CGPathRelease(path); // avoid leaking the CGPath
    }
    
    self.stillImageLayer = [CALayer layer];
    self.stillImageLayer.backgroundColor = [[UIColor yellowColor] CGColor];
    self.stillImageLayer.contentsGravity = kCAGravityResizeAspect;
    [self.coverLayer addSublayer:self.stillImageLayer];
    
    
    self.stillImageView = [[UIImageView alloc]initWithFrame:CGRectMake(0,300, 100, 100)];
    self.stillImageView.backgroundColor = [UIColor redColor];
    self.stillImageView.contentMode = UIViewContentModeScaleAspectFit;
    [self addSubview:self.stillImageView];
    
    
    self.previewLayer.contentsGravity = kCAGravityResizeAspect;
    
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    
    
    [self setVideoFocus];
    
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    self.cropImage = [self cropImageInRect:image];
    
    dispatch_async(dispatch_get_main_queue(), ^{
        
       [self.stillImageView setImage:image];
      // [self.stillImageLayer setContents:(id)[self.cropImage CGImage]];
    });
    
}
// Create a UIImage from the sample buffer data
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
    // Get the Core Video image buffer for the media data in the sample buffer
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    
    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    
    // Get the number of bytes per row of the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the width and height of the pixel buffer
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    
    //NSLog(@"%zu,%zu",width,height);
    
    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    
    // Create a bitmap graphics context backed by the sample buffer's BGRA data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    
    // Release the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    
    // Create a UIImage from the Quartz image, rotated to portrait to match the preview
    //UIImage *image = [UIImage imageWithCGImage:quartzImage];
    UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0f orientation:UIImageOrientationRight];
    
    // Release the Quartz image
    CGImageRelease(quartzImage);
    
    return (image);
}



- (CGRect) calcRect:(CGSize)imageSize{
    NSString* gravity = self.previewLayer.videoGravity;
    CGRect cropRect = self.cropRect;
    CGSize screenSize = self.previewLayer.bounds.size;
    
    CGFloat screenRatio = screenSize.height / screenSize.width ;
    CGFloat imageRatio = imageSize.height /imageSize.width;
    
    CGRect presentImageRect = self.previewLayer.bounds;
    CGFloat scale = 1.0;
    
    
    if([AVLayerVideoGravityResizeAspect isEqual: gravity]){
        
        // Aspect-fit: the image is scaled to fit inside the preview, letterboxed on one axis
        CGFloat presentImageWidth = imageSize.width;
        CGFloat presentImageHeight = imageSize.height;
        if(screenRatio > imageRatio){
            presentImageWidth = screenSize.width;
            presentImageHeight = presentImageWidth * imageRatio;
            
        }else{
            presentImageHeight = screenSize.height;
            presentImageWidth = presentImageHeight / imageRatio;
        }
        
        presentImageRect.size = CGSizeMake(presentImageWidth, presentImageHeight);
        presentImageRect.origin = CGPointMake((screenSize.width-presentImageWidth)/2.0, (screenSize.height-presentImageHeight)/2.0);
    
    }else if([AVLayerVideoGravityResizeAspectFill isEqual:gravity]){
        
        // Aspect-fill: the image is scaled to cover the preview, cropped on one axis
        CGFloat presentImageWidth = imageSize.width;
        CGFloat presentImageHeight = imageSize.height;
        if(screenRatio > imageRatio){
            presentImageHeight = screenSize.height;
            presentImageWidth = presentImageHeight / imageRatio;
        }else{
            presentImageWidth = screenSize.width;
            presentImageHeight = presentImageWidth * imageRatio;
        }
        
        presentImageRect.size = CGSizeMake(presentImageWidth, presentImageHeight);
        presentImageRect.origin = CGPointMake((screenSize.width-presentImageWidth)/2.0, (screenSize.height-presentImageHeight)/2.0);
        
    }else{
        NSAssert(0, @"unsupported videoGravity: %@", gravity);
    }
    
    scale = CGRectGetWidth(presentImageRect) / imageSize.width;
    
    CGRect rect = cropRect;
    rect.origin = CGPointMake(CGRectGetMinX(cropRect)-CGRectGetMinX(presentImageRect), CGRectGetMinY(cropRect)-CGRectGetMinY(presentImageRect));
    
    rect.origin.x /= scale;
    rect.origin.y /= scale;
    rect.size.width /= scale;
    rect.size.height  /= scale;
    
    return rect;
}

#define SUBSET_SIZE 360

- (UIImage*) cropImageInRect:(UIImage*)image{

    CGSize size = [image size];
    CGRect cropRect = [self calcRect:size];

    float scale = fminf(1.0f, fmaxf(SUBSET_SIZE / cropRect.size.width, SUBSET_SIZE / cropRect.size.height));
    CGPoint offset = CGPointMake(-cropRect.origin.x, -cropRect.origin.y);
    
    size_t subsetWidth = cropRect.size.width * scale;
    size_t subsetHeight = cropRect.size.height * scale;
    
    
    CGColorSpaceRef grayColorSpace = CGColorSpaceCreateDeviceGray();
    
    CGContextRef ctx =
    CGBitmapContextCreate(nil,
                          subsetWidth,
                          subsetHeight,
                          8,
                          0,
                          grayColorSpace,
                          kCGImageAlphaNone|kCGBitmapByteOrderDefault);
    CGColorSpaceRelease(grayColorSpace);
    CGContextSetInterpolationQuality(ctx, kCGInterpolationNone);
    CGContextSetAllowsAntialiasing(ctx, false);

    // adjust the coordinate system
    CGContextTranslateCTM(ctx, 0.0, subsetHeight);
    CGContextScaleCTM(ctx, 1.0, -1.0);
    
    
    UIGraphicsPushContext(ctx);
    CGRect rect = CGRectMake(offset.x * scale, offset.y * scale, scale * size.width, scale * size.height);

    [image drawInRect:rect];
    
    UIGraphicsPopContext();
    
    CGContextFlush(ctx);
    
    
    CGImageRef subsetImageRef = CGBitmapContextCreateImage(ctx);
    
    UIImage* subsetImage = [UIImage imageWithCGImage:subsetImageRef];

    CGImageRelease(subsetImageRef);
    
    CGContextRelease(ctx);

    
    return subsetImage;
}  



- (void) start{
    
    dispatch_sync (self.camera_sample_queue, ^{
        [self.session startRunning]; });
    
}
- (void) stop{
    if(self.session){
        [self.session stopRunning];
    }
    
}


@end
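
For completeness, here is a minimal usage sketch. The cropRect property and the start/stop methods come from the class above (assuming they are declared in ScanView.h); the view controller itself and the frame values are just placeholders:

#import <UIKit/UIKit.h>
#import "ScanView.h"

// Hypothetical caller; only ScanView's cropRect/start/stop are taken from the class above
@interface ScanViewController : UIViewController
@property (nonatomic, strong) ScanView *scanView;
@end

@implementation ScanViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.scanView = [[ScanView alloc] initWithFrame:self.view.bounds];
    // The "hole" that is cut out of the dark cover layer and cropped from every frame
    self.scanView.cropRect = CGRectMake(60, 200, 200, 200);
    [self.view addSubview:self.scanView];
}

- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    [self.scanView start];   // starts the AVCaptureSession on the sample queue
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [self.scanView stop];
}

@end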