
iOS Face Detection

1. Add the following properties to the controller (which should import AVFoundation and adopt AVCaptureVideoDataOutputSampleBufferDelegate so the delegate method in step 4 is called):

/** Capture device (camera) */
@property (nonatomic,strong) AVCaptureDevice *device;
/** Camera position (front/back); referenced in step 3 */
@property (nonatomic,assign) AVCaptureDevicePosition devicePosition;
/** Camera input */
@property (nonatomic,strong) AVCaptureDeviceInput *deviceInput;
/** Camera video data output */
@property (nonatomic,strong) AVCaptureVideoDataOutput *videoDataOutput;
/** Capture session */
@property (nonatomic,strong) AVCaptureSession *session;
/** Serial queue on which output frames are delivered */
@property (nonatomic,strong) dispatch_queue_t videoDataOutputQueue;
/** Camera preview layer */
@property (nonatomic,strong) AVCaptureVideoPreviewLayer *previewLayer;
/** Face detector */
@property (nonatomic,strong) CIDetector *faceDetector;
2. Lazy-load the face detector
- (CIDetector *)faceDetector {
    if (_faceDetector == nil) {
        // CIDetectorAccuracyLow trades accuracy for speed, which suits per-frame detection.
        NSDictionary *faceDetectorOptions = @{ CIDetectorAccuracy : CIDetectorAccuracyLow };
        _faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:faceDetectorOptions];
    }
    return _faceDetector;
}
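If detection speed is less critical, the detector options can be tightened. A minimal variant sketch (the method name highAccuracyFaceDetector and the 0.1 minimum feature size are illustrative, not part of the original code):

// Variant getter, assuming higher accuracy is worth the extra CPU cost.
// CIDetectorTracking enables frame-to-frame face tracking; CIDetectorMinFeatureSize ignores tiny faces.
- (CIDetector *)highAccuracyFaceDetector {
    NSDictionary *options = @{ CIDetectorAccuracy       : CIDetectorAccuracyHigh,
                               CIDetectorTracking       : @YES,
                               CIDetectorMinFeatureSize : @0.1 };
    return [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:options];
}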

3. Initialize all of the properties above
/**
 *  Set up face detection: capture device, session, output, and preview layer
 */
- (void)setupDetector {
    // 1. Camera position (front camera)
    self.devicePosition = AVCaptureDevicePositionFront;
    
    // 2. Find the capture device matching that position
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == self.devicePosition) {
            self.device = device;
            break;
        }
    }
    
    //   Handle the case where no matching device exists
    if (!self.device) {
        // No suitable capture device
#warning TODO
        NSLog(@"Capture device not found");
    }
    
    NSError *error = nil;
    
    // 3. Input
    self.deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.device error:&error];
    //   Handle input initialization failure
    if (error) {
#warning TODO
        NSLog(@"Failed to create device input: %@", error);
    }

    // 4. Serial queue on which the output delivers sample buffers
#define FYFVideoDataOutputQueue "VideoDataOutputQueue"
    self.videoDataOutputQueue = dispatch_queue_create(FYFVideoDataOutputQueue, DISPATCH_QUEUE_SERIAL);
    
    // 5. Output (captures video frames from the session)
    self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    //   Pixel format of the captured frames
    NSDictionary *rgbOutputSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCMPixelFormat_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [self.videoDataOutput setVideoSettings:rgbOutputSettings];
    //   Drop frames that arrive while the delegate queue is still busy
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
    //   Deliver sample buffers to self on the serial queue
    [self.videoDataOutput setSampleBufferDelegate:self queue:self.videoDataOutputQueue];
    
    // 6. Session
    self.session = [[AVCaptureSession alloc] init];
    //   1> Capture quality preset
    [self.session setSessionPreset:AVCaptureSessionPresetHigh];
    //   2> Add the input
    if ([self.session canAddInput:self.deviceInput]) {
        [self.session addInput:self.deviceInput];
    } else {
        // Handle failure to add the input
    }
    //   3> Add the output
    if ([self.session canAddOutput:self.videoDataOutput]) {
        [self.session addOutput:self.videoDataOutput];
    } else {
        // Handle failure to add the output
    }
    
    // 7. Preview layer
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    [self.previewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
    //self.previewLayer.frame = self.view.bounds;
    
    // 8. Add the preview layer to the view
    CALayer *rootLayer = [self.view layer];
    [rootLayer setMasksToBounds:YES];
    self.previewLayer.frame = rootLayer.bounds;
    [rootLayer insertSublayer:self.previewLayer atIndex:0];

    // 9. Start capturing
    [self.session startRunning];
}
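The camera cannot be used until the user has granted access (and, on iOS 10 and later, Info.plist contains an NSCameraUsageDescription). A minimal sketch of gating -setupDetector behind an authorization check; the helper name checkCameraAuthorizationThen: is hypothetical, not from the original code:

// Ask for camera permission before starting the capture pipeline.
- (void)checkCameraAuthorizationThen:(void (^)(void))granted {
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) {
        granted();
    } else if (status == AVAuthorizationStatusNotDetermined) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL ok) {
            // The completion handler may run on an arbitrary queue; hop to main before continuing.
            dispatch_async(dispatch_get_main_queue(), ^{
                if (ok) granted();
            });
        }];
    } else {
        // Denied or restricted: the user has to enable camera access in Settings.
        NSLog(@"Camera access denied or restricted");
    }
}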
4. Implement the delegate and detect faces
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Grab the current frame as a CIImage, processing each frame as it arrives.
    // CVPixelBuffer (Core Video pixel buffer) is an image buffer in main memory that stores raw pixel data;
    // it is used when producing image frames, decompressing video, or handing images to Core Image.
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // CMAttachmentBearer is a CF-based protocol providing a key/value/mode attachment API;
    // any CF object can carry such attachments to store extra information about the buffer.
    CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
    CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer options:(__bridge NSDictionary *)(attachments)];
    
    if (attachments) {
        CFRelease(attachments);
    }
    
    // Run detection once per possible device orientation so faces are found however the device is held
    // (at the cost of running the detector four times per frame).
    NSDictionary *imageOptionPortrait = [NSDictionary dictionaryWithObject:[self getImageOrientationByDeviceOrientation:UIDeviceOrientationPortrait] forKey:CIDetectorImageOrientation];
    NSDictionary *imageOptionPortraitUpsideDown = [NSDictionary dictionaryWithObject:[self getImageOrientationByDeviceOrientation:UIDeviceOrientationPortraitUpsideDown] forKey:CIDetectorImageOrientation];
    NSDictionary *imageOptionLandscapeLeft = [NSDictionary dictionaryWithObject:[self getImageOrientationByDeviceOrientation:UIDeviceOrientationLandscapeLeft] forKey:CIDetectorImageOrientation];
    NSDictionary *imageOptionLandscapeRight = [NSDictionary dictionaryWithObject:[self getImageOrientationByDeviceOrientation:UIDeviceOrientationLandscapeRight] forKey:CIDetectorImageOrientation];
    
    NSMutableArray *allFeatures = [NSMutableArray array];
    [allFeatures addObjectsFromArray:[self.faceDetector featuresInImage:ciImage options:imageOptionPortrait]];
    [allFeatures addObjectsFromArray:[self.faceDetector featuresInImage:ciImage options:imageOptionPortraitUpsideDown]];
    [allFeatures addObjectsFromArray:[self.faceDetector featuresInImage:ciImage options:imageOptionLandscapeLeft]];
    [allFeatures addObjectsFromArray:[self.faceDetector featuresInImage:ciImage options:imageOptionLandscapeRight]];
    
    if (allFeatures.count) {
        // Face(s) detected

    } else {
        // No face detected
    }
}
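The delegate runs on videoDataOutputQueue, so anything in the "face(s) detected" branch that touches UIKit or CALayer has to hop back to the main queue. A hedged sketch of such a handler, which the branch above could call with allFeatures (the method name handleDetectedFaces: is illustrative, not from the original post):

// Handle detected faces on the main queue.
- (void)handleDetectedFaces:(NSArray *)features {
    dispatch_async(dispatch_get_main_queue(), ^{
        for (CIFaceFeature *face in features) {
            // face.bounds is in Core Image coordinates (origin at the bottom-left of the image),
            // not view coordinates; drawing an overlay would need an extra coordinate conversion.
            NSLog(@"Detected face at %@", NSStringFromCGRect(face.bounds));
        }
    });
}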

5. Helper method
/**
 *  Map a device orientation to the corresponding EXIF image orientation
 */
- (NSNumber *)getImageOrientationByDeviceOrientation:(UIDeviceOrientation)deviceOrientation {
    int imageOrientation;
    enum {
        PHOTOS_EXIF_0ROW_TOP_0COL_LEFT			= 1, //   1  =  0th row is at the top, and 0th column is on the left (THE DEFAULT).
        PHOTOS_EXIF_0ROW_TOP_0COL_RIGHT			= 2, //   2  =  0th row is at the top, and 0th column is on the right.
        PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT      = 3, //   3  =  0th row is at the bottom, and 0th column is on the right.
        PHOTOS_EXIF_0ROW_BOTTOM_0COL_LEFT       = 4, //   4  =  0th row is at the bottom, and 0th column is on the left.
        PHOTOS_EXIF_0ROW_LEFT_0COL_TOP          = 5, //   5  =  0th row is on the left, and 0th column is the top.
        PHOTOS_EXIF_0ROW_RIGHT_0COL_TOP         = 6, //   6  =  0th row is on the right, and 0th column is the top.
        PHOTOS_EXIF_0ROW_RIGHT_0COL_BOTTOM      = 7, //   7  =  0th row is on the right, and 0th column is the bottom.
        PHOTOS_EXIF_0ROW_LEFT_0COL_BOTTOM       = 8  //   8  =  0th row is on the left, and 0th column is the bottom.
    };
    
    switch (deviceOrientation) {
        case UIDeviceOrientationPortraitUpsideDown:  // Device oriented vertically, home button on the top
            imageOrientation = PHOTOS_EXIF_0ROW_LEFT_0COL_BOTTOM;
            break;
        case UIDeviceOrientationLandscapeLeft:       // Device oriented horizontally, home button on the right
            //			if (self.isUsingFrontFacingCamera)
            imageOrientation =  PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT;
            //			else
            //				exifOrientation = PHOTOS_EXIF_0ROW_TOP_0COL_LEFT;
            break;
        case UIDeviceOrientationLandscapeRight:      // Device oriented horizontally, home button on the left
            //			if (self.isUsingFrontFacingCamera)
            imageOrientation = PHOTOS_EXIF_0ROW_TOP_0COL_LEFT;
            //			else
            //				exifOrientation = PHOTOS_EXIF_0ROW_BOTTOM_0COL_RIGHT;
            break;
        case UIDeviceOrientationPortrait:            // Device oriented vertically, home button on the bottom
        default:
            imageOrientation = PHOTOS_EXIF_0ROW_RIGHT_0COL_TOP;
            break;
    }
    return [NSNumber numberWithInt:imageOrientation];
}
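Running the detector four times per frame (once per orientation, as in step 4) is expensive. If only the current device orientation matters, this helper allows a single pass instead; the method name below is illustrative, not part of the original code:

// Optimization sketch: detect using only the current device orientation.
// If the orientation is unknown, the default case above falls back to the portrait mapping.
- (NSArray *)featuresForCurrentOrientationInImage:(CIImage *)ciImage {
    NSNumber *exifOrientation = [self getImageOrientationByDeviceOrientation:[[UIDevice currentDevice] orientation]];
    return [self.faceDetector featuresInImage:ciImage
                                      options:@{ CIDetectorImageOrientation : exifOrientation }];
}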

6. Teardown
// clean up capture setup
- (void)teardownAVCapture {
    for(AVCaptureInput *input in self.session.inputs){
        [self.session removeInput:input];
    }
    for(AVCaptureOutput *output in self.session.outputs){
        [self.session removeOutput:output];
    }
    [self.session stopRunning];
    self.videoDataOutput = nil;
    self.videoDataOutputQueue = nil;
    self.device = nil;
    [self.previewLayer removeFromSuperlayer];
    self.previewLayer = nil;
}
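For completeness, a sketch of where these pieces might be wired into the controller's lifecycle (the placement is an assumption, not from the original post; it uses the hypothetical permission helper from step 3):

- (void)viewDidLoad {
    [super viewDidLoad];
    // Start capturing only after camera access has been granted.
    [self checkCameraAuthorizationThen:^{
        [self setupDetector];
    }];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    // Release the capture pipeline when the controller goes away.
    [self teardownAVCapture];
}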