iOS: Using AVCaptureSession to Capture Camera Frames and Recognize the ID Card Number in the Image
A custom camera view controller pulls ID-card frames from the device camera in real time, then recognizes the ID card number in each frame using OCR restricted to digits.
Header
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <TesseractOCR/TesseractOCR.h>
#import "MBProgressHUD.h"
//#import "Tesseract.h"
@protocol PassImage <NSObject>
- (void)PassImagedata:(id)_data;
@end
@interface DZC_Carmer_photo : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate, G8TesseractDelegate>
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureDeviceInput *videoInput;
@property (nonatomic, strong) AVCaptureStillImageOutput *stillImageOutput;
@property (nonatomic, strong) AVCaptureVideoDataOutput *VideoDataOutput;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic, strong) NSOperationQueue *operationQueue;
@property (nonatomic, strong) UIButton *shutterButton;
@property (nonatomic, weak) id<PassImage> delegate;
@end
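A minimal usage sketch (the presenting controller here is hypothetical, not part of the original code): the caller presents the scanner and receives results through the PassImage delegate.

@interface MyFormViewController : UIViewController <PassImage>
@end
@implementation MyFormViewController
- (void)scanIDCard
{
    DZC_Carmer_photo *scanner = [[DZC_Carmer_photo alloc] init];
    scanner.delegate = self;
    [self presentViewController:scanner animated:YES completion:nil];
}
// Called with an NSString (the recognized ID number) from the live-scan path,
// or with a UIImage from the still-photo path.
- (void)PassImagedata:(id)_data
{
    NSLog(@"scanned: %@", _data);
}
@end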
Implementation
#import "DZC_Carmer_photo.h"
#import "UIImage+DZC_UImageScral.h"
#import "DzcDES.h"
#import "DZC_CustomLine.h"
//#import "Tesseract.h"
@interface DZC_Carmer_photo ()
{
UIButton *btn;
UIImageView *imgView;
MBProgressHUD *MB_HUD;
UIImageView *iamgeview; // orange rectangle marking where the ID number should sit
BOOL _cameraAvaible;
}
@end
@implementation DZC_Carmer_photo
- (instancetype)init
{
self = [super init];
if (self) {
[self initialSession];
}
return self;
}
-(void)viewWillDisappear:(BOOL)animated
{
[super viewWillDisappear:animated];
[self.operationQueue cancelAllOperations];
}
- (void)viewDidLoad {
[super viewDidLoad];
self.operationQueue = [[NSOperationQueue alloc] init];
// UIButton *carmerBtn=[[UIButton alloc]initWithFrame:CGRectMake(SCREENWIDTH/2-50, SCREENHEIGHT-100, 100, 50)];
// //[carmerBtn setTransform:CGAffineTransformMakeRotation(M_PI/2)];
// [carmerBtn setTitle:@"Take Photo" forState:UIControlStateNormal];
// carmerBtn.backgroundColor=[UIColor orangeColor];
// [carmerBtn addTarget:self action:@selector(shutterCamera) forControlEvents:UIControlEventTouchUpInside];
// self.shutterButton=carmerBtn;
// [self.view addSubview:carmerBtn];
// [self.view bringSubviewToFront:carmerBtn];
UIButton *returnBtn=[[UIButton alloc]initWithFrame:CGRectMake(10, SCREENHEIGHT-100, 100, 50)];
//[returnBtn setTransform:CGAffineTransformMakeRotation(M_PI/2)];
[returnBtn setTitle:@"Cancel" forState:UIControlStateNormal];
returnBtn.backgroundColor=[UIColor orangeColor];
[returnBtn addTarget:self action:@selector(CloseBtn) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:returnBtn];
[self.view bringSubviewToFront:returnBtn];
UILabel *titleLable=[[UILabel alloc]initWithFrame:CGRectMake(SCREENWIDTH-110, SCREENHEIGHT-120, 100, 140)];
//[titleLable setTransform:CGAffineTransformMakeRotation(M_PI/2)];
[titleLable setFont:[UIFont systemFontOfSize:10]];
[titleLable setText:@"Tip: when scanning, place the ID card number inside the rectangle"];
titleLable.numberOfLines=0;
[titleLable setTextColor:[UIColor whiteColor]];
[self.view addSubview:titleLable];
MB_HUD=[[MBProgressHUD alloc]init];
[self.view addSubview:MB_HUD];
iamgeview=[[UIImageView alloc]initWithFrame:CGRectMake(0, 0, 300, 60)];
iamgeview.center=CGPointMake(SCREENWIDTH/2, SCREENHEIGHT/2);
iamgeview.layer.borderWidth=2.0;
iamgeview.layer.borderColor=[UIColor orangeColor].CGColor;
[self.view addSubview:iamgeview];
[self.view bringSubviewToFront:iamgeview];
/*
imgView=[[UIImageView alloc]initWithFrame:CGRectMake(0, 20, SCREENWIDTH, 50)];
imgView.backgroundColor=[UIColor orangeColor];
[self.view addSubview:imgView];
[self.view bringSubviewToFront:imgView];
*/
// Do any additional setup after loading the view.
}
-(void)CloseBtn
{
[self dismissViewControllerAnimated:YES completion:nil];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (void) initialSession
{
// This method is invoked from init.
self.session = [[AVCaptureSession alloc] init];
[self.session setSessionPreset:AVCaptureSessionPresetMedium];
self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamera] error:nil];
// -backCamera returns an AVCaptureDevice for the rear camera; its implementation (and -frontCamera) appears below.
/*
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary * outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey, nil];
// Output settings: AVVideoCodecJPEG makes the still image output produce JPEG data.
[self.stillImageOutput setOutputSettings:outputSettings];
*/
self.VideoDataOutput=[[AVCaptureVideoDataOutput alloc]init];
if ([self.session canAddInput:self.videoInput]) {
[self.session addInput:self.videoInput];
}
/*
if ([self.session canAddOutput:self.stillImageOutput]) {
[self.session addOutput:self.stillImageOutput];
}
*/
if ([self.session canAddOutput:self.VideoDataOutput])
{
[self.session addOutput:self.VideoDataOutput];
dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
[self.VideoDataOutput setSampleBufferDelegate:self queue:queue];
//dispatch_release(queue);
// Specify the pixel format
self.VideoDataOutput.videoSettings =
[NSDictionary dictionaryWithObject:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
}
}
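One thing initialSession skips is the camera permission check. A sketch (not in the original code) of gating session setup on authorization, using the standard AVFoundation APIs:

- (void)checkCameraAccessThen:(void (^)(BOOL granted))completion
{
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) {
        completion(YES);
    } else if (status == AVAuthorizationStatusNotDetermined) {
        // First launch: ask the user, then continue on the main queue.
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{ completion(granted); });
        }];
    } else {
        completion(NO); // denied or restricted
    }
}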
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition) position {
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == position) {
_cameraAvaible=YES;
return device;
}
}
_cameraAvaible=NO;
return nil;
}
- (AVCaptureDevice *)frontCamera {
return [self cameraWithPosition:AVCaptureDevicePositionFront];
}
- (AVCaptureDevice *)backCamera {
return [self cameraWithPosition:AVCaptureDevicePositionBack];
}
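A side note: [AVCaptureDevice devicesWithMediaType:] has been deprecated since iOS 10; on newer systems the equivalent lookup is a discovery session, roughly:

AVCaptureDeviceDiscoverySession *discovery =
    [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                           mediaType:AVMediaTypeVideo
                                                            position:AVCaptureDevicePositionBack];
AVCaptureDevice *backCamera = discovery.devices.firstObject;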
// Next, viewWillAppear installs the preview layer.
- (void)setUpCameraLayer
{
if (_cameraAvaible == NO) return;
if (self.previewLayer == nil) {
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
self.previewLayer.frame = CGRectMake(0, 0, SCREENWIDTH, SCREENHEIGHT);
self.previewLayer.position = CGPointMake(SCREENWIDTH/2, SCREENHEIGHT/2);
[self.view.layer insertSublayer:self.previewLayer atIndex:0];
/*
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
self.cameraShowView = [[UIView alloc] initWithFrame:CGRectMake(0, 20, SCREENWIDTH, 300)];
UIView *view = self.cameraShowView;
CALayer *viewLayer = [view layer];
[viewLayer setMasksToBounds:YES];
CGRect bounds = [view bounds];
[self.previewLayer setFrame:bounds];
[self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
[viewLayer insertSublayer:self.previewLayer below:[[viewLayer sublayers] objectAtIndex:0]];
*/
}
}
-(void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
[self setUpCameraLayer];
// DZC_CustomLine *line=[[DZC_CustomLine alloc]init];
// line.backgroundColor=[UIColor clearColor];
// line.frame=self.previewLayer.frame;
// [self.view addSubview:line];
// [line setNeedsDisplay];
//[self turnOnLed]; // turn on the torch
}
// Start and stop the session in viewDidAppear / viewDidDisappear.
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
if (self.session) {
[self.session startRunning];
}
}
- (void)viewDidDisappear:(BOOL)animated
{
[super viewDidDisappear: animated];
if (self.session) {
[self.session stopRunning];
}
[self.previewLayer removeFromSuperlayer];
self.previewLayer =nil;
self.session=nil;
//[self turnOffLed]; // turn off the torch
}
// Button action for switching between the front and back cameras.
- (void)toggleCamera {
NSUInteger cameraCount = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
if (cameraCount > 1) {
NSError *error;
AVCaptureDeviceInput *newVideoInput;
AVCaptureDevicePosition position = [[_videoInput device] position];
if (position == AVCaptureDevicePositionBack) {
newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontCamera] error:&error];
} else if (position == AVCaptureDevicePositionFront) {
newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamera] error:&error];
} else {
return;
}
if (newVideoInput != nil) {
[self.session beginConfiguration];
[self.session removeInput:self.videoInput];
if ([self.session canAddInput:newVideoInput]) {
[self.session addInput:newVideoInput];
[self setVideoInput:newVideoInput];
} else {
[self.session addInput:self.videoInput];
}
[self.session commitConfiguration];
} else if (error) {
NSLog(@"toggle camera failed, error = %@", error);
}
}
DLog(@"Switched camera");
}
// Button action for taking a still photo.
- (void)shutterCamera
{
[MB_HUD show:YES];
self.shutterButton.userInteractionEnabled = NO;
AVCaptureConnection *videoConnection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
if (!videoConnection) {
[MB_HUD setDetailsLabelText:@"take photo failed!"];
[MB_HUD hide:YES afterDelay:1.0];
return;
}
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer == NULL) {
[MB_HUD setDetailsLabelText:@"take photo null!"];
[MB_HUD hide:YES afterDelay:1.0];
return;
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
DLog(@"image size = %@", NSStringFromCGSize(image.size));
UIImage *imageOne = [self fixOrientation:image];
UIImage *imageTwo = [self image:imageOne rotation:UIImageOrientationLeft];
UIImage *imageThree = [self scaleFromImage:imageTwo toSize:CGSizeMake(308, 400)];
DLog(@"image size = %@", NSStringFromCGSize(imageThree.size));
[self.delegate PassImagedata:imageThree];
imageThree=nil;
[MB_HUD hide:YES];
[self dismissViewControllerAnimated:YES completion:nil];
//[self turnOffLed];
}];
}
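Similarly, AVCaptureStillImageOutput is deprecated as of iOS 10. A rough sketch of the AVCapturePhotoOutput replacement for this capture call (not part of the original code):

AVCapturePhotoOutput *photoOutput = [[AVCapturePhotoOutput alloc] init];
if ([self.session canAddOutput:photoOutput]) {
    [self.session addOutput:photoOutput];
}
// self must conform to AVCapturePhotoCaptureDelegate and implement its
// callbacks to receive the encoded photo data.
[photoOutput capturePhotoWithSettings:[AVCapturePhotoSettings photoSettings] delegate:self];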
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
DLog(@"Got a video frame");
// Crude throttle: blocking the capture queue limits how often frames are processed.
[NSThread sleepForTimeInterval:1.5];
[self showImageView:sampleBuffer];
}
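Sleeping on the capture queue works, but it also stalls frame delivery. A lighter throttle (a sketch; lastProcessTime is a hypothetical CFTimeInterval ivar, and CACurrentMediaTime comes from QuartzCore) drops frames by timestamp instead:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Process at most one frame every 1.5 seconds without blocking the queue.
    CFTimeInterval now = CACurrentMediaTime();
    if (now - lastProcessTime < 1.5) return;
    lastProcessTime = now;
    [self showImageView:sampleBuffer];
}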
-(void)showImageView:(CMSampleBufferRef)_sampleBuffer
{
UIImage *image=[self imageFromSampleBuffer:_sampleBuffer];
if (image)
{
UIImage *one=[self fixOrientation:image];
NSData *data=UIImageJPEGRepresentation(one, 1.0);
one=[UIImage imageWithData:data];
UIImage * img=[one getSubImage:CGRectMake(SCREENWIDTH/2-150, iamgeview.frame.origin.y-60, iamgeview.frame.size.width, iamgeview.frame.size.height)];
img=[self grayscale:img type:3];
/*
UIImage *imag=[UIImage imageNamed:@"image_sample.jpg"];
imag=[self grayscale:imag type:3];
*/
[self recognizeImageWithTesseract:img];
/*
Tesseract *tesseract= [[Tesseract alloc] initWithDataPath:@"tessdata" language:@"eng"];
[tesseract setVariableValue:@"0123456789" forKey:@"tessedit_char_whitelist"];
[tesseract setImage:img];
[tesseract recognize];
NSString *str=[tesseract recognizedText];
[tesseract clear];
tesseract=nil;
str=[DzcDES trim:str];
NSRange range=[str rangeOfString:@" "];
if (range.location !=NSNotFound)
{
NSArray *array=[str componentsSeparatedByString:@" "];
for(NSString *ss in array)
{
DLog(@"---%@",ss);
NSString * s=[DzcDES trim:ss];
if ([DzcDES validateIdentityCard:s])
{
DLog(@"字串為身份證號碼");
dispatch_async(dispatch_get_main_queue(), ^{
[MB_HUD show:YES];
[MB_HUD setDetailsLabelText:s];
[self.delegate PassImagedata:s];
[self dismissViewControllerAnimated:YES completion:nil];
});
}
}
}
else
{
str=[DzcDES trim:str];
if ([DzcDES validateIdentityCard:str])
{
dispatch_async(dispatch_get_main_queue(), ^{
[MB_HUD show:YES];
[MB_HUD setDetailsLabelText:str];
[self.delegate PassImagedata:str];
[self dismissViewControllerAnimated:YES completion:nil];
});
}
}
[[NSThread currentThread]cancel];
*/
}
return;
}
// Fix the image orientation so the pixel data is upright.
- (UIImage *)fixOrientation:(UIImage *)aImage
{
// No-op if the orientation is already correct
if (aImage.imageOrientation == UIImageOrientationUp)
return aImage;
// We need to calculate the proper transformation to make the image upright.
// We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
CGAffineTransform transform = CGAffineTransformIdentity;
switch (aImage.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.width, aImage.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, aImage.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
switch (aImage.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, aImage.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
default:
break;
}
// Now we draw the underlying CGImage into a new context, applying the transform
// calculated above.
CGContextRef ctx = CGBitmapContextCreate(NULL, aImage.size.width, aImage.size.height,
CGImageGetBitsPerComponent(aImage.CGImage), 0,
CGImageGetColorSpace(aImage.CGImage),
CGImageGetBitmapInfo(aImage.CGImage));
CGContextConcatCTM(ctx, transform);
switch (aImage.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
// Grr...
CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.height,aImage.size.width), aImage.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,aImage.size.width,aImage.size.height), aImage.CGImage);
break;
}
// And now we just create a new UIImage from the drawing context
CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
UIImage *img = [UIImage imageWithCGImage:cgimg];
CGContextRelease(ctx);
CGImageRelease(cgimg);
return img;
}
#pragma mark - Navigation
/*
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
- (UIImage *)image:(UIImage *)image rotation:(UIImageOrientation)orientation
{
long double rotate = 0.0;
CGRect rect;
float translateX = 0;
float translateY = 0;
float scaleX = 1.0;
float scaleY = 1.0;
switch (orientation) {
case UIImageOrientationLeft:
rotate = M_PI_2;
rect = CGRectMake(0, 0, image.size.height, image.size.width);
translateX = 0;
translateY = -rect.size.width;
scaleY = rect.size.width/rect.size.height;
scaleX = rect.size.height/rect.size.width;
break;
case UIImageOrientationRight:
rotate = 3 * M_PI_2;
rect = CGRectMake(0, 0, image.size.height, image.size.width);
translateX = -rect.size.height;
translateY = 0;
scaleY = rect.size.width/rect.size.height;
scaleX = rect.size.height/rect.size.width;
break;
case UIImageOrientationDown:
rotate = M_PI;
rect = CGRectMake(0, 0, image.size.width, image.size.height);
translateX = -rect.size.width;
translateY = -rect.size.height;
break;
default:
rotate = 0.0;
rect = CGRectMake(0, 0, image.size.width, image.size.height);
translateX = 0;
translateY = 0;
break;
}
UIGraphicsBeginImageContext(rect.size);
CGContextRef context = UIGraphicsGetCurrentContext();
// Apply the CTM transforms
CGContextTranslateCTM(context, 0.0, rect.size.height);
CGContextScaleCTM(context, 1.0, -1.0);
CGContextRotateCTM(context, rotate);
CGContextTranslateCTM(context, translateX, translateY);
CGContextScaleCTM(context, scaleX, scaleY);
// Draw the image
CGContextDrawImage(context, CGRectMake(0, 0, rect.size.width, rect.size.height), image.CGImage);
UIImage *newPic = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext(); // balance UIGraphicsBeginImageContext
return newPic;
}
}
// Resize the image, e.g. to shrink it before uploading to a server.
- (UIImage *)scaleFromImage:(UIImage *)image toSize:(CGSize)size
{
UIGraphicsBeginImageContext(size);
[image drawInRect:CGRectMake(0, 0, size.width, size.height)];
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return newImage;
}
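One caveat worth knowing: UIGraphicsBeginImageContext always renders at scale 1.0, so the result looks blurry on Retina screens. If that matters, the options variant preserves the device scale:

UIGraphicsBeginImageContextWithOptions(size, NO, 0.0); // NO = not opaque; scale 0.0 = main screen scale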
-(void)turnOffLed {
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device hasTorch]) {
[device lockForConfiguration:nil];
[device setTorchMode: AVCaptureTorchModeOff];
[device unlockForConfiguration];
}
}
-(void)turnOnLed {
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device hasTorch]) {
[device lockForConfiguration:nil];
[device setTorchMode: AVCaptureTorchModeOn];
[device unlockForConfiguration];
}
}
// Create a UIImage from sample buffer data.
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
// Get a CMSampleBuffer’s Core Video image buffer for the media data
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get the base address of the pixel buffer
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Get the number of bytes per row for the pixel buffer
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the pixel buffer width and height
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Create a bitmap graphics context with the sample buffer data
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// Create a Quartz image from the pixel data in the bitmap graphics context
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
// Free up the context and color space
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
// Create an image object from the Quartz image
//UIImage *image = [UIImage imageWithCGImage:quartzImage];
UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0f orientation:UIImageOrientationRight];
// Release the Quartz image
CGImageRelease(quartzImage);
return image;
}
// Apply a per-pixel effect: type 1 = grayscale, type 2 = tint, type 3 = invert (the inverted image is what gets fed to OCR here).
- (UIImage*)grayscale:(UIImage*)anImage type:(int)type {
CGImageRef imageRef = anImage.CGImage;
size_t width = CGImageGetWidth(imageRef);
size_t height = CGImageGetHeight(imageRef);
size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
size_t bitsPerPixel = CGImageGetBitsPerPixel(imageRef);
size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);
CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
bool shouldInterpolate = CGImageGetShouldInterpolate(imageRef);
CGColorRenderingIntent intent = CGImageGetRenderingIntent(imageRef);
CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
CFDataRef data = CGDataProviderCopyData(dataProvider);
UInt8 *buffer = (UInt8*)CFDataGetBytePtr(data);
NSUInteger x, y;
for (y = 0; y < height; y++) {
for (x = 0; x < width; x++) {
UInt8 *tmp;
tmp = buffer + y * bytesPerRow + x * 4;
UInt8 red,green,blue;
red = *(tmp + 0);
green = *(tmp + 1);
blue = *(tmp + 2);
UInt8 brightness;
switch (type) {
case 1:
// Luma-weighted grayscale. (The usual integer weights are
// 77*red + 151*green + 28*blue; the green/blue factors look swapped here.)
brightness = (77 * red + 28 * green + 151 * blue) / 256;
*(tmp + 0) = brightness;
*(tmp + 1) = brightness;
*(tmp + 2) = brightness;
break;
case 2:
// Warm tint: damp the green and blue channels.
*(tmp + 0) = red;
*(tmp + 1) = green * 0.7;
*(tmp + 2) = blue * 0.4;
break;
case 3:
// Invert the colors (photo negative).
*(tmp + 0) = 255 - red;
*(tmp + 1) = 255 - green;
*(tmp + 2) = 255 - blue;
break;
default:
*(tmp + 0) = red;
*(tmp + 1) = green;
*(tmp + 2) = blue;
break;
}
}
}
CFDataRef effectedData = CFDataCreate(NULL, buffer, CFDataGetLength(data));
CGDataProviderRef effectedDataProvider = CGDataProviderCreateWithCFData(effectedData);
CGImageRef effectedCgImage = CGImageCreate(
width, height,
bitsPerComponent, bitsPerPixel, bytesPerRow,
colorSpace, bitmapInfo, effectedDataProvider,
NULL, shouldInterpolate, intent);
UIImage *effectedImage = [[UIImage alloc] initWithCGImage:effectedCgImage];
CGImageRelease(effectedCgImage);
CFRelease(effectedDataProvider);
CFRelease(effectedData);
CFRelease(data);
return effectedImage ;
}
#pragma mark - Image recognition
-(void)recognizeImageWithTesseract:(UIImage *)image
{
// Animate a progress activity indicator
// Create a new `G8RecognitionOperation` to perform the OCR asynchronously
// It is assumed that there is a .traineddata file for the language pack
// you want Tesseract to use in the "tessdata" folder in the root of the
// project AND that the "tessdata" folder is a referenced folder and NOT
// a symbolic group in your project
G8RecognitionOperation *operation = [[G8RecognitionOperation alloc] initWithLanguage:@"eng"];
// Use the original Tesseract engine mode in performing the recognition
// (see G8Constants.h) for other engine mode options
operation.tesseract.engineMode = G8OCREngineModeTesseractOnly;
// Let Tesseract automatically segment the page into blocks of text
// based on its analysis (see G8Constants.h) for other page segmentation
// mode options
operation.tesseract.pageSegmentationMode = G8PageSegmentationModeAutoOnly;
// Optionally limit the time Tesseract should spend performing the
// recognition
//operation.tesseract.maximumRecognitionTime = 1.0;
// Set the delegate for the recognition to be this class
// (see `progressImageRecognitionForTesseract` and
// `shouldCancelImageRecognitionForTesseract` methods below)
operation.delegate = self;
// Optionally limit Tesseract's recognition to the following whitelist
// and blacklist of characters
operation.tesseract.charWhitelist = @"0123456789";
//operation.tesseract.charBlacklist = @"56789";
// Set the image on which Tesseract should perform recognition
operation.tesseract.image = image;
// Optionally limit the region in the image on which Tesseract should
// perform recognition to a rectangle
//operation.tesseract.rect = CGRectMake(20, 20, 100, 100);
// Specify the function block that should be executed when Tesseract
// finishes performing recognition on the image
operation.recognitionCompleteBlock = ^(G8Tesseract *tesseract) {
// Fetch the recognized text
NSString *recognizedText = tesseract.recognizedText;
DLog(@"-----%@", recognizedText);
NSString * str=[DzcDES trim:recognizedText];
NSRange range=[str rangeOfString:@" "];
if (range.location !=NSNotFound)
{
NSArray *array=[str componentsSeparatedByString:@" "];
for(NSString *ss in array)
{
DLog(@"---%@",ss);
NSString * s=[DzcDES trim:ss];
if ([DzcDES validateIdentityCard:s])
{
DLog(@"字串為身份證號碼");
dispatch_async(dispatch_get_main_queue(), ^{
[self.delegate PassImagedata:s];
[self dismissViewControllerAnimated:YES completion:nil];
});
}
}
}
else
{
str=[DzcDES trim:str];
if ([DzcDES validateIdentityCard:str])
{
dispatch_async(dispatch_get_main_queue(), ^{
[self.delegate PassImagedata:str];
[self dismissViewControllerAnimated:YES completion:nil];
});
}
}
// Remove the animated progress activity indicator
// Spawn an alert with the recognized text
};
// Display the image to be recognized in the view
// Finally, add the recognition operation to the queue
[self.operationQueue addOperation:operation];
}
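The DzcDES helpers (trim:, validateIdentityCard:) are not shown in the post. For reference, here is a minimal sketch of the standard 18-digit Chinese ID checksum (ISO 7064 MOD 11-2) that validateIdentityCard: presumably approximates. Note that with charWhitelist limited to digits, Tesseract can never emit the 'X' check character, so IDs ending in X would not be matched by this pipeline.

+ (BOOL)validateIdentityCard:(NSString *)str
{
    if (str.length != 18) return NO;
    static const int weights[17] = {7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2};
    static const char checkChars[] = "10X98765432"; // indexed by (sum mod 11)
    int sum = 0;
    for (int i = 0; i < 17; i++) {
        unichar c = [str characterAtIndex:i];
        if (c < '0' || c > '9') return NO;
        sum += (c - '0') * weights[i];
    }
    unichar last = [str characterAtIndex:17];
    if (last == 'x') last = 'X';
    return (unichar)checkChars[sum % 11] == last;
}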
-(void)progressImageRecognitionForTesseract:(G8Tesseract *)tesseract
{
DLog(@"progress: %lu", (unsigned long)tesseract.progress);
}
-(BOOL)shouldCancelImageRecognitionForTesseract:(G8Tesseract *)tesseract
{
return NO;
}
@end