iOS Face Detection

Introduction: this post walks through a sample OpenCVTestViewController that wraps OpenCV's legacy C API on iOS. It converts between UIImage and IplImage, runs Canny edge detection, and detects faces with a Haar cascade classifier, drawing either a bounding box or an overlay image ("Laughing Man") on each hit.
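
The listing assumes the opencv2 framework is linked and that haarcascade_frontalface_default.xml, lena.jpg, laughing_man.png, and Tink.aiff ship in the app bundle.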

#import "OpenCVTestViewController.h"

#import <opencv2/imgproc/imgproc_c.h>
#import <opencv2/objdetect/objdetect.hpp>

@implementation OpenCVTestViewController
@synthesize imageView;

- (void)dealloc {
AudioServicesDisposeSystemSoundID(alertSoundID);
[imageView release]; // release the retained outlet; never call -dealloc directly
[super dealloc];
}

#pragma mark -
#pragma mark OpenCV Support Methods

// NOTE: the caller is responsible for calling cvReleaseImage() on the returned image.
- (IplImage *)CreateIplImageFromUIImage:(UIImage *)image {
CGImageRef imageRef = image.CGImage;

CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
IplImage *iplimage = cvCreateImage(cvSize(image.size.width, image.size.height), IPL_DEPTH_8U, 4);
CGContextRef contextRef = CGBitmapContextCreate(iplimage->imageData, iplimage->width, iplimage->height,
iplimage->depth, iplimage->widthStep,
colorSpace, kCGImageAlphaPremultipliedLast|kCGBitmapByteOrderDefault);
CGContextDrawImage(contextRef, CGRectMake(0, 0, image.size.width, image.size.height), imageRef);
CGContextRelease(contextRef);
CGColorSpaceRelease(colorSpace);

IplImage *ret = cvCreateImage(cvGetSize(iplimage), IPL_DEPTH_8U, 3);
cvCvtColor(iplimage, ret, CV_RGBA2BGR);
cvReleaseImage(&iplimage);

return ret;
}

// NOTE: convert the image to RGB byte order before passing it to this function.
- (UIImage *)UIImageFromIplImage:(IplImage *)image {
NSLog(@"IplImage (%d, %d) %d bits by %d channels, %d bytes/row %s", image->width, image->height, image->depth, image->nChannels, image->widthStep, image->channelSeq);

CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
NSData *data = [NSData dataWithBytes:image->imageData length:image->imageSize];
CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)data);
CGImageRef imageRef = CGImageCreate(image->width, image->height,
image->depth, image->depth * image->nChannels, image->widthStep,
colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault,
provider, NULL, false, kCGRenderingIntentDefault);
UIImage *ret = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
CGDataProviderRelease(provider);
CGColorSpaceRelease(colorSpace);
return ret;
}
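
Note the asymmetry between the two helpers: CreateIplImageFromUIImage hands back a BGR image, while UIImageFromIplImage interprets the buffer as RGB, so a result needs a cvCvtColor(..., CV_BGR2RGB) before display. The edge-detection demo below sidesteps this because its output is gray replicated across all three channels, and the face-detection demo draws onto a CGBitmapContext instead of converting back.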

#pragma mark -
#pragma mark Utilities for internal use

- (void)showProgressIndicator:(NSString *)text {
//[UIApplication sharedApplication].networkActivityIndicatorVisible = YES;
self.view.userInteractionEnabled = FALSE;
if(!progressHUD) {
CGFloat w = 160.0f, h = 120.0f;
progressHUD = [[UIProgressHUD alloc] initWithFrame:CGRectMake((self.view.frame.size.width-w)/2, (self.view.frame.size.height-h)/2, w, h)];
[progressHUD setText:text];
[progressHUD showInView:self.view];
}
}

- (void)hideProgressIndicator {
//[UIApplication sharedApplication].networkActivityIndicatorVisible = NO;
self.view.userInteractionEnabled = TRUE;
if(progressHUD) {
[progressHUD hide];
[progressHUD release];
progressHUD = nil;

AudioServicesPlaySystemSound(alertSoundID);
}
}

- (void)opencvEdgeDetect {
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];

if(imageView.image) {
cvSetErrMode(CV_ErrModeParent);

// Create grayscale IplImage from UIImage
IplImage *img_color = [self CreateIplImageFromUIImage:imageView.image];
IplImage *img = cvCreateImage(cvGetSize(img_color), IPL_DEPTH_8U, 1);
cvCvtColor(img_color, img, CV_BGR2GRAY);
cvReleaseImage(&img_color);

// Detect edge
IplImage *img2 = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
cvCanny(img, img2, 64, 128, 3);
cvReleaseImage(&img);

// Convert the black-and-white result to a 24-bit image, then to a UIImage for display
IplImage *image = cvCreateImage(cvGetSize(img2), IPL_DEPTH_8U, 3);
for(int y=0; y<img2->height; y++) {
for(int x=0; x<img2->width; x++) {
char *p = image->imageData + y * image->widthStep + x * 3;
*p = *(p+1) = *(p+2) = img2->imageData[y * img2->widthStep + x];
}
}
cvReleaseImage(&img2);
imageView.image = [self UIImageFromIplImage:image];
cvReleaseImage(&image);

[self hideProgressIndicator];
}

[pool release];
}
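
Both opencvEdgeDetect and opencvFaceDetect: are invoked with performSelectorInBackground: (see the IBAction section below), which is why each wraps its work in its own NSAutoreleasePool. Strictly speaking, the assignments to imageView.image and the hideProgressIndicator call then touch UIKit off the main thread; a production version would marshal those back with performSelectorOnMainThread:.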

- (void) opencvFaceDetect:(UIImage *)overlayImage {
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];

if(imageView.image) {
cvSetErrMode(CV_ErrModeParent);

IplImage *image = [self CreateIplImageFromUIImage:imageView.image];

// Scaling down
IplImage *small_image = cvCreateImage(cvSize(image->width/2,image->height/2), IPL_DEPTH_8U, 3);
cvPyrDown(image, small_image, CV_GAUSSIAN_5x5);
cvReleaseImage(&image); // the full-size copy is no longer needed past this point
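// cvPyrDown halves each dimension, so scale = 2 maps detected rects back onto the full-size canvas below.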
int scale = 2;

// Load XML
NSString *path = [[NSBundle mainBundle] pathForResource:@"haarcascade_frontalface_default" ofType:@"xml"];
CvHaarClassifierCascade* cascade = (CvHaarClassifierCascade*)cvLoad([path cStringUsingEncoding:NSASCIIStringEncoding], NULL, NULL, NULL);
CvMemStorage* storage = cvCreateMemStorage(0);

// Detect faces and draw rectangle on them
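// Arguments (assuming the OpenCV 2.2+ eight-argument form): 1.2f is the scale
// factor between search scales, 2 is the minimum number of neighboring hits to
// keep a detection, CV_HAAR_DO_CANNY_PRUNING skips mostly edge-free regions,
// and cvSize(0,0)/cvSize(20,20) are the minimum/maximum object sizes.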
CvSeq* faces = cvHaarDetectObjects(small_image, cascade, storage, 1.2f, 2, CV_HAAR_DO_CANNY_PRUNING, cvSize(0,0), cvSize(20, 20));
cvReleaseImage(&small_image);

// Create canvas to show the results
CGImageRef imageRef = imageView.image.CGImage;
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef contextRef = CGBitmapContextCreate(NULL, imageView.image.size.width, imageView.image.size.height,
8, imageView.image.size.width * 4,
colorSpace, kCGImageAlphaPremultipliedLast|kCGBitmapByteOrderDefault);
CGContextDrawImage(contextRef, CGRectMake(0, 0, imageView.image.size.width, imageView.image.size.height), imageRef);

CGContextSetLineWidth(contextRef, 4);
CGContextSetRGBStrokeColor(contextRef, 0.0, 0.0, 1.0, 0.5);

// Draw the results on the image
for(int i = 0; i < faces->total; i++) {
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];

// Calc the rect of faces
CvRect cvrect = *(CvRect*)cvGetSeqElem(faces, i);
CGRect face_rect = CGContextConvertRectToDeviceSpace(contextRef, CGRectMake(cvrect.x * scale, cvrect.y * scale, cvrect.width * scale, cvrect.height * scale));

if(overlayImage) {
CGContextDrawImage(contextRef, face_rect, overlayImage.CGImage);
} else {
CGContextStrokeRect(contextRef, face_rect);
}

[pool release];
}

CGImageRef resultImageRef = CGBitmapContextCreateImage(contextRef);
imageView.image = [UIImage imageWithCGImage:resultImageRef];
CGImageRelease(resultImageRef); // balance the Create call
CGContextRelease(contextRef);
CGColorSpaceRelease(colorSpace);

cvReleaseMemStorage(&storage);
cvReleaseHaarClassifierCascade(&cascade);

[self hideProgressIndicator];
}

[pool release];
}
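
For comparison, the same detection can be written against OpenCV's C++ API, which replaces the manual CvMemStorage/CvSeq bookkeeping with std::vector. A minimal sketch, assuming an Objective-C++ (.mm) file, the same bundled XML, and a hypothetical detectFaces helper that is not part of the original sample:

#import <opencv2/imgproc/imgproc.hpp>
#import <opencv2/objdetect/objdetect.hpp>
#include <vector>

static std::vector<cv::Rect> detectFaces(const cv::Mat &bgrImage) {
    std::vector<cv::Rect> faces;
    NSString *path = [[NSBundle mainBundle] pathForResource:@"haarcascade_frontalface_default" ofType:@"xml"];
    cv::CascadeClassifier cascade;
    if (path == nil || !cascade.load([path UTF8String])) return faces; // cascade missing or unreadable

    cv::Mat gray;
    cv::cvtColor(bgrImage, gray, CV_BGR2GRAY); // the detector works on grayscale
    cv::equalizeHist(gray, gray);              // normalize contrast first
    cascade.detectMultiScale(gray, faces,
                             1.2, 2,           // same scale factor / min neighbors as above
                             CV_HAAR_DO_CANNY_PRUNING,
                             cv::Size(20, 20)); // minimum face size
    return faces;
}

The returned cv::Rect values play the same role as the CvRect elements pulled out of the CvSeq above and would be scaled back up the same way.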


#pragma mark -
#pragma mark IBAction

- (IBAction)loadImage:(id)sender {
if(!actionSheetAction) {
UIActionSheet *actionSheet = [[UIActionSheet alloc] initWithTitle:@""
delegate:self cancelButtonTitle:@"Cancel" destructiveButtonTitle:nil
otherButtonTitles:@"Use Photo from Library", @"Take Photo with Camera", @"Use Default Lena", nil];
actionSheet.actionSheetStyle = UIActionSheetStyleDefault;
actionSheetAction = ActionSheetToSelectTypeOfSource;
[actionSheet showInView:self.view];
[actionSheet release];
}
}

- (IBAction)saveImage:(id)sender {
if(imageView.image) {
[self showProgressIndicator:@"Saving"];
UIImageWriteToSavedPhotosAlbum(imageView.image, self, @selector(finishUIImageWriteToSavedPhotosAlbum:didFinishSavingWithError:contextInfo:), nil);
}
}

- (void)finishUIImageWriteToSavedPhotosAlbum:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
[self hideProgressIndicator];
}

- (IBAction)edgeDetect:(id)sender {
[self showProgressIndicator:@"Detecting"];
[self performSelectorInBackground:@selector(opencvEdgeDetect) withObject:nil];
}

- (IBAction)faceDetect:(id)sender {
cvSetErrMode(CV_ErrModeParent);
if(imageView.image && !actionSheetAction) {
UIActionSheet *actionSheet = [[UIActionSheet alloc] initWithTitle:@""
delegate:self cancelButtonTitle:@"Cancel" destructiveButtonTitle:nil
otherButtonTitles:@"Bounding Box", @"Laughing Man", nil];
actionSheet.actionSheetStyle = UIActionSheetStyleDefault;
actionSheetAction = ActionSheetToSelectTypeOfMarks;
[actionSheet showInView:self.view];
[actionSheet release];
}
}

#pragma mark -
#pragma mark UIViewControllerDelegate

- (void)viewDidLoad {
[super viewDidLoad];
[[UIApplication sharedApplication] setStatusBarStyle:UIStatusBarStyleBlackOpaque animated:YES];
[self loadImage:nil];

NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Tink" ofType:@"aiff"] isDirectory:NO];
AudioServicesCreateSystemSoundID((CFURLRef)url, &alertSoundID);
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
return NO;
}

#pragma mark -
#pragma mark UIActionSheetDelegate

- (void)actionSheet:(UIActionSheet *)actionSheet clickedButtonAtIndex:(NSInteger)buttonIndex {
switch(actionSheetAction) {
case ActionSheetToSelectTypeOfSource: {
UIImagePickerControllerSourceType sourceType;
if (buttonIndex == 0) {
sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
} else if(buttonIndex == 1) {
sourceType = UIImagePickerControllerSourceTypeCamera;
} else if(buttonIndex == 2) {
NSString *path = [[NSBundle mainBundle] pathForResource:@"lena" ofType:@"jpg"];
imageView.image = [UIImage imageWithContentsOfFile:path];
break;
} else {
// Cancel
break;
}
if([UIImagePickerController isSourceTypeAvailable:sourceType]) {
UIImagePickerController *picker = [[UIImagePickerController alloc] init];
picker.sourceType = sourceType;
picker.delegate = self;
picker.allowsImageEditing = NO;
[self presentModalViewController:picker animated:YES];
[picker release];
}
break;
}
case ActionSheetToSelectTypeOfMarks: {
if(buttonIndex != 0 && buttonIndex != 1) {
break;
}

UIImage *image = nil;
if(buttonIndex == 1) {
NSString *path = [[NSBundle mainBundle] pathForResource:@"laughing_man" ofType:@"png"];
image = [UIImage imageWithContentsOfFile:path];
}

[self showProgressIndicator:@"Detecting"];
[self performSelectorInBackground:@selector(opencvFaceDetect:) withObject:image];
break;
}
}
actionSheetAction = 0;
}

#pragma mark -
#pragma mark UIImagePickerControllerDelegate

// Caps the longer side at kMaxResolution and bakes the EXIF orientation into
// the pixel data, since OpenCV ignores UIImage's imageOrientation flag.
- (UIImage *)scaleAndRotateImage:(UIImage *)image {
static int kMaxResolution = 640;

CGImageRef imgRef = image.CGImage;
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);

CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake(0, 0, width, height);
if (width > kMaxResolution || height > kMaxResolution) {
CGFloat ratio = width/height;
if (ratio > 1) {
bounds.size.width = kMaxResolution;
bounds.size.height = bounds.size.width / ratio;
} else {
bounds.size.height = kMaxResolution;
bounds.size.width = bounds.size.height * ratio;
}
}

CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;

UIImageOrientation orient = image.imageOrientation;
switch(orient) {
case UIImageOrientationUp:
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored:
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown:
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored:
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRightMirrored:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
case UIImageOrientationRight:
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}

UIGraphicsBeginImageContext(bounds.size);
CGContextRef context = UIGraphicsGetCurrentContext();
if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
} else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM(context, transform);
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
UIImage *imageCopy = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();

return imageCopy;
}

- (void)imagePickerController:(UIImagePickerController *)picker
didFinishPickingImage:(UIImage *)image
editingInfo:(NSDictionary *)editingInfo
{
imageView.image = [self scaleAndRotateImage:image];
[[picker parentViewController] dismissModalViewControllerAnimated:YES];
}

- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker {
[[picker parentViewController] dismissModalViewControllerAnimated:YES];
}
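
To trace the flow: viewDidLoad triggers loadImage:, whose action sheet fills imageView from the photo library, the camera, or the bundled lena.jpg; faceDetect: then asks whether to mark faces with a bounding box or the laughing_man.png overlay, and the chosen detection runs in the background while the progress HUD is up.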

This article was reposted from the 知识天地 blog on 博客园 (cnblogs); the original post is "ios 人脸检测". Please contact the original author before reprinting.
