
iOS Face Detection

2018-02-01  li_礼光

Here is what you get if you use iOS's native CIDetector for face detection.

/** A face feature found by a CIDetector.
    All positions are relative to the original image. */
NS_CLASS_AVAILABLE(10_7, 5_0)
@interface CIFaceFeature : CIFeature
{
    CGRect bounds;
    BOOL hasLeftEyePosition;
    CGPoint leftEyePosition;
    BOOL hasRightEyePosition;
    CGPoint rightEyePosition;
    BOOL hasMouthPosition;
    CGPoint mouthPosition;
    
    
    BOOL hasTrackingID;
    int trackingID;
    BOOL hasTrackingFrameCount;
    int trackingFrameCount;
    
    BOOL hasFaceAngle;
    float faceAngle;
    
    BOOL hasSmile;
    BOOL leftEyeClosed;
    BOOL rightEyeClosed;
}

/** Coordinates of the facial landmarks.

    Note that the "left eye" is the eye on the left side of the face
    from the observer's point of view; it is not the left eye from
    the subject's point of view. */

@property (readonly, assign) CGRect bounds;              // rectangle giving the face's position and size in image coordinates
@property (readonly, assign) BOOL hasLeftEyePosition;    // whether the detector found the face's left eye
@property (readonly, assign) CGPoint leftEyePosition;    // coordinates of the left eye
@property (readonly, assign) BOOL hasRightEyePosition;   // whether the detector found the face's right eye
@property (readonly, assign) CGPoint rightEyePosition;   // coordinates of the right eye
@property (readonly, assign) BOOL hasMouthPosition;      // whether the detector found the face's mouth
@property (readonly, assign) CGPoint mouthPosition;      // coordinates of the mouth

@property (readonly, assign) BOOL hasTrackingID;         // whether the face object has a tracking ID
@property (readonly, assign) int trackingID;
@property (readonly, assign) BOOL hasTrackingFrameCount; // whether the face object has a tracking frame count
@property (readonly, assign) int trackingFrameCount;     // the tracking frame count

@property (readonly, assign) BOOL hasFaceAngle;  // whether rotation information is available for this face
@property (readonly, assign) float faceAngle;    // rotation measured counterclockwise in degrees; zero means the line drawn between the eyes is horizontal relative to the image orientation

@property (readonly, assign) BOOL hasSmile;             // whether the face is smiling
@property (readonly, assign) BOOL leftEyeClosed;        // whether the left eye is closed
@property (readonly, assign) BOOL rightEyeClosed;       // whether the right eye is closed

@end
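
Each position or angle value above is paired with a has* flag, and the value is only meaningful when that flag is YES. A minimal sketch of reading the landmarks safely (the function name is mine, just for illustration):

#import <UIKit/UIKit.h>
#import <CoreImage/CoreImage.h>

// Check each has* flag before reading its paired value; an unset
// value is meaningless rather than zeroed in any useful way.
static void LogFaceLandmarks(CIFaceFeature *face) {
    if (face.hasLeftEyePosition) {
        NSLog(@"left eye  : %@", NSStringFromCGPoint(face.leftEyePosition));
    }
    if (face.hasMouthPosition) {
        NSLog(@"mouth     : %@", NSStringFromCGPoint(face.mouthPosition));
    }
    if (face.hasFaceAngle) {
        NSLog(@"face angle: %f degrees", face.faceAngle);
    }
}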

About trackingID
Core Image provides a tracking identifier for faces detected in a video stream, which you can use to tell whether a CIFaceFeature object detected in one video frame is the same face that was detected in a previous frame.
The identifier persists only while a face remains in the frame, and it is not tied to a particular person.
In other words, if a face moves out of the video frame and returns later, it is assigned a new ID. (Core Image detects faces; it does not recognize specific faces.)

This is a bit abstract, and I don't fully understand it yet.
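
A rough sketch may make it more concrete. This assumes the detector is created once with the CIDetectorTracking option and reused across frames; the helper names are mine:

#import <CoreImage/CoreImage.h>

// Create a single detector with tracking enabled and reuse it for
// every frame; a fresh per-frame detector cannot carry tracking state.
static CIDetector *SharedTrackingDetector(void) {
    static CIDetector *detector;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                      context:nil
                                      options:@{CIDetectorAccuracy: CIDetectorAccuracyLow,
                                                CIDetectorTracking: @YES}];
    });
    return detector;
}

// Call once per video frame with a CIImage built from the sample buffer.
static void LogTrackedFaces(CIImage *frameImage) {
    for (CIFaceFeature *face in [SharedTrackingDetector() featuresInImage:frameImage]) {
        if (face.hasTrackingID) {
            // The same face keeps the same trackingID while it stays in
            // frame; if it leaves and comes back, it gets a new ID.
            NSLog(@"face %d, tracked for %d frames",
                  face.trackingID, face.trackingFrameCount);
        }
    }
}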

Testing

Logged output:

 bounds                  : {{237.5, 115}, {435, 435}}
 hasLeftEyePosition      : 1
 leftEyePosition         : {381.25, 405}
 hasRightEyePosition     : 1
 rightEyePosition        : {550, 388.75}
 hasMouthPosition        : 1
 mouthPosition           : {450, 225}
 hasTrackingID           : 0
 trackingID              : 0
 hasTrackingFrameCount   : 0
 trackingFrameCount      : 0
 hasFaceAngle            : 1
 faceAngle               : 3.000000
 hasSmile                : 0
 leftEyeClosed           : 0
 rightEyeClosed          : 0

Findings

The hasSmile, leftEyeClosed, and rightEyeClosed results are all inaccurate.

Overall, only four properties have real practical value: bounds, leftEyePosition, rightEyePosition, and mouthPosition. The usefulness of everything else is far less evident.

In other words:

usable, but not recommended.
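
One practical note on using bounds and the landmark points: Core Image places the coordinate origin at the bottom-left of the image, while UIKit puts it at the top-left, so the rect must be flipped before drawing it over a view. A minimal sketch (the helper name is mine):

#import <UIKit/UIKit.h>

// Convert a face rect from Core Image coordinates (origin bottom-left)
// to UIKit coordinates (origin top-left) for the image it came from.
static CGRect FaceRectInUIKitCoordinates(CGRect ciBounds, CGSize imageSize) {
    CGRect rect = ciBounds;
    rect.origin.y = imageSize.height - CGRectGetMaxY(ciBounds);
    return rect;
}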

Implementation approach

(Conform to the AVCaptureVideoDataOutputSampleBufferDelegate protocol and implement:)
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection;

Helper methods:
// Create a UIImage from the sample buffer data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // CIImage -> CGImageRef -> UIImage
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);  // get the frame's pixel buffer
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];            // wrap it in a CIImage
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];           // create a context (expensive; cache it in real use)
    CGImageRef cgImageRef = [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer))];
    UIImage *result = [[UIImage alloc] initWithCGImage:cgImageRef scale:1.0 orientation:UIImageOrientationLeftMirrored];  // wrap in a UIImage (this orientation suits the front camera)
    CGImageRelease(cgImageRef);  // release the CGImage (not the context)
    return result;
}
/** Detect faces in an image */
- (NSArray *)detectFaceWithImage:(UIImage *)faceImage
{
    // CIDetectorAccuracyHigh is used here; for real-time detection,
    // use CIDetectorAccuracyLow instead, which is faster
    CIDetector *faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                                  context:nil
                                                  options:@{CIDetectorAccuracy: CIDetectorAccuracyHigh}];
    CIImage *ciimg = [CIImage imageWithCGImage:faceImage.CGImage];
    NSArray *features = [faceDetector featuresInImage:ciimg];

    CIFaceFeature *faceFeature = [features firstObject];
    if (faceFeature) {
        NSLog(@"bounds                  : %@",NSStringFromCGRect(faceFeature.bounds));
        NSLog(@"hasLeftEyePosition      : %d",faceFeature.hasLeftEyePosition);
        NSLog(@"leftEyePosition         : %@",NSStringFromCGPoint(faceFeature.leftEyePosition));
        NSLog(@"hasRightEyePosition     : %d",faceFeature.hasRightEyePosition);
        NSLog(@"rightEyePosition        : %@",NSStringFromCGPoint(faceFeature.rightEyePosition));
        NSLog(@"hasMouthPosition        : %d",faceFeature.hasMouthPosition);
        NSLog(@"mouthPosition           : %@",NSStringFromCGPoint(faceFeature.mouthPosition));
        NSLog(@"hasTrackingID           : %d",faceFeature.hasTrackingID);
        NSLog(@"trackingID              : %d",faceFeature.trackingID);
        NSLog(@"hasTrackingFrameCount   : %d",faceFeature.hasTrackingFrameCount);
        NSLog(@"trackingFrameCount      : %d",faceFeature.trackingFrameCount);
        NSLog(@"hasFaceAngle            : %d",faceFeature.hasFaceAngle);
        NSLog(@"faceAngle               : %f",faceFeature.faceAngle);
        NSLog(@"hasSmile                : %d",faceFeature.hasSmile);
        NSLog(@"leftEyeClosed           : %d",faceFeature.leftEyeClosed);
        NSLog(@"rightEyeClosed          : %d",faceFeature.rightEyeClosed);
        NSLog(@"\n\n\n");
    }
    
    return features;
}
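
For completeness, here is a rough sketch of the capture-side glue that feeds these two helpers. setupCaptureSession is my name for the method; the sketch assumes self conforms to AVCaptureVideoDataOutputSampleBufferDelegate as noted above:

#import <AVFoundation/AVFoundation.h>

// An AVCaptureSession delivers camera frames to the delegate queue;
// each frame is converted to a UIImage and run through the detector.
// In real code, keep the session in a strong property so it is not
// deallocated as soon as this method returns.
- (void)setupCaptureSession {
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
    if (input && [session canAddInput:input]) {
        [session addInput:input];
    }

    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:self
                              queue:dispatch_queue_create("face.detect.queue", DISPATCH_QUEUE_SERIAL)];
    if ([session canAddOutput:output]) {
        [session addOutput:output];
    }
    [session startRunning];
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    UIImage *frame = [self imageFromSampleBuffer:sampleBuffer];
    [self detectFaceWithImage:frame];
}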

Some thoughts on face recognition: