【iOS】A Simple Face Tracking Demo


1. Set up the views and the face detector
sessionView – container view for the camera preview
self.detector – the facial feature detector

- (void)viewDidLoad {
    [super viewDidLoad];

    self.sessionView = [[UIView alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:self.sessionView];

    self.faceView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"a"]];
    self.faceView.frame = CGRectZero;
    [self.view addSubview:self.faceView];

    self.leftEyeView = [[UIView alloc] init];
    self.leftEyeView.alpha = 0.4;
    self.leftEyeView.backgroundColor = [UIColor greenColor];
    [self.view addSubview:self.leftEyeView];

    self.rightEyeView = [[UIView alloc] init];
    self.rightEyeView.alpha = 0.4;
    self.rightEyeView.backgroundColor = [UIColor yellowColor];
    [self.view addSubview:self.rightEyeView];

    self.mouthView = [[UIView alloc] init];
    self.mouthView.alpha = 0.4;
    self.mouthView.backgroundColor = [UIColor redColor];
    [self.view addSubview:self.mouthView];

    self.context = [CIContext context];
    self.detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                       context:self.context
                                       options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh}];
}
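For reference, the snippet above assumes the view controller declares properties along these lines. The names come from the code itself, but the class name ViewController and the exact declarations are not shown in the original, so treat this as a sketch:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>

@interface ViewController () // hypothetical class name

// Overlay views driven by the detector
@property (nonatomic, strong) UIView *sessionView;   // container for the camera preview
@property (nonatomic, strong) UIImageView *faceView; // image placed over the detected face
@property (nonatomic, strong) UIView *leftEyeView;
@property (nonatomic, strong) UIView *rightEyeView;
@property (nonatomic, strong) UIView *mouthView;

// Core Image face detection
@property (nonatomic, strong) CIContext *context;
@property (nonatomic, strong) CIDetector *detector;

// Capture pipeline
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *layer;

@end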

2. Tap anywhere on the screen to open the camera

- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    // To avoid opening the camera twice, stop the existing session first
    [self.session stopRunning];
    self.session = [[AVCaptureSession alloc] init];
    // Remove the old preview layer
    [self.layer removeFromSuperlayer];

    NSError *error;

    // Device
    NSArray *devices = [AVCaptureDevice devices];
    NSLog(@"devices = %@", devices);
    AVCaptureDevice *defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Input
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:defaultDevice error:&error];
    [self.session addInput:input];

    // Output
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:(id)self queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)];
    [self.session addOutput:output];

    // Start capturing
    [self.session startRunning];

    // Add the camera preview layer to the container view
    self.layer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.layer.frame = self.view.bounds;
    [self.sessionView.layer addSublayer:self.layer];
}
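One thing this step assumes is that the app already has camera permission. On iOS 10 and later the Info.plist needs an NSCameraUsageDescription entry, and you can request access explicitly before building the session; a minimal sketch:

// Minimal sketch: ask for camera access before setting up the capture session.
// Requires an NSCameraUsageDescription entry in Info.plist.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                         completionHandler:^(BOOL granted) {
    if (!granted) {
        NSLog(@"Camera access denied");
        return;
    }
    // Safe to configure the AVCaptureSession here
    // (dispatch back to the main queue for any UI work).
}];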

3. Facial feature tracking

// AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
//    printf("%s\n", __func__);
    // 1. Grab the current frame as a CIImage
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *image = [[CIImage alloc] initWithCVImageBuffer:imageBuffer];

    CGFloat imageW = image.extent.size.width;
    CGFloat imageH = image.extent.size.height;

    // 2. Run face feature detection on the frame
    CIFeature *feature = [[self.detector featuresInImage:image] lastObject];
    if (feature) {
        if (self.leftEyeView.frame.size.width == 0) {
            self.leftEyeView.frame = CGRectMake(0, 0, 20, 20);
        }
        if (self.rightEyeView.frame.size.width == 0) {
            self.rightEyeView.frame = CGRectMake(0, 0, 20, 20);
        }
        if (self.mouthView.frame.size.width == 0) {
            self.mouthView.frame = CGRectMake(0, 0, 20, 20);
        }
        NSLog(@"find");
        CIFaceFeature *face = (CIFaceFeature *)feature;
        // 3. Map the detected features from image coordinates to the preview view on the main queue
        dispatch_async(dispatch_get_main_queue(), ^{
            self.faceView.frame = CGRectMake(face.bounds.origin.y / imageW * self.sessionView.frame.size.height,
                                             face.bounds.origin.x / imageH * self.sessionView.frame.size.width,
                                             face.bounds.size.width / imageH * self.sessionView.frame.size.width,
                                             face.bounds.size.height / imageW * self.sessionView.frame.size.height);

            self.leftEyeView.center = CGPointMake(face.leftEyePosition.y / imageW * self.sessionView.frame.size.height,
                                                  face.leftEyePosition.x / imageH * self.sessionView.frame.size.width);

            self.rightEyeView.center = CGPointMake(face.rightEyePosition.y / imageW * self.sessionView.frame.size.height,
                                                   face.rightEyePosition.x / imageH * self.sessionView.frame.size.width);

            self.mouthView.center = CGPointMake(face.mouthPosition.y / imageW * self.sessionView.frame.size.height,
                                                face.mouthPosition.x / imageH * self.sessionView.frame.size.width);
        });
    }
}
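The conversion above swaps x and y because the frames delivered by the capture output are in the sensor's landscape orientation while the overlay views are laid out in screen coordinates; that is also why the phone has to be held in landscape (see the note below). A hypothetical helper that factors out the same mapping could look like this:

// Hypothetical helper: maps a point from the CIImage (landscape camera buffer)
// coordinate space into the preview view's coordinate space, using the same
// axis swap as the delegate callback above.
- (CGPoint)previewPointForImagePoint:(CGPoint)point imageSize:(CGSize)imageSize {
    CGSize viewSize = self.sessionView.frame.size;
    return CGPointMake(point.y / imageSize.width  * viewSize.height,
                       point.x / imageSize.height * viewSize.width);
}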

And that's it.

Remember to hold the phone in landscape, with the Home button on the right.
Demo repository: MagicBlind/Face-Detector
