0
カムビューだけでアプリを構築する必要があります。私のカムが顔を見ていることを検出する必要があります。私が行っている私のカムが開いている間に顔を検出する
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
NSString *path = [[NSBundle mainBundle] pathForResource:@"picture" ofType:@"JPG"];
NSURL *url = [NSURL fileURLWithPath:path];
CIContext *context = [CIContext contextWithOptions:nil];
CIImage *image = [CIImage imageWithContentsOfURL:url];
NSDictionary *options = @{CIDetectorAccuracy: CIDetectorAccuracyHigh};
CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:context options:options];
NSArray *features = [detector featuresInImage:image];
}
以下: 私は画像の上に顔を検出し、何かを構築していますが、私はカムで作業する必要があり、ここに私はこれまでやっていることである
-(void)viewWillAppear:(BOOL)animated{
_session = [[AVCaptureSession alloc] init];
[_session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if([_session canAddInput:deviceInput]){
[_session addInput:deviceInput];
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayer = [[self view] layer];
[rootLayer setMasksToBounds:YES];
CGRect frame = self.frameCapture.frame;
[previewLayer setFrame:frame];
[rootLayer insertSublayer:previewLayer atIndex:0];
[_session startRunning];
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
for(AVMetadataObject *metadataObject in metadataObjects) {
if([metadataObject.type isEqualToString:AVMetadataObjectTypeFace]) {
_faceDetectedLabel.text = @"face detected";
}
}
}
まだ顔を検出していない、何か間違っている?
AVCaptureSessionを検索してください。 –