I am developing an app where I display the data detected from a QR code, but the problem is that the QR code is never detected. I have used this code:
NSDictionary *detectorOptions = @{ CIDetectorAccuracy : CIDetectorAccuracyHigh };
CIDetector *qrDetector = [CIDetector detectorOfType:CIDetectorTypeQRCode context:nil options:detectorOptions];
NSArray *features = [qrDetector featuresInImage:chosenImage.CIImage];
for (CIQRCodeFeature *qrFeature in features)
{
    qrcodedetected = YES;
    self.decodedstr = [NSString stringWithFormat:@"%@", qrFeature.messageString];
    break;
}
I have searched a lot but without success. I took this code from Apple's default sample, and every time I get nil as the result. If anybody has a solution, please share it with me; it would be appreciated. Thanks in advance.
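A likely cause of the nil result, worth checking first: UIImage's CIImage property is nil unless the image was originally created from a CIImage, which is not the case for photos picked from the camera or library, so featuresInImage: receives nil. A minimal sketch of the workaround, assuming chosenImage comes from UIImagePickerController:

// UIImage.CIImage is nil for CGImage-backed images (e.g. from the photo
// library), so build the CIImage explicitly before running the detector.
CIImage *ciImage = chosenImage.CIImage;
if (ciImage == nil && chosenImage.CGImage != NULL) {
    ciImage = [CIImage imageWithCGImage:chosenImage.CGImage];
}
NSArray *features = [qrDetector featuresInImage:ciImage];

Alternatively, the AVFoundation-based approach below avoids CIDetector entirely: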
// initialize
NSError *error = nil;
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
// create session and attach the input and metadata output
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession addInput:input];
AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
dispatch_queue_t dispatchQueue = dispatch_queue_create("QRCodeQueue", NULL);
[captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
[captureMetadataOutput setMetadataObjectTypes:[captureMetadataOutput availableMetadataObjectTypes]];
[self.captureSession addOutput:captureMetadataOutput];
// add camera view layer (kept in a property so the delegate can use it)
self.captureLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
[self.captureLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[self.captureLayer setFrame:self.view.layer.bounds];
[self.view.layer addSublayer:self.captureLayer];
[self.captureSession startRunning];
// delegate method
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    // Specify the barcodes you want to read here:
    NSArray *supportedBarcodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code, AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code, AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];
    for (AVMetadataObject *barcodeMetadata in metadataObjects) {
        for (NSString *supportedBarcode in supportedBarcodeTypes) {
            if ([supportedBarcode isEqualToString:barcodeMetadata.type]) {
                // get the barcode object (AVMetadataMachineReadableCodeObject)
                AVMetadataMachineReadableCodeObject *barcodeObject = (AVMetadataMachineReadableCodeObject *)[self.captureLayer transformedMetadataObjectForMetadataObject:barcodeMetadata];
                NSString *capturedBarcode = [barcodeObject stringValue];
                // do what you want...
            }
        }
    }
}
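One caveat on the snippet above: the delegate was registered on the background dispatchQueue, so any UI update made from the callback must hop back to the main queue. A sketch (resultLabel is a hypothetical outlet):

// Metadata callbacks arrive on dispatchQueue, not the main thread, so UI
// updates need to be dispatched back to the main queue.
dispatch_async(dispatch_get_main_queue(), ^{
    self.resultLabel.text = capturedBarcode; // hypothetical UILabel outlet
});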
I am trying to apply a CIFilter to a live camera feed (and to capture a filtered still image).
I have seen some code on StackOverflow pertaining to this issue, but I haven't been able to get it to work.
My issue is that in the method captureOutput the filter seems to be applied correctly (I put a breakpoint in there and QuickLooked it), but I don't see it in my UIView (I see the original feed, without the filter).
Also, I am not sure which output I should add to the session:
[self.session addOutput: self.stillOutput]; //AVCaptureStillImageOutput
[self.session addOutput: self.videoDataOut]; //AVCaptureVideoDataOutput
Nor am I sure which of those I should iterate through when looking for a connection (in findVideoConnection). I am totally confused.
Here's some code:
viewDidLoad
-(void)viewDidLoad {
    [super viewDidLoad];
    self.shutterButton.userInteractionEnabled = YES;
    self.context = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @(YES)}];
    self.filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    [self.filter setValue:@15 forKey:kCIInputRadiusKey];
}
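One thing to flag in viewDidLoad: kCIContextUseSoftwareRenderer forces CPU rendering, which is generally too slow for per-frame video filtering. The GPU-backed context, which is the default, is usually preferred; a sketch:

// The default (GPU-backed) CIContext is much faster for per-frame rendering
// than the software renderer.
self.context = [CIContext contextWithOptions:nil];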
prepare session
-(void)prepareSessionWithDevicePosition:(AVCaptureDevicePosition)position {
    AVCaptureDevice *device = [self videoDeviceWithPosition:position];
    self.currentPosition = position;
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetPhoto;
    NSError *error = nil;
    self.deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if ([self.session canAddInput:self.deviceInput]) {
        [self.session addInput:self.deviceInput];
    }
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    self.videoDataOut = [AVCaptureVideoDataOutput new];
    [self.videoDataOut setSampleBufferDelegate:self queue:dispatch_queue_create("bufferQueue", DISPATCH_QUEUE_SERIAL)];
    self.videoDataOut.alwaysDiscardsLateVideoFrames = YES;
    CALayer *rootLayer = [[self view] layer];
    rootLayer.masksToBounds = YES;
    CGRect frame = self.previewView.frame;
    previewLayer.frame = frame;
    [rootLayer insertSublayer:previewLayer atIndex:1];
    self.stillOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    self.stillOutput.outputSettings = outputSettings;
    [self.session addOutput:self.stillOutput];
    //tried [self.session addOutput: self.videoDataOut];
    //and didn't work (filtered image didn't show, and also couldn't take pictures)
    [self findVideoConnection];
}
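To answer the "which output" question directly: a session can usually carry both outputs at once, and AVCaptureVideoDataOutput is the one that drives captureOutput:didOutputSampleBuffer:. A sketch replacing the single addOutput: call above, with capability checks (same property names as in the question):

// videoDataOut feeds the per-frame delegate; stillOutput serves still capture.
// Both can normally be attached to the same session.
if ([self.session canAddOutput:self.videoDataOut]) {
    [self.session addOutput:self.videoDataOut];
}
if ([self.session canAddOutput:self.stillOutput]) {
    [self.session addOutput:self.stillOutput];
}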
find video connection
-(void)findVideoConnection {
    for (AVCaptureConnection *connection in self.stillOutput.connections) {
        //also tried self.videoDataOut.connections
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqualToString:AVMediaTypeVideo]) {
                self.videoConnection = connection;
                break;
            }
        }
        if (self.videoConnection != nil) {
            break;
        }
    }
}
capture output, apply filter and put it in the CALayer
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // turn buffer into an image we can manipulate
    CIImage *result = [CIImage imageWithCVPixelBuffer:imageBuffer];
    // filter
    [self.filter setValue:result forKey:kCIInputImageKey];
    // render image
    CGImageRef blurredImage = [self.context createCGImage:self.filter.outputImage fromRect:result.extent];
    UIImage *img = [UIImage imageWithCGImage:blurredImage];
    //Did this to check whether the image was actually filtered.
    //And surprisingly it was.
    dispatch_async(dispatch_get_main_queue(), ^{
        //The image present in my UIView is for some reason not blurred.
        self.previewView.layer.contents = (__bridge id)blurredImage;
        CGImageRelease(blurredImage);
    });
}
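A plausible explanation for the unfiltered view, given the code above: prepareSessionWithDevicePosition: inserts an AVCaptureVideoPreviewLayer showing the raw feed over the same region, so it sits on top of the filtered contents assigned to previewView.layer. A sketch of the change, under that assumption:

// Skip creating/inserting the AVCaptureVideoPreviewLayer entirely; the raw
// preview otherwise covers the filtered frames. previewView's layer shows
// whatever captureOutput: assigns to its contents.
self.previewView.layer.contentsGravity = kCAGravityResizeAspectFill;
// (and delete the previewLayer creation and insertSublayer: call in
// prepareSessionWithDevicePosition:)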
I am using the iOS 7 AVFoundation framework for barcode scanning. It works fine for me, but for some barcodes it gives me the wrong result.
I am attaching the code and the barcode that reproduce the issue. Please take a look and help me find the problem.
Code Sample:
ViewController.h Class-
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface ViewController : UIViewController <AVCaptureMetadataOutputObjectsDelegate>
{
    AVCaptureSession *_session;
    AVCaptureDevice *_device;
    AVCaptureDeviceInput *_input;
    AVCaptureMetadataOutput *_output;
    AVCaptureVideoPreviewLayer *_prevLayer;
    UIView *_highlightView;
    IBOutlet UIView *_viewBg;
    IBOutlet UIButton *_btnScanning;
}
@end
ViewController.m Class Methods -
- (void)setScanningVideoOrientation
{
    // Note: [UIDevice currentDevice].orientation returns UIDeviceOrientation
    // values, so compare against those rather than UIInterfaceOrientation.
    // Device landscape-left corresponds to video landscape-right and vice versa.
    UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
    if (orientation == UIDeviceOrientationPortrait)
    {
        _prevLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }
    else if (orientation == UIDeviceOrientationPortraitUpsideDown)
    {
        _prevLayer.connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
    }
    else if (orientation == UIDeviceOrientationLandscapeLeft)
    {
        _prevLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
    }
    else if (orientation == UIDeviceOrientationLandscapeRight)
    {
        _prevLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
    }
}
- (void)startScanning
{
    // Create session. -----------------------------------------------------
    _session = [[AVCaptureSession alloc] init];
    _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    _input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
    if (_input)
    {
        [_session addInput:_input];
        _output = [[AVCaptureMetadataOutput alloc] init];
        [_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
        [_session addOutput:_output];
        _output.metadataObjectTypes = [_output availableMetadataObjectTypes];
        _prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
        _prevLayer.frame = _viewBg.frame;
        _prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        [self setScanningVideoOrientation];
        [self.view.layer addSublayer:_prevLayer];
        [_session startRunning];
    }
    else
    {
        NSLog(@"Error: %@", error);
    }
    //----------------------------------------------------------------------
}
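A refinement that often reduces misreads, though it is not from the original post: register only the symbologies you actually need rather than availableMetadataObjectTypes, so the detector isn't trying every format on each frame. A sketch:

// Restrict detection to the symbologies this screen actually scans.
_output.metadataObjectTypes = @[AVMetadataObjectTypeEAN13Code,
                                AVMetadataObjectTypeUPCECode,
                                AVMetadataObjectTypeQRCode];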
In this delegate method I am getting the wrong barcode string. Please take a look at 'detectionString'.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    NSString *detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
                              AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
                              AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];
    for (AVMetadataObject *metadata in metadataObjects) {
        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type])
            {
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
                highlightViewRect = barCodeObject.bounds;
                detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }
        if (detectionString != nil)
        {
            NSLog(@"ViewController-->captureOutput-->detectionString = %@", detectionString);
            [self stopScanning];
            break;
        }
    }
    barCodeObject = nil;
    detectionString = nil;
    barCodeTypes = nil;
}
Barcode Image -
I am getting the result 0649954006029,
but it should be 649954006029.
For some other barcodes, I am seeing an 'l' before the actual barcode string.
I hope this helps you identify the problem.
Thanks.
The reason you're getting a 13-digit code is that you're scanning a UPC-A code (649954...), and iOS doesn't have a separate category for UPC-A codes.
UPC-A codes come in scanned as EAN-13 codes, which are 13 digits long, so they get a leading 0.
From AVFoundation/AVMetadata.h:
/*!
 @constant AVMetadataObjectTypeEAN13Code
 @abstract
    An identifier for an instance of AVMetadataMachineReadableCodeObject having a type AVMetadataObjectTypeEAN13Code.
 @discussion
    AVMetadataMachineReadableCodeObject objects generated from EAN-13 (including UPC-A) codes return this constant as their type. */
AVF_EXPORT NSString *const AVMetadataObjectTypeEAN13Code NS_AVAILABLE(NA, 7_0);
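If the 12-digit UPC-A value is needed, the usual post-processing (a sketch, not part of the quoted header) is to strip the leading zero from EAN-13 results inside the delegate:

// In the metadata delegate: UPC-A arrives as an EAN-13 string with a leading
// "0"; drop it to recover the original 12-digit code.
if ([metadata.type isEqualToString:AVMetadataObjectTypeEAN13Code] &&
    detectionString.length == 13 && [detectionString hasPrefix:@"0"]) {
    detectionString = [detectionString substringFromIndex:1];
}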
I'm making an app that has a camera preview in a view. Inside this view I want to draw the camera data and also another view that shows a little rectangle when data is captured; for example, in a barcode-scanning scenario, the camera is shown in a view, and when a barcode is found, a rectangle is drawn to show that a barcode has been scanned.
My current view hierarchy is the following:
View
{
    -UIView cameraHolder
    {
        -UIView highlightView
    }
}
I managed to get the camera showing and scanning things, but the highlight view is not being shown. Why is this happening?
This is the code for initializing the highlight View:
-(void)setUpHighlightView {
    self.highlightView = [[UIView alloc] init];
    self.highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin;
    self.highlightView.layer.borderColor = [UIColor greenColor].CGColor;
    self.highlightView.layer.borderWidth = 3;
}
and this is the code for when data is captured:
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    NSString *detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
                              AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
                              AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];
    for (AVMetadataObject *metadata in metadataObjects) {
        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type]) {
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
                highlightViewRect = barCodeObject.bounds;
                detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }
    }
    if (detectionString != nil) {
        [self.itemIdTextField setText:detectionString];
    } else {
        //NSLog(@"Got Nothing");
    }
    NSLog(@"Position: [%f,%f][%f,%f]", highlightViewRect.origin.x, highlightViewRect.origin.y, highlightViewRect.size.height, highlightViewRect.size.width);
    self.highlightView.frame = highlightViewRect;
}
Also the code that initializes the camera:
-(void)setupBarCodeScanner {
    [self setUpHighlightView];
    session = [[AVCaptureSession alloc] init];
    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (input) {
        [session addInput:input];
    } else {
        [self showAlertDialogWithTitle:@"Error" andMessage:@"There was an error while accessing your camera"];
        NSLog(@"Error: %@", error);
    }
    output = [[AVCaptureMetadataOutput alloc] init];
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [session addOutput:output];
    output.metadataObjectTypes = [output availableMetadataObjectTypes];
    prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
    prevLayer.frame = self.cameraHolder.bounds;
    prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.cameraHolder.layer addSublayer:prevLayer];
}
Thank you very much!
Add self.highlightView to self.view at the end of captureOutput:
[self.view addSubview:self.highlightView];
It doesn't look like you're adding that view anywhere. You create it and set its frame, but I don't see where you're adding it to the view hierarchy.
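Given the hierarchy described in the question, an equivalent sketch is to attach the view once at setup time to cameraHolder and keep it above the preview layer:

// Attach the highlight view once, during setup, so the frame updates made in
// captureOutput: become visible; keep it above the preview sublayer.
[self.cameraHolder addSubview:self.highlightView];
[self.cameraHolder bringSubviewToFront:self.highlightView];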
I am using AVFoundation and AVCaptureMetadataOutput to scan a QR barcode in iOS 7; I present a view controller which allows the user to scan a barcode. It is working fine, i.e. a barcode is scanned and I can output the barcode string to the console.
But it keeps scanning over and over again (see screenshot). What I want it to do is scan the barcode just once and then dismiss the view controller.
Here is my code for the delegate method:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    NSString *detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
                              AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
                              AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];
    for (AVMetadataObject *metadata in metadataObjects) {
        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type])
            {
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[self.preview transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
                highlightViewRect = barCodeObject.bounds;
                detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }
        if (detectionString != nil)
        {
            NSLog(@"Barcode: %@", detectionString);
            break;
        }
        else
            NSLog(@"None");
    }
    self.highlightView.frame = highlightViewRect;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    ....
    self.highlightView.frame = highlightViewRect;
    [_session stopRunning]; // <-- I added this and it worked for me.
}
Here is a good link that might help.
You need to stop the capture session using captureSession.stopRunning() (in Objective-C, [captureSession stopRunning]) once the barcode is scanned; otherwise it will keep scanning the code even if videoPreviewLayer is stopped.
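Putting the pieces together for the original "scan once, then dismiss" requirement, a sketch of the end of the delegate method (the _session ivar name follows the answer above):

if (detectionString != nil)
{
    NSLog(@"Barcode: %@", detectionString);
    [_session stopRunning]; // prevent repeated scans
    // Dismiss on the main queue; the delegate may run on a background queue.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self dismissViewControllerAnimated:YES completion:nil];
    });
    break;
}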
I am trying to write an app that involves both the front and rear cameras and switching between them. As far as I understand, in the addVideoInput method I have to change the IDs in
AVCaptureDevice *videoDevice = [AVCaptureDevice deviceWithUniqueID:(NSString *)deviceUniqueID];
But which NSStrings are those IDs?
Or, if it should be done another way, please give a suggestion.
Thank you for your help!
OK, I have managed to find a solution. I don't know if it's right or wrong; it was taken from http://www.bunnyhero.org/2010/08/15/turn-your-iphone-into-a-vampire-with-avfoundation-and-ios-4/
Just use
AVCaptureDevice *captureDevice = [self frontFacingCameraIfAvailable];
where frontFacingCameraIfAvailable is:
-(AVCaptureDevice *)frontFacingCameraIfAvailable
{
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *captureDevice = nil;
    for (AVCaptureDevice *device in videoDevices)
    {
        if (device.position == AVCaptureDevicePositionFront)
        {
            captureDevice = device;
            break;
        }
    }
    // couldn't find one on the front, so just get the default video device.
    if (!captureDevice)
    {
        captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    return captureDevice;
}
You can usually get the front camera using
AVCaptureDevice *frontalCamera = [AVCaptureDevice deviceWithUniqueID:@"com.apple.avfoundation.avcapturedevice.built-in_video:1"];
But I would by all means rather use your accepted method; hard-coding the unique ID like this is not safe at all.
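For the switching half of the question, the usual pattern (a sketch; the session and videoInput property names are assumed, not from the original post) swaps inputs inside a configuration block:

- (void)switchToCameraAtPosition:(AVCaptureDevicePosition)position
{
    AVCaptureDevice *newDevice = (position == AVCaptureDevicePositionFront)
        ? [self frontFacingCameraIfAvailable]
        : [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newDevice error:&error];
    if (!newInput) {
        NSLog(@"Error: %@", error);
        return;
    }
    [self.session beginConfiguration];
    [self.session removeInput:self.videoInput]; // the currently attached input
    if ([self.session canAddInput:newInput]) {
        [self.session addInput:newInput];
        self.videoInput = newInput;
    }
    [self.session commitConfiguration];
}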
I was facing an issue scanning QR codes with the front camera. I looked at many resources and libraries, but no library fulfilled my requirement, as I needed a customised UI for the scanner, and the code snippets on the internet for scanning QR codes were deprecated. By debugging and checking the device type, I set the camera position and it worked. I am posting this as an answer so that it helps other peers looking for the same thing.
- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    [self setupScanner];
    [self openScanner:nil];
}

#pragma mark - Actions
- (IBAction)openScanner:(id)sender {
    if ([UIImagePickerController isCameraDeviceAvailable:UIImagePickerControllerCameraDeviceFront]) {
        [self.session startRunning];
    }
}

- (IBAction)stopScanner:(id)sender {
    [self.session stopRunning];
}
- (void)setupScanner {
#if !(TARGET_OS_SIMULATOR)
    //self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    self.device = [self frontFacingCameraIfAvailable];
    self.input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:nil];
    self.session = [[AVCaptureSession alloc] init];
    self.output = [[AVCaptureMetadataOutput alloc] init];
    if ([self.session canAddOutput:self.output]) {
        [self.session addOutput:self.output];
    }
    if ([self.session canAddInput:self.input]) {
        [self.session addInput:self.input];
    }
    [self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [self.output setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode]];
    self.preview = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.preview.frame = CGRectMake(0, 0, CGRectGetWidth(self.pLayer.frame), CGRectGetHeight(self.pLayer.frame));
    AVCaptureConnection *con = self.preview.connection;
    con.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
    // pLayer is a UIView outlet on which the scanner fits or occupies its area to scan the QR code
    [self.pLayer.layer insertSublayer:self.preview atIndex:0];
#endif
}
#pragma mark - AVCaptureMetadataOutputObjectsDelegate
- (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    NSString *detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
                              AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
                              AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];
    for (AVMetadataObject *metadata in metadataObjects) {
        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type])
            {
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[self.preview transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
                highlightViewRect = barCodeObject.bounds;
                detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }
        if (detectionString != nil) {
            self.codeLabel.text = detectionString;
            [self stopScanner:nil];
            // Do your work with the QR code string here...
            break;
        }
        else
            self.codeLabel.text = @"CODE";
    }
}
#pragma mark - Capture Device
-(AVCaptureDevice *)frontFacingCameraIfAvailable {
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront];
    NSLog(@"capture device %@", captureDevice.description);
    NSLog(@"device type %@", captureDevice.deviceType);
    NSLog(@"unique Id: %@", captureDevice.uniqueID);
    //com.apple.avfoundation.avcapturedevice.built-in_video:1
    //Device Position: 2
    NSLog(@"frontFacingCameraIfAvailable-> Device Position: %ld", (long)captureDevice.position);
    return captureDevice;
}
-(AVCaptureDevice *)backFacingCameraIfAvailable {
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSLog(@"capture device %@", captureDevice.description);
    NSLog(@"device type %@", captureDevice.deviceType);
    NSLog(@"unique Id: %@", captureDevice.uniqueID);
    NSLog(@"backFacingCameraIfAvailable-> Device Position: %ld", (long)captureDevice.position);
    return captureDevice;
}
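One final note beyond the code above: none of this delivers frames unless the app has camera permission (and, on iOS 10 and later, an NSCameraUsageDescription entry in Info.plist). A sketch of the pre-flight check:

// Request camera access before starting the session; without it the preview
// stays black and no metadata objects are delivered.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (granted) {
            [self.session startRunning];
        } else {
            NSLog(@"Camera access denied");
        }
    });
}];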