Xcode saying it cannot find ZBarSDK.h, however it is in my project - iOS

The build error I am getting is the following:
Lexical or preprocessor issue: 'ZBarSDK.h' file not found
I can see the file under my project; what do I need to do to get the build to find it? I've tried dragging it into my main project folder.

Forget ZBarSDK; this functionality is already included in the AVFoundation framework. Try this on iOS 7 and newer.
To capture the QR code:
- (IBAction)Capture:(id)sender {
    isFirst = YES;
    _session = [[AVCaptureSession alloc] init];
    _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;

    _input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
    if (_input) {
        [_session addInput:_input];
    } else {
        NSLog(@"Error: %@", error);
    }

    _output = [[AVCaptureMetadataOutput alloc] init];
    [_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [_session addOutput:_output];
    _output.metadataObjectTypes = [_output availableMetadataObjectTypes];

    _prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _prevLayer.frame = self.view.bounds;
    _prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:_prevLayer];

    [_session startRunning];
}
To read the code, use its delegate method:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    NSString *detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
                              AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
                              AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];

    for (AVMetadataObject *metadata in metadataObjects) {
        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type]) {
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:metadata];
                highlightViewRect = barCodeObject.bounds;
                detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }

        if (detectionString != nil) {
            if (isFirst) {
                isFirst = NO;
                _label.text = detectionString;
                break;
            }
        } else {
            _label.text = @"(none)";
        }
    }

    _highlightView.frame = highlightViewRect;
}
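For completeness, a minimal sketch (not shown in the original answer) of the view controller interface these snippets assume; the class must adopt AVCaptureMetadataOutputObjectsDelegate for the delegate method above to be called, and the ivar names mirror the code:
// Hypothetical host class; the name ScannerViewController is illustrative.
@interface ScannerViewController : UIViewController <AVCaptureMetadataOutputObjectsDelegate>
{
    AVCaptureSession *_session;
    AVCaptureDevice *_device;
    AVCaptureDeviceInput *_input;
    AVCaptureMetadataOutput *_output;
    AVCaptureVideoPreviewLayer *_prevLayer;
    UIView *_highlightView;
    UILabel *_label;
    BOOL isFirst;
}
@end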

Related

AVCaptureMovieFileOutput Orientation In Landscape

My app uses the front-facing camera to record video. I have the preview layer successfully set up to show it in landscape-right mode, which is the only way the app will run. How do I make sure the movie output displays correctly?
session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
session.sessionPreset = AVCaptureSessionPresetMedium;

CALayer *viewLayer = self.vImagePreview.layer;
NSLog(@"viewLayer = %@", viewLayer);

self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.captureVideoPreviewLayer setCornerRadius:14];
[self.captureVideoPreviewLayer setBorderWidth:3.0];
[self.captureVideoPreviewLayer setBorderColor:[[UIColor whiteColor] CGColor]];
self.captureVideoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;

[[vImagePreview layer] setCornerRadius:14];
[[vImagePreview layer] setBorderWidth:3.0];
[[vImagePreview layer] setBorderColor:[[UIColor whiteColor] CGColor]];
[self.vImagePreview.layer addSublayer:self.captureVideoPreviewLayer];

AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
    // Handle the error appropriately.
    NSLog(@"ERROR: trying to open camera: %@", error);
}

AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error2 = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error2];

AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
NSString *archives = [documentsDirectoryPath stringByAppendingPathComponent:@"archives"];
NSString *editedfilename = [[selectedCountry lastPathComponent] stringByDeletingPathExtension];
NSString *datestring = [[editedfilename stringByAppendingString:@" "] stringByAppendingString:currentTime];
NSLog(@"%@", datestring);
NSString *outputpathofmovie = [[archives stringByAppendingPathComponent:datestring] stringByAppendingString:@".mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];

[session addInput:input];
[session addInput:audioInput];
[session addOutput:movieFileOutput];
[session commitConfiguration];
[session startRunning];

NSUserDefaults *userDefaults = [NSUserDefaults standardUserDefaults];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
NSLog(@"OutputURL: %@", outputURL);
}
- (void)viewDidLayoutSubviews {
    [super viewDidLayoutSubviews];
    UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
    switch (orientation) {
        case UIInterfaceOrientationPortrait:
            [self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
            break;
        case UIInterfaceOrientationPortraitUpsideDown:
            [self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationPortraitUpsideDown];
            break;
        case UIInterfaceOrientationLandscapeLeft:
            [self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeLeft];
            break;
        case UIInterfaceOrientationLandscapeRight:
            [self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
            break;
        default:
            break;
    }
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    // Finished recording.
    NSLog(@"Error message: %@", error);
}
- (IBAction)endcall {
    [player stop];
    [session stopRunning];
}

- (AVCaptureDevice *)frontFacingCameraIfAvailable
{
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *captureDevice = nil;
    for (AVCaptureDevice *device in videoDevices) {
        if (device.position == AVCaptureDevicePositionFront) {
            captureDevice = device;
            break;
        }
    }
    // Couldn't find one on the front, so just get the default video device.
    if (!captureDevice) {
        captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    return captureDevice;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Set this to your desired mode, e.g. AVCaptureVideoOrientationLandscapeRight.
    connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
}
Try implementing the above delegate method.
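Separately, for AVCaptureMovieFileOutput specifically, a hedged sketch (not part of the original answer) of setting the recorded orientation on the movie output's own video connection before recording starts, using the movieFileOutput variable from the question's code:
// Configure the movie file output's video connection before calling
// -startRecordingToOutputFileURL:recordingDelegate:. Recorded frames then
// come out in the requested orientation.
AVCaptureConnection *movieConnection =
    [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if (movieConnection.isVideoOrientationSupported) {
    movieConnection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
}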

What is the best QR reader library for iOS? [closed]

Closed. This question does not meet Stack Overflow guidelines. It is not currently accepting answers.
We don’t allow questions seeking recommendations for books, tools, software libraries, and more. You can edit the question so it can be answered with facts and citations.
Closed 6 years ago.
I want to use a QR reader library in my iOS application.
What is the best QR reader library for iOS?
I found some on GitHub, but I am not sure whether they are OK or not.
Try this on iOS 7 and newer. To capture and read the QR code, use exactly the same AVFoundation code (the Capture: action and its captureOutput:didOutputMetadataObjects:fromConnection: delegate method) shown in the answer to the first question above; it is not repeated here.
iOS already has a QR reader implemented in AVFoundation as of iOS 7; here is a tutorial on how to implement it.

iOS custom camera (take photo), a bug appears

This bug appears very sporadically and I have no idea how to fix it; please help me.
The main code is below; if you want to see the complete project, you can check this link:
https://github.com/liman123/custom-camera
The bug: when I take a photo, the image shown on screen is distorted.
- (void)setupConfiguration
{
    _captureSession = [[AVCaptureSession alloc] init];
    _captureSession.sessionPreset = AVCaptureSessionPresetPhoto;

    _capturedImageView = [[UIImageView alloc] init];
    _capturedImageView.frame = self.view.frame; // just to even it out
    _capturedImageView.backgroundColor = [UIColor clearColor];
    _capturedImageView.userInteractionEnabled = YES;
    _capturedImageView.contentMode = UIViewContentModeScaleAspectFill;

    _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _captureVideoPreviewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:_captureVideoPreviewLayer];

    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    if (devices.count > 0) {
        _captureDevice = devices[0];
        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:&error];
        [_captureSession addInput:input];

        _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
        [_stillImageOutput setOutputSettings:outputSettings];
        [_captureSession addOutput:_stillImageOutput];
    }
}
- (void)captureButtonClick
{
    _isCapturingImage = YES;
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in _stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    [_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
        if (imageSampleBuffer) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
            UIImage *capturedImage = [[UIImage alloc] initWithData:imageData scale:1];
            _isCapturingImage = NO;
            _capturedImageView.image = capturedImage;
            _selectedImage = capturedImage;
            imageData = nil;
            [self.view addSubview:_imageSelectedView];
        }
    }];
}

Barcode Scanner in iOS

I used the AVFoundation framework for barcode-scanning functionality.
session = [[AVCaptureSession alloc] init];
device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;

input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (input) {
    [session addInput:input];
} else {
    NSLog(@"Error: %@", error);
}

output = [[AVCaptureMetadataOutput alloc] init];
[output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
[session addOutput:output];
output.metadataObjectTypes = [output availableMetadataObjectTypes];

prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
prevLayer.frame = self.view.bounds;
Using the delegate method, I get the barcode result, i.e. the barcode number:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    NSString *detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
                              AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
                              AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];

    for (AVMetadataObject *metadata in metadataObjects) {
        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type]) {
                // Note: the setup code above names the preview layer `prevLayer`, not `_prevLayer`.
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[prevLayer transformedMetadataObjectForMetadataObject:metadata];
                barcodeString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }
        if (barcodeString != nil) {
            NSLog(@"Barcode String: %@", barcodeString);
        } else {
            label.text = @"(none)";
        }
    }
}
Note: it is working, but I am not always getting the barcode number. Will you please help me with this issue?
Your delegate method looks fine, but you should set videoGravity on your AVCaptureVideoPreviewLayer (prevLayer) and get the session started. Try adding these lines of code at the end of your first method:
prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:prevLayer];
[session startRunning];
I have used this open-source project for barcode scanning; it will surely help you:
https://github.com/jpwidmer/iOS7-BarcodeScanner
Declare these objects in your .m (implementation) file:
AVCaptureSession *_session;
AVCaptureDevice *_device;
AVCaptureDeviceInput *_input;
AVCaptureMetadataOutput *_output;
AVCaptureVideoPreviewLayer *_prevLayer;
UIView *_highlightView;
UILabel *_label;
Then add this method (which you will call yourself):
- (void)readBarcode:(UIViewController *)myView
{
    _highlightView = [[UIView alloc] init];
    _highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin;
    _highlightView.layer.borderColor = [UIColor greenColor].CGColor;
    _highlightView.layer.borderWidth = 3;
    [self.view addSubview:_highlightView];

    _label = [[UILabel alloc] init];
    _label.frame = CGRectMake(0, self.view.bounds.size.height - 40, self.view.bounds.size.width, 40);
    _label.autoresizingMask = UIViewAutoresizingFlexibleTopMargin;
    _label.backgroundColor = [UIColor colorWithWhite:0.15 alpha:0.65];
    _label.textColor = [UIColor whiteColor];
    _label.textAlignment = NSTextAlignmentCenter;
    _label.text = @"(none)";
    [self.view addSubview:_label];

    _session = [[AVCaptureSession alloc] init];
    _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;

    _input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
    if (_input) {
        [_session addInput:_input];
    } else {
        NSLog(@"Error: %@", error);
    }

    _output = [[AVCaptureMetadataOutput alloc] init];
    [_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [_session addOutput:_output];
    _output.metadataObjectTypes = [_output availableMetadataObjectTypes];

    _prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _prevLayer.frame = self.view.bounds;
    _prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:_prevLayer];

    [_session startRunning];
    [self.view bringSubviewToFront:_highlightView];
    [self.view bringSubviewToFront:_label];
}
And finally, implement the AVCaptureMetadataOutputObjectsDelegate method:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    CGRect highlightViewRect = CGRectZero;
    AVMetadataMachineReadableCodeObject *barCodeObject;
    NSString *detectionString = nil;
    NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
                              AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
                              AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];

    for (AVMetadataObject *metadata in metadataObjects) {
        for (NSString *type in barCodeTypes) {
            if ([metadata.type isEqualToString:type]) {
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:metadata];
                highlightViewRect = barCodeObject.bounds;
                detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
                break;
            }
        }
        if (detectionString != nil) {
            _label.text = detectionString;
            break;
        } else {
            _label.text = @"(none)";
        }
    }

    _highlightView.frame = highlightViewRect;
}
Just have a look at this post. You should read it carefully and also check the Apple docs. You can download the sample project from that post.
I do not know exactly what problem you are facing, because your code looks fine. Maybe the problem is that you are scanning unsupported barcode types such as DataMatrix.
Please log your detectionString and check whether you get a value every time.
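A minimal sketch of such logging, dropped into the delegate method above (it assumes nothing beyond the metadataObjects parameter):
// Log every symbology and payload the output reports, so you can see
// whether the code is detected at all and which type it comes back as.
for (AVMetadataObject *metadata in metadataObjects) {
    if ([metadata isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
        NSLog(@"type=%@ value=%@", metadata.type,
              [(AVMetadataMachineReadableCodeObject *)metadata stringValue]);
    }
}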
In my case I was using serial queues and hence was facing problems.
Always use the main queue, like below:
[captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];

AVCaptureMetadataOutput, rectOfInterest scans outside

So I'm trying AVCaptureMetadataOutput for scanning QR codes. The problem I have is that scanning can occur outside the preview area, even though I use rectOfInterest.
Here's the code:
- (void)capture
{
    session = [[AVCaptureSession alloc] init];
    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if ([device lockForConfiguration:NULL] == YES) {
        CGPoint point = CGPointMake(0.5, 0.5);
        [device setFocusPointOfInterest:point];
        [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
        [device unlockForConfiguration];
    }

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    if (!input) {
        NSLog(@"Error: %@", error);
        return;
    }
    [session addInput:input];

    // Add the metadata output device
    output = [[AVCaptureMetadataOutput alloc] init];
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [session addOutput:output];
    output.metadataObjectTypes = @[AVMetadataObjectTypeQRCode, AVMetadataObjectTypeCode128Code];
    output.rectOfInterest = self.livevideo.bounds;

    newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    newCaptureVideoPreviewLayer.frame = self.livevideo.bounds;
    newCaptureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.livevideo.layer insertSublayer:newCaptureVideoPreviewLayer above:self.livevideo.layer];

    highlightView = [[UIView alloc] init];
    highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin;
    highlightView.layer.borderColor = [UIColor greenColor].CGColor;
    highlightView.layer.borderWidth = 3;
    [self.livevideo addSubview:highlightView];

    [session startRunning];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection
{
    AVMetadataMachineReadableCodeObject *barCodeObject;
    CGRect highlightViewRect = CGRectZero;

    for (AVMetadataObject *metadata in metadataObjects) {
        for (NSString *type in output.metadataObjectTypes) {
            if ([metadata.type isEqualToString:type]) {
                barCodeObject = (AVMetadataMachineReadableCodeObject *)[newCaptureVideoPreviewLayer transformedMetadataObjectForMetadataObject:metadata];
                highlightViewRect = barCodeObject.bounds;
                @try {
                    NSString *code = [barCodeObject stringValue];
                    NSLog(@"Read type: %@", type);
                    self.barcode.text = code;
                }
                @catch (NSException *exception) {
                    NSLog(@"%@", exception.reason);
                }
                break;
            }
        }
    }
    highlightView.frame = highlightViewRect;
}
That is the documented behavior:
Metadata objects whose bounds do not intersect with the rectOfInterest will not be returned.
So, if the QR code at all intersects the rectangle, it will be detected.
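A hedged addition beyond that answer: rectOfInterest is expressed in the metadata output's normalized coordinate space, running from (0,0) to (1,1), not in view points, so assigning self.livevideo.bounds directly does not describe the preview area. A sketch of the usual conversion, using the names from the question's code:
// Convert the preview layer's rect into the output's normalized coordinate
// space. The conversion needs the video dimensions, so run it after the
// session has started.
[session startRunning];
output.rectOfInterest = [newCaptureVideoPreviewLayer
    metadataOutputRectOfInterestForRect:newCaptureVideoPreviewLayer.bounds];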
