I'm struggling with delegate creation and usage. Could someone help me understand what I'm doing wrong? According to all the examples I have read, this is correct, but my data is not being returned.
ViewController (Parent)
.h
#import <UIKit/UIKit.h>
#import "BarcodeViewController.h"
@interface ViewController : UIViewController <BarcodeViewDelegate> {
IBOutlet UILabel *bcode;
}
@end
.m
-(void)setbarcode:(NSString*)barcode
{
NSLog(@" data %@", barcode);
bcode.text = barcode;
}
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender
{
// Make sure your segue name in storyboard is the same as this line
if ([[segue identifier] isEqualToString:@"scanbarcode"])
{
BarcodeViewController *cv = [segue destinationViewController];
cv.delegate = self;
}
}
The Barcode view controller (Child view)
.h
@protocol BarcodeViewDelegate <NSObject>
-(void)setbarcode:(NSString*)barcode;
@end
@interface BarcodeViewController : UIViewController
{
id<BarcodeViewDelegate> delegate;
}
@property(nonatomic,assign)id delegate;
@end
.m
#import "BarcodeViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface BarcodeViewController () <AVCaptureMetadataOutputObjectsDelegate>
{
AVCaptureSession *_session;
AVCaptureDevice *_device;
AVCaptureDeviceInput *_input;
AVCaptureMetadataOutput *_output;
AVCaptureVideoPreviewLayer *_prevLayer;
UIView *_highlightView;
UIImageView *_imageOverlay;
}
@end
@implementation BarcodeViewController
- (void)viewDidLoad
{
[super viewDidLoad];
/*
* setup scanner view
*/
_highlightView = [[UIView alloc] init];
_highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin|UIViewAutoresizingFlexibleLeftMargin|UIViewAutoresizingFlexibleRightMargin|UIViewAutoresizingFlexibleBottomMargin;
_highlightView.layer.borderColor = [UIColor greenColor].CGColor;
_highlightView.layer.borderWidth = 3;
[self.view addSubview:_highlightView];
/*
* set up an overlay guide
*/
_imageOverlay = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"camera_overlay"]];
[self.view addSubview:_imageOverlay];
_session = [[AVCaptureSession alloc] init];
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
_input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
if (_input) {
[_session addInput:_input];
} else {
NSLog(#"Error: %#", error);
}
_output = [[AVCaptureMetadataOutput alloc] init];
[_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
[_session addOutput:_output];
_output.metadataObjectTypes = [_output availableMetadataObjectTypes];
_prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_prevLayer.frame = self.view.bounds;
_prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:_prevLayer];
[_session startRunning];
[self.view bringSubviewToFront:_highlightView];
[self.view bringSubviewToFront:_imageOverlay];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
CGRect highlightViewRect = CGRectZero;
AVMetadataMachineReadableCodeObject *barCodeObject;
NSString *detectionString = nil;
NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];
/*
* keep looking around while we look for the barcode
*/
for (AVMetadataObject *metadata in metadataObjects) {
for (NSString *type in barCodeTypes) {
if ([metadata.type isEqualToString:type])
{
barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
//highlightViewRect = barCodeObject.bounds;
detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
break;
}
}
if (detectionString != nil)
{
/*
* Set the detected barcode so we can use it.
* - perform segue to another view
*/
//barcode = detectionString;
//NSLog(#" data %#", detectionString);
[delegate setbarcode: detectionString];
[self.navigationController popViewControllerAnimated:YES];
break;
}
//else
/*
* Just reset the barcode value for now
*/
//_barcode = false;
}
_highlightView.frame = highlightViewRect;
}
@end
Your problem is simple.
You define an instance variable and a property to hold the delegate:
{
id delegate;
}
@property(nonatomic,assign)id delegate;
You set the delegate via the property:
cv.delegate = self;
Then access it via the instance variable:
[delegate setbarcode: detectionString];
The property is backed by a different instance variable, automatically synthesized as _delegate, not the delegate ivar you declared yourself. You set the delegate through the property but read it through your own ivar, which is still nil, so nothing is sent back. You should always access it via the property, as self.delegate.
Remove the unnecessary instance variable declaration, type the property correctly (as id<BarcodeViewDelegate> rather than plain id), always access it via the property, and you'll be fine.
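A minimal sketch of the corrected declarations and call site, assuming ARC (weak is the usual attribute for a delegate there; keep assign if the project uses manual reference counting):
// BarcodeViewController.h -- declare no ivar; the compiler synthesizes _delegate
@protocol BarcodeViewDelegate <NSObject>
-(void)setbarcode:(NSString*)barcode;
@end
@interface BarcodeViewController : UIViewController
@property (nonatomic, weak) id<BarcodeViewDelegate> delegate;
@end
// BarcodeViewController.m -- always read through the property
if (detectionString != nil) {
[self.delegate setbarcode:detectionString];
[self.navigationController popViewControllerAnimated:YES];
}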
Related
I am trying to speed up the reading of barcodes in my app; the app works fine, but is a tad slow at reading barcodes.
How do I improve the speed of reading the barcodes?
Here is the code I have so far.
#import "ScanViewController.h"
#import "Utils.h"
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
@interface ScanViewController () <AVCaptureMetadataOutputObjectsDelegate>
@property (nonatomic, readwrite) AVCaptureSession *captureSession;
@property (nonatomic, readwrite) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, readwrite) UIView *qrCodeFrameView;
@property (nonatomic, readwrite) UILabel *qrCodeTextView;
@property (nonatomic, readwrite) NSArray *supportedCodeTypes;
@property (nonatomic, readwrite) long long lastScanTime;
@property (nonatomic, readwrite) NSString *lastScanCode;
@property (nonatomic, readwrite) AVAudioPlayer *audioPlayer;
@end
@implementation ScanViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
self.captureSession = [AVCaptureSession new];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
self.supportedCodeTypes = @[AVMetadataObjectTypeUPCECode,
AVMetadataObjectTypeEAN13Code,
AVMetadataObjectTypeEAN8Code];
// AVMetadataObjectTypeCode39Code,
// AVMetadataObjectTypeCode39Mod43Code,
// AVMetadataObjectTypeCode93Code,
// AVMetadataObjectTypeCode128Code,
// AVMetadataObjectTypeAztecCode,
// AVMetadataObjectTypePDF417Code,
// AVMetadataObjectTypeITF14Code,
// AVMetadataObjectTypeDataMatrixCode,
// AVMetadataObjectTypeInterleaved2of5Code,
// AVMetadataObjectTypeQRCode];
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if(captureDevice == nil) {
NSLog(@"Failed to get the camera device");
return;
}
@try {
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
// Set the input device on the capture session.
[self.captureSession addInput:input];
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
[self.captureSession addOutput:captureMetadataOutput];
// Set delegate and use the default dispatch queue to execute the call back
[captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
captureMetadataOutput.metadataObjectTypes = self.supportedCodeTypes;
// captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
} @catch (NSException *error) {
// If any error occurs, simply print it out and don't continue any more.
NSLog(@"%@", error);
return;
}
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
self.videoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
self.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.videoPreviewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.videoPreviewLayer];
// Start video capture.
[self.captureSession startRunning];
// Move the result view and loading view to the front
[self.view bringSubviewToFront:self.resultView];
[self.view bringSubviewToFront:self.loadingView];
// Initialize QR Code Frame to highlight the QR code
self.qrCodeFrameView = [[UIView alloc] init];
if (self.qrCodeFrameView) {
self.qrCodeFrameView.layer.borderColor = UIColor.greenColor.CGColor;
self.qrCodeFrameView.layer.borderWidth = 2;
[self.view addSubview:self.qrCodeFrameView];
[self.view bringSubviewToFront:self.qrCodeFrameView];
}
self.qrCodeTextView = [[UILabel alloc] init];
if (self.qrCodeTextView) {
[self.qrCodeTextView setTextColor:UIColor.greenColor];
[self.qrCodeTextView setFont:[UIFont systemFontOfSize:20]];
[self.qrCodeFrameView addSubview:self.qrCodeTextView];
}
[self rotateLoadingImage];
[self setResultType:RESULT_TYPE_WORKING codeContent:@"Ready" price:0.00];
[self.loadingView setHidden:YES];
}
-(void)viewWillDisappear:(BOOL)animated {
if (self.audioPlayer != nil) {
[self.audioPlayer stop];
self.audioPlayer = nil;
}
[super viewWillDisappear:animated];
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
-(void) updatePreviewLayer:(AVCaptureConnection*)layer orientation:(AVCaptureVideoOrientation)orientation {
layer.videoOrientation = orientation;
self.videoPreviewLayer.frame = self.view.bounds;
}
-(void)viewDidLayoutSubviews {
[super viewDidLayoutSubviews];
if(self.videoPreviewLayer.connection != nil) {
UIDevice *currentDevice = [UIDevice currentDevice];
UIDeviceOrientation orientation = [currentDevice orientation];
AVCaptureConnection *previewLayerConnection = self.videoPreviewLayer.connection;
if(previewLayerConnection.isVideoOrientationSupported) {
switch (orientation) {
case UIDeviceOrientationPortrait:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortrait];
break;
case UIDeviceOrientationLandscapeRight:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationLandscapeLeft];
break;
case UIDeviceOrientationLandscapeLeft:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationLandscapeRight];
break;
case UIDeviceOrientationPortraitUpsideDown:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortraitUpsideDown];
break;
default:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortrait];
break;
}
}
}
}
-(void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
// Check if the metadataObjects array is not nil and it contains at least one object.
if (metadataObjects.count == 0) {
self.qrCodeFrameView.frame = CGRectZero;
return;
}
// Get the metadata object.
AVMetadataMachineReadableCodeObject *metadataObj = (AVMetadataMachineReadableCodeObject*)(metadataObjects[0]);
if ([self.supportedCodeTypes containsObject:metadataObj.type]) {
// If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
AVMetadataObject *barCodeObject = [self.videoPreviewLayer transformedMetadataObjectForMetadataObject:metadataObj];
NSString *code = metadataObj.stringValue;
if (code != nil) {
// check upc a code
if ([self checkUpcACode:metadataObj.type code:code] == NO) {
self.qrCodeTextView.text = @"";
return;
}
int i=0;
for (i=0; i<code.length; i++) {
char ch = [code characterAtIndex:i];
if (ch != '0') break;
}
if (i>0) i--;
code = [code substringFromIndex:i];
self.qrCodeFrameView.frame = barCodeObject.bounds;
[self.qrCodeTextView setText:code];
self.qrCodeTextView.frame = CGRectMake(0, self.qrCodeFrameView.frame.size.height-20, self.qrCodeFrameView.frame.size.width, 20);
NSLog(#"%#", code);
[self handleBarcode:code];
} else {
self.qrCodeTextView.text = @"";
}
}
}
-(BOOL)checkUpcACode:(AVMetadataObjectType)type code:(NSString*)code {
if ([type isEqualToString:AVMetadataObjectTypeEAN13Code]) {
if ([code length] > 0 && [code hasPrefix:@"0"]) {
return YES;
}
}
return NO;
}
@end
Solution was from PBK on Apple Forums
//change [self handleBarcode:code];
// to
dispatch_async(dispatch_get_main_queue(), ^{
[self handleBarcode:code];
});
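Beyond dispatching to the main queue, one further speed-up worth trying (my suggestion, not part of PBK's fix) is to restrict the metadata output's scan region with rectOfInterest, so the detector only analyzes the strip of the preview where you expect the barcode. The rect is in normalized output coordinates, so convert from a view rect via the preview layer once the session is configured; the strip below is only an example region:
// e.g. after the session is set up; captureMetadataOutput is the output created in viewDidLoad
CGRect scanRect = CGRectMake(0, 100, self.view.bounds.size.width, 200); // example strip
captureMetadataOutput.rectOfInterest =
[self.videoPreviewLayer metadataOutputRectOfInterestForRect:scanRect];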
I have a barcode scanner in one view controller and a UIWebView in a second view controller. How can I get the string that was created in the first view controller and catch it in the second one? I tried several things and tested it with NSLog, but it always shows me (nil) for the string value. Please help me out with this.
Here is my code:
igViewController.m:
#import <AVFoundation/AVFoundation.h>
#import "igViewController.h"
#import "ViewController.h"
@interface igViewController () <AVCaptureMetadataOutputObjectsDelegate>
{
AVCaptureSession *_session;
AVCaptureDevice *_device;
AVCaptureDeviceInput *_input;
AVCaptureMetadataOutput *_output;
AVCaptureVideoPreviewLayer *_prevLayer;
UIView *_highlightView;
}
@end
@implementation igViewController
@synthesize detectionString;
@synthesize delegate;
@synthesize thedetected;
- (void)viewDidLoad
{
[super viewDidLoad];
_highlightView = [[UIView alloc] init];
_highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin|UIViewAutoresizingFlexibleLeftMargin|UIViewAutoresizingFlexibleRightMargin|UIViewAutoresizingFlexibleBottomMargin;
_highlightView.layer.borderColor = [UIColor greenColor].CGColor;
_highlightView.layer.borderWidth = 3;
[self.view addSubview:_highlightView];
_label = [[UILabel alloc] init];
_label.frame = CGRectMake(0, self.view.bounds.size.height - 40, self.view.bounds.size.width, 40);
_label.autoresizingMask = UIViewAutoresizingFlexibleTopMargin;
_label.backgroundColor = [UIColor colorWithWhite:0.15 alpha:0.65];
_label.textColor = [UIColor whiteColor];
_label.textAlignment = NSTextAlignmentCenter;
_label.text = #"(nichts)";
[self.view addSubview:_label];
_session = [[AVCaptureSession alloc] init];
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
_input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
if (_input) {
[_session addInput:_input];
} else {
NSLog(#"Error: %#", error);
}
_output = [[AVCaptureMetadataOutput alloc] init];
[_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
[_session addOutput:_output];
_output.metadataObjectTypes = [_output availableMetadataObjectTypes];
_prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_prevLayer.frame = self.view.bounds;
_prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:_prevLayer];
[_session startRunning];
[self.view bringSubviewToFront:_highlightView];
[self.view bringSubviewToFront:_label];
}
-(void) viewWillAppear:(BOOL)animated{
if(detectionString!=nil){
UIStoryboard *mainStoryboard = [UIStoryboard storyboardWithName:@"Main" bundle:nil];
UIViewController *top = [UIApplication sharedApplication].keyWindow.rootViewController;
UIViewController *vc = [mainStoryboard instantiateViewControllerWithIdentifier:@"Controller1"];
[top presentViewController:vc animated:YES completion:nil];
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
CGRect highlightViewRect = CGRectZero;
AVMetadataMachineReadableCodeObject *barCodeObject;
detectionString = nil;
NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];
for (AVMetadataObject *metadata in metadataObjects) {
for (NSString *type in barCodeTypes) {
if ([metadata.type isEqualToString:type])
{
barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
highlightViewRect = barCodeObject.bounds;
detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
[self setSomeNumber:[(AVMetadataMachineReadableCodeObject *)metadata stringValue]];
break;
}
}
if (detectionString != nil)
{
_label.text = detectionString;
// ViewController *otherViewController=[[ViewController alloc] init];
UIStoryboard *mainStoryboard = [UIStoryboard storyboardWithName:@"Main" bundle:nil];
UIViewController *vc = [mainStoryboard instantiateViewControllerWithIdentifier:@"Controller1"];
ViewController *otherViewController=[[ViewController alloc] init];
[otherViewController barcodeverarbeiten:detectionString];
NSLog(#"detectionstringssssss %#", detectionString);
[self presentViewController:vc animated:YES completion:nil];
break;
}
else
_label.text = #"(nichts)";
}
_highlightView.frame = highlightViewRect;
}
- (void)setSomeNumber:(NSString *)tempString {
_label.text = tempString;
}
-(NSString *) thedetected{
return _label.text;
}
@end
ViewController.m (only webViewDidFinishLoad):
-(void) webViewDidFinishLoad:(UIWebView *)awebView{
NSLog(#"detectionstring %#", otherViewController1.thedetected);
if(otherViewController1.thedetected != nil){ //denk an self. ... bei igviewcontroller
[self.Webseite stringByEvaluatingJavaScriptFromString:[NSString stringWithFormat:#"document.getElementsByName('SEARCH_STRING_CONTENT')[0].value = '%#';",otherViewController1.thedetected]];
NSString *evalStr = [NSString stringWithFormat:#"setTimeout( function(){document.getElementsByName(SEARCH_STRING_CONTENT')[0].focus();},1000);"];
[self.Webseite stringByEvaluatingJavaScriptFromString:evalStr];
[self.Webseite stringByEvaluatingJavaScriptFromString:#"document.getElementsByClassName('buttonLinks searchButton withValue')[0].click();"];
}
[UIApplication sharedApplication].networkActivityIndicatorVisible = NO;
if ([awebView canGoBack] == YES){
[backButton setEnabled:YES];
}else{
[backButton setEnabled:NO];
}
if([awebView canGoForward] == YES){
[forwardButton setEnabled:YES];
}else{
[forwardButton setEnabled:NO];
}
}
igViewController.h:
#import <UIKit/UIKit.h>
@class ViewController;
@interface igViewController : UIViewController{
ViewController *delegate;
@public
UILabel *_label;
NSString *detectionString;
//NSString *tempString;
}
@property (nonatomic, strong)NSString *detectionString;
@property (nonatomic, strong) ViewController *delegate;
@property (nonatomic, strong) NSString *thedetected;
//@property (nonatomic, strong)NSString *tempString;
- (void)setSomeNumber:(NSString *)tempString;
@end
ViewController.h:
#import <UIKit/UIKit.h>
#import "FavoritenController.h"
@class igViewController;
@interface ViewController : UIViewController <UITextFieldDelegate, UIWebViewDelegate, UIActionSheetDelegate>
{
UIWebView *Webseite;
UIBarButtonItem *backButton;
UIBarButtonItem *forwardButton;
UITextField *textField;
UIBarButtonItem *refreshButton;
NSString* urlBeforeEditing;
UIBarButtonItem *barcodescan;
igViewController *otherViewController1;
FavoritenController* favoritenController;
/* ..*/
}
@property (nonatomic, retain) IBOutlet UIBarButtonItem *barcodescan;
@property (nonatomic, strong) igViewController *otherViewController1;
@property (nonatomic, retain) IBOutlet UITextField *textField;
@property (weak, nonatomic) IBOutlet UIBarButtonItem *onlinetauscher;
@property (nonatomic, retain) IBOutlet UIWebView *Webseite;
@property (nonatomic, retain) IBOutlet UIBarButtonItem *refreshButton;
@property (nonatomic, retain) IBOutlet UIBarButtonItem *backButton;
@property (nonatomic, retain) IBOutlet UIBarButtonItem *forwardButton;
- (IBAction)onlinetauscher:(UIBarButtonItem *)sender;
- (IBAction)pressRefresh:(UIBarButtonItem *)sender;
//- (IBAction)search:(id)sender;
- (IBAction)zeigeMedikamente:(id)sender;
- (IBAction)hinzufuegen:(id)sender;
- (void)barcodeverarbeiten:(NSString*)barcodepzn;
@end
OK, it's working now. I used NSUserDefaults to pass the string and caught it in my other view controller. Cheers
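For reference, a minimal sketch of that NSUserDefaults hand-off (the key name @"lastScannedBarcode" is my own illustration, not from the original code):
// igViewController.m -- after a successful scan
[[NSUserDefaults standardUserDefaults] setObject:detectionString forKey:@"lastScannedBarcode"];
// ViewController.m -- e.g. in webViewDidFinishLoad:
NSString *scanned = [[NSUserDefaults standardUserDefaults] stringForKey:@"lastScannedBarcode"];
if (scanned != nil) {
// inject the barcode into the web page here
}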
I'm just starting out in Objective-C and I'm trying to create a simple app that shows the camera view with a blur effect on it. I got the camera output working with the AVFoundation framework. Now I'm trying to hook up the Core Image framework, but I have no idea how: Apple's documentation is confusing to me, and searching for guides and tutorials online turns up no results. Thanks in advance for the help.
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface ViewController ()
@property (strong, nonatomic) CIContext *context;
@end
@implementation ViewController
AVCaptureSession *session;
AVCaptureStillImageOutput *stillImageOutput;
-(CIContext *)context
{
if(!_context)
{
_context = [CIContext contextWithOptions:nil];
}
return _context;
}
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
}
-(void)viewWillAppear:(BOOL)animated{
session = [[AVCaptureSession alloc] init];
[session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayer = [[self view] layer];
[rootLayer setMasksToBounds:YES];
CGRect frame = self.imageView.frame;
[previewLayer setFrame:frame];
[previewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
[rootLayer insertSublayer:previewLayer atIndex:0];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
@end
Here's something to get you started. This is an updated version of the code from the following link.
https://gist.github.com/eladb/9662102
The trick is to use the AVCaptureVideoDataOutputSampleBufferDelegate.
With this delegate, you can use imageWithCVPixelBuffer to construct a CIImage from your camera buffer.
Right now though I'm trying to figure out how to reduce lag. I'll update asap.
Update: Latency is now minimal, and on some effects unnoticeable. Unfortunately, it seems that blur is one of the slowest. You may want to look into vImage.
#import "ViewController.h"
#import <CoreImage/CoreImage.h>
#import <AVFoundation/AVFoundation.h>
@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (strong, nonatomic) CIContext *coreImageContext;
@property (strong, nonatomic) AVCaptureSession *cameraSession;
@property (strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
@property (strong, nonatomic) UIView *blurCameraView;
@property (strong, nonatomic) CIFilter *filter;
@property BOOL cameraOpen;
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
self.blurCameraView = [[UIView alloc]initWithFrame:[[UIScreen mainScreen] bounds]];
[self.view addSubview:self.blurCameraView];
//setup filter
self.filter = [CIFilter filterWithName:@"CIGaussianBlur"];
[self.filter setDefaults];
[self.filter setValue:@(3.0f) forKey:@"inputRadius"];
[self setupCamera];
[self openCamera];
// Do any additional setup after loading the view, typically from a nib.
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (void)setupCamera
{
self.coreImageContext = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @(YES)}];
// session
self.cameraSession = [[AVCaptureSession alloc] init];
[self.cameraSession setSessionPreset:AVCaptureSessionPresetLow];
[self.cameraSession commitConfiguration];
// input
AVCaptureDevice *shootingCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *shootingDevice = [AVCaptureDeviceInput deviceInputWithDevice:shootingCamera error:NULL];
if ([self.cameraSession canAddInput:shootingDevice]) {
[self.cameraSession addInput:shootingDevice];
}
// video output
self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
self.videoOutput.alwaysDiscardsLateVideoFrames = YES;
[self.videoOutput setSampleBufferDelegate:self queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)];
if ([self.cameraSession canAddOutput:self.videoOutput]) {
[self.cameraSession addOutput:self.videoOutput];
}
if (self.videoOutput.connections.count > 0) {
AVCaptureConnection *connection = self.videoOutput.connections[0];
connection.videoOrientation = AVCaptureVideoOrientationPortrait;
}
self.cameraOpen = NO;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
// Get a CMSampleBuffer's Core Video image buffer for the media data
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// turn buffer into an image we can manipulate
CIImage *result = [CIImage imageWithCVPixelBuffer:imageBuffer];
// filter
[self.filter setValue:result forKey:@"inputImage"];
// render image
CGImageRef blurredImage = [self.coreImageContext createCGImage:self.filter.outputImage fromRect:result.extent];
dispatch_async(dispatch_get_main_queue(), ^{
self.blurCameraView.layer.contents = (__bridge id)blurredImage;
CGImageRelease(blurredImage);
});
}
- (void)openCamera {
if (self.cameraOpen) {
return;
}
self.blurCameraView.alpha = 0.0f;
[self.cameraSession startRunning];
[self.view layoutIfNeeded];
[UIView animateWithDuration:3.0f animations:^{
self.blurCameraView.alpha = 1.0f;
}];
self.cameraOpen = YES;
}
@end
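One knob worth experimenting with if the blur is still too slow (my suggestion, not part of the original gist): the context above forces the software renderer, and letting Core Image run on the GPU is usually much faster for CIGaussianBlur:
// GPU-backed context (the default); often much faster than kCIContextUseSoftwareRenderer
self.coreImageContext = [CIContext contextWithOptions:nil];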
I am using the iOS 7 AVFoundation framework for barcode scanning. It works fine for me, but on some barcodes it gives me the wrong result.
I am adding the code file and the barcode as well to reproduce the issue. Please take a look and help me find the problem.
Code Sample:
ViewController.h class:
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface ViewController : UIViewController <AVCaptureMetadataOutputObjectsDelegate>
{
AVCaptureSession *_session;
AVCaptureDevice *_device;
AVCaptureDeviceInput *_input;
AVCaptureMetadataOutput *_output;
AVCaptureVideoPreviewLayer *_prevLayer;
UIView *_highlightView;
IBOutlet UIView *_viewBg;
IBOutlet UIButton *_btnScanning;
}
@end
ViewController.m class methods:
- (void)setScanningVideoOrientation
{
if ([UIDevice currentDevice].orientation == UIDeviceOrientationPortrait)
{
_prevLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
}
else if ([UIDevice currentDevice].orientation == UIDeviceOrientationPortraitUpsideDown)
{
_prevLayer.connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
}
else if ([UIDevice currentDevice].orientation == UIDeviceOrientationLandscapeLeft)
{
_prevLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
}
else if ([UIDevice currentDevice].orientation == UIDeviceOrientationLandscapeRight)
{
_prevLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
}
}
- (void)startScanning
{
// Create Session.------------------------------------------------------
_session = nil;
_session = [[AVCaptureSession alloc] init];
_device = nil;
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
_input = nil;
_input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
if (_input)
{
[_session addInput:_input];
_output = nil;
_output = [[AVCaptureMetadataOutput alloc] init];
[_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
[_session addOutput:_output];
_output.metadataObjectTypes = [_output availableMetadataObjectTypes];
_prevLayer = nil;
_prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_prevLayer.frame = _viewBg.frame;
_prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self setScanningVideoOrientation];
[self.view.layer addSublayer:_prevLayer];
[_session startRunning];
} else
{
NSLog(#"Error: %#", error);
}
//----------------------------------------------------------------------
}
In this delegate method, I am getting the wrong barcode string. Please take a look at 'detectionString'.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
CGRect highlightViewRect = CGRectZero;
AVMetadataMachineReadableCodeObject *barCodeObject;
NSString *detectionString = nil;
NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];
for (AVMetadataObject *metadata in metadataObjects) {
for (NSString *type in barCodeTypes) {
if ([metadata.type isEqualToString:type])
{
barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
highlightViewRect = barCodeObject.bounds;
detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
break;
}
}
if (detectionString != nil)
{
NSLog(#"ViewController-->captureOutput-->detectionString = %#",detectionString);
[self stopScanning];
break;
}
}
barCodeObject = nil;
detectionString = nil;
barCodeTypes = nil;
}
Barcode Image -
I am getting the result 0649954006029.
But it should be 649954006029.
On some other barcodes, I am seeing an 'l' before the actual scanned barcode string.
Hope this helps you identify the problem.
Thanks.
The reason you're getting a 13-digit code is that you're scanning a UPC-A code (649954...) and iOS doesn't have a separate category for UPC-A codes.
UPC-A codes come scanned in as EAN13 codes, and those are 13 digits long, so they get a prefix of 0.
From AVFoundation/AVMetadata.h
/*! @constant AVMetadataObjectTypeEAN13Code
@abstract An identifier for an instance of
AVMetadataMachineReadableCodeObject having a type
AVMetadataObjectTypeEAN13Code.
@discussion
AVMetadataMachineReadableCodeObject objects generated from EAN-13 (including UPC-A) codes return this constant as their type. */
AVF_EXPORT NSString *const AVMetadataObjectTypeEAN13Code NS_AVAILABLE(NA, 7_0);
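If you need the 12-digit UPC-A value, a minimal sketch is to strip the padding zero from the EAN-13 result yourself; note this assumes you only ever scan UPC-A, since a genuine EAN-13 code can also legitimately start with 0:
// after detectionString has been read in captureOutput:didOutputMetadataObjects:fromConnection:
if ([metadata.type isEqualToString:AVMetadataObjectTypeEAN13Code] &&
detectionString.length == 13 && [detectionString hasPrefix:@"0"]) {
// treat as UPC-A: drop the leading zero added by the EAN-13 representation
detectionString = [detectionString substringFromIndex:1];
}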
I am trying to scan an Aztec code using the native Apple API, but I am not able to scan it. Apple's documentation says that Aztec codes can be scanned, but it is not working for me.
Please check the code I am using.
#import <UIKit/UIKit.h>
@interface igViewController : UIViewController
@end
#import <AVFoundation/AVFoundation.h>
#import "igViewController.h"
@interface igViewController () <AVCaptureMetadataOutputObjectsDelegate>
{
AVCaptureSession *_session;
AVCaptureDevice *_device;
AVCaptureDeviceInput *_input;
AVCaptureMetadataOutput *_output;
AVCaptureVideoPreviewLayer *_prevLayer;
UIView *_highlightView;
UILabel *_label;
}
@end
@implementation igViewController
- (void)viewDidLoad
{
[super viewDidLoad];
_highlightView = [[UIView alloc] init];
_highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin|UIViewAutoresizingFlexibleLeftMargin|UIViewAutoresizingFlexibleRightMargin|UIViewAutoresizingFlexibleBottomMargin;
_highlightView.layer.borderColor = [UIColor greenColor].CGColor;
_highlightView.layer.borderWidth = 3;
[self.view addSubview:_highlightView];
_label = [[UILabel alloc] init];
_label.frame = CGRectMake(0, self.view.bounds.size.height - 40, self.view.bounds.size.width, 40);
_label.autoresizingMask = UIViewAutoresizingFlexibleTopMargin;
_label.backgroundColor = [UIColor colorWithWhite:0.15 alpha:0.65];
_label.textColor = [UIColor whiteColor];
_label.textAlignment = NSTextAlignmentCenter;
_label.text = #"(none)";
[self.view addSubview:_label];
_session = [[AVCaptureSession alloc] init];
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
_input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
if (_input) {
[_session addInput:_input];
} else {
NSLog(#"Error: %#", error);
}
_output = [[AVCaptureMetadataOutput alloc] init];
[_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
[_session addOutput:_output];
_output.metadataObjectTypes = [_output availableMetadataObjectTypes];
for (NSString* avail in _output.metadataObjectTypes) {
NSLog(#"Avail...%#", avail);
}
_prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_prevLayer.frame = self.view.bounds;
_prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:_prevLayer];
[_session startRunning];
[self.view bringSubviewToFront:_highlightView];
[self.view bringSubviewToFront:_label];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
NSLog(#"Failed...");
CGRect highlightViewRect = CGRectZero;
AVMetadataMachineReadableCodeObject *barCodeObject;
NSString *detectionString = nil;
NSArray *barCodeTypes = @[AVMetadataObjectTypeAztecCode];
for (AVMetadataObject *metadata in metadataObjects) {
NSLog(#".....%#", metadata.type);
for (NSString *type in barCodeTypes) {
if ([metadata.type isEqualToString:type])
{
barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
highlightViewRect = barCodeObject.bounds;
detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
break;
}
}
if (detectionString != nil)
{
_label.text = detectionString;
break;
}
else
_label.text = #"(none)";
}
//_label.text = #"(nonessss)";
_highlightView.frame = highlightViewRect;
}
@end
This is my first answer on SO and I'm a total beginner with Objective-C and iOS development, so be a little gentle with me, please.
I can't actually help you fix the errors in your code, as it is still very hard for me as a beginner to see what's going on, but I wanted to tell you that just a few days ago I successfully followed this tutorial on how to do exactly what you need. I adjusted the tutorial's code and added comments where I needed them, so it should be easy to follow in case you'd like to try. As it seems to be frowned upon to only post a link here, I'm posting my code.
This is a ViewController that directly opens a scan view and reacts if a barcode (Aztec in my case) is found. It should be easy to adjust to your needs. In the tutorial they used AVMetadataObjectTypeQRCode, but to scan Aztec codes, simply replace it with AVMetadataObjectTypeAztecCode. I have already done that in my code.
ScanVC.h (in your case igViewController)
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface ScanVC : UIViewController <AVCaptureMetadataOutputObjectsDelegate>
@property (retain, nonatomic) UILabel *scannerWindow;
@property (retain, nonatomic) UILabel *statusLabel;
@property (retain, nonatomic) UIButton *cancelButton;
@end
ScanVC.m
#import "ScanVC.h"
@interface ScanVC ()
@property (nonatomic) BOOL isReading;
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@end
@implementation ScanVC
@synthesize cancelButton;
@synthesize statusLabel;
@synthesize scannerWindow;
- (void)viewDidLoad {
[super viewDidLoad];
_isReading = NO;
_captureSession = nil;
//place a close button
cancelButton = [UIButton buttonWithType:UIButtonTypeSystem];
[cancelButton addTarget:self action:@selector(closeScan) forControlEvents:UIControlEventTouchUpInside];
[cancelButton setTitle:@"Close" forState:UIControlStateNormal];
cancelButton.frame = CGRectMake(0, 410, 250, 40);
[self.view addSubview:cancelButton];
//place a status label
statusLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 340, 250, 40)];
statusLabel.text = @"Currently not scanning";
[self.view addSubview:statusLabel];
//place the scanner window (adjust the size)
scannerWindow = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 250, 250)];
scannerWindow.text = @"Camera Capture Window";
[self.view addSubview:scannerWindow];
//start the scan immediately when the view loads
[self startStopScan];
}
- (void)closeScan {
if(_isReading) {
[self stopReading];
}
_isReading = !_isReading;
//dismiss the view controller here?
}
- (void)startStopScan {
if (!_isReading) {
if([self startReading]) {
[statusLabel setText:@"Scanning for Barcode"];
}
} else {
[self stopReading];
}
_isReading = !_isReading;
}
- (BOOL)startReading {
NSError *error;
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if(!input) {
NSLog(#"%#", [error localizedDescription]);
return NO;
}
_captureSession = [[AVCaptureSession alloc] init];
[_captureSession addInput:input];
AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
[_captureSession addOutput:captureMetadataOutput];
dispatch_queue_t dispatchQueue;
dispatchQueue = dispatch_queue_create("myQueue", NULL);
[captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
[captureMetadataOutput setMetadataObjectTypes:[NSArray arrayWithObject:AVMetadataObjectTypeAztecCode]];
//show the preview to the user
_videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
[_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[_videoPreviewLayer setFrame:scannerWindow.layer.bounds];
[scannerWindow.layer addSublayer:_videoPreviewLayer];
[_captureSession startRunning];
return YES;
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
if (metadataObjects != nil && [metadataObjects count] > 0) {
AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeAztecCode]) {
[statusLabel performSelectorOnMainThread:@selector(setText:) withObject:[metadataObj stringValue] waitUntilDone:NO];
[self performSelectorOnMainThread:@selector(stopReading) withObject:nil waitUntilDone:NO];
_isReading = NO;
//do things after a successful scan here
NSLog(#"scanner output %#", [metadataObj stringValue]);
}
}
}
- (void)stopReading {
[_captureSession stopRunning];
_captureSession = nil;
[_videoPreviewLayer removeFromSuperlayer];
}
@end
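To answer the open question in the closeScan comment: if the scanner was presented modally (an assumption about how ScanVC is shown, not something the tutorial specifies), the dismissal could look like this:
- (void)closeScan {
if (_isReading) {
[self stopReading];
}
_isReading = NO;
// assumes ScanVC was shown via presentViewController:animated:completion:
[self dismissViewControllerAnimated:YES completion:nil];
}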