Improving the speed of reading UPC-A barcodes in Objective-C (Xcode) - iOS

I am trying to speed up barcode reading in my app. The app works fine but is a tad slow at reading barcodes.
How do I improve the speed of reading the barcodes?
Here is the code I have so far.
#import "ScanViewController.h"
#import "Utils.h"
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
@interface ScanViewController () <AVCaptureMetadataOutputObjectsDelegate>
@property (nonatomic, readwrite) AVCaptureSession *captureSession;
@property (nonatomic, readwrite) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, readwrite) UIView *qrCodeFrameView;
@property (nonatomic, readwrite) UILabel *qrCodeTextView;
@property (nonatomic, readwrite) NSArray *supportedCodeTypes;
@property (nonatomic, readwrite) long long lastScanTime;
@property (nonatomic, readwrite) NSString *lastScanCode;
@property (nonatomic, readwrite) AVAudioPlayer *audioPlayer;
@end
@implementation ScanViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
self.captureSession = [AVCaptureSession new];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
self.supportedCodeTypes = @[AVMetadataObjectTypeUPCECode,
AVMetadataObjectTypeEAN13Code,
AVMetadataObjectTypeEAN8Code];
// AVMetadataObjectTypeCode39Code,
// AVMetadataObjectTypeCode39Mod43Code,
// AVMetadataObjectTypeCode93Code,
// AVMetadataObjectTypeCode128Code,
// AVMetadataObjectTypeAztecCode,
// AVMetadataObjectTypePDF417Code,
// AVMetadataObjectTypeITF14Code,
// AVMetadataObjectTypeDataMatrixCode,
// AVMetadataObjectTypeInterleaved2of5Code,
// AVMetadataObjectTypeQRCode];
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if(captureDevice == nil) {
NSLog(@"Failed to get the camera device");
return;
}
@try {
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
// Set the input device on the capture session.
[self.captureSession addInput:input];
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
[self.captureSession addOutput:captureMetadataOutput];
// Set delegate and use the default dispatch queue to execute the call back
[captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
captureMetadataOutput.metadataObjectTypes = self.supportedCodeTypes;
// captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
} @catch (NSException *error) {
// If any error occurs, simply print it out and don't continue any more.
NSLog(@"%@", error);
return;
}
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
self.videoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
self.videoPreviewLayer.videoGravity = kCAGravityResizeAspectFill;
self.videoPreviewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.videoPreviewLayer];
// Start video capture.
[self.captureSession startRunning];
// Move the result view and loading view to the front
[self.view bringSubviewToFront:self.resultView];
[self.view bringSubviewToFront:self.loadingView];
// Initialize QR Code Frame to highlight the QR code
self.qrCodeFrameView = [[UIView alloc] init];
if (self.qrCodeFrameView) {
self.qrCodeFrameView.layer.borderColor = UIColor.greenColor.CGColor;
self.qrCodeFrameView.layer.borderWidth = 2;
[self.view addSubview:self.qrCodeFrameView];
[self.view bringSubviewToFront:self.qrCodeFrameView];
}
self.qrCodeTextView = [[UILabel alloc] init];
if (self.qrCodeTextView) {
[self.qrCodeTextView setTextColor:UIColor.greenColor];
[self.qrCodeTextView setFont:[UIFont systemFontOfSize:20]];
[self.qrCodeFrameView addSubview:self.qrCodeTextView];
}
[self rotateLoadingImage];
[self setResultType:RESULT_TYPE_WORKING codeContent:@"Ready" price:0.00];
[self.loadingView setHidden:YES];
}
-(void)viewWillDisappear:(BOOL)animated {
if (self.audioPlayer != nil) {
[self.audioPlayer stop];
self.audioPlayer = nil;
}
[super viewWillDisappear:animated];
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
-(void) updatePreviewLayer:(AVCaptureConnection*)layer orientation:(AVCaptureVideoOrientation)orientation {
layer.videoOrientation = orientation;
self.videoPreviewLayer.frame = self.view.bounds;
}
-(void)viewDidLayoutSubviews {
[super viewDidLayoutSubviews];
if(self.videoPreviewLayer.connection != nil) {
UIDevice *currentDevice = [UIDevice currentDevice];
UIDeviceOrientation orientation = [currentDevice orientation];
AVCaptureConnection *previewLayerConnection = self.videoPreviewLayer.connection;
if(previewLayerConnection.isVideoOrientationSupported) {
switch (orientation) {
case UIDeviceOrientationPortrait:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortrait];
break;
case UIDeviceOrientationLandscapeRight:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationLandscapeLeft];
break;
case UIDeviceOrientationLandscapeLeft:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationLandscapeRight];
break;
case UIDeviceOrientationPortraitUpsideDown:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortraitUpsideDown];
break;
default:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortrait];
break;
}
}
}
}
-(void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
// Check if the metadataObjects array is not nil and it contains at least one object.
if (metadataObjects.count == 0) {
self.qrCodeFrameView.frame = CGRectZero;
return;
}
// Get the metadata object.
AVMetadataMachineReadableCodeObject *metadataObj = (AVMetadataMachineReadableCodeObject*)(metadataObjects[0]);
if ([self.supportedCodeTypes containsObject:metadataObj.type]) {
// If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
AVMetadataObject *barCodeObject = [self.videoPreviewLayer transformedMetadataObjectForMetadataObject:metadataObj];
NSString *code = metadataObj.stringValue;
if (code != nil) {
// check upc a code
if ([self checkUpcACode:metadataObj.type code:code] == NO) {
self.qrCodeTextView.text = @"";
return;
}
int i=0;
for (i=0; i<code.length; i++) {
char ch = [code characterAtIndex:i];
if (ch != '0') break;
}
if (i>0) i--;
code = [code substringFromIndex:i];
self.qrCodeFrameView.frame = barCodeObject.bounds;
[self.qrCodeTextView setText:code];
self.qrCodeTextView.frame = CGRectMake(0, self.qrCodeFrameView.frame.size.height-20, self.qrCodeFrameView.frame.size.width, 20);
NSLog(@"%@", code);
[self handleBarcode:code];
} else {
self.qrCodeTextView.text = @"";
}
}
}
-(BOOL)checkUpcACode:(AVMetadataObjectType)type code:(NSString*)code {
// UPC-A codes are reported by AVFoundation as EAN-13 with a leading "0".
if ([type isEqualToString:AVMetadataObjectTypeEAN13Code]) {
if ([code length] > 0 && [code hasPrefix:@"0"]) {
return YES;
}
}
return NO;
}
@end

The solution came from PBK on the Apple Developer Forums: dispatch the barcode handling asynchronously to the main queue instead of calling it directly from the delegate callback.
// change [self handleBarcode:code];
// to
dispatch_async(dispatch_get_main_queue(), ^{
[self handleBarcode:code];
});
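Below is a minimal sketch of where that change lands, reusing the method and property names from the question's code above; everything else in the callback stays as posted.

- (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects.count == 0) {
        self.qrCodeFrameView.frame = CGRectZero;
        return;
    }
    AVMetadataMachineReadableCodeObject *metadataObj = (AVMetadataMachineReadableCodeObject *)metadataObjects[0];
    NSString *code = metadataObj.stringValue;
    if (code == nil || ![self checkUpcACode:metadataObj.type code:code]) {
        return;
    }
    // Return from the delegate callback right away and do the (potentially slow)
    // handling on the next main-queue turn, so the capture pipeline keeps delivering frames.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self handleBarcode:code];
    });
}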

Related

Screen recording of iPhone while playing a video

In my project I have to add a book page-flip animation, and on the right-hand page of this book a video plays. Once the first video completes, the page turns like a book page and the second video plays on the next right-hand page, and so on. Now I have to save all of this as a downloadable video, so that when the downloaded video is played from the gallery it looks the same as it does in my app. Right now I am recording the device's screen and saving the recording on a server for download. Everything is OK except the video player: in the recording, the region where the video is playing (the right-hand page of the book) is not captured.
I am using the code below to record the screen. If any of you has another idea for doing the same thing, please share it, or if my code needs changing, please suggest how. Thanks in advance.
// ASScreenRecorder.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
typedef void (^VideoCompletionBlock)(void);
@protocol ASScreenRecorderDelegate;
@interface ASScreenRecorder : NSObject
@property (nonatomic, readonly) BOOL isRecording;
@property (nonatomic, weak) id <ASScreenRecorderDelegate> delegate;
// if saveURL is nil, video will be saved into camera roll
// this property can not be changed whilst recording is in progress
@property (strong, nonatomic) NSURL *videoURL;
+ (instancetype)sharedInstance;
- (BOOL)startRecording;
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
@end
// If your view contains an AVCaptureVideoPreviewLayer or an openGL view
// you'll need to write that data into the CGContextRef yourself.
// In the viewcontroller responsible for the AVCaptureVideoPreviewLayer / openGL view
// set yourself as the delegate for ASScreenRecorder.
// [ASScreenRecorder sharedInstance].delegate = self
// Then implement 'writeBackgroundFrameInContext:(CGContextRef*)contextRef'
// use 'CGContextDrawImage' to draw your view into the provided CGContextRef
@protocol ASScreenRecorderDelegate <NSObject>
- (void)writeBackgroundFrameInContext:(CGContextRef*)contextRef;
@end
// ASScreenRecorder.m
// ScreenRecorder
//
// Created by Alan Skipp on 23/04/2014.
// Copyright (c) 2014 Alan Skipp. All rights reserved.
//
#import "ASScreenRecorder.h"
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import <AssetsLibrary/AssetsLibrary.h>
@interface ASScreenRecorder()
@property (strong, nonatomic) AVAssetWriter *videoWriter;
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
@property (strong, nonatomic) CADisplayLink *displayLink;
@property (strong, nonatomic) NSDictionary *outputBufferPoolAuxAttributes;
@property (nonatomic) CFTimeInterval firstTimeStamp;
@property (nonatomic) BOOL isRecording;
@end
@implementation ASScreenRecorder
{
dispatch_queue_t _render_queue;
dispatch_queue_t _append_pixelBuffer_queue;
dispatch_semaphore_t _frameRenderingSemaphore;
dispatch_semaphore_t _pixelAppendSemaphore;
CGSize _viewSize;
CGFloat _scale;
CGColorSpaceRef _rgbColorSpace;
CVPixelBufferPoolRef _outputBufferPool;
}
#pragma mark - initializers
+ (instancetype)sharedInstance {
static dispatch_once_t once;
static ASScreenRecorder *sharedInstance;
dispatch_once(&once, ^{
sharedInstance = [[self alloc] init];
});
return sharedInstance;
}
- (instancetype)init
{
self = [super init];
if (self) {
_viewSize = [UIApplication sharedApplication].delegate.window.bounds.size;
_scale = [UIScreen mainScreen].scale;
// record half size resolution for retina iPads
if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) {
_scale = 1.0;
}
_isRecording = NO;
_append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL);
_render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_render_queue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0));
_frameRenderingSemaphore = dispatch_semaphore_create(1);
_pixelAppendSemaphore = dispatch_semaphore_create(1);
}
return self;
}
#pragma mark - public
- (void)setVideoURL:(NSURL *)videoURL
{
NSAssert(!_isRecording, @"videoURL can not be changed whilst recording is in progress");
_videoURL = videoURL;
}
- (BOOL)startRecording
{
if (!_isRecording) {
[self setUpWriter];
_isRecording = (_videoWriter.status == AVAssetWriterStatusWriting);
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)];
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}
return _isRecording;
}
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
{
if (_isRecording) {
_isRecording = NO;
[_displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
[self completeRecordingSession:completionBlock];
}
}
#pragma mark - private
-(void)setUpWriter
{
_rgbColorSpace = CGColorSpaceCreateDeviceRGB();
NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
(id)kCVPixelBufferWidthKey : @(_viewSize.width * _scale),
(id)kCVPixelBufferHeightKey : @(_viewSize.height * _scale),
(id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * _scale * 4)
};
_outputBufferPool = NULL;
CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool);
NSError* error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:self.videoURL ?: [self tempFileURL]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
NSInteger pixelNumber = _viewSize.width * _viewSize.height * _scale;
NSDictionary* videoCompression = @{AVVideoAverageBitRateKey: @(pixelNumber * 11.4)};
NSDictionary* videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithInt:_viewSize.width*_scale],
AVVideoHeightKey: [NSNumber numberWithInt:_viewSize.height*_scale],
AVVideoCompressionPropertiesKey: videoCompression};
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
_videoWriterInput.transform = [self videoTransformForDeviceOrientation];
_avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil];
[_videoWriter addInput:_videoWriterInput];
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
}
- (CGAffineTransform)videoTransformForDeviceOrientation
{
CGAffineTransform videoTransform;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationLandscapeLeft:
videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
break;
case UIDeviceOrientationLandscapeRight:
videoTransform = CGAffineTransformMakeRotation(M_PI_2);
break;
case UIDeviceOrientationPortraitUpsideDown:
videoTransform = CGAffineTransformMakeRotation(M_PI);
break;
default:
videoTransform = CGAffineTransformIdentity;
}
return videoTransform;
}
- (NSURL*)tempFileURL
{
NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"];
[self removeTempFilePath:outputPath];
return [NSURL fileURLWithPath:outputPath];
}
- (void)removeTempFilePath:(NSString*)filePath
{
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath]) {
NSError* error;
if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
NSLog(@"Could not delete old recording:%@", [error localizedDescription]);
}
}
}
- (void)completeRecordingSession:(VideoCompletionBlock)completionBlock;
{
dispatch_async(_render_queue, ^{
dispatch_sync(_append_pixelBuffer_queue, ^{
[_videoWriterInput markAsFinished];
[_videoWriter finishWritingWithCompletionHandler:^{
void (^completion)(void) = ^() {
[self cleanup];
dispatch_async(dispatch_get_main_queue(), ^{
if (completionBlock) completionBlock();
});
};
if (self.videoURL) {
completion();
} else {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
NSLog(@"Error copying video to camera roll:%@", [error localizedDescription]);
} else {
[self removeTempFilePath:_videoWriter.outputURL.path];
completion();
}
}];
}
}];
});
});
}
- (void)cleanup
{
self.avAdaptor = nil;
self.videoWriterInput = nil;
self.videoWriter = nil;
self.firstTimeStamp = 0;
self.outputBufferPoolAuxAttributes = nil;
CGColorSpaceRelease(_rgbColorSpace);
CVPixelBufferPoolRelease(_outputBufferPool);
}
- (void)writeVideoFrame
{
// throttle the number of frames to prevent meltdown
// technique gleaned from Brad Larson's answer here: http://stackoverflow.com/a/5956119
if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
return;
}
dispatch_async(_render_queue, ^{
if (![_videoWriterInput isReadyForMoreMediaData]) return;
if (!self.firstTimeStamp) {
self.firstTimeStamp = _displayLink.timestamp;
}
CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp);
CMTime time = CMTimeMakeWithSeconds(elapsed, 1000);
CVPixelBufferRef pixelBuffer = NULL;
CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
if (self.delegate) {
[self.delegate writeBackgroundFrameInContext:&bitmapContext];
}
// draw each window into the context (other windows include UIKeyboard, UIAlert)
// FIX: UIKeyboard is currently only rendered correctly in portrait orientation
dispatch_sync(dispatch_get_main_queue(), ^{
UIGraphicsPushContext(bitmapContext); {
for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
[window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
}
} UIGraphicsPopContext();
});
// append pixelBuffer on a async dispatch_queue, the next frame is rendered whilst this one appends
// must not overwhelm the queue with pixelBuffers, therefore:
// check if _append_pixelBuffer_queue is ready
// if it’s not ready, release pixelBuffer and bitmapContext
if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) {
dispatch_async(_append_pixelBuffer_queue, ^{
BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success) {
NSLog(@"Warning: Unable to write buffer to video");
}
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
dispatch_semaphore_signal(_pixelAppendSemaphore);
});
} else {
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
}
dispatch_semaphore_signal(_frameRenderingSemaphore);
});
}
- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer
{
CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer);
CVPixelBufferLockBaseAddress(*pixelBuffer, 0);
CGContextRef bitmapContext = NULL;
bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer),
CVPixelBufferGetWidth(*pixelBuffer),
CVPixelBufferGetHeight(*pixelBuffer),
8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
);
CGContextScaleCTM(bitmapContext, _scale, _scale);
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
CGContextConcatCTM(bitmapContext, flipVertical);
return bitmapContext;
}
@end
@Gobinda: this code works in my case. You need to specify a frame if you want to record only a portion of the window. In the init method, viewSize is defined as the window size, so you need to change viewSize to the frame of your video region.
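A minimal sketch of that suggestion, mirroring the original init above; the initWithCaptureSize: initializer is a hypothetical addition to ASScreenRecorder.m (it uses the same ivars declared there), and the caller passes the size of the region it wants recorded instead of relying on the window bounds.

- (instancetype)initWithCaptureSize:(CGSize)captureSize
{
    self = [super init];
    if (self) {
        // Use the caller-supplied size instead of the full window bounds,
        // so the writer and pixel-buffer pool are sized to the video region only.
        _viewSize = captureSize;
        _scale = [UIScreen mainScreen].scale;
        // record half size resolution for retina iPads (same as the original init)
        if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) {
            _scale = 1.0;
        }
        _isRecording = NO;
        _append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL);
        _render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL);
        dispatch_set_target_queue(_render_queue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
        _frameRenderingSemaphore = dispatch_semaphore_create(1);
        _pixelAppendSemaphore = dispatch_semaphore_create(1);
    }
    return self;
}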

Need some help on how to construct/use a delegate

I'm struggling with delegate creation and usage. Could someone help me understand what I'm doing wrong? According to all the examples I have read this is correct but my data is not being returned.
ViewController (Parent)
.h
#import <UIKit/UIKit.h>
#import "BarcodeViewController.h"
@interface ViewController : UIViewController <BarcodeViewDelegate> {
IBOutlet UILabel *bcode;
}
@end
.m
-(void)setbarcode:(NSString*)barcode
{
NSLog(@" data %@", barcode);
bcode.text = barcode;
}
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender
{
// Make sure your segue name in storyboard is the same as this line
if ([[segue identifier] isEqualToString:@"scanbarcode"])
{
BarcodeViewController *cv = [segue destinationViewController];
cv.delegate = self;
}
}
The Barcode view controller (Child view)
.h
@protocol BarcodeViewDelegate <NSObject>
-(void)setbarcode:(NSString*)barcode;
@end
@interface BarcodeViewController : UIViewController
{
id<BarcodeViewDelegate> delegate;
}
@property(nonatomic,assign)id delegate;
@end
.m
#import "BarcodeViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface BarcodeViewController () <AVCaptureMetadataOutputObjectsDelegate>
{
AVCaptureSession *_session;
AVCaptureDevice *_device;
AVCaptureDeviceInput *_input;
AVCaptureMetadataOutput *_output;
AVCaptureVideoPreviewLayer *_prevLayer;
UIView *_highlightView;
UIImageView *_imageOverlay;
}
@end
@implementation BarcodeViewController
- (void)viewDidLoad
{
[super viewDidLoad];
/*
* setup scanner view
*/
_highlightView = [[UIView alloc] init];
_highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin|UIViewAutoresizingFlexibleLeftMargin|UIViewAutoresizingFlexibleRightMargin|UIViewAutoresizingFlexibleBottomMargin;
_highlightView.layer.borderColor = [UIColor greenColor].CGColor;
_highlightView.layer.borderWidth = 3;
[self.view addSubview:_highlightView];
/*
* setup a overlay guide
*/
_imageOverlay = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"camera_overlay"]];
[self.view addSubview:_imageOverlay];
_session = [[AVCaptureSession alloc] init];
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
_input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
if (_input) {
[_session addInput:_input];
} else {
NSLog(@"Error: %@", error);
}
_output = [[AVCaptureMetadataOutput alloc] init];
[_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
[_session addOutput:_output];
_output.metadataObjectTypes = [_output availableMetadataObjectTypes];
_prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_prevLayer.frame = self.view.bounds;
_prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:_prevLayer];
[_session startRunning];
[self.view bringSubviewToFront:_highlightView];
[self.view bringSubviewToFront:_imageOverlay];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
CGRect highlightViewRect = CGRectZero;
AVMetadataMachineReadableCodeObject *barCodeObject;
NSString *detectionString = nil;
NSArray *barCodeTypes = @[AVMetadataObjectTypeUPCECode, AVMetadataObjectTypeCode39Code, AVMetadataObjectTypeCode39Mod43Code,
AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code, AVMetadataObjectTypeCode93Code, AVMetadataObjectTypeCode128Code,
AVMetadataObjectTypePDF417Code, AVMetadataObjectTypeQRCode, AVMetadataObjectTypeAztecCode];
/*
* keep looking around while we look for the barcode
*/
for (AVMetadataObject *metadata in metadataObjects) {
for (NSString *type in barCodeTypes) {
if ([metadata.type isEqualToString:type])
{
barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
//highlightViewRect = barCodeObject.bounds;
detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
break;
}
}
if (detectionString != nil)
{
/*
* Set the detected barcode so we can use it.
* - perform segway to another view
*/
//barcode = detectionString;
//NSLog(@" data %@", detectionString);
[delegate setbarcode: detectionString];
[self.navigationController popViewControllerAnimated:YES];
break;
}
//else
/*
* Just reset the barcode value for now
*/
//_barcode = false;
}
_highlightView.frame = highlightViewRect;
}
@end
Your problem is simple.
You define an instance variable and a property to hold the delegate:
{
id delegate;
}
@property(nonatomic,assign)id delegate;
You set the delegate via the property:
cv.delegate = self;
Then access it via the instance variable:
[delegate setbarcode: detectionString];
The property is backed by a different, automatically synthesized instance variable named _delegate, which you should not be accessing directly. Always go through the property, as self.delegate. Your explicitly declared delegate ivar is never set, so when you send the callback through it, it is nil and nothing comes back.
Remove the unnecessary instance variable declaration, type the property correctly (as id<BarcodeViewDelegate> rather than just id), and always access it via the property, and you'll be fine.
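A short sketch of what that looks like, keeping the names from the question (the assign attribute matches the original; under ARC a weak delegate property would be the usual choice):

// BarcodeViewController.h
@protocol BarcodeViewDelegate <NSObject>
- (void)setbarcode:(NSString *)barcode;
@end

@interface BarcodeViewController : UIViewController
// No explicit ivar; the property alone is enough.
@property (nonatomic, assign) id<BarcodeViewDelegate> delegate;
@end

// BarcodeViewController.m, inside the metadata callback:
if (detectionString != nil) {
    // Always go through the property, never a separate ivar.
    [self.delegate setbarcode:detectionString];
    [self.navigationController popViewControllerAnimated:YES];
}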

Aztec Code not scanning

I am trying to scan an Aztec code using Apple's native API, but it is not scanning. Apple's guidelines say Aztec codes can be scanned, yet it is not working for me.
Please check the code I am using.
#import <UIKit/UIKit.h>
@interface igViewController : UIViewController
@end
#import <AVFoundation/AVFoundation.h>
#import "igViewController.h"
@interface igViewController () <AVCaptureMetadataOutputObjectsDelegate>
{
AVCaptureSession *_session;
AVCaptureDevice *_device;
AVCaptureDeviceInput *_input;
AVCaptureMetadataOutput *_output;
AVCaptureVideoPreviewLayer *_prevLayer;
UIView *_highlightView;
UILabel *_label;
}
@end
@implementation igViewController
- (void)viewDidLoad
{
[super viewDidLoad];
_highlightView = [[UIView alloc] init];
_highlightView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin|UIViewAutoresizingFlexibleLeftMargin|UIViewAutoresizingFlexibleRightMargin|UIViewAutoresizingFlexibleBottomMargin;
_highlightView.layer.borderColor = [UIColor greenColor].CGColor;
_highlightView.layer.borderWidth = 3;
[self.view addSubview:_highlightView];
_label = [[UILabel alloc] init];
_label.frame = CGRectMake(0, self.view.bounds.size.height - 40, self.view.bounds.size.width, 40);
_label.autoresizingMask = UIViewAutoresizingFlexibleTopMargin;
_label.backgroundColor = [UIColor colorWithWhite:0.15 alpha:0.65];
_label.textColor = [UIColor whiteColor];
_label.textAlignment = NSTextAlignmentCenter;
_label.text = @"(none)";
[self.view addSubview:_label];
_session = [[AVCaptureSession alloc] init];
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
_input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
if (_input) {
[_session addInput:_input];
} else {
NSLog(@"Error: %@", error);
}
_output = [[AVCaptureMetadataOutput alloc] init];
[_output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
[_session addOutput:_output];
_output.metadataObjectTypes = [_output availableMetadataObjectTypes];
for (NSString* avail in _output.metadataObjectTypes) {
NSLog(@"Avail...%@", avail);
}
_prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_prevLayer.frame = self.view.bounds;
_prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer:_prevLayer];
[_session startRunning];
[self.view bringSubviewToFront:_highlightView];
[self.view bringSubviewToFront:_label];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
NSLog(@"Failed...");
CGRect highlightViewRect = CGRectZero;
AVMetadataMachineReadableCodeObject *barCodeObject;
NSString *detectionString = nil;
NSArray *barCodeTypes = @[AVMetadataObjectTypeAztecCode];
for (AVMetadataObject *metadata in metadataObjects) {
NSLog(@".....%@", metadata.type);
for (NSString *type in barCodeTypes) {
if ([metadata.type isEqualToString:type])
{
barCodeObject = (AVMetadataMachineReadableCodeObject *)[_prevLayer transformedMetadataObjectForMetadataObject:(AVMetadataMachineReadableCodeObject *)metadata];
highlightViewRect = barCodeObject.bounds;
detectionString = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
break;
}
}
if (detectionString != nil)
{
_label.text = detectionString;
break;
}
else
_label.text = @"(none)";
}
//_label.text = @"(nonessss)";
_highlightView.frame = highlightViewRect;
}
@end
This is my first answer on SO and I'm a total beginner with Objective-C and iOS development, so please be a little gentle with me.
I can't help you fix the errors in your code, as it is still very hard for me as a beginner to see what's going on, but just a few days ago I successfully followed a tutorial on how to do exactly what you need. I adjusted the tutorial's code and added comments where I needed them, so it should be easy to follow if you'd like to try it. Since it seems to be frowned upon to post only a link here, I'm posting my code.
This is a view controller that directly opens a scan view and reacts when a barcode (Aztec in my case) is found. It should be easy to adjust to your needs. The tutorial used AVMetadataObjectTypeQRCode; to scan Aztec codes, simply replace it with AVMetadataObjectTypeAztecCode, which I have already done in my code.
ScanVC.h (in your case igViewController)
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface ScanVC : UIViewController <AVCaptureMetadataOutputObjectsDelegate>
@property (retain, nonatomic) UILabel *scannerWindow;
@property (retain, nonatomic) UILabel *statusLabel;
@property (retain, nonatomic) UIButton *cancelButton;
@end
ScanVC.m
#import "ScanVC.h"
@interface ScanVC ()
@property (nonatomic) BOOL isReading;
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@end
@implementation ScanVC
@synthesize cancelButton;
@synthesize statusLabel;
@synthesize scannerWindow;
- (void)viewDidLoad {
[super viewDidLoad];
_isReading = NO;
_captureSession = nil;
//place a close button
cancelButton = [UIButton buttonWithType:UIButtonTypeSystem];
[cancelButton addTarget:self action:@selector(closeScan) forControlEvents:UIControlEventTouchUpInside];
[cancelButton setTitle:@"Close" forState:UIControlStateNormal];
cancelButton.frame = CGRectMake(0, 410, 250, 40);
[self.view addSubview:cancelButton];
//place a status label
statusLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 340, 250, 40)];
statusLabel.text = @"Currently not scanning";
[self.view addSubview:statusLabel];
//place the scanner window (adjust the size)
scannerWindow = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 250, 250)];
scannerWindow.text = @"Camera Capture Window";
[self.view addSubview:scannerWindow];
//start the scan immediately when the view loads
[self startStopScan];
}
- (void)closeScan {
if(_isReading) {
[self stopReading];
}
_isReading = !_isReading;
//dismiss the view controller here?
}
- (void)startStopScan {
if (!_isReading) {
if([self startReading]) {
[statusLabel setText:@"Scanning for Barcode"];
}
} else {
[self stopReading];
}
_isReading = !_isReading;
}
- (BOOL)startReading {
NSError *error;
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if(!input) {
NSLog(@"%@", [error localizedDescription]);
return NO;
}
_captureSession = [[AVCaptureSession alloc] init];
[_captureSession addInput:input];
AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
[_captureSession addOutput:captureMetadataOutput];
dispatch_queue_t dispatchQueue;
dispatchQueue = dispatch_queue_create("myQueue", NULL);
[captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
[captureMetadataOutput setMetadataObjectTypes:[NSArray arrayWithObject:AVMetadataObjectTypeAztecCode]];
//show the preview to the user
_videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
[_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[_videoPreviewLayer setFrame:scannerWindow.layer.bounds];
[scannerWindow.layer addSublayer:_videoPreviewLayer];
[_captureSession startRunning];
return YES;
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
if (metadataObjects != nil && [metadataObjects count] > 0) {
AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeAztecCode]) {
[statusLabel performSelectorOnMainThread:@selector(setText:) withObject:[metadataObj stringValue] waitUntilDone:NO];
[self performSelectorOnMainThread:@selector(stopReading) withObject:nil waitUntilDone:NO];
_isReading = NO;
//do things after a successful scan here
NSLog(@"scanner output %@", [metadataObj stringValue]);
}
}
}
- (void)stopReading {
[_captureSession stopRunning];
_captureSession = nil;
[_videoPreviewLayer removeFromSuperlayer];
}
@end

iOS App: Preview Layer freezes after Recording button is clicked

I am developing an iOS app that records video using the rear camera.
I have the preview layer working fine.
However, when I tap the Record button, the preview freezes.
My code is below. Please help me solve this problem.
Pg5VideoViewController.h
@interface Pg5VideoViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureFileOutputRecordingDelegate> {
BOOL WeAreRecording;
IBOutlet UIView *videoViewBg;
AVCaptureSession *_captureSession;
UIImageView *_imageView;
CALayer *_customLayer;
AVCaptureVideoPreviewLayer *_prevLayer;
UIColor *pickedColor;
AVCaptureMovieFileOutput *movieFileOutput;
IBOutlet UIView *theColor;
}
@property (nonatomic,retain) IBOutlet UIView *theColor;
@property (nonatomic,retain) UIColor *pickedColor;
@property (nonatomic,retain) IBOutlet UIView *videoViewBg;
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
@property (nonatomic, retain) AVCaptureMovieFileOutput *movieFileOutput;
-(void)initCapture;
-(UIColor *) colorOfPoint:(CGPoint)point;
-(IBAction)takeVideo:(id)sender;
@end
the Pg5VideoViewController.m file:
@implementation Pg5VideoViewController
@synthesize videoViewBg;
@synthesize captureSession = _captureSession;
@synthesize imageView = _imageView;
@synthesize customLayer = _customLayer;
@synthesize prevLayer = _prevLayer;
@synthesize pickedColor = _pickedColor;
@synthesize theColor = _theColor;
@synthesize movieFileOutput = _movieFileOutput;
#pragma mark -
#pragma mark Initialization
- (id)init {
self = [super init];
if (self) {
self.imageView = nil;
self.prevLayer = nil;
self.customLayer = nil;
}
return self;
}
- (void)initCapture {
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
error:nil];
movieFileOutput = [[AVCaptureVideoDataOutput alloc] init];
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[movieFileOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[movieFileOutput setVideoSettings:videoSettings];
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession addInput:captureInput];
[self.captureSession addOutput:movieFileOutput];
[self.captureSession setSessionPreset:AVCaptureSessionPresetMedium];
self.customLayer = [CALayer layer];
self.customLayer.frame = CGRectMake(42, 40, 940, 558);
//self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);
//self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
[self.view.layer addSublayer:self.customLayer];
[self.captureSession startRunning];
}
#pragma mark -
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer,0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
[self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage waitUntilDone:YES];
UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
CGImageRelease(newImage);
[self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
[pool drain];
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(@"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
NSLog(@"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
}
}];
}
[library release];
}
}
- (void)viewDidAppear:(BOOL)animated {
}
- (IBAction)takeVideo:(id)sender {
AVCaptureMovieFileOutput *movieFileOutput1 = [[AVCaptureMovieFileOutput alloc] init];
if(!WeAreRecording) {
NSLog(@"START RECORDING");
WeAreRecording = YES;
self.videoViewBg.backgroundColor = [UIColor redColor];
NSDateFormatter *formatter;
NSString *dateString;
formatter = [[NSDateFormatter alloc]init];
[formatter setDateFormat:@"dd-MM-yyyy HH:mm:ss"];
dateString = [formatter stringFromDate:[NSDate date]];
[formatter release];
NSLog(@"The dateString is : %@",dateString);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
NSString *movieFileName = [NSString stringWithFormat: @"%@.mp4",dateString];
NSString *filePath = [documentsDirectoryPath stringByAppendingPathComponent:movieFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:filePath];
[self.captureSession stopRunning];
[self.captureSession beginConfiguration];
// [self.captureSession removeOutput:movieFileOutput];
if([self.captureSession canAddOutput:movieFileOutput1])
{
[self.captureSession addOutput:movieFileOutput1];
}
else
{
NSLog(@"Couldn't add still output");
}
[movieFileOutput1 startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[self.captureSession commitConfiguration];
[self.captureSession startRunning];
[outputURL release];
} else {
NSLog(@"STOP RECORDING");
WeAreRecording = NO;
self.videoViewBg.backgroundColor = [UIColor whiteColor];
[movieFileOutput1 stopRecording];
[self.captureSession removeOutput:movieFileOutput1];
}
}
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
UITouch *touch = [[event allTouches] anyObject];
CGPoint loc = [touch locationInView:self.view];
self.pickedColor = [self colorOfPoint:loc];
self.theColor.backgroundColor = self.pickedColor;
}
-(UIColor *) colorOfPoint:(CGPoint)point {
unsigned char pixel[4] = {0};
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pixel, 1, 1, 8, 4, colorSpace, kCGImageAlphaPremultipliedLast);
CGContextTranslateCTM(context, -point.x, -point.y);
[self.view.layer renderInContext:context];
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
UIColor *color = [UIColor colorWithRed:pixel[0]/255.0 green:pixel[1]/255.0 blue:pixel[2]/255.0 alpha:pixel[3]/255.0];
return color;
}
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad {
[super viewDidLoad];
[self initCapture];
WeAreRecording = NO;
self.videoViewBg.layer.cornerRadius = 55;
}
// Override to allow orientations other than the default portrait orientation.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
if(interfaceOrientation == UIInterfaceOrientationLandscapeRight) {
return YES;
}
return NO;
}
- (void)didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc. that aren't in use.
}
- (void)viewDidUnload {
[super viewDidUnload];
self.imageView = nil;
self.customLayer = nil;
self.prevLayer = nil;
[self.captureSession stopRunning];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (void)dealloc {
[movieFileOutput release];
[self.captureSession release];
[super dealloc];
}
@end
Please help
The problem here is not trivial. AVFoundation simply can't handle both AVCaptureMovieFileOutput and AVCaptureVideoDataOutput simultaneously. That means you can't display the preview (which requires AVCaptureVideoDataOutput) while recording (which requires AVCaptureMovieFileOutput). This is very stupid, but that's life.
The only way I know to do this is to use only AVCaptureVideoDataOutput and, inside captureOutput:didOutputSampleBuffer:fromConnection:, write the frames manually to the video file. The following code snippets should help.
Properties
@property (strong, nonatomic) AVAssetWriter* recordingAssetWriter;
@property (strong, nonatomic) AVAssetWriterInput* recordingAssetWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor* recordingPixelBufferAdaptor;
To initialize the video file (when you start recording or something)
// Init AVAssetWriter
NSError* error = nil;
self.recordingAssetWriter = [[AVAssetWriter alloc] initWithURL:<the video file URL> fileType:AVFileTypeMPEG4 error:&error];
// Init AVAssetWriterInput & AVAssetWriterInputPixelBufferAdaptor
NSDictionary* settings = @{AVVideoWidthKey: @(480), AVVideoHeightKey: @(640), AVVideoCodecKey: AVVideoCodecH264};
self.recordingAssetWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:settings];
self.recordingAssetWriterInput.expectsMediaDataInRealTime = YES;
self.recordingPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:self.recordingAssetWriterInput sourcePixelBufferAttributes:@{(NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
// Add Input
[self.recordingAssetWriter addInput:self.recordingAssetWriterInput];
// Start ...
_recording = YES;
To write frames to the video file
// Inside the captureOutput:didOutputSampleBuffer:fromConnection: delegate method
// _recording is the flag to see if we're recording
if (_recording) {
// pixelBuffer comes straight from the sample buffer delivered by the data output
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if (self.recordingAssetWriter.status != AVAssetWriterStatusWriting) {
[self.recordingAssetWriter startWriting];
[self.recordingAssetWriter startSessionAtSourceTime:sampleTime];
}
if (self.recordingAssetWriterInput.readyForMoreMediaData) {
[self.recordingPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:sampleTime];
}
}
To finalize the video file when finish recording:
[self.recordingAssetWriterInput markAsFinished];
[self.recordingAssetWriter finishWritingWithCompletionHandler:^{
// Do not do this immediately after calling finishWritingWithCompletionHandler, since it is an async method
self.recordingAssetWriter = nil;
self.recordingAssetWriterInput = nil;
self.recordingPixelBufferAdaptor = nil;
}];
Note that I omitted error checking for clarity.
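For completeness, here is a hedged sketch of the capture-side setup this approach assumes: a single AVCaptureVideoDataOutput delivering BGRA frames to the delegate method quoted above. The captureSession property and the queue name are assumptions, not part of the original answer.

AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
// BGRA matches the pixel format the pixel buffer adaptor above was configured with.
videoOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
dispatch_queue_t videoQueue = dispatch_queue_create("videoOutputQueue", DISPATCH_QUEUE_SERIAL);
[videoOutput setSampleBufferDelegate:self queue:videoQueue];
if ([self.captureSession canAddOutput:videoOutput]) {
    [self.captureSession addOutput:videoOutput];
}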

iOS: Take picture with front-facing camera

I'm trying to build a non-realtime face detection application.
Following this article: http://maniacdev.com/2011/11/tutorial-easy-face-detection-with-core-image-in-ios-5/ I can load in a jpg and detect faces.
I would like to automatically take a picture every 20 seconds, then display the image in a UIImageView* and then run the existing detect face function on it.
My question is twofold.
Is there an easy way to take a sample picture from the camera and
load it into a UIImageView* without saving it?
How can I automate this to happen every 30 seconds with no user interaction?
Thanks!
Look at the AVFoundation Programming Guide. It shows you how to use AVFoundation to capture media.
You will need to take device rotation into account, as the camera will display only its raw output until you rotate the output via a CATransform3D, but that is a bit more in depth than you want. You may be able to get away with just knowing how far to rotate from the original orientation to the final one (see the orientationChanged: method below, which rotates in 90° steps).
Here is my code for my little camera testing utility.
Build a UIView and connect the IBOutlets and IBActions
ViewController.h
#import <UIKit/UIKit.h>
@interface ViewController : UIViewController
@property (weak, nonatomic) IBOutlet UIView *previewViewContainer;
@property (weak, nonatomic) IBOutlet UIView *playerViewContainer;
- (IBAction)button1Pressed:(id)sender;
- (IBAction)button2Pressed:(id)sender;
- (IBAction)button3Pressed:(id)sender;
- (IBAction)button4Pressed:(id)sender;
- (IBAction)startPressed:(id)sender;
- (IBAction)stopPressed:(id)sender;
- (IBAction)swapInputsPressed:(id)sender;
- (IBAction)recordPressed:(id)sender;
@end
ViewController.m
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface ViewController ()
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *capturePreviewLayer;
@property (nonatomic, strong) AVCaptureDeviceInput *frontCam;
@property (nonatomic, readonly) BOOL frontCamIsSet;
@property (nonatomic, readonly) BOOL hasFrontCam;
@property (nonatomic, readonly) BOOL isUsingFrontCam;
@property (nonatomic, strong) AVCaptureDeviceInput *backCam;
@property (nonatomic, readonly) BOOL backCamIsSet;
@property (nonatomic, readonly) BOOL hasBackCam;
@property (nonatomic, readonly) BOOL isUsingBackCam;
@property (nonatomic, strong) AVCaptureDeviceInput *mic;
@property (nonatomic, readonly) BOOL micIsSet;
@property (nonatomic, readonly) BOOL hasMic;
@end
CGFloat DegreesToRadians(CGFloat degrees)
{
return degrees * M_PI / 180;
};
CGFloat RadiansToDegrees(CGFloat radians)
{
return radians * 180 / M_PI;
};
@implementation ViewController
#pragma mark - Helper Methods
- (NSArray *) inputDevices{
return [AVCaptureDevice devices];
}
- (NSArray *) videoInputDevices{
return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}
- (NSArray *) audioInputDevices{
return [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
}
#pragma mark - Properties
@synthesize captureSession = _captureSession;
- (AVCaptureSession *)captureSession{
if (_captureSession == nil){
_captureSession = [[AVCaptureSession alloc] init];
}
return _captureSession;
}
@synthesize capturePreviewLayer = _capturePreviewLayer;
- (AVCaptureVideoPreviewLayer *)capturePreviewLayer{
if (_capturePreviewLayer == nil){
_capturePreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
}
return _capturePreviewLayer;
}
@synthesize frontCam = _frontCam;
- (AVCaptureDeviceInput *)frontCam{
if (_frontCam == nil && !self.frontCamIsSet){
_frontCamIsSet = YES;
NSArray *videoDevices = [self videoInputDevices];
for (AVCaptureDevice *inputDevice in videoDevices) {
if ([inputDevice position] == AVCaptureDevicePositionFront){
NSError *error = nil;
_frontCam = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if (!_frontCam){
NSLog(@"Error Attaching Front Cam %@",error);
}
}
}
}
return _frontCam;
}
- (BOOL)hasFrontCam{
return self.frontCam != nil;
}
@synthesize isUsingFrontCam = _isUsingFrontCam;
@synthesize backCam = _backCam;
- (AVCaptureDeviceInput *)backCam{
if (_backCam == nil && !self.backCamIsSet){
_backCamIsSet = YES;
NSArray *videoDevices = [self videoInputDevices];
for (AVCaptureDevice *inputDevice in videoDevices) {
if ([inputDevice position] == AVCaptureDevicePositionBack){
NSError *error = nil;
_backCam = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if (!_backCam){
NSLog(@"Error Attaching Back Cam %@",error);
}
}
}
}
return _backCam;
}
- (BOOL)hasBackCam{
return self.backCam != nil;
}
@synthesize mic = _mic;
- (AVCaptureDeviceInput *)mic{
if (_mic == nil && !self.micIsSet){
_micIsSet = YES;
NSArray *audioDevices = [self audioInputDevices];
for (AVCaptureDevice *inputDevice in audioDevices) {
NSError *error = nil;
_mic = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if (!_mic){
NSLog(@"Error Attaching Mic %@",error);
}
}
}
return _mic;
}
- (BOOL)hasMic{
return self.mic != nil;
}
- (BOOL)isUsingBackCam{
return !self.isUsingFrontCam;
}
- (IBAction)button1Pressed:(id)sender {
if (NO && self.hasFrontCam && [self.captureSession canAddInput:self.frontCam]){
_isUsingFrontCam = YES;
[self.captureSession addInput:self.frontCam];
}
else if(self.hasBackCam && [self.captureSession canAddInput:self.backCam]){
_isUsingFrontCam = NO;
[self.captureSession addInput:self.backCam];
}
if (self.hasMic && [self.captureSession canAddInput:self.mic]) {
[self.captureSession addInput:self.mic];
}
}
- (IBAction)button2Pressed:(id)sender {
self.capturePreviewLayer.frame = self.previewViewContainer.layer.bounds;
[self.previewViewContainer.layer addSublayer:self.capturePreviewLayer];
}
- (void) orientationChanged:(NSNotification*) notification{
NSLog(@"Notification Of Orientation Change\n\n%@",notification.userInfo);
if (_capturePreviewLayer != nil){
CGFloat rotate90 = DegreesToRadians(90);
CGFloat rotateFinish = 0;
UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
switch (orientation) {
// The missing break statements below are intentional: each case falls
// through so the rotation accumulates in 90-degree steps.
case UIDeviceOrientationLandscapeLeft:
rotateFinish += rotate90;
case UIDeviceOrientationPortraitUpsideDown:
rotateFinish += rotate90;
case UIDeviceOrientationLandscapeRight:
rotateFinish += rotate90;
case UIDeviceOrientationPortrait:
default:
break;
}
_capturePreviewLayer.transform = CATransform3DMakeRotation(rotateFinish, 0.0, 0.0, 1.0);
}
}
- (IBAction)button3Pressed:(id)sender {
}
- (IBAction)button4Pressed:(id)sender {
}
- (IBAction)startPressed:(id)sender {
[self.captureSession startRunning];
}
- (IBAction)stopPressed:(id)sender {
[self.captureSession stopRunning];
}
- (IBAction)swapInputsPressed:(id)sender {
if (!self.isUsingFrontCam){
_isUsingFrontCam = YES;
[self.captureSession removeInput:self.backCam];
[self.captureSession addInput:self.frontCam];
}
else {
_isUsingFrontCam = NO;
[self.captureSession removeInput:self.frontCam];
[self.captureSession addInput:self.backCam];
}
}
- (IBAction)recordPressed:(id)sender {
}
- (NSString *) applicationDocumentsDirectory{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
return basePath;
}
- (void)viewDidLoad{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(orientationChanged:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
}
- (void) dealloc{
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:UIDeviceOrientationDidChangeNotification
object:nil];
}
- (void)didReceiveMemoryWarning{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
Fortunately for you, I just built this test app for grabbing photos.
Oh, before I forget: rendering a CALayer into an image is as simple as
+ (UIImage *) captureImageOfView:(UIView *)srcView{
UIGraphicsBeginImageContext(srcView.bounds.size);
[srcView.layer renderInContext:UIGraphicsGetCurrentContext()];
UIImage *anImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return anImage;
}
However, I recommend you look into the AVFoundation Programming Guide to see how they actually capture it. This was just my own demo app and, as I said, it's not complete.
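As for the "every 20-30 seconds with no user interaction" part of the question, a minimal sketch of the scheduling is below. The captureTimer property and takeSampleAndDetectFaces method are hypothetical names, and how the frame is actually grabbed (for example via a still-image output added to the capture session) is left to whatever capture setup you choose.

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    // Fire a repeating timer; each tick grabs a frame and runs face detection on it.
    self.captureTimer = [NSTimer scheduledTimerWithTimeInterval:30.0
                                                         target:self
                                                       selector:@selector(takeSampleAndDetectFaces)
                                                       userInfo:nil
                                                        repeats:YES];
}

- (void)takeSampleAndDetectFaces {
    // Hypothetical hook: obtain a UIImage from the running capture session,
    // set it on the UIImageView, then run the existing Core Image face detection.
}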
