iOS: Take picture with front-facing camera

I'm trying to build a non-realtime face detection application.
Following this article: http://maniacdev.com/2011/11/tutorial-easy-face-detection-with-core-image-in-ios-5/ I can load in a jpg and detect faces.
I would like to automatically take a picture every 20 seconds, then display the image in a UIImageView* and then run the existing detect face function on it.
My question is twofold.
Is there an easy way to take a sample picture from the camera and load it into a UIImageView* without saving it?
How can I automate this to happen every 30 seconds with no user interaction?
Thanks!

Look at the AVFoundation Programming Guide. It shows you how to use AVFoundation to capture media.
You will need to take device rotation into account, as the camera will display only its raw output until you rotate the output via a CATransform3D. But that is a bit more in depth than you want.
You may be able to get away with just knowing that the output rotates in 90° steps from the original orientation to the final one (see the orientationChanged: handler below).
Here is my code for my little camera testing utility.
Build a UIView and connect the IBOutlets and IBActions
ViewController.h
#import <UIKit/UIKit.h>
@interface ViewController : UIViewController
@property (weak, nonatomic) IBOutlet UIView *previewViewContainer;
@property (weak, nonatomic) IBOutlet UIView *playerViewContainer;
- (IBAction)button1Pressed:(id)sender;
- (IBAction)button2Pressed:(id)sender;
- (IBAction)button3Pressed:(id)sender;
- (IBAction)button4Pressed:(id)sender;
- (IBAction)startPressed:(id)sender;
- (IBAction)stopPressed:(id)sender;
- (IBAction)swapInputsPressed:(id)sender;
- (IBAction)recordPressed:(id)sender;
@end
ViewController.m
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface ViewController ()
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *capturePreviewLayer;
@property (nonatomic, strong) AVCaptureDeviceInput *frontCam;
@property (nonatomic, readonly) BOOL frontCamIsSet;
@property (nonatomic, readonly) BOOL hasFrontCam;
@property (nonatomic, readonly) BOOL isUsingFrontCam;
@property (nonatomic, strong) AVCaptureDeviceInput *backCam;
@property (nonatomic, readonly) BOOL backCamIsSet;
@property (nonatomic, readonly) BOOL hasBackCam;
@property (nonatomic, readonly) BOOL isUsingBackCam;
@property (nonatomic, strong) AVCaptureDeviceInput *mic;
@property (nonatomic, readonly) BOOL micIsSet;
@property (nonatomic, readonly) BOOL hasMic;
@end
CGFloat DegreesToRadians(CGFloat degrees)
{
return degrees * M_PI / 180;
};
CGFloat RadiansToDegrees(CGFloat radians)
{
return radians * 180 / M_PI;
};
@implementation ViewController
#pragma mark - Helper Methods
- (NSArray *) inputDevices{
return [AVCaptureDevice devices];
}
- (NSArray *) videoInputDevices{
return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}
- (NSArray *) audioInputDevices{
return [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
}
#pragma mark - Properties
@synthesize captureSession = _captureSession;
- (AVCaptureSession *)captureSession{
if (_captureSession == nil){
_captureSession = [[AVCaptureSession alloc] init];
}
return _captureSession;
}
@synthesize capturePreviewLayer = _capturePreviewLayer;
- (AVCaptureVideoPreviewLayer *)capturePreviewLayer{
if (_capturePreviewLayer == nil){
_capturePreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
}
return _capturePreviewLayer;
}
@synthesize frontCam = _frontCam;
- (AVCaptureDeviceInput *)frontCam{
if (_frontCam == nil && !self.frontCamIsSet){
_frontCamIsSet = YES;
NSArray *videoDevices = [self videoInputDevices];
for (AVCaptureDevice *inputDevice in videoDevices) {
if ([inputDevice position] == AVCaptureDevicePositionFront){
NSError *error = nil;
_frontCam = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if (!_frontCam){
NSLog(#"Error Attaching Front Cam %#",error);
}
}
}
}
return _frontCam;
}
- (BOOL)hasFrontCam{
return self.frontCam != nil;
}
@synthesize isUsingFrontCam = _isUsingFrontCam;
@synthesize backCam = _backCam;
- (AVCaptureDeviceInput *)backCam{
if (_backCam == nil && !self.backCamIsSet){
_backCamIsSet = YES;
NSArray *videoDevices = [self videoInputDevices];
for (AVCaptureDevice *inputDevice in videoDevices) {
if ([inputDevice position] == AVCaptureDevicePositionBack){
NSError *error = nil;
_backCam = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if (!_backCam){
NSLog(#"Error Attaching Back Cam %#",error);
}
}
}
}
return _backCam;
}
- (BOOL)hasBackCam{
return self.backCam != nil;
}
@synthesize mic = _mic;
- (AVCaptureDeviceInput *)mic{
if (_mic == nil && !self.micIsSet){
_micIsSet = YES;
NSArray *audioDevices = [self audioInputDevices];
for (AVCaptureDevice *inputDevice in audioDevices) {
NSError *error = nil;
_mic = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if (!_mic){
NSLog(#"Error Attaching Mic %#",error);
}
}
}
return _mic;
}
- (BOOL)hasMic{
return self.mic != nil;
}
- (BOOL)isUsingBackCam{
return !self.isUsingFrontCam;
}
- (IBAction)button1Pressed:(id)sender {
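// Note: the NO here short-circuits the front-camera branch, so the back camera below is used
// (presumably left in from testing).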
if (NO && self.hasFrontCam && [self.captureSession canAddInput:self.frontCam]){
_isUsingFrontCam = YES;
[self.captureSession addInput:self.frontCam];
}
else if(self.hasBackCam && [self.captureSession canAddInput:self.backCam]){
_isUsingFrontCam = NO;
[self.captureSession addInput:self.backCam];
}
if (self.hasMic && [self.captureSession canAddInput:self.mic]) {
[self.captureSession addInput:self.mic];
}
}
- (IBAction)button2Pressed:(id)sender {
self.capturePreviewLayer.frame = self.previewViewContainer.layer.bounds;
[self.previewViewContainer.layer addSublayer:self.capturePreviewLayer];
}
- (void) orientationChanged:(NSNotification*) notification{
NSLog(#"Notification Of Orientation Change\n\n%#",notification.userInfo);
if (_capturePreviewLayer != nil){
CGFloat rotate90 = DegreesToRadians(90);
CGFloat rotateFinish = 0;
UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
switch (orientation) {
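// Intentional fall-through: each case adds another 90°, so LandscapeLeft ends up at 270°,
// PortraitUpsideDown at 180°, and LandscapeRight at 90°.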
case UIDeviceOrientationLandscapeLeft:
rotateFinish += rotate90;
case UIDeviceOrientationPortraitUpsideDown:
rotateFinish += rotate90;
case UIDeviceOrientationLandscapeRight:
rotateFinish += rotate90;
case UIDeviceOrientationPortrait:
default:
break;
}
_capturePreviewLayer.transform = CATransform3DMakeRotation(rotateFinish, 0.0, 0.0, 1.0);
}
}
- (IBAction)button3Pressed:(id)sender {
}
- (IBAction)button4Pressed:(id)sender {
}
- (IBAction)startPressed:(id)sender {
[self.captureSession startRunning];
}
- (IBAction)stopPressed:(id)sender {
[self.captureSession stopRunning];
}
- (IBAction)swapInputsPressed:(id)sender {
if (!self.isUsingFrontCam){
_isUsingFrontCam = YES;
[self.captureSession removeInput:self.backCam];
[self.captureSession addInput:self.frontCam];
}
else {
_isUsingFrontCam = NO;
[self.captureSession removeInput:self.frontCam];
[self.captureSession addInput:self.backCam];
}
}
- (IBAction)recordPressed:(id)sender {
}
- (NSString *) applicationDocumentsDirectory{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
return basePath;
}
- (void)viewDidLoad{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(orientationChanged:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
}
- (void) dealloc{
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:UIDeviceOrientationDidChangeNotification
object:nil];
}
- (void)didReceiveMemoryWarning{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
Fortunately for you, I just built this test app for grabbing photos.
Oh, before I forget: rendering a CALayer into an image is as simple as
+ (UIImage *) captureImageOfView:(UIView *)srcView{
UIGraphicsBeginImageContext(srcView.bounds.size);
[srcView.layer renderInContext:UIGraphicsGetCurrentContext()];
UIImage *anImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return anImage;
}
However, I recommend you look into the AVFoundation Programming Guide to see how they actually capture it. This was just my own demo app and, as I said, it's not complete.
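To answer the original two questions (grab a frame periodically without saving it, and do it with no user interaction), one approach is an AVCaptureStillImageOutput attached to the same session, driven by an NSTimer. This is only a rough sketch based on the capture flow the AVFoundation guide describes; the stillImageOutput property, the imageView outlet and the detectFacesInImage: method are assumed names standing in for your own still-output property, UIImageView and existing Core Image face-detection code.
- (void)setUpTimedCapture
{
// Attach a still-image output to the session configured above (assumed strong property).
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
self.stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
if ([self.captureSession canAddOutput:self.stillImageOutput]) {
[self.captureSession addOutput:self.stillImageOutput];
}
// Fire every 20 seconds with no user interaction.
[NSTimer scheduledTimerWithTimeInterval:20.0
target:self
selector:@selector(captureFrame)
userInfo:nil
repeats:YES];
}
- (void)captureFrame
{
AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection
completionHandler:^(CMSampleBufferRef sampleBuffer, NSError *error) {
if (sampleBuffer == NULL) return;
NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
UIImage *image = [UIImage imageWithData:jpegData];
dispatch_async(dispatch_get_main_queue(), ^{
self.imageView.image = image; // assumed UIImageView outlet -- nothing is written to disk
[self detectFacesInImage:image]; // your existing Core Image face-detection method
});
}];
}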

Related

Improving the speed of reading UPC-A Barcodes Objective-C (Xcode)

I am trying to speed up the reading of barcodes in my app; the app works fine but is a tad slow at reading barcodes.
How do I improve the speed of reading the barcodes?
Here is the code I have so far.
#import "ScanViewController.h"
#import "Utils.h"
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
@interface ScanViewController ()<AVCaptureMetadataOutputObjectsDelegate>
@property (nonatomic, readwrite) AVCaptureSession *captureSession;
@property (nonatomic, readwrite) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, readwrite) UIView *qrCodeFrameView;
@property (nonatomic, readwrite) UILabel *qrCodeTextView;
@property (nonatomic, readwrite) NSArray *supportedCodeTypes;
@property (nonatomic, readwrite) long long lastScanTime;
@property (nonatomic, readwrite) NSString *lastScanCode;
@property (nonatomic, readwrite) AVAudioPlayer *audioPlayer;
@end
@implementation ScanViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
self.captureSession = [AVCaptureSession new];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
self.supportedCodeTypes = @[AVMetadataObjectTypeUPCECode,
AVMetadataObjectTypeEAN13Code,
AVMetadataObjectTypeEAN8Code];
// AVMetadataObjectTypeCode39Code,
// AVMetadataObjectTypeCode39Mod43Code,
// AVMetadataObjectTypeCode93Code,
// AVMetadataObjectTypeCode128Code,
// AVMetadataObjectTypeAztecCode,
// AVMetadataObjectTypePDF417Code,
// AVMetadataObjectTypeITF14Code,
// AVMetadataObjectTypeDataMatrixCode,
// AVMetadataObjectTypeInterleaved2of5Code,
// AVMetadataObjectTypeQRCode];
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if(captureDevice == nil) {
NSLog(@"Failed to get the camera device");
return;
}
@try {
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
// Set the input device on the capture session.
[self.captureSession addInput:input];
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
[self.captureSession addOutput:captureMetadataOutput];
// Set delegate and use the default dispatch queue to execute the call back
[captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
captureMetadataOutput.metadataObjectTypes = self.supportedCodeTypes;
// captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
} @catch (NSException *error) {
// If any error occurs, simply print it out and don't continue any more.
NSLog(@"%@", error);
return;
}
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
self.videoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
self.videoPreviewLayer.videoGravity = kCAGravityResizeAspectFill;
self.videoPreviewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.videoPreviewLayer];
// Start video capture.
[self.captureSession startRunning];
// Move the result view and loading view to the front
[self.view bringSubviewToFront:self.resultView];
[self.view bringSubviewToFront:self.loadingView];
// Initialize QR Code Frame to highlight the QR code
self.qrCodeFrameView = [[UIView alloc] init];
if (self.qrCodeFrameView) {
self.qrCodeFrameView.layer.borderColor = UIColor.greenColor.CGColor;
self.qrCodeFrameView.layer.borderWidth = 2;
[self.view addSubview:self.qrCodeFrameView];
[self.view bringSubviewToFront:self.qrCodeFrameView];
}
self.qrCodeTextView = [[UILabel alloc] init];
if (self.qrCodeTextView) {
[self.qrCodeTextView setTextColor:UIColor.greenColor];
[self.qrCodeTextView setFont:[UIFont systemFontOfSize:20]];
[self.qrCodeFrameView addSubview:self.qrCodeTextView];
}
[self rotateLoadingImage];
[self setResultType:RESULT_TYPE_WORKING codeContent:@"Ready" price:0.00];
[self.loadingView setHidden:YES];
}
-(void)viewWillDisappear:(BOOL)animated {
if (self.audioPlayer != nil) {
[self.audioPlayer stop];
self.audioPlayer = nil;
}
[super viewWillDisappear:animated];
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
-(void) updatePreviewLayer:(AVCaptureConnection*)layer orientation:(AVCaptureVideoOrientation)orientation {
layer.videoOrientation = orientation;
self.videoPreviewLayer.frame = self.view.bounds;
}
-(void)viewDidLayoutSubviews {
[super viewDidLayoutSubviews];
if(self.videoPreviewLayer.connection != nil) {
UIDevice *currentDevice = [UIDevice currentDevice];
UIDeviceOrientation orientation = [currentDevice orientation];
AVCaptureConnection *previewLayerConnection = self.videoPreviewLayer.connection;
if(previewLayerConnection.isVideoOrientationSupported) {
switch (orientation) {
case UIDeviceOrientationPortrait:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortrait];
break;
case UIDeviceOrientationLandscapeRight:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationLandscapeLeft];
break;
case UIDeviceOrientationLandscapeLeft:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationLandscapeRight];
break;
case UIDeviceOrientationPortraitUpsideDown:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortraitUpsideDown];
break;
default:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortrait];
break;
}
}
}
}
-(void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
// Check if the metadataObjects array is not nil and it contains at least one object.
if (metadataObjects.count == 0) {
self.qrCodeFrameView.frame = CGRectZero;
return;
}
// Get the metadata object.
AVMetadataMachineReadableCodeObject *metadataObj = (AVMetadataMachineReadableCodeObject*)(metadataObjects[0]);
if ([self.supportedCodeTypes containsObject:metadataObj.type]) {
// If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
AVMetadataObject *barCodeObject = [self.videoPreviewLayer transformedMetadataObjectForMetadataObject:metadataObj];
NSString *code = metadataObj.stringValue;
if (code != nil) {
// check upc a code
if ([self checkUpcACode:metadataObj.type code:code] == NO) {
self.qrCodeTextView.text = #"";
return;
}
int i=0;
for (i=0; i<code.length; i++) {
char ch = [code characterAtIndex:i];
if (ch != '0') break;
}
if (i>0) i--;
code = [code substringFromIndex:i];
self.qrCodeFrameView.frame = barCodeObject.bounds;
[self.qrCodeTextView setText:code];
self.qrCodeTextView.frame = CGRectMake(0, self.qrCodeFrameView.frame.size.height-20, self.qrCodeFrameView.frame.size.width, 20);
NSLog(#"%#", code);
[self handleBarcode:code];
} else {
self.qrCodeTextView.text = #"";
}
}
}
-(BOOL)checkUpcACode:(AVMetadataObjectType)type code:(NSString*)code {
if (type == AVMetadataObjectTypeEAN13Code) {
if ([code hasPrefix:#"0"] && [code length] > 0) {
return YES;
}
}
return NO;
}
@end
The solution was from PBK on the Apple Developer Forums:
//change [self handleBarcode:code];
// to
dispatch_async(dispatch_get_main_queue(), ^{
[self handleBarcode:code];
});
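For context, here is roughly how the fix slots into the delegate method above: the expensive handleBarcode: work no longer blocks captureOutput:didOutputMetadataObjects:fromConnection:, so the capture pipeline can keep delivering metadata while the barcode is processed.
// Inside captureOutput:didOutputMetadataObjects:fromConnection:, after the frame and label are updated:
self.qrCodeFrameView.frame = barCodeObject.bounds;
[self.qrCodeTextView setText:code];
dispatch_async(dispatch_get_main_queue(), ^{
[self handleBarcode:code]; // processed asynchronously; the delegate callback returns immediately
});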

Screen recording of iPhone while playing a video

In my project I have to add a book page-flip animation, and in this book a video plays on the right-side page. Once the first video completes, the page turns like a book page and the second video plays on the next right-side page, and so on. Now I have to save all of this as a video that can be downloaded, so that when the downloaded video is played from the gallery it looks the same as it does playing in my app. Right now I am recording the device's screen and saving it to a server for download. Everything is OK except the video player: in the video I am recording, the portion where the video is playing (on the right-side page of the book) is not getting recorded.
I am using the code below to record the screen. If any of you have another idea for doing the same thing, please share it with me, or if my code needs changing, please suggest that. Thanks in advance.
// ASScreenRecorder.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
typedef void (^VideoCompletionBlock)(void);
@protocol ASScreenRecorderDelegate;
@interface ASScreenRecorder : NSObject
@property (nonatomic, readonly) BOOL isRecording;
@property (nonatomic, weak) id <ASScreenRecorderDelegate> delegate;
// if saveURL is nil, video will be saved into camera roll
// this property can not be changed whilst recording is in progress
@property (strong, nonatomic) NSURL *videoURL;
+ (instancetype)sharedInstance;
- (BOOL)startRecording;
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
@end
// If your view contains an AVCaptureVideoPreviewLayer or an openGL view
// you'll need to write that data into the CGContextRef yourself.
// In the viewcontroller responsible for the AVCaptureVideoPreviewLayer / openGL view
// set yourself as the delegate for ASScreenRecorder.
// [ASScreenRecorder sharedInstance].delegate = self
// Then implement 'writeBackgroundFrameInContext:(CGContextRef*)contextRef'
// use 'CGContextDrawImage' to draw your view into the provided CGContextRef
@protocol ASScreenRecorderDelegate <NSObject>
- (void)writeBackgroundFrameInContext:(CGContextRef*)contextRef;
@end
// ASScreenRecorder.m
// ScreenRecorder
//
// Created by Alan Skipp on 23/04/2014.
// Copyright (c) 2014 Alan Skipp. All rights reserved.
//
#import "ASScreenRecorder.h"
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import <AssetsLibrary/AssetsLibrary.h>
@interface ASScreenRecorder()
@property (strong, nonatomic) AVAssetWriter *videoWriter;
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
@property (strong, nonatomic) CADisplayLink *displayLink;
@property (strong, nonatomic) NSDictionary *outputBufferPoolAuxAttributes;
@property (nonatomic) CFTimeInterval firstTimeStamp;
@property (nonatomic) BOOL isRecording;
@end
@implementation ASScreenRecorder
{
dispatch_queue_t _render_queue;
dispatch_queue_t _append_pixelBuffer_queue;
dispatch_semaphore_t _frameRenderingSemaphore;
dispatch_semaphore_t _pixelAppendSemaphore;
CGSize _viewSize;
CGFloat _scale;
CGColorSpaceRef _rgbColorSpace;
CVPixelBufferPoolRef _outputBufferPool;
}
#pragma mark - initializers
+ (instancetype)sharedInstance {
static dispatch_once_t once;
static ASScreenRecorder *sharedInstance;
dispatch_once(&once, ^{
sharedInstance = [[self alloc] init];
});
return sharedInstance;
}
- (instancetype)init
{
self = [super init];
if (self) {
_viewSize = [UIApplication sharedApplication].delegate.window.bounds.size;
_scale = [UIScreen mainScreen].scale;
// record half size resolution for retina iPads
if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) {
_scale = 1.0;
}
_isRecording = NO;
_append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL);
_render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_render_queue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0));
_frameRenderingSemaphore = dispatch_semaphore_create(1);
_pixelAppendSemaphore = dispatch_semaphore_create(1);
}
return self;
}
#pragma mark - public
- (void)setVideoURL:(NSURL *)videoURL
{
NSAssert(!_isRecording, @"videoURL can not be changed whilst recording is in progress");
_videoURL = videoURL;
}
- (BOOL)startRecording
{
if (!_isRecording) {
[self setUpWriter];
_isRecording = (_videoWriter.status == AVAssetWriterStatusWriting);
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)];
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}
return _isRecording;
}
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
{
if (_isRecording) {
_isRecording = NO;
[_displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
[self completeRecordingSession:completionBlock];
}
}
#pragma mark - private
-(void)setUpWriter
{
_rgbColorSpace = CGColorSpaceCreateDeviceRGB();
NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
(id)kCVPixelBufferWidthKey : @(_viewSize.width * _scale),
(id)kCVPixelBufferHeightKey : @(_viewSize.height * _scale),
(id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * _scale * 4)
};
_outputBufferPool = NULL;
CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool);
NSError* error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:self.videoURL ?: [self tempFileURL]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
NSInteger pixelNumber = _viewSize.width * _viewSize.height * _scale;
NSDictionary* videoCompression = @{AVVideoAverageBitRateKey: @(pixelNumber * 11.4)};
NSDictionary* videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithInt:_viewSize.width*_scale],
AVVideoHeightKey: [NSNumber numberWithInt:_viewSize.height*_scale],
AVVideoCompressionPropertiesKey: videoCompression};
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
_videoWriterInput.transform = [self videoTransformForDeviceOrientation];
_avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil];
[_videoWriter addInput:_videoWriterInput];
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
}
- (CGAffineTransform)videoTransformForDeviceOrientation
{
CGAffineTransform videoTransform;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationLandscapeLeft:
videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
break;
case UIDeviceOrientationLandscapeRight:
videoTransform = CGAffineTransformMakeRotation(M_PI_2);
break;
case UIDeviceOrientationPortraitUpsideDown:
videoTransform = CGAffineTransformMakeRotation(M_PI);
break;
default:
videoTransform = CGAffineTransformIdentity;
}
return videoTransform;
}
- (NSURL*)tempFileURL
{
NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"];
[self removeTempFilePath:outputPath];
return [NSURL fileURLWithPath:outputPath];
}
- (void)removeTempFilePath:(NSString*)filePath
{
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath]) {
NSError* error;
if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
NSLog(#"Could not delete old recording:%#", [error localizedDescription]);
}
}
}
- (void)completeRecordingSession:(VideoCompletionBlock)completionBlock;
{
dispatch_async(_render_queue, ^{
dispatch_sync(_append_pixelBuffer_queue, ^{
[_videoWriterInput markAsFinished];
[_videoWriter finishWritingWithCompletionHandler:^{
void (^completion)(void) = ^() {
[self cleanup];
dispatch_async(dispatch_get_main_queue(), ^{
if (completionBlock) completionBlock();
});
};
if (self.videoURL) {
completion();
} else {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
NSLog(#"Error copying video to camera roll:%#", [error localizedDescription]);
} else {
[self removeTempFilePath:_videoWriter.outputURL.path];
completion();
}
}];
}
}];
});
});
}
- (void)cleanup
{
self.avAdaptor = nil;
self.videoWriterInput = nil;
self.videoWriter = nil;
self.firstTimeStamp = 0;
self.outputBufferPoolAuxAttributes = nil;
CGColorSpaceRelease(_rgbColorSpace);
CVPixelBufferPoolRelease(_outputBufferPool);
}
- (void)writeVideoFrame
{
// throttle the number of frames to prevent meltdown
// technique gleaned from Brad Larson's answer here: http://stackoverflow.com/a/5956119
if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
return;
}
dispatch_async(_render_queue, ^{
if (![_videoWriterInput isReadyForMoreMediaData]) return;
if (!self.firstTimeStamp) {
self.firstTimeStamp = _displayLink.timestamp;
}
CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp);
CMTime time = CMTimeMakeWithSeconds(elapsed, 1000);
CVPixelBufferRef pixelBuffer = NULL;
CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
if (self.delegate) {
[self.delegate writeBackgroundFrameInContext:&bitmapContext];
}
// draw each window into the context (other windows include UIKeyboard, UIAlert)
// FIX: UIKeyboard is currently only rendered correctly in portrait orientation
dispatch_sync(dispatch_get_main_queue(), ^{
UIGraphicsPushContext(bitmapContext); {
for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
[window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
}
} UIGraphicsPopContext();
});
// append pixelBuffer on a async dispatch_queue, the next frame is rendered whilst this one appends
// must not overwhelm the queue with pixelBuffers, therefore:
// check if _append_pixelBuffer_queue is ready
// if it’s not ready, release pixelBuffer and bitmapContext
if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) {
dispatch_async(_append_pixelBuffer_queue, ^{
BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success) {
NSLog(#"Warning: Unable to write buffer to video");
}
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
dispatch_semaphore_signal(_pixelAppendSemaphore);
});
} else {
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
}
dispatch_semaphore_signal(_frameRenderingSemaphore);
});
}
- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer
{
CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer);
CVPixelBufferLockBaseAddress(*pixelBuffer, 0);
CGContextRef bitmapContext = NULL;
bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer),
CVPixelBufferGetWidth(*pixelBuffer),
CVPixelBufferGetHeight(*pixelBuffer),
8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
);
CGContextScaleCTM(bitmapContext, _scale, _scale);
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
CGContextConcatCTM(bitmapContext, flipVertical);
return bitmapContext;
}
@end
@Gobinda, this code works in my case. You need to specify the frame if you want to record only a portion of the window. In the init method, viewSize is defined as the window size, so you need to change viewSize to your video frame.
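Beyond resizing, the comments in ASScreenRecorder.h point at the other half of the problem: if the video is shown via an AVFoundation layer, it likely falls under the same caveat the header describes for AVCaptureVideoPreviewLayer / OpenGL content and is not picked up by drawViewHierarchyInRect:. A very rough sketch, with assumed names: playerView is the video page's view, and currentVideoFrame is a hypothetical helper that returns the latest frame (for example via AVPlayerItemVideoOutput).
// In the view controller that owns the video page, set [ASScreenRecorder sharedInstance].delegate = self, then:
- (void)writeBackgroundFrameInContext:(CGContextRef *)contextRef
{
UIImage *frame = [self currentVideoFrame]; // hypothetical helper returning the latest video frame
if (frame == nil || *contextRef == NULL) return;
// Draw the frame where the player view sits on screen. Depending on the context's
// current transform you may need to flip the image vertically.
CGRect target = [self.playerView convertRect:self.playerView.bounds toView:nil];
CGContextDrawImage(*contextRef, target, frame.CGImage);
}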

AVCaptureVideoPreviewLayer: using with Storyboard and Autolayout

I have a UIView on my Storyboard with four simple constraints set to fill the entire screen; my application is for landscape use only. This UIView is to show the camera's live preview, using AVCaptureVideoPreviewLayer.
I have tried using some code examples found on Stack Overflow and the wider internet, setting up an AVCaptureSession and AVCaptureVideoPreviewLayer; however, none have worked. The UIView on the storyboard always fails to display anything on screen (white screen when the view loads).
I wish to create my app's entire interface in Interface Builder using constraints, yet still get the functionality of AVCaptureVideoPreviewLayer. Can someone provide a full code example if they have a possible solution?
This is my code; it works for me. The header file AVCameraCaptureData.h:
@interface AVCameraCaptureData : NSObject
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureConnection *captureConnection;
@property (nonatomic, strong) AVCaptureDeviceInput *deviceInput;
@property (nonatomic, strong) AVCaptureStillImageOutput *captureOutput;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@end
#if defined(__cplusplus)
extern "C"
{
#endif /* defined(__cplusplus) */
AVCaptureDevice* getCaptureDeviceByPosition(AVCaptureDevicePosition position);
AVCaptureStillImageOutput* getCaptureOutput();
AVCaptureConnection* ConnectionWithMediaType (NSString* mediaType, NSArray* fromConnections);
AVCaptureVideoPreviewLayer* getCapturePreviewLayer(AVCaptureSession* captureSession);
AVCameraCaptureData* CreateCaptureData (AVCaptureDevicePosition position, CGRect presentLayerFrame, NSString* sessionPreset);
#if defined(__cplusplus)
}
#endif /* defined(__cplusplus) */
AVCameraCaptureData.m file:
#import "AVCameraCaptureData.h"
#import "AVCameraUtil.h"
@implementation AVCameraCaptureData
AVCaptureDevice* getCaptureDeviceByPosition(AVCaptureDevicePosition position)
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
return device;
}
return nil;
}
AVCaptureStillImageOutput* getCaptureOutput()
{
AVCaptureStillImageOutput* captureOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary* outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[captureOutput setOutputSettings:outputSettings];
return captureOutput;
}
AVCaptureConnection* ConnectionWithMediaType (NSString* mediaType, NSArray* fromConnections)
{
for (AVCaptureConnection* connection in fromConnections)
{
for (AVCaptureInputPort* port in [connection inputPorts])
{
if ([[port mediaType] isEqual:mediaType])
return connection;
}
}
return nil;
}
AVCaptureVideoPreviewLayer* getCapturePreviewLayer(AVCaptureSession* captureSession)
{
AVCaptureVideoPreviewLayer* previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
//please set frame for yourself
[previewLayer setFrame:[AVCameraUtil screenBoundsFixedToPortraitOrientation]];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
return previewLayer;
}
AVCameraCaptureData* CreateCaptureData (AVCaptureDevicePosition position, CGRect presentLayerFrame, NSString* sessionPreset)
{
AVCaptureSession* captureSession = [[AVCaptureSession alloc] init];
if (sessionPreset == nil)
[captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
else
[captureSession setSessionPreset:sessionPreset];
AVCaptureDevice *device = getCaptureDeviceByPosition (position);
AVCaptureDeviceInput *videoDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:nil];
if ([captureSession canAddInput:videoDeviceInput])
[captureSession addInput:videoDeviceInput];
AVCaptureStillImageOutput* captureOutput = getCaptureOutput();
if ([captureSession canAddOutput:captureOutput])
[captureSession addOutput:captureOutput];
AVCameraCaptureData *data = [[AVCameraCaptureData alloc] init];
data.captureSession = captureSession;
data.deviceInput = videoDeviceInput;
data.captureOutput = captureOutput;
data.captureConnection = ConnectionWithMediaType(AVMediaTypeVideo, [captureOutput connections]);
data.previewLayer = getCapturePreviewLayer(captureSession);
return data;
}
@end
Then you can init AVCameraCaptureData with this function in your controller:
CreateCaptureData(AVCaptureDevicePositionBack, self.view.frame,nil);
Start running the session:
[self.captureData.captureSession startRunning];
Add AVCaptureVideoPreviewLayer to view.layer:
[self.view.layer addSublayer:self.captureData.previewLayer];
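Since the question is about Auto Layout, one detail worth adding: previewLayer is a CALayer, so constraints never touch it, and its frame is only set once in getCapturePreviewLayer(). A small addition (a sketch, assuming you keep the AVCameraCaptureData instance in a captureData property as above) keeps the layer matched to the constrained view whenever layout changes:
- (void)viewDidLayoutSubviews
{
[super viewDidLayoutSubviews];
// Re-sync the preview layer with the Auto Layout-driven view bounds.
self.captureData.previewLayer.frame = self.view.bounds;
}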

Restart object in Objective-C - Matchismo Assignment 2 - Restart game

I was trying to solve assignment 2 from the Stanford iOS 7 development course (the Matchismo card game).
The game works fine. Now I have to add the restart function: if the user presses the restart button, the game restarts (it deals new cards and resets the score).
My game model is the @property (nonatomic, strong) CardMatchingGame *game;
This is the code for CardMatchingGame.m:
#import "CardMatchingGame.h"
#import "PlayingCardDeck.h"
@interface CardMatchingGame()
@property (nonatomic, readwrite) NSInteger score;
@property (nonatomic, strong) NSMutableArray *cards;
@end
@implementation CardMatchingGame
static const int MATCH_BONUS = 4;
static const int MATCH_PENALTY = 2;
static const int COST_TO_CHOOSE = 1;
-(NSMutableArray *)cards{
if(!_cards) _cards = [[NSMutableArray alloc]init];
return _cards;
}
-(instancetype)initWithCardCount:(NSUInteger)count usingDeck:(Deck *)deck{
self = [super init];
if(self){
for(int i=0; i < count; i++){
Card *card = [deck drawRandomCard];
if(card){
[self.cards addObject:card];
} else{
self = nil;
break;
}
}
}
return self;
}
-(void)chooseCardAtIndex:(NSUInteger)index{
Card *card = [self cardAtIndex:index];
if(!card.isMatched){
if(card.isChosen){
card.chosen = NO;
} else{
for(Card *otherCard in self.cards){
if(otherCard.isChosen && !otherCard.isMatched){
int matchScore = [card match:@[otherCard]];
if(matchScore){
self.score += matchScore * MATCH_BONUS;
card.matched = YES;
otherCard.matched = YES;
} else{
self.score -= MATCH_PENALTY;
otherCard.chosen = NO;
}
break;
}
}
self.score -= COST_TO_CHOOSE;
card.chosen = YES;
}
}
}
-(Card *)cardAtIndex:(NSUInteger)index{
return (index < [self.cards count]) ? self.cards[index] : nil;
}
@end
Here is my CardGameViewController.m:
#import "CardGameViewController.h"
#import "PlayingCardDeck.h"
#import "CardMatchingGame.h"
@interface CardGameViewController ()
@property (nonatomic, strong) Deck *deck;
@property (nonatomic, strong) CardMatchingGame *game;
@property (strong, nonatomic) IBOutletCollection(UIButton) NSArray *cardsCollection;
@property (weak, nonatomic) IBOutlet UILabel *scoreLabel;
@end
@implementation CardGameViewController
@synthesize game = _game;
-(CardMatchingGame *)game{
if(!_game) _game = [[CardMatchingGame alloc] initWithCardCount:[self.cardsCollection count]
usingDeck:self.deck];
return _game;
}
-(Deck *)deck{
if(!_deck) _deck = [[PlayingCardDeck alloc] init];
return _deck;
}
- (IBAction)touchRestartButton:(id)sender {
self.game = nil;
[self updateUI];
}
- (IBAction)touchCardButton:(UIButton *)sender {
int chosenButtonIndex = [self.cardsCollection indexOfObject:sender];
[self.game chooseCardAtIndex:chosenButtonIndex];
[self updateUI];
}
-(void)updateUI{
for(UIButton *cardButton in self.cardsCollection){
int buttonIndex = [self.cardsCollection indexOfObject:cardButton];
Card *card = [self.game cardAtIndex:buttonIndex];
[cardButton setTitle:[self titleForCard:card] forState:UIControlStateNormal];
[cardButton setBackgroundImage:[self backgroundImageForCard:card] forState:UIControlStateNormal];
cardButton.enabled = !card.isMatched;
}
self.scoreLabel.text = [NSString stringWithFormat:@"Score: %d", self.game.score];
}
-(NSString *)titleForCard:(Card *)card{
return card.isChosen ? card.contents : @"";
}
-(UIImage *)backgroundImageForCard:(Card *)card{
return [UIImage imageNamed: card.isChosen ? @"cardfront" : @"cardback"];
}
@end
In order to restart the game, I think I should simply re-initialize the property CardMatchingGame *game.
This is what I tried to do, by setting self.game = nil; Then it should automatically be re-initialized in the getter of game.
This is, indeed, the solution that I found on the internet. However, in my program it doesn't work. *game is set to nil and never restored, so the game ends when you click restart.
Could you please help me to figure out why self.game = nil doesn't work in my case?
- (IBAction)startover:(UIButton *)sender {
self.game= [[CardMatchingGame alloc] initWithCardCount:[self.cardButtons count] usingDeck:[self createDeck]];
[self updateUI];
}
If your program has the recommended lazy-init style, there is no need for a new method such as start over. You are correct to set self.game to nil; a call to the getter will then create a new instance of the game. I did it by calling updateUI right after self.game = nil. That calls the getter, which lazily inits a new instance of the game.
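A minimal sketch of that restart action against the code above. One assumption worth flagging: the deck is also created lazily and has already been drawn down by the previous game, so it is reset here as well so the new game has cards to draw.
- (IBAction)touchRestartButton:(id)sender {
self.game = nil; // next access to self.game lazily creates a new game
self.deck = nil; // assumption: also recreate the deck, since the old one has been drawn down
[self updateUI]; // updateUI calls the game getter, which re-initializes it
}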

Infinite loop and access error on app startup on model init

Please help... I'm losing my mind trying to figure out this problem.
I'm fairly new to iOS, so don't go too hard on me if it's something obvious! ;)
I'm using Xcode 4.6 and targeting the iPhone 6.1 Simulator.
I get the following error when starting up my app:
EXC_BAD_ACCESS code = 2
There are hundreds of threads appearing in the Debug Navigator, which leads me to believe there is some sort of infinite loop somewhere (I just cannot see where).
The error occurs beside (id)init in PlayingCardDeck.m after entering it from ViewController.m at line:
Card *card = [self.deck drawRandonCard];
ViewController.m:
#import "ViewController.h"
#import "PlayingCardDeck.h"
@interface ViewController ()
@property (weak, nonatomic) IBOutlet UILabel *flipsLabel;
@property (nonatomic) int flipCount;
@property (strong, nonatomic) Deck *deck;
@property (strong, nonatomic) IBOutletCollection(UIButton) NSArray *cardButtons;
@end
@implementation ViewController
@synthesize deck = _deck;
- (IBAction)flipCard:(UIButton *)sender {
sender.selected = !sender.isSelected;
self.flipCount++;
}
- (void)setFlipCount:(int)flipCount
{
_flipCount = flipCount;
self.flipsLabel.text = [NSString stringWithFormat:@"Flips: %d", self.flipCount];
}
- (Deck *)deck
{
if (!_deck) _deck = [[PlayingCardDeck alloc] init];
return _deck;
}
- (void)setCardButtons:(NSArray *)cardButtons
{
_cardButtons = cardButtons;
for (UIButton *cardButton in cardButtons)
{
Card *card = [self.deck drawRandonCard];
[cardButton setTitle:card.contents forState:UIControlStateSelected];
}
}
@end
Deck.m
#import "Deck.h"
@interface Deck()
@property (strong, nonatomic) NSMutableArray *cards;
@end
@implementation Deck
- (NSMutableArray *)cards
{
if (!_cards) _cards = [[NSMutableArray alloc] init];
return _cards;
}
- (void)addCard:(Card *)card atTop:(BOOL)atTop
{
if (atTop)
{
[self.cards insertObject:card atIndex:0];
}
else
{
[self.cards addObject:card];
}
}
- (Card *)drawRandonCard
{
Card *randomCard = nil;
if (self.cards.count)
{
unsigned index = arc4random() % self.cards.count;
randomCard = self.cards[index];
[self.cards removeObjectAtIndex:index];
}
return randomCard;
}
@end
PlayingCardDeck.m
#import "PlayingCardDeck.h"
#import "PlayingCard.h"
@implementation PlayingCardDeck
- (id)init
{
self = [self init];
if (self)
{
for (NSString *suit in [PlayingCard validSuits])
{
for (NSUInteger rank=1; rank <= [PlayingCard maxRank]; rank++)
{
PlayingCard *card = [[PlayingCard alloc] init];
card.suit = suit;
card.rank = rank;
[self addCard:card atTop:YES];
}
}
}
return self;
}
@end
In PlayingCardDeck.m, self = [self init] should be self = [super init]. That's what's causing the infinite loop.
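For completeness, the corrected initializer with that one-line change applied:
- (id)init
{
// Call the superclass's designated initializer instead of recursing into -init.
self = [super init];
if (self)
{
for (NSString *suit in [PlayingCard validSuits])
{
for (NSUInteger rank = 1; rank <= [PlayingCard maxRank]; rank++)
{
PlayingCard *card = [[PlayingCard alloc] init];
card.suit = suit;
card.rank = rank;
[self addCard:card atTop:YES];
}
}
}
return self;
}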
