I've set up a custom camera view that has overlay buttons on the view. Everything works as it should. The camera takes the picture, and the picture shows up in a UIImageView. How do I save this picture once it's in the UIImageView?
Code below:
ViewController.h
@interface ViewController : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate>

@property (strong, nonatomic) IBOutlet UIView *frameForCapture;
@property (strong, nonatomic) IBOutlet UIImageView *imageView;

- (IBAction)takePhoto:(id)sender;

@end
ViewController.m
@implementation ViewController

AVCaptureSession *session;
AVCaptureStillImageOutput *stillImageOutput;

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    session = [[AVCaptureSession alloc] init];
    [session setSessionPreset:AVCaptureSessionPresetPhoto];

    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error;
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
    if ([session canAddInput:deviceInput]) {
        [session addInput:deviceInput];
    }

    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];

    CALayer *rootLayer = [[self view] layer];
    [rootLayer setMasksToBounds:YES];
    CGRect frame = self.frameForCapture.frame;
    [previewLayer setFrame:frame];
    [rootLayer insertSublayer:previewLayer atIndex:0];

    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    [session addOutput:stillImageOutput];

    [session startRunning];
}
- (IBAction)takePhoto:(id)sender {
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *image = [UIImage imageWithData:imageData];
            self.imageView.image = image;
        }
    }];
}
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
You can use this function to save the image to the device's photo album:
UIImageWriteToSavedPhotosAlbum(UIImage *image, id completionTarget, SEL completionSelector, void *contextInfo);
You only need completionTarget, completionSelector, and contextInfo if you want to be notified when the image is done saving; otherwise, you can pass in nil.
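For example, a minimal sketch (self here is the view controller from the question, and the selector is the fixed UIKit callback signature), saving from inside the takePhoto completion handler right after assigning the image:

UIImageWriteToSavedPhotosAlbum(image, self, @selector(image:didFinishSavingWithError:contextInfo:), NULL);

// The completion selector must have exactly this signature:
- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (error) {
        NSLog(@"Error saving photo: %@", error);
    } else {
        NSLog(@"Photo saved to the photo album");
    }
}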
I have followed a tutorial that walked through building a custom but simple camera app, which almost exactly matches what I need. I actually have two issues I need to change, but I will focus on the first one for now.
The following code uses the back camera, but I need it changed so that I can use the front camera. I will also link the video I sourced it from to give credit; I followed what one of the commenters said about using the front camera, but the answer didn't help at all.
https://www.youtube.com/watch?v=Xv1FfqVy-KM
I'm not excellent at coding at all, but I'm trying to learn. Any help would be appreciated! Many thanks.
@interface ViewController ()

@end

@implementation ViewController

AVCaptureSession *session;
AVCaptureStillImageOutput *StillImageOutput;

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    session = [[AVCaptureSession alloc] init];
    [session setSessionPreset:AVCaptureSessionPresetPhoto];

    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error;
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
    if ([session canAddInput:deviceInput]) {
        [session addInput:deviceInput];
    }

    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    CALayer *rootLayer = [[self view] layer];
    [rootLayer setMasksToBounds:YES];
    CGRect frame = frameforcapture.frame;
    [previewLayer setFrame:frame];
    [rootLayer insertSublayer:previewLayer atIndex:0];

    StillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [StillImageOutput setOutputSettings:outputSettings];
    [session addOutput:StillImageOutput];

    [session startRunning];
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (IBAction)takephoto:(id)sender {
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in StillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
    }

    [StillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *image = [UIImage imageWithData:imageData];
            imageView.image = image;
        }
    }];
}

@end
This code returns an AVCaptureDevice instance for the default device of the given media type:
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
Change that code to:
....
AVCaptureDevice *inputDevice = nil;
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *camera in devices) {
    if ([camera position] == AVCaptureDevicePositionFront) { // is front camera
        inputDevice = camera;
        break;
    }
}
......
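One caveat (an addition, not from the original answer): if the device has no front camera, inputDevice stays nil and the later canAddInput: check will quietly do nothing, so you may want to fall back to the default device:

if (!inputDevice) {
    // No front camera found; fall back to the system default camera
    inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}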
- (IBAction)btnCameraClicked:(id)sender {
    AVCaptureDevicePosition desiredPosition;
    if (isUsingFrontFacingCamera)
        desiredPosition = AVCaptureDevicePositionBack;
    else
        desiredPosition = AVCaptureDevicePositionFront;

    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([d position] == desiredPosition) {
            [[previewLayer session] beginConfiguration];
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
            for (AVCaptureInput *oldInput in [[previewLayer session] inputs]) {
                [[previewLayer session] removeInput:oldInput];
            }
            [[previewLayer session] addInput:input];
            [[previewLayer session] commitConfiguration];
            break;
        }
    }
    isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
}
// Declare these in the .h file:
AVCaptureVideoPreviewLayer *previewLayer;
BOOL isUsingFrontFacingCamera;
There seem to be a number of questions about this issue here. Since most of them don't provide code snippets, I am electing to show all my code. The problem: the picture taken is taller than the preview. My preview is w=288 by h=288, but the picture that is returned is w=2448 by h=3264. Why is the returned image not a square when my preview is a square?
AVCaptureSession *session;
AVCaptureStillImageOutput *stillImageOutput;

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    session = [[AVCaptureSession alloc] init];
    [session setSessionPreset:AVCaptureSessionPresetPhoto];

    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error;
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
    if ([session canAddInput:deviceInput]) {
        [session addInput:deviceInput];
    }

    AVCaptureVideoPreviewLayer *previewLayout = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [previewLayout setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    CALayer *rootLayout = [[self view] layer];
    [rootLayout setMasksToBounds:YES];
    CGRect frame = self.frameForCapture.frame;
    [previewLayout setFrame:frame];
    [rootLayout insertSublayer:previewLayout atIndex:0];

    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    [session addOutput:stillImageOutput];

    [session startRunning];
}
- (IBAction)takePhoto:(id)sender
{
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                                   completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer != NULL) { // i.e. it exists
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *originalImage = [UIImage imageWithData:imageData];
            NSLog(@"Image w = %f; h = %f", originalImage.size.width, originalImage.size.height);

            CGSize size = self.frameForCapture.frame.size;
            UIGraphicsBeginImageContext(size);
            [originalImage drawInRect:CGRectMake(0, 0, size.width, size.width)];
            UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();

            self.imageView.image = image;
            [self.imageView setHidden:NO];
            [self.frameForCapture setHidden:YES];
        }
    }];
}
I have been playing around with CGImageCreateWithImageInRect to make what I finally save and use in my UIImageView match the preview, but so far no luck. One of the places I have been looking is Cropping an UIImage. Any ideas how I might fix this issue?
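One way to do it (a sketch of the CGImageCreateWithImageInRect approach, not taken from an accepted answer): the photo preset always returns the sensor's full 4:3 frame, and AVLayerVideoGravityResizeAspectFill only crops the preview visually, so you have to crop the captured image yourself. Taking the largest centered square:

UIImage *originalImage = [UIImage imageWithData:imageData];
CGImageRef cgImage = originalImage.CGImage;

// CGImage dimensions are in pixel coordinates and ignore the UIImage
// orientation flag, so a portrait shot reports 3264x2448 here.
size_t w = CGImageGetWidth(cgImage);
size_t h = CGImageGetHeight(cgImage);
size_t side = MIN(w, h);
CGRect cropRect = CGRectMake((w - side) / 2.0, (h - side) / 2.0, side, side);

CGImageRef croppedRef = CGImageCreateWithImageInRect(cgImage, cropRect);
// Re-apply the original scale and orientation so the result displays upright.
UIImage *squareImage = [UIImage imageWithCGImage:croppedRef
                                           scale:originalImage.scale
                                     orientation:originalImage.imageOrientation];
CGImageRelease(croppedRef);
self.imageView.image = squareImage;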
I am trying to align a UIButton within my ViewController. I can't tell what the specific name of my UIButton is, so I think that's why I'm having a hard time moving it as a subview. Here is my code:
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetMedium;

    CALayer *viewLayer = self.vImagePreview.layer;
    NSLog(@"viewLayer = %@", viewLayer);

    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
    [self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];

    // Modify to fit screen
    CGAffineTransform translate = CGAffineTransformMakeTranslation(0.0, 0.0);
    CGAffineTransform scale = CGAffineTransformMakeScale(1.333333, 1.333333);
    CGAffineTransform rotate = CGAffineTransformMakeRotation(DEGREES_RADIANS(180));
    CGAffineTransform transform = CGAffineTransformConcat(translate, scale);
    transform = CGAffineTransformConcat(transform, rotate);
    self.vImagePreview.transform = transform;

    [self.vImagePreview sendSubviewToFront:UIButton];

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
        NSLog(@"ERROR: trying to open camera: %@", error);
    }
    [session addInput:input];

    _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [_stillImageOutput setOutputSettings:outputSettings];
    [session addOutput:_stillImageOutput];

    [session startRunning];
}
- (IBAction)captureNow {
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in _stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    NSLog(@"about to request a capture from: %@", _stillImageOutput);
    [_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
        if (exifAttachments) {
            // Do something with the attachments.
            NSLog(@"attachments: %@", exifAttachments);
        } else {
            NSLog(@"no attachments");
        }

        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        self.vImage.image = image;

        // Saved photo modifications go here
        UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
    }];
}
@end
and my headers, just in case:
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <AVKit/AVKit.h>
#import <ImageIO/CGImageProperties.h>

#define DEGREES_RADIANS(angle) ((angle) / 180.0 * M_PI)

@interface ViewController : UIViewController

@property (nonatomic, retain) IBOutlet UIView *vImagePreview;
@property (nonatomic, retain) AVCaptureStillImageOutput *stillImageOutput;
@property (nonatomic, retain) IBOutlet UIImageView *vImage;

@end
Some clarification:
My button is on my storyboard. The button should trigger the captureNow method, taking a picture. I just can't figure out how to get it to appear over the preview layer.
Since your UIButton is on the storyboard, instead of using
[self.vImagePreview sendSubviewToFront:UIButton];
use a line like the following to bring the button to the front; note that you must pass the actual button instance (an IBOutlet), not the UIButton class:
[self.view bringSubviewToFront:self.captureButton];
Call it at the end of the method, just before starting the session.
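Concretely, a sketch (captureButton is a hypothetical outlet name; connect it to the storyboard button):

// In the .h (or a class extension):
@property (strong, nonatomic) IBOutlet UIButton *captureButton;

// At the end of viewDidAppear:, just before [session startRunning]:
[self.view bringSubviewToFront:self.captureButton];

Bringing the button to the front after the preview layer has been added keeps it visible above the camera preview.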
I'm looking for some help with AVFoundation. Currently I am following a step-by-step guide on how to capture a still image from this topic: How to save photos taken using AVFoundation to Photo Album?
My question is: how can I lower the quality of my saved image while also using AVCaptureSessionPreset640x480? I would like to halve the image quality, or if there is another way to make the saved image as small a file as possible (possibly 320x280), that could be better than adjusting the actual quality.
I don't know if anyone else has asked this before, but I have been searching the web for the past couple of days and cannot find an answer. Below is my code.
#import "ViewController.h"
#import <ImageIO/ImageIO.h>
#interface ViewController ()
#end
#implementation ViewController
#synthesize imagePreview;
#synthesize iImage;
#synthesize stillImageOutput;
- (IBAction)captureNow {
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }

    NSLog(@"about to request a capture from: %@", stillImageOutput);
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
        if (exifAttachments) {
            // Do something with the attachments.
            NSLog(@"attachments: %@", exifAttachments);
        } else {
            NSLog(@"no attachments");
        }

        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        //NSData *data2 = [NSData dataWithData:UIImageJPEGRepresentation(image, 0.5f)];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        self.iImage.image = image;
        UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
    }];
}
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPreset640x480;

    CALayer *viewLayer = self.imagePreview.layer;
    NSLog(@"viewLayer = %@", viewLayer);

    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    captureVideoPreviewLayer.frame = self.imagePreview.bounds;
    [self.imagePreview.layer addSublayer:captureVideoPreviewLayer];

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
        NSLog(@"ERROR: trying to open camera: %@", error);
    }
    [session addInput:input];

    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    [session addOutput:stillImageOutput];

    [session startRunning];
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
Try using a different preset; each preset gives you a different resolution image.
As per the Apple documentation, for the iPhone 4 (back camera) you will get the following image resolutions for these session presets:
AVCaptureSessionPresetHigh: 1280x720
AVCaptureSessionPresetMedium: 480x360
AVCaptureSessionPresetLow: 192x144
AVCaptureSessionPreset640x480: 640x480
AVCaptureSessionPreset1280x720: 1280x720
AVCaptureSessionPresetPhoto: 2592x1936 (not supported for video output)
Hope this helps.
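For example, a minimal sketch of switching presets safely (not every device supports every preset, so check first):

AVCaptureSession *session = [[AVCaptureSession alloc] init];
if ([session canSetSessionPreset:AVCaptureSessionPresetMedium]) {
    session.sessionPreset = AVCaptureSessionPresetMedium; // 480x360 on iPhone 4 (back)
}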
You need to set the kCVPixelBufferWidthKey and kCVPixelBufferHeightKey options on the AVCaptureStillImageOutput object to get a resolution of your choice. This width/height will override the session preset's width/height. Minimal sample below (add error checking):
_session = [[AVCaptureSession alloc] init];
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
_sessionInput = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];

NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                         [NSNumber numberWithDouble:320.0], (id)kCVPixelBufferWidthKey,
                         [NSNumber numberWithDouble:280.0], (id)kCVPixelBufferHeightKey,
                         [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
                         nil];
[_stillImageOutput setOutputSettings:options];

[_session beginConfiguration];
[_session addInput:_sessionInput];
[_session addOutput:_stillImageOutput];
[_session setSessionPreset:AVCaptureSessionPresetPhoto];
_avConnection = [_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[_session commitConfiguration];
.............
- (void)start
{
    [self.session startRunning];
}
.............
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:self.avConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0); // balance the lock above
    NSLog(@"%zu : %zu", height, width); // %zu is the correct format for size_t
}];
Note: I have only tried this on a Mac; ideally it should work on iOS too. Also try maintaining some aspect ratio.
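If the goal is mainly a smaller file rather than a smaller resolution, another option (a sketch building on the commented-out line in the question; 0.5f is an arbitrary quality factor between 0.0 and 1.0, and it uses the AssetsLibrary framework so the compressed bytes are saved as-is):

#import <AssetsLibrary/AssetsLibrary.h>

NSData *smallerData = UIImageJPEGRepresentation(image, 0.5f); // re-encode at 50% JPEG quality
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeImageDataToSavedPhotosAlbum:smallerData
                                 metadata:nil
                          completionBlock:^(NSURL *assetURL, NSError *error) {
    if (error) {
        NSLog(@"Save failed: %@", error);
    }
}];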
I am trying to create a simple camera app with an overlay view that shows two buttons and a gesture recognizer.
Currently I have an OverlayViewController with a .xib file which contains a UIView with two buttons and a gesture recognizer.
In my CameraViewController, which shows the camera on viewDidLoad through the UIImagePicker, I try to assign this picker my overlay, but so far I'm not getting any of the buttons in the overlay.
OverlayViewController.h
@interface OverlayViewController : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate, UIGestureRecognizerDelegate>

- (IBAction)take_picture:(UIButton *)sender;
- (IBAction)choose_picture:(UIButton *)sender;

@end
OverlayViewController.m
- pretty much empty (do I need to initialize the UIButtons here?)
CameraViewController.h
@interface ProfileViewController : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate, UIGestureRecognizerDelegate>

@property (strong, nonatomic) UIImagePickerController *picker;
@property (strong, nonatomic) OverlayViewController *overlay;

@end
CameraViewController.m
- (void)setupCamera
{
    self.picker = [[UIImagePickerController alloc] init];
    self.picker.sourceType = UIImagePickerControllerSourceTypeCamera;
    self.picker.cameraCaptureMode = UIImagePickerControllerCameraCaptureModePhoto;

    self.overlay = [[OverlayViewController alloc] initWithNibName:@"OverlayViewController" bundle:nil];
    // _overlay.delegate = self;
    // NSArray *nibViews = [[NSBundle mainBundle] loadNibNamed:@"OverlayViewController" owner:self.overlay options:nil];
    // UIView *uiView = [nibViews objectAtIndex:0];
    // uiView.opaque = NO;
    // uiView.backgroundColor = [UIColor clearColor];

    self.picker.cameraOverlayView = self.overlay.view;
    self.picker.delegate = self.overlay;
    self.picker.showsCameraControls = NO;
    self.picker.navigationBarHidden = YES;
    self.picker.toolbarHidden = YES;
    self.picker.wantsFullScreenLayout = YES;

    [self presentViewController:self.picker animated:NO completion:nil];
}
I'm also wondering which controller should be the delegate for these invisible buttons.
Thanks for the help in advance!
There is another alternative: AVCaptureSession. Using it you can do what you need.
Put this code in viewDidAppear:
//----- SHOW LIVE CAMERA PREVIEW -----
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPreset1920x1080;

CALayer *viewLayer = self.overlay.layer;
NSLog(@"viewLayer = %@", viewLayer);

AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[[captureVideoPreviewLayer connection] setVideoOrientation:AVCaptureVideoOrientationLandscapeLeft];
captureVideoPreviewLayer.frame = self.overlay.bounds;
[self.overlay.layer addSublayer:captureVideoPreviewLayer];

AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
    // Handle the error appropriately.
    NSLog(@"ERROR: trying to open camera: %@", error);
}
[session addInput:input];

// The capture method below reads stillImageOutput, so it must be created and
// attached to the session here (stillImageOutput is assumed to be an
// AVCaptureStillImageOutput ivar or property declared elsewhere).
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[stillImageOutput setOutputSettings:[NSDictionary dictionaryWithObject:AVVideoCodecJPEG forKey:AVVideoCodecKey]];
[session addOutput:stillImageOutput];

[session startRunning];
and to capture an image:
- (void)captureNow
{
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }

    NSLog(@"about to request a capture from: %@", stillImageOutput);
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                                   completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
        if (exifAttachments) {
            // Do something with the attachments.
            NSLog(@"attachments: %@", exifAttachments);
        } else {
            NSLog(@"no attachments");
        }

        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        // Note: the capture is asynchronous, so the image cannot be returned
        // from this method (the original -(UIImage *) signature would not
        // compile); use the image right here instead, e.g. display it or save it:
        UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
    }];
}
Here is a reference link.