I have written code like this:
-(IBAction)startCapture
{
//session object
captureSession = [[AVCaptureSession alloc]init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:captureSession];
previewLayer.frame = CGRectMake(0, 10, 320, 200); // or: self.view.frame
[self.view.layer addSublayer:previewLayer];
NSError *error = nil;
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
//input object
AVCaptureDeviceInput *inputDevice = [[AVCaptureDeviceInput alloc]initWithDevice:device error:&error];
[captureSession addInput:inputDevice];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[captureSession addOutput:stillImageOutput];
[captureSession startRunning];
}
-(IBAction) captureNow
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
NSLog(#"exif Attachments:%#",exifAttachments);
if (exifAttachments)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.vImage.image = image;
// Do something with the attachments.
}
else
NSLog(#"no attachments");
}];
}
to capture images. But I want to know the shutter speed, ISO value and aperture at the moment of capture. How can I find these values? I tried the following:
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
NSDictionary *exifDict = (NSDictionary *)exifAttachments;
NSLog(#"\n exif data = %#",exifDict);
CFNumberRef aperaturevalue = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifApertureValue, NULL);
NSNumber *num = (NSNumber *)aperaturevalue;
NSLog(#"\n AperatureValue : %#",num);
CFNumberRef shutter = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifShutterSpeedValue, NULL);
NSNumber *shunum = (NSNumber *)shutter;
NSLog(#"\n shuttervalue : %#",shunum);
CFArrayRef isoRef = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifISOSpeedRatings, NULL);
NSArray *iso = (NSArray *)isoRef;
NSLog(#"Iso value : %#",iso);
}
but it gives output like this:
exif data = {
ApertureValue = "2.970853605202583";
ExposureMode = 0;
ExposureProgram = 2;
FNumber = "2.8";
Flash = 32;
MeteringMode = 1;
SceneType = 1;
SensingMethod = 2;
WhiteBalance = 0;
}
2011-06-23 14:35:14.955 CameraExample[1464:307]
AperatureValue : (null)
2011-06-23 14:35:14.981 CameraExample[1464:307]
shuttervalue : (null)
2011-06-23 14:35:14.999 CameraExample[1464:307] Iso value : (null)
Try adding this code in your block:
CFDictionaryRef exifDictRef = CMGetAttachment(imageSampleBuffer,kCGImagePropertyExifDictionary, NULL);
NSDictionary *exifDict = (NSDictionary *)exifDictRef;
for (id key in exifDict) {
NSLog(#"key = %#, value = %#",key,[exifDict objectForKey:key]);
}
You should find the values you are looking for under these keys. They live inside the EXIF dictionary itself rather than as separate attachments on the sample buffer, which is why your direct CMGetAttachment calls returned null:
kCGImagePropertyExifShutterSpeedValue (result is an NSNumber)
kCGImagePropertyExifApertureValue (result is an NSNumber)
kCGImagePropertyExifISOSpeedRatings (result is an NSArray)
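For instance, a minimal sketch of reading them inside the completion handler (this assumes ImageIO is imported for the key constants, uses the same plain casts as the code above, and keep in mind that which EXIF keys are actually present can vary by device and capture settings):
CFDictionaryRef exifRef = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
NSDictionary *exif = (NSDictionary *)exifRef;
// The values live inside the EXIF dictionary, not as separate sample buffer attachments.
NSNumber *shutterSpeed = [exif objectForKey:(NSString *)kCGImagePropertyExifShutterSpeedValue];
NSNumber *aperture = [exif objectForKey:(NSString *)kCGImagePropertyExifApertureValue];
NSArray *isoRatings = [exif objectForKey:(NSString *)kCGImagePropertyExifISOSpeedRatings];
NSLog(@"shutter speed = %@, aperture = %@, ISO ratings = %@", shutterSpeed, aperture, isoRatings);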
This bug appears only occasionally and I have no idea how to fix it; please help me.
If you want to see the complete project, you can check this link:
https://github.com/liman123/custom-camera
The bug is: when I take a photo, the screen shows a distorted image, as in the attached picture.
The main code is here:
- (void)setupConfiguration
{
_captureSession = [[AVCaptureSession alloc]init];
_captureSession.sessionPreset = AVCaptureSessionPresetPhoto;
_capturedImageView = [[UIImageView alloc]init];
_capturedImageView.frame = self.view.frame; // just to even it out
_capturedImageView.backgroundColor = [UIColor clearColor];
_capturedImageView.userInteractionEnabled = YES;
_capturedImageView.contentMode = UIViewContentModeScaleAspectFill;
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:_captureSession];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_captureVideoPreviewLayer.frame = self.view.bounds;
[self.view.layer addSublayer:_captureVideoPreviewLayer];
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
if (devices.count > 0) {
_captureDevice = devices[0];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:&error];
[_captureSession addInput:input];
_stillImageOutput = [[AVCaptureStillImageOutput alloc]init];
NSDictionary * outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[_stillImageOutput setOutputSettings:outputSettings];
[_captureSession addOutput:_stillImageOutput];
}
}
-(void)captureButtonClick
{
_isCapturingImage = YES;
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if (imageSampleBuffer) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *capturedImage = [[UIImage alloc]initWithData:imageData scale:1];
_isCapturingImage = NO;
_capturedImageView.image = capturedImage;
_selectedImage = capturedImage;
imageData = nil;
[self.view addSubview:_imageSelectedView];
}
}];
}
I am trying to display a picture from the camera in a UIImageView, and I am trying to debug my application (with @property (nonatomic,weak) IBOutlet UILabel *messageLabel;).
At the line "[output captureStillImageAsynchronouslyFromConnection ..." the label already has the value assigned before the call (self.messageLabel.text = @"before !!"), but it never gets the value @"Test !!", so the completion handler does not seem to run.
The result variable that is returned is "OK".
I use the entitlement:
com.apple.security.device.camera
Can you help me debug this in more detail?
Here is my code
-(NSString*) takePhoto
{
AVCaptureDevice *frontalCamera;
AVCaptureSession *photoSession;
NSString* result;
NSArray *allCameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for ( int i = 0; i < allCameras.count; i++ )
{
AVCaptureDevice *camera = [allCameras objectAtIndex:i];
if ( camera.position == AVCaptureDevicePositionFront )
{
frontalCamera = camera;
}
}
if ( frontalCamera != nil )
{
photoSession = [[AVCaptureSession alloc] init];
NSError *error;
AVCaptureDeviceInput *input =
[AVCaptureDeviceInput deviceInputWithDevice:frontalCamera error:&error];
if ( !error && [photoSession canAddInput:input] )
{
[photoSession addInput:input];
AVCaptureStillImageOutput *output = [[AVCaptureStillImageOutput alloc] init];
[output setOutputSettings:
[[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey,nil]];
if ( [photoSession canAddOutput:output] )
{
[photoSession addOutput:output];
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in output.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
if ( videoConnection )
{
[photoSession startRunning];
result = #"Let's Go ?";
self.messageLabel.text = #"before !!";
[output captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
self.messageLabel.text = #"Test !!";
if (imageDataSampleBuffer != NULL)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *photo = [[UIImage alloc] initWithData:imageData];
self.vImage.image = photo;
}
}];
result = #"OK";
}
}
}
}
return result;
}
I am making a customized photo capture and video recording view (two features in one view).
Init view:
_session = [[AVCaptureSession alloc] init];
_session.sessionPreset = AVCaptureSessionPreset640x480;
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_captureVideoPreviewLayer.frame = _previewView.layer.bounds; // parent of layer
[_previewView.layer addSublayer:_captureVideoPreviewLayer];
AVCaptureDevice *videoDevice = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo][0];
if ([videoDevice isFlashAvailable] && videoDevice.flashActive && [videoDevice lockForConfiguration:nil]) {
videoDevice.flashMode = AVCaptureFlashModeOff;
[videoDevice unlockForConfiguration];
}
NSError * error = nil;
AVCaptureDeviceInput * input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (!input) {
if (_delegate && [_delegate respondsToSelector:@selector(customCameraViewDidLoadFailed)]) {
[_delegate customCameraViewDidLoadFailed];
}
}
if ([_session canAddInput:input]) {
[_session addInput:input];
}
AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (!audioDeviceInput) {
if (_delegate && [_delegate respondsToSelector:@selector(customCameraViewDidLoadFailed)]) {
[_delegate customCameraViewDidLoadFailed];
}
}
if ([_session canAddInput:audioDeviceInput]) {
[_session addInput:audioDeviceInput];
}
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary * outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[_stillImageOutput setOutputSettings:outputSettings];
if ([_session canAddOutput:_stillImageOutput]) {
[_session addOutput:_stillImageOutput];
}
_movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([_session canAddOutput:_movieFileOutput])
{
[_session addOutput:_movieFileOutput];
AVCaptureConnection *connection = [_movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoStabilizationSupported]) {
[connection setEnablesVideoStabilizationWhenAvailable:YES];
}
}
[_session startRunning];
In the capture method:
_session.sessionPreset = AVCaptureSessionPreset640x480;
_isCapturing = YES;
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
[_loadingView startAnimating];
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage * capturedImage = [[UIImage alloc]initWithData:imageData scale:1];
_isCapturing = NO;
imageData = nil;
[_loadingView stopAnimating];
if (_delegate && [_delegate respondsToSelector:@selector(customCameraView:didFinishCaptureImage:)]) {
[_delegate customCameraView:self didFinishCaptureImage:capturedImage];
}
}];
I'm facing a problem: the first photo or video I capture is always BLACK.
Can you help me solve this problem?
Thanks a lot!
I'm looking for some help with AVFoundation. Currently I am following a step-by-step guide on how to capture a still image from this topic: How to save photos taken using AVFoundation to Photo Album?
My question is: how can I lower the quality of my saved image while still using AVCaptureSessionPreset640x480? I would like to halve the image quality, or, if there is another way to make the saved image as small a file as possible (possibly 320x280), that could be better than adjusting the actual quality.
I don't know if anyone else has asked this before, but I have been searching the web for the past couple of days and cannot find an answer. Below is my code.
#import "ViewController.h"
#import <ImageIO/ImageIO.h>
@interface ViewController ()
@end
@implementation ViewController
@synthesize imagePreview;
@synthesize iImage;
@synthesize stillImageOutput;
-(IBAction)captureNow {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection)
{
break;
}
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
} else {
NSLog(#"no attachments");
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
//NSData *data2 = [NSData dataWithData:UIImageJPEGRepresentation(image, 0.5f)]];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.iImage.image = image;
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
}];
}
-(void)viewDidAppear:(BOOL)animated
{
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPreset640x480;
CALayer *viewLayer = self.imagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.imagePreview.bounds;
[self.imagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
#end
Try using a different preset; each one gives you a different resolution image.
As per the Apple documentation, for the iPhone 4 (back camera) you will get the following image resolutions for the following session presets:
AVCaptureSessionPresetHigh : 1280x720
AVCaptureSessionPresetMedium : 480x360
AVCaptureSessionPresetLow : 192x144
AVCaptureSessionPreset640x480 : 640x480
AVCaptureSessionPreset1280x720 : 1280x720
AVCaptureSessionPresetPhoto : 2592x1936. This preset is not supported for video output.
Hope this helps.
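Another option is to shrink and recompress the UIImage you already get back from jpegStillImageNSDataRepresentation:, along the lines of the UIImageJPEGRepresentation call commented out in the question. A minimal sketch, assuming a 320x280 target and a 0.5 compression factor (both values, and the helper name, are only illustrative; note that 320x280 does not preserve the 640x480 aspect ratio):
- (NSData *)smallJPEGDataFromImage:(UIImage *)image
{
    CGSize targetSize = CGSizeMake(320.0, 280.0);
    // Redraw the captured image into a smaller bitmap context.
    UIGraphicsBeginImageContextWithOptions(targetSize, YES, 1.0);
    [image drawInRect:CGRectMake(0.0, 0.0, targetSize.width, targetSize.height)];
    UIImage *smallImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    // Recompress; lower quality means a smaller file.
    return UIImageJPEGRepresentation(smallImage, 0.5f);
}
You could then write that NSData to disk yourself, or rebuild a UIImage from it with [UIImage imageWithData:] before handing it to UIImageWriteToSavedPhotosAlbum.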
You need to set the kCVPixelBufferWidthKey and kCVPixelBufferHeightKey options on the AVCaptureStillImageOutput object to get a resolution of your choice. This width/height will override the session preset's width/height. A minimal sample is below (add error checking).
_session = [[AVCaptureSession alloc] init];
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError * error;
_sessionInput = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:320.0], (id)kCVPixelBufferWidthKey,
[NSNumber numberWithDouble:280.0], (id)kCVPixelBufferHeightKey,
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
nil];
[_stillImageOutput setOutputSettings:options];
[_session beginConfiguration ];
[_session addInput:_sessionInput];
[_session addOutput:_stillImageOutput];
[_session setSessionPreset:AVCaptureSessionPresetPhoto];
_avConnection = [_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[ _session commitConfiguration ];
.............
- (void) start
{
[self.session startRunning];
}
.............
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:self.avConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
NSLog(#"%d : %d", height, width);
}];
Note: I have only tried this on a Mac; ideally it should work for iOS too. Also try maintaining a sensible aspect ratio.
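Because the output settings above request kCVPixelFormatType_32BGRA pixel buffers rather than JPEG data, jpegStillImageNSDataRepresentation: no longer applies; on iOS one way to turn the returned buffer into a UIImage is via Core Image. A rough sketch for the inside of the completion handler (assumes CoreImage is imported; error handling omitted):
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
// Wrap the BGRA pixel buffer in a CIImage and render it out to a CGImage.
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
CIContext *context = [CIContext contextWithOptions:nil];
CGImageRef cgImage = [context createCGImage:ciImage fromRect:[ciImage extent]];
UIImage *capturedImage = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage);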
The following code doesn't work. What's wrong?
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
if (![captureSession canAddInput:videoInput])
NSLog(#"Can't add input");
[captureSession addInput:videoInput];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[self.stillImageOutput setOutputSettings:@{AVVideoCodecKey:AVVideoCodecJPEG}];
if (![captureSession canAddOutput:videoInput])
NSLog(#"Can't add output");
[captureSession addOutput:self.stillImageOutput];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
NSLog(#"!!!");
if (imageDataSampleBuffer == NULL)
{
NSLog(#"%#", error);
return;
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.imageView.image = image;
}];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.previewLayer];
[captureSession startRunning];
AVCaptureVideoPreviewLayer works nicely, but AVCaptureStillImageOutput never calls the completion handler at all...
You need to set up and start your session in one method, then have a separate capture method:
/////////////////////////////////////////////////
////
//// Utility to find front camera
////
/////////////////////////////////////////////////
-(AVCaptureDevice *) frontFacingCameraIfAvailable{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices){
if (device.position == AVCaptureDevicePositionFront){
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if (!captureDevice){
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
/////////////////////////////////////////////////
////
//// Setup Session, attach Video Preview Layer
//// and Capture Device, start running session
////
/////////////////////////////////////////////////
-(void) setupCaptureSession {
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[self.view.layer addSublayer:captureVideoPreviewLayer];
NSError *error = nil;
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[session addOutput:self.stillImageOutput];
[session startRunning];
}
/////////////////////////////////////////////////
////
//// Method to capture Still Image from
//// Video Preview Layer
////
/////////////////////////////////////////////////
-(void) captureNow {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", self.stillImageOutput);
__weak typeof(self) weakSelf = self;
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
[weakSelf displayImage:image];
}];
}
This works well:
- (void)viewDidLoad
{
[super viewDidLoad];
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
[captureSession addInput:videoInput];
[captureSession addOutput:self.stillImageOutput];
[captureSession startRunning];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer insertSublayer:self.previewLayer atIndex:0];
}
- (void)timerFired:(NSTimer *)timer
{
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
NSLog(#"!!!");
if (imageDataSampleBuffer == NULL)
NSLog(#"%#", error);
NSData * imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage * image = [[UIImage alloc] initWithData:imageData];
self.imageView.image = image;
}];
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
[NSTimer scheduledTimerWithTimeInterval:0.5 target:self selector:@selector(timerFired:) userInfo:nil repeats:YES];
}