I'm trying to get my app to create a UIImage the correct way round.
Most of my code is taken from Apple examples...
@interface CameraManager () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) CIContext *context;
@property (nonatomic, strong) AVCaptureDevice *rearCamera;
@end
@implementation CameraManager
- (id)init {
if ((self = [super init])) {
self.context = [CIContext contextWithOptions:nil];
[self setupCamera];
[self addStillImageOutput];
}
return self;
}
- (void)setupCamera
{
self.session = [[AVCaptureSession alloc] init];
[self.session beginConfiguration];
[self.session setSessionPreset:AVCaptureSessionPresetPhoto];
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
self.rearCamera = nil;
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionBack) {
self.rearCamera = device;
break;
}
}
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.rearCamera error:&error];
[self.session addInput:input];
AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
[dataOutput setAlwaysDiscardsLateVideoFrames:YES];
NSDictionary *options = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
[dataOutput setVideoSettings:options];
[dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
[self.session addOutput:dataOutput];
[self.session commitConfiguration];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
// grab the pixel buffer
CVPixelBufferRef pixelBuffer = (CVPixelBufferRef) CMSampleBufferGetImageBuffer(sampleBuffer);
// create a CIImage from it, rotate it and zero the origin
CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];
if ([[UIApplication sharedApplication] statusBarOrientation] == UIInterfaceOrientationLandscapeLeft) {
image = [image imageByApplyingTransform:CGAffineTransformMakeRotation(M_PI)];
}
CGPoint origin = [image extent].origin;
image = [image imageByApplyingTransform:CGAffineTransformMakeTranslation(-origin.x, -origin.y)];
// set it as the contents of the UIImageView
CGImageRef cgImage = [self.context createCGImage:image fromRect:[image extent]];
UIImage *uiImage = [UIImage imageWithCGImage:cgImage];
[[NSNotificationCenter defaultCenter] postNotificationName:@"image" object:uiImage];
CGImageRelease(cgImage);
}
- (void)addStillImageOutput
{
[self setStillImageOutput:[[AVCaptureStillImageOutput alloc] init]];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey,nil];
[[self stillImageOutput] setOutputSettings:outputSettings];
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in [self.stillImageOutput connections]) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
[[self session] addOutput:[self stillImageOutput]];
}
- (void)captureStillImage
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in [[self stillImageOutput] connections]) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
NSLog(#"about to request a capture from: %#", [self stillImageOutput]);
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments) {
NSLog(#"attachements: %#", exifAttachments);
} else {
NSLog(#"no attachments");
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
[[NSNotificationCenter defaultCenter] postNotificationName:kImageCapturedSuccessfully object:image];
}];
}
This is my camera manager class code.
I am displaying the preview of the camera using the OutputSampleBufferDelegate (for various reasons).
I'm using the session output to "take a photo".
The method captureStillImage is the bit I'm trying to fix.
The photos are taken with the device in LandscapeLeft orientation (the interface is also LandscapeLeft).
The previews all show the correct way around and the exif data shows the width and height the correct way around too. (X = 3264, Y = 2448).
But when I display the UIImage it is rotated 90 degrees counterclockwise. The aspect ratio of the image is correct (i.e. everything looks fine, circles are still circles); it's only the rotation that's wrong.
I have found several categories that claim to fix this.
I have also found several StackOverflow questions with answers that also claim to fix it.
None of these worked.
Does anyone know how to rotate this thing the right way around?
What I usually do is add the following code before calling captureStillImageAsynchronouslyFromConnection:
if ([videoConnection isVideoOrientationSupported]) {
[videoConnection setVideoOrientation:(AVCaptureVideoOrientation)[UIDevice currentDevice].orientation];
}
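One caveat with passing the device orientation straight through: UIDeviceOrientation and AVCaptureVideoOrientation are different enums whose landscape values are swapped, so a landscape photo can still come out sideways. A sketch of an explicit mapping based on the interface orientation (the helper name is mine, not from the code above):

- (AVCaptureVideoOrientation)videoOrientationForInterfaceOrientation:(UIInterfaceOrientation)orientation
{
    // Map the UI orientation onto the connection's video orientation explicitly.
    switch (orientation) {
        case UIInterfaceOrientationPortraitUpsideDown: return AVCaptureVideoOrientationPortraitUpsideDown;
        case UIInterfaceOrientationLandscapeLeft:      return AVCaptureVideoOrientationLandscapeLeft;
        case UIInterfaceOrientationLandscapeRight:     return AVCaptureVideoOrientationLandscapeRight;
        case UIInterfaceOrientationPortrait:
        default:                                       return AVCaptureVideoOrientationPortrait;
    }
}

// Usage, right before requesting the capture:
if ([videoConnection isVideoOrientationSupported]) {
    UIInterfaceOrientation uiOrientation = [[UIApplication sharedApplication] statusBarOrientation];
    videoConnection.videoOrientation = [self videoOrientationForInterfaceOrientation:uiOrientation];
}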
Maybe you should try setting the image orientation after receiving the image data in the captureStillImageAsynchronouslyFromConnection completion block:
UIImage *image = [[UIImage alloc] initWithData:imageData];
image = [[UIImage alloc] initWithCGImage:image.CGImage scale:1.0f orientation:UIImageOrientationDown];
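Keep in mind that the orientation passed to initWithCGImage:scale:orientation: is only metadata; UIImageView respects it, but the underlying pixels are not rotated. If you later need the pixels themselves upright (for cropping, uploading, etc.), one common approach is to redraw the image, roughly like this (a sketch, not part of the answer above):

// Redraw the image so the orientation is baked into the bitmap itself.
- (UIImage *)normalizedImageFromImage:(UIImage *)image
{
    if (image.imageOrientation == UIImageOrientationUp) {
        return image; // already upright, nothing to do
    }
    UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
    [image drawInRect:CGRectMake(0, 0, image.size.width, image.size.height)]; // honors imageOrientation
    UIImage *normalized = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return normalized;
}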
The orientation issue occurs with the front camera, so check which device is in use and generate a new image; that should solve the orientation issue:
-(void)capture:(void(^)(UIImage *))handler{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in self.stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if (imageSampleBuffer != NULL) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *capturedImage = [UIImage imageWithData:imageData];
if (self.captureDevice == [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo][1]) {
capturedImage = [[UIImage alloc] initWithCGImage:capturedImage.CGImage scale:1.0f orientation:UIImageOrientationLeftMirrored];
}
handler(capturedImage);
}
}];
}
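One note on the snippet above: comparing against index [1] of devicesWithMediaType: is fragile, since the order of that array isn't guaranteed. A safer variant of the same check (a sketch, reusing the answer's self.captureDevice property) looks at the device position directly:

// Mirror only when the active device is the front camera.
if (self.captureDevice.position == AVCaptureDevicePositionFront) {
    capturedImage = [[UIImage alloc] initWithCGImage:capturedImage.CGImage
                                               scale:1.0f
                                         orientation:UIImageOrientationLeftMirrored];
}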
There seems to be a number of questions about this issue here. Since most of them don’t provide code snippets, I am electing to show all my code. The problem: the picture taken is taller than the preview. My preview is w=288 by h=288; but the picture that is returned is w=2448 by h=3264. Why is the returned image not a square when my preview is a square?
AVCaptureSession *session;
AVCaptureStillImageOutput *stillImageOutput;
-(void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
session=[[AVCaptureSession alloc]init];
[session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
}
AVCaptureVideoPreviewLayer * previewLayout = [[AVCaptureVideoPreviewLayer alloc]initWithSession:session];
[previewLayout setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayout =[[self view]layer];
[rootLayout setMasksToBounds:YES];
CGRect frame = self.frameForCapture.frame;
[previewLayout setFrame:frame];
[rootLayout insertSublayer:previewLayout atIndex:0];
stillImageOutput=[[AVCaptureStillImageOutput alloc]init];
NSDictionary *outputSettings =[[NSDictionary alloc]initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
- (IBAction)takePhoto:(id)sender
{
AVCaptureConnection *videoConnection= nil;
for (AVCaptureConnection *connection in stillImageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
videoConnection=connection;
break;
}
}
if (videoConnection) {
break;
}
}
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer != NULL) {//I.E. it does exist
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *originalImage =[UIImage imageWithData:imageData ];
NSLog(#"Image w = %f; h = %f",originalImage.size.width,originalImage.size.height);
CGSize size = self.frameForCapture.frame.size;
UIGraphicsBeginImageContext(size);
[originalImage drawInRect:CGRectMake(0,0,size.width,size.width)];
UIImage* image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
self.imageView.image=image;
[self.imageView setHidden:NO];
[self.frameForCapture setHidden:YES];
}
}];
}
I have been playing around with CGImageCreateWithImageInRect to get the preview to be what I finally save and use in my ImageView. But so far no luck. One of the places I have been looking is Cropping an UIImage. Any ideas how I might fix this issue?
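For reference, one way to make the saved photo match a square preview that uses AVLayerVideoGravityResizeAspectFill is to crop the capture to a centered square with CGImageCreateWithImageInRect. A sketch (the helper name is mine; the scale handling assumes the camera JPEG's usual scale of 1.0):

- (UIImage *)squareCropOfImage:(UIImage *)image
{
    // Redraw first so the JPEG's orientation metadata is baked in and the
    // crop rect is in the coordinates you expect.
    UIGraphicsBeginImageContextWithOptions(image.size, NO, 1.0);
    [image drawInRect:CGRectMake(0, 0, image.size.width, image.size.height)];
    UIImage *upright = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    CGFloat side = MIN(upright.size.width, upright.size.height);
    CGRect cropRect = CGRectMake((upright.size.width - side) / 2.0,
                                 (upright.size.height - side) / 2.0,
                                 side, side);
    CGImageRef croppedRef = CGImageCreateWithImageInRect(upright.CGImage, cropRect);
    UIImage *cropped = [UIImage imageWithCGImage:croppedRef];
    CGImageRelease(croppedRef);
    return cropped;
}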
Hello, I am using an AVCaptureSession in Xcode to make a live camera screen so I can take photos (similar to the Snapchat setup). The camera is fully functional and I have set it up so I can use the front or back camera. The back camera works fine: I can capture an image and it previews how I need it. The front camera previews OK, but when the image is captured, the preview of it is flipped, and I can't see where this is occurring.
Here's my code for the session:
- (void) initializeCamera {
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetHigh;
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
captureVideoPreviewLayer.frame = self.imagePreview.bounds;
[self.imagePreview.layer addSublayer:captureVideoPreviewLayer];
UIView *view = [self imagePreview];
CALayer *viewLayer = [view layer];
[viewLayer setMasksToBounds:YES];
CGRect bounds = [view bounds];
[captureVideoPreviewLayer setFrame:bounds];
NSArray *devices = [AVCaptureDevice devices];
AVCaptureDevice *frontCamera = nil;
AVCaptureDevice *backCamera = nil;
for (AVCaptureDevice *device in devices) {
NSLog(#"Device name: %#", [device localizedName]);
if ([device hasMediaType:AVMediaTypeVideo]) {
if ([device position] == AVCaptureDevicePositionBack) {
NSLog(#"Device position : back");
backCamera = device;
}
else {
NSLog(#"Device position : front");
frontCamera = device;
}
}
}
if (!FrontCamera) {
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
if (!input) {
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
}
if (FrontCamera) {
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
if (!input) {
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
}
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
- (void) capImage {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if (imageSampleBuffer != NULL) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
[self processImage:[UIImage imageWithData:imageData]];
}
}];
}
- (void) processImage:(UIImage *)image {
haveImage = YES;
if([UIDevice currentDevice].userInterfaceIdiom==UIUserInterfaceIdiomPad) { //Device is ipad
UIGraphicsBeginImageContext(CGSizeMake(3072, 4088));
[image drawInRect: CGRectMake(0, 0, 3072, 4088)];
UIImage *smallImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
CGRect cropRect = CGRectMake(0, 0, 3072, 4088);
CGImageRef imageRef = CGImageCreateWithImageInRect([smallImage CGImage], cropRect);
[captureImage setImage:[UIImage imageWithCGImage:imageRef]];
CGImageRelease(imageRef);
}else{ //Device is iphone
UIGraphicsBeginImageContext(CGSizeMake(1280, 2272));
[image drawInRect: CGRectMake(0, 0, 1280, 2272)];
UIImage *smallImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
UIImage * flippedImage = [UIImage imageWithCGImage:smallImage.CGImage scale:smallImage.scale orientation:UIImageOrientationLeftMirrored];
smallImage = flippedImage;
CGRect cropRect = CGRectMake(0, 0, 1280, 2272);
CGImageRef imageRef = CGImageCreateWithImageInRect([smallImage CGImage], cropRect);
[captureImage setImage:[UIImage imageWithCGImage:imageRef]];
CGImageRelease(imageRef);
}
}
I also want to add tap to focus and flash, but I don't know where I have to implement the code. Here is what I have found:
flash -
for the flash, all I can find is a toggle for the torch. I can't find a way to make it come on only when capturing, like the flash in Apple's Camera app.
tap to focus -
ios AVFoundation tap to focus
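Both of these are configured on the AVCaptureDevice rather than on the session. A rough sketch (the method names are mine, and the tapped point must first be converted into the 0–1 device coordinate space, e.g. with AVCaptureVideoPreviewLayer's captureDevicePointOfInterestForPoint:):

// Sketch: auto flash that fires with the still capture (unlike the torch).
- (void)configureFlashForDevice:(AVCaptureDevice *)device
{
    NSError *error = nil;
    if ([device hasFlash] && [device isFlashModeSupported:AVCaptureFlashModeAuto]
        && [device lockForConfiguration:&error]) {
        device.flashMode = AVCaptureFlashModeAuto;
        [device unlockForConfiguration];
    }
}

// Sketch: tap to focus at a point in the device's (0,0)-(1,1) coordinate space.
- (void)focusDevice:(AVCaptureDevice *)device atPoint:(CGPoint)devicePoint
{
    NSError *error = nil;
    if ([device isFocusPointOfInterestSupported]
        && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]
        && [device lockForConfiguration:&error]) {
        device.focusPointOfInterest = devicePoint;
        device.focusMode = AVCaptureFocusModeAutoFocus;
        [device unlockForConfiguration];
    }
}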
I believe that is the default behavior for the front-facing camera. Try flipping the output image manually right before it is displayed.
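A minimal sketch of that flip, assuming capturedImage is the UIImage built from the JPEG data in the completion handler (captureImage is the question's image view):

// Wrap the same pixels with a mirrored orientation so the displayed image
// matches what the front-camera preview showed.
UIImage *mirrored = [UIImage imageWithCGImage:capturedImage.CGImage
                                        scale:capturedImage.scale
                                  orientation:UIImageOrientationLeftMirrored];
[captureImage setImage:mirrored];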
I have a photo taking app that is using AVFoundation. So far everything works perfectly.
However, the one thing that is really confusing me is, what object is the captured image actually contained in?
I have been NSLogging all of the objects and some of their properties and I still can't figure out where the captured image is contained.
Here is my code for setting up the capture session:
self.session =[[AVCaptureSession alloc]init];
[self.session setSessionPreset:AVCaptureSessionPresetPhoto];
self.inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
self.deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:self.inputDevice error:&error];
if([self.session canAddInput:self.deviceInput])
[self.session addInput:self.deviceInput];
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:self.session];
self.rootLayer = [[self view]layer];
[self.rootLayer setMasksToBounds:YES];
[self.previewLayer setFrame:CGRectMake(0, 0, self.rootLayer.bounds.size.width, self.rootLayer.bounds.size.height)];
[self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[self.rootLayer insertSublayer:self.previewLayer atIndex:0];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[self.session addOutput:self.stillImageOutput];
[self.session startRunning];
}
And then here is my code for capturing a still image when the user presses the capture button:
-(IBAction)stillImageCapture {
AVCaptureConnection *videoConnection = nil;
videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
for (AVCaptureConnection *connection in self.stillImageOutput.connections){
for (AVCaptureInputPort *port in [connection inputPorts]){
if ([[port mediaType] isEqual:AVMediaTypeVideo]){
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
[self.session stopRunning];
}
];}
When the user presses the capture button, and the above code executes, the captured image is successfully displayed on the iPhone screen, but I can't figure out which object is actually holding the captured image.
Thanks for the help.
The CMSampleBuffer is what actually contains the image.
In your captureStillImageAsynchronouslyFromConnection completion handler, you'll want something like:
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage* capturedImage = [[UIImage alloc] initWithData:imageData];
My working implementation of it:
- (void)captureStillImage
{
@try {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _stillImageOutput.connections){
for (AVCaptureInputPort *port in [connection inputPorts]){
if ([[port mediaType] isEqual:AVMediaTypeVideo]){
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
NSLog(#"About to request a capture from: %#", [self stillImageOutput]);
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
// This is here for when we need to implement Exif stuff.
//CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
// Create a UIImage from the sample buffer data
_capturedImage = [[UIImage alloc] initWithData:imageData];
BOOL autoSave = YES;
if (autoSave)
{
UIImageWriteToSavedPhotosAlbum(_capturedImage, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
}
}];
}
@catch (NSException *exception) {
NSLog(@"ERROR: Unable to capture still image from AVFoundation camera: %@", exception);
}
}
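Since the code above passes @selector(image:didFinishSavingWithError:contextInfo:) to UIImageWriteToSavedPhotosAlbum, the class also needs to implement that callback; a minimal version looks like this:

// Callback invoked by UIImageWriteToSavedPhotosAlbum when the save finishes.
- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    if (error) {
        NSLog(@"Error saving image to the photo library: %@", error);
    } else {
        NSLog(@"Image saved to the photo library.");
    }
}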
I am programmatically launching the camera using AVCaptureVideoPreviewLayer and then using the following code to take a picture automatically. The resulting picture is lower resolution and brighter than photos taken normally with the native Camera app. What is the problem here? Could someone help?
-(void) capturePicture
{
// Get all cameras on the device and find the back camera.
AVCaptureDevice *backCamera;
NSArray *allCameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
// Find the back camera.
for ( int i = 0; i < allCameras.count; i++ ) {
AVCaptureDevice *camera = [allCameras objectAtIndex:i];
if ( camera.position == AVCaptureDevicePositionBack ) {
backCamera = camera;
}
}
// If we did not find the camera then do not take picture.
if ( backCamera != nil ) {
// Start the process of getting a picture.
AVCaptureSession *session = [[AVCaptureSession alloc] init];
// Setup instance of input with back camera and add to session.
NSError *error;
AVCaptureDeviceInput *input =
[AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
if ( !error && [session canAddInput:input] ) {
// Add the back camera's input to this session.
[session addInput:input];
// We need to capture still image.
AVCaptureStillImageOutput *output = [[AVCaptureStillImageOutput alloc] init];
// Captured image settings.
[output setOutputSettings:
[[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey,nil]];
if ( [session canAddOutput:output] )
{
[session addOutput:output];
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in output.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ( [[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection)
{
break;
}
}
// Finally take the picture
if ( videoConnection )
{
[session startRunning];
[output captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
if (imageDataSampleBuffer != NULL)
{
NSData *imageData = [AVCaptureStillImageOutput
jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *photo = [[UIImage alloc] initWithData:imageData];
UIImageWriteToSavedPhotosAlbum(photo, nil, nil, nil);
[session stopRunning];
NSInvocation *myInvocation = [NSInvocation invocationWithMethodSignature:[self methodSignatureForSelector:@selector(endScreen)]];
[myInvocation setSelector:@selector(endScreen)];
[myInvocation setTarget:self];
appDelegate.bImageTaken = YES;
UIImageWriteToSavedPhotosAlbum(photo, nil, nil, nil);
[NSTimer scheduledTimerWithTimeInterval:0.5 invocation:myInvocation repeats:NO];
}
}];
}
}
}
}
}
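For reference, two things in this kind of setup commonly produce over-bright, lower-resolution stills: the session is left on its default preset rather than AVCaptureSessionPresetPhoto, and the capture is requested immediately after startRunning, before auto exposure and white balance have settled. A sketch of the usual adjustments (the 0.5 s delay is illustrative):

// Prefer the photo preset for full-resolution stills.
if ([session canSetSessionPreset:AVCaptureSessionPresetPhoto]) {
    session.sessionPreset = AVCaptureSessionPresetPhoto;
}
[session startRunning];

// Give auto exposure / white balance a moment to settle before capturing.
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
    [output captureStillImageAsynchronouslyFromConnection:videoConnection
                                        completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        // ...same handling as above...
    }];
});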
The following code doesn't work. What's wrong?
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
if (![captureSession canAddInput:videoInput])
NSLog(#"Can't add input");
[captureSession addInput:videoInput];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[self.stillImageOutput setOutputSettings:#{AVVideoCodecKey:AVVideoCodecJPEG}];
if (![captureSession canAddOutput:videoInput])
NSLog(#"Can't add output");
[captureSession addOutput:self.stillImageOutput];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
NSLog(#"!!!");
if (imageDataSampleBuffer == NULL)
{
NSLog(#"%#", error);
return;
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.imageView.image = image;
}];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.previewLayer];
[captureSession startRunning];
AVCaptureVideoPreviewLayer works nicely, but AVCaptureStillImageOutput does not call the completion handler at all...
You need to set up & start your session in one method,
then have a separate capture method:
/////////////////////////////////////////////////
////
//// Utility to find front camera
////
/////////////////////////////////////////////////
-(AVCaptureDevice *) frontFacingCameraIfAvailable{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices){
if (device.position == AVCaptureDevicePositionFront){
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if (!captureDevice){
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
/////////////////////////////////////////////////
////
//// Setup Session, attach Video Preview Layer
//// and Capture Device, start running session
////
/////////////////////////////////////////////////
-(void) setupCaptureSession {
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[self.view.layer addSublayer:captureVideoPreviewLayer];
NSError *error = nil;
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[session addOutput:self.stillImageOutput];
[session startRunning];
}
/////////////////////////////////////////////////
////
//// Method to capture Still Image from
//// Video Preview Layer
////
/////////////////////////////////////////////////
-(void) captureNow {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", self.stillImageOutput);
__weak typeof(self) weakSelf = self;
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
[weakSelf displayImage:image];
}];
}
This works well:
- (void)viewDidLoad
{
[super viewDidLoad];
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
[captureSession addInput:videoInput];
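// NOTE: assumes self.stillImageOutput has already been allocated and configured
// with JPEG output settings (for example lazily in its getter) before this point.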
[captureSession addOutput:self.stillImageOutput];
[captureSession startRunning];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer insertSublayer:self.previewLayer atIndex:0];
}
- (void)timerFired:(NSTimer *)timer
{
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
NSLog(#"!!!");
if (imageDataSampleBuffer == NULL)
NSLog(#"%#", error);
NSData * imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage * image = [[UIImage alloc] initWithData:imageData];
self.imageView.image = image;
}];
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
[NSTimer scheduledTimerWithTimeInterval:0.5 target:self selector:@selector(timerFired:) userInfo:nil repeats:YES];
}