I am trying to align a UIButton within my view controller. I can't tell what the specific name of my UIButton is, which I think is why I'm having a hard time moving it as a subview. Here is my code:
-(void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(@"viewLayer = %@", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
//Modify to fit screen
CGAffineTransform translate = CGAffineTransformMakeTranslation(0.0, 0.0);
CGAffineTransform scale = CGAffineTransformMakeScale(1.333333, 1.333333);
CGAffineTransform rotate = CGAffineTransformMakeRotation(DEGREES_RADIANS(180));
CGAffineTransform transform = CGAffineTransformConcat(translate, scale);
transform = CGAffineTransformConcat(transform, rotate);
self.vImagePreview.transform = transform;
[self.vImagePreview sendSubviewToFront:UIButton];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(@"ERROR: trying to open camera: %@", error);
}
[session addInput:input];
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[_stillImageOutput setOutputSettings:outputSettings];
[session addOutput:_stillImageOutput];
[session startRunning];
}
-(IBAction)captureNow {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection)
{
break;
}
}
NSLog(@"about to request a capture from: %@", _stillImageOutput);
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(@"attachments: %@", exifAttachments);
} else {
NSLog(@"no attachments");
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.vImage.image = image;
//Saved Photo modifications go here
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
}];
}
@end
and my headers, just in case:
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <AVKit/AVKit.h>
#import <ImageIO/CGImageProperties.h>
#define DEGREES_RADIANS(angle) ((angle) / 180.0 * M_PI)
@interface ViewController : UIViewController
@property(nonatomic, retain) IBOutlet UIView *vImagePreview;
@property(nonatomic, retain) AVCaptureStillImageOutput *stillImageOutput;
@property(nonatomic, retain) IBOutlet UIImageView *vImage;
@end
Some clarification:
My button is in my storyboard. The button should trigger the captureNow method, taking a picture. I just can't figure out how to get it to appear over the preview layer.
Since your UIButton is in the storyboard, instead of using
[self.vImagePreview sendSubviewToFront:UIButton];
use this line to display the button on top. Note that you must pass the button instance (e.g. an outlet such as self.captureButton), not the UIButton class, which will not compile:
[self.view bringSubviewToFront:self.captureButton];
Put it just before you start the session running, at the end of the method.
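For context, a minimal sketch of the idea, assuming the storyboard button is wired to a hypothetical captureButton outlet:
@property (nonatomic, retain) IBOutlet UIButton *captureButton; // hypothetical outlet, connected in the storyboard

// at the end of viewDidAppear:, after the preview layer has been added
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
// sublayers cover sibling subviews, so lift the button back on top
[self.view bringSubviewToFront:self.captureButton];
[session startRunning];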
Related
I've set up a custom camera view that has overlay buttons on the view. Everything works as it should. The camera takes the picture, and the picture shows up in a UIImageView. How do I save this picture once it's in the UIImageView?
Code below:
ViewController.h
@interface ViewController : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate> {
}
@property (strong, nonatomic) IBOutlet UIView *frameForCapture;
@property (strong, nonatomic) IBOutlet UIImageView *imageView;
- (IBAction)takePhoto:(id)sender;
@end
ViewController.m
@implementation ViewController
AVCaptureSession *session;
AVCaptureStillImageOutput *stillImageOutput;
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
}
-(void)viewWillAppear:(BOOL)animated{
[super viewWillAppear:animated];
session = [[AVCaptureSession alloc]init];
[session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ([session canAddInput:deviceInput]){
[session addInput:deviceInput];
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayer = [[self view]layer];
[rootLayer setMasksToBounds:YES];
CGRect frame = self.frameForCapture.frame;
[previewLayer setFrame:frame];
[rootLayer insertSublayer:previewLayer atIndex:0];
stillImageOutput = [[AVCaptureStillImageOutput alloc]init];
NSDictionary *outputSettings = [[NSDictionary alloc]initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
- (IBAction)takePhoto:(id)sender {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections){
for (AVCaptureInputPort *port in [connection inputPorts]){
if([[port mediaType] isEqual:AVMediaTypeVideo]){
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer != NULL) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData: imageData];
self.imageView.image = image;
}
}];
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
You can use this function to save the image to the device's photo album:
UIImageWriteToSavedPhotosAlbum(UIImage *image, id completionTarget, SEL completionSelector, void *contextInfo);
You only need completionTarget, completionSelector, and contextInfo if you want to be notified when the image is done saving; otherwise you can pass in nil.
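If you do want the notification, the completion selector must have the exact signature below. A sketch, assuming the image to save is the one displayed in self.imageView above:
// e.g. inside a save button action:
UIImageWriteToSavedPhotosAlbum(self.imageView.image, self,
    @selector(image:didFinishSavingWithError:contextInfo:), NULL);

// the callback; this signature must match exactly
- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    if (error) {
        NSLog(@"save failed: %@", error);
    } else {
        NSLog(@"saved to the photo album");
    }
}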
I'm looking for some help with AVFoundation. Currently I have been following a step-by-step guide on how to capture a still image from this topic: How to save photos taken using AVFoundation to Photo Album?
My question is: how can I lower the quality of my saved image while using AVCaptureSessionPreset640x480? I would like to halve the image quality, or, if there is another way to make the saved file as small as possible (possibly 320x280), that could be better than adjusting the actual quality.
I don't know if anyone has asked this before, but I have been searching the web for the past couple of days and cannot find an answer. Below is my code.
#import "ViewController.h"
#import <ImageIO/ImageIO.h>
@interface ViewController ()
@end
@implementation ViewController
@synthesize imagePreview;
@synthesize iImage;
@synthesize stillImageOutput;
-(IBAction)captureNow {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection)
{
break;
}
}
NSLog(@"about to request a capture from: %@", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(@"attachments: %@", exifAttachments);
} else {
NSLog(@"no attachments");
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
//NSData *data2 = [NSData dataWithData:UIImageJPEGRepresentation(image, 0.5f)];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.iImage.image = image;
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
}];
}
-(void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPreset640x480;
CALayer *viewLayer = self.imagePreview.layer;
NSLog(@"viewLayer = %@", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.imagePreview.bounds;
[self.imagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(@"ERROR: trying to open camera: %@", error);
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
Try using a different preset; each one gives you an image at a different resolution.
As per the Apple documentation, for the iPhone 4 (back camera) you will get the following resolutions for the following session presets:
AVCaptureSessionPresetHigh : 1280x720
AVCaptureSessionPresetMedium : 480x360
AVCaptureSessionPresetLow : 192x144
AVCaptureSessionPreset640x480 : 640x480
AVCaptureSessionPreset1280x720 : 1280x720
AVCaptureSessionPresetPhoto : 2592x1936 (not supported for video output)
Hope this helps.
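If you need a size that no preset offers (such as the 320x280 mentioned above), one option is to redraw the captured UIImage at the target size and recompress it inside the capture completion handler. A sketch, taking the 320x280 size and the half JPEG quality from the question as assumed targets:
// redraw the captured image at a smaller size
CGSize targetSize = CGSizeMake(320.0, 280.0);
UIGraphicsBeginImageContextWithOptions(targetSize, YES, 1.0);
[image drawInRect:CGRectMake(0, 0, targetSize.width, targetSize.height)];
UIImage *smallImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// recompress at half quality if you are writing the file yourself
NSData *smallData = UIImageJPEGRepresentation(smallImage, 0.5f);
// or save the resized image straight to the album
UIImageWriteToSavedPhotosAlbum(smallImage, nil, nil, nil);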
You need to set the kCVPixelBufferWidthKey and kCVPixelBufferHeightKey options on the AVCaptureStillImageOutput object to get a resolution of your choice. This width/height will override the session preset's width/height. A minimal sample is below (error checking omitted).
_session = [[AVCaptureSession alloc] init];
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError * error;
_sessionInput = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:320.0], (id)kCVPixelBufferWidthKey,
[NSNumber numberWithDouble:280.0], (id)kCVPixelBufferHeightKey,
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
nil];
[_stillImageOutput setOutputSettings:options];
[_session beginConfiguration];
[_session addInput:_sessionInput];
[_session addOutput:_stillImageOutput];
[_session setSessionPreset:AVCaptureSessionPresetPhoto];
_avConnection = [_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[_session commitConfiguration];
.............
- (void) start
{
[self.session startRunning];
}
.............
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:self.avConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
NSLog(@"%zu : %zu", height, width);
}];
Note: I have only tried this on a Mac; ideally it should work for iOS too. Also try to maintain a sensible aspect ratio.
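Note that with the BGRA pixel format configured above, the sample buffer contains raw pixels rather than JPEG data, so jpegStillImageNSDataRepresentation no longer applies. One way to turn the buffer into a UIImage is via Core Image; a sketch:
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
CIContext *context = [CIContext contextWithOptions:nil];
CGImageRef cgImage = [context createCGImage:ciImage fromRect:[ciImage extent]];
UIImage *uiImage = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage); // the created CGImage is owned by the caller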
I am trying to create a simple camera app with an overlay view that shows two buttons and a gesture recognizer.
Currently I have an 'OverlayViewController' with a .xib file which contains a UIView with two buttons and a gesture recognizer.
In my CameraViewController, which shows the camera in viewDidLoad through the UIImagePicker, I try to assign this picker my overlay, but so far I'm not getting any of the buttons in the overlay.
OverlayViewController.h
@interface OverlayViewController : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate, UIGestureRecognizerDelegate>
- (IBAction)take_picture:(UIButton *)sender;
- (IBAction)choose_picture:(UIButton *)sender;
@end
OverlayViewController.m
- pretty much empty (do I need to initialize the UIButtons here?)
CameraViewController.h
@interface ProfileViewController : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate, UIGestureRecognizerDelegate>
@property (strong, nonatomic) UIImagePickerController *picker;
@property (strong, nonatomic) OverlayViewController *overlay;
@end
CameraViewController.m
-(void)setupCamera
{
self.picker = [[UIImagePickerController alloc] init];
self.picker.sourceType = UIImagePickerControllerSourceTypeCamera;
self.picker.cameraCaptureMode = UIImagePickerControllerCameraCaptureModePhoto;
self.overlay = [[OverlayViewController alloc] initWithNibName:@"OverlayViewController" bundle:nil];
// _overlay.delegate = self;
// NSArray* nibViews= [[NSBundle mainBundle] loadNibNamed:@"OverlayViewController" owner:self.overlay options:nil];
// UIView* uiView= [nibViews objectAtIndex:0];
// uiView.opaque= NO;
// uiView.backgroundColor= [UIColor clearColor];
self.picker.cameraOverlayView = self.overlay.view;
self.picker.delegate = self.overlay;
self.picker.showsCameraControls = NO;
self.picker.navigationBarHidden = YES;
self.picker.toolbarHidden = YES;
self.picker.wantsFullScreenLayout = YES;
[self presentViewController:self.picker animated:NO completion:nil];
}
I'm also wondering which controller should be the delegate for these (currently invisible) buttons.
Thanks for the help in advance!
There is another alternative: AVCaptureSession. Using it you can do this yourself.
Put this code in viewDidAppear:
//----- SHOW LIVE CAMERA PREVIEW -----
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPreset1920x1080;
CALayer *viewLayer = self.overlay.layer;
NSLog(@"viewLayer = %@", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[[captureVideoPreviewLayer connection] setVideoOrientation:AVCaptureVideoOrientationLandscapeLeft];
captureVideoPreviewLayer.frame = self.overlay.bounds;
[self.overlay.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(@"ERROR: trying to open camera: %@", error);
}
[session addInput:input];
// the capture method below reads stillImageOutput.connections, so the output
// (assumed to be an AVCaptureStillImageOutput ivar) must be attached before starting
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[session addOutput:stillImageOutput];
[session startRunning];
and for capturing the image. Note that the capture is asynchronous, so the method has to hand the image back through a completion block; it cannot return a UIImage directly:
-(void)captureNowWithCompletion:(void (^)(UIImage *image))completion
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(@"about to request a capture from: %@", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(@"attachments: %@", exifAttachments);
}
else
NSLog(@"no attachments");
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
// hand the result back to the caller; returning from inside the block would not work
completion(image);
}];
}
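With the completion-based signature above, a call site would look something like this (the imageView outlet is hypothetical):
[self captureNowWithCompletion:^(UIImage *image) {
    self.imageView.image = image; // hypothetical outlet for displaying the shot
}];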
I'm trying to get my app to create a UIImage the correct way round.
Most of my code is taken from Apple examples...
@interface CameraManager () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) CIContext *context;
@property (nonatomic, strong) AVCaptureDevice *rearCamera;
@end
@implementation CameraManager
- (id)init {
if ((self = [super init])) {
self.context = [CIContext contextWithOptions:nil];
[self setupCamera];
[self addStillImageOutput];
}
return self;
}
- (void)setupCamera
{
self.session = [[AVCaptureSession alloc] init];
[self.session beginConfiguration];
[self.session setSessionPreset:AVCaptureSessionPresetPhoto];
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
self.rearCamera = nil;
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionBack) {
self.rearCamera = device;
break;
}
}
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.rearCamera error:&error];
[self.session addInput:input];
AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
[dataOutput setAlwaysDiscardsLateVideoFrames:YES];
NSDictionary *options = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
[dataOutput setVideoSettings:options];
[dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
[self.session addOutput:dataOutput];
[self.session commitConfiguration];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
// grab the pixel buffer
CVPixelBufferRef pixelBuffer = (CVPixelBufferRef) CMSampleBufferGetImageBuffer(sampleBuffer);
// create a CIImage from it, rotate it and zero the origin
CIImage *image = [CIImage imageWithCVPixelBuffer:pixelBuffer];
if ([[UIApplication sharedApplication] statusBarOrientation] == UIInterfaceOrientationLandscapeLeft) {
image = [image imageByApplyingTransform:CGAffineTransformMakeRotation(M_PI)];
}
CGPoint origin = [image extent].origin;
image = [image imageByApplyingTransform:CGAffineTransformMakeTranslation(-origin.x, -origin.y)];
// set it as the contents of the UIImageView
CGImageRef cgImage = [self.context createCGImage:image fromRect:[image extent]];
UIImage *uiImage = [UIImage imageWithCGImage:cgImage];
[[NSNotificationCenter defaultCenter] postNotificationName:@"image" object:uiImage];
CGImageRelease(cgImage);
}
- (void)addStillImageOutput
{
[self setStillImageOutput:[[AVCaptureStillImageOutput alloc] init]];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG,AVVideoCodecKey,nil];
[[self stillImageOutput] setOutputSettings:outputSettings];
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in [self.stillImageOutput connections]) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
[[self session] addOutput:[self stillImageOutput]];
}
- (void)captureStillImage
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in [[self stillImageOutput] connections]) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
NSLog(@"about to request a capture from: %@", [self stillImageOutput]);
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments) {
NSLog(@"attachments: %@", exifAttachments);
} else {
NSLog(@"no attachments");
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
[[NSNotificationCenter defaultCenter] postNotificationName:kImageCapturedSuccessfully object:image];
}];
}
This is my camera manager class code.
I am displaying the preview of the camera using the OutputSampleBufferDelegate (for various reasons).
I'm using the session output to "take a photo".
The method captureStillImage is the bit I'm trying to fix.
The photos are taken with the device in LandscapeLeft orientation (the interface is also LandscapeLeft).
The previews all show the correct way around and the exif data shows the width and height the correct way around too. (X = 3264, Y = 2448).
But when I display the UIImage it is rotated 90 degrees counter clockwise. The aspect ratio of the image is correct (i.e. everything looks fine, circles are still circles) just the rotation.
I have found several categories that claim to fix this.
I have also found several StackOverflow questions with answers that also claim to fix it.
None of these worked.
Does anyone know how to rotate this thing the right way around?
Adding the following code before you call captureStillImageAsynchronouslyFromConnection is what I usually do:
if ([videoConnection isVideoOrientationSupported]) {
[videoConnection setVideoOrientation:(AVCaptureVideoOrientation)[UIDevice currentDevice].orientation];
}
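Note that UIDeviceOrientation and AVCaptureVideoOrientation are distinct enums whose landscape cases are mirrored, so an explicit mapping is safer than the cast above. A sketch of such a helper:
- (AVCaptureVideoOrientation)videoOrientationForDeviceOrientation:(UIDeviceOrientation)deviceOrientation
{
    switch (deviceOrientation) {
        case UIDeviceOrientationPortraitUpsideDown:
            return AVCaptureVideoOrientationPortraitUpsideDown;
        case UIDeviceOrientationLandscapeLeft:
            // the device rotated left means the camera sees a landscape-right scene
            return AVCaptureVideoOrientationLandscapeRight;
        case UIDeviceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeLeft;
        default:
            // portrait, face up, face down, and unknown all fall back to portrait
            return AVCaptureVideoOrientationPortrait;
    }
}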
Maybe you should try setting the image orientation after receiving the image data in the captureStillImageAsynchronouslyFromConnection completion block:
UIImage *image = [[UIImage alloc] initWithData:imageData];
image = [[UIImage alloc] initWithCGImage:image.CGImage scale:1.0f orientation:UIImageOrientationDown];
The orientation issue occurs with the front camera, so check the device type and generate a new image; that should resolve the orientation issue:
-(void)capture:(void(^)(UIImage *))handler{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in self.stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if (imageSampleBuffer != NULL) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *capturedImage = [UIImage imageWithData:imageData];
if (self.captureDevice == [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo][1]) {
capturedImage = [[UIImage alloc] initWithCGImage:capturedImage.CGImage scale:1.0f orientation:UIImageOrientationLeftMirrored];
}
handler(capturedImage);
}
}];
}
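Called from a button action, for example:
[self capture:^(UIImage *capturedImage) {
    self.imageView.image = capturedImage; // imageView is a hypothetical outlet
}];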
The following code doesn't work. What's wrong?
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
if (![captureSession canAddInput:videoInput])
NSLog(@"Can't add input");
[captureSession addInput:videoInput];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[self.stillImageOutput setOutputSettings:@{AVVideoCodecKey:AVVideoCodecJPEG}];
if (![captureSession canAddOutput:videoInput])
NSLog(@"Can't add output");
[captureSession addOutput:self.stillImageOutput];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
NSLog(@"!!!");
if (imageDataSampleBuffer == NULL)
{
NSLog(@"%@", error);
return;
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.imageView.image = image;
}];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.previewLayer];
[captureSession startRunning];
AVCaptureVideoPreviewLayer works nicely, but AVCaptureStillImageOutput does not call the completion handler at all...
You need to set up and start your session in one method, then have a separate capture method:
/////////////////////////////////////////////////
////
//// Utility to find front camera
////
/////////////////////////////////////////////////
-(AVCaptureDevice *) frontFacingCameraIfAvailable{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices){
if (device.position == AVCaptureDevicePositionFront){
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if (!captureDevice){
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
/////////////////////////////////////////////////
////
//// Setup Session, attach Video Preview Layer
//// and Capture Device, start running session
////
/////////////////////////////////////////////////
-(void) setupCaptureSession {
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[self.view.layer addSublayer:captureVideoPreviewLayer];
NSError *error = nil;
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(@"ERROR: trying to open camera: %@", error);
}
[session addInput:input];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[session addOutput:self.stillImageOutput];
[session startRunning];
}
/////////////////////////////////////////////////
////
//// Method to capture Still Image from
//// Video Preview Layer
////
/////////////////////////////////////////////////
-(void) captureNow {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(@"about to request a capture from: %@", self.stillImageOutput);
__weak typeof(self) weakSelf = self;
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
[weakSelf displayImage:image];
}];
}
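Wired together, the call sites might look like this (the action name is hypothetical; displayImage: is the helper the capture block already calls):
- (void)viewDidLoad
{
    [super viewDidLoad];
    [self setupCaptureSession]; // the session keeps running for the life of the view
}

- (IBAction)shutterTapped:(id)sender // hypothetical button action
{
    [self captureNow];
}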
This works well:
- (void)viewDidLoad
{
[super viewDidLoad];
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
[captureSession addInput:videoInput];
// the still image output must be created and configured before it is added to the session
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[self.stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[captureSession addOutput:self.stillImageOutput];
[captureSession startRunning];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer insertSublayer:self.previewLayer atIndex:0];
}
- (void)timerFired:(NSTimer *)timer
{
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
NSLog(@"!!!");
if (imageDataSampleBuffer == NULL)
NSLog(@"%@", error);
NSData * imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage * image = [[UIImage alloc] initWithData:imageData];
self.imageView.image = image;
}];
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
[NSTimer scheduledTimerWithTimeInterval:0.5 target:self selector:@selector(timerFired:) userInfo:nil repeats:YES];
}