I am working on an application in which I want to capture a Live Photo and show it to the user for review. I capture the Live Photo using the code below. I get the output URL once the Live Photo is captured, and now I want to show that Live Photo in a PHLivePhotoView. How can I do that using the output URL? I am also getting the Live Photo data; see the delegate methods below.
- (void)viewDidLoad {
[super viewDidLoad];
//Capture Session
AVCaptureSession *session = [[AVCaptureSession alloc]init];
session.sessionPreset = AVCaptureSessionPresetPhoto;
//Add device
AVCaptureDevice *device =
[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
//Input
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
if (!input)
{
NSLog(#"No Input");
}
[session addInput:input];
//Output
// AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
// [session addOutput:output];
// output.videoSettings =
// @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
AVCapturePhotoOutput *output =[[AVCapturePhotoOutput alloc]init];
[session addOutput:output];
output.livePhotoCaptureEnabled = true;
output.highResolutionCaptureEnabled = YES;
//Preview Layer
previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
UIView *myView = self.previewView;
previewLayer.frame = myView.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.previewView.layer addSublayer:previewLayer];
//Start capture session
[session startRunning];
[session commitConfiguration];
captureOutput = output;
// Do any additional setup after loading the view, typically from a nib.
}
- (IBAction)captureImage:(id)sender
{
AVCapturePhotoSettings * settings = [AVCapturePhotoSettings photoSettings];
settings.highResolutionPhotoEnabled = YES;
settings.flashMode = AVCaptureFlashModeOn;
NSString *livePhotoMovieFileName = [NSUUID UUID].UUIDString;
NSString *livePhotoMovieFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[livePhotoMovieFileName stringByAppendingPathExtension:@"mov"]];
settings.livePhotoMovieFileURL = [NSURL fileURLWithPath:livePhotoMovieFilePath];
[captureOutput capturePhotoWithSettings:settings delegate:self];
}
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishProcessingLivePhotoToMovieFileAtURL:(NSURL *)outputFileURL duration:(CMTime)duration photoDisplayTime:(CMTime)photoDisplayTime resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings error:(nullable NSError *)error
{
NSLog(#"%#",outputFileURL);
}
- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishProcessingPhotoSampleBuffer:(nullable CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error
{
photoData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:previewPhotoSampleBuffer];
NSLog(#"%#",photoData);
}
You can use PHLivePhoto's requestLivePhotoWithResourceFileURLs:placeholderImage:targetSize:contentMode:resultHandler: class method, or you can use a web view as well.
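A minimal sketch of that approach (assumptions: photoData is the JPEG data from your photo sample buffer delegate, outputFileURL is the movie URL from the Live Photo delegate, and livePhotoView is a hypothetical PHLivePhotoView outlet):
#import <Photos/Photos.h>
#import <PhotosUI/PhotosUI.h>
// Write the still image to a temporary file so both resources exist on disk.
NSURL *photoURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"livePhoto.jpg"]];
[photoData writeToURL:photoURL atomically:YES];
// Assemble a PHLivePhoto from the photo + movie pair and hand it to the view.
[PHLivePhoto requestLivePhotoWithResourceFileURLs:@[photoURL, outputFileURL]
                                 placeholderImage:nil
                                       targetSize:CGSizeZero
                                      contentMode:PHImageContentModeAspectFit
                                    resultHandler:^(PHLivePhoto *livePhoto, NSDictionary *info) {
    dispatch_async(dispatch_get_main_queue(), ^{
        self.livePhotoView.livePhoto = livePhoto; // livePhotoView is a hypothetical outlet
    });
}];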
Xcode/iOS 8/AVFoundation related error in console:
error in __connection_block_invoke_2: Connection interrupted
I am just adding AVCaptureVideoDataOutput to Apple's sample app 'AVCamManualUsingtheManualCaptureAPI'
What I added was:
// CoreImage wants BGRA pixel format
NSDictionary *outputSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInteger:kCVPixelFormatType_32BGRA]};
// create and configure video data output
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings = outputSettings;
videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
[videoDataOutput setSampleBufferDelegate:self queue:sessionQueue];
The above snippet was inserted into the sample project:
- (void)viewDidLoad
{
[super viewDidLoad];
self.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
self.recordButton.layer.cornerRadius = self.stillButton.layer.cornerRadius = self.cameraButton.layer.cornerRadius = 4;
self.recordButton.clipsToBounds = self.stillButton.clipsToBounds = self.cameraButton.clipsToBounds = YES;
// Create the AVCaptureSession
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[self setSession:session];
// Set up preview
[[self previewView] setSession:session];
// Check for device authorization
[self checkDeviceAuthorizationStatus];
// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
// Why not do all of this on the main queue?
// -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue so that the main queue isn't blocked (which keeps the UI responsive).
dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
dispatch_async(sessionQueue, ^{
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
NSError *error = nil;
AVCaptureDevice *videoDevice = [AAPLCameraViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (error)
{
NSLog(#"%#", error);
}
[[self session] beginConfiguration];
if ([session canAddInput:videoDeviceInput])
{
[session addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
[self setVideoDevice:videoDeviceInput.device];
dispatch_async(dispatch_get_main_queue(), ^{
// Why are we dispatching this to the main queue?
// Because AVCaptureVideoPreviewLayer is the backing layer for our preview view and UIView can only be manipulated on main thread.
// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:(AVCaptureVideoOrientation)[self interfaceOrientation]];
});
}
AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (error)
{
NSLog(#"%#", error);
}
if ([session canAddInput:audioDeviceInput])
{
[session addInput:audioDeviceInput];
}
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([session canAddOutput:movieFileOutput])
{
[session addOutput:movieFileOutput];
AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoStabilizationSupported])
{
[connection setEnablesVideoStabilizationWhenAvailable:YES];
}
[self setMovieFileOutput:movieFileOutput];
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([session canAddOutput:stillImageOutput])
{
[stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[session addOutput:stillImageOutput];
[self setStillImageOutput:stillImageOutput];
}
// CoreImage wants BGRA pixel format
NSDictionary *outputSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInteger:kCVPixelFormatType_32BGRA]};
// create and configure video data output
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings = outputSettings;
videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
[videoDataOutput setSampleBufferDelegate:self queue:sessionQueue];
[[self session] commitConfiguration];
dispatch_async(dispatch_get_main_queue(), ^{
[self configureManualHUD];
});
});
self.manualHUDFocusView.hidden = YES;
self.manualHUDExposureView.hidden = YES;
self.manualHUDWhiteBalanceView.hidden = YES;
}
I get 'error in __connection_block_invoke_2: Connection interrupted', and also the
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
delegate never gets called.
All I am trying to do is modify their code to apply live Core Image filters.
So I fixed the issue.
You can't use AVCaptureMovieFileOutput and AVCaptureVideoDataOutput simultaneously. Once I took out AVCaptureMovieFileOutput, the AVCaptureVideoDataOutput delegate was called.
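As a rough sketch of the working path once AVCaptureMovieFileOutput is removed (the CISepiaTone filter and the rendering note are illustrative assumptions, not part of the original sample), the data output's delegate can then run each frame through Core Image:
#import <CoreImage/CoreImage.h>
// Session setup (on sessionQueue), with no AVCaptureMovieFileOutput in the session:
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
[videoDataOutput setSampleBufferDelegate:self queue:sessionQueue];
if ([session canAddOutput:videoDataOutput]) {
    [session addOutput:videoDataOutput];
}
// The delegate now receives frames and can push them through Core Image:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *inputImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CIFilter *filter = [CIFilter filterWithName:@"CISepiaTone"]; // example filter only
    [filter setValue:inputImage forKey:kCIInputImageKey];
    CIImage *outputImage = filter.outputImage;
    // Render outputImage with a CIContext (or draw it into a view) as needed.
}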
I am trying to record video in my iPhone app using AVFoundation.
But whenever I tap the Record button, the app crashes with this message:
-[AVCaptureMovieFileOutput startRecordingToOutputFileURL:recordingDelegate:] - no active/enabled
connections.
I know the same question has been asked on SO, but none of its answers helped me.
My problem is that the same code works perfectly in another application, but when I use exactly the same code in this app, it crashes. Still photo capture, however, works fine.
I am adding my code here - please help me. Thanks in advance.
-(void)viewDidLoad
{
[super viewDidLoad];
self.captureSession = [[AVCaptureSession alloc] init];
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *stillImageOutputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:stillImageOutputSettings];
self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
[self.captureSession addInput:self.videoInput];
[self.captureSession addOutput:self.stillImageOutput];
previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
UIView *aView = self.view;
previewLayer.frame = CGRectMake(70, 190, 270, 270);
[aView.layer addSublayer:previewLayer];
}
-(NSURL *) tempFileURL
{
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *manager = [[NSFileManager alloc] init];
if ([manager fileExistsAtPath:outputPath])
{
[manager removeItemAtPath:outputPath error:nil];
}
return outputURL;
}
-(IBAction)capture:(id)sender
{
if (self.movieOutput.isRecording == YES)
{
[self.movieOutput stopRecording];
}
else
{
[self.movieOutput startRecordingToOutputFileURL:[self tempFileURL] recordingDelegate:self];
}
}
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
BOOL recordedSuccessfully = YES;
if ([error code] != noErr)
{
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
recordedSuccessfully = [value boolValue];
NSLog(#"A problem occurred while recording: %#", error);
}
if (recordedSuccessfully) {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
UIAlertView *alert;
if (!error)
{
alert = [[UIAlertView alloc] initWithTitle:#"Video Saved"
message:#"The movie was successfully saved to you photos library"
delegate:nil
cancelButtonTitle:#"OK"
otherButtonTitles:nil, nil];
}
else
{
alert = [[UIAlertView alloc] initWithTitle:#"Error Saving Video"
message:#"The movie was not saved to you photos library"
delegate:nil
cancelButtonTitle:#"OK"
otherButtonTitles:nil, nil];
}
[alert show];
}
];
}
}
I had the same problem when changing the videoDevice's activeFormat and later wanting to record video. Because I was using the best-quality video format, I had to set the sessionPreset to high, like the following:
_session.sessionPreset = AVCaptureSessionPresetHigh;
and it worked for me! :)
This happens because you don't currently have an active connection to record video to a file.
Check the connection's active status before recording to the output file:
AVCaptureConnection *c = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
if (c.active) {
//connection is active
} else {
//connection is not active
//try to change self.captureSession.sessionPreset,
//or change videoDevice.activeFormat
}
If the connection is not active, try changing captureSession.sessionPreset or videoDevice.activeFormat.
Repeat until you have set a valid format (that is, until c.active == YES). Then you can record video to the output file, as sketched below.
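A minimal sketch of that retry loop, assuming self.captureSession and self.movieOutput are already set up (the fallback preset order here is just an example):
NSArray *fallbackPresets = @[AVCaptureSessionPresetHigh,
                             AVCaptureSessionPresetMedium,
                             AVCaptureSessionPresetLow];
for (NSString *preset in fallbackPresets) {
    if ([self.captureSession canSetSessionPreset:preset]) {
        self.captureSession.sessionPreset = preset;
    }
    AVCaptureConnection *c = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
    if (c.active) {
        // The connection is live, so recording no longer hits
        // "no active/enabled connections".
        [self.movieOutput startRecordingToOutputFileURL:[self tempFileURL] recordingDelegate:self];
        break;
    }
}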
Several things are missing in your code:
You forgot to add movieOutput to your captureSession
Same for your audioInput
All your session configuration needs to be encapsulated by [_captureSession beginConfiguration] and [_captureSession commitConfiguration]
For audio recording you need to set the AVAudioSession to the correct category.
Here is your code, updated:
- (void)viewDidLoad
{
[super viewDidLoad];
NSError *error = nil;
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryRecord
error:&error];
if(!error)
{
[[AVAudioSession sharedInstance] setActive:YES error:&error];
if(error) NSLog(#"Error while activating AudioSession : %#", error);
}
else
{
NSLog(#"Error while setting category of AudioSession : %#", error);
}
self.captureSession = [[AVCaptureSession alloc] init];
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *stillImageOutputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:stillImageOutputSettings];
self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
[self.captureSession beginConfiguration];
[self.captureSession addInput:self.videoInput];
[self.captureSession addInput:self.audioInput];
[self.captureSession addOutput:self.movieOutput];
[self.captureSession addOutput:self.stillImageOutput];
[self.captureSession commitConfiguration];
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
previewLayer.frame = CGRectMake(0, 0, 320, 500);
[self.view.layer addSublayer:previewLayer];
[self.captureSession startRunning];
}
- (IBAction)toggleRecording:(id)sender
{
if(!self.movieOutput.isRecording)
{
[self.recordButton setTitle:#"Stop" forState:UIControlStateNormal];
NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:#"output.mp4"];
NSFileManager *manager = [[NSFileManager alloc] init];
if ([manager fileExistsAtPath:outputPath])
{
[manager removeItemAtPath:outputPath error:nil];
}
[self.movieOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputPath]
recordingDelegate:self];
}
else
{
[self.recordButton setTitle:#"Start recording" forState:UIControlStateNormal];
[self.movieOutput stopRecording];
}
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(#"Did finish recording, error %# | path %# | connections %#", error, [outputFileURL absoluteString], connections);
}
Hope this helps
I found the reason for this error.
Check your session's sessionPreset setting: the photo resolution is different from the video resolution.
On the iPhone 5, for example, the back camera's video resolution is 1920x1080 and the front camera's is 1280x720, while the maximum photo resolution is 3264x2448.
So if you set a photo-sized resolution for video, the connection will not become active.
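For example, a quick check along those lines (a sketch only; _session and _movieOutput are assumed to come from your own setup):
// Use a video-capable preset before recording; a photo preset can leave the
// movie output's connection inactive.
if ([_session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
    _session.sessionPreset = AVCaptureSessionPresetHigh;
}
AVCaptureConnection *videoConnection = [_movieOutput connectionWithMediaType:AVMediaTypeVideo];
NSLog(@"Movie connection active: %d", videoConnection.active);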
I'm looking for some help with AVFoundation. I have been following a step-by-step guide on how to capture a still image from this topic: How to save photos taken using AVFoundation to Photo Album?
My question is: how can I lower the quality of my saved image while still using AVCaptureSessionPreset640x480? I would like to halve the image quality, or if there is another way to make the saved image as small a file as possible - possibly 320x280 - that could be better than adjusting the actual quality.
I don't know whether anyone else has asked this before, but I have been searching the web for the past couple of days and cannot find an answer. Below is my code.
#import "ViewController.h"
#import <ImageIO/ImageIO.h>
@interface ViewController ()
@end
@implementation ViewController
@synthesize imagePreview;
@synthesize iImage;
@synthesize stillImageOutput;
-(IBAction)captureNow {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection)
{
break;
}
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
} else {
NSLog(#"no attachments");
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
//NSData *data2 = [NSData dataWithData:UIImageJPEGRepresentation(image, 0.5f)]];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.iImage.image = image;
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
}];
}
-(void)viewDidAppear:(BOOL)animated
{
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPreset640x480;
CALayer *viewLayer = self.imagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.imagePreview.bounds;
[self.imagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
#end
Try using a different preset; each one gives you a different image resolution.
As per the Apple documentation, for the iPhone 4 (back camera) you will get the following resolutions for the following session presets (a short sketch of applying one follows the list):
AVCaptureSessionPresetHigh : 1280x720
AVCaptureSessionPresetMedium : 480x360
AVCaptureSessionPresetLow : 192x144
AVCaptureSessionPreset640x480 : 640x480
AVCaptureSessionPreset1280x720 : 1280x720
AVCaptureSessionPresetPhoto : 2592x1936. This is not supported for video output.
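A minimal sketch of selecting one of those presets before starting the session (only the preset names come from the list above; the fallback choice is an assumption):
AVCaptureSession *session = [[AVCaptureSession alloc] init];
// Pick the preset that matches the output size you want; fall back if the
// device cannot provide it.
if ([session canSetSessionPreset:AVCaptureSessionPreset640x480]) {
    session.sessionPreset = AVCaptureSessionPreset640x480;
} else {
    session.sessionPreset = AVCaptureSessionPresetMedium;
}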
Hope this will help.
You need to set the kCVPixelBufferWidthKey and kCVPixelBufferHeightKey options on the AVCaptureStillImageOutput object to get a resolution of your choice. This width/height will override the session preset's width/height. A minimal sample is below (add error checking).
_session = [[AVCaptureSession alloc] init];
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError * error;
_sessionInput = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error];
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:320.0], (id)kCVPixelBufferWidthKey,
[NSNumber numberWithDouble:280.0], (id)kCVPixelBufferHeightKey,
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey,
nil];
[_stillImageOutput setOutputSettings:options];
[_session beginConfiguration ];
[_session addInput:_sessionInput];
[_session addOutput:_stillImageOutput];
[_session setSessionPreset:AVCaptureSessionPresetPhoto];
_avConnection = [_stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[ _session commitConfiguration ];
.............
- (void) start
{
[self.session startRunning];
}
.............
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:self.avConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
NSLog(#"%d : %d", height, width);
}];
Note: I have only tried this on a Mac; ideally it should work on iOS too. Also, try maintaining some aspect ratio.
I was trying to get the camera input to show in a preview layer view.
self.cameraPreviewView is tied to a UIView in IB.
Here is my current code, which I put together from the AV Foundation Programming Guide. But the preview never shows.
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetHigh;
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
NSLog(#"Couldn't create video capture device");
}
[session addInput:input];
// Create video preview layer and add it to the UI
AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
UIView *view = self.cameraPreviewView;
CALayer *viewLayer = [view layer];
newCaptureVideoPreviewLayer.frame = view.bounds;
[viewLayer addSublayer:newCaptureVideoPreviewLayer];
self.cameraPreviewLayer = newCaptureVideoPreviewLayer;
[session startRunning];
After some trial and error and looking at Apple's AVCam sample code,
I wrapped the preview-layer code and the session's startRunning call in a Grand Central Dispatch block like so, and it started working.
dispatch_async(dispatch_get_main_queue(), ^{
AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
UIView *view = self.cameraPreviewView;
CALayer *viewLayer = [view layer];
newCaptureVideoPreviewLayer.frame = view.bounds;
[viewLayer addSublayer:newCaptureVideoPreviewLayer];
self.cameraPreviewLayer = newCaptureVideoPreviewLayer;
[session startRunning];
});
Here is my code; it works perfectly for me, so you can refer to it:
- (void)initCapture
{
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:nil];
if (!captureInput) {
return;
}
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
/* captureOutput:didOutputSampleBuffer:fromConnection delegate method !*/
[captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
self.captureSession = [[AVCaptureSession alloc] init];
NSString *preset = nil;
if (!preset) {
preset = AVCaptureSessionPresetMedium;
}
self.captureSession.sessionPreset = preset;
if ([self.captureSession canAddInput:captureInput]) {
[self.captureSession addInput:captureInput];
}
if ([self.captureSession canAddOutput:captureOutput]) {
[self.captureSession addOutput:captureOutput];
}
//handle prevLayer
if (!self.captureVideoPreviewLayer) {
self.captureVideoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
}
//if you want to adjust the previewlayer frame, here!
self.captureVideoPreviewLayer.frame = self.view.bounds;
self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.view.layer addSublayer: self.captureVideoPreviewLayer];
[self.captureSession startRunning];
}
The following code doesn't work. What's wrong?
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
if (![captureSession canAddInput:videoInput])
NSLog(#"Can't add input");
[captureSession addInput:videoInput];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[self.stillImageOutput setOutputSettings:@{AVVideoCodecKey:AVVideoCodecJPEG}];
if (![captureSession canAddOutput:videoInput])
NSLog(@"Can't add output");
[captureSession addOutput:self.stillImageOutput];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
NSLog(#"!!!");
if (imageDataSampleBuffer == NULL)
{
NSLog(#"%#", error);
return;
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.imageView.image = image;
}];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.previewLayer];
[captureSession startRunning];
AVCaptureVideoPreviewLayer works nicely, but AVCaptureStillImageOutput does not call the completion handler at all...
You need to set up and start your session in one method,
then have a separate capture method:
/////////////////////////////////////////////////
////
//// Utility to find front camera
////
/////////////////////////////////////////////////
-(AVCaptureDevice *) frontFacingCameraIfAvailable{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices){
if (device.position == AVCaptureDevicePositionFront){
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if (!captureDevice){
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
/////////////////////////////////////////////////
////
//// Setup Session, attach Video Preview Layer
//// and Capture Device, start running session
////
/////////////////////////////////////////////////
-(void) setupCaptureSession {
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[self.view.layer addSublayer:captureVideoPreviewLayer];
NSError *error = nil;
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[session addOutput:self.stillImageOutput];
[session startRunning];
}
/////////////////////////////////////////////////
////
//// Method to capture Still Image from
//// Video Preview Layer
////
/////////////////////////////////////////////////
-(void) captureNow {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", self.stillImageOutput);
__weak typeof(self) weakSelf = self;
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
[weakSelf displayImage:image];
}];
}
This works well:
- (void)viewDidLoad
{
[super viewDidLoad];
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
[captureSession addInput:videoInput];
[captureSession addOutput:self.stillImageOutput];
[captureSession startRunning];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer insertSublayer:self.previewLayer atIndex:0];
}
- (void)timerFired:(NSTimer *)timer
{
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
NSLog(#"!!!");
if (imageDataSampleBuffer == NULL)
NSLog(#"%#", error);
NSData * imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage * image = [[UIImage alloc] initWithData:imageData];
self.imageView.image = image;
}];
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
[NSTimer scheduledTimerWithTimeInterval:0.5 target:self selector:@selector(timerFired:) userInfo:nil repeats:YES];
}