The following code doesn't work. What's wrong?
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
if (![captureSession canAddInput:videoInput])
    NSLog(@"Can't add input");
[captureSession addInput:videoInput];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[self.stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
if (![captureSession canAddOutput:videoInput])
    NSLog(@"Can't add output");
[captureSession addOutput:self.stillImageOutput];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
                                                   completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
    NSLog(@"!!!");
    if (imageDataSampleBuffer == NULL)
    {
        NSLog(@"%@", error);
        return;
    }
    NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
    UIImage *image = [[UIImage alloc] initWithData:imageData];
    self.imageView.image = image;
}];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.previewLayer];
[captureSession startRunning];
The AVCaptureVideoPreviewLayer works nicely, but AVCaptureStillImageOutput never calls the completion handler at all...
You need to set up and start your session in one method,
then have a separate capture method:
/////////////////////////////////////////////////
////
//// Utility to find front camera
////
/////////////////////////////////////////////////
- (AVCaptureDevice *)frontFacingCameraIfAvailable {
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *captureDevice = nil;
    for (AVCaptureDevice *device in videoDevices) {
        if (device.position == AVCaptureDevicePositionFront) {
            captureDevice = device;
            break;
        }
    }
    // Couldn't find one on the front, so just get the default video device.
    if (!captureDevice) {
        captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    return captureDevice;
}
/////////////////////////////////////////////////
////
//// Setup Session, attach Video Preview Layer
//// and Capture Device, start running session
////
/////////////////////////////////////////////////
- (void)setupCaptureSession {
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetMedium;
    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    captureVideoPreviewLayer.frame = self.view.layer.bounds; // the layer needs a frame, or it won't be visible
    [self.view.layer addSublayer:captureVideoPreviewLayer];
    NSError *error = nil;
    AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
        NSLog(@"ERROR: trying to open camera: %@", error);
    }
    [session addInput:input];
    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [self.stillImageOutput setOutputSettings:outputSettings];
    [session addOutput:self.stillImageOutput];
    [session startRunning];
}
/////////////////////////////////////////////////
////
//// Method to capture Still Image from
//// Video Preview Layer
////
/////////////////////////////////////////////////
- (void)captureNow {
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }
    NSLog(@"about to request a capture from: %@", self.stillImageOutput);
    __weak typeof(self) weakSelf = self;
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        [weakSelf displayImage:image];
    }];
}
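For completeness, here is one way the two methods might be wired up. This is a sketch, not part of the original answer; the button action name is hypothetical:
- (void)viewDidLoad {
    [super viewDidLoad];
    [self setupCaptureSession]; // configure and start the session once
}

// Hypothetical button action; connect it to a UIButton in the storyboard.
- (IBAction)takePhotoTapped:(id)sender {
    [self captureNow]; // capture only after the session has been running
}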
This follow-up code works well. The capture is now requested from a repeating timer after the session has had time to start running, rather than immediately during setup:
- (void)viewDidLoad
{
    [super viewDidLoad];
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
    AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
    captureSession.sessionPreset = AVCaptureSessionPresetMedium;
    [captureSession addInput:videoInput];
    // Create and configure the still image output before adding it.
    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    [self.stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
    [captureSession addOutput:self.stillImageOutput];
    [captureSession startRunning];
    // Creating preview layer
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.previewLayer.frame = self.view.layer.bounds;
    [self.view.layer insertSublayer:self.previewLayer atIndex:0];
}
- (void)timerFired:(NSTimer *)timer
{
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
                                                       completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
    {
        NSLog(@"!!!");
        if (imageDataSampleBuffer == NULL) {
            NSLog(@"%@", error);
            return; // don't try to read a NULL buffer
        }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        self.imageView.image = image;
    }];
}
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    [NSTimer scheduledTimerWithTimeInterval:0.5 target:self selector:@selector(timerFired:) userInfo:nil repeats:YES];
}
This bug appears intermittently and I have no idea how to fix it; please help me.
The main code is below. If you want to see the complete project, you can check this link:
https://github.com/liman123/custom-camera
The bug: when I take a photo, the picture shown on screen is distorted.
- (void)setupConfiguration
{
    _captureSession = [[AVCaptureSession alloc] init];
    _captureSession.sessionPreset = AVCaptureSessionPresetPhoto;
    _capturedImageView = [[UIImageView alloc] init];
    _capturedImageView.frame = self.view.frame; // just to even it out
    _capturedImageView.backgroundColor = [UIColor clearColor];
    _capturedImageView.userInteractionEnabled = YES;
    _capturedImageView.contentMode = UIViewContentModeScaleAspectFill;
    _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _captureVideoPreviewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:_captureVideoPreviewLayer];
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    if (devices.count > 0) {
        _captureDevice = devices[0];
        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:&error];
        [_captureSession addInput:input];
        _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
        [_stillImageOutput setOutputSettings:outputSettings];
        [_captureSession addOutput:_stillImageOutput];
    }
}
- (void)captureButtonClick
{
    _isCapturingImage = YES;
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in _stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }
    [_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
        if (imageSampleBuffer) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
            UIImage *capturedImage = [[UIImage alloc] initWithData:imageData scale:1];
            _isCapturingImage = NO;
            _capturedImageView.image = capturedImage;
            _selectedImage = capturedImage;
            imageData = nil;
            [self.view addSubview:_imageSelectedView];
        }
    }];
}
I have never worked with the AVFoundation framework. I want to get video frames from the back camera and process those frames. Can anyone help me? Your experience would be appreciated. Thanks.
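For processing a live stream of frames (as opposed to capturing stills, which the answer below covers), one minimal sketch uses AVCaptureVideoDataOutput. Here, `session` is assumed to be an already configured AVCaptureSession, and the queue label and processing stub are placeholders:
// Attach a video data output that delivers each frame to a delegate.
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
videoOutput.alwaysDiscardsLateVideoFrames = YES; // drop frames rather than queue them up
dispatch_queue_t frameQueue = dispatch_queue_create("frame.processing.queue", DISPATCH_QUEUE_SERIAL);
[videoOutput setSampleBufferDelegate:self queue:frameQueue];
if ([session canAddOutput:videoOutput]) {
    [session addOutput:videoOutput];
}

// Delegate callback; self must conform to AVCaptureVideoDataOutputSampleBufferDelegate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Process the pixel buffer here (placeholder).
}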
You can use the following code to start a camera session with AVFoundation in order to capture a still image:
AVCaptureSession *session;
AVCaptureStillImageOutput *stillImageOutput;
session = [[AVCaptureSession alloc] init];
[session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ([session canAddInput:deviceInput]) {
    [session addInput:deviceInput];
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayer = [[self view] layer];
[rootLayer setMasksToBounds:YES];
CGRect frame = self.frameForCapture.frame;
[previewLayer setFrame:frame];
[rootLayer insertSublayer:previewLayer atIndex:0];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
Then, in order to actually capture the image, you can use a button with the following code:
- (IBAction)takePhoto:(id)sender {
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) {
            break;
        }
    }
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                                  completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *image = [UIImage imageWithData:imageData];
        }
    }];
}
Then you can do whatever you want with the captured image.
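For example, to save it to the photo library (one possible use; UIImageWriteToSavedPhotosAlbum is the standard UIKit function), you could extend the completion handler like this:
if (imageDataSampleBuffer != NULL) {
    NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
    UIImage *image = [UIImage imageWithData:imageData];
    UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil); // save to the camera roll
}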
I have followed a tutorial that guided me through making a custom but simple camera app, almost exactly suited to what I need. I actually have two issues that need changing, but I will focus on the first one for now.
The following code allows the use of the back camera, but I need it changed so that I can use the front camera instead. I will also link the video I sourced it from, to give credit; I followed what one of the commenters said about using the front camera, but the answer didn't help at all.
https://www.youtube.com/watch?v=Xv1FfqVy-KM
I'm not excellent at coding, but I'm trying to learn. Any help would be appreciated! Many thanks.
@interface ViewController ()
@end
@implementation ViewController
AVCaptureSession *session;
AVCaptureStillImageOutput *StillImageOutput;

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)viewWillAppear:(BOOL)animated {
    session = [[AVCaptureSession alloc] init];
    [session setSessionPreset:AVCaptureSessionPresetPhoto];
    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error;
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
    if ([session canAddInput:deviceInput]) {
        [session addInput:deviceInput];
    }
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    CALayer *rootLayer = [[self view] layer];
    [rootLayer setMasksToBounds:YES];
    CGRect frame = frameforcapture.frame;
    [previewLayer setFrame:frame];
    [rootLayer insertSublayer:previewLayer atIndex:0];
    StillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [StillImageOutput setOutputSettings:outputSettings];
    [session addOutput:StillImageOutput];
    [session startRunning];
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (IBAction)takephoto:(id)sender {
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in StillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
    }
    [StillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *image = [UIImage imageWithData:imageData];
            imageView.image = image;
        }
    }];
}

@end
This code returns an AVCaptureDevice instance for the default device of the given media type:
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
Change it to the following to pick the front camera instead:
....
AVCaptureDevice *inputDevice = nil;
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *camera in devices) {
    if ([camera position] == AVCaptureDevicePositionFront) { // is front camera
        inputDevice = camera;
        break;
    }
}
......
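To switch between the front and back cameras while the session is already running, the inputs can be swapped inside a beginConfiguration/commitConfiguration pair, as the following snippet shows: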
- (IBAction)btnCameraClicked:(id)sender {
    AVCaptureDevicePosition desiredPosition;
    if (isUsingFrontFacingCamera)
        desiredPosition = AVCaptureDevicePositionBack;
    else
        desiredPosition = AVCaptureDevicePositionFront;
    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([d position] == desiredPosition) {
            [[previewLayer session] beginConfiguration];
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
            for (AVCaptureInput *oldInput in [[previewLayer session] inputs]) {
                [[previewLayer session] removeInput:oldInput];
            }
            [[previewLayer session] addInput:input];
            [[previewLayer session] commitConfiguration];
            break;
        }
    }
    isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
}
// Declare in the .h file:
AVCaptureVideoPreviewLayer *previewLayer;
BOOL isUsingFrontFacingCamera;
I am looking at Apple's sample app AVCam, a basic camera app built on AVFoundation. I want to add another preview window, so there would be two side by side.
How can I accomplish this?
This is what I have in the original:
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Create the AVCaptureSession
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    [self setSession:session];
    // Setup the preview view
    [[self previewView] setSession:session];
    // Check for device authorization
    [self checkDeviceAuthorizationStatus];
    dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
    [self setSessionQueue:sessionQueue];
    dispatch_async(sessionQueue, ^{
        [self setBackgroundRecordingID:UIBackgroundTaskInvalid];
        NSError *error = nil;
        AVCaptureDevice *videoDevice = [AVCamViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        if (error)
        {
            NSLog(@"%@", error);
        }
        if ([session canAddInput:videoDeviceInput])
        {
            [session addInput:videoDeviceInput];
            [self setVideoDeviceInput:videoDeviceInput];
            dispatch_async(dispatch_get_main_queue(), ^{
                [[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:(AVCaptureVideoOrientation)[self interfaceOrientation]];
            });
        }
        AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
        AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
        if (error)
        {
            NSLog(@"%@", error);
        }
        if ([session canAddInput:audioDeviceInput])
        {
            [session addInput:audioDeviceInput];
        }
        AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
        if ([session canAddOutput:movieFileOutput])
        {
            [session addOutput:movieFileOutput];
            AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
            if ([connection isVideoStabilizationSupported])
                [connection setEnablesVideoStabilizationWhenAvailable:YES];
            [self setMovieFileOutput:movieFileOutput];
        }
        AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        if ([session canAddOutput:stillImageOutput])
        {
            [stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
            [session addOutput:stillImageOutput];
            [self setStillImageOutput:stillImageOutput];
        }
    });
}
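One approach (a sketch, not taken from AVCam itself): a single AVCaptureSession can drive more than one AVCaptureVideoPreviewLayer, so a second preview can be attached to the same session and placed next to the first. The frame values below are placeholders for a side-by-side layout:
// Hypothetical second preview layer sharing the same session.
AVCaptureVideoPreviewLayer *secondPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
secondPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
CGFloat halfWidth = self.view.bounds.size.width / 2.0;
secondPreviewLayer.frame = CGRectMake(halfWidth, 0.0, halfWidth, self.view.bounds.size.height);
dispatch_async(dispatch_get_main_queue(), ^{
    [self.view.layer addSublayer:secondPreviewLayer]; // layer work belongs on the main thread
});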
I am making a customized view that does both photo capture and video recording (two features in one view).
Init view:
_session = [[AVCaptureSession alloc] init];
_session.sessionPreset = AVCaptureSessionPreset640x480;
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_captureVideoPreviewLayer.frame = _previewView.layer.bounds; // parent of layer
[_previewView.layer addSublayer:_captureVideoPreviewLayer];
AVCaptureDevice *videoDevice = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo][0];
if ([videoDevice isFlashAvailable] && videoDevice.flashActive && [videoDevice lockForConfiguration:nil]) {
    videoDevice.flashMode = AVCaptureFlashModeOff;
    [videoDevice unlockForConfiguration];
}
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (!input) {
    if (_delegate && [_delegate respondsToSelector:@selector(customCameraViewDidLoadFailed)]) {
        [_delegate customCameraViewDidLoadFailed];
    }
}
if ([_session canAddInput:input]) {
    [_session addInput:input];
}
AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (!audioDeviceInput) {
    if (_delegate && [_delegate respondsToSelector:@selector(customCameraViewDidLoadFailed)]) {
        [_delegate customCameraViewDidLoadFailed];
    }
}
if ([_session canAddInput:audioDeviceInput]) {
    [_session addInput:audioDeviceInput];
}
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[_stillImageOutput setOutputSettings:outputSettings];
if ([_session canAddOutput:_stillImageOutput]) {
    [_session addOutput:_stillImageOutput];
}
_movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([_session canAddOutput:_movieFileOutput])
{
    [_session addOutput:_movieFileOutput];
    AVCaptureConnection *connection = [_movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([connection isVideoStabilizationSupported]) {
        [connection setEnablesVideoStabilizationWhenAvailable:YES];
    }
}
[_session startRunning];
In the capture method:
_session.sessionPreset = AVCaptureSessionPreset640x480;
_isCapturing = YES;
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _stillImageOutput.connections)
{
    for (AVCaptureInputPort *port in [connection inputPorts])
    {
        if ([[port mediaType] isEqual:AVMediaTypeVideo])
        {
            videoConnection = connection;
            break;
        }
    }
    if (videoConnection) {
        break;
    }
}
[_loadingView startAnimating];
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
    NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
    UIImage *capturedImage = [[UIImage alloc] initWithData:imageData scale:1];
    _isCapturing = NO;
    imageData = nil;
    [_loadingView stopAnimating];
    if (_delegate && [_delegate respondsToSelector:@selector(customCameraView:didFinishCaptureImage:)]) {
        [_delegate customCameraView:self didFinishCaptureImage:capturedImage];
    }
}];
The problem I'm facing: the first photo or video I capture always comes out BLACK.
Can you help me solve this problem?
Thanks a lot!
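A common cause of a black first capture (an assumption; nothing in the question confirms it) is requesting the capture before the session has actually started delivering frames. One possible guard is to enable the capture UI only after AVCaptureSessionDidStartRunningNotification fires:
// Hypothetical guard; _captureButton is a placeholder outlet.
[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionDidStartRunningNotification
                                                  object:_session
                                                   queue:[NSOperationQueue mainQueue]
                                              usingBlock:^(NSNotification *note) {
    _captureButton.enabled = YES; // allow capture only once frames are flowing
}];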