I have been trying for very long and could not get it to work out but basically I would like to display the live camera feed in the background behind my labels and buttons. Here is the code I am working with to make the camera appear
- (void)viewDidLoad {
    [super viewDidLoad];

    // Build the capture session.
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetHigh;

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (input) {
        [session addInput:input];
    } else {
        // -addInput: throws on nil, so only add a valid input.
        NSLog(@"ERROR: could not open camera: %@", error);
    }

    // AVCaptureVideoPreviewLayer is a CALayer, not a UIView: it has no .view
    // property and must be added to the view's *layer* tree. Inserting it at
    // index 0 keeps it behind the layers that back the labels and buttons
    // already in self.view.
    AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer =
        [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    newCaptureVideoPreviewLayer.frame = self.view.bounds;
    [self.view.layer insertSublayer:newCaptureVideoPreviewLayer atIndex:0];

    [session startRunning];
}
I do not know how to place it behind the labels in viewDidLoad. Any help would be much appreciated!
You just need to move the camera view to the background by using sendSubviewToBack:.
For more detail you can check Apple AVCam Example
sendSubviewToBack:
Moves the specified subview so that it appears behind its siblings.
// Create the preview layer and insert it at index 0 of the view's layer tree
// so it renders *behind* the layers backing the existing labels and buttons
// (a plain addSublayer: would put it on top of everything).
AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
newCaptureVideoPreviewLayer.frame = self.view.bounds;
[self.view.layer insertSublayer:newCaptureVideoPreviewLayer atIndex:0];
[session startRunning];
OR
// NOTE(review): a CALayer cannot be passed to -insertSubview:belowSubview:
// (and the selector is -superview, not -superView). Insert the preview layer
// at the back of the view's own layer hierarchy instead:
[self.view.layer insertSublayer:newCaptureVideoPreviewLayer atIndex:0];
Related
I have implemented video and audio recording with AVCaptureSession. It takes some time to start recording — usually about 1–2 seconds.
If you require more information, feel free to comment.
Setup capture session doing below steps:
add capture session
add video input
add audio input
add video preview layer
add movie output
set up camera configuration for output connection
start capture session
// Builds the full recording pipeline: session, video + audio inputs, preview
// layer, movie-file output, camera configuration, then starts the session.
-(void)setupCaptureSession {
    // Set up capture session
    captureSession = [AVCaptureSession new];

    // Add video input
    AVCaptureDevice *videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (videoCaptureDevice) {
        NSError *error;
        videoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:&error];
        if (!error) {
            if ([captureSession canAddInput:videoInputDevice]) {
                [captureSession addInput:videoInputDevice];
            } else {
                NSLog(@"Failed to add video input");
            }
        } else {
            NSLog(@"Failed to create video device");
        }
    } else {
        NSLog(@"Failed to create video capture device");
    }

    // Add audio input (guarded with canAddInput: for consistency with the
    // video path above).
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    if (audioInputDevice && [captureSession canAddInput:audioInputDevice]) {
        [captureSession addInput:audioInputDevice];
    }

    // Add video preview layer
    NSLog(@"Adding video preview layer");
    [self setPreviewLayer:[[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession]];
    [[self previewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];

    // Add movie-file output; recording stops when free disk space drops
    // below this limit (1 MB).
    movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024;
    if ([captureSession canAddOutput:movieFileOutput]) {
        [captureSession addOutput:movieFileOutput];
    }

    // Set up camera configuration for the output connection
    [self setUpCameraConfiguration];

    // Prefer 640x480; fall back to the medium preset when unsupported.
    [captureSession setSessionPreset:AVCaptureSessionPresetMedium];
    if ([captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        [captureSession setSessionPreset:AVCaptureSessionPreset640x480];
    }

    //----- DISPLAY THE PREVIEW LAYER -----
    // Display it full screen under our view controller's existing controls.
    NSLog(@"Display the preview layer");
    CGRect layerRect = [[[self view] layer] bounds];
    [_previewLayer setBounds:layerRect];
    [_previewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),
                                           CGRectGetMidY(layerRect))];

    // Host the preview layer in a dedicated view sent to the back, so it sits
    // behind the UI controls without manually re-ordering each control.
    UIView *cameraView = [[UIView alloc] init];
    [[self view] addSubview:cameraView];
    [self.view sendSubviewToBack:cameraView];
    [[cameraView layer] addSublayer:_previewLayer];

    //----- START THE CAPTURE SESSION RUNNING -----
    [captureSession startRunning];
}
I want to add a png image (blue rect) on camera preview view layer. I get the preview image from this function:
// AVCaptureVideoDataOutputSampleBufferDelegate: called for every captured
// video frame delivered by the session's data output.
-(void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Convert the raw sample buffer into a UIImage via the project helper.
    UIImage * image1 = [self ImageFromSampleBuffer:sampleBuffer];
    // NOTE(review): image1 is not consumed yet and the previous unused
    // `NSData *imageData1;` declaration was removed; the overlay/compositing
    // logic (e.g. drawing the blue rect) presumably belongs here.
}
And this function sets the preview image on a UIImageView.
// Configures a photo-preset capture session with the front camera, a BGRA
// video-data output, an inset preview layer, and a 1 s snapshot timer.
-(void) SetPreview
{
    session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetPhoto;

    // Add device: front-facing camera.
    AVCaptureDevice *device = [self cameraWithPosition:AVCaptureDevicePositionFront];
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
    if (!input)
    {
        NSLog(@"NO Input");
    }
    else
    {
        // Only add a valid input; -addInput: throws when passed nil.
        [session addInput:input];
    }

    // Output: raw 32-bit BGRA frames for snapshotting.
    output = [[AVCaptureVideoDataOutput alloc] init];
    [session addOutput:output];
    output.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};

    // Preview layer, inset 5 pt on every edge of the root view.
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    cameraView = self.view;
    previewLayer.frame = CGRectMake(cameraView.bounds.origin.x + 5,
                                    cameraView.bounds.origin.y + 5,
                                    cameraView.bounds.size.width - 10,
                                    cameraView.bounds.size.height - 10);
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [_vImage.layer addSublayer:previewLayer];

    // Take a snapshot once per second.
    timer = [NSTimer scheduledTimerWithTimeInterval:1 target:self selector:@selector(snapshot) userInfo:nil repeats:YES];
    [session startRunning];
}
How can I implement this feature?
I solved this problem by putting the "Back Img" on the view.
I have a view controller in which I have added an UIView full screen size, In that UIView I have the AVCapturesession that helps me to capture photos,
My view controller opens good in portrait mode but opens abruptly in landscape mode.
The code is as follows,
- (void)viewDidLoad {
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor whiteColor];

    // Track device rotation so the camera preview can be re-oriented.
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(orientationDidChange:)
                                                 name:UIDeviceOrientationDidChangeNotification
                                               object:nil];

    // Pinch-to-zoom on the camera preview view.
    self.camera.userInteractionEnabled = YES;
    UIPinchGestureRecognizer *pinchGesture = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(handlePinchGesture:)];
    pinchGesture.delegate = self;
    [self.camera addGestureRecognizer:pinchGesture];

    [self.navigationController setNavigationBarHidden:YES animated:YES];
}
The camera is the UIView which is the property of my UIViewController,
Again,
// Sets up the capture session once the view is on screen (so the hosting
// view's layer bounds reflect the final layout).
-(void) viewDidAppear:(BOOL)animated
{
    // Always forward lifecycle callbacks to super.
    [super viewDidAppear:animated];

    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetMedium;

    CALayer *viewLayer = self.camera.layer;
    NSLog(@"viewLayer = %@", viewLayer);

    captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    captureVideoPreviewLayer.frame = self.camera.layer.bounds;
    [self.camera.layer addSublayer:captureVideoPreviewLayer];

    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
        NSLog(@"ERROR: trying to open camera: %@", error);
    } else {
        // Only add a valid input; -addInput: throws when passed nil.
        [session addInput:input];
    }
    [session startRunning];

    // JPEG still-image output for photo capture.
    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    [session addOutput:stillImageOutput];

    isUsingFlash = NO;
    isUsingFrontFacingCamera = NO;
    effectiveScale = 1.0;
}
My view opens incorrectly in landscape mode; once I rotate to portrait it becomes fine, and on rotating back to landscape it works well. Why does it not launch properly in landscape mode?
Here I am setting the root view controller,
// Instantiate the first controller from the storyboard and present it inside
// a custom navigation controller.
sb = [UIStoryboard storyboardWithName:@"Main" bundle:nil];
cameraFirstController = [sb instantiateViewControllerWithIdentifier:@"FirstViewController"];
cameraFirstController.delegate = self;
nav = [[CustomNavigationController alloc] initWithRootViewController:cameraFirstController];
[self.viewController presentViewController:nav animated:YES completion:nil];
It seems like the problem is the order of calls when you set up the window. You need to call makeKeyAndVisible before you assign the rootViewController.
// Create the window, make it key and visible first, and only then install
// the root view controller — this ordering is the fix being recommended.
self.window = [[UIWindow alloc] initWithFrame:[UIScreen mainScreen].bounds];
[self.window makeKeyAndVisible];
self.window.rootViewController = self.YourViewController;
I have a UIImagePickerController that can be used for uploading profile images in my social networking application.
It works fine when it is used alone, i.e. no other camera interfering.
In another view, I am using AVCaptureSession and AVCaptureVideoPreviewLayer to embed the camera view inside the view. Here users can upload various photos that they have captured.
This also works fine when it is used alone, i.e. no other camera interfering.
(This is a Tab-Bar Application)
Whenever the AVCaptureVideoPreviewLayer is active and I enter the view with the UIImagePickerController, the image picker takes a very long time to load, and sometimes it just freezes.
This is how I initialise the AVSession/AVCapturePreviewLayer:
// Build the in-view camera: 640x480 session + aspect-fill preview layer +
// JPEG still-image output.
self.session = [[AVCaptureSession alloc] init];
self.session.sessionPreset = AVCaptureSessionPreset640x480;

self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
self.captureVideoPreviewLayer.frame = self.cameraView.bounds;
[self.captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[self.cameraView.layer addSublayer:self.captureVideoPreviewLayer];

AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
    // Handle the error appropriately.
    NSLog(@"ERROR: trying to open camera: %@", error);
} else {
    [self.session addInput:input];
}

self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[self.session addOutput:self.stillImageOutput];
This is how i initialise the UIImagePickerController:
// Present the system camera UI (with built-in editing) for photo capture.
UIImagePickerController *cameraPicker = [[UIImagePickerController alloc] init];
cameraPicker.sourceType = UIImagePickerControllerSourceTypeCamera;
cameraPicker.allowsEditing = YES;
cameraPicker.delegate = self;
self.picker = cameraPicker;
[self presentViewController:self.picker animated:YES completion:NULL];
Why does the UIImagePickerController take forever to load when the previewLayer is active in another view?
How can I reduce the loading time for the UIImagePickerController?
Ok. It seems like calling:
[AVSession stopRunning];
in viewDidDisappear:(BOOL)animated
fixes this issue for me.
I have a single view application in which I am trying to test iOS7's AVCaptureMetadataOutput based on this explanation. My ViewController conforms to AVCaptureMetadataOutputObjectsDelegate and the code looks like this (almost exactly the same as Mattt's):
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    // Testing the VIN Scanner before I make it part of the library
    NSLog(@"Setting up the vin scanner");
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    if (input) {
        [session addInput:input];
    } else {
        NSLog(@"Error: %@", error);
    }

    // Metadata output: detected barcodes are delivered to the delegate on the
    // main queue.
    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [session addOutput:output];
    // Without explicitly setting the object types (AFTER the output joins the
    // session) no barcode types are searched for and the delegate never fires.
    // Code 39 matches what the delegate below checks.
    [output setMetadataObjectTypes:@[AVMetadataObjectTypeCode39Code]];
    [session startRunning];
}
// AVCaptureMetadataOutputObjectsDelegate: invoked (on the main queue, as
// configured) whenever machine-readable metadata is detected.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection
{
    NSString *code = nil;
    for (AVMetadataObject *metadata in metadataObjects) {
        // Only Code 39 barcodes are of interest here (VINs use Code 39).
        if ([metadata.type isEqualToString:AVMetadataObjectTypeCode39Code]) {
            code = [(AVMetadataMachineReadableCodeObject *)metadata stringValue];
            break;
        }
    }
    NSLog(@"code: %@", code);
}
When I run this on an iOS7 device (I've tried an iPhone 4 and iPhone 4s) XCode logs "Setting up the vin scanner" but the camera (ie the AVCaptureSession) never opens.
Edit 1:
I added the following code to show the camera output on screen:
// Attach a preview layer so the camera output is actually rendered on screen.
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
// Cover the whole root view.
previewLayer.frame = self.view.frame;
// Add the video preview layer to the view's layer tree.
[self.view.layer addSublayer:previewLayer];
But the display is very odd, does not conform to the screen and the way it rotates does not make sense. The other issue is that when I focus the camera on a bar code the metadata delegate method is never called. Please see pictures below:
The camera will not open the way it does for the UIImagePickerController. The problem is that your code does nothing with the output. You'll need to add a preview layer to display the output of the camera as it streams in.
// Create a preview layer bound to the session and display it full screen,
// then start the capture session.
AVCaptureVideoPreviewLayer *cameraPreview = [AVCaptureVideoPreviewLayer layerWithSession:session];
cameraPreview.frame = CGRectMake(0.0, 0.0, self.view.frame.size.width, self.view.frame.size.height);
[self.view.layer addSublayer:cameraPreview];
[session startRunning];
Edit**
After taking a deeper look at your code I noticed a few more issues.
First, you also need to set the metadataObjectTypes you want to search for; right now you're not looking for any valid object types. This should be added after you add the output to the session. You can view the full list of available types in the documentation.
[output setMetadataObjectTypes:#[AVMetadataObjectTypeQRCode, AVMetadataObjectTypeEAN13Code]];
Second, your AVCaptureSession *session is a local variable in your viewDidLoad; take this and place it just after your @interface ViewController () as shown below.
// Class extension: keeps the capture session alive for the controller's
// lifetime instead of it being a viewDidLoad local.
@interface ViewController ()
@property (nonatomic, strong) AVCaptureSession *session;
@end

@implementation ViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Testing the VIN Scanner before I make it part of the library
    NSLog(@"Setting up the vin scanner");

    // Create the session once and keep it in the property so it outlives
    // viewDidLoad (the original snippet allocated it twice).
    self.session = [[AVCaptureSession alloc] init];

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    if (input) {
        [self.session addInput:input];
    } else {
        NSLog(@"Error: %@", error);
    }

    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
    [self.session addOutput:output];
    // The types to scan for must be set *after* the output joins the session.
    [output setMetadataObjectTypes:@[AVMetadataObjectTypeQRCode, AVMetadataObjectTypeEAN13Code]];
    [self.session startRunning];
}