I set the shutter speed, ISO, and other camera parameters and capture an image using the code below. If I set the ISO to something like 119, it gets reported as 125 in the Exif info (it is always rounded to a standard value). How can I tell what the real ISO was? maxISO gets reported as 734, but if I set the ISO to 734 it shows up as 800 in the Exif information. I had the same problem with the exposure time: if I set it to 800 ms it shows up as 1 s in the Exif info, but the value calculated from the ShutterSpeedValue is correct.
- (void)setCameraSettings:(long)expTime1000thSec iso:(int)isoValue
{
if ( currentCaptureDevice ) {
[captureSession beginConfiguration];
NSError *error = nil;
if ([currentCaptureDevice lockForConfiguration:&error]) {
if ([currentCaptureDevice isExposureModeSupported:AVCaptureExposureModeLocked]) {
CMTime minTime, maxTime, exposureTime;
if ( isoValue < minISO ) {
isoValue = minISO;
} else if ( isoValue > maxISO ) {
isoValue = maxISO;
}
exposureTime = CMTimeMake(expTime1000thSec, EXP_TIME_UNIT); // in 1/EXP_TIME_UNIT of a second
minTime = currentCaptureDevice.activeFormat.minExposureDuration;
maxTime = currentCaptureDevice.activeFormat.maxExposureDuration;
if ( CMTimeCompare(exposureTime, minTime) < 0 ) {
exposureTime = minTime;
} else if ( CMTimeCompare(exposureTime, maxTime) > 0 ) {
exposureTime = maxTime;
}
NSLog(#"setting exp time to %lld/%d s (want %ld) iso=%d", exposureTime.value, exposureTime.timescale, expTime1000thSec, isoValue);
[currentCaptureDevice setExposureModeCustomWithDuration:exposureTime ISO:isoValue completionHandler:nil];
}
if (currentCaptureDevice.lowLightBoostSupported) {
currentCaptureDevice.automaticallyEnablesLowLightBoostWhenAvailable = NO;
NSLog(#"setting automaticallyEnablesLowLightBoostWhenAvailable = NO");
}
// lock the gains
if ([currentCaptureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked]) {
currentCaptureDevice.whiteBalanceMode = AVCaptureWhiteBalanceModeLocked;
NSLog(#"setting AVCaptureWhiteBalanceModeLocked");
}
// set the gains
AVCaptureWhiteBalanceGains gains;
gains.redGain = 1.0;
gains.greenGain = 1.0;
gains.blueGain = 1.0;
AVCaptureWhiteBalanceGains normalizedGains = [self normalizedGains:gains];
[currentCaptureDevice setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:normalizedGains completionHandler:nil];
NSLog(#"setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains g.red=%.2lf g.green=%.2lf g.blue=%.2lf",
normalizedGains.redGain, normalizedGains.greenGain, normalizedGains.blueGain);
[currentCaptureDevice unlockForConfiguration];
}
[captureSession commitConfiguration];
}
}
- (void)captureStillImage
{
NSLog(#"about to request a capture from: %#", [self stillImageOutput]);
if ( videoConnection ) {
waitingForCapture = true;
NSLog(#"requesting a capture from: %#", [self stillImageOutput]);
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if ( imageSampleBuffer ) {
CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments) {
NSLog(#"attachements: %#", exifAttachments);
} else {
NSLog(#"no attachments");
}
NSLog(#"name: %#", [currentCaptureDevice localizedName]);
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
[self setStillImage:image];
NSDictionary *dict = (__bridge NSDictionary*)exifAttachments;
NSString *value = [dict objectForKey:@"PixelXDimension"];
[self setImageWidth:[NSNumber numberWithInt:[value intValue]]];
value = [dict objectForKey:@"PixelYDimension"];
[self setImageHeight:[NSNumber numberWithInt:[value intValue]]];
value = [dict objectForKey:@"BrightnessValue"];
[self setImageBrightnessValue:[NSNumber numberWithFloat:[value floatValue]]];
value = [dict objectForKey:@"ShutterSpeedValue"];
double val = [value doubleValue];
val = 1.0 / pow(2.0, val);
[self setImageExposureTime:[NSNumber numberWithDouble:val]];
value = [dict objectForKey:@"ApertureValue"];
[self setImageApertureValue:[NSNumber numberWithFloat:[value floatValue]]];
NSArray *values = [dict objectForKey:@"ISOSpeedRatings"];
[self setImageISOSpeedRatings:[NSNumber numberWithInt:[ [values objectAtIndex:0] intValue]]];
NSLog(@"r/g/b gains = %.2lf/%.2lf/%.2lf",
currentCaptureDevice.deviceWhiteBalanceGains.redGain,
currentCaptureDevice.deviceWhiteBalanceGains.greenGain,
currentCaptureDevice.deviceWhiteBalanceGains.blueGain);
[[NSNotificationCenter defaultCenter] postNotificationName:kImageCapturedSuccessfully object:nil];
} else {
NSLog(#"imageSampleBuffer = NULL");
}
waitingForCapture = false;
}];
} else {
NSLog(#"can't capture from: videoConnection");
}
}
I answered your other question; it is the same thing here: you are not setting the exposure mode to AVCaptureExposureModeCustom:
[device lockForConfiguration:nil];
if([device isExposureModeSupported:AVCaptureExposureModeCustom]){
[device setExposureMode:AVCaptureExposureModeCustom];
[device setExposureModeCustomWithDuration:exposureTime ISO:exposureISO completionHandler:^(CMTime syncTime) {}];
[device setExposureTargetBias:exposureBIAS completionHandler:^(CMTime syncTime) {}];
}
Regards.
I added:
[device setExposureMode:AVCaptureExposureModeCustom];
and changed my completionHandler from nil to:
completionHandler:^(CMTime syncTime) {}
and I still see the same results. It seems that ISO can only be set to the "standard" values and not to any value between minISO and maxISO.
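For what it is worth, the Exif block only ever contains the nearest standard photographic values, so it is not a reliable way to read back a custom setting. One way to see what the sensor actually used is to read the device's read-only ISO and exposureDuration properties once the custom exposure has been applied, for example inside the completion handler. A minimal sketch, reusing currentCaptureDevice from the code above:
// Sketch: read back the values the driver actually applied.
// currentCaptureDevice is the locked AVCaptureDevice from the question's code.
[currentCaptureDevice setExposureModeCustomWithDuration:exposureTime
                                                    ISO:isoValue
                                      completionHandler:^(CMTime syncTime) {
    // These properties reflect the current sensor settings,
    // not the rounded values written into the Exif dictionary.
    float appliedISO = currentCaptureDevice.ISO;
    CMTime appliedDuration = currentCaptureDevice.exposureDuration;
    NSLog(@"applied ISO=%.1f exposure=%.4f s",
          appliedISO, CMTimeGetSeconds(appliedDuration));
}];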
Related
We use the PHAsset API to fetch the thumbnail of the iCloud photo ourselves. Customers report that they see an all-black photo in the client. Have you encountered this issue before, and do you have any suggestions for this specific problem?
The code used to display the UIImage is as follows. We use a CALayer to display the picture.
- (void)setImageLayerWithImage:(UIImage *)image
{
self.imageLayer.bounds = CGRectMake(0, 0, image.size.width, image.size.height);
self.imageLayer.position = CGPointMake(image.size.width/2, image.size.height/2);
self.imageLayer.contents = (id)image.CGImage;
}
The code that fetches the thumbnail is:
- (PHImageRequestID)imageInFullScreen:(MDPhoto *)photoItem
Id:(NSString*)photoID
targetSize:(CGSize)targetSize
highQualityFormat:(BOOL)highQualityFormat
isPreLoad:(BOOL)ispreLoad
saveFile:(BOOL)theSaveFile
progressHandler:(PHAssetImageProgressHandler)progressHandler
completionHandler:(void (^) (NSDictionary *, NSDictionary *))completionHandler {
CGFloat scale = [UIScreen mainScreen].scale;
targetSize = CGSizeMake(targetSize.width * scale, targetSize.height * scale);
if(targetSize.width < [MDDeviceInfo shareDevice].minImageWidth)
targetSize = CGSizeMake([MDDeviceInfo shareDevice].minImageWidth, [MDDeviceInfo shareDevice].minImageWidth * targetSize.height/targetSize.width);
PHImageRequestOptions *options = [self fullScreenRequestOptions:targetSize];
options.progressHandler = progressHandler;
MDPhoto* dict = photoItem;
PHAsset *asset = [(FBYPHAssetWrapper *)[photoItem objectForKey:kFBYAsset] entity];
if (!asset || [dict[kFBYUploadNeedRefetchAsset] boolValue] || self.isInFullscreenBrowser) {
FBYPHAssetWrapper *wraper = [FBYAssetManager getAssetWrapperByLocalIdentifier:photoItem[kFBYPHAssetLocalIdentifier]];
if (wraper) {
dict[kFBYUploadNeedRefetchAsset] = @NO;
[dict setObject:wraper forKey:kFBYAsset];
asset = [wraper entity];
}
}
if (!asset.localIdentifier) {
NSLog(#"");
}
__block UIImage *aImage = nil;
if(asset.localIdentifier && [[WDPhotoBookManager manager].currentPhotoBook containsAssetIdentifier:[asset.localIdentifier MD5]]){
NSData* imageData = [[WDPhotoBookManager manager].currentPhotoBook loadImageDataFromIdentifier:[asset.localIdentifier MD5]];
aImage = [UIImage imageWithData:imageData];
}
if (theSaveFile) {
if (!self.isInFullscreenBrowser) {
if (!aImage) {
NSString *localIdentifier = photoItem[kFBYPHAssetLocalIdentifier];
aImage = [FBYPHAssetHelper mediumSizedImageForAssetID:[FBYGlobal ImagePathWithLocalIdentifier:photoID targetSize:targetSize] pictureDir:[localIdentifier MD5]];
if (aImage && !self.isInFullscreenBrowser) {
[self drawImage:aImage localIdentifier:[FBYGlobal ImagePathWithLocalIdentifier:photoID targetSize:targetSize]];
}
}
} else {
options.networkAccessAllowed = YES;
}
}
if (ispreLoad) {
return 0;
}
theSaveFile = YES;
if (self.isInFullscreenBrowser) {
aImage = nil;
// targetSize = PHImageManagerMaximumSize;
theSaveFile = NO;
// WREN-1253
// iOS: full screen preview shows abnormally
// Comments: The fetched image is corrupted when using PHImageManagerMaximumSize.
// So I had to use the explicit target size.
if(IS_SMALL_IPHONE){
}else {
targetSize = CGSizeMake(asset.pixelWidth, asset.pixelHeight);
}
}
if (self.isInEditPhotoMode) {
aImage = nil;
theSaveFile = NO;
}
BOOL assetHasBurstIdentifier = asset.burstIdentifier && [asset.burstIdentifier length] > 0;
if (!aImage) {
if (asset) {
return [[self class] requestImageForAsset:asset
withImageManager:nil
targetSize:targetSize
options:options
fixOrientation:YES
representsBurst:asset.representsBurst || assetHasBurstIdentifier
saveFile:theSaveFile
isInFullScreen:self.isInFullscreenBrowser
isNeedTranparent:YES
resultHandler:^(UIImage *result, NSDictionary *info) {
if(!result) {
NSDictionary* context = [NSDictionary dictionaryWithObjectsAndKeys:photoID, @"id",
nil];
NSMutableDictionary *dict = [NSMutableDictionary dictionaryWithDictionary:info];
[dict addEntriesFromDictionary:info];
if ([NSThread isMainThread]) {
completionHandler(context, dict);
} else {
dispatch_async(dispatch_get_main_queue(), ^{
completionHandler(context, dict);
});
}
return ;
}
NSDictionary* context = [NSDictionary dictionaryWithObjectsAndKeys:
result, #"image", photoID, #"id", nil];
if ([NSThread isMainThread]) {
[self checkAndStoreThumbnailCache:result FullScreen:photoItem Id:photoID targetSize:targetSize];
completionHandler(context, info);
} else {
dispatch_async(dispatch_get_main_queue(), ^{
[self checkAndStoreThumbnailCache:result FullScreen:photoItem Id:photoID targetSize:targetSize];
completionHandler(context, info);
});
}
if (!self.isInFullscreenBrowser) {
[self drawImage:result localIdentifier:[FBYGlobal ImagePathWithLocalIdentifier:photoID
targetSize:targetSize]];
}
}];
} else {
NSDictionary* context = [NSDictionary dictionaryWithObjectsAndKeys:photoID, @"id",
nil];
NSDictionary* info = [NSDictionary dictionaryWithObjectsAndKeys:
photoID, #"id",#"assetMissing",#"error", nil];
if ([NSThread isMainThread]) {
completionHandler(context, info);
} else {
dispatch_async(dispatch_get_main_queue(), ^{
completionHandler(context, info);
});
}
}
} else {
void (^workToDo) (void) = ^{
[self checkAndStoreThumbnailCache:aImage FullScreen:photoItem Id:photoID targetSize:targetSize];
NSDictionary* context = [NSDictionary dictionaryWithObjectsAndKeys:
aImage, #"image", photoID, #"id", nil];
completionHandler(context, nil);
};
if ([NSThread isMainThread]) {
workToDo();
} else {
dispatch_async(dispatch_get_main_queue(), ^{
workToDo();
});
}
}
return 0;
}
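One thing worth checking for the all-black results is whether the request is allowed to hit the network for iCloud-only assets, and whether the result handler is being handed a degraded or failed image. A small sketch of the kind of option and info-dictionary checks that usually help here; the keys are standard Photos framework constants, not names from your project:
// Sketch: request an image for a (possibly iCloud-only) asset and
// inspect the info dictionary before putting the result on screen.
PHImageRequestOptions *options = [[PHImageRequestOptions alloc] init];
options.networkAccessAllowed = YES; // allow downloading from iCloud
options.deliveryMode = PHImageRequestOptionsDeliveryModeHighQualityFormat;

[[PHImageManager defaultManager] requestImageForAsset:asset
                                           targetSize:targetSize
                                          contentMode:PHImageContentModeAspectFit
                                              options:options
                                        resultHandler:^(UIImage *result, NSDictionary *info) {
    NSError *error = info[PHImageErrorKey];
    BOOL degraded = [info[PHImageResultIsDegradedKey] boolValue];
    if (error || (!result && [info[PHImageResultIsInCloudKey] boolValue])) {
        // The asset is in iCloud and could not be fetched; displaying a nil
        // result is one way an apparently black image ends up in the layer.
        NSLog(@"image request failed: %@", error);
        return;
    }
    if (!degraded) {
        // full-quality result: safe to display / cache
    }
}];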
I've been stuck on this for quite some time. There appears to be a zoom factor when I am recording with the built in camera app on my iPhone. However, I cannot seem to get the same result when I use an AVCaptureSession inside of my app. Here is an example of what I'm talking about. The top was recorded from the iPhone camera app, and the bottom was recorded inside of my app using AVCaptureSession. It's like there is a fish eye effect occurring. My code for setting up my camera is below.
- (void)configureCameraSession{
_captureSession = [AVCaptureSession new];
[_captureSession beginConfiguration];
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if(!_device){
[self noValidCamera];
return;
}
AVCaptureDeviceFormat *currentFormat;
int frameRate = 60;
for (AVCaptureDeviceFormat *format in _device.formats)
{
NSArray *ranges = format.videoSupportedFrameRateRanges;
AVFrameRateRange *frameRates = ranges[0];
//CMVideoFormatDescriptionGet
int resolutionWidth = CMVideoFormatDescriptionGetDimensions(format.formatDescription).width;
if(frameRates.maxFrameRate > 59 && resolutionWidth >= 1920){
CameraFormat *cformat = [[CameraFormat alloc] init];
cformat.format = format;
cformat.fps = 60;
if(frameRates.maxFrameRate > 119){
cformat.fps = 120;
}
if(frameRates.maxFrameRate > 239){
cformat.fps = 240;
}
NSString *resolution;
if(resolutionWidth > 1920){
resolution = #"4K ";
}
else{
resolution = [[NSNumber numberWithInt:CMVideoFormatDescriptionGetDimensions(format.formatDescription).height] stringValue];
resolution = [resolution stringByAppendingString:@"p "];
}
NSString *fps = [[NSNumber numberWithInt:cformat.fps] stringValue];
fps = [fps stringByAppendingString:@" FPS"];
cformat.label = [resolution stringByAppendingString:fps];
BOOL isUniqueFormat = YES;
for(int i = 0; i < [_formatList count]; i++){
if([_formatList[i].label isEqualToString:cformat.label]){
isUniqueFormat = NO;
break;
}
}
if(isUniqueFormat){
[_formatList addObject:cformat];
frameRate = cformat.fps;
currentFormat = cformat.format;
}
}
}
if(!currentFormat){
[self noValidCamera];
return;
}
_currentCameraFormat = _analysisViewController.currentCameraFormat;
if(_currentCameraFormat.fps == 0){
_currentCameraFormatIndex = 0;
_currentCameraFormat = _formatList[_currentCameraFormatIndex];
_analysisViewController.currentCameraFormat = _currentCameraFormat;
currentFormat = _currentCameraFormat.format;
frameRate = _currentCameraFormat.fps;
}
else{
currentFormat = _currentCameraFormat.format;
frameRate = _currentCameraFormat.fps;
}
NSString *resolution;
if(CMVideoFormatDescriptionGetDimensions(currentFormat.formatDescription).width > 1920){
resolution = #"4K ";
}
else{
resolution = [[NSNumber numberWithInt:CMVideoFormatDescriptionGetDimensions(currentFormat.formatDescription).height] stringValue];
resolution = [resolution stringByAppendingString:#"p "];
}
NSString *fps = [[NSNumber numberWithInt:frameRate] stringValue];
fps = [fps stringByAppendingString:#" FPS ▼"];
[self.videoLabelButton setTitle:[resolution stringByAppendingString:fps] forState:UIControlStateNormal];
[_device lockForConfiguration:nil];
_device.activeFormat = currentFormat;
_device.activeVideoMinFrameDuration = CMTimeMake(1, frameRate);
_device.activeVideoMaxFrameDuration = CMTimeMake(1, frameRate);
[_device unlockForConfiguration];
//Input
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:nil];
[_captureSession addInput:input];
//Output
_movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if([_captureSession canAddOutput:_movieFileOutput]){
[_captureSession addOutput:_movieFileOutput];
}
[self setMovieOutputOrientation];
//Preview Layer
_previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
_previewView = [[UIView alloc] initWithFrame:self.imageView.bounds];
[self addFocusToView:_previewView];
[_previewView addSubview:self.imageView];
_previewLayer.frame = _previewView.bounds;
_previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[_previewView.layer addSublayer:_previewLayer];
_previewLayer.connection.videoOrientation = [self videoOrientationFromCurrentDeviceOrientation];
AVCaptureVideoStabilizationMode stabilizationMode = AVCaptureVideoStabilizationModeCinematic;
if ([_device.activeFormat isVideoStabilizationModeSupported:stabilizationMode]) {
[_previewLayer.connection setPreferredVideoStabilizationMode:stabilizationMode];
}
[self.view addSubview:_previewView];
[self.view addSubview:self.topInfoBar];
[self.view addSubview:self.recordButton];
[self.view addSubview:self.doneButton];
[_captureSession commitConfiguration];
[_captureSession startRunning];
}
Okay, so I finally figured it out. The issue is that some formats support video stabilization, and Apple's built-in camera app uses it when it can; my app was not turning it on when it was available. The fix is to check whether stabilization is supported for the device's active format and, if so, set the preferred stabilization mode to Auto.
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in [_movieFileOutput connections]){
for ( AVCaptureInputPort *port in [connection inputPorts]){
if ([[port mediaType] isEqual:AVMediaTypeVideo]){
videoConnection = connection;
}
}
}
if([videoConnection isVideoOrientationSupported]){
[videoConnection setVideoOrientation:[self videoOrientationFromCurrentDeviceOrientation]];
}
//Check if we can use video stabilization and if so then set it to auto
if (videoConnection.supportsVideoStabilization) {
videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
}
Requirement:
We are able to make a connection from a host device to multiple slave devices. For example, if device A initiates a connection to devices B and C, the contributor devices can accept the peer connection and connect to device A. Here A is the master device and B and C are contributor devices. Now if B shares its songs with A, A can play the songs and see the song information. In the meantime C will be idle but should stay connected. When B has finished playing its songs, C should also be able to share its songs with device A.
Here are the problems we have faced in achieving the above:
1. As soon as B started sharing songs with A, C crashed, but A was still able to play the song shared by B.
array = [[NSMutableArray alloc] initWithObjects:[self.session connectedPeers], nil];
[_session sendData:[NSKeyedArchiver archivedDataWithRootObject:[info mutableCopy]] toPeers:[array objectAtIndex:0] withMode:MCSessionSendDataUnreliable error: &error];
NSLog(#"localizedDescription %#",error);
To work around this we pass the array directly, without an index (toPeers:array). That works and we are able to share and play songs on device A, but the song information is not received by device A. (See also the sketch below.)
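For comparison, a minimal version of the metadata send that avoids building a nested array and uses the reliable mode (unreliable datagrams may simply be dropped, which would explain the missing song information on A). This is only a sketch, not your project's exact code:
// Sketch: send the metadata dictionary to every connected peer reliably.
NSError *sendError = nil;
NSData *payload = [NSKeyedArchiver archivedDataWithRootObject:[info copy]];
BOOL sent = [_session sendData:payload
                       toPeers:_session.connectedPeers   // already an array of MCPeerID objects
                      withMode:MCSessionSendDataReliable // metadata should not be dropped
                         error:&sendError];
if (!sent) {
    NSLog(@"sendData failed: %@", sendError.localizedDescription);
}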
Here is the full code that we are using:
Contributor controller:
- (void)mediaPicker:(MPMediaPickerController *)mediaPicker didPickMediaItems:(MPMediaItemCollection *)mediaItemCollection
{
[self dismissViewControllerAnimated:YES completion:nil];
someMutableArray = [mediaItemCollection items];
counter = 0;
if(someMutableArray.count>1){
BOOL isselectsongone=YES;
[[NSUserDefaults standardUserDefaults] setBool:isselectsongone forKey:@"isselectsongone"];
}
[self showSpinner];
[self someSelector:nil];
}
- (void)someSelector:(NSNotification *)notification {
if(notification && someMutableArray.count>1 && counter <someMutableArray.count-1){
NSDate *start = [NSDate date];
counter=counter+1;
[self.outputStreamer stop];
self.outputStreamer = nil;
self.outputStream = nil;
}
song=[someMutableArray objectAtIndex:counter];
NSMutableDictionary *info = [NSMutableDictionary dictionary];
info=[[NSMutableDictionary alloc] init];
info[#"title"] = [song valueForProperty:MPMediaItemPropertyTitle] ? [song valueForProperty:MPMediaItemPropertyTitle] : #"";
info[#"artist"] = [song valueForProperty:MPMediaItemPropertyArtist] ? [song valueForProperty:MPMediaItemPropertyArtist] : #"";
//NSNumber *duration=[song valueForProperty:MPMediaItemPropertyPlaybackDuration];
int fullminutes = floor([timeinterval floatValue] / 60); // fullminutes is an int
int fullseconds = trunc([duration floatValue] - fullminutes * 60); // fullseconds is an int
[NSTimer scheduledTimerWithTimeInterval:[duration doubleValue]target:self selector:#selector(getdata) userInfo:nil repeats:YES];
}
-(void)getdata {
NSMutableDictionary *info = [NSMutableDictionary dictionary];
info=[[NSMutableDictionary alloc] init];
info[#"title"] = [song valueForProperty:MPMediaItemPropertyTitle] ? [song valueForProperty:MPMediaItemPropertyTitle] : #"";
info[#"artist"] = [song valueForProperty:MPMediaItemPropertyArtist] ? [song valueForProperty:MPMediaItemPropertyArtist] : #"";
NSNumber *duration=[song valueForProperty:MPMediaItemPropertyPlaybackDuration];
int fullminutes = floor([duration floatValue] / 60); // fullminutes is an int
int fullseconds = trunc([duration floatValue] - fullminutes * 60); // fullseconds is an int
info[#"duration"] = [NSString stringWithFormat:#"%d:%d", fullminutes, fullseconds];
MPMediaItemArtwork *artwork = [song valueForProperty:MPMediaItemPropertyArtwork];
UIImage *image = [artwork imageWithSize:CGSizeMake(150, 150)];
NSData * data = UIImageJPEGRepresentation(image, 0.0);
image = [UIImage imageWithData:data];
array = [[NSMutableArray alloc] initWithObjects:[self.session connectedPeers], nil];
MCPeerID* peerID11 = self.session.myPeerID;
NSMutableArray *arr=[[NSMutableArray alloc] initWithObjects:peerID11, nil];
NSLog(#"%#",arr);
if (image)
self.songArtWorkImageView.image = image;
else
self.songArtWorkImageView.image = nil;
self.songTitleLbl.text = [NSString stringWithFormat:@"%@ \n[Artist : %@]", info[@"title"], info[@"artist"]];
NSError *error;
[_session sendData:[NSKeyedArchiver archivedDataWithRootObject:[info mutableCopy]] toPeers:[array objectAtIndex:0] withMode:MCSessionSendDataUnreliable error: &error];
NSLog(#"localizedDescription %#",error);
#try {
if(_session && _session.connectedPeers && [_session.connectedPeers count] > 0) {
NSLog(#"%#",[song valueForProperty:MPMediaItemPropertyAssetURL]);
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[song valueForProperty:MPMediaItemPropertyAssetURL] options:nil];
[self convertAsset: asset complition:^(BOOL Success, NSString *filePath) {
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
if(Success) {
if(image) {
[self saveImage: image withComplition:^(BOOL status, NSString *imageName, NSURL *imageURL) {
if(status) {
@try {
[_session sendResourceAtURL:imageURL withName:imageName toPeer:[_session.connectedPeers objectAtIndex:0] withCompletionHandler:^(NSError *error) {
if (error) {
NSLog(@"Failed to send picture to %@", error.localizedDescription);
return;
}
//Clean up the temp file
NSFileManager *fileManager = [NSFileManager defaultManager];
[fileManager removeItemAtURL:imageURL error:nil];
}];
}
@catch (NSException *exception) {
}
}
}];
}
@try {
[self hideSpinner];
if(!self.outputStream) {
NSArray * connnectedPeers = [_session connectedPeers];
if([connnectedPeers count] != 0) {
[self outputStreamForPeer:[_session.connectedPeers objectAtIndex:0]];
}
}
}
@catch (NSException *exception) {
}
if(self.outputStream) {
self.outputStreamer = [[TDAudioOutputStreamer alloc] initWithOutputStream:self.outputStream];
//
[self.outputStreamer initStream:filePath];
NSLog(#"%#",filePath);
if(self.outputStreamer) {
[self.outputStreamer start];
}
else{
NSLog(#"Error: output streamer not found");
}
}
else{
//self.outputStream=[[NSOutputStream alloc] init];
self.outputStreamer = [[TDAudioOutputStreamer alloc] initWithOutputStream:self.outputStream];
[self.outputStreamer initStream:filePath];
NSLog(#"%#",filePath);
if(self.outputStreamer) {
[self.outputStreamer start];
}
}
}
else {
[UIView showMessageWithTitle:@"Error!" message:@"Error occurred!" showInterval:1.5];
}
});
}];
// }
}
}
@catch (NSException *exception) {
NSLog(@"Exception: %@", [exception debugDescription]);
}
//}
}
HostViewController:
- (void)session:(MCSession *)session didReceiveData:(NSData *)data fromPeer:(MCPeerID *)peerID
{
NSLog(#"%#",peerID);
NSLog(#"sessions%#",session);
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
#try {
// NSData *myData = [NSKeyedArchiver archivedDataWithRootObject:data];
info = [NSKeyedUnarchiver unarchiveObjectWithData:data];
self.songTitleLbl.text = [NSString stringWithFormat:#"%# \n[Duration: %#] [Artist : %#] ", info[#"title"], info[#"duration"], info[#"artist"]];
NSLog(#"eeret%#",self.songTitleLbl.text);
self.songArtWorkImageView.image = nil;
[self showSpinner];
}
@catch (NSException *exception) {
self.songTitleLbl.text = @"Some error occurred...\nPlease try again";
self.songArtWorkImageView.image = nil;
}
});
}
Please let us know if we have missed anything here, or whether there is a better way to achieve the above requirement. Any help is really appreciated.
My camera was taking pictures and recording videos perfectly (using AVCaptureMovieFileOutput), and I was able to toggle the camera position between front and rear normally. However, as in Instagram, Snapchat, and a myriad of other apps, I wanted to also allow the user to toggle the camera position while recording a video.
It seems that to achieve such a thing I need to work with AVCaptureVideoDataOutput instead, because it can handle the individual frames, but I can't really get it to work. Everything goes fine, but after I finish the video it just doesn't play, and there seems to be no resulting URL from the captureOutput method. Here is my code:
- (void)initialize{
if(!_session) {
_session = [[AVCaptureSession alloc] init];
_session.sessionPreset = self.cameraQuality;
// preview layer
CGRect bounds = self.preview.layer.bounds;
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_captureVideoPreviewLayer.bounds = bounds;
_captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
[self.preview.layer addSublayer:_captureVideoPreviewLayer];
AVCaptureDevicePosition devicePosition;
switch (self.position) {
case LLCameraPositionRear:
if([self.class isRearCameraAvailable]) {
devicePosition = AVCaptureDevicePositionBack;
} else {
devicePosition = AVCaptureDevicePositionFront;
_position = LLCameraPositionFront;
}
break;
case LLCameraPositionFront:
if([self.class isFrontCameraAvailable]) {
devicePosition = AVCaptureDevicePositionFront;
} else {
devicePosition = AVCaptureDevicePositionBack;
_position = LLCameraPositionRear;
}
break;
default:
devicePosition = AVCaptureDevicePositionUnspecified;
break;
}
if(devicePosition == AVCaptureDevicePositionUnspecified) {
_videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
} else {
_videoCaptureDevice = [self cameraWithPosition:devicePosition];
}
NSError *error = nil;
_videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_videoCaptureDevice error:&error];
if (!_videoDeviceInput) {
if(self.onError) {
self.onError(self, error);
}
return;
}
if([self.session canAddInput:_videoDeviceInput]) {
[self.session addInput:_videoDeviceInput];
// self.captureVideoPreviewLayer.connection.videoOrientation = [self orientationForConnection];
}
// add audio if video is enabled
if(self.videoEnabled) {
_audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
_audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_audioCaptureDevice error:&error];
if (!_audioDeviceInput) {
if(self.onError) {
self.onError(self, error);
}
}
if([self.session canAddInput:_audioDeviceInput]) {
[self.session addInput:_audioDeviceInput];
}
// Setup the video output
_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
_videoOutput.alwaysDiscardsLateVideoFrames = NO;
_videoOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
//[NSDictionary dictionaryWithObject:
//[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// Setup the audio input
_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
// Create the session
[_session addOutput:_videoOutput];
[_session addOutput:_audioOutput];
// Setup the queue
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[_videoOutput setSampleBufferDelegate:self queue:queue];
[_audioOutput setSampleBufferDelegate:self queue:queue];
}
// continiously adjust white balance
self.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
// image output
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[self.session addOutput:self.stillImageOutput];
}
//if we had disabled the connection on capture, re-enable it
if (![self.captureVideoPreviewLayer.connection isEnabled]) {
[self.captureVideoPreviewLayer.connection setEnabled:YES];
}
// [_assetWriter startWriting];
//[_assetWriter startSessionAtSourceTime:kCMTimeZero];
[self.session startRunning];
}
- (void)stop
{
[self.session stopRunning];
}
-(BOOL) setupWriter:(NSURL*)url {
NSError *error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
// Add video input
NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:128.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:192], AVVideoWidthKey,
[NSNumber numberWithInt:144], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioOutputSettings = nil;
// Both type of audio inputs causes output video file to be corrupted.
if( NO ) {
// should work from iphone 3GS on and from ipod 3rd generation
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];
} else {
// should work on any device requires more space
audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
[ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil ];
}
_audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType: AVMediaTypeAudio
outputSettings: audioOutputSettings ];
_audioWriterInput.expectsMediaDataInRealTime = YES;
// add input
[_videoWriter addInput:_videoWriterInput];
[_videoWriter addInput:_audioWriterInput];
return YES;
}
-(void) startVideoRecording
{
if( !self.recording )
{
NSURL* url = [[NSURL alloc] initFileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]];
//if(!debug){
[[NSFileManager defaultManager] removeItemAtURL:url error:nil];
//}
NSLog(#"start video recording...");
if( ![self setupWriter:url] ) {
NSLog(#"Setup Writer Failed") ;
return;
}
// [_session startRunning] ;
self.recording = YES;
}
}
-(void) stopVideoRecording:(void (^)(LLSimpleCamera *camera, NSURL *outputFileUrl, NSError *error))completionBlock
{
NSLog(#"STOP RECORDING");
if(!self.videoEnabled) {
return;
}
if( self.recording )
{
self.recording = NO;
self.didRecord = completionBlock;
[_session stopRunning] ;
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^
{
if(![_videoWriter finishWriting]) {
NSLog(#"finishWriting returned NO") ;
}
});
//[_videoWriter endSessionAtSourceTime:lastSampleTime];
//[_videoWriterInput markAsFinished];
//[_audioWriterInput markAsFinished];
NSLog(#"video recording stopped");
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
NSLog(#"CALLING CAPTUREOUTPUT");
self.recording = NO;
[self enableTorch:NO];
if( !CMSampleBufferDataIsReady(sampleBuffer) )
{
NSLog( #"sample buffer is not ready. Skipping sample" );
return;
}
/*if(self.didRecord) {
NSLog(@"DID RECORD EXISTS !!!");
self.didRecord(self, outputFileURL, error);
}*/
//THE ABOVE CODE WOULD GET THE outputFileURL from the captureOutput delegate if I used AVCaptureMovieFileOutput
if( self.recording == YES )
{
_lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if( _videoWriter.status != AVAssetWriterStatusWriting )
{
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:_lastSampleTime];
}
if( captureOutput == _videoOutput )
[self newVideoSample:sampleBuffer];
else if( captureOutput == _audioOutput) {
[self newAudioSample:sampleBuffer];
}
/*
// If I add audio to the video, then the output file gets corrupted and it cannot be reproduced
else
[self newAudioSample:sampleBuffer];
*/
}
}
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer
{
if( self.recording )
{
if( _videoWriter.status > AVAssetWriterStatusWriting )
{
NSLog(#"Warning: writer status is %ld", _videoWriter.status);
if( _videoWriter.status == AVAssetWriterStatusFailed )
NSLog(#"Error: %#", _videoWriter.error);
return;
}
if( ![_videoWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(#"Unable to write to video input");
}
}
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer
{
if( self.recording )
{
if( _videoWriter.status > AVAssetWriterStatusWriting )
{
NSLog(#"Warning: writer status is %ld", _videoWriter.status);
if( _videoWriter.status == AVAssetWriterStatusFailed )
NSLog(#"Error: %#", _videoWriter.error);
return;
}
if( ![_audioWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(#"Unable to write to audio input");
}
}
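Part of the reason no URL ever shows up is that nothing in the stop path marks the writer inputs as finished or hands the writer's outputURL back to the stored completion block. A rough sketch of a stop routine along those lines, assuming the _videoWriter, _videoWriterInput, _audioWriterInput and didRecord members from the code above:
// Sketch: finish the asset writer and report its output URL to the caller.
-(void) stopVideoRecording:(void (^)(LLSimpleCamera *camera, NSURL *outputFileUrl, NSError *error))completionBlock
{
    if (!self.recording) return;
    self.recording = NO;
    self.didRecord = completionBlock;

    [_videoWriterInput markAsFinished];
    [_audioWriterInput markAsFinished];

    // finishWritingWithCompletionHandler: is asynchronous; the file is only
    // complete (and playable) once this handler runs.
    [_videoWriter finishWritingWithCompletionHandler:^{
        NSURL *url = _videoWriter.outputURL;
        NSError *error = (_videoWriter.status == AVAssetWriterStatusFailed) ? _videoWriter.error : nil;
        dispatch_async(dispatch_get_main_queue(), ^{
            if (self.didRecord) {
                self.didRecord(self, url, error);
            }
        });
    }];
}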
PS1: Here are references to similar questions
Change camera capture device while recording a video
Simultaneous AVCaptureVideoDataOutput and AVCaptureMovieFileOutput
PS2: Sorry for the bad indentation of the code above. The code was perfectly indented, but somehow when I post large chunks of code here, it loses its indentation.
I'm using AVFoundation. I want to record video using both cameras (front and back). I record video with one camera, but when I switch the camera mode from back to front, the camera freezes. Is it possible to record video continuously with both cameras?
Sample Code:
- (void) startup
{
if (_session == nil)
{
NSLog(#"Starting up server");
self.isCapturing = NO;
self.isPaused = NO;
_currentFile = 0;
_discont = NO;
// create capture device with video input
_session = [[AVCaptureSession alloc] init];
AVCaptureDevice *backCamera = [self frontCamera];
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
[_session addInput:input];
// audio input from default mic
AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
[_session addInput:micinput];
// create an output for YUV output with self as delegate
_captureQueue = dispatch_queue_create("com.softcraftsystems.comss", DISPATCH_QUEUE_SERIAL);
AVCaptureVideoDataOutput* videoout = [[AVCaptureVideoDataOutput alloc] init];
[videoout setSampleBufferDelegate:self queue:_captureQueue];
NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
videoout.videoSettings = setcapSettings;
[_session addOutput:videoout];
_videoConnection = [videoout connectionWithMediaType:AVMediaTypeVideo];
// find the actual dimensions used so we can set up the encoder to the same.
NSDictionary* actual = videoout.videoSettings;
_cy = [[actual objectForKey:@"Height"] integerValue];
_cx = [[actual objectForKey:@"Width"] integerValue];
AVCaptureAudioDataOutput* audioout = [[AVCaptureAudioDataOutput alloc] init];
[audioout setSampleBufferDelegate:self queue:_captureQueue];
[_session addOutput:audioout];
_audioConnection = [audioout connectionWithMediaType:AVMediaTypeAudio];
// for audio, we want the channels and sample rate, but we can't get those from audioout.audiosettings on ios, so
// we need to wait for the first sample
// start capture and a preview layer
[_session startRunning];
_preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
}
- (AVCaptureDevice *)frontCamera
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionFront) {
return device;
}
}
return nil;
}
- (AVCaptureDevice *)backCamera
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionBack) {
return device;
}
}
return nil;
}
- (void) startupFront
{
_session = nil;
[_session stopRunning];
if (_session == nil)
{
NSLog(#"Starting up server");
self.isCapturing = NO;
self.isPaused = NO;
_currentFile = 0;
_discont = NO;
// create capture device with video input
_session = [[AVCaptureSession alloc] init];
AVCaptureDevice *backCamera = [self backCamera];
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
[_session addInput:input];
// audio input from default mic
AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
[_session addInput:micinput];
// create an output for YUV output with self as delegate
_captureQueue = dispatch_queue_create("com.softcraftsystems.comss", DISPATCH_QUEUE_SERIAL);
AVCaptureVideoDataOutput* videoout = [[AVCaptureVideoDataOutput alloc] init];
[videoout setSampleBufferDelegate:self queue:_captureQueue];
NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
videoout.videoSettings = setcapSettings;
[_session addOutput:videoout];
_videoConnection = [videoout connectionWithMediaType:AVMediaTypeVideo];
// find the actual dimensions used so we can set up the encoder to the same.
NSDictionary* actual = videoout.videoSettings;
_cy = [[actual objectForKey:@"Height"] integerValue];
_cx = [[actual objectForKey:@"Width"] integerValue];
AVCaptureAudioDataOutput* audioout = [[AVCaptureAudioDataOutput alloc] init];
[audioout setSampleBufferDelegate:self queue:_captureQueue];
[_session addOutput:audioout];
_audioConnection = [audioout connectionWithMediaType:AVMediaTypeAudio];
// for audio, we want the channels and sample rate, but we can't get those from audioout.audiosettings on ios, so
// we need to wait for the first sample
// start capture and a preview layer
[_session startRunning];
_preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
}
- (void) startCapture
{
@synchronized(self)
{
if (!self.isCapturing)
{
NSLog(@"starting capture");
// create the encoder once we have the audio params
_encoder = nil;
self.isPaused = NO;
_discont = NO;
_timeOffset = CMTimeMake(0, 0);
self.isCapturing = YES;
}
}
}
- (void) stopCapture
{
@synchronized(self)
{
if (self.isCapturing)
{
NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile];
NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
NSURL* url = [NSURL fileURLWithPath:path];
_currentFile++;
// serialize with audio and video capture
self.isCapturing = NO;
dispatch_async(_captureQueue, ^{
[_encoder finishWithCompletionHandler:^{
self.isCapturing = NO;
_encoder = nil;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:url completionBlock:^(NSURL *assetURL, NSError *error){
NSLog(#"save completed");
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
}];
}];
});
}
}
}
- (void) pauseCapture
{
@synchronized(self)
{
if (self.isCapturing)
{
NSLog(@"Pausing capture");
self.isPaused = YES;
_discont = YES;
}
}
}
- (void) resumeCapture
{
@synchronized(self)
{
if (self.isPaused)
{
NSLog(@"Resuming capture");
self.isPaused = NO;
}
}
}
- (CMSampleBufferRef) adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset
{
CMItemCount count;
CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
for (CMItemCount i = 0; i < count; i++)
{
pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
}
CMSampleBufferRef sout;
CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
free(pInfo);
return sout;
}
- (void) setAudioFormat:(CMFormatDescriptionRef) fmt
{
const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
_samplerate = asbd->mSampleRate;
_channels = asbd->mChannelsPerFrame;
}
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
BOOL bVideo = YES;
@synchronized(self)
{
if (!self.isCapturing || self.isPaused)
{
return;
}
if (connection != _videoConnection)
{
bVideo = NO;
}
if ((_encoder == nil) && !bVideo)
{
CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
[self setAudioFormat:fmt];
NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile];
NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
_encoder = [VideoEncoder encoderForPath:path Height:_cy width:_cx channels:_channels samples:_samplerate];
}
if (_discont)
{
if (bVideo)
{
return;
}
_discont = NO;
// calc adjustment
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime last = bVideo ? _lastVideo : _lastAudio;
if (last.flags & kCMTimeFlags_Valid)
{
if (_timeOffset.flags & kCMTimeFlags_Valid)
{
pts = CMTimeSubtract(pts, _timeOffset);
}
CMTime offset = CMTimeSubtract(pts, last);
NSLog(#"Setting offset from %s", bVideo?"video": "audio");
NSLog(#"Adding %f to %f (pts %f)", ((double)offset.value)/offset.timescale, ((double)_timeOffset.value)/_timeOffset.timescale, ((double)pts.value/pts.timescale));
// this stops us having to set a scale for _timeOffset before we see the first video time
if (_timeOffset.value == 0)
{
_timeOffset = offset;
}
else
{
_timeOffset = CMTimeAdd(_timeOffset, offset);
}
}
_lastVideo.flags = 0;
_lastAudio.flags = 0;
}
// retain so that we can release either this or modified one
CFRetain(sampleBuffer);
if (_timeOffset.value > 0)
{
CFRelease(sampleBuffer);
sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
}
// record most recent time so we know the length of the pause
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
if (dur.value > 0)
{
pts = CMTimeAdd(pts, dur);
}
if (bVideo)
{
_lastVideo = pts;
}
else
{
_lastAudio = pts;
}
}
// pass frame to encoder
[_encoder encodeFrame:sampleBuffer isVideo:bVideo];
CFRelease(sampleBuffer);
}
- (void) shutdown
{
NSLog(#"shutting down server");
if (_session)
{
[_session stopRunning];
_session = nil;
}
[_encoder finishWithCompletionHandler:^{
NSLog(#"Capture completed");
}];
}
In my opinion it is not possible to continue recording while switching cameras, because there is a resolution and quality difference between the two cameras, and a video can have only one resolution and quality throughout.
Secondly, every time you switch cameras, the capture device has to be allocated and initialized again.
Unfortunately, as far as I can tell, it is not possible; but if you find a solution, please do tell me.