My camera was taking pictures and recording videos perfectly (using AVCaptureMovieFileOutput), and I was able to toggle the camera position between front and rear normally. However, as in Instagram, Snapchat, and a myriad of other apps, I also wanted to allow the user to toggle the camera position while recording a video.
It seems that to achieve this I need to work with AVCaptureVideoDataOutput instead, because it hands me the individual frames, but I can't really get it to work. Everything seems to go fine, but after I finish the video it just doesn't play, and there is no resulting URL from the captureOutput delegate method. Here is my code:
- (void)initialize{
if(!_session) {
_session = [[AVCaptureSession alloc] init];
_session.sessionPreset = self.cameraQuality;
// preview layer
CGRect bounds = self.preview.layer.bounds;
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_captureVideoPreviewLayer.bounds = bounds;
_captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
[self.preview.layer addSublayer:_captureVideoPreviewLayer];
AVCaptureDevicePosition devicePosition;
switch (self.position) {
case LLCameraPositionRear:
if([self.class isRearCameraAvailable]) {
devicePosition = AVCaptureDevicePositionBack;
} else {
devicePosition = AVCaptureDevicePositionFront;
_position = LLCameraPositionFront;
}
break;
case LLCameraPositionFront:
if([self.class isFrontCameraAvailable]) {
devicePosition = AVCaptureDevicePositionFront;
} else {
devicePosition = AVCaptureDevicePositionBack;
_position = LLCameraPositionRear;
}
break;
default:
devicePosition = AVCaptureDevicePositionUnspecified;
break;
}
if(devicePosition == AVCaptureDevicePositionUnspecified) {
_videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
} else {
_videoCaptureDevice = [self cameraWithPosition:devicePosition];
}
NSError *error = nil;
_videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_videoCaptureDevice error:&error];
if (!_videoDeviceInput) {
if(self.onError) {
self.onError(self, error);
}
return;
}
if([self.session canAddInput:_videoDeviceInput]) {
[self.session addInput:_videoDeviceInput];
// self.captureVideoPreviewLayer.connection.videoOrientation = [self orientationForConnection];
}
// add audio if video is enabled
if(self.videoEnabled) {
_audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
_audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_audioCaptureDevice error:&error];
if (!_audioDeviceInput) {
if(self.onError) {
self.onError(self, error);
}
}
if([self.session canAddInput:_audioDeviceInput]) {
[self.session addInput:_audioDeviceInput];
}
// Setup the video output
_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
_videoOutput.alwaysDiscardsLateVideoFrames = NO;
_videoOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
//[NSDictionary dictionaryWithObject:
//[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// Setup the audio input
_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
// Create the session
[_session addOutput:_videoOutput];
[_session addOutput:_audioOutput];
// Setup the queue
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[_videoOutput setSampleBufferDelegate:self queue:queue];
[_audioOutput setSampleBufferDelegate:self queue:queue];
}
// continuously adjust white balance
self.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
// image output
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[self.session addOutput:self.stillImageOutput];
}
//if we had disabled the connection on capture, re-enable it
if (![self.captureVideoPreviewLayer.connection isEnabled]) {
[self.captureVideoPreviewLayer.connection setEnabled:YES];
}
// [_assetWriter startWriting];
//[_assetWriter startSessionAtSourceTime:kCMTimeZero];
[self.session startRunning];
}
- (void)stop
{
[self.session stopRunning];
}
-(BOOL) setupWriter:(NSURL*)url {
NSError *error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
// Add video input
NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:128.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:192], AVVideoWidthKey,
[NSNumber numberWithInt:144], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioOutputSettings = nil;
// Both types of audio settings cause the output video file to be corrupted.
if( NO ) {
// should work from the iPhone 3GS on and from the iPod 3rd generation
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];
} else {
// should work on any device, but requires more space
audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
[ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil ];
}
_audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType: AVMediaTypeAudio
outputSettings: audioOutputSettings ];
_audioWriterInput.expectsMediaDataInRealTime = YES;
// add input
[_videoWriter addInput:_videoWriterInput];
[_videoWriter addInput:_audioWriterInput];
return YES;
}
-(void) startVideoRecording
{
if( !self.recording )
{
NSURL* url = [[NSURL alloc] initFileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]];
//if(!debug){
[[NSFileManager defaultManager] removeItemAtURL:url error:nil];
//}
NSLog(@"start video recording...");
if( ![self setupWriter:url] ) {
NSLog(@"Setup Writer Failed") ;
return;
}
// [_session startRunning] ;
self.recording = YES;
}
}
-(void) stopVideoRecording:(void (^)(LLSimpleCamera *camera, NSURL *outputFileUrl, NSError *error))completionBlock
{
NSLog(@"STOP RECORDING");
if(!self.videoEnabled) {
return;
}
if( self.recording )
{
self.recording = NO;
self.didRecord = completionBlock;
[_session stopRunning] ;
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^
{
if(![_videoWriter finishWriting]) {
NSLog(@"finishWriting returned NO") ;
}
});
//[_videoWriter endSessionAtSourceTime:lastSampleTime];
//[_videoWriterInput markAsFinished];
//[_audioWriterInput markAsFinished];
NSLog(@"video recording stopped");
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
NSLog(@"CALLING CAPTUREOUTPUT");
self.recording = NO;
[self enableTorch:NO];
if( !CMSampleBufferDataIsReady(sampleBuffer) )
{
NSLog( @"sample buffer is not ready. Skipping sample" );
return;
}
/*if(self.didRecord) {
NSLog(@"DID RECORD EXISTS !!!");
self.didRecord(self, outputFileURL, error);
}*/
// The above code would get the outputFileURL from the captureOutput delegate method if I used AVCaptureMovieFileOutput
if( self.recording == YES )
{
_lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if( _videoWriter.status != AVAssetWriterStatusWriting )
{
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:_lastSampleTime];
}
if( captureOutput == _videoOutput )
[self newVideoSample:sampleBuffer];
else if( captureOutput == _audioOutput) {
[self newAudioSample:sampleBuffer];
}
/*
// If I add audio to the video, then the output file gets corrupted and it cannot be reproduced
else
[self newAudioSample:sampleBuffer];
*/
}
}
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer
{
if( self.recording )
{
if( _videoWriter.status > AVAssetWriterStatusWriting )
{
NSLog(@"Warning: writer status is %ld", _videoWriter.status);
if( _videoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", _videoWriter.error);
return;
}
if( ![_videoWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to video input");
}
}
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer
{
if( self.recording )
{
if( _videoWriter.status > AVAssetWriterStatusWriting )
{
NSLog(@"Warning: writer status is %ld", _videoWriter.status);
if( _videoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", _videoWriter.error);
return;
}
if( ![_audioWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to audio input");
}
}
PS1: Here are references to similar questions
Change camera capture device while recording a video
Simultaneous AVCaptureVideoDataOutput and AVCaptureMovieFileOutput
PS2: Sorry for the bad indentation in the code above. The code was perfectly indented, but somehow when I post huge chunks of code here it loses its indentation.
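One detail worth flagging in the code above: captureOutput:didOutputSampleBuffer: sets self.recording = NO (and disables the torch) at the top of every callback, so the if (self.recording == YES) branch that starts and feeds the writer never runs, and stopVideoRecording: never hands a URL back to the completion block. Below is a minimal sketch of the shape those two pieces could take instead, assuming the writer was set up with the temporary URL from startVideoRecording; it is an illustration, not the original code:
// Minimal sketch (not the original code): let the asset writer drive the completion block.
// The output URL is simply the URL that was passed to setupWriter:, i.e. _videoWriter.outputURL.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (!CMSampleBufferDataIsReady(sampleBuffer) || !self.recording) {
        return; // do not reset self.recording here
    }
    _lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (_videoWriter.status == AVAssetWriterStatusUnknown) {
        [_videoWriter startWriting];
        [_videoWriter startSessionAtSourceTime:_lastSampleTime];
    }
    if (captureOutput == _videoOutput) {
        [self newVideoSample:sampleBuffer];
    } else if (captureOutput == _audioOutput) {
        [self newAudioSample:sampleBuffer];
    }
}

- (void)stopVideoRecording:(void (^)(LLSimpleCamera *camera, NSURL *outputFileUrl, NSError *error))completionBlock
{
    if (!self.videoEnabled || !self.recording) {
        return;
    }
    self.recording = NO;
    [_videoWriterInput markAsFinished];
    [_audioWriterInput markAsFinished];
    [_videoWriter finishWritingWithCompletionHandler:^{
        if (completionBlock) {
            completionBlock(self, _videoWriter.outputURL, _videoWriter.error);
        }
    }];
}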
Related
I am trying to record audio into a file for 14 days. I receive audio data in PCM format through screen mirroring, and I convert it to a CMSampleBuffer using this code:
-(void)createAudioSampleBufferFromData:(uint8_t*)buf withSize:(uint32_t)buflen timeStamp:(double)timeStamp{
OSStatus status;
AudioStreamBasicDescription monoStreamFormat;
memset(&monoStreamFormat, 0, sizeof(monoStreamFormat));
monoStreamFormat.mFormatID = kAudioFormatLinearPCM;
monoStreamFormat.mSampleRate = 44100;
monoStreamFormat.mChannelsPerFrame = 2;
monoStreamFormat.mBitsPerChannel = 16;
monoStreamFormat.mFramesPerPacket = 1; // uncompressed audio
monoStreamFormat.mBytesPerFrame =monoStreamFormat.mChannelsPerFrame * monoStreamFormat.mBitsPerChannel/8;
monoStreamFormat.mBytesPerPacket = monoStreamFormat.mBytesPerFrame * monoStreamFormat.mFramesPerPacket;
monoStreamFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
CMFormatDescriptionRef format = NULL;
status = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &monoStreamFormat, 0, NULL, 0, NULL, NULL, &format);
if (status != noErr) {
// really shouldn't happen
return;
}
long blockLength = buflen;
CMSampleBufferRef sampleBuffer = NULL;
CMBlockBufferRef blockBuffer = NULL;
status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, buf, // memoryBlock to hold buffered data
blockLength, // block length of the mem block in bytes.
kCFAllocatorDefault, NULL,
0, // offsetToData
blockLength, // dataLength of relevant bytes, starting at offsetToData
0, &blockBuffer);
if(status == noErr)
{
CMSampleTimingInfo timings = {CMTimeMake(1, 44100.0), CMClockGetTime(CMClockGetHostTimeClock()), kCMTimeInvalid };
const size_t sampleSize = blockLength;
status = CMSampleBufferCreate(kCFAllocatorDefault,
NULL, true, NULL, NULL,
format, 1, 0, &timings, 1,
&sampleSize, &sampleBuffer);
CMItemCount itemCount = CMSampleBufferGetNumSamples(sampleBuffer);
[self notifyAudioBufferReceived:sampleBuffer];
// if(blockBuffer)
// CFRelease(blockBuffer);
}
}
And these CMSampleBuffers are received in the following method:
- (void)airPlayServer:(id)server audioBufferReceived:(CMSampleBufferRef)buffer {
if(recordingStarted){
[decoderAndRecorder.recorder newAudioSample:buffer];
}
// CFRelease(buffer);
}
From here these buffers go to the AVAssetWriter class, where I append them to record the audio into a file. The AVAssetWriter class is as follows:
- (instancetype)init
{
self = [super init];
if (self) {
// Create asset writer
dateString = [self dateInString];
NSString *documents = [NSSearchPathForDirectoriesInDomains (NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex: 0];
filePath = [documents stringByAppendingPathComponent:[NSString stringWithFormat:@"screen recording - %@.caf",dateString]];
if ([[NSFileManager defaultManager] fileExistsAtPath:filePath]) {
[[NSFileManager defaultManager] removeItemAtPath:filePath
error:nil];
}
NSLog(@"%@", filePath);
NSError *error = nil;
_assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:filePath]
fileType:AVFileTypeCoreAudioFormat
error:&error];
AudioChannelLayout acl;
memset(&acl,0, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioOutputSettings = nil;
audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatLinearPCM ], AVFormatIDKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
[ NSNumber numberWithInt: 16 ], AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
[NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
nil ];
_audioWriterInput = [[AVAssetWriterInput alloc]
initWithMediaType: AVMediaTypeAudio
outputSettings: audioOutputSettings ];
_audioWriterInput.expectsMediaDataInRealTime = YES;
NSParameterAssert(_audioWriterInput);
NSParameterAssert([_assetWriter canAddInput:_audioWriterInput]);
[_assetWriter addInput:_audioWriterInput];
}
return self;
}
-(void)startSession{
// This is video recorder code
_isStarted =[_assetWriter startWriting];
if(_isStarted)
{
NSLog(@"======== Session started");
[_assetWriter startSessionAtSourceTime:_startingTime];
}
else {
NSLog(@"======== Session failed : %@", _assetWriter.error);
}
}
-(void)endSession
{
_isStarted = NO;
[_audioWriterInput markAsFinished];
[_assetWriter finishWritingWithCompletionHandler:^{
NSLog(@"====== File Save Successfully =======");
}];
}
- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer
{
if (_isStarted) {
if (_videoWriter.status == AVAssetWriterStatusWriting) {
// if(_audioWriterInput.readyForMoreMediaData){
//NSLog(@"Warning: writer status is %d", _videoWriter.status)
if(sampleBuffer){
if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
NSLog(@"Unable to write to audio input");
}else{
NSLog(@"====Audio is being recorded===");
}
}
// }
}else if(_videoWriter.status == AVAssetWriterStatusFailed){
NSLog(@"<----- Appending failed ----->");
NSLog(@"Error: %@", _videoWriter.error);
}
}
}
That's all. The file is created but with zero bytes; no data is written to it. Please help me with this problem.
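One thing that stands out in newAudioSample: above is that it checks _videoWriter.status, while the writer that was actually created and started in this class is _assetWriter, so the status check never passes and appendSampleBuffer: is never reached. A minimal sketch of the same method written against the writer this class owns (an illustration only, assuming the _assetWriter and _audioWriterInput ivars shown earlier):
// Minimal sketch (not the original code): gate on _assetWriter, the writer this class created.
- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer
{
    if (!_isStarted || sampleBuffer == NULL) {
        return;
    }
    if (_assetWriter.status == AVAssetWriterStatusWriting) {
        if (_audioWriterInput.isReadyForMoreMediaData) {
            if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"Unable to write to audio input: %@", _assetWriter.error);
            }
        }
    } else if (_assetWriter.status == AVAssetWriterStatusFailed) {
        NSLog(@"Appending failed: %@", _assetWriter.error);
    }
}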
I'm working on a personal iOS project that requires full-screen videos (15 seconds in length) to be uploaded to a backend over a 4G connection. While I can take videos just fine, the output size of the file comes out to 30MB, which makes me think I'm doing something drastically wrong when it comes to compression. Below is the code I'm using to set up the AssetWriter:
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
NSLog(@"Started Recording! *******************");
self.movieWriter = [AVAssetWriter assetWriterWithURL:fileURL fileType:AVFileTypeMPEG4 error:nil];
[self.movieWriter setShouldOptimizeForNetworkUse:YES];
NSDictionary *videoCleanApertureSettings = @{
AVVideoCleanApertureWidthKey: [NSNumber numberWithFloat:self.view.frame.size.width],
AVVideoCleanApertureHeightKey: [NSNumber numberWithFloat:self.view.frame.size.height],
AVVideoCleanApertureHorizontalOffsetKey: [NSNumber numberWithInt:10],
AVVideoCleanApertureVerticalOffsetKey: [NSNumber numberWithInt:10],
};
NSDictionary *videoCompressionSettings = @{
AVVideoAverageBitRateKey: [NSNumber numberWithFloat:5000000.0],
AVVideoMaxKeyFrameIntervalKey: [NSNumber numberWithInteger:1],
AVVideoProfileLevelKey: AVVideoProfileLevelH264Baseline30,
AVVideoCleanApertureKey: videoCleanApertureSettings,
};
NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithFloat:self.view.frame.size.width],
AVVideoHeightKey: [NSNumber numberWithFloat:self.view.frame.size.height],
AVVideoCompressionPropertiesKey: videoCompressionSettings,
};
self.movieWriterVideoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
self.movieWriterVideoInput.expectsMediaDataInRealTime = YES;
[self.movieWriter addInput:self.movieWriterVideoInput];
NSDictionary *audioSettings = @{AVFormatIDKey: [NSNumber numberWithInteger:kAudioFormatMPEG4AAC],
AVSampleRateKey: [NSNumber numberWithFloat:44100.0],
AVNumberOfChannelsKey: [NSNumber numberWithInteger:1],
};
self.movieWriterAudioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
self.movieWriterAudioInput.expectsMediaDataInRealTime = YES;
[self.movieWriter addInput:self.movieWriterAudioInput];
[self.movieWriter startWriting];
}
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(@"Done Recording!");
[self.movieWriterVideoInput markAsFinished];
[self.movieWriterAudioInput markAsFinished];
[self.movieWriter finishWritingWithCompletionHandler:^{
AVURLAsset *compressedVideoAsset = [[AVURLAsset alloc] initWithURL:self.movieWriter.outputURL options:nil];
//Upload video to server
}];
}
For the setup of the actual session I'm using the following code:
//Indicate that some changes will be made to the session
[self.captureSession beginConfiguration];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
AVCaptureInput* currentCameraInput = [self.captureSession.inputs objectAtIndex:0];
for (AVCaptureInput *captureInput in self.captureSession.inputs) {
[self.captureSession removeInput:captureInput];
}
//Get currently selected camera and use for input
AVCaptureDevice *videoCamera = nil;
if(((AVCaptureDeviceInput*)currentCameraInput).device.position == AVCaptureDevicePositionBack)
{
videoCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
}
else
{
videoCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
}
//Add input to session
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoCamera error:nil];
[self.captureSession addInput:newVideoInput];
//Add mic input to the session
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];
[self.captureSession addInput:audioInput];
//Add movie output to session
for (AVCaptureOutput *output in self.captureSession.outputs) {
[self.captureSession removeOutput:output];
}
self.movieOutput = [AVCaptureMovieFileOutput new];
int32_t preferredTimeScale = 30; //Frames per second
self.movieOutput.maxRecordedDuration = CMTimeMakeWithSeconds(15, preferredTimeScale); //Setting the max video length
[self.captureSession addOutput:self.movieOutput];
//Commit all the configuration changes at once
[self.captureSession commitConfiguration];
I know that if I change AVCaptureSessionPresetHigh to a different preset I can reduce the file size of the final video, but unfortunately it looks like AVCaptureSessionPresetiFrame1280x720 is the only one that provides the full frame I'm trying to capture (and it still leaves me with an output size of about 20MB, which is too large for 4G uploads).
I've spent a lot of time googling and searching through other posts on Stack Overflow, but for the life of me I can't seem to figure out what I'm doing wrong, and any help at all would be greatly appreciated.
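For a rough sanity check on the numbers: at the requested 5 Mbps video bit rate plus AAC audio, a 15-second clip should land around 9-10 MB, so a 30 MB file suggests the compression settings above are not actually being applied (AVCaptureMovieFileOutput writes the file itself using the session preset; the AVAssetWriter configured in the didStartRecording callback is never fed any samples in the code shown). A back-of-the-envelope sketch of that estimate, with the 64 kbps audio figure assumed purely for illustration:
// Rough size estimate for a 15 s clip at the bit rates discussed above.
double videoBitsPerSecond = 5000000.0; // AVVideoAverageBitRateKey requested above
double audioBitsPerSecond = 64000.0;   // assumed AAC bit rate, illustration only
double seconds = 15.0;
double expectedMegabytes = (videoBitsPerSecond + audioBitsPerSecond) * seconds / 8.0 / (1024.0 * 1024.0);
NSLog(@"expected size is roughly %.1f MB", expectedMegabytes); // about 9.1 MB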
You need a PhD to work with AVAssetWriter - it's non-trivial: https://developer.apple.com/library/mac/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/05_Export.html#//apple_ref/doc/uid/TP40010188-CH9-SW1
There's an amazing library for doing exactly what you want; it's a drop-in replacement for AVAssetExportSession with more crucial features, like changing the bit rate: https://github.com/rs/SDAVAssetExportSession
Here's how to use it:
-(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
SDAVAssetExportSession *encoder = [SDAVAssetExportSession.alloc initWithAsset:[AVAsset assetWithURL:[info objectForKey:UIImagePickerControllerMediaURL]]];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
self.myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"lowerBitRate-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:self.myPathDocs];
encoder.outputURL=url;
encoder.outputFileType = AVFileTypeMPEG4;
encoder.shouldOptimizeForNetworkUse = YES;
encoder.videoSettings = @
{
AVVideoCodecKey: AVVideoCodecH264,
AVVideoCompressionPropertiesKey: @
{
AVVideoAverageBitRateKey: @2300000, // Lower bit rate here
AVVideoProfileLevelKey: AVVideoProfileLevelH264High40,
},
};
encoder.audioSettings = @
{
AVFormatIDKey: @(kAudioFormatMPEG4AAC),
AVNumberOfChannelsKey: @2,
AVSampleRateKey: @44100,
AVEncoderBitRateKey: @128000,
};
[encoder exportAsynchronouslyWithCompletionHandler:^
{
int status = encoder.status;
if (status == AVAssetExportSessionStatusCompleted)
{
AVAssetTrack *videoTrack = nil;
AVURLAsset *asset = [AVAsset assetWithURL:encoder.outputURL];
NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
videoTrack = [videoTracks objectAtIndex:0];
float frameRate = [videoTrack nominalFrameRate];
float bps = [videoTrack estimatedDataRate];
NSLog(@"Frame rate == %f",frameRate);
NSLog(@"bps rate == %f",bps/(1024.0 * 1024.0));
NSLog(@"Video export succeeded");
// encoder.outputURL <- this is what you want!!
}
else if (status == AVAssetExportSessionStatusCancelled)
{
NSLog(@"Video export cancelled");
}
else
{
NSLog(@"Video export failed with error: %@ (%ld)", encoder.error.localizedDescription, (long)encoder.error.code);
}
}];
}
I set the shutter speed, ISO and other camera parameters and capture an image using the code below. If I set the ISO to something like 119, it gets reported as 125 in the Exif info (it is always rounded to a standard value). How can I tell what the real ISO was? The maxISO is reported as 734, but if I set it to 734 it shows as 800 in the Exif information. I had the same problem with the exposure time: if I set it to 800ms it shows up as 1s in the Exif info, but the value calculated from the ShutterSpeedValue is correct.
- (void)setCameraSettings:(long)expTime1000thSec iso:(int)isoValue
{
if ( currentCaptureDevice ) {
[captureSession beginConfiguration];
NSError *error = nil;
if ([currentCaptureDevice lockForConfiguration:&error]) {
if ([currentCaptureDevice isExposureModeSupported:AVCaptureExposureModeLocked]) {
CMTime minTime, maxTime, exposureTime;
if ( isoValue < minISO ) {
isoValue = minISO;
} else if ( isoValue > maxISO ) {
isoValue = maxISO;
}
exposureTime = CMTimeMake(expTime1000thSec, EXP_TIME_UNIT); // in 1/EXP_TIME_UNIT of a second
minTime = currentCaptureDevice.activeFormat.minExposureDuration;
maxTime = currentCaptureDevice.activeFormat.maxExposureDuration;
if ( CMTimeCompare(exposureTime, minTime) < 0 ) {
exposureTime = minTime;
} else if ( CMTimeCompare(exposureTime, maxTime) > 0 ) {
exposureTime = maxTime;
}
NSLog(@"setting exp time to %lld/%d s (want %ld) iso=%d", exposureTime.value, exposureTime.timescale, expTime1000thSec, isoValue);
[currentCaptureDevice setExposureModeCustomWithDuration:exposureTime ISO:isoValue completionHandler:nil];
}
if (currentCaptureDevice.lowLightBoostSupported) {
currentCaptureDevice.automaticallyEnablesLowLightBoostWhenAvailable = NO;
NSLog(@"setting automaticallyEnablesLowLightBoostWhenAvailable = NO");
}
// lock the gains
if ([currentCaptureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked]) {
currentCaptureDevice.whiteBalanceMode = AVCaptureWhiteBalanceModeLocked;
NSLog(@"setting AVCaptureWhiteBalanceModeLocked");
}
// set the gains
AVCaptureWhiteBalanceGains gains;
gains.redGain = 1.0;
gains.greenGain = 1.0;
gains.blueGain = 1.0;
AVCaptureWhiteBalanceGains normalizedGains = [self normalizedGains:gains];
[currentCaptureDevice setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:normalizedGains completionHandler:nil];
NSLog(@"setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains g.red=%.2lf g.green=%.2lf g.blue=%.2lf",
normalizedGains.redGain, normalizedGains.greenGain, normalizedGains.blueGain);
[currentCaptureDevice unlockForConfiguration];
}
[captureSession commitConfiguration];
}
}
- (void)captureStillImage
{
NSLog(@"about to request a capture from: %@", [self stillImageOutput]);
if ( videoConnection ) {
waitingForCapture = true;
NSLog(@"requesting a capture from: %@", [self stillImageOutput]);
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if ( imageSampleBuffer ) {
CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments) {
NSLog(@"attachments: %@", exifAttachments);
} else {
NSLog(@"no attachments");
}
NSLog(@"name: %@", [currentCaptureDevice localizedName]);
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
[self setStillImage:image];
NSDictionary *dict = (__bridge NSDictionary*)exifAttachments;
NSString *value = [dict objectForKey:@"PixelXDimension"];
[self setImageWidth:[NSNumber numberWithInt:[value intValue]]];
value = [dict objectForKey:@"PixelYDimension"];
[self setImageHeight:[NSNumber numberWithInt:[value intValue]]];
value = [dict objectForKey:@"BrightnessValue"];
[self setImageBrightnessValue:[NSNumber numberWithFloat:[value floatValue]]];
value = [dict objectForKey:@"ShutterSpeedValue"];
double val = [value doubleValue];
val = 1.0 / pow(2.0, val);
[self setImageExposureTime:[NSNumber numberWithDouble:val]];
value = [dict objectForKey:@"ApertureValue"];
[self setImageApertureValue:[NSNumber numberWithFloat:[value floatValue]]];
NSArray *values = [dict objectForKey:@"ISOSpeedRatings"];
[self setImageISOSpeedRatings:[NSNumber numberWithInt:[ [values objectAtIndex:0] intValue]]];
NSLog(@"r/g/b gains = %.2lf/%.2lf/%.2lf",
currentCaptureDevice.deviceWhiteBalanceGains.redGain,
currentCaptureDevice.deviceWhiteBalanceGains.greenGain,
currentCaptureDevice.deviceWhiteBalanceGains.blueGain);
[[NSNotificationCenter defaultCenter] postNotificationName:kImageCapturedSuccessfully object:nil];
} else {
NSLog(@"imageSampleBuffer = NULL");
}
waitingForCapture = false;
}];
} else {
NSLog(@"can't capture from: videoConnection");
}
}
I answered your other question; it's the same thing, you are not setting the exposure mode to AVCaptureExposureModeCustom:
[device lockForConfiguration:nil];
if([device isExposureModeSupported:AVCaptureExposureModeCustom]){
[device setExposureMode:AVCaptureExposureModeCustom];
[device setExposureModeCustomWithDuration:exposureTime ISO:exposureISO completionHandler:^(CMTime syncTime) {}];
[device setExposureTargetBias:exposureBIAS completionHandler:^(CMTime syncTime) {}];
}
Regards.
I added:
[device setExposureMode:AVCaptureExposureModeCustom];
and changed my completionHandler from nil to:
completionHandler:^(CMTime syncTime) {}
and I still see the same results. It seems that ISO can only be set to the "standard" values and not to any value between minISO and maxISO.
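Independently of what ends up in the Exif dictionary, the device itself reports the values it actually applied. A minimal sketch of reading them back once the custom exposure has been committed (assuming currentCaptureDevice is the same locked device as above):
// Minimal sketch: read back the values the hardware actually applied (iOS 8+ properties).
[currentCaptureDevice setExposureModeCustomWithDuration:exposureTime
                                                    ISO:isoValue
                                      completionHandler:^(CMTime syncTime) {
    // These reflect the applied settings, which may differ slightly from what was requested.
    float appliedISO = currentCaptureDevice.ISO;
    CMTime appliedDuration = currentCaptureDevice.exposureDuration;
    NSLog(@"applied ISO=%.1f exposure=%.6f s", appliedISO, CMTimeGetSeconds(appliedDuration));
}];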
I'm using AVFoundation. I want to record video using both the front and back cameras. I record video with one camera, but when I change the camera mode from back to front, the camera freezes. Is it possible to record video continuously while switching between both cameras?
Sample Code:
- (void) startup
{
if (_session == nil)
{
NSLog(@"Starting up server");
self.isCapturing = NO;
self.isPaused = NO;
_currentFile = 0;
_discont = NO;
// create capture device with video input
_session = [[AVCaptureSession alloc] init];
AVCaptureDevice *backCamera = [self frontCamera];
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
[_session addInput:input];
// audio input from default mic
AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
[_session addInput:micinput];
// create an output for YUV output with self as delegate
_captureQueue = dispatch_queue_create("com.softcraftsystems.comss", DISPATCH_QUEUE_SERIAL);
AVCaptureVideoDataOutput* videoout = [[AVCaptureVideoDataOutput alloc] init];
[videoout setSampleBufferDelegate:self queue:_captureQueue];
NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
videoout.videoSettings = setcapSettings;
[_session addOutput:videoout];
_videoConnection = [videoout connectionWithMediaType:AVMediaTypeVideo];
// find the actual dimensions used so we can set up the encoder to the same.
NSDictionary* actual = videoout.videoSettings;
_cy = [[actual objectForKey:@"Height"] integerValue];
_cx = [[actual objectForKey:@"Width"] integerValue];
AVCaptureAudioDataOutput* audioout = [[AVCaptureAudioDataOutput alloc] init];
[audioout setSampleBufferDelegate:self queue:_captureQueue];
[_session addOutput:audioout];
_audioConnection = [audioout connectionWithMediaType:AVMediaTypeAudio];
// for audio, we want the channels and sample rate, but we can't get those from audioout.audiosettings on ios, so
// we need to wait for the first sample
// start capture and a preview layer
[_session startRunning];
_preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
}
- (AVCaptureDevice *)frontCamera
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionFront) {
return device;
}
}
return nil;
}
- (AVCaptureDevice *)backCamera
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionBack) {
return device;
}
}
return nil;
}
- (void) startupFront
{
_session = nil;
[_session stopRunning];
if (_session == nil)
{
NSLog(@"Starting up server");
self.isCapturing = NO;
self.isPaused = NO;
_currentFile = 0;
_discont = NO;
// create capture device with video input
_session = [[AVCaptureSession alloc] init];
AVCaptureDevice *backCamera = [self backCamera];
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
[_session addInput:input];
// audio input from default mic
AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
[_session addInput:micinput];
// create an output for YUV output with self as delegate
_captureQueue = dispatch_queue_create("com.softcraftsystems.comss", DISPATCH_QUEUE_SERIAL);
AVCaptureVideoDataOutput* videoout = [[AVCaptureVideoDataOutput alloc] init];
[videoout setSampleBufferDelegate:self queue:_captureQueue];
NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
videoout.videoSettings = setcapSettings;
[_session addOutput:videoout];
_videoConnection = [videoout connectionWithMediaType:AVMediaTypeVideo];
// find the actual dimensions used so we can set up the encoder to the same.
NSDictionary* actual = videoout.videoSettings;
_cy = [[actual objectForKey:@"Height"] integerValue];
_cx = [[actual objectForKey:@"Width"] integerValue];
AVCaptureAudioDataOutput* audioout = [[AVCaptureAudioDataOutput alloc] init];
[audioout setSampleBufferDelegate:self queue:_captureQueue];
[_session addOutput:audioout];
_audioConnection = [audioout connectionWithMediaType:AVMediaTypeAudio];
// for audio, we want the channels and sample rate, but we can't get those from audioout.audiosettings on ios, so
// we need to wait for the first sample
// start capture and a preview layer
[_session startRunning];
_preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
}
- (void) startCapture
{
@synchronized(self)
{
if (!self.isCapturing)
{
NSLog(@"starting capture");
// create the encoder once we have the audio params
_encoder = nil;
self.isPaused = NO;
_discont = NO;
_timeOffset = CMTimeMake(0, 0);
self.isCapturing = YES;
}
}
}
- (void) stopCapture
{
@synchronized(self)
{
if (self.isCapturing)
{
NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile];
NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
NSURL* url = [NSURL fileURLWithPath:path];
_currentFile++;
// serialize with audio and video capture
self.isCapturing = NO;
dispatch_async(_captureQueue, ^{
[_encoder finishWithCompletionHandler:^{
self.isCapturing = NO;
_encoder = nil;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:url completionBlock:^(NSURL *assetURL, NSError *error){
NSLog(@"save completed");
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
}];
}];
});
}
}
}
- (void) pauseCapture
{
@synchronized(self)
{
if (self.isCapturing)
{
NSLog(@"Pausing capture");
self.isPaused = YES;
_discont = YES;
}
}
}
- (void) resumeCapture
{
@synchronized(self)
{
if (self.isPaused)
{
NSLog(@"Resuming capture");
self.isPaused = NO;
}
}
}
- (CMSampleBufferRef) adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset
{
CMItemCount count;
CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
for (CMItemCount i = 0; i < count; i++)
{
pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
}
CMSampleBufferRef sout;
CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
free(pInfo);
return sout;
}
- (void) setAudioFormat:(CMFormatDescriptionRef) fmt
{
const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
_samplerate = asbd->mSampleRate;
_channels = asbd->mChannelsPerFrame;
}
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
BOOL bVideo = YES;
@synchronized(self)
{
if (!self.isCapturing || self.isPaused)
{
return;
}
if (connection != _videoConnection)
{
bVideo = NO;
}
if ((_encoder == nil) && !bVideo)
{
CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
[self setAudioFormat:fmt];
NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile];
NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
_encoder = [VideoEncoder encoderForPath:path Height:_cy width:_cx channels:_channels samples:_samplerate];
}
if (_discont)
{
if (bVideo)
{
return;
}
_discont = NO;
// calc adjustment
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime last = bVideo ? _lastVideo : _lastAudio;
if (last.flags & kCMTimeFlags_Valid)
{
if (_timeOffset.flags & kCMTimeFlags_Valid)
{
pts = CMTimeSubtract(pts, _timeOffset);
}
CMTime offset = CMTimeSubtract(pts, last);
NSLog(@"Setting offset from %s", bVideo?"video": "audio");
NSLog(@"Adding %f to %f (pts %f)", ((double)offset.value)/offset.timescale, ((double)_timeOffset.value)/_timeOffset.timescale, ((double)pts.value/pts.timescale));
// this stops us having to set a scale for _timeOffset before we see the first video time
if (_timeOffset.value == 0)
{
_timeOffset = offset;
}
else
{
_timeOffset = CMTimeAdd(_timeOffset, offset);
}
}
_lastVideo.flags = 0;
_lastAudio.flags = 0;
}
// retain so that we can release either this or modified one
CFRetain(sampleBuffer);
if (_timeOffset.value > 0)
{
CFRelease(sampleBuffer);
sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
}
// record most recent time so we know the length of the pause
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
if (dur.value > 0)
{
pts = CMTimeAdd(pts, dur);
}
if (bVideo)
{
_lastVideo = pts;
}
else
{
_lastAudio = pts;
}
}
// pass frame to encoder
[_encoder encodeFrame:sampleBuffer isVideo:bVideo];
CFRelease(sampleBuffer);
}
- (void) shutdown
{
NSLog(@"shutting down server");
if (_session)
{
[_session stopRunning];
_session = nil;
}
[_encoder finishWithCompletionHandler:^{
NSLog(@"Capture completed");
}];
}
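One thing to note in startupFront above: _session is set to nil before stopRunning is called, so the old session is never actually stopped before everything is rebuilt. Below is a minimal sketch of switching the video device on the existing, running session instead of recreating it (an illustration; the switchToCamera: name is made up, and it assumes _session already has one video input):
// Minimal sketch (hypothetical helper): swap the camera on the running session.
- (void)switchToCamera:(AVCaptureDevice *)newCamera
{
    [_session beginConfiguration];
    // Remove the current video input, leaving the microphone input in place.
    for (AVCaptureDeviceInput *input in _session.inputs) {
        if ([input isKindOfClass:[AVCaptureDeviceInput class]] && [input.device hasMediaType:AVMediaTypeVideo]) {
            [_session removeInput:input];
        }
    }
    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:&error];
    if (newInput && [_session canAddInput:newInput]) {
        [_session addInput:newInput];
    }
    [_session commitConfiguration];
}
Called as, for example, [self switchToCamera:[self frontCamera]], this keeps the preview layer and the audio input alive while the video device changes.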
In my opinion, it is not possible to continue recording when you switch the camera,
because there is a resolution and quality difference between the two cameras, and a video can have only one resolution and quality throughout.
Secondly, every time you switch between cameras, the camera has to be allocated and initialized again.
Unfortunately, I don't think it is possible.
But if you find a solution, please do tell me.
I want to change the container of .mov video files that I pick using
UIImagePickerController and compress via AVAssetExportSession with AVAssetExportPresetMediumQuality and shouldOptimizeForNetworkUse = YES to an .mp4 container.
I need a programmatic way / sample code to perform the fastest possible trans-wrap in an iPhone/iPad application.
I tried to set the AVAssetExportSession.outputFileType property to AVFileTypeMPEG4, but it is not supported and I got an exception.
I tried to do this transform using AVAssetWriter by specifying fileType:AVFileTypeMPEG4. I did get an .mp4 output file, but it was not a trans-wrap: the output file was 3x bigger than the source, and the conversion took 128 seconds for a video with a 60-second duration.
I need a solution that runs quickly and keeps the file size.
This is the code I use to convert .mov to .mp4:
I set the assetWriter options in the setUpReaderAndWriterReturningError method.
#import "MCVideoConverter.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVAssetReader.h>
#import <AVFoundation/AVAssetReaderOutput.h>
#import <AVFoundation/AVAssetWriter.h>
#import <AVFoundation/AVAssetWriterInput.h>
#import <AVFoundation/AVMediaFormat.h>
#import <AVFoundation/AVAudioSettings.h>
#import <AVFoundation/AVVideoSettings.h>
#import <AVFoundation/AVAssetImageGenerator.h>
#import <AVFoundation/AVTime.h>
#import <CoreMedia/CMSampleBuffer.h>
@protocol RWSampleBufferChannelDelegate;
@interface RWSampleBufferChannel : NSObject
{
@private
AVAssetReaderOutput *assetReaderOutput;
AVAssetWriterInput *assetWriterInput;
dispatch_block_t completionHandler;
dispatch_queue_t serializationQueue;
BOOL finished; // only accessed on serialization queue
}
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)assetReaderOutput assetWriterInput:(AVAssetWriterInput *)assetWriterInput;
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)completionHandler; // delegate is retained until completion handler is called. Completion handler is guaranteed to be called exactly once, whether reading/writing finishes, fails, or is cancelled. Delegate may be nil.
- (void)cancel;
@property (nonatomic, readonly) NSString *mediaType;
@end
@protocol RWSampleBufferChannelDelegate <NSObject>
@required
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
@interface MCVideoConverter () <RWSampleBufferChannelDelegate>
// These three methods are always called on the serialization dispatch queue
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError; // make sure "tracks" key of asset is loaded before calling this
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError;
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error;
@end
@implementation MCVideoConverter
+ (NSArray *)readableTypes
{
return [AVURLAsset audiovisualTypes];;
}
+ (BOOL)canConcurrentlyReadDocumentsOfType:(NSString *)typeName
{
return YES;
}
- (id)init
{
self = [super init];
if (self)
{
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[asset release];
[outputURL release];
[assetReader release];
[assetWriter release];
[audioSampleBufferChannel release];
[videoSampleBufferChannel release];
if (serializationQueue)
dispatch_release(serializationQueue);
[super dealloc];
}
@synthesize asset=asset;
@synthesize timeRange=timeRange;
@synthesize writingSamples=writingSamples;
@synthesize outputURL=outputURL;
@synthesize propgerssView;
- (void)convertVideo:(NSURL*) inputURL outputURL: (NSURL*) _outputURL progress:(UIProgressView*) _propgerssView
{
self.asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
self.propgerssView = _propgerssView;
cancelled = NO;
[self performSelector:@selector(startProgressSheetWithURL:) withObject:_outputURL afterDelay:0.0]; // avoid starting a new sheet while in
}
- (void)startProgressSheetWithURL:(NSURL *)localOutputURL
{
[self setOutputURL:localOutputURL];
[self setWritingSamples:YES];
AVAsset *localAsset = [self asset];
[localAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"tracks", @"duration", nil] completionHandler:^
{
// Dispatch the setup work to the serialization queue, to ensure this work is serialized with potential cancellation
dispatch_async(serializationQueue, ^{
// Since we are doing these things asynchronously, the user may have already cancelled on the main thread. In that case, simply return from this block
if (cancelled)
return;
BOOL success = YES;
NSError *localError = nil;
success = ([localAsset statusOfValueForKey:@"tracks" error:&localError] == AVKeyValueStatusLoaded);
if (success)
success = ([localAsset statusOfValueForKey:@"duration" error:&localError] == AVKeyValueStatusLoaded);
if (success)
{
[self setTimeRange:CMTimeRangeMake(kCMTimeZero, [localAsset duration])];
// AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists
NSFileManager *fm = [NSFileManager defaultManager];
NSString *localOutputPath = [localOutputURL path];
if ([fm fileExistsAtPath:localOutputPath])
success = [fm removeItemAtPath:localOutputPath error:&localError];
}
// Set up the AVAssetReader and AVAssetWriter, then begin writing samples or flag an error
if (success)
success = [self setUpReaderAndWriterReturningError:&localError];
if (success)
success = [self startReadingAndWritingReturningError:&localError];
if (!success)
[self readingAndWritingDidFinishSuccessfully:success withError:localError];
});
}];
}
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
AVAsset *localAsset = [self asset];
NSURL *localOutputURL = [self outputURL];
// Create asset reader and asset writer
assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&localError];
success = (assetReader != nil);
if (success)
{
//changed assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeQuickTimeMovie error:&localError];
assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeMPEG4 error:&localError];
success = (assetWriter != nil);
}
// Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
if (success)
{
AVAssetTrack *audioTrack = nil, *videoTrack = nil;
// Grab first audio track and first video track, if the asset has them
NSArray *audioTracks = [localAsset tracksWithMediaType:AVMediaTypeAudio];
if ([audioTracks count] > 0)
audioTrack = [audioTracks objectAtIndex:0];
NSArray *videoTracks = [localAsset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0)
videoTrack = [videoTracks objectAtIndex:0];
if (audioTrack)
{
// Decompress to Linear PCM with the asset reader
NSDictionary *decompressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM], AVFormatIDKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:decompressionAudioSettings];
[assetReader addOutput:output];
AudioChannelLayout stereoChannelLayout = {
.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
.mChannelBitmap = 0,
.mNumberChannelDescriptions = 0
};
NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
// Compress to 128kbps AAC with the asset writer
NSDictionary *compressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInteger:128000], AVEncoderBitRateKey,
[NSNumber numberWithInteger:44100], AVSampleRateKey,
channelLayoutAsData, AVChannelLayoutKey,
[NSNumber numberWithUnsignedInteger:2], AVNumberOfChannelsKey,
nil];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[audioTrack mediaType] outputSettings:compressionAudioSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
audioSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
if (videoTrack)
{
// Decompress to ARGB with the asset reader
NSDictionary *decompressionVideoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (id)kCVPixelBufferPixelFormatTypeKey,
[NSDictionary dictionary], (id)kCVPixelBufferIOSurfacePropertiesKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:decompressionVideoSettings];
[assetReader addOutput:output];
// Get the format description of the track, to fill in attributes of the video stream that we don't want to change
CMFormatDescriptionRef formatDescription = NULL;
NSArray *formatDescriptions = [videoTrack formatDescriptions];
if ([formatDescriptions count] > 0)
formatDescription = (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];
// Grab track dimensions from format description
CGSize trackDimensions = {
.width = 0.0,
.height = 0.0,
};
if (formatDescription)
trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false);
else
trackDimensions = [videoTrack naturalSize];
// Grab clean aperture, pixel aspect ratio from format description
NSMutableDictionary *compressionSettings = nil;
// [NSMutableDictionary dictionaryWithObjectsAndKeys:
// AVVideoProfileLevelH264Baseline30, AVVideoProfileLevelKey,
// [NSNumber numberWithInt:960000], AVVideoAverageBitRateKey,
// [NSNumber numberWithInt:1],AVVideoMaxKeyFrameIntervalKey,
// nil ];
//NSDictionary *videoSettings = nil;
if (formatDescription)
{
NSDictionary *cleanAperture = nil;
NSDictionary *pixelAspectRatio = nil;
CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture);
if (cleanApertureFromCMFormatDescription)
{
cleanAperture = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth), AVVideoCleanApertureWidthKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight), AVVideoCleanApertureHeightKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset), AVVideoCleanApertureHorizontalOffsetKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset), AVVideoCleanApertureVerticalOffsetKey,
nil];
}
CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio);
if (pixelAspectRatioFromCMFormatDescription)
{
pixelAspectRatio = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing), AVVideoPixelAspectRatioHorizontalSpacingKey,
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing), AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
}
if (cleanAperture || pixelAspectRatio)
{
if (cleanAperture)
[compressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey];
if (pixelAspectRatio)
[compressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey];
}
}
// Compress to H.264 with the asset writer
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithDouble:trackDimensions.width], AVVideoWidthKey,
[NSNumber numberWithDouble:trackDimensions.height], AVVideoHeightKey,
nil];
if (compressionSettings)
[videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[videoTrack mediaType] outputSettings:videoSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
videoSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
}
if (outError)
*outError = localError;
return success;
}
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
// Instruct the asset reader and asset writer to get ready to do work
success = [assetReader startReading];
if (!success)
localError = [assetReader error];
if (success)
{
success = [assetWriter startWriting];
if (!success)
localError = [assetWriter error];
}
if (success)
{
dispatch_group_t dispatchGroup = dispatch_group_create();
// Start a sample-writing session
[assetWriter startSessionAtSourceTime:[self timeRange].start];
// Start reading and writing samples
if (audioSampleBufferChannel)
{
// Only set audio delegate for audio-only assets, else let the video channel drive progress
id <RWSampleBufferChannelDelegate> delegate = nil;
if (!videoSampleBufferChannel)
delegate = self;
dispatch_group_enter(dispatchGroup);
[audioSampleBufferChannel startWithDelegate:delegate completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
if (videoSampleBufferChannel)
{
dispatch_group_enter(dispatchGroup);
[videoSampleBufferChannel startWithDelegate:self completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
// Set up a callback for when the sample writing is finished
dispatch_group_notify(dispatchGroup, serializationQueue, ^{
BOOL finalSuccess = YES;
NSError *finalError = nil;
if (cancelled)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
else
{
if ([assetReader status] == AVAssetReaderStatusFailed)
{
finalSuccess = NO;
finalError = [assetReader error];
}
if (finalSuccess)
{
finalSuccess = [assetWriter finishWriting];
if (!finalSuccess)
finalError = [assetWriter error];
}
}
[self readingAndWritingDidFinishSuccessfully:finalSuccess withError:finalError];
});
dispatch_release(dispatchGroup);
}
if (outError)
*outError = localError;
return success;
}
- (void)cancel
{
self.propgerssView = nil;
// Dispatch cancellation tasks to the serialization queue to avoid races with setup and teardown
dispatch_async(serializationQueue, ^{
[audioSampleBufferChannel cancel];
[videoSampleBufferChannel cancel];
cancelled = YES;
});
}
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error
{
NSLog(@"%s[%d] - success = %d error = %@", __FUNCTION__, __LINE__, success, error);
if (!success)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
// Tear down ivars
[assetReader release];
assetReader = nil;
[assetWriter release];
assetWriter = nil;
[audioSampleBufferChannel release];
audioSampleBufferChannel = nil;
[videoSampleBufferChannel release];
videoSampleBufferChannel = nil;
cancelled = NO;
// Dispatch UI-related tasks to the main queue
dispatch_async(dispatch_get_main_queue(), ^{
if (!success)
{
}
[self setWritingSamples:NO];
});
}
static double progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer, CMTimeRange timeRange)
{
CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
progressTime = CMTimeSubtract(progressTime, timeRange.start);
CMTime sampleDuration = CMSampleBufferGetDuration(sampleBuffer);
if (CMTIME_IS_NUMERIC(sampleDuration))
progressTime= CMTimeAdd(progressTime, sampleDuration);
return CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(timeRange.duration);
}
static void removeARGBColorComponentOfPixelBuffer(CVPixelBufferRef pixelBuffer, size_t componentIndex)
{
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
static const size_t bytesPerPixel = 4; // constant for ARGB pixel format
unsigned char *base = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
for (size_t row = 0; row < bufferHeight; ++row)
{
for (size_t column = 0; column < bufferWidth; ++column)
{
unsigned char *pixel = base + (row * bytesPerRow) + (column * bytesPerPixel);
pixel[componentIndex] = 0;
}
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
+ (size_t)componentIndexFromFilterTag:(NSInteger)filterTag
{
return (size_t)filterTag; // we set up the tags in the popup button to correspond directly with the index they modify
}
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
CVPixelBufferRef pixelBuffer = NULL;
// Calculate progress (scale of 0.0 to 1.0)
double progress = progressOfSampleBufferInTimeRange(sampleBuffer, [self timeRange]);
NSLog(@"%s[%d] - progress = %f", __FUNCTION__, __LINE__, progress);
// Grab the pixel buffer from the sample buffer, if possible
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer && (CFGetTypeID(imageBuffer) == CVPixelBufferGetTypeID()))
{
pixelBuffer = (CVPixelBufferRef)imageBuffer;
if (filterTag >= 0) // -1 means "no filtering, please"
removeARGBColorComponentOfPixelBuffer(pixelBuffer, [[self class] componentIndexFromFilterTag:filterTag]);
}
}
#end
@interface RWSampleBufferChannel ()
- (void)callCompletionHandlerIfNecessary; // always called on the serialization queue
@end
@implementation RWSampleBufferChannel
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)localAssetReaderOutput assetWriterInput:(AVAssetWriterInput *)localAssetWriterInput
{
self = [super init];
if (self)
{
assetReaderOutput = [localAssetReaderOutput retain];
assetWriterInput = [localAssetWriterInput retain];
finished = NO;
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[assetReaderOutput release];
[assetWriterInput release];
if (serializationQueue)
dispatch_release(serializationQueue);
[completionHandler release];
[super dealloc];
}
- (NSString *)mediaType
{
return [assetReaderOutput mediaType];
}
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)localCompletionHandler
{
completionHandler = [localCompletionHandler copy]; // released in -callCompletionHandlerIfNecessary
[assetWriterInput requestMediaDataWhenReadyOnQueue:serializationQueue usingBlock:^{
if (finished)
return;
BOOL completedOrFailed = NO;
// Read samples in a loop as long as the asset writer input is ready
while ([assetWriterInput isReadyForMoreMediaData] && !completedOrFailed)
{
CMSampleBufferRef sampleBuffer = [assetReaderOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
if ([delegate respondsToSelector:@selector(sampleBufferChannel:didReadSampleBuffer:)])
[delegate sampleBufferChannel:self didReadSampleBuffer:sampleBuffer];
BOOL success = [assetWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
[self callCompletionHandlerIfNecessary];
}];
}
- (void)cancel
{
dispatch_async(serializationQueue, ^{
[self callCompletionHandlerIfNecessary];
});
}
- (void)callCompletionHandlerIfNecessary
{
// Set state to mark that we no longer need to call the completion handler, grab the completion handler, and clear out the ivar
BOOL oldFinished = finished;
finished = YES;
if (oldFinished == NO)
{
[assetWriterInput markAsFinished]; // let the asset writer know that we will not be appending any more samples to this input
dispatch_block_t localCompletionHandler = [completionHandler retain];
[completionHandler release];
completionHandler = nil;
if (localCompletionHandler)
{
localCompletionHandler();
[localCompletionHandler release];
}
}
}
@end
Hey, it has been a long while, but I ended up with a good solution and it may help someone in the future.
My code:
-(void) compressVideo
{
asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
NSLog(@" %@", [AVAssetExportSession exportPresetsCompatibleWithAsset:asset]);
NSLog(@" %@", exportSession.supportedFileTypes);
NSLog(@"----------------------------------------- convert to mp4");
NSLog(@" %@", exportSession.supportedFileTypes);
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = [self outputVideoPath:@"outPut" ext:@"mp4"];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
ICQLog(@" exportSession.status = %d exportSession.error = %@", exportSession.status, exportSession.error);
if ( exportSession && (exportSession.status == AVAssetExportSessionStatusCompleted) )
{
ICQLog(@" exportSession.outputURL = %@", exportSession.outputURL);
// we need to remove temporary files
[[NSFileManager defaultManager] removeItemAtURL:videoUrl error:NULL];
[videoUrl release];
videoUrl = [exportSession.outputURL retain];
}
else
{
//TODO - report error
}
[exportSession release], exportSession = nil;
[asset release], asset = nil;
}];
}
I can't help with the trans-wrap stuff; I haven't gotten my head into that yet.
Is the main priority to get the file output as an .mp4 without having to reprocess it? If it is, then just use .mp4 as the file extension of the movie clip that was output by your code, and this should work fine. I have used this approach today and it works. I didn't have to convert it from .mov to .mp4 because essentially an .mp4 file is the same as a .mov file with some additional standards-based functionality.
Hope this is of help.
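A minimal sketch of that rename approach, assuming movURL points at the recorded .mov file (the movURL name is made up for this example):
// Minimal sketch: keep the bytes as they are and just change the extension,
// since an AVFoundation-produced H.264/AAC .mov is usually playable as .mp4.
NSURL *mp4URL = [[movURL URLByDeletingPathExtension] URLByAppendingPathExtension:@"mp4"];
NSError *moveError = nil;
if (![[NSFileManager defaultManager] moveItemAtURL:movURL toURL:mp4URL error:&moveError]) {
    NSLog(@"Rename failed: %@", moveError);
}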
This is the code I used.
- (BOOL)encodeVideo:(NSURL *)videoURL
{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
// Create the composition and tracks
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *assetVideoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if (assetVideoTracks.count <= 0)
{
NSLog(@"Error reading the transformed video track");
return NO;
}
// Insert the tracks in the composition's tracks
AVAssetTrack *assetVideoTrack = [assetVideoTracks firstObject];
[videoTrack insertTimeRange:assetVideoTrack.timeRange ofTrack:assetVideoTrack atTime:CMTimeMake(0, 1) error:nil];
[videoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioTrack insertTimeRange:assetAudioTrack.timeRange ofTrack:assetAudioTrack atTime:CMTimeMake(0, 1) error:nil];
// Export to mp4
NSString *mp4Quality = [MGPublic isIOSAbove:@"6.0"] ? AVAssetExportPresetMediumQuality : AVAssetExportPresetPassthrough;
NSString *exportPath = [NSString stringWithFormat:@"%@/%@.mp4",
[NSHomeDirectory() stringByAppendingString:@"/tmp"],
[BSCommon uuidString]];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:mp4Quality];
exportSession.outputURL = exportUrl;
CMTime start = CMTimeMakeWithSeconds(0.0, 0);
CMTimeRange range = CMTimeRangeMake(start, [asset duration]);
exportSession.timeRange = range;
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
NSLog(@"MP4 Successful!");
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export canceled");
break;
default:
break;
}
}];
return YES;
}