Hi, I am capturing the screen and encoding it as a video. It works well in the simulator, but when I run it on a device the app crashes. Below I have put my code and the error.
- (void)stopRecording
{
[self.displayLink invalidate];
startTimestamp = 0.0;
dispatch_async(queue, ^
{
//if (deferImageprocess) {
if (self.writer.status != AVAssetWriterStatusCompleted && self.writer.status != AVAssetWriterStatusUnknown) {
[self.writerInput markAsFinished];
}
if ([self.writer respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) {
[self.writer finishWritingWithCompletionHandler:^
{
ALAssetsLibrary* library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:self.pathStr]
completionBlock:^(NSURL *assetURL, NSError *error){
if (!error)
{
NSLog(#"Saved Successfully");
[self finishBackgroundTask];
[self.delegate recordingFinished:[NSString stringWithFormat:#"%#", self.pathStr]];
}
}];
}];
}
else {
[self.writer finishWriting];
ALAssetsLibrary* library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:self.pathStr]
completionBlock:^(NSURL *assetURL, NSError *error){
}];
[self finishBackgroundTask];
[self restartRecordingIfNeeded];
}
});
}
**The code above writes the video file to the Documents directory as well as to the gallery.**
- (void)captureFrame:(CADisplayLink *)displayLink
{
dispatch_async(queue, ^
{
if (self.writerInput.readyForMoreMediaData) {
CVReturn status = kCVReturnSuccess;
CVPixelBufferRef buffer = NULL;
CFTypeRef backingData;
#if APPSTORE_SAFE || TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
__block UIImage *screenshot = nil;
dispatch_sync(dispatch_get_main_queue(), ^{
screenshot = [self screenshot];
});
CGImageRef image = [screenshot CGImage];
CGDataProviderRef dataProvider = CGImageGetDataProvider(image);
CFDataRef data = CGDataProviderCopyData(dataProvider);
backingData = CFDataCreateMutableCopy(kCFAllocatorDefault, CFDataGetLength(data), data);
CFRelease(data);
const UInt8 *bytePtr = CFDataGetBytePtr(backingData);
status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
CGImageGetWidth(image),
CGImageGetHeight(image),
kCVPixelFormatType_32BGRA,
(void *)bytePtr,
CGImageGetBytesPerRow(image),
NULL,
NULL,
NULL,
&buffer);
NSLog(#"%d", status);
NSParameterAssert(status == kCVReturnSuccess && buffer);
#else
CFTypeRef surface = [UIWindow createScreenIOSurface];
backingData = surface;
NSDictionary *pixelBufferAttributes = #{(NSString *)kCVPixelBufferPixelFormatTypeKey : #(kCVPixelFormatType_32BGRA)};
status = CVPixelBufferCreateWithIOSurface(NULL, surface, (__bridge CFDictionaryRef)(pixelBufferAttributes), &buffer);
NSLog(#"%d", status);
NSParameterAssert(status == kCVReturnSuccess && buffer);
#endif
if (buffer) {
CFAbsoluteTime currentTime = CFAbsoluteTimeGetCurrent();
CFTimeInterval elapsedTime = currentTime - firstFrameTime;
CMTime presentTime = CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);
if(![self.writerInputPixelBufferAdaptor appendPixelBuffer:buffer withPresentationTime:presentTime]) {
[self stopRecording];
NSLog(#"Failed");
}
CVPixelBufferRelease(buffer);
}
CFRelease(backingData);
}
});
if (startTimestamp == 0.0) {
startTimestamp = displayLink.timestamp;
}
NSTimeInterval delta = displayLink.timestamp - startTimestamp;
if (self.autosaveDuration > 0 && delta > self.autosaveDuration) {
startTimestamp = 0.0;
[self rotateFile];
}
}
The code above captures the frames.
Now I get this error when stopRecording is called:
Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '*** -[AVAssetWriterInput markAsFinished] Cannot call method when status is 2'.
I don't know what the problem is here. Please give me any suggestions.
This error is caused by one of the following:
Marking the writer input as finished when the writer's status is 2 (AVAssetWriterStatusCompleted).
Trying to reuse your AVAssetWriter instance. Remember to always create a new instance when you need to start a recording.
Threading issues: create your own serial queue instead of relying on the default queues provided by the SDK, which are concurrent rather than serial (see the sketch below).
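A minimal sketch of the first and third points, assuming the same writer, writerInput, and queue names used in the question (the finishWritingSafely helper itself is hypothetical):

// Create the queue once, as a serial queue, so frame appends and the stop
// request cannot race each other.
queue = dispatch_queue_create("com.example.screenrecorder", DISPATCH_QUEUE_SERIAL);

- (void)finishWritingSafely
{
    dispatch_async(queue, ^{
        // markAsFinished is only legal while the writer is actually writing;
        // calling it after the writer has completed (status 2) throws.
        if (self.writer.status == AVAssetWriterStatusWriting) {
            [self.writerInput markAsFinished];
            [self.writer finishWritingWithCompletionHandler:^{
                // Create a brand-new AVAssetWriter before the next recording;
                // a finished writer cannot be reused.
            }];
        }
    });
}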
Related
I want to record a UIView in iOS. I have tried, but I am not getting a clear video. A screenshot is here:
[1]: https://i.stack.imgur.com/Gmwkr.png Please suggest a fix; it looks like there is a problem with the frame I am passing.
#import "screerecorder.h"
#import <QuartzCore/QuartzCore.h>
#import <MobileCoreServices/UTCoreTypes.h>
#import <AssetsLibrary/AssetsLibrary.h>
@interface screerecorder(Private)
- (void) writeVideoFrameAtTime:(CMTime)time;
@end
@implementation screerecorder
@synthesize currentScreen, frameRate, delegate;
- (void) initialize {
// Initialization code
self.clearsContextBeforeDrawing = YES;
self.currentScreen = nil;
self.frameRate = 10.0f; //10 frames per seconds
_recording = false;
videoWriter = nil;
videoWriterInput = nil;
avAdaptor = nil;
startedAt = nil;
bitmapData = NULL;
}
- (id) initWithCoder:(NSCoder *)aDecoder {
self = [super initWithCoder:aDecoder];
if (self) {
[self initialize];
}
return self;
}
- (id) init {
self = [super init];
if (self) {
[self initialize];
}
return self;
}
- (id)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self) {
[self initialize];
}
return self;
}
- (CGContextRef) createBitmapContextOfSize:(CGSize) size {
CGContextRef context = NULL;
CGColorSpaceRef colorSpace;
int bitmapByteCount;
int bitmapBytesPerRow;
bitmapBytesPerRow = (size.width * 4);
bitmapByteCount = (bitmapBytesPerRow * size.height);
colorSpace = CGColorSpaceCreateDeviceRGB();
if (bitmapData != NULL) {
free(bitmapData);
}
bitmapData = malloc( bitmapByteCount );
if (bitmapData == NULL) {
fprintf (stderr, "Memory not allocated!");
return NULL;
}
context = CGBitmapContextCreate (bitmapData,
size.width,
size.height,
8, // bits per component
bitmapBytesPerRow,
colorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextSetAllowsAntialiasing(context,NO);
if (context== NULL) {
free (bitmapData);
fprintf (stderr, "Context not created!");
return NULL;
}
CGColorSpaceRelease( colorSpace );
return context;
}
//static int frameCount = 0; //debugging
- (void) drawRect:(CGRect)rect {
NSDate* start = [NSDate date];
CGContextRef context = [self createBitmapContextOfSize:self.frame.size];
//not sure why this is necessary...image renders upside-down and mirrored
// CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
//CGContextConcatCTM(context, flipVertical);
[self.layer renderInContext:context];
CGImageRef cgImage = CGBitmapContextCreateImage(context);
UIImage* background = [UIImage imageWithCGImage: cgImage];
// CGImageRelease(cgImage);
self.currentScreen = background;
//debugging
//if (frameCount < 40) {
// NSString* filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
// NSString* pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
// [UIImagePNGRepresentation(self.currentScreen) writeToFile: pngPath atomically: YES];
// frameCount++;
//}
//NOTE: to record a scrollview while it is scrolling you need to implement your UIScrollViewDelegate such that it calls
// 'setNeedsDisplay' on the ScreenCaptureView.
if (_recording) {
float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
[self writeVideoFrameAtTime:CMTimeMake((int)millisElapsed, 1000)];
}
float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
float delayRemaining = (1.0 / self.frameRate) - processingSeconds;
CGContextRelease(context);
//redraw at the specified framerate
[self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:delayRemaining > 0.0 ? delayRemaining : 0.01];
}
- (void) cleanupWriter {
avAdaptor = nil;
videoWriterInput = nil;
videoWriter = nil;
startedAt = nil;
if (bitmapData != NULL) {
free(bitmapData);
bitmapData = NULL;
}
}
- (void)dealloc {
[self cleanupWriter];
}
- (NSURL*) tempFileURL {
NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
NSURL* outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSError* error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
NSLog(@"Could not delete old recording file at path: %@", outputPath);
}
}
return outputURL;
}
-(BOOL) setUpWriter {
NSError* error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
//Configure video
NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
[NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] ;
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes] ;
//add input
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
return YES;
}
- (void) completeRecordingSession {
[videoWriterInput markAsFinished];
// Wait for the video
int status = videoWriter.status;
while (status == AVAssetWriterStatusUnknown) {
NSLog(#"Waiting...");
[NSThread sleepForTimeInterval:0.5f];
status = videoWriter.status;
}
#synchronized(self) {
BOOL success = [videoWriter finishWriting];
if (!success) {
NSLog(#"finishWriting returned NO");
}
[self cleanupWriter];
id delegateObj = self.delegate;
NSString *outputPath = [[NSString alloc] initWithFormat:#"%#/%#", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], #"output.mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSLog(#"Completed recording, file is stored at: %#", outputURL);
obj = [[ViewController alloc]init];
[obj mergeAudioandvideo:outputURL];
if ([delegateObj respondsToSelector:#selector(recordingFinished:)]) {
[delegateObj performSelectorOnMainThread:#selector(recordingFinished:) withObject:(success ? outputURL : nil) waitUntilDone:YES];
}
}
}
- (bool) startRecording {
bool result = NO;
@synchronized(self) {
if (! _recording) {
result = [self setUpWriter];
startedAt = [NSDate date];
_recording = true;
}
}
return result;
}
- (void) stopRecording {
@synchronized(self) {
if (_recording) {
_recording = false;
[self completeRecordingSession];
}
}
}
-(void) writeVideoFrameAtTime:(CMTime)time {
if (![videoWriterInput isReadyForMoreMediaData]) {
NSLog(#"Not ready for video data");
}
else {
#synchronized (self) {
UIImage* newFrame = self.currentScreen;
CVPixelBufferRef pixelBuffer = NULL;
CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
if(status != 0){
//could not get a buffer from the pool
NSLog(#"Error creating pixel buffer: status=%d", status);
}
// set image data into pixel buffer
CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
uint8_t* destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels); //XXX: will work if the pixel buffer is contiguous and has the same bytesPerRow as the input data
if(status == 0){
BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success)
NSLog(#"Warning: Unable to write buffer to video");
}
//clean up
CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
CVPixelBufferRelease( pixelBuffer );
CFRelease(image);
CGImageRelease(cgImage);
}
}
}
@end
You can try this library - Glimpse. It can record any UIView.
I am trying to screen capture a view that has APPLCameraViewContoller in it. For some reason, when it gets saved to the camera roll, no matter what I do, what the camera is looking at does not get captured; only the UIView with labels does, so it is a black background with the labels. I want to have the labels on top of the camera view. Any suggestions or examples on how to go about this? Here is the screen capture .m, which I am assuming is the reason why this is happening.
@interface ASScreenRecorder()
@property (strong, nonatomic) AVAssetWriter *videoWriter;
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
@property (strong, nonatomic) CADisplayLink *displayLink;
@property (strong, nonatomic) NSDictionary *outputBufferPoolAuxAttributes;
@property (nonatomic) CFTimeInterval firstTimeStamp;
@property (nonatomic) BOOL isRecording;
@end
@implementation ASScreenRecorder
{
dispatch_queue_t _render_queue;
dispatch_queue_t _append_pixelBuffer_queue;
dispatch_semaphore_t _frameRenderingSemaphore;
dispatch_semaphore_t _pixelAppendSemaphore;
CGSize _viewSize;
CGFloat _scale;
CGColorSpaceRef _rgbColorSpace;
CVPixelBufferPoolRef _outputBufferPool;
}
#pragma mark - initializers
+ (instancetype)sharedInstance {
static dispatch_once_t once;
static ASScreenRecorder *sharedInstance;
dispatch_once(&once, ^{
sharedInstance = [[self alloc] init];
});
return sharedInstance;
}
- (instancetype)init
{
self = [super init];
if (self) {
_viewSize = [UIApplication sharedApplication].delegate.window.bounds.size;
_scale = [UIScreen mainScreen].scale;
// record half size resolution for retina iPads
if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) {
_scale = 1.0;
}
_isRecording = NO;
_append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL);
_render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_render_queue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0));
_frameRenderingSemaphore = dispatch_semaphore_create(1);
_pixelAppendSemaphore = dispatch_semaphore_create(1);
}
return self;
}
#pragma mark - public
- (void)setVideoURL:(NSURL *)videoURL
{
NSAssert(!_isRecording, @"videoURL can not be changed whilst recording is in progress");
_videoURL = videoURL;
}
- (BOOL)startRecording
{
if (!_isRecording) {
[self setUpWriter];
_isRecording = (_videoWriter.status == AVAssetWriterStatusWriting);
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)];
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}
return _isRecording;
}
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
{
if (_isRecording) {
_isRecording = NO;
[_displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
[self completeRecordingSession:completionBlock];
}
}
#pragma mark - private
-(void)setUpWriter
{
_rgbColorSpace = CGColorSpaceCreateDeviceRGB();
NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
(id)kCVPixelBufferWidthKey : @(_viewSize.width * _scale),
(id)kCVPixelBufferHeightKey : @(_viewSize.height * _scale),
(id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * _scale * 4)
};
_outputBufferPool = NULL;
CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool);
NSError* error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:self.videoURL ?: [self tempFileURL]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
NSInteger pixelNumber = _viewSize.width * _viewSize.height * _scale;
NSDictionary* videoCompression = @{AVVideoAverageBitRateKey: @(pixelNumber * 11.4)};
NSDictionary* videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithInt:_viewSize.width*_scale],
AVVideoHeightKey: [NSNumber numberWithInt:_viewSize.height*_scale],
AVVideoCompressionPropertiesKey: videoCompression};
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
_videoWriterInput.transform = [self videoTransformForDeviceOrientation];
_avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil];
[_videoWriter addInput:_videoWriterInput];
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
}
- (CGAffineTransform)videoTransformForDeviceOrientation
{
CGAffineTransform videoTransform;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationLandscapeLeft:
videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
break;
case UIDeviceOrientationLandscapeRight:
videoTransform = CGAffineTransformMakeRotation(M_PI_2);
break;
case UIDeviceOrientationPortraitUpsideDown:
videoTransform = CGAffineTransformMakeRotation(M_PI);
break;
default:
videoTransform = CGAffineTransformIdentity;
}
return videoTransform;
}
- (NSURL*)tempFileURL
{
NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:#"tmp/screenCapture.mp4"];
[self removeTempFilePath:outputPath];
return [NSURL fileURLWithPath:outputPath];
}
- (void)removeTempFilePath:(NSString*)filePath
{
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath]) {
NSError* error;
if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
NSLog(#"Could not delete old recording:%#", [error localizedDescription]);
}
}
}
- (void)completeRecordingSession:(VideoCompletionBlock)completionBlock;
{
dispatch_async(_render_queue, ^{
dispatch_sync(_append_pixelBuffer_queue, ^{
[_videoWriterInput markAsFinished];
[_videoWriter finishWritingWithCompletionHandler:^{
void (^completion)(void) = ^() {
[self cleanup];
dispatch_async(dispatch_get_main_queue(), ^{
if (completionBlock) completionBlock();
});
};
if (self.videoURL) {
completion();
} else {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
NSLog(#"Error copying video to camera roll:%#", [error localizedDescription]);
} else {
[self removeTempFilePath:_videoWriter.outputURL.path];
completion();
}
}];
}
}];
});
});
}
- (void)cleanup
{
self.avAdaptor = nil;
self.videoWriterInput = nil;
self.videoWriter = nil;
self.firstTimeStamp = 0;
self.outputBufferPoolAuxAttributes = nil;
CGColorSpaceRelease(_rgbColorSpace);
CVPixelBufferPoolRelease(_outputBufferPool);
}
- (void)writeVideoFrame
{
// throttle the number of frames to prevent meltdown
// technique gleaned from Brad Larson's answer here: http://stackoverflow.com/a/5956119
if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
return;
}
dispatch_async(_render_queue, ^{
if (![_videoWriterInput isReadyForMoreMediaData]) return;
if (!self.firstTimeStamp) {
self.firstTimeStamp = _displayLink.timestamp;
}
CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp);
CMTime time = CMTimeMakeWithSeconds(elapsed, 1000);
CVPixelBufferRef pixelBuffer = NULL;
CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
if (self.delegate) {
[self.delegate writeBackgroundFrameInContext:&bitmapContext];
}
// draw each window into the context (other windows include UIKeyboard, UIAlert)
// FIX: UIKeyboard is currently only rendered correctly in portrait orientation
dispatch_sync(dispatch_get_main_queue(), ^{
UIGraphicsPushContext(bitmapContext); {
for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
[window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
}
} UIGraphicsPopContext();
});
// append pixelBuffer on a async dispatch_queue, the next frame is rendered whilst this one appends
// must not overwhelm the queue with pixelBuffers, therefore:
// check if _append_pixelBuffer_queue is ready
// if it’s not ready, release pixelBuffer and bitmapContext
if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) {
dispatch_async(_append_pixelBuffer_queue, ^{
BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success) {
NSLog(#"Warning: Unable to write buffer to video");
}
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
dispatch_semaphore_signal(_pixelAppendSemaphore);
});
} else {
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
}
dispatch_semaphore_signal(_frameRenderingSemaphore);
});
}
- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer
{
CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer);
CVPixelBufferLockBaseAddress(*pixelBuffer, 0);
CGContextRef bitmapContext = NULL;
bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer),
CVPixelBufferGetWidth(*pixelBuffer),
CVPixelBufferGetHeight(*pixelBuffer),
8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
);
CGContextScaleCTM(bitmapContext, _scale, _scale);
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
CGContextConcatCTM(bitmapContext, flipVertical);
return bitmapContext;
}
I would be more than happy to provide my full source code to anyone who could tackle something like this because posting multiple .m's on this would take up a lot of space.
I'm not sure if your situation is similar, but I've been using ASScreenRecorder to record an ARSceneView, and that has been going fine. Have a look at the following link: you can provide it a view to render, and it records and provides an output URL. You might have to make a small edit to the class to get the stopRecording function's completion handler to work.
https://github.com/alskipp/ASScreenRecorder
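Based on the public interface shown in the ASScreenRecorder code above, basic usage is roughly the following sketch (error handling and the output location are up to you):

ASScreenRecorder *recorder = [ASScreenRecorder sharedInstance];

// Optionally point recorder.videoURL at your own file; otherwise the class
// saves the finished movie to the camera roll.
if (![recorder startRecording]) {
    NSLog(@"Could not start screen recording");
}

// Later, for example from a button handler:
[recorder stopRecordingWithCompletion:^{
    NSLog(@"Screen recording finished");
}];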
I have a method that returns the data of an ALAsset. I call this method for one asset after another to get NSData from them and upload it to a server. After every 4 or 5 calls, the code gets stuck on the rep.size call below. When I pause and resume execution in Xcode, it starts to work again. I am completely stumped; any help would be appreciated.
Clarification: The deadlock/code gets stuck inside the ALAssetsLibrary code, not inside my code.
Additional info: I have just one instance of ALAssetsLibrary and I am making sure it is not being used from any other thread.
+(void)getDataFromAssetURL:(NSString *) myImageURL ofType:(enum ImageType)imageType andPerformBlock:(NSDataBlock)block blocking:(BOOL)blocking
{
NSConditionLock * albumReadLock = nil;
if (blocking) {
albumReadLock = [[NSConditionLock alloc] initWithCondition:PENDING];
}
@autoreleasepool {
@try {
NSURL *str = [[NSURL alloc] initWithString: myImageURL];
ALAsset * asset = [[AppManager sharedInstance].assetObjectCache objectForKey:str];
if (asset && NO) {
block( [Util getDataFromAsset:asset ofType:imageType] );
// notifies the lock that "all tasks are finished"
[albumReadLock lock];
[albumReadLock unlockWithCondition:ALLFINISHED];
}
else {
ALAssetsLibraryAssetForURLResultBlock resultblock = ^(ALAsset *myasset)
{
@autoreleasepool {
@try {
[[AppManager sharedInstance].assetObjectCache setObject:myasset forKey:str];
NSData * assetData = [Util getDataFromAsset:myasset ofType:imageType];
block(assetData);
// notifies the lock that "all tasks are finished"
[albumReadLock lock];
[albumReadLock unlockWithCondition:ALLFINISHED];
}
@catch (NSException *exception) {
block(nil);
// important: notifies lock that "all tasks finished" (even though they failed)
[albumReadLock lock];
[albumReadLock unlockWithCondition:ALLFINISHED];
}
@finally {
}
}
};
ALAssetsLibraryAccessFailureBlock failureblock = ^(NSError *myerror)
{
block(nil);
// important: notifies lock that "all tasks finished" (even though they failed)
[albumReadLock lock];
[albumReadLock unlockWithCondition:ALLFINISHED];
};
if(str)
{
NSURL *asseturl = str;
[[AppManager sharedInstance].assetslibrary assetForURL:asseturl
resultBlock:resultblock
failureBlock:failureblock];
}
else {
block(nil);
// notifies the lock that "all tasks are finished"
[albumReadLock lock];
[albumReadLock unlockWithCondition:ALLFINISHED];
}
}
}
@catch (NSException *exception) {
block(nil);
// notifies the lock that "all tasks are finished"
[albumReadLock lock];
[albumReadLock unlockWithCondition:ALLFINISHED];
}
@finally {
}
}
if (blocking) {
// non-busy wait for the asset read to finish (specifically until the condition is "all finished")
[albumReadLock lockWhenCondition:ALLFINISHED];
[albumReadLock unlock];
}
}
+(NSData *)getDataFromAsset:(ALAsset *)myasset ofType:(enum ImageType)imageType
{
ALAssetRepresentation *rep = [myasset defaultRepresentation];
CGImageRef iref = nil;
NSData *assetData = nil;
if (imageType == FULL_RESOLUTION) {
Byte *buffer = (Byte*)malloc(rep.size);
NSUInteger buffered = [rep getBytes:buffer fromOffset:0.0 length:rep.size error:nil];
assetData = [NSData dataWithBytesNoCopy:buffer length:buffered freeWhenDone:YES];
}
else if (imageType == LARGE_IMAGE){
iref = [rep fullScreenImage];
UIImage * uiimage = [UIImage imageWithCGImage:iref];
////NSLog(#"%f %f", uiimage.size.width, uiimage.size.height);
assetData = UIImageJPEGRepresentation(uiimage, 1.0f);
}
else if (imageType == SQUARE_THUMB){
iref = [myasset thumbnail];
assetData = UIImageJPEGRepresentation([UIImage imageWithCGImage:iref], 1.0f);
}
else if (imageType == PERSPECTIVE_THUMB){
iref = [myasset aspectRatioThumbnail];
assetData = UIImageJPEGRepresentation([UIImage imageWithCGImage:iref], 1.0f);
}
return assetData;
}
New information: it turns out the problem only occurs on a specific device. As stated above, if I pause and un-pause the debugger in Xcode, the code moves forward.
You will cause a deadlock in the ALAsset code if you initiate this block of code from the main thread. You must first drop onto a background thread, because ALAssetsLibrary requires the ability to execute code on the main thread, and if the main thread is blocked in the manner you have outlined above, it will be waiting forever to do so.
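A minimal sketch of that advice, assuming the methods above live on a Util class and that assetURLString holds the asset URL you already have (both names are assumptions): hop onto a background queue before making the blocking call, then come back to the main queue with the result.

// Never call the blocking variant from the main thread; ALAssetsLibrary needs
// the main run loop free to deliver its own callbacks.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    [Util getDataFromAssetURL:assetURLString
                       ofType:FULL_RESOLUTION
              andPerformBlock:^(NSData *assetData) {
                  dispatch_async(dispatch_get_main_queue(), ^{
                      // hand assetData to the uploader on the main thread
                  });
              }
                     blocking:YES];
});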
I am using text to speech. Starting the audio works fine, but I can't stop it. Here is how I start the audio:
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, (unsigned long)NULL), ^(void) {
[[self view] setNeedsDisplay];
[self synthesizeInBackground];
[queue waitUntilAllOperationsAreFinished];
[self setIsSpeaking: false];
[[self view] setNeedsDisplay];
});
synthesizeInBackground
- (void) synthesizeInBackground {
XLog(#"-----------------------------------entered");
queue = [[NSOperationQueue alloc] init];
XLog(#"queue: %#", queue);
operation = [[NSInvocationOperation alloc] initWithTarget:self selector:#selector(synthesize) object:nil];
XLog(#"operation: %#", operation);
[queue addOperation: operation];
}
synthesize
- (void)synthesize {
XLog(#"-----------------------------------entered");
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
callback_userdata userdata;
NSError *error = nil;
self.paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
self.documentsDirectory = [self.paths objectAtIndex:0];
self.path = [self.documentsDirectory stringByAppendingPathComponent:@"readSearchresults.txt"];
IvonaStreamer *streamer = [[IvonaStreamer alloc] initWithVoice:voice withText:[NSString stringWithContentsOfFile:self.path encoding:NSUTF8StringEncoding error:&error] atSpeed:[NSNumber numberWithFloat:-1]];
//IvonaStreamer *streamer = [[IvonaStreamer alloc] initWithVoice:voice withText:@"Dies ist ein Testtext." atSpeed:[NSNumber numberWithFloat:-1]];
if (streamer == nil) {
XLog(@"Cannot start streamer");
[self setTtsError: @"Cannot start streamer"];
return;
}
userdata.speak = &(self->isSpeaking);
userdata.streamer = streamer;
#define NUM_BUFFERS 3
#define BUFFER_SIZE 22050
OSStatus err;
AudioQueueRef audioQueue;
//XLog(#"audioQueue: %d", audioQueue);
XLog(#"[voice getSampleRate]: %i", [voice getSampleRate]);
AudioStreamBasicDescription deviceFormat;
deviceFormat.mSampleRate = [voice getSampleRate];
deviceFormat.mFormatID = kAudioFormatLinearPCM;
deviceFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
deviceFormat.mBytesPerPacket = 2;
deviceFormat.mFramesPerPacket = 1;
deviceFormat.mBytesPerFrame = 2;
deviceFormat.mChannelsPerFrame = 1;
deviceFormat.mBitsPerChannel = 16;
deviceFormat.mReserved = 0;
XLog(#"deviceFormat.mSampleRate: %f", deviceFormat.mSampleRate);
/*
XLog(#"deviceFormat.mSampleRate: %f", deviceFormat.mSampleRate);
XLog(#"deviceFormat.mFormatID: %lu", deviceFormat.mFormatID);
XLog(#"deviceFormat.mFormatFlags: %lu", deviceFormat.mFormatFlags);
XLog(#"deviceFormat.mBytesPerPacket %lu", deviceFormat.mBytesPerPacket);
XLog(#"deviceFormat.mFramesPerPacket %lu", deviceFormat.mFramesPerPacket);
XLog(#"deviceFormat.mBytesPerFrame %lu", deviceFormat.mBytesPerFrame);
XLog(#"deviceFormat.mChannelsPerFrame %lu", deviceFormat.mChannelsPerFrame);
XLog(#"deviceFormat.mBitsPerChannel %lu", deviceFormat.mBitsPerChannel);
XLog(#"deviceFormat.mReserved %lu", deviceFormat.mReserved);
*/
err = AudioQueueNewOutput(&deviceFormat,
AudioQueueCallback,
&userdata,
CFRunLoopGetCurrent(),
kCFRunLoopCommonModes,
0,
&audioQueue);
if (err != noErr) {
XLog(#"Cannot create audio output");
[self setTtsError: #"Cannot create audio output"];
[streamer stop];
return;
}
AudioQueueAddPropertyListener(audioQueue, kAudioQueueProperty_IsRunning,
AudioQueuePropertyListener, NULL);
for (int i = 0; i < NUM_BUFFERS; i++) {
AudioQueueBufferRef buffer;
err = AudioQueueAllocateBuffer(audioQueue, BUFFER_SIZE, &buffer);
if (err != noErr) {
XLog(#"Cannot allocate audio buffer");
[self setTtsError: #"Cannot allocate audio buffer"];
[streamer stop];
return;
}
AudioQueueCallback(&userdata, audioQueue, buffer);
}
err = AudioQueueStart(audioQueue, NULL);
if (err != noErr) {
XLog(#"Cannot start audio");
[self setTtsError: #"Cannot start audio"];
[streamer stop];
return;
}
CFRunLoopRun();
[streamer stop];
[pool release];
}
AudioQueueCallback
void AudioQueueCallback(void *userData, AudioQueueRef audioQueue,
AudioQueueBufferRef buffer)
{
//XLog(#"-----------------------------------entered");
void *data = buffer->mAudioData;
UInt32 num_bytes = buffer->mAudioDataBytesCapacity;
//XLog(#"num_bytes: %lu", num_bytes);
UInt32 to_write = num_bytes / sizeof(short);
//XLog(#"to_write: %lu", to_write);
NSInteger num_samples;
//XLog(#"num_samples: %i", num_samples);
IvonaStreamer *streamer = ((callback_userdata*) userData)->streamer;
bool *enabled = ((callback_userdata*) userData)->speak;
//XLog(#"streamer.getWarnings: %#", streamer.getWarnings);
if(!*enabled) {
XLog(#"!*enabled");
AudioQueueStop(audioQueue, false);
}
num_samples = [streamer synthSamples:to_write toCArray:data];
//XLog(#"num_samples: %i", num_samples);
if (num_samples > 0) {
//XLog(#"num_samples > 0");
buffer->mAudioDataByteSize = num_samples * sizeof(short);
AudioQueueEnqueueBuffer(audioQueue, buffer, 0, NULL);
} else {
//XLog(#"! (num_samples > 0)");
AudioQueueStop(audioQueue, false);
}
}
AudioQueuePropertyListener
void AudioQueuePropertyListener(void *userData, AudioQueueRef audioQueue,
AudioQueuePropertyID id)
{
XLog(#"-----------------------------------entered");
UInt32 isRunning, size = sizeof(isRunning);
AudioQueueGetProperty(audioQueue, kAudioQueueProperty_IsRunning, &isRunning, &size);
if (isRunning == 0) {
XLog(#"isRunning == 0");
CFRunLoopStop(CFRunLoopGetCurrent());
}
if (isRunning != 0) {
XLog(#"nicht null#######");
}
}
I try to stop it in another method (a UIAlertView delegate method):
if (alertView.tag == 997) {
if (buttonIndex == 0) {
XLog(#"vorlesen abbrechen geklickt.");
[queue cancelAllOperations];
AudioQueueRef audioQueue;
//AudioQueueDispose(audioQueue, false);
AudioQueueStop(audioQueue, false);
}
I am cancelling all operations and calling AudioQueueDispose; I also tried AudioQueueStop, but nothing works here.
So my question is, HOW can I stop the audio here?
AudioQueueStop should work and be sufficient. According to Apple's documentation, AudioQueueReset is called from AudioQueueStop.
AudioQueueDispose is a bit too much if you want to start it again later.
I believe that you need to call AudioQueueReset before you call AudioQueueStop.
AudioQueueReset (audioQueue);
AudioQueueStop (audioQueue, YES);
AudioQueueDispose (audioQueue, YES);
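Also note that the local AudioQueueRef audioQueue declared in the alert handler above is never assigned, so stopping it cannot reach the queue created in synthesize. A minimal sketch of one way to wire this up, assuming the queue is kept in an instance variable (_audioQueue and the stopSpeaking helper are assumed names):

// In -synthesize, right after AudioQueueNewOutput succeeds:
//     _audioQueue = audioQueue;

- (void)stopSpeaking
{
    [queue cancelAllOperations];
    [self setIsSpeaking:false];               // AudioQueueCallback also checks this flag
    if (_audioQueue != NULL) {
        AudioQueueStop(_audioQueue, true);    // true = stop immediately
        // AudioQueueDispose(_audioQueue, true); // only if you will not restart it later
        _audioQueue = NULL;
    }
}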
I want to change the container of .mov video files that I pick using UIImagePickerController and compress via AVAssetExportSession with AVAssetExportPresetMediumQuality and shouldOptimizeForNetworkUse = YES to an .mp4 container.
I need a programmatic way/sample code to perform the fastest possible trans-wrap in an iPhone/iPad application.
I tried to set the AVAssetExportSession.outputFileType property to AVFileTypeMPEG4, but it is not supported and I got an exception.
I tried to do this transform using AVAssetWriter by specifying fileType:AVFileTypeMPEG4. I actually got an .mp4 output file, but it was not a trans-wrap: the output file was 3x bigger than the source, and the conversion took 128 seconds for a video with a 60-second duration.
I need a solution that runs quickly and keeps the file size.
This is the code I use to convert .mov to .mp4:
I set the assetWriter options in the setUpReaderAndWriterReturningError method.
#import "MCVideoConverter.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVAssetReader.h>
#import <AVFoundation/AVAssetReaderOutput.h>
#import <AVFoundation/AVAssetWriter.h>
#import <AVFoundation/AVAssetWriterInput.h>
#import <AVFoundation/AVMediaFormat.h>
#import <AVFoundation/AVAudioSettings.h>
#import <AVFoundation/AVVideoSettings.h>
#import <AVFoundation/AVAssetImageGenerator.h>
#import <AVFoundation/AVTime.h>
#import <CoreMedia/CMSampleBuffer.h>
@protocol RWSampleBufferChannelDelegate;
@interface RWSampleBufferChannel : NSObject
{
@private
AVAssetReaderOutput *assetReaderOutput;
AVAssetWriterInput *assetWriterInput;
dispatch_block_t completionHandler;
dispatch_queue_t serializationQueue;
BOOL finished; // only accessed on serialization queue
}
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)assetReaderOutput assetWriterInput:(AVAssetWriterInput *)assetWriterInput;
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)completionHandler; // delegate is retained until completion handler is called. Completion handler is guaranteed to be called exactly once, whether reading/writing finishes, fails, or is cancelled. Delegate may be nil.
- (void)cancel;
@property (nonatomic, readonly) NSString *mediaType;
@end
@protocol RWSampleBufferChannelDelegate <NSObject>
@required
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
@interface MCVideoConverter () <RWSampleBufferChannelDelegate>
// These three methods are always called on the serialization dispatch queue
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError; // make sure "tracks" key of asset is loaded before calling this
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError;
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error;
@end
@implementation MCVideoConverter
+ (NSArray *)readableTypes
{
return [AVURLAsset audiovisualTypes];;
}
+ (BOOL)canConcurrentlyReadDocumentsOfType:(NSString *)typeName
{
return YES;
}
- (id)init
{
self = [super init];
if (self)
{
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[asset release];
[outputURL release];
[assetReader release];
[assetWriter release];
[audioSampleBufferChannel release];
[videoSampleBufferChannel release];
if (serializationQueue)
dispatch_release(serializationQueue);
[super dealloc];
}
@synthesize asset=asset;
@synthesize timeRange=timeRange;
@synthesize writingSamples=writingSamples;
@synthesize outputURL=outputURL;
@synthesize propgerssView;
- (void)convertVideo:(NSURL*) inputURL outputURL: (NSURL*) _outputURL progress:(UIProgressView*) _propgerssView
{
self.asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
self.propgerssView = _propgerssView;
cancelled = NO;
[self performSelector:@selector(startProgressSheetWithURL:) withObject:_outputURL afterDelay:0.0]; // avoid starting a new sheet while in
}
- (void)startProgressSheetWithURL:(NSURL *)localOutputURL
{
[self setOutputURL:localOutputURL];
[self setWritingSamples:YES];
AVAsset *localAsset = [self asset];
[localAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"tracks", @"duration", nil] completionHandler:^
{
// Dispatch the setup work to the serialization queue, to ensure this work is serialized with potential cancellation
dispatch_async(serializationQueue, ^{
// Since we are doing these things asynchronously, the user may have already cancelled on the main thread. In that case, simply return from this block
if (cancelled)
return;
BOOL success = YES;
NSError *localError = nil;
success = ([localAsset statusOfValueForKey:#"tracks" error:&localError] == AVKeyValueStatusLoaded);
if (success)
success = ([localAsset statusOfValueForKey:#"duration" error:&localError] == AVKeyValueStatusLoaded);
if (success)
{
[self setTimeRange:CMTimeRangeMake(kCMTimeZero, [localAsset duration])];
// AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists
NSFileManager *fm = [NSFileManager defaultManager];
NSString *localOutputPath = [localOutputURL path];
if ([fm fileExistsAtPath:localOutputPath])
success = [fm removeItemAtPath:localOutputPath error:&localError];
}
// Set up the AVAssetReader and AVAssetWriter, then begin writing samples or flag an error
if (success)
success = [self setUpReaderAndWriterReturningError:&localError];
if (success)
success = [self startReadingAndWritingReturningError:&localError];
if (!success)
[self readingAndWritingDidFinishSuccessfully:success withError:localError];
});
}];
}
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
AVAsset *localAsset = [self asset];
NSURL *localOutputURL = [self outputURL];
// Create asset reader and asset writer
assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&localError];
success = (assetReader != nil);
if (success)
{
//changed assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeQuickTimeMovie error:&localError];
assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeMPEG4 error:&localError];
success = (assetWriter != nil);
}
// Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
if (success)
{
AVAssetTrack *audioTrack = nil, *videoTrack = nil;
// Grab first audio track and first video track, if the asset has them
NSArray *audioTracks = [localAsset tracksWithMediaType:AVMediaTypeAudio];
if ([audioTracks count] > 0)
audioTrack = [audioTracks objectAtIndex:0];
NSArray *videoTracks = [localAsset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0)
videoTrack = [videoTracks objectAtIndex:0];
if (audioTrack)
{
// Decompress to Linear PCM with the asset reader
NSDictionary *decompressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM], AVFormatIDKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:decompressionAudioSettings];
[assetReader addOutput:output];
AudioChannelLayout stereoChannelLayout = {
.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
.mChannelBitmap = 0,
.mNumberChannelDescriptions = 0
};
NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
// Compress to 128kbps AAC with the asset writer
NSDictionary *compressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInteger:128000], AVEncoderBitRateKey,
[NSNumber numberWithInteger:44100], AVSampleRateKey,
channelLayoutAsData, AVChannelLayoutKey,
[NSNumber numberWithUnsignedInteger:2], AVNumberOfChannelsKey,
nil];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[audioTrack mediaType] outputSettings:compressionAudioSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
audioSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
if (videoTrack)
{
// Decompress to ARGB with the asset reader
NSDictionary *decompressionVideoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (id)kCVPixelBufferPixelFormatTypeKey,
[NSDictionary dictionary], (id)kCVPixelBufferIOSurfacePropertiesKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:decompressionVideoSettings];
[assetReader addOutput:output];
// Get the format description of the track, to fill in attributes of the video stream that we don't want to change
CMFormatDescriptionRef formatDescription = NULL;
NSArray *formatDescriptions = [videoTrack formatDescriptions];
if ([formatDescriptions count] > 0)
formatDescription = (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];
// Grab track dimensions from format description
CGSize trackDimensions = {
.width = 0.0,
.height = 0.0,
};
if (formatDescription)
trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false);
else
trackDimensions = [videoTrack naturalSize];
// Grab clean aperture, pixel aspect ratio from format description
NSMutableDictionary *compressionSettings = nil;
// [NSMutableDictionary dictionaryWithObjectsAndKeys:
// AVVideoProfileLevelH264Baseline30, AVVideoProfileLevelKey,
// [NSNumber numberWithInt:960000], AVVideoAverageBitRateKey,
// [NSNumber numberWithInt:1],AVVideoMaxKeyFrameIntervalKey,
// nil ];
//NSDictionary *videoSettings = nil;
if (formatDescription)
{
NSDictionary *cleanAperture = nil;
NSDictionary *pixelAspectRatio = nil;
CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture);
if (cleanApertureFromCMFormatDescription)
{
cleanAperture = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth), AVVideoCleanApertureWidthKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight), AVVideoCleanApertureHeightKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset), AVVideoCleanApertureHorizontalOffsetKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset), AVVideoCleanApertureVerticalOffsetKey,
nil];
}
CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio);
if (pixelAspectRatioFromCMFormatDescription)
{
pixelAspectRatio = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing), AVVideoPixelAspectRatioHorizontalSpacingKey,
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing), AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
}
if (cleanAperture || pixelAspectRatio)
{
if (cleanAperture)
[compressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey];
if (pixelAspectRatio)
[compressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey];
}
}
// Compress to H.264 with the asset writer
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithDouble:trackDimensions.width], AVVideoWidthKey,
[NSNumber numberWithDouble:trackDimensions.height], AVVideoHeightKey,
nil];
if (compressionSettings)
[videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[videoTrack mediaType] outputSettings:videoSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
videoSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
}
if (outError)
*outError = localError;
return success;
}
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
// Instruct the asset reader and asset writer to get ready to do work
success = [assetReader startReading];
if (!success)
localError = [assetReader error];
if (success)
{
success = [assetWriter startWriting];
if (!success)
localError = [assetWriter error];
}
if (success)
{
dispatch_group_t dispatchGroup = dispatch_group_create();
// Start a sample-writing session
[assetWriter startSessionAtSourceTime:[self timeRange].start];
// Start reading and writing samples
if (audioSampleBufferChannel)
{
// Only set audio delegate for audio-only assets, else let the video channel drive progress
id <RWSampleBufferChannelDelegate> delegate = nil;
if (!videoSampleBufferChannel)
delegate = self;
dispatch_group_enter(dispatchGroup);
[audioSampleBufferChannel startWithDelegate:delegate completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
if (videoSampleBufferChannel)
{
dispatch_group_enter(dispatchGroup);
[videoSampleBufferChannel startWithDelegate:self completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
// Set up a callback for when the sample writing is finished
dispatch_group_notify(dispatchGroup, serializationQueue, ^{
BOOL finalSuccess = YES;
NSError *finalError = nil;
if (cancelled)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
else
{
if ([assetReader status] == AVAssetReaderStatusFailed)
{
finalSuccess = NO;
finalError = [assetReader error];
}
if (finalSuccess)
{
finalSuccess = [assetWriter finishWriting];
if (!finalSuccess)
finalError = [assetWriter error];
}
}
[self readingAndWritingDidFinishSuccessfully:finalSuccess withError:finalError];
});
dispatch_release(dispatchGroup);
}
if (outError)
*outError = localError;
return success;
}
- (void)cancel
{
self.propgerssView = nil;
// Dispatch cancellation tasks to the serialization queue to avoid races with setup and teardown
dispatch_async(serializationQueue, ^{
[audioSampleBufferChannel cancel];
[videoSampleBufferChannel cancel];
cancelled = YES;
});
}
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error
{
NSLog(#"%s[%d] - success = %d error = %#", __FUNCTION__, __LINE__, success, error);
if (!success)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
// Tear down ivars
[assetReader release];
assetReader = nil;
[assetWriter release];
assetWriter = nil;
[audioSampleBufferChannel release];
audioSampleBufferChannel = nil;
[videoSampleBufferChannel release];
videoSampleBufferChannel = nil;
cancelled = NO;
// Dispatch UI-related tasks to the main queue
dispatch_async(dispatch_get_main_queue(), ^{
if (!success)
{
}
[self setWritingSamples:NO];
});
}
static double progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer, CMTimeRange timeRange)
{
CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
progressTime = CMTimeSubtract(progressTime, timeRange.start);
CMTime sampleDuration = CMSampleBufferGetDuration(sampleBuffer);
if (CMTIME_IS_NUMERIC(sampleDuration))
progressTime= CMTimeAdd(progressTime, sampleDuration);
return CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(timeRange.duration);
}
static void removeARGBColorComponentOfPixelBuffer(CVPixelBufferRef pixelBuffer, size_t componentIndex)
{
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
static const size_t bytesPerPixel = 4; // constant for ARGB pixel format
unsigned char *base = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
for (size_t row = 0; row < bufferHeight; ++row)
{
for (size_t column = 0; column < bufferWidth; ++column)
{
unsigned char *pixel = base + (row * bytesPerRow) + (column * bytesPerPixel);
pixel[componentIndex] = 0;
}
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
+ (size_t)componentIndexFromFilterTag:(NSInteger)filterTag
{
return (size_t)filterTag; // we set up the tags in the popup button to correspond directly with the index they modify
}
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
CVPixelBufferRef pixelBuffer = NULL;
// Calculate progress (scale of 0.0 to 1.0)
double progress = progressOfSampleBufferInTimeRange(sampleBuffer, [self timeRange]);
NSLog(#"%s[%d] - progress = %f", __FUNCTION__, __LINE__, progress);
// Grab the pixel buffer from the sample buffer, if possible
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer && (CFGetTypeID(imageBuffer) == CVPixelBufferGetTypeID()))
{
pixelBuffer = (CVPixelBufferRef)imageBuffer;
if (filterTag >= 0) // -1 means "no filtering, please"
removeARGBColorComponentOfPixelBuffer(pixelBuffer, [[self class] componentIndexFromFilterTag:filterTag]);
}
}
#end
@interface RWSampleBufferChannel ()
- (void)callCompletionHandlerIfNecessary; // always called on the serialization queue
@end
@implementation RWSampleBufferChannel
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)localAssetReaderOutput assetWriterInput:(AVAssetWriterInput *)localAssetWriterInput
{
self = [super init];
if (self)
{
assetReaderOutput = [localAssetReaderOutput retain];
assetWriterInput = [localAssetWriterInput retain];
finished = NO;
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[assetReaderOutput release];
[assetWriterInput release];
if (serializationQueue)
dispatch_release(serializationQueue);
[completionHandler release];
[super dealloc];
}
- (NSString *)mediaType
{
return [assetReaderOutput mediaType];
}
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)localCompletionHandler
{
completionHandler = [localCompletionHandler copy]; // released in -callCompletionHandlerIfNecessary
[assetWriterInput requestMediaDataWhenReadyOnQueue:serializationQueue usingBlock:^{
if (finished)
return;
BOOL completedOrFailed = NO;
// Read samples in a loop as long as the asset writer input is ready
while ([assetWriterInput isReadyForMoreMediaData] && !completedOrFailed)
{
CMSampleBufferRef sampleBuffer = [assetReaderOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
if ([delegate respondsToSelector:@selector(sampleBufferChannel:didReadSampleBuffer:)])
[delegate sampleBufferChannel:self didReadSampleBuffer:sampleBuffer];
BOOL success = [assetWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
[self callCompletionHandlerIfNecessary];
}];
}
- (void)cancel
{
dispatch_async(serializationQueue, ^{
[self callCompletionHandlerIfNecessary];
});
}
- (void)callCompletionHandlerIfNecessary
{
// Set state to mark that we no longer need to call the completion handler, grab the completion handler, and clear out the ivar
BOOL oldFinished = finished;
finished = YES;
if (oldFinished == NO)
{
[assetWriterInput markAsFinished]; // let the asset writer know that we will not be appending any more samples to this input
dispatch_block_t localCompletionHandler = [completionHandler retain];
[completionHandler release];
completionHandler = nil;
if (localCompletionHandler)
{
localCompletionHandler();
[localCompletionHandler release];
}
}
}
@end
Hey, it has been a long while, but I ended up with a good solution and it may help someone in the future.
My code:
-(void) compressVideo
{
asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
NSLog(#" %#", [AVAssetExportSession exportPresetsCompatibleWithAsset:asset]);
NSLog(#" %#", exportSession.supportedFileTypes);
NSLog(#"----------------------------------------- convert to mp4");
NSLog(#" %#", exportSession.supportedFileTypes);
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = [self outputVideoPath:#"outPut" ext:#"mp4"];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
ICQLog(#" exportSession.status = %d exportSession.error = %#", exportSession.status, exportSession.error);
if ( exportSession && (exportSession.status == AVAssetExportSessionStatusCompleted) )
{
ICQLog(#" exportSession.outputURL = %#", exportSession.outputURL);
// we need to remove temporary files
[[NSFileManager defaultManager] removeItemAtURL:videoUrl error:NULL];
[videoUrl release];
videoUrl = [exportSession.outputURL retain];
}
else
{
//TODO - report error
}
[exportSession release], exportSession = nil;
[asset release], asset = nil;
}];
}
I can't help with the trans-wrap stuff; I haven't got my head into this.
Is the main priority to get the file output as an .mp4 without having to reprocess it? If it is, then just use .mp4 as the file extension of the movie clip that was output by your code and this should work fine. I have used this approach today and it works. I didn't have to convert it from .mov to .mp4, because essentially an .mp4 file is the same as a .mov file with some additional standards-based functionality.
Hope this is of help.
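A minimal sketch of that idea (the paths are assumed examples): give the existing QuickTime output an .mp4 extension instead of re-encoding it.

// Rename the recorded .mov so it carries an .mp4 extension; no reprocessing involved.
NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSString *movPath = [docs stringByAppendingPathComponent:@"output.mov"];
NSString *mp4Path = [docs stringByAppendingPathComponent:@"output.mp4"];

NSError *error = nil;
if (![[NSFileManager defaultManager] moveItemAtPath:movPath toPath:mp4Path error:&error]) {
    NSLog(@"Could not rename recording: %@", error.localizedDescription);
}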
This is the code I used.
- (BOOL)encodeVideo:(NSURL *)videoURL
{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
// Create the composition and tracks
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *assetVideoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if (assetVideoTracks.count <= 0)
{
NSLog(#"Error reading the transformed video track");
return NO;
}
// Insert the tracks in the composition's tracks
AVAssetTrack *assetVideoTrack = [assetVideoTracks firstObject];
[videoTrack insertTimeRange:assetVideoTrack.timeRange ofTrack:assetVideoTrack atTime:CMTimeMake(0, 1) error:nil];
[videoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioTrack insertTimeRange:assetAudioTrack.timeRange ofTrack:assetAudioTrack atTime:CMTimeMake(0, 1) error:nil];
// Export to mp4
NSString *mp4Quality = [MGPublic isIOSAbove:@"6.0"] ? AVAssetExportPresetMediumQuality : AVAssetExportPresetPassthrough;
NSString *exportPath = [NSString stringWithFormat:@"%@/%@.mp4",
[NSHomeDirectory() stringByAppendingString:@"/tmp"],
[BSCommon uuidString]];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:mp4Quality];
exportSession.outputURL = exportUrl;
CMTime start = CMTimeMakeWithSeconds(0.0, 0);
CMTimeRange range = CMTimeRangeMake(start, [asset duration]);
exportSession.timeRange = range;
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
NSLog(#"MP4 Successful!");
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %#", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
default:
break;
}
}];
return YES;
}