Screen-capture a UIView that has a camera view in it - iOS

I am trying to screen-capture a view that has an APPLCameraViewContoller in it. For some reason, no matter what I do, whatever the camera is looking at never gets captured when the recording is saved to the camera roll; only the UIView with the labels does, so the result is a black background with the labels. I want the labels rendered on top of the camera view. Any suggestions or examples on how to go about this? Here is the screen-capture .m, which I assume is the reason why this is happening.
@interface ASScreenRecorder()
@property (strong, nonatomic) AVAssetWriter *videoWriter;
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
@property (strong, nonatomic) CADisplayLink *displayLink;
@property (strong, nonatomic) NSDictionary *outputBufferPoolAuxAttributes;
@property (nonatomic) CFTimeInterval firstTimeStamp;
@property (nonatomic) BOOL isRecording;
@end
@implementation ASScreenRecorder
{
dispatch_queue_t _render_queue;
dispatch_queue_t _append_pixelBuffer_queue;
dispatch_semaphore_t _frameRenderingSemaphore;
dispatch_semaphore_t _pixelAppendSemaphore;
CGSize _viewSize;
CGFloat _scale;
CGColorSpaceRef _rgbColorSpace;
CVPixelBufferPoolRef _outputBufferPool;
}
#pragma mark - initializers
+ (instancetype)sharedInstance {
static dispatch_once_t once;
static ASScreenRecorder *sharedInstance;
dispatch_once(&once, ^{
sharedInstance = [[self alloc] init];
});
return sharedInstance;
}
- (instancetype)init
{
self = [super init];
if (self) {
_viewSize = [UIApplication sharedApplication].delegate.window.bounds.size;
_scale = [UIScreen mainScreen].scale;
// record half size resolution for retina iPads
if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) {
_scale = 1.0;
}
_isRecording = NO;
_append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL);
_render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_render_queue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0));
_frameRenderingSemaphore = dispatch_semaphore_create(1);
_pixelAppendSemaphore = dispatch_semaphore_create(1);
}
return self;
}
#pragma mark - public
- (void)setVideoURL:(NSURL *)videoURL
{
NSAssert(!_isRecording, @"videoURL can not be changed whilst recording is in progress");
_videoURL = videoURL;
}
- (BOOL)startRecording
{
if (!_isRecording) {
[self setUpWriter];
_isRecording = (_videoWriter.status == AVAssetWriterStatusWriting);
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)];
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}
return _isRecording;
}
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
{
if (_isRecording) {
_isRecording = NO;
[_displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
[self completeRecordingSession:completionBlock];
}
}
#pragma mark - private
-(void)setUpWriter
{
_rgbColorSpace = CGColorSpaceCreateDeviceRGB();
NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
(id)kCVPixelBufferWidthKey : @(_viewSize.width * _scale),
(id)kCVPixelBufferHeightKey : @(_viewSize.height * _scale),
(id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * _scale * 4)
};
_outputBufferPool = NULL;
CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool);
NSError* error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:self.videoURL ?: [self tempFileURL]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
NSInteger pixelNumber = _viewSize.width * _viewSize.height * _scale;
NSDictionary* videoCompression = @{AVVideoAverageBitRateKey: @(pixelNumber * 11.4)};
NSDictionary* videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithInt:_viewSize.width*_scale],
AVVideoHeightKey: [NSNumber numberWithInt:_viewSize.height*_scale],
AVVideoCompressionPropertiesKey: videoCompression};
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
_videoWriterInput.transform = [self videoTransformForDeviceOrientation];
_avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil];
[_videoWriter addInput:_videoWriterInput];
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
}
- (CGAffineTransform)videoTransformForDeviceOrientation
{
CGAffineTransform videoTransform;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationLandscapeLeft:
videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
break;
case UIDeviceOrientationLandscapeRight:
videoTransform = CGAffineTransformMakeRotation(M_PI_2);
break;
case UIDeviceOrientationPortraitUpsideDown:
videoTransform = CGAffineTransformMakeRotation(M_PI);
break;
default:
videoTransform = CGAffineTransformIdentity;
}
return videoTransform;
}
- (NSURL*)tempFileURL
{
NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"];
[self removeTempFilePath:outputPath];
return [NSURL fileURLWithPath:outputPath];
}
- (void)removeTempFilePath:(NSString*)filePath
{
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath]) {
NSError* error;
if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
NSLog(#"Could not delete old recording:%#", [error localizedDescription]);
}
}
}
- (void)completeRecordingSession:(VideoCompletionBlock)completionBlock;
{
dispatch_async(_render_queue, ^{
dispatch_sync(_append_pixelBuffer_queue, ^{
[_videoWriterInput markAsFinished];
[_videoWriter finishWritingWithCompletionHandler:^{
void (^completion)(void) = ^() {
[self cleanup];
dispatch_async(dispatch_get_main_queue(), ^{
if (completionBlock) completionBlock();
});
};
if (self.videoURL) {
completion();
} else {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
NSLog(#"Error copying video to camera roll:%#", [error localizedDescription]);
} else {
[self removeTempFilePath:_videoWriter.outputURL.path];
completion();
}
}];
}
}];
});
});
}
- (void)cleanup
{
self.avAdaptor = nil;
self.videoWriterInput = nil;
self.videoWriter = nil;
self.firstTimeStamp = 0;
self.outputBufferPoolAuxAttributes = nil;
CGColorSpaceRelease(_rgbColorSpace);
CVPixelBufferPoolRelease(_outputBufferPool);
}
- (void)writeVideoFrame
{
// throttle the number of frames to prevent meltdown
// technique gleaned from Brad Larson's answer here: http://stackoverflow.com/a/5956119
if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
return;
}
dispatch_async(_render_queue, ^{
if (![_videoWriterInput isReadyForMoreMediaData]) return;
if (!self.firstTimeStamp) {
self.firstTimeStamp = _displayLink.timestamp;
}
CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp);
CMTime time = CMTimeMakeWithSeconds(elapsed, 1000);
CVPixelBufferRef pixelBuffer = NULL;
CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
if (self.delegate) {
[self.delegate writeBackgroundFrameInContext:&bitmapContext];
}
// draw each window into the context (other windows include UIKeyboard, UIAlert)
// FIX: UIKeyboard is currently only rendered correctly in portrait orientation
dispatch_sync(dispatch_get_main_queue(), ^{
UIGraphicsPushContext(bitmapContext); {
for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
[window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
}
} UIGraphicsPopContext();
});
// append pixelBuffer on a async dispatch_queue, the next frame is rendered whilst this one appends
// must not overwhelm the queue with pixelBuffers, therefore:
// check if _append_pixelBuffer_queue is ready
// if it’s not ready, release pixelBuffer and bitmapContext
if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) {
dispatch_async(_append_pixelBuffer_queue, ^{
BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success) {
NSLog(#"Warning: Unable to write buffer to video");
}
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
dispatch_semaphore_signal(_pixelAppendSemaphore);
});
} else {
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
}
dispatch_semaphore_signal(_frameRenderingSemaphore);
});
}
- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer
{
CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer);
CVPixelBufferLockBaseAddress(*pixelBuffer, 0);
CGContextRef bitmapContext = NULL;
bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer),
CVPixelBufferGetWidth(*pixelBuffer),
CVPixelBufferGetHeight(*pixelBuffer),
8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
);
CGContextScaleCTM(bitmapContext, _scale, _scale);
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
CGContextConcatCTM(bitmapContext, flipVertical);
return bitmapContext;
}
I would be more than happy to provide my full source code to anyone who could tackle something like this, because posting multiple .m files here would take up a lot of space.

I'm not sure if it's similar for you, but I've been using ASScreenRecorder to record an ARSceneView, and that has been working fine. Have a look at the following link; you can give it a view to render, and it records and provides an output URL. You might have to make a small edit to the class to get the stopRecording function's completion handler to work.
https://github.com/alskipp/ASScreenRecorder
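For completeness, here is a minimal usage sketch. The button action name is made up, but the calls it makes are the ones declared in the ASScreenRecorder header shown further down this page (sharedInstance, startRecording, stopRecordingWithCompletion:):

#import "ASScreenRecorder.h"
// Hypothetical record button action; toggles recording with ASScreenRecorder.
- (IBAction)recordButtonTapped:(id)sender
{
    ASScreenRecorder *recorder = [ASScreenRecorder sharedInstance];
    if (!recorder.isRecording) {
        // videoURL is optional; when it is nil the recording is saved to the camera roll.
        [recorder startRecording];
    } else {
        [recorder stopRecordingWithCompletion:^{
            NSLog(@"Finished recording");
        }];
    }
}

Note that drawViewHierarchyInRect: alone will not capture an AVCaptureVideoPreviewLayer; as the comments in ASScreenRecorder.h below explain, the camera frames have to be drawn into the context via the delegate hook.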

Related

AVAssetWriter writes images to an mp4 file, but the file content is only the first half

I want to record snapshots of a window to an mp4 file in a macOS app.
Start the creator with [aMp4CreaterEntity startRecordWithSize:CGSizeMake(2320, 1080) pixelType:kCVPixelFormatType_32ARGB].
Run a timer 15 times per second, snapshotting the window with CGWindowListCreateImage to get a CGImageRef whose width is 2320 and height is 1080.
Call [aMp4CreaterEntity recordImage:theCGImageRef timeStamp:[[NSDate date] timeIntervalSince1970]] to send the CGImageRef to my aMp4CreaterEntity.
Call [aMp4CreaterEntity stopRecord] and get the mp4 file.
Everything runs all right, except that the mp4 file contains only the first half of what was sent; the later content is lost. When I debug, every CVPixelBufferRef is appended by the AVAssetWriterInputPixelBufferAdaptor.
At first I thought the CMTime setting was wrong, but after changing it to half or double, the error persisted.
I'm new to audio and video; can someone help me solve this problem or explain it in detail?
BTW: I record the audio to another file at the same time, and it has the same problem - only the earlier half of the content is there. I can also read Swift code directly.
This is my recorder sample code, in Objective-C.
#import "Mp4Creater.h"
#import <AVFoundation/AVFoundation.h>
@interface Mp4Creater()
@property (nonatomic, strong) AVAssetWriter *videoWriter;
@property (nonatomic, strong) AVAssetWriterInput *videoInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
@property (nonatomic, strong) NSString *videoOutputPath;
@property (nonatomic, strong) NSDictionary *videoSettings;
@property (nonatomic, assign) NSTimeInterval startTs;
@property (nonatomic, assign) NSTimeInterval latestTs;
@property (nonatomic, strong) NSOperationQueue *opQueue;
@property (nonatomic, assign) BOOL isRecording;
@property (nonatomic, assign) NSUInteger frameRate; // 15
@property (nonatomic, assign) NSUInteger iFrameInterval; // 3s
@end
@implementation Mp4Creater
- (instancetype)init
{
self = [super init];
if (self) {
_videoWriter = nil;
_videoInput = nil;
_videoAdaptor = nil;
_videoOutputPath = nil;
_videoSettings = nil;
_startTs = -1;
_latestTs = -1;
_isRecording = NO;
_frameRate = 15;
_iFrameInterval = 3;
}
return self;
}
- (void)dealloc
{
[_opQueue cancelAllOperations];
}
- (BOOL)addVideoInputWithSize:(CGSize)size pixelType:(UInt32)pixelType {
NSString *codecKey = AVVideoCodecTypeH264;
_videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:codecKey, AVVideoCodecKey,
[NSNumber numberWithInt: size.width], AVVideoWidthKey,
[NSNumber numberWithInt: size.height], AVVideoHeightKey,
[NSDictionary dictionaryWithObjectsAndKeys:AVVideoYCbCrMatrix_ITU_R_709_2, AVVideoYCbCrMatrixKey, AVVideoTransferFunction_ITU_R_709_2, AVVideoTransferFunctionKey, AVVideoColorPrimaries_ITU_R_709_2, AVVideoColorPrimariesKey, nil], AVVideoColorPropertiesKey,
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt: size.width * size.height * 2], AVVideoAverageBitRateKey,
[NSNumber numberWithInt: (int)(_frameRate*_iFrameInterval)], AVVideoMaxKeyFrameIntervalKey,
[NSNumber numberWithInt: (int)(_iFrameInterval)], AVVideoMaxKeyFrameIntervalDurationKey,
AVVideoProfileLevelH264BaselineAutoLevel, AVVideoProfileLevelKey,
nil], AVVideoCompressionPropertiesKey,
nil];
AVAssetWriterInput *videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:_videoSettings];
videoInput.expectsMediaDataInRealTime = YES;
if ([_videoWriter canAddInput:videoInput]) {
[_videoWriter addInput:videoInput];
_videoInput = videoInput;
}
else {
return NO;
}
NSDictionary *sourcePixelBufferAttributes = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(pixelType)};
_videoAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:sourcePixelBufferAttributes];
return YES;
}
- (BOOL)startRecordWithSize:(CGSize)size pixelType:(UInt32)pixelType {
if (self.isRecording) {
return YES;
}
self.startTs = -1;
NSString *outputFile;
NSString *guid = [[NSUUID new] UUIDString];
NSString *fileName = [NSString stringWithFormat:@"video_%@.mp4", guid];
outputFile = [NSTemporaryDirectory() stringByAppendingPathComponent:fileName];
self.videoOutputPath = outputFile;
NSError *error = nil;
//----initialize compression engine
self.videoWriter = [[AVAssetWriter alloc]initWithURL:[NSURL fileURLWithPath:self.videoOutputPath]
fileType:AVFileTypeMPEG4
error:&error];
self.videoWriter.shouldOptimizeForNetworkUse = YES;
if(error) {
return NO;
}
if (self.videoWriter == nil) {
return NO;
}
if (![self addVideoInputWithSize:size pixelType:pixelType ]) {
[self stopRecord];
return NO;
}
self->_isRecording = YES;
[self.videoWriter startWriting];
[self.videoWriter startSessionAtSourceTime:kCMTimeZero];
_opQueue = [[NSOperationQueue alloc] init];
_opQueue.maxConcurrentOperationCount = 1;
return YES;
}
- (void)stopRecord {
if (!self.isRecording) {
return;
}
[_opQueue cancelAllOperations];
NSOperationQueue *oldQueue = _opQueue;
_opQueue = nil;
[oldQueue waitUntilAllOperationsAreFinished];
if (self.videoInput != nil) {
[self.videoInput markAsFinished];
}
self.videoInput = nil;
self.videoAdaptor = nil;
if (self.videoWriter != nil) {
__block BOOL success = NO;
if (self.videoWriter.status == AVAssetWriterStatusWriting) {
success = YES;
}
[self.videoWriter finishWritingWithCompletionHandler:^{
if (self.videoWriter.status == AVAssetWriterStatusCompleted) {
if (success) {
return;
}
}
}];
}
self->_isRecording = NO;
}
- (void)recordImage:(CGImageRef)image timeStamp:(NSTimeInterval)ts {
CGImageRef retainImage = CGImageRetain(image);
__weak __typeof__(self) weak_self = self;
[_opQueue addOperationWithBlock:^{
__typeof__(self) self = weak_self;
if (!self.isRecording) {
return;
}
if (self.startTs < 0) {
self.startTs = ts;
}
self.latestTs = ts;
CMTime cmTime = CMTimeMake((ts - self.startTs) * 1000, 1000);
if (self.videoWriter != nil) {
if (self.videoWriter.status == AVAssetWriterStatusWriting) {
if (self.videoInput != nil && self.videoInput.isReadyForMoreMediaData) {
CVPixelBufferRef buffer = [self CVPixelBufferRefFromCGImage:retainImage];
if (buffer != NULL) {
[self.videoAdaptor appendPixelBuffer:buffer withPresentationTime:cmTime];
CVPixelBufferRelease(buffer);
}
}
}
}
CGImageRelease(retainImage);
}];
}
- (CVPixelBufferRef)CVPixelBufferRefFromCGImage:(CGImageRef)image {
size_t pixelsWide = CGImageGetWidth(image);
size_t pixelsHigh = CGImageGetHeight(image);
NSInteger bitmapBytesPerRow = (pixelsWide * 4);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, pixelsWide, pixelsHigh, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
if (status == kCVReturnSuccess) {
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, pixelsWide, pixelsHigh, 8, bitmapBytesPerRow, rgbColorSpace, (CGBitmapInfo)kCGImageAlphaPremultipliedFirst);
NSParameterAssert(context);
CGContextConcatCTM(context, CGAffineTransformIdentity);
CGContextDrawImage(context, CGRectMake(0, 0, pixelsWide, pixelsHigh), image);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
return pxbuffer;
}
else {
return nil;
}
}
@end
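One possible cause of the missing second half (a guess based only on the code above, not verified against the original project): stopRecord calls cancelAllOperations, which throws away every frame still queued on _opQueue, and it returns before finishWritingWithCompletionHandler: has actually finished the file. A sketch of a stopRecord variant that drains the queue and waits for the writer could look like this:

- (void)stopRecord {
    if (!self.isRecording) {
        return;
    }
    self->_isRecording = NO; // stop accepting new frames first
    NSOperationQueue *oldQueue = _opQueue;
    _opQueue = nil;
    [oldQueue waitUntilAllOperationsAreFinished]; // drain queued frames instead of cancelling them
    [self.videoInput markAsFinished];
    if (self.videoWriter.status == AVAssetWriterStatusWriting) {
        dispatch_semaphore_t done = dispatch_semaphore_create(0);
        [self.videoWriter finishWritingWithCompletionHandler:^{
            dispatch_semaphore_signal(done);
        }];
        // Block until the mp4 is fully written (call this off the main thread).
        dispatch_semaphore_wait(done, DISPATCH_TIME_FOREVER);
    }
    self.videoInput = nil;
    self.videoAdaptor = nil;
    self.videoWriter = nil;
}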

Screen video Recording in ios

I want to record a UIView in iOS. I have tried, but I am not getting a clear video. A screenshot is here:
[1]: https://i.stack.imgur.com/Gmwkr.png Please suggest a fix; it looks like there is a problem with the frame I am passing.
#import "screerecorder.h"
#import <QuartzCore/QuartzCore.h>
#import <MobileCoreServices/UTCoreTypes.h>
#import <AssetsLibrary/AssetsLibrary.h>
@interface screerecorder(Private)
- (void) writeVideoFrameAtTime:(CMTime)time;
@end
@implementation screerecorder
@synthesize currentScreen, frameRate, delegate;
- (void) initialize {
// Initialization code
self.clearsContextBeforeDrawing = YES;
self.currentScreen = nil;
self.frameRate = 10.0f; //10 frames per seconds
_recording = false;
videoWriter = nil;
videoWriterInput = nil;
avAdaptor = nil;
startedAt = nil;
bitmapData = NULL;
}
- (id) initWithCoder:(NSCoder *)aDecoder {
self = [super initWithCoder:aDecoder];
if (self) {
[self initialize];
}
return self;
}
- (id) init {
self = [super init];
if (self) {
[self initialize];
}
return self;
}
- (id)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self) {
[self initialize];
}
return self;
}
- (CGContextRef) createBitmapContextOfSize:(CGSize) size {
CGContextRef context = NULL;
CGColorSpaceRef colorSpace;
int bitmapByteCount;
int bitmapBytesPerRow;
bitmapBytesPerRow = (size.width * 4);
bitmapByteCount = (bitmapBytesPerRow * size.height);
colorSpace = CGColorSpaceCreateDeviceRGB();
if (bitmapData != NULL) {
free(bitmapData);
}
bitmapData = malloc( bitmapByteCount );
if (bitmapData == NULL) {
fprintf (stderr, "Memory not allocated!");
return NULL;
}
context = CGBitmapContextCreate (bitmapData,
size.width,
size.height,
8, // bits per component
bitmapBytesPerRow,
colorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextSetAllowsAntialiasing(context,NO);
if (context== NULL) {
free (bitmapData);
fprintf (stderr, "Context not created!");
return NULL;
}
CGColorSpaceRelease( colorSpace );
return context;
}
//static int frameCount = 0; //debugging
- (void) drawRect:(CGRect)rect {
NSDate* start = [NSDate date];
CGContextRef context = [self createBitmapContextOfSize:self.frame.size];
//not sure why this is necessary...image renders upside-down and mirrored
// CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
//CGContextConcatCTM(context, flipVertical);
[self.layer renderInContext:context];
CGImageRef cgImage = CGBitmapContextCreateImage(context);
UIImage* background = [UIImage imageWithCGImage: cgImage];
// CGImageRelease(cgImage);
self.currentScreen = background;
//debugging
//if (frameCount < 40) {
// NSString* filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
// NSString* pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
// [UIImagePNGRepresentation(self.currentScreen) writeToFile: pngPath atomically: YES];
// frameCount++;
//}
//NOTE: to record a scrollview while it is scrolling you need to implement your UIScrollViewDelegate such that it calls
// 'setNeedsDisplay' on the ScreenCaptureView.
if (_recording) {
float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
[self writeVideoFrameAtTime:CMTimeMake((int)millisElapsed, 1000)];
}
float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
float delayRemaining = (1.0 / self.frameRate) - processingSeconds;
CGContextRelease(context);
//redraw at the specified framerate
[self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:delayRemaining > 0.0 ? delayRemaining : 0.01];
}
- (void) cleanupWriter {
avAdaptor = nil;
videoWriterInput = nil;
videoWriter = nil;
startedAt = nil;
if (bitmapData != NULL) {
free(bitmapData);
bitmapData = NULL;
}
}
- (void)dealloc {
[self cleanupWriter];
}
- (NSURL*) tempFileURL {
NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
NSURL* outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSError* error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
NSLog(#"Could not delete old recording file at path: %#", outputPath);
}
}
return outputURL;
}
-(BOOL) setUpWriter {
NSError* error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
//Configure video
NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
[NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] ;
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes] ;
//add input
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
return YES;
}
- (void) completeRecordingSession {
[videoWriterInput markAsFinished];
// Wait for the video
int status = videoWriter.status;
while (status == AVAssetWriterStatusUnknown) {
NSLog(#"Waiting...");
[NSThread sleepForTimeInterval:0.5f];
status = videoWriter.status;
}
@synchronized(self) {
BOOL success = [videoWriter finishWriting];
if (!success) {
NSLog(#"finishWriting returned NO");
}
[self cleanupWriter];
id delegateObj = self.delegate;
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSLog(#"Completed recording, file is stored at: %#", outputURL);
obj = [[ViewController alloc]init];
[obj mergeAudioandvideo:outputURL];
if ([delegateObj respondsToSelector:@selector(recordingFinished:)]) {
[delegateObj performSelectorOnMainThread:@selector(recordingFinished:) withObject:(success ? outputURL : nil) waitUntilDone:YES];
}
}
}
- (bool) startRecording {
bool result = NO;
@synchronized(self) {
if (! _recording) {
result = [self setUpWriter];
startedAt = [NSDate date];
_recording = true;
}
}
return result;
}
- (void) stopRecording {
@synchronized(self) {
if (_recording) {
_recording = false;
[self completeRecordingSession];
}
}
}
-(void) writeVideoFrameAtTime:(CMTime)time {
if (![videoWriterInput isReadyForMoreMediaData]) {
NSLog(#"Not ready for video data");
}
else {
@synchronized (self) {
UIImage* newFrame = self.currentScreen;
CVPixelBufferRef pixelBuffer = NULL;
CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
if(status != 0){
//could not get a buffer from the pool
NSLog(#"Error creating pixel buffer: status=%d", status);
}
// set image data into pixel buffer
CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
uint8_t* destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels); //XXX: will work if the pixel buffer is contiguous and has the same bytesPerRow as the input data
if(status == 0){
BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success)
NSLog(#"Warning: Unable to write buffer to video");
}
//clean up
CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
CVPixelBufferRelease( pixelBuffer );
CFRelease(image);
CGImageRelease(cgImage);
}
}
}
@end
You can try this library - Glimpse. It can record any UIView.

Signal SIGABRT error

I cloned this project from GitHub, and when I built and ran it, it showed this SIGABRT error. The console says:
Make a symbolic breakpoint at UIViewAlertForUnsatisfiableConstraints to catch this in the debugger.
The methods in the UIConstraintBasedLayoutDebugging category on UIView listed in may also be helpful.
MoltinSwiftExample(10351,0x700000323000) malloc: *** error for object 0x7ff288d71cc0: pointer being freed was not allocated
*** set a breakpoint in malloc_error_break to debug
Here is my code.
#import "SDWebImageDownloaderOperation.h"
#import "SDWebImageDecoder.h"
#import "UIImage+MultiFormat.h"
#import <ImageIO/ImageIO.h>
#import "SDWebImageManager.h"
NSString *const SDWebImageDownloadStartNotification = @"SDWebImageDownloadStartNotification";
NSString *const SDWebImageDownloadReceiveResponseNotification = @"SDWebImageDownloadReceiveResponseNotification";
NSString *const SDWebImageDownloadStopNotification = @"SDWebImageDownloadStopNotification";
NSString *const SDWebImageDownloadFinishNotification = @"SDWebImageDownloadFinishNotification";
@interface SDWebImageDownloaderOperation () <NSURLConnectionDataDelegate>
@property (copy, nonatomic) SDWebImageDownloaderProgressBlock progressBlock;
@property (copy, nonatomic) SDWebImageDownloaderCompletedBlock completedBlock;
@property (copy, nonatomic) SDWebImageNoParamsBlock cancelBlock;
@property (assign, nonatomic, getter = isExecuting) BOOL executing;
@property (assign, nonatomic, getter = isFinished) BOOL finished;
@property (strong, nonatomic) NSMutableData *imageData;
@property (strong, nonatomic) NSURLConnection *connection;
@property (strong, atomic) NSThread *thread;
#if TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_4_0
@property (assign, nonatomic) UIBackgroundTaskIdentifier backgroundTaskId;
#endif
@end
@implementation SDWebImageDownloaderOperation {
size_t width, height;
UIImageOrientation orientation;
BOOL responseFromCached;
}
@synthesize executing = _executing;
@synthesize finished = _finished;
- (id)initWithRequest:(NSURLRequest *)request
options:(SDWebImageDownloaderOptions)options
progress:(SDWebImageDownloaderProgressBlock)progressBlock
completed:(SDWebImageDownloaderCompletedBlock)completedBlock
cancelled:(SDWebImageNoParamsBlock)cancelBlock {
if ((self = [super init])) {
_request = request;
_shouldDecompressImages = YES;
_shouldUseCredentialStorage = YES;
_options = options;
_progressBlock = [progressBlock copy];
_completedBlock = [completedBlock copy];
_cancelBlock = [cancelBlock copy];
_executing = NO;
_finished = NO;
_expectedSize = 0;
responseFromCached = YES; // Initially wrong until `connection:willCacheResponse:` is called or not called
}
return self;
}
- (void)start {
@synchronized (self) {
if (self.isCancelled) {
self.finished = YES;
[self reset];
return;
}
#if TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_4_0
Class UIApplicationClass = NSClassFromString(@"UIApplication");
BOOL hasApplication = UIApplicationClass && [UIApplicationClass respondsToSelector:@selector(sharedApplication)];
if (hasApplication && [self shouldContinueWhenAppEntersBackground]) {
__weak __typeof__ (self) wself = self;
UIApplication * app = [UIApplicationClass performSelector:@selector(sharedApplication)];
self.backgroundTaskId = [app beginBackgroundTaskWithExpirationHandler:^{
__strong __typeof (wself) sself = wself;
if (sself) {
[sself cancel];
[app endBackgroundTask:sself.backgroundTaskId];
sself.backgroundTaskId = UIBackgroundTaskInvalid;
}
}];
}
#endif
self.executing = YES;
self.connection = [[NSURLConnection alloc] initWithRequest:self.request delegate:self startImmediately:NO];
self.thread = [NSThread currentThread];
}
[self.connection start];
if (self.connection) {
if (self.progressBlock) {
self.progressBlock(0, NSURLResponseUnknownLength);
}
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadStartNotification object:self];
});
if (floor(NSFoundationVersionNumber) <= NSFoundationVersionNumber_iOS_5_1) {
// Make sure to run the runloop in our background thread so it can process downloaded data
// Note: we use a timeout to work around an issue with NSURLConnection cancel under iOS 5
// not waking up the runloop, leading to dead threads (see https://github.com/rs/SDWebImage/issues/466)
CFRunLoopRunInMode(kCFRunLoopDefaultMode, 10, false);
}
else {
CFRunLoopRun();
}
if (!self.isFinished) {
[self.connection cancel];
[self connection:self.connection didFailWithError:[NSError errorWithDomain:NSURLErrorDomain code:NSURLErrorTimedOut userInfo:@{NSURLErrorFailingURLErrorKey : self.request.URL}]];
}
}
else {
if (self.completedBlock) {
self.completedBlock(nil, nil, [NSError errorWithDomain:NSURLErrorDomain code:0 userInfo:@{NSLocalizedDescriptionKey : @"Connection can't be initialized"}], YES);
}
}
#if TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_4_0
Class UIApplicationClass = NSClassFromString(@"UIApplication");
if(!UIApplicationClass || ![UIApplicationClass respondsToSelector:@selector(sharedApplication)]) {
return;
}
if (self.backgroundTaskId != UIBackgroundTaskInvalid) {
UIApplication * app = [UIApplication performSelector:@selector(sharedApplication)];
[app endBackgroundTask:self.backgroundTaskId];
self.backgroundTaskId = UIBackgroundTaskInvalid;
}
#endif
}
- (void)cancel {
@synchronized (self) {
if (self.thread) {
[self performSelector:@selector(cancelInternalAndStop) onThread:self.thread withObject:nil waitUntilDone:NO];
}
else {
[self cancelInternal];
}
}
}
- (void)cancelInternalAndStop {
if (self.isFinished) return;
[self cancelInternal];
CFRunLoopStop(CFRunLoopGetCurrent());
}
- (void)cancelInternal {
if (self.isFinished) return;
[super cancel];
if (self.cancelBlock) self.cancelBlock();
if (self.connection) {
[self.connection cancel];
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadStopNotification object:self];
});
// As we cancelled the connection, its callback won't be called and thus won't
// maintain the isFinished and isExecuting flags.
if (self.isExecuting) self.executing = NO;
if (!self.isFinished) self.finished = YES;
}
[self reset];
}
- (void)done {
self.finished = YES;
self.executing = NO;
[self reset];
}
- (void)reset {
self.cancelBlock = nil;
self.completedBlock = nil;
self.progressBlock = nil;
self.connection = nil;
self.imageData = nil;
self.thread = nil;
}
- (void)setFinished:(BOOL)finished {
[self willChangeValueForKey:#"isFinished"];
_finished = finished;
[self didChangeValueForKey:#"isFinished"];
}
- (void)setExecuting:(BOOL)executing {
[self willChangeValueForKey:#"isExecuting"];
_executing = executing;
[self didChangeValueForKey:#"isExecuting"];
}
- (BOOL)isConcurrent {
return YES;
}
#pragma mark NSURLConnection (delegate)
- (void)connection:(NSURLConnection *)connection didReceiveResponse:(NSURLResponse *)response {
//'304 Not Modified' is an exceptional one
if (![response respondsToSelector:@selector(statusCode)] || ([((NSHTTPURLResponse *)response) statusCode] < 400 && [((NSHTTPURLResponse *)response) statusCode] != 304)) {
NSInteger expected = response.expectedContentLength > 0 ? (NSInteger)response.expectedContentLength : 0;
self.expectedSize = expected;
if (self.progressBlock) {
self.progressBlock(0, expected);
}
self.imageData = [[NSMutableData alloc] initWithCapacity:expected];
self.response = response;
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadReceiveResponseNotification object:self];
});
}
else {
NSUInteger code = [((NSHTTPURLResponse *)response) statusCode];
//This is the case when server returns '304 Not Modified'. It means that remote image is not changed.
//In case of 304 we need just cancel the operation and return cached image from the cache.
if (code == 304) {
[self cancelInternal];
} else {
[self.connection cancel];
}
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadStopNotification object:self];
});
if (self.completedBlock) {
self.completedBlock(nil, nil, [NSError errorWithDomain:NSURLErrorDomain code:[((NSHTTPURLResponse *)response) statusCode] userInfo:nil], YES);
}
CFRunLoopStop(CFRunLoopGetCurrent());
[self done];
}
}
- (void)connection:(NSURLConnection *)connection didReceiveData:(NSData *)data {
[self.imageData appendData:data];
if ((self.options & SDWebImageDownloaderProgressiveDownload) && self.expectedSize > 0 && self.completedBlock) {
// The following code is from http://www.cocoaintheshell.com/2011/05/progressive-images-download-imageio/
// Thanks to the author @Nyx0uf
// Get the total bytes downloaded
const NSInteger totalSize = self.imageData.length;
// Update the data source, we must pass ALL the data, not just the new bytes
CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)self.imageData, NULL);
if (width + height == 0) {
CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, NULL);
if (properties) {
NSInteger orientationValue = -1;
CFTypeRef val = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight);
if (val) CFNumberGetValue(val, kCFNumberLongType, &height);
val = CFDictionaryGetValue(properties, kCGImagePropertyPixelWidth);
if (val) CFNumberGetValue(val, kCFNumberLongType, &width);
val = CFDictionaryGetValue(properties, kCGImagePropertyOrientation);
if (val) CFNumberGetValue(val, kCFNumberNSIntegerType, &orientationValue);
CFRelease(properties);
// When we draw to Core Graphics, we lose orientation information,
// which means the image below born of initWithCGIImage will be
// oriented incorrectly sometimes. (Unlike the image born of initWithData
// in connectionDidFinishLoading.) So save it here and pass it on later.
orientation = [[self class] orientationFromPropertyValue:(orientationValue == -1 ? 1 : orientationValue)];
}
}
if (width + height > 0 && totalSize < self.expectedSize) {
// Create the image
CGImageRef partialImageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, NULL);
#ifdef TARGET_OS_IPHONE
// Workaround for iOS anamorphic image
if (partialImageRef) {
const size_t partialHeight = CGImageGetHeight(partialImageRef);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef bmContext = CGBitmapContextCreate(NULL, width, height, 8, width * 4, colorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
CGColorSpaceRelease(colorSpace);
if (bmContext) {
CGContextDrawImage(bmContext, (CGRect){.origin.x = 0.0f, .origin.y = 0.0f, .size.width = width, .size.height = partialHeight}, partialImageRef);
CGImageRelease(partialImageRef);
partialImageRef = CGBitmapContextCreateImage(bmContext);
CGContextRelease(bmContext);
}
else {
CGImageRelease(partialImageRef);
partialImageRef = nil;
}
}
#endif
if (partialImageRef) {
UIImage *image = [UIImage imageWithCGImage:partialImageRef scale:1 orientation:orientation];
NSString *key = [[SDWebImageManager sharedManager] cacheKeyForURL:self.request.URL];
UIImage *scaledImage = [self scaledImageForKey:key image:image];
if (self.shouldDecompressImages) {
image = [UIImage decodedImageWithImage:scaledImage];
}
else {
image = scaledImage;
}
CGImageRelease(partialImageRef);
dispatch_main_sync_safe(^{
if (self.completedBlock) {
self.completedBlock(image, nil, nil, NO);
}
});
}
}
CFRelease(imageSource);
}
if (self.progressBlock) {
self.progressBlock(self.imageData.length, self.expectedSize);
}
}
+ (UIImageOrientation)orientationFromPropertyValue:(NSInteger)value {
switch (value) {
case 1:
return UIImageOrientationUp;
case 3:
return UIImageOrientationDown;
case 8:
return UIImageOrientationLeft;
case 6:
return UIImageOrientationRight;
case 2:
return UIImageOrientationUpMirrored;
case 4:
return UIImageOrientationDownMirrored;
case 5:
return UIImageOrientationLeftMirrored;
case 7:
return UIImageOrientationRightMirrored;
default:
return UIImageOrientationUp;
}
}
- (UIImage *)scaledImageForKey:(NSString *)key image:(UIImage *)image {
return SDScaledImageForKey(key, image);
}
- (void)connectionDidFinishLoading:(NSURLConnection *)aConnection {
SDWebImageDownloaderCompletedBlock completionBlock = self.completedBlock;
@synchronized(self) {
CFRunLoopStop(CFRunLoopGetCurrent());
self.thread = nil;
self.connection = nil;
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadStopNotification object:self];
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadFinishNotification object:self];
});
}
if (![[NSURLCache sharedURLCache] cachedResponseForRequest:_request]) {
responseFromCached = NO;
}
if (completionBlock) {
if (self.options & SDWebImageDownloaderIgnoreCachedResponse && responseFromCached) {
completionBlock(nil, nil, nil, YES);
} else if (self.imageData) {
UIImage *image = [UIImage sd_imageWithData:self.imageData];
NSString *key = [[SDWebImageManager sharedManager] cacheKeyForURL:self.request.URL];
image = [self scaledImageForKey:key image:image];
// Do not force decoding animated GIFs
if (!image.images) {
if (self.shouldDecompressImages) {
image = [UIImage decodedImageWithImage:image];
}
}
if (CGSizeEqualToSize(image.size, CGSizeZero)) {
completionBlock(nil, nil, [NSError errorWithDomain:SDWebImageErrorDomain code:0 userInfo:@{NSLocalizedDescriptionKey : @"Downloaded image has 0 pixels"}], YES);
}
else {
completionBlock(image, self.imageData, nil, YES);
}
} else {
completionBlock(nil, nil, [NSError errorWithDomain:SDWebImageErrorDomain code:0 userInfo:@{NSLocalizedDescriptionKey : @"Image data is nil"}], YES);
}
}
self.completionBlock = nil;
[self done];
}
- (void)connection:(NSURLConnection *)connection didFailWithError:(NSError *)error {
@synchronized(self) {
CFRunLoopStop(CFRunLoopGetCurrent());
self.thread = nil;
self.connection = nil;
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadStopNotification object:self];
});
}
if (self.completedBlock) {
self.completedBlock(nil, nil, error, YES);
}
self.completionBlock = nil;
[self done];
}
- (NSCachedURLResponse *)connection:(NSURLConnection *)connection willCacheResponse:(NSCachedURLResponse *)cachedResponse {
responseFromCached = NO; // If this method is called, it means the response wasn't read from cache
if (self.request.cachePolicy == NSURLRequestReloadIgnoringLocalCacheData) {
// Prevents caching of responses
return nil;
}
else {
return cachedResponse;
}
}
- (BOOL)shouldContinueWhenAppEntersBackground {
return self.options & SDWebImageDownloaderContinueInBackground;
}
- (BOOL)connectionShouldUseCredentialStorage:(NSURLConnection __unused *)connection {
return self.shouldUseCredentialStorage;
}
- (void)connection:(NSURLConnection *)connection willSendRequestForAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge{
if ([challenge.protectionSpace.authenticationMethod isEqualToString:NSURLAuthenticationMethodServerTrust]) {
if (!(self.options & SDWebImageDownloaderAllowInvalidSSLCertificates) &&
[challenge.sender respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)]) {
[challenge.sender performDefaultHandlingForAuthenticationChallenge:challenge];
} else {
NSURLCredential *credential = [NSURLCredential credentialForTrust:challenge.protectionSpace.serverTrust];
[[challenge sender] useCredential:credential forAuthenticationChallenge:challenge];
}
} else {
if ([challenge previousFailureCount] == 0) {
if (self.credential) {
[[challenge sender] useCredential:self.credential forAuthenticationChallenge:challenge];
} else {
[[challenge sender] continueWithoutCredentialForAuthenticationChallenge:challenge];
}
} else {
[[challenge sender] continueWithoutCredentialForAuthenticationChallenge:challenge];
}
}
}
@end
The line with the error is:
CFRunLoopRun();

Screen recording of iPhone while playing a video

In my project I have to add a book page-flip animation, and on the right-hand page of this book a video plays. Once the first video completes, the page turns like a book page and the second video plays on the next right-hand page, and so on. Now I have to save all of this as a video that can be downloaded, so that when the downloaded video is played from the gallery it looks the same as it does in my app. Right now I am recording the device's screen and saving it on a server for download. Everything is fine except the video player: in the recording, the portion where the videos are playing (the right-hand page of the book) is not captured.
I am using the code below to record the screen. If any of you have another idea for doing the same thing, please share it with me, or if my code needs changing, please suggest how. Thanks in advance.
// ASScreenRecorder.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
typedef void (^VideoCompletionBlock)(void);
@protocol ASScreenRecorderDelegate;
@interface ASScreenRecorder : NSObject
@property (nonatomic, readonly) BOOL isRecording;
@property (nonatomic, weak) id <ASScreenRecorderDelegate> delegate;
// if saveURL is nil, video will be saved into camera roll
// this property can not be changed whilst recording is in progress
@property (strong, nonatomic) NSURL *videoURL;
+ (instancetype)sharedInstance;
- (BOOL)startRecording;
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
@end
// If your view contains an AVCaptureVideoPreviewLayer or an openGL view
// you'll need to write that data into the CGContextRef yourself.
// In the viewcontroller responsible for the AVCaptureVideoPreviewLayer / openGL view
// set yourself as the delegate for ASScreenRecorder.
// [ASScreenRecorder sharedInstance].delegate = self
// Then implement 'writeBackgroundFrameInContext:(CGContextRef*)contextRef'
// use 'CGContextDrawImage' to draw your view into the provided CGContextRef
@protocol ASScreenRecorderDelegate <NSObject>
- (void)writeBackgroundFrameInContext:(CGContextRef*)contextRef;
@end
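To make the comment above concrete, a rough sketch of that delegate method for a view controller that owns an AVCaptureVideoPreviewLayer might look like the following. The lastCameraFrame property is hypothetical; it stands for a CGImageRef you keep updated from captureOutput:didOutputSampleBuffer:fromConnection: on an AVCaptureVideoDataOutput:

// In the view controller that owns the camera preview:
// [ASScreenRecorder sharedInstance].delegate = self;
- (void)writeBackgroundFrameInContext:(CGContextRef *)contextRef
{
    // lastCameraFrame is a hypothetical CGImageRef property holding the most
    // recent camera frame, captured via an AVCaptureVideoDataOutput callback.
    CGImageRef frame = self.lastCameraFrame;
    if (frame) {
        // Draw the camera frame first; ASScreenRecorder then draws the UIKit
        // window hierarchy (labels, book pages, etc.) on top of it.
        CGContextDrawImage(*contextRef, self.view.bounds, frame);
    }
}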
// ASScreenRecorder.m
// ScreenRecorder
//
// Created by Alan Skipp on 23/04/2014.
// Copyright (c) 2014 Alan Skipp. All rights reserved.
//
#import "ASScreenRecorder.h"
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import <AssetsLibrary/AssetsLibrary.h>
@interface ASScreenRecorder()
@property (strong, nonatomic) AVAssetWriter *videoWriter;
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
@property (strong, nonatomic) CADisplayLink *displayLink;
@property (strong, nonatomic) NSDictionary *outputBufferPoolAuxAttributes;
@property (nonatomic) CFTimeInterval firstTimeStamp;
@property (nonatomic) BOOL isRecording;
@end
@implementation ASScreenRecorder
{
dispatch_queue_t _render_queue;
dispatch_queue_t _append_pixelBuffer_queue;
dispatch_semaphore_t _frameRenderingSemaphore;
dispatch_semaphore_t _pixelAppendSemaphore;
CGSize _viewSize;
CGFloat _scale;
CGColorSpaceRef _rgbColorSpace;
CVPixelBufferPoolRef _outputBufferPool;
}
#pragma mark - initializers
+ (instancetype)sharedInstance {
static dispatch_once_t once;
static ASScreenRecorder *sharedInstance;
dispatch_once(&once, ^{
sharedInstance = [[self alloc] init];
});
return sharedInstance;
}
- (instancetype)init
{
self = [super init];
if (self) {
_viewSize = [UIApplication sharedApplication].delegate.window.bounds.size;
_scale = [UIScreen mainScreen].scale;
// record half size resolution for retina iPads
if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) {
_scale = 1.0;
}
_isRecording = NO;
_append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL);
_render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_render_queue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0));
_frameRenderingSemaphore = dispatch_semaphore_create(1);
_pixelAppendSemaphore = dispatch_semaphore_create(1);
}
return self;
}
#pragma mark - public
- (void)setVideoURL:(NSURL *)videoURL
{
NSAssert(!_isRecording, @"videoURL can not be changed whilst recording is in progress");
_videoURL = videoURL;
}
- (BOOL)startRecording
{
if (!_isRecording) {
[self setUpWriter];
_isRecording = (_videoWriter.status == AVAssetWriterStatusWriting);
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)];
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}
return _isRecording;
}
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
{
if (_isRecording) {
_isRecording = NO;
[_displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
[self completeRecordingSession:completionBlock];
}
}
#pragma mark - private
-(void)setUpWriter
{
_rgbColorSpace = CGColorSpaceCreateDeviceRGB();
NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
(id)kCVPixelBufferWidthKey : @(_viewSize.width * _scale),
(id)kCVPixelBufferHeightKey : @(_viewSize.height * _scale),
(id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * _scale * 4)
};
_outputBufferPool = NULL;
CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool);
NSError* error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:self.videoURL ?: [self tempFileURL]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
NSInteger pixelNumber = _viewSize.width * _viewSize.height * _scale;
NSDictionary* videoCompression = @{AVVideoAverageBitRateKey: @(pixelNumber * 11.4)};
NSDictionary* videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithInt:_viewSize.width*_scale],
AVVideoHeightKey: [NSNumber numberWithInt:_viewSize.height*_scale],
AVVideoCompressionPropertiesKey: videoCompression};
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
_videoWriterInput.transform = [self videoTransformForDeviceOrientation];
_avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil];
[_videoWriter addInput:_videoWriterInput];
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
}
- (CGAffineTransform)videoTransformForDeviceOrientation
{
CGAffineTransform videoTransform;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationLandscapeLeft:
videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
break;
case UIDeviceOrientationLandscapeRight:
videoTransform = CGAffineTransformMakeRotation(M_PI_2);
break;
case UIDeviceOrientationPortraitUpsideDown:
videoTransform = CGAffineTransformMakeRotation(M_PI);
break;
default:
videoTransform = CGAffineTransformIdentity;
}
return videoTransform;
}
- (NSURL*)tempFileURL
{
NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"];
[self removeTempFilePath:outputPath];
return [NSURL fileURLWithPath:outputPath];
}
- (void)removeTempFilePath:(NSString*)filePath
{
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath]) {
NSError* error;
if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
NSLog(#"Could not delete old recording:%#", [error localizedDescription]);
}
}
}
- (void)completeRecordingSession:(VideoCompletionBlock)completionBlock;
{
dispatch_async(_render_queue, ^{
dispatch_sync(_append_pixelBuffer_queue, ^{
[_videoWriterInput markAsFinished];
[_videoWriter finishWritingWithCompletionHandler:^{
void (^completion)(void) = ^() {
[self cleanup];
dispatch_async(dispatch_get_main_queue(), ^{
if (completionBlock) completionBlock();
});
};
if (self.videoURL) {
completion();
} else {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
NSLog(#"Error copying video to camera roll:%#", [error localizedDescription]);
} else {
[self removeTempFilePath:_videoWriter.outputURL.path];
completion();
}
}];
}
}];
});
});
}
- (void)cleanup
{
self.avAdaptor = nil;
self.videoWriterInput = nil;
self.videoWriter = nil;
self.firstTimeStamp = 0;
self.outputBufferPoolAuxAttributes = nil;
CGColorSpaceRelease(_rgbColorSpace);
CVPixelBufferPoolRelease(_outputBufferPool);
}
- (void)writeVideoFrame
{
// throttle the number of frames to prevent meltdown
// technique gleaned from Brad Larson's answer here: http://stackoverflow.com/a/5956119
if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
return;
}
dispatch_async(_render_queue, ^{
if (![_videoWriterInput isReadyForMoreMediaData]) return;
if (!self.firstTimeStamp) {
self.firstTimeStamp = _displayLink.timestamp;
}
CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp);
CMTime time = CMTimeMakeWithSeconds(elapsed, 1000);
CVPixelBufferRef pixelBuffer = NULL;
CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
if (self.delegate) {
[self.delegate writeBackgroundFrameInContext:&bitmapContext];
}
// draw each window into the context (other windows include UIKeyboard, UIAlert)
// FIX: UIKeyboard is currently only rendered correctly in portrait orientation
dispatch_sync(dispatch_get_main_queue(), ^{
UIGraphicsPushContext(bitmapContext); {
for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
[window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
}
} UIGraphicsPopContext();
});
// append pixelBuffer on a async dispatch_queue, the next frame is rendered whilst this one appends
// must not overwhelm the queue with pixelBuffers, therefore:
// check if _append_pixelBuffer_queue is ready
// if it’s not ready, release pixelBuffer and bitmapContext
if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) {
dispatch_async(_append_pixelBuffer_queue, ^{
BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success) {
NSLog(#"Warning: Unable to write buffer to video");
}
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
dispatch_semaphore_signal(_pixelAppendSemaphore);
});
} else {
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
}
dispatch_semaphore_signal(_frameRenderingSemaphore);
});
}
- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer
{
CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer);
CVPixelBufferLockBaseAddress(*pixelBuffer, 0);
CGContextRef bitmapContext = NULL;
bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer),
CVPixelBufferGetWidth(*pixelBuffer),
CVPixelBufferGetHeight(*pixelBuffer),
8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
);
CGContextScaleCTM(bitmapContext, _scale, _scale);
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
CGContextConcatCTM(bitmapContext, flipVertical);
return bitmapContext;
}
@end
@Gobinda, this code works in my case. You need to specify the frame if you want to record a portion of the window. In the init method, viewSize is defined as the window size, so you need to change viewSize to your video frame; see the small sketch below.
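For example, if only the right-hand page of the book should be recorded, the assignment in init (or a custom setter you add) would use that rect instead of the window bounds; something along these lines, where bookPageFrame is a made-up placeholder for the rect you want recorded:

// Inside ASScreenRecorder's init, instead of using the window size:
CGRect bookPageFrame = CGRectMake(0, 0, 512, 384); // hypothetical frame of the right-hand page
_viewSize = bookPageFrame.size;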

iOS App: Preview Layer freezes after Recording button is clicked

I am developing an iOS app that records video using the rear camera.
I have managed to get the preview layer working fine.
However, when I tap the Record button, the preview freezes.
Below is my code. Please help me solve this problem.
Pg5VideoViewController.h
@interface Pg5VideoViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureFileOutputRecordingDelegate> {
BOOL WeAreRecording;
IBOutlet UIView *videoViewBg;
AVCaptureSession *_captureSession;
UIImageView *_imageView;
CALayer *_customLayer;
AVCaptureVideoPreviewLayer *_prevLayer;
UIColor *pickedColor;
AVCaptureMovieFileOutput *movieFileOutput;
IBOutlet UIView *theColor;
}
@property (nonatomic,retain) IBOutlet UIView *theColor;
@property (nonatomic,retain) UIColor *pickedColor;
@property (nonatomic,retain) IBOutlet UIView *videoViewBg;
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
@property (nonatomic, retain) AVCaptureMovieFileOutput *movieFileOutput;
-(void)initCapture;
-(UIColor *) colorOfPoint:(CGPoint)point;
-(IBAction)takeVideo:(id)sender;
@end
the Pg5VideoViewController.m file:
@implementation Pg5VideoViewController
@synthesize videoViewBg;
@synthesize captureSession = _captureSession;
@synthesize imageView = _imageView;
@synthesize customLayer = _customLayer;
@synthesize prevLayer = _prevLayer;
@synthesize pickedColor = _pickedColor;
@synthesize theColor = _theColor;
@synthesize movieFileOutput = _movieFileOutput;
#pragma mark -
#pragma mark Initialization
- (id)init {
self = [super init];
if (self) {
self.imageView = nil;
self.prevLayer = nil;
self.customLayer = nil;
}
return self;
}
- (void)initCapture {
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
error:nil];
movieFileOutput = [[AVCaptureVideoDataOutput alloc] init];
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[movieFileOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[movieFileOutput setVideoSettings:videoSettings];
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession addInput:captureInput];
[self.captureSession addOutput:movieFileOutput];
[self.captureSession setSessionPreset:AVCaptureSessionPresetMedium];
self.customLayer = [CALayer layer];
self.customLayer.frame = CGRectMake(42, 40, 940, 558);
//self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);
//self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
[self.view.layer addSublayer:self.customLayer];
[self.captureSession startRunning];
}
#pragma mark -
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer,0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
[self.customLayer performSelectorOnMainThread:#selector(setContents:) withObject: (id) newImage waitUntilDone:YES];
UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
CGImageRelease(newImage);
[self.imageView performSelectorOnMainThread:#selector(setImage:) withObject:image waitUntilDone:YES];
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
[pool drain];
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
NSLog(#"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
}
}];
}
[library release];
}
}
- (void)viewDidAppear:(BOOL)animated {
}
- (IBAction)takeVideo:(id)sender {
AVCaptureMovieFileOutput *movieFileOutput1 = [[AVCaptureMovieFileOutput alloc] init];
if(!WeAreRecording) {
NSLog(#"START RECORDING");
WeAreRecording = YES;
self.videoViewBg.backgroundColor = [UIColor redColor];
NSDateFormatter *formatter;
NSString *dateString;
formatter = [[NSDateFormatter alloc]init];
[formatter setDateFormat:#"dd-MM-yyyy HH:mm:ss"];
dateString = [formatter stringFromDate:[NSDate date]];
[formatter release];
NSLog(#"The dateString is : %#",dateString);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
NSString *movieFileName = [NSString stringWithFormat: #"%#.mp4",dateString];
NSString *filePath = [documentsDirectoryPath stringByAppendingPathComponent:movieFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:filePath];
[self.captureSession stopRunning];
[self.captureSession beginConfiguration];
// [self.captureSession removeOutput:movieFileOutput];
if([self.captureSession canAddOutput:movieFileOutput1])
{
[self.captureSession addOutput:movieFileOutput1];
}
else
{
NSLog(#"Couldn't add still output");
}
[movieFileOutput1 startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[self.captureSession commitConfiguration];
[self.captureSession startRunning];
[outputURL release];
} else {
NSLog(#"STOP RECORDING");
WeAreRecording = NO;
self.videoViewBg.backgroundColor = [UIColor whiteColor];
[movieFileOutput1 stopRecording];
[self.captureSession removeOutput:movieFileOutput1];
}
}
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
UITouch *touch = [[event allTouches] anyObject];
CGPoint loc = [touch locationInView:self.view];
self.pickedColor = [self colorOfPoint:loc];
self.theColor.backgroundColor = self.pickedColor;
}
-(UIColor *) colorOfPoint:(CGPoint)point {
unsigned char pixel[4] = {0};
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pixel, 1, 1, 8, 4, colorSpace, kCGImageAlphaPremultipliedLast);
CGContextTranslateCTM(context, -point.x, -point.y);
[self.view.layer renderInContext:context];
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
UIColor *color = [UIColor colorWithRed:pixel[0]/255.0 green:pixel[1]/255.0 blue:pixel[2]/255.0 alpha:pixel[3]/255.0];
return color;
}
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad {
[super viewDidLoad];
[self initCapture];
WeAreRecording = NO;
self.videoViewBg.layer.cornerRadius = 55;
}
// Override to allow orientations other than the default portrait orientation.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
if(interfaceOrientation == UIInterfaceOrientationLandscapeRight) {
return YES;
}
return NO;
}
- (void)didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc. that aren't in use.
}
- (void)viewDidUnload {
[super viewDidUnload];
self.imageView = nil;
self.customLayer = nil;
self.prevLayer = nil;
[self.captureSession stopRunning];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (void)dealloc {
[movieFileOutput release];
[self.captureSession release];
[super dealloc];
}
#end
Please help
The problem here is not trivial. AVFoundation simply can't handle both AVCaptureMovieFileOutput and AVCaptureVideoDataOutput on the same session simultaneously. That means you can't display the preview (which, in this setup, requires AVCaptureVideoDataOutput) while recording (which requires AVCaptureMovieFileOutput). This is very stupid, but that's life.
The only way I know to do this is to use only AVCaptureVideoDataOutput and, inside captureOutput:didOutputSampleBuffer:fromConnection:, write the frames to the video file manually. The following code snippets should help.
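Before the writer snippets, here is a minimal sketch of the capture-session side of this approach (assuming ARC; the method and variable names are illustrative, not from the question's code). A single AVCaptureVideoDataOutput feeds your sample-buffer delegate, which both drives the preview and hands frames to the asset writer.
// Sketch only: one AVCaptureVideoDataOutput drives both the preview and the recording.
- (void)setupCaptureSession
{
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession setSessionPreset:AVCaptureSessionPresetMedium];
NSError *error = nil;
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
if (input) {
[self.captureSession addInput:input];
}
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
[videoDataOutput setSampleBufferDelegate:self queue:dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL)];
if ([self.captureSession canAddOutput:videoDataOutput]) {
[self.captureSession addOutput:videoDataOutput];
}
[self.captureSession startRunning];
}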
Properties
#property (strong, nonatomic) AVAssetWriter* recordingAssetWriter;
#property (strong, nonatomic) AVAssetWriterInput* recordingAssetWriterInput;
#property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor* recordingPixelBufferAdaptor;
To initialize the video file (for example, when you start recording):
// Init AVAssetWriter
NSError* error = nil;
self.recordingAssetWriter = [[AVAssetWriter alloc] initWithURL:<the video file URL> fileType:AVFileTypeMPEG4 error:&error];
// Init AVAssetWriterInput & AVAssetWriterInputPixelBufferAdaptor
NSDictionary* settings = #{AVVideoWidthKey: #(480), AVVideoHeightKey: #(640), AVVideoCodecKey: AVVideoCodecH264};
self.recordingAssetWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:settings];
self.recordingAssetWriterInput.expectsMediaDataInRealTime = YES;
self.recordingPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:self.recordingAssetWriterInput sourcePixelBufferAttributes:#{(NSString*)kCVPixelBufferPixelFormatTypeKey: #(kCVPixelFormatType_32BGRA)}];
// Add Input
[self.recordingAssetWriter addInput:self.recordingAssetWriterInput];
// Start ...
_recording = YES;
To write frames to the video file
// Inside the captureOutput:didOutputSampleBuffer:fromConnection: delegate method
// _recording is the flag to see if we're recording
if (_recording) {
CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
// Start the writer session at the timestamp of the first frame
if (self.recordingAssetWriter.status != AVAssetWriterStatusWriting) {
[self.recordingAssetWriter startWriting];
[self.recordingAssetWriter startSessionAtSourceTime:sampleTime];
}
if (self.recordingAssetWriterInput.readyForMoreMediaData) {
// The pixel buffer is the image buffer delivered by AVCaptureVideoDataOutput
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
[self.recordingPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:sampleTime];
}
}
To finalize the video file when you finish recording:
[self.recordingAssetWriterInput markAsFinished];
[self.recordingAssetWriter finishWritingWithCompletionHandler:^{
// finishWritingWithCompletionHandler: is asynchronous, so clean up here in the
// completion handler rather than immediately after calling it.
self.recordingAssetWriter = nil;
self.recordingAssetWriterInput = nil;
self.recordingPixelBufferAdaptor = nil;
}];
Note that I omitted error checking for clarity.
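If you want it back, a minimal sketch of that error checking might look like this (outputURL stands in for the video file URL above, and the logging is illustrative):
// Sketch only: basic error handling around the asset writer.
NSError *error = nil;
self.recordingAssetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:&error];
if (!self.recordingAssetWriter) {
NSLog(@"Could not create asset writer: %@", error);
return;
}
// Later, when appending frames, check for a failed writer before continuing:
if (self.recordingAssetWriter.status == AVAssetWriterStatusFailed) {
NSLog(@"Asset writer failed: %@", self.recordingAssetWriter.error);
}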
