AVCaptureVideoDataOutput and AVCaptureMovieFileOutput at the same time? - ios

There was a similar question, and I have the same problem. Here is my code:
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
self.packagesBufferMutableArray = [NSMutableArray array];
self.fps = 25;
[self initDateFormatter];
// [self setupCaptureSession];
[self performSelector:@selector(stopWork) withObject:nil afterDelay:5];
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
self.isExtra = YES;
[self.captureSession startRunning];
}
- (void)initDateFormatter {
self.dateFormatter = [NSDateFormatter new];
[_dateFormatter setDateFormat:@"yy-MM-dd--HH-mm-ss"];
}
- (NSString *)generateFilePathForMovie {
return [NSString stringWithFormat:@"%@/%@.mov",
[NSHomeDirectory() stringByAppendingPathComponent:@"Documents"],
[_dateFormatter stringFromDate:[NSDate date]]];
}
- (NSDictionary *)settingsForWriterInput {
int bitRate = (300 + /*self.currentQuality*/5 * 90) * 1024; //NORMAL 750 * 1024
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:bitRate], AVVideoAverageBitRateKey,
nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:480], AVVideoWidthKey,
[NSNumber numberWithInt:320], AVVideoHeightKey,
codecSettings, AVVideoCompressionPropertiesKey,
nil];
return videoSettings;
}
- (AVAssetWriterInput *)createVideoWriterInput {
NSDictionary *videoSettings = [self settingsForWriterInput];
return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
}
- (void)setupCaptureSession
{
NSError *error = nil;
self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// Create a device input with the device and add it to the session.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input)
{
// Handling the error appropriately.
}
[self.captureSession addInput:input];
// Create a VideoDataOutput and add it to the session
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
self.assetWriterInput =[self createVideoWriterInput];// [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
self.assetWriterInput.expectsMediaDataInRealTime = YES;
AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:[self generateFilePathForMovie]] fileType:AVFileTypeMPEG4 error:&error];
[assetWriter addInput:self.assetWriterInput];
self.assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:self.assetWriterInput sourcePixelBufferAttributes:
[NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA],kCVPixelBufferPixelFormatTypeKey, nil]];
[self.captureSession addOutput:output];
// Configure your output.
dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Specify the pixel format
output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// If you wish to cap the frame rate to a known value, such as 15 fps, set
// minFrameDuration.
for (AVCaptureOutput* output in self.captureSession.outputs)
{
if ([output isKindOfClass:[AVCaptureVideoDataOutput class]])
{
AVCaptureConnection* connection = [output connectionWithMediaType:AVMediaTypeVideo];
CMTimeShow(connection.videoMinFrameDuration);
CMTimeShow(connection.videoMaxFrameDuration);
CMTime frameDuration = CMTimeMake(1, self.fps);
if (connection.isVideoMinFrameDurationSupported)
connection.videoMinFrameDuration = frameDuration;
if (connection.isVideoMaxFrameDurationSupported)
connection.videoMaxFrameDuration = frameDuration;
CMTimeShow(connection.videoMinFrameDuration);
CMTimeShow(connection.videoMaxFrameDuration);
}
else
{
AVCaptureConnection* connection = [output connectionWithMediaType:AVMediaTypeVideo];
CMTimeShow(connection.videoMinFrameDuration);
CMTimeShow(connection.videoMaxFrameDuration);
if (connection.isVideoMinFrameDurationSupported)
connection.videoMinFrameDuration = CMTimeMake(1, 20);
if (connection.isVideoMaxFrameDurationSupported)
connection.videoMaxFrameDuration = CMTimeMake(1, 20);
CMTimeShow(connection.videoMinFrameDuration);
CMTimeShow(connection.videoMaxFrameDuration);
}
}
// Start the session running to start the flow of data
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
[self.captureSession startRunning];
// Assign session to an ivar.
}
- (NSMutableDictionary *)createEmptyPackage
{
return [NSMutableDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInteger:0], @"framesCount", [NSMutableArray array], @"framesArray", nil];
}
- (void)updateCurrentPackageWithFrameImage:(UIImage *)frameImage
{
if (self.currentPackageMutableDictionary == nil)
{
NSLog(#"new package with number %d", self.packagesBufferMutableArray.count);
self.currentPackageMutableDictionary = [self createEmptyPackage];
}
NSInteger framesCount = [[self.currentPackageMutableDictionary objectForKey:@"framesCount"] integerValue];
NSMutableArray *framesArray = [self.currentPackageMutableDictionary objectForKey:@"framesArray"];
NSLog(@"added %d frame at current package", framesCount);
framesCount++;
[framesArray addObject:frameImage];
[self.currentPackageMutableDictionary setObject:[NSNumber numberWithInteger:framesCount] forKey:@"framesCount"];
if (framesCount == self.fps)
{
[self.packagesBufferMutableArray addObject:[NSDictionary dictionaryWithDictionary:self.currentPackageMutableDictionary]];
self.currentPackageMutableDictionary = nil;
if ((self.packagesBufferMutableArray.count == 31) && !self.isExtra)
{
NSLog(#"remove old package");
[self.packagesBufferMutableArray removeObjectAtIndex:0];
}
}
}
// Delegate routine that is called when a sample buffer was written
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
// Create a UIImage from the sample buffer data
UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
// UIImageWriteToSavedPhotosAlbum(image, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
[self updateCurrentPackageWithFrameImage:image];
// UIImageView *imageView = [[UIImageView alloc] initWithImage:image];
// imageView.frame = CGRectMake(0, 0, image.size.width, image.size.height);
// [self.view addSubview:imageView];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// a very dense way to keep track of the time at which this frame
// occurs relative to the output stream, but it's just an example!
static int64_t frameNumber = 0;
if(self.assetWriterInput.readyForMoreMediaData)
[self.assetWriterPixelBufferAdaptor appendPixelBuffer:imageBuffer
withPresentationTime:CMTimeMake(frameNumber, 24)];
frameNumber++;
}
- (void) image: (UIImage *) image
didFinishSavingWithError: (NSError *) error
contextInfo: (void *) contextInfo
{
}
- (void)startWork
{
}
- (void)stopWork
{
[self.captureSession stopRunning];
[self.assetWriter finishWriting];
}
// Create a UIImage from sample buffer data
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
// Get a CMSampleBuffer's Core Video image buffer for the media data
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get the number of bytes per row for the pixel buffer
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Get the number of bytes per row for the pixel buffer
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the pixel buffer width and height
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Create a bitmap graphics context with the sample buffer data
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// Create a Quartz image from the pixel data in the bitmap graphics context
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
// Free up the context and color space
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
// Create an image object from the Quartz image
UIImage *image = [UIImage imageWithCGImage:quartzImage];
// Release the Quartz image
CGImageRelease(quartzImage);
return (image);
}
So it creates a .mov file, but I can't play it with any player.
QuickTime Player cannot open "14-02-02 - 21-29-11.mov" because the movie file format is not recognized.
Maybe I have to set some other parameters or use another approach?
Could someone explain or post an example?
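One thing worth checking (a guess on my part, not a confirmed fix): in the code above the writer created in setupCaptureSession is only a local variable, so the self.assetWriter that stopWork calls finishWriting on is still nil and the file is never finalized, which usually leaves an unplayable movie. Also, the file has a .mov extension while the writer is created with AVFileTypeMPEG4. A rough sketch of keeping the writer in the property and finishing it asynchronously could look like this (assuming an assetWriter property matching the stopWork code):
// In setupCaptureSession: store the writer in the property instead of a local variable,
// and use the QuickTime container to match the .mov extension.
self.assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:[self generateFilePathForMovie]] fileType:AVFileTypeQuickTimeMovie error:&error];
[self.assetWriter addInput:self.assetWriterInput];
// In stopWork: mark the input finished and let the writer close the file before using it.
- (void)stopWork
{
[self.captureSession stopRunning];
[self.assetWriterInput markAsFinished];
[self.assetWriter finishWritingWithCompletionHandler:^{
NSLog(@"writer finished with status %ld", (long)self.assetWriter.status);
}];
}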

Related

Screen video Recording in ios

I want to record a UIView in iOS. I have tried, but I am not getting a clear video. A screenshot is here:
https://i.stack.imgur.com/Gmwkr.png
Please suggest; it looks like there is a problem with the frame I am passing.
#import "screerecorder.h"
#import <QuartzCore/QuartzCore.h>
#import <MobileCoreServices/UTCoreTypes.h>
#import <AssetsLibrary/AssetsLibrary.h>
@interface screerecorder(Private)
- (void) writeVideoFrameAtTime:(CMTime)time;
@end
@implementation screerecorder
@synthesize currentScreen, frameRate, delegate;
- (void) initialize {
// Initialization code
self.clearsContextBeforeDrawing = YES;
self.currentScreen = nil;
self.frameRate = 10.0f; //10 frames per seconds
_recording = false;
videoWriter = nil;
videoWriterInput = nil;
avAdaptor = nil;
startedAt = nil;
bitmapData = NULL;
}
- (id) initWithCoder:(NSCoder *)aDecoder {
self = [super initWithCoder:aDecoder];
if (self) {
[self initialize];
}
return self;
}
- (id) init {
self = [super init];
if (self) {
[self initialize];
}
return self;
}
- (id)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self) {
[self initialize];
}
return self;
}
- (CGContextRef) createBitmapContextOfSize:(CGSize) size {
CGContextRef context = NULL;
CGColorSpaceRef colorSpace;
int bitmapByteCount;
int bitmapBytesPerRow;
bitmapBytesPerRow = (size.width * 4);
bitmapByteCount = (bitmapBytesPerRow * size.height);
colorSpace = CGColorSpaceCreateDeviceRGB();
if (bitmapData != NULL) {
free(bitmapData);
}
bitmapData = malloc( bitmapByteCount );
if (bitmapData == NULL) {
fprintf (stderr, "Memory not allocated!");
return NULL;
}
context = CGBitmapContextCreate (bitmapData,
size.width,
size.height,
8, // bits per component
bitmapBytesPerRow,
colorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextSetAllowsAntialiasing(context,NO);
if (context== NULL) {
free (bitmapData);
fprintf (stderr, "Context not created!");
return NULL;
}
CGColorSpaceRelease( colorSpace );
return context;
}
//static int frameCount = 0; //debugging
- (void) drawRect:(CGRect)rect {
NSDate* start = [NSDate date];
CGContextRef context = [self createBitmapContextOfSize:self.frame.size];
//not sure why this is necessary...image renders upside-down and mirrored
// CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
//CGContextConcatCTM(context, flipVertical);
[self.layer renderInContext:context];
CGImageRef cgImage = CGBitmapContextCreateImage(context);
UIImage* background = [UIImage imageWithCGImage: cgImage];
// CGImageRelease(cgImage);
self.currentScreen = background;
//debugging
//if (frameCount < 40) {
// NSString* filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
// NSString* pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
// [UIImagePNGRepresentation(self.currentScreen) writeToFile: pngPath atomically: YES];
// frameCount++;
//}
//NOTE: to record a scrollview while it is scrolling you need to implement your UIScrollViewDelegate such that it calls
// 'setNeedsDisplay' on the ScreenCaptureView.
if (_recording) {
float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
[self writeVideoFrameAtTime:CMTimeMake((int)millisElapsed, 1000)];
}
float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
float delayRemaining = (1.0 / self.frameRate) - processingSeconds;
CGContextRelease(context);
//redraw at the specified framerate
[self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:delayRemaining > 0.0 ? delayRemaining : 0.01];
}
- (void) cleanupWriter {
avAdaptor = nil;
videoWriterInput = nil;
videoWriter = nil;
startedAt = nil;
if (bitmapData != NULL) {
free(bitmapData);
bitmapData = NULL;
}
}
- (void)dealloc {
[self cleanupWriter];
}
- (NSURL*) tempFileURL {
NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
NSURL* outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSError* error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
NSLog(@"Could not delete old recording file at path: %@", outputPath);
}
}
return outputURL;
}
-(BOOL) setUpWriter {
NSError* error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
//Configure video
NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
[NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] ;
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes] ;
//add input
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
return YES;
}
- (void) completeRecordingSession {
[videoWriterInput markAsFinished];
// Wait for the video
int status = videoWriter.status;
while (status == AVAssetWriterStatusUnknown) {
NSLog(#"Waiting...");
[NSThread sleepForTimeInterval:0.5f];
status = videoWriter.status;
}
#synchronized(self) {
BOOL success = [videoWriter finishWriting];
if (!success) {
NSLog(#"finishWriting returned NO");
}
[self cleanupWriter];
id delegateObj = self.delegate;
NSString *outputPath = [[NSString alloc] initWithFormat:#"%#/%#", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], #"output.mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSLog(#"Completed recording, file is stored at: %#", outputURL);
obj = [[ViewController alloc]init];
[obj mergeAudioandvideo:outputURL];
if ([delegateObj respondsToSelector:#selector(recordingFinished:)]) {
[delegateObj performSelectorOnMainThread:#selector(recordingFinished:) withObject:(success ? outputURL : nil) waitUntilDone:YES];
}
}
}
- (bool) startRecording {
bool result = NO;
@synchronized(self) {
if (! _recording) {
result = [self setUpWriter];
startedAt = [NSDate date];
_recording = true;
}
}
return result;
}
- (void) stopRecording {
@synchronized(self) {
if (_recording) {
_recording = false;
[self completeRecordingSession];
}
}
}
-(void) writeVideoFrameAtTime:(CMTime)time {
if (![videoWriterInput isReadyForMoreMediaData]) {
NSLog(#"Not ready for video data");
}
else {
#synchronized (self) {
UIImage* newFrame = self.currentScreen;
CVPixelBufferRef pixelBuffer = NULL;
CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
if(status != 0){
//could not get a buffer from the pool
NSLog(#"Error creating pixel buffer: status=%d", status);
}
// set image data into pixel buffer
CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
uint8_t* destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels); //XXX: will work if the pixel buffer is contiguous and has the same bytesPerRow as the input data
if(status == 0){
BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success)
NSLog(#"Warning: Unable to write buffer to video");
}
//clean up
CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
CVPixelBufferRelease( pixelBuffer );
CFRelease(image);
CGImageRelease(cgImage);
}
}
}
@end
You can try this library - Glimpse. It can record any UIView.

Make Video From Image iOS

I found this tutorial http://codethink.no-ip.org/wordpress/archives/673#comment-118063 via the SO question Screen capture video in iOS programmatically. It was a bit outdated for iOS, so I updated it, and I am very close to having it work, but putting the UIImages together just isn't quite working right now.
Here is how I call the method in viewDidLoad
[captureView performSelector:@selector(startRecording) withObject:nil afterDelay:1.0];
[captureView performSelector:@selector(stopRecording) withObject:nil afterDelay:5.0];
and captureView is an IBOutlet connected to my view.
And then I have the class ScreenCapture.h & .m
Here is .h
@protocol ScreenCaptureViewDelegate <NSObject>
- (void) recordingFinished:(NSString*)outputPathOrNil;
@end
@interface ScreenCaptureView : UIView {
//video writing
AVAssetWriter *videoWriter;
AVAssetWriterInput *videoWriterInput;
AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
//recording state
BOOL _recording;
NSDate* startedAt;
void* bitmapData;
}
//for recording video
- (bool) startRecording;
- (void) stopRecording;
//for accessing the current screen and adjusting the capture rate, etc.
@property(retain) UIImage* currentScreen;
@property(assign) float frameRate;
@property(nonatomic, assign) id<ScreenCaptureViewDelegate> delegate;
@end
And here is my .m
@interface ScreenCaptureView(Private)
- (void) writeVideoFrameAtTime:(CMTime)time;
@end
@implementation ScreenCaptureView
@synthesize currentScreen, frameRate, delegate;
- (void) initialize {
// Initialization code
self.clearsContextBeforeDrawing = YES;
self.currentScreen = nil;
self.frameRate = 10.0f; //10 frames per seconds
_recording = false;
videoWriter = nil;
videoWriterInput = nil;
avAdaptor = nil;
startedAt = nil;
bitmapData = NULL;
}
- (id) initWithCoder:(NSCoder *)aDecoder {
self = [super initWithCoder:aDecoder];
if (self) {
[self initialize];
}
return self;
}
- (id) init {
self = [super init];
if (self) {
[self initialize];
}
return self;
}
- (id)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self) {
[self initialize];
}
return self;
}
- (CGContextRef) createBitmapContextOfSize:(CGSize) size {
CGContextRef context = NULL;
CGColorSpaceRef colorSpace;
int bitmapByteCount;
int bitmapBytesPerRow;
bitmapBytesPerRow = (size.width * 4);
bitmapByteCount = (bitmapBytesPerRow * size.height);
colorSpace = CGColorSpaceCreateDeviceRGB();
if (bitmapData != NULL) {
free(bitmapData);
}
bitmapData = malloc( bitmapByteCount );
if (bitmapData == NULL) {
fprintf (stderr, "Memory not allocated!");
return NULL;
}
context = CGBitmapContextCreate (bitmapData,
size.width,
size.height,
8, // bits per component
bitmapBytesPerRow,
colorSpace,
(CGBitmapInfo) kCGImageAlphaNoneSkipFirst);
CGContextSetAllowsAntialiasing(context,NO);
if (context== NULL) {
free (bitmapData);
fprintf (stderr, "Context not created!");
return NULL;
}
CGColorSpaceRelease( colorSpace );
return context;
}
static int frameCount = 0; //debugging
- (void) drawRect:(CGRect)rect {
NSDate* start = [NSDate date];
CGContextRef context = [self createBitmapContextOfSize:self.frame.size];
//not sure why this is necessary...image renders upside-down and mirrored
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
CGContextConcatCTM(context, flipVertical);
[self.layer renderInContext:context];
CGImageRef cgImage = CGBitmapContextCreateImage(context);
UIImage* background = [UIImage imageWithCGImage: cgImage];
CGImageRelease(cgImage);
self.currentScreen = background;
//debugging
if (frameCount < 40) {
NSString* filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
NSString* pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
[UIImagePNGRepresentation(self.currentScreen) writeToFile: pngPath atomically: YES];
frameCount++;
}
//NOTE: to record a scrollview while it is scrolling you need to implement your UIScrollViewDelegate such that it calls
// 'setNeedsDisplay' on the ScreenCaptureView.
if (_recording) {
float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
[self writeVideoFrameAtTime:CMTimeMake((int)millisElapsed, 1000)];
}
float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
float delayRemaining = (1.0 / self.frameRate) - processingSeconds;
CGContextRelease(context);
//redraw at the specified framerate
[self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:delayRemaining > 0.0 ? delayRemaining : 0.01];
}
- (void) cleanupWriter {
avAdaptor = nil;
videoWriterInput = nil;
videoWriter = nil;
startedAt = nil;
if (bitmapData != NULL) {
free(bitmapData);
bitmapData = NULL;
}
}
- (void)dealloc {
[self cleanupWriter];
}
- (NSURL*) tempFileURL {
NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
NSURL* outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSError* error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
NSLog(@"Could not delete old recording file at path: %@", outputPath);
}
}
return outputURL;
}
-(BOOL) setUpWriter {
NSError* error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
//Configure video
NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
[NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];
//add input
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
return YES;
}
- (void) completeRecordingSession {
[videoWriterInput markAsFinished];
// Wait for the video
int status = videoWriter.status;
while (status == AVAssetWriterStatusUnknown) {
NSLog(#"Waiting...");
[NSThread sleepForTimeInterval:0.5f];
status = videoWriter.status;
}
#synchronized(self) {
[videoWriter finishWritingWithCompletionHandler:^{
[self cleanupWriter];
BOOL success = YES;
id delegateObj = self.delegate;
NSString *outputPath = [[NSString alloc] initWithFormat:#"%#/%#", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], #"output.mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSLog(#"Completed recording, file is stored at: %#", outputURL);
if ([delegateObj respondsToSelector:#selector(recordingFinished:)]) {
[delegateObj performSelectorOnMainThread:#selector(recordingFinished:) withObject:(success ? outputURL : nil) waitUntilDone:YES];
}
}];
}
}
- (bool) startRecording {
bool result = NO;
@synchronized(self) {
if (! _recording) {
result = [self setUpWriter];
startedAt = [NSDate date];
_recording = true;
}
}
return result;
}
- (void) stopRecording {
@synchronized(self) {
if (_recording) {
_recording = false;
[self completeRecordingSession];
}
}
}
-(void) writeVideoFrameAtTime:(CMTime)time {
if (![videoWriterInput isReadyForMoreMediaData]) {
NSLog(#"Not ready for video data");
}
else {
#synchronized (self) {
UIImage *newFrame = self.currentScreen;
CVPixelBufferRef pixelBuffer = NULL;
CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
if(status != 0){
//could not get a buffer from the pool
NSLog(#"Error creating pixel buffer: status=%d", status);
}
// set image data into pixel buffer
CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
uint8_t *destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels); //XXX: will work if the pixel buffer is contiguous and has the same bytesPerRow as the input data
if(status == 0){
BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success)
NSLog(#"Warning: Unable to write buffer to video");
}
//clean up
CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
CVPixelBufferRelease( pixelBuffer );
CFRelease(image);
CGImageRelease(cgImage);
}
}
}
As you can see, in the drawRect method I save all the images, and they look great, but when I try to make the video it produces just a distorted still image, even though the saved source images themselves look normal (not slanted and all weird).
My question is: what is going wrong when the video is being made?
Thanks for your help and your time; I know this is a long question.
I found this post after having the same issue, with certain resolutions causing exactly the same video effect, when I wanted to create a CVPixelBufferRef from a CGImageRef (coming from a UIImage).
The very short answer in my case was that I had hard-wired the bytes per row to be 4 times the width, which used to work all the time. Now I query the CVPixelBuffer itself for this value and, poof, problem solved!
Code that created the problem was this:
CGContextRef context = CGBitmapContextCreate(pxdata, w, h, 8, 4*w, rgbColorSpace, bitMapInfo);
Code that fixed the problem was this:
CGContextRef context = CGBitmapContextCreate(
pxdata, w, h,
8, CVPixelBufferGetBytesPerRow(pxbuffer),
rgbColorSpace,bitMapInfo);
And in both cases, the bitMapInfo was set:
CGBitmapInfo bitMapInfo = kCGImageAlphaPremultipliedFirst; // According to Apple's doc, this is safe: June 26, 2014
Pixel buffer adaptors only work with certain pixel sizes of images, so you will probably need to change the size of your images. You can imagine that what's happening in your video is that the writer is trying to write your, let's say, 361x241 images into a 360x240 space: each row then starts with the last pixel of the previous row, so the frame ends up diagonally skewed like you see. Check the Apple docs for supported dimensions; I believe I used 480x320 and it's supported. You can use this method to resize your images:
+(UIImage *)scaleImage:(UIImage*)image toSize:(CGSize)newSize {
CGRect scaledImageRect = CGRectZero;
// Aspect-fit scale factor: the smaller of the width and height ratios.
CGFloat aspectWidth = newSize.width / image.size.width;
CGFloat aspectHeight = newSize.height / image.size.height;
CGFloat aspectRatio = MIN(aspectWidth, aspectHeight);
scaledImageRect.size.width = image.size.width * aspectRatio;
scaledImageRect.size.height = image.size.height * aspectRatio;
scaledImageRect.origin.x = (newSize.width - scaledImageRect.size.width) / 2.0f;
scaledImageRect.origin.y = (newSize.height - scaledImageRect.size.height) / 2.0f;
// Draw into a context of the requested size (e.g. 480x320) rather than a hard-coded one.
UIGraphicsBeginImageContextWithOptions(newSize, NO, 0);
[image drawInRect:scaledImageRect];
UIImage* scaledImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return scaledImage;
}
I think this is because the pixel buffer's bytes per row does not match the UIImage's bytes per row. In my case (iPhone 6, iOS 8.3) the UIImage is 568 x 320 and CFDataGetLength is 727040, so the bytes per row is 2272. But the pixel buffer's bytes per row is 2304. I think this extra 32 bytes comes from padding so that the pixel buffer's bytes per row is divisible by 64. How you force the pixel buffer to match the input data, or vice versa, across all devices I'm not sure yet.
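A hedged sketch of one way to deal with this: copy the image into the buffer row by row, so each destination row starts at the pixel buffer's (possibly padded) stride. Variable names follow the writeVideoFrameAtTime: code above (cgImage is the CGImageRef, image is the CFDataRef copied from it, pixelBuffer comes from the adaptor's pool):
// Per-row copy that respects the pixel buffer's padded bytes-per-row.
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
uint8_t *dest = CVPixelBufferGetBaseAddress(pixelBuffer);
size_t destBytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
size_t srcBytesPerRow = CGImageGetBytesPerRow(cgImage);
size_t rowCount = CGImageGetHeight(cgImage);
const uint8_t *src = CFDataGetBytePtr(image);
size_t bytesToCopyPerRow = MIN(srcBytesPerRow, destBytesPerRow);
for (size_t row = 0; row < rowCount; row++) {
// Any padding at the end of a destination row is simply left untouched.
memcpy(dest + row * destBytesPerRow, src + row * srcBytesPerRow, bytesToCopyPerRow);
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);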
I've struggled a lot with this case. I tried many ways to create a video from an image array, but the result was almost the same as yours.
The problem was in the CVPixelBuffer: the buffer I was creating from the image was not correct.
But finally I got it working.
Main function to create a video at a URL from an array.
You just have to input the array of images and the fps, and size can be equal to the size of the images (if you want).
fps = number of images in the array / desired duration
For example: fps = 90 / 3 = 30
- (void)getVideoFrom:(NSArray *)array
toPath:(NSString*)path
size:(CGSize)size
fps:(int)fps
withCallbackBlock:(void (^) (BOOL))callbackBlock
{
NSLog(#"%#", path);
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
fileType:AVFileTypeMPEG4
error:&error];
if (error) {
if (callbackBlock) {
callbackBlock(NO);
}
return;
}
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecTypeH264,
AVVideoWidthKey: [NSNumber numberWithInt:size.width],
AVVideoHeightKey: [NSNumber numberWithInt:size.height]};
AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer;
CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
CMTime presentTime = CMTimeMake(0, fps);
int i = 0;
while (1)
{
if(writerInput.readyForMoreMediaData){
presentTime = CMTimeMake(i, fps);
if (i >= [array count]) {
buffer = NULL;
} else {
buffer = [self pixelBufferFromCGImage:[array[i] CGImage] size:CGSizeMake(480, 320)];
}
if (buffer) {
//append buffer
BOOL appendSuccess = [self appendToAdapter:adaptor
pixelBuffer:buffer
atTime:presentTime
withInput:writerInput];
NSAssert(appendSuccess, @"Failed to append");
i++;
} else {
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(#"Successfully closed video writer");
if (videoWriter.status == AVAssetWriterStatusCompleted) {
if (callbackBlock) {
callbackBlock(YES);
}
} else {
if (callbackBlock) {
callbackBlock(NO);
}
}
}];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
NSLog (#"Done");
break;
}
}
}
}
Function to get CVPixelBuffer from CGImage
-(CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image size:(CGSize)imageSize
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
&pxbuffer);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
CGImageGetHeight(image), 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace,
(int)kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
Function to append to adapter
-(BOOL)appendToAdapter:(AVAssetWriterInputPixelBufferAdaptor*)adaptor
pixelBuffer:(CVPixelBufferRef)buffer
atTime:(CMTime)presentTime
withInput:(AVAssetWriterInput*)writerInput
{
while (!writerInput.readyForMoreMediaData) {
usleep(1);
}
return [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
}
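A hypothetical call site (imagesArray and the output path here are just placeholders), following the fps rule above for 90 images over 3 seconds:
NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mp4"];
[self getVideoFrom:imagesArray
toPath:outputPath
size:CGSizeMake(480, 320)
fps:30
withCallbackBlock:^(BOOL success) {
NSLog(@"Video written: %@", success ? @"YES" : @"NO");
}];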

AVFoundation Custom Camera Causing Crash / Memory Pressure?

I'm making use of AVFoundation to integrate a custom camera into my app. The problem is, I'm getting a rare but still occurring crash due to memory pressure. I'm not sure why, since I'm using ARC and memory in Xcode is only around 20 MB around the time of the crash. What's going on?
Here's my code
- (void)setupCamera
{
self.session = [[AVCaptureSession alloc] init];
[self.session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
if ([self.session canAddInput:input]) {
[self.session addInput:input];
}
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
if ([self.session canAddOutput:output]) {
output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
[self.session addOutput:output];
[output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
}
self.preview = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
[self.preview setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[self.preview setFrame:self.cameraView.frame];
CALayer *cameraLayer = self.cameraView.layer;
[cameraLayer setMasksToBounds:YES];
[cameraLayer addSublayer:self.preview];
for (AVCaptureConnection *connection in output.connections) {
if (connection.supportsVideoOrientation) {
[connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
}
}
NSURL *shutterUrl = [[NSURL alloc] initWithString:[[NSBundle mainBundle] pathForResource: @"shutter" ofType: @"wav"]];
self.shutterPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:shutterUrl error:nil];
[self.shutterPlayer prepareToPlay];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if (self.doCapture) {
self.doCapture = NO;
[self.shutterPlayer setCurrentTime:0];
[self.shutterPlayer play];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef rawContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef quartzImage = CGBitmapContextCreateImage(rawContext);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
CGContextRelease(rawContext);
CGColorSpaceRelease(colorSpace);
UIImage *rawImage = [UIImage imageWithCGImage:quartzImage];
CGImageRelease(quartzImage);
float rawWidth = rawImage.size.width;
float rawHeight = rawImage.size.height;
CGRect cropRect = (rawHeight > rawWidth) ? CGRectMake(0, (rawHeight - rawWidth) / 2, rawWidth, rawWidth) : CGRectMake((rawWidth - rawHeight) / 2, 0, rawHeight, rawHeight);
CGImageRef croppedImageRef = CGImageCreateWithImageInRect([rawImage CGImage], cropRect);
UIImage *croppedImage = [UIImage imageWithCGImage:croppedImageRef];
CGImageRelease(croppedImageRef);
[self saveImage:croppedImage];
}
}
- (void)saveImage:(UIImage *)image
{
[self.capturedImages addObject:image];
NSArray *scrollItemXIB = [[NSBundle mainBundle] loadNibNamed:@"SellPreviewImagesScrollItemView" owner:self options:nil];
UIView *scrollItemView = [scrollItemXIB lastObject];
UIImageView *previewImage = (UIImageView *)[scrollItemView viewWithTag:PREVIEW_IMAGES_SCROLL_ITEM_TAG_IMAGE];
previewImage.image = image;
UIButton *deleteButton = (UIButton *)[scrollItemView viewWithTag:PREVIEW_IMAGES_SCROLL_ITEM_TAG_BADGE_BUTTON];
[deleteButton addTarget:self action:@selector(deleteImage:) forControlEvents:UIControlEventTouchUpInside];
UIButton *previewButton = (UIButton *)[scrollItemView viewWithTag:PREVIEW_IMAGES_SCROLL_ITEM_TAG_BUTTON];
[previewButton addTarget:self action:@selector(previewImage:) forControlEvents:UIControlEventTouchUpInside];
[self addItemToScroll:scrollItemView];
[self checkCapturedImagesLimit];
if ([self.capturedImages count] == 1) {
[self makeCoverPhoto:[self.capturedImages objectAtIndex:0]];
[self cells:self.previewImagesToggle setHidden:NO];
[self reloadDataAnimated:YES];
}
}
Where do you set self.doCapture = YES? It seems like you allocate a lot of memory for temporary objects. Try using the @autoreleasepool directive:
@autoreleasepool {
self.doCapture = NO;
[self.shutterPlayer setCurrentTime:0];
[self.shutterPlayer play];
...
if ([self.capturedImages count] == 1) {
[self makeCoverPhoto:[self.capturedImages objectAtIndex:0]];
[self cells:self.previewImagesToggle setHidden:NO];
[self reloadDataAnimated:YES];
}
}
I think I might have had the same problem ("Terminated Due To Memory Pressure"), and I just moved the setup of the AVCaptureSession from viewDidAppear to viewDidLoad. It was growing to 100 MB or so on the iPad, and now it goes up to about 14 MB with a big image overlay on top.
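A minimal sketch of that split, reusing the setupCamera method and session property from the question (the start/stop placement is my assumption, not part of the original answer):
- (void)viewDidLoad {
[super viewDidLoad];
[self setupCamera]; // build the session graph once
}
- (void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
[self.session startRunning]; // start/stop around appearance instead of rebuilding the session
}
- (void)viewDidDisappear:(BOOL)animated {
[super viewDidDisappear:animated];
[self.session stopRunning];
}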

Can't get AVFoundation to work with AVCaptureSessionPresetPhoto resolution

I can't seem to get the pixel alignment to work with AVFoundation in AVCaptureSessionPresetPhoto resolution. Pixel alignment works fine with lower resolution like AVCaptureSessionPreset1280x720 (AVCaptureSessionPreset1280x720_Picture).
Specifically, when I uncomment these lines:
if ([captureSession canSetSessionPreset:AVCaptureSessionPresetPhoto]) {
[captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
} else {
NSLog(#"Unable to set resolution to AVCaptureSessionPresetPhoto");
}
I get a misaligned image, as shown in the second image below. Any comments/suggestions are greatly appreciated.
Here's my code to set up 1) the capture session, 2) the delegate callback, and 3) saving one streaming image to verify pixel alignment.
1. Capture Session set up
- (void)InitCaptureSession {
captureSession = [[AVCaptureSession alloc] init];
if ([captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
[captureSession setSessionPreset:AVCaptureSessionPreset1280x720];
} else {
NSLog(#"Unable to set resolution to AVCaptureSessionPreset1280x720");
}
// if ([captureSession canSetSessionPreset:AVCaptureSessionPresetPhoto]) {
// [captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
// } else {
// NSLog(#"Unable to set resolution to AVCaptureSessionPresetPhoto");
// }
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
videoInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
captureOutput.alwaysDiscardsLateVideoFrames = YES;
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
[captureSession addInput:videoInput];
[captureSession addOutput:captureOutput];
[captureOutput release];
previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayer = [previewView layer];// self.view.layer; //
[rootLayer setMasksToBounds:YES];
[previewLayer setFrame:[rootLayer bounds]];
[rootLayer addSublayer:previewLayer];
[captureSession startRunning];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
static int processedImage = 0;
processedImage++;
if (processedImage==100) {
[self SaveImage:sampleBuffer];
}
[pool drain];
}
// Create a UIImage CMSampleBufferRef and save for verifying pixel alignment
- (void) SaveImage:(CMSampleBufferRef) sampleBuffer
{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
CvSize imageSize;
imageSize.width = CVPixelBufferGetWidth(imageBuffer); ;
imageSize.height = CVPixelBufferGetHeight(imageBuffer);
IplImage *image = cvCreateImage(imageSize, IPL_DEPTH_8U, 1);
void *y_channel = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
char *tempPointer = image->imageData;
memcpy(tempPointer, y_channel, image->imageSize);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
NSData *data = [NSData dataWithBytes:image->imageData length:image->imageSize];
CGDataProviderRef provider = CGDataProviderCreateWithCFData((CFDataRef)data);
CGImageRef imageRef = CGImageCreate(image->width, image->height,
8, 8, image->width,
colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault,
provider, NULL, false, kCGRenderingIntentDefault);
UIImage *Saveimage = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
CGDataProviderRelease(provider);
CGColorSpaceRelease(colorSpace);
UIImageWriteToSavedPhotosAlbum(Saveimage, nil, nil, nil);
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
}
Within SaveImage, the 5th argument of CGImageCreate is bytesPerRow, and you should not pass image->width, since the number of bytes per row may differ because of memory alignment. This is the case with AVCaptureSessionPresetPhoto, where width = 852 (with an iPhone 4 camera) while the number of bytes per row for the first plane (Y) is 864, since it is 16-byte aligned.
1/ You should get the bytes per row as follows:
size_t bpr = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
2/ Then take the bytes per row into account while copying the pixels to your IplImage:
char *y_channel = (char *) CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
// row by row copy
for (int i = 0; i < image->height; i++)
memcpy(tempPointer + i*image->widthStep, y_channel + i*bpr, image->width);
You can keep the [NSData dataWithBytes:image->imageData length:image->imageSize]; as is, since image->imageSize takes the alignment into account (imageSize = height * widthStep).
3/ Finally, pass the IplImage widthStep as the 5th parameter of CGImageCreate:
CGImageCreate(image->width, image->height, 8, 8, image->widthStep, ...);

Capturing the screen with AVAssetWriter-- Works fine on simulator but on device creates black video

I'm trying to capture the frame buffer data and convert it into a video for my iPhone game.
I'm using AVAssetWriter to accomplish this.
The code works fine on the simulator but not on the device itself; on the device it generates a black video.
I'm using the following code:
//This code initializes the AVAsetWriter and other things
- (void) testVideoWriter {
CGRect screenBoundst = [[UIScreen mainScreen] bounds];
//initialize global info
MOVIE_NAME = @"Documents/Movie5.mp4";
//CGSize size = CGSizeMake(screenBoundst.size.width, screenBoundst.size.height);
CGSize size = CGSizeMake(320, 480);
frameLength = CMTimeMake(1, 5);
currentTime = kCMTimeZero;
currentFrame = 0;
MOVIE_PATH = [NSHomeDirectory() stringByAppendingPathComponent:MOVIE_NAME];
NSError *error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:MOVIE_PATH]
fileType:AVFileTypeMPEG4 error:&error];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width ], AVVideoWidthKey,
[NSNumber numberWithInt:size.height ], AVVideoHeightKey, nil];
writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA],
kCVPixelBufferPixelFormatTypeKey, nil];
adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:
writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
[adaptor retain];
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
flipVertical = CGAffineTransformRotate(flipVertical,(90.0*3.14f/180.0f));
[writerInput setTransform:flipVertical];
[videoWriter addInput:writerInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
VIDEO_WRITER_IS_READY = true;
}
//this code capture the screen data
- (void) captureScreenVideo {
if (!writerInput.readyForMoreMediaData) {
return;
}
CGRect screenBounds = [[UIScreen mainScreen] bounds];
NSLog(#"width : %f Height : %f",screenBounds.size.width,screenBounds.size.height);
CGSize esize = CGSizeMake(screenBounds.size.width, screenBounds.size.height);
NSInteger myDataLength = esize.width * esize.height * 4;
GLuint *buffer = (GLuint *) malloc(myDataLength);
glReadPixels(0, 0, esize.width, esize.height, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
CVPixelBufferRef pixel_buffer = NULL;
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, esize.width,
esize.height, kCVPixelFormatType_32BGRA, (CFDictionaryRef) options,
&pixel_buffer);
NSParameterAssert(status == kCVReturnSuccess && pixel_buffer != NULL);
CVPixelBufferLockBaseAddress(pixel_buffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pixel_buffer);
NSParameterAssert(pixel_buffer != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, esize.width,
esize.height, 8, 4*esize.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
CGAffineTransform flipVerticalq = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
flipVerticalq = CGAffineTransformRotate(flipVerticalq,(90.0*3.14f/180.0f));
CGContextConcatCTM(context, flipVerticalq);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
if(![adaptor appendPixelBuffer:pixel_buffer withPresentationTime:currentTime]) {
NSLog(#"FAIL");
} else {
NSLog(#"Success:%d", currentFrame);
currentTime = CMTimeAdd(currentTime, frameLength);
}
free(buffer);
CVPixelBufferRelease(pixel_buffer);
}
// this code saves the video to library on device itself
- (void) moveVideoToSavedPhotos {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
NSString *localVid = [NSHomeDirectory() stringByAppendingPathComponent:MOVIE_NAME];
NSURL* fileURL = [NSURL fileURLWithPath:localVid];
NSLog(#"movie saved %#",fileURL);
BOOL isVideoOK = UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(localVid);
if (NO == isVideoOK)
NSLog(#"Video at %# is not compatible",localVid);
else {
NSLog(#"video ok");
}
[library writeVideoAtPathToSavedPhotosAlbum:fileURL
completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
NSLog(#"%#: Error saving context: %#", [self class], [error localizedDescription]);
}
}];
[library release];
}
// following code stops the video recording after particular number of frames
if (VIDEO_WRITER_IS_READY) {
[self captureScreenVideo];
currentFrame++;
if (currentFrame > 500) {
VIDEO_WRITER_IS_READY = false;
[writerInput markAsFinished];
if (![videoWriter finishWriting]) {
NSLog(#"writing not finished");
}
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
[writerInput release];
[videoWriter release];
NSLog(#"saving the file");
[self moveVideoToSavedPhotos];
}
}
Also, the video captured on the simulator is mirrored. I don't have any idea where I'm going wrong; please clarify this for me, and I hope you guys don't mind reading the whole code.
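One hedged observation, in case it helps: captureScreenVideo fills buffer via glReadPixels but never copies it into pixel_buffer before appending, so the frames handed to the adaptor contain whatever the newly created buffer happens to hold (often black on the device). A minimal sketch of copying the pixels across could look like this; note that glReadPixels returns rows bottom-up in RGBA order, so a vertical flip and/or channel swizzle may still be needed for a kCVPixelFormatType_32BGRA buffer:
// Copy the glReadPixels output into the CVPixelBuffer row by row.
CVPixelBufferLockBaseAddress(pixel_buffer, 0);
uint8_t *dest = CVPixelBufferGetBaseAddress(pixel_buffer);
size_t destBytesPerRow = CVPixelBufferGetBytesPerRow(pixel_buffer);
size_t srcBytesPerRow = (size_t)esize.width * 4; // tightly packed RGBA from glReadPixels
const uint8_t *src = (const uint8_t *)buffer;
for (size_t row = 0; row < (size_t)esize.height; row++) {
memcpy(dest + row * destBytesPerRow, src + row * srcBytesPerRow, srcBytesPerRow);
}
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);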
