In my project I have to add a book page-flip animation, and on the book's right-hand page a video plays. Once the first video finishes, the page turns like a book page and the second video plays on the next right-hand page, and so on. Now I have to save all of this as a video that can be downloaded, so that when the downloaded video is played from the gallery it looks the same as it does in my app. Right now I am recording the device's screen and saving it on a server for download. Everything works except the video player: in the recorded video, the portion where the videos play (the right-hand page of the book) is not captured.
I am using the code below to record the screen. If anyone has another idea for doing the same thing, please share it, or if my code needs to change, please suggest how. Thanks in advance.
// ASScreenRecorder.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
typedef void (^VideoCompletionBlock)(void);
@protocol ASScreenRecorderDelegate;
@interface ASScreenRecorder : NSObject
@property (nonatomic, readonly) BOOL isRecording;
@property (nonatomic, weak) id <ASScreenRecorderDelegate> delegate;
// if saveURL is nil, video will be saved into camera roll
// this property can not be changed whilst recording is in progress
@property (strong, nonatomic) NSURL *videoURL;
+ (instancetype)sharedInstance;
- (BOOL)startRecording;
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
@end
// If your view contains an AVCaptureVideoPreviewLayer or an openGL view
// you'll need to write that data into the CGContextRef yourself.
// In the viewcontroller responsible for the AVCaptureVideoPreviewLayer / openGL view
// set yourself as the delegate for ASScreenRecorder.
// [ASScreenRecorder sharedInstance].delegate = self
// Then implement 'writeBackgroundFrameInContext:(CGContextRef*)contextRef'
// use 'CGContextDrawImage' to draw your view into the provided CGContextRef
@protocol ASScreenRecorderDelegate <NSObject>
- (void)writeBackgroundFrameInContext:(CGContextRef*)contextRef;
@end
// ASScreenRecorder.m
// ScreenRecorder
//
// Created by Alan Skipp on 23/04/2014.
// Copyright (c) 2014 Alan Skipp. All rights reserved.
//
#import "ASScreenRecorder.h"
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import <AssetsLibrary/AssetsLibrary.h>
@interface ASScreenRecorder()
@property (strong, nonatomic) AVAssetWriter *videoWriter;
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
@property (strong, nonatomic) CADisplayLink *displayLink;
@property (strong, nonatomic) NSDictionary *outputBufferPoolAuxAttributes;
@property (nonatomic) CFTimeInterval firstTimeStamp;
@property (nonatomic) BOOL isRecording;
@end
@implementation ASScreenRecorder
{
dispatch_queue_t _render_queue;
dispatch_queue_t _append_pixelBuffer_queue;
dispatch_semaphore_t _frameRenderingSemaphore;
dispatch_semaphore_t _pixelAppendSemaphore;
CGSize _viewSize;
CGFloat _scale;
CGColorSpaceRef _rgbColorSpace;
CVPixelBufferPoolRef _outputBufferPool;
}
#pragma mark - initializers
+ (instancetype)sharedInstance {
static dispatch_once_t once;
static ASScreenRecorder *sharedInstance;
dispatch_once(&once, ^{
sharedInstance = [[self alloc] init];
});
return sharedInstance;
}
- (instancetype)init
{
self = [super init];
if (self) {
_viewSize = [UIApplication sharedApplication].delegate.window.bounds.size;
_scale = [UIScreen mainScreen].scale;
// record half size resolution for retina iPads
if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) {
_scale = 1.0;
}
_isRecording = NO;
_append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL);
_render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_render_queue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0));
_frameRenderingSemaphore = dispatch_semaphore_create(1);
_pixelAppendSemaphore = dispatch_semaphore_create(1);
}
return self;
}
#pragma mark - public
- (void)setVideoURL:(NSURL *)videoURL
{
NSAssert(!_isRecording, @"videoURL can not be changed whilst recording is in progress");
_videoURL = videoURL;
}
- (BOOL)startRecording
{
if (!_isRecording) {
[self setUpWriter];
_isRecording = (_videoWriter.status == AVAssetWriterStatusWriting);
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)];
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}
return _isRecording;
}
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
{
if (_isRecording) {
_isRecording = NO;
[_displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
[self completeRecordingSession:completionBlock];
}
}
#pragma mark - private
-(void)setUpWriter
{
_rgbColorSpace = CGColorSpaceCreateDeviceRGB();
NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
(id)kCVPixelBufferWidthKey : @(_viewSize.width * _scale),
(id)kCVPixelBufferHeightKey : @(_viewSize.height * _scale),
(id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * _scale * 4)
};
_outputBufferPool = NULL;
CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool);
NSError* error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:self.videoURL ?: [self tempFileURL]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
NSInteger pixelNumber = _viewSize.width * _viewSize.height * _scale;
NSDictionary* videoCompression = @{AVVideoAverageBitRateKey: @(pixelNumber * 11.4)};
NSDictionary* videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithInt:_viewSize.width*_scale],
AVVideoHeightKey: [NSNumber numberWithInt:_viewSize.height*_scale],
AVVideoCompressionPropertiesKey: videoCompression};
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
_videoWriterInput.transform = [self videoTransformForDeviceOrientation];
_avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil];
[_videoWriter addInput:_videoWriterInput];
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
}
- (CGAffineTransform)videoTransformForDeviceOrientation
{
CGAffineTransform videoTransform;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationLandscapeLeft:
videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
break;
case UIDeviceOrientationLandscapeRight:
videoTransform = CGAffineTransformMakeRotation(M_PI_2);
break;
case UIDeviceOrientationPortraitUpsideDown:
videoTransform = CGAffineTransformMakeRotation(M_PI);
break;
default:
videoTransform = CGAffineTransformIdentity;
}
return videoTransform;
}
- (NSURL*)tempFileURL
{
NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"];
[self removeTempFilePath:outputPath];
return [NSURL fileURLWithPath:outputPath];
}
- (void)removeTempFilePath:(NSString*)filePath
{
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath]) {
NSError* error;
if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
NSLog(@"Could not delete old recording:%@", [error localizedDescription]);
}
}
}
- (void)completeRecordingSession:(VideoCompletionBlock)completionBlock;
{
dispatch_async(_render_queue, ^{
dispatch_sync(_append_pixelBuffer_queue, ^{
[_videoWriterInput markAsFinished];
[_videoWriter finishWritingWithCompletionHandler:^{
void (^completion)(void) = ^() {
[self cleanup];
dispatch_async(dispatch_get_main_queue(), ^{
if (completionBlock) completionBlock();
});
};
if (self.videoURL) {
completion();
} else {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
NSLog(@"Error copying video to camera roll:%@", [error localizedDescription]);
} else {
[self removeTempFilePath:_videoWriter.outputURL.path];
completion();
}
}];
}
}];
});
});
}
- (void)cleanup
{
self.avAdaptor = nil;
self.videoWriterInput = nil;
self.videoWriter = nil;
self.firstTimeStamp = 0;
self.outputBufferPoolAuxAttributes = nil;
CGColorSpaceRelease(_rgbColorSpace);
CVPixelBufferPoolRelease(_outputBufferPool);
}
- (void)writeVideoFrame
{
// throttle the number of frames to prevent meltdown
// technique gleaned from Brad Larson's answer here: http://stackoverflow.com/a/5956119
if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
return;
}
dispatch_async(_render_queue, ^{
if (![_videoWriterInput isReadyForMoreMediaData]) return;
if (!self.firstTimeStamp) {
self.firstTimeStamp = _displayLink.timestamp;
}
CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp);
CMTime time = CMTimeMakeWithSeconds(elapsed, 1000);
CVPixelBufferRef pixelBuffer = NULL;
CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
if (self.delegate) {
[self.delegate writeBackgroundFrameInContext:&bitmapContext];
}
// draw each window into the context (other windows include UIKeyboard, UIAlert)
// FIX: UIKeyboard is currently only rendered correctly in portrait orientation
dispatch_sync(dispatch_get_main_queue(), ^{
UIGraphicsPushContext(bitmapContext); {
for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
[window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
}
} UIGraphicsPopContext();
});
// append pixelBuffer on a async dispatch_queue, the next frame is rendered whilst this one appends
// must not overwhelm the queue with pixelBuffers, therefore:
// check if _append_pixelBuffer_queue is ready
// if it’s not ready, release pixelBuffer and bitmapContext
if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) {
dispatch_async(_append_pixelBuffer_queue, ^{
BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success) {
NSLog(@"Warning: Unable to write buffer to video");
}
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
dispatch_semaphore_signal(_pixelAppendSemaphore);
});
} else {
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
}
dispatch_semaphore_signal(_frameRenderingSemaphore);
});
}
- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer
{
CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer);
CVPixelBufferLockBaseAddress(*pixelBuffer, 0);
CGContextRef bitmapContext = NULL;
bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer),
CVPixelBufferGetWidth(*pixelBuffer),
CVPixelBufferGetHeight(*pixelBuffer),
8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
);
CGContextScaleCTM(bitmapContext, _scale, _scale);
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
CGContextConcatCTM(bitmapContext, flipVertical);
return bitmapContext;
}
@end
@Gobinda, this code works in my case. You need to specify the frame if you want to record only a portion of the window. In the init method, viewSize is set to the window size, so change viewSize to the frame of your video.
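Separately, the reason the playing video itself does not appear in the capture is hinted at by the comments at the top of ASScreenRecorder.h: AVFoundation-rendered content (an AVCaptureVideoPreviewLayer or OpenGL view, and in practice an AVPlayerLayer as well) is not picked up by drawViewHierarchyInRect:, so it has to be drawn into the provided CGContextRef via the ASScreenRecorderDelegate. A minimal sketch, assuming the right-hand page is driven by an AVPlayer with a hypothetical AVPlayerItemVideoOutput (self.videoOutput) attached to its player item, and that self.videoPageFrame is that page's rect in window coordinates:
#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>
#import <QuartzCore/QuartzCore.h>
// In the view controller that owns the player:
// [ASScreenRecorder sharedInstance].delegate = self;
- (void)writeBackgroundFrameInContext:(CGContextRef *)contextRef
{
    CMTime itemTime = [self.videoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if (![self.videoOutput hasNewPixelBufferForItemTime:itemTime]) {
        return; // no freshly decoded frame; keep whatever was drawn last time
    }
    CVPixelBufferRef pixelBuffer = [self.videoOutput copyPixelBufferForItemTime:itemTime
                                                             itemTimeForDisplay:NULL];
    if (pixelBuffer == NULL) {
        return;
    }
    CIImage *frame = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CIContext *ciContext = [CIContext contextWithOptions:nil]; // in real code, cache this context
    CGImageRef cgFrame = [ciContext createCGImage:frame fromRect:frame.extent];
    // Depending on the context's current transform you may need an extra vertical flip here.
    CGContextDrawImage(*contextRef, self.videoPageFrame, cgFrame);
    CGImageRelease(cgFrame);
    CVPixelBufferRelease(pixelBuffer);
}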
I want to record snapshots of a window to an mp4 file in a macOS app. The steps are:
Start the creator with [aMp4CreaterEntity startRecordWithSize:CGSizeMake(2320, 1080) pixelType:kCVPixelFormatType_32ARGB].
Run a timer 15 times per second, snapshotting the window with CGWindowListCreateImage to get a CGImageRef whose width is 2320 and height is 1080.
Call [aMp4CreaterEntity recordImage:theCGImageRef timeStamp:[[NSDate date] timeIntervalSince1970]] to send the CGImageRef to my aMp4CreaterEntity.
Call [aMp4CreaterEntity stopRecord], and get the mp4 file at
Everything runs all right, except that the mp4 file contains only the first half of what was sent; the later content is lost. Yet when I debug, every CVPixelBufferRef is appended by the AVAssetWriterInputPixelBufferAdaptor.
At first I thought the CMTime setting was wrong, but after changing it to half or double the value, the error continued.
I'm new to audio and video; can someone help me solve this problem or explain it in detail?
BTW: I record the audio to another file at the same time, and it has the same problem (only the earlier half of the content). I can also read Swift code directly.
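Condensed into code, the sequence above looks roughly like this (the window ID and the block-based NSTimer are placeholders rather than my exact code):
#import <Cocoa/Cocoa.h>
#import "Mp4Creater.h"
Mp4Creater *creater = [[Mp4Creater alloc] init];
[creater startRecordWithSize:CGSizeMake(2320, 1080) pixelType:kCVPixelFormatType_32ARGB];
CGWindowID windowID = 0; // the target window's ID goes here
NSTimer *timer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 15.0 repeats:YES block:^(NSTimer *t) {
    // snapshot the window (2320x1080 in my case)
    CGImageRef frame = CGWindowListCreateImage(CGRectNull,
                                               kCGWindowListOptionIncludingWindow,
                                               windowID,
                                               kCGWindowImageBoundsIgnoreFraming);
    if (frame != NULL) {
        [creater recordImage:frame timeStamp:[[NSDate date] timeIntervalSince1970]];
        CGImageRelease(frame);
    }
}];
// ... later, when recording should stop:
[timer invalidate];
[creater stopRecord];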
This is my recorder sample code, in Objective-C.
#import "Mp4Creater.h"
#import <AVFoundation/AVFoundation.h>
@interface Mp4Creater()
@property (nonatomic, strong) AVAssetWriter *videoWriter;
@property (nonatomic, strong) AVAssetWriterInput *videoInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
@property (nonatomic, strong) NSString *videoOutputPath;
@property (nonatomic, strong) NSDictionary *videoSettings;
@property (nonatomic, assign) NSTimeInterval startTs;
@property (nonatomic, assign) NSTimeInterval latestTs;
@property (nonatomic, strong) NSOperationQueue *opQueue;
@property (nonatomic, assign) BOOL isRecording;
@property (nonatomic, assign) NSUInteger frameRate; // 15
@property (nonatomic, assign) NSUInteger iFrameInterval; // 3s
@end
@implementation Mp4Creater
- (instancetype)init
{
self = [super init];
if (self) {
_videoWriter = nil;
_videoInput = nil;
_videoAdaptor = nil;
_videoOutputPath = nil;
_videoSettings = nil;
_startTs = -1;
_latestTs = -1;
_isRecording = NO;
_frameRate = 15;
_iFrameInterval = 3;
}
return self;
}
- (void)dealloc
{
[_opQueue cancelAllOperations];
}
- (BOOL)addVideoInputWithSize:(CGSize)size pixelType:(UInt32)pixelType {
NSString *codecKey = AVVideoCodecTypeH264;
_videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:codecKey, AVVideoCodecKey,
[NSNumber numberWithInt: size.width], AVVideoWidthKey,
[NSNumber numberWithInt: size.height], AVVideoHeightKey,
[NSDictionary dictionaryWithObjectsAndKeys:AVVideoYCbCrMatrix_ITU_R_709_2, AVVideoYCbCrMatrixKey, AVVideoTransferFunction_ITU_R_709_2, AVVideoTransferFunctionKey, AVVideoColorPrimaries_ITU_R_709_2, AVVideoColorPrimariesKey, nil], AVVideoColorPropertiesKey,
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt: size.width * size.height * 2], AVVideoAverageBitRateKey,
[NSNumber numberWithInt: (int)(_frameRate*_iFrameInterval)], AVVideoMaxKeyFrameIntervalKey,
[NSNumber numberWithInt: (int)(_iFrameInterval)], AVVideoMaxKeyFrameIntervalDurationKey,
AVVideoProfileLevelH264BaselineAutoLevel, AVVideoProfileLevelKey,
nil], AVVideoCompressionPropertiesKey,
nil];
AVAssetWriterInput *videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:_videoSettings];
videoInput.expectsMediaDataInRealTime = YES;
if ([_videoWriter canAddInput:videoInput]) {
[_videoWriter addInput:videoInput];
_videoInput = videoInput;
}
else {
return NO;
}
NSDictionary *sourcePixelBufferAttributes = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(pixelType)};
_videoAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:sourcePixelBufferAttributes];
return YES;
}
- (BOOL)startRecordWithSize:(CGSize)size pixelType:(UInt32)pixelType {
if (self.isRecording) {
return YES;
}
self.startTs = -1;
NSString *outputFile;
NSString *guid = [[NSUUID new] UUIDString];
NSString *fileName = [NSString stringWithFormat:@"video_%@.mp4", guid];
outputFile = [NSTemporaryDirectory() stringByAppendingPathComponent:fileName];
self.videoOutputPath = outputFile;
NSError *error = nil;
//----initialize compression engine
self.videoWriter = [[AVAssetWriter alloc]initWithURL:[NSURL fileURLWithPath:self.videoOutputPath]
fileType:AVFileTypeMPEG4
error:&error];
self.videoWriter.shouldOptimizeForNetworkUse = YES;
if(error) {
return NO;
}
if (self.videoWriter == nil) {
return NO;
}
if (![self addVideoInputWithSize:size pixelType:pixelType ]) {
[self stopRecord];
return NO;
}
self->_isRecording = YES;
[self.videoWriter startWriting];
[self.videoWriter startSessionAtSourceTime:kCMTimeZero];
_opQueue = [[NSOperationQueue alloc] init];
_opQueue.maxConcurrentOperationCount = 1;
return YES;
}
- (void)stopRecord {
if (!self.isRecording) {
return;
}
[_opQueue cancelAllOperations];
NSOperationQueue *oldQueue = _opQueue;
_opQueue = nil;
[oldQueue waitUntilAllOperationsAreFinished];
if (self.videoInput != nil) {
[self.videoInput markAsFinished];
}
self.videoInput = nil;
self.videoAdaptor = nil;
if (self.videoWriter != nil) {
__block BOOL success = NO;
if (self.videoWriter.status == AVAssetWriterStatusWriting) {
success = YES;
}
[self.videoWriter finishWritingWithCompletionHandler:^{
if (self.videoWriter.status == AVAssetWriterStatusCompleted) {
if (success) {
return;
}
}
}];
}
self->_isRecording = NO;
}
- (void)recordImage:(CGImageRef)image timeStamp:(NSTimeInterval)ts {
CGImageRef retainImage = CGImageRetain(image);
__weak __typeof__(self) weak_self = self;
[_opQueue addOperationWithBlock:^{
__typeof__(self) self = weak_self;
if (!self.isRecording) {
return;
}
if (self.startTs < 0) {
self.startTs = ts;
}
self.latestTs = ts;
CMTime cmTime = CMTimeMake((ts - self.startTs) * 1000, 1000);
if (self.videoWriter != nil) {
if (self.videoWriter.status == AVAssetWriterStatusWriting) {
if (self.videoInput != nil && self.videoInput.isReadyForMoreMediaData) {
CVPixelBufferRef buffer = [self CVPixelBufferRefFromCGImage:retainImage];
if (buffer != NULL) {
[self.videoAdaptor appendPixelBuffer:buffer withPresentationTime:cmTime];
CVPixelBufferRelease(buffer);
}
}
}
}
CGImageRelease(retainImage);
}];
}
- (CVPixelBufferRef)CVPixelBufferRefFromCGImage:(CGImageRef)image {
size_t pixelsWide = CGImageGetWidth(image);
size_t pixelsHigh = CGImageGetHeight(image);
NSInteger bitmapBytesPerRow = (pixelsWide * 4);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, pixelsWide, pixelsHigh, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
if (status == kCVReturnSuccess) {
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, pixelsWide, pixelsHigh, 8, bitmapBytesPerRow, rgbColorSpace, (CGBitmapInfo)kCGImageAlphaPremultipliedFirst);
NSParameterAssert(context);
CGContextConcatCTM(context, CGAffineTransformIdentity);
CGContextDrawImage(context, CGRectMake(0, 0, pixelsWide, pixelsHigh), image);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
return pxbuffer;
}
else {
return nil;
}
}
@end
I am trying to speed up the reading of barcodes in my app. The app works fine, but it is a tad slow at reading barcodes.
How do I improve the speed of reading the barcodes?
Here is the code I have so far.
#import "ScanViewController.h"
#import "Utils.h"
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
@interface ScanViewController ()<AVCaptureMetadataOutputObjectsDelegate>
@property (nonatomic, readwrite) AVCaptureSession *captureSession;
@property (nonatomic, readwrite) AVCaptureVideoPreviewLayer *videoPreviewLayer;
@property (nonatomic, readwrite) UIView *qrCodeFrameView;
@property (nonatomic, readwrite) UILabel *qrCodeTextView;
@property (nonatomic, readwrite) NSArray *supportedCodeTypes;
@property (nonatomic, readwrite) long long lastScanTime;
@property (nonatomic, readwrite) NSString *lastScanCode;
@property (nonatomic, readwrite) AVAudioPlayer *audioPlayer;
@end
@implementation ScanViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
self.captureSession = [AVCaptureSession new];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
self.supportedCodeTypes = @[AVMetadataObjectTypeUPCECode,
AVMetadataObjectTypeEAN13Code,
AVMetadataObjectTypeEAN8Code];
// AVMetadataObjectTypeCode39Code,
// AVMetadataObjectTypeCode39Mod43Code,
// AVMetadataObjectTypeCode93Code,
// AVMetadataObjectTypeCode128Code,
// AVMetadataObjectTypeAztecCode,
// AVMetadataObjectTypePDF417Code,
// AVMetadataObjectTypeITF14Code,
// AVMetadataObjectTypeDataMatrixCode,
// AVMetadataObjectTypeInterleaved2of5Code,
// AVMetadataObjectTypeQRCode];
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if(captureDevice == nil) {
NSLog(@"Failed to get the camera device");
return;
}
@try {
// Get an instance of the AVCaptureDeviceInput class using the previous device object.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
// Set the input device on the capture session.
[self.captureSession addInput:input];
// Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
[self.captureSession addOutput:captureMetadataOutput];
// Set delegate and use the default dispatch queue to execute the call back
[captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
captureMetadataOutput.metadataObjectTypes = self.supportedCodeTypes;
// captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
} @catch (NSException *error) {
// If any error occurs, simply print it out and don't continue any more.
NSLog(@"%@", error);
return;
}
// Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
self.videoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
self.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.videoPreviewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.videoPreviewLayer];
// Start video capture.
[self.captureSession startRunning];
// Move the result view and loading view to the front
[self.view bringSubviewToFront:self.resultView];
[self.view bringSubviewToFront:self.loadingView];
// Initialize QR Code Frame to highlight the QR code
self.qrCodeFrameView = [[UIView alloc] init];
if (self.qrCodeFrameView) {
self.qrCodeFrameView.layer.borderColor = UIColor.greenColor.CGColor;
self.qrCodeFrameView.layer.borderWidth = 2;
[self.view addSubview:self.qrCodeFrameView];
[self.view bringSubviewToFront:self.qrCodeFrameView];
}
self.qrCodeTextView = [[UILabel alloc] init];
if (self.qrCodeTextView) {
[self.qrCodeTextView setTextColor:UIColor.greenColor];
[self.qrCodeTextView setFont:[UIFont systemFontOfSize:20]];
[self.qrCodeFrameView addSubview:self.qrCodeTextView];
}
[self rotateLoadingImage];
[self setResultType:RESULT_TYPE_WORKING codeContent:@"Ready" price:0.00];
[self.loadingView setHidden:YES];
}
-(void)viewWillDisappear:(BOOL)animated {
if (self.audioPlayer != nil) {
[self.audioPlayer stop];
self.audioPlayer = nil;
}
[super viewWillDisappear:animated];
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
-(void) updatePreviewLayer:(AVCaptureConnection*)layer orientation:(AVCaptureVideoOrientation)orientation {
layer.videoOrientation = orientation;
self.videoPreviewLayer.frame = self.view.bounds;
}
-(void)viewDidLayoutSubviews {
[super viewDidLayoutSubviews];
if(self.videoPreviewLayer.connection != nil) {
UIDevice *currentDevice = [UIDevice currentDevice];
UIDeviceOrientation orientation = [currentDevice orientation];
AVCaptureConnection *previewLayerConnection = self.videoPreviewLayer.connection;
if(previewLayerConnection.isVideoOrientationSupported) {
switch (orientation) {
case UIDeviceOrientationPortrait:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortrait];
break;
case UIDeviceOrientationLandscapeRight:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationLandscapeLeft];
break;
case UIDeviceOrientationLandscapeLeft:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationLandscapeRight];
break;
case UIDeviceOrientationPortraitUpsideDown:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortraitUpsideDown];
break;
default:
[self updatePreviewLayer:previewLayerConnection orientation:AVCaptureVideoOrientationPortrait];
break;
}
}
}
}
-(void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
// Check if the metadataObjects array is not nil and it contains at least one object.
if (metadataObjects.count == 0) {
self.qrCodeFrameView.frame = CGRectZero;
return;
}
// Get the metadata object.
AVMetadataMachineReadableCodeObject *metadataObj = (AVMetadataMachineReadableCodeObject*)(metadataObjects[0]);
if ([self.supportedCodeTypes containsObject:metadataObj.type]) {
// If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
AVMetadataObject *barCodeObject = [self.videoPreviewLayer transformedMetadataObjectForMetadataObject:metadataObj];
NSString *code = metadataObj.stringValue;
if (code != nil) {
// check upc a code
if ([self checkUpcACode:metadataObj.type code:code] == NO) {
self.qrCodeTextView.text = @"";
return;
}
int i=0;
for (i=0; i<code.length; i++) {
char ch = [code characterAtIndex:i];
if (ch != '0') break;
}
if (i>0) i--;
code = [code substringFromIndex:i];
self.qrCodeFrameView.frame = barCodeObject.bounds;
[self.qrCodeTextView setText:code];
self.qrCodeTextView.frame = CGRectMake(0, self.qrCodeFrameView.frame.size.height-20, self.qrCodeFrameView.frame.size.width, 20);
NSLog(@"%@", code);
[self handleBarcode:code];
} else {
self.qrCodeTextView.text = @"";
}
}
}
-(BOOL)checkUpcACode:(AVMetadataObjectType)type code:(NSString*)code {
if (type == AVMetadataObjectTypeEAN13Code) {
if ([code hasPrefix:@"0"] && [code length] > 0) {
return YES;
}
}
return NO;
}
@end
The solution came from PBK on the Apple Developer Forums:
//change [self handleBarcode:code];
// to
dispatch_async(dispatch_get_main_queue(), ^{
[self handleBarcode:code];
});
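Applied in context, the tail of the success path in captureOutput:didOutputMetadataObjects:fromConnection: becomes roughly:
self.qrCodeFrameView.frame = barCodeObject.bounds;
[self.qrCodeTextView setText:code];
self.qrCodeTextView.frame = CGRectMake(0, self.qrCodeFrameView.frame.size.height - 20, self.qrCodeFrameView.frame.size.width, 20);
NSLog(@"%@", code);
// defer the (potentially slow) handling so the metadata callback returns quickly
dispatch_async(dispatch_get_main_queue(), ^{
    [self handleBarcode:code];
});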
I am trying to screen capture a view that has an APPLCameraViewContoller in it. For some reason, when the recording is saved to the camera roll, whatever the camera is looking at is not captured no matter what I do; only the UIView with the labels is, so the result is a black background with the labels. I want the labels on top of the camera view. Any suggestions or examples on how to go about this? Here is the screen-capture .m, which I assume is the reason this is happening.
(The ASScreenRecorder.m implementation pasted with this question is identical to the ASScreenRecorder listing above, so it is not repeated here.)
I would be more than happy to provide my full source code to anyone who could tackle something like this because posting multiple .m's on this would take up a lot of space.
I'm not sure if your situation is the same, but I've been using ASScreenRecorder to record an ARSceneView, and that has been working fine. Have a look at the following link: you give it a view to render, and it records it and provides an output URL. You might have to make a small edit to the class to get the stopRecording function's completion handler to work.
https://github.com/alskipp/ASScreenRecorder
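Basic usage, going by the ASScreenRecorder header quoted near the top of this page (the output path is just an example; leaving videoURL nil saves to the camera roll instead):
ASScreenRecorder *recorder = [ASScreenRecorder sharedInstance];
recorder.videoURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.mp4"]];
if ([recorder startRecording]) {
    NSLog(@"Recording started");
}
// ... later, when the capture should end:
[recorder stopRecordingWithCompletion:^{
    NSLog(@"Recording finished");
}];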
I am trying to create a square video, and for that I am using a 640*480 camera view, but I am not able to crop the video from the top. I have tried many things with no success, so please suggest how to do it. What transform would help?
Usage:
Create a property (or any other variable) to hold the VideoTranscoder:
self.videoTranscoder = [SCVideoTranscoder new];
self.videoTranscoder.asset = PUT_UR_AVASSET_HERE;
self.videoTranscoder.outputURL = PUT_A_FILE_URL_HERE;
__weak typeof(self) weakSelf = self;
self.videoTranscoder.completionBlock = ^(BOOL success){
//PUT YOUR CODE HERE WHEN THE TRANSCODING IS DONE...
};
[self.videoTranscoder start];
You can cancel the transcoding process by calling [transcoder cancel].
SCVideoTranscoder.h file
#import <Foundation/Foundation.h>
@interface SCVideoTranscoder : NSObject
@property (nonatomic, strong) AVAsset *asset;
@property (nonatomic, assign) BOOL cancelled;
@property (nonatomic, strong) NSURL *outputURL;
@property (nonatomic, strong) void (^completionBlock)(BOOL success);
- (void)start;
- (void)cancel;
@end
SCVideoTranscoder.m file
In this file I set the video up to be 480x480; you can of course change this to whatever value you like.
#import "SCVideoTranscoder.h"
@interface SCVideoTranscoder()
@property (nonatomic, strong) dispatch_queue_t mainSerializationQueue;
@property (nonatomic, strong) dispatch_queue_t rwAudioSerializationQueue;
@property (nonatomic, strong) dispatch_queue_t rwVideoSerializationQueue;
@property (nonatomic, strong) dispatch_group_t dispatchGroup;
@property (nonatomic, strong) AVAssetReader* assetReader;
@property (nonatomic, strong) AVAssetWriter* assetWriter;
@property (nonatomic, strong) AVAssetReaderTrackOutput *assetReaderAudioOutput;
@property (nonatomic, strong) AVAssetReaderTrackOutput *assetReaderVideoOutput;
@property (nonatomic, strong) AVAssetWriterInput *assetWriterAudioInput;
@property (nonatomic, strong) AVAssetWriterInput *assetWriterVideoInput;
@property (nonatomic, assign) BOOL audioFinished;
@property (nonatomic, assign) BOOL videoFinished;
@end
@implementation SCVideoTranscoder
- (void)start
{
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
// Create the main serialization queue.
self.mainSerializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
NSString *rwAudioSerializationQueueDescription = [NSString stringWithFormat:@"%@ rw audio serialization queue", self];
// Create the serialization queue to use for reading and writing the audio data.
self.rwAudioSerializationQueue = dispatch_queue_create([rwAudioSerializationQueueDescription UTF8String], NULL);
NSString *rwVideoSerializationQueueDescription = [NSString stringWithFormat:@"%@ rw video serialization queue", self];
// Create the serialization queue to use for reading and writing the video data.
self.rwVideoSerializationQueue = dispatch_queue_create([rwVideoSerializationQueueDescription UTF8String], NULL);
self.cancelled = NO;
// Asynchronously load the tracks of the asset you want to read.
[self.asset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:^{
// Once the tracks have finished loading, dispatch the work to the main serialization queue.
dispatch_async(self.mainSerializationQueue, ^{
// Due to asynchronous nature, check to see if user has already cancelled.
if (self.cancelled)
return;
BOOL success = YES;
NSError *localError = nil;
// Check for success of loading the assets tracks.
success = ([self.asset statusOfValueForKey:@"tracks" error:&localError] == AVKeyValueStatusLoaded);
if (success)
{
// If the tracks loaded successfully, make sure that no file exists at the output path for the asset writer.
NSFileManager *fm = [NSFileManager defaultManager];
NSString *localOutputPath = [self.outputURL path];
if ([fm fileExistsAtPath:localOutputPath])
success = [fm removeItemAtPath:localOutputPath error:&localError];
}
if (success)
{
success = [self setupAssetReaderAndAssetWriter:&localError];
if (success)
{
[self startAssetReaderAndWriter:&localError];
}else{
[self readingAndWritingDidFinishSuccessfully:success withError:localError];
}
}
});
}];
}
- (BOOL)setupAssetReaderAndAssetWriter:(NSError **)outError
{
// Create and initialize the asset reader.
self.assetReader = [[AVAssetReader alloc] initWithAsset:self.asset error:outError];
BOOL success = (self.assetReader != nil);
if (success)
{
// If the asset reader was successfully initialized, do the same for the asset writer.
self.assetWriter = [[AVAssetWriter alloc] initWithURL:self.outputURL fileType:AVFileTypeMPEG4 error:outError];
success = (self.assetWriter != nil);
}
if (success)
{
// If the reader and writer were successfully initialized, grab the audio and video asset tracks that will be used.
AVAssetTrack *assetAudioTrack = nil, *assetVideoTrack = nil;
NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
if ([audioTracks count] > 0)
assetAudioTrack = [audioTracks objectAtIndex:0];
NSArray *videoTracks = [self.asset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0)
assetVideoTrack = [videoTracks objectAtIndex:0];
if (assetAudioTrack)
{
// If there is an audio track to read, set the decompression settings to Linear PCM and create the asset reader output.
NSDictionary *decompressionAudioSettings = @{ AVFormatIDKey : [NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM] };
self.assetReaderAudioOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:assetAudioTrack outputSettings:decompressionAudioSettings];
if (DEBUG) {
self.assetReader.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMake(20, 1));
}else{
self.assetReader.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMake(10, 1));
}
[self.assetReader addOutput:self.assetReaderAudioOutput];
// Then, set the compression settings to 128kbps AAC and create the asset writer input.
AudioChannelLayout stereoChannelLayout = {
.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
.mChannelBitmap = 0,
.mNumberChannelDescriptions = 0
};
NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
NSDictionary *compressionAudioSettings = @{
AVFormatIDKey : [NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC],
AVEncoderBitRateKey : [NSNumber numberWithInteger:128000],
AVSampleRateKey : [NSNumber numberWithInteger:44100],
AVChannelLayoutKey : channelLayoutAsData,
AVNumberOfChannelsKey : [NSNumber numberWithUnsignedInteger:2]
};
self.assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:[assetAudioTrack mediaType] outputSettings:compressionAudioSettings];
[self.assetWriter addInput:self.assetWriterAudioInput];
}
if (assetVideoTrack)
{
// If there is a video track to read, set the decompression settings for YUV and create the asset reader output.
NSDictionary *decompressionVideoSettings = @{
(id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8],
(id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary]
};
self.assetReaderVideoOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:assetVideoTrack outputSettings:decompressionVideoSettings];
[self.assetReader addOutput:self.assetReaderVideoOutput];
CMFormatDescriptionRef formatDescription = NULL;
// Grab the video format descriptions from the video track and grab the first one if it exists.
NSArray *videoFormatDescriptions = [assetVideoTrack formatDescriptions];
if ([videoFormatDescriptions count] > 0)
formatDescription = (__bridge CMFormatDescriptionRef)[videoFormatDescriptions objectAtIndex:0];
CGSize trackDimensions = {
.width = 0.0,
.height = 0.0,
};
CGAffineTransform videoTransform = [assetVideoTrack preferredTransform];
// If the video track had a format description, grab the track dimensions from there. Otherwise, grab them direcly from the track itself.
if (formatDescription)
trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false);
else
trackDimensions = [assetVideoTrack naturalSize];
int width = 480;
int height = 480;
int bitrate = 1000000;
NSDictionary *compressionSettings = @{
AVVideoAverageBitRateKey: [NSNumber numberWithInt:bitrate], AVVideoMaxKeyFrameIntervalKey: @(150),
AVVideoProfileLevelKey: AVVideoProfileLevelH264BaselineAutoLevel,
AVVideoAllowFrameReorderingKey: @NO,
AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCAVLC,
AVVideoExpectedSourceFrameRateKey: @(30)
};
if ([SCDeviceModel speedgrade]==0) {
width = 320;
height = 320;
compressionSettings = @{
AVVideoAverageBitRateKey: [NSNumber numberWithInt:bitrate],
AVVideoProfileLevelKey: AVVideoProfileLevelH264BaselineAutoLevel,
AVVideoAllowFrameReorderingKey: @NO};
}
NSDictionary *videoSettings =
@{
AVVideoCodecKey: AVVideoCodecH264,
AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill,
AVVideoWidthKey: [NSNumber numberWithInt:width],
AVVideoHeightKey: [NSNumber numberWithInt:height],
AVVideoCompressionPropertiesKey: compressionSettings};
// Create the asset writer input and add it to the asset writer.
self.assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:[assetVideoTrack mediaType] outputSettings:videoSettings];
self.assetWriterVideoInput.transform = videoTransform;
[self.assetWriter addInput:self.assetWriterVideoInput];
}
}
return success;
}
- (void)startAssetReaderAndWriter:(NSError **)outError
{
BOOL success = YES;
// Attempt to start the asset reader.
success = [self.assetReader startReading];
if (!success)
*outError = [self.assetReader error];
if (success)
{
// If the reader started successfully, attempt to start the asset writer.
success = [self.assetWriter startWriting];
if (!success)
*outError = [self.assetWriter error];
}
if (success)
{
// If the asset reader and writer both started successfully, create the dispatch group where the reencoding will take place and start a sample-writing session.
self.dispatchGroup = dispatch_group_create();
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];
self.audioFinished = NO;
self.videoFinished = NO;
if (self.assetWriterAudioInput)
{
// If there is audio to reencode, enter the dispatch group before beginning the work.
dispatch_group_enter(self.dispatchGroup);
// Specify the block to execute when the asset writer is ready for audio media data, and specify the queue to call it on.
[self.assetWriterAudioInput requestMediaDataWhenReadyOnQueue:self.rwAudioSerializationQueue usingBlock:^{
// Because the block is called asynchronously, check to see whether its task is complete.
if (self.audioFinished)
return;
BOOL completedOrFailed = NO;
// If the task isn't complete yet, make sure that the input is actually ready for more media data.
while ([self.assetWriterAudioInput isReadyForMoreMediaData] && !completedOrFailed)
{
// Get the next audio sample buffer, and append it to the output file.
CMSampleBufferRef sampleBuffer = [self.assetReaderAudioOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
BOOL success = [self.assetWriterAudioInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
{
// Mark the input as finished, but only if we haven't already done so, and then leave the dispatch group (since the audio work has finished).
BOOL oldFinished = self.audioFinished;
self.audioFinished = YES;
if (oldFinished == NO)
{
[self.assetWriterAudioInput markAsFinished];
}
dispatch_group_leave(self.dispatchGroup);
}
}];
}
if (self.assetWriterVideoInput)
{
// If we had video to reencode, enter the dispatch group before beginning the work.
dispatch_group_enter(self.dispatchGroup);
// Specify the block to execute when the asset writer is ready for video media data, and specify the queue to call it on.
[self.assetWriterVideoInput requestMediaDataWhenReadyOnQueue:self.rwVideoSerializationQueue usingBlock:^{
// Because the block is called asynchronously, check to see whether its task is complete.
if (self.videoFinished)
return;
BOOL completedOrFailed = NO;
// If the task isn't complete yet, make sure that the input is actually ready for more media data.
while ([self.assetWriterVideoInput isReadyForMoreMediaData] && !completedOrFailed)
{
// Get the next video sample buffer, and append it to the output file.
CMSampleBufferRef sampleBuffer = [self.assetReaderVideoOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
BOOL success = [self.assetWriterVideoInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
{
// Mark the input as finished, but only if we haven't already done so, and then leave the dispatch group (since the video work has finished).
BOOL oldFinished = self.videoFinished;
self.videoFinished = YES;
if (oldFinished == NO)
{
[self.assetWriterVideoInput markAsFinished];
}
dispatch_group_leave(self.dispatchGroup);
}
}];
}
// Set up the notification that the dispatch group will send when the audio and video work have both finished.
dispatch_group_notify(self.dispatchGroup, self.mainSerializationQueue, ^{
BOOL finalSuccess = YES;
NSError *finalError = nil;
// Check to see if the work has finished due to cancellation.
if (self.cancelled)
{
// If so, cancel the reader and writer.
[self.assetReader cancelReading];
[self.assetWriter cancelWriting];
}
else
{
// If cancellation didn't occur, first make sure that the asset reader didn't fail.
if ([self.assetReader status] == AVAssetReaderStatusFailed)
{
finalSuccess = NO;
finalError = [self.assetReader error];
}
// If the asset reader didn't fail, attempt to stop the asset writer and check for any errors.
}
if (finalSuccess)
{
[self.assetWriter finishWritingWithCompletionHandler:^{
NSError *finalError = nil;
if(self.assetWriter.status == AVAssetWriterStatusFailed)
{
finalError = self.assetWriter.error;
}
[self readingAndWritingDidFinishSuccessfully:finalError==nil
withError:finalError];
}];
return;
}
// Call the method to handle completion, and pass in the appropriate parameters to indicate whether reencoding was successful.
[self readingAndWritingDidFinishSuccessfully:finalSuccess withError:finalError];
});
}
}
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error
{
if (!success)
{
// If the reencoding process failed, we need to cancel the asset reader and writer.
[self.assetReader cancelReading];
[self.assetWriter cancelWriting];
dispatch_async(dispatch_get_main_queue(), ^{
if (self.completionBlock) {
self.completionBlock(NO);
}
});
}
else
{
// Reencoding was successful, reset booleans.
self.cancelled = NO;
self.videoFinished = NO;
self.audioFinished = NO;
dispatch_async(dispatch_get_main_queue(), ^{
if (self.completionBlock) {
self.completionBlock(YES);
}
});
}
}
- (void)cancel
{
// Handle cancellation asynchronously, but serialize it with the main queue.
dispatch_async(self.mainSerializationQueue, ^{
// If we had audio data to reencode, we need to cancel the audio work.
if (self.assetWriterAudioInput)
{
// Handle cancellation asynchronously again, but this time serialize it with the audio queue.
dispatch_async(self.rwAudioSerializationQueue, ^{
// Update the Boolean property indicating the task is complete and mark the input as finished if it hasn't already been marked as such.
BOOL oldFinished = self.audioFinished;
self.audioFinished = YES;
if (oldFinished == NO)
{
[self.assetWriterAudioInput markAsFinished];
}
// Leave the dispatch group since the audio work is finished now.
dispatch_group_leave(self.dispatchGroup);
});
}
if (self.assetWriterVideoInput)
{
// Handle cancellation asynchronously again, but this time serialize it with the video queue.
dispatch_async(self.rwVideoSerializationQueue, ^{
// Update the Boolean property indicating the task is complete and mark the input as finished if it hasn't already been marked as such.
BOOL oldFinished = self.videoFinished;
self.videoFinished = YES;
if (oldFinished == NO)
{
[self.assetWriterVideoInput markAsFinished];
}
// Leave the dispatch group, since the video work is finished now.
dispatch_group_leave(self.dispatchGroup);
});
}
// Set the cancelled Boolean property to YES to cancel any work on the main queue as well.
self.cancelled = YES;
});
}
@end
Change
[composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
into
AVMutableCompositionTrack *theTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
and fix your time range, height, width, etc.
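If the goal is the square crop from the question, one common approach is to export through an AVMutableVideoComposition whose render size is square and whose layer instruction shifts the source upward so the unwanted top strip falls outside the render rect. A rough sketch, where asset, the 480x480 render size and the 80-point offset are placeholders to adapt:
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = CGSizeMake(480, 480); // square output
videoComposition.frameDuration = CMTimeMake(1, 30);
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *layerInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
// shift the source up by the amount you want to crop off the top (80 here is a placeholder)
[layerInstruction setTransform:CGAffineTransformMakeTranslation(0, -80) atTime:kCMTimeZero];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];
// hand videoComposition to an AVAssetExportSession (or an AVAssetReaderVideoCompositionOutput) to render the cropped file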
I am developing an iOS app which records video using the rear camera.
I have managed to get the preview layer working fine.
However, when I click the Record button, the preview freezes.
The following is my code. Please help me solve this problem.
Pg5VideoViewController.h
@interface Pg5VideoViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureFileOutputRecordingDelegate> {
BOOL WeAreRecording;
IBOutlet UIView *videoViewBg;
AVCaptureSession *_captureSession;
UIImageView *_imageView;
CALayer *_customLayer;
AVCaptureVideoPreviewLayer *_prevLayer;
UIColor *pickedColor;
AVCaptureMovieFileOutput *movieFileOutput;
IBOutlet UIView *theColor;
}
@property (nonatomic,retain) IBOutlet UIView *theColor;
@property (nonatomic,retain) UIColor *pickedColor;
@property (nonatomic,retain) IBOutlet UIView *videoViewBg;
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
@property (nonatomic, retain) AVCaptureMovieFileOutput *movieFileOutput;
-(void)initCapture;
-(UIColor *) colorOfPoint:(CGPoint)point;
-(IBAction)takeVideo:(id)sender;
@end
the Pg5VideoViewController.m file:
@implementation Pg5VideoViewController
@synthesize videoViewBg;
@synthesize captureSession = _captureSession;
@synthesize imageView = _imageView;
@synthesize customLayer = _customLayer;
@synthesize prevLayer = _prevLayer;
@synthesize pickedColor = _pickedColor;
@synthesize theColor = _theColor;
@synthesize movieFileOutput = _movieFileOutput;
#pragma mark -
#pragma mark Initialization
- (id)init {
self = [super init];
if (self) {
self.imageView = nil;
self.prevLayer = nil;
self.customLayer = nil;
}
return self;
}
- (void)initCapture {
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
error:nil];
movieFileOutput = [[AVCaptureVideoDataOutput alloc] init];
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[movieFileOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[movieFileOutput setVideoSettings:videoSettings];
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession addInput:captureInput];
[self.captureSession addOutput:movieFileOutput];
[self.captureSession setSessionPreset:AVCaptureSessionPresetMedium];
self.customLayer = [CALayer layer];
self.customLayer.frame = CGRectMake(42, 40, 940, 558);
//self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);
//self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
[self.view.layer addSublayer:self.customLayer];
[self.captureSession startRunning];
}
#pragma mark -
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer,0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
[self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage waitUntilDone:YES];
UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
CGImageRelease(newImage);
[self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
[pool drain];
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
NSLog(#"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
}
}];
}
[library release];
}
}
- (void)viewDidAppear:(BOOL)animated {
}
- (IBAction)takeVideo:(id)sender {
AVCaptureMovieFileOutput *movieFileOutput1 = [[AVCaptureMovieFileOutput alloc] init];
if(!WeAreRecording) {
NSLog(#"START RECORDING");
WeAreRecording = YES;
self.videoViewBg.backgroundColor = [UIColor redColor];
NSDateFormatter *formatter;
NSString *dateString;
formatter = [[NSDateFormatter alloc]init];
[formatter setDateFormat:@"dd-MM-yyyy HH:mm:ss"];
dateString = [formatter stringFromDate:[NSDate date]];
[formatter release];
NSLog(@"The dateString is : %@", dateString);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
NSString *movieFileName = [NSString stringWithFormat:@"%@.mp4", dateString];
NSString *filePath = [documentsDirectoryPath stringByAppendingPathComponent:movieFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:filePath];
[self.captureSession stopRunning];
[self.captureSession beginConfiguration];
// [self.captureSession removeOutput:movieFileOutput];
if([self.captureSession canAddOutput:movieFileOutput1])
{
[self.captureSession addOutput:movieFileOutput1];
}
else
{
NSLog(#"Couldn't add still output");
}
[movieFileOutput1 startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[self.captureSession commitConfiguration];
[self.captureSession startRunning];
[outputURL release];
} else {
NSLog(#"STOP RECORDING");
WeAreRecording = NO;
self.videoViewBg.backgroundColor = [UIColor whiteColor];
[movieFileOutput1 stopRecording];
[self.captureSession removeOutput:movieFileOutput1];
}
}
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
UITouch *touch = [[event allTouches] anyObject];
CGPoint loc = [touch locationInView:self.view];
self.pickedColor = [self colorOfPoint:loc];
self.theColor.backgroundColor = self.pickedColor;
}
-(UIColor *) colorOfPoint:(CGPoint)point {
unsigned char pixel[4] = {0};
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pixel, 1, 1, 8, 4, colorSpace, kCGImageAlphaPremultipliedLast);
CGContextTranslateCTM(context, -point.x, -point.y);
[self.view.layer renderInContext:context];
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
UIColor *color = [UIColor colorWithRed:pixel[0]/255.0 green:pixel[1]/255.0 blue:pixel[2]/255.0 alpha:pixel[3]/255.0];
return color;
}
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad {
[super viewDidLoad];
[self initCapture];
WeAreRecording = NO;
self.videoViewBg.layer.cornerRadius = 55;
}
// Override to allow orientations other than the default portrait orientation.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
if(interfaceOrientation == UIInterfaceOrientationLandscapeRight) {
return YES;
}
return NO;
}
- (void)didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc. that aren't in use.
}
- (void)viewDidUnload {
[super viewDidUnload];
self.imageView = nil;
self.customLayer = nil;
self.prevLayer = nil;
[self.captureSession stopRunning];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (void)dealloc {
[movieFileOutput release];
[self.captureSession release];
[super dealloc];
}
@end
Please help
The problem here is not trivial. AVFoundation simply can't handle both AVCaptureMovieFileOutput and AVCaptureVideoDataOutput simultaneously. That means you can't display a preview (which requires AVCaptureVideoDataOutput) while recording (which requires AVCaptureMovieFileOutput). This is very stupid, but that's life.
The only way I know to do this is to use only AVCaptureVideoDataOutput and, inside captureOutput:didOutputSampleBuffer:fromConnection:, write the frames to the video file manually. The following code snippets should help.
Properties
@property (strong, nonatomic) AVAssetWriter* recordingAssetWriter;
@property (strong, nonatomic) AVAssetWriterInput* recordingAssetWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor* recordingPixelBufferAdaptor;
To initialize the video file (when you start recording or something)
// Init AVAssetWriter
NSError* error = nil;
self.recordingAssetWriter = [[AVAssetWriter alloc] initWithURL:<the video file URL> fileType:AVFileTypeMPEG4 error:&error];
// Init AVAssetWriterInput & AVAssetWriterInputPixelBufferAdaptor
NSDictionary* settings = @{AVVideoWidthKey: @(480), AVVideoHeightKey: @(640), AVVideoCodecKey: AVVideoCodecH264};
self.recordingAssetWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:settings];
self.recordingAssetWriterInput.expectsMediaDataInRealTime = YES;
self.recordingPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:self.recordingAssetWriterInput sourcePixelBufferAttributes:@{(NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
// Add Input
[self.recordingAssetWriter addInput:self.recordingAssetWriterInput];
// Start ...
_recording = YES;
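The snippet above assumes an AVCaptureVideoDataOutput is already delivering frames to the delegate; for completeness, here is a sketch of that capture-side setup (the queue label is an assumption, and the 32BGRA format matches the adaptor attributes above):
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
dispatch_queue_t frameQueue = dispatch_queue_create("recording.frames", DISPATCH_QUEUE_SERIAL);
[videoDataOutput setSampleBufferDelegate:self queue:frameQueue];
if ([self.captureSession canAddOutput:videoDataOutput]) {
    [self.captureSession addOutput:videoDataOutput];
}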
To write frames to the video file
// Inside the captureOutput:didOutputSampleBuffer:fromConnection: delegate method
// _recording is the flag to see if we're recording
if (_recording) {
CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
// The pixel buffer to append is the one backing this sample buffer.
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (self.recordingAssetWriter.status != AVAssetWriterStatusWriting) {
[self.recordingAssetWriter startWriting];
[self.recordingAssetWriter startSessionAtSourceTime:sampleTime];
}
if (self.recordingAssetWriterInput.readyForMoreMediaData) {
[self.recordingPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:sampleTime];
}
}
To finalize the video file when you finish recording:
[self.recordingAssetWriterInput markAsFinished];
[self.recordingAssetWriter finishWritingWithCompletionHandler:^{
// Do the cleanup here, inside the completion handler, not immediately after calling finishWritingWithCompletionHandler:, since it is an async method
self.recordingAssetWriter = nil;
self.recordingAssetWriterInput = nil;
self.recordingPixelBufferAdaptor = nil;
}];
Note that I omitted error checking for clarity.
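If you want that error checking back, one natural place for it is inside the completion handler; a sketch using the same properties as above:
[self.recordingAssetWriterInput markAsFinished];
[self.recordingAssetWriter finishWritingWithCompletionHandler:^{
    // Check the writer's status before releasing it.
    if (self.recordingAssetWriter.status == AVAssetWriterStatusFailed) {
        NSLog(@"Recording failed: %@", self.recordingAssetWriter.error);
    }
    self.recordingAssetWriter = nil;
    self.recordingAssetWriterInput = nil;
    self.recordingPixelBufferAdaptor = nil;
}];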