I am attempting to take the output of an AVCaptureSession from the back camera and render it into an OpenGL ES texture, mapped onto a quad.
View the complete source here.
Regardless of which preset I use, the didDropSampleBuffer callback reports 'OutOfBuffers'. I have attempted to copy the sampleBuffer passed to didOutputSampleBuffer, but perhaps my implementation has an issue.
I have also tried using a serial queue, since I know the capture session's startRunning is a blocking call and shouldn't be on the main queue. However, using the main queue is the only way I've been able to see any frames.
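Roughly, the serial-queue variant I tried looked like this (just a sketch, using the same _sessionQueue and videoOutput names as in my code below; the listing below is the main-queue version that actually produces frames):
[self.videoOutput setSampleBufferDelegate:self queue:_sessionQueue];
dispatch_async(_sessionQueue, ^{
    [self.captureSession startRunning]; // blocking, so keep it off the main queue
});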
Here is my AV setup:
- (void)setupAV
{
_sessionQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, self.context, NULL, &_videoTextureCache);
if (err) {
NSLog(#"Couldn't create video cache.");
return;
}
self.captureSession = [[AVCaptureSession alloc] init];
if (!self.captureSession) {
return;
}
[self.captureSession beginConfiguration];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
AVCaptureDevicePosition devicePosition = AVCaptureDevicePositionBack;
AVCaptureDeviceDiscoverySession *deviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:devicePosition];
for (AVCaptureDevice *device in deviceDiscoverySession.devices) {
if (device.position == devicePosition) {
self.captureDevice = device;
if (self.captureDevice != nil) {
break;
}
}
}
NSError *captureDeviceError = nil;
AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:self.captureDevice error:&captureDeviceError];
if (captureDeviceError) {
NSLog(#"Couldn't configure device input.");
return;
}
if (![self.captureSession canAddInput:input]) {
NSLog(#"Couldn't add video input.");
[self.captureSession commitConfiguration];
return;
}
[self.captureSession addInput:input];
self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
if (!self.videoOutput) {
NSLog(#"Error creating video output.");
[self.captureSession commitConfiguration];
return;
}
self.videoOutput.alwaysDiscardsLateVideoFrames = YES;
NSDictionary *settings = [[NSDictionary alloc] initWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
self.videoOutput.videoSettings = settings;
[self.videoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
if ([self.captureSession canAddOutput:self.videoOutput]) {
[self.captureSession addOutput:self.videoOutput];
} else {
NSLog(#"Couldn't add video output.");
[self.captureSession commitConfiguration];
return;
}
if (self.captureSession.isRunning) {
NSLog(#"Session is already running.");
[self.captureSession commitConfiguration];
return;
}
// NSError *configLockError;
// int frameRate = 24;
// [self.captureDevice lockForConfiguration:&configLockError];
// self.captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, frameRate);
// self.captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, frameRate);
// [self.captureDevice unlockForConfiguration];
//
// if (configLockError) {
// NSLog(#"Error locking for configuration. %#", configLockError);
// }
[self.captureSession commitConfiguration];
}
And here is my captureOutput callback:
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
// if (_sampleBuffer) {
// CFRelease(_sampleBuffer);
// _sampleBuffer = nil;
// }
//
// OSStatus status = CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &_sampleBuffer);
// if (noErr != status) {
// _sampleBuffer = nil;
// }
//
// if (!_sampleBuffer) {
// return;
// }
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (!_videoTextureCache) {
NSLog(#"No video texture cache");
return;
}
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
_rgbaTexture = nil;
// Periodic texture cache flush every frame
CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
// CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture
// optimally from CVImageBufferRef.
glActiveTexture(GL_TEXTURE0);
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
_videoTextureCache,
pixelBuffer,
NULL,
GL_TEXTURE_2D,
GL_RGBA,
(GLsizei)width,
(GLsizei)height,
GL_BGRA,
GL_UNSIGNED_BYTE,
0,
&_rgbaTexture);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
if (err) {
NSLog(#"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
if (_rgbaTexture) {
glBindTexture(CVOpenGLESTextureGetTarget(_rgbaTexture), CVOpenGLESTextureGetName(_rgbaTexture));
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
}
For completeness, here are the ivar and property declarations:
@interface AVViewController () <AVCaptureVideoDataOutputSampleBufferDelegate> {
CVOpenGLESTextureRef _rgbaTexture;
CVOpenGLESTextureCacheRef _videoTextureCache;
dispatch_queue_t _sessionQueue;
GLuint _program;
GLuint _vertexArray;
GLuint _vertexBuffer;
CMSampleBufferRef _sampleBuffer;
}
@property (nonatomic, strong) EAGLContext *context;
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureDevice *captureDevice;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;
@property (readwrite) GLint vertexAttrib;
@property (readwrite) GLint textureAttrib;
@property (readwrite) GLint videoFrameUniform;
I have searched and searched and cannot find a solution to this. Any help would be greatly appreciated.
Related
How can I render Videos using Metal or OpenGL ES?
I'm talking about decoding and displaying the frames myself.
I am very new to Metal and OpenGL ES and I don't know where to begin.
What you're asking isn't trivial for someone just getting started with this, so you might want to break this down into smaller parts. That said, I have done this and can describe the general process.
First, you'll start with an AVAssetReader and set up AVAssetReaderOutputs for the audio and video tracks. From those, you iterate through the CMSampleBufferRefs. For each video frame, you'll extract a CVImageBufferRef.
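The reading loop is roughly the following (a sketch only: error handling and the audio track are omitted, and `asset` is assumed to be an AVAsset you've already loaded):
NSError *error = nil;
AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:asset error:&error];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
AVAssetReaderTrackOutput *trackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:outputSettings];
[reader addOutput:trackOutput];
[reader startReading];
CMSampleBufferRef sampleBuffer = NULL;
while ((sampleBuffer = [trackOutput copyNextSampleBuffer])) {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); // per-frame pixel buffer
    // upload imageBuffer to a texture here (see the YUV sketch below)
    CFRelease(sampleBuffer);
}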
Uploading the video frames to OpenGL textures can go a couple of different ways. The most performant path is to work with YUV data and upload the Y and UV planes via iOS's texture caches. You'll then use a YUV -> RGB shader to combine these planes and convert to the RGB colorspace for processing or display.
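For the YUV path, the per-plane upload through the texture cache looks roughly like this (a sketch: `textureCache` is assumed to come from CVOpenGLESTextureCacheCreate, the pixel buffer is 420YpCbCr8BiPlanar, and some samples use GL_RED_EXT / GL_RG_EXT instead of the luminance formats):
CVOpenGLESTextureRef lumaTexture = NULL, chromaTexture = NULL;
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
glActiveTexture(GL_TEXTURE0); // Y plane
CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_LUMINANCE, (GLsizei)width, (GLsizei)height, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &lumaTexture);
glActiveTexture(GL_TEXTURE1); // interleaved CbCr plane at half resolution
CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, (GLsizei)(width / 2), (GLsizei)(height / 2), GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chromaTexture);
// bind both textures, sample them in a YUV -> RGB fragment shader, then CFRelease
// the texture refs and flush the cache once the frame has been drawn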
You could also work with BGRA data from the movie file and upload that directly into a texture, but there's more overhead to that process. It is simpler, though, and avoids the need for a manual color conversion shader.
After that, you'll take your texture and render it to a quad using a passthrough vertex and fragment shader, or you could do shader-based processing on the video.
Similar processes and pathways exist for uploading to Metal, and the starting point is the same.
Now, if you don't want to implement all that by hand, I've written an open source framework called GPUImage that encapsulates this. It comes in Objective-C and Swift varieties. If you don't want to pull the entire framework, focus on the GPUImageMovie (for the former) or the MovieInput (for the latter) classes. They contain all the code needed to do this, so you can extract the implementations there and use them directly.
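If you do use GPUImage, the Objective-C side boils down to something like this (a sketch: `movieURL` and `gpuImageView` are placeholders for your file URL and an on-screen GPUImageView):
GPUImageMovie *movie = [[GPUImageMovie alloc] initWithURL:movieURL];
movie.playAtActualSpeed = YES;
GPUImageFilter *filter = [[GPUImageFilter alloc] init]; // passthrough; swap in any other filter
[movie addTarget:filter];
[filter addTarget:gpuImageView];
[movie startProcessing];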
Here's the simplest, fastest way to get started:
@import UIKit;
@import AVFoundation;
@import CoreMedia;
#import <MetalKit/MetalKit.h>
#import <Metal/Metal.h>
#import <MetalPerformanceShaders/MetalPerformanceShaders.h>
@interface ViewController : UIViewController <MTKViewDelegate, AVCaptureVideoDataOutputSampleBufferDelegate> {
NSString *_displayName;
NSString *serviceType;
}
@property (retain, nonatomic) SessionContainer *session;
@property (retain, nonatomic) AVCaptureSession *avSession;
@end
#import "ViewController.h"
@interface ViewController () {
MTKView *_metalView;
id<MTLDevice> _device;
id<MTLCommandQueue> _commandQueue;
id<MTLTexture> _texture;
CVMetalTextureCacheRef _textureCache;
}
@property (strong, nonatomic) AVCaptureDevice *videoDevice;
@property (nonatomic) dispatch_queue_t sessionQueue;
@end
@implementation ViewController
- (void)viewDidLoad {
NSLog(#"%s", __PRETTY_FUNCTION__);
[super viewDidLoad];
_device = MTLCreateSystemDefaultDevice();
_metalView = [[MTKView alloc] initWithFrame:self.view.bounds];
[_metalView setContentMode:UIViewContentModeScaleAspectFit];
_metalView.device = _device;
_metalView.delegate = self;
_metalView.clearColor = MTLClearColorMake(1, 1, 1, 1);
_metalView.colorPixelFormat = MTLPixelFormatBGRA8Unorm;
_metalView.framebufferOnly = NO;
_metalView.autoResizeDrawable = NO;
CVMetalTextureCacheCreate(NULL, NULL, _device, NULL, &_textureCache);
[self.view addSubview:_metalView];
self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL );
if ([self setupCamera]) {
[_avSession startRunning];
}
}
- (BOOL)setupCamera {
NSLog(#"%s", __PRETTY_FUNCTION__);
#try {
NSError * error;
_avSession = [[AVCaptureSession alloc] init];
[_avSession beginConfiguration];
[_avSession setSessionPreset:AVCaptureSessionPreset640x480];
// get list of devices; connect to front-facing camera
self.videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (self.videoDevice == nil) return FALSE;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.videoDevice error:&error];
[_avSession addInput:input];
dispatch_queue_t sampleBufferQueue = dispatch_queue_create("CameraMulticaster", DISPATCH_QUEUE_SERIAL);
AVCaptureVideoDataOutput * dataOutput = [[AVCaptureVideoDataOutput alloc] init];
[dataOutput setAlwaysDiscardsLateVideoFrames:YES];
[dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
[dataOutput setSampleBufferDelegate:self queue:sampleBufferQueue];
[_avSession addOutput:dataOutput];
[_avSession commitConfiguration];
} @catch (NSException *exception) {
NSLog(@"%s - %@", __PRETTY_FUNCTION__, exception.description);
return FALSE;
} @finally {
return TRUE;
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
{
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
CVMetalTextureRef texture = NULL;
CVReturn status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, MTLPixelFormatBGRA8Unorm, width, height, 0, &texture);
if(status == kCVReturnSuccess)
{
_metalView.drawableSize = CGSizeMake(width, height);
_texture = CVMetalTextureGetTexture(texture);
_commandQueue = [_device newCommandQueue];
CFRelease(texture);
}
}
}
- (void)drawInMTKView:(MTKView *)view {
// creating command encoder
if (_texture) {
id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
id<MTLTexture> drawingTexture = view.currentDrawable.texture;
// set up and encode the filter
MPSImageGaussianBlur *filter = [[MPSImageGaussianBlur alloc] initWithDevice:_device sigma:5];
[filter encodeToCommandBuffer:commandBuffer sourceTexture:_texture destinationTexture:drawingTexture];
// committing the drawing
[commandBuffer presentDrawable:view.currentDrawable];
[commandBuffer commit];
_texture = nil;
}
}
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
}
@end
In my project I have to add a book page-flip animation, and in this book a video plays on the right-hand page. Once the first video completes, the page turns like a book page and the second video plays on the next right-hand page, and so on. Now I have to save all of this as a video that can be downloaded, so that when the downloaded video is played from the gallery it looks the same as it does in my app. Right now I am recording the device's screen and saving it on a server for download. Everything is okay except the video player: in the video I am recording, the portion where the videos play (the right-hand page of the book) is not captured.
I am using the code below to record the screen. If any of you have another idea for doing the same thing, please share it, or if I need to change my code, please suggest how. Thanks in advance.
// ASScreenRecorder.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
typedef void (^VideoCompletionBlock)(void);
@protocol ASScreenRecorderDelegate;
@interface ASScreenRecorder : NSObject
@property (nonatomic, readonly) BOOL isRecording;
@property (nonatomic, weak) id <ASScreenRecorderDelegate> delegate;
// if saveURL is nil, video will be saved into camera roll
// this property can not be changed whilst recording is in progress
@property (strong, nonatomic) NSURL *videoURL;
+ (instancetype)sharedInstance;
- (BOOL)startRecording;
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
@end
// If your view contains an AVCaptureVideoPreviewLayer or an openGL view
// you'll need to write that data into the CGContextRef yourself.
// In the viewcontroller responsible for the AVCaptureVideoPreviewLayer / openGL view
// set yourself as the delegate for ASScreenRecorder.
// [ASScreenRecorder sharedInstance].delegate = self
// Then implement 'writeBackgroundFrameInContext:(CGContextRef*)contextRef'
// use 'CGContextDrawImage' to draw your view into the provided CGContextRef
@protocol ASScreenRecorderDelegate <NSObject>
- (void)writeBackgroundFrameInContext:(CGContextRef*)contextRef;
@end
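A minimal delegate implementation along the lines those comments describe might look like this (a sketch: `latestFrameImage` is a placeholder for a CGImageRef you update each frame from your camera or OpenGL pipeline):
- (void)writeBackgroundFrameInContext:(CGContextRef *)contextRef
{
    if (latestFrameImage == NULL) {
        return; // nothing to draw yet
    }
    // draw the current camera/OpenGL frame behind the UIKit content
    CGContextDrawImage(*contextRef, [UIScreen mainScreen].bounds, latestFrameImage);
}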
// ASScreenRecorder.m
// ScreenRecorder
//
// Created by Alan Skipp on 23/04/2014.
// Copyright (c) 2014 Alan Skipp. All rights reserved.
//
#import "ASScreenRecorder.h"
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import <AssetsLibrary/AssetsLibrary.h>
@interface ASScreenRecorder()
@property (strong, nonatomic) AVAssetWriter *videoWriter;
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
@property (strong, nonatomic) CADisplayLink *displayLink;
@property (strong, nonatomic) NSDictionary *outputBufferPoolAuxAttributes;
@property (nonatomic) CFTimeInterval firstTimeStamp;
@property (nonatomic) BOOL isRecording;
@end
@implementation ASScreenRecorder
{
dispatch_queue_t _render_queue;
dispatch_queue_t _append_pixelBuffer_queue;
dispatch_semaphore_t _frameRenderingSemaphore;
dispatch_semaphore_t _pixelAppendSemaphore;
CGSize _viewSize;
CGFloat _scale;
CGColorSpaceRef _rgbColorSpace;
CVPixelBufferPoolRef _outputBufferPool;
}
#pragma mark - initializers
+ (instancetype)sharedInstance {
static dispatch_once_t once;
static ASScreenRecorder *sharedInstance;
dispatch_once(&once, ^{
sharedInstance = [[self alloc] init];
});
return sharedInstance;
}
- (instancetype)init
{
self = [super init];
if (self) {
_viewSize = [UIApplication sharedApplication].delegate.window.bounds.size;
_scale = [UIScreen mainScreen].scale;
// record half size resolution for retina iPads
if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) {
_scale = 1.0;
}
_isRecording = NO;
_append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL);
_render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_render_queue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0));
_frameRenderingSemaphore = dispatch_semaphore_create(1);
_pixelAppendSemaphore = dispatch_semaphore_create(1);
}
return self;
}
#pragma mark - public
- (void)setVideoURL:(NSURL *)videoURL
{
NSAssert(!_isRecording, @"videoURL can not be changed whilst recording is in progress");
_videoURL = videoURL;
}
- (BOOL)startRecording
{
if (!_isRecording) {
[self setUpWriter];
_isRecording = (_videoWriter.status == AVAssetWriterStatusWriting);
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)];
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}
return _isRecording;
}
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
{
if (_isRecording) {
_isRecording = NO;
[_displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
[self completeRecordingSession:completionBlock];
}
}
#pragma mark - private
-(void)setUpWriter
{
_rgbColorSpace = CGColorSpaceCreateDeviceRGB();
NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
(id)kCVPixelBufferWidthKey : @(_viewSize.width * _scale),
(id)kCVPixelBufferHeightKey : @(_viewSize.height * _scale),
(id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * _scale * 4)
};
_outputBufferPool = NULL;
CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool);
NSError* error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:self.videoURL ?: [self tempFileURL]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
NSInteger pixelNumber = _viewSize.width * _viewSize.height * _scale;
NSDictionary* videoCompression = @{AVVideoAverageBitRateKey: @(pixelNumber * 11.4)};
NSDictionary* videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithInt:_viewSize.width*_scale],
AVVideoHeightKey: [NSNumber numberWithInt:_viewSize.height*_scale],
AVVideoCompressionPropertiesKey: videoCompression};
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
_videoWriterInput.transform = [self videoTransformForDeviceOrientation];
_avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil];
[_videoWriter addInput:_videoWriterInput];
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
}
- (CGAffineTransform)videoTransformForDeviceOrientation
{
CGAffineTransform videoTransform;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationLandscapeLeft:
videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
break;
case UIDeviceOrientationLandscapeRight:
videoTransform = CGAffineTransformMakeRotation(M_PI_2);
break;
case UIDeviceOrientationPortraitUpsideDown:
videoTransform = CGAffineTransformMakeRotation(M_PI);
break;
default:
videoTransform = CGAffineTransformIdentity;
}
return videoTransform;
}
- (NSURL*)tempFileURL
{
NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"];
[self removeTempFilePath:outputPath];
return [NSURL fileURLWithPath:outputPath];
}
- (void)removeTempFilePath:(NSString*)filePath
{
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath]) {
NSError* error;
if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
NSLog(#"Could not delete old recording:%#", [error localizedDescription]);
}
}
}
- (void)completeRecordingSession:(VideoCompletionBlock)completionBlock;
{
dispatch_async(_render_queue, ^{
dispatch_sync(_append_pixelBuffer_queue, ^{
[_videoWriterInput markAsFinished];
[_videoWriter finishWritingWithCompletionHandler:^{
void (^completion)(void) = ^() {
[self cleanup];
dispatch_async(dispatch_get_main_queue(), ^{
if (completionBlock) completionBlock();
});
};
if (self.videoURL) {
completion();
} else {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
NSLog(#"Error copying video to camera roll:%#", [error localizedDescription]);
} else {
[self removeTempFilePath:_videoWriter.outputURL.path];
completion();
}
}];
}
}];
});
});
}
- (void)cleanup
{
self.avAdaptor = nil;
self.videoWriterInput = nil;
self.videoWriter = nil;
self.firstTimeStamp = 0;
self.outputBufferPoolAuxAttributes = nil;
CGColorSpaceRelease(_rgbColorSpace);
CVPixelBufferPoolRelease(_outputBufferPool);
}
- (void)writeVideoFrame
{
// throttle the number of frames to prevent meltdown
// technique gleaned from Brad Larson's answer here: http://stackoverflow.com/a/5956119
if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
return;
}
dispatch_async(_render_queue, ^{
if (![_videoWriterInput isReadyForMoreMediaData]) return;
if (!self.firstTimeStamp) {
self.firstTimeStamp = _displayLink.timestamp;
}
CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp);
CMTime time = CMTimeMakeWithSeconds(elapsed, 1000);
CVPixelBufferRef pixelBuffer = NULL;
CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
if (self.delegate) {
[self.delegate writeBackgroundFrameInContext:&bitmapContext];
}
// draw each window into the context (other windows include UIKeyboard, UIAlert)
// FIX: UIKeyboard is currently only rendered correctly in portrait orientation
dispatch_sync(dispatch_get_main_queue(), ^{
UIGraphicsPushContext(bitmapContext); {
for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
[window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
}
} UIGraphicsPopContext();
});
// append pixelBuffer on a async dispatch_queue, the next frame is rendered whilst this one appends
// must not overwhelm the queue with pixelBuffers, therefore:
// check if _append_pixelBuffer_queue is ready
// if it’s not ready, release pixelBuffer and bitmapContext
if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) {
dispatch_async(_append_pixelBuffer_queue, ^{
BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success) {
NSLog(#"Warning: Unable to write buffer to video");
}
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
dispatch_semaphore_signal(_pixelAppendSemaphore);
});
} else {
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
}
dispatch_semaphore_signal(_frameRenderingSemaphore);
});
}
- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer
{
CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer);
CVPixelBufferLockBaseAddress(*pixelBuffer, 0);
CGContextRef bitmapContext = NULL;
bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer),
CVPixelBufferGetWidth(*pixelBuffer),
CVPixelBufferGetHeight(*pixelBuffer),
8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
);
CGContextScaleCTM(bitmapContext, _scale, _scale);
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
CGContextConcatCTM(bitmapContext, flipVertical);
return bitmapContext;
}
#end
@Gobinda this code works in my case. You need to specify the frame if you want to record a portion of the window. In the init method, viewSize is defined as the window size, so you need to change viewSize to match your video frame.
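For example (a sketch; the rectangle is a placeholder for your player view's frame, set inside ASScreenRecorder's init):
CGRect videoPlayerFrame = CGRectMake(0, 0, 384, 512); // the right-hand page / player frame in your layout
_viewSize = videoPlayerFrame.size; // instead of the window bounds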
I am trying to screen capture a view that has APPLCameraViewContoller in it. For some reason, when it gets saved to the camera roll, no matter what I do, what the camera is looking at does not get captured; only the UIView with labels does, so the result is a black background with the labels. I want to have the labels on top of the camera view. Any suggestions or examples on how to go about this? Here is the screen capture .m, which I am assuming is the reason this is happening.
(The ASScreenRecorder.h/.m source I am using is the same ASScreenRecorder listing shown above, so it is not repeated here.)
I would be more than happy to provide my full source code to anyone who could tackle something like this because posting multiple .m's on this would take up a lot of space.
I'm not sure if it's similar for you, but I've been using ASScreenRecorder to record an ARSceneView, and that has been going fine. Have a look at the following link; you can provide it a view to render, and it records and provides an output URL. You might have to make a small edit to the class to get the stopRecording function's completion handler to work.
https://github.com/alskipp/ASScreenRecorder
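Basic usage, going by the header shown earlier in this thread (the output path is only an example):
ASScreenRecorder *recorder = [ASScreenRecorder sharedInstance];
recorder.videoURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.mov"]];
[recorder startRecording];
// ... later, when you want to finish ...
[recorder stopRecordingWithCompletion:^{
    NSLog(@"Recording finished, file at %@", recorder.videoURL);
}];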
There was a similar question, and I have the same problem. Here is my code:
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
self.packagesBufferMutableArray = [NSMutableArray array];
self.fps = 25;
[self initDateFormatter];
// [self setupCaptureSession];
[self performSelector:@selector(stopWork) withObject:nil afterDelay:5];
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
self.isExtra = YES;
[self.captureSession startRunning];
}
- (void)initDateFormatter {
self.dateFormatter = [NSDateFormatter new];
[_dateFormatter setDateFormat:@"yy-MM-dd--HH-mm-ss"];
}
- (NSString *)generateFilePathForMovie {
return [NSString stringWithFormat:@"%@/%@.mov",
[NSHomeDirectory() stringByAppendingPathComponent:@"Documents"],
[_dateFormatter stringFromDate:[NSDate date]]];
}
- (NSDictionary *)settingsForWriterInput {
int bitRate = (300 + /*self.currentQuality*/5 * 90) * 1024; //NORMAL 750 * 1024
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:bitRate], AVVideoAverageBitRateKey,
nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:480], AVVideoWidthKey,
[NSNumber numberWithInt:320], AVVideoHeightKey,
codecSettings, AVVideoCompressionPropertiesKey,
nil];
return videoSettings;
}
- (AVAssetWriterInput *)createVideoWriterInput {
NSDictionary *videoSettings = [self settingsForWriterInput];
return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
}
- (void)setupCaptureSession
{
NSError *error = nil;
self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// Create a device input with the device and add it to the session.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input)
{
// Handling the error appropriately.
}
[self.captureSession addInput:input];
// Create a VideoDataOutput and add it to the session
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
self.assetWriterInput =[self createVideoWriterInput];// [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
self.assetWriterInput.expectsMediaDataInRealTime = YES;
AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:[self generateFilePathForMovie]] fileType:AVFileTypeMPEG4 error:&error];
[assetWriter addInput:self.assetWriterInput];
self.assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:self.assetWriterInput sourcePixelBufferAttributes:
[NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA],kCVPixelBufferPixelFormatTypeKey, nil]];
[self.captureSession addOutput:output];
// Configure your output.
dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Specify the pixel format
output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// If you wish to cap the frame rate to a known value, such as 15 fps, set
// minFrameDuration.
for (AVCaptureOutput* output in self.captureSession.outputs)
{
if ([output isKindOfClass:[AVCaptureVideoDataOutput class]])
{
AVCaptureConnection* connection = [output connectionWithMediaType:AVMediaTypeVideo];
CMTimeShow(connection.videoMinFrameDuration);
CMTimeShow(connection.videoMaxFrameDuration);
CMTime frameDuration = CMTimeMake(1, self.fps);
if (connection.isVideoMinFrameDurationSupported)
connection.videoMinFrameDuration = frameDuration;
if (connection.isVideoMaxFrameDurationSupported)
connection.videoMaxFrameDuration = frameDuration;
CMTimeShow(connection.videoMinFrameDuration);
CMTimeShow(connection.videoMaxFrameDuration);
}
else
{
AVCaptureConnection* connection = [output connectionWithMediaType:AVMediaTypeVideo];
CMTimeShow(connection.videoMinFrameDuration);
CMTimeShow(connection.videoMaxFrameDuration);
if (connection.isVideoMinFrameDurationSupported)
connection.videoMinFrameDuration = CMTimeMake(1, 20);
if (connection.isVideoMaxFrameDurationSupported)
connection.videoMaxFrameDuration = CMTimeMake(1, 20);
CMTimeShow(connection.videoMinFrameDuration);
CMTimeShow(connection.videoMaxFrameDuration);
}
}
// Start the session running to start the flow of data
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
[self.captureSession startRunning];
// Assign session to an ivar.
}
- (NSMutableDictionary *)createEmptyPackage
{
return [NSMutableDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInteger:0], @"framesCount", [NSMutableArray array], @"framesArray", nil];
}
- (void)updateCurrentPackageWithFrameImage:(UIImage *)frameImage
{
if (self.currentPackageMutableDictionary == nil)
{
NSLog(#"new package with number %d", self.packagesBufferMutableArray.count);
self.currentPackageMutableDictionary = [self createEmptyPackage];
}
NSInteger framesCount = [[self.currentPackageMutableDictionary objectForKey:@"framesCount"] integerValue];
NSMutableArray *framesArray = [self.currentPackageMutableDictionary objectForKey:@"framesArray"];
NSLog(@"added %d frame at current package", framesCount);
framesCount ++;
[framesArray addObject:frameImage];
[self.currentPackageMutableDictionary setObject:[NSNumber numberWithInteger:framesCount] forKey:@"framesCount"];
if (framesCount == self.fps)
{
[self.packagesBufferMutableArray addObject:[NSDictionary dictionaryWithDictionary:self.currentPackageMutableDictionary]];
self.currentPackageMutableDictionary = nil;
if ((self.packagesBufferMutableArray.count == 31) && !self.isExtra)
{
NSLog(#"remove old package");
[self.packagesBufferMutableArray removeObjectAtIndex:0];
}
}
}
// Delegate routine that is called when a sample buffer was written
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
// Create a UIImage from the sample buffer data
UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
// UIImageWriteToSavedPhotosAlbum(image, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
[self updateCurrentPackageWithFrameImage:image];
// UIImageView *imageView = [[UIImageView alloc] initWithImage:image];
// imageView.frame = CGRectMake(0, 0, image.size.width, image.size.height);
// [self.view addSubview:imageView];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// a very dense way to keep track of the time at which this frame
// occurs relative to the output stream, but it's just an example!
static int64_t frameNumber = 0;
if(self.assetWriterInput.readyForMoreMediaData)
[self.assetWriterPixelBufferAdaptor appendPixelBuffer:imageBuffer
withPresentationTime:CMTimeMake(frameNumber, 24)];
frameNumber++;
}
- (void) image: (UIImage *) image
didFinishSavingWithError: (NSError *) error
contextInfo: (void *) contextInfo
{
}
- (void)startWork
{
}
- (void)stopWork
{
[self.captureSession stopRunning];
[self.assetWriter finishWriting];
}
// Create a UIImage from sample buffer data
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
// Get a CMSampleBuffer's Core Video image buffer for the media data
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get the number of bytes per row for the pixel buffer
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Get the number of bytes per row for the pixel buffer
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the pixel buffer width and height
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Create a bitmap graphics context with the sample buffer data
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// Create a Quartz image from the pixel data in the bitmap graphics context
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
// Free up the context and color space
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
// Create an image object from the Quartz image
UIImage *image = [UIImage imageWithCGImage:quartzImage];
// Release the Quartz image
CGImageRelease(quartzImage);
return (image);
}
So it creates a .mov file, but I can't play it with any player.
QuickTime Player cannot open "14-02-02 - 21-29-11.mov" because the movie file format is not recognized.
Maybe I need to set some other parameters or use a different approach?
Could someone explain or provide an example?
I've been fighting off and on for a week to save out my OpenGL renderings (which I'm using for green screening) to video via an AVAssetWriter.
I have created a simple rig below to show what I'm doing.
I've asked on the Apple forums and received advice on the process, which is also described here:
allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/ and is used in the GPUImage library.
To my knowledge I am doing the exact same thing - I even use the method from GPUImage to create the FBO.
I have verified that the drawing itself is okay (there are drawing methods in this code too, currently disabled).
The FBO is created okay and glCheckFramebufferStatus returns success.
There are no crashes, No exceptions, no warnings, the writer is in fine status, all texturecaches, buffers, etc are created without error.
However I still get BLACK for my video output.
If I set my glClear color to white, then I get a white rectangle, but it is not at the video size I requested.
I never get my triangle rendered into my video.
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import "TestViewController.h"
/////////////////////////////////////////////////////////////////
// This data type is used to store information for each vertex
typedef struct
{
GLKVector3 positionCoords;
}
SceneVertex;
/////////////////////////////////////////////////////////////////
// Define vertex data for a triangle to use in example
static const SceneVertex vertices[] =
{
{{-1.0f, -1.0f, 1.0}}, // lower left corner
{{1.0f, -1.0f, 0.5}}, // lower right corner
{{1.0f, 1.0f, 0.0}} // upper left corner
};
@interface TestViewController ()
@property(nonatomic, readwrite, assign) CVOpenGLESTextureCacheRef videoTextureCache;
@property(strong, nonatomic) GLKTextureInfo *background;
@property(nonatomic, strong) AVAssetWriter *assetWriter;
@property(nonatomic) BOOL isRecording;
@property(nonatomic, strong) AVAssetWriterInput *assetWriterVideoInput;
@property(nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;
@property(nonatomic, assign) CFAbsoluteTime startTime;
@property(nonatomic, strong) GLKView *glkView;
@property(nonatomic, strong) GLKBaseEffect *screenGLEffect;
@property(nonatomic, strong) GLKBaseEffect *FBOGLEffect;
@property(nonatomic, strong) NSTimer *recordingTimer;
- (BOOL)isRetina;
@end
@implementation TestViewController
{
CVOpenGLESTextureCacheRef _writerTextureCache;
GLuint _writerRenderFrameBuffer;
GLuint vertexBufferID;
EAGLContext *_writerContext;
CVOpenGLESTextureRef _writerTexture;
CVPixelBufferRef _writerPixelBuffer; // backs the FBO texture that gets appended to the asset writer
}
- (GLKBaseEffect *)createBasicDrawingEffectInCurrentContext
{
GLKBaseEffect *basicGLEffect = [[GLKBaseEffect alloc] init];
basicGLEffect.useConstantColor = GL_TRUE;
basicGLEffect.constantColor = GLKVector4Make(
.5f, // Red
1.0f, // Green
.5f, // Blue
1.0f);// Alpha
// Set the background color stored in the current context
glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // background color
// Generate, bind, and initialize contents of a buffer to be
// stored in GPU memory
glGenBuffers(1, // STEP 1
&vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, // STEP 2
vertexBufferID);
glBufferData( // STEP 3
GL_ARRAY_BUFFER, // Initialize buffer contents
sizeof(vertices), // Number of bytes to copy
vertices, // Address of bytes to copy
GL_STATIC_DRAW); // Hint: cache in GPU memory
return basicGLEffect;
}
/////////////////////////////////////////////////////////////////
//
- (void)viewDidUnload
{
[super viewDidUnload];
// Make the view's context current
GLKView *view = (GLKView *) self.view;
[EAGLContext setCurrentContext:view.context];
// Stop using the context created in -viewDidLoad
((GLKView *) self.view).context = nil;
[EAGLContext setCurrentContext:nil];
}
//////////////////////////////////////////////////////////////
#pragma mark AVWriter setup
//////////////////////////////////////////////////////////////
- (NSString *)tempFilePath
{
return [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/output2.m4v"];
}
- (void)removeTempFile
{
NSString *path = [self tempFilePath];
NSFileManager *fileManager = [NSFileManager defaultManager];
BOOL exists = [fileManager fileExistsAtPath:path];
NSLog(#">>>remove %# Exists %d", path, exists);
NSError *error;
unlink([path UTF8String]);
NSLog(#">>>AFTER REMOVE %# Exists %d %#", path, exists, error);
}
- (void)createWriter
{
//My setup code is based heavily on the GPUImage project, https://github.com/BradLarson/GPUImage so some of these dictionary names and structure are similar to the code from that project - I recommend you check it out if you are interested in Video filtering/recording
[self removeTempFile];
NSError *error;
self.assetWriter = [[AVAssetWriter alloc]
initWithURL:[NSURL fileURLWithPath:[self tempFilePath]]
fileType:AVFileTypeQuickTimeMovie
error:&error];
if (error)
{
NSLog(#"Couldn't create writer, %#", error.localizedDescription);
return;
}
NSDictionary *outputSettings = @{
AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : @640,
AVVideoHeightKey : @480
};
self.assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:outputSettings];
self.assetWriterVideoInput.expectsMediaDataInRealTime = YES;
NSDictionary *sourcePixelBufferAttributesDictionary = @{(id) kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id) kCVPixelBufferWidthKey : @640,
(id) kCVPixelBufferHeightKey : @480};
self.assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.assetWriterVideoInput
sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
self.assetWriterVideoInput.transform = CGAffineTransformMakeScale(1, -1);
if ([_assetWriter canAddInput:self.assetWriterVideoInput])
{
[_assetWriter addInput:self.assetWriterVideoInput];
} else
{
NSLog(#"can't add video writer input %#", self.assetWriterVideoInput);
}
/*
_assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil];
if ([_assetWriter canAddInput:_assetWriterAudioInput]) {
[_assetWriter addInput:_assetWriterAudioInput];
_assetWriterAudioInput.expectsMediaDataInRealTime = YES;
}
*/
}
- (void)writeMovieToLibraryWithPath:(NSURL *)path
{
NSLog(#"writing %# to library", path);
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:path
completionBlock:^(NSURL *assetURL, NSError *error) {
if (error)
{
NSLog(#"Error saving to library%#", [error localizedDescription]);
} else
{
NSLog(#"SAVED %# to photo lib", path);
}
}];
}
//////////////////////////////////////////////////////////////
#pragma mark touch handling
//////////////////////////////////////////////////////////////
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
[super touchesEnded:touches withEvent:event];
if (self.isRecording)
{
[self finishRecording];
} else
{
[self startRecording];
}
}
//////////////////////////////////////////////////////////////
#pragma mark recording
//////////////////////////////////////////////////////////////
- (void)startRecording;
{
NSLog(#"started recording");
#warning debugging startrecording
// NSLog(#"bypassing usual write method");
// if (![assetWriter startWriting]){
// NSLog(#"writer not started %#, %d", assetWriter.error, assetWriter.status);
// }
self.startTime = CFAbsoluteTimeGetCurrent();
[self createWriter];
[self.assetWriter startWriting];
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];
NSAssert([self.assetWriterPixelBufferInput pixelBufferPool], @"writerpixelbuffer input has no pools");
if (!_writerContext)
{
_writerContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!_writerContext || ![EAGLContext setCurrentContext:_writerContext])
{
NSLog(#"Problem with OpenGL context.");
return;
}
}
[EAGLContext setCurrentContext:_writerContext];
NSLog(#"Creating FBO");
[self createDataFBOUsingGPUImagesMethod];
// [self createDataFBO];
self.isRecording = YES;
NSLog(#"Recording is started");
self.recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1 / 30
target:self
selector:#selector(tick:)
userInfo:nil repeats:YES];
}
- (void)tick:(id)tick
{
[self drawBasicGLTOFBOForWriting];
}
- (void)finishRecording;
{
[self.recordingTimer invalidate];
self.recordingTimer = nil;
NSLog(#"finished recording");
if (self.assetWriter.status == AVAssetWriterStatusCompleted || !self.isRecording)
{
NSLog(#"already completed ingnoring");
return;
}
NSLog(#"Asset writer writing");
self.isRecording = NO;
// runOnMainQueueWithoutDeadlocking(^{
NSLog(#"markng inputs as finished");
//TODO - these cause an error
[self.assetWriterVideoInput markAsFinished];
__weak TestViewController *blockSelf = self;
[self.assetWriter finishWritingWithCompletionHandler:^{
if (self.assetWriter.error == nil)
{
NSLog(#"saved ok - writing to lib");
[self writeMovieToLibraryWithPath:[NSURL fileURLWithPath:[self tempFilePath]]];
} else
{
NSLog(#" did not save due to error %#", self.assetWriter.error);
}
}];
// });
}
- (void)drawBasicGLTOFBOForWriting
{
if (!self.isRecording)
{
return;
}
[EAGLContext setCurrentContext:_writerContext];
if (!self.FBOGLEffect)
{
self.FBOGLEffect = [self createBasicDrawingEffectInCurrentContext];
}
glDisable(GL_DEPTH_TEST);
glBindFramebuffer(GL_FRAMEBUFFER, _writerRenderFrameBuffer);
glClearColor(1, 1, 1, 1);
glClear(GL_COLOR_BUFFER_BIT);
[self.FBOGLEffect prepareToDraw];
// Clear Frame Buffer (erase previous drawing)
// Enable use of positions from bound vertex buffer
glEnableVertexAttribArray( // STEP 4
GLKVertexAttribPosition);
glVertexAttribPointer( // STEP 5
GLKVertexAttribPosition,
3, // three components per vertex
GL_FLOAT, // data is floating point
GL_FALSE, // no fixed point scaling
sizeof(SceneVertex), // no gaps in data
NULL); // NULL tells GPU to start at
// beginning of bound buffer
// Draw triangles using the first three vertices in the
// currently bound vertex buffer
glDrawArrays(GL_TRIANGLES, // STEP 6
0, // Start with first vertex in currently bound buffer
3); // Use three vertices from currently bound buffer
glFlush();
CFAbsoluteTime interval = (CFAbsoluteTimeGetCurrent() - self.startTime) * 1000;
CMTime currentTime = CMTimeMake((int) interval, 1000);
[self writeToFileWithTime:currentTime];
}
- (void)writeToFileWithTime:(CMTime)time
{
if (!self.assetWriterVideoInput.readyForMoreMediaData)
{
NSLog(#"Had to drop a video frame");
return;
}
if (kCVReturnSuccess == CVPixelBufferLockBaseAddress(_writerPixelBuffer,
kCVPixelBufferLock_ReadOnly))
{
uint8_t *pixels = (uint8_t *) CVPixelBufferGetBaseAddress(_writerPixelBuffer);
// process pixels how you like!
BOOL success = [self.assetWriterPixelBufferInput appendPixelBuffer:_writerPixelBuffer
withPresentationTime:time];
NSLog(#"wrote at %# : %#", CMTimeCopyDescription(NULL, time), success ? #"YES" : #"NO");
CVPixelBufferUnlockBaseAddress(_writerPixelBuffer, kCVPixelBufferLock_ReadOnly);
}
}
//////////////////////////////////////////////////////////////
#pragma mark FBO setup
//////////////////////////////////////////////////////////////
- (void)createDataFBOUsingGPUImagesMethod;
{
glActiveTexture(GL_TEXTURE1);
glGenFramebuffers(1, &_writerRenderFrameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _writerRenderFrameBuffer);
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _writerContext, NULL, &_writerTextureCache);
if (err)
{
NSAssert(NO, #"Error at CVOpenGLESTextureCacheCreate %d", err);
}
// Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
CVPixelBufferPoolCreatePixelBuffer(NULL, [self.assetWriterPixelBufferInput pixelBufferPool], &_writerPixelBuffer);
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _writerTextureCache, _writerPixelBuffer,
NULL, // texture attributes
GL_TEXTURE_2D,
GL_RGBA, // opengl format
480,
320,
GL_BGRA, // native iOS format
GL_UNSIGNED_BYTE,
0,
&_writerTexture);
if (err)
{
NSAssert(NO, #"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
glBindTexture(CVOpenGLESTextureGetTarget(_writerTexture), CVOpenGLESTextureGetName(_writerTexture));
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(_writerTexture), 0);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
}
@end
Four possibilities jump to mind:
Your viewport isn't the right size or shape, or isn't in the right place. Try calling glViewport somewhere before drawing anything (see the sketch at the end of this answer).
Your shader is broken. I see you don't have any kind of shader setup, so you might need to add a basic passthrough vertex and fragment shader pair that just multiplies position by perspective and modelview matrix, and draws using vertex color, or a fixed color.
Your Projection matrix isn't good. Try using a basic orthographic matrix at first.
Your Modelview matrix isn't good. If you can animate something, try starting with the identity matrix and then slowly rotating it through first the X axis then the Y axis.
Make sure _writerPixelBuffer is not NULL.
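For the viewport point, a minimal sketch would be the following, sized from the pixel buffer itself so it stays consistent with what gets written (note that the writer above is configured for 640x480 while the FBO texture is created at 480x320, which is worth reconciling):
// after binding the writer FBO, before issuing any draw calls
glBindFramebuffer(GL_FRAMEBUFFER, _writerRenderFrameBuffer);
glViewport(0, 0, (GLsizei)CVPixelBufferGetWidth(_writerPixelBuffer), (GLsizei)CVPixelBufferGetHeight(_writerPixelBuffer));
And for the last point:
if (_writerPixelBuffer == NULL) {
    NSLog(@"No writer pixel buffer - was the pixel buffer pool created?");
    return;
}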