I am using AVFoundation to capture a video and present it on iPhone screen.
I want to know every time a video frame is dropped, and get its time.
I read that this delegate method is exactly what I want:
-(void)captureOutput:(AVCaptureOutput*)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection;
This is my code setting the delegate:
@interface PRVideoRecorderManger () <AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
AVCaptureVideoDataOutput *videoCaptureOutput = [[AVCaptureVideoDataOutput alloc] init];
dispatch_queue_t queue = dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL);
[videoCaptureOutput setAlwaysDiscardsLateVideoFrames:YES];
[videoCaptureOutput setSampleBufferDelegate:self queue:queue];
// Specify the pixel format
videoCaptureOutput.videoSettings =
[NSDictionary dictionaryWithObject:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey]; //kCVPixelBufferPixelFormatTypeKey
// If you wish to cap the frame rate to a known value, such as 15 fps, set
// minFrameDuration.
videoCaptureOutput.minFrameDuration = CMTimeMake(1, videoFPS);
if ([_captureSession canAddOutput:videoCaptureOutput])
{
    [_captureSession addOutput:videoCaptureOutput];
}
else
{
    NSLog(@"can't add output");
}
_movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
[[_movieFileOutput connectionWithMediaType:AVMediaTypeVideo ] setVideoOrientation:orientation];
[_captureSession addOutput:_movieFileOutput];
[_movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[_captureSession startRunning];
But captureOutput:didDropSampleBuffer: is NEVER called. Why is that?
EDIT:
I have realised that if I comment out the AVCaptureMovieFileOutput, it does call
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
Is there a way to receive the output sample buffers AND record to the output file URL? Right now it only lets me do one or the other.
I have tried many other blogs and Stack Overflow posts, but I didn't find a solution for this. I am able to create a custom camera with a preview. I need video with a custom frame, which is why I am using AVAssetWriter, but I am unable to save the recorded video into the Documents directory. I tried it like this:
-(void) initilizeCameraConfigurations {
if(!captureSession) {
captureSession = [[AVCaptureSession alloc] init];
[captureSession beginConfiguration];
captureSession.sessionPreset = AVCaptureSessionPresetHigh;
self.view.backgroundColor = UIColor.blackColor;
CGRect bounds = self.view.bounds;
captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
captureVideoPreviewLayer.backgroundColor = [UIColor clearColor].CGColor;
captureVideoPreviewLayer.bounds = self.view.frame;
captureVideoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
[self.view.layer addSublayer:captureVideoPreviewLayer];
[self.view bringSubviewToFront:self.controlsBgView];
}
// Add input to session
NSError *err;
videoCaptureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:&err];
if([captureSession canAddInput:videoCaptureDeviceInput]) {
[captureSession addInput:videoCaptureDeviceInput];
}
docPathUrl = [[NSURL alloc] initFileURLWithPath:[self getDocumentsUrl]];
assetWriter = [AVAssetWriter assetWriterWithURL:docPathUrl fileType:AVFileTypeQuickTimeMovie error:&err];
NSParameterAssert(assetWriter);
//assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:300], AVVideoWidthKey,
[NSNumber numberWithInt:300], AVVideoHeightKey,
nil];
writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
writerInput.expectsMediaDataInRealTime = YES;
writerInput.transform = CGAffineTransformMakeRotation(M_PI);
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
[NSNumber numberWithInt:300], kCVPixelBufferWidthKey,
[NSNumber numberWithInt:300], kCVPixelBufferHeightKey,
nil];
assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
if([assetWriter canAddInput:writerInput]) {
[assetWriter addInput:writerInput];
}
// Set video stabilization mode to preview layer
AVCaptureVideoStabilizationMode stablilizationMode = AVCaptureVideoStabilizationModeCinematic;
if([videoCaptureDevice.activeFormat isVideoStabilizationModeSupported:stablilizationMode]) {
[captureVideoPreviewLayer.connection setPreferredVideoStabilizationMode:stablilizationMode];
}
// image output
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[captureSession addOutput:stillImageOutput];
[captureSession commitConfiguration];
if (![captureVideoPreviewLayer.connection isEnabled]) {
[captureVideoPreviewLayer.connection setEnabled:YES];
}
[captureSession startRunning];
}
-(IBAction)startStopVideoRecording:(id)sender {
if(captureSession) {
if(isVideoRecording) {
[writerInput markAsFinished];
[assetWriter finishWritingWithCompletionHandler:^{
NSLog(#"Finished writing...checking completion status...");
if (assetWriter.status != AVAssetWriterStatusFailed && assetWriter.status == AVAssetWriterStatusCompleted)
{
// Video saved
} else
{
NSLog(#"#123 Video writing failed: %#", assetWriter.error);
}
}];
} else {
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
isVideoRecording = YES;
}
}
}
-(NSString *) getDocumentsUrl {
NSString *docPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
docPath = [[docPath stringByAppendingPathComponent:@"Movie"] stringByAppendingString:@".mov"];
if([[NSFileManager defaultManager] fileExistsAtPath:docPath]) {
NSError *err;
[[NSFileManager defaultManager] removeItemAtPath:docPath error:&err];
}
NSLog(#"Movie path : %#",docPath);
return docPath;
}
@end
Please correct me if anything is wrong. Thank you in advance.
You don't say what actually goes wrong, but two things look wrong with your code:
docPath = [[docPath stringByAppendingPathComponent:@"Movie"] stringByAppendingString:@".mov"];
builds the path in a way that is easy to get wrong; it's cleaner to append the whole filename as one path component:
docPath = [docPath stringByAppendingPathComponent:@"Movie.mov"];
And your timeline is wrong. Your asset writer starts at time 0, but the sample buffers start at CMSampleBufferGetPresentationTimeStamp(sampleBuffer) > 0, so instead do this:
-(void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (firstSampleBuffer) {   // a BOOL ivar, initially YES
        [assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
        firstSampleBuffer = NO;
    }
    [writerInput appendSampleBuffer:sampleBuffer];
}
Conceptually, you have two main functional areas: one that generates video frames – the AVCaptureSession and everything attached to it – and another that writes these frames to a file – in your case the AVAssetWriter with its attached inputs.
The problem with your code is that there is no connection between these two: no video frames/images coming out of the capture session are passed to the asset writer inputs.
Furthermore, the AVCaptureStillImageOutput method -captureStillImageAsynchronouslyFromConnection:completionHandler: is never called, so the capture session actually produces no frames.
So, as a minimum, implement something like this:
-(IBAction)captureStillImageAndAppend:(id)sender
{
[stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageOutput.connections.firstObject completionHandler:
^(CMSampleBufferRef imageDataSampleBuffer, NSError* error)
{
// check error, omitted here
if (CMTIME_IS_INVALID( startTime)) // startTime is an ivar
[assetWriter startSessionAtSourceTime:(startTime = CMSampleBufferGetPresentationTimeStamp( imageDataSampleBuffer))];
[writerInput appendSampleBuffer:imageDataSampleBuffer];
}];
}
Remove the AVAssetWriterInputPixelBufferAdaptor, it's not used.
But there are issues with AVCaptureStillImageOutput:
it's only intended to produce still images, not videos
it must be configured to produce uncompressed sample buffers if the asset writer input is configured to compress the appended sample buffers (stillImageOutput.outputSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };)
it's deprecated on iOS (since iOS 10, in favour of AVCapturePhotoOutput)
If you actually want to produce a video, as opposed to a sequence of still images, then instead of the AVCaptureStillImageOutput add an AVCaptureVideoDataOutput to the capture session. It needs a delegate and a serial dispatch queue to output the sample buffers. The delegate has to implement something like this:
-(void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (CMTIME_IS_INVALID(startTime)) // startTime is an ivar
        [assetWriter startSessionAtSourceTime:(startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer))];
    [writerInput appendSampleBuffer:sampleBuffer];
}
Note that
you will want to make sure that the AVCaptureVideoDataOutput only outputs frames while you're actually recording; add/remove it from the capture session, or enable/disable its connection, in the startStopVideoRecording action
reset startTime to kCMTimeInvalid before starting another recording (a rough sketch of both points follows below)
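Here is a rough sketch of how that toggle might look. This is only an illustration, not part of the original answer: it assumes the data output is stored in a videoDataOutput ivar, and that you recreate the asset writer for each recording (an AVAssetWriter cannot be reused after it has finished writing).
-(IBAction)startStopVideoRecording:(id)sender
{
    AVCaptureConnection *videoConnection = [videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (isVideoRecording) {
        videoConnection.enabled = NO;      // stop delivering frames to the delegate
        [writerInput markAsFinished];
        [assetWriter finishWritingWithCompletionHandler:^{
            // check assetWriter.status / assetWriter.error here
        }];
        isVideoRecording = NO;
    } else {
        startTime = kCMTimeInvalid;        // reset before every recording
        // recreate assetWriter / writerInput here; a finished AVAssetWriter cannot be reused
        [assetWriter startWriting];        // the session itself is started in the delegate, at the first buffer's timestamp
        videoConnection.enabled = YES;     // start delivering frames
        isVideoRecording = YES;
    }
}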
I am new to Objective-C and iOS development. I want to record video through code, and at run time I need to get each frame as raw data for some processing. How can I achieve this? Please can anyone help me? Thanks in advance. Here is my code so far:
- (void)viewDidLoad
{
[super viewDidLoad];
[self setupCaptureSession];
}
The viewDidAppear method:
-(void)viewDidAppear:(BOOL)animated
{
if (!_bpickeropen)
{
_bpickeropen = true;
_picker = [[UIImagePickerController alloc] init];
_picker.delegate = self;
NSArray *sourceTypes = [UIImagePickerController availableMediaTypesForSourceType:_picker.sourceType];
if (![sourceTypes containsObject:(NSString *)kUTTypeMovie ])
{
NSLog(#"device not supported");
return;
}
_picker.sourceType = UIImagePickerControllerSourceTypeCamera;
_picker.mediaTypes = [NSArray arrayWithObjects:(NSString *)kUTTypeMovie,nil];//,(NSString *) kUTTypeImage
_picker.videoQuality = UIImagePickerControllerQualityTypeHigh;
[self presentModalViewController:_picker animated:YES];
}
}
// Delegate routine that is called when a sample buffer was written
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(cameraFrame, 0);
    GLubyte *rawImageBytes = CVPixelBufferGetBaseAddress(cameraFrame);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(cameraFrame);
    NSData *dataForRawBytes = [NSData dataWithBytes:rawImageBytes length:bytesPerRow * CVPixelBufferGetHeight(cameraFrame)];
    // PROBLEMS:
    // 1. Here I am getting the raw bytes only once.
    // 2. After that I want to store these raw bytes as a binary file in the app path.
    // Do whatever with your bytes
    NSLog(@"bytes per row %zd", bytesPerRow);
    [dataForRawBytes writeToFile:[self datafilepath] atomically:YES];
    NSLog(@"Sample Buffer Data is %@\n", dataForRawBytes);
    CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
}
Here is where I set the delegate of the output.
// Create and configure a capture session and start it running
- (void)setupCaptureSession
{
NSError *error = nil;
// Create the session
AVCaptureSession *session = [[AVCaptureSession alloc] init];
// Configure the session to produce lower resolution video frames, if your
// processing algorithm can cope. We'll specify medium quality for the
// chosen device.
session.sessionPreset = AVCaptureSessionPresetMedium;
// Find a suitable AVCaptureDevice
AVCaptureDevice *device = [AVCaptureDevice
defaultDeviceWithMediaType:AVMediaTypeVideo];
// Create a device input with the device and add it to the session.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
error:&error];
if (!input)
{
// Handling the error appropriately.
}
[session addInput:input];
// Create a VideoDataOutput and add it to the session
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
[session addOutput:output];
// Configure your output.
dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Specify the pixel format
output.videoSettings =
[NSDictionary dictionaryWithObject:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey]; //kCVPixelBufferPixelFormatTypeKey
// If you wish to cap the frame rate to a known value, such as 15 fps, set
// minFrameDuration.
// output.minFrameDuration = CMTimeMake(1, 15);
// Start the session running to start the flow of data
[session startRunning];
// Assign session to an ivar.
//[self setSession:session];
}
I appreciate any help. Thanks in advance.
You could look into the AVFoundation framework. It allows you access to the raw data generated from the camera.
This link is a good intro-level project to the AVFoundation video camera usage.
In order to get individual frames from the video output, you could use the AVCaptureVideoDataOutput class from the AVFoundation framework.
Hope this helps.
EDIT: You could look at the delegate functions of AVCaptureVideoDataOutputSampleBufferDelegate, in particular the captureOutput:didOutputSampleBuffer:fromConnection: method. This will be called every time a new frame is captured.
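For orientation, a minimal skeleton of that callback might look like the following. This is only a sketch, assuming the 32BGRA pixel format configured in the question's setupCaptureSession; what you do with the bytes is up to you.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);   // when this frame was captured
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t length = CVPixelBufferGetBytesPerRow(pixelBuffer) * CVPixelBufferGetHeight(pixelBuffer);
    // ... copy or process the raw BGRA bytes here; they are only valid while the buffer is locked ...
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}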
If you do not know how delegates work, this link is a good example of delegates.
I need to make a recorder with the ability to capture some frames.
So far, all I have tried is to set up an AVCaptureSession with 2 inputs (audio and video) and 2 outputs:
// Configure output to write video to a file
self.movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
[self.session addOutput:self.movieFileOutput];
self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[self.videoDataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
// Configure output to catch frames.
dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
[self.videoDataOutput setSampleBufferDelegate:self queue:queue];
[self.session addOutput:self.videoDataOutput];
In the interface file I conform the class to the protocols AVCaptureVideoDataOutputSampleBufferDelegate and AVCaptureFileOutputRecordingDelegate. Writing to the file works perfectly, but the method
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
is never called.
When I remove the [self.session addOutput:self.movieFileOutput]; line, so that only self.videoDataOutput is among the session's outputs, capturing images works perfectly.
How can I configure the video input to feed both of these outputs?
Thanks for any advice you can give. =)
I'm not sure what I should put in the method
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
in order to write the frames to a video. Can anybody share the body of their code for this method, where the result is recording the frames to a movie?
I thought I had my assetWriter and videoInput set up correctly, but all I'm getting is a movie with 1 frame used repeatedly.
Check out the Apple sample code RosyWriter. It's a very good example of what you are looking for.
You can successfully record a video and grab frames at the same time using this approach:
AVCaptureSession *captureSession = [AVCaptureSession new];
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
AVCaptureVideoDataOutput *output = [AVCaptureVideoDataOutput new];
// (Simplified: you still need to add the input and output to the session and set the
// output's sample buffer delegate, as in the other snippets in this thread.)
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                [NSNumber numberWithInt:640], AVVideoWidthKey,
                                [NSNumber numberWithInt:480], AVVideoHeightKey,
                                AVVideoCodecH264, AVVideoCodecKey, nil];
AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];

/* AVCaptureVideoDataOutput */
AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor =
    [[AVAssetWriterInputPixelBufferAdaptor alloc]
        initWithAssetWriterInput:assetWriterInput
        sourcePixelBufferAttributes:
            [NSDictionary dictionaryWithObjectsAndKeys:
                [NSNumber numberWithInt:kCVPixelFormatType_32BGRA],
                kCVPixelBufferPixelFormatTypeKey,
                nil]];

/* Asset writer with MPEG4 format */
AVAssetWriter *assetWriterMyData = [[AVAssetWriter alloc]
                                    initWithURL:URLFromSomewhere
                                    fileType:AVFileTypeMPEG4
                                    error:nil];   // you need to check error conditions; this example is too lazy
[assetWriterMyData addInput:assetWriterInput];
assetWriterInput.expectsMediaDataInRealTime = YES;

/* Start writing data */
[assetWriterMyData startWriting];
[assetWriterMyData startSessionAtSourceTime:kCMTimeZero];
[captureSession startRunning];
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // a very dense way to keep track of the time at which this frame
    // occurs relative to the output stream, but it's just an example!
    static int64_t frameNumber = 0;
    if (assetWriterInput.readyForMoreMediaData)
        [pixelBufferAdaptor appendPixelBuffer:imageBuffer
                         withPresentationTime:CMTimeMake(frameNumber, 25)];
    frameNumber++;
}
/* To stop recording, stop capture session and finish writing data*/
[captureSession stopRunning];
[assetWriterMyData finishWriting];
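One caveat with the frame-counter timing above (an observation, not part of the original answer): if the camera drops frames, the written timeline drifts from real time. An alternative, consistent with the timestamp-based approach shown earlier in this thread, is to use each buffer's own presentation timestamp and start the writer session at the first one. Here startTime is assumed to be an ivar initialised to kCMTimeInvalid, and the kCMTimeZero session start above is dropped.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (CMTIME_IS_INVALID(startTime)) {
        startTime = pts;
        [assetWriterMyData startSessionAtSourceTime:startTime];   // start the timeline at the first real frame
    }
    if (assetWriterInput.readyForMoreMediaData) {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        [pixelBufferAdaptor appendPixelBuffer:imageBuffer withPresentationTime:pts];
    }
}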
I am trying to throttle my video capture framerate for my application, as I have found that it is impacting VoiceOver performance.
At the moment, it captures frames from the video camera, and then processes the frames using OpenGL routines as quickly as possible. I would like to set a specific framerate in the capture process.
I was expecting to be able to do this by using videoMinFrameDuration or minFrameDuration, but this seems to make no difference to performance. Any ideas?
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices)
{
if ([device position] == AVCaptureDevicePositionBack)
{
backFacingCamera = device;
// SET SOME OTHER PROPERTIES
}
}
// Create the capture session
captureSession = [[AVCaptureSession alloc] init];
// Add the video input
NSError *error = nil;
videoInput = [[[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error] autorelease];
// Add the video frame output
videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[videoOutput setAlwaysDiscardsLateVideoFrames:YES];
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
[videoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
// Start capturing
if([backFacingCamera supportsAVCaptureSessionPreset:AVCaptureSessionPreset1920x1080])
{
[captureSession setSessionPreset:AVCaptureSessionPreset1920x1080];
captureDeviceWidth = 1920;
captureDeviceHeight = 1080;
#if defined(VA_DEBUG)
NSLog(#"Video AVCaptureSessionPreset1920x1080");
#endif
}
else { /* do some fall back stuff */ }
// If you wish to cap the frame rate to a known value, such as 15 fps, set
// minFrameDuration.
AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
if (conn.supportsVideoMinFrameDuration)
conn.videoMinFrameDuration = CMTimeMake(1,2);
else
videoOutput.minFrameDuration = CMTimeMake(1,2);
if ([captureSession canAddInput:videoInput])
[captureSession addInput:videoInput];
if ([captureSession canAddOutput:videoOutput])
[captureSession addOutput:videoOutput];
if (![captureSession isRunning])
[captureSession startRunning];
Any ideas? Am I missing something? Is this the best way to throttle?
AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
if (conn.supportsVideoMinFrameDuration)
conn.videoMinFrameDuration = CMTimeMake(1,2);
else
videoOutput.minFrameDuration = CMTimeMake(1,2);
Mike Ullrich's answer worked up until iOS 7. These two methods are unfortunately deprecated in iOS 7. You have to set the activeVideo{Min|Max}FrameDuration on the AVCaptureDevice itself. Something like:
int fps = 30; // Change this value
AVCaptureDevice *device = ...; // Get the active capture device
[device lockForConfiguration:nil];
[device setActiveVideoMinFrameDuration:CMTimeMake(1, fps)];
[device setActiveVideoMaxFrameDuration:CMTimeMake(1, fps)];
[device unlockForConfiguration];
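As a small defensive addition (an assumption on my part, not part of the original answer), you can check the active format's supported frame-rate ranges before applying a value, since not every format supports every rate:
AVCaptureDeviceFormat *format = device.activeFormat;
BOOL supported = NO;
for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
    if (fps >= range.minFrameRate && fps <= range.maxFrameRate) {
        supported = YES;
        break;
    }
}
if (supported && [device lockForConfiguration:nil]) {
    device.activeVideoMinFrameDuration = CMTimeMake(1, fps);
    device.activeVideoMaxFrameDuration = CMTimeMake(1, fps);
    [device unlockForConfiguration];
}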
It turns out you need to set both videoMinFrameDuration and videoMaxFrameDuration for either one to work, e.g.:
[conn setVideoMinFrameDuration:CMTimeMake(1,1)];
[conn setVideoMaxFrameDuration:CMTimeMake(1,1)];