What I did:
I have tried to enable stabilization and HDR, but it isn't working. I think I'm on the right path, but when I check whether the current device supports stabilization and HDR, both checks return false on every device I've tried.
Please point out any mistakes in the code snippet below.
Thanks in advance!
My code snippet:
func createAVSession() throws -> AVCaptureSession {
    AppLog.LogFunction(object: LOG_Start)
    // Start out with low quality
    let session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetPhoto
    // Input from video camera
    let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
    let currentFormat = device?.activeFormat.isVideoHDRSupported
    try device?.lockForConfiguration()
    if device?.activeFormat.isVideoHDRSupported == true {
        device?.automaticallyAdjustsVideoHDREnabled = false
        device?.isVideoHDREnabled = true
        print("device?.isVideoHDREnabled\(device?.isVideoHDREnabled)")
    }
    if (device?.isFocusModeSupported(.continuousAutoFocus))! {
        device?.focusMode = AVCaptureFocusMode.continuousAutoFocus
        print("device?.focusMode\(device?.focusMode.rawValue)")
    }
    if (device?.isSmoothAutoFocusSupported)! {
        device?.isSmoothAutoFocusEnabled = true
        print("device?.isSmoothAutoFocusEnabled\(device?.isSmoothAutoFocusEnabled)")
    }
    if (device?.isExposureModeSupported(.continuousAutoExposure))! {
        device?.exposureMode = .continuousAutoExposure
        print("device?.exposureMode\(device?.exposureMode.rawValue)")
    }
    device?.unlockForConfiguration()
    let input = try AVCaptureDeviceInput(device: device)
    do {
        try input.device.lockForConfiguration()
        input.device.activeVideoMaxFrameDuration = CMTimeMake(1, 30)
        input.device.activeVideoMinFrameDuration = CMTimeMake(1, 30)
        input.device.unlockForConfiguration()
    } catch {
        print("Failed to set FPS")
    }
    // Output
    let videoOutput = AVCaptureVideoDataOutput()
    videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: kCVPixelFormatType_32BGRA]
    videoOutput.alwaysDiscardsLateVideoFrames = true
    videoOutput.setSampleBufferDelegate(self, queue: sessionQueue)
    let stillImageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    //stillImageOutput.isHighResolutionStillImageOutputEnabled = true
    if stillImageOutput.isStillImageStabilizationSupported {
        stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable = true
        print("stillImageOutput.isStillImageStabilizationActive\(stillImageOutput.isStillImageStabilizationActive)")
    }
    // Join it all together
    session.addInput(input)
    session.addOutput(videoOutput)
    if session.canAddOutput(stillImageOutput) {
        session.addOutput(stillImageOutput)
        self.stillImageOutput = stillImageOutput
    }
    if let connection = videoOutput.connection(withMediaType: AVMediaTypeVideo) {
        if connection.isVideoOrientationSupported {
            connection.videoOrientation = .portrait
        }
        if connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = .standard
            print("connection.activeVideoStabilizationMode\(connection.activeVideoStabilizationMode.rawValue)")
        }
    }
    AppLog.LogFunction(object: LOG_End)
    return session
}
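For reference, here is a minimal sketch (not part of either post, written against the current Swift AVFoundation names) that only lists which of a device's formats report HDR support; isVideoHDRSupported is declared per format, so the value you read depends on which format is the device's activeFormat:

import AVFoundation

func logVideoHDRSupport() {
    // Assumes the default video capture device; returns early if none exists.
    guard let device = AVCaptureDevice.default(for: .video) else { return }
    for format in device.formats {
        // isVideoHDRSupported is a per-format flag; video HDR can only be enabled
        // while activeFormat is one of the formats that reports true here.
        print(format.formatDescription, "HDR supported:", format.isVideoHDRSupported)
    }
}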
What worked for me on the stabilization issue was to test for it in the delegate. In my project I use AVCaptureVideoDataOutputSampleBufferDelegate to write to my file, checking certain things in the pixel buffer before I decide to write, and that was the one place I found where the connection reported that stabilization is supported. Anyway, here is how I did it for the stabilization issue. Hope it helps.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    self.lockQueue.sync {
        if !self.isCapturing || self.isPaused {
            return
        }
        let isVideo = captureOutput is AVCaptureVideoDataOutput
        if isVideo && self.videoWriter == nil {
            // testing to make sure dealing with video and not audio
            let connection = captureOutput.connection(withMediaType: AVMediaTypeVideo)
            if (connection?.isVideoStabilizationSupported)! {
                connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.cinematic
            }
            // other work excluded as irrelevant
        }
    }
}
I have created a video file from an iPhone's camera/microphone using AVAssetWriter, AVAssetWriterInput, etc., and when I play the resulting video in iMovie, QuickTime Player, and DaVinci Resolve, there appears to be no audio. iMovie's audio waveform is empty, although the QuickTime inspector shows an audio track present.
However, I opened the same video file using Ocenaudio and it reads and plays the audio completely fine, so I know the audio is somewhere in the file.
I used widely available code for using AVAssetWriter, such as:
previewVideoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoOutputSettings)
previewVideoWriterInput.expectsMediaDataInRealTime = true
previewAudioWriterInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: audioOutputSettings)
previewAudioWriterInput.expectsMediaDataInRealTime = true
if previewVideoWriter.canAdd(previewVideoWriterInput) == true { previewVideoWriter.add(previewVideoWriterInput) }
if previewVideoWriter.canAdd(previewAudioWriterInput) == true { previewVideoWriter.add(previewAudioWriterInput) }
The main difference is that I'm using a CIFilter on the video data in the captureOutput() function. Here's a simplified version of my captureOutput() code:
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    recordingQueue.async {
        autoreleasepool {
            if output == self.previewVideoOutput {
                let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
                let cameraImage = CIImage(cvPixelBuffer: pixelBuffer!)
                if self.currentFilter != nil { self.currentFilter.setValue(cameraImage, forKey: kCIInputImageKey) }
                let filteredImage = UIImage(ciImage: self.currentFilter != nil ? self.currentFilter.value(forKey: kCIOutputImageKey) as! CIImage : cameraImage)
                let timeStampie = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                self.previewProgressTimeStamp = timeStampie
                if self.previewFirstFrameTimeStamp == CMTime.zero {
                    self.previewFirstFrameTimeStamp = self.previewProgressTimeStamp
                }
                self.previewProgressTimeStamp = CMTimeSubtract(self.previewProgressTimeStamp, self.previewFirstFrameTimeStamp)
                if self.previewVideoWriterInput.isReadyForMoreMediaData {
                    let appendSuccess = self.previewVideoAdapter.append(self.pixelBufferFromImage(filteredImage)!, withPresentationTime: self.previewProgressTimeStamp)
                }
            }
            else if output == self.previewAudioOutput {
                if self.previewAudioWriterInput.isReadyForMoreMediaData {
                    let appendSuccess = self.previewAudioWriterInput.append(sampleBuffer)
                }
            }
        }
    }
}
This is the code I use when it's time to finish recording:
if previewVideoWriterInput != nil {
    previewVideoWriterInput.markAsFinished()
    previewAudioWriterInput.markAsFinished()
}
if previewVideoWriter != nil {
    previewVideoWriter.endSession(atSourceTime: previewProgressTimeStamp)
    previewVideoWriter.finishWriting { () -> Void in
        self.recordingCompletedHandler?()
    }
}
Any ideas why the audio written to the video file is not being picked up by these video players? And how would I fix that?
Edit:
Here's the code I'm using to set up the AVCapture outputs, as requested.
captureSession = AVCaptureSession()
captureSession.beginConfiguration()
captureSession.sessionPreset = .vga640x480
captureSession.usesApplicationAudioSession = true
if frontCameraInput != nil && captureSession.canAddInput(frontCameraInput) == true { captureSession.addInput(frontCameraInput) }
if let audioDevice = AVCaptureDevice.default(for: .audio) { audioCaptureDevice = audioDevice }
if audioCaptureDevice != nil {
    do {
        audioCaptureDeviceInput = try AVCaptureDeviceInput(device: audioCaptureDevice)
    } catch let error {
        print("\(error.localizedDescription)")
    }
}
if audioCaptureDeviceInput != nil && captureSession.canAddInput(audioCaptureDeviceInput) == true { captureSession.addInput(audioCaptureDeviceInput) }
previewVideoOutput.setSampleBufferDelegate(self, queue: recordingQueue)
previewAudioOutput.setSampleBufferDelegate(self, queue: recordingQueue)
if captureSession.canAddOutput(previewVideoOutput) { captureSession.addOutput(previewVideoOutput) }
if captureSession.canAddOutput(previewAudioOutput) { captureSession.addOutput(previewAudioOutput) }
captureSession.commitConfiguration()
I'm currently developing an app which records a video and then shows it to you so you can check whether the recording was good enough. However, when displaying the recorded video, it shows up in the wrong orientation.
So basically I'm recording in landscape-right mode, but playback shows the video in portrait, and it apparently gets recorded that way as well, even though I set AVCaptureVideoOrientation.LandscapeRight.
Here is the code I'm using to set up the recording:
func setupAVCapture() {
    session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetHigh
    let devices = AVCaptureDevice.devices()
    // Loop through all the capture devices on this phone
    for device in devices {
        // Make sure this particular device supports video
        if (device.hasMediaType(AVMediaTypeVideo)) {
            // Finally check the position and confirm we've got the front camera
            if (device.position == AVCaptureDevicePosition.Front) {
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    beginSession()
                    break
                }
            }
        }
    }
}

func beginSession() {
    var err: NSError? = nil
    var deviceInput: AVCaptureDeviceInput?
    do {
        deviceInput = try AVCaptureDeviceInput(device: captureDevice)
    } catch let error as NSError {
        err = error
        deviceInput = nil
    }
    if err != nil {
        print("error: \(err?.localizedDescription)")
    }
    if self.session.canAddInput(deviceInput) {
        self.session.addInput(deviceInput)
    }
    self.videoDataOutput = AVCaptureVideoDataOutput()
    self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
    self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
    self.videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
    if session.canAddOutput(self.videoDataOutput) {
        session.addOutput(self.videoDataOutput)
    }
    self.videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true
    self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    self.previewLayer.frame = self.view.bounds
    self.previewLayer.masksToBounds = true
    self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
    let rootLayer: CALayer = CameraPreview.layer
    rootLayer.masksToBounds = true
    rootLayer.addSublayer(self.previewLayer)
    session.startRunning()
}
After this, the following delegate method gets called and I play the recording back in the same view:
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    playbackAvailable = true
    SaveButton.hidden = false
    recording = false
    stopCamera()
    // let library = PHPhotoLibrary.sharedPhotoLibrary()
    // library.performChanges({ PHAssetChangeRequest.creationRequestForAssetFromVideoAtFileURL(outputFileURL)! }, completionHandler: {success, error in debugPrint("Finished saving asset. %#", (success ? "Success." : error!)) })
    // Play Video
    player = AVPlayer(URL: outputFileURL)
    playerLayer = AVPlayerLayer(player: player)
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    playerLayer.masksToBounds = true
    playerLayer.frame = self.view.bounds
    CameraPreview.layer.addSublayer(playerLayer)
    player.play()
}
Now the playback displays the video in the wrong orientation; does anyone know how to fix this? I've also set the orientation of the view controller to landscape right.
Solution:
// Necessary to record in the correct orientation
// ** MUST BE IMPLEMENTED AFTER SETTING UP THE MOVIE FILE OUTPUT **
var videoConnection: AVCaptureConnection? = nil
if let connections = videoFileOutput.connections {
    for x in connections {
        if let connection = x as? AVCaptureConnection {
            for port in connection.inputPorts {
                if (port.mediaType == AVMediaTypeVideo) {
                    videoConnection = connection
                }
            }
        }
    }
}
if let vidConnect = videoConnection {
    if (vidConnect.supportsVideoOrientation) {
        vidConnect.videoOrientation = AVCaptureVideoOrientation(rawValue: UIApplication.sharedApplication().statusBarOrientation.rawValue)!
    }
}
Basically, you need to set the AVCaptureVideoOrientation of the connection to the correct orientation before you start recording; otherwise it may record in the wrong orientation.
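For comparison, here is the same idea as a minimal sketch in current Swift syntax; it assumes a configured AVCaptureMovieFileOutput named videoFileOutput that has already been added to the session:

import AVFoundation

// Call this after adding videoFileOutput to the session and before startRecording(to:recordingDelegate:).
func applyRecordingOrientation(to videoFileOutput: AVCaptureMovieFileOutput) {
    if let connection = videoFileOutput.connection(with: .video),
       connection.isVideoOrientationSupported {
        connection.videoOrientation = .landscapeRight
    }
}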
I configured the rear camera to run at 120 fps. However, when I checked the sample output in captureOutput() by printing the time at which the function is called (see below), the interval between calls is roughly 33 ms (30 fps). No matter what fps I set with activeVideoMinFrameDuration and activeVideoMaxFrameDuration, the resulting fps observed in captureOutput() is always 30 fps.
I've tested this on an iPhone 6, which can handle slow-motion video. I've read the official Apple documentation at https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/04_MediaCapture.html. Any clue?
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureDevice: AVCaptureDevice?
    let captureSession = AVCaptureSession()
    let videoCaptureOutput = AVCaptureVideoDataOutput()
    var startTime = NSDate.timeIntervalSinceReferenceDate()

    // press button to start the video session
    @IBAction func startPressed() {
        if captureSession.inputs.count > 0 && captureSession.outputs.count > 0 {
            startTime = NSDate.timeIntervalSinceReferenceDate()
            captureSession.startRunning()
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // set capture session resolution
        captureSession.sessionPreset = AVCaptureSessionPresetLow
        let devices = AVCaptureDevice.devices()
        var avFormat: AVCaptureDeviceFormat? = nil
        for device in devices {
            if (device.hasMediaType(AVMediaTypeVideo)) {
                if (device.position == AVCaptureDevicePosition.Back) {
                    for vFormat in device.formats {
                        let ranges = vFormat.videoSupportedFrameRateRanges as! [AVFrameRateRange]
                        let filtered: Array<Double> = ranges.map({ $0.maxFrameRate }).filter({ $0 >= 119.0 })
                        if !filtered.isEmpty {
                            // found a good device with good format!
                            captureDevice = device as? AVCaptureDevice
                            avFormat = vFormat as? AVCaptureDeviceFormat
                        }
                    }
                }
            }
        }

        // use the found capture device and format to set things up
        if let dv = captureDevice {
            // configure
            do {
                try dv.lockForConfiguration()
            } catch _ {
                print("failed locking device")
            }
            dv.activeFormat = avFormat
            dv.activeVideoMinFrameDuration = CMTimeMake(1, 120)
            dv.activeVideoMaxFrameDuration = CMTimeMake(1, 120)
            dv.unlockForConfiguration()

            // input -> session
            do {
                let input = try AVCaptureDeviceInput(device: dv)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                }
            } catch _ {
                print("failed adding capture device as input to capture session")
            }
        }

        // output -> session
        let videoQueue = dispatch_queue_create("videoQueue", DISPATCH_QUEUE_SERIAL)
        videoCaptureOutput.setSampleBufferDelegate(self, queue: videoQueue)
        videoCaptureOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)]
        videoCaptureOutput.alwaysDiscardsLateVideoFrames = true
        if captureSession.canAddOutput(videoCaptureOutput) {
            captureSession.addOutput(videoCaptureOutput)
        }
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        print("\(NSDate.timeIntervalSinceReferenceDate() - startTime)")
        // More pixel/frame processing here
    }
}
Answer found: swapping the order of the two blocks "configure" and "input -> session" fixed it.
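A minimal sketch of that corrected order in current Swift syntax (not verbatim from the answer; the device and a 120 fps capable format are assumed to have been found as in the question's code):

import AVFoundation

func configureHighFrameRate(device: AVCaptureDevice, format: AVCaptureDevice.Format, session: AVCaptureSession) throws {
    // 1. Add the input to the session first; adding the input is one of the
    //    events that resets activeVideoMin/MaxFrameDuration to their defaults.
    let input = try AVCaptureDeviceInput(device: device)
    if session.canAddInput(input) {
        session.addInput(input)
    }
    // 2. Configure afterwards, so the frame-duration settings survive.
    try device.lockForConfiguration()
    device.activeFormat = format // a format whose videoSupportedFrameRateRanges allows 120 fps
    device.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 120)
    device.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: 120)
    device.unlockForConfiguration()
}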
I'm trying to build an app which will capture frames from the camera and process them with OpenCV before saving those files to the device, but at a specific frame rate.
What I'm stuck on at the moment is the fact that AVCaptureVideoDataOutputSampleBufferDelegate doesn't appear to respect the AVCaptureDevice.activeVideoMinFrameDuration or AVCaptureDevice.activeVideoMaxFrameDuration settings.
captureOutput runs far more often than the 2 frames per second the above settings should produce.
Do you happen to know how one could achieve this, with or without the delegate?
ViewController:
override func viewDidLoad() {
    super.viewDidLoad()
}

override func viewDidAppear(animated: Bool) {
    setupCaptureSession()
}

func setupCaptureSession() {
    let session: AVCaptureSession = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPreset1280x720
    let videoDevices: [AVCaptureDevice] = AVCaptureDevice.devices() as! [AVCaptureDevice]
    for device in videoDevices {
        if device.position == AVCaptureDevicePosition.Back {
            let captureDevice: AVCaptureDevice = device
            do {
                try captureDevice.lockForConfiguration()
                captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, 2)
                captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, 2)
                captureDevice.unlockForConfiguration()

                let input: AVCaptureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
                if session.canAddInput(input) {
                    try session.addInput(input)
                }

                let output: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
                let dispatch_queue: dispatch_queue_t = dispatch_queue_create("streamoutput", nil)
                output.setSampleBufferDelegate(self, queue: dispatch_queue)
                session.addOutput(output)
                session.startRunning()

                let previewLayer = AVCaptureVideoPreviewLayer(session: session)
                previewLayer.connection.videoOrientation = .LandscapeRight
                let previewBounds: CGRect = CGRectMake(0, 0, self.view.frame.width/2, self.view.frame.height+20)
                previewLayer.backgroundColor = UIColor.blackColor().CGColor
                previewLayer.frame = previewBounds
                previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                self.imageView.layer.addSublayer(previewLayer)

                self.previewMat.frame = CGRectMake(previewBounds.width, 0, previewBounds.width, previewBounds.height)
            } catch _ {
            }
            break
        }
    }
}

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    self.wrapper.processBuffer(self.getUiImageFromBuffer(sampleBuffer), self.previewMat)
}
So I've figured out the problem.
In the comments section for AVCaptureDevice.h above the activeVideoMinFrameDuration property it states:
On iOS, the receiver's activeVideoMinFrameDuration resets to its default value under the following conditions:
- The receiver's activeFormat changes
- The receiver's AVCaptureDeviceInput's session's sessionPreset changes
- The receiver's AVCaptureDeviceInput is added to a session
The last bullet point was causing my problem, so doing the following solved the problem for me:
do {
    let input: AVCaptureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
    if session.canAddInput(input) {
        try session.addInput(input)
    }

    try captureDevice.lockForConfiguration()
    captureDevice.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 2)
    captureDevice.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: 2)
    captureDevice.unlockForConfiguration()

    let output: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
    let dispatch_queue: dispatch_queue_t = dispatch_queue_create("streamoutput", nil)
    output.setSampleBufferDelegate(self, queue: dispatch_queue)
    session.addOutput(output)
In an app I'm developing the user is required to take a "selfie" (Yes, I know, but the app is for private use only).
I've got everything working with the camera showing in the circular UIView region; however, I cannot get it to scale and fill the circle properly. Here's what it's doing now:
And here's what I want it to be doing:
Here's the code for my UIView:
var cameraView = UIView()
cameraView.frame = CGRectMake(100, self.view.center.y-260, 568, 568)
cameraView.backgroundColor = UIColor(red:26/255, green:188/255, blue:156/255, alpha:1)
cameraView.layer.cornerRadius = 284
cameraView.layer.borderColor = UIColor.whiteColor().CGColor
cameraView.layer.borderWidth = 15
cameraView.contentMode = UIViewContentMode.ScaleToFill
cameraView.layer.masksToBounds = true
I have tried a few different contentMode options, including ScaleToFill, ScaleAspectFill, and ScaleAspectFit. They all produce exactly the same result.
As it turns out, the camera's "self.previewLayer" has a property, videoGravity, that determines how the camera's content fills a view.
In the following code I changed "self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect" to "self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill".
extension SelfieViewController: AVCaptureVideoDataOutputSampleBufferDelegate {

    func setupAVCapture() {
        session.sessionPreset = AVCaptureSessionPreset640x480
        let devices = AVCaptureDevice.devices()
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the front camera
                if (device.position == AVCaptureDevicePosition.Front) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        beginSession()
                        break
                    }
                }
            }
        }
    }

    func beginSession() {
        var err: NSError? = nil
        var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice, error: &err)
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        if self.session.canAddInput(deviceInput) {
            self.session.addInput(deviceInput)
        }
        self.videoDataOutput = AVCaptureVideoDataOutput()
        var rgbOutputSettings = [NSNumber(integer: kCMPixelFormat_32BGRA): kCVPixelBufferPixelFormatTypeKey]
        self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
        self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
        self.videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(self.videoDataOutput) {
            session.addOutput(self.videoDataOutput)
        }
        self.videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill

        var rootLayer: CALayer = self.cameraView.layer
        rootLayer.masksToBounds = true
        self.previewLayer.frame = rootLayer.bounds
        rootLayer.addSublayer(self.previewLayer)
        session.startRunning()
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // do stuff here
    }

    // clean up AVCapture
    func stopCamera() {
        session.stopRunning()
    }
}
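For comparison, here is the relevant part of that answer reduced to a minimal sketch in current Swift syntax (not from the original answer; the circular view and the running session are assumed to exist already):

import AVFoundation
import UIKit

func makeCircularPreview(in view: UIView, session: AVCaptureSession) -> AVCaptureVideoPreviewLayer {
    let previewLayer = AVCaptureVideoPreviewLayer(session: session)
    // resizeAspectFill crops the camera feed so it fills the view instead of letterboxing it.
    previewLayer.videoGravity = .resizeAspectFill
    previewLayer.frame = view.bounds
    // Combined with the corner radius, masksToBounds clips the feed to the circle.
    view.layer.cornerRadius = view.bounds.width / 2
    view.layer.masksToBounds = true
    view.layer.addSublayer(previewLayer)
    return previewLayer
}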
}