In an app I'm developing the user is required to take a "selfie" (Yes, I know, but the app is for private use only).
I've got everything working with the camera showing in the circular UIView region, however I cannot get it to scale and fill the circle properly. Here's what it's doing now (screenshot): the video sits letterboxed inside the circle. And here's what I want it to be doing (screenshot): the video filling the entire circle.
Here's the code for my UIView:
var cameraView = UIView()
cameraView.frame = CGRectMake(100, self.view.center.y-260, 568, 568)
cameraView.backgroundColor = UIColor(red:26/255, green:188/255, blue:156/255, alpha:1)
cameraView.layer.cornerRadius = 284
cameraView.layer.borderColor = UIColor.whiteColor().CGColor
cameraView.layer.borderWidth = 15
cameraView.contentMode = UIViewContentMode.ScaleToFill
cameraView.layer.masksToBounds = true
I have tried a few different contentMode options, including ScaleToFill, ScaleAspectFill, and ScaleAspectFit. They all produce exactly the same result.
As it turns out, contentMode has no effect here because the camera preview is a sublayer (an AVCaptureVideoPreviewLayer), not the view's own content; the layer's videoGravity property is what determines how the camera feed fills the view. In the following code I changed self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect to self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill:
extension SelfieViewController: AVCaptureVideoDataOutputSampleBufferDelegate {

    func setupAVCapture() {
        session.sessionPreset = AVCaptureSessionPreset640x480

        let devices = AVCaptureDevice.devices()
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the front camera
                if (device.position == AVCaptureDevicePosition.Front) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        beginSession()
                        break
                    }
                }
            }
        }
    }

    func beginSession() {
        var err: NSError? = nil
        var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice, error: &err)
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        if self.session.canAddInput(deviceInput) {
            self.session.addInput(deviceInput)
        }

        self.videoDataOutput = AVCaptureVideoDataOutput()
        // Note: the pixel-format key goes first here; assign this dictionary to
        // videoDataOutput.videoSettings if you want BGRA sample buffers.
        var rgbOutputSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(integer: Int(kCMPixelFormat_32BGRA))]
        self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
        self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
        self.videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(self.videoDataOutput) {
            session.addOutput(self.videoDataOutput)
        }
        self.videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
        // The key change: aspect-fill so the feed covers the whole circular view.
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill

        var rootLayer: CALayer = self.cameraView.layer
        rootLayer.masksToBounds = true
        self.previewLayer.frame = rootLayer.bounds
        rootLayer.addSublayer(self.previewLayer)
        session.startRunning()
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // do stuff here
    }

    // clean up AVCapture
    func stopCamera() {
        session.stopRunning()
    }
}
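(For anyone reading this with a newer SDK: the same fix maps directly onto the current API names. Below is just a minimal sketch under that assumption, not the code from the question; attachPreview is an illustrative helper name, and it assumes you already have a configured AVCaptureSession and a square cameraView.)

import AVFoundation
import UIKit

// Minimal sketch (Swift 4+ names): clip the view to a circle and aspect-fill the preview.
func attachPreview(session: AVCaptureSession, to cameraView: UIView) {
    cameraView.layer.cornerRadius = cameraView.bounds.width / 2   // assumes a square view
    cameraView.layer.masksToBounds = true

    let previewLayer = AVCaptureVideoPreviewLayer(session: session)
    previewLayer.videoGravity = .resizeAspectFill   // fill the circle, cropping the feed as needed
    previewLayer.frame = cameraView.bounds
    cameraView.layer.addSublayer(previewLayer)
}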
What I did:
I have tried to enable video stabilization and HDR, but neither is working. I think I'm on the right track, but when I check whether the current device supports stabilization and HDR, both checks return false on every device I've tried.
Please point out any mistakes in the code snippet below.
Thanks in advance!
My code snippet:
func createAVSession() throws -> AVCaptureSession {
    AppLog.LogFunction(object: LOG_Start)
    // Start out with low quality
    let session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetPhoto

    // Input from video camera
    let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
    let currentFormat = device?.activeFormat.isVideoHDRSupported

    try device?.lockForConfiguration()
    if device?.activeFormat.isVideoHDRSupported == true {
        device?.automaticallyAdjustsVideoHDREnabled = false
        device?.isVideoHDREnabled = true
        print("device?.isVideoHDREnabled\(device?.isVideoHDREnabled)")
    }
    if (device?.isFocusModeSupported(.continuousAutoFocus))! {
        device?.focusMode = AVCaptureFocusMode.continuousAutoFocus
        print("device?.focusMode\(device?.focusMode.rawValue)")
    }
    if (device?.isSmoothAutoFocusSupported)! {
        device?.isSmoothAutoFocusEnabled = true
        print("device?.isSmoothAutoFocusEnabled\(device?.isSmoothAutoFocusEnabled)")
    }
    if (device?.isExposureModeSupported(.continuousAutoExposure))! {
        device?.exposureMode = .continuousAutoExposure
        print("device?.exposureMode\(device?.exposureMode.rawValue)")
    }
    device?.unlockForConfiguration()

    let input = try AVCaptureDeviceInput(device: device)
    do {
        try input.device.lockForConfiguration()
        input.device.activeVideoMaxFrameDuration = CMTimeMake(1, 30)
        input.device.activeVideoMinFrameDuration = CMTimeMake(1, 30)
        input.device.unlockForConfiguration()
    }
    catch {
        print("Failed to set FPS")
    }

    // Output
    let videoOutput = AVCaptureVideoDataOutput()
    videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: kCVPixelFormatType_32BGRA]
    videoOutput.alwaysDiscardsLateVideoFrames = true
    videoOutput.setSampleBufferDelegate(self, queue: sessionQueue)

    let stillImageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    //stillImageOutput.isHighResolutionStillImageOutputEnabled = true
    if stillImageOutput.isStillImageStabilizationSupported {
        stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable = true
        print("stillImageOutput.isStillImageStabilizationActive\(stillImageOutput.isStillImageStabilizationActive)")
    }

    // Join it all together
    session.addInput(input)
    session.addOutput(videoOutput)
    if session.canAddOutput(stillImageOutput) {
        session.addOutput(stillImageOutput)
        self.stillImageOutput = stillImageOutput
    }
    if let connection = videoOutput.connection(withMediaType: AVMediaTypeVideo) {
        if connection.isVideoOrientationSupported {
            connection.videoOrientation = .portrait
        }
        if connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = .standard
            print("connection.activeVideoStabilizationMode\(connection.activeVideoStabilizationMode.rawValue)")
        }
    }
    AppLog.LogFunction(object: LOG_End)
    return session
}
What worked for me on the stabilization issue was to test for it in the delegate. In my project I use AVCaptureVideoDataOutputSampleBufferDelegate to write to a file, checking certain things in the pixel buffer before deciding to write, and that delegate was the one place I found where the connection reported that stabilization was supported. Anyway, here is how I did it for the stabilization issue; hope it helps.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    self.lockQueue.sync {
        if !self.isCapturing || self.isPaused {
            return
        }
        let isVideo = captureOutput is AVCaptureVideoDataOutput
        if isVideo && self.videoWriter == nil {
            // testing to make sure dealing with video and not audio
            let connection = captureOutput.connection(withMediaType: AVMediaTypeVideo)
            if (connection?.isVideoStabilizationSupported)! {
                connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.cinematic
            }
            // other work excluded as irrelevant
        }
    }
}
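Two additional checks that may help diagnose this (sketches only, in the Swift 3 naming used in the question; videoOutput and session refer to the objects built in createAVSession above). First, isVideoHDRSupported is declared per format, so it can be worth enumerating the device's formats to see whether any of them claim HDR support (the cast is an assumption about how formats bridges in that SDK):

// Sketch (Swift 3-era API): list which formats on the default video device claim HDR support.
if let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo),
   let formats = device.formats as? [AVCaptureDeviceFormat] {
    for format in formats {
        print(format, "HDR supported:", format.isVideoHDRSupported)
    }
}

Second, once the session is running you can inspect the connection's activeVideoStabilizationMode to see what is actually in effect (it may still report .off until frames are flowing):

// Sketch: inspect which stabilization mode is actually in effect after the session starts.
session.startRunning()
if let connection = videoOutput.connection(withMediaType: AVMediaTypeVideo) {
    print("stabilization supported:", connection.isVideoStabilizationSupported,
          "active mode:", connection.activeVideoStabilizationMode.rawValue)
}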
I am using Swift 3 and can't change my resolution to custom values. When I use AVCaptureSessionPresetMedium etc., the output doesn't match the screen's aspect ratio (1/1.77).
let output = AVCaptureVideoDataOutput()
output.setSampleBufferDelegate(self, queue: sampleQueue)

let metaOutput = AVCaptureMetadataOutput()
metaOutput.setMetadataObjectsDelegate(self, queue: faceQueue)

session.beginConfiguration()
// Desired resolution: 720x1280px
// session.sessionPreset = AVCaptureSessionPresetMedium;

if session.canAddInput(input) {
    session.addInput(input)
}
if session.canAddOutput(output) {
    output.alwaysDiscardsLateVideoFrames = true
    session.addOutput(output)
    connection1 = output.connection(withMediaType: AVMediaTypeVideo)
    connection1?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
    connection1?.videoOrientation = .portrait
    connection1?.isVideoMirrored = true
}
if session.canAddOutput(metaOutput) {
    output.alwaysDiscardsLateVideoFrames = true
    session.addOutput(metaOutput)
    connection2 = metaOutput.connection(withMediaType: AVMediaTypeMetadata)
    connection2?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
    connection2?.videoOrientation = .portrait
    connection2?.isVideoMirrored = true
}
You should use the AVCaptureSessionPreset1280x720 preset. The presets are named in landscape terms, but a 1280x720 capture is the same as 720x1280; the only difference is the orientation. For example, with an app that supports rotation:
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    cameraImage = CIImage(cvPixelBuffer: pixelBuffer)
    print(cameraImage?.extent ?? "")
}
This will print (0.0, 0.0, 1280.0, 720.0) in landscape and (0.0, 0.0, 720.0, 1280.0) in portrait.
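Applied to the session setup in the question, that's a one-line change (Swift 3 constant name):

session.sessionPreset = AVCaptureSessionPreset1280x720

The buffers then come out as 720x1280 once the connection's videoOrientation is set to portrait, as in your code above.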
I'm currently developing an app which records a video and then plays it back so you can check whether the take was good enough. However, when displaying the recorded video, it shows up in the wrong orientation.
Basically I'm recording in LandscapeRight mode, but the playback displays in portrait, and the file apparently gets recorded that way as well, even when I set AVCaptureVideoOrientation.LandscapeRight.
Here is the code I'm using to set up the recording:
func setupAVCapture() {
    session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetHigh

    let devices = AVCaptureDevice.devices()
    // Loop through all the capture devices on this phone
    for device in devices {
        // Make sure this particular device supports video
        if (device.hasMediaType(AVMediaTypeVideo)) {
            // Finally check the position and confirm we've got the front camera
            if (device.position == AVCaptureDevicePosition.Front) {
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    beginSession()
                    break
                }
            }
        }
    }
}

func beginSession() {
    var err: NSError? = nil
    var deviceInput: AVCaptureDeviceInput?
    do {
        deviceInput = try AVCaptureDeviceInput(device: captureDevice)
    } catch let error as NSError {
        err = error
        deviceInput = nil
    }
    if err != nil {
        print("error: \(err?.localizedDescription)")
    }
    if self.session.canAddInput(deviceInput) {
        self.session.addInput(deviceInput)
    }

    self.videoDataOutput = AVCaptureVideoDataOutput()
    self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
    self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
    self.videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
    if session.canAddOutput(self.videoDataOutput) {
        session.addOutput(self.videoDataOutput)
    }
    self.videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

    self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    self.previewLayer.frame = self.view.bounds
    self.previewLayer.masksToBounds = true
    self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight

    let rootLayer: CALayer = CameraPreview.layer
    rootLayer.masksToBounds = true
    rootLayer.addSublayer(self.previewLayer)
    session.startRunning()
}
After the recording finishes, the following delegate method gets called, and I display the recording in the same view for playback:
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    playbackAvailable = true
    SaveButton.hidden = false
    recording = false
    stopCamera()

    // let library = PHPhotoLibrary.sharedPhotoLibrary()
    // library.performChanges({ PHAssetChangeRequest.creationRequestForAssetFromVideoAtFileURL(outputFileURL)! }, completionHandler: {success, error in debugPrint("Finished saving asset. %#", (success ? "Success." : error!)) })

    // Play Video
    player = AVPlayer(URL: outputFileURL)
    playerLayer = AVPlayerLayer(player: player)
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    playerLayer.masksToBounds = true
    playerLayer.frame = self.view.bounds
    CameraPreview.layer.addSublayer(playerLayer)
    player.play()
}
The playback displays in the wrong orientation; does anyone know how to fix this? I've also set the view controller's orientation to LandscapeRight.
Solution:
// Necessary to record in the correct orientation
// ** MUST BE IMPLEMENTED AFTER SETTING UP THE MOVIE FILE OUTPUT **
var videoConnection: AVCaptureConnection? = nil
if let connections = videoFileOutput.connections {
    for x in connections {
        if let connection = x as? AVCaptureConnection {
            for port in connection.inputPorts {
                if (port.mediaType == AVMediaTypeVideo) {
                    videoConnection = connection
                }
            }
        }
    }
}
if videoConnection != nil {
    if let vidConnect = videoConnection {
        if (vidConnect.supportsVideoOrientation) {
            vidConnect.videoOrientation = AVCaptureVideoOrientation(rawValue: UIApplication.sharedApplication().statusBarOrientation.rawValue)!
        }
    }
}
Basically you need to set the AVCaptureVideoOrientation of the connection to the correct orientation before you start recording. Or else it might record in the wrong orientation.
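A shorter equivalent of the loop above (same Swift 2-era naming, and assuming videoFileOutput is your AVCaptureMovieFileOutput) is to ask the output for its video connection directly:

// Sketch: fetch the video connection and set its orientation before starting to record.
if let connection = videoFileOutput.connectionWithMediaType(AVMediaTypeVideo) where connection.supportsVideoOrientation {
    connection.videoOrientation = .LandscapeRight
}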
I'm trying to build an app which will capture frames from the camera and process them with OpenCV before saving those files to the device, but at a specific frame rate.
What I'm stuck on at the moment is that AVCaptureVideoDataOutputSampleBufferDelegate doesn't appear to respect the AVCaptureDevice.activeVideoMinFrameDuration or AVCaptureDevice.activeVideoMaxFrameDuration settings.
captureOutput is called far more often than the 2 frames per second those settings should produce.
Do you happen to know how one could achieve this, with or without the delegate?
ViewController:
override func viewDidLoad() {
    super.viewDidLoad()
}

override func viewDidAppear(animated: Bool) {
    setupCaptureSession()
}

func setupCaptureSession() {
    let session: AVCaptureSession = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPreset1280x720

    let videoDevices: [AVCaptureDevice] = AVCaptureDevice.devices() as! [AVCaptureDevice]
    for device in videoDevices {
        if device.position == AVCaptureDevicePosition.Back {
            let captureDevice: AVCaptureDevice = device
            do {
                try captureDevice.lockForConfiguration()
                captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, 2)
                captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, 2)
                captureDevice.unlockForConfiguration()

                let input: AVCaptureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
                if session.canAddInput(input) {
                    try session.addInput(input)
                }

                let output: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
                let dispatch_queue: dispatch_queue_t = dispatch_queue_create("streamoutput", nil)
                output.setSampleBufferDelegate(self, queue: dispatch_queue)
                session.addOutput(output)

                session.startRunning()

                let previewLayer = AVCaptureVideoPreviewLayer(session: session)
                previewLayer.connection.videoOrientation = .LandscapeRight

                let previewBounds: CGRect = CGRectMake(0, 0, self.view.frame.width/2, self.view.frame.height+20)
                previewLayer.backgroundColor = UIColor.blackColor().CGColor
                previewLayer.frame = previewBounds
                previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                self.imageView.layer.addSublayer(previewLayer)

                self.previewMat.frame = CGRectMake(previewBounds.width, 0, previewBounds.width, previewBounds.height)
            } catch _ {
            }
            break
        }
    }
}

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    self.wrapper.processBuffer(self.getUiImageFromBuffer(sampleBuffer), self.previewMat)
}
So I've figured out the problem.
In the header comments of AVCaptureDevice.h, above the activeVideoMinFrameDuration property, it states that on iOS the receiver's activeVideoMinFrameDuration resets to its default value under the following conditions:
- The receiver's activeFormat changes
- The receiver's AVCaptureDeviceInput's session's sessionPreset changes
- The receiver's AVCaptureDeviceInput is added to a session
The last bullet point was causing my problem, so setting the frame durations after adding the input to the session solved it for me:
do {
    let input: AVCaptureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
    if session.canAddInput(input) {
        try session.addInput(input)
    }

    // Configure the frame durations only AFTER the input has been added to the session,
    // otherwise adding the input resets them to their defaults.
    try captureDevice.lockForConfiguration()
    captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, 2)
    captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, 2)
    captureDevice.unlockForConfiguration()

    let output: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
    let dispatch_queue: dispatch_queue_t = dispatch_queue_create("streamoutput", nil)
    output.setSampleBufferDelegate(self, queue: dispatch_queue)
    session.addOutput(output)
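If you ever need the processing rate to be independent of the device's frame-duration settings, another option (purely a sketch, in the same Swift 2-era style as the question) is to throttle manually in the delegate using the sample buffers' presentation timestamps:

// Sketch: process at most ~2 frames per second regardless of how fast frames arrive.
var lastProcessedTime = kCMTimeZero

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    // Skip frames until at least 0.5 s has elapsed since the last one we kept.
    guard CMTimeGetSeconds(CMTimeSubtract(timestamp, lastProcessedTime)) >= 0.5 else { return }
    lastProcessedTime = timestamp
    // ... process the frame here ...
}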
I'm trying to capture the screen while doing a video preview with AVFoundation (AVCaptureDeviceInput and AVCaptureVideoDataOutput).
Initiating the preview:
func startCamera() {
    var screenSize = UIScreen.mainScreen().bounds.size
    self.previewView = UIView(frame: CGRectMake(0, 0, UIScreen.mainScreen().bounds.size.width, UIScreen.mainScreen().bounds.size.height))
    self.previewView.contentMode = UIViewContentMode.ScaleAspectFit
    self.view.addSubview(previewView)

    session.sessionPreset = AVCaptureSessionPresetHigh
    let devices = AVCaptureDevice.devices()
    // Loop through all the capture devices on this phone
    for device in devices {
        // Make sure this particular device supports video
        if (device.hasMediaType(AVMediaTypeVideo)) {
            // Finally check the position and confirm we've got the back camera
            if (device.position == AVCaptureDevicePosition.Back) {
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    beginSession()
                    break
                }
            }
        }
    }
}

func beginSession() {
    var err: NSError? = nil
    var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice!, error: &err)
    if err != nil {
        println("error: \(err?.localizedDescription)")
    }
    if session.canAddInput(deviceInput) {
        session.addInput(deviceInput)
    }

    videoDataOutput = AVCaptureVideoDataOutput()
    if let videoDataOutput = videoDataOutput {
        // Note: the pixel-format key goes first; assign this to videoDataOutput.videoSettings to take effect.
        var rgbOutputSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(integer: Int(kCMPixelFormat_32BGRA))]
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(self.videoDataOutput) {
            session.addOutput(self.videoDataOutput)
        }
        videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

        if let previewLayer = AVCaptureVideoPreviewLayer(session: self.session) {
            self.previewLayer = previewLayer
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspect
            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight

            var rootLayer: CALayer = self.previewView.layer
            rootLayer.masksToBounds = true
            previewLayer.frame = rootLayer.bounds
            rootLayer.addSublayer(self.previewLayer)
            session.startRunning()

            delay(8, closure: { () -> () in
                self.processImage()
            })
        }
    }
}

Code to capture the screen:

func processImage() {
    UIGraphicsBeginImageContextWithOptions(view.bounds.size, false, 0)
    previewLayer!.renderInContext(UIGraphicsGetCurrentContext())
    // tried previewView!.layer.render... to no avail
    let previewImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    savePDFImage(previewImage, name: "front.pdf")
}
The image returned is just all white. How do I grab a screenshot of what's on the screen while doing a video preview?
Don't capture the screen. Instead, capture a frame from the buffer and use that:
- Implement the AVCaptureVideoDataOutputSampleBufferDelegate.
- On the AVCaptureVideoDataOutput, call setSampleBufferDelegate.
- Implement the captureOutput(captureOutput:didOutputSampleBuffer:fromConnection:) method.
- When you store the image to the device, play the shutter sound yourself.
In the end, your code looks more like this:
var videoDataOutput: AVCaptureVideoDataOutput?
var videoDataOutputQueue: dispatch_queue_t = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
var stillImageOutput: AVCaptureStillImageOutput?
var previewLayer: AVCaptureVideoPreviewLayer?
var captureDevice: AVCaptureDevice?
let session = AVCaptureSession()

func beginSession() {
    var err: NSError? = nil
    var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice!, error: &err)
    if err != nil {
        println("error: \(err?.localizedDescription)")
    }
    if session.canAddInput(deviceInput) {
        session.addInput(deviceInput)
    }

    stillImageOutput = AVCaptureStillImageOutput()
    videoDataOutput = AVCaptureVideoDataOutput()
    if let videoDataOutput = videoDataOutput, stillImageOutput = stillImageOutput {
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)]
        videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(videoDataOutput) {
            session.addOutput(videoDataOutput)
        }

        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if session.canAddOutput(stillImageOutput) {
            session.addOutput(stillImageOutput)
        }

        videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true
        if let previewLayer = AVCaptureVideoPreviewLayer(session: self.session) {
            self.previewLayer = previewLayer
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspect
            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight

            var rootLayer: CALayer = self.previewView.layer
            rootLayer.masksToBounds = true
            previewLayer.frame = rootLayer.bounds
            rootLayer.addSublayer(self.previewLayer)
            session.startRunning()
        }
    }
}

// this gets called periodically with an image
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    if let image = CheckResponse.imageFromSampleBuffer(sampleBuffer) {
        if keepImage(image) {
            AudioServicesPlaySystemSound(1108)
            session.stopRunning()
        }
    }
}

// This is in the Objective-C CheckResponse class to get an image from the buffer:
+ (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVPixelBufferRef pb = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciimg = [CIImage imageWithCVPixelBuffer:pb];
    // show result
    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef ref = [context createCGImage:ciimg fromRect:ciimg.extent];
    UIImage *image = [UIImage imageWithCGImage:ref scale:1.0 orientation:(UIImageOrientationUp)];
    CFRelease(ref);
    return image;
}
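For completeness, roughly the same conversion written in Swift (a sketch using the Swift 2-era names that match the rest of this answer):

// Sketch: CMSampleBuffer -> UIImage via Core Image, Swift 2-era syntax.
func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
    let ciImage = CIImage(CVPixelBuffer: pixelBuffer)
    let context = CIContext(options: nil)
    let cgImage = context.createCGImage(ciImage, fromRect: ciImage.extent)
    return UIImage(CGImage: cgImage, scale: 1.0, orientation: .Up)
}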