I'm currently developing an app that records a video and then plays it back so the user can check whether the take was good enough. However, the recorded video is displayed in the wrong orientation.
I'm recording in LandscapeRight mode, but playback shows the video in portrait, and apparently it is recorded in portrait as well, even though I set AVCaptureVideoOrientation.LandscapeRight.
Here is the code I'm using to setup the recording:
func setupAVCapture(){
    session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetHigh

    let devices = AVCaptureDevice.devices()
    // Loop through all the capture devices on this phone
    for device in devices {
        // Make sure this particular device supports video
        if (device.hasMediaType(AVMediaTypeVideo)) {
            // Finally check the position and confirm we've got the front camera
            if(device.position == AVCaptureDevicePosition.Front) {
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    beginSession()
                    break
                }
            }
        }
    }
}

func beginSession(){
    var err : NSError? = nil
    var deviceInput: AVCaptureDeviceInput?
    do {
        deviceInput = try AVCaptureDeviceInput(device: captureDevice)
    } catch let error as NSError {
        err = error
        deviceInput = nil
    }
    if err != nil {
        print("error: \(err?.localizedDescription)")
    }

    if self.session.canAddInput(deviceInput){
        self.session.addInput(deviceInput)
    }

    self.videoDataOutput = AVCaptureVideoDataOutput()
    self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
    self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
    self.videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
    if session.canAddOutput(self.videoDataOutput){
        session.addOutput(self.videoDataOutput)
    }
    self.videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

    self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    self.previewLayer.frame = self.view.bounds
    self.previewLayer.masksToBounds = true
    self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight

    let rootLayer: CALayer = CameraPreview.layer
    rootLayer.masksToBounds = true
    rootLayer.addSublayer(self.previewLayer)
    session.startRunning()
}
After this the next delegate gets called and I display it in the same view, but for a playback:
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    playbackAvailable = true
    SaveButton.hidden = false
    recording = false
    stopCamera()

    // let library = PHPhotoLibrary.sharedPhotoLibrary()
    // library.performChanges({ PHAssetChangeRequest.creationRequestForAssetFromVideoAtFileURL(outputFileURL)! }, completionHandler: {success, error in debugPrint("Finished saving asset. %#", (success ? "Success." : error!)) })

    // Play the recorded video
    player = AVPlayer(URL: outputFileURL)
    playerLayer = AVPlayerLayer(player: player)
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    playerLayer.masksToBounds = true
    playerLayer.frame = self.view.bounds
    CameraPreview.layer.addSublayer(playerLayer)
    player.play()
}
Now the playback displays it in the wrong orientation. Does anyone know how to fix this? I've also set the orientation of the view controller to LandscapeRight.
Solution:
// Necessary to record in the correct orientation
// ** MUST BE IMPLEMENTED AFTER SETTING UP THE MOVIE FILE OUTPUT **
var videoConnection: AVCaptureConnection? = nil
if let connections = videoFileOutput.connections {
    for x in connections {
        if let connection = x as? AVCaptureConnection {
            for port in connection.inputPorts {
                if (port.mediaType == AVMediaTypeVideo) {
                    videoConnection = connection
                }
            }
        }
    }
}
if let vidConnect = videoConnection {
    if (vidConnect.supportsVideoOrientation) {
        vidConnect.videoOrientation = AVCaptureVideoOrientation(rawValue: UIApplication.sharedApplication().statusBarOrientation.rawValue)!
    }
}
Basically, you need to set the AVCaptureVideoOrientation of the connection to the correct orientation before you start recording; otherwise it may record in the wrong orientation.
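For reference, the same fix is shorter in current Swift, since the movie file output exposes its video connection directly. A minimal sketch, assuming a configured session whose AVCaptureMovieFileOutput is named movieFileOutput (an assumed name, not from the code above):

// Set the orientation on the output's video connection
// before calling startRecording(to:recordingDelegate:).
if let connection = movieFileOutput.connection(with: .video),
   connection.isVideoOrientationSupported {
    connection.videoOrientation = .landscapeRight
}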
Related
I'm currently working on a camera app. Everything worked fine, but when I tried to change the constraints of the vision view, the log suddenly printed this error:
[warning]the specified colorspace format is not supported. falling back on libyuv.
I have no idea where it comes from or what I should change. Below I'll paste the relevant code where I set up the camera.
func initializeCameraSession() {
    //1: Create a new AV session
    // Get camera devices
    let devices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .front).devices

    //2: Select a capture device
    avSession.sessionPreset = .low
    do {
        guard let captureDevice = devices.first else { return }
        let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
        avSession.beginConfiguration()
        if avSession.canAddInput(captureDeviceInput) {
            avSession.addInput(captureDeviceInput)
            self.videoDeviceInput = captureDeviceInput
        } else {
            print("Couldn't add video device input to the session.")
            avSession.commitConfiguration()
            return
        }
        avSession.commitConfiguration()
    } catch {
        print(error.localizedDescription)
    }

    //3: Show output on a preview layer
    let captureOutput = AVCaptureVideoDataOutput()
    captureOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
    captureOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as String): NSNumber(value: kCVPixelFormatType_32BGRA)]
    avSession.addOutput(captureOutput)

    let previewLayer = AVCaptureVideoPreviewLayer(session: avSession)
    previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    previewLayer.connection?.videoOrientation = .portrait
    previewLayer.frame = visionView.bounds
    visionView.layer.addSublayer(previewLayer)
    view.bringSubviewToFront(visionView)
    visionView.isHidden = true
    visionView.alpha = 0.0

    avSession.startRunning()
}
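For what it's worth, this libyuv warning usually concerns the pixel format requested on the video data output rather than the constraints themselves. A hedged sketch of checking what the output actually supports before requesting 32BGRA (availableVideoPixelFormatTypes is the real AVFoundation property; the surrounding setup is assumed to be the code above):

// Only request kCVPixelFormatType_32BGRA if the output lists it as supported;
// otherwise fall back to the output's preferred (first listed) format.
let supported = captureOutput.availableVideoPixelFormatTypes
if supported.contains(kCVPixelFormatType_32BGRA) {
    captureOutput.videoSettings =
        [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
} else if let fallback = supported.first {
    captureOutput.videoSettings =
        [kCVPixelBufferPixelFormatTypeKey as String: fallback]
}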
What I did:
I tried to enable stabilization and HDR, but it's not working. I think I'm on the right path, but when I check whether the current device supports stabilization and HDR, both checks return false on every device I test.
Please point out any mistakes in the code snippet below.
Thanks in advance!
My code snippet:
func createAVSession() throws -> AVCaptureSession {
    AppLog.LogFunction(object: LOG_Start)
    // Start out with low quality
    let session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetPhoto

    // Input from video camera
    let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
    let currentFormat = device?.activeFormat.isVideoHDRSupported

    try device?.lockForConfiguration()
    if device?.activeFormat.isVideoHDRSupported == true {
        device?.automaticallyAdjustsVideoHDREnabled = false
        device?.isVideoHDREnabled = true
        print("device?.isVideoHDREnabled \(device?.isVideoHDREnabled)")
    }
    if (device?.isFocusModeSupported(.continuousAutoFocus))! {
        device?.focusMode = AVCaptureFocusMode.continuousAutoFocus
        print("device?.focusMode \(device?.focusMode.rawValue)")
    }
    if (device?.isSmoothAutoFocusSupported)! {
        device?.isSmoothAutoFocusEnabled = true
        print("device?.isSmoothAutoFocusEnabled \(device?.isSmoothAutoFocusEnabled)")
    }
    if (device?.isExposureModeSupported(.continuousAutoExposure))! {
        device?.exposureMode = .continuousAutoExposure
        print("device?.exposureMode \(device?.exposureMode.rawValue)")
    }
    device?.unlockForConfiguration()

    let input = try AVCaptureDeviceInput(device: device)
    do {
        try input.device.lockForConfiguration()
        input.device.activeVideoMaxFrameDuration = CMTimeMake(1, 30)
        input.device.activeVideoMinFrameDuration = CMTimeMake(1, 30)
        input.device.unlockForConfiguration()
    } catch {
        print("Failed to set FPS")
    }

    // Output
    let videoOutput = AVCaptureVideoDataOutput()
    videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: kCVPixelFormatType_32BGRA]
    videoOutput.alwaysDiscardsLateVideoFrames = true
    videoOutput.setSampleBufferDelegate(self, queue: sessionQueue)

    let stillImageOutput: AVCaptureStillImageOutput = AVCaptureStillImageOutput()
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    //stillImageOutput.isHighResolutionStillImageOutputEnabled = true
    if stillImageOutput.isStillImageStabilizationSupported {
        stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable = true
        print("stillImageOutput.isStillImageStabilizationActive \(stillImageOutput.isStillImageStabilizationActive)")
    }

    // Join it all together
    session.addInput(input)
    session.addOutput(videoOutput)
    if session.canAddOutput(stillImageOutput) {
        session.addOutput(stillImageOutput)
        self.stillImageOutput = stillImageOutput
    }
    if let connection = videoOutput.connection(withMediaType: AVMediaTypeVideo) {
        if connection.isVideoOrientationSupported {
            connection.videoOrientation = .portrait
        }
        if connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = .standard
            print("connection.activeVideoStabilizationMode \(connection.activeVideoStabilizationMode.rawValue)")
        }
    }
    AppLog.LogFunction(object: LOG_End)
    return session
}
What worked for me on the stabilization issue was to test for it in the delegate. In my project I use AVCaptureVideoDataOutputSampleBufferDelegate and inspect the pixel buffer before deciding whether to write it to my file; that was the one place I found where stabilization reported as supported. Anyway, here is how I did it for the stabilization issue. Hope it helps.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    self.lockQueue.sync {
        if !self.isCapturing || self.isPaused {
            return
        }
        let isVideo = captureOutput is AVCaptureVideoDataOutput
        if isVideo && self.videoWriter == nil {
            // Testing to make sure we're dealing with video and not audio
            let connection = captureOutput.connection(withMediaType: AVMediaTypeVideo)
            if (connection?.isVideoStabilizationSupported)! {
                connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.cinematic
            }
            // Other work excluded as irrelevant
        }
    }
}
I was able to successfully grab the recorded video by following this question here. Basically:
Inherit from the AVCaptureFileOutputRecordingDelegate protocol
Loop through the available devices
Create a session with the camera
Start recording
Stop recording
Get the recorded video by implementing the protocol method above
But the file doesn't come with audio.
According to this question, I have to record the audio separately and merge the video and audio using the mentioned classes, but I have no idea how to implement video and audio recording at the same time.
for device in devices {
    // Make sure this particular device supports video
    if (device.hasMediaType(AVMediaTypeVideo)) {
        // Finally check the position and confirm we've got the back camera
        if(device.position == AVCaptureDevicePosition.Back) {
            captureDevice = device as? AVCaptureDevice
            if captureDevice != nil {
                print("Capture device found")
                beginSession()
            }
        }
    }
}
In this loop, the only available device positions are .Front and .Back.
The following is how to record video with audio using the AVFoundation framework. The steps are:
1. Prepare the session:
self.captureSession = AVCaptureSession()
2. Prepare available video and audio devices:
let session = AVCaptureDevice.DiscoverySession.init(deviceTypes: [.builtInWideAngleCamera, .builtInMicrophone], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
let cameras = (session.devices.compactMap { $0 })

for camera in cameras {
    if camera.position == .front {
        self.frontCamera = camera
    }
    if camera.position == .back {
        self.rearCamera = camera
        try camera.lockForConfiguration()
        camera.focusMode = .continuousAutoFocus
        camera.unlockForConfiguration()
    }
}
3. Prepare session inputs:
guard let captureSession = self.captureSession else {
    throw CameraControllerError.captureSessionIsMissing
}

if let rearCamera = self.rearCamera {
    self.rearCameraInput = try AVCaptureDeviceInput(device: rearCamera)
    if captureSession.canAddInput(self.rearCameraInput!) {
        captureSession.addInput(self.rearCameraInput!)
        self.currentCameraPosition = .rear
    } else {
        throw CameraControllerError.inputsAreInvalid
    }
} else if let frontCamera = self.frontCamera {
    self.frontCameraInput = try AVCaptureDeviceInput(device: frontCamera)
    if captureSession.canAddInput(self.frontCameraInput!) {
        captureSession.addInput(self.frontCameraInput!)
        self.currentCameraPosition = .front
    } else {
        throw CameraControllerError.inputsAreInvalid
    }
} else {
    throw CameraControllerError.noCamerasAvailable
}

// Add audio input
if let audioDevice = self.audioDevice {
    self.audioInput = try AVCaptureDeviceInput(device: audioDevice)
    if captureSession.canAddInput(self.audioInput!) {
        captureSession.addInput(self.audioInput!)
    } else {
        throw CameraControllerError.inputsAreInvalid
    }
}
4. Prepare output:
self.videoOutput = AVCaptureMovieFileOutput()
if captureSession.canAddOutput(self.videoOutput!) {
    captureSession.addOutput(self.videoOutput!)
}
captureSession.startRunning()
5. Start recording:
func recordVideo(completion: @escaping (URL?, Error?) -> Void) {
    guard let captureSession = self.captureSession, captureSession.isRunning else {
        completion(nil, CameraControllerError.captureSessionIsMissing)
        return
    }
    let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
    let fileUrl = paths[0].appendingPathComponent("output.mp4")
    try? FileManager.default.removeItem(at: fileUrl)
    videoOutput!.startRecording(to: fileUrl, recordingDelegate: self)
    self.videoRecordCompletionBlock = completion
}
6. Stop recording:
func stopRecording(completion: @escaping (Error?) -> Void) {
    guard let captureSession = self.captureSession, captureSession.isRunning else {
        completion(CameraControllerError.captureSessionIsMissing)
        return
    }
    self.videoOutput?.stopRecording()
}
7. Implement the delegate:
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if error == nil {
        //do something
    } else {
        //do something
    }
}
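A hedged usage sketch tying the steps together; cameraController stands in for whatever object owns the session and the two methods above, and is an assumed name:

// Start recording (step 5), then stop it later (step 6).
cameraController.recordVideo { url, error in
    guard let url = url else {
        print("Recording failed: \(String(describing: error))")
        return
    }
    print("Recorded video with audio at \(url)")
}

// Later, e.g. from a stop button handler:
cameraController.stopRecording { error in
    if let error = error {
        print("Stop failed: \(error.localizedDescription)")
    }
}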
I took the idea from here: https://www.appcoda.com/avfoundation-swift-guide/
Here is the complete project: https://github.com/rubaiyat6370/iOS-Tutorial/
Found the answer. This answer goes with this code.
It can simply be done by:
declaring another capture device variable
looping through the devices and initializing the camera and audio capture device variables
adding the audio input to the session
Code:
var captureDevice: AVCaptureDevice?
var captureAudio: AVCaptureDevice?
Loop through the devices and initialize the capture devices:
var captureDeviceVideoFound: Bool = false
var captureDeviceAudioFound: Bool = false

// Loop through all the capture devices on this phone
for device in devices {
    // Make sure this particular device supports video
    if (device.hasMediaType(AVMediaTypeVideo)) {
        // Finally check the position and confirm we've got the front camera
        if(device.position == AVCaptureDevicePosition.Front) {
            captureDevice = device as? AVCaptureDevice // initialize video
            if captureDevice != nil {
                print("Capture device found")
                captureDeviceVideoFound = true
            }
        }
    }
    if(device.hasMediaType(AVMediaTypeAudio)) {
        print("Capture device audio init")
        captureAudio = device as? AVCaptureDevice // initialize audio
        captureDeviceAudioFound = true
    }
}

if(captureDeviceAudioFound && captureDeviceVideoFound) {
    beginSession()
}
Inside the session:
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
try captureSession.addInput(AVCaptureDeviceInput(device: captureAudio))
This will output a video file with audio; there's no need to merge the audio or do anything else. Apple's documentation helps here.
I followed the answer from @Mumu, but it didn't work for me because the call to AVCaptureDevice.DiscoverySession.init was returning video devices only.
Here is my version, which works on iOS 14 with Swift 5:
var captureSession: AVCaptureSession? = nil
var camera: AVCaptureDevice? = nil
var microphone: AVCaptureDevice? = nil
var videoOutput: AVCaptureFileOutput? = nil
var previewLayer: AVCaptureVideoPreviewLayer? = nil

func findDevices() {
    camera = nil
    microphone = nil

    // Search for the video media type; we need the back camera only
    let session = AVCaptureDevice.DiscoverySession.init(deviceTypes: [.builtInWideAngleCamera],
        mediaType: AVMediaType.video, position: AVCaptureDevice.Position.back)
    var devices = (session.devices.compactMap { $0 })

    // Search for the microphone
    let asession = AVCaptureDevice.DiscoverySession.init(deviceTypes: [.builtInMicrophone],
        mediaType: AVMediaType.audio, position: AVCaptureDevice.Position.unspecified)

    // Combine all devices into one list
    devices.append(contentsOf: asession.devices.compactMap { $0 })

    for device in devices {
        if device.position == .back {
            do {
                try device.lockForConfiguration()
                device.focusMode = .continuousAutoFocus
                device.flashMode = .off
                device.whiteBalanceMode = .continuousAutoWhiteBalance
                device.unlockForConfiguration()
                camera = device
            } catch {
            }
        }
        if device.hasMediaType(.audio) {
            microphone = device
        }
    }
}
func initVideoRecorder() -> Bool {
    captureSession = AVCaptureSession()
    guard let captureSession = captureSession else { return false }
    captureSession.sessionPreset = .hd4K3840x2160

    findDevices()
    guard let camera = camera else { return false }
    do {
        let cameraInput = try AVCaptureDeviceInput(device: camera)
        captureSession.addInput(cameraInput)
    } catch {
        self.camera = nil
        return false
    }

    if let audio = microphone {
        do {
            let audioInput = try AVCaptureDeviceInput(device: audio)
            captureSession.addInput(audioInput)
        } catch {
        }
    }

    videoOutput = AVCaptureMovieFileOutput()
    if captureSession.canAddOutput(videoOutput!) {
        captureSession.addOutput(videoOutput!)
        captureSession.startRunning()
        videoOutput?.connection(with: .video)?.videoOrientation = .landscapeRight

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer?.videoGravity = .resizeAspect
        previewLayer?.connection?.videoOrientation = .landscapeRight
        return true
    }
    return false
}
func startRecording() -> Bool {
    guard let captureSession = captureSession, captureSession.isRunning else { return false }
    let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
    let fileUrl = paths[0].appendingPathComponent(getVideoName())
    try? FileManager.default.removeItem(at: fileUrl)
    videoOutput?.startRecording(to: fileUrl, recordingDelegate: self)
    return true
}
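The recording delegate isn't shown above. A minimal sketch of the AVCaptureFileOutputRecordingDelegate conformance that startRecording expects; VideoRecorder is an assumed class name, and the error handling is a placeholder:

extension VideoRecorder: AVCaptureFileOutputRecordingDelegate {
    // Called once the movie file has been fully written (or recording failed).
    func fileOutput(_ output: AVCaptureFileOutput,
                    didFinishRecordingTo outputFileURL: URL,
                    from connections: [AVCaptureConnection],
                    error: Error?) {
        if let error = error {
            print("Recording error: \(error.localizedDescription)")
        } else {
            print("Video with audio saved to \(outputFileURL)")
        }
    }
}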
I had this problem too, but when I grouped adding the video input together with the audio input, the audio worked. This is my code for adding the inputs:
// Check that both inputs can be added before adding either
if cameraSession.canAddInput(deviceInput) && cameraSession.canAddInput(audioDeviceInput) {
    cameraSession.addInput(deviceInput)      // adds video
    cameraSession.addInput(audioDeviceInput) // adds audio
}
I also found that you have to add the video input first, or else there won't be audio. I originally had them in two separate if statements, but putting them in one lets video and audio be recorded together. Hope this helps.
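For completeness, a hedged sketch of how the deviceInput and audioDeviceInput used above might be created; the device lookup uses the current AVCaptureDevice.default APIs, and everything besides the two input names is assumed:

// Create the two inputs added in the snippet above: video first, then audio.
guard
    let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera,
                                              for: .video, position: .back),
    let audioDevice = AVCaptureDevice.default(for: .audio),
    let deviceInput = try? AVCaptureDeviceInput(device: videoDevice),
    let audioDeviceInput = try? AVCaptureDeviceInput(device: audioDevice)
else { return }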
Record Video With Audio
// Get the video device
if let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice] {
    for device in devices {
        if device.hasMediaType(AVMediaTypeVideo) {
            if device.position == AVCaptureDevicePosition.back {
                videoCaptureDevice = device
            }
        }
    }

    if videoCaptureDevice != nil {
        do {
            // Add the video input
            try self.captureSession.addInput(AVCaptureDeviceInput(device: videoCaptureDevice))

            // Get the audio device
            let audioInput = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

            // Add the audio input
            try self.captureSession.addInput(AVCaptureDeviceInput(device: audioInput))

            self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
            self.videoView.layer.addSublayer(self.previewLayer)

            // Add the file output
            self.captureSession.addOutput(self.movieOutput)

            captureSession.startRunning()
        } catch {
            print(error)
        }
    }
}
For more details, refer to this link:
https://medium.com/@santhosh3386/ios-avcapturesession-record-video-with-audio-23c8f8c9a8f8
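One thing the snippet above doesn't show is actually starting and stopping the recording on movieOutput. A hedged sketch using the modern method names (the answer's own code uses the older Swift 3 API, so adjust accordingly; fileURL is an assumed name):

// Record to a temporary file; `movieOutput` is the AVCaptureMovieFileOutput
// added to the session above, and `self` must conform to
// AVCaptureFileOutputRecordingDelegate.
let fileURL = FileManager.default.temporaryDirectory
    .appendingPathComponent("capture.mov")
try? FileManager.default.removeItem(at: fileURL)
movieOutput.startRecording(to: fileURL, recordingDelegate: self)

// Later, to finish the file and trigger the delegate callback:
movieOutput.stopRecording()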
I'm trying to get the camera view for a camera app to show. After messing with the camera-view setup code without success, I think the problem has something to do with my GLKView setup code. The UIView in the storyboard shows up in the app, but it's blank (white). I don't get any errors.
I'm building this for at least iOS 7.0.
I've added both the createGLKView and setupCameraView blocks just in case.
EDIT: I forgot to set an outlet to the UIView. It shows now, but the camera view is black. My background is white, so I know the camera view is showing up black.
func createGLKView() {
    if (self.context != nil) {
        return
    }

    self.context = EAGLContext(API: EAGLRenderingAPI.OpenGLES2)

    var view: GLKView = GLKView(frame: self.bounds)
    view.autoresizingMask = UIViewAutoresizing(2) | UIViewAutoresizing(5)
    view.translatesAutoresizingMaskIntoConstraints()
    view.context = self.context
    view.contentScaleFactor = 1.0
    view.drawableDepthFormat = GLKViewDrawableDepthFormat.Format24
    self.insertSubview(view, atIndex: 0)
    glkView = view

    glGenRenderbuffers(1, &renderBuffer)
    glBindRenderbuffer(GLenum(GL_RENDERBUFFER), renderBuffer)

    coreImageContext = CIContext(EAGLContext: self.context)
    EAGLContext.setCurrentContext(self.context)
    println("GLKView setup")
}
func setupCameraView() {
    self.createGLKView()

    var possibleDevices: NSArray = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
    //var device: AVCaptureDevice = possibleDevices.firstObject as! AVCaptureDevice
    var device: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    /*
    if device != true {
        println("device returned!")
        return
    }
    */

    let devices = AVCaptureDevice.devices()
    // Loop through all the capture devices on this phone
    for camera in devices {
        // Make sure this particular device supports video
        if (camera.hasMediaType(AVMediaTypeVideo)) {
            // Finally check the position and confirm we've got the back camera
            if(camera.position == AVCaptureDevicePosition.Back) {
                device = camera as! AVCaptureDevice
            }
        }
    }

    imageDetectionConfidence = 0.0

    var session: AVCaptureSession = AVCaptureSession()
    self.captureSession = session
    session.beginConfiguration()
    self.captureDevice = device

    /*
    for camera in possibleDevices {
        if camera.position == AVCaptureDevicePosition.Back {
            device = camera as! AVCaptureDevice
            println("back camera selected!")
            break
        }
    }
    */

    var error: NSError?
    /*
    var input: AVCaptureDeviceInput = AVCaptureDeviceInput.deviceInputWithDevice(device, error: &error) as! AVCaptureDeviceInput
    session.sessionPreset = AVCaptureSessionPresetPhoto
    session.addInput(input)
    */
    if let input: AVCaptureDeviceInput = AVCaptureDeviceInput.deviceInputWithDevice(device, error: &error) as? AVCaptureDeviceInput {
        session.sessionPreset = AVCaptureSessionPresetPhoto
        session.addInput(input)
    }

    var dataOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
    dataOutput.alwaysDiscardsLateVideoFrames = true
    //dataOutput.videoSettings = NSDictionary(object: Int(kCVPixelFormatType_32BGRA), forKey: kCVPixelBufferPixelFormatTypeKey as String) as [NSObject: AnyObject]
    dataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA]
    dataOutput.setSampleBufferDelegate(self, queue: dispatch_get_main_queue())
    session.addOutput(dataOutput)

    self.stillImageOutput = AVCaptureStillImageOutput()
    session.addOutput(self.stillImageOutput)

    var connection: AVCaptureConnection = (dataOutput.connections as NSArray).firstObject as! AVCaptureConnection
    connection.videoOrientation = AVCaptureVideoOrientation.Portrait

    if device.flashAvailable {
        device.lockForConfiguration(nil)
        device.flashMode = AVCaptureFlashMode.Off
        device.unlockForConfiguration()

        if device.isFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus) {
            device.lockForConfiguration(nil)
            device.focusMode = AVCaptureFocusMode.ContinuousAutoFocus
            device.unlockForConfiguration()
        }
    }

    session.commitConfiguration()
}
Assuming the code is correct, you could try bringing your subview to the front. I've found that a newly added subview sometimes isn't shown above the others and gets hidden behind them.
self.bringSubviewToFront(view)
EDIT: It might help others to know which iOS version you are targeting.
In an app I'm developing, the user is required to take a "selfie" (yes, I know, but the app is for private use only).
I've got everything working with the camera showing in the circular UIView region, but I cannot get it to scale and fill the circle properly. Here's what it's doing now:
And here's what I want it to be doing:
Here's the code for my UIView:
var cameraView = UIView()
cameraView.frame = CGRectMake(100, self.view.center.y-260, 568, 568)
cameraView.backgroundColor = UIColor(red:26/255, green:188/255, blue:156/255, alpha:1)
cameraView.layer.cornerRadius = 284
cameraView.layer.borderColor = UIColor.whiteColor().CGColor
cameraView.layer.borderWidth = 15
cameraView.contentMode = UIViewContentMode.ScaleToFill
cameraView.layer.masksToBounds = true
I have tried a few different contentMode options, including ScaleToFill, ScaleAspectFill, and ScaleAspectFit. They all produce exactly the same result.
As it turns out, the camera's self.previewLayer has a property that determines how the camera's content fills a view.
In the following code I changed self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect to self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill:
extension SelfieViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    func setupAVCapture() {
        session.sessionPreset = AVCaptureSessionPreset640x480

        let devices = AVCaptureDevice.devices()
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the front camera
                if(device.position == AVCaptureDevicePosition.Front) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        beginSession()
                        break
                    }
                }
            }
        }
    }

    func beginSession() {
        var err: NSError? = nil
        var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice, error: &err)
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        if self.session.canAddInput(deviceInput) {
            self.session.addInput(deviceInput)
        }

        self.videoDataOutput = AVCaptureVideoDataOutput()
        var rgbOutputSettings = [NSNumber(integer: kCMPixelFormat_32BGRA): kCVPixelBufferPixelFormatTypeKey]
        self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
        self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
        self.videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(self.videoDataOutput) {
            session.addOutput(self.videoDataOutput)
        }
        self.videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill

        var rootLayer: CALayer = self.cameraView.layer
        rootLayer.masksToBounds = true
        self.previewLayer.frame = rootLayer.bounds
        rootLayer.addSublayer(self.previewLayer)
        session.startRunning()
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // do stuff here
    }

    // Clean up AVCapture
    func stopCamera() {
        session.stopRunning()
    }
}