Start video recording automatically when the camera opens? - iOS

I have read lots of tutorials for video players, but how do I start recording as soon as the camera opens in a custom view, with a time limit?

Here is a working solution in Swift. The key points: start recording as soon as the session is running (here, straight from viewDidLoad), and enforce the time limit with maxRecordedDuration.
var session: AVCaptureSession?
var userreponsevideoData = NSData()
var userreponsethumbimageData = NSData()
override func viewDidLoad() {
super.viewDidLoad()
createSession()
}
func stopRecording() {
session?.stopRunning()
}
func createSession() {
var input: AVCaptureDeviceInput?
let movieFileOutput = AVCaptureMovieFileOutput()
videosPreviewLayer?.frame.size = photoPreviewImageView.frame.size
session = AVCaptureSession()
do { input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .back)!) } catch {
print("camera input error: \(error)")
return
}
session?.addInput(input!)
videosPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
videosPreviewLayer?.frame.size = self.photoPreviewImageView.frame.size
videosPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
videosPreviewLayer?.connection?.videoOrientation = .portrait
photoPreviewImageView.layer.sublayers?.forEach { $0.removeFromSuperlayer() }
photoPreviewImageView.layer.addSublayer(videosPreviewLayer!)
switchCameraButton.isHidden=true
flashButton.isHidden=true
msgLabel.isHidden=true
galleryCollectionView.isHidden=true
timerLabel.isHidden=false
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
fileURL = documentsURL.appendingPathComponent("temp.mov")
print("*****fileURL: \(fileURL?.absoluteString ?? "nil")")
let maxDuration: CMTime = CMTimeMake(600, 10) // 600/10 = 60 seconds
movieFileOutput.maxRecordedDuration = maxDuration
movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
if self.session!.canAddOutput(movieFileOutput) {
self.session!.addOutput(movieFileOutput)
}
session?.startRunning()
movieFileOutput.startRecording(to: fileURL!, recordingDelegate: self)
}
func cameraWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
let devices = AVCaptureDevice.devices(for: AVMediaType.video)
for device in devices {
if device.position == position {
return device
}
}
return nil
}
}
extension SwipeGallerymainViewController: AVCaptureFileOutputRecordingDelegate
{
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
print(outputFileURL)
let filemainurl = outputFileURL
do
{
let asset = AVURLAsset(url:filemainurl as URL, options:nil)
print(asset)
let imgGenerator = AVAssetImageGenerator(asset: asset)
imgGenerator.appliesPreferredTrackTransform = true
let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)
let uiImage = UIImage(cgImage: cgImage)
previewImage = uiImage
userreponsethumbimageData = NSData(contentsOf: filemainurl as URL)!
print(userreponsethumbimageData.length)
print(uiImage)
}
catch let error as NSError
{
print(error)
return
}
let tempfilemainurl = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random() % 1000)").appendingPathExtension("mp4")
if FileManager.default.fileExists(atPath: tempfilemainurl.path)
{
print("exist")
do
{
try FileManager.default.removeItem(at: tempfilemainurl)
}
catch { }
}
let sourceAsset = AVURLAsset(url: filemainurl, options: nil)
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
assetExport.outputFileType = AVFileType.mp4 // match the .mp4 file extension
assetExport.outputURL = tempfilemainurl
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status
{
case AVAssetExportSessionStatus.completed:
DispatchQueue.main.async {
do
{
self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl, options: [])
print("size - \(self.userreponsevideoData.length) bytes")
self.isVideoLoad=true
self.performSegue(withIdentifier:"previewSegue", sender:self)
}
catch
{
print(error)
}
}
case AVAssetExportSessionStatus.failed:
print("failed \(String(describing: assetExport.error))")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(String(describing: assetExport.error))")
default:
print("complete")
}
}
}
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
print(fileURL)
}
}
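A note from editing (not part of the original answer): the code above assumes camera and microphone permission has already been granted, since it starts the session directly from viewDidLoad. A minimal sketch of gating createSession() on authorization, assuming it lives in the same view controller, might look like this:
import AVFoundation
// Sketch: defer createSession() until camera access is confirmed.
func requestAccessAndStart() {
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
createSession()
case .notDetermined:
AVCaptureDevice.requestAccess(for: .video) { granted in
DispatchQueue.main.async {
if granted { self.createSession() }
}
}
default:
print("camera access denied")
}
}
Also note that AVCaptureDevice.devices(for:), as used in cameraWithPosition, is deprecated; on iOS 10+ AVCaptureDevice.DiscoverySession(deviceTypes:mediaType:position:) is the modern replacement.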

I have done the same thing in Java using the Robot class on a laptop. The Robot class can send keystrokes and mouse movements automatically. You can try the same idea on your platform: automate the button-press event so that recording starts.

Related

Split video into chunks of 30 seconds, iOS Swift 5

I've been trying to let the user choose a video from the gallery and split it into chunks of 30 seconds.
When I select a one-minute video, it splits into two 30-second videos and works fine.
I tried another example with a 35-second video, and it splits into two videos of 30 and 4 seconds.
But when I select a video longer than one minute, it splits the video into chunks of random
lengths, such as 34 or 40 seconds.
I don't want that!
I want to split videos into 30-second chunks!
This is my view controller code so far:
import UIKit
import AVKit
import MobileCoreServices
import Photos
class ViewController: UIViewController {
@IBOutlet weak var videoView: UIImageView!
@IBOutlet var imageView: UIImageView!
var player: AVPlayer!
var avpController = AVPlayerViewController()
var isVideoGettinGEdited = false
@IBAction func didTapButton(){
let picker = UIImagePickerController()
picker.delegate = self
picker.sourceType = .savedPhotosAlbum
picker.mediaTypes = ["public.movie"]
picker.allowsEditing = false
present(picker, animated: true, completion: nil)
}
}
extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
if let image = info[.editedImage] as? UIImage {
imageView.image = image
} else {
guard
let mediaType = info[UIImagePickerController.InfoKey.mediaType] as? String,
mediaType == (kUTTypeMovie as String),
let url = info[UIImagePickerController.InfoKey.mediaURL] as? URL
else { return }
let videoAsset = AVURLAsset(url: url)
let videoDuration = videoAsset.duration
let durationTime = ceil( CMTimeGetSeconds(videoDuration))
var startTime = 0.0
var endTime = durationTime
var numberOfBreaks = Int((Double(durationTime)/30.0))
let isReminderTime = Double(durationTime.truncatingRemainder(dividingBy: 30.0))
if isReminderTime > 0 {
numberOfBreaks = numberOfBreaks + 1
}
if Double(durationTime) <= 30 {
self.cropVideo(atURL: url, startTime: startTime, endTime: endTime, fileName: "Output.mp4")
} else {
endTime = 30
while numberOfBreaks != 0 {
if !isVideoGettinGEdited {
print("Start time = \(startTime) and Endtime = \(endTime)")
self.cropVideo(atURL: url, startTime: startTime, endTime: endTime, fileName: "Output-\(numberOfBreaks).mp4")
numberOfBreaks = numberOfBreaks - 1
startTime = endTime
let timeLeft = Double(durationTime) - startTime
if timeLeft >= 30.0 {
endTime = endTime + 30.0
} else {
endTime = timeLeft
}
}
}
}
}
picker.dismiss(animated: true, completion: nil)
}
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
picker.dismiss(animated: true, completion: nil)
}
}
extension ViewController {
func cropVideo(atURL url:URL, startTime:Double, endTime:Double, fileName:String) {
let asset = AVURLAsset(url: url)
let exportSession = AVAssetExportSession.init(asset: asset, presetName: AVAssetExportPresetHighestQuality)!
var outputURL = URL(string:NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.documentDirectory, FileManager.SearchPathDomainMask.userDomainMask, true).last!)
let fileManager = FileManager.default
do {
try fileManager.createDirectory(at: outputURL!, withIntermediateDirectories: true, attributes: nil)
} catch {
print(error)
}
outputURL?.appendPathComponent("\(fileName).mp4")
// Remove existing file
do {
try fileManager.removeItem(atPath: outputURL!.absoluteString)
} catch {
print(error)
}
exportSession.outputURL = URL(fileURLWithPath: outputURL!.absoluteString)
exportSession.shouldOptimizeForNetworkUse = true
exportSession.outputFileType = AVFileType.mp4
let start = CMTimeMakeWithSeconds(startTime, preferredTimescale: 600) // you will modify time range here
let duration = CMTimeMakeWithSeconds(endTime, preferredTimescale: 600)
let range = CMTimeRangeMake(start: start, duration: duration)
exportSession.timeRange = range
exportSession.exportAsynchronously {
self.isVideoGettinGEdited = false
switch(exportSession.status) {
case .completed:
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: outputURL!.absoluteString))
}) { completed, error in
DispatchQueue.main.async {
self.view.isUserInteractionEnabled = true
if completed {
print("Video has been saved to your photos.")
} else {
if error != nil {
print("Failed to save video in photos \(error).")
}
}
}
}
break
case .failed:
print("failed with \(exportSession.error)")
break
case .cancelled: break
default:
print("default")
break
}
}
}
//MARK:- saveVideoFromURL
func saveVideoFromURL(_ videoURL:String) {
self.view.isUserInteractionEnabled = false
DispatchQueue.global(qos: .background).async {
if let url = URL(string: videoURL),
let urlData = NSData(contentsOf: url) {
let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0];
print("url path component = \(url.lastPathComponent)")
let filePath="\(documentsPath)/\(url.lastPathComponent)"
urlData.write(toFile: filePath, atomically: true)
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: filePath))
}) { completed, error in
DispatchQueue.main.async {
self.view.isUserInteractionEnabled = true
if completed {
print("Video has been saved to your photos.")
} else {
if error != nil {
print("Failed to save video.")
}
}
}
}
}
}
}
}
Thanks!
Your main issue is in how you calculate the duration: a CMTimeRange takes a start time and a duration, not an end time. By the way, I would change the preferred timescale to 1 as well:
change
let duration = CMTimeMakeWithSeconds(endTime, preferredTimescale: 1)
to
let duration = CMTimeMakeWithSeconds(endTime-startTime, preferredTimescale: 1)
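To see why (illustrative numbers, not from the post): for the second chunk, startTime is 30 and endTime is 60. Passing endTime as the duration makes the exported range 30-90 (start 30, length 60), while endTime-startTime gives the intended 30-60.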
I have also done some other changes to your code as following:
extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
if let image = info[.editedImage] as? UIImage {
imageView.image = image
} else {
guard
let mediaType = info[.mediaType] as? String,
mediaType == "public.movie",
let url = info[.mediaURL] as? URL
else { return }
let videoAsset = AVURLAsset(url: url)
let videoDuration = videoAsset.duration
let durationTime = ceil(videoDuration.seconds)
print("durationTime:" , durationTime)
struct Duration {
let start: Double
let end: Double
}
let durations: [Duration]
if durationTime < 30 {
durations = [Duration(start: 0, end: durationTime)]
} else {
durations = (0...Int(durationTime)/30).compactMap {
if Double($0*30) == min(Double($0*30)+30, durationTime) {
return nil
}
return Duration(
start: Double($0*30),
end: min(Double($0*30)+30, durationTime)
)
}
}
for index in durations.indices {
let startTime = durations[index].start
let endTime = durations[index].end
print("Start time = \(startTime) and Endtime = \(endTime)")
saveVideo(
at: url,
startTime: startTime,
endTime: endTime,
fileName: "Output-\(index)"
)
}
}
picker.dismiss(animated: true, completion: nil)
}
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
picker.dismiss(animated: true, completion: nil)
}
}
extension ViewController {
func saveVideo(
at url: URL,
startTime: Double,
endTime:Double,
fileName: String
) {
let asset = AVURLAsset(url: url)
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)!
let outputURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
.appendingPathComponent(fileName)
.appendingPathExtension("mp4")
// Remove existing file
if FileManager.default.fileExists(atPath: outputURL.path) {
do {
try FileManager.default.removeItem(at: outputURL)
} catch {
print(error)
}
}
exportSession.outputURL = outputURL
exportSession.shouldOptimizeForNetworkUse = true
exportSession.outputFileType = .mp4
let start = CMTimeMakeWithSeconds(startTime, preferredTimescale: 1)
let duration = CMTimeMakeWithSeconds(endTime-startTime, preferredTimescale: 1)
let range = CMTimeRangeMake(start: start, duration: duration)
print("Will Render \(fileName) from \(start.seconds) to \(duration)")
exportSession.timeRange = range
exportSession.exportAsynchronously {
print("Did Render \(fileName) from \(start.seconds) to \(duration)")
self.isVideoGettinGEdited = false
switch exportSession.status {
case .completed:
self.checkDuration(for: fileName, at: outputURL)
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
}) { completed, error in
if let error = error {
print("Failed to save video in photos", error)
return
}
DispatchQueue.main.async {
self.view.isUserInteractionEnabled = true
if completed {
print("Video has been saved to your photos.")
} else {
print("Video saving has NOT been completed")
}
}
}
break
case .failed:
print("failed with:", exportSession.error ?? "no error")
break
case .cancelled: break
default: break
}
}
}
}
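To sanity-check the duration math in isolation, here is a hypothetical standalone sketch of the same chunking logic (the Chunk type and chunks(for:) function are illustrative, not from the answer or the sample project):
struct Chunk { let start: Double; let end: Double }
func chunks(for durationTime: Double) -> [Chunk] {
// Mirror the answer's logic: full 30-second windows, plus a short tail chunk.
if durationTime < 30 { return [Chunk(start: 0, end: durationTime)] }
return (0...Int(durationTime)/30).compactMap { i -> Chunk? in
let start = Double(i * 30)
let end = min(start + 30, durationTime)
return start == end ? nil : Chunk(start: start, end: end)
}
}
print(chunks(for: 95).map { "\($0.start)-\($0.end)" })
// ["0.0-30.0", "30.0-60.0", "60.0-90.0", "90.0-95.0"]
So a 95-second video yields chunks of 30, 30, 30, and 5 seconds, as intended.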
Sample Project

WebRTC iOS: Record Remote Audio stream using WebRTC

I am working on an audio streaming application with recording functionality for a receiver.
I got stuck at the point where the user wants to record the audio stream on the receiver side.
Below is my code
Initialisation
var engine = AVAudioEngine()
var recordingFile: AVAudioFile?
var audioPlayer: AVAudioPlayer?
let player = AVAudioPlayerNode()
var isRecording: Bool = false
Initialise AudioEngine
func initializeAudioEngine() {
let input = self.engine.inputNode
let format = input.inputFormat(forBus: 0)
self.engine.attach(self.player)
let mainMixerNode = self.engine.mainMixerNode
self.engine.connect(input, to:mainMixerNode, format: format)
self.engine.prepare()
do {
try self.engine.start()
self.startRecording()
} catch (let error) {
print("START FAILED", error)
}
}
Start Recording
func startRecording() {
self.createRecordingFile()
self.engine.mainMixerNode.installTap(onBus: 0,
bufferSize: 1024,
format: self.engine.mainMixerNode.outputFormat(forBus: 0)) { (buffer, time) -> Void in
do {
self.isRecording = true
try self.recordingFile?.write(from: buffer)
} catch (let error) {
print("RECORD ERROR", error);
}
return
}
}
Create Buffer
private func createBuffer(forFileNamed fileName: String) -> AVAudioPCMBuffer? {
var res: AVAudioPCMBuffer?
if let fileURL = Bundle.main.url(forResource: fileName, withExtension: "caf") {
do {
let file = try AVAudioFile(forReading: fileURL)
res = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity:AVAudioFrameCount(file.length))
if let _ = res {
do {
try file.read(into: res!)
} catch (let error) {
print("ERROR read file", error)
}
}
} catch (let error) {
print("ERROR file creation", error)
}
}
return res
}
Stop Recording
func stopRecording() {
self.engine.mainMixerNode.removeTap(onBus: 0)
}
I am trying to record using earphones, but it's not working.
It will work, because once you set up
let audiosession = AVAudioSession.sharedInstance()
with the category AVAudioSessionCategoryPlayAndRecord and call
try audiosession.setActive(true)
it will start recording whatever audio reaches the device.
WebRTC does not have any internal API to start or stop recording.
We can try using AVAudioSession instead.
First, set up the audio session:
func setUPAudioSession() -> Bool {
let audiosession = AVAudioSession.sharedInstance()
do {
try audiosession.setCategory(AVAudioSessionCategoryPlayAndRecord)
} catch(let error) {
print("--> \(error.localizedDescription)")
}
do {
try audiosession.setActive(true)
} catch (let error) {
print("--> \(error.localizedDescription)")
}
return audiosession.isInputAvailable;
}
After setting up the audio session, start recording as below:
func startRecording() -> Bool {
var settings: [String: Any] = [:]
settings[AVFormatIDKey] = kAudioFormatLinearPCM
settings[AVSampleRateKey] = 8000.0
settings[AVNumberOfChannelsKey] = 1
settings[AVLinearPCMBitDepthKey] = 16
settings[AVLinearPCMIsBigEndianKey] = false
settings[AVLinearPCMIsFloatKey] = false
settings[AVEncoderAudioQualityKey] = AVAudioQuality.max.rawValue
//Create the file URL in the documents directory where the recorded file will be saved
let searchPaths: [String] = NSSearchPathForDirectoriesInDomains(.documentDirectory, .allDomainsMask, true)
let documentPath_ = searchPaths.first!
let pathToSave = "\(documentPath_)/\(dateString)" // dateString: a file-name string defined elsewhere
let url = URL(fileURLWithPath: pathToSave)
recorder = try? AVAudioRecorder(url: url, settings: settings)
// Initialize delegate, metering, etc.
recorder?.delegate = self
recorder?.isMeteringEnabled = true
recorder?.prepareToRecord()
if let recordIs = recorder {
return recordIs.record()
}
return false
}
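One addition worth making (an assumption on my part, not from the original answer): AVAudioRecorder typically records silence if microphone permission has not been granted, so you might gate the call like this:
AVAudioSession.sharedInstance().requestRecordPermission { granted in
DispatchQueue.main.async {
if granted { _ = self.startRecording() }
}
}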
Play the recorded file
func playrecodingFile() {
//Get the path of the recorded file saved in the previous method
let searchPaths: [String] = NSSearchPathForDirectoriesInDomains(.documentDirectory, .allDomainsMask, true)
let documentPath_ = searchPaths.first!
let fileManager = FileManager.default
var arrayListOfRecordSound: [String] = []
if fileManager.fileExists(atPath: documentPath_) {
arrayListOfRecordSound = (try? fileManager.contentsOfDirectory(atPath: documentPath_)) ?? []
}
guard let firstSound = arrayListOfRecordSound.first else { return }
let selectedSound = "\(documentPath_)/\(firstSound)"
let url = URL(fileURLWithPath: selectedSound)
let player = try? AVAudioPlayer(contentsOf: url)
player?.delegate = self
try? AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
player?.prepareToPlay()
player?.play()
}
Stop recording
func stopRecording() {
recorder?.stop()
}
Pause recording
func pauseRecording() {
recorder?.pause()
}

Assertion failure in void _UIPerformResizeOfTextViewForTextContainer while passing data in Segue

func trimVideo (sourceURL: URL, destinationURL: URL, trimPoints: TrimPoints, completion: @escaping () -> Void) {
guard sourceURL.isFileURL else { return }
guard destinationURL.isFileURL else { return }
let options = [
AVURLAssetPreferPreciseDurationAndTimingKey: true
]
let asset = AVURLAsset(url: sourceURL, options: options)
let preferredPreset = AVAssetExportPresetPassthrough
if verifyPresetForAsset(preset: preferredPreset, asset: asset) {
let composition = AVMutableComposition()
let videoCompTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: CMPersistentTrackID())
let audioCompTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: CMPersistentTrackID())
guard let assetVideoTrack: AVAssetTrack = asset.tracks(withMediaType: .video).first else { return }
guard let assetAudioTrack: AVAssetTrack = asset.tracks(withMediaType: .audio).first else { return }
var accumulatedTime = kCMTimeZero
for (startTimeForCurrentSlice, endTimeForCurrentSlice) in trimPoints {
let durationOfCurrentSlice = CMTimeSubtract(endTimeForCurrentSlice, startTimeForCurrentSlice)
let timeRangeForCurrentSlice = CMTimeRangeMake(startTimeForCurrentSlice, durationOfCurrentSlice)
do {
try videoCompTrack!.insertTimeRange(timeRangeForCurrentSlice, of: assetVideoTrack, at: accumulatedTime)
try audioCompTrack!.insertTimeRange(timeRangeForCurrentSlice, of: assetAudioTrack, at: accumulatedTime)
accumulatedTime = CMTimeAdd(accumulatedTime, durationOfCurrentSlice)
}
catch let compError {
print("TrimVideo: error during composition: \(compError)")
}
}
guard let exportSession = AVAssetExportSession(asset: composition, presetName: preferredPreset) else { return }
exportSession.outputURL = destinationURL as URL
exportSession.outputFileType = AVFileType.m4v
exportSession.shouldOptimizeForNetworkUse = true
removeFileAtURLIfExists(url: destinationURL as URL)
exportSession.exportAsynchronously {
completion()
}
}
else {
print("TrimVideo - Could not find a suitable export preset for the input video")
}
}
@IBAction func nextButtonPressed(_ sender: Any) {
if MyVariables.isScreenshot == true {
//get image from current time
print("screenshot")
guard let currentTime = trimmerView.currentPlayerTime else {
return
}
self.thumbnailImage = imageFromVideo(url: footageURL!, time: currentTime )
self.screenshotOut = imageFromVideo(url: footageURL!, time: currentTime )
self.performSegue(withIdentifier: "CreatePost_Segue", sender: nil)
} else {
print("video")
let outputFileName = NSUUID().uuidString
let outputFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((outputFileName as NSString).appendingPathExtension("mov")!)
self.videoURL = URL(fileURLWithPath: outputFilePath)
trimVideo(sourceURL: self.footageURL!, destinationURL: self.videoURL!, trimPoints: [(trimmerView.startTime!,trimmerView.endTime!)], completion: self.finishVideo)
}
}
func finishVideo() -> Void {
guard let VideoURL = self.videoURL else { return }
self.thumbnailImage = setThumbnailFrom(path: VideoURL)
print("got to segue") //This does print successfully so something is happening in the segue...
self.performSegue(withIdentifier: "CreatePost_Segue", sender: nil)
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?){
if segue.identifier == "CreatePost_Segue" {
let controller = segue.destination as! CreatePostViewController
controller.thumbnailImage = self.thumbnailImage
controller.videoURL = self.videoURL
controller.screenshotOut = self.screenshotOut
}
}
So in nextButtonPressed you can see that if the media is a video (it is), I use the trimVideo function with a custom completion handler, finishVideo, to create a thumbnail from the trimmed video and then perform the segue.
Everything executes without error until the segue, so perhaps I am passing the data wrong? Or perhaps it is something to do with screenshotOut not being set if it's a video?
The full error is
*** Assertion failure in void _UIPerformResizeOfTextViewForTextContainer(NSLayoutManager *, UIView<NSTextContainerView> *, NSTextContainer *, NSUInteger)(), /BuildRoot/Library/Caches/com.apple.xbs/Sources/UIFoundation/UIFoundation-546.2/UIFoundation/TextSystem/NSLayoutManager_Private.m:1619
I fixed this with the advice @aBilal17 gave: run the segue on the main thread, since exportAsynchronously invokes its completion handler on a background queue and UIKit must only be touched from the main thread:
func finishVideo() -> Void {
guard let VideoURL = self.videoURL else { return }
self.thumbnailImage = setThumbnailFrom(path: VideoURL)
print("got to segue")
DispatchQueue.main.async {
self.performSegue(withIdentifier: "CreatePost_Segue", sender: nil)
}
}

Camera app freezes during phone call

I have a bug in my camera app. If you open the app while on a phone call, the entire app freezes. I've tried using AVCaptureSessionWasInterrupted and AVCaptureSessionInterruptionEnded notifications to handle the audio input management during a phone call, but have had no luck fixing the issue. When I comment out the audio input setup, the app no longer freezes during a phone call, so I'm pretty confident the issue lies somewhere with the audio management.
Why is the app freezing during phone calls and how can I fix it?
Thanks in advance!
Relevant code:
class CameraManager: NSObject {
static let shared = CameraManager()
private let notificationQueue = OperationQueue.main
var delegate: CameraManagerDelegate? = nil
let session = AVCaptureSession()
var captureDeviceInput: AVCaptureDeviceInput? = nil
var audioInput: AVCaptureDeviceInput? = nil
let photoOutput = AVCapturePhotoOutput()
let videoOutput = AVCaptureMovieFileOutput()
var isRecording: Bool {
return videoOutput.isRecording
}
func getCurrentVideoCaptureDevice() throws -> AVCaptureDevice {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
return device
}
func getZoomFactor() throws -> CGFloat {
return try getCurrentVideoCaptureDevice().videoZoomFactor
}
func getMaxZoomFactor() throws -> CGFloat {
return try getCurrentVideoCaptureDevice().activeFormat.videoMaxZoomFactor
}
override init() {
super.init()
NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationDidBecomeActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
self.session.startRunning()
try? self.setupCamera()
try? self.setZoomLevel(zoomLevel: 1.0)
if Settings.shared.autoRecord {
try? self.startRecording()
}
}
NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationWillResignActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
self.stopRecording()
self.session.stopRunning()
}
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionWasInterrupted, object: nil, queue: notificationQueue) { [unowned self] (notification) in
if let audioInput = self.audioInput {
self.session.removeInput(audioInput)
}
}
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionInterruptionEnded, object: nil, queue: notificationQueue) { [unowned self] (notification) in
try? self.setupAudio()
}
try? self.setupSession()
}
func setupSession() throws {
session.sessionPreset = .high
if !session.isRunning {
session.startRunning()
}
if Utils.checkPermissions() {
try setupInputs()
setupOutputs()
}
}
func setupInputs() throws {
try setupCamera()
try setupAudio()
}
func setupCamera() throws {
do {
try setCamera(position: Settings.shared.defaultCamera)
} catch CameraManagerError.unableToFindCaptureDevice(let position) {
//some devices don't have a front camera, so try the back for setup
if position == .front {
try setCamera(position: .back)
}
}
}
func setupAudio() throws {
if let audioInput = self.audioInput {
self.session.removeInput(audioInput)
}
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
throw CameraManagerError.unableToGetAudioDevice
}
let audioInput = try AVCaptureDeviceInput(device: audioDevice)
if session.canAddInput(audioInput) {
session.addInput(audioInput)
self.audioInput = audioInput
} else {
self.delegate?.unableToAddAudioInput()
}
}
func setupOutputs() {
self.photoOutput.isHighResolutionCaptureEnabled = true
guard session.canAddOutput(self.photoOutput) else {
//error
return
}
session.addOutput(self.photoOutput)
guard session.canAddOutput(self.videoOutput) else {
//error
return
}
session.addOutput(self.videoOutput)
}
func startRecording() throws {
if !self.videoOutput.isRecording {
let documentDirectory = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor:nil, create:false)
let url = documentDirectory.appendingPathComponent(UUID().uuidString + ".mov")
self.videoOutput.startRecording(to: url, recordingDelegate: self)
}
}
func stopRecording() {
if self.videoOutput.isRecording {
self.videoOutput.stopRecording()
}
}
func setZoomLevel(zoomLevel: CGFloat) throws {
guard let captureDevice = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDevice
}
try captureDevice.lockForConfiguration()
captureDevice.videoZoomFactor = zoomLevel
captureDevice.unlockForConfiguration()
}
func capturePhoto() {
let photoOutputSettings = AVCapturePhotoSettings()
photoOutputSettings.flashMode = Settings.shared.flash
photoOutputSettings.isAutoStillImageStabilizationEnabled = true
photoOutputSettings.isHighResolutionPhotoEnabled = true
self.photoOutput.capturePhoto(with: photoOutputSettings, delegate: self)
}
func toggleCamera() throws {
if let captureDeviceInput = self.captureDeviceInput,
captureDeviceInput.device.position == .back {
try setCamera(position: .front)
} else {
try setCamera(position: .back)
}
}
func setCamera(position: AVCaptureDevice.Position) throws {
if let captureDeviceInput = self.captureDeviceInput {
if captureDeviceInput.device.position == position {
return
} else {
session.removeInput(captureDeviceInput)
}
}
var device: AVCaptureDevice? = nil
switch position {
case .front:
device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
default:
device = AVCaptureDevice.default(for: .video)
}
guard let nonNilDevice = device else {
throw CameraManagerError.unableToFindCaptureDevice(position)
}
try nonNilDevice.lockForConfiguration()
if nonNilDevice.isFocusModeSupported(.continuousAutoFocus) {
nonNilDevice.focusMode = .continuousAutoFocus
}
if nonNilDevice.isExposureModeSupported(.continuousAutoExposure) {
nonNilDevice.exposureMode = .continuousAutoExposure
}
nonNilDevice.unlockForConfiguration()
let input = try AVCaptureDeviceInput(device: nonNilDevice)
guard session.canAddInput(input) else {
throw CameraManagerError.unableToAddCaptureDeviceInput
}
session.addInput(input)
self.captureDeviceInput = input
}
func setFocus(point: CGPoint) throws {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
guard device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) else {
throw CameraManagerError.notSupportedByDevice
}
try device.lockForConfiguration()
device.focusPointOfInterest = point
device.focusMode = .autoFocus
device.unlockForConfiguration()
}
func setExposure(point: CGPoint) throws {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
guard device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) else {
throw CameraManagerError.notSupportedByDevice
}
try device.lockForConfiguration()
device.exposurePointOfInterest = point
device.exposureMode = .autoExpose
device.unlockForConfiguration()
}
}
extension CameraManager: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
self.delegate?.cameraManagerWillCapturePhoto()
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation() else {
//error
return
}
let capturedImage = UIImage(data: imageData, scale: 1.0)
if let image = capturedImage {
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
}
self.delegate?.cameraManagerDidFinishProcessingPhoto()
}
}
extension CameraManager: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
self.delegate?.cameraManagerDidStartRecording()
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
self.delegate?.cameraManagerDidFinishRecording()
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputFileURL)
}) { saved, error in
if saved {
do {
try FileManager.default.removeItem(at: outputFileURL)
} catch _ as NSError {
//error
}
}
}
}
}
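A direction worth investigating (a sketch under my own assumptions, not a confirmed fix): the userInfo of AVCaptureSessionWasInterrupted carries an interruption reason, so the observer can remove only the audio input when a phone call takes over the audio device, rather than reacting to every interruption the same way:
NotificationCenter.default.addObserver(forName: .AVCaptureSessionWasInterrupted, object: session, queue: notificationQueue) { [unowned self] notification in
guard let rawReason = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? Int,
let reason = AVCaptureSession.InterruptionReason(rawValue: rawReason) else { return }
if reason == .audioDeviceInUseByAnotherClient, let audioInput = self.audioInput {
// Drop only the audio input; the video preview keeps running.
self.session.removeInput(audioInput)
self.audioInput = nil
}
}
It is also worth wrapping input changes in session.beginConfiguration()/commitConfiguration() and confirming that no blocking work runs on the main queue while the session reconfigures.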

Record video using custom camera automatically in swift 3.0

How do I record video automatically in iOS, without user interaction with the camera controls?
The requirement is to record video from the front camera when a view opens, with the camera controls disabled;
the video records and saves automatically on entering and on going back from that view.
Swift 3.0
Finally I solved the problem. Just copy and paste the whole code, connect the outlets, and it works fine.
class TestViewController: UIViewController {
@IBOutlet weak var myView: UIView!
var session: AVCaptureSession?
var userreponsevideoData = NSData()
var userreponsethumbimageData = NSData()
override func viewDidLoad() {
super.viewDidLoad()
createSession()
}
override func viewDidAppear(animated: Bool) {
super.viewDidAppear(animated)
}
func createSession() {
var input: AVCaptureDeviceInput?
let movieFileOutput = AVCaptureMovieFileOutput()
var prevLayer: AVCaptureVideoPreviewLayer?
prevLayer?.frame.size = myView.frame.size
session = AVCaptureSession()
do { input = try AVCaptureDeviceInput(device: self.cameraWithPosition(.Front)!) } catch {
print("camera input error: \(error)")
return
}
session?.addInput(input)
prevLayer = AVCaptureVideoPreviewLayer(session: session)
prevLayer?.frame.size = myView.frame.size
prevLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
prevLayer?.connection.videoOrientation = .Portrait
myView.layer.addSublayer(prevLayer!)
let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
let filemainurl = NSURL(string: ("\(documentsURL.URLByAppendingPathComponent("temp")!)" + ".mov"))
let maxDuration: CMTime = CMTimeMake(600, 10)
movieFileOutput.maxRecordedDuration = maxDuration
movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
if self.session!.canAddOutput(movieFileOutput) {
self.session!.addOutput(movieFileOutput)
}
session?.startRunning()
movieFileOutput.startRecordingToOutputFileURL(filemainurl, recordingDelegate: self)
}
func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
for device in devices {
if device.position == position {
return device as? AVCaptureDevice
}
}
return nil
}
@IBAction func pressbackbutton(sender: AnyObject) {
session?.stopRunning()
}
}
extension TestViewController: AVCaptureFileOutputRecordingDelegate
{
func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
print(fileURL)
}
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
print(outputFileURL)
let filemainurl = outputFileURL
do
{
let asset = AVURLAsset(URL: filemainurl, options: nil)
print(asset)
let imgGenerator = AVAssetImageGenerator(asset: asset)
imgGenerator.appliesPreferredTrackTransform = true
let cgImage = try imgGenerator.copyCGImageAtTime(CMTimeMake(0, 1), actualTime: nil)
let uiImage = UIImage(CGImage: cgImage)
userreponsethumbimageData = NSData(contentsOfURL: filemainurl)!
print(userreponsethumbimageData.length)
print(uiImage)
// imageData = UIImageJPEGRepresentation(uiImage, 0.1)
}
catch let error as NSError
{
print(error)
return
}
SVProgressHUD.showWithMaskType(SVProgressHUDMaskType.Clear)
let VideoFilePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("mergeVideo\(arc4random()%1000)")!.URLByAppendingPathExtension("mp4")!
if NSFileManager.defaultManager().fileExistsAtPath(VideoFilePath.path!)
{
do
{
try NSFileManager.defaultManager().removeItemAtPath(VideoFilePath.path!)
}
catch { }
}
let tempfilemainurl = VideoFilePath
let sourceAsset = AVURLAsset(URL: filemainurl!, options: nil)
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
assetExport.outputFileType = AVFileTypeMPEG4 // match the .mp4 extension
assetExport.outputURL = tempfilemainurl
assetExport.exportAsynchronouslyWithCompletionHandler { () -> Void in
switch assetExport.status
{
case AVAssetExportSessionStatus.Completed:
dispatch_async(dispatch_get_main_queue(),
{
do
{
SVProgressHUD .dismiss()
self.userreponsevideoData = try NSData(contentsOfURL: tempfilemainurl, options: NSDataReadingOptions())
print("MB - \(self.userreponsevideoData.length) byte")
}
catch
{
SVProgressHUD .dismiss()
print(error)
}
})
case AVAssetExportSessionStatus.Failed:
print("failed \(assetExport.error)")
case AVAssetExportSessionStatus.Cancelled:
print("cancelled \(assetExport.error)")
default:
print("complete")
SVProgressHUD .dismiss()
}
}
}
}
Here is the Swift 4.1 version:
@objc func longPress(gesture: UILongPressGestureRecognizer) {
if gesture.state == UIGestureRecognizerState.began {
print("Long Press")
captureButton.setImage(UIImage(named: "recording"), for:.normal)
createSession()
}else if gesture.state == UIGestureRecognizerState.changed {
}
else if gesture.state == UIGestureRecognizerState.ended {
captureButton.setImage(UIImage(named: "camaraTap"), for:.normal)
session?.stopRunning()
}
}
func createSession() {
var input: AVCaptureDeviceInput?
let movieFileOutput = AVCaptureMovieFileOutput()
videosPreviewLayer?.frame.size = photoPreviewImageView.frame.size
session = AVCaptureSession()
do { input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .back)!) } catch {
print("camera input error: \(error)")
return
}
session?.addInput(input!)
videosPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
videosPreviewLayer?.frame.size = self.photoPreviewImageView.frame.size
videosPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
videosPreviewLayer?.connection?.videoOrientation = .portrait
photoPreviewImageView.layer.sublayers?.forEach { $0.removeFromSuperlayer() }
photoPreviewImageView.layer.addSublayer(videosPreviewLayer!)
switchCameraButton.isHidden=true
flashButton.isHidden=true
msgLabel.isHidden=true
galleryCollectionView.isHidden=true
timerLabel.isHidden=false
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
fileURL = documentsURL.appendingPathComponent("temp.mov")
print("*****fileURL: \(fileURL?.absoluteString ?? "nil")")
let maxDuration: CMTime = CMTimeMake(600, 10) // 600/10 = 60 seconds
movieFileOutput.maxRecordedDuration = maxDuration
movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
if self.session!.canAddOutput(movieFileOutput) {
self.session!.addOutput(movieFileOutput)
}
session?.startRunning()
movieFileOutput.startRecording(to: fileURL!, recordingDelegate: self)
}
func cameraWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
let devices = AVCaptureDevice.devices(for: AVMediaType.video)
for device in devices {
if device.position == position {
return device
}
}
return nil
}
}
extension SwipeGallerymainViewController: AVCaptureFileOutputRecordingDelegate
{
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
print(outputFileURL)
let filemainurl = outputFileURL
do
{
let asset = AVURLAsset(url:filemainurl as URL, options:nil)
print(asset)
let imgGenerator = AVAssetImageGenerator(asset: asset)
imgGenerator.appliesPreferredTrackTransform = true
let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)
let uiImage = UIImage(cgImage: cgImage)
previewImage = uiImage
userreponsethumbimageData = NSData(contentsOf: filemainurl as URL)!
print(userreponsethumbimageData.length)
print(uiImage)
// imageData = UIImageJPEGRepresentation(uiImage, 0.1)
}
catch let error as NSError
{
print(error)
return
}
let tempfilemainurl = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random() % 1000)").appendingPathExtension("mp4")
if FileManager.default.fileExists(atPath: tempfilemainurl.path)
{
print("exist")
do
{
try FileManager.default.removeItem(at: tempfilemainurl)
}
catch { }
}
let sourceAsset = AVURLAsset(url: filemainurl, options: nil)
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
assetExport.outputFileType = AVFileType.mp4 // match the .mp4 file extension
assetExport.outputURL = tempfilemainurl
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status
{
case AVAssetExportSessionStatus.completed:
DispatchQueue.main.async {
do
{
// SVProgressHUD .dismiss()
self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl, options: [])
print("size - \(self.userreponsevideoData.length) bytes")
self.isVideoLoad=true
self.performSegue(withIdentifier:"previewSegue", sender:self)
}
catch
{
// SVProgressHUD .dismiss()
print(error)
}
}
case AVAssetExportSessionStatus.failed:
print("failed \(String(describing: assetExport.error))")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(String(describing: assetExport.error))")
default:
print("complete")
// SVProgressHUD .dismiss()
}
}
}
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
print(fileURL)
}
}
