Swift AVFoundation kCMSampleBufferError_ArrayTooSmall when using AVCaptureVideoDataOutputSampleBufferDelegate

I have a Swift audio/video recorder app that intermittently fails with this error:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSLocalizedFailureReason=An unknown error occurred (-12737), NSLocalizedDescription=The operation could not be completed, NSUnderlyingError=0x7fae164ae1f0 {Error Domain=NSOSStatusErrorDomain Code=-12737 "(null)"}}
It occurs randomly; sometimes recording works, sometimes not. I found that the underlying status code -12737 corresponds to kCMSampleBufferError_ArrayTooSmall.
My question is: how can I increase the buffer and fix this error? I'm using an AVCaptureVideoDataOutputSampleBufferDelegate; here is the code:
//
// ViewController.swift
// CustomCamera
//
// Created by Taras Chernyshenko on 6/27/17.
// Copyright © 2017 Taras Chernyshenko. All rights reserved.
//
import AVFoundation
import Photos
class NewRecorder: NSObject,
AVCaptureAudioDataOutputSampleBufferDelegate,
AVCaptureVideoDataOutputSampleBufferDelegate {
private var session: AVCaptureSession = AVCaptureSession()
private var deviceInput: AVCaptureScreenInput?
private var previewLayer: AVCaptureVideoPreviewLayer?
private var videoOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
private var audioOutput: AVCaptureAudioDataOutput = AVCaptureAudioDataOutput()
private var audioConnection: AVCaptureConnection?
private var videoConnection: AVCaptureConnection?
private var assetWriter: AVAssetWriter?
private var audioInput: AVAssetWriterInput?
private var videoInput: AVAssetWriterInput?
private var fileManager: FileManager = FileManager()
private var isRecordingSessionStarted: Bool = false
private var destinationUrl: URL
private var fps: Int
private var showCursor: Bool
private var highlightClicks: Bool
private var displayId: CGDirectDisplayID
private var audioDevice: AVCaptureDevice?
private var videoCodec: String?
private var width: Int
private var height: Int
private var audioBitrate: Int
private var videoBitrate: Int
var onStart: (() -> Void)?
var onFinish: (() -> Void)?
var onError: ((Error) -> Void)?
var onPause: (() -> Void)?
var onResume: (() -> Void)?
private var recordingQueue = DispatchQueue(label: "recording.queue")
init(destination: URL, fps: Int, cropRect: CGRect?, showCursor: Bool, highlightClicks: Bool, displayId: CGDirectDisplayID = CGMainDisplayID(), audioDevice: AVCaptureDevice? = .default(for: .audio), videoCodec: String = "avc1", width: Int, height: Int, audioBitrate: Int, videoBitrate: Int) {
self.destinationUrl = destination
self.fps = fps
self.showCursor = showCursor
self.highlightClicks = highlightClicks
self.displayId = displayId
self.audioDevice = audioDevice // keep optional; force-unwrapping crashes if no audio device is present
self.videoCodec = videoCodec
self.width = width
self.height = height
self.audioBitrate = audioBitrate
self.videoBitrate = videoBitrate
}
func setup() {
self.session.sessionPreset = AVCaptureSession.Preset.high
if self.fileManager.isDeletableFile(atPath: self.destinationUrl.path) {
_ = try? self.fileManager.removeItem(atPath: self.destinationUrl.path)
}
self.assetWriter = try? AVAssetWriter(outputURL: self.destinationUrl,
fileType: AVFileType.mp4)
self.assetWriter!.movieFragmentInterval = kCMTimeInvalid
self.assetWriter!.shouldOptimizeForNetworkUse = true
let audioSettings = [
AVFormatIDKey : kAudioFormatMPEG4AAC,
AVNumberOfChannelsKey : 2,
AVSampleRateKey : 44100.0,
AVEncoderBitRateKey: self.audioBitrate
] as [String : Any]
let videoSettings = [
AVVideoCodecKey : self.videoCodec!,
AVVideoWidthKey : self.width,
AVVideoHeightKey : self.height,
AVVideoCompressionPropertiesKey: [
AVVideoAverageBitRateKey: NSNumber(value: self.videoBitrate)
]
] as [String : Any]
self.videoInput = AVAssetWriterInput(mediaType: AVMediaType.video,
outputSettings: videoSettings)
self.audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio,
outputSettings: audioSettings)
self.videoInput?.expectsMediaDataInRealTime = true
self.audioInput?.expectsMediaDataInRealTime = true
if self.assetWriter!.canAdd(self.videoInput!) {
self.assetWriter?.add(self.videoInput!)
}
if self.assetWriter!.canAdd(self.audioInput!) {
self.assetWriter?.add(self.audioInput!)
}
//self.deviceInput = try? AVCaptureDeviceInput(device: self.videoDevice)
self.deviceInput = AVCaptureScreenInput(displayID: self.displayId)
self.deviceInput!.minFrameDuration = CMTimeMake(1, Int32(self.fps))
self.deviceInput!.capturesCursor = self.showCursor
self.deviceInput!.capturesMouseClicks = self.highlightClicks
self.session.startRunning()
// DispatchQueue.main.async {
self.session.beginConfiguration()
if self.session.canAddInput(self.deviceInput!) {
self.session.addInput(self.deviceInput!)
}
if self.session.canAddOutput(self.videoOutput) {
self.session.addOutput(self.videoOutput)
}
self.videoConnection = self.videoOutput.connection(with: AVMediaType.video)
let audioIn = try? AVCaptureDeviceInput(device: self.audioDevice!)
if self.session.canAddInput(audioIn!) {
self.session.addInput(audioIn!)
}
if self.session.canAddOutput(self.audioOutput) {
self.session.addOutput(self.audioOutput)
}
self.audioConnection = self.audioOutput.connection(with: AVMediaType.audio)
self.session.commitConfiguration()
//}
}
func start() {
self.startRecording()
print("R")
}
func stop() {
self.stopRecording()
}
func startRecording() {
if self.assetWriter?.startWriting() != true {
print("error: \(self.assetWriter?.error.debugDescription ?? "")")
}
if !self.isRecordingSessionStarted {
let presentationTime = CMTimeMakeWithSeconds(1, 30)
self.assetWriter?.startSession(atSourceTime: presentationTime)
self.isRecordingSessionStarted = true
}
self.videoOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
self.audioOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
}
func stopRecording() {
self.videoOutput.setSampleBufferDelegate(nil, queue: nil)
self.audioOutput.setSampleBufferDelegate(nil, queue: nil)
self.assetWriter?.finishWriting {
print("Saved in folder \(self.destinationUrl)")
exit(0)
}
}
func captureOutput(_ captureOutput: AVCaptureOutput, didOutput
sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
//if assetWriter?.status == AVAssetWriterStatus.writing {
let description = CMSampleBufferGetFormatDescription(sampleBuffer)!
if CMFormatDescriptionGetMediaType(description) == kCMMediaType_Audio {
if self.audioInput!.isReadyForMoreMediaData {
//print("appendSampleBuffer audio");
self.audioInput?.append(sampleBuffer)
}
} else {
if self.videoInput!.isReadyForMoreMediaData {
//print("appendSampleBuffer video");
let appendStatus = self.videoInput!.append(sampleBuffer)
if !appendStatus {
if self.assetWriter!.status == AVAssetWriterStatus.failed {
print(self.assetWriter!.error!)
}
print("Error writing video buffer");
}
}
}
//}
}
}
Here is a similar issue: AVAssetWriter goes AVAssetWriterStatusFailed after appendSampleBuffer:

My machine ran out of memory; this was the problem.
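Beyond the memory issue, one pattern that commonly avoids sporadic writer errors with this delegate setup (a sketch, not a confirmed fix for this exact report): set the sample buffer delegates first, then start the writer session from the first buffer's own presentation timestamp instead of the fixed CMTimeMakeWithSeconds(1, 30), and only append while the writer is in the .writing state. With the startSession call removed from startRecording(), the delegate method would look roughly like this:

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    guard self.assetWriter?.status == .writing else { return }
    // Start the session at the first buffer's timestamp so no buffer is
    // stamped earlier than the session's source time.
    if !self.isRecordingSessionStarted {
        let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        self.assetWriter?.startSession(atSourceTime: pts)
        self.isRecordingSessionStarted = true
    }
    // Drop buffers whenever the inputs are busy; blocking here is what
    // lets memory balloon on slower machines.
    if output == self.audioOutput, self.audioInput?.isReadyForMoreMediaData == true {
        self.audioInput?.append(sampleBuffer)
    } else if output == self.videoOutput, self.videoInput?.isReadyForMoreMediaData == true {
        self.videoInput?.append(sampleBuffer)
    }
}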

Related

mAudioEngine.start() crash in ios

Terminating app due to uncaught exception
'com.apple.coreaudio.avfaudio', reason: 'required condition is false:
_engine->IsRunning()'
The crash occurs on an iPhone 13 mini.
import Flutter
import AVFoundation

public class SwiftSoundStreamPlugin: NSObject, FlutterPlugin {
private var channel: FlutterMethodChannel
private var registrar: FlutterPluginRegistrar
private var hasPermission: Bool = false
private var debugLogging: Bool = false
//========= Recorder's vars
private let mAudioEngine = AVAudioEngine()
private let mRecordBus = 0
private var mInputNode: AVAudioInputNode
private var mRecordSampleRate: Double = 16000 // 16Khz
private var mRecordBufferSize: AVAudioFrameCount = 8192
private var mRecordChannel = 0
private var mRecordSettings: [String:Int]!
private var mRecordFormat: AVAudioFormat!
//========= Player's vars
private let PLAYER_OUTPUT_SAMPLE_RATE: Double = 44100 // 32Khz
private let mPlayerBus = 0
private let mPlayerNode = AVAudioPlayerNode()
private var mPlayerSampleRate: Double = 44100 // 16Khz
private var mPlayerBufferSize: AVAudioFrameCount = 127000
private var mPlayerOutputFormat: AVAudioFormat!
private var mPlayerInputFormat: AVAudioFormat!
/** ======== Basic Plugin initialization ======== **/
public static func register(with registrar: FlutterPluginRegistrar) {
let channel = FlutterMethodChannel(name: "vn.casperpas.sound_stream:methods", binaryMessenger: registrar.messenger())
let instance = SwiftSoundStreamPlugin( channel, registrar: registrar)
registrar.addMethodCallDelegate(instance, channel: channel)
}
init( _ channel: FlutterMethodChannel, registrar: FlutterPluginRegistrar ) {
self.channel = channel
self.registrar = registrar
self.mInputNode = mAudioEngine.inputNode
super.init()
self.attachPlayer()
mAudioEngine.prepare()
}
public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
switch call.method {
case "hasPermission":
hasPermission(result)
case "initializeRecorder":
initializeRecorder(call, result)
case "startRecording":
startRecording(result)
case "stopRecording":
stopRecording(result)
case "setBufferSize":
setBufferSize(call, result)
case "initializePlayer":
initializePlayer(call, result)
case "startPlayer":
startPlayer(result)
case "stopPlayer":
stopPlayer(result)
case "writeChunk":
writeChunk(call, result)
default:
print("Unrecognized method: \(call.method)")
sendResult(result, FlutterMethodNotImplemented)
}
}
private func sendResult(_ result: @escaping FlutterResult, _ arguments: Any?) {
DispatchQueue.main.async {
result( arguments )
}
}
private func invokeFlutter( _ method: String, _ arguments: Any? ) {
DispatchQueue.main.async {
self.channel.invokeMethod( method, arguments: arguments )
}
}
/** ======== Plugin methods ======== **/
private func checkAndRequestPermission(completion callback: @escaping ((Bool) -> Void)) {
if (hasPermission) {
callback(hasPermission)
return
}
var permission: AVAudioSession.RecordPermission
#if swift(>=4.2)
permission = AVAudioSession.sharedInstance().recordPermission
#else
permission = AVAudioSession.sharedInstance().recordPermission()
#endif
switch permission {
case .granted:
print("granted")
hasPermission = true
callback(hasPermission)
break
case .denied:
print("denied")
hasPermission = false
callback(hasPermission)
break
case .undetermined:
print("undetermined")
AVAudioSession.sharedInstance().requestRecordPermission() { [unowned self] allowed in
if allowed {
self.hasPermission = true
print("undetermined true")
callback(self.hasPermission)
} else {
self.hasPermission = false
print("undetermined false")
callback(self.hasPermission)
}
}
break
default:
callback(hasPermission)
break
}
}
private func hasPermission(_ result: @escaping FlutterResult) {
checkAndRequestPermission { value in
self.sendResult(result, value)
}
}
private func startEngine() {
guard !mAudioEngine.isRunning else {
return
}
if mAudioEngine.outputNode.outputFormat(forBus: mPlayerBus).channelCount == 0 {
// if count is 0 then it throws a exception or crash
mAudioEngine.reset()
/*
or try changing object
mAudioEngine = AVAudioEngine()
*/
}
mAudioEngine.reset()
try? mAudioEngine.start()
}
private func stopEngine() {
mAudioEngine.stop()
mAudioEngine.reset()
}
private func sendEventMethod(_ name: String, _ data: Any) {
var eventData: [String: Any] = [:]
eventData["name"] = name
eventData["data"] = data
invokeFlutter("platformEvent", eventData)
}
private func initializeRecorder(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
guard let argsArr = call.arguments as? Dictionary<String,AnyObject>
else {
sendResult(result, FlutterError( code: SoundStreamErrors.Unknown.rawValue,
message:"Incorrect parameters",
details: nil ))
return
}
mRecordSampleRate = argsArr["sampleRate"] as? Double ?? mRecordSampleRate
debugLogging = argsArr["showLogs"] as? Bool ?? debugLogging
mRecordFormat = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16, sampleRate: mRecordSampleRate, channels: 2, interleaved: true)
checkAndRequestPermission { isGranted in
if isGranted {
self.sendRecorderStatus(SoundStreamStatus.Initialized)
self.sendResult(result, true)
} else {
self.sendResult(result, FlutterError( code: SoundStreamErrors.Unknown.rawValue,
message:"Incorrect parameters",
details: nil ))
}
}
}
private func resetEngineForRecord() {
mAudioEngine.inputNode.removeTap(onBus: mRecordBus)
let input = mAudioEngine.inputNode
let inputFormat = input.outputFormat(forBus: mRecordBus)
let converter = AVAudioConverter(from: inputFormat, to: mRecordFormat!)!
let ratio: Float = Float(inputFormat.sampleRate)/Float(mRecordFormat.sampleRate)
input.installTap(onBus: mRecordBus, bufferSize: mRecordBufferSize, format: inputFormat) { (buffer, time) -> Void in
let inputCallback: AVAudioConverterInputBlock = { inNumPackets, outStatus in
outStatus.pointee = .haveData
return buffer
}
let convertedBuffer = AVAudioPCMBuffer(pcmFormat: self.mRecordFormat!, frameCapacity: UInt32(Float(buffer.frameCapacity) / ratio))!
var error: NSError?
let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputCallback)
assert(status != .error)
if (self.mRecordFormat?.commonFormat == AVAudioCommonFormat.pcmFormatInt16) {
let values = self.audioBufferToBytes(convertedBuffer)
self.sendMicData(values)
}
}
}
private func startRecording(_ result: @escaping FlutterResult) {
resetEngineForRecord()
startEngine()
sendRecorderStatus(SoundStreamStatus.Playing)
result(true)
}
private func stopRecording(_ result: @escaping FlutterResult) {
mAudioEngine.inputNode.removeTap(onBus: mRecordBus)
sendRecorderStatus(SoundStreamStatus.Stopped)
result(true)
}
private func sendMicData(_ data: [UInt8]) {
let channelData = FlutterStandardTypedData(bytes: NSData(bytes: data, length: data.count) as Data)
sendEventMethod("dataPeriod", channelData)
}
private func sendRecorderStatus(_ status: SoundStreamStatus) {
sendEventMethod("recorderStatus", status.rawValue)
}
private func setBufferSize(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
guard let argsArr = call.arguments as? Dictionary<String,AnyObject>
else {
sendResult(result, FlutterError( code: SoundStreamErrors.Unknown.rawValue,
message:"Incorrect parameters",
details: nil ))
return
}
mPlayerBufferSize = argsArr["bufferSize"] as? AVAudioFrameCount ?? mPlayerBufferSize
// result(true)
}
private func initializePlayer(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
guard let argsArr = call.arguments as? Dictionary<String,AnyObject>
else {
sendResult(result, FlutterError( code: SoundStreamErrors.Unknown.rawValue,
message:"Incorrect parameters",
details: nil ))
return
}
mPlayerSampleRate = argsArr["sampleRate"] as? Double ?? mPlayerSampleRate
debugLogging = argsArr["showLogs"] as? Bool ?? debugLogging
mPlayerInputFormat = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16, sampleRate: mPlayerSampleRate, channels: 1, interleaved: true)
sendPlayerStatus(SoundStreamStatus.Initialized)
}
private func attachPlayer() {
mPlayerOutputFormat = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatFloat32, sampleRate: PLAYER_OUTPUT_SAMPLE_RATE, channels: 1, interleaved: true)
mAudioEngine.attach(mPlayerNode)
mAudioEngine.connect(mPlayerNode, to: mAudioEngine.outputNode, format: mPlayerOutputFormat)
// mAudioEngine.connect(mPlayerNode, to: mAudioEngine.mainMixerNode, format: mPlayerOutputFormat)
}
private func startPlayer(_ result: @escaping FlutterResult) {
startEngine()
if !mPlayerNode.isPlaying {
mPlayerNode.play()
}
sendPlayerStatus(SoundStreamStatus.Playing)
result(true)
}
private func stopPlayer(_ result: @escaping FlutterResult) {
if mPlayerNode.isPlaying {
mPlayerNode.stop()
}
sendPlayerStatus(SoundStreamStatus.Stopped)
result(true)
}
private func sendPlayerStatus(_ status: SoundStreamStatus) {
sendEventMethod("playerStatus", status.rawValue)
}
private func writeChunk(_ call: FlutterMethodCall, _ result: @escaping FlutterResult) {
guard let argsArr = call.arguments as? Dictionary<String,AnyObject>,
let data = argsArr["data"] as? FlutterStandardTypedData
else {
sendResult(result, FlutterError( code: SoundStreamErrors.FailedToWriteBuffer.rawValue,
message:"Failed to write Player buffer",
details: nil ))
return
}
let byteData = [UInt8](data.data)
pushPlayerChunk(byteData, result)
}
private func pushPlayerChunk(_ chunk: [UInt8], _ result: @escaping FlutterResult) {
let buffer = bytesToAudioBuffer(chunk)
mPlayerNode.scheduleBuffer(convertBufferFormat(
buffer,
from: mPlayerInputFormat,
to: mPlayerOutputFormat
));
result(true)
}
private func convertBufferFormat(_ buffer: AVAudioPCMBuffer, from: AVAudioFormat, to: AVAudioFormat) -> AVAudioPCMBuffer {
let formatConverter = AVAudioConverter(from: from, to: to)
let ratio: Float = Float(from.sampleRate)/Float(to.sampleRate)
let pcmBuffer = AVAudioPCMBuffer(pcmFormat: to, frameCapacity: UInt32(Float(buffer.frameCapacity) / ratio))!
var error: NSError? = nil
let inputBlock: AVAudioConverterInputBlock = {inNumPackets, outStatus in
outStatus.pointee = .haveData
return buffer
}
formatConverter?.convert(to: pcmBuffer, error: &error, withInputFrom: inputBlock)
return pcmBuffer
}
private func audioBufferToBytes(_ audioBuffer: AVAudioPCMBuffer) -> [UInt8] {
let srcLeft = audioBuffer.int16ChannelData![0]
let bytesPerFrame = audioBuffer.format.streamDescription.pointee.mBytesPerFrame
let numBytes = Int(bytesPerFrame * audioBuffer.frameLength)
// initialize bytes by 0
var audioByteArray = [UInt8](repeating: 0, count: numBytes)
srcLeft.withMemoryRebound(to: UInt8.self, capacity: numBytes) { srcByteData in
audioByteArray.withUnsafeMutableBufferPointer {
$0.baseAddress!.initialize(from: srcByteData, count: numBytes)
}
}
return audioByteArray
}
private func bytesToAudioBuffer(_ buf: [UInt8]) -> AVAudioPCMBuffer {
let frameLength = UInt32(buf.count) / mPlayerInputFormat.streamDescription.pointee.mBytesPerFrame
let audioBuffer = AVAudioPCMBuffer(pcmFormat: mPlayerInputFormat, frameCapacity: frameLength)!
audioBuffer.frameLength = frameLength
let dstLeft = audioBuffer.int16ChannelData![0]
buf.withUnsafeBufferPointer {
let src = UnsafeRawPointer($0.baseAddress!).bindMemory(to: Int16.self, capacity: Int(frameLength))
dstLeft.initialize(from: src, count: Int(frameLength))
}
return audioBuffer
}
}
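For what it's worth, a common trigger for the '_engine->IsRunning()' exception (an assumption about this report, not a verified diagnosis) is starting the engine, or calling mPlayerNode.play(), before the AVAudioSession is configured and active. A defensive variant of startEngine() might look like this:

private func startEngine() {
    guard !mAudioEngine.isRunning else { return }
    // Hypothetical hardening: make sure the session is active before the engine starts.
    let session = AVAudioSession.sharedInstance()
    try? session.setCategory(.playAndRecord, options: [.defaultToSpeaker])
    try? session.setActive(true)
    // A zero-channel output format is the other known crash trigger (see above).
    if mAudioEngine.outputNode.outputFormat(forBus: 0).channelCount == 0 {
        mAudioEngine.reset()
    }
    do {
        try mAudioEngine.start()
    } catch {
        print("AVAudioEngine failed to start: \(error)")
    }
}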

RPScreenRecorder.shared().isAvailable is always false

I'm trying to record the screen with a sample iOS application, but it does not work because RPScreenRecorder.shared().isAvailable always returns false.
Here is my code:
ViewController.swift
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var StartRecordingButton: UIButton!
@IBOutlet weak var EndRecordingButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
StartRecordingButton.addTarget(self, action: #selector(startRecord(_:)), for: .touchUpInside)
EndRecordingButton.addTarget(self, action: #selector(stopRecord(_:)), for: .touchUpInside)
}
private lazy var recorder: ScreenRecorder = ScreenRecorder(configuration: ScreenRecorder.Configuration(), completion: {
(url, error) in
guard let url = url else {
fatalError("\(#function) record failed \(String(describing: error))")
}
debugPrint(#function, "success", url)
})
@objc func startRecord(_ sender: UIButton) {
recordStart()
}
@objc func stopRecord(_ sender: UIButton) {
recordStop()
}
private func recordStart() {
guard !recorder.isRecording else { return }
do {
try recorder.start()
} catch {
fatalError("start recording failed \(error)")
}
}
private func recordStop() {
guard recorder.isRecording else { return }
do {
try recorder.end()
} catch {
fatalError("finish recording failed \(error)")
}
}
}
ScreenRecorder.swift
import ReplayKit
import AVFoundation
import UIKit
@available(iOS 11.0, *)
public class ScreenRecorder: NSObject {
let screenRecorder = RPScreenRecorder.shared()
// Alias for arguments
public typealias Completion = (URL?, Error?) -> ()
let completion: Completion
let configuration: Configuration
public init(configuration: Configuration, completion: @escaping Completion) {
self.configuration = configuration
self.completion = completion
super.init()
}
// Start recording screen
public func start() throws {
print(screenRecorder.isAvailable)
guard screenRecorder.isAvailable else {
throw ScreenRecorderError.notAvailable
}
guard !screenRecorder.isRecording else {
throw ScreenRecorderError.alreadyRunning
}
try setUp()
assetWriter?.startWriting()
assetWriter?.startSession(atSourceTime: CMTime.zero)
screenRecorder.startCapture(handler: { [weak self] (cmSampleBuffer, rpSampleBufferType, error) in
if let error = error {
debugPrint(#function, "something happened", error)
}
if RPSampleBufferType.video == rpSampleBufferType {
self?.appendVideo(sampleBuffer: cmSampleBuffer)
}
}) { [weak self] (error) in
if let error = error {
self?.completion(nil, error)
}
}
}
public func end() throws {
guard screenRecorder.isRecording else {
throw ScreenRecorderError.notRunning
}
screenRecorder.stopCapture { [weak self] (error) in
if let error = error {
self?.completion(nil, error)
}
self?.videoAssetWriterInput?.markAsFinished()
self?.assetWriter?.finishWriting {
DispatchQueue.main.async {
self?.completion(self?.cacheFileURL, nil)
}
}
}
}
public var isRecording: Bool {
return screenRecorder.isRecording
}
private var startTime: CMTime?
private var assetWriter: AVAssetWriter?
private var videoAssetWriterInput: AVAssetWriterInput?
private var writerInputPixelBufferAdapter: AVAssetWriterInputPixelBufferAdaptor?
private func setUp() throws {
try createCacheDirectoryIfNeeded()
try removeOldCachedFile()
guard let cacheURL = cacheFileURL else {
throw ScreenRecorderError.invalidURL
}
let assetWriter = try AVAssetWriter(url: cacheURL, fileType: configuration.fileType)
let videoSettings: [String: Any] = [
AVVideoCodecKey: configuration.codec,
AVVideoWidthKey: UInt(configuration.videoSize.width),
AVVideoHeightKey: UInt(configuration.videoSize.height),
]
let videoAssetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
videoAssetWriterInput.expectsMediaDataInRealTime = true
if assetWriter.canAdd(videoAssetWriterInput) {
assetWriter.add(videoAssetWriterInput)
}
self.assetWriter = assetWriter
self.videoAssetWriterInput = videoAssetWriterInput
self.writerInputPixelBufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoAssetWriterInput, sourcePixelBufferAttributes: [
kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)
])
}
private func appendVideo(sampleBuffer: CMSampleBuffer) {
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
let firstTime: CMTime
if let startTime = self.startTime {
firstTime = startTime
} else {
firstTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
startTime = firstTime
}
let currentTime: CMTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let diffTime: CMTime = CMTimeSubtract(currentTime, firstTime)
if writerInputPixelBufferAdapter?.assetWriterInput.isReadyForMoreMediaData ?? false {
writerInputPixelBufferAdapter?.append(pixelBuffer, withPresentationTime: diffTime)
}
}
private func createCacheDirectoryIfNeeded() throws {
guard let cacheDirectoryURL = cacheDirectoryURL else { return }
let fileManager = FileManager.default
guard !fileManager.fileExists(atPath: cacheDirectoryURL.path) else { return }
try fileManager.createDirectory(at: cacheDirectoryURL, withIntermediateDirectories: true, attributes: nil)
}
private func removeOldCachedFile() throws {
guard let cacheURL = cacheFileURL else { return }
let fileManager = FileManager.default
guard fileManager.fileExists(atPath: cacheURL.path) else { return }
try fileManager.removeItem(at: cacheURL)
}
private var cacheDirectoryURL: URL? = {
guard let path = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first else {
return nil
}
return URL(fileURLWithPath: path).appendingPathComponent("ScreenRecorder")
}()
private var cacheFileURL: URL? {
guard let cacheDirectoryURL = cacheDirectoryURL else { return nil }
return cacheDirectoryURL.appendingPathComponent("screenrecord.mp4")
}
}
@available(iOS 11.0, *)
extension ScreenRecorder {
public struct Configuration{
public var codec: AVVideoCodecType = .h264
public var fileType: AVFileType = .mp4
public var videoSize: CGSize = CGSize(
width: UIScreen.main.bounds.width,
height: UIScreen.main.bounds.height
)
public var audioQuality: AVAudioQuality = .medium
public var audioFormatID: AudioFormatID = kAudioFormatMPEG4AAC
public var numberOfChannels: UInt = 2
public var sampleRate: Double = 44100.0
public var bitrate: UInt = 16
public init() {}
}
public enum ScreenRecorderError: Error {
case notAvailable
case alreadyRunning
case notRunning
case invalidURL
}
}
And it shows the fatal error I wrote:
ios_record_screen[1258:213516] Fatal error: start recording failed notAvailable
I've enabled screen recording in the Settings app on my iPhone 8, and also tried it on my friend's iPhone X, but neither phone works.
I could not find helpful information on the internet.
Hoping for some help.
I hope everyone who struggled with this before has already resolved it.
In my case,
override func viewDidLoad()
needed
RPScreenRecorder.shared().delegate = self
along with the delegate extension that goes with it.
I was implementing RPScreenRecorder in a new view. It was already working normally in other views, and in the new view I encountered the same problem as the author.
Comparing against the working views, the difference turned out to be that the delegate was never set.
Hope this helps anyone who finds this page in the future.
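A minimal sketch of that setup, assuming the ViewController from the question (ViewController.swift also needs import ReplayKit; screenRecorderDidChangeAvailability is the standard RPScreenRecorderDelegate callback):

override func viewDidLoad() {
    super.viewDidLoad()
    // Without this line, isAvailable stayed false in the new view.
    RPScreenRecorder.shared().delegate = self
}

extension ViewController: RPScreenRecorderDelegate {
    func screenRecorderDidChangeAvailability(_ screenRecorder: RPScreenRecorder) {
        // React when availability flips, e.g. enable or disable the record buttons.
        print("isAvailable:", screenRecorder.isAvailable)
    }
}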

Recording videos with real-time filters in Swift

I am new to Swift and trying to build a camera app that can apply real-time filters and save the video with the filters applied.
So far I can preview in real time with the filters applied, but when I save the video it is all black.
import UIKit
import AVFoundation
import AssetsLibrary
import CoreMedia
import Photos
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
var captureSession: AVCaptureSession!
@IBOutlet weak var previewView: UIView!
@IBOutlet weak var recordButtton: UIButton!
@IBOutlet weak var imageView: UIImageView!
var assetWriter: AVAssetWriter?
var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor?
var isWriting = false
var currentSampleTime: CMTime?
var currentVideoDimensions: CMVideoDimensions?
override func viewDidLoad() {
super.viewDidLoad()
FilterVendor.register()
setupCaptureSession()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
func setupCaptureSession() {
let captureSession = AVCaptureSession()
captureSession.sessionPreset = AVCaptureSessionPresetPhoto
guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo), let input = try? AVCaptureDeviceInput(device: captureDevice) else {
print("Can't access the camera")
return
}
if captureSession.canAddInput(input) {
captureSession.addInput(input)
}
let videoOutput = AVCaptureVideoDataOutput()
videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
if captureSession.canAddOutput(videoOutput) {
captureSession.addOutput(videoOutput)
}
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
if((previewLayer) != nil) {
view.layer.addSublayer(previewLayer!)
}
captureSession.startRunning()
}
@IBAction func record(_ sender: Any) {
if isWriting {
print("stop record")
self.isWriting = false
assetWriterPixelBufferInput = nil
assetWriter?.finishWriting(completionHandler: {[unowned self] () -> Void in
self.saveMovieToCameraRoll()
})
} else {
print("start record")
createWriter()
assetWriter?.startWriting()
assetWriter?.startSession(atSourceTime: currentSampleTime!)
isWriting = true
}
}
func saveMovieToCameraRoll() {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.movieURL() as URL)
}) { saved, error in
if saved {
print("saved")
}
}
}
func movieURL() -> NSURL {
let tempDir = NSTemporaryDirectory()
let url = NSURL(fileURLWithPath: tempDir).appendingPathComponent("tmpMov.mov")
return url! as NSURL
}
func checkForAndDeleteFile() {
let fm = FileManager.default
let url = movieURL()
let exist = fm.fileExists(atPath: url.path!)
if exist {
do {
try fm.removeItem(at: url as URL)
} catch let error as NSError {
print(error.localizedDescription)
}
}
}
func createWriter() {
self.checkForAndDeleteFile()
do {
assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileTypeQuickTimeMovie)
} catch let error as NSError {
print(error.localizedDescription)
return
}
let outputSettings = [
AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : Int(currentVideoDimensions!.width),
AVVideoHeightKey : Int(currentVideoDimensions!.height)
] as [String : Any]
let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings as? [String : AnyObject])
assetWriterVideoInput.expectsMediaDataInRealTime = true
assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat(M_PI / 2.0))
let sourcePixelBufferAttributesDictionary = [
String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA),
String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width),
String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height),
String(kCVPixelFormatOpenGLESCompatibility) : kCFBooleanTrue
] as [String : Any]
assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
if assetWriter!.canAdd(assetWriterVideoInput) {
assetWriter!.add(assetWriterVideoInput)
} else {
print("no way\(assetWriterVideoInput)")
}
}
func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
autoreleasepool {
connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)
let filter = CIFilter(name: "Fİlter")!
filter.setValue(cameraImage, forKey: kCIInputImageKey)
let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
if self.isWriting {
if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
var newPixelBuffer: CVPixelBuffer? = nil
CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)
let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)
if success == false {
print("Pixel Buffer failed")
}
}
}
DispatchQueue.main.async {
if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
let filteredImage = UIImage(ciImage: outputValue)
self.imageView.image = filteredImage
}
}
}
}
}
I've added some comments to the critical part below:
func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
autoreleasepool {
connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;
// COMMENT: This line makes sense - this is your pixelbuffer from the camera.
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
// COMMENT: OK, so you turn pixelBuffer into a CIImage...
let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)
// COMMENT: And now you've created a CIImage with a filter instruction...
let filter = CIFilter(name: "Fİlter")!
filter.setValue(cameraImage, forKey: kCIInputImageKey)
let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
if self.isWriting {
if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
// COMMENT: Here's where it gets weird. You've declared a new, empty pixelBuffer... but you already have one (pixelBuffer) that contains the image you want to write...
var newPixelBuffer: CVPixelBuffer? = nil
// COMMENT: And you grabbed memory from the pool.
CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)
// COMMENT: And now you wrote an empty pixelBuffer back <-- this is what's causing the black frame.
let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)
if success == false {
print("Pixel Buffer failed")
}
}
}
// COMMENT: And now you're sending the filtered image back to the screen.
DispatchQueue.main.async {
if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
let filteredImage = UIImage(ciImage: outputValue)
self.imageView.image = filteredImage
}
}
}
}
It looks to me like you're basically grabbing the camera image, creating a filtered copy, then making a NEW pixel buffer, which is empty, and writing that out.
If you write the pixelBuffer you grabbed instead of the new one you're creating, you'll successfully write the image (unfiltered).
What you need in order to write out the filtered video is to create a new CVPixelBuffer from a CIImage - that solution already exists here on Stack Overflow; I know because I needed that step myself!
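That missing step looks roughly like this (a sketch; the ciContext property and the render(_:with:at:) helper are placeholder names, not part of the question's code):

// Created once, e.g. as a property - creating a CIContext per frame is expensive.
let ciContext = CIContext()

func render(_ image: CIImage, with adaptor: AVAssetWriterInputPixelBufferAdaptor, at time: CMTime) {
    guard adaptor.assetWriterInput.isReadyForMoreMediaData,
        let pool = adaptor.pixelBufferPool else { return }
    var renderedBuffer: CVPixelBuffer? = nil
    CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &renderedBuffer)
    guard let buffer = renderedBuffer else { return }
    // Draw the filtered CIImage into the fresh pixel buffer instead of appending it empty.
    ciContext.render(image, to: buffer)
    adaptor.append(buffer, withPresentationTime: time)
}

In the delegate you would then pass the filter's output image (the same CIImage you push to the screen) together with self.currentSampleTime, instead of appending the untouched newPixelBuffer.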

EZAudio doesn't work: Thread 1 EXC_BAD_ACCESS while creating EZRecorder instance

My complete implementation of EZAudio:
import UIKit
import AVFoundation
import EZAudio

class ViewController: UIViewController, EZMicrophoneDelegate, EZRecorderDelegate {
@IBOutlet var recordingAudioPlot: EZAudioPlot!
private var isRecording = false {
didSet {
if isRecording {
player.pause()
recordingAudioPlot.clear()
microphone.startFetchingAudio()
recorder = EZRecorder(url: filePathUrl(), clientFormat: microphone.audioStreamBasicDescription(), fileType: EZRecorderFileType.M4A, delegate: self)
// ** Here is where the error occurs **
} else {
recorder.delegate = nil
microphone.stopFetchingAudio()
recorder.closeAudioFile()
player.playAudioFile(EZAudioFile(url: filePathUrl()))
}
}
}
private var microphone = EZMicrophone()
private var recorder = EZRecorder()
private var player = EZAudioPlayer()
@IBAction func startStopRecordingButtonTapped(_ sender: UIButton) {
isRecording = !isRecording
}
override func viewDidLoad() {
super.viewDidLoad()
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSessionCategoryPlayAndRecord)
try! session.setActive(true)
microphone.delegate = self
try! session.overrideOutputAudioPort(.speaker)
}
func microphone(_ microphone: EZMicrophone!, hasAudioReceived buffer: UnsafeMutablePointer<UnsafeMutablePointer<Float>?>!, withBufferSize bufferSize: UInt32, withNumberOfChannels numberOfChannels: UInt32) {
DispatchQueue.main.async {
self.recordingAudioPlot.updateBuffer(buffer[0], withBufferSize: bufferSize)
}
}
func microphone(_ microphone: EZMicrophone!, hasBufferList bufferList: UnsafeMutablePointer<AudioBufferList>!, withBufferSize bufferSize: UInt32, withNumberOfChannels numberOfChannels: UInt32) {
if isRecording {
recorder.appendData(from: bufferList, withBufferSize: bufferSize)
}
}
private func filePathUrl() -> URL {
let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first ?? ""
return URL(fileURLWithPath: String(format: "%@/%@", path, "pathtofile.m4a"))
}
}
The error is the EXC_BAD_ACCESS from the title (the question included a screenshot). What goes wrong?
The solution is to declare recorder as an optional instead of eagerly creating an instance:
private var recorder: EZRecorder?
Something goes wrong the first time the default-initialized recorder is deallocated on reassignment; with the optional it starts out as nil, so the error no longer occurs.
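A minimal sketch of how the recording flow changes with the optional (same EZRecorder calls as in the question, just behind optional chaining):

private var recorder: EZRecorder?

// start
recorder = EZRecorder(url: filePathUrl(),
                      clientFormat: microphone.audioStreamBasicDescription(),
                      fileType: EZRecorderFileType.M4A,
                      delegate: self)

// stop
recorder?.delegate = nil
recorder?.closeAudioFile()
recorder = nil // releases the file; the next start creates a fresh instance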

How to draw a rectangular shape on the camera and capture only the portion inside the rectangle, in Swift

I am working in Swift. My requirement is to create a rectangular area on the
camera, capture only the portion that is inside the rectangle,
and display the remaining portion as blurred.
I tried many links, but most of them are in Obj-C. I know I have to add a UI layer over the AVCapture layer.
This
[Click Here] link helped me, but I could not achieve my objective.
I tried reducing the size of the image view in the storyboard, but then the camera squeezes the whole image into that small image view.
Here is my existing code for the camera:
import AVFoundation

class VideoFeedMicr: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate
{
let outputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
let device: AVCaptureDevice? = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
var camera: AVCaptureDevice? = nil
for device in devices {
if device.position == .Back {
camera = device
}
}
return camera
}()
var input: AVCaptureDeviceInput? = nil
var delegate: VideoFeedDelegateMicr? = nil
let session: AVCaptureSession = {
let session = AVCaptureSession()
session.sessionPreset = AVCaptureSessionPresetHigh
return session
}()
let videoDataOutput: AVCaptureVideoDataOutput = {
let output = AVCaptureVideoDataOutput()
output.videoSettings = [ kCVPixelBufferPixelFormatTypeKey: NSNumber(unsignedInt: kCMPixelFormat_32BGRA) ]
output.alwaysDiscardsLateVideoFrames = true
return output
}()
func start() throws {
var error: NSError! = NSError(domain: "Migrator", code: 0, userInfo: nil)
do {
try configure()
session.startRunning()
return
} catch let error1 as NSError {
error = error1
}
throw error
}
func stop() {
session.stopRunning()
}
private func configure() throws {
var error: NSError! = NSError(domain: "Migrator", code: 0, userInfo: nil)
do {
let maybeInput: AnyObject = try AVCaptureDeviceInput(device: device!)
input = maybeInput as? AVCaptureDeviceInput
if session.canAddInput(input) {
session.addInput(input)
videoDataOutput.setSampleBufferDelegate(self, queue: outputQueue);
if session.canAddOutput(videoDataOutput) {
session.addOutput(videoDataOutput)
let connection = videoDataOutput.connectionWithMediaType(AVMediaTypeVideo)
connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
return
} else {
print("Video output error.");
}
} else {
print("Video input error. Maybe unauthorised or no camera.")
}
} catch let error1 as NSError {
error = error1
print("Failed to start capturing video with error: \(error)")
}
throw error
}
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
// Update the delegate
if delegate != nil {
delegate!.videoFeedMicr(self, didUpdateWithSampleBuffer: sampleBuffer)
}
}
}
Here is where I call it:
import UIKit

class ViewMicrScanactivity: UIViewController, VideoFeedDelegateMicr
{
// @IBOutlet weak var button: UIButton!
@IBOutlet weak var button: UIButton!
// @IBOutlet weak var imageView: UIImageView!
let feed: VideoFeedMicr = VideoFeedMicr()
var chequefound :Bool = false;
var accountnumber = ""
var amountlimit = ""
@IBOutlet weak var chequeimage: UIImageView!
override func viewDidLoad()
{
super.viewDidLoad()
let value = UIInterfaceOrientation.LandscapeRight.rawValue
UIDevice.currentDevice().setValue(value, forKey: "orientation")
}
override func shouldAutorotate() -> Bool {
return true;
}
override func awakeFromNib() {
super.awakeFromNib()
feed.delegate = self
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
startVideoFeed()
}
override func viewDidDisappear(animated: Bool) {
super.viewDidDisappear(animated)
feed.stop()
}
func startVideoFeed() {
do {
try feed.start()
print("Video started.")
}
catch {
// alert?
// need to look into device permissions
}
}
func videoFeedMicr(videoFeed: VideoFeedMicr, didUpdateWithSampleBuffer sampleBuffer: CMSampleBuffer!)
{
let filter = FaceObscurationFilterMicr(sampleBuffer: sampleBuffer)
if(!chequefound)
{
chequefound = filter.process()
dispatch_async(dispatch_get_main_queue(), { () -> Void in
self.chequeimage.image = filter.inputuiimage!
if(self.chequefound)
{
filter.cropmicr = filter.cropToBounds(filter.inputuiimage! , X:0.0 , Y:Double(filter.inputuiimage!.size.height) - Double(90.0) ,width:Double(filter.inputuiimage!.size.width) , height:Double(60.0));
self.chequeimage.image = filter.cropmicr
// let image = UIImage(named: filter.cropmicr )
//let scaledImage = scaleImage(image!, maxDimension: 640)
self.performImageRecognitionnew(filter.cropmicr!)
}
// self.chequeimage.image = filter.cropmicr!
})
}
else
{
print("chequefound = true")
}
}
}
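For the blur-outside-the-rectangle part, one common approach (a sketch in current Swift, unlike the Swift 2 code above; the holeRect values are placeholders) is to lay a translucent overlay over the preview and punch a clear hole in it with an even-odd mask:

// A dimming overlay over the camera preview; only the cut-out rectangle stays clear.
let overlay = UIView(frame: view.bounds)
overlay.backgroundColor = UIColor.black.withAlphaComponent(0.6)
overlay.isUserInteractionEnabled = false
view.addSubview(overlay)

// Outer rect plus inner rect, filled even-odd, masks everything except the hole.
let holeRect = CGRect(x: 40, y: 100, width: view.bounds.width - 80, height: 120)
let path = UIBezierPath(rect: overlay.bounds)
path.append(UIBezierPath(rect: holeRect))
let mask = CAShapeLayer()
mask.path = path.cgPath
mask.fillRule = .evenOdd
overlay.layer.mask = mask

For a true blur rather than a dim, the same masking idea should work with a UIVisualEffectView (masked via its view-level mask property). Cropping the captured frame to that same rectangle is then a separate step on the image, similar to the cropToBounds call already used above.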
