I'm using AVFoundation for video recording. I also use CXCallObserverDelegate to listen for when a phone call is disconnected.
1. I send the app to the background.
2. I make a phone call.
3. While the phone call is active, I bring the app back to the foreground and press the button to modally present the view controller that contains the AVFoundation code.
4. Once the view controller is on screen, and because I'm currently on a phone call, the .audioDeviceInUseByAnotherClient interruption fires and I stop the capture session.
5. Once the phone call is disconnected, CXCallObserver reports call.hasEnded and I restart the capture session. The .sessionInterruptionEnded handler also gets called, but this isn't causing the issue.
6. This is where the problem occurs. Once call.hasEnded fires, .audioDeviceInUseByAnotherClient gets called again. Since the code to stop the capture session is in there, the capture session stops again.
In step 6, why does .audioDeviceInUseByAnotherClient get called again after the call has been disconnected?
@objc func sessionWasInterrupted(notification: NSNotification) {
    if let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?,
       let reasonIntegerValue = userInfoValue.integerValue,
       let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) {
        switch reason {
        case .audioDeviceInUseByAnotherClient:
            stopCaptureSession()
        default:
            break
        }
    }
}
@objc func sessionInterruptionEnded(notification: NSNotification) {
    print("-----Capture session interruption ended")
    restartCaptureSession()
}
func callObserver(_ callObserver: CXCallObserver, callChanged call: CXCall) {
    if call.hasEnded == true {
        print("\nCXCallState :Disconnected")
        restartCaptureSession()
    }
    if call.hasConnected == true && call.hasEnded == false {
        print("\nCXCallState : Connected")
        // *** THIS NEVER GETS CALLED IN THIS SCENARIO ***
    }
}
fileprivate func stopCaptureSession() {
    if captureSession.isRunning {
        DispatchQueue.global(qos: .background).async { [weak self] in
            DispatchQueue.main.sync {
                self?.captureSession.stopRunning()
            }
            DispatchQueue.main.async {
                self?.previewLayer?.removeFromSuperlayer()
                self?.previewLayer = nil
            }
        }
    }
}
func restartCaptureSession() {
    if !captureSession.isRunning {
        DispatchQueue.global(qos: .background).async { [weak self] in
            DispatchQueue.main.sync {
                self?.captureSession.startRunning()
            }
            DispatchQueue.main.asyncAfter(deadline: .now() + 1.5) {
                if let safeSelf = self {
                    if safeSelf.previewLayer == nil {
                        safeSelf.previewLayer = AVCaptureVideoPreviewLayer(session: safeSelf.captureSession)
                        safeSelf.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
                        guard let previewLayer = safeSelf.previewLayer else { return }
                        previewLayer.frame = safeSelf.containerViewForPreviewLayer.bounds
                        safeSelf.containerViewForPreviewLayer.layer.insertSublayer(previewLayer, at: 0)
                    }
                }
            }
        }
    }
}
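For reference, these handlers are presumably wired up with NotificationCenter along these lines (an assumption about setup code the question doesn't show; the selector-based API is also why the handlers above are marked @objc):

NotificationCenter.default.addObserver(self,
                                       selector: #selector(sessionWasInterrupted),
                                       name: .AVCaptureSessionWasInterrupted,
                                       object: captureSession)
NotificationCenter.default.addObserver(self,
                                       selector: #selector(sessionInterruptionEnded),
                                       name: .AVCaptureSessionInterruptionEnded,
                                       object: captureSession)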
I'm completely new to Swift programming and I don't know how to make this work.
Basically, I have an iOS app that observes incoming phone calls and declines them automatically if the caller's number matches a user-defined number to decline, without showing the user any notification (this app is only for my private use).
Currently I have this code, which observes incoming calls. It works like a charm, but I don't know how to make it decline the call (for now, all phone numbers).
import CallKit

class CallTest: NSObject, CXCallObserverDelegate, ObservableObject {
    @Published var incomingCallNumber: String?
    let callObserver = CXCallObserver()

    override init() {
        super.init()
        callObserver.setDelegate(self, queue: nil)
    }

    func callObserver(_ callObserver: CXCallObserver, callChanged call: CXCall) {
        if (call.isOutgoing == false && call.hasConnected == false) {
            print("Incoming call")
        }
    }
}
I've tried to decline the incoming call like this:
import CallKit

class CallTest: NSObject, CXCallObserverDelegate, ObservableObject {
    @Published var incomingCallNumber: String?
    let callObserver = CXCallObserver()

    override init() {
        super.init()
        callObserver.setDelegate(self, queue: nil)
    }

    func callObserver(_ callObserver: CXCallObserver, callChanged call: CXCall) {
        if (call.isOutgoing == false && call.hasConnected == false) {
            print("Incoming call")
            let callController = CXCallController()
            let endCallAction = CXEndCallAction(call: call.uuid)
            let transaction = CXTransaction(action: endCallAction)
            callController.request(transaction) { error in
                if let error = error {
                    print("Error declining call: \(error)")
                } else {
                    print("Call declined successfully")
                }
            }
        }
    }
}
But I keep getting this error:
Error Domain=com.apple.CallKit.error.requesttransaction Code=2 "(null)"
which means 'unknownCallProvider', but I don't know how to set up a provider for this.
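For context, a CXProvider is normally configured roughly like this (a hedged sketch; the CallProviderManager type is made up for illustration). Note that CXCallController transactions generally succeed only for calls that were reported through your own CXProvider, i.e. your app's own VoIP calls, which is why ending a system cellular call this way is rejected with unknownCallProvider.

import CallKit

// Hypothetical provider setup, only relevant for calls the app itself reports (VoIP).
final class CallProviderManager: NSObject, CXProviderDelegate {
    private let provider: CXProvider

    override init() {
        let configuration = CXProviderConfiguration(localizedName: "CallTest")
        configuration.supportsVideo = false
        provider = CXProvider(configuration: configuration)
        super.init()
        provider.setDelegate(self, queue: nil)
    }

    // Required by CXProviderDelegate.
    func providerDidReset(_ provider: CXProvider) { }

    // Called when a CXEndCallAction targets a call this provider reported.
    func provider(_ provider: CXProvider, perform action: CXEndCallAction) {
        action.fulfill()
    }
}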
I've implemented two functions in my view controller (setupRemoteTransportControls() and setupNowPlaying()) and added one function to my AppDelegate, but I'm still unable to see my app's background audio controls on the lock screen, and the audio interruption function isn't working either. The audio is a live stream from a URL, as you can see in the code, and I have enabled background audio in the project's general settings.
What I would like to do is show the artist, title, and albumArt labels and a UIImage on the Remote Command Center (the labels and UIImage are taken from my station's API), but I got stuck just getting the command center to display. Here is my code:
import UIKit
import AVKit
import MediaPlayer

class ViewController: UIViewController, AVAudioPlayerDelegate {

    @IBAction func buttonPressed(_ sender: UIButton) {
        if isPlaying {
            player.pause()
            sender.setImage(playImage, for: .normal)
        } else {
            let url = "https://myradio.com/radio.mp3"
            do {
                try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
                try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, options: [])
                print("Playback OK")
                try AVAudioSession.sharedInstance().setActive(true)
                print("Session is Active")
            } catch {
                print(error)
            }
            player = AVPlayer(url: URL(string: url)!)
            player.volume = 1.0
            player.rate = 1.0
            player.play()
            sender.setImage(pauseImage, for: .normal)
        }
        isPlaying.toggle()
    }
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        overrideUserInterfaceStyle = .light
        setupRemoteTransportControls()
        requestNowPlaying()
        setupNowPlaying()
    }
    // Here is the API data downloading part, so I skipped it

    // Command Center audio controls
    func setupRemoteTransportControls() {
        // Get the shared MPRemoteCommandCenter
        let commandCenter = MPRemoteCommandCenter.shared()

        // Add handler for Play Command
        commandCenter.playCommand.addTarget { [unowned self] event in
            if self.player.rate == 1.0 {
                self.player.play()
                return .success
            }
            return .commandFailed
        }

        // Add handler for Pause Command
        commandCenter.pauseCommand.addTarget { [unowned self] event in
            if self.player.rate == 1.0 {
                self.player.pause()
                return .success
            }
            return .commandFailed
        }
    }
    func setupNowPlaying() {
        // Define Now Playing Info
        var nowPlayingInfo = [String: Any]()
        nowPlayingInfo[MPMediaItemPropertyTitle] = "Test"
        if let image = UIImage(named: "Default_albumart") {
            nowPlayingInfo[MPMediaItemPropertyArtwork] = MPMediaItemArtwork(boundsSize: image.size) { size in
                return image
            }
        }
        nowPlayingInfo[MPNowPlayingInfoPropertyIsLiveStream] = true

        // Set the metadata
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
    }

    func updateNowPlaying(isPause: Bool) {
        // Define Now Playing Info
        let nowPlayingInfo = MPNowPlayingInfoCenter.default().nowPlayingInfo!
        //nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player.currentTime
        //nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = isPause ? 0 : 1

        // Set the metadata
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
    }
    // Audio interruption
    @objc func handleInterruption(notification: Notification) {
        guard let userInfo = notification.userInfo,
              let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
              let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
            return
        }

        // Switch over the interruption type.
        switch type {
        case .began:
            print("Interruption began")
        case .ended:
            // An interruption ended. Resume playback, if appropriate.
            guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
            let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
            if options.contains(.shouldResume) {
                player.play()
            } else {
                // An interruption ended. Don't resume playback.
            }
        default: ()
        }
    }
}
Here's what I've added to my AppDelegate.swift:
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
    application.beginReceivingRemoteControlEvents()
    // Override point for customization after application launch.
    return true
}
Remove .mixWithOthers from your category options.
I think the reasoning is that only the primary iOS audio app can control the remote controls on the lock screen; .mixWithOthers is for secondary audio apps.
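In code that would look something like this (a sketch based on the buttonPressed setup above; I've also dropped .allowAirPlay, which as far as I know only applies to the .playAndRecord category):

// No .mixWithOthers, so the app is treated as the primary audio app.
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [])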
Identify yourself as a .longForm audio content provider with this code:
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, mode: AVAudioSessionModeDefault, routeSharingPolicy: .longForm)
For the whole AirPlay 2 implementation, check this Apple WWDC presentation: https://developer.apple.com/videos/play/wwdc2017/509/
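As for showing the artist, title, and artwork from the station API on the lock screen, here is a minimal sketch of updating the now-playing metadata (the artist, title, and albumArt parameters are placeholders for whatever your API returns):

func updateNowPlayingInfo(artist: String, title: String, albumArt: UIImage) {
    var nowPlayingInfo = [String: Any]()
    nowPlayingInfo[MPMediaItemPropertyArtist] = artist
    nowPlayingInfo[MPMediaItemPropertyTitle] = title
    nowPlayingInfo[MPMediaItemPropertyArtwork] =
        MPMediaItemArtwork(boundsSize: albumArt.size) { _ in albumArt }
    nowPlayingInfo[MPNowPlayingInfoPropertyIsLiveStream] = true
    MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}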
I am not quite sure if I am missing a function or something, but when a user's phone rings, or they ask Siri something, or anything else stops my app's audio from playing, it does not restart my app's playback once the user has finished their task.
Is there a function I am missing, or can iOS apps simply not do this?
I thought it would be something to do with:
func setupRemoteTransportControls() {
    // Get the shared MPRemoteCommandCenter
    let commandCenter = MPRemoteCommandCenter.shared()

    // Add handler for Play Command
    commandCenter.playCommand.addTarget { [unowned self] event in
        if self.player?.rate == 0.0 {
            self.player?.play()
            return .success
        }
        return .commandFailed
    }

    // Add handler for Pause Command
    commandCenter.pauseCommand.addTarget { [unowned self] event in
        if self.player?.rate == 1.0 {
            self.player?.pause()
            return .success
        }
        return .commandFailed
    }

    // self.nowplaying(artist: "Anna", song: "test")
}
I have found that I need to add this part but how do I call it?
func handleInterruption(notification: Notification) {
    guard let userInfo = notification.userInfo,
          let interruptionTypeRawValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
          let interruptionType = AVAudioSession.InterruptionType(rawValue: interruptionTypeRawValue) else {
        return
    }
    switch interruptionType {
    case .began:
        print("interruption began")
    case .ended:
        print("interruption ended")
    default:
        print("UNKNOWN")
    }
}
You need to set your audio session category to AVAudioSession.Category.playback. If you don't set it, you get the default category, .soloAmbient.
You can set the category in didFinishLaunchingWithOptions, e.g.:
func application(_ application: UIApplication,
                 didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
    // Get the singleton instance.
    let audioSession = AVAudioSession.sharedInstance()
    do {
        // Set the audio session category, mode, and options.
        try audioSession.setCategory(.playback, mode: .default, options: [])
    } catch {
        print("Failed to set audio session category.")
    }
    // Other post-launch configuration.
    return true
}
You will also need to implement interruption observation:
func setupNotifications() {
    // Get the default notification center instance.
    let nc = NotificationCenter.default
    nc.addObserver(self,
                   selector: #selector(handleInterruption),
                   name: AVAudioSession.interruptionNotification,
                   object: nil)
}

@objc func handleInterruption(notification: Notification) {
    // To be implemented.
}
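A sketch of how that handler could be filled in, assuming an optional player property like the one in the question; playback is resumed only when the system passes the .shouldResume option:

@objc func handleInterruption(notification: Notification) {
    guard let userInfo = notification.userInfo,
          let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
          let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
        return
    }

    switch type {
    case .began:
        // The system has already paused the audio; update any UI state here.
        break
    case .ended:
        guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
        let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
        if options.contains(.shouldResume) {
            player?.play()
        }
    default:
        break
    }
}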
I have been using ReplayKit through all past updates, but now with iOS 12 my recordings sometimes work and sometimes don't... usually they don't. Most of the time when I stop the recording, this is what I get: a completely black screen.
This hasn't happened to me before, and it is extremely frustrating. This is how I use ReplayKit to record the screen:
import ReplayKit

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, UIImagePickerControllerDelegate, UINavigationControllerDelegate, RPPreviewViewControllerDelegate {

    func startRecording() {
        func start() {
            guard RPScreenRecorder.shared().isAvailable else {
                print("Recording is not available at this time.")
                return
            }
            RPScreenRecorder.shared().isMicrophoneEnabled = micToggle
            RPScreenRecorder.shared().startRecording { [unowned self] (error) in
                guard error == nil else {
                    print("There was an error starting the recording.")
                    return
                }
                print("Started Recording Successfully")
                isRecording = true
            }
        }

        DispatchQueue.main.async {
            start()
        }
    }

    func stopRecording() {
        func stop() {
            RPScreenRecorder.shared().stopRecording { [unowned self] (preview, error) in
                print("Stopped recording")
                guard preview != nil else {
                    print("Preview controller is not available.")
                    return
                }
                onGoingScene = true
                preview?.previewControllerDelegate = self
                self.present(preview!, animated: true, completion: nil)
                print("presented")
                isRecording = false
            }
        }

        DispatchQueue.main.async {
            stop()
        }
    }

    func previewControllerDidFinish(_ previewController: RPPreviewViewController) {
        previewController.dismiss(animated: true, completion: nil)
        RPScreenRecorder.shared().discardRecording {
            print("discarded")
        }
    }
}
When it works, all the print statements are printed, but when the black screen appears the last print statement is "presented".
I am absolutely desperate for some help because I have no idea how to get around this. ANY help would be much appreciated.
THANKS
Edit:
I just realised that I am using an AVCaptureVideoPreviewLayer; could that be the issue? If so, what's the fix?
Here are the start and stop functions.
@IBAction func startRecordingAction(sender: AnyObject) {
    activityView.hidden = false

    // start recording
    recorder.startRecordingWithMicrophoneEnabled(true) { [unowned self] (error) in
        dispatch_async(dispatch_get_main_queue()) { [unowned self] in
            self.activityView.hidden = true
        }

        if let error = error {
            print("Failed start recording: \(error.localizedDescription)")
            return
        }

        print("Start recording")
        self.buttonEnabledControl(true)
    }
}

@IBAction func stopRecordingAction(sender: AnyObject) {
    activityView.hidden = false

    // end recording
    recorder.stopRecordingWithHandler({ [unowned self] (previewViewController, error) in
        dispatch_async(dispatch_get_main_queue()) {
            self.activityView.hidden = true
        }

        self.buttonEnabledControl(false)

        if let error = error {
            print("Failed stop recording: \(error.localizedDescription)")
            return
        }

        print("Stop recording")
        previewViewController?.previewControllerDelegate = self

        dispatch_async(dispatch_get_main_queue()) { [unowned self] in
            // show preview window
            self.presentViewController(previewViewController!, animated: true, completion: nil)
        }
    })
}

func screenRecorderDidChangeAvailability(screenRecorder: RPScreenRecorder) {
    let availability = screenRecorder.available
    print("Availability: \(availability)\n")
}

// MARK: - RPPreviewViewControllerDelegate
// called when preview is finished
func previewControllerDidFinish(previewController: RPPreviewViewController) {
    print("Preview finish")
    dispatch_async(dispatch_get_main_queue()) { [unowned previewController] in
        // close preview window
        previewController.dismissViewControllerAnimated(true, completion: nil)
    }
}
I want to record only a part of the screen, and I want to show a custom alert instead of ReplayKit's standard alert message. I can use third-party pods, no problem.
Maybe you can advise me of a different way, without ReplayKit.
Unfortunately, you cannot record a particular UIView for now using ReplayKit.
For recording a particular view, here are some alternatives:
https://github.com/wess/Glimpse
https://github.com/adam-roth/screen-cap-view
https://github.com/andydrizen/UIViewRecorder
Hope this helps you out.
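Broadly, view-recording approaches like these snapshot the target view on a timer and feed the resulting frames to an AVAssetWriter. A minimal sketch of just the snapshot step (the snapshot(of:) helper is hypothetical, not taken from any of the linked projects):

import UIKit

// Captures the current contents of a single view as a UIImage.
// Frames produced this way can then be appended to an AVAssetWriter to build a video.
func snapshot(of view: UIView) -> UIImage {
    let renderer = UIGraphicsImageRenderer(bounds: view.bounds)
    return renderer.image { _ in
        view.drawHierarchy(in: view.bounds, afterScreenUpdates: false)
    }
}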