HKWorkoutSession is not running consistently - iOS

On watchOS, I am just trying to get the heart rate (HR) value in real time while a workout session is running.
func startHeartRateQuery(updateHandler: @escaping ([HKQuantitySample]?) -> Void) {
guard let quantityType = HKObjectType.quantityType(forIdentifier: .heartRate) else {
return
}
let heartRateQuery = HKAnchoredObjectQuery(type: quantityType, predicate: nil, anchor: anchor, limit: Int(HKObjectQueryNoLimit)) { (query, sampleObjects, deletedObjects, newAnchor, error) -> Void in
guard let newAnchor = newAnchor else {return}
self.anchor = newAnchor
updateHandler(sampleObjects as? [HKQuantitySample])
}
heartRateQuery.updateHandler = {(query, samples, deleteObjects, newAnchor, error) -> Void in
self.anchor = newAnchor!
updateHandler(samples as? [HKQuantitySample])
}
healthStore.execute(heartRateQuery)
activeDataQueries.append(heartRateQuery)
}
And this is how I start the workout session.
func session(_ session: WCSession, didReceiveUserInfo userInfo: [String : Any] = [:]) {
guard let d = delegate else {
return
}
if let u = userInfo["status"] as? String {
d.receivedWorkoutRunningStatus(u)
}
if let c = userInfo["clock"] as? String {
d.receiveWorkoutRestPeriodClock(c)
}
}
You can find the receivedWorkoutRunningStatus function in InterfaceController below.
This is InterfaceController.swift, which is the first screen of the watch app.
class InterfaceController: WKInterfaceController, HKWorkoutSessionDelegate {
@IBOutlet var lblHeartRate: WKInterfaceLabel!
@IBOutlet var lblSplitIntervalNumber: WKInterfaceLabel!
@IBOutlet var lblRestPeriodClock: WKInterfaceLabel!
private let healthStoreManager = WatchHealthKitManager()
private let parentConnector = ParentConnector()
private var workoutSession: HKWorkoutSession!
override func awake(withContext context: Any?) {
super.awake(withContext: context)
// Create a workout session with the workout configuration
do {
let workoutConfiguration = HKWorkoutConfiguration()
workoutConfiguration.locationType = .indoor
workoutConfiguration.activityType = .rowing
workoutSession = try HKWorkoutSession(configuration: workoutConfiguration)
} catch {
fatalError(error.localizedDescription)
}
//// initial setup
parentConnector.activate()
parentConnector.delegate = self
workoutSession.delegate = self
}
// MARK: - Data Accumulation
private func startAccumulatingData() {
healthStoreManager.startHeartRateQuery() { quantitySamples in
DispatchQueue.main.async {
guard !self.isPaused() else {
return
}
guard let heartRateSamples = quantitySamples else {
return
}
let hrUnit = HKUnit(from: "count/min")
guard let sample = heartRateSamples.first else {
return
}
let value = sample.quantity.doubleValue(for: hrUnit)
self.updateHeartRate(value: value)
self.parentConnector.transfer(value: value)
}
}
}
func workoutSession(_ workoutSession: HKWorkoutSession,
didChangeTo toState: HKWorkoutSessionState,
from fromState: HKWorkoutSessionState,
date: Date) {
switch (toState) {
case .running:
startAccumulatingData()
case .ended:
stopAccumulatingData()
default:
print("Error")
}
}
func receivedWorkoutRunningStatus(_ status: String) {
if (status == "Start") {
healthStoreManager.start(workoutSession)
} else if (status == "Finish") {
healthStoreManager.end(workoutSession)
lblHeartRate.setText("No Active Workout")
}
DispatchQueue.main.async {
self.lblSplitIntervalNumber.setText(status)
}
}
On the iPhone app, I send a "Start" string using the transferUserInfo function to trigger the start of the workout session. This is not working properly: it only works sometimes and is very inconsistent.
I would appreciate any advice or alternative approaches.
Thank you in advance.
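For context, the iPhone side of the transfer looks roughly like this (a minimal sketch assuming a standard, already-activated WCSession; the exact sending code isn't shown above):
func sendWorkoutStatus(_ status: String) {
    // Queued for background delivery; the system decides when it actually arrives on the watch.
    guard WCSession.default.activationState == .activated else { return }
    WCSession.default.transferUserInfo(["status": status]) // e.g. "Start" or "Finish"
}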

Related

iOS "Request is missing required authentication credential. Expected OAuth 2 access token, login cookie or other valid authentication credential

I've logged in with Firebase in my iOS app and I have the Google Sheets API pod initialized in the Podfile, but I'm getting this error when I try to append data to my spreadsheet:
Request is missing required authentication credential. Expected OAuth 2 access token, login cookie or other valid authentication credential.
Here is my spreadsheets code:
//
// SpredsheetsController.swift
// frcscout
//
// Created by Elliot Scher on 12/21/22
//
import UIKit
import GoogleAPIClientForREST
import GoogleSignIn
class SpreadsheetsController: UIViewController {
let utils = Utils()
let sheetService = GTLRSheetsService()
override func viewDidLoad() {
super.viewDidLoad()
sheetService.authorizer = GIDSignIn.sharedInstance.currentUser?.authentication.fetcherAuthorizer()
}
@IBAction func appendDataPressed(_ sender: UIButton) {
appendData()
}
@IBAction func specificCellPressed(_ sender: UIButton) {
sendDataToCell()
}
@IBAction func readDataPressed(_ sender: UIButton) {
readData()
}
@IBAction func readSheetsPressed(_ sender: UIButton) {
readSheets()
}
}
extension SpreadsheetsController {
func appendData() {
let spreadsheetId = K.sheetID
let range = "A1:Q"
let rangeToAppend = GTLRSheets_ValueRange.init();
let data = ["this", "is", "a", "test"]
rangeToAppend.values = [data]
let query = GTLRSheetsQuery_SpreadsheetsValuesAppend.query(withObject: rangeToAppend, spreadsheetId: spreadsheetId, range: range)
query.valueInputOption = "USER_ENTERED"
sheetService.executeQuery(query) { (ticket, result, error) in
if let error = error {
print("Error in appending data: \(error)")
} else {
print("Data sent: \(data)")
}
}
}
func sendDataToCell() {
let spreadsheetId = K.sheetID
let currentRange = "A5:B5" //Any range on the sheet, for instance: A5:B6
let results = ["this is a test"]
let rangeToAppend = GTLRSheets_ValueRange.init();
rangeToAppend.values = [results]
let query = GTLRSheetsQuery_SpreadsheetsValuesUpdate.query(withObject: rangeToAppend, spreadsheetId: spreadsheetId, range: currentRange)
query.valueInputOption = "USER_ENTERED"
sheetService.executeQuery(query) { (ticket, result, error) in
if let error = error {
print(error)
} else {
print("Sending: \(results)")
}
}
}
func readData() {
print("Getting sheet data...")
let spreadsheetId = K.sheetID
let range = "A1:Q"
let query = GTLRSheetsQuery_SpreadsheetsValuesGet
.query(withSpreadsheetId: spreadsheetId, range:range)
sheetService.executeQuery(query) { (ticket, result, error) in
if let error = error {
print(error)
return
}
guard let result = result as? GTLRSheets_ValueRange else {
return
}
let rows = result.values!
var stringRows = rows as! [[String]]
for row in stringRows {
stringRows.append(row)
print(row)
}
if rows.isEmpty {
print("No data found.")
return
}
print("Number of rows in sheet: \(rows.count)")
}
}
func readSheets() {
print("func findSpreadNameAndSheets executing...")
let spreadsheetId = K.sheetID
let query = GTLRSheetsQuery_SpreadsheetsGet.query(withSpreadsheetId: spreadsheetId)
sheetService.executeQuery(query) { (ticket, result, error) in
if let error = error {
print(error)
} else {
let result = result as? GTLRSheets_Spreadsheet
let sheets = result?.sheets
if let sheetInfo = sheets {
for info in sheetInfo {
print("New sheet found: \(String(describing: info.properties?.title))")
}
}
}
}
}
}
Here is my authentication code:
//
// AuthenticationViewModel.swift
// frcscout
//
// Created by Elliot Scher on 12/15/22.
//
import Foundation
import Firebase
import GoogleSignIn
class AuthenticationViewModel: ObservableObject {
var credential:AuthCredential? = nil
enum SignInState {
case signedIn
case signedOut
}
@Published var state: SignInState = .signedOut
func signIn() {
// 1
if GIDSignIn.sharedInstance.hasPreviousSignIn() {
GIDSignIn.sharedInstance.restorePreviousSignIn { [unowned self] user, error in
authenticateUser(for: user, with: error)
}
} else {
guard let clientID = FirebaseApp.app()?.options.clientID else { return }
let configuration = GIDConfiguration(clientID: clientID)
guard let windowScene = UIApplication.shared.connectedScenes.first as? UIWindowScene else { return }
guard let rootViewController = windowScene.windows.first?.rootViewController else { return }
GIDSignIn.sharedInstance.signIn(with: configuration, presenting: rootViewController) { [unowned self] user, error in
authenticateUser(for: user, with: error)
}
}
}
private func authenticateUser(for user: GIDGoogleUser?, with error: Error?) {
if let error = error {
print(error.localizedDescription)
return
}
guard let authentication = user?.authentication, let idToken = authentication.idToken else { return }
credential = GoogleAuthProvider.credential(withIDToken: idToken, accessToken: authentication.accessToken)
Auth.auth().signIn(with: credential!) { [unowned self] (_, error) in
if let error = error {
print(error.localizedDescription)
} else {
self.state = .signedIn
}
}
}
func signOut() {
GIDSignIn.sharedInstance.signOut()
do {
try Auth.auth().signOut()
state = .signedOut
} catch {
print(error.localizedDescription)
}
}
func getCredential() -> AuthCredential {
return credential!
}
}
I think I need an OAuth 2 token but I'm not sure how to get it from Firebase. Could someone help me resolve this issue? Thanks!
Resolved:
I had to put this line:
sheetService.authorizer = GIDSignIn.sharedInstance.currentUser?.authentication.fetcherAuthorizer()
at the beginning of each method.
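For example, appendData() from above ends up looking like this (same query code as before, shown as a sketch, with the authorizer refreshed at the top of the method):
func appendData() {
    // Refresh the authorizer from the current Google user before every request,
    // instead of setting it only once in viewDidLoad().
    sheetService.authorizer = GIDSignIn.sharedInstance.currentUser?.authentication.fetcherAuthorizer()

    let rangeToAppend = GTLRSheets_ValueRange()
    rangeToAppend.values = [["this", "is", "a", "test"]]

    let query = GTLRSheetsQuery_SpreadsheetsValuesAppend.query(withObject: rangeToAppend, spreadsheetId: K.sheetID, range: "A1:Q")
    query.valueInputOption = "USER_ENTERED"

    sheetService.executeQuery(query) { (ticket, result, error) in
        if let error = error {
            print("Error in appending data: \(error)")
        }
    }
}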

Network Extension isn't called

I am trying to connect to a VPN using OpenVPNAdapter, but the PacketTunnelProvider isn't called from the controller. What am I missing here?
Controller.swift
import NetworkExtension
var providerManager: NETunnelProviderManager!
var provider = PacketTunnelProvider()
override func viewDidLoad() {
super.viewDidLoad()
self.loadProviderManager {
self.configureVPN(response: self.arrResponse[0], serverAddress: self.arrResponse[0].iP, username: "vpn", password: "vpn")
}
}
func loadProviderManager(completion: @escaping () -> Void) {
NETunnelProviderManager.loadAllFromPreferences { (managers, error) in
if error == nil {
self.providerManager = managers?.first ?? NETunnelProviderManager()
completion()
}
}
}
func configureVPN(response:Response,serverAddress: String, username: String, password: String) {
let data = Data(base64Encoded:response.openVPNConfigDataBase64, options: .ignoreUnknownCharacters)
print(data!)
let decodedString = String(data: data!, encoding: .utf8)!
print(decodedString)
self.providerManager?.loadFromPreferences { error in
if error == nil {
let tunnelProtocol = NETunnelProviderProtocol()
tunnelProtocol.username = username
tunnelProtocol.serverAddress = serverAddress
tunnelProtocol.providerBundleIdentifier = "***.*****.********.***********.********"
tunnelProtocol.providerConfiguration = ["ovpn": data!, "username": username, "password": password]
tunnelProtocol.disconnectOnSleep = false
self.providerManager.protocolConfiguration = tunnelProtocol
self.providerManager.localizedDescription = "SMVPN"
self.providerManager.isEnabled = true
self.providerManager.saveToPreferences(completionHandler: { (error) in
if error == nil {
self.providerManager.loadFromPreferences(completionHandler: { (error) in
if error == nil {
self.provider.startTunnel(options: nil) { error in // this is called here, not in the network extension
if error != nil {
print(error!)
}else {
}
}
}else {
print(error!.localizedDescription)
}
})
}
})
}
}
}
Project Entitlement
PacketTunnelProvider.swift
import NetworkExtension
import OpenVPNAdapter
class PacketTunnelProvider: NEPacketTunnelProvider {
var startHandler: ((Error?) -> Void)?
var stopHandler: (() -> Void)?
var vpnReachability = OpenVPNReachability()
var configuration: OpenVPNConfiguration!
var properties: OpenVPNConfigurationEvaluation!
var UDPSession: NWUDPSession!
var TCPConnection: NWTCPConnection!
lazy var vpnAdapter: OpenVPNAdapter = {
let adapter = OpenVPNAdapter()
adapter.delegate = self
return adapter
}()
override func startTunnel(options: [String : NSObject]?, completionHandler: @escaping (Error?) -> Void) {
// Add code here to start the process of connecting the tunnel.
guard
let protocolConfiguration = protocolConfiguration as? NETunnelProviderProtocol,
let providerConfiguration = protocolConfiguration.providerConfiguration
else {
fatalError()
}
guard let ovpnFileContent: Data = providerConfiguration["ovpn"] as? Data else { return }
let configuration = OpenVPNConfiguration()
configuration.fileContent = ovpnFileContent
do {
properties = try vpnAdapter.apply(configuration: configuration)
} catch {
completionHandler(error)
return
}
configuration.tunPersist = true
if !properties.autologin {
if let username: String = providerConfiguration["username"] as? String, let password: String = providerConfiguration["password"] as? String {
let credentials = OpenVPNCredentials()
credentials.username = username
credentials.password = password
do {
try vpnAdapter.provide(credentials: credentials)
} catch {
completionHandler(error)
return
}
}
}
vpnReachability.startTracking { [weak self] status in
guard status != .notReachable else { return }
self?.vpnAdapter.reconnect(afterTimeInterval: 5)
}
startHandler = completionHandler
vpnAdapter.connect(using: self)
}
override func stopTunnel(with reason: NEProviderStopReason, completionHandler: @escaping () -> Void) {
// Add code here to start the process of stopping the tunnel.
stopHandler = completionHandler
if vpnReachability.isTracking {
vpnReachability.stopTracking()
}
vpnAdapter.disconnect()
completionHandler()
}
override func handleAppMessage(_ messageData: Data, completionHandler: ((Data?) -> Void)?) {
// Add code here to handle the message.
if let handler = completionHandler {
handler(messageData)
}
}
override func sleep(completionHandler: @escaping () -> Void) {
// Add code here to get ready to sleep.
completionHandler()
}
override func wake() {
// Add code here to wake up.
}
}
extension PacketTunnelProvider: OpenVPNAdapterDelegate {
func openVPNAdapter(_ openVPNAdapter: OpenVPNAdapter, configureTunnelWithNetworkSettings networkSettings: NEPacketTunnelNetworkSettings?, completionHandler: @escaping (Error?) -> Void) {
setTunnelNetworkSettings(networkSettings) { (error) in
completionHandler(error == nil ? self.packetFlow as? Error : nil)
}
}
func openVPNAdapter(_ openVPNAdapter: OpenVPNAdapter, configureTunnelWithNetworkSettings networkSettings: NEPacketTunnelNetworkSettings?, completionHandler: @escaping (OpenVPNAdapterPacketFlow?) -> Void) {
setTunnelNetworkSettings(networkSettings) { (error) in
completionHandler(error == nil ? self.packetFlow : nil)
}
}
func openVPNAdapter(_ openVPNAdapter: OpenVPNAdapter, handleEvent event: OpenVPNAdapterEvent, message: String?) {
switch event {
case .connected:
if reasserting {
reasserting = false
}
guard let startHandler = startHandler else { return }
startHandler(nil)
self.startHandler = nil
case .disconnected:
guard let stopHandler = stopHandler else { return }
if vpnReachability.isTracking {
vpnReachability.stopTracking()
}
stopHandler()
self.stopHandler = nil
case .reconnecting:
reasserting = true
default:
break
}
}
func openVPNAdapter(_ openVPNAdapter: OpenVPNAdapter, handleError error: Error) {
guard let fatal = (error as NSError).userInfo[OpenVPNAdapterErrorFatalKey] as? Bool, fatal == true else {
return
}
NSLog("Error: \(error.localizedDescription)")
NSLog("Connection Info: \(vpnAdapter.connectionInformation.debugDescription)")
if vpnReachability.isTracking {
vpnReachability.stopTracking()
}
if let startHandler = startHandler {
startHandler(error)
self.startHandler = nil
} else {
cancelTunnelWithError(error)
}
}
func openVPNAdapter(_ openVPNAdapter: OpenVPNAdapter, handleLogMessage logMessage: String) {
NSLog("Log: \(logMessage)")
}
}
extension PacketTunnelProvider: OpenVPNAdapterPacketFlow {
func readPackets(completionHandler: @escaping ([Data], [NSNumber]) -> Void) {
packetFlow.readPackets(completionHandler: completionHandler)
}
func writePackets(_ packets: [Data], withProtocols protocols: [NSNumber]) -> Bool {
return packetFlow.writePackets(packets, withProtocols: protocols)
}
}
extension NEPacketTunnelFlow: OpenVPNAdapterPacketFlow {}
Extension Entitlement
self.provider.startTunnel(options: nil) { error in // This is called here, not in network extension
This method is called from the controller but does not get called in the network extension.
I posted all my code, so if I missed something, then please let me know. Any help is appreciated.
I found this question but I haven't figured it out yet.
PacketTunnelProvider network extension not called Swift 3
You haven't mentioned how you are testing the network extension. You should attach the network extension process before running your project for debugging; only then will the network extension methods be triggered under the debugger.
Finally, I found my mistake: I was calling the network extension class directly, which will never work. Instead, I started the tunnel through NETunnelProviderManager, and it works:
self.providerManager.loadFromPreferences(completionHandler: { (error) in
if error == nil {
do {
try self.providerManager.connection.startVPNTunnel()
} catch let error {
print(error.localizedDescription)
}
}else {
print(error!.localizedDescription)
}
})
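Optionally, you can observe the connection status after calling startVPNTunnel() to confirm that the extension is actually being launched (a sketch using the standard NEVPNStatusDidChange notification; place it wherever the provider manager lives):
NotificationCenter.default.addObserver(forName: .NEVPNStatusDidChange, object: self.providerManager.connection, queue: .main) { _ in
    // The status moves through .connecting to .connected once the packet tunnel provider starts.
    print("VPN status: \(self.providerManager.connection.status.rawValue)")
}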

How to connect a user using the OpenVPNAdapter

I am working on a personal project where I will be connecting the user to a VPN. I have followed two blog posts on doing this: https://medium.com/better-programming/how-to-build-an-openvpn-client-on-ios-c8f927c11e80 and https://kean.blog/post/vpn-configuration-manager. I was able to configure the VPN settings, but it still does not connect. Here are the images of my project's Signing & Capabilities, one for the app and the other for the network extension.
My ViewController looks like this. There is a folder named "user" in the app's document directory that gets downloaded from the internet, and these are the files in that folder. I am not sure if I am supposed to use anything other than the user.ovpn file.
import UIKit
import Zip
import Alamofire
import NetworkExtension
class ViewController: UIViewController {
var selectedServer: Server?
var providerManager: NETunnelProviderManager!
override func viewDidLoad() {
super.viewDidLoad()
self.loadProviderManager {
self.configureVPN(serverAddress: "openvpn://\(self.selectedServer!.server.address):\(self.selectedServer!.server.port)", username: "\(self.username)", password: "\(self.password)")
}
}
func loadProviderManager(completion: @escaping () -> Void) {
NETunnelProviderManager.loadAllFromPreferences { (managers, error) in
if error == nil {
self.providerManager = managers?.first ?? NETunnelProviderManager()
completion()
}
}
}
func configureVPN(serverAddress: String, username: String, password: String) {
guard let configData = self.readFile(path: "user/user.ovpn") else { return }
self.providerManager?.loadFromPreferences { error in
if error == nil {
let tunnelProtocol = NETunnelProviderProtocol()
tunnelProtocol.username = username
tunnelProtocol.serverAddress = serverAddress
tunnelProtocol.providerBundleIdentifier = "com.example.Networking.tunnel"
tunnelProtocol.providerConfiguration = ["ovpn": configData, "username": username, "password": password]
tunnelProtocol.disconnectOnSleep = false
self.providerManager.protocolConfiguration = tunnelProtocol
self.providerManager.localizedDescription = "OpenVPN"
self.providerManager.isEnabled = true
self.providerManager.saveToPreferences(completionHandler: { (error) in
if error == nil {
self.providerManager.loadFromPreferences(completionHandler: { (error) in
do {
try self.providerManager.connection.startVPNTunnel()
} catch let error {
print(error.localizedDescription)
}
})
}
})
}
}
}
func readFile(path: String) -> Data? {
let fileManager = FileManager.default
do {
let documentDirectory = try fileManager.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false)
let fileURL = documentDirectory.appendingPathComponent(path)
return try Data(contentsOf: fileURL, options: .uncached)
}
catch let error {
print(error.localizedDescription)
}
return nil
}
}
And my PacketTunnelProvider looks like this
import NetworkExtension
import OpenVPNAdapter
class PacketTunnelProvider: NEPacketTunnelProvider {
var startHandler: ((Error?) -> Void)?
var stopHandler: (() -> Void)?
var vpnReachability = OpenVPNReachability()
var configuration: OpenVPNConfiguration!
var properties: OpenVPNProperties!
var UDPSession: NWUDPSession!
var TCPConnection: NWTCPConnection!
lazy var vpnAdapter: OpenVPNAdapter = {
let adapter = OpenVPNAdapter()
adapter.delegate = self
return adapter
}()
override func startTunnel(options: [String : NSObject]?, completionHandler: @escaping (Error?) -> Void) {
guard
let protocolConfiguration = protocolConfiguration as? NETunnelProviderProtocol,
let providerConfiguration = protocolConfiguration.providerConfiguration
else {
fatalError()
}
guard let ovpnFileContent: Data = providerConfiguration["ovpn"] as? Data else { return }
let configuration = OpenVPNConfiguration()
configuration.fileContent = ovpnFileContent
do {
properties = try vpnAdapter.apply(configuration: configuration)
} catch {
completionHandler(error)
return
}
configuration.tunPersist = true
if !properties.autologin {
if let username: String = providerConfiguration["username"] as? String, let password: String = providerConfiguration["password"] as? String {
let credentials = OpenVPNCredentials()
credentials.username = username
credentials.password = password
do {
try vpnAdapter.provide(credentials: credentials)
} catch {
completionHandler(error)
return
}
}
}
vpnReachability.startTracking { [weak self] status in
guard status != .notReachable else { return }
self?.vpnAdapter.reconnect(afterTimeInterval: 5)
}
startHandler = completionHandler
vpnAdapter.connect()
}
override func stopTunnel(with reason: NEProviderStopReason, completionHandler: @escaping () -> Void) {
stopHandler = completionHandler
if vpnReachability.isTracking {
vpnReachability.stopTracking()
}
vpnAdapter.disconnect()
}
override func handleAppMessage(_ messageData: Data, completionHandler: ((Data?) -> Void)?) {
if let handler = completionHandler {
handler(messageData)
}
}
override func sleep(completionHandler: @escaping () -> Void) {
completionHandler()
}
override func wake() {
}
}
extension PacketTunnelProvider: OpenVPNAdapterDelegate {
func openVPNAdapter(_ openVPNAdapter: OpenVPNAdapter, configureTunnelWithNetworkSettings networkSettings: NEPacketTunnelNetworkSettings?, completionHandler: @escaping (OpenVPNAdapterPacketFlow?) -> Void) {
setTunnelNetworkSettings(networkSettings) { (error) in
completionHandler(error == nil ? self.packetFlow : nil)
}
}
func openVPNAdapter(_ openVPNAdapter: OpenVPNAdapter, handleEvent event: OpenVPNAdapterEvent, message: String?) {
switch event {
case .connected:
if reasserting {
reasserting = false
}
guard let startHandler = startHandler else { return }
startHandler(nil)
self.startHandler = nil
case .disconnected:
guard let stopHandler = stopHandler else { return }
if vpnReachability.isTracking {
vpnReachability.stopTracking()
}
stopHandler()
self.stopHandler = nil
case .reconnecting:
reasserting = true
default:
break
}
}
func openVPNAdapter(_ openVPNAdapter: OpenVPNAdapter, handleError error: Error) {
guard let fatal = (error as NSError).userInfo[OpenVPNAdapterErrorFatalKey] as? Bool, fatal == true else {
return
}
NSLog("Error: \(error.localizedDescription)")
NSLog("Connection Info: \(vpnAdapter.connectionInformation.debugDescription)")
if vpnReachability.isTracking {
vpnReachability.stopTracking()
}
if let startHandler = startHandler {
startHandler(error)
self.startHandler = nil
} else {
cancelTunnelWithError(error)
}
}
func openVPNAdapter(_ openVPNAdapter: OpenVPNAdapter, handleLogMessage logMessage: String) {
NSLog("Log: \(logMessage)")
}
}
extension PacketTunnelProvider: OpenVPNAdapterPacketFlow {
func readPackets(completionHandler: @escaping ([Data], [NSNumber]) -> Void) {
packetFlow.readPackets(completionHandler: completionHandler)
}
func writePackets(_ packets: [Data], withProtocols protocols: [NSNumber]) -> Bool {
return packetFlow.writePackets(packets, withProtocols: protocols)
}
}
extension NEPacketTunnelFlow: OpenVPNAdapterPacketFlow {}

RPScreenRecorder.shared().isAvailable is always false

I'm trying to record my screen with a sample iOS application, but it does not work because RPScreenRecorder.shared().isAvailable always returns false.
These are my codes:
ViewController.swift
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var StartRecordingButton: UIButton!
@IBOutlet weak var EndRecordingButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
StartRecordingButton.addTarget(self, action: #selector(startRecord(_:)), for: .touchUpInside)
EndRecordingButton.addTarget(self, action: #selector(stopRecord(_:)), for: .touchUpInside)
}
private lazy var recorder: ScreenRecorder = ScreenRecorder(configuration: ScreenRecorder.Configuration(), completion: {
(url, error) in
guard let url = url else {
fatalError("\(#function) record failed \(String(describing: error))")
}
debugPrint(#function, "success", url)
})
@objc func startRecord(_ sender: UIButton) {
recordStart()
}
@objc func stopRecord(_ sender: UIButton) {
recordStop()
}
private func recordStart() {
guard !recorder.isRecording else { return }
do {
try recorder.start()
} catch {
fatalError("start recording failed \(error)")
}
}
private func recordStop() {
guard recorder.isRecording else { return }
do {
try recorder.end()
} catch {
fatalError("finish recording failed \(error)")
}
}
}
ScreenRecorder.swift
import ReplayKit
@available(iOS 11.0, *)
public class ScreenRecorder: NSObject {
let screenRecorder = RPScreenRecorder.shared()
// Alias for arguments
public typealias Completion = (URL?, Error?) -> ()
let completion: Completion
let configuration: Configuration
public init(configuration: Configuration, completion: @escaping Completion) {
self.configuration = configuration
self.completion = completion
super.init()
}
// Start recording screen
public func start() throws {
print(screenRecorder.isAvailable)
guard screenRecorder.isAvailable else {
throw ScreenRecorderError.notAvailable
}
guard !screenRecorder.isRecording else {
throw ScreenRecorderError.alreadyRunning
}
try setUp()
assetWriter?.startWriting()
assetWriter?.startSession(atSourceTime: CMTime.zero)
screenRecorder.startCapture(handler: { [weak self] (cmSampleBuffer, rpSampleBufferType, error) in
if let error = error {
debugPrint(#function, "something happened", error)
}
if RPSampleBufferType.video == rpSampleBufferType {
self?.appendVideo(sampleBuffer: cmSampleBuffer)
}
}) { [weak self] (error) in
if let error = error {
self?.completion(nil, error)
}
}
}
public func end() throws {
guard screenRecorder.isRecording else {
throw ScreenRecorderError.notRunning
}
screenRecorder.stopCapture { [weak self] (error) in
if let error = error {
self?.completion(nil, error)
}
self?.videoAssetWriterInput?.markAsFinished()
self?.assetWriter?.finishWriting {
DispatchQueue.main.async {
self?.completion(self?.cacheFileURL, nil)
}
}
}
}
public var isRecording: Bool {
return screenRecorder.isRecording
}
private var startTime: CMTime?
private var assetWriter: AVAssetWriter?
private var videoAssetWriterInput: AVAssetWriterInput?
private var writerInputPixelBufferAdapter: AVAssetWriterInputPixelBufferAdaptor?
private func setUp() throws {
try createCacheDirectoryIfNeeded()
try removeOldCachedFile()
guard let cacheURL = cacheFileURL else {
throw ScreenRecorderError.invalidURL
}
let assetWriter = try AVAssetWriter(url: cacheURL, fileType: configuration.fileType)
let videoSettings: [String: Any] = [
AVVideoCodecKey: configuration.codec,
AVVideoWidthKey: UInt(configuration.videoSize.width),
AVVideoHeightKey: UInt(configuration.videoSize.height),
]
let videoAssetWriterInput = try AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
videoAssetWriterInput.expectsMediaDataInRealTime = true
if assetWriter.canAdd(videoAssetWriterInput) {
assetWriter.add(videoAssetWriterInput)
}
self.assetWriter = assetWriter
self.videoAssetWriterInput = videoAssetWriterInput
self.writerInputPixelBufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoAssetWriterInput, sourcePixelBufferAttributes: [
kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)
])
}
private func appendVideo(sampleBuffer: CMSampleBuffer) {
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
let firstTime: CMTime
if let startTime = self.startTime {
firstTime = startTime
} else {
firstTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
startTime = firstTime
}
let currentTime: CMTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let diffTime: CMTime = CMTimeSubtract(currentTime, firstTime)
if writerInputPixelBufferAdapter?.assetWriterInput.isReadyForMoreMediaData ?? false {
writerInputPixelBufferAdapter?.append(pixelBuffer, withPresentationTime: diffTime)
}
}
private func createCacheDirectoryIfNeeded() throws {
guard let cacheDirectoryURL = cacheDirectoryURL else { return }
let fileManager = FileManager.default
guard !fileManager.fileExists(atPath: cacheDirectoryURL.path) else { return }
try fileManager.createDirectory(at: cacheDirectoryURL, withIntermediateDirectories: true, attributes: nil)
}
private func removeOldCachedFile() throws {
guard let cacheURL = cacheFileURL else { return }
let fileManager = FileManager.default
guard fileManager.fileExists(atPath: cacheURL.path) else { return }
try fileManager.removeItem(at: cacheURL)
}
private var cacheDirectoryURL: URL? = {
guard let path = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first else {
return nil
}
return URL(fileURLWithPath: path).appendingPathComponent("ScreenRecorder")
}()
private var cacheFileURL: URL? {
guard let cacheDirectoryURL = cacheDirectoryURL else { return nil }
return cacheDirectoryURL.appendingPathComponent("screenrecord.mp4")
}
}
@available(iOS 11.0, *)
extension ScreenRecorder {
public struct Configuration{
public var codec: AVVideoCodecType = .h264
public var fileType: AVFileType = .mp4
public var videoSize: CGSize = CGSize(
width: UIScreen.main.bounds.width,
height: UIScreen.main.bounds.height
)
public var audioQuality: AVAudioQuality = .medium
public var audioFormatID: AudioFormatID = kAudioFormatMPEG4AAC
public var numberOfChannels: UInt = 2
public var sampleRate: Double = 44100.0
public var bitrate: UInt = 16
public init() {}
}
public enum ScreenRecorderError: Error {
case notAvailable
case alreadyRunning
case notRunning
case invalidURL
}
}
And it shows this fatal error which I wrote:
ios_record_screen[1258:213516] Fatal error: start recording failed notAvailable
I've enabled screen recording in the Settings app on my iPhone 8, and I also tried running it on my friend's iPhone X, but neither phone worked.
I could not find helpful information on the internet.
I hope someone can help.
I hope the problem has already been resolved for those who struggled with this before.
In my case, viewDidLoad() needed the line
RPScreenRecorder.shared().delegate = self
along with the delegate extension that goes with it.
I was implementing RPScreenRecorder in a new view. It was working normally in other views, and I ran into the same problem as the author. When I compared the new view with the working ones, the difference was that the delegate had not been set.
Hope this helps anyone who finds this page in the future.
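A minimal sketch of what I mean (only the ReplayKit API names are real; the view controller is just for illustration):
import ReplayKit
import UIKit

class RecordingViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        // Without this line, isAvailable stayed false in my new view.
        RPScreenRecorder.shared().delegate = self
    }
}

extension RecordingViewController: RPScreenRecorderDelegate {
    // Called whenever the recorder's availability changes.
    func screenRecorderDidChangeAvailability(_ screenRecorder: RPScreenRecorder) {
        print("Recorder available: \(screenRecorder.isAvailable)")
    }
}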

Camera app freezes during phone call

I have a bug in my camera app. If you open the app while on a phone call, the entire app freezes. I've tried using AVCaptureSessionWasInterrupted and AVCaptureSessionInterruptionEnded notifications to handle the audio input management during a phone call, but have had no luck fixing the issue. When I comment out the audio input setup, the app no longer freezes during a phone call, so I'm pretty confident the issue lies somewhere with the audio management.
Why is the app freezing during phone calls and how can I fix it?
Thanks in advance!
Relevant code:
class CameraManager: NSObject {
static let shared = CameraManager()
private let notificationQueue = OperationQueue.main
var delegate: CameraManagerDelegate? = nil
let session = AVCaptureSession()
var captureDeviceInput: AVCaptureDeviceInput? = nil
var audioInput: AVCaptureDeviceInput? = nil
let photoOutput = AVCapturePhotoOutput()
let videoOutput = AVCaptureMovieFileOutput()
var isRecording: Bool {
return videoOutput.isRecording
}
func getCurrentVideoCaptureDevice() throws -> AVCaptureDevice {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
return device
}
func getZoomFactor() throws -> CGFloat {
return try getCurrentVideoCaptureDevice().videoZoomFactor
}
func getMaxZoomFactor() throws -> CGFloat {
return try getCurrentVideoCaptureDevice().activeFormat.videoMaxZoomFactor
}
override init() {
super.init()
NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationDidBecomeActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
self.session.startRunning()
try? self.setupCamera()
try? self.setZoomLevel(zoomLevel: 1.0)
if Settings.shared.autoRecord {
try? self.startRecording()
}
}
NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationWillResignActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
self.stopRecording()
self.session.stopRunning()
}
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionWasInterrupted, object: nil, queue: notificationQueue) { [unowned self] (notification) in
if let audioInput = self.audioInput {
self.session.removeInput(audioInput)
}
}
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionInterruptionEnded, object: nil, queue: notificationQueue) { [unowned self] (notification) in
try? self.setupAudio()
}
try? self.setupSession()
}
func setupSession() throws {
session.sessionPreset = .high
if !session.isRunning {
session.startRunning()
}
if Utils.checkPermissions() {
try setupInputs()
setupOutputs()
}
}
func setupInputs() throws {
try setupCamera()
try setupAudio()
}
func setupCamera() throws {
do {
try setCamera(position: Settings.shared.defaultCamera)
} catch CameraManagerError.unableToFindCaptureDevice(let position) {
//some devices don't have a front camera, so try the back for setup
if position == .front {
try setCamera(position: .back)
}
}
}
func setupAudio() throws {
if let audioInput = self.audioInput {
self.session.removeInput(audioInput)
}
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
throw CameraManagerError.unableToGetAudioDevice
}
let audioInput = try AVCaptureDeviceInput(device: audioDevice)
if session.canAddInput(audioInput) {
session.addInput(audioInput)
self.audioInput = audioInput
} else {
self.delegate?.unableToAddAudioInput()
}
}
func setupOutputs() {
self.photoOutput.isHighResolutionCaptureEnabled = true
guard session.canAddOutput(self.photoOutput) else {
//error
return
}
session.addOutput(self.photoOutput)
guard session.canAddOutput(self.videoOutput) else {
//error
return
}
session.addOutput(self.videoOutput)
}
func startRecording() throws {
if !self.videoOutput.isRecording {
let documentDirectory = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor:nil, create:false)
let url = documentDirectory.appendingPathComponent(UUID().uuidString + ".mov")
self.videoOutput.startRecording(to: url, recordingDelegate: self)
}
}
func stopRecording() {
if self.videoOutput.isRecording {
self.videoOutput.stopRecording()
}
}
func setZoomLevel(zoomLevel: CGFloat) throws {
guard let captureDevice = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDevice
}
try captureDevice.lockForConfiguration()
captureDevice.videoZoomFactor = zoomLevel
captureDevice.unlockForConfiguration()
}
func capturePhoto() {
let photoOutputSettings = AVCapturePhotoSettings()
photoOutputSettings.flashMode = Settings.shared.flash
photoOutputSettings.isAutoStillImageStabilizationEnabled = true
photoOutputSettings.isHighResolutionPhotoEnabled = true
self.photoOutput.capturePhoto(with: photoOutputSettings, delegate: self)
}
func toggleCamera() throws {
if let captureDeviceInput = self.captureDeviceInput,
captureDeviceInput.device.position == .back {
try setCamera(position: .front)
} else {
try setCamera(position: .back)
}
}
func setCamera(position: AVCaptureDevice.Position) throws {
if let captureDeviceInput = self.captureDeviceInput {
if captureDeviceInput.device.position == position {
return
} else {
session.removeInput(captureDeviceInput)
}
}
var device: AVCaptureDevice? = nil
switch position {
case .front:
device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
default:
device = AVCaptureDevice.default(for: .video)
}
guard let nonNilDevice = device else {
throw CameraManagerError.unableToFindCaptureDevice(position)
}
try nonNilDevice.lockForConfiguration()
if nonNilDevice.isFocusModeSupported(.continuousAutoFocus) {
nonNilDevice.focusMode = .continuousAutoFocus
}
if nonNilDevice.isExposureModeSupported(.continuousAutoExposure) {
nonNilDevice.exposureMode = .continuousAutoExposure
}
nonNilDevice.unlockForConfiguration()
let input = try AVCaptureDeviceInput(device: nonNilDevice)
guard session.canAddInput(input) else {
throw CameraManagerError.unableToAddCaptureDeviceInput
}
session.addInput(input)
self.captureDeviceInput = input
}
func setFocus(point: CGPoint) throws {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
guard device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) else {
throw CameraManagerError.notSupportedByDevice
}
try device.lockForConfiguration()
device.focusPointOfInterest = point
device.focusMode = .autoFocus
device.unlockForConfiguration()
}
func setExposure(point: CGPoint) throws {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
guard device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) else {
throw CameraManagerError.notSupportedByDevice
}
try device.lockForConfiguration()
device.exposurePointOfInterest = point
device.exposureMode = .autoExpose
device.unlockForConfiguration()
}
}
extension CameraManager: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
self.delegate?.cameraManagerWillCapturePhoto()
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation() else {
//error
return
}
let capturedImage = UIImage.init(data: imageData , scale: 1.0)
if let image = capturedImage {
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
}
self.delegate?.cameraManagerDidFinishProcessingPhoto()
}
}
extension CameraManager: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
self.delegate?.cameraManagerDidStartRecording()
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
self.delegate?.cameraManagerDidFinishRecording()
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputFileURL)
}) { saved, error in
if saved {
do {
try FileManager.default.removeItem(at: outputFileURL)
} catch _ as NSError {
//error
}
}
}
}
}
