I've been trying to add Shazam matching to my app using the new ShazamKit. I've used Apple's sample code found here and adapted it slightly.
import ShazamKit
import AVFAudio
import Combine
@available(iOS 15.0, *)
class ShazamMatcher: NSObject, ObservableObject, SHSessionDelegate {
// MARK: - Properties
@Published var result: SHMatch?
@Published var isRecording = false
private var isInitialSetupDone = false
private var session: SHSession?
private let audioEngine = AVAudioEngine()
// MARK: - Actions
func match() throws {
result = nil
session = SHSession()
session?.delegate = self
try doInitialSetupIfNeeded()
AVAudioSession.sharedInstance().requestRecordPermission { [weak self] success in
guard success, let self = self else {
return
}
try? self.audioEngine.start()
self.isRecording = true
}
}
func stopMatching() {
audioEngine.stop()
isRecording = false
}
// MARK: - Setup
private func doInitialSetupIfNeeded() throws {
guard !isInitialSetupDone else {
return
}
let audioFormat = AVAudioFormat(
standardFormatWithSampleRate: audioEngine.inputNode.outputFormat(forBus: 0).sampleRate,
channels: 1
)
audioEngine.inputNode.installTap(onBus: 0, bufferSize: 2048, format: audioFormat) { [weak session] buffer, audioTime in
session?.matchStreamingBuffer(buffer, at: audioTime)
}
try AVAudioSession.sharedInstance().setCategory(.record)
isInitialSetupDone = true
}
// MARK: - SHSessionDelegate
func session(_ session: SHSession, didFind match: SHMatch) {
// Handle match here
}
func session(_ session: SHSession, didNotFindMatchFor signature: SHSignature, error: Error?) {
// Handle error here
}
}
However, when calling match(), the delegate eventually reports an error: The operation couldn’t be completed. (com.apple.ShazamKit error 202.)
I've added a new key using my bundle identifier for the ShazamKit services and downloaded the .p8 file. Do I need this file, and if so, how do I use it?
Has anybody been able to resolve this error?
I've found a solution. First, apparently the inter-app audio entitlement has to be enabled.
Second, it seems like you need an SHSignatureGenerator as well (I thought it would be enough to call matchStreamingBuffer).
Here's code that works:
https://github.com/heysaik/ShazamKit-Demo/blob/main/Shazam/ViewController.swift
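For completeness, here's a rough sketch of the generator-based approach (not the exact code from the linked demo; the class and method names are just illustrative):
import ShazamKit
import AVFAudio

// Sketch only: accumulate tapped audio into a SHSignatureGenerator, then
// match the resulting signature. Assumes the SHSession's delegate is set
// elsewhere, as in the ShazamMatcher class above.
final class SignatureMatcher {
    let session = SHSession()
    private let generator = SHSignatureGenerator()

    // Call from the input node's tap block with each buffer.
    func append(_ buffer: AVAudioPCMBuffer, at time: AVAudioTime?) {
        try? generator.append(buffer, at: time)
    }

    // After a few seconds of audio have been collected, match the signature.
    func finishAndMatch() {
        session.match(generator.signature())
    }
}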
I've created a macOS app project which contains a system network extension (a DNS proxy). With the help of https://developer.apple.com/forums/thread/81103?answerId=246229022 and How to use NEDNSProxyProvider in iOS 11, I enabled the DNS proxy provider. When I built the app for the first time, a prompt asked me to allow the system extension. I allowed it, and under Network in System Preferences I can see the DNS proxy is not running (yellow dot).
(Screenshot: "Not running")
I added an OSSystemExtensionRequest from the container app. This is my code.
import Cocoa
import NetworkExtension
import SystemExtensions
import OSLog
class ViewController: NSViewController {
let log = OSLog(subsystem: "com.example.applesamplecode.DNSTestBed", category: "app")
let manager = NEDNSProxyManager.shared()
private func installSystemExtension() {
os_log("DNSFProxy: installing system extension")
let request = OSSystemExtensionRequest.activationRequest(
forExtensionWithIdentifier: "my system extension's bundle identifier",
queue: .main
)
request.delegate = self
OSSystemExtensionManager.shared.submitRequest(request)
}
private func enable() {
NSLog("enabled already ",self.manager.isEnabled )
NSLog(self.manager.isEnabled ? "Yes" : "No")
self.update {
self.manager.localizedDescription = "DNS"
let proto = NEDNSProxyProviderProtocol()
proto.serverAddress = "localhost"
proto.providerBundleIdentifier = "my system extension's bundle identifier"
self.manager.providerProtocol = proto
self.manager.isEnabled = true
}
NSLog(self.manager.isEnabled ? "Yes" : "No")
}
private func disable() {
self.update {
self.manager.isEnabled = false
}
}
private func update(_ body: @escaping () -> Void) {
self.manager.loadFromPreferences { (error) in
guard error == nil else {
NSLog("DNS Test App: load error")
return
}
body()
self.manager.saveToPreferences { (error) in
guard error == nil else {
NSLog("DNS Test App: save error")
return
}
NSLog("DNS Test App: saved")
}
}
}
override func viewDidLoad() {
super.viewDidLoad()
self.enable()
self.installSystemExtension()
}
}
extension ViewController: OSSystemExtensionRequestDelegate {
func request(_ request: OSSystemExtensionRequest, actionForReplacingExtension existing: OSSystemExtensionProperties, withExtension ext: OSSystemExtensionProperties) -> OSSystemExtensionRequest.ReplacementAction {
os_log("DNSProxy: Replacing extension %# version %# with version %#", request.identifier, existing.bundleShortVersion, ext.bundleShortVersion)
return .replace
}
func requestNeedsUserApproval(_ request: OSSystemExtensionRequest) {
os_log("DNSProxy: Extension %# requires user approval", request.identifier)
}
func request(_ request: OSSystemExtensionRequest, didFailWithError error: Error) {
os_log("DNSProxy: System extension request failed: %#", error.localizedDescription)
}
/* Other delegate methods here */
func request(_ request: OSSystemExtensionRequest, didFinishWithResult result: OSSystemExtensionRequest.Result) {
switch result {
case .completed:
// Activation finished; the DNS proxy configuration can be enabled here.
os_log("DNSProxy: activation request completed")
case .willCompleteAfterReboot:
os_log("DNSProxy: willCompleteAfterReboot")
@unknown default:
os_log("DNSProxy: default")
}
}
}
Info.plist of system extension: (screenshot)
I'm getting an error when the activation request happens: The operation couldn’t be completed. (OSSystemExtensionErrorDomain error 1.) Any help would be great. Thanks.
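One thing that might help narrow this down: error 1 in OSSystemExtensionErrorDomain appears to be the generic "unknown" case, so logging the concrete error code in the failure callback can at least confirm which case you're hitting. A sketch of what the didFailWithError implementation above could log (the case names come from the SystemExtensions framework):
import SystemExtensions
import OSLog

// Sketch: surface the underlying OSSystemExtensionError code so the failure
// reason is readable in the log.
func request(_ request: OSSystemExtensionRequest, didFailWithError error: Error) {
    let nsError = error as NSError
    os_log("DNSProxy: request failed, domain=%{public}@ code=%d", nsError.domain, nsError.code)

    if let extensionError = error as? OSSystemExtensionError {
        switch extensionError.code {
        case .unknown:
            // Commonly seen when the .systemextension bundle isn't embedded in
            // Contents/Library/SystemExtensions, or the app isn't run from /Applications.
            os_log("DNSProxy: unknown system extension error")
        case .missingEntitlement:
            os_log("DNSProxy: missing entitlement")
        case .validationFailed, .codeSignatureInvalid:
            os_log("DNSProxy: signing or Info.plist validation problem")
        default:
            os_log("DNSProxy: %{public}@", extensionError.localizedDescription)
        }
    }
}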
P.S: I also posted this in apple forums https://developer.apple.com/forums/thread/717647
I'm using a companion app to authorize a user with a 3rd-party service. Once authorized, I update a UserDefaults variable to true. On the companion app side, the view updates correctly and shows that the user has been authenticated. However, on the watchOS side the view does not update. Would I need to use the Watch Connectivity API and send a message to the watch to update the state, or is there a simpler way?
Phone App
import SwiftUI

struct AuthenticationView: View {
@State private var startingWebAuthenticationSession = false
@AppStorage("authorized") private var authorized = false
var body: some View {
Group {
if !authorized {
VStack {
Button("Connect", action: { self.startingWebAuthenticationSession = true })
.webAuthenticationSession(isPresented: $startingWebAuthenticationSession) {
WebAuthenticationSession(
url: URL(string: "https://service.com/oauth/authorize?scope=email%2Cread_stats&response_type=code&redirect_uri=watch%3A%2F%2Foauth-callback&client_id=\(clientId)")!,
callbackURLScheme: callbackURLScheme
) { callbackURL, error in
guard error == nil, let successURL = callbackURL else {
return
}
let oAuthCode = NSURLComponents(string: (successURL.absoluteString))?.queryItems?.filter({$0.name == "code"}).first
guard let authorizationCode = oAuthCode?.value else { return }
let url = URL(string: "https://service.com/oauth/token")
var request = URLRequest(url: url!)
request.httpMethod = "POST"
let params = "client_id=\(clientId)&client_secret=\(clientSecret)&grant_type=authorization_code&code=\(authorizationCode)&redirect_uri=\(callbackURLScheme)://oauth-callback";
request.httpBody = params.data(using: String.Encoding.utf8);
let task = URLSession.shared.dataTask(with: request) { (data, response, error) in
if let error = error {
print("Error took place \(error)")
return
}
if let data = data, let response = String(data: data, encoding: .utf8) {
let accessTokenResponse: AccessTokenResponse = try! JSONDecoder().decode(AccessTokenResponse.self, from: response.data(using: .utf8)!)
let defaults = UserDefaults.standard
authorized = true
startingWebAuthenticationSession = false
defaults.set(accessTokenResponse.access_token, forKey: DefaultsKeys.accessToken) //TODO: Store securely
ConnectivityService.shared.send(authorized: true)
}
}
task.resume()
}
.prefersEphemeralWebBrowserSession(false)
}
}
}
else {
VStack {
Text("Authenticated!")
}
}
}
}
}
WatchOS
import SwiftUI
struct ConnectView: View {
@ObservedObject var connectivityService: ConnectivityService
var body: some View {
if !$connectivityService.authorized.wrappedValue {
VStack {
Text("Open the app on your primary device to connect.")
}
}
else {
//Some other view
}
}
}
EDIT:
I'm trying the Watch Connectivity API, but the issue I'm experiencing is that when I authenticate from the phone, it takes some time for the ConnectView to update the authorized variable. I know the Watch Connectivity API doesn't update right away, but at minimum I need some way for the watch to pick up that a secret access token has been retrieved so it can transition to the next view, whether that's through a shared state variable, UserDefaults, or some other mechanism.
Here is the ConnectivityService class I'm using:
import Foundation
import Combine
import WatchConnectivity
final class ConnectivityService: NSObject, ObservableObject {
static let shared = ConnectivityService()
@Published var authorized: Bool = false
override private init() {
super.init()
#if !os(watchOS)
guard WCSession.isSupported() else {
return
}
#endif
WCSession.default.delegate = self
WCSession.default.activate()
}
public func send(authorized: Bool, errorHandler: ((Error) -> Void)? = nil) {
guard WCSession.default.activationState == .activated else {
return
}
#if os(watchOS)
guard WCSession.default.isCompanionAppInstalled else {
return
}
#else
guard WCSession.default.isWatchAppInstalled else {
return
}
#endif
let authorizationInfo: [String: Bool] = [
DefaultsKeys.authorized: authorized
]
WCSession.default.sendMessage(authorizationInfo, replyHandler: nil)
WCSession.default.transferUserInfo(authorizationInfo)
}
}
extension ConnectivityService: WCSessionDelegate {
func session(_ session: WCSession, activationDidCompleteWith activationState: WCSessionActivationState, error: Error?) { }
func session(
_ session: WCSession,
didReceiveUserInfo userInfo: [String: Any] = [:]
) {
let key = DefaultsKeys.authorized
guard let authorized = userInfo[key] as? Bool else {
return
}
self.authorized = authorized
}
func session(_ session: WCSession, didReceiveMessage message: [String : Any]) {
self.authorized = true
}
#if os(iOS)
func sessionDidBecomeInactive(_ session: WCSession) {
}
func sessionDidDeactivate(_ session: WCSession) {
WCSession.default.activate()
}
#endif
}
I tried doing these two lines but they have varying results:
WCSession.default.sendMessage(authorizationInfo, replyHandler: nil)
WCSession.default.transferUserInfo(authorizationInfo)
With the first line, Xcode will say that no watch app could be found, even though I'm connected to both physical devices through Xcode (launching the phone app first, then the watch app). I believe the first call is immediate and the second is delivered whenever the queue feels like it. Sometimes if I hard-close the watch app it'll pick up the state change in the authorized variable, sometimes it won't. Very frustrating inter-device communication.
UserDefaults doesn't pick up the access token value on the watch side. Maybe I have to use App Groups?
I do see this error on the Watch side:
Publishing changes from background threads is not allowed; make sure to publish values from the main thread (via operators like receive(on:)) on model updates.
So I thought to try and encapsulate the self.authorized = authorized call into something like:
DispatchQueue.main.async {
self.authorized = authorized
}
But it didn't do anything as far as solving the immediate state change issue.
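One more avenue that may fit this use case better than sendMessage/transferUserInfo (a sketch on my part, not something verified against your exact setup): updateApplicationContext(_:) always delivers the most recent dictionary to the counterpart app, even if it isn't reachable at the moment, which is a good match for a simple "authorized yet?" flag. Something like this inside ConnectivityService and its WCSessionDelegate extension:
// Sketch: replace the message/userInfo calls with application context, which
// keeps only the latest value and delivers it when the counterpart wakes up.
public func send(authorized: Bool) {
    guard WCSession.default.activationState == .activated else { return }
    do {
        try WCSession.default.updateApplicationContext([DefaultsKeys.authorized: authorized])
    } catch {
        print("Failed to update application context: \(error)")
    }
}

// Receiving side, publishing on the main thread:
func session(_ session: WCSession, didReceiveApplicationContext applicationContext: [String: Any]) {
    guard let authorized = applicationContext[DefaultsKeys.authorized] as? Bool else { return }
    DispatchQueue.main.async {
        self.authorized = authorized
    }
}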
When I try to call sessionManager.initialize(), neither func sessionManager(manager: SPTSessionManager, didFailWith error: Error) nor func sessionManager(manager: SPTSessionManager, didInitiate session: SPTSession) is called.
I have a Node.js server running on AWS for token access and refresh, and I have also tried running a local Ruby server to get the token. No matter what, calling initialize() does nothing. It neither fails nor succeeds, and nothing is output to the console. I have tried running the Xcode debugger and it seems as if the program just skips past initialize(). Here is my complete ViewController.swift file with the unrelated/private parts deleted:
import UIKit
import Firebase
class LobbyAdminViewController: UIViewController, SPTSessionManagerDelegate, SPTAppRemoteDelegate, SPTAppRemotePlayerStateDelegate {
fileprivate let SpotifyClientID = "client_id"
fileprivate let SpotifyRedirectURI = URL(string: "redirect_url")!
fileprivate var lastPlayerState: SPTAppRemotePlayerState?
var refreshAPI = "token_server/refresh_token"
var tokenAPI = "token_server/token"
lazy var configuration: SPTConfiguration = {
let configuration = SPTConfiguration(clientID: SpotifyClientID, redirectURL: SpotifyRedirectURI)
configuration.playURI = ""
configuration.tokenSwapURL = URL(string: tokenAPI)
configuration.tokenRefreshURL = URL(string: refreshAPI)
return configuration
}()
lazy var sessionManager: SPTSessionManager = {
let manager = SPTSessionManager(configuration: configuration, delegate: self)
return manager
}()
lazy var appRemote: SPTAppRemote = {
let appRemote = SPTAppRemote(configuration: configuration, logLevel: .debug)
appRemote.delegate = self
return appRemote
}()
override func viewDidLoad() {
super.viewDidLoad()
let random = Int(arc4random_uniform(900000) + 100000)
lobbyCode = String(random)
lobbyCodeLabel.text = lobbyCode
var ref: DatabaseReference!
ref = Database.database().reference()
ref.child(lobbyCode).child("null").setValue("null")
let scope: SPTScope = [.appRemoteControl]
if #available(iOS 11, *) {
print("ios 11+")
sessionManager.initiateSession(with: scope, options: .clientOnly)
} else {
print("ios 11-")
sessionManager.initiateSession(with: scope, options: .clientOnly, presenting: self)
}
}
func update(playerState: SPTAppRemotePlayerState) {
print("Updating")
lastPlayerState = playerState
currentSongLabel.text = playerState.track.name
currentArtistLabel.text = playerState.track.artist.name
if playerState.isPaused {
pausePlayButton.setBackgroundImage(UIImage(named: "play"), for: .normal)
} else {
pausePlayButton.setBackgroundImage(UIImage(named: "pause"), for: .normal)
}
}
func fetchPlayerState() {
print("Getting player state")
appRemote.playerAPI?.getPlayerState({ [weak self] (playerState, error) in
if let error = error {
print("Error getting player state:" + error.localizedDescription)
} else if let playerState = playerState as? SPTAppRemotePlayerState {
self?.update(playerState: playerState)
}
})
}
@IBAction func onTap_pausePlayButton(_ sender: UIButton) {
print("tapped")
if let lastPlayerState = lastPlayerState, lastPlayerState.isPaused {
appRemote.playerAPI?.resume(nil)
print("Resuming")
} else {
appRemote.playerAPI?.pause(nil)
print("Pausing")
}
}
func sessionManager(manager: SPTSessionManager, didFailWith error: Error) {
print("Bad init")
print(error.localizedDescription)
}
func sessionManager(manager: SPTSessionManager, didRenew session: SPTSession) {
print("Renewed")
}
func sessionManager(manager: SPTSessionManager, didInitiate session: SPTSession) {
print("Trying to connect")
appRemote.connectionParameters.accessToken = session.accessToken
print(session.accessToken)
appRemote.connect()
}
// MARK: - SPTAppRemoteDelegate
func appRemoteDidEstablishConnection(_ appRemote: SPTAppRemote) {
print("App Remote Connected")
appRemote.playerAPI?.delegate = self
appRemote.playerAPI?.subscribe(toPlayerState: { (success, error) in
if let error = error {
print("Error subscribing to player state:" + error.localizedDescription)
}
})
fetchPlayerState()
}
func appRemote(_ appRemote: SPTAppRemote, didDisconnectWithError error: Error?) {
lastPlayerState = nil
print("Error connecting to app remote")
}
func appRemote(_ appRemote: SPTAppRemote, didFailConnectionAttemptWithError error: Error?) {
lastPlayerState = nil
print("Another error connectiong to app remote")
}
// MARK: - SPTAppRemotePlayerAPIDelegate
func playerStateDidChange(_ playerState: SPTAppRemotePlayerState) {
print("Player state changed")
update(playerState: playerState)
}
// MARK: - Private Helpers
fileprivate func presentAlertController(title: String, message: String, buttonTitle: String) {
let controller = UIAlertController(title: title, message: message, preferredStyle: .alert)
let action = UIAlertAction(title: buttonTitle, style: .default, handler: nil)
controller.addAction(action)
present(controller, animated: true)
}
}
The only print() statement that fires is "ios 11+" in viewDidLoad().
I have scoured the internet for anyone with the same issue and have come up empty.
The only thing I can think of that could be causing this issue is a known runtime issue with iOS 13. This error:
Can't end BackgroundTask: no background task exists with identifier 8 (0x8), or it may have already been ended. Break in UIApplicationEndBackgroundTaskError() to debug.
fires every time the app is sent to the background (i.e. when the app redirects to Spotify to authenticate). However, this issue exists even with a blank app in Xcode and does not halt execution.
I just figured this out. In the scene delegate class you have to implement the
func scene(_ scene: UIScene, openURLContexts URLContexts: Set<UIOpenURLContext>) {
}
method. You then need access to the sessionManager you have in your LobbyAdminViewController (create an instance of it) and add these lines of code to the method:
func scene(_ scene: UIScene, openURLContexts URLContexts: Set<UIOpenURLContext>) {
print("Opened url")
guard let url = URLContexts.first?.url else {
return
}
lobbyAdminVC.sessionManager.application(UIApplication.shared, open: url, options: [:])
}
After doing this, the app remote connected and all of the print statements fired, confirming that the app remote was connected.
I was having the same issue and kept getting the exact same Can't end BackgroundTask error; I was stumped for quite a while until I figured out the cause (in my case). It has to do with your AppDelegate.swift file. I don't think the error message itself is related to the problem; it's just the last thing logged to the console before the session initialization abruptly stops.
With the introduction of scene delegates, the default AppDelegate file has changed recently. You need to make sure you are not using the newer AppDelegate that works with the scene delegate; instead, convert your AppDelegate back to the way it looked before scene delegates were introduced.
For me, removing scene delegate completely from my app involved two steps:
1. Revert your AppDelegate.swift file
Mine looks something like this:
import UIKit
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow?
var application: UIApplication!
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
// Override point for customization after application launch.
return true
}
func application(_ app: UIApplication, open url: URL, options: [UIApplication.OpenURLOptionsKey : Any] = [:]) -> Bool {
return true
}
func applicationWillResignActive(_ application: UIApplication) {
//SpotifyManager.shared.appRemote is of type SPTAppRemote
if SpotifyManager.shared.appRemote.isConnected {
SpotifyManager.shared.appRemote.disconnect()
}
}
func applicationDidBecomeActive(_ application: UIApplication) {
//SpotifyManager.shared.appRemote is of type SPTAppRemote
if let _ = SpotifyManager.shared.appRemote.connectionParameters.accessToken {
SpotifyManager.shared.appRemote.connect()
}
}
}
2. Remove Application Scene Manifest from your info.plist
In your info.plist file, there is a property that tells your app that you are using the scene delegate. We need to delete this from the plist. It should look something like this:
<key>UIApplicationSceneManifest</key>
<dict>
<key>New item</key>
<string></string>
<key>UIApplicationSupportsMultipleScenes</key>
<false/>
<key>UISceneConfigurations</key>
<dict>
<key>UIWindowSceneSessionRoleApplication</key>
<array>
<dict>
<key>UISceneConfigurationName</key>
<string>Default Configuration</string>
<key>UISceneDelegateClassName</key>
<string>$(PRODUCT_MODULE_NAME).SceneDelegate</string>
<key>UISceneStoryboardFile</key>
<string>Main</string>
</dict>
</array>
</dict>
</dict>
If you update your app delegate and remove this property from the plist, it should work for you (or, at least, it worked for me).
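One extra thing worth double-checking (my assumption from the Spotify authorization flow, not something stated above): once the scene delegate is gone, the OAuth redirect arrives via application(_:open:options:) in the AppDelegate, and the session manager still needs to see that URL. A sketch, where SpotifyManager.shared.sessionManager is a placeholder for however you expose your SPTSessionManager:
// Sketch: forward the redirect URL so didInitiate/didFailWith can fire.
func application(_ app: UIApplication, open url: URL,
                 options: [UIApplication.OpenURLOptionsKey: Any] = [:]) -> Bool {
    SpotifyManager.shared.sessionManager.application(app, open: url, options: options)
    return true
}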
P.S. if you want to use scene delegate and use the Spotify SDK, I believe you have to do it in the way outlined in this resource. Notably, look for the part of the authorization guide that mentions "If you are using UIScene then you need to use appropriate method in your scene delegate."
I am using Apple's Instruments tool to check out the current progress of my application and manage any leaks early. I seem to have a lot of leaks, but I cannot figure out where they are coming from.
In my application, I have a SignInOperation which is a subclass of Operation. It also conforms to URLSessionDataDelegate so that it can handle my requests without needing to use completion handlers. For example, when adding an instance of SignInOperation to an OperationQueue instance, the operation that performs updates to the UI can just check the error and user properties on the SignInOperation and handle UI updates accordingly since it will have the SignInOperation instance as a dependency.
The class follows:
import UIKit
/// Manages a sign-in operation.
internal final class SignInOperation: Operation, URLSessionDataDelegate {
// MARK: - Properties
/// An internal flag that indicates whether the operation is currently executing.
private var _executing = false
/// An internal flag that indicates wheterh the operation is finished.
private var _finished = false
/// The received data from the operation.
private var receivedData = Data()
/// The data task used for sign-in.
private var sessionTask: URLSessionDataTask?
/// The URL session that is used for coordinating tasks used for sign-in.
private var localURLSession: URLSession { return URLSession(configuration: localConfiguration, delegate: self, delegateQueue: nil) }
/// The configuration used for configuring the URL session used for sign-in.
private var localConfiguration: URLSessionConfiguration { return .ephemeral }
/// The credentials used for user-sign-in.
private var credentials: UserCredentials
/// The current user.
internal var currentUser: User?
/// The error encountered while attempting sign-in.
internal var error: NetworkRequestError?
/// The cookie storage used for persisting an authentication cookie.
internal var cookieStorage: HTTPCookieStorage?
/// A Boolean value indicating whether the operation is currently executing.
override internal(set) var isExecuting: Bool {
get { return _executing }
set {
willChangeValue(forKey: "isExecuting")
_executing = newValue
didChangeValue(forKey: "isExecuting")
}
}
/// A Boolean value indicating whether the operation has finished executing its task.
override internal(set) var isFinished: Bool {
get { return _finished }
set {
willChangeValue(forKey: "isFinished")
_finished = newValue
didChangeValue(forKey: "isFinished")
}
}
/// A Boolean value indicating whether the operation executes its task asynchronously.
override var isAsynchronous: Bool { return true }
// MARK: - Initialization
/// Returns an instane of `SignInOperation`.
/// - parameter credentials: The credentials for user-sign-in.
init(credentials: UserCredentials, cookieStorage: HTTPCookieStorage = CookieStorage.defaultStorage) {
self.credentials = credentials
self.cookieStorage = cookieStorage
super.init()
localURLSession.configuration.httpCookieAcceptPolicy = .never
}
// MARK: - Operation Lifecycle
override func start() {
if isCancelled {
isFinished = true
return
}
isExecuting = true
let request = NetworkingRouter.signIn(credentials: credentials).urlRequest
sessionTask = localURLSession.dataTask(with: request)
guard let task = sessionTask else { fatalError("Failed to get task") }
task.resume()
}
// MARK: - URL Session Delegate
func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive response: URLResponse, completionHandler: #escaping (URLSession.ResponseDisposition) -> Void) {
if isCancelled {
isFinished = true
sessionTask?.cancel()
return
}
guard let statusCode = (response as? HTTPURLResponse)?.statusCode else { fatalError("Could not determine status code") }
setError(from: statusCode)
completionHandler(disposition(from: statusCode))
}
func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
if isCancelled {
guard let task = sessionTask else { fatalError("Failed to get task") }
task.cancel()
return
}
receivedData.append(data)
}
func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
defer { isFinished = true }
if isCancelled {
guard let task = sessionTask else { fatalError("Failed to get task") }
task.cancel()
}
if let statusCode = (task.response as? HTTPURLResponse)?.statusCode { setError(from: statusCode) } else if let taskError = error as? NSError { setError(from: taskError) }
if self.error == nil {
guard let taskResponse = task.response else { fatalError("Invalid response") }
setAuthenticationCookie(from: taskResponse)
processData()
}
}
// MARK: - Helpers
/// Handles the processing of the data received from the data task.
private func processData() {
currentUser = UserModelCreator.user(from: receivedData)
}
/// Handles the persistence of the returned cookie from the request's response.
private func setAuthenticationCookie(from response: URLResponse) {
guard let storage = cookieStorage else { fatalError() }
let cookiePersistenceManager = ResponseCookiePersistenceManger(storage: storage)
cookiePersistenceManager.removePreviousCookies()
guard let httpURLResponse = response as? HTTPURLResponse else { fatalError("Invalid response type") }
if let cookie = ResponseCookieParser.cookie(from: httpURLResponse) {cookiePersistenceManager.persistCookie(cookie: cookie) }
}
/// Handles the return of a specified HTTP status code.
/// - parameter statusCode: The status code.
private func setError(from statusCode: Int) {
switch statusCode {
case 200: error = nil
case 401: error = .invalidCredentials
default: error = .generic
}
}
/// Returns a `URLResponse.ResponseDisposition` for the specified HTTP status code.
/// - parameter code: The status code.
/// - Returns: A disposition.
private func disposition(from code: Int) -> URLSession.ResponseDisposition {
switch code {
case 200: return .allow
default: return .cancel
}
}
/// Handles the return of an error from a network request.
/// - parameter error: The error.
private func setError(from error: NSError) {
switch error.code {
case Int(CFNetworkErrors.cfurlErrorTimedOut.rawValue): self.error = .requestTimedOut
case Int(CFNetworkErrors.cfurlErrorNotConnectedToInternet.rawValue): self.error = .noInternetConnection
default: self.error = .generic
}
}
}
Then, to see if everything works, I call the operation in viewDidAppear:, which results in all of the expected data being printed:
import UIKit
class ViewController: UIViewController {
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
let credentials = UserCredentials(emailAddress: "xxxxxxx#xxxx.xx", password: "xxxxxxxxxxxxxxxxxx")
let signInOp = SignInOperation(credentials: credentials)
let printOperation = Operation()
printOperation.addDependency(signInOp)
printOperation.completionBlock = {
if let error = signInOp.error { return print("\n====> Sign-in Error: \(error.message)\n") }
if let user = signInOp.currentUser { print("\n====> User: \(user)\n") }
}
let queue = OperationQueue()
queue.addOperations([signInOp, printOperation], waitUntilFinished: false)
}
}
However, when using the Leaks profiler in Instruments, I get some alarming data.
I don't really know where to start here. When I click on any of the detected leaks, I am not taken to the code the leak originates from. I have watched a few tutorials and read Apple's documentation, but I am stuck trying to figure out where the leaks are coming from. It seems like a ridiculous amount.
I don't see anywhere in my code where I have strong reference cycles, so I am asking for some help with trying to figure out how to resolve the 421 detected leaks.
It turns out that I do have two strong reference cycles, which are the two following properties in my SignInOperation subclass: sessionTask & localURLSession.
After making these properties weak, I no longer have leaks detected:
/// The URL session that is used for coordinating tasks used for sign-in.
private weak var localURLSession: URLSession { return URLSession(configuration: localConfiguration, delegate: self, delegateQueue: nil) }
/// The configuration used for configuring the URL session used for sign-in.
private weak var localConfiguration: URLSessionConfiguration { return .ephemeral }
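A side note on this (my reading, not part of the fix described above): weak can't normally be applied to a computed property, and a more common explanation for leaks like these is that URLSession holds a strong reference to its delegate until the session is invalidated. A sketch of that approach:
// Sketch: keep a single session and invalidate it when the operation finishes,
// so the session releases its strong reference to its delegate (the operation).
private lazy var localURLSession: URLSession = URLSession(
    configuration: .ephemeral,
    delegate: self,
    delegateQueue: nil
)

func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
    defer {
        session.finishTasksAndInvalidate()   // break the session -> delegate cycle
        isFinished = true
    }
    // ... existing completion handling ...
}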
What is the screen device called in iOS/Swift?
When I print the devices I get
(
"<AVCaptureFigVideoDevice: 0x134d0f210 [Back Camera][com.apple.avfoundation.avcapturedevice.built-in_video:0]>",
"<AVCaptureFigVideoDevice: 0x134e0af80 [Front Camera][com.apple.avfoundation.avcapturedevice.built-in_video:1]>",
"<AVCaptureFigAudioDevice: 0x174265440 [iPad Microphone][com.apple.avfoundation.avcapturedevice.built-in_audio:0]>"
)
So where's the screen ID?
There's just too much outdated Objective-C code, and Swift is a moving target. I'm looking for a Swift solution to capture video from my iPad screen and audio from the built-in microphone. The audio will be a separate question.
Here is a screen grabber for OS X
https://github.com/kennyledet/SwiftCap
// AVCaptureSession holds inputs and outputs for real-time capture
let mSession = AVCaptureSession()
let mScreenCapOutput = AVCaptureMovieFileOutput()
var mOutputPath = ""
// Just capture main display for now
let mMainDisplayId = CGMainDisplayID()
but I cannot find a display ID like CGMainDisplayID for an iPad anywhere in the documentation...
Here is a typical solution for a camera in Swift
https://github.com/bradley/iOSSwiftSimpleAVCamera
but it has too many errors, doesn't compile with iOS 8.1 or 8.2, and it grabs video from the camera.
func addVideoOutput() {
var rgbOutputSettings: NSDictionary = NSDictionary(object: Int(CInt(kCIFormatRGBA8)), forKey: kCVPixelBufferPixelFormatTypeKey)
self.videoDeviceOutput = AVCaptureVideoDataOutput()
self.videoDeviceOutput.alwaysDiscardsLateVideoFrames = true
self.videoDeviceOutput.setSampleBufferDelegate(self, queue: self.sessionQueue)
if self.session.canAddOutput(self.videoDeviceOutput) {
self.session.addOutput(self.videoDeviceOutput)
}
}
https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/04_MediaCapture.html#//apple_ref/doc/uid/TP40010188-CH5-SW18
Apple gives an Objective-C solution like this:
/*
* Create video connection
*/
AVCaptureDeviceInput *videoIn = [[AVCaptureDeviceInput alloc] initWithDevice:[self videoDeviceWithPosition:AVCaptureDevicePositionBack] error:nil];
if ([_captureSession canAddInput:videoIn])
[_captureSession addInput:videoIn];
AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
[videoOut setAlwaysDiscardsLateVideoFrames:YES];
[videoOut setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]}];
dispatch_queue_t videoCaptureQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
[videoOut setSampleBufferDelegate:self queue:videoCaptureQueue];
if ([_captureSession canAddOutput:videoOut])
[_captureSession addOutput:videoOut];
_videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo];
self.videoOrientation = _videoConnection.videoOrientation;
if([self.session canSetSessionPreset:AVCaptureSessionPreset640x480])
[self.session setSessionPreset:AVCaptureSessionPreset640x480]; // Lower video resolution to decrease recorded movie size
return YES;
}
This should be easy.....???
Here is a working copy of iOSSwiftSimpleAVCamera in Swift. It doesn't quite solve your problem, but it is a bit of a starting point for anyone else who finds this thread. Some of the error checking was removed from this code, so be wary; it will only work on an actual device, not in the simulator.
App delegate
//
// AppDelegate.swift
// iOSSwiftSimpleAVCamera
//
// Created by Bradley Griffith on 7/1/14.
// Copyright (c) 2014 Bradley Griffith. All rights reserved.
//
import UIKit
import CoreData
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow?
func applicationDidFinishLaunching(application: UIApplication) {
}
func applicationWillResignActive(application: UIApplication) {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
}
func applicationDidEnterBackground(application: UIApplication) {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}
func applicationWillEnterForeground(application: UIApplication) {
// Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
}
func applicationDidBecomeActive(application: UIApplication) {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}
func applicationWillTerminate(application: UIApplication) {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
// Saves changes in the application's managed object context before the application terminates.
self.saveContext()
}
func saveContext () {
var error: NSError? = nil
let managedObjectContext = self.managedObjectContext
//if managedObjectContext != nil {
if managedObjectContext.hasChanges && !managedObjectContext.save(&error) {
// Replace this implementation with code to handle the error appropriately.
// abort() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development.
//println("Unresolved error \(error), \(error.userInfo)")
abort()
// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&}
}
}
// #pragma mark - Core Data stack
// Returns the managed object context for the application.
// If the context doesn't already exist, it is created and bound to the persistent store coordinator for the application.
var managedObjectContext: NSManagedObjectContext {
if !(_managedObjectContext != nil) {
let coordinator = self.persistentStoreCoordinator
//if coordinator != nil {
_managedObjectContext = NSManagedObjectContext()
_managedObjectContext!.persistentStoreCoordinator = coordinator
//&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&}
}
return _managedObjectContext!
}
var _managedObjectContext: NSManagedObjectContext? = nil
// Returns the managed object model for the application.
// If the model doesn't already exist, it is created from the application's model.
var managedObjectModel: NSManagedObjectModel {
if !(_managedObjectModel != nil) {
let modelURL = NSBundle.mainBundle().URLForResource("iOSSwiftSimpleAVCamera", withExtension: "momd")
_managedObjectModel = NSManagedObjectModel(contentsOfURL: modelURL!)
}
return _managedObjectModel!
}
var _managedObjectModel: NSManagedObjectModel? = nil
// Returns the persistent store coordinator for the application.
// If the coordinator doesn't already exist, it is created and the application's store added to it.
var persistentStoreCoordinator: NSPersistentStoreCoordinator {
if !(_persistentStoreCoordinator != nil) {
let storeURL = self.applicationDocumentsDirectory.URLByAppendingPathComponent("iOSSwiftSimpleAVCamera.sqlite")
var error: NSError? = nil
_persistentStoreCoordinator = NSPersistentStoreCoordinator(managedObjectModel: self.managedObjectModel)
if _persistentStoreCoordinator!.addPersistentStoreWithType(NSSQLiteStoreType, configuration: nil, URL: storeURL, options: nil, error: &error) == nil {
/*
Replace this implementation with code to handle the error appropriately.
abort() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development.
Typical reasons for an error here include:
* The persistent store is not accessible;
* The schema for the persistent store is incompatible with current managed object model.
Check the error message to determine what the actual problem was.
If the persistent store is not accessible, there is typically something wrong with the file path. Often, a file URL is pointing into the application's resources directory instead of a writeable directory.
If you encounter schema incompatibility errors during development, you can reduce their frequency by:
* Simply deleting the existing store:
NSFileManager.defaultManager().removeItemAtURL(storeURL, error: nil)
* Performing automatic lightweight migration by passing the following dictionary as the options parameter:
[NSMigratePersistentStoresAutomaticallyOption: true, NSInferMappingModelAutomaticallyOption: true}
Lightweight migration will only work for a limited set of schema changes; consult "Core Data Model Versioning and Data Migration Programming Guide" for details.
*/
//println("Unresolved error \(error), \(error.userInfo)")
abort()
}
}
return _persistentStoreCoordinator!
}
var _persistentStoreCoordinator: NSPersistentStoreCoordinator? = nil
// #pragma mark - Application's Documents directory
// Returns the URL to the application's Documents directory.
var applicationDocumentsDirectory: NSURL {
let urls = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)
return urls[urls.endIndex-1] as! NSURL
}
}
CameraSessionController
//
// CameraSessionController.swift
// iOSSwiftSimpleAVCamera
//
// Created by Bradley Griffith on 7/1/14.
// Copyright (c) 2014 Bradley Griffith. All rights reserved.
//
import UIKit
import AVFoundation
import CoreMedia
import CoreImage
@objc protocol CameraSessionControllerDelegate {
optional func cameraSessionDidOutputSampleBuffer(sampleBuffer: CMSampleBuffer!)
}
class CameraSessionController: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
var session: AVCaptureSession!
var sessionQueue: dispatch_queue_t!
var videoDeviceInput: AVCaptureDeviceInput!
var videoDeviceOutput: AVCaptureVideoDataOutput!
var stillImageOutput: AVCaptureStillImageOutput!
var runtimeErrorHandlingObserver: AnyObject?
var sessionDelegate: CameraSessionControllerDelegate?
/* Class Methods
------------------------------------------*/
class func deviceWithMediaType(mediaType: NSString, position: AVCaptureDevicePosition) -> AVCaptureDevice {
var devices: NSArray = AVCaptureDevice.devicesWithMediaType(mediaType as String)
var captureDevice: AVCaptureDevice = devices.firstObject as! AVCaptureDevice
for object:AnyObject in devices {
let device = object as! AVCaptureDevice
if (device.position == position) {
captureDevice = device
break
}
}
return captureDevice
}
/* Lifecycle
------------------------------------------*/
override init() {
super.init();
self.session = AVCaptureSession()
self.authorizeCamera();
self.sessionQueue = dispatch_queue_create("CameraSessionController Session", DISPATCH_QUEUE_SERIAL)
dispatch_async(self.sessionQueue, {
self.session.beginConfiguration()
self.addVideoInput()
self.addVideoOutput()
self.addStillImageOutput()
self.session.commitConfiguration()
})
}
/* Instance Methods
------------------------------------------*/
func authorizeCamera() {
AVCaptureDevice.requestAccessForMediaType(AVMediaTypeVideo, completionHandler: {
(granted: Bool) -> Void in
// If permission hasn't been granted, notify the user.
if !granted {
dispatch_async(dispatch_get_main_queue(), {
UIAlertView(
title: "Could not use camera!",
message: "This application does not have permission to use camera. Please update your privacy settings.",
delegate: self,
cancelButtonTitle: "OK").show()
})
}
});
}
func addVideoInput() -> Bool {
var success: Bool = false
var error: NSError?
var videoDevice: AVCaptureDevice = CameraSessionController.deviceWithMediaType(AVMediaTypeVideo, position: AVCaptureDevicePosition.Back)
self.videoDeviceInput = AVCaptureDeviceInput.deviceInputWithDevice(videoDevice, error: &error) as! AVCaptureDeviceInput;
if !(error != nil) {
if self.session.canAddInput(self.videoDeviceInput) {
self.session.addInput(self.videoDeviceInput)
success = true
}
}
return success
}
func addVideoOutput() {
//&&&&&&&&&&&&&&&&&&&&&var rgbOutputSettings: NSDictionary = NSDictionary(object: Int(CInt(kCIFormatRGBA8)), forKey: kCVPixelBufferPixelFormatTypeKey)
self.videoDeviceOutput = AVCaptureVideoDataOutput()
self.videoDeviceOutput.alwaysDiscardsLateVideoFrames = true
self.videoDeviceOutput.setSampleBufferDelegate(self, queue: self.sessionQueue)
if self.session.canAddOutput(self.videoDeviceOutput) {
self.session.addOutput(self.videoDeviceOutput)
}
}
func addStillImageOutput() {
self.stillImageOutput = AVCaptureStillImageOutput()
self.stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
if self.session.canAddOutput(self.stillImageOutput) {
self.session.addOutput(self.stillImageOutput)
}
}
func startCamera() {
dispatch_async(self.sessionQueue, {
var weakSelf: CameraSessionController? = self
self.runtimeErrorHandlingObserver = NSNotificationCenter.defaultCenter().addObserverForName(AVCaptureSessionRuntimeErrorNotification, object: self.sessionQueue, queue: nil, usingBlock: {
(note: NSNotification!) -> Void in
let strongSelf: CameraSessionController = weakSelf!
dispatch_async(strongSelf.sessionQueue, {
strongSelf.session.startRunning()
})
})
self.session.startRunning()
})
}
func teardownCamera() {
dispatch_async(self.sessionQueue, {
self.session.stopRunning()
NSNotificationCenter.defaultCenter().removeObserver(self.runtimeErrorHandlingObserver!)
})
}
func focusAndExposeAtPoint(point: CGPoint) {
dispatch_async(self.sessionQueue, {
var device: AVCaptureDevice = self.videoDeviceInput.device
var error: NSErrorPointer!
if device.lockForConfiguration(error) {
if device.focusPointOfInterestSupported && device.isFocusModeSupported(AVCaptureFocusMode.AutoFocus) {
device.focusPointOfInterest = point
device.focusMode = AVCaptureFocusMode.AutoFocus
}
if device.exposurePointOfInterestSupported && device.isExposureModeSupported(AVCaptureExposureMode.AutoExpose) {
device.exposurePointOfInterest = point
device.exposureMode = AVCaptureExposureMode.AutoExpose
}
device.unlockForConfiguration()
}
else {
// TODO: Log error.
}
})
}
func captureImage(completion:((image: UIImage?, error: NSError?) -> Void)?) {
// Bail out if there is no completion handler or no still image output.
if (completion == nil || self.stillImageOutput == nil) {
return
}
dispatch_async(self.sessionQueue, {
self.stillImageOutput.captureStillImageAsynchronouslyFromConnection(self.stillImageOutput.connectionWithMediaType(AVMediaTypeVideo), completionHandler: {
(imageDataSampleBuffer: CMSampleBuffer?, error: NSError?) -> Void in
if (error != nil)
{
completion!(image:nil, error:error)
}
else if (imageDataSampleBuffer != nil) {
var imageData: NSData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
var image: UIImage = UIImage(data: imageData)!
completion!(image:image, error:nil)
}
})
})
}
/* AVCaptureVideoDataOutput Delegate
------------------------------------------*/
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
self.sessionDelegate?.cameraSessionDidOutputSampleBuffer?(sampleBuffer)
}
}
camera view controller
//
// CameraViewController.swift
// iOSSwiftSimpleAVCamera
//
// Created by Bradley Griffith on 7/1/14.
// Copyright (c) 2014 Bradley Griffith. All rights reserved.
//
import UIKit
import CoreMedia
import AVFoundation
class CameraViewController: UIViewController, CameraSessionControllerDelegate {
var cameraSessionController: CameraSessionController!
var previewLayer: AVCaptureVideoPreviewLayer!
/* Lifecycle
------------------------------------------*/
override func viewDidLoad() {
super.viewDidLoad()
self.cameraSessionController = CameraSessionController()
self.cameraSessionController.sessionDelegate = self
self.setupPreviewLayer()
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
self.cameraSessionController.startCamera()
}
override func viewWillDisappear(animated: Bool) {
super.viewWillDisappear(animated)
self.cameraSessionController.teardownCamera()
}
/* Instance Methods
------------------------------------------*/
func setupPreviewLayer() {
var minSize = min(self.view.bounds.size.width, self.view.bounds.size.height)
var bounds: CGRect = CGRectMake(0.0, 0.0, minSize, minSize)
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.cameraSessionController.session)
self.previewLayer.bounds = bounds
self.previewLayer.position = CGPointMake(CGRectGetMidX(self.view.bounds), CGRectGetMidY(self.view.bounds))
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
self.view.layer.addSublayer(self.previewLayer)
}
func cameraSessionDidOutputSampleBuffer(sampleBuffer: CMSampleBuffer!) {
// Any frame processing could be done here.
}
}
If you want to capture the video of the screen and save it, another option is to take a number of screenshots and later convert the array of images into a video. It's not very efficient from a performance standpoint, and you probably won't get 30-60 fps, but if you are OK with 5-20 fps you might want to take a look at this example for Swift 3.
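In that spirit, here's a rough sketch of the screenshots-to-movie idea using AVAssetWriter (modern Swift; the frame size, frame rate, and pixel-buffer helper are assumptions on my part, not the linked example):
import AVFoundation
import UIKit

// Sketch: write an array of UIImages to a .mov file at a fixed frame rate.
func writeVideo(from images: [UIImage], to outputURL: URL, size: CGSize, fps: Int32 = 10) throws {
    let writer = try AVAssetWriter(outputURL: outputURL, fileType: .mov)
    let settings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: Int(size.width),
        AVVideoHeightKey: Int(size.height)
    ]
    let input = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
    let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input,
                                                       sourcePixelBufferAttributes: nil)
    writer.add(input)
    writer.startWriting()
    writer.startSession(atSourceTime: .zero)

    for (index, image) in images.enumerated() {
        while !input.isReadyForMoreMediaData { usleep(10_000) }   // crude back-pressure
        let time = CMTime(value: CMTimeValue(index), timescale: fps)
        if let buffer = pixelBuffer(from: image, size: size) {
            adaptor.append(buffer, withPresentationTime: time)
        }
    }

    input.markAsFinished()
    writer.finishWriting {}
}

// Helper: render a UIImage into a CVPixelBuffer of the requested size.
private func pixelBuffer(from image: UIImage, size: CGSize) -> CVPixelBuffer? {
    let attrs: [CFString: Any] = [kCVPixelBufferCGImageCompatibilityKey: true,
                                  kCVPixelBufferCGBitmapContextCompatibilityKey: true]
    var pixelBuffer: CVPixelBuffer?
    CVPixelBufferCreate(kCFAllocatorDefault, Int(size.width), Int(size.height),
                        kCVPixelFormatType_32ARGB, attrs as CFDictionary, &pixelBuffer)
    guard let buffer = pixelBuffer, let cgImage = image.cgImage else { return nil }

    CVPixelBufferLockBaseAddress(buffer, [])
    defer { CVPixelBufferUnlockBaseAddress(buffer, []) }

    let context = CGContext(data: CVPixelBufferGetBaseAddress(buffer),
                            width: Int(size.width), height: Int(size.height),
                            bitsPerComponent: 8,
                            bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
                            space: CGColorSpaceCreateDeviceRGB(),
                            bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
    context?.draw(cgImage, in: CGRect(origin: .zero, size: size))
    return buffer
}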