iOS Swift 2: Record Video with AVCaptureSession

I created an AVCaptureSession and attached the front-facing camera to it:
do {
    try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
} catch {
    print("err")
}
Now I want to start and stop recording on touch events. How do I do this?
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
    print("touch")
    // Start recording
}

override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
    print("release")
    // End recording and save
}

You didn't mention whether you're using AVCaptureMovieFileOutput or AVCaptureVideoDataOutput as the output for your session. The former is well suited for recording a video quickly and without further coding; the latter is used for more advanced recording, handing you chunks of CMSampleBuffer data during the recording session.
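For a sense of that buffer-based route, here is a rough sketch of wiring up AVCaptureVideoDataOutput with the same Swift 2 era APIs (an illustration only, assuming the class adopts AVCaptureVideoDataOutputSampleBufferDelegate):
// Sketch: deliver raw frames to a delegate instead of writing a file.
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("videoFrames", DISPATCH_QUEUE_SERIAL))
captureSession.addOutput(dataOutput)

// Each frame then arrives as a CMSampleBuffer in the delegate callback:
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    // process, display, or write the frame yourself
}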
For the scope of this answer I'll go with AVCaptureMovieFileOutput. Here is some minimalist starting code:
import UIKit
import AVFoundation
import AssetsLibrary

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()

    lazy var frontCameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .Front }.first
    }()

    lazy var micDevice: AVCaptureDevice? = {
        return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    }()

    var movieOutput = AVCaptureMovieFileOutput()

    private var tempFilePath: NSURL = {
        let tempPath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("tempMovie").URLByAppendingPathExtension("mp4").absoluteString
        if NSFileManager.defaultManager().fileExistsAtPath(tempPath) {
            do {
                try NSFileManager.defaultManager().removeItemAtPath(tempPath)
            } catch { }
        }
        return NSURL(string: tempPath)!
    }()

    private var library = ALAssetsLibrary()

    override func viewDidLoad() {
        super.viewDidLoad()

        // start session configuration
        captureSession.beginConfiguration()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // add device inputs (front camera and mic)
        captureSession.addInput(deviceInputFromDevice(frontCameraDevice))
        captureSession.addInput(deviceInputFromDevice(micDevice))

        // add output movieFileOutput
        movieOutput.movieFragmentInterval = kCMTimeInvalid
        captureSession.addOutput(movieOutput)

        // start session
        captureSession.commitConfiguration()
        captureSession.startRunning()
    }

    override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        print("touch")
        // start capture
        movieOutput.startRecordingToOutputFileURL(tempFilePath, recordingDelegate: self)
    }

    override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
        print("release")
        // stop capture
        movieOutput.stopRecording()
    }

    private func deviceInputFromDevice(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
        guard let validDevice = device else { return nil }
        do {
            return try AVCaptureDeviceInput(device: validDevice)
        } catch let outError {
            print("Device setup error occurred \(outError)")
            return nil
        }
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        if (error != nil) {
            print("Unable to save video to the iPhone \(error.localizedDescription)")
        } else {
            // save video to photo album
            library.writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: { (assetURL: NSURL?, error: NSError?) -> Void in
                if (error != nil) {
                    print("Unable to save video to the iPhone \(error!.localizedDescription)")
                }
            })
        }
    }
}
For more information on camera capture, refer to WWDC 2014 Session 508.
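One more note, my own addition rather than something from the answer above: if the inputs fail to attach, check camera/microphone authorization first. A sketch with the Swift 2 naming:
// Hypothetical pre-flight check: ask for camera access before configuring the session.
AVCaptureDevice.requestAccessForMediaType(AVMediaTypeVideo) { granted in
    if granted {
        // safe to add inputs and start the session
    } else {
        print("Camera access denied")
    }
}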

Related

View turns into black screen when I close rewarded video ad in iOS

I have successfully implemented an AdMob rewarded video ad. I implemented it in the GameViewController class and call it from the GameScene using a NotificationCenter observer, and that part works. But when I close the rewarded video after it completes, my GameScene turns into a black screen. Here is my code for GameViewController:
import UIKit
import SpriteKit
import GoogleMobileAds

class GameViewController: UIViewController, GADBannerViewDelegate, GADRewardBasedVideoAdDelegate, UIAlertViewDelegate {

    var rewardBasedVideo: GADRewardBasedVideoAd?
    var adRequestInProgress = false

    override func viewDidLoad() {
        super.viewDidLoad()
        // create our scene:
        let menuScene = MenuScene()
        let skView = self.view as! SKView
        skView.showsPhysics = true
        skView.ignoresSiblingOrder = true
        let size = CGSize(width: 590, height: 390)
        menuScene.size = CGSize(width: size.width, height: size.height)
        skView.presentScene(menuScene)
        skView.translatesAutoresizingMaskIntoConstraints = false
        NotificationCenter.default.addObserver(self, selector: #selector(self.videoReward), name: NSNotification.Name(rawValue: "showVideoRewardAd"), object: nil)
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return UIInterfaceOrientationMask.landscape
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Release any cached data, images, etc. that aren't in use.
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    func videoReward() {
        rewardBasedVideo = GADRewardBasedVideoAd.sharedInstance()
        rewardBasedVideo?.delegate = self
        rewardBasedVideo?.load(GADRequest(),
                               withAdUnitID: "ca-app-pub-3940256099942544/1712485313")
        if rewardBasedVideo?.isReady == true {
            rewardBasedVideo?.present(fromRootViewController: self)
        } else {
            UIAlertView(title: "Reward based video not ready",
                        message: "The reward based video didn't finish loading or failed to load",
                        delegate: self.view,
                        cancelButtonTitle: "ok").show()
        }
    }

    func rewardBasedVideoAd(_ rewardBasedVideoAd: GADRewardBasedVideoAd,
                            didFailToLoadWithError error: Error) {
        adRequestInProgress = false
        print("Reward based video ad failed to load: \(error.localizedDescription)")
    }

    func rewardBasedVideoAdDidReceive(_ rewardBasedVideoAd: GADRewardBasedVideoAd) {
        adRequestInProgress = false
        rewardBasedVideo?.present(fromRootViewController: self)
        print("Reward based video ad is received.")
    }
}
My GameScene code, where I call the rewarded video, looks like this:
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    for touch in touches {
        let location = touch.location(in: self)
        let nodeTouched = atPoint(location)
        if let gameSprite = nodeTouched as? GameSprite {
            gameSprite.onTap()
        }
        if nodeTouched.name == "play" {
            NotificationCenter.default.post(name: NSNotification.Name(rawValue: "showVideoRewardAd"), object: nil)
        }
    }
}
Now tell me, where is my mistake?

playback recorded content in AVCapture

I am trying to play back the recorded session in full view after it is recorded.
Kind of like Snapchat.
I can record and play the video back in a UIView, but it is shown with "Play", "Done", and "Stop" buttons. I don't want that. I want it to look like Snapchat.
This is my code, which I found here, but modified a tiny little bit. :)
import UIKit
import AVFoundation
import AssetsLibrary
import Photos
import MediaPlayer
import AVKit

class Camera: UIViewController, AVCaptureFileOutputRecordingDelegate {

    @IBOutlet var cameraView: UIView!
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice!
    var CamChoser = false
    var moviePlayer: MPMoviePlayerController?

    @IBOutlet weak var playback: UIView!
    @IBOutlet weak var exitCameraModeButton: UIButton!

    @IBAction func exitCameraModeButton(sender: AnyObject) {
        self.dismissViewControllerAnimated(true, completion: nil)
    }

    var captureSession = AVCaptureSession()

    lazy var cameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .Front }.first
    }()

    lazy var micDevice: AVCaptureDevice? = {
        return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    }()

    var movieOutput = AVCaptureMovieFileOutput()

    private var tempFilePath: NSURL = {
        let tempPath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("tempMovie").URLByAppendingPathExtension("mp4").absoluteString
        if NSFileManager.defaultManager().fileExistsAtPath(tempPath) {
            do {
                try NSFileManager.defaultManager().removeItemAtPath(tempPath)
            } catch { }
        }
        return NSURL(string: tempPath)!
    }()

    private var library = ALAssetsLibrary()
    //private var library = PHPhotoLibrary()

    @IBOutlet weak var switchCameraButton: UIButton!

    @IBAction func switchCameraButton(sender: AnyObject) {
        //startSession()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        //start session configuration
        captureSession.beginConfiguration()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        startSession()
    }

    func startSession() {
        // add device inputs (front camera and mic)
        print(CamChoser)
        captureSession.addInput(deviceInputFromDevice(cameraDevice))
        captureSession.addInput(deviceInputFromDevice(micDevice))

        // add output movieFileOutput
        movieOutput.movieFragmentInterval = kCMTimeInvalid
        captureSession.addOutput(movieOutput)

        // start session
        captureSession.commitConfiguration()
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.cameraView.layer.addSublayer(previewLayer!)
        self.cameraView.bringSubviewToFront(self.exitCameraModeButton)
        self.cameraView.bringSubviewToFront(self.switchCameraButton)
        previewLayer?.frame = self.cameraView.layer.frame
        captureSession.startRunning()
    }

    override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        print("touch")
        // start capture
        movieOutput.startRecordingToOutputFileURL(tempFilePath, recordingDelegate: self)
    }

    override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
        print("release")
        // stop capture
        movieOutput.stopRecording()
        let videoUrl = movieOutput.outputFileURL
        moviePlayer = MPMoviePlayerController(contentURL: videoUrl)
        moviePlayer!.movieSourceType = MPMovieSourceType.Unknown
        moviePlayer!.view.frame = playback.bounds
        moviePlayer!.scalingMode = MPMovieScalingMode.AspectFill
        moviePlayer!.controlStyle = MPMovieControlStyle.Embedded
        moviePlayer!.shouldAutoplay = true
        playback.addSubview((moviePlayer?.view)!)
        //moviePlayer!.prepareToPlay()
        moviePlayer?.setFullscreen(true, animated: true)
        moviePlayer!.play()
        cameraView.bringSubviewToFront(playback)
    }

    private func deviceInputFromDevice(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
        guard let validDevice = device else { return nil }
        do {
            return try AVCaptureDeviceInput(device: validDevice)
        } catch let outError {
            print("Device setup error occurred \(outError)")
            return nil
        }
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        if (error != nil) {
            print("Unable to save video to the iPhone \(error.localizedDescription)")
        } else {
            // save video to photo album
            library.writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: { (assetURL: NSURL?, error: NSError?) -> Void in
                if (error != nil) {
                    print("Unable to save video to the iPhone \(error!.localizedDescription)")
                }
            })
        }
    }
}
Just so you know, MPMoviePlayerController has been deprecated since iOS 9. The issue is that your control style is set to Embedded, which displays the control buttons by default. Use MPMovieControlStyle.None to remove the controls.
See the MPMovieControlStyle documentation for more details.
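If you would rather avoid the deprecated player entirely, an AVPlayerLayer plays back with no chrome at all; a minimal sketch under the same Swift 2 APIs, where videoUrl is the NSURL your movie output recorded to:
// A bare AVPlayerLayer never draws control chrome.
let player = AVPlayer(URL: videoUrl)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = playback.bounds
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
playback.layer.addSublayer(playerLayer)
player.play()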

Stopping the background music when starting a game

I have background music which starts when the app is launched in GameViewController.swift using the following code:
class GameViewController: UIViewController {

    // VARIABLES
    var backgroundMusicPlayer: AVAudioPlayer!

    // AUDIO PLAYER
    func playBackgroundMusic(filename: String) {
        let url = NSBundle.mainBundle().URLForResource(filename, withExtension: nil)
        var error: NSError? = nil
        do {
            backgroundMusicPlayer = try AVAudioPlayer(contentsOfURL: url!)
        } catch let error1 as NSError {
            error = error1
            backgroundMusicPlayer = nil
        }
        if backgroundMusicPlayer == nil {
            print("Could not create audio player: \(error!)")
            return
        }
        backgroundMusicPlayer.numberOfLoops = -1
        backgroundMusicPlayer.prepareToPlay()
        backgroundMusicPlayer.play()
    }

    func stopBackgroundMusic() {
        backgroundMusicPlayer.stop()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        playBackgroundMusic("MainTheme.mp3")
        << Various irrelevant code >>
    }
Because this runs in the view controller, it persists through scene changes on the menu (i.e. opening the "shop" scene) and creates a seamless track. When I click the "Play" button on the menu scene, I want the music to stop and the game to begin. I have the stopBackgroundMusic() method in the GameViewController, but I don't know how to call it from the menu scene. In the menu scene I tried this:
// TOUCH
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
    let touch = touches.first as UITouch?
    let touchLocation = touch!.locationInNode(self)
    let touchedNode = self.nodeAtPoint(touchLocation)
    if touchedNode.name == "startGame" {
        GameViewController.stopBackgroundMusic()
        let transitionType = SKTransition.fadeWithDuration(2)
        let viewSize = self.view?.bounds.size
        let scene = GameScene(size: viewSize!)
        self.view?.presentScene(scene, transition: transitionType)
    }
}
But I get an error saying I'm missing parameter #1 in the call to stopBackgroundMusic(), which shouldn't require any parameters. Am I calling this method wrong? Thanks!
You are referring to your class by using GameViewController, but your function is declared at the object instance level; the "missing parameter #1" error appears because calling an instance method through the class type expects the instance itself as that first argument.
If you declare the variable and functions at the class level, your code in the touchesBegan function should work fine:
static var backgroundMusicPlayer: AVAudioPlayer!

class func playBackgroundMusic(filename: String) ...

class func stopBackgroundMusic() ...

override func viewDidLoad() {
    super.viewDidLoad()
    GameViewController.playBackgroundMusic("MainTheme.mp3")
    << Various irrelevant code >>
}

Accessing Camera in Swift 2 iOS

I am trying to access the camera and take a picture in my iPad app. The code below works in Swift 1.2 but fails with three types of errors (highlighted below in block comments) in Swift 2. I have spent time looking for the Swift 2 way of writing it but haven't gotten anywhere. Any suggestion is highly appreciated.
import UIKit
import AVFoundation

class PictureController: UIViewController {

    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?

    // If we find a device we'll store it here for later use
    var captureDevice: AVCaptureDevice?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the back camera
                if (device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        print("Capture device found")
                        beginSession()
                    }
                }
            }
        }
    }

    func focusTo(value: Float) {
        if let device = captureDevice {
            if (device.lockForConfiguration(nil)) { // CALL CAN THROW BUT IS NOT MARKED WITH 'TRY' AND THE ERROR IS NOT HANDLED
                                                    // nil IS NOT COMPATIBLE WITH EXPECTED ARGUMENT TYPE '()'
                device.setFocusModeLockedWithLensPosition(value, completionHandler: { (time) -> Void in
                    //
                })
                device.unlockForConfiguration()
            }
        }
    }

    let screenWidth = UIScreen.mainScreen().bounds.size.width

    override func touchesBegan(touches: NSSet, withEvent event: UIEvent) { // METHOD DOES NOT OVERRIDE ANY METHOD FROM SUPERCLASS
        let anyTouch = touches.anyObject() as! UITouch
        let touchPercent = anyTouch.locationInView(self.view).x / screenWidth
        focusTo(Float(touchPercent))
    }

    override func touchesMoved(touches: NSSet, withEvent event: UIEvent) { // METHOD DOES NOT OVERRIDE ANY METHOD FROM SUPERCLASS
        let anyTouch = touches.anyObject() as! UITouch
        let touchPercent = anyTouch.locationInView(self.view).x / screenWidth
        focusTo(Float(touchPercent))
    }

    func configureDevice() {
        if let device = captureDevice {
            device.lockForConfiguration(nil) // CALL CAN THROW BUT IS NOT MARKED WITH 'TRY' AND THE ERROR IS NOT HANDLED
                                             // nil IS NOT COMPATIBLE WITH EXPECTED ARGUMENT TYPE '()'
            device.focusMode = .Locked
            device.unlockForConfiguration()
        }
    }

    func beginSession() {
        configureDevice()
        var err: NSError? = nil
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice, error: &err))
        // Cannot invoke initializer for type 'AVCaptureDeviceInput' with an argument list of type '(device: AVCaptureDevice?, error: inout NSError?)'
        if err != nil {
            print("error: \(err?.localizedDescription)")
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer)
        previewLayer?.frame = self.view.layer.frame
        captureSession.startRunning()
    }
}
Error 1: Call can throw, but it is not marked with 'try' and the error is not handled
Type '()' does not conform to protocol 'BooleanType'
Error 2: Method does not override any method from its superclass
(for touchesBegan and touchesMoved)
Error 3: Cannot invoke initializer for type 'AVCaptureDeviceInput' with an argument list of type '(device: AVCaptureDevice?, error: inout NSError?)'
UPDATE:
All other issues are fixed (please refer to the comments if you are looking for solutions), except for the one below.
func focusTo(value: Float) {
    if let device = captureDevice {
        if (device.lockForConfiguration()) { // THIS LINE IS THROWING BELOW MENTIONED ERROR
            device.setFocusModeLockedWithLensPosition(value, completionHandler: { (time) -> Void in
            })
            device.unlockForConfiguration()
        }
    }
}
ERROR:
Type '()' does not conform to protocol 'BooleanType'
Call can throw, but it is not marked with 'try' and the error is not handled
In Swift 2, lockForConfiguration() throws instead of returning a Bool, so it can't be used as an if condition; wrap it in do/try/catch:
do {
    try device.lockForConfiguration()
    device.setFocusModeLockedWithLensPosition(value, completionHandler: { (time) -> Void in
        //
    })
    device.unlockForConfiguration()
} catch let error as NSError {
    print(error.code)
}
And the touch handlers need the Swift 2 signature (Set<UITouch> instead of NSSet) for the override to compile:
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
}
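Error 3 disappears the same way, because the AVCaptureDeviceInput initializer also throws in Swift 2 rather than taking an inout NSError; a sketch, assuming captureDevice is non-nil at this point:
do {
    let input = try AVCaptureDeviceInput(device: captureDevice)
    captureSession.addInput(input)
} catch let error as NSError {
    print("error: \(error.localizedDescription)")
}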
I am posting a complete solution here for Swift 2, which incorporates the solution from gkhanacer (https://stackoverflow.com/a/36719940/2144994) and all the code examples in this question. This is a complete view controller:
import UIKit
import AVFoundation

class FirstViewController: UIViewController {

    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?

    // If we find a device we'll store it here for later use
    var captureDevice: AVCaptureDevice?

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the back camera
                if (device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        print("Capture device found")
                        beginSession()
                    }
                }
            }
        }
    }

    let screenWidth = UIScreen.mainScreen().bounds.size.width

    func configureDevice() {
        if let device = captureDevice {
            do {
                try device.lockForConfiguration()
                device.focusMode = .AutoFocus
                device.unlockForConfiguration()
            } catch let error as NSError {
                print(error.code)
            }
        }
    }

    override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        let screenSize = previewLayer!.bounds.size
        let frameSize: CGSize = view.frame.size
        if let touchPoint = touches.first {
            let location: CGPoint = touchPoint.locationInView(self.view)
            // Map the view touch into the camera's (0,0)-(1,1) space, which is
            // oriented landscape-right regardless of the UI orientation.
            let x = location.y / frameSize.height
            let y = 1.0 - (location.x / frameSize.width)
            let focusPoint = CGPoint(x: x, y: y)
            print("POINT : X: \(x), Y: \(y)")
            let captureDevice = (AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]).filter { $0.position == .Back }.first
            if let device = captureDevice {
                do {
                    try device.lockForConfiguration()
                    let support: Bool = device.focusPointOfInterestSupported
                    if support {
                        print("focusPointOfInterestSupported: \(support)")
                        device.focusPointOfInterest = focusPoint
                        // device.focusMode = .ContinuousAutoFocus
                        device.focusMode = .AutoFocus
                        // device.focusMode = .Locked
                        device.unlockForConfiguration()
                        print("Focus point was set successfully")
                    } else {
                        print("focusPointOfInterest is not supported: \(support)")
                    }
                } catch {
                    // just ignore
                    print("Focus point error")
                }
            }
        }
    }

    override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
        if let touch = touches.first {
            print("\(touch)")
        }
        super.touchesEnded(touches, withEvent: event)
    }

    override func touchesMoved(touches: Set<UITouch>, withEvent event: UIEvent?) {
        if let touch = touches.first {
            print("\(touch)")
        }
        super.touchesMoved(touches, withEvent: event)
    }

    func beginSession() {
        configureDevice()
        try! captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer!)
        previewLayer?.frame = self.view.layer.frame
        captureSession.startRunning()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
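One caveat on the code above, my own observation rather than part of the original answers: the try! in beginSession() will crash if no camera input is available (for example, in the simulator). A safer variant with otherwise identical behavior:
func beginSession() {
    configureDevice()
    do {
        try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
    } catch let error as NSError {
        print("error: \(error.localizedDescription)")
        return    // don't start the session without an input
    }
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.view.layer.addSublayer(previewLayer!)
    previewLayer?.frame = self.view.layer.frame
    captureSession.startRunning()
}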

Remote Control event in iOS with Swift

Trying to figure out how to read the Apple headphones' volume buttons to use as a trigger for the camera shutter (as the Apple Camera app does).
From the documentation on Remote Control Events, Remote Control Received With Event, and this git repo, I've pieced together that I'll probably need an AVAudioPlayer object, .beginReceivingRemoteControlEvents(), and remoteControlReceivedWithEvent, along with making this view's canBecomeFirstResponder() return true.
import UIKit
import AVFoundation

class ViewController: UIViewController, AVAudioPlayerDelegate {

    var player: AVAudioPlayer!

    override func viewDidLoad() {
        super.viewDidLoad()
        var session: AVAudioSession = AVAudioSession.sharedInstance()
        session.setActive(true, error: nil)
    }

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
        println("viewDidAppear worked...")
        self.becomeFirstResponder()
        UIApplication.sharedApplication().beginReceivingRemoteControlEvents()
    }

    override func canBecomeFirstResponder() -> Bool {
        return true
    }

    override func remoteControlReceivedWithEvent(event: UIEvent) {
        let rc = event.subtype
        println("does this work? \(rc.rawValue)")
        //takePicture()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }
}
I expected to see "does this work" when hitting the volume buttons on the headphones; instead it just adjusts the headphone volume as normal. So I must be missing something, maybe with a delegate or the audio session?
I cross-posted this on r/swift, where I was told it probably requires playing audio (quoted straight from the documentation).
So while this isn't the ideal solution, it works for my own private use.
import UIKit
import AVFoundation
import MediaPlayer

class ViewController: UIViewController, AVAudioPlayerDelegate {

    var testPlayer: AVAudioPlayer? = nil

    func loadSound(filename: NSString) -> AVAudioPlayer {
        let url = NSBundle.mainBundle().URLForResource(filename as String, withExtension: "caf")
        var error: NSError? = nil
        let player = AVAudioPlayer(contentsOfURL: url, error: &error)
        if error != nil {
            println("Error loading \(url): \(error?.localizedDescription)")
        } else {
            player.prepareToPlay()
        }
        return player
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Loop a silent sound file forever so the app counts as "playing audio".
        self.testPlayer = self.loadSound("silence")
        self.testPlayer?.numberOfLoops = -1
        self.testPlayer?.play()
    }

    override func canBecomeFirstResponder() -> Bool {
        return true
    }

    override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
        self.becomeFirstResponder()
        UIApplication.sharedApplication().beginReceivingRemoteControlEvents()
    }

    override func remoteControlReceivedWithEvent(event: UIEvent) {
        let rc = event.subtype
        println("rc.rawValue: \(rc.rawValue)")
        // take photo
    }
}
I noticed that in Apple's Camera app, the +/- volume buttons trigger the camera and the microphone button pauses/plays any audio running in another app, but in this implementation the volume buttons still control the volume (and any audio is paused when the app launches).
An rc.rawValue of 103 corresponds to a single click of the microphone button, a double click returns 104, a triple click returns 105, and sometimes bumping a couple at a time returns 108 or 109.
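Those raw values line up with the UIEventSubtype remote-control cases, so a switch reads better than magic numbers; a sketch in the same era's naming (takePicture() is a hypothetical stand-in for your shutter code):
override func remoteControlReceivedWithEvent(event: UIEvent) {
    switch event.subtype {
    case .RemoteControlTogglePlayPause:   // 103: single click of the mic button
        takePicture()                     // hypothetical shutter trigger
    case .RemoteControlNextTrack:         // 104: double click
        break
    case .RemoteControlPreviousTrack:     // 105: triple click
        break
    default:                              // 108/109 are begin/end seeking forward
        break
    }
}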
Based on Cody's answer, but updated for 2019 (Swift 5):
import UIKit
import AVFoundation
import MediaPlayer

class ViewController: UIViewController, AVAudioPlayerDelegate {

    var myPlayer: AVAudioPlayer? = nil

    func loadSound(filename: NSString) -> AVAudioPlayer? {
        let url = Bundle.main.url(forResource: filename as String, withExtension: "mp3")
        do {
            let player = try AVAudioPlayer(contentsOf: url ?? URL(fileURLWithPath: ""))
            player.prepareToPlay()
            return player
        } catch {
            print("Error : \(error)")
            return nil
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        guard let testPlayer = loadSound(filename: "silence") else {
            print("Not able to load the sound")
            return
        }
        testPlayer.delegate = self
        testPlayer.volume = 0.8
        testPlayer.numberOfLoops = -1
        myPlayer = testPlayer
        myPlayer?.play()
    }

    // Required so becomeFirstResponder() in viewDidAppear actually succeeds
    // (it defaults to false, and remote events go to the first responder).
    override var canBecomeFirstResponder: Bool {
        return true
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        self.becomeFirstResponder()
        UIApplication.shared.beginReceivingRemoteControlEvents()
    }

    override func remoteControlReceived(with event: UIEvent?) {
        let rc = event?.subtype
        print("rc.rawValue: \(rc?.rawValue)")
        // Do your thing
    }
}
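One assumption worth stating about the Swift 5 version: remote-control events still only arrive while your audio session is active and playing, so you may also need something like this before starting the looped silence (my addition, not part of the original answer):
do {
    // Mark the app as an audio-playing app so it receives remote events.
    try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
    try AVAudioSession.sharedInstance().setActive(true)
} catch {
    print("Audio session error: \(error)")
}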
