I am trying to change the camera of the publisher in OpenTok. In Android it is super easy, but I don't understand how to do it in Objective-C for iOS.
I tried:
if (_publisher.cameraPosition == AVCaptureDevicePositionFront) {
    _publisher.cameraPosition = AVCaptureDevicePositionBack; // back camera
} else {
    _publisher.cameraPosition = AVCaptureDevicePositionFront; // front camera
}
I should mention that I am a beginner in Objective-C (and in OpenTok).
How should I do this?
Thank you :)
Try this one:
func setCameraPosition(_ position: AVCaptureDevicePosition) -> Bool {
    guard let preset = captureSession?.sessionPreset else {
        return false
    }
    let newVideoInput: AVCaptureDeviceInput? = {
        do {
            if position == AVCaptureDevicePosition.back {
                return try AVCaptureDeviceInput.init(device: backFacingCamera())
            } else if position == AVCaptureDevicePosition.front {
                return try AVCaptureDeviceInput.init(device: frontFacingCamera())
            } else {
                return nil
            }
        } catch {
            return nil
        }
    }()
    guard let newInput = newVideoInput else {
        return false
    }
    var success = true
    captureQueue.sync {
        captureSession?.beginConfiguration()
        captureSession?.removeInput(videoInput)
        if captureSession?.canAddInput(newInput) ?? false {
            captureSession?.addInput(newInput)
            videoInput = newInput
        } else {
            success = false
            captureSession?.addInput(videoInput)
        }
        captureSession?.commitConfiguration()
    }
    if success {
        capturePreset = preset
    }
    return success
}

func toggleCameraPosition() -> Bool {
    guard hasMultipleCameras else {
        return false
    }
    if videoInput?.device.position == .front {
        return setCameraPosition(.back)
    } else {
        return setCameraPosition(.front)
    }
}
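Note that this snippet appears to come from a custom video capturer, so it assumes several members defined elsewhere: captureSession, captureQueue, videoInput, capturePreset, hasMultipleCameras, and the backFacingCamera()/frontFacingCamera() helpers. Those helpers are not shown; a minimal sketch of them, using the classic device-enumeration API (since superseded by discovery sessions), might look like this:

// Hypothetical helpers assumed by the snippet above: scan the available
// video devices for one at the requested position.
func camera(at position: AVCaptureDevicePosition) -> AVCaptureDevice? {
    let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice] ?? []
    return devices.first(where: { $0.position == position })
}

func frontFacingCamera() -> AVCaptureDevice? { return camera(at: .front) }
func backFacingCamera() -> AVCaptureDevice? { return camera(at: .back) }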
For Swift 4
if publisher.cameraPosition == .back {
    publisher.cameraPosition = .front
} else {
    publisher.cameraPosition = .back
}
I haven't checked this in Objective-C, but it should work the same way: toggle publisher.cameraPosition between the two values.
We have a function whose completion block should call another function when it finishes, but whatever is inside the completion block is never called. Here is the function:
func appendAllData(completion: () -> Void) {
    guard let movieDetails = self.movieDetailsData else {
        // handle nil case
        return
    }
    if let posterImage = self.movieDetailsData?.poster {
        self.posterArray.append(posterImage)
    }
    if let overview = self.movieDetailsData?.overview {
        self.overviewArray.append(overview)
    }
    if let releaseDate = self.movieDetailsData?.releaseData {
        self.releaseInfoArray.append(releaseDate)
    }
    if let runtime = self.movieDetailsData?.runtime {
        self.releaseInfoArray.append(String(describing: runtime))
    }
    if let genre = self.movieDetailsData?.genre {
        if !genre.isEmpty {
            self.releaseInfoArray.append(genre[0].name)
        }
    }
    if let budget = self.movieDetailsData?.budget {
        self.boxOfficeArray.append(budget)
    }
    if let revenue = self.movieDetailsData?.revenue {
        self.boxOfficeArray.append(revenue)
    }
    if let homepage = self.movieDetailsData?.homepage {
        self.homePageArray.append(homepage)
    }
    if let images = self.movieDetailsData?.images {
        self.imageArray += images.backdropImages.map { $0.filePath }
    }
}
Here is how it's used:
self.appendAllData(completion: { _ in
    // Nothing inside here gets called
    DispatchQueue.main.async {
        print(self.movieDetailsData?.poster)
        if let bgImage = self.movieDetailsData?.poster {
            self.backgroundImage.sd_setImage(with: URL(string: "\(baseImageURL)\(bgImage)"))
            print("background pic loaded")
            self.backgroundImage.addBlurEffect()
        }
        self.detailTableView.reloadData()
    }
})
Nothing inside the completion block is called, any idea how to fix this?
I believe you need to call completion() at the end of the function for your completion code to execute:
func appendAllData(completion: () -> Void) {
    guard let movieDetails = self.movieDetailsData else {
        // handle nil case
        return
    }
    if let posterImage = self.movieDetailsData?.poster {
        self.posterArray.append(posterImage)
    }
    if let overview = self.movieDetailsData?.overview {
        self.overviewArray.append(overview)
    }
    if let releaseDate = self.movieDetailsData?.releaseData {
        self.releaseInfoArray.append(releaseDate)
    }
    if let runtime = self.movieDetailsData?.runtime {
        self.releaseInfoArray.append(String(describing: runtime))
    }
    if let genre = self.movieDetailsData?.genre {
        if !genre.isEmpty {
            self.releaseInfoArray.append(genre[0].name)
        }
    }
    if let budget = self.movieDetailsData?.budget {
        self.boxOfficeArray.append(budget)
    }
    if let revenue = self.movieDetailsData?.revenue {
        self.boxOfficeArray.append(revenue)
    }
    if let homepage = self.movieDetailsData?.homepage {
        self.homePageArray.append(homepage)
    }
    if let images = self.movieDetailsData?.images {
        self.imageArray += images.backdropImages.map { $0.filePath }
    }
    completion()
}
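One more thing to check at the call site: completion is declared as () -> Void, so the closure takes no parameters, and writing { _ in ... } should be rejected by the compiler ("contextual closure type '() -> Void' expects 0 arguments"). A minimal matching call, shortened here to just the table reload:

// The closure takes no parameters, so there is no "_ in".
self.appendAllData {
    DispatchQueue.main.async {
        self.detailTableView.reloadData()
    }
}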
I'm using Swift 3 with GPUImage. I have a LUT image file named lut_lookup.png. I have successfully used it with GPUImage on a still image: the LUT filter is applied and the result is displayed.
I'm now trying to apply the same LUT filter to the live camera view, but I cannot seem to get it to work. It essentially doesn't even display the camera.
I've supplied my code below, any help would be greatly appreciated. It's probably something simple I've missed/done wrong, but I can't seem to spot it.
import UIKit
import GPUImage

class LiveCameraVC: UIViewController {

    // MARK: - Variables
    let videoCamera: GPUImageVideoCamera? = {
        if let videoCamera = GPUImageVideoCamera(sessionPreset: AVCaptureSessionPreset640x480, cameraPosition: .back) {
            videoCamera.outputImageOrientation = .portrait
            videoCamera.horizontallyMirrorFrontFacingCamera = false
            videoCamera.horizontallyMirrorRearFacingCamera = false
            return videoCamera
        } else {
            print("GPUImageVideoCamera Nil")
            return nil
        }
    }()

    let filter: GPUImageLookupFilter = {
        let filter = GPUImageLookupFilter()
        filter.intensity = 1.0
        return filter
    }()

    // MARK: - UI
    let modifiedImageView: GPUImageView = {
        let imageView = GPUImageView.newAutoLayout()
        imageView.fillMode = kGPUImageFillModePreserveAspectRatioAndFill
        imageView.isUserInteractionEnabled = true
        return imageView
    }()

    // MARK: - Life Cycle
    override func viewDidLoad() {
        super.viewDidLoad()
        let tapGesture: UITapGestureRecognizer = UITapGestureRecognizer().bk_init { [unowned self] (sender: UIGestureRecognizer?, state: UIGestureRecognizerState, location: CGPoint) in
            print("Tapped")
            self.performLut()
        } as! UITapGestureRecognizer
        modifiedImageView.addGestureRecognizer(tapGesture)
    }

    override func loadView() {
        super.loadView()
        self.view.addSubview(modifiedImageView)
        modifiedImageView.autoPinEdgesToSuperviewEdges()
    }

    // MARK: - Private
    private func performLut() {
        print("performLut")
        if let videoCamera = videoCamera {
            if let lookupImageSource = GPUImagePicture(image: UIImage(named: "lut_lookup")) {
                videoCamera.addTarget(filter, atTextureLocation: 0)
                lookupImageSource.addTarget(filter, atTextureLocation: 1)
                filter.addTarget(modifiedImageView)
                videoCamera.startCapture()
            } else {
                print("lookupImageSource Nil")
            }
        } else {
            print("GPUImageVideoCamera Nil")
        }
    }
}
Thanks
Solved it. The lookup picture needed to be stored as a property (so it stays alive) and needed processImage() called on it; without that, the filter never receives the lookup texture and nothing renders. I changed the code to this:
let imagePicture: GPUImagePicture? = {
    if let imagePicture = GPUImagePicture(image: UIImage(named: "lut_lookup")) {
        return imagePicture
    } else {
        print("GPUImagePicture Nil")
        return nil
    }
}()

private func performLut() {
    if let videoCamera = videoCamera {
        if let imagePicture = imagePicture {
            videoCamera.addTarget(filter)
            imagePicture.addTarget(filter)
            imagePicture.processImage()
            filter.addTarget(modifiedImageView)
            videoCamera.startCapture()
        }
    }
}
Hopefully that helps someone!
I need to run a function when the contacts have changed. If the application is active, you can do this with NotificationCenter, as described in this post (sometimes it works when I add a new number to an existing contact). But how do I know that a contact (or contacts) changed while the app was not running, once the application launches again?
I made the following functions for this task:
@objc private func matchingContacts() {
    if isSuccessContactUploading {
        contactManager.matchingContacts(notMatch: { [weak self] in
            guard let _self = self else { return }
            debugPrint("matchingContacts != equals")
            _self.isSuccessContactUploading = false
            _self.syncContacts()
        })
    }
}
These functions are in ContactManager
func matchingContacts(notMatch: (() -> Void)?) {
    getContacts { (contacts, error) in
        if error == nil {
            debugPrint("contacts count", contacts.count)
            self.getContactsDictionaryFromCache(contacts, notMatch: {
                notMatch?()
            })
        }
    }
}

private func getContactsDictionaryFromCache(_ contacts: [CNContact], notMatch: (() -> Void)?) {
    var isMatching = true
    for contact in contacts {
        let key = contact.identifier
        do {
            let cache = try Cache<NSDictionary>(name: "Contacts")
            if let contactDictionary = cache[key] {
                if !contactDictionary.isEqual(to: contact.dictionary) {
                    debugPrint("contactDictionary not matching")
                    isMatching = false
                }
            } else {
                debugPrint("contactDictionary isn't here")
                isMatching = false
            }
        } catch {
            debugPrint(error.localizedDescription)
            isMatching = false
        }
    }
    if !isMatching {
        notMatch?()
    }
    cacheContacts(contacts)
}

private func cacheContacts(_ contacts: [CNContact]) {
    for contact in contacts {
        let contactDictionary = contact.dictionary as NSDictionary
        let key = contact.identifier
        do {
            let cache = try Cache<NSDictionary>(name: "Contacts")
            cache[key] = contactDictionary
        } catch {
            debugPrint(error.localizedDescription)
        }
    }
}
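The cache comparison above covers changes made while the app was not running. For changes made while it is running, observing CNContactStoreDidChange (presumably what the linked post describes) can re-run the matching logic automatically. A minimal sketch, assuming the observer is registered somewhere like init or viewDidLoad:

// Minimal sketch: matchingContacts() is already @objc, so it can serve
// as the selector for the contact-store change notification.
NotificationCenter.default.addObserver(self,
                                       selector: #selector(matchingContacts),
                                       name: NSNotification.Name.CNContactStoreDidChange,
                                       object: nil)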
So, I have the following classes:
An audio recorder (using AudioToolbox and Core Audio) that records audio.
An audio player (using AVFoundation).
The recorder captures audio and sends it to a server, which replies with another audio clip; the player then plays the received audio.
When I then call the recorder again to record audio, it doesn't record properly.
Any idea how I can reset the recorder so that it records properly after the audio player finishes playing?
I tried initializing the recorder again (right before recording), but that doesn't work.
The two lines below are what affect the recorder; unfortunately, they are needed in order to play audio with AVFoundation:
let sharedSession = AVAudioSession.sharedInstance()
try sharedSession.setCategory(AVAudioSessionCategoryPlayback)
Flow of execution:
1. Record audio.
2. Send the audio to the server and receive the reply.
3. Play the audio from the server.
4. Record audio again. At this point the recorder does NOT record properly.
5. Sending and receiving audio to the server FAILS.
6. Playing audio FAILS.
Thank you in advance!
Lu
Link to recorder project
AudioRecorder:
import UIKit
import CoreAudio
import AudioToolbox
class SpeechRecorder: NSObject {

    static let sharedInstance = SpeechRecorder()

    // MARK:- properties
    @objc enum Status: Int {
        case ready
        case busy
        case error
    }

    internal struct RecordState {
        var format: AudioStreamBasicDescription
        var queue: UnsafeMutablePointer<AudioQueueRef?>
        var buffers: [AudioQueueBufferRef?]
        var file: AudioFileID?
        var currentPacket: Int64
        var recording: Bool
    }

    private var _recordState: RecordState?
    private var _audioURL: URL?

    var format: AudioFormatID {
        get { return _recordState!.format.mFormatID }
        set { _recordState!.format.mFormatID = newValue }
    }

    var sampleRate: Float64 {
        get { return _recordState!.format.mSampleRate }
        set { _recordState!.format.mSampleRate = newValue }
    }

    var formatFlags: AudioFormatFlags {
        get { return _recordState!.format.mFormatFlags }
        set { _recordState!.format.mFormatFlags = newValue }
    }

    var channelsPerFrame: UInt32 {
        get { return _recordState!.format.mChannelsPerFrame }
        set { _recordState!.format.mChannelsPerFrame = newValue }
    }

    var bitsPerChannel: UInt32 {
        get { return _recordState!.format.mBitsPerChannel }
        set { _recordState!.format.mBitsPerChannel = newValue }
    }

    var framesPerPacket: UInt32 {
        get { return _recordState!.format.mFramesPerPacket }
        set { _recordState!.format.mFramesPerPacket = newValue }
    }

    var bytesPerFrame: UInt32 {
        get { return _recordState!.format.mBytesPerFrame }
        set { _recordState!.format.mBytesPerFrame = newValue }
    }

    var bytesPerPacket: UInt32 {
        get { return _recordState!.format.mBytesPerPacket }
        set { _recordState!.format.mBytesPerPacket = newValue }
    }

    //MARK: - Handlers
    public var handler: ((_ status: Status, _ data: NSData?, _ errorDesc: String?) -> Void)?

    // MARK:- Init
    override init() {
        super.init()
        self._recordState = RecordState(format: AudioStreamBasicDescription(),
                                        queue: UnsafeMutablePointer<AudioQueueRef?>.allocate(capacity: 1),
                                        buffers: [AudioQueueBufferRef?](repeating: nil, count: 1),
                                        file: nil,
                                        currentPacket: 0,
                                        recording: false)
    }//eom
    // MARK:- OutputFile
    private func getDocumentsPath() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }

    func setOutputFileNameWithDocumentsDirectory(nameDesired: String) {
        _audioURL = getDocumentsPath().appendingPathComponent(nameDesired)
        setOutputFile(url: _audioURL!)
    }//eom

    func setOutputFileNameWithTempDirectory(nameDesired: String) {
        let tempDir = NSTemporaryDirectory()
        let tempURLdir = URL(fileURLWithPath: tempDir)
        _audioURL = tempURLdir.appendingPathComponent(nameDesired)
        setOutputFile(url: _audioURL!)
    }//eom

    private func setOutputFile(path: String) {
        setOutputFile(url: URL(fileURLWithPath: path))
    }//eom

    private func setOutputFile(url: URL) {
        AudioFileCreateWithURL(url as CFURL,
                               kAudioFileWAVEType,
                               &_recordState!.format,
                               AudioFileFlags.dontPageAlignAudioData.union(.eraseFile),
                               &_recordState!.file)
    }
    // MARK:- Start / Stop Recording
    func start() {
        handler?(.busy, nil, nil)
        self._recordState?.currentPacket = 0

        let inputAudioQueue: AudioQueueInputCallback = { (userData: UnsafeMutableRawPointer?,
                                                          audioQueue: AudioQueueRef,
                                                          bufferQueue: AudioQueueBufferRef,
                                                          startTime: UnsafePointer<AudioTimeStamp>,
                                                          packets: UInt32,
                                                          packetDescription: UnsafePointer<AudioStreamPacketDescription>?) in
            let internalRSP = unsafeBitCast(userData, to: UnsafeMutablePointer<RecordState>.self)
            if packets > 0 {
                var packetsReceived = packets
                let outputStream: OSStatus = AudioFileWritePackets(internalRSP.pointee.file!,
                                                                   false,
                                                                   bufferQueue.pointee.mAudioDataByteSize,
                                                                   packetDescription,
                                                                   internalRSP.pointee.currentPacket,
                                                                   &packetsReceived,
                                                                   bufferQueue.pointee.mAudioData)
                if outputStream != 0 {
                    if verbose {
                        print("Error with AudioFileWritePackets")
                        //<----DEBUG
                        switch outputStream {
                        case kAudioFilePermissionsError: print("kAudioFilePermissionsError")
                        case kAudioFileNotOptimizedError: print("kAudioFileNotOptimizedError")
                        case kAudioFileInvalidChunkError: print("kAudioFileInvalidChunkError")
                        case kAudioFileDoesNotAllow64BitDataSizeError: print("kAudioFileDoesNotAllow64BitDataSizeError")
                        case kAudioFileInvalidPacketOffsetError: print("kAudioFileInvalidPacketOffsetError")
                        case kAudioFileInvalidFileError: print("kAudioFileInvalidFileError")
                        case kAudioFileOperationNotSupportedError: print("kAudioFileOperationNotSupportedError")
                        case kAudioFileNotOpenError: print("kAudioFileNotOpenError")
                        case kAudioFileEndOfFileError: print("kAudioFileEndOfFileError")
                        case kAudioFilePositionError: print("kAudioFilePositionError")
                        case kAudioFileFileNotFoundError: print("kAudioFileFileNotFoundError")
                        case kAudioFileUnspecifiedError: print("kAudioFileUnspecifiedError")
                        case kAudioFileUnsupportedFileTypeError: print("kAudioFileUnsupportedFileTypeError")
                        case kAudioFileUnsupportedDataFormatError: print("kAudioFileUnsupportedDataFormatError")
                        case kAudioFileUnsupportedPropertyError: print("kAudioFileUnsupportedPropertyError")
                        case kAudioFileBadPropertySizeError: print("kAudioFileBadPropertySizeError")
                        default: print("unknown error")
                        }
                        //<----DEBUG
                    }
                }
                internalRSP.pointee.currentPacket += Int64(packetsReceived)
            }

            if internalRSP.pointee.recording {
                let outputStream: OSStatus = AudioQueueEnqueueBuffer(audioQueue, bufferQueue, 0, nil)
                if outputStream != 0 {
                    if verbose {
                        print("Error with AudioQueueEnqueueBuffer")
                        //<----DEBUG
                        switch outputStream {
                        case kAudioFilePermissionsError: print("kAudioFilePermissionsError")
                        case kAudioFileNotOptimizedError: print("kAudioFileNotOptimizedError")
                        case kAudioFileInvalidChunkError: print("kAudioFileInvalidChunkError")
                        case kAudioFileDoesNotAllow64BitDataSizeError: print("kAudioFileDoesNotAllow64BitDataSizeError")
                        case kAudioFileInvalidPacketOffsetError: print("kAudioFileInvalidPacketOffsetError")
                        case kAudioFileInvalidFileError: print("kAudioFileInvalidFileError")
                        case kAudioFileOperationNotSupportedError: print("kAudioFileOperationNotSupportedError")
                        case kAudioFileNotOpenError: print("kAudioFileNotOpenError")
                        case kAudioFileEndOfFileError: print("kAudioFileEndOfFileError")
                        case kAudioFilePositionError: print("kAudioFilePositionError")
                        case kAudioFileFileNotFoundError: print("kAudioFileFileNotFoundError")
                        case kAudioFileUnspecifiedError: print("kAudioFileUnspecifiedError")
                        case kAudioFileUnsupportedFileTypeError: print("kAudioFileUnsupportedFileTypeError")
                        case kAudioFileUnsupportedDataFormatError: print("kAudioFileUnsupportedDataFormatError")
                        case kAudioFileUnsupportedPropertyError: print("kAudioFileUnsupportedPropertyError")
                        case kAudioFileBadPropertySizeError: print("kAudioFileBadPropertySizeError")
                        default: print("unknown error")
                        }
                        //<----DEBUG
                    }
                }
            }
        }

        let queueResults = AudioQueueNewInput(&_recordState!.format, inputAudioQueue, &_recordState, nil, nil, 0, _recordState!.queue)
        if queueResults == 0 {
            let bufferByteSize: Int = calculate(format: _recordState!.format, seconds: 0.5)
            for index in (0..<_recordState!.buffers.count) {
                AudioQueueAllocateBuffer(_recordState!.queue.pointee!, UInt32(bufferByteSize), &_recordState!.buffers[index])
                AudioQueueEnqueueBuffer(_recordState!.queue.pointee!, _recordState!.buffers[index]!, 0, nil)
            }
            AudioQueueStart(_recordState!.queue.pointee!, nil)
            _recordState?.recording = true
        } else {
            handler?(.error, nil, "Error setting audio input.")
        }
    }//eom
    func stop() {
        _recordState?.recording = false
        if let recordingState: RecordState = _recordState {
            AudioQueueStop(recordingState.queue.pointee!, true)
            AudioQueueDispose(recordingState.queue.pointee!, true)
            AudioFileClose(recordingState.file!)
            let audioData: NSData? = NSData(contentsOf: _audioURL!)
            handler?(.ready, audioData, nil)
        }
    }//eom

    // MARK:- Helper methods
    func calculate(format: AudioStreamBasicDescription, seconds: Double) -> Int {
        let framesRequiredForBufferTime = Int(ceil(seconds * format.mSampleRate))
        if framesRequiredForBufferTime > 0 {
            return (framesRequiredForBufferTime * Int(format.mBytesPerFrame))
        } else {
            var maximumPacketSize = UInt32(0)
            if format.mBytesPerPacket > 0 {
                maximumPacketSize = format.mBytesPerPacket
            } else {
                audioQueueProperty(propertyId: kAudioQueueProperty_MaximumOutputPacketSize, value: &maximumPacketSize)
            }
            var packets = 0
            if format.mFramesPerPacket > 0 {
                packets = (framesRequiredForBufferTime / Int(format.mFramesPerPacket))
            } else {
                packets = framesRequiredForBufferTime
            }
            if packets == 0 {
                packets = 1
            }
            return (packets * Int(maximumPacketSize))
        }
    }//eom

    func audioQueueProperty<T>(propertyId: AudioQueuePropertyID, value: inout T) {
        let propertySize = UnsafeMutablePointer<UInt32>.allocate(capacity: 1)
        propertySize.pointee = UInt32(MemoryLayout<T>.size)
        let queueResults = AudioQueueGetProperty(_recordState!.queue.pointee!, propertyId, &value, propertySize)
        propertySize.deallocate(capacity: 1)
        if queueResults != 0 {
            handler?(.error, nil, "Unable to get audio queue property.")
        }
    }//eom
}
Player:
import UIKit
import AVFoundation

protocol AudioPlayerDelegate {
    func audioPlayer_playbackError(playerItemID: String, error: String)
    func audioPlayer_playbackSuccess(playerItemID: String)
}

class AudioPlayer: NSObject, AVAudioPlayerDelegate {

    // properties
    private var _audioPlayer: AVAudioPlayer?
    var delegate: AudioPlayerDelegate?
    var playerItemID: String = ""
    var volume: Float?

    //MARK: - Play Audio
    func playAudioFromData(_ playerItemID: String, dataToPlay: Data) {
        do {
            let sharedSession = AVAudioSession.sharedInstance()
            try sharedSession.setCategory(AVAudioSessionCategoryPlayback)
            try sharedSession.setActive(true)
            _audioPlayer = try AVAudioPlayer(data: dataToPlay)
            _audioPlayer?.numberOfLoops = 0
            _audioPlayer?.isMeteringEnabled = true
            _audioPlayer?.delegate = self
            // volume
            if volume != nil {
                _audioPlayer?.volume = volume!
            }
            // id
            self.playerItemID = playerItemID
            _audioPlayer?.play()
        } catch let error {
            self.delegate?.audioPlayer_playbackError(playerItemID: self.playerItemID, error: error.localizedDescription)
        }
    }//eom

    func playAudioFromUrl(_ url: URL) {
        do {
            let sharedSession = AVAudioSession.sharedInstance()
            try sharedSession.setCategory(AVAudioSessionCategoryPlayback)
            try sharedSession.setActive(true)
            if FileManager.default.fileExists(atPath: url.path) {
                _audioPlayer = try AVAudioPlayer(contentsOf: url)
                _audioPlayer?.numberOfLoops = 0
                _audioPlayer?.isMeteringEnabled = true
                _audioPlayer?.delegate = self
                // volume
                if volume != nil {
                    _audioPlayer?.volume = volume!
                }
                // id
                self.playerItemID = url.absoluteString
                _audioPlayer?.play()
            } else {
                self.delegate?.audioPlayer_playbackError(playerItemID: self.playerItemID, error: "audio file does not exist")
            }
        } catch let error {
            self.delegate?.audioPlayer_playbackError(playerItemID: self.playerItemID, error: error.localizedDescription)
        }
    }//eom

    //MARK: - Player Options
    func pausePlay() {
        _audioPlayer?.pause()
    }//eom

    func stopPlay() {
        _audioPlayer?.stop()
        do {
            let sharedSession = AVAudioSession.sharedInstance()
            try sharedSession.setActive(false)
        } catch let error {
            if verbose { print("unable to set session to inactive, error: \(error)") }
        }
    }//eom

    //MARK: - Delegates
    func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
        // deactivate session
        do {
            let sharedSession = AVAudioSession.sharedInstance()
            try sharedSession.setActive(false)
        } catch let error {
            if verbose { print("unable to set session to inactive, error: \(error)") }
        }
        // report status
        if error != nil {
            self.delegate?.audioPlayer_playbackError(playerItemID: self.playerItemID, error: error!.localizedDescription)
        } else {
            self.delegate?.audioPlayer_playbackError(playerItemID: self.playerItemID, error: "decode error occurred")
        }
        // reset
        self._audioPlayer?.delegate = nil
        self._audioPlayer = nil
        self.playerItemID = ""
    }//eom

    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        // deactivate session
        do {
            let sharedSession = AVAudioSession.sharedInstance()
            try sharedSession.setActive(false)
        } catch let error {
            if verbose { print("unable to set session to inactive, error: \(error)") }
        }
        // report status
        if flag {
            delegate?.audioPlayer_playbackSuccess(playerItemID: self.playerItemID)
        } else {
            delegate?.audioPlayer_playbackError(playerItemID: self.playerItemID, error: "player finished playing with error")
        }
        // reset
        self._audioPlayer?.delegate = nil
        self._audioPlayer = nil
        self.playerItemID = ""
    }//eom
}//eoc
If you are going to be working with both AudioToolbox and AVFoundation, you may want to be careful with the audio session: AVFoundation makes a lot of updates to the shared AVAudioSession behind the scenes.
A quick fix for your Player would be to remove any audio session calls, like the ones below:
let sharedSession = AVAudioSession.sharedInstance()
try sharedSession.setCategory(AVAudioSessionCategoryPlayback)
try sharedSession.setActive(true)
_audioPlayer?.numberOfLoops = 0
_audioPlayer?.isMeteringEnabled = true
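If the app genuinely needs both recording and playback, another option (a sketch, not tested against this project) is to configure the shared session once, with a category that supports both directions, instead of switching categories per operation:

do {
    // Set a category that allows input and output once, e.g. at app launch,
    // rather than flipping between record and playback categories.
    let sharedSession = AVAudioSession.sharedInstance()
    try sharedSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
    try sharedSession.setActive(true)
} catch {
    print("Unable to configure audio session: \(error)")
}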
For more advanced audio manipulation, check out the book Learning Core Audio by Chris Adamson and Kevin Avila.