How can I get AVPlayer to work like AVAudioPlayer? What I mean is, I cannot seem to get the duration to work at all. Here is what I need to convert to AVPlayer:
import UIKit
import AVKit
import AVFoundation
class ModalViewController: UIViewController {
var audioPlayer = AVAudioPlayer()
let varSend = VarSend.sharedInstance
var timer:NSTimer!
var toggleState = 2
@IBOutlet weak var slider: UISlider!
@IBOutlet weak var sermonImage: UIImageView!
@IBOutlet weak var sermont: UILabel!
@IBOutlet weak var sermond: UILabel!
@IBOutlet weak var sermonE: UILabel!
@IBOutlet weak var sermonL: UILabel!
@IBOutlet weak var play: UIButton!
@IBOutlet var layer: UIView!
override func viewDidLoad() {
super.viewDidLoad()
let url = varSend.url
print("Setting up.")
do {
let data1 = NSData(contentsOfURL: NSURL(string:url)!)
audioPlayer = try AVAudioPlayer(data: data1!)
audioPlayer.prepareToPlay()
audioPlayer.volume = 1.0
audioPlayer.play()
} catch {
print("Error getting the audio file")
}
slider.maximumValue = Float(audioPlayer.duration)
timer = NSTimer.scheduledTimerWithTimeInterval(0.1, target: self, selector: Selector("updateSlider"), userInfo: nil, repeats: true)
slider.setThumbImage(UIImage(named: "circle"), forState: .Normal)
slider.setThumbImage(UIImage(named: "circle"), forState: .Highlighted)
let title = varSend.sermonName
self.sermont.text = title
let date = varSend.sermonDate
self.sermond.text = date
let image = varSend.sermonPic
ImageLoader.sharedLoader.imageForUrl(image, completionHandler:{(image: UIImage?, url: String) in
self.sermonImage.image = image!
})
}
@IBAction func ChangeAudioTime(sender: AnyObject) {
audioPlayer.stop()
audioPlayer.currentTime = NSTimeInterval(slider.value)
audioPlayer.prepareToPlay()
audioPlayer.volume = 1.0
audioPlayer.play()
}
func updateSlider() {
slider.value = Float(audioPlayer.currentTime)
let currentTime = Int(audioPlayer.currentTime)
let minutes = currentTime / 60
let seconds = currentTime - minutes * 60
let con = Int(audioPlayer.duration)
let currentItem = con - currentTime
let minutesU = Int(currentItem / 60)
let secondsU = Int(currentItem % 60)
sermonE.text = NSString(format: "%02d:%02d", minutes,seconds) as String
let timeLeft = NSString(format: "%02d:%02d", minutesU,secondsU) as String
sermonL.text = "-\(timeLeft)"
if currentItem == 1 {
//audioPlayer.pause()
toggleState = 1
print(toggleState)
play.setImage(UIImage(named:"play.png"),forState:UIControlState.Normal)
}
}
@IBAction func playPauseButton(sender: AnyObject) {
let playBtn = sender as! UIButton
if toggleState == 1 {
audioPlayer.play()
toggleState = 2
playBtn.setImage(UIImage(named:"pause.png"),forState:UIControlState.Normal)
} else {
audioPlayer.pause()
toggleState = 1
playBtn.setImage(UIImage(named:"play.png"),forState:UIControlState.Normal)
}
}
@IBAction func play(sender: AnyObject) {
audioPlayer.play()
}
@IBAction func pause(sender: AnyObject) {
audioPlayer.pause()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
@IBAction func close(sender: AnyObject) {
self.dismissViewControllerAnimated(true, completion: nil)
audioPlayer.stop()
}
}
And here is what I have tried already:
import UIKit
import Alamofire
import SwiftyJSON
import AVKit
import AVFoundation
import CoreMedia
class test: UIViewController {
var audioPlayer:AVPlayer!
var playerItem:AVPlayerItem!
var timer:NSTimer!
@IBOutlet weak var sermonE: UILabel!
@IBOutlet weak var sermonL: UILabel!
@IBOutlet weak var slider: UISlider!
override func viewDidLoad() {
super.viewDidLoad()
//let playerItem:AVPlayerItem!;
let playerURL = "example.com"
let streamingURL:NSURL = NSURL(string:playerURL)!
audioPlayer = AVPlayer(URL: streamingURL)
timer = NSTimer.scheduledTimerWithTimeInterval(0.1, target: self, selector: Selector("updateSlider"), userInfo: nil, repeats: true)
}
func updateSlider() {
let item = audioPlayer?.currentItem
let durationInSeconds = CMTimeGetSeconds(item!.duration)
print(durationInSeconds)
}
@IBAction func ChangeAudioTime(sender: AnyObject) {
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
@IBAction func Play(sender: AnyObject) {
audioPlayer.play()
}
}
I have been searching for days and Apple's docs are very hard to make out.
I even tried
self.player.currentItem.asset.duration
from: How to get Duration from AVPlayer (Not AVAudioPlayer)?
Any help would be much appreciated.
Swift 2.0
loadedTimeRanges The array contains NSValue objects containing a CMTimeRange value
indicating the time ranges for which the player item has media data
readily available. The time ranges returned may be discontinuous.
So I call myplayer.getCurrentTrackDuration every 1 second and noticed that, when streaming, I get the correct final duration after 3-4 seconds.
extension AVPlayer {
//run this every 1 second of streaming (or use KVO)
//For an HTTP stream the duration keeps increasing and usually finalizes once roughly 7% of the total duration of the song has loaded
func getCurrentTrackDuration () -> Float64 {
guard let currentItem = self.currentItem else { return 0.0 }
guard currentItem.loadedTimeRanges.count > 0 else { return 0.0 }
let timeInSecond = CMTimeGetSeconds((currentItem.loadedTimeRanges[0].CMTimeRangeValue).duration);
return timeInSecond >= 0.0 ? timeInSecond : 0.0
}
}
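As a side note (this uses modern Swift syntax rather than the Swift 2 above, and the class name is just for illustration): instead of polling every second, you can key-value observe the item's status and read the duration once it reaches .readyToPlay:
import AVFoundation
final class DurationObserver {
    private let player: AVPlayer
    private var statusObservation: NSKeyValueObservation?
    init(url: URL) {
        player = AVPlayer(url: url)
        // For streams, item.duration is kCMTimeIndefinite until the item
        // reaches .readyToPlay, which is why polling sees it "appear" late.
        statusObservation = player.currentItem?.observe(\.status) { item, _ in
            if item.status == .readyToPlay {
                print("duration:", CMTimeGetSeconds(item.duration))
            }
        }
    }
}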
So I have this big file with more than 600 words. I add them to my array, and now I need to randomly show them on the label. Every time the button is pressed, a new random word has to be shown until the timer ends. I've searched multiple sources for this question but couldn't apply them, because I'm new to this.
import UIKit
class ViewController: UIViewController {
var wordArray: [String] = []
var i: Int = 0
var timer = Timer()
var totalSecond = 5
func startTimer() {
timer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(updateTime), userInfo: nil, repeats: true)
}
@objc func updateTime() {
if totalSecond != 0 {
totalSecond -= 1
timerLabel.text = "\(totalSecond) seconds left"
} else {
endTimer()
}
}
func endTimer() {
timer.invalidate()
}
func timeFormatted(_ totalSeconds: Int) -> String {
let seconds: Int = totalSeconds % 60
return String(format: "0:%02d", seconds)
}
@IBOutlet weak var timerLabel: UILabel!
@IBOutlet weak var showWordLabel: UILabel!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
let url = Bundle.main.url(forResource: "english_words", withExtension: "txt")!
do {
let string = try String(contentsOf: url, encoding: .utf8)
wordArray = string.components(separatedBy: CharacterSet.newlines)
} catch {
print(error)
}
startTimer()
}
@IBAction func nextBtn(_ sender: UIButton) {
}
}
If you use Swift 4.2 and above, you can use randomElement(). It returns an optional to cover the empty-array case.
let yourArray = ["one", "two", "three", "Four"]
if let randomName = yourArray.randomElement() {
print(randomName)
}
If you use Swift 4.1 or below, you do it this way:
let array = ["one", "two", "three", "Four"]
let randomIndex = Int(arc4random_uniform(UInt32(array.count)))
print(array[randomIndex])
Although I'm not 100% sure I understand your problem, here is a modified version of your view controller.
See the comments in the code.
class ViewController: UIViewController {
var wordArray: [String] = []
var i: Int = 0
var timer = Timer()
var totalSecond = 5
func startTimer() {
// Stop old timer
timer.invalidate()
// Start new timer
timer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(updateTime), userInfo: nil, repeats: true)
}
@objc func updateTime() {
if totalSecond != 0 {
totalSecond -= 1
timerLabel.text = "\(totalSecond) seconds left"
} else {
endTimer()
}
}
func endTimer() {
timer.invalidate()
}
func timeFormatted(_ totalSeconds: Int) -> String {
let seconds: Int = totalSeconds % 60
return String(format: "0:%02d", seconds)
}
@IBOutlet weak var timerLabel: UILabel!
@IBOutlet weak var showWordLabel: UILabel!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
let url = Bundle.main.url(forResource: "english_words", withExtension: "txt")!
do {
let string = try String(contentsOf: url, encoding: .utf8)
wordArray = string.components(separatedBy: CharacterSet.newlines)
} catch {
print(error)
}
startTimer()
}
@IBAction func nextBtn(_ sender: UIButton) {
// Fetch random word from array (guard against an empty array)
guard !wordArray.isEmpty else { return }
let randomValue = Int.random(in: 0..<wordArray.count)
let randomWord = wordArray[randomValue]
// Assign to label
showWordLabel.text = randomWord
// Restart timer
startTimer()
}
}
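One small addition worth considering (my own suggestion, not part of the original code): invalidate the timer when the view goes away, otherwise it keeps firing after the controller is dismissed:
override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    // Stop the countdown so the timer doesn't retain the controller
    endTimer()
}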
I have always worked in Obj-C, but I want to get my head around Swift. I am nearly there with my code, but I just don't know how to loop my wav; it stops after playing once. I have found some instructions, but I haven't found the solution for my code yet. I hope someone knows the answer and can help me. So the question is: what do I have to do to make my wav loop when pressing @IBAction func playButtonTapped? I will give all my code, just to be sure. Thanks in advance :-)
class ViewController: UIViewController {
@IBOutlet var startAllButton: UIButton!
var audioEngine = AVAudioEngine()
var playerNode = AVAudioPlayerNode()
let timeShift = AVAudioUnitTimePitch()
let pedoMeter = CMPedometer()
let bpm: Float = 110
var avgStarted: Bool = false
var steps: Int = 0
var timer = Timer()
var adjustedBpm: Float = 110
var timerCount = 10 {
didSet {
if timerCount == 0 {
stopCountingSteps()
}
}
}
var lastTap: Date? = nil
@IBOutlet weak var tempoTap: UIButton!
@IBOutlet weak var slider: UISlider!
@IBOutlet weak var label: UILabel!
@IBOutlet weak var playButton: UIButton!
@IBOutlet weak var timerLabel: UILabel!
@IBOutlet weak var stepCountLabel: UILabel!
@IBOutlet weak var avgLabel: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
setup()
}
func setup() {
label.text = "110"
audioEngine.attach(playerNode)
audioEngine.attach(timeShift)
audioEngine.connect(playerNode, to: timeShift, format: nil)
audioEngine.connect(timeShift, to: audioEngine.mainMixerNode, format: nil)
audioEngine.prepare()
timerLabel.text = ""
stepCountLabel.text = ""
do {
try audioEngine.start()
} catch {
print("Could not start audio engine")
}
}
@IBAction func sliderValueChanged(_ sender: UISlider) {
label.text = String(sender.value)
self.label.text = String(format:"%.f", sender.value)
adjustedBpm = sender.value
timeShift.rate = adjustedBpm/bpm
}
@IBAction func playButtonTapped(_ sender: Any) {
let url = Bundle.main.url(forResource: "25loop110", withExtension: ".wav")
if let url = url {
do {
let audioFile = try AVAudioFile(forReading: url)
timeShift.rate = adjustedBpm/bpm
playerNode.scheduleFile(audioFile, at: nil, completionHandler: nil)
} catch {
print("could not load audio file")
}
} else {
print("could not load audio file")
}
playerNode.play()
}
}
The problem is these lines:
let audioFile = try AVAudioFile(forReading: url)
playerNode.scheduleFile(audioFile, at: nil, completionHandler: nil)
Where’s the loop? Nowhere. That code plays the file once.
You cannot loop with a file in AVAudioEngine. You loop with a buffer. You read the file into a buffer and call scheduleBuffer(buffer, at: nil, options: .loops).
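For example, the body of playButtonTapped could become something like this (a minimal sketch using the same url, timeShift, and playerNode as the question; error handling condensed):
if let url = url {
    do {
        let audioFile = try AVAudioFile(forReading: url)
        // Read the whole file into a PCM buffer sized to its frame count
        if let buffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat,
                                         frameCapacity: AVAudioFrameCount(audioFile.length)) {
            try audioFile.read(into: buffer)
            timeShift.rate = adjustedBpm/bpm
            // .loops repeats the buffer until the node is stopped or paused
            playerNode.scheduleBuffer(buffer, at: nil, options: .loops, completionHandler: nil)
            playerNode.play()
        }
    } catch {
        print("could not load audio file")
    }
}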
I want to create a simple AVAudioPlayer element but am unsure what to fix. I am making a UISlider and two labels that all correspond to the AVAudioPlayer.
I am having a bunch of trouble trying to properly implement the slider and the labels' updateTime function. I tried my best... how can I do this? :)
@IBOutlet weak var playbackSlider: UISlider!
@IBOutlet weak var playbackTimeLabelFront: UILabel!
@IBOutlet weak var playbackTimeLabelBack: UILabel!
var timer: Timer!
var isPlaying = false
var audioRecorder: AVAudioRecorder!
var audioPlayer : AVAudioPlayer!
var recordingSession: AVAudioSession!
func loadRecordingUI() {
do {
audioPlayer = try AVAudioPlayer(contentsOf: getFileUrl())
audioPlayer!.delegate = self
audioPlayer!.prepareToPlay()
print("Audio Success")
} catch {
print("audioPlayer error: \(error.localizedDescription)")
}
}
@IBAction func playTapped(_ sender: Any) {
if isPlaying {
audioPlayer!.pause()
isPlaying = false
} else {
audioPlayer!.play()
isPlaying = true
timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(updateTime), userInfo: nil, repeats: true)
}
}
@objc func updateTime() {
let currentTime = Int(audioPlayer.currentTime)
let minutes = currentTime/60
let seconds = currentTime - minutes * 60
playbackTimeLabelFront.text = ??
}
Here is working code
@IBOutlet weak var slider: UISlider!
@IBOutlet weak var lblTotalDuration: UILabel!
@IBOutlet weak var lblCurrentDuration: UILabel!
var timer:Timer!
var audioPlayer: AVAudioPlayer!
// Pass your Audiofile path here
func prepareAudio(path:String) {
let mp3URL = NSURL(string: path)
do {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
} catch _ { }
do {
try AVAudioSession.sharedInstance().setActive(true)
} catch _ { }
audioPlayer = try AVAudioPlayer(contentsOf: mp3URL as! URL)
audioPlayer.delegate = self
slider.maximumValue = CFloat(audioPlayer.duration)
slider.minimumValue = CFloat(0.0)
slider.value = CFloat(0.0)
audioPlayer.prepareToPlay()
// Total Audio Duration
let time = calculateTimeFromNSTimeInterval(audioPlayer.duration)
totalLengthOfAudio = "\(time.minute):\(time.second)" // totalLengthOfAudio is a String property on the controller
lblTotalDuration.text = totalLengthOfAudio
lblCurrentDuration.text = "00:00"
audioPlayer.play()
startTimer()
} catch let error as NSError {
print(error.localizedDescription)
}
}
Timer and Update Label
func startTimer(){
// Assign to the property rather than a local constant, so stopTimer() can invalidate it
timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(PlayerViewController.update(_:)), userInfo: nil,repeats: true)
timer.fire()
}
func stopTimer(){
timer.invalidate()
}
@objc func update(_ timer: Timer){
if !audioPlayer.isPlaying{
return
}
let time = calculateTimeFromNSTimeInterval(audioPlayer.currentTime)
DispatchQueue.main.async{
self.lblCurrentDuration.text = "\(time.minute):\(time.second)"
self.slider.value = CFloat(self.audioPlayer.currentTime)
}
}
Calculate Song Length
func calculateTimeFromNSTimeInterval(_ duration:TimeInterval) ->(minute:String, second:String){
let minute_ = abs(Int((duration/60).truncatingRemainder(dividingBy: 60)))
let second_ = abs(Int(duration.truncatingRemainder(dividingBy: 60)))
// var hour = hour_ > 9 ? "\(hour_)" : "0\(hour_)"
let minute = minute_ > 9 ? "\(minute_)" : "0\(minute_)"
let second = second_ > 9 ? "\(second_)" : "0\(second_)"
return (minute,second)
}
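The question also involved wiring the slider itself, which the code above doesn't show, so here is a minimal sketch (the action name sliderValueChanged is hypothetical; it assumes the same slider and audioPlayer):
@IBAction func sliderValueChanged(_ sender: UISlider) {
    // AVAudioPlayer seeks by assigning currentTime directly
    audioPlayer.currentTime = TimeInterval(sender.value)
}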
When the user accepts a call from CallKit, I switch the root view controller on the accept button click, but the root controller object is always found nil and the application crashes.
Case: this happens when the application is running in the background and the phone is locked.
func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
endCallTimer()
guard let call = callManager.callWithUUID(uuid: action.callUUID) else {
action.fail()
return
}
let mainStoryboard: UIStoryboard = UIStoryboard(name: "Main", bundle: nil)
let incomingCall = mainStoryboard.instantiateViewController(withIdentifier: "CallConnectedVC") as! IncomingController
incomingCall.connectToCalling(duration:self.callDict["duration"] as! Int, consumerId: "\(self.callDict["consumerId"] as! Int)", categoryName: self.callDict["categoryTopic"] as! String, categoryImage: "", callId: self.callDict["callId"] as! String)
let nav = UINavigationController(rootViewController: incomingCall)
UIApplication.shared.keyWindow?.makeKeyAndVisible()
UIApplication.shared.keyWindow?.rootViewController = nav
configureAudioSession()
call.answer()
action.fulfill()
}
IncomingController code -
import UIKit
import TwilioVideo
import AVFoundation
import SDWebImage
import FirebaseAnalytics
class IncomingController: UIViewController {
var camera: TVICameraCapturer?
@IBOutlet weak var img_user: UIImageView!
@IBOutlet weak var lblName: UILabel!
@IBOutlet weak var lbltopic: UILabel!
@IBOutlet weak var lblTimer: UILabel!
@IBOutlet weak var btn_speaker : UIButton!
@IBOutlet weak var btn_video: UIButton!
@IBOutlet weak var btn_mute: UIButton!
@IBOutlet weak var btn_extendcall: UIButton!
@IBOutlet weak var btn_endcall: UIButton!
//Video
@IBOutlet weak var btn_toogleMic : UIButton!
@IBOutlet weak var btnConnectAudio : UIButton!
@IBOutlet weak var btnFlipCamera : UIButton!
@IBOutlet weak var view_video: UIView!
@IBOutlet weak var lblviedoName: UILabel!
@IBOutlet weak var lblvideoTimer: UILabel!
@IBOutlet weak var lblvideoTopic: UILabel!
@IBOutlet weak var provider_previewView: TVIVideoView!
@IBOutlet weak var provider_remoteView: TVIVideoView!
@IBOutlet weak var provider_previewViewShadow: UIView!
@IBOutlet weak var btnView : UIView!
@IBOutlet weak var constrain_btnView: NSLayoutConstraint!
//UpdatedVideo
@IBOutlet weak var viewDisableVideo: UIView!
@IBOutlet weak var lblDisableViedoName: UILabel!
@IBOutlet weak var lblDisableVideoTimer: UILabel!
@IBOutlet weak var lblDisableTopic: UILabel!
@IBOutlet weak var lblDisableText: UILabel!
@IBOutlet weak var imgUserDisable: UIImageView!
var NotificationDict = NSMutableDictionary()
var Calltimer: Timer? = Timer()
var sec = 60
var min = 6
var audioDevice: TVIDefaultAudioDevice = TVIDefaultAudioDevice(block: {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord, mode: AVAudioSessionModeVoiceChat, options: .mixWithOthers)
try AVAudioSession.sharedInstance().setPreferredSampleRate(48000)
try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(0.01)
} catch {
print(error)
}
})
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
TwilioVideo.audioDevice = self.audioDevice
self.prepareLocalMedia()
connectToCalling(duration:NotificationDict["duration"] as! Int, consumerId: "\(NotificationDict["consumerId"] as! Int)", categoryName: NotificationDict["categoryTopic"] as! String, categoryImage: "", callId: NotificationDict["callId"] as! String)
}
//MARK:- Speaker Method
func setAudioOutputSpeaker(_ enabled: Bool) {
let session = AVAudioSession.sharedInstance()
try? session.setCategory(AVAudioSessionCategoryPlayAndRecord)
try? session.setMode(AVAudioSessionModeVoiceChat)
if enabled {
try? session.overrideOutputAudioPort(AVAudioSessionPortOverride.speaker)
} else {
try? session.overrideOutputAudioPort(AVAudioSessionPortOverride.none)
}
try? session.setActive(true)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
//MARK:- Custom Method
func connectToCalling(duration:Int, consumerId:String, categoryName:String, categoryImage:String, callId:String)
{
let Url = "\(Constant.tokenUrl)"
let Param = ["duration":duration, "consumerId":consumerId, "categoryName":categoryName, "categoryImage":categoryImage, "callId":callId] as [String:Any]
ApiResponse.onResponseKeyPost(url: Url, parms: Param as NSDictionary, completion: { (dict, errr) in
print("dict responce",dict)
if(errr == ""){
OperationQueue.main.addOperation {
accessToken = dict["providerToken"] as! String
let connectOptions = TVIConnectOptions.init(token: accessToken) { (builder) in
if let videoTrack = ProvideCallObject.localVideoTrack {
builder.videoTracks = [videoTrack]
}
// We will share a local audio track only if ExampleAVAudioEngineDevice is selected.
if let audioTrack = ProvideCallObject.localAudioTrack {
builder.audioTracks = [audioTrack]
}
userDef.set("\(dict["roomId"]!)", forKey: "roomId")
builder.roomName = "\(dict["roomId"]!)"
}
ProvideCallObject.Provideroom = TwilioVideo.connect(with: connectOptions, delegate: self)
}
}
})
}
@objc func updateCountDown() {
if btn_endcall.isUserInteractionEnabled == false
{
btn_endcall.isUserInteractionEnabled = true
}
if sec == 0 {
if min == 0
{
sec = 0
min = 0
goToFeedback()
Calltimer?.invalidate()
}else
{
sec = 59
min = min - 1
if min == 0 {
SystemSoundID.playFileNamed(fileName: "60 Seconds v2", withExtenstion: "m4a")
}
}
}else
{
var timeString = ""
sec = sec - 1
if min < 10
{
timeString = timeString + "0" + String(min)
if min == 2 && sec == 0{
SystemSoundID.playFileNamed(fileName: "2 Mins v2", withExtenstion: "m4a")
}
}
else
{
timeString = timeString + String(min)
}
if sec < 10
{
timeString = timeString + ":0" + String(sec)
}
else
{
timeString = timeString + ":" + String(sec)
}
lblTimer.text = "\(timeString) Free Minutes"
lblvideoTimer.text = "\(timeString) Free Minutes"
lblDisableVideoTimer.text = "\(timeString) remaining"
}
}
func prepareLocalMedia() {
if (ProvideCallObject.localAudioTrack == nil) {
ProvideCallObject.localAudioTrack = TVILocalAudioTrack.init(options: nil, enabled: true, name: "Microphone")
if (ProvideCallObject.localAudioTrack == nil) {
print("Failed to create audio track")
}
}
if (ProvideCallObject.localVideoTrack == nil) {
self.startPreview()
}
changeButtonImage(isVideoEnable: true)
}
//Video
func setupRemoteVideoView() {
self.provider_previewViewShadow.frame = CGRect(x: self.view_video.bounds.width - 132, y: self.view_video.bounds.height - 239, width: 112, height: 149)
self.provider_previewView.frame = self.provider_previewViewShadow.bounds
self.provider_remoteView.bringSubview(toFront: self.provider_previewViewShadow)
self.provider_remoteView.isHidden = false
}
// MARK: Private
func startPreview() {
if PlatformUtils.isSimulator {
return
}
camera = TVICameraCapturer(source: .frontCamera, delegate: self)
ProvideCallObject.localVideoTrack = TVILocalVideoTrack.init(capturer: camera!, enabled: true, constraints: nil, name: "Camera")
if (ProvideCallObject.localVideoTrack == nil) {
print("Failed to create video track")
} else {
ProvideCallObject.localVideoTrack!.addRenderer(self.provider_previewView)
let tap = UITapGestureRecognizer(target: self, action: #selector(IncomingController.flipCamera))
self.provider_previewView.addGestureRecognizer(tap)
}
}
@objc func flipCamera() {
if (self.camera?.source == .frontCamera) {
self.camera?.selectSource(.backCameraWide)
} else {
self.camera?.selectSource(.frontCamera)
}
}
func cleanupRemoteParticipant() {
if ((ProvideCallObject.remoteParticipant) != nil) {
if ((ProvideCallObject.remoteParticipant?.videoTracks.count)! > 0) {
let remoteVideoTrack = ProvideCallObject.remoteParticipant?.remoteVideoTracks[0].remoteTrack
remoteVideoTrack?.removeRenderer(self.provider_remoteView!)
self.provider_remoteView?.isHidden = true
}
}
ProvideCallObject.remoteParticipant = nil
}
}
// MARK:- TVIRoomDelegate
extension IncomingController : TVIRoomDelegate {
func callDetails(room_sid:String,callID:String) {
let params = ["room_sid":room_sid,"callId": callID,"isCallEnd":0] as [String : Any]
ApiResponse.onResponsePost(url: Constant.callDetailsTwilio, parms: params as NSDictionary) { (response, error) in
}
}
func didConnect(to room: TVIRoom) {
ProvideCallObject.localParticipant = ProvideCallObject.Provideroom?.localParticipant
ProvideCallObject.localParticipant?.delegate = self
Calltimer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(updateCountDown), userInfo: nil, repeats: true)
NotificationDict["roomsid"] = room.sid
btn_video.isUserInteractionEnabled = true
if (room.remoteParticipants.count > 0) {
ProvideCallObject.remoteParticipant = room.remoteParticipants[0]
ProvideCallObject.remoteParticipant?.delegate = self
self.callDetails(room_sid: room.sid, callID: NotificationDict["callId"] as! String)
}
if !isHeadPhoneAvailabel(){
self.setAudioOutputSpeaker(true)
}
}
func room(_ room: TVIRoom, didDisconnectWithError error: Error?) {
self.cleanupRemoteParticipant()
}
func room(_ room: TVIRoom, didFailToConnectWithError error: Error) {
}
func room(_ room: TVIRoom, participantDidConnect participant: TVIRemoteParticipant) {
if (ProvideCallObject.remoteParticipant == nil) {
ProvideCallObject.remoteParticipant = participant
ProvideCallObject.remoteParticipant?.delegate = self
}
// print("Participant \(participant.identity) connected with \(participant.remoteAudioTracks.count) audio and \(participant.remoteVideoTracks.count) video tracks")
}
func room(_ room: TVIRoom, participantDidDisconnect participant: TVIRemoteParticipant) {
if (ProvideCallObject.remoteParticipant == participant) {
cleanupRemoteParticipant()
}
goToFeedback()
}
}
// MARK: TVIRemoteParticipantDelegate
extension IncomingController : TVILocalParticipantDelegate {
func localParticipant(_ participant: TVILocalParticipant, publishedVideoTrack: TVILocalVideoTrackPublication) {
ProvideCallObject.localVideoTrack = publishedVideoTrack.videoTrack as? TVILocalVideoTrack
}
}
// MARK: TVIRemoteParticipantDelegate
extension IncomingController : TVIRemoteParticipantDelegate {
func subscribed(to videoTrack: TVIRemoteVideoTrack,
publication: TVIRemoteVideoTrackPublication,
for participant: TVIRemoteParticipant) {
if (ProvideCallObject.remoteParticipant == participant) {
setupRemoteVideoView()
videoTrack.addRenderer(self.provider_remoteView!)
}
}
func remoteParticipant(_ participant: TVIRemoteParticipant,
enabledVideoTrack publication: TVIRemoteVideoTrackPublication) {
// print("Participant \(participant.identity) enabled \(publication.trackName) video track")
self.viewDisableVideo.isHidden = true
changeButtonImage(isVideoEnable: true)
}
func remoteParticipant(_ participant: TVIRemoteParticipant,
disabledVideoTrack publication: TVIRemoteVideoTrackPublication) {
self.viewDisableVideo.isHidden = false
changeButtonImage(isVideoEnable: false)
// print("Participant \(participant.identity) disabled \(publication.trackName) video track")
}
}
extension IncomingController : TVICameraCapturerDelegate {
func cameraCapturer(_ capturer: TVICameraCapturer, didStartWith source: TVICameraCaptureSource) {
// Layout the camera preview with dimensions appropriate for our orientation.
self.provider_previewView.shouldMirror = (source == .frontCamera)
}
}
// MARK: TVIVideoViewDelegate
extension IncomingController : TVIVideoViewDelegate {
func videoView(_ view: TVIVideoView, videoDimensionsDidChange dimensions: CMVideoDimensions) {
self.view.setNeedsLayout()
}
}
So, is it possible to switch or change the root controller in this case?
Thanks
Hey, I am working on a project (which is in Swift) that compares two audio signals and measures their correctness. The AudioKit pod is used to convert the audio from the microphone (AKAmplitudeTracker) to float numbers. I am trying to implement the same method by applying the tracker to the AKAudioPlayer. What I am trying to do is sample the source signal and the reference signal, get them as amplitude data only, and then perform the DTW (dynamic time warping) algorithm.
Is there any means by which I can get the AKAudioPlayer music converted to amplitude data? Is it possible to add a tracker to the music the AKAudioPlayer is currently playing? The code is given below. I need some expert advice. Thanks in advance and happy coding.
//
// Conductor.swift
// AmplitudeTracker
//
// Created by Mark Jeschke on 10/3/17.
// Copyright © 2017 Mark Jeschke. All rights reserved.
//
import AudioKit
import AudioKitUI
// Treat the conductor like a manager for the audio engine.
class Conductor {
// Singleton of the Conductor class to avoid multiple instances of the audio engine
var url:URL?
var fileName:String?
var type:String?
static let sharedInstance = Conductor()
var isPlayingKit:Bool?
var micTracker: AKAmplitudeTracker!
var mp3Tracker: AKAmplitudeTracker!
var player:AKAudioPlayer!
var mic: AKMicrophone!
var delay: AKDelay!
var reverb: AKCostelloReverb!
// Balance between the delay and reverb mix.
var reverbAmountMixer = AKDryWetMixer()
func play(file: String, type: String) -> AKAudioPlayer? {
let url = Bundle.main.url(forResource: file, withExtension: type)
let file = try! AKAudioFile(forReading: url!)
player = try! AKAudioPlayer(file: file)
if self.isPlayingKit! {
player.play()
mp3Tracker = AKAmplitudeTracker(player)
delay = AKDelay(mp3Tracker)
delay.time = 0.0
delay.feedback = 0.0
delay.dryWetMix = 0.5
reverb = AKCostelloReverb(delay)
reverb.presetShortTailCostelloReverb()
reverbAmountMixer = AKDryWetMixer(delay, reverb, balance: 0.8)
AudioKit.output = reverbAmountMixer
}
else {
self.isPlayingKit = true
AudioKit.output = nil
player.stop()
}
return player
}
init() {
AKSettings.playbackWhileMuted = true
mic = AKMicrophone()
print("INIT CONDUCTOR")
micTracker = AKAmplitudeTracker(mic)
delay = AKDelay(micTracker)
delay.time = 0.5
delay.feedback = 0.1
delay.dryWetMix = 0.5
reverb = AKCostelloReverb(delay)
reverb.presetShortTailCostelloReverb()
reverbAmountMixer = AKDryWetMixer(delay, reverb, balance: 0.8)
AudioKit.output = reverbAmountMixer
isPlayingKit = true
startAudioEngine()
}
func startAudioEngine() {
AudioKit.start()
isPlayingKit = true
print("Audio engine started")
}
func stopAudioEngine() {
AudioKit.stop()
isPlayingKit = false
print("Audio engine stopped")
}
}
The method above captures the amplitude of the microphone.
Below is where I tried to use AKAmplitudeTracker on the AKAudioPlayer.
//
// ViewController.swift
// AudioBoom
//
// Created by Alex Babu on 20/06/18.
// Copyright © 2018 Naico. All rights reserved.
//
import AudioKit
class ViewController: UIViewController {
var instantanousAmplitudeData = [Double]()
var timer:Timer?
var timerCount:Int?
let conductor = Conductor.sharedInstance
var player:AKAudioPlayer?
@IBOutlet weak var holderView: UIView!
@IBOutlet weak var equalizer: UILabel!
@IBOutlet weak var percentageLabel: UILabel!
override func viewDidLoad() {
super.viewDidLoad()
timerCount = 0
playMusicOutlet.layer.cornerRadius = playMusicOutlet.frame.size.height/2
playMusicOutlet.layer.borderColor = UIColor.cyan.cgColor
playMusicOutlet.layer.borderWidth = 2.0
playMusicOutlet.clipsToBounds = true
musicDTW.layer.cornerRadius = musicDTW.frame.size.height/2
musicDTW.layer.borderColor = UIColor.cyan.cgColor
musicDTW.layer.borderWidth = 2.0
musicDTW.clipsToBounds = true
holderView.layer.cornerRadius = holderView.frame.size.width/2
holderView.clipsToBounds = true
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return .lightContent
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
@IBOutlet weak var playMusicOutlet: UIButton!
@IBAction func playMusic(_ sender: Any) {
playMusicOutlet.setTitle("Talk now", for: .normal)
self.equalizer.isHidden = false
timerCount = 0
AVAudioSession.sharedInstance().requestRecordPermission({(_ granted: Bool) -> Void in
if granted {
print("Permission granted")
self.timer = Timer.scheduledTimer(withTimeInterval: 0.05, repeats: true) { [unowned self] (timer) in
if let count = self.timerCount {
DispatchQueue.main.async {
self.timerCount = count + 1
print("Amplitude of mic detected:\(self.conductor.micTracker.amplitude)")
print("Amplitude of mic counter:\(String(describing: count))")
print("Amplitude of mp3 detected:\(self.conductor.micTracker.amplitude)")
print("Amplitude of mp3 Counter:\(String(describing: count))")
self.instantanousAmplitudeData.append(self.conductor.micTracker.amplitude)
self.equalizer.frame.size.height = CGFloat(self.conductor.micTracker.amplitude * 500)
self.percentageLabel.text = String(Int(((self.conductor.micTracker.amplitude * 500)/500) * 100)) + "%"
if count == 10000 {
timer.invalidate()
self.equalizer.isHidden = true
}
}
}
}
}
else {
print("Permission denied")
}
})
}
@IBOutlet weak var musicDTW: UIButton!
@IBAction func musicDTWAction(_ sender: Any) {
let anotherConductor = Conductor.sharedInstance
if let ccc = anotherConductor.play(file: "Timebomb", type: "mp3") {
musicDTW.setTitle("Music DTW on", for: .normal)
if let mp3Tracker = conductor.mp3Tracker {
self.equalizer.frame.size.height = CGFloat(mp3Tracker.amplitude * 500)
}
}
else {
musicDTW.setTitle("Music DTW off", for: .normal)
}
}
}
There's a lot going on with all this code, so it's hard to debug, but what you describe is definitely possible and you probably just have some small thing wrong. Perhaps you can share the repo with me privately and I can fix it for you.
Try these out!
Conductor Class
import AudioKit
import AudioKitUI
// Treat the conductor like a manager for the audio engine.
class Conductor {
// Singleton of the Conductor class to avoid multiple instances of the audio engine
var url:URL?
var fileName:String?
var type:String?
static let sharedInstance = Conductor()
var isPlayingKit:Bool?
var micTracker: AKAmplitudeTracker!
var mp3Tracker: AKAmplitudeTracker!
var player:AKAudioPlayer!
var mic: AKMicrophone!
var delay: AKDelay!
var reverb: AKCostelloReverb!
// Balance between the delay and reverb mix.
var reverbAmountMixer = AKDryWetMixer()
func play(file: String, type: String) -> AKAudioPlayer? {
let url = Bundle.main.url(forResource: file, withExtension: type)
let file = try! AKAudioFile(forReading: url!)
player = try! AKAudioPlayer(file: file)
if self.isPlayingKit! {
mp3Tracker = AKAmplitudeTracker(player)
delay = AKDelay(mp3Tracker)
delay.time = 0.0
delay.feedback = 0.0
delay.dryWetMix = 0.5
reverb = AKCostelloReverb(delay)
reverb.presetShortTailCostelloReverb()
reverbAmountMixer = AKDryWetMixer(delay, reverb, balance: 0.8)
AudioKit.output = reverbAmountMixer //#1
player.play() //#2
}
else {
self.isPlayingKit = true
AudioKit.output = nil
// player.stop()
stopAudioEngine()
}
return player
}
func isPlayingAudioKit() -> Bool {
return isPlayingKit!
}
init() {
self.isPlayingKit = false
}
func initMicrophone() {
AKSettings.playbackWhileMuted = true
mic = AKMicrophone()
print("INIT CONDUCTOR")
micTracker = AKAmplitudeTracker(mic)
delay = AKDelay(micTracker)
delay.time = 1.5
delay.feedback = 0.1
delay.dryWetMix = 1.0
reverb = AKCostelloReverb(delay)
reverb.presetShortTailCostelloReverb()
reverbAmountMixer = AKDryWetMixer(delay, reverb, balance: 0.5)
AudioKit.output = reverbAmountMixer
isPlayingKit = true
startAudioEngine()
}
func startAudioEngine() {
AudioKit.start()
isPlayingKit = true
print("Audio engine started")
}
func stopAudioEngine() {
AudioKit.stop()
isPlayingKit = false
print("Audio engine stopped")
}
}
ViewController
import AudioKit
class ViewController: UIViewController {
var instantanousUserAudioData = [Float]()
var referenceAudioData = [Float]()
var timer:Timer?
var timerCount:Int?
let conductor = Conductor.sharedInstance
@IBOutlet weak var holderView: UIView!
@IBOutlet weak var equalizer: UILabel!
@IBOutlet weak var percentageLabel: UILabel!
@IBOutlet weak var timerOutlet: UIButton!
@IBOutlet weak var micOutlet: UIButton!
@IBOutlet weak var DTWOutlet: UIButton!
@IBOutlet weak var musicOutlet: UIButton!
@IBOutlet weak var resultLabel: UILabel!
@IBAction func timerAction(_ sender: Any) {
self.timer?.invalidate()
}
override func viewDidLoad() {
super.viewDidLoad()
timerCount = 0
micOutlet.layer.cornerRadius = micOutlet.frame.size.height/2
micOutlet.layer.borderColor = UIColor.cyan.cgColor
micOutlet.layer.borderWidth = 2.0
micOutlet.clipsToBounds = true
musicOutlet.layer.cornerRadius = musicOutlet.frame.size.height/2
musicOutlet.layer.borderColor = UIColor.cyan.cgColor
musicOutlet.layer.borderWidth = 2.0
musicOutlet.clipsToBounds = true
DTWOutlet.layer.cornerRadius = DTWOutlet.frame.size.height/2
DTWOutlet.layer.borderColor = UIColor.cyan.cgColor
DTWOutlet.layer.borderWidth = 2.0
DTWOutlet.clipsToBounds = true
timerOutlet.layer.cornerRadius = timerOutlet.frame.size.height/2
timerOutlet.layer.borderColor = UIColor.cyan.cgColor
timerOutlet.layer.borderWidth = 2.0
timerOutlet.clipsToBounds = true
holderView.layer.cornerRadius = holderView.frame.size.width/2
holderView.clipsToBounds = true
self.micOutlet.isEnabled = false
self.musicOutlet.isEnabled = false
AVAudioSession.sharedInstance().requestRecordPermission({(_ granted: Bool) -> Void in
self.micOutlet.isEnabled = granted
self.musicOutlet.isEnabled = granted
})
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return .lightContent
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
@IBAction func micAction(_ sender: Any) {
conductor.initMicrophone()
self.timerCount = 0
self.equalizer.isHidden = false
self.timer = Timer.scheduledTimer(withTimeInterval: 0.05, repeats: true) { [unowned self] (timer) in
if let count = self.timerCount {
DispatchQueue.main.async {
self.timerCount = count + 1
print("Amplitude of mic detected:\(self.conductor.micTracker.amplitude)")
print("Amplitude of mp3 Counter:\(String(describing: count))")
self.instantanousUserAudioData.append(Float(self.conductor.micTracker.amplitude))
self.equalizer.frame.size.height = CGFloat(self.conductor.micTracker.amplitude * 500)
self.percentageLabel.text = String(Int(((self.conductor.micTracker.amplitude * 500)/500) * 100)) + "%"
if count > 10 && self.conductor.micTracker.amplitude == 0.0 && self.instantanousUserAudioData.last == 0.0 {
self.micOutlet.backgroundColor = .green
self.micOutlet.setTitleColor(.black, for: .normal)
self.micOutlet.layer.borderColor = UIColor.red.cgColor
timer.invalidate()
}
if count == 0 {
self.micOutlet.backgroundColor = .clear
self.micOutlet.setTitleColor(.cyan, for: .normal)
self.micOutlet.layer.borderColor = UIColor.cyan.cgColor
}
}
}
}
}
@IBAction func musicAction(_ sender: Any) {
self.timerCount = 0
if self.conductor.play(file: voiceReference, type: type_mp3) != nil {
self.timer?.invalidate()
self.timer = Timer.scheduledTimer(withTimeInterval: 0.05, repeats: true) { [unowned self] (timer) in
if let count = self.timerCount {
DispatchQueue.main.async {
self.timerCount = count + 1
print("Amplitude of mp3 detected:\(self.conductor.mp3Tracker.amplitude)")
print("Amplitude of mp3 Counter:\(String(describing: count))")
self.referenceAudioData.append(Float(self.conductor.mp3Tracker.amplitude))
self.equalizer.frame.size.height = CGFloat(self.conductor.mp3Tracker.amplitude * 500)
self.equalizer.isHidden = false
self.percentageLabel.text = String(Int(((self.conductor.mp3Tracker.amplitude * 500)/500) * 100)) + "%"
if count > 10 && self.conductor.mp3Tracker.amplitude == 0.0 && self.referenceAudioData.last == 0.0 {
self.musicOutlet.backgroundColor = .green
self.musicOutlet.setTitleColor(.black, for: .normal)
self.musicOutlet.layer.borderColor = UIColor.red.cgColor
timer.invalidate()
}
if count == 0 {
self.musicOutlet.backgroundColor = .clear
self.musicOutlet.setTitleColor(.cyan, for: .normal)
self.musicOutlet.layer.borderColor = UIColor.cyan.cgColor
}
}
}
}
}
else {
}
}
@IBAction func resultAction(_ sender: Any) {
print("mic array:\(instantanousUserAudioData)")
print("song array:\(referenceAudioData)")
self.timer?.invalidate()
if referenceAudioData.count > 0, instantanousUserAudioData.count > 0 {
let refData = knn_curve_label_pair(curve: referenceAudioData,label: "reference")
let userData = knn_curve_label_pair(curve: instantanousUserAudioData,label: "userData")
let attempt:KNNDTW = KNNDTW()
attempt.train(data_sets: [refData,userData])
let prediction: knn_certainty_label_pair = attempt.predict(curve_to_test: referenceAudioData)
print("predicted :" + prediction.label, "with ", prediction.probability * 100,"% certainty")
resultLabel.text = "DTW cost is " + String(attempt.dtw_cost(y: referenceAudioData, x: instantanousUserAudioData))
print("COST OF THE DTW ALGORITHM IS : \(String(attempt.dtw_cost(y: referenceAudioData, x: instantanousUserAudioData)))")
}
}
}
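For reference, since KNNDTW is a third-party class, this is roughly the dynamic-programming cost a dtw_cost call computes (a self-contained sketch for illustration, not the library's actual implementation):
func dtwCost(_ a: [Float], _ b: [Float]) -> Float {
    guard !a.isEmpty, !b.isEmpty else { return .infinity }
    // cost[i][j] = minimal accumulated distance aligning a[0..<i] with b[0..<j]
    var cost = [[Float]](repeating: [Float](repeating: .infinity, count: b.count + 1),
                         count: a.count + 1)
    cost[0][0] = 0
    for i in 1...a.count {
        for j in 1...b.count {
            let d = abs(a[i - 1] - b[j - 1])
            // Cheapest way to reach (i, j): match, insertion, or deletion
            cost[i][j] = d + min(cost[i - 1][j - 1], cost[i - 1][j], cost[i][j - 1])
        }
    }
    return cost[a.count][b.count]
}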