How to manage AVAudioPlayer's isPlaying state in each tableView cell? - ios

I have a table view with multiple cells, and each cell owns an AVAudioPlayer item.
I'm facing a problem: I don't know how to manage the players.
When I play the first AVAudioPlayer and then play the second, the sounds overlap.
How do I stop the first AVAudioPlayer in my customized cell before playing the second one?
Thanks.
This is my customized cell:
import UIKit
import AVFoundation

class TableViewCell: UITableViewCell {
@IBOutlet weak var myImageView: UIImageView!
@IBOutlet weak var myChatBubbleView: UIView!
@IBOutlet weak var myDateLabel: UILabel!
@IBOutlet weak var mySecondLabel: UILabel!
@IBOutlet weak var myRecordPlayerBtn: MenuButton!
private var timer: Timer?
private var elapsedTimeInSecond: Int = 0
var audioPlayer: AVAudioPlayer?
var message: ChatroomMessage?
var chatroomId: String = ""
var delegate: PlayRecordDelegate?
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
self.backgroundColor = defaultBackgroundColor
self.tintColor = defaultChatroomCheckButtonColor
myImageView.layer.masksToBounds = true
myImageView.layer.cornerRadius = defaultIconRadius
myChatBubbleView.backgroundColor = defaultChatGreenBubbleColor
myChatBubbleView.layer.cornerRadius = defaultButtonRadius
myDateLabel.textColor = defaultChatTimeColor
mySecondLabel.textColor = defaultChatTimeColor
mySecondLabel.isHidden = true
myRecordPlayerBtn.imageView?.animationDuration = 1
myRecordPlayerBtn.imageView?.animationImages = [
UIImage(named: "img_myRocordPlaying1")!,
UIImage(named: "img_myRocordPlaying2")!,
UIImage(named: "img_myRocordPlaying3")!
]
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
func loadByMessage(_ message:ChatroomMessage, chatroomId:String) {
self.message = message
self.chatroomId = chatroomId
myRecordPlayerBtn.addTarget(self, action: #selector(recordPlay), for: .touchUpInside)
}
func resetRecordAnimation() {
self.myRecordPlayerBtn.imageView!.stopAnimating()
self.myRecordPlayerBtn.isSelected = false
}
@objc func recordPlay(_ sender: UIButton) {
self.myRecordPlayerBtn.imageView?.startAnimating()
let documentsDirectoryURL = try! FileManager().url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("\(chatroomId)/Record/")
let fileName = message?.content.substring(from: 62)
let fileURL = documentsDirectoryURL.appendingPathComponent(fileName!)
if FileManager.default.fileExists(atPath: fileURL.path) {
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
self.elapsedTimeInSecond = Int(audioDurationSeconds)
if audioPlayer?.isPlaying == true {
audioPlayer?.stop()
DispatchQueue.main.async {
self.resetTimer(second: self.elapsedTimeInSecond)
self.startTimer()
}
}
updateTimeLabel()
startTimer()
audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
audioPlayer?.delegate = self
audioPlayer?.play()
}else{
//don't have file in local
let recordUrl = URL(string: (message?.content)!)
URLSession.shared.downloadTask(with: recordUrl!, completionHandler: { (location, response, error) in
guard
let httpURLResponse = response as? HTTPURLResponse, httpURLResponse.statusCode == 200,
let mimeType = response?.mimeType, mimeType.hasPrefix("audio"),
let location = location, error == nil
else { return }
do {
try FileManager.default.moveItem(at: location, to: fileURL)
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
self.elapsedTimeInSecond = Int(audioDurationSeconds)
DispatchQueue.main.async {
self.updateTimeLabel()
self.startTimer()
}
self.audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
self.audioPlayer?.delegate = self
self.audioPlayer?.play()
} catch {
print(error)
}
}).resume()
}
}
func startTimer() {
timer?.invalidate()
timer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true, block: { (timer) in
self.elapsedTimeInSecond -= 1
self.updateTimeLabel()
})
}
func resetTimer(second:Int) {
timer?.invalidate()
elapsedTimeInSecond = second
updateTimeLabel()
}
func updateTimeLabel() {
let seconds = elapsedTimeInSecond % 60
let minutes = (elapsedTimeInSecond/60) % 60
mySecondLabel.isHidden = false
mySecondLabel.text = String(format: "%02d:%02d", minutes,seconds)
}
}
extension TableViewCell: AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
let documentsDirectoryURL = try! FileManager().url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("\(chatroomId)/Record/")
let fileName = message?.content.substring(from: 62)
let fileURL = documentsDirectoryURL.appendingPathComponent(fileName!)
if FileManager.default.fileExists(atPath: fileURL.path) {
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
DispatchQueue.main.async {
self.resetTimer(second: Int(audioDurationSeconds))
self.myRecordPlayerBtn.imageView!.stopAnimating()
self.myRecordPlayerBtn.imageView?.image = #imageLiteral(resourceName: "img_myRocordDefault")
}
}
}
}

First, check that your player has actually been initialized before checking whether it is playing:
if audioPlayer != nil{
if audioPlayer?.isPlaying == true {
audioPlayer?.stop()
DispatchQueue.main.async {
self.resetTimer(second: self.elapsedTimeInSecond)
self.startTimer()
}
}
}
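Note that this check only stops the player owned by the same cell. Since every cell owns its own AVAudioPlayer, stopping the previous cell's sound has to be coordinated somewhere shared. A minimal sketch of the idea, assuming you add a method to the PlayRecordDelegate protocol the cell already declares and let the controller track which cell is playing:

import UIKit

// Hypothetical addition to the question's PlayRecordDelegate protocol.
protocol PlayRecordDelegate: AnyObject {
    func cellWillStartPlaying(_ cell: TableViewCell)
}

class ChatroomViewController: UIViewController, PlayRecordDelegate {
    private weak var playingCell: TableViewCell?

    // Each cell calls delegate?.cellWillStartPlaying(self) at the top of recordPlay(_:).
    func cellWillStartPlaying(_ cell: TableViewCell) {
        if let previous = playingCell, previous !== cell {
            previous.audioPlayer?.stop()      // silence the previously playing cell
            previous.resetRecordAnimation()   // and reset its button animation
        }
        playingCell = cell
    }
}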

If you don't want to play two audio tracks at the same time, you should use a shared instance of AVAudioPlayer.
It is better for performance, and you can define the instance as a static var in your controller so it is accessible from each cell.

I have developed a music player application, and I used a shared instance in a MusicPlayManager:
class MusicPlayManager {
static let sharedInstance = MusicPlayManager()
var player: AVAudioPlayer?
private init() { }
// something else, such as playNext, playPrevious methods
}
In your viewController, you can use MusicPlayManager.sharedInstance.player.
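Applied to the cell in the question, a minimal sketch of playing through the shared manager (so that starting one cell's audio silences any other) could look like this:

import AVFoundation

// Sketch only: the cell plays through MusicPlayManager instead of owning its player.
extension TableViewCell {
    func playShared(fileURL: URL) {
        let manager = MusicPlayManager.sharedInstance
        // Whatever another cell started playing stops here.
        manager.player?.stop()
        do {
            manager.player = try AVAudioPlayer(contentsOf: fileURL)
            manager.player?.delegate = self   // keep the per-cell delegate for timer/animation updates
            manager.player?.play()
        } catch {
            print(error)
        }
    }
}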

Related

Share Extension doesn't open/share .HEIC type images

I created a share extension for my app which can share images/videos from Photos (Gallery), and it works properly. The problem is that if I select a .HEIC type file and then tap my share extension, it shows the error in the screenshot below.
import UIKit
import Social
import MobileCoreServices
import AVKit
import Toast_Swift
//@objc(ShareExtensionViewController)
class ShareViewController: UIViewController {
@IBOutlet weak var image: UIImageView!
@IBOutlet weak var btn: UIButton!
@IBOutlet weak var lbl: UILabel!
@IBOutlet weak var view2: UIView!
@IBOutlet weak var cancel: UIButton!
@IBOutlet weak var blurview: UIVisualEffectView!
var allMedia = [Data()]
var singleImage = UIImage()
var imagesUrl = [URL]()
let groupPath = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: "my group identifier")
var saveDone = Bool()
override func viewDidLoad() {
super.viewDidLoad()
let attachments = (self.extensionContext?.inputItems.first as? NSExtensionItem)?.attachments ?? []
self.btn.setTitle("Add", for: .normal)
self.btn.layer.cornerRadius = 10
self.cancel.layer.cornerRadius = 10
self.image.clipsToBounds = false
applyshadow(image: image)
blurview.isHidden = true
}
override func viewDidAppear(_ animated: Bool) {
self.handleSharedFile()
}
// MARK:- function
func applyshadow(image: UIImageView) {
image.clipsToBounds = false
image.layer.shadowColor = UIColor.systemGray.cgColor
image.layer.shadowOpacity = 1
image.layer.shadowOffset = CGSize.zero
image.layer.shadowRadius = 7.5
}
func videotext(string: String) -> Bool {
let videoextension = [".MP4", ".mp4", ".mkv", ".MKV", ".AVI", ".avi", ".mov", ".MOV"]
return videoextension.contains(string)
}
func videoThumb(filepath: URL) -> UIImage{
do{
// let filepath = data?.appendingPathComponent(lbl[indexPath.row])
let asset = AVURLAsset(url: filepath, options: nil)
let imgGenerator = AVAssetImageGenerator(asset: asset)
imgGenerator.appliesPreferredTrackTransform = true
let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(value: 0, timescale: 1), actualTime: nil)
let uiImage = UIImage(cgImage: cgImage)
return uiImage
}catch let error {
print("Error: \(error.localizedDescription)")
return UIImage()
}
}
private func handleSharedFile() {
// extracting the path to the URL that is being shared
let attachments = (self.extensionContext?.inputItems.first as? NSExtensionItem)?.attachments ?? []
let contentType = kUTTypeData as String
for provider in attachments {
// Check if the content type is the same as we expected
if provider.hasItemConformingToTypeIdentifier(contentType) {
provider.loadItem(forTypeIdentifier: contentType,
options: nil) { [unowned self] (data, error) in
guard error == nil else { return }
if let url = data as? URL {
imagesUrl.append(url)
print(imagesUrl)
if videotext(string: String(url.lastPathComponent.suffix(4))){
DispatchQueue.main.async {
self.image.image = videoThumb(filepath: url)
if attachments.count > 1 {
lbl.text = "\(attachments.count) Items"
}else{
lbl.text = url.lastPathComponent
}
}
}else {
let imageData = try? Data(contentsOf: url)
DispatchQueue.main.async {
if provider == attachments.last{
self.image.image = UIImage(data: imageData!)
}
self.singleImage = UIImage(data: imageData!)!
if attachments.count > 1 {
lbl.text = "\(attachments.count) Items"
}else{
lbl.text = url.lastPathComponent
}
}
}
}else {
print("Impossible to save image")
}
}
}
}
}
Thank you in advance for reading, giving attention, answering and upvoting my question :)
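No answer was captured for this question. One commonly suggested direction (an assumption, not a verified fix) is to request the image type identifier instead of kUTTypeData and to accept the item whether it arrives as a URL, Data, or UIImage, since HEIC attachments are not always delivered as a file URL:

import UIKit
import MobileCoreServices

// Sketch: tolerant image loading for use inside handleSharedFile().
func loadImage(from provider: NSItemProvider, into imageView: UIImageView) {
    provider.loadItem(forTypeIdentifier: kUTTypeImage as String, options: nil) { (item, error) in
        guard error == nil else { return }
        var loaded: UIImage?
        switch item {
        case let url as URL:       // JPEG/PNG and most HEIC files arrive as file URLs
            loaded = (try? Data(contentsOf: url)).flatMap { UIImage(data: $0) }
        case let data as Data:     // some providers hand over raw data instead
            loaded = UIImage(data: data)
        case let image as UIImage: // screenshots and edited photos can arrive as UIImage
            loaded = image
        default:
            break
        }
        DispatchQueue.main.async { imageView.image = loaded }
    }
}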

iOS - converting to fmp4 (from mp4/m4a) and streaming it

I didn't manage to find any code about this issue.
I have recorded an mp4 file (audioFile.mp4) and now I want to stream it through a socket, yet I have a problem converting it.
I know the ffmpeg platform exists (https://www.ffmpeg.org/), yet I didn't see any code for it.
I would appreciate any idea.
class ViewController: UIViewController {
var requestManager = RequestManager()
var socket: WebSocket?
var audioRecorder: AVAudioRecorder!
@IBOutlet weak var recordBtn: UIButton!
@IBOutlet weak var playBtn: UIButton!
var fileName: String = "audioFile.mp4"
var soundRecorder: AVAudioRecorder?
var soundPlayer: AVAudioPlayer?
var audioSession = AVAudioSession.sharedInstance()
override func viewDidLoad() {
super.viewDidLoad()
self.socket?.delegate = self
setUpRecorder()
playBtn.isEnabled = false
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
self.audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.playAndRecord, mode: .measurement, options: .defaultToSpeaker)
try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
} catch {
print(error)
}
}
func getDocDirector() -> URL {
let path = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return path[0]
}
func setUpRecorder() {
let audioFileName = getDocDirector().appendingPathComponent(fileName)
let recordSettings: [String: Any] = [AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVEncoderAudioQualityKey: AVAudioQuality.max.rawValue,
AVNumberOfChannelsKey: 1,
AVEncoderBitRateKey: 16000,
AVSampleRateKey: 16000]
do {
soundRecorder = try AVAudioRecorder(url: audioFileName, settings: recordSettings)
soundRecorder?.delegate = self
soundRecorder?.prepareToRecord()
} catch {
print(error)
}
}
func setUpPlayer() {
let audioFileName = getDocDirector().appendingPathComponent(fileName)
do {
soundPlayer = try AVAudioPlayer(contentsOf: audioFileName)
soundPlayer?.delegate = self
soundPlayer?.prepareToPlay()
soundPlayer?.volume = 1.0
} catch {
print(error)
}
}
@IBAction func recordAction(_ sender: Any) {
if recordBtn.titleLabel?.text == "Record" {
soundRecorder?.record()
recordBtn.setTitle("Stop", for: .normal)
playBtn.isEnabled = false
} else {
soundRecorder?.stop()
recordBtn.setTitle("Record", for: .normal)
playBtn.isEnabled = false
}
}
@IBAction func playAction(_ sender: Any) {
if playBtn.titleLabel?.text == "Play" {
playBtn.setTitle("Stop", for: .normal)
recordBtn.isEnabled = false
setUpPlayer()
soundPlayer?.play()
} else {
playBtn.setTitle("Play", for: .normal)
recordBtn.isEnabled = false
}
}
func openSocket() {
getUrl(success: { [weak self] (url) in
self?.socket = WebSocket(url: URL(string: url)!)
self?.socket?.connect()
}) { (e) in
//
}
}
}
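No answer was captured here either. For reference, since iOS 14 AVAssetWriter can emit fragmented MP4 directly through its delegate-based segment API, with no ffmpeg involved; a rough sketch under that assumption:

import AVFoundation
import UniformTypeIdentifiers

// Sketch (iOS 14+): write fMP4 segments and hand each one to a socket.
final class FragmentedMP4Streamer: NSObject, AVAssetWriterDelegate {
    let writer: AVAssetWriter
    let sendOverSocket: (Data) -> Void   // hypothetical transport hook

    init(send: @escaping (Data) -> Void) {
        writer = AVAssetWriter(contentType: UTType(AVFileType.mp4.rawValue)!)
        sendOverSocket = send
        super.init()
        writer.outputFileTypeProfile = .mpeg4AppleHLS   // fragmented MP4 output
        writer.preferredOutputSegmentInterval = CMTime(seconds: 6, preferredTimescale: 1)
        writer.initialSegmentStartTime = .zero
        writer.delegate = self
        // Add an AVAssetWriterInput for the AAC audio, then call startWriting()
        // and startSession(atSourceTime: .zero) before appending sample buffers.
    }

    // Called once with the initialization segment, then once per media segment.
    func assetWriter(_ writer: AVAssetWriter,
                     didOutputSegmentData segmentData: Data,
                     segmentType: AVAssetSegmentType,
                     segmentReport: AVAssetSegmentReport?) {
        sendOverSocket(segmentData)
    }
}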

How to record an audio stream and save it to a file / Swift 4.2

I am creating a radio application for iPhone (coded in Swift 4.2) and I want to add a function that records and saves to a file the sound produced by my radio (played from an AVPlayer) when I push a button. Which code should I use?
The code is in Swift 4.2, with Xcode 10.1.
I search on the web : "How to record an audio stream swift 4.2", "How to record audio from AVPlayer swift 4.2", but I can't find an answer.
My code:
import UIKit
import AVFoundation
import MediaPlayer
class ViewControllerPlayer: UIViewController {
var URl = "http://link_of_audio_stream"
var player:AVPlayer?
var playerItem:AVPlayerItem?
var playerLayer:AVPlayerLayer?
override func viewDidLoad() {
super.viewDidLoad()
let url = URL(string: URl)
let playerItem1:AVPlayerItem = AVPlayerItem(url: url!)
player = AVPlayer(playerItem: playerItem1)
}
@IBAction func Play(_ sender: Any) {
player?.play()
}
@IBAction func Pause(_ sender: Any) {
player?.pause()
}
private var audioRecorder: AVAudioRecorder!
func startRecording() throws {
guard let newFileURL = createURLForNewRecord() else {
throw RecordingServiceError.canNotCreatePath
}
do {
var urlString = URL(string: URl)
urlString = newFileURL
audioRecorder = try AVAudioRecorder(url: newFileURL,
settings: [AVFormatIDKey:Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 8000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue])
audioRecorder.delegate = self as? AVAudioRecorderDelegate
audioRecorder.prepareToRecord()
audioRecorder.record(forDuration: TimeConstants.recordDuration)
//error: Use of unresolved identifier 'TimeConstants'
} catch let error {
print(error)
}
}
func STOPREC1() throws {
audioRecorder.stop()
audioRecorder = nil
print("Recording finished successfully.")
}
enum RecordingServiceError: String, Error {
case canNotCreatePath = "Can not create path for new recording"
}
private func createURLForNewRecord() -> URL? {
guard let appGroupFolderUrl = FileManager.getAppFolderURL() else {
return nil
}
let date = String(describing: Date())
let fullFileName = "Enregistrement radio " + date + ".m4a"
let newRecordFileName = appGroupFolderUrl.appendingPathComponent(fullFileName)
return newRecordFileName
}
}
extension FileManager {
class func getAppFolderURL() -> URL? {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
}
After multiple internet searches, I found the solution.
I found this Swift class somewhere on the internet, named « CachingPlayerItem.swift », which allows you to record an online audio stream.
import Foundation
import AVFoundation
fileprivate extension URL {
func withScheme(_ scheme: String) -> URL? {
var components = URLComponents(url: self, resolvingAgainstBaseURL: false)
components?.scheme = scheme
return components?.url
}
}
@objc protocol CachingPlayerItemDelegate {
/// Is called when the media file is fully downloaded.
@objc optional func playerItem(_ playerItem: CachingPlayerItem, didFinishDownloadingData data: Data)
/// Is called every time a new portion of data is received.
@objc optional func playerItem(_ playerItem: CachingPlayerItem, didDownloadBytesSoFar bytesDownloaded: Int, outOf bytesExpected: Int)
/// Is called after initial prebuffering is finished, meaning
/// we are ready to play.
@objc optional func playerItemReadyToPlay(_ playerItem: CachingPlayerItem)
/// Is called when the data being downloaded did not arrive in time to
/// continue playback.
@objc optional func playerItemPlaybackStalled(_ playerItem: CachingPlayerItem)
/// Is called on downloading error.
@objc optional func playerItem(_ playerItem: CachingPlayerItem, downloadingFailedWith error: Error)
}
open class CachingPlayerItem: AVPlayerItem {
class ResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate, URLSessionDataDelegate, URLSessionTaskDelegate {
var playingFromData = false
var mimeType: String? // is required when playing from Data
var session: URLSession?
var mediaData: Data?
var response: URLResponse?
var pendingRequests = Set<AVAssetResourceLoadingRequest>()
weak var owner: CachingPlayerItem?
var fileURL: URL!
var outputStream: OutputStream?
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
if playingFromData {
// Nothing to load.
} else if session == nil {
// If we're playing from a url, we need to download the file.
// We start loading the file on first request only.
guard let initialUrl = owner?.url else {
fatalError("internal inconsistency")
}
startDataRequest(with: initialUrl)
}
pendingRequests.insert(loadingRequest)
processPendingRequests()
return true
}
func startDataRequest(with url: URL) {
var recordingName = "record.mp3"
if let recording = owner?.recordingName{
recordingName = recording
}
fileURL = try! FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false)
.appendingPathComponent(recordingName)
let configuration = URLSessionConfiguration.default
configuration.requestCachePolicy = .reloadIgnoringLocalAndRemoteCacheData
session = URLSession(configuration: configuration, delegate: self, delegateQueue: nil)
session?.dataTask(with: url).resume()
outputStream = OutputStream(url: fileURL, append: true)
outputStream?.schedule(in: RunLoop.current, forMode: RunLoop.Mode.default)
outputStream?.open()
}
func resourceLoader(_ resourceLoader: AVAssetResourceLoader, didCancel loadingRequest: AVAssetResourceLoadingRequest) {
pendingRequests.remove(loadingRequest)
}
// MARK: URLSession delegate
func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
_ = data.withUnsafeBytes { outputStream?.write($0, maxLength: data.count) }
}
func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive response: URLResponse, completionHandler: @escaping (URLSession.ResponseDisposition) -> Void) {
completionHandler(Foundation.URLSession.ResponseDisposition.allow)
mediaData = Data()
self.response = response
processPendingRequests()
}
func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
if let errorUnwrapped = error {
owner?.delegate?.playerItem?(owner!, downloadingFailedWith: errorUnwrapped)
return
}
processPendingRequests()
owner?.delegate?.playerItem?(owner!, didFinishDownloadingData: mediaData!)
}
// MARK: -
func processPendingRequests() {
// get all fulfilled requests
let requestsFulfilled = Set<AVAssetResourceLoadingRequest>(pendingRequests.compactMap {
self.fillInContentInformationRequest($0.contentInformationRequest)
if self.haveEnoughDataToFulfillRequest($0.dataRequest!) {
$0.finishLoading()
return $0
}
return nil
})
// remove fulfilled requests from pending requests
_ = requestsFulfilled.map { self.pendingRequests.remove($0) }
}
func fillInContentInformationRequest(_ contentInformationRequest: AVAssetResourceLoadingContentInformationRequest?) {
if playingFromData {
contentInformationRequest?.contentType = self.mimeType
contentInformationRequest?.contentLength = Int64(mediaData!.count)
contentInformationRequest?.isByteRangeAccessSupported = true
return
}
guard let responseUnwrapped = response else {
// have no response from the server yet
return
}
contentInformationRequest?.contentType = responseUnwrapped.mimeType
contentInformationRequest?.contentLength = responseUnwrapped.expectedContentLength
contentInformationRequest?.isByteRangeAccessSupported = true
}
func haveEnoughDataToFulfillRequest(_ dataRequest: AVAssetResourceLoadingDataRequest) -> Bool {
let requestedOffset = Int(dataRequest.requestedOffset)
let requestedLength = dataRequest.requestedLength
let currentOffset = Int(dataRequest.currentOffset)
guard let songDataUnwrapped = mediaData,
songDataUnwrapped.count > currentOffset else {
return false
}
let bytesToRespond = min(songDataUnwrapped.count - currentOffset, requestedLength)
let dataToRespond = songDataUnwrapped.subdata(in: Range(uncheckedBounds: (currentOffset, currentOffset + bytesToRespond)))
dataRequest.respond(with: dataToRespond)
return songDataUnwrapped.count >= requestedLength + requestedOffset
}
deinit {
session?.invalidateAndCancel()
}
}
fileprivate let resourceLoaderDelegate = ResourceLoaderDelegate()
fileprivate let url: URL
fileprivate let initialScheme: String?
fileprivate var customFileExtension: String?
weak var delegate: CachingPlayerItemDelegate?
func stopDownloading(){
resourceLoaderDelegate.session?.invalidateAndCancel()
}
open func download() {
if resourceLoaderDelegate.session == nil {
resourceLoaderDelegate.startDataRequest(with: url)
}
}
private let cachingPlayerItemScheme = "cachingPlayerItemScheme"
var recordingName = "record.mp3"
/// Is used for playing remote files.
convenience init(url: URL, recordingName: String) {
self.init(url: url, customFileExtension: nil, recordingName: recordingName)
}
/// Override/append custom file extension to URL path.
/// This is required for the player to work correctly with the intended file type.
init(url: URL, customFileExtension: String?, recordingName: String) {
guard let components = URLComponents(url: url, resolvingAgainstBaseURL: false),
let scheme = components.scheme,
var urlWithCustomScheme = url.withScheme(cachingPlayerItemScheme) else {
fatalError("Urls without a scheme are not supported")
}
self.recordingName = recordingName
self.url = url
self.initialScheme = scheme
if let ext = customFileExtension {
urlWithCustomScheme.deletePathExtension()
urlWithCustomScheme.appendPathExtension(ext)
self.customFileExtension = ext
}
let asset = AVURLAsset(url: urlWithCustomScheme)
asset.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
super.init(asset: asset, automaticallyLoadedAssetKeys: nil)
resourceLoaderDelegate.owner = self
addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.new, context: nil)
NotificationCenter.default.addObserver(self, selector: #selector(playbackStalledHandler), name:NSNotification.Name.AVPlayerItemPlaybackStalled, object: self)
}
/// Is used for playing from Data.
init(data: Data, mimeType: String, fileExtension: String) {
guard let fakeUrl = URL(string: cachingPlayerItemScheme + "://whatever/file.\(fileExtension)") else {
fatalError("internal inconsistency")
}
self.url = fakeUrl
self.initialScheme = nil
resourceLoaderDelegate.mediaData = data
resourceLoaderDelegate.playingFromData = true
resourceLoaderDelegate.mimeType = mimeType
let asset = AVURLAsset(url: fakeUrl)
asset.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
super.init(asset: asset, automaticallyLoadedAssetKeys: nil)
resourceLoaderDelegate.owner = self
addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.new, context: nil)
NotificationCenter.default.addObserver(self, selector: #selector(playbackStalledHandler), name:NSNotification.Name.AVPlayerItemPlaybackStalled, object: self)
}
// MARK: KVO
override open func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey : Any]?, context: UnsafeMutableRawPointer?) {
delegate?.playerItemReadyToPlay?(self)
}
// MARK: Notification handlers
@objc func playbackStalledHandler() {
delegate?.playerItemPlaybackStalled?(self)
}
// MARK: -
override init(asset: AVAsset, automaticallyLoadedAssetKeys: [String]?) {
fatalError("not implemented")
}
deinit {
NotificationCenter.default.removeObserver(self)
removeObserver(self, forKeyPath: "status")
resourceLoaderDelegate.session?.invalidateAndCancel()
}
}
Then, in your main Swift file, you put this code to record:
var recordingName: String? = "my_rec_name.mp3"
var playerItem: CachingPlayerItem!
let url_stream = URL(string: "http://my_url_stream_link")
playerItem = CachingPlayerItem(url: url_stream!, recordingName: recordingName ?? "record.mp3")
let player1 = AVPlayer(playerItem: playerItem)
player1.automaticallyWaitsToMinimizeStalling = false
And to stop the recording, you use this code:
playerItem.stopDownloading()
recordingName = nil
playerItem = nil
Recordings will be saved in your app's Documents directory.
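If you also want a callback when the stream has finished downloading, the class above already declares CachingPlayerItemDelegate; a small sketch of hooking it up (remember to set playerItem.delegate = self after creating the item):

extension ViewControllerPlayer: CachingPlayerItemDelegate {
    func playerItem(_ playerItem: CachingPlayerItem, didFinishDownloadingData data: Data) {
        // The full stream has been written to the recording file at this point.
        print("Finished downloading \(data.count) bytes")
    }
    func playerItem(_ playerItem: CachingPlayerItem, downloadingFailedWith error: Error) {
        print("Download failed: \(error)")
    }
}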
I had a really hard time with this one so I am posting an answer.
Remember to add the microphone and speech recognition usage descriptions (NSMicrophoneUsageDescription and NSSpeechRecognitionUsageDescription) to your Info.plist.
Here is my controller that records the voice input and returns it to a previous controller:
import Foundation
import UIKit
import Speech
class SpeechToTextViewController: UIViewController {
@IBOutlet weak var animationView: UIView!
@IBOutlet weak var circleView: UIView!
@IBOutlet weak var micImage: UIImageView!
@IBOutlet weak var listeningLabel: UILabel!
@IBOutlet weak var buttonStartView: UIView!
@IBOutlet weak var cancelRecordingButton: UIButton!
@IBOutlet weak var stopRecordingButton: UIButton!
@IBOutlet weak var startRecordingButton: UIButton!
private let audioEngine = AVAudioEngine()
private let speechRecognizer = SFSpeechRecognizer(locale: Locale.init(identifier:"en-US"))
private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest!
private var recognitionTask: SFSpeechRecognitionTask?
private var isRecording: Bool = false
var delegate: SpeechToTextViewDelegate?
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor(white: 1.0, alpha: 0.25)
self.stopRecordingButton.isHidden = true
self.listeningLabel.isHidden = true
}
@IBAction func startStopRecording(_ sender: Any) {
isRecording = !isRecording
if isRecording && !audioEngine.isRunning {
self.cancelRecordingButton.isHidden = true
self.startRecordingButton.isHidden = true
self.stopRecordingButton.isHidden = false
self.listeningLabel.isHidden = false
UIView.animate(withDuration: 1, animations: {}) { _ in
UIView.animate(withDuration: 1, delay: 0.25, options: [.autoreverse, .repeat], animations: {
self.circleView.transform = CGAffineTransform(scaleX: 1.5, y: 1.5)
})
}
do {
try recordSpeech()
} catch {
print(error)
}
} else {
self.listeningLabel.isHidden = true
stopRecording()
}
}
func recordSpeech() throws {
recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
let node = audioEngine.inputNode
let recordingFormat = node.outputFormat(forBus: 0)
node.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) {buffer, _ in
self.recognitionRequest.append(buffer)
}
audioEngine.prepare()
try audioEngine.start()
guard let myRecognizer = SFSpeechRecognizer() else {
print("myRecognizer is unable to be created")
return
}
if !myRecognizer.isAvailable
{
print("myRecognizer is not available")
return
}
recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest, resultHandler: { result, error in
var isFinal = false
if let result = result
{
isFinal = result.isFinal
self.delegate?.appendMessage(result.bestTranscription.formattedString)
}
if error != nil || isFinal {
if error != nil {
print("error trying to capture speech to text")
print(error!)
}
self.stopRecording()
}
})
}
func stopRecording() {
if audioEngine.isRunning {
self.audioEngine.stop()
self.recognitionRequest.endAudio()
// Cancel the previous task if it's running
if let recognitionTask = recognitionTask {
recognitionTask.cancel()
self.recognitionTask = nil
}
}
delegate?.doneTalking()
self.dismiss(animated: true, completion: nil)
}
@IBAction func cancelRecording(_ sender: Any) {
delegate?.doneTalking()
self.dismiss(animated: true, completion: nil)
}
}
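One thing the controller above takes for granted is that permission was already granted. If it wasn't, request it before presenting this screen; both calls are standard Speech/AVFoundation API:

import Speech
import AVFoundation

// Request speech-recognition and microphone permission up front.
func requestSpeechPermissions(completion: @escaping (Bool) -> Void) {
    SFSpeechRecognizer.requestAuthorization { status in
        guard status == .authorized else {
            DispatchQueue.main.async { completion(false) }
            return
        }
        AVAudioSession.sharedInstance().requestRecordPermission { granted in
            DispatchQueue.main.async { completion(granted) }
        }
    }
}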
Use AVAudioRecorder for recording:
private var audioRecorder: AVAudioRecorder!
After you declared an audio recorder you can write a recording method:
func startRecording() throws {
guard let newFileURL = createURLForNewRecord() else {
throw RecordingServiceError.canNotCreatePath
}
do {
currentFileURL = newFileURL
audioRecorder = try AVAudioRecorder(url: newFileURL,
settings: [AVFormatIDKey:Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 8000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.min.rawValue])
audioRecorder.delegate = self
audioRecorder.prepareToRecord()
audioRecorder.record(forDuration: TimeConstants.recordDuration)
} catch let error {
print(error)
}
}
And use some helper methods and structs:
enum RecordingServiceError: String, Error {
case canNotCreatePath = "Can not create path for new recording"
}
private func createURLForNewRecord() -> URL? {
guard let appGroupFolderUrl = FileManager.getAppFolderURL() else {
return nil
}
let fileNamePrefix = String(describing: Date())
let fullFileName = "Record_" + fileNamePrefix + ".m4a"
let newRecordFileName = appGroupFolderUrl.appendingPathComponent(fullFileName)
return newRecordFileName
}
extension FileManager {
class func getAppFolderURL() -> URL? {
return FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: "your app group identifier")
}
}
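startRecording() above uses a TimeConstants identifier, the same one the question flagged as unresolved; it is simply a constant you define yourself, for example:

import Foundation

// Hypothetical helper assumed by startRecording(); pick any maximum duration you need.
enum TimeConstants {
    static let recordDuration: TimeInterval = 300   // cap recordings at 5 minutes
}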

CAGradientLayer animation not in sync (animation delay)

I was building a weather app, and everything was fine until I added a CAGradientLayer transition to a UIView. It still works, but the transition takes around 10-15 seconds to appear. I'm using the Apixu API for the weather. One thing to note is that the audio plays perfectly, right away. I have checked the duration and there is nothing wrong with the animation:
import UIKit
import AVFoundation
class ViewController: UIViewController, UISearchBarDelegate {
@IBOutlet weak var searchBar: UISearchBar!
@IBOutlet weak var cityLbl: UILabel!
@IBOutlet weak var conditionLbl: UILabel!
@IBOutlet weak var degreeLbl: UILabel!
@IBOutlet weak var imgView: UIImageView!
@IBOutlet weak var gradientView: UIView!
let gradientLayer = CAGradientLayer()
let gradientLayer2 = CAGradientLayer()
let gradientLayer3 = CAGradientLayer()
var degree: Int!
var condition: String!
var imgURL: String!
var city: String!
var exists: Bool = true
var audioPlayer1 = AVAudioPlayer()
var audioPlayer2 = AVAudioPlayer()
var audioPlayer3 = AVAudioPlayer()
var audioPlayerSunny = AVAudioPlayer()
var audioPlayerSnow = AVAudioPlayer()
var audioPlayerThunder = AVAudioPlayer()
// Audio part.
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
gradientLayer.frame = gradientView.bounds
gradientLayer.colors = [UIColor.blue.cgColor, UIColor.black.cgColor]
gradientView.layer.insertSublayer(gradientLayer, at: 0)
//gradient color view
gradientLayer2.colors = [UIColor.red.cgColor, UIColor.purple.cgColor]
//gradient color view 2
gradientLayer3.colors = [UIColor.orange.cgColor, UIColor.green.cgColor]
//gradient color view 3
searchBar.delegate = self
// Audio Code starts.
do {
audioPlayer1 = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "ambiance", ofType: "mp3")!))
audioPlayer1.prepareToPlay()
audioPlayer2 = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "guitar", ofType: "mp3")!))
audioPlayer2.prepareToPlay()
audioPlayer3 = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "rain", ofType: "mp3")!))
audioPlayer3.prepareToPlay()
audioPlayerSunny = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "sunny", ofType: "mp3")!))
audioPlayerSunny.prepareToPlay()
audioPlayerSnow = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "snow", ofType: "mp3")!))
audioPlayerSnow.prepareToPlay()
audioPlayerThunder = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "thunder", ofType: "mp3")!))
audioPlayerThunder.prepareToPlay()
}
catch {
print(error)
} // Audio Code ends.
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
searchBar.resignFirstResponder()
}
func searchBarSearchButtonClicked(_ searchBar: UISearchBar) {
searchBar.resignFirstResponder()
let urlRequest = URLRequest(url: URL(string: "http://api.apixu.com/v1/current.json?key=494942fa74444eabb6973325170510&q=\(searchBar.text!.replacingOccurrences(of: " ", with: "%20"))")!)
let task = URLSession.shared.dataTask(with: urlRequest) { (data, response, error) in
if error == nil {
do {
let json = try JSONSerialization.jsonObject(with: data!, options: .mutableContainers) as! [String : AnyObject]
if let current = json["current"] as? [String : AnyObject] {
if let temp = current["temp_c"] as? Float {
self.degree = Int(temp.rounded())
}
if let condition = current["condition"] as? [String : AnyObject] {
self.condition = condition["text"] as! String
let icon = condition["icon"] as! String
self.imgURL = "http:\(icon)"
}
}
if let location = json["location"] as? [String : AnyObject] {
self.city = location["name"] as! String
}
if let _ = json["error"] {
self.exists = false
}
if self.condition == "Sunny" || self.condition == "Clear" {
self.audioPlayerSunny.currentTime = 0
self.audioPlayerSunny.play()
self.audioPlayer1.stop()
self.audioPlayer3.stop()
self.audioPlayer2.stop()
self.audioPlayerSnow.stop()
self.audioPlayerThunder.stop()
// Audio Code.
let colorChangeAnimation = CABasicAnimation(keyPath: "colors")
colorChangeAnimation.duration = 1.0
colorChangeAnimation.toValue = self.gradientLayer3.colors
colorChangeAnimation.fillMode = kCAFillModeForwards
colorChangeAnimation.isRemovedOnCompletion = false
self.gradientLayer.add(colorChangeAnimation, forKey: "colorChange")
//effects
} else if self.condition.lowercased().range(of:"cloudy") != nil || self.condition.lowercased().range(of:"overcast") != nil {
self.audioPlayer1.currentTime = 0
self.audioPlayer1.play()
self.audioPlayer2.stop()
self.audioPlayer3.stop()
self.audioPlayerSnow.stop()
self.audioPlayerSunny.stop()
self.audioPlayerThunder.stop()
// Audio Code.
let colorChangeAnimation = CABasicAnimation(keyPath: "colors")
colorChangeAnimation.duration = 1.0
colorChangeAnimation.toValue = self.gradientLayer2.colors
colorChangeAnimation.fillMode = kCAFillModeForwards
colorChangeAnimation.isRemovedOnCompletion = false
self.gradientLayer.add(colorChangeAnimation, forKey: "colorChange")
//effects
} else if self.condition.lowercased().range(of:"snow") != nil || self.condition.lowercased().range(of:"snowy") != nil || self.condition.lowercased().range(of:"sleet") != nil
{
self.audioPlayerSnow.currentTime = 0
self.audioPlayerSnow.play()
self.audioPlayer2.stop()
self.audioPlayer3.stop()
self.audioPlayer1.stop()
self.audioPlayerSunny.stop()
self.audioPlayerThunder.stop()
// Audio Code.
} else if self.condition.lowercased().range(of:"rain") != nil || self.condition.lowercased().range(of:"rainy") != nil || self.condition.lowercased().range(of:"drizzle") != nil
{
self.audioPlayer3.currentTime = 0
self.audioPlayer3.play()
self.audioPlayer2.stop()
self.audioPlayer1.stop()
self.audioPlayerSnow.stop()
self.audioPlayerSunny.stop()
self.audioPlayerThunder.stop()
// Audio Code.
} else if self.condition.lowercased().range(of:"thunder") != nil || self.condition.lowercased().range(of:"thundery") != nil {
self.audioPlayerThunder.currentTime = 0
self.audioPlayerThunder.play()
self.audioPlayer2.stop()
self.audioPlayer3.stop()
self.audioPlayerSnow.stop()
self.audioPlayerSunny.stop()
self.audioPlayer1.stop()
// Audio Code.
} else {
self.audioPlayer2.currentTime = 0
self.audioPlayer2.play()
self.audioPlayer1.stop()
self.audioPlayer3.stop()
self.audioPlayerSnow.stop()
self.audioPlayerSunny.stop()
self.audioPlayerThunder.stop()
// Audio Code.
}
DispatchQueue.main.async {
if self.exists{
self.degreeLbl.isHidden = false
self.conditionLbl.isHidden = false
self.imgView.isHidden = false
self.degreeLbl.text = "\(self.degree.description)°"
self.cityLbl.text = self.city
self.conditionLbl.text = self.condition
self.imgView.downloadImage(from: self.imgURL!)
}
else {
self.degreeLbl.isHidden = true
self.conditionLbl.isHidden = true
self.imgView.isHidden = true
self.cityLbl.text = "No matching city found"
self.exists = true
}
}
} catch let jsonError {
print(jsonError.localizedDescription)
}
}
}
task.resume()
}
}
extension UIImageView {
func downloadImage(from url: String) {
let urlRequest = URLRequest(url: URL(string: url)!)
let task = URLSession.shared.dataTask(with: urlRequest) { (data, response, error) in
if error == nil {
DispatchQueue.main.async {
self.image = UIImage(data: data!)
}
}
}
task.resume()
}
}
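No answer was captured for this question, but note (as an observation, not a confirmed fix) that the CABasicAnimation is added from URLSession's completion handler, which runs off the main thread. Core Animation and UIKit work should be dispatched to the main queue, exactly like the label updates further down already are:

// Sketch: hop to the main queue before touching the layer inside the dataTask callback.
DispatchQueue.main.async {
    let colorChangeAnimation = CABasicAnimation(keyPath: "colors")
    colorChangeAnimation.duration = 1.0
    colorChangeAnimation.toValue = self.gradientLayer3.colors
    colorChangeAnimation.fillMode = kCAFillModeForwards
    colorChangeAnimation.isRemovedOnCompletion = false
    self.gradientLayer.add(colorChangeAnimation, forKey: "colorChange")
}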

How to automatically go to the next track when the one is currently playing ends?

I'm quite new to Swift and I want to implement a piece of code that plays the next song when the one currently playing ends.
I tried to copy the code inside my "@IBAction func nextAction" (which works fine):
@IBAction func nextAction(sender: AnyObject) {
self.nextTrack()
}
func nextTrack() {
if trackId == 0 || trackId < 4 {
if shuffle.on {
trackId = Int(arc4random_uniform(UInt32(library.count)))
}else {
trackId += 1
}
if let coverImage = library[trackId]["coverImage"]{
coverImageView.image = UIImage(named: "\(coverImage).jpg")
}
songTitleLabel.text = library[trackId]["title"]
artistLabel.text = library[trackId]["artist"]
audioPlayer.currentTime = 0
progressView.progress = 0
let path = NSBundle.mainBundle().pathForResource("\(trackId)", ofType: "mp3")
if let path = path {
let mp3URL = NSURL(fileURLWithPath: path)
do {
audioPlayer = try AVAudioPlayer(contentsOfURL: mp3URL)
audioPlayer.play()
NSTimer.scheduledTimerWithTimeInterval(1.0, target: self, selector: #selector(PlayerViewController.updateProgressView), userInfo: nil, repeats: true)
progressView.setProgress(Float(audioPlayer.currentTime/audioPlayer.duration), animated: false)
} catch let error as NSError {
print(error.localizedDescription)
}
}
}
}
And tried to put it inside an if condition like this (inside the viewDidLoad):
if audioPlayer.currentTime >= audioPlayer.duration {
self.nextTrack()
}
I don't have any errors but at runtime this method isn't working and the song ends without playing the next one.
To make the situation more clear here's my controller:
import UIKit
import AVFoundation
class PlayerViewController: UIViewController {
@IBOutlet weak var coverImageView: UIImageView!
@IBOutlet weak var progressView: UIProgressView!
@IBOutlet weak var songTitleLabel: UILabel!
@IBOutlet weak var artistLabel: UILabel!
@IBOutlet weak var shuffle: UISwitch!
var trackId: Int = 0
var library = MusicLibrary().library
var audioPlayer: AVAudioPlayer!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
if let coverImage = library[trackId]["coverImage"]{
coverImageView.image = UIImage(named: "\(coverImage).jpg")
}
songTitleLabel.text = library[trackId]["title"]
artistLabel.text = library[trackId]["artist"]
let path = NSBundle.mainBundle().pathForResource("\(trackId)", ofType: "mp3")
if let path = path {
let mp3URL = NSURL(fileURLWithPath: path)
do {
audioPlayer = try AVAudioPlayer(contentsOfURL: mp3URL)
audioPlayer.play()
NSTimer.scheduledTimerWithTimeInterval(1.0, target: self, selector: #selector(PlayerViewController.updateProgressView), userInfo: nil, repeats: true)
progressView.setProgress(Float(audioPlayer.currentTime/audioPlayer.duration), animated: false)
} catch let error as NSError {
print(error.localizedDescription)
}
}
}
override func viewWillDisappear(animated: Bool) {
audioPlayer.stop()
}
func updateProgressView(){
if audioPlayer.playing {
progressView.setProgress(Float(audioPlayer.currentTime/audioPlayer.duration), animated: true)
}
}
@IBAction func playAction(sender: AnyObject) {
if !audioPlayer.playing {
audioPlayer.play()
}
}
@IBAction func stopAction(sender: AnyObject) {
audioPlayer.stop()
audioPlayer.currentTime = 0
progressView.progress = 0
}
@IBAction func pauseAction(sender: AnyObject) {
audioPlayer.pause()
}
@IBAction func fastForwardAction(sender: AnyObject) {
var time: NSTimeInterval = audioPlayer.currentTime
time += 5.0
if time > audioPlayer.duration {
stopAction(self)
}else {
audioPlayer.currentTime = time
}
}
@IBAction func rewindAction(sender: AnyObject) {
var time: NSTimeInterval = audioPlayer.currentTime
time -= 5.0
if time < 0 {
stopAction(self)
}else {
audioPlayer.currentTime = time
}
}
@IBAction func previousAction(sender: AnyObject) {
if trackId != 0 || trackId > 0 {
if shuffle.on {
trackId = Int(arc4random_uniform(UInt32(library.count)))
}else {
trackId -= 1
}
if let coverImage = library[trackId]["coverImage"]{
coverImageView.image = UIImage(named: "\(coverImage).jpg")
}
songTitleLabel.text = library[trackId]["title"]
artistLabel.text = library[trackId]["artist"]
audioPlayer.currentTime = 0
progressView.progress = 0
let path = NSBundle.mainBundle().pathForResource("\(trackId)", ofType: "mp3")
if let path = path {
let mp3URL = NSURL(fileURLWithPath: path)
do {
audioPlayer = try AVAudioPlayer(contentsOfURL: mp3URL)
audioPlayer.play()
NSTimer.scheduledTimerWithTimeInterval(1.0, target: self, selector: #selector(PlayerViewController.updateProgressView), userInfo: nil, repeats: true)
progressView.setProgress(Float(audioPlayer.currentTime/audioPlayer.duration), animated: false)
} catch let error as NSError {
print(error.localizedDescription)
}
}
}
}
@IBAction func swipeDownAction(sender: AnyObject) {
self.close()
}
@IBAction func closeAction(sender: AnyObject) {
self.close()
}
@IBAction func nextAction(sender: AnyObject) {
self.nextTrack()
}
func close() {
self.dismissViewControllerAnimated(true, completion: nil)
}
func nextTrack() {
if trackId == 0 || trackId < 4 {
if shuffle.on {
trackId = Int(arc4random_uniform(UInt32(library.count)))
}else {
trackId += 1
}
if let coverImage = library[trackId]["coverImage"]{
coverImageView.image = UIImage(named: "\(coverImage).jpg")
}
songTitleLabel.text = library[trackId]["title"]
artistLabel.text = library[trackId]["artist"]
audioPlayer.currentTime = 0
progressView.progress = 0
let path = NSBundle.mainBundle().pathForResource("\(trackId)", ofType: "mp3")
if let path = path {
let mp3URL = NSURL(fileURLWithPath: path)
do {
audioPlayer = try AVAudioPlayer(contentsOfURL: mp3URL)
audioPlayer.play()
NSTimer.scheduledTimerWithTimeInterval(1.0, target: self, selector: #selector(PlayerViewController.updateProgressView), userInfo: nil, repeats: true)
progressView.setProgress(Float(audioPlayer.currentTime/audioPlayer.duration), animated: false)
} catch let error as NSError {
print(error.localizedDescription)
}
}
}
}
}
All the code is written in Xcode 7.3.1
You should use the AVAudioPlayer delegate method audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool), which is called when the audio player finishes playing a sound. (The if check you placed in viewDidLoad runs only once, while the view is loading, so it can never observe the moment playback ends.)
Make your PlayerViewController conform to the AVAudioPlayerDelegate protocol like this:
class PlayerViewController: UIViewController, AVAudioPlayerDelegate {
Make sure to set self as the delegate of each audioPlayer you create. To do that, in your viewDidLoad, previousAction and nextTrack methods you need to add
audioPlayer.delegate = self
after this line:
audioPlayer = try AVAudioPlayer(contentsOfURL: mp3URL)
Now you can use the delegate method to know when the audio has finished playing and move to the next track; just add this inside your class:
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
if flag {
self.nextTrack()
} else {
// did not finish successfully
}
}
