I am having a small issue where the seek bar does not move forward when the user taps the fast-forward or rewind buttons on the native iOS controls.
See video
https://youtu.be/CJP131GpSYI
The section of code that handles this is printed below.
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
let changePlaybackPositionCommand = commandCenter.changePlaybackPositionCommand
changePlaybackPositionCommand.isEnabled = true
changePlaybackPositionCommand.addTarget { event in
let seconds = (event as? MPChangePlaybackPositionCommandEvent)?.positionTime ?? 0
let time = CMTime(seconds: seconds, preferredTimescale: 1)
self.player?.seek(to: time)
return .success
}
let skipBackwardCommand = commandCenter.skipBackwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipBackwardCommand.isEnabled = true
}
else{
skipBackwardCommand.isEnabled = false
}
skipBackwardCommand.preferredIntervals = [NSNumber(value: 10)]
skipBackwardCommand.addTarget(handler: skipBackward)
let skipForwardCommand = commandCenter.skipForwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipForwardCommand.isEnabled = true
}
else{
skipForwardCommand.isEnabled = false
}
skipForwardCommand.addTarget(handler: skipForward)
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
func skipBackward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
// print(CMTimeGetSeconds((self.player?.currentTime())!)) //Output: 42
//print(event.interval)
self.player!.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
return .success
}
func skipForward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: 30), preferredTimescale: 1))
return .success
}
}
Full Media Player code:
import SwiftUI
//import Foundation
import AVFoundation
import MediaPlayer
import AVKit
struct NowPlayingData: Codable , Identifiable {
var id,artist,song,cover:String
private enum CodingKeys : String, CodingKey {
case id = "_id", artist , song , cover
}
}
class MusicPlayer {
static let shared = MusicPlayer()
static var mediatype = ""
static var artist = ""
static var song = ""
static var cover = ""
static var urls = ""
static var dur = 0
static var uuid = UIDevice.current.identifierForVendor?.uuidString
var player: AVPlayer?
let playerViewController = AVPlayerViewController()
func gettype(completion: @escaping (String) -> Void){
completion(MusicPlayer.mediatype)
}
func getPodCastPlayerNP(completion: @escaping (NowPlayingData) -> ()) {
// Timer.scheduledTimer(withTimeInterval: 15, repeats: true) { (timer) in
let songdata = "{\"_id\": \"1\",\"song\": \"\(MusicPlayer.song)\",\"artist\": \"\(MusicPlayer.artist)\", \"cover\": \"\(MusicPlayer.cover)\"}"
let data: Foundation.Data = songdata.data(using: .utf8)!
let podcast = try! JSONDecoder().decode(NowPlayingData.self, from: data)
//print(data!)
// let episode = podcast.programs
DispatchQueue.main.async{
// The array is stored under programs now
//print(podcast)
completion(podcast)
}
// }
}
func startBackgroundMusic(url: String, type:String) {
MusicPlayer.mediatype = String(type)
//let urlString = "http://stream.radiomedia.com.au:8003/stream"
let urlString = url+"?uuid="+MusicPlayer.uuid!
print(urlString)
guard let url = URL.init(string: urlString) else { return }
let playerItem = AVPlayerItem.init(url: url)
player = AVPlayer.init(playerItem: playerItem)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.duckOthers, .defaultToSpeaker, .mixWithOthers, .allowAirPlay])
print("Playback OK")
// let defaults = UserDefaults.standard
// defaults.set("1", forKey: defaultsKeys.musicplayer_connected)
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
// let defaults = UserDefaults.standard
// defaults.set("0", forKey: defaultsKeys.musicplayer_connected)
print(error)
}
#if targetEnvironment(simulator)
self.playerViewController.player = player
self.playerViewController.player?.play()
print("SIMULATOR")
#else
self.setupRemoteTransportControls()
player?.play()
#endif
}
func startBackgroundMusicTwo() {
let urlString = "http://stream.radiomedia.com.au:8003/stream"
//let urlString = url
guard let url = URL.init(string: urlString) else { return }
let playerItem = AVPlayerItem.init(url: url)
player = AVPlayer.init(playerItem: playerItem)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.duckOthers, .defaultToSpeaker, .mixWithOthers, .allowAirPlay])
print("Playback OK")
// let defaults = UserDefaults.standard
// defaults.set("1", forKey: defaultsKeys.musicplayer_connected)
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
// let defaults = UserDefaults.standard
// defaults.set("0", forKey: defaultsKeys.musicplayer_connected)
print(error)
}
#if targetEnvironment(simulator)
self.playerViewController.player = player
self.playerViewController.player?.play()
print("SIMULATOR")
#else
self.setupRemoteTransportControls()
player?.play()
#endif
}
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
let changePlaybackPositionCommand = commandCenter.changePlaybackPositionCommand
changePlaybackPositionCommand.isEnabled = true
changePlaybackPositionCommand.addTarget { event in
let seconds = (event as? MPChangePlaybackPositionCommandEvent)?.positionTime ?? 0
let time = CMTime(seconds: seconds, preferredTimescale: 1)
self.player?.seek(to: time)
return .success
}
let skipBackwardCommand = commandCenter.skipBackwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipBackwardCommand.isEnabled = true
}
else{
skipBackwardCommand.isEnabled = false
}
skipBackwardCommand.preferredIntervals = [NSNumber(value: 10)]
skipBackwardCommand.addTarget(handler: skipBackward)
let skipForwardCommand = commandCenter.skipForwardCommand
if(MusicPlayer.mediatype == "podcast")
{
skipForwardCommand.isEnabled = true
}
else{
skipForwardCommand.isEnabled = false
}
skipForwardCommand.addTarget(handler: skipForward)
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
func skipBackward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
// print(CMTimeGetSeconds((self.player?.currentTime())!)) //Output: 42
//print(event.interval)
self.player!.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
return .success
}
func skipForward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: 30), preferredTimescale: 1))
return .success
}
}
func nowplaying(with artwork: MPMediaItemArtwork, artist: String, song: String, duration: Int){
if(duration == 0){
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle:song,
MPMediaItemPropertyArtist:artist,
MPMediaItemPropertyArtwork: artwork,
MPNowPlayingInfoPropertyIsLiveStream: true
]
}
else{
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle:song,
MPMediaItemPropertyArtist:artist,
MPMediaItemPropertyArtwork: artwork,
MPNowPlayingInfoPropertyIsLiveStream: false,
MPMediaItemPropertyPlaybackDuration: duration,
MPNowPlayingInfoPropertyPlaybackRate: 1.0,
MPNowPlayingInfoPropertyElapsedPlaybackTime: CMTimeGetSeconds((self.player?.currentTime())!)
]
}
// self.getArtBoard();
}
func setupNowPlayingInfo(with artwork: MPMediaItemArtwork) {
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle: "Some name",
MPMediaItemPropertyArtist: "Some name",
MPMediaItemPropertyArtwork: artwork,
//MPMediaItemPropertyPlaybackDuration: CMTimeGetSeconds(currentItem.duration),
//MPNowPlayingInfoPropertyPlaybackRate: 1,
//MPNowPlayingInfoPropertyElapsedPlaybackTime: CMTimeGetSeconds(currentItem.currentTime())
]
}
func getData(from url: URL, completion: @escaping (UIImage?) -> Void) {
URLSession.shared.dataTask(with: url, completionHandler: {(data, response, error) in
if let data = data {
completion(UIImage(data:data))
}
})
.resume()
}
func getArtBoard(artist: String, song: String, cover: String, urls: String, duration: Int) {
// MusicPlayer.JN = "[{'artist': \(artist), 'song':\(song), 'cover': \(cover)}]"
MusicPlayer.artist = artist
MusicPlayer.song = song
MusicPlayer.cover = cover
MusicPlayer.urls = urls
guard let url = URL(string: cover) else { return }
getData(from: url) { [weak self] image in
guard let self = self,
let downloadedImage = image else {
return
}
let artwork = MPMediaItemArtwork.init(boundsSize: downloadedImage.size, requestHandler: { _ -> UIImage in
return downloadedImage
})
self.nowplaying(with: artwork, artist: artist, song: song, duration: duration)
}
}
func stopBackgroundMusic() {
guard let player = player else { return }
player.pause()
}
}
Full GitHub code:
https://github.com/redimongo/iOS-Radio-App
func skipBackward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
// print(CMTimeGetSeconds((self.player?.currentTime())!)) //Output: 42
//print(event.interval)
//self.player!.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: -30), preferredTimescale: 1))
let currentTime = self.player?.currentTime()
self.player?.seek(to: CMTime(seconds: currentTime!.seconds - 30, preferredTimescale: 1), completionHandler: { isCompleted in
if isCompleted {
}
})
return .success
}
func skipForward(_ event: MPRemoteCommandEvent) -> MPRemoteCommandHandlerStatus {
//self.player?.seek(to: CMTimeMakeWithSeconds(CMTimeGetSeconds((self.player?.currentTime())!).advanced(by: 30), preferredTimescale: 1))
let currentTime = self.player?.currentTime()
self.player?.seek(to: CMTime(seconds: currentTime!.seconds + 30, preferredTimescale: 1), completionHandler: { isCompleted in
if isCompleted {
}
})
return .success
}
Replace these two functions with the code above and let me know what you see.
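One more detail that may matter for the lock-screen scrubber (an aside on my part, not something the answer above covers): iOS derives the scrubber position from the now-playing info, so after a seek completes it can help to mirror the new position back into MPNowPlayingInfoCenter. A minimal sketch:
import MediaPlayer
// Assumption (not from the answer above): refresh the now-playing elapsed time
// after a successful seek so the Control Center scrubber reflects the jump.
func updateNowPlayingElapsedTime(to seconds: Double, rate: Float) {
    var info = MPNowPlayingInfoCenter.default().nowPlayingInfo ?? [:]
    info[MPNowPlayingInfoPropertyElapsedPlaybackTime] = seconds
    info[MPNowPlayingInfoPropertyPlaybackRate] = rate
    MPNowPlayingInfoCenter.default().nowPlayingInfo = info
}
For example, it could be called from the completion handlers of the seek calls shown above.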
Related
I've been trying to let the user choose a video from the gallery and split it into chunks of 30 seconds.
When I select a one-minute video, it splits into two videos of 30 seconds each, and it works fine.
I tried another example with a 35-second video, and it splits into two videos of 30 and 4 seconds.
But when I select a video longer than one minute, it splits the video into chunks of random
lengths such as 34 or 40 seconds, etc.
I don't want that!
I want to split videos into 30-second chunks.
This is my view controller code so far:
import UIKit
import AVKit
import MobileCoreServices
import Photos
class ViewController: UIViewController {
@IBOutlet weak var videoView: UIImageView!
@IBOutlet var imageView: UIImageView!
var player: AVPlayer!
var avpController = AVPlayerViewController()
var isVideoGettinGEdited = false
@IBAction func didTapButton(){
let picker = UIImagePickerController()
picker.delegate = self
picker.sourceType = .savedPhotosAlbum
picker.mediaTypes = ["public.movie"]
picker.allowsEditing = false
present(picker, animated: true, completion: nil)
}
}
extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
if let image = info[UIImagePickerController.InfoKey(rawValue: "UIImagePickerControllerEditedImage")]as? UIImage{
imageView.image = image
} else {
guard
let mediaType = info[UIImagePickerController.InfoKey.mediaType] as? String,
mediaType == (kUTTypeMovie as String),
let url = info[UIImagePickerController.InfoKey.mediaURL] as? URL
else { return }
let videoAsset = AVURLAsset(url: url)
let videoDuration = videoAsset.duration
let durationTime = ceil( CMTimeGetSeconds(videoDuration))
var startTime = 0.0
var endTime = durationTime
var numberOfBreaks = Int((Double(durationTime)/30.0))
let isReminderTime = Double(durationTime.truncatingRemainder(dividingBy: 30.0))
if isReminderTime > 0 {
numberOfBreaks = numberOfBreaks + 1
}
if Double(durationTime) <= 30 {
self.cropVideo(atURL: url, startTime: startTime, endTime: endTime, fileName: "Output.mp4")
} else {
endTime = 30
while numberOfBreaks != 0 {
if !isVideoGettinGEdited {
print("Start time = \(startTime) and Endtime = \(endTime)")
self.cropVideo(atURL: url, startTime: startTime, endTime: endTime, fileName: "Output-\(numberOfBreaks).mp4")
numberOfBreaks = numberOfBreaks - 1
startTime = endTime
let timeLeft = Double(durationTime) - startTime
if timeLeft >= 30.0 {
endTime = endTime + 30.0
} else {
endTime = timeLeft
}
}
}
}
}
picker.dismiss(animated: true, completion: nil)
}
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
picker.dismiss(animated: true, completion: nil)
}
}
extension ViewController {
func cropVideo(atURL url:URL, startTime:Double, endTime:Double, fileName:String) {
let asset = AVURLAsset(url: url)
let exportSession = AVAssetExportSession.init(asset: asset, presetName: AVAssetExportPresetHighestQuality)!
var outputURL = URL(string:NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.documentDirectory, FileManager.SearchPathDomainMask.userDomainMask, true).last!)
let fileManager = FileManager.default
do {
try fileManager.createDirectory(at: outputURL!, withIntermediateDirectories: true, attributes: nil)
} catch {
print(error)
}
outputURL?.appendPathComponent("\(fileName).mp4")
// Remove existing file
do {
try fileManager.removeItem(atPath: outputURL!.absoluteString)
} catch {
print(error)
}
exportSession.outputURL = URL(fileURLWithPath: outputURL!.absoluteString)
exportSession.shouldOptimizeForNetworkUse = true
exportSession.outputFileType = AVFileType.mp4
let start = CMTimeMakeWithSeconds(startTime, preferredTimescale: 600) // you will modify time range here
let duration = CMTimeMakeWithSeconds(endTime, preferredTimescale: 600)
let range = CMTimeRangeMake(start: start, duration: duration)
exportSession.timeRange = range
exportSession.exportAsynchronously {
self.isVideoGettinGEdited = false
switch(exportSession.status) {
case .completed:
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: outputURL!.absoluteString))
}) { completed, error in
DispatchQueue.main.async {
self.view.isUserInteractionEnabled = true
if completed {
print("Video has been saved to your photos.")
} else {
if error != nil {
print("Failed to save video in photos \(error).")
}
}
}
}
break
case .failed:
print("failed with \(exportSession.error)")
break
case .cancelled: break
default:
print("default")
break
}
}
}
//MARK:- saveVideoFromURL
func saveVideoFromURL(_ videoURL:String) {
self.view.isUserInteractionEnabled = false
DispatchQueue.global(qos: .background).async {
if let url = URL(string: videoURL),
let urlData = NSData(contentsOf: url) {
let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0];
print("url path component = \(url.lastPathComponent)")
let filePath="\(documentsPath)/\(url.lastPathComponent)"
urlData.write(toFile: filePath, atomically: true)
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: filePath))
}) { completed, error in
DispatchQueue.main.async {
self.view.isUserInteractionEnabled = true
if completed {
print("Video has been saved to your photos.")
} else {
if error != nil {
print("Failed to save video.")
}
}
}
}
}
}
}
}
Thanks!
Your main issue is in how you calculate the duration: the export's timeRange expects a duration, but you pass endTime (an absolute position), so every chunk after the first asks for too much time. By the way, I would change the preferred timescale to 1 as well:
change
let duration = CMTimeMakeWithSeconds(endTime, preferredTimescale: 600)
to
let duration = CMTimeMakeWithSeconds(endTime-startTime, preferredTimescale: 1)
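To see why, here is a small worked example with hypothetical numbers (a 70-second clip and its third chunk; these values are not from the question):
import CoreMedia
let startTime = 60.0
let endTime = 70.0
// Passing endTime as the duration asks for 70 seconds starting at 60s (60s...130s),
// which is why chunk lengths drift once the video is longer than a minute.
let wrongRange = CMTimeRangeMake(start: CMTimeMakeWithSeconds(startTime, preferredTimescale: 1),
                                 duration: CMTimeMakeWithSeconds(endTime, preferredTimescale: 1))
// Passing endTime - startTime asks for exactly the 10 seconds that remain (60s...70s).
let rightRange = CMTimeRangeMake(start: CMTimeMakeWithSeconds(startTime, preferredTimescale: 1),
                                 duration: CMTimeMakeWithSeconds(endTime - startTime, preferredTimescale: 1))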
I have also made some other changes to your code, as follows:
extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
if let image = info[.editedImage] as? UIImage {
imageView.image = image
} else {
guard
let mediaType = info[.mediaType] as? String,
mediaType == "public.movie",
let url = info[.mediaURL] as? URL
else { return }
let videoAsset = AVURLAsset(url: url)
let videoDuration = videoAsset.duration
let durationTime = ceil(videoDuration.seconds)
print("durationTime:" , durationTime)
struct Duration {
let start: Double
let end: Double
}
let durations: [Duration]
if durationTime < 30 {
durations = [Duration(start: 0, end: durationTime)]
} else {
durations = (0...Int(durationTime)/30).compactMap {
if Double($0*30) == min(Double($0*30)+30, durationTime) {
return nil
}
return Duration(
start: Double($0*30),
end: min(Double($0*30)+30, durationTime)
)
}
}
for index in durations.indices {
let startTime = durations[index].start
let endTime = durations[index].end
print("Start time = \(startTime) and Endtime = \(endTime)")
saveVideo(
at: url,
startTime: startTime,
endTime: endTime,
fileName: "Output-\(index)"
)
}
}
picker.dismiss(animated: true, completion: nil)
}
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
picker.dismiss(animated: true, completion: nil)
}
}
extension ViewController {
func saveVideo(
at url: URL,
startTime: Double,
endTime:Double,
fileName: String
) {
let asset = AVURLAsset(url: url)
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)!
let outputURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
.appendingPathComponent(fileName)
.appendingPathExtension("mp4")
// Remove existing file
if FileManager.default.fileExists(atPath: outputURL.path) {
do {
try FileManager.default.removeItem(at: outputURL)
} catch {
print(error)
}
}
exportSession.outputURL = outputURL
exportSession.shouldOptimizeForNetworkUse = true
exportSession.outputFileType = .mp4
let start = CMTimeMakeWithSeconds(startTime, preferredTimescale: 1)
let duration = CMTimeMakeWithSeconds(endTime-startTime, preferredTimescale: 1)
let range = CMTimeRangeMake(start: start, duration: duration)
print("Will Render \(fileName) from \(start.seconds) to \(duration)")
exportSession.timeRange = range
exportSession.exportAsynchronously {
print("Did Render \(fileName) from \(start.seconds) to \(duration)")
self.isVideoGettinGEdited = false
switch exportSession.status {
case .completed:
self.checkDuration(for: fileName, at: outputURL)
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
}) { completed, error in
if let error = error {
print("Failed to save video in photos", error)
return
}
DispatchQueue.main.async {
self.view.isUserInteractionEnabled = true
if completed {
print("Video has been saved to your photos.")
} else {
print("Video saving has NOT been completed")
}
}
}
break
case .failed:
print("failed with:", exportSession.error ?? "no error")
break
case .cancelled: break
default: break
}
}
}
}
Sample Project
I'm playing audio from a JSON source that I built to feed the audio URL and its metadata to my app (such as title, description, and cover art URL). The audio is a radio station feed. Audio works, play/stop controls work, cover art works, and background audio works. I'm stumped on adding the controls to Control Center to play/stop the audio while outside the app. I've taken a look at Apple's documentation on this and it's very straightforward. I've also enabled the build settings for background fetch, background audio, and Bluetooth. But it doesn't seem to work on my iPhone attached to Xcode via USB-C (I'm assuming the iOS Simulator doesn't support Control Center). Below is my working code. Any thoughts on how to get this working? Do I need to pass the audio on to setupRemoteTransportControls()? I assume the title and cover art will need to be passed from the data source, but right now Control Center does not even recognize that the app is playing audio.
ViewController.swift
import UIKit
import MediaPlayer
import AVFoundation
import Foundation
class ViewController: UIViewController {
var player: AVPlayer!
var playerItem: AVPlayerItem!
var audioCheck: Timer?
var timer: Timer?
var passText: String? = "Test"
@IBOutlet weak var trackTitle: UILabel!
@IBOutlet weak var togglePlay: UIButton!
@IBOutlet weak var coverPhoto: UIImageView!
func loadAudio() {
let audioURL = URL.init(string: "AUDIO_URL_GOES_HERE")
player = AVPlayer.init(url: audioURL!)
}
let minutes = 60
func playAudio() {
let audioURL = URL.init(string: "AUDIO_URL_GOES_HERE")
player = AVPlayer.init(url: audioURL!)
player.play()
}
@IBAction func OpenPlayer(_ sender: Any) {
performSegue(withIdentifier: "PlayerSegue", sender: self)
}
override func viewDidLoad() {
super.viewDidLoad()
loadAudio()
// Audio in the background
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.playback)
} catch {
print("Error: Audio is not paying in the background.")
}
// Timer checking if other audio sources are running
audioCheck = Timer.scheduledTimer(timeInterval: 0, target: self, selector: #selector(timedAudioCheck), userInfo: nil, repeats: true)
struct currentTrack: Codable {
let title: String
let artwork_url: String
}
struct getTrack: Codable {
// let name: String
// let status: String
let current_track: currentTrack
}
let jsonURL = URL(string: "JSON_URL_GOES_HERE")!
URLSession.shared.dataTask(with: jsonURL) {data, _, _ in
if let data = data {
let trackData = try? JSONDecoder().decode([getTrack].self, from: data)
// print(users)
// print(trackData![0].current_track.title)
// print(trackData![0].current_track.artwork_url)
DispatchQueue.main.async {
self.trackTitle.text = trackData![0].current_track.title
let coverPhotoURL = trackData![0].current_track.artwork_url
if let coverPhotoConverted = URL(string: coverPhotoURL) {
do {
let coverPhotoData = try Data(contentsOf: coverPhotoConverted)
self.coverPhoto.image = UIImage(data: coverPhotoData)
} catch {
}
}
}
}
}.resume()
_ = Timer.scheduledTimer(withTimeInterval: 10.0, repeats: true) { timer in
// print("Data updated!")
let jsonURL = URL(string: "JSON_URL_GOES_HERE")!
URLSession.shared.dataTask(with: jsonURL) {data, _, _ in
if let data = data {
let trackData = try? JSONDecoder().decode([getTrack].self, from: data)
// print(users)
// print(trackData![0].current_track.title)
// print(trackData![0].current_track.artwork_url)
DispatchQueue.main.async {
self.trackTitle.text = trackData![0].current_track.title
let coverPhotoURL = trackData![0].current_track.artwork_url
if let coverPhotoConverted = URL(string: coverPhotoURL) {
do {
let coverPhotoData = try Data(contentsOf: coverPhotoConverted)
self.coverPhoto.image = UIImage(data: coverPhotoData)
} catch {
}
}
}
}
}.resume()
}
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player.rate == 0.0 {
self.player.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player.rate == 1.0 {
self.player.pause()
return .success
}
return .commandFailed
}
}
// Apple's Documentation on playing audio in control center
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player.rate == 0.0 {
self.player.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player.rate == 1.0 {
self.player.pause()
return .success
}
return .commandFailed
}
}
func setupNowPlaying() {
// Define Now Playing Info
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "My Movie"
if let image = UIImage(named: "lockscreen") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = playerItem.currentTime().seconds
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = playerItem.asset.duration.seconds
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = player.rate
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}
// Along with timer, checks if other audio sources are playing and resets the audio in the app
@objc func timedAudioCheck() {
if (AVAudioSession.sharedInstance().secondaryAudioShouldBeSilencedHint) {
togglePlay.setTitle("Play", for: .normal)
} else if (player.rate == 0) {
togglePlay.setTitle("Play", for: .normal)
} else {
togglePlay.setTitle("Puase", for: .normal)
}
}
@IBAction func togglePlay(_ sender: UIButton) {
if player.rate == 0 {
// Plays the audio stream
sender.setTitle("Pause", for: .normal)
playAudio()
} else {
sender.setTitle("Play", for: .normal)
player.pause()
}
}
}
Apple's suggestion to add Control Center Support
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player.rate == 0.0 {
self.player.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player.rate == 1.0 {
self.player.pause()
return .success
}
return .commandFailed
}
}
func setupNowPlaying() {
// Define Now Playing Info
var nowPlayingInfo = [String : Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = "My Movie"
if let image = UIImage(named: "lockscreen") {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = playerItem.currentTime().seconds
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = playerItem.asset.duration.seconds
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = player.rate
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
So I am using the following .swift file to show the media player controls on the lock screen and in the notification area. The following code works fine on iPhone but not on iPad.
I have uploaded the full code to GitHub:
https://github.com/redimongo/DRN1
The question is: what do I need to add so that the iPad gets the media controls?
//
// MusicPlayer.swift
// DRN1
//
// Created by Russell Harrower on 25/11/19.
// Copyright © 2019 Russell Harrower. All rights reserved.
//
import Foundation
import AVFoundation
import MediaPlayer
import Kingfisher
class MusicPlayer {
static let shared = MusicPlayer()
var player: AVPlayer?
func startBackgroundMusic() {
self.setupRemoteTransportControls()
let urlString = "http://stream.radiomedia.com.au:8003/stream"
guard let url = URL.init(string: urlString)
else {
return
}
let playerItem = AVPlayerItem.init(url: url)
player = AVPlayer.init(playerItem: playerItem)
player?.play()
do {
// try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.duckOthers, .defaultToSpeaker, .mixWithOthers, .allowAirPlay])
print("Playback OK")
let defaults = UserDefaults.standard
defaults.set("1", forKey: defaultsKeys.musicplayer_connected)
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
let defaults = UserDefaults.standard
defaults.set("0", forKey: defaultsKeys.musicplayer_connected)
print(error)
}
}
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
// self.nowplaying(artist: "Anna", song: "test")
}
func nowplaying(with artwork: MPMediaItemArtwork, artist: String, song: String){
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle:song,
MPMediaItemPropertyArtist:artist,
MPMediaItemPropertyArtwork: artwork,
MPNowPlayingInfoPropertyIsLiveStream: true
]
// self.getArtBoard();
}
func setupNowPlayingInfo(with artwork: MPMediaItemArtwork) {
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
// MPMediaItemPropertyTitle: "Some name",
// MPMediaItemPropertyArtist: "Some name",
MPMediaItemPropertyArtwork: artwork,
//MPMediaItemPropertyPlaybackDuration: CMTimeGetSeconds(currentItem.duration),
//MPNowPlayingInfoPropertyPlaybackRate: 1,
//MPNowPlayingInfoPropertyElapsedPlaybackTime: CMTimeGetSeconds(currentItem.currentTime())
]
}
func getData(from url: URL, completion: @escaping (UIImage?) -> Void) {
URLSession.shared.dataTask(with: url, completionHandler: {(data, response, error) in
if let data = data {
completion(UIImage(data:data))
}
})
.resume()
}
func getArtBoard(artist: String, song: String, cover: String) {
guard let url = URL(string: cover) else { return }
getData(from: url) { [weak self] image in
guard let self = self,
let downloadedImage = image else {
return
}
let artwork = MPMediaItemArtwork.init(boundsSize: downloadedImage.size, requestHandler: { _ -> UIImage in
return downloadedImage
})
self.nowplaying(with: artwork, artist: artist, song: song)
}
}
func stopBackgroundMusic() {
guard let player = player else { return }
player.pause()
}
}
I have added the following, with no success:
override func viewDidLoad() {
UIApplication.shared.beginReceivingRemoteControlEvents()
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
} catch {
print(error)
}
}
iOS simulators running iOS 11+ (including iPadOS) will not show the remote command controls unless they are the default controls created by AVPlayerViewController. You will need a real device to test MPRemoteCommandCenter, or run an iOS 10 or earlier simulator.
override func viewDidLoad() {
super.viewDidLoad()
// This is an alternative
let avPlayerViewController = AVPlayerViewController()
avPlayerViewController.player = MusicPlayer.shared.player
avPlayerViewController.player?.play()
#if targetEnvironment(simulator)
/*
 If you don't set up custom remote controls, the AVPlayer owned by
 AVPlayerViewController immediately provides the default remote controls;
 otherwise your simulator will not show any controls.
 */
#else
MusicPlayer.shared.setupRemoteTransportControls()
// more custom controls set up
#endif
}
Also, there is no need to call UIApplication.shared.beginReceivingRemoteControlEvents() on devices running iOS 7 and later.
**EDIT:**
In your MusicPlayer class:
import Foundation
import AVFoundation
import MediaPlayer
import AVKit
class MusicPlayer {
static let shared = MusicPlayer()
var player: AVPlayer?
let playerViewController = AVPlayerViewController()
func startBackgroundMusic() {
let urlString = "http://stream.radiomedia.com.au:8003/stream"
guard let url = URL.init(string: urlString) else { return }
let playerItem = AVPlayerItem.init(url: url)
player = AVPlayer.init(playerItem: playerItem)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.duckOthers, .defaultToSpeaker, .mixWithOthers, .allowAirPlay])
print("Playback OK")
// let defaults = UserDefaults.standard
// defaults.set("1", forKey: defaultsKeys.musicplayer_connected)
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
// let defaults = UserDefaults.standard
// defaults.set("0", forKey: defaultsKeys.musicplayer_connected)
print(error)
}
#if targetEnvironment(simulator)
if playerViewController.player == nil {
playerViewController.player = player
}
playerViewController.player?.play()
print("SIMULATOR")
#else
self.setupRemoteTransportControls()
player?.play()
#endif
}
func setupRemoteTransportControls() {
// Get the shared MPRemoteCommandCenter
let commandCenter = MPRemoteCommandCenter.shared()
// Add handler for Play Command
commandCenter.playCommand.addTarget { [unowned self] event in
if self.player?.rate == 0.0 {
self.player?.play()
return .success
}
return .commandFailed
}
// Add handler for Pause Command
commandCenter.pauseCommand.addTarget { [unowned self] event in
if self.player?.rate == 1.0 {
self.player?.pause()
return .success
}
return .commandFailed
}
// self.nowplaying(artist: "Anna", song: "test")
}
func nowplaying(with artwork: MPMediaItemArtwork, artist: String, song: String){
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
MPMediaItemPropertyTitle:song,
MPMediaItemPropertyArtist:artist,
MPMediaItemPropertyArtwork: artwork,
MPNowPlayingInfoPropertyIsLiveStream: true
]
// self.getArtBoard();
}
func setupNowPlayingInfo(with artwork: MPMediaItemArtwork) {
MPNowPlayingInfoCenter.default().nowPlayingInfo = [
// MPMediaItemPropertyTitle: "Some name",
// MPMediaItemPropertyArtist: "Some name",
MPMediaItemPropertyArtwork: artwork,
//MPMediaItemPropertyPlaybackDuration: CMTimeGetSeconds(currentItem.duration),
//MPNowPlayingInfoPropertyPlaybackRate: 1,
//MPNowPlayingInfoPropertyElapsedPlaybackTime: CMTimeGetSeconds(currentItem.currentTime())
]
}
func getData(from url: URL, completion: @escaping (UIImage?) -> Void) {
URLSession.shared.dataTask(with: url, completionHandler: {(data, response, error) in
if let data = data {
completion(UIImage(data:data))
}
})
.resume()
}
func getArtBoard(artist: String, song: String, cover: String) {
guard let url = URL(string: cover) else { return }
getData(from: url) { [weak self] image in
guard let self = self,
let downloadedImage = image else {
return
}
let artwork = MPMediaItemArtwork.init(boundsSize: downloadedImage.size, requestHandler: { _ -> UIImage in
return downloadedImage
})
self.nowplaying(with: artwork, artist: artist, song: song)
}
}
func stopBackgroundMusic() {
#if targetEnvironment(simulator)
if playerViewController.player != nil {
playerViewController.player?.pause()
}
#else
guard let player = player else { return }
player.pause()
#endif
}
}
Tested using the iPad (7th generation) simulator running iPadOS.
Also, you can use the Media Player framework’s MPRemoteCommandCenter and MPNowPlayingInfoCenter classes with AVPlayer.
You need to invoke beginReceivingRemoteControlEvents()
Swift 3+
UIApplication.shared.beginReceivingRemoteControlEvents()
try? AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [])
If you would like to specify custom actions for the MPRemoteCommandCenter:
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.nextTrackCommand.isEnabled = true
commandCenter.nextTrackCommand.addTarget(self, action:#selector(nextTrackCommandSelector))
For more information, refer to Apple's official documentation.
I am playing audio that can be paused, resumed, and stopped. I also have a slider that can be used to change the audio's current playback position, and a label that shows the duration of the audio. I want to pause the timer when the user pauses the audio. I read that I can invalidate and nil the timer and then start it again, but the issue is that this replays the audio from the start. Is there a way to start the timer at the last point where it was paused?
func startTimer() {
if replayTimer == nil {
replayTimer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updateSlider), userInfo: nil, repeats: true)
}
}
@objc func updateSlider() {
progressBar.value = Float(audio.audio!.currentTime)
}
@IBAction func playReceiverVoicenote(_ sender: Any) {
if replayTimer == nil {
audioBtn.setImage(#imageLiteral(resourceName: "pause"), for: .normal)
audio.playAudio(filePath: filePath!)
startTimer()
receiverProgressBar.maximumValue = audio.getAudioDuration()
} else if audio.isAudioPlaying() {
audioBtn.setImage(#imageLiteral(resourceName: "playAudio"), for: .normal)
audio.pauseAudio()
replayTimer?.invalidate()
replayTimer = nil
} else {
audioBtn.setImage(#imageLiteral(resourceName: "pause"), for: .normal)
audio.replayAudio()
startTimer()
}
}
func playAudio(filePath:URL){
do {
audio = try AVAudioPlayer(contentsOf: filePath)
audio!.delegate = self
audio!.prepareToPlay()
audio!.volume = 1.0
audio!.play()
} catch {
print(error.localizedDescription)
}
}
func pauseAudio() {
audio!.pause()
}
func replayAudio() {
audio!.play()
}
func stopAudio() {
audio!.stop()
}
When the audio is about to pause, save audio.currentTime in a variable and invalidate the timer.
When the audio is about to resume, read the saved currentTime, call play(atTime:) passing that time interval, and restart the timer.
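A minimal sketch of that idea, using assumed names that are not from the question's code; it restores the saved position through currentTime before calling play(), rather than through play(atTime:):
import AVFoundation
// Sketch only: pause keeps the slider timer and the audio position in sync
// without replaying from the start.
final class ResumableAudio {
    private var player: AVAudioPlayer?
    private var timer: Timer?
    private var pausedAt: TimeInterval = 0

    func load(_ url: URL) {
        player = try? AVAudioPlayer(contentsOf: url)
        player?.prepareToPlay()
    }

    func play(onTick: @escaping (TimeInterval) -> Void) {
        player?.currentTime = pausedAt          // resume from the saved point
        player?.play()
        timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
            onTick(self?.player?.currentTime ?? 0)   // drive the slider/label from here
        }
    }

    func pause() {
        pausedAt = player?.currentTime ?? 0     // remember where playback stopped
        player?.pause()
        timer?.invalidate()                     // the timer is simply recreated on resume
        timer = nil
    }
}
Since AVAudioPlayer keeps its position across pause(), the key point is that only the timer is recreated on resume; the audio itself is never reloaded.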
You can also use this Audio Manager in this way:
let player = AudioPlayer()
player.loadAudio(url: URL(string: "myAudioUrl.mp3")!, name: "", img: "")
And the action of the button to play and pause:
if player.isPlaying {
player.pauseAudio()
} else {
player.playAudio { isFinish, player, currentTimeInSec, remainingTimeInSec in
if isFinish {
// Audio finish to play
}
}
}
The completion closure returns:
isFinish: Bool // If the audio has finished playing
player: AVAudioPlayer // The player
currentTimeInSec: Int // The current time in seconds of the audio playing
remainingTimeInSec: Int // The remaining time in seconds
AudioPlayer:
import Foundation
import AVFoundation
import MediaPlayer
class AudioPlayer {
var audioPlayer: AVAudioPlayer?
var hasBeenPaused = false
var songName = ""
var songImage = ""
var timer: Timer?
var isPlaying: Bool {
return audioPlayer?.isPlaying ?? false
}
public func loadAudio(url: URL, name: String, img: String) {
songName = name
songImage = img
setupRemoteTransportControls()
do {
audioPlayer = try AVAudioPlayer(contentsOf: url)
audioPlayer?.prepareToPlay()
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.playback, mode: .default)
} catch let sessionError {
print(sessionError)
}
} catch let songPlayerError {
print(songPlayerError)
}
}
public func playAudio(completion: ((_ isFinish: Bool, _ player: AVAudioPlayer, _ currentTimeInSec: Int, _ restTimeInSec: Int) -> ())? = nil) {
guard let audioPlayer = audioPlayer else { return }
audioPlayer.play()
setupNowPlaying()
if timer == nil {
timer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { _ in
let currentTime = Int(audioPlayer.currentTime)
let remainingTime = Int(audioPlayer.duration) - Int(currentTime)
if remainingTime == 0 {
completion?(true, audioPlayer, currentTime, remainingTime)
if self.timer != nil {
self.timer!.invalidate()
self.timer = nil
}
} else {
completion?(false, audioPlayer, currentTime, remainingTime)
}
}
}
}
public func pauseAudio() {
guard let audioPlayer = audioPlayer else { return }
if audioPlayer.isPlaying {
audioPlayer.pause()
hasBeenPaused = true
} else {
hasBeenPaused = false
}
setupNowPlaying()
if timer != nil {
timer!.invalidate()
timer = nil
}
}
public func replayAudio() {
guard let audioPlayer = audioPlayer else { return }
if audioPlayer.isPlaying || hasBeenPaused {
audioPlayer.stop()
audioPlayer.currentTime = 0
audioPlayer.play()
} else {
audioPlayer.play()
}
setupNowPlaying()
}
public func stopAudio() {
guard let audioPlayer = audioPlayer else { return }
audioPlayer.stop()
setupNowPlaying()
if timer != nil {
timer!.invalidate()
timer = nil
}
}
func setupRemoteTransportControls() {
let commandCenter = MPRemoteCommandCenter.shared()
commandCenter.previousTrackCommand.isEnabled = false
commandCenter.nextTrackCommand.isEnabled = false
commandCenter.skipBackwardCommand.isEnabled = false
commandCenter.skipForwardCommand.isEnabled = false
commandCenter.playCommand.isEnabled = true
commandCenter.playCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
//Update your button here for the play command
self.playAudio()
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "updatedControlCenterAudio"), object: nil)
return .success
}
commandCenter.pauseCommand.isEnabled = true
commandCenter.pauseCommand.addTarget { (event) -> MPRemoteCommandHandlerStatus in
//Update your button here for the pause command
self.pauseAudio()
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "updatedControlCenterAudio"), object: nil)
return .success
}
}
func setupNowPlaying() {
guard let audioPlayer = audioPlayer else { return }
var nowPlayingInfo = [String: Any]()
nowPlayingInfo[MPMediaItemPropertyTitle] = songName
if let image = UIImage(named: songImage) {
nowPlayingInfo[MPMediaItemPropertyArtwork] =
MPMediaItemArtwork(boundsSize: image.size) { size in
return image
}
}
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = audioPlayer.currentTime
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = audioPlayer.duration
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = audioPlayer.rate
// Set the metadata
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
}
I'm working on a project that needs to record video segments and then merge these segments in one video. I'm using AVFoundation framework to do so.
The problem is that when I merge the segments, there's always a black frame or no sound between the chunks. Also, sometimes the sound is not synchronised after merging.
I've tried many options to solve this problem but didn't find a good solution.
I've tried many solutions from Stack Overflow but none of them worked.
I've also tried to use MKOVideoMerge but I still have the problem.
I've made a small view controller below that records from the camera and creates a new segment every 10 seconds. When the user taps "stop", all the segments are merged and saved to the camera roll.
If anyone has managed to merge two video segments without dropping frames or sound, help would be much appreciated :)
import UIKit
import AVFoundation
import Photos
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate {
private enum SessionSetupResult {
case success
case notAuthorized
case configurationFailed
}
@IBOutlet weak var btnStartStop: UIButton!
@IBOutlet weak var previewView: PreviewView!
private let session = AVCaptureSession()
private let sessionQueue = DispatchQueue(label: "session queue")
private let mergeQueue = DispatchQueue(label: "merge queue")
private var setupResult: SessionSetupResult = .success
private var videoDeviceInput: AVCaptureDeviceInput!
lazy private var movieBufferOutput = AVCaptureVideoDataOutput()
lazy private var audioBufferOutput = AVCaptureAudioDataOutput()
private var movieConnection: AVCaptureConnection!
private var audioConnection: AVCaptureConnection!
private var assetWriter: AVAssetWriter! = nil
private var assetWriterInput: AVAssetWriterInput! = nil
private var audioWriterInput: AVAssetWriterInput! = nil
private var chunkNumber = 0
private let chunkMaxDuration = 10.0
private var chunkStartTime: CMTime! = nil
private var chunkOutputURL: URL! = nil
private var stopRecording: Bool = false
@IBAction func startStop(_ sender: Any) {
stopRecording = true
btnStartStop.setTitle("recording", for: .normal)
}
override func viewDidLoad() {
super.viewDidLoad()
previewView.session = session
previewView.videoPreviewLayer.videoGravity = .resizeAspectFill
switch AVCaptureDevice.authorizationStatus(for: .video) {
case .authorized:
break
case .notDetermined:
sessionQueue.suspend()
AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in
if !granted {
self.setupResult = .notAuthorized
}
self.sessionQueue.resume()
})
default:
setupResult = .notAuthorized
}
sessionQueue.async {
self.configureSession()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.cleanTempDirectory()
sessionQueue.async {
switch self.setupResult {
case .success:
break
case .notAuthorized:
DispatchQueue.main.async {
let changePrivacySetting = "Not authorized"
let message = NSLocalizedString(changePrivacySetting, comment: "Alert message when the user has denied access to the camera")
let alertController = UIAlertController(title: "Not authorized", message: message, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
style: .cancel,
handler: nil))
alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Alert button to open Settings"),
style: .`default`,
handler: { _ in
UIApplication.shared.open(URL(string: UIApplicationOpenSettingsURLString)!, options: [:], completionHandler: nil)
}))
self.present(alertController, animated: true, completion: nil)
}
case .configurationFailed:
DispatchQueue.main.async {
let alertMsg = "Error"
let message = NSLocalizedString("Error", comment: alertMsg)
let alertController = UIAlertController(title: "Error", message: message, preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"),
style: .cancel,
handler: nil))
self.present(alertController, animated: true, completion: nil)
}
}
}
}
private func configureSession() {
if setupResult != .success {
return
}
session.beginConfiguration()
session.sessionPreset = .high
do {
var defaultVideoDevice: AVCaptureDevice?
if let dualCameraDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) {
defaultVideoDevice = dualCameraDevice
} else if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) {
defaultVideoDevice = backCameraDevice
} else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) {
defaultVideoDevice = frontCameraDevice
}
let videoDeviceInput = try AVCaptureDeviceInput(device: defaultVideoDevice!)
if session.canAddInput(videoDeviceInput) {
session.addInput(videoDeviceInput)
self.videoDeviceInput = videoDeviceInput
DispatchQueue.main.async {
self.previewView.videoPreviewLayer.connection?.videoOrientation = .landscapeRight
}
} else {
setupResult = .configurationFailed
session.commitConfiguration()
return
}
} catch {
setupResult = .configurationFailed
session.commitConfiguration()
return
}
do {
let audioDevice = AVCaptureDevice.default(for: .audio)
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice!)
if session.canAddInput(audioDeviceInput) {
session.addInput(audioDeviceInput)
}
} catch {
}
movieBufferOutput.videoSettings = [
String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
]
if self.session.canAddOutput(movieBufferOutput) {
self.session.addOutput(movieBufferOutput)
if let connection = self.movieBufferOutput.connection(with: .video) {
movieConnection = connection
connection.videoOrientation = .landscapeRight
if connection.isVideoStabilizationSupported {
connection.preferredVideoStabilizationMode = .auto
}
}
} else {
setupResult = .configurationFailed
session.commitConfiguration()
return
}
if self.session.canAddOutput(audioBufferOutput) {
self.session.addOutput(audioBufferOutput)
if let connection = self.audioBufferOutput.connection(with: .audio) {
audioConnection = connection
}
} else {
print("Could not add audio output to the session")
setupResult = .configurationFailed
session.commitConfiguration()
return
}
let queue: DispatchQueue = DispatchQueue(label: "MediaOutputQueue")
let audioQueue: DispatchQueue = DispatchQueue(label: "AudioOutputQueue")
self.movieBufferOutput.setSampleBufferDelegate(self, queue: queue)
self.audioBufferOutput.setSampleBufferDelegate(self, queue: audioQueue)
self.movieBufferOutput.alwaysDiscardsLateVideoFrames = true
session.commitConfiguration()
self.session.startRunning()
}
func createWriterInput(for presentationTimeStamp: CMTime) {
self.stopRecording = false
let fileManager = FileManager.default
let outputFileName = "chunk\(chunkNumber)"
let outputFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((outputFileName as NSString).appendingPathExtension("mp4")!)
chunkOutputURL = URL(fileURLWithPath: outputFilePath)
try? fileManager.removeItem(at: chunkOutputURL)
assetWriter = try! AVAssetWriter(outputURL: chunkOutputURL, fileType: .mp4)
let outputSettings: [String: Any] = [AVVideoCodecKey:AVVideoCodecH264, AVVideoWidthKey: 1280, AVVideoHeightKey: 720]
assetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: outputSettings)
assetWriterInput.expectsMediaDataInRealTime = true
assetWriter.add(assetWriterInput)
let audioSettings = [
AVFormatIDKey : kAudioFormatMPEG4AAC,
AVNumberOfChannelsKey : 1,
AVSampleRateKey : 44100.0,
AVEncoderBitRateKey: 192000
] as [String : Any]
audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
audioWriterInput.expectsMediaDataInRealTime = true;
assetWriter.add(audioWriterInput)
chunkNumber += 1
chunkStartTime = presentationTimeStamp
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: chunkStartTime)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
override var shouldAutorotate: Bool {
return false
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .landscapeRight
}
override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
return .landscapeRight
}
func cleanTempDirectory() {
let tempVideosPath = NSTemporaryDirectory()
var isDirectory = ObjCBool(true)
if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
return
}
let tempVideosURL = URL(fileURLWithPath: tempVideosPath)
do {
let directoryContents = try FileManager.default.contentsOfDirectory(at: tempVideosURL, includingPropertiesForKeys: [.contentModificationDateKey], options: [.skipsHiddenFiles,.skipsSubdirectoryDescendants])
let mp4Files = directoryContents.filter{ $0.pathExtension == "mp4" }.map { url in
(url, (try? url.resourceValues(forKeys: [.creationDateKey]))?.creationDate ?? Date.distantPast)
}.sorted(by: { $0.1 < $1.1 })
for mp4 in mp4Files {
try? FileManager.default.removeItem(at: mp4.0)
}
} catch {
}
}
func getTempVideos() -> [URL] {
let tempVideosPath = NSTemporaryDirectory()
var isDirectory = ObjCBool(true)
if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
return []
}
var videosURL: [URL] = []
let videosUrl = URL(fileURLWithPath: tempVideosPath)
do {
let directoryContents = try FileManager.default.contentsOfDirectory(at: videosUrl, includingPropertiesForKeys: [.contentModificationDateKey], options: [.skipsHiddenFiles,.skipsSubdirectoryDescendants])
let mp4Files = directoryContents.filter{ $0.pathExtension == "mp4" }.map { url in
(url, (try? url.resourceValues(forKeys: [.creationDateKey]))?.creationDate ?? Date.distantPast)
}.sorted(by: { $0.1 > $1.1 })
var i = 0
for mp4 in mp4Files {
videosURL.append(mp4.0)
i = i + 1
if i > 5 {
break
}
}
} catch {
return []
}
return videosURL
}
func getRecordedVideoURL() -> URL {
var tempVideosPath = NSTemporaryDirectory()
tempVideosPath = (tempVideosPath as NSString).appendingPathComponent("videos")
var isDirectory = ObjCBool(true)
if !FileManager.default.fileExists(atPath: tempVideosPath, isDirectory: &isDirectory) {
do {
try FileManager.default.createDirectory(at: URL(fileURLWithPath: tempVideosPath), withIntermediateDirectories: true, attributes: nil)
} catch {
}
}
let outputFileName = "test-" + NSUUID().uuidString
let outputFileURL = URL(fileURLWithPath: tempVideosPath).appendingPathComponent(outputFileName).appendingPathExtension("mp4")
try? FileManager.default.removeItem(at: outputFileURL)
return outputFileURL
}
func mergeVideos(urls: [URL], excludedUrl: URL, completion: @escaping (_ exporter: AVAssetExportSession?) -> ()) -> Void {
let mainComposition = AVMutableComposition()
let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
let soundtrackTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
let assetOptions = [AVURLAssetPreferPreciseDurationAndTimingKey: true]
var insertTime = kCMTimeZero
var audioInsertTime = kCMTimeZero
var videos: [(asset: AVURLAsset, videoTrack: AVAssetTrack, videoDuration: CMTime)] = []
for url in urls {
if url.path != excludedUrl.path {
let videoAsset = AVURLAsset(url: url, options : assetOptions)
if videoAsset.tracks(withMediaType: .video).count > 0 && videoAsset.tracks(withMediaType: .audio).count > 0 {
let videoTrack = videoAsset.tracks(withMediaType: .video)[0]
let videoDuration = videoTrack.timeRange.duration
videos.append((asset: videoAsset, videoTrack: videoTrack, videoDuration: videoDuration))
} else {
break
}
}
}
var hasError: Bool = false
for video in videos.reversed() {
let audioTrack = video.asset.tracks(withMediaType: .audio)[0]
let audioDuration = audioTrack.timeRange.duration
do {
try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video.videoDuration), of: video.videoTrack, at: insertTime)
} catch let error {
hasError = true
print(error)
}
do {
try soundtrackTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioDuration), of: audioTrack, at: audioInsertTime)
} catch let error {
hasError = true
print(error)
}
insertTime = CMTimeAdd(insertTime, video.videoDuration)
audioInsertTime = CMTimeAdd(audioInsertTime, audioDuration)
}
if videos.count == 0 {
hasError = true
}
if !hasError {
let outputFileURL = getRecordedVideoURL()
let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mp4
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously {
DispatchQueue.main.async {
completion(exporter!)
}
}
} else {
completion(nil)
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if connection == self.audioConnection {
if let audioInput = self.audioWriterInput, audioInput.isReadyForMoreMediaData {
if !audioInput.append(sampleBuffer) {
print("Error writing audio buffer");
}
}
} else {
if let videoInput = self.assetWriterInput, videoInput.isReadyForMoreMediaData {
if !videoInput.append(sampleBuffer) {
print("Error writing video buffer");
}
}
}
if connection == movieConnection {
let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
if assetWriter == nil {
createWriterInput(for: presentationTimeStamp)
} else {
let currentChunkDuration = CMTimeGetSeconds(CMTimeSubtract(presentationTimeStamp, chunkStartTime))
if currentChunkDuration >= chunkMaxDuration || self.stopRecording == true {
let chunkAssetWriter = assetWriter!
let assetWriterInput = self.assetWriterInput
let audioWriterInput = self.audioWriterInput
let stopRecording = self.stopRecording
createWriterInput(for: presentationTimeStamp)
assetWriterInput?.markAsFinished()
audioWriterInput?.markAsFinished()
chunkAssetWriter.endSession(atSourceTime: presentationTimeStamp)
chunkAssetWriter.finishWriting {
DispatchQueue.main.async {
self.btnStartStop.setTitle("stop", for: .normal)
}
if stopRecording {
self.mergeQueue.async {
self.mergeVideos(urls: self.getTempVideos(), excludedUrl: self.chunkOutputURL!, completion: { exportSession in
if let exportSession = exportSession {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportSession.outputURL!)
}) { saved, error in
DispatchQueue.main.async {
if saved {
let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
alertController.addAction(defaultAction)
self.present(alertController, animated: true, completion: nil)
}
}
}
}
})
}
}
}
}
}
}
}}
Thanks for your help :)