I am new to Swift and to Stack Overflow; thanks in advance for your attention.
Basically, I am trying to build a custom camera that records video with audio, so that the video plays back with sound. For the last few days I have been trying to build this custom camera. I followed a tutorial, but something is still missing: as far as I can tell, my custom camera only records video, and perhaps no audio at all. I don't understand why, and I haven't been able to find an appropriate answer to this.
Here is what I did:
import UIKit
import AVFoundation
import SVProgressHUD
import MediaPlayer
import MobileCoreServices
import AVKit
var videoUrl = [AnyObject]()
class TestViewController: UIViewController {
@IBOutlet var viewVidioPlayer: UIView!
@IBOutlet weak var myView: UIView!
var session: AVCaptureSession?
var userreponsevideoData = NSData()
var userreponsethumbimageData = NSData()
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
}
// here i create session
func createSession() {
var input: AVCaptureDeviceInput?
let movieFileOutput = AVCaptureMovieFileOutput()
var prevLayer: AVCaptureVideoPreviewLayer?
prevLayer?.frame.size = myView.frame.size
session = AVCaptureSession()
let error: NSError? = nil
do {
input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .front)!) } catch {return}
if error == nil {
session?.addInput(input)
} else {
print("camera input error: \(String(describing: error))")
}
prevLayer = AVCaptureVideoPreviewLayer(session: session)
prevLayer?.frame.size = myView.frame.size
prevLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
prevLayer?.connection.videoOrientation = .portrait
myView.layer.addSublayer(prevLayer!)
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let filemainurl = NSURL(string: ("\(documentsURL.appendingPathComponent("temp"))" + ".mp4"))
let maxDuration: CMTime = CMTimeMake(600, 10)
movieFileOutput.maxRecordedDuration = maxDuration
movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024
if self.session!.canAddOutput(movieFileOutput) {
self.session!.addOutput(movieFileOutput)
}
session?.startRunning()
movieFileOutput.startRecording(toOutputFileURL: filemainurl! as URL, recordingDelegate: self)
}
func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? {
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)
for device in devices! {
if (device as AnyObject).position == position {
return device as? AVCaptureDevice
}
}
return nil
}
@IBAction func pressbackbutton(sender: AnyObject) {
session?.stopRunning()
}
@IBAction func Record(_ sender: Any) {
createSession()
}
@IBAction func play(_ sender: Any) {
self.videoPlay()
}
func videoPlay()
{
let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
do {
// Get the directory contents urls (including subfolders urls)
let directoryContents = try FileManager.default.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil, options: [])
print(directoryContents)
// if you want to filter the directory contents you can do like this:
videoUrl = directoryContents.filter{ $0.pathExtension == "mp4" } as [AnyObject]
print("mp3 urls:",videoUrl)
let playerController = AVPlayerViewController()
playerController.delegate = self as? AVPlayerViewControllerDelegate
let movieURL = videoUrl[0]
print(movieURL)
let player = AVPlayer(url: movieURL as! URL)
playerController.player = player
self.addChildViewController(playerController)
self.view.addSubview(playerController.view)
playerController.view.frame = self.view.frame
player.play()
player.volume = 1.0
player.rate = 1.0
} catch let error as NSError {
print(error.localizedDescription)
}
}
}
extension TestViewController: AVCaptureFileOutputRecordingDelegate
{
@available(iOS 4.0, *)
private func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: URL!, fromConnections connections: [AnyObject]!) {
print(fileURL)
}
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
let filemainurl = outputFileURL
do
{
let asset = AVURLAsset(url: filemainurl! as URL, options: nil)
//AVURLAsset(URL: filemainurl as! URL, options: nil)
print(asset)
let imgGenerator = AVAssetImageGenerator(asset: asset)
imgGenerator.appliesPreferredTrackTransform = true
let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)
let uiImage = UIImage(cgImage: cgImage)
userreponsethumbimageData = try NSData(contentsOf: filemainurl! as URL)
print(userreponsethumbimageData.length)
print(uiImage)
// imageData = UIImageJPEGRepresentation(uiImage, 0.1)
}
catch let error as NSError
{
print(error)
return
}
SVProgressHUD.show(with: SVProgressHUDMaskType.clear)
let VideoFilePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random()%1000)d")!.appendingPathExtension("mp4").absoluteString
if FileManager.default.fileExists(atPath: VideoFilePath)
{
do
{
try FileManager.default.removeItem(atPath: VideoFilePath)
}
catch { }
}
let tempfilemainurl = NSURL(string: VideoFilePath)!
let sourceAsset = AVURLAsset(url: filemainurl! as URL, options: nil)
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)!
assetExport.outputFileType = AVFileTypeQuickTimeMovie
assetExport.outputURL = tempfilemainurl as URL
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status
{
case AVAssetExportSessionStatus.completed:
DispatchQueue.main.async(execute: {
do
{
SVProgressHUD .dismiss()
self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl as URL, options: NSData.ReadingOptions())
print("MB - \(self.userreponsevideoData.length) byte")
}
catch
{
SVProgressHUD .dismiss()
print(error)
}
})
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport.error)")
default:
print("complete")
SVProgressHUD .dismiss()
}
}
}
}
That is all I have done, so I don't understand what is missing from this code. Why is audio not recorded along with the video, or not played back with it?
Use this CocoaPod for your project. It makes your job quite easy.
It has all the instructions on what to do and also contains a demo project to test that it works as you intended.
SwiftyCam
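Whichever route you take, note that the session in the question only ever adds a camera input, so nothing is capturing sound. A rough sketch of adding a microphone input (using current AVFoundation naming, and assuming a session property like the one above; this is not part of the original code):
// Sketch: add a microphone input so AVCaptureMovieFileOutput records audio as well as video.
if let audioDevice = AVCaptureDevice.default(for: .audio) {
    do {
        let audioInput = try AVCaptureDeviceInput(device: audioDevice)
        if session.canAddInput(audioInput) {
            session.addInput(audioInput) // without an audio input, the movie file contains video only
        }
    } catch {
        print("Could not add audio input: \(error)")
    }
}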
Apple's sample code Identifying Trajectories in Video contains the following delegate callback:
func cameraViewController(_ controller: CameraViewController, didReceiveBuffer buffer: CMSampleBuffer, orientation: CGImagePropertyOrientation) {
let visionHandler = VNImageRequestHandler(cmSampleBuffer: buffer, orientation: orientation, options: [:])
if gameManager.stateMachine.currentState is GameManager.TrackThrowsState {
DispatchQueue.main.async {
// Get the frame of rendered view
let normalizedFrame = CGRect(x: 0, y: 0, width: 1, height: 1)
self.jointSegmentView.frame = controller.viewRectForVisionRect(normalizedFrame)
self.trajectoryView.frame = controller.viewRectForVisionRect(normalizedFrame)
}
// Perform the trajectory request in a separate dispatch queue.
trajectoryQueue.async {
do {
try visionHandler.perform([self.detectTrajectoryRequest])
if let results = self.detectTrajectoryRequest.results {
DispatchQueue.main.async {
self.processTrajectoryObservations(controller, results)
}
}
} catch {
AppError.display(error, inViewController: self)
}
}
}
}
However, instead of drawing UI whenever detectTrajectoryRequest.results exist (https://developer.apple.com/documentation/vision/vndetecttrajectoriesrequest/3675672-results), I'm interested in using the CMTimeRange provided by each result to construct a new video. In effect, this would filter down the original video to only frames with trajectories.
What would be a good approach to transferring only frames with trajectories from an AVAssetReader to an AVAssetWriter?
By the time you identify a trajectory in captured video frames or from frames decoded from a file you may not have the initial frames in memory any more, so the easiest way to create your file containing only trajectories is to keep the original file on hand, and then insert its trajectory snippets into an AVComposition which you then export using AVAssetExportSession.
This sample captures frames from the camera, encodes them to a file whilst analysing them for trajectories and after 20 seconds, it closes the file and then creates the new file containing only trajectory snippets.
If you're interested in detecting trajectories in a pre-existing file, it's not too hard to rewire this code.
import UIKit
import AVFoundation
import Vision
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
let session = AVCaptureSession()
var assetWriter: AVAssetWriter!
var assetWriterInput: AVAssetWriterInput!
var assetWriterStartTime: CMTime = .zero
var assetWriterStarted = false
var referenceFileURL: URL!
var timeRangesOfInterest: [Double : CMTimeRange] = [:]
func startWritingFile(outputURL: URL, initialSampleBuffer: CMSampleBuffer) {
try? FileManager.default.removeItem(at: outputURL)
assetWriter = try! AVAssetWriter(outputURL: outputURL, fileType: .mov)
let dimensions = initialSampleBuffer.formatDescription!.dimensions
assetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey: AVVideoCodecType.h264, AVVideoWidthKey: dimensions.width, AVVideoHeightKey: dimensions.height])
assetWriter.add(assetWriterInput)
assetWriter.startWriting()
self.assetWriterStartTime = CMSampleBufferGetPresentationTimeStamp(initialSampleBuffer)
assetWriter.startSession(atSourceTime: self.assetWriterStartTime)
}
func stopWritingFile(completion: @escaping (() -> Void)) {
let assetWriterToFinish = self.assetWriter!
self.assetWriterInput = nil
self.assetWriter = nil
assetWriterToFinish.finishWriting {
print("finished writing: \(assetWriterToFinish.status.rawValue)")
completion()
}
}
func exportVideoTimeRanges(inputFileURL: URL, outputFileURL: URL, timeRanges: [CMTimeRange]) {
let inputAsset = AVURLAsset(url: inputFileURL)
let inputVideoTrack = inputAsset.tracks(withMediaType: .video).first!
let composition = AVMutableComposition()
let compositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)!
var insertionPoint: CMTime = .zero
for timeRange in timeRanges {
try! compositionTrack.insertTimeRange(timeRange, of: inputVideoTrack, at: insertionPoint)
insertionPoint = insertionPoint + timeRange.duration
}
let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
try? FileManager.default.removeItem(at: outputFileURL)
exportSession.outputURL = outputFileURL
exportSession.outputFileType = .mov
exportSession.exportAsynchronously {
print("export finished: \(exportSession.status.rawValue) - \(exportSession.error)")
}
}
override func viewDidLoad() {
super.viewDidLoad()
let inputDevice = AVCaptureDevice.default(for: .video)!
let input = try! AVCaptureDeviceInput(device: inputDevice)
let output = AVCaptureVideoDataOutput()
output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
session.addInput(input)
session.addOutput(output)
session.startRunning()
DispatchQueue.main.asyncAfter(deadline: .now() + 20) {
self.stopWritingFile {
print("finished writing")
let trajectoriesFileURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] .appendingPathComponent("trajectories.mov")
self.exportVideoTimeRanges(inputFileURL: self.referenceFileURL, outputFileURL: trajectoriesFileURL, timeRanges: self.timeRangesOfInterest.map { $0.1 })
}
}
}
// Lazily create a single instance of VNDetectTrajectoriesRequest.
private lazy var request: VNDetectTrajectoriesRequest = {
return VNDetectTrajectoriesRequest(frameAnalysisSpacing: .zero,
trajectoryLength: 10,
completionHandler: completionHandler)
}()
// AVCaptureVideoDataOutputSampleBufferDelegate callback.
func captureOutput(_ output: AVCaptureOutput,
didOutput sampleBuffer: CMSampleBuffer,
from connection: AVCaptureConnection) {
if !assetWriterStarted {
self.referenceFileURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] .appendingPathComponent("reference.mov")
startWritingFile(outputURL: self.referenceFileURL, initialSampleBuffer: sampleBuffer)
assetWriterStarted = true
}
if assetWriterInput != nil && assetWriterInput.isReadyForMoreMediaData {
assetWriterInput.append(sampleBuffer)
}
do {
let requestHandler = VNImageRequestHandler(cmSampleBuffer: sampleBuffer)
try requestHandler.perform([request])
} catch {
// Handle the error.
}
}
func completionHandler(request: VNRequest, error: Error?) {
guard let request = request as? VNDetectTrajectoriesRequest else { return }
if let results = request.results,
results.count > 0 {
NSLog("\(results)")
for result in results {
var fileRelativeTimeRange = result.timeRange
fileRelativeTimeRange.start = fileRelativeTimeRange.start - self.assetWriterStartTime
self.timeRangesOfInterest[fileRelativeTimeRange.start.seconds] = fileRelativeTimeRange
}
}
}
}
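If you want the pre-existing-file variant mentioned above, one possible rewiring is to decode the file with an AVAssetReader and feed each decoded sample buffer to the same Vision request instead of the capture callback. A sketch under that assumption (it reuses the request property from the class above and keeps error handling minimal):
func detectTrajectories(inFileAt url: URL) throws {
    let asset = AVURLAsset(url: url)
    guard let videoTrack = asset.tracks(withMediaType: .video).first else { return }
    let reader = try AVAssetReader(asset: asset)
    // Decode to BGRA pixel buffers so Vision can consume the frames.
    let output = AVAssetReaderTrackOutput(track: videoTrack,
                                          outputSettings: [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA])
    reader.add(output)
    guard reader.startReading() else { return }
    while let sampleBuffer = output.copyNextSampleBuffer() {
        let handler = VNImageRequestHandler(cmSampleBuffer: sampleBuffer)
        try handler.perform([request]) // results arrive in the same completionHandler as before
    }
}
In this case the timeRange of each observation is already relative to the file, so the assetWriterStartTime offset used in completionHandler would not be needed.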
I am trying to record video in my iPhone app using AVCaptureSession. It works fine the first time, however if I tap the Record button a second time the app terminates with the error below:
Terminating app due to uncaught exception
'NSInvalidArgumentException', reason: '*** -[AVCaptureSession
addOutput:] An AVCaptureOutput instance may not be added to more than
one session'
First I was getting a "cannot support multiple video / audio sessions" error, and I added code to check whether an input had already been added before adding another one. Now I'm getting this new error.
Any help is appreciated.
class CaptureViewViewController: BaseViewController,AVCaptureFileOutputRecordingDelegate{
let session: AVCaptureSession = AVCaptureSession()
var movieOutput = AVCaptureMovieFileOutput()
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
session.beginConfiguration()
session.sessionPreset = AVCaptureSession.Preset.high
if let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)
{
if let audioDevice = AVCaptureDevice.default(for: AVMediaType.audio){
do {
let audioInput = try AVCaptureDeviceInput(device: audioDevice)
if let inputs = session.inputs as? [AVCaptureDeviceInput] {
for input in inputs {
session.removeInput(input)
}
}
self.session.addInput(audioInput)
} catch {
print("Unable to add audio device to the recording.")
}
}
do {
if let inputs = session.inputs as? [AVCaptureDeviceInput] {
for input in inputs {
session.removeInput(input)
}
}
try session.addInput(AVCaptureDeviceInput(device: device))
} catch {
print(error.localizedDescription)
}
let previewLayer = AVCaptureVideoPreviewLayer(session: session)
self.preViewImage.layer.addSublayer(previewLayer)
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
previewLayer.frame = self.preViewImage.layer.bounds
}
movieOutput.movieFragmentInterval = CMTime.invalid
session.addOutput(movieOutput)
session.commitConfiguration()
session.startRunning()
}
@IBAction func handleMoviewRecord(_ sender: Any) {
if videoRecordState == .stop {
// let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
// if let fileUrl = documentsUrl.appendingPathComponent( "output.mov" )?.absoluteURL {
// print(fileUrl)
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let fileUrl = paths[0].appendingPathComponent("output.mov")
try? FileManager.default.removeItem(at: fileUrl)
movieOutput.startRecording(to: fileUrl, recordingDelegate: self)
// (to: URL(fileURLWithPath: paths), recordingDelegate: self)
recordVideoButton.setTitle("Stop Recording", for: .normal)
recordVideoButton.backgroundColor = UIColor.red
videoRecordState = .start
// }
}else {
self.movieOutput.stopRecording()
session.stopRunning()
recordVideoButton.setTitle("Start Recording", for: .normal)
videoRecordState = .stop
session.removeOutput(movieOutput)
}
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
print("FINISHED \(Error.self)")
if error == nil {
UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
UserDefaults.set(.videoPath, to: outputFileURL.path)
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "refresh"), object: nil)
navigationController?.popViewController(animated: true)
}
}
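The exception in question is thrown when addOutput is called for an output that is already attached to a session, and viewWillAppear above adds movieOutput unconditionally each time it runs. A guard similar to the one already used for the inputs may help; a minimal sketch using the same property names:
// Sketch: only add the movie output if this session doesn't already contain it.
if !session.outputs.contains(movieOutput), session.canAddOutput(movieOutput) {
    session.addOutput(movieOutput)
}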
I have an array of [AVAsset](). Whenever I record several videos of different durations, the code below merges them all into one video, but the merged video only plays the last clip on a loop.
For example: video1 is 1 minute and shows a dog walking, video2 is 1 minute and shows a bird flying, video3 is 1 minute and shows a horse running. The merged video plays for 3 minutes but only shows the horse running, three consecutive times.
Where am I going wrong?
var movieFileOutput = AVCaptureMovieFileOutput()
var arrayVideos = [AVAsset]()
var videoFileUrl: URL?
// button to record video
@objc func recordButtonTapped() {
// Stop recording
if movieFileOutput.isRecording {
movieFileOutput.stopRecording()
print("Stop Recording")
} else {
// Start recording
movieFileOutput.connection(with: AVMediaType.video)?.videoOrientation = videoOrientation()
movieFileOutput.maxRecordedDuration = maxRecordDuration()
videoFileUrl = URL(fileURLWithPath: videoFileLocation())
if let videoFileUrlFromCamera = videoFileUrl {
movieFileOutput.startRecording(to: videoFileUrlFromCamera, recordingDelegate: self)
}
}
}
func videoFileLocation() -> String {
return NSTemporaryDirectory().appending("videoFile.mov")
}
// button to save the merged video
@objc func saveButtonTapped() {
mergeVids()
}
// function to merge and save videos
func mergeVids() {
let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: .video,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
compositionVideoTrack?.preferredTransform = CGAffineTransform(rotationAngle: .pi / 2)
let soundtrackTrack = mixComposition.addMutableTrack(withMediaType: .audio,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
var insertTime = CMTime.zero
for videoAsset in arrayVideos {
do {
try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
duration: videoAsset.duration),
of: videoAsset.tracks(withMediaType: .video)[0],
at: insertTime)
try soundtrackTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
duration: videoAsset.duration),
of: videoAsset.tracks(withMediaType: .audio)[0],
at: insertTime)
insertTime = CMTimeAdd(insertTime, videoAsset.duration)
} catch let error as NSError {
print("\(error.localizedDescription)")
}
}
let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge.mp4")
let path = outputFileURL.path
if FileManager.default.fileExists(atPath: path) {
try! FileManager.default.removeItem(atPath: path)
}
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
exporter!.outputURL = outputFileURL
exporter!.outputFileType = AVFileType.mp4
exporter!.shouldOptimizeForNetworkUse = true
exporter!.exportAsynchronously { [weak self] in
let cameraVideoURL = exporter!.outputURL!
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: cameraVideoURL)
}) { (saved, error) in
if let error = error { return }
if !saved { return }
// url is saved
self?.videoFileUrl = nil
self?.arrayVideos.removeAll()
}
}
}
// AVCaptureFileOutputRecording Delegates
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
print("+++++++++++++++Started")
print("*****Started recording: \(fileURL)\n")
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if error == nil {
let asset = AVAsset(url: outputFileURL)
arrayVideos.append(asset)
print(arrayVideos.count)
} else {
print("Error recording movie: \(error!.localizedDescription)")
}
func cleanUp() {
let path = outputFileURL.path
if FileManager.default.fileExists(atPath: path) {
do {
try FileManager.default.removeItem(atPath: path)
} catch {
print("Could not remove file at url: \(outputFileURL)")
}
}
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
print("++++++Frame Drop: \(connection.description)")
}
Thanks to @alxlives for testing out the merge function and pointing out that, since it was fine on his machine, the problem must have been somewhere else.
The problem was here:
func videoFileLocation() -> String {
return NSTemporaryDirectory().appending("videoFile.mov")
}
In recordButtonTapped, the code above meant every new recording used the same "videoFile.mov" path:
videoFileUrl = URL(fileURLWithPath: videoFileLocation()) // <<< it gets called here every time a new video runs
if let videoFileUrlFromCamera = videoFileUrl {
movieFileOutput.startRecording(to: videoFileUrlFromCamera, recordingDelegate: self)
}
To fix it, I needed to make each file name unique:
func videoFileLocation() -> String {
let uuid = UUID().uuidString
return NSTemporaryDirectory().appending("videoFile_\(uuid).mov")
}
Is it possible to retrieve the recorded audio file from the documents directory? I have looked all over the internet, but using AVAsset/WKAudioFileAsset does not seem to be the right path.
Or is the only option to convert the recorded audio file to NSData (binary data), send it to the server, and have the server convert the binary data back into an audio file?
I have found this: https://iosdevcenters.blogspot.com/2016/04/save-and-get-image-from-document.html
(That example is about retrieving images, and it uses UIImage.)
So I am curious whether there is an equivalent way to retrieve an audio file. Is there a class for audio that the file can be loaded into, just as UIImage is used for images?
I hope someone can help me out. Thank you very much.
My code looks as follows:
import WatchKit
import Foundation
import AVFoundation
class InterfaceController: WKInterfaceController, AVAudioRecorderDelegate{
@IBOutlet weak var btn: WKInterfaceButton!
var recordingSession : AVAudioSession!
var audioRecorder : AVAudioRecorder!
var settings = [String : Any]()
override func awake(withContext context: Any?) {
super.awake(withContext: context)
recordingSession = AVAudioSession.sharedInstance()
do{
try recordingSession.setCategory(AVAudioSession.Category.playAndRecord)
try recordingSession.setActive(true)
recordingSession.requestRecordPermission(){[unowned self] allowed in
DispatchQueue.main.async {
if allowed{
print("Allow")
} else{
print("Don't Allow")
}
}
}
}
catch{
print("failed to record!")
}
// Configure interface objects here.
// Audio Settings
settings = [
AVFormatIDKey:Int(kAudioFormatLinearPCM),
AVSampleRateKey:44100.0,
AVNumberOfChannelsKey:1,
AVLinearPCMBitDepthKey:8,
AVLinearPCMIsFloatKey:false,
AVLinearPCMIsBigEndianKey:false,
AVEncoderAudioQualityKey:AVAudioQuality.max.rawValue
]
}
override func willActivate() {
// This method is called when watch view controller is about to be visible to user
super.willActivate()
print("Test")
}
override func didDeactivate() {
// This method is called when watch view controller is no longer visible
super.didDeactivate()
}
func directoryURL() -> URL? {
let fileManager = FileManager.default
let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
let documentDirectory = urls[0] as URL
let soundUrl = documentDirectory.appendingPathComponent("sound.wav")
// var soundUrlStr = soundUrl?.path
//print(fileManager.fileExists(atPath: soundUrlStr))
let filePath = (soundUrl).path
print(filePath)
print("URL")
print(soundUrl)
return soundUrl as URL?
}
func startRecording(){
let audioSession = AVAudioSession.sharedInstance()
do{
audioRecorder = try AVAudioRecorder(url: self.directoryURL()! as URL,
settings: settings)
audioRecorder.delegate = self
audioRecorder.prepareToRecord()
audioRecorder.record(forDuration: 5.0)
}
catch {
finishRecording(success: false)
}
do {
try audioSession.setActive(true)
audioRecorder.record()
} catch {
}
}
func finishRecording(success: Bool) {
audioRecorder.stop()
if success {
print(success)
} else {
audioRecorder = nil
print("Somthing Wrong.")
}
}
@IBAction func recordAudio() {
let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
let url = URL(fileURLWithPath: path)
let pathPart = url.appendingPathComponent("sound.wav")
let filePath = pathPart.path
let fileManager = FileManager.default
if fileManager.fileExists(atPath: filePath){
print("File exists!")
let audioAsset = WKAudioFileAsset(url:pathPart)
// let playerItem = WKAudioFilePlayerItem(asset:audioAsset)
print("Audio File")
print(audioAsset)
print("WAV file")
print(NSData(contentsOfFile: filePath) as Any)
}else{
print("File does not exist")
}
if audioRecorder == nil {
print("Pressed")
self.btn.setTitle("Stop")
self.btn.setBackgroundColor(UIColor(red: 119.0/255.0, green: 119.0/255.0, blue: 119.0/255.0, alpha: 1.0))
self.startRecording()
} else {
self.btn.setTitle("Record")
print("Pressed2")
self.btn.setBackgroundColor(UIColor(red: 221.0/255.0, green: 27.0/255.0, blue: 50.0/255.0, alpha: 1.0))
self.finishRecording(success: true)
}
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if !flag {
finishRecording(success: false)
}
}
}
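On the "convert it to binary data and send it to the server" part of the question: once the recording exists at the known path, it can simply be read back into Data. A small sketch (the upload endpoint and helper names here are placeholders, not part of the original code):
// Load the recorded file (Documents/sound.wav, the same path the recorder wrote to).
func loadRecordedAudio() -> Data? {
    let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let soundURL = documents.appendingPathComponent("sound.wav")
    return try? Data(contentsOf: soundURL)
}

// Send the raw bytes to a server, which can write them straight back out as a .wav file.
func uploadRecordedAudio(to endpoint: URL) {
    guard let audioData = loadRecordedAudio() else { return }
    var request = URLRequest(url: endpoint)
    request.httpMethod = "POST"
    request.setValue("audio/wav", forHTTPHeaderField: "Content-Type")
    URLSession.shared.uploadTask(with: request, from: audioData).resume()
}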
How would I stream audio from a URL in Swift without downloading the mp3 file to the device? What do I need to import? Do I need certain libraries? Do I need to add anything to Info.plist? Please comment your code.
You can use AVPlayer to stream audio from a URL:
var player: AVPlayer!
let url = URL(string: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3")
let playerItem = AVPlayerItem(url: url!)
player = AVPlayer(playerItem: playerItem)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = CGRect(x: 0, y: 0, width: 10, height: 50)
self.view.layer.addSublayer(playerLayer)
player.play()
import AVFoundation

class MusicPlayer {
public static var instance = MusicPlayer()
var player = AVPlayer()
func initPlayer(url: String) {
guard let url = URL(string: url) else { return }
let playerItem = AVPlayerItem(url: url)
player = AVPlayer(playerItem: playerItem)
playAudioBackground()
}
func playAudioBackground() {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, mode: AVAudioSession.Mode.default, options: [.mixWithOthers, .allowAirPlay])
print("Playback OK")
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
print(error)
}
}
func pause(){
player.pause()
}
func play() {
player.play()
}
}
This class plays music in the background and can play any audio/video URL.
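A call site might look like this (using the same sample URL as elsewhere in this thread):
// Start streaming via the shared instance, then pause and resume as needed.
MusicPlayer.instance.initPlayer(url: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3")
MusicPlayer.instance.pause()
MusicPlayer.instance.play()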
For online streaming you have to use the AVFoundation framework.
var player: AVPlayer!
let url = URL.init(string: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3")
player = AVPlayer.init(url: url!)
To play:
player.play()
To pause:
player.pause()
I tested it with your URL and it works:
var player: AVPlayer?
let url = "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3"
let playerItem = AVPlayerItem(url: URL(string: url)!)
player = AVPlayer(playerItem: playerItem)
player!.rate = 1.0
player!.play()
Here you go:
import AVFoundation
var progressTimer:Timer?
{
willSet {
progressTimer?.invalidate()
}
}
var playerStream: AVPlayer?
var playerItem: AVPlayerItem?
func playerStream(urlStream : String) {
if let playerStream = playerStream {
if playerStream.rate != 0 { // AVPlayer has no isPlaying property; a non-zero rate means it is playing
stopProgressTimer()
playerStream.pause()
} else {
startProgressTimer()
playerStream.play()
}
} else {
if let urlStr = urlStream.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) {
if let TempURL = URL.init(string: urlStr) {
playerItem = AVPlayerItem(url: TempURL)
playerStream = AVPlayer(playerItem: playerItem)
NotificationCenter.default.addObserver(self, selector: #selector(playerItemDidPlayToEndTime), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: playerItem)
}
}
}
}
@objc func playerItemDidPlayToEndTime() {
stopProgressTimer()
self.playProgressView.progress = 0.0
if let playerStream = self.playerStream {
playerStream.replaceCurrentItem(with: playerItem)
playerStream.seek(to: kCMTimeZero)
// playerStream.seek(to: .zero) swift 4.0
}
}
func stopProgressTimer() {
progressTimer?.invalidate()
progressTimer = nil
}
func startProgressTimer() {
if #available(iOS 10.0, *) {
progressTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
self?.updateProgressTimer()
}
} else {
progressTimer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(self.updateProgressTimer), userInfo: nil, repeats: true)
}
}
@objc func updateProgressTimer() {
if let playerItem = playerItem {
if let pa = playerStream {
let floatTime = Float(CMTimeGetSeconds(pa.currentTime()))
let floatTimeDu = Float(CMTimeGetSeconds(playerItem.duration))
playProgressView.progress = floatTime / floatTimeDu
}
}
}
The playAudioBackground() from the MusicPlayer answer above:
func playAudioBackground() {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, mode: AVAudioSession.Mode.default, options: [.mixWithOthers, .allowAirPlay])
print("Playback OK")
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
print(error)
}
}
should be
func playAudioBackground() {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, mode: AVAudioSession.Mode.default, options: [.mixWithOthers])
print("Playback OK")
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
print(error)
}
}
because .allowAirPlay is not allowed with AVAudioSession.Category.playback and will cause a real device to throw an exception. It works fine in the simulator but not on a device, and as a result your audio session will not be configured properly.
I would have replied to it, but my reputation wasn't high enough to allow me to...
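If AirPlay routing is actually required, Apple documents .allowAirPlay as usable only with the playAndRecord category, so one alternative (a sketch, not from the original answer) would be:
// Keeps .allowAirPlay by switching to the category that permits it.
try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: [.mixWithOthers, .allowAirPlay])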