I've been trying to upload the audio recording to Firebase right after the user stops recording. But it doesn't do anything apart from creating a new folder named "audio".
Here is the code I'm using for starting and stopping the recording:
#IBAction func recordAudio(_ sender: AnyObject) {
recordingLabel.text = "Recording in progress"
stopRecordingButton.isEnabled = true
recordButton.isEnabled = false
let dirPath = NSSearchPathForDirectoriesInDomains(.documentDirectory,.userDomainMask, true)[0] as String
let recordingName = "recordedVoice.wav"
let pathArray = [dirPath, recordingName]
let filePath = URL(string: pathArray.joined(separator: "/"))
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSessionCategoryPlayAndRecord, with:AVAudioSessionCategoryOptions.defaultToSpeaker)
try! audioRecorder = AVAudioRecorder(url: filePath!, settings: [:])
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
audioRecorder.record()
}
#IBAction func stopRecording(_ sender: AnyObject) {
print("Stop recording button was pressed")
recordButton.isEnabled = true
stopRecordingButton.isEnabled = false
recordingLabel.text = "Tap to Record"
audioRecorder.stop()
let audioSession = AVAudioSession.sharedInstance()
try! audioSession.setActive(false)
}
Here is the code I'm using for uploading to Firebase:
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
print("finished recording")
let storageRef = Storage.storage().reference().child("audio/recordedVoice.wav")
if let uploadData = AVFileType(self.recordedVoice.wav!) {
storageRef.put(uploadData, metadata: nil) {(metadata, error) in
if error != nil {
print(error)
return
}
}
}
}
Please help me!
Try:
let audioName = NSUUID().uuidString // unique file name so each recording doesn't overwrite the last
let storageRef = Storage.storage().reference().child("audio").child(audioName)
let metadata = StorageMetadata()
metadata.contentType = "audio/wav"
if let uploadData = try? Data(contentsOf: audioRecorder.url) { // read the recorded file from disk using the recorder's URL
storageRef.putData(uploadData, metadata: metadata) { (metadata, err) in
if err != nil {
//print(err)
return
}
if let _ = metadata?.downloadURL()?.absoluteString {
print("uploading done!")
}
}
}
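Note that metadata?.downloadURL() only exists in older Firebase SDKs; on Firebase 5 and later the download URL is fetched from the storage reference itself, roughly like this:
// Newer SDKs: ask the StorageReference for the URL after the upload succeeds
storageRef.downloadURL { url, error in
if let url = url {
print("uploading done! \(url.absoluteString)")
}
}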
So I have been working on this for hours, and here is my answer:
func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0]
}
func getFileURL() -> URL {
let path = getDocumentsDirectory()
let filePath = path.appendingPathComponent(K.fileName)
return filePath
}
let reference = storage.reference()
let mediaFolder = reference.child("media")
let id = UUID().uuidString // using a UUID to give unique names to audio files, preventing overwrites
let mediaRef = mediaFolder.child(id + K.fileName) // creating the file reference using uuid + filename
let path = getFileURL() // getting filepath
do {
let data = try Data(contentsOf: path) // getting data from filepath
mediaRef.putData(data) { metadata, error in
if error != nil {
self.showAlert(title: "Error", message: error?.localizedDescription, cancelButtonTitle: "cancel", handler: nil)
} else {
mediaRef.downloadURL { url, error in
let url = url?.absoluteString
print(url)
}
}
}
print("record has come")
} catch {
print("error cant get audio file")
}
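If it helps, the block above can be wrapped in a helper and triggered from the recorder delegate; a rough sketch (uploadRecording() is a hypothetical wrapper around the mediaRef.putData block, using the same storage property and K.fileName constant as above):
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
guard flag else { return } // only upload if the recording finished successfully
uploadRecording() // hypothetical helper containing the mediaRef.putData code above
}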
I'm relatively new at coding and still learning, so my answer may not be the best or shortest, but this worked for me.
I'm trying to share a vcf file using UIActivityViewController. It shares the file with all other options except mail. It just opens the mail composer without any attachment. Here's my code:
guard let directoryURL = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first else {
return
}
var filename = NSUUID().uuidString
if let fullname = CNContactFormatter().string(from: contact) {
filename = fullname.components(separatedBy: " ").joined(separator: " ")
}
let fileURL = directoryURL
.appendingPathComponent(filename)
.appendingPathExtension("vcf")
do{
let data = try CNContactVCardSerialization.data(with: [contact])
print("filename: \(filename)")
print("contact: \(String(describing: String(data: data, encoding: String.Encoding.utf8)))")
try data.write(to: fileURL, options: [.atomicWrite])
}
catch{
print(error.localizedDescription)
}
let activityViewController = UIActivityViewController(
activityItems: [fileURL],
applicationActivities: nil
)
present(activityViewController, animated: true, completion: nil)
I want to attach this contact as a vcf file in mail app when user selects mail option for sharing.
Use mimeType "text/x-vcard" for a vcf file in addAttachmentData(_:mimeType:fileName:) of MFMailComposeViewController.
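The draft below keeps your UIActivityViewController approach, but if you want to attach the vCard to Mail directly, a minimal sketch using MFMailComposeViewController (assuming MessageUI is imported and the vcf data has been created as in your code) would look something like this:
import MessageUI
func presentMail(with vcfData: Data, filename: String) {
guard MFMailComposeViewController.canSendMail() else { return } // Mail must be configured on the device
let composer = MFMailComposeViewController()
composer.mailComposeDelegate = self // the presenting view controller must conform to MFMailComposeViewControllerDelegate
composer.addAttachmentData(vcfData, mimeType: "text/x-vcard", fileName: "\(filename).vcf")
present(composer, animated: true)
}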
Here is draft code based on yours that I have tested:
class ViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
let contactStore = CNContactStore()
var contacts = [CNContact]()
let keys = [CNContactFormatter.descriptorForRequiredKeys(for: .fullName), CNContactVCardSerialization.descriptorForRequiredKeys()]
let request = CNContactFetchRequest(keysToFetch: keys)
do {
try contactStore.enumerateContacts(with: request) {
(contact, stop) in
// Array containing all unified contacts from everywhere
contacts.append(contact)
}
self.shareContact(contact: contacts.first!)
}
catch {
print("unable to fetch contacts")
}
}
func shareContact(contact:CNContact) {
guard let directoryURL = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first else {
return
}
var filename = NSUUID().uuidString
if let fullname = CNContactFormatter().string(from: contact) {
filename = fullname.components(separatedBy: " ").joined(separator: " ")
}
let fileURL = directoryURL
.appendingPathComponent(filename)
.appendingPathExtension("vcf")
do{
let data = try CNContactVCardSerialization.data(with: [contact])
print("filename: \(filename)")
print("contact: \(String(describing: String(data: data, encoding: String.Encoding.utf8)))")
print(fileURL)
try data.write(to: fileURL, options: [.atomicWrite])
}
catch{
print(error.localizedDescription)
}
Timer.scheduledTimer(withTimeInterval: 1.0, repeats: false) { (t) in
self.showActivityController(fileURL: fileURL)
t.invalidate()
}
}
func showActivityController (fileURL:URL) {
let activityViewController = UIActivityViewController(
activityItems: [fileURL],
applicationActivities: nil
)
present(activityViewController, animated: true, completion: nil)
}
}
I am working on an audio streaming application with recording functionality for a receiver.
I got stuck at the point where the user wants to record the audio stream on the receiver side.
Below is my code.
Initialisation
var engine = AVAudioEngine()
var recordingFile: AVAudioFile?
var audioPlayer: AVAudioPlayer?
let player = AVAudioPlayerNode()
var isRecording: Bool = false
Initialise AudioEngine
func initializeAudioEngine() {
let input = self.engine.inputNode
let format = input.inputFormat(forBus: 0)
self.engine.attach(self.player)
let mainMixerNode = self.engine.mainMixerNode
self.engine.connect(input, to:mainMixerNode, format: format)
self.engine.prepare()
do {
try self.engine.start()
self.startRecording()
} catch (let error) {
print("START FAILED", error)
}
}
Start Recording
func startRecording() {
self.createRecordingFile()
self.engine.mainMixerNode.installTap(onBus: 0,
bufferSize: 1024,
format: self.engine.mainMixerNode.outputFormat(forBus: 0)) { (buffer, time) -> Void in
do {
self.isRecording = true
try self.recordingFile?.write(from: buffer)
} catch (let error) {
print("RECORD ERROR", error);
}
return
}
}
Create Buffer
private func createBuffer(forFileNamed fileName: String) -> AVAudioPCMBuffer? {
var res: AVAudioPCMBuffer?
if let fileURL = Bundle.main.url(forResource: fileName, withExtension: "caf") {
do {
let file = try AVAudioFile(forReading: fileURL)
res = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity:AVAudioFrameCount(file.length))
if let _ = res {
do {
try file.read(into: res!)
} catch (let error) {
print("ERROR read file", error)
}
}
} catch (let error) {
print("ERROR file creation", error)
}
}
return res
}
Stop Recording
func stopRecording() {
self.engine.mainMixerNode.removeTap(onBus: 0)
}
I am trying to record using earphones, but it's not working.
It will work because once you set up
let audiosession = AVAudioSession.sharedInstance()
with the category AVAudioSessionCategoryPlayAndRecord and call
audiosession.setActive(true)
it will start recording whatever audio is routed to the device.
WebRTC does not have any internal API to start or stop recording.
We can try using AVAudioSession with an AVAudioRecorder instead.
First, set up the audio session:
func setUPAudioSession() -> Bool {
let audiosession = AVAudioSession.sharedInstance()
do {
try audiosession.setCategory(AVAudioSessionCategoryPlayAndRecord)
} catch(let error) {
print("--> \(error.localizedDescription)")
}
do {
try audiosession.setActive(true)
} catch (let error) {
print("--> \(error.localizedDescription)")
}
return audiosession.isInputAvailable;
}
After setting up the audio session, start recording as below:
func startRecording() -> Bool {
var settings: [String: Any] = [String: String]()
settings[AVFormatIDKey] = kAudioFormatLinearPCM
settings[AVSampleRateKey] = 8000.0
settings[AVNumberOfChannelsKey] = 1
settings[AVLinearPCMBitDepthKey] = 16
settings[AVLinearPCMIsBigEndianKey] = false
settings[AVLinearPCMIsFloatKey] = false
settings[AVEncoderAudioQualityKey] = AVAudioQuality.max.rawValue
// Create a file URL in the Documents directory where the recorded file will be saved
let searchPaths: [String] = NSSearchPathForDirectoriesInDomains(.documentDirectory, .allDomainsMask, true)
guard let documentPath_ = searchPaths.first else { return false }
let pathToSave = "\(documentPath_)/\(dateString)"
let url: URL = URL(fileURLWithPath: pathToSave)
recorder = try? AVAudioRecorder(url: url, settings: settings)
// Initialize delegate, metering, etc.
recorder?.delegate = self
recorder?.isMeteringEnabled = true
recorder?.prepareToRecord()
if let recordIs = recorder {
return recordIs.record()
}
return false
}
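The snippet above references a dateString that is not defined anywhere in it; presumably it is just a timestamp used as the file name, something along these lines:
// Hypothetical helper: a timestamp-based file name for each new recording
var dateString: String {
let formatter = DateFormatter()
formatter.dateFormat = "yyyy-MM-dd-HH-mm-ss"
return formatter.string(from: Date()) + ".caf"
}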
Play recorded file
func playRecordedFile() {
// Get the path of the recorded file saved in the previous method
let searchPaths: [String] = NSSearchPathForDirectoriesInDomains(.documentDirectory, .allDomainsMask, true)
guard let documentPath_ = searchPaths.first else { return }
let fileManager = FileManager.default
let arrayListOfRecordSound = (try? fileManager.contentsOfDirectory(atPath: documentPath_)) ?? []
guard let firstSound = arrayListOfRecordSound.first else { return }
let selectedSound = "\(documentPath_)/\(firstSound)"
let url = URL(fileURLWithPath: selectedSound)
let player = try? AVAudioPlayer(contentsOf: url)
player?.delegate = self
try? AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
player?.prepareToPlay()
player?.play()
}
Stop recording
func stopRecording() {
recorder?.stop()
}
Pause recording
func pauseRecording() {
recorder?.pause()
}
func trimVideo(sourceURL: URL, destinationURL: URL, trimPoints: TrimPoints, completion: @escaping () -> Void) {
guard sourceURL.isFileURL else { return }
guard destinationURL.isFileURL else { return }
let options = [
AVURLAssetPreferPreciseDurationAndTimingKey: true
]
let asset = AVURLAsset(url: sourceURL, options: options)
let preferredPreset = AVAssetExportPresetPassthrough
if verifyPresetForAsset(preset: preferredPreset, asset: asset) {
let composition = AVMutableComposition()
let videoCompTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: CMPersistentTrackID())
let audioCompTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: CMPersistentTrackID())
guard let assetVideoTrack: AVAssetTrack = asset.tracks(withMediaType: .video).first else { return }
guard let assetAudioTrack: AVAssetTrack = asset.tracks(withMediaType: .audio).first else { return }
var accumulatedTime = kCMTimeZero
for (startTimeForCurrentSlice, endTimeForCurrentSlice) in trimPoints {
let durationOfCurrentSlice = CMTimeSubtract(endTimeForCurrentSlice, startTimeForCurrentSlice)
let timeRangeForCurrentSlice = CMTimeRangeMake(startTimeForCurrentSlice, durationOfCurrentSlice)
do {
try videoCompTrack!.insertTimeRange(timeRangeForCurrentSlice, of: assetVideoTrack, at: accumulatedTime)
try audioCompTrack!.insertTimeRange(timeRangeForCurrentSlice, of: assetAudioTrack, at: accumulatedTime)
accumulatedTime = CMTimeAdd(accumulatedTime, durationOfCurrentSlice)
}
catch let compError {
print("TrimVideo: error during composition: \(compError)")
}
}
guard let exportSession = AVAssetExportSession(asset: composition, presetName: preferredPreset) else { return }
exportSession.outputURL = destinationURL as URL
exportSession.outputFileType = AVFileType.m4v
exportSession.shouldOptimizeForNetworkUse = true
removeFileAtURLIfExists(url: destinationURL as URL)
exportSession.exportAsynchronously {
completion()
}
}
else {
print("TrimVideo - Could not find a suitable export preset for the input video")
}
}
#IBAction func nextButtonPressed(_ sender: Any) {
if MyVariables.isScreenshot == true {
//get image from current time
print("screenshot")
guard let currentTime = trimmerView.currentPlayerTime else {
return
}
self.thumbnailImage = imageFromVideo(url: footageURL!, time: currentTime )
self.screenshotOut = imageFromVideo(url: footageURL!, time: currentTime )
self.performSegue(withIdentifier: "CreatePost_Segue", sender: nil)
} else {
print("video")
let outputFileName = NSUUID().uuidString
let outputFilePath = (NSTemporaryDirectory() as NSString).appendingPathComponent((outputFileName as NSString).appendingPathExtension("mov")!)
self.videoURL = URL(fileURLWithPath: outputFilePath)
trimVideo(sourceURL: self.footageURL!, destinationURL: self.videoURL!, trimPoints: [(trimmerView.startTime!,trimmerView.endTime!)], completion: self.finishVideo)
}
}
func finishVideo() -> Void {
guard let VideoURL = self.videoURL else { return }
self.thumbnailImage = setThumbnailFrom(path: VideoURL)
print("got to segue") //This does print successfully so something is happening in the segue...
self.performSegue(withIdentifier: "CreatePost_Segue", sender: nil)
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?){
if segue.identifier == "CreatePost_Segue" {
let controller = segue.destination as! CreatePostViewController
controller.thumbnailImage = self.thumbnailImage
controller.videoURL = self.videoURL
controller.screenshotOut = self.screenshotOut
}
}
So in nextButtonPressed you can see that if the media is a video (it is), I am using the trimVideo function with a custom completion handler, finishVideo, to create a thumbnail from the trimmed video as well as perform the segue itself.
Everything executes without error until the segue, so I believe I may be sending the data wrong? Perhaps something to do with screenshotOut not being set if it's a video?
The full error is
*** Assertion failure in void _UIPerformResizeOfTextViewForTextContainer(NSLayoutManager *, UIView<NSTextContainerView> *, NSTextContainer *, NSUInteger)(), /BuildRoot/Library/Caches/com.apple.xbs/Sources/UIFoundation/UIFoundation-546.2/UIFoundation/TextSystem/NSLayoutManager_Private.m:1619
I fixed this with the advice @aBilal17 gave to run the segue on the main thread (the export's completion handler is not called on the main thread), like so:
func finishVideo() -> Void {
guard let VideoURL = self.videoURL else { return }
self.thumbnailImage = setThumbnailFrom(path: VideoURL)
print("got to segue")
DispatchQueue.main.async {
self.performSegue(withIdentifier: "CreatePost_Segue", sender: nil)
}
}
I am having trouble recording audio and displaying it in a table view. I am able to record and immediately play it back, but the audio doesn't seem to actually be stored on the device permanently, so I am unable to call it from the table view. The directory also seems to change each time the app is opened. How can I correct my code so recordings are saved permanently and can be recalled when populating the table view rows?
func record() {
let audioSession:AVAudioSession = AVAudioSession.sharedInstance()
if (audioSession.respondsToSelector("requestRecordPermission:")) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
print("granted")
try! audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try! audioSession.setActive(true)
let documentsDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let fullPath = (documentsDirectory as NSString).stringByAppendingPathComponent("Mobile.PCM")
let url = NSURL.fileURLWithPath(fullPath)
print(fullPath)
let settings: [String : AnyObject] = [
AVFormatIDKey:Int(kAudioFormatAppleIMA4),
AVSampleRateKey:44100.0,
AVNumberOfChannelsKey:2,
AVEncoderBitRateKey:12800,
AVLinearPCMBitDepthKey:16,
AVEncoderAudioQualityKey:AVAudioQuality.Max.rawValue
]
try! self.audioRecorder = AVAudioRecorder(URL: url, settings: settings)
self.audioRecorder.meteringEnabled = true
self.audioRecorder.record()
} else{
print("not granted")
}
})
}
}
I am able to record and save with this:
#IBAction func record(sender: UIButton) {
if player != nil && player.playing {
player.stop()
}
if recorder == nil {
print("recording. recorder nil")
// recordButton.setTitle("Pause", forState:.Normal)
playButton.enabled = false
stopButton.enabled = true
recordWithPermission(true)
return
}
if recorder != nil && recorder.recording {
print("pausing")
recorder.pause()
recordButton.setTitle("Continue", forState:.Normal)
} else {
print("recording")
// recordButton.setTitle("Pause", forState:.Normal)
playButton.enabled = false
stopButton.enabled = true
recordWithPermission(false)
}
}
func setupRecorder() {
let format = NSDateFormatter()
format.dateFormat="yyyy-MM-dd-HH-mm-ss"
let currentFileName = "recording-\(format.stringFromDate(NSDate())).caf"
print(currentFileName)
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
self.soundFileURL = documentsDirectory.URLByAppendingPathComponent(currentFileName)
if NSFileManager.defaultManager().fileExistsAtPath(soundFileURL.absoluteString) {
print("soundfile \(soundFileURL.absoluteString) exists")
}
let recordSettings:[String : AnyObject] = [
AVFormatIDKey: NSNumber(unsignedInt:kAudioFormatAppleLossless),
AVEncoderAudioQualityKey : AVAudioQuality.Max.rawValue,
AVEncoderBitRateKey : 320000,
AVNumberOfChannelsKey: 2,
AVSampleRateKey : 44100.0
]
do {
recorder = try AVAudioRecorder(URL: soundFileURL, settings: recordSettings)
recorder.delegate = self
recorder.meteringEnabled = true
recorder.prepareToRecord()
} catch let error as NSError {
recorder = nil
print(error.localizedDescription)
}
}
func recordWithPermission(setup:Bool) {
let session:AVAudioSession = AVAudioSession.sharedInstance()
if (session.respondsToSelector("requestRecordPermission:")) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
print("Permission to record granted")
self.setSessionPlayAndRecord()
if setup {
self.setupRecorder()
}
self.recorder.record()
self.meterTimer = NSTimer.scheduledTimerWithTimeInterval(0.1,
target:self,
selector:"updateAudioMeter:",
userInfo:nil,
repeats:true)
} else {
print("Permission to record not granted")
}
})
} else {
print("requestRecordPermission unrecognized")
}
}
I am able to recall it in a TableView with this:
override func viewDidLoad() {
super.viewDidLoad()
tableView.reloadData()
listRecordings()
}
func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell {
let cell: UITableViewCell = self.tableView.dequeueReusableCellWithIdentifier("cell")!
cell.textLabel!.text = recordings[indexPath.row].lastPathComponent
return cell
}
func listRecordings() {
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
do {
let urls = try NSFileManager.defaultManager().contentsOfDirectoryAtURL(documentsDirectory, includingPropertiesForKeys: nil, options: NSDirectoryEnumerationOptions.SkipsHiddenFiles)
self.recordings = urls.filter( { (name: NSURL) -> Bool in
return name.lastPathComponent!.hasSuffix("caf")
})
} catch let error as NSError {
print(error.localizedDescription)
} catch {
print("something went wrong")
}
}
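One note on the directory appearing to change between launches: the app's sandbox container path is not guaranteed to stay the same between launches (it changes on app updates, and changes frequently in the Simulator), so persist only the file name (the lastPathComponent) and rebuild the full URL against the Documents directory at runtime. A rough sketch in the same Swift 2 style (the NSUserDefaults key is just for illustration):
// Store only the file name, never the absolute path
NSUserDefaults.standardUserDefaults().setObject(currentFileName, forKey: "lastRecording")
// Later, rebuild the URL against whatever the Documents directory is now
if let name = NSUserDefaults.standardUserDefaults().stringForKey("lastRecording") {
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
let fileURL = documentsDirectory.URLByAppendingPathComponent(name)
print(fileURL)
}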
How do I upload and load back images from CloudKit with Swift?
What attribute type do I use?
What code do I use? This is the code I am currently using:
func SaveImageInCloud(ImageToSave: UIImage) {
let newRecord:CKRecord = CKRecord(recordType: "ImageRecord")
newRecord.setValue(ImageToSave, forKey: "Image")
if let database = self.privateDatabase {
database.saveRecord(newRecord, completionHandler: { (record:CKRecord!, error:NSError! ) in
if error != nil {
NSLog(error.localizedDescription)
}
else {
dispatch_async(dispatch_get_main_queue()) {
println("finished")
}
}
})
}
}
You need to create a CKAsset and add that to your record. You can do that with code like this:
func SaveImageInCloud(ImageToSave: UIImage) {
let newRecord:CKRecord = CKRecord(recordType: "ImageRecord")
let nsDocumentDirectory = NSSearchPathDirectory.DocumentDirectory
let nsUserDomainMask = NSSearchPathDomainMask.UserDomainMask
if let paths = NSSearchPathForDirectoriesInDomains(nsDocumentDirectory, nsUserDomainMask, true) {
if paths.count > 0 {
if let dirPath = paths[0] as? String {
let writePath = dirPath.stringByAppendingPathComponent("Image2.png")
UIImagePNGRepresentation(ImageToSave).writeToFile(writePath, atomically: true)
var File : CKAsset? = CKAsset(fileURL: NSURL(fileURLWithPath: writePath))
newRecord.setValue(File, forKey: "Image")
}
}
}
if let database = self.privateDatabase {
database.saveRecord(newRecord, completionHandler: { (record:CKRecord!, error:NSError! ) in
if error != nil {
NSLog(error.localizedDescription)
} else {
dispatch_async(dispatch_get_main_queue()) {
println("finished")
}
}
})
}
}
Here's something similar to Edwin's answer but a little more compact. I've tested this and it works well.
This example saves the "myImage" UIImageView into the "mySaveRecord" CKRecord; just replace those names with your own.
let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as! String
let imageFilePath = documentDirectory.stringByAppendingPathComponent("lastimage")
UIImagePNGRepresentation(myImage).writeToFile(imageFilePath, atomically: true)
let asset = CKAsset(fileURL: NSURL(fileURLWithPath: imageFilePath))
mySaveRecord.setObject(asset, forKey: "ProfilePicture")
CKContainer.defaultContainer().publicCloudDatabase.saveRecord(mySaveRecord, completionHandler: {
record, error in
if error != nil {
println("\(error)")
} else {
//record saved successfully!
}
})
I created this little extension in Swift 5 to convert from UIImage to CKAsset:
extension UIImage {
func toCKAsset(name: String? = nil) -> CKAsset? {
guard let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first else {
return nil
}
guard let imageFilePath = NSURL(fileURLWithPath: documentDirectory)
.appendingPathComponent(name ?? "asset#\(UUID.init().uuidString)")
else {
return nil
}
do {
try self.pngData()?.write(to: imageFilePath)
return CKAsset(fileURL: imageFilePath)
} catch {
print("Error converting UIImage to CKAsset!")
}
return nil
}
}
You can then use it as you have it in your question:
if let asset = ImageToSave.toCKAsset() {
newRecord.setObject(asset, forKey: "Image")
CKContainer.defaultContainer().publicCloudDatabase.saveRecord(newRecord, completionHandler: {
record, error in
if error != nil {
println("\(error)")
} else {
// saved successfully!
}
})
}
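The question also asks how to load the image back, which none of the snippets above show. A minimal sketch of the download side (assuming the "ImageRecord" type and "Image" field from the question, and current CloudKit/Swift syntax):
func fetchImage(recordID: CKRecord.ID, completion: @escaping (UIImage?) -> Void) {
CKContainer.default().publicCloudDatabase.fetch(withRecordID: recordID) { record, error in
// The "Image" field holds a CKAsset; read its file back into a UIImage
guard let asset = record?["Image"] as? CKAsset,
let url = asset.fileURL,
let data = try? Data(contentsOf: url) else {
completion(nil)
return
}
completion(UIImage(data: data))
}
}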
This answer works with Swift 2.2 & iOS 9, and separates the file creation from the upload so that you can properly test against both, since they are distinct actions with their own potential issues.
For the uploadPhoto function, the recordType variable is the value you use in your CloudKit dashboard. The "photo" key in the photo["photo"] = asset line is the field name for your record type.
func uploadPhoto(image: UIImage, recordName: String) {
let privateDB = CKContainer.defaultContainer().privateCloudDatabase
let photoID = CKRecordID(recordName: recordName)
let photo = CKRecord(recordType: recordType, recordID: photoID)
let asset = CKAsset(fileURL: writeImage(image))
photo["photo"] = asset
privateDB.saveRecord(photo) { (record, error) in
guard error == nil else {
print(error?.localizedDescription)
return
}
print("Successful")
}
}
func writeImage(image: UIImage) -> NSURL {
let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).first!
let fileURL = documentsURL.URLByAppendingPathComponent(NSUUID().UUIDString + ".png")
if let imageData = UIImagePNGRepresentation(image) {
imageData.writeToURL(fileURL, atomically: false)
}
return fileURL
}
You can call this with the following:
uploadPhoto(UIImage(named: "foo.png")!, recordName: "bar")
You'll want to pick the Asset value type in the dashboard for this value.
newRecord.setValue(ImageToSave, forKey: "Image")
UIImage is not an allowed type on CKRecord. Your best option is to write this image out to a file, then create a CKAsset and set that on the record.