Swift: Firebase Storage Code Block Not Executing - iOS

I have the following function to pull images from a Firebase Storage database.
For some reason, the print(imageRef) line works fine, but the imageRef.getData() closure appears to be skipped completely: neither of the print statements ("error updating..." or "Got the image") executes.
What could be causing this?
func updateCurrentUser() {
var downloadedImages : [UIImage?] = []
for i in 0...8 {
let storageRef = Storage.storage().reference()
let imageRef = storageRef.child(self.currentUser.userid + "/img" + String(i) + ".jpg")
print(imageRef)
// Download in memory with a maximum allowed size of 1MB (1 * 1024 * 1024 bytes)
imageRef.getData(maxSize: 1 * 1024 * 1024) { data, error in
if let error = error {
print("error updating returning user: \(error.localizedDescription)")
} else {
// Data for the image is returned
let image = UIImage(data: data!)
downloadedImages[i] = image
print("Got the image")
}
}
}
self.currentUser.images = downloadedImages
}

Firebase is asynchronous, and Firebase data is only valid within the closure that follows the Firebase call. In a nutshell, code runs faster than the internet, and it takes time for data to download.
Here's an abbreviated version of your code; note the comments:
func updateCurrentUser() {
var downloadedImages : [UIImage?] = []
for i in 0...8 {
imageRef.getData(maxSize: 1 * 1024 * 1024) { data, error in
//firebase data is ONLY valid here so append each image to an array
self.currentUser.images.append(downloadedImage)
}
}
//the following line will execute WAY before the images are downloaded
// so self.currentUser.images will always be empty
self.currentUser.images = downloadedImages
}
You're probably going to want to use a completion handler as well. See my answer to this question for further reading.
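For illustration, here is a minimal sketch of that idea using a DispatchGroup plus a completion handler; the completion parameter and the fixed count of nine images are assumptions that mirror the question's loop, and error handling is kept minimal.
func updateCurrentUser(completion: @escaping ([UIImage?]) -> Void) {
    // Fixed-size array so each index keeps its slot even if downloads finish out of order.
    var downloadedImages: [UIImage?] = Array(repeating: nil, count: 9)
    let group = DispatchGroup()
    let storageRef = Storage.storage().reference()
    for i in 0...8 {
        let imageRef = storageRef.child(self.currentUser.userid + "/img\(i).jpg")
        group.enter()
        imageRef.getData(maxSize: 1 * 1024 * 1024) { data, error in
            defer { group.leave() }
            if let error = error {
                print("error updating returning user: \(error.localizedDescription)")
            } else if let data = data {
                downloadedImages[i] = UIImage(data: data)
            }
        }
    }
    // Runs only after every getData closure has called leave().
    group.notify(queue: .main) {
        self.currentUser.images = downloadedImages
        completion(downloadedImages)
    }
}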

Related

Import a big set in Core Data with relations in batches

I'm trying to import a large data set of around 80k objects, following Apple's example.
I have two issues:
In the code example there is a comment:
// taskContext.performAndWait runs on the URLSession's delegate queue
// so it won’t block the main thread.
But in my case I'm not using URLSession to fetch the JSON; the file is bundled with the app. In this case, how can I make sure the import won't block the main thread? Should I create a custom queue? Any example?
The example just imports an array of entities, but in my case I need to import a single entity that has 70k objects in a to-many relation.
So what I want to achieve is:
If there is a ContactBook, don't import anything, because the JSON has already been imported.
If there is no ContactBook, create one and import all 70k Contact objects into the contacts relation of the ContactBook. This should happen in batches, as in the example, and should not block the UI.
What I have tried:
private func insertContactbookIfNeeded() {
let fetch: NSFetchRequest<ContactBook> = ContactBook.fetchRequest()
let contactBookCount = (try? context.count(for: fetch)) ?? 0
if contactBookCount > 0 {
return
}
let contacts = Bundle.main.decode([ContactJSON].self, from: "contacts.json")
// Process records in batches to avoid a high memory footprint.
let batchSize = 256
let count = contacts.count
// Determine the total number of batches.
var numBatches = count / batchSize
numBatches += count % batchSize > 0 ? 1 : 0
for batchNumber in 0 ..< numBatches {
// Determine the range for this batch.
let batchStart = batchNumber * batchSize
let batchEnd = batchStart + min(batchSize, count - batchNumber * batchSize)
let range = batchStart..<batchEnd
// Create a batch for this range from the decoded JSON.
let contactsBatch = Array(contacts[range])
// Stop the entire import if any batch is unsuccessful.
if !importOneBatch(contactsBatch) {
assertionFailure("Could not import batch number \(batchNumber) range \(range)")
return
}
}
}
private func importOneBatch(_ contactsBatch: [ContactJSON]) -> Bool {
var success = false
// Create a private queue context.
let taskContext = container.newBackgroundContext()
taskContext.mergePolicy = NSMergeByPropertyObjectTrumpMergePolicy
// NOT TRUE IN MY CASE: (Any suggestion ??)
// taskContext.performAndWait runs on the URLSession's delegate queue
// so it won’t block the main thread.
print("isMainThread: \(Thread.isMainThread)") // prints true
taskContext.performAndWait {
let fetchRequest: NSFetchRequest<ContactBook> = ContactBook.fetchRequest()
fetchRequest.returnsObjectsAsFaults = true
fetchRequest.includesSubentities = false
let contactBookCount = (try? taskContext.count(for: fetchRequest)) ?? 0
var contactBook: ContactBook?
if contactBookCount > 0 {
do {
contactBook = try taskContext.fetch(fetchRequest).first
} catch let error as NSError {
assertionFailure("can't fetch the contactBook \(error)")
}
} else {
contactBook = ContactBook(context: taskContext)
}
guard let book = contactBook else {
assertionFailure("Could not fetch the contactBook")
return
}
// Create a new record for each contact in the batch.
for contactJSON in contactsBatch {
// Create a Contact managed object on the private queue context.
let contact = Contact(context: taskContext)
// Populate the Contact's properties using the raw data.
contact.name = contactJSON.name
contact.subContacts = NSSet(array: contactJSON.subContacts.map { subC -> Contact in
let subContact = Contact(context: taskContext)
subContact.name = subC.name
return subContact
})
book.addToContacts(contact)
}
// Save all insertions and deletions from the context to the store.
if taskContext.hasChanges {
do {
try taskContext.save()
} catch {
print("Error: \(error)\nCould not save Core Data context.")
return
}
// Reset the taskContext to free the cache and lower the memory footprint.
taskContext.reset()
}
success = true
}
return success
}
The problem is that this is very slow, because in each batch I fetch the contact book (which gets bigger with each iteration) so that I can insert the new batch of contacts into it. Is there an efficient way to avoid fetching the contact book in each batch? Also, any suggestions to make this faster? Increase the batch size? Create a background queue?
Update:
I have tried to create the ContactBook once in insertContactbookIfNeeded and pass it to importOneBatch with each iteration, but I get:
Thread 1: Exception: "Illegal attempt to establish a relationship
'contactBook' between objects in different contexts"
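For what it's worth, a managed object itself can't be shared between contexts, but its NSManagedObjectID can. Below is a minimal sketch of that idea, reusing the names from the question; the bookID parameter is an assumption, and it presumes the ContactBook was created and saved once up front (so its ID is permanent). This also removes the per-batch fetch request.
private func importOneBatch(_ contactsBatch: [ContactJSON], bookID: NSManagedObjectID) -> Bool {
    var success = false
    let taskContext = container.newBackgroundContext()
    taskContext.mergePolicy = NSMergeByPropertyObjectTrumpMergePolicy
    taskContext.performAndWait {
        // existingObject(with:) materializes the ContactBook in *this* context,
        // so the relationship is established between objects of the same context.
        guard let book = (try? taskContext.existingObject(with: bookID)) as? ContactBook else { return }
        for contactJSON in contactsBatch {
            let contact = Contact(context: taskContext)
            contact.name = contactJSON.name
            book.addToContacts(contact)
        }
        if taskContext.hasChanges {
            do { try taskContext.save() } catch { return }
            // Reset to free memory between batches, as in the original code.
            taskContext.reset()
        }
        success = true
    }
    return success
}
Running the whole batch loop from a background queue (e.g. inside DispatchQueue.global(qos: .utility).async) would additionally keep performAndWait off the main thread, which addresses the "isMainThread: true" observation.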

Deprecated withUnsafeBytes [duplicate]

I previously used this code in Swift 4.2 to generate an id:
public static func generateId() throws -> UInt32 {
let data: Data = try random(bytes: 4)
let value: UInt32 = data.withUnsafeBytes { $0.pointee } // deprecated warning!
return value // + some other stuff
}
withUnsafeBytes is deprecated in Swift 5.0. How can I solve this?
In Swift 5 the withUnsafeBytes() method of Data calls the closure with an (untyped) UnsafeRawBufferPointer, and you can load() the value from the raw memory:
let value = data.withUnsafeBytes { $0.load(as: UInt32.self) }
(compare How to use Data.withUnsafeBytes in a well-defined manner? in the Swift forum). Note that this requires that the memory is aligned on a 4-byte boundary. For alternatives see round trip Swift number types to/from Data.
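If the alignment requirement is a concern, one alignment-independent variant (a sketch of the copy-based approach, not code from the linked answer) is to copy the bytes into the integer instead of loading in place:
var value: UInt32 = 0
withUnsafeMutableBytes(of: &value) { destination in
    // Copies the first 4 bytes of `data` into value's storage,
    // regardless of how those bytes happen to be aligned.
    destination.copyBytes(from: data.prefix(4))
}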
Note also that as of Swift 4.2 you can create a random 32-bit integer simply using the new Random API:
let randomId = UInt32.random(in: .min ... .max)
On Xcode 10.2 with Swift 5, using $0.load(as:) didn't work for me, either when reading from the pointer or when writing to it.
Instead, using $0.baseAddress?.assumingMemoryBound(to:) seems to work well.
Example reading from the pointer buffer (code is unrelated to the question):
var reachability: SCNetworkReachability?
data.withUnsafeBytes { ptr in
guard let bytes = ptr.baseAddress?.assumingMemoryBound(to: Int8.self) else {
return
}
reachability = SCNetworkReachabilityCreateWithName(nil, bytes)
}
Example writing to the buffer pointer (code is unrelated to the question):
try outputData.withUnsafeMutableBytes { (outputBytes: UnsafeMutableRawBufferPointer) in
let status = CCKeyDerivationPBKDF(CCPBKDFAlgorithm(kCCPBKDF2),
passphrase,
passphrase.utf8.count,
salt,
salt.utf8.count,
CCPseudoRandomAlgorithm(kCCPRFHmacAlgSHA1),
rounds,
outputBytes.baseAddress?.assumingMemoryBound(to: UInt8.self),
kCCKeySizeAES256)
guard status == kCCSuccess else {
throw Error.keyDerivationError
}
}
The code from the question would look like:
let value = data.withUnsafeBytes {
$0.baseAddress?.assumingMemoryBound(to: UInt32.self).pointee
}
In cases where the 'withUnsafeBytes' is deprecated: use withUnsafeBytes<R>(…) warning persists, it seems the compiler can get confused when the closure has only one line. Making the closure two or more lines might remove the ambiguity.
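For example, such a closure (a hypothetical sketch; the explicit parameter type is an addition that also removes any ambiguity) might look like:
let value = data.withUnsafeBytes { (buffer: UnsafeRawBufferPointer) -> UInt32 in
    // Two statements instead of one, with the buffer type spelled out.
    let loaded = buffer.load(as: UInt32.self)
    return loaded
}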
One more way to fix this warning is to use bindMemory(to:).
var rawKey = Data(count: rawKeyLength)
let status = rawKey.withUnsafeMutableBytes { rawBytes -> Int32 in
guard let rawBytes = rawBytes.bindMemory(to: UInt8.self).baseAddress else {
return Int32(kCCMemoryFailure)
}
return CCSymmetricKeyUnwrap(alg, ivBytes, iv.count, keyBytes, key.count, wrappedKeyBytes, wrappedKey.count, rawBytes, &rawKeyLength)
}
I got this deprecation warning while trying to work through a compression stream tutorial. To get it to work, I added a step that converts the raw buffer pointer to an UnsafePointer<UInt8>.
Original code from the tutorial I was working on (where input: Data and stream: compression_stream):
//Method that shows the deprecation alert
return input.withUnsafeBytes { (srcPointer: UnsafePointer<UInt8>) in
//holder
var output = Data()
//Source and destination buffers
stream.src_ptr = srcPointer //UnsafePointer<UInt8>
stream.src_size = input.count
… etc.
}
Code with a conversion to make the above work with the non-deprecated method:
return input.withUnsafeBytes { bufferPtr in
//holder
var output = Data()
//Get the Raw pointer at the initial position of the UnsafeRawBuffer
let base: UnsafeRawPointer? = bufferPtr.baseAddress
//Unwrap (Can be combined with above, but kept it separate for clarity)
guard let srcPointer = base else {
return output
}
//Bind the memory to the type
let count = bufferPtr.count
let typedPointer: UnsafePointer<UInt8> = srcPointer.bindMemory(to: UInt8.self, capacity: count)
// Jump back into the original method
stream.src_ptr = typedPointer //UnsafePointer<UInt8>
stream.src_size = input.count
// … etc.
}

Irregular timestamp in CMMotionManager

Researched this a lot for the past week but could not find a similar issue. I am collecting accelerometer data on the Apple Watch and looking at the timestamp output by the DeviceMotion class, which presents an issue: the timestamps do not always come in chronological order. For example, if I collect a few minutes of motion data at 100Hz, I get the following timestamps from a CSV file I generate in the same app (these are UTC timestamps):
1531099702.768570
1531099702.778460
1531099702.788380
1531099702.798270
1531099702.738870
1531099702.748780
1531099702.758670
1531099702.768590
If you notice, at the 5th timestamp, the time has jumped back by a significant amount compared to the 4th one.
Does anyone know what might cause this?
Thanks!
This is my code:
func startUpdates() {
motionManager = CMMotionManager()
if motionManager.isDeviceMotionAvailable {
self.motionManager.deviceMotionUpdateInterval = updateInterval
self.motionManager.showsDeviceMovementDisplay = true
self.motionManager.startDeviceMotionUpdates(using: .xArbitraryZVertical, to: .main, withHandler: { (data, error) in
// Make sure the data is valid before accessing it.
if let validData = data {
// find boot time of device to normalize timestamps
if (self.accArray.isEmpty) {
self.bootTime = Date(timeIntervalSinceNow:-validData.timestamp)
}
self.accArray.append([validData.timestamp + self.bootTime.timeIntervalSince1970,
validData.attitude.pitch,
validData.attitude.roll,
validData.attitude.yaw,
validData.userAcceleration.x,
validData.userAcceleration.y,
validData.userAcceleration.z,
validData.rotationRate.x,
validData.rotationRate.y,
validData.rotationRate.z,
validData.gravity.x,
validData.gravity.y,
validData.gravity.z])
if (self.accArray.count == 100) {
let accChunk = NSKeyedArchiver.archivedData(withRootObject: self.accArray)
watchSessionManager.sharedManager.sendMessageData(accChunk: accChunk)
self.reset()
}
}
})
}
}
All data from accArray is inserted into a CSV file which is generated in the following manner:
// Create CSV with collected array
func createCSV() -> String {
if(self.accArray.count == 0) {
return("No data captured")
}
else {
// Create CSV structure
var csvText = "Number,Pitch,Roll,Yaw,RotX,RotY,RotZ,GravX,GravY,GravZ,AccX,AccY,AccZ,Time\n"
for index in 0...accArray.count-1 {
let newLine = [String(index),
String(accArray[index][1]),
String(accArray[index][2]),
String(accArray[index][3]),
String(accArray[index][4]),
String(accArray[index][5]),
String(accArray[index][6]),
String(accArray[index][7]),
String(accArray[index][8]),
String(accArray[index][9]),
String(accArray[index][10]),
String(accArray[index][11]),
String(accArray[index][12]),
String(accArray[index][0])].joined(separator: ",")
// Insert data into CSV file
csvText.append(newLine + "\n")
}
self.reset()
return(csvText)
}
}

Is the iOS iPhone simulator causing memory usage analysis to swell?

I am trying to process a large text file in my app. I know that I want to be careful with the amount of memory being consumed while I read the data. Once a piece of data is read the app does not need to keep the data around.
Thanks to “Martin R” and the post Read a file/URL line-by-line for helping me jump start my effort.
I am trying to monitor the memory consumption of my app as it reads in the large data file so that I can be sure it is behaving as expected. Here’s where I am running into a problem.
When I run Instruments using Command-I from within Xcode and monitor allocations, I see that during the read of the file the app peaks at ~15MB and then drops back down. This is fairly repeatable, +/- 0.5MB.
When I run the app using Command-R from within Xcode, let it finish reading through the file, and then press Record within Instruments, the memory consumption swells to ~360MB.
So to clarify, the two ways I have done measurement of memory allocations are:
Profile:
1. Xcode Command-I.
2. Instruments Record Allocations. Observe ~15MB
Simulate and Profile:
1. Xcode Command-R.
2. Let app run to “IDLE”.
3. Instruments Record. Observe ~360MB.
I have been trying to figure out a few things here.
Q1. Why the difference? (This may answer all my questions)
Q2. Do I have a real problem or is this only a problem because of how debug code is annotated on to the simulator?
Q3. Similar to Q2, if I run a debug build on a real device, will it have the same issue?
Q4. For my app, ~15MB is acceptable when parsing the file, but ~360MB will not be. Is there another way I can continue to debug on my device without taking this 360MB hit?
Version 8.1 (8B62), macOS Sierra, 2.7GHz i5 MacBook Pro (circa 2015)
Sample code is attached. The first part of the file is merely a copy of the code from the referenced post, for reader convenience. One can take this code as is and run it in Xcode. At the bottom is the ViewController's viewDidLoad() method, where things "run". The memory "swell" happens after "File opened" is printed.
//
//
import UIKit
/* Originally from
* stackoverflow:
* https://stackoverflow.com/questions/24581517/read-a-file-url-line-by-line-in-swift
* posted by Martin R.
* Much thanks!
*/
class StreamReader {
let encoding : String.Encoding
let chunkSize : Int
var fileHandle : FileHandle!
let delimData : Data
var buffer : Data
var atEof : Bool
init?(path: String, delimiter: String = "\n", encoding: String.Encoding = .utf8,
chunkSize: Int = 4096) {
guard let fileHandle = FileHandle(forReadingAtPath: path),
let delimData = delimiter.data(using: encoding) else {
return nil
}
self.encoding = encoding
self.chunkSize = chunkSize
self.fileHandle = fileHandle
self.delimData = delimData
self.buffer = Data(capacity: chunkSize)
self.atEof = false
}
deinit {
self.close()
}
/// Return next line, or nil on EOF.
func nextLine() -> String? {
precondition(fileHandle != nil, "Attempt to read from closed file")
// Read data chunks from file until a line delimiter is found:
while !atEof {
if let range = buffer.range(of: delimData) {
// Convert complete line (excluding the delimiter) to a string:
let line = String(data: buffer.subdata(in: 0..<range.lowerBound), encoding: encoding)
// Remove line (and the delimiter) from the buffer:
buffer.removeSubrange(0..<range.upperBound)
return line
}
let tmpData = fileHandle.readData(ofLength: chunkSize)
if tmpData.count > 0 {
buffer.append(tmpData)
} else {
// EOF or read error.
atEof = true
if buffer.count > 0 {
// Buffer contains last line in file (not terminated by delimiter).
let line = String(data: buffer as Data, encoding: encoding)
buffer.count = 0
return line
}
}
}
return nil
}
/// Start reading from the beginning of file.
func rewind() -> Void {
fileHandle.seek(toFileOffset: 0)
buffer.count = 0
atEof = false
}
/// Close the underlying file. No reading must be done after calling this method.
func close() -> Void {
fileHandle?.closeFile()
fileHandle = nil
}
}
extension StreamReader : Sequence {
func makeIterator() -> AnyIterator<String> {
return AnyIterator {
return self.nextLine()
}
}
}
class ViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
let path2WordList = Bundle.main.path(forResource: "large_text_file", ofType: "txt")
var wordCnt: Int = 0
if nil != path2WordList {
if let aStreamReader = StreamReader(path: path2WordList!) {
defer { aStreamReader.close() }
print("File openned")
/* Read and discard */
while aStreamReader.nextLine() != nil {
wordCnt += 1
}
} // if let ...
} // if nil ...
print ("Final wordCnt := \(wordCnt)")
} // viewDidLoad
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
I've encountered problems like this when using long-running while loops. The problem is that anything allocated in the current autorelease pool won't get deallocated until the loop exits.
To guard against this, you can wrap the contents of your while loop in autoreleasepool(invoking:). This will cause each iteration of your loop to have its own autorelease pool that is drained each time.
It would look something like this:
/// Return next line, or nil on EOF.
func nextLine() -> String? {
precondition(fileHandle != nil, "Attempt to read from closed file")
var result: String? = nil
// Read data chunks from file until a line delimiter is found:
while !atEof, result == nil {
result = autoreleasepool {
if let range = buffer.range(of: delimData) {
// Convert complete line (excluding the delimiter) to a string:
let line = String(data: buffer.subdata(in: 0..<range.lowerBound), encoding: encoding)
// Remove line (and the delimiter) from the buffer:
buffer.removeSubrange(0..<range.upperBound)
return line
}
let tmpData = fileHandle.readData(ofLength: chunkSize)
if tmpData.count > 0 {
buffer.append(tmpData)
} else {
// EOF or read error.
atEof = true
if buffer.count > 0 {
// Buffer contains last line in file (not terminated by delimiter).
let line = String(data: buffer as Data, encoding: encoding)
buffer.count = 0
return line
}
}
return nil
}
}
return result
}
As to whether your memory growth is a side effect of the debug environment, it's hard to say. But it would probably be wise to guard against this kind of growth regardless.

Read a text file line by line in Swift?

I just started learning Swift. I have gotten my code to read from the text file, and the app displays the content of the entire text file. How can I display it line by line and call upon a given line multiple times?
TextFile.txt contains the following:
1. Banana
2. Apple
3. pear
4. strawberry
5. blueberry
6. blackcurrant
The following is what I currently have:
if let path = NSBundle.mainBundle().pathForResource("TextFile", ofType: "txt"){
var data = String(contentsOfFile:path, encoding: NSUTF8StringEncoding, error: nil)
if let content = (data){
TextView.text = content
}
}
If there is another way of doing this please let me know. It would be much appreciated.
Swift 3.0
if let path = Bundle.main.path(forResource: "TextFile", ofType: "txt") {
do {
let data = try String(contentsOfFile: path, encoding: .utf8)
let myStrings = data.components(separatedBy: .newlines)
TextView.text = myStrings.joined(separator: ", ")
} catch {
print(error)
}
}
The variable myStrings should contain each line of the data.
The code used is from:
Reading file line by line in iOS SDK written in Obj-C and using NSString
Check edit history for previous versions of Swift.
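Since the question asks about calling upon a line multiple times: once myStrings has been built, any element can be indexed and reused as often as needed. For example:
// Assuming myStrings was built as above from TextFile.txt
let firstLine = myStrings[0]   // "1. Banana"
TextView.text = firstLine      // show it in the UI
print(firstLine)               // ...and reuse the same line elsewhere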
Swift 5.5
The solution below shows how to read one line at a time, which is quite different from reading the entire contents into memory. Reading line by line scales well if you have a large file to read; putting an entire file into memory does not.
The example below uses a while loop that quits when there are no more lines, but you can choose a different number of lines to read if you wish.
The code works as follows:
create a URL that tells where the file is located
make sure the file exists
open the file for reading
set up some initial variables for reading
read each line using getline()
close the file and free the buffer when done
You could make the code less verbose if you wish; I have included comments to explain what the variables' purposes are.
import Cocoa
// get the URL to the Documents directory in the sandbox
let home = FileManager.default.homeDirectoryForCurrentUser
// add a filename
let fileUrl = home
.appendingPathComponent("Documents")
.appendingPathComponent("my_file")
.appendingPathExtension("txt")
// make sure the file exists
guard FileManager.default.fileExists(atPath: fileUrl.path) else {
preconditionFailure("file expected at \(fileUrl.absoluteString) is missing")
}
// open the file for reading
// note: user should be prompted the first time to allow reading from this location
guard let filePointer:UnsafeMutablePointer<FILE> = fopen(fileUrl.path,"r") else {
preconditionFailure("Could not open file at \(fileUrl.absoluteString)")
}
// a pointer to a null-terminated, UTF-8 encoded sequence of bytes
var lineByteArrayPointer: UnsafeMutablePointer<CChar>? = nil
// see the official Swift documentation for more information on the `defer` statement
// https://docs.swift.org/swift-book/ReferenceManual/Statements.html#grammar_defer-statement
defer {
// remember to close the file when done
fclose(filePointer)
// The buffer should be freed even if getline() failed.
lineByteArrayPointer?.deallocate()
}
// the smallest multiple of 16 that will fit the byte array for this line
var lineCap: Int = 0
// initial iteration
var bytesRead = getline(&lineByteArrayPointer, &lineCap, filePointer)
while (bytesRead > 0) {
// note: this translates the sequence of bytes to a string using UTF-8 interpretation
let lineAsString = String.init(cString:lineByteArrayPointer!)
// do whatever you need to do with this single line of text
// for debugging, can print it
print(lineAsString)
// updates number of bytes read, for the next iteration
bytesRead = getline(&lineByteArrayPointer, &lineCap, filePointer)
}
If you have a huge file and don't want to load all of its data into memory with String, Data, etc., you can use the function readLine(), which reads content from standard input line by line until EOF is reached.
let path = "path/file.txt"
guard let file = freopen(path, "r", stdin) else {
return
}
defer {
fclose(file)
}
while let line = readLine() {
print(line)
}
This is not pretty, but I believe it works (in Swift 5). It uses the underlying POSIX getline function for iteration and file reading.
typealias LineState = (
// pointer to a C string representing a line
linePtr:UnsafeMutablePointer<CChar>?,
linecap:Int,
filePtr:UnsafeMutablePointer<FILE>?
)
/// Returns a sequence which iterates through all lines of the file at the URL.
///
/// - Parameter url: file URL of a file to read
/// - Returns: a Sequence which lazily iterates through lines of the file
///
/// - warning: the caller of this function **must** iterate through all lines of the file, since aborting iteration midway will leak memory and a file pointer
/// - precondition: the file must be UTF8-encoded (which includes, ASCII-encoded)
func lines(ofFile url:URL) -> UnfoldSequence<String,LineState>
{
let initialState:LineState = (linePtr:nil, linecap:0, filePtr:fopen(url.path,"r"))
return sequence(state: initialState, next: { (state) -> String? in
if getline(&state.linePtr, &state.linecap, state.filePtr) > 0,
let theLine = state.linePtr {
return String.init(cString:theLine)
}
else {
if let actualLine = state.linePtr { free(actualLine) }
fclose(state.filePtr)
return nil
}
})
}
Here is how you might use it:
for line in lines(ofFile:myFileURL) {
print(line)
}
Probably the simplest and easiest way to do this in Swift 5.0 would be the following:
import Foundation
// Determine the file name
let filename = "main.swift"
// Read the contents of the specified file
let contents = try! String(contentsOfFile: filename)
// Split the file into separate lines
let lines = contents.split(separator:"\n")
// Iterate over each line and print the line
for line in lines {
print("\(line)")
}
Note: This reads the entire file into memory, and then just iterates over the file in memory to produce lines....
Credit goes to: https://wiki.codermerlin.com/mediawiki/index.php/Code_Snippet:_Print_a_File_Line-by-Line
Update for Swift 2.0 / Xcode 7.2
do {
if let path = NSBundle.mainBundle().pathForResource("TextFile", ofType: "txt"){
let data = try String(contentsOfFile:path, encoding: NSUTF8StringEncoding)
let myStrings = data.componentsSeparatedByCharactersInSet(NSCharacterSet.newlineCharacterSet())
print(myStrings)
}
} catch let err as NSError {
//do sth with Error
print(err)
}
Also worth mentioning: this code reads a file from the app bundle (since pathForResource is used), not from e.g. the Documents folder of the device.
You probably do want to read the entire file in at once. I bet it's very small.
But then you want to split the resulting string into an array, and then distribute the array's contents among various UI elements, such as table cells.
A simple example:
var x: String = "abc\ndef"
var y = x.componentsSeparatedByString("\n")
// y is now a [String]: ["abc", "def"]
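Since the question mentions distributing lines among UI elements such as table cells, here is a sketch (in current Swift syntax; the "LineCell" reuse identifier is hypothetical) of a data source that hands one line to each cell:
import UIKit

// Sketch of a table view data source that shows one line of the file per cell.
class LinesDataSource: NSObject, UITableViewDataSource {
    private let lines: [String]

    init(fileContents: String) {
        // Same idea as the split above: one element per line.
        self.lines = fileContents.components(separatedBy: "\n")
    }

    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return lines.count
    }

    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        // "LineCell" is a hypothetical reuse identifier registered elsewhere.
        let cell = tableView.dequeueReusableCell(withIdentifier: "LineCell", for: indexPath)
        cell.textLabel?.text = lines[indexPath.row]
        return cell
    }
}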
One more getline solution. Easy to use: just copy-paste.
Tested in a real project.
extension URL
{
func foreachRow(_ mode:String, _ rowParcer:((String, Int)->Bool) )
{
//Here we should use path, not absoluteString (which contains file://)
let path = self.path
guard let cfilePath = (path as NSString).utf8String,
let m = (mode as NSString).utf8String
else {return}
//Open file with specific mode (just use "r")
guard let file = fopen(cfilePath, m)
else {
print("fopen can't open file: \"\(path)\", mode: \"\(mode)\"")
return
}
//Row capacity for getline()
var cap = 0
var row_index = 0
//Row container for getline()
var cline:UnsafeMutablePointer<CChar>? = nil
//Free memory and close file at the end
defer{free(cline); fclose(file)}
while getline(&cline, &cap, file) > 0
{
if let crow = cline,
// the output line may contain '\n' that's why we filtered it
let s = String(utf8String: crow)?.filter({($0.asciiValue ?? 0) >= 32})
{
if rowParcer(s, row_index)
{
break
}
}
row_index += 1
}
}
}
Usage:
let token = "mtllib "
var mtlRow = ""
largeObjFileURL.foreachRow("r"){ (row, i) -> Bool in
if row.hasPrefix(token)
{
mtlRow = row
return true // end of file reading
}
return false // continue file reading
}
Here is an example of writing and reading a text file one line at a time in Swift 5. It reads one line at a time and includes EOF detection.
//
// main.swift
// IO
//
// Created by Michael LeVine on 8/30/22.
//
import Foundation
let file = "file.txt" //this is the file. we will write to and read from it
let text = "some text\n" //just a text
// test file will be placed on the desktop
let dir = FileManager.default.urls(for: .desktopDirectory, in: .userDomainMask).first
let fileURL = dir!.appendingPathComponent(file).path
let fileURL2 = dir!.appendingPathComponent(file)
let fileManager = FileManager.default
// the following variable is used by EOF detection, which also uses fileManager internally
var eofOffset: UInt64 = 0
if fileManager.fileExists(atPath: fileURL) {
do { try fileManager.removeItem(atPath: fileURL)}
catch {
print("Error removeing old \(fileURL)")
exit(1)
}
}
// create the new file
fileManager.createFile(atPath: fileURL, contents:Data(" ".utf8), attributes: nil)
var fileHandle = FileHandle(forWritingAtPath: fileURL)
//writing
for _ in 1...10 {
fileHandle!.write(text.data(using: .utf8)!)
}
do {
try fileHandle!.close()
}
catch { print("write close error \(error)")
exit(1)
}
// now to read text file by 2 methods
// first use String to read whole file in one gulp
let contents = try! String(contentsOfFile: fileURL)
let lines = contents.split(separator: "\n")
var i: Int = 0
// print out one way
for line in lines {
print("\(i) \(line)")
i=i+1
}
// printout another way
for j in 0...9 {
print("\(i) \(j) \(lines[j])")
i = i + 1
}
//Open up to see about reading line at a time
fileHandle = FileHandle(forReadingAtPath: fileURL)
eofInit() // must be called immediately after fileHandle init
var outputLine: String = ""
i = 0
// read a line and print it out as received
while true {
outputLine = getLine()
if eofTest(){
if outputLine.count > 0 {
print("\(i) \(outputLine)")
}
exit(1)
}
print("\(i) \(outputLine)")
i = i + 1
}
// function reads one character at each call and returns it as a 1 character string
// is called only by "getLine"
func getChar() -> String {
var ch: Data
if eofTest() {
return ""
}
do {
try ch = fileHandle!.read(upToCount: 1)! // read 1 character from text file
} catch { print("read 1 char \(error)")
exit(1)
}
let ch2: UnicodeScalar = UnicodeScalar(ch[0]) // convert to unicode scalar as intermediate value
let ch3: String = String(ch2) // Now create string containing that one returned character
return ch3 // and pass to calling function
}
// read in whole line one character at a time -- assumes line terminated by linefeed
func getLine() -> String {
var outputLine : String = ""
var char : String = ""
// keep fetching characters till line feed/eof found
lineLoop:
while true { // its an infinite loop
if eofTest() {
break lineLoop
}
char = getChar() // get next character
if char == "\n" { // test for linefeed
break lineLoop // if found exit loop
}
outputLine.append(char) // lf not found -- append char to output line
}
return outputLine // got line -- return it to calling routine
}
// EOF handling
// init routine must be called immediately after fileHandle is initialized to get the current position
// at the start of the file
func eofInit()
{ var beginningOffset: UInt64 = 0
do {
try beginningOffset = fileHandle!.offset()
try eofOffset = fileHandle!.seekToEnd()
try fileHandle!.seek(toOffset: beginningOffset)
} catch {
print("Init eof detection error \(error)")
}
}
func eofTest() -> Bool{
var current: UInt64 = 0
do {
current = try fileHandle!.offset()
} catch {
print("eof test get current \(error)")
exit(1)
}
if current < eofOffset {
return false
} else {
return true
}
}
Based on Jason Cross's answer, a simplified line-by-line reader (gist).
import Darwin
class FileLineReader {
init?(path: String, removeNewLineOnEnd: Bool = true) {
file = fopen(path, "r")
self.removeNewLineOnEnd = removeNewLineOnEnd
if file == nil {
return nil
}
}
deinit {
fclose(file)
}
var iterator: AnyIterator<String> {
return AnyIterator(self.getNextLine)
}
func getNextLine() -> String? {
var line: UnsafeMutablePointer<CChar>!
var linecap: Int = 0
defer { free(line) }
if getline(&line, &linecap, file) > 0 {
if removeNewLineOnEnd {
var i = 0
while line[i] != 0 { i += 1 }
if i > 0 && line[i-1] == 10 { // new line symbol
line[i-1] = 0
}
}
return String(cString: line)
} else {
return nil
}
}
private let file: UnsafeMutablePointer<FILE>!
private let removeNewLineOnEnd: Bool
}
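Possible usage, assuming a plain-text file at a path of your choosing (the path below is made up):
// Hypothetical usage of the FileLineReader above.
if let reader = FileLineReader(path: "/tmp/example.txt") {
    for line in reader.iterator {
        print(line)
    }
}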
iUrii's approach may not work if you need to open several files.
