I am making an app about books.
In the app, I want to auto-fill the book info by reading the ISBN from a barcode.
There are two view classes: one is 'UploadMain', the other is 'ScanView'.
I can get the ISBN by scanning, but I have a problem passing the data from ScanView to UploadMain.
In ScanView I have used optional binding like below:
if let UploadVC = self.storyboard?.instantiateViewControllerWithIdentifier("UploadMain") as? UploadMain {
UploadVC.ISBNstring = self.detectionString!
}
Code for UploadMain Class
override func viewDidLoad(){
super.viewDidLoad()
ISBN.delegate = self
}
override func viewWillAppear(animated: Bool){
ISBN.text = ISBNstring
}
I don't know what the problem with my code is.
Full code of UploadMain:
import UIKit
import Foundation
class UploadMain: UIViewController,UITextFieldDelegate {
var ISBNstring: String = ""
var TitleString: String = ""
var AuthorString: String = ""
var PubString: String = ""
var PriceSting: String = ""
@IBOutlet weak var ISBN: UITextField!
@IBOutlet weak var bookTitle: UITextField!
@IBOutlet weak var bookAuthor: UITextField!
@IBOutlet weak var bookPub: UITextField!
@IBOutlet weak var bookPrice: UITextField!
override func viewDidLoad(){
super.viewDidLoad()
ISBN.delegate = self
}
override func viewWillAppear(animated: Bool){
ISBN.text = ISBNstring
}
@IBAction func Upload(sender: AnyObject) {
dismissViewControllerAnimated(true, completion: nil)
}
}
ScanView class
import UIKit
import AVFoundation
import Foundation
class ScanView : UIViewController, AVCaptureMetadataOutputObjectsDelegate {
let session : AVCaptureSession = AVCaptureSession()
var previewLayer : AVCaptureVideoPreviewLayer!
var detectionString : String!
let apiKey : String = "---------dddddd"
override func viewDidLoad() {
super.viewDidLoad()
// For the sake of discussion this is the camera
let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
// Create a nilable NSError to hand off to the next method.
// Make sure to use the "var" keyword and not "let"
var error : NSError? = nil
var input: AVCaptureDeviceInput = AVCaptureDeviceInput()
do {
input = try AVCaptureDeviceInput(device: device) as AVCaptureDeviceInput
} catch let myJSONError {
print(myJSONError)
}
// If our input is not nil then add it to the session, otherwise we're kind of done!
if input != AVCaptureDeviceInput() {
session.addInput(input)
}
else {
// This is fine for a demo, do something real with this in your app. :)
print(error)
}
let output = AVCaptureMetadataOutput()
output.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
session.addOutput(output)
output.metadataObjectTypes = output.availableMetadataObjectTypes
previewLayer = AVCaptureVideoPreviewLayer(session: session)
previewLayer.frame = self.view.bounds
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
self.view.layer.addSublayer(previewLayer)
// Start the scanner. You'll have to end it yourself later.
session.startRunning()
}
// This is called when we find a known barcode type with the camera.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
var highlightViewRect = CGRectZero
var barCodeObject : AVMetadataObject!
let barCodeTypes = [AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code]
// The scanner is capable of capturing multiple 2-dimensional barcodes in one scan.
for metadata in metadataObjects {
for barcodeType in barCodeTypes {
if metadata.type == barcodeType {
barCodeObject = self.previewLayer.transformedMetadataObjectForMetadataObject(metadata as! AVMetadataMachineReadableCodeObject)
highlightViewRect = barCodeObject.bounds
detectionString = (metadata as! AVMetadataMachineReadableCodeObject).stringValue
self.session.stopRunning()
self.alert(detectionString)
// Call the Daum Book API
let apiURI = NSURL(string: "https://apis.daum.net/search/book?apikey=\(apiKey)&q=\(detectionString)&searchType=isbn&output=json")
let apidata : NSData? = NSData(contentsOfURL: apiURI!)
NSLog("API Result = %@", NSString(data: apidata!, encoding: NSUTF8StringEncoding)!)
if let UploadVC = self.storyboard?.instantiateViewControllerWithIdentifier("UploadMain") as? UploadMain {
UploadVC.ISBNstring = self.detectionString!
}
break
}
}
}
print(detectionString)
self.navigationController?.popViewControllerAnimated(true)
}
func alert(Code: String){
let actionSheet:UIAlertController = UIAlertController(title: "Barcode", message: "\(Code)", preferredStyle: UIAlertControllerStyle.Alert)
// for alert add .Alert instead of .Action Sheet
// start copy
let firstAlertAction:UIAlertAction = UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler:
{
(alertAction:UIAlertAction!) in
// action when pressed
self.session.startRunning()
})
actionSheet.addAction(firstAlertAction)
// Note: the alert controller is never shown unless it is presented.
self.presentViewController(actionSheet, animated: true, completion: nil)
}
}
In ScanView you are creating a new instance of UploadMain that is not in the window hierarchy, so the data never reaches the UploadMain instance that is actually on screen. To solve your problem you need to create a protocol and hand ScanView a delegate that conforms to it. So create one protocol like this:
protocol IsbnDelegate {
func passData(isbnStr: String)
}
Now adopt this protocol in UploadMain and implement its passData method, like below:
class UploadMain: UIViewController,UITextFieldDelegate,IsbnDelegate {
//your code
//Add this method
func passData(isbnStr: String) {
self.ISBN.text = isbnStr
}
//Also override prepareForSegue like this
override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
let destVC = segue.destinationViewController as! ScanView
destVC.delegate = self
}
}
After that, declare a delegate property in ScanView and change your ScanView code like this:
import UIKit
import AVFoundation
import Foundation
class ScanView : UIViewController, AVCaptureMetadataOutputObjectsDelegate {
let session : AVCaptureSession = AVCaptureSession()
var previewLayer : AVCaptureVideoPreviewLayer!
var detectionString : String!
let apiKey : String = "---------dddddd"
var delegate: IsbnDelegate?
override func viewDidLoad() {
super.viewDidLoad()
// For the sake of discussion this is the camera
let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
// Create a nilable NSError to hand off to the next method.
// Make sure to use the "var" keyword and not "let"
var error : NSError? = nil
var input: AVCaptureDeviceInput = AVCaptureDeviceInput()
do {
input = try AVCaptureDeviceInput(device: device) as AVCaptureDeviceInput
} catch let myJSONError {
print(myJSONError)
}
// If our input is not nil then add it to the session, otherwise we're kind of done!
if input != AVCaptureDeviceInput() {
session.addInput(input)
}
else {
// This is fine for a demo, do something real with this in your app. :)
print(error)
}
let output = AVCaptureMetadataOutput()
output.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
session.addOutput(output)
output.metadataObjectTypes = output.availableMetadataObjectTypes
previewLayer = AVCaptureVideoPreviewLayer(session: session)
previewLayer.frame = self.view.bounds
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
self.view.layer.addSublayer(previewLayer)
// Start the scanner. You'll have to end it yourself later.
session.startRunning()
}
// This is called when we find a known barcode type with the camera.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
var highlightViewRect = CGRectZero
var barCodeObject : AVMetadataObject!
let barCodeTypes = [AVMetadataObjectTypeEAN13Code, AVMetadataObjectTypeEAN8Code]
// The scanner is capable of capturing multiple 2-dimensional barcodes in one scan.
for metadata in metadataObjects {
for barcodeType in barCodeTypes {
if metadata.type == barcodeType {
barCodeObject = self.previewLayer.transformedMetadataObjectForMetadataObject(metadata as! AVMetadataMachineReadableCodeObject)
highlightViewRect = barCodeObject.bounds
detectionString = (metadata as! AVMetadataMachineReadableCodeObject).stringValue
self.session.stopRunning()
self.alert(detectionString)
// Call the Daum Book API
let apiURI = NSURL(string: "https://apis.daum.net/search/book?apikey=\(apiKey)&q=\(detectionString)&searchType=isbn&output=json")
let apidata : NSData? = NSData(contentsOfURL: apiURI!)
NSLog("API Result = %@", NSString(data: apidata!, encoding: NSUTF8StringEncoding)!)
// Here we pass the scanned ISBN from ScanView back to UploadMain via the delegate
self.delegate?.passData(self.detectionString!)
break
}
}
}
print(detectionString)
self.navigationController?.popViewControllerAnimated(true)
}
func alert(Code: String){
let actionSheet:UIAlertController = UIAlertController(title: "Barcode", message: "\(Code)", preferredStyle: UIAlertControllerStyle.Alert)
// for alert add .Alert instead of .Action Sheet
// start copy
let firstAlertAction:UIAlertAction = UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler:
{
(alertAction:UIAlertAction!) in
// action when pressed
self.session.startRunning()
})
actionSheet.addAction(firstAlertAction)
// Note: the alert controller is never shown unless it is presented.
self.presentViewController(actionSheet, animated: true, completion: nil)
}
}
For more detail about protocols, see Apple's documentation on protocols and delegation, or one of the many tutorials on the topic.
Hope this will help you.
To me it appears you are trying to pass data back to another view controller. When you call
if let UploadVC = self.storyboard?.instantiateViewControllerWithIdentifier("UploadMain") as? UploadMain {
UploadVC.ISBNstring = self.detectionString!
}
you are creating a new instance of that view controller, and it does successfully receive the data, but you could never use it, because that instance is never shown.
What you want to do is send the data back to the instance that already exists on screen.
I could write it out for you, but there are actually some great tutorials, in both text and video form, on passing data back between view controllers.
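As a minimal sketch of one such approach, a closure callback (my own illustration with hypothetical names matching the question's classes, not the tutorials' exact code):
// In ScanView: a callback property that the presenting controller fills in.
var onISBNScanned: ((String) -> Void)?
// After a barcode is decoded, hand the string back and pop:
// self.onISBNScanned?(self.detectionString!)
// self.navigationController?.popViewControllerAnimated(true)
// In UploadMain, wherever ScanView is created or pushed:
// scanView.onISBNScanned = { [weak self] isbn in
//     self?.ISBN.text = isbn
// }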
Related
I'm trying to make a quiz application using an API. I can fetch the data in the QuizManager file, but I can't pass it to the ViewController: when I print in QuizManager it works, but printing in the ViewController does not.
How do I get the data to the ViewController?
QuizManager
import Foundation
protocol quizManagerDelegate {
func didUpdateQuiz(_ Quizmanager: QuizManager ,quiz: QuizModel)
}
struct QuizManager {
var delegate: quizManagerDelegate?
func performRequest(){
let urlString = "https://opentdb.com/api.php?amount=1&type=multiple"
if let url = URL(string: urlString){
let session = URLSession(configuration: .default)
let task = session.dataTask(with: url) { data, response, error in
if error != nil {
print(error!)
return
}
if let safeData = data{
if let quiz = self.parseJSON(quizdata: safeData){
delegate?.didUpdateQuiz(self, quiz: quiz)
}
}
}
task.resume()
}
}
func handle(data: Data?, response: URLResponse?, error: Error?) -> Void {
}
func parseJSON(quizdata: Data) -> QuizModel? {
let decoder = JSONDecoder()
do{
let decodedData = try decoder.decode(Welcome.self, from: quizdata)
let correct = decodedData.results?[0].correct_answer ?? "error"
let quest = decodedData.results?[0].question ?? "error"
let incorrect = decodedData.results?[0].incorrect_answers ?? ["error"]
let question = QuizModel(correctAnswer: correct, question: quest, falseAnswer: incorrect)
// print(question.correctAnswer)
// print(question.question)
// print(question.falseAnswer)
return question
} catch {
print(error)
return nil
}
}
}
QuizData
import Foundation
// MARK: - Welcome
struct Welcome: Codable {
let results: [Result]?
}
// MARK: - Result
struct Result: Codable {
let category: String?
let question, correct_answer: String?
let incorrect_answers: [String]?
}
QuizModel
import Foundation
struct QuizModel {
let correctAnswer : String
let question : String
let falseAnswer : [String]
}
ViewController
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var ChoiceButton4: UIButton!
@IBOutlet weak var ChoiceButton3: UIButton!
@IBOutlet weak var ChoiceButton2: UIButton!
@IBOutlet weak var ChoiceButton1: UIButton!
@IBOutlet weak var QuestionTextView: UITextView!
var quizMangager = QuizManager()
override func viewDidLoad() {
super.viewDidLoad()
QuestionTextView.layer.cornerRadius = 15
quizMangager.performRequest()
self.quizMangager.delegate = self
}
}
extension ViewController : quizManagerDelegate{
func didUpdateQuiz(_ Quizmanager: QuizManager, quiz: QuizModel) {
DispatchQueue.main.async {
print("***")
print(quiz.correctAnswer)
}
}
}
The problem is in viewDidLoad, in these lines:
quizMangager.performRequest()
self.quizMangager.delegate = self
You call performRequest before setting the delegate, so at the moment performRequest runs, var delegate: quizManagerDelegate? is still nil. Just set the delegate before calling the function:
override func viewDidLoad() {
super.viewDidLoad()
...
quizMangager.delegate = self
quizMangager.performRequest()
...
}
You have a slight mistake in your code: you just need to set the delegate first and then call its function.
I've tried to build a delegate design pattern. I have a simple delegate in WeatherManager, but it's always nil.
I've tried adding weatherManager.delegate = self in override func viewDidLoad() of WeatherViewController.
However, another protocol and delegate I have works fine.
I'm using an API to obtain geographic coordinates from city names, and the URL of the API that gets information about the weather is created and executed.
WeatherManager
import Foundation
// delegate design parttern
protocol WeatherManagerDelegate:AnyObject {
func didUpdateWeather(inputWeatherModel: WeatherModel)
}
class WeatherManager {
// delegate
weak var delegate: WeatherManagerDelegate?
// fetchCoordinate
func fetchWeather(urlString: String) {
performRequest(inputURLString: urlString)
}
// performRequest
func performRequest(inputURLString: String) {
// 1. Create URL
if let url = URL(string: inputURLString) {
// 2. Create URLSession
let session = URLSession(configuration: .default)
// 3. Give the URLSession a task
let task = session.dataTask(with: url) { (data, response, error) in
if error != nil {
print(error!)
return
}
if let safeData = data {
if let weather = self.parseJSON(JSONobject: safeData) {
// ---------------- problem here -------------------
self.delegate?.didUpdateWeather(inputWeatherModel: weather)
}
}
}
// 4. Start the task
task.resume()
}
}
// parse JSON object
func parseJSON(JSONobject: Data) -> WeatherModel? {
let decoder = JSONDecoder()
do {
let decodedData = try decoder.decode(WeatherData.self, from: JSONobject)
let id = decodedData.weather[0].id
let temp = decodedData.main.temp
let name = decodedData.name
let weather = WeatherModel(conditionId: id, cityName: name, temperature: temp)
return weather
} catch {
print(error)
return nil
}
}
}
WeatherModel
import Foundation
struct WeatherModel {
let conditionId: Int
let cityName: String
let temperature: Double
var temperatureString: String {
return String(format: "%.1f", temperature)
}
var conditionName: String {
switch conditionId {
case 200...232:
return "cloud.bolt"
case 300...321:
return "cloud.drizzle"
case 500...531:
return "cloud.rain"
case 600...622:
return "cloud.snow"
case 701...781:
return "cloud.fog"
case 800:
return "sun.max"
case 801...804:
return "cloud.bolt"
default:
return "cloud"
}
}
}
WeatherViewController
import UIKit
class WeatherViewController: UIViewController, UITextFieldDelegate, WeatherManagerDelegate {
@IBOutlet weak var conditionImageView: UIImageView!
@IBOutlet weak var temperatureLabel: UILabel!
@IBOutlet weak var searchTextField: UITextField!
@IBOutlet weak var cityLabel: UILabel!
let weatherManager = WeatherManager()
var coordinateManager = CoordinateManager()
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
searchTextField.delegate = self
searchTextField.keyboardType = .asciiCapable
weatherManager.delegate = self
}
@IBAction func searchPressed(_ sender: UIButton) {
searchTextField.endEditing(true)
}
func textFieldShouldReturn(_ textField: UITextField) -> Bool {
searchTextField.endEditing(true)
return true
}
func textFieldShouldEndEditing(_ textField: UITextField) -> Bool {
if textField.text != "" {
return true
} else {
textField.placeholder = "Type Something"
return false
}
}
func textFieldDidEndEditing(_ textField: UITextField) {
if let city = searchTextField.text {
coordinateManager.fetchCoordinate(cityName: city)
}
searchTextField.text = ""
}
func didUpdateWeather(inputWeatherModel: WeatherModel) {
print(inputWeatherModel.temperature)
}
}
CoordinateManager
import Foundation
struct CoordinateManager {
// WeatherManager
let weatherManager = WeatherManager()
// geographic coordinates (lat, lon)
let coordinateURL = "https://api.openweathermap.org/geo/1.0/direct?limit=1&appid=c8e60c6317c653a1789294c00f54ae19"
// fetchCoordinate
func fetchCoordinate(cityName: String) {
let urlString = "\(coordinateURL)&q=\(cityName)"
performRequest(inputURLString: urlString)
}
// transformURLString
func transformURLString(_ string: String) -> URLComponents? {
guard let urlPath = string.components(separatedBy: "?").first else {
return nil
}
var components = URLComponents(string: urlPath)
if let queryString = string.components(separatedBy: "?").last {
components?.queryItems = []
let queryItems = queryString.components(separatedBy: "&")
for queryItem in queryItems {
guard let itemName = queryItem.components(separatedBy: "=").first,
let itemValue = queryItem.components(separatedBy: "=").last else {
continue
}
components?.queryItems?.append(URLQueryItem(name: itemName, value: itemValue))
}
}
return components
}
// performRequest
func performRequest(inputURLString: String) {
// 1. Create URL
let components = transformURLString(inputURLString)
if let url = components?.url {
// 2. Create URLSession
let session = URLSession(configuration: .default)
// 3. Give the URLSession a task
let task = session.dataTask(with: url) {(data, response, error) in
if error != nil {
print(error!)
return
}
if let safeData = data {
self.parseJSON(JSONobject: safeData)
}
}
// 4. Start the task
task.resume()
}
}
// parseJSON
func parseJSON(JSONobject: Data) {
let decoder = JSONDecoder()
do {
let decodedData = try decoder.decode([CoordinateData].self, from: JSONobject)
let name = decodedData[0].name
let lat = decodedData[0].lat
let lon = decodedData[0].lon
let coordinateModel = CoordinateModel(name: name, lat: lat, lon: lon)
let weatherURL = coordinateModel.urlString
// ------- WeatherManager fetchWeather ---------
weatherManager.fetchWeather(urlString: weatherURL)
} catch {
print(error)
}
}
}
CoordinateModel
import Foundation
struct CoordinateModel {
let name: String
let lat: Double
let lon: Double
// the weather info looked up from the geographic coordinates
let weatherstr = "https://api.openweathermap.org/data/2.5/weather?"
let str = "units=metric&appid=c8e60c6317c653a1789294c00f54ae19#"
// computedproperty
var urlString: String {
return "\(weatherstr)&lat=\(lat)&lon=\(lon)&\(str)"
}
}
You create a weatherManager in WeatherViewController, and in the viewDidLoad method you set WeatherViewController as the delegate of that instance. But in the CoordinateManager class you create a new WeatherManager and never set a delegate on it.
Two solutions; use whichever is most convenient for you:
Create an initializer for CoordinateManager, to pass it the WeatherManager that you have created and that you are listening to:
// CoordinateManager class
let weatherManager: WeatherManager
init(weatherManager: WeatherManager) {
self.weatherManager = weatherManager
}
// WeatherViewController class
let weatherManager = WeatherManager()
// lazy, because one stored property cannot reference another during initialization
lazy var coordinateManager: CoordinateManager = CoordinateManager(weatherManager: self.weatherManager)
Or you can directly set WeatherViewController as the delegate of the WeatherManager instance that CoordinateManager actually uses:
// WeatherViewController class
override func viewDidLoad() {
super.viewDidLoad()
searchTextField.delegate = self
searchTextField.keyboardType = .asciiCapable
// Instead of weatherManager.delegate = self
coordinateManager.weatherManager.delegate = self
}
I am trying to build a document scanner that is able to read text off of any document or card. However, it sometimes has trouble identifying text correctly off a credit card. The accuracy is decent, but there is definitely room for improvement. I used the Vision framework's text recognition and have used all the standard settings, which are the right ones for setting up text recognition.
This is what I have to set up the text recognition request:
textRecognitionRequest = VNRecognizeTextRequest(completionHandler: { (request, error) in
if let results = request.results, !results.isEmpty {
if let requestResults = request.results as? [VNRecognizedTextObservation] {
var foundText = ""
for observation in requestResults {
guard let candidate = observation.topCandidates(1).first else { continue }
foundText.append(candidate.string + "\n")
}
}
}
})
textRecognitionRequest.recognitionLevel = .accurate
textRecognitionRequest.usesLanguageCorrection = true
Does anyone have any suggestions for improving the identification programmatically by either pre-processing or post-processing the scan at some point?
UPDATE: I've made a fully open source project that may help you do exactly what you need. Check it out: https://github.com/ethanwa/credit-card-scanner-and-validator
You can't do much to improve accuracy beyond adding some preset values to specifically look for, which doesn't make sense with credit card numbers, so I won't even bother showing that code. You'll need to rely on Apple to improve their text recognition model as iOS iterates for it to truly improve.
What I suggest in the meantime are these two things you can do:
Do validation on the credit card numbers you think you're receiving. For example, Visa starts with 4, MasterCard starts with 5, Discover with 6, Amex with 3, etc. They have specific lengths and a Luhn check digit. See here: https://www.freeformatter.com/credit-card-number-generator-validator.html
Keep iterating over and over on a camera feed until you get a number that validates. I'm not sure if you are currently just taking a picture of the card and processing that image (which it sounds like you are doing), but you should be processing many images per second until you get a valid card number. This is most likely how Apple does it when adding a card via Apple Pay on your phone, or how banking apps deposit checks digitally (finding valid routing and account numbers).
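For the validation in the first point, here is a minimal Luhn checksum sketch (my own illustration; passesLuhn is a hypothetical helper, not something from the question's code):
// Sketch only: returns true when a digit string passes the Luhn checksum.
func passesLuhn(_ number: String) -> Bool {
    let digits = number.compactMap { $0.wholeNumberValue }
    // Reject non-digit characters and implausibly short candidates.
    guard digits.count == number.count, digits.count >= 13 else { return false }
    let sum = digits.reversed().enumerated().reduce(0) { total, pair in
        let (index, digit) = pair
        let doubled = digit * 2
        // Double every second digit from the right, folding 10-18 back into single digits.
        return total + (index % 2 == 1 ? (doubled > 9 ? doubled - 9 : doubled) : digit)
    }
    return sum % 10 == 0
}
// Example: passesLuhn("4111111111111111") == true (a well-known Visa test number).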
Here's an example of what I mean by iterating on a camera feed.
I wrote this code that can pick out and validate ISBN numbers (basically 10- and 13-digit numbers that catalog books, which have a check digit for validation) in any given text, and it will keep looking until it finds all the numbers and then validates them. It works extremely well and is very fast. Check out this Swift 5.3 code:
import UIKit
import Vision
import Photos
import AVFoundation
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
var recognizedText = ""
var finalText = ""
var image: UIImage?
var processing = false
@IBOutlet weak var nameLabel: UILabel!
@IBOutlet weak var setLabel: UILabel!
@IBOutlet weak var numberLabel: UILabel!
lazy var textDetectionRequest: VNRecognizeTextRequest = {
let request = VNRecognizeTextRequest(completionHandler: self.handleDetectedText)
request.recognitionLevel = .accurate
request.usesLanguageCorrection = false
return request
}()
private let videoOutput = AVCaptureVideoDataOutput()
private let captureSession = AVCaptureSession()
private lazy var previewLayer: AVCaptureVideoPreviewLayer = {
let preview = AVCaptureVideoPreviewLayer(session: self.captureSession)
preview.videoGravity = .resizeAspect
return preview
}()
// MARK: AV
override func viewDidLoad() {
super.viewDidLoad()
self.addCameraInput()
self.addVideoOutput()
}
private func addCameraInput() {
let device = AVCaptureDevice.default(for: .video)!
let cameraInput = try! AVCaptureDeviceInput(device: device)
self.captureSession.addInput(cameraInput)
}
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
self.previewLayer.frame = self.view.bounds
}
private func addVideoOutput() {
self.videoOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
self.videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "my.image.handling.queue"))
self.captureSession.addOutput(self.videoOutput)
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
{
if !processing
{
guard let frame = CMSampleBufferGetImageBuffer(sampleBuffer) else {
debugPrint("unable to get image from sample buffer")
return
}
print("did receive image frame")
// process image here
self.processing = true
let ciimage : CIImage = CIImage(cvPixelBuffer: frame)
let theimage : UIImage = self.convert(cmage: ciimage)
self.image = theimage
processImage()
}
}
// Convert CIImage to UIImage
func convert(cmage:CIImage) -> UIImage
{
let context:CIContext = CIContext.init(options: nil)
let cgImage:CGImage = context.createCGImage(cmage, from: cmage.extent)!
let image:UIImage = UIImage.init(cgImage: cgImage)
return image
}
// AV
func processImage()
{
DispatchQueue.main.async {
self.nameLabel.text = ""
self.setLabel.text = ""
self.numberLabel.text = ""
}
guard let image = image, let cgImage = image.cgImage else { return }
let requests = [textDetectionRequest]
let imageRequestHandler = VNImageRequestHandler(cgImage: cgImage, orientation: .right, options: [:])
DispatchQueue.global(qos: .userInitiated).async {
do {
try imageRequestHandler.perform(requests)
} catch let error {
print("Error: \(error)")
}
}
}
fileprivate func handleDetectedText(request: VNRequest?, error: Error?)
{
self.finalText = ""
if let error = error {
print(error.localizedDescription)
self.processing = false
return
}
guard let results = request?.results, results.count > 0 else {
print("No text was found.")
self.processing = false
return
}
if let requestResults = request?.results as? [VNRecognizedTextObservation] {
self.recognizedText = ""
for observation in requestResults {
guard let candidate = observation.topCandidates(1).first else { return }
self.recognizedText += candidate.string
self.recognizedText += " "
}
var replaced = self.recognizedText.replacingOccurrences(of: "-", with: "")
replaced = String(replaced.filter { !"\n\t\r".contains($0) })
let replacedArr = replaced.components(separatedBy: " ")
for here in replacedArr
{
let final = here.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines)
if (final.count == 10 || final.count == 13) && final.containsISBNnums && Validate.isbn(final) // validate barcode
{
self.finalText += final
print(final)
self.captureSession.stopRunning()
DispatchQueue.main.async {
self.previewLayer.removeFromSuperlayer()
}
break
}
}
DispatchQueue.main.async {
self.numberLabel.text = self.finalText
}
}
self.processing = false
}
// MARK: Buttons
// This is a live camera view that will start a capture session
@IBAction func takePhoto(_ sender: Any) {
self.view.layer.addSublayer(self.previewLayer)
self.captureSession.startRunning()
}
@IBAction func choosePhoto(_ sender: Any) {
presentPhotoPicker(type: .photoLibrary)
}
fileprivate func presentPhotoPicker(type: UIImagePickerController.SourceType) {
let controller = UIImagePickerController()
controller.sourceType = type
controller.delegate = self
present(controller, animated: true, completion: nil)
}
}
extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
dismiss(animated: true, completion: nil)
}
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
dismiss(animated: true, completion: nil)
image = info[.originalImage] as? UIImage
processImage()
}
}
extension String {
var containsISBNnums: Bool {
guard self.count > 0 else { return false }
let nums: Set<Character> = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "X"]
return Set(self).isSubset(of: nums)
}
}
I have set up my custom camera and already coded the video preview. I have a button on the screen that I want to use to capture video when it is pressed. I don't know how to go about it. Everything so far is set up and working fine.
In the start-recording button function, I just need the code necessary to capture the video and save it. Thank you.
class CameraViewController: UIViewController, AVAudioRecorderDelegate {
@IBOutlet var recordOutlet: UIButton!
@IBOutlet var recordLabel: UILabel!
@IBOutlet var cameraView: UIView!
var tempImage: UIImageView?
var captureSession: AVCaptureSession?
var stillImageOutput: AVCapturePhotoOutput?
var videoPreviewLayer: AVCaptureVideoPreviewLayer?
var currentCaptureDevice: AVCaptureDevice?
var usingFrontCamera = false
/* This is the function I want to use to start
recording a video */
@IBAction func recordingButton(_ sender: Any) {
}
It seems as though Apple prefers developers to use the default camera UI for capturing video. If you are OK with that, I found a tutorial online with code to help: https://www.raywenderlich.com/94404/play-record-merge-videos-ios-swift.
You can scroll down to the "Recording Video" section and it will walk you through it with code.
Here's some of what it says: "
import MobileCoreServices
You’ll also need to adopt the same protocols as PlayVideoViewController, by adding the following to the end of the file:
extension RecordVideoViewController: UIImagePickerControllerDelegate {
}
extension RecordVideoViewController: UINavigationControllerDelegate {
}
Add the following code to RecordVideoViewController:
func startCameraFromViewController(viewController: UIViewController, withDelegate delegate: protocol<UIImagePickerControllerDelegate, UINavigationControllerDelegate>) -> Bool {
if UIImagePickerController.isSourceTypeAvailable(.Camera) == false {
return false
}
var cameraController = UIImagePickerController()
cameraController.sourceType = .Camera
cameraController.mediaTypes = [kUTTypeMovie as NSString as String]
cameraController.allowsEditing = false
cameraController.delegate = delegate
presentViewController(cameraController, animated: true, completion: nil)
return true
}
This method follows the same logic as in PlayVideoViewController, but it accesses the .Camera instead to record video.
Now add the following to record(_:):
startCameraFromViewController(self, withDelegate: self)
You are again in familiar territory. The code simply calls startCameraFromViewController(_:withDelegate:) when you tap the "Record Video" button.
Build and run to see what you’ve got so far.
Go to the Record screen and press the “Record Video” button. Instead of the Photo Gallery, the camera UI opens. Start recording a video by tapping the red record button at the bottom of the screen, and tap it again when you’re done recording."
Cheers,
Theo
Here is working code. In a real project you need to deal correctly with optional values and error handling, but you can use the following code as an example:
//
// ViewController.swift
// CustomCamera
//
// Created by Taras Chernyshenko on 6/27/17.
// Copyright © 2017 Taras Chernyshenko. All rights reserved.
//
import UIKit
import AVFoundation
import AssetsLibrary
class CameraViewController: UIViewController,
AVCaptureAudioDataOutputSampleBufferDelegate,
AVCaptureVideoDataOutputSampleBufferDelegate {
@IBOutlet var recordOutlet: UIButton!
@IBOutlet var recordLabel: UILabel!
@IBOutlet var cameraView: UIView!
var tempImage: UIImageView?
private var session: AVCaptureSession = AVCaptureSession()
private var deviceInput: AVCaptureDeviceInput?
private var previewLayer: AVCaptureVideoPreviewLayer?
private var videoOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
private var audioOutput: AVCaptureAudioDataOutput = AVCaptureAudioDataOutput()
private var videoDevice: AVCaptureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
private var audioConnection: AVCaptureConnection?
private var videoConnection: AVCaptureConnection?
private var assetWriter: AVAssetWriter?
private var audioInput: AVAssetWriterInput?
private var videoInput: AVAssetWriterInput?
private var fileManager: FileManager = FileManager()
private var recordingURL: URL?
private var isCameraRecording: Bool = false
private var isRecordingSessionStarted: Bool = false
private var recordingQueue = DispatchQueue(label: "recording.queue")
var captureSession: AVCaptureSession?
var stillImageOutput: AVCapturePhotoOutput?
var videoPreviewLayer: AVCaptureVideoPreviewLayer?
var currentCaptureDevice: AVCaptureDevice?
var usingFrontCamera = false
/* This is the function I want to use to start
recording a video */
@IBAction func recordingButton(_ sender: Any) {
if self.isCameraRecording {
self.stopRecording()
} else {
self.startRecording()
}
self.isCameraRecording = !self.isCameraRecording
}
override func viewDidLoad() {
super.viewDidLoad()
self.setup()
}
private func setup() {
self.session.sessionPreset = AVCaptureSessionPresetHigh
self.recordingURL = URL(fileURLWithPath: "\(NSTemporaryDirectory() as String)/file.mov")
if self.fileManager.isDeletableFile(atPath: self.recordingURL!.path) {
_ = try? self.fileManager.removeItem(atPath: self.recordingURL!.path)
}
self.assetWriter = try? AVAssetWriter(outputURL: self.recordingURL!,
fileType: AVFileTypeQuickTimeMovie)
let audioSettings = [
AVFormatIDKey : kAudioFormatAppleIMA4,
AVNumberOfChannelsKey : 1,
AVSampleRateKey : 16000.0
] as [String : Any]
let videoSettings = [
AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : UIScreen.main.bounds.size.width,
AVVideoHeightKey : UIScreen.main.bounds.size.height
] as [String : Any]
self.videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo,
outputSettings: videoSettings)
self.audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio,
outputSettings: audioSettings)
self.videoInput?.expectsMediaDataInRealTime = true
self.audioInput?.expectsMediaDataInRealTime = true
if self.assetWriter!.canAdd(self.videoInput!) {
self.assetWriter?.add(self.videoInput!)
}
if self.assetWriter!.canAdd(self.audioInput!) {
self.assetWriter?.add(self.audioInput!)
}
self.deviceInput = try? AVCaptureDeviceInput(device: self.videoDevice)
// Force-unwrapped for brevity; handle the optional input properly in a real app.
if self.session.canAddInput(self.deviceInput!) {
self.session.addInput(self.deviceInput!)
}
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
self.previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
let rootLayer = self.view.layer
rootLayer.masksToBounds = true
self.previewLayer?.frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
rootLayer.insertSublayer(self.previewLayer!, at: 0)
self.session.startRunning()
DispatchQueue.main.async {
self.session.beginConfiguration()
if self.session.canAddOutput(self.videoOutput) {
self.session.addOutput(self.videoOutput)
}
self.videoConnection = self.videoOutput.connection(withMediaType: AVMediaTypeVideo)
if self.videoConnection?.isVideoStabilizationSupported == true {
self.videoConnection?.preferredVideoStabilizationMode = .auto
}
self.session.commitConfiguration()
let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
let audioIn = try? AVCaptureDeviceInput(device: audioDevice)
if self.session.canAddInput(audioIn!) {
self.session.addInput(audioIn!)
}
if self.session.canAddOutput(self.audioOutput) {
self.session.addOutput(self.audioOutput)
}
self.audioConnection = self.audioOutput.connection(withMediaType: AVMediaTypeAudio)
}
}
private func startRecording() {
if self.assetWriter?.startWriting() != true {
print("error: \(self.assetWriter?.error.debugDescription ?? "")")
}
self.videoOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
self.audioOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
}
private func stopRecording() {
self.videoOutput.setSampleBufferDelegate(nil, queue: nil)
self.audioOutput.setSampleBufferDelegate(nil, queue: nil)
self.assetWriter?.finishWriting {
print("saved")
}
}
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer
sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
if !self.isRecordingSessionStarted {
let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
self.assetWriter?.startSession(atSourceTime: presentationTime)
self.isRecordingSessionStarted = true
}
let description = CMSampleBufferGetFormatDescription(sampleBuffer)!
if CMFormatDescriptionGetMediaType(description) == kCMMediaType_Audio {
if self.audioInput!.isReadyForMoreMediaData {
print("appendSampleBuffer audio");
self.audioInput?.append(sampleBuffer)
}
} else {
if self.videoInput!.isReadyForMoreMediaData {
print("appendSampleBuffer video");
if !self.videoInput!.append(sampleBuffer) {
print("Error writing video buffer");
}
}
}
}
}
So I have this Xcode project I am working on to make an iPhone app in Swift. I am trying to store two strings and a binary image in a Core Data object that I can retrieve in this view controller. The table view that I use to transition to this view shows the correct item associations, but when I load this view the attributes don't load in. Why is the object not being loaded and displayed in the view controller?
import UIKit
import CoreData
class IndividualMainViewController: UIViewController, NSFetchedResultsControllerDelegate, UIImagePickerControllerDelegate, UINavigationControllerDelegate
{
@IBOutlet weak var backCancelButton: UIBarButtonItem!
@IBOutlet weak var saveEditButton: UIBarButtonItem!
@IBOutlet weak var deviceImageButton: UIButton!
@IBOutlet weak var cameraImageButton: UIButton!
var item: Item? = nil
let moc = (UIApplication.sharedApplication().delegate as! AppDelegate).managedObjectContext
let frc: NSFetchedResultsController = NSFetchedResultsController()
@IBOutlet weak var imageHolder: UIImageView!
@IBOutlet weak var horseName: UITextField!
@IBOutlet weak var officialName: UITextField!
override func viewDidLoad()
{
super.viewDidLoad()
if(item == nil)
{
print("item returned nil")
print(item)
}
else
{
print("Object did load")
self.horseName.text = item?.name
self.officialName.text = item?.offName
self.imageHolder.image = UIImage(data: (item?.image)!)
saveEditButton.title = "Edit"
backCancelButton.title = "To Barn"
self.deviceImageButton.hidden = true
self.cameraImageButton.hidden = true
}
let tap: UIGestureRecognizer = UITapGestureRecognizer(target: self, action: "dismissEditor")
view.addGestureRecognizer(tap)
}
override func didReceiveMemoryWarning()
{
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func dismissEditor()
{
view.endEditing(true)
}
@IBAction func addImageFromDevice(sender: AnyObject)
{
let pickerController = UIImagePickerController()
pickerController.delegate = self
pickerController.sourceType = UIImagePickerControllerSourceType.PhotoLibrary
pickerController.allowsEditing = true
self.presentViewController(pickerController, animated: true, completion: nil)
}
@IBAction func addImageFromCamera(sender: AnyObject)
{
let pickerController = UIImagePickerController()
pickerController.delegate = self
pickerController.sourceType = UIImagePickerControllerSourceType.Camera
pickerController.allowsEditing = true
self.presentViewController(pickerController, animated: true, completion: nil)
}
func imagePickerController(picker: UIImagePickerController, didFinishPickingImage image: UIImage, editingInfo: [String : AnyObject]?)
{
self.dismissViewControllerAnimated(true, completion: nil)
self.imageHolder.image = image
}
@IBAction func saveEditTapped(sender: AnyObject)
{
if(saveEditButton.title == "Save")
{
if item != nil
{
editItem()
print("Item edit saved")
saveEditButton.title = "Edit"
backCancelButton.title = "To Barn"
}
else
{
createNewItem()
print("New item created")
performSegueWithIdentifier("back", sender: nil)
}
}
else
{
saveEditButton.title = "Save"
backCancelButton.title = "Cancel"
print("Not saved")
}
}
func createNewItem()
{
let entityDescription = NSEntityDescription.entityForName("Item", inManagedObjectContext: moc)
let item = Item(entity: entityDescription!, insertIntoManagedObjectContext: moc)
item.name = horseName.text
item.offName = officialName.text
item.image = UIImagePNGRepresentation(imageHolder.image!)
do
{
try self.moc.save()
}
catch
{
print("Failed ot create new object")
return
}
}
func editItem()
{
item?.name = horseName.text
item?.offName = officialName.text
item!.image = UIImagePNGRepresentation(imageHolder.image!)
do
{
try self.moc.save()
}
catch
{
print("Failed to save item")
return
}
}
}
Initializing the NSFetchedResultsController with just the generic initializer is not sufficient.
You have to tell the controller what to do: which fetch request and managed object context to use, which delegate to report to, and to perform an initial fetch.
The usual way in Swift is to declare the controller as a lazy variable:
lazy var fetchedResultsController: NSFetchedResultsController = {
let fetchRequest = NSFetchRequest()
let entity = NSEntityDescription.entityForName(self.entity, inManagedObjectContext: self.managedObjectContext)
fetchRequest.entity = entity
fetchRequest.predicate = self.fetchPredicate
// Set the batch size to a suitable number.
fetchRequest.fetchBatchSize = 20
// Edit the sort key as appropriate.
fetchRequest.sortDescriptors = [self.sortDescriptor]
// Edit the section name key path and cache name if appropriate.
// nil for section name key path means "no sections".
let aFetchedResultsController = NSFetchedResultsController(fetchRequest: fetchRequest, managedObjectContext: self.managedObjectContext!, sectionNameKeyPath: nil, cacheName: nil)
aFetchedResultsController.delegate = self
do {
try aFetchedResultsController.performFetch()
} catch let error as NSError {
print(error)
}
return aFetchedResultsController
}()
Replace all parameters represented by instance variables (those starting with self.) with your own values.
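As an illustrative sketch for this question (my example, not part of the original answer; it assumes the fetch request above targets the "Item" entity), you could then populate the fields from the first fetched object:
// Sketch only: touching the lazy property performs the fetch, then read its results.
if let items = fetchedResultsController.fetchedObjects as? [Item] {
    if let item = items.first {
        horseName.text = item.name
        officialName.text = item.offName
        if let data = item.image {
            imageHolder.image = UIImage(data: data)
        }
    }
}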