Cannot change PKToolPicker tools - iOS

I need to draw over an image using PencilKit. The problem I face is that I can't change the PencilKit tool picker tools (e.g. pen to pencil); I am only able to use the pen mode of the PKInkingTool.
My code is posted below:
class ViewController: UIViewController, PKToolPickerObserver {
    private var canvasView: PKCanvasView!
    @IBOutlet weak var imgView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        setup()
    }

    func setup() {
        let navigationItem = navigationController?.navigationBar.topItem
        let pencilButton = UIButton(frame: CGRect(x: 0, y: 0, width: 50, height: 50))
        pencilButton.addTarget(self, action: #selector(showPKToolKit), for: .touchUpInside)
        pencilButton.setImage(UIImage(systemName: "pencil.tip"), for: .normal)
        pencilButton.tintColor = .tintColor
        navigationItem?.rightBarButtonItem = UIBarButtonItem(customView: pencilButton)

        let fileManager = FileManager.default
        guard let documentsURL = fileManager.urls(for: .documentDirectory, in: .userDomainMask).first else { return }
        let urls: [URL]
        do {
            urls = try fileManager.contentsOfDirectory(at: documentsURL, includingPropertiesForKeys: nil)
        } catch {
            fatalError("Couldn't load files from documents directory")
        }
        do {
            let data = try Data(contentsOf: urls[0])
            print(data)
            imgView.image = UIImage(data: data)
        } catch {
            print("failed to load image")
        }

        canvasView = PKCanvasView(frame: self.imgView.frame)
        self.canvasView.isOpaque = false
        self.view.addSubview(self.canvasView)
    }

    @objc func showPKToolKit() {
        let toolPicker = PKToolPicker()
        toolPicker.setVisible(true, forFirstResponder: canvasView)
        toolPicker.addObserver(canvasView)
        canvasView.becomeFirstResponder()
        self.canvasView.drawingPolicy = .anyInput
    }
}
So this is my code.
Output video link.
Can anybody help resolve the issue?

I have updated the showPKToolKit method:
@objc func showPKToolKit() {
    if #available(iOS 14.0, *) {
        toolPicker = PKToolPicker()
    } else {
        let window = parent?.view.window
        toolPicker = PKToolPicker.shared(for: window!)
    }
    toolPicker.setVisible(true, forFirstResponder: canvasView)
    toolPicker.addObserver(canvasView)
    toolPicker.addObserver(self)
    canvasView.becomeFirstResponder()
    self.canvasView.drawingPolicy = .anyInput
}
Now it is working for me.
Video link.
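A note on why the update works: in the original version the tool picker was a local constant inside showPKToolKit, so nothing kept it alive after the method returned. On iOS 14 and later, a PKToolPicker you create yourself must be held by a strong reference for as long as it is in use; before iOS 14 you obtain the window's shared picker instead. The updated method above therefore assumes a property along these lines (a minimal sketch; the name toolPicker matches the code above):
// Strong reference so the picker isn't deallocated when showPKToolKit() returns.
// On iOS 14+ the app owns the picker; before iOS 14 this holds the window's shared picker.
private var toolPicker: PKToolPicker!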

Related

Unable to save PDF after signature

I am able to sign a PDF, but when I try to save it, the updated PDF file is not saved.
Code:
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
pdfContainerView.usePageViewController(true, withViewOptions: nil)
guard let signatureImage = signatureImage, let page = pdfContainerView.currentPage else { return }
let pageBounds = page.bounds(for: .cropBox)
let imageBounds = CGRect(x: pageBounds.midX, y: pageBounds.midY, width: 200, height: 100)
let imageStamp = ImageStampAnnotation(with: signatureImage, forBounds: imageBounds, withProperties: nil)
page.addAnnotation(imageStamp)
btnSaveSign.isHidden = false
}
func setupPdfView(url:String) {
if let documentURL = URL(string: url),
let data = try? Data(contentsOf: documentURL),
let document = PDFDocument(data: data) {
// Set document to the view, center it, and set background color
//pdfContainerView.displayMode = .singlePageContinuous
pdfContainerView.document = document
pdfContainerView.autoScales = true
pdfContainerView.displayDirection = .horizontal
pdfContainerView.backgroundColor = UIColor.white
btnNewSign.isHidden = false
lblPlaceholder.isHidden = true
let panAnnotationGesture = UIPanGestureRecognizer(target: self, action: #selector(didPanAnnotation(sender:)))
pdfContainerView.addGestureRecognizer(panAnnotationGesture)
}
}
@objc func didPanAnnotation(sender: UIPanGestureRecognizer) {
let touchLocation = sender.location(in: pdfContainerView)
guard let page = pdfContainerView.page(for: touchLocation, nearest: true)
else {
return
}
let locationOnPage = pdfContainerView.convert(touchLocation, to: page)
switch sender.state {
case .began:
guard let annotation = page.annotation(at: locationOnPage) else {
return
}
if annotation.isKind(of: ImageStampAnnotation.self) {
currentlySelectedAnnotation = annotation
}
case .changed:
guard let annotation = currentlySelectedAnnotation else {
return
}
let initialBounds = annotation.bounds
// Set the center of the annotation to the spot of our finger
annotation.bounds = CGRect(x: locationOnPage.x - (initialBounds.width / 2), y: locationOnPage.y - (initialBounds.height / 2), width: initialBounds.width, height: initialBounds.height)
//print("move to \(locationOnPage)")
case .ended, .cancelled, .failed:
currentlySelectedAnnotation = nil
default:
break
}
}
@objc func clickButton() {
let importMenu = UIDocumentPickerViewController(documentTypes: [String(kUTTypePDF)], in: .import)
importMenu.delegate = self
importMenu.modalPresentationStyle = .formSheet
self.present(importMenu, animated: true, completion: nil)
}
public func documentPicker(_ controller: UIDocumentPickerViewController, didPickDocumentsAt urls: [URL]) {
guard let myURL = urls.first else {
return
}
print("import result : \(myURL)")
self.pdfURLToSave = "\(myURL)"
let pdfView = PDFView(frame: view.frame)
title = "My PDF Viewer"
setupPdfView(url:"\(myURL)")
}
func documentInteractionControllerViewControllerForPreview(_ controller: UIDocumentInteractionController) -> UIViewController {
return self//or use return self.navigationController for fetching app navigation bar colour
}
public func documentMenu(_ documentMenu:UIDocumentPickerViewController, didPickDocumentPicker documentPicker: UIDocumentPickerViewController) {
documentPicker.delegate = self
present(documentPicker, animated: true, completion: nil)
}
Code snippet for saving the file:
@IBAction func btnSaveAction(_ sender: Any) {
if let documentURL = URL(string: self.pdfURLToSave),
let data = try? Data(contentsOf: documentURL),
let document = PDFDocument(data: data) {
if let data = document.dataRepresentation(){
let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let documentsDirectory = paths[0] // Get documents folder
let folderPathUrl = URL(fileURLWithPath: documentsDirectory).appendingPathComponent("SignApp.pdf")
if FileManager.default.fileExists(atPath: folderPathUrl.path){
try? FileManager.default.removeItem(at: folderPathUrl)
}
pdfContainerView.document?.write(toFile: "\(folderPathUrl)")
}
}
}
ImageStampAnnotation class:
class ImageStampAnnotation: PDFAnnotation {
    var image: UIImage!

    // A custom init that sets the type to Stamp by default and assigns our image variable
    init(with image: UIImage!, forBounds bounds: CGRect, withProperties properties: [AnyHashable: Any]?) {
        super.init(bounds: bounds, forType: PDFAnnotationSubtype.stamp, withProperties: properties)
        self.image = image
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func draw(with box: PDFDisplayBox, in context: CGContext) {
        // Get the CGImage of our image
        guard let cgImage = self.image.cgImage else { return }
        // Draw our CGImage in the context of our PDFAnnotation bounds
        context.draw(cgImage, in: self.bounds)
    }
}
Error:
CGPDFContextCreate: failed to create PDF context delegate..
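A likely cause of this error, and of the file not saving, is the save code above: it interpolates folderPathUrl into a string, so write(toFile:) receives a path that starts with "file://", which the PDF context cannot open. Writing through the URL-based API avoids the problem. A minimal sketch, assuming pdfContainerView.document holds the annotated document:
@IBAction func btnSaveAction(_ sender: Any) {
    guard let document = pdfContainerView.document else { return }
    let folderPathUrl = FileManager.default
        .urls(for: .documentDirectory, in: .userDomainMask)[0]
        .appendingPathComponent("SignApp.pdf")
    // write(to:) takes a URL directly; "\(folderPathUrl)" would prepend "file://" to the path.
    document.write(to: folderPathUrl)
}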
It's not possible for me to edit your code directly to show a proper solution, since your code isn't complete, but here is a short coordinator class, including your pan-annotation code, that saves an updated file each time a pan gesture is completed:
class Coordinator: NSObject {
    var pdfView: PDFView?
    var currentlySelectedAnnotation: PDFAnnotation?

    func getDocumentsDirectory() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }

    func save() {
        guard let pdfView = pdfView else {
            return
        }
        let url = getDocumentsDirectory().appendingPathComponent("SavedPDF.pdf")
        pdfView.document?.write(to: url)
        print("Wrote pdf to: ", url)
    }

    @objc func didPanAnnotation(sender: UIPanGestureRecognizer) {
        guard let pdfContainerView = pdfView else {
            fatalError()
        }
        let touchLocation = sender.location(in: pdfContainerView)
        guard let page = pdfContainerView.page(for: touchLocation, nearest: true) else {
            return
        }
        let locationOnPage = pdfContainerView.convert(touchLocation, to: page)
        switch sender.state {
        case .began:
            guard let annotation = page.annotation(at: locationOnPage) else {
                return
            }
            print("Set")
            if annotation.isKind(of: ImageStampAnnotation.self) {
                currentlySelectedAnnotation = annotation
            }
        case .changed:
            guard let annotation = currentlySelectedAnnotation else {
                return
            }
            let initialBounds = annotation.bounds
            // Set the center of the annotation to the spot of our finger
            annotation.bounds = CGRect(x: locationOnPage.x - (initialBounds.width / 2), y: locationOnPage.y - (initialBounds.height / 2), width: initialBounds.width, height: initialBounds.height)
            //print("move to \(locationOnPage)")
        case .ended, .cancelled, .failed:
            currentlySelectedAnnotation = nil
            save()
        default:
            break
        }
    }
}
For context, if it helps, this is how I set up the view controller and the PDFView initially:
let vc = UIViewController()
let pdfContainerView = PDFView()
let fileUrl = Bundle.main.url(forResource: "High Noon - SCORE", withExtension: "pdf")!
let pdfDocument = PDFDocument(url: fileUrl)
pdfContainerView.document = pdfDocument
pdfContainerView.autoScales = true
vc.view.addSubview(pdfContainerView)
let panAnnotationGesture = UIPanGestureRecognizer(target: context.coordinator, action: #selector(context.coordinator.didPanAnnotation(sender:)))
pdfContainerView.addGestureRecognizer(panAnnotationGesture)
let page = pdfContainerView.currentPage!
let pageBounds = page.bounds(for: .cropBox)
let imageBounds = CGRect(x: pageBounds.midX, y: pageBounds.midY, width: 200, height: 100)
let imageStamp = ImageStampAnnotation(with: UIImage(systemName: "pencil"), forBounds: imageBounds, withProperties: nil)
page.addAnnotation(imageStamp)
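For completeness, the context.coordinator references above suggest this setup lives inside a SwiftUI UIViewRepresentable. A minimal sketch of such a wrapper (the struct name is illustrative, and it assumes the Coordinator class above):
import SwiftUI
import PDFKit

struct PDFKitView: UIViewRepresentable {
    func makeCoordinator() -> Coordinator {
        Coordinator()
    }

    func makeUIView(context: Context) -> PDFView {
        let pdfView = PDFView()
        // The coordinator keeps a reference so its pan handler and save() can reach the view.
        context.coordinator.pdfView = pdfView
        let pan = UIPanGestureRecognizer(target: context.coordinator,
                                         action: #selector(Coordinator.didPanAnnotation(sender:)))
        pdfView.addGestureRecognizer(pan)
        return pdfView
    }

    func updateUIView(_ uiView: PDFView, context: Context) {}
}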

Is WKAudioFileAsset an audio file and can it be sent to a server?

I tried to look for a solution but could not find anything. I want to know whether a WKAudioFileAsset is an audio file.
Can it be considered a .wav file?
Is it possible to send a WKAudioFileAsset to a PHP server? Can you post some sample code showing how to send a WKAudioFileAsset/AVAsset to the server?
This is the code I have so far. I need the audio file from the documents directory.
import WatchKit
import Foundation
import AVFoundation
class InterfaceController: WKInterfaceController, AVAudioRecorderDelegate{
@IBOutlet weak var btn: WKInterfaceButton!
var recordingSession : AVAudioSession!
var audioRecorder : AVAudioRecorder!
var settings = [String : Any]()
override func awake(withContext context: Any?) {
super.awake(withContext: context)
recordingSession = AVAudioSession.sharedInstance()
do{
try recordingSession.setCategory(AVAudioSession.Category.playAndRecord)
try recordingSession.setActive(true)
recordingSession.requestRecordPermission(){[unowned self] allowed in
DispatchQueue.main.async {
if allowed{
print("Allow")
} else{
print("Don't Allow")
}
}
}
}
catch{
print("failed to record!")
}
// Configure interface objects here.
// Audio Settings
settings = [
AVFormatIDKey:Int(kAudioFormatLinearPCM),
AVSampleRateKey:44100.0,
AVNumberOfChannelsKey:1,
AVLinearPCMBitDepthKey:8,
AVLinearPCMIsFloatKey:false,
AVLinearPCMIsBigEndianKey:false,
AVEncoderAudioQualityKey:AVAudioQuality.max.rawValue
]
}
override func willActivate() {
// This method is called when watch view controller is about to be visible to user
super.willActivate()
print("Test")
}
override func didDeactivate() {
// This method is called when watch view controller is no longer visible
super.didDeactivate()
}
func directoryURL() -> URL? {
let fileManager = FileManager.default
let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
let documentDirectory = urls[0] as URL
let soundUrl = documentDirectory.appendingPathComponent("sound.wav")
// var soundUrlStr = soundUrl?.path
//print(fileManager.fileExists(atPath: soundUrlStr))
let filePath = (soundUrl).path
print(filePath)
print("URL")
print(soundUrl)
return soundUrl as URL?
}
func startRecording(){
let audioSession = AVAudioSession.sharedInstance()
do{
audioRecorder = try AVAudioRecorder(url: self.directoryURL()! as URL,
settings: settings)
audioRecorder.delegate = self
audioRecorder.prepareToRecord()
audioRecorder.record(forDuration: 5.0)
}
catch {
finishRecording(success: false)
}
do {
try audioSession.setActive(true)
audioRecorder.record()
} catch {
}
}
func finishRecording(success: Bool) {
audioRecorder.stop()
if success {
print(success)
} else {
audioRecorder = nil
print("Somthing Wrong.")
}
}
@IBAction func recordAudio() {
let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
let url = URL(fileURLWithPath: path)
let pathPart = url.appendingPathComponent("sound.wav")
let filePath = pathPart.path
let fileManager = FileManager.default
if fileManager.fileExists(atPath: filePath){
print("File exists!")
let audioAsset = WKAudioFileAsset(url:pathPart)
// let playerItem = WKAudioFilePlayerItem(asset:audioAsset)
print("Audio File")
print(audioAsset)
print("WAV file")
print(NSData(contentsOfFile: filePath) as Any)
}else{
print("File does not exist")
}
if audioRecorder == nil {
print("Pressed")
self.btn.setTitle("Stop")
self.btn.setBackgroundColor(UIColor(red: 119.0/255.0, green: 119.0/255.0, blue: 119.0/255.0, alpha: 1.0))
self.startRecording()
} else {
self.btn.setTitle("Record")
print("Pressed2")
self.btn.setBackgroundColor(UIColor(red: 221.0/255.0, green: 27.0/255.0, blue: 50.0/255.0, alpha: 1.0))
self.finishRecording(success: true)
}
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if !flag {
finishRecording(success: false)
}
}
}
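To answer the first part: a WKAudioFileAsset is not itself the audio data; it is a reference to the file at the URL you pass in. The actual .wav file is the sound.wav your recorder wrote to the documents directory, so that file (or its Data) is what you would send to the server. A minimal upload sketch using URLSession (the endpoint URL is a placeholder, and your PHP server may expect multipart/form-data instead of a raw body):
func uploadRecording(at fileURL: URL) {
    // Placeholder endpoint; replace with your server's upload URL.
    var request = URLRequest(url: URL(string: "https://example.com/upload.php")!)
    request.httpMethod = "POST"
    request.setValue("audio/wav", forHTTPHeaderField: "Content-Type")
    // Streams the raw .wav bytes from the documents directory.
    URLSession.shared.uploadTask(with: request, fromFile: fileURL) { data, response, error in
        if let error = error {
            print("Upload failed: \(error)")
        } else {
            print("Upload finished")
        }
    }.resume()
}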

Getting the audio file from the documents directory

I have looked all over the internet for a way to obtain the whole recorded audio file from the documents directory. Is there any way to do so? Is this normally done via an AVAsset/WKAudioFileAsset?
I tried to print the asset, and I got something like the output shown here.
Are these the .wav files?
I need the recorded .wav files so I can send them to the server.
I hope someone can help me out. Thank you very much.
Here is the whole code: http://s000.tinyupload.com/?file_id=38128676605242153939
Here is my code:
import WatchKit
import Foundation
import AVFoundation
class InterfaceController: WKInterfaceController, AVAudioRecorderDelegate{
@IBOutlet weak var btn: WKInterfaceButton!
var recordingSession : AVAudioSession!
var audioRecorder : AVAudioRecorder!
var settings = [String : Any]()
override func awake(withContext context: Any?) {
super.awake(withContext: context)
recordingSession = AVAudioSession.sharedInstance()
do{
try recordingSession.setCategory(AVAudioSession.Category.playAndRecord)
try recordingSession.setActive(true)
recordingSession.requestRecordPermission(){[unowned self] allowed in
DispatchQueue.main.async {
if allowed{
print("Allow")
} else{
print("Don't Allow")
}
}
}
}
catch{
print("failed to record!")
}
// Configure interface objects here.
// Audio Settings
settings = [
AVFormatIDKey:Int(kAudioFormatLinearPCM),
AVSampleRateKey:44100.0,
AVNumberOfChannelsKey:1,
AVLinearPCMBitDepthKey:8,
AVLinearPCMIsFloatKey:false,
AVLinearPCMIsBigEndianKey:false,
AVEncoderAudioQualityKey:AVAudioQuality.max.rawValue
]
}
override func willActivate() {
// This method is called when watch view controller is about to be visible to user
super.willActivate()
print("Test")
}
override func didDeactivate() {
// This method is called when watch view controller is no longer visible
super.didDeactivate()
}
func directoryURL() -> URL? {
let fileManager = FileManager.default
let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
let documentDirectory = urls[0] as URL
let soundUrl = documentDirectory.appendingPathComponent("sound.wav")
// var soundUrlStr = soundUrl?.path
//print(fileManager.fileExists(atPath: soundUrlStr))
let filePath = (soundUrl).path
print(filePath)
print("URL")
print(soundUrl)
return soundUrl as URL?
}
func startRecording(){
let audioSession = AVAudioSession.sharedInstance()
do{
audioRecorder = try AVAudioRecorder(url: self.directoryURL()! as URL,
settings: settings)
audioRecorder.delegate = self
audioRecorder.prepareToRecord()
audioRecorder.record(forDuration: 5.0)
}
catch {
finishRecording(success: false)
}
do {
try audioSession.setActive(true)
audioRecorder.record()
} catch {
}
}
func finishRecording(success: Bool) {
audioRecorder.stop()
if success {
print(success)
} else {
audioRecorder = nil
print("Somthing Wrong.")
}
}
@IBAction func recordAudio() {
let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
let url = URL(fileURLWithPath: path)
let pathPart = url.appendingPathComponent("sound.wav")
let filePath = pathPart.path
let fileManager = FileManager.default
if fileManager.fileExists(atPath: filePath){
print("File exists!")
let audioAsset = WKAudioFileAsset(url:pathPart)
let playerItem = WKAudioFilePlayerItem(asset:audioAsset)
print("Audio File")
print(audioAsset)
print("WAV file")
print(playerItem)
}else{
print("File does not exist")
}
if audioRecorder == nil {
print("Pressed")
self.btn.setTitle("Stop")
self.btn.setBackgroundColor(UIColor(red: 119.0/255.0, green: 119.0/255.0, blue: 119.0/255.0, alpha: 1.0))
self.startRecording()
} else {
self.btn.setTitle("Record")
print("Pressed2")
self.btn.setBackgroundColor(UIColor(red: 221.0/255.0, green: 27.0/255.0, blue: 50.0/255.0, alpha: 1.0))
self.finishRecording(success: true)
}
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if !flag {
finishRecording(success: false)
}
}
}
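For the "obtain the whole recorded file" part: the recording already lives on disk at the URL returned by directoryURL(), and reading it back as Data gives you the complete .wav once recording has finished. A minimal sketch, assuming the same sound.wav name used above:
func recordedAudioData() -> Data? {
    let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let soundUrl = documents.appendingPathComponent("sound.wav")
    // The file at soundUrl is the actual .wav; WKAudioFileAsset only references it.
    return try? Data(contentsOf: soundUrl)
}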

Creating zoom on UIImageView in Swift

I am currently loading images from my server; the number of images varies depending on how many the server returns. I have been able to display these images successfully, and I can scroll down to view them all. Now my problem is that I want a way to tap a specific image and zoom into it. I have shared my code below:
//Declaration of variables
var albumID: String?
var imagePath: String?
var path: String?
var getClickImage = UIImageView()
var zoomscrollV = UIScrollView()
var imageArray = [String]()
//view did load
override func viewDidLoad() {
super.viewDidLoad()
setUpViewsAlbumPhotos()
zoomscrollV.delegate = self
if !CheckInternet.Connection(){
showAlert(title: "No Internet", message: "Please connect your device to an internet connection")
}else{
fetchPhotos(albumID: albumID)
}
}
//viewDidAppear
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
zoomscrollV.isHidden = true
zoomscrollV.frame = CGRect(x:0, y:0, width:self.view.frame.width, height:self.view.frame.height - 50)
zoomscrollV.minimumZoomScale=1
zoomscrollV.maximumZoomScale=10
zoomscrollV.bounces=false
self.view.addSubview(zoomscrollV)
getClickImage=UIImageView()
getClickImage.frame = CGRect(x:0, y:0, width:zoomscrollV.frame.width, height:zoomscrollV.frame.height)
getClickImage.backgroundColor = .black
getClickImage.contentMode = .scaleAspectFit
zoomscrollV.addSubview(getClickImage)
}
//This code makes an async call to download images and details from the server
let async_call = URL(string: "\(String.api_albumPhotos)\(albumID ?? "")")
let request = NSMutableURLRequest(url: async_call!)
request.httpMethod = "GET"
request.addValue("application/json", forHTTPHeaderField: "Content-Type")
request.addValue("application/json", forHTTPHeaderField: "Accept")
let task = URLSession.shared.dataTask(with: request as URLRequest){
data, response, error in
if error != nil {
print("error is:: \(error!.localizedDescription)")
DispatchQueue.main.async {
self.showAlert(title: "Error", message: "Sorry try again later")
self.stopActivityLoader()
}
return
}
do {
let myJSON = try JSONSerialization.jsonObject(with: data!, options: .mutableContainers) as? NSDictionary
if let parseJSON = myJSON {
var responseCode: String!
var message: NSArray!
responseCode = parseJSON["responseCode"] as! String?
if responseCode == "200" {
DispatchQueue.main.async {
self.stopActivityLoader()
message = parseJSON["message"] as? NSArray
self.path = parseJSON["path"] as? String
if let message = message {
let totalMessage = message.count
let viewHeight = self.view.frame.height
var scrollHeight = 0
var contentViewTopConstraint: CGFloat = 20
// self.preference.set(parseJSON, forKey: UserDefaultKeys.albums.rawValue)
for obj in message{
if let dict = obj as? NSDictionary {
self.imagePath = dict.value(forKey: "path") as? String
let imageID = dict.value(forKey: "id") as? String
let albumThumbnail = photos()
self.scrollView.addSubview(albumThumbnail)
albumThumbnail.translatesAutoresizingMaskIntoConstraints = false
albumThumbnail.leadingAnchor.constraint(equalTo: self.contentView.leadingAnchor).isActive = true
albumThumbnail.topAnchor.constraint(equalTo: self.scrollView.topAnchor, constant: contentViewTopConstraint).isActive = true
albumThumbnail.trailingAnchor.constraint(equalTo: self.contentView.trailingAnchor).isActive = true
albumThumbnail.heightAnchor.constraint(equalToConstant: 150).isActive = true
albumThumbnail.isUserInteractionEnabled = true
albumThumbnail.contentMode = .scaleAspectFit
let touchRec = UITapGestureRecognizer(target: self, action: #selector(self.myImageTapped(_:)))
albumThumbnail.addGestureRecognizer(touchRec)
if let path = self.path{
if let imagePath = self.imagePath{
let strippedPath = path.replacingOccurrences(of: "\\", with: "")
let strippedImagePath = imagePath.replacingOccurrences(of: "\\", with: "")
print("\(strippedPath)\(strippedImagePath)")
albumThumbnail.sd_setImage(with: URL(string: "\(strippedPath)\(strippedImagePath)"), placeholderImage: UIImage(named: "default_profile"), options: [.continueInBackground, .progressiveDownload])
if let wrapped = self.path {
self.imageArray.append("\(strippedPath)\(strippedImagePath)")
// print(self.imageArray.append(wrapped))
}
}
}
contentViewTopConstraint = contentViewTopConstraint + 170
}
}
scrollHeight = totalMessage * 170
if totalMessage <= 1 {
self.scrollView.contentSize.height = viewHeight + 20
}else{
self.scrollView.contentSize.height = CGFloat(scrollHeight)
}
}
}
}else{
//Show alert
DispatchQueue.main.async {
self.showAlert(title: "Error", message: "Sorry could not update album. Try again")
self.stopActivityLoader()
}
}
}
}catch{
print("you:: \(error.localizedDescription)")
//Show alert
DispatchQueue.main.async {
self.showAlert(title: "Error", message: "Sorry could not update album. Try again")
self.stopActivityLoader()
}
}
}
task.resume()
}
@objc func myImageTapped(_ sender: UITapGestureRecognizer) {
zoomscrollV.isHidden = false
let myImage = imageArray[(sender.view?.tag)!]
print("myImage \(myImage)")
// RESPECTIVE IMAGE
getClickImage.image = UIImage(named: myImage)
let closeButton: UIButton = UIButton(type: .custom)
closeButton.frame = CGRect(x:40.0, y:self.view.frame.height - 50, width:self.view.frame.width - 80, height:50.0)
closeButton.addTarget(self, action: #selector(self.closeZoom), for: .touchUpInside)
closeButton.setTitle("CLOSE ZOOM", for: .normal)
closeButton.setTitleColor(UIColor.white, for: .normal)
// CLOSE BUTTON
self.view.addSubview(closeButton)
}
@objc func closeZoom(sender: AnyObject) {
zoomscrollV.isHidden = true
zoomscrollV.setZoomScale(1.0, animated: false)
sender.removeFromSuperview()
}
//SCROLLVIEW DELEGATE
func viewForZooming(in scrollView: UIScrollView) -> UIView? {
return getClickImage
}
========================== Edit Jan 24 ============================
CONVERT URL TO IMAGE
@objc func myImageTapped(_ sender: UITapGestureRecognizer) {
zoomscrollV.isHidden = false
let myImageURL = imageArray[(sender.view?.tag)!]
if let url = URL( string: myImageURL)
{
DispatchQueue.global().async {
if let data = try? Data( contentsOf:url){
DispatchQueue.main.async {
let myImage = UIImage( data:data)
print("myImage \(myImage)")
// RESPECTIVE IMAGE
getClickImage.image = myImage
let closeButton: UIButton = UIButton(type: .custom)
closeButton.frame = CGRect(x:40.0, y:self.view.frame.height - 50, width:self.view.frame.width - 80, height:50.0)
closeButton.addTarget(self, action: #selector(self.closeZoom), for: .touchUpInside)
closeButton.setTitle("CLOSE ZOOM", for: .normal)
closeButton.setTitleColor(UIColor.white, for: .normal)
// CLOSE BUTTON
self.view.addSubview(closeButton)
}
}
}
}
}
===============================================================
On image click, you create a scroll view and add the respective image as a subview; then zooming will work.
var getClickImage = UIImageView()
var zoomscrollV = UIScrollView()
override func viewDidLoad() {
super.viewDidLoad()
zoomscrollV.delegate = self
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
zoomscrollV.isHidden = true
zoomscrollV.frame = CGRect(x:0, y:0, width:self.view.frame.width, height:self.view.frame.height - 50)
zoomscrollV.minimumZoomScale=1
zoomscrollV.maximumZoomScale=10
zoomscrollV.bounces=false
self.view.addSubview(zoomscrollV)
getClickImage=UIImageView()
getClickImage.frame = CGRect(x:0, y:0, width:zoomscrollV.frame.width, height:zoomscrollV.frame.height)
getClickImage.backgroundColor = .black
getClickImage.contentMode = .scaleAspectFit
zoomscrollV.addSubview(getClickImage)
}
// ON CLICKING IMAGE ACTION
@objc func myImageTapped(_ sender: UITapGestureRecognizer) {
zoomscrollV.isHidden = false
// RESPECTIVE IMAGE
getClickImage.image = BG_CARD_IMGS[(sender.view?.tag)!]
let closeButton: UIButton = UIButton(type: .custom)
closeButton.frame = CGRect(x:40.0, y:self.view.frame.height - 50, width:self.view.frame.width - 80, height:50.0)
closeButton.addTarget(self, action: #selector(self.closeZoom), for: .touchUpInside)
closeButton.setTitle("CLOSE ZOOM", for: .normal)
closeButton.setTitleColor(UIColor.red, for: .normal)
// CLOSE BUTTON
self.view.addSubview(closeButton)
}
@objc func closeZoom(sender: AnyObject) {
zoomscrollV.isHidden = true
zoomscrollV.setZoomScale(1.0, animated: false)
sender.removeFromSuperview()
}
//SCROLLVIEW DELEGATE
func viewForZooming(in scrollView: UIScrollView) -> UIView? {
return getClickImage
}

How to manage the AVAudioPlayer isPlaying in each tableView cell?

I have a tableView with multiple cells, and each cell has an AVAudioPlayer item. I am facing a problem: I don't know how to manage the AVAudioPlayer instances. When I play the first AVAudioPlayer and then play the second one, the sounds overlap. How do I stop the first AVAudioPlayer in my customized cell and play the second one?
Thanks.
This is my customized cell:
class TableViewCell: UITableViewCell {
@IBOutlet weak var myImageView: UIImageView!
@IBOutlet weak var myChatBubbleView: UIView!
@IBOutlet weak var myDateLabel: UILabel!
@IBOutlet weak var mySecondLabel: UILabel!
@IBOutlet weak var myRecordPlayerBtn: MenuButton!
private var timer:Timer?
private var elapsedTimeInSecond:Int = 0
var audioPlayer:AVAudioPlayer?
var message:ChatroomMessage?
var chatroomId:String = ""
var delegate:PlayRecordDelegate?
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
self.backgroundColor = defaultBackgroundColor
self.tintColor = defaultChatroomCheckButtonColor
myImageView.layer.masksToBounds = true
myImageView.layer.cornerRadius = defaultIconRadius
myChatBubbleView.backgroundColor = defaultChatGreenBubbleColor
myChatBubbleView.layer.cornerRadius = defaultButtonRadius
myDateLabel.textColor = defaultChatTimeColor
mySecondLabel.textColor = defaultChatTimeColor
mySecondLabel.isHidden = true
myRecordPlayerBtn.imageView?.animationDuration = 1
myRecordPlayerBtn.imageView?.animationImages = [
UIImage(named: "img_myRocordPlaying1")!,
UIImage(named: "img_myRocordPlaying2")!,
UIImage(named: "img_myRocordPlaying3")!
]
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
func loadByMessage(_ message:ChatroomMessage, chatroomId:String) {
self.message = message
self.chatroomId = chatroomId
myRecordPlayerBtn.addTarget(self, action: #selector(recordPlay), for: .touchUpInside)
}
func resetRecordAnimation() {
self.myRecordPlayerBtn.imageView!.stopAnimating()
self.myRecordPlayerBtn.isSelected = false
}
@objc func recordPlay(_ sender: UIButton) {
self.myRecordPlayerBtn.imageView?.startAnimating()
let documentsDirectoryURL = try! FileManager().url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("\(chatroomId)/Record/")
let fileName = message?.content.substring(from: 62)
let fileURL = documentsDirectoryURL.appendingPathComponent(fileName!)
if FileManager.default.fileExists(atPath: fileURL.path) {
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
self.elapsedTimeInSecond = Int(audioDurationSeconds)
if audioPlayer?.isPlaying == true {
audioPlayer?.stop()
DispatchQueue.main.async {
self.resetTimer(second: self.elapsedTimeInSecond)
self.startTimer()
}
}
updateTimeLabel()
startTimer()
audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
audioPlayer?.delegate = self
audioPlayer?.play()
}else{
//don't have file in local
let recordUrl = URL(string: (message?.content)!)
URLSession.shared.downloadTask(with: recordUrl!, completionHandler: { (location, response, error) in
guard
let httpURLResponse = response as? HTTPURLResponse, httpURLResponse.statusCode == 200,
let mimeType = response?.mimeType, mimeType.hasPrefix("audio"),
let location = location, error == nil
else { return }
do {
try FileManager.default.moveItem(at: location, to: fileURL)
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
self.elapsedTimeInSecond = Int(audioDurationSeconds)
DispatchQueue.main.async {
self.updateTimeLabel()
self.startTimer()
}
self.audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
self.audioPlayer?.delegate = self
self.audioPlayer?.play()
} catch {
print(error)
}
}).resume()
}
}
func startTimer() {
timer?.invalidate()
timer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true, block: { (timer) in
self.elapsedTimeInSecond -= 1
self.updateTimeLabel()
})
}
func resetTimer(second:Int) {
timer?.invalidate()
elapsedTimeInSecond = second
updateTimeLabel()
}
func updateTimeLabel() {
let seconds = elapsedTimeInSecond % 60
let minutes = (elapsedTimeInSecond/60) % 60
mySecondLabel.isHidden = false
mySecondLabel.text = String(format: "%02d:%02d", minutes,seconds)
}
}
extension TableViewCell:AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
let documentsDirectoryURL = try! FileManager().url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("\(chatroomId)/Record/")
let fileName = message?.content.substring(from: 62)
let fileURL = documentsDirectoryURL.appendingPathComponent(fileName!)
if FileManager.default.fileExists(atPath: fileURL.path) {
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
DispatchQueue.main.async {
self.resetTimer(second: Int(audioDurationSeconds))
self.myRecordPlayerBtn.imageView!.stopAnimating()
self.myRecordPlayerBtn.imageView?.image = #imageLiteral(resourceName: "img_myRocordDefault")
}
}
}
}
First, check whether your player exists and is already playing before starting a new one:
if audioPlayer != nil {
    if audioPlayer?.isPlaying == true {
        audioPlayer?.stop()
        DispatchQueue.main.async {
            self.resetTimer(second: self.elapsedTimeInSecond)
            self.startTimer()
        }
    }
}
If you don't want to play two audio tracks at the same time, you should use a shared instance of AVAudioPlayer.
It is better for performance, and you can define the instance as a static var in your controller so it is accessible from each cell.
I have developed a music player application, and I used a shared instance in the MusicPlayManager:
class MusicPlayManager {
    static let sharedInstance = MusicPlayManager()
    var player: AVAudioPlayer?
    // No superclass here, so a plain private init is enough.
    private init() {}
    // something else, such as playNext, playPrevious methods
}
In your view controller, you can use MusicPlayManager.sharedInstance.player.
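A minimal sketch of how a cell could route playback through the shared instance, so that starting one clip stops whatever another cell started (method and property names are illustrative):
func playClip(at fileURL: URL) {
    let manager = MusicPlayManager.sharedInstance
    // Stopping the shared player first prevents two tracks from overlapping.
    manager.player?.stop()
    manager.player = try? AVAudioPlayer(contentsOf: fileURL)
    manager.player?.delegate = self   // assumes the surrounding cell conforms to AVAudioPlayerDelegate
    manager.player?.play()
}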
