I was building a weather app and everything was fine until I added a CAGradientLayer transition to a UIView. It still works, but the transition takes very long, around 10-15 seconds. I'm using the Apixu API for the weather data. One thing to note is that the audio plays perfectly, right away. I have checked the duration and there is nothing wrong with the animation:
import UIKit
import AVFoundation
class ViewController: UIViewController, UISearchBarDelegate {
@IBOutlet weak var searchBar: UISearchBar!
@IBOutlet weak var cityLbl: UILabel!
@IBOutlet weak var conditionLbl: UILabel!
@IBOutlet weak var degreeLbl: UILabel!
@IBOutlet weak var imgView: UIImageView!
@IBOutlet weak var gradientView: UIView!
let gradientLayer = CAGradientLayer()
let gradientLayer2 = CAGradientLayer()
let gradientLayer3 = CAGradientLayer()
var degree: Int!
var condition: String!
var imgURL: String!
var city: String!
var exists: Bool = true
var audioPlayer1 = AVAudioPlayer()
var audioPlayer2 = AVAudioPlayer()
var audioPlayer3 = AVAudioPlayer()
var audioPlayerSunny = AVAudioPlayer()
var audioPlayerSnow = AVAudioPlayer()
var audioPlayerThunder = AVAudioPlayer()
// Audio part.
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
gradientLayer.frame = gradientView.bounds
gradientLayer.colors = [UIColor.blue.cgColor, UIColor.black.cgColor]
gradientView.layer.insertSublayer(gradientLayer, at: 0)
//gradient color view
gradientLayer2.colors = [UIColor.red.cgColor, UIColor.purple.cgColor]
//gradient color view 2
gradientLayer3.colors = [UIColor.orange.cgColor, UIColor.green.cgColor]
//gradient color view 3
searchBar.delegate = self
// Audio Code starts.
do {
audioPlayer1 = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "ambiance", ofType: "mp3")!))
audioPlayer1.prepareToPlay()
audioPlayer2 = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "guitar", ofType: "mp3")!))
audioPlayer2.prepareToPlay()
audioPlayer3 = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "rain", ofType: "mp3")!))
audioPlayer3.prepareToPlay()
audioPlayerSunny = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "sunny", ofType: "mp3")!))
audioPlayerSunny.prepareToPlay()
audioPlayerSnow = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "snow", ofType: "mp3")!))
audioPlayerSnow.prepareToPlay()
audioPlayerThunder = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "thunder", ofType: "mp3")!))
audioPlayerThunder.prepareToPlay()
}
catch {
print(error)
} // Audio Code ends.
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
searchBar.resignFirstResponder()
}
func searchBarSearchButtonClicked(_ searchBar: UISearchBar) {
searchBar.resignFirstResponder()
let urlRequest = URLRequest(url: URL(string: "http://api.apixu.com/v1/current.json?key=494942fa74444eabb6973325170510&q=\(searchBar.text!.replacingOccurrences(of: " ", with: "%20"))")!)
let task = URLSession.shared.dataTask(with: urlRequest) { (data, response, error) in
if error == nil {
do {
let json = try JSONSerialization.jsonObject(with: data!, options: .mutableContainers) as! [String : AnyObject]
if let current = json["current"] as? [String : AnyObject] {
if let temp = current["temp_c"] as? Float {
self.degree = Int(temp.rounded())
}
if let condition = current["condition"] as? [String : AnyObject] {
self.condition = condition["text"] as! String
let icon = condition["icon"] as! String
self.imgURL = "http:\(icon)"
}
}
if let location = json["location"] as? [String : AnyObject] {
self.city = location["name"] as! String
}
if let _ = json["error"] {
self.exists = false
}
if self.condition == "Sunny" || self.condition == "Clear" {
self.audioPlayerSunny.currentTime = 0
self.audioPlayerSunny.play()
self.audioPlayer1.stop()
self.audioPlayer3.stop()
self.audioPlayer2.stop()
self.audioPlayerSnow.stop()
self.audioPlayerThunder.stop()
// Audio Code.
let colorChangeAnimation = CABasicAnimation(keyPath: "colors")
colorChangeAnimation.duration = 1.0
colorChangeAnimation.toValue = self.gradientLayer3.colors
colorChangeAnimation.fillMode = kCAFillModeForwards
colorChangeAnimation.isRemovedOnCompletion = false
self.gradientLayer.add(colorChangeAnimation, forKey: "colorChange")
//effects
} else if self.condition.lowercased().range(of:"cloudy") != nil || self.condition.lowercased().range(of:"overcast") != nil {
self.audioPlayer1.currentTime = 0
self.audioPlayer1.play()
self.audioPlayer2.stop()
self.audioPlayer3.stop()
self.audioPlayerSnow.stop()
self.audioPlayerSunny.stop()
self.audioPlayerThunder.stop()
// Audio Code.
let colorChangeAnimation = CABasicAnimation(keyPath: "colors")
colorChangeAnimation.duration = 1.0
colorChangeAnimation.toValue = self.gradientLayer2.colors
colorChangeAnimation.fillMode = kCAFillModeForwards
colorChangeAnimation.isRemovedOnCompletion = false
self.gradientLayer.add(colorChangeAnimation, forKey: "colorChange")
//effects
} else if self.condition.lowercased().range(of:"snow") != nil || self.condition.lowercased().range(of:"snowy") != nil || self.condition.lowercased().range(of:"sleet") != nil
{
self.audioPlayerSnow.currentTime = 0
self.audioPlayerSnow.play()
self.audioPlayer2.stop()
self.audioPlayer3.stop()
self.audioPlayer1.stop()
self.audioPlayerSunny.stop()
self.audioPlayerThunder.stop()
// Audio Code.
} else if self.condition.lowercased().range(of:"rain") != nil || self.condition.lowercased().range(of:"rainy") != nil || self.condition.lowercased().range(of:"drizzle") != nil
{
self.audioPlayer3.currentTime = 0
self.audioPlayer3.play()
self.audioPlayer2.stop()
self.audioPlayer1.stop()
self.audioPlayerSnow.stop()
self.audioPlayerSunny.stop()
self.audioPlayerThunder.stop()
// Audio Code.
} else if self.condition.lowercased().range(of:"thunder") != nil || self.condition.lowercased().range(of:"thundery") != nil {
self.audioPlayerThunder.currentTime = 0
self.audioPlayerThunder.play()
self.audioPlayer2.stop()
self.audioPlayer3.stop()
self.audioPlayerSnow.stop()
self.audioPlayerSunny.stop()
self.audioPlayer1.stop()
// Audio Code.
} else {
self.audioPlayer2.currentTime = 0
self.audioPlayer2.play()
self.audioPlayer1.stop()
self.audioPlayer3.stop()
self.audioPlayerSnow.stop()
self.audioPlayerSunny.stop()
self.audioPlayerThunder.stop()
// Audio Code.
}
DispatchQueue.main.async {
if self.exists{
self.degreeLbl.isHidden = false
self.conditionLbl.isHidden = false
self.imgView.isHidden = false
self.degreeLbl.text = "\(self.degree.description)°"
self.cityLbl.text = self.city
self.conditionLbl.text = self.condition
self.imgView.downloadImage(from: self.imgURL!)
}
else {
self.degreeLbl.isHidden = true
self.conditionLbl.isHidden = true
self.imgView.isHidden = true
self.cityLbl.text = "No matching city found"
self.exists = true
}
}
} catch let jsonError {
print(jsonError.localizedDescription)
}
}
}
task.resume()
}
}
extension UIImageView {
func downloadImage(from url: String) {
let urlRequest = URLRequest(url: URL(string: url)!)
let task = URLSession.shared.dataTask(with: urlRequest) { (data, response, error) in
if error == nil {
DispatchQueue.main.async {
self.image = UIImage(data: data!)
}
}
}
task.resume()
}
}
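One thing worth checking, offered as an editorial note rather than a confirmed diagnosis: the CABasicAnimation above is added from inside the URLSession completion handler, which runs on a background queue, while only the label and image updates are wrapped in DispatchQueue.main.async. UIKit and Core Animation calls are expected to be made on the main thread, and starting them from a background queue can delay them unpredictably, which would match a 10-15 second lag. A minimal sketch, reusing the question's own animation lines and only adding the main-queue dispatch:
// Sketch: same animation as in the question, but dispatched to the main queue.
DispatchQueue.main.async {
    let colorChangeAnimation = CABasicAnimation(keyPath: "colors")
    colorChangeAnimation.duration = 1.0
    colorChangeAnimation.toValue = self.gradientLayer3.colors
    colorChangeAnimation.fillMode = kCAFillModeForwards
    colorChangeAnimation.isRemovedOnCompletion = false
    self.gradientLayer.add(colorChangeAnimation, forKey: "colorChange")
}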
I created a share extension for my app which can share images/videos from Photos (the gallery), and it works properly. The problem is that if I select a .HEIC file and then tap my share extension, it shows an error, as in the screenshot below.
import UIKit
import Social
import MobileCoreServices
import AVKit
import Toast_Swift
//@objc(ShareExtensionViewController)
class ShareViewController: UIViewController {
@IBOutlet weak var image: UIImageView!
@IBOutlet weak var btn: UIButton!
@IBOutlet weak var lbl: UILabel!
@IBOutlet weak var view2: UIView!
@IBOutlet weak var cancel: UIButton!
@IBOutlet weak var blurview: UIVisualEffectView!
var allMedia = [Data()]
var singleImage = UIImage()
var imagesUrl = [URL]()
let groupPath = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: "my group identifier")
var saveDone = Bool()
override func viewDidLoad() {
super.viewDidLoad()
let attachments = (self.extensionContext?.inputItems.first as? NSExtensionItem)?.attachments ?? []
self.btn.setTitle("Add", for: .normal)
self.btn.layer.cornerRadius = 10
self.cancel.layer.cornerRadius = 10
self.image.clipsToBounds = false
applyshadow(image: image)
blurview.isHidden = true
}
override func viewDidAppear(_ animated: Bool) {
self.handleSharedFile()
}
// MARK:- function
func applyshadow(image: UIImageView) {
image.clipsToBounds = false
image.layer.shadowColor = UIColor.systemGray.cgColor
image.layer.shadowOpacity = 1
image.layer.shadowOffset = CGSize.zero
image.layer.shadowRadius = 7.5
}
func videotext(string : String) ->Bool{
let videoextension = [".MP4",".mp4",".mkv",".MKV",".AVI",".avi",".mov", ".MOV"]
if videoextension.contains(string){
return true
}
return false
}
func videoThumb(filepath: URL) -> UIImage{
do{
// let filepath = data?.appendingPathComponent(lbl[indexPath.row])
let asset = AVURLAsset(url: filepath, options: nil)
let imgGenerator = AVAssetImageGenerator(asset: asset)
imgGenerator.appliesPreferredTrackTransform = true
let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(value: 0, timescale: 1), actualTime: nil)
let uiImage = UIImage(cgImage: cgImage)
return uiImage
}catch let error {
print("Error: \(error.localizedDescription)")
return UIImage()
}
}
private func handleSharedFile() {
// extracting the path to the URL that is being shared
let attachments = (self.extensionContext?.inputItems.first as? NSExtensionItem)?.attachments ?? []
let contentType = kUTTypeData as String
for provider in attachments {
// Check if the content type is the same as we expected
if provider.hasItemConformingToTypeIdentifier(contentType) {
provider.loadItem(forTypeIdentifier: contentType,
options: nil) { [unowned self] (data, error) in
guard error == nil else { return }
if let url = data as? URL {
imagesUrl.append(url)
print(imagesUrl)
if videotext(string: String(url.lastPathComponent.suffix(4))){
DispatchQueue.main.async {
self.image.image = videoThumb(filepath: url)
if attachments.count > 1 {
lbl.text = "\(attachments.count) Items"
}else{
lbl.text = url.lastPathComponent
}
}
}else {
let imageData = try? Data(contentsOf: url)
DispatchQueue.main.async {
if provider == attachments.last{
self.image.image = UIImage(data: imageData!)
}
self.singleImage = UIImage(data: imageData!)!
if attachments.count > 1 {
lbl.text = "\(attachments.count) Items"
}else{
lbl.text = url.lastPathComponent
}
}
}
}else {
print("Impossible to save image")
}
}
}
}
}
Thank you in advance for reading, for your attention, and for any answers and upvotes. :)
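Since the screenshot with the exact error is not included, here is only a direction to check, as a hedged sketch rather than a confirmed fix: loadItem(forTypeIdentifier:) does not always hand back a file URL; depending on the host app and file type it can return a URL, a UIImage, or raw Data, and the code above only handles the URL case. Handling all three (reusing the provider, contentType and image names from the question) avoids failing silently when the cast to URL does not succeed:
// Hedged sketch: handle URL, UIImage and Data results from the item provider.
provider.loadItem(forTypeIdentifier: contentType, options: nil) { [unowned self] (item, error) in
    guard error == nil else { return }
    var loadedImage: UIImage?
    if let url = item as? URL, let data = try? Data(contentsOf: url) {
        loadedImage = UIImage(data: data)
    } else if let image = item as? UIImage {
        loadedImage = image
    } else if let data = item as? Data {
        loadedImage = UIImage(data: data)
    }
    DispatchQueue.main.async {
        if let loadedImage = loadedImage {
            self.image.image = loadedImage
        } else {
            print("Unsupported item type")   // fall through instead of crashing
        }
    }
}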
Thanks for visiting this question.
I recently started learning Swift, and I'm making an audio-player-ish app.
The nextButton works only a couple of times, and then it just goes silent. I'm trying to make the array loop over and over as nextButton is pressed: when I press the "next" button it should go to the next track, and once the tracks in the array are finished it should play them all over again.
All I'm trying to achieve is to get the "next" button working without any stops.
Array and Audio Player:
let sound1 = NSURL(fileURLWithPath: Bundle.main.path(forResource: "1", ofType: "mp3")!)
let sound2 = NSURL(fileURLWithPath: Bundle.main.path(forResource: "2", ofType: "mp3")!)
let sound3 = NSURL(fileURLWithPath: Bundle.main.path(forResource: "3", ofType: "mp3")!)
lazy var soundArray: [NSURL] = [sound1, sound2, sound3]
var audioPlayer = AVAudioPlayer()
var run = true
var currentIndex = 0
func playRandomSound() {
let randNo = Int(arc4random_uniform(UInt32(soundArray.count)))
if run == true{
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback)
try AVAudioSession.sharedInstance().setActive(true)
try audioPlayer = AVAudioPlayer(contentsOf: soundArray[randNo] as URL)
audioPlayer.prepareToPlay()
audioPlayer.play()
} catch {
print(error)
}
}
}
nextButton:
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
currentIndex = indexPath.row
}
@IBAction func nextButton(_ sender: Any) {
audioPlayer.stop()
print("Audio did stop")
if currentIndex + 1 < soundArray.count {
currentIndex += 1
playRandomSound()
}
}
Full Code:
import UIKit
import AVFoundation
extension UILabel {
func pushUp(_ text: String?) {
let animation:CATransition = CATransition()
animation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.easeInEaseOut)
animation.type = CATransitionType.push
animation.subtype = CATransitionSubtype.fromTop
animation.duration = 1
self.layer.add(animation, forKey: CATransitionType.push.rawValue)
self.text = text
}
}
class HideShow: UIViewController, AVAudioPlayerDelegate{
override func viewDidLoad() {
super.viewDidLoad()
}
@IBOutlet weak var moveOn: UILabel!
@IBOutlet weak var Logo: UILabel!
@IBOutlet weak var BGimage: UIImageView!
@IBOutlet weak var nextButton2: UIButton!
@IBOutlet weak var nextButton: UIButton!
@IBOutlet weak var LayDown: UILabel!
@IBOutlet weak var audioChanger: UIButton!
@IBOutlet weak var buttonView: UIView!
let notification = NotificationCenter.default
let toImage = UIImage(named:"BG2.jpeg")
@IBAction func buttonTapped(_ sender: UIButton){
UIView.animate(withDuration: 2) {
self.LayDown.alpha = 1
}
UIView.transition(with: self.BGimage,
duration: 1,
options: .transitionCrossDissolve,
animations: { self.BGimage.image = self.toImage },
completion: nil)
LayDown.pushUp("CLOSE YOUR EYES")
self.nextButton.isHidden = true
self.nextButton2.isHidden = false
}
let sound1 = NSURL(fileURLWithPath: Bundle.main.path(forResource: "1", ofType: "mp3")!)
let sound2 = NSURL(fileURLWithPath: Bundle.main.path(forResource: "2", ofType: "mp3")!)
let sound3 = NSURL(fileURLWithPath: Bundle.main.path(forResource: "3", ofType: "mp3")!)
lazy var soundArray: [NSURL] = [sound1, sound2, sound3]
var audioPlayer = AVAudioPlayer()
var run = true
var currentIndex = 0
func playRandomSound() {
let randNo = Int(arc4random_uniform(UInt32(soundArray.count)))
if run == true{
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback)
try AVAudioSession.sharedInstance().setActive(true)
try audioPlayer = AVAudioPlayer(contentsOf: soundArray[randNo] as URL)
audioPlayer.prepareToPlay()
audioPlayer.play()
} catch {
print(error)
}
}
}
@IBAction func buttonTapped2(_ sender: Any) {
UIView.animate(withDuration: 1) {
self.nextButton2.alpha = 0
self.LayDown.alpha = 0
self.nextButton.alpha = 0
self.BGimage.alpha = 0
self.moveOn.alpha = 0
self.Logo.alpha = 0
self.audioChanger.isHidden = false
self.buttonView.isHidden = false
}
playRandomSound()
}
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
currentIndex = indexPath.row
}
@IBAction func nextButton(_ sender: Any) {
audioPlayer.stop()
print("Audio did stop")
if currentIndex + 1 < soundArray.count {
currentIndex += 1
playRandomSound()
}
}
}
Let me know if you have any questions.
Thanks in advance! :)
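As an editorial note on the code above: nextButton stops advancing once currentIndex + 1 reaches soundArray.count, and playRandomSound() always picks a random index rather than the current one, so the described wrap-around behaviour never happens. A minimal sketch of a next handler that plays the track at currentIndex and wraps back to the first track, reusing the soundArray, audioPlayer and currentIndex properties from the question:
// Sketch: play the track at a given index, then a next handler that wraps around.
func playSound(at index: Int) {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback)
        try AVAudioSession.sharedInstance().setActive(true)
        audioPlayer = try AVAudioPlayer(contentsOf: soundArray[index] as URL)
        audioPlayer.prepareToPlay()
        audioPlayer.play()
    } catch {
        print(error)
    }
}

@IBAction func nextButton(_ sender: Any) {
    audioPlayer.stop()
    currentIndex = (currentIndex + 1) % soundArray.count   // wrap to the first track
    playSound(at: currentIndex)
}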
I have tried to make a separate class for the location functionality (a separate Swift file in the WatchKit Extension folder), and I want to use a location instance of that class in the InterfaceController class to track the user's location. But Xcode gets stuck, and I cannot deploy and test the application. I don't know what the reason is.
Here is the location code and the InterfaceController class:
The whole project is here: https://filebin.net/s40xlqzeukfwc39w
I hope you can help me out since I am still a beginner in terms of Swift and WatchKit. Thank you very much.
import WatchKit
import Foundation
import CoreLocation
class LocationOutside{
var locationManager: CLLocationManager = CLLocationManager()
init(){
locationManager.requestWhenInUseAuthorization()
locationManager.desiredAccuracy = kCLLocationAccuracyHundredMeters
locationManager.delegate = (self as! CLLocationManagerDelegate)
locationManager.requestLocation()
}
func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) {
let currentLoc = locations[0]
let lat = currentLoc.coordinate.latitude
let long = currentLoc.coordinate.longitude
print(lat)
print(long)
}
func locationManager(_ manager: CLLocationManager, didFailWithError error: Error){
if let locationErr = error as? CLError{
switch locationErr{
case CLError.locationUnknown:
print("unknown location")
case CLError.denied:
print("denied")
default:
print("another type of location error")
}
}else{
print("other error: ", error.localizedDescription)
}
}
}
import WatchKit
import Foundation
import CoreLocation
import HealthKit
import AVFoundation
import CoreMotion
let hrType:HKQuantityType = HKObjectType.quantityType(forIdentifier: HKQuantityTypeIdentifier.heartRate)!
// Date will be constructed in database --> server side
class InterfaceController: WKInterfaceController,AVAudioRecorderDelegate{
var saveUrl: URL?
var outDoorLocation = LocationOutside.init()
// to conduct permission to retrieve location data
var locationManager: CLLocationManager = CLLocationManager()
// Outlets for testing
@IBOutlet weak var button: WKInterfaceButton!
@IBOutlet weak var furtherSigLabels: WKInterfaceLabel!
var recordingSession : AVAudioSession!
var audioRecorder : AVAudioRecorder!
var settings = [String : Any]()
// distinguish start recording heartbeat
var isRecording = false
//For workout session
let healthStore = HKHealthStore()
var session: HKWorkoutSession?
var currentQuery: HKQuery?
var filename: String?
let motionManager = CMMotionManager()
let queue = OperationQueue()
var gravityStr = ""
var userAccelerStr = ""
var rotationRateStr = ""
var attitudeStr = ""
var movement = ""
var manualLat: Double = 0.0
var manualLong: Double = 0.0
var heartRateVal: Double = 0.0
var prev_grav_z: Double = 0.0
var prev_acc_z: Double = 0.0
var grav_x:Double = 0.0
var grav_y:Double = 0.0
var grav_z:Double = 0.0
var acc_x:Double = 0.0
var acc_y:Double = 0.0
var acc_z:Double = 0.0
var sendOrNot:Bool = false
override func awake(withContext context: Any?) {
super.awake(withContext: context)
/**
locationManager.requestWhenInUseAuthorization()
locationManager.desiredAccuracy = kCLLocationAccuracyBest
locationManager.delegate = self
locationManager.requestLocation()
*/
// managing authorization
let healthService:HealthDataService = HealthDataService()
healthService.authorizeHealthKitAccess { (success, error) in
if success {
print("HealthKit authorization received.")
} else {
print("HealthKit authorization denied!")
if error != nil {
print("\(String(describing: error))")
}
}
}
motionManager.deviceMotionUpdateInterval = 0.5
}
/**
func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) {
let currentLoc = locations[0]
let lat = currentLoc.coordinate.latitude
let long = currentLoc.coordinate.longitude
manualLat = lat
manualLong = long
/**
let request = NSMutableURLRequest(url: NSURL(string: "http://147.46.242.219/addgps.php")! as URL)
request.httpMethod = "POST"
let postString = "a=\(lat)&b=\(long)"
request.httpBody = postString.data(using: .utf8)
let task = URLSession.shared.dataTask(with: request as URLRequest) {
data, response, error in
if error != nil {
print("error=\(error)")
return
}
print("response = \(response)")
let responseString = NSString(data: data!, encoding: String.Encoding.utf8.rawValue)
print("responseString = \(responseString)")
}
task.resume()
*/
}
*/
/**
func locationManager(_: CLLocationManager, didFailWithError error: Error) {
let err = CLError.Code(rawValue: (error as NSError).code)!
switch err {
case .locationUnknown:
break
default:
print(err)
}
}
*/
override func willActivate() {
// This method is called when watch view controller is about to be visible to user
super.willActivate()
motionManager.startDeviceMotionUpdates(to: queue) { (deviceMotion: CMDeviceMotion?, error: Error?) in
if error != nil {
print("Encountered error: \(error!)")
}
if deviceMotion != nil {
self.grav_x = (deviceMotion?.gravity.x)!
self.grav_y = (deviceMotion?.gravity.y)!
self.grav_z = (deviceMotion?.gravity.z)!
self.gravityStr = String(format: "grav_x: %.2f, grav_y: %.2f, grav_z: %.2f" ,
self.grav_x,
self.grav_y,
self.grav_z)
if self.prev_grav_z == 0.0 {
self.prev_grav_z = self.grav_z
self.sendOrNot = true
}
else{
if (self.grav_z - self.prev_grav_z) <= -0.25{
//print("Gravity: ",self.grav_z, self.prev_grav_z)
self.sendOrNot = true
}
else{
self.sendOrNot = false
}
self.prev_grav_z = self.grav_z
}
//self.sendData(x: self.gravityStr)
// print(self.gravityStr)
self.acc_x = (deviceMotion?.userAcceleration.x)!
self.acc_y = (deviceMotion?.userAcceleration.y)!
self.acc_z = (deviceMotion?.userAcceleration.z)!
self.userAccelerStr = String(format: "acc_x: %.2f, acc_y: %.2f, acc_z: %.2f" ,
self.acc_x,
self.acc_y,
self.acc_z)
if (self.acc_z - self.prev_acc_z) <= -0.2{
//print("Accelero_z: ",self.acc_z, self.prev_acc_z)
self.sendOrNot = true
}
else{
self.sendOrNot = false
}
self.prev_acc_z = self.acc_z
self.rotationRateStr = String(format: "rota_x: %.2f, rota_y: %.2f, rota_z: %.2f" ,
(deviceMotion?.rotationRate.x)!,
(deviceMotion?.rotationRate.y)!,
(deviceMotion?.rotationRate.z)!)
//self.sendData(x: self.rotationRateStr)
//print(self.rotationRateStr)
self.attitudeStr = String(format: "atti_roll: %.1f, atti_pitch: %.1f, atti_yaw: %.1f" ,
(deviceMotion?.attitude.roll)!,
(deviceMotion?.attitude.pitch)!,
(deviceMotion?.attitude.yaw)!)
//self.sendData(x: self.attitudeStr)
//print(self.attitudeStr)
//self.movement = self.gravityStr + self.userAccelerStr + self.rotationRateStr + self.attitudeStr
if self.sendOrNot{
//print("Falling motion detected!")
self.movement = "\(self.gravityStr), \(self.userAccelerStr), \(self.rotationRateStr), \(self.attitudeStr), \("_1")"
}
else{
self.movement = "\(self.gravityStr), \(self.userAccelerStr), \(self.rotationRateStr), \(self.attitudeStr), \("_0")"
}
//print(self.movement)
self.sendOrNot = false
}
}
}
override func didDeactivate() {
// This method is called when watch view controller is no longer visible
super.didDeactivate()
motionManager.stopDeviceMotionUpdates()
}
/**
func sendData(x:String){
let request = NSMutableURLRequest(url: NSURL(string: "http://147.46.242.219/addgyro2.php")! as URL)
request.httpMethod = "POST"
let postString = "a=\(x)"
request.httpBody = postString.data(using: .utf8)
let task = URLSession.shared.dataTask(with: request as URLRequest) {
data, response, error in
if error != nil {
print("error=\(error)")
return
}
print("response = \(response)")
let responseString = NSString(data: data!, encoding: String.Encoding.utf8.rawValue)
print("responseString = \(responseString)")
}
task.resume()
}
*/
/**
func getDocumentsDirectory() -> URL
{
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
func getFileUrl() -> URL
{
let filePath = getDocumentsDirectory().appendingPathComponent(filename!)
return filePath
}
func startRecording(){
let audioSession = AVAudioSession.sharedInstance()
do{
audioRecorder = try AVAudioRecorder(url: getFileUrl(),
settings: settings)
audioRecorder.delegate = self
audioRecorder.prepareToRecord()
audioRecorder.record(forDuration: 5.0)
}
catch {
finishRecording(success: false)
}
do {
try audioSession.setActive(true)
audioRecorder.record()
} catch {
}
}
func finishRecording(success: Bool) {
audioRecorder.stop()
audioRecorder = nil
if success {
print(success)
} else {
audioRecorder = nil
print("Somthing Wrong.")
}
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
if !flag {
finishRecording(success: false)
}
}
*/
// generate a short unique id
struct ShortCodeGenerator {
private static let base62chars = [Character]("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz")
private static let maxBase : UInt32 = 62
static func getCode(withBase base: UInt32 = maxBase, length: Int) -> String {
var code = ""
for _ in 0..<length {
let random = Int(arc4random_uniform(min(base, maxBase)))
code.append(base62chars[random])
}
return code
}
}
// Getting the address from longitude and latitude
func getAddressFromLatLon(pdblLatitude: String, withLongitude pdblLongitude: String) {
var center : CLLocationCoordinate2D = CLLocationCoordinate2D()
let lat: Double = Double("\(pdblLatitude)")!
//21.228124
let lon: Double = Double("\(pdblLongitude)")!
//72.833770
let ceo: CLGeocoder = CLGeocoder()
center.latitude = lat
center.longitude = lon
let loc: CLLocation = CLLocation(latitude:center.latitude, longitude: center.longitude)
ceo.reverseGeocodeLocation(loc, completionHandler:
{(placemarks, error) in
if (error != nil)
{
//print("reverse geodcode fail: \(error!.localizedDescription)")
}
let pm = placemarks! as [CLPlacemark]
if pm.count > 0 {
let pm = placemarks![0]
//print(pm.country)
//print(pm.locality)
//print(pm.subLocality)
//print(pm.thoroughfare)
//print(pm.postalCode)
//print(pm.subThoroughfare)
var addressString : String = ""
if pm.subLocality != nil {
addressString = addressString + pm.subLocality! + ", "
}
if pm.thoroughfare != nil {
addressString = addressString + pm.thoroughfare! + ", "
}
if pm.locality != nil {
addressString = addressString + pm.locality! + ", "
}
if pm.country != nil {
addressString = addressString + pm.country! + ", "
}
if pm.postalCode != nil {
addressString = addressString + pm.postalCode! + " "
}
//print(addressString)
}
})
}
@IBAction func manualBtnPressed() {
// manual reporting functionality
// generating 6 character long unique id
let uniqueId = ShortCodeGenerator.getCode(length: 6)
let txtMsg = "I am student \(uniqueId). I need help!"
print(txtMsg)
// Getting the address
if manualLat != 0.0 && manualLong != 0.0 {
var latStr = String(format:"%.2f",manualLat)
var longStr = String(format:"%.2f",manualLong)
getAddressFromLatLon(pdblLatitude: latStr, withLongitude: longStr)
let request = NSMutableURLRequest(url: NSURL(string: "http://147.46.242.219/addmanual.php")! as URL)
request.httpMethod = "POST"
let postString = "a=\(manualLat)&b=\(manualLong)&c=\(txtMsg)"
print(postString)
request.httpBody = postString.data(using: .utf8)
let task = URLSession.shared.dataTask(with: request as URLRequest) {
data, response, error in
if error != nil {
//print("error=\(error)")
return
}
//print("response = \(response)")
let responseString = NSString(data: data!, encoding: String.Encoding.utf8.rawValue)
//print("responseString = \(responseString)")
}
task.resume()
}
}
// when button clicked label is shown
@IBAction func btnPressed() {
if(!isRecording){
let stopTitle = NSMutableAttributedString(string: "Stop Recording")
stopTitle.setAttributes([NSAttributedString.Key.foregroundColor: UIColor.red], range: NSMakeRange(0, stopTitle.length))
button.setAttributedTitle(stopTitle)
isRecording = true
startWorkout() //Start workout session/healthkit streaming
}else{
let exitTitle = NSMutableAttributedString(string: "Start Recording")
exitTitle.setAttributes([NSAttributedString.Key.foregroundColor: UIColor.red], range: NSMakeRange(0, exitTitle.length))
button.setAttributedTitle(exitTitle)
isRecording = false
healthStore.end(session!)
}
}
}
extension InterfaceController: HKWorkoutSessionDelegate{
func workoutSession(_ workoutSession: HKWorkoutSession, didChangeTo toState: HKWorkoutSessionState, from fromState: HKWorkoutSessionState, date: Date) {
switch toState {
case .running:
//print(date)
if let query = heartRateQuery(date){
self.currentQuery = query
healthStore.execute(query)
}
//Execute Query
case .ended:
//Stop Query
healthStore.stop(self.currentQuery!)
session = nil
default:
print("Unexpected state: \(toState)")
}
}
func workoutSession(_ workoutSession: HKWorkoutSession, didFailWithError error: Error) {
//Do Nothing
}
func startWorkout(){
// If a workout has already been started, do nothing.
if (session != nil) {
return
}
// Configure the workout session.
let workoutConfiguration = HKWorkoutConfiguration()
workoutConfiguration.activityType = .running
workoutConfiguration.locationType = .outdoor
do {
session = try HKWorkoutSession(configuration: workoutConfiguration)
session?.delegate = self
} catch {
fatalError("Unable to create workout session")
}
healthStore.start(self.session!)
//print("Start Workout Session")
// Here audio?
/**
if audioRecorder == nil {
print("Pressed")
filename = NSUUID().uuidString+".wav"
self.startRecording()
} else {
let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
let url = URL(fileURLWithPath: path)
print("Filename\(filename!)")
let pathPart = url.appendingPathComponent(filename!)
let filePath = pathPart.path
let request = NSMutableURLRequest(url: NSURL(string: "http://147.46.242.219/addsound.php")! as URL)
request.httpMethod = "POST"
let audioData = NSData(contentsOfFile: filePath)
print("Result is\(getFileUrl().path)")
print("Binary data printing")
print(audioData)
let postString = "a=\(audioData)"
request.httpBody = postString.data(using: .utf8)
let task = URLSession.shared.dataTask(with: request as URLRequest){
data, response, error in
if error != nil {
print("error=\(error)")
return
}
print("response = \(response)")
let responseString = NSString(data: data!, encoding: String.Encoding.utf8.rawValue)
print("responseString = \(responseString)")
}
task.resume()
print("Pressed2")
self.finishRecording(success: true)
}
*/
}
func heartRateQuery(_ startDate: Date) -> HKQuery? {
let datePredicate = HKQuery.predicateForSamples(withStart: startDate, end: nil, options: .strictEndDate)
let predicate = NSCompoundPredicate(andPredicateWithSubpredicates:[datePredicate])
let heartRateQuery = HKAnchoredObjectQuery(type: hrType, predicate: predicate, anchor: nil, limit: Int(HKObjectQueryNoLimit)) { (query, sampleObjects, deletedObjects, newAnchor, error) -> Void in
//Do nothing
}
heartRateQuery.updateHandler = {(query, samples, deleteObjects, newAnchor, error) -> Void in
guard let samples = samples as? [HKQuantitySample] else {return}
DispatchQueue.main.async {
guard let sample = samples.first else { return }
// after extraction of bpm value conversion to double
let value = sample.quantity.doubleValue(for: HKUnit(from: "count/min"))
//print("Type of value is +\(type(of:value))")
let request = NSMutableURLRequest(url: NSURL(string: "http://147.46.242.219/addall.php")! as URL)
request.httpMethod = "POST"
//print(self.movement)
//let randomStr = 42.0
let postString = "gps_x=\(self.manualLat)&gps_y=\(self.manualLong)&a=\(self.movement)&hr=\(value)"
//print(postString)
request.httpBody = postString.data(using: .utf8)
let task = URLSession.shared.dataTask(with: request as URLRequest) {
data, response, error in
if error != nil {
//print("error=\(error)")
return
}
//print("response = \(response)")
let responseString = NSString(data: data!, encoding: String.Encoding.utf8.rawValue)
//print("responseString = \(responseString)")
}
task.resume()
//print("This line is executed!")
//print(String(UInt16(value)))
}
}
return heartRateQuery
}
}
class HealthDataService {
internal let healthKitStore:HKHealthStore = HKHealthStore()
init() {}
func authorizeHealthKitAccess(_ completion: ((_ success:Bool, _ error:Error?) -> Void)!) {
let typesToShare = Set([hrType])
let typesToSave = Set([hrType])
healthKitStore.requestAuthorization(toShare: typesToShare, read: typesToSave) { (success, error) in
completion(success, error)
}
}
}
After I tried running your project in Xcode on an Apple Watch, it crashed and the console said Could not cast value of type 'BullyingDetection_WatchKit_Extension.LocationOutside' (0x92ca0) to 'CLLocationManagerDelegate' (0x6b433f84).
On line 20 of your LocationOutside class, locationManager.delegate = (self as! CLLocationManagerDelegate) will have to become locationManager.delegate = self, and the class will need to conform to the CLLocationManagerDelegate protocol, which also requires inheriting from NSObject:
class LocationOutside: NSObject, CLLocationManagerDelegate {
}
This should lead you in the right direction.
It may also help to read about delegation here: https://docs.swift.org/swift-book/LanguageGuide/Protocols.html#ID276.
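For completeness, a minimal sketch of the corrected class, reusing the method bodies from the question; only the declaration, the NSObject initializer, and the delegate assignment change:
import CoreLocation

class LocationOutside: NSObject, CLLocationManagerDelegate {
    let locationManager = CLLocationManager()

    override init() {
        super.init()
        locationManager.requestWhenInUseAuthorization()
        locationManager.desiredAccuracy = kCLLocationAccuracyHundredMeters
        locationManager.delegate = self   // no forced cast needed any more
        locationManager.requestLocation()
    }

    func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) {
        let currentLoc = locations[0]
        print(currentLoc.coordinate.latitude)
        print(currentLoc.coordinate.longitude)
    }

    func locationManager(_ manager: CLLocationManager, didFailWithError error: Error) {
        if let locationErr = error as? CLError {
            switch locationErr {
            case CLError.locationUnknown:
                print("unknown location")
            case CLError.denied:
                print("denied")
            default:
                print("another type of location error")
            }
        } else {
            print("other error: ", error.localizedDescription)
        }
    }
}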
I have a table view with multiple cells, and each cell has its own AVAudioPlayer.
I'm facing a problem: I don't know how to manage the players.
When I play the first AVAudioPlayer and then play a second one, the sounds overlap.
How do I stop the first AVAudioPlayer in my customized cell and play the second one?
Thanks.
This is my customized cell:
class TableViewCell: UITableViewCell {
@IBOutlet weak var myImageView: UIImageView!
@IBOutlet weak var myChatBubbleView: UIView!
@IBOutlet weak var myDateLabel: UILabel!
@IBOutlet weak var mySecondLabel: UILabel!
@IBOutlet weak var myRecordPlayerBtn: MenuButton!
private var timer:Timer?
private var elapsedTimeInSecond:Int = 0
var audioPlayer:AVAudioPlayer?
var message:ChatroomMessage?
var chatroomId:String = ""
var delegate:PlayRecordDelegate?
override func awakeFromNib() {
super.awakeFromNib()
// Initialization code
self.backgroundColor = defaultBackgroundColor
self.tintColor = defaultChatroomCheckButtonColor
myImageView.layer.masksToBounds = true
myImageView.layer.cornerRadius = defaultIconRadius
myChatBubbleView.backgroundColor = defaultChatGreenBubbleColor
myChatBubbleView.layer.cornerRadius = defaultButtonRadius
myDateLabel.textColor = defaultChatTimeColor
mySecondLabel.textColor = defaultChatTimeColor
mySecondLabel.isHidden = true
myRecordPlayerBtn.imageView?.animationDuration = 1
myRecordPlayerBtn.imageView?.animationImages = [
UIImage(named: "img_myRocordPlaying1")!,
UIImage(named: "img_myRocordPlaying2")!,
UIImage(named: "img_myRocordPlaying3")!
]
}
override func setSelected(_ selected: Bool, animated: Bool) {
super.setSelected(selected, animated: animated)
// Configure the view for the selected state
}
func loadByMessage(_ message:ChatroomMessage, chatroomId:String) {
self.message = message
self.chatroomId = chatroomId
myRecordPlayerBtn.addTarget(self, action: #selector(recordPlay), for: .touchUpInside)
}
func resetRecordAnimation() {
self.myRecordPlayerBtn.imageView!.stopAnimating()
self.myRecordPlayerBtn.isSelected = false
}
func recordPlay(_ sender: UIButton) {
self.myRecordPlayerBtn.imageView?.startAnimating()
let documentsDirectoryURL = try! FileManager().url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("\(chatroomId)/Record/")
let fileName = message?.content.substring(from: 62)
let fileURL = documentsDirectoryURL.appendingPathComponent(fileName!)
if FileManager.default.fileExists(atPath: fileURL.path) {
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
self.elapsedTimeInSecond = Int(audioDurationSeconds)
if audioPlayer?.isPlaying == true {
audioPlayer?.stop()
DispatchQueue.main.async {
self.resetTimer(second: self.elapsedTimeInSecond)
self.startTimer()
}
}
updateTimeLabel()
startTimer()
audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
audioPlayer?.delegate = self
audioPlayer?.play()
}else{
//don't have file in local
let recordUrl = URL(string: (message?.content)!)
URLSession.shared.downloadTask(with: recordUrl!, completionHandler: { (location, response, error) in
guard
let httpURLResponse = response as? HTTPURLResponse, httpURLResponse.statusCode == 200,
let mimeType = response?.mimeType, mimeType.hasPrefix("audio"),
let location = location, error == nil
else { return }
do {
try FileManager.default.moveItem(at: location, to: fileURL)
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
self.elapsedTimeInSecond = Int(audioDurationSeconds)
DispatchQueue.main.async {
self.updateTimeLabel()
self.startTimer()
}
self.audioPlayer = try? AVAudioPlayer(contentsOf: fileURL)
self.audioPlayer?.delegate = self
self.audioPlayer?.play()
} catch {
print(error)
}
}).resume()
}
}
func startTimer() {
timer?.invalidate()
timer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true, block: { (timer) in
self.elapsedTimeInSecond -= 1
self.updateTimeLabel()
})
}
func resetTimer(second:Int) {
timer?.invalidate()
elapsedTimeInSecond = second
updateTimeLabel()
}
func updateTimeLabel() {
let seconds = elapsedTimeInSecond % 60
let minutes = (elapsedTimeInSecond/60) % 60
mySecondLabel.isHidden = false
mySecondLabel.text = String(format: "%02d:%02d", minutes,seconds)
}
}
extension TableViewCell:AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
let documentsDirectoryURL = try! FileManager().url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true).appendingPathComponent("\(Id)/Record/")
let fileName = message?.content.substring(from: 62)
let fileURL = documentsDirectoryURL.appendingPathComponent(fileName!)
if FileManager.default.fileExists(atPath: fileURL.path) {
let asset = AVURLAsset(url: URL(fileURLWithPath: fileURL.path), options: nil)
let audioDuration = asset.duration
let audioDurationSeconds = CMTimeGetSeconds(audioDuration)
DispatchQueue.main.async {
self.resetTimer(second: Int(audioDurationSeconds))
self.myRecordPlayerBtn.imageView!.stopAnimating()
self.myRecordPlayerBtn.imageView?.image = #imageLiteral(resourceName: "img_myRocordDefault")
}
}
}
}
First check whether your player exists and is playing before starting another one:
if audioPlayer != nil{
if audioPlayer?.isPlaying == true {
audioPlayer?.stop()
DispatchQueue.main.async {
self.resetTimer(second: self.elapsedTimeInSecond)
self.startTimer()
}
}
}
If you don't want to play two audio tracks at the same time, you should use a shared instance of AVAudioPlayer.
It is better for performance, and you can define the instance as a static var in a manager class so that it is accessible from each cell.
I developed a music player application, and I used a shared instance in a MusicPlayManager:
class MusicPlayManager {
var player: AVAudioPlayer?
static let sharedInstance = MusicPlayManager()
private init() { }
// something else, such as playNext, playPrevious methods
}
In your view controller, you can use MusicPlayManager.sharedInstance.player.
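A hedged usage sketch of that idea inside the cell from the question, with a hypothetical playRecord(at:) helper and assuming fileURL has already been resolved the way recordPlay does above: stopping the shared player before starting the new file is what prevents the overlap.
// Sketch: one shared player for all cells; starting a new clip stops the previous one.
func playRecord(at fileURL: URL) {
    let manager = MusicPlayManager.sharedInstance
    manager.player?.stop()                                  // stops audio started from any other cell
    manager.player = try? AVAudioPlayer(contentsOf: fileURL)
    manager.player?.delegate = self                         // the cell already conforms to AVAudioPlayerDelegate
    manager.player?.play()
}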
I have an app which records sound from the microphone and then sends it to my website through an NSURLRequest. To test it, I added playback of the audio from the website so I can hear whether it worked.
When I test it on the simulator, everything works fine: audio is recorded and uploaded and I can hear it. But when I install it on my iPhone, I cannot hear anything, and on my website there is a corrupted audio file.
My TestNahravani.swift Code:
import UIKit
import AVFoundation
class TestNahravani: UIViewController, AVAudioRecorderDelegate, AVAudioPlayerDelegate {
@IBOutlet weak var recordButton: UIButton!
@IBOutlet weak var playButton: UIButton!
var soundRecorder: AVAudioRecorder!
var soundPlayer:AVAudioPlayer?
let fileName = "demo.m4a"
override func viewDidLoad() {
super.viewDidLoad()
setupRecorder()
}
@IBAction func recordSound(sender: UIButton) {
if (sender.titleLabel?.text == "Record"){
soundRecorder.record()
sender.setTitle("Stop", for: .normal)
playButton.isEnabled = false
} else {
soundRecorder.stop()
sender.setTitle("Record", for: .normal)
}
}
@IBAction func playSound(sender: UIButton) {
if (sender.titleLabel?.text == "Play"){
recordButton.isEnabled = false
sender.setTitle("Stop", for: .normal)
preparePlayer()
} else {
soundPlayer?.stop()
sender.setTitle("Play", for: .normal)
}
}
// MARK:- AVRecorder Setup
func setupRecorder() {
//set the settings for recorder
let recordSettings = [AVSampleRateKey : NSNumber(value: Float(44100.0)),
AVNumberOfChannelsKey : NSNumber(value: 2),
AVEncoderAudioQualityKey : NSNumber(value: Int32(AVAudioQuality.max.rawValue)),
AVFormatIDKey : NSNumber(value: kAudioFormatMPEG4AAC)]
var error: NSError?
do {
soundRecorder = try AVAudioRecorder(url: getFileURL() as URL, settings: recordSettings)
} catch let error1 as NSError {
error = error1
soundRecorder = nil
}
if let err = error {
print("AVAudioRecorder error: \(err.localizedDescription)")
} else {
soundRecorder.delegate = self
soundRecorder.prepareToRecord()
}
}
// MARK:- Prepare AVPlayer
func preparePlayer() {
var errorX: NSError?
let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let docsDir: AnyObject=dirPaths[0] as AnyObject
var recordedFilePath : String = docsDir.appendingPathComponent(fileName)
let recordedFileURL = getFileURL()
// "currentFilename", "recordedFilePath" and "recordedFileURL" are all global variables
// This recording stored at "recordedFileURL" can be played back fine.
let sendToPath = "http://www.kvetinac97.cz/jt.php"
let sendToURL = NSURL(string: sendToPath)
let recording: NSData! = NSData(contentsOf: recordedFileURL as URL)
if recording == nil {
recordedFilePath = "FailedUpload"
}
let boundary = "--------14737809831466499882746641449----"
let contentType = "multipart/form-data;boundary=\(boundary)"
let beginningBoundary = "--\(boundary)"
let endingBoundary = "--\(boundary)--"
let header = "Content-Disposition: form-data; name=\"\(fileName)\"; filename=\"\(recordedFilePath)\"\r\n"
let body = NSMutableData()
body.append(("\(beginningBoundary)\r\n" as NSString).data(using: String.Encoding.utf8.rawValue)!)
body.append((header as NSString).data(using: String.Encoding.utf8.rawValue)!)
body.append(("Content-Type: application/octet-stream\r\n\r\n" as NSString).data(using: String.Encoding.utf8.rawValue)!)
body.append(recording! as Data) // adding the recording here
body.append(("\r\n\(endingBoundary)\r\n" as NSString).data(using: String.Encoding.utf8.rawValue)!)
let request = NSMutableURLRequest()
request.url = sendToURL! as URL
request.httpMethod = "POST"
request.addValue(contentType, forHTTPHeaderField: "Content-Type")
request.addValue("multipart/form-data", forHTTPHeaderField: "Accept")
request.httpBody = body as Data
let session = URLSession.shared
let task = session.dataTask(with: request as URLRequest, completionHandler: { (data, response, error) -> Void in
if let data = NSData(contentsOf: NSURL(string: "http://www.kvetinac97.cz/uploads/demo.m4a")! as URL) {
do {
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
try AVAudioSession.sharedInstance().setActive(true)
self.soundPlayer = try AVAudioPlayer(data: data as Data, fileTypeHint: AVFileType.m4a.rawValue)
self.soundPlayer!.delegate = self
self.soundPlayer!.prepareToPlay()
self.soundPlayer!.volume = 1.0
self.soundPlayer!.play()
} catch let error1 as NSError {
errorX = error1
self.soundPlayer = nil
print ("Chyba nejaka \(error1)")
}
}
else {
print ("Smulicka")
}
})
task.resume()
}
func generateBoundaryString() -> String {
return "Boundary-\(NSUUID().uuidString)"
}
// MARK:- File URL
func getCacheDirectory() -> String {
let paths = NSSearchPathForDirectoriesInDomains(.cachesDirectory,.userDomainMask, true)
return paths[0]
}
func getFileURL() -> NSURL {
let path = getCacheDirectory().appending(fileName)
let filePath = NSURL(fileURLWithPath: path)
return filePath
}
// MARK:- AVAudioPlayer delegate methods
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
recordButton.isEnabled = true
playButton.setTitle("Play", for: .normal)
}
// MARK:- AVAudioRecorder delegate methods
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
playButton.isEnabled = true
recordButton.setTitle("Record", for: .normal)
}
// MARK:- didReceiveMemoryWarning
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
}
I figured out where my problem was.
As it appears, the iPhone simulator does not require AVAudioSession to be activated, whereas a real iPhone does.
So it can be fixed easily by adding
try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
try AVAudioSession.sharedInstance().setActive(true)
before audioRecorder initialization.
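In context, a minimal sketch of setupRecorder with those two lines added; the settings, getFileURL() and the older AVAudioSessionCategoryRecord constant are kept exactly as they appear in the question and the answer:
func setupRecorder() {
    let recordSettings = [AVSampleRateKey: NSNumber(value: Float(44100.0)),
                          AVNumberOfChannelsKey: NSNumber(value: 2),
                          AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.max.rawValue)),
                          AVFormatIDKey: NSNumber(value: kAudioFormatMPEG4AAC)]
    do {
        // Activate the session before creating the recorder; the simulator
        // tolerates skipping this, a real device does not.
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
        soundRecorder = try AVAudioRecorder(url: getFileURL() as URL, settings: recordSettings)
        soundRecorder.delegate = self
        soundRecorder.prepareToRecord()
    } catch {
        print("AVAudioRecorder error: \(error.localizedDescription)")
    }
}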