I'm having an issue when moving a view that contains a SwiftUI view near the edges of the screen: the SwiftUI view shifts its content to avoid being covered by the safe area insets.
I'm using UIKit to handle dragging the view, via UIHostingController and a UIPanGestureRecognizer. Here's the code:
import UIKit
import SwiftUI
class ViewController: UIViewController {
var contentView: UIView!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
let contentVc = UIHostingController(rootView: Content())
addChild(contentVc)
contentView = contentVc.view
let contentHeight = contentView.sizeThatFits(view.bounds.size).height
contentView.frame = CGRect(x: 0, y: 200, width: view.bounds.width, height: contentHeight)
view.addSubview(contentView)
contentVc.didMove(toParent: self)
let drag = UIPanGestureRecognizer(target: self, action: #selector(drag(_:)))
contentView.addGestureRecognizer(drag)
view.backgroundColor = .orange
}
var startingPoint = CGPoint.zero
@objc func drag(_ gesture: UIPanGestureRecognizer) {
switch gesture.state {
case .began:
startingPoint = contentView.frame.origin
case .changed:
let location = gesture.translation(in: view)
contentView.frame.origin.y = startingPoint.y + location.y
default:
break
}
}
}
import SwiftUI
struct Content: View {
var body: some View {
VStack {
ForEach(0..<10) { i in
Text(String(i))
}
}
.background(Color.blue)
.padding(.vertical, 32)
.ignoresSafeArea()
}
}
What I expect is for the blue view not to adjust itself vertically (in the preview, the blue view's top padding is shrinking).
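For context, the shift happens because the hosting controller's view feeds the screen's safe-area insets into its SwiftUI content, so the resolved safe area changes as the view is dragged under the status bar. As a side note (and the availability here is an assumption on my part), newer SDKs appear to let you opt the hosting controller out of this directly. A minimal sketch, assuming safeAreaRegions exists on UIHostingController from iOS 16.4:

// In viewDidLoad, right after creating the hosting controller:
if #available(iOS 16.4, *) {
    // Assumed API: an empty set stops the hosting controller from
    // applying any safe-area insets to the SwiftUI hierarchy.
    contentVc.safeAreaRegions = []
}

On older deployment targets, the two approaches below are what worked for me.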
I found two ways to solve this issue:
Way 1: Using a UIKit gesture!
class ViewController: UIViewController {
var contentView: UIView!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
let contentVc = UIHostingController(rootView: Content())
addChild(contentVc)
contentView = contentVc.view
let contentHeight = contentView.sizeThatFits(view.bounds.size).height
contentView.frame = CGRect(x: 0, y: 100, width: view.bounds.width, height: contentHeight)
contentView.backgroundColor = .clear
view.addSubview(contentView)
contentVc.didMove(toParent: self)
let drag = UIPanGestureRecognizer(target: self, action: #selector(drag(_:)))
contentView.addGestureRecognizer(drag)
view.backgroundColor = .orange
}
var startingPoint = CGPoint.zero
@objc func drag(_ gesture: UIPanGestureRecognizer) {
switch gesture.state {
case .began:
startingPoint = contentView.frame.origin
case .changed:
let location = gesture.translation(in: view)
contentView.frame.origin.y = startingPoint.y + location.y
default:
break
}
}
}
import SwiftUI
struct Content: View {
var body: some View {
VStack {
ForEach(0..<10) { i in
Text(String(i))
}
}
.padding()
.background(Color.blue)
.frame(maxWidth: .infinity)
.background(Color.white)
}
}
Way 2: Using a SwiftUI gesture!
class ViewController: UIViewController {
var contentView: UIView!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
let contentVc = UIHostingController(rootView: Content())
addChild(contentVc)
contentView = contentVc.view
contentView.frame = CGRect(x: 0, y: 0, width: view.bounds.width, height: view.bounds.height)
contentView.backgroundColor = .clear
view.addSubview(contentView)
contentVc.didMove(toParent: self)
view.backgroundColor = .orange
}
}
struct Content: View {
@State private var offset: CGFloat = .zero
@State private var lastOffset: CGFloat = .zero
var body: some View {
VStack {
ForEach(0..<10) { i in
Text(String(i))
}
}
.padding()
.background(Color.blue)
.frame(maxWidth: .infinity)
.background(Color.white)
.offset(y: offset)
.gesture(DragGesture(minimumDistance: .zero, coordinateSpace: .local).onChanged { value in
offset = lastOffset + value.translation.height
}
.onEnded { value in
lastOffset = lastOffset + value.translation.height
offset = lastOffset
})
}
}
I have spent a lot of time researching this question and have tried a few approaches but none have worked for me. Any help would be greatly appreciated by this Swift noob.
To gain more MapKit experience, I am trying to show the user their path with a polyline on the map that follows their movements, similar to running apps that track the user. I can track the user just fine, but I cannot show a live map with a polyline that updates as they move. When I close the map and come back to it, the polyline is updated with the missing coordinates. **I suspect the answer to my problem relates to this :)**
Image showing the map after it loads and the user moves away from the polyline. Closing the map and reopening it redraws the missing points of the polyline.
LocationViewModel.swift
var locationArray: [CLLocation] = []
var coordArray: [CLLocationCoordinate2D] = []
(...)
var coordinates2D:[CLLocationCoordinate2D] {
var coordArray = [CLLocationCoordinate2D]()
for c in locationArray {
let lat = c.coordinate.latitude
let long = c.coordinate.longitude
let x = CLLocationCoordinate2D(latitude: lat, longitude: long)
coordArray.append(x)
}
return coordArray
}
(...)
func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) {
for newLocation in locations {
let howRecent = newLocation.timestamp.timeIntervalSinceNow
guard newLocation.horizontalAccuracy < 20 && abs(howRecent) < 10 else { continue }
speeds.append(contentsOf: locationArray.map { $0.speed })     // append all new speed updates to the array
altitude.append(contentsOf: locationArray.map { $0.altitude }) // append all new altitude updates to the array
locationArray.append(newLocation)
}
}
SecondMap.swift
import Foundation
import SwiftUI
import MapKit
struct SecondMap: UIViewRepresentable {
typealias UIViewType = MKMapView
@EnvironmentObject var model: LocationsModel
@State var regionZoom = LocationService.sharedLocInstance.locationManager.location?.coordinate
func makeUIView(context: Context) -> MKMapView {
let mapView = MKMapView()
mapView.showsUserLocation = true
mapView.userTrackingMode = .followWithHeading
return mapView
}
func updateUIView(_ uiView: MKMapView, context: Context) {
// uiView.removeAnnotation(uiView.annotations)
// uiView.showAnnotations(self.locations, animated: true)
if regionZoom != nil {
let span = MKCoordinateSpan(latitudeDelta: 0.005, longitudeDelta: 0.005)
let region = MKCoordinateRegion(center: regionZoom!, span: span)
uiView.setRegion(region, animated: true)
}
let polyline = MKPolyline(coordinates: model.coordinates2D, count: model.coordinates2D.count)
uiView.removeOverlay(polyline)
uiView.addOverlay(polyline)
uiView.delegate = context.coordinator
}
static func dismantleUIView(_ uiView: MKMapView, coordinator: ()) {
// uiView.removeAnnotation(uiView.annotations)
}
//MARK: - Create Coordinator Class
func makeCoordinator() -> Coordinator {
return Coordinator()
}
class Coordinator: NSObject, MKMapViewDelegate {
func mapView(_ mapView: MKMapView, viewFor annotation: MKAnnotation) -> MKAnnotationView? {
// If the annotation is the user dot then return nil
if annotation is MKUserLocation {
return nil
}
// Create an annotation view
let annotationView = MKMarkerAnnotationView(annotation: annotation, reuseIdentifier: "business")
annotationView.canShowCallout = false
annotationView.rightCalloutAccessoryView = UIButton(type: .detailDisclosure)
annotationView.isHidden = true
return annotationView
}
func mapView(_ mapView: MKMapView, rendererFor overlay: MKOverlay) -> MKOverlayRenderer {
if let routePolyline = overlay as? MKPolyline {
let renderer = MKPolylineRenderer(polyline: routePolyline)
renderer.strokeColor = UIColor.blue
renderer.lineWidth = 7
return renderer
}
return MKOverlayRenderer()
}
}
}
TrackerDetailCard
import SwiftUI
import MapKit
struct TrackerDetailView: View {
@EnvironmentObject var model: LocationsModel
@State var isMapShowing = false
// @State var regionZoom = LocationService.sharedLocInstance.locationManager.location?.coordinate
var body: some View {
NavigationView {
if !isMapShowing {
VStack(alignment: .leading) {
HStack{
Text("Map info and stuff")
Button {
self.isMapShowing = true
} label: {
Text("Launch Map")
.font(.system(size: 12))
.foregroundColor(.gray)
}
.buttonStyle(NEUMORPHISM_BUTTON())
} // HSTACK END
HStack {
Button {
LocationService.sharedLocInstance.locationManager.startUpdatingLocation()
} label: {
Text("Record runs")
.font(.system(size: 12))
.foregroundColor(.gray)
}
.buttonStyle(NEUMORPHISM_BUTTON_SQ())
Button {
LocationService.sharedLocInstance.locationManager.stopUpdatingLocation()
} label: {
Text("End recording")
.font(.system(size: 12))
.foregroundColor(.gray)
}
.buttonStyle(NEUMORPHISM_BUTTON_SQ())
Button {
print("button tapped")
} label: {
Text("Something")
.font(.system(size: 12))
.foregroundColor(.gray)
}
.buttonStyle(NEUMORPHISM_BUTTON_SQ())
}
VStack{
Text("Average Speed: \(model.avgSpeed)")
Text("Top Speed: \(model.topSpeed)")
// Text("Top Speed: \(model.altitude)")
}
Spacer()
} // VSTACK END
.navigationBarHidden(true)
}
else{
ZStack(alignment: .top){
// show map
SecondMap()
.ignoresSafeArea()
ZStack{
Rectangle()
.foregroundColor(.white)
.cornerRadius(5)
.frame(height: 48)
HStack{
Image(systemName: "location")
Spacer()
Button("Back to home") {
self.isMapShowing = false
}
}.padding()
}.padding()
}
}
} // BODY END
}
}
I have a SwiftUI app that includes a map. I want to capture an image of the map and display that image as a SwiftUI Image on another view. I have been unable to find any documentation on this. I tried two approaches at capturing it and neither of them works. See the extensions below.
This is a simplified example:
ContentView:
struct ContentView: View {
@State private var showDetail: Bool = false
@State private var thumbImage: Image = Image(systemName: "gear")
var body: some View {
VStack {
Text("This is the ContentView")
if showDetail {
DetailMapView(thumbImage: $thumbImage)
}
if !showDetail {
Image(systemName: "gear")
.resizable()
.frame(width: 200, height: 200)
}
Button(action: {
self.showDetail.toggle()
}) {
Text("Tap for Map")
}
}
}
}
And the MapView:
struct DetailMapView: UIViewRepresentable {
typealias UIViewType = MKMapView
@Binding var thumbImage: Image
class Coordinator: NSObject, MKMapViewDelegate {
var parent: DetailMapView
init(_ parent: DetailMapView) {
self.parent = parent
}
}//coordinator
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
func makeUIView(context: Context) -> MKMapView {
let mapView = MKMapView()
mapView.showsUserLocation = true
mapView.delegate = context.coordinator
// this does not work - it crashes
// let s = mapView.pb_takeSnapshot()
// self.thumbImage = Image(uiImage: s)
//this does not work either - it produces lots of console complaints
let t = mapView.screenshot
DispatchQueue.main.async {
self.thumbImage = Image(uiImage: t)
}
return mapView
}
}
extension UIView {
func pb_takeSnapshot() -> UIImage {
UIGraphicsBeginImageContextWithOptions(bounds.size, false, UIScreen.main.scale)
drawHierarchy(in: self.bounds, afterScreenUpdates: true)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return image!
}
}
extension UIView {
var screenshot: UIImage{
UIGraphicsBeginImageContextWithOptions(self.bounds.size, false, 0)
guard let context = UIGraphicsGetCurrentContext() else { return UIImage() }
self.layer.render(in: context)
guard let screenShot = UIGraphicsGetImageFromCurrentImageContext() else { return UIImage() };
UIGraphicsEndImageContext()
return screenShot
}
}
Console output for the screenshot version:
[VKDefault] TextureAtlasPage: Atlas page destroyed with outstanding references.: Assertion with expression - _textureRefs == 0 : Failed in file - /AppleInternal/BuildRoot/Library/Caches/com.apple.xbs/Sources/VectorKit_Sim/VectorKit-1606.34.10.29.27/src/TextureAtlas.cpp line - 604
[VKDefault] TextureAtlasPage: Atlas page destroyed with outstanding references.: Assertion with expression - _textureRefs == 0 : Failed in file - /AppleInternal/BuildRoot/Library/Caches/com.apple.xbs/Sources/VectorKit_Sim/VectorKit-1606.34.10.29.27/src/TextureAtlas.cpp line - 604
Any guidance would be appreciated. Xcode 11.4 (11E146)
For others:
This works for me; forget the extensions above. The key is to use mapViewDidFinishRenderingMap(_:fullyRendered:), and that function needs to be inside the Coordinator class.
func mapViewDidFinishRenderingMap(_ mapView: MKMapView, fullyRendered: Bool) {
//setup whatever region you want to see :mapView.setRegion(region, animated: true)
let render = UIGraphicsImageRenderer(size: mapView.bounds.size)
let ratio = mapView.bounds.size.height / mapView.bounds.size.width
let img = render.image { (ctx) in
mapView.drawHierarchy(in: CGRect(x: 100, y: 100, width: 300, height: 300 * ratio), afterScreenUpdates: true)
}
DispatchQueue.main.async {
self.parent.thumbImage = Image(uiImage: img)
}
}
I would recommend using MKMapSnapshotter from Apple, because if you render the map manually you always get the Apple logo with it... which you "normally" do not want ;)
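To illustrate, here is a minimal sketch of the MKMapSnapshotter route; the helper name and the idea of feeding the result back into the thumbImage binding are my own, not from the question:

import MapKit
import UIKit

func makeSnapshot(of region: MKCoordinateRegion,
                  size: CGSize,
                  completion: @escaping (UIImage?) -> Void) {
    let options = MKMapSnapshotter.Options()
    options.region = region
    options.size = size

    let snapshotter = MKMapSnapshotter(options: options)
    snapshotter.start { snapshot, _ in
        // snapshot?.image is a plain UIImage rendered off-screen,
        // so no live MKMapView has to be on screen to capture it.
        DispatchQueue.main.async {
            completion(snapshot?.image)
        }
    }
}

The resulting UIImage can then be wrapped in Image(uiImage:) and assigned to thumbImage, just as the drawHierarchy version above does.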
Right now my app displays a customUserAnnotationView with a custom image where the user annotation is (you can see this in ViewController.swift). I have also created a custom UIView that I want to use as an annotation just above the user annotation (the code and image for it are under SpeechBubble.swift).
I want to combine these two objects so that I can show the CustomUserAnnotationView with the custom UIView (SpeechBubble.swift) placed in an annotation above it.
My attempts at making a Frankenstein program from multiple Mapbox tutorials have not worked out for me. I only want to place the custom annotation class I created above the image, and maybe add a small triangle to make it look like a speech bubble.
ViewController.swift
import Mapbox
class ViewController: UIViewController, MGLMapViewDelegate {
override func viewDidLoad() {
super.viewDidLoad()
let mapView = MGLMapView(frame: view.bounds)
mapView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
mapView.delegate = self
// Enable heading tracking mode so that the arrow will appear.
mapView.userTrackingMode = .followWithHeading
// Enable the permanent heading indicator, which will appear when the tracking mode is not `.followWithHeading`.
mapView.showsUserHeadingIndicator = true
view.addSubview(mapView)
let idea = UITextView(frame: CGRect(x: 0, y: 0, width: 100, height: 40))
idea.text = "Hello There"
idea.textAlignment = NSTextAlignment.center
let sb = SpeechBubble(coord: mapView.targetCoordinate, idea: idea)
mapView.addSubview(sb)
}
func mapView(_ mapView: MGLMapView, annotationCanShowCallout annotation: MGLAnnotation) -> Bool {
return true
}
func mapView(_ mapView: MGLMapView, viewFor annotation: MGLAnnotation) -> MGLAnnotationView? {
// Substitute our custom view for the user location annotation. This custom view is defined below.
if annotation is MGLUserLocation && mapView.userLocation != nil {
return Avatar()
}
return nil
}
// Optional: tap the user location annotation to toggle heading tracking mode.
func mapView(_ mapView: MGLMapView, didSelect annotation: MGLAnnotation) {
if mapView.userTrackingMode != .followWithHeading {
mapView.userTrackingMode = .followWithHeading
} else {
mapView.resetNorth()
}
// We're borrowing this method as a gesture recognizer, so reset selection state.
mapView.deselectAnnotation(annotation, animated: false)
}
}
SpeechBubble.swift
import UIKit
import Mapbox
class SpeechBubble: UIView, MGLMapViewDelegate{
//var sbView: UIView
init(coord: CLLocationCoordinate2D, idea: UITextView) {
let width = CGFloat(180)
let height = UITextField.layoutFittingExpandedSize.height + 32
super.init(frame: CGRect(x: CGFloat(coord.latitude), y: CGFloat(coord.longitude), width: width, height: height))
self.addSubview(idea)
self.addSubview(buttonsView());
self.addSubview(upvoteView());
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func upvoteView() -> UIView {
let uView = UIView()
let vCnt = UILabel(frame: CGRect(x: 0, y: 0, width: 200, height: 21))
vCnt.center = CGPoint(x: 10.5, y: 32)
vCnt.textAlignment = .center
vCnt.text = "0"
let uButton = UIButton(type: .custom)
uButton.frame = CGRect(x: vCnt.frame.size.width + 5, y: 0, width: 32, height: 32);
let uImage = UIImage (named: "Upvote")
uButton.setImage(uImage, for: .normal)
uView.frame.size.width = vCnt.frame.size.width + uButton.frame.size.width + 5
uView.frame.size.height = max(vCnt.frame.size.height, uButton.frame.size.height)
uView.frame = CGRect(
x: 0,
y: self.frame.size.height - uView.frame.size.height,
width: uView.frame.size.width,
height: uView.frame.size.height );
uView.addSubview(vCnt)
uView.addSubview(uButton)
return uView
}
func buttonsView() -> UIView {
let bView = UIView()
let rButton = UIButton(type: .custom)
rButton.frame = CGRect(x: 0, y: 0, width: 35, height: 32)
let rImage = UIImage (named: "Rocket")
rButton.setImage(rImage, for: .normal)
let pButton = UIButton(type: .custom)
pButton.frame = CGRect(x: rButton.frame.size.width + 5, y: 0, width: 31, height: 36)
let pImage = UIImage (named: "Profile")
pButton.setImage(pImage, for: .normal)
bView.frame.size.width = rButton.frame.size.width + pButton.frame.size.width + 5
bView.frame.size.height = max(rButton.frame.size.height, pButton.frame.size.height)
bView.frame = CGRect(
x: self.frame.size.width - bView.frame.size.width,
y: self.frame.size.height - bView.frame.size.height,
width: bView.frame.size.width,
height: bView.frame.size.height );
bView.addSubview(rButton)
bView.addSubview(pButton)
return bView
}
}
Avatar.swift
import Mapbox
class Avatar: MGLUserLocationAnnotationView {
let size: CGFloat = 48
var arrow: CALayer!
//var arrow: CAShapeLayer!
// -update is a method inherited from MGLUserLocationAnnotationView. It updates the appearance of the user location annotation when needed. This can be called many times a second, so be careful to keep it lightweight.
override func update() {
if frame.isNull {
frame = CGRect(x: 0, y: 0, width: size, height: size)
return setNeedsLayout()
}
// Check whether we have the user’s location yet.
if CLLocationCoordinate2DIsValid(userLocation!.coordinate) {
setupLayers()
updateHeading()
}
}
private func updateHeading() {
// Show the heading arrow, if the heading of the user is available.
if let heading = userLocation!.heading?.trueHeading {
arrow.isHidden = false
// Get the difference between the map’s current direction and the user’s heading, then convert it from degrees to radians.
let rotation: CGFloat = -MGLRadiansFromDegrees(mapView!.direction - heading)
// If the difference would be perceptible, rotate the arrow.
if abs(rotation) > 0.01 {
// Disable implicit animations of this rotation, which reduces lag between changes.
CATransaction.begin()
CATransaction.setDisableActions(true)
arrow.setAffineTransform(CGAffineTransform.identity.rotated(by: rotation))
CATransaction.commit()
}
} else {
arrow.isHidden = true
}
}
private func setupLayers() {
// This dot forms the base of the annotation.
if arrow == nil {
arrow = CALayer()
let myImage = UIImage(named: "will_smith")?.cgImage
arrow.bounds = CGRect(x: 0, y: 0, width: size, height: size)
arrow.contents = myImage
layer.addSublayer(arrow)
}
}
// Calculate the vector path for an arrow, for use in a shape layer.
private func arrowPath() -> CGPath {
let max: CGFloat = size / 2
let pad: CGFloat = 3
let top = CGPoint(x: max * 0.5, y: 0)
let left = CGPoint(x: 0 + pad, y: max - pad)
let right = CGPoint(x: max - pad, y: max - pad)
let center = CGPoint(x: max * 0.5, y: max * 0.6)
let bezierPath = UIBezierPath()
bezierPath.move(to: top)
bezierPath.addLine(to: left)
bezierPath.addLine(to: center)
bezierPath.addLine(to: right)
bezierPath.addLine(to: top)
bezierPath.close()
return bezierPath.cgPath
}
}
--------------------------------------------------------------------------------------------------------
UPDATE
I tried to create a Frankenstein program from the answer and my code, and am receiving the following error: Property 'self.representedObject' not initialized at super.init call in SpeechBubble.swift. I also moved all my old code from SpeechBubble.swift into InsideSpeechBubble.swift.
Updated SpeechBubble.swift
import UIKit
import Mapbox
class SpeechBubble: UIView, MGLCalloutView {
// Your IBOutlets //
var representedObject: MGLAnnotation
var annotationPoint: CGPoint
// Required views but unused for this implementation.
lazy var leftAccessoryView = UIView()
lazy var rightAccessoryView = UIView()
var contentView: MGLMapView
weak var delegate: MGLCalloutViewDelegate?
// MARK: - init methods
required init(annotation: MGLAnnotation, frame: CGRect, annotationPoint: CGPoint) {
let idea = UITextView(frame: CGRect(x: 0, y: 0, width: 100, height: 40))
idea.text = "Hello There"
idea.textAlignment = NSTextAlignment.center
self.representedObject = annotation
self.annotationPoint = annotationPoint
contentView = InsideSpeechBubble(coord: annotationPoint, idea: idea )
super.init(frame: frame)
commonInit()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
commonInit()
}
private func commonInit() {
Bundle.main.loadNibNamed("SpeechBubble", owner: self, options: nil)
addSubview(contentView as UIView)
contentView.frame = self.bounds
// Do your initialisation //
}
// MARK: - MGLCalloutView methods
func presentCallout(from rect: CGRect, in view: UIView, constrainedTo constrainedRect: CGRect, animated: Bool) {
// Present the custom callout slightly above the annotation's view. Initially invisible.
self.center = annotationPoint.applying(CGAffineTransform(translationX: 0, y: -self.frame.height - 20.0))
// I have logic here for setting the correct image and button states //
}
func dismissCallout(animated: Bool) {
removeFromSuperview()
}
}
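For reference, that error comes from Swift's two-phase initialization rule: every non-lazy, non-optional stored property (here representedObject, annotationPoint, and contentView) has to be assigned before super.init(coder:) runs. A minimal sketch of the pattern, in the same spirit as the init?(coder:) of the CustomCalloutView in the answer further down; the placeholder values are my own and their content does not matter, because the callout is normally created through init(annotation:frame:annotationPoint:):

required init?(coder aDecoder: NSCoder) {
    // Assign placeholders before calling super.init(coder:) so every
    // stored property is initialized; this is what the compiler error is about.
    self.representedObject = MGLPointAnnotation()
    self.annotationPoint = .zero
    self.contentView = MGLMapView(frame: .zero)
    super.init(coder: aDecoder)
    commonInit()
}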
Updated ViewController.swift
import Mapbox
class ViewController: UIViewController, MGLMapViewDelegate {
//let point = MGLPointAnnotation()
override func viewDidLoad() {
super.viewDidLoad()
let mapView = MGLMapView(frame: view.bounds)
mapView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
mapView.delegate = self
// Enable heading tracking mode so that the arrow will appear.
mapView.userTrackingMode = .followWithHeading
// Enable the permanent heading indicator, which will appear when the tracking mode is not `.followWithHeading`.
mapView.showsUserHeadingIndicator = true
view.addSubview(mapView)
let HighDea = UITextView(frame: CGRect(x: 0, y: 0, width: 100, height: 40))
HighDea.text = "Hello There"
HighDea.textAlignment = NSTextAlignment.center
//let sb = SpeechBubble()
//mapView.addSubview(sb)
}
func mapView(_ mapView: MGLMapView, annotationCanShowCallout annotation: MGLAnnotation) -> Bool {
return true
}
func mapView(_ mapView: MGLMapView, viewFor annotation: MGLAnnotation) -> MGLAnnotationView? {
// Substitute our custom view for the user location annotation. This custom view is defined below.
if annotation is MGLUserLocation && mapView.userLocation != nil {
return Avatar()
}
return nil
}
func mapView(_ mapView: MGLMapView, calloutViewFor annotation: MGLAnnotation) -> MGLCalloutView? {
// Do your annotation-specific preparation here //
// I get the correct size from my xib file.
let viewFrame = CGRect(origin: CGPoint(x: 0, y: 0), size: CGSize(width: 261.0, height: 168.0))
// Get the annotation's location in the view's coordinate system.
let annotationPoint = mapView.convert(annotation.coordinate, toPointTo: nil)
let customCalloutView = SpeechBubble(annotation: annotation, frame: viewFrame, annotationPoint: annotationPoint)
return customCalloutView
}
// func mapView(_ mapView: MGLMapView, viewFor annotation: MGLAnnotation) -> MGLAnnotationView? {
// This example is only concerned with point annotations.
// guard annotation is MGLPointAnnotation else {
// return nil
// }
// Use the point annotation’s longitude value (as a string) as the reuse identifier for its view.
// let reuseIdentifier = "\(annotation.coordinate.longitude)"
// For better performance, always try to reuse existing annotations.
// var annotationView = mapView.dequeueReusableAnnotationView(withIdentifier: reuseIdentifier)
// If there’s no reusable annotation view available, initialize a new one.
// if annotationView == nil {
// annotationView = CustomAnnotationView(reuseIdentifier: reuseIdentifier)
// annotationView!.bounds = CGRect(x: 0, y: 0, width: 40, height: 40)
// Set the annotation view’s background color to a value determined by its longitude.
// let hue = CGFloat(annotation.coordinate.longitude) / 100
// annotationView!.backgroundColor = UIColor(hue: hue, saturation: 0.5, brightness: 1, alpha: 1)
// }
// return annotationView
// }
// Optional: tap the user location annotation to toggle heading tracking mode.
func mapView(_ mapView: MGLMapView, didSelect annotation: MGLAnnotation) {
if mapView.userTrackingMode != .followWithHeading {
mapView.userTrackingMode = .followWithHeading
} else {
mapView.resetNorth()
}
// We're borrowing this method as a gesture recognizer, so reset selection state.
mapView.deselectAnnotation(annotation, animated: false)
}
}
When I implemented a custom callout for my Mapbox annotations I used a xib file to design the actual callout. I find that it gives me a lot more instant feedback than trying to conjure the UI from code (but obviously do whatever your preference is).
Which gives me something like the following.
Using a UIImage for the background allows me to achieve any shape I choose. Here I use transparency around the white to give me the circular elements and the bottom triangle you mention in your question.
The Swift file for this UIView (your SpeechBubble) needs to conform to the MGLCalloutView protocol, not MGLMapViewDelegate as you have it currently. Your ViewController is the MGLMapViewDelegate, not your custom callout. Pair the xib file and the Swift file in the usual way in the Identity Inspector in IB. So it would be something like this:
import UIKit
import Mapbox
class SpeechBubble: UIView, MGLCalloutView {
// Your IBOutlets //
@IBOutlet var contentView: UIView! // The custom callout's view.
var representedObject: MGLAnnotation
var annotationPoint: CGPoint
// Required views but unused for this implementation.
lazy var leftAccessoryView = UIView()
lazy var rightAccessoryView = UIView()
weak var delegate: MGLCalloutViewDelegate?
// MARK: - init methods
required init(annotation: YourAnnotation, frame: CGRect, annotationPoint: CGPoint) {
self.representedObject = annotation
self.annotationPoint = annotationPoint
super.init(frame: frame)
commonInit()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
commonInit()
}
private func commonInit() {
Bundle.main.loadNibNamed("SpeechBubble", owner: self, options: nil)
addSubview(contentView)
contentView.frame = self.bounds
// Do your initialisation //
}
// MARK: - MGLCalloutView methods
func presentCallout(from rect: CGRect, in view: UIView, constrainedTo constrainedRect: CGRect, animated: Bool) {
// Present the custom callout slightly above the annotation's view. Initially invisible.
self.center = annotationPoint.applying(CGAffineTransform(translationX: 0, y: -self.frame.height - 20.0))
// I have logic here for setting the correct image and button states //
}
func dismissCallout(animated: Bool) {
removeFromSuperview()
}
}
Then you just seem to be missing the MGLMapViewDelegate method to actually return your SpeechBubble view when requested. It should be in your ViewController file.
func mapView(_ mapView: MGLMapView, calloutViewFor annotation: MGLAnnotation) -> MGLCalloutView? {
// Do your annotation-specific preparation here //
// I get the correct size from my xib file.
let viewFrame = CGRect(origin: CGPoint(x: 0, y: 0), size: CGSize(width: 261.0, height: 168.0))
// Get the annotation's location in the view's coordinate system.
let annotationPoint = mapView.convert(annotation.coordinate, toPointTo: nil)
let customCalloutView = SpeechBubble(annotation: YourAnnotation, frame: viewFrame, annotationPoint: annotationPoint)
return customCalloutView
}
Hopefully this will get you closer to achieving what you're trying to do. BTW this version of your question is miles ahead of the first one.
EDIT +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
It's going to be almost impossible to work this through without sight of your project, so I have put together a bare-bones implementation. It is based on the Mapbox example here: Mapbox Custom Callout, which for some reason doesn't show how to actually supply the callout view. I've also extended it to allow for a custom annotation image. If you can get this working, you should be able to move the relevant parts into your own project.
I strongly recommend that if you try to implement the stuff below that you do it in a fresh project.
The view controller.
import Mapbox
class ViewController: UIViewController, MGLMapViewDelegate {
override func viewDidLoad() {
super.viewDidLoad()
let mapView = MGLMapView(frame: view.bounds, styleURL: MGLStyle.lightStyleURL)
mapView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
mapView.tintColor = .darkGray
view.addSubview(mapView)
// Set the map view‘s delegate property.
mapView.delegate = self
// Initialize and add the marker annotation.
let coordinate = CLLocationCoordinate2D(latitude: 0, longitude: 0)
let marker = MyAnnotation(coordinate: coordinate, title: "Bingo", subtitle: "Bongo")
// Add marker to the map.
mapView.addAnnotation(marker)
}
func mapView(_ mapView: MGLMapView, annotationCanShowCallout annotation: MGLAnnotation) -> Bool {
return true
}
func mapView(_ mapView: MGLMapView, calloutViewFor annotation: MGLAnnotation) -> MGLCalloutView? {
// Instantiate and return our custom callout view.
let annotationPoint = mapView.convert(annotation.coordinate, toPointTo: nil)
let viewFrame = CGRect(origin: CGPoint(x: 0, y: 0), size: CGSize(width: 250.0, height: 178.0))
return CustomCalloutView(representedObject: annotation, frame: viewFrame, annotationPoint: annotationPoint)
}
func mapView(_ mapView: MGLMapView, viewFor annotation: MGLAnnotation) -> MGLAnnotationView? {
if let annotationView = mapView.dequeueReusableAnnotationView(withIdentifier: "myAnnotationView") {
return annotationView
} else {
let annotationView = MyAnnotationView(reuseIdentifier: "myAnnotationView", size: CGSize(width: 45, height: 45), annotation: annotation)
return annotationView
}
}
func mapView(_ mapView: MGLMapView, tapOnCalloutFor annotation: MGLAnnotation) {
// Optionally handle taps on the callout.
print("Tapped the callout for: \(annotation)")
// Hide the callout.
mapView.deselectAnnotation(annotation, animated: true)
}
}
CustomCalloutView.swift
import UIKit
import Mapbox
class CustomCalloutView: UIView, MGLCalloutView {
@IBOutlet var contentView: UIView!
weak var delegate: MGLCalloutViewDelegate?
var representedObject: MGLAnnotation
var annotationPoint: CGPoint
// Required views but unused for this implementation.
lazy var leftAccessoryView = UIView()
lazy var rightAccessoryView = UIView()
required init(representedObject: MGLAnnotation, frame: CGRect, annotationPoint: CGPoint) {
self.representedObject = representedObject
self.annotationPoint = annotationPoint
super.init(frame: frame)
commonInit()
}
required init?(coder aDecoder: NSCoder) {
let coordinate = CLLocationCoordinate2D(latitude: 0.0, longitude: 0.0)
self.representedObject = MyAnnotation(coordinate: coordinate, title: "", subtitle: "")
self.annotationPoint = CGPoint(x: 50.0, y: 50.0)
super.init(coder: aDecoder)
commonInit()
}
func commonInit() {
Bundle.main.loadNibNamed("CustomCalloutView", owner: self, options: nil)
addSubview(contentView)
}
func presentCallout(from rect: CGRect, in view: UIView, constrainedTo constrainedRect: CGRect, animated: Bool) {
// Present the custom callout slightly above the annotation's view. Initially invisible.
self.center = annotationPoint.applying(CGAffineTransform(translationX: 0.0, y: -120.0))
view.addSubview(self)
}
func dismissCallout(animated: Bool) {
removeFromSuperview()
}
}
This is associated/identified with a xib file. It just contains a simple image shape for now. I had to (re)introduce the contentView IBOutlet as I was having trouble loading things from the Bundle and adding it to self in commonInit() made everything happy.
The custom annotation class.
import UIKit
import Mapbox
// MGLAnnotation protocol reimplementation
class MyAnnotation: NSObject, MGLAnnotation {
// As a reimplementation of the MGLAnnotation protocol, we have to add mutable coordinate and (sub)title properties ourselves.
var coordinate: CLLocationCoordinate2D
var title: String?
var subtitle: String?
// Custom properties that we will use to customize the annotation.
var image: UIImage?
var reuseIdentifier: String?
init(coordinate: CLLocationCoordinate2D, title: String?, subtitle: String?) {
self.coordinate = coordinate
self.title = title
self.subtitle = subtitle
self.reuseIdentifier = "myAnnotation"
}
}
The MGLAnnotationView subclass.
import UIKit
import Mapbox
class MyAnnotationView: MGLAnnotationView {
init(reuseIdentifier: String, size: CGSize, annotation: MGLAnnotation) {
super.init(reuseIdentifier: reuseIdentifier)
// This property prevents the annotation from changing size when the map is tilted.
scalesWithViewingDistance = false
// Begin setting up the view.
frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
let imageView = UIImageView(frame: frame)
var image = UIImage()
if annotation is MyAnnotation {
image = UIImage(named: "frog")!
}
imageView.image = image
addSubview(imageView)
}
override init(frame: CGRect) {
super.init(frame: frame)
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
Naturally there are a lot of hard-coded numbers and a requirement for an image called frog, but you can change all of that and improve it as you wish. The CustomCalloutView.swift and CustomCalloutView.xib need to be linked in the usual way in the Identity Inspector, etc.
I am having an issue where the GMSMarker camera focus changes on any kind of popup alert, or whenever I tap a marker and the app navigates to the Google Maps application. Following is my implementation: I add the Google Maps container to my view controller's header in the viewDidLayoutSubviews method. No idea what's going on. Kindly help.
override func viewDidLayoutSubviews()
{
super.viewDidLayoutSubviews()
if mapView == nil
{
let camera = GMSCameraPosition.camera(withLatitude: 45.582045, longitude:74.32937, zoom: 14.0)
mapView = GMSMapView.map(withFrame: CGRect(x: 0, y: 0, width: self.mapContainerView.bounds.size.width, height: self.mapContainerView.bounds.size.height), camera: camera)
mapView.delegate = self
do {
// Set the map style by passing the URL of the local file.
if let styleURL = Bundle.main.url(forResource: "style", withExtension: "json") {
mapView.mapStyle = try GMSMapStyle(contentsOfFileURL: styleURL)
} else {
NSLog("Unable to find style.json")
}
} catch {
NSLog("One or more of the map styles failed to load. \(error)")
}
self.mapContainerView.addSubview(mapView)
mapView.settings.setAllGesturesEnabled(false)
let marker = AppointmentMapDataManager(mapView: mapView).setAppointmentMarker()
// let location = GMSCameraPosition.camera(withLatitude: marker.position.latitude,
// longitude: marker.position.longitude,
// zoom: 14)
// mapView.camera = location
var bounds = GMSCoordinateBounds()
bounds = bounds.includingCoordinate((marker as AnyObject).position)
let update = GMSCameraUpdate.fit(bounds, with: UIEdgeInsets(top: self.mapContainerView.frame.height/2 - 33, left: self.mapContainerView.frame.width/2 - 81, bottom: 0, right: 0))
mapView.moveCamera(update)
}
}
Instead of moving your camera in viewDidLayoutSubviews, which is the wrong approach, use the didTap method of GMSMapViewDelegate, or, if you want to do it automatically, execute it after a delay.
// Method for centering the camera, based on your own code
func centerInMarker(marker: GMSMarker) {
var bounds = GMSCoordinateBounds()
bounds = bounds.includingCoordinate((marker as AnyObject).position)
let update = GMSCameraUpdate.fit(bounds, with: UIEdgeInsets(top: (self.mapView?.frame.height)!/2 - 33, left: (self.mapView?.frame.width)!/2 - 81, bottom: 0, right: 0))
mapView?.moveCamera(update)
}
You can call it in the delegate method:
func mapView(_ mapView: GMSMapView, didTap marker: GMSMarker) -> Bool {
self.centerInMarker(marker: marker)
return true
}
Or simply, when you add your marker, call it with a delay:
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
self.centerInMarker(marker: marker)
}
You should add the map view in viewDidLoad, and only update its frame in viewDidLayoutSubviews.
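A minimal sketch of that split, reusing the setup from the question and assuming mapContainerView and the marker code stay as they are:

override func viewDidLoad() {
    super.viewDidLoad()
    // Create the map once, here, so later layout passes and alert
    // presentations don't rebuild it or move the camera again.
    let camera = GMSCameraPosition.camera(withLatitude: 45.582045, longitude: 74.32937, zoom: 14.0)
    mapView = GMSMapView.map(withFrame: mapContainerView.bounds, camera: camera)
    mapView.delegate = self
    mapContainerView.addSubview(mapView)
}

override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    // Only keep the frame in sync here; no camera updates.
    mapView?.frame = mapContainerView.bounds
}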