I am placing overlays with an MKOverlayRenderer subclass. I first display a UIImage in a view over the map, rotate it in that view, convert the points (define boundingMapRect, coordinate, etc.), and then place it on the map (renderer/addOverlay) with the correct image (CGContextDrawImage) and correct rotation. Each time I create a new overlay (UIImage in view, rotate, convert points, addOverlay) it works just fine. But on each successive overlay added to the map, the code also rotates all of the other overlays already on the map. It seems as though my rotation code (below) is being applied to every overlay.
Should I be making an array that holds boundingMapRect and coordinates for each overlay, and if so, should it live in the view controller?
Should I be naming each overlay (overlay.title? How do I set it and read it back?)
I'd appreciate some guidance, thanks.
MapOverlayView.m
@interface MapOverlayView ()
@property (nonatomic, strong) UIImage *overlayImage;
@end
@implementation MapOverlayView
- (instancetype)initWithOverlay:(id<MKOverlay>)overlay overlayImage:(UIImage *)overlayImage {
self = [super initWithOverlay:overlay];
if (self) {
_overlayImage = overlayImage;
}
return self;
}
- (void)drawMapRect:(MKMapRect)mapRect zoomScale:(MKZoomScale)zoomScale inContext:(CGContextRef)context {
CGImageRef imageReference = self.overlayImage.CGImage;
MKMapRect theMapRect = self.overlay.boundingMapRect;
CGRect theRect = [self rectForMapRect:theMapRect];
IonQuestSingleton *tmp = [IonQuestSingleton sharedSingleton];
float new_angle = tmp.image_angle;
NSLog(@"%s%f", "New angle is ", new_angle);
if (new_angle < 90) {
CGContextRotateCTM(context, new_angle * M_PI / 180);
CGContextScaleCTM(context, 1.0, -1.0);
CGContextTranslateCTM(context, 0.0, -theRect.size.height);
CGContextDrawImage(context, theRect, imageReference);
NSLog(@"%s%f", "New angle 90 is ", new_angle); // tick
}
else if (new_angle > 90 && new_angle <= 180) {
CGContextRotateCTM(context, new_angle * M_PI / 180);
CGContextScaleCTM(context, 1.0, -1.0);
CGContextTranslateCTM(context, 0.0, 0.0);
CGContextDrawImage(context, theRect, imageReference);
NSLog(@"%s%f", "New angle 180 is ", new_angle); // tick
}
else if (new_angle > 180 && new_angle <= 270) {
CGContextRotateCTM(context, new_angle * M_PI / 180);
CGContextScaleCTM(context, 1.0, -1.0);
CGContextTranslateCTM(context, -theRect.size.width, 0.0);
CGContextDrawImage(context, theRect, imageReference);
NSLog(@"%s%f", "New angle 270 is ", new_angle); // tick
}
else if (new_angle > 270) {
CGContextRotateCTM(context, new_angle * M_PI / 180);
CGContextScaleCTM(context, 1.0, -1.0);
CGContextTranslateCTM(context, -theRect.size.width, -theRect.size.height);
CGContextDrawImage(context, theRect, imageReference); // tick
NSLog(@"%s%f", "New angle 360 is ", new_angle);
}
}
@end
MapOverlay.m
#import "MapOverlay.h"
#import "IonQuestSingleton.h"
@implementation MapOverlay
#define debug 1
@synthesize boundingMapRect;
@synthesize coordinate;
-(CLLocationCoordinate2D)coordinate {
//Image center point
IonQuestSingleton *tmp = [IonQuestSingleton sharedSingleton];
return CLLocationCoordinate2DMake(tmp.image_ctr_lat,
tmp.image_ctr_long);
}
- (instancetype)init
{
self = [super init];
if (self) {
IonQuestSingleton *tmp = [IonQuestSingleton sharedSingleton];
MKMapPoint upperLeft = MKMapPointForCoordinate(CLLocationCoordinate2DMake(tmp.image_tl_lat, tmp.image_tl_long));
MKMapPoint boundsLeft = MKMapPointForCoordinate(CLLocationCoordinate2DMake(tmp.image_tl_bounds_lat, tmp.image_tl_bounds_long));
boundingMapRect = MKMapRectMake(upperLeft.x, upperLeft.y, fabs(upperLeft.x - boundsLeft.x), fabs(upperLeft.x - boundsLeft.x));
}
return self;
}
@end
ViewController.m
-(void) loadOverlay
{
MapOverlay *overlay = [[MapOverlay alloc] init];
[self.mapView addOverlay:overlay level:MKOverlayLevelAboveLabels];
}
- (MKOverlayRenderer *)mapView:(MKMapView *)mapView rendererForOverlay:(id<MKOverlay>)overlay {
NSString *mapNameFromMVCPos = [NSString stringWithFormat:@"%@%s", self.mapNameFromMVC, "Pos"]; // not working
if ([overlay isKindOfClass:[MapOverlay class]]) {
UIImage *magicMountainImage = [UIImage imageNamed:mapNameFromMVCPos];
MKOverlayRenderer *overlayView = [[MapOverlayView alloc] initWithOverlay:overlay overlayImage:magicMountainImage];
return overlayView;
}
// ... evaluate other overlay types
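For reference: the symptom described above is consistent with every renderer reading the same image_angle from the shared singleton, so whenever the map redraws, every visible overlay picks up whatever the current angle is. A minimal sketch of one way around that (not the original code; the rotationAngle property name is an assumption) is to capture the angle on each MapOverlay when it is created and have the renderer read it from self.overlay:
// Sketch only: store the rotation per overlay instead of in the singleton.
// In MapOverlay.h, add a hypothetical per-overlay angle property:
@interface MapOverlay : NSObject <MKOverlay>
@property (nonatomic, assign) CGFloat rotationAngle; // degrees, captured at creation time
@end
// When creating the overlay (e.g. in loadOverlay):
MapOverlay *overlay = [[MapOverlay alloc] init];
overlay.rotationAngle = [IonQuestSingleton sharedSingleton].image_angle; // copy the value once
[self.mapView addOverlay:overlay level:MKOverlayLevelAboveLabels];
// In MapOverlayView's drawMapRect:zoomScale:inContext:, read the angle from
// the overlay being drawn rather than from the singleton:
MapOverlay *mapOverlay = (MapOverlay *)self.overlay;
CGFloat new_angle = mapOverlay.rotationAngle;
CGContextRotateCTM(context, new_angle * M_PI / 180.0);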
Related
I am building a scratch-card experience where I have a background color and a transparent PNG on top.
Beneath this, I have the actual image with the content of the scratch card.
I want to combine the background color and the transparent image into one UIImage so that when I scratch it, I can see the content below.
I have tried applying a background color to the actual image, but when I scratch it, I cannot see the content. Instead, the background color starts clearing the transparent image.
I have written the following code to clear the area of the image after I touch it:
- (UIImage *)addTouches:(NSSet *)touches {
CGSize size = CGSizeMake(self.image.size.width * self.image.scale, self.image.size.height * self.image.scale);
CGContextRef ctx = _imageContext;
CGContextSetFillColorWithColor(ctx,[UIColor clearColor].CGColor);
CGContextSetStrokeColorWithColor(ctx,[UIColor colorWithRed:0 green:0 blue:0 alpha:0].CGColor);
int tempFilled = _tilesFilled;
// process touches
for (UITouch *touch in touches) {
CGContextBeginPath(ctx);
CGPoint touchPoint = [touch locationInView:self];
touchPoint = fromUItoQuartz(touchPoint, self.bounds.size);
touchPoint = scalePoint(touchPoint, self.bounds.size, size);
if(UITouchPhaseBegan == touch.phase){
[self.touchPoints removeAllObjects];
[self.touchPoints addObject:[NSValue valueWithCGPoint:touchPoint]];
[self.touchPoints addObject:[NSValue valueWithCGPoint:touchPoint]];
// on begin, we just draw ellipse
CGRect rect = CGRectMake(touchPoint.x - _radius, touchPoint.y - _radius, _radius*2, _radius*2);
CGContextAddEllipseInRect(ctx, rect);
CGContextFillPath(ctx);
static const FillTileWithPointFunc fillTileFunc = (FillTileWithPointFunc)[self methodForSelector:@selector(fillTileWithPoint:)];
(*fillTileFunc)(self, @selector(fillTileWithPoint:), rect.origin);
} else if (UITouchPhaseMoved == touch.phase) {
[self.touchPoints addObject:[NSValue valueWithCGPoint:touchPoint]];
// then touch moved, we draw superior-width line
CGContextSetStrokeColor(ctx, CGColorGetComponents([UIColor clearColor].CGColor));
CGContextSetLineCap(ctx, kCGLineCapRound);
CGContextSetLineWidth(ctx, 2 * _radius);
// CGContextMoveToPoint(ctx, prevPoint.x, prevPoint.y);
// CGContextAddLineToPoint(ctx, rect.origin.x, rect.origin.y);
while(self.touchPoints.count > 3){
CGPoint bezier[4];
bezier[0] = ((NSValue*)self.touchPoints[1]).CGPointValue;
bezier[3] = ((NSValue*)self.touchPoints[2]).CGPointValue;
CGFloat k = 0.3;
CGFloat len = sqrt(pow(bezier[3].x - bezier[0].x, 2) + pow(bezier[3].y - bezier[0].y, 2));
bezier[1] = ((NSValue*)self.touchPoints[0]).CGPointValue;
bezier[1] = [self normalizeVector:CGPointMake(bezier[0].x - bezier[1].x - (bezier[0].x - bezier[3].x), bezier[0].y - bezier[1].y - (bezier[0].y - bezier[3].y) )];
bezier[1].x *= len * k;
bezier[1].y *= len * k;
bezier[1].x += bezier[0].x;
bezier[1].y += bezier[0].y;
bezier[2] = ((NSValue*)self.touchPoints[3]).CGPointValue;
bezier[2] = [self normalizeVector:CGPointMake( (bezier[3].x - bezier[2].x) - (bezier[3].x - bezier[0].x), (bezier[3].y - bezier[2].y) - (bezier[3].y - bezier[0].y) )];
bezier[2].x *= len * k;
bezier[2].y *= len * k;
bezier[2].x += bezier[3].x;
bezier[2].y += bezier[3].y;
CGContextMoveToPoint(ctx, bezier[0].x, bezier[0].y);
CGContextAddCurveToPoint(ctx, bezier[1].x, bezier[1].y, bezier[2].x, bezier[2].y, bezier[3].x, bezier[3].y);
[self.touchPoints removeObjectAtIndex:0];
}
CGContextStrokePath(ctx);
CGPoint prevPoint = [touch previousLocationInView:self];
prevPoint = fromUItoQuartz(prevPoint, self.bounds.size);
prevPoint = scalePoint(prevPoint, self.bounds.size, size);
static const FillTileWithTwoPointsFunc fillTileFunc = (FillTileWithTwoPointsFunc)[self methodForSelector:@selector(fillTileWithTwoPoints:end:)];
(*fillTileFunc)(self, @selector(fillTileWithTwoPoints:end:), touchPoint, prevPoint);
}
}
// was _tilesFilled changed?
if(tempFilled != _tilesFilled) {
[_delegate mdScratchImageView:self didChangeMaskingProgress:self.maskingProgress];
}
CGImageRef cgImage = CGBitmapContextCreateImage(ctx);
UIImage *image = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage);
return image;
}
/*
* filling tile with one ellipse
*/
-(void)fillTileWithPoint:(CGPoint) point{
size_t x,y;
point.x = MAX( MIN(point.x, self.image.size.width - 1) , 0);
point.y = MAX( MIN(point.y, self.image.size.height - 1), 0);
x = point.x * self.maskedMatrix.max.x / self.image.size.width;
y = point.y * self.maskedMatrix.max.y / self.image.size.height;
char value = [self.maskedMatrix valueForCoordinates:x y:y];
if (!value){
[self.maskedMatrix setValue:1 forCoordinates:x y:y];
_tilesFilled++;
}
}
/*
* filling tile with line
*/
-(void)fillTileWithTwoPoints:(CGPoint)begin end:(CGPoint)end{
CGFloat incrementerForx,incrementerFory;
static const FillTileWithPointFunc fillTileFunc = (FillTileWithPointFunc)[self methodForSelector:@selector(fillTileWithPoint:)];
/* incrementers - about size of a tile */
incrementerForx = (begin.x < end.x ? 1 : -1) * self.image.size.width / _tilesX;
incrementerFory = (begin.y < end.y ? 1 : -1) * self.image.size.height / _tilesY;
// iterate on points between begin and end
CGPoint i = begin;
while(i.x <= MAX(begin.x, end.x) && i.y <= MAX(begin.y, end.y) && i.x >= MIN(begin.x, end.x) && i.y >= MIN(begin.y, end.y)){
(*fillTileFunc)(self, @selector(fillTileWithPoint:), i);
i.x += incrementerForx;
i.y += incrementerFory;
}
(*fillTileFunc)(self, @selector(fillTileWithPoint:), end);
}
What you probably want to do is use a Layer Mask.
When masking a layer, from Apple's docs:
The layer’s alpha channel determines how much of the layer’s content and background shows through. Fully or partially opaque pixels allow the underlying content to show through, but fully transparent pixels block that content.
So, you'd want to use a path to mask your dark circle.
However, to get the "scratch off" effect, you would need to draw the path with a Clear stroke... which you cannot accomplish with a CAShapeLayer.
So, we'll use a custom CALayer subclass.
MyShapeLayer.h
//
// MyShapeLayer.h
//
#import <QuartzCore/QuartzCore.h>
@interface MyShapeLayer : CALayer
@property(nonatomic) CGPathRef path;
@end
MyShapeLayer.m
//
// MyShapeLayer.m
//
#import <UIKit/UIKit.h>
#import "MyShapeLayer.h"
@implementation MyShapeLayer
- (void)drawInContext:(CGContextRef)inContext {
// fill entire layer with solid color
CGContextSetGrayFillColor(inContext, 0.0, 1.0);
CGContextFillRect(inContext, self.bounds);
// we want to "clear" the stroke
CGContextSetStrokeColorWithColor(inContext, [UIColor clearColor].CGColor);
// any color will work, as the mask uses the alpha value
CGContextSetFillColorWithColor(inContext, [UIColor whiteColor].CGColor);
// adjust drawing-line-width as desired
CGContextSetLineWidth(inContext, 60.0);
CGContextSetLineCap(inContext, kCGLineCapRound);
CGContextSetLineJoin(inContext, kCGLineJoinRound);
CGContextAddPath(inContext, self.path);
CGContextSetBlendMode(inContext, kCGBlendModeSourceIn);
CGContextDrawPath(inContext, kCGPathFillStroke);
}
@end
Now we can create a UIView subclass to draw a filled-circle path on a CAShapeLayer and mask it with our MyShapeLayer.
ScratchOffView.h
//
// ScratchOffView.h
//
#import <UIKit/UIKit.h>
@interface ScratchOffView : UIView
@property (assign, readwrite) CGFloat expandedBounds;
@end
ScratchOffView.m
//
// ScratchOffView.m
//
#import "ScratchOffView.h"
#import "MyShapeLayer.h"
@interface ScratchOffView()
@property (strong, nonatomic) UIBezierPath *maskPath;
@property (strong, nonatomic) MyShapeLayer *maskLayer;
@property (strong, nonatomic) CAShapeLayer *scratchOffShapeLayer;
@property (strong, nonatomic) CALayer *scratchOffLayer;
@end
@implementation ScratchOffView
- (instancetype)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
[self commonInit];
}
return self;
}
- (instancetype)initWithCoder:(NSCoder *)coder
{
self = [super initWithCoder:coder];
if (self) {
[self commonInit];
}
return self;
}
- (void)commonInit {
_maskPath = [UIBezierPath new];
_maskLayer = [MyShapeLayer new];
_scratchOffLayer = [CALayer new];
_scratchOffShapeLayer = [CAShapeLayer new];
// Important, otherwise you will get a black rectangle
_maskLayer.opaque = NO;
// add the layer holding the shape to "Scratch Off"
[self.layer addSublayer:_scratchOffShapeLayer];
UIColor *c = [UIColor colorWithRed:50.0 / 255.0 green:150.0 / 255.0 blue:140.0 / 255.0 alpha:1.0];
[_scratchOffShapeLayer setFillColor:c.CGColor];
// set the mask layer
[_scratchOffShapeLayer setMask:_maskLayer];
// default 0.0 == no expanded bounds for touch
_expandedBounds = 0.0;
}
- (void)layoutSubviews {
[super layoutSubviews];
[_maskLayer setFrame:[self bounds]];
[_scratchOffShapeLayer setFrame:[self bounds]];
UIBezierPath *b = [UIBezierPath bezierPathWithOvalInRect:[self bounds]];
[_scratchOffShapeLayer setPath:b.CGPath];
// triggers drawInContext
[_maskLayer setNeedsDisplay];
}
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
UITouch *touch = [touches anyObject];
CGPoint currentPoint = [touch locationInView:self];
[_maskPath moveToPoint:currentPoint];
}
- (void)touchesMoved:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
UITouch *touch = [touches anyObject];
CGPoint currentPoint = [touch locationInView:self];
// add line to our maskPath
[_maskPath addLineToPoint:currentPoint];
// update the mask layer path
[_maskLayer setPath:_maskPath.CGPath];
// triggers drawInContext
[_maskLayer setNeedsDisplay];
}
- (BOOL)pointInside:(CGPoint)point withEvent:(UIEvent *)event {
// accept touch if within expanded bounds
// setting _expandedBounds to a Positive number allows the
// touches to start outside the frame
CGRect r = CGRectInset([self bounds], -_expandedBounds, -_expandedBounds);
return CGRectContainsPoint(r, point);
}
@end
Note that we've added a property: expandedBounds. Since the touches will only register if they begin on this view, we can (virtually) expand the bounds of the view so the user can touch and "drag into the circle."
Here is a complete example implementation. To try and match your question, I use this image (420 x 460 pixels) as the "background" image:
and this image (284 x 284 pixels) as the "image to reveal under the scratch-off circle" (the transparent area is the size we want the circle to be):
ScratchOffTestViewController.h
//
// ScratchOffTestViewController.h
//
#import <UIKit/UIKit.h>
@interface ScratchOffTestViewController : UIViewController
@end
ScratchOffTestViewController.m
//
// ScratchOffTestViewController.m
//
#import "ScratchOffTestViewController.h"
#import "ScratchOffView.h"
@interface ScratchOffTestViewController ()
@property (strong, nonatomic) ScratchOffView *scratchOffView;
@end
@implementation ScratchOffTestViewController
- (void)viewDidLoad {
[super viewDidLoad];
self.view.backgroundColor = [UIColor whiteColor];
// create the Scratch Off View
_scratchOffView = [ScratchOffView new];
// load background and giftBox image
UIImage *bkgImage = [UIImage imageNamed:@"backgroundImage"];
UIImage *giftBoxImage = [UIImage imageNamed:@"giftBox"];
if (!bkgImage || !giftBoxImage) {
NSLog(@"Could not load images!!!");
return;
}
UIImageView *bkgImageView = [UIImageView new];
UIImageView *giftImageView = [UIImageView new];
bkgImageView.image = bkgImage;
giftImageView.image = giftBoxImage;
bkgImageView.translatesAutoresizingMaskIntoConstraints = NO;
giftImageView.translatesAutoresizingMaskIntoConstraints = NO;
_scratchOffView.translatesAutoresizingMaskIntoConstraints = NO;
[self.view addSubview:bkgImageView];
[self.view addSubview:giftImageView];
[self.view addSubview:_scratchOffView];
UILayoutGuide *g = [self.view safeAreaLayoutGuide];
[NSLayoutConstraint activateConstraints:@[
// constrain background image view to background image size
[bkgImageView.widthAnchor constraintEqualToConstant:bkgImage.size.width],
[bkgImageView.heightAnchor constraintEqualToConstant:bkgImage.size.height],
// centered
[bkgImageView.centerXAnchor constraintEqualToAnchor:g.centerXAnchor],
[bkgImageView.centerYAnchor constraintEqualToAnchor:g.centerYAnchor],
// constrain giftBox image view to giftBox image size
[giftImageView.widthAnchor constraintEqualToConstant:giftBoxImage.size.width],
[giftImageView.heightAnchor constraintEqualToConstant:giftBoxImage.size.height],
// centered horizontally, and a little above vertically
[giftImageView.centerXAnchor constraintEqualToAnchor:bkgImageView.centerXAnchor],
[giftImageView.centerYAnchor constraintEqualToAnchor:bkgImageView.centerYAnchor],
// constrain Scratch Off View to giftImageView
[_scratchOffView.widthAnchor constraintEqualToAnchor:giftImageView.widthAnchor],
[_scratchOffView.heightAnchor constraintEqualToAnchor:giftImageView.widthAnchor],
[_scratchOffView.centerXAnchor constraintEqualToAnchor:giftImageView.centerXAnchor],
[_scratchOffView.centerYAnchor constraintEqualToAnchor:giftImageView.centerYAnchor],
]];
// expand the touch bounds of the Scratch Off View by 80-pts
_scratchOffView.expandedBounds = 80.0;
return;
}
@end
On start, we see this:
and after touch-drag a bit on the circle, we see this:
If we continue dragging our touch around, the dark-green circle will eventually be completely gone -- we will have "scratched it off."
I want to draw a country's flag on top of a country boundary. I am using the coordinate data from here.
The way I am adding the overlay is
[mapView addOverlays:countryName];
Then rendering it as
- (MKOverlayRenderer *)mapView:(MKMapView *)mapView rendererForOverlay:(id<MKOverlay>)overlay
{
if ([overlay isKindOfClass:[MKPolygon class]])
{
MKPolygonRenderer *renderer = [[MKPolygonRenderer alloc] initWithPolygon:overlay];
renderer.fillColor = [ColorUtil colorFromHexString:@"#EE5A43" withAlpha:0.6];
renderer.strokeColor = [[UIColor whiteColor] colorWithAlphaComponent:1.0];
renderer.lineWidth = 2;
return renderer;
}
return nil;
}
Now my idea was to draw the overlay with the flag, so I subclassed MKPolygonRenderer and added
@implementation MyMKOverlayRenderer
- (instancetype)initWithOverlay:(id<MKOverlay>)overlay overlayImage:(UIImage *)overlayImage {
self = [super initWithOverlay:overlay];
if (self) {
_overlayImage = overlayImage;
}
return self;
}
- (void)drawMapRect:(MKMapRect)mapRect zoomScale:(MKZoomScale)zoomScale inContext:(CGContextRef)context {
CGImageRef imageReference = self.overlayImage.CGImage;
MKMapRect theMapRect = self.overlay.boundingMapRect;
CGRect theRect = [self rectForMapRect:theMapRect];
CGContextScaleCTM(context, 1.0, -1.0);
CGContextTranslateCTM(context, 0.0, -theRect.size.height);
CGContextDrawImage(context, theRect, imageReference);
}
@end
Now check the two attached images. I was expecting the flag to be inside the boundary of the polygon, but that is not happening. So maybe my approach is wrong. Can someone point me in the right direction?
I solved this problem. The key was to create a UIBezierPath and clip the overlay image.
I have posted the entire solution here
Now it looks like
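For reference, a minimal sketch of the clipping idea (not the linked solution itself; it assumes the renderer is an MKPolygonRenderer subclass with an overlayImage property, as in the question) is to build a UIBezierPath from the polygon's points and clip the context before drawing the image:
// Sketch only: clip the flag image to the country polygon before drawing it.
- (void)drawMapRect:(MKMapRect)mapRect zoomScale:(MKZoomScale)zoomScale inContext:(CGContextRef)context {
    MKPolygon *polygon = (MKPolygon *)self.overlay;
    CGRect theRect = [self rectForMapRect:polygon.boundingMapRect];
    // Build a UIBezierPath from the polygon's map points, converted to renderer points
    UIBezierPath *clipPath = [UIBezierPath bezierPath];
    MKMapPoint *points = polygon.points;
    for (NSUInteger i = 0; i < polygon.pointCount; i++) {
        CGPoint p = [self pointForMapPoint:points[i]];
        if (i == 0) {
            [clipPath moveToPoint:p];
        } else {
            [clipPath addLineToPoint:p];
        }
    }
    [clipPath closePath];
    // Clip the context to the country boundary, then draw the flipped image inside it
    CGContextAddPath(context, clipPath.CGPath);
    CGContextClip(context);
    CGContextScaleCTM(context, 1.0, -1.0);
    CGContextTranslateCTM(context, 0.0, -theRect.size.height);
    CGContextDrawImage(context, theRect, self.overlayImage.CGImage);
}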
When I add an image overlay with a high-resolution image and the map zooms in, the image disappears from the map and I get memory issues; sometimes my app crashes. How can this be solved?
Here is my code
-(void)addOverlay
{
MapOverlay *overlay = [[MapOverlay alloc] initWithRouteOverlay:self.routeOverlayObj];
[self.mapView addOverlay:overlay];
}
- (MKOverlayRenderer *)mapView:(MKMapView *)mapView rendererForOverlay:(id<MKOverlay>)overlay
{
if([overlay isKindOfClass:[MapOverlay class]])
{
mapOverlay = (MapOverlay *)overlay;
NSLog(#"%# %#",routeInfoObj.routeOverlay,[UIImage imageNamed:routeInfoObj.routeOverlay]);
ATTMapOverLayRender *mapOverlayRender = [[ATTMapOverLayRender alloc] initWithOverlay:mapOverlay overlayImage:[UIImage imageNamed:routeInfoObj.routeOverlay] andImageFile:[NSString stringWithFormat:#"%#.png",routeInfoObj.routeOverlay]];
return mapOverlayRender;
}
else
{
MKPolylineRenderer *routeRenderer = [[MKPolylineRenderer alloc] initWithPolyline:routePolyline];
routeRenderer.strokeColor = [UIColor redColor];
routeRenderer.fillColor = [UIColor redColor];
routeRenderer.lineWidth = 5;
return routeRenderer;
}
return nil;
}
and in "ATTMApOVerlayRender.h"
@interface ATTMapOverLayRender : MKOverlayRenderer
- (instancetype)initWithOverlay:(id<MKOverlay>)overlay overlayImage:(UIImage *)overlayImage andImageFile:(NSString *)file;
@property (nonatomic, weak) UIImage *overlayImage;
@end
"ATTMApOVerlayRender.h"
#import "ATTMapOverLayRender.h"
@implementation ATTMapOverLayRender
- (instancetype)initWithOverlay:(id<MKOverlay>)overlay overlayImage:(UIImage *)overlayImage andImageFile:(NSString *)file {
self = [super initWithOverlay:overlay];
if (self) {
self.overlayImage = overlayImage;
}
return self;
}
- (void)drawMapRect:(MKMapRect)mapRect zoomScale:(MKZoomScale)zoomScale inContext:(CGContextRef)context {
CGImageRef image = _overlayImage.CGImage;
MKMapRect overlayMapRect = [self.overlay boundingMapRect];
CGRect overlayRect = [self rectForMapRect:overlayMapRect];
// draw image
CGContextScaleCTM(context, 1.0, -1.0);
CGContextTranslateCTM(context, 0.0, -overlayRect.size.height);
CGContextDrawImage(context, overlayRect, image);
}
@end
I'm running into similar problems on a tile overlay project as well.
I was able to significantly improve the memory usage by maintaining a single MKOverlayRenderer.
Rather than allocating memory for a new MKOverlayRenderer (or MKPolylineRenderer) every time the mapView:rendererForOverlay: method is called, create a property for it and reassign it.
So in your ViewController, or whatever class contains the mapView and implements -(MKOverlayRenderer *)mapView:(MKMapView *)mapView rendererForOverlay:(id<MKOverlay>)overlay, have
@property (nonatomic, strong) MKOverlayRenderer *renderer;
and in the implementation of -(MKOverlayRenderer *)mapView:(MKMapView *)mapView rendererForOverlay:(id<MKOverlay>)overlay
use
self.renderer = [[MKOverlayRenderer alloc] init...
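A minimal sketch of the pattern described above, reusing the class names from the earlier snippets (illustrative only, not the poster's exact code):
// Sketch: keep one stored renderer and reassign it instead of allocating a new local one per call
@interface ViewController () <MKMapViewDelegate>
@property (nonatomic, strong) MKOverlayRenderer *renderer;
@end

@implementation ViewController
- (MKOverlayRenderer *)mapView:(MKMapView *)mapView rendererForOverlay:(id<MKOverlay>)overlay
{
    if ([overlay isKindOfClass:[MapOverlay class]]) {
        // reassign the single stored renderer rather than creating a fresh local variable
        self.renderer = [[ATTMapOverLayRender alloc] initWithOverlay:overlay
                                                        overlayImage:[UIImage imageNamed:routeInfoObj.routeOverlay]
                                                        andImageFile:[NSString stringWithFormat:@"%@.png", routeInfoObj.routeOverlay]];
        return self.renderer;
    }
    // ... handle other overlay types as before
    return nil;
}
@end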
I am trying to convert LatLonBox coordinates to CLLocationCoordinate2D. But I think there is a mistake somewhere, because the image doesn't fit.
<LatLonBox>
<north>2.254403</north>
<south>-55.256903</south>
<east>161.994477</east>
<west>98.003023</west>
<rotation>0</rotation>
</LatLonBox>
Here is my function to convert the LatLonBox coordinates.
** --- MapOverlay : NSObject --- **
-(id)initWithNorth:(double)north south:(double)south west:(double)west east:(double)east imageStr:(NSString*)imageStr{
self = [super init];
if (self) {
imageString = imageStr;
CLLocationCoordinate2D lowerLeftCoordinate = CLLocationCoordinate2DMake(south, west); // south, west
CLLocationCoordinate2D upperRightCoordinate = CLLocationCoordinate2DMake(north, east); //north,east
baseCoordinate = lowerLeftCoordinate;
MKMapPoint lowerLeft = MKMapPointForCoordinate(lowerLeftCoordinate);
MKMapPoint upperRight = MKMapPointForCoordinate(upperRightCoordinate);
mapRect = MKMapRectMake(lowerLeft.x, upperRight.y, upperRight.x - lowerLeft.x, lowerLeft.y - upperRight.y);
}
return self;
}
** --- MapOverlayView : MKOverlayView --- **
- (void)drawMapRect:(MKMapRect)mapRect zoomScale:(MKZoomScale)zoomScale inContext:(CGContextRef)ctx
{
CGImageRef imageReference = image.CGImage;
//Loading and setting the image
MKMapRect theMapRect = [self.overlay boundingMapRect];
CGRect theRect = [self rectForMapRect:theMapRect];
// We need to flip and reposition the image here
CGContextScaleCTM(ctx, 1.0, -1.0);
CGContextTranslateCTM(ctx, 0.0, -theRect.size.height);
//drawing the image to the context
CGContextDrawImage(ctx, theRect, imageReference);
}
** --- My Map --- **
//Adding the overlay to the map
MapOverlay * mapOverlay = [[MapOverlay alloc] initWithNorth:2.254403
south:-55.256903
west:161.994477
east:98.003023
imageStr:#"EarthquakeHazard.png"];
[self addOverlay:mapOverlay];
The thing is, if I change the north value (to -2.2544...), the image moves down. Shouldn't it move up instead?
Well, the functionality actually works fine and is correct. My .kml file had the wrong values...
Thanks for your help!!!
mapRect = MKMapRectMake(lowerLeft.x, upperRight.y, upperRight.x - lowerLeft.x, lowerLeft.y - upperRight.y);
should probably be
mapRect = MKMapRectMake(lowerLeft.x, upperRight.y, upperRight.x - lowerLeft.x, upperRight.y - lowerLeft.y);
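For reference, a sign-agnostic way to build the rect (a sketch, not from the original post) is to take the minimum corner as the origin and absolute differences for the size, so the result is valid regardless of which corner ends up with the smaller map-point values:
// Sketch: build an MKMapRect from two corner coordinates without worrying
// about which one has the smaller x/y in map-point space.
MKMapPoint lowerLeft = MKMapPointForCoordinate(CLLocationCoordinate2DMake(south, west));
MKMapPoint upperRight = MKMapPointForCoordinate(CLLocationCoordinate2DMake(north, east));
// Note: MKMapPoint y grows toward the south, so upperRight.y is normally the smaller value
mapRect = MKMapRectMake(MIN(lowerLeft.x, upperRight.x),
                        MIN(lowerLeft.y, upperRight.y),
                        fabs(upperRight.x - lowerLeft.x),
                        fabs(upperRight.y - lowerLeft.y));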
I am trying to detect a face in a UIImageView and place an image on the mouth.
I have tried this method, but I can't transform the Core Image coordinate system to the UIKit coordinate system. Here is my code:
(Code updated, but it is still not working; it just rotates the view.)
@interface ProcessImageViewController ()
@end
@implementation ProcessImageViewController
@synthesize receivedImageData;
@synthesize renderImageView;
@synthesize viewToRender;
@synthesize preview;
@synthesize pancontrol;
@synthesize pinchcontrol;
@synthesize rotatecontrol;
- (BOOL)prefersStatusBarHidden {
return YES;
}
- (void)viewDidLoad
{
[super viewDidLoad];
renderImageView.image = receivedImageData;
renderImageView.contentMode = UIViewContentModeScaleToFill;
}
-(void)tryAddCliparts
{
NSLog(#"button clicked");
[self performSelectorInBackground:#selector(markFaces:) withObject:renderImageView];
}
- (IBAction)handlePan:(UIPanGestureRecognizer *)recognizer {
CGFloat firstX = recognizer.view.center.x;
CGFloat firstY = recognizer.view.center.y;
CGPoint translationPoint = [recognizer translationInView:self.view];
CGPoint translatedPoint = CGPointMake(firstX + translationPoint.x, firstY+ translationPoint.y);
CGFloat viewW = renderImageView.frame.size.width;
CGFloat viewH = renderImageView.frame.size.height;
if (translatedPoint.x<0 || translatedPoint.x>viewW)
translatedPoint.x = renderImageView.frame.origin.x;
if (translatedPoint.y<0|| translatedPoint.y>viewH)
translatedPoint.y = renderImageView.frame.origin.y;
recognizer.view.center = CGPointMake(translatedPoint.x, translatedPoint.y);
[recognizer setTranslation:CGPointMake(0, 0) inView:self.view];
}
- (IBAction)handlePinch:(UIPinchGestureRecognizer *)recognizer {
recognizer.view.transform = CGAffineTransformScale(recognizer.view.transform, recognizer.scale, recognizer.scale);
recognizer.scale = 1;
}
- (IBAction)handleRotate:(UIRotationGestureRecognizer *)recognizer {
recognizer.view.transform = CGAffineTransformRotate(recognizer.view.transform, recognizer.rotation);
recognizer.rotation = 0;
}
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer {
return YES;
}
-(void)markFaces:(UIImageView *)facePicture
{
NSLog(#"face detection started");
// draw a ci image from view
CIImage *image = [CIImage imageWithCGImage:facePicture.image.CGImage];
// Create face detector with high accuracy
CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeFace
context:nil options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh forKey:CIDetectorAccuracy]];
CGAffineTransform transform = CGAffineTransformMakeScale(1, -1);
transform = CGAffineTransformTranslate(transform,
0,-facePicture.bounds.size.height);
// Get features from the image
NSArray* features = [detector featuresInImage:image];
for(CIFaceFeature* faceFeature in features) {
// Transform CoreImage coordinates to UIKit
CGRect faceRect = CGRectApplyAffineTransform(faceFeature.bounds, transform);
UIImage *mustache = [UIImage imageNamed:@"mustacheok.png"];
UIImageView *mustacheview = [[UIImageView alloc] initWithImage:mustache];
mustacheview.contentMode = UIViewContentModeScaleAspectFill;
[mustacheview.layer setBorderColor:[[UIColor whiteColor] CGColor]];
[mustacheview.layer setBorderWidth:3];
[mustacheview addGestureRecognizer:pancontrol];
[mustacheview addGestureRecognizer:pinchcontrol];
[mustacheview addGestureRecognizer:rotatecontrol];
mustacheview.userInteractionEnabled=YES;
CGPoint mouthPos = CGPointApplyAffineTransform(faceFeature.mouthPosition, transform);
[mustacheview setFrame:CGRectMake(mouthPos.x, mouthPos.y, mustacheview.frame.size.width, mustacheview.frame.size.height)];
[viewToRender addSubview:mustacheview];
[viewToRender bringSubviewToFront:mustacheview];
}
}
@end
CGAffineTransform transform = CGAffineTransformMakeScale(1, -1);
transform = CGAffineTransformTranslate(transform,
0,-facePicture.bounds.size.height);
for (CIFaceFeature *faceFeature in features) {
// Transform CoreImage coordinates to UIKit
CGRect faceRect = CGRectApplyAffineTransform(faceFeature.bounds, transform);
if (faceFeature.hasMouthPosition) {
// Transform CoreImage coordinates to UIKit
CGPoint mouthPos = CGPointApplyAffineTransform(faceFeature.mouthPosition, transform);
}
}
The only thing I see wrong in your code is this:
[mustacheview setFrame:CGRectMake(mouthPos.x, mouthPos.y, mustacheview.frame.size.width, mustacheview.frame.size.height)];
you should use:
[mustacheview setCenter:mouthPos];
because the detector returns the mouth center point.
Core Image uses the same coordinate system as Core Graphics: a bottom-left origin, as opposed to UIKit's top-left origin.
So you basically have to flip along the Y axis (multiply the y value by -1 and offset by the view's height):
CGAffineTransform flipVertical =
CGAffineTransformMake(1, 0, 0, -1, 0, self.bounds.size.height);