Right now I'm allocating and initializing three UIImageViews in viewDidLoad; they take up the entire screen and are stacked. It's actually taking some time to do this. Is there a way to do this automatically, so the view already has them before it's even loaded? Like an init method that would speed this up?
- (void)viewDidLoad {
    [super viewDidLoad];

    self.mySubviews = [[NSMutableArray alloc] init];
    self.videoCounterTags = [[NSMutableArray alloc] init];

    int c = (int)[self.scenes count];
    c--;
    NSLog(@"int c = %d", c);

    self.myCounter = [NSNumber numberWithInt:c];

    for (int i = 0; i <= c; i++) {
        //create imageView
        UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height)];
        [imageView setUserInteractionEnabled:YES]; // <--- This is very important
        imageView.tag = i; // <--- Add tag to track this subview in the view stack
        [self.view addSubview:imageView];
        NSLog(@"added image view %d", i);

        //get scene object
        PFObject *sceneObject = self.scenes[i];

        //get the PFFile and filetype
        PFFile *file = [sceneObject objectForKey:@"file"];
        NSString *fileType = [sceneObject objectForKey:@"fileType"];

        //check the filetype
        if ([fileType isEqual:@"image"]) {
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
                //get image
                NSURL *imageFileUrl = [[NSURL alloc] initWithString:file.url];
                NSData *imageData = [NSData dataWithContentsOfURL:imageFileUrl];
                dispatch_async(dispatch_get_main_queue(), ^{
                    imageView.image = [UIImage imageWithData:imageData];
                });
            });
        }
        //it's a video
        else {
            // the video player
            NSURL *fileUrl = [NSURL URLWithString:file.url];
            self.avPlayer = [AVPlayer playerWithURL:fileUrl];
            self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
            self.avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
            //self.avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
            [[NSNotificationCenter defaultCenter] addObserver:self
                                                     selector:@selector(playerItemDidReachEnd:)
                                                         name:AVPlayerItemDidPlayToEndTimeNotification
                                                       object:[self.avPlayer currentItem]];

            CGRect screenRect = [[UIScreen mainScreen] bounds];
            self.avPlayerLayer.frame = CGRectMake(0, 0, screenRect.size.width, screenRect.size.height);
            [imageView.layer addSublayer:self.avPlayerLayer];

            NSNumber *tag = [NSNumber numberWithInt:i + 1];
            NSLog(@"tag = %@", tag);
            [self.videoCounterTags addObject:tag];
        }
    }

    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(viewTapped:)];
    // dummyScreen is just a see-through view that sits on top of my image stack and holds my tap gesture recognizer
    [self.view bringSubviewToFront:self.dummyScreen];
    [self.dummyScreen addGestureRecognizer:tapGesture];
}
The problem is this line:
dispatch_async(dispatch_get_global_queue...
That moves you onto a background thread, and thus there is no telling when the code will be executed. Hence the delay.
If these are local files (i.e., the URL is the file URL of an image file in your app bundle), there is no need for any dispatch_async in your code. Remove all of that and do everything on the main thread. That way, it will happen as fast as possible.
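That boils down to something like this (a minimal sketch, assuming file.url is a file URL string for an image shipped in the bundle):

// Local file: no GCD needed, just load the image directly on the main thread.
NSURL *imageFileUrl = [NSURL URLWithString:file.url];
imageView.image = [UIImage imageWithContentsOfFile:imageFileUrl.path];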
If these are remote files (i.e., you have to do networking to get hold of them), then there's probably nothing you can do to speed things up; networking takes time, and viewDidLoad is just about as early as you can possibly be notified that it's time to get hold of the images.
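Asynchronous loading won't make the network any faster, but it is at least cleaner than blocking a GCD worker thread on the synchronous dataWithContentsOfURL:. A minimal sketch using NSURLSession (the error handling here is my own addition, not part of the original code):

// NSURLSession delivers its completion handler on a background queue,
// so hop back to the main queue before touching the image view.
NSURL *imageFileUrl = [NSURL URLWithString:file.url];
NSURLSessionDataTask *task =
    [[NSURLSession sharedSession] dataTaskWithURL:imageFileUrl
                                completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
        if (data == nil) {
            NSLog(@"image download failed: %@", error);
            return;
        }
        dispatch_async(dispatch_get_main_queue(), ^{
            imageView.image = [UIImage imageWithData:data];
        });
    }];
[task resume];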
Related
I am using "GPUImageMovieWriter" for video filtering. But the completion for GPUImageMovieWriter is taking so long time same like the original video time duration. This is below code that I am using.
- (void)applyFilterToVideo:(NSInteger)filterNumber {
    @autoreleasepool {
        self.showLoading = YES;

        _movieFile = [[GPUImageMovie alloc] initWithURL:originalFileUrl];
        self.videoFilter = [self filter:filterNumber]; // get selected filter
        _movieFile.runBenchmark = YES;
        _movieFile.playAtActualSpeed = YES;
        [_movieFile addTarget:_videoFilter];

        // Set a path for temporarily storing the video in the documents directory
        NSURL *movieURL = [self dataFilePath:@"tempVideo.mp4"]; // url where we want to save our new edited video
        NSLog(@"movieURL = %@", movieURL.absoluteString);

        CGSize size = [self getVideoResolution:originalFileUrl];

        self.movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:size];
        [_videoFilter addTarget:_movieWriter];

        // this lets GPUImageMovieWriter handle the audio; if not used, the resulting video has no audio.
        _movieWriter.shouldPassthroughAudio = YES;
        _movieFile.audioEncodingTarget = _movieWriter;
        [_movieFile enableSynchronizedEncodingUsingMovieWriter:_movieWriter];

        [self.movieWriter startRecording];
        [_movieFile startProcessing];

        __block BOOL completeRec = NO;
        __unsafe_unretained typeof(self) weakSelf = self;

        // completion block: called once video editing has completed.
        [self.movieWriter setCompletionBlock:^{
            [weakSelf.videoFilter removeTarget:weakSelf.movieWriter];
            [weakSelf.movieWriter finishRecording];
            [weakSelf.movieFile removeTarget:weakSelf.videoFilter];
            if (!completeRec) {
                // play our new filtered video
                [weakSelf performSelectorOnMainThread:@selector(playTheVideo:) withObject:movieURL waitUntilDone:NO];
                completeRec = YES;
            }
        }];
    }
}

- (void)playTheVideo:(NSURL *)videoURL {
    NSTimeInterval time = videoPlayer.currentPlaybackTime;
    UIView *parentView = imageViewFiltered; // adjust as needed
    CGRect bounds = parentView.bounds; // get bounds of parent view
    CGRect subviewFrame = CGRectInset(bounds, 0, 0); // left and right margin of 0
    videoPlayer.view.frame = subviewFrame;
    videoPlayer.view.autoresizingMask = (UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight);
    [parentView addSubview:videoPlayer.view];
    videoPlayer.contentURL = videoURL;
    [videoPlayer setCurrentPlaybackTime:time];
    [videoPlayer stop];
    [videoPlayer play];
    self.showLoading = NO;
}
Can anyone please tell me how to reduce the processing time?
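One plausible culprit, going by the code above (a hedged editorial note, not from the original thread): the code sets playAtActualSpeed = YES, and that GPUImageMovie flag paces processing to the clip's real playback rate, which matches the symptom of the export taking about as long as the video itself. For an offline export it can be turned off:

// Hedged tweak: let GPUImageMovie process frames as fast as it can,
// instead of pacing itself to the video's real-time playback speed.
_movieFile.playAtActualSpeed = NO;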
I want to create a quick picture/movie viewer for a birthday celebration.
First I want to show some pictures, then a movie, and then some pictures again. The first picture animation shows up fine, the movie starts fine, and AVPlayerItemDidPlayToEndTimeNotification is caught just fine to launch the pictures again, but the second animation never starts; I cannot work this out. DoShow1 is not working. Please advise, anybody out there?
- (void)DoShow
{
    NSArray *imageNames = @[@"1.jpg", @"2.jpg", @"3.png", @"4.png",
                            @"5.png", @"6.png", @"7.png", @"8.png",
                            @"9.png", @"10.png", @"11.png", @"12.png",
                            @"13.png"];

    NSMutableArray *images = [[NSMutableArray alloc] init];
    for (int i = 0; i < imageNames.count; i++) {
        [images addObject:[UIImage imageNamed:[imageNames objectAtIndex:i]]];
    }

    _imageview.animationImages = images;
    _imageview.animationDuration = 10;
    _imageview.animationRepeatCount = 1;
    [_imageview startAnimating];

    [self performSelector:@selector(playmovie) withObject:nil afterDelay:10];
}

- (void)DoShow1
{
    NSArray *imageNames = @[@"14.jpg", @"16.jpg", @"18.jpg", @"19.jpg",
                            @"20.jpg", @"22.jpg", @"23.jpg", @"24.jpg"];

    NSMutableArray *images = [[NSMutableArray alloc] init];
    for (int i = 0; i < imageNames.count; i++) {
        [images addObject:[UIImage imageNamed:[imageNames objectAtIndex:i]]];
    }

    _imageview2.animationImages = images;
    _imageview2.animationDuration = 2;
    _imageview2.animationRepeatCount = 100;
    [_imageview2 startAnimating];
}

- (void)playmovie
{
    MyMusicPlayerManager *sharedManager = [MyMusicPlayerManager sharedManager];
    [sharedManager ChkPaused];

    NSBundle *bundle = [NSBundle mainBundle];
    NSString *moviePath = [bundle pathForResource:@"stine_ane" ofType:@"mp4"];
    NSLog(@"String is %@", moviePath);
    NSURL *url = [NSURL fileURLWithPath:moviePath];

    AVPlayerItem *video = [[AVPlayerItem alloc] initWithURL:url];
    AVQueuePlayer *queue = [[AVQueuePlayer alloc] initWithItems:@[video]];
    video = [[AVPlayerItem alloc] initWithURL:url];
    [queue insertItem:video afterItem:nil];

    self.avmovieplayer = [[AVPlayerViewController alloc] init];
    self.avmovieplayer.player = queue;
    [self presentViewController:self.avmovieplayer animated:YES completion:nil];
    self.avmovieplayer.player.actionAtItemEnd = AVPlayerActionAtItemEndNone;

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(myMovieFinishedCallback:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:[self.avmovieplayer.player currentItem]];

    [self.avmovieplayer setShowsPlaybackControls:YES];
    [self.avmovieplayer.player play];
}

- (void)myMovieFinishedCallback:(NSNotification *)notification {
    [self dismissViewControllerAnimated:YES completion:nil];
    [self DoShow1];
}

- (IBAction)DoStart:(id)sender {
    [self DoShow];
}
[self dismissViewControllerAnimated:YES completion:nil];
[self DoShow1];
These two calls probably run in immediate succession, so the animation is asked to start at the same moment the view is being dismissed.
Try it in the completion handler, which is called after the view has been dismissed:
[self dismissViewControllerAnimated:YES completion:^{
    [self DoShow1];
}];
I have multiple imageView subviews getting stacked based on my incoming data. Basically, all of these subviews are set to either an image or a video layer, based on my incoming data. The problem I have is playing videos: I can play the first video in the stack, but every video after that plays only the sound of the first video. How can I play each one accordingly?
The views are navigated through with a tap event, like Snapchat. See below:
@interface SceneImageViewController ()

@property (strong, nonatomic) NSURL *videoUrl;
@property (strong, nonatomic) AVPlayer *avPlayer;
@property (strong, nonatomic) AVPlayerLayer *avPlayerLayer;

@end

@implementation SceneImageViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    self.mySubviews = [[NSMutableArray alloc] init];
    self.videoCounterTags = [[NSMutableArray alloc] init];

    int c = (int)[self.scenes count];
    c--;
    NSLog(@"int c = %d", c);

    self.myCounter = [NSNumber numberWithInt:c];

    for (int i = 0; i <= c; i++) {
        //create imageView
        UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height)];
        [imageView setUserInteractionEnabled:YES]; // <--- This is very important
        imageView.tag = i; // <--- Add tag to track this subview in the view stack
        [self.view addSubview:imageView];
        NSLog(@"added image view %d", i);

        //get scene object
        PFObject *sceneObject = self.scenes[i];

        //get the PFFile and filetype
        PFFile *file = [sceneObject objectForKey:@"file"];
        NSString *fileType = [sceneObject objectForKey:@"fileType"];

        //check the filetype
        if ([fileType isEqual:@"image"]) {
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
                //get image
                NSURL *imageFileUrl = [[NSURL alloc] initWithString:file.url];
                NSData *imageData = [NSData dataWithContentsOfURL:imageFileUrl];
                dispatch_async(dispatch_get_main_queue(), ^{
                    imageView.image = [UIImage imageWithData:imageData];
                });
            });
        }
        //it's a video
        else {
            // the video player
            NSURL *fileUrl = [NSURL URLWithString:file.url];
            self.avPlayer = [AVPlayer playerWithURL:fileUrl];
            self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
            self.avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
            //self.avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
            [[NSNotificationCenter defaultCenter] addObserver:self
                                                     selector:@selector(playerItemDidReachEnd:)
                                                         name:AVPlayerItemDidPlayToEndTimeNotification
                                                       object:[self.avPlayer currentItem]];

            CGRect screenRect = [[UIScreen mainScreen] bounds];
            self.avPlayerLayer.frame = CGRectMake(0, 0, screenRect.size.width, screenRect.size.height);
            [imageView.layer addSublayer:self.avPlayerLayer];

            NSNumber *tag = [NSNumber numberWithInt:i + 1];
            NSLog(@"tag = %@", tag);
            [self.videoCounterTags addObject:tag];

            //[self.avPlayer play];
        }
    }

    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(viewTapped:)];
    [self.view bringSubviewToFront:self.screen];
    [self.screen addGestureRecognizer:tapGesture];
}
- (void)viewTapped:(UIGestureRecognizer *)gesture {
    NSLog(@"touch!");
    [self.avPlayer pause];

    int i = [self.myCounter intValue];
    NSLog(@"counter = %d", i);

    for (UIImageView *subview in [self.view subviews]) {
        if (subview.tag == i) {
            [subview removeFromSuperview];
        }
    }

    if ([self.videoCounterTags containsObject:self.myCounter]) {
        NSLog(@"play video!!!");
        [self.avPlayer play];
    }

    if (i == 0) {
        [self.avPlayer pause];
        [self.navigationController popViewControllerAnimated:NO];
    }

    i--;
    self.myCounter = [NSNumber numberWithInt:i];
    NSLog(@"counter after = %d", i);
}
What Brooks Hanes said is correct: you keep overwriting the AVPlayer.
This is what I suggest you do:
Add the tap gesture to the imageView instead of the screen (or, for a cleaner approach, use a UIButton instead):
UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height)];
[imageView setUserInteractionEnabled:YES]; // <--- This is very important
imageView.tag = i; // <--- Add tag to track this subview in the view stack
[self.view addSubview:imageView];
NSLog(@"added image view %d", i);

// the target stays the view controller, which implements viewTapped:
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(viewTapped:)];
[imageView addGestureRecognizer:tapGesture];
This way, in your viewTapped: method you can get the tag of the pressed image via gesture.view.tag instead of using myCounter.
To get the videos working, you could create a new AVPlayer for each video, but that might turn out quite expensive memory-wise. A better approach is to use AVPlayerItem and switch the AVPlayer's AVPlayerItem when changing videos.
So, in the for loop, do something like this, where self.videoFiles is an NSMutableDictionary property:
// the video player
NSNumber *tag = [NSNumber numberWithInt:i + 1];
NSURL *fileUrl = [NSURL URLWithString:file.url];

// save your video file url paired with the ImageView it belongs to.
[self.videoFiles setObject:fileUrl forKey:tag];

// you only need to initialize the player once.
if (self.avPlayer == nil) {
    AVAsset *asset = [AVAsset assetWithURL:fileUrl];
    AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
    self.avPlayer = [[AVPlayer alloc] initWithPlayerItem:item];
    self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerItemDidReachEnd:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:[self.avPlayer currentItem]];
}

// you don't need to keep the layer as a property
// (unless you need it for some reason)
AVPlayerLayer *avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
CGRect screenRect = [[UIScreen mainScreen] bounds];
avPlayerLayer.frame = CGRectMake(0, 0, screenRect.size.width, screenRect.size.height);
[imageView.layer addSublayer:avPlayerLayer];

NSLog(@"tag = %@", tag);
[self.videoCounterTags addObject:tag];
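One detail the loop above assumes (my note, not the original answer's): self.videoFiles must exist before the loop runs, e.g. created alongside the other collections in viewDidLoad:

// Create the url-by-tag dictionary once, next to the other arrays in viewDidLoad.
self.videoFiles = [[NSMutableDictionary alloc] init];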
Now in your viewTapped: method:
if ([self.videoCounterTags containsObject:@(gesture.view.tag)]) {
    NSLog(@"play video!!!");
    AVAsset *asset = [AVAsset assetWithURL:[self.videoFiles objectForKey:@(gesture.view.tag)]];
    AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
    [self.avPlayer replaceCurrentItemWithPlayerItem:item];
    [self.avPlayer play];
}
Or use self.videoFiles instead, and then you don't need self.videoCounterTags at all:
NSURL *fileURL = [self.videoFiles objectForKey:@(gesture.view.tag)];
if (fileURL != nil) {
    NSLog(@"play video!!!");
    AVAsset *asset = [AVAsset assetWithURL:fileURL];
    AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
    [self.avPlayer replaceCurrentItemWithPlayerItem:item];
    [self.avPlayer play];
}
That's the gist of it.
Take a look at the way you're setting up the myCounter variable. It is set one time, to the count of scenes minus 1, and never changes until a view is tapped.
In addition, look at the way you're setting the _avPlayer pointer. It is being set over and over again; in a for loop you'd want to store a reference to each player, instead of repeatedly updating the same pointer to whatever is latest in the collection of scenes.
Also, from Apple's documentation:
You can create arbitrary numbers of player layers with the same AVPlayer object. Only the most recently created player layer will actually display the video content on-screen.
So, it's possible that because you're using the same AVPlayer object to create all these player layers, you're never going to see more than one actual video layer work.
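A hedged sketch of what storing references could look like, assuming a players NSMutableArray property is added (the property name is mine, not from the thread):

// Inside the question's for loop: keep each player alive in an array
// instead of overwriting self.avPlayer on every iteration.
// Assumes: @property (strong, nonatomic) NSMutableArray *players;
AVPlayer *player = [AVPlayer playerWithURL:fileUrl];
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
playerLayer.frame = self.view.bounds;
[imageView.layer addSublayer:playerLayer];
[self.players addObject:player];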
I have the following code to play an animation with images:
- (void)playPopAnimation:(void (^)(void))completion {
    __block NSMutableArray *images;
    float animationDuration = 1.0f;
    images = [[NSMutableArray alloc] initWithCapacity:26];
    for (int i = 1; i < 26; i++) {
        NSString *fileName = [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"bulut_pop%d", i] ofType:@"png"];
        UIImage *image = [UIImage imageWithContentsOfFile:fileName];
        //UIImage *image = [UIImage imageNamed:[NSString stringWithFormat:@"bulut_pop%d.png", i]];
        [images addObject:image];
    }
    [Helper playSound:@"button_pop"];

    UIImageView *imageView = [[UIImageView alloc] initWithFrame:self.itemImage.frame];
    imageView.animationImages = images;
    imageView.animationDuration = animationDuration;
    imageView.animationRepeatCount = 1; // 0 = repeat non-stop
    [self addSubview:imageView];
    [imageView startAnimating];

    self.imageButton.userInteractionEnabled = NO;
    [[UIApplication sharedApplication] beginIgnoringInteractionEvents];
    // dispatch_get_current_queue() is deprecated, so dispatch back to the main queue
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(animationDuration * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
        //[self.delegate myItemCell:self cellDidTappedFor:[self item]];
        self.imageButton.userInteractionEnabled = YES;
        images = nil;
    });
}
Each time I call this function, memory increases by 10 MB. What is a good way to play animated image views?
Each time you call this method, you add a new image view (with its array of images) to your view. You should refactor your code so that the creation of the array and the imageView sits inside an if clause testing whether the imageView is nil, so that part only runs the first time the method is called (and make imageView a property). A sketch of that refactor follows.
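A minimal sketch of that refactor, assuming a popImageView property is added to the class (the property name is mine, not the original author's):

- (void)playPopAnimation:(void (^)(void))completion {
    // Build the image array and the image view only once; reuse them afterwards.
    // Assumes: @property (strong, nonatomic) UIImageView *popImageView;
    if (self.popImageView == nil) {
        NSMutableArray *images = [[NSMutableArray alloc] initWithCapacity:25];
        for (int i = 1; i < 26; i++) {
            NSString *fileName = [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"bulut_pop%d", i] ofType:@"png"];
            [images addObject:[UIImage imageWithContentsOfFile:fileName]];
        }
        self.popImageView = [[UIImageView alloc] initWithFrame:self.itemImage.frame];
        self.popImageView.animationImages = images;
        self.popImageView.animationDuration = 1.0f;
        self.popImageView.animationRepeatCount = 1;
        [self addSubview:self.popImageView];
    }
    [Helper playSound:@"button_pop"];
    [self.popImageView startAnimating];
}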
UPDATE #1: forgot to mention this is running in an iPad app.
This is my revised code (still not working, but I got rid of the unnecessary code):
NSURL *tmpDirURL = [NSURL fileURLWithPath:NSTemporaryDirectory() isDirectory:YES];
NSURL *fileURL = [[tmpDirURL URLByAppendingPathComponent:@"custImage"] URLByAppendingPathExtension:@"png"];

NSError *writeError = nil;
[client.aClientImage writeToURL:fileURL options:0 error:&writeError];
NSAssert(writeError == nil, @"%@", writeError);

// write appointment info
NSString *htmlString;
if (client.aClientEMail.length > 0) {
    htmlString = [NSString stringWithFormat:NSLocalizedString(@"HTML_STRING1", nil),
                  client.aClientFirstName,
                  client.aClientLastName,
                  client.aClientEMail.length == 0 ? @"" : client.aClientEMail,
                  client.aClientPrimaryPhone,
                  apptSelected.aServices,
                  fileURL];
}
else {
    htmlString = [NSString stringWithFormat:NSLocalizedString(@"HTML_STRING2", nil),
                  client.aClientFirstName,
                  client.aClientLastName,
                  client.aClientPrimaryPhone,
                  apptSelected.aServices,
                  fileURL];
}
When I look at custImage in the Xcode debugger, I see a different image from the previous image, which is correct. However, when it comes time to display the image at fileURL, it's a totally different image than custImage, and it's the same image that was displayed the first time!
UPDATE #2: I have figured out that fileURL has the correct image, but it is NOT being written to the device the second time (the first image is not being replaced).
UPDATE #3: this is the contents of htmlString that is displayed in the UIWebView popover:
<html><head><style type="text/css"> body {font-family: "Verdana"; font-size: 12;} </style></head><body>
<h2>Rolf Marsh</h2><p>phone: 213-555-1234<p>services: Art, Decals<p><img src="file:///private/var/mobile/Applications/FEE7159E-1FF8-4B94-A446-2A4C72E0AD41/tmp/custImage.png"/></body></html>
Any suggestions on how to fix this?
You never write the data to disk, as far as I can see; only in the commented-out section do you attempt to.
Write it in between:
NSURL *tmpDirURL = [NSURL fileURLWithPath:NSTemporaryDirectory() isDirectory:YES];
NSURL *fileURL = [[tmpDirURL URLByAppendingPathComponent:@"custImage"] URLByAppendingPathExtension:@"png"];

//write
NSError *writeError = nil;
[client.aClientImage writeToURL:fileURL options:0 error:&writeError];
NSAssert(writeError == nil, @"%@", writeError);

//read / use in the html or so
...
Don't forget to reload the web view, or whatever you use to show the HTML.
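With a UIWebView, that can be as simple as re-issuing the load once the file has been rewritten (using the question's own webView and htmlString):

// Re-render the HTML so the freshly written image file is picked up.
[webView loadHTMLString:htmlString baseURL:nil];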
I figured it out... for others who are having the same problem, here is the code that works. Notice that I do NOT save it to a temporary file, since I already have the image in the Core Data entity. Here's the code:
- (void)showExistingAppointmentDetails:(AppointmentInfo *)apptSelected {
    // make rectangle to attach popover
    CGRect rectangle = CGRectMake([apptSelected.aPosX floatValue], [apptSelected.aPosY floatValue],
                                  [apptSelected.aPosW floatValue], [apptSelected.aPosH floatValue]);

    // see if this is for staff; if so, don't display anything
    if ([apptSelected.aApptKey isEqual:@"Staff"])
        return;

    NSPredicate *predicate = [NSPredicate predicateWithFormat:@"aClientKey == %@", apptSelected.aApptKey]; // was aApptKey
    ClientInfo *client = [ClientInfo MR_findFirstWithPredicate:predicate inContext:localContext];
    UIImage *image = [UIImage imageWithData:client.aClientImage]; // image is good <---------

    // write appointment info into html string
    NSString *htmlString;
    if (client.aClientEMail.length > 0) {
        htmlString = [NSString stringWithFormat:NSLocalizedString(@"HTML_STRING1", nil),
                      client.aClientFirstName,
                      client.aClientLastName,
                      client.aClientEMail.length == 0 ? @"" : client.aClientEMail,
                      client.aClientPrimaryPhone,
                      apptSelected.aServices,
                      [self htmlForPNGImage:image]];
    }
    else {
        htmlString = [NSString stringWithFormat:NSLocalizedString(@"HTML_STRING2", nil),
                      client.aClientFirstName,
                      client.aClientLastName,
                      client.aClientPrimaryPhone,
                      apptSelected.aServices,
                      [self htmlForPNGImage:image]];
    }

    UIViewController *popoverContent = [[UIViewController alloc] init];
    UIView *popoverView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 250, 300)];
    popoverView.backgroundColor = [UIColor colorWithWhite:(CGFloat)1.0 alpha:(CGFloat)1.0]; // frame color?
    popoverContent.view = popoverView;

    // resize the popover view shown in the current view to the view's size
    popoverContent.contentSizeForViewInPopover = CGSizeMake(250, 300);

    // add the UIWebView for rich text
    UIWebView *webView = [[UIWebView alloc] initWithFrame:popoverView.frame];
    webView.backgroundColor = [UIColor whiteColor]; // change background color here

    // add the webView to the popover
    [webView loadHTMLString:htmlString baseURL:nil];
    [popoverView addSubview:webView];

    // if a previous popoverController is still visible... dismiss it
    if ([popoverController isPopoverVisible]) {
        [popoverController dismissPopoverAnimated:YES];
    }

    // create a popover controller
    popoverController = [[UIPopoverController alloc] initWithContentViewController:popoverContent];
    [popoverController presentPopoverFromRect:rectangle inView:self
                     permittedArrowDirections:UIPopoverArrowDirectionAny animated:YES];
}

- (NSString *)htmlForPNGImage:(UIImage *)image {
    NSData *imageData = UIImagePNGRepresentation(image);
    NSString *imageSource = [NSString stringWithFormat:@"data:image/png;base64,%@",
                             [imageData base64EncodedStringWithOptions:0]];
    return imageSource;
}