Custom camera in iOS

I have an app where I tried to implement a custom camera. This is the source code:
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
//device.position ;
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
[session addInput:input];
[session startRunning];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
Then I tried to take a photo and send it to another view controller:
-(IBAction) captureNow
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
//NSLog(#"about to request a capture from: %#", stillImageOutput);
AcceptingPhotoViewController *photo = [[AcceptingPhotoViewController alloc] initWithNibName:#"AcceptingPhotoViewController" bundle:nil];
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
photo.image = [[UIImage alloc] init ];
photo.image = image;
photo.photoFromCamera = YES;
[self.navigationController pushViewController:photo animated:NO];
}];
}
But in my target class the image comes out rotated 90 degrees to the left:
I tried to rotate it back:
float newSide = MAX([image size].width, [image size].height);
CGSize size = CGSizeMake(newSide, newSide);
UIGraphicsBeginImageContext(size);
CGContextRef ctx = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(ctx, newSide/2, newSide/2);
CGContextRotateCTM(ctx, 1.57079633);
CGContextDrawImage(UIGraphicsGetCurrentContext(),CGRectMake(-[image size].width/2,-[image size].height/2,size.width, size.height),image.CGImage);
UIImage *i = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
image = i;
Now the image is rotated to the right, but it displays mirrored and stretched.
Any help? Can I rotate it another way, or should I be taking the photo differently?

I found a solution to this problem:
I rotate and transform the image this way:
UIView* rotatedViewBox = [[UIView alloc] initWithFrame: CGRectMake(0, 0, image.size.width, image.size.height)];
float angleRadians = 90 * ((float)M_PI / 180.0f);
CGAffineTransform t = CGAffineTransformMakeRotation(angleRadians);
rotatedViewBox.transform = t;
CGSize rotatedSize = rotatedViewBox.frame.size;
UIGraphicsBeginImageContext(rotatedSize);
CGContextRef bitmap = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(bitmap, rotatedSize.width/2, rotatedSize.height/2);
CGContextRotateCTM(bitmap, angleRadians);
CGContextScaleCTM(bitmap, 1.0, -1.0);
CGContextDrawImage(bitmap, CGRectMake(-image.size.width / 2, -image.size.height / 2, image.size.width, image.size.height), [image CGImage]);
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
image = newImage;
CGSize newSize = CGSizeMake(image.size.height, image.size.width);
UIGraphicsBeginImageContext(newSize);
// Tell the old image to draw in this new context, with the desired
// new size
[image drawInRect:CGRectMake(0,0,image.size.height,image.size.width)];
// Get the new image from the context
UIImage* newImage2 = UIGraphicsGetImageFromCurrentImageContext();
// End the context
UIGraphicsEndImageContext();
image = newImage2;

I also had the same problem after just copying and pasting.
Try this code; it should help:
- (UIImage *)fixrotation:(UIImage *)image{
if (image.imageOrientation == UIImageOrientationDown) return image;
CGAffineTransform transform = CGAffineTransformIdentity;
switch (image.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, image.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
case UIImageOrientationUp:
case UIImageOrientationUpMirrored:
break;
}
switch (image.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, image.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationUp:
case UIImageOrientationDown:
case UIImageOrientationLeft:
case UIImageOrientationRight:
break;
}
// Now we draw the underlying CGImage into a new context, applying the transform
// calculated above.
CGContextRef ctx = CGBitmapContextCreate(NULL, image.size.width, image.size.height,
CGImageGetBitsPerComponent(image.CGImage), 0,
CGImageGetColorSpace(image.CGImage),
CGImageGetBitmapInfo(image.CGImage));
CGContextConcatCTM(ctx, transform);
switch (image.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
// Grr...
CGContextDrawImage(ctx, CGRectMake(0,0,image.size.height,image.size.width), image.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,image.size.width,image.size.height), image.CGImage);
break;
}
// And now we just create a new UIImage from the drawing context
CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
UIImage *img = [UIImage imageWithCGImage:cgimg];
CGContextRelease(ctx);
CGImageRelease(cgimg);
return img;
}
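For example, in the capture completion handler from the question, you could normalize the photo before pushing it (a minimal usage sketch based on the question's handler; fixrotation: is the method above):
UIImage *image = [[UIImage alloc] initWithData:imageData];
image = [self fixrotation:image]; // redraw the pixels so the result no longer depends on imageOrientation
photo.image = image;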

Related

Having issues with AVCaptureVideoOrientation showing camera preview incorrectly

I've been given some code from 2011 to update for a client. I've managed to get it all sorted for iOS 7 and submitted the update to Apple.
I now have an issue with the app which I need to sort out as soon as possible. I'm really struggling to find a solution; I've tried all sorts!
In brief, this app allows users to take multiple photos in a custom camera view (with a camera preview at all times), then hit a done button to close the window and return to the previous screen.
However, when the camera preview shows on screen, it is in the incorrect orientation, rotated 90 degrees counter-clockwise.
Here is an example:
I need to force the user to take the photo in landscape mode.
The second issue is that if the user does take a landscape photo, it is saved rotated 90 degrees counter-clockwise as well.
Can anybody help?
Full code is below. This is the original code that was supplied, with a few tweaks to try and fix it, without much joy!
Many thanks in advance
Simon
//
// CameraViewController.m
// X
//
// Created by X on 8/9/11.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "CameraViewController.h"
#import <ImageIO/ImageIO.h>
static AVCaptureVideoOrientation avOrientationForInterfaceOrientation(UIInterfaceOrientation iOrientation);
static AVCaptureVideoOrientation avOrientationForInterfaceOrientation(UIInterfaceOrientation iOrientation)
{
AVCaptureVideoOrientation result = iOrientation;
if ( iOrientation == UIInterfaceOrientationLandscapeLeft )
result = AVCaptureVideoOrientationLandscapeLeft;
else if ( iOrientation == UIInterfaceOrientationLandscapeRight )
result = AVCaptureVideoOrientationLandscapeRight;
return result;
}
@implementation CameraViewController
@synthesize stillImageOutput, delegate;
@synthesize session, captureVideoPreviewLayer;
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration
{
AVCaptureVideoOrientation avcaptureOrientation = avOrientationForInterfaceOrientation(toInterfaceOrientation);
self.captureVideoPreviewLayer.connection.videoOrientation = avcaptureOrientation;
}
- (void)dealloc {
[stillImageOutput release];
[captureVideoPreviewLayer release];
[session release];
[super dealloc];
}
- (void)didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view from its nib.
// rotate the status bar
[UIApplication sharedApplication].statusBarOrientation = UIInterfaceOrientationLandscapeLeft;
// try and rotate the camera live preview
}
-(void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
self.session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
AVCaptureVideoOrientation avcaptureOrientation = avOrientationForInterfaceOrientation(self.interfaceOrientation);
self.captureVideoPreviewLayer.connection.videoOrientation = avcaptureOrientation;
//captureVideoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
NSLog(#"doing something");
captureVideoPreviewLayer.frame = cameraView.bounds;
[cameraView.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
canDismiss = 0;
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didRotate:)
name:UIDeviceOrientationDidChangeNotification object:nil];
}
- (void)viewDidUnload {
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
-(BOOL)shouldAutorotate {
return YES;
}
- (NSUInteger)supportedInterfaceOrientations
{
return UIInterfaceOrientationMaskLandscapeLeft;
}
// Set the camera to force itself to a landscape view
- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation
{
return UIInterfaceOrientationLandscapeLeft;
}
/*- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Return YES for supported orientations
//return (interfaceOrientation == UIInterfaceOrientationPortrait);
return (interfaceOrientation == UIInterfaceOrientationPortrait) | (interfaceOrientation == UIInterfaceOrientationLandscapeLeft) | (interfaceOrientation == UIInterfaceOrientationLandscapeRight);
}*/
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
if(interfaceOrientation == UIInterfaceOrientationLandscapeLeft)
{
captureVideoPreviewLayer.connection.videoOrientation = UIInterfaceOrientationLandscapeLeft;
}
// and so on for other orientations
return ((interfaceOrientation == UIInterfaceOrientationLandscapeLeft));
}
- (void) didRotate:(NSNotification *)notification {
[UIView beginAnimations:nil context:nil];
if ( UIInterfaceOrientationIsLandscape([[UIDevice currentDevice] orientation]) ) {
[topView setFrame:CGRectMake(0, 0, 320, 27)];
[bottomView setFrame:CGRectMake(0, 453, 320, 27)];
} else {
[topView setFrame:CGRectMake(0, 0, 320, 110)];
[bottomView setFrame:CGRectMake(0, 350, 320, 110)];
}
switch ([[UIDevice currentDevice] orientation]) {
case UIInterfaceOrientationLandscapeLeft:
btnDone.transform = CGAffineTransformMakeRotation(3*M_PI_2);
btnTakePicture.transform = CGAffineTransformMakeRotation(3*M_PI_2);
break;
default:
btnDone.transform = CGAffineTransformMakeRotation(M_PI_2);
btnTakePicture.transform = CGAffineTransformMakeRotation(M_PI_2);
break;
}
[UIView commitAnimations];
}
-(IBAction)takePhoto {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
canDismiss++;
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
}
else
NSLog(#"no attachments");
int width = [[[NSUserDefaults standardUserDefaults] objectForKey:#"image_width"] intValue];
int height = [[[NSUserDefaults standardUserDefaults] objectForKey:#"image_height"] intValue];
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage * image = [[UIImage alloc] initWithData:imageData];
NSLog(#"Image w:%f h:%f", image.size.width, image.size.height);
UIImage * rotatedimage;
if ( UIInterfaceOrientationIsLandscape([[UIDevice currentDevice] orientation]) ) {
// Rotate the image
CGSize newSize = CGSizeMake(image.size.height, image.size.width);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef ctx = CGBitmapContextCreate (NULL, newSize.width, newSize.height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colorSpace);
if ( [[UIDevice currentDevice] orientation] == UIInterfaceOrientationLandscapeLeft ) {
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0, newSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
CGContextScaleCTM(ctx, -1.0, 1.0);
CGContextTranslateCTM(ctx, -newSize.width, 0);
CGContextConcatCTM(ctx, transform);
}
CGContextDrawImage(ctx, CGRectMake(0, 0, newSize.width, newSize.height), image.CGImage);
CGImageRef sourceImageRef = CGBitmapContextCreateImage(ctx);
rotatedimage = [UIImage imageWithCGImage:sourceImageRef];
CGContextRelease(ctx);
NSLog(#"Rotated Image w:%f h:%f", rotatedimage.size.width, rotatedimage.size.height);
// Scale the image
newSize = CGSizeMake(width, height);
UIGraphicsBeginImageContext(newSize);
[rotatedimage drawInRect:CGRectMake(0,0,newSize.width,newSize.height)];
//image is the original UIImage
UIImage * newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(#"New Image w:%f h:%f", newImage.size.width, newImage.size.height);
[delegate tookPhoto:newImage];
} else {
// Scale the image
CGSize newSize = CGSizeMake(width, image.size.height/(image.size.width/width));
UIGraphicsBeginImageContext( newSize );
[image drawInRect:CGRectMake(0,0,newSize.width,newSize.height)];
UIImage * newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(#"New Image w:%f h:%f", newImage.size.width, newImage.size.height);
// Chop out the middle
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef ctx = CGBitmapContextCreate (NULL, width, height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colorSpace);
if ( [[UIDevice currentDevice] orientation] == UIInterfaceOrientationPortraitUpsideDown ) {
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0, newImage.size.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
CGContextScaleCTM(ctx, -1.0, 1.0);
CGContextTranslateCTM(ctx, -newImage.size.width, 0);
CGContextConcatCTM(ctx, transform);
CGContextDrawImage(ctx, CGRectMake(0, (newImage.size.height-height)/2, newImage.size.width, newImage.size.height), newImage.CGImage);
} else {
CGContextDrawImage(ctx, CGRectMake(0, -(newImage.size.height-height)/2, newImage.size.width, newImage.size.height), newImage.CGImage);
}
CGImageRef sourceImageRef = CGBitmapContextCreateImage(ctx);
CGContextRelease(ctx);
[delegate tookPhoto:[UIImage imageWithCGImage:sourceImageRef]];
}
[image release];
canDismiss--;
}];
}
// action when the 'tick' button is pressed
-(IBAction)done {
if ( canDismiss == 0 ) {
self.delegate = nil;
// animate the camera view away
[self dismissViewControllerAnimated:YES completion:nil];
// swap the status bar back to the default portrait
[UIApplication sharedApplication].statusBarOrientation = UIInterfaceOrientationPortrait;
}
}
@end

Rotating camera and photo taken issue in iOS 7

I've been given an app from a few years ago to update for a new client, and been provided with the original code.
After making the updates they wanted, we discovered a bug: if the user rotates the device to take a landscape photo, the view on screen doesn't rotate, and the photo is taken at 90 degrees rather than in true landscape orientation.
This was meant to be a quick job, which has turned into a big headache.
Any ideas what I can do to the code below in order to fix this issue?
Many thanks!
//
// CameraViewController.m
//
//
// Created by XXX on 8/9/11.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "CameraViewController.h"
#import <ImageIO/ImageIO.h>
@implementation CameraViewController
@synthesize stillImageOutput, delegate;
@synthesize session, captureVideoPreviewLayer;
- (void)dealloc {
[stillImageOutput release];
[captureVideoPreviewLayer release];
[session release];
[super dealloc];
}
- (void)didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view from its nib.
}
-(void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
self.session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = cameraView.bounds;
[cameraView.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
canDismiss = 0;
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didRotate:)
name:UIDeviceOrientationDidChangeNotification object:nil];
}
- (void)viewDidUnload {
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Return YES for supported orientations
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
- (void) didRotate:(NSNotification *)notification {
[UIView beginAnimations:nil context:nil];
if ( UIInterfaceOrientationIsLandscape([[UIDevice currentDevice] orientation]) ) {
[topView setFrame:CGRectMake(0, 0, 320, 27)];
[bottomView setFrame:CGRectMake(0, 433, 320, 27)];
} else {
[topView setFrame:CGRectMake(0, 0, 320, 110)];
[bottomView setFrame:CGRectMake(0, 350, 320, 110)];
}
switch ([[UIDevice currentDevice] orientation]) {
case UIInterfaceOrientationPortrait:
btnDone.transform = CGAffineTransformIdentity;
btnTakePicture.transform = CGAffineTransformIdentity;
break;
case UIInterfaceOrientationPortraitUpsideDown:
btnDone.transform = CGAffineTransformMakeRotation(M_PI);
btnTakePicture.transform = CGAffineTransformMakeRotation(M_PI);
break;
case UIInterfaceOrientationLandscapeLeft:
btnDone.transform = CGAffineTransformMakeRotation(3*M_PI_2);
btnTakePicture.transform = CGAffineTransformMakeRotation(3*M_PI_2);
break;
default:
btnDone.transform = CGAffineTransformMakeRotation(M_PI_2);
btnTakePicture.transform = CGAffineTransformMakeRotation(M_PI_2);
break;
}
[UIView commitAnimations];
}
-(IBAction)takePhoto {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
canDismiss++;
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
}
else
NSLog(#"no attachments");
int width = [[[NSUserDefaults standardUserDefaults] objectForKey:#"image_width"] intValue];
int height = [[[NSUserDefaults standardUserDefaults] objectForKey:#"image_height"] intValue];
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage * image = [[UIImage alloc] initWithData:imageData];
NSLog(#"Image w:%f h:%f", image.size.width, image.size.height);
UIImage * rotatedimage;
if ( UIInterfaceOrientationIsLandscape([[UIDevice currentDevice] orientation]) ) {
// Rotate the image
CGSize newSize = CGSizeMake(image.size.height, image.size.width);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef ctx = CGBitmapContextCreate (NULL, newSize.width, newSize.height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colorSpace);
if ( [[UIDevice currentDevice] orientation] == UIInterfaceOrientationLandscapeLeft ) {
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0, newSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
CGContextScaleCTM(ctx, -1.0, 1.0);
CGContextTranslateCTM(ctx, -newSize.width, 0);
CGContextConcatCTM(ctx, transform);
}
CGContextDrawImage(ctx, CGRectMake(0, 0, newSize.width, newSize.height), image.CGImage);
CGImageRef sourceImageRef = CGBitmapContextCreateImage(ctx);
rotatedimage = [UIImage imageWithCGImage:sourceImageRef];
CGContextRelease(ctx);
NSLog(#"Rotated Image w:%f h:%f", rotatedimage.size.width, rotatedimage.size.height);
// Scale the image
newSize = CGSizeMake(width, height);
UIGraphicsBeginImageContext(newSize);
[rotatedimage drawInRect:CGRectMake(0,0,newSize.width,newSize.height)];
//image is the original UIImage
UIImage * newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(#"New Image w:%f h:%f", newImage.size.width, newImage.size.height);
[delegate tookPhoto:newImage];
} else {
// Scale the image
CGSize newSize = CGSizeMake(width, image.size.height/(image.size.width/width));
UIGraphicsBeginImageContext( newSize );
[image drawInRect:CGRectMake(0,0,newSize.width,newSize.height)];
UIImage * newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(#"New Image w:%f h:%f", newImage.size.width, newImage.size.height);
// Chop out the middle
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef ctx = CGBitmapContextCreate (NULL, width, height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colorSpace);
if ( [[UIDevice currentDevice] orientation] == UIInterfaceOrientationPortraitUpsideDown ) {
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0, newImage.size.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
CGContextScaleCTM(ctx, -1.0, 1.0);
CGContextTranslateCTM(ctx, -newImage.size.width, 0);
CGContextConcatCTM(ctx, transform);
CGContextDrawImage(ctx, CGRectMake(0, (newImage.size.height-height)/2, newImage.size.width, newImage.size.height), newImage.CGImage);
} else {
CGContextDrawImage(ctx, CGRectMake(0, -(newImage.size.height-height)/2, newImage.size.width, newImage.size.height), newImage.CGImage);
}
CGImageRef sourceImageRef = CGBitmapContextCreateImage(ctx);
CGContextRelease(ctx);
[delegate tookPhoto:[UIImage imageWithCGImage:sourceImageRef]];
}
[image release];
canDismiss--;
}];
}
-(IBAction)done {
if ( canDismiss == 0 ) {
self.delegate = nil;
//[self dismissModalViewControllerAnimated:YES];
[self dismissViewControllerAnimated:YES completion:nil];
}
}
@end
If I'm not mistaken, you have to place this code in your view controller. Try this:
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Return YES for supported orientations
if ([landscapeAllowedID containsObject:currentViewID])
return (interfaceOrientation == UIInterfaceOrientationPortrait) || UIInterfaceOrientationIsLandscape(interfaceOrientation);
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
EDIT
Also add this to the UIViewController.
-(NSUInteger)supportedInterfaceOrientations{
return UIInterfaceOrientationMaskPortrait | UIInterfaceOrientationMaskLandscape; // etc
}
If that doesn't work, place the code in a custom UINavigationController. You have to tell the UINavigationController somehow which view is currently being displayed.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Return YES for supported orientations
if ([landscapeAllowedID containsObject:currentViewID])
return (interfaceOrientation == UIInterfaceOrientationPortrait) || UIInterfaceOrientationIsLandscape(interfaceOrientation);
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
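One way to let it know is simply to forward the decision to the view controller on top of the stack (a hedged sketch inside the custom UINavigationController, rather than tracking view IDs yourself):
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Defer to whatever view controller is currently visible
return [self.topViewController shouldAutorotateToInterfaceOrientation:interfaceOrientation];
}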

drawMapRect context + Grand Central Dispatch

I have this function:
- (void)drawMapRect:(MKMapRect)mapRect zoomScale:(MKZoomScale)zoomScale inContext:(CGContextRef)context
{
TileOverlay *tileOverlay = (TileOverlay *)self.overlay;
NSArray *tilesInRect = [tileOverlay tilesInMapRect:mapRect zoomScale:zoomScale];
CGContextSetAlpha(context, tileAlpha);
for (ImageTile *tile in tilesInRect)
{
__block UIImage * image;
CGRect rect = [self rectForMapRect:tile.frame];
NSString *path = [[NSString alloc] initWithFormat:@".../%@.png", tile.imagePath];
NSLog(@"Loading tile from URL %@", path);
image =[UIImage imageWithData: [NSData dataWithContentsOfURL:[NSURL URLWithString: path]]];
CGContextSaveGState(context);
CGContextTranslateCTM(context, CGRectGetMinX(rect), CGRectGetMinY(rect));
CGContextScaleCTM(context, 1/zoomScale, 1/zoomScale);
CGContextTranslateCTM(context, 0, image.size.height);
CGContextScaleCTM(context, 1, -1);
CGContextDrawImage(context, CGRectMake(0, 0, image.size.width, image.size.height), [image CGImage]);
CGContextRestoreGState(context);
}
}
As you know, dataWithContentsOfURL: blocks the thread until it is done. I want to move the image loading into a GCD block.
I tried to do it like this:
- (void)drawMapRect:(MKMapRect)mapRect zoomScale:(MKZoomScale)zoomScale inContext:(CGContextRef)context
{
TileOverlay *tileOverlay = (TileOverlay *)self.overlay;
NSArray *tilesInRect = [tileOverlay tilesInMapRect:mapRect zoomScale:zoomScale];
CGContextSetAlpha(context, tileAlpha);
for (ImageTile *tile in tilesInRect)
{
__block UIImage * image;
CGRect rect = [self rectForMapRect:tile.frame];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0), ^{
NSString *path = [[NSString alloc] initWithFormat:@".../%@.png", tile.imagePath];
NSLog(@"Loading tile from URL %@", path);
image =[UIImage imageWithData: [NSData dataWithContentsOfURL:[NSURL URLWithString: path]]];
CGContextSaveGState(context);
CGContextTranslateCTM(context, CGRectGetMinX(rect), CGRectGetMinY(rect));
CGContextScaleCTM(context, 1/zoomScale, 1/zoomScale);
CGContextTranslateCTM(context, 0, image.size.height);
CGContextScaleCTM(context, 1, -1);
CGContextDrawImage(context, CGRectMake(0, 0, image.size.width, image.size.height), [image CGImage]);
CGContextRestoreGState(context);
});
}
}
But I got a context error. Please help me with this.
How do I use context operations in a GCD block?
My first note is that MKOverlayView is deprecated. You should consider switching to MKOverlayRenderer.
There is no situation where you should use GCD from within a -draw… method. That includes MKOverlayView's -drawMapRect:zoomScale:inContext: as well as UIView's -drawRect:. Instead, you should be using an NSOperationQueue, in conjunction with -canDrawMapRect:zoomScale: and -setNeedsDisplayInMapRect:.
Here's some pseudo-code for it:
- (BOOL)canDrawMapRect:(MKMapRect)mapRect zoomScale:(MKZoomScale)zoomScale
{
BOOL hasAtLeastOneTile = NO;
TileOverlay *tileOverlay = (TileOverlay *)self.overlay;
NSArray *tilesInRect = [tileOverlay tilesInMapRect:mapRect zoomScale:zoomScale];
for (ImageTile *tile in tilesInRect) {
if ([tile isAvailable]) {
hasAtLeastOneTile = hasAtLeastOneTile || YES;
} else {
// Add operation to NSOperationQueue to fetch tile
__weak MKOverlayView *weakOverlay = self; // Weak ref to prevent retain cycles
NSOperation *op = [NSBlockOperation blockOperationWithBlock: ^{
//TODO: Load Tile
[weakOverlay setNeedsDisplayInMapRect:mapRect];
}];
[self.operationQueue addOperation:op];
}
}
return hasAtLeastOneTile;
}
Then in your -drawMapRect:zoomScale:inContext: you draw what tiles you have available and skip the ones that are not.
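A rough sketch of that draw method, reusing the question's loop (assuming a hypothetical cachedImage property that the fetch operation fills in once the download completes):
- (void)drawMapRect:(MKMapRect)mapRect zoomScale:(MKZoomScale)zoomScale inContext:(CGContextRef)context
{
TileOverlay *tileOverlay = (TileOverlay *)self.overlay;
NSArray *tilesInRect = [tileOverlay tilesInMapRect:mapRect zoomScale:zoomScale];
CGContextSetAlpha(context, tileAlpha);
for (ImageTile *tile in tilesInRect)
{
if (![tile isAvailable]) continue; // not fetched yet; setNeedsDisplayInMapRect: will trigger a redraw later
UIImage *image = tile.cachedImage; // hypothetical property holding the downloaded tile
CGRect rect = [self rectForMapRect:tile.frame];
CGContextSaveGState(context);
CGContextTranslateCTM(context, CGRectGetMinX(rect), CGRectGetMinY(rect));
CGContextScaleCTM(context, 1/zoomScale, 1/zoomScale);
CGContextTranslateCTM(context, 0, image.size.height);
CGContextScaleCTM(context, 1, -1);
CGContextDrawImage(context, CGRectMake(0, 0, image.size.width, image.size.height), image.CGImage);
CGContextRestoreGState(context);
}
}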

How to crop an image from AVCapture to a rect seen on the display

This is driving me crazy because I can't get it to work. I have the following scenario:
I'm using an AVCaptureSession and an AVCaptureVideoPreviewLayer to create my own camera interface. The interface shows a rectangle. Below is the AVCaptureVideoPreviewLayer that fills the whole screen.
I want the captured image to be cropped so that the resulting image shows exactly the content seen in the rect on the display.
My setup looks like this:
_session = [[AVCaptureSession alloc] init];
AVCaptureSession *session = _session;
session.sessionPreset = AVCaptureSessionPresetPhoto;
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (camera == nil) {
[self showImagePicker];
_isSetup = YES;
return;
}
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.frame = self.liveCapturePlaceholderView.bounds;
[self.liveCapturePlaceholderView.layer addSublayer:captureVideoPreviewLayer];
NSError *error;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
if (error) {
HGAlertViewWrapper *av = [[HGAlertViewWrapper alloc] initWithTitle:kFailedConnectingToCameraAlertViewTitle message:kFailedConnectingToCameraAlertViewMessage cancelButtonTitle:kFailedConnectingToCameraAlertViewCancelButtonTitle otherButtonTitles:@[kFailedConnectingToCameraAlertViewRetryButtonTitle]];
[av showWithBlock:^(NSString *buttonTitle){
if ([buttonTitle isEqualToString:kFailedConnectingToCameraAlertViewCancelButtonTitle]) {
[self.delegate gloameCameraViewControllerDidCancel:self];
}
else {
[self setupAVSession];
}
}];
}
[session addInput:input];
NSDictionary *options = @{ AVVideoCodecKey : AVVideoCodecJPEG };
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[_stillImageOutput setOutputSettings:options];
[session addOutput:_stillImageOutput];
[session startRunning];
_isSetup = YES;
I'm capturing the image like this:
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
if (error) {
MWLogDebug(@"Error capturing image from camera. %@, %@", error, [error userInfo]);
_capturePreviewLayer.connection.enabled = YES;
}
else
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
CGRect cropRect = [self createCropRectForImage:image];
UIImage *croppedImage;// = [self cropImage:image toRect:cropRect];
UIGraphicsBeginImageContext(cropRect.size);
[image drawAtPoint:CGPointMake(-cropRect.origin.x, -cropRect.origin.y)];
croppedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
self.capturedImage = croppedImage;
[_session stopRunning];
}
}];
In the createCropRectForImage: method I've tried various ways to calculate the rect to cut out of the image, but with no success so far.
- (CGRect)createCropRectForImage:(UIImage *)image
{
CGPoint maskTopLeftCorner = CGPointMake(self.maskRectView.frame.origin.x, self.maskRectView.frame.origin.y);
CGPoint maskBottomRightCorner = CGPointMake(self.maskRectView.frame.origin.x + self.maskRectView.frame.size.width, self.maskRectView.frame.origin.y + self.maskRectView.frame.size.height);
CGPoint maskTopLeftCornerInLayerCoords = [_capturePreviewLayer convertPoint:maskTopLeftCorner fromLayer:self.maskRectView.layer.superlayer];
CGPoint maskBottomRightCornerInLayerCoords = [_capturePreviewLayer convertPoint:maskBottomRightCorner fromLayer:self.maskRectView.layer.superlayer];
CGPoint maskTopLeftCornerInDeviceCoords = [_capturePreviewLayer captureDevicePointOfInterestForPoint:maskTopLeftCornerInLayerCoords];
CGPoint maskBottomRightCornerInDeviceCoords = [_capturePreviewLayer captureDevicePointOfInterestForPoint:maskBottomRightCornerInLayerCoords];
float x = maskTopLeftCornerInDeviceCoords.x * image.size.width;
float y = (1 - maskTopLeftCornerInDeviceCoords.y) * image.size.height;
float width = fabsf(maskTopLeftCornerInDeviceCoords.x - maskBottomRightCornerInDeviceCoords.x) * image.size.width;
float height = fabsf(maskTopLeftCornerInDeviceCoords.y - maskBottomRightCornerInDeviceCoords.y) * image.size.height;
return CGRectMake(x, y, width, height);
}
That is my current version, but it doesn't even get the proportions right. Could someone please help me?
I have also tried using this method to crop my image:
- (UIImage*)cropImage:(UIImage*)originalImage toRect:(CGRect)rect{
CGImageRef imageRef = CGImageCreateWithImageInRect([originalImage CGImage], rect);
CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
CGColorSpaceRef colorSpaceInfo = CGImageGetColorSpace(imageRef);
CGContextRef bitmap = CGBitmapContextCreate(NULL, rect.size.width, rect.size.height, CGImageGetBitsPerComponent(imageRef), CGImageGetBytesPerRow(imageRef), colorSpaceInfo, bitmapInfo);
if (originalImage.imageOrientation == UIImageOrientationLeft) {
CGContextRotateCTM (bitmap, radians(90));
CGContextTranslateCTM (bitmap, 0, -rect.size.height);
} else if (originalImage.imageOrientation == UIImageOrientationRight) {
CGContextRotateCTM (bitmap, radians(-90));
CGContextTranslateCTM (bitmap, -rect.size.width, 0);
} else if (originalImage.imageOrientation == UIImageOrientationUp) {
// NOTHING
} else if (originalImage.imageOrientation == UIImageOrientationDown) {
CGContextTranslateCTM (bitmap, rect.size.width, rect.size.height);
CGContextRotateCTM (bitmap, radians(-180.));
}
CGContextDrawImage(bitmap, CGRectMake(0, 0, rect.size.width, rect.size.height), imageRef);
CGImageRef ref = CGBitmapContextCreateImage(bitmap);
UIImage *resultImage=[UIImage imageWithCGImage:ref];
CGImageRelease(imageRef);
CGContextRelease(bitmap);
CGImageRelease(ref);
return resultImage;
}
Does anybody have the 'right combination' of methods to make this work? :)
In Swift 3:
private func cropToPreviewLayer(originalImage: UIImage) -> UIImage {
let outputRect = previewLayer.metadataOutputRectConverted(fromLayerRect: previewLayer.bounds)
var cgImage = originalImage.cgImage!
let width = CGFloat(cgImage.width)
let height = CGFloat(cgImage.height)
let cropRect = CGRect(x: outputRect.origin.x * width, y: outputRect.origin.y * height, width: outputRect.size.width * width, height: outputRect.size.height * height)
cgImage = cgImage.cropping(to: cropRect)!
let croppedUIImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: originalImage.imageOrientation)
return croppedUIImage
}
I've solved this problem by using the metadataOutputRectOfInterestForRect: method.
It works with any orientation.
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
if (error)
{
[_delegate cameraView:self error:@"Take picture failed"];
}
else
{
NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *takenImage = [UIImage imageWithData:jpegData];
CGRect outputRect = [_previewLayer metadataOutputRectOfInterestForRect:_previewLayer.bounds];
CGImageRef takenCGImage = takenImage.CGImage;
size_t width = CGImageGetWidth(takenCGImage);
size_t height = CGImageGetHeight(takenCGImage);
CGRect cropRect = CGRectMake(outputRect.origin.x * width, outputRect.origin.y * height, outputRect.size.width * width, outputRect.size.height * height);
CGImageRef cropCGImage = CGImageCreateWithImageInRect(takenCGImage, cropRect);
takenImage = [UIImage imageWithCGImage:cropCGImage scale:1 orientation:takenImage.imageOrientation];
CGImageRelease(cropCGImage);
}
}
];
The takenImage is still an orientation-dependent image. You can strip the orientation information for further image processing:
UIGraphicsBeginImageContext(takenImage.size);
[takenImage drawAtPoint:CGPointZero];
takenImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
In Swift 4:
I prefer to never force-unwrap to avoid crashes, so I use optionals and guards in mine.
private func cropToPreviewLayer(originalImage: UIImage) -> UIImage? {
guard let cgImage = originalImage.cgImage else { return nil }
let outputRect = previewLayer.metadataOutputRectConverted(fromLayerRect: previewLayer.bounds)
let width = CGFloat(cgImage.width)
let height = CGFloat(cgImage.height)
let cropRect = CGRect(x: outputRect.origin.x * width, y: outputRect.origin.y * height, width: outputRect.size.width * width, height: outputRect.size.height * height)
if let croppedCGImage = cgImage.cropping(to: cropRect) {
return UIImage(cgImage: croppedCGImage, scale: 1.0, orientation: originalImage.imageOrientation)
}
return nil
}
AVMakeRectWithAspectRatioInsideRect
This API is from AVFoundation; it returns the crop region for the image, given the crop size.
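For example (a minimal sketch; the 4:3 target aspect ratio here is an assumed value):
CGSize cropAspect = CGSizeMake(4, 3); // assumed target aspect ratio
CGRect imageBounds = CGRectMake(0, 0, image.size.width, image.size.height);
CGRect cropRegion = AVMakeRectWithAspectRatioInsideRect(cropAspect, imageBounds);
// cropRegion is now the largest 4:3 rect that fits inside the image bounds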

Given a URL to a movie, how can I retrieve its info?

Under iOS, I need to get the codec used to make the movie (H.264, MJPEG and so on), the movie's width and height, the file size, and the number of frames in the movie. I tried using AVAsset for the movie duration and such, but the duration was always zero. Same for the width/height. I also tried using a movie player controller, but that did not work either (see code below). The docs are a bit confusing, and with multiple ways of getting to the same place, you can't seem to end up in the same spot.
Has anyone got the above information working properly? I am sure I am missing a few things, but I was hoping for sample code and/or pointers.
Edit: I added a better code example. But there are questions that remain: how do I get the creation date of the movie, the codec used to compress it, and the movie's file size? Anybody figured these out?
Thanks
- (IBAction)getMovieInfo
{
int hours = 0, minutes = 0, seconds = 0;
NSURL* sourceMovieURL = [NSURL URLWithString:@"http://trailers.apple.com/movies/summit/stepuprevolution/stepuprevolution-tlr1_h480p.mov"];
AVURLAsset* movieAsset = [AVURLAsset URLAssetWithURL:sourceMovieURL options:nil];
NSArray *tracks = [movieAsset tracksWithMediaType:AVMediaTypeVideo];
if ([tracks count] != 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
NSTimeInterval durationSeconds = CMTimeGetSeconds([movieAsset duration]);
CGSize videoSize = videoTrack.naturalSize;
//
// Let's get the movie's meta data
//
// Start with the duration of the movie
hours = durationSeconds / 3600;
minutes = ((int)durationSeconds % 3600) / 60; // minutes remaining after the hours are taken out
seconds = (int)durationSeconds % 60;
durationLabel.text = [NSString stringWithFormat:@"%d:%d:%d", hours, minutes, seconds];
// Next is the creation (posting) date of the movie
//postedLabel.text = AVMetadataQuickTimeUserDataKeyCreationDate;
//The resolution of the movie
resolutionLabel.text = [NSString stringWithFormat:@"%g x %g", videoSize.width, videoSize.height];
// The frame rate of the movie
rateLabel.text = [NSString stringWithFormat:@"%g fps", [videoTrack nominalFrameRate]];
// The frame count of the movie
countLabel.text = [NSString stringWithFormat:@"%g", [videoTrack nominalFrameRate] * durationSeconds];
// Get the codec used to compress the movie
// And lastly, let's generate a thumbnail of the movie
AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:movieAsset];
if (imageGenerator != NULL) {
CMTime thumbPoint = CMTimeMakeWithSeconds(15.0, 600);
NSError *error = nil;
CGImageRef thumbnail = [imageGenerator copyCGImageAtTime:thumbPoint actualTime:nil error:&error];
if (thumbnail != NULL) {
// Convert CGImage thumbnail to UIImage and then scale it.
UIImage *tempImage = [[UIImage alloc] initWithCGImage:thumbnail];
if (tempImage != NULL) {
// Let's scale the image and put the it into the imageview.
self.thumbDisplay.image=[self scaleAndRotateImage:tempImage];
CGImageRelease(thumbnail);
}
}
}
}
}
- (UIImage *)scaleAndRotateImage:(UIImage *)image {
CGImageRef imgRef = image.CGImage;
CGFloat width = 135.0;
CGFloat height = 75.0;
CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake(0, 0, width, height);
CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;
UIImageOrientation orient = image.imageOrientation;
switch (orient) {
case UIImageOrientationUp: //EXIF = 1
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored: //EXIF = 2
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown: //EXIF = 3
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored: //EXIF = 4
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored: //EXIF = 5
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft: //EXIF = 6
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRightMirrored: //EXIF = 7
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
case UIImageOrientationRight: //EXIF = 8
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
UIGraphicsBeginImageContext(bounds.size);
CGContextRef context = UIGraphicsGetCurrentContext();
if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
} else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM(context, transform);
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
UIImage *imageCopy = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return imageCopy;
}
NSURL's getResourceValue:forKey:error: method allows you to get the total number of bytes:
NSURL *fileUrl = [NSURL fileURLWithPath:filePath];
NSNumber *size = nil;
[fileUrl getResourceValue:&size forKey:NSURLFileSizeKey error:nil];
NSLog(@"Bytes : %@", size);
It is good practice to check for any errors returned.
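For instance (a sketch of that error check; note that NSURLFileSizeKey yields an NSNumber):
NSError *error = nil;
NSNumber *size = nil;
if ([fileUrl getResourceValue:&size forKey:NSURLFileSizeKey error:&error]) {
NSLog(@"Bytes : %@", size);
} else {
NSLog(@"Could not read file size: %@", error);
}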
Try:
CMTime lengthTime = [movieAsset duration];
Float64 seconds = CMTimeGetSeconds(lengthTime);
NSLog(#"Asset is %g seconds long", seconds);
