A new client has given me an app from a few years ago to update, along with the original code.
After making the updates they wanted to get the app working again, we discovered a bug: if the user rotates the device to take a landscape photo, the on-screen view doesn't rotate, and the photo is captured at 90 degrees rather than in true landscape orientation.
This was meant to be a quick job, but it has turned into a big headache.
Any ideas what I can do to the code below in order to fix this issue?
Many thanks!
//
// CameraViewController.m
//
//
// Created by XXX on 8/9/11.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "CameraViewController.h"
#import <ImageIO/ImageIO.h>
@implementation CameraViewController
@synthesize stillImageOutput, delegate;
@synthesize session, captureVideoPreviewLayer;
- (void)dealloc {
[stillImageOutput release];
[captureVideoPreviewLayer release];
[session release];
[super dealloc];
}
- (void)didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view from its nib.
}
-(void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
self.session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = cameraView.bounds;
[cameraView.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
canDismiss = 0;
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didRotate:)
name:UIDeviceOrientationDidChangeNotification object:nil];
}
- (void)viewDidUnload {
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Return YES for supported orientations
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
- (void) didRotate:(NSNotification *)notification {
[UIView beginAnimations:nil context:nil];
if ( UIInterfaceOrientationIsLandscape([[UIDevice currentDevice] orientation]) ) {
[topView setFrame:CGRectMake(0, 0, 320, 27)];
[bottomView setFrame:CGRectMake(0, 433, 320, 27)];
} else {
[topView setFrame:CGRectMake(0, 0, 320, 110)];
[bottomView setFrame:CGRectMake(0, 350, 320, 110)];
}
switch ([[UIDevice currentDevice] orientation]) {
case UIInterfaceOrientationPortrait:
btnDone.transform = CGAffineTransformIdentity;
btnTakePicture.transform = CGAffineTransformIdentity;
break;
case UIInterfaceOrientationPortraitUpsideDown:
btnDone.transform = CGAffineTransformMakeRotation(M_PI);
btnTakePicture.transform = CGAffineTransformMakeRotation(M_PI);
break;
case UIInterfaceOrientationLandscapeLeft:
btnDone.transform = CGAffineTransformMakeRotation(3*M_PI_2);
btnTakePicture.transform = CGAffineTransformMakeRotation(3*M_PI_2);
break;
default:
btnDone.transform = CGAffineTransformMakeRotation(M_PI_2);
btnTakePicture.transform = CGAffineTransformMakeRotation(M_PI_2);
break;
}
[UIView commitAnimations];
}
-(IBAction)takePhoto {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
canDismiss++;
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
}
else
NSLog(#"no attachments");
int width = [[[NSUserDefaults standardUserDefaults] objectForKey:#"image_width"] intValue];
int height = [[[NSUserDefaults standardUserDefaults] objectForKey:#"image_height"] intValue];
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage * image = [[UIImage alloc] initWithData:imageData];
NSLog(#"Image w:%f h:%f", image.size.width, image.size.height);
UIImage * rotatedimage;
if ( UIInterfaceOrientationIsLandscape([[UIDevice currentDevice] orientation]) ) {
// Rotate the image
CGSize newSize = CGSizeMake(image.size.height, image.size.width);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef ctx = CGBitmapContextCreate (NULL, newSize.width, newSize.height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colorSpace);
if ( [[UIDevice currentDevice] orientation] == UIInterfaceOrientationLandscapeLeft ) {
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0, newSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
CGContextScaleCTM(ctx, -1.0, 1.0);
CGContextTranslateCTM(ctx, -newSize.width, 0);
CGContextConcatCTM(ctx, transform);
}
CGContextDrawImage(ctx, CGRectMake(0, 0, newSize.width, newSize.height), image.CGImage);
CGImageRef sourceImageRef = CGBitmapContextCreateImage(ctx);
rotatedimage = [UIImage imageWithCGImage:sourceImageRef];
CGContextRelease(ctx);
NSLog(#"Rotated Image w:%f h:%f", rotatedimage.size.width, rotatedimage.size.height);
// Scale the image
newSize = CGSizeMake(width, height);
UIGraphicsBeginImageContext(newSize);
[rotatedimage drawInRect:CGRectMake(0,0,newSize.width,newSize.height)];
//image is the original UIImage
UIImage * newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(#"New Image w:%f h:%f", newImage.size.width, newImage.size.height);
[delegate tookPhoto:newImage];
} else {
// Scale the image
CGSize newSize = CGSizeMake(width, image.size.height/(image.size.width/width));
UIGraphicsBeginImageContext( newSize );
[image drawInRect:CGRectMake(0,0,newSize.width,newSize.height)];
UIImage * newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(#"New Image w:%f h:%f", newImage.size.width, newImage.size.height);
// Chop out the middle
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef ctx = CGBitmapContextCreate (NULL, width, height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colorSpace);
if ( [[UIDevice currentDevice] orientation] == UIInterfaceOrientationPortraitUpsideDown ) {
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0, newImage.size.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
CGContextScaleCTM(ctx, -1.0, 1.0);
CGContextTranslateCTM(ctx, -newImage.size.width, 0);
CGContextConcatCTM(ctx, transform);
CGContextDrawImage(ctx, CGRectMake(0, (newImage.size.height-height)/2, newImage.size.width, newImage.size.height), newImage.CGImage);
} else {
CGContextDrawImage(ctx, CGRectMake(0, -(newImage.size.height-height)/2, newImage.size.width, newImage.size.height), newImage.CGImage);
}
CGImageRef sourceImageRef = CGBitmapContextCreateImage(ctx);
CGContextRelease(ctx);
[delegate tookPhoto:[UIImage imageWithCGImage:sourceImageRef]];
}
[image release];
canDismiss--;
}];
}
-(IBAction)done {
if ( canDismiss == 0 ) {
self.delegate = nil;
//[self dismissModalViewControllerAnimated:YES];
[self dismissViewControllerAnimated:YES completion:nil];
}
}
@end
If I'm not mistaken, you have to add this code. Try this:
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Return YES for supported orientations
if ([landscapeAllowedID containsObject:currentViewID])
return (interfaceOrientation == UIInterfaceOrientationPortrait) || UIInterfaceOrientationIsLandscape(interfaceOrientation);
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
EDIT
Also add this to the UIViewController.
-(NSUInteger)supportedInterfaceOrientations{
return UIInterfaceOrientationMaskPortrait | UIInterfaceOrientationMaskLandscape; // etc
}
If that doesn't work, place the code in a custom UINavigationController. You have to tell the UINavigationController somehow which view is currently being displayed.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Return YES for supported orientations
if ([landscapeAllowedID containsObject:currentViewID])
return (interfaceOrientation == UIInterfaceOrientationPortrait) || UIInterfaceOrientationIsLandscape(interfaceOrientation);
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
I found this tutorial (http://codethink.no-ip.org/wordpress/archives/673#comment-118063) via the SO question "Screen capture video in iOS programmatically" on how to do something like this. It was a bit outdated for iOS, so I renewed it, and I am very close to having it work, but putting the UIImages together into a video just isn't working right now.
Here is how I call the method in viewDidLoad
[captureView performSelector:@selector(startRecording) withObject:nil afterDelay:1.0];
[captureView performSelector:@selector(stopRecording) withObject:nil afterDelay:5.0];
and captureView is an IBOutlet connected to my view.
And then I have the class ScreenCapture.h & .m
Here is .h
@protocol ScreenCaptureViewDelegate <NSObject>
- (void) recordingFinished:(NSString*)outputPathOrNil;
@end
@interface ScreenCaptureView : UIView {
//video writing
AVAssetWriter *videoWriter;
AVAssetWriterInput *videoWriterInput;
AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
//recording state
BOOL _recording;
NSDate* startedAt;
void* bitmapData;
}
//for recording video
- (bool) startRecording;
- (void) stopRecording;
//for accessing the current screen and adjusting the capture rate, etc.
@property(retain) UIImage* currentScreen;
@property(assign) float frameRate;
@property(nonatomic, assign) id<ScreenCaptureViewDelegate> delegate;
@end
And here is my .m
@interface ScreenCaptureView(Private)
- (void) writeVideoFrameAtTime:(CMTime)time;
@end
@implementation ScreenCaptureView
@synthesize currentScreen, frameRate, delegate;
- (void) initialize {
// Initialization code
self.clearsContextBeforeDrawing = YES;
self.currentScreen = nil;
self.frameRate = 10.0f; // 10 frames per second
_recording = false;
videoWriter = nil;
videoWriterInput = nil;
avAdaptor = nil;
startedAt = nil;
bitmapData = NULL;
}
- (id) initWithCoder:(NSCoder *)aDecoder {
self = [super initWithCoder:aDecoder];
if (self) {
[self initialize];
}
return self;
}
- (id) init {
self = [super init];
if (self) {
[self initialize];
}
return self;
}
- (id)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self) {
[self initialize];
}
return self;
}
- (CGContextRef) createBitmapContextOfSize:(CGSize) size {
CGContextRef context = NULL;
CGColorSpaceRef colorSpace;
int bitmapByteCount;
int bitmapBytesPerRow;
bitmapBytesPerRow = (size.width * 4);
bitmapByteCount = (bitmapBytesPerRow * size.height);
colorSpace = CGColorSpaceCreateDeviceRGB();
if (bitmapData != NULL) {
free(bitmapData);
}
bitmapData = malloc( bitmapByteCount );
if (bitmapData == NULL) {
fprintf (stderr, "Memory not allocated!");
return NULL;
}
context = CGBitmapContextCreate (bitmapData,
size.width,
size.height,
8, // bits per component
bitmapBytesPerRow,
colorSpace,
(CGBitmapInfo) kCGImageAlphaNoneSkipFirst);
CGContextSetAllowsAntialiasing(context,NO);
if (context== NULL) {
free (bitmapData);
fprintf (stderr, "Context not created!");
return NULL;
}
CGColorSpaceRelease( colorSpace );
return context;
}
static int frameCount = 0; //debugging
- (void) drawRect:(CGRect)rect {
NSDate* start = [NSDate date];
CGContextRef context = [self createBitmapContextOfSize:self.frame.size];
//not sure why this is necessary...image renders upside-down and mirrored
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
CGContextConcatCTM(context, flipVertical);
[self.layer renderInContext:context];
CGImageRef cgImage = CGBitmapContextCreateImage(context);
UIImage* background = [UIImage imageWithCGImage: cgImage];
CGImageRelease(cgImage);
self.currentScreen = background;
//debugging
if (frameCount < 40) {
NSString* filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
NSString* pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
[UIImagePNGRepresentation(self.currentScreen) writeToFile: pngPath atomically: YES];
frameCount++;
}
//NOTE: to record a scrollview while it is scrolling you need to implement your UIScrollViewDelegate such that it calls
// 'setNeedsDisplay' on the ScreenCaptureView.
if (_recording) {
float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
[self writeVideoFrameAtTime:CMTimeMake((int)millisElapsed, 1000)];
}
float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
float delayRemaining = (1.0 / self.frameRate) - processingSeconds;
CGContextRelease(context);
//redraw at the specified framerate
[self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:delayRemaining > 0.0 ? delayRemaining : 0.01];
}
- (void) cleanupWriter {
avAdaptor = nil;
videoWriterInput = nil;
videoWriter = nil;
startedAt = nil;
if (bitmapData != NULL) {
free(bitmapData);
bitmapData = NULL;
}
}
- (void)dealloc {
[self cleanupWriter];
}
- (NSURL*) tempFileURL {
NSString* outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
NSURL* outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSError* error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
NSLog(#"Could not delete old recording file at path: %#", outputPath);
}
}
return outputURL;
}
-(BOOL) setUpWriter {
NSError* error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
//Configure video
NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
[NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];
//add input
[videoWriter addInput:videoWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
return YES;
}
- (void) completeRecordingSession {
[videoWriterInput markAsFinished];
// Wait for the video
int status = videoWriter.status;
while (status == AVAssetWriterStatusUnknown) {
NSLog(#"Waiting...");
[NSThread sleepForTimeInterval:0.5f];
status = videoWriter.status;
}
@synchronized(self) {
[videoWriter finishWritingWithCompletionHandler:^{
[self cleanupWriter];
BOOL success = YES;
id delegateObj = self.delegate;
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSLog(@"Completed recording, file is stored at: %@", outputURL);
if ([delegateObj respondsToSelector:@selector(recordingFinished:)]) {
[delegateObj performSelectorOnMainThread:@selector(recordingFinished:) withObject:(success ? outputURL : nil) waitUntilDone:YES];
}
}];
}
}
- (bool) startRecording {
bool result = NO;
@synchronized(self) {
if (! _recording) {
result = [self setUpWriter];
startedAt = [NSDate date];
_recording = true;
}
}
return result;
}
- (void) stopRecording {
@synchronized(self) {
if (_recording) {
_recording = false;
[self completeRecordingSession];
}
}
}
-(void) writeVideoFrameAtTime:(CMTime)time {
if (![videoWriterInput isReadyForMoreMediaData]) {
NSLog(#"Not ready for video data");
}
else {
@synchronized (self) {
UIImage *newFrame = self.currentScreen;
CVPixelBufferRef pixelBuffer = NULL;
CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));
int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
if(status != 0){
//could not get a buffer from the pool
NSLog(#"Error creating pixel buffer: status=%d", status);
}
// set image data into pixel buffer
CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
uint8_t *destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels); //XXX: will work if the pixel buffer is contiguous and has the same bytesPerRow as the input data
if(status == 0){
BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success)
NSLog(#"Warning: Unable to write buffer to video");
}
//clean up
CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
CVPixelBufferRelease( pixelBuffer );
CFRelease(image);
CGImageRelease(cgImage);
}
}
}
As you can see in the drawRect method, I save all the images for debugging, and they look great. But when I try to make the video, the output is just a skewed, distorted still, even though the individual frames look normal (not slanted and all weird).
My question is: what is going wrong when the video is being made?
Thanks for the help and your time, I know this is a long question.
I found this post after having the same issue, where certain resolutions caused the exact same video effect when I wanted to create a CVPixelBufferRef from a CGImageRef (coming from a UIImage).
The very short answer in my case was that I had hard-wired the bytes per row to be 4 times the width, which used to work all the time! Now I query the CVPixelBuffer itself to get this value, and poof, problem solved!
Code that created the problem was this:
CGContextRef context = CGBitmapContextCreate(pxdata, w, h, 8, 4*w, rgbColorSpace, bitMapInfo);
Code that fixed the problem was this:
CGContextRef context = CGBitmapContextCreate(
pxdata, w, h,
8, CVPixelBufferGetBytesPerRow(pxbuffer),
rgbColorSpace,bitMapInfo);
And in both cases, the bitMapInfo was set:
CGBitmapInfo bitMapInfo = kCGImageAlphaPremultipliedFirst; // According to Apple's docs, this is safe: June 26, 2014
Pixel buffer adaptors only work with certain pixel sizes of images, so you're probably going to need to change the size of the images. You can imagine that what's happening in your video is that the writer is trying to write your, let's say, 361x241 images into a 360x240 space. Each row starts with the last pixel of the previous row, so it ends up getting diagonally skewed like you see. Check the Apple docs for supported dimensions. I believe that I used 480x320 and it's supported. You can use this method to resize your images:
+(UIImage *)scaleImage:(UIImage*)image toSize:(CGSize)newSize {
CGRect scaledImageRect = CGRectZero;
CGFloat aspectWidth = newSize.width / image.size.width;
CGFloat aspectHeight = newSize.height / image.size.height;
CGFloat aspectRatio = MIN(aspectWidth, aspectHeight); // aspect-fit: use the smaller scale factor
scaledImageRect.size.width = image.size.width * aspectRatio;
scaledImageRect.size.height = image.size.height * aspectRatio;
scaledImageRect.origin.x = (newSize.width - scaledImageRect.size.width) / 2.0f;
scaledImageRect.origin.y = (newSize.height - scaledImageRect.size.height) / 2.0f;
UIGraphicsBeginImageContextWithOptions(newSize, NO, 0);
[image drawInRect:scaledImageRect];
UIImage* scaledImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return scaledImage;
}
I think this is because the pixel buffer's bytes per row does not match the UIImage's bytes per row. In my case (iPhone 6, iOS 8.3) the UIImage is 568 x 320 and CFDataGetLength is 727040, so the bytes per row is 2272. But the pixel buffer's bytes per row is 2304. I think this extra 32 bytes is padding so that the pixel buffer's bytes per row is divisible by 64. How to force the pixel buffer to match the input data, or vice versa, across all devices, I'm not sure yet.
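One way to tolerate that padding (a minimal sketch, reusing the variable names from the question's writeVideoFrameAtTime: method and assuming a 32-bit ARGB source) is to copy row by row, honoring each side's own bytes per row, instead of one big CFDataGetBytes:

CVPixelBufferLockBaseAddress(pixelBuffer, 0);
uint8_t *dst = CVPixelBufferGetBaseAddress(pixelBuffer);
size_t dstBytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer); // may include padding
const uint8_t *src = CFDataGetBytePtr(image);
size_t srcBytesPerRow = CGImageGetBytesPerRow(cgImage);
size_t rows = CGImageGetHeight(cgImage);
size_t rowBytes = MIN(srcBytesPerRow, dstBytesPerRow); // copy only the real pixels of each row
for (size_t row = 0; row < rows; row++) {
    memcpy(dst + row * dstBytesPerRow, src + row * srcBytesPerRow, rowBytes);
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);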
I've suffered a lot with this. I tried so many ways to create a video from the image array, but the result was almost the same as yours.
The problem was in the CVPixelBuffer: the buffer I created from the image was not correct.
But finally I got it working.
Main function to create a video at a path from an array of images.
You just have to input the array of images and the fps; the size can be equal to the size of the images (if you want).
fps = num of images in array / desired duration
for example: fps = 90 / 3 = 30
- (void)getVideoFrom:(NSArray *)array
toPath:(NSString*)path
size:(CGSize)size
fps:(int)fps
withCallbackBlock:(void (^) (BOOL))callbackBlock
{
NSLog(#"%#", path);
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
fileType:AVFileTypeMPEG4
error:&error];
if (error) {
if (callbackBlock) {
callbackBlock(NO);
}
return;
}
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecTypeH264,
AVVideoWidthKey: [NSNumber numberWithInt:size.width],
AVVideoHeightKey: [NSNumber numberWithInt:size.height]};
AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer;
CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
CMTime presentTime = CMTimeMake(0, fps);
int i = 0;
while (1)
{
if(writerInput.readyForMoreMediaData){
presentTime = CMTimeMake(i, fps);
if (i >= [array count]) {
buffer = NULL;
} else {
buffer = [self pixelBufferFromCGImage:[array[i] CGImage] size:CGSizeMake(480, 320)];
}
if (buffer) {
//append buffer
BOOL appendSuccess = [self appendToAdapter:adaptor
pixelBuffer:buffer
atTime:presentTime
withInput:writerInput];
NSAssert(appendSuccess, @"Failed to append");
i++;
} else {
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(#"Successfully closed video writer");
if (videoWriter.status == AVAssetWriterStatusCompleted) {
if (callbackBlock) {
callbackBlock(YES);
}
} else {
if (callbackBlock) {
callbackBlock(NO);
}
}
}];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
NSLog (#"Done");
break;
}
}
}
}
Function to get CVPixelBuffer from CGImage
-(CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image size:(CGSize)imageSize
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
&pxbuffer);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
CGImageGetHeight(image), 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace,
(int)kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
Function to append to adapter
-(BOOL)appendToAdapter:(AVAssetWriterInputPixelBufferAdaptor*)adaptor
pixelBuffer:(CVPixelBufferRef)buffer
atTime:(CMTime)presentTime
withInput:(AVAssetWriterInput*)writerInput
{
while (!writerInput.readyForMoreMediaData) {
usleep(1);
}
return [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
}
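A hypothetical call site, using the fps example above (90 frames over 3 seconds gives fps = 30); frames is assumed to be an NSArray of UIImage objects you built elsewhere:

NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mp4"];
[self getVideoFrom:frames
            toPath:path
              size:CGSizeMake(480, 320)
               fps:30
 withCallbackBlock:^(BOOL success) {
    NSLog(@"Video written: %@", success ? @"YES" : @"NO");
}];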
I've been given some code from 2011 to update for a client. I've managed to get it all sorted for iOS 7 etc. and submitted the update to Apple.
I now have an issue with the app which I need to sort out as soon as possible. I'm really struggling to find a solution; I've tried all sorts!
In brief, this app allows users to take multiple photos in a custom camera view (with a camera preview at all times) then hit a done button to close the window and return to the previous screen.
However, when the camera preview shows on screen, it is in the incorrect orientation, rotated 90 degrees counter-clockwise.
I need to force the user to take the photo in landscape mode.
The second issue is that if the user does take a landscape photo, it is saved 90 degrees counter-clockwise as well.
Can anybody help?
Full code is below. This is the original code that was supplied, with a few tweaks to try and fix it, without much joy!
Many thanks in advance
Simon
//
// CameraViewController.m
// X
//
// Created by X on 8/9/11.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "CameraViewController.h"
#import <ImageIO/ImageIO.h>
static AVCaptureVideoOrientation avOrientationForInterfaceOrientation(UIInterfaceOrientation iOrientation);
static AVCaptureVideoOrientation avOrientationForInterfaceOrientation(UIInterfaceOrientation iOrientation)
{
AVCaptureVideoOrientation result = iOrientation;
if ( iOrientation == UIInterfaceOrientationLandscapeLeft )
result = AVCaptureVideoOrientationLandscapeLeft;
else if ( iOrientation == UIInterfaceOrientationLandscapeRight )
result = AVCaptureVideoOrientationLandscapeRight;
return result;
}
@implementation CameraViewController
@synthesize stillImageOutput, delegate;
@synthesize session, captureVideoPreviewLayer;
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration
{
AVCaptureVideoOrientation avcaptureOrientation = avOrientationForInterfaceOrientation(toInterfaceOrientation);
self.captureVideoPreviewLayer.connection.videoOrientation = avcaptureOrientation;
}
- (void)dealloc {
[stillImageOutput release];
[captureVideoPreviewLayer release];
[session release];
[super dealloc];
}
- (void)didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view from its nib.
// rotate the status bar
[UIApplication sharedApplication].statusBarOrientation = UIInterfaceOrientationLandscapeLeft;
// try and rotate the camera live preview
}
-(void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
self.session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
AVCaptureVideoOrientation avcaptureOrientation = avOrientationForInterfaceOrientation(self.interfaceOrientation);
self.captureVideoPreviewLayer.connection.videoOrientation = avcaptureOrientation;
//captureVideoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
NSLog(#"doing something");
captureVideoPreviewLayer.frame = cameraView.bounds;
[cameraView.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
canDismiss = 0;
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didRotate:)
name:UIDeviceOrientationDidChangeNotification object:nil];
}
- (void)viewDidUnload {
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
-(BOOL)shouldAutorotate {
return YES;
}
- (NSUInteger)supportedInterfaceOrientations
{
return UIInterfaceOrientationMaskLandscapeLeft;
}
// Set the camera to force itself to a landscape view
- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation
{
return UIInterfaceOrientationLandscapeLeft;
}
/*- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
// Return YES for supported orientations
//return (interfaceOrientation == UIInterfaceOrientationPortrait);
return (interfaceOrientation == UIInterfaceOrientationPortrait) | (interfaceOrientation == UIInterfaceOrientationLandscapeLeft) | (interfaceOrientation == UIInterfaceOrientationLandscapeRight);
}*/
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
if(interfaceOrientation == UIInterfaceOrientationLandscapeLeft)
{
captureVideoPreviewLayer.connection.videoOrientation = UIInterfaceOrientationLandscapeLeft;
}
// and so on for other orientations
return ((interfaceOrientation == UIInterfaceOrientationLandscapeLeft));
}
- (void) didRotate:(NSNotification *)notification {
[UIView beginAnimations:nil context:nil];
if ( UIInterfaceOrientationIsLandscape([[UIDevice currentDevice] orientation]) ) {
[topView setFrame:CGRectMake(0, 0, 320, 27)];
[bottomView setFrame:CGRectMake(0, 453, 320, 27)];
} else {
[topView setFrame:CGRectMake(0, 0, 320, 110)];
[bottomView setFrame:CGRectMake(0, 350, 320, 110)];
}
switch ([[UIDevice currentDevice] orientation]) {
case UIInterfaceOrientationLandscapeLeft:
btnDone.transform = CGAffineTransformMakeRotation(3*M_PI_2);
btnTakePicture.transform = CGAffineTransformMakeRotation(3*M_PI_2);
break;
default:
btnDone.transform = CGAffineTransformMakeRotation(M_PI_2);
btnTakePicture.transform = CGAffineTransformMakeRotation(M_PI_2);
break;
}
[UIView commitAnimations];
}
-(IBAction)takePhoto {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
canDismiss++;
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
}
else
NSLog(#"no attachments");
int width = [[[NSUserDefaults standardUserDefaults] objectForKey:#"image_width"] intValue];
int height = [[[NSUserDefaults standardUserDefaults] objectForKey:#"image_height"] intValue];
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage * image = [[UIImage alloc] initWithData:imageData];
NSLog(#"Image w:%f h:%f", image.size.width, image.size.height);
UIImage * rotatedimage;
if ( UIInterfaceOrientationIsLandscape([[UIDevice currentDevice] orientation]) ) {
// Rotate the image
CGSize newSize = CGSizeMake(image.size.height, image.size.width);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef ctx = CGBitmapContextCreate (NULL, newSize.width, newSize.height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colorSpace);
if ( [[UIDevice currentDevice] orientation] == UIInterfaceOrientationLandscapeLeft ) {
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0, newSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
CGContextScaleCTM(ctx, -1.0, 1.0);
CGContextTranslateCTM(ctx, -newSize.width, 0);
CGContextConcatCTM(ctx, transform);
}
CGContextDrawImage(ctx, CGRectMake(0, 0, newSize.width, newSize.height), image.CGImage);
CGImageRef sourceImageRef = CGBitmapContextCreateImage(ctx);
rotatedimage = [UIImage imageWithCGImage:sourceImageRef];
CGContextRelease(ctx);
NSLog(#"Rotated Image w:%f h:%f", rotatedimage.size.width, rotatedimage.size.height);
// Scale the image
newSize = CGSizeMake(width, height);
UIGraphicsBeginImageContext(newSize);
[rotatedimage drawInRect:CGRectMake(0,0,newSize.width,newSize.height)];
//image is the original UIImage
UIImage * newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(#"New Image w:%f h:%f", newImage.size.width, newImage.size.height);
[delegate tookPhoto:newImage];
} else {
// Scale the image
CGSize newSize = CGSizeMake(width, image.size.height/(image.size.width/width));
UIGraphicsBeginImageContext( newSize );
[image drawInRect:CGRectMake(0,0,newSize.width,newSize.height)];
UIImage * newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(#"New Image w:%f h:%f", newImage.size.width, newImage.size.height);
// Chop out the middle
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef ctx = CGBitmapContextCreate (NULL, width, height, 8, 0, colorSpace, kCGImageAlphaPremultipliedLast);
CGColorSpaceRelease(colorSpace);
if ( [[UIDevice currentDevice] orientation] == UIInterfaceOrientationPortraitUpsideDown ) {
CGAffineTransform transform = CGAffineTransformMakeTranslation(0.0, newImage.size.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
CGContextScaleCTM(ctx, -1.0, 1.0);
CGContextTranslateCTM(ctx, -newImage.size.width, 0);
CGContextConcatCTM(ctx, transform);
CGContextDrawImage(ctx, CGRectMake(0, (newImage.size.height-height)/2, newImage.size.width, newImage.size.height), newImage.CGImage);
} else {
CGContextDrawImage(ctx, CGRectMake(0, -(newImage.size.height-height)/2, newImage.size.width, newImage.size.height), newImage.CGImage);
}
CGImageRef sourceImageRef = CGBitmapContextCreateImage(ctx);
CGContextRelease(ctx);
[delegate tookPhoto:[UIImage imageWithCGImage:sourceImageRef]];
}
[image release];
canDismiss--;
}];
}
// action when the 'tick' button is pressed
-(IBAction)done {
if ( canDismiss == 0 ) {
self.delegate = nil;
// animate the camera view away
[self dismissViewControllerAnimated:YES completion:nil];
// swap the status bar back to the default portrait
[UIApplication sharedApplication].statusBarOrientation = UIInterfaceOrientationPortrait;
}
}
@end
I am working on a photo taking app. The app's preview layer is set to take up exactly half of the screen using this code:
[_previewLayer setFrame:CGRectMake(0, 0, rootLayer.bounds.size.width, rootLayer.bounds.size.height/2)];
This looks perfect and there is no distortion at all while the user is viewing the camera's "preview" / what they are seeing while taking the picture.
However, once they actually take the photo, I create a sublayer, set its frame property to my preview layer's frame, and set the photo as the contents of the sublayer.
This does technically work. Once the user takes the photo, the photo shows up on the top half of the screen like it should.
The only problem is that the photo is distorted.
It looks stretched out, almost as if I'm taking a landscape photo.
Any help is greatly appreciated; I am totally desperate on this and have not been able to fix it after working on it all day today.
Here is all of my view controller's code:
#import "MediaCaptureVC.h"
@interface MediaCaptureVC ()
@end
@implementation MediaCaptureVC
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
}
return self;
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view.
AVCaptureSession *session =[[AVCaptureSession alloc]init];
[session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = [[NSError alloc]init];
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if([session canAddInput:deviceInput])
[session addInput:deviceInput];
_previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:session];
[_previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayer = [[self view]layer];
[rootLayer setMasksToBounds:YES];
[_previewLayer setFrame:CGRectMake(0, 0, rootLayer.bounds.size.width, rootLayer.bounds.size.height/2)];
[rootLayer insertSublayer:_previewLayer atIndex:0];
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[session addOutput:_stillImageOutput];
[session startRunning];
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
-(UIImage*) rotate:(UIImage*) src andOrientation:(UIImageOrientation)orientation
{
UIGraphicsBeginImageContext(src.size);
CGContextRef context=(UIGraphicsGetCurrentContext());
if (orientation == UIImageOrientationRight) {
CGContextRotateCTM (context, 90/180*M_PI) ;
} else if (orientation == UIImageOrientationLeft) {
CGContextRotateCTM (context, -90/180*M_PI);
} else if (orientation == UIImageOrientationDown) {
// NOTHING
} else if (orientation == UIImageOrientationUp) {
CGContextRotateCTM (context, 90/180*M_PI);
}
[src drawAtPoint:CGPointMake(0, 0)];
UIImage *img=UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return img;
}
-(IBAction)stillImageCapture {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _stillImageOutput.connections){
for (AVCaptureInputPort *port in [connection inputPorts]){
if ([[port mediaType] isEqual:AVMediaTypeVideo]){
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
NSLog(#"about to request a capture from: %#", _stillImageOutput);
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if(imageDataSampleBuffer) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc]initWithData:imageData];
image = [self rotate:image andOrientation:image.imageOrientation];
CALayer *subLayer = [CALayer layer];
CGImageRef imageRef = image.CGImage;
subLayer.contents = (id)[UIImage imageWithCGImage:imageRef].CGImage;
subLayer.frame = _previewLayer.frame;
CALayer *rootLayer = [[self view]layer];
[rootLayer setMasksToBounds:YES];
[subLayer setFrame:CGRectMake(0, 0, rootLayer.bounds.size.width, rootLayer.bounds.size.height/2)];
[_previewLayer addSublayer:subLayer];
NSLog(#"%#", subLayer.contents);
NSLog(#"Orientation: %d", image.imageOrientation);
}
}];
}
@end
Hi, I hope this helps you.
The code seems more complex than it should be because most of the work is done at the CALayer level instead of the image view / view level. However, I think the issue is that the proportions of the frame from the original capture and of your mini viewport are different, and this is distorting the UIImage in this statement:
[subLayer setFrame:CGRectMake(0, 0, rootLayer.bounds.size.width, rootLayer.bounds.size.height/2)];
What needs to be done is to take the proportions of subLayer.frame and compute the best size that will fit into the root layer or the image view associated with it.
I have some code that does this: a subroutine that handles the proportion (note: you will need to adjust the origin of the frame to get what you want!).
// ...
CGRect newbounds = [self figure_proportion:image to_fit_rect:rootLayer.frame];
if (newbounds.size.height < rootLayer.frame.size.height) {
// ... code to adjust the origin of the image view frame ...
}
-(CGRect) figure_proportion:(UIImage *) image2 to_fit_rect:(CGRect) rect {
CGSize image_size = image2.size;
CGRect newrect = rect;
float wfactor = image_size.width/ image_size.height;
float hfactor = image_size.height/ image_size.width;
if (image2.size.width > image2.size.height) {
newrect.size.width = rect.size.width;
newrect.size.height = (rect.size.width * hfactor);
}
else if (image2.size.height > image2.size.width) {
newrect.size.height = rect.size.height;
newrect.size.width = (rect.size.height * wfactor);
}
else {
newrect.size.width = rect.size.width;
newrect.size.height = newrect.size.width;
}
if (newrect.size.height > rect.size.height) {
newrect.size.height = rect.size.height;
newrect.size.width = (newrect.size.height* wfactor);
}
if (newrect.size.width > rect.size.width) {
newrect.size.width = rect.size.width;
newrect.size.height = (newrect.size.width* hfactor);
}
return(newrect);
}
This is driving me crazy because I can't get it to work. I have the following scenario:
I'm using an AVCaptureSession and an AVCaptureVideoPreviewLayer to create my own camera interface. The interface shows a rectangle; beneath it is the AVCaptureVideoPreviewLayer, which fills the whole screen.
I want the captured image to be cropped in such a way that the resulting image shows exactly the content seen in the rect on the display.
My setup looks like this:
_session = [[AVCaptureSession alloc] init];
AVCaptureSession *session = _session;
session.sessionPreset = AVCaptureSessionPresetPhoto;
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (camera == nil) {
[self showImagePicker];
_isSetup = YES;
return;
}
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.frame = self.liveCapturePlaceholderView.bounds;
[self.liveCapturePlaceholderView.layer addSublayer:captureVideoPreviewLayer];
NSError *error;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
if (error) {
HGAlertViewWrapper *av = [[HGAlertViewWrapper alloc] initWithTitle:kFailedConnectingToCameraAlertViewTitle message:kFailedConnectingToCameraAlertViewMessage cancelButtonTitle:kFailedConnectingToCameraAlertViewCancelButtonTitle otherButtonTitles:@[kFailedConnectingToCameraAlertViewRetryButtonTitle]];
[av showWithBlock:^(NSString *buttonTitle){
if ([buttonTitle isEqualToString:kFailedConnectingToCameraAlertViewCancelButtonTitle]) {
[self.delegate gloameCameraViewControllerDidCancel:self];
}
else {
[self setupAVSession];
}
}];
}
[session addInput:input];
NSDictionary *options = @{ AVVideoCodecKey : AVVideoCodecJPEG };
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[_stillImageOutput setOutputSettings:options];
[session addOutput:_stillImageOutput];
[session startRunning];
_isSetup = YES;
I'm capturing the image like this:
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
if (error) {
MWLogDebug(#"Error capturing image from camera. %#, %#", error, [error userInfo]);
_capturePreviewLayer.connection.enabled = YES;
}
else
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
CGRect cropRect = [self createCropRectForImage:image];
UIImage *croppedImage;// = [self cropImage:image toRect:cropRect];
UIGraphicsBeginImageContext(cropRect.size);
[image drawAtPoint:CGPointMake(-cropRect.origin.x, -cropRect.origin.y)];
croppedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
self.capturedImage = croppedImage;
[_session stopRunning];
}
}];
In the createCropRectForImage: method I've tried various ways to calculate the rect to cut out of the image, but with no success so far.
- (CGRect)createCropRectForImage:(UIImage *)image
{
CGPoint maskTopLeftCorner = CGPointMake(self.maskRectView.frame.origin.x, self.maskRectView.frame.origin.y);
CGPoint maskBottomRightCorner = CGPointMake(self.maskRectView.frame.origin.x + self.maskRectView.frame.size.width, self.maskRectView.frame.origin.y + self.maskRectView.frame.size.height);
CGPoint maskTopLeftCornerInLayerCoords = [_capturePreviewLayer convertPoint:maskTopLeftCorner fromLayer:self.maskRectView.layer.superlayer];
CGPoint maskBottomRightCornerInLayerCoords = [_capturePreviewLayer convertPoint:maskBottomRightCorner fromLayer:self.maskRectView.layer.superlayer];
CGPoint maskTopLeftCornerInDeviceCoords = [_capturePreviewLayer captureDevicePointOfInterestForPoint:maskTopLeftCornerInLayerCoords];
CGPoint maskBottomRightCornerInDeviceCoords = [_capturePreviewLayer captureDevicePointOfInterestForPoint:maskBottomRightCornerInLayerCoords];
float x = maskTopLeftCornerInDeviceCoords.x * image.size.width;
float y = (1 - maskTopLeftCornerInDeviceCoords.y) * image.size.height;
float width = fabsf(maskTopLeftCornerInDeviceCoords.x - maskBottomRightCornerInDeviceCoords.x) * image.size.width;
float height = fabsf(maskTopLeftCornerInDeviceCoords.y - maskBottomRightCornerInDeviceCoords.y) * image.size.height;
return CGRectMake(x, y, width, height);
}
That is my current version, but it doesn't even get the proportions right. Could someone please help me?
I have also tried using this method to crop my image:
- (UIImage*)cropImage:(UIImage*)originalImage toRect:(CGRect)rect{
CGImageRef imageRef = CGImageCreateWithImageInRect([originalImage CGImage], rect);
CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
CGColorSpaceRef colorSpaceInfo = CGImageGetColorSpace(imageRef);
CGContextRef bitmap = CGBitmapContextCreate(NULL, rect.size.width, rect.size.height, CGImageGetBitsPerComponent(imageRef), CGImageGetBytesPerRow(imageRef), colorSpaceInfo, bitmapInfo);
if (originalImage.imageOrientation == UIImageOrientationLeft) {
CGContextRotateCTM (bitmap, radians(90));
CGContextTranslateCTM (bitmap, 0, -rect.size.height);
} else if (originalImage.imageOrientation == UIImageOrientationRight) {
CGContextRotateCTM (bitmap, radians(-90));
CGContextTranslateCTM (bitmap, -rect.size.width, 0);
} else if (originalImage.imageOrientation == UIImageOrientationUp) {
// NOTHING
} else if (originalImage.imageOrientation == UIImageOrientationDown) {
CGContextTranslateCTM (bitmap, rect.size.width, rect.size.height);
CGContextRotateCTM (bitmap, radians(-180.));
}
CGContextDrawImage(bitmap, CGRectMake(0, 0, rect.size.width, rect.size.height), imageRef);
CGImageRef ref = CGBitmapContextCreateImage(bitmap);
UIImage *resultImage=[UIImage imageWithCGImage:ref];
CGImageRelease(imageRef);
CGContextRelease(bitmap);
CGImageRelease(ref);
return resultImage;
}
Does anybody have the 'right combination' of methods to make this work? :)
In Swift 3:
private func cropToPreviewLayer(originalImage: UIImage) -> UIImage {
let outputRect = previewLayer.metadataOutputRectConverted(fromLayerRect: previewLayer.bounds)
var cgImage = originalImage.cgImage!
let width = CGFloat(cgImage.width)
let height = CGFloat(cgImage.height)
let cropRect = CGRect(x: outputRect.origin.x * width, y: outputRect.origin.y * height, width: outputRect.size.width * width, height: outputRect.size.height * height)
cgImage = cgImage.cropping(to: cropRect)!
let croppedUIImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: originalImage.imageOrientation)
return croppedUIImage
}
I've solved this problem by using the metadataOutputRectOfInterestForRect function.
It works with any orientation.
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
if (error)
{
[_delegate cameraView:self error:#"Take picture failed"];
}
else
{
NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *takenImage = [UIImage imageWithData:jpegData];
CGRect outputRect = [_previewLayer metadataOutputRectOfInterestForRect:_previewLayer.bounds];
CGImageRef takenCGImage = takenImage.CGImage;
size_t width = CGImageGetWidth(takenCGImage);
size_t height = CGImageGetHeight(takenCGImage);
CGRect cropRect = CGRectMake(outputRect.origin.x * width, outputRect.origin.y * height, outputRect.size.width * width, outputRect.size.height * height);
CGImageRef cropCGImage = CGImageCreateWithImageInRect(takenCGImage, cropRect);
takenImage = [UIImage imageWithCGImage:cropCGImage scale:1 orientation:takenImage.imageOrientation];
CGImageRelease(cropCGImage);
}
}
];
The takenImage is still an imageOrientation-dependent image. You can remove the orientation information for further image processing:
UIGraphicsBeginImageContext(takenImage.size);
[takenImage drawAtPoint:CGPointZero];
takenImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
In Swift 4:
I prefer to never force-unwrap to avoid crashes, so I use optionals and guards in mine.
private func cropToPreviewLayer(originalImage: UIImage) -> UIImage? {
guard let cgImage = originalImage.cgImage else { return nil }
let outputRect = previewLayer.metadataOutputRectConverted(fromLayerRect: previewLayer.bounds)
let width = CGFloat(cgImage.width)
let height = CGFloat(cgImage.height)
let cropRect = CGRect(x: outputRect.origin.x * width, y: outputRect.origin.y * height, width: outputRect.size.width * width, height: outputRect.size.height * height)
if let croppedCGImage = cgImage.cropping(to: cropRect) {
return UIImage(cgImage: croppedCGImage, scale: 1.0, orientation: originalImage.imageOrientation)
}
return nil
}
AVMakeRectWithAspectRatioInsideRect
This API is from AVFoundation; it returns the largest rectangle with the given aspect ratio that fits inside the bounding rect, which you can use as the crop region for the image.
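For example (a minimal sketch; image and previewLayer are assumed to already exist in scope):

#import <AVFoundation/AVFoundation.h>

// Largest rect with the image's aspect ratio that fits inside the preview's bounds.
CGRect fitted = AVMakeRectWithAspectRatioInsideRect(image.size, previewLayer.bounds);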
I've got an app where I tried to implement a custom camera. This is the source code:
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
//device.position ;
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
[session addInput:input];
[session startRunning];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
Then I tried to create a photo and send it to another view controller:
-(IBAction) captureNow
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
//NSLog(#"about to request a capture from: %#", stillImageOutput);
AcceptingPhotoViewController *photo = [[AcceptingPhotoViewController alloc] initWithNibName:#"AcceptingPhotoViewController" bundle:nil];
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
photo.image = [[UIImage alloc] init ];
photo.image = image;
photo.photoFromCamera = YES;
[self.navigationController pushViewController:photo animated:NO];
}];
}
But in my target class this image is rotated 90 degrees to the left:
I tried to rotate it back:
float newSide = MAX([image size].width, [image size].height);
CGSize size = CGSizeMake(newSide, newSide);
UIGraphicsBeginImageContext(size);
CGContextRef ctx = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(ctx, newSide/2, newSide/2);
CGContextRotateCTM(ctx, 1.57079633);
CGContextDrawImage(UIGraphicsGetCurrentContext(),CGRectMake(-[image size].width/2,-[image size].height/2,size.width, size.height),image.CGImage);
UIImage *i = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
image = i;
My image is now rotated to the right, but it displays mirrored and stretched.
Any help? Can I rotate it another way, or should I maybe take the photo differently?
I found a solution to this problem:
I rotate and transform the image in this way.
UIView* rotatedViewBox = [[UIView alloc] initWithFrame: CGRectMake(0, 0, image.size.width, image.size.height)];
float angleRadians = 90 * ((float)M_PI / 180.0f);
CGAffineTransform t = CGAffineTransformMakeRotation(angleRadians);
rotatedViewBox.transform = t;
CGSize rotatedSize = rotatedViewBox.frame.size;
UIGraphicsBeginImageContext(rotatedSize);
CGContextRef bitmap = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(bitmap, rotatedSize.width/2, rotatedSize.height/2);
CGContextRotateCTM(bitmap, angleRadians);
CGContextScaleCTM(bitmap, 1.0, -1.0);
CGContextDrawImage(bitmap, CGRectMake(-image.size.width / 2, -image.size.height / 2, image.size.width, image.size.height), [image CGImage]);
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
image = newImage;
CGSize newSize = CGSizeMake(image.size.height, image.size.width);
UIGraphicsBeginImageContext(newSize);
// Tell the old image to draw in this new context, with the desired
// new size
[image drawInRect:CGRectMake(0,0,image.size.height,image.size.width)];
// Get the new image from the context
UIImage* newImage2 = UIGraphicsGetImageFromCurrentImageContext();
// End the context
UIGraphicsEndImageContext();
image = newImage2;
I also had the same problem; just copy and paste.
Try this code, it will help you:
- (UIImage *)fixrotation:(UIImage *)image{
if (image.imageOrientation == UIImageOrientationDown) return image;
CGAffineTransform transform = CGAffineTransformIdentity;
switch (image.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, image.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
case UIImageOrientationUp:
case UIImageOrientationUpMirrored:
break;
}
switch (image.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, image.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationUp:
case UIImageOrientationDown:
case UIImageOrientationLeft:
case UIImageOrientationRight:
break;
}
// Now we draw the underlying CGImage into a new context, applying the transform
// calculated above.
CGContextRef ctx = CGBitmapContextCreate(NULL, image.size.width, image.size.height,
CGImageGetBitsPerComponent(image.CGImage), 0,
CGImageGetColorSpace(image.CGImage),
CGImageGetBitmapInfo(image.CGImage));
CGContextConcatCTM(ctx, transform);
switch (image.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
// Grr...
CGContextDrawImage(ctx, CGRectMake(0,0,image.size.height,image.size.width), image.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,image.size.width,image.size.height), image.CGImage);
break;
}
// And now we just create a new UIImage from the drawing context
CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
UIImage *img = [UIImage imageWithCGImage:cgimg];
CGContextRelease(ctx);
CGImageRelease(cgimg);
return img;
}
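A hypothetical call site, right after decoding the JPEG from the still image output (as in the snippets above):

NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
image = [self fixrotation:image]; // bakes the EXIF orientation into the pixel data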