Under iOS, I need to get the codec used to encode the movie (H.264, MJPEG and so on), the movie's width and height, the file size, and the number of frames in the movie. I tried using AVAsset for the movie duration and such, but the duration was always zero. Same for the width/height. I also tried using a movie player controller, but that did not work either (see code below). The docs are a bit confusing: there seem to be multiple ways of getting at the same information, yet none of them quite gets you there.
Has anyone gotten the above information working properly? I am sure I am missing a few things, but I was hoping for sample code and/or pointers.
Edit: I added a better code example below. But some questions remain: how do I get the creation date of the movie, the codec used to compress it, and the movie's file size? Has anybody figured these out?
Thanks
- (IBAction)getMovieInfo
{
    int hours = 0, minutes = 0, seconds = 0;
    NSURL *sourceMovieURL = [NSURL URLWithString:@"http://trailers.apple.com/movies/summit/stepuprevolution/stepuprevolution-tlr1_h480p.mov"];
    AVURLAsset *movieAsset = [AVURLAsset URLAssetWithURL:sourceMovieURL options:nil];
    NSArray *tracks = [movieAsset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] != 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        NSTimeInterval durationSeconds = CMTimeGetSeconds([movieAsset duration]);
        CGSize videoSize = videoTrack.naturalSize;
        //
        // Let's get the movie's metadata
        //
        // Start with the duration of the movie
        hours = durationSeconds / 3600;
        minutes = ((int)durationSeconds % 3600) / 60; // remainder within the hour, not total minutes
        seconds = (int)durationSeconds % 60;
        durationLabel.text = [NSString stringWithFormat:@"%d:%02d:%02d", hours, minutes, seconds];
        // Next is the creation (posting) date of the movie
        //postedLabel.text = AVMetadataQuickTimeUserDataKeyCreationDate;
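        // The commented-out line above only names a metadata key; it never reads a
        // value. One avenue (a sketch -- not every asset carries this metadata) is
        // to query the asset's common metadata for the creation date:
        NSArray *dateItems = [AVMetadataItem metadataItemsFromArray:movieAsset.commonMetadata
                                                            withKey:AVMetadataCommonKeyCreationDate
                                                           keySpace:AVMetadataKeySpaceCommon];
        if ([dateItems count] > 0) {
            postedLabel.text = [[dateItems objectAtIndex:0] stringValue];
        }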
        //The resolution of the movie
        resolutionLabel.text = [NSString stringWithFormat:@"%g x %g", videoSize.width, videoSize.height];
        // The frame rate of the movie
        rateLabel.text = [NSString stringWithFormat:@"%g fps", [videoTrack nominalFrameRate]];
        // The frame count of the movie
        countLabel.text = [NSString stringWithFormat:@"%g", [videoTrack nominalFrameRate] * durationSeconds];
        // Get the codec used to compress the movie
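        // A sketch of one way to read it: each video track carries
        // CMFormatDescriptions whose media subtype is the codec's FourCC
        // ('avc1' for H.264, 'jpeg' for Motion JPEG, and so on).
        CMFormatDescriptionRef formatDesc =
            (__bridge CMFormatDescriptionRef)[videoTrack.formatDescriptions objectAtIndex:0];
        FourCharCode codecCode = CMFormatDescriptionGetMediaSubType(formatDesc);
        NSLog(@"Codec FourCC: %c%c%c%c",
              (char)(codecCode >> 24), (char)(codecCode >> 16),
              (char)(codecCode >> 8), (char)codecCode);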
        // And lastly, let's generate a thumbnail of the movie
        AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:movieAsset];
        if (imageGenerator != nil) {
            CMTime thumbPoint = CMTimeMakeWithSeconds(15.0, 600);
            NSError *error = nil;
            CGImageRef thumbnail = [imageGenerator copyCGImageAtTime:thumbPoint actualTime:NULL error:&error];
            if (thumbnail != NULL) {
                // Convert the CGImage thumbnail to a UIImage, then scale it.
                UIImage *tempImage = [[UIImage alloc] initWithCGImage:thumbnail];
                if (tempImage != nil) {
                    // Scale the image and put it into the image view.
                    self.thumbDisplay.image = [self scaleAndRotateImage:tempImage];
                }
                CGImageRelease(thumbnail); // release even if UIImage creation failed
            }
        }
    }
}
- (UIImage *)scaleAndRotateImage:(UIImage *)image {
    CGImageRef imgRef = image.CGImage;
    CGFloat width = 135.0;
    CGFloat height = 75.0;
    CGAffineTransform transform = CGAffineTransformIdentity;
    CGRect bounds = CGRectMake(0, 0, width, height);
    CGFloat scaleRatio = bounds.size.width / width;
    CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
    CGFloat boundHeight;
    UIImageOrientation orient = image.imageOrientation;
    switch (orient) {
        case UIImageOrientationUp: //EXIF = 1
            transform = CGAffineTransformIdentity;
            break;
        case UIImageOrientationUpMirrored: //EXIF = 2
            transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            break;
        case UIImageOrientationDown: //EXIF = 3
            transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
        case UIImageOrientationDownMirrored: //EXIF = 4
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
            transform = CGAffineTransformScale(transform, 1.0, -1.0);
            break;
        case UIImageOrientationLeftMirrored: //EXIF = 5
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationLeft: //EXIF = 6
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationRightMirrored: //EXIF = 7
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeScale(-1.0, 1.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;
        case UIImageOrientationRight: //EXIF = 8
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;
        default:
            [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
    }
    UIGraphicsBeginImageContext(bounds.size);
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
        CGContextScaleCTM(context, -scaleRatio, scaleRatio);
        CGContextTranslateCTM(context, -height, 0);
    } else {
        CGContextScaleCTM(context, scaleRatio, -scaleRatio);
        CGContextTranslateCTM(context, 0, -height);
    }
    CGContextConcatCTM(context, transform);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), imgRef);
    UIImage *imageCopy = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return imageCopy;
}
NSURL's getResourceValue:forKey:error: method lets you get the total number of bytes:
NSURL *fileUrl = [NSURL fileURLWithPath:filePath];
NSNumber *size = nil; // the value comes back as an NSNumber, not an NSString
[fileUrl getResourceValue:&size forKey:NSURLFileSizeKey error:nil];
NSLog(@"Bytes : %@", size);
It is good practice to check for any errors returned.
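For example, the same lookup with error handling might look like this (a minimal sketch):
NSError *error = nil;
NSNumber *fileSize = nil;
if ([fileUrl getResourceValue:&fileSize forKey:NSURLFileSizeKey error:&error]) {
    NSLog(@"Bytes : %@", fileSize);
} else {
    NSLog(@"Could not read file size: %@", error);
}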
Try:
CMTime lengthTime = [movieAsset duration];
Float64 seconds = CMTimeGetSeconds(lengthTime);
NSLog(@"Asset is %g seconds long", seconds);
We have a process that takes high-resolution source PNG/JPG images and creates renditions of them in various lower-resolution formats / cropped versions.
void ResizeAndSaveSourceImageFromFile(NSString *imagePath, NSInteger width, NSInteger height, NSString *destinationFolder, NSString *fileName, BOOL shouldCrop, NSInteger rotation, NSInteger cornerRadius, BOOL removeAlpha) {
    NSString *outputFilePath = [NSString stringWithFormat:@"%@/%@", destinationFolder, fileName];
    NSImage *sourceImage = [[NSImage alloc] initWithContentsOfFile:imagePath];
    NSSize sourceSize = sourceImage.size;
    float sourceAspect = sourceSize.width / sourceSize.height;
    float desiredAspect = (float)width / (float)height; // cast to avoid NSInteger division
    float finalWidth = width;
    float finalHeight = height;
    if (shouldCrop == true) {
        if (desiredAspect > sourceAspect) {
            width = height * sourceAspect;
        } else if (desiredAspect < sourceAspect) {
            height = width / sourceAspect;
        }
    }
    if (width < finalWidth) {
        width = finalWidth;
        height = width / sourceAspect;
    }
    if (height < finalHeight) {
        height = finalHeight;
        width = height * sourceAspect;
    }
    NSImage *resizedImage = ImageByScalingToSize(sourceImage, CGSizeMake(width, height));
    if (shouldCrop == true) {
        resizedImage = ImageByCroppingImage(resizedImage, CGSizeMake(finalWidth, finalHeight));
    }
    if (rotation != 0) {
        resizedImage = ImageRotated(resizedImage, rotation);
    }
    if (cornerRadius != 0) {
        resizedImage = ImageRounded(resizedImage, cornerRadius);
    }
    NSBitmapImageRep *imgRep = UnscaledBitmapImageRep(resizedImage, removeAlpha);
    NSBitmapImageFileType type = NSPNGFileType;
    if ([fileName rangeOfString:@".jpg"].location != NSNotFound) {
        type = NSJPEGFileType;
    }
    NSData *imageData = [imgRep representationUsingType:type properties:@{}];
    [imageData writeToFile:outputFilePath atomically:NO];
    if ([outputFilePath rangeOfString:@"land-mdpi"].location != NSNotFound) {
        [imageData writeToFile:[outputFilePath stringByReplacingOccurrencesOfString:@"land-mdpi" withString:@"tvdpi"] atomically:NO];
    }
}
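A hypothetical invocation of the pipeline above (the paths and file names are made up for illustration):
ResizeAndSaveSourceImageFromFile(@"/tmp/source.png", 1920, 1080,
                                 @"/tmp/renditions", @"banner.png",
                                 YES, 0, 0, NO);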
NSImage* ImageByScalingToSize(NSImage *sourceImage, NSSize newSize) {
    if (!sourceImage.isValid) return nil;
    NSBitmapImageRep *rep = [[NSBitmapImageRep alloc]
                             initWithBitmapDataPlanes:NULL
                                           pixelsWide:newSize.width
                                           pixelsHigh:newSize.height
                                        bitsPerSample:8
                                      samplesPerPixel:4
                                             hasAlpha:YES
                                             isPlanar:NO
                                       colorSpaceName:NSCalibratedRGBColorSpace
                                          bytesPerRow:0
                                         bitsPerPixel:0];
    rep.size = newSize;
    [NSGraphicsContext saveGraphicsState];
    [NSGraphicsContext setCurrentContext:[NSGraphicsContext graphicsContextWithBitmapImageRep:rep]];
    [sourceImage drawInRect:NSMakeRect(0, 0, newSize.width, newSize.height) fromRect:NSZeroRect operation:NSCompositingOperationCopy fraction:1.0];
    [NSGraphicsContext restoreGraphicsState];
    NSImage *newImage = [[NSImage alloc] initWithSize:newSize];
    [newImage addRepresentation:rep];
    return newImage;
}
NSBitmapImageRep* UnscaledBitmapImageRep(NSImage *image, BOOL removeAlpha) {
    NSBitmapImageRep *rep = [[NSBitmapImageRep alloc]
                             initWithBitmapDataPlanes:NULL
                                           pixelsWide:image.size.width
                                           pixelsHigh:image.size.height
                                        bitsPerSample:8
                                      samplesPerPixel:4
                                             hasAlpha:YES
                                             isPlanar:NO
                                       colorSpaceName:NSDeviceRGBColorSpace
                                          bytesPerRow:0
                                         bitsPerPixel:0];
    [NSGraphicsContext saveGraphicsState];
    [NSGraphicsContext setCurrentContext:
     [NSGraphicsContext graphicsContextWithBitmapImageRep:rep]];
    [image drawAtPoint:NSMakePoint(0, 0)
              fromRect:NSZeroRect
             operation:NSCompositingOperationSourceOver
              fraction:1.0];
    [NSGraphicsContext restoreGraphicsState];
    NSBitmapImageRep *imgRepFinal = rep;
    if (removeAlpha == YES) {
        NSImage *newImage = [[NSImage alloc] initWithSize:[rep size]];
        [newImage addRepresentation:rep];
        // Redraw into a 16-bit (5 bits per colour, alpha skipped) bitmap context to strip the alpha channel.
        static int const kNumberOfBitsPerColour = 5;
        NSRect imageRect = NSMakeRect(0.0, 0.0, newImage.size.width, newImage.size.height);
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef tileGraphicsContext = CGBitmapContextCreate(NULL, imageRect.size.width, imageRect.size.height, kNumberOfBitsPerColour, 2 * imageRect.size.width, colorSpace, kCGBitmapByteOrder16Little | kCGImageAlphaNoneSkipFirst);
        NSData *imageDataTIFF = [newImage TIFFRepresentation];
        CGImageRef imageRef = [[NSBitmapImageRep imageRepWithData:imageDataTIFF] CGImage];
        CGContextDrawImage(tileGraphicsContext, imageRect, imageRef);
        // Create an NSImage from the tile graphics context
        CGImageRef newImageRef = CGBitmapContextCreateImage(tileGraphicsContext);
        NSImage *newNSImage = [[NSImage alloc] initWithCGImage:newImageRef size:imageRect.size];
        // Clean up
        CGImageRelease(newImageRef);
        CGContextRelease(tileGraphicsContext);
        CGColorSpaceRelease(colorSpace);
        CGImageRef CGImage = [newNSImage CGImageForProposedRect:nil context:nil hints:nil];
        imgRepFinal = [[NSBitmapImageRep alloc] initWithCGImage:CGImage];
    }
    return imgRepFinal;
}
NSImage* ImageByCroppingImage(NSImage *image, CGSize size) {
    NSInteger trueWidth = image.representations[0].pixelsWide;
    double refWidth = image.size.width;
    double refHeight = image.size.height;
    double scale = trueWidth / refWidth;
    double x = (refWidth - size.width) / 2.0;
    double y = (refHeight - size.height) / 2.0;
    CGRect cropRect = CGRectMake(x * scale, y * scale, size.width * scale, size.height * scale);
    CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)[image TIFFRepresentation], NULL);
    CGImageRef maskRef = CGImageSourceCreateImageAtIndex(source, 0, NULL);
    CGImageRef imageRef = CGImageCreateWithImageInRect(maskRef, cropRect);
    NSImage *cropped = [[NSImage alloc] initWithCGImage:imageRef size:size];
    // Release the intermediate Core Graphics objects to avoid leaks.
    CGImageRelease(imageRef);
    CGImageRelease(maskRef);
    CFRelease(source);
    return cropped;
}
This process works well and gets the results we want. We can re-run these functions on hundreds of images and get the same output every time. We then commit these files to git repos.
HOWEVER, every time we update macOS to a new version (High Sierra, Monterey, etc.) and re-run these functions, ALL of the images produce output with different hashes, so git treats these images as changed even though the source images are identical.
FURTHER, JPG images seem to produce different output on an Intel Mac vs. an Apple M1 Mac.
We have checked the head of the output images using a command like:
od -bc banner.png | head
This results in the same head data in all cases even though the actual image data doesn't match after version changes.
We've also checked the output of CGImageSourceCopyPropertiesAtIndex, such as:
{
    ColorModel = RGB;
    Depth = 8;
    HasAlpha = 1;
    PixelHeight = 1080;
    PixelWidth = 1920;
    ProfileName = "Generic RGB Profile";
    "{Exif}" = {
        PixelXDimension = 1920;
        PixelYDimension = 1080;
    };
    "{PNG}" = {
        InterlaceType = 0;
    };
}
These do not show any differences between macOS versions or between Intel and M1.
We don't want the hashes to keep changing on us and causing extra churn in git, so we are hoping for feedback that may help us get consistent output in all cases.
Any tips are greatly appreciated.
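One way to narrow down whether the decoded pixels differ or only the encoder's byte stream does (a hypothetical diagnostic, not part of the pipeline above) is to hash the raw bitmap data instead of the file:
#import <CommonCrypto/CommonDigest.h>

NSData* PixelDigest(NSString *path) {
    // Decode the file and hash the raw pixels, bypassing the encoded container bytes.
    NSBitmapImageRep *rep = [NSBitmapImageRep imageRepWithData:[NSData dataWithContentsOfFile:path]];
    if (rep == nil) return nil;
    unsigned char digest[CC_SHA256_DIGEST_LENGTH];
    CC_SHA256(rep.bitmapData, (CC_LONG)(rep.bytesPerRow * rep.pixelsHigh), digest);
    return [NSData dataWithBytes:digest length:sizeof(digest)];
}
If this digest is stable across OS versions while the file hash is not, the churn comes from the encoder rather than from the drawing code.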
On iOS, I use AVAssetExportSession to compress an AVAsset to an mp4 file (a 6-second video).
If I keep the original size (1280x720), the output file is 645362 bytes.
When I set the videoComposition and crop the video to (720x405), the output file is 631862 bytes.
Why, when the video is cropped to less than half the pixels, is the file size almost the same?
The code is listed below:
NSURL *saveURL = [NSURL fileURLWithPath:savePath];
//create an avassetrack with our asset
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize videoSz = clipVideoTrack.naturalSize;
CGRect cropRect = CGRectMake(0, 0, videoSz.width, 320.0/568.0*videoSz.width);
//create a video composition and preset some settings
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
CGFloat cropOffX = cropRect.origin.x;
CGFloat cropOffY = cropRect.origin.y;
CGFloat cropWidth = cropRect.size.width;
CGFloat cropHeight = cropRect.size.height;
videoComposition.renderSize = CGSizeMake(cropWidth, cropHeight);
//create a video instruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
UIImageOrientation videoOrientation = [self getVideoOrientationFromAsset:asset];
CGAffineTransform t1 = CGAffineTransformIdentity;
CGAffineTransform t2 = CGAffineTransformIdentity;
switch (videoOrientation) {
    case UIImageOrientationUp:
        t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height - cropOffX, 0 - cropOffY);
        t2 = CGAffineTransformRotate(t1, M_PI_2);
        break;
    case UIImageOrientationDown:
        t1 = CGAffineTransformMakeTranslation(0 - cropOffX, clipVideoTrack.naturalSize.width - cropOffY); // note: width is the real height when upside down
        t2 = CGAffineTransformRotate(t1, -M_PI_2);
        break;
    case UIImageOrientationRight:
        t1 = CGAffineTransformMakeTranslation(0 - cropOffX, 0 - cropOffY);
        t2 = CGAffineTransformRotate(t1, 0);
        break;
    case UIImageOrientationLeft:
        t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.width - cropOffX, clipVideoTrack.naturalSize.height - cropOffY);
        t2 = CGAffineTransformRotate(t1, M_PI);
        break;
    default:
        NSLog(@"no supported orientation has been found in this video");
        break;
}
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
//add the transformer layer instructions, then add to video composition
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject:instruction];
AVAssetExportSession *avAssetExportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
if (!keepOrigin) {
    [avAssetExportSession setVideoComposition:videoComposition];
}
[avAssetExportSession setOutputFileType:AVFileTypeMPEG4];
[avAssetExportSession setShouldOptimizeForNetworkUse:YES];
[avAssetExportSession setOutputURL:saveURL]; // the snippet declared saveURL but never used it; the export needs a destination
[avAssetExportSession exportAsynchronouslyWithCompletionHandler:^(void){
    NSData *data = [NSData dataWithContentsOfURL:[NSURL fileURLWithPath:savePath]];
    NSUInteger size = data.length;
    NSLog(@"print video length = %lu", (unsigned long)size);
}];
Hey everyone, I am cropping a video taken from the camera on iPhone and then playing it back like this. When I do it, however, I get a weird green line along the bottom and right side of the video. Not sure why this is happening or how to fix it. Here is how I am cropping.
- (UIImageOrientation)getVideoOrientationFromAsset:(AVAsset *)asset
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGSize size = [videoTrack naturalSize];
    CGAffineTransform txf = [videoTrack preferredTransform];
    if (size.width == txf.tx && size.height == txf.ty)
        return UIImageOrientationLeft; //return UIInterfaceOrientationLandscapeLeft;
    else if (txf.tx == 0 && txf.ty == 0)
        return UIImageOrientationRight; //return UIInterfaceOrientationLandscapeRight;
    else if (txf.tx == 0 && txf.ty == size.width)
        return UIImageOrientationDown; //return UIInterfaceOrientationPortraitUpsideDown;
    else
        return UIImageOrientationUp; //return UIInterfaceOrientationPortrait;
}
- (AVAssetExportSession *)applyCropToVideoWithAsset:(AVAsset *)asset AtRect:(CGRect)cropRect OnTimeRange:(CMTimeRange)cropTimeRange ExportToUrl:(NSURL *)outputUrl ExistingExportSession:(AVAssetExportSession *)exporter WithCompletion:(void (^)(BOOL success, NSError *error, NSURL *videoUrl))completion
{
    // NSLog(@"CALLED");
    //create an avassetrack with our asset
    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    //create a video composition and preset some settings
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    CGFloat cropOffX = cropRect.origin.x;
    CGFloat cropOffY = cropRect.origin.y;
    CGFloat cropWidth = cropRect.size.width;
    CGFloat cropHeight = cropRect.size.height;
    // NSLog(@"width: %f - height: %f - x: %f - y: %f", cropWidth, cropHeight, cropOffX, cropOffY);
    videoComposition.renderSize = CGSizeMake(cropWidth, cropHeight);
    //create a video instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = cropTimeRange;
    AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
    UIImageOrientation videoOrientation = [self getVideoOrientationFromAsset:asset];
    CGAffineTransform t1 = CGAffineTransformIdentity;
    CGAffineTransform t2 = CGAffineTransformIdentity;
    switch (videoOrientation) {
        case UIImageOrientationUp:
            t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height - cropOffX, 0 - cropOffY);
            t2 = CGAffineTransformRotate(t1, M_PI_2);
            break;
        case UIImageOrientationDown:
            t1 = CGAffineTransformMakeTranslation(0 - cropOffX, clipVideoTrack.naturalSize.width - cropOffY); // note: width is the real height when upside down
            t2 = CGAffineTransformRotate(t1, -M_PI_2);
            break;
        case UIImageOrientationRight:
            t1 = CGAffineTransformMakeTranslation(0 - cropOffX, 0 - cropOffY);
            t2 = CGAffineTransformRotate(t1, 0);
            break;
        case UIImageOrientationLeft:
            t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.width - cropOffX, clipVideoTrack.naturalSize.height - cropOffY);
            t2 = CGAffineTransformRotate(t1, M_PI);
            break;
        default:
            NSLog(@"no supported orientation has been found in this video");
            break;
    }
    CGAffineTransform finalTransform = t2;
    [transformer setTransform:finalTransform atTime:kCMTimeZero];
    //add the transformer layer instructions, then add to video composition
    instruction.layerInstructions = [NSArray arrayWithObject:transformer];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];
    //Remove any previous videos at that path
    [[NSFileManager defaultManager] removeItemAtURL:outputUrl error:nil];
    if (!exporter) {
        exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
    }
    // assign all instructions for the video processing (in this case the transformation for cropping the video)
    exporter.videoComposition = videoComposition;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    if (outputUrl) {
        exporter.outputURL = outputUrl;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            switch ([exporter status]) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"crop Export failed: %@", [[exporter error] localizedDescription]);
                    if (completion) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO, [exporter error], nil);
                        });
                        return;
                    }
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"crop Export canceled");
                    if (completion) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO, nil, nil);
                        });
                        return;
                    }
                    break;
                default:
                    break;
            }
            if (completion) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(YES, nil, outputUrl);
                });
            }
        }];
    }
    return exporter;
}
And then I call the crop and play it back like this:
AVAsset *asset = [AVAsset assetWithURL:self.videoURL];
NSString *documentsPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *exportPath = [documentsPath stringByAppendingFormat:@"/croppedvideo.mp4"];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
AVAssetExportSession *exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetLowQuality];
[self applyCropToVideoWithAsset:asset AtRect:CGRectMake(self.view.frame.size.width/2 - 57.5 - 5, self.view.frame.size.height/2 - 140, 115, 85) OnTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) // use the asset's real duration; the original passed duration.value with timescale 1, which is far too long
                    ExportToUrl:exportUrl ExistingExportSession:exporter WithCompletion:^(BOOL success, NSError *error, NSURL *videoUrl) {
    AVPlayer *player = [AVPlayer playerWithURL:videoUrl];
    AVPlayerLayer *layer = [AVPlayerLayer playerLayerWithPlayer:player];
    layer.frame = CGRectMake(125, 365, 115, 115);
    UIView *view = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 400, 400)];
    [view.layer addSublayer:layer];
    [self.view addSubview:view];
    [player play];
}];
If you want to test this, the code just needs to be copied and pasted with a video set, and you will see what I am talking about.
Thanks for taking the time to help me; I know it is a fair bit of code.
There are width requirements for either the iOS encoders or the video format itself. Try making your width even or divisible by 4.
I wasn't aware of there being a similar requirement for height, but that could be worth a try too.
I've never found it documented, but requiring evenness makes a certain amount of sense as h.264 uses a 4:2:0 yuv colour space, where the UV component is half the size (in both dimensions) of the Y channel, which has the overall dimensions of the video. If those dimensions weren't even, the UV dimensions wouldn't be integral.
p.s. the hint in these cases is the mysterious green colour. I think it corresponds to 0, 0, 0 in YUV.
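A minimal sketch of that suggestion, rounding the render size up to even values before assigning it (variable names follow the question's code):
cropWidth  = ceil(cropWidth / 2.0) * 2.0;
cropHeight = ceil(cropHeight / 2.0) * 2.0;
videoComposition.renderSize = CGSizeMake(cropWidth, cropHeight);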
The answer by @Rhythmic saved my day.
In my app, I need a square video sized to the screen width. For iPhone 5 this comes to 320 pixels, and for iPhone 6 it becomes 375 pixels.
So I ran into the same green line issue at the iPhone 6 resolution, because its screen width of 375 pixels is not divisible by 2 or 4.
To get around this we made these changes:
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction]; // MainInstruction and range are set up earlier (not shown)
MainInstruction.timeRange = range;
MainCompositionInst.frameDuration = VideoFrameDuration; //Constants
MainCompositionInst.renderScale = VideoRenderScale; //Constants
if ((int)SCREEN_WIDTH % 2 == 0)
    MainCompositionInst.renderSize = CGSizeMake(SCREEN_WIDTH, SCREEN_WIDTH);
else // This does the trick
    MainCompositionInst.renderSize = CGSizeMake(SCREEN_WIDTH + 1, SCREEN_WIDTH + 1);
Just add one more pixel to it so it becomes divisible by 2 or 4.
I am capturing the video using UIImagePickerController. I can crop the video using the following code:
AVAsset *asset = [AVAsset assetWithURL:url];
//create an avassetrack with our asset
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
//create a video composition and preset some settings
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
//here we are setting its render size to its height x height (Square)
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.height);
//create a video instruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30));
AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
//Here we shift the viewing square up to the TOP of the video so we only see the top
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -20);
//Use this code if you want the viewing square to be in the middle of the video
//CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) / 2);
//Make sure the square is portrait
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
//add the transformer layer instructions, then add to video composition
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject:instruction];
//Create an export path to store the cropped video
NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"video.mp4"];
NSURL *exportUrl = [NSURL fileURLWithPath:outputPath];
//Remove any previous videos at that path
[[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];
//Export
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetLowQuality];
exporter.videoComposition = videoComposition;
exporter.outputURL = exportUrl;
exporter.outputFileType = AVFileTypeMPEG4;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
    dispatch_async(dispatch_get_main_queue(), ^{
        //Call when finished
        [self exportDidFinish:exporter];
    });
}];
But I don't know how to fix the orientation issue. Like the Instagram and Vine apps, even if I capture the video in landscape mode, it should end up in portrait mode and be cropped as a square. Please give me the solution... I am struggling with this issue.
I suppose the source code comes from this link (project code included):
http://www.one-dreamer.com/cropping-video-square-like-vine-instagram-xcode/
First you need to know the REAL video orientation:
- (UIImageOrientation)getVideoOrientationFromAsset:(AVAsset *)asset
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGSize size = [videoTrack naturalSize];
    CGAffineTransform txf = [videoTrack preferredTransform];
    if (size.width == txf.tx && size.height == txf.ty)
        return UIImageOrientationLeft; //return UIInterfaceOrientationLandscapeLeft;
    else if (txf.tx == 0 && txf.ty == 0)
        return UIImageOrientationRight; //return UIInterfaceOrientationLandscapeRight;
    else if (txf.tx == 0 && txf.ty == size.width)
        return UIImageOrientationDown; //return UIInterfaceOrientationPortraitUpsideDown;
    else
        return UIImageOrientationUp; //return UIInterfaceOrientationPortrait;
}
I wrote that function so that it returns the right orientation as if it were an image.
Then I modified the function to apply the right orientation, supporting any crop region, not just a square, like this:
// Apply the crop to the passed video asset (set outputUrl to nil to skip saving to disk). Returns the exporter session object.
- (AVAssetExportSession *)applyCropToVideoWithAsset:(AVAsset *)asset AtRect:(CGRect)cropRect OnTimeRange:(CMTimeRange)cropTimeRange ExportToUrl:(NSURL *)outputUrl ExistingExportSession:(AVAssetExportSession *)exporter WithCompletion:(void (^)(BOOL success, NSError *error, NSURL *videoUrl))completion
{
    //create an avassetrack with our asset
    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    //create a video composition and preset some settings
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    CGFloat cropOffX = cropRect.origin.x;
    CGFloat cropOffY = cropRect.origin.y;
    CGFloat cropWidth = cropRect.size.width;
    CGFloat cropHeight = cropRect.size.height;
    videoComposition.renderSize = CGSizeMake(cropWidth, cropHeight);
    //create a video instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = cropTimeRange;
    AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
    UIImageOrientation videoOrientation = [self getVideoOrientationFromAsset:asset];
    CGAffineTransform t1 = CGAffineTransformIdentity;
    CGAffineTransform t2 = CGAffineTransformIdentity;
    switch (videoOrientation) {
        case UIImageOrientationUp:
            t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height - cropOffX, 0 - cropOffY);
            t2 = CGAffineTransformRotate(t1, M_PI_2);
            break;
        case UIImageOrientationDown:
            t1 = CGAffineTransformMakeTranslation(0 - cropOffX, clipVideoTrack.naturalSize.width - cropOffY); // note: width is the real height when upside down
            t2 = CGAffineTransformRotate(t1, -M_PI_2);
            break;
        case UIImageOrientationRight:
            t1 = CGAffineTransformMakeTranslation(0 - cropOffX, 0 - cropOffY);
            t2 = CGAffineTransformRotate(t1, 0);
            break;
        case UIImageOrientationLeft:
            t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.width - cropOffX, clipVideoTrack.naturalSize.height - cropOffY);
            t2 = CGAffineTransformRotate(t1, M_PI);
            break;
        default:
            NSLog(@"no supported orientation has been found in this video");
            break;
    }
    CGAffineTransform finalTransform = t2;
    [transformer setTransform:finalTransform atTime:kCMTimeZero];
    //add the transformer layer instructions, then add to video composition
    instruction.layerInstructions = [NSArray arrayWithObject:transformer];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];
    //Remove any previous videos at that path
    [[NSFileManager defaultManager] removeItemAtURL:outputUrl error:nil];
    if (!exporter) {
        exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
    }
    // assign all instructions for the video processing (in this case the transformation for cropping the video)
    exporter.videoComposition = videoComposition;
    //exporter.outputFileType = AVFileTypeQuickTimeMovie;
    if (outputUrl) {
        exporter.outputURL = outputUrl;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            switch ([exporter status]) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"crop Export failed: %@", [[exporter error] localizedDescription]);
                    if (completion) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO, [exporter error], nil);
                        });
                        return;
                    }
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"crop Export canceled");
                    if (completion) {
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO, nil, nil);
                        });
                        return;
                    }
                    break;
                default:
                    break;
            }
            if (completion) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(YES, nil, outputUrl);
                });
            }
        }];
    }
    return exporter;
}
Tested in all recorded video orientations (Up, Down, Landscape R, Landscape L) with both the back and front cameras. I tested it on an iPhone 5S (iOS 8.1) and an iPhone 6 Plus (iOS 8.1).
Hope it helps
This is my code to create a Vine-like video from a video on disk. It is written in Swift:
static let MaxDuration: CMTimeValue = 12

class func compressVideoAsset(_ asset: AVAsset, output: URL, completion: @escaping (_ data: Data?) -> Void)
{
    let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality)!
    session.videoComposition = self.squareVideoCompositionForAsset(asset)
    session.outputURL = output
    session.outputFileType = AVFileTypeMPEG4
    session.shouldOptimizeForNetworkUse = true
    session.canPerformMultiplePassesOverSourceMediaData = true
    let duration = CMTimeValue(CGFloat(asset.duration.value) / CGFloat(asset.duration.timescale) * 30)
    session.timeRange = CMTimeRange(start: kCMTimeZero, duration: CMTime(value: min(duration, VideoCompressor.MaxDuration * 30), timescale: 30))
    session.exportAsynchronously(completionHandler: { () -> Void in
        let data = try? Data(contentsOf: output)
        DispatchQueue.main.async(execute: { () -> Void in
            completion(data)
        })
    })
}

private class func squareVideoCompositionForAsset(_ asset: AVAsset) -> AVVideoComposition
{
    let track = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let length = min(track.naturalSize.width, track.naturalSize.height)
    var transform = track.preferredTransform
    let size = track.naturalSize
    let scale: CGFloat = (transform.a == -1 && transform.b == 0 && transform.c == 0 && transform.d == -1) ? -1 : 1 // check for inversion
    transform = transform.translatedBy(x: scale * -(size.width - length) / 2, y: scale * -(size.height - length) / 2)
    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    transformer.setTransform(transform, at: kCMTimeZero)
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: kCMTimePositiveInfinity)
    instruction.layerInstructions = [transformer]
    let composition = AVMutableVideoComposition()
    composition.frameDuration = CMTime(value: 1, timescale: 30)
    composition.renderSize = CGSize(width: length, height: length)
    composition.instructions = [instruction]
    return composition
}
I know this question is old, but some people may still be wondering why some videos from the camera roll zoom in after they're cropped. I faced this problem and realized that the cropRect I was using as a frame was not scaled for the different aspect ratios of the video. To fix the problem I simply added the code below to crop the very top of the video into a square. If you want to change the position, just change the y value, but make sure to scale it according to the video. Luca Iaco provided some great code to get started with. I appreciate it!
CGSize videoSize = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize];
float scaleFactor;
if (videoSize.width >= videoSize.height) {
    // landscape or square: the height is the square's side
    scaleFactor = videoSize.height / 320;
} else {
    // portrait: the width is the square's side
    scaleFactor = videoSize.width / 320;
}
CGFloat cropOffX = 0;
CGFloat cropOffY = 0;
CGFloat cropWidth = 320 * scaleFactor;
CGFloat cropHeight = 320 * scaleFactor;
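For example, to center the square vertically instead of keeping the top, the offset might be computed like this (illustrative, reusing the variables above):
cropOffY = (videoSize.height - cropHeight) / 2;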
I have an app where I tried to implement a custom camera. This is the source code:
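The snippet below assumes the capture session already exists. Since the post omits that setup, a minimal version might be:
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetPhoto; // assumed preset for still capture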
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
//device.position ;
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
[session addInput:input];
[session startRunning];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
Then I tried to take a photo and send it to another view controller:
- (IBAction)captureNow
{
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }
    //NSLog(@"about to request a capture from: %@", stillImageOutput);
    AcceptingPhotoViewController *photo = [[AcceptingPhotoViewController alloc] initWithNibName:@"AcceptingPhotoViewController" bundle:nil];
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        photo.image = image;
        photo.photoFromCamera = YES;
        [self.navigationController pushViewController:photo animated:NO];
    }];
}
But in my target class this image comes out rotated 90 degrees to the left:
I tried to rotate it back:
float newSide = MAX([image size].width, [image size].height);
CGSize size = CGSizeMake(newSide, newSide);
UIGraphicsBeginImageContext(size);
CGContextRef ctx = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(ctx, newSide/2, newSide/2);
CGContextRotateCTM(ctx, 1.57079633);
CGContextDrawImage(UIGraphicsGetCurrentContext(),CGRectMake(-[image size].width/2,-[image size].height/2,size.width, size.height),image.CGImage);
UIImage *i = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
image = i;
The image is now rotated to the right, but it displays mirrored and stretched.
Any help? Can I rotate it another way, or maybe should I take the photo differently?
I found a solution to this problem:
I rotate and transform the image this way:
UIView* rotatedViewBox = [[UIView alloc] initWithFrame: CGRectMake(0, 0, image.size.width, image.size.height)];
float angleRadians = 90 * ((float)M_PI / 180.0f);
CGAffineTransform t = CGAffineTransformMakeRotation(angleRadians);
rotatedViewBox.transform = t;
CGSize rotatedSize = rotatedViewBox.frame.size;
UIGraphicsBeginImageContext(rotatedSize);
CGContextRef bitmap = UIGraphicsGetCurrentContext();
CGContextTranslateCTM(bitmap, rotatedSize.width/2, rotatedSize.height/2);
CGContextRotateCTM(bitmap, angleRadians);
CGContextScaleCTM(bitmap, 1.0, -1.0);
CGContextDrawImage(bitmap, CGRectMake(-image.size.width / 2, -image.size.height / 2, image.size.width, image.size.height), [image CGImage]);
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
image = newImage;
CGSize newSize = CGSizeMake(image.size.height, image.size.width);
UIGraphicsBeginImageContext(newSize);
// Tell the old image to draw in this new context, with the desired
// new size
[image drawInRect:CGRectMake(0,0,image.size.height,image.size.width)];
// Get the new image from the context
UIImage* newImage2 = UIGraphicsGetImageFromCurrentImageContext();
// End the context
UIGraphicsEndImageContext();
image = newImage2;
I also had the same problem; just copy and paste.
Try this code, it will help you:
- (UIImage *)fixrotation:(UIImage *)image {
    if (image.imageOrientation == UIImageOrientationDown) return image;
    CGAffineTransform transform = CGAffineTransformIdentity;
    switch (image.imageOrientation) {
        case UIImageOrientationDown:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.width, 0);
            transform = CGAffineTransformRotate(transform, M_PI_2);
            break;
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, 0, image.size.height);
            transform = CGAffineTransformRotate(transform, -M_PI_2);
            break;
        case UIImageOrientationUp:
        case UIImageOrientationUpMirrored:
            break;
    }
    switch (image.imageOrientation) {
        case UIImageOrientationUpMirrored:
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRightMirrored:
            transform = CGAffineTransformTranslate(transform, image.size.height, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
            break;
        case UIImageOrientationUp:
        case UIImageOrientationDown:
        case UIImageOrientationLeft:
        case UIImageOrientationRight:
            break;
    }
    // Now we draw the underlying CGImage into a new context, applying the transform
    // calculated above.
    CGContextRef ctx = CGBitmapContextCreate(NULL, image.size.width, image.size.height,
                                             CGImageGetBitsPerComponent(image.CGImage), 0,
                                             CGImageGetColorSpace(image.CGImage),
                                             CGImageGetBitmapInfo(image.CGImage));
    CGContextConcatCTM(ctx, transform);
    switch (image.imageOrientation) {
        case UIImageOrientationLeft:
        case UIImageOrientationLeftMirrored:
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            // Grr...
            CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.height, image.size.width), image.CGImage);
            break;
        default:
            CGContextDrawImage(ctx, CGRectMake(0, 0, image.size.width, image.size.height), image.CGImage);
            break;
    }
    // And now we just create a new UIImage from the drawing context
    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
    UIImage *img = [UIImage imageWithCGImage:cgimg];
    CGContextRelease(ctx);
    CGImageRelease(cgimg);
    return img;
}
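A typical call site (illustrative, following the capture code above) would run the captured image through the fix before handing it to the next view controller:
photo.image = [self fixrotation:image];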