I'm trying to take screenshots from the background using IOSurface; here's my code:
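// Requires the private IOKit / IOMobileFramebuffer / IOSurfaceAccelerator
// headers; the exact header names and paths vary between SDK header dumps.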
IOMobileFramebufferConnection connect;
kern_return_t result;
IOSurfaceRef screenSurface = NULL;
io_service_t framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleH1CLCD"));
if(!framebufferService)
framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleM2CLCD"));
if(!framebufferService)
framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleCLCD"));
result = IOMobileFramebufferOpen(framebufferService, mach_task_self(), 0, &connect);
result = IOMobileFramebufferGetLayerDefaultSurface(connect, 0, (CoreSurfaceBufferRef*)&screenSurface);
uint32_t aseed;
IOSurfaceLock(screenSurface, kIOSurfaceLockReadOnly, &aseed);
uint32_t width = IOSurfaceGetWidth(screenSurface);
uint32_t height = IOSurfaceGetHeight(screenSurface);
CFMutableDictionaryRef dict;
int pitch = width*4, size = width*height*4;
int bPE=4;
char pixelFormat[4] = {'A','R','G','B'};
dict = CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
CFDictionarySetValue(dict, kIOSurfaceIsGlobal, kCFBooleanTrue);
CFDictionarySetValue(dict, kIOSurfaceBytesPerRow, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &pitch));
CFDictionarySetValue(dict, kIOSurfaceBytesPerElement, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bPE));
CFDictionarySetValue(dict, kIOSurfaceWidth, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &width));
CFDictionarySetValue(dict, kIOSurfaceHeight, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &height));
CFDictionarySetValue(dict, kIOSurfacePixelFormat, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, pixelFormat));
CFDictionarySetValue(dict, kIOSurfaceAllocSize, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &size));
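// Note: each CFNumberCreate above returns a +1 reference that is never
// released here; CFRelease them (and dict) after IOSurfaceCreate if this
// code runs repeatedly.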
IOSurfaceRef destSurf = IOSurfaceCreate(dict);
IOSurfaceAcceleratorRef outAcc;
IOSurfaceAcceleratorCreate(NULL, 0, &outAcc);
IOSurfaceAcceleratorTransferSurface(outAcc, screenSurface, destSurf, dict, NULL);
IOSurfaceUnlock(screenSurface, kIOSurfaceLockReadOnly, &aseed);
CFRelease(outAcc);
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, IOSurfaceGetBaseAddress(destSurf), (width * height * 4), NULL);
CGImageRef cgImage=CGImageCreate(width, height, 8,
8*4, IOSurfaceGetBytesPerRow(destSurf),
CGColorSpaceCreateDeviceRGB(), kCGImageAlphaNoneSkipFirst |kCGBitmapByteOrder32Little,
provider, NULL,
YES, kCGRenderingIntentDefault);
UIImage *img = [UIImage imageWithCGImage:cgImage];
saveImage(img);
I combined the code from "IOSurface - IOS Private API - Capture screenshot in background" and "IOSurfaces - Artefacts in video and unable to grab video surfaces" with small modifications. The code works well on the home screen and in most apps, but it returns a black screen in games. Any ideas how to fix that? Thanks.
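One thing worth trying (a sketch under the assumption that games composite into a framebuffer layer other than 0; not a confirmed fix, since IOMobileFramebuffer is undocumented): probe the other layer indices with the same call and use the first surface that comes back non-NULL.
// Hedged sketch: layer indices beyond 0 are an assumption.
for (int layer = 0; layer < 4 && !screenSurface; layer++) {
    if (IOMobileFramebufferGetLayerDefaultSurface(connect, layer, (CoreSurfaceBufferRef *)&screenSurface) != KERN_SUCCESS)
        screenSurface = NULL;
}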
Related
I am working on an iOS application in which taking a clear screenshot is required. I've tried:
func captureView() -> UIImage {
//hide controls if needed
let rect: CGRect = self.imageView.frame
UIGraphicsBeginImageContext(rect.size)
let context: CGContextRef = UIGraphicsGetCurrentContext()!
self.view.layer.renderInContext(context)
let img: UIImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return img
}
But the quality of the image is not good. Please suggest something for it.
Change the line
UIGraphicsBeginImageContext(rect.size)
to
UIGraphicsBeginImageContextWithOptions(rect.size, false, 0.0)
Add the code block below in a new file, and there you go.
extension UIView {
// Convert a UIView to a UIImage.
func captureView() -> UIImage {
// Increase this for higher resolution; 0.0 uses the device's screen scale.
let scale: CGFloat = 1.0
UIGraphicsBeginImageContextWithOptions(bounds.size, opaque, scale)
layer.renderInContext(UIGraphicsGetCurrentContext()!)
let image:UIImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return image
}
}
Now you can call captureView() on UIView itself or on any subclass (e.g. let snapshot = someView.captureView()) to get a UIImage of the view.
You can change the value of scale passed to UIGraphicsBeginImageContextWithOptions to get a higher-resolution image.
You can use the IOSurface private framework to do so
(The code is the same IOMobileFramebuffer / IOSurfaceAcceleratorTransferSurface listing shown in the first question at the top of this page, ending with UIImage *img = [UIImage imageWithCGImage:cgImage];.)
Use
UIGraphicsBeginImageContextWithOptions(yourSize, opaque, scale)
instead of
UIGraphicsBeginImageContext(rect.size)
The last parameter of UIGraphicsBeginImageContextWithOptions is the scale. If you need a specific resolution, set scale explicitly, e.g. 1.0 (passing 0.0 uses the device's screen scale).
func captureView() -> UIImage {
//hide controls if needed
let rect: CGRect = self.imageView.frame
UIGraphicsBeginImageContextWithOptions(rect.size, true, 1.0)//add this line
let context: CGContextRef = UIGraphicsGetCurrentContext()!
self.view.layer.renderInContext(context)
let img: UIImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return img
}
A feature I need to implement: background screen video recording.
I call the IOSurface screenshot code from an NSTimer loop; the code follows:
NSTimer *timer = [NSTimer scheduledTimerWithTimeInterval:0.1f target:self selector:@selector(ScreenShot) userInfo:nil repeats:YES];
- (void)ScreenShot {
io_service_t framebufferService;
framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleH1CLCD"));
if (!framebufferService) {
framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleM2CLCD"));
}
if (!framebufferService) {
framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleCLCD"));
}
IOMobileFramebufferConnection connect;
kern_return_t result;
IOSurfaceRef ScreenSurface;
result = IOMobileFramebufferOpen(framebufferService, mach_task_self(), 0, &connect);
result = IOMobileFramebufferGetLayerDefaultSurface(connect, 0, &ScreenSurface);
uint32_t aseed;
uint32_t Width;
uint32_t Height;
int Pitch;
int size;
int bPE;
CFMutableDictionaryRef dict;
IOSurfaceLock(ScreenSurface, kIOSurfaceLockReadOnly, &aseed);
Width = IOSurfaceGetWidth(ScreenSurface);
Height = IOSurfaceGetHeight(ScreenSurface);
Pitch = Width * 4;
size = Width * Height * 4;
bPE = 4;
char pixelFormat[4] = {'A','R','G','B'};
dict = CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
CFDictionarySetValue(dict, kIOSurfaceIsGlobal, kCFBooleanTrue);
CFDictionarySetValue(dict, kIOSurfaceBytesPerRow, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &Pitch));
CFDictionarySetValue(dict, kIOSurfaceBytesPerElement, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bPE));
CFDictionarySetValue(dict, kIOSurfaceWidth, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &Width));
CFDictionarySetValue(dict, kIOSurfaceHeight, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &Height));
CFDictionarySetValue(dict, kIOSurfacePixelFormat, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, pixelFormat));
CFDictionarySetValue(dict, kIOSurfaceAllocSize, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &size));
CGDataProviderRef provider;
IOSurfaceRef destSurf;
void* outAcc;
CGImageRef cgImage;
destSurf = IOSurfaceCreate(dict);
IOSurfaceAcceleratorCreate(NULL, 0, &outAcc);
IOSurfaceAcceleratorTransferSurface(outAcc, ScreenSurface, destSurf, dict, NULL);
IOSurfaceUnlock(ScreenSurface, kIOSurfaceLockReadOnly, &aseed);
CFRelease(outAcc);
provider = CGDataProviderCreateWithData(NULL, IOSurfaceGetBaseAddress(destSurf), (Width * Height * 4), NULL);
cgImage = CGImageCreate(Width, Height, 8, 8*4, IOSurfaceGetBytesPerRow(destSurf), CGColorSpaceCreateDeviceRGB(), kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little, provider, NULL, YES, kCGRenderingIntentDefault);
}
The screenshots succeed.
Now the problems are:
after running for about a minute, I get "Received memory warning."
while sliding the screen, the screenshot method is delayed.
Or is there another method for background screen video recording?
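A likely contributor to the memory warning (an editor's observation, not from the original thread): nothing created inside ScreenShot is ever released, so cgImage, provider, destSurf, dict, and the CFNumbers leak on every tick. A minimal cleanup sketch for the end of the method, using the names above (the CFNumbers would additionally need to be kept in variables to release them):
// Release per-call objects so each 0.1 s tick doesn't leak
// several screen-sized buffers.
CGImageRelease(cgImage);
CGDataProviderRelease(provider);
CFRelease(destSurf);
CFRelease(dict);
IOObjectRelease(framebufferService);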
I want to capture the whole screen of iOS and save it to a BMP (using private API). I get the IOSurfaceRef via an IOMobileFramebufferConnection first, then need a way to save the surface bytes to a BMP file.
I tried two methods. Method screenshot0: gets the bytes from screenSurface directly and saves them to a BMP, but produces a fuzzy, dislocated image. Method screenshot1: uses IOSurfaceAcceleratorTransferSurface to transfer the surface bytes to a new IOSurfaceRef and saves that to a BMP file, producing a clear but mirrored and rotated image.
I want to know: why can't I use the bytes from the original IOSurfaceRef directly? Are the bytes in the IOSurfaceRef mirrored? How can I get a correct BMP screenshot?
Thank you!
Image from the screenshot0 method: (image not reproduced here)
Image from the screenshot1 method: (image not reproduced here)
- (NSString *)getBmpSavePath:(NSString *)savePath
{
// Return savePath unchanged if it already has a .bmp extension
// (the original returned nil in that case).
if ([[[savePath pathExtension] lowercaseString] isEqualToString:@"bmp"]) {
return savePath;
}
return [[savePath stringByDeletingPathExtension] stringByAppendingPathExtension:@"bmp"];
}
- (IOSurfaceRef)getScreenSurface
{
IOSurfaceRef screenSurface = NULL;
io_service_t framebufferService = NULL;
IOMobileFramebufferConnection framebufferConnection = NULL;
framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleH1CLCD"));
if(!framebufferService)
framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleM2CLCD"));
if(!framebufferService)
framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleCLCD"));
if (framebufferService) {
kern_return_t result;
result = IOMobileFramebufferOpen(framebufferService, mach_task_self(), 0, &framebufferConnection);
if (result == KERN_SUCCESS) {
IOMobileFramebufferGetLayerDefaultSurface(framebufferConnection, 0, &screenSurface);
}
}
return screenSurface;
}
- (void)screenshot0:(NSString *)savePath
{
IOSurfaceRef screenSurface = [self getScreenSurface];
if (screenSurface) {
IOSurfaceLock(screenSurface, kIOSurfaceLockReadOnly, NULL);
size_t width = IOSurfaceGetWidth(screenSurface);
size_t height = IOSurfaceGetHeight(screenSurface);
void *bytes = IOSurfaceGetBaseAddress(screenSurface);
NSString *path = [self getBmpSavePath:savePath];
bmp_write(bytes, width, height, [path UTF8String]);
IOSurfaceUnlock(screenSurface, kIOSurfaceLockReadOnly, NULL);
}
}
- (void)screenshot1:(NSString *)savePath
{
IOSurfaceRef screenSurface = [self getScreenSurface];
if (screenSurface) {
IOSurfaceLock(screenSurface, kIOSurfaceLockReadOnly, NULL);
size_t width = IOSurfaceGetWidth(screenSurface);
size_t height = IOSurfaceGetHeight(screenSurface);
size_t bytesPerElement = IOSurfaceGetBytesPerElement(screenSurface);
OSType pixelFormat = IOSurfaceGetPixelFormat(screenSurface);
size_t bytesPerRow = bytesPerElement * width;
size_t allocSize = bytesPerRow * height;
//============== Why should I do this step? Why can't I IOSurfaceGetBaseAddress directly from screenSurface like method screenshot0:???
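// (Editor's hedged guess: the live framebuffer surface is not necessarily
// stored as plain linear rows, so reading its base address directly can
// yield the distorted image seen in screenshot0; the accelerator transfer
// below produces a linear copy in the requested format.)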
NSDictionary *properties = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kIOSurfaceIsGlobal,
[NSNumber numberWithUnsignedLong:bytesPerElement], kIOSurfaceBytesPerElement,
[NSNumber numberWithUnsignedLong:bytesPerRow], kIOSurfaceBytesPerRow,
[NSNumber numberWithUnsignedLong:width], kIOSurfaceWidth,
[NSNumber numberWithUnsignedLong:height], kIOSurfaceHeight,
[NSNumber numberWithUnsignedInt:pixelFormat], kIOSurfacePixelFormat,
[NSNumber numberWithUnsignedLong:allocSize], kIOSurfaceAllocSize,
nil];
IOSurfaceRef destSurf = IOSurfaceCreate((__bridge CFDictionaryRef)(properties));
IOSurfaceAcceleratorRef outAcc;
IOSurfaceAcceleratorCreate(NULL, 0, &outAcc);
IOSurfaceLock(screenSurface, kIOSurfaceLockReadOnly, NULL);
IOSurfaceAcceleratorTransferSurface(outAcc, screenSurface, destSurf, (__bridge CFDictionaryRef)(properties), NULL);
IOSurfaceUnlock(screenSurface, kIOSurfaceLockReadOnly, NULL);
CFRelease(outAcc);
//==============
void *bytes = IOSurfaceGetBaseAddress(destSurf);
NSString *path = [self getBmpSavePath:savePath];
bmp_write(bytes, width, height, [path UTF8String]);
IOSurfaceUnlock(screenSurface, kIOSurfaceLockReadOnly, NULL);
}
}
int bmp_write(const void *image, size_t xsize, size_t ysize, const char *filename)
{
unsigned char header[54] = {
0x42, 0x4d, 0, 0, 0, 0, 0, 0, 0, 0,
54, 0, 0, 0, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 32, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0
};
long file_size = (long)xsize * (long)ysize * 4 + 54;
header[2] = (unsigned char)(file_size &0x000000ff);
header[3] = (file_size >> 8) & 0x000000ff;
header[4] = (file_size >> 16) & 0x000000ff;
header[5] = (file_size >> 24) & 0x000000ff;
long width = xsize;
header[18] = width & 0x000000ff;
header[19] = (width >> 8) &0x000000ff;
header[20] = (width >> 16) &0x000000ff;
header[21] = (width >> 24) &0x000000ff;
long height = ysize;
header[22] = height &0x000000ff;
header[23] = (height >> 8) &0x000000ff;
header[24] = (height >> 16) &0x000000ff;
header[25] = (height >> 24) &0x000000ff;
FILE *fp = fopen(filename, "wb"); // write straight to the given path; no fixed-size name buffer needed
if (!fp)
return -1;
fwrite(header, sizeof(unsigned char), 54, fp);
fwrite(image, sizeof(unsigned char), (size_t)(long)xsize * ysize * 4, fp);
fclose(fp);
return 0;
}
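One relevant BMP detail (a property of the format, not stated in the original post): with a positive height field, BMP pixel rows are stored bottom-up, so writing a top-down framebuffer verbatim comes out vertically flipped. A minimal sketch that replaces the single fwrite of the pixel data with a row-reversing loop, assuming bmp_write's variables:
// Write rows last-to-first so a top-down source comes out upright.
const unsigned char *src = (const unsigned char *)image;
size_t rowBytes = xsize * 4;
for (size_t y = 0; y < ysize; y++)
    fwrite(src + (ysize - 1 - y) * rowBytes, sizeof(unsigned char), rowBytes, fp);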
CGDisplayCreateImage(CGMainDisplayID())? I don't know if it works on iOS, but it works on macOS. Why are you using CGDisplayStream?
I am using an AVAssetWriter to record movie frames.
I was wondering how I would go about zooming/scaling a frame (it can be set at the beginning of the movie; it does not have to be on a per-frame basis).
I assume I should be able to do that with the CMSampleBuffer that I append to my assetWriterInput.
Pseudocode:
CMSampleBufferRef scaledSampleBuffer = [self scale:sampleBuffer];
[_videoWriterInput appendSampleBuffer:scaledSampleBuffer];
I have found some Stack Overflow questions on this. One of them creates a vImage_Buffer, but I have no idea how to get that into my _videoWriterInput.
I tried to convert it back with something like this:
NSInteger cropX0 = 100,
cropY0 = 100,
cropHeight = 100,
cropWidth = 100,
outWidth = 480,
outHeight = 480;
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer,0);
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
vImage_Buffer inBuff;
inBuff.height = cropHeight;
inBuff.width = cropWidth;
inBuff.rowBytes = bytesPerRow;
NSInteger startpos = cropY0*bytesPerRow+4*cropX0;
inBuff.data = baseAddress+startpos;
unsigned char *outImg= (unsigned char*)malloc(4*outWidth*outHeight);
vImage_Buffer outBuff = {outImg, outHeight, outWidth, 4*outWidth};
vImage_Error err = vImageScale_ARGB8888(&inBuff, &outBuff, NULL, 0);
if (err != kvImageNoError) NSLog(@" error %ld", err);
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
CVPixelBufferRef pixelBuffer;
CVPixelBufferCreate(kCFAllocatorSystemDefault, 480, 480, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);
CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
vImage_CGImageFormat format = {
.bitsPerComponent = 8,
.bitsPerPixel = 32,
.bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst, //BGRX8888
.colorSpace = NULL, //sRGB
};
vImageBuffer_CopyToCVPixelBuffer(&outBuff,
&format,
pixelBuffer,
NULL,
NULL,
kvImageNoFlags);
CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
CMSampleTimingInfo sampleTime = {
.duration = CMSampleBufferGetDuration(sampleBuffer),
.presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
.decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
};
CMVideoFormatDescriptionRef videoInfo = NULL;
CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
CMSampleBufferRef oBuf;
CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
But that doesn't work either (corrupted frames).
Copied together from other Stack Overflow articles, I came up with this code, but it results in a broken video:
CGAffineTransform transform = CGAffineTransformMakeScale(2, 2); // zoom 2x
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)
options:[NSDictionary dictionaryWithObjectsAndKeys:[NSNull null], kCIImageColorSpace, nil]];
ciImage = [[ciImage imageByApplyingTransform:transform] imageByCroppingToRect:CGRectMake(0, 0, 480, 480)];
CVPixelBufferRef pixelBuffer;
CVPixelBufferCreate(kCFAllocatorSystemDefault, 480, 480, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);
CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
CIContext * ciContext = [CIContext contextWithOptions: nil];
[ciContext render:ciImage toCVPixelBuffer:pixelBuffer];
CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
CMSampleTimingInfo sampleTime = {
.duration = CMSampleBufferGetDuration(sampleBuffer),
.presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
.decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
};
CMVideoFormatDescriptionRef videoInfo = NULL;
CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
CMSampleBufferRef oBuf;
CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
Any ideas?
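One approach worth sketching (an editor's suggestion, not from the thread): instead of re-wrapping the scaled pixels in a CMSampleBuffer, append them through an AVAssetWriterInputPixelBufferAdaptor, which supplies a pixel-buffer pool matched to the writer input. The adaptor name and its one-time setup below are assumptions:
// Assumed one-time setup elsewhere:
// _adaptor = [AVAssetWriterInputPixelBufferAdaptor
//     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput
//     sourcePixelBufferAttributes:@{
//         (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
//         (id)kCVPixelBufferWidthKey : @480,
//         (id)kCVPixelBufferHeightKey : @480 }];
CVPixelBufferRef scaled = NULL;
CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, _adaptor.pixelBufferPool, &scaled);
if (scaled) {
    [ciContext render:ciImage toCVPixelBuffer:scaled]; // reuse the CIImage from the code above
    if (_adaptor.assetWriterInput.readyForMoreMediaData) {
        [_adaptor appendPixelBuffer:scaled
               withPresentationTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }
    CVPixelBufferRelease(scaled);
}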
I want to capture a screenshot from a background service. A private API is fine, as I don't need to submit to the App Store. I have already tried UIGetScreenImage, and it does not work from a background app.
I am using the following code, which I got from SO. IOSurfaceCreate returns NULL for me. Any help appreciated.
CFMutableDictionaryRef dict;
IOSurfaceRef screenSurface = NULL;
char pixelFormat[4] = {'A','R','G','B'};
dict = CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
uint32_t width ;
uint32_t height;
void *pitch;
void *bPE;
void *size;
CFDictionarySetValue(dict, kIOSurfaceIsGlobal, kCFBooleanTrue);
CFDictionarySetValue(dict, kIOSurfaceBytesPerRow, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &pitch));
CFDictionarySetValue(dict, kIOSurfaceBytesPerElement, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bPE));
CFDictionarySetValue(dict, kIOSurfaceWidth, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &width));
CFDictionarySetValue(dict, kIOSurfaceHeight, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &height));
CFDictionarySetValue(dict, kIOSurfacePixelFormat, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, pixelFormat));
CFDictionarySetValue(dict, kIOSurfaceAllocSize, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &size));
IOSurfaceRef destSurf = IOSurfaceCreate(dict);
IOSurfaceAcceleratorRef outAcc;
IOSurfaceAcceleratorCreate(NULL, 0, &outAcc);
CFDictionaryRef ed = (__bridge CFDictionaryRef)[NSDictionary dictionaryWithObjectsAndKeys: nil];
IOSurfaceAcceleratorTransferSurface(outAcc, screenSurface, destSurf, ed, NULL);
uint32_t aseed;
IOSurfaceUnlock(screenSurface, kIOSurfaceLockReadOnly, &aseed);
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, IOSurfaceGetBaseAddress(destSurf), (width*height*4), NULL);
CGImageRef cgImage=CGImageCreate(width, height, 8, 8*4, IOSurfaceGetBytesPerRow(destSurf), CGColorSpaceCreateDeviceRGB(), kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little, provider, NULL, YES, kCGRenderingIntentDefault);
UIImage *image = [UIImage imageWithCGImage: cgImage];
UIImageWriteToSavedPhotosAlbum(image, self_object, nil, nil);
CGImageRelease(cgImage);
CFRelease(destSurf);
After combining code from two places, I was able to save a photo to the photo album (with my application in the background).
IOSurfaces - Artefacts in video and unable to grab video surfaces
https://github.com/tomcool420/SMFramework/blob/master/SMFScreenCapture.m
Please try it out and let me know if it works for you.
-(void)SavePhoto
{
IOMobileFramebufferConnection connect;
kern_return_t result;
IOSurfaceRef screenSurface = NULL;
io_service_t framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleH1CLCD"));
if(!framebufferService)
framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleM2CLCD"));
if(!framebufferService)
framebufferService = IOServiceGetMatchingService(kIOMasterPortDefault, IOServiceMatching("AppleCLCD"));
result = IOMobileFramebufferOpen(framebufferService, mach_task_self(), 0, &connect);
result = IOMobileFramebufferGetLayerDefaultSurface(connect, 0, &screenSurface);
uint32_t aseed;
IOSurfaceLock(screenSurface, kIOSurfaceLockReadOnly, &aseed);
uint32_t width = IOSurfaceGetWidth(screenSurface);
uint32_t height = IOSurfaceGetHeight(screenSurface);
//int m_width = 320;
//int m_height = 480;
CFMutableDictionaryRef dict;
int pitch = width*4, size = width*height*4;
int bPE=4;
char pixelFormat[4] = {'A','R','G','B'};
dict = CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
CFDictionarySetValue(dict, kIOSurfaceIsGlobal, kCFBooleanTrue);
CFDictionarySetValue(dict, kIOSurfaceBytesPerRow, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &pitch));
CFDictionarySetValue(dict, kIOSurfaceBytesPerElement, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bPE));
CFDictionarySetValue(dict, kIOSurfaceWidth, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &width));
CFDictionarySetValue(dict, kIOSurfaceHeight, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &height));
CFDictionarySetValue(dict, kIOSurfacePixelFormat, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, pixelFormat));
CFDictionarySetValue(dict, kIOSurfaceAllocSize, CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &size));
IOSurfaceRef destSurf = IOSurfaceCreate(dict);
IOSurfaceAcceleratorRef outAcc;
IOSurfaceAcceleratorCreate(NULL, 0, &outAcc);
IOSurfaceAcceleratorTransferSurface(outAcc, screenSurface, destSurf, dict, NULL);
IOSurfaceUnlock(screenSurface, kIOSurfaceLockReadOnly, &aseed);
CFRelease(outAcc);
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, IOSurfaceGetBaseAddress(destSurf), (width * height * 4), NULL);
CGImageRef cgImage=CGImageCreate(width, height, 8,
8*4, IOSurfaceGetBytesPerRow(destSurf),
CGColorSpaceCreateDeviceRGB(), kCGImageAlphaNoneSkipFirst |kCGBitmapByteOrder32Little,
provider, NULL,
YES, kCGRenderingIntentDefault);
UIImage *img = [UIImage imageWithCGImage:cgImage];
UIImageWriteToSavedPhotosAlbum(img, self, nil, nil);
// MOST RELEVANT PART OF CODE
//CVPixelBufferCreateWithBytes(NULL, width, height, kCVPixelFormatType_32BGRA, IOSurfaceGetBaseAddress(destSurf), IOSurfaceGetBytesPerRow(destSurf), NULL, NULL, NULL, &sampleBuffer);
}
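For the recording case hinted at by the commented-out line above, a hedged completion (an editor's sketch: 'adaptor' is an assumed AVAssetWriterInputPixelBufferAdaptor and 'pts' an assumed frame timestamp, both set up elsewhere; destSurf must stay alive while the encoder reads the bytes):
CVPixelBufferRef frame = NULL;
CVPixelBufferCreateWithBytes(NULL, width, height, kCVPixelFormatType_32BGRA,
                             IOSurfaceGetBaseAddress(destSurf),
                             IOSurfaceGetBytesPerRow(destSurf),
                             NULL, NULL, NULL, &frame);
if (frame && adaptor.assetWriterInput.readyForMoreMediaData)
    [adaptor appendPixelBuffer:frame withPresentationTime:pts];
CVPixelBufferRelease(frame);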