I cloned this project from GitHub, and when I built and ran it I got a SIGABRT error. The console says:
Make a symbolic breakpoint at UIViewAlertForUnsatisfiableConstraints to catch this in the debugger.
The methods in the UIConstraintBasedLayoutDebugging category on UIView listed in <UIKit/UIView.h> may also be helpful.
MoltinSwiftExample(10351,0x700000323000) malloc: *** error for object 0x7ff288d71cc0: pointer being freed was not allocated
*** set a breakpoint in malloc_error_break to debug
Here is my code.
#import "SDWebImageDownloaderOperation.h"
#import "SDWebImageDecoder.h"
#import "UIImage+MultiFormat.h"
#import <ImageIO/ImageIO.h>
#import "SDWebImageManager.h"
NSString *const SDWebImageDownloadStartNotification = @"SDWebImageDownloadStartNotification";
NSString *const SDWebImageDownloadReceiveResponseNotification = @"SDWebImageDownloadReceiveResponseNotification";
NSString *const SDWebImageDownloadStopNotification = @"SDWebImageDownloadStopNotification";
NSString *const SDWebImageDownloadFinishNotification = @"SDWebImageDownloadFinishNotification";
@interface SDWebImageDownloaderOperation () <NSURLConnectionDataDelegate>
@property (copy, nonatomic) SDWebImageDownloaderProgressBlock progressBlock;
@property (copy, nonatomic) SDWebImageDownloaderCompletedBlock completedBlock;
@property (copy, nonatomic) SDWebImageNoParamsBlock cancelBlock;
@property (assign, nonatomic, getter = isExecuting) BOOL executing;
@property (assign, nonatomic, getter = isFinished) BOOL finished;
@property (strong, nonatomic) NSMutableData *imageData;
@property (strong, nonatomic) NSURLConnection *connection;
@property (strong, atomic) NSThread *thread;
#if TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_4_0
@property (assign, nonatomic) UIBackgroundTaskIdentifier backgroundTaskId;
#endif
@end
@implementation SDWebImageDownloaderOperation {
size_t width, height;
UIImageOrientation orientation;
BOOL responseFromCached;
}
@synthesize executing = _executing;
@synthesize finished = _finished;
- (id)initWithRequest:(NSURLRequest *)request
options:(SDWebImageDownloaderOptions)options
progress:(SDWebImageDownloaderProgressBlock)progressBlock
completed:(SDWebImageDownloaderCompletedBlock)completedBlock
cancelled:(SDWebImageNoParamsBlock)cancelBlock {
if ((self = [super init])) {
_request = request;
_shouldDecompressImages = YES;
_shouldUseCredentialStorage = YES;
_options = options;
_progressBlock = [progressBlock copy];
_completedBlock = [completedBlock copy];
_cancelBlock = [cancelBlock copy];
_executing = NO;
_finished = NO;
_expectedSize = 0;
responseFromCached = YES; // Initially wrong until `connection:willCacheResponse:` is called or not called
}
return self;
}
- (void)start {
@synchronized (self) {
if (self.isCancelled) {
self.finished = YES;
[self reset];
return;
}
#if TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_4_0
Class UIApplicationClass = NSClassFromString(@"UIApplication");
BOOL hasApplication = UIApplicationClass && [UIApplicationClass respondsToSelector:@selector(sharedApplication)];
if (hasApplication && [self shouldContinueWhenAppEntersBackground]) {
__weak __typeof__ (self) wself = self;
UIApplication * app = [UIApplicationClass performSelector:@selector(sharedApplication)];
self.backgroundTaskId = [app beginBackgroundTaskWithExpirationHandler:^{
__strong __typeof (wself) sself = wself;
if (sself) {
[sself cancel];
[app endBackgroundTask:sself.backgroundTaskId];
sself.backgroundTaskId = UIBackgroundTaskInvalid;
}
}];
}
#endif
self.executing = YES;
self.connection = [[NSURLConnection alloc] initWithRequest:self.request delegate:self startImmediately:NO];
self.thread = [NSThread currentThread];
}
[self.connection start];
if (self.connection) {
if (self.progressBlock) {
self.progressBlock(0, NSURLResponseUnknownLength);
}
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadStartNotification object:self];
});
if (floor(NSFoundationVersionNumber) <= NSFoundationVersionNumber_iOS_5_1) {
// Make sure to run the runloop in our background thread so it can process downloaded data
// Note: we use a timeout to work around an issue with NSURLConnection cancel under iOS 5
// not waking up the runloop, leading to dead threads (see https://github.com/rs/SDWebImage/issues/466)
CFRunLoopRunInMode(kCFRunLoopDefaultMode, 10, false);
}
else {
CFRunLoopRun();
}
if (!self.isFinished) {
[self.connection cancel];
[self connection:self.connection didFailWithError:[NSError errorWithDomain:NSURLErrorDomain code:NSURLErrorTimedOut userInfo:@{NSURLErrorFailingURLErrorKey : self.request.URL}]];
}
}
else {
if (self.completedBlock) {
self.completedBlock(nil, nil, [NSError errorWithDomain:NSURLErrorDomain code:0 userInfo:@{NSLocalizedDescriptionKey : @"Connection can't be initialized"}], YES);
}
}
#if TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_4_0
Class UIApplicationClass = NSClassFromString(@"UIApplication");
if(!UIApplicationClass || ![UIApplicationClass respondsToSelector:@selector(sharedApplication)]) {
return;
}
if (self.backgroundTaskId != UIBackgroundTaskInvalid) {
UIApplication * app = [UIApplication performSelector:@selector(sharedApplication)];
[app endBackgroundTask:self.backgroundTaskId];
self.backgroundTaskId = UIBackgroundTaskInvalid;
}
#endif
}
- (void)cancel {
@synchronized (self) {
if (self.thread) {
[self performSelector:@selector(cancelInternalAndStop) onThread:self.thread withObject:nil waitUntilDone:NO];
}
else {
[self cancelInternal];
}
}
}
- (void)cancelInternalAndStop {
if (self.isFinished) return;
[self cancelInternal];
CFRunLoopStop(CFRunLoopGetCurrent());
}
- (void)cancelInternal {
if (self.isFinished) return;
[super cancel];
if (self.cancelBlock) self.cancelBlock();
if (self.connection) {
[self.connection cancel];
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadStopNotification object:self];
});
// As we cancelled the connection, its callback won't be called and thus won't
// maintain the isFinished and isExecuting flags.
if (self.isExecuting) self.executing = NO;
if (!self.isFinished) self.finished = YES;
}
[self reset];
}
- (void)done {
self.finished = YES;
self.executing = NO;
[self reset];
}
- (void)reset {
self.cancelBlock = nil;
self.completedBlock = nil;
self.progressBlock = nil;
self.connection = nil;
self.imageData = nil;
self.thread = nil;
}
- (void)setFinished:(BOOL)finished {
[self willChangeValueForKey:@"isFinished"];
_finished = finished;
[self didChangeValueForKey:@"isFinished"];
}
- (void)setExecuting:(BOOL)executing {
[self willChangeValueForKey:@"isExecuting"];
_executing = executing;
[self didChangeValueForKey:@"isExecuting"];
}
- (BOOL)isConcurrent {
return YES;
}
#pragma mark NSURLConnection (delegate)
- (void)connection:(NSURLConnection *)connection didReceiveResponse:(NSURLResponse *)response {
//'304 Not Modified' is an exceptional one
if (![response respondsToSelector:@selector(statusCode)] || ([((NSHTTPURLResponse *)response) statusCode] < 400 && [((NSHTTPURLResponse *)response) statusCode] != 304)) {
NSInteger expected = response.expectedContentLength > 0 ? (NSInteger)response.expectedContentLength : 0;
self.expectedSize = expected;
if (self.progressBlock) {
self.progressBlock(0, expected);
}
self.imageData = [[NSMutableData alloc] initWithCapacity:expected];
self.response = response;
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadReceiveResponseNotification object:self];
});
}
else {
NSUInteger code = [((NSHTTPURLResponse *)response) statusCode];
//This is the case when server returns '304 Not Modified'. It means that remote image is not changed.
//In case of 304 we need just cancel the operation and return cached image from the cache.
if (code == 304) {
[self cancelInternal];
} else {
[self.connection cancel];
}
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadStopNotification object:self];
});
if (self.completedBlock) {
self.completedBlock(nil, nil, [NSError errorWithDomain:NSURLErrorDomain code:[((NSHTTPURLResponse *)response) statusCode] userInfo:nil], YES);
}
CFRunLoopStop(CFRunLoopGetCurrent());
[self done];
}
}
- (void)connection:(NSURLConnection *)connection didReceiveData:(NSData *)data {
[self.imageData appendData:data];
if ((self.options & SDWebImageDownloaderProgressiveDownload) && self.expectedSize > 0 && self.completedBlock) {
// The following code is from http://www.cocoaintheshell.com/2011/05/progressive-images-download-imageio/
// Thanks to the author @Nyx0uf
// Get the total bytes downloaded
const NSInteger totalSize = self.imageData.length;
// Update the data source, we must pass ALL the data, not just the new bytes
CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)self.imageData, NULL);
if (width + height == 0) {
CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, NULL);
if (properties) {
NSInteger orientationValue = -1;
CFTypeRef val = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight);
if (val) CFNumberGetValue(val, kCFNumberLongType, &height);
val = CFDictionaryGetValue(properties, kCGImagePropertyPixelWidth);
if (val) CFNumberGetValue(val, kCFNumberLongType, &width);
val = CFDictionaryGetValue(properties, kCGImagePropertyOrientation);
if (val) CFNumberGetValue(val, kCFNumberNSIntegerType, &orientationValue);
CFRelease(properties);
// When we draw to Core Graphics, we lose orientation information,
// which means the image below born of initWithCGIImage will be
// oriented incorrectly sometimes. (Unlike the image born of initWithData
// in connectionDidFinishLoading.) So save it here and pass it on later.
orientation = [[self class] orientationFromPropertyValue:(orientationValue == -1 ? 1 : orientationValue)];
}
}
if (width + height > 0 && totalSize < self.expectedSize) {
// Create the image
CGImageRef partialImageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, NULL);
#ifdef TARGET_OS_IPHONE
// Workaround for iOS anamorphic image
if (partialImageRef) {
const size_t partialHeight = CGImageGetHeight(partialImageRef);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef bmContext = CGBitmapContextCreate(NULL, width, height, 8, width * 4, colorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
CGColorSpaceRelease(colorSpace);
if (bmContext) {
CGContextDrawImage(bmContext, (CGRect){.origin.x = 0.0f, .origin.y = 0.0f, .size.width = width, .size.height = partialHeight}, partialImageRef);
CGImageRelease(partialImageRef);
partialImageRef = CGBitmapContextCreateImage(bmContext);
CGContextRelease(bmContext);
}
else {
CGImageRelease(partialImageRef);
partialImageRef = nil;
}
}
#endif
if (partialImageRef) {
UIImage *image = [UIImage imageWithCGImage:partialImageRef scale:1 orientation:orientation];
NSString *key = [[SDWebImageManager sharedManager] cacheKeyForURL:self.request.URL];
UIImage *scaledImage = [self scaledImageForKey:key image:image];
if (self.shouldDecompressImages) {
image = [UIImage decodedImageWithImage:scaledImage];
}
else {
image = scaledImage;
}
CGImageRelease(partialImageRef);
dispatch_main_sync_safe(^{
if (self.completedBlock) {
self.completedBlock(image, nil, nil, NO);
}
});
}
}
CFRelease(imageSource);
}
if (self.progressBlock) {
self.progressBlock(self.imageData.length, self.expectedSize);
}
}
+ (UIImageOrientation)orientationFromPropertyValue:(NSInteger)value {
switch (value) {
case 1:
return UIImageOrientationUp;
case 3:
return UIImageOrientationDown;
case 8:
return UIImageOrientationLeft;
case 6:
return UIImageOrientationRight;
case 2:
return UIImageOrientationUpMirrored;
case 4:
return UIImageOrientationDownMirrored;
case 5:
return UIImageOrientationLeftMirrored;
case 7:
return UIImageOrientationRightMirrored;
default:
return UIImageOrientationUp;
}
}
- (UIImage *)scaledImageForKey:(NSString *)key image:(UIImage *)image {
return SDScaledImageForKey(key, image);
}
- (void)connectionDidFinishLoading:(NSURLConnection *)aConnection {
SDWebImageDownloaderCompletedBlock completionBlock = self.completedBlock;
@synchronized(self) {
CFRunLoopStop(CFRunLoopGetCurrent());
self.thread = nil;
self.connection = nil;
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadStopNotification object:self];
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadFinishNotification object:self];
});
}
if (![[NSURLCache sharedURLCache] cachedResponseForRequest:_request]) {
responseFromCached = NO;
}
if (completionBlock) {
if (self.options & SDWebImageDownloaderIgnoreCachedResponse && responseFromCached) {
completionBlock(nil, nil, nil, YES);
} else if (self.imageData) {
UIImage *image = [UIImage sd_imageWithData:self.imageData];
NSString *key = [[SDWebImageManager sharedManager] cacheKeyForURL:self.request.URL];
image = [self scaledImageForKey:key image:image];
// Do not force decoding animated GIFs
if (!image.images) {
if (self.shouldDecompressImages) {
image = [UIImage decodedImageWithImage:image];
}
}
if (CGSizeEqualToSize(image.size, CGSizeZero)) {
completionBlock(nil, nil, [NSError errorWithDomain:SDWebImageErrorDomain code:0 userInfo:@{NSLocalizedDescriptionKey : @"Downloaded image has 0 pixels"}], YES);
}
else {
completionBlock(image, self.imageData, nil, YES);
}
} else {
completionBlock(nil, nil, [NSError errorWithDomain:SDWebImageErrorDomain code:0 userInfo:@{NSLocalizedDescriptionKey : @"Image data is nil"}], YES);
}
}
self.completionBlock = nil;
[self done];
}
- (void)connection:(NSURLConnection *)connection didFailWithError:(NSError *)error {
@synchronized(self) {
CFRunLoopStop(CFRunLoopGetCurrent());
self.thread = nil;
self.connection = nil;
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:SDWebImageDownloadStopNotification object:self];
});
}
if (self.completedBlock) {
self.completedBlock(nil, nil, error, YES);
}
self.completionBlock = nil;
[self done];
}
- (NSCachedURLResponse *)connection:(NSURLConnection *)connection willCacheResponse:(NSCachedURLResponse *)cachedResponse {
responseFromCached = NO; // If this method is called, it means the response wasn't read from cache
if (self.request.cachePolicy == NSURLRequestReloadIgnoringLocalCacheData) {
// Prevents caching of responses
return nil;
}
else {
return cachedResponse;
}
}
- (BOOL)shouldContinueWhenAppEntersBackground {
return self.options & SDWebImageDownloaderContinueInBackground;
}
- (BOOL)connectionShouldUseCredentialStorage:(NSURLConnection __unused *)connection {
return self.shouldUseCredentialStorage;
}
- (void)connection:(NSURLConnection *)connection willSendRequestForAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge{
if ([challenge.protectionSpace.authenticationMethod isEqualToString:NSURLAuthenticationMethodServerTrust]) {
if (!(self.options & SDWebImageDownloaderAllowInvalidSSLCertificates) &&
[challenge.sender respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)]) {
[challenge.sender performDefaultHandlingForAuthenticationChallenge:challenge];
} else {
NSURLCredential *credential = [NSURLCredential credentialForTrust:challenge.protectionSpace.serverTrust];
[[challenge sender] useCredential:credential forAuthenticationChallenge:challenge];
}
} else {
if ([challenge previousFailureCount] == 0) {
if (self.credential) {
[[challenge sender] useCredential:self.credential forAuthenticationChallenge:challenge];
} else {
[[challenge sender] continueWithoutCredentialForAuthenticationChallenge:challenge];
}
} else {
[[challenge sender] continueWithoutCredentialForAuthenticationChallenge:challenge];
}
}
}
@end
The line with the error is:
CFRunLoopRun();
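For anyone hitting the same crash, a minimal sketch of acting on the two console hints above (both breakpoint names are taken verbatim from that output, nothing else is assumed) is to set them in lldb before reproducing the crash:
(lldb) breakpoint set --name UIViewAlertForUnsatisfiableConstraints
(lldb) breakpoint set --name malloc_error_break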
I am trying to pass some simple data from my view controller in Swift to a view controller in Objective-C, but I have little experience with Obj-C and I can't find any information on how to accomplish it.
Here is my Swift ViewController code:
func navigateToUpdateViewController(imageURL: URL) {
let storyboard = UIStoryboard(name: "Main", bundle: nil)
let vc = storyboard.instantiateViewController(withIdentifier: "suotaViewController") as! SUOTAViewController
vc.imageURL = imageURL
self.navigationController?.pushViewController(vc, animated: true)
}
I am trying to figure out how to add the code on the Obj-C side to make this work. I just need the imageURL property to be set. I have tried this code, but I don't really understand it and it doesn't work:
@property (nonatomic, retain) NSURL *imageURL;
Here is the Obj-C ViewController:
#define UIALERTVIEW_TAG_REBOOT 1
#import "SUOTAViewController.h"
@interface SUOTAViewController ()
@property (nonatomic, retain) NSURL *imageURL;
@end
@implementation SUOTAViewController
@synthesize blockSize;
- (void)viewDidLoad {
[super viewDidLoad];
self.textView.text = @"";
storage = [ParameterStorage getInstance];
manager = storage.manager;
[self.progressView setProgress:0];
[self.progressTextLabel setText:[NSString stringWithFormat:@"%d%%", 0]];
// Enable notifications on the status characteristic
[manager setNotifications:GenericServiceManager.SPOTA_SERVICE_CBUUID characteristicUUID:GenericServiceManager.SPOTA_SERV_STATUS_CBUUID enable:YES];
}
- (void) viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
[UIApplication sharedApplication].idleTimerDisabled = YES;
// Enable notifications for BLE events
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(didDisconnectFromDevice:)
name:BluetoothManagerDisconnectedFromDevice
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(didUpdateValueForCharacteristic:)
name:GenericServiceManagerDidReceiveValue
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(didSendValueForCharacteristic:)
name:GenericServiceManagerDidSendValue
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(onBleOperationError:)
name:GenericServiceManagerWriteError
object:nil];
step = 1;
[self doStep];
}
- (void) viewWillDisappear:(BOOL)animated {
[super viewWillDisappear:animated];
[UIApplication sharedApplication].idleTimerDisabled = NO;
// Disable notifications for BLE events
[[NSNotificationCenter defaultCenter] removeObserver:self name:BluetoothManagerDisconnectedFromDevice object:nil];
[[NSNotificationCenter defaultCenter] removeObserver:self name:GenericServiceManagerDidReceiveValue object:nil];
[[NSNotificationCenter defaultCenter] removeObserver:self name:GenericServiceManagerDidSendValue object:nil];
[[NSNotificationCenter defaultCenter] removeObserver:self name:GenericServiceManagerWriteError object:nil];
}
- (void) didDisconnectFromDevice: (NSNotification*)notification {
if (step != 8) {
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:step != 7 ? @"Upload Failed" : @"Device Disconnected"
message:@"The connection to the remote device was lost."
delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alertView show];
}
}
- (void) didUpdateValueForCharacteristic: (NSNotification*)notification {
CBCharacteristic *characteristic = (CBCharacteristic*) notification.object;
if ([characteristic.UUID isEqual:GenericServiceManager.SPOTA_SERV_STATUS_CBUUID]) {
char value;
[characteristic.value getBytes:&value length:sizeof(char)];
NSString *message = [self getErrorMessage:value];
[self debug:message UILog:(value != SPOTAR_CMP_OK)];
if (expectedValue != 0) {
// Check if value equals the expected value
if (value == expectedValue) {
// If so, continue with the next step
step = nextStep;
expectedValue = 0; // Reset
[self doStep];
} else {
// Else display an error message
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Error" message:message delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alertView show];
expectedValue = 0; // Reset
}
}
}
}
- (void) didSendValueForCharacteristic: (NSNotification*)notification {
if (step && step != 7) {
[self doStep];
}
}
- (void) onBleOperationError: (NSNotification*)notification {
[self debug:[NSString stringWithFormat:#"Error in BLE operation on characteristic %#", ((CBCharacteristic*)notification.object).UUID] UILog:YES];
if (step != 8) {
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:#"Upload Failed"
message:#"The firmware upload procedure encountered an error."
delegate:nil cancelButtonTitle:#"OK" otherButtonTitles:nil];
[alertView show];
}
}
- (void) doStep {
[self debug:[NSString stringWithFormat:#"*** Next step: %d", step] UILog:NO];
switch (step) {
case 1: {
// Step 1: Set memory type
step = 0;
expectedValue = 0x10;
nextStep = 2;
uploadStart = [NSDate date];
uint32_t _memDevData = (self.memoryType << 24) | self.memoryBank;
[self debug:[NSString stringWithFormat:#"Set SPOTA_MEM_DEV: %#010x", _memDevData] UILog:YES];
NSData *memDevData = [NSData dataWithBytes:&_memDevData length:sizeof(uint32_t)];
[manager writeValue:GenericServiceManager.SPOTA_SERVICE_CBUUID characteristicUUID:GenericServiceManager.SPOTA_MEM_DEV_CBUUID data:memDevData];
break;
}
case 2: {
// Step 2: Set memory params
uint32_t _memInfoData = 0;
if (self.memoryType == MEM_TYPE_SUOTA_SPI) {
_memInfoData = (self.spiMISOGPIO << 24) | (self.spiMOSIGPIO << 16) | (self.spiCSGPIO << 8) | self.spiSCKGPIO;
} else if (self.memoryType == MEM_TYPE_SUOTA_I2C) {
_memInfoData = (self.i2cAddress << 16) | (self.i2cSCLGPIO << 8) | self.i2cSDAGPIO;
}
[self debug:[NSString stringWithFormat:#"Set SPOTA_GPIO_MAP: %#010x", _memInfoData] UILog:YES];
NSData *memInfoData = [NSData dataWithBytes:&_memInfoData length:sizeof(uint32_t)];
step = 3;
[manager writeValue:GenericServiceManager.SPOTA_SERVICE_CBUUID characteristicUUID:GenericServiceManager.SPOTA_GPIO_MAP_CBUUID data:memInfoData];
break;
}
case 3: {
// Load patch data
[self debug:[NSString stringWithFormat:#"Loading data from %#", storage.file_url.absoluteString.stringByRemovingPercentEncoding] UILog:YES];
fileData = [[NSData dataWithContentsOfURL:storage.file_url] mutableCopy];
[self appendChecksum];
[self debug:[NSString stringWithFormat:#"Upload size: %d bytes", (int) [fileData length]] UILog:YES];
// Step 3: Set patch length
chunkSize = MIN(manager.suotaPatchDataSize, manager.suotaMtu - 3);
blockSize = MAX(blockSize, chunkSize);
if (blockSize > fileData.length) {
blockSize = fileData.length;
if (chunkSize > blockSize)
chunkSize = blockSize;
}
blockStartByte = 0;
[self debug:[NSString stringWithFormat:#"Chunk size: %d bytes", chunkSize] UILog:YES];
step = 4;
[self doStep];
break;
}
case 4: {
// Set patch length
[self debug:[NSString stringWithFormat:#"Set SPOTA_PATCH_LEN: %d", blockSize] UILog:YES];
NSData *patchLengthData = [NSData dataWithBytes:&blockSize length:sizeof(uint16_t)];
step = 5;
[manager writeValue:GenericServiceManager.SPOTA_SERVICE_CBUUID characteristicUUID:GenericServiceManager.SPOTA_PATCH_LEN_CBUUID data:patchLengthData];
break;
}
case 5: {
// Send current block in chunks of 20 bytes
if (blockStartByte == 0)
[self debug:#"Upload procedure started" UILog:YES];
step = 0;
expectedValue = 0x02;
nextStep = 5;
int dataLength = (int) [fileData length];
int chunkStartByte = 0;
while (chunkStartByte < blockSize) {
// Check if we have less than current block-size bytes remaining
int bytesRemaining = blockSize - chunkStartByte;
int currChunkSize = bytesRemaining >= chunkSize ? chunkSize : bytesRemaining;
[self debug:[NSString stringWithFormat:#"Sending bytes %d to %d (%d/%d) of %d", blockStartByte + chunkStartByte + 1, blockStartByte + chunkStartByte + currChunkSize, chunkStartByte + currChunkSize, blockSize, dataLength] UILog:NO];
double progress = (double)(blockStartByte + chunkStartByte + currChunkSize) / (double)dataLength;
[self.progressView setProgress:progress];
[self.progressTextLabel setText:[NSString stringWithFormat:#"%d%%", (int)(100 * progress)]];
// Step 4: Send next n bytes of the patch
char bytes[currChunkSize];
[fileData getBytes:bytes range:NSMakeRange(blockStartByte + chunkStartByte, currChunkSize)];
NSData *byteData = [NSData dataWithBytes:bytes length:currChunkSize];
// On to the chunk
chunkStartByte += currChunkSize;
// Check if we are passing the current block
if (chunkStartByte >= blockSize) {
// Prepare for next block
blockStartByte += blockSize;
int bytesRemaining = dataLength - blockStartByte;
if (bytesRemaining == 0) {
nextStep = 6;
} else if (bytesRemaining < blockSize) {
blockSize = bytesRemaining;
nextStep = 4; // Back to step 4, setting the patch length
}
}
[manager writeValueWithoutResponse:GenericServiceManager.SPOTA_SERVICE_CBUUID characteristicUUID:GenericServiceManager.SPOTA_PATCH_DATA_CBUUID data:byteData];
}
break;
}
case 6: {
// Send SUOTA END command
step = 0;
expectedValue = 0x02;
nextStep = 7;
uint32_t suotaEnd = 0xFE000000;
[self debug:[NSString stringWithFormat:#"Send SUOTA END command: %#010x", suotaEnd] UILog:YES];
NSData *suotaEndData = [NSData dataWithBytes:&suotaEnd length:sizeof(uint32_t)];
[manager writeValue:GenericServiceManager.SPOTA_SERVICE_CBUUID characteristicUUID:GenericServiceManager.SPOTA_MEM_DEV_CBUUID data:suotaEndData];
break;
}
case 7: {
[self debug:#"Upload completed" UILog:YES];
NSTimeInterval elapsed = [[NSDate date] timeIntervalSinceDate:uploadStart];
[self debug:[NSString stringWithFormat:#"Elapsed time: %.3f", elapsed] UILog:YES];
// Wait for user to confirm reboot
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:#"Device has been updated" message:#"Do you wish to reboot the device?" delegate:self cancelButtonTitle:#"No" otherButtonTitles:#"Yes, reboot", nil];
[alert setTag:UIALERTVIEW_TAG_REBOOT];
[alert show];
break;
}
case 8: {
// Go back to overview of devices
[self dismissViewControllerAnimated:YES completion:nil];
break;
}
}
}
- (void)alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)buttonIndex {
if (alertView.tag == UIALERTVIEW_TAG_REBOOT) {
if (buttonIndex != alertView.cancelButtonIndex) {
// Send reboot signal to device
step = 8;
uint32_t suotaReboot = 0xFD000000;
[self debug:[NSString stringWithFormat:#"Send SUOTA REBOOT command: %#010x", suotaReboot] UILog:YES];
NSData *suotaRebootData = [NSData dataWithBytes:&suotaReboot length:sizeof(uint32_t)];
[manager writeValue:GenericServiceManager.SPOTA_SERVICE_CBUUID characteristicUUID:GenericServiceManager.SPOTA_MEM_DEV_CBUUID data:suotaRebootData];
}
}
}
- (void) debug:(NSString*)message UILog:(BOOL)uiLog {
if (uiLog) {
self.textView.text = [self.textView.text stringByAppendingString:[NSString stringWithFormat:@"\n%@", message]];
[self.textView scrollRangeToVisible:NSMakeRange([self.textView.text length], 0)];
}
NSLog(@"%@", message);
}
- (void) appendChecksum {
uint8_t crc_code = 0;
const char *bytes = [fileData bytes];
for (int i = 0; i < [fileData length]; i++) {
crc_code ^= bytes[i];
}
[self debug:[NSString stringWithFormat:#"Checksum for file: %#4x", crc_code] UILog:YES];
[fileData appendBytes:&crc_code length:sizeof(uint8_t)];
}
@end
First, your Obj-C class declares imageURL as a private property, so you haven't exposed it for use elsewhere.
So, let's start simple...
SUOTAViewController.h
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
@interface SUOTAViewController : UIViewController
@property (strong, nonatomic) NSURL *imageURL;
@end
NS_ASSUME_NONNULL_END
SUOTAViewController.m
#import "SUOTAViewController.h"
@interface SUOTAViewController ()
@end
@implementation SUOTAViewController
- (void)viewDidLoad {
[super viewDidLoad];
NSLog(@"Image URL: %@", _imageURL);
}
@end
YourProject-Bridging-Header.h
//
// Use this file to import your target's public headers that you would like to expose to Swift.
//
#import "SUOTAViewController.h"
Swift View Controller
guard let imageURL = URL(string: "https://i.stack.imgur.com/Cv0gs.png") else { return }
let storyboard = UIStoryboard(name: "Main", bundle: nil)
if let vc = storyboard.instantiateViewController(withIdentifier: "suotaViewController") as? SUOTAViewController {
vc.imageURL = imageURL
self.navigationController?.pushViewController(vc, animated: true)
}
The Obj-C class above will output the URL to the debug console.
You can do whatever you want with it after that.
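As a follow-up, here is a minimal sketch of doing something with the URL once it is set; the imageView outlet and the use of NSURLSession are assumptions for illustration, not part of the original answer:
- (void)viewDidLoad {
    [super viewDidLoad];
    NSLog(@"Image URL: %@", self.imageURL);
    // Hypothetical: download whatever the URL points to and display it in an
    // assumed `imageView` outlet (not part of the original answer).
    __weak typeof(self) weakSelf = self;
    [[[NSURLSession sharedSession] dataTaskWithURL:self.imageURL
                                 completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
        if (!data || error) { return; }
        UIImage *image = [UIImage imageWithData:data];
        dispatch_async(dispatch_get_main_queue(), ^{
            weakSelf.imageView.image = image;
        });
    }] resume];
}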
I'm trying to run my application on an iPhone 6 (iOS 9.3.2) with Xcode 7.3, but it crashes every time I move from one tab to another. The exception points to this class, at the method marked "error" below.
Can anyone please help me out with this?
I'm attaching the code for your reference.
Thanks.
//
// AsyncImageView.m
//
#import "AsyncImageView.h"
#import <objc/message.h>
#define MAX_IMAGE_SIZE 1.5 * 1024 * 1024
#define RESIZE_IMG 0
NSString *const AsyncImageLoadDidFinish = @"AsyncImageLoadDidFinish";
NSString *const AsyncImageLoadDidFail = @"AsyncImageLoadDidFail";
NSString *const AsyncImageTargetReleased = @"AsyncImageTargetReleased";
NSString *const AsyncImageImageKey = @"image";
NSString *const AsyncImageURLKey = @"URL";
NSString *const AsyncImageCacheKey = @"cache";
NSString *const AsyncImageErrorKey = @"error";
@interface AsyncImageConnection : NSObject
@property (nonatomic, strong) NSURLConnection *connection;
@property (nonatomic, strong) NSMutableData *data;
@property (nonatomic, strong) NSURL *URL;
@property (nonatomic, strong) NSCache *cache;
@property (nonatomic, strong) id target;
@property (nonatomic, assign) SEL success;
@property (nonatomic, assign) SEL failure;
@property (nonatomic, readonly, getter = isLoading) BOOL loading;
@property (nonatomic, readonly) BOOL cancelled;
- (AsyncImageConnection *)initWithURL:(NSURL *)URL
cache:(NSCache *)cache
target:(id)target
success:(SEL)success
failure:(SEL)failure;
- (void)start;
- (void)cancel;
- (BOOL)isInCache;
@end
@implementation AsyncImageConnection
@synthesize connection = _connection;
@synthesize data = _data;
@synthesize URL = _URL;
@synthesize cache = _cache;
@synthesize target = _target;
@synthesize success = _success;
@synthesize failure = _failure;
@synthesize loading = _loading;
@synthesize cancelled = _cancelled;
- (AsyncImageConnection *)initWithURL:(NSURL *)URL
cache:(NSCache *)cache
target:(id)target
success:(SEL)success
failure:(SEL)failure
{
if ((self = [self init]))
{
self.URL = URL;
self.cache = cache;
self.target = target;
self.success = success;
self.failure = failure;
}
return self;
}
- (UIImage *)cachedImage
{
if ([_URL isFileURL])
{
NSString *path = [[_URL absoluteURL] path];
NSString *resourcePath = [[NSBundle mainBundle] resourcePath];
if ([path hasPrefix:resourcePath])
{
return [UIImage imageNamed:[path substringFromIndex:[resourcePath length]]];
}
}
return [_cache objectForKey:_URL];
}
- (BOOL)isInCache
{
return [self cachedImage] != nil;
}
- (void)loadFailedWithError:(NSError *)error
{
_loading = NO;
_cancelled = NO;
[[NSNotificationCenter defaultCenter] postNotificationName:AsyncImageLoadDidFail
object:_target
userInfo:[NSDictionary dictionaryWithObjectsAndKeys:
_URL, AsyncImageURLKey,
error, AsyncImageErrorKey,
nil]];
}
- (void)cacheImage:(UIImage *)image
{
if (!_cancelled)
{
if (image && _URL)
{
BOOL storeInCache = YES;
if ([_URL isFileURL])
{
if ([[[_URL absoluteURL] path] hasPrefix:[[NSBundle mainBundle] resourcePath]])
{
//do not store in cache
storeInCache = NO;
}
}
if (storeInCache)
{
#if RESIZE_IMG
// resize the image before storing
NSData *data = UIImageJPEGRepresentation(image, 1.0);
if (data.length > MAX_IMAGE_SIZE)
{
image = [HFUtils imageWithImage:image scaledBy:MAX_IMAGE_SIZE / (float)data.length];
}
#endif
[_cache setObject:image forKey:_URL];
}
}
NSMutableDictionary *userInfo = [NSMutableDictionary dictionaryWithObjectsAndKeys:
image, AsyncImageImageKey,
_URL, AsyncImageURLKey,
nil];
if (_cache)
{
[userInfo setObject:_cache forKey:AsyncImageCacheKey];
}
_loading = NO;
[[NSNotificationCenter defaultCenter] postNotificationName:AsyncImageLoadDidFinish
object:_target
userInfo:[[userInfo copy] autorelease]];
}
else
{
_loading = NO;
_cancelled = NO;
}
}
- (void)processDataInBackground:(NSData *)data
{
@synchronized ([self class])
{
if (!_cancelled)
{
UIImage *image = [[UIImage alloc] initWithData:data];
if (image)
{
//add to cache (may be cached already but it doesn't matter)
[self performSelectorOnMainThread:@selector(cacheImage:)
withObject:image
waitUntilDone:YES];
[image release];
}
else
{
@autoreleasepool
{
NSError *error = [NSError errorWithDomain:@"AsyncImageLoader" code:0 userInfo:[NSDictionary dictionaryWithObject:@"Invalid image data" forKey:NSLocalizedDescriptionKey]];
[self performSelectorOnMainThread:@selector(loadFailedWithError:) withObject:error waitUntilDone:YES];
}
}
}
else
{
//clean up
[self performSelectorOnMainThread:@selector(cacheImage:)
withObject:nil
waitUntilDone:YES];
}
}
}
- (void)connection:(NSURLConnection *)connection didReceiveResponse:(NSURLResponse *)response
{
self.data = [NSMutableData data];
}
- (void)connection:(NSURLConnection *)connection didReceiveData:(NSData *)data
{
//add data
[_data appendData:data];
}
- (void)connectionDidFinishLoading:(NSURLConnection *)connection
{
[self performSelectorInBackground:#selector(processDataInBackground:) withObject:_data];
self.connection = nil;
self.data = nil;
}
- (void)connection:(NSURLConnection *)connection didFailWithError:(NSError *)error
{
self.connection = nil;
self.data = nil;
[self loadFailedWithError:error];
}
- (void)start
{
if (_loading && !_cancelled)
{
return;
}
//begin loading
_loading = YES;
_cancelled = NO;
//check for nil URL
if (_URL == nil)
{
[self cacheImage:nil];
return;
}
//check for cached image
UIImage *image = [self cachedImage];
if (image)
{
//add to cache (cached already but it doesn't matter)
[self performSelectorOnMainThread:@selector(cacheImage:)
withObject:image
waitUntilDone:NO];
return;
}
//begin load
NSURLRequest *request = [NSURLRequest requestWithURL:_URL
cachePolicy:NSURLCacheStorageNotAllowed
timeoutInterval:[AsyncImageLoader sharedLoader].loadingTimeout];
_connection = [[NSURLConnection alloc] initWithRequest:request delegate:self startImmediately:NO];
[_connection scheduleInRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
[_connection start];
}
- (void)cancel
{
_cancelled = YES;
[_connection cancel];
self.connection = nil;
self.data = nil;
}
- (void)dealloc
{
[_connection release];
[_data release];
[_URL release];
[_target release];
[super ah_dealloc];
}
@end
@interface AsyncImageLoader ()
@property (nonatomic, strong) NSMutableArray *connections;
@end
@implementation AsyncImageLoader
@synthesize cache = _cache;
@synthesize connections = _connections;
@synthesize concurrentLoads = _concurrentLoads;
@synthesize loadingTimeout = _loadingTimeout;
+ (AsyncImageLoader *)sharedLoader
{
static AsyncImageLoader *sharedInstance = nil;
if (sharedInstance == nil)
{
sharedInstance = [[self alloc] init];
}
return sharedInstance;
}
+ (NSCache *)defaultCache
{
static NSCache *sharedInstance = nil;
if (sharedInstance == nil)
{
sharedInstance = [[NSCache alloc] init];
}
return sharedInstance;
}
- (AsyncImageLoader *)init
{
if ((self = [super init]))
{
self.cache = [[self class] defaultCache];
_concurrentLoads = 2;
_loadingTimeout = 60.0;
_connections = [[NSMutableArray alloc] init];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(imageLoaded:)
name:AsyncImageLoadDidFinish
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(imageFailed:)
name:AsyncImageLoadDidFail
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(targetReleased:)
name:AsyncImageTargetReleased
object:nil];
}
return self;
}
- (void)updateQueue
{
//start connections
NSUInteger count = 0;
for (AsyncImageConnection *connection in _connections)
{
if (![connection isLoading])
{
if ([connection isInCache])
{
[connection start];
}
else if (count < _concurrentLoads)
{
count ++;
[connection start];
}
}
}
}
- (void)imageLoaded:(NSNotification *)notification
{
//complete connections for URL
NSURL *URL = [notification.userInfo objectForKey:AsyncImageURLKey];
for (int i = (int)[_connections count] - 1; i >= 0; i--)
{
AsyncImageConnection *connection = [_connections objectAtIndex:(NSUInteger)i];
if (connection.URL == URL || [connection.URL isEqual:URL])
{
//cancel earlier connections for same target/action
for (int j = i - 1; j >= 0; j--)
{
AsyncImageConnection *earlier = [_connections objectAtIndex:(NSUInteger)j];
if (earlier.target == connection.target &&
earlier.success == connection.success)
{
[earlier cancel];
[_connections removeObjectAtIndex:(NSUInteger)j];
i--;
}
}
//cancel connection (in case it's a duplicate)
[connection cancel];
//perform action
UIImage *image = [notification.userInfo objectForKey:AsyncImageImageKey];
#if RESIZE_IMG
// resize image before sending it over
NSData *data = UIImageJPEGRepresentation(image, 1.0);
if (data.length > MAX_IMAGE_SIZE)
{
image = [HFUtils imageWithImage:image scaledBy:MAX_IMAGE_SIZE / (float)data.length];
}
#endif
objc_msgSend(connection.target, connection.success, image, connection.URL);
//remove from queue
[_connections removeObjectAtIndex:(NSUInteger)i];
}
}
//update the queue
[self updateQueue];
}
- (void)imageFailed:(NSNotification *)notification
{
//remove connections for URL
NSURL *URL = [notification.userInfo objectForKey:AsyncImageURLKey];
for (int i = (int)[_connections count] - 1; i >= 0; i--)
{
AsyncImageConnection *connection = [_connections objectAtIndex:(NSUInteger)i];
if ([connection.URL isEqual:URL])
{
//cancel connection (in case it's a duplicate)
[connection cancel];
//perform failure action
if (connection.failure)
{
NSError *error = [notification.userInfo objectForKey:AsyncImageErrorKey];
objc_msgSend(connection.target, connection.failure, error, URL);
}
//remove from queue
[_connections removeObjectAtIndex:(NSUInteger)i];
}
}
//update the queue
[self updateQueue];
}
- (void)targetReleased:(NSNotification *)notification
{
//remove connections for URL
id target = [notification object];
for (int i = (int)[_connections count] - 1; i >= 0; i--)
{
AsyncImageConnection *connection = [_connections objectAtIndex:(NSUInteger)i];
if (connection.target == target)
{
//cancel connection
[connection cancel];
[_connections removeObjectAtIndex:(NSUInteger)i];
}
}
//update the queue
[self updateQueue];
}
- (void)loadImageWithURL:(NSURL *)URL target:(id)target success:(SEL)success failure:(SEL)failure
{
if (URL == nil)
return;
//check cache
UIImage *image = [_cache objectForKey:URL];
if (image)
{
[self cancelLoadingImagesForTarget:self action:success];
if (success) [target performSelectorOnMainThread:success withObject:image waitUntilDone:NO];
return;
}
//create new connection
AsyncImageConnection *connection = [[AsyncImageConnection alloc] initWithURL:URL
cache:_cache
target:target
success:success
failure:failure];
BOOL added = NO;
for (NSUInteger i = 0; i < [_connections count]; i++)
{
AsyncImageConnection *existingConnection = [_connections objectAtIndex:i];
if (!existingConnection.loading)
{
[_connections insertObject:connection atIndex:i];
added = YES;
break;
}
}
if (!added)
{
[_connections addObject:connection];
}
[connection release];
[self updateQueue];
}
- (void)loadImageWithURL:(NSURL *)URL target:(id)target action:(SEL)action
{
[self loadImageWithURL:URL target:target success:action failure:NULL];
}
- (void)loadImageWithURL:(NSURL *)URL
{
[self loadImageWithURL:URL target:nil success:NULL failure:NULL];
}
- (void)cancelLoadingURL:(NSURL *)URL target:(id)target action:(SEL)action
{
for (int i = (int)[_connections count] - 1; i >= 0; i--)
{
AsyncImageConnection *connection = [_connections objectAtIndex:(NSUInteger)i];
if ([connection.URL isEqual:URL] && connection.target == target && connection.success == action)
{
[connection cancel];
[_connections removeObjectAtIndex:(NSUInteger)i];
}
}
}
- (void)cancelLoadingURL:(NSURL *)URL target:(id)target
{
for (int i = (int)[_connections count] - 1; i >= 0; i--)
{
AsyncImageConnection *connection = [_connections objectAtIndex:(NSUInteger)i];
if ([connection.URL isEqual:URL] && connection.target == target)
{
[connection cancel];
[_connections removeObjectAtIndex:(NSUInteger)i];
}
}
}
- (void)cancelLoadingURL:(NSURL *)URL
{
for (int i = (int)[_connections count] - 1; i >= 0; i--)
{
AsyncImageConnection *connection = [_connections objectAtIndex:(NSUInteger)i];
if ([connection.URL isEqual:URL])
{
[connection cancel];
[_connections removeObjectAtIndex:(NSUInteger)i];
}
}
}
- (void)cancelLoadingImagesForTarget:(id)target action:(SEL)action
{
for (int i = (int)[_connections count] - 1; i >= 0; i--)
{
AsyncImageConnection *connection = [_connections objectAtIndex:(NSUInteger)i];
if (connection.target == target && connection.success == action)
{
[connection cancel];
}
}
}
- (void)cancelLoadingImagesForTarget:(id)target
{
for (int i = (int)[_connections count] - 1; i >= 0; i--)
{
AsyncImageConnection *connection = [_connections objectAtIndex:(NSUInteger)i];
if (connection.target == target)
{
[connection cancel];
}
}
}
- (NSURL *)URLForTarget:(id)target action:(SEL)action
{
//return the most recent image URL assigned to the target for the given action
//this is not neccesarily the next image that will be assigned
for (int i = (int)[_connections count] - 1; i >= 0; i--)
{
AsyncImageConnection *connection = [_connections objectAtIndex:(NSUInteger)i];
if (connection.target == target && connection.success == action)
{
return [[connection.URL ah_retain] autorelease];
}
}
return nil;
}
- (NSURL *)URLForTarget:(id)target
{
//return the most recent image URL assigned to the target
//this is not neccesarily the next image that will be assigned
for (int i = (int)[_connections count] - 1; i >= 0; i--)
{
AsyncImageConnection *connection = [_connections objectAtIndex:(NSUInteger)i];
if (connection.target == target)
{
return [[connection.URL ah_retain] autorelease];
}
}
return nil;
}
- (void)dealloc
{
[[NSNotificationCenter defaultCenter] removeObserver:self];
[_cache release];
[_connections release];
[super ah_dealloc];
}
@end
@implementation UIImageView(AsyncImageView)
- (void)setImageURL:(NSURL *)imageURL
{
[[AsyncImageLoader sharedLoader] loadImageWithURL:imageURL target:self action:@selector(setImage:)];
}
- (NSURL *)imageURL
{
return [[AsyncImageLoader sharedLoader] URLForTarget:self action:@selector(setImage:)];
}
@end
@interface AsyncImageView ()
@property (nonatomic, strong) UIActivityIndicatorView *activityView;
@end
@implementation AsyncImageView
@synthesize showActivityIndicator = _showActivityIndicator;
@synthesize activityIndicatorStyle = _activityIndicatorStyle;
@synthesize crossfadeImages = _crossfadeImages;
@synthesize crossfadeDuration = _crossfadeDuration;
@synthesize activityView = _activityView;
- (void)setUp
{
_showActivityIndicator = YES;
_activityIndicatorStyle = UIActivityIndicatorViewStyleGray;
_crossfadeImages = YES;
_crossfadeDuration = 0.4;
}
- (id)initWithFrame:(CGRect)frame
{
if ((self = [super initWithFrame:frame]))
{
[self setUp];
}
return self;
}
- (id)initWithCoder:(NSCoder *)aDecoder
{
if ((self = [super initWithCoder:aDecoder]))
{
[self setUp];
}
return self;
}
- (void)setImageURL:(NSURL *)imageURL
{
super.imageURL = imageURL;
if (_showActivityIndicator && !self.image && imageURL)
{
if (_activityView == nil)
{
_activityView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:_activityIndicatorStyle];
_activityView.hidesWhenStopped = YES;
_activityView.center = CGPointMake(self.bounds.size.width / 2.0f, self.bounds.size.height / 2.0f);
_activityView.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin;
[self addSubview:_activityView];
}
[_activityView startAnimating];
}
}
- (void)setActivityIndicatorStyle:(UIActivityIndicatorViewStyle)style
{
_activityIndicatorStyle = style;
[_activityView removeFromSuperview];
self.activityView = nil;
}
- (void)setImage:(UIImage *)image //error
{
if (_crossfadeImages)
{
//implement crossfade transition without needing to import QuartzCore
id animation = objc_msgSend(NSClassFromString(@"CATransition"), @selector(animation));
objc_msgSend(animation, @selector(setType:), @"kCATransitionFade");
objc_msgSend(animation, @selector(setDuration:), _crossfadeDuration);
objc_msgSend(self.layer, @selector(addAnimation:forKey:), animation, nil);
}
super.image = image;
[_activityView stopAnimating];
}
- (void)dealloc
{
[[AsyncImageLoader sharedLoader] cancelLoadingURL:self.imageURL target:self];
[_activityView release];
[super ah_dealloc];
}
@end
I am trying to screen-capture a view that has an APPLCameraViewController in it. For some reason, no matter what I do, whatever the camera is looking at never gets captured when the recording is saved to the camera roll; only the UIView with the labels does, so I end up with a black background with the labels on it. I want the labels to appear on top of the camera view. Any suggestions or examples on how to go about this? Here is the screen-capture .m, which I am assuming is the reason this is happening.
@interface ASScreenRecorder()
@property (strong, nonatomic) AVAssetWriter *videoWriter;
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *avAdaptor;
@property (strong, nonatomic) CADisplayLink *displayLink;
@property (strong, nonatomic) NSDictionary *outputBufferPoolAuxAttributes;
@property (nonatomic) CFTimeInterval firstTimeStamp;
@property (nonatomic) BOOL isRecording;
@end
@implementation ASScreenRecorder
{
dispatch_queue_t _render_queue;
dispatch_queue_t _append_pixelBuffer_queue;
dispatch_semaphore_t _frameRenderingSemaphore;
dispatch_semaphore_t _pixelAppendSemaphore;
CGSize _viewSize;
CGFloat _scale;
CGColorSpaceRef _rgbColorSpace;
CVPixelBufferPoolRef _outputBufferPool;
}
#pragma mark - initializers
+ (instancetype)sharedInstance {
static dispatch_once_t once;
static ASScreenRecorder *sharedInstance;
dispatch_once(&once, ^{
sharedInstance = [[self alloc] init];
});
return sharedInstance;
}
- (instancetype)init
{
self = [super init];
if (self) {
_viewSize = [UIApplication sharedApplication].delegate.window.bounds.size;
_scale = [UIScreen mainScreen].scale;
// record half size resolution for retina iPads
if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) {
_scale = 1.0;
}
_isRecording = NO;
_append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL);
_render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_render_queue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0));
_frameRenderingSemaphore = dispatch_semaphore_create(1);
_pixelAppendSemaphore = dispatch_semaphore_create(1);
}
return self;
}
#pragma mark - public
- (void)setVideoURL:(NSURL *)videoURL
{
NSAssert(!_isRecording, @"videoURL can not be changed whilst recording is in progress");
_videoURL = videoURL;
}
- (BOOL)startRecording
{
if (!_isRecording) {
[self setUpWriter];
_isRecording = (_videoWriter.status == AVAssetWriterStatusWriting);
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)];
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}
return _isRecording;
}
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
{
if (_isRecording) {
_isRecording = NO;
[_displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
[self completeRecordingSession:completionBlock];
}
}
#pragma mark - private
-(void)setUpWriter
{
_rgbColorSpace = CGColorSpaceCreateDeviceRGB();
NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
(id)kCVPixelBufferWidthKey : @(_viewSize.width * _scale),
(id)kCVPixelBufferHeightKey : @(_viewSize.height * _scale),
(id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * _scale * 4)
};
_outputBufferPool = NULL;
CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool);
NSError* error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:self.videoURL ?: [self tempFileURL]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
NSInteger pixelNumber = _viewSize.width * _viewSize.height * _scale;
NSDictionary* videoCompression = @{AVVideoAverageBitRateKey: @(pixelNumber * 11.4)};
NSDictionary* videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: [NSNumber numberWithInt:_viewSize.width*_scale],
AVVideoHeightKey: [NSNumber numberWithInt:_viewSize.height*_scale],
AVVideoCompressionPropertiesKey: videoCompression};
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
_videoWriterInput.transform = [self videoTransformForDeviceOrientation];
_avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil];
[_videoWriter addInput:_videoWriterInput];
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
}
- (CGAffineTransform)videoTransformForDeviceOrientation
{
CGAffineTransform videoTransform;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationLandscapeLeft:
videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
break;
case UIDeviceOrientationLandscapeRight:
videoTransform = CGAffineTransformMakeRotation(M_PI_2);
break;
case UIDeviceOrientationPortraitUpsideDown:
videoTransform = CGAffineTransformMakeRotation(M_PI);
break;
default:
videoTransform = CGAffineTransformIdentity;
}
return videoTransform;
}
- (NSURL*)tempFileURL
{
NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"];
[self removeTempFilePath:outputPath];
return [NSURL fileURLWithPath:outputPath];
}
- (void)removeTempFilePath:(NSString*)filePath
{
NSFileManager* fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath]) {
NSError* error;
if ([fileManager removeItemAtPath:filePath error:&error] == NO) {
NSLog(#"Could not delete old recording:%#", [error localizedDescription]);
}
}
}
- (void)completeRecordingSession:(VideoCompletionBlock)completionBlock;
{
dispatch_async(_render_queue, ^{
dispatch_sync(_append_pixelBuffer_queue, ^{
[_videoWriterInput markAsFinished];
[_videoWriter finishWritingWithCompletionHandler:^{
void (^completion)(void) = ^() {
[self cleanup];
dispatch_async(dispatch_get_main_queue(), ^{
if (completionBlock) completionBlock();
});
};
if (self.videoURL) {
completion();
} else {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
NSLog(#"Error copying video to camera roll:%#", [error localizedDescription]);
} else {
[self removeTempFilePath:_videoWriter.outputURL.path];
completion();
}
}];
}
}];
});
});
}
- (void)cleanup
{
self.avAdaptor = nil;
self.videoWriterInput = nil;
self.videoWriter = nil;
self.firstTimeStamp = 0;
self.outputBufferPoolAuxAttributes = nil;
CGColorSpaceRelease(_rgbColorSpace);
CVPixelBufferPoolRelease(_outputBufferPool);
}
- (void)writeVideoFrame
{
// throttle the number of frames to prevent meltdown
// technique gleaned from Brad Larson's answer here: http://stackoverflow.com/a/5956119
if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
return;
}
dispatch_async(_render_queue, ^{
if (![_videoWriterInput isReadyForMoreMediaData]) return;
if (!self.firstTimeStamp) {
self.firstTimeStamp = _displayLink.timestamp;
}
CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp);
CMTime time = CMTimeMakeWithSeconds(elapsed, 1000);
CVPixelBufferRef pixelBuffer = NULL;
CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
if (self.delegate) {
[self.delegate writeBackgroundFrameInContext:&bitmapContext];
}
// draw each window into the context (other windows include UIKeyboard, UIAlert)
// FIX: UIKeyboard is currently only rendered correctly in portrait orientation
dispatch_sync(dispatch_get_main_queue(), ^{
UIGraphicsPushContext(bitmapContext); {
for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
[window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO];
}
} UIGraphicsPopContext();
});
// append pixelBuffer on a async dispatch_queue, the next frame is rendered whilst this one appends
// must not overwhelm the queue with pixelBuffers, therefore:
// check if _append_pixelBuffer_queue is ready
// if it’s not ready, release pixelBuffer and bitmapContext
if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) {
dispatch_async(_append_pixelBuffer_queue, ^{
BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
if (!success) {
NSLog(#"Warning: Unable to write buffer to video");
}
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
dispatch_semaphore_signal(_pixelAppendSemaphore);
});
} else {
CGContextRelease(bitmapContext);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVPixelBufferRelease(pixelBuffer);
}
dispatch_semaphore_signal(_frameRenderingSemaphore);
});
}
- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer
{
CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer);
CVPixelBufferLockBaseAddress(*pixelBuffer, 0);
CGContextRef bitmapContext = NULL;
bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer),
CVPixelBufferGetWidth(*pixelBuffer),
CVPixelBufferGetHeight(*pixelBuffer),
8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace,
kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
);
CGContextScaleCTM(bitmapContext, _scale, _scale);
CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
CGContextConcatCTM(bitmapContext, flipVertical);
return bitmapContext;
}
I would be more than happy to provide my full source code to anyone who could tackle something like this, because posting multiple .m files here would take up a lot of space.
I'm not sure if it's similar for you, but I've been using ASScreenRecorder to record an ARSCNView, and that has been working fine. Have a look at the following link; you give it a view to render, and it records it and provides an output URL. You might have to make a small edit to the class to get the stopRecording function's completion handler to work.
https://github.com/alskipp/ASScreenRecorder
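For reference, here is a minimal usage sketch based only on the methods shown in the ASScreenRecorder class earlier in this thread (the button action it lives in is an assumption, not part of the original code):
- (IBAction)toggleRecording:(id)sender {
    // Assumed call site: a button in whatever view controller owns the recording.
    ASScreenRecorder *recorder = [ASScreenRecorder sharedInstance];
    if (!recorder.isRecording) {
        [recorder startRecording];
    } else {
        [recorder stopRecordingWithCompletion:^{
            NSLog(@"Screen recording finished");
        }];
    }
}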
I have an audio playback class that uses AudioToolbox.framework and AudioQueue.
I've run into a problem: with every piece of audio data played back, memory usage increases, and after playback completes the memory is not released. With a batch test it grows to hundreds of megabytes. I want to know what causes the memory to keep increasing, whether the audio data objects passed in each time are being released, or whether something else is going on.
Here is my PlayThread class code:
@interface PlayThread()
{
BOOL transferDataComplete; // if there is no more data to transfer to the play thread, set transferDataComplete = YES
NSMutableArray *receiveDataArray; // audio data array
BOOL isPlay; // if the audio queue has started, isPlay = YES
}
@end
#pragma mark class implementation
@implementation PlayThread
- (instancetype)init
{
if (self = [super init]) {
receiveDataArray = [[NSMutableArray alloc]init];
isPlay = NO;
transferDataComplete = false;
bufferOverCount = QUEUE_BUFFER_SIZE;
audioQueue = nil;
}
return self;
}
// audio queue callback function
static void BufferCallback(void *inUserData,AudioQueueRef inAQ,AudioQueueBufferRef buffer)
{
USCPlayThread* player=(__bridge USCPlayThread*)inUserData;
[player fillBuffer:inAQ queueBuffer:buffer];
}
// fill buffer
-(void)fillBuffer:(AudioQueueRef)queue queueBuffer:(AudioQueueBufferRef)buffer
{
while (true){
NSData *audioData = [self getAudioData];
if( transferDataComplete && audioData == nil) {
bufferOverCount --;
break;
}
else if(audioData != nil){
memcpy(buffer->mAudioData, [audioData bytes] , audioData.length);
buffer->mAudioDataByteSize = (UInt32)audioData.length;
AudioQueueEnqueueBuffer(queue, buffer, 0, NULL);
break;
}
else
break;
} // while
if(bufferOverCount == 0){
// stop audioqueue
[self stopAudioQueue];
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector(playComplete)]) {
[self.delegate playComplete];
}
});
}
}
-(void)addPlayData:(NSData *)data
{
NSUInteger count = 0;
@synchronized(receiveDataArray){
[receiveDataArray addObject:data];
}
}
/**
* get data from receiveDataArray
*/
-(NSData*)getAudioData
{
NSData *headData = nil;
@synchronized(receiveDataArray){
if(receiveDataArray.count > 0){
headData = [receiveDataArray objectAtIndex:0];
[receiveDataArray removeObjectAtIndex:0];
}
}
return headData;
}
- (void)startPlay // start audioqueue to play audio data
{
[self reset];
[self open];
for(int i=0; i<QUEUE_BUFFER_SIZE; i++)
{
[self fillBuffer:audioQueue queueBuffer:audioQueueBuffers[i]];
}
// audioqueuestart
AudioQueueStart(audioQueue, NULL);
@synchronized(self){
isPlay = YES;
}
if ([self.delegate respondsToSelector:@selector(playBegin)]) {
[self.delegate playBegin];
}
}
-(void)createAudioQueue
{
if (audioQueue) {
return;
}
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), nil, nil, 0, &audioQueue);
if(audioQueue){
for(int i=0;i<QUEUE_BUFFER_SIZE;i++){
AudioQueueAllocateBufferWithPacketDescriptions(audioQueue, EVERY_READ_LENGTH, 0, &audioQueueBuffers[i]);
}
}
}
-(void)stopAudioQueue
{
if(audioQueue == nil){
return;
}
@synchronized(self){
if(isPlay){
isPlay = NO;
}
}
AudioQueueStop(audioQueue, TRUE);
}
-(void)setAudioFormat
{
audioDescription.mSampleRate = 16000;
audioDescription.mFormatID = kAudioFormatLinearPCM;
audioDescription.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioDescription.mChannelsPerFrame = 1;
audioDescription.mFramesPerPacket = 1;
audioDescription.mBitsPerChannel = 16;
audioDescription.mBytesPerFrame = (audioDescription.mBitsPerChannel/8) * audioDescription.mChannelsPerFrame;
audioDescription.mBytesPerPacket = audioDescription.mBytesPerFrame ;
}
-(void)close
{
if (audioQueue) {
AudioQueueStop(audioQueue, true);
AudioQueueDispose(audioQueue, true);
audioQueue = nil;
isPlay = NO;
}
}
-(BOOL)open {
if([self isOpen]){
return YES;
}
[self close];
[self setAudioFormat];
[self createAudioQueue];
return YES;
}
-(BOOL)isOpen
{
return (audioQueue != nil);
}
- (void)reset
{
bufferOverCount = QUEUE_BUFFER_SIZE;
transferDataComplete = NO;
}
- (BOOL)isPlaying
{
return isPlay;
}
- (void)disposeQueue
{
if (audioQueue) {
AudioQueueDispose(audioQueue, YES);
}
audioQueue = nil;
}
- (void)dealloc
{
[self disposeQueue];
}
Here is ViewController.m:
- (void)viewDidLoad {
[super viewDidLoad];
PlayThread *playThread = [[PlayThread alloc]init];
playThread.delegate = self;
self.playThread = playThread;
for (int i = 0; i < 10; i++)
{ // create empty audio data to simulate playback
NSMutableData *data = [[NSMutableData alloc]initWithLength:10000];
[self.playThread addPlayData:data];
}
[self.playThread startPlay];
}
Here is PlayThread's delegate method:
// When playback completes, play once again; memory continues to increase
- (void)playComplete
{
dispatch_async(dispatch_get_main_queue(), ^{
for (int i = 0; i < 10; i++)
{
NSMutableData *data = [[NSMutableData alloc]initWithLength:10000];
[self.playThread addPlayData:data];
}
[self.playThread startPlay];
});
}
Why does the memory keep increasing, and how can I release it promptly?
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), nil, nil, 0, &audioQueue);
These run loop parameters cannot be nil here.
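A sketch of what that change could look like in createAudioQueue, following the answer's suggestion and passing an explicit run loop and mode instead of nil (this is only an illustration of the suggested call, not necessarily the full fix for the memory growth):
AudioQueueNewOutput(&audioDescription,
                    BufferCallback,
                    (__bridge void *)(self),
                    CFRunLoopGetCurrent(),   // run loop on which to invoke the callback
                    kCFRunLoopCommonModes,   // run loop mode for the callback
                    0,
                    &audioQueue);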
Hi, I am building a game that uses the GameKit framework, but I am having trouble sending two ints using sendDataToAllPlayers: the receiver cannot distinguish between the two ints that I am sending. Here is some of my code:
typedef enum {
kMessageTypeRandomNumber = 0,
kMessageQN = 0,
kMessageTypeGameBegin,
kMessageTypeSelectAnswer1,
kMessageTypeSelectAnswer2,
kMessageTypeSelectAnswer3,
kMessageTypeGameOver
} MessageType;
typedef struct {
MessageType messageType;
} Message;
typedef struct {
Message message;
uint32_t randomNumber;
int SelectedQ;
} MessageRandomNumber;
The following are the send methods:
-(void)sendTheSelectedRandomQuestionWithQuestion {
MessageRandomNumber message;
message.message.messageType = kMessageQN;
message.SelectedQ = randomSelectedQuestion;
NSData *data = [NSData dataWithBytes:&message length:sizeof(MessageRandomNumber)];
[self sendData:data];
}
- (void)sendRandomNumber {
//ourRandom = arc4random()%100;
MessageRandomNumber message;
message.message.messageType = kMessageTypeRandomNumber;
message.randomNumber = ourRandom;
NSData *data = [NSData dataWithBytes:&message length:sizeof(MessageRandomNumber)];
[self sendData:data];
}
- (void)sendData:(NSData *)data {
NSError *error;
BOOL success = [[GCHelper sharedInstance].match sendDataToAllPlayers:data withDataMode:GKMatchSendDataReliable error:&error];
if (!success) {
NSLog(#"Error sending init packet");
[self matchEnded];
}
}
The following is the didReceiveData method:
- (void)match:(GKMatch *)match didReceiveData:(NSData *)data fromPlayer:(NSString *)playerID {
//Store away other player ID for later
if (otherPlayerID == nil) {
otherPlayerID = playerID;
}
Message *message = (Message *) [data bytes];
if (message->messageType == kMessageQN) {
NSLog(#"Received The Selected Question To Display");
debugLabel.text = #"received the selected q";
MessageRandomNumber * messageSelectedQuestion = (MessageRandomNumber *) [data bytes];
NSLog(#"The Selected Question is number: %ud",messageSelectedQuestion->SelectedQ);
randomSelectedQuestion = messageSelectedQuestion->SelectedQ;
[self displayTheSlectedQuestion];
} else if (message->messageType == kMessageTypeRandomNumber) {
MessageRandomNumber * messageInit = (MessageRandomNumber *) [data bytes];
NSLog(#"Received random number: %ud, ours %ud", messageInit->randomNumber, ourRandom);
bool tie = false;
if (messageInit->randomNumber == ourRandom) {
//NSLog(#"TIE!");
ourRandom = arc4random();
tie = true;
[self sendRandomNumber];
} else if (ourRandom > messageInit->randomNumber) {
NSLog(#"We are player 1");
isPlayer1 = YES;
//[self sendTheSelectedRandomQuestionWithQuestion];
} else {
NSLog(#"We are player 2");
isPlayer1 = NO;
}
if (!tie) {
receivedRandom = YES;
if (gameState == kGameStateWaitingForRandomNumber) {
[self setGameState:kGameStateWaitingForStart];
}
[self tryStartGame];
}
}
}
But for some mysterious reason, every time I call sendTheSelectedRandomQuestionWithQuestion, the receiver thinks the packet is a randomNumber message rather than a SelectedQ message. Can anyone help me, please?
OK, just figured out the problem: both kMessageTypeRandomNumber and kMessageQN were defined as 0, so the receiver's check on messageType could not tell the two packets apart. It should be:
typedef enum {
kMessageTypeRandomNumber = 0,
kMessageQN = 1,
kMessageTypeGameBegin,
kMessageTypeSelectAnswer1,
kMessageTypeSelectAnswer2,
kMessageTypeSelectAnswer3,
kMessageTypeGameOver
} MessageType;
Instead of:
typedef enum {
kMessageTypeRandomNumber = 0,
kMessageQN = 0,
kMessageTypeGameBegin,
kMessageTypeSelectAnswer1,
kMessageTypeSelectAnswer2,
kMessageTypeSelectAnswer3,
kMessageTypeGameOver
} MessageType;
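With distinct values, the receiver's check on message->messageType can actually tell the two packets apart. As a minimal sketch using the same structs as in the question, the receiving side can dispatch on the type like this:
Message *message = (Message *)[data bytes];
switch (message->messageType) {
    case kMessageQN: {
        // Question packet: read the selected question index
        MessageRandomNumber *msg = (MessageRandomNumber *)[data bytes];
        NSLog(@"Received selected question: %d", msg->SelectedQ);
        break;
    }
    case kMessageTypeRandomNumber: {
        // Random-number packet: read the random number
        MessageRandomNumber *msg = (MessageRandomNumber *)[data bytes];
        NSLog(@"Received random number: %u", msg->randomNumber);
        break;
    }
    default:
        break;
}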