I've searched for days for a way to upload an iOS asset without creating a copy of the file in the temp directory, without luck. I got the code working with a temp copy, but copying a video file that could be anywhere from 10MB to 4GB is not realistic.
The closest I have come to reading the asset in read-only mode is the code below. Per the apple documentation this should work - see the following links:
https://developer.apple.com/library/ios/documentation/Miscellaneous/Reference/EntitlementKeyReference/Chapters/EnablingAppSandbox.html
I have enabled these keys:
<key>com.apple.security.assets.movies.read-write</key>
<string>YES</string>
<key>com.apple.security.assets.music.read-write</key>
<string>YES</string>
<key>com.apple.security.assets.pictures.read-write</key>
<string>YES</string>
<key>com.apple.security.files.downloads.read-write</key>
<string>YES</string>
Here is the code:
// QueueController.h
#import <AVFoundation/AVFoundation.h>
#import <AWSS3.h>
#import <Foundation/Foundation.h>
#import <MobileCoreServices/MobileCoreServices.h>
#import "Reachability1.h"
#import "TransferType.h"
#import "TransferModel.h"
#import "Util.h"

// Singleton that queues media transfers (uploads/downloads) over a
// background NSURLSession.
// BUG FIX: the paste replaced every '@' with '#'; restored @interface,
// @property and @end so the header compiles.
@interface QueueController : NSObject <NSURLSessionDelegate>

// Background session used for all transfers.
@property(atomic, strong) NSURLSession* session;
@property(atomic, strong) NSNumber* sessionCount;
@property(atomic, strong) NSURLSessionConfiguration* configuration;

// Returns the shared singleton, creating it on first use.
+ (QueueController*)sharedInstance;

// Enqueues a transfer for the given media item.
- (void)transferMediaViaQueue:(MediaItem*)mediaItem
             withTransferType:(TransferType)transferType;

@end
@implementation QueueController {
    NSOperationQueue* copyQueue;
    NSOperationQueue* transferQueue;        // runs the upload operations; also the session delegate queue
    NSMutableArray* inProcessTransferArray; // transfers currently running
    NSMutableArray* pendingTransferArray;   // transfers waiting for a free slot
    bool isTransferring;
}

static QueueController* sharedInstance = nil;

// Get the shared instance and create it if necessary.
+ (QueueController*)sharedInstance {
    @synchronized(self) {
        if (sharedInstance == nil) {
            sharedInstance = [[QueueController alloc] init];
        }
    }
    return sharedInstance;
}

- (id)init {
    if (self = [super init]) {
        // NOTE(review): `appDelegate` is not declared anywhere in this snippet —
        // presumably an ivar in the real project; confirm before compiling.
        appDelegate =
            (RootViewControllerAppDelegate*)[UIApplication sharedApplication]
                .delegate;
        copyQueue = [[NSOperationQueue alloc] init];
        transferQueue = [[NSOperationQueue alloc] init];
        transferQueue.maxConcurrentOperationCount = MAX_CONCURRENT_TRANSFERS;
        inProcessTransferArray = [[NSMutableArray alloc] init];
        pendingTransferArray = [[NSMutableArray alloc] init];
        isTransferring = false;
        if (self.session == nil) {
            // Background configuration so transfers survive app suspension.
            self.configuration = [NSURLSessionConfiguration
                backgroundSessionConfigurationWithIdentifier:@"transferQueue"];
            self.session = [NSURLSession sessionWithConfiguration:self.configuration
                                                         delegate:self
                                                    delegateQueue:transferQueue];
        }
    }
    return self;
}

- (void)transferMediaViaQueue:(MediaItem*)mediaItem
             withTransferType:(TransferType)transferType {
    // Build a transfer model describing this media item.
    NSUserDefaults* defaultUser = [NSUserDefaults standardUserDefaults];
    NSString* user_id = [defaultUser valueForKey:@"UserId"];
    TransferModel* transferModel = [[TransferModel alloc] init];
    transferModel.mediaItem = mediaItem;
    transferModel.transferType = transferType;
    transferModel.s3Path = user_id;
    transferModel.s3file_name = mediaItem.mediaName;
    transferModel.assetURL =
        [[mediaItem.mediaLocalAsset defaultRepresentation] url];
    ALAssetRepresentation* mediaRep =
        [mediaItem.mediaLocalAsset defaultRepresentation];
    // Map the asset's UTI to a MIME type for the upload's Content-Type header.
    transferModel.content_type =
        (__bridge_transfer NSString*)UTTypeCopyPreferredTagWithClass(
            (__bridge CFStringRef)[mediaRep UTI], kUTTagClassMIMEType);
    @synchronized(pendingTransferArray) {
        if ((!isTransferring) &&
            (transferQueue.operationCount < MAX_CONCURRENT_TRANSFERS)) {
            isTransferring = true;
            if (transferModel.transferType == UPLOAD) {
                NSInvocationOperation* uploadOP = [[NSInvocationOperation alloc]
                    initWithTarget:self
                          selector:@selector(uploadMediaViaLocalPath:)
                            object:transferModel];
                [transferQueue addOperation:uploadOP];
                [inProcessTransferArray addObject:transferModel];
            }
        } else {
            // No free slot: queue for later.
            [pendingTransferArray addObject:transferModel];
        }
    }
}

// Requests a pre-signed S3 PUT URL, then starts a streamed upload task
// against it.
- (void)uploadMediaViaLocalPath:(TransferModel*)transferModel {
    @try {
        // BUG FIX: removed the [NSData initWithContentsOfURL:] load — the
        // asset can be multi-GB and must not be read into memory — and the
        // NSURLRequest built from the assets-library URL, which was
        // (incorrectly) handed to the upload task below.
        /**
         * Fetch signed URL
         */
        AWSS3GetPreSignedURLRequest* getPreSignedURLRequest =
            [AWSS3GetPreSignedURLRequest new];
        getPreSignedURLRequest.bucket = BUCKET_NAME;
        NSString* s3Key = [NSString stringWithFormat:@"%@/%@",
                                                     transferModel.s3Path,
                                                     transferModel.s3file_name];
        getPreSignedURLRequest.key = s3Key;
        getPreSignedURLRequest.HTTPMethod = AWSHTTPMethodPUT;
        getPreSignedURLRequest.expires = [NSDate dateWithTimeIntervalSinceNow:3600];
        // Important: contentType must be set for a pre-signed PUT request.
        getPreSignedURLRequest.contentType = transferModel.content_type;
        NSLog(@"mimeType: %@", transferModel.content_type);
        /**
         * Upload the file
         */
        [[[AWSS3PreSignedURLBuilder defaultS3PreSignedURLBuilder]
            getPreSignedURL:getPreSignedURLRequest]
            continueWithBlock:^id(BFTask* task) {
                if (task.error) {
                    NSLog(@"Error: %@", task.error);
                } else {
                    NSURL* presignedURL = task.result;
                    NSLog(@"upload presignedURL is: \n%@", presignedURL);
                    NSMutableURLRequest* request =
                        [NSMutableURLRequest requestWithURL:presignedURL];
                    request.cachePolicy = NSURLRequestReloadIgnoringLocalCacheData;
                    [request setHTTPMethod:@"PUT"];
                    [request setValue:transferModel.content_type
                        forHTTPHeaderField:@"Content-Type"];
                    // BUG FIX: upload against the pre-signed request, not the
                    // assets-library URL (which NSURLSession cannot load and
                    // which caused the -999 "cancelled" error).
                    // NOTE: with a streamed request, NSURLSession ignores any
                    // request body and asks the delegate for it via
                    // URLSession:task:needNewBodyStream: — that delegate method
                    // must be implemented to supply an NSInputStream over the
                    // asset's bytes.
                    NSURLSessionUploadTask* uploadTask =
                        [self.session uploadTaskWithStreamedRequest:request];
                    // BUG FIX: record the task after it exists (it was
                    // previously assigned while still nil).
                    transferModel.sessionTask = uploadTask;
                    [uploadTask resume];
                }
                return nil;
            }];
    } @catch (NSException* exception) {
        NSLog(@"exception: %@", exception);
    } @finally {
    }
}

- (void)URLSession:(NSURLSession*)session
                        task:(NSURLSessionTask*)task
             didSendBodyData:(int64_t)bytesSent
              totalBytesSent:(int64_t)totalBytesSent
    totalBytesExpectedToSend:(int64_t)totalBytesExpectedToSend {
    // Report fractional progress (0.0 - 1.0).
    double progress = (double)totalBytesSent / (double)totalBytesExpectedToSend;
    NSLog(@"UploadTask progress: %lf", progress);
}

- (void)URLSession:(NSURLSession*)session
                    task:(NSURLSessionTask*)task
    didCompleteWithError:(NSError*)error {
    // error is nil on success.
    NSLog(@"(void)URLSession:session task:(NSURLSessionTask*)task "
          @"didCompleteWithError:error called...%@",
          error);
}

- (void)URLSessionDidFinishEventsForBackgroundURLSession:
    (NSURLSession*)session {
    NSLog(@"URLSessionDidFinishEventsForBackgroundURLSession called...");
}

// NSURLSessionDataDelegate
- (void)URLSession:(NSURLSession*)session
              dataTask:(NSURLSessionDataTask*)dataTask
    didReceiveResponse:(NSURLResponse*)response
     completionHandler:
         (void (^)(NSURLSessionResponseDisposition disposition))completionHandler {
    // BUG FIX: if this delegate method is implemented, the completion handler
    // MUST be invoked; leaving it uncalled cancels the task
    // (NSURLErrorDomain Code=-999 "cancelled").
    completionHandler(NSURLSessionResponseAllow);
}

@end
But I'm receiving this error:
(void)URLSession:session task:(NSURLSessionTask*)task didCompleteWithError:error called...Error Domain=NSURLErrorDomain Code=-999 "cancelled" UserInfo=0x17166f840 {NSErrorFailingURLStringKey=assets-library://asset/asset.MOV?id=94F90EEB-BB6A-4E9D-B77E-CDD60173B60C&ext=MOV, NSLocalizedDescription=cancelled, NSErrorFailingURLKey=assets-library://asset/asset.MOV?id=94F90EEB-BB6A-4E9D-B77E-CDD60173B60C&ext=MOV}
userInfo: {
NSErrorFailingURLKey = "assets-library://asset/asset.MOV?id=94F90EEB-BB6A-4E9D-B77E-CDD60173B60C&ext=MOV";
NSErrorFailingURLStringKey = "assets-library://asset/asset.MOV?id=94F90EEB-BB6A-4E9D-B77E-CDD60173B60C&ext=MOV";
NSLocalizedDescription = cancelled;
}
Thanks in advance for your help.
Regards,
-J
A couple of comments regarding using NSURLSessionUploadTask:
If you implement didReceiveResponse, you must call the completionHandler.
If you call uploadTaskWithStreamedRequest, the documentation for the request parameter warns us that:
The body stream and body data in this request object are ignored, and NSURLSession calls its delegate’s URLSession:task:needNewBodyStream: method to provide the body data.
So you must implement needNewBodyStream if implementing a NSInputStream based request.
Be forewarned that using a stream-based request like this creates a request with a "chunked" transfer encoding, and not all servers can handle that.
At one point in the code, you appear to try to load the contents of the asset into a NSData. If you have assets that are that large, you cannot reasonably load that into a NSData object. Besides, that's inconsistent with using uploadTaskWithStreamedRequest.
You either need to create NSInputStream or upload it from a file.
You appear to be using the asset URL for the NSURLRequest. That URL should be the URL for your web service.
When using image picker, you have access to two URL keys: the media URL (a file:// URL for movies, but not pictures) and the assets library reference URL (an assets-library:// URL). If you're using the media URL, you can use that for uploading movies. But you cannot use the assets library reference URL for uploading purposes. You can only use that in conjunction with ALAssetsLibrary.
The ALAssetPropertyURL is purely a URL identifier for the asset, i.e. it identifies assets and asset groups, and I don't think you can use it directly to upload to a service.
You could use AVAssetExportSession to export the asset to a temp URL if the other methods are tedious.
i.e
[AVAssetExportSession exportSessionWithAsset:[AVURLAsset URLAssetWithURL:assetURL options:nil] presetName:AVAssetExportPresetPassthrough];
Related
This is my first time using the Baidu API. I am having a problem implementing the Baidu Places auto-complete API in my project. I am using the Baidu developers link: http://lbsyun.baidu.com/index.php?title=iossdk.
Could someone please point me to a tutorial in this regard?
I am following this tutorial (link), but with it I cannot receive the JSON file; it gives me an error:
{ "Status": 102, "message": "MCODE parameter is not present, mobile
type mcode required parameter"}
It seems you should use the POI Search module of BaiduMapKit. Try this:
// Issue a paged POI (point-of-interest) search in a given city.
// BUG FIX: restored '@' string literals (the paste had replaced '@' with '#').
BMKCitySearchOption *citySearchOption = [[BMKCitySearchOption alloc] init];
citySearchOption.pageIndex = curPage;   // page index; set to 0 for the first page
citySearchOption.pageCapacity = 10;     // results per page
citySearchOption.city = @"上海";        // city in which to search
citySearchOption.keyword = @"淮海路";   // road or place name to search for
BOOL flag = [_poisearch poiSearchInCity:citySearchOption];
if (flag) {
    _nextPageButton.enabled = true;
    NSLog(@"success");
} else {
    _nextPageButton.enabled = false;
    NSLog(@"fail");
}
Implement AutoComplete In Baidu Map using Baidu Web API
- (void)viewDidLoad {
    // BUG FIX: call super first so UIKit's setup runs before our own, and
    // restored '@' string literals (the paste had replaced '@' with '#').
    [super viewDidLoad];
    BaseString = @"http://api.map.baidu.com/place/v2/suggestion?query=";
    ak = @"56dIEtBAp1CU7u8ZMcq8DyUH2mVsn38x";
    mcode = @"com.baidu.Baidu-Map-Demo";
    regionkey = @"中国";
    PathString = @"http://api.map.baidu.com/direction/v2/transit?origin=";
    self.mapView.userTrackingMode = BMKUserTrackingModeFollow;
    // 2. Set the map type self.mapView.mapType = BMKMapTypeStandard;
    // 3. Set Agent self.mapView.delegate = self;
    mapView.frame = CGRectMake(0, 0, self.view.frame.size.width,
                               self.view.frame.size.height);
    mapView.delegate = self;
    anotation = [[BMKPointAnnotation alloc] init];
    destination = [[BMKPointAnnotation alloc] init];
    PathUrl = [[NSURL alloc] init];
    finalPathArray = [[NSMutableArray alloc] init];
    session = [NSURLSession sessionWithConfiguration:
                  [NSURLSessionConfiguration defaultSessionConfiguration]];
    downloadURL = [[NSURL alloc] init];
    path = [[BMKPolyline alloc] init];
    flag = 0;
}
// Fetches autocomplete suggestions for `query` from the Baidu Place API and
// reloads the results table.
- (void)GetSuggestion:(NSString *)query {
    // BUG FIX: the format string contained '®ion=' (a mangled '&region=') and
    // '#' in place of '@'; both restored so the request carries the region,
    // ak and mcode parameters the API requires.
    NSString *stringUrl = [NSString stringWithFormat:
        @"%@%@&page_size=10&page_num=0&scope=1&region=%@&output=json&ak=%@&mcode=%@",
        BaseString, query, regionkey, ak, mcode];
    stringUrl = [stringUrl stringByAddingPercentEncodingWithAllowedCharacters:
                     [NSCharacterSet URLFragmentAllowedCharacterSet]];
    downloadURL = [NSURL URLWithString:stringUrl];
    if (downloadURL != nil) {
        if (DownloadTask != nil) {
            // BUG FIX: cancel the previous request; suspend only pauses it and
            // it would still be replaced (and leaked) by the new task below.
            [DownloadTask cancel];
        }
        DownloadTask = [session dataTaskWithURL:downloadURL
                              completionHandler:^(NSData * _Nullable data,
                                                  NSURLResponse * _Nullable response,
                                                  NSError * _Nullable error) {
            // Robustness: don't feed nil data to NSJSONSerialization.
            if (error != nil || data == nil) {
                return;
            }
            NSDictionary *AutocompleteData =
                [NSJSONSerialization JSONObjectWithData:data
                                                options:kNilOptions
                                                  error:nil];
            resultArray = AutocompleteData[@"result"];
            // BUG FIX: UI updates must happen on the main queue; this handler
            // runs on a background queue.
            dispatch_async(dispatch_get_main_queue(), ^{
                tbl_result.hidden = NO;
                [tbl_result reloadData];
            });
        }];
        [DownloadTask resume];
    }
}
The MCODE parameter error means you must specify your bundle ID (mcode) in the request URL. See the URL built in the autocomplete function above for an example.
I'm trying to use SFSpeechRecognizer but I don't have a way to test if I'm implementing it correctly, and since its a relatively new class i couldn't find a sample code (I don't know swift). Am I making any unforgivable mistakes/missing something ?
// Request speech-recognition authorization, then start a buffer-based
// recognition task with contextual vocabulary hints.
// BUG FIX: restored '@' sigils (the paste had replaced '@' with '#').
[SFSpeechRecognizer requestAuthorization:^(SFSpeechRecognizerAuthorizationStatus status) {
    if (status == SFSpeechRecognizerAuthorizationStatusAuthorized) {
        SFSpeechRecognizer* recognizer = [[SFSpeechRecognizer alloc] init];
        recognizer.delegate = self;
        SFSpeechAudioBufferRecognitionRequest* request =
            [[SFSpeechAudioBufferRecognitionRequest alloc] init];
        // Hint words the recognizer should favor.
        request.contextualStrings = @[@"data", @"bank", @"databank"];
        SFSpeechRecognitionTask* task = [recognizer
            recognitionTaskWithRequest:request
                         resultHandler:^(SFSpeechRecognitionResult* result, NSError* error) {
            SFTranscription* transcript = result.bestTranscription;
            NSLog(@"%@", transcript);
        }];
    }
}];
I'm trying too, but this code works for me. After all, SFSpeechRecognizer and SFSpeechAudioBufferRecognitionRequest are not the same, so I think (I haven't tested) you have to ask for different permissions (have you asked for permission to use the microphone and speech recognition before?). OK, here's the code:
//Available over iOS 10, only for maximum 1 minute, need internet connection; can be sourced from an audio recorded file or over the microphone
// Available on iOS 10+, only for a maximum of 1 minute, needs an internet
// connection; can be sourced from a recorded audio file or the microphone.
// BUG FIX: restored '@' string literals, and fixed the NSLog below which
// printed the literal "transcriptText" instead of the transcription.
NSLocale *local = [[NSLocale alloc] initWithLocaleIdentifier:@"es-MX"];
speechRecognizer = [[SFSpeechRecognizer alloc] initWithLocale:local];
NSString *soundFilePath = [myDir stringByAppendingPathComponent:@"/sound.m4a"];
NSURL *url = [[NSURL alloc] initFileURLWithPath:soundFilePath];
if (!speechRecognizer.isAvailable)
    NSLog(@"speechRecognizer is not available, maybe it has no internet connection");
SFSpeechURLRecognitionRequest *urlRequest =
    [[SFSpeechURLRecognitionRequest alloc] initWithURL:url];
urlRequest.shouldReportPartialResults = YES; // YES to animate writing
[speechRecognizer recognitionTaskWithRequest:urlRequest
                               resultHandler:^(SFSpeechRecognitionResult * _Nullable result,
                                               NSError * _Nullable error)
{
    NSString *transcriptText = result.bestTranscription.formattedString;
    if (!error)
    {
        // BUG FIX: log the variable, not the string literal.
        NSLog(@"%@", transcriptText);
    }
}];
I am using NSURLSession to download xml files and then I want to do different processing to this files, like parsing them:
// Downloads the XML feed at `url` and appends its parsed items to self.feeds.
// NOTE: the completion handler runs asynchronously on the session's delegate
// queue — callers must not read self.feeds immediately after this returns.
- (void)parseFeed:(NSURL *)url
{
    NSMutableURLRequest* request = [NSMutableURLRequest requestWithURL:url];
    NSURLSessionDataTask* task = [FeedSessionManager.sharedManager.session
        dataTaskWithRequest:request
          completionHandler:^(NSData* data, NSURLResponse* response, NSError* error)
    {
        // Robustness: don't hand nil data to the parser when the download fails.
        if (error != nil || data == nil)
        {
            return;
        }
        Parser* parser = [[Parser alloc] initWithData:data];
        [self.feeds addObjectsFromArray:[parser items]];
    }];
    [task resume];
}
Parser object will parse the xml file using NSXMLParser. The parseFeed:(NSURL*)url is called from the ViewController:
// BUG FIX: restored the '@' string literal (the paste had replaced '@' with '#').
Downloader* downloader = [[Downloader alloc] init];
[downloader parseFeed:[NSURL URLWithString:@"http://www.engadget.com/tag/features/rss.xml"]];
// NOTE(review): parseFeed: is asynchronous, so `feeds` is almost certainly
// still empty at this point — the read must be deferred until the download
// completes (e.g. via a completion block or delegate callback).
NSArray* items = [downloader feeds];
And this is how I create the NSURLSession object:
// Designated initializer: creates the NSURLSession this downloader uses,
// with self as its delegate and no explicit delegate queue.
- (id)init
{
    self = [super init];
    if (self != nil)
    {
        _session = [NSURLSession
            sessionWithConfiguration:FeedSessionConfiguration.defaultSessionConfiguration
                            delegate:self
                       delegateQueue:nil];
    }
    return self;
}
Of course this approach doesn't work for me. Inside parseFeed method I want to wait until all data is downloaded and processed. Only then I want to access the self.feeds array in the ViewController.
Can someone point me into the right direction into doing this ? Or maybe point me to a different approach ?
I have used ASIHTTPRequest but now no longer maintained but you can use AFHTTPClient's operation queue
// Serial download queue built on AFHTTPClient (AFNetworking 1.x).
// NOTE(review): `downloadRequest` is constructed elsewhere ("setup your
// requests as before") — this fragment does not show how it is built.
AFHTTPClient *client = [[AFHTTPClient alloc] initWithBaseURL:nil];
// Important if only downloading one file at a time
[client.operationQueue setMaxConcurrentOperationCount: 1];
NSArray *videoURLs; // An array of strings you want to download
for (NSString * videoURL in videoURLs) {
// …setup your requests as before
// Each enqueued operation runs in order because the queue is serial.
[client enqueueHTTPRequestOperation:downloadRequest];
}
I have a lot of troubles porting my sqlite code to the App Store due to the way sqlite executes transactions (using wal or journal files). Relevant part from Apple Documentation is:
Your app needs to be able to open or save multiple related files with the same name and different extensions (for example, to automatically open a subtitle file with the same name as a movie file, or to allow for a SQLite journal file). To gain access to that secondary file, create a class that conforms to the NSFilePresenter protocol. This object should provide the main file’s URL as its primaryPresentedItemURL property, and should provide the secondary file’s URL as its presentedItemURL property. After the user opens the main file, your file presenter object should call the addFilePresenter: class method on the NSFileCoordinator class to register itself.
Apple DTS provides me the following code:
// Opens (creating if necessary) the SQLite database at fileURL under file
// coordination, registering NSFilePresenters for the -wal/-shm/-journal/-conch
// sidecar files so the sandbox grants access to them.
- (void)openSQLiteFileAtURL:(NSURL *)fileURL {
    NSFileCoordinator *fc = [[NSFileCoordinator alloc] initWithFilePresenter:nil];
    [fc coordinateReadingItemAtURL:fileURL options:0 error:NULL byAccessor:^(NSURL *newURL) {
        sqlite3 *db = NULL;
        char *zErrMsg = 0;
        // Register presenters for the sidecar files before touching the db.
        [SQLiteRelatedItemPresenter addPresentersForURL:newURL];
        int rc = sqlite3_open_v2([[newURL path] fileSystemRepresentation], &db,
                                 SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE, NULL);
        NSLog(@"open %@ = %d", [newURL path], rc);  // BUG FIX: '%#' -> '%@'
        // BUG FIX: bail out if the open failed instead of executing SQL on a
        // dead handle. Per the SQLite docs, sqlite3_close must still be called
        // to release the partially-initialized handle.
        if (rc != SQLITE_OK) {
            sqlite3_close(db);
            return;
        }
        rc = sqlite3_exec(db, "CREATE TABLE foo (col1 INTEGER);", callback, 0, &zErrMsg);
        if (rc != SQLITE_OK) {
            NSLog(@"SQL error: %s\n", zErrMsg);
            sqlite3_free(zErrMsg);
        }
        // more sqlite code here
        sqlite3_close(db);
    }];
    return;
}
where SQLiteRelatedItemPresenter is:
// File presenter that exposes a SQLite sidecar file (-wal, -shm, -journal,
// .-conch) as secondary to the main database file, so sandboxed access to the
// main file extends to the sidecar.
// BUG FIX: restored '@interface'/'@implementation' (the paste replaced '@'
// with '#') and the '@end' that must close the interface before the
// implementation begins.
@interface SQLiteRelatedItemPresenter : NSObject <NSFilePresenter>
{
    NSURL *primaryPresentedItemURL;  // the main database file
    NSURL *presentedItemURL;         // the sidecar file this presenter covers
}
@end

// Shared queue on which presenter callbacks are delivered.
static NSOperationQueue *_presentedItemOperationQueue;

@implementation SQLiteRelatedItemPresenter
// Lazily create the shared callback queue the first time the class is used.
// BUG FIX: removed the explicit [super initialize] call — the runtime invokes
// +initialize automatically; calling it by hand is incorrect and can trigger
// a second, spurious initialization of the superclass.
+ (void)initialize {
    if (_presentedItemOperationQueue == nil) {
        _presentedItemOperationQueue = [[NSOperationQueue alloc] init];
    }
}
// Registers file presenters for every sidecar file SQLite may create next to
// `databaseURL`: the WAL, shared-memory, rollback-journal and conch files.
// BUG FIX: restored '@' string literals (the paste had replaced '@' with '#').
+ (void)addPresentersForURL:(NSURL *)databaseURL {
    SQLiteRelatedItemPresenter *p1, *p2, *p3, *p4;
    p1 = [[SQLiteRelatedItemPresenter alloc] initWithFileURL:databaseURL prefix:nil suffix:@"-wal"];
    [NSFileCoordinator addFilePresenter:p1];
    p2 = [[SQLiteRelatedItemPresenter alloc] initWithFileURL:databaseURL prefix:nil suffix:@"-shm"];
    [NSFileCoordinator addFilePresenter:p2];
    p3 = [[SQLiteRelatedItemPresenter alloc] initWithFileURL:databaseURL prefix:nil suffix:@"-journal"];
    [NSFileCoordinator addFilePresenter:p3];
    p4 = [[SQLiteRelatedItemPresenter alloc] initWithFileURL:databaseURL prefix:@"." suffix:@"-conch"];
    [NSFileCoordinator addFilePresenter:p4];
    // +filePresenters will only return once the asynchronously added file
    // presenters are done being registered.
    [NSFileCoordinator filePresenters];
}
// Designated initializer. Derives the sidecar URL from the main file's URL by
// optionally prepending `prefix` to the file name and appending `suffix` to
// the full path (e.g. prefix "." + suffix "-conch" -> ".db.sqlite-conch").
// BUG FIX: added the missing explicit return type (was implicit id).
- (instancetype)initWithFileURL:(NSURL *)fileURL prefix:(NSString *)prefix suffix:(NSString *)suffix {
    self = [super init];
    if (self) {
        primaryPresentedItemURL = fileURL;
        NSString *path = [fileURL path];
        if (prefix) {
            NSString *name = [path lastPathComponent];
            NSString *dir = [path stringByDeletingLastPathComponent];
            path = [dir stringByAppendingPathComponent:[prefix stringByAppendingString:name]];
        }
        if (suffix) {
            path = [path stringByAppendingString:suffix];
        }
        presentedItemURL = [NSURL fileURLWithPath:path];
    }
    return self;
}
// NSFilePresenter: the sidecar file this presenter covers.
- (NSURL *)presentedItemURL {
return presentedItemURL;
}
// NSFilePresenter: queue on which presenter callbacks are delivered.
- (NSOperationQueue *)presentedItemOperationQueue {
return _presentedItemOperationQueue;
}
// NSFilePresenter: the main database file the sidecar belongs to.
- (NSURL *)primaryPresentedItemURL {
return primaryPresentedItemURL;
}
This specific example works fine for sqlite operations all executed inside the openSQLiteFileAtURL method. I am having a lot of trouble if I try to divide the logic into sub-methods, for example:
- openSQLiteFileAtURL:(NSURL *)databaseURL; // just open the db
- executeSQLStatement:(NSString *)sql; //perform read/write operations into the sqlite db previously opened in the openSQLiteFileAtURL method
- closeSQLite(); //close db method.
Seems like that addPresentersForURL should be called only once (in openSQLiteFileAtURL) but I wasn't able to have a working app due to sandbox privileges errors... any help?
I have written a custom NSURLProtocol (called "memory:") that allows me to fetch stored NSData items from a NSDictionary based on a name. For example, this code registers the NSURLProtocol class and adds some data:
[VPMemoryURLProtocol register];
[VPMemoryURLProtocol addData:data withName:#"video"];
This allows me to refer to the NSData via a url like "memory://video".
Below is my custom NSURLProtocol implementation:
// Name -> NSData backing store for the "memory:" URL scheme.
NSMutableDictionary* gMemoryMap = nil;

// BUG FIX: restored '@implementation' (the paste replaced '@' with '#').
@implementation VPMemoryURLProtocol
{
}
// Registers this protocol class with the URL loading system, exactly once.
// Uses dispatch_once instead of the original static-BOOL guard, which was not
// thread-safe if +register could be called from multiple threads.
+ (void)register
{
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        [NSURLProtocol registerClass:[VPMemoryURLProtocol class]];
    });
}
// Stores `data` under `name`, creating the backing dictionary on first use,
// so it can later be fetched via a "memory://<name>" URL.
+ (void)addData:(NSData *)data withName:(NSString *)name
{
    if (gMemoryMap == nil)
    {
        gMemoryMap = [[NSMutableDictionary alloc] init];
    }
    gMemoryMap[name] = data;
}
// Claims any request whose scheme is "memory" (case-insensitive).
// BUG FIX: restored '@' string literals (the paste had replaced '@' with '#').
+ (BOOL)canInitWithRequest:(NSURLRequest *)request
{
    NSLog(@"URL: %@, Scheme: %@",
          [[request URL] absoluteString],
          [[request URL] scheme]);
    NSString* theScheme = [[request URL] scheme];
    return [theScheme caseInsensitiveCompare:@"memory"] == NSOrderedSame;
}
// No canonicalization needed for memory: URLs — the request is already canonical.
+ (NSURLRequest *)canonicalRequestForRequest:(NSURLRequest *)request
{
return request;
}
// Serves the stored NSData for this request back to the client as a single
// video/mp4 response.
// BUG FIX: restored '@' string literals (the paste had replaced '@' with '#').
- (void)startLoading
{
    // NOTE(review): for a URL like "memory://video", -path returns "" and the
    // name lands in -host — verify which component the lookup should use.
    NSString* name = [[self.request URL] path];
    NSData* data = gMemoryMap[name];
    id<NSURLProtocolClient> client = [self client];
    // Robustness: report a failure instead of delivering a nil data payload.
    if (data == nil)
    {
        NSError* error = [NSError errorWithDomain:NSURLErrorDomain
                                             code:NSURLErrorResourceUnavailable
                                         userInfo:nil];
        [client URLProtocol:self didFailWithError:error];
        return;
    }
    NSURLResponse* response = [[NSURLResponse alloc] initWithURL:[self.request URL]
                                                        MIMEType:@"video/mp4"
                                           expectedContentLength:-1
                                                textEncodingName:nil];
    [client URLProtocol:self didReceiveResponse:response
     cacheStoragePolicy:NSURLCacheStorageNotAllowed];
    [client URLProtocol:self didLoadData:data];
    [client URLProtocolDidFinishLoading:self];
}
// Nothing to cancel: startLoading delivers all data synchronously.
- (void)stopLoading
{
}
I am not sure whether this code works or not but that is not what I have a problem with. Despite registering the custom protocol, canInitWithRequest: is never called when I try to use the URL in this code:
// Attempt to snapshot the first frame of the in-memory video via the custom
// "memory:" protocol.
// BUG FIX: restored '@' string literals (the paste had replaced '@' with '#').
NSURL* url = [NSURL URLWithString:@"memory://video"];
AVURLAsset* asset = [[AVURLAsset alloc] initWithURL:url options:nil];
AVAssetImageGenerator* imageGen = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
CMTime time = CMTimeMakeWithSeconds(0, 600);
NSError* error;
CMTime actualTime;
CGImageRef image = [imageGen copyCGImageAtTime:time
                                    actualTime:&actualTime
                                         error:&error];
// Robustness: copyCGImageAtTime returns NULL on failure — guard before wrapping.
UIImage* uiImage = (image != NULL) ? [UIImage imageWithCGImage:image] : nil;
CGImageRelease(image);  // CGImageRelease is NULL-safe
Thanks
Certainly the underpinnings used to only support particular URL schemes— as an eBook developer I've seen this happen for any media type loaded through a URL such as epub:// or zip://. In those cases, on iOS 5.x and earlier, tracing through the relevant code would wind up in a QuickTime method which compared the URL scheme against a small number of supported ones: file, http, https, ftp and whatever it is that iTunes uses-- I forget what it's called.
In iOS 6+ there is a new API in AVFoundation, however, which is designed to help here. While I've not used it personally, this is how it should work:
// iOS 6+ approach: attach an AVAssetResourceLoaderDelegate so AVFoundation
// routes loading of the custom "memory:" scheme through your own code.
// BUG FIX: restored the '@' string literal (the paste had replaced '@' with '#').
NSURL* url = [NSURL URLWithString:@"memory://video"];
AVURLAsset* asset = [[AVURLAsset alloc] initWithURL:url options:nil];
////////////////////////////////////////////////////////////////
// NEW CODE START
AVAssetResourceLoader* loader = [asset resourceLoader];
id<AVAssetResourceLoaderDelegate> delegate =
    [SomeClass newInstanceWithNSURLProtocolClass:[VPMemoryURLProtocol class]];
[loader setDelegate:delegate queue:some_dispatch_queue];
// NEW CODE END
////////////////////////////////////////////////////////////////
AVAssetImageGenerator* imageGen = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
CMTime time = CMTimeMakeWithSeconds(0, 600);
With this in place, you need only implement the AVAssetResourceLoader protocol somewhere, which is very simple as it contains only one method. Since you already have an NSURLProtocol implementation, all your real work is done and you can simply hand off the real work to the Cocoa loading system or your protocol class directly.
Again, I'll point out that I've yet to actually make use of this, so the above is entirely theoretical.