I am having trouble playing a queue of local (saved-on-device) songs. I'm using this CocoaPod - https://github.com/tumtumtum/StreamingKit. The first item in my queue starts to play one more time after it finishes.
It has stopReason status NONE
/// StreamingKit delegate callback: a queue item finished playing.
/// Reacts solely to `stopReason` — the reasons are mutually exclusive,
/// so a single switch replaces the original chain of independent ifs.
func audioPlayer(_ audioPlayer: STKAudioPlayer, didFinishPlayingQueueItemId queueItemId: NSObject, with stopReason: STKAudioPlayerStopReason, andProgress progress: Double, andDuration duration: Double) {
    switch stopReason {
    case .eof, .pendingNext:
        // Track ended normally — advance to the next queued item.
        checkNextTrack()
    case .none:
        print("NONE")
    case .error, .userAction, .disposed:
        // Fatal or user-initiated stop — tear the player down.
        stop()
        resetAudioPlayer()
    default:
        break
    }
}
The other elements have the statuses .eof or .pendingNext, which is the correct behaviour. What should I do in this case? All remote URLs play correctly.
Thnx!
UPDATE:
Queue creating
/// Wraps each `Song` in an `AudioItem` tagged with its queue position,
/// then hands the list to `newQueue(queue:index:)`.
/// Out-of-range start indices are silently ignored, as before.
internal func playWithQueue(queue: [Song], index: Int = 0) {
    let items = queue.enumerated().map { position, song in
        AudioItem(audioItem: song, audioIndex: position)
    }
    guard items.indices.contains(index) else { return }
    newQueue(queue: items, index: index)
}
/// Replaces the player's queue and starts playback at `index`.
///
/// Bug fix: the original loop started at offset 0, so the track already
/// handed to `play(file:)` was enqueued *again* — which is why the first
/// item played one more time after finishing (with stopReason .none).
/// The loop now starts at offset 1. The force-unwrap on `songRealUrl`
/// is also removed so a missing URL skips the item instead of crashing.
func newQueue(queue: [AudioItem], index: Int = 0) {
    self.queue = queue
    audioPlayer.clearQueue()
    if let currentSong = self.queue[index].audioItem {
        play(file: currentSong)
        // Offset 0 is the currently playing track — don't re-enqueue it.
        for offset in 1..<queue.count {
            if let url = queue[(index + offset) % queue.count].audioItem?.songRealUrl {
                audioPlayer.queue(url)
            }
        }
    }
    currentIndex = index
}
I had the same problem and I solved it!
You need to do something like this:
// STKAudioPlayer delegate: a data source finished buffering.
// Fix: the pasted code had '#' where Obj-C uses '@' (string literals and
// %@ format specifiers) — restored to valid syntax.
- (void)audioPlayer:(STKAudioPlayer *)audioPlayer didFinishBufferingSourceWithQueueItemId:(NSObject *)queueItemId
{
    SampleQueueId *queueId = (SampleQueueId *)queueItemId;
    NSLog(@"Requeuing: %@", [queueId.url description]);
    // Uncomment to loop the same source indefinitely:
    // [self.audioPlayer queueDataSource:[STKAudioPlayer dataSourceFromURL:queueId.url]
    //                   withQueueItemId:[[SampleQueueId alloc] initWithUrl:queueId.url andCount:queueId.count + 1]];
}
After that, you need to implement the stop handling:
// Shared teardown for "reached end of file" and "user stopped playback".
// The two branches in the original were byte-for-byte identical, so the
// logic lives in one private helper: stops whichever player is active and
// resets the play icon (main frame, or the comment row in the table).
- (void)stopCurrentAudioAndRefreshUI
{
    if (self.isMainAudioPlay) {
        [self.mainAudioPlayer setState:STKAudioPlayerStateStopped];
        [self.mainAudioPlayer stop];
        self.playMainAudioFrame.image = [UIImage imageNamed:PlayIconMain];
        return;
    }
    [self.audioPlayer setState:STKAudioPlayerStateStopped];
    [self.audioPlayer stop];
    if (self.comments.count > 0) {
        UITableView *tableView = self.tabelView;
        RCommentsModel *newsStop = (self.comments)[self.indexpathForStop.row];
        newsStop.commentPlayIcon = PlayIconMain;
        [self.comments replaceObjectAtIndex:self.indexpathForStop.row withObject:newsStop];
        [tableView beginUpdates];
        [tableView reloadRowsAtIndexPaths:@[self.indexpathForStop] withRowAnimation:UITableViewRowAnimationNone];
        [tableView endUpdates];
    }
}

// STKAudioPlayer delegate: an item finished playing.
// (The pasted code's '#' characters were mangled '@' — restored.)
- (void)audioPlayer:(STKAudioPlayer *)audioPlayer didFinishPlayingQueueItemId:(NSObject *)queueItemId withReason:(STKAudioPlayerStopReason)stopReason andProgress:(double)progress andDuration:(double)duration
{
    SampleQueueId *queueId = (SampleQueueId *)queueItemId;
    NSLog(@"Finished: %@", [queueId.url description]);
    if (stopReason == STKAudioPlayerStopReasonEof ||
        stopReason == STKAudioPlayerStopReasonUserAction) {
        [self stopCurrentAudioAndRefreshUI];
    } else if (stopReason == STKAudioPlayerStopReasonNone) {
        // Intentionally empty in the original — no action on a "None" stop.
    }
}
Hope it helps! :) I spent a lot of time fixing it — and it worked!
Related
I am trying to do a slide show like animation on the home page of my app. An image appears in the proper UI View, but it never transitions to a different photo.
EDIT: Trying to set the image before transition view:
- (void)viewDidLoad {
    [super viewDidLoad];
    // Set a placeholder image before the first transition so the view is
    // never blank while data loads. ('#' restored to '@' — paste artifact.)
    _slideShow.image = [UIImage imageNamed:@"Slide Show"];
    [self fetchSpecies];
    [self beginSlideShow];
}
// Cross-dissolves to a random image, then re-arms itself from the
// completion block. Fix for the reported symptom ("never transitions to a
// different photo"): the original ran the transition exactly once and the
// completion block did nothing, so the slideshow never advanced.
// ('#' restored to '@' throughout — paste artifact.)
- (void) beginSlideShow{
    NSLog(@"Called");
    if ([imageUrls_ count] < 1) {
        NSLog(@"EMPTY");
        return;
    }
    [UIView transitionWithView:_slideShow
                      duration:0.2
                       options:UIViewAnimationOptionTransitionCrossDissolve
                    animations:^{
        int index = arc4random() % [imageUrls_ count];
        _slideShow.image = [self getImage:index];
        // Some image files may be missing on disk — keep picking until one
        // loads. NOTE(review): this loops forever if *no* file loads; the
        // empty-array guard above does not cover that case.
        while (_slideShow.image == nil) {
            index = arc4random() % [imageUrls_ count];
            _slideShow.image = [self getImage:index];
        }
    } completion:^(BOOL finished) {
        if (finished) {
            // Schedule the next slide so the show keeps cycling.
            [self performSelector:@selector(beginSlideShow) withObject:nil afterDelay:2.0];
        }
    }];
}
// Fetches every Species entity from Core Data and builds two parallel
// arrays: local thumbnail paths (leaf, flower, fruit per species) and the
// matching display names, both consumed by the slideshow.
// NOTE: this file uses manual retain/release (MRC) — the retains stay.
// ('#' restored to '@' throughout — paste artifact.)
- (void) fetchSpecies{
    NSFetchRequest *request = [NSFetchRequest fetchRequestWithEntityName:@"Species"];
    NSError *error = nil;
    NSManagedObjectContext *context = [(LeafletAppDelegate *)[[UIApplication sharedApplication] delegate] managedObjectContext];
    NSArray *species = [context executeFetchRequest:request error:&error];
    // A nil return is the failure signal; `error` may be stale on success,
    // so only the return value is checked (fixes the `|| error` test).
    if (!species) {
        NSLog(@"fetchSpecies failed: %@", error);
    }
    // Fast enumeration over nil is a no-op, so falling through is safe.
    imageUrls_ = [[NSMutableArray arrayWithCapacity:100] retain];
    imageDescs_ = [[NSMutableArray arrayWithCapacity:100] retain];
    for (Species *s in species) {
        NSString *name = [NSString stringWithFormat:@"%@", [s commonNameFirstLast]];
        // Each species contributes up to three thumbnails.
        NSString *url = [s.ExampleImageLeaf pathForLocalImageUsingThumbnail:YES];
        if (url) {
            [imageUrls_ addObject:url];
            [imageDescs_ addObject:name];
        }
        url = [s.ExampleImageFlower pathForLocalImageUsingThumbnail:YES];
        if (url) {
            [imageUrls_ addObject:url];
            [imageDescs_ addObject:name];
        }
        url = [s.ExampleImageFruit pathForLocalImageUsingThumbnail:YES];
        if (url) {
            [imageUrls_ addObject:url];
            [imageDescs_ addObject:name];
        }
    }
}
// Loads the image stored at imageUrls_[index] from disk.
// Returns nil when the file is missing or unreadable — callers (the
// slideshow's retry loop) rely on that nil to pick another index.
- (UIImage *) getImage:(NSUInteger)index
{
    // NSLog(@"empty");
    NSString *url = [imageUrls_ objectAtIndex:index];
    return [UIImage imageWithContentsOfFile:url];
}
I checked, and imageUrls has 660 elements, so it's not an issue of not having enough photos. I would be very grateful for any insight/suggestions — I am very new to iOS.
I have taken two images for demo purposes, and I change the view's tag on the swipe action to switch the image based on the tag value. The image view transitions from one photo to another. The code is given below, where slideView is a UIImageView.
Swift3
Inside viewDidLoad method:
slideView.image = UIImage(named: "image1")
slideView.tag = 0
On Swipe action called the below method:
/// Cross-fades `slideView` between "image1" and "image2", choosing the
/// next image from the view's tag. Called from the swipe handler.
func beginSlideShow() {
    UIView.transition(with: slideView,
                      duration: 1.0,
                      options: [.transitionCrossDissolve, .curveEaseOut],
                      animations: {
                          // Tag 1 means image1 is showing → fade to image2.
                          let nextName = self.slideView.tag == 1 ? "image2" : "image1"
                          self.slideView.image = UIImage(named: nextName)
                      },
                      completion: { finished in
                          guard finished else { return }
                          NSLog("animation finished")
                      })
}
Objective-C
// Objective-C equivalent of the Swift version above: cross-dissolves
// _slideView between two bundled images based on its tag.
// ('#' restored to '@' — paste artifact.)
- (void) beginSlideShow {
    [UIView transitionWithView:_slideView
                      duration:1.0
                       options:UIViewAnimationOptionTransitionCrossDissolve
                    animations:^{
        // Tag 1 means image1 is currently showing → fade to image2.
        if (_slideView.tag == 1) {
            _slideView.image = [UIImage imageNamed:@"image2"];
        } else {
            _slideView.image = [UIImage imageNamed:@"image1"];
        }
    } completion:^(BOOL finished) {
        if (finished) {
            NSLog(@"animation finished");
        }
    }];
}
// Bounds-checked element access for an NSArray category.
// BUG FIX: the original called objectAtIndexSafe: — itself — for every
// in-bounds index, causing unbounded recursion and a stack overflow.
// The in-bounds path must call objectAtIndex:. ('#' restored to '@'.)
- (id)objectAtIndexSafe:(NSUInteger)index
{
    if (index < [self count]) {
        return [self objectAtIndex:index];
    } else {
        // Out of bounds: report once per call site and return nil.
        TTSafeKitAssert(NO, @"unsafe");
        return nil;
    }
}
TTSafeKitAssert sends dumped thread info (using PLCrashReporter) to a server, but when objectAtIndexSafe: is called inside a for-in loop, TTSafeKitAssert fires many times. How can I make it send only once in this situation?
If it is a category, it should be:
// Corrected category method: returns the element at `index`, or reports
// via TTSafeKitAssert and returns nil when out of bounds.
// ('#' restored to '@' — paste artifact.)
- (id)objectAtIndexSafe:(NSUInteger)index
{
    // Early-out on the failure path keeps the happy path unindented.
    if (index >= [self count]) {
        TTSafeKitAssert(NO, @"unsafe");
        return nil;
    }
    return [self objectAtIndex:index];
}
I'm plotting a real-time ECG using the Core Plot library. On an iPad Air the performance is okay, but when I tried it on an iPad mini there is a delay in the plotting. I have tried collapsesLayers and the approach in this link as well; neither solved my problem. Can anyone suggest another solution?
My code is below:
// Timer callback: advances the real-time ECG display by up to `plotcount`
// samples per tick. Five channels are handled in parallel, each with its
// own gate flag (g1..g4, spo2), plot (thePlot..thePlot4), sample queue
// (Qrrch0..Qrrch3, Qrrspo2), ring buffer (arrPlot..arrPlot4) with write
// cursor (arrayIndex..arrayIndex4), and counters (currentIndex..4 for X
// positions, currentIndex5..9 for total samples seen).
// When every queue is empty, datacha refills them from the batch buffers.
// WARNING: the per-channel stanzas are order-dependent (delete old point →
// store sample → advance cursor → insert new point); do not reorder.
-(void)newData:(NSTimer *)theTimer
{
for (int i =0;i<plotcount;i++){
// Only advance while at least one channel still has queued samples.
if([Qrrch0 count]>0 || [Qrrch1 count]>0 || [Qrrch2 count]>0 || [Qrrch3 count]>0 || [Qrrspo2 count]>0 ){
// --- Advance the X position of each enabled channel. ---
if(g1==1 && thePlot){
currentIndex ++;
}
if(g2==1 && thePlot1){
currentIndex1 ++;
}
if(g3==1 && thePlot2){
currentIndex2 ++;
}
if(g4==1 && thePlot3){
currentIndex3 ++;
}
if(spo2==1 && thePlot4){
currentIndex4 ++;
}
// --- Wrap each ring-buffer write cursor at its channel capacity. ---
if(arrayIndex>=kchannel1-1)
{
arrayIndex=0;
}
if(arrayIndex1>=kchannel2-1)
{
arrayIndex1=0;
}
// NOTE(review): duplicate of the arrayIndex/kchannel1 check above —
// possibly a copy-paste slip for one of the other cursors; confirm.
if(arrayIndex>=kchannel1-1)
{
arrayIndex=0;
}
if(arrayIndex2>=kchannel3-1)
{
arrayIndex2=0;
}
if(arrayIndex3>=kchannel4-1)
{
arrayIndex3=0;
}
if(arrayIndex4>=kchannel5-1)
{
arrayIndex4=0;
}
// --- Channel 1: drop the oldest on-screen point (once the buffer is
// full), then store the next sample, repeating the last value when the
// queue is momentarily empty. ---
if(g1==1 && thePlot){
currentIndex5++;
if(currentIndex5>=kchannel1)
{
if(arrayIndex==0)
{
// NOTE(review): a range of (0, 0) reloads zero points — a length
// of 1 was probably intended; confirm against the Core Plot docs.
[thePlot reloadDataInIndexRange:NSMakeRange(arrayIndex, arrayIndex)];
}else{
[thePlot deleteDataInIndexRange:NSMakeRange(arrayIndex, 1)];
}
}
if([Qrrch0 count]!=0)
{
arrPlot[arrayIndex]=[[Qrrch0 objectAtIndex:0] integerValue];
lastPlot0=[Qrrch0 objectAtIndex:0];
}else{
// Queue empty: hold the last sample to avoid a visual gap.
arrPlot[arrayIndex]=[lastPlot0 integerValue];
}
arrayIndex++;
}
// --- Channel 2: same pattern as channel 1. ---
if(g2==1 && thePlot1){
currentIndex6++;
if(currentIndex6>=kchannel2)
{
if(arrayIndex1==0)
{
[thePlot1 reloadDataInIndexRange:NSMakeRange(arrayIndex1, arrayIndex1)];
}else{
[thePlot1 deleteDataInIndexRange:NSMakeRange(arrayIndex1, 1)];
}
}
if([Qrrch1 count]!=0)
{
arrPlot1[arrayIndex1]=[[Qrrch1 objectAtIndex:0] integerValue];
lastPlot1=[Qrrch1 objectAtIndex:0];
}else{
arrPlot1[arrayIndex1]=[lastPlot1 integerValue];
}
arrayIndex1++;
}
// --- Channel 3: same pattern. ---
if(g3==1 && thePlot2){
currentIndex7++;
if(currentIndex7>=kchannel3)
{
if(arrayIndex2==0)
{
[thePlot2 reloadDataInIndexRange:NSMakeRange(arrayIndex2, arrayIndex2)];
}else{
[thePlot2 deleteDataInIndexRange:NSMakeRange(arrayIndex2, 1)];
}
}
if([Qrrch2 count]!=0)
{
arrPlot2[arrayIndex2]=[[Qrrch2 objectAtIndex:0] integerValue];
lastPlot2=[Qrrch2 objectAtIndex:0];
}else{
arrPlot2[arrayIndex2]=[lastPlot2 integerValue];
}
arrayIndex2++;
}
// --- Channel 4: same pattern. ---
if(g4==1 && thePlot3){
currentIndex8++;
if(currentIndex8>=kchannel4)
{
if(arrayIndex3==0)
{
[thePlot3 reloadDataInIndexRange:NSMakeRange(arrayIndex3, arrayIndex3)];
}else{
[thePlot3 deleteDataInIndexRange:NSMakeRange(arrayIndex3, 1)];
}
}
if([Qrrch3 count]!=0)
{
arrPlot3[arrayIndex3]=[[Qrrch3 objectAtIndex:0] integerValue];
lastPlot3=[Qrrch3 objectAtIndex:0];
}else{
arrPlot3[arrayIndex3]=[lastPlot3 integerValue];
}
arrayIndex3++;
}
// --- SpO2 channel: same pattern. ---
if(spo2==1 && thePlot4){
currentIndex9++;
if(currentIndex9>=kchannel5)
{
if(arrayIndex4==0)
{
[thePlot4 reloadDataInIndexRange:NSMakeRange(arrayIndex4, arrayIndex4)];
}else{
[thePlot4 deleteDataInIndexRange:NSMakeRange(arrayIndex4, 1)];
}
}
if([Qrrspo2 count]!=0)
{
arrPlot4[arrayIndex4]=[[Qrrspo2 objectAtIndex:0] integerValue];
lastPlot4=[Qrrspo2 objectAtIndex:0];
}else{
arrPlot4[arrayIndex4]=[lastPlot4 integerValue];
}
arrayIndex4++;
}
// --- Consume the sample just drawn, wrap the X counter at the channel
// width, and tell the plot a new point exists. ---
if(g1==1 && thePlot){
if([Qrrch0 count]!=0)
{
[Qrrch0 removeObjectAtIndex:0];
}
if(currentIndex>=kchannel1)
{
currentIndex=1;
}
[thePlot insertDataAtIndex:currentIndex-1 numberOfRecords:1];
}
if(g2==1 && thePlot1){
if([Qrrch1 count]!=0)
{
[Qrrch1 removeObjectAtIndex:0];
}
if(currentIndex1>=kchannel2)
{
currentIndex1=1;
}
[thePlot1 insertDataAtIndex:currentIndex1-1 numberOfRecords:1];
}
if(g3==1 && thePlot2){
if([Qrrch2 count]!=0)
{
[Qrrch2 removeObjectAtIndex:0];
}
if(currentIndex2>=kchannel3)
{
currentIndex2=1;
}
[thePlot2 insertDataAtIndex:currentIndex2-1 numberOfRecords:1];
}
if(g4==1 && thePlot3){
if([Qrrch3 count]!=0)
{
[Qrrch3 removeObjectAtIndex:0];
}
if(currentIndex3>=kchannel4)
{
currentIndex3=1;
}
[thePlot3 insertDataAtIndex:currentIndex3-1 numberOfRecords:1];
}
if(spo2==1 && thePlot4){
if([Qrrspo2 count]!=0)
{
[Qrrspo2 removeObjectAtIndex:0];
}
if(currentIndex4>=kchannel5)
{
currentIndex4=1;
}
[thePlot4 insertDataAtIndex:currentIndex4-1 numberOfRecords:1];
}
}
else
{
// All queues empty: refill them from the batched FinalArray buffers.
[self datacha];
}
}
}
// Moves the next pending batch from `pending` into the live sample queue.
// `enabled` mirrors the per-channel "gN == 1 && plot" gate evaluated by the
// caller (a C function cannot read the ivars directly).
static void ECGDrainNextBatch(NSMutableArray *pending, NSMutableArray *queue, BOOL enabled)
{
    if ([pending count] == 0 || !enabled) {
        return;
    }
    // The original alloc'd an intermediate copy of the batch and (under
    // MRC) leaked it; appending the stored batch directly yields the same
    // queue contents without the leak.
    [queue addObjectsFromArray:[pending objectAtIndex:0]];
    [pending removeObjectAtIndex:0];
}

// Refills each channel's live sample queue (QrrchN / Qrrspo2) from its
// batch buffer (FinalArrayN). The five stanzas in the original were
// identical copy-paste; one helper handles them all.
-(void) datacha{
    ECGDrainNextBatch(FinalArray,  Qrrch0,  (g1 == 1 && thePlot));
    ECGDrainNextBatch(FinalArray1, Qrrch1,  (g2 == 1 && thePlot1));
    ECGDrainNextBatch(FinalArray2, Qrrch2,  (g3 == 1 && thePlot2));
    ECGDrainNextBatch(FinalArray3, Qrrch3,  (g4 == 1 && thePlot3));
    ECGDrainNextBatch(FinalArray4, Qrrspo2, (spo2 == 1 && thePlot4));
}
#pragma mark -
#pragma mark Plot Data Source Methods
// CPTPlotDataSource: each channel reports its ring-buffer capacity
// (kchannelN) once full, and the running sample count (currentIndex5..9)
// while the buffer is still filling. Unknown plots report zero records.
-(NSUInteger)numberOfRecordsForPlot:(CPTPlot *)plot
{
    if (plot == thePlot)  { return (currentIndex5 >= kchannel1) ? kchannel1 : currentIndex5; }
    if (plot == thePlot1) { return (currentIndex6 >= kchannel2) ? kchannel2 : currentIndex6; }
    if (plot == thePlot2) { return (currentIndex7 >= kchannel3) ? kchannel3 : currentIndex7; }
    if (plot == thePlot3) { return (currentIndex8 >= kchannel4) ? kchannel4 : currentIndex8; }
    if (plot == thePlot4) { return (currentIndex9 >= kchannel5) ? kchannel5 : currentIndex9; }
    return 0;
}
// CPTPlotDataSource: returns the X value (the channel's current write
// position) or Y value (the ring-buffer sample at `idx`) for a data point.
// ('#' restored to '@' — paste artifact.)
-(double)doubleForPlot:(CPTPlot *)plot field:(NSUInteger)fieldEnum recordIndex:(NSUInteger)idx{
    // Fix: `num` was uninitialized — an unknown plot or field returned
    // stack garbage. Zero is now the explicit fallback.
    double num = 0;
    // NOTE(review): updating a UILabel here runs once per *data point* per
    // redraw and is a likely contributor to the iPad mini slowdown — move
    // this to wherever countofpktloss actually changes.
    self.losscount.text = [NSString stringWithFormat:@"%d", countofpktloss];
    switch ( fieldEnum ) {
        case CPTScatterPlotFieldX:
            if (plot == thePlot)       { num = currentIndex;  }
            else if (plot == thePlot1) { num = currentIndex1; }
            else if (plot == thePlot2) { num = currentIndex2; }
            else if (plot == thePlot3) { num = currentIndex3; }
            else if (plot == thePlot4) { num = currentIndex4; }
            break;
        case CPTScatterPlotFieldY:
            if (plot == thePlot)       { num = arrPlot[idx];  }
            else if (plot == thePlot1) { num = arrPlot1[idx]; }
            else if (plot == thePlot2) { num = arrPlot2[idx]; }
            else if (plot == thePlot3) { num = arrPlot3[idx]; }
            else if (plot == thePlot4) { num = arrPlot4[idx]; }
            break;
        default:
            break;
    }
    return num;
}
The collapsesLayers property is there to help save memory for a static graph. Using it on a graph that updates frequently makes the performance worse since it requires the graph to redraw everything, not just the part that changed, e.g., the plot.
Added comments
Make sure the timer stops when you expect it to. Otherwise, it will keep adding points to the plots.
How often does the timer fire to add points to the plots? There's no point in updating the plots more than 60 times per second and you may need to reduce that further on older devices like the iPad 2 to get good performance with lots of data points. You can add more than one point in each update if needed.
Check the ranges used to reload data points. You're passing the same number as the location and length of the range. I suspect you mean to use a length of one (1) for each range. If so, you're reloading more data than required on each pass.
Have you tried looking at other chart libraries? SciChart provides an iOS chart tailored for realtime ECG applications. It's extremely fast and doesn't have the performance problems of Core plot.
See iOS Chart Performance Comparison
and SciChart iOS ECG demo
Disclosure, I am the tech lead on the scichart projects
basically I'm using firebase to query a user's 'status' property and doing so in a do/while loop.
If the status property is free then i want to break the loop and continue with the rest of the method. If the status property is not free then I want to query firebase again for a new user until a free user is found.
My firebase code works fine outside the loop but doesn't seem to be called inside of it. Here is the loop:
__block uint32_t rando;
self.freedom = @"about to check";
// NOTE(review): checkIfFree: is asynchronous, so this do/while spins and
// re-issues requests before any Firebase callback can set self.freedom —
// that is why the callback never appears to run inside the loop. Drive the
// retry from inside the completion block instead of looping here.
// ('#' restored to '@' throughout — paste artifact.)
do {
    [self checkIfFree:^(BOOL finished) {
        if (!finished) {
            NSLog(@"not finished the checking yet");
            return;
        }
        if ([self.freedom isEqualToString:@"free"]) {
            // Desired state reached — nothing more to do in this callback.
        } else if ([self.freedom isEqualToString:@"matched"]) {
            // Pick a different random user than both previous choices.
            do {
                rando = arc4random_uniform(arraycount);
            } while (rando == randomIndex && rando == [self.randString intValue]);
            self.randString = [NSString stringWithFormat:@"%u", rando];
            [users removeAllObjects];
            [users addObject:[usersArray objectAtIndex:rando]];
            self.freeUser = [users objectAtIndex:0];
            // The re-check belongs here, invoked asynchronously.
        } else {
            NSLog(@"error!");
        }
    }];
} while (![self.freedom isEqual:@"free"]);
And here's my firebase code:
// Observes the candidate user's "status" value in Firebase and reports the
// result through `compblock`, recording it in self.freedom / userIsFree.
// NOTE(review): observeEventType:FEventTypeValue keeps firing on every
// change until the observer is removed, so compblock can run repeatedly.
// ('#' restored to '@' throughout — paste artifact.)
-(void)checkIfFree:(myCompletion) compblock{
    self.freeUserFB = [[Firebase alloc] initWithUrl:[NSString stringWithFormat:@"https://skipchat.firebaseio.com/users/%@", self.freeUser.objectId]];
    [self.freeUserFB observeEventType:FEventTypeValue withBlock:^(FDataSnapshot *snapshot)
    {
        self.otherStatus = snapshot.value[@"status"];
        NSLog(@"snapshot info %@", snapshot.value);
        BOOL isFree = [self.otherStatus isEqualToString:@"free"];
        self.userIsFree = isFree;
        self.freedom = isFree ? @"free" : @"matched";
        compblock(YES);
    }];
}
Thanks!
I am not sure I understand correctly your question.
If you want to run your completion code again till some condition is matched (in this case [self.freedom isEqualToString:#"free"]) you can do the following (removing the do - while):
// Completion block that retries the async check itself instead of looping:
// if the user is not free yet, pick a new candidate and schedule another
// checkIfFree: call. Fix: the block references itself, so the variable
// must be __block — otherwise the block captures the *uninitialized*
// value of myResponseBlock at creation time.
// ('#' restored to '@' throughout — paste artifact.)
__block void (^myResponseBlock)(BOOL finished) = ^void (BOOL finished) {
    if (!finished) {
        NSLog(@"not finished the checking yet");
        return;
    }
    if ([self.freedom isEqualToString:@"free"]) {
        return; // found a free user — stop retrying
    }
    if ([self.freedom isEqualToString:@"matched"]) {
        // Choose a different random user than the previous picks.
        do {
            rando = arc4random_uniform(arraycount);
        } while (rando == randomIndex && rando == [self.randString intValue]);
        self.randString = [NSString stringWithFormat:@"%u", rando];
        [users removeAllObjects];
        [users addObject:usersArray[rando]];
        self.freeUser = users.firstObject;
        // Schedule another async check.
        [self checkIfFree:myResponseBlock];
        return;
    }
    NSLog(@"error!");
};
[self checkIfFree:myResponseBlock];
I have converted the Apple RosyWriter example code to comply with ARC and modern objective C. I've been reading up on how people upload 5-10 second clips to a server with the captureOutput:didOutputSampleBuffer:fromConnection method, but I'm unsure what to do within... from my RosyWriter hybrid:
// AVCaptureVideoDataOutput/AVCaptureAudioDataOutput delegate: receives
// each captured sample buffer, records video stats on the capture thread,
// then hands the buffer to the asset writer on movieWritingQueue.
// The CFRetain/CFRelease pair is load-bearing: the capture system may
// recycle the buffer as soon as this method returns, so it is retained
// before the dispatch_async and released inside the block.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
    if (connection == _videoConnection) {
        CMTime timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        [self calculateFramerateAtTimestamp:timeStamp];
        // Lazily capture the stream's dimensions and codec from the first
        // video buffer seen.
        if (_videoDimensions.height == 0 && _videoDimensions.width == 0)
            _videoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
        if (_videoType == 0)
            _videoType = CMFormatDescriptionGetMediaSubType(formatDescription);
    }
    // Keep the buffer and its format description alive across the async hop.
    CFRetain(sampleBuffer);
    CFRetain(formatDescription);
    dispatch_async(movieWritingQueue, ^{
        if (_assetWriter) {
            BOOL wasReadyToRecord = (_readyToRecordAudio && _readyToRecordVideo);
            if (connection == _videoConnection) {
                // Set up the video input from the first buffer's format, then
                // only write once *both* inputs are configured so A/V stay in sync.
                if (!_readyToRecordVideo)
                    _readyToRecordVideo = [self setupAssetWriterVideoInput:formatDescription];
                if (_readyToRecordAudio && _readyToRecordVideo)
                    [self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeVideo];
            }else if (connection == _audioConnection) {
                if (!_readyToRecordAudio)
                    _readyToRecordAudio = [self setupAssetWriterAudioInput:formatDescription];
                if (_readyToRecordVideo && _readyToRecordAudio)
                    [self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeAudio];
            }
            BOOL isReadyToRecord = (_readyToRecordAudio && _readyToRecordVideo);
            // Fire the delegate exactly once, on the transition to ready.
            if (!wasReadyToRecord && isReadyToRecord) {
                _recordingWillBeStarted = NO;
                _recording = YES;
                [_delegate recordingDidStart];
            }
        }
        CFRelease(sampleBuffer);
        CFRelease(formatDescription);
    });
}
Which then writes the sample buffer like so:
// Starts the asset-writer session on the first buffer (anchoring the
// timeline to that buffer's presentation timestamp), then appends each
// buffer to the matching input while the writer is in the writing state.
// Fixes: '#' restored to '@' (paste artifact), and mediaType is now
// compared with isEqualToString: instead of pointer equality, which only
// worked by accident when callers passed the exact AVMediaType constant.
-(void)writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString*)mediaType {
    if (_assetWriter.status == AVAssetWriterStatusUnknown) {
        if ([_assetWriter startWriting]) {
            [_assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
        }else {
            [self showError:[_assetWriter error] source:@"Write sample buffer"];
        }
    }
    if (_assetWriter.status == AVAssetWriterStatusWriting) {
        if ([mediaType isEqualToString:AVMediaTypeVideo]) {
            // Drop the buffer silently if the input isn't ready — appending
            // anyway would raise an exception.
            if (_videoInput.readyForMoreMediaData) {
                if (![_videoInput appendSampleBuffer:sampleBuffer]) {
                    [self showError:[_assetWriter error] source:@"set up video asset writer"];
                }
            }
        }else if ([mediaType isEqualToString:AVMediaTypeAudio]) {
            if (_audioInput.readyForMoreMediaData) {
                if (![_audioInput appendSampleBuffer:sampleBuffer]) {
                    [self showError:[_assetWriter error] source:@"set up audio asset writer"];
                }
            }
        }
    }
}
Now my question is... Should i be creating and swapping assetWriter's in the captureOutput:didOutputSampleBuffer:fromConnection: or the writeSampleBuffer:ofType: method? From what I can see within ffmpeg-ios, it implements a class that has a custom writeSampleBuffer:ofType method on separate assetWriter classes, one of which is a segmented encoder every 5 seconds... but how do I make this upload to a server?