I have converted the Apple RosyWriter example code to ARC and modern Objective-C. I've been reading up on how people upload 5-10 second clips to a server using the captureOutput:didOutputSampleBuffer:fromConnection: method, but I'm unsure what to do within it. Here it is from my RosyWriter hybrid:
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
if (connection == _videoConnection) {
CMTime timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
[self calculateFramerateAtTimestamp:timeStamp];
if (_videoDimensions.height == 0 && _videoDimensions.width == 0)
_videoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
if (_videoType == 0)
_videoType = CMFormatDescriptionGetMediaSubType(formatDescription);
}
CFRetain(sampleBuffer);
CFRetain(formatDescription);
dispatch_async(movieWritingQueue, ^{
if (_assetWriter) {
BOOL wasReadyToRecord = (_readyToRecordAudio && _readyToRecordVideo);
if (connection == _videoConnection) {
if (!_readyToRecordVideo)
_readyToRecordVideo = [self setupAssetWriterVideoInput:formatDescription];
if (_readyToRecordAudio && _readyToRecordVideo)
[self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeVideo];
}else if (connection == _audioConnection) {
if (!_readyToRecordAudio)
_readyToRecordAudio = [self setupAssetWriterAudioInput:formatDescription];
if (_readyToRecordVideo && _readyToRecordAudio)
[self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeAudio];
}
BOOL isReadyToRecord = (_readyToRecordAudio && _readyToRecordVideo);
if (!wasReadyToRecord && isReadyToRecord) {
_recordingWillBeStarted = NO;
_recording = YES;
[_delegate recordingDidStart];
}
}
CFRelease(sampleBuffer);
CFRelease(formatDescription);
});
}
Which then writes the sample buffer like so:
-(void)writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString*)mediaType {
if (_assetWriter.status == AVAssetWriterStatusUnknown) {
if ([_assetWriter startWriting]) {
[_assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
}else {
[self showError:[_assetWriter error] source:#"Write sample buffer"];
}
}
if (_assetWriter.status == AVAssetWriterStatusWriting) {
if (mediaType == AVMediaTypeVideo) {
if (_videoInput.readyForMoreMediaData) {
if (![_videoInput appendSampleBuffer:sampleBuffer]) {
[self showError:[_assetWriter error] source:@"set up video asset writer"];
}
}
}else if (mediaType == AVMediaTypeAudio) {
if (_audioInput.readyForMoreMediaData) {
if (![_audioInput appendSampleBuffer:sampleBuffer]) {
[self showError:[_assetWriter error] source:@"set up audio asset writer"];
}
}
}
}
}
Now my question is: should I be creating and swapping asset writers in captureOutput:didOutputSampleBuffer:fromConnection: or in writeSampleBuffer:ofType:? From what I can see in ffmpeg-ios, it implements a class with a custom writeSampleBuffer:ofType: method on separate asset-writer classes, one of which is a segmented encoder that rolls over every 5 seconds... but how do I make this upload to a server?
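Here is a rough, untested sketch of the segment rotation I have in mind, just to show where my head is at - the _segmentStartTime ivar, the makeNewAssetWriter factory and the uploadSegmentAtURL: helper are names I invented for illustration, not part of RosyWriter:

// Called on movieWritingQueue with each video buffer's presentation timestamp.
// Every ~5 seconds, swap in a fresh writer and hand the finished file off.
- (void)rotateSegmentIfNeeded:(CMTime)timestamp
{
    if (CMTIME_IS_INVALID(_segmentStartTime)) {
        _segmentStartTime = timestamp;
        return;
    }
    if (CMTimeGetSeconds(CMTimeSubtract(timestamp, _segmentStartTime)) < 5.0) {
        return;
    }
    AVAssetWriter *finishedWriter = _assetWriter;
    NSURL *finishedURL = finishedWriter.outputURL;

    // Swap in a new writer first so incoming buffers keep being appended,
    // then finish the old one asynchronously. (A full implementation would
    // also mark the old writer's inputs as finished and create new inputs.)
    _assetWriter = [self makeNewAssetWriter];   // hypothetical factory
    _readyToRecordAudio = NO;
    _readyToRecordVideo = NO;
    _segmentStartTime = timestamp;

    [finishedWriter finishWritingWithCompletionHandler:^{
        [self uploadSegmentAtURL:finishedURL];  // hypothetical uploader, below
    }];
}

// One way the upload side could look, using NSURLSession.
- (void)uploadSegmentAtURL:(NSURL *)fileURL
{
    NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:
        [NSURL URLWithString:@"https://example.com/upload"]];   // placeholder endpoint
    request.HTTPMethod = @"POST";
    NSURLSessionUploadTask *task = [[NSURLSession sharedSession]
        uploadTaskWithRequest:request
                     fromFile:fileURL
            completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
        if (error) {
            NSLog(@"Segment upload failed: %@", error);
        }
    }];
    [task resume];
}

The idea being that each finished ~5-second file is handed to NSURLSession while a fresh writer keeps consuming the incoming sample buffers.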
I'm trying to create a converter that will make a video out of a set of images. Everything is in place: AVFormatContext, AVCodecContext, AVCodec. I'm creating a YUV AVFrame out of a UIImage and sending it to the encoder with avcodec_send_frame(). Everything goes fine until I try to get an AVPacket with avcodec_receive_packet(). Every time it returns -35, which means "output is not available in the current state - user must try to send input". As I said, I send input before trying to get anything out, and the send is successful.
Here's my code:
Init ffmpeg entities:
- (BOOL)setupForConvert:(DummyFVPVideoFile *)videoFile outputPath:(NSString *)path
{
if (!videoFile) {
[self.delegate convertationFailed:@"VideoFile is nil!"];
return NO;
}
currentVideoFile = videoFile;
outputPath = path;
BOOL success = NO;
success = [self initFormatCtxAndCodecs:path];
if (!success) {
return NO;
}
success = [self addCameraStreams:videoFile];
if (!success) {
return NO;
}
success = [self openIOContext:path];
if (!success) {
return NO;
}
return YES;
}
- (BOOL)initFormatCtxAndCodecs:(NSString *)path
{
//AVOutputFormat *fmt = av_guess_format("mp4", NULL, NULL);
int ret = avformat_alloc_output_context2(&pFormatCtx, NULL, NULL, [path UTF8String]);
if (ret < 0) {
NSLog(#"Couldn't create output context");
return NO;
}
//encoder codec init
pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!pCodec) {
NSLog(#"Couldn't find a encoder codec!");
return NO;
}
pCodecCtx = avcodec_alloc_context3(pCodec);
if (!pCodecCtx) {
NSLog(#"Couldn't alloc encoder codec context!");
return NO;
}
pCodecCtx->codec_tag = AV_CODEC_ID_H264;
pCodecCtx->bit_rate = 400000;
pCodecCtx->width = currentVideoFile.size.width;
pCodecCtx->height = currentVideoFile.size.height;
pCodecCtx->time_base = (AVRational){1, (int)currentVideoFile.framerate};
pCodecCtx->framerate = (AVRational){(int)currentVideoFile.framerate, 1};
pCodecCtx->gop_size = 10;
pCodecCtx->max_b_frames = 1;
pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
NSLog(#"Couldn't open the encoder codec!");
return NO;
}
pPacket = av_packet_alloc();
return YES;
}
- (BOOL)addCameraStreams:(DummyFVPVideoFile *)videoFile
{
AVCodecParameters *params = avcodec_parameters_alloc();
if (!params) {
NSLog(#"Couldn't allocate codec parameters!");
return NO;
}
if (avcodec_parameters_from_context(params, pCodecCtx) < 0) {
NSLog(#"Couldn't copy parameters from context!");
return NO;
}
for (int i = 0; i < videoFile.idCameras.count - 1; i++)
{
NSString *path = [videoFile.url URLByAppendingPathComponent:videoFile.idCameras[i]].path;
AVStream *stream = avformat_new_stream(pFormatCtx, pCodec);
if (!stream) {
NSLog(#"Couldn't alloc stream!");
return NO;
}
if (avcodec_parameters_copy(stream->codecpar, params) < 0) {
NSLog(#"Couldn't copy parameters into stream!");
return NO;
}
stream->avg_frame_rate.num = videoFile.framerate;
stream->avg_frame_rate.den = 1;
stream->codecpar->codec_tag = 0; //some silly workaround
stream->index = i;
streams[path] = [[VideoStream alloc] initWithStream:stream];
}
return YES;
}
- (BOOL)openIOContext:(NSString *)path
{
AVIOContext *ioCtx = nil;
if (avio_open(&ioCtx, [path UTF8String], AVIO_FLAG_WRITE) < 0) {
return NO;
}
pFormatCtx->pb = ioCtx;
return YES;
}
And here's the conversion process:
- (void)launchConvert:(DummyFVPVideoFile *)videoFile
{
BOOL convertInProgress = YES;
unsigned int frameCount = 1;
unsigned long pts = 0;
BOOL success = NO;
success = [self writeHeader];
if (!success) {
NSLog(#"Couldn't write header!");
return;
}
AVRational defaultTimeBase;
defaultTimeBase.num = 1;
defaultTimeBase.den = videoFile.framerate;
AVRational streamTimeBase = streams.allValues.firstObject.stream->time_base;
while (convertInProgress)
{
pts += av_rescale_q(1, defaultTimeBase, streamTimeBase);
for (NSString *path in streams.allKeys)
{
UIImage *img = [UIImage imageWithContentsOfFile:[NSString stringWithFormat:@"%@/%u.jpg", path, frameCount]];
AVPacket *pkt = [self getAVPacket:img withPts:pts];
if (!pkt->data) { continue; }
pkt->stream_index = streams[path].stream->index;
//check all settings of pkt
if (![self writePacket:pkt]) {
NSLog(#"Couldn't write packet!");
convertInProgress = NO;
break;
}
}
frameCount++;
}
success = [self writeTrailer];
if (!success) {
NSLog(#"Couldn't write trailer!");
return;
}
NSLog(#"Convertation finished!");
//delegate convertationFinished method
}
- (BOOL)writeHeader
{
if (avformat_write_header(pFormatCtx, NULL) < 0) {
return NO;
}
return YES;
}
- (BOOL)writePacket:(AVPacket *)pkt
{
if (av_interleaved_write_frame(pFormatCtx, pkt) != 0) {
return NO;
}
return YES;
}
- (BOOL)writeTrailer
{
if (av_write_trailer(pFormatCtx) != 0) {
return NO;
}
return YES;
}
/**
This method will create AVPacket out of UIImage.
@return AVPacket
*/
- (AVPacket *)getAVPacket:(UIImage *)img withPts:(unsigned long)pts
{
if (!img) {
NSLog(#"imgData is nil!");
return nil;
}
uint8_t *imgData = [self getPixelDataFromImage:img];
AVFrame *frame_yuv = av_frame_alloc();
if (!frame_yuv) {
NSLog(#"frame_yuv is nil!");
return nil;
}
frame_yuv->format = AV_PIX_FMT_YUV420P;
frame_yuv->width = (int)img.size.width;
frame_yuv->height = (int)img.size.height;
int ret = av_image_alloc(frame_yuv->data,
frame_yuv->linesize,
frame_yuv->width,
frame_yuv->height,
frame_yuv->format,
32);
if (ret < 0) {
NSLog(#"Couldn't alloc yuv frame!");
return nil;
}
struct SwsContext *sws_ctx = nil;
sws_ctx = sws_getContext((int)img.size.width, (int)img.size.height, AV_PIX_FMT_RGB24,
(int)img.size.width, (int)img.size.height, AV_PIX_FMT_YUV420P,
0, NULL, NULL, NULL);
const uint8_t *scaleData[1] = { imgData };
int inLineSize[1] = { 4 * img.size.width };
sws_scale(sws_ctx, scaleData, inLineSize, 0, (int)img.size.height, frame_yuv->data, frame_yuv->linesize);
frame_yuv->pict_type = AV_PICTURE_TYPE_I;
frame_yuv->pts = pCodecCtx->frame_number;
ret = avcodec_send_frame(pCodecCtx, frame_yuv); //every time everything is fine
if (ret != 0) {
NSLog(#"Couldn't send yuv frame!");
return nil;
}
av_init_packet(pPacket);
pPacket->dts = pPacket->pts = pts;
do {
ret = avcodec_receive_packet(pCodecCtx, pPacket); //every time -35 error
NSLog(#"ret = %d", ret);
if (ret == AVERROR_EOF) {
NSLog(#"AVERROR_EOF!");
} else if (ret == AVERROR(EAGAIN)) {
NSLog(#"AVERROR(EAGAIN)");
} else if (ret == AVERROR(EINVAL)) {
NSLog(#"AVERROR(EINVAL)");
}
if (ret != 0) {
NSLog(#"Couldn't receive packet!");
//return nil;
}
} while ( ret == 0 );
free(imgData);
av_packet_unref(pPacket);
av_packet_free(&pPacket);
av_frame_unref(frame_yuv);
av_frame_free(&frame_yuv);
//perform other cleanup
return pPacket;
}
Any insights would be helpful. Thanks!
There may be two reasons.
According to one of the FFmpeg documents, you may need to feed more than one frame to avcodec_send_frame() before avcodec_receive_packet() returns a packet successfully.
I cannot confirm that you allocated a large enough buffer for pPacket. Neither av_packet_alloc() nor av_init_packet() allocates a data buffer; the latter just sets it to NULL. So the buffer must be allocated after the init - somewhere you should allocate it either manually or with av_new_packet(pPacket, SIZE).
Hope that helps.
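For reference, the send/receive loop usually has this shape - a generic sketch of the FFmpeg encode pattern rather than your exact code, with enc, fmt and stream standing in for your pCodecCtx, pFormatCtx and per-camera AVStream:

// Keep sending frames; drain whatever packets the encoder has ready.
// AVERROR(EAGAIN) from avcodec_receive_packet() just means "no output yet,
// send more frames" and is expected for the first few frames.
static int encode_and_write(AVCodecContext *enc, AVFormatContext *fmt,
                            AVStream *stream, AVFrame *frame /* NULL to flush */)
{
    int ret = avcodec_send_frame(enc, frame);
    if (ret < 0)
        return ret;

    AVPacket *pkt = av_packet_alloc();
    while (1) {
        ret = avcodec_receive_packet(enc, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            ret = 0;                 // not an error, nothing to write yet
            break;
        }
        if (ret < 0)
            break;                   // genuine encoding error

        // Convert timestamps from the codec time base to the stream time base
        // and let the muxer take ownership of the packet reference.
        av_packet_rescale_ts(pkt, enc->time_base, stream->time_base);
        pkt->stream_index = stream->index;
        ret = av_interleaved_write_frame(fmt, pkt);
        if (ret < 0)
            break;
    }
    av_packet_free(&pkt);
    return ret;
}

Calling it once more with frame == NULL at the end of the conversion flushes the encoder's remaining packets before av_write_trailer().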
I have trouble playing a queue of local songs (saved on the device). I'm using this CocoaPod: https://github.com/tumtumtum/StreamingKit. The first item in the queue starts playing one more time after it has finished.
It has a stopReason of NONE.
func audioPlayer(_ audioPlayer: STKAudioPlayer, didFinishPlayingQueueItemId queueItemId: NSObject, with stopReason: STKAudioPlayerStopReason, andProgress progress: Double, andDuration duration: Double) {
if stopReason == .eof || stopReason == .pendingNext {
checkNextTrack()
}
if stopReason == .none {
print("NONE")
}
if stopReason == .error || stopReason == .userAction || stopReason == .disposed {
stop()
resetAudioPlayer()
}
}
The other items get the .eof or .pendingNext status, which is the correct behaviour. What should I do in this case? All remote URLs play correctly.
Thanks!
UPDATE:
Queue creation:
internal func playWithQueue(queue: [Song], index: Int = 0) {
var audioListNew = [AudioItem]()
for (index, value) in queue.enumerated() {
let audioItem = AudioItem(audioItem: value, audioIndex: index)
audioListNew.append(audioItem)
}
guard index >= 0 && index < audioListNew.count else { return }
newQueue(queue: audioListNew, index: index)
}
func newQueue(queue: [AudioItem], index: Int = 0) {
self.queue = queue
audioPlayer.clearQueue()
if let currentSong = self.queue[index].audioItem {
play(file: currentSong)
for (songIndex, _) in queue.enumerated() {
audioPlayer.queue( (queue[Int((index + songIndex) % queue.count)].audioItem?.songRealUrl)! )
}
}
currentIndex = index
}
I had the same problem and I solved it!
You need to do something like this:
-(void) audioPlayer:(STKAudioPlayer*)audioPlayer didFinishBufferingSourceWithQueueItemId:(NSObject*)queueItemId
{
SampleQueueId* queueId = (SampleQueueId*)queueItemId;
NSLog(#"Requeuing: %#", [queueId.url description]);
// [self.audioPlayer queueDataSource:[STKAudioPlayer dataSourceFromURL:queueId.url] withQueueItemId:[[SampleQueueId alloc] initWithUrl:queueId.url andCount:queueId.count+1]];
}
After that you need to write stop actions:
-(void) audioPlayer:(STKAudioPlayer*)audioPlayer didFinishPlayingQueueItemId:(NSObject*)queueItemId withReason:(STKAudioPlayerStopReason)stopReason andProgress:(double)progress andDuration:(double)duration
{
SampleQueueId* queueId = (SampleQueueId*)queueItemId;
NSLog(#"Finished: %#", [queueId.url description]);
if(stopReason == STKAudioPlayerStopReasonEof)
{
if(self.isMainAudioPlay)
{
[self.mainAudioPlayer setState:STKAudioPlayerStateStopped];
[self.mainAudioPlayer stop];
self.playMainAudioFrame.image = [UIImage imageNamed:PlayIconMain];
}
else
{
[self.audioPlayer setState:STKAudioPlayerStateStopped];
[self.audioPlayer stop];
if(self.comments.count > 0)
{
UITableView *tableView = self.tabelView;
RCommentsModel *newsStop = (self.comments)[self.indexpathForStop.row];
newsStop.commentPlayIcon = PlayIconMain;
[self.comments replaceObjectAtIndex:self.indexpathForStop.row withObject:newsStop];
[tableView beginUpdates];
[tableView reloadRowsAtIndexPaths:@[self.indexpathForStop] withRowAnimation:UITableViewRowAnimationNone];
[tableView endUpdates];
}
}
}
else if (stopReason == STKAudioPlayerStopReasonUserAction)
{
if(self.isMainAudioPlay)
{
[self.mainAudioPlayer setState:STKAudioPlayerStateStopped];
[self.mainAudioPlayer stop];
self.playMainAudioFrame.image = [UIImage imageNamed:PlayIconMain];
}
else
{
[self.audioPlayer setState:STKAudioPlayerStateStopped];
[self.audioPlayer stop];
if(self.comments.count > 0)
{
UITableView *tableView = self.tabelView;
RCommentsModel *newsStop = (self.comments)[self.indexpathForStop.row];
newsStop.commentPlayIcon = PlayIconMain;
[self.comments replaceObjectAtIndex:self.indexpathForStop.row withObject:newsStop];
[tableView beginUpdates];
[tableView reloadRowsAtIndexPaths:@[self.indexpathForStop] withRowAnimation:UITableViewRowAnimationNone];
[tableView endUpdates];
}
}
}
else if (stopReason == STKAudioPlayerStopReasonNone)
{
}
}
Hope it helps! :) I spent a lot of time fixing it, and it worked!
I used the following code to encode a video from several local pictures. The problem is that I have 30 pictures and only get a 1-second video. Is there any way to get a 30-second video at a 24 fps frame rate?
- (BOOL)encodeReadySamplesFromOutput:(AVAssetReaderOutput *)output toInput:(AVAssetWriterInput *)input
{
NSLog(#"Frame init m == %d",m);
while (input.isReadyForMoreMediaData)
{
CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
if (sampleBuffer)
{
BOOL handled = NO;
BOOL error = NO;
CMItemCount count;
CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, nil, &count);
CMSampleTimingInfo *timingInfo = malloc(sizeof(CMSampleTimingInfo) * count);
CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, count, timingInfo, &count);
for (CMItemCount i = 0; i < count; i++)
{
timingInfo[i].decodeTimeStamp = kCMTimeInvalid;
timingInfo[i].presentationTimeStamp = CMTimeMake(m, 24);
// timingInfo[i].duration = CMTimeMake(1, 12);
}
CMSampleBufferRef completedSampleBuffer;
CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sampleBuffer, count, timingInfo, &completedSampleBuffer);
free(timingInfo);
if (self.reader.status != AVAssetReaderStatusReading || self.writer.status != AVAssetWriterStatusWriting)
{
handled = YES;
error = YES;
}
if (!handled && self.videoOutput == output)
{
// update the video progress
++m;
NSLog(#"Frame m == %d",m);
lastSamplePresentationTime = CMSampleBufferGetPresentationTimeStamp(completedSampleBuffer);
CMTimeValue value = lastSamplePresentationTime.value;
CMTimeScale scale = lastSamplePresentationTime.timescale;
NSLog(#"Frame value == %lld", value);
NSLog(#"Frame scale == %d",scale);
self.progress = duration == 0 ? 1 : CMTimeGetSeconds(lastSamplePresentationTime) / duration;
if ([self.delegate respondsToSelector:@selector(exportSession:renderFrame:withPresentationTime:toBuffer:)])
{
CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(completedSampleBuffer);
CVPixelBufferRef renderBuffer = NULL;
CVPixelBufferPoolCreatePixelBuffer(NULL, self.videoPixelBufferAdaptor.pixelBufferPool, &renderBuffer);
[self.delegate exportSession:self renderFrame:pixelBuffer withPresentationTime:lastSamplePresentationTime toBuffer:renderBuffer];
if (![self.videoPixelBufferAdaptor appendPixelBuffer:renderBuffer withPresentationTime:lastSamplePresentationTime])
{
error = YES;
}
CVPixelBufferRelease(renderBuffer);
handled = YES;
}
}
if (!handled && ![input appendSampleBuffer:completedSampleBuffer])
{
error = YES;
}
CFRelease(sampleBuffer);
CFRelease(completedSampleBuffer);
if (error)
{
return NO;
}
}
else
{
[input markAsFinished];
return NO;
}
}
return YES;
}
Not unless you get a lot more pictures or repeat the ones you have.
In either case, you're going to have to calculate presentation time yourself, with something like CMTimeMake(m, 24), e.g.:
[self.videoPixelBufferAdaptor appendPixelBuffer:renderBuffer withPresentationTime:CMTimeMake(m, 24)];
If you dropped the 24 fps requirement (why do you need that?), you could get a 30-second video of 30 images at 1 fps by using CMTimeMake(m, 1) instead in appendPixelBuffer:withPresentationTime:.
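To make the arithmetic concrete: with CMTimeMake(m, 24) frame m lands at m/24 seconds, so 30 frames only span about 1.25 seconds, whereas with CMTimeMake(m, 1) the same 30 frames span 30 seconds. Roughly, reusing m, renderBuffer and videoPixelBufferAdaptor from your code:

// Choose the timescale to match the playback rate you actually want.
// 30 frames at 24 fps -> clip length ~= 30/24 = 1.25 s
// 30 frames at  1 fps -> clip length  = 30/1  = 30 s
int32_t framesPerSecond = 1;   // use 24 only if you really need 24 fps
CMTime presentationTime = CMTimeMake(m, framesPerSecond);
[self.videoPixelBufferAdaptor appendPixelBuffer:renderBuffer
                           withPresentationTime:presentationTime];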
I have an audio playback class that uses AudioToolbox.framework and AudioQueue.
I've run into a problem: every time a chunk of audio data is played, memory usage grows, and after playback completes the memory is not released. With repeated batch tests it grows by hundreds of megabytes. I want to know what is causing the growth - whether the audio data objects passed in on each run are being released, or something else.
Here is my PlayThread class code:
@interface PlayThread()
{
BOOL transferDataComplete; // set to YES when there is no more data to transfer to the play thread
NSMutableArray *receiveDataArray; // audio data array
BOOL isPlay; // YES once the audio queue has started
}
}
@end
#pragma mark class implementation
@implementation PlayThread
- (instancetype)init
{
if (self = [super init]) {
receiveDataArray = [[NSMutableArray alloc]init];
isPlay = NO;
transferDataComplete = false;
bufferOverCount = QUEUE_BUFFER_SIZE;
audioQueue = nil;
}
return self;
}
// audio queue callback function
static void BufferCallback(void *inUserData,AudioQueueRef inAQ,AudioQueueBufferRef buffer)
{
PlayThread* player = (__bridge PlayThread*)inUserData;
[player fillBuffer:inAQ queueBuffer:buffer];
}
// fill buffer
-(void)fillBuffer:(AudioQueueRef)queue queueBuffer:(AudioQueueBufferRef)buffer
{
while (true){
NSData *audioData = [self getAudioData];
if( transferDataComplete && audioData == nil) {
bufferOverCount --;
break;
}
else if(audioData != nil){
memcpy(buffer->mAudioData, [audioData bytes] , audioData.length);
buffer->mAudioDataByteSize = (UInt32)audioData.length;
AudioQueueEnqueueBuffer(queue, buffer, 0, NULL);
break;
}
else
break;
} // while
if(bufferOverCount == 0){
// stop audioqueue
[self stopAudioQueue];
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector(playComplete)]) {
[self.delegate playComplete];
}
});
}
}
-(void)addPlayData:(NSData *)data
{
NSUInteger count = 0;
@synchronized(receiveDataArray){
[receiveDataArray addObject:data];
}
}
/**
* get data from receiveDataArray
*/
-(NSData*)getAudioData
{
NSData *headData = nil;
@synchronized(receiveDataArray){
if(receiveDataArray.count > 0){
headData = [receiveDataArray objectAtIndex:0];
[receiveDataArray removeObjectAtIndex:0];
}
}
return headData;
}
- (void)startPlay // start audioqueue to play audio data
{
[self reset];
[self open];
for(int i=0; i<QUEUE_BUFFER_SIZE; i++)
{
[self fillBuffer:audioQueue queueBuffer:audioQueueBuffers[i]];
}
// audioqueuestart
AudioQueueStart(audioQueue, NULL);
@synchronized(self){
isPlay = YES;
}
if ([self.delegate respondsToSelector:@selector(playBegin)]) {
[self.delegate playBegin];
}
}
-(void)createAudioQueue
{
if (audioQueue) {
return;
}
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), nil, nil, 0, &audioQueue);
if(audioQueue){
for(int i=0;i<QUEUE_BUFFER_SIZE;i++){
AudioQueueAllocateBufferWithPacketDescriptions(audioQueue, EVERY_READ_LENGTH, 0, &audioQueueBuffers[i]);
}
}
}
-(void)stopAudioQueue
{
if(audioQueue == nil){
return;
}
@synchronized(self){
if(isPlay){
isPlay = NO;
}
}
AudioQueueStop(audioQueue, TRUE);
}
-(void)setAudioFormat
{
audioDescription.mSampleRate = 16000;
audioDescription.mFormatID = kAudioFormatLinearPCM;
audioDescription.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioDescription.mChannelsPerFrame = 1;
audioDescription.mFramesPerPacket = 1;
audioDescription.mBitsPerChannel = 16;
audioDescription.mBytesPerFrame = (audioDescription.mBitsPerChannel/8) * audioDescription.mChannelsPerFrame;
audioDescription.mBytesPerPacket = audioDescription.mBytesPerFrame ;
}
-(void)close
{
if (audioQueue) {
AudioQueueStop(audioQueue, true);
AudioQueueDispose(audioQueue, true);
audioQueue = nil;
isPlay = NO;
}
}
-(BOOL)open {
if([self isOpen]){
return YES;
}
[self close];
[self setAudioFormat];
[self createAudioQueue];
return YES;
}
-(BOOL)isOpen
{
return (audioQueue != nil);
}
- (void)reset
{
bufferOverCount = QUEUE_BUFFER_SIZE;
transferDataComplete = NO;
}
- (BOOL)isPlaying
{
return isPlay;
}
- (void)disposeQueue
{
if (audioQueue) {
AudioQueueDispose(audioQueue, YES);
}
audioQueue = nil;
}
- (void)dealloc
{
[self disposeQueue];
}
Here is ViewController.m:
- (void)viewDidLoad {
[super viewDidLoad];
PlayThread *playThread = [[PlayThread alloc]init];
playThread.delegate = self;
self.playThread = playThread;
for (int i = 0; i < 10; i++)
{ // create empty audio data to simulate playback
NSMutableData *data = [[NSMutableData alloc]initWithLength:10000];
[self.playThread addPlayData:data];
}
[self.playThread startPlay];
}
Here is PlayThread's delegate method:
// When playback completes and then plays once again, memory continues to increase
- (void)playComplete
{
dispatch_async(dispatch_get_main_queue(), ^{
for (int i = 0; i < 10; i++)
{
NSMutableData *data = [[NSMutableData alloc]initWithLength:10000];
[self.playThread addPlayData:data];
}
[self.playThread startPlay];
});
}
Why does memory keep increasing, and how can I release it promptly?
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), nil, nil, 0, &audioQueue);
The parameters here (the callback run loop and run loop mode) cannot be nil.
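For example, something like this - a sketch of what I mean, assuming the player object lives on the current run loop:

// Pass an explicit run loop and run loop mode for the callback instead of nil.
AudioQueueNewOutput(&audioDescription,
                    BufferCallback,
                    (__bridge void *)(self),
                    CFRunLoopGetCurrent(),
                    kCFRunLoopCommonModes,
                    0,
                    &audioQueue);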
I'm trying to upload a file and I'm having a few problems. I have an input stream from a local path:
self.stmIn = [NSInputStream inputStreamWithFileAtPath:localPath];
The variable localPath contains this path:
/Users/JBG/Library/Application Support/iPhone Simulator/6.0/Applications/734F4DC6-8683-42BB-AB0D-A5553BC22C55/Documents/100046-003.jpg
I can open the stream without any problem. The problem comes when I try to read it:
bytesRead = [self.stmIn read:self.buffer maxLength:kSendBufferSize];
The result is -1. I don't understand the problem. Any ideas?
Thanks and regards
EDIT: the code is based on (taken from) Apple's SimpleFTPSample, but I implemented it in an NSObject class rather than in the UIViewController.
EDIT: I got this info using the streamError method: Error Domain=NSPOSIXErrorDomain Code=14 "The operation couldn't be completed. Bad address"
EDIT: I've added the code:
- (uint8_t *)buffer {
return self->buffer;
}
- (BOOL)isSending {
return (self.stmOut != nil);
}
- (void)startSend {
BOOL success;
NSURL *url;
[[NetworkManager sharedInstance] didStartNetworkOperation];
assert([[NSFileManager defaultManager] fileExistsAtPath:localPath]);
assert(self.stmOut == nil); // don't tap send twice in a row!
assert(self.stmIn == nil); // ditto
url = [[NetworkManager sharedInstance] smartURLForString:ftpPath];
success = (url != nil);
if (success) {
url = CFBridgingRelease(CFURLCreateCopyAppendingPathComponent(NULL, (CFURLRef) url, (CFStringRef) [localPath lastPathComponent], false));
success = (url != nil);
}
if ( ! success) {
NSLog(#"Invalid URL");
} else {
self.stmIn = [NSInputStream inputStreamWithFileAtPath:localPath];
assert(self.stmIn != nil);
[self.stmIn open];
self.stmOut = CFBridgingRelease(CFWriteStreamCreateWithFTPURL(NULL, (CFURLRef) url));
assert(self.stmOut != nil);
success = [self.stmOut setProperty:ftpUser forKey:(id)kCFStreamPropertyFTPUserName];
assert(success);
success = [self.stmOut setProperty:ftpPwd forKey:(id)kCFStreamPropertyFTPPassword];
assert(success);
self.stmOut.delegate = self;
[self.stmOut scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
[self.stmOut open];
}
}
- (void)stopSendWithStatus:(NSString *)statusString {
if (self.stmOut != nil) {
[self.stmOut removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
self.stmOut.delegate = nil;
[self.stmOut close];
self.stmOut = nil;
}
if (self.stmIn != nil) {
[self.stmIn close];
self.stmIn = nil;
}
if (statusString == nil) {
statusString = #"Archivo subido";
}
[[NetworkManager sharedInstance] didStopNetworkOperation];
}
- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode {
assert(aStream == self.stmOut);
switch (eventCode) {
case NSStreamEventOpenCompleted: {
NSLog(#"Opened connection");
} break;
case NSStreamEventHasBytesAvailable: {
assert(NO);
} break;
case NSStreamEventHasSpaceAvailable: {
NSLog(#"Sending");
if (self.bufferOffset == self.bufferLimit) {
NSInteger bytesRead;
bytesRead = [self.stmIn read:self.buffer maxLength:kSendBufferSize];
if (bytesRead == -1) {
[self stopSendWithStatus:#"File read error"];
} else if (bytesRead == 0) {
[self stopSendWithStatus:nil];
} else {
self.bufferOffset = 0;
self.bufferLimit = bytesRead;
}
}
if (self.bufferOffset != self.bufferLimit) {
NSInteger bytesWritten;
bytesWritten = [self.stmOut write:&self.buffer[self.bufferOffset] maxLength:self.bufferLimit - self.bufferOffset];
assert(bytesWritten != 0);
if (bytesWritten == -1) {
[self stopSendWithStatus:#"Network write error"];
} else {
self.bufferOffset += bytesWritten;
}
}
} break;
case NSStreamEventErrorOccurred: {
[self stopSendWithStatus:#"Stream open error"];
} break;
case NSStreamEventEndEncountered: {
// ignore
} break;
default: {
assert(NO);
} break;
}
}
Try this code. Hope it helps you.
- (void)sendDidStart
{
// self.statusLabel.text = #"Sending";
[[NetworkManager sharedInstance] didStartNetworkOperation];
}
- (void)updateStatus:(NSString *)statusString
{
assert(statusString != nil);
//self.statusLabel.text = statusString;
}
- (void)sendDidStopWithStatus:(NSString *)statusString
{
if (statusString == nil) {
statusString = #"Put succeeded";
}
[[NetworkManager sharedInstance] didStopNetworkOperation];
}
#pragma mark * Core transfer code
// This is the code that actually does the networking.
// Because buffer is declared as an array, you have to use a custom getter.
// A synthesised getter doesn't compile.
- (uint8_t *)buffer
{
return self->_buffer;
}
- (BOOL)isSending
{
return (self.networkStream != nil);
}
- (void)startSend:(NSString *)filePath
{
BOOL success;
NSURL * url;
NSLog(#"localfilepath..:%#",localFilePath);
assert(localFilePath != nil);
assert([[NSFileManager defaultManager] fileExistsAtPath:localFilePath]);
assert( [localFilePath.pathExtension isEqual:@"png"] || [localFilePath.pathExtension isEqual:@"jpg"] );
assert(self.networkStream == nil); // don't tap send twice in a row!
assert(self.fileStream == nil); // ditto
// First get and check the URL.
url = [[NetworkManager sharedInstance] smartURLForString:#"ftp://yourFTPLink/"];
success = (url != nil);
if (success) {
// Add the last part of the file name to the end of the URL to form the final
// URL that we're going to put to.
// url = CFBridgingRelease(
// CFURLCreateCopyAppendingPathComponent(NULL, (__bridge CFURLRef) url, (__bridge CFStringRef) @"minkle.png" , false)
// );
url = CFBridgingRelease(
CFURLCreateCopyAppendingPathComponent(NULL, ( CFURLRef) url, ( CFStringRef) imageString , false)
);
success = (url != nil);
}
// If the URL is bogus, let the user know. Otherwise kick off the connection.
if ( ! success) {
// self.statusLabel.text = #"Invalid URL";
}
else
{
// Open a stream for the file we're going to send. We do not open this stream;
// NSURLConnection will do it for us.
self.fileStream = [NSInputStream inputStreamWithFileAtPath:localFilePath];
assert(self.fileStream != nil);
[self.fileStream open];
// Open a CFFTPStream for the URL.
self.networkStream = CFBridgingRelease(
CFWriteStreamCreateWithFTPURL(NULL, ( CFURLRef) url)
);
assert(self.networkStream != nil);
// if ([self.usernameText.text length] != 0) {
success = [self.networkStream setProperty:#"yourUserName" forKey:(id)kCFStreamPropertyFTPUserName];
assert(success);
success = [self.networkStream setProperty:#"yourPassword" forKey:(id)kCFStreamPropertyFTPPassword];
assert(success);
//}
self.networkStream.delegate = self;
[self.networkStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
[self.networkStream open];
// Tell the UI we're sending.
[self sendDidStart];
}
}
- (void)stopSendWithStatus:(NSString *)statusString
{
if (self.networkStream != nil) {
[self.networkStream removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
self.networkStream.delegate = nil;
[self.networkStream close];
self.networkStream = nil;
}
if (self.fileStream != nil) {
[self.fileStream close];
self.fileStream = nil;
}
[self sendDidStopWithStatus:statusString];
}
- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode
// An NSStream delegate callback that's called when events happen on our
// network stream.
{
#pragma unused(aStream)
assert(aStream == self.networkStream);
switch (eventCode) {
case NSStreamEventOpenCompleted: {
[self updateStatus:#"Opened connection"];
} break;
case NSStreamEventHasBytesAvailable: {
assert(NO); // should never happen for the output stream
} break;
case NSStreamEventHasSpaceAvailable: {
[self updateStatus:#"Sending"];
// If we don't have any data buffered, go read the next chunk of data.
if (self.bufferOffset == self.bufferLimit) {
NSInteger bytesRead;
bytesRead = [self.fileStream read:self.buffer maxLength:jSendBufferSize];
if (bytesRead == -1) {
[self stopSendWithStatus:#"File read error"];
} else if (bytesRead == 0) {
[self stopSendWithStatus:nil];
} else {
self.bufferOffset = 0;
self.bufferLimit = bytesRead;
}
}
// If we're not out of data completely, send the next chunk.
if (self.bufferOffset != self.bufferLimit) {
NSInteger bytesWritten;
bytesWritten = [self.networkStream write:&self.buffer[self.bufferOffset] maxLength:self.bufferLimit - self.bufferOffset];
assert(bytesWritten != 0);
if (bytesWritten == -1) {
[self stopSendWithStatus:#"Network write error"];
} else {
self.bufferOffset += bytesWritten;
}
}
} break;
case NSStreamEventErrorOccurred: {
[self stopSendWithStatus:#"Stream open error"];
} break;
case NSStreamEventEndEncountered: {
// ignore
} break;
default: {
assert(NO);
} break;
}
}
Did you turn this string:
/Users/JBG/Library/Application Support/iPhone Simulator/6.0/Applications/734F4DC6-8683-42BB-AB0D-A5553BC22C55/Documents/100046-003.jpg
into a file URL?
You should be using something like fileURLWithPath: from the [NSURL](https://developer.apple.com/library/mac/#documentation/Cocoa/Reference/Foundation/Classes/NSURL_Class/Reference/Reference.html) docs to turn the string path into a proper file URL.
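For example, reusing localPath and stmIn from your code - just to show the shape of it:

// Turn the plain path string into a proper file URL, then build the
// input stream from the URL rather than from the raw string.
NSURL *fileURL = [NSURL fileURLWithPath:localPath];
self.stmIn = [NSInputStream inputStreamWithURL:fileURL];
[self.stmIn open];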