CocosDenshion: why isPlaying always false (I can hear the music)? - ios

I have this:
-(ALuint)ID
{ return self.soundSource->_sourceId; }
-(void)play
{
[self.engine playSound:self.soundSource.soundId
sourceGroupId:0
pitch:self.pitch
pan:1.0
gain:1.0
loop:NO];
}
-(NSTimeInterval)duration
{ return durationOfSourceId(self.ID); }
-(NSTimeInterval)offset
{ return elapsedTimeOfSourceId(self.ID); }
-(BOOL)isPlaying
{
NSTimeInterval secondsRemaining = self.duration - self.offset;
NSLog(#"<%.3f>", self.soundSource.durationInSeconds);
NSLog(#"<%.3f> - <%.3f> = <%.3f> isPlaying <%i>", self.duration, self.offset, secondsRemaining, self.soundSource.isPlaying);
return (secondsRemaining > 0.0);
}
#pragma mark - OpenAL addons
static NSTimeInterval elapsedTimeOfSourceId(ALuint sourceID)
{
float result = 0.0;
alGetSourcef(sourceID, AL_SEC_OFFSET, &result);
return result;
}
static NSTimeInterval durationOfSourceId(ALuint sourceID)
{
//Thanks to http://stackoverflow.com/a/8822347
ALint bufferID, bufferSize, frequency, bitsPerSample, channels;
alGetSourcei(sourceID, AL_BUFFER, &bufferID);
alGetBufferi(bufferID, AL_SIZE, &bufferSize);
alGetBufferi(bufferID, AL_FREQUENCY, &frequency);
alGetBufferi(bufferID, AL_CHANNELS, &channels);
alGetBufferi(bufferID, AL_BITS, &bitsPerSample);
NSTimeInterval result = ((double)bufferSize)/(frequency*channels*(bitsPerSample/8));
return result;
}
Where engine is just an instance of CDSoundEngine. I really want to know when will the music stop. I'm into it for a whole day now, and I'm tired.
It logs:
[1445:707] <1.656>
[1445:707] <1.656> - <0.000> = <1.656> isPlaying <0>
So the OpenAL source ID is right (since I can get the duration).
The CDSoundSource is also right (since I can get the duration from that as well).
I can hear the sound playing.
But AL_SEC_OFFSET is always 0.0, isPlaying is always NO.

Why dont you get the state of the source and check if it is really being played?:
alGetSourcei(source, AL_SOURCE_STATE, &state);
return (state == AL_PLAYING);

Related

EA Opens more orders than expected in MQL4

Everything seems fine. But EA usually opens multiple trades in the same second... The way I built it is very linear and i can't seem to spot the logical mistake. It is basically a random martingale EA to test stuff out. Any indicator (that's why I called it random, haven't decided myself) can be put in there.
Basic idea is that it has an upper and lower threshold which determines when it is in buy zone and when at sell zone. Once it is in either zone, if trend goes against it (determined by indicator's value, not symbol's price) it opens another trade with the same SL/TP of the initial order. Also it checks whether initial trade still runs so it does not open other ones and once the initial trade is open. After that the criterias about the rest of the trades (that go against the trade are different).
The problem is that it opens multiple trades at times that it shouldn't, or like 3-4 trades within the same second or two. Any idea why this happens?
#property copyright "Copyright 2020, MetaQuotes Software Corp."
#property link "https://www.mql5.com"
#property version "1.00"
#property strict
input int stepValue = 5;
input double lotsize = 0.01;
input int stoploss = 2000;
input int takeprofit = 140;
input int slippage = 10;
input double upper_border = 60.0;
input double lower_border = 40.0;
const string EAComment = "Xind";
string mode = "";
bool first_trade = false;
int InitTicket = 1;
double X = 0.0;
double X_Last = 0.0;
//+------------------------------------------------------------------+
//| Expert initialization function |
//+------------------------------------------------------------------+
int OnInit()
{
//---
first_trade = false;
//---
return(INIT_SUCCEEDED);
}
//+------------------------------------------------------------------+
//| Expert deinitialization function |
//+------------------------------------------------------------------+
void OnDeinit(const int reason)
{
//---
}
//+------------------------------------------------------------------+
//| Expert tick function |
//+------------------------------------------------------------------+
void OnTick()
{
//---
SearchSignal();
if (mode == "Buy")
{
if (first_trade == false)
{
Buy();
}
if (first_trade == true)
{
MartinCheck();
CloseCheck();
}
}
if (mode == "Sell")
{
if (first_trade == false)
{
Sell();
}
if (first_trade == true)
{
MartinCheck();
CloseCheck();
}
}
}
//+------------------------------------------------------------------+
void Buy()
{
X_Last = X;
first_trade = true;
InitTicket = OrderSend(NULL,OP_BUY,lotsize,Ask,slippage,Ask-stoploss*Point,Ask+takeprofit*Point,EAComment,1,0,clrDarkBlue);
}
//---
void Sell()
{
X_Last = X;
first_trade = true;
InitTicket = OrderSend(NULL,OP_SELL,lotsize,Bid,slippage,Bid+stoploss*Point,Bid-takeprofit*Point,EAComment,1,0,clrDarkRed);
}
//---
void MartinBuy()
{
if (OrderSelect(InitTicket, SELECT_BY_TICKET) == true)
{
double new_SL = OrderStopLoss();
double new_TP = OrderTakeProfit();
int dont_care = OrderSend(NULL,OP_BUY,lotsize,Ask,slippage,new_SL,new_TP,EAComment+" martin",1,0,clrDarkBlue);
}
}
//---
void MartinSell()
{
if (OrderSelect(InitTicket, SELECT_BY_TICKET) == true)
{
double new_SL = OrderStopLoss();
double new_TP = OrderTakeProfit();
int dont_care = OrderSend(NULL,OP_SELL,lotsize,Bid,slippage,new_SL,new_TP,EAComment+" martin",1,0,clrDarkRed);
}
}
//---
void SearchSignal()
{
X = 0.0; //where 0.0, put here the iCustom for external indicators, or some built-in indicator
if (X >= upper_border)
{
mode = "Sell";
}
else if (X <= lower_border)
{
mode = "Buy";
}
else
{
mode = "";
first_trade = false;
InitTicket = 1;
X_Last = 0.0;
}
}
//---
void CloseCheck()
{
if (OrderSelect(InitTicket, SELECT_BY_TICKET))
{
if (OrderCloseTime() == 0)
{
first_trade = true;
}
else if (OrderCloseTime() != 0)
{
first_trade = false;
}
else
{
return;
}
}
}
//---
void MartinCheck()
{
if (mode == "Buy")
{
if ((X_Last - stepValue) >= X)
{
X_Last = X;
MartinBuy();
}
}
if (mode == "Sell")
{
if ((X_Last + stepValue) <= X)
{
X_Last = X;
MartinSell();
}
}
}
The layout of your code makes it possible for several processes to happen in sequence all on the same tick which I assume you do not want. Try changing your code initially to this and work from there:
void OnTick()
{
SearchSignal();
if(mode=="Buy")
{
if(!first_trade) Buy();
else
{
MartinCheck();
CloseCheck();
}
}
else if(mode=="Sell")
{
if(!first_trade) Sell();
else
{
MartinCheck();
CloseCheck();
}
}
}
Remember to use if(...) else to stop executing all functions when it should only be an either/or situation.

AVPlayer addPeriodicTimeObserverForInterval:queue:usingBlock: doesn't stop

Description:
Our app is a music player app.
[AVPlayer addPeriodicTimeObserverForInterval:queue:usingBlock:]
We use this method to record the actual playing time of a user's song. Before it starts playing (and so does the single loop), we set the record time variable (playingSeconds) to 0 and keep +1 throughout the callback.
Normally, the method stops after a song is played.
But what we see in the background is that some users, a three-minute song, actually play for over 30,000 seconds (variable playingSeconds).
Question:
I just want to know what's causing the method to keep calling and not stop.
Or is there any other cause for this phenomenon?
Thanks!
#weakify(self);
self.playbackTimeObserver = [self.player addPeriodicTimeObserverForInterval:CMTimeMake(1.0, 1.0)
queue:NULL
usingBlock:^(CMTime time) {
#strongify(self);
if (!self.flagSeeking) {
double ct = self.playerItem.currentTime.value / self.playerItem.currentTime.timescale;
if (self.totalDuration == 0) {
self.totalDuration = self.playerItem.duration.value / self.playerItem.duration.timescale;
[self updateNowPlayingInfo:YES];
}
if (ct <= self.totalDuration && self.currentTime != ct && !self.isBuffering) {
self.currentTime = ct;
}
if (self.state == BPAudioPlayerStatePlaying && !self.isBuffering) {
if (ct <= 0) {
self.playingSeconds = 0;
} else {
self.playingSeconds += 1;
}
}
}

Strictly scheduled loop timing in Swift

What is the best way to schedule a repeated task with very strict timing (accurate and reliable enough for musical sequencing)? From the Apple docs, it is clear that NSTimer is not reliable in this sense (i.e., "A timer is not a real-time mechanism"). An approach that I borrowed from AudioKit's AKPlaygroundLoop seems consistent within about 4ms (if not quite accurate), and might be feasible:
class JHLoop: NSObject{
var trigger: Int {
return Int(60 * duration) // 60fps * t in seconds
}
var counter: Int = 0
var duration: Double = 1.0 // in seconds, but actual loop is ~1.017s
var displayLink: CADisplayLink?
weak var delegate: JHLoopDelegate?
init(dur: Double) {
duration = dur
}
func stopLoop() {
displayLink?.invalidate()
}
func startLoop() {
counter = 0
displayLink = CADisplayLink(target: self, selector: "update")
displayLink?.frameInterval = 1
displayLink?.addToRunLoop(NSRunLoop.currentRunLoop(), forMode: NSRunLoopCommonModes)
}
func update() {
if counter < trigger {
counter++
} else {
counter = 0
// execute loop here
NSLog("loop executed")
delegate!.loopBody()
}
}
}
protocol JHLoopDelegate: class {
func loopBody()
}
↑ Replaced code with the actual class I will try to use for the time being.
For reference, I am hoping to make a polyrhythmic drum sequencer, so consistency is most important. I will also need to be able to smoothly modify the loop, and ideally the looping period, in real time.
Is there a better way to do this?
You can try to use mach_wait_until() api. It’s pretty good for high precision timer. I changed apple example from here a little. It works fine in mine command line tool project. In below code snippet I changed main() method from my project to startLoop(). Also you can see this.
Hope it helps.
static const uint64_t NANOS_PER_USEC = 1000ULL;
static const uint64_t NANOS_PER_MILLISEC = 1000ULL * NANOS_PER_USEC;
static const uint64_t NANOS_PER_SEC = 1000ULL * NANOS_PER_MILLISEC;
static mach_timebase_info_data_t timebase_info;
static uint64_t nanos_to_abs(uint64_t nanos) {
return nanos * timebase_info.denom / timebase_info.numer;
}
func startLoop() {
while(true) { //
int64_t nanosec = waitSomeTime(1000); // each second
NSLog(#"%lld", nanosec);
update() // call needed update here
}
}
uint64_t waitSomeTime(int64_t eachMillisec) {
uint64_t start;
uint64_t end;
uint64_t elapsed;
uint64_t elapsedNano;
if ( timebase_info.denom == 0 ) {
(void) mach_timebase_info(&timebase_info);
}
// Start the clock.
start = mach_absolute_time();
mach_wait_until(start + nanos_to_abs(eachMillisec * NANOS_PER_MILLISEC));
// Stop the clock.
end = mach_absolute_time();
// Calculate the duration.
elapsed = end - start;
elapsedNano = elapsed * timebase_info.numer / timebase_info.denom;
return elapsedNano;
}

How do I use the AudioUnit to play the audio stream from server?

- (void)openPlayThreadWithRtmpURL:(NSString *)rtmpURL {
spx_int16_t *input_buffer;
do {
if (self.rtmpDelegate) {
[self.rtmpDelegate evenCallbackWithEvent:2000];
}
//init speex decoder and config;
speex_bits_init(&dbits);
dec_state = speex_decoder_init(&speex_wb_mode);
speex_decoder_ctl(dec_state, SPEEX_GET_FRAME_SIZE, &dec_frame_size);
input_buffer = malloc(dec_frame_size * sizeof(short));
NSLog(#"Init Speex decoder success frame_size = %d",dec_frame_size);
//init rtmp
pPlayRtmp = RTMP_Alloc();
RTMP_Init(pPlayRtmp);
NSLog(#"Play RTMP_Init %#\n", rtmpURL);
if (!RTMP_SetupURL(pPlayRtmp, (char*)[rtmpURL UTF8String])) {
NSLog(#"Play RTMP_SetupURL error\n");
if(self.rtmpDelegate) {
[self.rtmpDelegate evenCallbackWithEvent:2002];
}
break;
}
if (!RTMP_Connect(pPlayRtmp, NULL) || !RTMP_ConnectStream(pPlayRtmp, 0)) {
NSLog(#"Play RTMP_Connect or RTMP_ConnectStream error\n");
if(self.rtmpDelegate) {
[self.rtmpDelegate evenCallbackWithEvent:2002];
}
break;
}
if(self.rtmpDelegate) {
[self.rtmpDelegate evenCallbackWithEvent:2001];
}
NSLog(#"Player RTMP_Connected \n");
RTMPPacket rtmp_pakt = {0};
isStartPlay = YES;
while (isStartPlay && RTMP_ReadPacket(pPlayRtmp, &rtmp_pakt)) {
if (RTMPPacket_IsReady(&rtmp_pakt)) {
if (!rtmp_pakt.m_nBodySize) {
continue;
}
if (rtmp_pakt.m_packetType == RTMP_PACKET_TYPE_AUDIO) {
NSLog(#"Audio size = %d head = %d time = %d", rtmp_pakt.m_nBodySize, rtmp_pakt.m_body[0], rtmp_pakt.m_nTimeStamp);
speex_bits_read_from(&dbits, rtmp_pakt.m_body + 1, rtmp_pakt.m_nBodySize - 1);
speex_decode_int(dec_state, &dbits, input_buffer); //audioData in the input_buffer
//do something...
} else if (rtmp_pakt.m_packetType == RTMP_PACKET_TYPE_VIDEO) {
// 处理视频数据包
} else if (rtmp_pakt.m_packetType == RTMP_PACKET_TYPE_INVOKE) {
// 处理invoke包
NSLog(#"RTMP_PACKET_TYPE_INVOKE");
RTMP_ClientPacket(pPlayRtmp,&rtmp_pakt);
} else if (rtmp_pakt.m_packetType == RTMP_PACKET_TYPE_INFO) {
// 处理信息包
//NSLog(#"RTMP_PACKET_TYPE_INFO");
} else if (rtmp_pakt.m_packetType == RTMP_PACKET_TYPE_FLASH_VIDEO) {
// 其他数据
int index = 0;
while (1) {
int StreamType; //1-byte
int MediaSize; //3-byte
int TiMMER; //3-byte
int Reserve; //4-byte
char* MediaData; //MediaSize-byte
int TagLen; //4-byte
StreamType = rtmp_pakt.m_body[index];
index += 1;
MediaSize = bigThreeByteToInt(rtmp_pakt.m_body + index);
index += 3;
TiMMER = bigThreeByteToInt(rtmp_pakt.m_body + index);
index += 3;
Reserve = bigFourByteToInt(rtmp_pakt.m_body + index);
index += 4;
MediaData = rtmp_pakt.m_body + index;
index += MediaSize;
TagLen = bigFourByteToInt(rtmp_pakt.m_body + index);
index += 4;
//NSLog(#"bodySize:%d index:%d",rtmp_pakt.m_nBodySize,index);
//LOGI("StreamType:%d MediaSize:%d TiMMER:%d TagLen:%d\n", StreamType, MediaSize, TiMMER, TagLen);
if (StreamType == 0x08) {
//音频包
//int MediaSize = bigThreeByteToInt(rtmp_pakt.m_body+1);
// LOGI("FLASH audio size:%d head:%d time:%d\n", MediaSize, MediaData[0], TiMMER);
speex_bits_read_from(&dbits, MediaData + 1, MediaSize - 1);
speex_decode_int(dec_state, &dbits, input_buffer);
//[mAudioPlayer putAudioData:input_buffer];
// putAudioQueue(output_buffer,dec_frame_size);
} else if (StreamType == 0x09) {
//视频包
// LOGI( "video size:%d head:%d\n", MediaSize, MediaData[0]);
}
if (rtmp_pakt.m_nBodySize == index) {
break;
}
}
}
RTMPPacket_Free(&rtmp_pakt);
}
}
if (isStartPlay) {
if(self.rtmpDelegate) {
[self.rtmpDelegate evenCallbackWithEvent:2005];
}
isStartPlay = NO;
}
} while (0);
[mAudioPlayer stopPlay];
if (self.rtmpDelegate) {
[self.rtmpDelegate evenCallbackWithEvent:2004];
}
if (RTMP_IsConnected(pPlayRtmp)) {
RTMP_Close(pPlayRtmp);
}
RTMP_Free(pPlayRtmp);
free(input_buffer);
speex_bits_destroy(&dbits);
speex_decoder_destroy(dec_state);
}
This is my custom method. RtmpURL is a NSString'S object, it is a stream server address. Use this method, I can get the encoded of audio stream from the server, after that, I use speex decoder to decode the data that I got, just like this:
//init speex decoder and config;
speex_bits_init(&dbits);
dec_state = speex_decoder_init(&speex_wb_mode);
speex_decoder_ctl(dec_state, SPEEX_GET_FRAME_SIZE, &dec_frame_size);
input_buffer = malloc(dec_frame_size * sizeof(short));
NSLog(#"Init Speex decoder success frame_size = %d",dec_frame_size);
if (rtmp_pakt.m_packetType == RTMP_PACKET_TYPE_AUDIO) {
NSLog(#"Audio size = %d head = %d time = %d", rtmp_pakt.m_nBodySize, rtmp_pakt.m_body[0], rtmp_pakt.m_nTimeStamp);
speex_bits_read_from(&dbits, rtmp_pakt.m_body + 1, rtmp_pakt.m_nBodySize - 1);
speex_decode_int(dec_state, &dbits, input_buffer); //audioData in the input_buffer
//do something...
}
Now, decoded of audio data are stored in the input_buffer, and this is my confusion. How do I use the AudioUnit to play the audio data.And this is my playback callback function:
OSStatus playCallback(void *inRefCon,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList *ioData){
AudioPlayer *THIS = (__bridge AudioPlayer *)inRefCon;
//How do I use the AudioUnit to play the audio stream from server?
return noErr;
}
I hope some friends to solve my confusion, if you were used the audioUnit, Thank you so much!
In your playCallback, you need to copy the audio into the buffer ioData.
For example
memcpy (ioData->mBuffers[0].mData, input_buffer + offset, numBytes );
// increase offset based on how many frames it requests.
The input variable inNumberFrames is the number of frames that it is ready for. This might be less than the number of frames in input_buffer. So you need to keep track of your play position.
I do not know your audio format this specified in your audio stream basic description. You need to calculate how many bytes need copied considering mono/stereo, number of bytes per channel, and of course inNumberFrames.
There are some very good resources here link

How to write XCTestCase for asynchronized method?

I'm working on a unit test for one of my model which is using asynchronized call to my rest api.The method used to request my API is like this:
requestOnComplete:(void(^)())complete onError:(void(^)(NSString* errMsg))fail;
In my test case:
-(void)testMyApiCall
{
[myObj requestOnComplete:^{
XCTAssertTrue(YES,#"Success");
} onError:^(NSString *errorString) {
XCTFail(#"Failed.%#", errorString);
}];
}
As I expected, this test always pass because of the asynchronized call. Can anybody advise on this issue? Thanks.
You can use lib XCAsyncTestCase
It simple to do XCTestCas asynchronized method.
Ex as your test function here is your code:
-(void)testMyApiCall
{
[myObj requestOnComplete:^{
[self notify:XCTestAsyncTestCaseStatusSucceeded];
} onError:^(NSString *errorString) {
[self notify:XCTestAsyncTestCaseStatusFailed];
}];
[self waitForStatus:XCTestAsyncTestCaseStatusSucceeded timeout:10];
}
I use these helper functions
BOOL XLCRunloopRunUntil(CFTimeInterval timeout, BOOL (^condition)(void));
#define XLCAssertTrueBeforeTimeout(expr, timeout, format...) \
XCTAssertTrue( (XLCRunloopRunUntil(timeout, ^BOOL{ return expr; })) , ## format )
static inline void XLCRunloopRunOnce()
{
while (CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.001, YES) == kCFRunLoopRunHandledSource ||
CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0, YES) == kCFRunLoopRunHandledSource);
}
static inline void XLCRunloopRun(CFTimeInterval timeout)
{
CFRunLoopRunInMode(kCFRunLoopDefaultMode, timeout, NO);
XLCRunloopRunOnce();
}
BOOL XLCRunloopRunUntil(CFTimeInterval timeout, BOOL (^condition)(void)) {
static mach_timebase_info_data_t timebaseInfo;
if ( timebaseInfo.denom == 0 ) {
mach_timebase_info(&timebaseInfo);
}
uint64_t timeoutNano = timeout * 1e9;
uint64_t start = mach_absolute_time();
do {
CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.01, YES);
XLCRunloopRunOnce();
uint64_t end = mach_absolute_time();
uint64_t elapsed = end - start;
uint64_t elapseNano = elapsed * timebaseInfo.numer / timebaseInfo.denom;
if (elapseNano >= timeoutNano) {
return NO;
}
} while (!condition());
return YES;
}
example
-(void)testMyApiCall
{
__block BOOL done = NO;
[myObj requestOnComplete:^{
// XCTAssertTrue(YES,#"Success"); // this line is pointless
done = YES;
} onError:^(NSString *errorString) {
XCTFail(#"Failed.%#", errorString);
done = YES;
}];
XLCAssertTrueBeforeTimeout(done, 1, "should finish within 1 seconds");
}

Resources