For a project I am trying to use the ESP8266 RTOS SDK.
As a first step I installed the tools and the toolchain. The hello_world example and the gpio example work fine. Then I tried the softAP example and got a Guru Meditation Error: Core 0 panic'ed (StoreProhibited). Exception was unhandled. I figured out that line 62, .authmode = WIFI_AUTH_WPA_WPA2_PSK, does not work. I tried WIFI_AUTH_WEP, WIFI_AUTH_WPA_PSK and WIFI_AUTH_WPA2_PSK, but the softAP only works with WIFI_AUTH_OPEN. Has anyone seen the same behavior, or does anyone have some tips?
Console Trace:
ets Jan 8 2013,rst cause:1, boot mode:(3,6)
load 0x40100000, len 7040, room 16
tail 0
chksum 0xe5
load 0x3ffe8408, len 24, room 8
tail 0
chksum 0x6c
load 0x3ffe8420, len 3312, room 8
tail 8
chksum 0x75
csum 0x75
I (123) boot: ESP-IDF v3.4-rc 2nd stage bootloader
I (123) boot: compile time 19:41:32
I (207) qio_mode: Enabling default flash chip QIO
I (207) boot: SPI Speed : 40MHz
I (208) boot: SPI Mode : QOUT
I (212) boot: SPI Flash Size : 2MB
I (219) boot: Partition Table:
I (224) boot: ## Label Usage Type ST Offset Length
I (236) boot: 0 nvs WiFi data 01 02 00009000 00006000
I (247) boot: 1 phy_init RF data 01 01 0000f000 00001000
I (259) boot: 2 factory factory app 00 00 00010000 000f0000
I (271) boot: End of partition table
I (277) esp_image: segment 0: paddr=0x00010010 vaddr=0x40210010 size=0x52c80 (339072) map
I (406) esp_image: segment 1: paddr=0x00062c98 vaddr=0x40262c90 size=0x0f594 ( 62868) map
I (428) esp_image: segment 2: paddr=0x00072234 vaddr=0x3ffe8000 size=0x005fc ( 1532) load
I (429) esp_image: segment 3: paddr=0x00072838 vaddr=0x40100000 size=0x00080 ( 128) load
I (439) esp_image: segment 4: paddr=0x000728c0 vaddr=0x40100080 size=0x05560 ( 21856) load
I (460) boot: Loaded app from partition at offset 0x10000
I (481) wifi softAP: ESP_WIFI_MODE_AP
I (484) system_api: Base MAC address is not set, read default base MAC address from EFUSE
I (486) system_api: Base MAC address is not set, read default base MAC address from EFUSE
phy_version: 1163.0, 665d56c, Jun 24 2020, 10:00:08, RTOS new
I (557) phy_init: phy ver: 1163_0
I (567) wifi softAP: ----------------###------------
ESP_ERROR_CHECK failed: esp_err_t 0x2 (ERROR) at 0x4021f7cc
file: "softap_example_main.c" line 73
func: wifi_init_softap
expression: esp_wifi_set_config(ESP_IF_WIFI_AP, &wifi_config)
abort() was called at PC 0x4021f7cf on core 0
Guru Meditation Error: Core 0 panic'ed (StoreProhibited). Exception was unhandled.
Core 0 register dump:
PC : 0x40221c72 PS : 0x00000030 A0 : 0x40221c70 A1 : 0x3ffeb550
A2 : 0x00000000 A3 : 0xffffffdb A4 : 0x00000001 A5 : 0x00000001
A6 : 0x00000000 A7 : 0x4026663c A8 : 0x00000020 A9 : 0x00000000
A10 : 0x00000008 A11 : 0x00000000 A12 : 0x00000000 A13 : 0x00000000
A14 : 0x00000000 A15 : 0x00000000 SAR : 0x0000001e EXCCAUSE: 0x0000001d
Backtrace: 0x40221c72:0x3ffeb550 0x4021f7d2:0x3ffeb560 0x4022182e:0x3ffeb570 0x40221894:0x3ffeb630 0x402118ef:0x3ffeb640
Example Code from GitHub: (examples/wifi/getting_started/softAP/main/softap_example_main.c)
/* WiFi softAP Example
This example code is in the Public Domain (or CC0 licensed, at your option.)
Unless required by applicable law or agreed to in writing, this
software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied.
*/
#include <string.h>
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "esp_system.h"
#include "esp_wifi.h"
#include "esp_event.h"
#include "esp_log.h"
#include "nvs_flash.h"
#include "lwip/err.h"
#include "lwip/sys.h"
/* The examples use WiFi configuration that you can set via project configuration menu.
If you'd rather not, just change the below entries to strings with
the config you want - ie #define EXAMPLE_WIFI_SSID "mywifissid"
*/
#define EXAMPLE_ESP_WIFI_SSID CONFIG_ESP_WIFI_SSID
#define EXAMPLE_ESP_WIFI_PASS CONFIG_ESP_WIFI_PASSWORD
#define EXAMPLE_MAX_STA_CONN CONFIG_ESP_MAX_STA_CONN
static const char *TAG = "wifi softAP";
static void wifi_event_handler(void* arg, esp_event_base_t event_base,
                               int32_t event_id, void* event_data)
{
    if (event_id == WIFI_EVENT_AP_STACONNECTED) {
        wifi_event_ap_staconnected_t* event = (wifi_event_ap_staconnected_t*) event_data;
        ESP_LOGI(TAG, "station "MACSTR" join, AID=%d",
                 MAC2STR(event->mac), event->aid);
    } else if (event_id == WIFI_EVENT_AP_STADISCONNECTED) {
        wifi_event_ap_stadisconnected_t* event = (wifi_event_ap_stadisconnected_t*) event_data;
        ESP_LOGI(TAG, "station "MACSTR" leave, AID=%d",
                 MAC2STR(event->mac), event->aid);
    }
}

void wifi_init_softap()
{
    tcpip_adapter_init();
    ESP_ERROR_CHECK(esp_event_loop_create_default());

    wifi_init_config_t cfg = WIFI_INIT_CONFIG_DEFAULT();
    ESP_ERROR_CHECK(esp_wifi_init(&cfg));

    ESP_ERROR_CHECK(esp_event_handler_register(WIFI_EVENT, ESP_EVENT_ANY_ID, &wifi_event_handler, NULL));

    wifi_config_t wifi_config = {
        .ap = {
            .ssid = EXAMPLE_ESP_WIFI_SSID,
            .ssid_len = strlen(EXAMPLE_ESP_WIFI_SSID),
            .password = EXAMPLE_ESP_WIFI_PASS,
            .max_connection = EXAMPLE_MAX_STA_CONN,
            .authmode = WIFI_AUTH_WPA_WPA2_PSK
        },
    };
    if (strlen(EXAMPLE_ESP_WIFI_PASS) == 0) {
        wifi_config.ap.authmode = WIFI_AUTH_OPEN;
    }

    ESP_ERROR_CHECK(esp_wifi_set_mode(WIFI_MODE_AP));
    ESP_ERROR_CHECK(esp_wifi_set_config(ESP_IF_WIFI_AP, &wifi_config));
    ESP_ERROR_CHECK(esp_wifi_start());

    ESP_LOGI(TAG, "wifi_init_softap finished. SSID:%s password:%s",
             EXAMPLE_ESP_WIFI_SSID, EXAMPLE_ESP_WIFI_PASS);
}

void app_main()
{
    ESP_ERROR_CHECK(nvs_flash_init());
    ESP_LOGI(TAG, "ESP_WIFI_MODE_AP");
    wifi_init_softap();
}
You have to run idf.py menuconfig and set the SSID and password values.
Passwords under 8 characters cause a Guru Meditation error if
wifi_config.ap.authmode = WIFI_AUTH_WEP or
wifi_config.ap.authmode = WIFI_AUTH_WPA_PSK or
wifi_config.ap.authmode = WIFI_AUTH_WPA2_PSK. Other authmodes are not yet tested.
To prevent this error, make sure your password has more than 7 characters, and/or change the if-condition after the wifi_config to:
if (strlen(EXAMPLE_ESP_WIFI_PASS) < 8) {
    wifi_config.ap.authmode = WIFI_AUTH_OPEN;
    ESP_LOGI(TAG, "password length under 8 characters. Set WIFI_AUTH_OPEN");
}
Your WiFi is then not protected, but your µC does not crash.
I am having an issue playing MOV camera-captured files from an iPhone. My FFmpeg implementation has no problem playing most file formats; this issue is exclusive to camera-captured MOV files.
When trying to open the file, I can see in the logs that many requests are made, each request decoding only one frame before a new request is made, which results in the video buffering extremely slowly.
It takes roughly a minute to buffer just a few seconds of the video.
Another thing to mention is that the very same problematic file plays without issue locally. The problem only occurs when trying to decode while streaming.
I compiled my code on Xcode 11, iOS SDK 13, with cocoapods mobile-ffmpeg-https 4.2.
Here is a rough representation of my code; it's pretty standard.
Here is how I open the AVFormatContext:
AVFormatContext *context = avformat_alloc_context();
context->interrupt_callback.callback = FFMPEGFormatContextIOHandler_IO_CALLBACK;
context->interrupt_callback.opaque = (__bridge void *)(handler);
av_log_set_level(AV_LOG_TRACE);

int result = avformat_open_input(&context, [request.urlAsString UTF8String], NULL, NULL);
if (result != 0) {
    if (context != NULL) {
        avformat_free_context(context);
    }
    return nil;
}

result = avformat_find_stream_info(context, NULL);
if (result < 0) {
    avformat_close_input(&context);
    return nil;
}
The video decoder is opened like so; the audio decoder is nearly identical:
AVCodecParameters *params = context->streams[streamIndex]->codecpar;
AVCodec *codec = avcodec_find_decoder(params->codec_id);
if (codec == NULL) {
    return NULL;
}

AVCodecContext *codecContext = avcodec_alloc_context3(codec);
if (codecContext == NULL) {
    return NULL;
}
codecContext->thread_count = 6;

int result = avcodec_parameters_to_context(codecContext, params);
if (result < 0) {
    avcodec_free_context(&codecContext);
    return NULL;
}

result = avcodec_open2(codecContext, codec, NULL);
if (result < 0) {
    avcodec_free_context(&codecContext);
    return NULL;
}
I read the data from the server like so:
AVPacket packet;
int result = av_read_frame(formatContext, &packet);
if (result == 0) {
    avcodec_send_packet(codecContext, &packet);
    // .... decode ....
}
Logs after opening the decoders:
// [tls] Request is made here
// [tls] Request response headers are here
Probing mov,mp4,m4a,3gp,3g2,mj2 score:100 size:2048
Probing mp3 score:1 size:2048
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] Format mov,mp4,m4a,3gp,3g2,mj2 probed with size=2048 and score=100
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] type:'ftyp' parent:'root' sz: 20 8 23077123
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] ISO: File Type Major Brand: qt
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] type:'wide' parent:'root' sz: 8 28 23077123
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] type:'mdat' parent:'root' sz: 23066642 36 23077123
// [tls] Request is made here
// [tls] Request response headers are here
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] stream 0, sample 4, dts 133333
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] stream 1, sample 48, dts 1114558
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] stream 2, sample 1, dts 2666667
[h264 # 0x116080200] nal_unit_type: 1(Coded slice of a non-IDR picture), nal_ref_idc: 1
// [tls] Request is made here
// [tls] Request response headers are here
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] stream 0, sample 4, dts 133333
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] stream 1, sample 48, dts 1114558
[mov,mp4,m4a,3gp,3g2,mj2 # 0x115918e00] stream 2, sample 1, dts 2666667
[h264 # 0x116080200] nal_unit_type: 1(Coded slice of a non-IDR picture), nal_ref_idc: 1
// [tls] Request is made here
// [tls] Request response headers are here
// ...
These are some warnings I found in the log:
[mov,mp4,m4a,3gp,3g2,mj2 # 0x11c030800] interrupted
[mov,mp4,m4a,3gp,3g2,mj2 # 0x11c030800] stream 0: start_time: 0.000 duration: 11.833
[mov,mp4,m4a,3gp,3g2,mj2 # 0x11c030800] stream 1: start_time: 0.000 duration: 11.832
[mov,mp4,m4a,3gp,3g2,mj2 # 0x11c030800] stream 2: start_time: 0.000 duration: 11.833
[mov,mp4,m4a,3gp,3g2,mj2 # 0x11c030800] stream 3: start_time: 0.000 duration: 11.833
[mov,mp4,m4a,3gp,3g2,mj2 # 0x11c030800] format: start_time: 0.000 duration: 11.833 bitrate=15601 kb/s
[mov,mp4,m4a,3gp,3g2,mj2 # 0x11c030800] Could not find codec parameters for stream 0 (Video: h264, 1 reference frame (avc1 / 0x31637661), none(bt709, left), 1920x1080, 1/1200, 15495 kb/s): unspecified pixel format
Consider increasing the value for the 'analyzeduration' and 'probesize' options
[mov,mp4,m4a,3gp,3g2,mj2 # 0x11c030800] After avformat_find_stream_info() pos: 23077123 bytes read:16293 seeks:1 frames:0
Also, when calling avformat_open_input(...), 2 GET requests are made before it returns.
Notice the "Probing mp3 score:1"; that is not shown for other MOV files or any other files.
I have tried different versions of FFmpeg, I have tried messing around with the delays of the stream, and I tried removing my custom interrupt callback; nothing has worked.
The code works fine with any other videos I have tested (mp4, mkv, avi).
Metadata of the test file:
Metadata:
major_brand : qt
minor_version : 0
compatible_brands: qt
creation_time : 2019-04-14T08:17:03.000000Z
com.apple.quicktime.make: Apple
com.apple.quicktime.model: iPhone 7
com.apple.quicktime.software: 12.2
com.apple.quicktime.creationdate: 2019-04-14T11:17:03+0300
Duration: 00:00:16.83, bitrate: N/A
Stream #0:0(und), 0, 1/600: Video: h264, 1 reference frame (avc1 / 0x31637661), none(bt709), 1920x1080 (0x0), 0/1, 15301 kb/s, 30 fps, 30 tbr, 600 tbn (default)
Metadata:
creation_time : 2019-04-14T08:17:03.000000Z
handler_name : Core Media Video
encoder : H.264
Stream #0:1(und), 0, 1/44100: Audio: aac (mp4a / 0x6134706D), 44100 Hz, mono, 100 kb/s (default)
Metadata:
creation_time : 2019-04-14T08:17:03.000000Z
handler_name : Core Media Audio
Stream #0:2(und), 0, 1/600: Data: none (mebx / 0x7862656D), 0/1, 0 kb/s (default)
Metadata:
creation_time : 2019-04-14T08:17:03.000000Z
handler_name : Core Media Metadata
Stream #0:3(und), 0, 1/600: Data: none (mebx / 0x7862656D), 0/1, 0 kb/s (default)
Metadata:
creation_time : 2019-04-14T08:17:03.000000Z
handler_name : Core Media Metadata
I found a fix for this (sort of):
Set the io_open callback of the AVFormatContext to your own function; when it is called, call the default io_open and then change the buffer size.
static int (*IO_OPEN_DEFAULT)(struct AVFormatContext *s, AVIOContext **pb, const char *url, int flags, AVDictionary **options);

int IO_OPEN_OVERRIDE(struct AVFormatContext *s, AVIOContext **pb, const char *url, int flags, AVDictionary **options) {
    int result = IO_OPEN_DEFAULT(s, pb, url, flags, options);
    pb[0]->buffer_size = 41239179;
    return result;
}
This fixes the issue. The value you set it to is usually very large (20 to 40 MB). You can get that value from the second network request's byte range when you are opening the format context (the first network request is made with byte range 0-*, and then the second network request is made with byte range XXXX-*, where XXXX should be the buffer size).
The reason this fixes the issue is that by buffering all of that data, the AVIOContext no longer needs to make a new network request to get the audio data. The audio data is already buffered (or at least the first position is).
There might be a better way to fix this issue; it seems that Apple MOV files separate their video and audio data into these massive chunks for some reason, and that causes FFmpeg to make a million network requests, one for each frame.
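For reference, a minimal sketch of how the override above might be installed, assuming the setup from the question (on recent FFmpeg versions, avformat_alloc_context() initializes io_open to the default implementation, so it can be saved before being replaced):

AVFormatContext *context = avformat_alloc_context();
IO_OPEN_DEFAULT = context->io_open;   // save FFmpeg's default io_open
context->io_open = IO_OPEN_OVERRIDE;  // install the override before avformat_open_input()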
Okay, I'm clearly missing some important piece here. I'm trying to do low-latency audio across the network, and my fundamental frames are 10 ms. I expected this to be no problem. My target phone is an iPhone X's speakers, so my hardware sample rate should be locked to 48000 Hz. I'm requesting 10 ms, which is a nice even divisor and should be 480, 960, 1920 or 3840 depending upon how you want to slice frames/samples/bytes.
Yet, for the life of me, I absolutely cannot get iOS to do anything I regard as sane. I get a 10.667 ms buffer duration, which is ludicrous--iOS is going out of its way to give me buffer sizes that don't divide evenly into the sample rate. Even worse, the frame is slightly LONG, which means that I have to absorb not one but two packets of latency in order to be able to fill that buffer. I can't get maximumFramesToRender to change at all, and the system is returning 0 as my sample rate even though it quite plainly is rendering at 48000 Hz.
I'm clearly missing something important--what is it? Did I forget to disconnect/connect something in order to get a direct hardware mapping? (My format is 1, which is pcmFormatFloat32--I would expect pcmFormatInt16 or pcmFormatInt32 for mapping directly to hardware, so something in the OS is probably getting in the way.) Pointers are appreciated and I'm happy to go read more. Or is AUAudioUnit simply half-baked and I need to go back to older, more useful APIs? Or did I completely miss the plot and low-latency audio folks use a whole different set of audio management functions?
Thanks for the help--it's much appreciated.
Output from code:
2019-11-07 23:28:29.782786-0800 latencytest[3770:50382] Ready to receive user events
2019-11-07 23:28:34.727478-0800 latencytest[3770:50382] Start button pressed
2019-11-07 23:28:34.727745-0800 latencytest[3770:50382] Launching auxiliary thread
2019-11-07 23:28:34.729278-0800 latencytest[3770:50445] Thread main started
2019-11-07 23:28:35.006005-0800 latencytest[3770:50445] Sample rate: 0
2019-11-07 23:28:35.016935-0800 latencytest[3770:50445] Buffer duration: 0.010667
2019-11-07 23:28:35.016970-0800 latencytest[3770:50445] Number of output busses: 2
2019-11-07 23:28:35.016989-0800 latencytest[3770:50445] Max frames: 4096
2019-11-07 23:28:35.017010-0800 latencytest[3770:50445] Can perform output: 1
2019-11-07 23:28:35.017023-0800 latencytest[3770:50445] Output Enabled: 1
2019-11-07 23:28:35.017743-0800 latencytest[3770:50445] Bus channels: 2
2019-11-07 23:28:35.017864-0800 latencytest[3770:50445] Bus format: 1
2019-11-07 23:28:35.017962-0800 latencytest[3770:50445] Bus rate: 0
2019-11-07 23:28:35.018039-0800 latencytest[3770:50445] Sleeping 0
2019-11-07 23:28:35.018056-0800 latencytest[3770:50445] Buffer count: 2 4096
2019-11-07 23:28:36.023220-0800 latencytest[3770:50445] Sleeping 1
2019-11-07 23:28:36.023400-0800 latencytest[3770:50445] Buffer count: 190 389120
2019-11-07 23:28:37.028610-0800 latencytest[3770:50445] Sleeping 2
2019-11-07 23:28:37.028790-0800 latencytest[3770:50445] Buffer count: 378 774144
2019-11-07 23:28:38.033983-0800 latencytest[3770:50445] Sleeping 3
2019-11-07 23:28:38.034142-0800 latencytest[3770:50445] Buffer count: 566 1159168
2019-11-07 23:28:39.039333-0800 latencytest[3770:50445] Sleeping 4
2019-11-07 23:28:39.039534-0800 latencytest[3770:50445] Buffer count: 756 1548288
2019-11-07 23:28:40.041787-0800 latencytest[3770:50445] Sleeping 5
2019-11-07 23:28:40.041943-0800 latencytest[3770:50445] Buffer count: 944 1933312
2019-11-07 23:28:41.042878-0800 latencytest[3770:50445] Sleeping 6
2019-11-07 23:28:41.043037-0800 latencytest[3770:50445] Buffer count: 1132 2318336
2019-11-07 23:28:42.048219-0800 latencytest[3770:50445] Sleeping 7
2019-11-07 23:28:42.048375-0800 latencytest[3770:50445] Buffer count: 1320 2703360
2019-11-07 23:28:43.053613-0800 latencytest[3770:50445] Sleeping 8
2019-11-07 23:28:43.053771-0800 latencytest[3770:50445] Buffer count: 1508 3088384
2019-11-07 23:28:44.058961-0800 latencytest[3770:50445] Sleeping 9
2019-11-07 23:28:44.059119-0800 latencytest[3770:50445] Buffer count: 1696 3473408
Actual code:
import UIKit
import os.log
import Foundation
import AudioToolbox
import AVFoundation
class AuxiliaryWork: Thread {
    let II_SAMPLE_RATE = 48000

    var iiStopRequested: Int32 = 0;  // Int32 is normally guaranteed to be atomic on most architectures
    var iiBufferFillCount: Int32 = 0;
    var iiBufferByteCount: Int32 = 0;

    func requestStop() {
        iiStopRequested = 1;
    }

    func myAVAudioSessionInterruptionNotificationHandler(notification: Notification) -> Void {
        os_log(OSLogType.info, "AVAudioSession Interrupted: %s", notification.debugDescription)
    }

    func myAudioUnitProvider(actionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>, timestamp: UnsafePointer<AudioTimeStamp>,
                             frameCount: AUAudioFrameCount, inputBusNumber: Int, inputData: UnsafeMutablePointer<AudioBufferList>) -> AUAudioUnitStatus {
        let ppInputData = UnsafeMutableAudioBufferListPointer(inputData)
        let iiNumBuffers = ppInputData.count

        if (iiNumBuffers > 0) {
            assert(iiNumBuffers == 2)
            for bbBuffer in ppInputData {
                assert(Int(bbBuffer.mDataByteSize) == 2048)  // FIXME: This should be 960 or 1920 ...
                iiBufferFillCount += 1
                iiBufferByteCount += Int32(bbBuffer.mDataByteSize)
                memset(bbBuffer.mData, 0, Int(bbBuffer.mDataByteSize))  // Just send silence
            }
        } else {
            os_log(OSLogType.error, "Zero buffers from system")
            assert(iiNumBuffers != 0)  // Force crash since os_log would cause an audio hiccup due to locks anyway
        }

        return noErr
    }

    override func main() {
        os_log(OSLogType.info, "Thread main started")

#if os(iOS)
        let kOutputUnitSubType = kAudioUnitSubType_RemoteIO
#else
        let kOutputUnitSubType = kAudioUnitSubType_HALOutput
#endif

        let audioSession = AVAudioSession.sharedInstance()  // FIXME: Causes the following message No Factory registered for id
        try! audioSession.setCategory(AVAudioSession.Category.playback, options: [])
        try! audioSession.setMode(AVAudioSession.Mode.measurement)
        try! audioSession.setPreferredSampleRate(48000.0)
        try! audioSession.setPreferredIOBufferDuration(0.010)

        NotificationCenter.default.addObserver(
            forName: AVAudioSession.interruptionNotification,
            object: nil,
            queue: nil,
            using: myAVAudioSessionInterruptionNotificationHandler
        )

        let ioUnitDesc = AudioComponentDescription(
            componentType: kAudioUnitType_Output,
            componentSubType: kOutputUnitSubType,
            componentManufacturer: kAudioUnitManufacturer_Apple,
            componentFlags: 0,
            componentFlagsMask: 0)
        let auUnit = try! AUAudioUnit(componentDescription: ioUnitDesc,
                                      options: AudioComponentInstantiationOptions())

        auUnit.outputProvider = myAudioUnitProvider;
        auUnit.maximumFramesToRender = 256

        try! audioSession.setActive(true)
        try! auUnit.allocateRenderResources()  // Make sure audio unit has hardware resources--we could provide the buffers from the circular buffer if we want
        try! auUnit.startHardware()

        os_log(OSLogType.info, "Sample rate: %d", audioSession.sampleRate);
        os_log(OSLogType.info, "Buffer duration: %f", audioSession.ioBufferDuration);
        os_log(OSLogType.info, "Number of output busses: %d", auUnit.outputBusses.count);
        os_log(OSLogType.info, "Max frames: %d", auUnit.maximumFramesToRender);
        os_log(OSLogType.info, "Can perform output: %d", auUnit.canPerformOutput)
        os_log(OSLogType.info, "Output Enabled: %d", auUnit.isOutputEnabled)
        //os_log(OSLogType.info, "Audio Format: %p", audioFormat)

        var bus0 = auUnit.outputBusses[0]
        os_log(OSLogType.info, "Bus channels: %d", bus0.format.channelCount)
        os_log(OSLogType.info, "Bus format: %d", bus0.format.commonFormat.rawValue)
        os_log(OSLogType.info, "Bus rate: %d", bus0.format.sampleRate)

        for ii in 0..<10 {
            if (iiStopRequested != 0) {
                os_log(OSLogType.info, "Manual stop requested");
                break;
            }

            os_log(OSLogType.info, "Sleeping %d", ii);
            os_log(OSLogType.info, "Buffer count: %d %d", iiBufferFillCount, iiBufferByteCount)
            Thread.sleep(forTimeInterval: 1.0);
        }

        auUnit.stopHardware()
    }
}
class FirstViewController: UIViewController {
    var thrAuxiliaryWork: AuxiliaryWork? = nil;

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    @IBAction func startButtonPressed(_ sender: Any) {
        os_log(OSLogType.error, "Start button pressed");
        os_log(OSLogType.error, "Launching auxiliary thread");
        thrAuxiliaryWork = AuxiliaryWork();
        thrAuxiliaryWork?.start();
    }

    @IBAction func stopButtonPressed(_ sender: Any) {
        os_log(OSLogType.error, "Stop button pressed");
        os_log(OSLogType.error, "Manually stopping auxiliary thread");
        thrAuxiliaryWork?.requestStop();
    }

    @IBAction func muteButtonPressed(_ sender: Any) {
        os_log(OSLogType.error, "Mute button pressed");
    }

    @IBAction func unmuteButtonPressed(_ sender: Any) {
        os_log(OSLogType.error, "Unmute button pressed");
    }
}
You cannot beat iOS silicon hardware into submission by assuming the API will do it for you. You have to do your own buffering if you want to abstract the hardware.
For the very best (lowest) latencies, your software will have to (potentially dynamically) adapt to the actual hardware capabilities, which can vary from device to device, and mode to mode.
The hardware sample rate appears to be either 44.1ksps (older iOS devices), 48ksps (newer arm64 iOS devices), or an integer multiple thereof (and potentially other rates when plugging in non-AirPod Bluetooth headsets, or external ADCs). The actual hardware DMA (or equivalent) buffers seem to always be a power of 2 in size, potentially down to 64 samples on newest devices. However various iOS power saving modes will increase the buffer size (by powers of 2) up to 4k samples, especially on older iOS devices. If you request a sample rate other than the hardware rate, the OS might resample the buffers to a different size than a power of 2, and this size can change from Audio Unit callback to subsequent callback if the resampling ratio isn't an exact integer.
Audio Units are the lowest level accessible via public API on iOS devices. Everything else is built on top, and thus potentially incurs greater latencies. For instance, if you use the Audio Queue API with non-hardware buffer sizes, the OS will internally use power-of-2 audio buffers to access the hardware, and chop them up or fractionally concatenate them to return or fetch Audio Queue buffers of non-hardware sizes. Slower and jittery.
Far from being half-baked, for a long time the iOS API was the only API usable on mobile phones and tablets for live low-latency music performance. But by developing software matched to the hardware.
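To make that concrete, here is a minimal sketch of the kind of buffering this implies, with hypothetical names, Float mono samples, and synchronization omitted for clarity (a real render callback must never block, so production code would use a lock-free ring or atomics rather than plain Ints):

final class SampleRingBuffer {
    private var storage: [Float]
    private var readIndex = 0
    private var writeIndex = 0
    private(set) var availableSamples = 0

    init(capacity: Int) {
        storage = [Float](repeating: 0, count: capacity)
    }

    // Network side: push one fixed-size packet, e.g. 480 samples = 10 ms at 48 kHz.
    func write(_ samples: [Float]) {
        for sample in samples where availableSamples < storage.count {
            storage[writeIndex] = sample
            writeIndex = (writeIndex + 1) % storage.count
            availableSamples += 1
        }
    }

    // Render side: deliver exactly frameCount samples, whatever size the
    // hardware asks for; pad with silence on underrun instead of blocking.
    func read(into output: UnsafeMutablePointer<Float>, frameCount: Int) {
        for i in 0..<frameCount {
            if availableSamples > 0 {
                output[i] = storage[readIndex]
                readIndex = (readIndex + 1) % storage.count
                availableSamples -= 1
            } else {
                output[i] = 0  // underrun: silence beats a glitch
            }
        }
    }
}

The output provider then fills each callback from read(into:frameCount:) with whatever frame count iOS chooses (512, 4096, ...), while the network side pushes its fixed 480-sample packets through write(_:); the ring absorbs the size mismatch at the cost of roughly one packet of extra latency.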
I would like to retrieve the UID of MiFare cards. I'm using an iPhone X, Xcode 11 and iOS 13.
I'm aware this wasn't possible (specifically reading the UID) until iOS 13 according to this website: https://gototags.com/blog/apple-expands-nfc-on-iphone-in-ios-13/ and this guy: https://www.reddit.com/r/apple/comments/c0gzf0/clearing_up_misunderstandings_and/
The phone's NFC reader correctly detects the card; however, the unique identifier is always returned as empty or nil. I can read the payload, though. Irrelevant to iOS, but I can do this on Android (which confirms the card isn't faulty or just odd).
Apple Sample Project: https://developer.apple.com/documentation/corenfc/building_an_nfc_tag-reader_app
func tagReaderSession(_ session: NFCTagReaderSession, didDetect tags: [NFCTag]) {
    if case let NFCTag.miFare(tag) = tags.first! {
        session.connect(to: tags.first!) { (error: Error?) in
            let apdu = NFCISO7816APDU(instructionClass: 0, instructionCode: 0xB0, p1Parameter: 0, p2Parameter: 0, data: Data(), expectedResponseLength: 16)
            tag.queryNDEFStatus(completionHandler: { (status: NFCNDEFStatus, e: Int, error: Error?) in
                debugPrint("\(status) \(e) \(error)")
            })

            tag.sendMiFareISO7816Command(apdu) { (data, sw1, sw2, error) in
                debugPrint(data)
                debugPrint(error)
                debugPrint(tag.identifier)
                debugPrint(String(data: tag.identifier, encoding: .utf8))
            }
        }
    }
}
I'm aware of these sorts of hacks: CoreNFC not reading UID in iOS
But they are closed and only applied to iOS 11 for a short time in the past.
Ok, I have an answer.
tag.identifier isn't empty -- per se -- if you examine it in Xcode's debugger it appears empty (0x00 is the value!). Its type is Data, and printing it will reveal the length of the Data but not how it's encoded. In this case it's a [UInt8] stored as a bag of bits. I don't understand why Apple has done it this way -- it's clunky -- I'm sure they have good reasons. I would have stored it as a String -- after all, the whole point of a high-level language like Swift is to abstract us away from such hardware implementation details.
The following code will retrieve the UID from a MiFare card:
if case let NFCTag.miFare(tag) = tags.first! {
    session.connect(to: tags.first!) { (error: Error?) in
        let apdu = NFCISO7816APDU(instructionClass: 0, instructionCode: 0xB0, p1Parameter: 0, p2Parameter: 0, data: Data(), expectedResponseLength: 16)
        tag.sendMiFareISO7816Command(apdu) { (apduData, sw1, sw2, error) in
            let tagUIDData = tag.identifier
            var byteData: [UInt8] = []
            tagUIDData.withUnsafeBytes { byteData.append(contentsOf: $0) }
            // Format each byte as two hex digits; the original manual
            // leading-zero check mishandled two-digit hex values like 0x1a.
            var uidString = ""
            for byte in byteData {
                uidString.append(String(format: "%02x", byte))
            }
            debugPrint("\(byteData) converted to Tag UID: \(uidString)")
        }
    }
}
I know you have said that it returns nil, but for clarity for future readers:
Assuming it is not a FeliCa tag, the UID should be in the identifier field when the tag is detected:
func tagReaderSession(_ session: NFCTagReaderSession, didDetect tags: [NFCTag]) {
    if case let NFCTag.miFare(tag) = tags.first! {
        print(tag.identifier as NSData)
    }
}
But in your case it's empty (see edit below). For most tags, the APDU to get the UID of a tag is:
0xff // Class
0xca // INS
0x00 // P1
0x00 // P2
0x00 // Le
so you could try using tag.sendMiFareCommand to send that command manually.
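For example, a quick sketch (untested against your tag; tag is the NFCMiFareTag from your handler, and the exact response layout depends on the tag, so treat the trailing status bytes as an assumption):

let getUID = Data([0xFF, 0xCA, 0x00, 0x00, 0x00])  // CLA, INS, P1, P2, Le from above
tag.sendMiFareCommand(commandPacket: getUID) { (response: Data, error: Error?) in
    if let error = error {
        debugPrint(error)
        return
    }
    debugPrint(response as NSData)  // expected: UID bytes, typically followed by SW1/SW2
}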
Edit: response from OP: it wasn't empty, but this was unclear because printing Data in Swift doesn't show its contents in the console.
In iOS 13 I was able to read the tag.identifier value for the MIFARE family's DESFire and UltraLight tags, same as in @scott-condron's answer, but for various MIFARE Classic ICs (the unknown family member?) my console shows different error types.
Perhaps private framework APIs similar to the iOS11 work-around in the hack you mentioned would be helpful in these cases, e.g. to intercept and amend the discovery polling routine, but I wouldn't know which ones or how to use them.
Below you can find some test results for MIFARE Classic 4K (emulation) tags, as also reported in this GitHub thread and this MIFARE support thread. Following Table 6 of Application Note #10833, the Select Acknowledge (SAK) value of 0x38 of the emulation tags below translates into 0 0 1 1 1 0 0 0 for bits 8..1, i.e. bits 6, 5, and 4 are 1; these SAK values therefore classify as SmartMX with CLASSIC 4K as per Figure 3 of Application Note #10834.
An Infineon Classic 4K emulation successfully logs "1 tags found" with the correct UID (31:9A:2F:88), ATQA (0x0200) and SAK (it detects 0x20, i.e. the ISO 14443-4 protocol, and 0x18, i.e. MIFARE 4K, both part of the expected value 0x38), and the respective tag types (both Generic 4A and MiFare) are classified correctly, but it then throws a Stack Error:
error 14:48:08.675369 +0200 nfcd 00000001 04e04390 -
[NFDriverWrapper connectTag:]:1436 Failed to connect to tag:
<NFTagInternal: 0x104e05cd0>-{length = 8, bytes = 0x7bad030077180efa}
{ Tech=A Type=Generic 4A ID={length = 4, bytes = 0x319a2f88}
SAK={length = 1, bytes = 0x20} ATQA={length = 2, bytes = 0x0200} historicalBytes={length = 0, bytes = 0x}}
:
error 14:48:08.682881 +0200 nfcd 00000001 04e04390 -
[NFDriverWrapper connectTag:]:1436 Failed to connect to tag:
<NFTagInternal: 0x104e1d600>-{length = 8, bytes = 0x81ad0300984374f3}
{ Tech=A Type=MiFare ID={length = 4, bytes = 0x319a2f88}
SAK={length = 1, bytes = 0x18} ATQA={length = 2, bytes = 0x0200} historicalBytes={length = 0, bytes = 0x}}
:
default 14:48:08.683150 +0200 nfcd 00000001 04e07470 -
[_NFReaderSession handleRemoteTagsDetected:]:445 1 tags found
default 14:48:08.685792 +0200 nfcd 00000001 04e07470 -
[_NFReaderSession connect:callback:]:507 NFC-Example
:
error 14:48:08.693429 +0200 nfcd 00000001 04e04390 -
[NFDriverWrapper connectTag:]:1436 Failed to connect to tag:
<NFTagInternal: 0x104e05cd0>-{length = 8, bytes = 0x81ad0300984374f3}
{ Tech=A Type=MiFare ID={length = 4, bytes = 0x319a2f88}
SAK=(null) ATQA=(null) historicalBytes={length = 0, bytes = 0x}}
:
error 14:48:08.694019 +0200 NFC-Example 00000002 802e2700 -
[NFCTagReaderSession _connectTag:error:]:568 Error
Domain=NFCError Code=100 "Stack Error" UserInfo={NSLocalizedDescription=Stack Error, NSUnderlyingError=0x2822a86c0
{Error Domain=nfcd Code=15 "Stack Error" UserInfo={NSLocalizedDescription=Stack Error}}}
An NXP SmartMX (Classic 4K emulation) with UID CF:3E:40:04 is discovered initially, but a reception error during the ISO 14443-4A presence check (Proc Iso-Dep pres chk ntf: Receiption failed) continuously restarts the discovery polling until the session finally expires, possibly preventing the other SAK value 0x18 (for the MIFARE 4K tag type) from being received:
error 10:44:50.650673 +0200 nfcd Proc Iso-Dep pres chk ntf: Receiption failed
:
error 10:44:50.677470 +0200 nfcd 00000001 04e04390 -
[NFDriverWrapper disconnectTag:tagRemovalDetect:]:1448 Failed to disconnect tag:
<NFTagInternal: 0x104f09930>-{length = 8, bytes = 0x07320d00f3041861}
{ Tech=A Type=Generic 4A ID={length = 4, bytes = 0xcf3e4004}
SAK={length = 1, bytes = 0x20} ATQA={length = 2, bytes = 0x0200} historicalBytes={length = 0, bytes = 0x}}
default 10:44:50.677682 +0200 nfcd 00000001 04e04390 -
[NFDriverWrapper restartDiscovery]:1953
An actual NXP Classic 4K with UID 2D:FE:9B:87 remains undetected and throws no error. The discovery polling session for this tag simply times out after 60 seconds and logs the last 128 discovery messages transmitted (Tx) and received (Rx), among which the following pattern is repeated (and it does include the expected UID, 2D FE 9B 87):
error 11:42:19.511354 +0200 nfcd 1571305339.350902 Tx '21 03 07 03 FF 01 00 01 01 01 6F 61'
error 11:42:19.511484 +0200 nfcd 1571305339.353416 Rx '41 03 01'
error 11:42:19.511631 +0200 nfcd 1571305339.353486 Rx '00 F6 89'
error 11:42:19.511755 +0200 nfcd 1571305339.362455 Rx '61 05 14'
error 11:42:19.511905 +0200 nfcd 1571305339.362529 Rx '01 80 80 00 FF 01 09 02 00 04 2D FE 9B 87 01 18 00 00 00 00 2D 11'
error 11:42:19.512152 +0200 nfcd 1571305339.362734 Tx '21 06 01 00 44 AB'
error 11:42:19.512323 +0200 nfcd 1571305339.363959 Rx '41 06 01'
error 11:42:19.512489 +0200 nfcd 1571305339.364028 Rx '00 1D 79'
error 11:42:19.512726 +0200 nfcd 1571305339.364300 Rx '61 06 02'
error 11:42:19.512914 +0200 nfcd 1571305339.364347 Rx '00 00 EB 78'
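As a footnote to the SAK arithmetic above, a few lines to verify the bit pattern (0x38 = 0b00111000, i.e. the 0 0 1 1 1 0 0 0 shown for bits 8..1):

let sak = 0x38  // Select Acknowledge value from the logs above
for bit in (1...8).reversed() {
    // Application Note #10833 numbers the bits 8..1, so bit n is mask 1 << (n - 1)
    print("bit \(bit): \(sak & (1 << (bit - 1)) != 0 ? 1 : 0)")
}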
This question already has an answer here: Timestamp function that has been working reliably just caused EXC_BAD_INSTRUCTION (1 answer). Closed 5 years ago.
My code, which uses SimplePing:
func simplePing(_ pinger: SimplePing, didSendPacket packet: Data, sequenceNumber: UInt16) {
    begin[String(sequenceNumber)] = Int(Date().timeIntervalSince1970 * 1000)  // AppDelegate.swift:185
    print("Send: \(Common.startCount)")
}
It works perfectly in my simulator and on my iPhone, but after making it available on the App Store, I received about 20 crash logs with errors like this:
Exception Type: EXC_BREAKPOINT (SIGTRAP)
Exception Codes: 0x0000000000000001, 0x00000000e7ffdefe
Triggered by Thread: 1
...
Thread 1 name:
Thread 1 Crashed:
0 ME 0x0011e5cc specialized AppDelegate.simplePing(SimplePing, didSendPacket : Data, sequenceNumber : UInt16) -> () + 652 (AppDelegate.swift:185)
1 ME 0x00116990 #objc AppDelegate.simplePing(SimplePing, didSendPacket : Data, sequenceNumber : UInt16) -> () + 68
2 ME 0x00116818 #objc AppDelegate.simplePing(SimplePing, didSendPacket : Data, sequenceNumber : UInt16) -> () + 40
3 ME 0x000f3348 -[SimplePing sendPingWithData:] + 640 (SimplePing.m:297)
...
I can't reproduce that crash, so I have to analyze that line of code:
begin[String(sequenceNumber)]
begin is initialized with begin = [String: Int](), so its type is [String: Int], and sequenceNumber's type is UInt16. So I think begin[String(sequenceNumber)] doesn't have any potential bug.
Int(Date().timeIntervalSince1970 * 1000)
And Int(Date().timeIntervalSince1970 * 1000) is just something like Int(aDouble * 1000); it seems correct in any situation.
So I'm really confused by that crash log; could anyone please give me some hints?
From the docs for Int:
On 32-bit platforms, Int is the same size as Int32, and on 64-bit platforms, Int is the same size as Int64.
A signed 32-bit integer has a maximum value of 2,147,483,647.
At the moment, Int(Date().timeIntervalSince1970 * 1000) returns a value of 1,495,855,170,970.
That is significantly larger than what will fit in a 32-bit integer.
The crash is caused by the overflow that occurs when converting the Double to an Int on a 32-bit iOS device.
I would suggest the explicit use of Int64 instead of Int (note that begin must then be declared as [String: Int64], since an Int64 value cannot be stored in a [String: Int] dictionary):
begin[String(sequenceNumber)] = Int64(Date().timeIntervalSince1970 * 1000)
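A minimal sketch of the adjusted code under that assumption (begin redeclared as [String: Int64]):

var begin = [String: Int64]()  // Int64 is 64 bits on every platform, unlike Int

func simplePing(_ pinger: SimplePing, didSendPacket packet: Data, sequenceNumber: UInt16) {
    // 1,495,855,170,970 fits comfortably in Int64 (max is about 9.2 * 10^18)
    begin[String(sequenceNumber)] = Int64(Date().timeIntervalSince1970 * 1000)
    print("Send: \(Common.startCount)")
}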