Where to save GoogleService-Info.plist in Flutter without Xcode?

My app is already submitted, reviewed, and online in Google Play & iTunes, but I recently added the firebase_messaging plugin for a notification feature.
While setting up Firebase Cloud Messaging, Google had me download two files:
google-services.json - saved in my Flutter app's folder: MyApp\android\app
GoogleService-Info.plist - I don't know where to save it; I tried MyApp\ios\Runner and MyApp\ios\Runner.xcodeworkspace without success
All the tutorials use Xcode to set up the GoogleService-Info.plist file properly, but I don't own a Mac and I've been successfully using the Windows application AppUploader to submit my iOS apps.
Is there any way to attach GoogleService-Info.plist to my project without using Xcode?

If you want to avoid using Xcode, you have to manually edit this file:
/ios/Runner.xcodeproj/project.pbxproj
You will need to reference GoogleService-Info.plist so that it gets included during the build process.
Here are the four different sections where you need to add a line (see the example line for GoogleService-Info.plist):
/* Begin PBXBuildFile section */
1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; };
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
3B80C3941E831B6300D905FE /* App.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3B80C3931E831B6300D905FE /* App.framework */; };
3B80C3951E831B6300D905FE /* App.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 3B80C3931E831B6300D905FE /* App.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; };
9705A1C61CF904A100538489 /* Flutter.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 9740EEBA1CF902C7004384FC /* Flutter.framework */; };
9705A1C71CF904A300538489 /* Flutter.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 9740EEBA1CF902C7004384FC /* Flutter.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
DE4C455E21DE1E4300EA0709 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = DE4C455D21DE1E4300EA0709 /* GoogleService-Info.plist */; };
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; };
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; };
/* End PBXBuildFile section */
Here:
/* Begin PBXFileReference section */
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; };
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; };
3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
3B80C3931E831B6300D905FE /* App.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = App.framework; path = Flutter/App.framework; sourceTree = "<group>"; };
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = "<group>"; };
74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
9740EEBA1CF902C7004384FC /* Flutter.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Flutter.framework; path = Flutter/Flutter.framework; sourceTree = "<group>"; };
97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
DE4C455D21DE1E4300EA0709 /* GoogleService-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = "<group>"; };
/* End PBXFileReference section */
Here:
97C146F01CF9000F007C117D /* Runner */ = {
isa = PBXGroup;
children = (
97C146FA1CF9000F007C117D /* Main.storyboard */,
97C146FD1CF9000F007C117D /* Assets.xcassets */,
97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */,
97C147021CF9000F007C117D /* Info.plist */,
DE4C455D21DE1E4300EA0709 /* GoogleService-Info.plist */,
97C146F11CF9000F007C117D /* Supporting Files */,
1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */,
1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */,
74858FAE1ED2DC5600515810 /* AppDelegate.swift */,
74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */,
);
path = Runner;
sourceTree = "<group>";
};
And here:
/* Begin PBXResourcesBuildPhase section */
97C146EC1CF9000F007C117D /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */,
3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */,
97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */,
97C146FC1CF9000F007C117D /* Main.storyboard in Resources */,
DE4C455E21DE1E4300EA0709 /* GoogleService-Info.plist in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
Tested successfully with Codemagic: this modification gets the provided GoogleService-Info.plist included in the app's IPA.

Somewhere along the line, AppUploader must be using Xcode to build an iOS app from your code. While I don't know how they do that (their documentation is scarce on details), the best you can hope for is to put GoogleService-Info.plist in the right place before uploading the app.
In my projects I always put GoogleService-Info.plist in the myappname/ios/Runner directory that the Flutter tooling auto-generates. That's where the build then picks it up.
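For reference, a minimal sketch of where the two config files end up in a default Flutter project layout (myappname is a placeholder):
myappname/
  android/app/google-services.json
  ios/Runner/GoogleService-Info.plist
  ios/Runner/Info.plist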

After a while I ran into this myself using the new Xcode, and I can confirm, as already suggested, that it works this way.
Add the file under
iOS -> Runner (here)
PS: From Xcode I always added the file as a reference; it did not get copied for me. If anyone has succeeded without the procedure above, I would like to understand exactly how to do it from Xcode.

Related

Xcode 14.2 producing multiple Info.plist

I have an Info.plist file I want to use inside my project.
The Location is set to Relative to Project.
The target membership is checked.
I get this error:
Multiple commands produce '...appname.app/Info.plist'
Under that error there are two child items:
Target 'targetname' (project 'projectname') has copy command from 'my own folder/Info.plist' to '/Users/.../Library/Developer/Xcode/DerivedData/project.../Build/Products/Debug-iphoneos/appname.app/Info.plist'
Target 'targetname' (project 'projectname') has process command with output '/Users/.../Library/Developer/Xcode/DerivedData/.../Build/Products/Debug-iphoneos/appname.app/Info.plist'
I tried editing the other Info.plist directly; it just gets regenerated.
I tried deleting the derived data folder; it just gets regenerated.
I added the Info.plist to Copy Bundle Resources; there are no duplicates.
There are no duplicates in Compile Sources either.
Could it be something in
project.pbxproj
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 56;
objects = {
/* Begin PBXBuildFile section */
75B28283299B83B10087A029 /* Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 75B28282299B83B10087A029 /* Info.plist */; };
75E959AD299943800053FCFD /* SettingsModal.swift in Sources */ = {isa = PBXBuildFile; fileRef = 75E959AC299943800053FCFD /* SettingsModal.swift */; };
75F816D829980B1E002062A9 /* appname.swift in Sources */ = {isa = PBXBuildFile; fileRef = 75F816D729980B1E002062A9 /* appname.swift */; };
75F816DA29980B1E002062A9 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 75F816D929980B1E002062A9 /* ContentView.swift */; };
75F816E129980B1F002062A9 /* Persistence.swift in Sources */ = {isa = PBXBuildFile; fileRef = 75F816E029980B1F002062A9 /* Persistence.swift */; };
75F816E429980B1F002062A9 /* appname.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = 75F816E229980B1F002062A9 /* projectname.xcdatamodeld */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
75B28282299B83B10087A029 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = Info.plist; path = projectname/Info.plist; sourceTree = SOURCE_ROOT; };
...
GENERATE_INFOPLIST_FILE = YES;
Any help is welcome, thanks
Temporary fix
INFOPLIST_KEY_NSFaceIDUsageDescription = "This app uses Face ID for authentication purposes.";
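For context (my assumption, not confirmed by the poster): with GENERATE_INFOPLIST_FILE = YES, Xcode synthesizes an Info.plist from the INFOPLIST_KEY_* build settings, so also keeping a file-based Info.plist in Copy Bundle Resources produces the "Multiple commands produce" conflict. A hedged sketch of the two alternative setups in the target's buildSettings (paths and names hypothetical):
/* Option A: let Xcode generate Info.plist and express custom keys as build settings */
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_KEY_NSFaceIDUsageDescription = "This app uses Face ID for authentication purposes.";
/* Option B: keep a hand-written Info.plist instead, and remove it from Copy Bundle Resources */
GENERATE_INFOPLIST_FILE = NO;
INFOPLIST_FILE = "projectname/Info.plist";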

Using both CAN1 & CAN2 in STM32F446 properly?

I want to use both CAN1 and CAN2 in my application, at 500 kbps and 125 kbps respectively. I have initialized both of them as per my requirements using HAL, where Tx is implemented in polling and Rx in interrupts. CAN1 Tx and Rx work perfectly. For CAN2, Tx works but Rx does not: the interrupt itself is not firing (HAL_CAN_RxFifo0MsgPendingCallback is never called). I've read in the datasheet that there is an SRAM sharing arrangement between the two CANs, but I'm unable to comprehend it. Is that the issue?
I'm attaching the code as well. Please check and help!
A little overview: all MCU clocks run at 16 MHz using the internal HSI, and no filtering is applied in the CAN configs.
################ MAIN FILE ####################
/* Private variables ---------------------------------------------------------*/
CAN_HandleTypeDef hcan1;
CAN_HandleTypeDef hcan2;
/* USER CODE BEGIN PV */
CAN_TxHeaderTypeDef txheader;
uint8_t txdata[8];
CAN_RxHeaderTypeDef RxHeader;
uint8_t RxData[8];
CAN_TxHeaderTypeDef txheader1;
uint8_t txdata1[8];
CAN_RxHeaderTypeDef RxHeader1;
uint8_t RxData1[8];
/* USER CODE END PV */
/* Private function prototypes -----------------------------------------------*/
void SystemClock_Config(void);
static void MX_GPIO_Init(void);
static void MX_CAN1_Init(void);
static void MX_CAN2_Init(void);
int main(void)
{
/* USER CODE BEGIN 1 */
uint8_t fill = 0;
/* USER CODE END 1 */
/* MCU Configuration--------------------------------------------------------*/
/* Reset of all peripherals, Initializes the Flash interface and the Systick. */
HAL_Init();
/* USER CODE BEGIN Init */
/* USER CODE END Init */
/* Configure the system clock */
SystemClock_Config();
/* USER CODE BEGIN SysInit */
/* USER CODE END SysInit */
/* Initialize all configured peripherals */
MX_GPIO_Init();
MX_CAN1_Init();
MX_CAN2_Init();
/* USER CODE BEGIN 2 */
/* USER CODE END 2 */
/* Infinite loop */
/* USER CODE BEGIN WHILE */
while (1)
{
/* USER CODE END WHILE */
/* USER CODE BEGIN 3 */
txheader.DLC = 8;
txheader.ExtId = 0x11111111;
txheader.IDE = CAN_ID_EXT;
txheader.RTR = CAN_RTR_DATA;
for(uint8_t i = 0; i<8;i++)
{
txdata[i] = i;
}
// txdata[0] = fill;
HAL_Delay(250);
HAL_CAN_AddTxMessage(&hcan1,&txheader,txdata, (uint32_t *)CAN_TX_MAILBOX0);
HAL_Delay(250);
HAL_CAN_AddTxMessage(&hcan2,&txheader,txdata, (uint32_t *)CAN_TX_MAILBOX0);
}
/* USER CODE END 3 */
}
void SystemClock_Config(void)
{
RCC_OscInitTypeDef RCC_OscInitStruct = {0};
RCC_ClkInitTypeDef RCC_ClkInitStruct = {0};
/** Configure the main internal regulator output voltage
*/
__HAL_RCC_PWR_CLK_ENABLE();
__HAL_PWR_VOLTAGESCALING_CONFIG(PWR_REGULATOR_VOLTAGE_SCALE3);
/** Initializes the RCC Oscillators according to the specified parameters
* in the RCC_OscInitTypeDef structure.
*/
RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_HSI;
RCC_OscInitStruct.HSIState = RCC_HSI_ON;
RCC_OscInitStruct.HSICalibrationValue = RCC_HSICALIBRATION_DEFAULT;
RCC_OscInitStruct.PLL.PLLState = RCC_PLL_NONE;
if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK)
{
Error_Handler();
}
/** Initializes the CPU, AHB and APB buses clocks
*/
RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_HCLK|RCC_CLOCKTYPE_SYSCLK
|RCC_CLOCKTYPE_PCLK1|RCC_CLOCKTYPE_PCLK2;
RCC_ClkInitStruct.SYSCLKSource = RCC_SYSCLKSOURCE_HSI;
RCC_ClkInitStruct.AHBCLKDivider = RCC_SYSCLK_DIV1;
RCC_ClkInitStruct.APB1CLKDivider = RCC_HCLK_DIV1;
RCC_ClkInitStruct.APB2CLKDivider = RCC_HCLK_DIV1;
if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct, FLASH_LATENCY_0) != HAL_OK)
{
Error_Handler();
}
}
/**
* @brief CAN1 Initialization Function
* @param None
* @retval None
*/
static void MX_CAN1_Init(void)
{
/* USER CODE BEGIN CAN1_Init 0 */
CAN_FilterTypeDef sFilterConfig;
/* USER CODE END CAN1_Init 0 */
/* USER CODE BEGIN CAN1_Init 1 */
/* USER CODE END CAN1_Init 1 */
hcan1.Instance = CAN1;
hcan1.Init.Prescaler = 2;
hcan1.Init.Mode = CAN_MODE_NORMAL;
hcan1.Init.SyncJumpWidth = CAN_SJW_1TQ;
hcan1.Init.TimeSeg1 = CAN_BS1_13TQ;
hcan1.Init.TimeSeg2 = CAN_BS2_2TQ;
hcan1.Init.TimeTriggeredMode = DISABLE;
hcan1.Init.AutoBusOff = DISABLE;
hcan1.Init.AutoWakeUp = DISABLE;
hcan1.Init.AutoRetransmission = DISABLE;
hcan1.Init.ReceiveFifoLocked = DISABLE;
hcan1.Init.TransmitFifoPriority = DISABLE;
if (HAL_CAN_Init(&hcan1) != HAL_OK)
{
Error_Handler();
}
/* USER CODE BEGIN CAN1_Init 2 */
sFilterConfig.FilterBank = 0;
sFilterConfig.FilterMode = CAN_FILTERMODE_IDMASK;
sFilterConfig.FilterScale = CAN_FILTERSCALE_32BIT;
sFilterConfig.FilterIdHigh = 0x0000;
sFilterConfig.FilterIdLow = 0x0000;
sFilterConfig.FilterMaskIdHigh = 0x0000;
sFilterConfig.FilterMaskIdLow = 0x0000;
sFilterConfig.FilterFIFOAssignment = CAN_RX_FIFO0;
sFilterConfig.FilterActivation = ENABLE;
sFilterConfig.SlaveStartFilterBank = 14;
if (HAL_CAN_ConfigFilter(&hcan1, &sFilterConfig) != HAL_OK)
{
/* Filter configuration Error */
Error_Handler();
}
HAL_CAN_Start(&hcan1);
/*##-4- Activate CAN RX notification #######################################*/
if (HAL_CAN_ActivateNotification(&hcan1, CAN_IT_RX_FIFO0_MSG_PENDING) != HAL_OK)
{
/* Notification Error */
Error_Handler();
}
/* USER CODE END CAN1_Init 2 */
}
/**
* @brief CAN2 Initialization Function
* @param None
* @retval None
*/
static void MX_CAN2_Init(void)
{
/* USER CODE BEGIN CAN1_Init 0 */
CAN_FilterTypeDef sFilterConfig;
/* USER CODE END CAN1_Init 0 */
/* USER CODE BEGIN CAN1_Init 1 */
/* USER CODE END CAN1_Init 1 */
hcan2.Instance = CAN2;
hcan2.Init.Prescaler = 8;
hcan2.Init.Mode = CAN_MODE_NORMAL;
hcan2.Init.SyncJumpWidth = CAN_SJW_1TQ;
hcan2.Init.TimeSeg1 = CAN_BS1_13TQ;
hcan2.Init.TimeSeg2 = CAN_BS2_2TQ;
hcan2.Init.TimeTriggeredMode = DISABLE;
hcan2.Init.AutoBusOff = DISABLE;
hcan2.Init.AutoWakeUp = DISABLE;
hcan2.Init.AutoRetransmission = DISABLE;
hcan2.Init.ReceiveFifoLocked = DISABLE;
hcan2.Init.TransmitFifoPriority = DISABLE;
if (HAL_CAN_Init(&hcan2) != HAL_OK)
{
Error_Handler();
}
/* USER CODE BEGIN CAN1_Init 2 */
sFilterConfig.FilterBank = 0;
sFilterConfig.FilterMode = CAN_FILTERMODE_IDMASK;
sFilterConfig.FilterScale = CAN_FILTERSCALE_32BIT;
sFilterConfig.FilterIdHigh = 0x0000;
sFilterConfig.FilterIdLow = 0x0000;
sFilterConfig.FilterMaskIdHigh = 0x0000;
sFilterConfig.FilterMaskIdLow = 0x0000;
sFilterConfig.FilterFIFOAssignment = CAN_RX_FIFO0;
sFilterConfig.FilterActivation = ENABLE;
sFilterConfig.SlaveStartFilterBank = 14;
if (HAL_CAN_ConfigFilter(&hcan2, &sFilterConfig) != HAL_OK)
{
/* Filter configuration Error */
Error_Handler();
}
HAL_CAN_Start(&hcan2);
/*##-4- Activate CAN RX notification #######################################*/
if (HAL_CAN_ActivateNotification(&hcan2, CAN_IT_RX_FIFO0_MSG_PENDING) != HAL_OK)
{
/* Notification Error */
Error_Handler();
}
/* USER CODE END CAN1_Init 2 */
}
/**
* @brief GPIO Initialization Function
* @param None
* @retval None
*/
static void MX_GPIO_Init(void)
{
/* GPIO Ports Clock Enable */
__HAL_RCC_GPIOA_CLK_ENABLE();
}
/* USER CODE BEGIN 4 */
/**
* @brief Rx Fifo 0 message pending callback
* @param hcan: pointer to a CAN_HandleTypeDef structure that contains
* the configuration information for the specified CAN.
* @retval None
*/
uint8_t cflag;
void HAL_CAN_RxFifo0MsgPendingCallback(CAN_HandleTypeDef *hcan)
{
/* Get RX message */
if (HAL_CAN_GetRxMessage(hcan, CAN_RX_FIFO0, &RxHeader, RxData) != HAL_OK)
{
/* Reception Error */
Error_Handler();
}
/* Display LEDx */
if ((RxHeader.StdId == 0x321) && (RxHeader.IDE == CAN_ID_STD) && (RxHeader.DLC == 2))
{
}
/* Display LEDx */
if ((RxHeader.ExtId == 0x11111111) && (RxHeader.IDE == CAN_ID_EXT) && (RxHeader.DLC == 8))
{
cflag = 1;
}
else
cflag = 0;
}
/* USER CODE END 4 */
##################### MSP FILE ######################
/**
* Initializes the Global MSP.
*/
void HAL_MspInit(void)
{
/* USER CODE BEGIN MspInit 0 */
/* USER CODE END MspInit 0 */
__HAL_RCC_SYSCFG_CLK_ENABLE();
__HAL_RCC_PWR_CLK_ENABLE();
/* System interrupt init*/
/* USER CODE BEGIN MspInit 1 */
/* USER CODE END MspInit 1 */
}
static uint32_t HAL_RCC_CAN1_CLK_ENABLED=0;
/**
* @brief CAN MSP Initialization
* This function configures the hardware resources used in this example
* @param hcan: CAN handle pointer
* @retval None
*/
void HAL_CAN_MspInit(CAN_HandleTypeDef* hcan)
{
GPIO_InitTypeDef GPIO_InitStruct = {0};
if(hcan->Instance==CAN1)
{
/* USER CODE BEGIN CAN1_MspInit 0 */
/* USER CODE END CAN1_MspInit 0 */
/* Peripheral clock enable */
HAL_RCC_CAN1_CLK_ENABLED++;
if(HAL_RCC_CAN1_CLK_ENABLED==1){
__HAL_RCC_CAN1_CLK_ENABLE();
}
__HAL_RCC_GPIOA_CLK_ENABLE();
/**CAN1 GPIO Configuration
PA11 ------> CAN1_RX
PA12 ------> CAN1_TX
*/
GPIO_InitStruct.Pin = GPIO_PIN_11|GPIO_PIN_12;
GPIO_InitStruct.Mode = GPIO_MODE_AF_PP;
GPIO_InitStruct.Pull = GPIO_NOPULL;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_VERY_HIGH;
GPIO_InitStruct.Alternate = GPIO_AF9_CAN1;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
/* USER CODE BEGIN CAN1_MspInit 1 */
HAL_NVIC_SetPriority(CAN1_RX0_IRQn, 1, 0);
HAL_NVIC_EnableIRQ(CAN1_RX0_IRQn);
/* USER CODE END CAN1_MspInit 1 */
}
else if(hcan->Instance==CAN2)
{
/* USER CODE BEGIN CAN2_MspInit 0 */
/* USER CODE END CAN2_MspInit 0 */
/* Peripheral clock enable */
__HAL_RCC_CAN2_CLK_ENABLE();
HAL_RCC_CAN1_CLK_ENABLED++;
if(HAL_RCC_CAN1_CLK_ENABLED==1){
__HAL_RCC_CAN1_CLK_ENABLE();
}
__HAL_RCC_GPIOB_CLK_ENABLE();
/**CAN2 GPIO Configuration
PB12 ------> CAN2_RX
PB13 ------> CAN2_TX
*/
GPIO_InitStruct.Pin = GPIO_PIN_12|GPIO_PIN_13;
GPIO_InitStruct.Mode = GPIO_MODE_AF_PP;
GPIO_InitStruct.Pull = GPIO_NOPULL;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_VERY_HIGH;
GPIO_InitStruct.Alternate = GPIO_AF9_CAN2;
HAL_GPIO_Init(GPIOB, &GPIO_InitStruct);
/* CAN2 interrupt Init */
HAL_NVIC_SetPriority(CAN2_RX0_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(CAN2_RX0_IRQn);
/* USER CODE BEGIN CAN2_MspInit 1 */
/* USER CODE END CAN2_MspInit 1 */
}
}
/**
* @brief CAN MSP De-Initialization
* This function frees the hardware resources used in this example
* @param hcan: CAN handle pointer
* @retval None
*/
void HAL_CAN_MspDeInit(CAN_HandleTypeDef* hcan)
{
if(hcan->Instance==CAN1)
{
/* USER CODE BEGIN CAN1_MspDeInit 0 */
/* USER CODE END CAN1_MspDeInit 0 */
/* Peripheral clock disable */
HAL_RCC_CAN1_CLK_ENABLED--;
if(HAL_RCC_CAN1_CLK_ENABLED==0){
__HAL_RCC_CAN1_CLK_DISABLE();
}
/**CAN1 GPIO Configuration
PA11 ------> CAN1_RX
PA12 ------> CAN1_TX
*/
HAL_GPIO_DeInit(GPIOA, GPIO_PIN_11|GPIO_PIN_12);
/* USER CODE BEGIN CAN1_MspDeInit 1 */
/* USER CODE END CAN1_MspDeInit 1 */
}
else if(hcan->Instance==CAN2)
{
/* USER CODE BEGIN CAN2_MspDeInit 0 */
/* USER CODE END CAN2_MspDeInit 0 */
/* Peripheral clock disable */
__HAL_RCC_CAN2_CLK_DISABLE();
HAL_RCC_CAN1_CLK_ENABLED--;
if(HAL_RCC_CAN1_CLK_ENABLED==0){
__HAL_RCC_CAN1_CLK_DISABLE();
}
/**CAN2 GPIO Configuration
PB12 ------> CAN2_RX
PB13 ------> CAN2_TX
*/
HAL_GPIO_DeInit(GPIOB, GPIO_PIN_12|GPIO_PIN_13);
/* CAN2 interrupt DeInit */
HAL_NVIC_DisableIRQ(CAN2_RX0_IRQn);
/* USER CODE BEGIN CAN2_MspDeInit 1 */
/* USER CODE END CAN2_MspDeInit 1 */
}
}
/* USER CODE BEGIN 1 */
/* USER CODE END 1 */
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
######################### IT FILE #############################
/* Includes ------------------------------------------------------------------*/
#include "main.h"
#include "stm32f4xx_it.h"
/* External variables --------------------------------------------------------*/
extern CAN_HandleTypeDef hcan2;
/**
* @brief This function handles the System tick timer.
*/
void SysTick_Handler(void)
{
/* USER CODE BEGIN SysTick_IRQn 0 */
/* USER CODE END SysTick_IRQn 0 */
HAL_IncTick();
/* USER CODE BEGIN SysTick_IRQn 1 */
/* USER CODE END SysTick_IRQn 1 */
}
/******************************************************************************/
/* STM32F4xx Peripheral Interrupt Handlers */
/* Add here the Interrupt Handlers for the used peripherals. */
/* For the available peripheral interrupt handler names, */
/* please refer to the startup file (startup_stm32f4xx.s). */
/******************************************************************************/
/**
* @brief This function handles CAN2 RX0 interrupt.
*/
void CAN2_RX0_IRQHandler(void)
{
/* USER CODE BEGIN CAN2_RX0_IRQn 0 */
/* USER CODE END CAN2_RX0_IRQn 0 */
HAL_CAN_IRQHandler(&hcan2);
/* USER CODE BEGIN CAN2_RX0_IRQn 1 */
/* USER CODE END CAN2_RX0_IRQn 1 */
}
/* USER CODE BEGIN 1 */
/**
* @brief This function handles CAN1 RX0 interrupt request.
* @param None
* @retval None
*/
extern CAN_HandleTypeDef hcan1;
void CAN1_RX0_IRQHandler(void)
{
HAL_CAN_IRQHandler(&hcan1);
}
/* USER CODE END 1 */
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
I solved my problem. I got help from the ST community; please refer to the link below.
https://community.st.com/s/feed/0D53W00000RQCwgSAH?t=1608012202888
The problem was the filter configuration for CAN1 and CAN2. There are 28 filter banks (0-27) split between CAN1 and CAN2: with SlaveStartFilterBank = 14, the first half (0-13) serves CAN1 and the rest (14-27) serves CAN2. So just make these changes in the code posted above -
CAN 1:
sFilterConfig.FilterBank = 0;
:
sFilterConfig.SlaveStartFilterBank = 14;
CAN2:
sFilterConfig.FilterBank = 14; // previously 0
:
sFilterConfig.SlaveStartFilterBank = 14; // previously 27
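Putting it together, a minimal sketch of the corrected CAN2 filter configuration (variable and handle names taken from the code above; only FilterBank changes relative to the original):
/* CAN2 must use a filter bank at or above SlaveStartFilterBank (banks 14-27) */
sFilterConfig.FilterBank = 14;
sFilterConfig.FilterMode = CAN_FILTERMODE_IDMASK;
sFilterConfig.FilterScale = CAN_FILTERSCALE_32BIT;
sFilterConfig.FilterIdHigh = 0x0000;
sFilterConfig.FilterIdLow = 0x0000;
sFilterConfig.FilterMaskIdHigh = 0x0000;
sFilterConfig.FilterMaskIdLow = 0x0000;
sFilterConfig.FilterFIFOAssignment = CAN_RX_FIFO0;
sFilterConfig.FilterActivation = ENABLE;
sFilterConfig.SlaveStartFilterBank = 14; /* banks 0-13 serve CAN1, banks 14-27 serve CAN2 */
if (HAL_CAN_ConfigFilter(&hcan2, &sFilterConfig) != HAL_OK)
{
  Error_Handler();
}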
Thanks to Lundin for suggesting sync point. Use this website for CAN Bit time calculations for various chips -
http://www.bittiming.can-wiki.info/
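For reference, the bit rates in the posted configuration work out as follows (assuming the CAN kernel clock is the 16 MHz APB1 clock set up above):
CAN1: 16 MHz / prescaler 2 = 8 MHz time quantum clock; 1 (SYNC) + 13 (BS1) + 2 (BS2) = 16 TQ per bit, so 8 MHz / 16 = 500 kbps, sample point at (1 + 13) / 16 = 87.5%
CAN2: 16 MHz / prescaler 8 = 2 MHz time quantum clock; 16 TQ per bit, so 2 MHz / 16 = 125 kbps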

Xcode pbxproj file's structure got changed

For some reason - maybe it has to do with the latest Xcode upgrade to version 7.2.1 - my pbxproj's structure got changed, and because of that, when I'm about to merge a PR, it just shows me one big conflict on the entire file.
The reason, as I see it, is that one of them has this structure (posting the start of the file):
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
009025BEF30C41848E6869F6 /* RolloutDynamic_18.m in Sources */ = {isa = PBXBuildFile; fileRef = E1BF845EAB9E4962853253B5 /* RolloutDynamic_18.m */; settings = {COMPILER_FLAGS = "-fobjc-arc"; }; };
00AFDB74BD584F1C9BD41B9D /* RolloutDynamic_03.m in Sources */ = {isa = PBXBuildFile; fileRef = 588F21D12977444EAE3C70F5 /* RolloutDynamic_03.m */; settings = {COMPILER_FLAGS = "-fobjc-arc"; }; };
054D17D506BF45EE98822AB9 /* RolloutDynamic_15.m in Sources */ = {isa = PBXBuildFile; fileRef = 54D47CE8182E4482955DA02E /* RolloutDynamic_15.m */; settings = {COMPILER_FLAGS = "-fobjc-arc"; }; };
and the other one has this structure (start of the file):
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>archiveVersion</key>
<string>1</string>
<key>classes</key>
<dict/>
<key>objectVersion</key>
<string>46</string>
<key>objects</key>
<dict>
<key>18011C201B1F865300F52714</key>
<dict>
<key>fileEncoding</key>
<string>4</string>
<key>isa</key>
<string>PBXFileReference</string>
<key>lastKnownFileType</key>
<string>sourcecode.c.h</string>
<key>path</key>
<string>UserProfileTableViewController.h</string>
<key>sourceTree</key>
<string><group></string>
</dict>
Any idea how this can be solved?

Why isn't my DEBUG preprocessor macro being recognized?

I took over a project that has several build schemes: demo, release, debug and production. Throughout the code there are several preprocessor #if statements, e.g.
#ifdef DEMO
static NSString *const URL_SMART_TAXI = @"http://demo.theapp.com";
#elif PRODUCTION
static NSString *const URL_SMART_TAXI = @"http://prod.theapp.com";
#elif DEBUG
static NSString *const URL_SMART_TAXI = @"http://localhost:8000";
#else
static NSString *const URL_SMART_TAXI = @"http://dev.theapp.com";
#endif
For some reason, this always works when I'm building with the demo scheme or the production one, but it just doesn't work for debug (whenever I change the scheme and run for debug, it always skips the debug branch and goes for the wildcard option).
I looked all over the project and I don't see any special treatment given to demo or production that's not given to debug.
If I run grep -nri %environment% *, this is the result:
grep -nri production *
project.pbxproj:2767: 84380FEB1705D3E40085487D /* Production */ = {
project.pbxproj:2797: name = Production;
project.pbxproj:2799: 84380FEC1705D3E40085487D /* Production */ = {
project.pbxproj:2832: "-DPRODUCTION",
project.pbxproj:2846: name = Production;
project.pbxproj:3013: 84380FEB1705D3E40085487D /* Production */,
project.pbxproj:3024: 84380FEC1705D3E40085487D /* Production */,
xcshareddata/xcschemes/theApp.xcscheme:47: buildConfiguration = "Production"
grep -nri demo *
project.pbxproj:2685: 6314932116E4F7D000B351CA /* Demo */ = {
project.pbxproj:2715: name = Demo;
project.pbxproj:2717: 6314932216E4F7D000B351CA /* Demo */ = {
project.pbxproj:2751: "-DDEMO",
project.pbxproj:2765: name = Demo;
project.pbxproj:3012: 6314932116E4F7D000B351CA /* Demo */,
project.pbxproj:3023: 6314932216E4F7D000B351CA /* Demo */,
xcshareddata/xcschemes/theApp.xcscheme:87: buildConfiguration = "Demo"
grep -nri debug *
project.pbxproj:2848: 847D410E168CBD3700CE1B96 /* Debug */ = {
project.pbxproj:2863: "DEBUG=1",
project.pbxproj:2879: name = Debug;
project.pbxproj:2912: 847D4111168CBD3700CE1B96 /* Debug */ = {
project.pbxproj:2955: name = Debug;
project.pbxproj:2972: "DEBUG=1",
project.pbxproj:3010: 847D410E168CBD3700CE1B96 /* Debug */,
project.pbxproj:3021: 847D4111168CBD3700CE1B96 /* Debug */,
xcshareddata/xcschemes/theApp.xcscheme:26: selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
xcshareddata/xcschemes/theApp.xcscheme:27: selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
xcshareddata/xcschemes/theApp.xcscheme:29: buildConfiguration = "Debug">
xcshareddata/xcschemes/theApp.xcscheme:43: selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
xcshareddata/xcschemes/theApp.xcscheme:44: selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
xcshareddata/xcschemes/theApp.xcscheme:49: debugDocumentVersioning = "YES"
xcshareddata/xcschemes/theApp.xcscheme:72: debugDocumentVersioning = "YES">
xcshareddata/xcschemes/theApp.xcscheme:84: buildConfiguration = "Debug">
Any ideas?
Update: added the relevant parts of the build settings.
This is because you're using "#elif", which is NOT the same thing as "#elifdef" (if such a thing existed): a plain "#elif" tests the macro's value, not whether it is defined.
You should define PRODUCTION, DEBUG and DEMO all at the same time, but set only one of them to 1 and the others to 0.
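A minimal sketch of the two fixes (my illustration using the URLs from the question, not code from the original answer): either test with defined(), which only checks whether the macro exists, or give every macro an explicit 0/1 value as suggested above and keep the plain #elif tests.
/* Option 1: check whether each macro is defined at all */
#if defined(DEMO)
static NSString *const URL_SMART_TAXI = @"http://demo.theapp.com";
#elif defined(PRODUCTION)
static NSString *const URL_SMART_TAXI = @"http://prod.theapp.com";
#elif defined(DEBUG)
static NSString *const URL_SMART_TAXI = @"http://localhost:8000";
#else
static NSString *const URL_SMART_TAXI = @"http://dev.theapp.com";
#endif
/* Option 2: define DEMO, PRODUCTION and DEBUG in every configuration,
   e.g. DEMO=0 PRODUCTION=0 DEBUG=1, and keep the original #elif value tests */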

Converting the PCM buffer to AAC data in real time on iOS using Remote IO and Audio Converter Services

I'm using Remote IO to get the PCM audio buffer, and I want to send the data in real time to a Darwin server over the cellular network (3G). I chose the AAC format because of a Fraunhofer article called "AAC-ELD based Audio Communication on iOS - A Developer's Guide". The sample code works great: the audio is recorded in LPCM, encoded to AAC-ELD, decoded back to LPCM, and played back immediately, but that is the AAC-ELD (Enhanced Low Delay) format.
When I change the format from kAudioFormatMPEG4AAC_ELD to kAudioFormatMPEG4AAC, I can hear the audio for 1 second, then it is stuck for the next 1 second, and the pattern continues. The audio also plays at twice the real-world rate: sound that lasts 1 second in the real world lasts only 0.5 second on playback.
I then changed the sample frame size from 512 to 1024. The rate is normal, but I hear audio for 2 seconds and it is stuck for the next 2 seconds, and the pattern continues. I figured out that the AudioConverterFillComplexBuffer function fails for 2 seconds and then works well for the next 2 seconds. I don't know why. Please help. Thanks in advance.
I really didn't change much of the code, just the format ID and the sample frame size from 512 to 1024.
The article is here: http://www.full-hd-voice.com/content/dam/fullhdvoice/documents/iOS-ACE-AP-v2.pdf
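One observation from me (not part of the original question) that may frame the numbers: plain kAudioFormatMPEG4AAC (AAC-LC) encodes 1024 PCM frames per packet, whereas AAC-ELD encodes 512, so a 512-frame render callback delivers only half a packet's worth of input per call (512 / 44100 ≈ 11.6 ms of audio versus the ≈ 23.2 ms an AAC-LC packet needs).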
1. Global variables
static AudioBuffer g_inputBuffer;
static AudioBuffer g_outputBuffer;
static AudioComponentInstance g_audioUnit;
static AudioUnitElement g_outputBus = 0;
static AudioUnitElement g_inputBus = 1;
static UInt32 g_outChannels = 2;
static UInt32 g_inChannels = 1;
static UInt32 g_frameSize = 1024;
static UInt32 g_inputByteSize = 0;
static UInt32 g_outputByteSize = 0;
static unsigned int g_initialized = 0;
static AACELDEncoder *g_encoder = NULL;
static AACELDDecoder *g_decoder = NULL;
static MagicCookie g_cookie;
/* Structure to keep the encoder configuration */
typedef struct EncoderProperties_
{
Float64 samplingRate;
UInt32 inChannels;
UInt32 outChannels;
UInt32 frameSize;
UInt32 bitrate;
} EncoderProperties;
/* Structure to keep the magic cookie */
typedef struct MagicCookie_
{
void *data;
int byteSize;
} MagicCookie;
/* Structure to keep one encoded AU */
typedef struct EncodedAudioBuffer_
{
UInt32 mChannels;
UInt32 mDataBytesSize;
void *data;
} EncodedAudioBuffer;
typedef struct DecoderProperties_
{
Float64 samplingRate;
UInt32 inChannels;
UInt32 outChannels;
UInt32 frameSize;
} DecoderProperties;
2. Initialize the Audio Session, the Audio Unit, and the encoder & decoder
void InitAudioUnit()
{
/* Calculate the required input and output buffer sizes */
g_inputByteSize = g_frameSize * g_inChannels * sizeof(AudioSampleType);
g_outputByteSize = g_frameSize * g_outChannels * sizeof(AudioSampleType);
/* Initialize the I/O buffers */
g_inputBuffer.mNumberChannels = g_inChannels;
g_inputBuffer.mDataByteSize = g_inputByteSize;
if (g_initialized)
free(g_inputBuffer.mData);
g_inputBuffer.mData = malloc(sizeof(unsigned char)*g_inputByteSize);
memset(g_inputBuffer.mData, 0, g_inputByteSize);
g_outputBuffer.mNumberChannels = g_outChannels;
g_outputBuffer.mDataByteSize = g_outputByteSize;
if (g_initialized)
free(g_outputBuffer.mData);
g_outputBuffer.mData = malloc(sizeof(unsigned char)*g_outputByteSize);
memset(g_outputBuffer.mData, 0, g_outputByteSize);
g_initialized = 1;
/* Initialize the audio session */
AudioSessionInitialize(NULL, NULL, interruptionListener, NULL);
/* Activate the audio session */
AudioSessionSetActive(TRUE);
/* Enable recording for full-duplex I/O */
UInt32 audioCategory = kAudioSessionCategory_PlayAndRecord;
AudioSessionSetProperty(kAudioSessionProperty_AudioCategory,
sizeof(audioCategory),
&audioCategory);
/* Set the route change listener */
AudioSessionAddPropertyListener(kAudioSessionProperty_AudioRouteChange,
routeChangeListener,
NULL);
/* Set the preferred buffer time */
Float32 preferredBufferTime = 1024.0 / 44100.0;
AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration,
sizeof(preferredBufferTime),
&preferredBufferTime);
/* Setup the audio component for I/O */
AudioComponentDescription componentDesc;
memset(&componentDesc, 0, sizeof(componentDesc));
componentDesc.componentType = kAudioUnitType_Output;
componentDesc.componentSubType = kAudioUnitSubType_RemoteIO;
componentDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
/* Find and create the audio component */
AudioComponent auComponent = AudioComponentFindNext(NULL, &componentDesc);
AudioComponentInstanceNew(auComponent, &g_audioUnit);
/* Enable the audio input */
UInt32 enableAudioInput = 1;
AudioUnitSetProperty(g_audioUnit,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input,
g_inputBus,
&enableAudioInput,
sizeof(enableAudioInput));
/* Setup the render callback */
AURenderCallbackStruct renderCallbackInfo;
renderCallbackInfo.inputProc = audioUnitRenderCallback;
renderCallbackInfo.inputProcRefCon = NULL;
AudioUnitSetProperty(g_audioUnit,
kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Input,
g_outputBus,
&renderCallbackInfo,
sizeof(renderCallbackInfo));
/* Set the input and output audio stream formats */
AudioStreamBasicDescription audioFormat;
audioFormat.mSampleRate = 44100;
audioFormat.mFormatID = kAudioFormatLinearPCM;
audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormat.mFramesPerPacket = 1;
audioFormat.mBitsPerChannel = 8 * sizeof(AudioSampleType);
audioFormat.mChannelsPerFrame = g_inChannels;
audioFormat.mBytesPerFrame = audioFormat.mChannelsPerFrame * sizeof(AudioSampleType);
audioFormat.mBytesPerPacket = audioFormat.mBytesPerFrame;
AudioUnitSetProperty(g_audioUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output,
g_inputBus,
&audioFormat,
sizeof(audioFormat));
audioFormat.mChannelsPerFrame = g_outChannels;
audioFormat.mBytesPerFrame = audioFormat.mChannelsPerFrame * sizeof(AudioSampleType);
audioFormat.mBytesPerPacket = audioFormat.mBytesPerFrame;
AudioUnitSetProperty(g_audioUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input,
g_outputBus,
&audioFormat,
sizeof(audioFormat));
/* Initialize the ELD codec */
InitAACELD();
}
void InitAACELD()
{
EncoderProperties p;
p.samplingRate = 44100.0;
p.inChannels = 1;
p.outChannels = 1;
p.frameSize = 1024;
p.bitrate = 32000;
g_encoder = CreateAACELDEncoder();
InitAACELDEncoder(g_encoder, p, &g_cookie);
DecoderProperties dp;
dp.samplingRate = 44100.0;
dp.inChannels = 1;
dp.outChannels = 2;
dp.frameSize = p.frameSize;
g_decoder = CreateAACELDDecoder();
InitAACELDDecoder(g_decoder, dp, &g_cookie);
}
int InitAACELDEncoder(AACELDEncoder *encoder, EncoderProperties props, MagicCookie *outCookie)
{
/* Copy the provided encoder properties */
encoder->inChannels = props.inChannels;
encoder->outChannels = props.outChannels;
encoder->samplingRate = props.samplingRate;
encoder->frameSize = props.frameSize;
encoder->bitrate = props.bitrate;
/* Convenience macro to fill out the ASBD structure.
Available only when __cplusplus is defined! */
FillOutASBDForLPCM(encoder->sourceFormat,
encoder->samplingRate,
encoder->inChannels,
8*sizeof(AudioSampleType),
8*sizeof(AudioSampleType),
false,
false);
/* Set the format parameters for AAC-ELD encoding. */
encoder->destinationFormat.mFormatID = kAudioFormatMPEG4AAC;
encoder->destinationFormat.mChannelsPerFrame = encoder->outChannels;
encoder->destinationFormat.mSampleRate = encoder->samplingRate;
/* Get the size of the formatinfo structure */
UInt32 dataSize = sizeof(encoder->destinationFormat);
/* Request the property from Core Audio */
AudioFormatGetProperty(kAudioFormatProperty_FormatInfo,
0,
NULL,
&dataSize,
&(encoder->destinationFormat));
/* Create a new audio converter */
AudioConverterNew(&(encoder->sourceFormat),
&(encoder->destinationFormat),
&(encoder->audioConverter));
if (!encoder->audioConverter)
{
return -1;
}
/* Try to set the desired output bitrate */
UInt32 outputBitrate = encoder->bitrate;
dataSize = sizeof(outputBitrate);
AudioConverterSetProperty(encoder->audioConverter,
kAudioConverterEncodeBitRate,
dataSize,
&outputBitrate);
/* Query the maximum possible output packet size */
if (encoder->destinationFormat.mBytesPerPacket == 0)
{
UInt32 maxOutputSizePerPacket = 0;
dataSize = sizeof(maxOutputSizePerPacket);
AudioConverterGetProperty(encoder->audioConverter,
kAudioConverterPropertyMaximumOutputPacketSize,
&dataSize,
&maxOutputSizePerPacket);
encoder->maxOutputPacketSize = maxOutputSizePerPacket;
}
else
{
encoder->maxOutputPacketSize = encoder->destinationFormat.mBytesPerPacket;
}
/* Fetch the Magic Cookie from the ELD implementation */
UInt32 cookieSize = 0;
AudioConverterGetPropertyInfo(encoder->audioConverter,
kAudioConverterCompressionMagicCookie,
&cookieSize,
NULL);
char* cookie = (char*)malloc(cookieSize*sizeof(char));
AudioConverterGetProperty(encoder->audioConverter,
kAudioConverterCompressionMagicCookie,
&cookieSize,
cookie);
outCookie->data = cookie;
outCookie->byteSize = cookieSize;
/* Prepare the temporary AU buffer for encoding */
encoder->encoderBuffer = malloc(encoder->maxOutputPacketSize);
return 0;
}
int InitAACELDDecoder(AACELDDecoder* decoder, DecoderProperties props, const MagicCookie *cookie)
{
/* Copy the provided decoder properties */
decoder->inChannels = props.inChannels;
decoder->outChannels = props.outChannels;
decoder->samplingRate = props.samplingRate;
decoder->frameSize = props.frameSize;
/* We will decode to LPCM */
FillOutASBDForLPCM(decoder->destinationFormat,
decoder->samplingRate,
decoder->outChannels,
8*sizeof(AudioSampleType),
8*sizeof(AudioSampleType),
false,
false);
/* from AAC-ELD, having the same sampling rate, but possibly a different channel configuration */
decoder->sourceFormat.mFormatID = kAudioFormatMPEG4AAC;
decoder->sourceFormat.mChannelsPerFrame = decoder->inChannels;
decoder->sourceFormat.mSampleRate = decoder->samplingRate;
/* Get the rest of the format info */
UInt32 dataSize = sizeof(decoder->sourceFormat);
AudioFormatGetProperty(kAudioFormatProperty_FormatInfo,
0,
NULL,
&dataSize,
&(decoder->sourceFormat));
/* Create a new AudioConverter instance for the conversion AAC-ELD -> LPCM */
AudioConverterNew(&(decoder->sourceFormat),
&(decoder->destinationFormat),
&(decoder->audioConverter));
if (!decoder->audioConverter)
{
return -1;
}
/* Check for variable output packet size */
if (decoder->destinationFormat.mBytesPerPacket == 0)
{
UInt32 maxOutputSizePerPacket = 0;
dataSize = sizeof(maxOutputSizePerPacket);
AudioConverterGetProperty(decoder->audioConverter,
kAudioConverterPropertyMaximumOutputPacketSize,
&dataSize,
&maxOutputSizePerPacket);
decoder->maxOutputPacketSize = maxOutputSizePerPacket;
}
else
{
decoder->maxOutputPacketSize = decoder->destinationFormat.mBytesPerPacket;
}
/* Set the corresponding encoder cookie */
AudioConverterSetProperty(decoder->audioConverter,
kAudioConverterDecompressionMagicCookie,
cookie->byteSize,
cookie->data);
return 0;
}
3. Render callback and the encoder & decoder
static OSStatus audioUnitRenderCallback(void *inRefCon,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberOfFrames,
AudioBufferList *ioData)
{
/* Get the input samples */
AudioUnitRender(g_audioUnit,
ioActionFlags,
inTimeStamp,
g_inputBus,
inNumberOfFrames,
ioData);
/* Copy to global input buffer */
memcpy(g_inputBuffer.mData, ioData->mBuffers[0].mData, g_inputBuffer.mDataByteSize);
/* Encode with AudioConverter */
EncodedAudioBuffer encodedAU;
EncodeAACELD(g_encoder, &g_inputBuffer, &encodedAU);
/* Decode with AudioConverter */
g_outputBuffer.mDataByteSize = g_outputByteSize;
DecodeAACELD(g_decoder, &encodedAU, &g_outputBuffer);
/* Copy output samples to Audio Units' IO buffer */
ioData->mBuffers[0].mNumberChannels = g_outputBuffer.mNumberChannels;
ioData->mBuffers[0].mDataByteSize = g_outputBuffer.mDataByteSize;
memcpy(ioData->mBuffers[0].mData, g_outputBuffer.mData, g_outputBuffer.mDataByteSize);
return noErr;
}
static OSStatus encodeProc(AudioConverterRef inAudioConverter,
UInt32 *ioNumberDataPackets,
AudioBufferList *ioData,
AudioStreamPacketDescription **outDataPacketDescription,
void *inUserData)
{
/* Get the current encoder state from the inUserData parameter */
AACELDEncoder *encoder = (AACELDEncoder*) inUserData;
/* Compute the maximum number of output packets */
UInt32 maxPackets = encoder->bytesToEncode / encoder->sourceFormat.mBytesPerPacket;
if (*ioNumberDataPackets > maxPackets)
{
/* If requested number of packets is bigger, adjust */
*ioNumberDataPackets = maxPackets;
}
/* Check to make sure we have only one audio buffer */
if (ioData->mNumberBuffers != 1)
{
return 1;
}
/* Set the data to be encoded */
ioData->mBuffers[0].mDataByteSize = encoder->currentSampleBuffer->mDataByteSize;
ioData->mBuffers[0].mData = encoder->currentSampleBuffer->mData;
ioData->mBuffers[0].mNumberChannels = encoder->currentSampleBuffer->mNumberChannels;
if (outDataPacketDescription)
{
*outDataPacketDescription = NULL;
}
if (encoder->bytesToEncode == 0)
{
// We are currently out of data but want to keep on processing
// See Apple Technical Q&A QA1317
return 1;
}
encoder->bytesToEncode = 0;
return noErr;
}
int EncodeAACELD(AACELDEncoder *encoder, AudioBuffer *inSamples, EncodedAudioBuffer *outData)
{
/* Clear the encoder buffer */
memset(encoder->encoderBuffer, 0, sizeof(encoder->maxOutputPacketSize));
/* Keep a reference to the samples that should be encoded */
encoder->currentSampleBuffer = inSamples;
encoder->bytesToEncode = inSamples->mDataByteSize;
UInt32 numOutputDataPackets = 1;
AudioStreamPacketDescription outPacketDesc[1];
/* Create the output buffer list */
AudioBufferList outBufferList;
outBufferList.mNumberBuffers = 1;
outBufferList.mBuffers[0].mNumberChannels = encoder->outChannels;
outBufferList.mBuffers[0].mDataByteSize = encoder->maxOutputPacketSize;
outBufferList.mBuffers[0].mData = encoder->encoderBuffer;
/* Start the encoding process */
OSStatus status = AudioConverterFillComplexBuffer(encoder->audioConverter,
encodeProc,
encoder,
&numOutputDataPackets,
&outBufferList,
outPacketDesc);
if (status != noErr)
{
return -1;
}
/* Set the output data */
outData->mChannels = encoder->outChannels;
outData->data = encoder->encoderBuffer;
outData->mDataBytesSize = outPacketDesc[0].mDataByteSize;
return 0;
}
static OSStatus decodeProc(AudioConverterRef inAudioConverter,
UInt32 *ioNumberDataPackets,
AudioBufferList *ioData,
AudioStreamPacketDescription **outDataPacketDescription,
void *inUserData)
{
/* Get the current decoder state from the inUserData parameter */
AACELDDecoder *decoder = (AACELDDecoder*)inUserData;
/* Compute the maximum number of output packets */
UInt32 maxPackets = decoder->bytesToDecode / decoder->maxOutputPacketSize;
if (*ioNumberDataPackets > maxPackets)
{
/* If requested number of packets is bigger, adjust */
*ioNumberDataPackets = maxPackets;
}
/* If there is data to be decoded, set it accordingly */
if (decoder->bytesToDecode)
{
ioData->mBuffers[0].mData = decoder->decodeBuffer;
ioData->mBuffers[0].mDataByteSize = decoder->bytesToDecode;
ioData->mBuffers[0].mNumberChannels = decoder->inChannels;
}
/* And set the packet description */
if (outDataPacketDescription)
{
decoder->packetDesc[0].mStartOffset = 0;
decoder->packetDesc[0].mVariableFramesInPacket = 0;
decoder->packetDesc[0].mDataByteSize = decoder->bytesToDecode;
(*outDataPacketDescription) = decoder->packetDesc;
}
if (decoder->bytesToDecode == 0)
{
// We are currently out of data but want to keep on processing
// See Apple Technical Q&A QA1317
return 1;
}
decoder->bytesToDecode = 0;
return noErr;
}
int DecodeAACELD(AACELDDecoder* decoder, EncodedAudioBuffer *inData, AudioBuffer *outSamples)
{
OSStatus status = noErr;
/* Keep a reference to the samples that should be decoded */
decoder->decodeBuffer = inData->data;
decoder->bytesToDecode = inData->mDataBytesSize;
UInt32 outBufferMaxSizeBytes = decoder->frameSize * decoder->outChannels * sizeof(AudioSampleType);
assert(outSamples->mDataByteSize <= outBufferMaxSizeBytes);
UInt32 numOutputDataPackets = outBufferMaxSizeBytes / decoder->maxOutputPacketSize;
/* Output packet stream are 512 LPCM samples */
AudioStreamPacketDescription outputPacketDesc[1024];
/* Create the output buffer list */
AudioBufferList outBufferList;
outBufferList.mNumberBuffers = 1;
outBufferList.mBuffers[0].mNumberChannels = decoder->outChannels;
outBufferList.mBuffers[0].mDataByteSize = outSamples->mDataByteSize;
outBufferList.mBuffers[0].mData = outSamples->mData;
/* Start the decoding process */
status = AudioConverterFillComplexBuffer(decoder->audioConverter,
decodeProc,
decoder,
&numOutputDataPackets,
&outBufferList,
outputPacketDesc);
if (noErr != status)
{
return -1;
}
return 0;
}
