Problem reading data from multiple ADC channels with stm32f4-discovery board using DMA

I am trying to read data from channels 0, 1, 2 and 3 of ADC1. The issue is that when I read from channel 1 and channel 2 and run the code in debug mode, it shows the correct values, but when I modified it to do the same for channels 3 and 4 as well, it does not show any value. Following is the code and the screenshot of the memory map:
#include "stm32f4xx.h"
#include "stm32f4_discovery.h"
uint16_t ADC1ConvertedValue[4] = {0,0,0,0};//Stores converted vals [2] = {0,0}
void DMA_config()
{
RCC_AHB1PeriphClockCmd(RCC_AHB1Periph_DMA2 ,ENABLE);
DMA_InitTypeDef DMA_InitStruct;
DMA_InitStruct.DMA_Channel = DMA_Channel_0;
DMA_InitStruct.DMA_PeripheralBaseAddr = (uint32_t) 0x4001204C;//ADC1's data register
DMA_InitStruct.DMA_Memory0BaseAddr = (uint32_t)&ADC1ConvertedValue;
DMA_InitStruct.DMA_DIR = DMA_DIR_PeripheralToMemory;
DMA_InitStruct.DMA_BufferSize = 4;//2
DMA_InitStruct.DMA_PeripheralInc = DMA_PeripheralInc_Disable;
DMA_InitStruct.DMA_MemoryInc = DMA_MemoryInc_Enable;
DMA_InitStruct.DMA_PeripheralDataSize = DMA_PeripheralDataSize_HalfWord;//Reads 16 bit values _HalfWord
DMA_InitStruct.DMA_MemoryDataSize = DMA_MemoryDataSize_HalfWord;//Stores 16 bit values _Halfword
DMA_InitStruct.DMA_Mode = DMA_Mode_Circular;
DMA_InitStruct.DMA_Priority = DMA_Priority_High;
DMA_InitStruct.DMA_FIFOMode = DMA_FIFOMode_Enable;
DMA_InitStruct.DMA_FIFOThreshold = DMA_FIFOThreshold_HalfFull;//_HalfFull
DMA_InitStruct.DMA_MemoryBurst = DMA_MemoryBurst_Single;
DMA_InitStruct.DMA_PeripheralBurst = DMA_PeripheralBurst_Single;
DMA_Init(DMA2_Stream0, &DMA_InitStruct);
DMA_Cmd(DMA2_Stream0, ENABLE);
}
void ADC_config()
{
/* Configure GPIO pins ******************************************************/
ADC_InitTypeDef ADC_InitStruct;
ADC_CommonInitTypeDef ADC_CommonInitStruct;
GPIO_InitTypeDef GPIO_InitStruct;
RCC_AHB1PeriphClockCmd( RCC_AHB1Periph_GPIOA, ENABLE);
RCC_APB2PeriphClockCmd(RCC_APB2Periph_ADC1, ENABLE);//ADC1 is connected to the APB2 peripheral bus
GPIO_InitStruct.GPIO_Pin = GPIO_Pin_0 | GPIO_Pin_1 | GPIO_Pin_2 | GPIO_Pin_3;// PA0, PA1, PA2, PA3
GPIO_InitStruct.GPIO_Mode = GPIO_Mode_AN;//The pins are configured in analog mode
GPIO_InitStruct.GPIO_PuPd = GPIO_PuPd_NOPULL ;//We don't need any pull up or pull down
GPIO_Init(GPIOA, &GPIO_InitStruct);//Initialize GPIOA pins with the configuration
/* ADC Common Init **********************************************************/
ADC_CommonInitStruct.ADC_Mode = ADC_Mode_Independent;
ADC_CommonInitStruct.ADC_Prescaler = ADC_Prescaler_Div2;
ADC_CommonInitStruct.ADC_DMAAccessMode = ADC_DMAAccessMode_Disabled;
ADC_CommonInitStruct.ADC_TwoSamplingDelay = ADC_TwoSamplingDelay_5Cycles;
ADC_CommonInit(&ADC_CommonInitStruct);
/* ADC1 Init ****************************************************************/
ADC_InitStruct.ADC_Resolution = ADC_Resolution_12b;//Input voltage is converted into a 12bit int (max 4095)
ADC_InitStruct.ADC_ScanConvMode = ENABLE;//The scan is configured in multiple channels
ADC_InitStruct.ADC_ContinuousConvMode = ENABLE;//Continuous conversion: input signal is sampled more than once
ADC_InitStruct.ADC_ExternalTrigConv = DISABLE;
ADC_InitStruct.ADC_ExternalTrigConvEdge = ADC_ExternalTrigConvEdge_None;
ADC_InitStruct.ADC_DataAlign = ADC_DataAlign_Right;//Data converted will be shifted to right
ADC_InitStruct.ADC_NbrOfConversion = 4;
ADC_Init(ADC1, &ADC_InitStruct);//Initialize ADC with the configuration
/* Select the channels to be read from **************************************/
ADC_RegularChannelConfig(ADC1, ADC_Channel_0, 1, ADC_SampleTime_144Cycles);//PA0
ADC_RegularChannelConfig(ADC1, ADC_Channel_1, 2, ADC_SampleTime_144Cycles);//PA1
ADC_RegularChannelConfig(ADC1, ADC_Channel_2, 3, ADC_SampleTime_144Cycles);//PA2
ADC_RegularChannelConfig(ADC1, ADC_Channel_3, 4, ADC_SampleTime_144Cycles);//PA3
/* Enable DMA request after last transfer (Single-ADC mode) */
ADC_DMARequestAfterLastTransferCmd(ADC1, ENABLE);
/* Enable ADC1 DMA */
ADC_DMACmd(ADC1, ENABLE);
/* Enable ADC1 */
ADC_Cmd(ADC1, ENABLE);
}
int main(void)
{
DMA_config();
ADC_config();
while(1)
{
ADC_SoftwareStartConv(ADC1);
//value=ADC_Read();
}
return 0;
}

You shouldn't start the ADC conversion multiple times (in your while(1) loop).
Since you've configured the ADC with .ADC_ContinuousConvMode = ENABLE, it should be enough to start it once before the loop and do nothing within it.
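A minimal sketch of what that corrected main() could look like, reusing the DMA_config()/ADC_config() functions from the question:
int main(void)
{
    DMA_config();
    ADC_config();
    // With ADC_ContinuousConvMode = ENABLE and circular DMA, one software
    // start is enough; DMA2 Stream0 keeps refreshing ADC1ConvertedValue[].
    ADC_SoftwareStartConv(ADC1);
    while (1)
    {
        // ADC1ConvertedValue[0..3] are updated in the background by the DMA
    }
}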

Related

ESP32+SN65HVD230 custom baud rate

I am trying to read the CAN bus from my car (my car uses a 95 kbps CAN speed).
I use this library: Link. To add a custom CAN baud rate, I added this:
typedef enum {
CAN_SPEED_100KBPS = 100, /**< \brief CAN Node runs at 100kBit/s. */
CAN_SPEED_125KBPS = 125, /**< \brief CAN Node runs at 125kBit/s. */
CAN_SPEED_200KBPS = 200, /**< \brief CAN Node runs at 200kBit/s. */
CAN_SPEED_250KBPS = 250, /**< \brief CAN Node runs at 250kBit/s. */
CAN_SPEED_500KBPS = 500, /**< \brief CAN Node runs at 500kBit/s. */
CAN_SPEED_800KBPS = 800, /**< \brief CAN Node runs at 800kBit/s. */
CAN_SPEED_1000KBPS = 1000, /**< \brief CAN Node runs at 1000kBit/s. */
CAN_SPEED_95KBPS = 95 /**< \brief CAN Node runs at 95kBit/s. */ //This line I added
} CAN_speed_t;
And I use esp32can_basic.ino with two modifications.
Set the speed:
CAN_cfg.speed = CAN_SPEED_95KBPS;
Delete the write block:
if (currentMillis - previousMillis >= interval) {
previousMillis = currentMillis;
CAN_frame_t tx_frame;
tx_frame.FIR.B.FF = CAN_frame_std;
tx_frame.MsgID = 0x001;
tx_frame.FIR.B.DLC = 8;
tx_frame.data.u8[0] = 0x00;
tx_frame.data.u8[1] = 0x01;
tx_frame.data.u8[2] = 0x02;
tx_frame.data.u8[3] = 0x03;
tx_frame.data.u8[4] = 0x04;
tx_frame.data.u8[5] = 0x05;
tx_frame.data.u8[6] = 0x06;
tx_frame.data.u8[7] = 0x07;
ESP32Can.CANWriteFrame(&tx_frame);
But nothing works :(
My hardware is an ESP32-PICO-D4 and an SN65HVD230 CAN board. What have I done wrong?
UPD1: in CAN.c, in int CAN_init(), there are these lines:
switch (CAN_cfg.speed) {
case CAN_SPEED_1000KBPS:
MODULE_CAN->BTR1.B.TSEG1 = 0x4;
__tq = 0.125;
break;
case CAN_SPEED_800KBPS:
MODULE_CAN->BTR1.B.TSEG1 = 0x6;
__tq = 0.125;
break;
case CAN_SPEED_200KBPS:
MODULE_CAN->BTR1.B.TSEG1 = 0xc;
MODULE_CAN->BTR1.B.TSEG2 = 0x5;
__tq = 0.25;
break;
default:
MODULE_CAN->BTR1.B.TSEG1 = 0xc;
__tq = ((float) 1000 / CAN_cfg.speed) / 16;
}
I think this code calculates tq.
"Nothing works" means no CAN frames are read. I think I have set the wrong baud rate.
A CAN bit is divided into time quanta (TQ); the sample point (SP) lies between TSEG1 and TSEG2, the first and second segments of the bit.
You need to calculate these values (check here for more info and a calculator).
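To make the "calculate these values" part concrete, here is a rough, stand-alone sketch of the arithmetic. This is not the library's code; it assumes the ESP32 CAN peripheral is fed by the 80 MHz APB clock, and that the "95 kbps" automotive bus is actually the common 95.238 kbps rate:
#include <stdio.h>
#include <math.h>

/* bit_rate = 80 MHz / (2 * prescaler * quanta_per_bit),
 * where quanta_per_bit = 1 (sync) + TSEG1 + TSEG2. */
int main(void)
{
    const double apb_hz = 80e6;
    const double target_bps = 95238.0;                 /* assumption: 95.238 kbps */
    for (int prescaler = 1; prescaler <= 64; prescaler++) {
        for (int quanta = 8; quanta <= 25; quanta++) {
            double rate = apb_hz / (2.0 * prescaler * quanta);
            if (fabs(rate - target_bps) / target_bps < 0.001) {  /* within 0.1% */
                printf("prescaler=%2d  quanta=%2d  rate=%.0f bps  tq=%.3f us\n",
                       prescaler, quanta, rate, 2e6 * prescaler / apb_hz);
            }
        }
    }
    return 0;
}
From a matching (prescaler, quanta) pair you then split quanta - 1 between TSEG1 and TSEG2 so the sample point lands somewhere between 75% and 87.5% of the bit time, within the controller's TSEG1/TSEG2 limits.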

Time errors in ADC

I measure distance using an ultrasonic signal. An STM32F1 generates the ultrasound signal; an STM32F4 records this signal using a microphone. Both STM32s are synchronized by a signal generated by another device; they are connected by one wire.
Question: why does the signal arrive at different times, although I don't move the receiver or the transmitter? It gives errors of 50 mm.
Dispersion signals
Code of receiver is here:
while (1)
{
if(HAL_GPIO_ReadPin(GPIOA, GPIO_PIN_5) != 0x00)
{
Get_UZ_Signal(uz_signal);
Send_Signal(uz_signal);
}
}
void Get_UZ_Signal(uint16_t* uz_signal)
{
int i,j;
uint16_t uz_buf[10];
HAL_ADC_Start_DMA(&hadc1, (uint32_t*)&uz_buf, 300000);
for(i = 0; i<lenght_signal; i++)
{
j=10;
while(j>0)
{
j--;
}
uz_signal[i] = uz_buf[0];
}
HAL_ADC_Stop_DMA(&hadc1);
}
hadc1.Instance = ADC1;
hadc1.Init.ClockPrescaler = ADC_CLOCK_SYNC_PCLK_DIV4;
hadc1.Init.Resolution = ADC_RESOLUTION_12B;
hadc1.Init.ScanConvMode = DISABLE;
hadc1.Init.ContinuousConvMode = ENABLE;
hadc1.Init.DiscontinuousConvMode = DISABLE;
hadc1.Init.ExternalTrigConvEdge = ADC_EXTERNALTRIGCONVEDGE_NONE;
hadc1.Init.ExternalTrigConv = ADC_SOFTWARE_START;
hadc1.Init.DataAlign = ADC_DATAALIGN_RIGHT;
hadc1.Init.NbrOfConversion = 1;
hadc1.Init.DMAContinuousRequests = DISABLE;
hadc1.Init.EOCSelection = ADC_EOC_SINGLE_CONV;
More information here:
https://github.com/BooSooV/Indoor-Ultrasonic-Positioning-System/tree/master/Studying_ultrasonic_signals/Measured_lengths_dispersion
PROBLEM RESOLVED
Dispersion signals final
The time errors were created by the transmitter, so I made some changes to it. The synchro signal is now captured with the help of EXTI, the PWM is generated all the time, and I gate the signal with the Enable/Disable pin on the driver. Now I have a dispersion of 5 mm, which is enough for me.
The final programs are here:
https://github.com/BooSooV/Indoor-Ultrasonic-Positioning-System/tree/master/Studying_ultrasonic_signals/Measured_lengths_dispersion
Maybe it is a problem of processing time. The speed of sound is 343 meters/second, so 50 mm is about 0.15 ms.
Have you thought of calling HAL_ADC_Start_DMA() from main()?
uint16_t uz_buf[10];
HAL_ADC_Start_DMA(&hadc1, (uint32_t*)&uz_buf, 10 );//300000);
// Sizeof Buffer ---------------------^
and you call
void Get_UZ_Signal(uint16_t* uz_signal) {
int i;
// For debugging - get the processing time
// Get the time from the SysTick. This counter wraps around
// from SysTick->LOAD down to 0 every 1 msec
int32_t startTime = SysTick->VAL;
__HAL_ADC_ENABLE(&hadc1); // Start the ADC (the DMA transfer is already set up)
// __HAL_DMA_GET_COUNTER returns the remaining data units in the current DMA transfer
while(__HAL_DMA_GET_COUNTER(&hdma_adc1) != 0)
;
for(i = 0; i<lenght_signal; i++) {
// uz_signal[i] = uz_buf[0];
// 0 or i ------------^
uz_signal[i] = uz_buf[i];
}
__HAL_ADC_DISABLE(&hadc1); // Stop the ADC
int32_t endTime = SysTick->VAL;
// SysTick counts down, so the elapsed time is startTime - endTime;
// check if negative, case of wrap around
int32_t difTime = startTime - endTime;
if ( difTime < 0 )
difTime += SysTick->LOAD;
__HAL_DMA_SET_COUNTER(&hdma_adc1, 10); // Reset the counter
// If the DMA buffer is 10, the COUNTER will start at 10
// and decrement
// Ref. Manual: 9.4.4 DMA channel x number of data register (DMA_CNDTRx)
}
In your code
MX_USART1_UART_Init();
HAL_ADC_Start_DMA(&hadc1, (uint32_t*)&uz_signal, 30000);
// Must be stopped because it is running now
// How long is the buffer: uint16_t uz_signal[ 30000 ] ?
__HAL_ADC_DISABLE(&hadc1); // must be disabled
// reset the counter
__HAL_DMA_SET_COUNTER(&hdma_adc1, 30000);
while (1)
{
if(HAL_GPIO_ReadPin(GPIOA, GPIO_PIN_5) != 0x00)
{
__HAL_ADC_ENABLE(&hadc1);
while(__HAL_DMA_GET_COUNTER(&hdma_adc1) != 0)
;
__HAL_ADC_DISABLE(&hadc1);
__HAL_DMA_SET_COUNTER(&hdma_adc1, 30000);
// 30,000 is the sizeof your buffer?
...
}
}
I made some tests with an STM32F407 µC @ 168 MHz. I'll show you the code below.
The speed of sound is 343 meters/second.
During the test, I calculated the time needed to process the ADC conversion. Each conversion takes about 0.35 µs (60 ticks).
Result
Array size   Time        Corresponding distance
100          0.041 ms    14 mm
1600         0.571 ms    196 mm
10000        3.57 ms     1225 mm
In the code, you'll see the start time. Be careful: the SysTick is a decrementing counter running at the µC speed (at 168 MHz it counts from 168000 down to 0 every millisecond). It could be a good idea to get the msec time with HAL_GetTick() and the µsec from SysTick.
int main(void)
{
...
MX_DMA_Init();
MX_ADC1_Init();
// Configure the channel in the way you want
ADC_ChannelConfTypeDef sConfig;
sConfig.Channel = ADC_CHANNEL_0; //ADC1_CHANNEL;
sConfig.Rank = 1;
sConfig.SamplingTime = ADC_SAMPLETIME_3CYCLES;
sConfig.Offset = 0;
HAL_ADC_ConfigChannel(&hadc1, &sConfig);
// Start the DMA channel and Stop it
HAL_ADC_Start_DMA(&hadc1, (uint32_t*)&uz_signal, sizeof( uz_signal ) / sizeof( uint16_t ));
HAL_ADC_Stop_DMA( &hadc1 );
// The SysTick is a decrement counter
// Can be use to count usec
// https://www.sciencedirect.com/topics/engineering/systick-timer
tickLoad = SysTick->LOAD;
while (1)
{
// Set the buffer to zero
for( uint32_t i=0; i < (sizeof( uz_signal ) / sizeof( uint16_t )); i++)
uz_signal[i] = 0;
// Reset the counter ready to restart
DMA2_Stream0->NDTR = (uint16_t)(sizeof( uz_signal ) / sizeof( uint16_t ));
/* Enable the Peripheral */
ADC1->CR2 |= ADC_CR2_ADON;
/* Start conversion if ADC is effectively enabled */
/* Clear regular group conversion flag and overrun flag */
ADC1->SR = ~(ADC_FLAG_EOC | ADC_FLAG_OVR);
/* Enable ADC overrun interrupt */
ADC1->CR1 |= (ADC_IT_OVR);
/* Enable ADC DMA mode */
ADC1->CR2 |= ADC_CR2_DMA;
/* Start the DMA channel */
/* Enable Common interrupts*/
DMA2_Stream0->CR |= DMA_IT_TC | DMA_IT_TE | DMA_IT_DME | DMA_IT_HT;
DMA2_Stream0->FCR |= DMA_IT_FE;
DMA2_Stream0->CR |= DMA_SxCR_EN;
//===================================================
// The DMA is ready to start
// Your if(HAL_GPIO_ReadPin( ... ) will be here
HAL_Delay( 10 );
//===================================================
// Get the time
tickStart = SysTick->VAL;
// Start the DMA
ADC1->CR2 |= (uint32_t)ADC_CR2_SWSTART;
// Wait until the conversion is completed
while( DMA2_Stream0->NDTR != 0)
;
// Get end time
tickEnd = SysTick->VAL;
/* Stop potential conversion on going, on regular and injected groups */
ADC1->CR2 &= ~ADC_CR2_ADON;
/* Disable the selected ADC DMA mode */
ADC1->CR2 &= ~ADC_CR2_DMA;
/* Disable ADC overrun interrupt */
ADC1->CR1 &= ~(ADC_IT_OVR);
// Get processing time
tickDiff = tickStart - tickEnd;
//===================================================
// Your processing will go here
HAL_Delay( 10 );
//===================================================
}
}
So, you'll have the start and end times. I think you have to base your distance formula on the start time.
Good luck
If you want to know the execution time, you could use this little function. It returns microseconds.
static uint32_t timeMicroSecDivider = 0;
extern uint32_t uwTick;
// The SysTick->LOAD matches the uC speed / 1000.
// If the uC clock is 80MHz, then the LOAD is 80000
// The SysTick->VAL is a decrement counter from (LOAD-1) to 0
//====================================================
uint64_t getTimeMicroSec()
{
if ( timeMicroSecDivider == 0)
{
// Number of clock by micro second
timeMicroSecDivider = SysTick->LOAD / 1000;
}
return( (uwTick * 1000) + ((SysTick->LOAD - SysTick->VAL) / timeMicroSecDivider));
}
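A minimal usage sketch, assuming the standard 1 ms SysTick/HAL tick (and uwTick) is already running:
uint64_t t0 = getTimeMicroSec();
// ... code being measured, e.g. the DMA wait loop ...
uint64_t elapsedUs = getTimeMicroSec() - t0;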

ParamValidationExt error with WelsInitEncoderExt failed while setting up OpenH264 encoder

Scenario:
I am using OpenH264 with my App to encode into a video_file.mp4.
Environment:
Platform : MacOs Sierra
Compiler : Clang++
The code:
Following is the crux of the code I have:
void EncodeVideoFile() {
ISVCEncoder * encoder_;
std::string video_file_name = "/Path/to/some/folder/video_file.mp4";
EncodeFileParam * pEncFileParam;
SEncParamExt * pEnxParamExt;
float frameRate = 1000;
EUsageType usageType = EUsageType::CAMERA_VIDEO_REAL_TIME;
bool denoise = false;
bool lossless = true;
bool enable_ltr = false;
int layers = 1;
bool cabac = false;
int sliceMode = 1;
pEncFileParam = new EncodeFileParam;
pEncFileParam->eUsageType = EUsageType::CAMERA_VIDEO_REAL_TIME;
pEncFileParam->pkcFileName = video_file_name.c_str();
pEncFileParam->iWidth = frame_width;
pEncFileParam->iHeight = frame_height;
pEncFileParam->fFrameRate = frameRate;
pEncFileParam->iLayerNum = layers;
pEncFileParam->bDenoise = denoise;
pEncFileParam->bLossless = lossless;
pEncFileParam->bEnableLtr = enable_ltr;
pEncFileParam->bCabac = cabac;
int rv = WelsCreateSVCEncoder (&encoder_);
pEnxParamExt = new SEncParamExt;
pEnxParamExt->iUsageType = pEncFileParam->eUsageType;
pEnxParamExt->iPicWidth = pEncFileParam->iWidth;
pEnxParamExt->iPicHeight = pEncFileParam->iHeight;
pEnxParamExt->fMaxFrameRate = pEncFileParam->fFrameRate;
pEnxParamExt->iSpatialLayerNum = pEncFileParam->iLayerNum;
pEnxParamExt->bEnableDenoise = pEncFileParam->bDenoise;
pEnxParamExt->bIsLosslessLink = pEncFileParam->bLossless;
pEnxParamExt->bEnableLongTermReference = pEncFileParam->bEnableLtr;
pEnxParamExt->iEntropyCodingModeFlag = pEncFileParam->bCabac ? 1 : 0;
for (int i = 0; i < pEnxParamExt->iSpatialLayerNum; i++) {
pEnxParamExt->sSpatialLayers[i].sSliceArgument.uiSliceMode = pEncFileParam->eSliceMode;
}
encoder_->InitializeExt(pEnxParamExt);
int videoFormat = videoFormatI420;
encoder_->SetOption (ENCODER_OPTION_DATAFORMAT, &videoFormat);
int frameSize = frame_width * frame_height * 3 / 2;
int total_num = 500;
BufferedData buf;
buf.SetLength (frameSize);
// check the buffer before proceeding
if (buf.Length() != (size_t)frameSize) {
CloseEncoder();
return;
}
SFrameBSInfo info;
memset (&info, 0, sizeof (SFrameBSInfo));
SSourcePicture pic;
memset (&pic, 0, sizeof (SSourcePicture));
pic.iPicWidth = frame_width;
pic.iPicHeight = frame_height;
pic.iColorFormat = videoFormatI420;
pic.iStride[0] = pic.iPicWidth;
pic.iStride[1] = pic.iStride[2] = pic.iPicWidth >> 1;
pic.pData[0] = buf.data();
pic.pData[1] = pic.pData[0] + frame_width * frame_height;
pic.pData[2] = pic.pData[1] + (frame_width * frame_height >> 2);
for(int num = 0; num < total_num; num++) {
// try to encode the frame
rv = encoder_->EncodeFrame (&pic, &info);
}
if (encoder_) {
encoder_->Uninitialize();
WelsDestroySVCEncoder (encoder_);
}
}
The above code is something I pulled from the official usage examples of OpenH264, where BufferedData.h is a class I reused from the OpenH264 utils.
Issue:
But, I am getting the following error:
[OpenH264] this = 0x0x1038bc8c0, Error:ParamValidationExt(), width > 0, height > 0, width * height <= 9437184, invalid 0 x 0 in dependency layer settings!
[OpenH264] this = 0x0x1038bc8c0, Error:WelsInitEncoderExt(), ParamValidationExt failed return 2.
[OpenH264] this = 0x0x1038bc8c0, Error:CWelsH264SVCEncoder::Initialize(), WelsInitEncoderExt failed.
The above does not crash the application, but it goes through a blank run without creating the video_file.mp4 with the dummy data that I am trying to write into it.
Question:
There seems to be something wrong with the setup config I am applying to pEnxParamExt, which goes into encoder_->InitializeExt.
What am I doing wrong with the setup of the encoder?
Note:
I am not trying to hook up to any camera device. I am just trying to create a .mp4 video out of some dummy image data.
If you want to get a complete and working OpenH264 encoder initialization procedure, you can click... here.
According to your problem scenario, you are trying to create a video file (.mp4/.avi) from some dummy images. This task can be accomplished using two different libraries: i) a library for the codec, ii) a library for the container.
i) Library for the codec: It's quite easy to use OpenH264 to compress data. One thing I must mention is that OpenH264 always works with raw frames, e.g. yuv420 data. So, if you want to compress your image data, you have to convert it into the yuv420 color format first. To get OpenH264 click... here
ii) Library for the container: After getting the encoded data you have to use another library to create the container with the extension .mp4, .avi, .flv etc. There exist a lot of libraries on GitHub for that, such as FFmpeg, OpenCV, Bento4, MP4Maker, mp4parser etc. Before using these libraries please check their licenses in detail. If you use FFmpeg, you will not need OpenH264, because FFmpeg itself works with several codecs. You will also find a lot more working examples, as so many developers work with video data out there.
Hope it helps. :)
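For what it's worth, the ParamValidationExt message ("width > 0, height > 0 ... invalid 0 x 0 in dependency layer settings") points at the per-spatial-layer dimensions, which the code in the question never fills in. A hedged sketch of what that per-layer setup might look like, reusing frame_width, frame_height and pEncFileParam from the question (field names as in OpenH264's codec_app_def.h; the bitrate values are arbitrary placeholders):
/* Sketch only: fill the per-layer fields that ParamValidationExt checks. */
pEnxParamExt->iTargetBitrate = 1000000;                            /* placeholder */
for (int i = 0; i < pEnxParamExt->iSpatialLayerNum; i++) {
    pEnxParamExt->sSpatialLayers[i].iVideoWidth = frame_width;     /* must be > 0 */
    pEnxParamExt->sSpatialLayers[i].iVideoHeight = frame_height;   /* must be > 0 */
    pEnxParamExt->sSpatialLayers[i].fFrameRate = pEncFileParam->fFrameRate;
    pEnxParamExt->sSpatialLayers[i].iSpatialBitrate = 1000000;     /* placeholder */
    pEnxParamExt->sSpatialLayers[i].sSliceArgument.uiSliceMode = SM_SINGLE_SLICE;
}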

Can't save to Flash Memory?

I am using the following library <flash.h> to erase/write/read from memory, but unfortunately the data I am trying to save doesn't seem to be written to flash memory. I am using a PIC18F87J11 with the MPLAB XC8 compiler. Also, when I read the program memory from the PIC after attempting to write to it, there is no data at address 0x1C0CA. What am I doing wrong?
char read[1];
/* set FOSC clock to 8MHZ */
OSCCON = 0b01110000;
/* turn off 4x PLL */
OSCTUNE = 0x00;
TRISDbits.TRISD6 = 0; // set as output
TRISDbits.TRISD7 = 0; // set as output
LATDbits.LATD6 = 0; // LED 1 OFF
LATDbits.LATD7 = 1; // LED 2 ON
EraseFlash(0x1C0CA, 0x1C0CA);
WriteBytesFlash(0x1C0CA, 1, 0x01);
ReadFlash(0x1C0CA, 1, read[0]);
if (read[0] == 0x01)
LATDbits.LATD6 = 1; // LED 1 ON
while (1) {
}
I don't know what WriteBytesFlash does, but the page size for your device is 64 bytes, and after writing you need to write an unlock sequence to the EECON2 and EECON1 registers to start programming the flash memory.
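A minimal sketch of that PIC18 unlock sequence (register names from the XC8 device header; the preceding block erase and the loading of the table-write holding registers are omitted, and flash.h's WriteBytesFlash may or may not already do this internally):
#include <xc.h>

// Sketch of the PIC18 flash-programming unlock sequence.
// TBLPTR must already point into the erased block and the
// write latches must have been loaded with TBLWT instructions.
void flash_program_start(void)
{
    EECON1bits.WREN = 1;   // enable writes to program memory
    INTCONbits.GIE = 0;    // interrupts off during the unlock sequence
    EECON2 = 0x55;         // required unlock sequence
    EECON2 = 0xAA;
    EECON1bits.WR = 1;     // start programming; the CPU stalls until done
    INTCONbits.GIE = 1;    // interrupts back on
    EECON1bits.WREN = 0;   // disable further writes
}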

Portaudio MME device behaviour issue

I am using the multiple-output-device feature provided by paMME host API to output audio through multiple stereo devices. I also need to use a single multichannel input device using MME.
- When I configure just the output device and play internally generated audio, there is no problem.
- However, the problem starts to occur when I configure both the input device and the multiple-stereo output devices. The application crashes when I try to use more than two channels on the output. That is, if I try to increment the 'out' pointer by more than 2*frames_per_buffer, it crashes, which indicates that the buffer has been allocated for only two output channels.
Can anybody throw some light on what could be the problem? The configuration code is given below:
outputParameters.device = paUseHostApiSpecificDeviceSpecification;
outputParameters.channelCount = 8;
outputParameters.sampleFormat = paInt16;
outputParameters.hostApiSpecificStreamInfo = NULL;
wmmeStreamInfo.size = sizeof(PaWinMmeStreamInfo);
wmmeStreamInfo.hostApiType = paMME;
wmmeStreamInfo.version = 1;
wmmeStreamInfo.flags = paWinMmeUseMultipleDevices;
wmmeDeviceAndNumChannels[0].device = selectedDeviceIndex[0];
wmmeDeviceAndNumChannels[0].channelCount = 2;
wmmeDeviceAndNumChannels[1].device = selectedDeviceIndex[1];
wmmeDeviceAndNumChannels[1].channelCount = 2;
wmmeDeviceAndNumChannels[2].device = selectedDeviceIndex[2];
wmmeDeviceAndNumChannels[2].channelCount = 2;
wmmeDeviceAndNumChannels[3].device = selectedDeviceIndex[3];
wmmeDeviceAndNumChannels[3].channelCount = 2;
wmmeStreamInfo.devices = wmmeDeviceAndNumChannels;
wmmeStreamInfo.deviceCount = 4;
outputParameters.suggestedLatency = Pa_GetDeviceInfo( selectedDeviceIndex[0] )->defaultLowOutputLatency;
outputParameters.hostApiSpecificStreamInfo = &wmmeStreamInfo;
inputParameters.device = selectedInputDeviceIndex; /* selected input device */
inputParameters.channelCount = 8; /* 8-channel input */
inputParameters.sampleFormat = paInt16; /* 16-bit integer samples */
inputParameters.suggestedLatency = Pa_GetDeviceInfo( inputParameters.device )->defaultLowInputLatency;
inputParameters.hostApiSpecificStreamInfo = NULL;
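For context, these parameters would then typically be passed to Pa_OpenStream roughly like this (a sketch, not code from the question; the sample rate, frames-per-buffer value and callback name are assumptions). In the callback, the output buffer is interleaved across all 8 channels (4 devices x 2), so it is indexed as out[frame * 8 + channel]:
PaStream *stream = NULL;
PaError err = Pa_OpenStream(&stream,
                            &inputParameters,
                            &outputParameters,
                            44100,            /* sample rate (assumption) */
                            256,              /* frames per buffer (assumption) */
                            paNoFlag,
                            audioCallback,    /* hypothetical callback */
                            NULL);
if (err == paNoError)
    err = Pa_StartStream(stream);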
Thanks and regards,
Siddharth Kumar.
