AnsweredAssumed Answered

Wrong value conversion ADC in injected scan mode

Question asked by Lisi.Marco on Feb 16, 2017
Latest reply on Feb 17, 2017 by Lisi.Marco

Hello, I'm trying to read 3 voltages with the ADC2 on my stm32f207zc, using an injected mode + scan mode configuration, but I'm observing this behavior:

Basically, each of the 3 voltages is generated by a differential amplifier which outputs about 1.7 V when no differential input is present.

The point is that the converted value I read is 2.0 V for all 3 inputs.

Then if I change the voltage to 3.3 V (the maximum), the value I read with the ADC is around 4 V.

I'm sure of the actual voltages because I'm checking them with an oscilloscope.

Here is my code, I use std_periph_library:

initialization of the ADC:


void ADC2Init(void)
GPIO_InitTypeDef GPIO_InitStructure;
ADC_InitTypeDef ADC_InitStruct;
ADC_CommonInitTypeDef ADC_CommonInitStructure;

RCC_APB2PeriphClockCmd(RCC_APB2Periph_ADC2, ENABLE);

/* Configure ADC2 Channe10,12,13 pin as analog input ******************************/
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_0 | GPIO_Pin_2 | GPIO_Pin_3;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AN;
GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_NOPULL ;
GPIO_Init(GPIOC, &GPIO_InitStructure);

/* ADC Common Init **********************************************************/
ADC_CommonInitStructure.ADC_Mode = ADC_Mode_Independent;
ADC_CommonInitStructure.ADC_Prescaler = ADC_Prescaler_Div2;
ADC_CommonInitStructure.ADC_DMAAccessMode = ADC_DMAAccessMode_Disabled;
ADC_CommonInitStructure.ADC_TwoSamplingDelay = ADC_TwoSamplingDelay_5Cycles;

/* ADC2 Init ****************************************************************/
ADC_InitStruct.ADC_Resolution = ADC_Resolution_12b;
ADC_InitStruct.ADC_ScanConvMode = ENABLE;
ADC_InitStruct.ADC_ContinuousConvMode = DISABLE;
ADC_InitStruct.ADC_ExternalTrigConvEdge = ADC_ExternalTrigConvEdge_None;
ADC_InitStruct.ADC_ExternalTrigConv = ADC_ExternalTrigConv_T1_CC1;
ADC_InitStruct.ADC_DataAlign = ADC_DataAlign_Right;
ADC_InitStruct.ADC_NbrOfConversion = 1;
ADC_Init(ADC2, &ADC_InitStruct);

ADC_InjectedSequencerLengthConfig(ADC2, 1);

ADC_AutoInjectedConvCmd(ADC2, DISABLE);
ADC_InjectedChannelConfig(ADC2, ADC_Channel_10,1, ADC_SampleTime_3Cycles);
ADC_InjectedChannelConfig(ADC2, ADC_Channel_12,2, ADC_SampleTime_3Cycles);
ADC_InjectedChannelConfig(ADC2, ADC_Channel_13,3, ADC_SampleTime_3Cycles);

/* Enable ADC2 */


and then the read of the converted value:


ADC_SoftwareStartInjectedConv(ADC2); //per far partire la conversione
while ( ADC_GetFlagStatus(ADC2, ADC_FLAG_JEOC) == RESET)

va1 = ((float)ADC_GetInjectedConversionValue(ADC2, ADC_InjectedChannel_1))/1000.0;
vb1 = ((float)ADC_GetInjectedConversionValue(ADC2, ADC_InjectedChannel_2))/1000.0;
vc1 = ((float)ADC_GetInjectedConversionValue(ADC2, ADC_InjectedChannel_3))/1000.0;




What am I doing wrong?

Thank you!!