2022-06-09 08:36 AM
I am trying to use I2C DMA on 2 STM32H7B3I-DKs. After the message is sent to the slave device using HAL_I2C_Master_Transmit_DMA(), the slave seems to receive the message as HAL_I2C_Slave_Receive_DMA() returns HAL_OK but HAL_I2C_SlaveRxCpltCallback() fails to execute afterwards.
Master config:
static void MX_I2C4_Init(void)
{
hi2c4.Instance = I2C4;
hi2c4.Init.Timing = 0x00303D5B;
hi2c4.Init.OwnAddress1 = 0;
hi2c4.Init.AddressingMode = I2C_ADDRESSINGMODE_7BIT;
hi2c4.Init.DualAddressMode = I2C_DUALADDRESS_DISABLE;
hi2c4.Init.OwnAddress2 = 0;
hi2c4.Init.OwnAddress2Masks = I2C_OA2_NOMASK;
hi2c4.Init.GeneralCallMode = I2C_GENERALCALL_DISABLE;
hi2c4.Init.NoStretchMode = I2C_NOSTRETCH_DISABLE;
if (HAL_I2C_Init(&hi2c4) != HAL_OK)
{
Error_Handler();
}
/** Configure Analogue filter
*/
if (HAL_I2CEx_ConfigAnalogFilter(&hi2c4, I2C_ANALOGFILTER_ENABLE) != HAL_OK)
{
Error_Handler();
}
/** Configure Digital filter
*/
if (HAL_I2CEx_ConfigDigitalFilter(&hi2c4, 0) != HAL_OK)
{
Error_Handler();
}
}
static void MX_BDMA2_Init(void)
{
/* DMA controller clock enable */
__HAL_RCC_BDMA2_CLK_ENABLE();
/* DMA interrupt init */
/* BDMA2_Channel0_IRQn interrupt configuration */
HAL_NVIC_SetPriority(BDMA2_Channel0_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(BDMA2_Channel0_IRQn);
/* BDMA2_Channel1_IRQn interrupt configuration */
HAL_NVIC_SetPriority(BDMA2_Channel1_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(BDMA2_Channel1_IRQn);
}
Master code:
/* Master entry point: send one 16-byte message ("hi" plus zero padding) to
 * the slave over I2C4 using DMA, retrying while the slave NACKs, then
 * busy-wait for the transfer to complete.
 * Fixed: `void main()` is not valid hosted C — main must return int. */
int main(void)
{
  HAL_Init();
  SystemClock_Config();
  MX_GPIO_Init();
  MX_BDMA2_Init();   /* DMA before the peripheral that uses it */
  MX_I2C4_Init();

  /* NOTE(review): BDMA2 on the H7B3 can only reach SRD-domain SRAM; a
   * buffer on the main stack (typically DTCM/AXI SRAM) may be invisible
   * to it — TODO confirm the stack placement against the linker script. */
  char Buffer[16] = "hi";

  /* NOTE(review): HAL expects the 7-bit target address pre-shifted left
   * by one bit (i.e. 0x11 << 1); the raw 0x11 here only works because the
   * slave's OwnAddress1 uses the same unshifted convention — verify both
   * sides together before changing either. */
  while (HAL_I2C_Master_Transmit_DMA(&hi2c4, 0x11, (uint8_t *)Buffer, sizeof(Buffer)) != HAL_OK)
  {
    /* An acknowledge failure (AF) means the slave is busy: retry.
     * Any other error is fatal. */
    if (HAL_I2C_GetError(&hi2c4) != HAL_I2C_ERROR_AF)
    {
      Error_Handler();
    }
  }

  /* Spin until the DMA transfer finishes and the handle is READY again. */
  while (HAL_I2C_GetState(&hi2c4) != HAL_I2C_STATE_READY)
  {
  }

  return 0;
}
slave config:
static void MX_I2C4_Init(void)
{
hi2c4.Instance = I2C4;
hi2c4.Init.Timing = 0x00303D5B;
hi2c4.Init.OwnAddress1 = 0x11;
hi2c4.Init.AddressingMode = I2C_ADDRESSINGMODE_7BIT;
hi2c4.Init.DualAddressMode = I2C_DUALADDRESS_DISABLE;
hi2c4.Init.OwnAddress2 = 0;
hi2c4.Init.OwnAddress2Masks = I2C_OA2_NOMASK;
hi2c4.Init.GeneralCallMode = I2C_GENERALCALL_DISABLE;
hi2c4.Init.NoStretchMode = I2C_NOSTRETCH_DISABLE;
if (HAL_I2C_Init(&hi2c4) != HAL_OK)
{
Error_Handler();
}
/** Configure Analogue filter
*/
if (HAL_I2CEx_ConfigAnalogFilter(&hi2c4, I2C_ANALOGFILTER_ENABLE) != HAL_OK)
{
Error_Handler();
}
/** Configure Digital filter
*/
if (HAL_I2CEx_ConfigDigitalFilter(&hi2c4, 0) != HAL_OK)
{
Error_Handler();
}
}
slave code:
/* Receive buffer for the I2C4 slave DMA transfer.
 * NOTE(review): BDMA2 on the H7B3 can only access SRD (D3-domain) SRAM; if
 * the linker places this array in DTCM or AXI SRAM the DMA never moves any
 * data and the handle stays in HAL_I2C_STATE_BUSY_RX — TODO confirm the
 * section placement in the linker script / with an attribute. */
char Buffer[100];
/* Slave entry point: arm a 100-byte DMA reception on I2C4, then busy-wait
 * until the handle returns to READY. Completion of the reception is
 * reported asynchronously via HAL_I2C_SlaveRxCpltCallback(). */
int main()
{
HAL_Init();
SystemClock_Config();
MX_GPIO_Init();
/* DMA controller is initialised before I2C4 so the channels exist when
 * HAL_I2C_MspInit() links them to the handle. */
MX_BDMA2_Init();
MX_I2C4_Init();
MX_USART1_UART_Init();
/* Non-blocking: returns HAL_OK as soon as the reception is armed, not
 * when data has actually arrived. */
if(HAL_I2C_Slave_Receive_DMA(&hi2c4, (uint8_t *)Buffer, sizeof(Buffer)) != HAL_OK)
{
Error_Handler();
}
/* Spin until the full transfer (or a STOP-terminated one) completes. */
while (HAL_I2C_GetState(&hi2c4) != HAL_I2C_STATE_READY)
{
}
}
/* I2C4 slave reception-complete callback: echo the received bytes to UART1.
 * Fixed: the original used strlen(Buffer), which reads out of bounds when
 * the DMA-filled buffer contains no NUL terminator. A bounded memchr scan
 * caps the length at sizeof(Buffer). */
void HAL_I2C_SlaveRxCpltCallback (I2C_HandleTypeDef * hi2c)
{
  if (hi2c->Instance == hi2c4.Instance)
  {
    /* Find the first NUL within the buffer; fall back to the whole buffer. */
    const char *nul = memchr(Buffer, '\0', sizeof(Buffer));
    size_t len = (nul != NULL) ? (size_t)(nul - Buffer) : sizeof(Buffer);
    HAL_UART_Transmit(&huart1, (uint8_t *)Buffer, (uint16_t)len, 1000);
  }
}
I read in a few other posts that there may be a bug in CubeMX-generated code where HAL_I2C_MspInit() (or the DMA initialisation it depends on) is called in the wrong order relative to the other peripherals, but I'm not sure whether that applies here. Below is the initialisation code for I2C.
/* HAL MSP hook for I2C4, called from inside HAL_I2C_Init(): selects the
 * kernel clock, configures the SDA/SCL pins, sets up and links the two
 * BDMA2 channels, and enables the I2C event/error interrupts.
 * The BDMA2 channels must already have their controller clock enabled
 * (MX_BDMA2_Init) before HAL_DMA_Init() is called here. */
void HAL_I2C_MspInit(I2C_HandleTypeDef* hi2c)
{
GPIO_InitTypeDef GPIO_InitStruct = {0};
RCC_PeriphCLKInitTypeDef PeriphClkInitStruct = {0};
if(hi2c->Instance==I2C4)
{
/* USER CODE BEGIN I2C4_MspInit 0 */
/* USER CODE END I2C4_MspInit 0 */
/** Initializes the peripherals clock
*/
/* Route the D3/SRD APB clock to I2C4 (required: I2C4 lives in the SRD
 * domain, which is also why it is served by BDMA2 rather than DMA1/2). */
PeriphClkInitStruct.PeriphClockSelection = RCC_PERIPHCLK_I2C4;
PeriphClkInitStruct.I2c4ClockSelection = RCC_I2C4CLKSOURCE_D3PCLK1;
if (HAL_RCCEx_PeriphCLKConfig(&PeriphClkInitStruct) != HAL_OK)
{
Error_Handler();
}
__HAL_RCC_GPIOD_CLK_ENABLE();
/**I2C4 GPIO Configuration
PD13 ------> I2C4_SDA
PD12 ------> I2C4_SCL
*/
/* Open-drain with no internal pull-ups: the bus relies on external
 * pull-up resistors. */
GPIO_InitStruct.Pin = I2C4_SDA_Pin|I2C4_SCL_Pin;
GPIO_InitStruct.Mode = GPIO_MODE_AF_OD;
GPIO_InitStruct.Pull = GPIO_NOPULL;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_VERY_HIGH;
GPIO_InitStruct.Alternate = GPIO_AF4_I2C4;
HAL_GPIO_Init(GPIOD, &GPIO_InitStruct);
/* Peripheral clock enable */
/* NOTE(review): the GPIO pins are configured before the I2C4 peripheral
 * clock is enabled; some CubeMX versions generate the opposite order —
 * confirm this ordering against the current Cube firmware/errata. */
__HAL_RCC_I2C4_CLK_ENABLE();
/* I2C4 DMA Init */
/* I2C4_RX Init */
/* BDMA2 channel 0 <- I2C4 RX: peripheral-to-memory, byte-wide, normal
 * (one-shot) mode, memory address auto-incremented. */
hdma_i2c4_rx.Instance = BDMA2_Channel0;
hdma_i2c4_rx.Init.Request = BDMA_REQUEST_I2C4_RX;
hdma_i2c4_rx.Init.Direction = DMA_PERIPH_TO_MEMORY;
hdma_i2c4_rx.Init.PeriphInc = DMA_PINC_DISABLE;
hdma_i2c4_rx.Init.MemInc = DMA_MINC_ENABLE;
hdma_i2c4_rx.Init.PeriphDataAlignment = DMA_PDATAALIGN_BYTE;
hdma_i2c4_rx.Init.MemDataAlignment = DMA_MDATAALIGN_BYTE;
hdma_i2c4_rx.Init.Mode = DMA_NORMAL;
hdma_i2c4_rx.Init.Priority = DMA_PRIORITY_LOW;
if (HAL_DMA_Init(&hdma_i2c4_rx) != HAL_OK)
{
Error_Handler();
}
/* Link the channel into the I2C handle so the HAL I2C driver can start
 * and track the transfer. */
__HAL_LINKDMA(hi2c,hdmarx,hdma_i2c4_rx);
/* I2C4_TX Init */
/* BDMA2 channel 1 -> I2C4 TX: memory-to-peripheral, mirror of RX above. */
hdma_i2c4_tx.Instance = BDMA2_Channel1;
hdma_i2c4_tx.Init.Request = BDMA_REQUEST_I2C4_TX;
hdma_i2c4_tx.Init.Direction = DMA_MEMORY_TO_PERIPH;
hdma_i2c4_tx.Init.PeriphInc = DMA_PINC_DISABLE;
hdma_i2c4_tx.Init.MemInc = DMA_MINC_ENABLE;
hdma_i2c4_tx.Init.PeriphDataAlignment = DMA_PDATAALIGN_BYTE;
hdma_i2c4_tx.Init.MemDataAlignment = DMA_MDATAALIGN_BYTE;
hdma_i2c4_tx.Init.Mode = DMA_NORMAL;
hdma_i2c4_tx.Init.Priority = DMA_PRIORITY_LOW;
if (HAL_DMA_Init(&hdma_i2c4_tx) != HAL_OK)
{
Error_Handler();
}
__HAL_LINKDMA(hi2c,hdmatx,hdma_i2c4_tx);
/* I2C4 interrupt Init */
/* Event and error interrupts are both required for the HAL DMA transfer
 * state machine to complete (e.g. STOP detection). */
HAL_NVIC_SetPriority(I2C4_EV_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(I2C4_EV_IRQn);
HAL_NVIC_SetPriority(I2C4_ER_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(I2C4_ER_IRQn);
/* USER CODE BEGIN I2C4_MspInit 1 */
/* USER CODE END I2C4_MspInit 1 */
}
}
Does anyone know how to solve this issue? Or is anyone else experiencing this problem too?
Thank you for your time!
Update 1:
After stepping through the code, I have noticed that the state of I2C4 is always HAL_I2C_STATE_BUSY_RX. However, I have already enabled the event & error interrupts and BDMA interrupts.
2022-06-15 01:21 PM
Hello @JTurn.2 ,
You may get inspired from the available I2C example within STM32CubeH7 MCU package:
STM32Cube_FW_H7_V1.10.0\Projects\STM32H7B3I-EVAL\Examples\I2C\
Make sure that the DMA initialization is called before any other peripheral initialization (HAL_***_Init)
Imen