drv_spi.c 27 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900
  1. /*
  2. * Copyright (c) 2006-2021, RT-Thread Development Team
  3. *
  4. * SPDX-License-Identifier: Apache-2.0
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2022-05-16 shelton first version
  9. * 2022-11-10 shelton support spi dma
  10. * 2023-01-31 shelton add support f421/f425
  11. * 2023-04-08 shelton add support f423
  12. * 2023-10-18 shelton add support f402/f405
  13. * 2024-04-12 shelton add support a403a and a423
  14. * 2024-08-30 shelton add support m412 and m416
  15. */
  16. #include "drv_common.h"
  17. #include "drv_spi.h"
  18. #include "drv_config.h"
  19. #include <string.h>
  20. #ifdef RT_USING_SPI
  21. #if !defined(BSP_USING_SPI1) && !defined(BSP_USING_SPI2) && \
  22. !defined(BSP_USING_SPI3) && !defined(BSP_USING_SPI4)
  23. #error "Please define at least one BSP_USING_SPIx"
  24. #endif
  25. //#define DRV_DEBUG
  26. #define LOG_TAG "drv.pwm"
  27. #include <drv_log.h>
/* zero-based index of each *enabled* SPI instance; because every entry is
   guarded by the same BSP_USING_SPIx conditionals as spi_config[] below,
   the enumerators always line up with the rows of that table — do not
   reorder one without the other */
enum
{
#ifdef BSP_USING_SPI1
    SPI1_INDEX,
#endif
#ifdef BSP_USING_SPI2
    SPI2_INDEX,
#endif
#ifdef BSP_USING_SPI3
    SPI3_INDEX,
#endif
#ifdef BSP_USING_SPI4
    SPI4_INDEX,
#endif
};
/* static configuration (registers, name, irq, …) for each enabled SPI bus;
   row order must mirror the SPIx_INDEX enum above — both use identical
   BSP_USING_SPIx guards so index i of this array is instance i.
   SPIx_CONFIG initializers come from drv_config.h */
static struct at32_spi_config spi_config[] = {
#ifdef BSP_USING_SPI1
    SPI1_CONFIG,
#endif
#ifdef BSP_USING_SPI2
    SPI2_CONFIG,
#endif
#ifdef BSP_USING_SPI3
    SPI3_CONFIG,
#endif
#ifdef BSP_USING_SPI4
    SPI4_CONFIG,
#endif
};
  57. /* private rt-thread spi ops function */
  58. static rt_err_t configure(struct rt_spi_device* device, struct rt_spi_configuration* configuration);
  59. static rt_ssize_t xfer(struct rt_spi_device* device, struct rt_spi_message* message);
  60. static struct rt_spi_ops at32_spi_ops =
  61. {
  62. configure,
  63. xfer
  64. };
  65. /**
  66. * attach the spi device to spi bus, this function must be used after initialization.
  67. */
  68. rt_err_t rt_hw_spi_device_attach(const char *bus_name, const char *device_name, gpio_type *cs_gpiox, uint16_t cs_gpio_pin)
  69. {
  70. gpio_init_type gpio_init_struct;
  71. RT_ASSERT(bus_name != RT_NULL);
  72. RT_ASSERT(device_name != RT_NULL);
  73. rt_err_t result;
  74. struct rt_spi_device *spi_device;
  75. struct at32_spi_cs *cs_pin;
  76. /* initialize the cs pin & select the slave*/
  77. gpio_default_para_init(&gpio_init_struct);
  78. gpio_init_struct.gpio_pins = cs_gpio_pin;
  79. gpio_init_struct.gpio_mode = GPIO_MODE_OUTPUT;
  80. gpio_init_struct.gpio_out_type = GPIO_OUTPUT_PUSH_PULL;
  81. gpio_init_struct.gpio_drive_strength = GPIO_DRIVE_STRENGTH_STRONGER;
  82. gpio_init(cs_gpiox, &gpio_init_struct);
  83. gpio_bits_set(cs_gpiox, cs_gpio_pin);
  84. /* attach the device to spi bus */
  85. spi_device = (struct rt_spi_device *)rt_malloc(sizeof(struct rt_spi_device));
  86. RT_ASSERT(spi_device != RT_NULL);
  87. cs_pin = (struct at32_spi_cs *)rt_malloc(sizeof(struct at32_spi_cs));
  88. RT_ASSERT(cs_pin != RT_NULL);
  89. cs_pin->gpio_x = cs_gpiox;
  90. cs_pin->gpio_pin = cs_gpio_pin;
  91. result = rt_spi_bus_attach_device(spi_device, device_name, bus_name, (void *)cs_pin);
  92. if (result != RT_EOK)
  93. {
  94. LOG_D("%s attach to %s faild, %d\n", device_name, bus_name, result);
  95. }
  96. RT_ASSERT(result == RT_EOK);
  97. LOG_D("%s attach to %s done", device_name, bus_name);
  98. return result;
  99. }
  100. static rt_err_t configure(struct rt_spi_device* device,
  101. struct rt_spi_configuration* configuration)
  102. {
  103. struct rt_spi_bus * spi_bus = (struct rt_spi_bus *)device->bus;
  104. struct at32_spi *instance = (struct at32_spi *)spi_bus->parent.user_data;
  105. spi_init_type spi_init_struct;
  106. RT_ASSERT(device != RT_NULL);
  107. RT_ASSERT(configuration != RT_NULL);
  108. at32_msp_spi_init(instance->config->spi_x);
  109. /* data_width */
  110. if(configuration->data_width <= 8)
  111. {
  112. spi_init_struct.frame_bit_num = SPI_FRAME_8BIT;
  113. }
  114. else if(configuration->data_width <= 16)
  115. {
  116. spi_init_struct.frame_bit_num = SPI_FRAME_16BIT;
  117. }
  118. else
  119. {
  120. return -RT_EIO;
  121. }
  122. /* baudrate */
  123. {
  124. uint32_t spi_apb_clock;
  125. uint32_t max_hz;
  126. crm_clocks_freq_type clocks_struct;
  127. max_hz = configuration->max_hz;
  128. crm_clocks_freq_get(&clocks_struct);
  129. LOG_D("sys freq: %d\n", clocks_struct.sclk_freq);
  130. LOG_D("max freq: %d\n", max_hz);
  131. if (instance->config->spi_x == SPI1)
  132. {
  133. spi_apb_clock = clocks_struct.apb2_freq;
  134. LOG_D("pclk2 freq: %d\n", clocks_struct.apb2_freq);
  135. }
  136. else
  137. {
  138. spi_apb_clock = clocks_struct.apb1_freq;
  139. LOG_D("pclk1 freq: %d\n", clocks_struct.apb1_freq);
  140. }
  141. if(max_hz >= (spi_apb_clock / 2))
  142. {
  143. spi_init_struct.mclk_freq_division = SPI_MCLK_DIV_2;
  144. }
  145. else if (max_hz >= (spi_apb_clock / 4))
  146. {
  147. spi_init_struct.mclk_freq_division = SPI_MCLK_DIV_4;
  148. }
  149. else if (max_hz >= (spi_apb_clock / 8))
  150. {
  151. spi_init_struct.mclk_freq_division = SPI_MCLK_DIV_8;
  152. }
  153. else if (max_hz >= (spi_apb_clock / 16))
  154. {
  155. spi_init_struct.mclk_freq_division = SPI_MCLK_DIV_16;
  156. }
  157. else if (max_hz >= (spi_apb_clock / 32))
  158. {
  159. spi_init_struct.mclk_freq_division = SPI_MCLK_DIV_32;
  160. }
  161. else if (max_hz >= (spi_apb_clock / 64))
  162. {
  163. spi_init_struct.mclk_freq_division = SPI_MCLK_DIV_64;
  164. }
  165. else if (max_hz >= (spi_apb_clock / 128))
  166. {
  167. spi_init_struct.mclk_freq_division = SPI_MCLK_DIV_128;
  168. }
  169. else
  170. {
  171. /* min prescaler 256 */
  172. spi_init_struct.mclk_freq_division = SPI_MCLK_DIV_256;
  173. }
  174. } /* baudrate */
  175. switch(configuration->mode & RT_SPI_MODE_3)
  176. {
  177. case RT_SPI_MODE_0:
  178. spi_init_struct.clock_phase = SPI_CLOCK_PHASE_1EDGE;
  179. spi_init_struct.clock_polarity = SPI_CLOCK_POLARITY_LOW;
  180. break;
  181. case RT_SPI_MODE_1:
  182. spi_init_struct.clock_phase = SPI_CLOCK_PHASE_2EDGE;
  183. spi_init_struct.clock_polarity = SPI_CLOCK_POLARITY_LOW;
  184. break;
  185. case RT_SPI_MODE_2:
  186. spi_init_struct.clock_phase = SPI_CLOCK_PHASE_1EDGE;
  187. spi_init_struct.clock_polarity = SPI_CLOCK_POLARITY_HIGH;
  188. break;
  189. case RT_SPI_MODE_3:
  190. spi_init_struct.clock_phase = SPI_CLOCK_PHASE_2EDGE;
  191. spi_init_struct.clock_polarity = SPI_CLOCK_POLARITY_HIGH;
  192. break;
  193. }
  194. /* msb or lsb */
  195. if(configuration->mode & RT_SPI_MSB)
  196. {
  197. spi_init_struct.first_bit_transmission = SPI_FIRST_BIT_MSB;
  198. }
  199. else
  200. {
  201. spi_init_struct.first_bit_transmission = SPI_FIRST_BIT_LSB;
  202. }
  203. spi_init_struct.transmission_mode = SPI_TRANSMIT_FULL_DUPLEX;
  204. spi_init_struct.master_slave_mode = SPI_MODE_MASTER;
  205. spi_init_struct.cs_mode_selection = SPI_CS_SOFTWARE_MODE;
  206. /* disable spi to change transfer size */
  207. spi_enable(instance->config->spi_x, FALSE);
  208. /* init spi */
  209. spi_init(instance->config->spi_x, &spi_init_struct);
  210. /* enable spi */
  211. spi_enable(instance->config->spi_x, TRUE);
  212. /* disable spi crc */
  213. spi_crc_enable(instance->config->spi_x, FALSE);
  214. return RT_EOK;
  215. };
  216. static void _spi_dma_receive(struct at32_spi *instance, rt_uint8_t *buffer, rt_uint32_t size)
  217. {
  218. dma_channel_type* dma_channel = instance->config->dma_rx->dma_channel;
  219. dma_channel->dtcnt = size;
  220. dma_channel->paddr = (rt_uint32_t)&(instance->config->spi_x->dt);
  221. dma_channel->maddr = (rt_uint32_t)buffer;
  222. /* enable transmit complete interrupt */
  223. dma_interrupt_enable(dma_channel, DMA_FDT_INT, TRUE);
  224. /* enable dma receive */
  225. spi_i2s_dma_receiver_enable(instance->config->spi_x, TRUE);
  226. /* mark dma flag */
  227. instance->config->dma_rx->dma_done = RT_FALSE;
  228. /* enable dma channel */
  229. dma_channel_enable(dma_channel, TRUE);
  230. }
  231. static void _spi_dma_transmit(struct at32_spi *instance, rt_uint8_t *buffer, rt_uint32_t size)
  232. {
  233. dma_channel_type *dma_channel = instance->config->dma_tx->dma_channel;
  234. dma_channel->dtcnt = size;
  235. dma_channel->paddr = (rt_uint32_t)&(instance->config->spi_x->dt);
  236. dma_channel->maddr = (rt_uint32_t)buffer;
  237. /* enable spi error interrupt */
  238. spi_i2s_interrupt_enable(instance->config->spi_x, SPI_I2S_ERROR_INT, TRUE);
  239. /* enable transmit complete interrupt */
  240. dma_interrupt_enable(dma_channel, DMA_FDT_INT, TRUE);
  241. /* enable dma transmit */
  242. spi_i2s_dma_transmitter_enable(instance->config->spi_x, TRUE);
  243. /* mark dma flag */
  244. instance->config->dma_tx->dma_done = RT_FALSE;
  245. /* enable dma channel */
  246. dma_channel_enable(dma_channel, TRUE);
  247. }
  248. static void _spi_polling_receive_transmit(struct at32_spi *instance, rt_uint8_t *recv_buf, rt_uint8_t *send_buf, \
  249. rt_uint32_t size, rt_uint8_t data_mode)
  250. {
  251. /* data frame length 8 bit */
  252. if(data_mode <= 8)
  253. {
  254. const rt_uint8_t *send_ptr = send_buf;
  255. rt_uint8_t * recv_ptr = recv_buf;
  256. LOG_D("spi poll transfer start: %d\n", size);
  257. while(size--)
  258. {
  259. rt_uint8_t data = 0xFF;
  260. if(send_ptr != RT_NULL)
  261. {
  262. data = *send_ptr++;
  263. }
  264. /* wait until the transmit buffer is empty */
  265. while(spi_i2s_flag_get(instance->config->spi_x, SPI_I2S_TDBE_FLAG) == RESET);
  266. /* send the byte */
  267. spi_i2s_data_transmit(instance->config->spi_x, data);
  268. /* wait until a data is received */
  269. while(spi_i2s_flag_get(instance->config->spi_x, SPI_I2S_RDBF_FLAG) == RESET);
  270. /* get the received data */
  271. data = spi_i2s_data_receive(instance->config->spi_x);
  272. if(recv_ptr != RT_NULL)
  273. {
  274. *recv_ptr++ = data;
  275. }
  276. }
  277. LOG_D("spi poll transfer finsh\n");
  278. }
  279. /* data frame length 16 bit */
  280. else if(data_mode <= 16)
  281. {
  282. const rt_uint16_t * send_ptr = (rt_uint16_t *)send_buf;
  283. rt_uint16_t * recv_ptr = (rt_uint16_t *)recv_buf;
  284. while(size--)
  285. {
  286. rt_uint16_t data = 0xFF;
  287. if(send_ptr != RT_NULL)
  288. {
  289. data = *send_ptr++;
  290. }
  291. /* wait until the transmit buffer is empty */
  292. while(spi_i2s_flag_get(instance->config->spi_x, SPI_I2S_TDBE_FLAG) == RESET);
  293. /* send the byte */
  294. spi_i2s_data_transmit(instance->config->spi_x, data);
  295. /* wait until a data is received */
  296. while(spi_i2s_flag_get(instance->config->spi_x, SPI_I2S_RDBF_FLAG) == RESET);
  297. /* get the received data */
  298. data = spi_i2s_data_receive(instance->config->spi_x);
  299. if(recv_ptr != RT_NULL)
  300. {
  301. *recv_ptr++ = data;
  302. }
  303. }
  304. }
  305. }
/* rt_spi_ops.xfer implementation: performs one spi message, splitting it
   into <= 65535-unit chunks, choosing dma or polling per direction based
   on the instance's spi_dma_flag, and driving the software cs pin around
   the transfer. returns message->length.
   NOTE(review): device->bus is dereferenced before RT_ASSERT(device) — the
   asserts below do not actually guard the first accesses; confirm callers
   never pass NULL. */
static rt_ssize_t xfer(struct rt_spi_device* device, struct rt_spi_message* message)
{
    struct rt_spi_bus * at32_spi_bus = (struct rt_spi_bus *)device->bus;
    struct at32_spi *instance = (struct at32_spi *)at32_spi_bus->parent.user_data;
    struct rt_spi_configuration *config = &device->config;
    struct at32_spi_cs * at32_spi_cs = device->parent.user_data;
    rt_size_t message_length = 0, already_send_length = 0;
    rt_uint16_t send_length = 0;
    rt_uint8_t *recv_buf;
    const rt_uint8_t *send_buf;
    RT_ASSERT(device != NULL);
    RT_ASSERT(message != NULL);
    /* take cs (active low) */
    if(message->cs_take)
    {
        gpio_bits_reset(at32_spi_cs->gpio_x, at32_spi_cs->gpio_pin);
        LOG_D("spi take cs\n");
    }
    message_length = message->length;
    recv_buf = message->recv_buf;
    send_buf = message->send_buf;
    while (message_length)
    {
        /* the HAL library use uint16 to save the data length */
        if (message_length > 65535)
        {
            send_length = 65535;
            message_length = message_length - 65535;
        }
        else
        {
            send_length = message_length;
            message_length = 0;
        }
        /* calculate the start address of this chunk within the message */
        already_send_length = message->length - send_length - message_length;
        /* avoid null pointer problems */
        if (message->send_buf)
        {
            send_buf = (rt_uint8_t *)message->send_buf + already_send_length;
        }
        if (message->recv_buf)
        {
            recv_buf = (rt_uint8_t *)message->recv_buf + already_send_length;
        }
        /* full-duplex: both buffers present */
        if (message->send_buf && message->recv_buf)
        {
            /* dma path only when BOTH directions have dma configured */
            if ((instance->config->spi_dma_flag & RT_DEVICE_FLAG_DMA_RX) && \
                (instance->config->spi_dma_flag & RT_DEVICE_FLAG_DMA_TX))
            {
                /* rx channel armed first so no received frame is lost */
                _spi_dma_receive(instance, (uint8_t *)recv_buf, send_length);
                _spi_dma_transmit(instance, (uint8_t *)send_buf, send_length);
                /* busy-wait for the spi shift register, then both dma done flags */
                while(spi_i2s_flag_get(instance->config->spi_x, SPI_I2S_BF_FLAG) != RESET);
                while((instance->config->dma_tx->dma_done == RT_FALSE) || (instance->config->dma_rx->dma_done == RT_FALSE));
                /* clear rx overrun flag */
                spi_i2s_flag_clear(instance->config->spi_x, SPI_I2S_ROERR_FLAG);
                /* toggle spi enable to reset its state between chunks */
                spi_enable(instance->config->spi_x, FALSE);
                spi_enable(instance->config->spi_x, TRUE);
            }
            else
            {
                _spi_polling_receive_transmit(instance, (uint8_t *)recv_buf, (uint8_t *)send_buf, send_length, config->data_width);
            }
        }
        /* transmit-only */
        else if (message->send_buf)
        {
            if (instance->config->spi_dma_flag & RT_DEVICE_FLAG_DMA_TX)
            {
                _spi_dma_transmit(instance, (uint8_t *)send_buf, send_length);
                /* wait transfer complete */
                while(spi_i2s_flag_get(instance->config->spi_x, SPI_I2S_BF_FLAG) != RESET);
                while(instance->config->dma_tx->dma_done == RT_FALSE);
                /* clear rx overrun flag (rx register was never drained) */
                spi_i2s_flag_clear(instance->config->spi_x, SPI_I2S_ROERR_FLAG);
                spi_enable(instance->config->spi_x, FALSE);
                spi_enable(instance->config->spi_x, TRUE);
            }
            else
            {
                _spi_polling_receive_transmit(instance, RT_NULL, (uint8_t *)send_buf, send_length, config->data_width);
            }
            if (message->cs_release && (device->config.mode & RT_SPI_3WIRE))
            {
                /* release the cs by disable spi when using 3 wires spi */
                spi_enable(instance->config->spi_x, FALSE);
            }
        }
        /* receive-only (or neither buffer set) */
        else
        {
            /* NOTE(review): if a message arrives with BOTH buffers NULL this
               memset dereferences a stale/NULL recv_buf — verify the spi core
               never issues such a message */
            /* fill the rx buffer with 0xff so it doubles as the dummy tx data */
            memset((void *)recv_buf, 0xff, send_length);
            if (instance->config->spi_dma_flag & RT_DEVICE_FLAG_DMA_RX)
            {
                _spi_dma_receive(instance, (uint8_t *)recv_buf, send_length);
                /* transmit the same buffer as dummy bytes to clock data in */
                _spi_dma_transmit(instance, (uint8_t *)recv_buf, send_length);
                /* wait transfer complete */
                while(spi_i2s_flag_get(instance->config->spi_x, SPI_I2S_BF_FLAG) != RESET);
                while((instance->config->dma_tx->dma_done == RT_FALSE) || (instance->config->dma_rx->dma_done == RT_FALSE));
                /* clear rx overrun flag */
                spi_i2s_flag_clear(instance->config->spi_x, SPI_I2S_ROERR_FLAG);
                spi_enable(instance->config->spi_x, FALSE);
                spi_enable(instance->config->spi_x, TRUE);
            }
            else
            {
                /* clear the old error flag */
                spi_i2s_flag_clear(instance->config->spi_x, SPI_I2S_ROERR_FLAG);
                _spi_polling_receive_transmit(instance, (uint8_t *)recv_buf, (uint8_t *)recv_buf, send_length, config->data_width);
            }
        }
    }
    /* release cs (drive it high again) */
    if(message->cs_release)
    {
        gpio_bits_set(at32_spi_cs->gpio_x, at32_spi_cs->gpio_pin);
        LOG_D("spi release cs\n");
    }
    return message->length;
}
  426. static void _dma_base_channel_check(struct at32_spi *instance)
  427. {
  428. dma_channel_type *rx_channel = instance->config->dma_rx->dma_channel;
  429. dma_channel_type *tx_channel = instance->config->dma_tx->dma_channel;
  430. if(instance->config->spi_dma_flag & RT_DEVICE_FLAG_DMA_RX)
  431. {
  432. instance->config->dma_rx->dma_done = RT_TRUE;
  433. instance->config->dma_rx->dma_x = (dma_type *)((rt_uint32_t)rx_channel & ~0xFF);
  434. instance->config->dma_rx->channel_index = ((((rt_uint32_t)rx_channel & 0xFF) - 8) / 0x14) + 1;
  435. }
  436. if(instance->config->spi_dma_flag & RT_DEVICE_FLAG_DMA_TX)
  437. {
  438. instance->config->dma_tx->dma_done = RT_TRUE;
  439. instance->config->dma_tx->dma_x = (dma_type *)((rt_uint32_t)tx_channel & ~0xFF);
  440. instance->config->dma_tx->channel_index = ((((rt_uint32_t)tx_channel & 0xFF) - 8) / 0x14) + 1;
  441. }
  442. }
/* one-time dma setup for an spi instance: resolves controller base/channel
   index, programs byte-wide peripheral<->memory channels for each configured
   direction, wires the request routing (flexible mapping on F425, dmamux on
   the newer series), and enables the dma and spi interrupts in the nvic. */
static void at32_spi_dma_init(struct at32_spi *instance)
{
    dma_init_type dma_init_struct;
    /* search dma base and channel index */
    _dma_base_channel_check(instance);
    /* common channel settings: fixed peripheral address, incrementing memory
       address, byte transfers both sides, single-shot (no circular mode) */
    dma_default_para_init(&dma_init_struct);
    dma_init_struct.peripheral_inc_enable = FALSE;
    dma_init_struct.memory_inc_enable = TRUE;
    dma_init_struct.peripheral_data_width = DMA_PERIPHERAL_DATA_WIDTH_BYTE;
    dma_init_struct.memory_data_width = DMA_MEMORY_DATA_WIDTH_BYTE;
    dma_init_struct.priority = DMA_PRIORITY_MEDIUM;
    dma_init_struct.loop_mode_enable = FALSE;
    if (instance->config->spi_dma_flag & RT_DEVICE_FLAG_DMA_RX)
    {
        crm_periph_clock_enable(instance->config->dma_rx->dma_clock, TRUE);
        dma_init_struct.direction = DMA_DIR_PERIPHERAL_TO_MEMORY;
        dma_reset(instance->config->dma_rx->dma_channel);
        dma_init(instance->config->dma_rx->dma_channel, &dma_init_struct);
/* F425 routes peripheral requests with the flexible-mapping unit */
#if defined (SOC_SERIES_AT32F425)
        dma_flexible_config(instance->config->dma_rx->dma_x, instance->config->dma_rx->flex_channel, \
                            (dma_flexible_request_type)instance->config->dma_rx->request_id);
#endif
/* newer series route requests through the dmamux instead */
#if defined (SOC_SERIES_AT32F435) || defined (SOC_SERIES_AT32F437) || \
    defined (SOC_SERIES_AT32F423) || defined (SOC_SERIES_AT32F402) || \
    defined (SOC_SERIES_AT32F405) || defined (SOC_SERIES_AT32A423) || \
    defined (SOC_SERIES_AT32M412) || defined (SOC_SERIES_AT32M416)
        dmamux_enable(instance->config->dma_rx->dma_x, TRUE);
        dmamux_init(instance->config->dma_rx->dmamux_channel, (dmamux_requst_id_sel_type)instance->config->dma_rx->request_id);
#endif
        /* dma irq should set in dma rx mode */
        nvic_irq_enable(instance->config->dma_rx->dma_irqn, 0, 1);
    }
    if (instance->config->spi_dma_flag & RT_DEVICE_FLAG_DMA_TX)
    {
        crm_periph_clock_enable(instance->config->dma_tx->dma_clock, TRUE);
        dma_init_struct.direction = DMA_DIR_MEMORY_TO_PERIPHERAL;
        dma_reset(instance->config->dma_tx->dma_channel);
        dma_init(instance->config->dma_tx->dma_channel, &dma_init_struct);
#if defined (SOC_SERIES_AT32F425)
        dma_flexible_config(instance->config->dma_tx->dma_x, instance->config->dma_tx->flex_channel, \
                            (dma_flexible_request_type)instance->config->dma_tx->request_id);
#endif
#if defined (SOC_SERIES_AT32F435) || defined (SOC_SERIES_AT32F437) || \
    defined (SOC_SERIES_AT32F423) || defined (SOC_SERIES_AT32F402) || \
    defined (SOC_SERIES_AT32F405) || defined (SOC_SERIES_AT32A423) || \
    defined (SOC_SERIES_AT32M412) || defined (SOC_SERIES_AT32M416)
        dmamux_enable(instance->config->dma_tx->dma_x, TRUE);
        dmamux_init(instance->config->dma_tx->dmamux_channel, (dmamux_requst_id_sel_type)instance->config->dma_tx->request_id);
#endif
        /* dma irq should set in dma tx mode */
        nvic_irq_enable(instance->config->dma_tx->dma_irqn, 0, 1);
    }
    /* the spi's own irq (error handling) is needed whenever any dma is used */
    if((instance->config->spi_dma_flag & RT_DEVICE_FLAG_DMA_TX) || \
       (instance->config->spi_dma_flag & RT_DEVICE_FLAG_DMA_RX))
    {
        nvic_irq_enable(instance->config->irqn, 0, 0);
    }
}
  502. void spi_dma_isr(struct dma_config *dma_instance)
  503. {
  504. volatile rt_uint32_t reg_sts = 0, index = 0;
  505. reg_sts = dma_instance->dma_x->sts;
  506. index = dma_instance->channel_index;
  507. if ((reg_sts & (DMA_FDT_FLAG << (4 * (index - 1)))) != RESET)
  508. {
  509. /* clear dma flag */
  510. dma_instance->dma_x->clr |= (rt_uint32_t)((DMA_FDT_FLAG << (4 * (index - 1))) | \
  511. (DMA_HDT_FLAG << (4 * (index - 1))));
  512. /* disable interrupt */
  513. dma_interrupt_enable(dma_instance->dma_channel, DMA_FDT_INT, FALSE);
  514. /* disable dma channel */
  515. dma_channel_enable(dma_instance->dma_channel, FALSE);
  516. /* mark done flag */
  517. dma_instance->dma_done = RT_TRUE;
  518. }
  519. }
  520. void spi_isr(spi_type *spi_x)
  521. {
  522. if(spi_i2s_flag_get(spi_x, SPI_I2S_ROERR_FLAG) != RESET)
  523. {
  524. /* clear rx overrun error flag */
  525. spi_i2s_flag_clear(spi_x, SPI_I2S_ROERR_FLAG);
  526. }
  527. if(spi_i2s_flag_get(spi_x, SPI_MMERR_FLAG) != RESET)
  528. {
  529. /* clear master mode error flag */
  530. spi_i2s_flag_clear(spi_x, SPI_MMERR_FLAG);
  531. }
  532. }
/* per-instance interrupt entry points: each vector wraps the shared
   spi_isr()/spi_dma_isr() in rt_interrupt_enter()/rt_interrupt_leave()
   as the rt-thread kernel requires for isr bookkeeping */
#ifdef BSP_USING_SPI1
void SPI1_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_isr(spi_config[SPI1_INDEX].spi_x);
    /* leave interrupt */
    rt_interrupt_leave();
}
#if defined(BSP_SPI1_RX_USING_DMA)
void SPI1_RX_DMA_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_dma_isr(spi_config[SPI1_INDEX].dma_rx);
    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI1_RX_USING_DMA) */
#if defined(BSP_SPI1_TX_USING_DMA)
void SPI1_TX_DMA_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_dma_isr(spi_config[SPI1_INDEX].dma_tx);
    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI1_TX_USING_DMA) */
#endif /* BSP_USING_SPI1 */
#ifdef BSP_USING_SPI2
void SPI2_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_isr(spi_config[SPI2_INDEX].spi_x);
    /* leave interrupt */
    rt_interrupt_leave();
}
#if defined(BSP_SPI2_RX_USING_DMA)
void SPI2_RX_DMA_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_dma_isr(spi_config[SPI2_INDEX].dma_rx);
    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI2_RX_USING_DMA) */
#if defined(BSP_SPI2_TX_USING_DMA)
void SPI2_TX_DMA_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_dma_isr(spi_config[SPI2_INDEX].dma_tx);
    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI2_TX_USING_DMA) */
#endif /* BSP_USING_SPI2 */
#ifdef BSP_USING_SPI3
void SPI3_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_isr(spi_config[SPI3_INDEX].spi_x);
    /* leave interrupt */
    rt_interrupt_leave();
}
#if defined(BSP_SPI3_RX_USING_DMA)
void SPI3_RX_DMA_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_dma_isr(spi_config[SPI3_INDEX].dma_rx);
    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI3_RX_USING_DMA) */
#if defined(BSP_SPI3_TX_USING_DMA)
void SPI3_TX_DMA_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_dma_isr(spi_config[SPI3_INDEX].dma_tx);
    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI3_TX_USING_DMA) */
#endif /* BSP_USING_SPI3 */
#ifdef BSP_USING_SPI4
void SPI4_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_isr(spi_config[SPI4_INDEX].spi_x);
    /* leave interrupt */
    rt_interrupt_leave();
}
#if defined(BSP_SPI4_RX_USING_DMA)
void SPI4_RX_DMA_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_dma_isr(spi_config[SPI4_INDEX].dma_rx);
    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI4_RX_USING_DMA) */
#if defined(BSP_SPI4_TX_USING_DMA)
void SPI4_TX_DMA_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    spi_dma_isr(spi_config[SPI4_INDEX].dma_tx);
    /* leave interrupt */
    rt_interrupt_leave();
}
#endif /* defined(BSP_SPI4_TX_USING_DMA) */ /* comment typo fixed: was "SPI14" */
#endif /* BSP_USING_SPI4 */
/* on F421 and F425 several spi dma channels share a single interrupt vector,
   so the combined handler fans out to each per-channel handler that is
   actually compiled in; each callee checks its own channel's status flag */
#if defined (SOC_SERIES_AT32F421)
void SPI1_TX_RX_DMA_IRQHandler(void)
{
#if defined(BSP_USING_SPI1) && defined(BSP_SPI1_TX_USING_DMA)
    SPI1_TX_DMA_IRQHandler();
#endif
#if defined(BSP_USING_SPI1) && defined(BSP_SPI1_RX_USING_DMA)
    SPI1_RX_DMA_IRQHandler();
#endif
}
void SPI2_TX_RX_DMA_IRQHandler(void)
{
#if defined(BSP_USING_SPI2) && defined(BSP_SPI2_TX_USING_DMA)
    SPI2_TX_DMA_IRQHandler();
#endif
#if defined(BSP_USING_SPI2) && defined(BSP_SPI2_RX_USING_DMA)
    SPI2_RX_DMA_IRQHandler();
#endif
}
#endif /* SOC_SERIES_AT32F421 */
#if defined (SOC_SERIES_AT32F425)
void SPI1_TX_RX_DMA_IRQHandler(void)
{
#if defined(BSP_USING_SPI1) && defined(BSP_SPI1_TX_USING_DMA)
    SPI1_TX_DMA_IRQHandler();
#endif
#if defined(BSP_USING_SPI1) && defined(BSP_SPI1_RX_USING_DMA)
    SPI1_RX_DMA_IRQHandler();
#endif
}
/* on F425 the spi2 and spi3 dma channels share one vector */
void SPI3_2_TX_RX_DMA_IRQHandler(void)
{
#if defined(BSP_USING_SPI2) && defined(BSP_SPI2_TX_USING_DMA)
    SPI2_TX_DMA_IRQHandler();
#endif
#if defined(BSP_USING_SPI2) && defined(BSP_SPI2_RX_USING_DMA)
    SPI2_RX_DMA_IRQHandler();
#endif
#if defined(BSP_USING_SPI3) && defined(BSP_SPI3_TX_USING_DMA)
    SPI3_TX_DMA_IRQHandler();
#endif
#if defined(BSP_USING_SPI3) && defined(BSP_SPI3_RX_USING_DMA)
    SPI3_RX_DMA_IRQHandler();
#endif
}
#endif /* SOC_SERIES_AT32F425 */
/* runtime state for each enabled SPI instance, parallel to spi_config[] */
static struct at32_spi spis[sizeof(spi_config) / sizeof(spi_config[0])] = {0};
/* fill in the dma portions of spi_config[]: for each enabled instance set
   its spi_dma_flag from the BSP options and point dma_rx/dma_tx at
   function-local static dma_config blocks (initializers from drv_config.h).
   instances without a BSP_SPIx_*_USING_DMA option keep dma_rx/dma_tx NULL. */
static void at32_spi_get_dma_config(void)
{
#ifdef BSP_USING_SPI1
    spi_config[SPI1_INDEX].spi_dma_flag = 0;
#ifdef BSP_SPI1_RX_USING_DMA
    spi_config[SPI1_INDEX].spi_dma_flag |= RT_DEVICE_FLAG_DMA_RX;
    /* static: the config must outlive this function */
    static struct dma_config spi1_dma_rx = SPI1_RX_DMA_CONFIG;
    spi_config[SPI1_INDEX].dma_rx = &spi1_dma_rx;
#endif
#ifdef BSP_SPI1_TX_USING_DMA
    spi_config[SPI1_INDEX].spi_dma_flag |= RT_DEVICE_FLAG_DMA_TX;
    static struct dma_config spi1_dma_tx = SPI1_TX_DMA_CONFIG;
    spi_config[SPI1_INDEX].dma_tx = &spi1_dma_tx;
#endif
#endif
#ifdef BSP_USING_SPI2
    spi_config[SPI2_INDEX].spi_dma_flag = 0;
#ifdef BSP_SPI2_RX_USING_DMA
    spi_config[SPI2_INDEX].spi_dma_flag |= RT_DEVICE_FLAG_DMA_RX;
    static struct dma_config spi2_dma_rx = SPI2_RX_DMA_CONFIG;
    spi_config[SPI2_INDEX].dma_rx = &spi2_dma_rx;
#endif
#ifdef BSP_SPI2_TX_USING_DMA
    spi_config[SPI2_INDEX].spi_dma_flag |= RT_DEVICE_FLAG_DMA_TX;
    static struct dma_config spi2_dma_tx = SPI2_TX_DMA_CONFIG;
    spi_config[SPI2_INDEX].dma_tx = &spi2_dma_tx;
#endif
#endif
#ifdef BSP_USING_SPI3
    spi_config[SPI3_INDEX].spi_dma_flag = 0;
#ifdef BSP_SPI3_RX_USING_DMA
    spi_config[SPI3_INDEX].spi_dma_flag |= RT_DEVICE_FLAG_DMA_RX;
    static struct dma_config spi3_dma_rx = SPI3_RX_DMA_CONFIG;
    spi_config[SPI3_INDEX].dma_rx = &spi3_dma_rx;
#endif
#ifdef BSP_SPI3_TX_USING_DMA
    spi_config[SPI3_INDEX].spi_dma_flag |= RT_DEVICE_FLAG_DMA_TX;
    static struct dma_config spi3_dma_tx = SPI3_TX_DMA_CONFIG;
    spi_config[SPI3_INDEX].dma_tx = &spi3_dma_tx;
#endif
#endif
#ifdef BSP_USING_SPI4
    spi_config[SPI4_INDEX].spi_dma_flag = 0;
#ifdef BSP_SPI4_RX_USING_DMA
    spi_config[SPI4_INDEX].spi_dma_flag |= RT_DEVICE_FLAG_DMA_RX;
    static struct dma_config spi4_dma_rx = SPI4_RX_DMA_CONFIG;
    spi_config[SPI4_INDEX].dma_rx = &spi4_dma_rx;
#endif
#ifdef BSP_SPI4_TX_USING_DMA
    spi_config[SPI4_INDEX].spi_dma_flag |= RT_DEVICE_FLAG_DMA_TX;
    static struct dma_config spi4_dma_tx = SPI4_TX_DMA_CONFIG;
    spi_config[SPI4_INDEX].dma_tx = &spi4_dma_tx;
#endif
#endif
}
  755. int rt_hw_spi_init(void)
  756. {
  757. int i;
  758. rt_err_t result;
  759. rt_size_t obj_num = sizeof(spi_config) / sizeof(spi_config[0]);
  760. at32_spi_get_dma_config();
  761. for (i = 0; i < obj_num; i++)
  762. {
  763. spis[i].config = &spi_config[i];
  764. spis[i].spi_bus.parent.user_data = (void *)&spis[i];
  765. if(spis[i].config->spi_dma_flag & (RT_DEVICE_FLAG_DMA_RX | RT_DEVICE_FLAG_DMA_TX))
  766. {
  767. at32_spi_dma_init(&spis[i]);
  768. }
  769. result = rt_spi_bus_register(&(spis[i].spi_bus), spis[i].config->spi_name, &at32_spi_ops);
  770. }
  771. return result;
  772. }
  773. INIT_BOARD_EXPORT(rt_hw_spi_init);
  774. #endif