drv_spi.c 22 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713
  1. /*
  2. * Copyright (c) 2021-2024 HPMicro
  3. *
  4. * SPDX-License-Identifier: BSD-3-Clause
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2022-02-01 HPMicro First version
  9. * 2023-02-15 HPMicro Add DMA support
  10. * 2023-07-14 HPMicro Manage the DMA buffer alignment in driver
  11. * 2023-12-14 HPMicro change state blocking wait to interrupt semaphore wait for DMA
  12. * 2024-06-10 HPMicro Add the SPI pin settings
  13. */
  14. #include <rtthread.h>
  15. #ifdef BSP_USING_SPI
  16. #include <rtdevice.h>
  17. #include "board.h"
  18. #include "drv_spi.h"
  19. #include "hpm_spi_drv.h"
  20. #include "hpm_sysctl_drv.h"
  21. #include "hpm_dma_mgr.h"
  22. #include "hpm_dmamux_drv.h"
  23. #include "hpm_l1c_drv.h"
/* Per-instance SPI driver context: hardware handles, DMA resources and the
 * synchronization objects used by one SPI bus. */
struct hpm_spi
{
    uint32_t instance;                         /* instance index (not referenced in this file) */
    char *bus_name;                            /* RT-Thread bus name, e.g. "spi0" */
    SPI_Type *spi_base;                        /* SPI peripheral base address */
    spi_control_config_t control_config;       /* cached transfer control configuration */
    struct rt_spi_bus spi_bus;                 /* RT-Thread SPI bus object registered for this instance */
    rt_sem_t xfer_sem;                         /* created in rt_hw_spi_init; not otherwise used here */
    rt_bool_t enable_dma;                      /* RT_TRUE when the BSP enables DMA for this bus */
    rt_uint8_t tx_dmamux;                      /* DMAMUX request line for TX */
    rt_uint8_t rx_dmamux;                      /* DMAMUX request line for RX */
    dma_resource_t tx_dma;                     /* DMA channel resource for TX */
    dma_resource_t rx_dma;                     /* DMA channel resource for RX */
    rt_uint8_t spi_irq;                        /* SPI interrupt number */
    rt_sem_t spi_xfer_done_sem;                /* released by the SPI end-of-transfer ISR */
    rt_sem_t txdma_xfer_done_sem;              /* released by the TX DMA completion callback */
    rt_sem_t rxdma_xfer_done_sem;              /* released by the RX DMA completion callback */
    void (*spi_pins_init)(SPI_Type *spi_base); /* board-level pin-mux initializer */
};
static rt_err_t hpm_spi_configure(struct rt_spi_device *device, struct rt_spi_configuration *cfg);
static rt_ssize_t hpm_spi_xfer(struct rt_spi_device *device, struct rt_spi_message *msg);
/* Static table of SPI bus instances. Entries are compiled in only for the
 * controllers enabled by the BSP configuration; each entry selects its DMAMUX
 * request lines, IRQ number and pin-mux routine. */
static struct hpm_spi hpm_spis[] =
{
#if defined(BSP_USING_SPI0)
    {
        .bus_name = "spi0",
        .spi_base = HPM_SPI0,
#if defined(BSP_SPI0_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI0_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI0_RX,
        .spi_irq = IRQn_SPI0,
        /* A GPIO drives chip-select unless the BSP selects the hardware CS pin. */
#if !defined BSP_SPI0_USING_HARD_CS
        .spi_pins_init = init_spi_pins_with_gpio_as_cs,
#else
        .spi_pins_init = init_spi_pins,
#endif
    },
#endif
#if defined(BSP_USING_SPI1)
    {
        .bus_name = "spi1",
        .spi_base = HPM_SPI1,
#if defined(BSP_SPI1_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI1_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI1_RX,
        .spi_irq = IRQn_SPI1,
#if !defined BSP_SPI1_USING_HARD_CS
        .spi_pins_init = init_spi_pins_with_gpio_as_cs,
#else
        .spi_pins_init = init_spi_pins,
#endif
    },
#endif
#if defined(BSP_USING_SPI2)
    {
        .bus_name = "spi2",
        .spi_base = HPM_SPI2,
#if defined(BSP_SPI2_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI2_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI2_RX,
        .spi_irq = IRQn_SPI2,
#if !defined BSP_SPI2_USING_HARD_CS
        .spi_pins_init = init_spi_pins_with_gpio_as_cs,
#else
        .spi_pins_init = init_spi_pins,
#endif
    },
#endif
#if defined(BSP_USING_SPI3)
    {
        .bus_name = "spi3",
        .spi_base = HPM_SPI3,
#if defined(BSP_SPI3_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI3_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI3_RX,
        .spi_irq = IRQn_SPI3,
#if !defined BSP_SPI3_USING_HARD_CS
        .spi_pins_init = init_spi_pins_with_gpio_as_cs,
#else
        .spi_pins_init = init_spi_pins,
#endif
    },
#endif
#if defined(BSP_USING_SPI4)
    {
        .bus_name = "spi4",
        .spi_base = HPM_SPI4,
#if defined(BSP_SPI4_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI4_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI4_RX,
        .spi_irq = IRQn_SPI4,
#if !defined BSP_SPI4_USING_HARD_CS
        .spi_pins_init = init_spi_pins_with_gpio_as_cs,
#else
        .spi_pins_init = init_spi_pins,
#endif
    },
#endif
#if defined(BSP_USING_SPI5)
    {
        .bus_name = "spi5",
        .spi_base = HPM_SPI5,
#if defined(BSP_SPI5_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI5_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI5_RX,
        .spi_irq = IRQn_SPI5,
#if !defined BSP_SPI5_USING_HARD_CS
        .spi_pins_init = init_spi_pins_with_gpio_as_cs,
#else
        .spi_pins_init = init_spi_pins,
#endif
    },
#endif
#if defined(BSP_USING_SPI6)
    {
        .bus_name = "spi6",
        .spi_base = HPM_SPI6,
#if defined(BSP_SPI6_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI6_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI6_RX,
        .spi_irq = IRQn_SPI6,
#if !defined BSP_SPI6_USING_HARD_CS
        .spi_pins_init = init_spi_pins_with_gpio_as_cs,
#else
        .spi_pins_init = init_spi_pins,
#endif
    },
#endif
#if defined(BSP_USING_SPI7)
    {
        .bus_name = "spi7",
        .spi_base = HPM_SPI7,
#if defined(BSP_SPI7_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI7_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI7_RX,
        .spi_irq = IRQn_SPI7,
#if !defined BSP_SPI7_USING_HARD_CS
        .spi_pins_init = init_spi_pins_with_gpio_as_cs,
#else
        .spi_pins_init = init_spi_pins,
#endif
    },
#endif
};
  184. static struct rt_spi_ops hpm_spi_ops =
  185. {
  186. .configure = hpm_spi_configure,
  187. .xfer = hpm_spi_xfer,
  188. };
  189. static inline void handle_spi_isr(SPI_Type *ptr)
  190. {
  191. volatile uint32_t irq_status;
  192. RT_ASSERT(ptr != RT_NULL);
  193. rt_base_t level;
  194. level = rt_hw_interrupt_disable();
  195. irq_status = spi_get_interrupt_status(ptr);
  196. if (irq_status & spi_end_int)
  197. {
  198. for (uint32_t i = 0; i < sizeof(hpm_spis) / sizeof(hpm_spis[0]); i++)
  199. {
  200. if (hpm_spis[i].spi_base == ptr)
  201. {
  202. rt_sem_release(hpm_spis[i].spi_xfer_done_sem);
  203. }
  204. }
  205. spi_disable_interrupt(ptr, spi_end_int);
  206. spi_clear_interrupt_status(ptr, spi_end_int);
  207. }
  208. rt_hw_interrupt_enable(level);
  209. }
  210. #if defined(BSP_USING_SPI0)
  211. void spi0_isr(void)
  212. {
  213. handle_spi_isr(HPM_SPI0);
  214. }
  215. SDK_DECLARE_EXT_ISR_M(IRQn_SPI0, spi0_isr);
  216. #endif
  217. #if defined(BSP_USING_SPI1)
  218. void spi1_isr(void)
  219. {
  220. handle_spi_isr(HPM_SPI1);
  221. }
  222. SDK_DECLARE_EXT_ISR_M(IRQn_SPI1, spi1_isr);
  223. #endif
  224. #if defined(BSP_USING_SPI2)
  225. void spi2_isr(void)
  226. {
  227. handle_spi_isr(HPM_SPI2);
  228. }
  229. SDK_DECLARE_EXT_ISR_M(IRQn_SPI2, spi2_isr);
  230. #endif
  231. #if defined(BSP_USING_SPI3)
  232. void spi3_isr(void)
  233. {
  234. handle_spi_isr(HPM_SPI3);
  235. }
  236. SDK_DECLARE_EXT_ISR_M(IRQn_SPI3, spi3_isr);
  237. #endif
  238. void spi_dma_channel_tc_callback(DMA_Type *ptr, uint32_t channel, void *user_data)
  239. {
  240. struct hpm_spi *spi = (struct hpm_spi *)user_data;
  241. RT_ASSERT(spi != RT_NULL);
  242. RT_ASSERT(ptr != RT_NULL);
  243. rt_base_t level;
  244. level = rt_hw_interrupt_disable();
  245. if ((spi->tx_dma.base == ptr) && spi->tx_dma.channel == channel)
  246. {
  247. dma_mgr_disable_chn_irq(&spi->tx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  248. rt_sem_release(spi->txdma_xfer_done_sem);
  249. }
  250. if ((spi->rx_dma.base == ptr) && spi->rx_dma.channel == channel)
  251. {
  252. dma_mgr_disable_chn_irq(&spi->rx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  253. rt_sem_release(spi->rxdma_xfer_done_sem);
  254. }
  255. rt_hw_interrupt_enable(level);
  256. }
  257. static rt_err_t hpm_spi_configure(struct rt_spi_device *device, struct rt_spi_configuration *cfg)
  258. {
  259. spi_timing_config_t timing_config = { 0 };
  260. spi_format_config_t format_config = { 0 };
  261. struct hpm_spi *spi = RT_NULL;
  262. spi = (struct hpm_spi *) (device->bus->parent.user_data);
  263. RT_ASSERT(spi != RT_NULL);
  264. if (cfg->data_width != 8 && cfg->data_width != 16 && cfg->data_width != 32)
  265. {
  266. return -RT_EINVAL;
  267. }
  268. spi_master_get_default_timing_config(&timing_config);
  269. spi_master_get_default_format_config(&format_config);
  270. spi->spi_pins_init(spi->spi_base);
  271. timing_config.master_config.clk_src_freq_in_hz = board_init_spi_clock(spi->spi_base);
  272. format_config.common_config.data_len_in_bits = cfg->data_width;
  273. format_config.common_config.cpha = cfg->mode & RT_SPI_CPHA ? 1 : 0;
  274. format_config.common_config.cpol = cfg->mode & RT_SPI_CPOL ? 1 : 0;
  275. format_config.common_config.lsb = cfg->mode & RT_SPI_MSB ? false : true;
  276. format_config.common_config.mosi_bidir = cfg->mode & RT_SPI_3WIRE ? true : false;
  277. spi_format_init(spi->spi_base, &format_config);
  278. if (cfg->max_hz > timing_config.master_config.clk_src_freq_in_hz)
  279. {
  280. cfg->max_hz = timing_config.master_config.clk_src_freq_in_hz;
  281. }
  282. timing_config.master_config.sclk_freq_in_hz = cfg->max_hz;
  283. spi_master_timing_init(spi->spi_base, &timing_config);
  284. spi_master_get_default_control_config(&spi->control_config);
  285. spi->control_config.master_config.addr_enable = false;
  286. spi->control_config.master_config.cmd_enable = false;
  287. spi->control_config.master_config.token_enable = false;
  288. spi->control_config.common_config.trans_mode = spi_trans_write_read_together;
  289. return RT_EOK;
  290. }
  291. static hpm_stat_t hpm_spi_xfer_polling(struct rt_spi_device *device, struct rt_spi_message *msg)
  292. {
  293. struct hpm_spi *spi = (struct hpm_spi *) (device->bus->parent.user_data);
  294. hpm_stat_t spi_stat = status_success;
  295. uint32_t remaining_size = msg->length;
  296. uint32_t transfer_len;
  297. uint8_t *tx_buf = (uint8_t*) msg->send_buf;
  298. uint8_t *rx_buf = (uint8_t*) msg->recv_buf;
  299. while (remaining_size > 0)
  300. {
  301. transfer_len = MIN(SPI_SOC_TRANSFER_COUNT_MAX, remaining_size);
  302. spi->control_config.common_config.tx_dma_enable = false;
  303. spi->control_config.common_config.rx_dma_enable = false;
  304. if (msg->send_buf != NULL && msg->recv_buf != NULL)
  305. {
  306. spi->control_config.common_config.trans_mode = spi_trans_write_read_together;
  307. spi_stat = spi_transfer(spi->spi_base, &spi->control_config,
  308. NULL,
  309. NULL, tx_buf, transfer_len, rx_buf, transfer_len);
  310. }
  311. else if (msg->send_buf != NULL)
  312. {
  313. spi->control_config.common_config.trans_mode = spi_trans_write_only;
  314. spi_stat = spi_transfer(spi->spi_base, &spi->control_config,
  315. NULL,
  316. NULL, (uint8_t*) tx_buf, transfer_len,
  317. NULL, 0);
  318. }
  319. else
  320. {
  321. spi->control_config.common_config.trans_mode = spi_trans_read_only;
  322. spi_stat = spi_transfer(spi->spi_base, &spi->control_config,
  323. NULL,
  324. NULL,
  325. NULL, 0, rx_buf, transfer_len);
  326. }
  327. if (spi_stat != status_success)
  328. {
  329. break;
  330. }
  331. if (tx_buf != NULL)
  332. {
  333. tx_buf += transfer_len;
  334. }
  335. if (rx_buf != NULL)
  336. {
  337. rx_buf += transfer_len;
  338. }
  339. remaining_size -= transfer_len;
  340. }
  341. return spi_stat;
  342. }
  343. hpm_stat_t spi_tx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t src, uint8_t data_width, uint32_t size)
  344. {
  345. dma_handshake_config_t config;
  346. config.ch_index = ch_num;
  347. config.dst = (uint32_t)&spi_ptr->DATA;
  348. config.dst_fixed = true;
  349. config.src = src;
  350. config.src_fixed = false;
  351. config.data_width = data_width;
  352. config.size_in_byte = size;
  353. return dma_setup_handshake(dma_ptr, &config, true);
  354. }
  355. hpm_stat_t spi_rx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t dst, uint8_t data_width, uint32_t size)
  356. {
  357. dma_handshake_config_t config;
  358. config.ch_index = ch_num;
  359. config.dst = dst;
  360. config.dst_fixed = false;
  361. config.src = (uint32_t)&spi_ptr->DATA;
  362. config.src_fixed = true;
  363. config.data_width = data_width;
  364. config.size_in_byte = size;
  365. return dma_setup_handshake(dma_ptr, &config, true);
  366. }
  367. static rt_uint32_t hpm_spi_xfer_dma(struct rt_spi_device *device, struct rt_spi_message *msg)
  368. {
  369. struct hpm_spi *spi = (struct hpm_spi *) (device->bus->parent.user_data);
  370. hpm_stat_t spi_stat = status_success;
  371. uint32_t remaining_size = msg->length;
  372. uint32_t transfer_len;
  373. uint8_t *raw_alloc_tx_buf = RT_NULL;
  374. uint8_t *raw_alloc_rx_buf = RT_NULL;
  375. uint8_t *aligned_tx_buf = RT_NULL;
  376. uint8_t *aligned_rx_buf = RT_NULL;
  377. uint32_t aligned_len = 0;
  378. if (msg->length <= 0)
  379. {
  380. return status_invalid_argument;
  381. }
  382. aligned_len = (msg->length + HPM_L1C_CACHELINE_SIZE - 1U) & ~(HPM_L1C_CACHELINE_SIZE - 1U);
  383. if (msg->send_buf != RT_NULL)
  384. {
  385. if (l1c_dc_is_enabled())
  386. {
  387. /* The allocated pointer is always RT_ALIGN_SIZE aligned */
  388. raw_alloc_tx_buf = (uint8_t*)rt_malloc(aligned_len + HPM_L1C_CACHELINE_SIZE - RT_ALIGN_SIZE);
  389. RT_ASSERT(raw_alloc_tx_buf != RT_NULL);
  390. aligned_tx_buf = (uint8_t*)HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)raw_alloc_tx_buf);
  391. rt_memcpy(aligned_tx_buf, msg->send_buf, msg->length);
  392. l1c_dc_flush((uint32_t) aligned_tx_buf, aligned_len);
  393. }
  394. else
  395. {
  396. aligned_tx_buf = (uint8_t*) msg->send_buf;
  397. }
  398. }
  399. if (msg->recv_buf != RT_NULL)
  400. {
  401. if (l1c_dc_is_enabled())
  402. {
  403. /* The allocated pointer is always RT_ALIGN_SIZE aligned */
  404. raw_alloc_rx_buf = (uint8_t*)rt_malloc(aligned_len + HPM_L1C_CACHELINE_SIZE - RT_ALIGN_SIZE);
  405. RT_ASSERT(raw_alloc_rx_buf != RT_NULL);
  406. aligned_rx_buf = (uint8_t*)HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)raw_alloc_rx_buf);
  407. }
  408. else
  409. {
  410. aligned_rx_buf = msg->recv_buf;
  411. }
  412. }
  413. uint8_t *tx_buf = aligned_tx_buf;
  414. uint8_t *rx_buf = aligned_rx_buf;
  415. uint32_t core_id = read_csr(CSR_MHARTID);
  416. spi->spi_base->CTRL &= ~(SPI_CTRL_TXDMAEN_MASK | SPI_CTRL_RXDMAEN_MASK);
  417. spi->control_config.common_config.tx_dma_enable = false;
  418. spi->control_config.common_config.rx_dma_enable = false;
  419. spi_disable_interrupt(spi->spi_base, spi_end_int);
  420. while (remaining_size > 0)
  421. {
  422. transfer_len = MIN(SPI_SOC_TRANSFER_COUNT_MAX, remaining_size);
  423. if (msg->send_buf != NULL && msg->recv_buf != NULL)
  424. {
  425. spi_enable_interrupt(spi->spi_base, spi_end_int);
  426. spi->control_config.common_config.tx_dma_enable = true;
  427. spi->control_config.common_config.rx_dma_enable = true;
  428. spi->control_config.common_config.trans_mode = spi_trans_write_read_together;
  429. spi_stat = spi_setup_dma_transfer(spi->spi_base, &spi->control_config, NULL, NULL, transfer_len,
  430. transfer_len);
  431. if (spi_stat != status_success)
  432. {
  433. break;
  434. }
  435. dmamux_config(HPM_DMAMUX, spi->tx_dma.channel, spi->tx_dmamux, true);
  436. spi_stat = spi_tx_trigger_dma(spi->tx_dma.base, spi->tx_dma.channel, spi->spi_base,
  437. core_local_mem_to_sys_address(core_id, (uint32_t) tx_buf),
  438. DMA_TRANSFER_WIDTH_BYTE, transfer_len);
  439. /* setup spi rx trigger dma transfer*/
  440. dmamux_config(HPM_DMAMUX, spi->rx_dma.channel, spi->rx_dmamux, true);
  441. spi_stat = spi_rx_trigger_dma(spi->rx_dma.base, spi->rx_dma.channel, spi->spi_base,
  442. core_local_mem_to_sys_address(core_id, (uint32_t) rx_buf),
  443. DMA_TRANSFER_WIDTH_BYTE, transfer_len);
  444. if (spi_stat != status_success)
  445. {
  446. break;
  447. }
  448. dma_mgr_enable_chn_irq(&spi->tx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  449. dma_mgr_enable_chn_irq(&spi->rx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  450. rt_sem_take(spi->spi_xfer_done_sem, RT_WAITING_FOREVER);
  451. rt_sem_take(spi->txdma_xfer_done_sem, RT_WAITING_FOREVER);
  452. rt_sem_take(spi->rxdma_xfer_done_sem, RT_WAITING_FOREVER);
  453. }
  454. else if (msg->send_buf != NULL)
  455. {
  456. spi_enable_interrupt(spi->spi_base, spi_end_int);
  457. spi->control_config.common_config.tx_dma_enable = true;
  458. spi->control_config.common_config.trans_mode = spi_trans_write_only;
  459. spi_stat = spi_setup_dma_transfer(spi->spi_base, &spi->control_config, NULL, NULL, transfer_len, 0);
  460. if (spi_stat != status_success)
  461. {
  462. break;
  463. }
  464. dmamux_config(HPM_DMAMUX, spi->tx_dma.channel, spi->tx_dmamux, true);
  465. spi_stat = spi_tx_trigger_dma(spi->tx_dma.base, spi->tx_dma.channel, spi->spi_base,
  466. core_local_mem_to_sys_address(core_id, (uint32_t) tx_buf),
  467. DMA_TRANSFER_WIDTH_BYTE, transfer_len);
  468. if (spi_stat != status_success)
  469. {
  470. break;
  471. }
  472. dma_mgr_enable_chn_irq(&spi->tx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  473. rt_sem_take(spi->spi_xfer_done_sem, RT_WAITING_FOREVER);
  474. rt_sem_take(spi->txdma_xfer_done_sem, RT_WAITING_FOREVER);
  475. }
  476. else
  477. {
  478. spi->control_config.common_config.rx_dma_enable = true;
  479. spi->control_config.common_config.trans_mode = spi_trans_read_only;
  480. spi_stat = spi_setup_dma_transfer(spi->spi_base, &spi->control_config, NULL, NULL, 0, transfer_len);
  481. if (spi_stat != status_success)
  482. {
  483. break;
  484. }
  485. /* setup spi rx trigger dma transfer*/
  486. dmamux_config(HPM_DMAMUX, spi->rx_dma.channel, spi->rx_dmamux, true);
  487. spi_stat = spi_rx_trigger_dma(spi->rx_dma.base, spi->rx_dma.channel, spi->spi_base,
  488. core_local_mem_to_sys_address(core_id, (uint32_t) rx_buf),
  489. DMA_TRANSFER_WIDTH_BYTE, transfer_len);
  490. if (spi_stat != status_success)
  491. {
  492. break;
  493. }
  494. spi_enable_interrupt(spi->spi_base, spi_end_int);
  495. dma_mgr_enable_chn_irq(&spi->rx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  496. rt_sem_take(spi->spi_xfer_done_sem, RT_WAITING_FOREVER);
  497. rt_sem_take(spi->rxdma_xfer_done_sem, RT_WAITING_FOREVER);
  498. }
  499. if (tx_buf != NULL)
  500. {
  501. tx_buf += transfer_len;
  502. }
  503. if (rx_buf != NULL)
  504. {
  505. rx_buf += transfer_len;
  506. }
  507. remaining_size -= transfer_len;
  508. spi->spi_base->CTRL &= ~(SPI_CTRL_TXDMAEN_MASK | SPI_CTRL_RXDMAEN_MASK);
  509. }
  510. if (l1c_dc_is_enabled() && (msg->length > 0))
  511. {
  512. /* cache invalidate for receive buff */
  513. if (aligned_tx_buf != RT_NULL)
  514. {
  515. rt_free(raw_alloc_tx_buf);
  516. raw_alloc_tx_buf = RT_NULL;
  517. aligned_tx_buf = RT_NULL;
  518. }
  519. if (aligned_rx_buf != RT_NULL)
  520. {
  521. l1c_dc_invalidate((uint32_t) aligned_rx_buf, aligned_len);
  522. rt_memcpy(msg->recv_buf, aligned_rx_buf, msg->length);
  523. rt_free(raw_alloc_rx_buf);
  524. raw_alloc_rx_buf = RT_NULL;
  525. aligned_rx_buf = RT_NULL;
  526. }
  527. }
  528. return spi_stat;
  529. }
  530. static rt_ssize_t hpm_spi_xfer(struct rt_spi_device *device, struct rt_spi_message *msg)
  531. {
  532. RT_ASSERT(device != RT_NULL);
  533. RT_ASSERT(msg != RT_NULL);
  534. RT_ASSERT(device->bus != RT_NULL);
  535. RT_ASSERT(device->bus->parent.user_data != RT_NULL);
  536. cs_ctrl_callback_t cs_pin_control = (cs_ctrl_callback_t) device->parent.user_data;
  537. struct hpm_spi *spi = (struct hpm_spi *) (device->bus->parent.user_data);
  538. hpm_stat_t spi_stat = status_success;
  539. if ((cs_pin_control != NULL) && msg->cs_take)
  540. {
  541. cs_pin_control(SPI_CS_TAKE);
  542. }
  543. if (spi->enable_dma)
  544. {
  545. spi_stat = hpm_spi_xfer_dma(device, msg);
  546. }
  547. else
  548. {
  549. spi_stat = hpm_spi_xfer_polling(device, msg);
  550. }
  551. if (spi_stat != status_success)
  552. {
  553. msg->length = 0;
  554. }
  555. if ((cs_pin_control != NULL) && msg->cs_release)
  556. {
  557. cs_pin_control(SPI_CS_RELEASE);
  558. }
  559. return msg->length;
  560. }
  561. rt_err_t rt_hw_spi_device_attach(const char *bus_name, const char *device_name, cs_ctrl_callback_t callback)
  562. {
  563. RT_ASSERT(bus_name != RT_NULL);
  564. RT_ASSERT(device_name != RT_NULL);
  565. rt_err_t result;
  566. struct rt_spi_device *spi_device;
  567. /* attach the device to spi bus*/
  568. spi_device = (struct rt_spi_device *) rt_malloc(sizeof(struct rt_spi_device));
  569. RT_ASSERT(spi_device != RT_NULL);
  570. result = rt_spi_bus_attach_device(spi_device, device_name, bus_name, (void*)callback);
  571. RT_ASSERT(result == RT_EOK);
  572. return result;
  573. }
  574. int rt_hw_spi_init(void)
  575. {
  576. rt_err_t ret = RT_EOK;
  577. hpm_stat_t stat;
  578. for (uint32_t i = 0; i < sizeof(hpm_spis) / sizeof(hpm_spis[0]); i++)
  579. {
  580. struct hpm_spi *spi = &hpm_spis[i];
  581. spi->spi_bus.parent.user_data = spi;
  582. if (spi->enable_dma)
  583. {
  584. stat = dma_mgr_request_resource(&spi->tx_dma);
  585. dma_mgr_install_chn_tc_callback(&spi->tx_dma, spi_dma_channel_tc_callback, (void *)&hpm_spis[i]);
  586. if (stat != status_success)
  587. {
  588. return -RT_ERROR;
  589. }
  590. stat = dma_mgr_request_resource(&spi->rx_dma);
  591. dma_mgr_install_chn_tc_callback(&spi->rx_dma, spi_dma_channel_tc_callback, (void *)&hpm_spis[i]);
  592. if (stat != status_success)
  593. {
  594. return -RT_ERROR;
  595. }
  596. intc_m_enable_irq_with_priority(hpm_spis[i].spi_irq, 2);
  597. dma_mgr_enable_dma_irq_with_priority(&spi->tx_dma, 1);
  598. dma_mgr_enable_dma_irq_with_priority(&spi->rx_dma, 1);
  599. }
  600. ret = rt_spi_bus_register(&spi->spi_bus, spi->bus_name, &hpm_spi_ops);
  601. if (ret != RT_EOK)
  602. {
  603. break;
  604. }
  605. char sem_name[RT_NAME_MAX];
  606. rt_sprintf(sem_name, "%s_s", hpm_spis[i].bus_name);
  607. hpm_spis[i].xfer_sem = rt_sem_create(sem_name, 0, RT_IPC_FLAG_PRIO);
  608. if (hpm_spis[i].xfer_sem == RT_NULL)
  609. {
  610. ret = RT_ENOMEM;
  611. break;
  612. }
  613. rt_sprintf(sem_name, "%s_ds", hpm_spis[i].bus_name);
  614. hpm_spis[i].spi_xfer_done_sem = rt_sem_create(sem_name, 0, RT_IPC_FLAG_PRIO);
  615. if (hpm_spis[i].spi_xfer_done_sem == RT_NULL)
  616. {
  617. ret = RT_ENOMEM;
  618. break;
  619. }
  620. rt_sprintf(sem_name, "%s_rds", hpm_spis[i].bus_name);
  621. hpm_spis[i].rxdma_xfer_done_sem = rt_sem_create(sem_name, 0, RT_IPC_FLAG_PRIO);
  622. if (hpm_spis[i].rxdma_xfer_done_sem == RT_NULL)
  623. {
  624. ret = RT_ENOMEM;
  625. break;
  626. }
  627. rt_sprintf(sem_name, "%s_tds", hpm_spis[i].bus_name);
  628. hpm_spis[i].txdma_xfer_done_sem = rt_sem_create(sem_name, 0, RT_IPC_FLAG_PRIO);
  629. if (hpm_spis[i].txdma_xfer_done_sem == RT_NULL)
  630. {
  631. ret = RT_ENOMEM;
  632. break;
  633. }
  634. }
  635. return ret;
  636. }
  637. INIT_BOARD_EXPORT(rt_hw_spi_init);
  638. #endif /*BSP_USING_SPI*/