drv_spi.c 20 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623
  1. /*
  2. * Copyright (c) 2021-2023 HPMicro
  3. *
  4. * SPDX-License-Identifier: BSD-3-Clause
  5. *
  6. * Change Logs:
  7. * Date Author Notes
  8. * 2022-02-01 HPMicro First version
  9. * 2023-02-15 HPMicro Add DMA support
  10. * 2023-07-14 HPMicro Manage the DMA buffer alignment in driver
  11. * 2023-12-14 HPMicro change state blocking wait to interrupt semaphore wait for DMA
  12. */
  13. #include <rtthread.h>
  14. #ifdef BSP_USING_SPI
  15. #include <rtdevice.h>
  16. #include "board.h"
  17. #include "drv_spi.h"
  18. #include "hpm_spi_drv.h"
  19. #include "hpm_sysctl_drv.h"
  20. #include "hpm_dma_mgr.h"
  21. #include "hpm_dmamux_drv.h"
  22. #include "hpm_l1c_drv.h"
/* Per-instance SPI bus context: peripheral handle, DMA resources and the
 * synchronization semaphores used by the transfer paths below. One entry
 * exists per enabled BSP_USING_SPIx (see hpm_spis[]). */
struct hpm_spi
{
    uint32_t instance;                   /* instance index; not referenced in this file */
    char *bus_name;                      /* RT-Thread bus name, e.g. "spi0" */
    SPI_Type *spi_base;                  /* SPI peripheral base address */
    spi_control_config_t control_config; /* cached control config, filled in hpm_spi_configure() */
    struct rt_spi_bus spi_bus;           /* RT-Thread SPI bus object registered in rt_hw_spi_init() */
    rt_sem_t xfer_sem;                   /* created in rt_hw_spi_init(); not taken in this file */
    rt_bool_t enable_dma;                /* RT_TRUE when BSP_SPIx_USING_DMA is defined */
    rt_uint8_t tx_dmamux;                /* DMAMUX request source for TX */
    rt_uint8_t rx_dmamux;                /* DMAMUX request source for RX */
    dma_resource_t tx_dma;               /* TX DMA channel claimed from dma_mgr */
    dma_resource_t rx_dma;               /* RX DMA channel claimed from dma_mgr */
    rt_uint8_t spi_irq;                  /* SPI interrupt number */
    rt_sem_t spi_xfer_done_sem;          /* released by the SPI end-of-transfer ISR */
    rt_sem_t txdma_xfer_done_sem;        /* released by the TX DMA transfer-complete callback */
    rt_sem_t rxdma_xfer_done_sem;        /* released by the RX DMA transfer-complete callback */
};
/* Forward declarations for the rt_spi_ops table below. */
static rt_err_t hpm_spi_configure(struct rt_spi_device *device, struct rt_spi_configuration *cfg);
static rt_ssize_t hpm_spi_xfer(struct rt_spi_device *device, struct rt_spi_message *msg);
/* Static table of SPI bus instances, one entry per enabled BSP_USING_SPIx.
 * The DMA fields are only consulted when the matching BSP_SPIx_USING_DMA
 * option sets enable_dma; otherwise the polling transfer path is used. */
static struct hpm_spi hpm_spis[] =
{
#if defined(BSP_USING_SPI0)
    {
        .bus_name = "spi0",
        .spi_base = HPM_SPI0,
#if defined(BSP_SPI0_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI0_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI0_RX,
        .spi_irq = IRQn_SPI0,
    },
#endif
#if defined(BSP_USING_SPI1)
    {
        .bus_name = "spi1",
        .spi_base = HPM_SPI1,
#if defined(BSP_SPI1_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI1_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI1_RX,
        .spi_irq = IRQn_SPI1,
    },
#endif
#if defined(BSP_USING_SPI2)
    {
        .bus_name = "spi2",
        .spi_base = HPM_SPI2,
#if defined(BSP_SPI2_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI2_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI2_RX,
        .spi_irq = IRQn_SPI2,
    },
#endif
#if defined(BSP_USING_SPI3)
    {
        .bus_name = "spi3",
        .spi_base = HPM_SPI3,
#if defined(BSP_SPI3_USING_DMA)
        .enable_dma = RT_TRUE,
#endif
        .tx_dmamux = HPM_DMA_SRC_SPI3_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI3_RX,
        .spi_irq = IRQn_SPI3,
    },
#endif
};
/* Operations table handed to rt_spi_bus_register() for every bus. */
static struct rt_spi_ops hpm_spi_ops =
{
    .configure = hpm_spi_configure,
    .xfer = hpm_spi_xfer,
};
  99. static inline void handle_spi_isr(SPI_Type *ptr)
  100. {
  101. volatile uint32_t irq_status;
  102. RT_ASSERT(ptr != RT_NULL);
  103. rt_base_t level;
  104. level = rt_hw_interrupt_disable();
  105. irq_status = spi_get_interrupt_status(ptr);
  106. if (irq_status & spi_end_int)
  107. {
  108. for (uint32_t i = 0; i < sizeof(hpm_spis) / sizeof(hpm_spis[0]); i++)
  109. {
  110. if (hpm_spis[i].spi_base == ptr)
  111. {
  112. rt_sem_release(hpm_spis[i].spi_xfer_done_sem);
  113. }
  114. }
  115. spi_disable_interrupt(ptr, spi_end_int);
  116. spi_clear_interrupt_status(ptr, spi_end_int);
  117. }
  118. rt_hw_interrupt_enable(level);
  119. }
/* Per-instance ISR trampolines: each forwards to the shared handler with its
 * own peripheral base and is registered with the vector table through
 * SDK_DECLARE_EXT_ISR_M. */
#if defined(BSP_USING_SPI0)
void spi0_isr(void)
{
    handle_spi_isr(HPM_SPI0);
}
SDK_DECLARE_EXT_ISR_M(IRQn_SPI0, spi0_isr);
#endif
#if defined(BSP_USING_SPI1)
void spi1_isr(void)
{
    handle_spi_isr(HPM_SPI1);
}
SDK_DECLARE_EXT_ISR_M(IRQn_SPI1, spi1_isr);
#endif
#if defined(BSP_USING_SPI2)
void spi2_isr(void)
{
    handle_spi_isr(HPM_SPI2);
}
SDK_DECLARE_EXT_ISR_M(IRQn_SPI2, spi2_isr);
#endif
#if defined(BSP_USING_SPI3)
void spi3_isr(void)
{
    handle_spi_isr(HPM_SPI3);
}
SDK_DECLARE_EXT_ISR_M(IRQn_SPI3, spi3_isr);
#endif
  148. void spi_dma_channel_tc_callback(DMA_Type *ptr, uint32_t channel, void *user_data)
  149. {
  150. struct hpm_spi *spi = (struct hpm_spi *)user_data;
  151. RT_ASSERT(spi != RT_NULL);
  152. RT_ASSERT(ptr != RT_NULL);
  153. rt_base_t level;
  154. level = rt_hw_interrupt_disable();
  155. if ((spi->tx_dma.base == ptr) && spi->tx_dma.channel == channel)
  156. {
  157. dma_mgr_disable_chn_irq(&spi->tx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  158. rt_sem_release(spi->txdma_xfer_done_sem);
  159. }
  160. if ((spi->rx_dma.base == ptr) && spi->rx_dma.channel == channel)
  161. {
  162. dma_mgr_disable_chn_irq(&spi->rx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  163. rt_sem_release(spi->rxdma_xfer_done_sem);
  164. }
  165. rt_hw_interrupt_enable(level);
  166. }
  167. static rt_err_t hpm_spi_configure(struct rt_spi_device *device, struct rt_spi_configuration *cfg)
  168. {
  169. spi_timing_config_t timing_config = { 0 };
  170. spi_format_config_t format_config = { 0 };
  171. struct hpm_spi *spi = RT_NULL;
  172. spi = (struct hpm_spi *) (device->bus->parent.user_data);
  173. RT_ASSERT(spi != RT_NULL);
  174. if (cfg->data_width != 8 && cfg->data_width != 16 && cfg->data_width != 32)
  175. {
  176. return -RT_EINVAL;
  177. }
  178. spi_master_get_default_timing_config(&timing_config);
  179. spi_master_get_default_format_config(&format_config);
  180. init_spi_pins_with_gpio_as_cs(spi->spi_base);
  181. timing_config.master_config.clk_src_freq_in_hz = board_init_spi_clock(spi->spi_base);
  182. format_config.common_config.data_len_in_bits = cfg->data_width;
  183. format_config.common_config.cpha = cfg->mode & RT_SPI_CPHA ? 1 : 0;
  184. format_config.common_config.cpol = cfg->mode & RT_SPI_CPOL ? 1 : 0;
  185. format_config.common_config.lsb = cfg->mode & RT_SPI_MSB ? false : true;
  186. format_config.common_config.mosi_bidir = cfg->mode & RT_SPI_3WIRE ? true : false;
  187. spi_format_init(spi->spi_base, &format_config);
  188. if (cfg->max_hz > timing_config.master_config.clk_src_freq_in_hz)
  189. {
  190. cfg->max_hz = timing_config.master_config.clk_src_freq_in_hz;
  191. }
  192. timing_config.master_config.sclk_freq_in_hz = cfg->max_hz;
  193. spi_master_timing_init(spi->spi_base, &timing_config);
  194. spi_master_get_default_control_config(&spi->control_config);
  195. spi->control_config.master_config.addr_enable = false;
  196. spi->control_config.master_config.cmd_enable = false;
  197. spi->control_config.master_config.token_enable = false;
  198. spi->control_config.common_config.trans_mode = spi_trans_write_read_together;
  199. return RT_EOK;
  200. }
  201. static hpm_stat_t hpm_spi_xfer_polling(struct rt_spi_device *device, struct rt_spi_message *msg)
  202. {
  203. struct hpm_spi *spi = (struct hpm_spi *) (device->bus->parent.user_data);
  204. hpm_stat_t spi_stat = status_success;
  205. uint32_t remaining_size = msg->length;
  206. uint32_t transfer_len;
  207. uint8_t *tx_buf = (uint8_t*) msg->send_buf;
  208. uint8_t *rx_buf = (uint8_t*) msg->recv_buf;
  209. while (remaining_size > 0)
  210. {
  211. transfer_len = MIN(SPI_SOC_TRANSFER_COUNT_MAX, remaining_size);
  212. spi->control_config.common_config.tx_dma_enable = false;
  213. spi->control_config.common_config.rx_dma_enable = false;
  214. if (msg->send_buf != NULL && msg->recv_buf != NULL)
  215. {
  216. spi->control_config.common_config.trans_mode = spi_trans_write_read_together;
  217. spi_stat = spi_transfer(spi->spi_base, &spi->control_config,
  218. NULL,
  219. NULL, tx_buf, transfer_len, rx_buf, transfer_len);
  220. }
  221. else if (msg->send_buf != NULL)
  222. {
  223. spi->control_config.common_config.trans_mode = spi_trans_write_only;
  224. spi_stat = spi_transfer(spi->spi_base, &spi->control_config,
  225. NULL,
  226. NULL, (uint8_t*) tx_buf, transfer_len,
  227. NULL, 0);
  228. }
  229. else
  230. {
  231. spi->control_config.common_config.trans_mode = spi_trans_read_only;
  232. spi_stat = spi_transfer(spi->spi_base, &spi->control_config,
  233. NULL,
  234. NULL,
  235. NULL, 0, rx_buf, transfer_len);
  236. }
  237. if (spi_stat != status_success)
  238. {
  239. break;
  240. }
  241. if (tx_buf != NULL)
  242. {
  243. tx_buf += transfer_len;
  244. }
  245. if (rx_buf != NULL)
  246. {
  247. rx_buf += transfer_len;
  248. }
  249. remaining_size -= transfer_len;
  250. }
  251. return spi_stat;
  252. }
  253. hpm_stat_t spi_tx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t src, uint8_t data_width, uint32_t size)
  254. {
  255. dma_handshake_config_t config;
  256. config.ch_index = ch_num;
  257. config.dst = (uint32_t)&spi_ptr->DATA;
  258. config.dst_fixed = true;
  259. config.src = src;
  260. config.src_fixed = false;
  261. config.data_width = data_width;
  262. config.size_in_byte = size;
  263. return dma_setup_handshake(dma_ptr, &config, true);
  264. }
  265. hpm_stat_t spi_rx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t dst, uint8_t data_width, uint32_t size)
  266. {
  267. dma_handshake_config_t config;
  268. config.ch_index = ch_num;
  269. config.dst = dst;
  270. config.dst_fixed = false;
  271. config.src = (uint32_t)&spi_ptr->DATA;
  272. config.src_fixed = true;
  273. config.data_width = data_width;
  274. config.size_in_byte = size;
  275. return dma_setup_handshake(dma_ptr, &config, true);
  276. }
  277. static rt_uint32_t hpm_spi_xfer_dma(struct rt_spi_device *device, struct rt_spi_message *msg)
  278. {
  279. struct hpm_spi *spi = (struct hpm_spi *) (device->bus->parent.user_data);
  280. hpm_stat_t spi_stat = status_success;
  281. uint32_t remaining_size = msg->length;
  282. uint32_t transfer_len;
  283. uint8_t *raw_alloc_tx_buf = RT_NULL;
  284. uint8_t *raw_alloc_rx_buf = RT_NULL;
  285. uint8_t *aligned_tx_buf = RT_NULL;
  286. uint8_t *aligned_rx_buf = RT_NULL;
  287. uint32_t aligned_len = 0;
  288. if (msg->length <= 0)
  289. {
  290. return status_invalid_argument;
  291. }
  292. aligned_len = (msg->length + HPM_L1C_CACHELINE_SIZE - 1U) & ~(HPM_L1C_CACHELINE_SIZE - 1U);
  293. if (msg->send_buf != RT_NULL)
  294. {
  295. if (l1c_dc_is_enabled())
  296. {
  297. /* The allocated pointer is always RT_ALIGN_SIZE aligned */
  298. raw_alloc_tx_buf = (uint8_t*)rt_malloc(aligned_len + HPM_L1C_CACHELINE_SIZE - RT_ALIGN_SIZE);
  299. RT_ASSERT(raw_alloc_tx_buf != RT_NULL);
  300. aligned_tx_buf = (uint8_t*)HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)raw_alloc_tx_buf);
  301. rt_memcpy(aligned_tx_buf, msg->send_buf, msg->length);
  302. l1c_dc_flush((uint32_t) aligned_tx_buf, aligned_len);
  303. }
  304. else
  305. {
  306. aligned_tx_buf = (uint8_t*) msg->send_buf;
  307. }
  308. }
  309. if (msg->recv_buf != RT_NULL)
  310. {
  311. if (l1c_dc_is_enabled())
  312. {
  313. /* The allocated pointer is always RT_ALIGN_SIZE aligned */
  314. raw_alloc_rx_buf = (uint8_t*)rt_malloc(aligned_len + HPM_L1C_CACHELINE_SIZE - RT_ALIGN_SIZE);
  315. RT_ASSERT(raw_alloc_rx_buf != RT_NULL);
  316. aligned_rx_buf = (uint8_t*)HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)raw_alloc_rx_buf);
  317. }
  318. else
  319. {
  320. aligned_rx_buf = msg->recv_buf;
  321. }
  322. }
  323. uint8_t *tx_buf = aligned_tx_buf;
  324. uint8_t *rx_buf = aligned_rx_buf;
  325. uint32_t core_id = read_csr(CSR_MHARTID);
  326. spi->spi_base->CTRL &= ~(SPI_CTRL_TXDMAEN_MASK | SPI_CTRL_RXDMAEN_MASK);
  327. spi->control_config.common_config.tx_dma_enable = false;
  328. spi->control_config.common_config.rx_dma_enable = false;
  329. spi_disable_interrupt(spi->spi_base, spi_end_int);
  330. while (remaining_size > 0)
  331. {
  332. transfer_len = MIN(SPI_SOC_TRANSFER_COUNT_MAX, remaining_size);
  333. if (msg->send_buf != NULL && msg->recv_buf != NULL)
  334. {
  335. spi_enable_interrupt(spi->spi_base, spi_end_int);
  336. spi->control_config.common_config.tx_dma_enable = true;
  337. spi->control_config.common_config.rx_dma_enable = true;
  338. spi->control_config.common_config.trans_mode = spi_trans_write_read_together;
  339. spi_stat = spi_setup_dma_transfer(spi->spi_base, &spi->control_config, NULL, NULL, transfer_len,
  340. transfer_len);
  341. if (spi_stat != status_success)
  342. {
  343. break;
  344. }
  345. dmamux_config(HPM_DMAMUX, spi->tx_dma.channel, spi->tx_dmamux, true);
  346. spi_stat = spi_tx_trigger_dma(spi->tx_dma.base, spi->tx_dma.channel, spi->spi_base,
  347. core_local_mem_to_sys_address(core_id, (uint32_t) tx_buf),
  348. DMA_TRANSFER_WIDTH_BYTE, transfer_len);
  349. /* setup spi rx trigger dma transfer*/
  350. dmamux_config(HPM_DMAMUX, spi->rx_dma.channel, spi->rx_dmamux, true);
  351. spi_stat = spi_rx_trigger_dma(spi->rx_dma.base, spi->rx_dma.channel, spi->spi_base,
  352. core_local_mem_to_sys_address(core_id, (uint32_t) rx_buf),
  353. DMA_TRANSFER_WIDTH_BYTE, transfer_len);
  354. if (spi_stat != status_success)
  355. {
  356. break;
  357. }
  358. dma_mgr_enable_chn_irq(&spi->tx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  359. dma_mgr_enable_chn_irq(&spi->rx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  360. rt_sem_take(spi->spi_xfer_done_sem, RT_WAITING_FOREVER);
  361. rt_sem_take(spi->txdma_xfer_done_sem, RT_WAITING_FOREVER);
  362. rt_sem_take(spi->rxdma_xfer_done_sem, RT_WAITING_FOREVER);
  363. }
  364. else if (msg->send_buf != NULL)
  365. {
  366. spi_enable_interrupt(spi->spi_base, spi_end_int);
  367. spi->control_config.common_config.tx_dma_enable = true;
  368. spi->control_config.common_config.trans_mode = spi_trans_write_only;
  369. spi_stat = spi_setup_dma_transfer(spi->spi_base, &spi->control_config, NULL, NULL, transfer_len, 0);
  370. if (spi_stat != status_success)
  371. {
  372. break;
  373. }
  374. dmamux_config(HPM_DMAMUX, spi->tx_dma.channel, spi->tx_dmamux, true);
  375. spi_stat = spi_tx_trigger_dma(spi->tx_dma.base, spi->tx_dma.channel, spi->spi_base,
  376. core_local_mem_to_sys_address(core_id, (uint32_t) tx_buf),
  377. DMA_TRANSFER_WIDTH_BYTE, transfer_len);
  378. if (spi_stat != status_success)
  379. {
  380. break;
  381. }
  382. dma_mgr_enable_chn_irq(&spi->tx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  383. rt_sem_take(spi->spi_xfer_done_sem, RT_WAITING_FOREVER);
  384. rt_sem_take(spi->txdma_xfer_done_sem, RT_WAITING_FOREVER);
  385. }
  386. else
  387. {
  388. spi->control_config.common_config.rx_dma_enable = true;
  389. spi->control_config.common_config.trans_mode = spi_trans_read_only;
  390. spi_stat = spi_setup_dma_transfer(spi->spi_base, &spi->control_config, NULL, NULL, 0, transfer_len);
  391. if (spi_stat != status_success)
  392. {
  393. break;
  394. }
  395. /* setup spi rx trigger dma transfer*/
  396. dmamux_config(HPM_DMAMUX, spi->rx_dma.channel, spi->rx_dmamux, true);
  397. spi_stat = spi_rx_trigger_dma(spi->rx_dma.base, spi->rx_dma.channel, spi->spi_base,
  398. core_local_mem_to_sys_address(core_id, (uint32_t) rx_buf),
  399. DMA_TRANSFER_WIDTH_BYTE, transfer_len);
  400. if (spi_stat != status_success)
  401. {
  402. break;
  403. }
  404. spi_enable_interrupt(spi->spi_base, spi_end_int);
  405. dma_mgr_enable_chn_irq(&spi->rx_dma, DMA_MGR_INTERRUPT_MASK_TC);
  406. rt_sem_take(spi->spi_xfer_done_sem, RT_WAITING_FOREVER);
  407. rt_sem_take(spi->rxdma_xfer_done_sem, RT_WAITING_FOREVER);
  408. }
  409. if (tx_buf != NULL)
  410. {
  411. tx_buf += transfer_len;
  412. }
  413. if (rx_buf != NULL)
  414. {
  415. rx_buf += transfer_len;
  416. }
  417. remaining_size -= transfer_len;
  418. spi->spi_base->CTRL &= ~(SPI_CTRL_TXDMAEN_MASK | SPI_CTRL_RXDMAEN_MASK);
  419. }
  420. if (l1c_dc_is_enabled() && (msg->length > 0))
  421. {
  422. /* cache invalidate for receive buff */
  423. if (aligned_tx_buf != RT_NULL)
  424. {
  425. rt_free(raw_alloc_tx_buf);
  426. raw_alloc_tx_buf = RT_NULL;
  427. aligned_tx_buf = RT_NULL;
  428. }
  429. if (aligned_rx_buf != RT_NULL)
  430. {
  431. l1c_dc_invalidate((uint32_t) aligned_rx_buf, aligned_len);
  432. rt_memcpy(msg->recv_buf, aligned_rx_buf, msg->length);
  433. rt_free(raw_alloc_rx_buf);
  434. raw_alloc_rx_buf = RT_NULL;
  435. aligned_rx_buf = RT_NULL;
  436. }
  437. }
  438. return spi_stat;
  439. }
  440. static rt_ssize_t hpm_spi_xfer(struct rt_spi_device *device, struct rt_spi_message *msg)
  441. {
  442. RT_ASSERT(device != RT_NULL);
  443. RT_ASSERT(msg != RT_NULL);
  444. RT_ASSERT(device->bus != RT_NULL);
  445. RT_ASSERT(device->bus->parent.user_data != RT_NULL);
  446. cs_ctrl_callback_t cs_pin_control = (cs_ctrl_callback_t) device->parent.user_data;
  447. struct hpm_spi *spi = (struct hpm_spi *) (device->bus->parent.user_data);
  448. hpm_stat_t spi_stat = status_success;
  449. if ((cs_pin_control != NULL) && msg->cs_take)
  450. {
  451. cs_pin_control(SPI_CS_TAKE);
  452. }
  453. if (spi->enable_dma)
  454. {
  455. spi_stat = hpm_spi_xfer_dma(device, msg);
  456. }
  457. else
  458. {
  459. spi_stat = hpm_spi_xfer_polling(device, msg);
  460. }
  461. if (spi_stat != status_success)
  462. {
  463. msg->length = 0;
  464. }
  465. if ((cs_pin_control != NULL) && msg->cs_release)
  466. {
  467. cs_pin_control(SPI_CS_RELEASE);
  468. }
  469. return msg->length;
  470. }
  471. rt_err_t rt_hw_spi_device_attach(const char *bus_name, const char *device_name, cs_ctrl_callback_t callback)
  472. {
  473. RT_ASSERT(bus_name != RT_NULL);
  474. RT_ASSERT(device_name != RT_NULL);
  475. rt_err_t result;
  476. struct rt_spi_device *spi_device;
  477. /* attach the device to spi bus*/
  478. spi_device = (struct rt_spi_device *) rt_malloc(sizeof(struct rt_spi_device));
  479. RT_ASSERT(spi_device != RT_NULL);
  480. result = rt_spi_bus_attach_device(spi_device, device_name, bus_name, (void*)callback);
  481. RT_ASSERT(result == RT_EOK);
  482. return result;
  483. }
  484. int rt_hw_spi_init(void)
  485. {
  486. rt_err_t ret = RT_EOK;
  487. hpm_stat_t stat;
  488. for (uint32_t i = 0; i < sizeof(hpm_spis) / sizeof(hpm_spis[0]); i++)
  489. {
  490. struct hpm_spi *spi = &hpm_spis[i];
  491. spi->spi_bus.parent.user_data = spi;
  492. if (spi->enable_dma)
  493. {
  494. stat = dma_mgr_request_resource(&spi->tx_dma);
  495. dma_mgr_install_chn_tc_callback(&spi->tx_dma, spi_dma_channel_tc_callback, (void *)&hpm_spis[i]);
  496. if (stat != status_success)
  497. {
  498. return -RT_ERROR;
  499. }
  500. stat = dma_mgr_request_resource(&spi->rx_dma);
  501. dma_mgr_install_chn_tc_callback(&spi->rx_dma, spi_dma_channel_tc_callback, (void *)&hpm_spis[i]);
  502. if (stat != status_success)
  503. {
  504. return -RT_ERROR;
  505. }
  506. intc_m_enable_irq_with_priority(hpm_spis[i].spi_irq, 2);
  507. dma_mgr_enable_dma_irq_with_priority(&spi->tx_dma, 1);
  508. dma_mgr_enable_dma_irq_with_priority(&spi->rx_dma, 1);
  509. }
  510. ret = rt_spi_bus_register(&spi->spi_bus, spi->bus_name, &hpm_spi_ops);
  511. if (ret != RT_EOK)
  512. {
  513. break;
  514. }
  515. char sem_name[RT_NAME_MAX];
  516. rt_sprintf(sem_name, "%s_s", hpm_spis[i].bus_name);
  517. hpm_spis[i].xfer_sem = rt_sem_create(sem_name, 0, RT_IPC_FLAG_PRIO);
  518. if (hpm_spis[i].xfer_sem == RT_NULL)
  519. {
  520. ret = RT_ENOMEM;
  521. break;
  522. }
  523. rt_sprintf(sem_name, "%s_ds", hpm_spis[i].bus_name);
  524. hpm_spis[i].spi_xfer_done_sem = rt_sem_create(sem_name, 0, RT_IPC_FLAG_PRIO);
  525. if (hpm_spis[i].spi_xfer_done_sem == RT_NULL)
  526. {
  527. ret = RT_ENOMEM;
  528. break;
  529. }
  530. rt_sprintf(sem_name, "%s_rds", hpm_spis[i].bus_name);
  531. hpm_spis[i].rxdma_xfer_done_sem = rt_sem_create(sem_name, 0, RT_IPC_FLAG_PRIO);
  532. if (hpm_spis[i].rxdma_xfer_done_sem == RT_NULL)
  533. {
  534. ret = RT_ENOMEM;
  535. break;
  536. }
  537. rt_sprintf(sem_name, "%s_tds", hpm_spis[i].bus_name);
  538. hpm_spis[i].txdma_xfer_done_sem = rt_sem_create(sem_name, 0, RT_IPC_FLAG_PRIO);
  539. if (hpm_spis[i].txdma_xfer_done_sem == RT_NULL)
  540. {
  541. ret = RT_ENOMEM;
  542. break;
  543. }
  544. }
  545. return ret;
  546. }
  547. INIT_BOARD_EXPORT(rt_hw_spi_init);
  548. #endif /*BSP_USING_SPI*/