drv_spi.c

/*
 * Copyright (c) 2021-2023 HPMicro
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 * Change Logs:
 * Date         Author    Notes
 * 2022-02-01   HPMicro   First version
 * 2023-02-15   HPMicro   Add DMA support
 * 2023-07-14   HPMicro   Manage the DMA buffer alignment in driver
 */
#include <rtthread.h>

#ifdef BSP_USING_SPI
#include <rtdevice.h>
#include "board.h"
#include "drv_spi.h"
#include "hpm_spi_drv.h"
#include "hpm_sysctl_drv.h"
#include "hpm_dma_manager.h"
#include "hpm_dmamux_drv.h"
#include "hpm_l1c_drv.h"
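
/* Per-controller context: the RT-Thread bus object plus the HPMicro SPI and DMA resources it uses */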
struct hpm_spi
{
    uint32_t instance;
    char *bus_name;
    SPI_Type *spi_base;
    spi_control_config_t control_config;
    struct rt_spi_bus spi_bus;
    rt_sem_t xfer_sem;
    rt_bool_t enable_dma;
    rt_uint8_t tx_dmamux;
    rt_uint8_t rx_dmamux;
    hpm_dma_resource_t tx_dma;
    hpm_dma_resource_t rx_dma;
};

static rt_err_t hpm_spi_configure(struct rt_spi_device *device, struct rt_spi_configuration *cfg);
static rt_ssize_t hpm_spi_xfer(struct rt_spi_device *device, struct rt_spi_message *msg);

static struct hpm_spi hpm_spis[] =
{
#if defined(BSP_USING_SPI0)
    {
        .bus_name = "spi0",
        .spi_base = HPM_SPI0,
        .enable_dma = RT_TRUE,
        .tx_dmamux = HPM_DMA_SRC_SPI0_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI0_RX,
    },
#endif
#if defined(BSP_USING_SPI1)
    {
        .bus_name = "spi1",
        .spi_base = HPM_SPI1,
        .enable_dma = RT_TRUE,
        .tx_dmamux = HPM_DMA_SRC_SPI1_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI1_RX,
    },
#endif
#if defined(BSP_USING_SPI2)
    {
        .bus_name = "spi2",
        .spi_base = HPM_SPI2,
        .enable_dma = RT_TRUE,
        .tx_dmamux = HPM_DMA_SRC_SPI2_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI2_RX,
    },
#endif
#if defined(BSP_USING_SPI3)
    {
        .bus_name = "spi3",
        .spi_base = HPM_SPI3,
        .enable_dma = RT_TRUE,
        .tx_dmamux = HPM_DMA_SRC_SPI3_TX,
        .rx_dmamux = HPM_DMA_SRC_SPI3_RX,
    },
#endif
};

static struct rt_spi_ops hpm_spi_ops =
{
    .configure = hpm_spi_configure,
    .xfer = hpm_spi_xfer,
};
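
/*
 * Apply an RT-Thread SPI configuration to the HPMicro controller:
 * pin mux, clock source, data width, CPOL/CPHA, bit order and SCLK frequency.
 */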
static rt_err_t hpm_spi_configure(struct rt_spi_device *device, struct rt_spi_configuration *cfg)
{
    spi_timing_config_t timing_config = { 0 };
    spi_format_config_t format_config = { 0 };

    struct hpm_spi *spi = RT_NULL;
    spi = (struct hpm_spi *) (device->bus->parent.user_data);
    RT_ASSERT(spi != RT_NULL);

    if (cfg->data_width != 8 && cfg->data_width != 16 && cfg->data_width != 32)
    {
        return -RT_EINVAL;
    }

    spi_master_get_default_timing_config(&timing_config);
    spi_master_get_default_format_config(&format_config);

    init_spi_pins(spi->spi_base);
    timing_config.master_config.clk_src_freq_in_hz = board_init_spi_clock(spi->spi_base);

    format_config.common_config.data_len_in_bits = cfg->data_width;
    format_config.common_config.cpha = cfg->mode & RT_SPI_CPHA ? 1 : 0;
    format_config.common_config.cpol = cfg->mode & RT_SPI_CPOL ? 1 : 0;
    format_config.common_config.lsb = cfg->mode & RT_SPI_MSB ? false : true;
    format_config.common_config.mosi_bidir = cfg->mode & RT_SPI_3WIRE ? true : false;
    spi_format_init(spi->spi_base, &format_config);

    if (cfg->max_hz > timing_config.master_config.clk_src_freq_in_hz)
    {
        cfg->max_hz = timing_config.master_config.clk_src_freq_in_hz;
    }
    timing_config.master_config.sclk_freq_in_hz = cfg->max_hz;
    spi_master_timing_init(spi->spi_base, &timing_config);

    spi_master_get_default_control_config(&spi->control_config);
    spi->control_config.master_config.addr_enable = false;
    spi->control_config.master_config.cmd_enable = false;
    spi->control_config.master_config.token_enable = false;
    spi->control_config.common_config.trans_mode = spi_trans_write_read_together;

    return RT_EOK;
}
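
/*
 * Blocking (CPU-driven) transfer path: the message is split into chunks of at
 * most 512 bytes, the per-transfer limit used by this driver.
 */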
static hpm_stat_t hpm_spi_xfer_polling(struct rt_spi_device *device, struct rt_spi_message *msg)
{
    struct hpm_spi *spi = (struct hpm_spi *) (device->bus->parent.user_data);
    hpm_stat_t spi_stat = status_success;

    uint32_t remaining_size = msg->length;
    uint32_t transfer_len;
    uint8_t *tx_buf = (uint8_t*) msg->send_buf;
    uint8_t *rx_buf = (uint8_t*) msg->recv_buf;
    while (remaining_size > 0)
    {
        transfer_len = MIN(512, remaining_size);
        spi->control_config.common_config.tx_dma_enable = false;
        spi->control_config.common_config.rx_dma_enable = false;
        if (msg->send_buf != NULL && msg->recv_buf != NULL)
        {
            spi->control_config.common_config.trans_mode = spi_trans_write_read_together;
            spi_stat = spi_transfer(spi->spi_base, &spi->control_config,
                                    NULL, NULL,
                                    tx_buf, transfer_len, rx_buf, transfer_len);
        }
        else if (msg->send_buf != NULL)
        {
            spi->control_config.common_config.trans_mode = spi_trans_write_only;
            spi_stat = spi_transfer(spi->spi_base, &spi->control_config,
                                    NULL, NULL,
                                    (uint8_t*) tx_buf, transfer_len, NULL, 0);
        }
        else
        {
            spi->control_config.common_config.trans_mode = spi_trans_read_only;
            spi_stat = spi_transfer(spi->spi_base, &spi->control_config,
                                    NULL, NULL,
                                    NULL, 0, rx_buf, transfer_len);
        }
        if (spi_stat != status_success)
        {
            break;
        }
        if (tx_buf != NULL)
        {
            tx_buf += transfer_len;
        }
        if (rx_buf != NULL)
        {
            rx_buf += transfer_len;
        }
        remaining_size -= transfer_len;
    }

    return spi_stat;
}
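
/* Configure a DMA channel to feed the SPI data register from memory (TX direction) */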
hpm_stat_t spi_tx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t src, uint8_t data_width, uint32_t size)
{
    dma_handshake_config_t config;

    config.ch_index = ch_num;
    config.dst = (uint32_t)&spi_ptr->DATA;
    config.dst_fixed = true;
    config.src = src;
    config.src_fixed = false;
    config.data_width = data_width;
    config.size_in_byte = size;

    return dma_setup_handshake(dma_ptr, &config, true);
}
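
/* Configure a DMA channel to drain the SPI data register into memory (RX direction) */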
hpm_stat_t spi_rx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t dst, uint8_t data_width, uint32_t size)
{
    dma_handshake_config_t config;

    config.ch_index = ch_num;
    config.dst = dst;
    config.dst_fixed = false;
    config.src = (uint32_t)&spi_ptr->DATA;
    config.src_fixed = true;
    config.data_width = data_width;
    config.size_in_byte = size;

    return dma_setup_handshake(dma_ptr, &config, true);
}
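
/* Busy-wait until the SPI controller leaves the active state, with a one-second timeout */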
static hpm_stat_t hpm_spi_wait_idle(SPI_Type *ptr)
{
    hpm_stat_t status = status_success;
    rt_tick_t start_tick = rt_tick_get();
    while (ptr->STATUS & SPI_STATUS_SPIACTIVE_MASK)
    {
        if ((rt_tick_get() - start_tick) > RT_TICK_PER_SECOND)
        {
            status = status_timeout;
            break;
        }
    }
    return status;
}
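
/*
 * DMA transfer path. When the data cache is enabled, bounce buffers aligned to
 * the L1 cache line size are allocated so that cache flush/invalidate operations
 * do not touch adjacent data owned by the caller.
 */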
static rt_uint32_t hpm_spi_xfer_dma(struct rt_spi_device *device, struct rt_spi_message *msg)
{
    struct hpm_spi *spi = (struct hpm_spi *) (device->bus->parent.user_data);
    hpm_stat_t spi_stat = status_success;

    uint32_t remaining_size = msg->length;
    uint32_t transfer_len;
    uint8_t *raw_alloc_tx_buf = RT_NULL;
    uint8_t *raw_alloc_rx_buf = RT_NULL;
    uint8_t *aligned_tx_buf = RT_NULL;
    uint8_t *aligned_rx_buf = RT_NULL;
    uint32_t aligned_len = 0;
    if (msg->length > 0)
    {
        aligned_len = (msg->length + HPM_L1C_CACHELINE_SIZE - 1U) & ~(HPM_L1C_CACHELINE_SIZE - 1U);
        if (msg->send_buf != RT_NULL)
        {
            if (l1c_dc_is_enabled())
            {
                /* The allocated pointer is always RT_ALIGN_SIZE aligned */
                raw_alloc_tx_buf = (uint8_t*)rt_malloc(aligned_len + HPM_L1C_CACHELINE_SIZE - RT_ALIGN_SIZE);
                RT_ASSERT(raw_alloc_tx_buf != RT_NULL);
                aligned_tx_buf = (uint8_t*)HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)raw_alloc_tx_buf);
                rt_memcpy(aligned_tx_buf, msg->send_buf, msg->length);
                l1c_dc_flush((uint32_t) aligned_tx_buf, aligned_len);
            }
            else
            {
                aligned_tx_buf = (uint8_t*) msg->send_buf;
            }
        }
        if (msg->recv_buf != RT_NULL)
        {
            if (l1c_dc_is_enabled())
            {
                /* The allocated pointer is always RT_ALIGN_SIZE aligned */
                raw_alloc_rx_buf = (uint8_t*)rt_malloc(aligned_len + HPM_L1C_CACHELINE_SIZE - RT_ALIGN_SIZE);
                RT_ASSERT(raw_alloc_rx_buf != RT_NULL);
                aligned_rx_buf = (uint8_t*)HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)raw_alloc_rx_buf);
            }
            else
            {
                aligned_rx_buf = msg->recv_buf;
            }
        }
    }

    uint8_t *tx_buf = aligned_tx_buf;
    uint8_t *rx_buf = aligned_rx_buf;
    uint32_t core_id = read_csr(CSR_MHARTID);
    spi->spi_base->CTRL &= ~(SPI_CTRL_TXDMAEN_MASK | SPI_CTRL_RXDMAEN_MASK);
    while (remaining_size > 0)
    {
        transfer_len = MIN(512, remaining_size);
        spi->control_config.common_config.tx_dma_enable = false;
        spi->control_config.common_config.rx_dma_enable = false;
        if (msg->send_buf != NULL && msg->recv_buf != NULL)
        {
            spi->control_config.common_config.tx_dma_enable = true;
            spi->control_config.common_config.rx_dma_enable = true;
            spi->control_config.common_config.trans_mode = spi_trans_write_read_together;
            spi_stat = spi_setup_dma_transfer(spi->spi_base, &spi->control_config, NULL, NULL, transfer_len, transfer_len);
            if (spi_stat != status_success)
            {
                break;
            }
            /* setup spi tx trigger dma transfer */
            dmamux_config(HPM_DMAMUX, spi->tx_dma.channel, spi->tx_dmamux, true);
            spi_stat = spi_tx_trigger_dma(spi->tx_dma.base, spi->tx_dma.channel, spi->spi_base,
                                          core_local_mem_to_sys_address(core_id, (uint32_t) tx_buf),
                                          DMA_TRANSFER_WIDTH_BYTE, transfer_len);
            /* setup spi rx trigger dma transfer */
            dmamux_config(HPM_DMAMUX, spi->rx_dma.channel, spi->rx_dmamux, true);
            spi_stat = spi_rx_trigger_dma(spi->rx_dma.base, spi->rx_dma.channel, spi->spi_base,
                                          core_local_mem_to_sys_address(core_id, (uint32_t) rx_buf),
                                          DMA_TRANSFER_WIDTH_BYTE, transfer_len);
            if (spi_stat != status_success)
            {
                break;
            }
        }
        else if (msg->send_buf != NULL)
        {
            spi->control_config.common_config.tx_dma_enable = true;
            spi->control_config.common_config.trans_mode = spi_trans_write_only;
            spi_stat = spi_setup_dma_transfer(spi->spi_base, &spi->control_config, NULL, NULL, transfer_len, 0);
            if (spi_stat != status_success)
            {
                break;
            }
            /* setup spi tx trigger dma transfer */
            dmamux_config(HPM_DMAMUX, spi->tx_dma.channel, spi->tx_dmamux, true);
            spi_stat = spi_tx_trigger_dma(spi->tx_dma.base, spi->tx_dma.channel, spi->spi_base,
                                          core_local_mem_to_sys_address(core_id, (uint32_t) tx_buf),
                                          DMA_TRANSFER_WIDTH_BYTE, transfer_len);
            if (spi_stat != status_success)
            {
                break;
            }
        }
        else
        {
            spi->control_config.common_config.rx_dma_enable = true;
            spi->control_config.common_config.trans_mode = spi_trans_read_only;
            spi_stat = spi_setup_dma_transfer(spi->spi_base, &spi->control_config, NULL, NULL, 0, transfer_len);
            if (spi_stat != status_success)
            {
                break;
            }
            /* setup spi rx trigger dma transfer */
            dmamux_config(HPM_DMAMUX, spi->rx_dma.channel, spi->rx_dmamux, true);
            spi_stat = spi_rx_trigger_dma(spi->rx_dma.base, spi->rx_dma.channel, spi->spi_base,
                                          core_local_mem_to_sys_address(core_id, (uint32_t) rx_buf),
                                          DMA_TRANSFER_WIDTH_BYTE, transfer_len);
            if (spi_stat != status_success)
            {
                break;
            }
        }
        spi_stat = hpm_spi_wait_idle(spi->spi_base);
        if (spi_stat != status_success)
        {
            break;
        }
        if (tx_buf != NULL)
        {
            tx_buf += transfer_len;
        }
        if (rx_buf != NULL)
        {
            rx_buf += transfer_len;
        }
        remaining_size -= transfer_len;
        spi->spi_base->CTRL &= ~(SPI_CTRL_TXDMAEN_MASK | SPI_CTRL_RXDMAEN_MASK);
    }
    if (l1c_dc_is_enabled() && (msg->length > 0))
    {
        /* Release the bounce buffers; for RX, invalidate the cache and copy the data back to the caller's buffer */
        if (aligned_tx_buf != RT_NULL)
        {
            rt_free(raw_alloc_tx_buf);
            raw_alloc_tx_buf = RT_NULL;
            aligned_tx_buf = RT_NULL;
        }
        if (aligned_rx_buf != RT_NULL)
        {
            l1c_dc_invalidate((uint32_t) aligned_rx_buf, aligned_len);
            rt_memcpy(msg->recv_buf, aligned_rx_buf, msg->length);
            rt_free(raw_alloc_rx_buf);
            raw_alloc_rx_buf = RT_NULL;
            aligned_rx_buf = RT_NULL;
        }
    }

    return spi_stat;
}
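
/* rt_spi_ops.xfer entry: handles the chip-select callback, then dispatches to the DMA or polling path */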
static rt_ssize_t hpm_spi_xfer(struct rt_spi_device *device, struct rt_spi_message *msg)
{
    RT_ASSERT(device != RT_NULL);
    RT_ASSERT(msg != RT_NULL);
    RT_ASSERT(device->bus != RT_NULL);
    RT_ASSERT(device->bus->parent.user_data != RT_NULL);

    cs_ctrl_callback_t cs_pin_control = (cs_ctrl_callback_t) device->parent.user_data;
    struct hpm_spi *spi = (struct hpm_spi *) (device->bus->parent.user_data);
    hpm_stat_t spi_stat = status_success;

    if ((cs_pin_control != NULL) && msg->cs_take)
    {
        cs_pin_control(SPI_CS_TAKE);
    }

    if (spi->enable_dma)
    {
        spi_stat = hpm_spi_xfer_dma(device, msg);
    }
    else
    {
        spi_stat = hpm_spi_xfer_polling(device, msg);
    }
    if (spi_stat != status_success)
    {
        msg->length = 0;
    }

    if ((cs_pin_control != NULL) && msg->cs_release)
    {
        cs_pin_control(SPI_CS_RELEASE);
    }

    return msg->length;
}
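
/*
 * Attach a device node to a registered SPI bus. The optional callback controls
 * the chip-select line and is stored in the device's user_data.
 *
 * A minimal usage sketch (the names "spi1_dev" and my_cs_ctrl are illustrative,
 * not part of this driver; my_cs_ctrl is a user-provided cs_ctrl_callback_t
 * that drives the CS GPIO on SPI_CS_TAKE / SPI_CS_RELEASE):
 *
 *   rt_hw_spi_device_attach("spi1", "spi1_dev", my_cs_ctrl);
 */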
rt_err_t rt_hw_spi_device_attach(const char *bus_name, const char *device_name, cs_ctrl_callback_t callback)
{
    RT_ASSERT(bus_name != RT_NULL);
    RT_ASSERT(device_name != RT_NULL);

    rt_err_t result;
    struct rt_spi_device *spi_device;

    /* attach the device to the spi bus */
    spi_device = (struct rt_spi_device *) rt_malloc(sizeof(struct rt_spi_device));
    RT_ASSERT(spi_device != RT_NULL);
    result = rt_spi_bus_attach_device(spi_device, device_name, bus_name, (void*)callback);
    RT_ASSERT(result == RT_EOK);

    return result;
}
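
/*
 * Board init hook: requests DMA channels (when DMA is enabled), registers each
 * enabled bus with the RT-Thread SPI framework and creates its transfer semaphore.
 */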
int rt_hw_spi_init(void)
{
    rt_err_t ret = RT_EOK;
    hpm_stat_t stat;

    for (uint32_t i = 0; i < sizeof(hpm_spis) / sizeof(hpm_spis[0]); i++)
    {
        struct hpm_spi *spi = &hpm_spis[i];
        spi->spi_bus.parent.user_data = spi;

        if (spi->enable_dma)
        {
            stat = dma_manager_request_resource(&spi->tx_dma);
            if (stat != status_success)
            {
                return -RT_ERROR;
            }
            stat = dma_manager_request_resource(&spi->rx_dma);
            if (stat != status_success)
            {
                return -RT_ERROR;
            }
        }

        ret = rt_spi_bus_register(&spi->spi_bus, spi->bus_name, &hpm_spi_ops);
        if (ret != RT_EOK)
        {
            break;
        }

        char sem_name[RT_NAME_MAX];
        rt_sprintf(sem_name, "%s_s", hpm_spis[i].bus_name);
        hpm_spis[i].xfer_sem = rt_sem_create(sem_name, 0, RT_IPC_FLAG_PRIO);
    }

    return ret;
}

INIT_BOARD_EXPORT(rt_hw_spi_init);

#endif /* BSP_USING_SPI */