hpm_spi.c 20 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429
  1. /*
  2. * Copyright (c) 2021 HPMicro
  3. *
  4. * SPDX-License-Identifier: BSD-3-Clause
  5. *
  6. */
  7. #include "hpm_spi.h"
  8. static hpm_stat_t hpm_spi_tx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t src, uint8_t data_width, uint32_t size)
  9. {
  10. dma_handshake_config_t config;
  11. dma_default_handshake_config(dma_ptr, &config);
  12. config.ch_index = ch_num;
  13. config.dst = (uint32_t)&spi_ptr->DATA;
  14. config.dst_fixed = true;
  15. config.src = src;
  16. config.src_fixed = false;
  17. config.data_width = data_width;
  18. config.size_in_byte = size;
  19. return dma_setup_handshake(dma_ptr, &config, true);
  20. }
  21. static hpm_stat_t hpm_spi_rx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t dst, uint8_t data_width, uint32_t size)
  22. {
  23. dma_handshake_config_t config;
  24. dma_default_handshake_config(dma_ptr, &config);
  25. config.ch_index = ch_num;
  26. config.dst = dst;
  27. config.dst_fixed = false;
  28. config.src = (uint32_t)&spi_ptr->DATA;
  29. config.src_fixed = true;
  30. config.data_width = data_width;
  31. config.size_in_byte = size;
  32. return dma_setup_handshake(dma_ptr, &config, true);
  33. }
/* Build the chained DMA descriptor list for a multi-chunk SPI TX (or
 * write-read-together) transfer. For each chunk i, SPI_DMA_DESC_COUNT_PER_TRANS
 * linked descriptors are emitted in this order:
 *   [i*N + 0] write spi_transctrl[i] into SPI TRANSCTRL (re-arm mode/counts)
 *   [i*N + 1] write a dummy byte into SPI CMD (starts the next SPI transfer)
 *   [i*N + 2] move chunk i's payload from tx_buff into SPI DATA (handshake mode)
 * The DATA descriptor of the last chunk terminates the chain (linked_ptr = 0).
 *
 * context            - transfer context (buffers, counts, DMA channel/width)
 * config             - SPI control config (trans mode, data phase format)
 * trans_count        - number of chunks the transfer was split into
 * spi_transctrl      - caller-provided array of trans_count words, filled here
 *                      with the TRANSCTRL value for each chunk; must stay valid
 *                      while the DMA chain runs
 * tx_dma_descriptors - caller-provided descriptor array
 *                      (trans_count * SPI_DMA_DESC_COUNT_PER_TRANS entries), filled here
 */
void hpm_spi_prepare_dma_tx_descriptors(spi_context_t *context, spi_control_config_t *config, uint32_t trans_count,
                                        uint32_t *spi_transctrl, dma_linked_descriptor_t *tx_dma_descriptors)
{
    SPI_Type *ptr = context->ptr;
    uint32_t dma_transfer_size[trans_count];    /* VLA: DMA data-unit count per chunk */
    uint32_t tx_count = context->tx_count;
    uint32_t per_trans_size = context->per_trans_max;
    uint32_t dma_ch = context->dma_context.tx_dma_ch;
    uint8_t *tx_buff = context->tx_buff;
    dma_channel_config_t dma_ch_config;
    static uint8_t dummy_cmd = 0xff;            /* static: DMA reads this after the function returns */
    uint32_t temp32;
    uint32_t tx_buff_index = 0;                 /* running byte offset into tx_buff */
    dma_default_channel_config(context->dma_context.dma_ptr, &dma_ch_config);
    for (uint32_t i = 0; i < trans_count; i++) {
        /* chunk size: full per_trans_size for every chunk except possibly the last */
        if (tx_count > per_trans_size) {
            temp32 = per_trans_size;
            tx_count -= per_trans_size;
        } else {
            temp32 = tx_count;
        }
        /* TRANSCTRL word for this chunk; WR/RD counts are (size - 1) per register encoding */
        *(spi_transctrl + i) = SPI_TRANSCTRL_TRANSMODE_SET(config->common_config.trans_mode == spi_trans_write_read_together ?
                                spi_trans_write_read_together : spi_trans_write_only)
                                | SPI_TRANSCTRL_DUALQUAD_SET(config->common_config.data_phase_fmt)
                                | SPI_TRANSCTRL_WRTRANCNT_SET(temp32 - 1)
                                | SPI_TRANSCTRL_RDTRANCNT_SET(temp32 - 1);
        if (i == 0) {
            /* Set the count of data transferred by dma to be one more than that of spi */
            /* when dma transfer finished, there are data in SPI fifo, dma should not execute the dma descriptor which changes SPI CTRL register */
            temp32 = temp32 + 1;
        }
        if (i == trans_count - 1) {
            /* last chunk: undo the skew so the total DMA count matches the data */
            temp32 = temp32 - 1;
        }
        dma_transfer_size[i] = temp32;
        /* SPI CTRL: mem-to-mem word copy of spi_transctrl[i] into TRANSCTRL */
        dma_ch_config.size_in_byte = 4;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(spi_transctrl + i));
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->TRANSCTRL);
        dma_ch_config.src_width = DMA_TRANSFER_WIDTH_WORD;
        dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_WORD;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1));
        dma_config_linked_descriptor(context->dma_context.dma_ptr, tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS, dma_ch, &dma_ch_config);
        /* SPI CMD: writing any byte to CMD kicks off the (re-armed) SPI transfer */
        dma_ch_config.size_in_byte = 1;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_cmd);
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->CMD);
        dma_ch_config.src_width = DMA_TRANSFER_WIDTH_BYTE;
        dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_BYTE;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2));
        dma_config_linked_descriptor(context->dma_context.dma_ptr, tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1, dma_ch, &dma_ch_config);
        /* SPI DATA: payload copy, destination paced by the SPI TX handshake */
        dma_ch_config.size_in_byte = dma_transfer_size[i] << context->dma_context.data_width;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_buff + tx_buff_index));
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->DATA);
        dma_ch_config.src_width = context->dma_context.data_width;
        dma_ch_config.dst_width = context->dma_context.data_width;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_HANDSHAKE;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_INCREMENT;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        if (i == trans_count - 1) {
            dma_ch_config.linked_ptr = 0;   /* end of chain */
        } else {
            dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_dma_descriptors + (i + 1) * SPI_DMA_DESC_COUNT_PER_TRANS));
        }
        dma_config_linked_descriptor(context->dma_context.dma_ptr, tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2, dma_ch, &dma_ch_config);
        /* NOTE(review): advance uses the DMA-adjusted count (temp32 after the +1/-1
         * skew), matching dma_transfer_size[i] — confirm this is the intended overlap */
        tx_buff_index += temp32 * context->data_len_in_byte;
    }
}
/* Build the chained DMA descriptor list for a multi-chunk SPI RX (read-only)
 * transfer. For each chunk i, SPI_DMA_DESC_COUNT_PER_TRANS linked descriptors
 * are emitted in this order:
 *   [i*N + 0] write spi_transctrl[i] into SPI TRANSCTRL (re-arm mode/counts)
 *   [i*N + 1] write a dummy byte into SPI CMD (starts the next SPI transfer)
 *   [i*N + 2] move chunk i's data from SPI DATA into rx_buff (handshake mode)
 * Unlike the TX variant, no +1/-1 count skew is applied between chunks.
 * The DATA descriptor of the last chunk terminates the chain (linked_ptr = 0).
 *
 * context            - transfer context (buffers, counts, DMA channel/width)
 * config             - SPI control config (data phase format)
 * trans_count        - number of chunks the transfer was split into
 * spi_transctrl      - caller-provided array of trans_count words, filled here;
 *                      must stay valid while the DMA chain runs
 * rx_dma_descriptors - caller-provided descriptor array
 *                      (trans_count * SPI_DMA_DESC_COUNT_PER_TRANS entries), filled here
 */
void hpm_prepare_dma_rx_descriptors(spi_context_t *context, spi_control_config_t *config, uint32_t trans_count,
                                    uint32_t *spi_transctrl, dma_linked_descriptor_t *rx_dma_descriptors)
{
    SPI_Type *ptr = context->ptr;
    uint32_t dma_transfer_size[trans_count];    /* VLA: DMA data-unit count per chunk */
    uint32_t rx_count = context->rx_count;
    uint32_t per_trans_size = context->per_trans_max;
    uint32_t dma_ch = context->dma_context.rx_dma_ch;
    uint8_t *rx_buff = context->rx_buff;
    dma_channel_config_t dma_ch_config;
    static uint8_t dummy_cmd = 0xff;            /* static: DMA reads this after the function returns */
    uint32_t temp32;
    uint32_t rx_buff_index = 0;                 /* running byte offset into rx_buff */
    dma_default_channel_config(context->dma_context.dma_ptr, &dma_ch_config);
    for (uint32_t i = 0; i < trans_count; i++) {
        /* chunk size: full per_trans_size for every chunk except possibly the last */
        if (rx_count > per_trans_size) {
            temp32 = per_trans_size;
            rx_count -= per_trans_size;
        } else {
            temp32 = rx_count;
        }
        /* TRANSCTRL word for this chunk; WR/RD counts are (size - 1) per register encoding */
        *(spi_transctrl + i) = SPI_TRANSCTRL_TRANSMODE_SET(spi_trans_read_only) |
                               SPI_TRANSCTRL_DUALQUAD_SET(config->common_config.data_phase_fmt) |
                               SPI_TRANSCTRL_WRTRANCNT_SET(temp32 - 1) |
                               SPI_TRANSCTRL_RDTRANCNT_SET(temp32 - 1);
        dma_transfer_size[i] = temp32;
        /* SPI CTRL: mem-to-mem word copy of spi_transctrl[i] into TRANSCTRL */
        dma_ch_config.size_in_byte = 4;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(spi_transctrl + i));
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->TRANSCTRL);
        dma_ch_config.src_width = DMA_TRANSFER_WIDTH_WORD;
        dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_WORD;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1));
        dma_config_linked_descriptor(context->dma_context.dma_ptr, rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS, dma_ch, &dma_ch_config);
        /* SPI CMD: writing any byte to CMD kicks off the (re-armed) SPI transfer */
        dma_ch_config.size_in_byte = 1;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_cmd);
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->CMD);
        dma_ch_config.src_width = DMA_TRANSFER_WIDTH_BYTE;
        dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_BYTE;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2));
        dma_config_linked_descriptor(context->dma_context.dma_ptr, rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1, dma_ch, &dma_ch_config);
        /* SPI DATA: payload copy, source paced by the SPI RX handshake */
        dma_ch_config.size_in_byte = dma_transfer_size[i] << context->dma_context.data_width;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->DATA);
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_buff + rx_buff_index));
        dma_ch_config.src_width = context->dma_context.data_width;
        dma_ch_config.dst_width = context->dma_context.data_width;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_HANDSHAKE;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_INCREMENT;
        if (i == trans_count - 1) {
            dma_ch_config.linked_ptr = 0;   /* end of chain */
        } else {
            dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_dma_descriptors + (i + 1) * SPI_DMA_DESC_COUNT_PER_TRANS));
        }
        dma_config_linked_descriptor(context->dma_context.dma_ptr, rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2, dma_ch, &dma_ch_config);
        rx_buff_index += temp32 * context->data_len_in_byte;
    }
}
  187. static uint32_t hpm_spi_get_trans_count(spi_context_t *context, spi_control_config_t *config)
  188. {
  189. uint32_t total_trans_count, per_trans_count, trans_count;
  190. per_trans_count = context->per_trans_max;
  191. if (config->common_config.trans_mode == spi_trans_write_only || config->common_config.trans_mode == spi_trans_dummy_write) {
  192. total_trans_count = context->tx_count;
  193. } else if (config->common_config.trans_mode == spi_trans_read_only || config->common_config.trans_mode == spi_trans_dummy_read) {
  194. total_trans_count = context->rx_count;
  195. } else {
  196. /* write read together */
  197. assert(context->tx_count == context->rx_count);
  198. total_trans_count = context->tx_count;
  199. }
  200. trans_count = (total_trans_count + per_trans_count - 1) / per_trans_count;
  201. return trans_count;
  202. }
  203. /**
  204. * spi with dma chain workflow
  205. *
  206. * 1. call spi_setup_dma_transfer to config SPI for first transmission
  207. * 2. execute data transmission phase in dma chain descriptor
  208. * 3. execute setting SPI CTRL register phase in dma chain descriptor
  209. * 4. execute writing SPI CMD register phase in dma chain descriptor
  210. * 5. Repeat steps 2-4 until finish the transmission
  211. */
  212. static hpm_stat_t spi_setup_trans_with_dma_chain(spi_context_t *context, spi_control_config_t *config)
  213. {
  214. hpm_stat_t stat = status_success;
  215. SPI_Type *spi_ptr = context->ptr;
  216. DMA_Type *dma_ptr = context->dma_context.dma_ptr;
  217. DMAMUX_Type *dmamux_ptr = context->dma_context.dmamux_ptr;
  218. dma_linked_descriptor_t *dma_linked_descriptor = context->dma_linked_descriptor;
  219. uint32_t *spi_transctrl = context->spi_transctrl;
  220. uint32_t dma_channel;
  221. uint32_t trans_count;
  222. dma_channel_config_t dma_ch_config = {0};
  223. trans_count = hpm_spi_get_trans_count(context, config);
  224. /* active spi cs pin */
  225. context->write_cs(context->cs_pin, SPI_CS_ACTIVE);
  226. /* config SPI for first dma transmission */
  227. stat = spi_setup_dma_transfer(spi_ptr,
  228. config,
  229. &context->cmd,
  230. &context->addr,
  231. MIN(context->tx_count, context->per_trans_max),
  232. MIN(context->rx_count, context->per_trans_max));
  233. if (stat != status_success) {
  234. return stat;
  235. }
  236. if (config->common_config.trans_mode == spi_trans_write_only || config->common_config.trans_mode == spi_trans_dummy_write) {
  237. /* write only */
  238. hpm_spi_prepare_dma_tx_descriptors(context, config, trans_count, spi_transctrl, dma_linked_descriptor);
  239. dma_channel = context->dma_context.tx_dma_ch;
  240. dmamux_config(dmamux_ptr, context->dma_context.tx_dmamux_ch, context->dma_context.tx_req, true);
  241. } else if (config->common_config.trans_mode == spi_trans_read_only || config->common_config.trans_mode == spi_trans_dummy_read) {
  242. /* read only */
  243. hpm_prepare_dma_rx_descriptors(context, config, trans_count, spi_transctrl, dma_linked_descriptor);
  244. dma_channel = context->dma_context.rx_dma_ch;
  245. dmamux_config(dmamux_ptr, context->dma_context.rx_dmamux_ch, context->dma_context.rx_req, true);
  246. } else if (config->common_config.trans_mode == spi_trans_write_read_together) {
  247. /* write and read together */
  248. hpm_spi_prepare_dma_tx_descriptors(context, config, trans_count, spi_transctrl, dma_linked_descriptor);
  249. dma_channel = context->dma_context.tx_dma_ch;
  250. dmamux_config(dmamux_ptr, context->dma_context.tx_dmamux_ch, context->dma_context.tx_req, true);
  251. dmamux_config(dmamux_ptr, context->dma_context.rx_dmamux_ch, context->dma_context.rx_req, true);
  252. /* spi tx use chained dma descriptor, spi rx use unchained dma */
  253. stat = hpm_spi_rx_trigger_dma(dma_ptr,
  254. context->dma_context.rx_dma_ch,
  255. spi_ptr,
  256. core_local_mem_to_sys_address(context->running_core, (uint32_t)context->rx_buff),
  257. context->dma_context.data_width,
  258. context->rx_size);
  259. if (stat != status_success) {
  260. return stat;
  261. }
  262. }
  263. /* use a dummy dma transfer to start SPI trans dma chain */
  264. static uint32_t dummy_data1 = 0xff, dummy_data2 = 0xff;
  265. dma_default_channel_config(context->dma_context.dma_ptr, &dma_ch_config);
  266. dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_data1);
  267. dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_data2);
  268. dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
  269. dma_ch_config.src_width = DMA_TRANSFER_WIDTH_WORD;
  270. dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_WORD;
  271. dma_ch_config.size_in_byte = 4;
  272. /* start data transmission phase in dma chain */
  273. dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(dma_linked_descriptor + SPI_DMA_DESC_COUNT_PER_TRANS - 1));
  274. stat = dma_setup_channel(dma_ptr, dma_channel, &dma_ch_config, true);
  275. if (stat != status_success) {
  276. return stat;
  277. }
  278. return stat;
  279. }
  280. static hpm_stat_t spi_setup_trans_with_dma(spi_context_t *context, spi_control_config_t *config)
  281. {
  282. hpm_stat_t stat = status_success;
  283. SPI_Type *spi_ptr = context->ptr;
  284. DMA_Type *dma_ptr = context->dma_context.dma_ptr;
  285. DMAMUX_Type *dmamux_ptr = context->dma_context.dmamux_ptr;
  286. uint32_t trans_mode = config->common_config.trans_mode;
  287. if (context->write_cs != NULL) {
  288. context->write_cs(context->cs_pin, SPI_CS_ACTIVE);
  289. }
  290. stat = spi_setup_dma_transfer(spi_ptr, config,
  291. &context->cmd, &context->addr,
  292. context->tx_count, context->rx_count);
  293. if (stat != status_success) {
  294. return stat;
  295. }
  296. if (trans_mode != spi_trans_write_only && trans_mode != spi_trans_dummy_write && trans_mode != spi_trans_no_data) {
  297. dmamux_config(dmamux_ptr, context->dma_context.rx_dmamux_ch, context->dma_context.rx_req, true);
  298. stat = hpm_spi_rx_trigger_dma(dma_ptr,
  299. context->dma_context.rx_dma_ch,
  300. spi_ptr,
  301. core_local_mem_to_sys_address(context->running_core, (uint32_t)context->rx_buff),
  302. context->dma_context.data_width,
  303. context->rx_size);
  304. if (stat != status_success) {
  305. return stat;
  306. }
  307. }
  308. if (trans_mode != spi_trans_read_only && trans_mode != spi_trans_dummy_read && trans_mode != spi_trans_no_data) {
  309. dmamux_config(dmamux_ptr, context->dma_context.tx_dmamux_ch, context->dma_context.tx_req, true);
  310. stat = hpm_spi_tx_trigger_dma(dma_ptr,
  311. context->dma_context.tx_dma_ch,
  312. spi_ptr,
  313. core_local_mem_to_sys_address(context->running_core, (uint32_t)context->tx_buff),
  314. context->dma_context.data_width,
  315. context->tx_size);
  316. if (stat != status_success) {
  317. return stat;
  318. }
  319. }
  320. return stat;
  321. }
  322. hpm_stat_t hpm_spi_setup_dma_transfer(spi_context_t *context, spi_control_config_t *config)
  323. {
  324. assert(context != NULL || config != NULL);
  325. /* use dma */
  326. assert(&context->dma_context != NULL);
  327. /* spi per trans data size not zero */
  328. assert(context->per_trans_max);
  329. hpm_stat_t stat = status_success;
  330. uint32_t trans_mode = config->common_config.trans_mode;
  331. if (l1c_dc_is_enabled()) {
  332. /* cache writeback for tx buff */
  333. if (context->tx_buff != NULL && context->tx_size != 0) {
  334. uint32_t aligned_start = HPM_L1C_CACHELINE_ALIGN_DOWN((uint32_t)context->tx_buff);
  335. uint32_t aligned_end = HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)context->tx_buff + context->tx_size);
  336. uint32_t aligned_size = aligned_end - aligned_start;
  337. l1c_dc_writeback(aligned_start, aligned_size);
  338. }
  339. /* cache invalidate for receive buff */
  340. if (context->rx_buff != NULL && context->rx_size != 0) {
  341. uint32_t aligned_start = HPM_L1C_CACHELINE_ALIGN_DOWN((uint32_t)context->rx_buff);
  342. uint32_t aligned_end = HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)context->rx_buff + context->rx_size);
  343. uint32_t aligned_size = aligned_end - aligned_start;
  344. l1c_dc_invalidate(aligned_start, aligned_size);
  345. }
  346. }
  347. if ((context->rx_count > context->per_trans_max) || (context->tx_count > context->per_trans_max)) {
  348. /* multiple SPI transmissions with chained DMA */
  349. assert(trans_mode == spi_trans_read_only || trans_mode == spi_trans_dummy_read
  350. || trans_mode == spi_trans_write_only || trans_mode == spi_trans_dummy_write
  351. || trans_mode == spi_trans_write_read_together);
  352. /* master mode */
  353. assert((context->ptr->TRANSFMT & SPI_TRANSFMT_SLVMODE_MASK) != SPI_TRANSFMT_SLVMODE_MASK);
  354. /* GPIO should be used to replace SPI CS pin for SPI chained DMA transmissions */
  355. assert(context->write_cs != NULL);
  356. stat = spi_setup_trans_with_dma_chain(context, config);
  357. } else {
  358. /* one SPI transmissions with chained DMA */
  359. stat = spi_setup_trans_with_dma(context, config);
  360. }
  361. return stat;
  362. }
  363. /* Using GPIO as SPI CS pin */
  364. /* When SPI trans completed, GPIO cs pin should be released manually */
  365. hpm_stat_t hpm_spi_release_gpio_cs(spi_context_t *context)
  366. {
  367. hpm_stat_t stat;
  368. SPI_Type *ptr = context->ptr;
  369. assert(context->write_cs != NULL);
  370. stat = spi_wait_for_idle_status(ptr);
  371. if (stat != status_success) {
  372. return stat;
  373. }
  374. context->write_cs(context->cs_pin, !SPI_CS_ACTIVE);
  375. return status_success;
  376. }