/* hpm_spi.c */
  1. /*
  2. * Copyright (c) 2021 hpmicro
  3. *
  4. * SPDX-License-Identifier: BSD-3-Clause
  5. *
  6. */
  7. #include "hpm_spi.h"
  8. static hpm_stat_t hpm_spi_tx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t src, uint32_t size)
  9. {
  10. dma_handshake_config_t config;
  11. config.ch_index = ch_num;
  12. config.dst = (uint32_t)&spi_ptr->DATA;
  13. config.dst_fixed = true;
  14. config.src = src;
  15. config.src_fixed = false;
  16. config.size_in_byte = size;
  17. return dma_setup_handshake(dma_ptr, &config);
  18. }
  19. static hpm_stat_t hpm_spi_rx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t dst, uint32_t size)
  20. {
  21. dma_handshake_config_t config;
  22. config.ch_index = ch_num;
  23. config.dst = dst;
  24. config.dst_fixed = false;
  25. config.src = (uint32_t)&spi_ptr->DATA;
  26. config.src_fixed = true;
  27. config.size_in_byte = size;
  28. return dma_setup_handshake(dma_ptr, &config);
  29. }
/*
 * Build the TX linked-descriptor chain for a multi-part SPI DMA transmit.
 *
 * The total tx_size is split into trans_count chunks of at most
 * context->per_trans_max bytes. For each chunk, three chained descriptors
 * are emitted (SPI_DMA_DESC_COUNT_PER_TRANS per chunk):
 *   [0] write the precomputed TRANSCTRL word for this chunk,
 *   [1] write a dummy byte to CMD (writing CMD starts a SPI transfer),
 *   [2] move the payload bytes into the DATA register (handshake mode).
 * The last chunk's DATA descriptor terminates the chain (linked_ptr == 0).
 *
 * @param context            transfer context (buffers, sizes, DMAMUX channel)
 * @param config             SPI control config (trans mode, data phase format)
 * @param trans_count        number of chunks the transfer is split into
 * @param spi_transctrl      caller-provided array of trans_count TRANSCTRL
 *                           words, filled in here and referenced by desc [0]
 * @param tx_dma_descriptors caller-provided array of
 *                           trans_count * SPI_DMA_DESC_COUNT_PER_TRANS
 *                           linked descriptors, filled in here
 */
void hpm_spi_prepare_dma_tx_descriptors(spi_context_t *context, spi_control_config_t *config, uint32_t trans_count,
                                        uint32_t *spi_transctrl, dma_linked_descriptor_t *tx_dma_descriptors)
{
    SPI_Type *ptr = context->ptr;
    /* VLA: per-chunk DMA byte counts (may differ from the SPI count, below) */
    uint32_t dma_transfer_size[trans_count];
    uint32_t tx_size = context->tx_size;
    uint32_t per_trans_size = context->per_trans_max;
    uint32_t dmamux_ch = context->dma_context.tx_dmamux_ch;
    uint8_t *tx_buff = context->tx_buff;
    /* static: descriptor [1] references this address after we return */
    static uint8_t dummy_cmd = 0xff;
    uint32_t temp32;
    uint32_t tx_buff_index = 0;  /* running offset into tx_buff */
    for (uint32_t i = 0; i < trans_count; i++) {
        /* Size of this chunk: full per_trans_size except possibly the last */
        if (tx_size > per_trans_size) {
            temp32 = per_trans_size;
            tx_size -= per_trans_size;
        } else {
            temp32 = tx_size;
        }
        /* TRANSCTRL for this chunk; WR/RDTRANCNT fields are count-minus-one.
         * Read counts are programmed too so the same chain also serves the
         * write-read-together mode. */
        *(spi_transctrl + i) = SPI_TRANSCTRL_TRANSMODE_SET(config->common_config.trans_mode == spi_trans_write_read_together ?
                               spi_trans_write_read_together : spi_trans_write_only)
                               | SPI_TRANSCTRL_DUALQUAD_SET(config->common_config.data_phase_fmt)
                               | SPI_TRANSCTRL_WRTRANCNT_SET(temp32 - 1)
                               | SPI_TRANSCTRL_RDTRANCNT_SET(temp32 - 1);
        /* NOTE(review): the DMA count is skewed by one byte at both ends of
         * the whole chain relative to the SPI count — presumably to keep the
         * TX FIFO primed across chunk boundaries. Confirm against the SPI
         * FIFO/DMA behavior before touching this. */
        if (i == 0) {
            temp32 = temp32 + 1; /* DMA transmits one byte more than SPI at the first transmission */
        }
        if (i == trans_count - 1) {
            temp32 = temp32 - 1;
        }
        dma_transfer_size[i] = temp32;
        /* SPI CTRL: descriptor [0] writes one word into TRANSCTRL */
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS)->trans_size = 1;
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS)->src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(spi_transctrl + i));
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS)->dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->TRANSCTRL);
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS)->ctrl = DMA_CHCTRL_CTRL_SRCWIDTH_SET(DMA_TRANSFER_WIDTH_WORD)
                               | DMA_CHCTRL_CTRL_DSTWIDTH_SET(DMA_TRANSFER_WIDTH_WORD)
                               | DMA_CHCTRL_CTRL_SRCBURSTSIZE_SET(DMA_NUM_TRANSFER_PER_BURST_1T)
                               | DMA_CHCTRL_CTRL_SRCREQSEL_SET(dmamux_ch)
                               | DMA_CHCTRL_CTRL_DSTREQSEL_SET(dmamux_ch);
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS)->linked_ptr =
            core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1));
        /* SPI CMD: descriptor [1] writes a dummy byte to CMD to start the transfer */
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1)->trans_size = 1;
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1)->src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_cmd);
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1)->dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->CMD);
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1)->ctrl = DMA_CHCTRL_CTRL_SRCWIDTH_SET(DMA_TRANSFER_WIDTH_BYTE)
                               | DMA_CHCTRL_CTRL_DSTWIDTH_SET(DMA_TRANSFER_WIDTH_BYTE)
                               | DMA_CHCTRL_CTRL_SRCBURSTSIZE_SET(DMA_NUM_TRANSFER_PER_BURST_1T)
                               | DMA_CHCTRL_CTRL_SRCREQSEL_SET(dmamux_ch)
                               | DMA_CHCTRL_CTRL_DSTREQSEL_SET(dmamux_ch);
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1)->linked_ptr =
            core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2));
        /* SPI DATA: descriptor [2] moves the chunk payload, paced by the
         * SPI TX handshake request; destination register address is fixed */
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->trans_size = dma_transfer_size[i];
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->src_addr =
            core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_buff + tx_buff_index));
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->DATA);
        (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->ctrl = DMA_CHCTRL_CTRL_SRCWIDTH_SET(DMA_TRANSFER_WIDTH_BYTE)
                               | DMA_CHCTRL_CTRL_DSTWIDTH_SET(DMA_TRANSFER_WIDTH_BYTE)
                               | DMA_CHCTRL_CTRL_SRCBURSTSIZE_SET(DMA_NUM_TRANSFER_PER_BURST_1T)
                               | DMA_CHCTRL_CTRL_DSTMODE_SET(DMA_HANDSHAKE_MODE_HANDSHAKE)
                               | DMA_CHCTRL_CTRL_DSTADDRCTRL_SET(DMA_ADDRESS_CONTROL_FIXED)
                               | DMA_CHCTRL_CTRL_SRCREQSEL_SET(dmamux_ch)
                               | DMA_CHCTRL_CTRL_DSTREQSEL_SET(dmamux_ch);
        if (i == trans_count - 1) {
            /* last chunk: terminate the chain */
            (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->linked_ptr = 0;
        } else {
            /* otherwise link to the next chunk's CTRL descriptor */
            (tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->linked_ptr =
                core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_dma_descriptors + (i + 1) * SPI_DMA_DESC_COUNT_PER_TRANS));
        }
        /* advance by the (possibly skewed) DMA count, matching what desc [2]
         * actually consumed from tx_buff */
        tx_buff_index += temp32;
    }
}
/*
 * Build the RX linked-descriptor chain for a multi-part SPI DMA receive.
 *
 * Mirror of hpm_spi_prepare_dma_tx_descriptors(): rx_size is split into
 * trans_count chunks of at most context->per_trans_max bytes, and each chunk
 * gets three chained descriptors (SPI_DMA_DESC_COUNT_PER_TRANS per chunk):
 *   [0] write the TRANSCTRL word (read-only mode) for this chunk,
 *   [1] write a dummy byte to CMD (writing CMD starts a SPI transfer),
 *   [2] drain the DATA register into the receive buffer (handshake mode).
 * Unlike the TX variant there is no +1/-1 skew: the DMA count per chunk
 * equals the SPI count. The last chunk's DATA descriptor ends the chain.
 *
 * @param context            transfer context (buffers, sizes, DMAMUX channel)
 * @param config             SPI control config (data phase format)
 * @param trans_count        number of chunks the transfer is split into
 * @param spi_transctrl      caller-provided array of trans_count TRANSCTRL
 *                           words, filled in here and referenced by desc [0]
 * @param rx_dma_descriptors caller-provided array of
 *                           trans_count * SPI_DMA_DESC_COUNT_PER_TRANS
 *                           linked descriptors, filled in here
 */
void hpm_prepare_dma_rx_descriptors(spi_context_t *context, spi_control_config_t *config, uint32_t trans_count,
                                    uint32_t *spi_transctrl, dma_linked_descriptor_t *rx_dma_descriptors)
{
    SPI_Type *ptr = context->ptr;
    /* VLA: per-chunk DMA byte counts */
    uint32_t dma_transfer_size[trans_count];
    uint32_t rx_size = context->rx_size;
    uint32_t per_trans_size = context->per_trans_max;
    uint32_t dmamux_ch = context->dma_context.rx_dmamux_ch;
    uint8_t *rx_buff = context->rx_buff;
    /* static: descriptor [1] references this address after we return */
    static uint8_t dummy_cmd = 0xff;
    uint32_t temp32;
    uint32_t rx_buff_index = 0;  /* running offset into rx_buff */
    for (uint32_t i = 0; i < trans_count; i++) {
        /* Size of this chunk: full per_trans_size except possibly the last */
        if (rx_size > per_trans_size) {
            temp32 = per_trans_size;
            rx_size -= per_trans_size;
        } else {
            temp32 = rx_size;
        }
        /* TRANSCTRL for this chunk; WR/RDTRANCNT fields are count-minus-one */
        *(spi_transctrl + i) = SPI_TRANSCTRL_TRANSMODE_SET(spi_trans_read_only) |
                               SPI_TRANSCTRL_DUALQUAD_SET(config->common_config.data_phase_fmt) |
                               SPI_TRANSCTRL_WRTRANCNT_SET(temp32 - 1) |
                               SPI_TRANSCTRL_RDTRANCNT_SET(temp32 - 1);
        dma_transfer_size[i] = temp32;
        /* SPI CTRL: descriptor [0] writes one word into TRANSCTRL */
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS)->trans_size = 1;
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS)->src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(spi_transctrl + i));
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS)->dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->TRANSCTRL);
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS)->ctrl = DMA_CHCTRL_CTRL_SRCWIDTH_SET(DMA_TRANSFER_WIDTH_WORD)
                               | DMA_CHCTRL_CTRL_DSTWIDTH_SET(DMA_TRANSFER_WIDTH_WORD)
                               | DMA_CHCTRL_CTRL_SRCBURSTSIZE_SET(DMA_NUM_TRANSFER_PER_BURST_1T)
                               | DMA_CHCTRL_CTRL_SRCREQSEL_SET(dmamux_ch)
                               | DMA_CHCTRL_CTRL_DSTREQSEL_SET(dmamux_ch);
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS)->linked_ptr =
            core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1));
        /* SPI CMD: descriptor [1] writes a dummy byte to CMD to start the transfer */
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1)->trans_size = 1;
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1)->src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_cmd);
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1)->dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->CMD);
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1)->ctrl = DMA_CHCTRL_CTRL_SRCWIDTH_SET(DMA_TRANSFER_WIDTH_BYTE)
                               | DMA_CHCTRL_CTRL_DSTWIDTH_SET(DMA_TRANSFER_WIDTH_BYTE)
                               | DMA_CHCTRL_CTRL_SRCBURSTSIZE_SET(DMA_NUM_TRANSFER_PER_BURST_1T)
                               | DMA_CHCTRL_CTRL_SRCREQSEL_SET(dmamux_ch)
                               | DMA_CHCTRL_CTRL_DSTREQSEL_SET(dmamux_ch);
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1)->linked_ptr =
            core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2));
        /* SPI DATA: descriptor [2] drains the chunk payload, paced by the
         * SPI RX handshake request; source register address is fixed */
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->trans_size = dma_transfer_size[i];
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->DATA);
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->dst_addr =
            core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_buff + rx_buff_index));
        (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->ctrl = DMA_CHCTRL_CTRL_SRCWIDTH_SET(DMA_TRANSFER_WIDTH_BYTE)
                               | DMA_CHCTRL_CTRL_DSTWIDTH_SET(DMA_TRANSFER_WIDTH_BYTE)
                               | DMA_CHCTRL_CTRL_SRCBURSTSIZE_SET(DMA_NUM_TRANSFER_PER_BURST_1T)
                               | DMA_CHCTRL_CTRL_SRCMODE_SET(DMA_HANDSHAKE_MODE_HANDSHAKE)
                               | DMA_CHCTRL_CTRL_SRCADDRCTRL_SET(DMA_ADDRESS_CONTROL_FIXED)
                               | DMA_CHCTRL_CTRL_SRCREQSEL_SET(dmamux_ch)
                               | DMA_CHCTRL_CTRL_DSTREQSEL_SET(dmamux_ch);
        if (i == trans_count - 1) {
            /* last chunk: terminate the chain */
            (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->linked_ptr = 0;
        } else {
            /* otherwise link to the next chunk's CTRL descriptor */
            (rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2)->linked_ptr =
                core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_dma_descriptors + (i + 1) * SPI_DMA_DESC_COUNT_PER_TRANS));
        }
        rx_buff_index += temp32;
    }
}
  171. static uint32_t hpm_spi_get_trans_count(spi_context_t *context, spi_control_config_t *config)
  172. {
  173. uint32_t total_trans_size, per_trans_size, trans_count;
  174. per_trans_size = context->per_trans_max;
  175. if (config->common_config.trans_mode == spi_trans_write_only || config->common_config.trans_mode == spi_trans_dummy_write) {
  176. total_trans_size = context->tx_size;
  177. } else if (config->common_config.trans_mode == spi_trans_read_only || config->common_config.trans_mode == spi_trans_dummy_read) {
  178. total_trans_size = context->rx_size;
  179. } else {
  180. /* write read together */
  181. assert(context->tx_size == context->rx_size);
  182. total_trans_size = context->tx_size;
  183. }
  184. trans_count = (total_trans_size + per_trans_size - 1) / per_trans_size;
  185. return trans_count;
  186. }
  187. static hpm_stat_t spi_setup_trans_with_dma_chain(spi_context_t *context, spi_control_config_t *config)
  188. {
  189. hpm_stat_t stat = status_success;
  190. SPI_Type *spi_ptr = context->ptr;
  191. DMA_Type *dma_ptr = context->dma_context.dma_ptr;
  192. DMAMUX_Type *dmamux_ptr = context->dma_context.dmamux_ptr;
  193. dma_linked_descriptor_t *dma_linked_descriptor = context->dma_linked_descriptor;
  194. uint32_t *spi_transctrl = context->spi_transctrl;
  195. uint32_t dma_channel;
  196. uint32_t trans_count;
  197. dma_channel_config_t dma_ch_config = {0};
  198. trans_count = hpm_spi_get_trans_count(context, config);
  199. /* active spi cs pin */
  200. context->write_cs(context->cs_pin, SPI_CS_ACTIVE);
  201. stat = spi_setup_dma_transfer(spi_ptr,
  202. config,
  203. &context->cmd,
  204. &context->addr,
  205. MIN(context->tx_size, context->per_trans_max),
  206. MIN(context->rx_size, context->per_trans_max));
  207. if (stat != status_success) {
  208. return stat;
  209. }
  210. if (config->common_config.trans_mode == spi_trans_write_only || config->common_config.trans_mode == spi_trans_dummy_write) {
  211. /* write only */
  212. hpm_spi_prepare_dma_tx_descriptors(context, config, trans_count, spi_transctrl, dma_linked_descriptor);
  213. dma_channel = context->dma_context.tx_dma_ch;
  214. dmamux_config(dmamux_ptr, context->dma_context.tx_dmamux_ch, context->dma_context.tx_req, true);
  215. } else if (config->common_config.trans_mode == spi_trans_read_only || config->common_config.trans_mode == spi_trans_dummy_read) {
  216. /* read only */
  217. hpm_prepare_dma_rx_descriptors(context, config, trans_count, spi_transctrl, dma_linked_descriptor);
  218. dma_channel = context->dma_context.rx_dma_ch;
  219. dmamux_config(dmamux_ptr, context->dma_context.rx_dmamux_ch, context->dma_context.rx_req, true);
  220. } else if (config->common_config.trans_mode == spi_trans_write_read_together) {
  221. /* write and read together */
  222. hpm_spi_prepare_dma_tx_descriptors(context, config, trans_count, spi_transctrl, dma_linked_descriptor);
  223. dma_channel = context->dma_context.tx_dma_ch;
  224. dmamux_config(dmamux_ptr, context->dma_context.tx_dmamux_ch, context->dma_context.tx_req, true);
  225. dmamux_config(dmamux_ptr, context->dma_context.rx_dmamux_ch, context->dma_context.rx_req, true);
  226. /* spi tx use chained dma descriptor, spi rx use unchained dma */
  227. stat = hpm_spi_rx_trigger_dma(dma_ptr,
  228. context->dma_context.rx_dmamux_ch,
  229. spi_ptr,
  230. core_local_mem_to_sys_address(context->running_core, (uint32_t)context->rx_buff),
  231. context->rx_size);
  232. if (stat != status_success) {
  233. return stat;
  234. }
  235. }
  236. /* use a dummy dma transfer to start SPI trans dma chain */
  237. static uint32_t dummy_data1 = 0xff, dummy_data2 = 0xff;
  238. dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_data1);
  239. dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_data2);
  240. dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
  241. dma_ch_config.src_width = DMA_TRANSFER_WIDTH_WORD;
  242. dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_WORD;
  243. dma_ch_config.size_in_byte = 4;
  244. dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(dma_linked_descriptor + SPI_DMA_DESC_COUNT_PER_TRANS - 1));
  245. stat = dma_setup_channel(dma_ptr, dma_channel, &dma_ch_config);
  246. if (stat != status_success) {
  247. return stat;
  248. }
  249. return stat;
  250. }
/*
 * Set up a single (unchained) SPI DMA transfer: one SPI transaction whose
 * TX and/or RX side is serviced by a plain handshake DMA channel.
 *
 * Used when both tx_size and rx_size fit within context->per_trans_max.
 * Configures the SPI transfer first, then arms the RX channel (for any mode
 * that reads) and the TX channel (for any mode that writes), with the
 * appropriate cache maintenance around each buffer.
 *
 * @return status_success or the first error from SPI/DMA setup
 */
static hpm_stat_t spi_setup_trans_with_dma(spi_context_t *context, spi_control_config_t *config)
{
    hpm_stat_t stat = status_success;
    SPI_Type *spi_ptr = context->ptr;
    DMA_Type *dma_ptr = context->dma_context.dma_ptr;
    DMAMUX_Type *dmamux_ptr = context->dma_context.dmamux_ptr;
    uint32_t trans_mode = config->common_config.trans_mode;
    /* GPIO CS is optional here (unlike the chained path) */
    if (context->write_cs != NULL) {
        context->write_cs(context->cs_pin, SPI_CS_ACTIVE);
    }
    stat = spi_setup_dma_transfer(spi_ptr, config,
                                  &context->cmd, &context->addr,
                                  context->tx_size, context->rx_size);
    if (stat != status_success) {
        return stat;
    }
    /* RX side: every mode except pure-write and no-data receives data */
    if (trans_mode != spi_trans_write_only && trans_mode != spi_trans_dummy_write && trans_mode != spi_trans_no_data) {
        dmamux_config(dmamux_ptr, context->dma_context.rx_dmamux_ch, context->dma_context.rx_req, true);
        stat = hpm_spi_rx_trigger_dma(dma_ptr,
                                      context->dma_context.rx_dmamux_ch,
                                      spi_ptr,
                                      core_local_mem_to_sys_address(context->running_core, (uint32_t)context->rx_buff),
                                      context->rx_size);
        if (stat != status_success) {
            return stat;
        }
        /* cache invalidate for receive buff — drop stale lines before DMA
         * writes the buffer behind the cache.
         * NOTE(review): assumes rx_buff/rx_size are cache-line aligned;
         * an unaligned buffer could lose neighboring data — confirm. */
        if (l1c_dc_is_enabled()) {
            l1c_dc_invalidate((uint32_t)context->rx_buff, context->rx_size);
        }
    }
    /* TX side: every mode except pure-read and no-data transmits data */
    if (trans_mode != spi_trans_read_only && trans_mode != spi_trans_dummy_read && trans_mode != spi_trans_no_data) {
        dmamux_config(dmamux_ptr, context->dma_context.tx_dmamux_ch, context->dma_context.tx_req, true);
        /* cache writeback for tx buff — make CPU-written data visible to DMA */
        if (l1c_dc_is_enabled()) {
            l1c_dc_writeback((uint32_t)context->tx_buff, context->tx_size);
        }
        stat = hpm_spi_tx_trigger_dma(dma_ptr,
                                      context->dma_context.tx_dmamux_ch,
                                      spi_ptr,
                                      core_local_mem_to_sys_address(context->running_core, (uint32_t)context->tx_buff),
                                      context->tx_size);
        if (stat != status_success) {
            return stat;
        }
    }
    return stat;
}
  299. hpm_stat_t hpm_spi_setup_dma_transfer(spi_context_t *context, spi_control_config_t *config)
  300. {
  301. assert(context != NULL || config != NULL);
  302. /* use dma */
  303. assert(&context->dma_context != NULL);
  304. /* spi per trans data size not zero */
  305. assert(context->per_trans_max);
  306. hpm_stat_t stat = status_success;
  307. uint32_t trans_mode = config->common_config.trans_mode;
  308. if ((context->rx_size > context->per_trans_max) || (context->tx_size > context->per_trans_max)) {
  309. /* multiple SPI transmissions with chained DMA */
  310. assert(trans_mode == spi_trans_read_only || trans_mode == spi_trans_dummy_read
  311. || trans_mode == spi_trans_write_only || trans_mode == spi_trans_dummy_write
  312. || trans_mode == spi_trans_write_read_together);
  313. /* master mode */
  314. assert((context->ptr->TRANSFMT & SPI_TRANSFMT_SLVMODE_MASK) != SPI_TRANSFMT_SLVMODE_MASK);
  315. /* GPIO should be used to replace SPI CS pin for SPI chained DMA transmissions */
  316. assert(context->write_cs != NULL);
  317. stat = spi_setup_trans_with_dma_chain(context, config);
  318. } else {
  319. /* one SPI transmissions with chained DMA */
  320. stat = spi_setup_trans_with_dma(context, config);
  321. }
  322. return stat;
  323. }
  324. /* Using GPIO as SPI CS pin */
  325. /* When SPI trans completed, GPIO cs pin should be released manually */
  326. hpm_stat_t hpm_spi_release_gpio_cs(spi_context_t *context)
  327. {
  328. hpm_stat_t stat;
  329. SPI_Type *ptr = context->ptr;
  330. assert(context->write_cs != NULL);
  331. stat = spi_wait_for_idle_status(ptr);
  332. if (stat != status_success) {
  333. return stat;
  334. }
  335. context->write_cs(context->cs_pin, !SPI_CS_ACTIVE);
  336. return status_success;
  337. }