drv_pdma.c 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149
  1. /**************************************************************************//**
  2. *
  3. * @copyright (C) 2020 Nuvoton Technology Corp. All rights reserved.
  4. *
  5. * SPDX-License-Identifier: Apache-2.0
  6. *
  7. * Change Logs:
  8. * Date Author Notes
  9. * 2020-11-11 Wayne First version
  10. *
  11. ******************************************************************************/
  12. #include <rtconfig.h>
  13. #if defined(BSP_USING_PDMA)
  14. #include <rtdevice.h>
  15. #include <rtthread.h>
  16. #include <drv_pdma.h>
  17. #include <nu_bitutil.h>
  18. #include "drv_sys.h"
  19. /* Private define ---------------------------------------------------------------*/
  20. // RT_DEV_NAME_PREFIX pdma
  21. #ifndef NU_PDMA_MEMFUN_ACTOR_MAX
  22. #define NU_PDMA_MEMFUN_ACTOR_MAX (4)
  23. #endif
  24. #define NU_PDMA_SG_TBL_MAXSIZE (NU_PDMA_SG_LIMITED_DISTANCE/sizeof(DSCT_T))
  25. #define NU_PDMA_CH_MAX (2*PDMA_CH_MAX) /* Specify maximum channels of PDMA */
  26. #define NU_PDMA_CH_Pos (0) /* Specify first channel number of PDMA */
  27. #define NU_PDMA_CH_Msk (((1 << NU_PDMA_CH_MAX) - 1) << NU_PDMA_CH_Pos)
  28. #define NU_PDMA_CH_HALF_Msk (((1 << PDMA_CH_MAX) - 1) << NU_PDMA_CH_Pos)
  29. #define NU_PDMA_GET_BASE(ch) (PDMA_T *)((((ch)/PDMA_CH_MAX)>0)?PDMA1_BA:PDMA0_BA)
  30. #define NU_PDMA_GET_MOD_CHIDX(ch) ((ch)%PDMA_CH_MAX)
  31. /* Private typedef --------------------------------------------------------------*/
  32. struct nu_pdma_periph_ctl
  33. {
  34. uint32_t m_u32Peripheral;
  35. nu_pdma_memctrl_t m_eMemCtl;
  36. };
  37. typedef struct nu_pdma_periph_ctl nu_pdma_periph_ctl_t;
  38. struct nu_pdma_chn
  39. {
  40. struct nu_pdma_chn_cb m_sCB_Event;
  41. struct nu_pdma_chn_cb m_sCB_Trigger;
  42. struct nu_pdma_chn_cb m_sCB_Disable;
  43. nu_pdma_desc_t *m_ppsSgtbl;
  44. uint32_t m_u32WantedSGTblNum;
  45. uint32_t m_u32EventFilter;
  46. uint32_t m_u32IdleTimeout_us;
  47. nu_pdma_periph_ctl_t m_spPeripCtl;
  48. };
  49. typedef struct nu_pdma_chn nu_pdma_chn_t;
  50. struct nu_pdma_memfun_actor
  51. {
  52. int m_i32ChannID;
  53. uint32_t m_u32Result;
  54. rt_sem_t m_psSemMemFun;
  55. } ;
  56. typedef struct nu_pdma_memfun_actor *nu_pdma_memfun_actor_t;
  57. /* Private functions ------------------------------------------------------------*/
  58. static int nu_pdma_peripheral_set(uint32_t u32PeriphType);
  59. static void nu_pdma_init(void);
  60. static void nu_pdma_channel_enable(int i32ChannID);
  61. static void nu_pdma_channel_disable(int i32ChannID);
  62. static void nu_pdma_channel_reset(int i32ChannID);
  63. static rt_err_t nu_pdma_timeout_set(int i32ChannID, int i32Timeout_us);
  64. static void nu_pdma_periph_ctrl_fill(int i32ChannID, int i32CtlPoolIdx);
  65. static rt_ssize_t nu_pdma_memfun(void *dest, void *src, uint32_t u32DataWidth, unsigned int u32TransferCnt, nu_pdma_memctrl_t eMemCtl);
  66. static void nu_pdma_memfun_cb(void *pvUserData, uint32_t u32Events);
  67. static void nu_pdma_memfun_actor_init(void);
  68. static int nu_pdma_memfun_employ(void);
  69. static int nu_pdma_non_transfer_count_get(int32_t i32ChannID);
  70. /* Public functions -------------------------------------------------------------*/
  71. /* Private variables ------------------------------------------------------------*/
  72. static volatile int nu_pdma_inited = 0;
  73. static volatile uint32_t nu_pdma_chn_mask = 0;
  74. static nu_pdma_chn_t nu_pdma_chn_arr[NU_PDMA_CH_MAX];
  75. static volatile uint32_t nu_pdma_memfun_actor_mask = 0;
  76. static volatile uint32_t nu_pdma_memfun_actor_maxnum = 0;
  77. static rt_sem_t nu_pdma_memfun_actor_pool_sem = RT_NULL;
  78. static rt_mutex_t nu_pdma_memfun_actor_pool_lock = RT_NULL;
  79. static void nu_pdma_isr(int vector, void *pvdata);
  80. static const nu_pdma_periph_ctl_t g_nu_pdma_peripheral_ctl_pool[ ] =
  81. {
  82. // M2M
  83. { PDMA_MEM, eMemCtl_SrcInc_DstInc },
  84. // M2P
  85. { PDMA_UART0_TX, eMemCtl_SrcInc_DstFix },
  86. { PDMA_UART1_TX, eMemCtl_SrcInc_DstFix },
  87. { PDMA_UART2_TX, eMemCtl_SrcInc_DstFix },
  88. { PDMA_UART3_TX, eMemCtl_SrcInc_DstFix },
  89. { PDMA_UART4_TX, eMemCtl_SrcInc_DstFix },
  90. { PDMA_UART5_TX, eMemCtl_SrcInc_DstFix },
  91. { PDMA_UART6_TX, eMemCtl_SrcInc_DstFix },
  92. { PDMA_UART7_TX, eMemCtl_SrcInc_DstFix },
  93. { PDMA_UART8_TX, eMemCtl_SrcInc_DstFix },
  94. { PDMA_UART9_TX, eMemCtl_SrcInc_DstFix },
  95. { PDMA_QSPI0_TX, eMemCtl_SrcInc_DstFix },
  96. { PDMA_SPI0_TX, eMemCtl_SrcInc_DstFix },
  97. { PDMA_SPI1_TX, eMemCtl_SrcInc_DstFix },
  98. { PDMA_I2C0_TX, eMemCtl_SrcInc_DstFix },
  99. { PDMA_I2C1_TX, eMemCtl_SrcInc_DstFix },
  100. { PDMA_I2C2_TX, eMemCtl_SrcInc_DstFix },
  101. { PDMA_I2C3_TX, eMemCtl_SrcInc_DstFix },
  102. // P2M
  103. { PDMA_UART0_RX, eMemCtl_SrcFix_DstInc },
  104. { PDMA_UART1_RX, eMemCtl_SrcFix_DstInc },
  105. { PDMA_UART2_RX, eMemCtl_SrcFix_DstInc },
  106. { PDMA_UART3_RX, eMemCtl_SrcFix_DstInc },
  107. { PDMA_UART4_RX, eMemCtl_SrcFix_DstInc },
  108. { PDMA_UART5_RX, eMemCtl_SrcFix_DstInc },
  109. { PDMA_UART6_RX, eMemCtl_SrcFix_DstInc },
  110. { PDMA_UART7_RX, eMemCtl_SrcFix_DstInc },
  111. { PDMA_UART8_RX, eMemCtl_SrcFix_DstInc },
  112. { PDMA_UART9_RX, eMemCtl_SrcFix_DstInc },
  113. { PDMA_QSPI0_RX, eMemCtl_SrcFix_DstInc },
  114. { PDMA_SPI0_RX, eMemCtl_SrcFix_DstInc },
  115. { PDMA_SPI1_RX, eMemCtl_SrcFix_DstInc },
  116. { PDMA_I2C0_RX, eMemCtl_SrcFix_DstInc },
  117. { PDMA_I2C1_RX, eMemCtl_SrcFix_DstInc },
  118. { PDMA_I2C2_RX, eMemCtl_SrcFix_DstInc },
  119. { PDMA_I2C3_RX, eMemCtl_SrcFix_DstInc },
  120. };
  121. #define NU_PERIPHERAL_SIZE ( sizeof(g_nu_pdma_peripheral_ctl_pool) / sizeof(g_nu_pdma_peripheral_ctl_pool[0]) )
  122. static struct nu_pdma_memfun_actor nu_pdma_memfun_actor_arr[NU_PDMA_MEMFUN_ACTOR_MAX];
  123. static int nu_pdma_peripheral_set(uint32_t u32PeriphType)
  124. {
  125. int idx = 0;
  126. while (idx < NU_PERIPHERAL_SIZE)
  127. {
  128. if (g_nu_pdma_peripheral_ctl_pool[idx].m_u32Peripheral == u32PeriphType)
  129. return idx;
  130. idx++;
  131. }
  132. // Not such peripheral
  133. return -1;
  134. }
  135. static void nu_pdma_periph_ctrl_fill(int i32ChannID, int i32CtlPoolIdx)
  136. {
  137. nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
  138. psPdmaChann->m_spPeripCtl.m_u32Peripheral = g_nu_pdma_peripheral_ctl_pool[i32CtlPoolIdx].m_u32Peripheral;
  139. psPdmaChann->m_spPeripCtl.m_eMemCtl = g_nu_pdma_peripheral_ctl_pool[i32CtlPoolIdx].m_eMemCtl;
  140. }
  141. static void nu_pdma_init(void)
  142. {
  143. if (nu_pdma_inited)
  144. return;
  145. nu_pdma_chn_mask = ~(NU_PDMA_CH_Msk);
  146. rt_memset(nu_pdma_chn_arr, 0x00, NU_PDMA_CH_MAX * sizeof(nu_pdma_chn_t));
  147. nu_sys_ipclk_enable(PDMA0CKEN);
  148. nu_sys_ipclk_enable(PDMA1CKEN);
  149. nu_sys_ip_reset(PDMA0RST);
  150. nu_sys_ip_reset(PDMA1RST);
  151. /* Initialize PDMA0 setting */
  152. PDMA_Open(PDMA0, NU_PDMA_CH_HALF_Msk);
  153. PDMA_Close(PDMA0);
  154. /* Register PDMA0 ISR */
  155. rt_hw_interrupt_install(IRQ_PDMA0, nu_pdma_isr, (void *)PDMA0, "pdma0");
  156. rt_hw_interrupt_umask(IRQ_PDMA0);
  157. /* Initialize PDMA1 setting */
  158. PDMA_Open(PDMA1, NU_PDMA_CH_HALF_Msk);
  159. PDMA_Close(PDMA1);
  160. /* Register PDMA1 ISR */
  161. rt_hw_interrupt_install(IRQ_PDMA1, nu_pdma_isr, (void *)PDMA1, "pdma1");
  162. rt_hw_interrupt_umask(IRQ_PDMA1);
  163. /* Assign first SG table address to SRAM's start address */
  164. PDMA0->SCATBA = PDMA1->SCATBA = BOARD_SDRAM_START;
  165. nu_pdma_inited = 1;
  166. }
  167. static void nu_pdma_channel_enable(int i32ChannID)
  168. {
  169. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  170. PDMA_Open(PDMA, 1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  171. }
  172. static inline void nu_pdma_channel_disable(int i32ChannID)
  173. {
  174. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  175. PDMA->CHCTL &= ~(1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  176. }
  177. static inline void nu_pdma_channel_reset(int i32ChannID)
  178. {
  179. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  180. PDMA->CHRST = (1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  181. }
  182. void nu_pdma_channel_terminate(int i32ChannID)
  183. {
  184. PDMA_T *PDMA;
  185. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  186. goto exit_pdma_channel_terminate;
  187. PDMA = NU_PDMA_GET_BASE(i32ChannID);
  188. // Reset specified channel ID
  189. nu_pdma_channel_reset(i32ChannID);
  190. // Clean descriptor table control register.
  191. PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL = 0UL;
  192. PDMA->CHCTL |= (1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  193. exit_pdma_channel_terminate:
  194. return;
  195. }
  196. static rt_err_t nu_pdma_timeout_set(int i32ChannID, int i32Timeout_us)
  197. {
  198. rt_err_t ret = -RT_EINVAL;
  199. PDMA_T *PDMA = NULL;
  200. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  201. goto exit_nu_pdma_timeout_set;
  202. PDMA = NU_PDMA_GET_BASE(i32ChannID);
  203. nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32IdleTimeout_us = i32Timeout_us;
  204. if (i32Timeout_us && (NU_PDMA_GET_MOD_CHIDX(i32ChannID) < PDMA_CH_MAX)) // Limit
  205. {
  206. uint32_t u32HCLK = sysGetClock(SYS_HCLK) * 1000000;
  207. uint32_t u32ToClk_Max = u32HCLK / (1 << 8);
  208. uint32_t u32Divider = ((i32Timeout_us * u32ToClk_Max) / 1000000) / (1 << 16);
  209. uint32_t u32TOutCnt = ((i32Timeout_us * u32ToClk_Max) / 1000000) % (1 << 16);
  210. PDMA_DisableTimeout(PDMA, 1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  211. PDMA_EnableInt(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID), PDMA_INT_TIMEOUT); // Interrupt type
  212. if (u32Divider > 7)
  213. {
  214. u32Divider = 7;
  215. u32TOutCnt = (1 << 16);
  216. }
  217. PDMA->TOUTPSC |= (u32Divider << (PDMA_TOUTPSC_TOUTPSC1_Pos * NU_PDMA_GET_MOD_CHIDX(i32ChannID)));
  218. PDMA_SetTimeOut(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID), 1, u32TOutCnt);
  219. ret = RT_EOK;
  220. }
  221. else
  222. {
  223. PDMA_DisableInt(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID), PDMA_INT_TIMEOUT); // Interrupt type
  224. PDMA_DisableTimeout(PDMA, 1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  225. }
  226. exit_nu_pdma_timeout_set:
  227. return -(ret);
  228. }
  229. int nu_pdma_channel_allocate(int32_t i32PeripType)
  230. {
  231. int i, i32PeripCtlIdx;
  232. nu_pdma_init();
  233. if ((i32PeripCtlIdx = nu_pdma_peripheral_set(i32PeripType)) < 0)
  234. goto exit_nu_pdma_channel_allocate;
  235. /* Find the position of first '0' in nu_pdma_chn_mask. */
  236. i = nu_cto(nu_pdma_chn_mask);
  237. if (i != 32)
  238. {
  239. nu_pdma_chn_mask |= (1 << i);
  240. rt_memset(nu_pdma_chn_arr + i - NU_PDMA_CH_Pos, 0x00, sizeof(nu_pdma_chn_t));
  241. /* Set idx number of g_nu_pdma_peripheral_ctl_pool */
  242. nu_pdma_periph_ctrl_fill(i, i32PeripCtlIdx);
  243. /* Reset channel */
  244. nu_pdma_channel_reset(i);
  245. nu_pdma_channel_enable(i);
  246. return i;
  247. }
  248. exit_nu_pdma_channel_allocate:
  249. // No channel available
  250. return -(RT_ERROR);
  251. }
  252. rt_err_t nu_pdma_channel_free(int i32ChannID)
  253. {
  254. rt_err_t ret = -RT_EINVAL;
  255. if (! nu_pdma_inited)
  256. goto exit_nu_pdma_channel_free;
  257. if (i32ChannID < NU_PDMA_CH_MAX && i32ChannID >= NU_PDMA_CH_Pos)
  258. {
  259. nu_pdma_chn_mask &= ~(1 << i32ChannID);
  260. nu_pdma_channel_disable(i32ChannID);
  261. ret = RT_EOK;
  262. }
  263. exit_nu_pdma_channel_free:
  264. return -(ret);
  265. }
  266. rt_err_t nu_pdma_filtering_set(int i32ChannID, uint32_t u32EventFilter)
  267. {
  268. rt_err_t ret = -RT_EINVAL;
  269. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  270. goto exit_nu_pdma_filtering_set;
  271. nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32EventFilter = u32EventFilter;
  272. ret = RT_EOK;
  273. exit_nu_pdma_filtering_set:
  274. return -(ret) ;
  275. }
  276. uint32_t nu_pdma_filtering_get(int i32ChannID)
  277. {
  278. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  279. goto exit_nu_pdma_filtering_get;
  280. return nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32EventFilter;
  281. exit_nu_pdma_filtering_get:
  282. return 0;
  283. }
  284. rt_err_t nu_pdma_callback_register(int i32ChannID, nu_pdma_chn_cb_t psChnCb)
  285. {
  286. rt_err_t ret = -RT_EINVAL;
  287. nu_pdma_chn_cb_t psChnCb_Current = RT_NULL;
  288. RT_ASSERT(psChnCb != RT_NULL);
  289. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  290. goto exit_nu_pdma_callback_register;
  291. switch (psChnCb->m_eCBType)
  292. {
  293. case eCBType_Event:
  294. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Event;
  295. break;
  296. case eCBType_Trigger:
  297. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Trigger;
  298. break;
  299. case eCBType_Disable:
  300. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Disable;
  301. break;
  302. default:
  303. goto exit_nu_pdma_callback_register;
  304. }
  305. psChnCb_Current->m_pfnCBHandler = psChnCb->m_pfnCBHandler;
  306. psChnCb_Current->m_pvUserData = psChnCb->m_pvUserData;
  307. ret = RT_EOK;
  308. exit_nu_pdma_callback_register:
  309. return -(ret) ;
  310. }
  311. nu_pdma_cb_handler_t nu_pdma_callback_hijack(int i32ChannID, nu_pdma_cbtype_t eCBType, nu_pdma_chn_cb_t psChnCb_Hijack)
  312. {
  313. nu_pdma_chn_cb_t psChnCb_Current = RT_NULL;
  314. struct nu_pdma_chn_cb sChnCB_Tmp;
  315. RT_ASSERT(psChnCb_Hijack != NULL);
  316. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  317. goto exit_nu_pdma_callback_hijack;
  318. switch (eCBType)
  319. {
  320. case eCBType_Event:
  321. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Event;
  322. break;
  323. case eCBType_Trigger:
  324. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Trigger;
  325. break;
  326. case eCBType_Disable:
  327. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Disable;
  328. break;
  329. default:
  330. goto exit_nu_pdma_callback_hijack;
  331. }
  332. /* Backup */
  333. sChnCB_Tmp.m_pfnCBHandler = psChnCb_Current->m_pfnCBHandler;
  334. sChnCB_Tmp.m_pvUserData = psChnCb_Current->m_pvUserData;
  335. /* Update */
  336. psChnCb_Current->m_pfnCBHandler = psChnCb_Hijack->m_pfnCBHandler;
  337. psChnCb_Current->m_pvUserData = psChnCb_Hijack->m_pvUserData;
  338. /* Restore */
  339. psChnCb_Hijack->m_pfnCBHandler = sChnCB_Tmp.m_pfnCBHandler;
  340. psChnCb_Hijack->m_pvUserData = sChnCB_Tmp.m_pvUserData;
  341. exit_nu_pdma_callback_hijack:
  342. return sChnCB_Tmp.m_pfnCBHandler;
  343. }
  344. static int nu_pdma_non_transfer_count_get(int32_t i32ChannID)
  345. {
  346. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  347. return ((PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_TXCNT_Msk) >> PDMA_DSCT_CTL_TXCNT_Pos) + 1;
  348. }
  349. int nu_pdma_transferred_byte_get(int32_t i32ChannID, int32_t i32TriggerByteLen)
  350. {
  351. int i32BitWidth = 0;
  352. int cur_txcnt = 0;
  353. PDMA_T *PDMA;
  354. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  355. goto exit_nu_pdma_transferred_byte_get;
  356. PDMA = NU_PDMA_GET_BASE(i32ChannID);
  357. i32BitWidth = PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_TXWIDTH_Msk;
  358. i32BitWidth = (i32BitWidth == PDMA_WIDTH_8) ? 1 : (i32BitWidth == PDMA_WIDTH_16) ? 2 : (i32BitWidth == PDMA_WIDTH_32) ? 4 : 0;
  359. cur_txcnt = nu_pdma_non_transfer_count_get(i32ChannID);
  360. return (i32TriggerByteLen - (cur_txcnt) * i32BitWidth);
  361. exit_nu_pdma_transferred_byte_get:
  362. return -1;
  363. }
  364. nu_pdma_memctrl_t nu_pdma_channel_memctrl_get(int i32ChannID)
  365. {
  366. nu_pdma_memctrl_t eMemCtrl = eMemCtl_Undefined;
  367. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  368. goto exit_nu_pdma_channel_memctrl_get;
  369. eMemCtrl = nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl.m_eMemCtl;
  370. exit_nu_pdma_channel_memctrl_get:
  371. return eMemCtrl;
  372. }
  373. rt_err_t nu_pdma_channel_memctrl_set(int i32ChannID, nu_pdma_memctrl_t eMemCtrl)
  374. {
  375. rt_err_t ret = -RT_EINVAL;
  376. nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
  377. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  378. goto exit_nu_pdma_channel_memctrl_set;
  379. else if ((eMemCtrl < eMemCtl_SrcFix_DstFix) || (eMemCtrl > eMemCtl_SrcInc_DstInc))
  380. goto exit_nu_pdma_channel_memctrl_set;
  381. /* PDMA_MEM/SAR_FIX/BURST mode is not supported. */
  382. if ((psPdmaChann->m_spPeripCtl.m_u32Peripheral == PDMA_MEM) &&
  383. ((eMemCtrl == eMemCtl_SrcFix_DstInc) || (eMemCtrl == eMemCtl_SrcFix_DstFix)))
  384. goto exit_nu_pdma_channel_memctrl_set;
  385. nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl.m_eMemCtl = eMemCtrl;
  386. ret = RT_EOK;
  387. exit_nu_pdma_channel_memctrl_set:
  388. return -(ret);
  389. }
  390. static void nu_pdma_channel_memctrl_fill(nu_pdma_memctrl_t eMemCtl, uint32_t *pu32SrcCtl, uint32_t *pu32DstCtl)
  391. {
  392. switch ((int)eMemCtl)
  393. {
  394. case eMemCtl_SrcFix_DstFix:
  395. *pu32SrcCtl = PDMA_SAR_FIX;
  396. *pu32DstCtl = PDMA_DAR_FIX;
  397. break;
  398. case eMemCtl_SrcFix_DstInc:
  399. *pu32SrcCtl = PDMA_SAR_FIX;
  400. *pu32DstCtl = PDMA_DAR_INC;
  401. break;
  402. case eMemCtl_SrcInc_DstFix:
  403. *pu32SrcCtl = PDMA_SAR_INC;
  404. *pu32DstCtl = PDMA_DAR_FIX;
  405. break;
  406. case eMemCtl_SrcInc_DstInc:
  407. *pu32SrcCtl = PDMA_SAR_INC;
  408. *pu32DstCtl = PDMA_DAR_INC;
  409. break;
  410. default:
  411. break;
  412. }
  413. }
  414. /* This is for Scatter-gather DMA. */
  415. rt_err_t nu_pdma_desc_setup(int i32ChannID, nu_pdma_desc_t dma_desc, uint32_t u32DataWidth, uint32_t u32AddrSrc,
  416. uint32_t u32AddrDst, int32_t i32TransferCnt, nu_pdma_desc_t next, uint32_t u32BeSilent)
  417. {
  418. nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
  419. PDMA_T *PDMA = NULL;
  420. uint32_t u32SrcCtl = 0;
  421. uint32_t u32DstCtl = 0;
  422. rt_err_t ret = -RT_EINVAL;
  423. if (!dma_desc)
  424. goto exit_nu_pdma_desc_setup;
  425. else if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  426. goto exit_nu_pdma_desc_setup;
  427. else if (!(u32DataWidth == 8 || u32DataWidth == 16 || u32DataWidth == 32))
  428. goto exit_nu_pdma_desc_setup;
  429. else if ((u32AddrSrc % (u32DataWidth / 8)) || (u32AddrDst % (u32DataWidth / 8)))
  430. goto exit_nu_pdma_desc_setup;
  431. else if (i32TransferCnt > NU_PDMA_MAX_TXCNT)
  432. goto exit_nu_pdma_desc_setup;
  433. PDMA = NU_PDMA_GET_BASE(i32ChannID);
  434. psPeriphCtl = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl;
  435. nu_pdma_channel_memctrl_fill(psPeriphCtl->m_eMemCtl, &u32SrcCtl, &u32DstCtl);
  436. dma_desc->CTL = ((i32TransferCnt - 1) << PDMA_DSCT_CTL_TXCNT_Pos) |
  437. ((u32DataWidth == 8) ? PDMA_WIDTH_8 : (u32DataWidth == 16) ? PDMA_WIDTH_16 : PDMA_WIDTH_32) |
  438. u32SrcCtl |
  439. u32DstCtl |
  440. PDMA_OP_BASIC;
  441. dma_desc->SA = u32AddrSrc;
  442. dma_desc->DA = u32AddrDst;
  443. dma_desc->NEXT = 0; /* Terminating node by default. */
  444. if (psPeriphCtl->m_u32Peripheral == PDMA_MEM)
  445. {
  446. /* For M2M transfer */
  447. dma_desc->CTL |= (PDMA_REQ_BURST | PDMA_BURST_32);
  448. }
  449. else
  450. {
  451. /* For P2M and M2P transfer */
  452. dma_desc->CTL |= (PDMA_REQ_SINGLE);
  453. }
  454. if (next)
  455. {
  456. /* Link to Next and modify to scatter-gather DMA mode. */
  457. dma_desc->CTL = (dma_desc->CTL & ~PDMA_DSCT_CTL_OPMODE_Msk) | PDMA_OP_SCATTER;
  458. dma_desc->NEXT = (uint32_t)next - (PDMA->SCATBA);
  459. }
  460. /* Be silent */
  461. if (u32BeSilent)
  462. dma_desc->CTL |= PDMA_DSCT_CTL_TBINTDIS_Msk;
  463. ret = RT_EOK;
  464. exit_nu_pdma_desc_setup:
  465. return -(ret);
  466. }
  467. rt_err_t nu_pdma_sgtbls_allocate(nu_pdma_desc_t *ppsSgtbls, int num)
  468. {
  469. int i;
  470. nu_pdma_desc_t psSgTblHead;
  471. RT_ASSERT(ppsSgtbls != NULL);
  472. RT_ASSERT(num > 0);
  473. psSgTblHead = (nu_pdma_desc_t) rt_malloc_align(RT_ALIGN(sizeof(DSCT_T) * num, 32), 32);
  474. RT_ASSERT(psSgTblHead != RT_NULL);
  475. rt_memset((void *)psSgTblHead, 0, sizeof(DSCT_T) * num);
  476. for (i = 0; i < num; i++)
  477. ppsSgtbls[i] = &psSgTblHead[i];
  478. return RT_EOK;
  479. }
  480. void nu_pdma_sgtbls_free(nu_pdma_desc_t *ppsSgtbls, int num)
  481. {
  482. nu_pdma_desc_t psSgTblHead;
  483. RT_ASSERT(ppsSgtbls != NULL);
  484. psSgTblHead = *ppsSgtbls;
  485. RT_ASSERT(psSgTblHead != NULL);
  486. rt_free_align(psSgTblHead);
  487. }
  488. static void _nu_pdma_transfer(int i32ChannID, uint32_t u32Peripheral, nu_pdma_desc_t head, uint32_t u32IdleTimeout_us)
  489. {
  490. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  491. nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
  492. #if defined(BSP_USING_MMU)
  493. /* Writeback data in dcache to memory before transferring. */
  494. {
  495. static uint32_t bNonCacheAlignedWarning = 1;
  496. nu_pdma_desc_t next = head;
  497. while (next != RT_NULL)
  498. {
  499. uint32_t u32TxCnt = ((next->CTL & PDMA_DSCT_CTL_TXCNT_Msk) >> PDMA_DSCT_CTL_TXCNT_Pos) + 1;
  500. uint32_t u32DataWidth = (1 << ((next->CTL & PDMA_DSCT_CTL_TXWIDTH_Msk) >> PDMA_DSCT_CTL_TXWIDTH_Pos));
  501. uint32_t u32SrcCtl = (next->CTL & PDMA_DSCT_CTL_SAINC_Msk);
  502. uint32_t u32DstCtl = (next->CTL & PDMA_DSCT_CTL_DAINC_Msk);
  503. uint32_t u32FlushLen = u32TxCnt * u32DataWidth;
  504. #if 0
  505. rt_kprintf("[%s] i32ChannID=%d\n", __func__, i32ChannID);
  506. rt_kprintf("[%s] PDMA=0x%08x\n", __func__, (uint32_t)PDMA);
  507. rt_kprintf("[%s] u32TxCnt=%d\n", __func__, u32TxCnt);
  508. rt_kprintf("[%s] u32DataWidth=%d\n", __func__, u32DataWidth);
  509. rt_kprintf("[%s] u32SrcCtl=0x%08x\n", __func__, u32SrcCtl);
  510. rt_kprintf("[%s] u32DstCtl=0x%08x\n", __func__, u32DstCtl);
  511. rt_kprintf("[%s] u32FlushLen=%d\n", __func__, u32FlushLen);
  512. #endif
  513. /* Flush Src buffer into memory. */
  514. if ((u32SrcCtl == PDMA_SAR_INC)) // for M2P, M2M
  515. mmu_clean_invalidated_dcache(next->SA, u32FlushLen);
  516. /* Flush Dst buffer into memory. */
  517. if ((u32DstCtl == PDMA_DAR_INC)) // for P2M, M2M
  518. mmu_clean_invalidated_dcache(next->DA, u32FlushLen);
  519. /* Flush descriptor into memory */
  520. if (!((rt_uint32_t)next & NONCACHEABLE))
  521. mmu_clean_invalidated_dcache((rt_uint32_t)next, sizeof(DSCT_T));
  522. if (bNonCacheAlignedWarning)
  523. {
  524. if ((u32FlushLen & (CACHE_LINE_SIZE - 1)) ||
  525. (next->SA & (CACHE_LINE_SIZE - 1)) ||
  526. (next->DA & (CACHE_LINE_SIZE - 1)) ||
  527. ((rt_uint32_t)next & (CACHE_LINE_SIZE - 1)))
  528. {
  529. /*
  530. Race-condition avoidance between DMA-transferring and DCache write-back:
  531. Source, destination, DMA descriptor address and length should be aligned at len(CACHE_LINE_SIZE)
  532. */
  533. bNonCacheAlignedWarning = 0;
  534. rt_kprintf("[PDMA-W]\n");
  535. }
  536. }
  537. next = (nu_pdma_desc_t)next->NEXT;
  538. if (next == head) break;
  539. }
  540. }
  541. #endif
  542. PDMA_DisableTimeout(PDMA, 1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  543. PDMA_EnableInt(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID), PDMA_INT_TRANS_DONE);
  544. nu_pdma_timeout_set(i32ChannID, u32IdleTimeout_us);
  545. /* Set scatter-gather mode and head */
  546. /* Take care the head structure, you should make sure cache-coherence. */
  547. PDMA_SetTransferMode(PDMA,
  548. NU_PDMA_GET_MOD_CHIDX(i32ChannID),
  549. u32Peripheral,
  550. (head->NEXT != 0) ? 1 : 0,
  551. (uint32_t)head);
  552. /* If peripheral is M2M, trigger it. */
  553. if (u32Peripheral == PDMA_MEM)
  554. {
  555. PDMA_Trigger(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  556. }
  557. else if (psPdmaChann->m_sCB_Trigger.m_pfnCBHandler)
  558. {
  559. psPdmaChann->m_sCB_Trigger.m_pfnCBHandler(psPdmaChann->m_sCB_Trigger.m_pvUserData, psPdmaChann->m_sCB_Trigger.m_u32Reserved);
  560. }
  561. }
  562. static void _nu_pdma_free_sgtbls(nu_pdma_chn_t *psPdmaChann)
  563. {
  564. if (psPdmaChann->m_ppsSgtbl)
  565. {
  566. nu_pdma_sgtbls_free(psPdmaChann->m_ppsSgtbl, psPdmaChann->m_u32WantedSGTblNum);
  567. rt_free_align((void *)psPdmaChann->m_ppsSgtbl);
  568. psPdmaChann->m_ppsSgtbl = RT_NULL;
  569. psPdmaChann->m_u32WantedSGTblNum = 0;
  570. }
  571. }
  572. static rt_err_t _nu_pdma_transfer_chain(int i32ChannID, uint32_t u32DataWidth, uint32_t u32AddrSrc, uint32_t u32AddrDst, uint32_t u32TransferCnt, uint32_t u32IdleTimeout_us)
  573. {
  574. int i = 0;
  575. rt_err_t ret = -RT_ERROR;
  576. nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
  577. nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
  578. nu_pdma_memctrl_t eMemCtl = nu_pdma_channel_memctrl_get(i32ChannID);
  579. rt_uint32_t u32Offset = 0;
  580. rt_uint32_t u32TxCnt = 0;
  581. psPeriphCtl = &psPdmaChann->m_spPeripCtl;
  582. if (psPdmaChann->m_u32WantedSGTblNum != (u32TransferCnt / NU_PDMA_MAX_TXCNT + 1))
  583. {
  584. if (psPdmaChann->m_u32WantedSGTblNum > 0)
  585. _nu_pdma_free_sgtbls(psPdmaChann);
  586. psPdmaChann->m_u32WantedSGTblNum = u32TransferCnt / NU_PDMA_MAX_TXCNT + 1;
  587. psPdmaChann->m_ppsSgtbl = (nu_pdma_desc_t *)rt_malloc_align(sizeof(nu_pdma_desc_t) * psPdmaChann->m_u32WantedSGTblNum, 4);
  588. if (!psPdmaChann->m_ppsSgtbl)
  589. goto exit__nu_pdma_transfer_chain;
  590. ret = nu_pdma_sgtbls_allocate(psPdmaChann->m_ppsSgtbl, psPdmaChann->m_u32WantedSGTblNum);
  591. if (ret != RT_EOK)
  592. goto exit__nu_pdma_transfer_chain;
  593. }
  594. for (i = 0; i < psPdmaChann->m_u32WantedSGTblNum; i++)
  595. {
  596. u32TxCnt = (u32TransferCnt > NU_PDMA_MAX_TXCNT) ? NU_PDMA_MAX_TXCNT : u32TransferCnt;
  597. ret = nu_pdma_desc_setup(i32ChannID,
  598. psPdmaChann->m_ppsSgtbl[i],
  599. u32DataWidth,
  600. (eMemCtl & 0x2ul) ? u32AddrSrc + u32Offset : u32AddrSrc, /* Src address is Inc or not. */
  601. (eMemCtl & 0x1ul) ? u32AddrDst + u32Offset : u32AddrDst, /* Dst address is Inc or not. */
  602. u32TxCnt,
  603. ((i + 1) == psPdmaChann->m_u32WantedSGTblNum) ? RT_NULL : psPdmaChann->m_ppsSgtbl[i + 1],
  604. ((i + 1) == psPdmaChann->m_u32WantedSGTblNum) ? 0 : 1); // Silent, w/o TD interrupt
  605. if (ret != RT_EOK)
  606. goto exit__nu_pdma_transfer_chain;
  607. u32TransferCnt -= u32TxCnt;
  608. u32Offset += (u32TxCnt * u32DataWidth / 8);
  609. }
  610. _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, psPdmaChann->m_ppsSgtbl[0], u32IdleTimeout_us);
  611. ret = RT_EOK;
  612. return ret;
  613. exit__nu_pdma_transfer_chain:
  614. _nu_pdma_free_sgtbls(psPdmaChann);
  615. return -(ret);
  616. }
  617. rt_err_t nu_pdma_transfer(int i32ChannID, uint32_t u32DataWidth, uint32_t u32AddrSrc, uint32_t u32AddrDst, uint32_t u32TransferCnt, uint32_t u32IdleTimeout_us)
  618. {
  619. rt_err_t ret = -RT_EINVAL;
  620. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  621. nu_pdma_desc_t head;
  622. nu_pdma_chn_t *psPdmaChann;
  623. nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
  624. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  625. goto exit_nu_pdma_transfer;
  626. else if (!u32TransferCnt)
  627. goto exit_nu_pdma_transfer;
  628. else if (u32TransferCnt > NU_PDMA_MAX_TXCNT)
  629. return _nu_pdma_transfer_chain(i32ChannID, u32DataWidth, u32AddrSrc, u32AddrDst, u32TransferCnt, u32IdleTimeout_us);
  630. psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
  631. psPeriphCtl = &psPdmaChann->m_spPeripCtl;
  632. head = &PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)];
  633. ret = nu_pdma_desc_setup(i32ChannID,
  634. head,
  635. u32DataWidth,
  636. u32AddrSrc,
  637. u32AddrDst,
  638. u32TransferCnt,
  639. RT_NULL,
  640. 0);
  641. if (ret != RT_EOK)
  642. goto exit_nu_pdma_transfer;
  643. _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, head, u32IdleTimeout_us);
  644. ret = RT_EOK;
  645. exit_nu_pdma_transfer:
  646. return -(ret);
  647. }
  648. rt_err_t nu_pdma_sg_transfer(int i32ChannID, nu_pdma_desc_t head, uint32_t u32IdleTimeout_us)
  649. {
  650. rt_err_t ret = -RT_EINVAL;
  651. nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
  652. if (!head)
  653. goto exit_nu_pdma_sg_transfer;
  654. else if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  655. goto exit_nu_pdma_sg_transfer;
  656. psPeriphCtl = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl;
  657. _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, head, u32IdleTimeout_us);
  658. ret = RT_EOK;
  659. exit_nu_pdma_sg_transfer:
  660. return -(ret);
  661. }
/**
 * Shared interrupt service routine for one PDMA controller instance.
 *
 * @param vector  Interrupt vector number (unused here).
 * @param pvdata  The PDMA_T register base this IRQ belongs to.
 *
 * Snapshots all per-channel status flags, acknowledges them at the
 * controller, then walks the combined bit mask and dispatches the
 * registered per-channel callbacks for each asserted channel.
 */
static void nu_pdma_isr(int vector, void *pvdata)
{
    int i;
    PDMA_T *PDMA = (void *)pvdata;

    /* Snapshot every status register before clearing anything, so one
       coherent view of this IRQ's causes is dispatched below. */
    uint32_t intsts = PDMA_GET_INT_STATUS(PDMA);
    uint32_t abtsts = PDMA_GET_ABORT_STS(PDMA);
    uint32_t tdsts = PDMA_GET_TD_STS(PDMA);
    uint32_t unalignsts = PDMA_GET_ALIGN_STS(PDMA);

    /* Request-timeout flags live in INTSTS itself; shift them down into a
       per-channel bit mask aligned with the other status masks. */
    uint32_t reqto = intsts & PDMA_INTSTS_REQTOFn_Msk;
    uint32_t reqto_ch = (reqto >> PDMA_INTSTS_REQTOF0_Pos);

    /* Union of all channels that need servicing this interrupt. */
    int allch_sts = (reqto_ch | tdsts | abtsts | unalignsts);

    // Abort
    if (intsts & PDMA_INTSTS_ABTIF_Msk)
    {
        // Clear all Abort flags
        PDMA_CLR_ABORT_FLAG(PDMA, abtsts);
    }

    // Transfer done
    if (intsts & PDMA_INTSTS_TDIF_Msk)
    {
        // Clear all transfer done flags
        PDMA_CLR_TD_FLAG(PDMA, tdsts);
    }

    // Unaligned
    if (intsts & PDMA_INTSTS_ALIGNF_Msk)
    {
        // Clear all Unaligned flags
        PDMA_CLR_ALIGN_FLAG(PDMA, unalignsts);
    }

    // Timeout
    if (reqto)
    {
        // Clear all Timeout flags
        PDMA->INTSTS = reqto;
    }

    // Find the position of first '1' in allch_sts.
    while ((i = nu_ctz(allch_sts)) != 32)
    {
        int j = i;
        int ch_mask = (1 << i);

        /* Channel ids are global across both controllers: PDMA1's channels
           sit after PDMA0's in nu_pdma_chn_arr. */
        if (PDMA == PDMA1)
        {
            j += PDMA_CH_MAX;
        }

        /* Only dispatch for channels that are actually allocated. */
        if (nu_pdma_chn_mask & (1 << j))
        {
            int ch_event = 0;
            nu_pdma_chn_t *dma_chn = nu_pdma_chn_arr + j - NU_PDMA_CH_Pos;

            if (dma_chn->m_sCB_Event.m_pfnCBHandler)
            {
                /* Translate the hardware flags into driver event bits. */
                if (abtsts & ch_mask)
                {
                    ch_event |= NU_PDMA_EVENT_ABORT;
                }
                if (tdsts & ch_mask)
                {
                    ch_event |= NU_PDMA_EVENT_TRANSFER_DONE;
                }
                if (unalignsts & ch_mask)
                {
                    ch_event |= NU_PDMA_EVENT_ALIGNMENT;
                }
                if (reqto_ch & ch_mask)
                {
                    /* Stop the hardware timeout counter before notifying;
                       it is re-armed after the callbacks below. */
                    PDMA_DisableTimeout(PDMA, ch_mask);
                    ch_event |= NU_PDMA_EVENT_TIMEOUT;
                }

                /* "Disable" hook runs first, unconditionally for this channel. */
                if (dma_chn->m_sCB_Disable.m_pfnCBHandler)
                    dma_chn->m_sCB_Disable.m_pfnCBHandler(dma_chn->m_sCB_Disable.m_pvUserData, dma_chn->m_sCB_Disable.m_u32Reserved);

                /* Event hook only fires for events the user subscribed to. */
                if (dma_chn->m_u32EventFilter & ch_event)
                    dma_chn->m_sCB_Event.m_pfnCBHandler(dma_chn->m_sCB_Event.m_pvUserData, ch_event);

                /* Re-arm the idle timeout that was disabled above. */
                if (reqto_ch & ch_mask)
                    nu_pdma_timeout_set(j, nu_pdma_chn_arr[j - NU_PDMA_CH_Pos].m_u32IdleTimeout_us);
            }//if(dma_chn->handler)

        } //if (nu_pdma_chn_mask & ch_mask)

        // Clear the served bit.
        allch_sts &= ~ch_mask;
    } //while
}
  741. static void nu_pdma_memfun_actor_init(void)
  742. {
  743. int i = 0 ;
  744. nu_pdma_init();
  745. for (i = 0; i < NU_PDMA_MEMFUN_ACTOR_MAX; i++)
  746. {
  747. rt_memset(&nu_pdma_memfun_actor_arr[i], 0, sizeof(struct nu_pdma_memfun_actor));
  748. if (-(RT_ERROR) != (nu_pdma_memfun_actor_arr[i].m_i32ChannID = nu_pdma_channel_allocate(PDMA_MEM)))
  749. {
  750. nu_pdma_memfun_actor_arr[i].m_psSemMemFun = rt_sem_create("memactor_sem", 0, RT_IPC_FLAG_FIFO);
  751. }
  752. else
  753. break;
  754. }
  755. if (i)
  756. {
  757. nu_pdma_memfun_actor_maxnum = i;
  758. nu_pdma_memfun_actor_mask = ~(((1 << i) - 1));
  759. nu_pdma_memfun_actor_pool_sem = rt_sem_create("mempool_sem", nu_pdma_memfun_actor_maxnum, RT_IPC_FLAG_FIFO);
  760. RT_ASSERT(nu_pdma_memfun_actor_pool_sem != RT_NULL);
  761. nu_pdma_memfun_actor_pool_lock = rt_mutex_create("mempool_lock", RT_IPC_FLAG_PRIO);
  762. RT_ASSERT(nu_pdma_memfun_actor_pool_lock != RT_NULL);
  763. }
  764. }
  765. static void nu_pdma_memfun_cb(void *pvUserData, uint32_t u32Events)
  766. {
  767. rt_err_t result = RT_EOK;
  768. nu_pdma_memfun_actor_t psMemFunActor = (nu_pdma_memfun_actor_t)pvUserData;
  769. psMemFunActor->m_u32Result = u32Events;
  770. result = rt_sem_release(psMemFunActor->m_psSemMemFun);
  771. RT_ASSERT(result == RT_EOK);
  772. }
  773. static int nu_pdma_memfun_employ(void)
  774. {
  775. int idx = -1 ;
  776. rt_err_t result = RT_EOK;
  777. /* Headhunter */
  778. if (nu_pdma_memfun_actor_pool_sem &&
  779. ((result = rt_sem_take(nu_pdma_memfun_actor_pool_sem, RT_WAITING_FOREVER)) == RT_EOK))
  780. {
  781. result = rt_mutex_take(nu_pdma_memfun_actor_pool_lock, RT_WAITING_FOREVER);
  782. RT_ASSERT(result == RT_EOK);
  783. /* Find the position of first '0' in nu_pdma_memfun_actor_mask. */
  784. idx = nu_cto(nu_pdma_memfun_actor_mask);
  785. if (idx != 32)
  786. {
  787. nu_pdma_memfun_actor_mask |= (1 << idx);
  788. }
  789. else
  790. {
  791. idx = -1;
  792. }
  793. result = rt_mutex_release(nu_pdma_memfun_actor_pool_lock);
  794. RT_ASSERT(result == RT_EOK);
  795. }
  796. return idx;
  797. }
  798. static rt_ssize_t nu_pdma_memfun(void *dest, void *src, uint32_t u32DataWidth, unsigned int u32TransferCnt, nu_pdma_memctrl_t eMemCtl)
  799. {
  800. nu_pdma_memfun_actor_t psMemFunActor = NULL;
  801. struct nu_pdma_chn_cb sChnCB;
  802. rt_err_t result = -RT_ERROR;
  803. int idx;
  804. rt_size_t ret = 0;
  805. /* Employ actor */
  806. while ((idx = nu_pdma_memfun_employ()) < 0);
  807. psMemFunActor = &nu_pdma_memfun_actor_arr[idx];
  808. /* Set PDMA memory control to eMemCtl. */
  809. nu_pdma_channel_memctrl_set(psMemFunActor->m_i32ChannID, eMemCtl);
  810. /* Register ISR callback function */
  811. sChnCB.m_eCBType = eCBType_Event;
  812. sChnCB.m_pfnCBHandler = nu_pdma_memfun_cb;
  813. sChnCB.m_pvUserData = (void *)psMemFunActor;
  814. nu_pdma_filtering_set(psMemFunActor->m_i32ChannID, NU_PDMA_EVENT_ABORT | NU_PDMA_EVENT_TRANSFER_DONE);
  815. nu_pdma_callback_register(psMemFunActor->m_i32ChannID, &sChnCB);
  816. psMemFunActor->m_u32Result = 0;
  817. /* Trigger it */
  818. nu_pdma_transfer(psMemFunActor->m_i32ChannID,
  819. u32DataWidth,
  820. (uint32_t)src,
  821. (uint32_t)dest,
  822. u32TransferCnt,
  823. 0);
  824. /* Wait it done. */
  825. result = rt_sem_take(psMemFunActor->m_psSemMemFun, RT_WAITING_FOREVER);
  826. RT_ASSERT(result == RT_EOK);
  827. /* Give result if get NU_PDMA_EVENT_TRANSFER_DONE.*/
  828. if (psMemFunActor->m_u32Result & NU_PDMA_EVENT_TRANSFER_DONE)
  829. {
  830. ret += u32TransferCnt;
  831. }
  832. else
  833. {
  834. ret += (u32TransferCnt - nu_pdma_non_transfer_count_get(psMemFunActor->m_i32ChannID));
  835. }
  836. /* Terminate it if get ABORT event */
  837. if (psMemFunActor->m_u32Result & NU_PDMA_EVENT_ABORT)
  838. {
  839. nu_pdma_channel_terminate(psMemFunActor->m_i32ChannID);
  840. }
  841. result = rt_mutex_take(nu_pdma_memfun_actor_pool_lock, RT_WAITING_FOREVER);
  842. RT_ASSERT(result == RT_EOK);
  843. nu_pdma_memfun_actor_mask &= ~(1 << idx);
  844. result = rt_mutex_release(nu_pdma_memfun_actor_pool_lock);
  845. RT_ASSERT(result == RT_EOK);
  846. /* Fire actor */
  847. result = rt_sem_release(nu_pdma_memfun_actor_pool_sem);
  848. RT_ASSERT(result == RT_EOK);
  849. return ret;
  850. }
  851. rt_size_t nu_pdma_mempush(void *dest, void *src, uint32_t data_width, unsigned int transfer_count)
  852. {
  853. if (data_width == 8 || data_width == 16 || data_width == 32)
  854. return nu_pdma_memfun(dest, src, data_width, transfer_count, eMemCtl_SrcInc_DstFix);
  855. return 0;
  856. }
  857. void *nu_pdma_memcpy(void *dest, void *src, unsigned int count)
  858. {
  859. int i = 0;
  860. uint32_t u32Offset = 0;
  861. uint32_t u32Remaining = count;
  862. for (i = 4; (i > 0) && (u32Remaining > 0) ; i >>= 1)
  863. {
  864. uint32_t u32src = (uint32_t)src + u32Offset;
  865. uint32_t u32dest = (uint32_t)dest + u32Offset;
  866. if (((u32src % i) == (u32dest % i)) &&
  867. ((u32src % i) == 0) &&
  868. (RT_ALIGN_DOWN(u32Remaining, i) >= i))
  869. {
  870. uint32_t u32TXCnt = u32Remaining / i;
  871. if (u32TXCnt != nu_pdma_memfun((void *)u32dest, (void *)u32src, i * 8, u32TXCnt, eMemCtl_SrcInc_DstInc))
  872. goto exit_nu_pdma_memcpy;
  873. u32Offset += (u32TXCnt * i);
  874. u32Remaining -= (u32TXCnt * i);
  875. }
  876. }
  877. if (count == u32Offset)
  878. return dest;
  879. exit_nu_pdma_memcpy:
  880. return NULL;
  881. }
  882. /**
  883. * PDMA memfun actor initialization
  884. */
  885. int rt_hw_pdma_memfun_init(void)
  886. {
  887. nu_pdma_memfun_actor_init();
  888. return 0;
  889. }
  890. INIT_DEVICE_EXPORT(rt_hw_pdma_memfun_init);
  891. #endif // #if defined(BSP_USING_PDMA)