drv_pdma.c 32 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081
  1. /**************************************************************************//**
  2. *
  3. * @copyright (C) 2020 Nuvoton Technology Corp. All rights reserved.
  4. *
  5. * SPDX-License-Identifier: Apache-2.0
  6. *
  7. * Change Logs:
  8. * Date Author Notes
  9. * 2020-2-7 Wayne First version
  10. *
  11. ******************************************************************************/
  12. #include <rtconfig.h>
  13. #if defined(BSP_USING_PDMA)
  14. #include <rtdevice.h>
  15. #include <rtthread.h>
  16. #include <drv_pdma.h>
  17. #include <nu_bitutil.h>
  18. /* Private define ---------------------------------------------------------------*/
  19. // RT_DEV_NAME_PREFIX pdma
/* Number of worker channels reserved for the memory-copy service. */
#ifndef NU_PDMA_MEMFUN_ACTOR_MAX
#define NU_PDMA_MEMFUN_ACTOR_MAX (4)
#endif
/* Max SG descriptors reachable from SCATBA (NEXT is a limited offset). */
#define NU_PDMA_SG_TBL_MAXSIZE (NU_PDMA_SG_LIMITED_DISTANCE/sizeof(DSCT_T))
#define NU_PDMA_CH_MAX (2*PDMA_CH_MAX) /* Specify maximum channels of PDMA (two controllers) */
#define NU_PDMA_CH_Pos (0) /* Specify first channel number of PDMA */
#define NU_PDMA_CH_Msk (((1 << NU_PDMA_CH_MAX) - 1) << NU_PDMA_CH_Pos)
#define NU_PDMA_CH_HALF_Msk (((1 << PDMA_CH_MAX) - 1) << NU_PDMA_CH_Pos)
/* Global channel ID -> owning controller base (first half PDMA0, second half PDMA1). */
#define NU_PDMA_GET_BASE(ch) (PDMA_T *)((((ch)/PDMA_CH_MAX)>0)?PDMA1_BASE:PDMA0_BASE)
/* Global channel ID -> per-controller (modulo) channel index. */
#define NU_PDMA_GET_MOD_CHIDX(ch) ((ch)%PDMA_CH_MAX)
  30. /* Private typedef --------------------------------------------------------------*/
/* Transfer attributes bound to one peripheral request source. */
struct nu_pdma_periph_ctl
{
    uint32_t m_u32Peripheral;       /* PDMA request source (PDMA_MEM, PDMA_UARTx_TX, ...) */
    nu_pdma_memctrl_t m_eMemCtl;    /* src/dst address-increment mode for this source */
};
typedef struct nu_pdma_periph_ctl nu_pdma_periph_ctl_t;
/* Run-time state of one allocated PDMA channel. */
struct nu_pdma_chn
{
    nu_pdma_cb_handler_t m_pfnCBHandler;    /* user callback, invoked from the ISR */
    void *m_pvUserData;                     /* opaque pointer handed back to the callback */
    uint32_t m_u32EventFilter;              /* NU_PDMA_EVENT_* bits the user subscribed to */
    uint32_t m_u32IdleTimeout_us;           /* idle timeout in us; 0 = disabled */
    nu_pdma_periph_ctl_t m_spPeripCtl;      /* peripheral binding of this channel */
};
typedef struct nu_pdma_chn nu_pdma_chn_t;
/* One worker ("actor") of the memfun memory-copy service. */
struct nu_pdma_memfun_actor
{
    int m_i32ChannID;           /* PDMA_MEM channel owned by this actor */
    uint32_t m_u32Result;       /* event bits reported by nu_pdma_memfun_cb */
    rt_sem_t m_psSemMemFun;     /* signalled by the ISR callback on completion */
} ;
typedef struct nu_pdma_memfun_actor *nu_pdma_memfun_actor_t;
  53. /* Private functions ------------------------------------------------------------*/
/* Forward declarations of module-local helpers. */
static int nu_pdma_peripheral_set(uint32_t u32PeriphType);
static void nu_pdma_init(void);
static void nu_pdma_channel_enable(int i32ChannID);
static void nu_pdma_channel_disable(int i32ChannID);
static void nu_pdma_channel_reset(int i32ChannID);
static rt_err_t nu_pdma_timeout_set(int i32ChannID, int i32Timeout_us);
static void nu_pdma_periph_ctrl_fill(int i32ChannID, int i32CtlPoolIdx);
static rt_ssize_t nu_pdma_memfun(void *dest, void *src, uint32_t u32DataWidth, unsigned int count, nu_pdma_memctrl_t eMemCtl);
static void nu_pdma_memfun_cb(void *pvUserData, uint32_t u32Events);
static void nu_pdma_memfun_actor_init(void);
static int nu_pdma_memfun_employ(void);
static int nu_pdma_non_transfer_count_get(int32_t i32ChannID);
  66. /* Public functions -------------------------------------------------------------*/
  67. /* Private variables ------------------------------------------------------------*/
static volatile int nu_pdma_inited = 0;             /* one-shot init guard, see nu_pdma_init() */
static volatile uint32_t nu_pdma_chn_mask = 0;      /* bit set = channel allocated (or nonexistent) */
static nu_pdma_chn_t nu_pdma_chn_arr[NU_PDMA_CH_MAX];
static volatile uint32_t nu_pdma_memfun_actor_mask = 0;     /* bit set = memfun actor busy */
static volatile uint32_t nu_pdma_memfun_actor_maxnum = 0;   /* actors actually created */
static rt_sem_t nu_pdma_memfun_actor_pool_sem = RT_NULL;    /* counts free actors */
static rt_mutex_t nu_pdma_memfun_actor_pool_lock = RT_NULL; /* guards actor mask */
/* Static table mapping every supported request source to its default
 * addressing mode. Looked up by nu_pdma_peripheral_set(). */
static const nu_pdma_periph_ctl_t g_nu_pdma_peripheral_ctl_pool[ ] =
{
    // M2M
    { PDMA_MEM, eMemCtl_SrcInc_DstInc },
    // M2P
    { PDMA_USB_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART2_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART3_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART4_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART5_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_USCI0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_USCI1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_QSPI0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI2_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI3_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_I2C0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_I2C1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_I2C2_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_I2S0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_DAC0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_DAC1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH2_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH3_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH4_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH5_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH2_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH3_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH4_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH5_TX, eMemCtl_SrcInc_DstFix },
    // P2M
    { PDMA_USB_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART2_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART3_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART4_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART5_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_USCI0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_USCI1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_QSPI0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI2_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI3_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM0_P1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM0_P2_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM0_P3_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM1_P1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM1_P2_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM1_P3_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_I2C0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_I2C1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_I2C2_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_I2S0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_TMR0, eMemCtl_SrcFix_DstInc },
    { PDMA_TMR1, eMemCtl_SrcFix_DstInc },
    { PDMA_TMR2, eMemCtl_SrcFix_DstInc },
    { PDMA_TMR3, eMemCtl_SrcFix_DstInc },
    { PDMA_ADC_RX, eMemCtl_SrcFix_DstInc },
};
#define NU_PERIPHERAL_SIZE ( sizeof(g_nu_pdma_peripheral_ctl_pool) / sizeof(g_nu_pdma_peripheral_ctl_pool[0]) )
static struct nu_pdma_memfun_actor nu_pdma_memfun_actor_arr[NU_PDMA_MEMFUN_ACTOR_MAX];
/* SG table pool, shared by both controllers (both SCATBA registers
 * point at its first entry). A token bit set = descriptor free. */
static DSCT_T nu_pdma_sgtbl_arr[NU_PDMA_SGTBL_POOL_SIZE] = { 0 };
static uint32_t nu_pdma_sgtbl_token[RT_ALIGN(NU_PDMA_SGTBL_POOL_SIZE, 32) / 32];
static rt_mutex_t g_mutex_sg = RT_NULL;     /* guards pool + token bitmap */
  149. static int nu_pdma_peripheral_set(uint32_t u32PeriphType)
  150. {
  151. int idx = 0;
  152. while (idx < NU_PERIPHERAL_SIZE)
  153. {
  154. if (g_nu_pdma_peripheral_ctl_pool[idx].m_u32Peripheral == u32PeriphType)
  155. return idx;
  156. idx++;
  157. }
  158. // Not such peripheral
  159. return -1;
  160. }
  161. static void nu_pdma_periph_ctrl_fill(int i32ChannID, int i32CtlPoolIdx)
  162. {
  163. nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
  164. psPdmaChann->m_spPeripCtl.m_u32Peripheral = g_nu_pdma_peripheral_ctl_pool[i32CtlPoolIdx].m_u32Peripheral;
  165. psPdmaChann->m_spPeripCtl.m_eMemCtl = g_nu_pdma_peripheral_ctl_pool[i32CtlPoolIdx].m_eMemCtl;
  166. }
/* One-time initialisation of both PDMA controllers and the driver's
 * bookkeeping (channel mask, SG-table pool, token bitmap).
 * Subsequent calls return immediately. */
static void nu_pdma_init(void)
{
    int latest = 0;
    if (nu_pdma_inited)
        return;
    g_mutex_sg = rt_mutex_create("sgtbles", RT_IPC_FLAG_PRIO);
    RT_ASSERT(g_mutex_sg != RT_NULL);
    /* Pre-set all bits outside the valid channel range so the
     * first-zero search in allocation can never pick a bogus channel. */
    nu_pdma_chn_mask = ~(NU_PDMA_CH_Msk);
    rt_memset(nu_pdma_chn_arr, 0x00, NU_PDMA_CH_MAX * sizeof(nu_pdma_chn_t));
    /* Initialize PDMA0 setting */
    PDMA_Open(PDMA0, NU_PDMA_CH_HALF_Msk);
    PDMA_Close(PDMA0);
    NVIC_EnableIRQ(PDMA0_IRQn);
    /* Initialize PDMA1 setting */
    PDMA_Open(PDMA1, NU_PDMA_CH_HALF_Msk);
    PDMA_Close(PDMA1);
    NVIC_EnableIRQ(PDMA1_IRQn);
    rt_memset(&nu_pdma_sgtbl_arr[0], 0x00, sizeof(nu_pdma_sgtbl_arr));
    /* Assign first SG table address as PDMA SG table base address.
     * Both controllers deliberately share the same descriptor pool. */
    PDMA0->SCATBA = (uint32_t)&nu_pdma_sgtbl_arr[0];
    PDMA1->SCATBA = (uint32_t)&nu_pdma_sgtbl_arr[0];
    /* Initialize token pool: a set bit means "descriptor free". */
    rt_memset(&nu_pdma_sgtbl_token[0], 0xff, sizeof(nu_pdma_sgtbl_token));
    if (NU_PDMA_SGTBL_POOL_SIZE % 32)
    {
        /* Clear the bits of the last word that have no backing descriptor. */
        latest = (NU_PDMA_SGTBL_POOL_SIZE) / 32;
        nu_pdma_sgtbl_token[latest] ^= ~((1 << (NU_PDMA_SGTBL_POOL_SIZE % 32)) - 1) ;
    }
    nu_pdma_inited = 1;
}
  197. static void nu_pdma_channel_enable(int i32ChannID)
  198. {
  199. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  200. PDMA_Open(PDMA, 1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  201. }
  202. static inline void nu_pdma_channel_disable(int i32ChannID)
  203. {
  204. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  205. PDMA->CHCTL &= ~(1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  206. }
  207. static inline void nu_pdma_channel_reset(int i32ChannID)
  208. {
  209. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  210. PDMA->CHRST = (1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  211. }
  212. void nu_pdma_channel_terminate(int i32ChannID)
  213. {
  214. PDMA_T *PDMA;
  215. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  216. goto exit_pdma_channel_terminate;
  217. PDMA = NU_PDMA_GET_BASE(i32ChannID);
  218. // Reset specified channel ID
  219. nu_pdma_channel_reset(i32ChannID);
  220. // Clean descriptor table control register.
  221. PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL = 0UL;
  222. PDMA->CHCTL |= (1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  223. exit_pdma_channel_terminate:
  224. return;
  225. }
  226. static rt_err_t nu_pdma_timeout_set(int i32ChannID, int i32Timeout_us)
  227. {
  228. rt_err_t ret = -RT_EINVAL;
  229. PDMA_T *PDMA = NULL;
  230. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  231. goto exit_nu_pdma_timeout_set;
  232. PDMA = NU_PDMA_GET_BASE(i32ChannID);
  233. nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32IdleTimeout_us = i32Timeout_us;
  234. if (i32Timeout_us && NU_PDMA_GET_MOD_CHIDX(i32ChannID) <= 1) // Limit
  235. {
  236. uint32_t u32ToClk_Max = 1000000 / (CLK_GetHCLKFreq() / (1 << 8));
  237. uint32_t u32Divider = (i32Timeout_us / u32ToClk_Max) / (1 << 16);
  238. uint32_t u32TOutCnt = (i32Timeout_us / u32ToClk_Max) % (1 << 16);
  239. PDMA_DisableTimeout(PDMA, 1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  240. PDMA_EnableInt(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID), PDMA_INT_TIMEOUT); // Interrupt type
  241. if (u32Divider > 7)
  242. {
  243. u32Divider = 7;
  244. u32TOutCnt = (1 << 16);
  245. }
  246. PDMA->TOUTPSC |= (u32Divider << (PDMA_TOUTPSC_TOUTPSC1_Pos * NU_PDMA_GET_MOD_CHIDX(i32ChannID)));
  247. PDMA_SetTimeOut(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID), 1, u32TOutCnt);
  248. ret = RT_EOK;
  249. }
  250. else
  251. {
  252. PDMA_DisableInt(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID), PDMA_INT_TIMEOUT); // Interrupt type
  253. PDMA_DisableTimeout(PDMA, 1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  254. }
  255. exit_nu_pdma_timeout_set:
  256. return -(ret);
  257. }
/* Allocate a free channel bound to peripheral type i32PeripType.
 * Returns the global channel ID (>= 0) on success, or -RT_ERROR when
 * the peripheral is unknown or all channels are taken. */
int nu_pdma_channel_allocate(int32_t i32PeripType)
{
    int i, i32PeripCtlIdx;
    nu_pdma_init();    /* lazy one-shot hardware init */
    if ((i32PeripCtlIdx = nu_pdma_peripheral_set(i32PeripType)) < 0)
        goto exit_nu_pdma_channel_allocate;
    /* Find the position of first '0' in nu_pdma_chn_mask. */
    i = nu_cto(nu_pdma_chn_mask);
    if (i != 32)
    {
        nu_pdma_chn_mask |= (1 << i);
        rt_memset(nu_pdma_chn_arr + i - NU_PDMA_CH_Pos, 0x00, sizeof(nu_pdma_chn_t));
        /* Set idx number of g_nu_pdma_peripheral_ctl_pool */
        nu_pdma_periph_ctrl_fill(i, i32PeripCtlIdx);
        /* Reset channel */
        nu_pdma_channel_reset(i);
        nu_pdma_channel_enable(i);
        return i;
    }
exit_nu_pdma_channel_allocate:
    // No channel available
    return -(RT_ERROR);
}
  281. rt_err_t nu_pdma_channel_free(int i32ChannID)
  282. {
  283. rt_err_t ret = -RT_EINVAL;
  284. if (! nu_pdma_inited)
  285. goto exit_nu_pdma_channel_free;
  286. if (i32ChannID < NU_PDMA_CH_MAX && i32ChannID >= NU_PDMA_CH_Pos)
  287. {
  288. nu_pdma_chn_mask &= ~(1 << i32ChannID);
  289. nu_pdma_channel_disable(i32ChannID);
  290. ret = RT_EOK;
  291. }
  292. exit_nu_pdma_channel_free:
  293. return -(ret);
  294. }
  295. rt_err_t nu_pdma_callback_register(int i32ChannID, nu_pdma_cb_handler_t pfnHandler, void *pvUserData, uint32_t u32EventFilter)
  296. {
  297. rt_err_t ret = -RT_EINVAL;
  298. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  299. goto exit_nu_pdma_callback_register;
  300. nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_pfnCBHandler = pfnHandler;
  301. nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_pvUserData = pvUserData;
  302. nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32EventFilter = u32EventFilter;
  303. ret = RT_EOK;
  304. exit_nu_pdma_callback_register:
  305. return -(ret) ;
  306. }
/* Atomically swap the channel's callback/userdata/event-filter with the
 * caller-supplied values; the previous values are written back through
 * the same pointers, and the previous handler is also returned (NULL if
 * the channel is not allocated). Lets a driver temporarily intercept
 * another user's callback and restore it later. */
nu_pdma_cb_handler_t nu_pdma_callback_hijack(int i32ChannID, nu_pdma_cb_handler_t *ppfnHandler_Hijack,
        void **ppvUserData_Hijack, uint32_t *pu32Events_Hijack)
{
    nu_pdma_cb_handler_t pfnHandler_Org = NULL;
    void *pvUserData_Org;
    uint32_t u32Events_Org;
    RT_ASSERT(ppfnHandler_Hijack != NULL);
    RT_ASSERT(ppvUserData_Hijack != NULL);
    RT_ASSERT(pu32Events_Hijack != NULL);
    if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
        goto exit_nu_pdma_callback_hijack;
    /* Save the current registration... */
    pfnHandler_Org = nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_pfnCBHandler;
    pvUserData_Org = nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_pvUserData;
    u32Events_Org = nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32EventFilter;
    /* ...install the hijacker's... */
    nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_pfnCBHandler = *ppfnHandler_Hijack;
    nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_pvUserData = *ppvUserData_Hijack;
    nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32EventFilter = *pu32Events_Hijack;
    /* ...and hand the original back for later restoration. */
    *ppfnHandler_Hijack = pfnHandler_Org;
    *ppvUserData_Hijack = pvUserData_Org;
    *pu32Events_Hijack = u32Events_Org;
exit_nu_pdma_callback_hijack:
    return pfnHandler_Org;
}
  330. static int nu_pdma_non_transfer_count_get(int32_t i32ChannID)
  331. {
  332. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  333. return ((PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_TXCNT_Msk) >> PDMA_DSCT_CTL_TXCNT_Pos) + 1;
  334. }
  335. int nu_pdma_transferred_byte_get(int32_t i32ChannID, int32_t i32TriggerByteLen)
  336. {
  337. int i32BitWidth = 0;
  338. int cur_txcnt = 0;
  339. PDMA_T *PDMA;
  340. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  341. goto exit_nu_pdma_transferred_byte_get;
  342. PDMA = NU_PDMA_GET_BASE(i32ChannID);
  343. i32BitWidth = PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_TXWIDTH_Msk;
  344. i32BitWidth = (i32BitWidth == PDMA_WIDTH_8) ? 1 : (i32BitWidth == PDMA_WIDTH_16) ? 2 : (i32BitWidth == PDMA_WIDTH_32) ? 4 : 0;
  345. cur_txcnt = nu_pdma_non_transfer_count_get(i32ChannID);
  346. return (i32TriggerByteLen - (cur_txcnt) * i32BitWidth);
  347. exit_nu_pdma_transferred_byte_get:
  348. return -1;
  349. }
  350. nu_pdma_memctrl_t nu_pdma_channel_memctrl_get(int i32ChannID)
  351. {
  352. nu_pdma_memctrl_t eMemCtrl = eMemCtl_Undefined;
  353. if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
  354. goto exit_nu_pdma_channel_memctrl_get;
  355. eMemCtrl = nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl.m_eMemCtl;
  356. exit_nu_pdma_channel_memctrl_get:
  357. return eMemCtrl;
  358. }
/* Override the addressing mode of an allocated channel.
 * Rejects out-of-range enums, and fixed-source modes on a PDMA_MEM
 * channel (M2M with SAR-fix/burst is unsupported by this driver).
 * Returns 0 on success, a positive RT error number otherwise. */
rt_err_t nu_pdma_channel_memctrl_set(int i32ChannID, nu_pdma_memctrl_t eMemCtrl)
{
    rt_err_t ret = -RT_EINVAL;
    nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
    if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
        goto exit_nu_pdma_channel_memctrl_set;
    else if ((eMemCtrl < eMemCtl_SrcFix_DstFix) || (eMemCtrl > eMemCtl_SrcInc_DstInc))
        goto exit_nu_pdma_channel_memctrl_set;
    /* PDMA_MEM/SAR_FIX/BURST mode is not supported. */
    if ((psPdmaChann->m_spPeripCtl.m_u32Peripheral == PDMA_MEM) &&
            ((eMemCtrl == eMemCtl_SrcFix_DstInc) || (eMemCtrl == eMemCtl_SrcFix_DstFix)))
        goto exit_nu_pdma_channel_memctrl_set;
    nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl.m_eMemCtl = eMemCtrl;
    ret = RT_EOK;
exit_nu_pdma_channel_memctrl_set:
    return -(ret);
}
  376. static void nu_pdma_channel_memctrl_fill(nu_pdma_memctrl_t eMemCtl, uint32_t *pu32SrcCtl, uint32_t *pu32DstCtl)
  377. {
  378. switch ((int)eMemCtl)
  379. {
  380. case eMemCtl_SrcFix_DstFix:
  381. *pu32SrcCtl = PDMA_SAR_FIX;
  382. *pu32DstCtl = PDMA_DAR_FIX;
  383. break;
  384. case eMemCtl_SrcFix_DstInc:
  385. *pu32SrcCtl = PDMA_SAR_FIX;
  386. *pu32DstCtl = PDMA_DAR_INC;
  387. break;
  388. case eMemCtl_SrcInc_DstFix:
  389. *pu32SrcCtl = PDMA_SAR_INC;
  390. *pu32DstCtl = PDMA_DAR_FIX;
  391. break;
  392. case eMemCtl_SrcInc_DstInc:
  393. *pu32SrcCtl = PDMA_SAR_INC;
  394. *pu32DstCtl = PDMA_DAR_INC;
  395. break;
  396. default:
  397. break;
  398. }
  399. }
/* This is for Scatter-gather DMA. */
/* Fill one descriptor for a transfer on i32ChannID.
 *
 * u32DataWidth   : transfer unit in bits (8/16/32 only).
 * u32AddrSrc/Dst : must be aligned to the data width.
 * i32TransferCnt : number of units, <= NU_PDMA_MAX_TXCNT.
 * next           : next descriptor for SG chaining, or NULL to make
 *                  this the terminating node.
 *
 * Returns 0 on success, a positive RT error number otherwise. */
rt_err_t nu_pdma_desc_setup(int i32ChannID, nu_pdma_desc_t dma_desc, uint32_t u32DataWidth, uint32_t u32AddrSrc,
                            uint32_t u32AddrDst, int32_t i32TransferCnt, nu_pdma_desc_t next)
{
    nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
    PDMA_T *PDMA = NULL;
    uint32_t u32SrcCtl = 0;
    uint32_t u32DstCtl = 0;
    rt_err_t ret = -RT_EINVAL;
    /* Parameter validation: descriptor, channel, width, alignment, count. */
    if (!dma_desc)
        goto exit_nu_pdma_desc_setup;
    else if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
        goto exit_nu_pdma_desc_setup;
    else if (!(u32DataWidth == 8 || u32DataWidth == 16 || u32DataWidth == 32))
        goto exit_nu_pdma_desc_setup;
    else if ((u32AddrSrc % (u32DataWidth / 8)) || (u32AddrDst % (u32DataWidth / 8)))
        goto exit_nu_pdma_desc_setup;
    else if (i32TransferCnt > NU_PDMA_MAX_TXCNT)
        goto exit_nu_pdma_desc_setup;
    PDMA = NU_PDMA_GET_BASE(i32ChannID);
    psPeriphCtl = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl;
    nu_pdma_channel_memctrl_fill(psPeriphCtl->m_eMemCtl, &u32SrcCtl, &u32DstCtl);
    /* Assemble the control word: count (hardware stores count-1),
     * width, addressing modes, basic operating mode. */
    dma_desc->CTL = ((i32TransferCnt - 1) << PDMA_DSCT_CTL_TXCNT_Pos) |
                    ((u32DataWidth == 8) ? PDMA_WIDTH_8 : (u32DataWidth == 16) ? PDMA_WIDTH_16 : PDMA_WIDTH_32) |
                    u32SrcCtl |
                    u32DstCtl |
                    PDMA_OP_BASIC;
    dma_desc->SA = u32AddrSrc;
    dma_desc->DA = u32AddrDst;
    dma_desc->NEXT = 0;  /* Terminating node by default. */
    if (psPeriphCtl->m_u32Peripheral == PDMA_MEM)
    {
        /* For M2M transfer */
        dma_desc->CTL |= (PDMA_REQ_BURST | PDMA_BURST_32);
    }
    else
    {
        /* For P2M and M2P transfer */
        dma_desc->CTL |= (PDMA_REQ_SINGLE);
    }
    if (next)
    {
        /* Link to Next and modify to scatter-gather DMA mode.
         * NEXT holds the offset of the next node from SCATBA. */
        dma_desc->CTL = (dma_desc->CTL & ~PDMA_DSCT_CTL_OPMODE_Msk) | PDMA_OP_SCATTER;
        dma_desc->NEXT = (uint32_t)next - (PDMA->SCATBA);
    }
    ret = RT_EOK;
exit_nu_pdma_desc_setup:
    return -(ret);
}
  450. static int nu_pdma_sgtbls_token_allocate(void)
  451. {
  452. int idx, i;
  453. int pool_size = sizeof(nu_pdma_sgtbl_token) / sizeof(uint32_t);
  454. for (i = 0; i < pool_size; i++)
  455. {
  456. if ((idx = nu_ctz(nu_pdma_sgtbl_token[i])) != 32)
  457. {
  458. nu_pdma_sgtbl_token[i] &= ~(1 << idx);
  459. idx += i * 32;
  460. return idx;
  461. }
  462. }
  463. /* No available */
  464. return -1;
  465. }
/* Return one descriptor to the token bitmap. The pool index is
 * recovered from the descriptor's address within nu_pdma_sgtbl_arr.
 * Caller must hold g_mutex_sg. */
static void nu_pdma_sgtbls_token_free(nu_pdma_desc_t psSgtbls)
{
    int idx = (int)(psSgtbls - &nu_pdma_sgtbl_arr[0]);
    RT_ASSERT(idx >= 0);
    RT_ASSERT((idx + 1) <= NU_PDMA_SGTBL_POOL_SIZE);
    nu_pdma_sgtbl_token[idx / 32] |= (1 << (idx % 32));
}
/* Allocate 'num' SG descriptors from the shared pool into ppsSgtbls.
 * All-or-nothing: on shortage, every descriptor granted so far is
 * returned and -RT_ERROR is reported; RT_EOK on success. */
rt_err_t nu_pdma_sgtbls_allocate(nu_pdma_desc_t *ppsSgtbls, int num)
{
    int i, j, idx;
    rt_err_t result;
    RT_ASSERT(ppsSgtbls != NULL);
    RT_ASSERT(num <= NU_PDMA_SG_TBL_MAXSIZE);
    result = rt_mutex_take(g_mutex_sg, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);
    for (i = 0; i < num; i++)
    {
        ppsSgtbls[i] = NULL;
        /* Get token. */
        if ((idx = nu_pdma_sgtbls_token_allocate()) < 0)
        {
            rt_kprintf("No available sgtbl.\n");
            goto fail_nu_pdma_sgtbls_allocate;
        }
        ppsSgtbls[i] = (nu_pdma_desc_t)&nu_pdma_sgtbl_arr[idx];
    }
    result = rt_mutex_release(g_mutex_sg);
    RT_ASSERT(result == RT_EOK);
    return RT_EOK;
fail_nu_pdma_sgtbls_allocate:
    /* Release allocated tables (roll back the partial grant). */
    for (j = 0; j < i; j++)
    {
        if (ppsSgtbls[j] != NULL)
        {
            nu_pdma_sgtbls_token_free(ppsSgtbls[j]);
        }
        ppsSgtbls[j] = NULL;
    }
    result = rt_mutex_release(g_mutex_sg);
    RT_ASSERT(result == RT_EOK);
    return -RT_ERROR;
}
  509. void nu_pdma_sgtbls_free(nu_pdma_desc_t *ppsSgtbls, int num)
  510. {
  511. int i;
  512. rt_err_t result;
  513. RT_ASSERT(ppsSgtbls != NULL);
  514. RT_ASSERT(num <= NU_PDMA_SG_TBL_MAXSIZE);
  515. result = rt_mutex_take(g_mutex_sg, RT_WAITING_FOREVER);
  516. RT_ASSERT(result == RT_EOK);
  517. for (i = 0; i < num; i++)
  518. {
  519. if (ppsSgtbls[i] != NULL)
  520. {
  521. nu_pdma_sgtbls_token_free(ppsSgtbls[i]);
  522. }
  523. ppsSgtbls[i] = NULL;
  524. }
  525. result = rt_mutex_release(g_mutex_sg);
  526. RT_ASSERT(result == RT_EOK);
  527. }
/* Walk an SG descriptor chain and verify every node lies within
 * NU_PDMA_SG_LIMITED_DISTANCE above the shared SCATBA base (NEXT links
 * are stored as offsets from SCATBA, so far-away nodes are unreachable
 * by hardware). Stops at the terminating node (NEXT == 0, i.e. node
 * folds back onto SCATBA) or when the chain loops back to head.
 * Returns RT_EOK when valid; RT_ERROR (positive, not negated) otherwise. */
static rt_err_t nu_pdma_sgtbls_valid(nu_pdma_desc_t head)
{
    uint32_t node_addr;
    nu_pdma_desc_t node = head;
    do
    {
        node_addr = (uint32_t)node;
        if ((node_addr < PDMA0->SCATBA) || (node_addr - PDMA0->SCATBA) >= NU_PDMA_SG_LIMITED_DISTANCE)
        {
            rt_kprintf("The distance is over %d between 0x%08x and 0x%08x. \n", NU_PDMA_SG_LIMITED_DISTANCE, PDMA0->SCATBA, node);
            rt_kprintf("Please use nu_pdma_sgtbl_allocate to allocate valid sg-table.\n");
            return RT_ERROR;
        }
        /* Follow the hardware link: offset from SCATBA -> address. */
        node = (nu_pdma_desc_t)(node->NEXT + PDMA0->SCATBA);
    }
    while (((uint32_t)node != PDMA0->SCATBA) && (node != head));
    return RT_EOK;
}
/* Arm and start a transfer described by 'head' on i32ChannID:
 * clears any stale timeout, enables the transfer-done interrupt,
 * re-arms the idle timeout, then programs the transfer mode.
 * Scatter-gather mode is selected when head->NEXT != 0.
 * Memory-to-memory requests have no hardware trigger source, so they
 * are software-triggered immediately. */
static void _nu_pdma_transfer(int i32ChannID, uint32_t u32Peripheral, nu_pdma_desc_t head, uint32_t u32IdleTimeout_us)
{
    PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
    PDMA_DisableTimeout(PDMA, 1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
    PDMA_EnableInt(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID), PDMA_INT_TRANS_DONE);
    nu_pdma_timeout_set(i32ChannID, u32IdleTimeout_us);
    /* Set scatter-gather mode and head */
    PDMA_SetTransferMode(PDMA,
                         NU_PDMA_GET_MOD_CHIDX(i32ChannID),
                         u32Peripheral,
                         (head->NEXT != 0) ? 1 : 0,
                         (uint32_t)head);
    /* If peripheral is M2M, trigger it. */
    if (u32Peripheral == PDMA_MEM)
        PDMA_Trigger(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID));
}
/* Single-shot (non-SG) transfer on an allocated channel: fills the
 * channel's own hardware descriptor and starts the transfer.
 * Width in bits, count in units of that width.
 * Returns 0 on success, a positive RT error number otherwise. */
rt_err_t nu_pdma_transfer(int i32ChannID, uint32_t u32DataWidth, uint32_t u32AddrSrc, uint32_t u32AddrDst, int32_t i32TransferCnt, uint32_t u32IdleTimeout_us)
{
    rt_err_t ret = -RT_EINVAL;
    PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
    nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
    if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
        goto exit_nu_pdma_transfer;
    psPeriphCtl = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl;
    /* Program the channel's built-in descriptor; 'next' = NULL makes
     * it a terminating (basic-mode) node. */
    ret = nu_pdma_desc_setup(i32ChannID,
                             &PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)],
                             u32DataWidth,
                             u32AddrSrc,
                             u32AddrDst,
                             i32TransferCnt,
                             NULL);
    if (ret != RT_EOK)
        goto exit_nu_pdma_transfer;
    _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, &PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)], u32IdleTimeout_us);
    ret = RT_EOK;
exit_nu_pdma_transfer:
    return -(ret);
}
/* Start a scatter-gather transfer whose chain begins at 'head'.
 * The chain must have been built from the driver's descriptor pool
 * (validated by nu_pdma_sgtbls_valid).
 * Returns 0 on success; note the error from nu_pdma_sgtbls_valid
 * (positive RT_ERROR) is negated on return like the other paths. */
rt_err_t nu_pdma_sg_transfer(int i32ChannID, nu_pdma_desc_t head, uint32_t u32IdleTimeout_us)
{
    rt_err_t ret = -RT_EINVAL;
    nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
    if (!head)
        goto exit_nu_pdma_sg_transfer;
    else if (!(nu_pdma_chn_mask & (1 << i32ChannID)))
        goto exit_nu_pdma_sg_transfer;
    else if ((ret = nu_pdma_sgtbls_valid(head)) != RT_EOK) /* Check SG-tbls. */
        goto exit_nu_pdma_sg_transfer;
    psPeriphCtl = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl;
    _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, head, u32IdleTimeout_us);
    ret = RT_EOK;
exit_nu_pdma_sg_transfer:
    return -(ret);
}
/* Common interrupt service routine shared by both controllers.
 * Decodes abort / transfer-done / request-timeout status, clears the
 * hardware flags, then walks the per-channel status bits and delivers
 * the filtered event set to each channel's registered callback. */
void PDMA_IRQHandler(PDMA_T *PDMA)
{
    int i;
    uint32_t intsts = PDMA_GET_INT_STATUS(PDMA);
    uint32_t abtsts = PDMA_GET_ABORT_STS(PDMA);
    uint32_t tdsts = PDMA_GET_TD_STS(PDMA);
    /* Only modulo-channels 0/1 have timeout counters (see nu_pdma_timeout_set). */
    uint32_t reqto = intsts & (PDMA_INTSTS_REQTOF0_Msk | PDMA_INTSTS_REQTOF1_Msk);
    uint32_t reqto_ch = ((reqto & PDMA_INTSTS_REQTOF0_Msk) ? (1 << 0) : 0x0) | ((reqto & PDMA_INTSTS_REQTOF1_Msk) ? (1 << 1) : 0x0);
    int allch_sts = (reqto_ch | tdsts | abtsts);
    // Abort
    if (intsts & PDMA_INTSTS_ABTIF_Msk)
    {
        // Clear all Abort flags
        PDMA_CLR_ABORT_FLAG(PDMA, abtsts);
    }
    // Transfer done
    if (intsts & PDMA_INTSTS_TDIF_Msk)
    {
        // Clear all transfer done flags
        PDMA_CLR_TD_FLAG(PDMA, tdsts);
    }
    // Timeout
    if (reqto)
    {
        // Clear all Timeout flags
        PDMA->INTSTS = reqto;
    }
    // Find the position of first '1' in allch_sts.
    while ((i = nu_ctz(allch_sts)) != 32)
    {
        int j = i;
        int ch_mask = (1 << i);
        /* Translate the modulo channel index into the global channel ID. */
        if (PDMA == PDMA1)
        {
            j += PDMA_CH_MAX;
        }
        if (nu_pdma_chn_mask & (1 << j))
        {
            int ch_event = 0;
            nu_pdma_chn_t *dma_chn = nu_pdma_chn_arr + j - NU_PDMA_CH_Pos;
            if (dma_chn->m_pfnCBHandler)
            {
                if (abtsts & ch_mask)
                {
                    ch_event |= NU_PDMA_EVENT_ABORT;
                }
                if (tdsts & ch_mask) ch_event |= NU_PDMA_EVENT_TRANSFER_DONE;
                if (reqto_ch & ch_mask)
                {
                    /* Stop the counter while servicing; re-armed below. */
                    PDMA_DisableTimeout(PDMA, ch_mask);
                    ch_event |= NU_PDMA_EVENT_TIMEOUT;
                }
                /* Only deliver events the user subscribed to. */
                if (dma_chn->m_u32EventFilter & ch_event)
                    dma_chn->m_pfnCBHandler(dma_chn->m_pvUserData, ch_event);
                if (reqto_ch & ch_mask)
                    nu_pdma_timeout_set(j, nu_pdma_chn_arr[j - NU_PDMA_CH_Pos].m_u32IdleTimeout_us);
            }//if(dma_chn->handler)
        } //if (nu_pdma_chn_mask & ch_mask)
        // Clear the served bit.
        allch_sts &= ~ch_mask;
    } //while
}
/* Vector entry for PDMA0, bracketed by the RT-Thread ISR hooks. */
void PDMA0_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    PDMA_IRQHandler(PDMA0);
    /* leave interrupt */
    rt_interrupt_leave();
}
/* Vector entry for PDMA1, bracketed by the RT-Thread ISR hooks. */
void PDMA1_IRQHandler(void)
{
    /* enter interrupt */
    rt_interrupt_enter();
    PDMA_IRQHandler(PDMA1);
    /* leave interrupt */
    rt_interrupt_leave();
}
/* Build the memfun actor pool: allocate up to NU_PDMA_MEMFUN_ACTOR_MAX
 * PDMA_MEM channels (stopping early if channels run out), one
 * completion semaphore per actor, plus the pool counting-semaphore and
 * the mask lock. Does nothing useful if no channel could be allocated. */
static void nu_pdma_memfun_actor_init(void)
{
    int i = 0 ;
    nu_pdma_init();
    for (i = 0; i < NU_PDMA_MEMFUN_ACTOR_MAX; i++)
    {
        rt_memset(&nu_pdma_memfun_actor_arr[i], 0, sizeof(struct nu_pdma_memfun_actor));
        if (-(RT_ERROR) != (nu_pdma_memfun_actor_arr[i].m_i32ChannID = nu_pdma_channel_allocate(PDMA_MEM)))
        {
            nu_pdma_memfun_actor_arr[i].m_psSemMemFun = rt_sem_create("memactor_sem", 0, RT_IPC_FLAG_FIFO);
            RT_ASSERT(nu_pdma_memfun_actor_arr[i].m_psSemMemFun != RT_NULL);
        }
        else
            break;
    }
    if (i)
    {
        nu_pdma_memfun_actor_maxnum = i;
        /* Pre-set the mask bits of actors that do not exist. */
        nu_pdma_memfun_actor_mask = ~(((1 << i) - 1));
        nu_pdma_memfun_actor_pool_sem = rt_sem_create("mempool_sem", nu_pdma_memfun_actor_maxnum, RT_IPC_FLAG_FIFO);
        RT_ASSERT(nu_pdma_memfun_actor_pool_sem != RT_NULL);
        nu_pdma_memfun_actor_pool_lock = rt_mutex_create("mempool_lock", RT_IPC_FLAG_PRIO);
        RT_ASSERT(nu_pdma_memfun_actor_pool_lock != RT_NULL);
    }
}
  703. static void nu_pdma_memfun_cb(void *pvUserData, uint32_t u32Events)
  704. {
  705. rt_err_t result;
  706. nu_pdma_memfun_actor_t psMemFunActor = (nu_pdma_memfun_actor_t)pvUserData;
  707. psMemFunActor->m_u32Result = u32Events;
  708. result = rt_sem_release(psMemFunActor->m_psSemMemFun);
  709. RT_ASSERT(result == RT_EOK);
  710. }
  711. static int nu_pdma_memfun_employ(void)
  712. {
  713. int idx = -1 ;
  714. /* Headhunter */
  715. if (nu_pdma_memfun_actor_pool_sem && (rt_sem_take(nu_pdma_memfun_actor_pool_sem, RT_WAITING_FOREVER) == RT_EOK))
  716. {
  717. rt_err_t result;
  718. result = rt_mutex_take(nu_pdma_memfun_actor_pool_lock, RT_WAITING_FOREVER);
  719. RT_ASSERT(result == RT_EOK);
  720. /* Find the position of first '0' in nu_pdma_memfun_actor_mask. */
  721. idx = nu_cto(nu_pdma_memfun_actor_mask);
  722. if (idx != 32)
  723. {
  724. nu_pdma_memfun_actor_mask |= (1 << idx);
  725. }
  726. else
  727. {
  728. idx = -1;
  729. }
  730. result = rt_mutex_release(nu_pdma_memfun_actor_pool_lock);
  731. RT_ASSERT(result == RT_EOK);
  732. }
  733. return idx;
  734. }
/**
 * Blocking memory transfer through a pooled PDMA actor channel.
 *
 * dest           : destination base address.
 * src            : source base address.
 * u32DataWidth   : element width in bits; used as u32DataWidth/8 to advance
 *                  the byte offset between chunks.
 * u32TransferCnt : number of elements to move.
 * eMemCtl        : address-update mode. The bit tests below use bit1 for
 *                  "source increments" and bit0 for "destination increments"
 *                  — NOTE(review): inferred from this code; confirm against
 *                  the nu_pdma_memctrl_t enum encoding.
 *
 * Returns the number of elements actually transferred (may be short if the
 * hardware reports an ABORT event mid-transfer).
 */
static rt_ssize_t nu_pdma_memfun(void *dest, void *src, uint32_t u32DataWidth, unsigned int u32TransferCnt, nu_pdma_memctrl_t eMemCtl)
{
    nu_pdma_memfun_actor_t psMemFunActor = NULL;
    int idx;
    rt_size_t ret = 0;
    rt_uint32_t u32Offset = 0;
    rt_uint32_t u32TxCnt = 0;

    while (1)
    {
        rt_err_t result;

        /* Employ actor: blocks until a pooled channel is free; retry on -1. */
        if ((idx = nu_pdma_memfun_employ()) < 0)
            continue;

        psMemFunActor = &nu_pdma_memfun_actor_arr[idx];

        /* Split the request into chunks of at most NU_PDMA_MAX_TXCNT elements. */
        do
        {
            u32TxCnt = (u32TransferCnt > NU_PDMA_MAX_TXCNT) ? NU_PDMA_MAX_TXCNT : u32TransferCnt;

            /* Set PDMA memory control to eMemCtl. */
            nu_pdma_channel_memctrl_set(psMemFunActor->m_i32ChannID, eMemCtl);

            /* Register ISR callback function for done/abort events. */
            nu_pdma_callback_register(psMemFunActor->m_i32ChannID, nu_pdma_memfun_cb, (void *)psMemFunActor, NU_PDMA_EVENT_ABORT | NU_PDMA_EVENT_TRANSFER_DONE);

            psMemFunActor->m_u32Result = 0;

            /* Trigger it. The offset is only applied to the side whose
             * address increments; a fixed side always reuses the base. */
            nu_pdma_transfer(psMemFunActor->m_i32ChannID,
                             u32DataWidth,
                             (eMemCtl & 0x2ul) ? (uint32_t)src + u32Offset : (uint32_t)src,   /* Src address is Inc or not. */
                             (eMemCtl & 0x1ul) ? (uint32_t)dest + u32Offset : (uint32_t)dest, /* Dst address is Inc or not. */
                             u32TxCnt,
                             0);

            /* Wait it done: nu_pdma_memfun_cb releases this semaphore from ISR. */
            result = rt_sem_take(psMemFunActor->m_psSemMemFun, RT_WAITING_FOREVER);
            RT_ASSERT(result == RT_EOK);

            /* Give result if get NU_PDMA_EVENT_TRANSFER_DONE. */
            if (psMemFunActor->m_u32Result & NU_PDMA_EVENT_TRANSFER_DONE)
            {
                ret += u32TxCnt;
            }
            else
            {
                /* Partial: count only the elements that actually moved. */
                ret += (u32TxCnt - nu_pdma_non_transfer_count_get(psMemFunActor->m_i32ChannID));
            }

            /* Terminate it if get ABORT event; abandon the remaining chunks. */
            if (psMemFunActor->m_u32Result & NU_PDMA_EVENT_ABORT)
            {
                nu_pdma_channel_terminate(psMemFunActor->m_i32ChannID);
                break;
            }

            u32TransferCnt -= u32TxCnt;
            u32Offset += u32TxCnt * (u32DataWidth / 8);
        }
        while (u32TransferCnt > 0);

        /* Release the actor: clear its busy bit under the pool lock... */
        result = rt_mutex_take(nu_pdma_memfun_actor_pool_lock, RT_WAITING_FOREVER);
        RT_ASSERT(result == RT_EOK);

        nu_pdma_memfun_actor_mask &= ~(1 << idx);

        result = rt_mutex_release(nu_pdma_memfun_actor_pool_lock);
        RT_ASSERT(result == RT_EOK);

        /* ...then return the pool-semaphore token (fire actor). */
        result = rt_sem_release(nu_pdma_memfun_actor_pool_sem);
        RT_ASSERT(result == RT_EOK);

        break;
    }

    return ret;
}
  798. rt_size_t nu_pdma_mempush(void *dest, void *src, uint32_t data_width, unsigned int transfer_count)
  799. {
  800. if (data_width == 8 || data_width == 16 || data_width == 32)
  801. return nu_pdma_memfun(dest, src, data_width, transfer_count, eMemCtl_SrcInc_DstFix);
  802. return 0;
  803. }
  804. void *nu_pdma_memcpy(void *dest, void *src, unsigned int count)
  805. {
  806. int i = 0;
  807. uint32_t u32Offset = 0;
  808. uint32_t u32Remaining = count;
  809. for (i = 4; (i > 0) && (u32Remaining > 0) ; i >>= 1)
  810. {
  811. uint32_t u32src = (uint32_t)src + u32Offset;
  812. uint32_t u32dest = (uint32_t)dest + u32Offset;
  813. if (((u32src % i) == (u32dest % i)) &&
  814. ((u32src % i) == 0) &&
  815. (RT_ALIGN_DOWN(u32Remaining, i) >= i))
  816. {
  817. uint32_t u32TXCnt = u32Remaining / i;
  818. if (u32TXCnt != nu_pdma_memfun((void *)u32dest, (void *)u32src, i * 8, u32TXCnt, eMemCtl_SrcInc_DstInc))
  819. goto exit_nu_pdma_memcpy;
  820. u32Offset += (u32TXCnt * i);
  821. u32Remaining -= (u32TXCnt * i);
  822. }
  823. }
  824. if (count == u32Offset)
  825. return dest;
  826. exit_nu_pdma_memcpy:
  827. return NULL;
  828. }
/**
 * PDMA memfun actor initialization.
 *
 * Board-level startup hook: initializes the PDMA driver and builds the
 * memory-function actor pool. Registered with the RT-Thread init sequence
 * via INIT_DEVICE_EXPORT below; always reports success (returns 0).
 */
int rt_hw_pdma_memfun_init(void)
{
    nu_pdma_memfun_actor_init();
    return 0;
}
  837. INIT_DEVICE_EXPORT(rt_hw_pdma_memfun_init);
  838. #endif // #if defined(BSP_USING_PDMA)