drv_pdma.c
/**************************************************************************//**
*
* @copyright (C) 2020 Nuvoton Technology Corp. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date            Author       Notes
* 2021-7-15       Wayne        First version
*
******************************************************************************/

#include <rtconfig.h>

#if defined(BSP_USING_PDMA)

#include <rtdevice.h>
#include <rtthread.h>
#include <drv_pdma.h>
#include <nu_bitutil.h>
#include "drv_sys.h"

/* Private define ---------------------------------------------------------------*/
// RT_DEV_NAME_PREFIX pdma

#ifndef NU_PDMA_MEMFUN_ACTOR_MAX
    #define NU_PDMA_MEMFUN_ACTOR_MAX (4)
#endif

/* To select the first PDMA base */
#if !defined(USE_MA35D1_SUBM)
    #define DEF_PDMA_BASE_START PDMA0_BASE
#else
    #define DEF_PDMA_BASE_START PDMA2_BASE
#endif

enum
{
    PDMA_START = -1,
#if defined(BSP_USING_PDMA0)
    PDMA0_IDX,
#endif
#if defined(BSP_USING_PDMA1)
    PDMA1_IDX,
#endif
#if defined(BSP_USING_PDMA2)
    PDMA2_IDX,
#endif
#if defined(BSP_USING_PDMA3)
    PDMA3_IDX,
#endif
    PDMA_CNT
};

#define NU_PDMA_SG_TBL_MAXSIZE      (NU_PDMA_SG_LIMITED_DISTANCE / sizeof(DSCT_T))

#define NU_PDMA_CH_MAX              (PDMA_CNT * PDMA_CH_MAX)    /* Specify the maximum channels of PDMA */
#define NU_PDMA_CH_Pos              (0)                         /* Specify the first channel number of PDMA */
#define NU_PDMA_CH_Msk              (PDMA_CH_Msk << NU_PDMA_CH_Pos)
#define NU_PDMA_GET_BASE(ch)        (PDMA_T *)((((ch) / PDMA_CH_MAX) * 0x10000UL) + DEF_PDMA_BASE_START)
#define NU_PDMA_GET_MOD_IDX(ch)     ((ch) / PDMA_CH_MAX)
#define NU_PDMA_GET_MOD_CHIDX(ch)   ((ch) % PDMA_CH_MAX)
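
/*
 * A worked example of the global channel numbering above (a sketch; the
 * value PDMA_CH_MAX == 10 is assumed purely for illustration and comes
 * from the BSP headers, not from this file):
 *
 *   Global channel ID 12:
 *     NU_PDMA_GET_MOD_IDX(12)   -> 12 / 10 = 1   (second enabled PDMA module)
 *     NU_PDMA_GET_MOD_CHIDX(12) -> 12 % 10 = 2   (channel 2 of that module)
 *     NU_PDMA_GET_BASE(12)      -> DEF_PDMA_BASE_START + 1 * 0x10000UL
 *
 * i.e. each PDMA module occupies a 64 KB-spaced register window and
 * contributes PDMA_CH_MAX consecutive global channel IDs.
 */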

/* Private typedef --------------------------------------------------------------*/
struct nu_pdma_periph_ctl
{
    uint32_t            m_u32Peripheral;
    nu_pdma_memctrl_t   m_eMemCtl;
};
typedef struct nu_pdma_periph_ctl nu_pdma_periph_ctl_t;

struct nu_pdma_chn
{
    struct nu_pdma_chn_cb   m_sCB_Event;
    struct nu_pdma_chn_cb   m_sCB_Trigger;
    struct nu_pdma_chn_cb   m_sCB_Disable;

    nu_pdma_desc_t         *m_ppsSgtbl;
    uint32_t                m_u32WantedSGTblNum;

    uint32_t                m_u32EventFilter;
    uint32_t                m_u32IdleTimeout_us;
    nu_pdma_periph_ctl_t    m_spPeripCtl;
};
typedef struct nu_pdma_chn nu_pdma_chn_t;

struct nu_pdma_memfun_actor
{
    int         m_i32ChannID;
    uint32_t    m_u32Result;
    rt_sem_t    m_psSemMemFun;
};
typedef struct nu_pdma_memfun_actor *nu_pdma_memfun_actor_t;

/* Private functions ------------------------------------------------------------*/
static int nu_pdma_peripheral_set(uint32_t u32PeriphType);
static void nu_pdma_init(void);
static void nu_pdma_channel_enable(int i32ChannID);
static void nu_pdma_channel_disable(int i32ChannID);
static void nu_pdma_channel_reset(int i32ChannID);
static rt_err_t nu_pdma_timeout_set(int i32ChannID, int i32Timeout_us);
static void nu_pdma_periph_ctrl_fill(int i32ChannID, int i32CtlPoolIdx);
static rt_size_t nu_pdma_memfun(void *dest, void *src, uint32_t u32DataWidth, unsigned int u32TransferCnt, nu_pdma_memctrl_t eMemCtl);
static void nu_pdma_memfun_cb(void *pvUserData, uint32_t u32Events);
static void nu_pdma_memfun_actor_init(void);
static int nu_pdma_memfun_employ(void);
static int nu_pdma_non_transfer_count_get(int32_t i32ChannID);

/* Public functions -------------------------------------------------------------*/

/* Private variables ------------------------------------------------------------*/
static volatile int nu_pdma_inited = 0;
static volatile uint32_t nu_pdma_chn_mask_arr[PDMA_CNT] = {0};
static nu_pdma_chn_t nu_pdma_chn_arr[NU_PDMA_CH_MAX];
static volatile uint32_t nu_pdma_memfun_actor_mask = 0;
static volatile uint32_t nu_pdma_memfun_actor_maxnum = 0;
static rt_sem_t nu_pdma_memfun_actor_pool_sem = RT_NULL;
static rt_mutex_t nu_pdma_memfun_actor_pool_lock = RT_NULL;

static void nu_pdma_isr(int vector, void *pvdata);

static const struct nu_module nu_pdma_arr[] =
{
#if defined(BSP_USING_PDMA0)
    {
        .name = "pdma0",
        .m_pvBase = (void *)PDMA0,
        .u32RstId = PDMA0_RST,
        .eIRQn = PDMA0_IRQn
    },
#endif
#if defined(BSP_USING_PDMA1)
    {
        .name = "pdma1",
        .m_pvBase = (void *)PDMA1,
        .u32RstId = PDMA1_RST,
        .eIRQn = PDMA1_IRQn
    },
#endif
#if defined(BSP_USING_PDMA2)
    {
        .name = "pdma2",
        .m_pvBase = (void *)PDMA2,
        .u32RstId = PDMA2_RST,
        .eIRQn = PDMA2_IRQn
    },
#endif
#if defined(BSP_USING_PDMA3)
    {
        .name = "pdma3",
        .m_pvBase = (void *)PDMA3,
        .u32RstId = PDMA3_RST,
        .eIRQn = PDMA3_IRQn
    }
#endif
};

static const nu_pdma_periph_ctl_t g_nu_pdma_peripheral_ctl_pool[] =
{
    // M2M
    { PDMA_MEM,       eMemCtl_SrcInc_DstInc },

    // M2P
    { PDMA_UART0_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_UART1_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_UART2_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_UART3_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_UART4_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_UART5_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_UART6_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_UART7_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_UART8_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_UART9_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_UART10_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART11_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART12_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART13_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART14_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART15_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART16_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_QSPI0_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_QSPI1_TX,  eMemCtl_SrcInc_DstFix },
    { PDMA_SPI0_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_SPI1_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_SPI2_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_SPI3_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_I2C0_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_I2C1_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_I2C2_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_I2C3_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_I2C4_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_I2C5_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_I2S0_TX,   eMemCtl_SrcInc_DstFix },
    { PDMA_I2S1_TX,   eMemCtl_SrcInc_DstFix },

    // P2M
    { PDMA_UART0_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_UART1_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_UART2_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_UART3_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_UART4_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_UART5_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_UART6_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_UART7_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_UART8_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_UART9_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_UART10_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART11_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART12_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART13_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART14_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART15_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART16_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_QSPI0_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_QSPI1_RX,  eMemCtl_SrcFix_DstInc },
    { PDMA_SPI0_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_SPI1_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_SPI2_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_SPI3_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_I2C0_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_I2C1_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_I2C2_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_I2C3_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_I2C4_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_I2C5_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_I2S0_RX,   eMemCtl_SrcFix_DstInc },
    { PDMA_I2S1_RX,   eMemCtl_SrcFix_DstInc },
};

#define NU_PERIPHERAL_SIZE ( sizeof(g_nu_pdma_peripheral_ctl_pool) / sizeof(g_nu_pdma_peripheral_ctl_pool[0]) )

static struct nu_pdma_memfun_actor nu_pdma_memfun_actor_arr[NU_PDMA_MEMFUN_ACTOR_MAX];

static int nu_pdma_check_is_nonallocated(uint32_t u32ChnId)
{
    uint32_t mod_idx = NU_PDMA_GET_MOD_IDX(u32ChnId);
    RT_ASSERT(mod_idx < PDMA_CNT);

    return !(nu_pdma_chn_mask_arr[mod_idx] & (1 << NU_PDMA_GET_MOD_CHIDX(u32ChnId)));
}

static int nu_pdma_peripheral_set(uint32_t u32PeriphType)
{
    int idx = 0;

    while (idx < NU_PERIPHERAL_SIZE)
    {
        if (g_nu_pdma_peripheral_ctl_pool[idx].m_u32Peripheral == u32PeriphType)
            return idx;
        idx++;
    }

    // No such peripheral
    return -1;
}

static void nu_pdma_periph_ctrl_fill(int i32ChannID, int i32CtlPoolIdx)
{
    nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];

    psPdmaChann->m_spPeripCtl.m_u32Peripheral = g_nu_pdma_peripheral_ctl_pool[i32CtlPoolIdx].m_u32Peripheral;
    psPdmaChann->m_spPeripCtl.m_eMemCtl = g_nu_pdma_peripheral_ctl_pool[i32CtlPoolIdx].m_eMemCtl;
}

/**
 * Hardware PDMA Initialization
 */
static void nu_pdma_init(void)
{
    int i;

    if (nu_pdma_inited)
        return;

    rt_memset(nu_pdma_chn_arr, 0x00, NU_PDMA_CH_MAX * sizeof(nu_pdma_chn_t));

    for (i = (PDMA_START + 1); i < PDMA_CNT; i++)
    {
        nu_pdma_chn_mask_arr[i] = ~(NU_PDMA_CH_Msk);

        nu_sys_ip_reset(nu_pdma_arr[i].u32RstId);

        /* Initialize PDMA setting */
        PDMA_Open((PDMA_T *)nu_pdma_arr[i].m_pvBase, PDMA_CH_Msk);
        PDMA_Close((PDMA_T *)nu_pdma_arr[i].m_pvBase);

        /* Register PDMA ISR */
        rt_hw_interrupt_install(nu_pdma_arr[i].eIRQn, nu_pdma_isr, nu_pdma_arr[i].m_pvBase, nu_pdma_arr[i].name);
        rt_hw_interrupt_umask(nu_pdma_arr[i].eIRQn);
    }

    nu_pdma_inited = 1;
}

static inline void nu_pdma_channel_enable(int i32ChannID)
{
    PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
    int u32ModChannId = NU_PDMA_GET_MOD_CHIDX(i32ChannID);

    /* Clear the descriptor table control register. */
    PDMA->DSCT[u32ModChannId].CTL = 0UL;

    /* Enable the channel */
    PDMA->CHCTL |= (1 << u32ModChannId);
}

static inline void nu_pdma_channel_disable(int i32ChannID)
{
    PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);

    PDMA->CHCTL &= ~(1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
}

static inline void nu_pdma_channel_reset(int i32ChannID)
{
    PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
    int u32ModChannId = NU_PDMA_GET_MOD_CHIDX(i32ChannID);

    PDMA->CHRST = (1 << u32ModChannId);

    /* Wait until the channel's CHCTL bit is cleared. */
    while ((PDMA->CHCTL & (1 << u32ModChannId)));
}

static rt_err_t nu_pdma_timeout_set(int i32ChannID, int i32Timeout_us)
{
    rt_err_t ret = RT_EINVAL;
    PDMA_T *PDMA = NULL;
    uint32_t u32ModChannId;

    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_timeout_set;

    PDMA = NU_PDMA_GET_BASE(i32ChannID);
    u32ModChannId = NU_PDMA_GET_MOD_CHIDX(i32ChannID);

    nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32IdleTimeout_us = i32Timeout_us;

    if (i32Timeout_us)
    {
        uint32_t u32ToClk_Max = 1000000ul / (CLK_GetSYSCLK1Freq() / (1 << 8));
        uint32_t u32Divider = (i32Timeout_us / u32ToClk_Max) / (1 << 16);
        uint32_t u32TOutCnt = (i32Timeout_us / u32ToClk_Max) % (1 << 16);

        PDMA_DisableTimeout(PDMA, 1 << u32ModChannId);
        PDMA_EnableInt(PDMA, u32ModChannId, PDMA_INT_TIMEOUT);    // Interrupt type

        if (u32Divider > 7)
        {
            u32Divider = 7;
            u32TOutCnt = (1 << 16) - 1;
        }

        if (u32ModChannId < 8)
            PDMA->TOUTPSC = (PDMA->TOUTPSC & ~(0x7ul << (PDMA_TOUTPSC_TOUTPSC1_Pos * u32ModChannId))) | (u32Divider << (PDMA_TOUTPSC_TOUTPSC1_Pos * u32ModChannId));
        else
            PDMA->TOUTPSC1 = (PDMA->TOUTPSC1 & ~(0x7ul << (PDMA_TOUTPSC_TOUTPSC1_Pos * u32ModChannId))) | (u32Divider << (PDMA_TOUTPSC_TOUTPSC1_Pos * u32ModChannId));

        //rt_kprintf("[%d]HCLK=%d, u32Divider=%d, u32TOutCnt=%d\n", i32Timeout_us, CLK_GetSYSCLK1Freq(), u32Divider, u32TOutCnt);

        PDMA_SetTimeOut(PDMA, u32ModChannId, 1, u32TOutCnt);

        ret = RT_EOK;
    }
    else
    {
        PDMA_DisableInt(PDMA, u32ModChannId, PDMA_INT_TIMEOUT);    // Interrupt type
        PDMA_DisableTimeout(PDMA, 1 << u32ModChannId);
    }

exit_nu_pdma_timeout_set:

    return -(ret);
}
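
/*
 * A worked example of the timeout math above (a sketch; the 180 MHz SYSCLK1
 * figure is illustrative only, not taken from this driver):
 *
 *   CLK_GetSYSCLK1Freq() = 180000000
 *   u32ToClk_Max = 1000000 / (180000000 / 256) = 1 us per timeout tick
 *
 *   For i32Timeout_us = 100000:
 *     u32Divider = (100000 / 1) / 65536 = 1
 *     u32TOutCnt = (100000 / 1) % 65536 = 34464
 *
 * i.e. the requested interval is split into a 3-bit prescaler field and a
 * 16-bit counter; intervals too long for both are clamped to the maximum
 * (divider 7, counter 0xFFFF).
 */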

void nu_pdma_channel_terminate(int i32ChannID)
{
    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_pdma_channel_terminate;

    /* Disable the timeout function of the specified channel. */
    nu_pdma_timeout_set(i32ChannID, 0);

    /* Reset the specified channel. */
    nu_pdma_channel_reset(i32ChannID);

    /* Enable the specified channel after reset. */
    nu_pdma_channel_enable(i32ChannID);

exit_pdma_channel_terminate:

    return;
}

int nu_pdma_channel_allocate(int32_t i32PeripType)
{
    int ChnId, i32PeripCtlIdx, j;

    nu_pdma_init();

    if ((i32PeripCtlIdx = nu_pdma_peripheral_set(i32PeripType)) < 0)
        goto exit_nu_pdma_channel_allocate;

    for (j = (PDMA_START + 1); j < PDMA_CNT; j++)
    {
        /* Find the position of the first '0' in nu_pdma_chn_mask_arr[j]. */
        ChnId = nu_cto(nu_pdma_chn_mask_arr[j]);
        if (ChnId < PDMA_CH_MAX)
        {
            nu_pdma_chn_mask_arr[j] |= (1 << ChnId);
            ChnId += (j * PDMA_CH_MAX);

            rt_memset(nu_pdma_chn_arr + ChnId - NU_PDMA_CH_Pos, 0x00, sizeof(nu_pdma_chn_t));

            /* Store the index into g_nu_pdma_peripheral_ctl_pool */
            nu_pdma_periph_ctrl_fill(ChnId, i32PeripCtlIdx);

            /* Reset the channel */
            nu_pdma_channel_terminate(ChnId);

            return ChnId;
        }
    }

exit_nu_pdma_channel_allocate:

    // No channel available
    return -(RT_ERROR);
}

rt_err_t nu_pdma_channel_free(int i32ChannID)
{
    rt_err_t ret = RT_EINVAL;

    if (!nu_pdma_inited)
        goto exit_nu_pdma_channel_free;

    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_channel_free;

    if ((i32ChannID < NU_PDMA_CH_MAX) && (i32ChannID >= NU_PDMA_CH_Pos))
    {
        nu_pdma_chn_mask_arr[NU_PDMA_GET_MOD_IDX(i32ChannID)] &= ~(1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
        nu_pdma_channel_disable(i32ChannID);
        ret = RT_EOK;
    }

exit_nu_pdma_channel_free:

    return -(ret);
}
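
/*
 * Example: allocate a channel bound to a peripheral request, then release
 * it (a minimal sketch; error handling beyond the allocate check is omitted):
 *
 *     int ch = nu_pdma_channel_allocate(PDMA_UART1_RX);
 *     if (ch < 0)
 *     {
 *         // No free channel, or PDMA_UART1_RX is not listed in
 *         // g_nu_pdma_peripheral_ctl_pool.
 *     }
 *     else
 *     {
 *         // ... use the channel ...
 *         nu_pdma_channel_free(ch);
 *     }
 */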

rt_err_t nu_pdma_filtering_set(int i32ChannID, uint32_t u32EventFilter)
{
    rt_err_t ret = RT_EINVAL;

    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_filtering_set;

    nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32EventFilter = u32EventFilter;

    ret = RT_EOK;

exit_nu_pdma_filtering_set:

    return -(ret);
}

uint32_t nu_pdma_filtering_get(int i32ChannID)
{
    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_filtering_get;

    return nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32EventFilter;

exit_nu_pdma_filtering_get:

    return 0;
}

rt_err_t nu_pdma_callback_register(int i32ChannID, nu_pdma_chn_cb_t psChnCb)
{
    rt_err_t ret = RT_EINVAL;
    nu_pdma_chn_cb_t psChnCb_Current = RT_NULL;

    RT_ASSERT(psChnCb != RT_NULL);

    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_callback_register;

    switch (psChnCb->m_eCBType)
    {
    case eCBType_Event:
        psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Event;
        break;
    case eCBType_Trigger:
        psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Trigger;
        break;
    case eCBType_Disable:
        psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Disable;
        break;
    default:
        goto exit_nu_pdma_callback_register;
    }

    psChnCb_Current->m_pfnCBHandler = psChnCb->m_pfnCBHandler;
    psChnCb_Current->m_pvUserData = psChnCb->m_pvUserData;

    ret = RT_EOK;

exit_nu_pdma_callback_register:

    return -(ret);
}
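
/*
 * Example: register a transfer-event callback and filter which events reach
 * it (a sketch; `uart_dma_rx_done` and `psUserCtx` are hypothetical names):
 *
 *     static void uart_dma_rx_done(void *pvUserData, uint32_t u32Events)
 *     {
 *         if (u32Events & NU_PDMA_EVENT_TRANSFER_DONE)
 *         {
 *             // ... signal the waiting thread ...
 *         }
 *     }
 *
 *     struct nu_pdma_chn_cb sChnCB;
 *     sChnCB.m_eCBType      = eCBType_Event;
 *     sChnCB.m_pfnCBHandler = uart_dma_rx_done;
 *     sChnCB.m_pvUserData   = psUserCtx;
 *
 *     nu_pdma_filtering_set(ch, NU_PDMA_EVENT_TRANSFER_DONE | NU_PDMA_EVENT_TIMEOUT);
 *     nu_pdma_callback_register(ch, &sChnCB);
 */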

nu_pdma_cb_handler_t nu_pdma_callback_hijack(int i32ChannID, nu_pdma_cbtype_t eCBType, nu_pdma_chn_cb_t psChnCb_Hijack)
{
    nu_pdma_chn_cb_t psChnCb_Current = RT_NULL;
    struct nu_pdma_chn_cb sChnCB_Tmp;

    RT_ASSERT(psChnCb_Hijack != NULL);

    sChnCB_Tmp.m_pfnCBHandler = RT_NULL;

    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_callback_hijack;

    switch (eCBType)
    {
    case eCBType_Event:
        psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Event;
        break;
    case eCBType_Trigger:
        psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Trigger;
        break;
    case eCBType_Disable:
        psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Disable;
        break;
    default:
        goto exit_nu_pdma_callback_hijack;
    }

    /* Backup */
    sChnCB_Tmp.m_pfnCBHandler = psChnCb_Current->m_pfnCBHandler;
    sChnCB_Tmp.m_pvUserData = psChnCb_Current->m_pvUserData;

    /* Update */
    psChnCb_Current->m_pfnCBHandler = psChnCb_Hijack->m_pfnCBHandler;
    psChnCb_Current->m_pvUserData = psChnCb_Hijack->m_pvUserData;

    /* Restore */
    psChnCb_Hijack->m_pfnCBHandler = sChnCB_Tmp.m_pfnCBHandler;
    psChnCb_Hijack->m_pvUserData = sChnCB_Tmp.m_pvUserData;

exit_nu_pdma_callback_hijack:

    return sChnCB_Tmp.m_pfnCBHandler;
}
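
/*
 * Example: temporarily take over a channel's event callback and restore it
 * later (a sketch; `my_temporary_cb` is hypothetical). After the first call,
 * psChnCb_Hijack holds the previous handler/user-data pair, so hijacking a
 * second time with the same structure puts the original callback back:
 *
 *     struct nu_pdma_chn_cb sChnCB_Hijack;
 *     sChnCB_Hijack.m_pfnCBHandler = my_temporary_cb;
 *     sChnCB_Hijack.m_pvUserData   = RT_NULL;
 *
 *     nu_pdma_callback_hijack(ch, eCBType_Event, &sChnCB_Hijack);  // swap in
 *     // ... section that needs my_temporary_cb ...
 *     nu_pdma_callback_hijack(ch, eCBType_Event, &sChnCB_Hijack);  // swap back
 */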

static int nu_pdma_non_transfer_count_get(int32_t i32ChannID)
{
    PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
    return ((PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_TXCNT_Msk) >> PDMA_DSCT_CTL_TXCNT_Pos) + 1;
}

int nu_pdma_transferred_byte_get(int32_t i32ChannID, int32_t i32TriggerByteLen)
{
    int i32BitWidth = 0;
    int cur_txcnt = 0;
    PDMA_T *PDMA;

    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_transferred_byte_get;

    PDMA = NU_PDMA_GET_BASE(i32ChannID);

    if ((PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_OPMODE_Msk) != PDMA_OP_SCATTER)
    {
        i32BitWidth = PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_TXWIDTH_Msk;
        i32BitWidth = (i32BitWidth == PDMA_WIDTH_8)  ? 1 :
                      (i32BitWidth == PDMA_WIDTH_16) ? 2 :
                      (i32BitWidth == PDMA_WIDTH_32) ? 4 : 0;

        cur_txcnt = nu_pdma_non_transfer_count_get(i32ChannID);

        // rt_kprintf("\n[%s] %d %d %02x\n", __func__, i32ChannID, cur_txcnt, (PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_OPMODE_Msk));

        return (i32TriggerByteLen - (cur_txcnt) * i32BitWidth);
    }

    // rt_kprintf("\n@@@@ %d %02x @@@@\n", i32ChannID, PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_OPMODE_Msk);

    return 0;

exit_nu_pdma_transferred_byte_get:

    return -1;
}

nu_pdma_desc_t nu_pdma_get_channel_desc(int32_t i32ChannID)
{
    PDMA_T *PDMA;

    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_get_srcaddr;

    PDMA = NU_PDMA_GET_BASE(i32ChannID);

    return &PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)];

exit_nu_pdma_get_srcaddr:

    return RT_NULL;
}

nu_pdma_memctrl_t nu_pdma_channel_memctrl_get(int i32ChannID)
{
    nu_pdma_memctrl_t eMemCtrl = eMemCtl_Undefined;

    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_channel_memctrl_get;

    eMemCtrl = nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl.m_eMemCtl;

exit_nu_pdma_channel_memctrl_get:

    return eMemCtrl;
}

rt_err_t nu_pdma_channel_memctrl_set(int i32ChannID, nu_pdma_memctrl_t eMemCtrl)
{
    rt_err_t ret = RT_EINVAL;
    nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];

    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_channel_memctrl_set;
    else if ((eMemCtrl < eMemCtl_SrcFix_DstFix) || (eMemCtrl > eMemCtl_SrcInc_DstInc))
        goto exit_nu_pdma_channel_memctrl_set;

    /* A fixed source address (SAR_FIX) in BURST mode is not supported on PDMA_MEM channels. */
    if ((psPdmaChann->m_spPeripCtl.m_u32Peripheral == PDMA_MEM) &&
            ((eMemCtrl == eMemCtl_SrcFix_DstInc) || (eMemCtrl == eMemCtl_SrcFix_DstFix)))
        goto exit_nu_pdma_channel_memctrl_set;

    nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl.m_eMemCtl = eMemCtrl;

    ret = RT_EOK;

exit_nu_pdma_channel_memctrl_set:

    return -(ret);
}
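
/*
 * Example: override a channel's default address-control mode (a sketch).
 * On a PDMA_MEM channel only source-incrementing modes are accepted; this
 * is how nu_pdma_memfun() below switches between plain memory copies and
 * fixed-destination pushes:
 *
 *     nu_pdma_channel_memctrl_set(ch, eMemCtl_SrcInc_DstFix);   // accepted
 *     nu_pdma_channel_memctrl_set(ch, eMemCtl_SrcFix_DstInc);   // rejected for PDMA_MEM
 */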

static void nu_pdma_channel_memctrl_fill(nu_pdma_memctrl_t eMemCtl, uint32_t *pu32SrcCtl, uint32_t *pu32DstCtl)
{
    switch ((int)eMemCtl)
    {
    case eMemCtl_SrcFix_DstFix:
        *pu32SrcCtl = PDMA_SAR_FIX;
        *pu32DstCtl = PDMA_DAR_FIX;
        break;
    case eMemCtl_SrcFix_DstInc:
        *pu32SrcCtl = PDMA_SAR_FIX;
        *pu32DstCtl = PDMA_DAR_INC;
        break;
    case eMemCtl_SrcInc_DstFix:
        *pu32SrcCtl = PDMA_SAR_INC;
        *pu32DstCtl = PDMA_DAR_FIX;
        break;
    case eMemCtl_SrcInc_DstInc:
        *pu32SrcCtl = PDMA_SAR_INC;
        *pu32DstCtl = PDMA_DAR_INC;
        break;
    default:
        break;
    }
}

/* This is for scatter-gather DMA. */
rt_err_t nu_pdma_desc_setup(int i32ChannID, nu_pdma_desc_t dma_desc, uint32_t u32DataWidth, uint32_t u32AddrSrc,
                            uint32_t u32AddrDst, int32_t i32TransferCnt, nu_pdma_desc_t next, uint32_t u32BeSilent)
{
    nu_pdma_periph_ctl_t *psPeriphCtl = NULL;

    uint32_t u32SrcCtl = 0;
    uint32_t u32DstCtl = 0;

    rt_err_t ret = RT_EINVAL;

    if (!dma_desc)
        goto exit_nu_pdma_desc_setup;
    else if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_desc_setup;
    else if (!(u32DataWidth == 8 || u32DataWidth == 16 || u32DataWidth == 32))
        goto exit_nu_pdma_desc_setup;
    else if ((u32AddrSrc % (u32DataWidth / 8)) || (u32AddrDst % (u32DataWidth / 8)))
        goto exit_nu_pdma_desc_setup;
    else if (i32TransferCnt > NU_PDMA_MAX_TXCNT)
        goto exit_nu_pdma_desc_setup;

    psPeriphCtl = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl;

    nu_pdma_channel_memctrl_fill(psPeriphCtl->m_eMemCtl, &u32SrcCtl, &u32DstCtl);

    dma_desc->CTL = ((i32TransferCnt - 1) << PDMA_DSCT_CTL_TXCNT_Pos) |
                    ((u32DataWidth == 8) ? PDMA_WIDTH_8 : (u32DataWidth == 16) ? PDMA_WIDTH_16 : PDMA_WIDTH_32) |
                    u32SrcCtl |
                    u32DstCtl |
                    PDMA_OP_BASIC;

    dma_desc->SA = u32AddrSrc;
    dma_desc->DA = u32AddrDst;
    dma_desc->NEXT = 0;  /* Terminating node by default. */

    if (psPeriphCtl->m_u32Peripheral == PDMA_MEM)
    {
        /* For M2M transfer */
        dma_desc->CTL |= (PDMA_REQ_BURST | PDMA_BURST_32);
    }
    else
    {
        /* For P2M and M2P transfer */
        dma_desc->CTL |= (PDMA_REQ_SINGLE);
    }

    if (next)
    {
        /* Link to the next node and switch to scatter-gather DMA mode. */
        dma_desc->CTL = (dma_desc->CTL & ~PDMA_DSCT_CTL_OPMODE_Msk) | PDMA_OP_SCATTER;
        dma_desc->NEXT = (uint32_t)next;
    }

    /* Be silent */
    if (u32BeSilent)
        dma_desc->CTL |= PDMA_DSCT_CTL_TBINTDIS_Msk;

    ret = RT_EOK;

exit_nu_pdma_desc_setup:

    return -(ret);
}

rt_err_t nu_pdma_sgtbls_allocate(nu_pdma_desc_t *ppsSgtbls, int num)
{
    int i;

    RT_ASSERT(ppsSgtbls != NULL);
    RT_ASSERT(num > 0);

    for (i = 0; i < num; i++)
    {
        ppsSgtbls[i] = (nu_pdma_desc_t) rt_malloc_align(RT_ALIGN(sizeof(DSCT_T), 64), 64);
        RT_ASSERT(ppsSgtbls[i] != RT_NULL);

        rt_memset((void *)ppsSgtbls[i], 0, RT_ALIGN(sizeof(DSCT_T), 64));
    }

    return RT_EOK;
}

void nu_pdma_sgtbls_free(nu_pdma_desc_t *ppsSgtbls, int num)
{
    int i;

    RT_ASSERT(ppsSgtbls != NULL);
    RT_ASSERT(num > 0);

    for (i = 0; i < num; i++)
    {
        rt_free_align(ppsSgtbls[i]);
    }
}

static void _nu_pdma_transfer(int i32ChannID, uint32_t u32Peripheral, nu_pdma_desc_t head, uint32_t u32IdleTimeout_us)
{
    PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
    nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];

#if !defined(USE_MA35D1_SUBM)
    /* Write back D-cache contents to memory before transferring. */
    {
        static uint32_t bNonCacheAlignedWarning = 1;
        nu_pdma_desc_t next = head;
        int CACHE_LINE_SIZE = nu_cpu_dcache_line_size();
        while (next != RT_NULL)
        {
            uint32_t u32TxCnt     = ((next->CTL & PDMA_DSCT_CTL_TXCNT_Msk) >> PDMA_DSCT_CTL_TXCNT_Pos) + 1;
            uint32_t u32DataWidth = (1 << ((next->CTL & PDMA_DSCT_CTL_TXWIDTH_Msk) >> PDMA_DSCT_CTL_TXWIDTH_Pos));
            uint32_t u32SrcCtl    = (next->CTL & PDMA_DSCT_CTL_SAINC_Msk);
            uint32_t u32DstCtl    = (next->CTL & PDMA_DSCT_CTL_DAINC_Msk);
            uint32_t u32FlushLen  = u32TxCnt * u32DataWidth;
#if 0
            rt_kprintf("[%s] i32ChannID=%d\n", __func__, i32ChannID);
            rt_kprintf("[%s] PDMA=0x%08x\n", __func__, (uint32_t)PDMA);
            rt_kprintf("[%s] u32TxCnt=%d\n", __func__, u32TxCnt);
            rt_kprintf("[%s] u32DataWidth=%d\n", __func__, u32DataWidth);
            rt_kprintf("[%s] u32SrcCtl=0x%08x\n", __func__, u32SrcCtl);
            rt_kprintf("[%s] u32DstCtl=0x%08x\n", __func__, u32DstCtl);
            rt_kprintf("[%s] u32FlushLen=%d\n", __func__, u32FlushLen);
            rt_kprintf("[%s] DA=%08x\n", __func__, next->DA);
            rt_kprintf("[%s] SA=%08x\n", __func__, next->SA);
#endif
            /* Flush the Src buffer into memory. */
            if ((u32SrcCtl == PDMA_SAR_INC)) // for M2P, M2M
                rt_hw_cpu_dcache_clean_and_invalidate((void *)next->SA, u32FlushLen);

            /* Flush the Dst buffer into memory. */
            if ((u32DstCtl == PDMA_DAR_INC)) // for P2M, M2M
                rt_hw_cpu_dcache_clean_and_invalidate((void *)next->DA, u32FlushLen);

            /* Flush the descriptor into memory. */
            rt_hw_cpu_dcache_clean_and_invalidate((void *)next, sizeof(DSCT_T));

            if (bNonCacheAlignedWarning)
            {
                if ((u32FlushLen & (CACHE_LINE_SIZE - 1)) ||
                        (next->SA & (CACHE_LINE_SIZE - 1)) ||
                        (next->DA & (CACHE_LINE_SIZE - 1)) ||
                        ((rt_uint32_t)next & (CACHE_LINE_SIZE - 1)))
                {
                    /*
                       To avoid a race condition between the DMA transfer and the D-cache
                       write-back, the source, destination, DMA descriptor address and
                       length should all be aligned to CACHE_LINE_SIZE.
                    */
                    bNonCacheAlignedWarning = 0;
                    //rt_kprintf("[PDMA-W]\n");
                }
            }

            next = (nu_pdma_desc_t)next->NEXT;

            if (next == head) break;
        }
    }
#endif

    nu_pdma_desc_t psDesc = nu_pdma_get_channel_desc(i32ChannID);

    PDMA_DisableTimeout(PDMA, 1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));

    /* Set scatter-gather mode and the head descriptor.
       Take care of the head structure: the caller must ensure cache coherence. */
    PDMA_SetTransferMode(PDMA,
                         NU_PDMA_GET_MOD_CHIDX(i32ChannID),
                         u32Peripheral,
                         (head->NEXT != 0) ? 1 : 0,
                         (uint32_t)head);

    /* PDMA fetches descriptors on demand when SG is enabled. Validate the head descriptor here. */
    if ((u32Peripheral != PDMA_MEM) &&
            (head->NEXT != 0) &&
            (head->DA != psDesc->DA))
    {
        RT_ASSERT(0);
    }

    PDMA_EnableInt(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID), PDMA_INT_TRANS_DONE);

    nu_pdma_timeout_set(i32ChannID, u32IdleTimeout_us);

    /* If the peripheral is M2M, trigger it now. */
    if (u32Peripheral == PDMA_MEM)
    {
        PDMA_Trigger(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID));
    }
    else if (psPdmaChann->m_sCB_Trigger.m_pfnCBHandler)
    {
        psPdmaChann->m_sCB_Trigger.m_pfnCBHandler(psPdmaChann->m_sCB_Trigger.m_pvUserData, psPdmaChann->m_sCB_Trigger.m_u32Reserved);
    }
}

static void _nu_pdma_free_sgtbls(nu_pdma_chn_t *psPdmaChann)
{
    if (psPdmaChann->m_ppsSgtbl)
    {
        nu_pdma_sgtbls_free(psPdmaChann->m_ppsSgtbl, psPdmaChann->m_u32WantedSGTblNum);
        rt_free_align((void *)psPdmaChann->m_ppsSgtbl);
        psPdmaChann->m_ppsSgtbl = RT_NULL;
        psPdmaChann->m_u32WantedSGTblNum = 0;
    }
}

static rt_err_t _nu_pdma_transfer_chain(int i32ChannID, uint32_t u32DataWidth, uint32_t u32AddrSrc, uint32_t u32AddrDst, uint32_t u32TransferCnt, uint32_t u32IdleTimeout_us)
{
    int i = 0;
    rt_err_t ret = RT_ERROR;
    nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
    nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
    nu_pdma_memctrl_t eMemCtl = nu_pdma_channel_memctrl_get(i32ChannID);

    rt_uint32_t u32Offset = 0;
    rt_uint32_t u32TxCnt = 0;

    psPeriphCtl = &psPdmaChann->m_spPeripCtl;

    if (psPdmaChann->m_u32WantedSGTblNum != (u32TransferCnt / NU_PDMA_MAX_TXCNT + 1))
    {
        if (psPdmaChann->m_u32WantedSGTblNum > 0)
            _nu_pdma_free_sgtbls(psPdmaChann);

        psPdmaChann->m_u32WantedSGTblNum = u32TransferCnt / NU_PDMA_MAX_TXCNT + 1;

        psPdmaChann->m_ppsSgtbl = (nu_pdma_desc_t *)rt_malloc_align(sizeof(nu_pdma_desc_t) * psPdmaChann->m_u32WantedSGTblNum, 4);
        if (!psPdmaChann->m_ppsSgtbl)
            goto exit__nu_pdma_transfer_chain;

        ret = nu_pdma_sgtbls_allocate(psPdmaChann->m_ppsSgtbl, psPdmaChann->m_u32WantedSGTblNum);
        if (ret != RT_EOK)
            goto exit__nu_pdma_transfer_chain;
    }

    for (i = 0; i < psPdmaChann->m_u32WantedSGTblNum; i++)
    {
        u32TxCnt = (u32TransferCnt > NU_PDMA_MAX_TXCNT) ? NU_PDMA_MAX_TXCNT : u32TransferCnt;

        ret = nu_pdma_desc_setup(i32ChannID,
                                 psPdmaChann->m_ppsSgtbl[i],
                                 u32DataWidth,
                                 (eMemCtl & 0x2ul) ? u32AddrSrc + u32Offset : u32AddrSrc, /* Src address is Inc or not. */
                                 (eMemCtl & 0x1ul) ? u32AddrDst + u32Offset : u32AddrDst, /* Dst address is Inc or not. */
                                 u32TxCnt,
                                 ((i + 1) == psPdmaChann->m_u32WantedSGTblNum) ? RT_NULL : psPdmaChann->m_ppsSgtbl[i + 1],
                                 ((i + 1) == psPdmaChann->m_u32WantedSGTblNum) ? 0 : 1); // Silent, w/o TD interrupt
        if (ret != RT_EOK)
            goto exit__nu_pdma_transfer_chain;

        u32TransferCnt -= u32TxCnt;
        u32Offset += (u32TxCnt * u32DataWidth / 8);
    }

    _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, psPdmaChann->m_ppsSgtbl[0], u32IdleTimeout_us);

    ret = RT_EOK;

    return ret;

exit__nu_pdma_transfer_chain:

    _nu_pdma_free_sgtbls(psPdmaChann);

    return -(ret);
}

rt_err_t nu_pdma_transfer(int i32ChannID, uint32_t u32DataWidth, uint32_t u32AddrSrc, uint32_t u32AddrDst, uint32_t u32TransferCnt, uint32_t u32IdleTimeout_us)
{
    rt_err_t ret = RT_EINVAL;
    PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
    nu_pdma_desc_t head;
    nu_pdma_chn_t *psPdmaChann;
    nu_pdma_periph_ctl_t *psPeriphCtl = NULL;

    if (nu_pdma_check_is_nonallocated(i32ChannID))
        goto exit_nu_pdma_transfer;
    else if (!u32TransferCnt)
        goto exit_nu_pdma_transfer;
    else if (u32TransferCnt > NU_PDMA_MAX_TXCNT)
        return _nu_pdma_transfer_chain(i32ChannID, u32DataWidth, u32AddrSrc, u32AddrDst, u32TransferCnt, u32IdleTimeout_us);

    psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
    psPeriphCtl = &psPdmaChann->m_spPeripCtl;

    head = &PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)];

    ret = nu_pdma_desc_setup(i32ChannID,
                             head,
                             u32DataWidth,
                             u32AddrSrc,
                             u32AddrDst,
                             u32TransferCnt,
                             RT_NULL,
                             0);
    if (ret != RT_EOK)
        goto exit_nu_pdma_transfer;

    _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, head, u32IdleTimeout_us);

    ret = RT_EOK;

exit_nu_pdma_transfer:

    return -(ret);
}
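
/*
 * Example: a one-shot peripheral-to-memory transfer (a sketch; `au8RxBuf`
 * and the UART1->DAT source are illustrative). Counts above NU_PDMA_MAX_TXCNT
 * are split into a scatter-gather chain by the function above automatically:
 *
 *     int ch = nu_pdma_channel_allocate(PDMA_UART1_RX);
 *     nu_pdma_transfer(ch,
 *                      8,                        // 8-bit data width
 *                      (uint32_t)&UART1->DAT,    // fixed source (P2M)
 *                      (uint32_t)&au8RxBuf[0],   // incrementing destination
 *                      sizeof(au8RxBuf),
 *                      1000);                    // 1000 us idle timeout
 */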
  779. rt_err_t nu_pdma_sg_transfer(int i32ChannID, nu_pdma_desc_t head, uint32_t u32IdleTimeout_us)
  780. {
  781. rt_err_t ret = RT_EINVAL;
  782. nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
  783. if (!head)
  784. goto exit_nu_pdma_sg_transfer;
  785. else if (nu_pdma_check_is_nonallocated(i32ChannID))
  786. goto exit_nu_pdma_sg_transfer;
  787. psPeriphCtl = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl;
  788. _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, head, u32IdleTimeout_us);
  789. ret = RT_EOK;
  790. exit_nu_pdma_sg_transfer:
  791. return -(ret);
  792. }
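
/*
 * Example: a two-descriptor scatter-gather (M2M) transfer (a sketch; the
 * buffers and counts are illustrative). Descriptors must stay valid until
 * the transfer completes; nu_pdma_sgtbls_allocate() returns them 64-byte
 * aligned, which satisfies the cache-line alignment noted in
 * _nu_pdma_transfer():
 *
 *     int ch = nu_pdma_channel_allocate(PDMA_MEM);
 *     nu_pdma_desc_t apsDesc[2];
 *     nu_pdma_sgtbls_allocate(&apsDesc[0], 2);
 *
 *     // First hop: silent (no TD interrupt), linked to the second.
 *     nu_pdma_desc_setup(ch, apsDesc[0], 32,
 *                        (uint32_t)&au32SrcA[0], (uint32_t)&au32Dst[0],
 *                        128, apsDesc[1], 1);
 *     // Last hop: terminating node, raises the TD interrupt.
 *     nu_pdma_desc_setup(ch, apsDesc[1], 32,
 *                        (uint32_t)&au32SrcB[0], (uint32_t)&au32Dst[128],
 *                        128, RT_NULL, 0);
 *
 *     nu_pdma_sg_transfer(ch, apsDesc[0], 0);
 *     // ... wait for completion, then:
 *     nu_pdma_sgtbls_free(&apsDesc[0], 2);
 */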

static void nu_pdma_isr(int vector, void *pvdata)
{
    int i;
    PDMA_T *PDMA = (void *)pvdata;

    uint32_t intsts = PDMA_GET_INT_STATUS(PDMA);
    uint32_t abtsts = PDMA_GET_ABORT_STS(PDMA);
    uint32_t tdsts = PDMA_GET_TD_STS(PDMA);
    uint32_t unalignsts = PDMA_GET_ALIGN_STS(PDMA);
    uint32_t reqto = intsts & PDMA_INTSTS_REQTOFn_Msk;
    uint32_t reqto_ch = (reqto >> PDMA_INTSTS_REQTOFn_Pos);

    int allch_sts = (reqto_ch | tdsts | abtsts | unalignsts);

    // Abort
    if (intsts & PDMA_INTSTS_ABTIF_Msk)
    {
        // Clear all Abort flags
        PDMA_CLR_ABORT_FLAG(PDMA, abtsts);
    }

    // Transfer done
    if (intsts & PDMA_INTSTS_TDIF_Msk)
    {
        // Clear all Transfer-done flags
        PDMA_CLR_TD_FLAG(PDMA, tdsts);
    }

    // Unaligned
    if (intsts & PDMA_INTSTS_ALIGNF_Msk)
    {
        // Clear all Unaligned flags
        PDMA_CLR_ALIGN_FLAG(PDMA, unalignsts);
    }

    // Timeout
    if (reqto)
    {
        // Clear all Timeout flags
        PDMA->INTSTS = reqto;
    }

    // Find the position of the first '1' in allch_sts.
    while ((i = nu_ctz(allch_sts)) < PDMA_CH_MAX)
    {
        int module_id = ((uint32_t)PDMA - DEF_PDMA_BASE_START) / 0x10000UL;
        int j = i + (module_id * PDMA_CH_MAX);
        int ch_mask = (1 << i);

        if (nu_pdma_chn_mask_arr[module_id] & ch_mask)
        {
            int ch_event = 0;
            nu_pdma_chn_t *dma_chn = nu_pdma_chn_arr + j - NU_PDMA_CH_Pos;

            if (dma_chn->m_sCB_Event.m_pfnCBHandler)
            {
                if (abtsts & ch_mask)
                {
                    ch_event |= NU_PDMA_EVENT_ABORT;
                }

                if (tdsts & ch_mask)
                {
                    ch_event |= NU_PDMA_EVENT_TRANSFER_DONE;
                }

                if (unalignsts & ch_mask)
                {
                    ch_event |= NU_PDMA_EVENT_ALIGNMENT;
                }

                if (reqto_ch & ch_mask)
                {
                    PDMA_DisableTimeout(PDMA, ch_mask);
                    ch_event |= NU_PDMA_EVENT_TIMEOUT;
                }

                if (dma_chn->m_sCB_Disable.m_pfnCBHandler)
                    dma_chn->m_sCB_Disable.m_pfnCBHandler(dma_chn->m_sCB_Disable.m_pvUserData, dma_chn->m_sCB_Disable.m_u32Reserved);

                if ((dma_chn->m_u32EventFilter & ch_event) && dma_chn->m_sCB_Event.m_pfnCBHandler)
                    dma_chn->m_sCB_Event.m_pfnCBHandler(dma_chn->m_sCB_Event.m_pvUserData, ch_event);

                if (reqto_ch & ch_mask)
                    nu_pdma_timeout_set(j, nu_pdma_chn_arr[j - NU_PDMA_CH_Pos].m_u32IdleTimeout_us);
            } //if (dma_chn->m_sCB_Event.m_pfnCBHandler)
        } //if (nu_pdma_chn_mask_arr[module_id] & ch_mask)

        // Clear the served bit.
        allch_sts &= ~ch_mask;
    } //while
}

static void nu_pdma_memfun_actor_init(void)
{
    int i = 0;

    nu_pdma_init();

    for (i = 0; i < NU_PDMA_MEMFUN_ACTOR_MAX; i++)
    {
        rt_memset(&nu_pdma_memfun_actor_arr[i], 0, sizeof(struct nu_pdma_memfun_actor));
        if (-(RT_ERROR) != (nu_pdma_memfun_actor_arr[i].m_i32ChannID = nu_pdma_channel_allocate(PDMA_MEM)))
        {
            nu_pdma_memfun_actor_arr[i].m_psSemMemFun = rt_sem_create("memactor_sem", 0, RT_IPC_FLAG_FIFO);
            RT_ASSERT(nu_pdma_memfun_actor_arr[i].m_psSemMemFun != RT_NULL);
        }
        else
            break;
    }

    if (i)
    {
        nu_pdma_memfun_actor_maxnum = i;
        nu_pdma_memfun_actor_mask = ~(((1 << i) - 1));

        nu_pdma_memfun_actor_pool_sem = rt_sem_create("mempool_sem", nu_pdma_memfun_actor_maxnum, RT_IPC_FLAG_FIFO);
        RT_ASSERT(nu_pdma_memfun_actor_pool_sem != RT_NULL);

        nu_pdma_memfun_actor_pool_lock = rt_mutex_create("mempool_lock", RT_IPC_FLAG_PRIO);
        RT_ASSERT(nu_pdma_memfun_actor_pool_lock != RT_NULL);
    }
}

static void nu_pdma_memfun_cb(void *pvUserData, uint32_t u32Events)
{
    rt_err_t result = RT_EOK;
    nu_pdma_memfun_actor_t psMemFunActor = (nu_pdma_memfun_actor_t)pvUserData;

    psMemFunActor->m_u32Result = u32Events;

    result = rt_sem_release(psMemFunActor->m_psSemMemFun);
    RT_ASSERT(result == RT_EOK);
}

static int nu_pdma_memfun_employ(void)
{
    int idx = -1;
    rt_err_t result = RT_EOK;

    /* Headhunter */
    if (nu_pdma_memfun_actor_pool_sem &&
            ((result = rt_sem_take(nu_pdma_memfun_actor_pool_sem, RT_WAITING_FOREVER)) == RT_EOK))
    {
        RT_ASSERT(result == RT_EOK);

        result = rt_mutex_take(nu_pdma_memfun_actor_pool_lock, RT_WAITING_FOREVER);
        RT_ASSERT(result == RT_EOK);

        /* Find the position of the first '0' in nu_pdma_memfun_actor_mask. */
        idx = nu_cto(nu_pdma_memfun_actor_mask);
        if (idx != 32)
        {
            nu_pdma_memfun_actor_mask |= (1 << idx);
        }
        else
        {
            idx = -1;
        }

        result = rt_mutex_release(nu_pdma_memfun_actor_pool_lock);
        RT_ASSERT(result == RT_EOK);
    }
    return idx;
}

static rt_size_t nu_pdma_memfun(void *dest, void *src, uint32_t u32DataWidth, unsigned int u32TransferCnt, nu_pdma_memctrl_t eMemCtl)
{
    nu_pdma_memfun_actor_t psMemFunActor = NULL;
    struct nu_pdma_chn_cb sChnCB;
    rt_err_t result = RT_ERROR;

    int idx;
    rt_size_t ret = 0;

    /* Employ an actor */
    while ((idx = nu_pdma_memfun_employ()) < 0);

    psMemFunActor = &nu_pdma_memfun_actor_arr[idx];

    /* Set the PDMA memory control to eMemCtl. */
    nu_pdma_channel_memctrl_set(psMemFunActor->m_i32ChannID, eMemCtl);

    /* Register the ISR callback function */
    sChnCB.m_eCBType = eCBType_Event;
    sChnCB.m_pfnCBHandler = nu_pdma_memfun_cb;
    sChnCB.m_pvUserData = (void *)psMemFunActor;

    nu_pdma_filtering_set(psMemFunActor->m_i32ChannID, NU_PDMA_EVENT_ABORT | NU_PDMA_EVENT_TRANSFER_DONE);
    nu_pdma_callback_register(psMemFunActor->m_i32ChannID, &sChnCB);

    psMemFunActor->m_u32Result = 0;

    /* Trigger the transfer */
    nu_pdma_transfer(psMemFunActor->m_i32ChannID,
                     u32DataWidth,
                     (uint32_t)src,
                     (uint32_t)dest,
                     u32TransferCnt,
                     0);

    /* Wait for it to finish. */
    result = rt_sem_take(psMemFunActor->m_psSemMemFun, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    /* Report the full count on NU_PDMA_EVENT_TRANSFER_DONE, otherwise the partial count. */
    if (psMemFunActor->m_u32Result & NU_PDMA_EVENT_TRANSFER_DONE)
    {
        ret += u32TransferCnt;
    }
    else
    {
        ret += (u32TransferCnt - nu_pdma_non_transfer_count_get(psMemFunActor->m_i32ChannID));
    }

    /* Terminate the channel on an ABORT event */
    if (psMemFunActor->m_u32Result & NU_PDMA_EVENT_ABORT)
    {
        nu_pdma_channel_terminate(psMemFunActor->m_i32ChannID);
    }

    result = rt_mutex_take(nu_pdma_memfun_actor_pool_lock, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    nu_pdma_memfun_actor_mask &= ~(1 << idx);

    result = rt_mutex_release(nu_pdma_memfun_actor_pool_lock);
    RT_ASSERT(result == RT_EOK);

    /* Release the actor back to the pool */
    result = rt_sem_release(nu_pdma_memfun_actor_pool_sem);
    RT_ASSERT(result == RT_EOK);

    return ret;
}

rt_size_t nu_pdma_mempush(void *dest, void *src, uint32_t data_width, unsigned int transfer_count)
{
    if (data_width == 8 || data_width == 16 || data_width == 32)
        return nu_pdma_memfun(dest, src, data_width, transfer_count, eMemCtl_SrcInc_DstFix);
    return 0;
}
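
/*
 * Example: push a memory buffer into a fixed destination address, such as a
 * peripheral data register (a sketch; the UART1->DAT destination and
 * `au8TxBuf` are illustrative). Source increments, destination stays fixed:
 *
 *     // Pushes 64 bytes at 8-bit width; returns the count actually moved.
 *     rt_size_t done = nu_pdma_mempush((void *)&UART1->DAT, &au8TxBuf[0], 8, 64);
 */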

void *nu_pdma_memcpy(void *dest, void *src, unsigned int count)
{
    int i = 0;
    uint32_t u32Offset = 0;
    uint32_t u32Remaining = count;

    for (i = 4; (i > 0) && (u32Remaining > 0); i >>= 1)
    {
        uint32_t u32src = (uint32_t)src + u32Offset;
        uint32_t u32dest = (uint32_t)dest + u32Offset;

        if (((u32src % i) == (u32dest % i)) &&
                ((u32src % i) == 0) &&
                (RT_ALIGN_DOWN(u32Remaining, i) >= i))
        {
            uint32_t u32TXCnt = u32Remaining / i;
            if (u32TXCnt != nu_pdma_memfun((void *)u32dest, (void *)u32src, i * 8, u32TXCnt, eMemCtl_SrcInc_DstInc))
                goto exit_nu_pdma_memcpy;

            u32Offset += (u32TXCnt * i);
            u32Remaining -= (u32TXCnt * i);
        }
    }

    if (count == u32Offset)
        return dest;

exit_nu_pdma_memcpy:

    return NULL;
}
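
/*
 * Example: DMA-accelerated memcpy (a sketch). The function above picks the
 * widest transfer unit (4, 2, then 1 bytes) that the addresses and the
 * remaining length allow, and returns dest on success or NULL on failure:
 *
 *     if (nu_pdma_memcpy(pvDst, pvSrc, u32Len) == RT_NULL)
 *     {
 *         // Fall back to the CPU copy.
 *         rt_memcpy(pvDst, pvSrc, u32Len);
 *     }
 */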

/**
 * PDMA memfun actor initialization
 */
int rt_hw_pdma_memfun_init(void)
{
    nu_pdma_memfun_actor_init();
    return 0;
}
INIT_DEVICE_EXPORT(rt_hw_pdma_memfun_init);

#endif // #if defined(BSP_USING_PDMA)