  1. /**************************************************************************//**
  2. *
  3. * @copyright (C) 2020 Nuvoton Technology Corp. All rights reserved.
  4. *
  5. * SPDX-License-Identifier: Apache-2.0
  6. *
  7. * Change Logs:
  8. * Date Author Notes
  9. * 2022-3-15 Wayne First version
  10. *
  11. ******************************************************************************/
  12. #include <rtconfig.h>
  13. #if defined(BSP_USING_PDMA)
  14. #include <rtdevice.h>
  15. #include <rthw.h>
  16. #include <rtthread.h>
  17. #include <drv_pdma.h>
  18. #include <nu_bitutil.h>
  19. #include "drv_sys.h"
/* Private define ---------------------------------------------------------------*/
// RT_DEV_NAME_PREFIX pdma

/* Number of worker contexts available to the memfun (memcpy-over-PDMA) helpers. */
#ifndef NU_PDMA_MEMFUN_ACTOR_MAX
#define NU_PDMA_MEMFUN_ACTOR_MAX (4)
#endif

/* Controller instance indices; PDMA_CNT is the number of PDMA controllers. */
enum
{
    PDMA_START = -1,
    PDMA0_IDX,
    PDMA1_IDX,
    PDMA_CNT
};

/* Max SG descriptors addressable within the controller's limited NEXT-offset window. */
#define NU_PDMA_SG_TBL_MAXSIZE (NU_PDMA_SG_LIMITED_DISTANCE/sizeof(DSCT_T))

#define NU_PDMA_CH_MAX (PDMA_CNT*PDMA_CH_MAX) /* Specify maximum channels of PDMA */
#define NU_PDMA_CH_Pos (0) /* Specify first channel number of PDMA */
#define NU_PDMA_CH_Msk (PDMA_CH_Msk << NU_PDMA_CH_Pos)

/* Map a global channel id to: controller register base, controller index,
 * and in-controller channel index. Controller bases are 0x10000 apart. */
#define NU_PDMA_GET_BASE(ch) (PDMA_T *)((((ch)/PDMA_CH_MAX)*0x10000UL) + PDMA0_BASE)
#define NU_PDMA_GET_MOD_IDX(ch) ((ch)/PDMA_CH_MAX)
#define NU_PDMA_GET_MOD_CHIDX(ch) ((ch)%PDMA_CH_MAX)

/* Private typedef --------------------------------------------------------------*/

/* One peripheral request id plus the default src/dst increment mode used for it. */
struct nu_pdma_periph_ctl
{
    uint32_t m_u32Peripheral;     /* PDMA request source id (PDMA_MEM, PDMA_UARTn_TX, ...). */
    nu_pdma_memctrl_t m_eMemCtl;  /* Default address-increment behavior for this request. */
};
typedef struct nu_pdma_periph_ctl nu_pdma_periph_ctl_t;

/* Per-channel runtime state. */
struct nu_pdma_chn
{
    struct nu_pdma_chn_cb m_sCB_Event;    /* Transfer-event callback (done/timeout/abort). */
    struct nu_pdma_chn_cb m_sCB_Trigger;  /* Called to kick off non-M2M transfers. */
    struct nu_pdma_chn_cb m_sCB_Disable;  /* Called when the channel is disabled. */
    nu_pdma_desc_t *m_ppsSgtbl;           /* rt_malloc_align'ed array of SG-descriptor pointers. */
    uint32_t m_u32WantedSGTblNum;         /* Number of entries in m_ppsSgtbl. */
    uint32_t m_u32EventFilter;            /* Event mask forwarded to the event callback. */
    uint32_t m_u32IdleTimeout_us;         /* Idle timeout in microseconds; 0 = disabled. */
    nu_pdma_periph_ctl_t m_spPeripCtl;    /* Bound peripheral and its memctrl mode. */
};
typedef struct nu_pdma_chn nu_pdma_chn_t;

/* One memfun worker: its channel, last result flags, and completion semaphore. */
struct nu_pdma_memfun_actor
{
    int m_i32ChannID;
    uint32_t m_u32Result;
    rt_sem_t m_psSemMemFun;
} ;
typedef struct nu_pdma_memfun_actor *nu_pdma_memfun_actor_t;
  65. /* Private functions ------------------------------------------------------------*/
  66. static int nu_pdma_peripheral_set(uint32_t u32PeriphType);
  67. static void nu_pdma_init(void);
  68. static void nu_pdma_channel_enable(int i32ChannID);
  69. static void nu_pdma_channel_disable(int i32ChannID);
  70. static void nu_pdma_channel_reset(int i32ChannID);
  71. static rt_err_t nu_pdma_timeout_set(int i32ChannID, int i32Timeout_us);
  72. static void nu_pdma_periph_ctrl_fill(int i32ChannID, int i32CtlPoolIdx);
  73. static rt_ssize_t nu_pdma_memfun(void *dest, void *src, uint32_t u32DataWidth, unsigned int u32TransferCnt, nu_pdma_memctrl_t eMemCtl);
  74. static void nu_pdma_memfun_cb(void *pvUserData, uint32_t u32Events);
  75. static void nu_pdma_memfun_actor_init(void);
  76. static int nu_pdma_memfun_employ(void);
  77. static int nu_pdma_non_transfer_count_get(int32_t i32ChannID);
  78. /* Public functions -------------------------------------------------------------*/
  79. /* Private variables ------------------------------------------------------------*/
  80. static volatile int nu_pdma_inited = 0;
  81. static volatile uint32_t nu_pdma_chn_mask_arr[PDMA_CNT] = {0};
  82. static nu_pdma_chn_t nu_pdma_chn_arr[NU_PDMA_CH_MAX];
  83. static volatile uint32_t nu_pdma_memfun_actor_mask = 0;
  84. static volatile uint32_t nu_pdma_memfun_actor_maxnum = 0;
  85. static rt_sem_t nu_pdma_memfun_actor_pool_sem = RT_NULL;
  86. static rt_mutex_t nu_pdma_memfun_actor_pool_lock = RT_NULL;
  87. const static struct nu_module nu_pdma_arr[] =
  88. {
  89. {
  90. .name = "pdma0",
  91. .m_pvBase = (void *)PDMA0,
  92. .u32RstId = PDMA0_RST,
  93. .eIRQn = PDMA0_IRQn
  94. },
  95. {
  96. .name = "pdma1",
  97. .m_pvBase = (void *)PDMA1,
  98. .u32RstId = PDMA1_RST,
  99. .eIRQn = PDMA1_IRQn
  100. },
  101. };
/* Lookup table mapping every supported PDMA request id to its default
 * src/dst address-increment mode. Searched linearly by nu_pdma_peripheral_set(). */
static const nu_pdma_periph_ctl_t g_nu_pdma_peripheral_ctl_pool[ ] =
{
    // M2M
    { PDMA_MEM, eMemCtl_SrcInc_DstInc },
    // M2P
    { PDMA_USB_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART2_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART3_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART4_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART5_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART6_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART7_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART8_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_UART9_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_USCI0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_QSPI0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_QSPI1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI2_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI3_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI4_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI5_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI6_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI7_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI8_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI9_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_SPI10_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_I2S0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_I2S1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_DAC0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_DAC1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH2_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH3_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM0_CH4_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH0_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH1_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH2_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH3_TX, eMemCtl_SrcInc_DstFix },
    { PDMA_EPWM1_CH4_TX, eMemCtl_SrcInc_DstFix },
    // P2M
    { PDMA_USB_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART2_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART3_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART4_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART5_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART6_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART7_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART8_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_UART9_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_USCI0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_QSPI0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_QSPI1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI2_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI3_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI4_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI5_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI6_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI7_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI8_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI9_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_SPI10_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM0_P1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM0_P2_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM0_P3_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM1_P1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM1_P2_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EPWM1_P3_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_I2S0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_I2S1_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EADC0_RX, eMemCtl_SrcFix_DstInc },
    { PDMA_EADC1_RX, eMemCtl_SrcFix_DstInc },
};
/* Number of entries in the peripheral lookup table above. */
#define NU_PERIPHERAL_SIZE ( sizeof(g_nu_pdma_peripheral_ctl_pool) / sizeof(g_nu_pdma_peripheral_ctl_pool[0]) )

/* Worker-context pool for the memfun helpers. */
static struct nu_pdma_memfun_actor nu_pdma_memfun_actor_arr[NU_PDMA_MEMFUN_ACTOR_MAX];

/* SG table pool */
static DSCT_T nu_pdma_sgtbl_arr[NU_PDMA_SGTBL_POOL_SIZE] = { 0 };
/* Free-list bitmap for the SG table pool: bit set = descriptor free. */
static uint32_t nu_pdma_sgtbl_token[RT_ALIGN(NU_PDMA_SGTBL_POOL_SIZE, 32) / 32];
  188. static int nu_pdma_check_is_nonallocated(uint32_t u32ChnId)
  189. {
  190. uint32_t mod_idx = NU_PDMA_GET_MOD_IDX(u32ChnId);
  191. RT_ASSERT(mod_idx < PDMA_CNT);
  192. return !(nu_pdma_chn_mask_arr[mod_idx] & (1 << NU_PDMA_GET_MOD_CHIDX(u32ChnId)));
  193. }
  194. static int nu_pdma_peripheral_set(uint32_t u32PeriphType)
  195. {
  196. int idx = 0;
  197. while (idx < NU_PERIPHERAL_SIZE)
  198. {
  199. if (g_nu_pdma_peripheral_ctl_pool[idx].m_u32Peripheral == u32PeriphType)
  200. return idx;
  201. idx++;
  202. }
  203. // Not such peripheral
  204. return -1;
  205. }
  206. static void nu_pdma_periph_ctrl_fill(int i32ChannID, int i32CtlPoolIdx)
  207. {
  208. nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
  209. psPdmaChann->m_spPeripCtl.m_u32Peripheral = g_nu_pdma_peripheral_ctl_pool[i32CtlPoolIdx].m_u32Peripheral;
  210. psPdmaChann->m_spPeripCtl.m_eMemCtl = g_nu_pdma_peripheral_ctl_pool[i32CtlPoolIdx].m_eMemCtl;
  211. }
/**
 * Hardware PDMA Initialization.
 *
 * One-shot: resets every PDMA controller, enables its IRQ, programs SCATBA
 * (the scatter-gather base address) to the start of the shared descriptor
 * pool, and initializes the descriptor-token bitmap.
 */
static void nu_pdma_init(void)
{
    int i, latest = 0;

    /* Idempotent: later callers (e.g. nu_pdma_channel_allocate) may re-enter. */
    if (nu_pdma_inited)
        return;

    rt_memset(&nu_pdma_sgtbl_arr[0], 0x00, sizeof(nu_pdma_sgtbl_arr));
    rt_memset(nu_pdma_chn_arr, 0x00, sizeof(nu_pdma_chn_arr));

    for (i = (PDMA_START + 1); i < PDMA_CNT; i++)
    {
        PDMA_T *psPDMA = (PDMA_T *)nu_pdma_arr[i].m_pvBase;

        /* Bits of real channels start cleared (free); all other bits set (unavailable). */
        nu_pdma_chn_mask_arr[i] = ~(NU_PDMA_CH_Msk);

        SYS_ResetModule(nu_pdma_arr[i].u32RstId);

        /* Initialize PDMA setting */
        PDMA_Open(psPDMA, PDMA_CH_Msk);
        PDMA_Close(psPDMA);

        /* Enable PDMA interrupt */
        NVIC_EnableIRQ(nu_pdma_arr[i].eIRQn);

        /* Assign first SG table address as PDMA SG table base address */
        psPDMA->SCATBA = (uint32_t)&nu_pdma_sgtbl_arr[0];
    }

    /* Initialize token pool: bit set = descriptor free. */
    rt_memset(&nu_pdma_sgtbl_token[0], 0xff, sizeof(nu_pdma_sgtbl_token));
    if (NU_PDMA_SGTBL_POOL_SIZE % 32)
    {
        /* Clear the bits of the last word that lie beyond the pool size,
         * so they can never be allocated. */
        latest = (NU_PDMA_SGTBL_POOL_SIZE) / 32;
        nu_pdma_sgtbl_token[latest] ^= ~((1 << (NU_PDMA_SGTBL_POOL_SIZE % 32)) - 1) ;
    }

    nu_pdma_inited = 1;
}
  244. static inline void nu_pdma_channel_enable(int i32ChannID)
  245. {
  246. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  247. int u32ModChannId = NU_PDMA_GET_MOD_CHIDX(i32ChannID);
  248. /* Clean descriptor table control register. */
  249. PDMA->DSCT[u32ModChannId].CTL = 0UL;
  250. /* Enable the channel */
  251. PDMA->CHCTL |= (1 << u32ModChannId);
  252. }
  253. static inline void nu_pdma_channel_disable(int i32ChannID)
  254. {
  255. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  256. PDMA->CHCTL &= ~(1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  257. }
  258. static inline void nu_pdma_channel_reset(int i32ChannID)
  259. {
  260. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  261. int u32ModChannId = NU_PDMA_GET_MOD_CHIDX(i32ChannID);
  262. PDMA->CHRST = (1 << u32ModChannId);
  263. /* Wait for cleared channel CHCTL. */
  264. while ((PDMA->CHCTL & (1 << u32ModChannId)));
  265. }
  266. void nu_pdma_channel_terminate(int i32ChannID)
  267. {
  268. if (nu_pdma_check_is_nonallocated(i32ChannID))
  269. goto exit_pdma_channel_terminate;
  270. //rt_kprintf("[%s] %d\n", __func__, i32ChannID);
  271. /* Reset specified channel. */
  272. nu_pdma_channel_reset(i32ChannID);
  273. /* Enable specified channel after reset. */
  274. nu_pdma_channel_enable(i32ChannID);
  275. exit_pdma_channel_terminate:
  276. return;
  277. }
  278. static rt_err_t nu_pdma_timeout_set(int i32ChannID, int i32Timeout_us)
  279. {
  280. rt_err_t ret = -RT_EINVAL;
  281. PDMA_T *PDMA = NULL;
  282. uint32_t u32ModChannId;
  283. if (nu_pdma_check_is_nonallocated(i32ChannID))
  284. goto exit_nu_pdma_timeout_set;
  285. PDMA = NU_PDMA_GET_BASE(i32ChannID);
  286. u32ModChannId = NU_PDMA_GET_MOD_CHIDX(i32ChannID);
  287. nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32IdleTimeout_us = i32Timeout_us;
  288. if (i32Timeout_us)
  289. {
  290. uint32_t u32ToClk_Max = 1000000 / (CLK_GetHCLKFreq() / (1 << 8));
  291. uint32_t u32Divider = (i32Timeout_us / u32ToClk_Max) / (1 << 16);
  292. uint32_t u32TOutCnt = (i32Timeout_us / u32ToClk_Max) % (1 << 16);
  293. PDMA_DisableTimeout(PDMA, 1 << u32ModChannId);
  294. PDMA_EnableInt(PDMA, u32ModChannId, PDMA_INT_TIMEOUT); // Interrupt type
  295. if (u32Divider > 7)
  296. {
  297. u32Divider = 7;
  298. u32TOutCnt = (1 << 16);
  299. }
  300. if (u32ModChannId < 8)
  301. PDMA->TOUTPSC0_7 = (PDMA->TOUTPSC0_7 & ~(0x7ul << (PDMA_TOUTPSC0_7_TOUTPSC0_Pos * u32ModChannId))) | (u32Divider << (PDMA_TOUTPSC0_7_TOUTPSC0_Pos * u32ModChannId));
  302. else
  303. PDMA->TOUTPSC8_15 = (PDMA->TOUTPSC8_15 & ~(0x7ul << (PDMA_TOUTPSC8_15_TOUTPSC8_Pos * (u32ModChannId % 8)))) | (u32Divider << (PDMA_TOUTPSC8_15_TOUTPSC8_Pos * (u32ModChannId % 8)));
  304. PDMA_SetTimeOut(PDMA, u32ModChannId, 1, u32TOutCnt);
  305. ret = RT_EOK;
  306. }
  307. else
  308. {
  309. PDMA_DisableInt(PDMA, u32ModChannId, PDMA_INT_TIMEOUT); // Interrupt type
  310. PDMA_DisableTimeout(PDMA, 1 << u32ModChannId);
  311. }
  312. exit_nu_pdma_timeout_set:
  313. return -(ret);
  314. }
/**
 * Allocate a free PDMA channel bound to the given peripheral request type.
 *
 * @param i32PeripType A PDMA request id present in g_nu_pdma_peripheral_ctl_pool.
 *
 * @return Global channel id (>= 0) on success; -RT_ERROR when the peripheral
 *         is unknown or no channel is free.
 */
int nu_pdma_channel_allocate(int32_t i32PeripType)
{
    int ChnId, i32PeripCtlIdx, j;

    /* Lazy hardware initialization (one-shot). */
    nu_pdma_init();

    if ((i32PeripCtlIdx = nu_pdma_peripheral_set(i32PeripType)) < 0)
        goto exit_nu_pdma_channel_allocate;

    for (j = (PDMA_START + 1); j < PDMA_CNT; j++)
    {
        /* Find the position of first '0' in nu_pdma_chn_mask_arr[j]. */
        ChnId = nu_cto(nu_pdma_chn_mask_arr[j]);
        if (ChnId < PDMA_CH_MAX)
        {
            /* Mark the channel allocated and convert to a global channel id. */
            nu_pdma_chn_mask_arr[j] |= (1 << ChnId);
            ChnId += (j * PDMA_CH_MAX);

            /* Start from a clean per-channel state. */
            rt_memset(nu_pdma_chn_arr + ChnId - NU_PDMA_CH_Pos, 0x00, sizeof(nu_pdma_chn_t));

            /* Set idx number of g_nu_pdma_peripheral_ctl_pool */
            nu_pdma_periph_ctrl_fill(ChnId, i32PeripCtlIdx);

            /* Reset channel */
            nu_pdma_channel_terminate(ChnId);

            return ChnId;
        }
    }

exit_nu_pdma_channel_allocate:
    // No channel available
    return -(RT_ERROR);
}
  341. rt_err_t nu_pdma_channel_free(int i32ChannID)
  342. {
  343. rt_err_t ret = -RT_EINVAL;
  344. if (! nu_pdma_inited)
  345. goto exit_nu_pdma_channel_free;
  346. if (nu_pdma_check_is_nonallocated(i32ChannID))
  347. goto exit_nu_pdma_channel_free;
  348. if ((i32ChannID < NU_PDMA_CH_MAX) && (i32ChannID >= NU_PDMA_CH_Pos))
  349. {
  350. nu_pdma_chn_mask_arr[NU_PDMA_GET_MOD_IDX(i32ChannID)] &= ~(1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));
  351. nu_pdma_channel_disable(i32ChannID);
  352. ret = RT_EOK;
  353. }
  354. exit_nu_pdma_channel_free:
  355. return -(ret);
  356. }
  357. rt_err_t nu_pdma_filtering_set(int i32ChannID, uint32_t u32EventFilter)
  358. {
  359. rt_err_t ret = -RT_EINVAL;
  360. if (nu_pdma_check_is_nonallocated(i32ChannID))
  361. goto exit_nu_pdma_filtering_set;
  362. nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32EventFilter = u32EventFilter;
  363. ret = RT_EOK;
  364. exit_nu_pdma_filtering_set:
  365. return -(ret) ;
  366. }
  367. uint32_t nu_pdma_filtering_get(int i32ChannID)
  368. {
  369. if (nu_pdma_check_is_nonallocated(i32ChannID))
  370. goto exit_nu_pdma_filtering_get;
  371. return nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_u32EventFilter;
  372. exit_nu_pdma_filtering_get:
  373. return 0;
  374. }
  375. rt_err_t nu_pdma_callback_register(int i32ChannID, nu_pdma_chn_cb_t psChnCb)
  376. {
  377. rt_err_t ret = -RT_EINVAL;
  378. nu_pdma_chn_cb_t psChnCb_Current = RT_NULL;
  379. RT_ASSERT(psChnCb != RT_NULL);
  380. if (nu_pdma_check_is_nonallocated(i32ChannID))
  381. goto exit_nu_pdma_callback_register;
  382. switch (psChnCb->m_eCBType)
  383. {
  384. case eCBType_Event:
  385. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Event;
  386. break;
  387. case eCBType_Trigger:
  388. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Trigger;
  389. break;
  390. case eCBType_Disable:
  391. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Disable;
  392. break;
  393. default:
  394. goto exit_nu_pdma_callback_register;
  395. }
  396. psChnCb_Current->m_pfnCBHandler = psChnCb->m_pfnCBHandler;
  397. psChnCb_Current->m_pvUserData = psChnCb->m_pvUserData;
  398. ret = RT_EOK;
  399. exit_nu_pdma_callback_register:
  400. return -(ret) ;
  401. }
  402. nu_pdma_cb_handler_t nu_pdma_callback_hijack(int i32ChannID, nu_pdma_cbtype_t eCBType, nu_pdma_chn_cb_t psChnCb_Hijack)
  403. {
  404. nu_pdma_chn_cb_t psChnCb_Current = RT_NULL;
  405. struct nu_pdma_chn_cb sChnCB_Tmp;
  406. RT_ASSERT(psChnCb_Hijack != NULL);
  407. sChnCB_Tmp.m_pfnCBHandler = RT_NULL;
  408. if (nu_pdma_check_is_nonallocated(i32ChannID))
  409. goto exit_nu_pdma_callback_hijack;
  410. switch (eCBType)
  411. {
  412. case eCBType_Event:
  413. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Event;
  414. break;
  415. case eCBType_Trigger:
  416. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Trigger;
  417. break;
  418. case eCBType_Disable:
  419. psChnCb_Current = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_sCB_Disable;
  420. break;
  421. default:
  422. goto exit_nu_pdma_callback_hijack;
  423. }
  424. /* Backup */
  425. sChnCB_Tmp.m_pfnCBHandler = psChnCb_Current->m_pfnCBHandler;
  426. sChnCB_Tmp.m_pvUserData = psChnCb_Current->m_pvUserData;
  427. /* Update */
  428. psChnCb_Current->m_pfnCBHandler = psChnCb_Hijack->m_pfnCBHandler;
  429. psChnCb_Current->m_pvUserData = psChnCb_Hijack->m_pvUserData;
  430. /* Restore */
  431. psChnCb_Hijack->m_pfnCBHandler = sChnCB_Tmp.m_pfnCBHandler;
  432. psChnCb_Hijack->m_pvUserData = sChnCB_Tmp.m_pvUserData;
  433. exit_nu_pdma_callback_hijack:
  434. return sChnCB_Tmp.m_pfnCBHandler;
  435. }
  436. static int nu_pdma_non_transfer_count_get(int32_t i32ChannID)
  437. {
  438. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  439. return ((PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_TXCNT_Msk) >> PDMA_DSCT_CTL_TXCNT_Pos) + 1;
  440. }
  441. int nu_pdma_transferred_byte_get(int32_t i32ChannID, int32_t i32TriggerByteLen)
  442. {
  443. int i32BitWidth = 0;
  444. int cur_txcnt = 0;
  445. PDMA_T *PDMA;
  446. if (nu_pdma_check_is_nonallocated(i32ChannID))
  447. goto exit_nu_pdma_transferred_byte_get;
  448. PDMA = NU_PDMA_GET_BASE(i32ChannID);
  449. i32BitWidth = PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)].CTL & PDMA_DSCT_CTL_TXWIDTH_Msk;
  450. i32BitWidth = (i32BitWidth == PDMA_WIDTH_8) ? 1 : (i32BitWidth == PDMA_WIDTH_16) ? 2 : (i32BitWidth == PDMA_WIDTH_32) ? 4 : 0;
  451. cur_txcnt = nu_pdma_non_transfer_count_get(i32ChannID);
  452. return (i32TriggerByteLen - (cur_txcnt) * i32BitWidth);
  453. exit_nu_pdma_transferred_byte_get:
  454. return -1;
  455. }
  456. nu_pdma_memctrl_t nu_pdma_channel_memctrl_get(int i32ChannID)
  457. {
  458. nu_pdma_memctrl_t eMemCtrl = eMemCtl_Undefined;
  459. if (nu_pdma_check_is_nonallocated(i32ChannID))
  460. goto exit_nu_pdma_channel_memctrl_get;
  461. eMemCtrl = nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl.m_eMemCtl;
  462. exit_nu_pdma_channel_memctrl_get:
  463. return eMemCtrl;
  464. }
  465. rt_err_t nu_pdma_channel_memctrl_set(int i32ChannID, nu_pdma_memctrl_t eMemCtrl)
  466. {
  467. rt_err_t ret = -RT_EINVAL;
  468. nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
  469. if (nu_pdma_check_is_nonallocated(i32ChannID))
  470. goto exit_nu_pdma_channel_memctrl_set;
  471. else if ((eMemCtrl < eMemCtl_SrcFix_DstFix) || (eMemCtrl > eMemCtl_SrcInc_DstInc))
  472. goto exit_nu_pdma_channel_memctrl_set;
  473. /* PDMA_MEM/SAR_FIX/BURST mode is not supported. */
  474. if ((psPdmaChann->m_spPeripCtl.m_u32Peripheral == PDMA_MEM) &&
  475. ((eMemCtrl == eMemCtl_SrcFix_DstInc) || (eMemCtrl == eMemCtl_SrcFix_DstFix)))
  476. goto exit_nu_pdma_channel_memctrl_set;
  477. nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl.m_eMemCtl = eMemCtrl;
  478. ret = RT_EOK;
  479. exit_nu_pdma_channel_memctrl_set:
  480. return -(ret);
  481. }
  482. static void nu_pdma_channel_memctrl_fill(nu_pdma_memctrl_t eMemCtl, uint32_t *pu32SrcCtl, uint32_t *pu32DstCtl)
  483. {
  484. switch ((int)eMemCtl)
  485. {
  486. case eMemCtl_SrcFix_DstFix:
  487. *pu32SrcCtl = PDMA_SAR_FIX;
  488. *pu32DstCtl = PDMA_DAR_FIX;
  489. break;
  490. case eMemCtl_SrcFix_DstInc:
  491. *pu32SrcCtl = PDMA_SAR_FIX;
  492. *pu32DstCtl = PDMA_DAR_INC;
  493. break;
  494. case eMemCtl_SrcInc_DstFix:
  495. *pu32SrcCtl = PDMA_SAR_INC;
  496. *pu32DstCtl = PDMA_DAR_FIX;
  497. break;
  498. case eMemCtl_SrcInc_DstInc:
  499. *pu32SrcCtl = PDMA_SAR_INC;
  500. *pu32DstCtl = PDMA_DAR_INC;
  501. break;
  502. default:
  503. break;
  504. }
  505. }
  506. /* This is for Scatter-gather DMA. */
  507. rt_err_t nu_pdma_desc_setup(int i32ChannID, nu_pdma_desc_t dma_desc, uint32_t u32DataWidth, uint32_t u32AddrSrc,
  508. uint32_t u32AddrDst, int32_t i32TransferCnt, nu_pdma_desc_t next, uint32_t u32BeSilent)
  509. {
  510. nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
  511. PDMA_T *PDMA = NULL;
  512. uint32_t u32SrcCtl = 0;
  513. uint32_t u32DstCtl = 0;
  514. rt_err_t ret = -RT_EINVAL;
  515. if (!dma_desc)
  516. goto exit_nu_pdma_desc_setup;
  517. else if (nu_pdma_check_is_nonallocated(i32ChannID))
  518. goto exit_nu_pdma_desc_setup;
  519. else if (!(u32DataWidth == 8 || u32DataWidth == 16 || u32DataWidth == 32))
  520. goto exit_nu_pdma_desc_setup;
  521. else if ((u32AddrSrc % (u32DataWidth / 8)) || (u32AddrDst % (u32DataWidth / 8)))
  522. goto exit_nu_pdma_desc_setup;
  523. else if (i32TransferCnt > NU_PDMA_MAX_TXCNT)
  524. goto exit_nu_pdma_desc_setup;
  525. PDMA = NU_PDMA_GET_BASE(i32ChannID);
  526. psPeriphCtl = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl;
  527. nu_pdma_channel_memctrl_fill(psPeriphCtl->m_eMemCtl, &u32SrcCtl, &u32DstCtl);
  528. dma_desc->CTL = ((i32TransferCnt - 1) << PDMA_DSCT_CTL_TXCNT_Pos) |
  529. ((u32DataWidth == 8) ? PDMA_WIDTH_8 : (u32DataWidth == 16) ? PDMA_WIDTH_16 : PDMA_WIDTH_32) |
  530. u32SrcCtl |
  531. u32DstCtl |
  532. PDMA_OP_BASIC;
  533. dma_desc->SA = u32AddrSrc;
  534. dma_desc->DA = u32AddrDst;
  535. dma_desc->NEXT = 0; /* Terminating node by default. */
  536. if (psPeriphCtl->m_u32Peripheral == PDMA_MEM)
  537. {
  538. /* For M2M transfer */
  539. dma_desc->CTL |= (PDMA_REQ_BURST | PDMA_BURST_32);
  540. }
  541. else
  542. {
  543. /* For P2M and M2P transfer */
  544. dma_desc->CTL |= (PDMA_REQ_SINGLE);
  545. }
  546. if (next)
  547. {
  548. /* Link to Next and modify to scatter-gather DMA mode. */
  549. dma_desc->CTL = (dma_desc->CTL & ~PDMA_DSCT_CTL_OPMODE_Msk) | PDMA_OP_SCATTER;
  550. dma_desc->NEXT = (uint32_t)next - (PDMA->SCATBA);
  551. }
  552. /* Be silent */
  553. if (u32BeSilent)
  554. dma_desc->CTL |= PDMA_DSCT_CTL_TBINTDIS_Msk;
  555. ret = RT_EOK;
  556. exit_nu_pdma_desc_setup:
  557. return -(ret);
  558. }
  559. static int nu_pdma_sgtbls_token_allocate(void)
  560. {
  561. int idx, i;
  562. int pool_size = sizeof(nu_pdma_sgtbl_token) / sizeof(uint32_t);
  563. for (i = 0; i < pool_size; i++)
  564. {
  565. if ((idx = nu_ctz(nu_pdma_sgtbl_token[i])) != 32)
  566. {
  567. nu_pdma_sgtbl_token[i] &= ~(1 << idx);
  568. idx += i * 32;
  569. return idx;
  570. }
  571. }
  572. /* No available */
  573. return -1;
  574. }
  575. static void nu_pdma_sgtbls_token_free(nu_pdma_desc_t psSgtbls)
  576. {
  577. int idx = (int)(psSgtbls - &nu_pdma_sgtbl_arr[0]);
  578. RT_ASSERT(idx >= 0);
  579. RT_ASSERT((idx + 1) <= NU_PDMA_SGTBL_POOL_SIZE);
  580. nu_pdma_sgtbl_token[idx / 32] |= (1 << (idx % 32));
  581. }
  582. void nu_pdma_sgtbls_free(nu_pdma_desc_t *ppsSgtbls, int num)
  583. {
  584. int i;
  585. rt_base_t level;
  586. RT_ASSERT(ppsSgtbls != NULL);
  587. RT_ASSERT(num <= NU_PDMA_SG_TBL_MAXSIZE);
  588. level = rt_hw_interrupt_disable();
  589. for (i = 0; i < num; i++)
  590. {
  591. if (ppsSgtbls[i] != NULL)
  592. {
  593. nu_pdma_sgtbls_token_free(ppsSgtbls[i]);
  594. }
  595. ppsSgtbls[i] = NULL;
  596. }
  597. rt_hw_interrupt_enable(level);
  598. }
/**
 * Allocate num descriptors from the shared SG-table pool into ppsSgtbls.
 * All-or-nothing: on exhaustion every descriptor taken so far is released
 * and all slots are NULLed.
 *
 * @return RT_EOK on success; -RT_ERROR when the pool is exhausted.
 */
rt_err_t nu_pdma_sgtbls_allocate(nu_pdma_desc_t *ppsSgtbls, int num)
{
    int i, idx;
    rt_base_t level;

    RT_ASSERT(ppsSgtbls);
    RT_ASSERT(num <= NU_PDMA_SG_TBL_MAXSIZE);

    /* Token bitmap is shared across channels; guard with interrupts off. */
    level = rt_hw_interrupt_disable();
    for (i = 0; i < num; i++)
    {
        ppsSgtbls[i] = NULL;
        /* Get token. */
        if ((idx = nu_pdma_sgtbls_token_allocate()) < 0)
        {
            rt_kprintf("No available sgtbl.\n");
            goto fail_nu_pdma_sgtbls_allocate;
        }
        ppsSgtbls[i] = (nu_pdma_desc_t)&nu_pdma_sgtbl_arr[idx];
    }
    rt_hw_interrupt_enable(level);
    return RT_EOK;

fail_nu_pdma_sgtbls_allocate:
    /* Release allocated tables: only the first i slots hold descriptors. */
    nu_pdma_sgtbls_free(ppsSgtbls, i);
    rt_hw_interrupt_enable(level);
    return -RT_ERROR;
}
/**
 * Verify that every node of a descriptor chain lies within the
 * NU_PDMA_SG_LIMITED_DISTANCE window above SCATBA — the only region the
 * hardware's NEXT offset can address.
 *
 * NOTE(review): checks against PDMA0->SCATBA only; nu_pdma_init() programs
 * the same SCATBA into every controller, so this is presumed equivalent.
 *
 * @return RT_EOK when the whole chain is reachable; -RT_ERROR otherwise.
 */
static rt_err_t nu_pdma_sgtbls_valid(nu_pdma_desc_t head)
{
    uint32_t node_addr;
    nu_pdma_desc_t node = head;
    do
    {
        node_addr = (uint32_t)node;
        if ((node_addr < PDMA0->SCATBA) || (node_addr - PDMA0->SCATBA) >= NU_PDMA_SG_LIMITED_DISTANCE)
        {
            rt_kprintf("The distance is over %d between 0x%08x and 0x%08x. \n", NU_PDMA_SG_LIMITED_DISTANCE, PDMA0->SCATBA, node);
            rt_kprintf("Please use nu_pdma_sgtbl_allocate to allocate valid sg-table.\n");
            return -RT_ERROR;
        }
        /* NEXT stores an offset from SCATBA; 0 (-> node == SCATBA) terminates. */
        node = (nu_pdma_desc_t)(node->NEXT + PDMA0->SCATBA);
    }
    while (((uint32_t)node != PDMA0->SCATBA) && (node != head));  /* stop on terminator or full cycle */
    return RT_EOK;
}
/**
 * Program and start a transfer described by the descriptor chain 'head'.
 * Enables the transfer-done interrupt, applies the channel's idle timeout,
 * then: M2M transfers are software-triggered; peripheral transfers invoke
 * the channel's registered Trigger callback (if any) to start the peripheral.
 */
static void _nu_pdma_transfer(int i32ChannID, uint32_t u32Peripheral, nu_pdma_desc_t head, uint32_t u32IdleTimeout_us)
{
    PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
    nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];

    /* Clear any stale timeout before re-arming interrupts for this run. */
    PDMA_DisableTimeout(PDMA, 1 << NU_PDMA_GET_MOD_CHIDX(i32ChannID));

    PDMA_EnableInt(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID), PDMA_INT_TRANS_DONE);

    nu_pdma_timeout_set(i32ChannID, u32IdleTimeout_us);

    /* Set scatter-gather mode and head; a non-zero NEXT selects SG mode. */
    PDMA_SetTransferMode(PDMA,
                         NU_PDMA_GET_MOD_CHIDX(i32ChannID),
                         u32Peripheral,
                         (head->NEXT != 0) ? 1 : 0,
                         (uint32_t)head);

    /* If peripheral is M2M, trigger it. */
    if (u32Peripheral == PDMA_MEM)
    {
        PDMA_Trigger(PDMA, NU_PDMA_GET_MOD_CHIDX(i32ChannID));
    }
    else if (psPdmaChann->m_sCB_Trigger.m_pfnCBHandler)
    {
        psPdmaChann->m_sCB_Trigger.m_pfnCBHandler(psPdmaChann->m_sCB_Trigger.m_pvUserData, psPdmaChann->m_sCB_Trigger.m_u32Reserved);
    }
}
  666. static void _nu_pdma_free_sgtbls(nu_pdma_chn_t *psPdmaChann)
  667. {
  668. if (psPdmaChann->m_ppsSgtbl)
  669. {
  670. nu_pdma_sgtbls_free(psPdmaChann->m_ppsSgtbl, psPdmaChann->m_u32WantedSGTblNum);
  671. psPdmaChann->m_ppsSgtbl = RT_NULL;
  672. psPdmaChann->m_u32WantedSGTblNum = 0;
  673. }
  674. }
  675. static rt_err_t _nu_pdma_transfer_chain(int i32ChannID, uint32_t u32DataWidth, uint32_t u32AddrSrc, uint32_t u32AddrDst, uint32_t u32TransferCnt, uint32_t u32IdleTimeout_us)
  676. {
  677. int i = 0;
  678. rt_err_t ret = -RT_ERROR;
  679. nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
  680. nu_pdma_chn_t *psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
  681. nu_pdma_memctrl_t eMemCtl = nu_pdma_channel_memctrl_get(i32ChannID);
  682. rt_uint32_t u32Offset = 0;
  683. rt_uint32_t u32TxCnt = 0;
  684. psPeriphCtl = &psPdmaChann->m_spPeripCtl;
  685. if (psPdmaChann->m_u32WantedSGTblNum != (u32TransferCnt / NU_PDMA_MAX_TXCNT + 1))
  686. {
  687. if (psPdmaChann->m_u32WantedSGTblNum > 0)
  688. _nu_pdma_free_sgtbls(psPdmaChann);
  689. psPdmaChann->m_u32WantedSGTblNum = u32TransferCnt / NU_PDMA_MAX_TXCNT + 1;
  690. psPdmaChann->m_ppsSgtbl = (nu_pdma_desc_t *)rt_malloc_align(sizeof(nu_pdma_desc_t) * psPdmaChann->m_u32WantedSGTblNum, 4);
  691. if (!psPdmaChann->m_ppsSgtbl)
  692. goto exit__nu_pdma_transfer_chain;
  693. ret = nu_pdma_sgtbls_allocate(psPdmaChann->m_ppsSgtbl, psPdmaChann->m_u32WantedSGTblNum);
  694. if (ret != RT_EOK)
  695. goto exit__nu_pdma_transfer_chain;
  696. }
  697. for (i = 0; i < psPdmaChann->m_u32WantedSGTblNum; i++)
  698. {
  699. u32TxCnt = (u32TransferCnt > NU_PDMA_MAX_TXCNT) ? NU_PDMA_MAX_TXCNT : u32TransferCnt;
  700. ret = nu_pdma_desc_setup(i32ChannID,
  701. psPdmaChann->m_ppsSgtbl[i],
  702. u32DataWidth,
  703. (eMemCtl & 0x2ul) ? u32AddrSrc + u32Offset : u32AddrSrc, /* Src address is Inc or not. */
  704. (eMemCtl & 0x1ul) ? u32AddrDst + u32Offset : u32AddrDst, /* Dst address is Inc or not. */
  705. u32TxCnt,
  706. ((i + 1) == psPdmaChann->m_u32WantedSGTblNum) ? RT_NULL : psPdmaChann->m_ppsSgtbl[i + 1],
  707. ((i + 1) == psPdmaChann->m_u32WantedSGTblNum) ? 0 : 1); // Silent, w/o TD interrupt
  708. if (ret != RT_EOK)
  709. goto exit__nu_pdma_transfer_chain;
  710. u32TransferCnt -= u32TxCnt;
  711. u32Offset += (u32TxCnt * u32DataWidth / 8);
  712. }
  713. _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, psPdmaChann->m_ppsSgtbl[0], u32IdleTimeout_us);
  714. ret = RT_EOK;
  715. return ret;
  716. exit__nu_pdma_transfer_chain:
  717. _nu_pdma_free_sgtbls(psPdmaChann);
  718. return -(ret);
  719. }
  720. rt_err_t nu_pdma_transfer(int i32ChannID, uint32_t u32DataWidth, uint32_t u32AddrSrc, uint32_t u32AddrDst, uint32_t u32TransferCnt, uint32_t u32IdleTimeout_us)
  721. {
  722. rt_err_t ret = -RT_EINVAL;
  723. PDMA_T *PDMA = NU_PDMA_GET_BASE(i32ChannID);
  724. nu_pdma_desc_t head;
  725. nu_pdma_chn_t *psPdmaChann;
  726. nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
  727. if (nu_pdma_check_is_nonallocated(i32ChannID))
  728. goto exit_nu_pdma_transfer;
  729. else if (!u32TransferCnt)
  730. goto exit_nu_pdma_transfer;
  731. else if (u32TransferCnt > NU_PDMA_MAX_TXCNT)
  732. return _nu_pdma_transfer_chain(i32ChannID, u32DataWidth, u32AddrSrc, u32AddrDst, u32TransferCnt, u32IdleTimeout_us);
  733. psPdmaChann = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos];
  734. psPeriphCtl = &psPdmaChann->m_spPeripCtl;
  735. head = &PDMA->DSCT[NU_PDMA_GET_MOD_CHIDX(i32ChannID)];
  736. ret = nu_pdma_desc_setup(i32ChannID,
  737. head,
  738. u32DataWidth,
  739. u32AddrSrc,
  740. u32AddrDst,
  741. u32TransferCnt,
  742. RT_NULL,
  743. 0);
  744. if (ret != RT_EOK)
  745. goto exit_nu_pdma_transfer;
  746. _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, head, u32IdleTimeout_us);
  747. ret = RT_EOK;
  748. exit_nu_pdma_transfer:
  749. return -(ret);
  750. }
  751. rt_err_t nu_pdma_sg_transfer(int i32ChannID, nu_pdma_desc_t head, uint32_t u32IdleTimeout_us)
  752. {
  753. rt_err_t ret = -RT_EINVAL;
  754. nu_pdma_periph_ctl_t *psPeriphCtl = NULL;
  755. if (!head)
  756. goto exit_nu_pdma_sg_transfer;
  757. else if (nu_pdma_check_is_nonallocated(i32ChannID))
  758. goto exit_nu_pdma_sg_transfer;
  759. else if ((ret = nu_pdma_sgtbls_valid(head)) != RT_EOK) /* Check SG-tbls. */
  760. goto exit_nu_pdma_sg_transfer;
  761. psPeriphCtl = &nu_pdma_chn_arr[i32ChannID - NU_PDMA_CH_Pos].m_spPeripCtl;
  762. _nu_pdma_transfer(i32ChannID, psPeriphCtl->m_u32Peripheral, head, u32IdleTimeout_us);
  763. ret = RT_EOK;
  764. exit_nu_pdma_sg_transfer:
  765. return -(ret);
  766. }
/* Common interrupt service routine shared by all PDMA module instances.
 * Snapshots the module-wide status (abort / transfer-done / unaligned /
 * request-timeout), clears the hardware flags, then walks every channel bit
 * that raised an event and dispatches to that channel's registered callbacks. */
void PDMA_IRQHandler(PDMA_T *PDMA)
{
    int i;
    /* Latch all pending status before clearing anything. */
    uint32_t intsts = PDMA_GET_INT_STATUS(PDMA);
    uint32_t abtsts = PDMA_GET_ABORT_STS(PDMA);
    uint32_t tdsts = PDMA_GET_TD_STS(PDMA);
    uint32_t unalignsts = PDMA_GET_ALIGN_STS(PDMA);
    uint32_t reqto = intsts & PDMA_INTSTS_REQTOFn_Msk;
    uint32_t reqto_ch = (reqto >> PDMA_INTSTS_REQTOFn_Pos);
    /* Per-channel bitmask: union of all four event sources. */
    int allch_sts = (reqto_ch | tdsts | abtsts | unalignsts);
    // Abort
    if (intsts & PDMA_INTSTS_ABTIF_Msk)
    {
        // Clear all Abort flags
        PDMA_CLR_ABORT_FLAG(PDMA, abtsts);
    }
    // Transfer done
    if (intsts & PDMA_INTSTS_TDIF_Msk)
    {
        // Clear all transfer done flags
        PDMA_CLR_TD_FLAG(PDMA, tdsts);
    }
    // Unaligned
    if (intsts & PDMA_INTSTS_ALIGNF_Msk)
    {
        // Clear all Unaligned flags
        PDMA_CLR_ALIGN_FLAG(PDMA, unalignsts);
    }
    // Timeout
    if (reqto)
    {
        // Clear all Timeout flags
        PDMA->INTSTS = reqto;
    }
    // Find the position of first '1' in allch_sts.
    while ((i = nu_ctz(allch_sts)) < PDMA_CH_MAX)
    {
        /* Map this module's local channel index 'i' to the global channel
         * index 'j' used by the driver's channel table.
         * NOTE(review): module_id derivation assumes PDMA instances are
         * spaced 0x10000 apart from PDMA0_BASE — confirm against the SoC
         * memory map if a new part is added. */
        int module_id = ((uint32_t)PDMA - PDMA0_BASE) / 0x10000UL;
        int j = i + (module_id * PDMA_CH_MAX);
        int ch_mask = (1 << i);
        if (nu_pdma_chn_mask_arr[module_id] & ch_mask)
        {
            int ch_event = 0;
            nu_pdma_chn_t *dma_chn = nu_pdma_chn_arr + j - NU_PDMA_CH_Pos;
            if (dma_chn->m_sCB_Event.m_pfnCBHandler)
            {
                /* Accumulate every event this channel raised into one mask. */
                if (abtsts & ch_mask)
                {
                    ch_event |= NU_PDMA_EVENT_ABORT;
                }
                if (tdsts & ch_mask)
                {
                    ch_event |= NU_PDMA_EVENT_TRANSFER_DONE;
                }
                if (unalignsts & ch_mask)
                {
                    ch_event |= NU_PDMA_EVENT_ALIGNMENT;
                }
                if (reqto_ch & ch_mask)
                {
                    /* Stop timeout counting while the event is serviced. */
                    PDMA_DisableTimeout(PDMA, ch_mask);
                    ch_event |= NU_PDMA_EVENT_TIMEOUT;
                }
                /* Disable-callback fires first, then the event callback,
                 * filtered by the mask set via nu_pdma_filtering_set(). */
                if (dma_chn->m_sCB_Disable.m_pfnCBHandler)
                    dma_chn->m_sCB_Disable.m_pfnCBHandler(dma_chn->m_sCB_Disable.m_pvUserData, dma_chn->m_sCB_Disable.m_u32Reserved);
                if (dma_chn->m_u32EventFilter & ch_event)
                    dma_chn->m_sCB_Event.m_pfnCBHandler(dma_chn->m_sCB_Event.m_pvUserData, ch_event);
                /* Re-arm the idle timeout after a timeout event was handled. */
                if (reqto_ch & ch_mask)
                    nu_pdma_timeout_set(j, nu_pdma_chn_arr[j - NU_PDMA_CH_Pos].m_u32IdleTimeout_us);
            }//if(dma_chn->handler)
        } //if (nu_pdma_chn_mask & ch_mask)
        // Clear the served bit.
        allch_sts &= ~ch_mask;
    } //while
}
  842. void PDMA0_IRQHandler(void)
  843. {
  844. /* enter interrupt */
  845. rt_interrupt_enter();
  846. PDMA_IRQHandler(PDMA0);
  847. /* leave interrupt */
  848. rt_interrupt_leave();
  849. }
  850. void PDMA1_IRQHandler(void)
  851. {
  852. /* enter interrupt */
  853. rt_interrupt_enter();
  854. PDMA_IRQHandler(PDMA1);
  855. /* leave interrupt */
  856. rt_interrupt_leave();
  857. }
  858. static void nu_pdma_memfun_actor_init(void)
  859. {
  860. int i = 0 ;
  861. nu_pdma_init();
  862. for (i = 0; i < NU_PDMA_MEMFUN_ACTOR_MAX; i++)
  863. {
  864. rt_memset(&nu_pdma_memfun_actor_arr[i], 0, sizeof(struct nu_pdma_memfun_actor));
  865. if (-(RT_ERROR) != (nu_pdma_memfun_actor_arr[i].m_i32ChannID = nu_pdma_channel_allocate(PDMA_MEM)))
  866. {
  867. nu_pdma_memfun_actor_arr[i].m_psSemMemFun = rt_sem_create("memactor_sem", 0, RT_IPC_FLAG_FIFO);
  868. RT_ASSERT(nu_pdma_memfun_actor_arr[i].m_psSemMemFun != RT_NULL);
  869. }
  870. else
  871. break;
  872. }
  873. if (i)
  874. {
  875. nu_pdma_memfun_actor_maxnum = i;
  876. nu_pdma_memfun_actor_mask = ~(((1 << i) - 1));
  877. nu_pdma_memfun_actor_pool_sem = rt_sem_create("mempool_sem", nu_pdma_memfun_actor_maxnum, RT_IPC_FLAG_FIFO);
  878. RT_ASSERT(nu_pdma_memfun_actor_pool_sem != RT_NULL);
  879. nu_pdma_memfun_actor_pool_lock = rt_mutex_create("mempool_lock", RT_IPC_FLAG_PRIO);
  880. RT_ASSERT(nu_pdma_memfun_actor_pool_lock != RT_NULL);
  881. }
  882. }
  883. static void nu_pdma_memfun_cb(void *pvUserData, uint32_t u32Events)
  884. {
  885. rt_err_t result = RT_EOK;
  886. nu_pdma_memfun_actor_t psMemFunActor = (nu_pdma_memfun_actor_t)pvUserData;
  887. psMemFunActor->m_u32Result = u32Events;
  888. result = rt_sem_release(psMemFunActor->m_psSemMemFun);
  889. RT_ASSERT(result == RT_EOK);
  890. }
  891. static int nu_pdma_memfun_employ(void)
  892. {
  893. int idx = -1 ;
  894. rt_err_t result = RT_EOK;
  895. /* Headhunter */
  896. if (nu_pdma_memfun_actor_pool_sem &&
  897. ((result = rt_sem_take(nu_pdma_memfun_actor_pool_sem, RT_WAITING_FOREVER)) == RT_EOK))
  898. {
  899. RT_ASSERT(result == RT_EOK);
  900. result = rt_mutex_take(nu_pdma_memfun_actor_pool_lock, RT_WAITING_FOREVER);
  901. RT_ASSERT(result == RT_EOK);
  902. /* Find the position of first '0' in nu_pdma_memfun_actor_mask. */
  903. idx = nu_cto(nu_pdma_memfun_actor_mask);
  904. if (idx != 32)
  905. {
  906. nu_pdma_memfun_actor_mask |= (1 << idx);
  907. }
  908. else
  909. {
  910. idx = -1;
  911. }
  912. result = rt_mutex_release(nu_pdma_memfun_actor_pool_lock);
  913. RT_ASSERT(result == RT_EOK);
  914. }
  915. return idx;
  916. }
/* DMA-driven memory move worker behind nu_pdma_memcpy()/nu_pdma_mempush().
 * Busy-waits until a memfun actor channel is free, programs one transfer,
 * blocks until the ISR callback signals completion, then returns the actor
 * to the pool. Returns the number of elements actually transferred. */
static rt_ssize_t nu_pdma_memfun(void *dest, void *src, uint32_t u32DataWidth, unsigned int u32TransferCnt, nu_pdma_memctrl_t eMemCtl)
{
    nu_pdma_memfun_actor_t psMemFunActor = NULL;
    struct nu_pdma_chn_cb sChnCB;
    rt_err_t result = -RT_ERROR;
    int idx;
    rt_size_t ret = 0;
    /* Employ actor: spin until a pool slot is granted. */
    while ((idx = nu_pdma_memfun_employ()) < 0);
    psMemFunActor = &nu_pdma_memfun_actor_arr[idx];
    /* Set PDMA memory control to eMemCtl. */
    nu_pdma_channel_memctrl_set(psMemFunActor->m_i32ChannID, eMemCtl);
    /* Register ISR callback function; it releases m_psSemMemFun when done. */
    sChnCB.m_eCBType = eCBType_Event;
    sChnCB.m_pfnCBHandler = nu_pdma_memfun_cb;
    sChnCB.m_pvUserData = (void *)psMemFunActor;
    nu_pdma_filtering_set(psMemFunActor->m_i32ChannID, NU_PDMA_EVENT_ABORT | NU_PDMA_EVENT_TRANSFER_DONE);
    nu_pdma_callback_register(psMemFunActor->m_i32ChannID, &sChnCB);
    psMemFunActor->m_u32Result = 0;
    /* Trigger it */
    nu_pdma_transfer(psMemFunActor->m_i32ChannID,
                     u32DataWidth,
                     (uint32_t)src,
                     (uint32_t)dest,
                     u32TransferCnt,
                     0);
    /* Wait it done. */
    result = rt_sem_take(psMemFunActor->m_psSemMemFun, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);
    /* Give result if get NU_PDMA_EVENT_TRANSFER_DONE.*/
    if (psMemFunActor->m_u32Result & NU_PDMA_EVENT_TRANSFER_DONE)
    {
        ret += u32TransferCnt;
    }
    else
    {
        /* Partial transfer: subtract what the channel still had queued. */
        ret += (u32TransferCnt - nu_pdma_non_transfer_count_get(psMemFunActor->m_i32ChannID));
    }
    /* Terminate it if get ABORT event */
    if (psMemFunActor->m_u32Result & NU_PDMA_EVENT_ABORT)
    {
        nu_pdma_channel_terminate(psMemFunActor->m_i32ChannID);
    }
    /* Return the actor slot under the lock, then release the counting
     * semaphore so a waiting employer can proceed. */
    result = rt_mutex_take(nu_pdma_memfun_actor_pool_lock, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);
    nu_pdma_memfun_actor_mask &= ~(1 << idx);
    result = rt_mutex_release(nu_pdma_memfun_actor_pool_lock);
    RT_ASSERT(result == RT_EOK);
    /* Fire actor */
    result = rt_sem_release(nu_pdma_memfun_actor_pool_sem);
    RT_ASSERT(result == RT_EOK);
    return ret;
}
  970. rt_size_t nu_pdma_mempush(void *dest, void *src, uint32_t data_width, unsigned int transfer_count)
  971. {
  972. if (data_width == 8 || data_width == 16 || data_width == 32)
  973. return nu_pdma_memfun(dest, src, data_width, transfer_count, eMemCtl_SrcInc_DstFix);
  974. return 0;
  975. }
  976. void *nu_pdma_memcpy(void *dest, void *src, unsigned int count)
  977. {
  978. int i = 0;
  979. uint32_t u32Offset = 0;
  980. uint32_t u32Remaining = count;
  981. for (i = 4; (i > 0) && (u32Remaining > 0) ; i >>= 1)
  982. {
  983. uint32_t u32src = (uint32_t)src + u32Offset;
  984. uint32_t u32dest = (uint32_t)dest + u32Offset;
  985. if (((u32src % i) == (u32dest % i)) &&
  986. ((u32src % i) == 0) &&
  987. (RT_ALIGN_DOWN(u32Remaining, i) >= i))
  988. {
  989. uint32_t u32TXCnt = u32Remaining / i;
  990. if (u32TXCnt != nu_pdma_memfun((void *)u32dest, (void *)u32src, i * 8, u32TXCnt, eMemCtl_SrcInc_DstInc))
  991. goto exit_nu_pdma_memcpy;
  992. u32Offset += (u32TXCnt * i);
  993. u32Remaining -= (u32TXCnt * i);
  994. }
  995. }
  996. if (count == u32Offset)
  997. return dest;
  998. exit_nu_pdma_memcpy:
  999. return NULL;
  1000. }
  1001. /**
  1002. * PDMA memfun actor initialization
  1003. */
  1004. int rt_hw_pdma_memfun_init(void)
  1005. {
  1006. nu_pdma_memfun_actor_init();
  1007. return 0;
  1008. }
  1009. INIT_DEVICE_EXPORT(rt_hw_pdma_memfun_init);
  1010. #endif // #if defined(BSP_USING_PDMA)