/* drv_crypto.c */
/**************************************************************************//**
*
* @copyright (C) 2020 Nuvoton Technology Corp. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date            Author       Notes
* 2020-7-3        YCHuang12    First version
*
******************************************************************************/
#include <rtconfig.h>

#if ((defined(BSP_USING_CRYPTO) || defined(BSP_USING_TRNG) || defined(BSP_USING_CRC)) && defined(RT_USING_HWCRYPTO))

#include <rtdevice.h>
#include <rtdbg.h>
#include <board.h>
#include "NuMicro.h"
#include <nu_bitutil.h>

#if defined(BSP_USING_TRNG)
#include "drv_trng.h"
#endif

#if defined(BSP_USING_CRC)
#include "drv_crc.h"
#endif
  25. /* Private typedef --------------------------------------------------------------*/
/* Per-context state for one hardware SHA computation. */
typedef struct
{
    uint8_t *pu8SHATempBuf;     /* heap bounce buffer holding a partial or unaligned block; NULL when empty */
    uint32_t u32SHATempBufLen;  /* number of valid bytes currently buffered in pu8SHATempBuf */
    uint32_t u32DMAMode;        /* DMA cascade mode for the next block: CRYPTO_DMA_FIRST, then CRYPTO_DMA_CONTINUE */
    uint32_t u32BlockSize;      /* SHA block size in bytes: 128 for SHA384/512, 64 otherwise (set in nu_hwcrypto_reset) */
} S_SHA_CONTEXT;
  33. /* Private functions ------------------------------------------------------------*/
static rt_err_t nu_hwcrypto_create(struct rt_hwcrypto_ctx *ctx);
static void nu_hwcrypto_destroy(struct rt_hwcrypto_ctx *ctx);
static rt_err_t nu_hwcrypto_clone(struct rt_hwcrypto_ctx *des, const struct rt_hwcrypto_ctx *src);
static void nu_hwcrypto_reset(struct rt_hwcrypto_ctx *ctx);

/* Private variables ------------------------------------------------------------*/
/* Device-level hooks registered with the RT-Thread hwcrypto framework. */
static const struct rt_hwcrypto_ops nu_hwcrypto_ops =
{
    .create = nu_hwcrypto_create,    /* bind ops table / allocate per-context state */
    .destroy = nu_hwcrypto_destroy,  /* free per-context state */
    .copy = nu_hwcrypto_clone,       /* duplicate per-context state */
    .reset = nu_hwcrypto_reset,      /* reinitialize per-context state */
};
/* Crypto engine operation ------------------------------------------------------------*/
#if defined(BSP_USING_CRYPTO)

/* RT-Thread names for the engine-guard mutexes */
#define NU_HWCRYPTO_AES_NAME "nu_AES"
#define NU_HWCRYPTO_SHA_NAME "nu_SHA"
#if !defined(BSP_USING_TRNG)
/* PRNG is only used as the RNG backend when no true RNG (TRNG) is enabled */
#define NU_HWCRYPTO_PRNG_NAME "nu_PRNG"
#endif

/* Serialize access to the single shared CRPT peripheral, per sub-engine */
static struct rt_mutex s_AES_mutex;
static struct rt_mutex s_SHA_mutex;
#if !defined(BSP_USING_TRNG)
static struct rt_mutex s_PRNG_mutex;
static volatile int s_PRNG_done;    /* set by CRPT_IRQHandler when PRNG data is ready */
#endif

/* Completion flags: set from the CRPT interrupt handler, busy-polled by callers */
static volatile int s_AES_done;
static volatile int s_SHA_done;
/* One-time crypto engine setup: enable the CRPT interrupt plus the AES/SHA
 * (and, without TRNG, PRNG) completion interrupts, and create the mutexes
 * that serialize access to the shared hardware. Always returns RT_EOK. */
static rt_err_t nu_crypto_init(void)
{
    /* Enable Crypto engine interrupt */
    NVIC_EnableIRQ(CRPT_IRQn);
    AES_ENABLE_INT(CRPT);
    SHA_ENABLE_INT(CRPT);

    //init cipher mutex
    rt_mutex_init(&s_AES_mutex, NU_HWCRYPTO_AES_NAME, RT_IPC_FLAG_PRIO);
    rt_mutex_init(&s_SHA_mutex, NU_HWCRYPTO_SHA_NAME, RT_IPC_FLAG_PRIO);

#if !defined(BSP_USING_TRNG)
    /* No TRNG configured: the PRNG provides the RNG service instead */
    PRNG_ENABLE_INT(CRPT);
    rt_mutex_init(&s_PRNG_mutex, NU_HWCRYPTO_PRNG_NAME, RT_IPC_FLAG_PRIO);
#endif

    return RT_EOK;
}
  76. //Crypto engine IRQ handler
  77. void CRPT_IRQHandler()
  78. {
  79. if (AES_GET_INT_FLAG(CRPT))
  80. {
  81. if (CRPT->INTSTS & (CRPT_INTSTS_AESEIF_Msk) || (CRPT->AES_STS & (CRPT_AES_STS_BUSERR_Msk | CRPT_AES_STS_CNTERR_Msk | (0x1ul << 21))))
  82. rt_kprintf("AES ERROR\n");
  83. s_AES_done = 1;
  84. AES_CLR_INT_FLAG(CRPT);
  85. }
  86. if (SHA_GET_INT_FLAG(CRPT))
  87. {
  88. if (CRPT->INTSTS & (CRPT_INTSTS_HMACEIF_Msk) || (CRPT->HMAC_STS & (CRPT_HMAC_STS_DMAERR_Msk | (0x1ul << 9))))
  89. rt_kprintf("SHA ERROR\n");
  90. s_SHA_done = 1;
  91. SHA_CLR_INT_FLAG(CRPT);
  92. }
  93. #if !defined(BSP_USING_TRNG)
  94. if (PRNG_GET_INT_FLAG(CRPT))
  95. {
  96. s_PRNG_done = 1;
  97. PRNG_CLR_INT_FLAG(CRPT);
  98. }
  99. #endif
  100. }
/* Run one AES DMA operation on hardware channel 0.
 * bEncrypt    : TRUE = encrypt, FALSE = decrypt
 * u32OpMode   : AES_MODE_ECB/CBC/CFB/OFB/CTR
 * u32KeySize  : AES_KEY_SIZE_128/192/256 (pu8Key length must match)
 * pu8IV       : 16-byte IV; read unconditionally, even for ECB
 * pu8InData / pu8OutData : word-aligned, DMA-reachable buffers (caller's job)
 * u32DataLen  : byte count
 * Busy-waits (while holding s_AES_mutex) until CRPT_IRQHandler() sets
 * s_AES_done. Always returns RT_EOK. */
static rt_err_t nu_aes_crypt_run(
    rt_bool_t bEncrypt,
    uint32_t u32OpMode,
    uint8_t *pu8Key,
    uint32_t u32KeySize,
    uint8_t *pu8IV,
    uint8_t *pu8InData,
    uint8_t *pu8OutData,
    uint32_t u32DataLen
)
{
    uint32_t au32SwapKey[8];
    uint32_t au32SwapIV[4];

    /* Engine consumes key/IV as big-endian 32-bit words: 4 words always,
       6 for 192-bit keys, 8 for 256-bit keys. */
    au32SwapKey[0] = nu_get32_be(&pu8Key[0]);
    au32SwapKey[1] = nu_get32_be(&pu8Key[4]);
    au32SwapKey[2] = nu_get32_be(&pu8Key[8]);
    au32SwapKey[3] = nu_get32_be(&pu8Key[12]);

    if ((u32KeySize == AES_KEY_SIZE_192) || (u32KeySize == AES_KEY_SIZE_256))
    {
        au32SwapKey[4] = nu_get32_be(&pu8Key[16]);
        au32SwapKey[5] = nu_get32_be(&pu8Key[20]);
    }

    if (u32KeySize == AES_KEY_SIZE_256)
    {
        au32SwapKey[6] = nu_get32_be(&pu8Key[24]);
        au32SwapKey[7] = nu_get32_be(&pu8Key[28]);
    }

    au32SwapIV[0] = nu_get32_be(&pu8IV[0]);
    au32SwapIV[1] = nu_get32_be(&pu8IV[4]);
    au32SwapIV[2] = nu_get32_be(&pu8IV[8]);
    au32SwapIV[3] = nu_get32_be(&pu8IV[12]);

    rt_mutex_take(&s_AES_mutex, RT_WAITING_FOREVER);

    //Using Channel 0
    AES_Open(CRPT, 0, bEncrypt, u32OpMode, u32KeySize, AES_IN_OUT_SWAP);
    AES_SetKey(CRPT, 0, (uint32_t *)au32SwapKey, u32KeySize);
    AES_SetInitVect(CRPT, 0, (uint32_t *)au32SwapIV);

    //Setup AES DMA
    AES_SetDMATransfer(CRPT, 0, (uint32_t)pu8InData, (uint32_t)pu8OutData, u32DataLen);
    AES_CLR_INT_FLAG(CRPT);

    //Start AES encryption/decryption
    s_AES_done = 0;
    AES_Start(CRPT, 0, CRYPTO_DMA_ONE_SHOT);
    /* completion signalled from CRPT_IRQHandler() */
    while (!s_AES_done) {};

    rt_mutex_release(&s_AES_mutex);

    return RT_EOK;
}
#if !defined(BSP_USING_TRNG)
//Using PRNG instead of TRNG
/* (Re)seed the hardware PRNG used as the RNG fallback.
 * u32Seed: new seed value (fixed NU_PRNG_SEED_VALUE or current tick). */
static void nu_prng_open(uint32_t u32Seed)
{
    rt_mutex_take(&s_PRNG_mutex, RT_WAITING_FOREVER);

    //Open PRNG 128 bits. But always return 32 bits
    PRNG_Open(CRPT, PRNG_KEY_SIZE_128, PRNG_SEED_RELOAD, u32Seed);

    rt_mutex_release(&s_PRNG_mutex);
}
  156. static rt_uint32_t nu_prng_run(void)
  157. {
  158. uint32_t au32RNGValue[2];
  159. rt_mutex_take(&s_PRNG_mutex, RT_WAITING_FOREVER);
  160. s_PRNG_done = 0;
  161. PRNG_Start(CRPT);
  162. while (!s_PRNG_done) {};
  163. PRNG_Read(CRPT, au32RNGValue);
  164. rt_mutex_release(&s_PRNG_mutex);
  165. return au32RNGValue[0];
  166. }
  167. #endif
  168. static rt_err_t nu_aes_crypt(struct hwcrypto_symmetric *symmetric_ctx, struct hwcrypto_symmetric_info *symmetric_info)
  169. {
  170. uint32_t u32AESOpMode;
  171. uint32_t u32AESKeySize;
  172. unsigned char *in, *out;
  173. unsigned char in_align_flag = 0;
  174. unsigned char out_align_flag = 0;
  175. unsigned char iv_temp[16];
  176. if ((symmetric_info->length % 4) != 0)
  177. {
  178. return -RT_EINVAL;
  179. }
  180. //Checking key length
  181. if (symmetric_ctx->key_bitlen == 128)
  182. {
  183. u32AESKeySize = AES_KEY_SIZE_128;
  184. }
  185. else if (symmetric_ctx->key_bitlen == 192)
  186. {
  187. u32AESKeySize = AES_KEY_SIZE_192;
  188. }
  189. else if (symmetric_ctx->key_bitlen == 256)
  190. {
  191. u32AESKeySize = AES_KEY_SIZE_256;
  192. }
  193. else
  194. {
  195. return -RT_EINVAL;
  196. }
  197. //Select AES operation mode
  198. switch (symmetric_ctx->parent.type & (HWCRYPTO_MAIN_TYPE_MASK | HWCRYPTO_SUB_TYPE_MASK))
  199. {
  200. case HWCRYPTO_TYPE_AES_ECB:
  201. u32AESOpMode = AES_MODE_ECB;
  202. break;
  203. case HWCRYPTO_TYPE_AES_CBC:
  204. u32AESOpMode = AES_MODE_CBC;
  205. break;
  206. case HWCRYPTO_TYPE_AES_CFB:
  207. u32AESOpMode = AES_MODE_CFB;
  208. break;
  209. case HWCRYPTO_TYPE_AES_OFB:
  210. u32AESOpMode = AES_MODE_OFB;
  211. break;
  212. case HWCRYPTO_TYPE_AES_CTR:
  213. u32AESOpMode = AES_MODE_CTR;
  214. break;
  215. default :
  216. return -RT_ERROR;
  217. }
  218. in = (unsigned char *)symmetric_info->in;
  219. out = (unsigned char *)symmetric_info->out;
  220. //Checking in/out data buffer address not alignment or out of SRAM
  221. if (((rt_uint32_t)in % 4) != 0 || ((rt_uint32_t)in < SRAM_BASE) || ((rt_uint32_t)in > SRAM_END))
  222. {
  223. in = rt_malloc(symmetric_info->length);
  224. if (in == RT_NULL)
  225. {
  226. LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, symmetric_info->length);
  227. return -RT_ENOMEM;
  228. }
  229. rt_memcpy(in, symmetric_info->in, symmetric_info->length);
  230. in_align_flag = 1;
  231. }
  232. if (((rt_uint32_t)out % 4) != 0 || ((rt_uint32_t)out < SRAM_BASE) || ((rt_uint32_t)out > SRAM_END))
  233. {
  234. out = rt_malloc(symmetric_info->length);
  235. if (out == RT_NULL)
  236. {
  237. if (in_align_flag)
  238. rt_free(in);
  239. LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, symmetric_info->length);
  240. return -RT_ENOMEM;
  241. }
  242. out_align_flag = 1;
  243. }
  244. if ((u32AESOpMode == AES_MODE_CBC) && (symmetric_info->mode == HWCRYPTO_MODE_DECRYPT))
  245. {
  246. uint32_t loop;
  247. loop = (symmetric_info->length - 1) / 16;
  248. rt_memcpy(iv_temp, in + (loop * 16), 16);
  249. }
  250. nu_aes_crypt_run(symmetric_info->mode == HWCRYPTO_MODE_ENCRYPT ? TRUE : FALSE, u32AESOpMode, symmetric_ctx->key, u32AESKeySize, symmetric_ctx->iv, in, out, symmetric_info->length);
  251. if (u32AESOpMode == AES_MODE_CBC)
  252. {
  253. if (symmetric_info->mode == HWCRYPTO_MODE_DECRYPT)
  254. {
  255. rt_memcpy(symmetric_ctx->iv, iv_temp, 16);
  256. }
  257. else
  258. {
  259. uint32_t loop;
  260. loop = (symmetric_info->length - 1) / 16;
  261. rt_memcpy(symmetric_ctx->iv, out + (loop * 16), 16);
  262. }
  263. }
  264. if (out_align_flag)
  265. {
  266. rt_memcpy(symmetric_info->out, out, symmetric_info->length);
  267. rt_free(out);
  268. }
  269. if (in_align_flag)
  270. {
  271. rt_free(in);
  272. }
  273. return RT_EOK;
  274. }
/* Push one chunk of message data through the SHA engine via DMA.
 * u32OpMode : SHA_MODE_SHA1/224/256/384/512
 * u32SrcAddr: word-aligned source address (0 allowed with u32Len == 0)
 * u32Mode   : DMA cascade phase (CRYPTO_DMA_FIRST/_CONTINUE/_LAST/_ONE_SHOT)
 * Busy-waits until CRPT_IRQHandler() sets s_SHA_done.
 * Caller must hold s_SHA_mutex. */
static void SHABlockUpdate(uint32_t u32OpMode, uint32_t u32SrcAddr, uint32_t u32Len, uint32_t u32Mode)
{
    /* NOTE(review): re-opening per block appears to preserve the running
       digest because chaining is governed by DMAFIRST and the DMA mode --
       confirm against the CRPT chapter of the TRM. */
    SHA_Open(CRPT, u32OpMode, SHA_IN_OUT_SWAP, 0);

    //Setup SHA DMA
    SHA_SetDMATransfer(CRPT, u32SrcAddr, u32Len);
    SHA_CLR_INT_FLAG(CRPT);

    //Start SHA
    s_SHA_done = 0;

    /* Only the first block of a cascade asserts DMAFIRST */
    if (u32Mode == CRYPTO_DMA_FIRST)
        CRPT->HMAC_CTL |= CRPT_HMAC_CTL_DMAFIRST_Msk;
    else
        CRPT->HMAC_CTL &= ~CRPT_HMAC_CTL_DMAFIRST_Msk;

    SHA_Start(CRPT, u32Mode);
    /* completion signalled from CRPT_IRQHandler() */
    while (!s_SHA_done) {};
}
  290. static rt_err_t nu_sha_hash_run(
  291. S_SHA_CONTEXT *psSHACtx,
  292. uint32_t u32OpMode,
  293. uint8_t *pu8InData,
  294. uint32_t u32DataLen
  295. )
  296. {
  297. rt_mutex_take(&s_SHA_mutex, RT_WAITING_FOREVER);
  298. uint8_t *pu8SrcAddr = (uint8_t *)pu8InData;
  299. uint32_t u32CopyLen = 0;
  300. while ((psSHACtx->u32SHATempBufLen + u32DataLen) >= psSHACtx->u32BlockSize)
  301. {
  302. if (psSHACtx->pu8SHATempBuf)
  303. {
  304. if (psSHACtx->u32SHATempBufLen == psSHACtx->u32BlockSize)
  305. {
  306. //Trigger SHA block update
  307. SHABlockUpdate(u32OpMode, (uint32_t)psSHACtx->pu8SHATempBuf, psSHACtx->u32BlockSize, psSHACtx->u32DMAMode);
  308. psSHACtx->u32DMAMode = CRYPTO_DMA_CONTINUE;
  309. //free SHATempBuff
  310. rt_free(psSHACtx->pu8SHATempBuf);
  311. psSHACtx->pu8SHATempBuf = NULL;
  312. psSHACtx->u32SHATempBufLen = 0;
  313. continue;
  314. }
  315. else
  316. {
  317. u32CopyLen = psSHACtx->u32BlockSize - psSHACtx->u32SHATempBufLen;
  318. if (u32DataLen < u32CopyLen)
  319. u32CopyLen = u32DataLen;
  320. rt_memcpy(psSHACtx->pu8SHATempBuf + psSHACtx->u32SHATempBufLen, pu8SrcAddr, u32CopyLen);
  321. psSHACtx->u32SHATempBufLen += u32CopyLen;
  322. pu8SrcAddr += u32CopyLen;
  323. u32DataLen -= u32CopyLen;
  324. continue;
  325. }
  326. }
  327. if ((uint32_t) pu8SrcAddr & 3) //address not aligned 4
  328. {
  329. psSHACtx->pu8SHATempBuf = rt_malloc(psSHACtx->u32BlockSize);
  330. if (psSHACtx->pu8SHATempBuf == RT_NULL)
  331. {
  332. LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, psSHACtx->u32BlockSize);
  333. rt_mutex_release(&s_SHA_mutex);
  334. return -RT_ENOMEM;
  335. }
  336. rt_memcpy(psSHACtx->pu8SHATempBuf, pu8SrcAddr, psSHACtx->u32BlockSize);
  337. psSHACtx->u32SHATempBufLen = psSHACtx->u32BlockSize;
  338. pu8SrcAddr += psSHACtx->u32BlockSize;
  339. u32DataLen -= psSHACtx->u32BlockSize;
  340. continue;
  341. }
  342. //Trigger SHA block update
  343. SHABlockUpdate(u32OpMode, (uint32_t)pu8SrcAddr, psSHACtx->u32BlockSize, psSHACtx->u32DMAMode);
  344. psSHACtx->u32DMAMode = CRYPTO_DMA_CONTINUE;
  345. pu8SrcAddr += psSHACtx->u32BlockSize;
  346. u32DataLen -= psSHACtx->u32BlockSize;
  347. }
  348. if (u32DataLen)
  349. {
  350. if (psSHACtx->pu8SHATempBuf == NULL)
  351. {
  352. psSHACtx->pu8SHATempBuf = rt_malloc(psSHACtx->u32BlockSize);
  353. if (psSHACtx->pu8SHATempBuf == RT_NULL)
  354. {
  355. LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, psSHACtx->u32BlockSize);
  356. rt_mutex_release(&s_SHA_mutex);
  357. return -RT_ENOMEM;
  358. }
  359. psSHACtx->u32SHATempBufLen = 0;
  360. }
  361. rt_memcpy(psSHACtx->pu8SHATempBuf, pu8SrcAddr, u32DataLen);
  362. psSHACtx->u32SHATempBufLen += u32DataLen;
  363. }
  364. rt_mutex_release(&s_SHA_mutex);
  365. return RT_EOK;
  366. }
  367. static rt_err_t nu_sha_update(struct hwcrypto_hash *hash_ctx, const rt_uint8_t *in, rt_size_t length)
  368. {
  369. uint32_t u32SHAOpMode;
  370. unsigned char *nu_in;
  371. unsigned char in_align_flag = 0;
  372. //Select SHA operation mode
  373. switch (hash_ctx->parent.type & (HWCRYPTO_MAIN_TYPE_MASK | HWCRYPTO_SUB_TYPE_MASK))
  374. {
  375. case HWCRYPTO_TYPE_SHA1:
  376. u32SHAOpMode = SHA_MODE_SHA1;
  377. break;
  378. case HWCRYPTO_TYPE_SHA224:
  379. u32SHAOpMode = SHA_MODE_SHA224;
  380. break;
  381. case HWCRYPTO_TYPE_SHA256:
  382. u32SHAOpMode = SHA_MODE_SHA256;
  383. break;
  384. case HWCRYPTO_TYPE_SHA384:
  385. u32SHAOpMode = SHA_MODE_SHA384;
  386. break;
  387. case HWCRYPTO_TYPE_SHA512:
  388. u32SHAOpMode = SHA_MODE_SHA512;
  389. break;
  390. default :
  391. return -RT_ERROR;
  392. }
  393. nu_in = (unsigned char *)in;
  394. //Checking in data buffer address not alignment or out of SRAM
  395. if (((rt_uint32_t)nu_in % 4) != 0 || ((rt_uint32_t)nu_in < SRAM_BASE) || ((rt_uint32_t)nu_in > SRAM_END))
  396. {
  397. nu_in = rt_malloc(length);
  398. if (nu_in == RT_NULL)
  399. {
  400. LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, length);
  401. return -RT_ENOMEM;
  402. }
  403. rt_memcpy(nu_in, in, length);
  404. in_align_flag = 1;
  405. }
  406. nu_sha_hash_run(hash_ctx->parent.contex, u32SHAOpMode, nu_in, length);
  407. if (in_align_flag)
  408. {
  409. rt_free(nu_in);
  410. }
  411. return RT_EOK;
  412. }
  413. static rt_err_t nu_sha_finish(struct hwcrypto_hash *hash_ctx, rt_uint8_t *out, rt_size_t length)
  414. {
  415. unsigned char *nu_out;
  416. unsigned char out_align_flag = 0;
  417. uint32_t u32SHAOpMode;
  418. S_SHA_CONTEXT *psSHACtx = hash_ctx->parent.contex;
  419. //Check SHA Hash value buffer length
  420. switch (hash_ctx->parent.type & (HWCRYPTO_MAIN_TYPE_MASK | HWCRYPTO_SUB_TYPE_MASK))
  421. {
  422. case HWCRYPTO_TYPE_SHA1:
  423. u32SHAOpMode = SHA_MODE_SHA1;
  424. if (length < 5UL)
  425. {
  426. return -RT_EINVAL;
  427. }
  428. break;
  429. case HWCRYPTO_TYPE_SHA224:
  430. u32SHAOpMode = SHA_MODE_SHA224;
  431. if (length < 7UL)
  432. {
  433. return -RT_EINVAL;
  434. }
  435. break;
  436. case HWCRYPTO_TYPE_SHA256:
  437. u32SHAOpMode = SHA_MODE_SHA256;
  438. if (length < 8UL)
  439. {
  440. return -RT_EINVAL;
  441. }
  442. break;
  443. case HWCRYPTO_TYPE_SHA384:
  444. u32SHAOpMode = SHA_MODE_SHA384;
  445. if (length < 12UL)
  446. {
  447. return -RT_EINVAL;
  448. }
  449. break;
  450. case HWCRYPTO_TYPE_SHA512:
  451. u32SHAOpMode = SHA_MODE_SHA512;
  452. if (length < 16UL)
  453. {
  454. return -RT_EINVAL;
  455. }
  456. break;
  457. default :
  458. return -RT_ERROR;
  459. }
  460. nu_out = (unsigned char *)out;
  461. //Checking out data buffer address alignment or not
  462. if (((rt_uint32_t)nu_out % 4) != 0)
  463. {
  464. nu_out = rt_malloc(length);
  465. if (nu_out == RT_NULL)
  466. {
  467. LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, length);
  468. return -RT_ENOMEM;
  469. }
  470. out_align_flag = 1;
  471. }
  472. if (psSHACtx->pu8SHATempBuf)
  473. {
  474. if (psSHACtx->u32DMAMode == CRYPTO_DMA_FIRST)
  475. SHABlockUpdate(u32SHAOpMode, (uint32_t)psSHACtx->pu8SHATempBuf, psSHACtx->u32SHATempBufLen, CRYPTO_DMA_ONE_SHOT);
  476. else
  477. SHABlockUpdate(u32SHAOpMode, (uint32_t)psSHACtx->pu8SHATempBuf, psSHACtx->u32SHATempBufLen, CRYPTO_DMA_LAST);
  478. //free SHATempBuf
  479. rt_free(psSHACtx->pu8SHATempBuf);
  480. psSHACtx->pu8SHATempBuf = RT_NULL;
  481. psSHACtx->u32SHATempBufLen = 0;
  482. }
  483. else
  484. {
  485. SHABlockUpdate(u32SHAOpMode, (uint32_t)NULL, 0, CRYPTO_DMA_LAST);
  486. }
  487. SHA_Read(CRPT, (uint32_t *)nu_out);
  488. if (out_align_flag)
  489. {
  490. rt_memcpy(out, nu_out, length);
  491. rt_free(nu_out);
  492. }
  493. return RT_EOK;
  494. }
#if !defined(BSP_USING_TRNG)
/* hwcrypto_rng_ops.update adapter: return one 32-bit PRNG word.
 * ctx is unused (the PRNG is global, guarded by s_PRNG_mutex). */
static rt_uint32_t nu_prng_rand(struct hwcrypto_rng *ctx)
{
    return nu_prng_run();
}
#endif
/* Operation tables bound to contexts in nu_hwcrypto_create() */
static const struct hwcrypto_symmetric_ops nu_aes_ops =
{
    .crypt = nu_aes_crypt,
};

static const struct hwcrypto_hash_ops nu_sha_ops =
{
    .update = nu_sha_update,
    .finish = nu_sha_finish,
};
#endif

/* CRC operation ------------------------------------------------------------*/
#if defined(BSP_USING_CRC)
/* CRC backend implemented in drv_crc.c */
static const struct hwcrypto_crc_ops nu_crc_ops =
{
    .update = nu_crc_update,
};
#endif

/* TRNG operation ------------------------------------------------------------*/
#if defined(BSP_USING_TRNG)
/* Prefer the true RNG when the TRNG peripheral is enabled... */
static const struct hwcrypto_rng_ops nu_rng_ops =
{
    .update = nu_trng_rand,
};
#elif defined(BSP_USING_CRYPTO)
/* ...otherwise fall back to the crypto engine's PRNG */
static const struct hwcrypto_rng_ops nu_rng_ops =
{
    .update = nu_prng_rand,
};
#endif
/* Register crypto interface ----------------------------------------------------------*/
/* hwcrypto 'create' hook: bind the per-type ops table to the context and
 * allocate per-context state (only SHA needs any).
 * Returns RT_EOK, or -RT_ERROR on unknown type / allocation failure. */
static rt_err_t nu_hwcrypto_create(struct rt_hwcrypto_ctx *ctx)
{
    rt_err_t res = RT_EOK;

    switch (ctx->type & HWCRYPTO_MAIN_TYPE_MASK)
    {
#if defined(BSP_USING_TRNG)
    case HWCRYPTO_TYPE_RNG:
    {
        ctx->contex = RT_NULL;
        //Setup RNG operation
        ((struct hwcrypto_rng *)ctx)->ops = &nu_rng_ops;
        break;
    }
#endif /* BSP_USING_TRNG */

#if defined(BSP_USING_CRC)
    case HWCRYPTO_TYPE_CRC:
    {
        ctx->contex = RT_NULL;
        //Setup CRC operation
        ((struct hwcrypto_crc *)ctx)->ops = &nu_crc_ops;
        break;
    }
#endif /* BSP_USING_CRC */

#if defined(BSP_USING_CRYPTO)
    case HWCRYPTO_TYPE_AES:
    {
        ctx->contex = RT_NULL;
        //Setup AES operation
        ((struct hwcrypto_symmetric *)ctx)->ops = &nu_aes_ops;
        break;
    }

    case HWCRYPTO_TYPE_SHA1:
    {
        /* Zeroed S_SHA_CONTEXT; u32DMAMode/u32BlockSize get their real
           values in nu_hwcrypto_reset() -- NOTE(review): confirm the
           framework resets the context before the first update. */
        ctx->contex = rt_malloc(sizeof(S_SHA_CONTEXT));

        if (ctx->contex == RT_NULL)
            return -RT_ERROR;

        rt_memset(ctx->contex, 0, sizeof(S_SHA_CONTEXT));

        //Setup SHA1 operation
        ((struct hwcrypto_hash *)ctx)->ops = &nu_sha_ops;
        break;
    }

    case HWCRYPTO_TYPE_SHA2:
    {
        ctx->contex = rt_malloc(sizeof(S_SHA_CONTEXT));

        if (ctx->contex == RT_NULL)
            return -RT_ERROR;

        rt_memset(ctx->contex, 0, sizeof(S_SHA_CONTEXT));

        //Setup SHA2 operation
        ((struct hwcrypto_hash *)ctx)->ops = &nu_sha_ops;
        break;
    }

#if !defined(BSP_USING_TRNG)
    case HWCRYPTO_TYPE_RNG:
    {
        /* PRNG-backed RNG: seed from a configured value or the current tick */
        ctx->contex = RT_NULL;
        ((struct hwcrypto_rng *)ctx)->ops = &nu_rng_ops;
#if defined(NU_PRNG_USE_SEED)
        nu_prng_open(NU_PRNG_SEED_VALUE);
#else
        nu_prng_open(rt_tick_get());
#endif
        break;
    }
#endif /* !BSP_USING_TRNG */
#endif /* BSP_USING_CRYPTO */

    default:
        res = -RT_ERROR;
        break;
    }

    return res;
}
  602. static void nu_hwcrypto_destroy(struct rt_hwcrypto_ctx *ctx)
  603. {
  604. if (ctx->contex)
  605. rt_free(ctx->contex);
  606. }
  607. static rt_err_t nu_hwcrypto_clone(struct rt_hwcrypto_ctx *des, const struct rt_hwcrypto_ctx *src)
  608. {
  609. rt_err_t res = RT_EOK;
  610. if (des->contex && src->contex)
  611. {
  612. rt_memcpy(des->contex, src->contex, sizeof(struct rt_hwcrypto_ctx));
  613. }
  614. else
  615. return -RT_EINVAL;
  616. return res;
  617. }
/* hwcrypto 'reset' hook: reseed the PRNG-backed RNG, and/or reinitialize a
 * SHA context so the next update() starts a fresh hash. */
static void nu_hwcrypto_reset(struct rt_hwcrypto_ctx *ctx)
{
    switch (ctx->type & HWCRYPTO_MAIN_TYPE_MASK)
    {
#if !defined(BSP_USING_TRNG)
    case HWCRYPTO_TYPE_RNG:
    {
#if defined(NU_PRNG_USE_SEED)
        nu_prng_open(NU_PRNG_SEED_VALUE);
#else
        nu_prng_open(rt_tick_get());
#endif
        break;
    }
#endif /* !BSP_USING_TRNG */

#if defined(BSP_USING_CRYPTO)
    case HWCRYPTO_TYPE_SHA1:
    case HWCRYPTO_TYPE_SHA2:
    {
        S_SHA_CONTEXT *psSHACtx = (S_SHA_CONTEXT *)ctx->contex;

        /* Drop any partially-buffered message block */
        if (psSHACtx->pu8SHATempBuf)
        {
            rt_free(psSHACtx->pu8SHATempBuf);
        }

        psSHACtx->pu8SHATempBuf = RT_NULL;
        psSHACtx->u32SHATempBufLen = 0;
        psSHACtx->u32DMAMode = CRYPTO_DMA_FIRST;  /* next block starts a new DMA cascade */

        /* Block size: 128 bytes for SHA384/512, 64 for SHA1/224/256.
           Full-type compare here (sub-type distinguishes the variants). */
        if ((ctx->type == HWCRYPTO_TYPE_SHA384) || (ctx->type == HWCRYPTO_TYPE_SHA512))
        {
            psSHACtx->u32BlockSize = 128;
        }
        else
        {
            psSHACtx->u32BlockSize = 64;
        }
        break;
    }
#endif

    default:
        break;
    }
}
/* Init and register nu_hwcrypto_dev */
/* Device registration entry, run automatically at the INIT_DEVICE boot
 * stage. Initializes whichever engines are enabled (crypto/CRC/TRNG) and
 * registers the hwcrypto device with the framework.
 * Returns 0 on success, -1 if registration fails. */
int nu_hwcrypto_device_init(void)
{
    static struct rt_hwcrypto_device nu_hwcrypto_dev;

    nu_hwcrypto_dev.ops = &nu_hwcrypto_ops;
    nu_hwcrypto_dev.id = 0;
    nu_hwcrypto_dev.user_data = &nu_hwcrypto_dev;

#if defined(BSP_USING_CRYPTO)
    nu_crypto_init();
#endif

#if defined(BSP_USING_CRC)
    nu_crc_init();
#endif

#if defined(BSP_USING_TRNG)
    nu_trng_init();
#endif

    // register hwcrypto operation
    if (rt_hwcrypto_register(&nu_hwcrypto_dev, RT_HWCRYPTO_DEFAULT_NAME) != RT_EOK)
    {
        return -1;
    }

    return 0;
}
INIT_DEVICE_EXPORT(nu_hwcrypto_device_init);

#endif //#if ((defined(BSP_USING_CRYPTO) || defined(BSP_USING_TRNG) || defined(BSP_USING_CRC)) && defined(RT_USING_HWCRYPTO))