/**************************************************************************//**
*
* @copyright (C) 2020 Nuvoton Technology Corp. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date            Author        Notes
* 2020-3-3        CHChen        First version
* 2020-5-3        YCHuang12     Add TDES and SHA
*
******************************************************************************/
#include <rtconfig.h>

#if ((defined(BSP_USING_CRYPTO) || defined(BSP_USING_TRNG) || defined(BSP_USING_CRC)) && defined(RT_USING_HWCRYPTO))

#include <rtdevice.h>
#include <rtdbg.h>
#include <board.h>
#include "NuMicro.h"
#include <nu_bitutil.h>

#if defined(BSP_USING_TRNG)
    #include "drv_trng.h"
#endif

#if defined(BSP_USING_CRC)
    #include "drv_crc.h"
#endif

/* Private typedef --------------------------------------------------------------*/
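/* Per-session SHA context: the crypto DMA engine consumes whole, word-aligned
   blocks, so partial or unaligned input is staged in pu8SHATempBuf until a full
   block of u32BlockSize bytes is available. */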
typedef struct
{
    uint8_t *pu8SHATempBuf;
    uint32_t u32SHATempBufLen;
    uint32_t u32DMAMode;
    uint32_t u32BlockSize;
} S_SHA_CONTEXT;

/* Private functions ------------------------------------------------------------*/
static rt_err_t nu_hwcrypto_create(struct rt_hwcrypto_ctx *ctx);
static void nu_hwcrypto_destroy(struct rt_hwcrypto_ctx *ctx);
static rt_err_t nu_hwcrypto_clone(struct rt_hwcrypto_ctx *des, const struct rt_hwcrypto_ctx *src);
static void nu_hwcrypto_reset(struct rt_hwcrypto_ctx *ctx);

/* Private variables ------------------------------------------------------------*/
static const struct rt_hwcrypto_ops nu_hwcrypto_ops =
{
    .create = nu_hwcrypto_create,
    .destroy = nu_hwcrypto_destroy,
    .copy = nu_hwcrypto_clone,
    .reset = nu_hwcrypto_reset,
};

/* Crypto engine operation ------------------------------------------------------------*/
#if defined(BSP_USING_CRYPTO)

#define NU_HWCRYPTO_DES_3KEYS     1
#define NU_HWCRYPTO_DES_NO3KEYS   0

#define NU_HWCRYPTO_AES_NAME      "nu_AES"
#define NU_HWCRYPTO_TDES_NAME     "nu_TDES"
#define NU_HWCRYPTO_SHA_NAME      "nu_SHA"
#if !defined(BSP_USING_TRNG)
    #define NU_HWCRYPTO_PRNG_NAME "nu_PRNG"
#endif

static struct rt_mutex s_AES_mutex;
static struct rt_mutex s_TDES_mutex;
static struct rt_mutex s_SHA_mutex;
#if !defined(BSP_USING_TRNG)
    static struct rt_mutex s_PRNG_mutex;
#endif

static rt_err_t nu_crypto_init(void)
{
    rt_err_t result = RT_EOK;

    /* init cipher mutex */
#if defined(RT_HWCRYPTO_USING_AES)
    result = rt_mutex_init(&s_AES_mutex, NU_HWCRYPTO_AES_NAME, RT_IPC_FLAG_PRIO);
    RT_ASSERT(result == RT_EOK);
    AES_ENABLE_INT(CRPT);
#endif

#if defined(RT_HWCRYPTO_USING_3DES)
    result = rt_mutex_init(&s_TDES_mutex, NU_HWCRYPTO_TDES_NAME, RT_IPC_FLAG_PRIO);
    RT_ASSERT(result == RT_EOK);
    TDES_ENABLE_INT(CRPT);
#endif

#if defined(RT_HWCRYPTO_USING_SHA1) || defined(RT_HWCRYPTO_USING_SHA2)
    result = rt_mutex_init(&s_SHA_mutex, NU_HWCRYPTO_SHA_NAME, RT_IPC_FLAG_PRIO);
    RT_ASSERT(result == RT_EOK);
    SHA_ENABLE_INT(CRPT);
#endif

#if defined(RT_HWCRYPTO_USING_RNG) && !defined(BSP_USING_TRNG)
    result = rt_mutex_init(&s_PRNG_mutex, NU_HWCRYPTO_PRNG_NAME, RT_IPC_FLAG_PRIO);
    RT_ASSERT(result == RT_EOK);
    PRNG_ENABLE_INT(CRPT);
#endif

    return result;
}
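
/* Low-level AES helper: packs the byte-array key/IV into big-endian 32-bit
   words as the crypto engine expects, then runs a single one-shot DMA transfer
   on channel 0 while holding the AES mutex. */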
static rt_err_t nu_aes_crypt_run(
    rt_bool_t bEncrypt,
    uint32_t u32OpMode,
    uint8_t *pu8Key,
    uint32_t u32KeySize,
    uint8_t *pu8IV,
    uint8_t *pu8InData,
    uint8_t *pu8OutData,
    uint32_t u32DataLen
)
{
    uint32_t au32SwapKey[8];
    uint32_t au32SwapIV[4];
    rt_err_t result;

    au32SwapKey[0] = nu_get32_be(&pu8Key[0]);
    au32SwapKey[1] = nu_get32_be(&pu8Key[4]);
    au32SwapKey[2] = nu_get32_be(&pu8Key[8]);
    au32SwapKey[3] = nu_get32_be(&pu8Key[12]);

    if ((u32KeySize == AES_KEY_SIZE_192) || (u32KeySize == AES_KEY_SIZE_256))
    {
        au32SwapKey[4] = nu_get32_be(&pu8Key[16]);
        au32SwapKey[5] = nu_get32_be(&pu8Key[20]);
    }

    if (u32KeySize == AES_KEY_SIZE_256)
    {
        au32SwapKey[6] = nu_get32_be(&pu8Key[24]);
        au32SwapKey[7] = nu_get32_be(&pu8Key[28]);
    }

    au32SwapIV[0] = nu_get32_be(&pu8IV[0]);
    au32SwapIV[1] = nu_get32_be(&pu8IV[4]);
    au32SwapIV[2] = nu_get32_be(&pu8IV[8]);
    au32SwapIV[3] = nu_get32_be(&pu8IV[12]);

    result = rt_mutex_take(&s_AES_mutex, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    //Using Channel 0
    AES_Open(CRPT, 0, bEncrypt, u32OpMode, u32KeySize, AES_IN_OUT_SWAP);
    AES_SetKey(CRPT, 0, (uint32_t *)&au32SwapKey[0], u32KeySize);
    AES_SetInitVect(CRPT, 0, (uint32_t *)au32SwapIV);

    //Setup AES DMA
    AES_SetDMATransfer(CRPT, 0, (uint32_t)pu8InData, (uint32_t)pu8OutData, u32DataLen);
    AES_CLR_INT_FLAG(CRPT);

    /* Start AES encryption/decryption */
    AES_Start(CRPT, 0, CRYPTO_DMA_ONE_SHOT);

    /* Wait done */
    while (!(CRPT->INTSTS & CRPT_INTEN_AESIEN_Msk)) {};

    if ((u32DataLen % 16) && (CRPT->AES_STS & (CRPT_AES_STS_OUTBUFEMPTY_Msk | CRPT_AES_STS_INBUFEMPTY_Msk)))
        rt_kprintf("AES WARNING - AES Data length(%d) is not enough. -> %d \n", u32DataLen, RT_ALIGN(u32DataLen, 16));
    else if (CRPT->INTSTS & (CRPT_INTSTS_AESEIF_Msk) || (CRPT->AES_STS & (CRPT_AES_STS_BUSERR_Msk | CRPT_AES_STS_CNTERR_Msk)))
        rt_kprintf("AES ERROR - CRPT->INTSTS-%08x, CRPT->AES_STS-%08x\n", CRPT->INTSTS, CRPT->AES_STS);

    /* Clear AES interrupt status */
    AES_CLR_INT_FLAG(CRPT);

    result = rt_mutex_release(&s_AES_mutex);
    RT_ASSERT(result == RT_EOK);

    return RT_EOK;
}

#if !defined(BSP_USING_TRNG)
//Using PRNG instead of TRNG
static void nu_prng_open(uint32_t u32Seed)
{
    rt_err_t result;

    result = rt_mutex_take(&s_PRNG_mutex, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    //Open PRNG 64 bits.
    PRNG_Open(CRPT, PRNG_KEY_SIZE_64, PRNG_SEED_RELOAD, u32Seed);

    result = rt_mutex_release(&s_PRNG_mutex);
    RT_ASSERT(result == RT_EOK);
}
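
/* Produce one 32-bit random value: run the 64-bit PRNG once and fold the two
   result words together with XOR. */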
static rt_uint32_t nu_prng_run(void)
{
    uint32_t au32RNGValue[2];
    rt_err_t result;

    result = rt_mutex_take(&s_PRNG_mutex, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    PRNG_Start(CRPT);
    while ((CRPT->PRNG_CTL & CRPT_PRNG_CTL_BUSY_Msk)) {};

    /* Clear PRNG interrupt status */
    PRNG_CLR_INT_FLAG(CRPT);

    PRNG_Read(CRPT, &au32RNGValue[0]);

    result = rt_mutex_release(&s_PRNG_mutex);
    RT_ASSERT(result == RT_EOK);

    return au32RNGValue[0] ^ au32RNGValue[1];
}
#endif
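
/* hwcrypto AES entry point: validates the key length, maps the requested
   hwcrypto type to an engine opmode, bounces unaligned or non-SRAM buffers
   through heap copies so the DMA engine always sees word-aligned SRAM
   addresses, and chains the CBC IV across calls for multi-part operation. */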
static rt_err_t nu_aes_crypt(struct hwcrypto_symmetric *symmetric_ctx, struct hwcrypto_symmetric_info *symmetric_info)
{
    uint32_t u32AESOpMode;
    uint32_t u32AESKeySize;
    unsigned char *in, *out;
    unsigned char in_align_flag = 0;
    unsigned char out_align_flag = 0;
    unsigned char iv_temp[16];

    RT_ASSERT(symmetric_ctx != RT_NULL);
    RT_ASSERT(symmetric_info != RT_NULL);

    if ((symmetric_info->length % 4) != 0)
    {
        return -RT_EINVAL;
    }

    //Checking key length
    if (symmetric_ctx->key_bitlen == 128)
    {
        u32AESKeySize = AES_KEY_SIZE_128;
    }
    else if (symmetric_ctx->key_bitlen == 192)
    {
        u32AESKeySize = AES_KEY_SIZE_192;
    }
    else if (symmetric_ctx->key_bitlen == 256)
    {
        u32AESKeySize = AES_KEY_SIZE_256;
    }
    else
    {
        return -RT_EINVAL;
    }

    //Select AES operation mode
    switch (symmetric_ctx->parent.type & (HWCRYPTO_MAIN_TYPE_MASK | HWCRYPTO_SUB_TYPE_MASK))
    {
    case HWCRYPTO_TYPE_AES_ECB:
        u32AESOpMode = AES_MODE_ECB;
        break;
    case HWCRYPTO_TYPE_AES_CBC:
        u32AESOpMode = AES_MODE_CBC;
        break;
    case HWCRYPTO_TYPE_AES_CFB:
        u32AESOpMode = AES_MODE_CFB;
        break;
    case HWCRYPTO_TYPE_AES_OFB:
        u32AESOpMode = AES_MODE_OFB;
        break;
    case HWCRYPTO_TYPE_AES_CTR:
        u32AESOpMode = AES_MODE_CTR;
        break;
    default:
        return -RT_ERROR;
    }

    in = (unsigned char *)symmetric_info->in;
    out = (unsigned char *)symmetric_info->out;

    //Checking in/out data buffer address not alignment or out of SRAM
    if (((rt_uint32_t)in % 4) != 0 || ((rt_uint32_t)in < SRAM_BASE) || ((rt_uint32_t)in > SRAM_END))
    {
        in = rt_malloc(symmetric_info->length);
        if (in == RT_NULL)
        {
            LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, symmetric_info->length);
            return -RT_ENOMEM;
        }

        rt_memcpy(in, symmetric_info->in, symmetric_info->length);
        in_align_flag = 1;
    }

    if (((rt_uint32_t)out % 4) != 0 || ((rt_uint32_t)out < SRAM_BASE) || ((rt_uint32_t)out > SRAM_END))
    {
        out = rt_malloc(symmetric_info->length);
        if (out == RT_NULL)
        {
            if (in_align_flag)
                rt_free(in);

            LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, symmetric_info->length);
            return -RT_ENOMEM;
        }

        out_align_flag = 1;
    }

    if ((u32AESOpMode == AES_MODE_CBC) && (symmetric_info->mode == HWCRYPTO_MODE_DECRYPT))
    {
        uint32_t loop;

        loop = (symmetric_info->length - 1) / 16;
        rt_memcpy(iv_temp, in + (loop * 16), 16);
    }

    nu_aes_crypt_run(symmetric_info->mode == HWCRYPTO_MODE_ENCRYPT ? TRUE : FALSE, u32AESOpMode, symmetric_ctx->key, u32AESKeySize, symmetric_ctx->iv, in, out, symmetric_info->length);

    if (u32AESOpMode == AES_MODE_CBC)
    {
        if (symmetric_info->mode == HWCRYPTO_MODE_DECRYPT)
        {
            rt_memcpy(symmetric_ctx->iv, iv_temp, 16);
        }
        else
        {
            uint32_t loop;

            loop = (symmetric_info->length - 1) / 16;
            rt_memcpy(symmetric_ctx->iv, out + (loop * 16), 16);
        }
    }

    if (out_align_flag)
    {
        rt_memcpy(symmetric_info->out, out, symmetric_info->length);
        rt_free(out);
    }

    if (in_align_flag)
    {
        rt_free(in);
    }

    return RT_EOK;
}
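
/* Low-level DES/TDES helper: packs key and IV into big-endian word pairs,
   selects 2-key or 3-key operation, and runs a single one-shot DMA transfer
   on channel 0 while holding the TDES mutex. */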
static rt_err_t nu_des_crypt_run(
    rt_bool_t bEncrypt,
    uint32_t u32OpMode,
    uint8_t *pu8Key,
    uint32_t u32KeySize,
    uint8_t *pu8IV,
    uint8_t *pu8InData,
    uint8_t *pu8OutData,
    uint32_t u32DataLen
)
{
    rt_err_t result;
    uint32_t au32SwapKey[3][2];
    uint32_t au32SwapIV[2];

    au32SwapKey[0][0] = nu_get32_be(&pu8Key[0]);
    au32SwapKey[0][1] = nu_get32_be(&pu8Key[4]);
    au32SwapKey[1][0] = nu_get32_be(&pu8Key[8]);
    au32SwapKey[1][1] = nu_get32_be(&pu8Key[12]);

    if (u32KeySize == NU_HWCRYPTO_DES_3KEYS)
    {
        au32SwapKey[2][0] = nu_get32_be(&pu8Key[16]);
        au32SwapKey[2][1] = nu_get32_be(&pu8Key[20]);
    }

    au32SwapIV[0] = nu_get32_be(&pu8IV[0]);
    au32SwapIV[1] = nu_get32_be(&pu8IV[4]);

    result = rt_mutex_take(&s_TDES_mutex, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    //Using Channel 0
    TDES_Open(CRPT, 0, bEncrypt, (u32OpMode & CRPT_TDES_CTL_TMODE_Msk), u32KeySize, u32OpMode, TDES_IN_OUT_WHL_SWAP);
    TDES_SetKey(CRPT, 0, au32SwapKey);
    TDES_SetInitVect(CRPT, 0, au32SwapIV[0], au32SwapIV[1]);

    //Setup TDES DMA
    TDES_SetDMATransfer(CRPT, 0, (uint32_t)pu8InData, (uint32_t)pu8OutData, u32DataLen);
    TDES_CLR_INT_FLAG(CRPT);

    //Start TDES encryption/decryption
    TDES_Start(CRPT, 0, CRYPTO_DMA_ONE_SHOT);

    /* Wait done */
    while (!(CRPT->INTSTS & CRPT_INTEN_TDESIEN_Msk)) {};

    result = rt_mutex_release(&s_TDES_mutex);
    RT_ASSERT(result == RT_EOK);

    return RT_EOK;
}
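
/* hwcrypto DES/3DES entry point: mirrors nu_aes_crypt() with a key-length
   check, mode selection, and heap bounce buffers for unaligned or non-SRAM
   data. */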
static rt_err_t nu_des_crypt(struct hwcrypto_symmetric *symmetric_ctx, struct hwcrypto_symmetric_info *symmetric_info)
{
    uint32_t u32DESOpMode;
    uint32_t u32DESKeySize;
    unsigned char *in, *out;
    unsigned char in_align_flag = 0;
    unsigned char out_align_flag = 0;

    if ((symmetric_info->length % 8) != 0)
    {
        return -RT_EINVAL;
    }

    //Checking key length
    if (symmetric_ctx->key_bitlen == 128 || symmetric_ctx->key_bitlen == 64)
    {
        u32DESKeySize = NU_HWCRYPTO_DES_NO3KEYS;
    }
    else if (symmetric_ctx->key_bitlen == 192)
    {
        u32DESKeySize = NU_HWCRYPTO_DES_3KEYS;
    }
    else
    {
        return -RT_EINVAL;
    }

    //Select DES operation mode
    switch (symmetric_ctx->parent.type & (HWCRYPTO_MAIN_TYPE_MASK | HWCRYPTO_SUB_TYPE_MASK))
    {
    case HWCRYPTO_TYPE_DES_ECB:
        u32DESOpMode = DES_MODE_ECB;
        break;
    case HWCRYPTO_TYPE_DES_CBC:
        u32DESOpMode = DES_MODE_CBC;
        break;
    case HWCRYPTO_TYPE_3DES_ECB:
        u32DESOpMode = TDES_MODE_ECB;
        break;
    case HWCRYPTO_TYPE_3DES_CBC:
        u32DESOpMode = TDES_MODE_CBC;
        break;
    default:
        return -RT_ERROR;
    }

    in = (unsigned char *)symmetric_info->in;
    out = (unsigned char *)symmetric_info->out;

    //Checking in/out data buffer address not alignment or out of SRAM
    if (((rt_uint32_t)in % 4) != 0 || ((rt_uint32_t)in < SRAM_BASE) || ((rt_uint32_t)in > SRAM_END))
    {
        in = rt_malloc(symmetric_info->length);
        if (in == RT_NULL)
        {
            LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, symmetric_info->length);
            return -RT_ENOMEM;
        }

        rt_memcpy(in, symmetric_info->in, symmetric_info->length);
        in_align_flag = 1;
    }

    if (((rt_uint32_t)out % 4) != 0 || ((rt_uint32_t)out < SRAM_BASE) || ((rt_uint32_t)out > SRAM_END))
    {
        out = rt_malloc(symmetric_info->length);
        if (out == RT_NULL)
        {
            if (in_align_flag)
                rt_free(in);

            LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, symmetric_info->length);
            return -RT_ENOMEM;
        }

        out_align_flag = 1;
    }

    nu_des_crypt_run(symmetric_info->mode == HWCRYPTO_MODE_ENCRYPT ? TRUE : FALSE, u32DESOpMode, symmetric_ctx->key, u32DESKeySize, symmetric_ctx->iv, in, out, symmetric_info->length);

    if (out_align_flag)
    {
        rt_memcpy(symmetric_info->out, out, symmetric_info->length);
        rt_free(out);
    }

    if (in_align_flag)
    {
        rt_free(in);
    }

    return RT_EOK;
}

#define CRPT_HMAC_CTL_DMAFIRST_Pos  (4)                                      /*!< CRPT_T::HMAC_CTL: DMAFIRST Position */
#define CRPT_HMAC_CTL_DMAFIRST_Msk  (0x1ul << CRPT_HMAC_CTL_DMAFIRST_Pos)    /*!< CRPT_T::HMAC_CTL: DMAFIRST Mask */
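
/* Feed one block run to the SHA/HMAC engine. The DMAFIRST handling differs by
   chip revision: on M480MD the first chunk is issued as a CONTINUE transfer,
   while on M480LD the HMAC_CTL DMAFIRST bit marks it instead. */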
static void SHABlockUpdate(uint32_t u32OpMode, uint32_t u32SrcAddr, uint32_t u32Len, uint32_t u32Mode)
{
    SHA_Open(CRPT, u32OpMode, SHA_IN_OUT_SWAP, 0);

    //Setup SHA DMA
    SHA_SetDMATransfer(CRPT, u32SrcAddr, u32Len);

    if (u32Mode == CRYPTO_DMA_FIRST)
    {
        if ((SYS->CSERVER & SYS_CSERVER_VERSION_Msk) == 0x0)
        {
            //M480MD version
            u32Mode = CRYPTO_DMA_CONTINUE;
        }
        else
        {
            //M480LD version
            CRPT->HMAC_CTL |= CRPT_HMAC_CTL_DMAFIRST_Msk;
        }
    }
    else
    {
        if ((SYS->CSERVER & SYS_CSERVER_VERSION_Msk) != 0x0)
        {
            //M480LD version
            CRPT->HMAC_CTL &= ~CRPT_HMAC_CTL_DMAFIRST_Msk;
        }
    }

    //Start SHA
    SHA_CLR_INT_FLAG(CRPT);
    SHA_Start(CRPT, u32Mode);

    /* Wait done */
    while (!(CRPT->INTSTS & CRPT_INTSTS_HMACIF_Msk)) {};

    if (CRPT->INTSTS & (CRPT_INTSTS_HMACEIF_Msk) || (CRPT->HMAC_STS & (CRPT_HMAC_STS_DMAERR_Msk)))
        rt_kprintf("SHA ERROR - CRPT->INTSTS-%08x, CRPT->HMAC_STS-%08x\n", CRPT->INTSTS, CRPT->HMAC_STS);

    /* Clear SHA interrupt status */
    SHA_CLR_INT_FLAG(CRPT);
}
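
/* Stream data into the SHA engine in u32BlockSize chunks. Unaligned input and
   block remainders are staged in the context temp buffer; whatever is left at
   the end stays buffered until nu_sha_finish() flushes it. */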
static rt_err_t nu_sha_hash_run(
    S_SHA_CONTEXT *psSHACtx,
    uint32_t u32OpMode,
    uint8_t *pu8InData,
    uint32_t u32DataLen
)
{
    rt_err_t result;

    RT_ASSERT(psSHACtx != RT_NULL);
    RT_ASSERT(pu8InData != RT_NULL);

    result = rt_mutex_take(&s_SHA_mutex, RT_WAITING_FOREVER);
    RT_ASSERT(result == RT_EOK);

    uint8_t *pu8SrcAddr = (uint8_t *)pu8InData;
    uint32_t u32CopyLen = 0;

    while ((psSHACtx->u32SHATempBufLen + u32DataLen) > psSHACtx->u32BlockSize)
    {
        if (psSHACtx->pu8SHATempBuf)
        {
            if (psSHACtx->u32SHATempBufLen == psSHACtx->u32BlockSize)
            {
                //Trigger SHA block update
                SHABlockUpdate(u32OpMode, (uint32_t)psSHACtx->pu8SHATempBuf, psSHACtx->u32BlockSize, psSHACtx->u32DMAMode);
                psSHACtx->u32DMAMode = CRYPTO_DMA_CONTINUE;

                //free SHATempBuff
                rt_free(psSHACtx->pu8SHATempBuf);
                psSHACtx->pu8SHATempBuf = NULL;
                psSHACtx->u32SHATempBufLen = 0;

                continue;
            }
            else
            {
                u32CopyLen = psSHACtx->u32BlockSize - psSHACtx->u32SHATempBufLen;

                if (u32DataLen < u32CopyLen)
                    u32CopyLen = u32DataLen;

                rt_memcpy(psSHACtx->pu8SHATempBuf + psSHACtx->u32SHATempBufLen, pu8SrcAddr, u32CopyLen);
                psSHACtx->u32SHATempBufLen += u32CopyLen;
                pu8SrcAddr += u32CopyLen;
                u32DataLen -= u32CopyLen;

                continue;
            }
        }

        if ((uint32_t)pu8SrcAddr & 3)   //address not aligned 4
        {
            psSHACtx->pu8SHATempBuf = rt_malloc(psSHACtx->u32BlockSize);
            if (psSHACtx->pu8SHATempBuf == RT_NULL)
            {
                LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, psSHACtx->u32BlockSize);
                result = rt_mutex_release(&s_SHA_mutex);
                RT_ASSERT(result == RT_EOK);
                return -RT_ENOMEM;
            }

            rt_memcpy(psSHACtx->pu8SHATempBuf, pu8SrcAddr, psSHACtx->u32BlockSize);
            psSHACtx->u32SHATempBufLen = psSHACtx->u32BlockSize;
            pu8SrcAddr += psSHACtx->u32BlockSize;
            u32DataLen -= psSHACtx->u32BlockSize;

            continue;
        }

        //Trigger SHA block update
        SHABlockUpdate(u32OpMode, (uint32_t)pu8SrcAddr, psSHACtx->u32BlockSize, psSHACtx->u32DMAMode);
        psSHACtx->u32DMAMode = CRYPTO_DMA_CONTINUE;

        pu8SrcAddr += psSHACtx->u32BlockSize;
        u32DataLen -= psSHACtx->u32BlockSize;
    }

    if (u32DataLen)
    {
        if (psSHACtx->pu8SHATempBuf == NULL)
        {
            psSHACtx->pu8SHATempBuf = rt_malloc(psSHACtx->u32BlockSize);
            if (psSHACtx->pu8SHATempBuf == RT_NULL)
            {
                LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, psSHACtx->u32BlockSize);
                result = rt_mutex_release(&s_SHA_mutex);
                RT_ASSERT(result == RT_EOK);
                return -RT_ENOMEM;
            }

            psSHACtx->u32SHATempBufLen = 0;
        }

        rt_memcpy(psSHACtx->pu8SHATempBuf, pu8SrcAddr, u32DataLen);
        psSHACtx->u32SHATempBufLen += u32DataLen;
    }

    result = rt_mutex_release(&s_SHA_mutex);
    RT_ASSERT(result == RT_EOK);

    return RT_EOK;
}

static rt_err_t nu_sha_update(struct hwcrypto_hash *hash_ctx, const rt_uint8_t *in, rt_size_t length)
{
    uint32_t u32SHAOpMode;
    unsigned char *nu_in;
    unsigned char in_align_flag = 0;

    RT_ASSERT(hash_ctx != RT_NULL);
    RT_ASSERT(in != RT_NULL);

    //Select SHA operation mode
    switch (hash_ctx->parent.type & (HWCRYPTO_MAIN_TYPE_MASK | HWCRYPTO_SUB_TYPE_MASK))
    {
    case HWCRYPTO_TYPE_SHA1:
        u32SHAOpMode = SHA_MODE_SHA1;
        break;
    case HWCRYPTO_TYPE_SHA224:
        u32SHAOpMode = SHA_MODE_SHA224;
        break;
    case HWCRYPTO_TYPE_SHA256:
        u32SHAOpMode = SHA_MODE_SHA256;
        break;
    case HWCRYPTO_TYPE_SHA384:
        u32SHAOpMode = SHA_MODE_SHA384;
        break;
    case HWCRYPTO_TYPE_SHA512:
        u32SHAOpMode = SHA_MODE_SHA512;
        break;
    default:
        return -RT_ERROR;
    }

    nu_in = (unsigned char *)in;

    //Checking in data buffer address not alignment or out of SRAM
    if (((rt_uint32_t)nu_in % 4) != 0 || ((rt_uint32_t)nu_in < SRAM_BASE) || ((rt_uint32_t)nu_in > SRAM_END))
    {
        nu_in = rt_malloc(length);
        if (nu_in == RT_NULL)
        {
            LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, length);
            return -RT_ENOMEM;
        }

        rt_memcpy(nu_in, in, length);
        in_align_flag = 1;
    }

    nu_sha_hash_run(hash_ctx->parent.contex, u32SHAOpMode, nu_in, length);

    if (in_align_flag)
    {
        rt_free(nu_in);
    }

    return RT_EOK;
}
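
/* Finalize the digest. The minimum output-length checks below correspond to
   the digest size counted in 32-bit words (SHA-1: 5, SHA-224: 7, SHA-256: 8,
   SHA-384: 12, SHA-512: 16). */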
static rt_err_t nu_sha_finish(struct hwcrypto_hash *hash_ctx, rt_uint8_t *out, rt_size_t length)
{
    unsigned char *nu_out;
    unsigned char out_align_flag = 0;
    uint32_t u32SHAOpMode;
    S_SHA_CONTEXT *psSHACtx = RT_NULL;

    RT_ASSERT(hash_ctx != RT_NULL);
    RT_ASSERT(out != RT_NULL);

    psSHACtx = hash_ctx->parent.contex;

    //Check SHA Hash value buffer length
    switch (hash_ctx->parent.type & (HWCRYPTO_MAIN_TYPE_MASK | HWCRYPTO_SUB_TYPE_MASK))
    {
    case HWCRYPTO_TYPE_SHA1:
        u32SHAOpMode = SHA_MODE_SHA1;
        if (length < 5UL)
        {
            return -RT_EINVAL;
        }
        break;
    case HWCRYPTO_TYPE_SHA224:
        u32SHAOpMode = SHA_MODE_SHA224;
        if (length < 7UL)
        {
            return -RT_EINVAL;
        }
        break;
    case HWCRYPTO_TYPE_SHA256:
        u32SHAOpMode = SHA_MODE_SHA256;
        if (length < 8UL)
        {
            return -RT_EINVAL;
        }
        break;
    case HWCRYPTO_TYPE_SHA384:
        u32SHAOpMode = SHA_MODE_SHA384;
        if (length < 12UL)
        {
            return -RT_EINVAL;
        }
        break;
    case HWCRYPTO_TYPE_SHA512:
        u32SHAOpMode = SHA_MODE_SHA512;
        if (length < 16UL)
        {
            return -RT_EINVAL;
        }
        break;
    default:
        return -RT_ERROR;
    }

    nu_out = (unsigned char *)out;

    //Checking out data buffer address alignment or not
    if (((rt_uint32_t)nu_out % 4) != 0)
    {
        nu_out = rt_malloc(length);
        if (nu_out == RT_NULL)
        {
            LOG_E("fun[%s] memory allocate %d bytes failed!", __FUNCTION__, length);
            return -RT_ENOMEM;
        }

        out_align_flag = 1;
    }

    if (psSHACtx->pu8SHATempBuf)
    {
        if (psSHACtx->u32DMAMode == CRYPTO_DMA_FIRST)
            SHABlockUpdate(u32SHAOpMode, (uint32_t)psSHACtx->pu8SHATempBuf, psSHACtx->u32SHATempBufLen, CRYPTO_DMA_ONE_SHOT);
        else
            SHABlockUpdate(u32SHAOpMode, (uint32_t)psSHACtx->pu8SHATempBuf, psSHACtx->u32SHATempBufLen, CRYPTO_DMA_LAST);

        //free SHATempBuf
        rt_free(psSHACtx->pu8SHATempBuf);
        psSHACtx->pu8SHATempBuf = RT_NULL;
        psSHACtx->u32SHATempBufLen = 0;
    }
    else
    {
        SHABlockUpdate(u32SHAOpMode, (uint32_t)NULL, 0, CRYPTO_DMA_LAST);
    }

    SHA_Read(CRPT, (uint32_t *)nu_out);

    if (out_align_flag)
    {
        rt_memcpy(out, nu_out, length);
        rt_free(nu_out);
    }

    return RT_EOK;
}

#if !defined(BSP_USING_TRNG)
static rt_uint32_t nu_prng_rand(struct hwcrypto_rng *ctx)
{
    return nu_prng_run();
}
#endif

static const struct hwcrypto_symmetric_ops nu_aes_ops =
{
    .crypt = nu_aes_crypt,
};

static const struct hwcrypto_symmetric_ops nu_des_ops =
{
    .crypt = nu_des_crypt,
};

static const struct hwcrypto_hash_ops nu_sha_ops =
{
    .update = nu_sha_update,
    .finish = nu_sha_finish,
};
#endif

/* CRC operation ------------------------------------------------------------*/
#if defined(BSP_USING_CRC)
static const struct hwcrypto_crc_ops nu_crc_ops =
{
    .update = nu_crc_update,
};
#endif

/* TRNG operation ------------------------------------------------------------*/
#if defined(BSP_USING_TRNG)
static const struct hwcrypto_rng_ops nu_rng_ops =
{
    .update = nu_trng_rand,
};
#elif defined(BSP_USING_CRYPTO)
static const struct hwcrypto_rng_ops nu_rng_ops =
{
    .update = nu_prng_rand,
};
#endif

/* Register crypto interface ----------------------------------------------------------*/
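/* Create one hwcrypto context: dispatch on the main type, hook up the matching
   ops table, allocate a per-session S_SHA_CONTEXT for hash types, and seed the
   PRNG when it stands in for a missing TRNG. */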
static rt_err_t nu_hwcrypto_create(struct rt_hwcrypto_ctx *ctx)
{
    rt_err_t res = RT_EOK;

    RT_ASSERT(ctx != RT_NULL);

    switch (ctx->type & HWCRYPTO_MAIN_TYPE_MASK)
    {
#if defined(BSP_USING_TRNG)
    case HWCRYPTO_TYPE_RNG:
    {
        ctx->contex = RT_NULL;
        //Setup RNG operation
        ((struct hwcrypto_rng *)ctx)->ops = &nu_rng_ops;
        break;
    }
#endif /* BSP_USING_TRNG */

#if defined(BSP_USING_CRC)
    case HWCRYPTO_TYPE_CRC:
    {
        ctx->contex = RT_NULL;
        //Setup CRC operation
        ((struct hwcrypto_crc *)ctx)->ops = &nu_crc_ops;
        break;
    }
#endif /* BSP_USING_CRC */

#if defined(BSP_USING_CRYPTO)
    case HWCRYPTO_TYPE_AES:
    {
        ctx->contex = RT_NULL;
        //Setup AES operation
        ((struct hwcrypto_symmetric *)ctx)->ops = &nu_aes_ops;
        break;
    }

    case HWCRYPTO_TYPE_DES:
    case HWCRYPTO_TYPE_3DES:
    {
        ctx->contex = RT_NULL;
        //Setup operation
        ((struct hwcrypto_symmetric *)ctx)->ops = &nu_des_ops;
        break;
    }

    case HWCRYPTO_TYPE_SHA1:
    case HWCRYPTO_TYPE_SHA2:
    {
        ctx->contex = rt_malloc(sizeof(S_SHA_CONTEXT));
        if (ctx->contex == RT_NULL)
            return -RT_ERROR;

        rt_memset(ctx->contex, 0, sizeof(S_SHA_CONTEXT));
        //Setup operation
        ((struct hwcrypto_hash *)ctx)->ops = &nu_sha_ops;
        break;
    }

#if !defined(BSP_USING_TRNG)
    case HWCRYPTO_TYPE_RNG:
    {
        ctx->contex = RT_NULL;
        ((struct hwcrypto_rng *)ctx)->ops = &nu_rng_ops;
#if defined(NU_PRNG_USE_SEED)
        nu_prng_open(NU_PRNG_SEED_VALUE);
#else
        nu_prng_open(rt_tick_get());
#endif
        break;
    }
#endif /* !BSP_USING_TRNG */
#endif /* BSP_USING_CRYPTO */

    default:
        res = -RT_ERROR;
        break;
    }

    nu_hwcrypto_reset(ctx);

    return res;
}

static void nu_hwcrypto_destroy(struct rt_hwcrypto_ctx *ctx)
{
    RT_ASSERT(ctx != RT_NULL);

    if (ctx->contex)
        rt_free(ctx->contex);
}

static rt_err_t nu_hwcrypto_clone(struct rt_hwcrypto_ctx *des, const struct rt_hwcrypto_ctx *src)
{
    rt_err_t res = RT_EOK;

    RT_ASSERT(des != RT_NULL);
    RT_ASSERT(src != RT_NULL);

    if (des->contex && src->contex)
    {
        rt_memcpy(des->contex, src->contex, sizeof(struct rt_hwcrypto_ctx));
    }
    else
        return -RT_EINVAL;

    return res;
}

static void nu_hwcrypto_reset(struct rt_hwcrypto_ctx *ctx)
{
    switch (ctx->type & HWCRYPTO_MAIN_TYPE_MASK)
    {
#if !defined(BSP_USING_TRNG)
    case HWCRYPTO_TYPE_RNG:
    {
#if defined(NU_PRNG_USE_SEED)
        nu_prng_open(NU_PRNG_SEED_VALUE);
#else
        nu_prng_open(rt_tick_get());
#endif
        break;
    }
#endif /* !BSP_USING_TRNG */

#if defined(BSP_USING_CRYPTO)
    case HWCRYPTO_TYPE_SHA1:
    case HWCRYPTO_TYPE_SHA2:
    {
        S_SHA_CONTEXT *psSHACtx = (S_SHA_CONTEXT *)ctx->contex;

        if (psSHACtx->pu8SHATempBuf)
        {
            rt_free(psSHACtx->pu8SHATempBuf);
        }

        psSHACtx->pu8SHATempBuf = RT_NULL;
        psSHACtx->u32SHATempBufLen = 0;
        psSHACtx->u32DMAMode = CRYPTO_DMA_FIRST;

        if ((ctx->type == HWCRYPTO_TYPE_SHA384) || (ctx->type == HWCRYPTO_TYPE_SHA512))
        {
            psSHACtx->u32BlockSize = 128;
        }
        else
        {
            psSHACtx->u32BlockSize = 64;
        }
        break;
    }
#endif

    default:
        break;
    }
}
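
/*
 * Illustrative use from application code (a sketch against the generic
 * RT-Thread hwcrypto API, not part of this driver; names follow the hwcrypto
 * component as we understand it and may need adjusting to the version in use):
 *
 *   struct rt_hwcrypto_ctx *ctx;
 *   rt_uint8_t key[16], iv[16], plain[32], cipher[32];
 *
 *   ctx = rt_hwcrypto_symmetric_create(rt_hwcrypto_dev_default(), HWCRYPTO_TYPE_AES_CBC);
 *   rt_hwcrypto_symmetric_setkey(ctx, key, 128);   // key_bitlen must be 128/192/256
 *   rt_hwcrypto_symmetric_setiv(ctx, iv, 16);
 *   rt_hwcrypto_symmetric_crypt(ctx, HWCRYPTO_MODE_ENCRYPT, sizeof(plain), plain, cipher);
 *   rt_hwcrypto_symmetric_destroy(ctx);
 */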
/* Init and register nu_hwcrypto_dev */
int nu_hwcrypto_device_init(void)
{
    rt_err_t result;
    static struct rt_hwcrypto_device nu_hwcrypto_dev;

    nu_hwcrypto_dev.ops = &nu_hwcrypto_ops;
    nu_hwcrypto_dev.id = 0;
    nu_hwcrypto_dev.user_data = &nu_hwcrypto_dev;

#if defined(BSP_USING_CRYPTO)
    nu_crypto_init();
#endif

#if defined(BSP_USING_CRC)
    nu_crc_init();
#endif

#if defined(BSP_USING_TRNG)
    nu_trng_init();
#endif

    /* register hwcrypto operation */
    result = rt_hwcrypto_register(&nu_hwcrypto_dev, RT_HWCRYPTO_DEFAULT_NAME);
    RT_ASSERT(result == RT_EOK);

    return 0;
}
INIT_DEVICE_EXPORT(nu_hwcrypto_device_init);

#endif //#if ((defined(BSP_USING_CRYPTO) || defined(BSP_USING_TRNG) || defined(BSP_USING_CRC)) && defined(RT_USING_HWCRYPTO))