cc310_backend_aes.c

/**
 * Copyright (c) 2018 - 2020, Nordic Semiconductor ASA
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form, except as embedded into a Nordic
 *    Semiconductor ASA integrated circuit in a product or a software update for
 *    such product, must reproduce the above copyright notice, this list of
 *    conditions and the following disclaimer in the documentation and/or other
 *    materials provided with the distribution.
 *
 * 3. Neither the name of Nordic Semiconductor ASA nor the names of its
 *    contributors may be used to endorse or promote products derived from this
 *    software without specific prior written permission.
 *
 * 4. This software, with or without modification, must only be used with a
 *    Nordic Semiconductor ASA integrated circuit.
 *
 * 5. Any software provided in binary form under this license must not be reverse
 *    engineered, decompiled, modified and/or disassembled.
 *
 * THIS SOFTWARE IS PROVIDED BY NORDIC SEMICONDUCTOR ASA "AS IS" AND ANY EXPRESS
 * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL NORDIC SEMICONDUCTOR ASA OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
 * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#include "sdk_common.h"
#include <drivers/nrfx_common.h>

#if NRF_MODULE_ENABLED(NRF_CRYPTO)

#include <stdbool.h>

#include "ssi_aes_error.h"
#include "cc310_backend_aes.h"
#include "cc310_backend_mutex.h"
#include "cc310_backend_shared.h"

#if NRF_MODULE_ENABLED(NRF_CRYPTO_CC310_AES)

/**@internal @brief Type declarations of templates matching all possible context sizes
 *                  for this backend.
 */
typedef struct
{
    nrf_crypto_aes_internal_context_t header;   /**< Common header for context. */
    SaSiAesUserContext_t              context;  /**< AES context internal to CC310. */
    nrf_crypto_backend_aes_ctx_t      backend;  /**< Backend-specific internal context. */
} nrf_crypto_backend_cc310_aes_any_context_t;

/**@internal @brief Type declarations of templates matching all possible context sizes
 *                  for this backend.
 */
typedef union
{
    nrf_crypto_backend_cc310_aes_any_context_t any;      /**< Common for all contexts. */

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_ECB)
    nrf_crypto_backend_aes_ecb_context_t       ecb;
#endif
#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC)
    nrf_crypto_backend_aes_cbc_context_t       cbc;
#endif
#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CTR)
    nrf_crypto_backend_aes_ctr_context_t       ctr;
#endif
#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC_MAC)
    nrf_crypto_backend_aes_cbc_mac_context_t   cbc_mac;
#endif
#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CMAC)
    nrf_crypto_backend_aes_cmac_context_t      cmac;
#endif
} nrf_crypto_backend_cc310_aes_context_t;
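
/**@internal @brief Maps a CC310 (SaSi) AES error code to the corresponding nrf_crypto error code.
 *                  Unrecognized errors are reported as NRF_ERROR_CRYPTO_INTERNAL.
 */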
static ret_code_t result_get(SaSiError_t error)
{
    ret_code_t ret_val;

    switch (error)
    {
        case SASI_SUCCESS:
            ret_val = NRF_SUCCESS;
            break;

        case SASI_AES_INVALID_USER_CONTEXT_POINTER_ERROR:
            ret_val = NRF_ERROR_CRYPTO_CONTEXT_NULL;
            break;

        case SASI_AES_ILLEGAL_KEY_SIZE_ERROR:
        case SASI_AES_DATA_IN_SIZE_ILLEGAL:
        case SASI_AES_DATA_IN_BUFFER_SIZE_ERROR:
            ret_val = NRF_ERROR_CRYPTO_INPUT_LENGTH;
            break;

        case SASI_AES_INVALID_IV_OR_TWEAK_PTR_ERROR:
        case SASI_AES_INVALID_KEY_POINTER_ERROR:
        case SASI_AES_DATA_IN_POINTER_INVALID_ERROR:
            ret_val = NRF_ERROR_CRYPTO_INPUT_NULL;
            break;

        case SASI_AES_ILLEGAL_OPERATION_MODE_ERROR:
        case SASI_AES_KEY_TYPE_NOT_SUPPORTED_ERROR:
        case SASI_AES_INVALID_ENCRYPT_MODE_ERROR:
        case SASI_AES_ILLEGAL_PADDING_TYPE_ERROR:
        case SASI_AES_INCORRECT_PADDING_ERROR:
        case SASI_AES_DECRYPTION_NOT_ALLOWED_ON_THIS_MODE:
        case SASI_AES_ADDITIONAL_BLOCK_NOT_PERMITTED_ERROR:
        case SASI_AES_IS_NOT_SUPPORTED:
            ret_val = NRF_ERROR_CRYPTO_FEATURE_UNAVAILABLE;
            break;

        case SASI_AES_DATA_OUT_BUFFER_SIZE_ERROR:
            ret_val = NRF_ERROR_CRYPTO_OUTPUT_LENGTH;
            break;

        case SASI_AES_DATA_OUT_POINTER_INVALID_ERROR:
        case SASI_AES_DATA_OUT_SIZE_POINTER_INVALID_ERROR:
            ret_val = NRF_ERROR_CRYPTO_OUTPUT_NULL;
            break;

        case SASI_AES_CTX_SIZES_ERROR:
        default:
            ret_val = NRF_ERROR_CRYPTO_INTERNAL;
            break;
    }

    return ret_val;
}
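
/**@internal @brief Translates the AES mode from the context info structure into the CC310
 *                  operation mode and checks that MAC modes are only used for MAC calculation.
 */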
static ret_code_t params_validate(nrf_crypto_backend_cc310_aes_context_t const * const p_ctx,
                                  SaSiAesOperationMode_t * p_mode,
                                  nrf_crypto_operation_t operation)
{
    ret_code_t ret_val = NRF_SUCCESS;

    switch (p_ctx->any.header.p_info->mode)
    {
#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_ECB)
        case NRF_CRYPTO_AES_MODE_ECB:
        case NRF_CRYPTO_AES_MODE_ECB_PAD_PCKS7:
            *p_mode = SASI_AES_MODE_ECB;
            break;
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC)
        case NRF_CRYPTO_AES_MODE_CBC:
        case NRF_CRYPTO_AES_MODE_CBC_PAD_PCKS7:
            *p_mode = SASI_AES_MODE_CBC;
            break;
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CTR)
        case NRF_CRYPTO_AES_MODE_CTR:
            *p_mode = SASI_AES_MODE_CTR;
            break;
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC_MAC)
        case NRF_CRYPTO_AES_MODE_CBC_MAC:
        case NRF_CRYPTO_AES_MODE_CBC_MAC_PAD_PCKS7:
            *p_mode = SASI_AES_MODE_CBC_MAC;
            VERIFY_TRUE((operation == NRF_CRYPTO_MAC_CALCULATE), NRF_ERROR_CRYPTO_INVALID_PARAM);
            break;
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CMAC)
        case NRF_CRYPTO_AES_MODE_CMAC:
            *p_mode = SASI_AES_MODE_CMAC;
            VERIFY_TRUE((operation == NRF_CRYPTO_MAC_CALCULATE), NRF_ERROR_CRYPTO_INVALID_PARAM);
            break;
#endif

        default:
            ret_val = NRF_ERROR_CRYPTO_FEATURE_UNAVAILABLE;
            break;
    }

    return ret_val;
}
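
/**@internal @brief Initializes an AES operation on the CC310: locks the hardware mutex, checks
 *                  that the context is placed in RAM (required by the CC310 DMA), verifies the
 *                  128-bit key size, resolves mode and direction, and calls SaSi_AesInit.
 *                  Padding is always disabled here; padded modes are handled in software by the
 *                  *_padding_finalize functions below.
 */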
static ret_code_t backend_cc310_init(void * const p_context, nrf_crypto_operation_t operation)
{
    SaSiError_t            result;
    ret_code_t             ret_val;
    bool                   mutex_locked;
    SaSiAesOperationMode_t mode;
    SaSiAesEncryptMode_t   operation_cc310;

    nrf_crypto_backend_cc310_aes_context_t * p_ctx =
        (nrf_crypto_backend_cc310_aes_context_t *)p_context;

    mutex_locked = cc310_backend_mutex_trylock();
    VERIFY_TRUE(mutex_locked, NRF_ERROR_CRYPTO_BUSY);

    if (!nrfx_is_in_ram(&p_ctx->any.context))
    {
        ret_val = NRF_ERROR_CRYPTO_INPUT_LOCATION;
        goto exit;
    }

    if (p_ctx->any.header.p_info->key_size != NRF_CRYPTO_KEY_SIZE_128)
    {
        ret_val = NRF_ERROR_CRYPTO_KEY_SIZE;
        goto exit;
    }

    ret_val = params_validate(p_ctx, &mode, operation);
    if (ret_val != NRF_SUCCESS)
    {
        goto exit;
    }

    if (operation == NRF_CRYPTO_DECRYPT)
    {
        operation_cc310 = SASI_AES_DECRYPT;
    }
    else if ((operation == NRF_CRYPTO_ENCRYPT) || (operation == NRF_CRYPTO_MAC_CALCULATE))
    {
        operation_cc310 = SASI_AES_ENCRYPT;
    }
    else
    {
        ret_val = NRF_ERROR_CRYPTO_INVALID_PARAM;
        goto exit;
    }

    p_ctx->any.backend.operation = operation;

    result = SaSi_AesInit(&p_ctx->any.context,
                          operation_cc310,
                          mode,
                          SASI_AES_PADDING_NONE); /* CC310 does not support padding */

    ret_val = result_get(result);

exit:
    cc310_backend_mutex_unlock();
    return ret_val;
}
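
/**@internal @brief Releases the CC310 AES context with SaSi_AesFree, under the hardware mutex. */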
static ret_code_t backend_cc310_uninit(void * const p_context)
{
    SaSiError_t result;
    ret_code_t  ret_val;

    nrf_crypto_backend_cc310_aes_context_t * p_ctx =
        (nrf_crypto_backend_cc310_aes_context_t *)p_context;

    bool mutex_locked = cc310_backend_mutex_trylock();
    VERIFY_TRUE(mutex_locked, NRF_ERROR_CRYPTO_BUSY);

    result = SaSi_AesFree(&p_ctx->any.context);

    ret_val = result_get(result);

    cc310_backend_mutex_unlock();

    return ret_val;
}
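
/**@internal @brief Loads a user key into the CC310 context. The key buffer must reside in RAM;
 *                  the key size from the info structure is converted from bits to bytes.
 */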
static ret_code_t backend_cc310_key_set(void * const p_context, uint8_t * p_key)
{
    SaSiError_t          result;
    ret_code_t           ret_val;
    bool                 mutex_locked;
    SaSiAesUserKeyData_t key_data;

    nrf_crypto_backend_cc310_aes_context_t * p_ctx =
        (nrf_crypto_backend_cc310_aes_context_t *)p_context;

    mutex_locked = cc310_backend_mutex_trylock();
    VERIFY_TRUE(mutex_locked, NRF_ERROR_CRYPTO_BUSY);

    if (!nrfx_is_in_ram(p_key))
    {
        ret_val = NRF_ERROR_CRYPTO_INPUT_LOCATION;
        goto exit;
    }

    key_data.pKey    = p_key;
    key_data.keySize = (p_ctx->any.header.p_info->key_size) >> 3; // change bits to bytes

    result = SaSi_AesSetKey(&p_ctx->any.context,
                            SASI_AES_USER_KEY,
                            &key_data,
                            sizeof(key_data));

    ret_val = result_get(result);

exit:
    cc310_backend_mutex_unlock();
    return ret_val;
}

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC) || \
    NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CTR) || \
    NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC_MAC)
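
/**@internal @brief Sets the IV (or initial counter value) for the current operation. The IV
 *                  buffer must reside in RAM.
 */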
static ret_code_t backend_cc310_iv_set(void * const p_context, uint8_t * p_iv)
{
    SaSiError_t result;
    ret_code_t  ret_val;
    bool        mutex_locked;

    nrf_crypto_backend_cc310_aes_context_t * p_ctx =
        (nrf_crypto_backend_cc310_aes_context_t *)p_context;

    mutex_locked = cc310_backend_mutex_trylock();
    VERIFY_TRUE(mutex_locked, NRF_ERROR_CRYPTO_BUSY);

    if (!nrfx_is_in_ram(p_iv))
    {
        ret_val = NRF_ERROR_CRYPTO_INPUT_LOCATION;
        goto exit;
    }

    result = SaSi_AesSetIv(&p_ctx->any.context, p_iv);

    ret_val = result_get(result);

exit:
    cc310_backend_mutex_unlock();
    return ret_val;
}
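
/**@internal @brief Reads the current IV from the CC310 context. If the operation has already
 *                  been finalized, the IV cached in the backend context is returned instead.
 */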
static ret_code_t backend_cc310_iv_get(void * const p_context, uint8_t * p_iv)
{
    SaSiError_t result;
    ret_code_t  ret_val = NRF_ERROR_CRYPTO_INTERNAL;
    bool        mutex_locked;

    nrf_crypto_backend_cc310_aes_context_t * p_ctx =
        (nrf_crypto_backend_cc310_aes_context_t *)p_context;

    mutex_locked = cc310_backend_mutex_trylock();
    VERIFY_TRUE(mutex_locked, NRF_ERROR_CRYPTO_BUSY);

    if (!nrfx_is_in_ram(p_iv))
    {
        ret_val = NRF_ERROR_CRYPTO_INPUT_LOCATION;
        goto exit;
    }

    result = SaSi_AesGetIv(&p_ctx->any.context, p_iv);

    /* The code below allows the IV to be read after nrf_crypto_aes_finalize has been called. */
    if (result == SASI_AES_ILLEGAL_OPERATION_MODE_ERROR)
    {
        if (p_ctx->any.header.init_value == NRF_CRYPTO_AES_UNINIT_MAGIC_VALUE)
        {
            memcpy(p_iv, p_ctx->any.backend.iv, NRF_CRYPTO_MBEDTLS_AES_IV_SIZE);
            ret_val = NRF_SUCCESS;
        }
    }
    else
    {
        ret_val = result_get(result);
    }

exit:
    cc310_backend_mutex_unlock();
    return ret_val;
}
#endif
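
/**@internal @brief Feeds input data to the CC310 in chunks of at most
 *                  CC310_MAX_LENGTH_DMA_AES_OPERATIONS bytes. For MAC calculation the output
 *                  pointer is not advanced, as the result is a single block.
 */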
static ret_code_t backend_cc310_update(void * const p_context,
                                       uint8_t * p_data_in,
                                       size_t data_size,
                                       uint8_t * p_data_out)
{
    SaSiError_t result;
    ret_code_t  ret_val;
    bool        mutex_locked;
    size_t      size;
    size_t      offset = 0;

    nrf_crypto_backend_cc310_aes_context_t * p_ctx =
        (nrf_crypto_backend_cc310_aes_context_t *)p_context;

    mutex_locked = cc310_backend_mutex_trylock();
    VERIFY_TRUE(mutex_locked, NRF_ERROR_CRYPTO_BUSY);

    if (!nrfx_is_in_ram(p_data_in) || !nrfx_is_in_ram(p_data_out))
    {
        ret_val = NRF_ERROR_CRYPTO_INPUT_LOCATION;
        goto exit;
    }

    do
    {
        /* CC310 allows only 64 kB blocks, so the operation must be divided. */
        if (data_size > CC310_MAX_LENGTH_DMA_AES_OPERATIONS)
        {
            size       = CC310_MAX_LENGTH_DMA_AES_OPERATIONS;
            data_size -= CC310_MAX_LENGTH_DMA_AES_OPERATIONS;
        }
        else
        {
            size      = data_size;
            data_size = 0;
        }

        if (p_ctx->any.backend.operation == NRF_CRYPTO_MAC_CALCULATE)
        {
            result = SaSi_AesBlock(&p_ctx->any.context,
                                   p_data_in + offset,
                                   size,
                                   p_data_out);
        }
        else
        {
            result = SaSi_AesBlock(&p_ctx->any.context,
                                   p_data_in + offset,
                                   size,
                                   p_data_out + offset);
        }

        offset += size;

        ret_val = result_get(result);
    } while ((data_size > 0) && (ret_val == NRF_SUCCESS));

exit:
    cc310_backend_mutex_unlock();
    return ret_val;
}
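
/**@internal @brief Processes the remaining input and completes the operation with
 *                  SaSi_AesFinish. Input larger than CC310_MAX_LENGTH_DMA_AES_OPERATIONS bytes
 *                  is split into chunks first. For CBC and CTR the resulting IV is cached so
 *                  that it can still be read after the operation is finalized.
 */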
static ret_code_t backend_cc310_finalize(void * const p_context,
                                         uint8_t * p_data_in,
                                         size_t data_size,
                                         uint8_t * p_data_out,
                                         size_t * p_data_out_size)
{
    SaSiError_t result;
    ret_code_t  ret_val;
    bool        mutex_locked;
    size_t      size;
    size_t      offset = 0;

    nrf_crypto_backend_cc310_aes_context_t * p_ctx =
        (nrf_crypto_backend_cc310_aes_context_t *)p_context;

    mutex_locked = cc310_backend_mutex_trylock();
    VERIFY_TRUE(mutex_locked, NRF_ERROR_CRYPTO_BUSY);

    if (*p_data_out_size < data_size)
    {
        ret_val = NRF_ERROR_CRYPTO_OUTPUT_LENGTH;
        goto exit;
    }

    /* This function does not support padding. */
    if (((data_size & 0xF) != 0) &&
        (p_ctx->any.header.p_info->mode != NRF_CRYPTO_AES_MODE_CTR))
    {
        ret_val = NRF_ERROR_CRYPTO_INPUT_LENGTH;
        goto exit;
    }

    if (!nrfx_is_in_ram(p_data_in) || !nrfx_is_in_ram(p_data_out))
    {
        ret_val = NRF_ERROR_CRYPTO_INPUT_LOCATION;
        goto exit;
    }

    /* CC310 allows only 64 kB blocks, so the operation must be divided. */
    while (data_size > CC310_MAX_LENGTH_DMA_AES_OPERATIONS)
    {
        size       = CC310_MAX_LENGTH_DMA_AES_OPERATIONS;
        data_size -= CC310_MAX_LENGTH_DMA_AES_OPERATIONS;

        result = SaSi_AesBlock(&p_ctx->any.context,
                               p_data_in + offset,
                               size,
                               p_data_out + offset);

        offset += size;

        ret_val = result_get(result);

        if (ret_val != NRF_SUCCESS)
        {
            goto exit;
        }
    }

    /* Calculate the remaining space in the output buffer. */
    *p_data_out_size -= offset;

    result = SaSi_AesFinish(&p_ctx->any.context,
                            data_size,
                            p_data_in + offset,
                            data_size,
                            p_data_out + offset,
                            p_data_out_size);

    ret_val = result_get(result);

    if (ret_val == NRF_SUCCESS)
    {
        /* Update information about the size of the encrypted data. */
        *p_data_out_size += offset;
    }

    /* Store the IV value in case it is needed after the finalize operation. */
    if ((p_ctx->any.header.p_info->mode == NRF_CRYPTO_AES_MODE_CBC) ||
        (p_ctx->any.header.p_info->mode == NRF_CRYPTO_AES_MODE_CTR))
    {
        result  = SaSi_AesGetIv(&p_ctx->any.context, &p_ctx->any.backend.iv[0]);
        ret_val = result_get(result);
    }

exit:
    cc310_backend_mutex_unlock();
    return ret_val;
}

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CMAC) || \
    NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC_MAC)
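
/**@internal @brief Finalizes a CMAC or CBC-MAC calculation. The result is a single 16-byte
 *                  block, so the output pointer is not advanced while the input is processed
 *                  in chunks.
 */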
static ret_code_t backend_cc310_mac_finalize(void * const p_context,
                                             uint8_t * p_data_in,
                                             size_t data_size,
                                             uint8_t * p_data_out,
                                             size_t * p_data_out_size)
{
    SaSiError_t result;
    ret_code_t  ret_val;
    bool        mutex_locked;
    size_t      size;
    size_t      offset = 0;

    nrf_crypto_backend_cc310_aes_context_t * p_ctx =
        (nrf_crypto_backend_cc310_aes_context_t *)p_context;

    mutex_locked = cc310_backend_mutex_trylock();
    VERIFY_TRUE(mutex_locked, NRF_ERROR_CRYPTO_BUSY);

    if (*p_data_out_size < NRF_CRYPTO_AES_BLOCK_SIZE)
    {
        ret_val = NRF_ERROR_CRYPTO_OUTPUT_LENGTH;
        goto exit;
    }

    if (!nrfx_is_in_ram(p_data_in) || !nrfx_is_in_ram(p_data_out))
    {
        ret_val = NRF_ERROR_CRYPTO_INPUT_LOCATION;
        goto exit;
    }

    /* This function does not support padding for CBC-MAC. */
    if (((data_size & 0xF) != 0) &&
        (NRF_CRYPTO_AES_MODE_CBC_MAC == p_ctx->any.header.p_info->mode))
    {
        ret_val = NRF_ERROR_CRYPTO_INPUT_LENGTH;
        goto exit;
    }

    /* CC310 allows only 64 kB blocks, so the operation must be divided. */
    while (data_size > CC310_MAX_LENGTH_DMA_AES_OPERATIONS)
    {
        size       = CC310_MAX_LENGTH_DMA_AES_OPERATIONS;
        data_size -= CC310_MAX_LENGTH_DMA_AES_OPERATIONS;

        result = SaSi_AesBlock(&p_ctx->any.context,
                               p_data_in + offset,
                               size,
                               p_data_out);

        offset += size;

        ret_val = result_get(result);

        if (ret_val != NRF_SUCCESS)
        {
            goto exit;
        }
    }

    result = SaSi_AesFinish(&p_ctx->any.context,
                            data_size,
                            p_data_in + offset,
                            data_size,
                            p_data_out,
                            p_data_out_size);

    ret_val = result_get(result);

    if (ret_val == NRF_SUCCESS)
    {
        /* Update information about the size of the calculated MAC. */
        *p_data_out_size = NRF_CRYPTO_AES_BLOCK_SIZE;
    }

    /* Store the IV value in case it is needed after the finalize operation. */
    if (p_ctx->any.header.p_info->mode == NRF_CRYPTO_AES_MODE_CBC_MAC_PAD_PCKS7)
    {
        result  = SaSi_AesGetIv(&p_ctx->any.context, &p_ctx->any.backend.iv[0]);
        ret_val = result_get(result);
    }

exit:
    cc310_backend_mutex_unlock();
    return ret_val;
}
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC_MAC)
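
/**@internal @brief Finalizes a CBC-MAC calculation with PKCS#7 padding: complete blocks are
 *                  processed with backend_cc310_update, the remaining bytes are padded in
 *                  software, and the padded block is passed to backend_cc310_mac_finalize.
 */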
static ret_code_t backend_cc310_cbc_mac_padding_finalize(void * const p_context,
                                                         uint8_t * p_data_in,
                                                         size_t data_size,
                                                         uint8_t * p_data_out,
                                                         size_t * p_data_out_size)
{
    ret_code_t ret_val;
    uint8_t    padding_buffer[NRF_CRYPTO_AES_BLOCK_SIZE] = {0};
    uint8_t    msg_ending = (uint8_t)(data_size & (size_t)0x0F);

    if (*p_data_out_size < NRF_CRYPTO_AES_BLOCK_SIZE)
    {
        /* output buffer too small */
        return NRF_ERROR_CRYPTO_OUTPUT_LENGTH;
    }

    data_size -= msg_ending;

    if (data_size > 0)
    {
        ret_val = backend_cc310_update(p_context,
                                       p_data_in,
                                       data_size,
                                       p_data_out);
        VERIFY_SUCCESS(ret_val);
    }

    ret_val = padding_pkcs7_add(&padding_buffer[0],
                                p_data_in + data_size,
                                msg_ending);
    VERIFY_SUCCESS(ret_val);

    ret_val = backend_cc310_mac_finalize(p_context,
                                         &padding_buffer[0],
                                         NRF_CRYPTO_AES_BLOCK_SIZE,
                                         p_data_out,
                                         p_data_out_size);
    VERIFY_SUCCESS(ret_val);

    return ret_val;
}
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC) || \
    NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_ECB)
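
/**@internal @brief Finalizes CBC and ECB operations that use PKCS#7 padding. For decryption the
 *                  padding is removed from the output; for encryption the complete blocks are
 *                  processed first and the final block is padded in software before encryption.
 */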
static ret_code_t backend_cc310_padding_finalize(void * const p_context,
                                                 uint8_t * p_data_in,
                                                 size_t data_size,
                                                 uint8_t * p_data_out,
                                                 size_t * p_data_out_size)
{
    SaSiError_t result;
    ret_code_t  ret_val;
    uint8_t     padding_buffer[NRF_CRYPTO_AES_BLOCK_SIZE] = {0};
    uint8_t     msg_ending = (uint8_t)(data_size & (size_t)0x0F);
    size_t      buff_out_size;

    nrf_crypto_backend_cc310_aes_context_t * p_ctx =
        (nrf_crypto_backend_cc310_aes_context_t *)p_context;

    if (p_ctx->any.backend.operation == NRF_CRYPTO_DECRYPT)
    {
        ret_val = backend_cc310_finalize(p_context,
                                         p_data_in,
                                         data_size,
                                         p_data_out,
                                         p_data_out_size);
        VERIFY_SUCCESS(ret_val);

        ret_val = padding_pkcs7_remove(p_data_out,
                                       p_data_out_size);
        return ret_val;
    }

    /* -------------- ENCRYPTION -------------- */

    data_size -= msg_ending;

    if (*p_data_out_size < (data_size + NRF_CRYPTO_AES_BLOCK_SIZE))
    {
        /* no space for padding */
        return NRF_ERROR_CRYPTO_OUTPUT_LENGTH;
    }

    if (data_size > 0)
    {
        ret_val = backend_cc310_update(p_context,
                                       p_data_in,
                                       data_size,
                                       p_data_out);
        VERIFY_SUCCESS(ret_val);
    }

    ret_val = padding_pkcs7_add(&padding_buffer[0],
                                p_data_in + data_size,
                                msg_ending);
    VERIFY_SUCCESS(ret_val);

    buff_out_size = *p_data_out_size - data_size;

    ret_val = backend_cc310_finalize(p_context,
                                     &padding_buffer[0],
                                     NRF_CRYPTO_AES_BLOCK_SIZE,
                                     p_data_out + data_size,
                                     &buff_out_size);
    VERIFY_SUCCESS(ret_val);

    *p_data_out_size = buff_out_size + data_size;

    /* Store the IV value in case it is needed after the finalize operation. */
    if (p_ctx->any.header.p_info->mode == NRF_CRYPTO_AES_MODE_CBC_PAD_PCKS7)
    {
        result  = SaSi_AesGetIv(&p_ctx->any.context, &p_ctx->any.backend.iv[0]);
        ret_val = result_get(result);
    }

    return ret_val;
}
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC)
nrf_crypto_aes_info_t const g_nrf_crypto_aes_cbc_128_info =
{
    .mode         = NRF_CRYPTO_AES_MODE_CBC,
    .key_size     = NRF_CRYPTO_KEY_SIZE_128,
    .context_size = sizeof(nrf_crypto_backend_aes_cbc_context_t),

    .init_fn      = backend_cc310_init,
    .uninit_fn    = backend_cc310_uninit,
    .key_set_fn   = backend_cc310_key_set,
    .iv_set_fn    = backend_cc310_iv_set,
    .iv_get_fn    = backend_cc310_iv_get,
    .update_fn    = backend_cc310_update,
    .finalize_fn  = backend_cc310_finalize
};

nrf_crypto_aes_info_t const g_nrf_crypto_aes_cbc_128_pad_pkcs7_info =
{
    .mode         = NRF_CRYPTO_AES_MODE_CBC_PAD_PCKS7,
    .key_size     = NRF_CRYPTO_KEY_SIZE_128,
    .context_size = sizeof(nrf_crypto_backend_aes_cbc_context_t),

    .init_fn      = backend_cc310_init,
    .uninit_fn    = backend_cc310_uninit,
    .key_set_fn   = backend_cc310_key_set,
    .iv_set_fn    = backend_cc310_iv_set,
    .iv_get_fn    = backend_cc310_iv_get,
    .update_fn    = backend_cc310_update,
    .finalize_fn  = backend_cc310_padding_finalize
};
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CTR)
nrf_crypto_aes_info_t const g_nrf_crypto_aes_ctr_128_info =
{
    .mode         = NRF_CRYPTO_AES_MODE_CTR,
    .key_size     = NRF_CRYPTO_KEY_SIZE_128,
    .context_size = sizeof(nrf_crypto_backend_aes_ctr_context_t),

    .init_fn      = backend_cc310_init,
    .uninit_fn    = backend_cc310_uninit,
    .key_set_fn   = backend_cc310_key_set,
    .iv_set_fn    = backend_cc310_iv_set,
    .iv_get_fn    = backend_cc310_iv_get,
    .update_fn    = backend_cc310_update,
    .finalize_fn  = backend_cc310_finalize
};
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_ECB)
nrf_crypto_aes_info_t const g_nrf_crypto_aes_ecb_128_info =
{
    .mode         = NRF_CRYPTO_AES_MODE_ECB,
    .key_size     = NRF_CRYPTO_KEY_SIZE_128,
    .context_size = sizeof(nrf_crypto_backend_aes_ecb_context_t),

    .init_fn      = backend_cc310_init,
    .uninit_fn    = backend_cc310_uninit,
    .key_set_fn   = backend_cc310_key_set,
    .iv_set_fn    = NULL,
    .iv_get_fn    = NULL,
    .update_fn    = backend_cc310_update,
    .finalize_fn  = backend_cc310_finalize
};

nrf_crypto_aes_info_t const g_nrf_crypto_aes_ecb_128_pad_pkcs7_info =
{
    .mode         = NRF_CRYPTO_AES_MODE_ECB_PAD_PCKS7,
    .key_size     = NRF_CRYPTO_KEY_SIZE_128,
    .context_size = sizeof(nrf_crypto_backend_aes_ecb_context_t),

    .init_fn      = backend_cc310_init,
    .uninit_fn    = backend_cc310_uninit,
    .key_set_fn   = backend_cc310_key_set,
    .iv_set_fn    = NULL,
    .iv_get_fn    = NULL,
    .update_fn    = backend_cc310_update,
    .finalize_fn  = backend_cc310_padding_finalize
};
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CBC_MAC)
nrf_crypto_aes_info_t const g_nrf_crypto_aes_cbc_mac_128_info =
{
    .mode         = NRF_CRYPTO_AES_MODE_CBC_MAC,
    .key_size     = NRF_CRYPTO_KEY_SIZE_128,
    .context_size = sizeof(nrf_crypto_backend_aes_cbc_mac_context_t),

    .init_fn      = backend_cc310_init,
    .uninit_fn    = backend_cc310_uninit,
    .key_set_fn   = backend_cc310_key_set,
    .iv_set_fn    = backend_cc310_iv_set,
    .iv_get_fn    = backend_cc310_iv_get,
    .update_fn    = backend_cc310_update,
    .finalize_fn  = backend_cc310_mac_finalize
};

nrf_crypto_aes_info_t const g_nrf_crypto_aes_cbc_mac_128_pad_pkcs7_info =
{
    .mode         = NRF_CRYPTO_AES_MODE_CBC_MAC_PAD_PCKS7,
    .key_size     = NRF_CRYPTO_KEY_SIZE_128,
    .context_size = sizeof(nrf_crypto_backend_aes_cbc_mac_context_t),

    .init_fn      = backend_cc310_init,
    .uninit_fn    = backend_cc310_uninit,
    .key_set_fn   = backend_cc310_key_set,
    .iv_set_fn    = backend_cc310_iv_set,
    .iv_get_fn    = backend_cc310_iv_get,
    .update_fn    = backend_cc310_update,
    .finalize_fn  = backend_cc310_cbc_mac_padding_finalize
};
#endif

#if NRF_MODULE_ENABLED(NRF_CRYPTO_BACKEND_CC310_AES_CMAC)
nrf_crypto_aes_info_t const g_nrf_crypto_aes_cmac_128_info =
{
    .mode         = NRF_CRYPTO_AES_MODE_CMAC,
    .key_size     = NRF_CRYPTO_KEY_SIZE_128,
    .context_size = sizeof(nrf_crypto_backend_aes_cmac_context_t),

    .init_fn      = backend_cc310_init,
    .uninit_fn    = backend_cc310_uninit,
    .key_set_fn   = backend_cc310_key_set,
    .iv_set_fn    = NULL,
    .iv_get_fn    = NULL,
    .update_fn    = backend_cc310_update,
    .finalize_fn  = backend_cc310_mac_finalize
};
#endif

#endif // NRF_MODULE_ENABLED(NRF_CRYPTO_CC310_AES)
#endif // NRF_MODULE_ENABLED(NRF_CRYPTO)
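
/*
 * Example usage (illustrative sketch only, not part of this backend): the frontend
 * nrf_crypto_aes API drives the function pointers registered in the info structures above.
 * Exact prototypes are declared in nrf_crypto_aes.h; the single-call helper shown here assumes
 * that the key, IV, and data buffers are placed in RAM, as required by the CC310 DMA checks in
 * this file, and that the input length is a multiple of the block size for plain CBC.
 *
 *     nrf_crypto_aes_context_t ctx;
 *     uint8_t                  key[16];          // 128-bit key, filled by the application
 *     uint8_t                  iv[16];           // initialization vector
 *     uint8_t                  plain_text[64];   // multiple of the 16-byte block size
 *     uint8_t                  cipher_text[64];
 *     size_t                   cipher_size = sizeof(cipher_text);
 *
 *     ret_code_t err_code = nrf_crypto_aes_crypt(&ctx,
 *                                                &g_nrf_crypto_aes_cbc_128_info,
 *                                                NRF_CRYPTO_ENCRYPT,
 *                                                key,
 *                                                iv,
 *                                                plain_text,
 *                                                sizeof(plain_text),
 *                                                cipher_text,
 *                                                &cipher_size);
 *     APP_ERROR_CHECK(err_code);
 */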