/src/wolfssl-sp-math/wolfcrypt/src/aes.c
Line | Count | Source (jump to first uncovered line) |
1 | | /* aes.c |
2 | | * |
3 | | * Copyright (C) 2006-2022 wolfSSL Inc. |
4 | | * |
5 | | * This file is part of wolfSSL. |
6 | | * |
7 | | * wolfSSL is free software; you can redistribute it and/or modify |
8 | | * it under the terms of the GNU General Public License as published by |
9 | | * the Free Software Foundation; either version 2 of the License, or |
10 | | * (at your option) any later version. |
11 | | * |
12 | | * wolfSSL is distributed in the hope that it will be useful, |
13 | | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
14 | | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
15 | | * GNU General Public License for more details. |
16 | | * |
17 | | * You should have received a copy of the GNU General Public License |
18 | | * along with this program; if not, write to the Free Software |
19 | | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA |
20 | | */ |
21 | | |
22 | | /* |
23 | | |
24 | | DESCRIPTION |
25 | | This library provides the interfaces to the Advanced Encryption Standard (AES) |
26 | | for encrypting and decrypting data. AES is the standard known for a symmetric |
27 | | block cipher mechanism that uses n-bit binary string parameter key with 128-bits, |
28 | | 192-bits, and 256-bits of key sizes. |
29 | | |
30 | | */ |
31 | | #ifdef HAVE_CONFIG_H |
32 | | #include <config.h> |
33 | | #endif |
34 | | |
35 | | #include <wolfssl/wolfcrypt/settings.h> |
36 | | #include <wolfssl/wolfcrypt/error-crypt.h> |
37 | | |
38 | | #if !defined(NO_AES) |
39 | | |
40 | | /* Tip: Locate the software cipher modes by searching for "Software AES" */ |
41 | | |
42 | | #if defined(HAVE_FIPS) && \ |
43 | | defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2) |
44 | | |
45 | | /* set NO_WRAPPERS before headers, use direct internal f()s not wrappers */ |
46 | | #define FIPS_NO_WRAPPERS |
47 | | |
48 | | #ifdef USE_WINDOWS_API |
49 | | #pragma code_seg(".fipsA$g") |
50 | | #pragma const_seg(".fipsB$g") |
51 | | #endif |
52 | | #endif |
53 | | |
54 | | #include <wolfssl/wolfcrypt/aes.h> |
55 | | |
56 | | #ifdef WOLFSSL_AESNI |
57 | | #include <wmmintrin.h> |
58 | | #include <emmintrin.h> |
59 | | #include <smmintrin.h> |
60 | | #endif /* WOLFSSL_AESNI */ |
61 | | |
62 | | #include <wolfssl/wolfcrypt/cpuid.h> |
63 | | |
64 | | #ifdef WOLF_CRYPTO_CB |
65 | | #include <wolfssl/wolfcrypt/cryptocb.h> |
66 | | #endif |
67 | | |
68 | | #ifdef WOLFSSL_SECO_CAAM |
69 | | #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h> |
70 | | #endif |
71 | | |
72 | | #ifdef WOLFSSL_IMXRT_DCP |
73 | | #include <wolfssl/wolfcrypt/port/nxp/dcp_port.h> |
74 | | #endif |
75 | | #if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT) |
76 | | #include <wolfssl/wolfcrypt/port/nxp/se050_port.h> |
77 | | #endif |
78 | | |
79 | | #ifdef WOLFSSL_AES_SIV |
80 | | #include <wolfssl/wolfcrypt/cmac.h> |
81 | | #endif |
82 | | |
83 | | #if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES) |
84 | | #include <wolfssl/wolfcrypt/port/psa/psa.h> |
85 | | #endif |
86 | | |
87 | | /* fips wrapper calls, user can call direct */ |
88 | | #if defined(HAVE_FIPS) && \ |
89 | | (!defined(HAVE_FIPS_VERSION) || (HAVE_FIPS_VERSION < 2)) |
90 | | |
91 | | int wc_AesSetKey(Aes* aes, const byte* key, word32 len, const byte* iv, |
92 | | int dir) |
93 | | { |
94 | | if (aes == NULL || !( (len == 16) || (len == 24) || (len == 32)) ) { |
95 | | return BAD_FUNC_ARG; |
96 | | } |
97 | | |
98 | | return AesSetKey_fips(aes, key, len, iv, dir); |
99 | | } |
100 | | int wc_AesSetIV(Aes* aes, const byte* iv) |
101 | | { |
102 | | if (aes == NULL) { |
103 | | return BAD_FUNC_ARG; |
104 | | } |
105 | | |
106 | | return AesSetIV_fips(aes, iv); |
107 | | } |
108 | | #ifdef HAVE_AES_CBC |
109 | | int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
110 | | { |
111 | | if (aes == NULL || out == NULL || in == NULL) { |
112 | | return BAD_FUNC_ARG; |
113 | | } |
114 | | |
115 | | return AesCbcEncrypt_fips(aes, out, in, sz); |
116 | | } |
117 | | #ifdef HAVE_AES_DECRYPT |
118 | | int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
119 | | { |
120 | | if (aes == NULL || out == NULL || in == NULL |
121 | | || sz % AES_BLOCK_SIZE != 0) { |
122 | | return BAD_FUNC_ARG; |
123 | | } |
124 | | |
125 | | return AesCbcDecrypt_fips(aes, out, in, sz); |
126 | | } |
127 | | #endif /* HAVE_AES_DECRYPT */ |
128 | | #endif /* HAVE_AES_CBC */ |
129 | | |
130 | | /* AES-CTR */ |
131 | | #ifdef WOLFSSL_AES_COUNTER |
132 | | int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
133 | | { |
134 | | if (aes == NULL || out == NULL || in == NULL) { |
135 | | return BAD_FUNC_ARG; |
136 | | } |
137 | | |
138 | | return AesCtrEncrypt(aes, out, in, sz); |
139 | | } |
140 | | #endif |
141 | | |
142 | | /* AES-DIRECT */ |
143 | | #if defined(WOLFSSL_AES_DIRECT) |
144 | | void wc_AesEncryptDirect(Aes* aes, byte* out, const byte* in) |
145 | | { |
146 | | AesEncryptDirect(aes, out, in); |
147 | | } |
148 | | |
149 | | #ifdef HAVE_AES_DECRYPT |
150 | | void wc_AesDecryptDirect(Aes* aes, byte* out, const byte* in) |
151 | | { |
152 | | AesDecryptDirect(aes, out, in); |
153 | | } |
154 | | #endif /* HAVE_AES_DECRYPT */ |
155 | | |
156 | | int wc_AesSetKeyDirect(Aes* aes, const byte* key, word32 len, |
157 | | const byte* iv, int dir) |
158 | | { |
159 | | return AesSetKeyDirect(aes, key, len, iv, dir); |
160 | | } |
161 | | #endif /* WOLFSSL_AES_DIRECT */ |
162 | | |
163 | | /* AES-GCM */ |
164 | | #ifdef HAVE_AESGCM |
165 | | int wc_AesGcmSetKey(Aes* aes, const byte* key, word32 len) |
166 | | { |
167 | | if (aes == NULL || !( (len == 16) || (len == 24) || (len == 32)) ) { |
168 | | return BAD_FUNC_ARG; |
169 | | } |
170 | | |
171 | | return AesGcmSetKey_fips(aes, key, len); |
172 | | } |
173 | | int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz, |
174 | | const byte* iv, word32 ivSz, |
175 | | byte* authTag, word32 authTagSz, |
176 | | const byte* authIn, word32 authInSz) |
177 | | { |
178 | | if (aes == NULL || authTagSz > AES_BLOCK_SIZE || |
179 | | authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ || |
180 | | ivSz == 0 || ivSz > AES_BLOCK_SIZE) { |
181 | | return BAD_FUNC_ARG; |
182 | | } |
183 | | |
184 | | return AesGcmEncrypt_fips(aes, out, in, sz, iv, ivSz, authTag, |
185 | | authTagSz, authIn, authInSz); |
186 | | } |
187 | | |
188 | | #ifdef HAVE_AES_DECRYPT |
189 | | int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz, |
190 | | const byte* iv, word32 ivSz, |
191 | | const byte* authTag, word32 authTagSz, |
192 | | const byte* authIn, word32 authInSz) |
193 | | { |
194 | | if (aes == NULL || out == NULL || in == NULL || iv == NULL |
195 | | || authTag == NULL || authTagSz > AES_BLOCK_SIZE || |
196 | | ivSz == 0 || ivSz > AES_BLOCK_SIZE) { |
197 | | return BAD_FUNC_ARG; |
198 | | } |
199 | | |
200 | | return AesGcmDecrypt_fips(aes, out, in, sz, iv, ivSz, authTag, |
201 | | authTagSz, authIn, authInSz); |
202 | | } |
203 | | #endif /* HAVE_AES_DECRYPT */ |
204 | | |
205 | | int wc_GmacSetKey(Gmac* gmac, const byte* key, word32 len) |
206 | | { |
207 | | if (gmac == NULL || key == NULL || !((len == 16) || |
208 | | (len == 24) || (len == 32)) ) { |
209 | | return BAD_FUNC_ARG; |
210 | | } |
211 | | |
212 | | return GmacSetKey(gmac, key, len); |
213 | | } |
214 | | int wc_GmacUpdate(Gmac* gmac, const byte* iv, word32 ivSz, |
215 | | const byte* authIn, word32 authInSz, |
216 | | byte* authTag, word32 authTagSz) |
217 | | { |
218 | | if (gmac == NULL || authTagSz > AES_BLOCK_SIZE || |
219 | | authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) { |
220 | | return BAD_FUNC_ARG; |
221 | | } |
222 | | |
223 | | return GmacUpdate(gmac, iv, ivSz, authIn, authInSz, |
224 | | authTag, authTagSz); |
225 | | } |
226 | | #endif /* HAVE_AESGCM */ |
227 | | |
228 | | /* AES-CCM */ |
229 | | #if defined(HAVE_AESCCM) && \ |
230 | | defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2) |
231 | | int wc_AesCcmSetKey(Aes* aes, const byte* key, word32 keySz) |
232 | | { |
233 | | return AesCcmSetKey(aes, key, keySz); |
234 | | } |
235 | | int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz, |
236 | | const byte* nonce, word32 nonceSz, |
237 | | byte* authTag, word32 authTagSz, |
238 | | const byte* authIn, word32 authInSz) |
239 | | { |
240 | | /* sanity check on arguments */ |
241 | | if (aes == NULL || out == NULL || in == NULL || nonce == NULL |
242 | | || authTag == NULL || nonceSz < 7 || nonceSz > 13) |
243 | | return BAD_FUNC_ARG; |
244 | | |
245 | | AesCcmEncrypt(aes, out, in, inSz, nonce, nonceSz, authTag, |
246 | | authTagSz, authIn, authInSz); |
247 | | return 0; |
248 | | } |
249 | | |
250 | | #ifdef HAVE_AES_DECRYPT |
251 | | int wc_AesCcmDecrypt(Aes* aes, byte* out, |
252 | | const byte* in, word32 inSz, |
253 | | const byte* nonce, word32 nonceSz, |
254 | | const byte* authTag, word32 authTagSz, |
255 | | const byte* authIn, word32 authInSz) |
256 | | { |
257 | | |
258 | | if (aes == NULL || out == NULL || in == NULL || nonce == NULL |
259 | | || authTag == NULL || nonceSz < 7 || nonceSz > 13) { |
260 | | return BAD_FUNC_ARG; |
261 | | } |
262 | | |
263 | | return AesCcmDecrypt(aes, out, in, inSz, nonce, nonceSz, |
264 | | authTag, authTagSz, authIn, authInSz); |
265 | | } |
266 | | #endif /* HAVE_AES_DECRYPT */ |
267 | | #endif /* HAVE_AESCCM && HAVE_FIPS_VERSION 2 */ |
268 | | |
269 | | int wc_AesInit(Aes* aes, void* h, int i) |
270 | | { |
271 | | if (aes == NULL) |
272 | | return BAD_FUNC_ARG; |
273 | | |
274 | | (void)h; |
275 | | (void)i; |
276 | | |
277 | | /* FIPS doesn't support */ |
278 | | #ifdef WOLFSSL_KCAPI_AES |
279 | | return AesInit(aes, h, i); |
280 | | #else |
281 | | return 0; |
282 | | #endif |
283 | | } |
284 | | void wc_AesFree(Aes* aes) |
285 | | { |
286 | | (void)aes; |
287 | | /* FIPS doesn't support */ |
288 | | #ifdef WOLFSSL_KCAPI_AES |
289 | | AesFree(aes); |
290 | | #endif |
291 | | } |
292 | | |
293 | | #else /* else build without fips, or for FIPS v2+ */ |
294 | | |
295 | | |
296 | | #if defined(WOLFSSL_TI_CRYPT) |
297 | | #include <wolfcrypt/src/port/ti/ti-aes.c> |
298 | | #else |
299 | | |
300 | | #include <wolfssl/wolfcrypt/logging.h> |
301 | | |
302 | | #ifdef NO_INLINE |
303 | | #include <wolfssl/wolfcrypt/misc.h> |
304 | | #else |
305 | | #define WOLFSSL_MISC_INCLUDED |
306 | | #include <wolfcrypt/src/misc.c> |
307 | | #endif |
308 | | |
309 | | #if !defined(WOLFSSL_ARMASM) |
310 | | |
311 | | #ifdef WOLFSSL_IMX6_CAAM_BLOB |
312 | | /* case of possibly not using hardware acceleration for AES but using key |
313 | | blobs */ |
314 | | #include <wolfssl/wolfcrypt/port/caam/wolfcaam.h> |
315 | | #endif |
316 | | |
317 | | #ifdef DEBUG_AESNI |
318 | | #include <stdio.h> |
319 | | #endif |
320 | | |
321 | | #ifdef _MSC_VER |
322 | | /* 4127 warning constant while(1) */ |
323 | | #pragma warning(disable: 4127) |
324 | | #endif |
325 | | |
326 | | |
327 | | /* Define AES implementation includes and functions */ |
328 | | #if defined(STM32_CRYPTO) |
329 | | /* STM32F2/F4/F7/L4/L5/H7/WB55 hardware AES support for ECB, CBC, CTR and GCM modes */ |
330 | | |
331 | | #if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESGCM) || defined(HAVE_AESCCM) |
332 | | |
333 | | static WARN_UNUSED_RESULT int wc_AesEncrypt( |
334 | | Aes* aes, const byte* inBlock, byte* outBlock) |
335 | | { |
336 | | int ret = 0; |
337 | | #ifdef WOLFSSL_STM32_CUBEMX |
338 | | CRYP_HandleTypeDef hcryp; |
339 | | #else |
340 | | CRYP_InitTypeDef cryptInit; |
341 | | CRYP_KeyInitTypeDef keyInit; |
342 | | #endif |
343 | | |
344 | | #ifdef WOLFSSL_STM32_CUBEMX |
345 | | ret = wc_Stm32_Aes_Init(aes, &hcryp); |
346 | | if (ret != 0) |
347 | | return ret; |
348 | | |
349 | | ret = wolfSSL_CryptHwMutexLock(); |
350 | | if (ret != 0) |
351 | | return ret; |
352 | | |
353 | | #if defined(STM32_HAL_V2) |
354 | | hcryp.Init.Algorithm = CRYP_AES_ECB; |
355 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
356 | | hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT; |
357 | | hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_ECB; |
358 | | hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE; |
359 | | #endif |
360 | | HAL_CRYP_Init(&hcryp); |
361 | | |
362 | | #if defined(STM32_HAL_V2) |
363 | | ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE, |
364 | | (uint32_t*)outBlock, STM32_HAL_TIMEOUT); |
365 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
366 | | ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE, |
367 | | outBlock, STM32_HAL_TIMEOUT); |
368 | | #else |
369 | | ret = HAL_CRYP_AESECB_Encrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE, |
370 | | outBlock, STM32_HAL_TIMEOUT); |
371 | | #endif |
372 | | if (ret != HAL_OK) { |
373 | | ret = WC_TIMEOUT_E; |
374 | | } |
375 | | HAL_CRYP_DeInit(&hcryp); |
376 | | |
377 | | #else /* Standard Peripheral Library */ |
378 | | ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit); |
379 | | if (ret != 0) |
380 | | return ret; |
381 | | |
382 | | ret = wolfSSL_CryptHwMutexLock(); |
383 | | if (ret != 0) |
384 | | return ret; |
385 | | |
386 | | /* reset registers to their default values */ |
387 | | CRYP_DeInit(); |
388 | | |
389 | | /* setup key */ |
390 | | CRYP_KeyInit(&keyInit); |
391 | | |
392 | | /* set direction and mode */ |
393 | | cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt; |
394 | | cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB; |
395 | | CRYP_Init(&cryptInit); |
396 | | |
397 | | /* enable crypto processor */ |
398 | | CRYP_Cmd(ENABLE); |
399 | | |
400 | | /* flush IN/OUT FIFOs */ |
401 | | CRYP_FIFOFlush(); |
402 | | |
403 | | CRYP_DataIn(*(uint32_t*)&inBlock[0]); |
404 | | CRYP_DataIn(*(uint32_t*)&inBlock[4]); |
405 | | CRYP_DataIn(*(uint32_t*)&inBlock[8]); |
406 | | CRYP_DataIn(*(uint32_t*)&inBlock[12]); |
407 | | |
408 | | /* wait until the complete message has been processed */ |
409 | | while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {} |
410 | | |
411 | | *(uint32_t*)&outBlock[0] = CRYP_DataOut(); |
412 | | *(uint32_t*)&outBlock[4] = CRYP_DataOut(); |
413 | | *(uint32_t*)&outBlock[8] = CRYP_DataOut(); |
414 | | *(uint32_t*)&outBlock[12] = CRYP_DataOut(); |
415 | | |
416 | | /* disable crypto processor */ |
417 | | CRYP_Cmd(DISABLE); |
418 | | #endif /* WOLFSSL_STM32_CUBEMX */ |
419 | | wolfSSL_CryptHwMutexUnLock(); |
420 | | |
421 | | return ret; |
422 | | } |
423 | | #endif /* WOLFSSL_AES_DIRECT || HAVE_AESGCM || HAVE_AESCCM */ |
424 | | |
425 | | #ifdef HAVE_AES_DECRYPT |
426 | | #if defined(WOLFSSL_AES_DIRECT) || defined(HAVE_AESCCM) |
427 | | static WARN_UNUSED_RESULT int wc_AesDecrypt( |
428 | | Aes* aes, const byte* inBlock, byte* outBlock) |
429 | | { |
430 | | int ret = 0; |
431 | | #ifdef WOLFSSL_STM32_CUBEMX |
432 | | CRYP_HandleTypeDef hcryp; |
433 | | #else |
434 | | CRYP_InitTypeDef cryptInit; |
435 | | CRYP_KeyInitTypeDef keyInit; |
436 | | #endif |
437 | | |
438 | | #ifdef WOLFSSL_STM32_CUBEMX |
439 | | ret = wc_Stm32_Aes_Init(aes, &hcryp); |
440 | | if (ret != 0) |
441 | | return ret; |
442 | | |
443 | | ret = wolfSSL_CryptHwMutexLock(); |
444 | | if (ret != 0) |
445 | | return ret; |
446 | | |
447 | | #if defined(STM32_HAL_V2) |
448 | | hcryp.Init.Algorithm = CRYP_AES_ECB; |
449 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
450 | | hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT; |
451 | | hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_ECB; |
452 | | hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE; |
453 | | #endif |
454 | | HAL_CRYP_Init(&hcryp); |
455 | | |
456 | | #if defined(STM32_HAL_V2) |
457 | | ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)inBlock, AES_BLOCK_SIZE, |
458 | | (uint32_t*)outBlock, STM32_HAL_TIMEOUT); |
459 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
460 | | ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE, |
461 | | outBlock, STM32_HAL_TIMEOUT); |
462 | | #else |
463 | | ret = HAL_CRYP_AESECB_Decrypt(&hcryp, (uint8_t*)inBlock, AES_BLOCK_SIZE, |
464 | | outBlock, STM32_HAL_TIMEOUT); |
465 | | #endif |
466 | | if (ret != HAL_OK) { |
467 | | ret = WC_TIMEOUT_E; |
468 | | } |
469 | | HAL_CRYP_DeInit(&hcryp); |
470 | | |
471 | | #else /* Standard Peripheral Library */ |
472 | | ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit); |
473 | | if (ret != 0) |
474 | | return ret; |
475 | | |
476 | | ret = wolfSSL_CryptHwMutexLock(); |
477 | | if (ret != 0) |
478 | | return ret; |
479 | | |
480 | | /* reset registers to their default values */ |
481 | | CRYP_DeInit(); |
482 | | |
483 | | /* set direction and key */ |
484 | | CRYP_KeyInit(&keyInit); |
485 | | cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt; |
486 | | cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key; |
487 | | CRYP_Init(&cryptInit); |
488 | | |
489 | | /* enable crypto processor */ |
490 | | CRYP_Cmd(ENABLE); |
491 | | |
492 | | /* wait until decrypt key has been initialized */ |
493 | | while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {} |
494 | | |
495 | | /* set direction and mode */ |
496 | | cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt; |
497 | | cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_ECB; |
498 | | CRYP_Init(&cryptInit); |
499 | | |
500 | | /* enable crypto processor */ |
501 | | CRYP_Cmd(ENABLE); |
502 | | |
503 | | /* flush IN/OUT FIFOs */ |
504 | | CRYP_FIFOFlush(); |
505 | | |
506 | | CRYP_DataIn(*(uint32_t*)&inBlock[0]); |
507 | | CRYP_DataIn(*(uint32_t*)&inBlock[4]); |
508 | | CRYP_DataIn(*(uint32_t*)&inBlock[8]); |
509 | | CRYP_DataIn(*(uint32_t*)&inBlock[12]); |
510 | | |
511 | | /* wait until the complete message has been processed */ |
512 | | while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {} |
513 | | |
514 | | *(uint32_t*)&outBlock[0] = CRYP_DataOut(); |
515 | | *(uint32_t*)&outBlock[4] = CRYP_DataOut(); |
516 | | *(uint32_t*)&outBlock[8] = CRYP_DataOut(); |
517 | | *(uint32_t*)&outBlock[12] = CRYP_DataOut(); |
518 | | |
519 | | /* disable crypto processor */ |
520 | | CRYP_Cmd(DISABLE); |
521 | | #endif /* WOLFSSL_STM32_CUBEMX */ |
522 | | wolfSSL_CryptHwMutexUnLock(); |
523 | | |
524 | | return ret; |
525 | | } |
526 | | #endif /* WOLFSSL_AES_DIRECT || HAVE_AESCCM */ |
527 | | #endif /* HAVE_AES_DECRYPT */ |
528 | | |
529 | | #elif defined(HAVE_COLDFIRE_SEC) |
530 | | /* Freescale Coldfire SEC support for CBC mode. |
531 | | * NOTE: no support for AES-CTR/GCM/CCM/Direct */ |
532 | | #include <wolfssl/wolfcrypt/types.h> |
533 | | #include "sec.h" |
534 | | #include "mcf5475_sec.h" |
535 | | #include "mcf5475_siu.h" |
536 | | #elif defined(FREESCALE_LTC) |
537 | | #include "fsl_ltc.h" |
538 | | #if defined(FREESCALE_LTC_AES_GCM) |
539 | | #undef NEED_AES_TABLES |
540 | | #undef GCM_TABLE |
541 | | #endif |
542 | | |
543 | | /* if LTC doesn't have GCM, use software with LTC AES ECB mode */ |
544 | | static WARN_UNUSED_RESULT int wc_AesEncrypt( |
545 | | Aes* aes, const byte* inBlock, byte* outBlock) |
546 | | { |
547 | | word32 keySize = 0; |
548 | | byte* key = (byte*)aes->key; |
549 | | int ret = wc_AesGetKeySize(aes, &keySize); |
550 | | if (ret != 0) |
551 | | return ret; |
552 | | |
553 | | if (wolfSSL_CryptHwMutexLock() == 0) { |
554 | | LTC_AES_EncryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE, |
555 | | key, keySize); |
556 | | wolfSSL_CryptHwMutexUnLock(); |
557 | | } |
558 | | return 0; |
559 | | } |
560 | | #ifdef HAVE_AES_DECRYPT |
561 | | static WARN_UNUSED_RESULT int wc_AesDecrypt( |
562 | | Aes* aes, const byte* inBlock, byte* outBlock) |
563 | | { |
564 | | word32 keySize = 0; |
565 | | byte* key = (byte*)aes->key; |
566 | | int ret = wc_AesGetKeySize(aes, &keySize); |
567 | | if (ret != 0) |
568 | | return ret; |
569 | | |
570 | | if (wolfSSL_CryptHwMutexLock() == 0) { |
571 | | LTC_AES_DecryptEcb(LTC_BASE, inBlock, outBlock, AES_BLOCK_SIZE, |
572 | | key, keySize, kLTC_EncryptKey); |
573 | | wolfSSL_CryptHwMutexUnLock(); |
574 | | } |
575 | | return 0; |
576 | | } |
577 | | #endif |
578 | | |
579 | | #elif defined(FREESCALE_MMCAU) |
580 | | /* Freescale mmCAU hardware AES support for Direct, CBC, CCM, GCM modes |
581 | | * through the CAU/mmCAU library. Documentation located in |
582 | | * ColdFire/ColdFire+ CAU and Kinetis mmCAU Software Library User |
583 | | * Guide (See note in README). */ |
584 | | #ifdef FREESCALE_MMCAU_CLASSIC |
585 | | /* MMCAU 1.4 library used with non-KSDK / classic MQX builds */ |
586 | | #include "cau_api.h" |
587 | | #else |
588 | | #include "fsl_mmcau.h" |
589 | | #endif |
590 | | |
591 | | static WARN_UNUSED_RESULT int wc_AesEncrypt( |
592 | | Aes* aes, const byte* inBlock, byte* outBlock) |
593 | | { |
594 | | if (wolfSSL_CryptHwMutexLock() == 0) { |
595 | | #ifdef FREESCALE_MMCAU_CLASSIC |
596 | | if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) { |
597 | | WOLFSSL_MSG("Bad cau_aes_encrypt alignment"); |
598 | | return BAD_ALIGN_E; |
599 | | } |
600 | | cau_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock); |
601 | | #else |
602 | | MMCAU_AES_EncryptEcb(inBlock, (byte*)aes->key, aes->rounds, |
603 | | outBlock); |
604 | | #endif |
605 | | wolfSSL_CryptHwMutexUnLock(); |
606 | | } |
607 | | return 0; |
608 | | } |
609 | | #ifdef HAVE_AES_DECRYPT |
610 | | static WARN_UNUSED_RESULT int wc_AesDecrypt( |
611 | | Aes* aes, const byte* inBlock, byte* outBlock) |
612 | | { |
613 | | if (wolfSSL_CryptHwMutexLock() == 0) { |
614 | | #ifdef FREESCALE_MMCAU_CLASSIC |
615 | | if ((wc_ptr_t)outBlock % WOLFSSL_MMCAU_ALIGNMENT) { |
616 | | WOLFSSL_MSG("Bad cau_aes_decrypt alignment"); |
617 | | return BAD_ALIGN_E; |
618 | | } |
619 | | cau_aes_decrypt(inBlock, (byte*)aes->key, aes->rounds, outBlock); |
620 | | #else |
621 | | MMCAU_AES_DecryptEcb(inBlock, (byte*)aes->key, aes->rounds, |
622 | | outBlock); |
623 | | #endif |
624 | | wolfSSL_CryptHwMutexUnLock(); |
625 | | } |
626 | | return 0; |
627 | | } |
628 | | #endif /* HAVE_AES_DECRYPT */ |
629 | | |
630 | | #elif defined(WOLFSSL_PIC32MZ_CRYPT) |
631 | | |
632 | | #include <wolfssl/wolfcrypt/port/pic32/pic32mz-crypt.h> |
633 | | |
634 | | #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT) |
635 | | static WARN_UNUSED_RESULT int wc_AesEncrypt( |
636 | | Aes* aes, const byte* inBlock, byte* outBlock) |
637 | | { |
638 | | /* Thread mutex protection handled in Pic32Crypto */ |
639 | | return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0, |
640 | | outBlock, inBlock, AES_BLOCK_SIZE, |
641 | | PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB); |
642 | | } |
643 | | #endif |
644 | | |
645 | | #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT) |
646 | | static WARN_UNUSED_RESULT int wc_AesDecrypt( |
647 | | Aes* aes, const byte* inBlock, byte* outBlock) |
648 | | { |
649 | | /* Thread mutex protection handled in Pic32Crypto */ |
650 | | return wc_Pic32AesCrypt(aes->key, aes->keylen, NULL, 0, |
651 | | outBlock, inBlock, AES_BLOCK_SIZE, |
652 | | PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RECB); |
653 | | } |
654 | | #endif |
655 | | |
656 | | #elif defined(WOLFSSL_NRF51_AES) |
657 | | /* Use built-in AES hardware - AES 128 ECB Encrypt Only */ |
658 | | #include "wolfssl/wolfcrypt/port/nrf51.h" |
659 | | |
660 | | static WARN_UNUSED_RESULT int wc_AesEncrypt( |
661 | | Aes* aes, const byte* inBlock, byte* outBlock) |
662 | | { |
663 | | int ret; |
664 | | ret = wolfSSL_CryptHwMutexLock(); |
665 | | if (ret == 0) { |
666 | | ret = nrf51_aes_encrypt(inBlock, (byte*)aes->key, aes->rounds, |
667 | | outBlock); |
668 | | wolfSSL_CryptHwMutexUnLock(); |
669 | | } |
670 | | return ret; |
671 | | } |
672 | | |
673 | | #ifdef HAVE_AES_DECRYPT |
674 | | #error nRF51 AES Hardware does not support decrypt |
675 | | #endif /* HAVE_AES_DECRYPT */ |
676 | | |
677 | | #elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \ |
678 | | !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES) |
679 | | |
680 | | #include "wolfssl/wolfcrypt/port/Espressif/esp32-crypt.h" |
681 | | |
682 | | #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT) |
683 | | static WARN_UNUSED_RESULT int wc_AesEncrypt( |
684 | | Aes* aes, const byte* inBlock, byte* outBlock) |
685 | | { |
686 | | /* Thread mutex protection handled in esp_aes_hw_InUse */ |
687 | | return wc_esp32AesEncrypt(aes, inBlock, outBlock); |
688 | | } |
689 | | #endif |
690 | | |
691 | | #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT) |
692 | | static WARN_UNUSED_RESULT int wc_AesDecrypt( |
693 | | Aes* aes, const byte* inBlock, byte* outBlock) |
694 | | { |
695 | | /* Thread mutex protection handled in esp_aes_hw_InUse */ |
696 | | return wc_esp32AesDecrypt(aes, inBlock, outBlock); |
697 | | } |
698 | | #endif |
699 | | |
700 | | #elif defined(WOLFSSL_AESNI) |
701 | | |
702 | | #define NEED_AES_TABLES |
703 | | |
704 | | /* Each platform needs to query info type 1 from cpuid to see if aesni is |
705 | | * supported. Also, let's setup a macro for proper linkage w/o ABI conflicts |
706 | | */ |
707 | | |
708 | | #ifndef AESNI_ALIGN |
709 | | #define AESNI_ALIGN 16 |
710 | | #endif |
711 | | |
712 | | #ifdef _MSC_VER |
713 | | #define XASM_LINK(f) |
714 | | #elif defined(__APPLE__) |
715 | | #define XASM_LINK(f) asm("_" f) |
716 | | #else |
717 | | #define XASM_LINK(f) asm(f) |
718 | | #endif /* _MSC_VER */ |
719 | | |
720 | | static int checkAESNI = 0; |
721 | | static int haveAESNI = 0; |
722 | | static word32 intel_flags = 0; |
723 | | |
724 | | static WARN_UNUSED_RESULT int Check_CPU_support_AES(void) |
725 | | { |
726 | | intel_flags = cpuid_get_flags(); |
727 | | |
728 | | return IS_INTEL_AESNI(intel_flags) != 0; |
729 | | } |
730 | | |
731 | | |
732 | | /* tell C compiler these are asm functions in case any mix up of ABI underscore |
733 | | prefix between clang/gcc/llvm etc */ |
734 | | #ifdef HAVE_AES_CBC |
735 | | void AES_CBC_encrypt(const unsigned char* in, unsigned char* out, |
736 | | unsigned char* ivec, unsigned long length, |
737 | | const unsigned char* KS, int nr) |
738 | | XASM_LINK("AES_CBC_encrypt"); |
739 | | |
740 | | #ifdef HAVE_AES_DECRYPT |
741 | | #if defined(WOLFSSL_AESNI_BY4) |
742 | | void AES_CBC_decrypt_by4(const unsigned char* in, unsigned char* out, |
743 | | unsigned char* ivec, unsigned long length, |
744 | | const unsigned char* KS, int nr) |
745 | | XASM_LINK("AES_CBC_decrypt_by4"); |
746 | | #elif defined(WOLFSSL_AESNI_BY6) |
747 | | void AES_CBC_decrypt_by6(const unsigned char* in, unsigned char* out, |
748 | | unsigned char* ivec, unsigned long length, |
749 | | const unsigned char* KS, int nr) |
750 | | XASM_LINK("AES_CBC_decrypt_by6"); |
751 | | #else /* WOLFSSL_AESNI_BYx */ |
752 | | void AES_CBC_decrypt_by8(const unsigned char* in, unsigned char* out, |
753 | | unsigned char* ivec, unsigned long length, |
754 | | const unsigned char* KS, int nr) |
755 | | XASM_LINK("AES_CBC_decrypt_by8"); |
756 | | #endif /* WOLFSSL_AESNI_BYx */ |
757 | | #endif /* HAVE_AES_DECRYPT */ |
758 | | #endif /* HAVE_AES_CBC */ |
759 | | |
760 | | void AES_ECB_encrypt(const unsigned char* in, unsigned char* out, |
761 | | unsigned long length, const unsigned char* KS, int nr) |
762 | | XASM_LINK("AES_ECB_encrypt"); |
763 | | |
764 | | #ifdef HAVE_AES_DECRYPT |
765 | | void AES_ECB_decrypt(const unsigned char* in, unsigned char* out, |
766 | | unsigned long length, const unsigned char* KS, int nr) |
767 | | XASM_LINK("AES_ECB_decrypt"); |
768 | | #endif |
769 | | |
770 | | void AES_128_Key_Expansion(const unsigned char* userkey, |
771 | | unsigned char* key_schedule) |
772 | | XASM_LINK("AES_128_Key_Expansion"); |
773 | | |
774 | | void AES_192_Key_Expansion(const unsigned char* userkey, |
775 | | unsigned char* key_schedule) |
776 | | XASM_LINK("AES_192_Key_Expansion"); |
777 | | |
778 | | void AES_256_Key_Expansion(const unsigned char* userkey, |
779 | | unsigned char* key_schedule) |
780 | | XASM_LINK("AES_256_Key_Expansion"); |
781 | | |
782 | | |
783 | | static WARN_UNUSED_RESULT int AES_set_encrypt_key( |
784 | | const unsigned char *userKey, const int bits, Aes* aes) |
785 | | { |
786 | | int ret; |
787 | | |
788 | | if (!userKey || !aes) |
789 | | return BAD_FUNC_ARG; |
790 | | |
791 | | switch (bits) { |
792 | | case 128: |
793 | | AES_128_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 10; |
794 | | return 0; |
795 | | case 192: |
796 | | AES_192_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 12; |
797 | | return 0; |
798 | | case 256: |
799 | | AES_256_Key_Expansion (userKey,(byte*)aes->key); aes->rounds = 14; |
800 | | return 0; |
801 | | default: |
802 | | ret = BAD_FUNC_ARG; |
803 | | } |
804 | | |
805 | | return ret; |
806 | | } |
807 | | |
808 | | #ifdef HAVE_AES_DECRYPT |
809 | | static WARN_UNUSED_RESULT int AES_set_decrypt_key( |
810 | | const unsigned char* userKey, const int bits, Aes* aes) |
811 | | { |
812 | | int nr; |
813 | | #ifdef WOLFSSL_SMALL_STACK |
814 | | Aes *temp_key; |
815 | | #else |
816 | | Aes temp_key[1]; |
817 | | #endif |
818 | | __m128i *Key_Schedule; |
819 | | __m128i *Temp_Key_Schedule; |
820 | | |
821 | | if (!userKey || !aes) |
822 | | return BAD_FUNC_ARG; |
823 | | |
824 | | #ifdef WOLFSSL_SMALL_STACK |
825 | | if ((temp_key = (Aes *)XMALLOC(sizeof *aes, aes->heap, |
826 | | DYNAMIC_TYPE_AES)) == NULL) |
827 | | return MEMORY_E; |
828 | | #endif |
829 | | |
830 | | if (AES_set_encrypt_key(userKey,bits,temp_key) == BAD_FUNC_ARG) { |
831 | | #ifdef WOLFSSL_SMALL_STACK |
832 | | XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES); |
833 | | #endif |
834 | | return BAD_FUNC_ARG; |
835 | | } |
836 | | |
837 | | Key_Schedule = (__m128i*)aes->key; |
838 | | Temp_Key_Schedule = (__m128i*)temp_key->key; |
839 | | |
840 | | nr = temp_key->rounds; |
841 | | aes->rounds = nr; |
842 | | |
843 | | #ifdef WOLFSSL_SMALL_STACK |
844 | | SAVE_VECTOR_REGISTERS(XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES); return _svr_ret;); |
845 | | #else |
846 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
847 | | #endif |
848 | | |
849 | | Key_Schedule[nr] = Temp_Key_Schedule[0]; |
850 | | Key_Schedule[nr-1] = _mm_aesimc_si128(Temp_Key_Schedule[1]); |
851 | | Key_Schedule[nr-2] = _mm_aesimc_si128(Temp_Key_Schedule[2]); |
852 | | Key_Schedule[nr-3] = _mm_aesimc_si128(Temp_Key_Schedule[3]); |
853 | | Key_Schedule[nr-4] = _mm_aesimc_si128(Temp_Key_Schedule[4]); |
854 | | Key_Schedule[nr-5] = _mm_aesimc_si128(Temp_Key_Schedule[5]); |
855 | | Key_Schedule[nr-6] = _mm_aesimc_si128(Temp_Key_Schedule[6]); |
856 | | Key_Schedule[nr-7] = _mm_aesimc_si128(Temp_Key_Schedule[7]); |
857 | | Key_Schedule[nr-8] = _mm_aesimc_si128(Temp_Key_Schedule[8]); |
858 | | Key_Schedule[nr-9] = _mm_aesimc_si128(Temp_Key_Schedule[9]); |
859 | | |
860 | | if (nr>10) { |
861 | | Key_Schedule[nr-10] = _mm_aesimc_si128(Temp_Key_Schedule[10]); |
862 | | Key_Schedule[nr-11] = _mm_aesimc_si128(Temp_Key_Schedule[11]); |
863 | | } |
864 | | |
865 | | if (nr>12) { |
866 | | Key_Schedule[nr-12] = _mm_aesimc_si128(Temp_Key_Schedule[12]); |
867 | | Key_Schedule[nr-13] = _mm_aesimc_si128(Temp_Key_Schedule[13]); |
868 | | } |
869 | | |
870 | | Key_Schedule[0] = Temp_Key_Schedule[nr]; |
871 | | |
872 | | RESTORE_VECTOR_REGISTERS(); |
873 | | |
874 | | #ifdef WOLFSSL_SMALL_STACK |
875 | | XFREE(temp_key, aes->heap, DYNAMIC_TYPE_AES); |
876 | | #endif |
877 | | |
878 | | return 0; |
879 | | } |
880 | | #endif /* HAVE_AES_DECRYPT */ |
881 | | |
882 | | #elif (defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \ |
883 | | && !defined(WOLFSSL_QNX_CAAM)) || \ |
884 | | ((defined(WOLFSSL_AFALG) || defined(WOLFSSL_DEVCRYPTO_AES)) && \ |
885 | | defined(HAVE_AESCCM)) |
886 | | static WARN_UNUSED_RESULT int wc_AesEncrypt( |
887 | | Aes* aes, const byte* inBlock, byte* outBlock) |
888 | | { |
889 | | return wc_AesEncryptDirect(aes, outBlock, inBlock); |
890 | | } |
891 | | |
892 | | #elif defined(WOLFSSL_AFALG) |
893 | | /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */ |
894 | | |
895 | | #elif defined(WOLFSSL_DEVCRYPTO_AES) |
896 | | /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */ |
897 | | |
898 | | #elif defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT) |
899 | | static WARN_UNUSED_RESULT int AES_ECB_encrypt( |
900 | | Aes* aes, const byte* inBlock, byte* outBlock, int sz) |
901 | | { |
902 | | return se050_aes_crypt(aes, inBlock, outBlock, sz, AES_ENCRYPTION, |
903 | | kAlgorithm_SSS_AES_ECB); |
904 | | } |
905 | | static WARN_UNUSED_RESULT int AES_ECB_decrypt( |
906 | | Aes* aes, const byte* inBlock, byte* outBlock, int sz) |
907 | | { |
908 | | return se050_aes_crypt(aes, inBlock, outBlock, sz, AES_DECRYPTION, |
909 | | kAlgorithm_SSS_AES_ECB); |
910 | | } |
911 | | static WARN_UNUSED_RESULT int wc_AesEncrypt( |
912 | | Aes* aes, const byte* inBlock, byte* outBlock) |
913 | | { |
914 | | return AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE); |
915 | | } |
916 | | static WARN_UNUSED_RESULT int wc_AesDecrypt( |
917 | | Aes* aes, const byte* inBlock, byte* outBlock) |
918 | | { |
919 | | return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE); |
920 | | } |
921 | | |
922 | | #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES) |
923 | | #include "hal_data.h" |
924 | | |
925 | | #ifndef WOLFSSL_SCE_AES256_HANDLE |
926 | | #define WOLFSSL_SCE_AES256_HANDLE g_sce_aes_256 |
927 | | #endif |
928 | | |
929 | | #ifndef WOLFSSL_SCE_AES192_HANDLE |
930 | | #define WOLFSSL_SCE_AES192_HANDLE g_sce_aes_192 |
931 | | #endif |
932 | | |
933 | | #ifndef WOLFSSL_SCE_AES128_HANDLE |
934 | | #define WOLFSSL_SCE_AES128_HANDLE g_sce_aes_128 |
935 | | #endif |
936 | | |
937 | | static WARN_UNUSED_RESULT int AES_ECB_encrypt( |
938 | | Aes* aes, const byte* inBlock, byte* outBlock, int sz) |
939 | | { |
940 | | word32 ret; |
941 | | |
942 | | if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag == |
943 | | CRYPTO_WORD_ENDIAN_BIG) { |
944 | | ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz); |
945 | | } |
946 | | |
947 | | switch (aes->keylen) { |
948 | | #ifdef WOLFSSL_AES_128 |
949 | | case AES_128_KEY_SIZE: |
950 | | ret = WOLFSSL_SCE_AES128_HANDLE.p_api->encrypt( |
951 | | WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key, |
952 | | NULL, (sz / sizeof(word32)), (word32*)inBlock, |
953 | | (word32*)outBlock); |
954 | | break; |
955 | | #endif |
956 | | #ifdef WOLFSSL_AES_192 |
957 | | case AES_192_KEY_SIZE: |
958 | | ret = WOLFSSL_SCE_AES192_HANDLE.p_api->encrypt( |
959 | | WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key, |
960 | | NULL, (sz / sizeof(word32)), (word32*)inBlock, |
961 | | (word32*)outBlock); |
962 | | break; |
963 | | #endif |
964 | | #ifdef WOLFSSL_AES_256 |
965 | | case AES_256_KEY_SIZE: |
966 | | ret = WOLFSSL_SCE_AES256_HANDLE.p_api->encrypt( |
967 | | WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key, |
968 | | NULL, (sz / sizeof(word32)), (word32*)inBlock, |
969 | | (word32*)outBlock); |
970 | | break; |
971 | | #endif |
972 | | default: |
973 | | WOLFSSL_MSG("Unknown key size"); |
974 | | return BAD_FUNC_ARG; |
975 | | } |
976 | | |
977 | | if (ret != SSP_SUCCESS) { |
978 | | /* revert input */ |
979 | | ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz); |
980 | | return WC_HW_E; |
981 | | } |
982 | | |
983 | | if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag == |
984 | | CRYPTO_WORD_ENDIAN_BIG) { |
985 | | ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz); |
986 | | if (inBlock != outBlock) { |
987 | | /* revert input */ |
988 | | ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz); |
989 | | } |
990 | | } |
991 | | return 0; |
992 | | } |
993 | | |
994 | | #if defined(HAVE_AES_DECRYPT) |
995 | | static WARN_UNUSED_RESULT int AES_ECB_decrypt( |
996 | | Aes* aes, const byte* inBlock, byte* outBlock, int sz) |
997 | | { |
998 | | word32 ret; |
999 | | |
1000 | | if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag == |
1001 | | CRYPTO_WORD_ENDIAN_BIG) { |
1002 | | ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz); |
1003 | | } |
1004 | | |
1005 | | switch (aes->keylen) { |
1006 | | #ifdef WOLFSSL_AES_128 |
1007 | | case AES_128_KEY_SIZE: |
1008 | | ret = WOLFSSL_SCE_AES128_HANDLE.p_api->decrypt( |
1009 | | WOLFSSL_SCE_AES128_HANDLE.p_ctrl, aes->key, aes->reg, |
1010 | | (sz / sizeof(word32)), (word32*)inBlock, |
1011 | | (word32*)outBlock); |
1012 | | break; |
1013 | | #endif |
1014 | | #ifdef WOLFSSL_AES_192 |
1015 | | case AES_192_KEY_SIZE: |
1016 | | ret = WOLFSSL_SCE_AES192_HANDLE.p_api->decrypt( |
1017 | | WOLFSSL_SCE_AES192_HANDLE.p_ctrl, aes->key, aes->reg, |
1018 | | (sz / sizeof(word32)), (word32*)inBlock, |
1019 | | (word32*)outBlock); |
1020 | | break; |
1021 | | #endif |
1022 | | #ifdef WOLFSSL_AES_256 |
1023 | | case AES_256_KEY_SIZE: |
1024 | | ret = WOLFSSL_SCE_AES256_HANDLE.p_api->decrypt( |
1025 | | WOLFSSL_SCE_AES256_HANDLE.p_ctrl, aes->key, aes->reg, |
1026 | | (sz / sizeof(word32)), (word32*)inBlock, |
1027 | | (word32*)outBlock); |
1028 | | break; |
1029 | | #endif |
1030 | | default: |
1031 | | WOLFSSL_MSG("Unknown key size"); |
1032 | | return BAD_FUNC_ARG; |
1033 | | } |
1034 | | if (ret != SSP_SUCCESS) { |
1035 | | return WC_HW_E; |
1036 | | } |
1037 | | |
1038 | | if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag == |
1039 | | CRYPTO_WORD_ENDIAN_BIG) { |
1040 | | ByteReverseWords((word32*)outBlock, (word32*)outBlock, sz); |
1041 | | if (inBlock != outBlock) { |
1042 | | /* revert input */ |
1043 | | ByteReverseWords((word32*)inBlock, (word32*)inBlock, sz); |
1044 | | } |
1045 | | } |
1046 | | |
1047 | | return 0; |
1048 | | } |
1049 | | #endif /* HAVE_AES_DECRYPT */ |
1050 | | |
1051 | | #if defined(HAVE_AESGCM) || defined(WOLFSSL_AES_DIRECT) |
1052 | | static WARN_UNUSED_RESULT int wc_AesEncrypt( |
1053 | | Aes* aes, const byte* inBlock, byte* outBlock) |
1054 | | { |
1055 | | return AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE); |
1056 | | } |
1057 | | #endif |
1058 | | |
1059 | | #if defined(HAVE_AES_DECRYPT) && defined(WOLFSSL_AES_DIRECT) |
1060 | | static WARN_UNUSED_RESULT int wc_AesDecrypt( |
1061 | | Aes* aes, const byte* inBlock, byte* outBlock) |
1062 | | { |
1063 | | return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE); |
1064 | | } |
1065 | | #endif |
1066 | | |
1067 | | #elif defined(WOLFSSL_KCAPI_AES) |
1068 | | /* Only CBC and GCM that are in wolfcrypt/src/port/kcapi/kcapi_aes.c */ |
1069 | | #if defined(WOLFSSL_AES_COUNTER) || defined(HAVE_AESCCM) || \ |
1070 | | defined(WOLFSSL_CMAC) || defined(WOLFSSL_AES_OFB) || \ |
1071 | | defined(WOLFSSL_AES_CFB) || defined(HAVE_AES_ECB) || \ |
1072 | | defined(WOLFSSL_AES_DIRECT) || \ |
1073 | | (defined(HAVE_AES_CBC) && defined(WOLFSSL_NO_KCAPI_AES_CBC)) |
1074 | | |
1075 | | #define NEED_AES_TABLES |
1076 | | #endif |
1077 | | #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES) |
1078 | | /* implemented in wolfcrypt/src/port/psa/psa_aes.c */ |
1079 | | #else |
1080 | | |
1081 | | /* using wolfCrypt software implementation */ |
1082 | | #define NEED_AES_TABLES |
1083 | | #endif |
1084 | | |
1085 | | |
1086 | | |
1087 | | #ifdef NEED_AES_TABLES |
1088 | | |
1089 | | static const FLASH_QUALIFIER word32 rcon[] = { |
1090 | | 0x01000000, 0x02000000, 0x04000000, 0x08000000, |
1091 | | 0x10000000, 0x20000000, 0x40000000, 0x80000000, |
1092 | | 0x1B000000, 0x36000000, |
1093 | | /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */ |
1094 | | }; |
1095 | | |
1096 | | #ifndef WOLFSSL_AES_SMALL_TABLES |
1097 | | static const FLASH_QUALIFIER word32 Te[4][256] = { |
1098 | | { |
1099 | | 0xc66363a5U, 0xf87c7c84U, 0xee777799U, 0xf67b7b8dU, |
1100 | | 0xfff2f20dU, 0xd66b6bbdU, 0xde6f6fb1U, 0x91c5c554U, |
1101 | | 0x60303050U, 0x02010103U, 0xce6767a9U, 0x562b2b7dU, |
1102 | | 0xe7fefe19U, 0xb5d7d762U, 0x4dababe6U, 0xec76769aU, |
1103 | | 0x8fcaca45U, 0x1f82829dU, 0x89c9c940U, 0xfa7d7d87U, |
1104 | | 0xeffafa15U, 0xb25959ebU, 0x8e4747c9U, 0xfbf0f00bU, |
1105 | | 0x41adadecU, 0xb3d4d467U, 0x5fa2a2fdU, 0x45afafeaU, |
1106 | | 0x239c9cbfU, 0x53a4a4f7U, 0xe4727296U, 0x9bc0c05bU, |
1107 | | 0x75b7b7c2U, 0xe1fdfd1cU, 0x3d9393aeU, 0x4c26266aU, |
1108 | | 0x6c36365aU, 0x7e3f3f41U, 0xf5f7f702U, 0x83cccc4fU, |
1109 | | 0x6834345cU, 0x51a5a5f4U, 0xd1e5e534U, 0xf9f1f108U, |
1110 | | 0xe2717193U, 0xabd8d873U, 0x62313153U, 0x2a15153fU, |
1111 | | 0x0804040cU, 0x95c7c752U, 0x46232365U, 0x9dc3c35eU, |
1112 | | 0x30181828U, 0x379696a1U, 0x0a05050fU, 0x2f9a9ab5U, |
1113 | | 0x0e070709U, 0x24121236U, 0x1b80809bU, 0xdfe2e23dU, |
1114 | | 0xcdebeb26U, 0x4e272769U, 0x7fb2b2cdU, 0xea75759fU, |
1115 | | 0x1209091bU, 0x1d83839eU, 0x582c2c74U, 0x341a1a2eU, |
1116 | | 0x361b1b2dU, 0xdc6e6eb2U, 0xb45a5aeeU, 0x5ba0a0fbU, |
1117 | | 0xa45252f6U, 0x763b3b4dU, 0xb7d6d661U, 0x7db3b3ceU, |
1118 | | 0x5229297bU, 0xdde3e33eU, 0x5e2f2f71U, 0x13848497U, |
1119 | | 0xa65353f5U, 0xb9d1d168U, 0x00000000U, 0xc1eded2cU, |
1120 | | 0x40202060U, 0xe3fcfc1fU, 0x79b1b1c8U, 0xb65b5bedU, |
1121 | | 0xd46a6abeU, 0x8dcbcb46U, 0x67bebed9U, 0x7239394bU, |
1122 | | 0x944a4adeU, 0x984c4cd4U, 0xb05858e8U, 0x85cfcf4aU, |
1123 | | 0xbbd0d06bU, 0xc5efef2aU, 0x4faaaae5U, 0xedfbfb16U, |
1124 | | 0x864343c5U, 0x9a4d4dd7U, 0x66333355U, 0x11858594U, |
1125 | | 0x8a4545cfU, 0xe9f9f910U, 0x04020206U, 0xfe7f7f81U, |
1126 | | 0xa05050f0U, 0x783c3c44U, 0x259f9fbaU, 0x4ba8a8e3U, |
1127 | | 0xa25151f3U, 0x5da3a3feU, 0x804040c0U, 0x058f8f8aU, |
1128 | | 0x3f9292adU, 0x219d9dbcU, 0x70383848U, 0xf1f5f504U, |
1129 | | 0x63bcbcdfU, 0x77b6b6c1U, 0xafdada75U, 0x42212163U, |
1130 | | 0x20101030U, 0xe5ffff1aU, 0xfdf3f30eU, 0xbfd2d26dU, |
1131 | | 0x81cdcd4cU, 0x180c0c14U, 0x26131335U, 0xc3ecec2fU, |
1132 | | 0xbe5f5fe1U, 0x359797a2U, 0x884444ccU, 0x2e171739U, |
1133 | | 0x93c4c457U, 0x55a7a7f2U, 0xfc7e7e82U, 0x7a3d3d47U, |
1134 | | 0xc86464acU, 0xba5d5de7U, 0x3219192bU, 0xe6737395U, |
1135 | | 0xc06060a0U, 0x19818198U, 0x9e4f4fd1U, 0xa3dcdc7fU, |
1136 | | 0x44222266U, 0x542a2a7eU, 0x3b9090abU, 0x0b888883U, |
1137 | | 0x8c4646caU, 0xc7eeee29U, 0x6bb8b8d3U, 0x2814143cU, |
1138 | | 0xa7dede79U, 0xbc5e5ee2U, 0x160b0b1dU, 0xaddbdb76U, |
1139 | | 0xdbe0e03bU, 0x64323256U, 0x743a3a4eU, 0x140a0a1eU, |
1140 | | 0x924949dbU, 0x0c06060aU, 0x4824246cU, 0xb85c5ce4U, |
1141 | | 0x9fc2c25dU, 0xbdd3d36eU, 0x43acacefU, 0xc46262a6U, |
1142 | | 0x399191a8U, 0x319595a4U, 0xd3e4e437U, 0xf279798bU, |
1143 | | 0xd5e7e732U, 0x8bc8c843U, 0x6e373759U, 0xda6d6db7U, |
1144 | | 0x018d8d8cU, 0xb1d5d564U, 0x9c4e4ed2U, 0x49a9a9e0U, |
1145 | | 0xd86c6cb4U, 0xac5656faU, 0xf3f4f407U, 0xcfeaea25U, |
1146 | | 0xca6565afU, 0xf47a7a8eU, 0x47aeaee9U, 0x10080818U, |
1147 | | 0x6fbabad5U, 0xf0787888U, 0x4a25256fU, 0x5c2e2e72U, |
1148 | | 0x381c1c24U, 0x57a6a6f1U, 0x73b4b4c7U, 0x97c6c651U, |
1149 | | 0xcbe8e823U, 0xa1dddd7cU, 0xe874749cU, 0x3e1f1f21U, |
1150 | | 0x964b4bddU, 0x61bdbddcU, 0x0d8b8b86U, 0x0f8a8a85U, |
1151 | | 0xe0707090U, 0x7c3e3e42U, 0x71b5b5c4U, 0xcc6666aaU, |
1152 | | 0x904848d8U, 0x06030305U, 0xf7f6f601U, 0x1c0e0e12U, |
1153 | | 0xc26161a3U, 0x6a35355fU, 0xae5757f9U, 0x69b9b9d0U, |
1154 | | 0x17868691U, 0x99c1c158U, 0x3a1d1d27U, 0x279e9eb9U, |
1155 | | 0xd9e1e138U, 0xebf8f813U, 0x2b9898b3U, 0x22111133U, |
1156 | | 0xd26969bbU, 0xa9d9d970U, 0x078e8e89U, 0x339494a7U, |
1157 | | 0x2d9b9bb6U, 0x3c1e1e22U, 0x15878792U, 0xc9e9e920U, |
1158 | | 0x87cece49U, 0xaa5555ffU, 0x50282878U, 0xa5dfdf7aU, |
1159 | | 0x038c8c8fU, 0x59a1a1f8U, 0x09898980U, 0x1a0d0d17U, |
1160 | | 0x65bfbfdaU, 0xd7e6e631U, 0x844242c6U, 0xd06868b8U, |
1161 | | 0x824141c3U, 0x299999b0U, 0x5a2d2d77U, 0x1e0f0f11U, |
1162 | | 0x7bb0b0cbU, 0xa85454fcU, 0x6dbbbbd6U, 0x2c16163aU, |
1163 | | }, |
1164 | | { |
1165 | | 0xa5c66363U, 0x84f87c7cU, 0x99ee7777U, 0x8df67b7bU, |
1166 | | 0x0dfff2f2U, 0xbdd66b6bU, 0xb1de6f6fU, 0x5491c5c5U, |
1167 | | 0x50603030U, 0x03020101U, 0xa9ce6767U, 0x7d562b2bU, |
1168 | | 0x19e7fefeU, 0x62b5d7d7U, 0xe64dababU, 0x9aec7676U, |
1169 | | 0x458fcacaU, 0x9d1f8282U, 0x4089c9c9U, 0x87fa7d7dU, |
1170 | | 0x15effafaU, 0xebb25959U, 0xc98e4747U, 0x0bfbf0f0U, |
1171 | | 0xec41adadU, 0x67b3d4d4U, 0xfd5fa2a2U, 0xea45afafU, |
1172 | | 0xbf239c9cU, 0xf753a4a4U, 0x96e47272U, 0x5b9bc0c0U, |
1173 | | 0xc275b7b7U, 0x1ce1fdfdU, 0xae3d9393U, 0x6a4c2626U, |
1174 | | 0x5a6c3636U, 0x417e3f3fU, 0x02f5f7f7U, 0x4f83ccccU, |
1175 | | 0x5c683434U, 0xf451a5a5U, 0x34d1e5e5U, 0x08f9f1f1U, |
1176 | | 0x93e27171U, 0x73abd8d8U, 0x53623131U, 0x3f2a1515U, |
1177 | | 0x0c080404U, 0x5295c7c7U, 0x65462323U, 0x5e9dc3c3U, |
1178 | | 0x28301818U, 0xa1379696U, 0x0f0a0505U, 0xb52f9a9aU, |
1179 | | 0x090e0707U, 0x36241212U, 0x9b1b8080U, 0x3ddfe2e2U, |
1180 | | 0x26cdebebU, 0x694e2727U, 0xcd7fb2b2U, 0x9fea7575U, |
1181 | | 0x1b120909U, 0x9e1d8383U, 0x74582c2cU, 0x2e341a1aU, |
1182 | | 0x2d361b1bU, 0xb2dc6e6eU, 0xeeb45a5aU, 0xfb5ba0a0U, |
1183 | | 0xf6a45252U, 0x4d763b3bU, 0x61b7d6d6U, 0xce7db3b3U, |
1184 | | 0x7b522929U, 0x3edde3e3U, 0x715e2f2fU, 0x97138484U, |
1185 | | 0xf5a65353U, 0x68b9d1d1U, 0x00000000U, 0x2cc1ededU, |
1186 | | 0x60402020U, 0x1fe3fcfcU, 0xc879b1b1U, 0xedb65b5bU, |
1187 | | 0xbed46a6aU, 0x468dcbcbU, 0xd967bebeU, 0x4b723939U, |
1188 | | 0xde944a4aU, 0xd4984c4cU, 0xe8b05858U, 0x4a85cfcfU, |
1189 | | 0x6bbbd0d0U, 0x2ac5efefU, 0xe54faaaaU, 0x16edfbfbU, |
1190 | | 0xc5864343U, 0xd79a4d4dU, 0x55663333U, 0x94118585U, |
1191 | | 0xcf8a4545U, 0x10e9f9f9U, 0x06040202U, 0x81fe7f7fU, |
1192 | | 0xf0a05050U, 0x44783c3cU, 0xba259f9fU, 0xe34ba8a8U, |
1193 | | 0xf3a25151U, 0xfe5da3a3U, 0xc0804040U, 0x8a058f8fU, |
1194 | | 0xad3f9292U, 0xbc219d9dU, 0x48703838U, 0x04f1f5f5U, |
1195 | | 0xdf63bcbcU, 0xc177b6b6U, 0x75afdadaU, 0x63422121U, |
1196 | | 0x30201010U, 0x1ae5ffffU, 0x0efdf3f3U, 0x6dbfd2d2U, |
1197 | | 0x4c81cdcdU, 0x14180c0cU, 0x35261313U, 0x2fc3ececU, |
1198 | | 0xe1be5f5fU, 0xa2359797U, 0xcc884444U, 0x392e1717U, |
1199 | | 0x5793c4c4U, 0xf255a7a7U, 0x82fc7e7eU, 0x477a3d3dU, |
1200 | | 0xacc86464U, 0xe7ba5d5dU, 0x2b321919U, 0x95e67373U, |
1201 | | 0xa0c06060U, 0x98198181U, 0xd19e4f4fU, 0x7fa3dcdcU, |
1202 | | 0x66442222U, 0x7e542a2aU, 0xab3b9090U, 0x830b8888U, |
1203 | | 0xca8c4646U, 0x29c7eeeeU, 0xd36bb8b8U, 0x3c281414U, |
1204 | | 0x79a7dedeU, 0xe2bc5e5eU, 0x1d160b0bU, 0x76addbdbU, |
1205 | | 0x3bdbe0e0U, 0x56643232U, 0x4e743a3aU, 0x1e140a0aU, |
1206 | | 0xdb924949U, 0x0a0c0606U, 0x6c482424U, 0xe4b85c5cU, |
1207 | | 0x5d9fc2c2U, 0x6ebdd3d3U, 0xef43acacU, 0xa6c46262U, |
1208 | | 0xa8399191U, 0xa4319595U, 0x37d3e4e4U, 0x8bf27979U, |
1209 | | 0x32d5e7e7U, 0x438bc8c8U, 0x596e3737U, 0xb7da6d6dU, |
1210 | | 0x8c018d8dU, 0x64b1d5d5U, 0xd29c4e4eU, 0xe049a9a9U, |
1211 | | 0xb4d86c6cU, 0xfaac5656U, 0x07f3f4f4U, 0x25cfeaeaU, |
1212 | | 0xafca6565U, 0x8ef47a7aU, 0xe947aeaeU, 0x18100808U, |
1213 | | 0xd56fbabaU, 0x88f07878U, 0x6f4a2525U, 0x725c2e2eU, |
1214 | | 0x24381c1cU, 0xf157a6a6U, 0xc773b4b4U, 0x5197c6c6U, |
1215 | | 0x23cbe8e8U, 0x7ca1ddddU, 0x9ce87474U, 0x213e1f1fU, |
1216 | | 0xdd964b4bU, 0xdc61bdbdU, 0x860d8b8bU, 0x850f8a8aU, |
1217 | | 0x90e07070U, 0x427c3e3eU, 0xc471b5b5U, 0xaacc6666U, |
1218 | | 0xd8904848U, 0x05060303U, 0x01f7f6f6U, 0x121c0e0eU, |
1219 | | 0xa3c26161U, 0x5f6a3535U, 0xf9ae5757U, 0xd069b9b9U, |
1220 | | 0x91178686U, 0x5899c1c1U, 0x273a1d1dU, 0xb9279e9eU, |
1221 | | 0x38d9e1e1U, 0x13ebf8f8U, 0xb32b9898U, 0x33221111U, |
1222 | | 0xbbd26969U, 0x70a9d9d9U, 0x89078e8eU, 0xa7339494U, |
1223 | | 0xb62d9b9bU, 0x223c1e1eU, 0x92158787U, 0x20c9e9e9U, |
1224 | | 0x4987ceceU, 0xffaa5555U, 0x78502828U, 0x7aa5dfdfU, |
1225 | | 0x8f038c8cU, 0xf859a1a1U, 0x80098989U, 0x171a0d0dU, |
1226 | | 0xda65bfbfU, 0x31d7e6e6U, 0xc6844242U, 0xb8d06868U, |
1227 | | 0xc3824141U, 0xb0299999U, 0x775a2d2dU, 0x111e0f0fU, |
1228 | | 0xcb7bb0b0U, 0xfca85454U, 0xd66dbbbbU, 0x3a2c1616U, |
1229 | | }, |
1230 | | { |
1231 | | 0x63a5c663U, 0x7c84f87cU, 0x7799ee77U, 0x7b8df67bU, |
1232 | | 0xf20dfff2U, 0x6bbdd66bU, 0x6fb1de6fU, 0xc55491c5U, |
1233 | | 0x30506030U, 0x01030201U, 0x67a9ce67U, 0x2b7d562bU, |
1234 | | 0xfe19e7feU, 0xd762b5d7U, 0xabe64dabU, 0x769aec76U, |
1235 | | 0xca458fcaU, 0x829d1f82U, 0xc94089c9U, 0x7d87fa7dU, |
1236 | | 0xfa15effaU, 0x59ebb259U, 0x47c98e47U, 0xf00bfbf0U, |
1237 | | 0xadec41adU, 0xd467b3d4U, 0xa2fd5fa2U, 0xafea45afU, |
1238 | | 0x9cbf239cU, 0xa4f753a4U, 0x7296e472U, 0xc05b9bc0U, |
1239 | | 0xb7c275b7U, 0xfd1ce1fdU, 0x93ae3d93U, 0x266a4c26U, |
1240 | | 0x365a6c36U, 0x3f417e3fU, 0xf702f5f7U, 0xcc4f83ccU, |
1241 | | 0x345c6834U, 0xa5f451a5U, 0xe534d1e5U, 0xf108f9f1U, |
1242 | | 0x7193e271U, 0xd873abd8U, 0x31536231U, 0x153f2a15U, |
1243 | | 0x040c0804U, 0xc75295c7U, 0x23654623U, 0xc35e9dc3U, |
1244 | | 0x18283018U, 0x96a13796U, 0x050f0a05U, 0x9ab52f9aU, |
1245 | | 0x07090e07U, 0x12362412U, 0x809b1b80U, 0xe23ddfe2U, |
1246 | | 0xeb26cdebU, 0x27694e27U, 0xb2cd7fb2U, 0x759fea75U, |
1247 | | 0x091b1209U, 0x839e1d83U, 0x2c74582cU, 0x1a2e341aU, |
1248 | | 0x1b2d361bU, 0x6eb2dc6eU, 0x5aeeb45aU, 0xa0fb5ba0U, |
1249 | | 0x52f6a452U, 0x3b4d763bU, 0xd661b7d6U, 0xb3ce7db3U, |
1250 | | 0x297b5229U, 0xe33edde3U, 0x2f715e2fU, 0x84971384U, |
1251 | | 0x53f5a653U, 0xd168b9d1U, 0x00000000U, 0xed2cc1edU, |
1252 | | 0x20604020U, 0xfc1fe3fcU, 0xb1c879b1U, 0x5bedb65bU, |
1253 | | 0x6abed46aU, 0xcb468dcbU, 0xbed967beU, 0x394b7239U, |
1254 | | 0x4ade944aU, 0x4cd4984cU, 0x58e8b058U, 0xcf4a85cfU, |
1255 | | 0xd06bbbd0U, 0xef2ac5efU, 0xaae54faaU, 0xfb16edfbU, |
1256 | | 0x43c58643U, 0x4dd79a4dU, 0x33556633U, 0x85941185U, |
1257 | | 0x45cf8a45U, 0xf910e9f9U, 0x02060402U, 0x7f81fe7fU, |
1258 | | 0x50f0a050U, 0x3c44783cU, 0x9fba259fU, 0xa8e34ba8U, |
1259 | | 0x51f3a251U, 0xa3fe5da3U, 0x40c08040U, 0x8f8a058fU, |
1260 | | 0x92ad3f92U, 0x9dbc219dU, 0x38487038U, 0xf504f1f5U, |
1261 | | 0xbcdf63bcU, 0xb6c177b6U, 0xda75afdaU, 0x21634221U, |
1262 | | 0x10302010U, 0xff1ae5ffU, 0xf30efdf3U, 0xd26dbfd2U, |
1263 | | 0xcd4c81cdU, 0x0c14180cU, 0x13352613U, 0xec2fc3ecU, |
1264 | | 0x5fe1be5fU, 0x97a23597U, 0x44cc8844U, 0x17392e17U, |
1265 | | 0xc45793c4U, 0xa7f255a7U, 0x7e82fc7eU, 0x3d477a3dU, |
1266 | | 0x64acc864U, 0x5de7ba5dU, 0x192b3219U, 0x7395e673U, |
1267 | | 0x60a0c060U, 0x81981981U, 0x4fd19e4fU, 0xdc7fa3dcU, |
1268 | | 0x22664422U, 0x2a7e542aU, 0x90ab3b90U, 0x88830b88U, |
1269 | | 0x46ca8c46U, 0xee29c7eeU, 0xb8d36bb8U, 0x143c2814U, |
1270 | | 0xde79a7deU, 0x5ee2bc5eU, 0x0b1d160bU, 0xdb76addbU, |
1271 | | 0xe03bdbe0U, 0x32566432U, 0x3a4e743aU, 0x0a1e140aU, |
1272 | | 0x49db9249U, 0x060a0c06U, 0x246c4824U, 0x5ce4b85cU, |
1273 | | 0xc25d9fc2U, 0xd36ebdd3U, 0xacef43acU, 0x62a6c462U, |
1274 | | 0x91a83991U, 0x95a43195U, 0xe437d3e4U, 0x798bf279U, |
1275 | | 0xe732d5e7U, 0xc8438bc8U, 0x37596e37U, 0x6db7da6dU, |
1276 | | 0x8d8c018dU, 0xd564b1d5U, 0x4ed29c4eU, 0xa9e049a9U, |
1277 | | 0x6cb4d86cU, 0x56faac56U, 0xf407f3f4U, 0xea25cfeaU, |
1278 | | 0x65afca65U, 0x7a8ef47aU, 0xaee947aeU, 0x08181008U, |
1279 | | 0xbad56fbaU, 0x7888f078U, 0x256f4a25U, 0x2e725c2eU, |
1280 | | 0x1c24381cU, 0xa6f157a6U, 0xb4c773b4U, 0xc65197c6U, |
1281 | | 0xe823cbe8U, 0xdd7ca1ddU, 0x749ce874U, 0x1f213e1fU, |
1282 | | 0x4bdd964bU, 0xbddc61bdU, 0x8b860d8bU, 0x8a850f8aU, |
1283 | | 0x7090e070U, 0x3e427c3eU, 0xb5c471b5U, 0x66aacc66U, |
1284 | | 0x48d89048U, 0x03050603U, 0xf601f7f6U, 0x0e121c0eU, |
1285 | | 0x61a3c261U, 0x355f6a35U, 0x57f9ae57U, 0xb9d069b9U, |
1286 | | 0x86911786U, 0xc15899c1U, 0x1d273a1dU, 0x9eb9279eU, |
1287 | | 0xe138d9e1U, 0xf813ebf8U, 0x98b32b98U, 0x11332211U, |
1288 | | 0x69bbd269U, 0xd970a9d9U, 0x8e89078eU, 0x94a73394U, |
1289 | | 0x9bb62d9bU, 0x1e223c1eU, 0x87921587U, 0xe920c9e9U, |
1290 | | 0xce4987ceU, 0x55ffaa55U, 0x28785028U, 0xdf7aa5dfU, |
1291 | | 0x8c8f038cU, 0xa1f859a1U, 0x89800989U, 0x0d171a0dU, |
1292 | | 0xbfda65bfU, 0xe631d7e6U, 0x42c68442U, 0x68b8d068U, |
1293 | | 0x41c38241U, 0x99b02999U, 0x2d775a2dU, 0x0f111e0fU, |
1294 | | 0xb0cb7bb0U, 0x54fca854U, 0xbbd66dbbU, 0x163a2c16U, |
1295 | | }, |
1296 | | { |
1297 | | 0x6363a5c6U, 0x7c7c84f8U, 0x777799eeU, 0x7b7b8df6U, |
1298 | | 0xf2f20dffU, 0x6b6bbdd6U, 0x6f6fb1deU, 0xc5c55491U, |
1299 | | 0x30305060U, 0x01010302U, 0x6767a9ceU, 0x2b2b7d56U, |
1300 | | 0xfefe19e7U, 0xd7d762b5U, 0xababe64dU, 0x76769aecU, |
1301 | | 0xcaca458fU, 0x82829d1fU, 0xc9c94089U, 0x7d7d87faU, |
1302 | | 0xfafa15efU, 0x5959ebb2U, 0x4747c98eU, 0xf0f00bfbU, |
1303 | | 0xadadec41U, 0xd4d467b3U, 0xa2a2fd5fU, 0xafafea45U, |
1304 | | 0x9c9cbf23U, 0xa4a4f753U, 0x727296e4U, 0xc0c05b9bU, |
1305 | | 0xb7b7c275U, 0xfdfd1ce1U, 0x9393ae3dU, 0x26266a4cU, |
1306 | | 0x36365a6cU, 0x3f3f417eU, 0xf7f702f5U, 0xcccc4f83U, |
1307 | | 0x34345c68U, 0xa5a5f451U, 0xe5e534d1U, 0xf1f108f9U, |
1308 | | 0x717193e2U, 0xd8d873abU, 0x31315362U, 0x15153f2aU, |
1309 | | 0x04040c08U, 0xc7c75295U, 0x23236546U, 0xc3c35e9dU, |
1310 | | 0x18182830U, 0x9696a137U, 0x05050f0aU, 0x9a9ab52fU, |
1311 | | 0x0707090eU, 0x12123624U, 0x80809b1bU, 0xe2e23ddfU, |
1312 | | 0xebeb26cdU, 0x2727694eU, 0xb2b2cd7fU, 0x75759feaU, |
1313 | | 0x09091b12U, 0x83839e1dU, 0x2c2c7458U, 0x1a1a2e34U, |
1314 | | 0x1b1b2d36U, 0x6e6eb2dcU, 0x5a5aeeb4U, 0xa0a0fb5bU, |
1315 | | 0x5252f6a4U, 0x3b3b4d76U, 0xd6d661b7U, 0xb3b3ce7dU, |
1316 | | 0x29297b52U, 0xe3e33eddU, 0x2f2f715eU, 0x84849713U, |
1317 | | 0x5353f5a6U, 0xd1d168b9U, 0x00000000U, 0xeded2cc1U, |
1318 | | 0x20206040U, 0xfcfc1fe3U, 0xb1b1c879U, 0x5b5bedb6U, |
1319 | | 0x6a6abed4U, 0xcbcb468dU, 0xbebed967U, 0x39394b72U, |
1320 | | 0x4a4ade94U, 0x4c4cd498U, 0x5858e8b0U, 0xcfcf4a85U, |
1321 | | 0xd0d06bbbU, 0xefef2ac5U, 0xaaaae54fU, 0xfbfb16edU, |
1322 | | 0x4343c586U, 0x4d4dd79aU, 0x33335566U, 0x85859411U, |
1323 | | 0x4545cf8aU, 0xf9f910e9U, 0x02020604U, 0x7f7f81feU, |
1324 | | 0x5050f0a0U, 0x3c3c4478U, 0x9f9fba25U, 0xa8a8e34bU, |
1325 | | 0x5151f3a2U, 0xa3a3fe5dU, 0x4040c080U, 0x8f8f8a05U, |
1326 | | 0x9292ad3fU, 0x9d9dbc21U, 0x38384870U, 0xf5f504f1U, |
1327 | | 0xbcbcdf63U, 0xb6b6c177U, 0xdada75afU, 0x21216342U, |
1328 | | 0x10103020U, 0xffff1ae5U, 0xf3f30efdU, 0xd2d26dbfU, |
1329 | | 0xcdcd4c81U, 0x0c0c1418U, 0x13133526U, 0xecec2fc3U, |
1330 | | 0x5f5fe1beU, 0x9797a235U, 0x4444cc88U, 0x1717392eU, |
1331 | | 0xc4c45793U, 0xa7a7f255U, 0x7e7e82fcU, 0x3d3d477aU, |
1332 | | 0x6464acc8U, 0x5d5de7baU, 0x19192b32U, 0x737395e6U, |
1333 | | 0x6060a0c0U, 0x81819819U, 0x4f4fd19eU, 0xdcdc7fa3U, |
1334 | | 0x22226644U, 0x2a2a7e54U, 0x9090ab3bU, 0x8888830bU, |
1335 | | 0x4646ca8cU, 0xeeee29c7U, 0xb8b8d36bU, 0x14143c28U, |
1336 | | 0xdede79a7U, 0x5e5ee2bcU, 0x0b0b1d16U, 0xdbdb76adU, |
1337 | | 0xe0e03bdbU, 0x32325664U, 0x3a3a4e74U, 0x0a0a1e14U, |
1338 | | 0x4949db92U, 0x06060a0cU, 0x24246c48U, 0x5c5ce4b8U, |
1339 | | 0xc2c25d9fU, 0xd3d36ebdU, 0xacacef43U, 0x6262a6c4U, |
1340 | | 0x9191a839U, 0x9595a431U, 0xe4e437d3U, 0x79798bf2U, |
1341 | | 0xe7e732d5U, 0xc8c8438bU, 0x3737596eU, 0x6d6db7daU, |
1342 | | 0x8d8d8c01U, 0xd5d564b1U, 0x4e4ed29cU, 0xa9a9e049U, |
1343 | | 0x6c6cb4d8U, 0x5656faacU, 0xf4f407f3U, 0xeaea25cfU, |
1344 | | 0x6565afcaU, 0x7a7a8ef4U, 0xaeaee947U, 0x08081810U, |
1345 | | 0xbabad56fU, 0x787888f0U, 0x25256f4aU, 0x2e2e725cU, |
1346 | | 0x1c1c2438U, 0xa6a6f157U, 0xb4b4c773U, 0xc6c65197U, |
1347 | | 0xe8e823cbU, 0xdddd7ca1U, 0x74749ce8U, 0x1f1f213eU, |
1348 | | 0x4b4bdd96U, 0xbdbddc61U, 0x8b8b860dU, 0x8a8a850fU, |
1349 | | 0x707090e0U, 0x3e3e427cU, 0xb5b5c471U, 0x6666aaccU, |
1350 | | 0x4848d890U, 0x03030506U, 0xf6f601f7U, 0x0e0e121cU, |
1351 | | 0x6161a3c2U, 0x35355f6aU, 0x5757f9aeU, 0xb9b9d069U, |
1352 | | 0x86869117U, 0xc1c15899U, 0x1d1d273aU, 0x9e9eb927U, |
1353 | | 0xe1e138d9U, 0xf8f813ebU, 0x9898b32bU, 0x11113322U, |
1354 | | 0x6969bbd2U, 0xd9d970a9U, 0x8e8e8907U, 0x9494a733U, |
1355 | | 0x9b9bb62dU, 0x1e1e223cU, 0x87879215U, 0xe9e920c9U, |
1356 | | 0xcece4987U, 0x5555ffaaU, 0x28287850U, 0xdfdf7aa5U, |
1357 | | 0x8c8c8f03U, 0xa1a1f859U, 0x89898009U, 0x0d0d171aU, |
1358 | | 0xbfbfda65U, 0xe6e631d7U, 0x4242c684U, 0x6868b8d0U, |
1359 | | 0x4141c382U, 0x9999b029U, 0x2d2d775aU, 0x0f0f111eU, |
1360 | | 0xb0b0cb7bU, 0x5454fca8U, 0xbbbbd66dU, 0x16163a2cU, |
1361 | | } |
1362 | | }; |
1363 | | |
1364 | | #ifdef HAVE_AES_DECRYPT |
1365 | | static const FLASH_QUALIFIER word32 Td[4][256] = { |
1366 | | { |
1367 | | 0x51f4a750U, 0x7e416553U, 0x1a17a4c3U, 0x3a275e96U, |
1368 | | 0x3bab6bcbU, 0x1f9d45f1U, 0xacfa58abU, 0x4be30393U, |
1369 | | 0x2030fa55U, 0xad766df6U, 0x88cc7691U, 0xf5024c25U, |
1370 | | 0x4fe5d7fcU, 0xc52acbd7U, 0x26354480U, 0xb562a38fU, |
1371 | | 0xdeb15a49U, 0x25ba1b67U, 0x45ea0e98U, 0x5dfec0e1U, |
1372 | | 0xc32f7502U, 0x814cf012U, 0x8d4697a3U, 0x6bd3f9c6U, |
1373 | | 0x038f5fe7U, 0x15929c95U, 0xbf6d7aebU, 0x955259daU, |
1374 | | 0xd4be832dU, 0x587421d3U, 0x49e06929U, 0x8ec9c844U, |
1375 | | 0x75c2896aU, 0xf48e7978U, 0x99583e6bU, 0x27b971ddU, |
1376 | | 0xbee14fb6U, 0xf088ad17U, 0xc920ac66U, 0x7dce3ab4U, |
1377 | | 0x63df4a18U, 0xe51a3182U, 0x97513360U, 0x62537f45U, |
1378 | | 0xb16477e0U, 0xbb6bae84U, 0xfe81a01cU, 0xf9082b94U, |
1379 | | 0x70486858U, 0x8f45fd19U, 0x94de6c87U, 0x527bf8b7U, |
1380 | | 0xab73d323U, 0x724b02e2U, 0xe31f8f57U, 0x6655ab2aU, |
1381 | | 0xb2eb2807U, 0x2fb5c203U, 0x86c57b9aU, 0xd33708a5U, |
1382 | | 0x302887f2U, 0x23bfa5b2U, 0x02036abaU, 0xed16825cU, |
1383 | | 0x8acf1c2bU, 0xa779b492U, 0xf307f2f0U, 0x4e69e2a1U, |
1384 | | 0x65daf4cdU, 0x0605bed5U, 0xd134621fU, 0xc4a6fe8aU, |
1385 | | 0x342e539dU, 0xa2f355a0U, 0x058ae132U, 0xa4f6eb75U, |
1386 | | 0x0b83ec39U, 0x4060efaaU, 0x5e719f06U, 0xbd6e1051U, |
1387 | | 0x3e218af9U, 0x96dd063dU, 0xdd3e05aeU, 0x4de6bd46U, |
1388 | | 0x91548db5U, 0x71c45d05U, 0x0406d46fU, 0x605015ffU, |
1389 | | 0x1998fb24U, 0xd6bde997U, 0x894043ccU, 0x67d99e77U, |
1390 | | 0xb0e842bdU, 0x07898b88U, 0xe7195b38U, 0x79c8eedbU, |
1391 | | 0xa17c0a47U, 0x7c420fe9U, 0xf8841ec9U, 0x00000000U, |
1392 | | 0x09808683U, 0x322bed48U, 0x1e1170acU, 0x6c5a724eU, |
1393 | | 0xfd0efffbU, 0x0f853856U, 0x3daed51eU, 0x362d3927U, |
1394 | | 0x0a0fd964U, 0x685ca621U, 0x9b5b54d1U, 0x24362e3aU, |
1395 | | 0x0c0a67b1U, 0x9357e70fU, 0xb4ee96d2U, 0x1b9b919eU, |
1396 | | 0x80c0c54fU, 0x61dc20a2U, 0x5a774b69U, 0x1c121a16U, |
1397 | | 0xe293ba0aU, 0xc0a02ae5U, 0x3c22e043U, 0x121b171dU, |
1398 | | 0x0e090d0bU, 0xf28bc7adU, 0x2db6a8b9U, 0x141ea9c8U, |
1399 | | 0x57f11985U, 0xaf75074cU, 0xee99ddbbU, 0xa37f60fdU, |
1400 | | 0xf701269fU, 0x5c72f5bcU, 0x44663bc5U, 0x5bfb7e34U, |
1401 | | 0x8b432976U, 0xcb23c6dcU, 0xb6edfc68U, 0xb8e4f163U, |
1402 | | 0xd731dccaU, 0x42638510U, 0x13972240U, 0x84c61120U, |
1403 | | 0x854a247dU, 0xd2bb3df8U, 0xaef93211U, 0xc729a16dU, |
1404 | | 0x1d9e2f4bU, 0xdcb230f3U, 0x0d8652ecU, 0x77c1e3d0U, |
1405 | | 0x2bb3166cU, 0xa970b999U, 0x119448faU, 0x47e96422U, |
1406 | | 0xa8fc8cc4U, 0xa0f03f1aU, 0x567d2cd8U, 0x223390efU, |
1407 | | 0x87494ec7U, 0xd938d1c1U, 0x8ccaa2feU, 0x98d40b36U, |
1408 | | 0xa6f581cfU, 0xa57ade28U, 0xdab78e26U, 0x3fadbfa4U, |
1409 | | 0x2c3a9de4U, 0x5078920dU, 0x6a5fcc9bU, 0x547e4662U, |
1410 | | 0xf68d13c2U, 0x90d8b8e8U, 0x2e39f75eU, 0x82c3aff5U, |
1411 | | 0x9f5d80beU, 0x69d0937cU, 0x6fd52da9U, 0xcf2512b3U, |
1412 | | 0xc8ac993bU, 0x10187da7U, 0xe89c636eU, 0xdb3bbb7bU, |
1413 | | 0xcd267809U, 0x6e5918f4U, 0xec9ab701U, 0x834f9aa8U, |
1414 | | 0xe6956e65U, 0xaaffe67eU, 0x21bccf08U, 0xef15e8e6U, |
1415 | | 0xbae79bd9U, 0x4a6f36ceU, 0xea9f09d4U, 0x29b07cd6U, |
1416 | | 0x31a4b2afU, 0x2a3f2331U, 0xc6a59430U, 0x35a266c0U, |
1417 | | 0x744ebc37U, 0xfc82caa6U, 0xe090d0b0U, 0x33a7d815U, |
1418 | | 0xf104984aU, 0x41ecdaf7U, 0x7fcd500eU, 0x1791f62fU, |
1419 | | 0x764dd68dU, 0x43efb04dU, 0xccaa4d54U, 0xe49604dfU, |
1420 | | 0x9ed1b5e3U, 0x4c6a881bU, 0xc12c1fb8U, 0x4665517fU, |
1421 | | 0x9d5eea04U, 0x018c355dU, 0xfa877473U, 0xfb0b412eU, |
1422 | | 0xb3671d5aU, 0x92dbd252U, 0xe9105633U, 0x6dd64713U, |
1423 | | 0x9ad7618cU, 0x37a10c7aU, 0x59f8148eU, 0xeb133c89U, |
1424 | | 0xcea927eeU, 0xb761c935U, 0xe11ce5edU, 0x7a47b13cU, |
1425 | | 0x9cd2df59U, 0x55f2733fU, 0x1814ce79U, 0x73c737bfU, |
1426 | | 0x53f7cdeaU, 0x5ffdaa5bU, 0xdf3d6f14U, 0x7844db86U, |
1427 | | 0xcaaff381U, 0xb968c43eU, 0x3824342cU, 0xc2a3405fU, |
1428 | | 0x161dc372U, 0xbce2250cU, 0x283c498bU, 0xff0d9541U, |
1429 | | 0x39a80171U, 0x080cb3deU, 0xd8b4e49cU, 0x6456c190U, |
1430 | | 0x7bcb8461U, 0xd532b670U, 0x486c5c74U, 0xd0b85742U, |
1431 | | }, |
1432 | | { |
1433 | | 0x5051f4a7U, 0x537e4165U, 0xc31a17a4U, 0x963a275eU, |
1434 | | 0xcb3bab6bU, 0xf11f9d45U, 0xabacfa58U, 0x934be303U, |
1435 | | 0x552030faU, 0xf6ad766dU, 0x9188cc76U, 0x25f5024cU, |
1436 | | 0xfc4fe5d7U, 0xd7c52acbU, 0x80263544U, 0x8fb562a3U, |
1437 | | 0x49deb15aU, 0x6725ba1bU, 0x9845ea0eU, 0xe15dfec0U, |
1438 | | 0x02c32f75U, 0x12814cf0U, 0xa38d4697U, 0xc66bd3f9U, |
1439 | | 0xe7038f5fU, 0x9515929cU, 0xebbf6d7aU, 0xda955259U, |
1440 | | 0x2dd4be83U, 0xd3587421U, 0x2949e069U, 0x448ec9c8U, |
1441 | | 0x6a75c289U, 0x78f48e79U, 0x6b99583eU, 0xdd27b971U, |
1442 | | 0xb6bee14fU, 0x17f088adU, 0x66c920acU, 0xb47dce3aU, |
1443 | | 0x1863df4aU, 0x82e51a31U, 0x60975133U, 0x4562537fU, |
1444 | | 0xe0b16477U, 0x84bb6baeU, 0x1cfe81a0U, 0x94f9082bU, |
1445 | | 0x58704868U, 0x198f45fdU, 0x8794de6cU, 0xb7527bf8U, |
1446 | | 0x23ab73d3U, 0xe2724b02U, 0x57e31f8fU, 0x2a6655abU, |
1447 | | 0x07b2eb28U, 0x032fb5c2U, 0x9a86c57bU, 0xa5d33708U, |
1448 | | 0xf2302887U, 0xb223bfa5U, 0xba02036aU, 0x5ced1682U, |
1449 | | 0x2b8acf1cU, 0x92a779b4U, 0xf0f307f2U, 0xa14e69e2U, |
1450 | | 0xcd65daf4U, 0xd50605beU, 0x1fd13462U, 0x8ac4a6feU, |
1451 | | 0x9d342e53U, 0xa0a2f355U, 0x32058ae1U, 0x75a4f6ebU, |
1452 | | 0x390b83ecU, 0xaa4060efU, 0x065e719fU, 0x51bd6e10U, |
1453 | | 0xf93e218aU, 0x3d96dd06U, 0xaedd3e05U, 0x464de6bdU, |
1454 | | 0xb591548dU, 0x0571c45dU, 0x6f0406d4U, 0xff605015U, |
1455 | | 0x241998fbU, 0x97d6bde9U, 0xcc894043U, 0x7767d99eU, |
1456 | | 0xbdb0e842U, 0x8807898bU, 0x38e7195bU, 0xdb79c8eeU, |
1457 | | 0x47a17c0aU, 0xe97c420fU, 0xc9f8841eU, 0x00000000U, |
1458 | | 0x83098086U, 0x48322bedU, 0xac1e1170U, 0x4e6c5a72U, |
1459 | | 0xfbfd0effU, 0x560f8538U, 0x1e3daed5U, 0x27362d39U, |
1460 | | 0x640a0fd9U, 0x21685ca6U, 0xd19b5b54U, 0x3a24362eU, |
1461 | | 0xb10c0a67U, 0x0f9357e7U, 0xd2b4ee96U, 0x9e1b9b91U, |
1462 | | 0x4f80c0c5U, 0xa261dc20U, 0x695a774bU, 0x161c121aU, |
1463 | | 0x0ae293baU, 0xe5c0a02aU, 0x433c22e0U, 0x1d121b17U, |
1464 | | 0x0b0e090dU, 0xadf28bc7U, 0xb92db6a8U, 0xc8141ea9U, |
1465 | | 0x8557f119U, 0x4caf7507U, 0xbbee99ddU, 0xfda37f60U, |
1466 | | 0x9ff70126U, 0xbc5c72f5U, 0xc544663bU, 0x345bfb7eU, |
1467 | | 0x768b4329U, 0xdccb23c6U, 0x68b6edfcU, 0x63b8e4f1U, |
1468 | | 0xcad731dcU, 0x10426385U, 0x40139722U, 0x2084c611U, |
1469 | | 0x7d854a24U, 0xf8d2bb3dU, 0x11aef932U, 0x6dc729a1U, |
1470 | | 0x4b1d9e2fU, 0xf3dcb230U, 0xec0d8652U, 0xd077c1e3U, |
1471 | | 0x6c2bb316U, 0x99a970b9U, 0xfa119448U, 0x2247e964U, |
1472 | | 0xc4a8fc8cU, 0x1aa0f03fU, 0xd8567d2cU, 0xef223390U, |
1473 | | 0xc787494eU, 0xc1d938d1U, 0xfe8ccaa2U, 0x3698d40bU, |
1474 | | 0xcfa6f581U, 0x28a57adeU, 0x26dab78eU, 0xa43fadbfU, |
1475 | | 0xe42c3a9dU, 0x0d507892U, 0x9b6a5fccU, 0x62547e46U, |
1476 | | 0xc2f68d13U, 0xe890d8b8U, 0x5e2e39f7U, 0xf582c3afU, |
1477 | | 0xbe9f5d80U, 0x7c69d093U, 0xa96fd52dU, 0xb3cf2512U, |
1478 | | 0x3bc8ac99U, 0xa710187dU, 0x6ee89c63U, 0x7bdb3bbbU, |
1479 | | 0x09cd2678U, 0xf46e5918U, 0x01ec9ab7U, 0xa8834f9aU, |
1480 | | 0x65e6956eU, 0x7eaaffe6U, 0x0821bccfU, 0xe6ef15e8U, |
1481 | | 0xd9bae79bU, 0xce4a6f36U, 0xd4ea9f09U, 0xd629b07cU, |
1482 | | 0xaf31a4b2U, 0x312a3f23U, 0x30c6a594U, 0xc035a266U, |
1483 | | 0x37744ebcU, 0xa6fc82caU, 0xb0e090d0U, 0x1533a7d8U, |
1484 | | 0x4af10498U, 0xf741ecdaU, 0x0e7fcd50U, 0x2f1791f6U, |
1485 | | 0x8d764dd6U, 0x4d43efb0U, 0x54ccaa4dU, 0xdfe49604U, |
1486 | | 0xe39ed1b5U, 0x1b4c6a88U, 0xb8c12c1fU, 0x7f466551U, |
1487 | | 0x049d5eeaU, 0x5d018c35U, 0x73fa8774U, 0x2efb0b41U, |
1488 | | 0x5ab3671dU, 0x5292dbd2U, 0x33e91056U, 0x136dd647U, |
1489 | | 0x8c9ad761U, 0x7a37a10cU, 0x8e59f814U, 0x89eb133cU, |
1490 | | 0xeecea927U, 0x35b761c9U, 0xede11ce5U, 0x3c7a47b1U, |
1491 | | 0x599cd2dfU, 0x3f55f273U, 0x791814ceU, 0xbf73c737U, |
1492 | | 0xea53f7cdU, 0x5b5ffdaaU, 0x14df3d6fU, 0x867844dbU, |
1493 | | 0x81caaff3U, 0x3eb968c4U, 0x2c382434U, 0x5fc2a340U, |
1494 | | 0x72161dc3U, 0x0cbce225U, 0x8b283c49U, 0x41ff0d95U, |
1495 | | 0x7139a801U, 0xde080cb3U, 0x9cd8b4e4U, 0x906456c1U, |
1496 | | 0x617bcb84U, 0x70d532b6U, 0x74486c5cU, 0x42d0b857U, |
1497 | | }, |
1498 | | { |
1499 | | 0xa75051f4U, 0x65537e41U, 0xa4c31a17U, 0x5e963a27U, |
1500 | | 0x6bcb3babU, 0x45f11f9dU, 0x58abacfaU, 0x03934be3U, |
1501 | | 0xfa552030U, 0x6df6ad76U, 0x769188ccU, 0x4c25f502U, |
1502 | | 0xd7fc4fe5U, 0xcbd7c52aU, 0x44802635U, 0xa38fb562U, |
1503 | | 0x5a49deb1U, 0x1b6725baU, 0x0e9845eaU, 0xc0e15dfeU, |
1504 | | 0x7502c32fU, 0xf012814cU, 0x97a38d46U, 0xf9c66bd3U, |
1505 | | 0x5fe7038fU, 0x9c951592U, 0x7aebbf6dU, 0x59da9552U, |
1506 | | 0x832dd4beU, 0x21d35874U, 0x692949e0U, 0xc8448ec9U, |
1507 | | 0x896a75c2U, 0x7978f48eU, 0x3e6b9958U, 0x71dd27b9U, |
1508 | | 0x4fb6bee1U, 0xad17f088U, 0xac66c920U, 0x3ab47dceU, |
1509 | | 0x4a1863dfU, 0x3182e51aU, 0x33609751U, 0x7f456253U, |
1510 | | 0x77e0b164U, 0xae84bb6bU, 0xa01cfe81U, 0x2b94f908U, |
1511 | | 0x68587048U, 0xfd198f45U, 0x6c8794deU, 0xf8b7527bU, |
1512 | | 0xd323ab73U, 0x02e2724bU, 0x8f57e31fU, 0xab2a6655U, |
1513 | | 0x2807b2ebU, 0xc2032fb5U, 0x7b9a86c5U, 0x08a5d337U, |
1514 | | 0x87f23028U, 0xa5b223bfU, 0x6aba0203U, 0x825ced16U, |
1515 | | 0x1c2b8acfU, 0xb492a779U, 0xf2f0f307U, 0xe2a14e69U, |
1516 | | 0xf4cd65daU, 0xbed50605U, 0x621fd134U, 0xfe8ac4a6U, |
1517 | | 0x539d342eU, 0x55a0a2f3U, 0xe132058aU, 0xeb75a4f6U, |
1518 | | 0xec390b83U, 0xefaa4060U, 0x9f065e71U, 0x1051bd6eU, |
1519 | | |
1520 | | 0x8af93e21U, 0x063d96ddU, 0x05aedd3eU, 0xbd464de6U, |
1521 | | 0x8db59154U, 0x5d0571c4U, 0xd46f0406U, 0x15ff6050U, |
1522 | | 0xfb241998U, 0xe997d6bdU, 0x43cc8940U, 0x9e7767d9U, |
1523 | | 0x42bdb0e8U, 0x8b880789U, 0x5b38e719U, 0xeedb79c8U, |
1524 | | 0x0a47a17cU, 0x0fe97c42U, 0x1ec9f884U, 0x00000000U, |
1525 | | 0x86830980U, 0xed48322bU, 0x70ac1e11U, 0x724e6c5aU, |
1526 | | 0xfffbfd0eU, 0x38560f85U, 0xd51e3daeU, 0x3927362dU, |
1527 | | 0xd9640a0fU, 0xa621685cU, 0x54d19b5bU, 0x2e3a2436U, |
1528 | | 0x67b10c0aU, 0xe70f9357U, 0x96d2b4eeU, 0x919e1b9bU, |
1529 | | 0xc54f80c0U, 0x20a261dcU, 0x4b695a77U, 0x1a161c12U, |
1530 | | 0xba0ae293U, 0x2ae5c0a0U, 0xe0433c22U, 0x171d121bU, |
1531 | | 0x0d0b0e09U, 0xc7adf28bU, 0xa8b92db6U, 0xa9c8141eU, |
1532 | | 0x198557f1U, 0x074caf75U, 0xddbbee99U, 0x60fda37fU, |
1533 | | 0x269ff701U, 0xf5bc5c72U, 0x3bc54466U, 0x7e345bfbU, |
1534 | | 0x29768b43U, 0xc6dccb23U, 0xfc68b6edU, 0xf163b8e4U, |
1535 | | 0xdccad731U, 0x85104263U, 0x22401397U, 0x112084c6U, |
1536 | | 0x247d854aU, 0x3df8d2bbU, 0x3211aef9U, 0xa16dc729U, |
1537 | | 0x2f4b1d9eU, 0x30f3dcb2U, 0x52ec0d86U, 0xe3d077c1U, |
1538 | | 0x166c2bb3U, 0xb999a970U, 0x48fa1194U, 0x642247e9U, |
1539 | | 0x8cc4a8fcU, 0x3f1aa0f0U, 0x2cd8567dU, 0x90ef2233U, |
1540 | | 0x4ec78749U, 0xd1c1d938U, 0xa2fe8ccaU, 0x0b3698d4U, |
1541 | | 0x81cfa6f5U, 0xde28a57aU, 0x8e26dab7U, 0xbfa43fadU, |
1542 | | 0x9de42c3aU, 0x920d5078U, 0xcc9b6a5fU, 0x4662547eU, |
1543 | | 0x13c2f68dU, 0xb8e890d8U, 0xf75e2e39U, 0xaff582c3U, |
1544 | | 0x80be9f5dU, 0x937c69d0U, 0x2da96fd5U, 0x12b3cf25U, |
1545 | | 0x993bc8acU, 0x7da71018U, 0x636ee89cU, 0xbb7bdb3bU, |
1546 | | 0x7809cd26U, 0x18f46e59U, 0xb701ec9aU, 0x9aa8834fU, |
1547 | | 0x6e65e695U, 0xe67eaaffU, 0xcf0821bcU, 0xe8e6ef15U, |
1548 | | 0x9bd9bae7U, 0x36ce4a6fU, 0x09d4ea9fU, 0x7cd629b0U, |
1549 | | 0xb2af31a4U, 0x23312a3fU, 0x9430c6a5U, 0x66c035a2U, |
1550 | | 0xbc37744eU, 0xcaa6fc82U, 0xd0b0e090U, 0xd81533a7U, |
1551 | | 0x984af104U, 0xdaf741ecU, 0x500e7fcdU, 0xf62f1791U, |
1552 | | 0xd68d764dU, 0xb04d43efU, 0x4d54ccaaU, 0x04dfe496U, |
1553 | | 0xb5e39ed1U, 0x881b4c6aU, 0x1fb8c12cU, 0x517f4665U, |
1554 | | 0xea049d5eU, 0x355d018cU, 0x7473fa87U, 0x412efb0bU, |
1555 | | 0x1d5ab367U, 0xd25292dbU, 0x5633e910U, 0x47136dd6U, |
1556 | | 0x618c9ad7U, 0x0c7a37a1U, 0x148e59f8U, 0x3c89eb13U, |
1557 | | 0x27eecea9U, 0xc935b761U, 0xe5ede11cU, 0xb13c7a47U, |
1558 | | 0xdf599cd2U, 0x733f55f2U, 0xce791814U, 0x37bf73c7U, |
1559 | | 0xcdea53f7U, 0xaa5b5ffdU, 0x6f14df3dU, 0xdb867844U, |
1560 | | 0xf381caafU, 0xc43eb968U, 0x342c3824U, 0x405fc2a3U, |
1561 | | 0xc372161dU, 0x250cbce2U, 0x498b283cU, 0x9541ff0dU, |
1562 | | 0x017139a8U, 0xb3de080cU, 0xe49cd8b4U, 0xc1906456U, |
1563 | | 0x84617bcbU, 0xb670d532U, 0x5c74486cU, 0x5742d0b8U, |
1564 | | }, |
1565 | | { |
1566 | | 0xf4a75051U, 0x4165537eU, 0x17a4c31aU, 0x275e963aU, |
1567 | | 0xab6bcb3bU, 0x9d45f11fU, 0xfa58abacU, 0xe303934bU, |
1568 | | 0x30fa5520U, 0x766df6adU, 0xcc769188U, 0x024c25f5U, |
1569 | | 0xe5d7fc4fU, 0x2acbd7c5U, 0x35448026U, 0x62a38fb5U, |
1570 | | 0xb15a49deU, 0xba1b6725U, 0xea0e9845U, 0xfec0e15dU, |
1571 | | 0x2f7502c3U, 0x4cf01281U, 0x4697a38dU, 0xd3f9c66bU, |
1572 | | 0x8f5fe703U, 0x929c9515U, 0x6d7aebbfU, 0x5259da95U, |
1573 | | 0xbe832dd4U, 0x7421d358U, 0xe0692949U, 0xc9c8448eU, |
1574 | | 0xc2896a75U, 0x8e7978f4U, 0x583e6b99U, 0xb971dd27U, |
1575 | | 0xe14fb6beU, 0x88ad17f0U, 0x20ac66c9U, 0xce3ab47dU, |
1576 | | 0xdf4a1863U, 0x1a3182e5U, 0x51336097U, 0x537f4562U, |
1577 | | 0x6477e0b1U, 0x6bae84bbU, 0x81a01cfeU, 0x082b94f9U, |
1578 | | 0x48685870U, 0x45fd198fU, 0xde6c8794U, 0x7bf8b752U, |
1579 | | 0x73d323abU, 0x4b02e272U, 0x1f8f57e3U, 0x55ab2a66U, |
1580 | | 0xeb2807b2U, 0xb5c2032fU, 0xc57b9a86U, 0x3708a5d3U, |
1581 | | 0x2887f230U, 0xbfa5b223U, 0x036aba02U, 0x16825cedU, |
1582 | | 0xcf1c2b8aU, 0x79b492a7U, 0x07f2f0f3U, 0x69e2a14eU, |
1583 | | 0xdaf4cd65U, 0x05bed506U, 0x34621fd1U, 0xa6fe8ac4U, |
1584 | | 0x2e539d34U, 0xf355a0a2U, 0x8ae13205U, 0xf6eb75a4U, |
1585 | | 0x83ec390bU, 0x60efaa40U, 0x719f065eU, 0x6e1051bdU, |
1586 | | 0x218af93eU, 0xdd063d96U, 0x3e05aeddU, 0xe6bd464dU, |
1587 | | 0x548db591U, 0xc45d0571U, 0x06d46f04U, 0x5015ff60U, |
1588 | | 0x98fb2419U, 0xbde997d6U, 0x4043cc89U, 0xd99e7767U, |
1589 | | 0xe842bdb0U, 0x898b8807U, 0x195b38e7U, 0xc8eedb79U, |
1590 | | 0x7c0a47a1U, 0x420fe97cU, 0x841ec9f8U, 0x00000000U, |
1591 | | 0x80868309U, 0x2bed4832U, 0x1170ac1eU, 0x5a724e6cU, |
1592 | | 0x0efffbfdU, 0x8538560fU, 0xaed51e3dU, 0x2d392736U, |
1593 | | 0x0fd9640aU, 0x5ca62168U, 0x5b54d19bU, 0x362e3a24U, |
1594 | | 0x0a67b10cU, 0x57e70f93U, 0xee96d2b4U, 0x9b919e1bU, |
1595 | | 0xc0c54f80U, 0xdc20a261U, 0x774b695aU, 0x121a161cU, |
1596 | | 0x93ba0ae2U, 0xa02ae5c0U, 0x22e0433cU, 0x1b171d12U, |
1597 | | 0x090d0b0eU, 0x8bc7adf2U, 0xb6a8b92dU, 0x1ea9c814U, |
1598 | | 0xf1198557U, 0x75074cafU, 0x99ddbbeeU, 0x7f60fda3U, |
1599 | | 0x01269ff7U, 0x72f5bc5cU, 0x663bc544U, 0xfb7e345bU, |
1600 | | 0x4329768bU, 0x23c6dccbU, 0xedfc68b6U, 0xe4f163b8U, |
1601 | | 0x31dccad7U, 0x63851042U, 0x97224013U, 0xc6112084U, |
1602 | | 0x4a247d85U, 0xbb3df8d2U, 0xf93211aeU, 0x29a16dc7U, |
1603 | | 0x9e2f4b1dU, 0xb230f3dcU, 0x8652ec0dU, 0xc1e3d077U, |
1604 | | 0xb3166c2bU, 0x70b999a9U, 0x9448fa11U, 0xe9642247U, |
1605 | | 0xfc8cc4a8U, 0xf03f1aa0U, 0x7d2cd856U, 0x3390ef22U, |
1606 | | 0x494ec787U, 0x38d1c1d9U, 0xcaa2fe8cU, 0xd40b3698U, |
1607 | | 0xf581cfa6U, 0x7ade28a5U, 0xb78e26daU, 0xadbfa43fU, |
1608 | | 0x3a9de42cU, 0x78920d50U, 0x5fcc9b6aU, 0x7e466254U, |
1609 | | 0x8d13c2f6U, 0xd8b8e890U, 0x39f75e2eU, 0xc3aff582U, |
1610 | | 0x5d80be9fU, 0xd0937c69U, 0xd52da96fU, 0x2512b3cfU, |
1611 | | 0xac993bc8U, 0x187da710U, 0x9c636ee8U, 0x3bbb7bdbU, |
1612 | | 0x267809cdU, 0x5918f46eU, 0x9ab701ecU, 0x4f9aa883U, |
1613 | | 0x956e65e6U, 0xffe67eaaU, 0xbccf0821U, 0x15e8e6efU, |
1614 | | 0xe79bd9baU, 0x6f36ce4aU, 0x9f09d4eaU, 0xb07cd629U, |
1615 | | 0xa4b2af31U, 0x3f23312aU, 0xa59430c6U, 0xa266c035U, |
1616 | | 0x4ebc3774U, 0x82caa6fcU, 0x90d0b0e0U, 0xa7d81533U, |
1617 | | 0x04984af1U, 0xecdaf741U, 0xcd500e7fU, 0x91f62f17U, |
1618 | | 0x4dd68d76U, 0xefb04d43U, 0xaa4d54ccU, 0x9604dfe4U, |
1619 | | 0xd1b5e39eU, 0x6a881b4cU, 0x2c1fb8c1U, 0x65517f46U, |
1620 | | 0x5eea049dU, 0x8c355d01U, 0x877473faU, 0x0b412efbU, |
1621 | | 0x671d5ab3U, 0xdbd25292U, 0x105633e9U, 0xd647136dU, |
1622 | | 0xd7618c9aU, 0xa10c7a37U, 0xf8148e59U, 0x133c89ebU, |
1623 | | 0xa927eeceU, 0x61c935b7U, 0x1ce5ede1U, 0x47b13c7aU, |
1624 | | 0xd2df599cU, 0xf2733f55U, 0x14ce7918U, 0xc737bf73U, |
1625 | | 0xf7cdea53U, 0xfdaa5b5fU, 0x3d6f14dfU, 0x44db8678U, |
1626 | | 0xaff381caU, 0x68c43eb9U, 0x24342c38U, 0xa3405fc2U, |
1627 | | 0x1dc37216U, 0xe2250cbcU, 0x3c498b28U, 0x0d9541ffU, |
1628 | | 0xa8017139U, 0x0cb3de08U, 0xb4e49cd8U, 0x56c19064U, |
1629 | | 0xcb84617bU, 0x32b670d5U, 0x6c5c7448U, 0xb85742d0U, |
1630 | | } |
1631 | | }; |
1632 | | #endif /* HAVE_AES_DECRYPT */ |
1633 | | #endif /* WOLFSSL_AES_SMALL_TABLES */ |
1634 | | |
1635 | | #ifdef HAVE_AES_DECRYPT |
1636 | | #if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC)) \ |
1637 | | || defined(WOLFSSL_AES_DIRECT) |
1638 | | static const FLASH_QUALIFIER byte Td4[256] = |
1639 | | { |
1640 | | 0x52U, 0x09U, 0x6aU, 0xd5U, 0x30U, 0x36U, 0xa5U, 0x38U, |
1641 | | 0xbfU, 0x40U, 0xa3U, 0x9eU, 0x81U, 0xf3U, 0xd7U, 0xfbU, |
1642 | | 0x7cU, 0xe3U, 0x39U, 0x82U, 0x9bU, 0x2fU, 0xffU, 0x87U, |
1643 | | 0x34U, 0x8eU, 0x43U, 0x44U, 0xc4U, 0xdeU, 0xe9U, 0xcbU, |
1644 | | 0x54U, 0x7bU, 0x94U, 0x32U, 0xa6U, 0xc2U, 0x23U, 0x3dU, |
1645 | | 0xeeU, 0x4cU, 0x95U, 0x0bU, 0x42U, 0xfaU, 0xc3U, 0x4eU, |
1646 | | 0x08U, 0x2eU, 0xa1U, 0x66U, 0x28U, 0xd9U, 0x24U, 0xb2U, |
1647 | | 0x76U, 0x5bU, 0xa2U, 0x49U, 0x6dU, 0x8bU, 0xd1U, 0x25U, |
1648 | | 0x72U, 0xf8U, 0xf6U, 0x64U, 0x86U, 0x68U, 0x98U, 0x16U, |
1649 | | 0xd4U, 0xa4U, 0x5cU, 0xccU, 0x5dU, 0x65U, 0xb6U, 0x92U, |
1650 | | 0x6cU, 0x70U, 0x48U, 0x50U, 0xfdU, 0xedU, 0xb9U, 0xdaU, |
1651 | | 0x5eU, 0x15U, 0x46U, 0x57U, 0xa7U, 0x8dU, 0x9dU, 0x84U, |
1652 | | 0x90U, 0xd8U, 0xabU, 0x00U, 0x8cU, 0xbcU, 0xd3U, 0x0aU, |
1653 | | 0xf7U, 0xe4U, 0x58U, 0x05U, 0xb8U, 0xb3U, 0x45U, 0x06U, |
1654 | | 0xd0U, 0x2cU, 0x1eU, 0x8fU, 0xcaU, 0x3fU, 0x0fU, 0x02U, |
1655 | | 0xc1U, 0xafU, 0xbdU, 0x03U, 0x01U, 0x13U, 0x8aU, 0x6bU, |
1656 | | 0x3aU, 0x91U, 0x11U, 0x41U, 0x4fU, 0x67U, 0xdcU, 0xeaU, |
1657 | | 0x97U, 0xf2U, 0xcfU, 0xceU, 0xf0U, 0xb4U, 0xe6U, 0x73U, |
1658 | | 0x96U, 0xacU, 0x74U, 0x22U, 0xe7U, 0xadU, 0x35U, 0x85U, |
1659 | | 0xe2U, 0xf9U, 0x37U, 0xe8U, 0x1cU, 0x75U, 0xdfU, 0x6eU, |
1660 | | 0x47U, 0xf1U, 0x1aU, 0x71U, 0x1dU, 0x29U, 0xc5U, 0x89U, |
1661 | | 0x6fU, 0xb7U, 0x62U, 0x0eU, 0xaaU, 0x18U, 0xbeU, 0x1bU, |
1662 | | 0xfcU, 0x56U, 0x3eU, 0x4bU, 0xc6U, 0xd2U, 0x79U, 0x20U, |
1663 | | 0x9aU, 0xdbU, 0xc0U, 0xfeU, 0x78U, 0xcdU, 0x5aU, 0xf4U, |
1664 | | 0x1fU, 0xddU, 0xa8U, 0x33U, 0x88U, 0x07U, 0xc7U, 0x31U, |
1665 | | 0xb1U, 0x12U, 0x10U, 0x59U, 0x27U, 0x80U, 0xecU, 0x5fU, |
1666 | | 0x60U, 0x51U, 0x7fU, 0xa9U, 0x19U, 0xb5U, 0x4aU, 0x0dU, |
1667 | | 0x2dU, 0xe5U, 0x7aU, 0x9fU, 0x93U, 0xc9U, 0x9cU, 0xefU, |
1668 | | 0xa0U, 0xe0U, 0x3bU, 0x4dU, 0xaeU, 0x2aU, 0xf5U, 0xb0U, |
1669 | | 0xc8U, 0xebU, 0xbbU, 0x3cU, 0x83U, 0x53U, 0x99U, 0x61U, |
1670 | | 0x17U, 0x2bU, 0x04U, 0x7eU, 0xbaU, 0x77U, 0xd6U, 0x26U, |
1671 | | 0xe1U, 0x69U, 0x14U, 0x63U, 0x55U, 0x21U, 0x0cU, 0x7dU, |
1672 | | }; |
1673 | | #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */ |
1674 | | #endif /* HAVE_AES_DECRYPT */ |
1675 | | |
1676 | 58.5k | #define GETBYTE(x, y) (word32)((byte)((x) >> (8 * (y)))) |
1677 | | |
1678 | | #ifdef WOLFSSL_AES_SMALL_TABLES |
1679 | | static const byte Tsbox[256] = { |
1680 | | 0x63U, 0x7cU, 0x77U, 0x7bU, 0xf2U, 0x6bU, 0x6fU, 0xc5U, |
1681 | | 0x30U, 0x01U, 0x67U, 0x2bU, 0xfeU, 0xd7U, 0xabU, 0x76U, |
1682 | | 0xcaU, 0x82U, 0xc9U, 0x7dU, 0xfaU, 0x59U, 0x47U, 0xf0U, |
1683 | | 0xadU, 0xd4U, 0xa2U, 0xafU, 0x9cU, 0xa4U, 0x72U, 0xc0U, |
1684 | | 0xb7U, 0xfdU, 0x93U, 0x26U, 0x36U, 0x3fU, 0xf7U, 0xccU, |
1685 | | 0x34U, 0xa5U, 0xe5U, 0xf1U, 0x71U, 0xd8U, 0x31U, 0x15U, |
1686 | | 0x04U, 0xc7U, 0x23U, 0xc3U, 0x18U, 0x96U, 0x05U, 0x9aU, |
1687 | | 0x07U, 0x12U, 0x80U, 0xe2U, 0xebU, 0x27U, 0xb2U, 0x75U, |
1688 | | 0x09U, 0x83U, 0x2cU, 0x1aU, 0x1bU, 0x6eU, 0x5aU, 0xa0U, |
1689 | | 0x52U, 0x3bU, 0xd6U, 0xb3U, 0x29U, 0xe3U, 0x2fU, 0x84U, |
1690 | | 0x53U, 0xd1U, 0x00U, 0xedU, 0x20U, 0xfcU, 0xb1U, 0x5bU, |
1691 | | 0x6aU, 0xcbU, 0xbeU, 0x39U, 0x4aU, 0x4cU, 0x58U, 0xcfU, |
1692 | | 0xd0U, 0xefU, 0xaaU, 0xfbU, 0x43U, 0x4dU, 0x33U, 0x85U, |
1693 | | 0x45U, 0xf9U, 0x02U, 0x7fU, 0x50U, 0x3cU, 0x9fU, 0xa8U, |
1694 | | 0x51U, 0xa3U, 0x40U, 0x8fU, 0x92U, 0x9dU, 0x38U, 0xf5U, |
1695 | | 0xbcU, 0xb6U, 0xdaU, 0x21U, 0x10U, 0xffU, 0xf3U, 0xd2U, |
1696 | | 0xcdU, 0x0cU, 0x13U, 0xecU, 0x5fU, 0x97U, 0x44U, 0x17U, |
1697 | | 0xc4U, 0xa7U, 0x7eU, 0x3dU, 0x64U, 0x5dU, 0x19U, 0x73U, |
1698 | | 0x60U, 0x81U, 0x4fU, 0xdcU, 0x22U, 0x2aU, 0x90U, 0x88U, |
1699 | | 0x46U, 0xeeU, 0xb8U, 0x14U, 0xdeU, 0x5eU, 0x0bU, 0xdbU, |
1700 | | 0xe0U, 0x32U, 0x3aU, 0x0aU, 0x49U, 0x06U, 0x24U, 0x5cU, |
1701 | | 0xc2U, 0xd3U, 0xacU, 0x62U, 0x91U, 0x95U, 0xe4U, 0x79U, |
1702 | | 0xe7U, 0xc8U, 0x37U, 0x6dU, 0x8dU, 0xd5U, 0x4eU, 0xa9U, |
1703 | | 0x6cU, 0x56U, 0xf4U, 0xeaU, 0x65U, 0x7aU, 0xaeU, 0x08U, |
1704 | | 0xbaU, 0x78U, 0x25U, 0x2eU, 0x1cU, 0xa6U, 0xb4U, 0xc6U, |
1705 | | 0xe8U, 0xddU, 0x74U, 0x1fU, 0x4bU, 0xbdU, 0x8bU, 0x8aU, |
1706 | | 0x70U, 0x3eU, 0xb5U, 0x66U, 0x48U, 0x03U, 0xf6U, 0x0eU, |
1707 | | 0x61U, 0x35U, 0x57U, 0xb9U, 0x86U, 0xc1U, 0x1dU, 0x9eU, |
1708 | | 0xe1U, 0xf8U, 0x98U, 0x11U, 0x69U, 0xd9U, 0x8eU, 0x94U, |
1709 | | 0x9bU, 0x1eU, 0x87U, 0xe9U, 0xceU, 0x55U, 0x28U, 0xdfU, |
1710 | | 0x8cU, 0xa1U, 0x89U, 0x0dU, 0xbfU, 0xe6U, 0x42U, 0x68U, |
1711 | | 0x41U, 0x99U, 0x2dU, 0x0fU, 0xb0U, 0x54U, 0xbbU, 0x16U |
1712 | | }; |
1713 | | |
1714 | | #define AES_XTIME(x) ((byte)((byte)((x) << 1) ^ ((0 - ((x) >> 7)) & 0x1b))) |
1715 | | |
1716 | | static WARN_UNUSED_RESULT word32 col_mul( |
1717 | | word32 t, int i2, int i3, int ia, int ib) |
1718 | | { |
1719 | | byte t3 = GETBYTE(t, i3); |
1720 | | byte tm = AES_XTIME(GETBYTE(t, i2) ^ t3); |
1721 | | |
1722 | | return GETBYTE(t, ia) ^ GETBYTE(t, ib) ^ t3 ^ tm; |
1723 | | } |
1724 | | |
1725 | | #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) |
1726 | | static WARN_UNUSED_RESULT word32 inv_col_mul( |
1727 | | word32 t, int i9, int ib, int id, int ie) |
1728 | | { |
1729 | | byte t9 = GETBYTE(t, i9); |
1730 | | byte tb = GETBYTE(t, ib); |
1731 | | byte td = GETBYTE(t, id); |
1732 | | byte te = GETBYTE(t, ie); |
1733 | | byte t0 = t9 ^ tb ^ td; |
1734 | | return t0 ^ AES_XTIME(AES_XTIME(AES_XTIME(t0 ^ te) ^ td ^ te) ^ tb ^ te); |
1735 | | } |
1736 | | #endif |
1737 | | #endif |
1738 | | |
1739 | | #if defined(HAVE_AES_CBC) || defined(WOLFSSL_AES_DIRECT) || \ |
1740 | | defined(HAVE_AESCCM) || defined(HAVE_AESGCM) |
1741 | | |
1742 | | #ifndef WC_CACHE_LINE_SZ |
1743 | | #if defined(__x86_64__) || defined(_M_X64) || \ |
1744 | | (defined(__ILP32__) && (__ILP32__ >= 1)) |
1745 | 22.2k | #define WC_CACHE_LINE_SZ 64 |
1746 | | #else |
1747 | | /* default cache line size */ |
1748 | | #define WC_CACHE_LINE_SZ 32 |
1749 | | #endif |
1750 | | #endif |
1751 | | |
1752 | | |
1753 | | #ifndef WC_NO_CACHE_RESISTANT |
1754 | | #ifndef WOLFSSL_AES_SMALL_TABLES |
1755 | | /* load 4 Te Tables into cache by cache line stride */ |
1756 | | static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTe(void) |
1757 | 303 | { |
1758 | 303 | word32 x = 0; |
1759 | 303 | int i,j; |
1760 | | |
1761 | 1.51k | for (i = 0; i < 4; i++) { |
1762 | | /* 256 elements, each one is 4 bytes */ |
1763 | 20.6k | for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) { |
1764 | 19.3k | x &= Te[i][j]; |
1765 | 19.3k | } |
1766 | 1.21k | } |
1767 | 303 | return x; |
1768 | 303 | } |
1769 | | #else |
1770 | | /* load sbox into cache by cache line stride */ |
1771 | | static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchSBox(void) |
1772 | | { |
1773 | | word32 x = 0; |
1774 | | int i; |
1775 | | |
1776 | | for (i = 0; i < 256; i += WC_CACHE_LINE_SZ/4) { |
1777 | | x &= Tsbox[i]; |
1778 | | } |
1779 | | return x; |
1780 | | } |
1781 | | #endif |
1782 | | #endif |
1783 | | |
1784 | | /* Software AES - ECB Encrypt */ |
1785 | | static WARN_UNUSED_RESULT int wc_AesEncrypt( |
1786 | | Aes* aes, const byte* inBlock, byte* outBlock) |
1787 | 303 | { |
1788 | 303 | word32 s0, s1, s2, s3; |
1789 | 303 | word32 t0, t1, t2, t3; |
1790 | 303 | word32 r = aes->rounds >> 1; |
1791 | 303 | const word32* rk = aes->key; |
1792 | | |
1793 | 303 | if (r > 7 || r == 0) { |
1794 | 0 | WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E); |
1795 | 0 | return KEYUSAGE_E; |
1796 | 0 | } |
1797 | | |
1798 | | #ifdef WOLFSSL_AESNI |
1799 | | if (haveAESNI && aes->use_aesni) { |
1800 | | #ifdef DEBUG_AESNI |
1801 | | printf("about to aes encrypt\n"); |
1802 | | printf("in = %p\n", inBlock); |
1803 | | printf("out = %p\n", outBlock); |
1804 | | printf("aes->key = %p\n", aes->key); |
1805 | | printf("aes->rounds = %d\n", aes->rounds); |
1806 | | printf("sz = %d\n", AES_BLOCK_SIZE); |
1807 | | #endif |
1808 | | |
1809 | | /* check alignment, decrypt doesn't need alignment */ |
1810 | | if ((wc_ptr_t)inBlock % AESNI_ALIGN) { |
1811 | | #ifndef NO_WOLFSSL_ALLOC_ALIGN |
1812 | | byte* tmp = (byte*)XMALLOC(AES_BLOCK_SIZE + AESNI_ALIGN, aes->heap, |
1813 | | DYNAMIC_TYPE_TMP_BUFFER); |
1814 | | byte* tmp_align; |
1815 | | if (tmp == NULL) |
1816 | | return MEMORY_E; |
1817 | | |
1818 | | tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN)); |
1819 | | |
1820 | | XMEMCPY(tmp_align, inBlock, AES_BLOCK_SIZE); |
1821 | | AES_ECB_encrypt(tmp_align, tmp_align, AES_BLOCK_SIZE, |
1822 | | (byte*)aes->key, aes->rounds); |
1823 | | XMEMCPY(outBlock, tmp_align, AES_BLOCK_SIZE); |
1824 | | XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER); |
1825 | | return 0; |
1826 | | #else |
1827 | | WOLFSSL_MSG("AES-ECB encrypt with bad alignment"); |
1828 | | WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E); |
1829 | | return BAD_ALIGN_E; |
1830 | | #endif |
1831 | | } |
1832 | | |
1833 | | AES_ECB_encrypt(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key, |
1834 | | aes->rounds); |
1835 | | |
1836 | | return 0; |
1837 | | } |
1838 | | else { |
1839 | | #ifdef DEBUG_AESNI |
1840 | | printf("Skipping AES-NI\n"); |
1841 | | #endif |
1842 | | } |
1843 | | #endif |
1844 | | #if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES) |
1845 | | AES_ECB_encrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE); |
1846 | | return 0; |
1847 | | #endif |
1848 | | |
1849 | | #if defined(WOLFSSL_IMXRT_DCP) |
1850 | | if (aes->keylen == 16) { |
1851 | | DCPAesEcbEncrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE); |
1852 | | return 0; |
1853 | | } |
1854 | | #endif |
1855 | | |
1856 | | /* |
1857 | | * map byte array block to cipher state |
1858 | | * and add initial round key: |
1859 | | */ |
1860 | 303 | XMEMCPY(&s0, inBlock, sizeof(s0)); |
1861 | 303 | XMEMCPY(&s1, inBlock + sizeof(s0), sizeof(s1)); |
1862 | 303 | XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2)); |
1863 | 303 | XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3)); |
1864 | | |
1865 | 303 | #ifdef LITTLE_ENDIAN_ORDER |
1866 | 303 | s0 = ByteReverseWord32(s0); |
1867 | 303 | s1 = ByteReverseWord32(s1); |
1868 | 303 | s2 = ByteReverseWord32(s2); |
1869 | 303 | s3 = ByteReverseWord32(s3); |
1870 | 303 | #endif |
1871 | | |
1872 | | /* AddRoundKey */ |
1873 | 303 | s0 ^= rk[0]; |
1874 | 303 | s1 ^= rk[1]; |
1875 | 303 | s2 ^= rk[2]; |
1876 | 303 | s3 ^= rk[3]; |
1877 | | |
1878 | 303 | #ifndef WOLFSSL_AES_SMALL_TABLES |
1879 | 303 | #ifndef WC_NO_CACHE_RESISTANT |
1880 | 303 | s0 |= PreFetchTe(); |
1881 | 303 | #endif |
1882 | | |
1883 | 303 | #ifndef WOLFSSL_AES_NO_UNROLL |
1884 | | /* Unroll the loop. */ |
1885 | 303 | #define ENC_ROUND_T_S(o) \ |
1886 | 1.51k | t0 = Te[0][GETBYTE(s0, 3)] ^ Te[1][GETBYTE(s1, 2)] ^ \ |
1887 | 1.51k | Te[2][GETBYTE(s2, 1)] ^ Te[3][GETBYTE(s3, 0)] ^ rk[(o)+4]; \ |
1888 | 1.51k | t1 = Te[0][GETBYTE(s1, 3)] ^ Te[1][GETBYTE(s2, 2)] ^ \ |
1889 | 1.51k | Te[2][GETBYTE(s3, 1)] ^ Te[3][GETBYTE(s0, 0)] ^ rk[(o)+5]; \ |
1890 | 1.51k | t2 = Te[0][GETBYTE(s2, 3)] ^ Te[1][GETBYTE(s3, 2)] ^ \ |
1891 | 1.51k | Te[2][GETBYTE(s0, 1)] ^ Te[3][GETBYTE(s1, 0)] ^ rk[(o)+6]; \ |
1892 | 1.51k | t3 = Te[0][GETBYTE(s3, 3)] ^ Te[1][GETBYTE(s0, 2)] ^ \ |
1893 | 1.51k | Te[2][GETBYTE(s1, 1)] ^ Te[3][GETBYTE(s2, 0)] ^ rk[(o)+7] |
1894 | 303 | #define ENC_ROUND_S_T(o) \ |
1895 | 1.21k | s0 = Te[0][GETBYTE(t0, 3)] ^ Te[1][GETBYTE(t1, 2)] ^ \ |
1896 | 1.21k | Te[2][GETBYTE(t2, 1)] ^ Te[3][GETBYTE(t3, 0)] ^ rk[(o)+0]; \ |
1897 | 1.21k | s1 = Te[0][GETBYTE(t1, 3)] ^ Te[1][GETBYTE(t2, 2)] ^ \ |
1898 | 1.21k | Te[2][GETBYTE(t3, 1)] ^ Te[3][GETBYTE(t0, 0)] ^ rk[(o)+1]; \ |
1899 | 1.21k | s2 = Te[0][GETBYTE(t2, 3)] ^ Te[1][GETBYTE(t3, 2)] ^ \ |
1900 | 1.21k | Te[2][GETBYTE(t0, 1)] ^ Te[3][GETBYTE(t1, 0)] ^ rk[(o)+2]; \ |
1901 | 1.21k | s3 = Te[0][GETBYTE(t3, 3)] ^ Te[1][GETBYTE(t0, 2)] ^ \ |
1902 | 1.21k | Te[2][GETBYTE(t1, 1)] ^ Te[3][GETBYTE(t2, 0)] ^ rk[(o)+3] |
1903 | | |
1904 | 303 | ENC_ROUND_T_S( 0); |
1905 | 303 | ENC_ROUND_S_T( 8); ENC_ROUND_T_S( 8); |
1906 | 303 | ENC_ROUND_S_T(16); ENC_ROUND_T_S(16); |
1907 | 303 | ENC_ROUND_S_T(24); ENC_ROUND_T_S(24); |
1908 | 303 | ENC_ROUND_S_T(32); ENC_ROUND_T_S(32); |
1909 | 303 | if (r > 5) { |
1910 | 0 | ENC_ROUND_S_T(40); ENC_ROUND_T_S(40); |
1911 | 0 | if (r > 6) { |
1912 | 0 | ENC_ROUND_S_T(48); ENC_ROUND_T_S(48); |
1913 | 0 | } |
1914 | 0 | } |
1915 | 303 | rk += r * 8; |
1916 | | #else |
1917 | | /* |
1918 | | * Nr - 1 full rounds: |
1919 | | */ |
1920 | | |
1921 | | for (;;) { |
1922 | | t0 = |
1923 | | Te[0][GETBYTE(s0, 3)] ^ |
1924 | | Te[1][GETBYTE(s1, 2)] ^ |
1925 | | Te[2][GETBYTE(s2, 1)] ^ |
1926 | | Te[3][GETBYTE(s3, 0)] ^ |
1927 | | rk[4]; |
1928 | | t1 = |
1929 | | Te[0][GETBYTE(s1, 3)] ^ |
1930 | | Te[1][GETBYTE(s2, 2)] ^ |
1931 | | Te[2][GETBYTE(s3, 1)] ^ |
1932 | | Te[3][GETBYTE(s0, 0)] ^ |
1933 | | rk[5]; |
1934 | | t2 = |
1935 | | Te[0][GETBYTE(s2, 3)] ^ |
1936 | | Te[1][GETBYTE(s3, 2)] ^ |
1937 | | Te[2][GETBYTE(s0, 1)] ^ |
1938 | | Te[3][GETBYTE(s1, 0)] ^ |
1939 | | rk[6]; |
1940 | | t3 = |
1941 | | Te[0][GETBYTE(s3, 3)] ^ |
1942 | | Te[1][GETBYTE(s0, 2)] ^ |
1943 | | Te[2][GETBYTE(s1, 1)] ^ |
1944 | | Te[3][GETBYTE(s2, 0)] ^ |
1945 | | rk[7]; |
1946 | | |
1947 | | rk += 8; |
1948 | | if (--r == 0) { |
1949 | | break; |
1950 | | } |
1951 | | |
1952 | | s0 = |
1953 | | Te[0][GETBYTE(t0, 3)] ^ |
1954 | | Te[1][GETBYTE(t1, 2)] ^ |
1955 | | Te[2][GETBYTE(t2, 1)] ^ |
1956 | | Te[3][GETBYTE(t3, 0)] ^ |
1957 | | rk[0]; |
1958 | | s1 = |
1959 | | Te[0][GETBYTE(t1, 3)] ^ |
1960 | | Te[1][GETBYTE(t2, 2)] ^ |
1961 | | Te[2][GETBYTE(t3, 1)] ^ |
1962 | | Te[3][GETBYTE(t0, 0)] ^ |
1963 | | rk[1]; |
1964 | | s2 = |
1965 | | Te[0][GETBYTE(t2, 3)] ^ |
1966 | | Te[1][GETBYTE(t3, 2)] ^ |
1967 | | Te[2][GETBYTE(t0, 1)] ^ |
1968 | | Te[3][GETBYTE(t1, 0)] ^ |
1969 | | rk[2]; |
1970 | | s3 = |
1971 | | Te[0][GETBYTE(t3, 3)] ^ |
1972 | | Te[1][GETBYTE(t0, 2)] ^ |
1973 | | Te[2][GETBYTE(t1, 1)] ^ |
1974 | | Te[3][GETBYTE(t2, 0)] ^ |
1975 | | rk[3]; |
1976 | | } |
1977 | | #endif |
1978 | | |
1979 | | /* |
1980 | | * apply last round and |
1981 | | * map cipher state to byte array block: |
1982 | | */ |
1983 | | |
1984 | 303 | s0 = |
1985 | 303 | (Te[2][GETBYTE(t0, 3)] & 0xff000000) ^ |
1986 | 303 | (Te[3][GETBYTE(t1, 2)] & 0x00ff0000) ^ |
1987 | 303 | (Te[0][GETBYTE(t2, 1)] & 0x0000ff00) ^ |
1988 | 303 | (Te[1][GETBYTE(t3, 0)] & 0x000000ff) ^ |
1989 | 303 | rk[0]; |
1990 | 303 | s1 = |
1991 | 303 | (Te[2][GETBYTE(t1, 3)] & 0xff000000) ^ |
1992 | 303 | (Te[3][GETBYTE(t2, 2)] & 0x00ff0000) ^ |
1993 | 303 | (Te[0][GETBYTE(t3, 1)] & 0x0000ff00) ^ |
1994 | 303 | (Te[1][GETBYTE(t0, 0)] & 0x000000ff) ^ |
1995 | 303 | rk[1]; |
1996 | 303 | s2 = |
1997 | 303 | (Te[2][GETBYTE(t2, 3)] & 0xff000000) ^ |
1998 | 303 | (Te[3][GETBYTE(t3, 2)] & 0x00ff0000) ^ |
1999 | 303 | (Te[0][GETBYTE(t0, 1)] & 0x0000ff00) ^ |
2000 | 303 | (Te[1][GETBYTE(t1, 0)] & 0x000000ff) ^ |
2001 | 303 | rk[2]; |
2002 | 303 | s3 = |
2003 | 303 | (Te[2][GETBYTE(t3, 3)] & 0xff000000) ^ |
2004 | 303 | (Te[3][GETBYTE(t0, 2)] & 0x00ff0000) ^ |
2005 | 303 | (Te[0][GETBYTE(t1, 1)] & 0x0000ff00) ^ |
2006 | 303 | (Te[1][GETBYTE(t2, 0)] & 0x000000ff) ^ |
2007 | 303 | rk[3]; |
2008 | | #else |
2009 | | #ifndef WC_NO_CACHE_RESISTANT |
2010 | | s0 |= PreFetchSBox(); |
2011 | | #endif |
2012 | | |
2013 | | r *= 2; |
2014 | | /* Two rounds at a time */ |
2015 | | for (rk += 4; r > 1; r--, rk += 4) { |
2016 | | t0 = |
2017 | | ((word32)Tsbox[GETBYTE(s0, 3)] << 24) ^ |
2018 | | ((word32)Tsbox[GETBYTE(s1, 2)] << 16) ^ |
2019 | | ((word32)Tsbox[GETBYTE(s2, 1)] << 8) ^ |
2020 | | ((word32)Tsbox[GETBYTE(s3, 0)]); |
2021 | | t1 = |
2022 | | ((word32)Tsbox[GETBYTE(s1, 3)] << 24) ^ |
2023 | | ((word32)Tsbox[GETBYTE(s2, 2)] << 16) ^ |
2024 | | ((word32)Tsbox[GETBYTE(s3, 1)] << 8) ^ |
2025 | | ((word32)Tsbox[GETBYTE(s0, 0)]); |
2026 | | t2 = |
2027 | | ((word32)Tsbox[GETBYTE(s2, 3)] << 24) ^ |
2028 | | ((word32)Tsbox[GETBYTE(s3, 2)] << 16) ^ |
2029 | | ((word32)Tsbox[GETBYTE(s0, 1)] << 8) ^ |
2030 | | ((word32)Tsbox[GETBYTE(s1, 0)]); |
2031 | | t3 = |
2032 | | ((word32)Tsbox[GETBYTE(s3, 3)] << 24) ^ |
2033 | | ((word32)Tsbox[GETBYTE(s0, 2)] << 16) ^ |
2034 | | ((word32)Tsbox[GETBYTE(s1, 1)] << 8) ^ |
2035 | | ((word32)Tsbox[GETBYTE(s2, 0)]); |
2036 | | |
2037 | | s0 = |
2038 | | (col_mul(t0, 3, 2, 0, 1) << 24) ^ |
2039 | | (col_mul(t0, 2, 1, 0, 3) << 16) ^ |
2040 | | (col_mul(t0, 1, 0, 2, 3) << 8) ^ |
2041 | | (col_mul(t0, 0, 3, 2, 1) ) ^ |
2042 | | rk[0]; |
2043 | | s1 = |
2044 | | (col_mul(t1, 3, 2, 0, 1) << 24) ^ |
2045 | | (col_mul(t1, 2, 1, 0, 3) << 16) ^ |
2046 | | (col_mul(t1, 1, 0, 2, 3) << 8) ^ |
2047 | | (col_mul(t1, 0, 3, 2, 1) ) ^ |
2048 | | rk[1]; |
2049 | | s2 = |
2050 | | (col_mul(t2, 3, 2, 0, 1) << 24) ^ |
2051 | | (col_mul(t2, 2, 1, 0, 3) << 16) ^ |
2052 | | (col_mul(t2, 1, 0, 2, 3) << 8) ^ |
2053 | | (col_mul(t2, 0, 3, 2, 1) ) ^ |
2054 | | rk[2]; |
2055 | | s3 = |
2056 | | (col_mul(t3, 3, 2, 0, 1) << 24) ^ |
2057 | | (col_mul(t3, 2, 1, 0, 3) << 16) ^ |
2058 | | (col_mul(t3, 1, 0, 2, 3) << 8) ^ |
2059 | | (col_mul(t3, 0, 3, 2, 1) ) ^ |
2060 | | rk[3]; |
2061 | | } |
2062 | | |
2063 | | t0 = |
2064 | | ((word32)Tsbox[GETBYTE(s0, 3)] << 24) ^ |
2065 | | ((word32)Tsbox[GETBYTE(s1, 2)] << 16) ^ |
2066 | | ((word32)Tsbox[GETBYTE(s2, 1)] << 8) ^ |
2067 | | ((word32)Tsbox[GETBYTE(s3, 0)]); |
2068 | | t1 = |
2069 | | ((word32)Tsbox[GETBYTE(s1, 3)] << 24) ^ |
2070 | | ((word32)Tsbox[GETBYTE(s2, 2)] << 16) ^ |
2071 | | ((word32)Tsbox[GETBYTE(s3, 1)] << 8) ^ |
2072 | | ((word32)Tsbox[GETBYTE(s0, 0)]); |
2073 | | t2 = |
2074 | | ((word32)Tsbox[GETBYTE(s2, 3)] << 24) ^ |
2075 | | ((word32)Tsbox[GETBYTE(s3, 2)] << 16) ^ |
2076 | | ((word32)Tsbox[GETBYTE(s0, 1)] << 8) ^ |
2077 | | ((word32)Tsbox[GETBYTE(s1, 0)]); |
2078 | | t3 = |
2079 | | ((word32)Tsbox[GETBYTE(s3, 3)] << 24) ^ |
2080 | | ((word32)Tsbox[GETBYTE(s0, 2)] << 16) ^ |
2081 | | ((word32)Tsbox[GETBYTE(s1, 1)] << 8) ^ |
2082 | | ((word32)Tsbox[GETBYTE(s2, 0)]); |
2083 | | s0 = t0 ^ rk[0]; |
2084 | | s1 = t1 ^ rk[1]; |
2085 | | s2 = t2 ^ rk[2]; |
2086 | | s3 = t3 ^ rk[3]; |
2087 | | #endif |
2088 | | |
2089 | | /* write out */ |
2090 | 303 | #ifdef LITTLE_ENDIAN_ORDER |
2091 | 303 | s0 = ByteReverseWord32(s0); |
2092 | 303 | s1 = ByteReverseWord32(s1); |
2093 | 303 | s2 = ByteReverseWord32(s2); |
2094 | 303 | s3 = ByteReverseWord32(s3); |
2095 | 303 | #endif |
2096 | | |
2097 | 303 | XMEMCPY(outBlock, &s0, sizeof(s0)); |
2098 | 303 | XMEMCPY(outBlock + sizeof(s0), &s1, sizeof(s1)); |
2099 | 303 | XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2)); |
2100 | 303 | XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3)); |
2101 | | |
2102 | 303 | return 0; |
2103 | 303 | } |
2104 | | #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT || HAVE_AESGCM */ |
2105 | | |
2106 | | #if defined(HAVE_AES_DECRYPT) |
2107 | | #if (defined(HAVE_AES_CBC) && !defined(WOLFSSL_DEVCRYPTO_CBC)) || \ |
2108 | | defined(WOLFSSL_AES_DIRECT) |
2109 | | |
2110 | | #ifndef WC_NO_CACHE_RESISTANT |
2111 | | #ifndef WOLFSSL_AES_SMALL_TABLES |
2112 | | /* load 4 Td Tables into cache by cache line stride */ |
2113 | | static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd(void) |
2114 | 42 | { |
2115 | 42 | word32 x = 0; |
2116 | 42 | int i,j; |
2117 | | |
2118 | 210 | for (i = 0; i < 4; i++) { |
2119 | | /* 256 elements, each one is 4 bytes */ |
2120 | 2.85k | for (j = 0; j < 256; j += WC_CACHE_LINE_SZ/4) { |
2121 | 2.68k | x &= Td[i][j]; |
2122 | 2.68k | } |
2123 | 168 | } |
2124 | 42 | return x; |
2125 | 42 | } |
2126 | | #endif |
2127 | | |
2128 | | /* load Td Table4 into cache by cache line stride */ |
2129 | | static WARN_UNUSED_RESULT WC_INLINE word32 PreFetchTd4(void) |
2130 | 42 | { |
2131 | 42 | word32 x = 0; |
2132 | 42 | int i; |
2133 | | |
2134 | 210 | for (i = 0; i < 256; i += WC_CACHE_LINE_SZ) { |
2135 | 168 | x &= (word32)Td4[i]; |
2136 | 168 | } |
2137 | 42 | return x; |
2138 | 42 | } |
2139 | | #endif |
2140 | | |
2141 | | /* Software AES - ECB Decrypt */ |
2142 | | static WARN_UNUSED_RESULT int wc_AesDecrypt( |
2143 | | Aes* aes, const byte* inBlock, byte* outBlock) |
2144 | 42 | { |
2145 | 42 | word32 s0, s1, s2, s3; |
2146 | 42 | word32 t0, t1, t2, t3; |
2147 | 42 | word32 r = aes->rounds >> 1; |
2148 | 42 | const word32* rk = aes->key; |
2149 | | |
2150 | 42 | if (r > 7 || r == 0) { |
2151 | 0 | WOLFSSL_ERROR_VERBOSE(KEYUSAGE_E); |
2152 | 0 | return KEYUSAGE_E; |
2153 | 0 | } |
2154 | | |
2155 | | #ifdef WOLFSSL_AESNI |
2156 | | if (haveAESNI && aes->use_aesni) { |
2157 | | #ifdef DEBUG_AESNI |
2158 | | printf("about to aes decrypt\n"); |
2159 | | printf("in = %p\n", inBlock); |
2160 | | printf("out = %p\n", outBlock); |
2161 | | printf("aes->key = %p\n", aes->key); |
2162 | | printf("aes->rounds = %d\n", aes->rounds); |
2163 | | printf("sz = %d\n", AES_BLOCK_SIZE); |
2164 | | #endif |
2165 | | |
2166 | | /* if input and output same will overwrite input iv */ |
2167 | | if ((const byte*)aes->tmp != inBlock) |
2168 | | XMEMCPY(aes->tmp, inBlock, AES_BLOCK_SIZE); |
2169 | | AES_ECB_decrypt(inBlock, outBlock, AES_BLOCK_SIZE, (byte*)aes->key, |
2170 | | aes->rounds); |
2171 | | return 0; |
2172 | | } |
2173 | | else { |
2174 | | #ifdef DEBUG_AESNI |
2175 | | printf("Skipping AES-NI\n"); |
2176 | | #endif |
2177 | | } |
2178 | | #endif /* WOLFSSL_AESNI */ |
2179 | | #if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES) |
2180 | | return AES_ECB_decrypt(aes, inBlock, outBlock, AES_BLOCK_SIZE); |
2181 | | #endif |
2182 | | #if defined(WOLFSSL_IMXRT_DCP) |
2183 | | if (aes->keylen == 16) { |
2184 | | DCPAesEcbDecrypt(aes, outBlock, inBlock, AES_BLOCK_SIZE); |
2185 | | return 0; |
2186 | | } |
2187 | | #endif |
2188 | | |
2189 | | /* |
2190 | | * map byte array block to cipher state |
2191 | | * and add initial round key: |
2192 | | */ |
2193 | 42 | XMEMCPY(&s0, inBlock, sizeof(s0)); |
2194 | 42 | XMEMCPY(&s1, inBlock + sizeof(s0), sizeof(s1)); |
2195 | 42 | XMEMCPY(&s2, inBlock + 2 * sizeof(s0), sizeof(s2)); |
2196 | 42 | XMEMCPY(&s3, inBlock + 3 * sizeof(s0), sizeof(s3)); |
2197 | | |
2198 | 42 | #ifdef LITTLE_ENDIAN_ORDER |
2199 | 42 | s0 = ByteReverseWord32(s0); |
2200 | 42 | s1 = ByteReverseWord32(s1); |
2201 | 42 | s2 = ByteReverseWord32(s2); |
2202 | 42 | s3 = ByteReverseWord32(s3); |
2203 | 42 | #endif |
2204 | | |
2205 | 42 | s0 ^= rk[0]; |
2206 | 42 | s1 ^= rk[1]; |
2207 | 42 | s2 ^= rk[2]; |
2208 | 42 | s3 ^= rk[3]; |
2209 | | |
2210 | 42 | #ifndef WOLFSSL_AES_SMALL_TABLES |
2211 | 42 | #ifndef WC_NO_CACHE_RESISTANT |
2212 | 42 | s0 |= PreFetchTd(); |
2213 | 42 | #endif |
2214 | | |
2215 | 42 | #ifndef WOLFSSL_AES_NO_UNROLL |
2216 | | /* Unroll the loop. */ |
2217 | 42 | #define DEC_ROUND_T_S(o) \ |
2218 | 210 | t0 = Td[0][GETBYTE(s0, 3)] ^ Td[1][GETBYTE(s3, 2)] ^ \ |
2219 | 210 | Td[2][GETBYTE(s2, 1)] ^ Td[3][GETBYTE(s1, 0)] ^ rk[(o)+4]; \ |
2220 | 210 | t1 = Td[0][GETBYTE(s1, 3)] ^ Td[1][GETBYTE(s0, 2)] ^ \ |
2221 | 210 | Td[2][GETBYTE(s3, 1)] ^ Td[3][GETBYTE(s2, 0)] ^ rk[(o)+5]; \ |
2222 | 210 | t2 = Td[0][GETBYTE(s2, 3)] ^ Td[1][GETBYTE(s1, 2)] ^ \ |
2223 | 210 | Td[2][GETBYTE(s0, 1)] ^ Td[3][GETBYTE(s3, 0)] ^ rk[(o)+6]; \ |
2224 | 210 | t3 = Td[0][GETBYTE(s3, 3)] ^ Td[1][GETBYTE(s2, 2)] ^ \ |
2225 | 210 | Td[2][GETBYTE(s1, 1)] ^ Td[3][GETBYTE(s0, 0)] ^ rk[(o)+7] |
2226 | 42 | #define DEC_ROUND_S_T(o) \ |
2227 | 168 | s0 = Td[0][GETBYTE(t0, 3)] ^ Td[1][GETBYTE(t3, 2)] ^ \ |
2228 | 168 | Td[2][GETBYTE(t2, 1)] ^ Td[3][GETBYTE(t1, 0)] ^ rk[(o)+0]; \ |
2229 | 168 | s1 = Td[0][GETBYTE(t1, 3)] ^ Td[1][GETBYTE(t0, 2)] ^ \ |
2230 | 168 | Td[2][GETBYTE(t3, 1)] ^ Td[3][GETBYTE(t2, 0)] ^ rk[(o)+1]; \ |
2231 | 168 | s2 = Td[0][GETBYTE(t2, 3)] ^ Td[1][GETBYTE(t1, 2)] ^ \ |
2232 | 168 | Td[2][GETBYTE(t0, 1)] ^ Td[3][GETBYTE(t3, 0)] ^ rk[(o)+2]; \ |
2233 | 168 | s3 = Td[0][GETBYTE(t3, 3)] ^ Td[1][GETBYTE(t2, 2)] ^ \ |
2234 | 168 | Td[2][GETBYTE(t1, 1)] ^ Td[3][GETBYTE(t0, 0)] ^ rk[(o)+3] |
2235 | | |
2236 | 42 | DEC_ROUND_T_S( 0); |
2237 | 42 | DEC_ROUND_S_T( 8); DEC_ROUND_T_S( 8); |
2238 | 42 | DEC_ROUND_S_T(16); DEC_ROUND_T_S(16); |
2239 | 42 | DEC_ROUND_S_T(24); DEC_ROUND_T_S(24); |
2240 | 42 | DEC_ROUND_S_T(32); DEC_ROUND_T_S(32); |
2241 | 42 | if (r > 5) { |
2242 | 0 | DEC_ROUND_S_T(40); DEC_ROUND_T_S(40); |
2243 | 0 | if (r > 6) { |
2244 | 0 | DEC_ROUND_S_T(48); DEC_ROUND_T_S(48); |
2245 | 0 | } |
2246 | 0 | } |
2247 | 42 | rk += r * 8; |
2248 | | #else |
2249 | | |
2250 | | /* |
2251 | | * Nr - 1 full rounds: |
2252 | | */ |
2253 | | |
2254 | | for (;;) { |
2255 | | t0 = |
2256 | | Td[0][GETBYTE(s0, 3)] ^ |
2257 | | Td[1][GETBYTE(s3, 2)] ^ |
2258 | | Td[2][GETBYTE(s2, 1)] ^ |
2259 | | Td[3][GETBYTE(s1, 0)] ^ |
2260 | | rk[4]; |
2261 | | t1 = |
2262 | | Td[0][GETBYTE(s1, 3)] ^ |
2263 | | Td[1][GETBYTE(s0, 2)] ^ |
2264 | | Td[2][GETBYTE(s3, 1)] ^ |
2265 | | Td[3][GETBYTE(s2, 0)] ^ |
2266 | | rk[5]; |
2267 | | t2 = |
2268 | | Td[0][GETBYTE(s2, 3)] ^ |
2269 | | Td[1][GETBYTE(s1, 2)] ^ |
2270 | | Td[2][GETBYTE(s0, 1)] ^ |
2271 | | Td[3][GETBYTE(s3, 0)] ^ |
2272 | | rk[6]; |
2273 | | t3 = |
2274 | | Td[0][GETBYTE(s3, 3)] ^ |
2275 | | Td[1][GETBYTE(s2, 2)] ^ |
2276 | | Td[2][GETBYTE(s1, 1)] ^ |
2277 | | Td[3][GETBYTE(s0, 0)] ^ |
2278 | | rk[7]; |
2279 | | |
2280 | | rk += 8; |
2281 | | if (--r == 0) { |
2282 | | break; |
2283 | | } |
2284 | | |
2285 | | s0 = |
2286 | | Td[0][GETBYTE(t0, 3)] ^ |
2287 | | Td[1][GETBYTE(t3, 2)] ^ |
2288 | | Td[2][GETBYTE(t2, 1)] ^ |
2289 | | Td[3][GETBYTE(t1, 0)] ^ |
2290 | | rk[0]; |
2291 | | s1 = |
2292 | | Td[0][GETBYTE(t1, 3)] ^ |
2293 | | Td[1][GETBYTE(t0, 2)] ^ |
2294 | | Td[2][GETBYTE(t3, 1)] ^ |
2295 | | Td[3][GETBYTE(t2, 0)] ^ |
2296 | | rk[1]; |
2297 | | s2 = |
2298 | | Td[0][GETBYTE(t2, 3)] ^ |
2299 | | Td[1][GETBYTE(t1, 2)] ^ |
2300 | | Td[2][GETBYTE(t0, 1)] ^ |
2301 | | Td[3][GETBYTE(t3, 0)] ^ |
2302 | | rk[2]; |
2303 | | s3 = |
2304 | | Td[0][GETBYTE(t3, 3)] ^ |
2305 | | Td[1][GETBYTE(t2, 2)] ^ |
2306 | | Td[2][GETBYTE(t1, 1)] ^ |
2307 | | Td[3][GETBYTE(t0, 0)] ^ |
2308 | | rk[3]; |
2309 | | } |
2310 | | #endif |
2311 | | /* |
2312 | | * apply last round and |
2313 | | * map cipher state to byte array block: |
2314 | | */ |
2315 | | |
2316 | 42 | #ifndef WC_NO_CACHE_RESISTANT |
2317 | 42 | t0 |= PreFetchTd4(); |
2318 | 42 | #endif |
2319 | | |
2320 | 42 | s0 = |
2321 | 42 | ((word32)Td4[GETBYTE(t0, 3)] << 24) ^ |
2322 | 42 | ((word32)Td4[GETBYTE(t3, 2)] << 16) ^ |
2323 | 42 | ((word32)Td4[GETBYTE(t2, 1)] << 8) ^ |
2324 | 42 | ((word32)Td4[GETBYTE(t1, 0)]) ^ |
2325 | 42 | rk[0]; |
2326 | 42 | s1 = |
2327 | 42 | ((word32)Td4[GETBYTE(t1, 3)] << 24) ^ |
2328 | 42 | ((word32)Td4[GETBYTE(t0, 2)] << 16) ^ |
2329 | 42 | ((word32)Td4[GETBYTE(t3, 1)] << 8) ^ |
2330 | 42 | ((word32)Td4[GETBYTE(t2, 0)]) ^ |
2331 | 42 | rk[1]; |
2332 | 42 | s2 = |
2333 | 42 | ((word32)Td4[GETBYTE(t2, 3)] << 24) ^ |
2334 | 42 | ((word32)Td4[GETBYTE(t1, 2)] << 16) ^ |
2335 | 42 | ((word32)Td4[GETBYTE(t0, 1)] << 8) ^ |
2336 | 42 | ((word32)Td4[GETBYTE(t3, 0)]) ^ |
2337 | 42 | rk[2]; |
2338 | 42 | s3 = |
2339 | 42 | ((word32)Td4[GETBYTE(t3, 3)] << 24) ^ |
2340 | 42 | ((word32)Td4[GETBYTE(t2, 2)] << 16) ^ |
2341 | 42 | ((word32)Td4[GETBYTE(t1, 1)] << 8) ^ |
2342 | 42 | ((word32)Td4[GETBYTE(t0, 0)]) ^ |
2343 | 42 | rk[3]; |
2344 | | #else |
2345 | | #ifndef WC_NO_CACHE_RESISTANT |
2346 | | s0 |= PreFetchTd4(); |
2347 | | #endif |
2348 | | |
2349 | | r *= 2; |
2350 | | for (rk += 4; r > 1; r--, rk += 4) { |
2351 | | t0 = |
2352 | | ((word32)Td4[GETBYTE(s0, 3)] << 24) ^ |
2353 | | ((word32)Td4[GETBYTE(s3, 2)] << 16) ^ |
2354 | | ((word32)Td4[GETBYTE(s2, 1)] << 8) ^ |
2355 | | ((word32)Td4[GETBYTE(s1, 0)]) ^ |
2356 | | rk[0]; |
2357 | | t1 = |
2358 | | ((word32)Td4[GETBYTE(s1, 3)] << 24) ^ |
2359 | | ((word32)Td4[GETBYTE(s0, 2)] << 16) ^ |
2360 | | ((word32)Td4[GETBYTE(s3, 1)] << 8) ^ |
2361 | | ((word32)Td4[GETBYTE(s2, 0)]) ^ |
2362 | | rk[1]; |
2363 | | t2 = |
2364 | | ((word32)Td4[GETBYTE(s2, 3)] << 24) ^ |
2365 | | ((word32)Td4[GETBYTE(s1, 2)] << 16) ^ |
2366 | | ((word32)Td4[GETBYTE(s0, 1)] << 8) ^ |
2367 | | ((word32)Td4[GETBYTE(s3, 0)]) ^ |
2368 | | rk[2]; |
2369 | | t3 = |
2370 | | ((word32)Td4[GETBYTE(s3, 3)] << 24) ^ |
2371 | | ((word32)Td4[GETBYTE(s2, 2)] << 16) ^ |
2372 | | ((word32)Td4[GETBYTE(s1, 1)] << 8) ^ |
2373 | | ((word32)Td4[GETBYTE(s0, 0)]) ^ |
2374 | | rk[3]; |
2375 | | |
2376 | | s0 = |
2377 | | (inv_col_mul(t0, 0, 2, 1, 3) << 24) ^ |
2378 | | (inv_col_mul(t0, 3, 1, 0, 2) << 16) ^ |
2379 | | (inv_col_mul(t0, 2, 0, 3, 1) << 8) ^ |
2380 | | (inv_col_mul(t0, 1, 3, 2, 0) ); |
2381 | | s1 = |
2382 | | (inv_col_mul(t1, 0, 2, 1, 3) << 24) ^ |
2383 | | (inv_col_mul(t1, 3, 1, 0, 2) << 16) ^ |
2384 | | (inv_col_mul(t1, 2, 0, 3, 1) << 8) ^ |
2385 | | (inv_col_mul(t1, 1, 3, 2, 0) ); |
2386 | | s2 = |
2387 | | (inv_col_mul(t2, 0, 2, 1, 3) << 24) ^ |
2388 | | (inv_col_mul(t2, 3, 1, 0, 2) << 16) ^ |
2389 | | (inv_col_mul(t2, 2, 0, 3, 1) << 8) ^ |
2390 | | (inv_col_mul(t2, 1, 3, 2, 0) ); |
2391 | | s3 = |
2392 | | (inv_col_mul(t3, 0, 2, 1, 3) << 24) ^ |
2393 | | (inv_col_mul(t3, 3, 1, 0, 2) << 16) ^ |
2394 | | (inv_col_mul(t3, 2, 0, 3, 1) << 8) ^ |
2395 | | (inv_col_mul(t3, 1, 3, 2, 0) ); |
2396 | | } |
2397 | | |
2398 | | t0 = |
2399 | | ((word32)Td4[GETBYTE(s0, 3)] << 24) ^ |
2400 | | ((word32)Td4[GETBYTE(s3, 2)] << 16) ^ |
2401 | | ((word32)Td4[GETBYTE(s2, 1)] << 8) ^ |
2402 | | ((word32)Td4[GETBYTE(s1, 0)]); |
2403 | | t1 = |
2404 | | ((word32)Td4[GETBYTE(s1, 3)] << 24) ^ |
2405 | | ((word32)Td4[GETBYTE(s0, 2)] << 16) ^ |
2406 | | ((word32)Td4[GETBYTE(s3, 1)] << 8) ^ |
2407 | | ((word32)Td4[GETBYTE(s2, 0)]); |
2408 | | t2 = |
2409 | | ((word32)Td4[GETBYTE(s2, 3)] << 24) ^ |
2410 | | ((word32)Td4[GETBYTE(s1, 2)] << 16) ^ |
2411 | | ((word32)Td4[GETBYTE(s0, 1)] << 8) ^ |
2412 | | ((word32)Td4[GETBYTE(s3, 0)]); |
2413 | | t3 = |
2414 | | ((word32)Td4[GETBYTE(s3, 3)] << 24) ^ |
2415 | | ((word32)Td4[GETBYTE(s2, 2)] << 16) ^ |
2416 | | ((word32)Td4[GETBYTE(s1, 1)] << 8) ^ |
2417 | | ((word32)Td4[GETBYTE(s0, 0)]); |
2418 | | s0 = t0 ^ rk[0]; |
2419 | | s1 = t1 ^ rk[1]; |
2420 | | s2 = t2 ^ rk[2]; |
2421 | | s3 = t3 ^ rk[3]; |
2422 | | #endif |
2423 | | |
2424 | | /* write out */ |
2425 | 42 | #ifdef LITTLE_ENDIAN_ORDER |
2426 | 42 | s0 = ByteReverseWord32(s0); |
2427 | 42 | s1 = ByteReverseWord32(s1); |
2428 | 42 | s2 = ByteReverseWord32(s2); |
2429 | 42 | s3 = ByteReverseWord32(s3); |
2430 | 42 | #endif |
2431 | | |
2432 | 42 | XMEMCPY(outBlock, &s0, sizeof(s0)); |
2433 | 42 | XMEMCPY(outBlock + sizeof(s0), &s1, sizeof(s1)); |
2434 | 42 | XMEMCPY(outBlock + 2 * sizeof(s0), &s2, sizeof(s2)); |
2435 | 42 | XMEMCPY(outBlock + 3 * sizeof(s0), &s3, sizeof(s3)); |
2436 | | |
2437 | 42 | return 0; |
2438 | 42 | } |
2439 | | #endif /* HAVE_AES_CBC || WOLFSSL_AES_DIRECT */ |
2440 | | #endif /* HAVE_AES_DECRYPT */ |
2441 | | |
2442 | | #endif /* NEED_AES_TABLES */ |
2443 | | |
2444 | | |
2445 | | |
2446 | | /* wc_AesSetKey */ |
2447 | | #if defined(STM32_CRYPTO) |
2448 | | |
2449 | | int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, |
2450 | | const byte* iv, int dir) |
2451 | | { |
2452 | | word32 *rk; |
2453 | | |
2454 | | (void)dir; |
2455 | | |
2456 | | if (aes == NULL || (keylen != 16 && |
2457 | | #ifdef WOLFSSL_AES_192 |
2458 | | keylen != 24 && |
2459 | | #endif |
2460 | | keylen != 32)) { |
2461 | | return BAD_FUNC_ARG; |
2462 | | } |
2463 | | |
2464 | | rk = aes->key; |
2465 | | aes->keylen = keylen; |
2466 | | aes->rounds = keylen/4 + 6; |
2467 | | XMEMCPY(rk, userKey, keylen); |
2468 | | #if !defined(WOLFSSL_STM32_CUBEMX) || defined(STM32_HAL_V2) |
2469 | | ByteReverseWords(rk, rk, keylen); |
2470 | | #endif |
2471 | | #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \ |
2472 | | defined(WOLFSSL_AES_OFB) |
2473 | | aes->left = 0; |
2474 | | #endif |
2475 | | return wc_AesSetIV(aes, iv); |
2476 | | } |
2477 | | #if defined(WOLFSSL_AES_DIRECT) |
2478 | | int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen, |
2479 | | const byte* iv, int dir) |
2480 | | { |
2481 | | return wc_AesSetKey(aes, userKey, keylen, iv, dir); |
2482 | | } |
2483 | | #endif |
2484 | | |
2485 | | #elif defined(HAVE_COLDFIRE_SEC) |
2486 | | #if defined (HAVE_THREADX) |
2487 | | #include "memory_pools.h" |
2488 | | extern TX_BYTE_POOL mp_ncached; /* Non Cached memory pool */ |
2489 | | #endif |
2490 | | |
2491 | | #define AES_BUFFER_SIZE (AES_BLOCK_SIZE * 64) |
2492 | | static unsigned char *AESBuffIn = NULL; |
2493 | | static unsigned char *AESBuffOut = NULL; |
2494 | | static byte *secReg; |
2495 | | static byte *secKey; |
2496 | | static volatile SECdescriptorType *secDesc; |
2497 | | |
2498 | | static wolfSSL_Mutex Mutex_AesSEC; |
2499 | | |
2500 | | #define SEC_DESC_AES_CBC_ENCRYPT 0x60300010 |
2501 | | #define SEC_DESC_AES_CBC_DECRYPT 0x60200010 |
2502 | | |
2503 | | extern volatile unsigned char __MBAR[]; |
2504 | | |
2505 | | int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, |
2506 | | const byte* iv, int dir) |
2507 | | { |
2508 | | if (AESBuffIn == NULL) { |
2509 | | #if defined (HAVE_THREADX) |
2510 | | int s1, s2, s3, s4, s5; |
2511 | | s5 = tx_byte_allocate(&mp_ncached,(void *)&secDesc, |
2512 | | sizeof(SECdescriptorType), TX_NO_WAIT); |
2513 | | s1 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffIn, |
2514 | | AES_BUFFER_SIZE, TX_NO_WAIT); |
2515 | | s2 = tx_byte_allocate(&mp_ncached, (void *)&AESBuffOut, |
2516 | | AES_BUFFER_SIZE, TX_NO_WAIT); |
2517 | | s3 = tx_byte_allocate(&mp_ncached, (void *)&secKey, |
2518 | | AES_BLOCK_SIZE*2, TX_NO_WAIT); |
2519 | | s4 = tx_byte_allocate(&mp_ncached, (void *)&secReg, |
2520 | | AES_BLOCK_SIZE, TX_NO_WAIT); |
2521 | | |
2522 | | if (s1 || s2 || s3 || s4 || s5) |
2523 | | return BAD_FUNC_ARG; |
2524 | | #else |
2525 | | #warning "Allocate non-Cache buffers" |
2526 | | #endif |
2527 | | |
2528 | | wc_InitMutex(&Mutex_AesSEC); |
2529 | | } |
2530 | | |
2531 | | if (!((keylen == 16) || (keylen == 24) || (keylen == 32))) |
2532 | | return BAD_FUNC_ARG; |
2533 | | |
2534 | | if (aes == NULL) |
2535 | | return BAD_FUNC_ARG; |
2536 | | |
2537 | | aes->keylen = keylen; |
2538 | | aes->rounds = keylen/4 + 6; |
2539 | | XMEMCPY(aes->key, userKey, keylen); |
2540 | | |
2541 | | if (iv) |
2542 | | XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE); |
2543 | | |
2544 | | #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \ |
2545 | | defined(WOLFSSL_AES_OFB) |
2546 | | aes->left = 0; |
2547 | | #endif |
2548 | | |
2549 | | return 0; |
2550 | | } |
2551 | | #elif defined(FREESCALE_LTC) |
2552 | | int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv, |
2553 | | int dir) |
2554 | | { |
2555 | | if (aes == NULL || !((keylen == 16) || (keylen == 24) || (keylen == 32))) |
2556 | | return BAD_FUNC_ARG; |
2557 | | |
2558 | | aes->rounds = keylen/4 + 6; |
2559 | | XMEMCPY(aes->key, userKey, keylen); |
2560 | | |
2561 | | #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \ |
2562 | | defined(WOLFSSL_AES_OFB) |
2563 | | aes->left = 0; |
2564 | | #endif |
2565 | | |
2566 | | return wc_AesSetIV(aes, iv); |
2567 | | } |
2568 | | |
2569 | | int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen, |
2570 | | const byte* iv, int dir) |
2571 | | { |
2572 | | return wc_AesSetKey(aes, userKey, keylen, iv, dir); |
2573 | | } |
2574 | | #elif defined(FREESCALE_MMCAU) |
2575 | | int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, |
2576 | | const byte* iv, int dir) |
2577 | | { |
2578 | | int ret; |
2579 | | byte* rk; |
2580 | | byte* tmpKey = (byte*)userKey; |
2581 | | int tmpKeyDynamic = 0; |
2582 | | word32 alignOffset = 0; |
2583 | | |
2584 | | (void)dir; |
2585 | | |
2586 | | if (!((keylen == 16) || (keylen == 24) || (keylen == 32))) |
2587 | | return BAD_FUNC_ARG; |
2588 | | if (aes == NULL) |
2589 | | return BAD_FUNC_ARG; |
2590 | | |
2591 | | rk = (byte*)aes->key; |
2592 | | if (rk == NULL) |
2593 | | return BAD_FUNC_ARG; |
2594 | | |
2595 | | #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \ |
2596 | | defined(WOLFSSL_AES_OFB) |
2597 | | aes->left = 0; |
2598 | | #endif |
2599 | | |
2600 | | aes->rounds = keylen/4 + 6; |
2601 | | |
2602 | | #ifdef FREESCALE_MMCAU_CLASSIC |
2603 | | if ((wc_ptr_t)userKey % WOLFSSL_MMCAU_ALIGNMENT) { |
2604 | | #ifndef NO_WOLFSSL_ALLOC_ALIGN |
2605 | | byte* tmp = (byte*)XMALLOC(keylen + WOLFSSL_MMCAU_ALIGNMENT, |
2606 | | aes->heap, DYNAMIC_TYPE_TMP_BUFFER); |
2607 | | if (tmp == NULL) { |
2608 | | return MEMORY_E; |
2609 | | } |
2610 | | alignOffset = WOLFSSL_MMCAU_ALIGNMENT - |
2611 | | ((wc_ptr_t)tmp % WOLFSSL_MMCAU_ALIGNMENT); |
2612 | | tmpKey = tmp + alignOffset; |
2613 | | XMEMCPY(tmpKey, userKey, keylen); |
2614 | | tmpKeyDynamic = 1; |
2615 | | #else |
2616 | | WOLFSSL_MSG("Bad cau_aes_set_key alignment"); |
2617 | | return BAD_ALIGN_E; |
2618 | | #endif |
2619 | | } |
2620 | | #endif |
2621 | | |
2622 | | ret = wolfSSL_CryptHwMutexLock(); |
2623 | | if(ret == 0) { |
2624 | | #ifdef FREESCALE_MMCAU_CLASSIC |
2625 | | cau_aes_set_key(tmpKey, keylen*8, rk); |
2626 | | #else |
2627 | | MMCAU_AES_SetKey(tmpKey, keylen, rk); |
2628 | | #endif |
2629 | | wolfSSL_CryptHwMutexUnLock(); |
2630 | | |
2631 | | ret = wc_AesSetIV(aes, iv); |
2632 | | } |
2633 | | |
2634 | | if (tmpKeyDynamic == 1) { |
2635 | | XFREE(tmpKey - alignOffset, aes->heap, DYNAMIC_TYPE_TMP_BUFFER); |
2636 | | } |
2637 | | |
2638 | | return ret; |
2639 | | } |
2640 | | |
2641 | | int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen, |
2642 | | const byte* iv, int dir) |
2643 | | { |
2644 | | return wc_AesSetKey(aes, userKey, keylen, iv, dir); |
2645 | | } |
2646 | | |
2647 | | #elif defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT) |
2648 | | int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv, |
2649 | | int dir) |
2650 | | { |
2651 | | int ret; |
2652 | | |
2653 | | if (aes == NULL || (keylen != 16 && keylen != 24 && keylen != 32)) { |
2654 | | return BAD_FUNC_ARG; |
2655 | | } |
2656 | | |
2657 | | aes->ctxInitDone = 0; |
2658 | | #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \ |
2659 | | defined(WOLFSSL_AES_OFB) |
2660 | | aes->left = 0; |
2661 | | #endif |
2662 | | |
2663 | | ret = se050_aes_set_key(aes, userKey, keylen, iv, dir); |
2664 | | if (ret == 0) { |
2665 | | ret = wc_AesSetIV(aes, iv); |
2666 | | } |
2667 | | return ret; |
2668 | | } |
2669 | | |
2670 | | int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen, |
2671 | | const byte* iv, int dir) |
2672 | | { |
2673 | | return wc_AesSetKey(aes, userKey, keylen, iv, dir); |
2674 | | } |
2675 | | |
2676 | | #elif defined(WOLFSSL_NRF51_AES) |
2677 | | int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, |
2678 | | const byte* iv, int dir) |
2679 | | { |
2680 | | int ret; |
2681 | | |
2682 | | (void)dir; |
2683 | | (void)iv; |
2684 | | |
2685 | | if (aes == NULL || keylen != 16) |
2686 | | return BAD_FUNC_ARG; |
2687 | | |
2688 | | aes->keylen = keylen; |
2689 | | aes->rounds = keylen/4 + 6; |
2690 | | XMEMCPY(aes->key, userKey, keylen); |
2691 | | ret = nrf51_aes_set_key(userKey); |
2692 | | |
2693 | | #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \ |
2694 | | defined(WOLFSSL_AES_OFB) |
2695 | | aes->left = 0; |
2696 | | #endif |
2697 | | |
2698 | | return ret; |
2699 | | } |
2700 | | |
2701 | | int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen, |
2702 | | const byte* iv, int dir) |
2703 | | { |
2704 | | return wc_AesSetKey(aes, userKey, keylen, iv, dir); |
2705 | | } |
2706 | | #elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \ |
2707 | | !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES) |
2708 | | |
2709 | | int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, |
2710 | | const byte* iv, int dir) |
2711 | | { |
2712 | | (void)dir; |
2713 | | (void)iv; |
2714 | | |
2715 | | if (aes == NULL || (keylen != 16 && keylen != 24 && keylen != 32)) { |
2716 | | return BAD_FUNC_ARG; |
2717 | | } |
2718 | | |
2719 | | aes->keylen = keylen; |
2720 | | aes->rounds = keylen/4 + 6; |
2721 | | |
2722 | | XMEMCPY(aes->key, userKey, keylen); |
2723 | | #if defined(WOLFSSL_AES_COUNTER) |
2724 | | aes->left = 0; |
2725 | | #endif |
2726 | | return wc_AesSetIV(aes, iv); |
2727 | | } |
2728 | | |
2729 | | int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen, |
2730 | | const byte* iv, int dir) |
2731 | | { |
2732 | | return wc_AesSetKey(aes, userKey, keylen, iv, dir); |
2733 | | } |
2734 | | #elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES) |
2735 | | |
2736 | | int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, const byte* iv, |
2737 | | int dir) |
2738 | | { |
2739 | | SaSiError_t ret = SASI_OK; |
2740 | | SaSiAesIv_t iv_aes; |
2741 | | |
2742 | | if (aes == NULL || |
2743 | | (keylen != AES_128_KEY_SIZE && |
2744 | | keylen != AES_192_KEY_SIZE && |
2745 | | keylen != AES_256_KEY_SIZE)) { |
2746 | | return BAD_FUNC_ARG; |
2747 | | } |
2748 | | #if defined(AES_MAX_KEY_SIZE) |
2749 | | if (keylen > (AES_MAX_KEY_SIZE/8)) { |
2750 | | return BAD_FUNC_ARG; |
2751 | | } |
2752 | | #endif |
2753 | | if (dir != AES_ENCRYPTION && |
2754 | | dir != AES_DECRYPTION) { |
2755 | | return BAD_FUNC_ARG; |
2756 | | } |
2757 | | |
2758 | | if (dir == AES_ENCRYPTION) { |
2759 | | aes->ctx.mode = SASI_AES_ENCRYPT; |
2760 | | SaSi_AesInit(&aes->ctx.user_ctx, |
2761 | | SASI_AES_ENCRYPT, |
2762 | | SASI_AES_MODE_CBC, |
2763 | | SASI_AES_PADDING_NONE); |
2764 | | } |
2765 | | else { |
2766 | | aes->ctx.mode = SASI_AES_DECRYPT; |
2767 | | SaSi_AesInit(&aes->ctx.user_ctx, |
2768 | | SASI_AES_DECRYPT, |
2769 | | SASI_AES_MODE_CBC, |
2770 | | SASI_AES_PADDING_NONE); |
2771 | | } |
2772 | | |
2773 | | aes->keylen = keylen; |
2774 | | aes->rounds = keylen/4 + 6; |
2775 | | XMEMCPY(aes->key, userKey, keylen); |
2776 | | |
2777 | | aes->ctx.key.pKey = (byte*)aes->key; |
2778 | | aes->ctx.key.keySize= keylen; |
2779 | | |
2780 | | ret = SaSi_AesSetKey(&aes->ctx.user_ctx, |
2781 | | SASI_AES_USER_KEY, |
2782 | | &aes->ctx.key, |
2783 | | sizeof(aes->ctx.key)); |
2784 | | if (ret != SASI_OK) { |
2785 | | return BAD_FUNC_ARG; |
2786 | | } |
2787 | | |
2788 | | ret = wc_AesSetIV(aes, iv); |
2789 | | |
2790 | | if (iv) |
2791 | | XMEMCPY(iv_aes, iv, AES_BLOCK_SIZE); |
2792 | | else |
2793 | | XMEMSET(iv_aes, 0, AES_BLOCK_SIZE); |
2794 | | |
2795 | | |
2796 | | ret = SaSi_AesSetIv(&aes->ctx.user_ctx, iv_aes); |
2797 | | if (ret != SASI_OK) { |
2798 | | return ret; |
2799 | | } |
2800 | | return ret; |
2801 | | } |
2802 | | #if defined(WOLFSSL_AES_DIRECT) |
2803 | | int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen, |
2804 | | const byte* iv, int dir) |
2805 | | { |
2806 | | return wc_AesSetKey(aes, userKey, keylen, iv, dir); |
2807 | | } |
2808 | | #endif |
2809 | | |
2810 | | #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) \ |
2811 | | && !defined(WOLFSSL_QNX_CAAM) |
2812 | | /* implemented in wolfcrypt/src/port/caam/caam_aes.c */ |
2813 | | |
2814 | | #elif defined(WOLFSSL_AFALG) |
2815 | | /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */ |
2816 | | |
2817 | | #elif defined(WOLFSSL_DEVCRYPTO_AES) |
2818 | | /* implemented in wolfcrypt/src/port/devcrypto/devcrypto_aes.c */ |
2819 | | |
2820 | | #elif defined(WOLFSSL_SILABS_SE_ACCEL) |
2821 | | /* implemented in wolfcrypt/src/port/silabs/silabs_hash.c */ |
2822 | | |
2823 | | #else |
2824 | | |
2825 | | /* Software AES - SetKey */ |
2826 | | static WARN_UNUSED_RESULT int wc_AesSetKeyLocal( |
2827 | | Aes* aes, const byte* userKey, word32 keylen, const byte* iv, int dir, |
2828 | | int checkKeyLen) |
2829 | 76 | { |
2830 | 76 | int ret; |
2831 | 76 | word32 *rk; |
2832 | 76 | #ifdef NEED_AES_TABLES |
2833 | 76 | word32 temp; |
2834 | 76 | unsigned int i = 0; |
2835 | 76 | #endif |
2836 | | #ifdef WOLFSSL_IMX6_CAAM_BLOB |
2837 | | byte local[32]; |
2838 | | word32 localSz = 32; |
2839 | | #endif |
2840 | | |
2841 | | #ifdef WOLFSSL_IMX6_CAAM_BLOB |
2842 | | if (keylen == (16 + WC_CAAM_BLOB_SZ) || |
2843 | | keylen == (24 + WC_CAAM_BLOB_SZ) || |
2844 | | keylen == (32 + WC_CAAM_BLOB_SZ)) { |
2845 | | if (wc_caamOpenBlob((byte*)userKey, keylen, local, &localSz) != 0) { |
2846 | | return BAD_FUNC_ARG; |
2847 | | } |
2848 | | |
2849 | | /* set local values */ |
2850 | | userKey = local; |
2851 | | keylen = localSz; |
2852 | | } |
2853 | | #endif |
2854 | | |
2855 | | #ifdef WOLFSSL_SECO_CAAM |
2856 | | /* if set to use hardware than import the key */ |
2857 | | if (aes->devId == WOLFSSL_SECO_DEVID) { |
2858 | | int keyGroup = 1; /* group one was chosen arbitrarily */ |
2859 | | unsigned int keyIdOut; |
2860 | | byte importiv[GCM_NONCE_MID_SZ]; |
2861 | | int importivSz = GCM_NONCE_MID_SZ; |
2862 | | int keyType = 0; |
2863 | | WC_RNG rng; |
2864 | | |
2865 | | if (wc_InitRng(&rng) != 0) { |
2866 | | WOLFSSL_MSG("RNG init for IV failed"); |
2867 | | return WC_HW_E; |
2868 | | } |
2869 | | |
2870 | | if (wc_RNG_GenerateBlock(&rng, importiv, importivSz) != 0) { |
2871 | | WOLFSSL_MSG("Generate IV failed"); |
2872 | | wc_FreeRng(&rng); |
2873 | | return WC_HW_E; |
2874 | | } |
2875 | | wc_FreeRng(&rng); |
2876 | | |
2877 | | switch (keylen) { |
2878 | | case AES_128_KEY_SIZE: keyType = CAAM_KEYTYPE_AES128; break; |
2879 | | case AES_192_KEY_SIZE: keyType = CAAM_KEYTYPE_AES192; break; |
2880 | | case AES_256_KEY_SIZE: keyType = CAAM_KEYTYPE_AES256; break; |
2881 | | } |
2882 | | |
2883 | | keyIdOut = wc_SECO_WrapKey(0, (byte*)userKey, keylen, importiv, |
2884 | | importivSz, keyType, CAAM_KEY_TRANSIENT, keyGroup); |
2885 | | if (keyIdOut == 0) { |
2886 | | return WC_HW_E; |
2887 | | } |
2888 | | aes->blackKey = keyIdOut; |
2889 | | return 0; |
2890 | | } |
2891 | | #endif |
2892 | | |
2893 | 76 | #if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \ |
2894 | 76 | (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \ |
2895 | 76 | (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)) |
2896 | 76 | #ifdef WOLF_CRYPTO_CB |
2897 | 76 | if (aes->devId != INVALID_DEVID) |
2898 | 0 | #endif |
2899 | 0 | { |
2900 | 0 | if (keylen > sizeof(aes->devKey)) { |
2901 | 0 | return BAD_FUNC_ARG; |
2902 | 0 | } |
2903 | 0 | XMEMCPY(aes->devKey, userKey, keylen); |
2904 | 0 | } |
2905 | 76 | #endif |
2906 | | |
2907 | 76 | if (checkKeyLen) { |
2908 | 76 | if (keylen != 16 && keylen != 24 && keylen != 32) { |
2909 | 0 | return BAD_FUNC_ARG; |
2910 | 0 | } |
2911 | | #if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE < 256 |
2912 | | /* Check key length only when AES_MAX_KEY_SIZE doesn't allow |
2913 | | * all key sizes. Otherwise this condition is never true. */ |
2914 | | if (keylen > (AES_MAX_KEY_SIZE / 8)) { |
2915 | | return BAD_FUNC_ARG; |
2916 | | } |
2917 | | #endif |
2918 | 76 | } |
2919 | | |
2920 | 76 | #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_COUNTER) || \ |
2921 | 76 | defined(WOLFSSL_AES_OFB) |
2922 | 76 | aes->left = 0; |
2923 | 76 | #endif |
2924 | | |
2925 | 76 | aes->keylen = keylen; |
2926 | 76 | aes->rounds = (keylen/4) + 6; |
2927 | | |
2928 | | #ifdef WOLFSSL_AESNI |
2929 | | aes->use_aesni = 0; |
2930 | | if (checkAESNI == 0) { |
2931 | | haveAESNI = Check_CPU_support_AES(); |
2932 | | checkAESNI = 1; |
2933 | | } |
2934 | | if (haveAESNI) { |
2935 | | aes->use_aesni = 1; |
2936 | | if (iv) |
2937 | | XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE); |
2938 | | else |
2939 | | XMEMSET(aes->reg, 0, AES_BLOCK_SIZE); |
2940 | | if (dir == AES_ENCRYPTION) |
2941 | | return AES_set_encrypt_key(userKey, keylen * 8, aes); |
2942 | | #ifdef HAVE_AES_DECRYPT |
2943 | | else |
2944 | | return AES_set_decrypt_key(userKey, keylen * 8, aes); |
2945 | | #endif |
2946 | | } |
2947 | | #endif /* WOLFSSL_AESNI */ |
2948 | | |
2949 | | #ifdef WOLFSSL_KCAPI_AES |
2950 | | XMEMCPY(aes->devKey, userKey, keylen); |
2951 | | if (aes->init != 0) { |
2952 | | kcapi_cipher_destroy(aes->handle); |
2953 | | aes->handle = NULL; |
2954 | | aes->init = 0; |
2955 | | } |
2956 | | (void)dir; |
2957 | | #endif |
2958 | | |
2959 | 76 | if (keylen > sizeof(aes->key)) { |
2960 | 0 | return BAD_FUNC_ARG; |
2961 | 0 | } |
2962 | | #if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES) |
2963 | | return wc_psa_aes_set_key(aes, userKey, keylen, (uint8_t*)iv, |
2964 | | ((psa_algorithm_t)0), dir); |
2965 | | #endif |
2966 | | |
2967 | 76 | rk = aes->key; |
2968 | 76 | XMEMCPY(rk, userKey, keylen); |
2969 | 76 | #if defined(LITTLE_ENDIAN_ORDER) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \ |
2970 | 76 | (!defined(WOLFSSL_ESP32WROOM32_CRYPT) || \ |
2971 | 76 | defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES)) |
2972 | 76 | ByteReverseWords(rk, rk, keylen); |
2973 | 76 | #endif |
2974 | | |
2975 | | #ifdef WOLFSSL_IMXRT_DCP |
2976 | | /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */ |
2977 | | temp = 0; |
2978 | | if (keylen == 16) |
2979 | | temp = DCPAesSetKey(aes, userKey, keylen, iv, dir); |
2980 | | if (temp != 0) |
2981 | | return WC_HW_E; |
2982 | | #endif |
2983 | | |
2984 | 76 | #ifdef NEED_AES_TABLES |
2985 | 76 | switch (keylen) { |
2986 | 0 | #if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 128 && \ |
2987 | 0 | defined(WOLFSSL_AES_128) |
2988 | 76 | case 16: |
2989 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
2990 | | wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp)); |
2991 | | #endif |
2992 | 760 | while (1) |
2993 | 760 | { |
2994 | 760 | temp = rk[3]; |
2995 | 760 | rk[4] = rk[0] ^ |
2996 | 760 | #ifndef WOLFSSL_AES_SMALL_TABLES |
2997 | 760 | (Te[2][GETBYTE(temp, 2)] & 0xff000000) ^ |
2998 | 760 | (Te[3][GETBYTE(temp, 1)] & 0x00ff0000) ^ |
2999 | 760 | (Te[0][GETBYTE(temp, 0)] & 0x0000ff00) ^ |
3000 | 760 | (Te[1][GETBYTE(temp, 3)] & 0x000000ff) ^ |
3001 | | #else |
3002 | | ((word32)Tsbox[GETBYTE(temp, 2)] << 24) ^ |
3003 | | ((word32)Tsbox[GETBYTE(temp, 1)] << 16) ^ |
3004 | | ((word32)Tsbox[GETBYTE(temp, 0)] << 8) ^ |
3005 | | ((word32)Tsbox[GETBYTE(temp, 3)]) ^ |
3006 | | #endif |
3007 | 760 | rcon[i]; |
3008 | 760 | rk[5] = rk[1] ^ rk[4]; |
3009 | 760 | rk[6] = rk[2] ^ rk[5]; |
3010 | 760 | rk[7] = rk[3] ^ rk[6]; |
3011 | 760 | if (++i == 10) |
3012 | 76 | break; |
3013 | 684 | rk += 4; |
3014 | 684 | } |
3015 | 76 | break; |
3016 | 0 | #endif /* 128 */ |
3017 | | |
3018 | 0 | #if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 192 && \ |
3019 | 0 | defined(WOLFSSL_AES_192) |
3020 | 0 | case 24: |
3021 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
3022 | | wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp)); |
3023 | | #endif |
3024 | | /* for (;;) here triggers a bug in VC60 SP4 w/ Pro Pack */ |
3025 | 0 | while (1) |
3026 | 0 | { |
3027 | 0 | temp = rk[ 5]; |
3028 | 0 | rk[ 6] = rk[ 0] ^ |
3029 | 0 | #ifndef WOLFSSL_AES_SMALL_TABLES |
3030 | 0 | (Te[2][GETBYTE(temp, 2)] & 0xff000000) ^ |
3031 | 0 | (Te[3][GETBYTE(temp, 1)] & 0x00ff0000) ^ |
3032 | 0 | (Te[0][GETBYTE(temp, 0)] & 0x0000ff00) ^ |
3033 | 0 | (Te[1][GETBYTE(temp, 3)] & 0x000000ff) ^ |
3034 | | #else |
3035 | | ((word32)Tsbox[GETBYTE(temp, 2)] << 24) ^ |
3036 | | ((word32)Tsbox[GETBYTE(temp, 1)] << 16) ^ |
3037 | | ((word32)Tsbox[GETBYTE(temp, 0)] << 8) ^ |
3038 | | ((word32)Tsbox[GETBYTE(temp, 3)]) ^ |
3039 | | #endif |
3040 | 0 | rcon[i]; |
3041 | 0 | rk[ 7] = rk[ 1] ^ rk[ 6]; |
3042 | 0 | rk[ 8] = rk[ 2] ^ rk[ 7]; |
3043 | 0 | rk[ 9] = rk[ 3] ^ rk[ 8]; |
3044 | 0 | if (++i == 8) |
3045 | 0 | break; |
3046 | 0 | rk[10] = rk[ 4] ^ rk[ 9]; |
3047 | 0 | rk[11] = rk[ 5] ^ rk[10]; |
3048 | 0 | rk += 6; |
3049 | 0 | } |
3050 | 0 | break; |
3051 | 0 | #endif /* 192 */ |
3052 | | |
3053 | 0 | #if defined(AES_MAX_KEY_SIZE) && AES_MAX_KEY_SIZE >= 256 && \ |
3054 | 0 | defined(WOLFSSL_AES_256) |
3055 | 0 | case 32: |
3056 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
3057 | | wc_MemZero_Add("wc_AesSetKeyLocal temp", &temp, sizeof(temp)); |
3058 | | #endif |
3059 | 0 | while (1) |
3060 | 0 | { |
3061 | 0 | temp = rk[ 7]; |
3062 | 0 | rk[ 8] = rk[ 0] ^ |
3063 | 0 | #ifndef WOLFSSL_AES_SMALL_TABLES |
3064 | 0 | (Te[2][GETBYTE(temp, 2)] & 0xff000000) ^ |
3065 | 0 | (Te[3][GETBYTE(temp, 1)] & 0x00ff0000) ^ |
3066 | 0 | (Te[0][GETBYTE(temp, 0)] & 0x0000ff00) ^ |
3067 | 0 | (Te[1][GETBYTE(temp, 3)] & 0x000000ff) ^ |
3068 | | #else |
3069 | | ((word32)Tsbox[GETBYTE(temp, 2)] << 24) ^ |
3070 | | ((word32)Tsbox[GETBYTE(temp, 1)] << 16) ^ |
3071 | | ((word32)Tsbox[GETBYTE(temp, 0)] << 8) ^ |
3072 | | ((word32)Tsbox[GETBYTE(temp, 3)]) ^ |
3073 | | #endif |
3074 | 0 | rcon[i]; |
3075 | 0 | rk[ 9] = rk[ 1] ^ rk[ 8]; |
3076 | 0 | rk[10] = rk[ 2] ^ rk[ 9]; |
3077 | 0 | rk[11] = rk[ 3] ^ rk[10]; |
3078 | 0 | if (++i == 7) |
3079 | 0 | break; |
3080 | 0 | temp = rk[11]; |
3081 | 0 | rk[12] = rk[ 4] ^ |
3082 | 0 | #ifndef WOLFSSL_AES_SMALL_TABLES |
3083 | 0 | (Te[2][GETBYTE(temp, 3)] & 0xff000000) ^ |
3084 | 0 | (Te[3][GETBYTE(temp, 2)] & 0x00ff0000) ^ |
3085 | 0 | (Te[0][GETBYTE(temp, 1)] & 0x0000ff00) ^ |
3086 | 0 | (Te[1][GETBYTE(temp, 0)] & 0x000000ff); |
3087 | | #else |
3088 | | ((word32)Tsbox[GETBYTE(temp, 3)] << 24) ^ |
3089 | | ((word32)Tsbox[GETBYTE(temp, 2)] << 16) ^ |
3090 | | ((word32)Tsbox[GETBYTE(temp, 1)] << 8) ^ |
3091 | | ((word32)Tsbox[GETBYTE(temp, 0)]); |
3092 | | #endif |
3093 | 0 | rk[13] = rk[ 5] ^ rk[12]; |
3094 | 0 | rk[14] = rk[ 6] ^ rk[13]; |
3095 | 0 | rk[15] = rk[ 7] ^ rk[14]; |
3096 | |
|
3097 | 0 | rk += 8; |
3098 | 0 | } |
3099 | 0 | break; |
3100 | 0 | #endif /* 256 */ |
3101 | | |
3102 | 0 | default: |
3103 | 0 | return BAD_FUNC_ARG; |
3104 | 76 | } /* switch */ |
3105 | 76 | ForceZero(&temp, sizeof(temp)); |
3106 | | |
3107 | 76 | #if defined(HAVE_AES_DECRYPT) |
3108 | 76 | if (dir == AES_DECRYPTION) { |
3109 | 2 | unsigned int j; |
3110 | 2 | rk = aes->key; |
3111 | | |
3112 | | /* invert the order of the round keys: */ |
3113 | 12 | for (i = 0, j = 4* aes->rounds; i < j; i += 4, j -= 4) { |
3114 | 10 | temp = rk[i ]; rk[i ] = rk[j ]; rk[j ] = temp; |
3115 | 10 | temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp; |
3116 | 10 | temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp; |
3117 | 10 | temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp; |
3118 | 10 | } |
3119 | 2 | ForceZero(&temp, sizeof(temp)); |
3120 | 2 | #if !defined(WOLFSSL_AES_SMALL_TABLES) |
3121 | | /* apply the inverse MixColumn transform to all round keys but the |
3122 | | first and the last: */ |
3123 | 20 | for (i = 1; i < aes->rounds; i++) { |
3124 | 18 | rk += 4; |
3125 | 18 | rk[0] = |
3126 | 18 | Td[0][Te[1][GETBYTE(rk[0], 3)] & 0xff] ^ |
3127 | 18 | Td[1][Te[1][GETBYTE(rk[0], 2)] & 0xff] ^ |
3128 | 18 | Td[2][Te[1][GETBYTE(rk[0], 1)] & 0xff] ^ |
3129 | 18 | Td[3][Te[1][GETBYTE(rk[0], 0)] & 0xff]; |
3130 | 18 | rk[1] = |
3131 | 18 | Td[0][Te[1][GETBYTE(rk[1], 3)] & 0xff] ^ |
3132 | 18 | Td[1][Te[1][GETBYTE(rk[1], 2)] & 0xff] ^ |
3133 | 18 | Td[2][Te[1][GETBYTE(rk[1], 1)] & 0xff] ^ |
3134 | 18 | Td[3][Te[1][GETBYTE(rk[1], 0)] & 0xff]; |
3135 | 18 | rk[2] = |
3136 | 18 | Td[0][Te[1][GETBYTE(rk[2], 3)] & 0xff] ^ |
3137 | 18 | Td[1][Te[1][GETBYTE(rk[2], 2)] & 0xff] ^ |
3138 | 18 | Td[2][Te[1][GETBYTE(rk[2], 1)] & 0xff] ^ |
3139 | 18 | Td[3][Te[1][GETBYTE(rk[2], 0)] & 0xff]; |
3140 | 18 | rk[3] = |
3141 | 18 | Td[0][Te[1][GETBYTE(rk[3], 3)] & 0xff] ^ |
3142 | 18 | Td[1][Te[1][GETBYTE(rk[3], 2)] & 0xff] ^ |
3143 | 18 | Td[2][Te[1][GETBYTE(rk[3], 1)] & 0xff] ^ |
3144 | 18 | Td[3][Te[1][GETBYTE(rk[3], 0)] & 0xff]; |
3145 | 18 | } |
3146 | 2 | #endif |
3147 | 2 | } |
3148 | | #else |
3149 | | (void)dir; |
3150 | | #endif /* HAVE_AES_DECRYPT */ |
3151 | 76 | (void)temp; |
3152 | 76 | #endif /* NEED_AES_TABLES */ |
3153 | | |
3154 | | #if defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES) |
3155 | | XMEMCPY((byte*)aes->key, userKey, keylen); |
3156 | | if (WOLFSSL_SCE_GSCE_HANDLE.p_cfg->endian_flag == CRYPTO_WORD_ENDIAN_BIG) { |
3157 | | ByteReverseWords(aes->key, aes->key, 32); |
3158 | | } |
3159 | | #endif |
3160 | | |
3161 | 76 | ret = wc_AesSetIV(aes, iv); |
3162 | | |
3163 | | #if defined(WOLFSSL_DEVCRYPTO) && \ |
3164 | | (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC)) |
3165 | | aes->ctx.cfd = -1; |
3166 | | #endif |
3167 | | #ifdef WOLFSSL_IMX6_CAAM_BLOB |
3168 | | ForceZero(local, sizeof(local)); |
3169 | | #endif |
3170 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
3171 | | wc_MemZero_Check(&temp, sizeof(temp)); |
3172 | | #endif |
3173 | 76 | return ret; |
3174 | 76 | } |
3175 | | |
3176 | | int wc_AesSetKey(Aes* aes, const byte* userKey, word32 keylen, |
3177 | | const byte* iv, int dir) |
3178 | 76 | { |
3179 | 76 | if (aes == NULL) { |
3180 | 0 | return BAD_FUNC_ARG; |
3181 | 0 | } |
3182 | 76 | if (keylen > sizeof(aes->key)) { |
3183 | 0 | return BAD_FUNC_ARG; |
3184 | 0 | } |
3185 | | |
3186 | 76 | return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 1); |
3187 | 76 | } |
3188 | | |
3189 | | #if defined(WOLFSSL_AES_DIRECT) || defined(WOLFSSL_AES_COUNTER) |
3190 | | /* AES-CTR and AES-DIRECT need to use this for key setup */ |
3191 | | /* This function allows key sizes that are not 128/192/256 bits */ |
3192 | | int wc_AesSetKeyDirect(Aes* aes, const byte* userKey, word32 keylen, |
3193 | | const byte* iv, int dir) |
3194 | 0 | { |
3195 | 0 | if (aes == NULL) { |
3196 | 0 | return BAD_FUNC_ARG; |
3197 | 0 | } |
3198 | 0 | if (keylen > sizeof(aes->key)) { |
3199 | 0 | return BAD_FUNC_ARG; |
3200 | 0 | } |
3201 | | |
3202 | 0 | return wc_AesSetKeyLocal(aes, userKey, keylen, iv, dir, 0); |
3203 | 0 | } |
3204 | | #endif /* WOLFSSL_AES_DIRECT || WOLFSSL_AES_COUNTER */ |
3205 | | #endif /* wc_AesSetKey block */ |
3206 | | |
3207 | | |
3208 | | /* wc_AesSetIV is shared between software and hardware */ |
3209 | | int wc_AesSetIV(Aes* aes, const byte* iv) |
3210 | 76 | { |
3211 | 76 | if (aes == NULL) |
3212 | 0 | return BAD_FUNC_ARG; |
3213 | | |
3214 | 76 | if (iv) |
3215 | 76 | XMEMCPY(aes->reg, iv, AES_BLOCK_SIZE); |
3216 | 0 | else |
3217 | 0 | XMEMSET(aes->reg, 0, AES_BLOCK_SIZE); |
3218 | | |
3219 | 76 | #if defined(WOLFSSL_AES_COUNTER) || defined(WOLFSSL_AES_CFB) || \ |
3220 | 76 | defined(WOLFSSL_AES_OFB) || defined(WOLFSSL_AES_XTS) |
3221 | | /* Clear any unused bytes from last cipher op. */ |
3222 | 76 | aes->left = 0; |
3223 | 76 | #endif |
3224 | | |
3225 | 76 | return 0; |
3226 | 76 | } |
3227 | | |
3228 | | /* AES-DIRECT */ |
3229 | | #if defined(WOLFSSL_AES_DIRECT) |
3230 | | #if defined(HAVE_COLDFIRE_SEC) |
3231 | | #error "Coldfire SEC doesn't yet support AES direct" |
3232 | | |
3233 | | #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \ |
3234 | | !defined(WOLFSSL_QNX_CAAM) |
3235 | | /* implemented in wolfcrypt/src/port/caam/caam_aes.c */ |
3236 | | |
3237 | | #elif defined(WOLFSSL_AFALG) |
3238 | | /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */ |
3239 | | |
3240 | | #elif defined(WOLFSSL_DEVCRYPTO_AES) |
3241 | | /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */ |
3242 | | |
3243 | | #elif defined(WOLFSSL_LINUXKM) && defined(WOLFSSL_AESNI) |
3244 | | |
3245 | | WARN_UNUSED_RESULT int wc_AesEncryptDirect( |
3246 | | Aes* aes, byte* out, const byte* in) |
3247 | | { |
3248 | | int ret; |
3249 | | if (haveAESNI && aes->use_aesni) |
3250 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
3251 | | ret = wc_AesEncrypt(aes, in, out); |
3252 | | if (haveAESNI && aes->use_aesni) |
3253 | | RESTORE_VECTOR_REGISTERS(); |
3254 | | return ret; |
3255 | | } |
3256 | | /* vector reg save/restore is explicit in all below calls to |
3257 | | * wc_Aes{En,De}cryptDirect(), so bypass the public version with a |
3258 | | * macro. |
3259 | | */ |
3260 | | #define wc_AesEncryptDirect(aes, out, in) wc_AesEncrypt(aes, in, out) |
3261 | | #ifdef HAVE_AES_DECRYPT |
3262 | | /* Allow direct access to one block decrypt */ |
3263 | | WARN_UNUSED_RESULT int wc_AesDecryptDirect( |
3264 | | Aes* aes, byte* out, const byte* in) |
3265 | | { |
3266 | | int ret; |
3267 | | if (haveAESNI && aes->use_aesni) |
3268 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
3269 | | ret = wc_AesDecrypt(aes, in, out); |
3270 | | if (haveAESNI && aes->use_aesni) |
3271 | | RESTORE_VECTOR_REGISTERS(); |
3272 | | return ret; |
3273 | | } |
3274 | | #define wc_AesDecryptDirect(aes, out, in) wc_AesDecrypt(aes, in, out) |
3275 | | #endif /* HAVE_AES_DECRYPT */ |
3276 | | |
3277 | | #else |
3278 | | |
3279 | | /* Allow direct access to one block encrypt */ |
3280 | | int wc_AesEncryptDirect(Aes* aes, byte* out, const byte* in) |
3281 | 0 | { |
3282 | 0 | return wc_AesEncrypt(aes, in, out); |
3283 | 0 | } |
3284 | | #ifdef HAVE_AES_DECRYPT |
3285 | | /* Allow direct access to one block decrypt */ |
3286 | | int wc_AesDecryptDirect(Aes* aes, byte* out, const byte* in) |
3287 | 0 | { |
3288 | 0 | return wc_AesDecrypt(aes, in, out); |
3289 | 0 | } |
3290 | | #endif /* HAVE_AES_DECRYPT */ |
3291 | | #endif /* AES direct block */ |
3292 | | #endif /* WOLFSSL_AES_DIRECT */ |
3293 | | |
3294 | | |
3295 | | /* AES-CBC */ |
3296 | | #ifdef HAVE_AES_CBC |
3297 | | #if defined(STM32_CRYPTO) |
3298 | | |
3299 | | #ifdef WOLFSSL_STM32_CUBEMX |
3300 | | int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3301 | | { |
3302 | | int ret = 0; |
3303 | | CRYP_HandleTypeDef hcryp; |
3304 | | word32 blocks = (sz / AES_BLOCK_SIZE); |
3305 | | |
3306 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3307 | | if (sz % AES_BLOCK_SIZE) { |
3308 | | return BAD_LENGTH_E; |
3309 | | } |
3310 | | #endif |
3311 | | if (blocks == 0) |
3312 | | return 0; |
3313 | | |
3314 | | ret = wc_Stm32_Aes_Init(aes, &hcryp); |
3315 | | if (ret != 0) |
3316 | | return ret; |
3317 | | |
3318 | | ret = wolfSSL_CryptHwMutexLock(); |
3319 | | if (ret != 0) { |
3320 | | return ret; |
3321 | | } |
3322 | | |
3323 | | #if defined(STM32_HAL_V2) |
3324 | | hcryp.Init.Algorithm = CRYP_AES_CBC; |
3325 | | ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE); |
3326 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
3327 | | hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT; |
3328 | | hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CBC; |
3329 | | hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE; |
3330 | | #endif |
3331 | | hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg; |
3332 | | HAL_CRYP_Init(&hcryp); |
3333 | | |
3334 | | #if defined(STM32_HAL_V2) |
3335 | | ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE, |
3336 | | (uint32_t*)out, STM32_HAL_TIMEOUT); |
3337 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
3338 | | ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE, |
3339 | | out, STM32_HAL_TIMEOUT); |
3340 | | #else |
3341 | | ret = HAL_CRYP_AESCBC_Encrypt(&hcryp, (uint8_t*)in, |
3342 | | blocks * AES_BLOCK_SIZE, |
3343 | | out, STM32_HAL_TIMEOUT); |
3344 | | #endif |
3345 | | if (ret != HAL_OK) { |
3346 | | ret = WC_TIMEOUT_E; |
3347 | | } |
3348 | | |
3349 | | /* store iv for next call */ |
3350 | | XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
3351 | | |
3352 | | HAL_CRYP_DeInit(&hcryp); |
3353 | | |
3354 | | wolfSSL_CryptHwMutexUnLock(); |
3355 | | |
3356 | | return ret; |
3357 | | } |
3358 | | #ifdef HAVE_AES_DECRYPT |
3359 | | int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3360 | | { |
3361 | | int ret = 0; |
3362 | | CRYP_HandleTypeDef hcryp; |
3363 | | word32 blocks = (sz / AES_BLOCK_SIZE); |
3364 | | |
3365 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3366 | | if (sz % AES_BLOCK_SIZE) { |
3367 | | return BAD_LENGTH_E; |
3368 | | } |
3369 | | #endif |
3370 | | if (blocks == 0) |
3371 | | return 0; |
3372 | | |
3373 | | ret = wc_Stm32_Aes_Init(aes, &hcryp); |
3374 | | if (ret != 0) |
3375 | | return ret; |
3376 | | |
3377 | | ret = wolfSSL_CryptHwMutexLock(); |
3378 | | if (ret != 0) { |
3379 | | return ret; |
3380 | | } |
3381 | | |
3382 | | /* if input and output same will overwrite input iv */ |
3383 | | XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
3384 | | |
3385 | | #if defined(STM32_HAL_V2) |
3386 | | hcryp.Init.Algorithm = CRYP_AES_CBC; |
3387 | | ByteReverseWords(aes->reg, aes->reg, AES_BLOCK_SIZE); |
3388 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
3389 | | hcryp.Init.OperatingMode = CRYP_ALGOMODE_KEYDERIVATION_DECRYPT; |
3390 | | hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CBC; |
3391 | | hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE; |
3392 | | #endif |
3393 | | |
3394 | | hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg; |
3395 | | HAL_CRYP_Init(&hcryp); |
3396 | | |
3397 | | #if defined(STM32_HAL_V2) |
3398 | | ret = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in, blocks * AES_BLOCK_SIZE, |
3399 | | (uint32_t*)out, STM32_HAL_TIMEOUT); |
3400 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
3401 | | ret = HAL_CRYPEx_AES(&hcryp, (uint8_t*)in, blocks * AES_BLOCK_SIZE, |
3402 | | out, STM32_HAL_TIMEOUT); |
3403 | | #else |
3404 | | ret = HAL_CRYP_AESCBC_Decrypt(&hcryp, (uint8_t*)in, |
3405 | | blocks * AES_BLOCK_SIZE, |
3406 | | out, STM32_HAL_TIMEOUT); |
3407 | | #endif |
3408 | | if (ret != HAL_OK) { |
3409 | | ret = WC_TIMEOUT_E; |
3410 | | } |
3411 | | |
3412 | | /* store iv for next call */ |
3413 | | XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE); |
3414 | | |
3415 | | HAL_CRYP_DeInit(&hcryp); |
3416 | | wolfSSL_CryptHwMutexUnLock(); |
3417 | | |
3418 | | return ret; |
3419 | | } |
3420 | | #endif /* HAVE_AES_DECRYPT */ |
3421 | | |
3422 | | #else /* Standard Peripheral Library */ |
3423 | | int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3424 | | { |
3425 | | int ret; |
3426 | | word32 *iv; |
3427 | | CRYP_InitTypeDef cryptInit; |
3428 | | CRYP_KeyInitTypeDef keyInit; |
3429 | | CRYP_IVInitTypeDef ivInit; |
3430 | | word32 blocks = (sz / AES_BLOCK_SIZE); |
3431 | | |
3432 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3433 | | if (sz % AES_BLOCK_SIZE) { |
3434 | | return BAD_LENGTH_E; |
3435 | | } |
3436 | | #endif |
3437 | | if (blocks == 0) |
3438 | | return 0; |
3439 | | |
3440 | | ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit); |
3441 | | if (ret != 0) |
3442 | | return ret; |
3443 | | |
3444 | | ret = wolfSSL_CryptHwMutexLock(); |
3445 | | if (ret != 0) { |
3446 | | return ret; |
3447 | | } |
3448 | | |
3449 | | /* reset registers to their default values */ |
3450 | | CRYP_DeInit(); |
3451 | | |
3452 | | /* set key */ |
3453 | | CRYP_KeyInit(&keyInit); |
3454 | | |
3455 | | /* set iv */ |
3456 | | iv = aes->reg; |
3457 | | CRYP_IVStructInit(&ivInit); |
3458 | | ByteReverseWords(iv, iv, AES_BLOCK_SIZE); |
3459 | | ivInit.CRYP_IV0Left = iv[0]; |
3460 | | ivInit.CRYP_IV0Right = iv[1]; |
3461 | | ivInit.CRYP_IV1Left = iv[2]; |
3462 | | ivInit.CRYP_IV1Right = iv[3]; |
3463 | | CRYP_IVInit(&ivInit); |
3464 | | |
3465 | | /* set direction and mode */ |
3466 | | cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt; |
3467 | | cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC; |
3468 | | CRYP_Init(&cryptInit); |
3469 | | |
3470 | | /* enable crypto processor */ |
3471 | | CRYP_Cmd(ENABLE); |
3472 | | |
3473 | | while (blocks--) { |
3474 | | /* flush IN/OUT FIFOs */ |
3475 | | CRYP_FIFOFlush(); |
3476 | | |
3477 | | CRYP_DataIn(*(uint32_t*)&in[0]); |
3478 | | CRYP_DataIn(*(uint32_t*)&in[4]); |
3479 | | CRYP_DataIn(*(uint32_t*)&in[8]); |
3480 | | CRYP_DataIn(*(uint32_t*)&in[12]); |
3481 | | |
3482 | | /* wait until the complete message has been processed */ |
3483 | | while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {} |
3484 | | |
3485 | | *(uint32_t*)&out[0] = CRYP_DataOut(); |
3486 | | *(uint32_t*)&out[4] = CRYP_DataOut(); |
3487 | | *(uint32_t*)&out[8] = CRYP_DataOut(); |
3488 | | *(uint32_t*)&out[12] = CRYP_DataOut(); |
3489 | | |
3490 | | /* store iv for next call */ |
3491 | | XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
3492 | | |
3493 | | sz -= AES_BLOCK_SIZE; |
3494 | | in += AES_BLOCK_SIZE; |
3495 | | out += AES_BLOCK_SIZE; |
3496 | | } |
3497 | | |
3498 | | /* disable crypto processor */ |
3499 | | CRYP_Cmd(DISABLE); |
3500 | | wolfSSL_CryptHwMutexUnLock(); |
3501 | | |
3502 | | return ret; |
3503 | | } |
3504 | | |
3505 | | #ifdef HAVE_AES_DECRYPT |
3506 | | int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3507 | | { |
3508 | | int ret; |
3509 | | word32 *iv; |
3510 | | CRYP_InitTypeDef cryptInit; |
3511 | | CRYP_KeyInitTypeDef keyInit; |
3512 | | CRYP_IVInitTypeDef ivInit; |
3513 | | word32 blocks = (sz / AES_BLOCK_SIZE); |
3514 | | |
3515 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3516 | | if (sz % AES_BLOCK_SIZE) { |
3517 | | return BAD_LENGTH_E; |
3518 | | } |
3519 | | #endif |
3520 | | if (blocks == 0) |
3521 | | return 0; |
3522 | | |
3523 | | ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit); |
3524 | | if (ret != 0) |
3525 | | return ret; |
3526 | | |
3527 | | ret = wolfSSL_CryptHwMutexLock(); |
3528 | | if (ret != 0) { |
3529 | | return ret; |
3530 | | } |
3531 | | |
3532 | | /* if input and output same will overwrite input iv */ |
3533 | | XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
3534 | | |
3535 | | /* reset registers to their default values */ |
3536 | | CRYP_DeInit(); |
3537 | | |
3538 | | /* set direction and key */ |
3539 | | CRYP_KeyInit(&keyInit); |
3540 | | cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt; |
3541 | | cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_Key; |
3542 | | CRYP_Init(&cryptInit); |
3543 | | |
3544 | | /* enable crypto processor */ |
3545 | | CRYP_Cmd(ENABLE); |
3546 | | |
3547 | | /* wait until key has been prepared */ |
3548 | | while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {} |
3549 | | |
3550 | | /* set direction and mode */ |
3551 | | cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Decrypt; |
3552 | | cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CBC; |
3553 | | CRYP_Init(&cryptInit); |
3554 | | |
3555 | | /* set iv */ |
3556 | | iv = aes->reg; |
3557 | | CRYP_IVStructInit(&ivInit); |
3558 | | ByteReverseWords(iv, iv, AES_BLOCK_SIZE); |
3559 | | ivInit.CRYP_IV0Left = iv[0]; |
3560 | | ivInit.CRYP_IV0Right = iv[1]; |
3561 | | ivInit.CRYP_IV1Left = iv[2]; |
3562 | | ivInit.CRYP_IV1Right = iv[3]; |
3563 | | CRYP_IVInit(&ivInit); |
3564 | | |
3565 | | /* enable crypto processor */ |
3566 | | CRYP_Cmd(ENABLE); |
3567 | | |
3568 | | while (blocks--) { |
3569 | | /* flush IN/OUT FIFOs */ |
3570 | | CRYP_FIFOFlush(); |
3571 | | |
3572 | | CRYP_DataIn(*(uint32_t*)&in[0]); |
3573 | | CRYP_DataIn(*(uint32_t*)&in[4]); |
3574 | | CRYP_DataIn(*(uint32_t*)&in[8]); |
3575 | | CRYP_DataIn(*(uint32_t*)&in[12]); |
3576 | | |
3577 | | /* wait until the complete message has been processed */ |
3578 | | while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {} |
3579 | | |
3580 | | *(uint32_t*)&out[0] = CRYP_DataOut(); |
3581 | | *(uint32_t*)&out[4] = CRYP_DataOut(); |
3582 | | *(uint32_t*)&out[8] = CRYP_DataOut(); |
3583 | | *(uint32_t*)&out[12] = CRYP_DataOut(); |
3584 | | |
3585 | | /* store iv for next call */ |
3586 | | XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE); |
3587 | | |
3588 | | in += AES_BLOCK_SIZE; |
3589 | | out += AES_BLOCK_SIZE; |
3590 | | } |
3591 | | |
3592 | | /* disable crypto processor */ |
3593 | | CRYP_Cmd(DISABLE); |
3594 | | wolfSSL_CryptHwMutexUnLock(); |
3595 | | |
3596 | | return ret; |
3597 | | } |
3598 | | #endif /* HAVE_AES_DECRYPT */ |
3599 | | #endif /* WOLFSSL_STM32_CUBEMX */ |
3600 | | |
3601 | | #elif defined(HAVE_COLDFIRE_SEC) |
3602 | | static WARN_UNUSED_RESULT int wc_AesCbcCrypt( |
3603 | | Aes* aes, byte* po, const byte* pi, word32 sz, word32 descHeader) |
3604 | | { |
3605 | | #ifdef DEBUG_WOLFSSL |
3606 | | int i; int stat1, stat2; int ret; |
3607 | | #endif |
3608 | | |
3609 | | int size; |
3610 | | volatile int v; |
3611 | | |
3612 | | if ((pi == NULL) || (po == NULL)) |
3613 | | return BAD_FUNC_ARG; /*wrong pointer*/ |
3614 | | |
3615 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3616 | | if (sz % AES_BLOCK_SIZE) { |
3617 | | return BAD_LENGTH_E; |
3618 | | } |
3619 | | #endif |
3620 | | |
3621 | | wc_LockMutex(&Mutex_AesSEC); |
3622 | | |
3623 | | /* Set descriptor for SEC */ |
3624 | | secDesc->length1 = 0x0; |
3625 | | secDesc->pointer1 = NULL; |
3626 | | |
3627 | | secDesc->length2 = AES_BLOCK_SIZE; |
3628 | | secDesc->pointer2 = (byte *)secReg; /* Initial Vector */ |
3629 | | |
3630 | | switch(aes->rounds) { |
3631 | | case 10: secDesc->length3 = 16; break; |
3632 | | case 12: secDesc->length3 = 24; break; |
3633 | | case 14: secDesc->length3 = 32; break; |
3634 | | } |
3635 | | XMEMCPY(secKey, aes->key, secDesc->length3); |
3636 | | |
3637 | | secDesc->pointer3 = (byte *)secKey; |
3638 | | secDesc->pointer4 = AESBuffIn; |
3639 | | secDesc->pointer5 = AESBuffOut; |
3640 | | secDesc->length6 = 0x0; |
3641 | | secDesc->pointer6 = NULL; |
3642 | | secDesc->length7 = 0x0; |
3643 | | secDesc->pointer7 = NULL; |
3644 | | secDesc->nextDescriptorPtr = NULL; |
3645 | | |
3646 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3647 | | size = AES_BUFFER_SIZE; |
3648 | | #endif |
3649 | | while (sz) { |
3650 | | secDesc->header = descHeader; |
3651 | | XMEMCPY(secReg, aes->reg, AES_BLOCK_SIZE); |
3652 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3653 | | sz -= AES_BUFFER_SIZE; |
3654 | | #else |
3655 | | if (sz < AES_BUFFER_SIZE) { |
3656 | | size = sz; |
3657 | | sz = 0; |
3658 | | } else { |
3659 | | size = AES_BUFFER_SIZE; |
3660 | | sz -= AES_BUFFER_SIZE; |
3661 | | } |
3662 | | #endif |
3663 | | |
3664 | | secDesc->length4 = size; |
3665 | | secDesc->length5 = size; |
3666 | | |
3667 | | XMEMCPY(AESBuffIn, pi, size); |
3668 | | if(descHeader == SEC_DESC_AES_CBC_DECRYPT) { |
3669 | | XMEMCPY((void*)aes->tmp, (void*)&(pi[size-AES_BLOCK_SIZE]), |
3670 | | AES_BLOCK_SIZE); |
3671 | | } |
3672 | | |
3673 | | /* Point SEC to the location of the descriptor */ |
3674 | | MCF_SEC_FR0 = (uint32)secDesc; |
3675 | | /* Initialize SEC and wait for encryption to complete */ |
3676 | | MCF_SEC_CCCR0 = 0x0000001a; |
3677 | | /* poll SISR to determine when channel is complete */ |
3678 | | v=0; |
3679 | | |
3680 | | while ((secDesc->header>> 24) != 0xff) v++; |
3681 | | |
3682 | | #ifdef DEBUG_WOLFSSL |
3683 | | ret = MCF_SEC_SISRH; |
3684 | | stat1 = MCF_SEC_AESSR; |
3685 | | stat2 = MCF_SEC_AESISR; |
3686 | | if (ret & 0xe0000000) { |
3687 | | db_printf("Aes_Cbc(i=%d):ISRH=%08x, AESSR=%08x, " |
3688 | | "AESISR=%08x\n", i, ret, stat1, stat2); |
3689 | | } |
3690 | | #endif |
3691 | | |
3692 | | XMEMCPY(po, AESBuffOut, size); |
3693 | | |
3694 | | if (descHeader == SEC_DESC_AES_CBC_ENCRYPT) { |
3695 | | XMEMCPY((void*)aes->reg, (void*)&(po[size-AES_BLOCK_SIZE]), |
3696 | | AES_BLOCK_SIZE); |
3697 | | } else { |
3698 | | XMEMCPY((void*)aes->reg, (void*)aes->tmp, AES_BLOCK_SIZE); |
3699 | | } |
3700 | | |
3701 | | pi += size; |
3702 | | po += size; |
3703 | | } |
3704 | | |
3705 | | wc_UnLockMutex(&Mutex_AesSEC); |
3706 | | return 0; |
3707 | | } |
3708 | | |
3709 | | int wc_AesCbcEncrypt(Aes* aes, byte* po, const byte* pi, word32 sz) |
3710 | | { |
3711 | | return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_ENCRYPT)); |
3712 | | } |
3713 | | |
3714 | | #ifdef HAVE_AES_DECRYPT |
3715 | | int wc_AesCbcDecrypt(Aes* aes, byte* po, const byte* pi, word32 sz) |
3716 | | { |
3717 | | return (wc_AesCbcCrypt(aes, po, pi, sz, SEC_DESC_AES_CBC_DECRYPT)); |
3718 | | } |
3719 | | #endif /* HAVE_AES_DECRYPT */ |
3720 | | |
3721 | | #elif defined(FREESCALE_LTC) |
3722 | | int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3723 | | { |
3724 | | word32 keySize; |
3725 | | status_t status; |
3726 | | byte *iv, *enc_key; |
3727 | | word32 blocks = (sz / AES_BLOCK_SIZE); |
3728 | | |
3729 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3730 | | if (sz % AES_BLOCK_SIZE) { |
3731 | | return BAD_LENGTH_E; |
3732 | | } |
3733 | | #endif |
3734 | | if (blocks == 0) |
3735 | | return 0; |
3736 | | |
3737 | | iv = (byte*)aes->reg; |
3738 | | enc_key = (byte*)aes->key; |
3739 | | |
3740 | | status = wc_AesGetKeySize(aes, &keySize); |
3741 | | if (status != 0) { |
3742 | | return status; |
3743 | | } |
3744 | | |
3745 | | status = wolfSSL_CryptHwMutexLock(); |
3746 | | if (status != 0) |
3747 | | return status; |
3748 | | status = LTC_AES_EncryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE, |
3749 | | iv, enc_key, keySize); |
3750 | | wolfSSL_CryptHwMutexUnLock(); |
3751 | | |
3752 | | /* store iv for next call */ |
3753 | | if (status == kStatus_Success) { |
3754 | | XMEMCPY(iv, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
3755 | | } |
3756 | | |
3757 | | return (status == kStatus_Success) ? 0 : -1; |
3758 | | } |
3759 | | |
3760 | | #ifdef HAVE_AES_DECRYPT |
3761 | | int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3762 | | { |
3763 | | word32 keySize; |
3764 | | status_t status; |
3765 | | byte* iv, *dec_key; |
3766 | | byte temp_block[AES_BLOCK_SIZE]; |
3767 | | word32 blocks = (sz / AES_BLOCK_SIZE); |
3768 | | |
3769 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3770 | | if (sz % AES_BLOCK_SIZE) { |
3771 | | return BAD_LENGTH_E; |
3772 | | } |
3773 | | #endif |
3774 | | if (blocks == 0) |
3775 | | return 0; |
3776 | | |
3777 | | iv = (byte*)aes->reg; |
3778 | | dec_key = (byte*)aes->key; |
3779 | | |
3780 | | status = wc_AesGetKeySize(aes, &keySize); |
3781 | | if (status != 0) { |
3782 | | return status; |
3783 | | } |
3784 | | |
3785 | | /* get IV for next call */ |
3786 | | XMEMCPY(temp_block, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
3787 | | |
3788 | | status = wolfSSL_CryptHwMutexLock(); |
3789 | | if (status != 0) |
3790 | | return status; |
3791 | | status = LTC_AES_DecryptCbc(LTC_BASE, in, out, blocks * AES_BLOCK_SIZE, |
3792 | | iv, dec_key, keySize, kLTC_EncryptKey); |
3793 | | wolfSSL_CryptHwMutexUnLock(); |
3794 | | |
3795 | | /* store IV for next call */ |
3796 | | if (status == kStatus_Success) { |
3797 | | XMEMCPY(iv, temp_block, AES_BLOCK_SIZE); |
3798 | | } |
3799 | | |
3800 | | return (status == kStatus_Success) ? 0 : -1; |
3801 | | } |
3802 | | #endif /* HAVE_AES_DECRYPT */ |
3803 | | |
3804 | | #elif defined(FREESCALE_MMCAU) |
3805 | | int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3806 | | { |
3807 | | int i; |
3808 | | int offset = 0; |
3809 | | byte *iv; |
3810 | | byte temp_block[AES_BLOCK_SIZE]; |
3811 | | word32 blocks = (sz / AES_BLOCK_SIZE); |
3812 | | int ret; |
3813 | | |
3814 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3815 | | if (sz % AES_BLOCK_SIZE) { |
3816 | | return BAD_LENGTH_E; |
3817 | | } |
3818 | | #endif |
3819 | | if (blocks == 0) |
3820 | | return 0; |
3821 | | |
3822 | | iv = (byte*)aes->reg; |
3823 | | |
3824 | | while (blocks--) { |
3825 | | XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE); |
3826 | | |
3827 | | /* XOR block with IV for CBC */ |
3828 | | for (i = 0; i < AES_BLOCK_SIZE; i++) |
3829 | | temp_block[i] ^= iv[i]; |
3830 | | |
3831 | | ret = wc_AesEncrypt(aes, temp_block, out + offset); |
3832 | | if (ret != 0) |
3833 | | return ret; |
3834 | | |
3835 | | offset += AES_BLOCK_SIZE; |
3836 | | |
3837 | | /* store IV for next block */ |
3838 | | XMEMCPY(iv, out + offset - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
3839 | | } |
3840 | | |
3841 | | return 0; |
3842 | | } |
3843 | | #ifdef HAVE_AES_DECRYPT |
3844 | | int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3845 | | { |
3846 | | int i; |
3847 | | int offset = 0; |
3848 | | byte* iv; |
3849 | | byte temp_block[AES_BLOCK_SIZE]; |
3850 | | word32 blocks = (sz / AES_BLOCK_SIZE); |
3851 | | |
3852 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3853 | | if (sz % AES_BLOCK_SIZE) { |
3854 | | return BAD_LENGTH_E; |
3855 | | } |
3856 | | #endif |
3857 | | if (blocks == 0) |
3858 | | return 0; |
3859 | | |
3860 | | iv = (byte*)aes->reg; |
3861 | | |
3862 | | while (blocks--) { |
3863 | | XMEMCPY(temp_block, in + offset, AES_BLOCK_SIZE); |
3864 | | |
3865 | | wc_AesDecrypt(aes, in + offset, out + offset); |
3866 | | |
3867 | | /* XOR block with IV for CBC */ |
3868 | | for (i = 0; i < AES_BLOCK_SIZE; i++) |
3869 | | (out + offset)[i] ^= iv[i]; |
3870 | | |
3871 | | /* store IV for next block */ |
3872 | | XMEMCPY(iv, temp_block, AES_BLOCK_SIZE); |
3873 | | |
3874 | | offset += AES_BLOCK_SIZE; |
3875 | | } |
3876 | | |
3877 | | return 0; |
3878 | | } |
3879 | | #endif /* HAVE_AES_DECRYPT */ |
3880 | | |
3881 | | #elif defined(WOLFSSL_PIC32MZ_CRYPT) |
3882 | | |
3883 | | int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3884 | | { |
3885 | | int ret; |
3886 | | |
3887 | | if (sz == 0) |
3888 | | return 0; |
3889 | | |
3890 | | /* hardware fails on input that is not a multiple of AES block size */ |
3891 | | if (sz % AES_BLOCK_SIZE != 0) { |
3892 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3893 | | return BAD_LENGTH_E; |
3894 | | #else |
3895 | | return BAD_FUNC_ARG; |
3896 | | #endif |
3897 | | } |
3898 | | |
3899 | | ret = wc_Pic32AesCrypt( |
3900 | | aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE, |
3901 | | out, in, sz, PIC32_ENCRYPTION, |
3902 | | PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC); |
3903 | | |
3904 | | /* store iv for next call */ |
3905 | | if (ret == 0) { |
3906 | | XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
3907 | | } |
3908 | | |
3909 | | return ret; |
3910 | | } |
3911 | | #ifdef HAVE_AES_DECRYPT |
3912 | | int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3913 | | { |
3914 | | int ret; |
3915 | | byte scratch[AES_BLOCK_SIZE]; |
3916 | | |
3917 | | if (sz == 0) |
3918 | | return 0; |
3919 | | |
3920 | | /* hardware fails on input that is not a multiple of AES block size */ |
3921 | | if (sz % AES_BLOCK_SIZE != 0) { |
3922 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
3923 | | return BAD_LENGTH_E; |
3924 | | #else |
3925 | | return BAD_FUNC_ARG; |
3926 | | #endif |
3927 | | } |
3928 | | XMEMCPY(scratch, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
3929 | | |
3930 | | ret = wc_Pic32AesCrypt( |
3931 | | aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE, |
3932 | | out, in, sz, PIC32_DECRYPTION, |
3933 | | PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCBC); |
3934 | | |
3935 | | /* store iv for next call */ |
3936 | | if (ret == 0) { |
3937 | | XMEMCPY((byte*)aes->reg, scratch, AES_BLOCK_SIZE); |
3938 | | } |
3939 | | |
3940 | | return ret; |
3941 | | } |
3942 | | #endif /* HAVE_AES_DECRYPT */ |
3943 | | #elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \ |
3944 | | !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES) |
3945 | | |
3946 | | int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3947 | | { |
3948 | | return wc_esp32AesCbcEncrypt(aes, out, in, sz); |
3949 | | } |
3950 | | int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3951 | | { |
3952 | | return wc_esp32AesCbcDecrypt(aes, out, in, sz); |
3953 | | } |
3954 | | #elif defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES) |
3955 | | int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3956 | | { |
3957 | | return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out); |
3958 | | } |
3959 | | int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3960 | | { |
3961 | | return SaSi_AesBlock(&aes->ctx.user_ctx, (uint8_t*)in, sz, out); |
3962 | | } |
3963 | | #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \ |
3964 | | !defined(WOLFSSL_QNX_CAAM) |
3965 | | /* implemented in wolfcrypt/src/port/caam/caam_aes.c */ |
3966 | | |
3967 | | #elif defined(WOLFSSL_AFALG) |
3968 | | /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */ |
3969 | | |
3970 | | #elif defined(WOLFSSL_KCAPI_AES) && !defined(WOLFSSL_NO_KCAPI_AES_CBC) |
3971 | | /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */ |
3972 | | |
3973 | | #elif defined(WOLFSSL_DEVCRYPTO_CBC) |
3974 | | /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */ |
3975 | | |
3976 | | #elif defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT) |
3977 | | int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3978 | | { |
3979 | | return se050_aes_crypt(aes, in, out, sz, AES_ENCRYPTION, |
3980 | | kAlgorithm_SSS_AES_CBC); |
3981 | | } |
3982 | | int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3983 | | { |
3984 | | return se050_aes_crypt(aes, in, out, sz, AES_DECRYPTION, |
3985 | | kAlgorithm_SSS_AES_CBC); |
3986 | | } |
3987 | | |
3988 | | #elif defined(WOLFSSL_SILABS_SE_ACCEL) |
3989 | | /* implemented in wolfcrypt/src/port/silabs/silabs_hash.c */ |
3990 | | |
3991 | | #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES) |
3992 | | /* implemented in wolfcrypt/src/port/psa/psa_aes.c */ |
3993 | | |
3994 | | #else |
3995 | | |
3996 | | /* Software AES - CBC Encrypt */ |
3997 | | int wc_AesCbcEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
3998 | 74 | { |
3999 | 74 | word32 blocks; |
4000 | | |
4001 | 74 | if (aes == NULL || out == NULL || in == NULL) { |
4002 | 0 | return BAD_FUNC_ARG; |
4003 | 0 | } |
4004 | | |
4005 | 74 | if (sz == 0) { |
4006 | 0 | return 0; |
4007 | 0 | } |
4008 | | |
4009 | 74 | blocks = sz / AES_BLOCK_SIZE; |
4010 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
4011 | | if (sz % AES_BLOCK_SIZE) { |
4012 | | WOLFSSL_ERROR_VERBOSE(BAD_LENGTH_E); |
4013 | | return BAD_LENGTH_E; |
4014 | | } |
4015 | | #endif |
4016 | | |
4017 | | #ifdef WOLFSSL_IMXRT_DCP |
4018 | | /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */ |
4019 | | if (aes->keylen == 16) |
4020 | | return DCPAesCbcEncrypt(aes, out, in, sz); |
4021 | | #endif |
4022 | | |
4023 | 74 | #ifdef WOLF_CRYPTO_CB |
4024 | 74 | if (aes->devId != INVALID_DEVID) { |
4025 | 0 | int crypto_cb_ret = wc_CryptoCb_AesCbcEncrypt(aes, out, in, sz); |
4026 | 0 | if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE) |
4027 | 0 | return crypto_cb_ret; |
4028 | | /* fall-through when unavailable */ |
4029 | 0 | } |
4030 | 74 | #endif |
4031 | | #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES) |
4032 | | /* if async and byte count above threshold */ |
4033 | | if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES && |
4034 | | sz >= WC_ASYNC_THRESH_AES_CBC) { |
4035 | | #if defined(HAVE_CAVIUM) |
4036 | | return NitroxAesCbcEncrypt(aes, out, in, sz); |
4037 | | #elif defined(HAVE_INTEL_QA) |
4038 | | return IntelQaSymAesCbcEncrypt(&aes->asyncDev, out, in, sz, |
4039 | | (const byte*)aes->devKey, aes->keylen, |
4040 | | (byte*)aes->reg, AES_BLOCK_SIZE); |
4041 | | #else /* WOLFSSL_ASYNC_CRYPT_TEST */ |
4042 | | if (wc_AsyncTestInit(&aes->asyncDev, ASYNC_TEST_AES_CBC_ENCRYPT)) { |
4043 | | WC_ASYNC_TEST* testDev = &aes->asyncDev.test; |
4044 | | testDev->aes.aes = aes; |
4045 | | testDev->aes.out = out; |
4046 | | testDev->aes.in = in; |
4047 | | testDev->aes.sz = sz; |
4048 | | return WC_PENDING_E; |
4049 | | } |
4050 | | #endif |
4051 | | } |
4052 | | #endif /* WOLFSSL_ASYNC_CRYPT */ |
4053 | | |
4054 | | #ifdef WOLFSSL_AESNI |
4055 | | if (haveAESNI) { |
4056 | | #ifdef DEBUG_AESNI |
4057 | | printf("about to aes cbc encrypt\n"); |
4058 | | printf("in = %p\n", in); |
4059 | | printf("out = %p\n", out); |
4060 | | printf("aes->key = %p\n", aes->key); |
4061 | | printf("aes->reg = %p\n", aes->reg); |
4062 | | printf("aes->rounds = %d\n", aes->rounds); |
4063 | | printf("sz = %d\n", sz); |
4064 | | #endif |
4065 | | |
4066 | | /* check alignment, decrypt doesn't need alignment */ |
4067 | | if ((wc_ptr_t)in % AESNI_ALIGN) { |
4068 | | #ifndef NO_WOLFSSL_ALLOC_ALIGN |
4069 | | byte* tmp = (byte*)XMALLOC(sz + AES_BLOCK_SIZE + AESNI_ALIGN, |
4070 | | aes->heap, DYNAMIC_TYPE_TMP_BUFFER); |
4071 | | byte* tmp_align; |
4072 | | if (tmp == NULL) return MEMORY_E; |
4073 | | |
4074 | | tmp_align = tmp + (AESNI_ALIGN - ((wc_ptr_t)tmp % AESNI_ALIGN)); |
4075 | | XMEMCPY(tmp_align, in, sz); |
4076 | | SAVE_VECTOR_REGISTERS(XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER); return _svr_ret;); |
4077 | | AES_CBC_encrypt(tmp_align, tmp_align, (byte*)aes->reg, sz, |
4078 | | (byte*)aes->key, aes->rounds); |
4079 | | RESTORE_VECTOR_REGISTERS(); |
4080 | | /* store iv for next call */ |
4081 | | XMEMCPY(aes->reg, tmp_align + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
4082 | | |
4083 | | XMEMCPY(out, tmp_align, sz); |
4084 | | XFREE(tmp, aes->heap, DYNAMIC_TYPE_TMP_BUFFER); |
4085 | | return 0; |
4086 | | #else |
4087 | | WOLFSSL_MSG("AES-CBC encrypt with bad alignment"); |
4088 | | WOLFSSL_ERROR_VERBOSE(BAD_ALIGN_E); |
4089 | | return BAD_ALIGN_E; |
4090 | | #endif |
4091 | | } |
4092 | | |
4093 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
4094 | | AES_CBC_encrypt(in, out, (byte*)aes->reg, sz, (byte*)aes->key, |
4095 | | aes->rounds); |
4096 | | RESTORE_VECTOR_REGISTERS(); |
4097 | | /* store iv for next call */ |
4098 | | XMEMCPY(aes->reg, out + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
4099 | | |
4100 | | return 0; |
4101 | | } |
4102 | | #endif |
4103 | | |
4104 | 377 | while (blocks--) { |
4105 | 303 | int ret; |
4106 | 303 | xorbuf((byte*)aes->reg, in, AES_BLOCK_SIZE); |
4107 | 303 | ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->reg); |
4108 | 303 | if (ret != 0) |
4109 | 0 | return ret; |
4110 | 303 | XMEMCPY(out, aes->reg, AES_BLOCK_SIZE); |
4111 | | |
4112 | 303 | out += AES_BLOCK_SIZE; |
4113 | 303 | in += AES_BLOCK_SIZE; |
4114 | 303 | } |
4115 | | |
4116 | 74 | return 0; |
4117 | 74 | } |
4118 | | |
4119 | | #ifdef HAVE_AES_DECRYPT |
4120 | | /* Software AES - CBC Decrypt */ |
4121 | | int wc_AesCbcDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
4122 | 2 | { |
4123 | 2 | word32 blocks; |
4124 | | |
4125 | 2 | if (aes == NULL || out == NULL || in == NULL) { |
4126 | 0 | return BAD_FUNC_ARG; |
4127 | 0 | } |
4128 | | |
4129 | 2 | if (sz == 0) { |
4130 | 0 | return 0; |
4131 | 0 | } |
4132 | | |
4133 | 2 | blocks = sz / AES_BLOCK_SIZE; |
4134 | 2 | if (sz % AES_BLOCK_SIZE) { |
4135 | | #ifdef WOLFSSL_AES_CBC_LENGTH_CHECKS |
4136 | | return BAD_LENGTH_E; |
4137 | | #else |
4138 | 0 | return BAD_FUNC_ARG; |
4139 | 0 | #endif |
4140 | 0 | } |
4141 | | |
4142 | | #ifdef WOLFSSL_IMXRT_DCP |
4143 | | /* Implemented in wolfcrypt/src/port/nxp/dcp_port.c */ |
4144 | | if (aes->keylen == 16) |
4145 | | return DCPAesCbcDecrypt(aes, out, in, sz); |
4146 | | #endif |
4147 | | |
4148 | 2 | #ifdef WOLF_CRYPTO_CB |
4149 | 2 | if (aes->devId != INVALID_DEVID) { |
4150 | 0 | int crypto_cb_ret = wc_CryptoCb_AesCbcDecrypt(aes, out, in, sz); |
4151 | 0 | if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE) |
4152 | 0 | return crypto_cb_ret; |
4153 | | /* fall-through when unavailable */ |
4154 | 0 | } |
4155 | 2 | #endif |
4156 | | #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES) |
4157 | | /* if async and byte count above threshold */ |
4158 | | if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES && |
4159 | | sz >= WC_ASYNC_THRESH_AES_CBC) { |
4160 | | #if defined(HAVE_CAVIUM) |
4161 | | return NitroxAesCbcDecrypt(aes, out, in, sz); |
4162 | | #elif defined(HAVE_INTEL_QA) |
4163 | | return IntelQaSymAesCbcDecrypt(&aes->asyncDev, out, in, sz, |
4164 | | (const byte*)aes->devKey, aes->keylen, |
4165 | | (byte*)aes->reg, AES_BLOCK_SIZE); |
4166 | | #else /* WOLFSSL_ASYNC_CRYPT_TEST */ |
4167 | | if (wc_AsyncTestInit(&aes->asyncDev, ASYNC_TEST_AES_CBC_DECRYPT)) { |
4168 | | WC_ASYNC_TEST* testDev = &aes->asyncDev.test; |
4169 | | testDev->aes.aes = aes; |
4170 | | testDev->aes.out = out; |
4171 | | testDev->aes.in = in; |
4172 | | testDev->aes.sz = sz; |
4173 | | return WC_PENDING_E; |
4174 | | } |
4175 | | #endif |
4176 | | } |
4177 | | #endif |
4178 | | |
4179 | | #ifdef WOLFSSL_AESNI |
4180 | | if (haveAESNI) { |
4181 | | #ifdef DEBUG_AESNI |
4182 | | printf("about to aes cbc decrypt\n"); |
4183 | | printf("in = %p\n", in); |
4184 | | printf("out = %p\n", out); |
4185 | | printf("aes->key = %p\n", aes->key); |
4186 | | printf("aes->reg = %p\n", aes->reg); |
4187 | | printf("aes->rounds = %d\n", aes->rounds); |
4188 | | printf("sz = %d\n", sz); |
4189 | | #endif |
4190 | | |
4191 | | /* if input and output same will overwrite input iv */ |
4192 | | XMEMCPY(aes->tmp, in + sz - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
4193 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
4194 | | #if defined(WOLFSSL_AESNI_BY4) |
4195 | | AES_CBC_decrypt_by4(in, out, (byte*)aes->reg, sz, (byte*)aes->key, |
4196 | | aes->rounds); |
4197 | | #elif defined(WOLFSSL_AESNI_BY6) |
4198 | | AES_CBC_decrypt_by6(in, out, (byte*)aes->reg, sz, (byte*)aes->key, |
4199 | | aes->rounds); |
4200 | | #else /* WOLFSSL_AESNI_BYx */ |
4201 | | AES_CBC_decrypt_by8(in, out, (byte*)aes->reg, sz, (byte*)aes->key, |
4202 | | aes->rounds); |
4203 | | #endif /* WOLFSSL_AESNI_BYx */ |
4204 | | /* store iv for next call */ |
4205 | | RESTORE_VECTOR_REGISTERS(); |
4206 | | XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE); |
4207 | | return 0; |
4208 | | } |
4209 | | #endif |
4210 | | |
4211 | 44 | while (blocks--) { |
4212 | 42 | int ret; |
4213 | 42 | XMEMCPY(aes->tmp, in, AES_BLOCK_SIZE); |
4214 | 42 | ret = wc_AesDecrypt(aes, (byte*)aes->tmp, out); |
4215 | 42 | if (ret != 0) |
4216 | 0 | return ret; |
4217 | 42 | xorbuf(out, (byte*)aes->reg, AES_BLOCK_SIZE); |
4218 | | /* store iv for next call */ |
4219 | 42 | XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE); |
4220 | | |
4221 | 42 | out += AES_BLOCK_SIZE; |
4222 | 42 | in += AES_BLOCK_SIZE; |
4223 | 42 | } |
4224 | | |
4225 | 2 | return 0; |
4226 | 2 | } |
4227 | | #endif |
4228 | | |
4229 | | #endif /* AES-CBC block */ |
4230 | | #endif /* HAVE_AES_CBC */ |
4231 | | |
4232 | | /* AES-CTR */ |
4233 | | #if defined(WOLFSSL_AES_COUNTER) |
4234 | | |
4235 | | #ifdef STM32_CRYPTO |
4236 | | #define NEED_AES_CTR_SOFT |
4237 | | #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock |
4238 | | |
4239 | | int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in) |
4240 | | { |
4241 | | int ret = 0; |
4242 | | #ifdef WOLFSSL_STM32_CUBEMX |
4243 | | CRYP_HandleTypeDef hcryp; |
4244 | | #ifdef STM32_HAL_V2 |
4245 | | word32 iv[AES_BLOCK_SIZE/sizeof(word32)]; |
4246 | | #endif |
4247 | | #else |
4248 | | word32 *iv; |
4249 | | CRYP_InitTypeDef cryptInit; |
4250 | | CRYP_KeyInitTypeDef keyInit; |
4251 | | CRYP_IVInitTypeDef ivInit; |
4252 | | #endif |
4253 | | |
4254 | | #ifdef WOLFSSL_STM32_CUBEMX |
4255 | | ret = wc_Stm32_Aes_Init(aes, &hcryp); |
4256 | | if (ret != 0) { |
4257 | | return ret; |
4258 | | } |
4259 | | |
4260 | | ret = wolfSSL_CryptHwMutexLock(); |
4261 | | if (ret != 0) { |
4262 | | return ret; |
4263 | | } |
4264 | | |
4265 | | #if defined(STM32_HAL_V2) |
4266 | | hcryp.Init.Algorithm = CRYP_AES_CTR; |
4267 | | ByteReverseWords(iv, aes->reg, AES_BLOCK_SIZE); |
4268 | | hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)iv; |
4269 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
4270 | | hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT; |
4271 | | hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_CTR; |
4272 | | hcryp.Init.KeyWriteFlag = CRYP_KEY_WRITE_ENABLE; |
4273 | | hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg; |
4274 | | #else |
4275 | | hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)aes->reg; |
4276 | | #endif |
4277 | | HAL_CRYP_Init(&hcryp); |
4278 | | |
4279 | | #if defined(STM32_HAL_V2) |
4280 | | ret = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, AES_BLOCK_SIZE, |
4281 | | (uint32_t*)out, STM32_HAL_TIMEOUT); |
4282 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
4283 | | ret = HAL_CRYPEx_AES(&hcryp, (byte*)in, AES_BLOCK_SIZE, |
4284 | | out, STM32_HAL_TIMEOUT); |
4285 | | #else |
4286 | | ret = HAL_CRYP_AESCTR_Encrypt(&hcryp, (byte*)in, AES_BLOCK_SIZE, |
4287 | | out, STM32_HAL_TIMEOUT); |
4288 | | #endif |
4289 | | if (ret != HAL_OK) { |
4290 | | ret = WC_TIMEOUT_E; |
4291 | | } |
4292 | | HAL_CRYP_DeInit(&hcryp); |
4293 | | |
4294 | | #else /* Standard Peripheral Library */ |
4295 | | ret = wc_Stm32_Aes_Init(aes, &cryptInit, &keyInit); |
4296 | | if (ret != 0) { |
4297 | | return ret; |
4298 | | } |
4299 | | |
4300 | | ret = wolfSSL_CryptHwMutexLock(); |
4301 | | if (ret != 0) { |
4302 | | return ret; |
4303 | | } |
4304 | | |
4305 | | /* reset registers to their default values */ |
4306 | | CRYP_DeInit(); |
4307 | | |
4308 | | /* set key */ |
4309 | | CRYP_KeyInit(&keyInit); |
4310 | | |
4311 | | /* set iv */ |
4312 | | iv = aes->reg; |
4313 | | CRYP_IVStructInit(&ivInit); |
4314 | | ivInit.CRYP_IV0Left = ByteReverseWord32(iv[0]); |
4315 | | ivInit.CRYP_IV0Right = ByteReverseWord32(iv[1]); |
4316 | | ivInit.CRYP_IV1Left = ByteReverseWord32(iv[2]); |
4317 | | ivInit.CRYP_IV1Right = ByteReverseWord32(iv[3]); |
4318 | | CRYP_IVInit(&ivInit); |
4319 | | |
4320 | | /* set direction and mode */ |
4321 | | cryptInit.CRYP_AlgoDir = CRYP_AlgoDir_Encrypt; |
4322 | | cryptInit.CRYP_AlgoMode = CRYP_AlgoMode_AES_CTR; |
4323 | | CRYP_Init(&cryptInit); |
4324 | | |
4325 | | /* enable crypto processor */ |
4326 | | CRYP_Cmd(ENABLE); |
4327 | | |
4328 | | /* flush IN/OUT FIFOs */ |
4329 | | CRYP_FIFOFlush(); |
4330 | | |
4331 | | CRYP_DataIn(*(uint32_t*)&in[0]); |
4332 | | CRYP_DataIn(*(uint32_t*)&in[4]); |
4333 | | CRYP_DataIn(*(uint32_t*)&in[8]); |
4334 | | CRYP_DataIn(*(uint32_t*)&in[12]); |
4335 | | |
4336 | | /* wait until the complete message has been processed */ |
4337 | | while (CRYP_GetFlagStatus(CRYP_FLAG_BUSY) != RESET) {} |
4338 | | |
4339 | | *(uint32_t*)&out[0] = CRYP_DataOut(); |
4340 | | *(uint32_t*)&out[4] = CRYP_DataOut(); |
4341 | | *(uint32_t*)&out[8] = CRYP_DataOut(); |
4342 | | *(uint32_t*)&out[12] = CRYP_DataOut(); |
4343 | | |
4344 | | /* disable crypto processor */ |
4345 | | CRYP_Cmd(DISABLE); |
4346 | | #endif /* WOLFSSL_STM32_CUBEMX */ |
4347 | | |
4348 | | wolfSSL_CryptHwMutexUnLock(); |
4349 | | return ret; |
4350 | | } |
4351 | | |
4352 | | |
4353 | | #elif defined(WOLFSSL_PIC32MZ_CRYPT) |
4354 | | |
4355 | | #define NEED_AES_CTR_SOFT |
4356 | | #define XTRANSFORM_AESCTRBLOCK wc_AesCtrEncryptBlock |
4357 | | |
4358 | | int wc_AesCtrEncryptBlock(Aes* aes, byte* out, const byte* in) |
4359 | | { |
4360 | | word32 tmpIv[AES_BLOCK_SIZE / sizeof(word32)]; |
4361 | | XMEMCPY(tmpIv, aes->reg, AES_BLOCK_SIZE); |
4362 | | return wc_Pic32AesCrypt( |
4363 | | aes->key, aes->keylen, tmpIv, AES_BLOCK_SIZE, |
4364 | | out, in, AES_BLOCK_SIZE, |
4365 | | PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_RCTR); |
4366 | | } |
4367 | | |
4368 | | #elif defined(HAVE_COLDFIRE_SEC) |
4369 | | #error "Coldfire SEC doesn't currently support AES-CTR mode" |
4370 | | |
4371 | | #elif defined(FREESCALE_LTC) |
4372 | | int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
4373 | | { |
4374 | | int ret = 0; |
4375 | | word32 keySize; |
4376 | | byte *iv, *enc_key; |
4377 | | byte* tmp; |
4378 | | |
4379 | | if (aes == NULL || out == NULL || in == NULL) { |
4380 | | return BAD_FUNC_ARG; |
4381 | | } |
4382 | | |
4383 | | /* consume any unused bytes left in aes->tmp */ |
4384 | | tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left; |
4385 | | while (aes->left && sz) { |
4386 | | *(out++) = *(in++) ^ *(tmp++); |
4387 | | aes->left--; |
4388 | | sz--; |
4389 | | } |
4390 | | |
4391 | | if (sz) { |
4392 | | iv = (byte*)aes->reg; |
4393 | | enc_key = (byte*)aes->key; |
4394 | | |
4395 | | ret = wc_AesGetKeySize(aes, &keySize); |
4396 | | if (ret != 0) |
4397 | | return ret; |
4398 | | |
4399 | | ret = wolfSSL_CryptHwMutexLock(); |
4400 | | if (ret != 0) |
4401 | | return ret; |
4402 | | LTC_AES_CryptCtr(LTC_BASE, in, out, sz, |
4403 | | iv, enc_key, keySize, (byte*)aes->tmp, |
4404 | | (uint32_t*)&aes->left); |
4405 | | wolfSSL_CryptHwMutexUnLock(); |
4406 | | } |
4407 | | |
4408 | | return ret; |
4409 | | } |
4410 | | |
4411 | | #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \ |
4412 | | !defined(WOLFSSL_QNX_CAAM) |
4413 | | /* implemented in wolfcrypt/src/port/caam/caam_aes.c */ |
4414 | | |
4415 | | #elif defined(WOLFSSL_AFALG) |
4416 | | /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */ |
4417 | | |
4418 | | #elif defined(WOLFSSL_DEVCRYPTO_AES) |
4419 | | /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */ |
4420 | | |
4421 | | #elif defined(WOLFSSL_ESP32WROOM32_CRYPT) && \ |
4422 | | !defined(NO_WOLFSSL_ESP32WROOM32_CRYPT_AES) |
4423 | | /* esp32 doesn't support CRT mode by hw. */ |
4424 | | /* use aes ecnryption plus sw implementation */ |
4425 | | #define NEED_AES_CTR_SOFT |
4426 | | |
4427 | | #elif defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES) |
4428 | | /* implemented in wolfcrypt/src/port/psa/psa_aes.c */ |
4429 | | #else |
4430 | | |
4431 | | /* Use software based AES counter */ |
4432 | | #define NEED_AES_CTR_SOFT |
4433 | | #endif |
4434 | | |
4435 | | #ifdef NEED_AES_CTR_SOFT |
4436 | | /* Increment AES counter */ |
4437 | | static WC_INLINE void IncrementAesCounter(byte* inOutCtr) |
4438 | 0 | { |
4439 | | /* in network byte order so start at end and work back */ |
4440 | 0 | int i; |
4441 | 0 | for (i = AES_BLOCK_SIZE - 1; i >= 0; i--) { |
4442 | 0 | if (++inOutCtr[i]) /* we're done unless we overflow */ |
4443 | 0 | return; |
4444 | 0 | } |
4445 | 0 | } |
4446 | | |
4447 | | /* Software AES - CTR Encrypt */ |
4448 | | int wc_AesCtrEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
4449 | 0 | { |
4450 | 0 | byte* tmp; |
4451 | 0 | byte scratch[AES_BLOCK_SIZE]; |
4452 | 0 | int ret; |
4453 | |
|
4454 | 0 | if (aes == NULL || out == NULL || in == NULL) { |
4455 | 0 | return BAD_FUNC_ARG; |
4456 | 0 | } |
4457 | | |
4458 | 0 | #ifdef WOLF_CRYPTO_CB |
4459 | 0 | if (aes->devId != INVALID_DEVID) { |
4460 | 0 | int crypto_cb_ret = wc_CryptoCb_AesCtrEncrypt(aes, out, in, sz); |
4461 | 0 | if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE) |
4462 | 0 | return crypto_cb_ret; |
4463 | | /* fall-through when unavailable */ |
4464 | 0 | } |
4465 | 0 | #endif |
4466 | | |
4467 | | /* consume any unused bytes left in aes->tmp */ |
4468 | 0 | tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left; |
4469 | 0 | while (aes->left && sz) { |
4470 | 0 | *(out++) = *(in++) ^ *(tmp++); |
4471 | 0 | aes->left--; |
4472 | 0 | sz--; |
4473 | 0 | } |
4474 | |
|
4475 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
4476 | | wc_MemZero_Add("wc_AesCtrEncrypt scratch", scratch, AES_BLOCK_SIZE); |
4477 | | #endif |
4478 | 0 | #if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT) && \ |
4479 | 0 | !defined(XTRANSFORM_AESCTRBLOCK) |
4480 | 0 | if (in != out && sz >= AES_BLOCK_SIZE) { |
4481 | 0 | int blocks = sz / AES_BLOCK_SIZE; |
4482 | 0 | byte* counter = (byte*)aes->reg; |
4483 | 0 | byte* c = out; |
4484 | 0 | while (blocks--) { |
4485 | 0 | XMEMCPY(c, counter, AES_BLOCK_SIZE); |
4486 | 0 | c += AES_BLOCK_SIZE; |
4487 | 0 | IncrementAesCounter(counter); |
4488 | 0 | } |
4489 | | |
4490 | | /* reset number of blocks and then do encryption */ |
4491 | 0 | blocks = sz / AES_BLOCK_SIZE; |
4492 | 0 | wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks); |
4493 | 0 | xorbuf(out, in, AES_BLOCK_SIZE * blocks); |
4494 | 0 | in += AES_BLOCK_SIZE * blocks; |
4495 | 0 | out += AES_BLOCK_SIZE * blocks; |
4496 | 0 | sz -= blocks * AES_BLOCK_SIZE; |
4497 | 0 | } |
4498 | 0 | else |
4499 | 0 | #endif |
4500 | 0 | { |
4501 | | /* do as many block size ops as possible */ |
4502 | 0 | while (sz >= AES_BLOCK_SIZE) { |
4503 | | #ifdef XTRANSFORM_AESCTRBLOCK |
4504 | | XTRANSFORM_AESCTRBLOCK(aes, out, in); |
4505 | | #else |
4506 | 0 | ret = wc_AesEncrypt(aes, (byte*)aes->reg, scratch); |
4507 | 0 | if (ret != 0) { |
4508 | 0 | ForceZero(scratch, AES_BLOCK_SIZE); |
4509 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
4510 | | wc_MemZero_Check(scratch, AES_BLOCK_SIZE); |
4511 | | #endif |
4512 | 0 | return ret; |
4513 | 0 | } |
4514 | 0 | xorbuf(scratch, in, AES_BLOCK_SIZE); |
4515 | 0 | XMEMCPY(out, scratch, AES_BLOCK_SIZE); |
4516 | 0 | #endif |
4517 | 0 | IncrementAesCounter((byte*)aes->reg); |
4518 | |
|
4519 | 0 | out += AES_BLOCK_SIZE; |
4520 | 0 | in += AES_BLOCK_SIZE; |
4521 | 0 | sz -= AES_BLOCK_SIZE; |
4522 | 0 | aes->left = 0; |
4523 | 0 | } |
4524 | 0 | ForceZero(scratch, AES_BLOCK_SIZE); |
4525 | 0 | } |
4526 | | |
4527 | | /* handle non block size remaining and store unused byte count in left */ |
4528 | 0 | if (sz) { |
4529 | 0 | ret = wc_AesEncrypt(aes, (byte*)aes->reg, (byte*)aes->tmp); |
4530 | 0 | if (ret != 0) { |
4531 | 0 | ForceZero(scratch, AES_BLOCK_SIZE); |
4532 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
4533 | | wc_MemZero_Check(scratch, AES_BLOCK_SIZE); |
4534 | | #endif |
4535 | 0 | return ret; |
4536 | 0 | } |
4537 | 0 | IncrementAesCounter((byte*)aes->reg); |
4538 | |
|
4539 | 0 | aes->left = AES_BLOCK_SIZE; |
4540 | 0 | tmp = (byte*)aes->tmp; |
4541 | |
|
4542 | 0 | while (sz--) { |
4543 | 0 | *(out++) = *(in++) ^ *(tmp++); |
4544 | 0 | aes->left--; |
4545 | 0 | } |
4546 | 0 | } |
4547 | | |
4548 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
4549 | | wc_MemZero_Check(scratch, AES_BLOCK_SIZE); |
4550 | | #endif |
4551 | 0 | return 0; |
4552 | 0 | } |
4553 | | |
4554 | | #endif /* NEED_AES_CTR_SOFT */ |
4555 | | |
4556 | | #endif /* WOLFSSL_AES_COUNTER */ |
4557 | | #endif /* !WOLFSSL_ARMASM */ |
4558 | | |
4559 | | |
4560 | | /* |
4561 | | * The IV for AES GCM and CCM, stored in struct Aes's member reg, is comprised |
4562 | | * of two parts in order: |
4563 | | * 1. The fixed field which may be 0 or 4 bytes long. In TLS, this is set |
4564 | | * to the implicit IV. |
4565 | | * 2. The explicit IV is generated by wolfCrypt. It needs to be managed |
4566 | | * by wolfCrypt to ensure the IV is unique for each call to encrypt. |
4567 | | * The IV may be a 96-bit random value, or the 32-bit fixed value and a |
4568 | | * 64-bit set of 0 or random data. The final 32-bits of reg is used as a |
4569 | | * block counter during the encryption. |
4570 | | */ |
4571 | | |
4572 | | #if (defined(HAVE_AESGCM) && !defined(WC_NO_RNG)) || defined(HAVE_AESCCM) |
4573 | | static WC_INLINE void IncCtr(byte* ctr, word32 ctrSz) |
4574 | 0 | { |
4575 | 0 | int i; |
4576 | 0 | for (i = ctrSz-1; i >= 0; i--) { |
4577 | 0 | if (++ctr[i]) |
4578 | 0 | break; |
4579 | 0 | } |
4580 | 0 | } |
4581 | | #endif /* HAVE_AESGCM || HAVE_AESCCM */ |
4582 | | |
4583 | | |
4584 | | #ifdef HAVE_AESGCM |
4585 | | |
4586 | | #ifdef WOLFSSL_AESGCM_STREAM |
4587 | | /* Access initialization counter data. */ |
4588 | 0 | #define AES_INITCTR(aes) ((aes)->streamData + 0 * AES_BLOCK_SIZE) |
4589 | | /* Access counter data. */ |
4590 | 0 | #define AES_COUNTER(aes) ((aes)->streamData + 1 * AES_BLOCK_SIZE) |
4591 | | /* Access tag data. */ |
4592 | 0 | #define AES_TAG(aes) ((aes)->streamData + 2 * AES_BLOCK_SIZE) |
4593 | | /* Access last GHASH block. */ |
4594 | | #define AES_LASTGBLOCK(aes) ((aes)->streamData + 3 * AES_BLOCK_SIZE) |
4595 | | /* Access last encrypted block. */ |
4596 | 0 | #define AES_LASTBLOCK(aes) ((aes)->streamData + 4 * AES_BLOCK_SIZE) |
4597 | | #endif |
4598 | | |
4599 | | #if defined(HAVE_COLDFIRE_SEC) |
4600 | | #error "Coldfire SEC doesn't currently support AES-GCM mode" |
4601 | | |
4602 | | #endif |
4603 | | |
4604 | | #ifdef WOLFSSL_ARMASM |
4605 | | /* implementation is located in wolfcrypt/src/port/arm/armv8-aes.c */ |
4606 | | |
4607 | | #elif defined(WOLFSSL_AFALG) |
4608 | | /* implemented in wolfcrypt/src/port/afalg/afalg_aes.c */ |
4609 | | |
4610 | | #elif defined(WOLFSSL_KCAPI_AES) |
4611 | | /* implemented in wolfcrypt/src/port/kcapi/kcapi_aes.c */ |
4612 | | |
4613 | | #elif defined(WOLFSSL_DEVCRYPTO_AES) |
4614 | | /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */ |
4615 | | |
4616 | | #else /* software + AESNI implementation */ |
4617 | | |
4618 | | #if !defined(FREESCALE_LTC_AES_GCM) |
4619 | | static WC_INLINE void IncrementGcmCounter(byte* inOutCtr) |
4620 | 0 | { |
4621 | 0 | int i; |
4622 | | |
4623 | | /* in network byte order so start at end and work back */ |
4624 | 0 | for (i = AES_BLOCK_SIZE - 1; i >= AES_BLOCK_SIZE - CTR_SZ; i--) { |
4625 | 0 | if (++inOutCtr[i]) /* we're done unless we overflow */ |
4626 | 0 | return; |
4627 | 0 | } |
4628 | 0 | } |
4629 | | #endif /* !FREESCALE_LTC_AES_GCM */ |
4630 | | |
4631 | | #if defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT) |
4632 | | |
4633 | | static WC_INLINE void FlattenSzInBits(byte* buf, word32 sz) |
4634 | 0 | { |
4635 | | /* Multiply the sz by 8 */ |
4636 | 0 | word32 szHi = (sz >> (8*sizeof(sz) - 3)); |
4637 | 0 | sz <<= 3; |
4638 | | |
4639 | | /* copy over the words of the sz into the destination buffer */ |
4640 | 0 | buf[0] = (szHi >> 24) & 0xff; |
4641 | 0 | buf[1] = (szHi >> 16) & 0xff; |
4642 | 0 | buf[2] = (szHi >> 8) & 0xff; |
4643 | 0 | buf[3] = szHi & 0xff; |
4644 | 0 | buf[4] = (sz >> 24) & 0xff; |
4645 | 0 | buf[5] = (sz >> 16) & 0xff; |
4646 | 0 | buf[6] = (sz >> 8) & 0xff; |
4647 | 0 | buf[7] = sz & 0xff; |
4648 | 0 | } |
4649 | | |
4650 | | |
4651 | | static WC_INLINE void RIGHTSHIFTX(byte* x) |
4652 | 0 | { |
4653 | 0 | int i; |
4654 | 0 | int carryIn = 0; |
4655 | 0 | byte borrow = (0x00 - (x[15] & 0x01)) & 0xE1; |
4656 | |
|
4657 | 0 | for (i = 0; i < AES_BLOCK_SIZE; i++) { |
4658 | 0 | int carryOut = (x[i] & 0x01) << 7; |
4659 | 0 | x[i] = (byte) ((x[i] >> 1) | carryIn); |
4660 | 0 | carryIn = carryOut; |
4661 | 0 | } |
4662 | 0 | x[0] ^= borrow; |
4663 | 0 | } |
4664 | | |
4665 | | #endif /* defined(GCM_SMALL) || defined(GCM_TABLE) || defined(GCM_TABLE_4BIT) */ |
4666 | | |
4667 | | |
4668 | | #ifdef GCM_TABLE |
4669 | | |
4670 | | static void GenerateM0(Aes* aes) |
4671 | | { |
4672 | | int i, j; |
4673 | | byte (*m)[AES_BLOCK_SIZE] = aes->M0; |
4674 | | |
4675 | | XMEMCPY(m[128], aes->H, AES_BLOCK_SIZE); |
4676 | | |
4677 | | for (i = 64; i > 0; i /= 2) { |
4678 | | XMEMCPY(m[i], m[i*2], AES_BLOCK_SIZE); |
4679 | | RIGHTSHIFTX(m[i]); |
4680 | | } |
4681 | | |
4682 | | for (i = 2; i < 256; i *= 2) { |
4683 | | for (j = 1; j < i; j++) { |
4684 | | XMEMCPY(m[i+j], m[i], AES_BLOCK_SIZE); |
4685 | | xorbuf(m[i+j], m[j], AES_BLOCK_SIZE); |
4686 | | } |
4687 | | } |
4688 | | |
4689 | | XMEMSET(m[0], 0, AES_BLOCK_SIZE); |
4690 | | } |
4691 | | |
4692 | | #elif defined(GCM_TABLE_4BIT) |
4693 | | |
4694 | | static WC_INLINE void Shift4_M0(byte *r8, byte* z8) |
4695 | 0 | { |
4696 | 0 | int i; |
4697 | 0 | for (i = 15; i > 0; i--) |
4698 | 0 | r8[i] = (z8[i-1] << 4) | (z8[i] >> 4); |
4699 | 0 | r8[0] = z8[0] >> 4; |
4700 | 0 | } |
4701 | | |
4702 | | static void GenerateM0(Aes* aes) |
4703 | 0 | { |
4704 | 0 | #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU) |
4705 | 0 | int i; |
4706 | 0 | #endif |
4707 | 0 | byte (*m)[AES_BLOCK_SIZE] = aes->M0; |
4708 | | |
4709 | | /* 0 times -> 0x0 */ |
4710 | 0 | XMEMSET(m[0x0], 0, AES_BLOCK_SIZE); |
4711 | | /* 1 times -> 0x8 */ |
4712 | 0 | XMEMCPY(m[0x8], aes->H, AES_BLOCK_SIZE); |
4713 | | /* 2 times -> 0x4 */ |
4714 | 0 | XMEMCPY(m[0x4], m[0x8], AES_BLOCK_SIZE); |
4715 | 0 | RIGHTSHIFTX(m[0x4]); |
4716 | | /* 4 times -> 0x2 */ |
4717 | 0 | XMEMCPY(m[0x2], m[0x4], AES_BLOCK_SIZE); |
4718 | 0 | RIGHTSHIFTX(m[0x2]); |
4719 | | /* 8 times -> 0x1 */ |
4720 | 0 | XMEMCPY(m[0x1], m[0x2], AES_BLOCK_SIZE); |
4721 | 0 | RIGHTSHIFTX(m[0x1]); |
4722 | | |
4723 | | /* 0x3 */ |
4724 | 0 | XMEMCPY(m[0x3], m[0x2], AES_BLOCK_SIZE); |
4725 | 0 | xorbuf (m[0x3], m[0x1], AES_BLOCK_SIZE); |
4726 | | |
4727 | | /* 0x5 -> 0x7 */ |
4728 | 0 | XMEMCPY(m[0x5], m[0x4], AES_BLOCK_SIZE); |
4729 | 0 | xorbuf (m[0x5], m[0x1], AES_BLOCK_SIZE); |
4730 | 0 | XMEMCPY(m[0x6], m[0x4], AES_BLOCK_SIZE); |
4731 | 0 | xorbuf (m[0x6], m[0x2], AES_BLOCK_SIZE); |
4732 | 0 | XMEMCPY(m[0x7], m[0x4], AES_BLOCK_SIZE); |
4733 | 0 | xorbuf (m[0x7], m[0x3], AES_BLOCK_SIZE); |
4734 | | |
4735 | | /* 0x9 -> 0xf */ |
4736 | 0 | XMEMCPY(m[0x9], m[0x8], AES_BLOCK_SIZE); |
4737 | 0 | xorbuf (m[0x9], m[0x1], AES_BLOCK_SIZE); |
4738 | 0 | XMEMCPY(m[0xa], m[0x8], AES_BLOCK_SIZE); |
4739 | 0 | xorbuf (m[0xa], m[0x2], AES_BLOCK_SIZE); |
4740 | 0 | XMEMCPY(m[0xb], m[0x8], AES_BLOCK_SIZE); |
4741 | 0 | xorbuf (m[0xb], m[0x3], AES_BLOCK_SIZE); |
4742 | 0 | XMEMCPY(m[0xc], m[0x8], AES_BLOCK_SIZE); |
4743 | 0 | xorbuf (m[0xc], m[0x4], AES_BLOCK_SIZE); |
4744 | 0 | XMEMCPY(m[0xd], m[0x8], AES_BLOCK_SIZE); |
4745 | 0 | xorbuf (m[0xd], m[0x5], AES_BLOCK_SIZE); |
4746 | 0 | XMEMCPY(m[0xe], m[0x8], AES_BLOCK_SIZE); |
4747 | 0 | xorbuf (m[0xe], m[0x6], AES_BLOCK_SIZE); |
4748 | 0 | XMEMCPY(m[0xf], m[0x8], AES_BLOCK_SIZE); |
4749 | 0 | xorbuf (m[0xf], m[0x7], AES_BLOCK_SIZE); |
4750 | |
|
4751 | 0 | #if !defined(BIG_ENDIAN_ORDER) && !defined(WC_16BIT_CPU) |
4752 | 0 | for (i = 0; i < 16; i++) { |
4753 | 0 | Shift4_M0(m[16+i], m[i]); |
4754 | 0 | } |
4755 | 0 | #endif |
4756 | 0 | } |
4757 | | |
4758 | | #endif /* GCM_TABLE */ |
4759 | | |
4760 | | /* Software AES - GCM SetKey */ |
4761 | | int wc_AesGcmSetKey(Aes* aes, const byte* key, word32 len) |
4762 | 0 | { |
4763 | 0 | int ret; |
4764 | 0 | byte iv[AES_BLOCK_SIZE]; |
4765 | |
|
4766 | | #ifdef WOLFSSL_IMX6_CAAM_BLOB |
4767 | | byte local[32]; |
4768 | | word32 localSz = 32; |
4769 | | |
4770 | | if (len == (16 + WC_CAAM_BLOB_SZ) || |
4771 | | len == (24 + WC_CAAM_BLOB_SZ) || |
4772 | | len == (32 + WC_CAAM_BLOB_SZ)) { |
4773 | | if (wc_caamOpenBlob((byte*)key, len, local, &localSz) != 0) { |
4774 | | return BAD_FUNC_ARG; |
4775 | | } |
4776 | | |
4777 | | /* set local values */ |
4778 | | key = local; |
4779 | | len = localSz; |
4780 | | } |
4781 | | #endif |
4782 | |
|
4783 | 0 | if (!((len == 16) || (len == 24) || (len == 32))) |
4784 | 0 | return BAD_FUNC_ARG; |
4785 | 0 | if (aes == NULL) { |
4786 | | #ifdef WOLFSSL_IMX6_CAAM_BLOB |
4787 | | ForceZero(local, sizeof(local)); |
4788 | | #endif |
4789 | 0 | return BAD_FUNC_ARG; |
4790 | 0 | } |
4791 | | |
4792 | | #ifdef OPENSSL_EXTRA |
4793 | | XMEMSET(aes->aadH, 0, sizeof(aes->aadH)); |
4794 | | aes->aadLen = 0; |
4795 | | #endif |
4796 | 0 | XMEMSET(iv, 0, AES_BLOCK_SIZE); |
4797 | 0 | ret = wc_AesSetKey(aes, key, len, iv, AES_ENCRYPTION); |
4798 | 0 | #ifdef WOLFSSL_AESGCM_STREAM |
4799 | 0 | aes->gcmKeySet = 1; |
4800 | 0 | #endif |
4801 | |
|
4802 | | #ifdef WOLFSSL_AESNI |
4803 | | /* AES-NI code generates its own H value. */ |
4804 | | if (haveAESNI) |
4805 | | return ret; |
4806 | | #endif /* WOLFSSL_AESNI */ |
4807 | | #if defined(WOLFSSL_SECO_CAAM) |
4808 | | if (aes->devId == WOLFSSL_SECO_DEVID) { |
4809 | | return ret; |
4810 | | } |
4811 | | #endif /* WOLFSSL_SECO_CAAM */ |
4812 | |
|
4813 | 0 | #if !defined(FREESCALE_LTC_AES_GCM) |
4814 | 0 | if (ret == 0) |
4815 | 0 | ret = wc_AesEncrypt(aes, iv, aes->H); |
4816 | 0 | if (ret == 0) { |
4817 | 0 | #if defined(GCM_TABLE) || defined(GCM_TABLE_4BIT) |
4818 | 0 | GenerateM0(aes); |
4819 | 0 | #endif /* GCM_TABLE */ |
4820 | 0 | } |
4821 | 0 | #endif /* FREESCALE_LTC_AES_GCM */ |
4822 | |
|
4823 | | #if defined(WOLFSSL_XILINX_CRYPT) |
4824 | | wc_AesGcmSetKey_ex(aes, key, len, XSECURE_CSU_AES_KEY_SRC_KUP); |
4825 | | #elif defined(WOLFSSL_AFALG_XILINX_AES) |
4826 | | wc_AesGcmSetKey_ex(aes, key, len, 0); |
4827 | | #endif |
4828 | |
|
4829 | 0 | #ifdef WOLF_CRYPTO_CB |
4830 | 0 | if (aes->devId != INVALID_DEVID) { |
4831 | 0 | XMEMCPY(aes->devKey, key, len); |
4832 | 0 | } |
4833 | 0 | #endif |
4834 | |
|
4835 | | #ifdef WOLFSSL_IMX6_CAAM_BLOB |
4836 | | ForceZero(local, sizeof(local)); |
4837 | | #endif |
4838 | 0 | return ret; |
4839 | 0 | } |
4840 | | |
4841 | | |
4842 | | #ifdef WOLFSSL_AESNI |
4843 | | |
4844 | | #if defined(USE_INTEL_SPEEDUP) |
4845 | | #define HAVE_INTEL_AVX1 |
4846 | | #define HAVE_INTEL_AVX2 |
4847 | | #endif /* USE_INTEL_SPEEDUP */ |
4848 | | |
4849 | | #ifndef _MSC_VER |
4850 | | |
4851 | | void AES_GCM_encrypt(const unsigned char *in, unsigned char *out, |
4852 | | const unsigned char* addt, const unsigned char* ivec, |
4853 | | unsigned char *tag, word32 nbytes, |
4854 | | word32 abytes, word32 ibytes, |
4855 | | word32 tbytes, const unsigned char* key, int nr) |
4856 | | XASM_LINK("AES_GCM_encrypt"); |
4857 | | #ifdef HAVE_INTEL_AVX1 |
4858 | | void AES_GCM_encrypt_avx1(const unsigned char *in, unsigned char *out, |
4859 | | const unsigned char* addt, const unsigned char* ivec, |
4860 | | unsigned char *tag, word32 nbytes, |
4861 | | word32 abytes, word32 ibytes, |
4862 | | word32 tbytes, const unsigned char* key, |
4863 | | int nr) |
4864 | | XASM_LINK("AES_GCM_encrypt_avx1"); |
4865 | | #ifdef HAVE_INTEL_AVX2 |
4866 | | void AES_GCM_encrypt_avx2(const unsigned char *in, unsigned char *out, |
4867 | | const unsigned char* addt, const unsigned char* ivec, |
4868 | | unsigned char *tag, word32 nbytes, |
4869 | | word32 abytes, word32 ibytes, |
4870 | | word32 tbytes, const unsigned char* key, |
4871 | | int nr) |
4872 | | XASM_LINK("AES_GCM_encrypt_avx2"); |
4873 | | #endif /* HAVE_INTEL_AVX2 */ |
4874 | | #endif /* HAVE_INTEL_AVX1 */ |
4875 | | |
4876 | | #ifdef HAVE_AES_DECRYPT |
4877 | | void AES_GCM_decrypt(const unsigned char *in, unsigned char *out, |
4878 | | const unsigned char* addt, const unsigned char* ivec, |
4879 | | const unsigned char *tag, word32 nbytes, word32 abytes, |
4880 | | word32 ibytes, word32 tbytes, const unsigned char* key, |
4881 | | int nr, int* res) |
4882 | | XASM_LINK("AES_GCM_decrypt"); |
4883 | | #ifdef HAVE_INTEL_AVX1 |
4884 | | void AES_GCM_decrypt_avx1(const unsigned char *in, unsigned char *out, |
4885 | | const unsigned char* addt, const unsigned char* ivec, |
4886 | | const unsigned char *tag, word32 nbytes, |
4887 | | word32 abytes, word32 ibytes, word32 tbytes, |
4888 | | const unsigned char* key, int nr, int* res) |
4889 | | XASM_LINK("AES_GCM_decrypt_avx1"); |
4890 | | #ifdef HAVE_INTEL_AVX2 |
4891 | | void AES_GCM_decrypt_avx2(const unsigned char *in, unsigned char *out, |
4892 | | const unsigned char* addt, const unsigned char* ivec, |
4893 | | const unsigned char *tag, word32 nbytes, |
4894 | | word32 abytes, word32 ibytes, word32 tbytes, |
4895 | | const unsigned char* key, int nr, int* res) |
4896 | | XASM_LINK("AES_GCM_decrypt_avx2"); |
4897 | | #endif /* HAVE_INTEL_AVX2 */ |
4898 | | #endif /* HAVE_INTEL_AVX1 */ |
4899 | | #endif /* HAVE_AES_DECRYPT */ |
4900 | | |
4901 | | #else /* _MSC_VER */ |
4902 | | |
4903 | | /* AESNI with Microsoft */ |
4904 | | #ifdef __clang__ |
4905 | | /* With Clang the __m128i in emmintrin.h is union using: |
4906 | | * "unsigned __int64 m128i_u64[2];" |
4907 | | * Notes: Must add "-maes -msse4.1 -mpclmul" to compiler flags. |
4908 | | * Must mark "aes_asm.asm" as included/compiled C file. |
4909 | | */ |
4910 | | #define M128_INIT(x,y) { (long long)x, (long long)y } |
4911 | | #else |
4912 | | /* Typically this is array of 16 int8's */ |
4913 | | #define S(w,z) ((char)((unsigned long long)(w) >> (8*(7-(z))) & 0xFF)) |
4914 | | #define M128_INIT(x,y) { S((x),7), S((x),6), S((x),5), S((x),4), \ |
4915 | | S((x),3), S((x),2), S((x),1), S((x),0), \ |
4916 | | S((y),7), S((y),6), S((y),5), S((y),4), \ |
4917 | | S((y),3), S((y),2), S((y),1), S((y),0) } |
4918 | | #endif |
4919 | | |
4920 | | static const __m128i MOD2_128 = |
4921 | | M128_INIT(0x1, (long long int)0xc200000000000000UL); |
4922 | | |
4923 | | |
4924 | | /* See Intel Carry-Less Multiplication Instruction |
4925 | | * and its Usage for Computing the GCM Mode White Paper |
4926 | | * by Shay Gueron, Intel Mobility Group, Israel Development Center; |
4927 | | * and Michael E. Kounavis, Intel Labs, Circuits and Systems Research */ |
4928 | | |
4929 | | |
4930 | | /* Figure 9. AES-GCM - Encrypt With Single Block Ghash at a Time */ |
4931 | | |
4932 | | static const __m128i ONE = M128_INIT(0x0, 0x1); |
4933 | | #ifndef AES_GCM_AESNI_NO_UNROLL |
4934 | | static const __m128i TWO = M128_INIT(0x0, 0x2); |
4935 | | static const __m128i THREE = M128_INIT(0x0, 0x3); |
4936 | | static const __m128i FOUR = M128_INIT(0x0, 0x4); |
4937 | | static const __m128i FIVE = M128_INIT(0x0, 0x5); |
4938 | | static const __m128i SIX = M128_INIT(0x0, 0x6); |
4939 | | static const __m128i SEVEN = M128_INIT(0x0, 0x7); |
4940 | | static const __m128i EIGHT = M128_INIT(0x0, 0x8); |
4941 | | #endif |
4942 | | static const __m128i BSWAP_EPI64 = |
4943 | | M128_INIT(0x0001020304050607, 0x08090a0b0c0d0e0f); |
4944 | | static const __m128i BSWAP_MASK = |
4945 | | M128_INIT(0x08090a0b0c0d0e0f, 0x0001020304050607); |
4946 | | |
4947 | | |
4948 | | /* The following are for MSC based builds which do not allow |
4949 | | * inline assembly. Intrinsic functions are used instead. */ |
4950 | | |
4951 | | #define aes_gcm_calc_iv_12(KEY, ivec, nr, H, Y, T) \ |
4952 | | do \ |
4953 | | { \ |
4954 | | word32 iv12[4]; \ |
4955 | | iv12[0] = *(word32*)&ivec[0]; \ |
4956 | | iv12[1] = *(word32*)&ivec[4]; \ |
4957 | | iv12[2] = *(word32*)&ivec[8]; \ |
4958 | | iv12[3] = 0x01000000; \ |
4959 | | Y = _mm_loadu_si128((__m128i*)iv12); \ |
4960 | | \ |
4961 | | /* (Compute E[ZERO, KS] and E[Y0, KS] together */ \ |
4962 | | tmp1 = _mm_load_si128(&KEY[0]); \ |
4963 | | tmp2 = _mm_xor_si128(Y, KEY[0]); \ |
4964 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); \ |
4965 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[1]); \ |
4966 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); \ |
4967 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[2]); \ |
4968 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); \ |
4969 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[3]); \ |
4970 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); \ |
4971 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[4]); \ |
4972 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); \ |
4973 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[5]); \ |
4974 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); \ |
4975 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[6]); \ |
4976 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); \ |
4977 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[7]); \ |
4978 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); \ |
4979 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[8]); \ |
4980 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); \ |
4981 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[9]); \ |
4982 | | lastKey = KEY[10]; \ |
4983 | | if (nr > 10) { \ |
4984 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); \ |
4985 | | tmp2 = _mm_aesenc_si128(tmp2, lastKey); \ |
4986 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); \ |
4987 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[11]); \ |
4988 | | lastKey = KEY[12]; \ |
4989 | | if (nr > 12) { \ |
4990 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); \ |
4991 | | tmp2 = _mm_aesenc_si128(tmp2, lastKey); \ |
4992 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); \ |
4993 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[13]); \ |
4994 | | lastKey = KEY[14]; \ |
4995 | | } \ |
4996 | | } \ |
4997 | | H = _mm_aesenclast_si128(tmp1, lastKey); \ |
4998 | | T = _mm_aesenclast_si128(tmp2, lastKey); \ |
4999 | | H = _mm_shuffle_epi8(H, BSWAP_MASK); \ |
5000 | | } \ |
5001 | | while (0) |
5002 | | |
5003 | | |
5004 | | #ifdef _M_X64 |
5005 | | /* 64-bit */ |
5006 | | #define AES_GCM_INSERT_EPI(tmp1, a, b) \ |
5007 | | tmp1 = _mm_insert_epi64(tmp1, ((word64)(a))*8, 0); \ |
5008 | | tmp1 = _mm_insert_epi64(tmp1, ((word64)(b))*8, 1); |
5009 | | #else |
5010 | | /* 32-bit */ |
5011 | | #define AES_GCM_INSERT_EPI(tmp1, a, b) \ |
5012 | | tmp1 = _mm_insert_epi32(tmp1, ((int)(a))*8, 0); \ |
5013 | | tmp1 = _mm_insert_epi32(tmp1, 0, 1); \ |
5014 | | tmp1 = _mm_insert_epi32(tmp1, ((int)(b))*8, 2); \ |
5015 | | tmp1 = _mm_insert_epi32(tmp1, 0, 3); |
5016 | | #endif |
5017 | | |
5018 | | #define aes_gcm_calc_iv(KEY, ivec, ibytes, nr, H, Y, T) \ |
5019 | | do \ |
5020 | | { \ |
5021 | | if (ibytes % 16) { \ |
5022 | | i = ibytes / 16; \ |
5023 | | for (j=0; j < (int)(ibytes%16); j++) \ |
5024 | | ((unsigned char*)&last_block)[j] = ivec[i*16+j]; \ |
5025 | | } \ |
5026 | | tmp1 = _mm_load_si128(&KEY[0]); \ |
5027 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); \ |
5028 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); \ |
5029 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); \ |
5030 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); \ |
5031 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); \ |
5032 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); \ |
5033 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); \ |
5034 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); \ |
5035 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); \ |
5036 | | lastKey = KEY[10]; \ |
5037 | | if (nr > 10) { \ |
5038 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); \ |
5039 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); \ |
5040 | | lastKey = KEY[12]; \ |
5041 | | if (nr > 12) { \ |
5042 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); \ |
5043 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); \ |
5044 | | lastKey = KEY[14]; \ |
5045 | | } \ |
5046 | | } \ |
5047 | | H = _mm_aesenclast_si128(tmp1, lastKey); \ |
5048 | | H = _mm_shuffle_epi8(H, BSWAP_MASK); \ |
5049 | | Y = _mm_setzero_si128(); \ |
5050 | | for (i=0; i < (int)(ibytes/16); i++) { \ |
5051 | | tmp1 = _mm_loadu_si128(&((__m128i*)ivec)[i]); \ |
5052 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); \ |
5053 | | Y = _mm_xor_si128(Y, tmp1); \ |
5054 | | Y = gfmul_sw(Y, H); \ |
5055 | | } \ |
5056 | | if (ibytes % 16) { \ |
5057 | | tmp1 = last_block; \ |
5058 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); \ |
5059 | | Y = _mm_xor_si128(Y, tmp1); \ |
5060 | | Y = gfmul_sw(Y, H); \ |
5061 | | } \ |
5062 | | AES_GCM_INSERT_EPI(tmp1, ibytes, 0); \ |
5063 | | Y = _mm_xor_si128(Y, tmp1); \ |
5064 | | Y = gfmul_sw(Y, H); \ |
5065 | | Y = _mm_shuffle_epi8(Y, BSWAP_MASK); /* Compute E(K, Y0) */ \ |
5066 | | tmp1 = _mm_xor_si128(Y, KEY[0]); \ |
5067 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); \ |
5068 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); \ |
5069 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); \ |
5070 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); \ |
5071 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); \ |
5072 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); \ |
5073 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); \ |
5074 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); \ |
5075 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); \ |
5076 | | lastKey = KEY[10]; \ |
5077 | | if (nr > 10) { \ |
5078 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); \ |
5079 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); \ |
5080 | | lastKey = KEY[12]; \ |
5081 | | if (nr > 12) { \ |
5082 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); \ |
5083 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); \ |
5084 | | lastKey = KEY[14]; \ |
5085 | | } \ |
5086 | | } \ |
5087 | | T = _mm_aesenclast_si128(tmp1, lastKey); \ |
5088 | | } \ |
5089 | | while (0) |
5090 | | |
5091 | | #define AES_ENC_8(j) \ |
5092 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[j]); \ |
5093 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[j]); \ |
5094 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[j]); \ |
5095 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[j]); \ |
5096 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[j]); \ |
5097 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[j]); \ |
5098 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[j]); \ |
5099 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[j]); |
5100 | | |
5101 | | #define AES_ENC_LAST_8() \ |
5102 | | tmp1 =_mm_aesenclast_si128(tmp1, lastKey); \ |
5103 | | tmp2 =_mm_aesenclast_si128(tmp2, lastKey); \ |
5104 | | tmp1 = _mm_xor_si128(tmp1, _mm_loadu_si128(&((__m128i*)in)[i*8+0])); \ |
5105 | | tmp2 = _mm_xor_si128(tmp2, _mm_loadu_si128(&((__m128i*)in)[i*8+1])); \ |
5106 | | _mm_storeu_si128(&((__m128i*)out)[i*8+0], tmp1); \ |
5107 | | _mm_storeu_si128(&((__m128i*)out)[i*8+1], tmp2); \ |
5108 | | tmp3 =_mm_aesenclast_si128(tmp3, lastKey); \ |
5109 | | tmp4 =_mm_aesenclast_si128(tmp4, lastKey); \ |
5110 | | tmp3 = _mm_xor_si128(tmp3, _mm_loadu_si128(&((__m128i*)in)[i*8+2])); \ |
5111 | | tmp4 = _mm_xor_si128(tmp4, _mm_loadu_si128(&((__m128i*)in)[i*8+3])); \ |
5112 | | _mm_storeu_si128(&((__m128i*)out)[i*8+2], tmp3); \ |
5113 | | _mm_storeu_si128(&((__m128i*)out)[i*8+3], tmp4); \ |
5114 | | tmp5 =_mm_aesenclast_si128(tmp5, lastKey); \ |
5115 | | tmp6 =_mm_aesenclast_si128(tmp6, lastKey); \ |
5116 | | tmp5 = _mm_xor_si128(tmp5, _mm_loadu_si128(&((__m128i*)in)[i*8+4])); \ |
5117 | | tmp6 = _mm_xor_si128(tmp6, _mm_loadu_si128(&((__m128i*)in)[i*8+5])); \ |
5118 | | _mm_storeu_si128(&((__m128i*)out)[i*8+4], tmp5); \ |
5119 | | _mm_storeu_si128(&((__m128i*)out)[i*8+5], tmp6); \ |
5120 | | tmp7 =_mm_aesenclast_si128(tmp7, lastKey); \ |
5121 | | tmp8 =_mm_aesenclast_si128(tmp8, lastKey); \ |
5122 | | tmp7 = _mm_xor_si128(tmp7, _mm_loadu_si128(&((__m128i*)in)[i*8+6])); \ |
5123 | | tmp8 = _mm_xor_si128(tmp8, _mm_loadu_si128(&((__m128i*)in)[i*8+7])); \ |
5124 | | _mm_storeu_si128(&((__m128i*)out)[i*8+6], tmp7); \ |
5125 | | _mm_storeu_si128(&((__m128i*)out)[i*8+7], tmp8); |
5126 | | |
5127 | | |
5128 | | static WARN_UNUSED_RESULT __m128i gfmul_sw(__m128i a, __m128i b) |
5129 | | { |
5130 | | __m128i r, t1, t2, t3, t4, t5, t6, t7; |
5131 | | t2 = _mm_shuffle_epi32(b, 78); |
5132 | | t3 = _mm_shuffle_epi32(a, 78); |
5133 | | t2 = _mm_xor_si128(t2, b); |
5134 | | t3 = _mm_xor_si128(t3, a); |
5135 | | t4 = _mm_clmulepi64_si128(b, a, 0x11); |
5136 | | t1 = _mm_clmulepi64_si128(b, a, 0x00); |
5137 | | t2 = _mm_clmulepi64_si128(t2, t3, 0x00); |
5138 | | t2 = _mm_xor_si128(t2, t1); |
5139 | | t2 = _mm_xor_si128(t2, t4); |
5140 | | t3 = _mm_slli_si128(t2, 8); |
5141 | | t2 = _mm_srli_si128(t2, 8); |
5142 | | t1 = _mm_xor_si128(t1, t3); |
5143 | | t4 = _mm_xor_si128(t4, t2); |
5144 | | |
5145 | | t5 = _mm_srli_epi32(t1, 31); |
5146 | | t6 = _mm_srli_epi32(t4, 31); |
5147 | | t1 = _mm_slli_epi32(t1, 1); |
5148 | | t4 = _mm_slli_epi32(t4, 1); |
5149 | | t7 = _mm_srli_si128(t5, 12); |
5150 | | t5 = _mm_slli_si128(t5, 4); |
5151 | | t6 = _mm_slli_si128(t6, 4); |
5152 | | t4 = _mm_or_si128(t4, t7); |
5153 | | t1 = _mm_or_si128(t1, t5); |
5154 | | t4 = _mm_or_si128(t4, t6); |
5155 | | |
5156 | | t5 = _mm_slli_epi32(t1, 31); |
5157 | | t6 = _mm_slli_epi32(t1, 30); |
5158 | | t7 = _mm_slli_epi32(t1, 25); |
5159 | | t5 = _mm_xor_si128(t5, t6); |
5160 | | t5 = _mm_xor_si128(t5, t7); |
5161 | | |
5162 | | t6 = _mm_srli_si128(t5, 4); |
5163 | | t5 = _mm_slli_si128(t5, 12); |
5164 | | t1 = _mm_xor_si128(t1, t5); |
5165 | | t7 = _mm_srli_epi32(t1, 1); |
5166 | | t3 = _mm_srli_epi32(t1, 2); |
5167 | | t2 = _mm_srli_epi32(t1, 7); |
5168 | | |
5169 | | t7 = _mm_xor_si128(t7, t3); |
5170 | | t7 = _mm_xor_si128(t7, t2); |
5171 | | t7 = _mm_xor_si128(t7, t6); |
5172 | | t7 = _mm_xor_si128(t7, t1); |
5173 | | r = _mm_xor_si128(t4, t7); |
5174 | | |
5175 | | return r; |
5176 | | } |
5177 | | |
5178 | | static void gfmul_only(__m128i a, __m128i b, __m128i* r0, __m128i* r1) |
5179 | | { |
5180 | | __m128i t1, t2, t3, t4; |
5181 | | |
5182 | | /* 128 x 128 Carryless Multiply */ |
5183 | | t2 = _mm_shuffle_epi32(b, 78); |
5184 | | t3 = _mm_shuffle_epi32(a, 78); |
5185 | | t2 = _mm_xor_si128(t2, b); |
5186 | | t3 = _mm_xor_si128(t3, a); |
5187 | | t4 = _mm_clmulepi64_si128(b, a, 0x11); |
5188 | | t1 = _mm_clmulepi64_si128(b, a, 0x00); |
5189 | | t2 = _mm_clmulepi64_si128(t2, t3, 0x00); |
5190 | | t2 = _mm_xor_si128(t2, t1); |
5191 | | t2 = _mm_xor_si128(t2, t4); |
5192 | | t3 = _mm_slli_si128(t2, 8); |
5193 | | t2 = _mm_srli_si128(t2, 8); |
5194 | | t1 = _mm_xor_si128(t1, t3); |
5195 | | t4 = _mm_xor_si128(t4, t2); |
5196 | | *r0 = _mm_xor_si128(t1, *r0); |
5197 | | *r1 = _mm_xor_si128(t4, *r1); |
5198 | | } |
5199 | | |
5200 | | static WARN_UNUSED_RESULT __m128i gfmul_shl1(__m128i a) |
5201 | | { |
5202 | | __m128i t1 = a, t2; |
5203 | | t2 = _mm_srli_epi64(t1, 63); |
5204 | | t1 = _mm_slli_epi64(t1, 1); |
5205 | | t2 = _mm_slli_si128(t2, 8); |
5206 | | t1 = _mm_or_si128(t1, t2); |
5207 | | /* if (a[1] >> 63) t1 = _mm_xor_si128(t1, MOD2_128); */ |
5208 | | a = _mm_shuffle_epi32(a, 0xff); |
5209 | | a = _mm_srai_epi32(a, 31); |
5210 | | a = _mm_and_si128(a, MOD2_128); |
5211 | | t1 = _mm_xor_si128(t1, a); |
5212 | | return t1; |
5213 | | } |
5214 | | |
5215 | | static WARN_UNUSED_RESULT __m128i ghash_red(__m128i r0, __m128i r1) |
5216 | | { |
5217 | | __m128i t2, t3; |
5218 | | __m128i t5, t6, t7; |
5219 | | |
5220 | | t5 = _mm_slli_epi32(r0, 31); |
5221 | | t6 = _mm_slli_epi32(r0, 30); |
5222 | | t7 = _mm_slli_epi32(r0, 25); |
5223 | | t5 = _mm_xor_si128(t5, t6); |
5224 | | t5 = _mm_xor_si128(t5, t7); |
5225 | | |
5226 | | t6 = _mm_srli_si128(t5, 4); |
5227 | | t5 = _mm_slli_si128(t5, 12); |
5228 | | r0 = _mm_xor_si128(r0, t5); |
5229 | | t7 = _mm_srli_epi32(r0, 1); |
5230 | | t3 = _mm_srli_epi32(r0, 2); |
5231 | | t2 = _mm_srli_epi32(r0, 7); |
5232 | | |
5233 | | t7 = _mm_xor_si128(t7, t3); |
5234 | | t7 = _mm_xor_si128(t7, t2); |
5235 | | t7 = _mm_xor_si128(t7, t6); |
5236 | | t7 = _mm_xor_si128(t7, r0); |
5237 | | return _mm_xor_si128(r1, t7); |
5238 | | } |
5239 | | |
5240 | | static WARN_UNUSED_RESULT __m128i gfmul_shifted(__m128i a, __m128i b) |
5241 | | { |
5242 | | __m128i t0 = _mm_setzero_si128(), t1 = _mm_setzero_si128(); |
5243 | | gfmul_only(a, b, &t0, &t1); |
5244 | | return ghash_red(t0, t1); |
5245 | | } |
5246 | | |
5247 | | #ifndef AES_GCM_AESNI_NO_UNROLL |
5248 | | static WARN_UNUSED_RESULT __m128i gfmul8( |
5249 | | __m128i a1, __m128i a2, __m128i a3, __m128i a4, |
5250 | | __m128i a5, __m128i a6, __m128i a7, __m128i a8, |
5251 | | __m128i b1, __m128i b2, __m128i b3, __m128i b4, |
5252 | | __m128i b5, __m128i b6, __m128i b7, __m128i b8) |
5253 | | { |
5254 | | __m128i t0 = _mm_setzero_si128(), t1 = _mm_setzero_si128(); |
5255 | | gfmul_only(a1, b8, &t0, &t1); |
5256 | | gfmul_only(a2, b7, &t0, &t1); |
5257 | | gfmul_only(a3, b6, &t0, &t1); |
5258 | | gfmul_only(a4, b5, &t0, &t1); |
5259 | | gfmul_only(a5, b4, &t0, &t1); |
5260 | | gfmul_only(a6, b3, &t0, &t1); |
5261 | | gfmul_only(a7, b2, &t0, &t1); |
5262 | | gfmul_only(a8, b1, &t0, &t1); |
5263 | | return ghash_red(t0, t1); |
5264 | | } |
5265 | | #endif |
5266 | | |
5267 | | |
5268 | | static WARN_UNUSED_RESULT int AES_GCM_encrypt( |
5269 | | const unsigned char *in, unsigned char *out, |
5270 | | const unsigned char* addt, |
5271 | | const unsigned char* ivec, unsigned char *tag, |
5272 | | word32 nbytes, word32 abytes, word32 ibytes, |
5273 | | word32 tbytes, const unsigned char* key, int nr) |
5274 | | { |
5275 | | int i, j ,k; |
5276 | | __m128i ctr1; |
5277 | | __m128i H, Y, T; |
5278 | | __m128i X = _mm_setzero_si128(); |
5279 | | __m128i *KEY = (__m128i*)key, lastKey; |
5280 | | __m128i last_block = _mm_setzero_si128(); |
5281 | | __m128i tmp1, tmp2; |
5282 | | #ifndef AES_GCM_AESNI_NO_UNROLL |
5283 | | __m128i HT[8]; |
5284 | | __m128i r0, r1; |
5285 | | __m128i XV; |
5286 | | __m128i tmp3, tmp4, tmp5, tmp6, tmp7, tmp8; |
5287 | | #endif |
5288 | | |
5289 | | if (ibytes == GCM_NONCE_MID_SZ) |
5290 | | aes_gcm_calc_iv_12(KEY, ivec, nr, H, Y, T); |
5291 | | else |
5292 | | aes_gcm_calc_iv(KEY, ivec, ibytes, nr, H, Y, T); |
5293 | | |
5294 | | for (i=0; i < (int)(abytes/16); i++) { |
5295 | | tmp1 = _mm_loadu_si128(&((__m128i*)addt)[i]); |
5296 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); |
5297 | | X = _mm_xor_si128(X, tmp1); |
5298 | | X = gfmul_sw(X, H); |
5299 | | } |
5300 | | if (abytes%16) { |
5301 | | last_block = _mm_setzero_si128(); |
5302 | | for (j=0; j < (int)(abytes%16); j++) |
5303 | | ((unsigned char*)&last_block)[j] = addt[i*16+j]; |
5304 | | tmp1 = last_block; |
5305 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); |
5306 | | X = _mm_xor_si128(X, tmp1); |
5307 | | X = gfmul_sw(X, H); |
5308 | | } |
5309 | | tmp1 = _mm_shuffle_epi8(Y, BSWAP_EPI64); |
5310 | | ctr1 = _mm_add_epi32(tmp1, ONE); |
5311 | | H = gfmul_shl1(H); |
5312 | | |
5313 | | #ifndef AES_GCM_AESNI_NO_UNROLL |
5314 | | i = 0; |
5315 | | if (nbytes >= 16*8) { |
5316 | | HT[0] = H; |
5317 | | HT[1] = gfmul_shifted(H, H); |
5318 | | HT[2] = gfmul_shifted(H, HT[1]); |
5319 | | HT[3] = gfmul_shifted(HT[1], HT[1]); |
5320 | | HT[4] = gfmul_shifted(HT[1], HT[2]); |
5321 | | HT[5] = gfmul_shifted(HT[2], HT[2]); |
5322 | | HT[6] = gfmul_shifted(HT[2], HT[3]); |
5323 | | HT[7] = gfmul_shifted(HT[3], HT[3]); |
5324 | | |
5325 | | tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64); |
5326 | | tmp2 = _mm_add_epi32(ctr1, ONE); |
5327 | | tmp2 = _mm_shuffle_epi8(tmp2, BSWAP_EPI64); |
5328 | | tmp3 = _mm_add_epi32(ctr1, TWO); |
5329 | | tmp3 = _mm_shuffle_epi8(tmp3, BSWAP_EPI64); |
5330 | | tmp4 = _mm_add_epi32(ctr1, THREE); |
5331 | | tmp4 = _mm_shuffle_epi8(tmp4, BSWAP_EPI64); |
5332 | | tmp5 = _mm_add_epi32(ctr1, FOUR); |
5333 | | tmp5 = _mm_shuffle_epi8(tmp5, BSWAP_EPI64); |
5334 | | tmp6 = _mm_add_epi32(ctr1, FIVE); |
5335 | | tmp6 = _mm_shuffle_epi8(tmp6, BSWAP_EPI64); |
5336 | | tmp7 = _mm_add_epi32(ctr1, SIX); |
5337 | | tmp7 = _mm_shuffle_epi8(tmp7, BSWAP_EPI64); |
5338 | | tmp8 = _mm_add_epi32(ctr1, SEVEN); |
5339 | | tmp8 = _mm_shuffle_epi8(tmp8, BSWAP_EPI64); |
5340 | | ctr1 = _mm_add_epi32(ctr1, EIGHT); |
5341 | | tmp1 =_mm_xor_si128(tmp1, KEY[0]); |
5342 | | tmp2 =_mm_xor_si128(tmp2, KEY[0]); |
5343 | | tmp3 =_mm_xor_si128(tmp3, KEY[0]); |
5344 | | tmp4 =_mm_xor_si128(tmp4, KEY[0]); |
5345 | | tmp5 =_mm_xor_si128(tmp5, KEY[0]); |
5346 | | tmp6 =_mm_xor_si128(tmp6, KEY[0]); |
5347 | | tmp7 =_mm_xor_si128(tmp7, KEY[0]); |
5348 | | tmp8 =_mm_xor_si128(tmp8, KEY[0]); |
5349 | | AES_ENC_8(1); |
5350 | | AES_ENC_8(2); |
5351 | | AES_ENC_8(3); |
5352 | | AES_ENC_8(4); |
5353 | | AES_ENC_8(5); |
5354 | | AES_ENC_8(6); |
5355 | | AES_ENC_8(7); |
5356 | | AES_ENC_8(8); |
5357 | | AES_ENC_8(9); |
5358 | | lastKey = KEY[10]; |
5359 | | if (nr > 10) { |
5360 | | AES_ENC_8(10); |
5361 | | AES_ENC_8(11); |
5362 | | lastKey = KEY[12]; |
5363 | | if (nr > 12) { |
5364 | | AES_ENC_8(12); |
5365 | | AES_ENC_8(13); |
5366 | | lastKey = KEY[14]; |
5367 | | } |
5368 | | } |
5369 | | AES_ENC_LAST_8(); |
5370 | | |
5371 | | for (i=1; i < (int)(nbytes/16/8); i++) { |
5372 | | r0 = _mm_setzero_si128(); |
5373 | | r1 = _mm_setzero_si128(); |
5374 | | tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64); |
5375 | | tmp2 = _mm_add_epi32(ctr1, ONE); |
5376 | | tmp2 = _mm_shuffle_epi8(tmp2, BSWAP_EPI64); |
5377 | | tmp3 = _mm_add_epi32(ctr1, TWO); |
5378 | | tmp3 = _mm_shuffle_epi8(tmp3, BSWAP_EPI64); |
5379 | | tmp4 = _mm_add_epi32(ctr1, THREE); |
5380 | | tmp4 = _mm_shuffle_epi8(tmp4, BSWAP_EPI64); |
5381 | | tmp5 = _mm_add_epi32(ctr1, FOUR); |
5382 | | tmp5 = _mm_shuffle_epi8(tmp5, BSWAP_EPI64); |
5383 | | tmp6 = _mm_add_epi32(ctr1, FIVE); |
5384 | | tmp6 = _mm_shuffle_epi8(tmp6, BSWAP_EPI64); |
5385 | | tmp7 = _mm_add_epi32(ctr1, SIX); |
5386 | | tmp7 = _mm_shuffle_epi8(tmp7, BSWAP_EPI64); |
5387 | | tmp8 = _mm_add_epi32(ctr1, SEVEN); |
5388 | | tmp8 = _mm_shuffle_epi8(tmp8, BSWAP_EPI64); |
5389 | | ctr1 = _mm_add_epi32(ctr1, EIGHT); |
5390 | | tmp1 =_mm_xor_si128(tmp1, KEY[0]); |
5391 | | tmp2 =_mm_xor_si128(tmp2, KEY[0]); |
5392 | | tmp3 =_mm_xor_si128(tmp3, KEY[0]); |
5393 | | tmp4 =_mm_xor_si128(tmp4, KEY[0]); |
5394 | | tmp5 =_mm_xor_si128(tmp5, KEY[0]); |
5395 | | tmp6 =_mm_xor_si128(tmp6, KEY[0]); |
5396 | | tmp7 =_mm_xor_si128(tmp7, KEY[0]); |
5397 | | tmp8 =_mm_xor_si128(tmp8, KEY[0]); |
5398 | | /* 128 x 128 Carryless Multiply */ |
5399 | | XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+0]); |
5400 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5401 | | XV = _mm_xor_si128(XV, X); |
5402 | | gfmul_only(XV, HT[7], &r0, &r1); |
5403 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); |
5404 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[1]); |
5405 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[1]); |
5406 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[1]); |
5407 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[1]); |
5408 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[1]); |
5409 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[1]); |
5410 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[1]); |
5411 | | /* 128 x 128 Carryless Multiply */ |
5412 | | XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+1]); |
5413 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5414 | | gfmul_only(XV, HT[6], &r0, &r1); |
5415 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); |
5416 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[2]); |
5417 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[2]); |
5418 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[2]); |
5419 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[2]); |
5420 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[2]); |
5421 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[2]); |
5422 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[2]); |
5423 | | /* 128 x 128 Carryless Multiply */ |
5424 | | XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+2]); |
5425 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5426 | | gfmul_only(XV, HT[5], &r0, &r1); |
5427 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); |
5428 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[3]); |
5429 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[3]); |
5430 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[3]); |
5431 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[3]); |
5432 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[3]); |
5433 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[3]); |
5434 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[3]); |
5435 | | /* 128 x 128 Carryless Multiply */ |
5436 | | XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+3]); |
5437 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5438 | | gfmul_only(XV, HT[4], &r0, &r1); |
5439 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); |
5440 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[4]); |
5441 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[4]); |
5442 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[4]); |
5443 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[4]); |
5444 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[4]); |
5445 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[4]); |
5446 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[4]); |
5447 | | /* 128 x 128 Carryless Multiply */ |
5448 | | XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+4]); |
5449 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5450 | | gfmul_only(XV, HT[3], &r0, &r1); |
5451 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); |
5452 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[5]); |
5453 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[5]); |
5454 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[5]); |
5455 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[5]); |
5456 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[5]); |
5457 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[5]); |
5458 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[5]); |
5459 | | /* 128 x 128 Carryless Multiply */ |
5460 | | XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+5]); |
5461 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5462 | | gfmul_only(XV, HT[2], &r0, &r1); |
5463 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); |
5464 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[6]); |
5465 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[6]); |
5466 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[6]); |
5467 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[6]); |
5468 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[6]); |
5469 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[6]); |
5470 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[6]); |
5471 | | /* 128 x 128 Carryless Multiply */ |
5472 | | XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+6]); |
5473 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5474 | | gfmul_only(XV, HT[1], &r0, &r1); |
5475 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); |
5476 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[7]); |
5477 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[7]); |
5478 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[7]); |
5479 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[7]); |
5480 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[7]); |
5481 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[7]); |
5482 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[7]); |
5483 | | /* 128 x 128 Carryless Multiply */ |
5484 | | XV = _mm_loadu_si128(&((__m128i*)out)[(i-1)*8+7]); |
5485 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5486 | | gfmul_only(XV, HT[0], &r0, &r1); |
5487 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); |
5488 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[8]); |
5489 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[8]); |
5490 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[8]); |
5491 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[8]); |
5492 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[8]); |
5493 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[8]); |
5494 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[8]); |
5495 | | /* Reduction */ |
5496 | | X = ghash_red(r0, r1); |
5497 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); |
5498 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[9]); |
5499 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[9]); |
5500 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[9]); |
5501 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[9]); |
5502 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[9]); |
5503 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[9]); |
5504 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[9]); |
5505 | | lastKey = KEY[10]; |
5506 | | if (nr > 10) { |
5507 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[10]); |
5508 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[10]); |
5509 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[10]); |
5510 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[10]); |
5511 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[10]); |
5512 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[10]); |
5513 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[10]); |
5514 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[10]); |
5515 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); |
5516 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[11]); |
5517 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[11]); |
5518 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[11]); |
5519 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[11]); |
5520 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[11]); |
5521 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[11]); |
5522 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[11]); |
5523 | | lastKey = KEY[12]; |
5524 | | if (nr > 12) { |
5525 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[12]); |
5526 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[12]); |
5527 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[12]); |
5528 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[12]); |
5529 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[12]); |
5530 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[12]); |
5531 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[12]); |
5532 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[12]); |
5533 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); |
5534 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[13]); |
5535 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[13]); |
5536 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[13]); |
5537 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[13]); |
5538 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[13]); |
5539 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[13]); |
5540 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[13]); |
5541 | | lastKey = KEY[14]; |
5542 | | } |
5543 | | } |
5544 | | AES_ENC_LAST_8(); |
5545 | | } |
5546 | | |
5547 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); |
5548 | | tmp2 = _mm_shuffle_epi8(tmp2, BSWAP_MASK); |
5549 | | tmp3 = _mm_shuffle_epi8(tmp3, BSWAP_MASK); |
5550 | | tmp4 = _mm_shuffle_epi8(tmp4, BSWAP_MASK); |
5551 | | tmp5 = _mm_shuffle_epi8(tmp5, BSWAP_MASK); |
5552 | | tmp6 = _mm_shuffle_epi8(tmp6, BSWAP_MASK); |
5553 | | tmp7 = _mm_shuffle_epi8(tmp7, BSWAP_MASK); |
5554 | | tmp8 = _mm_shuffle_epi8(tmp8, BSWAP_MASK); |
5555 | | tmp1 = _mm_xor_si128(X, tmp1); |
5556 | | X = gfmul8(tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7, tmp8, |
5557 | | HT[0], HT[1], HT[2], HT[3], HT[4], HT[5], HT[6], HT[7]); |
5558 | | } |
5559 | | for (k = i*8; k < (int)(nbytes/16); k++) { |
5560 | | tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64); |
5561 | | ctr1 = _mm_add_epi32(ctr1, ONE); |
5562 | | tmp1 = _mm_xor_si128(tmp1, KEY[0]); |
5563 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); |
5564 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); |
5565 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); |
5566 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); |
5567 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); |
5568 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); |
5569 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); |
5570 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); |
5571 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); |
5572 | | lastKey = KEY[10]; |
5573 | | if (nr > 10) { |
5574 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5575 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); |
5576 | | lastKey = KEY[12]; |
5577 | | if (nr > 12) { |
5578 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5579 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); |
5580 | | lastKey = KEY[14]; |
5581 | | } |
5582 | | } |
5583 | | tmp1 = _mm_aesenclast_si128(tmp1, lastKey); |
5584 | | tmp1 = _mm_xor_si128(tmp1, _mm_loadu_si128(&((__m128i*)in)[k])); |
5585 | | _mm_storeu_si128(&((__m128i*)out)[k], tmp1); |
5586 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); |
5587 | | X =_mm_xor_si128(X, tmp1); |
5588 | | X = gfmul_shifted(X, H); |
5589 | | } |
5590 | | #else /* AES_GCM_AESNI_NO_UNROLL */ |
5591 | | for (k = 0; k < (int)(nbytes/16) && k < 1; k++) { |
5592 | | tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64); |
5593 | | ctr1 = _mm_add_epi32(ctr1, ONE); |
5594 | | tmp1 = _mm_xor_si128(tmp1, KEY[0]); |
5595 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); |
5596 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); |
5597 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); |
5598 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); |
5599 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); |
5600 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); |
5601 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); |
5602 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); |
5603 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); |
5604 | | lastKey = KEY[10]; |
5605 | | if (nr > 10) { |
5606 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5607 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); |
5608 | | lastKey = KEY[12]; |
5609 | | if (nr > 12) { |
5610 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5611 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); |
5612 | | lastKey = KEY[14]; |
5613 | | } |
5614 | | } |
5615 | | tmp1 = _mm_aesenclast_si128(tmp1, lastKey); |
5616 | | tmp1 = _mm_xor_si128(tmp1, _mm_loadu_si128(&((__m128i*)in)[k])); |
5617 | | _mm_storeu_si128(&((__m128i*)out)[k], tmp1); |
5618 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); |
5619 | | X =_mm_xor_si128(X, tmp1); |
5620 | | } |
5621 | | for (; k < (int)(nbytes/16); k++) { |
5622 | | tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64); |
5623 | | ctr1 = _mm_add_epi32(ctr1, ONE); |
5624 | | tmp1 = _mm_xor_si128(tmp1, KEY[0]); |
5625 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); |
5626 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); |
5627 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); |
5628 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); |
5629 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); |
5630 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); |
5631 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); |
5632 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); |
5633 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); |
5634 | | X = gfmul_shifted(X, H); |
5635 | | lastKey = KEY[10]; |
5636 | | if (nr > 10) { |
5637 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5638 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); |
5639 | | lastKey = KEY[12]; |
5640 | | if (nr > 12) { |
5641 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5642 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); |
5643 | | lastKey = KEY[14]; |
5644 | | } |
5645 | | } |
5646 | | tmp1 = _mm_aesenclast_si128(tmp1, lastKey); |
5647 | | tmp1 = _mm_xor_si128(tmp1, _mm_loadu_si128(&((__m128i*)in)[k])); |
5648 | | _mm_storeu_si128(&((__m128i*)out)[k], tmp1); |
5649 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); |
5650 | | X =_mm_xor_si128(X, tmp1); |
5651 | | } |
5652 | | if (k > 0) { |
5653 | | X = gfmul_shifted(X, H); |
5654 | | } |
5655 | | #endif /* AES_GCM_AESNI_NO_UNROLL */ |
5656 | | |
5657 | | /* If one partial block remains */ |
5658 | | if (nbytes % 16) { |
5659 | | tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64); |
5660 | | tmp1 = _mm_xor_si128(tmp1, KEY[0]); |
5661 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); |
5662 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); |
5663 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); |
5664 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); |
5665 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); |
5666 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); |
5667 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); |
5668 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); |
5669 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); |
5670 | | lastKey = KEY[10]; |
5671 | | if (nr > 10) { |
5672 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5673 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); |
5674 | | lastKey = KEY[12]; |
5675 | | if (nr > 12) { |
5676 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5677 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); |
5678 | | lastKey = KEY[14]; |
5679 | | } |
5680 | | } |
5681 | | tmp1 = _mm_aesenclast_si128(tmp1, lastKey); |
5682 | | last_block = tmp1; |
5683 | | for (j=0; j < (int)(nbytes%16); j++) |
5684 | | ((unsigned char*)&last_block)[j] = in[k*16+j]; |
5685 | | tmp1 = _mm_xor_si128(tmp1, last_block); |
5686 | | last_block = tmp1; |
5687 | | for (j=0; j < (int)(nbytes%16); j++) |
5688 | | out[k*16+j] = ((unsigned char*)&last_block)[j]; |
5689 | | tmp1 = last_block; |
5690 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); |
5691 | | X =_mm_xor_si128(X, tmp1); |
5692 | | X = gfmul_shifted(X, H); |
5693 | | } |
5694 | | AES_GCM_INSERT_EPI(tmp1, nbytes, abytes); |
5695 | | X = _mm_xor_si128(X, tmp1); |
5696 | | X = gfmul_shifted(X, H); |
5697 | | X = _mm_shuffle_epi8(X, BSWAP_MASK); |
5698 | | T = _mm_xor_si128(X, T); |
5699 | | /*_mm_storeu_si128((__m128i*)tag, T);*/ |
5700 | | XMEMCPY(tag, &T, tbytes); |
5701 | | ForceZero(&lastKey, sizeof(lastKey)); |
5702 | | |
5703 | | return 0; |
5704 | | } |
5705 | | |
5706 | | #ifdef HAVE_AES_DECRYPT |
5707 | | |
5708 | | static WARN_UNUSED_RESULT int AES_GCM_decrypt( |
5709 | | const unsigned char *in, unsigned char *out, |
5710 | | const unsigned char* addt, |
5711 | | const unsigned char* ivec, const unsigned char *tag, |
5712 | | word32 nbytes, word32 abytes, word32 ibytes, |
5713 | | word32 tbytes, const unsigned char* key, int nr, |
5714 | | int* res) |
5715 | | { |
5716 | | int i, j ,k; |
5717 | | __m128i H, Y, T; |
5718 | | __m128i *KEY = (__m128i*)key, lastKey; |
5719 | | __m128i ctr1; |
5720 | | __m128i last_block = _mm_setzero_si128(); |
5721 | | __m128i X = _mm_setzero_si128(); |
5722 | | __m128i tmp1, tmp2, XV; |
5723 | | #ifndef AES_GCM_AESNI_NO_UNROLL |
5724 | | __m128i HT[8]; |
5725 | | __m128i r0, r1; |
5726 | | __m128i tmp3, tmp4, tmp5, tmp6, tmp7, tmp8; |
5727 | | #endif /* AES_GCM_AESNI_NO_UNROLL */ |
5728 | | |
5729 | | if (ibytes == GCM_NONCE_MID_SZ) |
5730 | | aes_gcm_calc_iv_12(KEY, ivec, nr, H, Y, T); |
5731 | | else |
5732 | | aes_gcm_calc_iv(KEY, ivec, ibytes, nr, H, Y, T); |
5733 | | |
5734 | | for (i=0; i<(int)(abytes/16); i++) { |
5735 | | tmp1 = _mm_loadu_si128(&((__m128i*)addt)[i]); |
5736 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); |
5737 | | X = _mm_xor_si128(X, tmp1); |
5738 | | X = gfmul_sw(X, H); |
5739 | | } |
5740 | | if (abytes%16) { |
5741 | | last_block = _mm_setzero_si128(); |
5742 | | for (j=0; j<(int)(abytes%16); j++) |
5743 | | ((unsigned char*)&last_block)[j] = addt[i*16+j]; |
5744 | | tmp1 = last_block; |
5745 | | tmp1 = _mm_shuffle_epi8(tmp1, BSWAP_MASK); |
5746 | | X = _mm_xor_si128(X, tmp1); |
5747 | | X = gfmul_sw(X, H); |
5748 | | } |
5749 | | |
5750 | | tmp1 = _mm_shuffle_epi8(Y, BSWAP_EPI64); |
5751 | | ctr1 = _mm_add_epi32(tmp1, ONE); |
5752 | | H = gfmul_shl1(H); |
5753 | | i = 0; |
5754 | | |
5755 | | #ifndef AES_GCM_AESNI_NO_UNROLL |
5756 | | |
5757 | | if (0 < nbytes/16/8) { |
5758 | | HT[0] = H; |
5759 | | HT[1] = gfmul_shifted(H, H); |
5760 | | HT[2] = gfmul_shifted(H, HT[1]); |
5761 | | HT[3] = gfmul_shifted(HT[1], HT[1]); |
5762 | | HT[4] = gfmul_shifted(HT[1], HT[2]); |
5763 | | HT[5] = gfmul_shifted(HT[2], HT[2]); |
5764 | | HT[6] = gfmul_shifted(HT[2], HT[3]); |
5765 | | HT[7] = gfmul_shifted(HT[3], HT[3]); |
5766 | | |
5767 | | for (; i < (int)(nbytes/16/8); i++) { |
5768 | | r0 = _mm_setzero_si128(); |
5769 | | r1 = _mm_setzero_si128(); |
5770 | | |
5771 | | tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64); |
5772 | | tmp2 = _mm_add_epi32(ctr1, ONE); |
5773 | | tmp2 = _mm_shuffle_epi8(tmp2, BSWAP_EPI64); |
5774 | | tmp3 = _mm_add_epi32(ctr1, TWO); |
5775 | | tmp3 = _mm_shuffle_epi8(tmp3, BSWAP_EPI64); |
5776 | | tmp4 = _mm_add_epi32(ctr1, THREE); |
5777 | | tmp4 = _mm_shuffle_epi8(tmp4, BSWAP_EPI64); |
5778 | | tmp5 = _mm_add_epi32(ctr1, FOUR); |
5779 | | tmp5 = _mm_shuffle_epi8(tmp5, BSWAP_EPI64); |
5780 | | tmp6 = _mm_add_epi32(ctr1, FIVE); |
5781 | | tmp6 = _mm_shuffle_epi8(tmp6, BSWAP_EPI64); |
5782 | | tmp7 = _mm_add_epi32(ctr1, SIX); |
5783 | | tmp7 = _mm_shuffle_epi8(tmp7, BSWAP_EPI64); |
5784 | | tmp8 = _mm_add_epi32(ctr1, SEVEN); |
5785 | | tmp8 = _mm_shuffle_epi8(tmp8, BSWAP_EPI64); |
5786 | | ctr1 = _mm_add_epi32(ctr1, EIGHT); |
5787 | | tmp1 =_mm_xor_si128(tmp1, KEY[0]); |
5788 | | tmp2 =_mm_xor_si128(tmp2, KEY[0]); |
5789 | | tmp3 =_mm_xor_si128(tmp3, KEY[0]); |
5790 | | tmp4 =_mm_xor_si128(tmp4, KEY[0]); |
5791 | | tmp5 =_mm_xor_si128(tmp5, KEY[0]); |
5792 | | tmp6 =_mm_xor_si128(tmp6, KEY[0]); |
5793 | | tmp7 =_mm_xor_si128(tmp7, KEY[0]); |
5794 | | tmp8 =_mm_xor_si128(tmp8, KEY[0]); |
5795 | | /* 128 x 128 Carryless Multiply */ |
5796 | | XV = _mm_loadu_si128(&((__m128i*)in)[i*8+0]); |
5797 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5798 | | XV = _mm_xor_si128(XV, X); |
5799 | | gfmul_only(XV, HT[7], &r0, &r1); |
5800 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); |
5801 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[1]); |
5802 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[1]); |
5803 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[1]); |
5804 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[1]); |
5805 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[1]); |
5806 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[1]); |
5807 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[1]); |
5808 | | /* 128 x 128 Carryless Multiply */ |
5809 | | XV = _mm_loadu_si128(&((__m128i*)in)[i*8+1]); |
5810 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5811 | | gfmul_only(XV, HT[6], &r0, &r1); |
5812 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); |
5813 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[2]); |
5814 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[2]); |
5815 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[2]); |
5816 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[2]); |
5817 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[2]); |
5818 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[2]); |
5819 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[2]); |
5820 | | /* 128 x 128 Carryless Multiply */ |
5821 | | XV = _mm_loadu_si128(&((__m128i*)in)[i*8+2]); |
5822 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5823 | | gfmul_only(XV, HT[5], &r0, &r1); |
5824 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); |
5825 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[3]); |
5826 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[3]); |
5827 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[3]); |
5828 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[3]); |
5829 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[3]); |
5830 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[3]); |
5831 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[3]); |
5832 | | /* 128 x 128 Carryless Multiply */ |
5833 | | XV = _mm_loadu_si128(&((__m128i*)in)[i*8+3]); |
5834 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5835 | | gfmul_only(XV, HT[4], &r0, &r1); |
5836 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); |
5837 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[4]); |
5838 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[4]); |
5839 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[4]); |
5840 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[4]); |
5841 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[4]); |
5842 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[4]); |
5843 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[4]); |
5844 | | /* 128 x 128 Carryless Multiply */ |
5845 | | XV = _mm_loadu_si128(&((__m128i*)in)[i*8+4]); |
5846 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5847 | | gfmul_only(XV, HT[3], &r0, &r1); |
5848 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); |
5849 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[5]); |
5850 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[5]); |
5851 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[5]); |
5852 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[5]); |
5853 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[5]); |
5854 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[5]); |
5855 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[5]); |
5856 | | /* 128 x 128 Carryless Multiply */ |
5857 | | XV = _mm_loadu_si128(&((__m128i*)in)[i*8+5]); |
5858 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5859 | | gfmul_only(XV, HT[2], &r0, &r1); |
5860 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); |
5861 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[6]); |
5862 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[6]); |
5863 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[6]); |
5864 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[6]); |
5865 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[6]); |
5866 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[6]); |
5867 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[6]); |
5868 | | /* 128 x 128 Carryless Multiply */ |
5869 | | XV = _mm_loadu_si128(&((__m128i*)in)[i*8+6]); |
5870 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5871 | | gfmul_only(XV, HT[1], &r0, &r1); |
5872 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); |
5873 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[7]); |
5874 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[7]); |
5875 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[7]); |
5876 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[7]); |
5877 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[7]); |
5878 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[7]); |
5879 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[7]); |
5880 | | /* 128 x 128 Carryless Multiply */ |
5881 | | XV = _mm_loadu_si128(&((__m128i*)in)[i*8+7]); |
5882 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5883 | | gfmul_only(XV, HT[0], &r0, &r1); |
5884 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); |
5885 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[8]); |
5886 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[8]); |
5887 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[8]); |
5888 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[8]); |
5889 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[8]); |
5890 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[8]); |
5891 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[8]); |
5892 | | /* Reduction */ |
5893 | | X = ghash_red(r0, r1); |
5894 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); |
5895 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[9]); |
5896 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[9]); |
5897 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[9]); |
5898 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[9]); |
5899 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[9]); |
5900 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[9]); |
5901 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[9]); |
5902 | | lastKey = KEY[10]; |
5903 | | if (nr > 10) { |
5904 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[10]); |
5905 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[10]); |
5906 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[10]); |
5907 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[10]); |
5908 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[10]); |
5909 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[10]); |
5910 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[10]); |
5911 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[10]); |
5912 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); |
5913 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[11]); |
5914 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[11]); |
5915 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[11]); |
5916 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[11]); |
5917 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[11]); |
5918 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[11]); |
5919 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[11]); |
5920 | | lastKey = KEY[12]; |
5921 | | if (nr > 12) { |
5922 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[12]); |
5923 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[12]); |
5924 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[12]); |
5925 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[12]); |
5926 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[12]); |
5927 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[12]); |
5928 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[12]); |
5929 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[12]); |
5930 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); |
5931 | | tmp2 = _mm_aesenc_si128(tmp2, KEY[13]); |
5932 | | tmp3 = _mm_aesenc_si128(tmp3, KEY[13]); |
5933 | | tmp4 = _mm_aesenc_si128(tmp4, KEY[13]); |
5934 | | tmp5 = _mm_aesenc_si128(tmp5, KEY[13]); |
5935 | | tmp6 = _mm_aesenc_si128(tmp6, KEY[13]); |
5936 | | tmp7 = _mm_aesenc_si128(tmp7, KEY[13]); |
5937 | | tmp8 = _mm_aesenc_si128(tmp8, KEY[13]); |
5938 | | lastKey = KEY[14]; |
5939 | | } |
5940 | | } |
5941 | | AES_ENC_LAST_8(); |
5942 | | } |
5943 | | } |
5944 | | |
5945 | | #endif /* AES_GCM_AESNI_NO_UNROLL */ |
5946 | | |
5947 | | for (k = i*8; k < (int)(nbytes/16); k++) { |
5948 | | tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64); |
5949 | | ctr1 = _mm_add_epi32(ctr1, ONE); |
5950 | | tmp1 = _mm_xor_si128(tmp1, KEY[0]); |
5951 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); |
5952 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); |
5953 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); |
5954 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); |
5955 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); |
5956 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); |
5957 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); |
5958 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); |
5959 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); |
5960 | | /* 128 x 128 Carryless Multiply */ |
5961 | | XV = _mm_loadu_si128(&((__m128i*)in)[k]); |
5962 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
5963 | | XV = _mm_xor_si128(XV, X); |
5964 | | X = gfmul_shifted(XV, H); |
5965 | | lastKey = KEY[10]; |
5966 | | if (nr > 10) { |
5967 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5968 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); |
5969 | | lastKey = KEY[12]; |
5970 | | if (nr > 12) { |
5971 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5972 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); |
5973 | | lastKey = KEY[14]; |
5974 | | } |
5975 | | } |
5976 | | tmp1 = _mm_aesenclast_si128(tmp1, lastKey); |
5977 | | tmp2 = _mm_loadu_si128(&((__m128i*)in)[k]); |
5978 | | tmp1 = _mm_xor_si128(tmp1, tmp2); |
5979 | | _mm_storeu_si128(&((__m128i*)out)[k], tmp1); |
5980 | | } |
5981 | | |
5982 | | /* If one partial block remains */ |
5983 | | if (nbytes % 16) { |
5984 | | tmp1 = _mm_shuffle_epi8(ctr1, BSWAP_EPI64); |
5985 | | tmp1 = _mm_xor_si128(tmp1, KEY[0]); |
5986 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[1]); |
5987 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[2]); |
5988 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[3]); |
5989 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[4]); |
5990 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[5]); |
5991 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[6]); |
5992 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[7]); |
5993 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[8]); |
5994 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[9]); |
5995 | | lastKey = KEY[10]; |
5996 | | if (nr > 10) { |
5997 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
5998 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[11]); |
5999 | | lastKey = KEY[12]; |
6000 | | if (nr > 12) { |
6001 | | tmp1 = _mm_aesenc_si128(tmp1, lastKey); |
6002 | | tmp1 = _mm_aesenc_si128(tmp1, KEY[13]); |
6003 | | lastKey = KEY[14]; |
6004 | | } |
6005 | | } |
6006 | | tmp1 = _mm_aesenclast_si128(tmp1, lastKey); |
6007 | | last_block = _mm_setzero_si128(); |
6008 | | for (j=0; j < (int)(nbytes%16); j++) |
6009 | | ((unsigned char*)&last_block)[j] = in[k*16+j]; |
6010 | | XV = last_block; |
6011 | | tmp1 = _mm_xor_si128(tmp1, last_block); |
6012 | | last_block = tmp1; |
6013 | | for (j=0; j < (int)(nbytes%16); j++) |
6014 | | out[k*16+j] = ((unsigned char*)&last_block)[j]; |
6015 | | XV = _mm_shuffle_epi8(XV, BSWAP_MASK); |
6016 | | XV = _mm_xor_si128(XV, X); |
6017 | | X = gfmul_shifted(XV, H); |
6018 | | } |
6019 | | |
6020 | | AES_GCM_INSERT_EPI(tmp1, nbytes, abytes); |
6021 | | |
6022 | | /* 128 x 128 Carryless Multiply */ |
6023 | | X = _mm_xor_si128(X, tmp1); |
6024 | | X = gfmul_shifted(X, H); |
6025 | | X = _mm_shuffle_epi8(X, BSWAP_MASK); |
6026 | | T = _mm_xor_si128(X, T); |
6027 | | |
6028 | | /* if (0xffff != |
6029 | | _mm_movemask_epi8(_mm_cmpeq_epi8(T, _mm_loadu_si128((__m128i*)tag)))) */ |
6030 | | if (XMEMCMP(tag, &T, tbytes) != 0) |
6031 | | *res = 0; /* in case the authentication failed */ |
6032 | | else |
6033 | | *res = 1; /* when successful returns 1 */ |
6034 | | ForceZero(&lastKey, sizeof(lastKey)); |
6035 | | |
6036 | | return 0; |
6037 | | } |
6038 | | |
6039 | | #endif /* HAVE_AES_DECRYPT */ |
6040 | | #endif /* _MSC_VER */ |
6041 | | #endif /* WOLFSSL_AESNI */ |
6042 | | |
6043 | | #if defined(GCM_SMALL) |
6044 | | static void GMULT(byte* X, byte* Y) |
6045 | | { |
6046 | | byte Z[AES_BLOCK_SIZE]; |
6047 | | byte V[AES_BLOCK_SIZE]; |
6048 | | int i, j; |
6049 | | |
6050 | | XMEMSET(Z, 0, AES_BLOCK_SIZE); |
6051 | | XMEMCPY(V, X, AES_BLOCK_SIZE); |
6052 | | for (i = 0; i < AES_BLOCK_SIZE; i++) |
6053 | | { |
6054 | | byte y = Y[i]; |
6055 | | for (j = 0; j < 8; j++) |
6056 | | { |
6057 | | if (y & 0x80) { |
6058 | | xorbuf(Z, V, AES_BLOCK_SIZE); |
6059 | | } |
6060 | | |
6061 | | RIGHTSHIFTX(V); |
6062 | | y = y << 1; |
6063 | | } |
6064 | | } |
6065 | | XMEMCPY(X, Z, AES_BLOCK_SIZE); |
6066 | | } |
6067 | | |
6068 | | |
6069 | | void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c, |
6070 | | word32 cSz, byte* s, word32 sSz) |
6071 | | { |
6072 | | byte x[AES_BLOCK_SIZE]; |
6073 | | byte scratch[AES_BLOCK_SIZE]; |
6074 | | word32 blocks, partial; |
6075 | | byte* h; |
6076 | | |
6077 | | if (aes == NULL) { |
6078 | | return; |
6079 | | } |
6080 | | |
6081 | | h = aes->H; |
6082 | | XMEMSET(x, 0, AES_BLOCK_SIZE); |
6083 | | |
6084 | | /* Hash in A, the Additional Authentication Data */ |
6085 | | if (aSz != 0 && a != NULL) { |
6086 | | blocks = aSz / AES_BLOCK_SIZE; |
6087 | | partial = aSz % AES_BLOCK_SIZE; |
6088 | | while (blocks--) { |
6089 | | xorbuf(x, a, AES_BLOCK_SIZE); |
6090 | | GMULT(x, h); |
6091 | | a += AES_BLOCK_SIZE; |
6092 | | } |
6093 | | if (partial != 0) { |
6094 | | XMEMSET(scratch, 0, AES_BLOCK_SIZE); |
6095 | | XMEMCPY(scratch, a, partial); |
6096 | | xorbuf(x, scratch, AES_BLOCK_SIZE); |
6097 | | GMULT(x, h); |
6098 | | } |
6099 | | } |
6100 | | |
6101 | | /* Hash in C, the Ciphertext */ |
6102 | | if (cSz != 0 && c != NULL) { |
6103 | | blocks = cSz / AES_BLOCK_SIZE; |
6104 | | partial = cSz % AES_BLOCK_SIZE; |
6105 | | while (blocks--) { |
6106 | | xorbuf(x, c, AES_BLOCK_SIZE); |
6107 | | GMULT(x, h); |
6108 | | c += AES_BLOCK_SIZE; |
6109 | | } |
6110 | | if (partial != 0) { |
6111 | | XMEMSET(scratch, 0, AES_BLOCK_SIZE); |
6112 | | XMEMCPY(scratch, c, partial); |
6113 | | xorbuf(x, scratch, AES_BLOCK_SIZE); |
6114 | | GMULT(x, h); |
6115 | | } |
6116 | | } |
6117 | | |
6118 | | /* Hash in the lengths of A and C in bits */ |
6119 | | FlattenSzInBits(&scratch[0], aSz); |
6120 | | FlattenSzInBits(&scratch[8], cSz); |
6121 | | xorbuf(x, scratch, AES_BLOCK_SIZE); |
6122 | | GMULT(x, h); |
6123 | | |
6124 | | /* Copy the result into s. */ |
6125 | | XMEMCPY(s, x, sSz); |
6126 | | } |
6127 | | |
6128 | | #ifdef WOLFSSL_AESGCM_STREAM |
6129 | | /* No extra initialization for small implementation. |
6130 | | * |
6131 | | * @param [in] aes AES GCM object. |
6132 | | */ |
6133 | | #define GHASH_INIT_EXTRA(aes) |
6134 | | |
6135 | | /* GHASH one block of data.. |
6136 | | * |
6137 | | * XOR block into tag and GMULT with H. |
6138 | | * |
6139 | | * @param [in, out] aes AES GCM object. |
6140 | | * @param [in] block Block of AAD or cipher text. |
6141 | | */ |
6142 | | #define GHASH_ONE_BLOCK(aes, block) \ |
6143 | | do { \ |
6144 | | xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \ |
6145 | | GMULT(AES_TAG(aes), aes->H); \ |
6146 | | } \ |
6147 | | while (0) |
6148 | | #endif /* WOLFSSL_AESGCM_STREAM */ |
6149 | | /* end GCM_SMALL */ |
6150 | | #elif defined(GCM_TABLE) |
6151 | | |
6152 | | static const byte R[256][2] = { |
6153 | | {0x00, 0x00}, {0x01, 0xc2}, {0x03, 0x84}, {0x02, 0x46}, |
6154 | | {0x07, 0x08}, {0x06, 0xca}, {0x04, 0x8c}, {0x05, 0x4e}, |
6155 | | {0x0e, 0x10}, {0x0f, 0xd2}, {0x0d, 0x94}, {0x0c, 0x56}, |
6156 | | {0x09, 0x18}, {0x08, 0xda}, {0x0a, 0x9c}, {0x0b, 0x5e}, |
6157 | | {0x1c, 0x20}, {0x1d, 0xe2}, {0x1f, 0xa4}, {0x1e, 0x66}, |
6158 | | {0x1b, 0x28}, {0x1a, 0xea}, {0x18, 0xac}, {0x19, 0x6e}, |
6159 | | {0x12, 0x30}, {0x13, 0xf2}, {0x11, 0xb4}, {0x10, 0x76}, |
6160 | | {0x15, 0x38}, {0x14, 0xfa}, {0x16, 0xbc}, {0x17, 0x7e}, |
6161 | | {0x38, 0x40}, {0x39, 0x82}, {0x3b, 0xc4}, {0x3a, 0x06}, |
6162 | | {0x3f, 0x48}, {0x3e, 0x8a}, {0x3c, 0xcc}, {0x3d, 0x0e}, |
6163 | | {0x36, 0x50}, {0x37, 0x92}, {0x35, 0xd4}, {0x34, 0x16}, |
6164 | | {0x31, 0x58}, {0x30, 0x9a}, {0x32, 0xdc}, {0x33, 0x1e}, |
6165 | | {0x24, 0x60}, {0x25, 0xa2}, {0x27, 0xe4}, {0x26, 0x26}, |
6166 | | {0x23, 0x68}, {0x22, 0xaa}, {0x20, 0xec}, {0x21, 0x2e}, |
6167 | | {0x2a, 0x70}, {0x2b, 0xb2}, {0x29, 0xf4}, {0x28, 0x36}, |
6168 | | {0x2d, 0x78}, {0x2c, 0xba}, {0x2e, 0xfc}, {0x2f, 0x3e}, |
6169 | | {0x70, 0x80}, {0x71, 0x42}, {0x73, 0x04}, {0x72, 0xc6}, |
6170 | | {0x77, 0x88}, {0x76, 0x4a}, {0x74, 0x0c}, {0x75, 0xce}, |
6171 | | {0x7e, 0x90}, {0x7f, 0x52}, {0x7d, 0x14}, {0x7c, 0xd6}, |
6172 | | {0x79, 0x98}, {0x78, 0x5a}, {0x7a, 0x1c}, {0x7b, 0xde}, |
6173 | | {0x6c, 0xa0}, {0x6d, 0x62}, {0x6f, 0x24}, {0x6e, 0xe6}, |
6174 | | {0x6b, 0xa8}, {0x6a, 0x6a}, {0x68, 0x2c}, {0x69, 0xee}, |
6175 | | {0x62, 0xb0}, {0x63, 0x72}, {0x61, 0x34}, {0x60, 0xf6}, |
6176 | | {0x65, 0xb8}, {0x64, 0x7a}, {0x66, 0x3c}, {0x67, 0xfe}, |
6177 | | {0x48, 0xc0}, {0x49, 0x02}, {0x4b, 0x44}, {0x4a, 0x86}, |
6178 | | {0x4f, 0xc8}, {0x4e, 0x0a}, {0x4c, 0x4c}, {0x4d, 0x8e}, |
6179 | | {0x46, 0xd0}, {0x47, 0x12}, {0x45, 0x54}, {0x44, 0x96}, |
6180 | | {0x41, 0xd8}, {0x40, 0x1a}, {0x42, 0x5c}, {0x43, 0x9e}, |
6181 | | {0x54, 0xe0}, {0x55, 0x22}, {0x57, 0x64}, {0x56, 0xa6}, |
6182 | | {0x53, 0xe8}, {0x52, 0x2a}, {0x50, 0x6c}, {0x51, 0xae}, |
6183 | | {0x5a, 0xf0}, {0x5b, 0x32}, {0x59, 0x74}, {0x58, 0xb6}, |
6184 | | {0x5d, 0xf8}, {0x5c, 0x3a}, {0x5e, 0x7c}, {0x5f, 0xbe}, |
6185 | | {0xe1, 0x00}, {0xe0, 0xc2}, {0xe2, 0x84}, {0xe3, 0x46}, |
6186 | | {0xe6, 0x08}, {0xe7, 0xca}, {0xe5, 0x8c}, {0xe4, 0x4e}, |
6187 | | {0xef, 0x10}, {0xee, 0xd2}, {0xec, 0x94}, {0xed, 0x56}, |
6188 | | {0xe8, 0x18}, {0xe9, 0xda}, {0xeb, 0x9c}, {0xea, 0x5e}, |
6189 | | {0xfd, 0x20}, {0xfc, 0xe2}, {0xfe, 0xa4}, {0xff, 0x66}, |
6190 | | {0xfa, 0x28}, {0xfb, 0xea}, {0xf9, 0xac}, {0xf8, 0x6e}, |
6191 | | {0xf3, 0x30}, {0xf2, 0xf2}, {0xf0, 0xb4}, {0xf1, 0x76}, |
6192 | | {0xf4, 0x38}, {0xf5, 0xfa}, {0xf7, 0xbc}, {0xf6, 0x7e}, |
6193 | | {0xd9, 0x40}, {0xd8, 0x82}, {0xda, 0xc4}, {0xdb, 0x06}, |
6194 | | {0xde, 0x48}, {0xdf, 0x8a}, {0xdd, 0xcc}, {0xdc, 0x0e}, |
6195 | | {0xd7, 0x50}, {0xd6, 0x92}, {0xd4, 0xd4}, {0xd5, 0x16}, |
6196 | | {0xd0, 0x58}, {0xd1, 0x9a}, {0xd3, 0xdc}, {0xd2, 0x1e}, |
6197 | | {0xc5, 0x60}, {0xc4, 0xa2}, {0xc6, 0xe4}, {0xc7, 0x26}, |
6198 | | {0xc2, 0x68}, {0xc3, 0xaa}, {0xc1, 0xec}, {0xc0, 0x2e}, |
6199 | | {0xcb, 0x70}, {0xca, 0xb2}, {0xc8, 0xf4}, {0xc9, 0x36}, |
6200 | | {0xcc, 0x78}, {0xcd, 0xba}, {0xcf, 0xfc}, {0xce, 0x3e}, |
6201 | | {0x91, 0x80}, {0x90, 0x42}, {0x92, 0x04}, {0x93, 0xc6}, |
6202 | | {0x96, 0x88}, {0x97, 0x4a}, {0x95, 0x0c}, {0x94, 0xce}, |
6203 | | {0x9f, 0x90}, {0x9e, 0x52}, {0x9c, 0x14}, {0x9d, 0xd6}, |
6204 | | {0x98, 0x98}, {0x99, 0x5a}, {0x9b, 0x1c}, {0x9a, 0xde}, |
6205 | | {0x8d, 0xa0}, {0x8c, 0x62}, {0x8e, 0x24}, {0x8f, 0xe6}, |
6206 | | {0x8a, 0xa8}, {0x8b, 0x6a}, {0x89, 0x2c}, {0x88, 0xee}, |
6207 | | {0x83, 0xb0}, {0x82, 0x72}, {0x80, 0x34}, {0x81, 0xf6}, |
6208 | | {0x84, 0xb8}, {0x85, 0x7a}, {0x87, 0x3c}, {0x86, 0xfe}, |
6209 | | {0xa9, 0xc0}, {0xa8, 0x02}, {0xaa, 0x44}, {0xab, 0x86}, |
6210 | | {0xae, 0xc8}, {0xaf, 0x0a}, {0xad, 0x4c}, {0xac, 0x8e}, |
6211 | | {0xa7, 0xd0}, {0xa6, 0x12}, {0xa4, 0x54}, {0xa5, 0x96}, |
6212 | | {0xa0, 0xd8}, {0xa1, 0x1a}, {0xa3, 0x5c}, {0xa2, 0x9e}, |
6213 | | {0xb5, 0xe0}, {0xb4, 0x22}, {0xb6, 0x64}, {0xb7, 0xa6}, |
6214 | | {0xb2, 0xe8}, {0xb3, 0x2a}, {0xb1, 0x6c}, {0xb0, 0xae}, |
6215 | | {0xbb, 0xf0}, {0xba, 0x32}, {0xb8, 0x74}, {0xb9, 0xb6}, |
6216 | | {0xbc, 0xf8}, {0xbd, 0x3a}, {0xbf, 0x7c}, {0xbe, 0xbe} }; |
6217 | | |
6218 | | |
6219 | | static void GMULT(byte *x, byte m[256][AES_BLOCK_SIZE]) |
6220 | | { |
6221 | | #if !defined(WORD64_AVAILABLE) || defined(BIG_ENDIAN_ORDER) |
6222 | | int i, j; |
6223 | | byte Z[AES_BLOCK_SIZE]; |
6224 | | byte a; |
6225 | | |
6226 | | XMEMSET(Z, 0, sizeof(Z)); |
6227 | | |
6228 | | for (i = 15; i > 0; i--) { |
6229 | | xorbuf(Z, m[x[i]], AES_BLOCK_SIZE); |
6230 | | a = Z[15]; |
6231 | | |
6232 | | for (j = 15; j > 0; j--) { |
6233 | | Z[j] = Z[j-1]; |
6234 | | } |
6235 | | |
6236 | | Z[0] = R[a][0]; |
6237 | | Z[1] ^= R[a][1]; |
6238 | | } |
6239 | | xorbuf(Z, m[x[0]], AES_BLOCK_SIZE); |
6240 | | |
6241 | | XMEMCPY(x, Z, AES_BLOCK_SIZE); |
6242 | | #else |
6243 | | byte Z[AES_BLOCK_SIZE + AES_BLOCK_SIZE]; |
6244 | | byte a; |
6245 | | word64* pZ; |
6246 | | word64* pm; |
6247 | | word64* px = (word64*)(x); |
6248 | | int i; |
6249 | | |
6250 | | pZ = (word64*)(Z + 15 + 1); |
6251 | | pm = (word64*)(m[x[15]]); |
6252 | | pZ[0] = pm[0]; |
6253 | | pZ[1] = pm[1]; |
6254 | | a = Z[16 + 15]; |
6255 | | Z[15] = R[a][0]; |
6256 | | Z[16] ^= R[a][1]; |
6257 | | for (i = 14; i > 0; i--) { |
6258 | | pZ = (word64*)(Z + i + 1); |
6259 | | pm = (word64*)(m[x[i]]); |
6260 | | pZ[0] ^= pm[0]; |
6261 | | pZ[1] ^= pm[1]; |
6262 | | a = Z[16 + i]; |
6263 | | Z[i] = R[a][0]; |
6264 | | Z[i+1] ^= R[a][1]; |
6265 | | } |
6266 | | pZ = (word64*)(Z + 1); |
6267 | | pm = (word64*)(m[x[0]]); |
6268 | | px[0] = pZ[0] ^ pm[0]; px[1] = pZ[1] ^ pm[1]; |
6269 | | #endif |
6270 | | } |
6271 | | |
6272 | | void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c, |
6273 | | word32 cSz, byte* s, word32 sSz) |
6274 | | { |
6275 | | byte x[AES_BLOCK_SIZE]; |
6276 | | byte scratch[AES_BLOCK_SIZE]; |
6277 | | word32 blocks, partial; |
6278 | | |
6279 | | if (aes == NULL) { |
6280 | | return; |
6281 | | } |
6282 | | |
6283 | | XMEMSET(x, 0, AES_BLOCK_SIZE); |
6284 | | |
6285 | | /* Hash in A, the Additional Authentication Data */ |
6286 | | if (aSz != 0 && a != NULL) { |
6287 | | blocks = aSz / AES_BLOCK_SIZE; |
6288 | | partial = aSz % AES_BLOCK_SIZE; |
6289 | | while (blocks--) { |
6290 | | xorbuf(x, a, AES_BLOCK_SIZE); |
6291 | | GMULT(x, aes->M0); |
6292 | | a += AES_BLOCK_SIZE; |
6293 | | } |
6294 | | if (partial != 0) { |
6295 | | XMEMSET(scratch, 0, AES_BLOCK_SIZE); |
6296 | | XMEMCPY(scratch, a, partial); |
6297 | | xorbuf(x, scratch, AES_BLOCK_SIZE); |
6298 | | GMULT(x, aes->M0); |
6299 | | } |
6300 | | } |
6301 | | |
6302 | | /* Hash in C, the Ciphertext */ |
6303 | | if (cSz != 0 && c != NULL) { |
6304 | | blocks = cSz / AES_BLOCK_SIZE; |
6305 | | partial = cSz % AES_BLOCK_SIZE; |
6306 | | while (blocks--) { |
6307 | | xorbuf(x, c, AES_BLOCK_SIZE); |
6308 | | GMULT(x, aes->M0); |
6309 | | c += AES_BLOCK_SIZE; |
6310 | | } |
6311 | | if (partial != 0) { |
6312 | | XMEMSET(scratch, 0, AES_BLOCK_SIZE); |
6313 | | XMEMCPY(scratch, c, partial); |
6314 | | xorbuf(x, scratch, AES_BLOCK_SIZE); |
6315 | | GMULT(x, aes->M0); |
6316 | | } |
6317 | | } |
6318 | | |
6319 | | /* Hash in the lengths of A and C in bits */ |
6320 | | FlattenSzInBits(&scratch[0], aSz); |
6321 | | FlattenSzInBits(&scratch[8], cSz); |
6322 | | xorbuf(x, scratch, AES_BLOCK_SIZE); |
6323 | | GMULT(x, aes->M0); |
6324 | | |
6325 | | /* Copy the result into s. */ |
6326 | | XMEMCPY(s, x, sSz); |
6327 | | } |
6328 | | |
6329 | | #ifdef WOLFSSL_AESGCM_STREAM |
6330 | | /* No extra initialization for table implementation. |
6331 | | * |
6332 | | * @param [in] aes AES GCM object. |
6333 | | */ |
6334 | | #define GHASH_INIT_EXTRA(aes) |
6335 | | |
6336 | | /* GHASH one block of data.. |
6337 | | * |
6338 | | * XOR block into tag and GMULT with H using pre-computed table. |
6339 | | * |
6340 | | * @param [in, out] aes AES GCM object. |
6341 | | * @param [in] block Block of AAD or cipher text. |
6342 | | */ |
6343 | | #define GHASH_ONE_BLOCK(aes, block) \ |
6344 | | do { \ |
6345 | | xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \ |
6346 | | GMULT(AES_TAG(aes), aes->M0); \ |
6347 | | } \ |
6348 | | while (0) |
6349 | | #endif /* WOLFSSL_AESGCM_STREAM */ |
6350 | | /* end GCM_TABLE */ |
6351 | | #elif defined(GCM_TABLE_4BIT) |
6352 | | |
6353 | | /* remainder = x^7 + x^2 + x^1 + 1 => 0xe1 |
6354 | | * R shifts right a reverse bit pair of bytes such that: |
6355 | | * R(b0, b1) => b1 = (b1 >> 1) | (b0 << 7); b0 >>= 1 |
6356 | | * 0 => 0, 0, 0, 0 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ 00,00 = 00,00 |
6357 | | * 8 => 0, 0, 0, 1 => R(R(R(00,00) ^ 00,00) ^ 00,00) ^ e1,00 = e1,00 |
6358 | | * 4 => 0, 0, 1, 0 => R(R(R(00,00) ^ 00,00) ^ e1,00) ^ 00,00 = 70,80 |
6359 | | * 2 => 0, 1, 0, 0 => R(R(R(00,00) ^ e1,00) ^ 00,00) ^ 00,00 = 38,40 |
6360 | | * 1 => 1, 0, 0, 0 => R(R(R(e1,00) ^ 00,00) ^ 00,00) ^ 00,00 = 1c,20 |
6361 | | * To calculate te rest, XOR result for each bit. |
6362 | | * e.g. 6 = 4 ^ 2 => 48,c0 |
6363 | | * |
6364 | | * Second half is same values rotated by 4-bits. |
6365 | | */ |
6366 | | #if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU) |
6367 | | static const byte R[16][2] = { |
6368 | | {0x00, 0x00}, {0x1c, 0x20}, {0x38, 0x40}, {0x24, 0x60}, |
6369 | | {0x70, 0x80}, {0x6c, 0xa0}, {0x48, 0xc0}, {0x54, 0xe0}, |
6370 | | {0xe1, 0x00}, {0xfd, 0x20}, {0xd9, 0x40}, {0xc5, 0x60}, |
6371 | | {0x91, 0x80}, {0x8d, 0xa0}, {0xa9, 0xc0}, {0xb5, 0xe0}, |
6372 | | }; |
6373 | | #else |
6374 | | static const word16 R[32] = { |
6375 | | 0x0000, 0x201c, 0x4038, 0x6024, |
6376 | | 0x8070, 0xa06c, 0xc048, 0xe054, |
6377 | | 0x00e1, 0x20fd, 0x40d9, 0x60c5, |
6378 | | 0x8091, 0xa08d, 0xc0a9, 0xe0b5, |
6379 | | |
6380 | | 0x0000, 0xc201, 0x8403, 0x4602, |
6381 | | 0x0807, 0xca06, 0x8c04, 0x4e05, |
6382 | | 0x100e, 0xd20f, 0x940d, 0x560c, |
6383 | | 0x1809, 0xda08, 0x9c0a, 0x5e0b, |
6384 | | }; |
6385 | | #endif |
6386 | | |
6387 | | /* Multiply in GF(2^128) defined by polynomial: |
6388 | | * x^128 + x^7 + x^2 + x^1 + 1. |
6389 | | * |
6390 | | * H: hash key = encrypt(key, 0) |
6391 | | * x = x * H in field |
6392 | | * |
6393 | | * x: cumlative result |
6394 | | * m: 4-bit table |
6395 | | * [0..15] * H |
6396 | | */ |
6397 | | #if defined(BIG_ENDIAN_ORDER) || defined(WC_16BIT_CPU) |
6398 | | static void GMULT(byte *x, byte m[16][AES_BLOCK_SIZE]) |
6399 | | { |
6400 | | int i, j, n; |
6401 | | byte Z[AES_BLOCK_SIZE]; |
6402 | | byte a; |
6403 | | |
6404 | | XMEMSET(Z, 0, sizeof(Z)); |
6405 | | |
6406 | | for (i = 15; i >= 0; i--) { |
6407 | | for (n = 0; n < 2; n++) { |
6408 | | if (n == 0) |
6409 | | xorbuf(Z, m[x[i] & 0xf], AES_BLOCK_SIZE); |
6410 | | else { |
6411 | | xorbuf(Z, m[x[i] >> 4], AES_BLOCK_SIZE); |
6412 | | if (i == 0) |
6413 | | break; |
6414 | | } |
6415 | | a = Z[15] & 0xf; |
6416 | | |
6417 | | for (j = 15; j > 0; j--) |
6418 | | Z[j] = (Z[j-1] << 4) | (Z[j] >> 4); |
6419 | | Z[0] >>= 4; |
6420 | | |
6421 | | Z[0] ^= R[a][0]; |
6422 | | Z[1] ^= R[a][1]; |
6423 | | } |
6424 | | } |
6425 | | |
6426 | | XMEMCPY(x, Z, AES_BLOCK_SIZE); |
6427 | | } |
6428 | | #elif defined(WC_32BIT_CPU) |
6429 | | static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE]) |
6430 | | { |
6431 | | int i; |
6432 | | word32 z8[4] = {0, 0, 0, 0}; |
6433 | | byte a; |
6434 | | word32* x8 = (word32*)x; |
6435 | | word32* m8; |
6436 | | byte xi; |
6437 | | word32 n7, n6, n5, n4, n3, n2, n1, n0; |
6438 | | |
6439 | | for (i = 15; i > 0; i--) { |
6440 | | xi = x[i]; |
6441 | | |
6442 | | /* XOR in (msn * H) */ |
6443 | | m8 = (word32*)m[xi & 0xf]; |
6444 | | z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3]; |
6445 | | |
6446 | | /* Cache top byte for remainder calculations - lost in rotate. */ |
6447 | | a = z8[3] >> 24; |
6448 | | |
6449 | | /* Rotate Z by 8-bits */ |
6450 | | z8[3] = (z8[2] >> 24) | (z8[3] << 8); |
6451 | | z8[2] = (z8[1] >> 24) | (z8[2] << 8); |
6452 | | z8[1] = (z8[0] >> 24) | (z8[1] << 8); |
6453 | | z8[0] <<= 8; |
6454 | | |
6455 | | /* XOR in (msn * remainder) [pre-rotated by 4 bits] */ |
6456 | | z8[0] ^= (word32)R[16 + (a & 0xf)]; |
6457 | | |
6458 | | xi >>= 4; |
6459 | | /* XOR in next significant nibble (XORed with H) * remainder */ |
6460 | | m8 = (word32*)m[xi]; |
6461 | | a ^= (byte)(m8[3] >> 20); |
6462 | | z8[0] ^= (word32)R[a >> 4]; |
6463 | | |
6464 | | /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */ |
6465 | | m8 = (word32*)m[16 + xi]; |
6466 | | z8[0] ^= m8[0]; z8[1] ^= m8[1]; |
6467 | | z8[2] ^= m8[2]; z8[3] ^= m8[3]; |
6468 | | } |
6469 | | |
6470 | | xi = x[0]; |
6471 | | |
6472 | | /* XOR in most significant nibble * H */ |
6473 | | m8 = (word32*)m[xi & 0xf]; |
6474 | | z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3]; |
6475 | | |
6476 | | /* Cache top byte for remainder calculations - lost in rotate. */ |
6477 | | a = (z8[3] >> 24) & 0xf; |
6478 | | |
6479 | | /* Rotate z by 4-bits */ |
6480 | | n7 = z8[3] & 0xf0f0f0f0ULL; |
6481 | | n6 = z8[3] & 0x0f0f0f0fULL; |
6482 | | n5 = z8[2] & 0xf0f0f0f0ULL; |
6483 | | n4 = z8[2] & 0x0f0f0f0fULL; |
6484 | | n3 = z8[1] & 0xf0f0f0f0ULL; |
6485 | | n2 = z8[1] & 0x0f0f0f0fULL; |
6486 | | n1 = z8[0] & 0xf0f0f0f0ULL; |
6487 | | n0 = z8[0] & 0x0f0f0f0fULL; |
6488 | | z8[3] = (n7 >> 4) | (n6 << 12) | (n4 >> 20); |
6489 | | z8[2] = (n5 >> 4) | (n4 << 12) | (n2 >> 20); |
6490 | | z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 20); |
6491 | | z8[0] = (n1 >> 4) | (n0 << 12); |
6492 | | |
6493 | | /* XOR in most significant nibble * remainder */ |
6494 | | z8[0] ^= (word32)R[a]; |
6495 | | /* XOR in next significant nibble * H */ |
6496 | | m8 = (word32*)m[xi >> 4]; |
6497 | | z8[0] ^= m8[0]; z8[1] ^= m8[1]; z8[2] ^= m8[2]; z8[3] ^= m8[3]; |
6498 | | |
6499 | | /* Write back result. */ |
6500 | | x8[0] = z8[0]; x8[1] = z8[1]; x8[2] = z8[2]; x8[3] = z8[3]; |
6501 | | } |
6502 | | #else |
6503 | | static WC_INLINE void GMULT(byte *x, byte m[32][AES_BLOCK_SIZE]) |
6504 | 0 | { |
6505 | 0 | int i; |
6506 | 0 | word64 z8[2] = {0, 0}; |
6507 | 0 | byte a; |
6508 | 0 | word64* x8 = (word64*)x; |
6509 | 0 | word64* m8; |
6510 | 0 | word64 n0, n1, n2, n3; |
6511 | 0 | byte xi; |
6512 | |
|
6513 | 0 | for (i = 15; i > 0; i--) { |
6514 | 0 | xi = x[i]; |
6515 | | |
6516 | | /* XOR in (msn * H) */ |
6517 | 0 | m8 = (word64*)m[xi & 0xf]; |
6518 | 0 | z8[0] ^= m8[0]; |
6519 | 0 | z8[1] ^= m8[1]; |
6520 | | |
6521 | | /* Cache top byte for remainder calculations - lost in rotate. */ |
6522 | 0 | a = z8[1] >> 56; |
6523 | | |
6524 | | /* Rotate Z by 8-bits */ |
6525 | 0 | z8[1] = (z8[0] >> 56) | (z8[1] << 8); |
6526 | 0 | z8[0] <<= 8; |
6527 | | |
6528 | | /* XOR in (next significant nibble * H) [pre-rotated by 4 bits] */ |
6529 | 0 | m8 = (word64*)m[16 + (xi >> 4)]; |
6530 | 0 | z8[0] ^= m8[0]; |
6531 | 0 | z8[1] ^= m8[1]; |
6532 | | |
6533 | | /* XOR in (msn * remainder) [pre-rotated by 4 bits] */ |
6534 | 0 | z8[0] ^= (word64)R[16 + (a & 0xf)]; |
6535 | | /* XOR in next significant nibble (XORed with H) * remainder */ |
6536 | 0 | m8 = (word64*)m[xi >> 4]; |
6537 | 0 | a ^= (byte)(m8[1] >> 52); |
6538 | 0 | z8[0] ^= (word64)R[a >> 4]; |
6539 | 0 | } |
6540 | |
|
6541 | 0 | xi = x[0]; |
6542 | | |
6543 | | /* XOR in most significant nibble * H */ |
6544 | 0 | m8 = (word64*)m[xi & 0xf]; |
6545 | 0 | z8[0] ^= m8[0]; |
6546 | 0 | z8[1] ^= m8[1]; |
6547 | | |
6548 | | /* Cache top byte for remainder calculations - lost in rotate. */ |
6549 | 0 | a = (z8[1] >> 56) & 0xf; |
6550 | | |
6551 | | /* Rotate z by 4-bits */ |
6552 | 0 | n3 = z8[1] & 0xf0f0f0f0f0f0f0f0ULL; |
6553 | 0 | n2 = z8[1] & 0x0f0f0f0f0f0f0f0fULL; |
6554 | 0 | n1 = z8[0] & 0xf0f0f0f0f0f0f0f0ULL; |
6555 | 0 | n0 = z8[0] & 0x0f0f0f0f0f0f0f0fULL; |
6556 | 0 | z8[1] = (n3 >> 4) | (n2 << 12) | (n0 >> 52); |
6557 | 0 | z8[0] = (n1 >> 4) | (n0 << 12); |
6558 | | |
6559 | | /* XOR in next significant nibble * H */ |
6560 | 0 | m8 = (word64*)m[xi >> 4]; |
6561 | 0 | z8[0] ^= m8[0]; |
6562 | 0 | z8[1] ^= m8[1]; |
6563 | | /* XOR in most significant nibble * remainder */ |
6564 | 0 | z8[0] ^= (word64)R[a]; |
6565 | | |
6566 | | /* Write back result. */ |
6567 | 0 | x8[0] = z8[0]; |
6568 | 0 | x8[1] = z8[1]; |
6569 | 0 | } |
6570 | | #endif |
6571 | | |
6572 | | void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c, |
6573 | | word32 cSz, byte* s, word32 sSz) |
6574 | 0 | { |
6575 | 0 | byte x[AES_BLOCK_SIZE]; |
6576 | 0 | byte scratch[AES_BLOCK_SIZE]; |
6577 | 0 | word32 blocks, partial; |
6578 | |
|
6579 | 0 | if (aes == NULL) { |
6580 | 0 | return; |
6581 | 0 | } |
6582 | | |
6583 | 0 | XMEMSET(x, 0, AES_BLOCK_SIZE); |
6584 | | |
6585 | | /* Hash in A, the Additional Authentication Data */ |
6586 | 0 | if (aSz != 0 && a != NULL) { |
6587 | 0 | blocks = aSz / AES_BLOCK_SIZE; |
6588 | 0 | partial = aSz % AES_BLOCK_SIZE; |
6589 | 0 | while (blocks--) { |
6590 | 0 | xorbuf(x, a, AES_BLOCK_SIZE); |
6591 | 0 | GMULT(x, aes->M0); |
6592 | 0 | a += AES_BLOCK_SIZE; |
6593 | 0 | } |
6594 | 0 | if (partial != 0) { |
6595 | 0 | XMEMSET(scratch, 0, AES_BLOCK_SIZE); |
6596 | 0 | XMEMCPY(scratch, a, partial); |
6597 | 0 | xorbuf(x, scratch, AES_BLOCK_SIZE); |
6598 | 0 | GMULT(x, aes->M0); |
6599 | 0 | } |
6600 | 0 | } |
6601 | | |
6602 | | /* Hash in C, the Ciphertext */ |
6603 | 0 | if (cSz != 0 && c != NULL) { |
6604 | 0 | blocks = cSz / AES_BLOCK_SIZE; |
6605 | 0 | partial = cSz % AES_BLOCK_SIZE; |
6606 | 0 | while (blocks--) { |
6607 | 0 | xorbuf(x, c, AES_BLOCK_SIZE); |
6608 | 0 | GMULT(x, aes->M0); |
6609 | 0 | c += AES_BLOCK_SIZE; |
6610 | 0 | } |
6611 | 0 | if (partial != 0) { |
6612 | 0 | XMEMSET(scratch, 0, AES_BLOCK_SIZE); |
6613 | 0 | XMEMCPY(scratch, c, partial); |
6614 | 0 | xorbuf(x, scratch, AES_BLOCK_SIZE); |
6615 | 0 | GMULT(x, aes->M0); |
6616 | 0 | } |
6617 | 0 | } |
6618 | | |
6619 | | /* Hash in the lengths of A and C in bits */ |
6620 | 0 | FlattenSzInBits(&scratch[0], aSz); |
6621 | 0 | FlattenSzInBits(&scratch[8], cSz); |
6622 | 0 | xorbuf(x, scratch, AES_BLOCK_SIZE); |
6623 | 0 | GMULT(x, aes->M0); |
6624 | | |
6625 | | /* Copy the result into s. */ |
6626 | 0 | XMEMCPY(s, x, sSz); |
6627 | 0 | } |
6628 | | |
6629 | | #ifdef WOLFSSL_AESGCM_STREAM |
6630 | | /* No extra initialization for 4-bit table implementation. |
6631 | | * |
6632 | | * @param [in] aes AES GCM object. |
6633 | | */ |
6634 | | #define GHASH_INIT_EXTRA(aes) |
6635 | | |
6636 | | /* GHASH one block of data.. |
6637 | | * |
6638 | | * XOR block into tag and GMULT with H using pre-computed table. |
6639 | | * |
6640 | | * @param [in, out] aes AES GCM object. |
6641 | | * @param [in] block Block of AAD or cipher text. |
6642 | | */ |
6643 | | #define GHASH_ONE_BLOCK(aes, block) \ |
6644 | 0 | do { \ |
6645 | 0 | xorbuf(AES_TAG(aes), block, AES_BLOCK_SIZE); \ |
6646 | 0 | GMULT(AES_TAG(aes), (aes)->M0); \ |
6647 | 0 | } \ |
6648 | 0 | while (0) |
6649 | | #endif /* WOLFSSL_AESGCM_STREAM */ |
6650 | | #elif defined(WORD64_AVAILABLE) && !defined(GCM_WORD32) |
6651 | | |
6652 | | #if !defined(FREESCALE_LTC_AES_GCM) |
6653 | | static void GMULT(word64* X, word64* Y) |
6654 | | { |
6655 | | word64 Z[2] = {0,0}; |
6656 | | word64 V[2]; |
6657 | | int i, j; |
6658 | | word64 v1; |
6659 | | V[0] = X[0]; V[1] = X[1]; |
6660 | | |
6661 | | for (i = 0; i < 2; i++) |
6662 | | { |
6663 | | word64 y = Y[i]; |
6664 | | for (j = 0; j < 64; j++) |
6665 | | { |
6666 | | #ifndef AES_GCM_GMULT_NCT |
6667 | | word64 mask = 0 - (y >> 63); |
6668 | | Z[0] ^= V[0] & mask; |
6669 | | Z[1] ^= V[1] & mask; |
6670 | | #else |
6671 | | if (y & 0x8000000000000000ULL) { |
6672 | | Z[0] ^= V[0]; |
6673 | | Z[1] ^= V[1]; |
6674 | | } |
6675 | | #endif |
6676 | | |
6677 | | v1 = (0 - (V[1] & 1)) & 0xE100000000000000ULL; |
6678 | | V[1] >>= 1; |
6679 | | V[1] |= V[0] << 63; |
6680 | | V[0] >>= 1; |
6681 | | V[0] ^= v1; |
6682 | | y <<= 1; |
6683 | | } |
6684 | | } |
6685 | | X[0] = Z[0]; |
6686 | | X[1] = Z[1]; |
6687 | | } |
6688 | | |
6689 | | |
6690 | | void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c, |
6691 | | word32 cSz, byte* s, word32 sSz) |
6692 | | { |
6693 | | word64 x[2] = {0,0}; |
6694 | | word32 blocks, partial; |
6695 | | word64 bigH[2]; |
6696 | | |
6697 | | if (aes == NULL) { |
6698 | | return; |
6699 | | } |
6700 | | |
6701 | | XMEMCPY(bigH, aes->H, AES_BLOCK_SIZE); |
6702 | | #ifdef LITTLE_ENDIAN_ORDER |
6703 | | ByteReverseWords64(bigH, bigH, AES_BLOCK_SIZE); |
6704 | | #endif |
6705 | | |
6706 | | /* Hash in A, the Additional Authentication Data */ |
6707 | | if (aSz != 0 && a != NULL) { |
6708 | | word64 bigA[2]; |
6709 | | blocks = aSz / AES_BLOCK_SIZE; |
6710 | | partial = aSz % AES_BLOCK_SIZE; |
6711 | | while (blocks--) { |
6712 | | XMEMCPY(bigA, a, AES_BLOCK_SIZE); |
6713 | | #ifdef LITTLE_ENDIAN_ORDER |
6714 | | ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE); |
6715 | | #endif |
6716 | | x[0] ^= bigA[0]; |
6717 | | x[1] ^= bigA[1]; |
6718 | | GMULT(x, bigH); |
6719 | | a += AES_BLOCK_SIZE; |
6720 | | } |
6721 | | if (partial != 0) { |
6722 | | XMEMSET(bigA, 0, AES_BLOCK_SIZE); |
6723 | | XMEMCPY(bigA, a, partial); |
6724 | | #ifdef LITTLE_ENDIAN_ORDER |
6725 | | ByteReverseWords64(bigA, bigA, AES_BLOCK_SIZE); |
6726 | | #endif |
6727 | | x[0] ^= bigA[0]; |
6728 | | x[1] ^= bigA[1]; |
6729 | | GMULT(x, bigH); |
6730 | | } |
6731 | | #ifdef OPENSSL_EXTRA |
6732 | | /* store AAD partial tag for next call */ |
6733 | | aes->aadH[0] = (word32)((x[0] & 0xFFFFFFFF00000000ULL) >> 32); |
6734 | | aes->aadH[1] = (word32)(x[0] & 0xFFFFFFFF); |
6735 | | aes->aadH[2] = (word32)((x[1] & 0xFFFFFFFF00000000ULL) >> 32); |
6736 | | aes->aadH[3] = (word32)(x[1] & 0xFFFFFFFF); |
6737 | | #endif |
6738 | | } |
6739 | | |
6740 | | /* Hash in C, the Ciphertext */ |
6741 | | if (cSz != 0 && c != NULL) { |
6742 | | word64 bigC[2]; |
6743 | | blocks = cSz / AES_BLOCK_SIZE; |
6744 | | partial = cSz % AES_BLOCK_SIZE; |
6745 | | #ifdef OPENSSL_EXTRA |
6746 | | /* Start from last AAD partial tag */ |
6747 | | if(aes->aadLen) { |
6748 | | x[0] = ((word64)aes->aadH[0]) << 32 | aes->aadH[1]; |
6749 | | x[1] = ((word64)aes->aadH[2]) << 32 | aes->aadH[3]; |
6750 | | } |
6751 | | #endif |
6752 | | while (blocks--) { |
6753 | | XMEMCPY(bigC, c, AES_BLOCK_SIZE); |
6754 | | #ifdef LITTLE_ENDIAN_ORDER |
6755 | | ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE); |
6756 | | #endif |
6757 | | x[0] ^= bigC[0]; |
6758 | | x[1] ^= bigC[1]; |
6759 | | GMULT(x, bigH); |
6760 | | c += AES_BLOCK_SIZE; |
6761 | | } |
6762 | | if (partial != 0) { |
6763 | | XMEMSET(bigC, 0, AES_BLOCK_SIZE); |
6764 | | XMEMCPY(bigC, c, partial); |
6765 | | #ifdef LITTLE_ENDIAN_ORDER |
6766 | | ByteReverseWords64(bigC, bigC, AES_BLOCK_SIZE); |
6767 | | #endif |
6768 | | x[0] ^= bigC[0]; |
6769 | | x[1] ^= bigC[1]; |
6770 | | GMULT(x, bigH); |
6771 | | } |
6772 | | } |
6773 | | |
6774 | | /* Hash in the lengths in bits of A and C */ |
6775 | | { |
6776 | | word64 len[2]; |
6777 | | len[0] = aSz; len[1] = cSz; |
6778 | | #ifdef OPENSSL_EXTRA |
6779 | | if (aes->aadLen) |
6780 | | len[0] = (word64)aes->aadLen; |
6781 | | #endif |
6782 | | /* Lengths are in bytes. Convert to bits. */ |
6783 | | len[0] *= 8; |
6784 | | len[1] *= 8; |
6785 | | |
6786 | | x[0] ^= len[0]; |
6787 | | x[1] ^= len[1]; |
6788 | | GMULT(x, bigH); |
6789 | | } |
6790 | | #ifdef LITTLE_ENDIAN_ORDER |
6791 | | ByteReverseWords64(x, x, AES_BLOCK_SIZE); |
6792 | | #endif |
6793 | | XMEMCPY(s, x, sSz); |
6794 | | } |
6795 | | #endif /* !FREESCALE_LTC_AES_GCM */ |
6796 | | |
6797 | | #ifdef WOLFSSL_AESGCM_STREAM |
6798 | | |
6799 | | #ifdef LITTLE_ENDIAN_ORDER |
6800 | | |
6801 | | /* No extra initialization for small implementation. |
6802 | | * |
6803 | | * @param [in] aes AES GCM object. |
6804 | | */ |
6805 | | #define GHASH_INIT_EXTRA(aes) \ |
6806 | | ByteReverseWords64((word64*)aes->H, (word64*)aes->H, AES_BLOCK_SIZE) |
6807 | | |
6808 | | /* GHASH one block of data.. |
6809 | | * |
6810 | | * XOR block into tag and GMULT with H. |
6811 | | * |
6812 | | * @param [in, out] aes AES GCM object. |
6813 | | * @param [in] block Block of AAD or cipher text. |
6814 | | */ |
6815 | | #define GHASH_ONE_BLOCK(aes, block) \ |
6816 | | do { \ |
6817 | | word64* x = (word64*)AES_TAG(aes); \ |
6818 | | word64* h = (word64*)aes->H; \ |
6819 | | word64 block64[2]; \ |
6820 | | XMEMCPY(block64, block, AES_BLOCK_SIZE); \ |
6821 | | ByteReverseWords64(block64, block64, AES_BLOCK_SIZE); \ |
6822 | | x[0] ^= block64[0]; \ |
6823 | | x[1] ^= block64[1]; \ |
6824 | | GMULT(x, h); \ |
6825 | | } \ |
6826 | | while (0) |
6827 | | |
6828 | | #ifdef OPENSSL_EXTRA |
6829 | | /* GHASH in AAD and cipher text lengths in bits. |
6830 | | * |
6831 | | * Convert tag back to little-endian. |
6832 | | * |
6833 | | * @param [in, out] aes AES GCM object. |
6834 | | */ |
6835 | | #define GHASH_LEN_BLOCK(aes) \ |
6836 | | do { \ |
6837 | | word64* x = (word64*)AES_TAG(aes); \ |
6838 | | word64* h = (word64*)aes->H; \ |
6839 | | word64 len[2]; \ |
6840 | | len[0] = aes->aSz; len[1] = aes->cSz; \ |
6841 | | if (aes->aadLen) \ |
6842 | | len[0] = (word64)aes->aadLen; \ |
6843 | | /* Lengths are in bytes. Convert to bits. */ \ |
6844 | | len[0] *= 8; \ |
6845 | | len[1] *= 8; \ |
6846 | | \ |
6847 | | x[0] ^= len[0]; \ |
6848 | | x[1] ^= len[1]; \ |
6849 | | GMULT(x, h); \ |
6850 | | ByteReverseWords64(x, x, AES_BLOCK_SIZE); \ |
6851 | | } \ |
6852 | | while (0) |
6853 | | #else |
6854 | | /* GHASH in AAD and cipher text lengths in bits. |
6855 | | * |
6856 | | * Convert tag back to little-endian. |
6857 | | * |
6858 | | * @param [in, out] aes AES GCM object. |
6859 | | */ |
6860 | | #define GHASH_LEN_BLOCK(aes) \ |
6861 | | do { \ |
6862 | | word64* x = (word64*)AES_TAG(aes); \ |
6863 | | word64* h = (word64*)aes->H; \ |
6864 | | word64 len[2]; \ |
6865 | | len[0] = aes->aSz; len[1] = aes->cSz; \ |
6866 | | /* Lengths are in bytes. Convert to bits. */ \ |
6867 | | len[0] *= 8; \ |
6868 | | len[1] *= 8; \ |
6869 | | \ |
6870 | | x[0] ^= len[0]; \ |
6871 | | x[1] ^= len[1]; \ |
6872 | | GMULT(x, h); \ |
6873 | | ByteReverseWords64(x, x, AES_BLOCK_SIZE); \ |
6874 | | } \ |
6875 | | while (0) |
6876 | | #endif |
6877 | | |
6878 | | #else |
6879 | | |
6880 | | /* No extra initialization for small implementation. |
6881 | | * |
6882 | | * @param [in] aes AES GCM object. |
6883 | | */ |
6884 | | #define GHASH_INIT_EXTRA(aes) |
6885 | | |
6886 | | /* GHASH one block of data.. |
6887 | | * |
6888 | | * XOR block into tag and GMULT with H. |
6889 | | * |
6890 | | * @param [in, out] aes AES GCM object. |
6891 | | * @param [in] block Block of AAD or cipher text. |
6892 | | */ |
6893 | | #define GHASH_ONE_BLOCK(aes, block) \ |
6894 | | do { \ |
6895 | | word64* x = (word64*)AES_TAG(aes); \ |
6896 | | word64* h = (word64*)aes->H; \ |
6897 | | word64 block64[2]; \ |
6898 | | XMEMCPY(block64, block, AES_BLOCK_SIZE); \ |
6899 | | x[0] ^= block64[0]; \ |
6900 | | x[1] ^= block64[1]; \ |
6901 | | GMULT(x, h); \ |
6902 | | } \ |
6903 | | while (0) |
6904 | | |
6905 | | #ifdef OPENSSL_EXTRA |
6906 | | /* GHASH in AAD and cipher text lengths in bits. |
6907 | | * |
6908 | | * Convert tag back to little-endian. |
6909 | | * |
6910 | | * @param [in, out] aes AES GCM object. |
6911 | | */ |
6912 | | #define GHASH_LEN_BLOCK(aes) \ |
6913 | | do { \ |
6914 | | word64* x = (word64*)AES_TAG(aes); \ |
6915 | | word64* h = (word64*)aes->H; \ |
6916 | | word64 len[2]; \ |
6917 | | len[0] = aes->aSz; len[1] = aes->cSz; \ |
6918 | | if (aes->aadLen) \ |
6919 | | len[0] = (word64)aes->aadLen; \ |
6920 | | /* Lengths are in bytes. Convert to bits. */ \ |
6921 | | len[0] *= 8; \ |
6922 | | len[1] *= 8; \ |
6923 | | \ |
6924 | | x[0] ^= len[0]; \ |
6925 | | x[1] ^= len[1]; \ |
6926 | | GMULT(x, h); \ |
6927 | | } \ |
6928 | | while (0) |
6929 | | #else |
6930 | | /* GHASH in AAD and cipher text lengths in bits. |
6931 | | * |
6932 | | * Convert tag back to little-endian. |
6933 | | * |
6934 | | * @param [in, out] aes AES GCM object. |
6935 | | */ |
6936 | | #define GHASH_LEN_BLOCK(aes) \ |
6937 | | do { \ |
6938 | | word64* x = (word64*)AES_TAG(aes); \ |
6939 | | word64* h = (word64*)aes->H; \ |
6940 | | word64 len[2]; \ |
6941 | | len[0] = aes->aSz; len[1] = aes->cSz; \ |
6942 | | /* Lengths are in bytes. Convert to bits. */ \ |
6943 | | len[0] *= 8; \ |
6944 | | len[1] *= 8; \ |
6945 | | \ |
6946 | | x[0] ^= len[0]; \ |
6947 | | x[1] ^= len[1]; \ |
6948 | | GMULT(x, h); \ |
6949 | | } \ |
6950 | | while (0) |
6951 | | #endif |
6952 | | |
6953 | | #endif /* !LITTLE_ENDIAN_ORDER */ |
6954 | | |
6955 | | #endif /* WOLFSSL_AESGCM_STREAM */ |
6956 | | /* end defined(WORD64_AVAILABLE) && !defined(GCM_WORD32) */ |
6957 | | #else /* GCM_WORD32 */ |
6958 | | |
6959 | | static void GMULT(word32* X, word32* Y) |
6960 | | { |
6961 | | word32 Z[4] = {0,0,0,0}; |
6962 | | word32 V[4]; |
6963 | | int i, j; |
6964 | | |
6965 | | V[0] = X[0]; V[1] = X[1]; V[2] = X[2]; V[3] = X[3]; |
6966 | | |
6967 | | for (i = 0; i < 4; i++) |
6968 | | { |
6969 | | word32 y = Y[i]; |
6970 | | for (j = 0; j < 32; j++) |
6971 | | { |
6972 | | if (y & 0x80000000) { |
6973 | | Z[0] ^= V[0]; |
6974 | | Z[1] ^= V[1]; |
6975 | | Z[2] ^= V[2]; |
6976 | | Z[3] ^= V[3]; |
6977 | | } |
6978 | | |
6979 | | if (V[3] & 0x00000001) { |
6980 | | V[3] >>= 1; |
6981 | | V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0); |
6982 | | V[2] >>= 1; |
6983 | | V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0); |
6984 | | V[1] >>= 1; |
6985 | | V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0); |
6986 | | V[0] >>= 1; |
6987 | | V[0] ^= 0xE1000000; |
6988 | | } else { |
6989 | | V[3] >>= 1; |
6990 | | V[3] |= ((V[2] & 0x00000001) ? 0x80000000 : 0); |
6991 | | V[2] >>= 1; |
6992 | | V[2] |= ((V[1] & 0x00000001) ? 0x80000000 : 0); |
6993 | | V[1] >>= 1; |
6994 | | V[1] |= ((V[0] & 0x00000001) ? 0x80000000 : 0); |
6995 | | V[0] >>= 1; |
6996 | | } |
6997 | | y <<= 1; |
6998 | | } |
6999 | | } |
7000 | | X[0] = Z[0]; |
7001 | | X[1] = Z[1]; |
7002 | | X[2] = Z[2]; |
7003 | | X[3] = Z[3]; |
7004 | | } |
7005 | | |
7006 | | |
7007 | | void GHASH(Aes* aes, const byte* a, word32 aSz, const byte* c, |
7008 | | word32 cSz, byte* s, word32 sSz) |
7009 | | { |
7010 | | word32 x[4] = {0,0,0,0}; |
7011 | | word32 blocks, partial; |
7012 | | word32 bigH[4]; |
7013 | | |
7014 | | if (aes == NULL) { |
7015 | | return; |
7016 | | } |
7017 | | |
7018 | | XMEMCPY(bigH, aes->H, AES_BLOCK_SIZE); |
7019 | | #ifdef LITTLE_ENDIAN_ORDER |
7020 | | ByteReverseWords(bigH, bigH, AES_BLOCK_SIZE); |
7021 | | #endif |
7022 | | |
7023 | | /* Hash in A, the Additional Authentication Data */ |
7024 | | if (aSz != 0 && a != NULL) { |
7025 | | word32 bigA[4]; |
7026 | | blocks = aSz / AES_BLOCK_SIZE; |
7027 | | partial = aSz % AES_BLOCK_SIZE; |
7028 | | while (blocks--) { |
7029 | | XMEMCPY(bigA, a, AES_BLOCK_SIZE); |
7030 | | #ifdef LITTLE_ENDIAN_ORDER |
7031 | | ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE); |
7032 | | #endif |
7033 | | x[0] ^= bigA[0]; |
7034 | | x[1] ^= bigA[1]; |
7035 | | x[2] ^= bigA[2]; |
7036 | | x[3] ^= bigA[3]; |
7037 | | GMULT(x, bigH); |
7038 | | a += AES_BLOCK_SIZE; |
7039 | | } |
7040 | | if (partial != 0) { |
7041 | | XMEMSET(bigA, 0, AES_BLOCK_SIZE); |
7042 | | XMEMCPY(bigA, a, partial); |
7043 | | #ifdef LITTLE_ENDIAN_ORDER |
7044 | | ByteReverseWords(bigA, bigA, AES_BLOCK_SIZE); |
7045 | | #endif |
7046 | | x[0] ^= bigA[0]; |
7047 | | x[1] ^= bigA[1]; |
7048 | | x[2] ^= bigA[2]; |
7049 | | x[3] ^= bigA[3]; |
7050 | | GMULT(x, bigH); |
7051 | | } |
7052 | | } |
7053 | | |
7054 | | /* Hash in C, the Ciphertext */ |
7055 | | if (cSz != 0 && c != NULL) { |
7056 | | word32 bigC[4]; |
7057 | | blocks = cSz / AES_BLOCK_SIZE; |
7058 | | partial = cSz % AES_BLOCK_SIZE; |
7059 | | while (blocks--) { |
7060 | | XMEMCPY(bigC, c, AES_BLOCK_SIZE); |
7061 | | #ifdef LITTLE_ENDIAN_ORDER |
7062 | | ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE); |
7063 | | #endif |
7064 | | x[0] ^= bigC[0]; |
7065 | | x[1] ^= bigC[1]; |
7066 | | x[2] ^= bigC[2]; |
7067 | | x[3] ^= bigC[3]; |
7068 | | GMULT(x, bigH); |
7069 | | c += AES_BLOCK_SIZE; |
7070 | | } |
7071 | | if (partial != 0) { |
7072 | | XMEMSET(bigC, 0, AES_BLOCK_SIZE); |
7073 | | XMEMCPY(bigC, c, partial); |
7074 | | #ifdef LITTLE_ENDIAN_ORDER |
7075 | | ByteReverseWords(bigC, bigC, AES_BLOCK_SIZE); |
7076 | | #endif |
7077 | | x[0] ^= bigC[0]; |
7078 | | x[1] ^= bigC[1]; |
7079 | | x[2] ^= bigC[2]; |
7080 | | x[3] ^= bigC[3]; |
7081 | | GMULT(x, bigH); |
7082 | | } |
7083 | | } |
7084 | | |
7085 | | /* Hash in the lengths in bits of A and C */ |
7086 | | { |
7087 | | word32 len[4]; |
7088 | | |
7089 | | /* Lengths are in bytes. Convert to bits. */ |
7090 | | len[0] = (aSz >> (8*sizeof(aSz) - 3)); |
7091 | | len[1] = aSz << 3; |
7092 | | len[2] = (cSz >> (8*sizeof(cSz) - 3)); |
7093 | | len[3] = cSz << 3; |
7094 | | |
7095 | | x[0] ^= len[0]; |
7096 | | x[1] ^= len[1]; |
7097 | | x[2] ^= len[2]; |
7098 | | x[3] ^= len[3]; |
7099 | | GMULT(x, bigH); |
7100 | | } |
7101 | | #ifdef LITTLE_ENDIAN_ORDER |
7102 | | ByteReverseWords(x, x, AES_BLOCK_SIZE); |
7103 | | #endif |
7104 | | XMEMCPY(s, x, sSz); |
7105 | | } |
7106 | | |
7107 | | #ifdef WOLFSSL_AESGCM_STREAM |
7108 | | #ifdef LITTLE_ENDIAN_ORDER |
7109 | | /* Little-endian 32-bit word implementation requires byte reversal of H. |
7110 | | * |
7111 | | * H is all-zeros block encrypted with key. |
7112 | | * |
7113 | | * @param [in, out] aes AES GCM object. |
7114 | | */ |
7115 | | #define GHASH_INIT_EXTRA(aes) \ |
7116 | | ByteReverseWords((word32*)aes->H, (word32*)aes->H, AES_BLOCK_SIZE) |
7117 | | |
7118 | | /* GHASH one block of data.. |
7119 | | * |
7120 | | * XOR block, in big-endian form, into tag and GMULT with H. |
7121 | | * |
7122 | | * @param [in, out] aes AES GCM object. |
7123 | | * @param [in] block Block of AAD or cipher text. |
7124 | | */ |
7125 | | #define GHASH_ONE_BLOCK(aes, block) \ |
7126 | | do { \ |
7127 | | word32* x = (word32*)AES_TAG(aes); \ |
7128 | | word32* h = (word32*)aes->H; \ |
7129 | | word32 bigEnd[4]; \ |
7130 | | XMEMCPY(bigEnd, block, AES_BLOCK_SIZE); \ |
7131 | | ByteReverseWords(bigEnd, bigEnd, AES_BLOCK_SIZE); \ |
7132 | | x[0] ^= bigEnd[0]; \ |
7133 | | x[1] ^= bigEnd[1]; \ |
7134 | | x[2] ^= bigEnd[2]; \ |
7135 | | x[3] ^= bigEnd[3]; \ |
7136 | | GMULT(x, h); \ |
7137 | | } \ |
7138 | | while (0) |
7139 | | |
7140 | | /* GHASH in AAD and cipher text lengths in bits. |
7141 | | * |
7142 | | * Convert tag back to little-endian. |
7143 | | * |
7144 | | * @param [in, out] aes AES GCM object. |
7145 | | */ |
7146 | | #define GHASH_LEN_BLOCK(aes) \ |
7147 | | do { \ |
7148 | | word32 len[4]; \ |
7149 | | word32* x = (word32*)AES_TAG(aes); \ |
7150 | | word32* h = (word32*)aes->H; \ |
7151 | | len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3)); \ |
7152 | | len[1] = aes->aSz << 3; \ |
7153 | | len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3)); \ |
7154 | | len[3] = aes->cSz << 3; \ |
7155 | | x[0] ^= len[0]; \ |
7156 | | x[1] ^= len[1]; \ |
7157 | | x[2] ^= len[2]; \ |
7158 | | x[3] ^= len[3]; \ |
7159 | | GMULT(x, h); \ |
7160 | | ByteReverseWords(x, x, AES_BLOCK_SIZE); \ |
7161 | | } \ |
7162 | | while (0) |
7163 | | #else |
7164 | | /* No extra initialization for 32-bit word implementation. |
7165 | | * |
7166 | | * @param [in] aes AES GCM object. |
7167 | | */ |
7168 | | #define GHASH_INIT_EXTRA(aes) |
7169 | | |
7170 | | /* GHASH one block of data.. |
7171 | | * |
7172 | | * XOR block into tag and GMULT with H. |
7173 | | * |
7174 | | * @param [in, out] aes AES GCM object. |
7175 | | * @param [in] block Block of AAD or cipher text. |
7176 | | */ |
7177 | | #define GHASH_ONE_BLOCK(aes, block) \ |
7178 | | do { \ |
7179 | | word32* x = (word32*)AES_TAG(aes); \ |
7180 | | word32* h = (word32*)aes->H; \ |
7181 | | word32 block32[4]; \ |
7182 | | XMEMCPY(block32, block, AES_BLOCK_SIZE); \ |
7183 | | x[0] ^= block32[0]; \ |
7184 | | x[1] ^= block32[1]; \ |
7185 | | x[2] ^= block32[2]; \ |
7186 | | x[3] ^= block32[3]; \ |
7187 | | GMULT(x, h); \ |
7188 | | } \ |
7189 | | while (0) |
7190 | | |
7191 | | /* GHASH in AAD and cipher text lengths in bits. |
7192 | | * |
7193 | | * @param [in, out] aes AES GCM object. |
7194 | | */ |
7195 | | #define GHASH_LEN_BLOCK(aes) \ |
7196 | | do { \ |
7197 | | word32 len[4]; \ |
7198 | | word32* x = (word32*)AES_TAG(aes); \ |
7199 | | word32* h = (word32*)aes->H; \ |
7200 | | len[0] = (aes->aSz >> (8*sizeof(aes->aSz) - 3)); \ |
7201 | | len[1] = aes->aSz << 3; \ |
7202 | | len[2] = (aes->cSz >> (8*sizeof(aes->cSz) - 3)); \ |
7203 | | len[3] = aes->cSz << 3; \ |
7204 | | x[0] ^= len[0]; \ |
7205 | | x[1] ^= len[1]; \ |
7206 | | x[2] ^= len[2]; \ |
7207 | | x[3] ^= len[3]; \ |
7208 | | GMULT(x, h); \ |
7209 | | } \ |
7210 | | while (0) |
7211 | | #endif /* LITTLE_ENDIAN_ORDER */ |
7212 | | #endif /* WOLFSSL_AESGCM_STREAM */ |
7213 | | #endif /* end GCM_WORD32 */ |
7214 | | |
7215 | | #if !defined(WOLFSSL_XILINX_CRYPT) && !defined(WOLFSSL_AFALG_XILINX_AES) |
7216 | | #ifdef WOLFSSL_AESGCM_STREAM |
7217 | | #ifndef GHASH_LEN_BLOCK |
7218 | | /* Hash in the lengths of the AAD and cipher text in bits. |
7219 | | * |
7220 | | * Default implementation. |
7221 | | * |
7222 | | * @param [in, out] aes AES GCM object. |
7223 | | */ |
7224 | | #define GHASH_LEN_BLOCK(aes) \ |
7225 | 0 | do { \ |
7226 | 0 | byte scratch[AES_BLOCK_SIZE]; \ |
7227 | 0 | FlattenSzInBits(&scratch[0], (aes)->aSz); \ |
7228 | 0 | FlattenSzInBits(&scratch[8], (aes)->cSz); \ |
7229 | 0 | GHASH_ONE_BLOCK(aes, scratch); \ |
7230 | 0 | } \ |
7231 | 0 | while (0) |
7232 | | #endif |
7233 | | |
7234 | | /* Initialize a GHASH for streaming operations. |
7235 | | * |
7236 | | * @param [in, out] aes AES GCM object. |
7237 | | */ |
7238 | 0 | static void GHASH_INIT(Aes* aes) { |
7239 | | /* Set tag to all zeros as initial value. */ |
7240 | 0 | XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE); |
7241 | | /* Reset counts of AAD and cipher text. */ |
7242 | 0 | aes->aOver = 0; |
7243 | 0 | aes->cOver = 0; |
7244 | | /* Extra initialization baed on implementation. */ |
7245 | 0 | GHASH_INIT_EXTRA(aes); |
7246 | 0 | } |
7247 | | |
7248 | | /* Update the GHASH with AAD and/or cipher text. |
7249 | | * |
7250 | | * @param [in,out] aes AES GCM object. |
7251 | | * @param [in] a Additional authentication data buffer. |
7252 | | * @param [in] aSz Size of data in AAD buffer. |
7253 | | * @param [in] c Cipher text buffer. |
7254 | | * @param [in] cSz Size of data in cipher text buffer. |
7255 | | */ |
7256 | | static void GHASH_UPDATE(Aes* aes, const byte* a, word32 aSz, const byte* c, |
7257 | | word32 cSz) |
7258 | 0 | { |
7259 | 0 | word32 blocks; |
7260 | 0 | word32 partial; |
7261 | | |
7262 | | /* Hash in A, the Additional Authentication Data */ |
7263 | 0 | if (aSz != 0 && a != NULL) { |
7264 | | /* Update count of AAD we have hashed. */ |
7265 | 0 | aes->aSz += aSz; |
7266 | | /* Check if we have unprocessed data. */ |
7267 | 0 | if (aes->aOver > 0) { |
7268 | | /* Calculate amount we can use - fill up the block. */ |
7269 | 0 | byte sz = AES_BLOCK_SIZE - aes->aOver; |
7270 | 0 | if (sz > aSz) { |
7271 | 0 | sz = aSz; |
7272 | 0 | } |
7273 | | /* Copy extra into last GHASH block array and update count. */ |
7274 | 0 | XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz); |
7275 | 0 | aes->aOver += sz; |
7276 | 0 | if (aes->aOver == AES_BLOCK_SIZE) { |
7277 | | /* We have filled up the block and can process. */ |
7278 | 0 | GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes)); |
7279 | | /* Reset count. */ |
7280 | 0 | aes->aOver = 0; |
7281 | 0 | } |
7282 | | /* Used up some data. */ |
7283 | 0 | aSz -= sz; |
7284 | 0 | a += sz; |
7285 | 0 | } |
7286 | | |
7287 | | /* Calculate number of blocks of AAD and the leftover. */ |
7288 | 0 | blocks = aSz / AES_BLOCK_SIZE; |
7289 | 0 | partial = aSz % AES_BLOCK_SIZE; |
7290 | | /* GHASH full blocks now. */ |
7291 | 0 | while (blocks--) { |
7292 | 0 | GHASH_ONE_BLOCK(aes, a); |
7293 | 0 | a += AES_BLOCK_SIZE; |
7294 | 0 | } |
7295 | 0 | if (partial != 0) { |
7296 | | /* Cache the partial block. */ |
7297 | 0 | XMEMCPY(AES_LASTGBLOCK(aes), a, partial); |
7298 | 0 | aes->aOver = (byte)partial; |
7299 | 0 | } |
7300 | 0 | } |
7301 | 0 | if (aes->aOver > 0 && cSz > 0 && c != NULL) { |
7302 | | /* No more AAD coming and we have a partial block. */ |
7303 | | /* Fill the rest of the block with zeros. */ |
7304 | 0 | byte sz = AES_BLOCK_SIZE - aes->aOver; |
7305 | 0 | XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0, sz); |
7306 | | /* GHASH last AAD block. */ |
7307 | 0 | GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes)); |
7308 | | /* Clear partial count for next time through. */ |
7309 | 0 | aes->aOver = 0; |
7310 | 0 | } |
7311 | | |
7312 | | /* Hash in C, the Ciphertext */ |
7313 | 0 | if (cSz != 0 && c != NULL) { |
7314 | | /* Update count of cipher text we have hashed. */ |
7315 | 0 | aes->cSz += cSz; |
7316 | 0 | if (aes->cOver > 0) { |
7317 | | /* Calculate amount we can use - fill up the block. */ |
7318 | 0 | byte sz = AES_BLOCK_SIZE - aes->cOver; |
7319 | 0 | if (sz > cSz) { |
7320 | 0 | sz = cSz; |
7321 | 0 | } |
7322 | 0 | XMEMCPY(AES_LASTGBLOCK(aes) + aes->cOver, c, sz); |
7323 | | /* Update count of unsed encrypted counter. */ |
7324 | 0 | aes->cOver += sz; |
7325 | 0 | if (aes->cOver == AES_BLOCK_SIZE) { |
7326 | | /* We have filled up the block and can process. */ |
7327 | 0 | GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes)); |
7328 | | /* Reset count. */ |
7329 | 0 | aes->cOver = 0; |
7330 | 0 | } |
7331 | | /* Used up some data. */ |
7332 | 0 | cSz -= sz; |
7333 | 0 | c += sz; |
7334 | 0 | } |
7335 | | |
7336 | | /* Calculate number of blocks of cipher text and the leftover. */ |
7337 | 0 | blocks = cSz / AES_BLOCK_SIZE; |
7338 | 0 | partial = cSz % AES_BLOCK_SIZE; |
7339 | | /* GHASH full blocks now. */ |
7340 | 0 | while (blocks--) { |
7341 | 0 | GHASH_ONE_BLOCK(aes, c); |
7342 | 0 | c += AES_BLOCK_SIZE; |
7343 | 0 | } |
7344 | 0 | if (partial != 0) { |
7345 | | /* Cache the partial block. */ |
7346 | 0 | XMEMCPY(AES_LASTGBLOCK(aes), c, partial); |
7347 | 0 | aes->cOver = (byte)partial; |
7348 | 0 | } |
7349 | 0 | } |
7350 | 0 | } |
7351 | | |
7352 | | /* Finalize the GHASH calculation. |
7353 | | * |
7354 | | * Complete hashing cipher text and hash the AAD and cipher text lengths. |
7355 | | * |
7356 | | * @param [in, out] aes AES GCM object. |
7357 | | * @param [out] s Authentication tag. |
7358 | | * @param [in] sSz Size of authentication tag required. |
7359 | | */ |
7360 | | static void GHASH_FINAL(Aes* aes, byte* s, word32 sSz) |
7361 | 0 | { |
7362 | | /* AAD block incomplete when > 0 */ |
7363 | 0 | byte over = aes->aOver; |
7364 | |
|
7365 | 0 | if (aes->cOver > 0) { |
7366 | | /* Cipher text block incomplete. */ |
7367 | 0 | over = aes->cOver; |
7368 | 0 | } |
7369 | 0 | if (over > 0) { |
7370 | | /* Zeroize the unused part of the block. */ |
7371 | 0 | XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over); |
7372 | | /* Hash the last block of cipher text. */ |
7373 | 0 | GHASH_ONE_BLOCK(aes, AES_LASTGBLOCK(aes)); |
7374 | 0 | } |
7375 | | /* Hash in the lengths of AAD and cipher text in bits */ |
7376 | 0 | GHASH_LEN_BLOCK(aes); |
7377 | | /* Copy the result into s. */ |
7378 | 0 | XMEMCPY(s, AES_TAG(aes), sSz); |
7379 | 0 | } |
7380 | | #endif /* WOLFSSL_AESGCM_STREAM */ |
7381 | | |
7382 | | |
7383 | | #ifdef FREESCALE_LTC_AES_GCM |
7384 | | int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz, |
7385 | | const byte* iv, word32 ivSz, |
7386 | | byte* authTag, word32 authTagSz, |
7387 | | const byte* authIn, word32 authInSz) |
7388 | | { |
7389 | | status_t status; |
7390 | | word32 keySize; |
7391 | | |
7392 | | /* argument checks */ |
7393 | | if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) { |
7394 | | return BAD_FUNC_ARG; |
7395 | | } |
7396 | | |
7397 | | if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) { |
7398 | | WOLFSSL_MSG("GcmEncrypt authTagSz too small error"); |
7399 | | return BAD_FUNC_ARG; |
7400 | | } |
7401 | | |
7402 | | status = wc_AesGetKeySize(aes, &keySize); |
7403 | | if (status) |
7404 | | return status; |
7405 | | |
7406 | | status = wolfSSL_CryptHwMutexLock(); |
7407 | | if (status != 0) |
7408 | | return status; |
7409 | | |
7410 | | status = LTC_AES_EncryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz, |
7411 | | authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz); |
7412 | | wolfSSL_CryptHwMutexUnLock(); |
7413 | | |
7414 | | return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E; |
7415 | | } |
7416 | | |
7417 | | #else |
7418 | | |
7419 | | #ifdef STM32_CRYPTO_AES_GCM |
7420 | | |
7421 | | /* this function supports inline encrypt */ |
7422 | | /* define STM32_AESGCM_PARTIAL for newer STM Cube HAL's with workaround |
7423 | | for handling partial packets to improve auth tag calculation performance by |
7424 | | using hardware */ |
7425 | | static WARN_UNUSED_RESULT int wc_AesGcmEncrypt_STM32( |
7426 | | Aes* aes, byte* out, const byte* in, word32 sz, |
7427 | | const byte* iv, word32 ivSz, |
7428 | | byte* authTag, word32 authTagSz, |
7429 | | const byte* authIn, word32 authInSz) |
7430 | | { |
7431 | | int ret; |
7432 | | #ifdef WOLFSSL_STM32_CUBEMX |
7433 | | CRYP_HandleTypeDef hcryp; |
7434 | | #else |
7435 | | word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)]; |
7436 | | #endif |
7437 | | word32 keySize; |
7438 | | #ifdef WOLFSSL_STM32_CUBEMX |
7439 | | int status = HAL_OK; |
7440 | | word32 blocks = sz / AES_BLOCK_SIZE; |
7441 | | word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)]; |
7442 | | #else |
7443 | | int status = SUCCESS; |
7444 | | #endif |
7445 | | word32 partial = sz % AES_BLOCK_SIZE; |
7446 | | word32 tag[AES_BLOCK_SIZE/sizeof(word32)]; |
7447 | | word32 ctrInit[AES_BLOCK_SIZE/sizeof(word32)]; |
7448 | | word32 ctr[AES_BLOCK_SIZE/sizeof(word32)]; |
7449 | | word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)]; |
7450 | | byte* authInPadded = NULL; |
7451 | | int authPadSz, wasAlloc = 0, useSwGhash = 0; |
7452 | | |
7453 | | ret = wc_AesGetKeySize(aes, &keySize); |
7454 | | if (ret != 0) |
7455 | | return ret; |
7456 | | |
7457 | | #ifdef WOLFSSL_STM32_CUBEMX |
7458 | | ret = wc_Stm32_Aes_Init(aes, &hcryp); |
7459 | | if (ret != 0) |
7460 | | return ret; |
7461 | | #endif |
7462 | | |
7463 | | XMEMSET(ctr, 0, AES_BLOCK_SIZE); |
7464 | | if (ivSz == GCM_NONCE_MID_SZ) { |
7465 | | byte* pCtr = (byte*)ctr; |
7466 | | XMEMCPY(ctr, iv, ivSz); |
7467 | | pCtr[AES_BLOCK_SIZE - 1] = 1; |
7468 | | } |
7469 | | else { |
7470 | | GHASH(aes, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE); |
7471 | | } |
7472 | | XMEMCPY(ctrInit, ctr, sizeof(ctr)); /* save off initial counter for GMAC */ |
7473 | | |
7474 | | /* Authentication buffer - must be 4-byte multiple zero padded */ |
7475 | | authPadSz = authInSz % sizeof(word32); |
7476 | | if (authPadSz != 0) { |
7477 | | authPadSz = authInSz + sizeof(word32) - authPadSz; |
7478 | | if (authPadSz <= sizeof(authhdr)) { |
7479 | | authInPadded = (byte*)authhdr; |
7480 | | } |
7481 | | else { |
7482 | | authInPadded = (byte*)XMALLOC(authPadSz, aes->heap, |
7483 | | DYNAMIC_TYPE_TMP_BUFFER); |
7484 | | if (authInPadded == NULL) { |
7485 | | wolfSSL_CryptHwMutexUnLock(); |
7486 | | return MEMORY_E; |
7487 | | } |
7488 | | wasAlloc = 1; |
7489 | | } |
7490 | | XMEMSET(authInPadded, 0, authPadSz); |
7491 | | XMEMCPY(authInPadded, authIn, authInSz); |
7492 | | } else { |
7493 | | authPadSz = authInSz; |
7494 | | authInPadded = (byte*)authIn; |
7495 | | } |
7496 | | |
7497 | | /* for cases where hardware cannot be used for authTag calculate it */ |
7498 | | /* if IV is not 12 calculate GHASH using software */ |
7499 | | if (ivSz != GCM_NONCE_MID_SZ |
7500 | | #ifndef CRYP_HEADERWIDTHUNIT_BYTE |
7501 | | /* or harware that does not support partial block */ |
7502 | | || sz == 0 || partial != 0 |
7503 | | #endif |
7504 | | #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL) |
7505 | | /* or authIn is not a multiple of 4 */ |
7506 | | || authPadSz != authInSz |
7507 | | #endif |
7508 | | ) { |
7509 | | useSwGhash = 1; |
7510 | | } |
7511 | | |
7512 | | /* Hardware requires counter + 1 */ |
7513 | | IncrementGcmCounter((byte*)ctr); |
7514 | | |
7515 | | ret = wolfSSL_CryptHwMutexLock(); |
7516 | | if (ret != 0) { |
7517 | | return ret; |
7518 | | } |
7519 | | #ifdef WOLFSSL_STM32_CUBEMX |
7520 | | hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr; |
7521 | | hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded; |
7522 | | |
7523 | | #if defined(STM32_HAL_V2) |
7524 | | hcryp.Init.Algorithm = CRYP_AES_GCM; |
7525 | | #ifdef CRYP_HEADERWIDTHUNIT_BYTE |
7526 | | /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */ |
7527 | | hcryp.Init.HeaderSize = authInSz; |
7528 | | #else |
7529 | | hcryp.Init.HeaderSize = authPadSz/sizeof(word32); |
7530 | | #endif |
7531 | | #ifdef STM32_AESGCM_PARTIAL |
7532 | | hcryp.Init.HeaderPadSize = authPadSz - authInSz; |
7533 | | #endif |
7534 | | #ifdef CRYP_KEYIVCONFIG_ONCE |
7535 | | /* allows repeated calls to HAL_CRYP_Encrypt */ |
7536 | | hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE; |
7537 | | #endif |
7538 | | ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE); |
7539 | | hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr; |
7540 | | HAL_CRYP_Init(&hcryp); |
7541 | | |
7542 | | #ifndef CRYP_KEYIVCONFIG_ONCE |
7543 | | /* GCM payload phase - can handle partial blocks */ |
7544 | | status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, |
7545 | | (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT); |
7546 | | #else |
7547 | | /* GCM payload phase - blocks */ |
7548 | | if (blocks) { |
7549 | | status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)in, |
7550 | | (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT); |
7551 | | } |
7552 | | /* GCM payload phase - partial remainder */ |
7553 | | if (status == HAL_OK && (partial != 0 || blocks == 0)) { |
7554 | | XMEMSET(partialBlock, 0, sizeof(partialBlock)); |
7555 | | XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial); |
7556 | | status = HAL_CRYP_Encrypt(&hcryp, (uint32_t*)partialBlock, partial, |
7557 | | (uint32_t*)partialBlock, STM32_HAL_TIMEOUT); |
7558 | | XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial); |
7559 | | } |
7560 | | #endif |
7561 | | if (status == HAL_OK && !useSwGhash) { |
7562 | | /* Compute the authTag */ |
7563 | | status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag, |
7564 | | STM32_HAL_TIMEOUT); |
7565 | | } |
7566 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
7567 | | /* Set the CRYP parameters */ |
7568 | | hcryp.Init.HeaderSize = authPadSz; |
7569 | | if (authPadSz == 0) |
7570 | | hcryp.Init.Header = NULL; /* cannot pass pointer here when authIn == 0 */ |
7571 | | hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC; |
7572 | | hcryp.Init.OperatingMode = CRYP_ALGOMODE_ENCRYPT; |
7573 | | hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE; |
7574 | | HAL_CRYP_Init(&hcryp); |
7575 | | |
7576 | | /* GCM init phase */ |
7577 | | status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT); |
7578 | | if (status == HAL_OK) { |
7579 | | /* GCM header phase */ |
7580 | | hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE; |
7581 | | status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT); |
7582 | | } |
7583 | | if (status == HAL_OK) { |
7584 | | /* GCM payload phase - blocks */ |
7585 | | hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE; |
7586 | | if (blocks) { |
7587 | | status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in, |
7588 | | (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT); |
7589 | | } |
7590 | | } |
7591 | | if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) { |
7592 | | /* GCM payload phase - partial remainder */ |
7593 | | XMEMSET(partialBlock, 0, sizeof(partialBlock)); |
7594 | | XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial); |
7595 | | status = HAL_CRYPEx_AES_Auth(&hcryp, (uint8_t*)partialBlock, partial, |
7596 | | (uint8_t*)partialBlock, STM32_HAL_TIMEOUT); |
7597 | | XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial); |
7598 | | } |
7599 | | if (status == HAL_OK && !useSwGhash) { |
7600 | | /* GCM final phase */ |
7601 | | hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE; |
7602 | | status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT); |
7603 | | } |
7604 | | #else |
7605 | | hcryp.Init.HeaderSize = authPadSz; |
7606 | | HAL_CRYP_Init(&hcryp); |
7607 | | if (blocks) { |
7608 | | /* GCM payload phase - blocks */ |
7609 | | status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (byte*)in, |
7610 | | (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT); |
7611 | | } |
7612 | | if (status == HAL_OK && (partial != 0 || blocks == 0)) { |
7613 | | /* GCM payload phase - partial remainder */ |
7614 | | XMEMSET(partialBlock, 0, sizeof(partialBlock)); |
7615 | | XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial); |
7616 | | status = HAL_CRYPEx_AESGCM_Encrypt(&hcryp, (uint8_t*)partialBlock, partial, |
7617 | | (uint8_t*)partialBlock, STM32_HAL_TIMEOUT); |
7618 | | XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial); |
7619 | | } |
7620 | | if (status == HAL_OK && !useSwGhash) { |
7621 | | /* Compute the authTag */ |
7622 | | status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (uint8_t*)tag, STM32_HAL_TIMEOUT); |
7623 | | } |
7624 | | #endif |
7625 | | |
7626 | | if (status != HAL_OK) |
7627 | | ret = AES_GCM_AUTH_E; |
7628 | | HAL_CRYP_DeInit(&hcryp); |
7629 | | |
7630 | | #else /* Standard Peripheral Library */ |
7631 | | ByteReverseWords(keyCopy, (word32*)aes->key, keySize); |
7632 | | status = CRYP_AES_GCM(MODE_ENCRYPT, (uint8_t*)ctr, |
7633 | | (uint8_t*)keyCopy, keySize * 8, |
7634 | | (uint8_t*)in, sz, |
7635 | | (uint8_t*)authInPadded, authInSz, |
7636 | | (uint8_t*)out, (uint8_t*)tag); |
7637 | | if (status != SUCCESS) |
7638 | | ret = AES_GCM_AUTH_E; |
7639 | | #endif /* WOLFSSL_STM32_CUBEMX */ |
7640 | | wolfSSL_CryptHwMutexUnLock(); |
7641 | | |
7642 | | if (ret == 0) { |
7643 | | /* return authTag */ |
7644 | | if (authTag) { |
7645 | | if (useSwGhash) { |
7646 | | GHASH(aes, authIn, authInSz, out, sz, authTag, authTagSz); |
7647 | | ret = wc_AesEncrypt(aes, (byte*)ctrInit, (byte*)tag); |
7648 | | if (ret == 0) { |
7649 | | xorbuf(authTag, tag, authTagSz); |
7650 | | } |
7651 | | } |
7652 | | else { |
7653 | | /* use hardware calculated tag */ |
7654 | | XMEMCPY(authTag, tag, authTagSz); |
7655 | | } |
7656 | | } |
7657 | | } |
7658 | | |
7659 | | /* Free memory */ |
7660 | | if (wasAlloc) { |
7661 | | XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER); |
7662 | | } |
7663 | | |
7664 | | return ret; |
7665 | | } |
7666 | | |
7667 | | #endif /* STM32_CRYPTO_AES_GCM */ |
7668 | | |
7669 | | #ifdef WOLFSSL_AESNI |
7670 | | /* For performance reasons, this code needs to be not inlined. */ |
7671 | | WARN_UNUSED_RESULT int AES_GCM_encrypt_C( |
7672 | | Aes* aes, byte* out, const byte* in, word32 sz, |
7673 | | const byte* iv, word32 ivSz, |
7674 | | byte* authTag, word32 authTagSz, |
7675 | | const byte* authIn, word32 authInSz); |
7676 | | #else |
7677 | | static |
7678 | | #endif |
7679 | | WARN_UNUSED_RESULT int AES_GCM_encrypt_C( |
7680 | | Aes* aes, byte* out, const byte* in, word32 sz, |
7681 | | const byte* iv, word32 ivSz, |
7682 | | byte* authTag, word32 authTagSz, |
7683 | | const byte* authIn, word32 authInSz) |
7684 | 0 | { |
7685 | 0 | int ret = 0; |
7686 | 0 | word32 blocks = sz / AES_BLOCK_SIZE; |
7687 | 0 | word32 partial = sz % AES_BLOCK_SIZE; |
7688 | 0 | const byte* p = in; |
7689 | 0 | byte* c = out; |
7690 | 0 | ALIGN32 byte counter[AES_BLOCK_SIZE]; |
7691 | 0 | ALIGN32 byte initialCounter[AES_BLOCK_SIZE]; |
7692 | 0 | ALIGN32 byte scratch[AES_BLOCK_SIZE]; |
7693 | |
|
7694 | 0 | if (ivSz == GCM_NONCE_MID_SZ) { |
7695 | | /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */ |
7696 | 0 | XMEMCPY(counter, iv, ivSz); |
7697 | 0 | XMEMSET(counter + GCM_NONCE_MID_SZ, 0, |
7698 | 0 | AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1); |
7699 | 0 | counter[AES_BLOCK_SIZE - 1] = 1; |
7700 | 0 | } |
7701 | 0 | else { |
7702 | | /* Counter is GHASH of IV. */ |
7703 | | #ifdef OPENSSL_EXTRA |
7704 | | word32 aadTemp = aes->aadLen; |
7705 | | aes->aadLen = 0; |
7706 | | #endif |
7707 | 0 | GHASH(aes, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE); |
7708 | | #ifdef OPENSSL_EXTRA |
7709 | | aes->aadLen = aadTemp; |
7710 | | #endif |
7711 | 0 | } |
7712 | 0 | XMEMCPY(initialCounter, counter, AES_BLOCK_SIZE); |
7713 | |
|
7714 | | #ifdef WOLFSSL_PIC32MZ_CRYPT |
7715 | | if (blocks) { |
7716 | | /* use initial IV for HW, but don't use it below */ |
7717 | | XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE); |
7718 | | |
7719 | | ret = wc_Pic32AesCrypt( |
7720 | | aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE, |
7721 | | out, in, (blocks * AES_BLOCK_SIZE), |
7722 | | PIC32_ENCRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM); |
7723 | | if (ret != 0) |
7724 | | return ret; |
7725 | | } |
7726 | | /* process remainder using partial handling */ |
7727 | | #endif |
7728 | |
|
7729 | 0 | #if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT) |
7730 | | /* some hardware acceleration can gain performance from doing AES encryption |
7731 | | * of the whole buffer at once */ |
7732 | 0 | if (c != p && blocks > 0) { /* can not handle inline encryption */ |
7733 | 0 | while (blocks--) { |
7734 | 0 | IncrementGcmCounter(counter); |
7735 | 0 | XMEMCPY(c, counter, AES_BLOCK_SIZE); |
7736 | 0 | c += AES_BLOCK_SIZE; |
7737 | 0 | } |
7738 | | |
7739 | | /* reset number of blocks and then do encryption */ |
7740 | 0 | blocks = sz / AES_BLOCK_SIZE; |
7741 | 0 | wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks); |
7742 | 0 | xorbuf(out, p, AES_BLOCK_SIZE * blocks); |
7743 | 0 | p += AES_BLOCK_SIZE * blocks; |
7744 | 0 | } |
7745 | 0 | else |
7746 | 0 | #endif /* HAVE_AES_ECB && !WOLFSSL_PIC32MZ_CRYPT */ |
7747 | 0 | { |
7748 | 0 | while (blocks--) { |
7749 | 0 | IncrementGcmCounter(counter); |
7750 | 0 | #if !defined(WOLFSSL_PIC32MZ_CRYPT) |
7751 | 0 | ret = wc_AesEncrypt(aes, counter, scratch); |
7752 | 0 | if (ret != 0) |
7753 | 0 | return ret; |
7754 | 0 | xorbufout(c, scratch, p, AES_BLOCK_SIZE); |
7755 | 0 | #endif |
7756 | 0 | p += AES_BLOCK_SIZE; |
7757 | 0 | c += AES_BLOCK_SIZE; |
7758 | 0 | } |
7759 | 0 | } |
7760 | | |
7761 | 0 | if (partial != 0) { |
7762 | 0 | IncrementGcmCounter(counter); |
7763 | 0 | ret = wc_AesEncrypt(aes, counter, scratch); |
7764 | 0 | if (ret != 0) |
7765 | 0 | return ret; |
7766 | 0 | xorbufout(c, scratch, p, partial); |
7767 | 0 | } |
7768 | 0 | if (authTag) { |
7769 | 0 | GHASH(aes, authIn, authInSz, out, sz, authTag, authTagSz); |
7770 | 0 | ret = wc_AesEncrypt(aes, initialCounter, scratch); |
7771 | 0 | if (ret != 0) |
7772 | 0 | return ret; |
7773 | 0 | xorbuf(authTag, scratch, authTagSz); |
7774 | | #ifdef OPENSSL_EXTRA |
7775 | | if (!in && !sz) |
7776 | | /* store AAD size for next call */ |
7777 | | aes->aadLen = authInSz; |
7778 | | #endif |
7779 | 0 | } |
7780 | | |
7781 | 0 | return ret; |
7782 | 0 | } |
7783 | | |
7784 | | /* Software AES - GCM Encrypt */ |
7785 | | int wc_AesGcmEncrypt(Aes* aes, byte* out, const byte* in, word32 sz, |
7786 | | const byte* iv, word32 ivSz, |
7787 | | byte* authTag, word32 authTagSz, |
7788 | | const byte* authIn, word32 authInSz) |
7789 | 0 | { |
7790 | | /* argument checks */ |
7791 | 0 | if (aes == NULL || authTagSz > AES_BLOCK_SIZE || ivSz == 0) { |
7792 | 0 | return BAD_FUNC_ARG; |
7793 | 0 | } |
7794 | | |
7795 | 0 | if (authTagSz < WOLFSSL_MIN_AUTH_TAG_SZ) { |
7796 | 0 | WOLFSSL_MSG("GcmEncrypt authTagSz too small error"); |
7797 | 0 | return BAD_FUNC_ARG; |
7798 | 0 | } |
7799 | | |
7800 | 0 | #ifdef WOLF_CRYPTO_CB |
7801 | 0 | if (aes->devId != INVALID_DEVID) { |
7802 | 0 | int crypto_cb_ret = |
7803 | 0 | wc_CryptoCb_AesGcmEncrypt(aes, out, in, sz, iv, ivSz, authTag, |
7804 | 0 | authTagSz, authIn, authInSz); |
7805 | 0 | if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE) |
7806 | 0 | return crypto_cb_ret; |
7807 | | /* fall-through when unavailable */ |
7808 | 0 | } |
7809 | 0 | #endif |
7810 | | |
7811 | | #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES) |
7812 | | /* if async and byte count above threshold */ |
7813 | | /* only 12-byte IV is supported in HW */ |
7814 | | if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES && |
7815 | | sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) { |
7816 | | #if defined(HAVE_CAVIUM) |
7817 | | #ifdef HAVE_CAVIUM_V |
7818 | | if (authInSz == 20) { /* Nitrox V GCM is only working with 20 byte AAD */ |
7819 | | return NitroxAesGcmEncrypt(aes, out, in, sz, |
7820 | | (const byte*)aes->devKey, aes->keylen, iv, ivSz, |
7821 | | authTag, authTagSz, authIn, authInSz); |
7822 | | } |
7823 | | #endif |
7824 | | #elif defined(HAVE_INTEL_QA) |
7825 | | return IntelQaSymAesGcmEncrypt(&aes->asyncDev, out, in, sz, |
7826 | | (const byte*)aes->devKey, aes->keylen, iv, ivSz, |
7827 | | authTag, authTagSz, authIn, authInSz); |
7828 | | #else /* WOLFSSL_ASYNC_CRYPT_TEST */ |
7829 | | if (wc_AsyncTestInit(&aes->asyncDev, ASYNC_TEST_AES_GCM_ENCRYPT)) { |
7830 | | WC_ASYNC_TEST* testDev = &aes->asyncDev.test; |
7831 | | testDev->aes.aes = aes; |
7832 | | testDev->aes.out = out; |
7833 | | testDev->aes.in = in; |
7834 | | testDev->aes.sz = sz; |
7835 | | testDev->aes.iv = iv; |
7836 | | testDev->aes.ivSz = ivSz; |
7837 | | testDev->aes.authTag = authTag; |
7838 | | testDev->aes.authTagSz = authTagSz; |
7839 | | testDev->aes.authIn = authIn; |
7840 | | testDev->aes.authInSz = authInSz; |
7841 | | return WC_PENDING_E; |
7842 | | } |
7843 | | #endif |
7844 | | } |
7845 | | #endif /* WOLFSSL_ASYNC_CRYPT */ |
7846 | | |
7847 | | #ifdef WOLFSSL_SILABS_SE_ACCEL |
7848 | | return wc_AesGcmEncrypt_silabs( |
7849 | | aes, out, in, sz, |
7850 | | iv, ivSz, |
7851 | | authTag, authTagSz, |
7852 | | authIn, authInSz); |
7853 | | #endif |
7854 | | |
7855 | | #ifdef STM32_CRYPTO_AES_GCM |
7856 | | return wc_AesGcmEncrypt_STM32( |
7857 | | aes, out, in, sz, iv, ivSz, |
7858 | | authTag, authTagSz, authIn, authInSz); |
7859 | | #endif /* STM32_CRYPTO_AES_GCM */ |
7860 | | |
7861 | | #ifdef WOLFSSL_AESNI |
7862 | | #ifdef HAVE_INTEL_AVX2 |
7863 | | if (IS_INTEL_AVX2(intel_flags)) { |
7864 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
7865 | | AES_GCM_encrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz, |
7866 | | authTagSz, (const byte*)aes->key, aes->rounds); |
7867 | | RESTORE_VECTOR_REGISTERS(); |
7868 | | return 0; |
7869 | | } |
7870 | | else |
7871 | | #endif |
7872 | | #ifdef HAVE_INTEL_AVX1 |
7873 | | if (IS_INTEL_AVX1(intel_flags)) { |
7874 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
7875 | | AES_GCM_encrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz, |
7876 | | authTagSz, (const byte*)aes->key, aes->rounds); |
7877 | | RESTORE_VECTOR_REGISTERS(); |
7878 | | return 0; |
7879 | | } |
7880 | | else |
7881 | | #endif |
7882 | | if (haveAESNI) { |
7883 | | AES_GCM_encrypt(in, out, authIn, iv, authTag, sz, authInSz, ivSz, |
7884 | | authTagSz, (const byte*)aes->key, aes->rounds); |
7885 | | return 0; |
7886 | | } |
7887 | | else |
7888 | | #endif |
7889 | 0 | { |
7890 | 0 | return AES_GCM_encrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz, |
7891 | 0 | authIn, authInSz); |
7892 | 0 | } |
7893 | 0 | } |
7894 | | #endif |
7895 | | |
7896 | | |
7897 | | /* AES GCM Decrypt */ |
7898 | | #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT) |
7899 | | #ifdef FREESCALE_LTC_AES_GCM |
7900 | | int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz, |
7901 | | const byte* iv, word32 ivSz, |
7902 | | const byte* authTag, word32 authTagSz, |
7903 | | const byte* authIn, word32 authInSz) |
7904 | | { |
7905 | | int ret; |
7906 | | word32 keySize; |
7907 | | status_t status; |
7908 | | |
7909 | | /* argument checks */ |
7910 | | /* If the sz is non-zero, both in and out must be set. If sz is 0, |
7911 | | * in and out are don't cares, as this is is the GMAC case. */ |
7912 | | if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) || |
7913 | | authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 || |
7914 | | ivSz == 0) { |
7915 | | |
7916 | | return BAD_FUNC_ARG; |
7917 | | } |
7918 | | |
7919 | | ret = wc_AesGetKeySize(aes, &keySize); |
7920 | | if (ret != 0) { |
7921 | | return ret; |
7922 | | } |
7923 | | |
7924 | | status = wolfSSL_CryptHwMutexLock(); |
7925 | | if (status != 0) |
7926 | | return status; |
7927 | | |
7928 | | status = LTC_AES_DecryptTagGcm(LTC_BASE, in, out, sz, iv, ivSz, |
7929 | | authIn, authInSz, (byte*)aes->key, keySize, authTag, authTagSz); |
7930 | | wolfSSL_CryptHwMutexUnLock(); |
7931 | | |
7932 | | return (status == kStatus_Success) ? 0 : AES_GCM_AUTH_E; |
7933 | | } |
7934 | | |
7935 | | #else |
7936 | | |
7937 | | #ifdef STM32_CRYPTO_AES_GCM |
7938 | | /* this function supports inline decrypt */ |
7939 | | static WARN_UNUSED_RESULT int wc_AesGcmDecrypt_STM32( |
7940 | | Aes* aes, byte* out, |
7941 | | const byte* in, word32 sz, |
7942 | | const byte* iv, word32 ivSz, |
7943 | | const byte* authTag, word32 authTagSz, |
7944 | | const byte* authIn, word32 authInSz) |
7945 | | { |
7946 | | int ret; |
7947 | | #ifdef WOLFSSL_STM32_CUBEMX |
7948 | | int status = HAL_OK; |
7949 | | CRYP_HandleTypeDef hcryp; |
7950 | | word32 blocks = sz / AES_BLOCK_SIZE; |
7951 | | #else |
7952 | | int status = SUCCESS; |
7953 | | word32 keyCopy[AES_256_KEY_SIZE/sizeof(word32)]; |
7954 | | #endif |
7955 | | word32 keySize; |
7956 | | word32 partial = sz % AES_BLOCK_SIZE; |
7957 | | word32 tag[AES_BLOCK_SIZE/sizeof(word32)]; |
7958 | | word32 tagExpected[AES_BLOCK_SIZE/sizeof(word32)]; |
7959 | | word32 partialBlock[AES_BLOCK_SIZE/sizeof(word32)]; |
7960 | | word32 ctr[AES_BLOCK_SIZE/sizeof(word32)]; |
7961 | | word32 authhdr[AES_BLOCK_SIZE/sizeof(word32)]; |
7962 | | byte* authInPadded = NULL; |
7963 | | int authPadSz, wasAlloc = 0, tagComputed = 0; |
7964 | | |
7965 | | ret = wc_AesGetKeySize(aes, &keySize); |
7966 | | if (ret != 0) |
7967 | | return ret; |
7968 | | |
7969 | | #ifdef WOLFSSL_STM32_CUBEMX |
7970 | | ret = wc_Stm32_Aes_Init(aes, &hcryp); |
7971 | | if (ret != 0) |
7972 | | return ret; |
7973 | | #endif |
7974 | | |
7975 | | XMEMSET(ctr, 0, AES_BLOCK_SIZE); |
7976 | | if (ivSz == GCM_NONCE_MID_SZ) { |
7977 | | byte* pCtr = (byte*)ctr; |
7978 | | XMEMCPY(ctr, iv, ivSz); |
7979 | | pCtr[AES_BLOCK_SIZE - 1] = 1; |
7980 | | } |
7981 | | else { |
7982 | | GHASH(aes, NULL, 0, iv, ivSz, (byte*)ctr, AES_BLOCK_SIZE); |
7983 | | } |
7984 | | |
7985 | | /* Make copy of expected authTag, which could get corrupted in some |
7986 | | * Cube HAL versions without proper partial block support. |
7987 | | * For TLS blocks the authTag is after the output buffer, so save it */ |
7988 | | XMEMCPY(tagExpected, authTag, authTagSz); |
7989 | | |
7990 | | /* Authentication buffer - must be 4-byte multiple zero padded */ |
7991 | | authPadSz = authInSz % sizeof(word32); |
7992 | | if (authPadSz != 0) { |
7993 | | authPadSz = authInSz + sizeof(word32) - authPadSz; |
7994 | | } |
7995 | | else { |
7996 | | authPadSz = authInSz; |
7997 | | } |
7998 | | |
7999 | | /* for cases where hardware cannot be used for authTag calculate it */ |
8000 | | /* if IV is not 12 calculate GHASH using software */ |
8001 | | if (ivSz != GCM_NONCE_MID_SZ |
8002 | | #ifndef CRYP_HEADERWIDTHUNIT_BYTE |
8003 | | /* or harware that does not support partial block */ |
8004 | | || sz == 0 || partial != 0 |
8005 | | #endif |
8006 | | #if !defined(CRYP_HEADERWIDTHUNIT_BYTE) && !defined(STM32_AESGCM_PARTIAL) |
8007 | | /* or authIn is not a multiple of 4 */ |
8008 | | || authPadSz != authInSz |
8009 | | #endif |
8010 | | ) { |
8011 | | GHASH(aes, authIn, authInSz, in, sz, (byte*)tag, sizeof(tag)); |
8012 | | ret = wc_AesEncrypt(aes, (byte*)ctr, (byte*)partialBlock); |
8013 | | if (ret != 0) |
8014 | | return ret; |
8015 | | xorbuf(tag, partialBlock, sizeof(tag)); |
8016 | | tagComputed = 1; |
8017 | | } |
8018 | | |
8019 | | /* if using hardware for authentication tag make sure its aligned and zero padded */ |
8020 | | if (authPadSz != authInSz && !tagComputed) { |
8021 | | if (authPadSz <= sizeof(authhdr)) { |
8022 | | authInPadded = (byte*)authhdr; |
8023 | | } |
8024 | | else { |
8025 | | authInPadded = (byte*)XMALLOC(authPadSz, aes->heap, |
8026 | | DYNAMIC_TYPE_TMP_BUFFER); |
8027 | | if (authInPadded == NULL) { |
8028 | | wolfSSL_CryptHwMutexUnLock(); |
8029 | | return MEMORY_E; |
8030 | | } |
8031 | | wasAlloc = 1; |
8032 | | } |
8033 | | XMEMSET(authInPadded, 0, authPadSz); |
8034 | | XMEMCPY(authInPadded, authIn, authInSz); |
8035 | | } else { |
8036 | | authInPadded = (byte*)authIn; |
8037 | | } |
8038 | | |
8039 | | /* Hardware requires counter + 1 */ |
8040 | | IncrementGcmCounter((byte*)ctr); |
8041 | | |
8042 | | ret = wolfSSL_CryptHwMutexLock(); |
8043 | | if (ret != 0) { |
8044 | | return ret; |
8045 | | } |
8046 | | #ifdef WOLFSSL_STM32_CUBEMX |
8047 | | hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr; |
8048 | | hcryp.Init.Header = (STM_CRYPT_TYPE*)authInPadded; |
8049 | | |
8050 | | #if defined(STM32_HAL_V2) |
8051 | | hcryp.Init.Algorithm = CRYP_AES_GCM; |
8052 | | #ifdef CRYP_HEADERWIDTHUNIT_BYTE |
8053 | | /* V2 with CRYP_HEADERWIDTHUNIT_BYTE uses byte size for header */ |
8054 | | hcryp.Init.HeaderSize = authInSz; |
8055 | | #else |
8056 | | hcryp.Init.HeaderSize = authPadSz/sizeof(word32); |
8057 | | #endif |
8058 | | #ifdef STM32_AESGCM_PARTIAL |
8059 | | hcryp.Init.HeaderPadSize = authPadSz - authInSz; |
8060 | | #endif |
8061 | | #ifdef CRYP_KEYIVCONFIG_ONCE |
8062 | | /* allows repeated calls to HAL_CRYP_Decrypt */ |
8063 | | hcryp.Init.KeyIVConfigSkip = CRYP_KEYIVCONFIG_ONCE; |
8064 | | #endif |
8065 | | ByteReverseWords(ctr, ctr, AES_BLOCK_SIZE); |
8066 | | hcryp.Init.pInitVect = (STM_CRYPT_TYPE*)ctr; |
8067 | | HAL_CRYP_Init(&hcryp); |
8068 | | |
8069 | | #ifndef CRYP_KEYIVCONFIG_ONCE |
8070 | | status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in, |
8071 | | (blocks * AES_BLOCK_SIZE) + partial, (uint32_t*)out, STM32_HAL_TIMEOUT); |
8072 | | #else |
8073 | | /* GCM payload phase - blocks */ |
8074 | | if (blocks) { |
8075 | | status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)in, |
8076 | | (blocks * AES_BLOCK_SIZE), (uint32_t*)out, STM32_HAL_TIMEOUT); |
8077 | | } |
8078 | | /* GCM payload phase - partial remainder */ |
8079 | | if (status == HAL_OK && (partial != 0 || blocks == 0)) { |
8080 | | XMEMSET(partialBlock, 0, sizeof(partialBlock)); |
8081 | | XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial); |
8082 | | status = HAL_CRYP_Decrypt(&hcryp, (uint32_t*)partialBlock, partial, |
8083 | | ( uint32_t*)partialBlock, STM32_HAL_TIMEOUT); |
8084 | | XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial); |
8085 | | } |
8086 | | #endif |
8087 | | if (status == HAL_OK && !tagComputed) { |
8088 | | /* Compute the authTag */ |
8089 | | status = HAL_CRYPEx_AESGCM_GenerateAuthTAG(&hcryp, (uint32_t*)tag, |
8090 | | STM32_HAL_TIMEOUT); |
8091 | | } |
8092 | | #elif defined(STM32_CRYPTO_AES_ONLY) |
8093 | | /* Set the CRYP parameters */ |
8094 | | hcryp.Init.HeaderSize = authPadSz; |
8095 | | if (authPadSz == 0) |
8096 | | hcryp.Init.Header = NULL; /* cannot pass pointer when authIn == 0 */ |
8097 | | hcryp.Init.ChainingMode = CRYP_CHAINMODE_AES_GCM_GMAC; |
8098 | | hcryp.Init.OperatingMode = CRYP_ALGOMODE_DECRYPT; |
8099 | | hcryp.Init.GCMCMACPhase = CRYP_INIT_PHASE; |
8100 | | HAL_CRYP_Init(&hcryp); |
8101 | | |
8102 | | /* GCM init phase */ |
8103 | | status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT); |
8104 | | if (status == HAL_OK) { |
8105 | | /* GCM header phase */ |
8106 | | hcryp.Init.GCMCMACPhase = CRYP_HEADER_PHASE; |
8107 | | status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, 0, NULL, STM32_HAL_TIMEOUT); |
8108 | | } |
8109 | | if (status == HAL_OK) { |
8110 | | /* GCM payload phase - blocks */ |
8111 | | hcryp.Init.GCMCMACPhase = CRYP_PAYLOAD_PHASE; |
8112 | | if (blocks) { |
8113 | | status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)in, |
8114 | | (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT); |
8115 | | } |
8116 | | } |
8117 | | if (status == HAL_OK && (partial != 0 || (sz > 0 && blocks == 0))) { |
8118 | | /* GCM payload phase - partial remainder */ |
8119 | | XMEMSET(partialBlock, 0, sizeof(partialBlock)); |
8120 | | XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial); |
8121 | | status = HAL_CRYPEx_AES_Auth(&hcryp, (byte*)partialBlock, partial, |
8122 | | (byte*)partialBlock, STM32_HAL_TIMEOUT); |
8123 | | XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial); |
8124 | | } |
8125 | | if (status == HAL_OK && tagComputed == 0) { |
8126 | | /* GCM final phase */ |
8127 | | hcryp.Init.GCMCMACPhase = CRYP_FINAL_PHASE; |
8128 | | status = HAL_CRYPEx_AES_Auth(&hcryp, NULL, sz, (byte*)tag, STM32_HAL_TIMEOUT); |
8129 | | } |
8130 | | #else |
8131 | | hcryp.Init.HeaderSize = authPadSz; |
8132 | | HAL_CRYP_Init(&hcryp); |
8133 | | if (blocks) { |
8134 | | /* GCM payload phase - blocks */ |
8135 | | status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)in, |
8136 | | (blocks * AES_BLOCK_SIZE), out, STM32_HAL_TIMEOUT); |
8137 | | } |
8138 | | if (status == HAL_OK && (partial != 0 || blocks == 0)) { |
8139 | | /* GCM payload phase - partial remainder */ |
8140 | | XMEMSET(partialBlock, 0, sizeof(partialBlock)); |
8141 | | XMEMCPY(partialBlock, in + (blocks * AES_BLOCK_SIZE), partial); |
8142 | | status = HAL_CRYPEx_AESGCM_Decrypt(&hcryp, (byte*)partialBlock, partial, |
8143 | | (byte*)partialBlock, STM32_HAL_TIMEOUT); |
8144 | | XMEMCPY(out + (blocks * AES_BLOCK_SIZE), partialBlock, partial); |
8145 | | } |
8146 | | if (status == HAL_OK && tagComputed == 0) { |
8147 | | /* Compute the authTag */ |
8148 | | status = HAL_CRYPEx_AESGCM_Finish(&hcryp, sz, (byte*)tag, STM32_HAL_TIMEOUT); |
8149 | | } |
8150 | | #endif |
8151 | | |
8152 | | if (status != HAL_OK) |
8153 | | ret = AES_GCM_AUTH_E; |
8154 | | |
8155 | | HAL_CRYP_DeInit(&hcryp); |
8156 | | |
8157 | | #else /* Standard Peripheral Library */ |
8158 | | ByteReverseWords(keyCopy, (word32*)aes->key, aes->keylen); |
8159 | | |
8160 | | /* Input size and auth size need to be the actual sizes, even though |
8161 | | * they are not block aligned, because this length (in bits) is used |
8162 | | * in the final GHASH. */ |
8163 | | XMEMSET(partialBlock, 0, sizeof(partialBlock)); /* use this to get tag */ |
8164 | | status = CRYP_AES_GCM(MODE_DECRYPT, (uint8_t*)ctr, |
8165 | | (uint8_t*)keyCopy, keySize * 8, |
8166 | | (uint8_t*)in, sz, |
8167 | | (uint8_t*)authInPadded, authInSz, |
8168 | | (uint8_t*)out, (uint8_t*)partialBlock); |
8169 | | if (status != SUCCESS) |
8170 | | ret = AES_GCM_AUTH_E; |
8171 | | if (tagComputed == 0) |
8172 | | XMEMCPY(tag, partialBlock, authTagSz); |
8173 | | #endif /* WOLFSSL_STM32_CUBEMX */ |
8174 | | wolfSSL_CryptHwMutexUnLock(); |
8175 | | |
8176 | | /* Check authentication tag */ |
8177 | | if (ConstantCompare((const byte*)tagExpected, (byte*)tag, authTagSz) != 0) { |
8178 | | ret = AES_GCM_AUTH_E; |
8179 | | } |
8180 | | |
8181 | | /* Free memory */ |
8182 | | if (wasAlloc) { |
8183 | | XFREE(authInPadded, aes->heap, DYNAMIC_TYPE_TMP_BUFFER); |
8184 | | } |
8185 | | |
8186 | | return ret; |
8187 | | } |
8188 | | |
8189 | | #endif /* STM32_CRYPTO_AES_GCM */ |
8190 | | |
8191 | | #ifdef WOLFSSL_AESNI |
8192 | | /* For performance reasons, this code needs to be not inlined. */ |
8193 | | int WARN_UNUSED_RESULT AES_GCM_decrypt_C( |
8194 | | Aes* aes, byte* out, const byte* in, word32 sz, |
8195 | | const byte* iv, word32 ivSz, |
8196 | | const byte* authTag, word32 authTagSz, |
8197 | | const byte* authIn, word32 authInSz); |
8198 | | #else |
8199 | | static |
8200 | | #endif |
8201 | | int WARN_UNUSED_RESULT AES_GCM_decrypt_C( |
8202 | | Aes* aes, byte* out, const byte* in, word32 sz, |
8203 | | const byte* iv, word32 ivSz, |
8204 | | const byte* authTag, word32 authTagSz, |
8205 | | const byte* authIn, word32 authInSz) |
8206 | 0 | { |
8207 | 0 | int ret = 0; |
8208 | 0 | word32 blocks = sz / AES_BLOCK_SIZE; |
8209 | 0 | word32 partial = sz % AES_BLOCK_SIZE; |
8210 | 0 | const byte* c = in; |
8211 | 0 | byte* p = out; |
8212 | 0 | ALIGN32 byte counter[AES_BLOCK_SIZE]; |
8213 | 0 | ALIGN32 byte scratch[AES_BLOCK_SIZE]; |
8214 | 0 | ALIGN32 byte Tprime[AES_BLOCK_SIZE]; |
8215 | 0 | ALIGN32 byte EKY0[AES_BLOCK_SIZE]; |
8216 | 0 | sword32 res; |
8217 | |
|
8218 | 0 | if (ivSz == GCM_NONCE_MID_SZ) { |
8219 | | /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */ |
8220 | 0 | XMEMCPY(counter, iv, ivSz); |
8221 | 0 | XMEMSET(counter + GCM_NONCE_MID_SZ, 0, |
8222 | 0 | AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1); |
8223 | 0 | counter[AES_BLOCK_SIZE - 1] = 1; |
8224 | 0 | } |
8225 | 0 | else { |
8226 | | /* Counter is GHASH of IV. */ |
8227 | | #ifdef OPENSSL_EXTRA |
8228 | | word32 aadTemp = aes->aadLen; |
8229 | | aes->aadLen = 0; |
8230 | | #endif |
8231 | 0 | GHASH(aes, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE); |
8232 | | #ifdef OPENSSL_EXTRA |
8233 | | aes->aadLen = aadTemp; |
8234 | | #endif |
8235 | 0 | } |
8236 | | |
8237 | | /* Calc the authTag again using received auth data and the cipher text */ |
8238 | 0 | GHASH(aes, authIn, authInSz, in, sz, Tprime, sizeof(Tprime)); |
8239 | 0 | ret = wc_AesEncrypt(aes, counter, EKY0); |
8240 | 0 | if (ret != 0) |
8241 | 0 | return ret; |
8242 | 0 | xorbuf(Tprime, EKY0, sizeof(Tprime)); |
8243 | |
|
8244 | | #ifdef OPENSSL_EXTRA |
8245 | | if (!out) { |
8246 | | /* authenticated, non-confidential data */ |
8247 | | /* store AAD size for next call */ |
8248 | | aes->aadLen = authInSz; |
8249 | | } |
8250 | | #endif |
8251 | |
|
8252 | | #if defined(WOLFSSL_PIC32MZ_CRYPT) |
8253 | | if (blocks) { |
8254 | | /* use initial IV for HW, but don't use it below */ |
8255 | | XMEMCPY(aes->reg, counter, AES_BLOCK_SIZE); |
8256 | | |
8257 | | ret = wc_Pic32AesCrypt( |
8258 | | aes->key, aes->keylen, aes->reg, AES_BLOCK_SIZE, |
8259 | | out, in, (blocks * AES_BLOCK_SIZE), |
8260 | | PIC32_DECRYPTION, PIC32_ALGO_AES, PIC32_CRYPTOALGO_AES_GCM); |
8261 | | if (ret != 0) |
8262 | | return ret; |
8263 | | } |
8264 | | /* process remainder using partial handling */ |
8265 | | #endif |
8266 | |
|
8267 | 0 | #if defined(HAVE_AES_ECB) && !defined(WOLFSSL_PIC32MZ_CRYPT) |
8268 | | /* some hardware acceleration can gain performance from doing AES encryption |
8269 | | * of the whole buffer at once */ |
8270 | 0 | if (c != p && blocks > 0) { /* can not handle inline decryption */ |
8271 | 0 | while (blocks--) { |
8272 | 0 | IncrementGcmCounter(counter); |
8273 | 0 | XMEMCPY(p, counter, AES_BLOCK_SIZE); |
8274 | 0 | p += AES_BLOCK_SIZE; |
8275 | 0 | } |
8276 | | |
8277 | | /* reset number of blocks and then do encryption */ |
8278 | 0 | blocks = sz / AES_BLOCK_SIZE; |
8279 | |
|
8280 | 0 | wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks); |
8281 | 0 | xorbuf(out, c, AES_BLOCK_SIZE * blocks); |
8282 | 0 | c += AES_BLOCK_SIZE * blocks; |
8283 | 0 | } |
8284 | 0 | else |
8285 | 0 | #endif /* HAVE_AES_ECB && !PIC32MZ */ |
8286 | 0 | { |
8287 | 0 | while (blocks--) { |
8288 | 0 | IncrementGcmCounter(counter); |
8289 | 0 | #if !defined(WOLFSSL_PIC32MZ_CRYPT) |
8290 | 0 | ret = wc_AesEncrypt(aes, counter, scratch); |
8291 | 0 | if (ret != 0) |
8292 | 0 | return ret; |
8293 | 0 | xorbufout(p, scratch, c, AES_BLOCK_SIZE); |
8294 | 0 | #endif |
8295 | 0 | p += AES_BLOCK_SIZE; |
8296 | 0 | c += AES_BLOCK_SIZE; |
8297 | 0 | } |
8298 | 0 | } |
8299 | | |
8300 | 0 | if (partial != 0) { |
8301 | 0 | IncrementGcmCounter(counter); |
8302 | 0 | ret = wc_AesEncrypt(aes, counter, scratch); |
8303 | 0 | if (ret != 0) |
8304 | 0 | return ret; |
8305 | 0 | xorbuf(scratch, c, partial); |
8306 | 0 | XMEMCPY(p, scratch, partial); |
8307 | 0 | } |
8308 | | |
8309 | | /* ConstantCompare returns the cumulative bitwise or of the bitwise xor of |
8310 | | * the pairwise bytes in the strings. |
8311 | | */ |
8312 | 0 | res = ConstantCompare(authTag, Tprime, authTagSz); |
8313 | | /* convert positive retval from ConstantCompare() to all-1s word, in |
8314 | | * constant time. |
8315 | | */ |
8316 | 0 | res = 0 - (sword32)(((word32)(0 - res)) >> 31U); |
8317 | | /* now use res as a mask for constant time return of ret, unless tag |
8318 | | * mismatch, whereupon AES_GCM_AUTH_E is returned. |
8319 | | */ |
8320 | 0 | ret = (ret & ~res) | (res & AES_GCM_AUTH_E); |
8321 | |
|
8322 | 0 | return ret; |
8323 | 0 | } |
8324 | | |
8325 | | /* Software AES - GCM Decrypt */ |
8326 | | int wc_AesGcmDecrypt(Aes* aes, byte* out, const byte* in, word32 sz, |
8327 | | const byte* iv, word32 ivSz, |
8328 | | const byte* authTag, word32 authTagSz, |
8329 | | const byte* authIn, word32 authInSz) |
8330 | 0 | { |
8331 | | #ifdef WOLFSSL_AESNI |
8332 | | int res = AES_GCM_AUTH_E; |
8333 | | #endif |
8334 | | |
8335 | | /* argument checks */ |
8336 | | /* If the sz is non-zero, both in and out must be set. If sz is 0, |
8337 | | * in and out are don't cares, as this is is the GMAC case. */ |
8338 | 0 | if (aes == NULL || iv == NULL || (sz != 0 && (in == NULL || out == NULL)) || |
8339 | 0 | authTag == NULL || authTagSz > AES_BLOCK_SIZE || authTagSz == 0 || |
8340 | 0 | ivSz == 0) { |
8341 | |
|
8342 | 0 | return BAD_FUNC_ARG; |
8343 | 0 | } |
8344 | | |
8345 | 0 | #ifdef WOLF_CRYPTO_CB |
8346 | 0 | if (aes->devId != INVALID_DEVID) { |
8347 | 0 | int crypto_cb_ret = |
8348 | 0 | wc_CryptoCb_AesGcmDecrypt(aes, out, in, sz, iv, ivSz, |
8349 | 0 | authTag, authTagSz, authIn, authInSz); |
8350 | 0 | if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE) |
8351 | 0 | return crypto_cb_ret; |
8352 | | /* fall-through when unavailable */ |
8353 | 0 | } |
8354 | 0 | #endif |
8355 | | |
8356 | | #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES) |
8357 | | /* if async and byte count above threshold */ |
8358 | | /* only 12-byte IV is supported in HW */ |
8359 | | if (aes->asyncDev.marker == WOLFSSL_ASYNC_MARKER_AES && |
8360 | | sz >= WC_ASYNC_THRESH_AES_GCM && ivSz == GCM_NONCE_MID_SZ) { |
8361 | | #if defined(HAVE_CAVIUM) |
8362 | | #ifdef HAVE_CAVIUM_V |
8363 | | if (authInSz == 20) { /* Nitrox V GCM is only working with 20 byte AAD */ |
8364 | | return NitroxAesGcmDecrypt(aes, out, in, sz, |
8365 | | (const byte*)aes->devKey, aes->keylen, iv, ivSz, |
8366 | | authTag, authTagSz, authIn, authInSz); |
8367 | | } |
8368 | | #endif |
8369 | | #elif defined(HAVE_INTEL_QA) |
8370 | | return IntelQaSymAesGcmDecrypt(&aes->asyncDev, out, in, sz, |
8371 | | (const byte*)aes->devKey, aes->keylen, iv, ivSz, |
8372 | | authTag, authTagSz, authIn, authInSz); |
8373 | | #else /* WOLFSSL_ASYNC_CRYPT_TEST */ |
8374 | | if (wc_AsyncTestInit(&aes->asyncDev, ASYNC_TEST_AES_GCM_DECRYPT)) { |
8375 | | WC_ASYNC_TEST* testDev = &aes->asyncDev.test; |
8376 | | testDev->aes.aes = aes; |
8377 | | testDev->aes.out = out; |
8378 | | testDev->aes.in = in; |
8379 | | testDev->aes.sz = sz; |
8380 | | testDev->aes.iv = iv; |
8381 | | testDev->aes.ivSz = ivSz; |
8382 | | testDev->aes.authTag = (byte*)authTag; |
8383 | | testDev->aes.authTagSz = authTagSz; |
8384 | | testDev->aes.authIn = authIn; |
8385 | | testDev->aes.authInSz = authInSz; |
8386 | | return WC_PENDING_E; |
8387 | | } |
8388 | | #endif |
8389 | | } |
8390 | | #endif /* WOLFSSL_ASYNC_CRYPT */ |
8391 | | |
8392 | | #ifdef WOLFSSL_SILABS_SE_ACCEL |
8393 | | return wc_AesGcmDecrypt_silabs( |
8394 | | aes, out, in, sz, iv, ivSz, |
8395 | | authTag, authTagSz, authIn, authInSz); |
8396 | | |
8397 | | #endif |
8398 | | |
8399 | | #ifdef STM32_CRYPTO_AES_GCM |
8400 | | /* The STM standard peripheral library API's doesn't support partial blocks */ |
8401 | | return wc_AesGcmDecrypt_STM32( |
8402 | | aes, out, in, sz, iv, ivSz, |
8403 | | authTag, authTagSz, authIn, authInSz); |
8404 | | #endif /* STM32_CRYPTO_AES_GCM */ |
8405 | | |
8406 | | #ifdef WOLFSSL_AESNI |
8407 | | #ifdef HAVE_INTEL_AVX2 |
8408 | | if (IS_INTEL_AVX2(intel_flags)) { |
8409 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
8410 | | AES_GCM_decrypt_avx2(in, out, authIn, iv, authTag, sz, authInSz, ivSz, |
8411 | | authTagSz, (byte*)aes->key, aes->rounds, &res); |
8412 | | RESTORE_VECTOR_REGISTERS(); |
8413 | | if (res == 0) |
8414 | | return AES_GCM_AUTH_E; |
8415 | | return 0; |
8416 | | } |
8417 | | else |
8418 | | #endif |
8419 | | #ifdef HAVE_INTEL_AVX1 |
8420 | | if (IS_INTEL_AVX1(intel_flags)) { |
8421 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
8422 | | AES_GCM_decrypt_avx1(in, out, authIn, iv, authTag, sz, authInSz, ivSz, |
8423 | | authTagSz, (byte*)aes->key, aes->rounds, &res); |
8424 | | RESTORE_VECTOR_REGISTERS(); |
8425 | | if (res == 0) |
8426 | | return AES_GCM_AUTH_E; |
8427 | | return 0; |
8428 | | } |
8429 | | else |
8430 | | #endif |
8431 | | if (haveAESNI) { |
8432 | | AES_GCM_decrypt(in, out, authIn, iv, authTag, sz, authInSz, ivSz, |
8433 | | authTagSz, (byte*)aes->key, aes->rounds, &res); |
8434 | | if (res == 0) |
8435 | | return AES_GCM_AUTH_E; |
8436 | | return 0; |
8437 | | } |
8438 | | else |
8439 | | #endif |
8440 | 0 | { |
8441 | 0 | return AES_GCM_decrypt_C(aes, out, in, sz, iv, ivSz, authTag, authTagSz, |
8442 | 0 | authIn, authInSz); |
8443 | 0 | } |
8444 | 0 | } |
8445 | | #endif |
8446 | | #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */ |
8447 | | |
8448 | | #ifdef WOLFSSL_AESGCM_STREAM |
8449 | | /* Initialize the AES GCM cipher with an IV. C implementation. |
8450 | | * |
8451 | | * @param [in, out] aes AES object. |
8452 | | * @param [in] iv IV/nonce buffer. |
8453 | | * @param [in] ivSz Length of IV/nonce data. |
8454 | | */ |
8455 | | static WARN_UNUSED_RESULT int AesGcmInit_C(Aes* aes, const byte* iv, word32 ivSz) |
8456 | 0 | { |
8457 | 0 | ALIGN32 byte counter[AES_BLOCK_SIZE]; |
8458 | 0 | int ret; |
8459 | |
|
8460 | 0 | if (ivSz == GCM_NONCE_MID_SZ) { |
8461 | | /* Counter is IV with bottom 4 bytes set to: 0x00,0x00,0x00,0x01. */ |
8462 | 0 | XMEMCPY(counter, iv, ivSz); |
8463 | 0 | XMEMSET(counter + GCM_NONCE_MID_SZ, 0, |
8464 | 0 | AES_BLOCK_SIZE - GCM_NONCE_MID_SZ - 1); |
8465 | 0 | counter[AES_BLOCK_SIZE - 1] = 1; |
8466 | 0 | } |
8467 | 0 | else { |
8468 | | /* Counter is GHASH of IV. */ |
8469 | | #ifdef OPENSSL_EXTRA |
8470 | | word32 aadTemp = aes->aadLen; |
8471 | | aes->aadLen = 0; |
8472 | | #endif |
8473 | 0 | GHASH(aes, NULL, 0, iv, ivSz, counter, AES_BLOCK_SIZE); |
8474 | | #ifdef OPENSSL_EXTRA |
8475 | | aes->aadLen = aadTemp; |
8476 | | #endif |
8477 | 0 | } |
8478 | | |
8479 | | /* Copy in the counter for use with cipher. */ |
8480 | 0 | XMEMCPY(AES_COUNTER(aes), counter, AES_BLOCK_SIZE); |
8481 | | /* Encrypt initial counter into a buffer for GCM. */ |
8482 | 0 | ret = wc_AesEncrypt(aes, counter, AES_INITCTR(aes)); |
8483 | 0 | if (ret != 0) |
8484 | 0 | return ret; |
8485 | | /* Reset state fields. */ |
8486 | 0 | aes->over = 0; |
8487 | 0 | aes->aSz = 0; |
8488 | 0 | aes->cSz = 0; |
8489 | | /* Initialization for GHASH. */ |
8490 | 0 | GHASH_INIT(aes); |
8491 | |
|
8492 | 0 | return 0; |
8493 | 0 | } |
8494 | | |
8495 | | /* Update the AES GCM cipher with data. C implementation. |
8496 | | * |
8497 | | * Only enciphers data. |
8498 | | * |
8499 | | * @param [in, out] aes AES object. |
8500 | | * @param [in] out Cipher text or plaintext buffer. |
8501 | | * @param [in] in Plaintext or cipher text buffer. |
8502 | | * @param [in] sz Length of data. |
8503 | | */ |
8504 | | static WARN_UNUSED_RESULT int AesGcmCryptUpdate_C( |
8505 | | Aes* aes, byte* out, const byte* in, word32 sz) |
8506 | 0 | { |
8507 | 0 | word32 blocks; |
8508 | 0 | word32 partial; |
8509 | 0 | int ret; |
8510 | | |
8511 | | /* Check if previous encrypted block was not used up. */ |
8512 | 0 | if (aes->over > 0) { |
8513 | 0 | byte pSz = AES_BLOCK_SIZE - aes->over; |
8514 | 0 | if (pSz > sz) pSz = sz; |
8515 | | |
8516 | | /* Use some/all of last encrypted block. */ |
8517 | 0 | xorbufout(out, AES_LASTBLOCK(aes) + aes->over, in, pSz); |
8518 | 0 | aes->over = (aes->over + pSz) & (AES_BLOCK_SIZE - 1); |
8519 | | |
8520 | | /* Some data used. */ |
8521 | 0 | sz -= pSz; |
8522 | 0 | in += pSz; |
8523 | 0 | out += pSz; |
8524 | 0 | } |
8525 | | |
8526 | | /* Calculate the number of blocks needing to be encrypted and any leftover. |
8527 | | */ |
8528 | 0 | blocks = sz / AES_BLOCK_SIZE; |
8529 | 0 | partial = sz & (AES_BLOCK_SIZE - 1); |
8530 | |
|
8531 | 0 | #if defined(HAVE_AES_ECB) |
8532 | | /* Some hardware acceleration can gain performance from doing AES encryption |
8533 | | * of the whole buffer at once. |
8534 | | * Overwrites the cipher text before using plaintext - no inline encryption. |
8535 | | */ |
8536 | 0 | if ((out != in) && blocks > 0) { |
8537 | 0 | word32 b; |
8538 | | /* Place incrementing counter blocks into cipher text. */ |
8539 | 0 | for (b = 0; b < blocks; b++) { |
8540 | 0 | IncrementGcmCounter(AES_COUNTER(aes)); |
8541 | 0 | XMEMCPY(out + b * AES_BLOCK_SIZE, AES_COUNTER(aes), AES_BLOCK_SIZE); |
8542 | 0 | } |
8543 | | |
8544 | | /* Encrypt counter blocks. */ |
8545 | 0 | wc_AesEcbEncrypt(aes, out, out, AES_BLOCK_SIZE * blocks); |
8546 | | /* XOR in plaintext. */ |
8547 | 0 | xorbuf(out, in, AES_BLOCK_SIZE * blocks); |
8548 | | /* Skip over processed data. */ |
8549 | 0 | in += AES_BLOCK_SIZE * blocks; |
8550 | 0 | out += AES_BLOCK_SIZE * blocks; |
8551 | 0 | } |
8552 | 0 | else |
8553 | 0 | #endif /* HAVE_AES_ECB */ |
8554 | 0 | { |
8555 | | /* Encrypt block by block. */ |
8556 | 0 | while (blocks--) { |
8557 | 0 | ALIGN32 byte scratch[AES_BLOCK_SIZE]; |
8558 | 0 | IncrementGcmCounter(AES_COUNTER(aes)); |
8559 | | /* Encrypt counter into a buffer. */ |
8560 | 0 | ret = wc_AesEncrypt(aes, AES_COUNTER(aes), scratch); |
8561 | 0 | if (ret != 0) |
8562 | 0 | return ret; |
8563 | | /* XOR plain text into encrypted counter into cipher text buffer. */ |
8564 | 0 | xorbufout(out, scratch, in, AES_BLOCK_SIZE); |
8565 | | /* Data complete. */ |
8566 | 0 | in += AES_BLOCK_SIZE; |
8567 | 0 | out += AES_BLOCK_SIZE; |
8568 | 0 | } |
8569 | 0 | } |
8570 | | |
8571 | 0 | if (partial != 0) { |
8572 | | /* Generate an extra block and use up as much as needed. */ |
8573 | 0 | IncrementGcmCounter(AES_COUNTER(aes)); |
8574 | | /* Encrypt counter into cache. */ |
8575 | 0 | ret = wc_AesEncrypt(aes, AES_COUNTER(aes), AES_LASTBLOCK(aes)); |
8576 | 0 | if (ret != 0) |
8577 | 0 | return ret; |
8578 | | /* XOR plain text into encrypted counter into cipher text buffer. */ |
8579 | 0 | xorbufout(out, AES_LASTBLOCK(aes), in, partial); |
8580 | | /* Keep amount of encrypted block used. */ |
8581 | 0 | aes->over = partial; |
8582 | 0 | } |
8583 | | |
8584 | 0 | return 0; |
8585 | 0 | } |
8586 | | |
8587 | | /* Calculates authentication tag for AES GCM. C implementation. |
8588 | | * |
8589 | | * @param [in, out] aes AES object. |
8590 | | * @param [out] authTag Buffer to store authentication tag in. |
8591 | | * @param [in] authTagSz Length of tag to create. |
8592 | | */ |
8593 | | static WARN_UNUSED_RESULT int AesGcmFinal_C( |
8594 | | Aes* aes, byte* authTag, word32 authTagSz) |
8595 | 0 | { |
8596 | | /* Calculate authentication tag. */ |
8597 | 0 | GHASH_FINAL(aes, authTag, authTagSz); |
8598 | | /* XOR in as much of encrypted counter as is required. */ |
8599 | 0 | xorbuf(authTag, AES_INITCTR(aes), authTagSz); |
8600 | | #ifdef OPENSSL_EXTRA |
8601 | | /* store AAD size for next call */ |
8602 | | aes->aadLen = aes->aSz; |
8603 | | #endif |
8604 | | /* Zeroize last block to protect sensitive data. */ |
8605 | 0 | ForceZero(AES_LASTBLOCK(aes), AES_BLOCK_SIZE); |
8606 | |
|
8607 | 0 | return 0; |
8608 | 0 | } |
8609 | | |
8610 | | #ifdef WOLFSSL_AESNI |
8611 | | |
8612 | | #ifdef __cplusplus |
8613 | | extern "C" { |
8614 | | #endif |
8615 | | |
8616 | | /* Assembly code implementations in: aes_gcm_asm.S */ |
8617 | | #ifdef HAVE_INTEL_AVX2 |
8618 | | extern void AES_GCM_init_avx2(const unsigned char* key, int nr, |
8619 | | const unsigned char* ivec, unsigned int ibytes, unsigned char* h, |
8620 | | unsigned char* counter, unsigned char* initCtr); |
8621 | | extern void AES_GCM_aad_update_avx2(const unsigned char* addt, |
8622 | | unsigned int abytes, unsigned char* tag, unsigned char* h); |
8623 | | extern void AES_GCM_encrypt_block_avx2(const unsigned char* key, int nr, |
8624 | | unsigned char* out, const unsigned char* in, unsigned char* counter); |
8625 | | extern void AES_GCM_ghash_block_avx2(const unsigned char* data, |
8626 | | unsigned char* tag, unsigned char* h); |
8627 | | |
8628 | | extern void AES_GCM_encrypt_update_avx2(const unsigned char* key, int nr, |
8629 | | unsigned char* out, const unsigned char* in, unsigned int nbytes, |
8630 | | unsigned char* tag, unsigned char* h, unsigned char* counter); |
8631 | | extern void AES_GCM_encrypt_final_avx2(unsigned char* tag, |
8632 | | unsigned char* authTag, unsigned int tbytes, unsigned int nbytes, |
8633 | | unsigned int abytes, unsigned char* h, unsigned char* initCtr); |
8634 | | #endif |
8635 | | #ifdef HAVE_INTEL_AVX1 |
8636 | | extern void AES_GCM_init_avx1(const unsigned char* key, int nr, |
8637 | | const unsigned char* ivec, unsigned int ibytes, unsigned char* h, |
8638 | | unsigned char* counter, unsigned char* initCtr); |
8639 | | extern void AES_GCM_aad_update_avx1(const unsigned char* addt, |
8640 | | unsigned int abytes, unsigned char* tag, unsigned char* h); |
8641 | | extern void AES_GCM_encrypt_block_avx1(const unsigned char* key, int nr, |
8642 | | unsigned char* out, const unsigned char* in, unsigned char* counter); |
8643 | | extern void AES_GCM_ghash_block_avx1(const unsigned char* data, |
8644 | | unsigned char* tag, unsigned char* h); |
8645 | | |
8646 | | extern void AES_GCM_encrypt_update_avx1(const unsigned char* key, int nr, |
8647 | | unsigned char* out, const unsigned char* in, unsigned int nbytes, |
8648 | | unsigned char* tag, unsigned char* h, unsigned char* counter); |
8649 | | extern void AES_GCM_encrypt_final_avx1(unsigned char* tag, |
8650 | | unsigned char* authTag, unsigned int tbytes, unsigned int nbytes, |
8651 | | unsigned int abytes, unsigned char* h, unsigned char* initCtr); |
8652 | | #endif |
8653 | | extern void AES_GCM_init_aesni(const unsigned char* key, int nr, |
8654 | | const unsigned char* ivec, unsigned int ibytes, unsigned char* h, |
8655 | | unsigned char* counter, unsigned char* initCtr); |
8656 | | extern void AES_GCM_aad_update_aesni(const unsigned char* addt, |
8657 | | unsigned int abytes, unsigned char* tag, unsigned char* h); |
8658 | | extern void AES_GCM_encrypt_block_aesni(const unsigned char* key, int nr, |
8659 | | unsigned char* out, const unsigned char* in, unsigned char* counter); |
8660 | | extern void AES_GCM_ghash_block_aesni(const unsigned char* data, |
8661 | | unsigned char* tag, unsigned char* h); |
8662 | | |
8663 | | extern void AES_GCM_encrypt_update_aesni(const unsigned char* key, int nr, |
8664 | | unsigned char* out, const unsigned char* in, unsigned int nbytes, |
8665 | | unsigned char* tag, unsigned char* h, unsigned char* counter); |
8666 | | extern void AES_GCM_encrypt_final_aesni(unsigned char* tag, |
8667 | | unsigned char* authTag, unsigned int tbytes, unsigned int nbytes, |
8668 | | unsigned int abytes, unsigned char* h, unsigned char* initCtr); |
8669 | | |
8670 | | #ifdef __cplusplus |
8671 | | } /* extern "C" */ |
8672 | | #endif |
8673 | | |
8674 | | /* Initialize the AES GCM cipher with an IV. AES-NI implementations. |
8675 | | * |
8676 | | * @param [in, out] aes AES object. |
8677 | | * @param [in] iv IV/nonce buffer. |
8678 | | * @param [in] ivSz Length of IV/nonce data. |
8679 | | */ |
8680 | | static WARN_UNUSED_RESULT int AesGcmInit_aesni( |
8681 | | Aes* aes, const byte* iv, word32 ivSz) |
8682 | | { |
8683 | | /* Reset state fields. */ |
8684 | | aes->aSz = 0; |
8685 | | aes->cSz = 0; |
8686 | | /* Set tag to all zeros as initial value. */ |
8687 | | XMEMSET(AES_TAG(aes), 0, AES_BLOCK_SIZE); |
8688 | | /* Reset counts of AAD and cipher text. */ |
8689 | | aes->aOver = 0; |
8690 | | aes->cOver = 0; |
8691 | | |
8692 | | #ifdef HAVE_INTEL_AVX2 |
8693 | | if (IS_INTEL_AVX2(intel_flags)) { |
8694 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
8695 | | AES_GCM_init_avx2((byte*)aes->key, aes->rounds, iv, ivSz, aes->H, |
8696 | | AES_COUNTER(aes), AES_INITCTR(aes)); |
8697 | | RESTORE_VECTOR_REGISTERS(); |
8698 | | } |
8699 | | else |
8700 | | #endif |
8701 | | #ifdef HAVE_INTEL_AVX1 |
8702 | | if (IS_INTEL_AVX1(intel_flags)) { |
8703 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
8704 | | AES_GCM_init_avx1((byte*)aes->key, aes->rounds, iv, ivSz, aes->H, |
8705 | | AES_COUNTER(aes), AES_INITCTR(aes)); |
8706 | | RESTORE_VECTOR_REGISTERS(); |
8707 | | } |
8708 | | else |
8709 | | #endif |
8710 | | { |
8711 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
8712 | | AES_GCM_init_aesni((byte*)aes->key, aes->rounds, iv, ivSz, aes->H, |
8713 | | AES_COUNTER(aes), AES_INITCTR(aes)); |
8714 | | RESTORE_VECTOR_REGISTERS(); |
8715 | | } |
8716 | | return 0; |
8717 | | } |
8718 | | |
8719 | | /* Update the AES GCM for encryption with authentication data. |
8720 | | * |
8721 | | * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code. |
8722 | | * |
8723 | | * @param [in, out] aes AES object. |
8724 | | * @param [in] a Buffer holding authentication data. |
8725 | | * @param [in] aSz Length of authentication data in bytes. |
8726 | | * @param [in] endA Whether no more authentication data is expected. |
8727 | | */ |
8728 | | static WARN_UNUSED_RESULT int AesGcmAadUpdate_aesni( |
8729 | | Aes* aes, const byte* a, word32 aSz, int endA) |
8730 | | { |
8731 | | word32 blocks; |
8732 | | int partial; |
8733 | | |
8734 | | ASSERT_SAVED_VECTOR_REGISTERS(); |
8735 | | |
8736 | | if (aSz != 0 && a != NULL) { |
8737 | | /* Total count of AAD updated. */ |
8738 | | aes->aSz += aSz; |
8739 | | /* Check if we have unprocessed data. */ |
8740 | | if (aes->aOver > 0) { |
8741 | | /* Calculate amount we can use - fill up the block. */ |
8742 | | byte sz = AES_BLOCK_SIZE - aes->aOver; |
8743 | | if (sz > aSz) { |
8744 | | sz = aSz; |
8745 | | } |
8746 | | /* Copy extra into last GHASH block array and update count. */ |
8747 | | XMEMCPY(AES_LASTGBLOCK(aes) + aes->aOver, a, sz); |
8748 | | aes->aOver += sz; |
8749 | | if (aes->aOver == AES_BLOCK_SIZE) { |
8750 | | /* We have filled up the block and can process. */ |
8751 | | #ifdef HAVE_INTEL_AVX2 |
8752 | | if (IS_INTEL_AVX2(intel_flags)) { |
8753 | | AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes), |
8754 | | aes->H); |
8755 | | } |
8756 | | else |
8757 | | #endif |
8758 | | #ifdef HAVE_INTEL_AVX1 |
8759 | | if (IS_INTEL_AVX1(intel_flags)) { |
8760 | | AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes), |
8761 | | aes->H); |
8762 | | } |
8763 | | else |
8764 | | #endif |
8765 | | { |
8766 | | AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes), |
8767 | | aes->H); |
8768 | | } |
8769 | | /* Reset count. */ |
8770 | | aes->aOver = 0; |
8771 | | } |
8772 | | /* Used up some data. */ |
8773 | | aSz -= sz; |
8774 | | a += sz; |
8775 | | } |
8776 | | |
8777 | | /* Calculate number of blocks of AAD and the leftover. */ |
8778 | | blocks = aSz / AES_BLOCK_SIZE; |
8779 | | partial = aSz % AES_BLOCK_SIZE; |
8780 | | if (blocks > 0) { |
8781 | | /* GHASH full blocks now. */ |
8782 | | #ifdef HAVE_INTEL_AVX2 |
8783 | | if (IS_INTEL_AVX2(intel_flags)) { |
8784 | | AES_GCM_aad_update_avx2(a, blocks * AES_BLOCK_SIZE, |
8785 | | AES_TAG(aes), aes->H); |
8786 | | } |
8787 | | else |
8788 | | #endif |
8789 | | #ifdef HAVE_INTEL_AVX1 |
8790 | | if (IS_INTEL_AVX1(intel_flags)) { |
8791 | | AES_GCM_aad_update_avx1(a, blocks * AES_BLOCK_SIZE, |
8792 | | AES_TAG(aes), aes->H); |
8793 | | } |
8794 | | else |
8795 | | #endif |
8796 | | { |
8797 | | AES_GCM_aad_update_aesni(a, blocks * AES_BLOCK_SIZE, |
8798 | | AES_TAG(aes), aes->H); |
8799 | | } |
8800 | | /* Skip over to end of AAD blocks. */ |
8801 | | a += blocks * AES_BLOCK_SIZE; |
8802 | | } |
8803 | | if (partial != 0) { |
8804 | | /* Cache the partial block. */ |
8805 | | XMEMCPY(AES_LASTGBLOCK(aes), a, partial); |
8806 | | aes->aOver = (byte)partial; |
8807 | | } |
8808 | | } |
8809 | | if (endA && (aes->aOver > 0)) { |
8810 | | /* No more AAD coming and we have a partial block. */ |
8811 | | /* Fill the rest of the block with zeros. */ |
8812 | | XMEMSET(AES_LASTGBLOCK(aes) + aes->aOver, 0, |
8813 | | AES_BLOCK_SIZE - aes->aOver); |
8814 | | /* GHASH last AAD block. */ |
8815 | | #ifdef HAVE_INTEL_AVX2 |
8816 | | if (IS_INTEL_AVX2(intel_flags)) { |
8817 | | AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H); |
8818 | | } |
8819 | | else |
8820 | | #endif |
8821 | | #ifdef HAVE_INTEL_AVX1 |
8822 | | if (IS_INTEL_AVX1(intel_flags)) { |
8823 | | AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H); |
8824 | | } |
8825 | | else |
8826 | | #endif |
8827 | | { |
8828 | | AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes), |
8829 | | aes->H); |
8830 | | } |
8831 | | /* Clear partial count for next time through. */ |
8832 | | aes->aOver = 0; |
8833 | | } |
8834 | | |
8835 | | return 0; |
8836 | | } |
8837 | | |
8838 | | /* Update the AES GCM for encryption with data and/or authentication data. |
8839 | | * |
8840 | | * Implementation uses AVX2, AVX1 or straight AES-NI optimized assembly code. |
8841 | | * |
8842 | | * @param [in, out] aes AES object. |
8843 | | * @param [out] c Buffer to hold cipher text. |
8844 | | * @param [in] p Buffer holding plaintext. |
8845 | | * @param [in] cSz Length of cipher text/plaintext in bytes. |
8846 | | * @param [in] a Buffer holding authentication data. |
8847 | | * @param [in] aSz Length of authentication data in bytes. |
8848 | | */ |
8849 | | static WARN_UNUSED_RESULT int AesGcmEncryptUpdate_aesni( |
8850 | | Aes* aes, byte* c, const byte* p, word32 cSz, const byte* a, word32 aSz) |
8851 | | { |
8852 | | word32 blocks; |
8853 | | int partial; |
8854 | | int ret; |
8855 | | |
8856 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
8857 | | /* Hash in A, the Authentication Data */ |
8858 | | ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL)); |
8859 | | if (ret != 0) |
8860 | | return ret; |
8861 | | |
8862 | | /* Encrypt plaintext and Hash in C, the Cipher text */ |
8863 | | if (cSz != 0 && c != NULL) { |
8864 | | /* Update count of cipher text we have hashed. */ |
8865 | | aes->cSz += cSz; |
8866 | | if (aes->cOver > 0) { |
8867 | | /* Calculate amount we can use - fill up the block. */ |
8868 | | byte sz = AES_BLOCK_SIZE - aes->cOver; |
8869 | | if (sz > cSz) { |
8870 | | sz = cSz; |
8871 | | } |
8872 | | /* Encrypt some of the plaintext. */ |
8873 | | xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, p, sz); |
8874 | | XMEMCPY(c, AES_LASTGBLOCK(aes) + aes->cOver, sz); |
8875 | | /* Update count of unsed encrypted counter. */ |
8876 | | aes->cOver += sz; |
8877 | | if (aes->cOver == AES_BLOCK_SIZE) { |
8878 | | /* We have filled up the block and can process. */ |
8879 | | #ifdef HAVE_INTEL_AVX2 |
8880 | | if (IS_INTEL_AVX2(intel_flags)) { |
8881 | | AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes), |
8882 | | aes->H); |
8883 | | } |
8884 | | else |
8885 | | #endif |
8886 | | #ifdef HAVE_INTEL_AVX1 |
8887 | | if (IS_INTEL_AVX1(intel_flags)) { |
8888 | | AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes), |
8889 | | aes->H); |
8890 | | } |
8891 | | else |
8892 | | #endif |
8893 | | { |
8894 | | AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes), |
8895 | | aes->H); |
8896 | | } |
8897 | | /* Reset count. */ |
8898 | | aes->cOver = 0; |
8899 | | } |
8900 | | /* Used up some data. */ |
8901 | | cSz -= sz; |
8902 | | p += sz; |
8903 | | c += sz; |
8904 | | } |
8905 | | |
8906 | | /* Calculate number of blocks of plaintext and the leftover. */ |
8907 | | blocks = cSz / AES_BLOCK_SIZE; |
8908 | | partial = cSz % AES_BLOCK_SIZE; |
8909 | | if (blocks > 0) { |
8910 | | /* Encrypt and GHASH full blocks now. */ |
8911 | | #ifdef HAVE_INTEL_AVX2 |
8912 | | if (IS_INTEL_AVX2(intel_flags)) { |
8913 | | AES_GCM_encrypt_update_avx2((byte*)aes->key, aes->rounds, c, p, |
8914 | | blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H, |
8915 | | AES_COUNTER(aes)); |
8916 | | } |
8917 | | else |
8918 | | #endif |
8919 | | #ifdef HAVE_INTEL_AVX1 |
8920 | | if (IS_INTEL_AVX1(intel_flags)) { |
8921 | | AES_GCM_encrypt_update_avx1((byte*)aes->key, aes->rounds, c, p, |
8922 | | blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H, |
8923 | | AES_COUNTER(aes)); |
8924 | | } |
8925 | | else |
8926 | | #endif |
8927 | | { |
8928 | | AES_GCM_encrypt_update_aesni((byte*)aes->key, aes->rounds, c, p, |
8929 | | blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H, |
8930 | | AES_COUNTER(aes)); |
8931 | | } |
8932 | | /* Skip over to end of blocks. */ |
8933 | | p += blocks * AES_BLOCK_SIZE; |
8934 | | c += blocks * AES_BLOCK_SIZE; |
8935 | | } |
8936 | | if (partial != 0) { |
8937 | | /* Encrypt the counter - XOR in zeros as proxy for plaintext. */ |
8938 | | XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE); |
8939 | | #ifdef HAVE_INTEL_AVX2 |
8940 | | if (IS_INTEL_AVX2(intel_flags)) { |
8941 | | AES_GCM_encrypt_block_avx2((byte*)aes->key, aes->rounds, |
8942 | | AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes)); |
8943 | | } |
8944 | | else |
8945 | | #endif |
8946 | | #ifdef HAVE_INTEL_AVX1 |
8947 | | if (IS_INTEL_AVX1(intel_flags)) { |
8948 | | AES_GCM_encrypt_block_avx1((byte*)aes->key, aes->rounds, |
8949 | | AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes)); |
8950 | | } |
8951 | | else |
8952 | | #endif |
8953 | | { |
8954 | | AES_GCM_encrypt_block_aesni((byte*)aes->key, aes->rounds, |
8955 | | AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes)); |
8956 | | } |
8957 | | /* XOR the remaining plaintext to calculate cipher text. |
8958 | | * Keep cipher text for GHASH of last partial block. |
8959 | | */ |
8960 | | xorbuf(AES_LASTGBLOCK(aes), p, partial); |
8961 | | XMEMCPY(c, AES_LASTGBLOCK(aes), partial); |
8962 | | /* Update count of the block used. */ |
8963 | | aes->cOver = (byte)partial; |
8964 | | } |
8965 | | } |
8966 | | RESTORE_VECTOR_REGISTERS(); |
8967 | | return 0; |
8968 | | } |
8969 | | |
8970 | | /* Finalize the AES GCM for encryption and calculate the authentication tag. |
8971 | | * |
8972 | | * Calls AVX2, AVX1 or straight AES-NI optimized assembly code. |
8973 | | * |
8974 | | * @param [in, out] aes AES object. |
8975 | | * @param [in] authTag Buffer to hold authentication tag. |
8976 | | * @param [in] authTagSz Length of authentication tag in bytes. |
8977 | | * @return 0 on success. |
8978 | | */ |
8979 | | static WARN_UNUSED_RESULT int AesGcmEncryptFinal_aesni( |
8980 | | Aes* aes, byte* authTag, word32 authTagSz) |
8981 | | { |
8982 | | /* AAD block incomplete when > 0 */ |
8983 | | byte over = aes->aOver; |
8984 | | |
8985 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
8986 | | if (aes->cOver > 0) { |
8987 | | /* Cipher text block incomplete. */ |
8988 | | over = aes->cOver; |
8989 | | } |
8990 | | if (over > 0) { |
8991 | | /* Fill the rest of the block with zeros. */ |
8992 | | XMEMSET(AES_LASTGBLOCK(aes) + over, 0, AES_BLOCK_SIZE - over); |
8993 | | /* GHASH last cipher block. */ |
8994 | | #ifdef HAVE_INTEL_AVX2 |
8995 | | if (IS_INTEL_AVX2(intel_flags)) { |
8996 | | AES_GCM_ghash_block_avx2(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H); |
8997 | | } |
8998 | | else |
8999 | | #endif |
9000 | | #ifdef HAVE_INTEL_AVX1 |
9001 | | if (IS_INTEL_AVX1(intel_flags)) { |
9002 | | AES_GCM_ghash_block_avx1(AES_LASTGBLOCK(aes), AES_TAG(aes), aes->H); |
9003 | | } |
9004 | | else |
9005 | | #endif |
9006 | | { |
9007 | | AES_GCM_ghash_block_aesni(AES_LASTGBLOCK(aes), AES_TAG(aes), |
9008 | | aes->H); |
9009 | | } |
9010 | | } |
9011 | | /* Calculate the authentication tag. */ |
9012 | | #ifdef HAVE_INTEL_AVX2 |
9013 | | if (IS_INTEL_AVX2(intel_flags)) { |
9014 | | AES_GCM_encrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz, |
9015 | | aes->aSz, aes->H, AES_INITCTR(aes)); |
9016 | | } |
9017 | | else |
9018 | | #endif |
9019 | | #ifdef HAVE_INTEL_AVX1 |
9020 | | if (IS_INTEL_AVX1(intel_flags)) { |
9021 | | AES_GCM_encrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz, |
9022 | | aes->aSz, aes->H, AES_INITCTR(aes)); |
9023 | | } |
9024 | | else |
9025 | | #endif |
9026 | | { |
9027 | | AES_GCM_encrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz, |
9028 | | aes->aSz, aes->H, AES_INITCTR(aes)); |
9029 | | } |
9030 | | RESTORE_VECTOR_REGISTERS(); |
9031 | | return 0; |
9032 | | } |
9033 | | |
9034 | | #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT) |
9035 | | |
9036 | | #ifdef __cplusplus |
9037 | | extern "C" { |
9038 | | #endif |
9039 | | |
9040 | | /* Assembly code implementations in: aes_gcm_asm.S */ |
9041 | | #ifdef HAVE_INTEL_AVX2 |
9042 | | extern void AES_GCM_decrypt_update_avx2(const unsigned char* key, int nr, |
9043 | | unsigned char* out, const unsigned char* in, unsigned int nbytes, |
9044 | | unsigned char* tag, unsigned char* h, unsigned char* counter); |
9045 | | extern void AES_GCM_decrypt_final_avx2(unsigned char* tag, |
9046 | | const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes, |
9047 | | unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res); |
9048 | | #endif |
9049 | | #ifdef HAVE_INTEL_AVX1 |
9050 | | extern void AES_GCM_decrypt_update_avx1(const unsigned char* key, int nr, |
9051 | | unsigned char* out, const unsigned char* in, unsigned int nbytes, |
9052 | | unsigned char* tag, unsigned char* h, unsigned char* counter); |
9053 | | extern void AES_GCM_decrypt_final_avx1(unsigned char* tag, |
9054 | | const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes, |
9055 | | unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res); |
9056 | | #endif |
9057 | | extern void AES_GCM_decrypt_update_aesni(const unsigned char* key, int nr, |
9058 | | unsigned char* out, const unsigned char* in, unsigned int nbytes, |
9059 | | unsigned char* tag, unsigned char* h, unsigned char* counter); |
9060 | | extern void AES_GCM_decrypt_final_aesni(unsigned char* tag, |
9061 | | const unsigned char* authTag, unsigned int tbytes, unsigned int nbytes, |
9062 | | unsigned int abytes, unsigned char* h, unsigned char* initCtr, int* res); |
9063 | | |
9064 | | #ifdef __cplusplus |
9065 | | } /* extern "C" */ |
9066 | | #endif |
9067 | | |
9068 | | /* Update the AES GCM for decryption with data and/or authentication data. |
9069 | | * |
9070 | | * @param [in, out] aes AES object. |
9071 | | * @param [out] p Buffer to hold plaintext. |
9072 | | * @param [in] c Buffer holding ciper text. |
9073 | | * @param [in] cSz Length of cipher text/plaintext in bytes. |
9074 | | * @param [in] a Buffer holding authentication data. |
9075 | | * @param [in] aSz Length of authentication data in bytes. |
9076 | | */ |
9077 | | static WARN_UNUSED_RESULT int AesGcmDecryptUpdate_aesni( |
9078 | | Aes* aes, byte* p, const byte* c, word32 cSz, const byte* a, word32 aSz) |
9079 | | { |
9080 | | word32 blocks; |
9081 | | int partial; |
9082 | | int ret; |
9083 | | |
9084 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
9085 | | /* Hash in A, the Authentication Data */ |
9086 | | ret = AesGcmAadUpdate_aesni(aes, a, aSz, (cSz > 0) && (c != NULL)); |
9087 | | if (ret != 0) |
9088 | | return ret; |
9089 | | |
9090 | | /* Hash in C, the Cipher text, and decrypt. */ |
9091 | | if (cSz != 0 && p != NULL) { |
9092 | | /* Update count of cipher text we have hashed. */ |
9093 | | aes->cSz += cSz; |
9094 | | if (aes->cOver > 0) { |
9095 | | /* Calculate amount we can use - fill up the block. */ |
9096 | | byte sz = AES_BLOCK_SIZE - aes->cOver; |
9097 | | if (sz > cSz) { |
9098 | | sz = cSz; |
9099 | | } |
9100 | | /* Keep a copy of the cipher text for GHASH. */ |
9101 | | XMEMCPY(AES_LASTBLOCK(aes) + aes->cOver, c, sz); |
9102 | | /* Decrypt some of the cipher text. */ |
9103 | | xorbuf(AES_LASTGBLOCK(aes) + aes->cOver, c, sz); |
9104 | | XMEMCPY(p, AES_LASTGBLOCK(aes) + aes->cOver, sz); |
9105 | | /* Update count of unsed encrypted counter. */ |
9106 | | aes->cOver += sz; |
9107 | | if (aes->cOver == AES_BLOCK_SIZE) { |
9108 | | /* We have filled up the block and can process. */ |
9109 | | #ifdef HAVE_INTEL_AVX2 |
9110 | | if (IS_INTEL_AVX2(intel_flags)) { |
9111 | | AES_GCM_ghash_block_avx2(AES_LASTBLOCK(aes), AES_TAG(aes), |
9112 | | aes->H); |
9113 | | } |
9114 | | else |
9115 | | #endif |
9116 | | #ifdef HAVE_INTEL_AVX1 |
9117 | | if (IS_INTEL_AVX1(intel_flags)) { |
9118 | | AES_GCM_ghash_block_avx1(AES_LASTBLOCK(aes), AES_TAG(aes), |
9119 | | aes->H); |
9120 | | } |
9121 | | else |
9122 | | #endif |
9123 | | { |
9124 | | AES_GCM_ghash_block_aesni(AES_LASTBLOCK(aes), AES_TAG(aes), |
9125 | | aes->H); |
9126 | | } |
9127 | | /* Reset count. */ |
9128 | | aes->cOver = 0; |
9129 | | } |
9130 | | /* Used up some data. */ |
9131 | | cSz -= sz; |
9132 | | c += sz; |
9133 | | p += sz; |
9134 | | } |
9135 | | |
9136 | | /* Calculate number of blocks of plaintext and the leftover. */ |
9137 | | blocks = cSz / AES_BLOCK_SIZE; |
9138 | | partial = cSz % AES_BLOCK_SIZE; |
9139 | | if (blocks > 0) { |
9140 | | /* Decrypt and GHASH full blocks now. */ |
9141 | | #ifdef HAVE_INTEL_AVX2 |
9142 | | if (IS_INTEL_AVX2(intel_flags)) { |
9143 | | AES_GCM_decrypt_update_avx2((byte*)aes->key, aes->rounds, p, c, |
9144 | | blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H, |
9145 | | AES_COUNTER(aes)); |
9146 | | } |
9147 | | else |
9148 | | #endif |
9149 | | #ifdef HAVE_INTEL_AVX1 |
9150 | | if (IS_INTEL_AVX1(intel_flags)) { |
9151 | | AES_GCM_decrypt_update_avx1((byte*)aes->key, aes->rounds, p, c, |
9152 | | blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H, |
9153 | | AES_COUNTER(aes)); |
9154 | | } |
9155 | | else |
9156 | | #endif |
9157 | | { |
9158 | | AES_GCM_decrypt_update_aesni((byte*)aes->key, aes->rounds, p, c, |
9159 | | blocks * AES_BLOCK_SIZE, AES_TAG(aes), aes->H, |
9160 | | AES_COUNTER(aes)); |
9161 | | } |
9162 | | /* Skip over to end of blocks. */ |
9163 | | c += blocks * AES_BLOCK_SIZE; |
9164 | | p += blocks * AES_BLOCK_SIZE; |
9165 | | } |
9166 | | if (partial != 0) { |
9167 | | /* Encrypt the counter - XOR in zeros as proxy for cipher text. */ |
9168 | | XMEMSET(AES_LASTGBLOCK(aes), 0, AES_BLOCK_SIZE); |
9169 | | #ifdef HAVE_INTEL_AVX2 |
9170 | | if (IS_INTEL_AVX2(intel_flags)) { |
9171 | | AES_GCM_encrypt_block_avx2((byte*)aes->key, aes->rounds, |
9172 | | AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes)); |
9173 | | } |
9174 | | else |
9175 | | #endif |
9176 | | #ifdef HAVE_INTEL_AVX1 |
9177 | | if (IS_INTEL_AVX1(intel_flags)) { |
9178 | | AES_GCM_encrypt_block_avx1((byte*)aes->key, aes->rounds, |
9179 | | AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes)); |
9180 | | } |
9181 | | else |
9182 | | #endif |
9183 | | { |
9184 | | AES_GCM_encrypt_block_aesni((byte*)aes->key, aes->rounds, |
9185 | | AES_LASTGBLOCK(aes), AES_LASTGBLOCK(aes), AES_COUNTER(aes)); |
9186 | | } |
9187 | | /* Keep cipher text for GHASH of last partial block. */ |
9188 | | XMEMCPY(AES_LASTBLOCK(aes), c, partial); |
9189 | | /* XOR the remaining cipher text to calculate plaintext. */ |
9190 | | xorbuf(AES_LASTGBLOCK(aes), c, partial); |
9191 | | XMEMCPY(p, AES_LASTGBLOCK(aes), partial); |
9192 | | /* Update count of the block used. */ |
9193 | | aes->cOver = (byte)partial; |
9194 | | } |
9195 | | } |
9196 | | RESTORE_VECTOR_REGISTERS(); |
9197 | | return 0; |
9198 | | } |
9199 | | |
9200 | | /* Finalize the AES GCM for decryption and check the authentication tag. |
9201 | | * |
9202 | | * Calls AVX2, AVX1 or straight AES-NI optimized assembly code. |
9203 | | * |
9204 | | * @param [in, out] aes AES object. |
9205 | | * @param [in] authTag Buffer holding authentication tag. |
9206 | | * @param [in] authTagSz Length of authentication tag in bytes. |
9207 | | * @return 0 on success. |
9208 | | * @return AES_GCM_AUTH_E when authentication tag doesn't match calculated |
9209 | | * value. |
9210 | | */ |
9211 | | static WARN_UNUSED_RESULT int AesGcmDecryptFinal_aesni( |
9212 | | Aes* aes, const byte* authTag, word32 authTagSz) |
9213 | | { |
9214 | | int ret = 0; |
9215 | | int res; |
9216 | | /* AAD block incomplete when > 0 */ |
9217 | | byte over = aes->aOver; |
9218 | | byte *lastBlock = AES_LASTGBLOCK(aes); |
9219 | | |
9220 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
9221 | | if (aes->cOver > 0) { |
9222 | | /* Cipher text block incomplete. */ |
9223 | | over = aes->cOver; |
9224 | | lastBlock = AES_LASTBLOCK(aes); |
9225 | | } |
9226 | | if (over > 0) { |
9227 | | /* Zeroize the unused part of the block. */ |
9228 | | XMEMSET(lastBlock + over, 0, AES_BLOCK_SIZE - over); |
9229 | | /* Hash the last block of cipher text. */ |
9230 | | #ifdef HAVE_INTEL_AVX2 |
9231 | | if (IS_INTEL_AVX2(intel_flags)) { |
9232 | | AES_GCM_ghash_block_avx2(lastBlock, AES_TAG(aes), aes->H); |
9233 | | } |
9234 | | else |
9235 | | #endif |
9236 | | #ifdef HAVE_INTEL_AVX1 |
9237 | | if (IS_INTEL_AVX1(intel_flags)) { |
9238 | | AES_GCM_ghash_block_avx1(lastBlock, AES_TAG(aes), aes->H); |
9239 | | } |
9240 | | else |
9241 | | #endif |
9242 | | { |
9243 | | AES_GCM_ghash_block_aesni(lastBlock, AES_TAG(aes), aes->H); |
9244 | | } |
9245 | | } |
9246 | | /* Calculate and compare the authentication tag. */ |
9247 | | #ifdef HAVE_INTEL_AVX2 |
9248 | | if (IS_INTEL_AVX2(intel_flags)) { |
9249 | | AES_GCM_decrypt_final_avx2(AES_TAG(aes), authTag, authTagSz, aes->cSz, |
9250 | | aes->aSz, aes->H, AES_INITCTR(aes), &res); |
9251 | | } |
9252 | | else |
9253 | | #endif |
9254 | | #ifdef HAVE_INTEL_AVX1 |
9255 | | if (IS_INTEL_AVX1(intel_flags)) { |
9256 | | AES_GCM_decrypt_final_avx1(AES_TAG(aes), authTag, authTagSz, aes->cSz, |
9257 | | aes->aSz, aes->H, AES_INITCTR(aes), &res); |
9258 | | } |
9259 | | else |
9260 | | #endif |
9261 | | { |
9262 | | AES_GCM_decrypt_final_aesni(AES_TAG(aes), authTag, authTagSz, aes->cSz, |
9263 | | aes->aSz, aes->H, AES_INITCTR(aes), &res); |
9264 | | } |
9265 | | RESTORE_VECTOR_REGISTERS(); |
9266 | | /* Return error code when calculated doesn't match input. */ |
9267 | | if (res == 0) { |
9268 | | ret = AES_GCM_AUTH_E; |
9269 | | } |
9270 | | return ret; |
9271 | | } |
9272 | | #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */ |
9273 | | #endif /* WOLFSSL_AESNI */ |
9274 | | |
9275 | | /* Initialize an AES GCM cipher for encryption or decryption. |
9276 | | * |
9277 | | * Must call wc_AesInit() before calling this function. |
9278 | | * |
9279 | | * @param [in, out] aes AES object. |
9280 | | * @param [in] key Buffer holding key. |
9281 | | * @param [in] len Length of key in bytes. |
9282 | | * @param [in] iv Buffer holding IV/nonce. |
9283 | | * @param [in] ivSz Length of IV/nonce in bytes. |
9284 | | * @return 0 on success. |
9285 | | * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer |
9286 | | * is NULL, or the IV is NULL and no previous IV has been set. |
9287 | | * @return MEMORY_E when dynamic memory allocation fails. (WOLFSSL_SMALL_STACK) |
9288 | | */ |
9289 | | int wc_AesGcmInit(Aes* aes, const byte* key, word32 len, const byte* iv, |
9290 | | word32 ivSz) |
9291 | 0 | { |
9292 | 0 | int ret = 0; |
9293 | | |
9294 | | /* Check validity of parameters. */ |
9295 | 0 | if ((aes == NULL) || ((len > 0) && (key == NULL)) || |
9296 | 0 | ((ivSz == 0) && (iv != NULL)) || (ivSz > AES_BLOCK_SIZE) || |
9297 | 0 | ((ivSz > 0) && (iv == NULL))) { |
9298 | 0 | ret = BAD_FUNC_ARG; |
9299 | 0 | } |
9300 | |
|
9301 | 0 | #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI) |
9302 | 0 | if ((ret == 0) && (aes->streamData == NULL)) { |
9303 | | /* Allocate buffers for streaming. */ |
9304 | 0 | aes->streamData = (byte*)XMALLOC(5 * AES_BLOCK_SIZE, aes->heap, |
9305 | 0 | DYNAMIC_TYPE_AES); |
9306 | 0 | if (aes->streamData == NULL) { |
9307 | 0 | ret = MEMORY_E; |
9308 | 0 | } |
9309 | 0 | } |
9310 | 0 | #endif |
9311 | | |
9312 | | /* Set the key if passed in. */ |
9313 | 0 | if ((ret == 0) && (key != NULL)) { |
9314 | 0 | ret = wc_AesGcmSetKey(aes, key, len); |
9315 | 0 | } |
9316 | |
|
9317 | 0 | if (ret == 0) { |
9318 | | /* Setup with IV if needed. */ |
9319 | 0 | if (iv != NULL) { |
9320 | | /* Cache the IV in AES GCM object. */ |
9321 | 0 | XMEMCPY((byte*)aes->reg, iv, ivSz); |
9322 | 0 | aes->nonceSz = ivSz; |
9323 | 0 | } |
9324 | 0 | else if (aes->nonceSz != 0) { |
9325 | | /* Copy out the cached copy. */ |
9326 | 0 | iv = (byte*)aes->reg; |
9327 | 0 | ivSz = aes->nonceSz; |
9328 | 0 | } |
9329 | |
|
9330 | 0 | if (iv != NULL) { |
9331 | | /* Initialize with the IV. */ |
9332 | | #ifdef WOLFSSL_AESNI |
9333 | | if (haveAESNI |
9334 | | #ifdef HAVE_INTEL_AVX2 |
9335 | | || IS_INTEL_AVX2(intel_flags) |
9336 | | #endif |
9337 | | #ifdef HAVE_INTEL_AVX1 |
9338 | | || IS_INTEL_AVX1(intel_flags) |
9339 | | #endif |
9340 | | ) { |
9341 | | ret = AesGcmInit_aesni(aes, iv, ivSz); |
9342 | | } |
9343 | | else |
9344 | | #endif |
9345 | 0 | { |
9346 | 0 | ret = AesGcmInit_C(aes, iv, ivSz); |
9347 | 0 | } |
9348 | |
|
9349 | 0 | aes->nonceSet = 1; |
9350 | 0 | } |
9351 | 0 | } |
9352 | |
|
9353 | 0 | return ret; |
9354 | 0 | } |
9355 | | |
9356 | | /* Initialize an AES GCM cipher for encryption. |
9357 | | * |
9358 | | * Must call wc_AesInit() before calling this function. |
9359 | | * |
9360 | | * @param [in, out] aes AES object. |
9361 | | * @param [in] key Buffer holding key. |
9362 | | * @param [in] len Length of key in bytes. |
9363 | | * @param [in] iv Buffer holding IV/nonce. |
9364 | | * @param [in] ivSz Length of IV/nonce in bytes. |
9365 | | * @return 0 on success. |
9366 | | * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer |
9367 | | * is NULL, or the IV is NULL and no previous IV has been set. |
9368 | | */ |
9369 | | int wc_AesGcmEncryptInit(Aes* aes, const byte* key, word32 len, const byte* iv, |
9370 | | word32 ivSz) |
9371 | 0 | { |
9372 | 0 | return wc_AesGcmInit(aes, key, len, iv, ivSz); |
9373 | 0 | } |
9374 | | |
9375 | | /* Initialize an AES GCM cipher for encryption or decryption. Get IV. |
9376 | | * |
9377 | | * Must call wc_AesInit() before calling this function. |
9378 | | * |
9379 | | * @param [in, out] aes AES object. |
9380 | | * @param [in] key Buffer holding key. |
9381 | | * @param [in] len Length of key in bytes. |
9382 | | * @param [in] iv Buffer holding IV/nonce. |
9383 | | * @param [in] ivSz Length of IV/nonce in bytes. |
9384 | | * @return 0 on success. |
9385 | | * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer |
9386 | | * is NULL, or the IV is NULL and no previous IV has been set. |
9387 | | */ |
9388 | | int wc_AesGcmEncryptInit_ex(Aes* aes, const byte* key, word32 len, byte* ivOut, |
9389 | | word32 ivOutSz) |
9390 | 0 | { |
9391 | 0 | XMEMCPY(ivOut, aes->reg, ivOutSz); |
9392 | 0 | return wc_AesGcmInit(aes, key, len, NULL, 0); |
9393 | 0 | } |
9394 | | |
9395 | | /* Update the AES GCM for encryption with data and/or authentication data. |
9396 | | * |
9397 | | * All the AAD must be passed to update before the plaintext. |
9398 | | * Last part of AAD can be passed with first part of plaintext. |
9399 | | * |
9400 | | * Must set key and IV before calling this function. |
9401 | | * Must call wc_AesGcmInit() before calling this function. |
9402 | | * |
9403 | | * @param [in, out] aes AES object. |
9404 | | * @param [out] out Buffer to hold cipher text. |
9405 | | * @param [in] in Buffer holding plaintext. |
9406 | | * @param [in] sz Length of plaintext in bytes. |
9407 | | * @param [in] authIn Buffer holding authentication data. |
9408 | | * @param [in] authInSz Length of authentication data in bytes. |
9409 | | * @return 0 on success. |
9410 | | * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer |
9411 | | * is NULL. |
9412 | | */ |
9413 | | int wc_AesGcmEncryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz, |
9414 | | const byte* authIn, word32 authInSz) |
9415 | 0 | { |
9416 | 0 | int ret = 0; |
9417 | | |
9418 | | /* Check validity of parameters. */ |
9419 | 0 | if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) && |
9420 | 0 | ((out == NULL) || (in == NULL)))) { |
9421 | 0 | ret = BAD_FUNC_ARG; |
9422 | 0 | } |
9423 | | |
9424 | | /* Check key has been set. */ |
9425 | 0 | if ((ret == 0) && (!aes->gcmKeySet)) { |
9426 | 0 | ret = MISSING_KEY; |
9427 | 0 | } |
9428 | | /* Check IV has been set. */ |
9429 | 0 | if ((ret == 0) && (!aes->nonceSet)) { |
9430 | 0 | ret = MISSING_IV; |
9431 | 0 | } |
9432 | |
|
9433 | 0 | if ((ret == 0) && aes->ctrSet && (aes->aSz == 0) && (aes->cSz == 0)) { |
9434 | 0 | aes->invokeCtr[0]++; |
9435 | 0 | if (aes->invokeCtr[0] == 0) { |
9436 | 0 | aes->invokeCtr[1]++; |
9437 | 0 | if (aes->invokeCtr[1] == 0) |
9438 | 0 | ret = AES_GCM_OVERFLOW_E; |
9439 | 0 | } |
9440 | 0 | } |
9441 | |
|
9442 | 0 | if (ret == 0) { |
9443 | | /* Encrypt with AAD and/or plaintext. */ |
9444 | | #if defined(WOLFSSL_AESNI) |
9445 | | if (haveAESNI |
9446 | | #ifdef HAVE_INTEL_AVX2 |
9447 | | || IS_INTEL_AVX2(intel_flags) |
9448 | | #endif |
9449 | | #ifdef HAVE_INTEL_AVX1 |
9450 | | || IS_INTEL_AVX1(intel_flags) |
9451 | | #endif |
9452 | | ) { |
9453 | | ret = AesGcmEncryptUpdate_aesni(aes, out, in, sz, authIn, authInSz); |
9454 | | } |
9455 | | else |
9456 | | #endif |
9457 | 0 | { |
9458 | | /* Encrypt the plaintext. */ |
9459 | 0 | ret = AesGcmCryptUpdate_C(aes, out, in, sz); |
9460 | 0 | if (ret != 0) |
9461 | 0 | return ret; |
9462 | | /* Update the authenication tag with any authentication data and the |
9463 | | * new cipher text. */ |
9464 | 0 | GHASH_UPDATE(aes, authIn, authInSz, out, sz); |
9465 | 0 | } |
9466 | 0 | } |
9467 | | |
9468 | 0 | return ret; |
9469 | 0 | } |
9470 | | |
9471 | | /* Finalize the AES GCM for encryption and return the authentication tag. |
9472 | | * |
9473 | | * Must set key and IV before calling this function. |
9474 | | * Must call wc_AesGcmInit() before calling this function. |
9475 | | * |
9476 | | * @param [in, out] aes AES object. |
9477 | | * @param [out] authTag Buffer to hold authentication tag. |
9478 | | * @param [in] authTagSz Length of authentication tag in bytes. |
9479 | | * @return 0 on success. |
9480 | | */ |
9481 | | int wc_AesGcmEncryptFinal(Aes* aes, byte* authTag, word32 authTagSz) |
9482 | 0 | { |
9483 | 0 | int ret = 0; |
9484 | | |
9485 | | /* Check validity of parameters. */ |
9486 | 0 | if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) || |
9487 | 0 | (authTagSz == 0)) { |
9488 | 0 | ret = BAD_FUNC_ARG; |
9489 | 0 | } |
9490 | | |
9491 | | /* Check key has been set. */ |
9492 | 0 | if ((ret == 0) && (!aes->gcmKeySet)) { |
9493 | 0 | ret = MISSING_KEY; |
9494 | 0 | } |
9495 | | /* Check IV has been set. */ |
9496 | 0 | if ((ret == 0) && (!aes->nonceSet)) { |
9497 | 0 | ret = MISSING_IV; |
9498 | 0 | } |
9499 | |
|
9500 | 0 | if (ret == 0) { |
9501 | | /* Calculate authentication tag. */ |
9502 | | #ifdef WOLFSSL_AESNI |
9503 | | if (haveAESNI |
9504 | | #ifdef HAVE_INTEL_AVX2 |
9505 | | || IS_INTEL_AVX2(intel_flags) |
9506 | | #endif |
9507 | | #ifdef HAVE_INTEL_AVX1 |
9508 | | || IS_INTEL_AVX1(intel_flags) |
9509 | | #endif |
9510 | | ) { |
9511 | | ret = AesGcmEncryptFinal_aesni(aes, authTag, authTagSz); |
9512 | | } |
9513 | | else |
9514 | | #endif |
9515 | 0 | { |
9516 | 0 | ret = AesGcmFinal_C(aes, authTag, authTagSz); |
9517 | 0 | } |
9518 | 0 | } |
9519 | |
|
9520 | 0 | if ((ret == 0) && aes->ctrSet) { |
9521 | 0 | IncCtr((byte*)aes->reg, aes->nonceSz); |
9522 | 0 | } |
9523 | |
|
9524 | 0 | return ret; |
9525 | 0 | } |
9526 | | |
9527 | | #if defined(HAVE_AES_DECRYPT) || defined(HAVE_AESGCM_DECRYPT) |
9528 | | /* Initialize an AES GCM cipher for decryption. |
9529 | | * |
9530 | | * Must call wc_AesInit() before calling this function. |
9531 | | * |
9532 | | * @param [in, out] aes AES object. |
9533 | | * @param [in] key Buffer holding key. |
9534 | | * @param [in] len Length of key in bytes. |
9535 | | * @param [in] iv Buffer holding IV/nonce. |
9536 | | * @param [in] ivSz Length of IV/nonce in bytes. |
9537 | | * @return 0 on success. |
9538 | | * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer |
9539 | | * is NULL, or the IV is NULL and no previous IV has been set. |
9540 | | */ |
9541 | | int wc_AesGcmDecryptInit(Aes* aes, const byte* key, word32 len, const byte* iv, |
9542 | | word32 ivSz) |
9543 | 0 | { |
9544 | 0 | return wc_AesGcmInit(aes, key, len, iv, ivSz); |
9545 | 0 | } |
9546 | | |
9547 | | /* Update the AES GCM for decryption with data and/or authentication data. |
9548 | | * |
9549 | | * All the AAD must be passed to update before the cipher text. |
9550 | | * Last part of AAD can be passed with first part of cipher text. |
9551 | | * |
9552 | | * Must set key and IV before calling this function. |
9553 | | * Must call wc_AesGcmInit() before calling this function. |
9554 | | * |
9555 | | * @param [in, out] aes AES object. |
9556 | | * @param [out] out Buffer to hold plaintext. |
9557 | | * @param [in] in Buffer holding cipher text. |
9558 | | * @param [in] sz Length of cipher text in bytes. |
9559 | | * @param [in] authIn Buffer holding authentication data. |
9560 | | * @param [in] authInSz Length of authentication data in bytes. |
9561 | | * @return 0 on success. |
9562 | | * @return BAD_FUNC_ARG when aes is NULL, or a length is non-zero but buffer |
9563 | | * is NULL. |
9564 | | */ |
9565 | | int wc_AesGcmDecryptUpdate(Aes* aes, byte* out, const byte* in, word32 sz, |
9566 | | const byte* authIn, word32 authInSz) |
9567 | 0 | { |
9568 | 0 | int ret = 0; |
9569 | | |
9570 | | /* Check validity of parameters. */ |
9571 | 0 | if ((aes == NULL) || ((authInSz > 0) && (authIn == NULL)) || ((sz > 0) && |
9572 | 0 | ((out == NULL) || (in == NULL)))) { |
9573 | 0 | ret = BAD_FUNC_ARG; |
9574 | 0 | } |
9575 | | |
9576 | | /* Check key has been set. */ |
9577 | 0 | if ((ret == 0) && (!aes->gcmKeySet)) { |
9578 | 0 | ret = MISSING_KEY; |
9579 | 0 | } |
9580 | | /* Check IV has been set. */ |
9581 | 0 | if ((ret == 0) && (!aes->nonceSet)) { |
9582 | 0 | ret = MISSING_IV; |
9583 | 0 | } |
9584 | |
|
9585 | 0 | if (ret == 0) { |
9586 | | /* Decrypt with AAD and/or cipher text. */ |
9587 | | #if defined(WOLFSSL_AESNI) |
9588 | | if (haveAESNI |
9589 | | #ifdef HAVE_INTEL_AVX2 |
9590 | | || IS_INTEL_AVX2(intel_flags) |
9591 | | #endif |
9592 | | #ifdef HAVE_INTEL_AVX1 |
9593 | | || IS_INTEL_AVX1(intel_flags) |
9594 | | #endif |
9595 | | ) { |
9596 | | ret = AesGcmDecryptUpdate_aesni(aes, out, in, sz, authIn, authInSz); |
9597 | | } |
9598 | | else |
9599 | | #endif |
9600 | 0 | { |
9601 | | /* Update the authenication tag with any authentication data and |
9602 | | * cipher text. */ |
9603 | 0 | GHASH_UPDATE(aes, authIn, authInSz, in, sz); |
9604 | | /* Decrypt the cipher text. */ |
9605 | 0 | ret = AesGcmCryptUpdate_C(aes, out, in, sz); |
9606 | 0 | } |
9607 | 0 | } |
9608 | |
|
9609 | 0 | return ret; |
9610 | 0 | } |
9611 | | |
9612 | | /* Finalize the AES GCM for decryption and check the authentication tag. |
9613 | | * |
9614 | | * Must set key and IV before calling this function. |
9615 | | * Must call wc_AesGcmInit() before calling this function. |
9616 | | * |
9617 | | * @param [in, out] aes AES object. |
9618 | | * @param [in] authTag Buffer holding authentication tag. |
9619 | | * @param [in] authTagSz Length of authentication tag in bytes. |
9620 | | * @return 0 on success. |
9621 | | */ |
9622 | | int wc_AesGcmDecryptFinal(Aes* aes, const byte* authTag, word32 authTagSz) |
9623 | 0 | { |
9624 | 0 | int ret = 0; |
9625 | | |
9626 | | /* Check validity of parameters. */ |
9627 | 0 | if ((aes == NULL) || (authTag == NULL) || (authTagSz > AES_BLOCK_SIZE) || |
9628 | 0 | (authTagSz == 0)) { |
9629 | 0 | ret = BAD_FUNC_ARG; |
9630 | 0 | } |
9631 | | |
9632 | | /* Check key has been set. */ |
9633 | 0 | if ((ret == 0) && (!aes->gcmKeySet)) { |
9634 | 0 | ret = MISSING_KEY; |
9635 | 0 | } |
9636 | | /* Check IV has been set. */ |
9637 | 0 | if ((ret == 0) && (!aes->nonceSet)) { |
9638 | 0 | ret = MISSING_IV; |
9639 | 0 | } |
9640 | |
|
9641 | 0 | if (ret == 0) { |
9642 | | /* Calculate authentication tag and compare with one passed in.. */ |
9643 | | #ifdef WOLFSSL_AESNI |
9644 | | if (haveAESNI |
9645 | | #ifdef HAVE_INTEL_AVX2 |
9646 | | || IS_INTEL_AVX2(intel_flags) |
9647 | | #endif |
9648 | | #ifdef HAVE_INTEL_AVX1 |
9649 | | || IS_INTEL_AVX1(intel_flags) |
9650 | | #endif |
9651 | | ) { |
9652 | | ret = AesGcmDecryptFinal_aesni(aes, authTag, authTagSz); |
9653 | | } |
9654 | | else |
9655 | | #endif |
9656 | 0 | { |
9657 | 0 | ALIGN32 byte calcTag[AES_BLOCK_SIZE]; |
9658 | | /* Calculate authentication tag. */ |
9659 | 0 | ret = AesGcmFinal_C(aes, calcTag, authTagSz); |
9660 | 0 | if (ret == 0) { |
9661 | | /* Check calculated tag matches the one passed in. */ |
9662 | 0 | if (ConstantCompare(authTag, calcTag, authTagSz) != 0) { |
9663 | 0 | ret = AES_GCM_AUTH_E; |
9664 | 0 | } |
9665 | 0 | } |
9666 | 0 | } |
9667 | 0 | } |
9668 | | |
9669 | | /* reset the state */ |
9670 | 0 | if (ret == 0) |
9671 | 0 | wc_AesFree(aes); |
9672 | |
|
9673 | 0 | return ret; |
9674 | 0 | } |
9675 | | #endif /* HAVE_AES_DECRYPT || HAVE_AESGCM_DECRYPT */ |
9676 | | #endif /* WOLFSSL_AESGCM_STREAM */ |
9677 | | #endif /* WOLFSSL_XILINX_CRYPT */ |
9678 | | #endif /* end of block for AESGCM implementation selection */ |
9679 | | |
9680 | | |
9681 | | /* Common to all, abstract functions that build off of lower level AESGCM |
9682 | | * functions */ |
9683 | | #ifndef WC_NO_RNG |
9684 | | |
9685 | 0 | static WARN_UNUSED_RESULT WC_INLINE int CheckAesGcmIvSize(int ivSz) { |
9686 | 0 | return (ivSz == GCM_NONCE_MIN_SZ || |
9687 | 0 | ivSz == GCM_NONCE_MID_SZ || |
9688 | 0 | ivSz == GCM_NONCE_MAX_SZ); |
9689 | 0 | } |
9690 | | |
9691 | | |
9692 | | int wc_AesGcmSetExtIV(Aes* aes, const byte* iv, word32 ivSz) |
9693 | 0 | { |
9694 | 0 | int ret = 0; |
9695 | |
|
9696 | 0 | if (aes == NULL || iv == NULL || !CheckAesGcmIvSize(ivSz)) { |
9697 | 0 | ret = BAD_FUNC_ARG; |
9698 | 0 | } |
9699 | |
|
9700 | 0 | if (ret == 0) { |
9701 | 0 | XMEMCPY((byte*)aes->reg, iv, ivSz); |
9702 | | |
9703 | | /* If the IV is 96, allow for a 2^64 invocation counter. |
9704 | | * For any other size for the nonce, limit the invocation |
9705 | | * counter to 32-bits. (SP 800-38D 8.3) */ |
9706 | 0 | aes->invokeCtr[0] = 0; |
9707 | 0 | aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF; |
9708 | 0 | #ifdef WOLFSSL_AESGCM_STREAM |
9709 | 0 | aes->ctrSet = 1; |
9710 | 0 | #endif |
9711 | 0 | aes->nonceSz = ivSz; |
9712 | 0 | } |
9713 | |
|
9714 | 0 | return ret; |
9715 | 0 | } |
9716 | | |
9717 | | |
9718 | | int wc_AesGcmSetIV(Aes* aes, word32 ivSz, |
9719 | | const byte* ivFixed, word32 ivFixedSz, |
9720 | | WC_RNG* rng) |
9721 | 0 | { |
9722 | 0 | int ret = 0; |
9723 | |
|
9724 | 0 | if (aes == NULL || rng == NULL || !CheckAesGcmIvSize(ivSz) || |
9725 | 0 | (ivFixed == NULL && ivFixedSz != 0) || |
9726 | 0 | (ivFixed != NULL && ivFixedSz != AES_IV_FIXED_SZ)) { |
9727 | |
|
9728 | 0 | ret = BAD_FUNC_ARG; |
9729 | 0 | } |
9730 | |
|
9731 | 0 | if (ret == 0) { |
9732 | 0 | byte* iv = (byte*)aes->reg; |
9733 | |
|
9734 | 0 | if (ivFixedSz) |
9735 | 0 | XMEMCPY(iv, ivFixed, ivFixedSz); |
9736 | |
|
9737 | 0 | ret = wc_RNG_GenerateBlock(rng, iv + ivFixedSz, ivSz - ivFixedSz); |
9738 | 0 | } |
9739 | |
|
9740 | 0 | if (ret == 0) { |
9741 | | /* If the IV is 96, allow for a 2^64 invocation counter. |
9742 | | * For any other size for the nonce, limit the invocation |
9743 | | * counter to 32-bits. (SP 800-38D 8.3) */ |
9744 | 0 | aes->invokeCtr[0] = 0; |
9745 | 0 | aes->invokeCtr[1] = (ivSz == GCM_NONCE_MID_SZ) ? 0 : 0xFFFFFFFF; |
9746 | 0 | #ifdef WOLFSSL_AESGCM_STREAM |
9747 | 0 | aes->ctrSet = 1; |
9748 | 0 | #endif |
9749 | 0 | aes->nonceSz = ivSz; |
9750 | 0 | } |
9751 | |
|
9752 | 0 | return ret; |
9753 | 0 | } |
9754 | | |
9755 | | |
9756 | | int wc_AesGcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz, |
9757 | | byte* ivOut, word32 ivOutSz, |
9758 | | byte* authTag, word32 authTagSz, |
9759 | | const byte* authIn, word32 authInSz) |
9760 | 0 | { |
9761 | 0 | int ret = 0; |
9762 | |
|
9763 | 0 | if (aes == NULL || (sz != 0 && (in == NULL || out == NULL)) || |
9764 | 0 | ivOut == NULL || ivOutSz != aes->nonceSz || |
9765 | 0 | (authIn == NULL && authInSz != 0)) { |
9766 | |
|
9767 | 0 | ret = BAD_FUNC_ARG; |
9768 | 0 | } |
9769 | |
|
9770 | 0 | if (ret == 0) { |
9771 | 0 | aes->invokeCtr[0]++; |
9772 | 0 | if (aes->invokeCtr[0] == 0) { |
9773 | 0 | aes->invokeCtr[1]++; |
9774 | 0 | if (aes->invokeCtr[1] == 0) |
9775 | 0 | ret = AES_GCM_OVERFLOW_E; |
9776 | 0 | } |
9777 | 0 | } |
9778 | |
|
9779 | 0 | if (ret == 0) { |
9780 | 0 | XMEMCPY(ivOut, aes->reg, ivOutSz); |
9781 | 0 | ret = wc_AesGcmEncrypt(aes, out, in, sz, |
9782 | 0 | (byte*)aes->reg, ivOutSz, |
9783 | 0 | authTag, authTagSz, |
9784 | 0 | authIn, authInSz); |
9785 | 0 | if (ret == 0) |
9786 | 0 | IncCtr((byte*)aes->reg, ivOutSz); |
9787 | 0 | } |
9788 | |
|
9789 | 0 | return ret; |
9790 | 0 | } |
9791 | | |
9792 | | int wc_Gmac(const byte* key, word32 keySz, byte* iv, word32 ivSz, |
9793 | | const byte* authIn, word32 authInSz, |
9794 | | byte* authTag, word32 authTagSz, WC_RNG* rng) |
9795 | 0 | { |
9796 | 0 | #ifdef WOLFSSL_SMALL_STACK |
9797 | 0 | Aes *aes = NULL; |
9798 | | #else |
9799 | | Aes aes[1]; |
9800 | | #endif |
9801 | 0 | int ret; |
9802 | |
|
9803 | 0 | if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) || |
9804 | 0 | authTag == NULL || authTagSz == 0 || rng == NULL) { |
9805 | |
|
9806 | 0 | return BAD_FUNC_ARG; |
9807 | 0 | } |
9808 | | |
9809 | 0 | #ifdef WOLFSSL_SMALL_STACK |
9810 | 0 | if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL, |
9811 | 0 | DYNAMIC_TYPE_AES)) == NULL) |
9812 | 0 | return MEMORY_E; |
9813 | 0 | #endif |
9814 | | |
9815 | 0 | ret = wc_AesInit(aes, NULL, INVALID_DEVID); |
9816 | 0 | if (ret == 0) { |
9817 | 0 | ret = wc_AesGcmSetKey(aes, key, keySz); |
9818 | 0 | if (ret == 0) |
9819 | 0 | ret = wc_AesGcmSetIV(aes, ivSz, NULL, 0, rng); |
9820 | 0 | if (ret == 0) |
9821 | 0 | ret = wc_AesGcmEncrypt_ex(aes, NULL, NULL, 0, iv, ivSz, |
9822 | 0 | authTag, authTagSz, authIn, authInSz); |
9823 | 0 | wc_AesFree(aes); |
9824 | 0 | } |
9825 | 0 | ForceZero(aes, sizeof *aes); |
9826 | 0 | #ifdef WOLFSSL_SMALL_STACK |
9827 | 0 | XFREE(aes, NULL, DYNAMIC_TYPE_AES); |
9828 | 0 | #endif |
9829 | |
|
9830 | 0 | return ret; |
9831 | 0 | } |
9832 | | |
9833 | | int wc_GmacVerify(const byte* key, word32 keySz, |
9834 | | const byte* iv, word32 ivSz, |
9835 | | const byte* authIn, word32 authInSz, |
9836 | | const byte* authTag, word32 authTagSz) |
9837 | 0 | { |
9838 | 0 | int ret; |
9839 | 0 | #ifdef HAVE_AES_DECRYPT |
9840 | 0 | #ifdef WOLFSSL_SMALL_STACK |
9841 | 0 | Aes *aes = NULL; |
9842 | | #else |
9843 | | Aes aes[1]; |
9844 | | #endif |
9845 | |
|
9846 | 0 | if (key == NULL || iv == NULL || (authIn == NULL && authInSz != 0) || |
9847 | 0 | authTag == NULL || authTagSz == 0 || authTagSz > AES_BLOCK_SIZE) { |
9848 | |
|
9849 | 0 | return BAD_FUNC_ARG; |
9850 | 0 | } |
9851 | | |
9852 | 0 | #ifdef WOLFSSL_SMALL_STACK |
9853 | 0 | if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL, |
9854 | 0 | DYNAMIC_TYPE_AES)) == NULL) |
9855 | 0 | return MEMORY_E; |
9856 | 0 | #endif |
9857 | | |
9858 | 0 | ret = wc_AesInit(aes, NULL, INVALID_DEVID); |
9859 | 0 | if (ret == 0) { |
9860 | 0 | ret = wc_AesGcmSetKey(aes, key, keySz); |
9861 | 0 | if (ret == 0) |
9862 | 0 | ret = wc_AesGcmDecrypt(aes, NULL, NULL, 0, iv, ivSz, |
9863 | 0 | authTag, authTagSz, authIn, authInSz); |
9864 | 0 | wc_AesFree(aes); |
9865 | 0 | } |
9866 | 0 | ForceZero(aes, sizeof *aes); |
9867 | 0 | #ifdef WOLFSSL_SMALL_STACK |
9868 | 0 | XFREE(aes, NULL, DYNAMIC_TYPE_AES); |
9869 | 0 | #endif |
9870 | | #else |
9871 | | (void)key; |
9872 | | (void)keySz; |
9873 | | (void)iv; |
9874 | | (void)ivSz; |
9875 | | (void)authIn; |
9876 | | (void)authInSz; |
9877 | | (void)authTag; |
9878 | | (void)authTagSz; |
9879 | | ret = NOT_COMPILED_IN; |
9880 | | #endif |
9881 | 0 | return ret; |
9882 | 0 | } |
9883 | | |
9884 | | #endif /* WC_NO_RNG */ |
9885 | | |
9886 | | |
9887 | | WOLFSSL_API int wc_GmacSetKey(Gmac* gmac, const byte* key, word32 len) |
9888 | 0 | { |
9889 | 0 | if (gmac == NULL || key == NULL) { |
9890 | 0 | return BAD_FUNC_ARG; |
9891 | 0 | } |
9892 | 0 | return wc_AesGcmSetKey(&gmac->aes, key, len); |
9893 | 0 | } |
9894 | | |
9895 | | |
9896 | | WOLFSSL_API int wc_GmacUpdate(Gmac* gmac, const byte* iv, word32 ivSz, |
9897 | | const byte* authIn, word32 authInSz, |
9898 | | byte* authTag, word32 authTagSz) |
9899 | 0 | { |
9900 | 0 | if (gmac == NULL) { |
9901 | 0 | return BAD_FUNC_ARG; |
9902 | 0 | } |
9903 | | |
9904 | 0 | return wc_AesGcmEncrypt(&gmac->aes, NULL, NULL, 0, iv, ivSz, |
9905 | 0 | authTag, authTagSz, authIn, authInSz); |
9906 | 0 | } |
9907 | | |
9908 | | #endif /* HAVE_AESGCM */ |
9909 | | |
9910 | | |
9911 | | #ifdef HAVE_AESCCM |
9912 | | |
9913 | | int wc_AesCcmSetKey(Aes* aes, const byte* key, word32 keySz) |
9914 | 0 | { |
9915 | 0 | if (!((keySz == 16) || (keySz == 24) || (keySz == 32))) |
9916 | 0 | return BAD_FUNC_ARG; |
9917 | | |
9918 | 0 | return wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION); |
9919 | 0 | } |
9920 | | |
9921 | | |
9922 | | /* Checks if the tag size is an accepted value based on RFC 3610 section 2 |
9923 | | * returns 0 if tag size is ok |
9924 | | */ |
9925 | | int wc_AesCcmCheckTagSize(int sz) |
9926 | 0 | { |
9927 | | /* values here are from RFC 3610 section 2 */ |
9928 | 0 | if (sz != 4 && sz != 6 && sz != 8 && sz != 10 && sz != 12 && sz != 14 |
9929 | 0 | && sz != 16) { |
9930 | 0 | WOLFSSL_MSG("Bad auth tag size AES-CCM"); |
9931 | 0 | return BAD_FUNC_ARG; |
9932 | 0 | } |
9933 | 0 | return 0; |
9934 | 0 | } |
9935 | | |
9936 | | #ifdef WOLFSSL_ARMASM |
9937 | | /* implementation located in wolfcrypt/src/port/arm/armv8-aes.c */ |
9938 | | |
9939 | | #elif defined(HAVE_COLDFIRE_SEC) |
9940 | | #error "Coldfire SEC doesn't currently support AES-CCM mode" |
9941 | | |
9942 | | #elif defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \ |
9943 | | !defined(WOLFSSL_QNX_CAAM) |
9944 | | /* implemented in wolfcrypt/src/port/caam_aes.c */ |
9945 | | |
9946 | | #elif defined(WOLFSSL_SILABS_SE_ACCEL) |
9947 | | /* implemented in wolfcrypt/src/port/silabs/silabs_aes.c */ |
9948 | | int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz, |
9949 | | const byte* nonce, word32 nonceSz, |
9950 | | byte* authTag, word32 authTagSz, |
9951 | | const byte* authIn, word32 authInSz) |
9952 | | { |
9953 | | return wc_AesCcmEncrypt_silabs( |
9954 | | aes, out, in, inSz, |
9955 | | nonce, nonceSz, |
9956 | | authTag, authTagSz, |
9957 | | authIn, authInSz); |
9958 | | } |
9959 | | |
9960 | | #ifdef HAVE_AES_DECRYPT |
9961 | | int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz, |
9962 | | const byte* nonce, word32 nonceSz, |
9963 | | const byte* authTag, word32 authTagSz, |
9964 | | const byte* authIn, word32 authInSz) |
9965 | | { |
9966 | | return wc_AesCcmDecrypt_silabs( |
9967 | | aes, out, in, inSz, |
9968 | | nonce, nonceSz, |
9969 | | authTag, authTagSz, |
9970 | | authIn, authInSz); |
9971 | | } |
9972 | | #endif |
9973 | | #elif defined(FREESCALE_LTC) |
9974 | | |
9975 | | /* return 0 on success */ |
9976 | | int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz, |
9977 | | const byte* nonce, word32 nonceSz, |
9978 | | byte* authTag, word32 authTagSz, |
9979 | | const byte* authIn, word32 authInSz) |
9980 | | { |
9981 | | byte *key; |
9982 | | word32 keySize; |
9983 | | status_t status; |
9984 | | |
9985 | | /* sanity check on arguments */ |
9986 | | /* note, LTC_AES_EncryptTagCcm() doesn't allow null src or dst |
9987 | | * ptrs even if inSz is zero (ltc_aes_ccm_check_input_args()), so |
9988 | | * don't allow it here either. |
9989 | | */ |
9990 | | if (aes == NULL || out == NULL || in == NULL || nonce == NULL |
9991 | | || authTag == NULL || nonceSz < 7 || nonceSz > 13) { |
9992 | | return BAD_FUNC_ARG; |
9993 | | } |
9994 | | |
9995 | | if (wc_AesCcmCheckTagSize(authTagSz) != 0) { |
9996 | | return BAD_FUNC_ARG; |
9997 | | } |
9998 | | |
9999 | | key = (byte*)aes->key; |
10000 | | |
10001 | | status = wc_AesGetKeySize(aes, &keySize); |
10002 | | if (status != 0) { |
10003 | | return status; |
10004 | | } |
10005 | | |
10006 | | status = wolfSSL_CryptHwMutexLock(); |
10007 | | if (status != 0) |
10008 | | return status; |
10009 | | |
10010 | | status = LTC_AES_EncryptTagCcm(LTC_BASE, in, out, inSz, |
10011 | | nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz); |
10012 | | wolfSSL_CryptHwMutexUnLock(); |
10013 | | |
10014 | | return (kStatus_Success == status) ? 0 : BAD_FUNC_ARG; |
10015 | | } |
10016 | | |
10017 | | #ifdef HAVE_AES_DECRYPT |
10018 | | int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz, |
10019 | | const byte* nonce, word32 nonceSz, |
10020 | | const byte* authTag, word32 authTagSz, |
10021 | | const byte* authIn, word32 authInSz) |
10022 | | { |
10023 | | byte *key; |
10024 | | word32 keySize; |
10025 | | status_t status; |
10026 | | |
10027 | | /* sanity check on arguments */ |
10028 | | if (aes == NULL || out == NULL || in == NULL || nonce == NULL |
10029 | | || authTag == NULL || nonceSz < 7 || nonceSz > 13) { |
10030 | | return BAD_FUNC_ARG; |
10031 | | } |
10032 | | |
10033 | | key = (byte*)aes->key; |
10034 | | |
10035 | | status = wc_AesGetKeySize(aes, &keySize); |
10036 | | if (status != 0) { |
10037 | | return status; |
10038 | | } |
10039 | | |
10040 | | status = wolfSSL_CryptHwMutexLock(); |
10041 | | if (status != 0) |
10042 | | return status; |
10043 | | status = LTC_AES_DecryptTagCcm(LTC_BASE, in, out, inSz, |
10044 | | nonce, nonceSz, authIn, authInSz, key, keySize, authTag, authTagSz); |
10045 | | wolfSSL_CryptHwMutexUnLock(); |
10046 | | |
10047 | | if (status != kStatus_Success) { |
10048 | | XMEMSET(out, 0, inSz); |
10049 | | return AES_CCM_AUTH_E; |
10050 | | } |
10051 | | return 0; |
10052 | | } |
10053 | | #endif /* HAVE_AES_DECRYPT */ |
10054 | | |
10055 | | #else |
10056 | | |
10057 | | /* Software CCM */ |
10058 | | static WARN_UNUSED_RESULT int roll_x( |
10059 | | Aes* aes, const byte* in, word32 inSz, byte* out) |
10060 | 0 | { |
10061 | 0 | int ret; |
10062 | | |
10063 | | /* process the bulk of the data */ |
10064 | 0 | while (inSz >= AES_BLOCK_SIZE) { |
10065 | 0 | xorbuf(out, in, AES_BLOCK_SIZE); |
10066 | 0 | in += AES_BLOCK_SIZE; |
10067 | 0 | inSz -= AES_BLOCK_SIZE; |
10068 | |
|
10069 | 0 | ret = wc_AesEncrypt(aes, out, out); |
10070 | 0 | if (ret != 0) |
10071 | 0 | return ret; |
10072 | 0 | } |
10073 | | |
10074 | | /* process remainder of the data */ |
10075 | 0 | if (inSz > 0) { |
10076 | 0 | xorbuf(out, in, inSz); |
10077 | 0 | ret = wc_AesEncrypt(aes, out, out); |
10078 | 0 | if (ret != 0) |
10079 | 0 | return ret; |
10080 | 0 | } |
10081 | | |
10082 | 0 | return 0; |
10083 | 0 | } |
10084 | | |
10085 | | static WARN_UNUSED_RESULT int roll_auth( |
10086 | | Aes* aes, const byte* in, word32 inSz, byte* out) |
10087 | 0 | { |
10088 | 0 | word32 authLenSz; |
10089 | 0 | word32 remainder; |
10090 | 0 | int ret; |
10091 | | |
10092 | | /* encode the length in */ |
10093 | 0 | if (inSz <= 0xFEFF) { |
10094 | 0 | authLenSz = 2; |
10095 | 0 | out[0] ^= ((inSz & 0xFF00) >> 8); |
10096 | 0 | out[1] ^= (inSz & 0x00FF); |
10097 | 0 | } |
10098 | 0 | else if (inSz <= 0xFFFFFFFF) { |
10099 | 0 | authLenSz = 6; |
10100 | 0 | out[0] ^= 0xFF; out[1] ^= 0xFE; |
10101 | 0 | out[2] ^= ((inSz & 0xFF000000) >> 24); |
10102 | 0 | out[3] ^= ((inSz & 0x00FF0000) >> 16); |
10103 | 0 | out[4] ^= ((inSz & 0x0000FF00) >> 8); |
10104 | 0 | out[5] ^= (inSz & 0x000000FF); |
10105 | 0 | } |
10106 | | /* Note, the protocol handles auth data up to 2^64, but we are |
10107 | | * using 32-bit sizes right now, so the bigger data isn't handled |
10108 | | * else if (inSz <= 0xFFFFFFFFFFFFFFFF) {} */ |
10109 | 0 | else |
10110 | 0 | return BAD_LENGTH_E; |
10111 | | |
10112 | | /* start fill out the rest of the first block */ |
10113 | 0 | remainder = AES_BLOCK_SIZE - authLenSz; |
10114 | 0 | if (inSz >= remainder) { |
10115 | | /* plenty of bulk data to fill the remainder of this block */ |
10116 | 0 | xorbuf(out + authLenSz, in, remainder); |
10117 | 0 | inSz -= remainder; |
10118 | 0 | in += remainder; |
10119 | 0 | } |
10120 | 0 | else { |
10121 | | /* not enough bulk data, copy what is available, and pad zero */ |
10122 | 0 | xorbuf(out + authLenSz, in, inSz); |
10123 | 0 | inSz = 0; |
10124 | 0 | } |
10125 | 0 | ret = wc_AesEncrypt(aes, out, out); |
10126 | |
|
10127 | 0 | if ((ret == 0) && (inSz > 0)) { |
10128 | 0 | ret = roll_x(aes, in, inSz, out); |
10129 | 0 | } |
10130 | |
|
10131 | 0 | return ret; |
10132 | 0 | } |
10133 | | |
10134 | | |
10135 | | static WC_INLINE void AesCcmCtrInc(byte* B, word32 lenSz) |
10136 | 0 | { |
10137 | 0 | word32 i; |
10138 | |
|
10139 | 0 | for (i = 0; i < lenSz; i++) { |
10140 | 0 | if (++B[AES_BLOCK_SIZE - 1 - i] != 0) return; |
10141 | 0 | } |
10142 | 0 | } |
10143 | | |
10144 | | #ifdef WOLFSSL_AESNI |
10145 | | static WC_INLINE void AesCcmCtrIncSet4(byte* B, word32 lenSz) |
10146 | | { |
10147 | | word32 i; |
10148 | | |
10149 | | /* B+1 = B */ |
10150 | | XMEMCPY(B + AES_BLOCK_SIZE * 1, B, AES_BLOCK_SIZE); |
10151 | | /* B+2,B+3 = B,B+1 */ |
10152 | | XMEMCPY(B + AES_BLOCK_SIZE * 2, B, AES_BLOCK_SIZE * 2); |
10153 | | |
10154 | | for (i = 0; i < lenSz; i++) { |
10155 | | if (++B[AES_BLOCK_SIZE * 2 - 1 - i] != 0) break; |
10156 | | } |
10157 | | B[AES_BLOCK_SIZE * 3 - 1] += 2; |
10158 | | if (B[AES_BLOCK_SIZE * 3 - 1] < 2) { |
10159 | | for (i = 1; i < lenSz; i++) { |
10160 | | if (++B[AES_BLOCK_SIZE * 3 - 1 - i] != 0) break; |
10161 | | } |
10162 | | } |
10163 | | B[AES_BLOCK_SIZE * 4 - 1] += 3; |
10164 | | if (B[AES_BLOCK_SIZE * 4 - 1] < 3) { |
10165 | | for (i = 1; i < lenSz; i++) { |
10166 | | if (++B[AES_BLOCK_SIZE * 4 - 1 - i] != 0) break; |
10167 | | } |
10168 | | } |
10169 | | } |
10170 | | |
10171 | | static WC_INLINE void AesCcmCtrInc4(byte* B, word32 lenSz) |
10172 | | { |
10173 | | word32 i; |
10174 | | |
10175 | | B[AES_BLOCK_SIZE - 1] += 4; |
10176 | | if (B[AES_BLOCK_SIZE - 1] < 4) { |
10177 | | for (i = 1; i < lenSz; i++) { |
10178 | | if (++B[AES_BLOCK_SIZE - 1 - i] != 0) break; |
10179 | | } |
10180 | | } |
10181 | | } |
10182 | | #endif |
10183 | | |
10184 | | /* Software AES - CCM Encrypt */ |
10185 | | /* return 0 on success */ |
10186 | | int wc_AesCcmEncrypt(Aes* aes, byte* out, const byte* in, word32 inSz, |
10187 | | const byte* nonce, word32 nonceSz, |
10188 | | byte* authTag, word32 authTagSz, |
10189 | | const byte* authIn, word32 authInSz) |
10190 | 0 | { |
10191 | 0 | #ifndef WOLFSSL_AESNI |
10192 | 0 | byte A[AES_BLOCK_SIZE]; |
10193 | 0 | byte B[AES_BLOCK_SIZE]; |
10194 | | #else |
10195 | | ALIGN128 byte A[AES_BLOCK_SIZE * 4]; |
10196 | | ALIGN128 byte B[AES_BLOCK_SIZE * 4]; |
10197 | | #endif |
10198 | 0 | byte lenSz; |
10199 | 0 | word32 i; |
10200 | 0 | byte mask = 0xFF; |
10201 | 0 | const word32 wordSz = (word32)sizeof(word32); |
10202 | 0 | int ret; |
10203 | | |
10204 | | /* sanity check on arguments */ |
10205 | 0 | if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) || |
10206 | 0 | nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 || |
10207 | 0 | authTagSz > AES_BLOCK_SIZE) |
10208 | 0 | return BAD_FUNC_ARG; |
10209 | | |
10210 | | /* sanity check on tag size */ |
10211 | 0 | if (wc_AesCcmCheckTagSize(authTagSz) != 0) { |
10212 | 0 | return BAD_FUNC_ARG; |
10213 | 0 | } |
10214 | | |
10215 | 0 | #ifdef WOLF_CRYPTO_CB |
10216 | 0 | if (aes->devId != INVALID_DEVID) { |
10217 | 0 | int crypto_cb_ret = |
10218 | 0 | wc_CryptoCb_AesCcmEncrypt(aes, out, in, inSz, nonce, nonceSz, |
10219 | 0 | authTag, authTagSz, authIn, authInSz); |
10220 | 0 | if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE) |
10221 | 0 | return crypto_cb_ret; |
10222 | | /* fall-through when unavailable */ |
10223 | 0 | } |
10224 | 0 | #endif |
10225 | | |
10226 | 0 | XMEMSET(A, 0, sizeof(A)); |
10227 | 0 | XMEMCPY(B+1, nonce, nonceSz); |
10228 | 0 | lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz; |
10229 | 0 | B[0] = (authInSz > 0 ? 64 : 0) |
10230 | 0 | + (8 * (((byte)authTagSz - 2) / 2)) |
10231 | 0 | + (lenSz - 1); |
10232 | 0 | for (i = 0; i < lenSz; i++) { |
10233 | 0 | if (mask && i >= wordSz) |
10234 | 0 | mask = 0x00; |
10235 | 0 | B[AES_BLOCK_SIZE - 1 - i] = (inSz >> ((8 * i) & mask)) & mask; |
10236 | 0 | } |
10237 | |
|
10238 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10239 | | wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B)); |
10240 | | #endif |
10241 | |
|
10242 | 0 | ret = wc_AesEncrypt(aes, B, A); |
10243 | 0 | if (ret != 0) { |
10244 | 0 | ForceZero(B, sizeof(B)); |
10245 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10246 | | wc_MemZero_Check(B, sizeof(B)); |
10247 | | #endif |
10248 | 0 | return ret; |
10249 | 0 | } |
10250 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10251 | | wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A)); |
10252 | | #endif |
10253 | | |
10254 | 0 | if (authInSz > 0) { |
10255 | 0 | ret = roll_auth(aes, authIn, authInSz, A); |
10256 | 0 | if (ret != 0) { |
10257 | 0 | ForceZero(A, sizeof(A)); |
10258 | 0 | ForceZero(B, sizeof(B)); |
10259 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10260 | | wc_MemZero_Check(A, sizeof(A)); |
10261 | | wc_MemZero_Check(B, sizeof(B)); |
10262 | | #endif |
10263 | 0 | return ret; |
10264 | 0 | } |
10265 | 0 | } |
10266 | 0 | if (inSz > 0) { |
10267 | 0 | ret = roll_x(aes, in, inSz, A); |
10268 | 0 | if (ret != 0) { |
10269 | 0 | ForceZero(A, sizeof(A)); |
10270 | 0 | ForceZero(B, sizeof(B)); |
10271 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10272 | | wc_MemZero_Check(A, sizeof(A)); |
10273 | | wc_MemZero_Check(B, sizeof(B)); |
10274 | | #endif |
10275 | 0 | return ret; |
10276 | 0 | } |
10277 | 0 | } |
10278 | 0 | XMEMCPY(authTag, A, authTagSz); |
10279 | |
|
10280 | 0 | B[0] = lenSz - 1; |
10281 | 0 | for (i = 0; i < lenSz; i++) |
10282 | 0 | B[AES_BLOCK_SIZE - 1 - i] = 0; |
10283 | 0 | ret = wc_AesEncrypt(aes, B, A); |
10284 | 0 | if (ret != 0) { |
10285 | 0 | ForceZero(A, sizeof(A)); |
10286 | 0 | ForceZero(B, sizeof(B)); |
10287 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10288 | | wc_MemZero_Check(A, sizeof(A)); |
10289 | | wc_MemZero_Check(B, sizeof(B)); |
10290 | | #endif |
10291 | 0 | return ret; |
10292 | 0 | } |
10293 | 0 | xorbuf(authTag, A, authTagSz); |
10294 | |
|
10295 | 0 | B[15] = 1; |
10296 | | #ifdef WOLFSSL_AESNI |
10297 | | if (haveAESNI && aes->use_aesni) { |
10298 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
10299 | | while (inSz >= AES_BLOCK_SIZE * 4) { |
10300 | | AesCcmCtrIncSet4(B, lenSz); |
10301 | | |
10302 | | AES_ECB_encrypt(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key, |
10303 | | aes->rounds); |
10304 | | |
10305 | | xorbuf(A, in, AES_BLOCK_SIZE * 4); |
10306 | | XMEMCPY(out, A, AES_BLOCK_SIZE * 4); |
10307 | | |
10308 | | inSz -= AES_BLOCK_SIZE * 4; |
10309 | | in += AES_BLOCK_SIZE * 4; |
10310 | | out += AES_BLOCK_SIZE * 4; |
10311 | | |
10312 | | AesCcmCtrInc4(B, lenSz); |
10313 | | } |
10314 | | RESTORE_VECTOR_REGISTERS(); |
10315 | | } |
10316 | | #endif |
10317 | 0 | while (inSz >= AES_BLOCK_SIZE) { |
10318 | 0 | ret = wc_AesEncrypt(aes, B, A); |
10319 | 0 | if (ret != 0) { |
10320 | 0 | ForceZero(A, sizeof(A)); |
10321 | 0 | ForceZero(B, sizeof(B)); |
10322 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10323 | | wc_MemZero_Check(A, sizeof(A)); |
10324 | | wc_MemZero_Check(B, sizeof(B)); |
10325 | | #endif |
10326 | 0 | return ret; |
10327 | 0 | } |
10328 | 0 | xorbuf(A, in, AES_BLOCK_SIZE); |
10329 | 0 | XMEMCPY(out, A, AES_BLOCK_SIZE); |
10330 | |
|
10331 | 0 | AesCcmCtrInc(B, lenSz); |
10332 | 0 | inSz -= AES_BLOCK_SIZE; |
10333 | 0 | in += AES_BLOCK_SIZE; |
10334 | 0 | out += AES_BLOCK_SIZE; |
10335 | 0 | } |
10336 | 0 | if (inSz > 0) { |
10337 | 0 | ret = wc_AesEncrypt(aes, B, A); |
10338 | 0 | if (ret != 0) { |
10339 | 0 | ForceZero(A, sizeof(A)); |
10340 | 0 | ForceZero(B, sizeof(B)); |
10341 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10342 | | wc_MemZero_Check(A, sizeof(A)); |
10343 | | wc_MemZero_Check(B, sizeof(B)); |
10344 | | #endif |
10345 | 0 | return ret; |
10346 | 0 | } |
10347 | 0 | xorbuf(A, in, inSz); |
10348 | 0 | XMEMCPY(out, A, inSz); |
10349 | 0 | } |
10350 | | |
10351 | 0 | ForceZero(A, sizeof(A)); |
10352 | 0 | ForceZero(B, sizeof(B)); |
10353 | |
|
10354 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10355 | | wc_MemZero_Check(A, sizeof(A)); |
10356 | | wc_MemZero_Check(B, sizeof(B)); |
10357 | | #endif |
10358 | |
|
10359 | 0 | return 0; |
10360 | 0 | } |
10361 | | |
10362 | | #ifdef HAVE_AES_DECRYPT |
10363 | | /* Software AES - CCM Decrypt */ |
10364 | | int wc_AesCcmDecrypt(Aes* aes, byte* out, const byte* in, word32 inSz, |
10365 | | const byte* nonce, word32 nonceSz, |
10366 | | const byte* authTag, word32 authTagSz, |
10367 | | const byte* authIn, word32 authInSz) |
10368 | 0 | { |
10369 | 0 | #ifndef WOLFSSL_AESNI |
10370 | 0 | byte A[AES_BLOCK_SIZE]; |
10371 | 0 | byte B[AES_BLOCK_SIZE]; |
10372 | | #else |
10373 | | ALIGN128 byte B[AES_BLOCK_SIZE * 4]; |
10374 | | ALIGN128 byte A[AES_BLOCK_SIZE * 4]; |
10375 | | #endif |
10376 | 0 | byte* o; |
10377 | 0 | byte lenSz; |
10378 | 0 | word32 i, oSz; |
10379 | 0 | byte mask = 0xFF; |
10380 | 0 | const word32 wordSz = (word32)sizeof(word32); |
10381 | 0 | int ret; |
10382 | | |
10383 | | /* sanity check on arguments */ |
10384 | 0 | if (aes == NULL || (inSz != 0 && (in == NULL || out == NULL)) || |
10385 | 0 | nonce == NULL || authTag == NULL || nonceSz < 7 || nonceSz > 13 || |
10386 | 0 | authTagSz > AES_BLOCK_SIZE) |
10387 | 0 | return BAD_FUNC_ARG; |
10388 | | |
10389 | | /* sanity check on tag size */ |
10390 | 0 | if (wc_AesCcmCheckTagSize(authTagSz) != 0) { |
10391 | 0 | return BAD_FUNC_ARG; |
10392 | 0 | } |
10393 | | |
10394 | 0 | #ifdef WOLF_CRYPTO_CB |
10395 | 0 | if (aes->devId != INVALID_DEVID) { |
10396 | 0 | int crypto_cb_ret = |
10397 | 0 | wc_CryptoCb_AesCcmDecrypt(aes, out, in, inSz, nonce, nonceSz, |
10398 | 0 | authTag, authTagSz, authIn, authInSz); |
10399 | 0 | if (crypto_cb_ret != CRYPTOCB_UNAVAILABLE) |
10400 | 0 | return crypto_cb_ret; |
10401 | | /* fall-through when unavailable */ |
10402 | 0 | } |
10403 | 0 | #endif |
10404 | | |
10405 | 0 | o = out; |
10406 | 0 | oSz = inSz; |
10407 | 0 | XMEMSET(A, 0, sizeof A); |
10408 | 0 | XMEMCPY(B+1, nonce, nonceSz); |
10409 | 0 | lenSz = AES_BLOCK_SIZE - 1 - (byte)nonceSz; |
10410 | |
|
10411 | 0 | B[0] = lenSz - 1; |
10412 | 0 | for (i = 0; i < lenSz; i++) |
10413 | 0 | B[AES_BLOCK_SIZE - 1 - i] = 0; |
10414 | 0 | B[15] = 1; |
10415 | |
|
10416 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10417 | | wc_MemZero_Add("wc_AesCcmEncrypt A", A, sizeof(A)); |
10418 | | wc_MemZero_Add("wc_AesCcmEncrypt B", B, sizeof(B)); |
10419 | | #endif |
10420 | |
|
10421 | | #ifdef WOLFSSL_AESNI |
10422 | | if (haveAESNI && aes->use_aesni) { |
10423 | | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
10424 | | while (oSz >= AES_BLOCK_SIZE * 4) { |
10425 | | AesCcmCtrIncSet4(B, lenSz); |
10426 | | |
10427 | | AES_ECB_encrypt(B, A, AES_BLOCK_SIZE * 4, (byte*)aes->key, |
10428 | | aes->rounds); |
10429 | | |
10430 | | xorbuf(A, in, AES_BLOCK_SIZE * 4); |
10431 | | XMEMCPY(o, A, AES_BLOCK_SIZE * 4); |
10432 | | |
10433 | | oSz -= AES_BLOCK_SIZE * 4; |
10434 | | in += AES_BLOCK_SIZE * 4; |
10435 | | o += AES_BLOCK_SIZE * 4; |
10436 | | |
10437 | | AesCcmCtrInc4(B, lenSz); |
10438 | | } |
10439 | | RESTORE_VECTOR_REGISTERS(); |
10440 | | } |
10441 | | #endif |
10442 | 0 | while (oSz >= AES_BLOCK_SIZE) { |
10443 | 0 | ret = wc_AesEncrypt(aes, B, A); |
10444 | 0 | if (ret != 0) { |
10445 | 0 | ForceZero(A, sizeof(A)); |
10446 | 0 | ForceZero(B, sizeof(B)); |
10447 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10448 | | wc_MemZero_Check(A, sizeof(A)); |
10449 | | wc_MemZero_Check(B, sizeof(B)); |
10450 | | #endif |
10451 | 0 | return ret; |
10452 | 0 | } |
10453 | 0 | xorbuf(A, in, AES_BLOCK_SIZE); |
10454 | 0 | XMEMCPY(o, A, AES_BLOCK_SIZE); |
10455 | |
|
10456 | 0 | AesCcmCtrInc(B, lenSz); |
10457 | 0 | oSz -= AES_BLOCK_SIZE; |
10458 | 0 | in += AES_BLOCK_SIZE; |
10459 | 0 | o += AES_BLOCK_SIZE; |
10460 | 0 | } |
10461 | 0 | if (inSz > 0) { |
10462 | 0 | ret = wc_AesEncrypt(aes, B, A); |
10463 | 0 | if (ret != 0) { |
10464 | 0 | ForceZero(A, sizeof(A)); |
10465 | 0 | ForceZero(B, sizeof(B)); |
10466 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10467 | | wc_MemZero_Check(A, sizeof(A)); |
10468 | | wc_MemZero_Check(B, sizeof(B)); |
10469 | | #endif |
10470 | 0 | return ret; |
10471 | 0 | } |
10472 | 0 | xorbuf(A, in, oSz); |
10473 | 0 | XMEMCPY(o, A, oSz); |
10474 | 0 | } |
10475 | | |
10476 | 0 | for (i = 0; i < lenSz; i++) |
10477 | 0 | B[AES_BLOCK_SIZE - 1 - i] = 0; |
10478 | 0 | ret = wc_AesEncrypt(aes, B, A); |
10479 | 0 | if (ret != 0) { |
10480 | 0 | ForceZero(A, sizeof(A)); |
10481 | 0 | ForceZero(B, sizeof(B)); |
10482 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10483 | | wc_MemZero_Check(A, sizeof(A)); |
10484 | | wc_MemZero_Check(B, sizeof(B)); |
10485 | | #endif |
10486 | 0 | return ret; |
10487 | 0 | } |
10488 | | |
10489 | 0 | o = out; |
10490 | 0 | oSz = inSz; |
10491 | |
|
10492 | 0 | B[0] = (authInSz > 0 ? 64 : 0) |
10493 | 0 | + (8 * (((byte)authTagSz - 2) / 2)) |
10494 | 0 | + (lenSz - 1); |
10495 | 0 | for (i = 0; i < lenSz; i++) { |
10496 | 0 | if (mask && i >= wordSz) |
10497 | 0 | mask = 0x00; |
10498 | 0 | B[AES_BLOCK_SIZE - 1 - i] = (inSz >> ((8 * i) & mask)) & mask; |
10499 | 0 | } |
10500 | |
|
10501 | 0 | ret = wc_AesEncrypt(aes, B, A); |
10502 | 0 | if (ret != 0) { |
10503 | 0 | ForceZero(A, sizeof(A)); |
10504 | 0 | ForceZero(B, sizeof(B)); |
10505 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10506 | | wc_MemZero_Check(A, sizeof(A)); |
10507 | | wc_MemZero_Check(B, sizeof(B)); |
10508 | | #endif |
10509 | 0 | return ret; |
10510 | 0 | } |
10511 | | |
10512 | 0 | if (authInSz > 0) { |
10513 | 0 | ret = roll_auth(aes, authIn, authInSz, A); |
10514 | 0 | if (ret != 0) { |
10515 | 0 | ForceZero(A, sizeof(A)); |
10516 | 0 | ForceZero(B, sizeof(B)); |
10517 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10518 | | wc_MemZero_Check(A, sizeof(A)); |
10519 | | wc_MemZero_Check(B, sizeof(B)); |
10520 | | #endif |
10521 | 0 | return ret; |
10522 | 0 | } |
10523 | 0 | } |
10524 | 0 | if (inSz > 0) { |
10525 | 0 | ret = roll_x(aes, o, oSz, A); |
10526 | 0 | if (ret != 0) { |
10527 | 0 | ForceZero(A, sizeof(A)); |
10528 | 0 | ForceZero(B, sizeof(B)); |
10529 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10530 | | wc_MemZero_Check(A, sizeof(A)); |
10531 | | wc_MemZero_Check(B, sizeof(B)); |
10532 | | #endif |
10533 | 0 | return ret; |
10534 | 0 | } |
10535 | 0 | } |
10536 | | |
10537 | 0 | B[0] = lenSz - 1; |
10538 | 0 | for (i = 0; i < lenSz; i++) |
10539 | 0 | B[AES_BLOCK_SIZE - 1 - i] = 0; |
10540 | 0 | ret = wc_AesEncrypt(aes, B, B); |
10541 | 0 | if (ret != 0) { |
10542 | 0 | ForceZero(A, sizeof(A)); |
10543 | 0 | ForceZero(B, sizeof(B)); |
10544 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10545 | | wc_MemZero_Check(A, sizeof(A)); |
10546 | | wc_MemZero_Check(B, sizeof(B)); |
10547 | | #endif |
10548 | 0 | return ret; |
10549 | 0 | } |
10550 | 0 | xorbuf(A, B, authTagSz); |
10551 | |
|
10552 | 0 | if (ConstantCompare(A, authTag, authTagSz) != 0) { |
10553 | | /* If the authTag check fails, don't keep the decrypted data. |
10554 | | * Unfortunately, you need the decrypted data to calculate the |
10555 | | * check value. */ |
10556 | | #if defined(HAVE_FIPS_VERSION) && (HAVE_FIPS_VERSION >= 2) && \ |
10557 | | defined(ACVP_VECTOR_TESTING) |
10558 | | WOLFSSL_MSG("Preserve output for vector responses"); |
10559 | | #else |
10560 | 0 | if (inSz > 0) |
10561 | 0 | XMEMSET(out, 0, inSz); |
10562 | 0 | #endif |
10563 | 0 | ret = AES_CCM_AUTH_E; |
10564 | 0 | } |
10565 | |
|
10566 | 0 | ForceZero(A, sizeof(A)); |
10567 | 0 | ForceZero(B, sizeof(B)); |
10568 | 0 | o = NULL; |
10569 | |
|
10570 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10571 | | wc_MemZero_Check(A, sizeof(A)); |
10572 | | wc_MemZero_Check(B, sizeof(B)); |
10573 | | #endif |
10574 | |
|
10575 | 0 | return ret; |
10576 | 0 | } |
10577 | | |
10578 | | #endif /* HAVE_AES_DECRYPT */ |
10579 | | #endif /* software CCM */ |
10580 | | |
10581 | | /* abstract functions that call lower level AESCCM functions */ |
10582 | | #ifndef WC_NO_RNG |
10583 | | |
10584 | | int wc_AesCcmSetNonce(Aes* aes, const byte* nonce, word32 nonceSz) |
10585 | 0 | { |
10586 | 0 | int ret = 0; |
10587 | |
|
10588 | 0 | if (aes == NULL || nonce == NULL || |
10589 | 0 | nonceSz < CCM_NONCE_MIN_SZ || nonceSz > CCM_NONCE_MAX_SZ) { |
10590 | |
|
10591 | 0 | ret = BAD_FUNC_ARG; |
10592 | 0 | } |
10593 | |
|
10594 | 0 | if (ret == 0) { |
10595 | 0 | XMEMCPY(aes->reg, nonce, nonceSz); |
10596 | 0 | aes->nonceSz = nonceSz; |
10597 | | |
10598 | | /* Invocation counter should be 2^61 */ |
10599 | 0 | aes->invokeCtr[0] = 0; |
10600 | 0 | aes->invokeCtr[1] = 0xE0000000; |
10601 | 0 | } |
10602 | |
|
10603 | 0 | return ret; |
10604 | 0 | } |
10605 | | |
10606 | | |
10607 | | int wc_AesCcmEncrypt_ex(Aes* aes, byte* out, const byte* in, word32 sz, |
10608 | | byte* ivOut, word32 ivOutSz, |
10609 | | byte* authTag, word32 authTagSz, |
10610 | | const byte* authIn, word32 authInSz) |
10611 | 0 | { |
10612 | 0 | int ret = 0; |
10613 | |
|
10614 | 0 | if (aes == NULL || out == NULL || |
10615 | 0 | (in == NULL && sz != 0) || |
10616 | 0 | ivOut == NULL || |
10617 | 0 | (authIn == NULL && authInSz != 0) || |
10618 | 0 | (ivOutSz != aes->nonceSz)) { |
10619 | |
|
10620 | 0 | ret = BAD_FUNC_ARG; |
10621 | 0 | } |
10622 | |
|
10623 | 0 | if (ret == 0) { |
10624 | 0 | aes->invokeCtr[0]++; |
10625 | 0 | if (aes->invokeCtr[0] == 0) { |
10626 | 0 | aes->invokeCtr[1]++; |
10627 | 0 | if (aes->invokeCtr[1] == 0) |
10628 | 0 | ret = AES_CCM_OVERFLOW_E; |
10629 | 0 | } |
10630 | 0 | } |
10631 | |
|
10632 | 0 | if (ret == 0) { |
10633 | 0 | ret = wc_AesCcmEncrypt(aes, out, in, sz, |
10634 | 0 | (byte*)aes->reg, aes->nonceSz, |
10635 | 0 | authTag, authTagSz, |
10636 | 0 | authIn, authInSz); |
10637 | 0 | if (ret == 0) { |
10638 | 0 | XMEMCPY(ivOut, aes->reg, aes->nonceSz); |
10639 | 0 | IncCtr((byte*)aes->reg, aes->nonceSz); |
10640 | 0 | } |
10641 | 0 | } |
10642 | |
|
10643 | 0 | return ret; |
10644 | 0 | } |
10645 | | |
10646 | | #endif /* WC_NO_RNG */ |
10647 | | |
10648 | | #endif /* HAVE_AESCCM */ |
10649 | | |
10650 | | |
10651 | | /* Initialize Aes for use with async hardware */ |
10652 | | int wc_AesInit(Aes* aes, void* heap, int devId) |
10653 | 76 | { |
10654 | 76 | int ret = 0; |
10655 | | |
10656 | 76 | if (aes == NULL) |
10657 | 0 | return BAD_FUNC_ARG; |
10658 | | |
10659 | 76 | aes->heap = heap; |
10660 | | |
10661 | 76 | #ifdef WOLF_CRYPTO_CB |
10662 | 76 | aes->devId = devId; |
10663 | 76 | aes->devCtx = NULL; |
10664 | | #else |
10665 | | (void)devId; |
10666 | | #endif |
10667 | | #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES) |
10668 | | ret = wolfAsync_DevCtxInit(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES, |
10669 | | aes->heap, devId); |
10670 | | #endif /* WOLFSSL_ASYNC_CRYPT */ |
10671 | | |
10672 | | #ifdef WOLFSSL_AFALG |
10673 | | aes->alFd = -1; |
10674 | | aes->rdFd = -1; |
10675 | | #endif |
10676 | | #ifdef WOLFSSL_KCAPI_AES |
10677 | | aes->handle = NULL; |
10678 | | aes->init = 0; |
10679 | | #endif |
10680 | | #if defined(WOLFSSL_DEVCRYPTO) && \ |
10681 | | (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC)) |
10682 | | aes->ctx.cfd = -1; |
10683 | | #endif |
10684 | | #if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES) |
10685 | | XMEMSET(&aes->ctx, 0, sizeof(aes->ctx)); |
10686 | | #endif |
10687 | | #if defined(WOLFSSL_IMXRT_DCP) |
10688 | | DCPAesInit(aes); |
10689 | | #endif |
10690 | | |
10691 | | |
10692 | 76 | #ifdef HAVE_AESGCM |
10693 | | #ifdef OPENSSL_EXTRA |
10694 | | XMEMSET(aes->aadH, 0, sizeof(aes->aadH)); |
10695 | | aes->aadLen = 0; |
10696 | | #endif |
10697 | 76 | #endif |
10698 | | |
10699 | 76 | #ifdef WOLFSSL_AESGCM_STREAM |
10700 | 76 | #if defined(WOLFSSL_SMALL_STACK) && !defined(WOLFSSL_AESNI) |
10701 | 76 | aes->streamData = NULL; |
10702 | 76 | #endif |
10703 | 76 | aes->keylen = 0; |
10704 | 76 | aes->nonceSz = 0; |
10705 | 76 | aes->gcmKeySet = 0; |
10706 | 76 | aes->nonceSet = 0; |
10707 | 76 | aes->ctrSet = 0; |
10708 | 76 | #endif |
10709 | | |
10710 | | #if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES) |
10711 | | ret = wc_psa_aes_init(aes); |
10712 | | #endif |
10713 | | |
10714 | 76 | return ret; |
10715 | 76 | } |
10716 | | |
10717 | | #ifdef WOLF_PRIVATE_KEY_ID |
10718 | | int wc_AesInit_Id(Aes* aes, unsigned char* id, int len, void* heap, int devId) |
10719 | 0 | { |
10720 | 0 | int ret = 0; |
10721 | |
|
10722 | 0 | if (aes == NULL) |
10723 | 0 | ret = BAD_FUNC_ARG; |
10724 | 0 | if (ret == 0 && (len < 0 || len > AES_MAX_ID_LEN)) |
10725 | 0 | ret = BUFFER_E; |
10726 | |
|
10727 | 0 | if (ret == 0) |
10728 | 0 | ret = wc_AesInit(aes, heap, devId); |
10729 | 0 | if (ret == 0) { |
10730 | 0 | XMEMCPY(aes->id, id, len); |
10731 | 0 | aes->idLen = len; |
10732 | 0 | aes->labelLen = 0; |
10733 | 0 | } |
10734 | |
|
10735 | 0 | return ret; |
10736 | 0 | } |
10737 | | |
10738 | | int wc_AesInit_Label(Aes* aes, const char* label, void* heap, int devId) |
10739 | 0 | { |
10740 | 0 | int ret = 0; |
10741 | 0 | int labelLen = 0; |
10742 | |
|
10743 | 0 | if (aes == NULL || label == NULL) |
10744 | 0 | ret = BAD_FUNC_ARG; |
10745 | 0 | if (ret == 0) { |
10746 | 0 | labelLen = (int)XSTRLEN(label); |
10747 | 0 | if (labelLen == 0 || labelLen > AES_MAX_LABEL_LEN) |
10748 | 0 | ret = BUFFER_E; |
10749 | 0 | } |
10750 | |
|
10751 | 0 | if (ret == 0) |
10752 | 0 | ret = wc_AesInit(aes, heap, devId); |
10753 | 0 | if (ret == 0) { |
10754 | 0 | XMEMCPY(aes->label, label, labelLen); |
10755 | 0 | aes->labelLen = labelLen; |
10756 | 0 | aes->idLen = 0; |
10757 | 0 | } |
10758 | |
|
10759 | 0 | return ret; |
10760 | 0 | } |
10761 | | #endif |
10762 | | |
10763 | | /* Free Aes from use with async hardware */ |
10764 | | void wc_AesFree(Aes* aes) |
10765 | 76 | { |
10766 | 76 | if (aes == NULL) |
10767 | 0 | return; |
10768 | | |
10769 | | #if defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES) |
10770 | | wolfAsync_DevCtxFree(&aes->asyncDev, WOLFSSL_ASYNC_MARKER_AES); |
10771 | | #endif /* WOLFSSL_ASYNC_CRYPT */ |
10772 | | #if defined(WOLFSSL_AFALG) || defined(WOLFSSL_AFALG_XILINX_AES) |
10773 | | if (aes->rdFd > 0) { /* negative is error case */ |
10774 | | close(aes->rdFd); |
10775 | | } |
10776 | | if (aes->alFd > 0) { |
10777 | | close(aes->alFd); |
10778 | | } |
10779 | | #endif /* WOLFSSL_AFALG */ |
10780 | | #ifdef WOLFSSL_KCAPI_AES |
10781 | | ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE); |
10782 | | if (aes->init == 1) { |
10783 | | kcapi_cipher_destroy(aes->handle); |
10784 | | } |
10785 | | aes->init = 0; |
10786 | | aes->handle = NULL; |
10787 | | #endif |
10788 | | #if defined(WOLFSSL_DEVCRYPTO) && \ |
10789 | | (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC)) |
10790 | | wc_DevCryptoFree(&aes->ctx); |
10791 | | #endif |
10792 | 76 | #if defined(WOLF_CRYPTO_CB) || (defined(WOLFSSL_DEVCRYPTO) && \ |
10793 | 76 | (defined(WOLFSSL_DEVCRYPTO_AES) || defined(WOLFSSL_DEVCRYPTO_CBC))) || \ |
10794 | 76 | (defined(WOLFSSL_ASYNC_CRYPT) && defined(WC_ASYNC_ENABLE_AES)) |
10795 | 76 | ForceZero((byte*)aes->devKey, AES_MAX_KEY_SIZE/WOLFSSL_BIT_SIZE); |
10796 | 76 | #endif |
10797 | | #if defined(WOLFSSL_IMXRT_DCP) |
10798 | | DCPAesFree(aes); |
10799 | | #endif |
10800 | 76 | #if defined(WOLFSSL_AESGCM_STREAM) && defined(WOLFSSL_SMALL_STACK) && \ |
10801 | 76 | !defined(WOLFSSL_AESNI) |
10802 | 76 | if (aes->streamData != NULL) { |
10803 | 0 | XFREE(aes->streamData, aes->heap, DYNAMIC_TYPE_AES); |
10804 | 0 | aes->streamData = NULL; |
10805 | 0 | } |
10806 | 76 | #endif |
10807 | | |
10808 | | #if defined(WOLFSSL_SE050) && defined(WOLFSSL_SE050_CRYPT) |
10809 | | se050_aes_free(aes); |
10810 | | #endif |
10811 | | |
10812 | | #if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES) |
10813 | | wc_psa_aes_free(aes); |
10814 | | #endif |
10815 | | |
10816 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
10817 | | wc_MemZero_Check(aes, sizeof(Aes)); |
10818 | | #endif |
10819 | 76 | } |
10820 | | |
10821 | | |
10822 | | int wc_AesGetKeySize(Aes* aes, word32* keySize) |
10823 | 0 | { |
10824 | 0 | int ret = 0; |
10825 | |
|
10826 | 0 | if (aes == NULL || keySize == NULL) { |
10827 | 0 | return BAD_FUNC_ARG; |
10828 | 0 | } |
10829 | | |
10830 | | #if defined(WOLFSSL_HAVE_PSA) && !defined(WOLFSSL_PSA_NO_AES) |
10831 | | return wc_psa_aes_get_key_size(aes, keySize); |
10832 | | #endif |
10833 | | #if defined(WOLFSSL_CRYPTOCELL) && defined(WOLFSSL_CRYPTOCELL_AES) |
10834 | | *keySize = aes->ctx.key.keySize; |
10835 | | return ret; |
10836 | | #endif |
10837 | 0 | switch (aes->rounds) { |
10838 | 0 | #ifdef WOLFSSL_AES_128 |
10839 | 0 | case 10: |
10840 | 0 | *keySize = 16; |
10841 | 0 | break; |
10842 | 0 | #endif |
10843 | 0 | #ifdef WOLFSSL_AES_192 |
10844 | 0 | case 12: |
10845 | 0 | *keySize = 24; |
10846 | 0 | break; |
10847 | 0 | #endif |
10848 | 0 | #ifdef WOLFSSL_AES_256 |
10849 | 0 | case 14: |
10850 | 0 | *keySize = 32; |
10851 | 0 | break; |
10852 | 0 | #endif |
10853 | 0 | default: |
10854 | 0 | *keySize = 0; |
10855 | 0 | ret = BAD_FUNC_ARG; |
10856 | 0 | } |
10857 | | |
10858 | 0 | return ret; |
10859 | 0 | } |
10860 | | |
10861 | | #endif /* !WOLFSSL_TI_CRYPT */ |
10862 | | |
10863 | | #ifdef HAVE_AES_ECB |
10864 | | #if defined(WOLFSSL_IMX6_CAAM) && !defined(NO_IMX6_CAAM_AES) && \ |
10865 | | !defined(WOLFSSL_QNX_CAAM) |
10866 | | /* implemented in wolfcrypt/src/port/caam/caam_aes.c */ |
10867 | | |
10868 | | #elif defined(WOLFSSL_AFALG) |
10869 | | /* implemented in wolfcrypt/src/port/af_alg/afalg_aes.c */ |
10870 | | |
10871 | | #elif defined(WOLFSSL_DEVCRYPTO_AES) |
10872 | | /* implemented in wolfcrypt/src/port/devcrypt/devcrypto_aes.c */ |
10873 | | |
10874 | | #elif defined(WOLFSSL_SCE) && !defined(WOLFSSL_SCE_NO_AES) |
10875 | | |
10876 | | /* Software AES - ECB */ |
10877 | | int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
10878 | | { |
10879 | | if ((in == NULL) || (out == NULL) || (aes == NULL)) |
10880 | | return BAD_FUNC_ARG; |
10881 | | |
10882 | | return AES_ECB_encrypt(aes, in, out, sz); |
10883 | | } |
10884 | | |
10885 | | |
10886 | | int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
10887 | | { |
10888 | | if ((in == NULL) || (out == NULL) || (aes == NULL)) |
10889 | | return BAD_FUNC_ARG; |
10890 | | |
10891 | | return AES_ECB_decrypt(aes, in, out, sz); |
10892 | | } |
10893 | | |
10894 | | #else |
10895 | | |
10896 | | /* Software AES - ECB */ |
10897 | | static WARN_UNUSED_RESULT int _AesEcbEncrypt( |
10898 | | Aes* aes, byte* out, const byte* in, word32 sz) |
10899 | 0 | { |
10900 | 0 | word32 blocks = sz / AES_BLOCK_SIZE; |
10901 | |
|
10902 | 0 | #ifdef WOLF_CRYPTO_CB |
10903 | 0 | if (aes->devId != INVALID_DEVID) { |
10904 | 0 | int ret = wc_CryptoCb_AesEcbEncrypt(aes, out, in, sz); |
10905 | 0 | if (ret != CRYPTOCB_UNAVAILABLE) |
10906 | 0 | return ret; |
10907 | | /* fall-through when unavailable */ |
10908 | 0 | } |
10909 | 0 | #endif |
10910 | | #ifdef WOLFSSL_IMXRT_DCP |
10911 | | if (aes->keylen == 16) |
10912 | | return DCPAesEcbEncrypt(aes, out, in, sz); |
10913 | | #endif |
10914 | 0 | while (blocks > 0) { |
10915 | 0 | int ret = wc_AesEncryptDirect(aes, out, in); |
10916 | 0 | if (ret != 0) |
10917 | 0 | return ret; |
10918 | 0 | out += AES_BLOCK_SIZE; |
10919 | 0 | in += AES_BLOCK_SIZE; |
10920 | 0 | blocks--; |
10921 | 0 | } |
10922 | 0 | return 0; |
10923 | 0 | } |
10924 | | |
10925 | | static WARN_UNUSED_RESULT int _AesEcbDecrypt( |
10926 | | Aes* aes, byte* out, const byte* in, word32 sz) |
10927 | 0 | { |
10928 | 0 | word32 blocks = sz / AES_BLOCK_SIZE; |
10929 | |
|
10930 | 0 | #ifdef WOLF_CRYPTO_CB |
10931 | 0 | if (aes->devId != INVALID_DEVID) { |
10932 | 0 | int ret = wc_CryptoCb_AesEcbDecrypt(aes, out, in, sz); |
10933 | 0 | if (ret != CRYPTOCB_UNAVAILABLE) |
10934 | 0 | return ret; |
10935 | | /* fall-through when unavailable */ |
10936 | 0 | } |
10937 | 0 | #endif |
10938 | | #ifdef WOLFSSL_IMXRT_DCP |
10939 | | if (aes->keylen == 16) |
10940 | | return DCPAesEcbDecrypt(aes, out, in, sz); |
10941 | | #endif |
10942 | 0 | while (blocks > 0) { |
10943 | 0 | int ret = wc_AesDecryptDirect(aes, out, in); |
10944 | 0 | if (ret != 0) |
10945 | 0 | return ret; |
10946 | 0 | out += AES_BLOCK_SIZE; |
10947 | 0 | in += AES_BLOCK_SIZE; |
10948 | 0 | blocks--; |
10949 | 0 | } |
10950 | 0 | return 0; |
10951 | 0 | } |
10952 | | |
10953 | | int wc_AesEcbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
10954 | 0 | { |
10955 | 0 | int ret; |
10956 | |
|
10957 | 0 | if ((in == NULL) || (out == NULL) || (aes == NULL)) |
10958 | 0 | return BAD_FUNC_ARG; |
10959 | | |
10960 | 0 | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
10961 | 0 | ret = _AesEcbEncrypt(aes, out, in, sz); |
10962 | 0 | RESTORE_VECTOR_REGISTERS(); |
10963 | |
|
10964 | 0 | return ret; |
10965 | 0 | } |
10966 | | |
10967 | | int wc_AesEcbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
10968 | 0 | { |
10969 | 0 | int ret; |
10970 | |
|
10971 | 0 | if ((in == NULL) || (out == NULL) || (aes == NULL)) |
10972 | 0 | return BAD_FUNC_ARG; |
10973 | | |
10974 | 0 | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
10975 | 0 | ret = _AesEcbDecrypt(aes, out, in, sz); |
10976 | 0 | RESTORE_VECTOR_REGISTERS(); |
10977 | |
|
10978 | 0 | return ret; |
10979 | 0 | } |
10980 | | #endif |
10981 | | #endif /* HAVE_AES_ECB */ |
10982 | | |
10983 | | #if defined(WOLFSSL_AES_CFB) || defined(WOLFSSL_AES_OFB) |
10984 | | /* Feedback AES mode |
10985 | | * |
10986 | | * aes structure holding key to use for encryption |
10987 | | * out buffer to hold result of encryption (must be at least as large as input |
10988 | | * buffer) |
10989 | | * in buffer to encrypt |
10990 | | * sz size of input buffer |
10991 | | * mode flag to specify AES mode |
10992 | | * |
10993 | | * returns 0 on success and negative error values on failure |
10994 | | */ |
10995 | | /* Software AES - CFB Encrypt */ |
10996 | | static WARN_UNUSED_RESULT int wc_AesFeedbackEncrypt( |
10997 | | Aes* aes, byte* out, const byte* in, word32 sz, byte mode) |
10998 | 0 | { |
10999 | 0 | byte* tmp = NULL; |
11000 | 0 | #ifdef WOLFSSL_AES_CFB |
11001 | 0 | byte* reg = NULL; |
11002 | 0 | #endif |
11003 | 0 | int ret = 0; |
11004 | |
|
11005 | 0 | if (aes == NULL || out == NULL || in == NULL) { |
11006 | 0 | return BAD_FUNC_ARG; |
11007 | 0 | } |
11008 | | |
11009 | 0 | #ifdef WOLFSSL_AES_CFB |
11010 | 0 | if (aes->left && sz) { |
11011 | 0 | reg = (byte*)aes->reg + AES_BLOCK_SIZE - aes->left; |
11012 | 0 | } |
11013 | 0 | #endif |
11014 | | |
11015 | | /* consume any unused bytes left in aes->tmp */ |
11016 | 0 | tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left; |
11017 | 0 | while (aes->left && sz) { |
11018 | 0 | *(out) = *(in++) ^ *(tmp++); |
11019 | 0 | #ifdef WOLFSSL_AES_CFB |
11020 | 0 | if (mode == AES_CFB_MODE) { |
11021 | 0 | *(reg++) = *out; |
11022 | 0 | } |
11023 | 0 | #endif |
11024 | 0 | out++; |
11025 | 0 | aes->left--; |
11026 | 0 | sz--; |
11027 | 0 | } |
11028 | |
|
11029 | 0 | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
11030 | |
|
11031 | 0 | while (sz >= AES_BLOCK_SIZE) { |
11032 | | /* Using aes->tmp here for inline case i.e. in=out */ |
11033 | 0 | ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg); |
11034 | 0 | if (ret != 0) |
11035 | 0 | break; |
11036 | 0 | #ifdef WOLFSSL_AES_OFB |
11037 | 0 | if (mode == AES_OFB_MODE) { |
11038 | 0 | XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE); |
11039 | 0 | } |
11040 | 0 | #endif |
11041 | 0 | xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE); |
11042 | 0 | #ifdef WOLFSSL_AES_CFB |
11043 | 0 | if (mode == AES_CFB_MODE) { |
11044 | 0 | XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE); |
11045 | 0 | } |
11046 | 0 | #endif |
11047 | 0 | XMEMCPY(out, aes->tmp, AES_BLOCK_SIZE); |
11048 | 0 | out += AES_BLOCK_SIZE; |
11049 | 0 | in += AES_BLOCK_SIZE; |
11050 | 0 | sz -= AES_BLOCK_SIZE; |
11051 | 0 | aes->left = 0; |
11052 | 0 | } |
11053 | | |
11054 | | /* encrypt left over data */ |
11055 | 0 | if ((ret == 0) && sz) { |
11056 | 0 | ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg); |
11057 | 0 | } |
11058 | 0 | if ((ret == 0) && sz) { |
11059 | 0 | aes->left = AES_BLOCK_SIZE; |
11060 | 0 | tmp = (byte*)aes->tmp; |
11061 | 0 | #ifdef WOLFSSL_AES_OFB |
11062 | 0 | if (mode == AES_OFB_MODE) { |
11063 | 0 | XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE); |
11064 | 0 | } |
11065 | 0 | #endif |
11066 | 0 | #ifdef WOLFSSL_AES_CFB |
11067 | 0 | reg = (byte*)aes->reg; |
11068 | 0 | #endif |
11069 | |
|
11070 | 0 | while (sz--) { |
11071 | 0 | *(out) = *(in++) ^ *(tmp++); |
11072 | 0 | #ifdef WOLFSSL_AES_CFB |
11073 | 0 | if (mode == AES_CFB_MODE) { |
11074 | 0 | *(reg++) = *out; |
11075 | 0 | } |
11076 | 0 | #endif |
11077 | 0 | out++; |
11078 | 0 | aes->left--; |
11079 | 0 | } |
11080 | 0 | } |
11081 | 0 | RESTORE_VECTOR_REGISTERS(); |
11082 | |
|
11083 | 0 | return ret; |
11084 | 0 | } |
11085 | | |
11086 | | |
11087 | | #ifdef HAVE_AES_DECRYPT |
11088 | | /* CFB 128 |
11089 | | * |
11090 | | * aes structure holding key to use for decryption |
11091 | | * out buffer to hold result of decryption (must be at least as large as input |
11092 | | * buffer) |
11093 | | * in buffer to decrypt |
11094 | | * sz size of input buffer |
11095 | | * |
11096 | | * returns 0 on success and negative error values on failure |
11097 | | */ |
11098 | | /* Software AES - CFB Decrypt */ |
11099 | | static WARN_UNUSED_RESULT int wc_AesFeedbackDecrypt( |
11100 | | Aes* aes, byte* out, const byte* in, word32 sz, byte mode) |
11101 | 0 | { |
11102 | 0 | byte* tmp; |
11103 | 0 | int ret = 0; |
11104 | |
|
11105 | 0 | if (aes == NULL || out == NULL || in == NULL) { |
11106 | 0 | return BAD_FUNC_ARG; |
11107 | 0 | } |
11108 | | |
11109 | 0 | #ifdef WOLFSSL_AES_CFB |
11110 | | /* check if more input needs copied over to aes->reg */ |
11111 | 0 | if (aes->left && sz && mode == AES_CFB_MODE) { |
11112 | 0 | int size = min(aes->left, sz); |
11113 | 0 | XMEMCPY((byte*)aes->reg + AES_BLOCK_SIZE - aes->left, in, size); |
11114 | 0 | } |
11115 | 0 | #endif |
11116 | | |
11117 | | /* consume any unused bytes left in aes->tmp */ |
11118 | 0 | tmp = (byte*)aes->tmp + AES_BLOCK_SIZE - aes->left; |
11119 | 0 | while (aes->left && sz) { |
11120 | 0 | *(out++) = *(in++) ^ *(tmp++); |
11121 | 0 | aes->left--; |
11122 | 0 | sz--; |
11123 | 0 | } |
11124 | |
|
11125 | 0 | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
11126 | |
|
11127 | 0 | while (sz > AES_BLOCK_SIZE) { |
11128 | | /* Using aes->tmp here for inline case i.e. in=out */ |
11129 | 0 | ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg); |
11130 | 0 | if (ret != 0) |
11131 | 0 | break; |
11132 | 0 | #ifdef WOLFSSL_AES_OFB |
11133 | 0 | if (mode == AES_OFB_MODE) { |
11134 | 0 | XMEMCPY((byte*)aes->reg, (byte*)aes->tmp, AES_BLOCK_SIZE); |
11135 | 0 | } |
11136 | 0 | #endif |
11137 | 0 | xorbuf((byte*)aes->tmp, in, AES_BLOCK_SIZE); |
11138 | 0 | #ifdef WOLFSSL_AES_CFB |
11139 | 0 | if (mode == AES_CFB_MODE) { |
11140 | 0 | XMEMCPY(aes->reg, in, AES_BLOCK_SIZE); |
11141 | 0 | } |
11142 | 0 | #endif |
11143 | 0 | XMEMCPY(out, (byte*)aes->tmp, AES_BLOCK_SIZE); |
11144 | 0 | out += AES_BLOCK_SIZE; |
11145 | 0 | in += AES_BLOCK_SIZE; |
11146 | 0 | sz -= AES_BLOCK_SIZE; |
11147 | 0 | aes->left = 0; |
11148 | 0 | } |
11149 | | |
11150 | | /* decrypt left over data */ |
11151 | 0 | if ((ret == 0) && sz) { |
11152 | 0 | ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg); |
11153 | 0 | } |
11154 | 0 | if ((ret == 0) && sz) { |
11155 | 0 | #ifdef WOLFSSL_AES_CFB |
11156 | 0 | if (mode == AES_CFB_MODE) { |
11157 | 0 | XMEMCPY(aes->reg, in, sz); |
11158 | 0 | } |
11159 | 0 | #endif |
11160 | 0 | #ifdef WOLFSSL_AES_OFB |
11161 | 0 | if (mode == AES_OFB_MODE) { |
11162 | 0 | XMEMCPY(aes->reg, aes->tmp, AES_BLOCK_SIZE); |
11163 | 0 | } |
11164 | 0 | #endif |
11165 | |
|
11166 | 0 | aes->left = AES_BLOCK_SIZE; |
11167 | 0 | tmp = (byte*)aes->tmp; |
11168 | |
|
11169 | 0 | while (sz--) { |
11170 | 0 | *(out++) = *(in++) ^ *(tmp++); |
11171 | 0 | aes->left--; |
11172 | 0 | } |
11173 | 0 | } |
11174 | 0 | RESTORE_VECTOR_REGISTERS(); |
11175 | |
|
11176 | 0 | return ret; |
11177 | 0 | } |
11178 | | #endif /* HAVE_AES_DECRYPT */ |
11179 | | #endif /* WOLFSSL_AES_CFB */ |
11180 | | |
11181 | | #ifdef WOLFSSL_AES_CFB |
11182 | | /* CFB 128 |
11183 | | * |
11184 | | * aes structure holding key to use for encryption |
11185 | | * out buffer to hold result of encryption (must be at least as large as input |
11186 | | * buffer) |
11187 | | * in buffer to encrypt |
11188 | | * sz size of input buffer |
11189 | | * |
11190 | | * returns 0 on success and negative error values on failure |
11191 | | */ |
11192 | | /* Software AES - CFB Encrypt */ |
11193 | | int wc_AesCfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
11194 | 0 | { |
11195 | 0 | return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_CFB_MODE); |
11196 | 0 | } |
11197 | | |
11198 | | |
11199 | | #ifdef HAVE_AES_DECRYPT |
11200 | | /* CFB 128 |
11201 | | * |
11202 | | * aes structure holding key to use for decryption |
11203 | | * out buffer to hold result of decryption (must be at least as large as input |
11204 | | * buffer) |
11205 | | * in buffer to decrypt |
11206 | | * sz size of input buffer |
11207 | | * |
11208 | | * returns 0 on success and negative error values on failure |
11209 | | */ |
11210 | | /* Software AES - CFB Decrypt */ |
11211 | | int wc_AesCfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
11212 | 0 | { |
11213 | 0 | return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_CFB_MODE); |
11214 | 0 | } |
11215 | | #endif /* HAVE_AES_DECRYPT */ |
11216 | | |
11217 | | |
11218 | | /* shift the whole AES_BLOCK_SIZE array left by 8 or 1 bits */ |
11219 | | static void shiftLeftArray(byte* ary, byte shift) |
11220 | 0 | { |
11221 | 0 | int i; |
11222 | |
|
11223 | 0 | if (shift == WOLFSSL_BIT_SIZE) { |
11224 | | /* shifting over by 8 bits */ |
11225 | 0 | for (i = 0; i < AES_BLOCK_SIZE - 1; i++) { |
11226 | 0 | ary[i] = ary[i+1]; |
11227 | 0 | } |
11228 | 0 | ary[i] = 0; |
11229 | 0 | } |
11230 | 0 | else { |
11231 | | /* shifting over by 7 or less bits */ |
11232 | 0 | for (i = 0; i < AES_BLOCK_SIZE - 1; i++) { |
11233 | 0 | byte carry = ary[i+1] & (0XFF << (WOLFSSL_BIT_SIZE - shift)); |
11234 | 0 | carry >>= (WOLFSSL_BIT_SIZE - shift); |
11235 | 0 | ary[i] = (ary[i] << shift) + carry; |
11236 | 0 | } |
11237 | 0 | ary[i] = ary[i] << shift; |
11238 | 0 | } |
11239 | 0 | } |
11240 | | |
11241 | | |
11242 | | /* returns 0 on success and negative values on failure */ |
11243 | | static WARN_UNUSED_RESULT int wc_AesFeedbackCFB8( |
11244 | | Aes* aes, byte* out, const byte* in, word32 sz, byte dir) |
11245 | 0 | { |
11246 | 0 | byte *pt; |
11247 | 0 | int ret = 0; |
11248 | |
|
11249 | 0 | if (aes == NULL || out == NULL || in == NULL) { |
11250 | 0 | return BAD_FUNC_ARG; |
11251 | 0 | } |
11252 | | |
11253 | 0 | if (sz == 0) { |
11254 | 0 | return 0; |
11255 | 0 | } |
11256 | | |
11257 | 0 | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
11258 | |
|
11259 | 0 | while (sz > 0) { |
11260 | 0 | ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg); |
11261 | 0 | if (ret != 0) |
11262 | 0 | break; |
11263 | 0 | if (dir == AES_DECRYPTION) { |
11264 | 0 | pt = (byte*)aes->reg; |
11265 | | |
11266 | | /* LSB + CAT */ |
11267 | 0 | shiftLeftArray(pt, WOLFSSL_BIT_SIZE); |
11268 | 0 | pt[AES_BLOCK_SIZE - 1] = in[0]; |
11269 | 0 | } |
11270 | | |
11271 | | /* MSB + XOR */ |
11272 | | #ifdef BIG_ENDIAN_ORDER |
11273 | | ByteReverseWords(aes->tmp, aes->tmp, AES_BLOCK_SIZE); |
11274 | | #endif |
11275 | 0 | out[0] = (byte)(aes->tmp[0] ^ in[0]); |
11276 | 0 | if (dir == AES_ENCRYPTION) { |
11277 | 0 | pt = (byte*)aes->reg; |
11278 | | |
11279 | | /* LSB + CAT */ |
11280 | 0 | shiftLeftArray(pt, WOLFSSL_BIT_SIZE); |
11281 | 0 | pt[AES_BLOCK_SIZE - 1] = out[0]; |
11282 | 0 | } |
11283 | |
|
11284 | 0 | out += 1; |
11285 | 0 | in += 1; |
11286 | 0 | sz -= 1; |
11287 | 0 | } |
11288 | |
|
11289 | 0 | RESTORE_VECTOR_REGISTERS(); |
11290 | |
|
11291 | 0 | return ret; |
11292 | 0 | } |
11293 | | |
11294 | | |
11295 | | /* returns 0 on success and negative values on failure */ |
11296 | | static WARN_UNUSED_RESULT int wc_AesFeedbackCFB1( |
11297 | | Aes* aes, byte* out, const byte* in, word32 sz, byte dir) |
11298 | 0 | { |
11299 | 0 | byte tmp; |
11300 | 0 | byte cur = 0; /* hold current work in order to handle inline in=out */ |
11301 | 0 | byte* pt; |
11302 | 0 | int bit = 7; |
11303 | 0 | int ret = 0; |
11304 | |
|
11305 | 0 | if (aes == NULL || out == NULL || in == NULL) { |
11306 | 0 | return BAD_FUNC_ARG; |
11307 | 0 | } |
11308 | | |
11309 | 0 | if (sz == 0) { |
11310 | 0 | return 0; |
11311 | 0 | } |
11312 | | |
11313 | 0 | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
11314 | |
|
11315 | 0 | while (sz > 0) { |
11316 | 0 | ret = wc_AesEncryptDirect(aes, (byte*)aes->tmp, (byte*)aes->reg); |
11317 | 0 | if (ret != 0) |
11318 | 0 | break; |
11319 | 0 | if (dir == AES_DECRYPTION) { |
11320 | 0 | pt = (byte*)aes->reg; |
11321 | | |
11322 | | /* LSB + CAT */ |
11323 | 0 | tmp = (0X01 << bit) & in[0]; |
11324 | 0 | tmp = tmp >> bit; |
11325 | 0 | tmp &= 0x01; |
11326 | 0 | shiftLeftArray((byte*)aes->reg, 1); |
11327 | 0 | pt[AES_BLOCK_SIZE - 1] |= tmp; |
11328 | 0 | } |
11329 | | |
11330 | | /* MSB + XOR */ |
11331 | 0 | tmp = (0X01 << bit) & in[0]; |
11332 | 0 | pt = (byte*)aes->tmp; |
11333 | 0 | tmp = (pt[0] >> 7) ^ (tmp >> bit); |
11334 | 0 | tmp &= 0x01; |
11335 | 0 | cur |= (tmp << bit); |
11336 | | |
11337 | |
|
11338 | 0 | if (dir == AES_ENCRYPTION) { |
11339 | 0 | pt = (byte*)aes->reg; |
11340 | | |
11341 | | /* LSB + CAT */ |
11342 | 0 | shiftLeftArray((byte*)aes->reg, 1); |
11343 | 0 | pt[AES_BLOCK_SIZE - 1] |= tmp; |
11344 | 0 | } |
11345 | |
|
11346 | 0 | bit--; |
11347 | 0 | if (bit < 0) { |
11348 | 0 | out[0] = cur; |
11349 | 0 | out += 1; |
11350 | 0 | in += 1; |
11351 | 0 | sz -= 1; |
11352 | 0 | bit = 7; |
11353 | 0 | cur = 0; |
11354 | 0 | } |
11355 | 0 | else { |
11356 | 0 | sz -= 1; |
11357 | 0 | } |
11358 | 0 | } |
11359 | |
|
11360 | 0 | if (ret == 0) { |
11361 | 0 | if (bit > 0 && bit < 7) { |
11362 | 0 | out[0] = cur; |
11363 | 0 | } |
11364 | 0 | } |
11365 | |
|
11366 | 0 | RESTORE_VECTOR_REGISTERS(); |
11367 | |
|
11368 | 0 | return ret; |
11369 | 0 | } |
11370 | | |
11371 | | |
11372 | | /* CFB 1 |
11373 | | * |
11374 | | * aes structure holding key to use for encryption |
11375 | | * out buffer to hold result of encryption (must be at least as large as input |
11376 | | * buffer) |
11377 | | * in buffer to encrypt (packed to left, i.e. 101 is 0x90) |
11378 | | * sz size of input buffer in bits (0x1 would be size of 1 and 0xFF size of 8) |
11379 | | * |
11380 | | * returns 0 on success and negative values on failure |
11381 | | */ |
11382 | | int wc_AesCfb1Encrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
11383 | 0 | { |
11384 | 0 | return wc_AesFeedbackCFB1(aes, out, in, sz, AES_ENCRYPTION); |
11385 | 0 | } |
11386 | | |
11387 | | |
11388 | | /* CFB 8 |
11389 | | * |
11390 | | * aes structure holding key to use for encryption |
11391 | | * out buffer to hold result of encryption (must be at least as large as input |
11392 | | * buffer) |
11393 | | * in buffer to encrypt |
11394 | | * sz size of input buffer |
11395 | | * |
11396 | | * returns 0 on success and negative values on failure |
11397 | | */ |
11398 | | int wc_AesCfb8Encrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
11399 | 0 | { |
11400 | 0 | return wc_AesFeedbackCFB8(aes, out, in, sz, AES_ENCRYPTION); |
11401 | 0 | } |
11402 | | #ifdef HAVE_AES_DECRYPT |
11403 | | |
11404 | | /* CFB 1 |
11405 | | * |
11406 | | * aes structure holding key to use for encryption |
11407 | | * out buffer to hold result of encryption (must be at least as large as input |
11408 | | * buffer) |
11409 | | * in buffer to encrypt |
11410 | | * sz size of input buffer in bits (0x1 would be size of 1 and 0xFF size of 8) |
11411 | | * |
11412 | | * returns 0 on success and negative values on failure |
11413 | | */ |
11414 | | int wc_AesCfb1Decrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
11415 | 0 | { |
11416 | 0 | return wc_AesFeedbackCFB1(aes, out, in, sz, AES_DECRYPTION); |
11417 | 0 | } |
11418 | | |
11419 | | |
11420 | | /* CFB 8 |
11421 | | * |
11422 | | * aes structure holding key to use for encryption |
11423 | | * out buffer to hold result of encryption (must be at least as large as input |
11424 | | * buffer) |
11425 | | * in buffer to encrypt |
11426 | | * sz size of input buffer |
11427 | | * |
11428 | | * returns 0 on success and negative values on failure |
11429 | | */ |
11430 | | int wc_AesCfb8Decrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
11431 | 0 | { |
11432 | 0 | return wc_AesFeedbackCFB8(aes, out, in, sz, AES_DECRYPTION); |
11433 | 0 | } |
11434 | | #endif /* HAVE_AES_DECRYPT */ |
11435 | | #endif /* WOLFSSL_AES_CFB */ |
11436 | | |
11437 | | #ifdef WOLFSSL_AES_OFB |
11438 | | /* OFB |
11439 | | * |
11440 | | * aes structure holding key to use for encryption |
11441 | | * out buffer to hold result of encryption (must be at least as large as input |
11442 | | * buffer) |
11443 | | * in buffer to encrypt |
11444 | | * sz size of input buffer |
11445 | | * |
11446 | | * returns 0 on success and negative error values on failure |
11447 | | */ |
11448 | | /* Software AES - CFB Encrypt */ |
11449 | | int wc_AesOfbEncrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
11450 | 0 | { |
11451 | 0 | return wc_AesFeedbackEncrypt(aes, out, in, sz, AES_OFB_MODE); |
11452 | 0 | } |
11453 | | |
11454 | | |
11455 | | #ifdef HAVE_AES_DECRYPT |
11456 | | /* OFB |
11457 | | * |
11458 | | * aes structure holding key to use for decryption |
11459 | | * out buffer to hold result of decryption (must be at least as large as input |
11460 | | * buffer) |
11461 | | * in buffer to decrypt |
11462 | | * sz size of input buffer |
11463 | | * |
11464 | | * returns 0 on success and negative error values on failure |
11465 | | */ |
11466 | | /* Software AES - OFB Decrypt */ |
11467 | | int wc_AesOfbDecrypt(Aes* aes, byte* out, const byte* in, word32 sz) |
11468 | 0 | { |
11469 | 0 | return wc_AesFeedbackDecrypt(aes, out, in, sz, AES_OFB_MODE); |
11470 | 0 | } |
11471 | | #endif /* HAVE_AES_DECRYPT */ |
11472 | | #endif /* WOLFSSL_AES_OFB */ |
11473 | | |
11474 | | |
11475 | | #ifdef HAVE_AES_KEYWRAP |
11476 | | |
11477 | | /* Initialize key wrap counter with value */ |
11478 | | static WC_INLINE void InitKeyWrapCounter(byte* inOutCtr, word32 value) |
11479 | 0 | { |
11480 | 0 | int i; |
11481 | 0 | word32 bytes; |
11482 | |
|
11483 | 0 | bytes = sizeof(word32); |
11484 | 0 | for (i = 0; i < (int)sizeof(word32); i++) { |
11485 | 0 | inOutCtr[i+sizeof(word32)] = (value >> ((bytes - 1) * 8)) & 0xFF; |
11486 | 0 | bytes--; |
11487 | 0 | } |
11488 | 0 | } |
11489 | | |
11490 | | /* Increment key wrap counter */ |
11491 | | static WC_INLINE void IncrementKeyWrapCounter(byte* inOutCtr) |
11492 | 0 | { |
11493 | 0 | int i; |
11494 | | |
11495 | | /* in network byte order so start at end and work back */ |
11496 | 0 | for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) { |
11497 | 0 | if (++inOutCtr[i]) /* we're done unless we overflow */ |
11498 | 0 | return; |
11499 | 0 | } |
11500 | 0 | } |
11501 | | |
11502 | | /* Decrement key wrap counter */ |
11503 | | static WC_INLINE void DecrementKeyWrapCounter(byte* inOutCtr) |
11504 | 0 | { |
11505 | 0 | int i; |
11506 | |
|
11507 | 0 | for (i = KEYWRAP_BLOCK_SIZE - 1; i >= 0; i--) { |
11508 | 0 | if (--inOutCtr[i] != 0xFF) /* we're done unless we underflow */ |
11509 | 0 | return; |
11510 | 0 | } |
11511 | 0 | } |
11512 | | |
11513 | | int wc_AesKeyWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out, |
11514 | | word32 outSz, const byte* iv) |
11515 | 0 | { |
11516 | 0 | word32 i; |
11517 | 0 | byte* r; |
11518 | 0 | int j; |
11519 | 0 | int ret = 0; |
11520 | |
|
11521 | 0 | byte t[KEYWRAP_BLOCK_SIZE]; |
11522 | 0 | byte tmp[AES_BLOCK_SIZE]; |
11523 | | |
11524 | | /* n must be at least 2 64-bit blocks, output size is (n + 1) 8 bytes (64-bit) */ |
11525 | 0 | if (aes == NULL || in == NULL || inSz < 2*KEYWRAP_BLOCK_SIZE || |
11526 | 0 | out == NULL || outSz < (inSz + KEYWRAP_BLOCK_SIZE)) |
11527 | 0 | return BAD_FUNC_ARG; |
11528 | | |
11529 | | /* input must be multiple of 64-bits */ |
11530 | 0 | if (inSz % KEYWRAP_BLOCK_SIZE != 0) |
11531 | 0 | return BAD_FUNC_ARG; |
11532 | | |
11533 | 0 | r = out + 8; |
11534 | 0 | XMEMCPY(r, in, inSz); |
11535 | 0 | XMEMSET(t, 0, sizeof(t)); |
11536 | | |
11537 | | /* user IV is optional */ |
11538 | 0 | if (iv == NULL) { |
11539 | 0 | XMEMSET(tmp, 0xA6, KEYWRAP_BLOCK_SIZE); |
11540 | 0 | } else { |
11541 | 0 | XMEMCPY(tmp, iv, KEYWRAP_BLOCK_SIZE); |
11542 | 0 | } |
11543 | |
|
11544 | 0 | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
11545 | |
|
11546 | 0 | for (j = 0; j <= 5; j++) { |
11547 | 0 | for (i = 1; i <= inSz / KEYWRAP_BLOCK_SIZE; i++) { |
11548 | | /* load R[i] */ |
11549 | 0 | XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE); |
11550 | |
|
11551 | 0 | ret = wc_AesEncryptDirect(aes, tmp, tmp); |
11552 | 0 | if (ret != 0) |
11553 | 0 | break; |
11554 | | |
11555 | | /* calculate new A */ |
11556 | 0 | IncrementKeyWrapCounter(t); |
11557 | 0 | xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE); |
11558 | | |
11559 | | /* save R[i] */ |
11560 | 0 | XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE); |
11561 | 0 | r += KEYWRAP_BLOCK_SIZE; |
11562 | 0 | } |
11563 | 0 | if (ret != 0) |
11564 | 0 | break; |
11565 | 0 | r = out + KEYWRAP_BLOCK_SIZE; |
11566 | 0 | } |
11567 | 0 | RESTORE_VECTOR_REGISTERS(); |
11568 | |
|
11569 | 0 | if (ret != 0) |
11570 | 0 | return ret; |
11571 | | |
11572 | | /* C[0] = A */ |
11573 | 0 | XMEMCPY(out, tmp, KEYWRAP_BLOCK_SIZE); |
11574 | |
|
11575 | 0 | return inSz + KEYWRAP_BLOCK_SIZE; |
11576 | 0 | } |
11577 | | |
11578 | | /* perform AES key wrap (RFC3394), return out sz on success, negative on err */ |
11579 | | int wc_AesKeyWrap(const byte* key, word32 keySz, const byte* in, word32 inSz, |
11580 | | byte* out, word32 outSz, const byte* iv) |
11581 | 0 | { |
11582 | 0 | #ifdef WOLFSSL_SMALL_STACK |
11583 | 0 | Aes *aes = NULL; |
11584 | | #else |
11585 | | Aes aes[1]; |
11586 | | #endif |
11587 | 0 | int ret; |
11588 | |
|
11589 | 0 | if (key == NULL) |
11590 | 0 | return BAD_FUNC_ARG; |
11591 | | |
11592 | 0 | #ifdef WOLFSSL_SMALL_STACK |
11593 | 0 | if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL, |
11594 | 0 | DYNAMIC_TYPE_AES)) == NULL) |
11595 | 0 | return MEMORY_E; |
11596 | 0 | #endif |
11597 | | |
11598 | 0 | ret = wc_AesInit(aes, NULL, INVALID_DEVID); |
11599 | 0 | if (ret != 0) |
11600 | 0 | goto out; |
11601 | | |
11602 | 0 | ret = wc_AesSetKey(aes, key, keySz, NULL, AES_ENCRYPTION); |
11603 | 0 | if (ret != 0) { |
11604 | 0 | wc_AesFree(aes); |
11605 | 0 | goto out; |
11606 | 0 | } |
11607 | | |
11608 | 0 | ret = wc_AesKeyWrap_ex(aes, in, inSz, out, outSz, iv); |
11609 | |
|
11610 | 0 | wc_AesFree(aes); |
11611 | |
|
11612 | 0 | out: |
11613 | 0 | #ifdef WOLFSSL_SMALL_STACK |
11614 | 0 | if (aes != NULL) |
11615 | 0 | XFREE(aes, NULL, DYNAMIC_TYPE_AES); |
11616 | 0 | #endif |
11617 | |
|
11618 | 0 | return ret; |
11619 | 0 | } |
11620 | | |
11621 | | int wc_AesKeyUnWrap_ex(Aes *aes, const byte* in, word32 inSz, byte* out, |
11622 | | word32 outSz, const byte* iv) |
11623 | 0 | { |
11624 | 0 | byte* r; |
11625 | 0 | word32 i, n; |
11626 | 0 | int j; |
11627 | 0 | int ret = 0; |
11628 | |
|
11629 | 0 | byte t[KEYWRAP_BLOCK_SIZE]; |
11630 | 0 | byte tmp[AES_BLOCK_SIZE]; |
11631 | |
|
11632 | 0 | const byte* expIv; |
11633 | 0 | const byte defaultIV[] = { |
11634 | 0 | 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6, 0xA6 |
11635 | 0 | }; |
11636 | |
|
11637 | 0 | if (aes == NULL || in == NULL || inSz < 3 * KEYWRAP_BLOCK_SIZE || |
11638 | 0 | out == NULL || outSz < (inSz - KEYWRAP_BLOCK_SIZE)) |
11639 | 0 | return BAD_FUNC_ARG; |
11640 | | |
11641 | | /* input must be multiple of 64-bits */ |
11642 | 0 | if (inSz % KEYWRAP_BLOCK_SIZE != 0) |
11643 | 0 | return BAD_FUNC_ARG; |
11644 | | |
11645 | | /* user IV optional */ |
11646 | 0 | if (iv != NULL) |
11647 | 0 | expIv = iv; |
11648 | 0 | else |
11649 | 0 | expIv = defaultIV; |
11650 | | |
11651 | | /* A = C[0], R[i] = C[i] */ |
11652 | 0 | XMEMCPY(tmp, in, KEYWRAP_BLOCK_SIZE); |
11653 | 0 | XMEMCPY(out, in + KEYWRAP_BLOCK_SIZE, inSz - KEYWRAP_BLOCK_SIZE); |
11654 | 0 | XMEMSET(t, 0, sizeof(t)); |
11655 | |
|
11656 | 0 | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
11657 | | |
11658 | | /* initialize counter to 6n */ |
11659 | 0 | n = (inSz - 1) / KEYWRAP_BLOCK_SIZE; |
11660 | 0 | InitKeyWrapCounter(t, 6 * n); |
11661 | |
|
11662 | 0 | for (j = 5; j >= 0; j--) { |
11663 | 0 | for (i = n; i >= 1; i--) { |
11664 | | |
11665 | | /* calculate A */ |
11666 | 0 | xorbuf(tmp, t, KEYWRAP_BLOCK_SIZE); |
11667 | 0 | DecrementKeyWrapCounter(t); |
11668 | | |
11669 | | /* load R[i], starting at end of R */ |
11670 | 0 | r = out + ((i - 1) * KEYWRAP_BLOCK_SIZE); |
11671 | 0 | XMEMCPY(tmp + KEYWRAP_BLOCK_SIZE, r, KEYWRAP_BLOCK_SIZE); |
11672 | 0 | ret = wc_AesDecryptDirect(aes, tmp, tmp); |
11673 | 0 | if (ret != 0) |
11674 | 0 | break; |
11675 | | |
11676 | | /* save R[i] */ |
11677 | 0 | XMEMCPY(r, tmp + KEYWRAP_BLOCK_SIZE, KEYWRAP_BLOCK_SIZE); |
11678 | 0 | } |
11679 | 0 | if (ret != 0) |
11680 | 0 | break; |
11681 | 0 | } |
11682 | 0 | RESTORE_VECTOR_REGISTERS(); |
11683 | |
|
11684 | 0 | if (ret != 0) |
11685 | 0 | return ret; |
11686 | | |
11687 | | /* verify IV */ |
11688 | 0 | if (XMEMCMP(tmp, expIv, KEYWRAP_BLOCK_SIZE) != 0) |
11689 | 0 | return BAD_KEYWRAP_IV_E; |
11690 | | |
11691 | 0 | return inSz - KEYWRAP_BLOCK_SIZE; |
11692 | 0 | } |
11693 | | |
11694 | | int wc_AesKeyUnWrap(const byte* key, word32 keySz, const byte* in, word32 inSz, |
11695 | | byte* out, word32 outSz, const byte* iv) |
11696 | 0 | { |
11697 | 0 | #ifdef WOLFSSL_SMALL_STACK |
11698 | 0 | Aes *aes = NULL; |
11699 | | #else |
11700 | | Aes aes[1]; |
11701 | | #endif |
11702 | 0 | int ret; |
11703 | |
|
11704 | 0 | (void)iv; |
11705 | |
|
11706 | 0 | if (key == NULL) |
11707 | 0 | return BAD_FUNC_ARG; |
11708 | | |
11709 | 0 | #ifdef WOLFSSL_SMALL_STACK |
11710 | 0 | if ((aes = (Aes *)XMALLOC(sizeof *aes, NULL, |
11711 | 0 | DYNAMIC_TYPE_AES)) == NULL) |
11712 | 0 | return MEMORY_E; |
11713 | 0 | #endif |
11714 | | |
11715 | | |
11716 | 0 | ret = wc_AesInit(aes, NULL, INVALID_DEVID); |
11717 | 0 | if (ret != 0) |
11718 | 0 | goto out; |
11719 | | |
11720 | 0 | ret = wc_AesSetKey(aes, key, keySz, NULL, AES_DECRYPTION); |
11721 | 0 | if (ret != 0) { |
11722 | 0 | wc_AesFree(aes); |
11723 | 0 | goto out; |
11724 | 0 | } |
11725 | | |
11726 | 0 | ret = wc_AesKeyUnWrap_ex(aes, in, inSz, out, outSz, iv); |
11727 | |
|
11728 | 0 | wc_AesFree(aes); |
11729 | |
|
11730 | 0 | out: |
11731 | 0 | #ifdef WOLFSSL_SMALL_STACK |
11732 | 0 | if (aes) |
11733 | 0 | XFREE(aes, NULL, DYNAMIC_TYPE_AES); |
11734 | 0 | #endif |
11735 | |
|
11736 | 0 | return ret; |
11737 | 0 | } |
11738 | | |
11739 | | #endif /* HAVE_AES_KEYWRAP */ |
11740 | | |
11741 | | #ifdef WOLFSSL_AES_XTS |
11742 | | |
11743 | | /* Galios Field to use */ |
11744 | 0 | #define GF_XTS 0x87 |
11745 | | |
11746 | | /* This is to help with setting keys to correct encrypt or decrypt type. |
11747 | | * |
11748 | | * tweak AES key for tweak in XTS |
11749 | | * aes AES key for encrypt/decrypt process |
11750 | | * key buffer holding aes key | tweak key |
11751 | | * len length of key buffer in bytes. Should be twice that of key size. i.e. |
11752 | | * 32 for a 16 byte key. |
11753 | | * dir direction, either AES_ENCRYPTION or AES_DECRYPTION |
11754 | | * heap heap hint to use for memory. Can be NULL |
11755 | | * devId id to use with async crypto. Can be 0 |
11756 | | * |
11757 | | * Note: is up to user to call wc_AesFree on tweak and aes key when done. |
11758 | | * |
11759 | | * return 0 on success |
11760 | | */ |
11761 | | int wc_AesXtsSetKey(XtsAes* aes, const byte* key, word32 len, int dir, |
11762 | | void* heap, int devId) |
11763 | 0 | { |
11764 | 0 | word32 keySz; |
11765 | 0 | int ret = 0; |
11766 | |
|
11767 | 0 | if (aes == NULL || key == NULL) { |
11768 | 0 | return BAD_FUNC_ARG; |
11769 | 0 | } |
11770 | | |
11771 | 0 | if ((ret = wc_AesInit(&aes->tweak, heap, devId)) != 0) { |
11772 | 0 | return ret; |
11773 | 0 | } |
11774 | 0 | if ((ret = wc_AesInit(&aes->aes, heap, devId)) != 0) { |
11775 | 0 | return ret; |
11776 | 0 | } |
11777 | | |
11778 | 0 | keySz = len/2; |
11779 | 0 | if (keySz != 16 && keySz != 32) { |
11780 | 0 | WOLFSSL_MSG("Unsupported key size"); |
11781 | 0 | return WC_KEY_SIZE_E; |
11782 | 0 | } |
11783 | | |
11784 | 0 | if ((ret = wc_AesSetKey(&aes->aes, key, keySz, NULL, dir)) == 0) { |
11785 | 0 | ret = wc_AesSetKey(&aes->tweak, key + keySz, keySz, NULL, |
11786 | 0 | AES_ENCRYPTION); |
11787 | 0 | if (ret != 0) { |
11788 | 0 | wc_AesFree(&aes->aes); |
11789 | 0 | } |
11790 | 0 | } |
11791 | |
|
11792 | 0 | return ret; |
11793 | 0 | } |
11794 | | |
11795 | | |
11796 | | /* This is used to free up resources used by Aes structs |
11797 | | * |
11798 | | * aes AES keys to free |
11799 | | * |
11800 | | * return 0 on success |
11801 | | */ |
11802 | | int wc_AesXtsFree(XtsAes* aes) |
11803 | 0 | { |
11804 | 0 | if (aes != NULL) { |
11805 | 0 | wc_AesFree(&aes->aes); |
11806 | 0 | wc_AesFree(&aes->tweak); |
11807 | 0 | } |
11808 | |
|
11809 | 0 | return 0; |
11810 | 0 | } |
11811 | | |
11812 | | |
11813 | | /* Same process as wc_AesXtsEncrypt but uses a word64 type as the tweak value |
11814 | | * instead of a byte array. This just converts the word64 to a byte array and |
11815 | | * calls wc_AesXtsEncrypt. |
11816 | | * |
11817 | | * aes AES keys to use for block encrypt/decrypt |
11818 | | * out output buffer to hold cipher text |
11819 | | * in input plain text buffer to encrypt |
11820 | | * sz size of both out and in buffers |
11821 | | * sector value to use for tweak |
11822 | | * |
11823 | | * returns 0 on success |
11824 | | */ |
11825 | | int wc_AesXtsEncryptSector(XtsAes* aes, byte* out, const byte* in, |
11826 | | word32 sz, word64 sector) |
11827 | 0 | { |
11828 | 0 | byte* pt; |
11829 | 0 | byte i[AES_BLOCK_SIZE]; |
11830 | |
|
11831 | 0 | XMEMSET(i, 0, AES_BLOCK_SIZE); |
11832 | | #ifdef BIG_ENDIAN_ORDER |
11833 | | sector = ByteReverseWord64(sector); |
11834 | | #endif |
11835 | 0 | pt = (byte*)§or; |
11836 | 0 | XMEMCPY(i, pt, sizeof(word64)); |
11837 | |
|
11838 | 0 | return wc_AesXtsEncrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE); |
11839 | 0 | } |
11840 | | |
11841 | | |
11842 | | /* Same process as wc_AesXtsDecrypt but uses a word64 type as the tweak value |
11843 | | * instead of a byte array. This just converts the word64 to a byte array. |
11844 | | * |
11845 | | * aes AES keys to use for block encrypt/decrypt |
11846 | | * out output buffer to hold plain text |
11847 | | * in input cipher text buffer to encrypt |
11848 | | * sz size of both out and in buffers |
11849 | | * sector value to use for tweak |
11850 | | * |
11851 | | * returns 0 on success |
11852 | | */ |
11853 | | int wc_AesXtsDecryptSector(XtsAes* aes, byte* out, const byte* in, word32 sz, |
11854 | | word64 sector) |
11855 | 0 | { |
11856 | 0 | byte* pt; |
11857 | 0 | byte i[AES_BLOCK_SIZE]; |
11858 | |
|
11859 | 0 | XMEMSET(i, 0, AES_BLOCK_SIZE); |
11860 | | #ifdef BIG_ENDIAN_ORDER |
11861 | | sector = ByteReverseWord64(sector); |
11862 | | #endif |
11863 | 0 | pt = (byte*)§or; |
11864 | 0 | XMEMCPY(i, pt, sizeof(word64)); |
11865 | |
|
11866 | 0 | return wc_AesXtsDecrypt(aes, out, in, sz, (const byte*)i, AES_BLOCK_SIZE); |
11867 | 0 | } |
11868 | | |
11869 | | #ifdef HAVE_AES_ECB |
11870 | | /* helper function for encrypting / decrypting full buffer at once */ |
11871 | | static WARN_UNUSED_RESULT int _AesXtsHelper( |
11872 | | Aes* aes, byte* out, const byte* in, word32 sz, int dir) |
11873 | 0 | { |
11874 | 0 | word32 outSz = sz; |
11875 | 0 | word32 totalSz = (sz / AES_BLOCK_SIZE) * AES_BLOCK_SIZE; /* total bytes */ |
11876 | 0 | byte* pt = out; |
11877 | |
|
11878 | 0 | outSz -= AES_BLOCK_SIZE; |
11879 | |
|
11880 | 0 | while (outSz > 0) { |
11881 | 0 | word32 j; |
11882 | 0 | byte carry = 0; |
11883 | | |
11884 | | /* multiply by shift left and propagate carry */ |
11885 | 0 | for (j = 0; j < AES_BLOCK_SIZE && outSz > 0; j++, outSz--) { |
11886 | 0 | byte tmpC; |
11887 | |
|
11888 | 0 | tmpC = (pt[j] >> 7) & 0x01; |
11889 | 0 | pt[j+AES_BLOCK_SIZE] = ((pt[j] << 1) + carry) & 0xFF; |
11890 | 0 | carry = tmpC; |
11891 | 0 | } |
11892 | 0 | if (carry) { |
11893 | 0 | pt[AES_BLOCK_SIZE] ^= GF_XTS; |
11894 | 0 | } |
11895 | |
|
11896 | 0 | pt += AES_BLOCK_SIZE; |
11897 | 0 | } |
11898 | |
|
11899 | 0 | xorbuf(out, in, totalSz); |
11900 | 0 | if (dir == AES_ENCRYPTION) { |
11901 | 0 | return _AesEcbEncrypt(aes, out, out, totalSz); |
11902 | 0 | } |
11903 | 0 | else { |
11904 | 0 | return _AesEcbDecrypt(aes, out, out, totalSz); |
11905 | 0 | } |
11906 | 0 | } |
11907 | | #endif /* HAVE_AES_ECB */ |
11908 | | |
11909 | | |
11910 | | /* AES with XTS mode. (XTS) XEX encryption with Tweak and cipher text Stealing. |
11911 | | * |
11912 | | * xaes AES keys to use for block encrypt/decrypt |
11913 | | * out output buffer to hold cipher text |
11914 | | * in input plain text buffer to encrypt |
11915 | | * sz size of both out and in buffers |
11916 | | * i value to use for tweak |
11917 | | * iSz size of i buffer, should always be AES_BLOCK_SIZE but having this input |
11918 | | * adds a sanity check on how the user calls the function. |
11919 | | * |
11920 | | * returns 0 on success |
11921 | | */ |
11922 | | /* Software AES - XTS Encrypt */ |
11923 | | int wc_AesXtsEncrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz, |
11924 | | const byte* i, word32 iSz) |
11925 | 0 | { |
11926 | 0 | int ret = 0; |
11927 | 0 | word32 blocks = (sz / AES_BLOCK_SIZE); |
11928 | 0 | Aes *aes, *tweak; |
11929 | |
|
11930 | 0 | if (xaes == NULL || out == NULL || in == NULL) { |
11931 | 0 | return BAD_FUNC_ARG; |
11932 | 0 | } |
11933 | | |
11934 | 0 | aes = &xaes->aes; |
11935 | 0 | tweak = &xaes->tweak; |
11936 | |
|
11937 | 0 | if (iSz < AES_BLOCK_SIZE) { |
11938 | 0 | return BAD_FUNC_ARG; |
11939 | 0 | } |
11940 | | |
11941 | 0 | if (blocks > 0) { |
11942 | 0 | byte tmp[AES_BLOCK_SIZE]; |
11943 | |
|
11944 | 0 | XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES |
11945 | | * key setup passed to encrypt direct*/ |
11946 | |
|
11947 | 0 | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
11948 | |
|
11949 | 0 | ret = wc_AesEncryptDirect(tweak, tmp, i); |
11950 | |
|
11951 | 0 | if (ret != 0) { |
11952 | 0 | RESTORE_VECTOR_REGISTERS(); |
11953 | 0 | return ret; |
11954 | 0 | } |
11955 | | |
11956 | 0 | #ifdef HAVE_AES_ECB |
11957 | | /* encrypt all of buffer at once when possible */ |
11958 | 0 | if (in != out) { /* can not handle inline */ |
11959 | 0 | XMEMCPY(out, tmp, AES_BLOCK_SIZE); |
11960 | 0 | if ((ret = _AesXtsHelper(aes, out, in, sz, AES_ENCRYPTION)) != 0) { |
11961 | 0 | RESTORE_VECTOR_REGISTERS(); |
11962 | 0 | return ret; |
11963 | 0 | } |
11964 | 0 | } |
11965 | 0 | #endif |
11966 | | |
11967 | 0 | while (blocks > 0) { |
11968 | 0 | word32 j; |
11969 | 0 | byte carry = 0; |
11970 | |
|
11971 | 0 | #ifdef HAVE_AES_ECB |
11972 | 0 | if (in == out) |
11973 | 0 | #endif |
11974 | 0 | { /* check for if inline */ |
11975 | 0 | byte buf[AES_BLOCK_SIZE]; |
11976 | |
|
11977 | 0 | XMEMCPY(buf, in, AES_BLOCK_SIZE); |
11978 | 0 | xorbuf(buf, tmp, AES_BLOCK_SIZE); |
11979 | 0 | ret = wc_AesEncryptDirect(aes, out, buf); |
11980 | 0 | if (ret != 0) { |
11981 | 0 | RESTORE_VECTOR_REGISTERS(); |
11982 | 0 | return ret; |
11983 | 0 | } |
11984 | 0 | } |
11985 | 0 | xorbuf(out, tmp, AES_BLOCK_SIZE); |
11986 | | |
11987 | | /* multiply by shift left and propagate carry */ |
11988 | 0 | for (j = 0; j < AES_BLOCK_SIZE; j++) { |
11989 | 0 | byte tmpC; |
11990 | |
|
11991 | 0 | tmpC = (tmp[j] >> 7) & 0x01; |
11992 | 0 | tmp[j] = ((tmp[j] << 1) + carry) & 0xFF; |
11993 | 0 | carry = tmpC; |
11994 | 0 | } |
11995 | 0 | if (carry) { |
11996 | 0 | tmp[0] ^= GF_XTS; |
11997 | 0 | } |
11998 | |
|
11999 | 0 | in += AES_BLOCK_SIZE; |
12000 | 0 | out += AES_BLOCK_SIZE; |
12001 | 0 | sz -= AES_BLOCK_SIZE; |
12002 | 0 | blocks--; |
12003 | 0 | } |
12004 | | |
12005 | | /* stealing operation of XTS to handle left overs */ |
12006 | 0 | if (sz > 0) { |
12007 | 0 | byte buf[AES_BLOCK_SIZE]; |
12008 | |
|
12009 | 0 | XMEMCPY(buf, out - AES_BLOCK_SIZE, AES_BLOCK_SIZE); |
12010 | 0 | if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */ |
12011 | 0 | RESTORE_VECTOR_REGISTERS(); |
12012 | 0 | return BUFFER_E; |
12013 | 0 | } |
12014 | 0 | XMEMCPY(out, buf, sz); |
12015 | 0 | XMEMCPY(buf, in, sz); |
12016 | |
|
12017 | 0 | xorbuf(buf, tmp, AES_BLOCK_SIZE); |
12018 | 0 | ret = wc_AesEncryptDirect(aes, out - AES_BLOCK_SIZE, buf); |
12019 | 0 | if (ret == 0) |
12020 | 0 | xorbuf(out - AES_BLOCK_SIZE, tmp, AES_BLOCK_SIZE); |
12021 | 0 | } |
12022 | 0 | RESTORE_VECTOR_REGISTERS(); |
12023 | 0 | } |
12024 | 0 | else { |
12025 | 0 | WOLFSSL_MSG("Plain text input too small for encryption"); |
12026 | 0 | return BAD_FUNC_ARG; |
12027 | 0 | } |
12028 | | |
12029 | 0 | return ret; |
12030 | 0 | } |
12031 | | |
12032 | | |
12033 | | /* Same process as encryption but Aes key is AES_DECRYPTION type. |
12034 | | * |
12035 | | * xaes AES keys to use for block encrypt/decrypt |
12036 | | * out output buffer to hold plain text |
12037 | | * in input cipher text buffer to decrypt |
12038 | | * sz size of both out and in buffers |
12039 | | * i value to use for tweak |
12040 | | * iSz size of i buffer, should always be AES_BLOCK_SIZE but having this input |
12041 | | * adds a sanity check on how the user calls the function. |
12042 | | * |
12043 | | * returns 0 on success |
12044 | | */ |
12045 | | /* Software AES - XTS Decrypt */ |
12046 | | int wc_AesXtsDecrypt(XtsAes* xaes, byte* out, const byte* in, word32 sz, |
12047 | | const byte* i, word32 iSz) |
12048 | 0 | { |
12049 | 0 | int ret = 0; |
12050 | 0 | word32 blocks = (sz / AES_BLOCK_SIZE); |
12051 | 0 | Aes *aes, *tweak; |
12052 | |
|
12053 | 0 | if (xaes == NULL || out == NULL || in == NULL) { |
12054 | 0 | return BAD_FUNC_ARG; |
12055 | 0 | } |
12056 | | |
12057 | 0 | aes = &xaes->aes; |
12058 | 0 | tweak = &xaes->tweak; |
12059 | |
|
12060 | 0 | if (iSz < AES_BLOCK_SIZE) { |
12061 | 0 | return BAD_FUNC_ARG; |
12062 | 0 | } |
12063 | | |
12064 | 0 | if (blocks > 0) { |
12065 | 0 | word32 j; |
12066 | 0 | byte carry = 0; |
12067 | 0 | byte tmp[AES_BLOCK_SIZE]; |
12068 | 0 | byte stl = (sz % AES_BLOCK_SIZE); |
12069 | |
|
12070 | 0 | XMEMSET(tmp, 0, AES_BLOCK_SIZE); /* set to 0's in case of improper AES |
12071 | | * key setup passed to decrypt direct*/ |
12072 | |
|
12073 | 0 | SAVE_VECTOR_REGISTERS(return _svr_ret;); |
12074 | |
|
12075 | 0 | ret = wc_AesEncryptDirect(tweak, tmp, i); |
12076 | 0 | if (ret != 0) { |
12077 | 0 | RESTORE_VECTOR_REGISTERS(); |
12078 | 0 | return ret; |
12079 | 0 | } |
12080 | | |
12081 | | /* if Stealing then break out of loop one block early to handle special |
12082 | | * case */ |
12083 | 0 | if (stl > 0) { |
12084 | 0 | blocks--; |
12085 | 0 | } |
12086 | |
|
12087 | 0 | #ifdef HAVE_AES_ECB |
12088 | | /* decrypt all of buffer at once when possible */ |
12089 | 0 | if (in != out) { /* can not handle inline */ |
12090 | 0 | XMEMCPY(out, tmp, AES_BLOCK_SIZE); |
12091 | 0 | if ((ret = _AesXtsHelper(aes, out, in, sz, AES_DECRYPTION)) != 0) { |
12092 | 0 | RESTORE_VECTOR_REGISTERS(); |
12093 | 0 | return ret; |
12094 | 0 | } |
12095 | 0 | } |
12096 | 0 | #endif |
12097 | | |
12098 | 0 | while (blocks > 0) { |
12099 | 0 | #ifdef HAVE_AES_ECB |
12100 | 0 | if (in == out) |
12101 | 0 | #endif |
12102 | 0 | { /* check for if inline */ |
12103 | 0 | byte buf[AES_BLOCK_SIZE]; |
12104 | |
|
12105 | 0 | XMEMCPY(buf, in, AES_BLOCK_SIZE); |
12106 | 0 | xorbuf(buf, tmp, AES_BLOCK_SIZE); |
12107 | 0 | ret = wc_AesDecryptDirect(aes, out, buf); |
12108 | 0 | if (ret != 0) { |
12109 | 0 | RESTORE_VECTOR_REGISTERS(); |
12110 | 0 | return ret; |
12111 | 0 | } |
12112 | 0 | } |
12113 | 0 | xorbuf(out, tmp, AES_BLOCK_SIZE); |
12114 | | |
12115 | | /* multiply by shift left and propagate carry */ |
12116 | 0 | for (j = 0; j < AES_BLOCK_SIZE; j++) { |
12117 | 0 | byte tmpC; |
12118 | |
|
12119 | 0 | tmpC = (tmp[j] >> 7) & 0x01; |
12120 | 0 | tmp[j] = ((tmp[j] << 1) + carry) & 0xFF; |
12121 | 0 | carry = tmpC; |
12122 | 0 | } |
12123 | 0 | if (carry) { |
12124 | 0 | tmp[0] ^= GF_XTS; |
12125 | 0 | } |
12126 | 0 | carry = 0; |
12127 | |
|
12128 | 0 | in += AES_BLOCK_SIZE; |
12129 | 0 | out += AES_BLOCK_SIZE; |
12130 | 0 | sz -= AES_BLOCK_SIZE; |
12131 | 0 | blocks--; |
12132 | 0 | } |
12133 | | |
12134 | | /* stealing operation of XTS to handle left overs */ |
12135 | 0 | if (sz >= AES_BLOCK_SIZE) { |
12136 | 0 | byte buf[AES_BLOCK_SIZE]; |
12137 | 0 | byte tmp2[AES_BLOCK_SIZE]; |
12138 | | |
12139 | | /* multiply by shift left and propagate carry */ |
12140 | 0 | for (j = 0; j < AES_BLOCK_SIZE; j++) { |
12141 | 0 | byte tmpC; |
12142 | |
|
12143 | 0 | tmpC = (tmp[j] >> 7) & 0x01; |
12144 | 0 | tmp2[j] = ((tmp[j] << 1) + carry) & 0xFF; |
12145 | 0 | carry = tmpC; |
12146 | 0 | } |
12147 | 0 | if (carry) { |
12148 | 0 | tmp2[0] ^= GF_XTS; |
12149 | 0 | } |
12150 | |
|
12151 | 0 | XMEMCPY(buf, in, AES_BLOCK_SIZE); |
12152 | 0 | xorbuf(buf, tmp2, AES_BLOCK_SIZE); |
12153 | 0 | ret = wc_AesDecryptDirect(aes, out, buf); |
12154 | 0 | if (ret != 0) { |
12155 | 0 | RESTORE_VECTOR_REGISTERS(); |
12156 | 0 | return ret; |
12157 | 0 | } |
12158 | 0 | xorbuf(out, tmp2, AES_BLOCK_SIZE); |
12159 | | |
12160 | | /* tmp2 holds partial | last */ |
12161 | 0 | XMEMCPY(tmp2, out, AES_BLOCK_SIZE); |
12162 | 0 | in += AES_BLOCK_SIZE; |
12163 | 0 | out += AES_BLOCK_SIZE; |
12164 | 0 | sz -= AES_BLOCK_SIZE; |
12165 | | |
12166 | | /* Make buffer with end of cipher text | last */ |
12167 | 0 | XMEMCPY(buf, tmp2, AES_BLOCK_SIZE); |
12168 | 0 | if (sz >= AES_BLOCK_SIZE) { /* extra sanity check before copy */ |
12169 | 0 | RESTORE_VECTOR_REGISTERS(); |
12170 | 0 | return BUFFER_E; |
12171 | 0 | } |
12172 | 0 | XMEMCPY(buf, in, sz); |
12173 | 0 | XMEMCPY(out, tmp2, sz); |
12174 | |
|
12175 | 0 | xorbuf(buf, tmp, AES_BLOCK_SIZE); |
12176 | 0 | ret = wc_AesDecryptDirect(aes, tmp2, buf); |
12177 | 0 | if (ret != 0) { |
12178 | 0 | RESTORE_VECTOR_REGISTERS(); |
12179 | 0 | return ret; |
12180 | 0 | } |
12181 | 0 | xorbuf(tmp2, tmp, AES_BLOCK_SIZE); |
12182 | 0 | XMEMCPY(out - AES_BLOCK_SIZE, tmp2, AES_BLOCK_SIZE); |
12183 | 0 | } |
12184 | 0 | RESTORE_VECTOR_REGISTERS(); |
12185 | 0 | } |
12186 | 0 | else { |
12187 | 0 | WOLFSSL_MSG("Plain text input too small for encryption"); |
12188 | 0 | return BAD_FUNC_ARG; |
12189 | 0 | } |
12190 | | |
12191 | 0 | return ret; |
12192 | 0 | } |
12193 | | |
12194 | | #endif /* WOLFSSL_AES_XTS */ |
12195 | | |
12196 | | #ifdef WOLFSSL_AES_SIV |
12197 | | |
12198 | | /* |
12199 | | * See RFC 5297 Section 2.4. |
12200 | | */ |
12201 | | static WARN_UNUSED_RESULT int S2V( |
12202 | | const byte* key, word32 keySz, const byte* assoc, word32 assocSz, |
12203 | | const byte* nonce, word32 nonceSz, const byte* data, |
12204 | | word32 dataSz, byte* out) |
12205 | 0 | { |
12206 | 0 | #ifdef WOLFSSL_SMALL_STACK |
12207 | 0 | byte* tmp[3] = {NULL, NULL, NULL}; |
12208 | 0 | int i; |
12209 | 0 | Cmac* cmac; |
12210 | | #else |
12211 | | byte tmp[3][AES_BLOCK_SIZE]; |
12212 | | Cmac cmac[1]; |
12213 | | #endif |
12214 | 0 | word32 macSz = AES_BLOCK_SIZE; |
12215 | 0 | int ret = 0; |
12216 | 0 | word32 zeroBytes; |
12217 | |
|
12218 | 0 | #ifdef WOLFSSL_SMALL_STACK |
12219 | 0 | for (i = 0; i < 3; ++i) { |
12220 | 0 | tmp[i] = (byte*)XMALLOC(AES_BLOCK_SIZE, NULL, DYNAMIC_TYPE_TMP_BUFFER); |
12221 | 0 | if (tmp[i] == NULL) { |
12222 | 0 | ret = MEMORY_E; |
12223 | 0 | break; |
12224 | 0 | } |
12225 | 0 | } |
12226 | 0 | if (ret == 0) |
12227 | 0 | #endif |
12228 | 0 | { |
12229 | 0 | XMEMSET(tmp[1], 0, AES_BLOCK_SIZE); |
12230 | 0 | XMEMSET(tmp[2], 0, AES_BLOCK_SIZE); |
12231 | |
|
12232 | 0 | ret = wc_AesCmacGenerate(tmp[0], &macSz, tmp[1], AES_BLOCK_SIZE, |
12233 | 0 | key, keySz); |
12234 | 0 | if (ret == 0) { |
12235 | 0 | ShiftAndXorRb(tmp[1], tmp[0]); |
12236 | 0 | ret = wc_AesCmacGenerate(tmp[0], &macSz, assoc, assocSz, key, |
12237 | 0 | keySz); |
12238 | 0 | if (ret == 0) { |
12239 | 0 | xorbuf(tmp[1], tmp[0], AES_BLOCK_SIZE); |
12240 | 0 | } |
12241 | 0 | } |
12242 | 0 | } |
12243 | |
|
12244 | 0 | if (ret == 0) { |
12245 | 0 | if (nonceSz > 0) { |
12246 | 0 | ShiftAndXorRb(tmp[0], tmp[1]); |
12247 | 0 | ret = wc_AesCmacGenerate(tmp[1], &macSz, nonce, nonceSz, key, |
12248 | 0 | keySz); |
12249 | 0 | if (ret == 0) { |
12250 | 0 | xorbuf(tmp[0], tmp[1], AES_BLOCK_SIZE); |
12251 | 0 | } |
12252 | 0 | } |
12253 | 0 | else { |
12254 | 0 | XMEMCPY(tmp[0], tmp[1], AES_BLOCK_SIZE); |
12255 | 0 | } |
12256 | 0 | } |
12257 | |
|
12258 | 0 | if (ret == 0) { |
12259 | 0 | if (dataSz >= AES_BLOCK_SIZE) { |
12260 | |
|
12261 | 0 | #ifdef WOLFSSL_SMALL_STACK |
12262 | 0 | cmac = (Cmac*)XMALLOC(sizeof(Cmac), NULL, DYNAMIC_TYPE_CMAC); |
12263 | 0 | if (cmac == NULL) { |
12264 | 0 | ret = MEMORY_E; |
12265 | 0 | } |
12266 | 0 | if (ret == 0) |
12267 | 0 | #endif |
12268 | 0 | { |
12269 | | #ifdef WOLFSSL_CHECK_MEM_ZERO |
12270 | | /* Aes part is checked by wc_AesFree. */ |
12271 | | wc_MemZero_Add("wc_AesCmacGenerate cmac", |
12272 | | ((unsigned char *)cmac) + sizeof(Aes), |
12273 | | sizeof(Cmac) - sizeof(Aes)); |
12274 | | #endif |
12275 | 0 | xorbuf(tmp[0], data + (dataSz - AES_BLOCK_SIZE), |
12276 | 0 | AES_BLOCK_SIZE); |
12277 | 0 | ret = wc_InitCmac(cmac, key, keySz, WC_CMAC_AES, NULL); |
12278 | 0 | if (ret == 0) { |
12279 | 0 | ret = wc_CmacUpdate(cmac, data, dataSz - AES_BLOCK_SIZE); |
12280 | 0 | } |
12281 | 0 | if (ret == 0) { |
12282 | 0 | ret = wc_CmacUpdate(cmac, tmp[0], AES_BLOCK_SIZE); |
12283 | 0 | } |
12284 | 0 | if (ret == 0) { |
12285 | 0 | ret = wc_CmacFinal(cmac, out, &macSz); |
12286 | 0 | } |
12287 | 0 | } |
12288 | 0 | #ifdef WOLFSSL_SMALL_STACK |
12289 | 0 | if (cmac != NULL) { |
12290 | 0 | XFREE(cmac, NULL, DYNAMIC_TYPE_CMAC); |
12291 | 0 | } |
12292 | | #elif defined(WOLFSSL_CHECK_MEM_ZERO) |
12293 | | wc_MemZero_Check(cmac, sizeof(Cmac)); |
12294 | | #endif |
12295 | 0 | } |
12296 | 0 | else { |
12297 | 0 | XMEMCPY(tmp[2], data, dataSz); |
12298 | 0 | tmp[2][dataSz] |= 0x80; |
12299 | 0 | zeroBytes = AES_BLOCK_SIZE - (dataSz + 1); |
12300 | 0 | if (zeroBytes != 0) { |
12301 | 0 | XMEMSET(tmp[2] + dataSz + 1, 0, zeroBytes); |
12302 | 0 | } |
12303 | 0 | ShiftAndXorRb(tmp[1], tmp[0]); |
12304 | 0 | xorbuf(tmp[1], tmp[2], AES_BLOCK_SIZE); |
12305 | 0 | ret = wc_AesCmacGenerate(out, &macSz, tmp[1], AES_BLOCK_SIZE, key, |
12306 | 0 | keySz); |
12307 | 0 | } |
12308 | 0 | } |
12309 | |
|
12310 | 0 | #ifdef WOLFSSL_SMALL_STACK |
12311 | 0 | for (i = 0; i < 3; ++i) { |
12312 | 0 | if (tmp[i] != NULL) { |
12313 | 0 | XFREE(tmp[i], NULL, DYNAMIC_TYPE_TMP_BUFFER); |
12314 | 0 | } |
12315 | 0 | } |
12316 | 0 | #endif |
12317 | |
|
12318 | 0 | return ret; |
12319 | 0 | } |
12320 | | |
12321 | | static WARN_UNUSED_RESULT int AesSivCipher( |
12322 | | const byte* key, word32 keySz, const byte* assoc, |
12323 | | word32 assocSz, const byte* nonce, word32 nonceSz, |
12324 | | const byte* data, word32 dataSz, byte* siv, byte* out, |
12325 | | int enc) |
12326 | 0 | { |
12327 | 0 | int ret = 0; |
12328 | 0 | #ifdef WOLFSSL_SMALL_STACK |
12329 | 0 | Aes* aes = NULL; |
12330 | | #else |
12331 | | Aes aes[1]; |
12332 | | #endif |
12333 | 0 | byte sivTmp[AES_BLOCK_SIZE]; |
12334 | |
|
12335 | 0 | if (key == NULL || siv == NULL || out == NULL) { |
12336 | 0 | WOLFSSL_MSG("Bad parameter"); |
12337 | 0 | ret = BAD_FUNC_ARG; |
12338 | 0 | } |
12339 | |
|
12340 | 0 | if (ret == 0 && keySz != 32 && keySz != 48 && keySz != 64) { |
12341 | 0 | WOLFSSL_MSG("Bad key size. Must be 256, 384, or 512 bits."); |
12342 | 0 | ret = BAD_FUNC_ARG; |
12343 | 0 | } |
12344 | |
|
12345 | 0 | #ifdef WOLFSSL_SMALL_STACK |
12346 | 0 | if (ret == 0) { |
12347 | 0 | aes = (Aes*)XMALLOC(sizeof(Aes), NULL, DYNAMIC_TYPE_AES); |
12348 | 0 | if (aes == NULL) { |
12349 | 0 | ret = MEMORY_E; |
12350 | 0 | } |
12351 | 0 | } |
12352 | 0 | #endif |
12353 | |
|
12354 | 0 | if (ret == 0) { |
12355 | 0 | if (enc == 1) { |
12356 | 0 | ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, data, |
12357 | 0 | dataSz, sivTmp); |
12358 | 0 | if (ret != 0) { |
12359 | 0 | WOLFSSL_MSG("S2V failed."); |
12360 | 0 | } |
12361 | 0 | else { |
12362 | 0 | XMEMCPY(siv, sivTmp, AES_BLOCK_SIZE); |
12363 | 0 | } |
12364 | 0 | } |
12365 | 0 | else { |
12366 | 0 | XMEMCPY(sivTmp, siv, AES_BLOCK_SIZE); |
12367 | 0 | } |
12368 | 0 | } |
12369 | |
|
12370 | 0 | if (ret == 0) { |
12371 | 0 | ret = wc_AesInit(aes, NULL, INVALID_DEVID); |
12372 | 0 | if (ret != 0) { |
12373 | 0 | WOLFSSL_MSG("Failed to initialized AES object."); |
12374 | 0 | } |
12375 | 0 | } |
12376 | |
|
12377 | 0 | if (ret == 0 && dataSz > 0) { |
12378 | 0 | sivTmp[12] &= 0x7f; |
12379 | 0 | sivTmp[8] &= 0x7f; |
12380 | 0 | ret = wc_AesSetKey(aes, key + keySz / 2, keySz / 2, sivTmp, |
12381 | 0 | AES_ENCRYPTION); |
12382 | 0 | if (ret != 0) { |
12383 | 0 | WOLFSSL_MSG("Failed to set key for AES-CTR."); |
12384 | 0 | } |
12385 | 0 | else { |
12386 | 0 | ret = wc_AesCtrEncrypt(aes, out, data, dataSz); |
12387 | 0 | if (ret != 0) { |
12388 | 0 | WOLFSSL_MSG("AES-CTR encryption failed."); |
12389 | 0 | } |
12390 | 0 | } |
12391 | 0 | } |
12392 | |
|
12393 | 0 | if (ret == 0 && enc == 0) { |
12394 | 0 | ret = S2V(key, keySz / 2, assoc, assocSz, nonce, nonceSz, out, dataSz, |
12395 | 0 | sivTmp); |
12396 | 0 | if (ret != 0) { |
12397 | 0 | WOLFSSL_MSG("S2V failed."); |
12398 | 0 | } |
12399 | |
|
12400 | 0 | if (XMEMCMP(siv, sivTmp, AES_BLOCK_SIZE) != 0) { |
12401 | 0 | WOLFSSL_MSG("Computed SIV doesn't match received SIV."); |
12402 | 0 | ret = AES_SIV_AUTH_E; |
12403 | 0 | } |
12404 | 0 | } |
12405 | |
|
12406 | 0 | wc_AesFree(aes); |
12407 | 0 | #ifdef WOLFSSL_SMALL_STACK |
12408 | 0 | XFREE(aes, NULL, DYNAMIC_TYPE_AES); |
12409 | 0 | #endif |
12410 | |
|
12411 | 0 | return ret; |
12412 | 0 | } |
12413 | | |
12414 | | /* |
12415 | | * See RFC 5297 Section 2.6. |
12416 | | */ |
12417 | | int wc_AesSivEncrypt(const byte* key, word32 keySz, const byte* assoc, |
12418 | | word32 assocSz, const byte* nonce, word32 nonceSz, |
12419 | | const byte* in, word32 inSz, byte* siv, byte* out) |
12420 | 0 | { |
12421 | 0 | return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz, |
12422 | 0 | siv, out, 1); |
12423 | 0 | } |
12424 | | |
12425 | | /* |
12426 | | * See RFC 5297 Section 2.7. |
12427 | | */ |
12428 | | int wc_AesSivDecrypt(const byte* key, word32 keySz, const byte* assoc, |
12429 | | word32 assocSz, const byte* nonce, word32 nonceSz, |
12430 | | const byte* in, word32 inSz, byte* siv, byte* out) |
12431 | 0 | { |
12432 | 0 | return AesSivCipher(key, keySz, assoc, assocSz, nonce, nonceSz, in, inSz, |
12433 | 0 | siv, out, 0); |
12434 | 0 | } |
12435 | | |
12436 | | #endif /* WOLFSSL_AES_SIV */ |
12437 | | |
12438 | | #endif /* HAVE_FIPS */ |
12439 | | #endif /* !NO_AES */ |