/src/libgcrypt/cipher/rijndael.c
Line | Count | Source |
1 | | /* Rijndael (AES) for GnuPG |
2 | | * Copyright (C) 2000, 2001, 2002, 2003, 2007, |
3 | | * 2008, 2011, 2012 Free Software Foundation, Inc. |
4 | | * |
5 | | * This file is part of Libgcrypt. |
6 | | * |
7 | | * Libgcrypt is free software; you can redistribute it and/or modify |
8 | | * it under the terms of the GNU Lesser General Public License as |
9 | | * published by the Free Software Foundation; either version 2.1 of |
10 | | * the License, or (at your option) any later version. |
11 | | * |
12 | | * Libgcrypt is distributed in the hope that it will be useful, |
13 | | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
14 | | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
15 | | * GNU Lesser General Public License for more details. |
16 | | * |
17 | | * You should have received a copy of the GNU Lesser General Public |
18 | | * License along with this program; if not, see <http://www.gnu.org/licenses/>. |
19 | | ******************************************************************* |
20 | | * The code here is based on the optimized implementation taken from |
21 | | * http://www.esat.kuleuven.ac.be/~rijmen/rijndael/ on Oct 2, 2000, |
22 | | * which carries this notice: |
23 | | *------------------------------------------ |
24 | | * rijndael-alg-fst.c v2.3 April '2000 |
25 | | * |
26 | | * Optimised ANSI C code |
27 | | * |
28 | | * authors: v1.0: Antoon Bosselaers |
29 | | * v2.0: Vincent Rijmen |
30 | | * v2.3: Paulo Barreto |
31 | | * |
32 | | * This code is placed in the public domain. |
33 | | *------------------------------------------ |
34 | | * |
35 | | * The SP800-38a document is available at: |
36 | | * http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf |
37 | | * |
38 | | */ |
39 | | |
40 | | #include <config.h> |
41 | | #include <stdio.h> |
42 | | #include <stdlib.h> |
43 | | #include <string.h> /* for memcmp() */ |
44 | | |
45 | | #include "types.h" /* for byte and u32 typedefs */ |
46 | | #include "g10lib.h" |
47 | | #include "cipher.h" |
48 | | #include "bufhelp.h" |
49 | | #include "rijndael-internal.h" |
50 | | #include "./cipher-internal.h" |
51 | | |
52 | | |
53 | | #ifdef USE_AMD64_ASM |
54 | | /* AMD64 assembly implementations of AES */ |
55 | | extern unsigned int _gcry_aes_amd64_encrypt_block(const void *keysched_enc, |
56 | | unsigned char *out, |
57 | | const unsigned char *in, |
58 | | int rounds, |
59 | | const void *encT); |
60 | | |
61 | | extern unsigned int _gcry_aes_amd64_decrypt_block(const void *keysched_dec, |
62 | | unsigned char *out, |
63 | | const unsigned char *in, |
64 | | int rounds, |
65 | | const void *decT); |
66 | | #endif /*USE_AMD64_ASM*/ |
67 | | |
68 | | #ifdef USE_AESNI |
69 | | /* AES-NI (AMD64 & i386) accelerated implementations of AES */ |
70 | | extern void _gcry_aes_aesni_do_setkey(RIJNDAEL_context *ctx, const byte *key); |
71 | | extern void _gcry_aes_aesni_prepare_decryption(RIJNDAEL_context *ctx); |
72 | | |
73 | | extern unsigned int _gcry_aes_aesni_encrypt (const RIJNDAEL_context *ctx, |
74 | | unsigned char *dst, |
75 | | const unsigned char *src); |
76 | | extern unsigned int _gcry_aes_aesni_decrypt (const RIJNDAEL_context *ctx, |
77 | | unsigned char *dst, |
78 | | const unsigned char *src); |
79 | | extern void _gcry_aes_aesni_cfb_enc (void *context, unsigned char *iv, |
80 | | void *outbuf_arg, const void *inbuf_arg, |
81 | | size_t nblocks); |
82 | | extern void _gcry_aes_aesni_cbc_enc (void *context, unsigned char *iv, |
83 | | void *outbuf_arg, const void *inbuf_arg, |
84 | | size_t nblocks, int cbc_mac); |
85 | | extern void _gcry_aes_aesni_ctr_enc (void *context, unsigned char *ctr, |
86 | | void *outbuf_arg, const void *inbuf_arg, |
87 | | size_t nblocks); |
88 | | extern void _gcry_aes_aesni_ctr32le_enc (void *context, unsigned char *ctr, |
89 | | void *outbuf_arg, |
90 | | const void *inbuf_arg, size_t nblocks); |
91 | | extern void _gcry_aes_aesni_cfb_dec (void *context, unsigned char *iv, |
92 | | void *outbuf_arg, const void *inbuf_arg, |
93 | | size_t nblocks); |
94 | | extern void _gcry_aes_aesni_cbc_dec (void *context, unsigned char *iv, |
95 | | void *outbuf_arg, const void *inbuf_arg, |
96 | | size_t nblocks); |
97 | | extern size_t _gcry_aes_aesni_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, |
98 | | const void *inbuf_arg, size_t nblocks, |
99 | | int encrypt); |
100 | | extern size_t _gcry_aes_aesni_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, |
101 | | size_t nblocks); |
102 | | extern void _gcry_aes_aesni_xts_crypt (void *context, unsigned char *tweak, |
103 | | void *outbuf_arg, const void *inbuf_arg, |
104 | | size_t nblocks, int encrypt); |
105 | | extern void _gcry_aes_aesni_ecb_crypt (void *context, void *outbuf_arg, |
106 | | const void *inbuf_arg, size_t nblocks, |
107 | | int encrypt); |
108 | | #endif |
109 | | |
110 | | #if defined(USE_VAES_I386) || defined(USE_VAES) |
111 | | /* VAES (i386/AMD64) accelerated implementation of AES */ |
112 | | |
113 | | extern void _gcry_aes_vaes_cfb_dec (void *context, unsigned char *iv, |
114 | | void *outbuf_arg, const void *inbuf_arg, |
115 | | size_t nblocks); |
116 | | extern void _gcry_aes_vaes_cbc_dec (void *context, unsigned char *iv, |
117 | | void *outbuf_arg, const void *inbuf_arg, |
118 | | size_t nblocks); |
119 | | extern void _gcry_aes_vaes_ctr_enc (void *context, unsigned char *ctr, |
120 | | void *outbuf_arg, const void *inbuf_arg, |
121 | | size_t nblocks); |
122 | | extern void _gcry_aes_vaes_ctr32le_enc (void *context, unsigned char *ctr, |
123 | | void *outbuf_arg, const void *inbuf_arg, |
124 | | size_t nblocks); |
125 | | extern size_t _gcry_aes_vaes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, |
126 | | const void *inbuf_arg, size_t nblocks, |
127 | | int encrypt); |
128 | | extern size_t _gcry_aes_vaes_ocb_auth (gcry_cipher_hd_t c, |
129 | | const void *inbuf_arg, |
130 | | size_t nblocks); |
131 | | extern void _gcry_aes_vaes_xts_crypt (void *context, unsigned char *tweak, |
132 | | void *outbuf_arg, const void *inbuf_arg, |
133 | | size_t nblocks, int encrypt); |
134 | | extern void _gcry_aes_vaes_ecb_crypt (void *context, void *outbuf_arg, |
135 | | const void *inbuf_arg, size_t nblocks, |
136 | | int encrypt); |
137 | | #endif |
138 | | |
139 | | #ifdef USE_SSSE3 |
140 | | /* SSSE3 (AMD64) vector permutation implementation of AES */ |
141 | | extern void _gcry_aes_ssse3_do_setkey(RIJNDAEL_context *ctx, const byte *key); |
142 | | extern void _gcry_aes_ssse3_prepare_decryption(RIJNDAEL_context *ctx); |
143 | | |
144 | | extern unsigned int _gcry_aes_ssse3_encrypt (const RIJNDAEL_context *ctx, |
145 | | unsigned char *dst, |
146 | | const unsigned char *src); |
147 | | extern unsigned int _gcry_aes_ssse3_decrypt (const RIJNDAEL_context *ctx, |
148 | | unsigned char *dst, |
149 | | const unsigned char *src); |
150 | | extern void _gcry_aes_ssse3_cfb_enc (void *context, unsigned char *iv, |
151 | | void *outbuf_arg, const void *inbuf_arg, |
152 | | size_t nblocks); |
153 | | extern void _gcry_aes_ssse3_cbc_enc (void *context, unsigned char *iv, |
154 | | void *outbuf_arg, const void *inbuf_arg, |
155 | | size_t nblocks, |
156 | | int cbc_mac); |
157 | | extern void _gcry_aes_ssse3_ctr_enc (void *context, unsigned char *ctr, |
158 | | void *outbuf_arg, const void *inbuf_arg, |
159 | | size_t nblocks); |
160 | | extern void _gcry_aes_ssse3_cfb_dec (void *context, unsigned char *iv, |
161 | | void *outbuf_arg, const void *inbuf_arg, |
162 | | size_t nblocks); |
163 | | extern void _gcry_aes_ssse3_cbc_dec (void *context, unsigned char *iv, |
164 | | void *outbuf_arg, const void *inbuf_arg, |
165 | | size_t nblocks); |
166 | | extern size_t _gcry_aes_ssse3_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, |
167 | | const void *inbuf_arg, size_t nblocks, |
168 | | int encrypt); |
169 | | extern size_t _gcry_aes_ssse3_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, |
170 | | size_t nblocks); |
171 | | #endif |
172 | | |
173 | | #ifdef USE_VP_AARCH64 |
174 | | /* AArch64 vector permutation implementation of AES */ |
175 | | extern void _gcry_aes_vp_aarch64_do_setkey(RIJNDAEL_context *ctx, |
176 | | const byte *key); |
177 | | extern void _gcry_aes_vp_aarch64_prepare_decryption(RIJNDAEL_context *ctx); |
178 | | |
179 | | extern unsigned int _gcry_aes_vp_aarch64_encrypt (const RIJNDAEL_context *ctx, |
180 | | unsigned char *dst, |
181 | | const unsigned char *src); |
182 | | extern unsigned int _gcry_aes_vp_aarch64_decrypt (const RIJNDAEL_context *ctx, |
183 | | unsigned char *dst, |
184 | | const unsigned char *src); |
185 | | extern void _gcry_aes_vp_aarch64_cfb_enc (void *context, unsigned char *iv, |
186 | | void *outbuf_arg, |
187 | | const void *inbuf_arg, |
188 | | size_t nblocks); |
189 | | extern void _gcry_aes_vp_aarch64_cbc_enc (void *context, unsigned char *iv, |
190 | | void *outbuf_arg, |
191 | | const void *inbuf_arg, |
192 | | size_t nblocks, |
193 | | int cbc_mac); |
194 | | extern void _gcry_aes_vp_aarch64_ctr_enc (void *context, unsigned char *ctr, |
195 | | void *outbuf_arg, |
196 | | const void *inbuf_arg, |
197 | | size_t nblocks); |
198 | | extern void _gcry_aes_vp_aarch64_ctr32le_enc (void *context, unsigned char *ctr, |
199 | | void *outbuf_arg, |
200 | | const void *inbuf_arg, |
201 | | size_t nblocks); |
202 | | extern void _gcry_aes_vp_aarch64_cfb_dec (void *context, unsigned char *iv, |
203 | | void *outbuf_arg, |
204 | | const void *inbuf_arg, |
205 | | size_t nblocks); |
206 | | extern void _gcry_aes_vp_aarch64_cbc_dec (void *context, unsigned char *iv, |
207 | | void *outbuf_arg, |
208 | | const void *inbuf_arg, |
209 | | size_t nblocks); |
210 | | extern size_t _gcry_aes_vp_aarch64_ocb_crypt (gcry_cipher_hd_t c, |
211 | | void *outbuf_arg, |
212 | | const void *inbuf_arg, |
213 | | size_t nblocks, |
214 | | int encrypt); |
215 | | extern size_t _gcry_aes_vp_aarch64_ocb_auth (gcry_cipher_hd_t c, |
216 | | const void *abuf_arg, |
217 | | size_t nblocks); |
218 | | extern void _gcry_aes_vp_aarch64_ecb_crypt (void *context, void *outbuf_arg, |
219 | | const void *inbuf_arg, |
220 | | size_t nblocks, int encrypt); |
221 | | extern void _gcry_aes_vp_aarch64_xts_crypt (void *context, unsigned char *tweak, |
222 | | void *outbuf_arg, |
223 | | const void *inbuf_arg, |
224 | | size_t nblocks, int encrypt); |
225 | | #endif |
226 | | |
227 | | #ifdef USE_PADLOCK |
228 | | extern unsigned int _gcry_aes_padlock_encrypt (const RIJNDAEL_context *ctx, |
229 | | unsigned char *bx, |
230 | | const unsigned char *ax); |
231 | | extern unsigned int _gcry_aes_padlock_decrypt (const RIJNDAEL_context *ctx, |
232 | | unsigned char *bx, |
233 | | const unsigned char *ax); |
234 | | extern void _gcry_aes_padlock_prepare_decryption (RIJNDAEL_context *ctx); |
235 | | #endif |
236 | | |
237 | | #ifdef USE_ARM_ASM |
238 | | /* ARM assembly implementations of AES */ |
239 | | extern unsigned int _gcry_aes_arm_encrypt_block(const void *keysched_enc, |
240 | | unsigned char *out, |
241 | | const unsigned char *in, |
242 | | int rounds, |
243 | | const void *encT); |
244 | | |
245 | | extern unsigned int _gcry_aes_arm_decrypt_block(const void *keysched_dec, |
246 | | unsigned char *out, |
247 | | const unsigned char *in, |
248 | | int rounds, |
249 | | const void *decT); |
250 | | #endif /*USE_ARM_ASM*/ |
251 | | |
252 | | #ifdef USE_ARM_CE |
253 | | /* ARMv8 Crypto Extension implementations of AES */ |
254 | | extern void _gcry_aes_armv8_ce_setkey(RIJNDAEL_context *ctx, const byte *key); |
255 | | extern void _gcry_aes_armv8_ce_prepare_decryption(RIJNDAEL_context *ctx); |
256 | | |
257 | | extern unsigned int _gcry_aes_armv8_ce_encrypt(const RIJNDAEL_context *ctx, |
258 | | unsigned char *dst, |
259 | | const unsigned char *src); |
260 | | extern unsigned int _gcry_aes_armv8_ce_decrypt(const RIJNDAEL_context *ctx, |
261 | | unsigned char *dst, |
262 | | const unsigned char *src); |
263 | | |
264 | | extern void _gcry_aes_armv8_ce_cfb_enc (void *context, unsigned char *iv, |
265 | | void *outbuf_arg, const void *inbuf_arg, |
266 | | size_t nblocks); |
267 | | extern void _gcry_aes_armv8_ce_cbc_enc (void *context, unsigned char *iv, |
268 | | void *outbuf_arg, const void *inbuf_arg, |
269 | | size_t nblocks, |
270 | | int cbc_mac); |
271 | | extern void _gcry_aes_armv8_ce_ctr_enc (void *context, unsigned char *ctr, |
272 | | void *outbuf_arg, const void *inbuf_arg, |
273 | | size_t nblocks); |
274 | | extern void _gcry_aes_armv8_ce_ctr32le_enc (void *context, unsigned char *ctr, |
275 | | void *outbuf_arg, |
276 | | const void *inbuf_arg, |
277 | | size_t nblocks); |
278 | | extern void _gcry_aes_armv8_ce_cfb_dec (void *context, unsigned char *iv, |
279 | | void *outbuf_arg, const void *inbuf_arg, |
280 | | size_t nblocks); |
281 | | extern void _gcry_aes_armv8_ce_cbc_dec (void *context, unsigned char *iv, |
282 | | void *outbuf_arg, const void *inbuf_arg, |
283 | | size_t nblocks); |
284 | | extern size_t _gcry_aes_armv8_ce_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, |
285 | | const void *inbuf_arg, size_t nblocks, |
286 | | int encrypt); |
287 | | extern size_t _gcry_aes_armv8_ce_ocb_auth (gcry_cipher_hd_t c, |
288 | | const void *abuf_arg, size_t nblocks); |
289 | | extern void _gcry_aes_armv8_ce_xts_crypt (void *context, unsigned char *tweak, |
290 | | void *outbuf_arg, |
291 | | const void *inbuf_arg, |
292 | | size_t nblocks, int encrypt); |
293 | | extern void _gcry_aes_armv8_ce_ecb_crypt (void *context, void *outbuf_arg, |
294 | | const void *inbuf_arg, size_t nblocks, |
295 | | int encrypt); |
296 | | #endif /*USE_ARM_CE*/ |
297 | | |
298 | | #ifdef USE_PPC_CRYPTO |
299 | | /* PowerPC Crypto implementations of AES */ |
300 | | extern void _gcry_aes_ppc8_setkey(RIJNDAEL_context *ctx, const byte *key); |
301 | | extern void _gcry_aes_ppc8_prepare_decryption(RIJNDAEL_context *ctx); |
302 | | |
303 | | extern unsigned int _gcry_aes_ppc8_encrypt(const RIJNDAEL_context *ctx, |
304 | | unsigned char *dst, |
305 | | const unsigned char *src); |
306 | | extern unsigned int _gcry_aes_ppc8_decrypt(const RIJNDAEL_context *ctx, |
307 | | unsigned char *dst, |
308 | | const unsigned char *src); |
309 | | |
310 | | extern void _gcry_aes_ppc8_ecb_crypt (void *context, void *outbuf_arg, |
311 | | const void *inbuf_arg, size_t nblocks, |
312 | | int encrypt); |
313 | | |
314 | | extern void _gcry_aes_ppc8_cfb_enc (void *context, unsigned char *iv, |
315 | | void *outbuf_arg, const void *inbuf_arg, |
316 | | size_t nblocks); |
317 | | extern void _gcry_aes_ppc8_cbc_enc (void *context, unsigned char *iv, |
318 | | void *outbuf_arg, const void *inbuf_arg, |
319 | | size_t nblocks, int cbc_mac); |
320 | | extern void _gcry_aes_ppc8_ctr_enc (void *context, unsigned char *ctr, |
321 | | void *outbuf_arg, const void *inbuf_arg, |
322 | | size_t nblocks); |
323 | | extern void _gcry_aes_ppc8_cfb_dec (void *context, unsigned char *iv, |
324 | | void *outbuf_arg, const void *inbuf_arg, |
325 | | size_t nblocks); |
326 | | extern void _gcry_aes_ppc8_cbc_dec (void *context, unsigned char *iv, |
327 | | void *outbuf_arg, const void *inbuf_arg, |
328 | | size_t nblocks); |
329 | | |
330 | | extern size_t _gcry_aes_ppc8_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, |
331 | | const void *inbuf_arg, size_t nblocks, |
332 | | int encrypt); |
333 | | extern size_t _gcry_aes_ppc8_ocb_auth (gcry_cipher_hd_t c, |
334 | | const void *abuf_arg, size_t nblocks); |
335 | | |
336 | | extern void _gcry_aes_ppc8_xts_crypt (void *context, unsigned char *tweak, |
337 | | void *outbuf_arg, |
338 | | const void *inbuf_arg, |
339 | | size_t nblocks, int encrypt); |
340 | | |
341 | | extern void _gcry_aes_ppc8_ctr32le_enc (void *context, unsigned char *ctr, |
342 | | void *outbuf_arg, const void *inbuf_arg, |
343 | | size_t nblocks); |
344 | | #endif /*USE_PPC_CRYPTO*/ |
345 | | |
346 | | #ifdef USE_PPC_CRYPTO_WITH_PPC9LE |
347 | | /* Power9 little-endian crypto implementations of AES */ |
348 | | extern unsigned int _gcry_aes_ppc9le_encrypt(const RIJNDAEL_context *ctx, |
349 | | unsigned char *dst, |
350 | | const unsigned char *src); |
351 | | extern unsigned int _gcry_aes_ppc9le_decrypt(const RIJNDAEL_context *ctx, |
352 | | unsigned char *dst, |
353 | | const unsigned char *src); |
354 | | |
355 | | extern void _gcry_aes_ppc9le_ecb_crypt (void *context, void *outbuf_arg, |
356 | | const void *inbuf_arg, size_t nblocks, |
357 | | int encrypt); |
358 | | |
359 | | extern void _gcry_aes_ppc9le_cfb_enc (void *context, unsigned char *iv, |
360 | | void *outbuf_arg, const void *inbuf_arg, |
361 | | size_t nblocks); |
362 | | extern void _gcry_aes_ppc9le_cbc_enc (void *context, unsigned char *iv, |
363 | | void *outbuf_arg, const void *inbuf_arg, |
364 | | size_t nblocks, int cbc_mac); |
365 | | extern void _gcry_aes_ppc9le_ctr_enc (void *context, unsigned char *ctr, |
366 | | void *outbuf_arg, const void *inbuf_arg, |
367 | | size_t nblocks); |
368 | | extern void _gcry_aes_ppc9le_cfb_dec (void *context, unsigned char *iv, |
369 | | void *outbuf_arg, const void *inbuf_arg, |
370 | | size_t nblocks); |
371 | | extern void _gcry_aes_ppc9le_cbc_dec (void *context, unsigned char *iv, |
372 | | void *outbuf_arg, const void *inbuf_arg, |
373 | | size_t nblocks); |
374 | | |
375 | | extern size_t _gcry_aes_ppc9le_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, |
376 | | const void *inbuf_arg, size_t nblocks, |
377 | | int encrypt); |
378 | | extern size_t _gcry_aes_ppc9le_ocb_auth (gcry_cipher_hd_t c, |
379 | | const void *abuf_arg, size_t nblocks); |
380 | | |
381 | | extern void _gcry_aes_ppc9le_xts_crypt (void *context, unsigned char *tweak, |
382 | | void *outbuf_arg, |
383 | | const void *inbuf_arg, |
384 | | size_t nblocks, int encrypt); |
385 | | |
386 | | extern void _gcry_aes_ppc9le_ctr32le_enc (void *context, unsigned char *ctr, |
387 | | void *outbuf_arg, |
388 | | const void *inbuf_arg, |
389 | | size_t nblocks); |
390 | | |
391 | | extern size_t _gcry_aes_p10le_gcm_crypt (gcry_cipher_hd_t c, void *outbuf_arg, |
392 | | const void *inbuf_arg, |
393 | | size_t nblocks, int encrypt); |
394 | | #endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/ |
395 | | |
396 | | #ifdef USE_S390X_CRYPTO |
397 | | /* zSeries crypto implementations of AES */ |
398 | | extern int _gcry_aes_s390x_setup_acceleration(RIJNDAEL_context *ctx, |
399 | | unsigned int keylen, |
400 | | unsigned int hwfeatures, |
401 | | cipher_bulk_ops_t *bulk_ops); |
402 | | extern void _gcry_aes_s390x_setkey(RIJNDAEL_context *ctx, const byte *key); |
403 | | extern void _gcry_aes_s390x_prepare_decryption(RIJNDAEL_context *ctx); |
404 | | |
405 | | extern unsigned int _gcry_aes_s390x_encrypt(const RIJNDAEL_context *ctx, |
406 | | unsigned char *dst, |
407 | | const unsigned char *src); |
408 | | extern unsigned int _gcry_aes_s390x_decrypt(const RIJNDAEL_context *ctx, |
409 | | unsigned char *dst, |
410 | | const unsigned char *src); |
411 | | |
412 | | #endif /*USE_S390X_CRYPTO*/ |
413 | | |
414 | | static unsigned int do_encrypt (const RIJNDAEL_context *ctx, unsigned char *bx, |
415 | | const unsigned char *ax); |
416 | | static unsigned int do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx, |
417 | | const unsigned char *ax); |
418 | | |
419 | | static void _gcry_aes_cfb_enc (void *context, unsigned char *iv, |
420 | | void *outbuf, const void *inbuf, |
421 | | size_t nblocks); |
422 | | static void _gcry_aes_cfb_dec (void *context, unsigned char *iv, |
423 | | void *outbuf_arg, const void *inbuf_arg, |
424 | | size_t nblocks); |
425 | | static void _gcry_aes_cbc_enc (void *context, unsigned char *iv, |
426 | | void *outbuf_arg, const void *inbuf_arg, |
427 | | size_t nblocks, int cbc_mac); |
428 | | static void _gcry_aes_cbc_dec (void *context, unsigned char *iv, |
429 | | void *outbuf_arg, const void *inbuf_arg, |
430 | | size_t nblocks); |
431 | | static void _gcry_aes_ctr_enc (void *context, unsigned char *ctr, |
432 | | void *outbuf_arg, const void *inbuf_arg, |
433 | | size_t nblocks); |
434 | | static size_t _gcry_aes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, |
435 | | const void *inbuf_arg, size_t nblocks, |
436 | | int encrypt); |
437 | | static size_t _gcry_aes_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, |
438 | | size_t nblocks); |
439 | | static void _gcry_aes_xts_crypt (void *context, unsigned char *tweak, |
440 | | void *outbuf_arg, const void *inbuf_arg, |
441 | | size_t nblocks, int encrypt); |
442 | | |
443 | | |
444 | | /* All the numbers. */ |
445 | | #include "rijndael-tables.h" |
446 | | |
447 | | |
448 | | |
449 | | |
450 | | /* Function prototypes. */ |
451 | | static const char *selftest(void); |
452 | | static void prepare_decryption(RIJNDAEL_context *ctx); |
453 | | |
454 | | |
455 | | |
456 | | /* Prefetching for encryption/decryption tables. */ |
457 | | static inline void prefetch_table(const volatile byte *tab, size_t len) |
458 | 0 | { |
459 | 0 | size_t i; |
460 | |
461 | 0 | for (i = 0; len - i >= 8 * 32; i += 8 * 32) |
462 | 0 | { |
463 | 0 | (void)tab[i + 0 * 32]; |
464 | 0 | (void)tab[i + 1 * 32]; |
465 | 0 | (void)tab[i + 2 * 32]; |
466 | 0 | (void)tab[i + 3 * 32]; |
467 | 0 | (void)tab[i + 4 * 32]; |
468 | 0 | (void)tab[i + 5 * 32]; |
469 | 0 | (void)tab[i + 6 * 32]; |
470 | 0 | (void)tab[i + 7 * 32]; |
471 | 0 | } |
472 | 0 | for (; i < len; i += 32) |
473 | 0 | { |
474 | 0 | (void)tab[i]; |
475 | 0 | } |
476 | |
477 | 0 | (void)tab[len - 1]; |
478 | 0 | } |
479 | | |
480 | | static void prefetch_enc(void) |
481 | 0 | { |
482 | | /* Modify counters to trigger copy-on-write and unsharing if physical pages |
483 | | * of the look-up table are shared between processes. Modifying the counters |
484 | | * also changes the page checksums, hinting to the same-page merging |
485 | | * algorithm that these pages are frequently changing. */ |
486 | 0 | enc_tables.counter_head++; |
487 | 0 | enc_tables.counter_tail++; |
488 | | |
489 | | /* Prefetch look-up tables to cache. */ |
490 | 0 | prefetch_table((const void *)&enc_tables, sizeof(enc_tables)); |
491 | 0 | } |
492 | | |
493 | | static void prefetch_dec(void) |
494 | 0 | { |
495 | | /* Modify counters to trigger copy-on-write and unsharing if physical pages |
496 | | * of the look-up table are shared between processes. Modifying the counters |
497 | | * also changes the page checksums, hinting to the same-page merging |
498 | | * algorithm that these pages are frequently changing. */ |
499 | 0 | dec_tables.counter_head++; |
500 | 0 | dec_tables.counter_tail++; |
501 | | |
502 | | /* Prefetch look-up tables to cache. */ |
503 | 0 | prefetch_table((const void *)&dec_tables, sizeof(dec_tables)); |
504 | 0 | } |
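prefetch_enc() and prefetch_dec() above serve two purposes: touching every cache line of the tables before use blunts cache-timing attacks on the table-driven path, and bumping the counters forces copy-on-write so the table pages are never shared across processes (e.g. by KSM). The counters are assumed to bracket the tables roughly as in this sketch; the real definition lives in rijndael-tables.h, so the field names and sizes here are assumptions:

    /* A minimal layout sketch, assuming counters adjacent to the tables. */
    typedef struct
    {
      volatile u32 counter_head;   /* written to force copy-on-write */
      u32 T[256 * 4];              /* illustrative table size */
      volatile u32 counter_tail;   /* same trick for the trailing page */
    } aes_tables_sketch_t;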
505 | | |
506 | | |
507 | | |
508 | | static inline u32 |
509 | | sbox4(u32 inb4) |
510 | 0 | { |
511 | 0 | u32 out; |
512 | 0 | out = (encT[(inb4 >> 0) & 0xffU] & 0xff00U) >> 8; |
513 | 0 | out |= (encT[(inb4 >> 8) & 0xffU] & 0xff00U) >> 0; |
514 | 0 | out |= (encT[(inb4 >> 16) & 0xffU] & 0xff0000U) << 0; |
515 | 0 | out |= (encT[(inb4 >> 24) & 0xffU] & 0xff0000U) << 8; |
516 | 0 | return out; |
517 | 0 | } |
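sbox4() applies the AES S-box to each byte of a word without a dedicated S-box table: encT already carries S(x) in two of its byte lanes, so masking and shifting extracts it. Assuming a plain 256-byte S-box table (sbox256 below is illustrative, not part of this file), the function is equivalent to this sketch:

    /* Reference equivalent of sbox4(), given a plain S-box table. */
    static u32
    sbox4_ref (const byte sbox256[256], u32 w)
    {
      return (u32)sbox256[(w >> 0) & 0xff]
             | (u32)sbox256[(w >> 8) & 0xff] << 8
             | (u32)sbox256[(w >> 16) & 0xff] << 16
             | (u32)sbox256[(w >> 24) & 0xff] << 24;
    }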
518 | | |
519 | | /* Perform the key setup. */ |
520 | | static gcry_err_code_t |
521 | | do_setkey (RIJNDAEL_context *ctx, const byte *key, const unsigned keylen, |
522 | | cipher_bulk_ops_t *bulk_ops) |
523 | 203 | { |
524 | 203 | static int initialized = 0; |
525 | 203 | static const char *selftest_failed = 0; |
526 | 203 | void (*hw_setkey)(RIJNDAEL_context *ctx, const byte *key) = NULL; |
527 | 203 | int rounds; |
528 | 203 | unsigned int KC; |
529 | 203 | unsigned int hwfeatures; |
530 | | |
531 | | /* The on-the-fly self tests are only run in non-fips mode. In fips |
532 | | mode explicit self-tests are required. Actually the on-the-fly |
533 | | self-tests are not fully thread-safe and it might happen that a |
534 | | failed self-test won't get noticed in another thread. |
535 | | |
536 | | FIXME: We might want to have a central registry of succeeded |
537 | | self-tests. */ |
538 | 203 | if (!fips_mode () && !initialized) |
539 | 5 | { |
540 | 5 | initialized = 1; |
541 | 5 | selftest_failed = selftest (); |
542 | 5 | if (selftest_failed) |
543 | 0 | log_error ("%s\n", selftest_failed ); |
544 | 5 | } |
545 | 203 | if (selftest_failed) |
546 | 0 | return GPG_ERR_SELFTEST_FAILED; |
547 | | |
548 | 203 | if( keylen == 128/8 ) |
549 | 60 | { |
550 | 60 | rounds = 10; |
551 | 60 | KC = 4; |
552 | 60 | } |
553 | 143 | else if ( keylen == 192/8 ) |
554 | 19 | { |
555 | 19 | rounds = 12; |
556 | 19 | KC = 6; |
557 | 19 | } |
558 | 124 | else if ( keylen == 256/8 ) |
559 | 103 | { |
560 | 103 | rounds = 14; |
561 | 103 | KC = 8; |
562 | 103 | } |
563 | 21 | else |
564 | 21 | return GPG_ERR_INV_KEYLEN; |
565 | | |
566 | 182 | ctx->rounds = rounds; |
567 | 182 | hwfeatures = _gcry_get_hw_features (); |
568 | | |
569 | 182 | ctx->decryption_prepared = 0; |
570 | | |
571 | | /* Setup default bulk encryption routines. */ |
572 | 182 | memset (bulk_ops, 0, sizeof(*bulk_ops)); |
573 | 182 | bulk_ops->cfb_enc = _gcry_aes_cfb_enc; |
574 | 182 | bulk_ops->cfb_dec = _gcry_aes_cfb_dec; |
575 | 182 | bulk_ops->cbc_enc = _gcry_aes_cbc_enc; |
576 | 182 | bulk_ops->cbc_dec = _gcry_aes_cbc_dec; |
577 | 182 | bulk_ops->ctr_enc = _gcry_aes_ctr_enc; |
578 | 182 | bulk_ops->ocb_crypt = _gcry_aes_ocb_crypt; |
579 | 182 | bulk_ops->ocb_auth = _gcry_aes_ocb_auth; |
580 | 182 | bulk_ops->xts_crypt = _gcry_aes_xts_crypt; |
581 | | |
582 | 182 | (void)hwfeatures; |
583 | | |
584 | 182 | if (0) |
585 | 0 | { |
586 | 0 | ; |
587 | 0 | } |
588 | 182 | #ifdef USE_AESNI |
589 | 182 | else if (hwfeatures & HWF_INTEL_AESNI) |
590 | 182 | { |
591 | 182 | hw_setkey = _gcry_aes_aesni_do_setkey; |
592 | 182 | ctx->encrypt_fn = _gcry_aes_aesni_encrypt; |
593 | 182 | ctx->decrypt_fn = _gcry_aes_aesni_decrypt; |
594 | 182 | ctx->prefetch_enc_fn = NULL; |
595 | 182 | ctx->prefetch_dec_fn = NULL; |
596 | 182 | ctx->prepare_decryption = _gcry_aes_aesni_prepare_decryption; |
597 | 182 | ctx->use_avx = !!(hwfeatures & HWF_INTEL_AVX); |
598 | 182 | ctx->use_avx2 = !!(hwfeatures & HWF_INTEL_AVX2); |
599 | | |
600 | | /* Setup AES-NI bulk encryption routines. */ |
601 | 182 | bulk_ops->cfb_enc = _gcry_aes_aesni_cfb_enc; |
602 | 182 | bulk_ops->cfb_dec = _gcry_aes_aesni_cfb_dec; |
603 | 182 | bulk_ops->cbc_enc = _gcry_aes_aesni_cbc_enc; |
604 | 182 | bulk_ops->cbc_dec = _gcry_aes_aesni_cbc_dec; |
605 | 182 | bulk_ops->ctr_enc = _gcry_aes_aesni_ctr_enc; |
606 | 182 | bulk_ops->ctr32le_enc = _gcry_aes_aesni_ctr32le_enc; |
607 | 182 | bulk_ops->ocb_crypt = _gcry_aes_aesni_ocb_crypt; |
608 | 182 | bulk_ops->ocb_auth = _gcry_aes_aesni_ocb_auth; |
609 | 182 | bulk_ops->xts_crypt = _gcry_aes_aesni_xts_crypt; |
610 | 182 | bulk_ops->ecb_crypt = _gcry_aes_aesni_ecb_crypt; |
611 | | |
612 | 182 | #ifdef USE_VAES |
613 | 182 | if ((hwfeatures & HWF_INTEL_VAES_VPCLMUL) && |
614 | 182 | (hwfeatures & HWF_INTEL_AVX2)) |
615 | 0 | { |
616 | | /* Setup VAES bulk encryption routines. */ |
617 | 0 | bulk_ops->cfb_dec = _gcry_aes_vaes_cfb_dec; |
618 | 0 | bulk_ops->cbc_dec = _gcry_aes_vaes_cbc_dec; |
619 | 0 | bulk_ops->ctr_enc = _gcry_aes_vaes_ctr_enc; |
620 | 0 | bulk_ops->ctr32le_enc = _gcry_aes_vaes_ctr32le_enc; |
621 | 0 | bulk_ops->ocb_crypt = _gcry_aes_vaes_ocb_crypt; |
622 | 0 | bulk_ops->ocb_auth = _gcry_aes_vaes_ocb_auth; |
623 | 0 | bulk_ops->xts_crypt = _gcry_aes_vaes_xts_crypt; |
624 | 0 | bulk_ops->ecb_crypt = _gcry_aes_vaes_ecb_crypt; |
625 | 0 | } |
626 | 182 | #endif |
627 | | #ifdef USE_VAES_I386 |
628 | | if ((hwfeatures & HWF_INTEL_VAES_VPCLMUL) && |
629 | | (hwfeatures & HWF_INTEL_AVX2)) |
630 | | { |
631 | | /* Setup VAES bulk encryption routines. */ |
632 | | bulk_ops->cfb_dec = _gcry_aes_vaes_cfb_dec; |
633 | | bulk_ops->cbc_dec = _gcry_aes_vaes_cbc_dec; |
634 | | bulk_ops->ctr_enc = _gcry_aes_vaes_ctr_enc; |
635 | | bulk_ops->ctr32le_enc = _gcry_aes_vaes_ctr32le_enc; |
636 | | bulk_ops->ocb_crypt = _gcry_aes_vaes_ocb_crypt; |
637 | | bulk_ops->ocb_auth = _gcry_aes_vaes_ocb_auth; |
638 | | bulk_ops->xts_crypt = _gcry_aes_vaes_xts_crypt; |
639 | | bulk_ops->ecb_crypt = _gcry_aes_vaes_ecb_crypt; |
640 | | } |
641 | | #endif |
642 | 182 | } |
643 | 0 | #endif |
644 | 0 | #ifdef USE_PADLOCK |
645 | 0 | else if ((hwfeatures & HWF_PADLOCK_AES) && keylen == 128/8) |
646 | 0 | { |
647 | 0 | ctx->encrypt_fn = _gcry_aes_padlock_encrypt; |
648 | 0 | ctx->decrypt_fn = _gcry_aes_padlock_decrypt; |
649 | 0 | ctx->prefetch_enc_fn = NULL; |
650 | 0 | ctx->prefetch_dec_fn = NULL; |
651 | 0 | ctx->prepare_decryption = _gcry_aes_padlock_prepare_decryption; |
652 | 0 | memcpy (ctx->padlockkey, key, keylen); |
653 | 0 | } |
654 | 0 | #endif |
655 | 0 | #ifdef USE_SSSE3 |
656 | 0 | else if (hwfeatures & HWF_INTEL_SSSE3) |
657 | 0 | { |
658 | 0 | hw_setkey = _gcry_aes_ssse3_do_setkey; |
659 | 0 | ctx->encrypt_fn = _gcry_aes_ssse3_encrypt; |
660 | 0 | ctx->decrypt_fn = _gcry_aes_ssse3_decrypt; |
661 | 0 | ctx->prefetch_enc_fn = NULL; |
662 | 0 | ctx->prefetch_dec_fn = NULL; |
663 | 0 | ctx->prepare_decryption = _gcry_aes_ssse3_prepare_decryption; |
664 | | |
665 | | /* Setup SSSE3 bulk encryption routines. */ |
666 | 0 | bulk_ops->cfb_enc = _gcry_aes_ssse3_cfb_enc; |
667 | 0 | bulk_ops->cfb_dec = _gcry_aes_ssse3_cfb_dec; |
668 | 0 | bulk_ops->cbc_enc = _gcry_aes_ssse3_cbc_enc; |
669 | 0 | bulk_ops->cbc_dec = _gcry_aes_ssse3_cbc_dec; |
670 | 0 | bulk_ops->ctr_enc = _gcry_aes_ssse3_ctr_enc; |
671 | 0 | bulk_ops->ocb_crypt = _gcry_aes_ssse3_ocb_crypt; |
672 | 0 | bulk_ops->ocb_auth = _gcry_aes_ssse3_ocb_auth; |
673 | 0 | } |
674 | 0 | #endif |
675 | | #ifdef USE_ARM_CE |
676 | | else if (hwfeatures & HWF_ARM_AES) |
677 | | { |
678 | | hw_setkey = _gcry_aes_armv8_ce_setkey; |
679 | | ctx->encrypt_fn = _gcry_aes_armv8_ce_encrypt; |
680 | | ctx->decrypt_fn = _gcry_aes_armv8_ce_decrypt; |
681 | | ctx->prefetch_enc_fn = NULL; |
682 | | ctx->prefetch_dec_fn = NULL; |
683 | | ctx->prepare_decryption = _gcry_aes_armv8_ce_prepare_decryption; |
684 | | |
685 | | /* Setup ARM-CE bulk encryption routines. */ |
686 | | bulk_ops->cfb_enc = _gcry_aes_armv8_ce_cfb_enc; |
687 | | bulk_ops->cfb_dec = _gcry_aes_armv8_ce_cfb_dec; |
688 | | bulk_ops->cbc_enc = _gcry_aes_armv8_ce_cbc_enc; |
689 | | bulk_ops->cbc_dec = _gcry_aes_armv8_ce_cbc_dec; |
690 | | bulk_ops->ctr_enc = _gcry_aes_armv8_ce_ctr_enc; |
691 | | bulk_ops->ctr32le_enc = _gcry_aes_armv8_ce_ctr32le_enc; |
692 | | bulk_ops->ocb_crypt = _gcry_aes_armv8_ce_ocb_crypt; |
693 | | bulk_ops->ocb_auth = _gcry_aes_armv8_ce_ocb_auth; |
694 | | bulk_ops->xts_crypt = _gcry_aes_armv8_ce_xts_crypt; |
695 | | bulk_ops->ecb_crypt = _gcry_aes_armv8_ce_ecb_crypt; |
696 | | } |
697 | | #endif |
698 | | #ifdef USE_VP_AARCH64 |
699 | | else if (hwfeatures & HWF_ARM_NEON) |
700 | | { |
701 | | hw_setkey = _gcry_aes_vp_aarch64_do_setkey; |
702 | | ctx->encrypt_fn = _gcry_aes_vp_aarch64_encrypt; |
703 | | ctx->decrypt_fn = _gcry_aes_vp_aarch64_decrypt; |
704 | | ctx->prefetch_enc_fn = NULL; |
705 | | ctx->prefetch_dec_fn = NULL; |
706 | | ctx->prepare_decryption = _gcry_aes_vp_aarch64_prepare_decryption; |
707 | | |
708 | | /* Setup vector permute AArch64 bulk encryption routines. */ |
709 | | bulk_ops->cfb_enc = _gcry_aes_vp_aarch64_cfb_enc; |
710 | | bulk_ops->cfb_dec = _gcry_aes_vp_aarch64_cfb_dec; |
711 | | bulk_ops->cbc_enc = _gcry_aes_vp_aarch64_cbc_enc; |
712 | | bulk_ops->cbc_dec = _gcry_aes_vp_aarch64_cbc_dec; |
713 | | bulk_ops->ctr_enc = _gcry_aes_vp_aarch64_ctr_enc; |
714 | | bulk_ops->ctr32le_enc = _gcry_aes_vp_aarch64_ctr32le_enc; |
715 | | bulk_ops->ocb_crypt = _gcry_aes_vp_aarch64_ocb_crypt; |
716 | | bulk_ops->ocb_auth = _gcry_aes_vp_aarch64_ocb_auth; |
717 | | bulk_ops->ecb_crypt = _gcry_aes_vp_aarch64_ecb_crypt; |
718 | | bulk_ops->xts_crypt = _gcry_aes_vp_aarch64_xts_crypt; |
719 | | } |
720 | | #endif |
721 | | #ifdef USE_PPC_CRYPTO_WITH_PPC9LE |
722 | | else if ((hwfeatures & HWF_PPC_VCRYPTO) && (hwfeatures & HWF_PPC_ARCH_3_00)) |
723 | | { |
724 | | hw_setkey = _gcry_aes_ppc8_setkey; |
725 | | ctx->encrypt_fn = _gcry_aes_ppc9le_encrypt; |
726 | | ctx->decrypt_fn = _gcry_aes_ppc9le_decrypt; |
727 | | ctx->prefetch_enc_fn = NULL; |
728 | | ctx->prefetch_dec_fn = NULL; |
729 | | ctx->prepare_decryption = _gcry_aes_ppc8_prepare_decryption; |
730 | | |
731 | | /* Setup PPC9LE bulk encryption routines. */ |
732 | | bulk_ops->ecb_crypt = _gcry_aes_ppc9le_ecb_crypt; |
733 | | bulk_ops->cfb_enc = _gcry_aes_ppc9le_cfb_enc; |
734 | | bulk_ops->cfb_dec = _gcry_aes_ppc9le_cfb_dec; |
735 | | bulk_ops->cbc_enc = _gcry_aes_ppc9le_cbc_enc; |
736 | | bulk_ops->cbc_dec = _gcry_aes_ppc9le_cbc_dec; |
737 | | bulk_ops->ctr_enc = _gcry_aes_ppc9le_ctr_enc; |
738 | | bulk_ops->ocb_crypt = _gcry_aes_ppc9le_ocb_crypt; |
739 | | bulk_ops->ocb_auth = _gcry_aes_ppc9le_ocb_auth; |
740 | | bulk_ops->xts_crypt = _gcry_aes_ppc9le_xts_crypt; |
741 | | bulk_ops->ctr32le_enc = _gcry_aes_ppc9le_ctr32le_enc; |
742 | | if (hwfeatures & HWF_PPC_ARCH_3_10) /* for P10 */ |
743 | | bulk_ops->gcm_crypt = _gcry_aes_p10le_gcm_crypt; |
744 | | # ifdef ENABLE_FORCE_SOFT_HWFEATURES |
745 | | * HWF_PPC_ARCH_3_10 above is used as a soft HW-feature indicator for P10. |
746 | | * Actual implementation works with HWF_PPC_ARCH_3_00 also. */ |
747 | | if (hwfeatures & HWF_PPC_ARCH_3_00) |
748 | | bulk_ops->gcm_crypt = _gcry_aes_p10le_gcm_crypt; |
749 | | # endif |
750 | | } |
751 | | #endif |
752 | | #ifdef USE_PPC_CRYPTO |
753 | | else if (hwfeatures & HWF_PPC_VCRYPTO) |
754 | | { |
755 | | hw_setkey = _gcry_aes_ppc8_setkey; |
756 | | ctx->encrypt_fn = _gcry_aes_ppc8_encrypt; |
757 | | ctx->decrypt_fn = _gcry_aes_ppc8_decrypt; |
758 | | ctx->prefetch_enc_fn = NULL; |
759 | | ctx->prefetch_dec_fn = NULL; |
760 | | ctx->prepare_decryption = _gcry_aes_ppc8_prepare_decryption; |
761 | | |
762 | | /* Setup PPC8 bulk encryption routines. */ |
763 | | bulk_ops->ecb_crypt = _gcry_aes_ppc8_ecb_crypt; |
764 | | bulk_ops->cfb_enc = _gcry_aes_ppc8_cfb_enc; |
765 | | bulk_ops->cfb_dec = _gcry_aes_ppc8_cfb_dec; |
766 | | bulk_ops->cbc_enc = _gcry_aes_ppc8_cbc_enc; |
767 | | bulk_ops->cbc_dec = _gcry_aes_ppc8_cbc_dec; |
768 | | bulk_ops->ctr_enc = _gcry_aes_ppc8_ctr_enc; |
769 | | bulk_ops->ocb_crypt = _gcry_aes_ppc8_ocb_crypt; |
770 | | bulk_ops->ocb_auth = _gcry_aes_ppc8_ocb_auth; |
771 | | bulk_ops->xts_crypt = _gcry_aes_ppc8_xts_crypt; |
772 | | bulk_ops->ctr32le_enc = _gcry_aes_ppc8_ctr32le_enc; |
773 | | } |
774 | | #endif |
775 | | #ifdef USE_S390X_CRYPTO |
776 | | else if (_gcry_aes_s390x_setup_acceleration (ctx, keylen, hwfeatures, |
777 | | bulk_ops)) |
778 | | { |
779 | | hw_setkey = _gcry_aes_s390x_setkey; |
780 | | ctx->encrypt_fn = _gcry_aes_s390x_encrypt; |
781 | | ctx->decrypt_fn = _gcry_aes_s390x_decrypt; |
782 | | ctx->prefetch_enc_fn = NULL; |
783 | | ctx->prefetch_dec_fn = NULL; |
784 | | ctx->prepare_decryption = _gcry_aes_s390x_prepare_decryption; |
785 | | } |
786 | | #endif |
787 | 0 | else |
788 | 0 | { |
789 | 0 | ctx->encrypt_fn = do_encrypt; |
790 | 0 | ctx->decrypt_fn = do_decrypt; |
791 | 0 | ctx->prefetch_enc_fn = prefetch_enc; |
792 | 0 | ctx->prefetch_dec_fn = prefetch_dec; |
793 | 0 | ctx->prepare_decryption = prepare_decryption; |
794 | 0 | } |
795 | | |
796 | | /* NB: We don't yet support Padlock hardware key generation. */ |
797 | | |
798 | 182 | if (hw_setkey) |
799 | 182 | { |
800 | 182 | hw_setkey (ctx, key); |
801 | 182 | } |
802 | 0 | else |
803 | 0 | { |
804 | 0 | u32 W_prev; |
805 | 0 | u32 *W_u32 = ctx->keyschenc32b; |
806 | 0 | byte rcon = 1; |
807 | 0 | unsigned int i, j; |
808 | |
809 | 0 | prefetch_enc(); |
810 | |
811 | 0 | for (i = 0; i < KC; i += 2) |
812 | 0 | { |
813 | 0 | W_u32[i + 0] = buf_get_le32(key + i * 4 + 0); |
814 | 0 | W_u32[i + 1] = buf_get_le32(key + i * 4 + 4); |
815 | 0 | } |
816 | |
817 | 0 | for (i = KC, j = KC, W_prev = W_u32[KC - 1]; |
818 | 0 | i < 4 * (rounds + 1); |
819 | 0 | i += 2, j += 2) |
820 | 0 | { |
821 | 0 | u32 temp0 = W_prev; |
822 | 0 | u32 temp1; |
823 | |
824 | 0 | if (j == KC) |
825 | 0 | { |
826 | 0 | j = 0; |
827 | 0 | temp0 = sbox4(rol(temp0, 24)) ^ rcon; |
828 | 0 | rcon = ((rcon << 1) ^ (-(rcon >> 7) & 0x1b)) & 0xff; |
829 | 0 | } |
830 | 0 | else if (KC == 8 && j == 4) |
831 | 0 | { |
832 | 0 | temp0 = sbox4(temp0); |
833 | 0 | } |
834 | |
835 | 0 | temp1 = W_u32[i - KC + 0]; |
836 | |
837 | 0 | W_u32[i + 0] = temp0 ^ temp1; |
838 | 0 | W_u32[i + 1] = W_u32[i - KC + 1] ^ temp0 ^ temp1; |
839 | 0 | W_prev = W_u32[i + 1]; |
840 | 0 | } |
841 | 0 | } |
842 | | |
843 | 182 | return 0; |
844 | 203 | } |
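In the software key schedule above, the update rcon = ((rcon << 1) ^ (-(rcon >> 7) & 0x1b)) & 0xff is a branch-free multiplication by x in GF(2^8) modulo x^8 + x^4 + x^3 + x + 1 (xtime). A self-contained sketch that prints the ten FIPS-197 round constants this recurrence generates (01 02 04 08 10 20 40 80 1b 36):

    #include <stdio.h>

    int
    main (void)
    {
      unsigned char rcon = 1;
      int i;

      for (i = 0; i < 10; i++)
        {
          printf ("%02x ", rcon);   /* 01 02 04 08 10 20 40 80 1b 36 */
          rcon = (unsigned char)((rcon << 1) ^ (-(rcon >> 7) & 0x1b));
        }
      printf ("\n");
      return 0;
    }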
845 | | |
846 | | |
847 | | static gcry_err_code_t |
848 | | rijndael_setkey (void *context, const byte *key, const unsigned keylen, |
849 | | cipher_bulk_ops_t *bulk_ops) |
850 | 203 | { |
851 | 203 | RIJNDAEL_context *ctx = context; |
852 | 203 | return do_setkey (ctx, key, keylen, bulk_ops); |
853 | 203 | } |
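rijndael_setkey() is not called directly by applications; it is reached through libgcrypt's cipher dispatch when a key is set via the public API. A minimal usage sketch (error handling elided):

    #include <gcrypt.h>

    static void
    aes256_cbc_example (void)
    {
      gcry_cipher_hd_t hd;
      unsigned char key[32] = { 0 };   /* 256-bit key; example value only */

      gcry_cipher_open (&hd, GCRY_CIPHER_AES256, GCRY_CIPHER_MODE_CBC, 0);
      gcry_cipher_setkey (hd, key, sizeof key);  /* lands in rijndael_setkey() */
      /* ... gcry_cipher_setiv / gcry_cipher_encrypt ... */
      gcry_cipher_close (hd);
    }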
854 | | |
855 | | |
856 | | /* Make a decryption key from an encryption key. */ |
857 | | static void |
858 | | prepare_decryption( RIJNDAEL_context *ctx ) |
859 | 0 | { |
860 | 0 | const byte *sbox = ((const byte *)encT) + 1; |
861 | 0 | int r; |
862 | |
863 | 0 | prefetch_enc(); |
864 | 0 | prefetch_dec(); |
865 | |
866 | 0 | ctx->keyschdec32[0][0] = ctx->keyschenc32[0][0]; |
867 | 0 | ctx->keyschdec32[0][1] = ctx->keyschenc32[0][1]; |
868 | 0 | ctx->keyschdec32[0][2] = ctx->keyschenc32[0][2]; |
869 | 0 | ctx->keyschdec32[0][3] = ctx->keyschenc32[0][3]; |
870 | |
871 | 0 | for (r = 1; r < ctx->rounds; r++) |
872 | 0 | { |
873 | 0 | u32 *wi = ctx->keyschenc32[r]; |
874 | 0 | u32 *wo = ctx->keyschdec32[r]; |
875 | 0 | u32 wt; |
876 | |
877 | 0 | wt = wi[0]; |
878 | 0 | wo[0] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0) |
879 | 0 | ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1) |
880 | 0 | ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2) |
881 | 0 | ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3); |
882 | |
883 | 0 | wt = wi[1]; |
884 | 0 | wo[1] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0) |
885 | 0 | ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1) |
886 | 0 | ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2) |
887 | 0 | ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3); |
888 | |
889 | 0 | wt = wi[2]; |
890 | 0 | wo[2] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0) |
891 | 0 | ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1) |
892 | 0 | ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2) |
893 | 0 | ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3); |
894 | |
895 | 0 | wt = wi[3]; |
896 | 0 | wo[3] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0) |
897 | 0 | ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1) |
898 | 0 | ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2) |
899 | 0 | ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3); |
900 | 0 | } |
901 | |
902 | 0 | ctx->keyschdec32[r][0] = ctx->keyschenc32[r][0]; |
903 | 0 | ctx->keyschdec32[r][1] = ctx->keyschenc32[r][1]; |
904 | 0 | ctx->keyschdec32[r][2] = ctx->keyschenc32[r][2]; |
905 | 0 | ctx->keyschdec32[r][3] = ctx->keyschenc32[r][3]; |
906 | 0 | } |
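Each decT-based expression in prepare_decryption() computes InvMixColumns of one encryption round-key word, as the equivalent inverse cipher of FIPS-197 requires: decT[sbox[x]] cancels the S-box against the inverse S-box baked into decT, leaving only the InvMixColumns factor. A plain reference sketch using GF(2^8) multiplication; gmul and inv_mix_columns_word are illustrative names, not part of this file:

    /* Multiply in GF(2^8) modulo x^8 + x^4 + x^3 + x + 1. */
    static byte
    gmul (byte a, byte b)
    {
      byte p = 0;
      while (b)
        {
          if (b & 1)
            p ^= a;
          a = (byte)((a << 1) ^ ((a & 0x80) ? 0x1b : 0));   /* xtime */
          b >>= 1;
        }
      return p;
    }

    /* InvMixColumns of one round-key word (little-endian bytes, matrix
     * rows 0e 0b 0d 09 / 09 0e 0b 0d / 0d 09 0e 0b / 0b 0d 09 0e). */
    static u32
    inv_mix_columns_word (u32 w)
    {
      byte b0 = (byte)w, b1 = (byte)(w >> 8), b2 = (byte)(w >> 16), b3 = (byte)(w >> 24);
      return  (u32)(gmul (b0, 14) ^ gmul (b1, 11) ^ gmul (b2, 13) ^ gmul (b3,  9))
            | (u32)(gmul (b0,  9) ^ gmul (b1, 14) ^ gmul (b2, 11) ^ gmul (b3, 13)) << 8
            | (u32)(gmul (b0, 13) ^ gmul (b1,  9) ^ gmul (b2, 14) ^ gmul (b3, 11)) << 16
            | (u32)(gmul (b0, 11) ^ gmul (b1, 13) ^ gmul (b2,  9) ^ gmul (b3, 14)) << 24;
    }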
907 | | |
908 | | |
909 | | #if !defined(USE_ARM_ASM) && !defined(USE_AMD64_ASM) |
910 | | /* Encrypt one block. A and B may be the same. */ |
911 | | static unsigned int |
912 | | do_encrypt_fn (const RIJNDAEL_context *ctx, unsigned char *b, |
913 | | const unsigned char *a) |
914 | | { |
915 | | #define rk (ctx->keyschenc32) |
916 | | const byte *sbox = ((const byte *)encT) + 1; |
917 | | int rounds = ctx->rounds; |
918 | | int r; |
919 | | u32 sa[4]; |
920 | | u32 sb[4]; |
921 | | |
922 | | sb[0] = buf_get_le32(a + 0); |
923 | | sb[1] = buf_get_le32(a + 4); |
924 | | sb[2] = buf_get_le32(a + 8); |
925 | | sb[3] = buf_get_le32(a + 12); |
926 | | |
927 | | sa[0] = sb[0] ^ rk[0][0]; |
928 | | sa[1] = sb[1] ^ rk[0][1]; |
929 | | sa[2] = sb[2] ^ rk[0][2]; |
930 | | sa[3] = sb[3] ^ rk[0][3]; |
931 | | |
932 | | sb[0] = rol(encT[(byte)(sa[0] >> (0 * 8))], (0 * 8)); |
933 | | sb[3] = rol(encT[(byte)(sa[0] >> (1 * 8))], (1 * 8)); |
934 | | sb[2] = rol(encT[(byte)(sa[0] >> (2 * 8))], (2 * 8)); |
935 | | sb[1] = rol(encT[(byte)(sa[0] >> (3 * 8))], (3 * 8)); |
936 | | sa[0] = rk[1][0] ^ sb[0]; |
937 | | |
938 | | sb[1] ^= rol(encT[(byte)(sa[1] >> (0 * 8))], (0 * 8)); |
939 | | sa[0] ^= rol(encT[(byte)(sa[1] >> (1 * 8))], (1 * 8)); |
940 | | sb[3] ^= rol(encT[(byte)(sa[1] >> (2 * 8))], (2 * 8)); |
941 | | sb[2] ^= rol(encT[(byte)(sa[1] >> (3 * 8))], (3 * 8)); |
942 | | sa[1] = rk[1][1] ^ sb[1]; |
943 | | |
944 | | sb[2] ^= rol(encT[(byte)(sa[2] >> (0 * 8))], (0 * 8)); |
945 | | sa[1] ^= rol(encT[(byte)(sa[2] >> (1 * 8))], (1 * 8)); |
946 | | sa[0] ^= rol(encT[(byte)(sa[2] >> (2 * 8))], (2 * 8)); |
947 | | sb[3] ^= rol(encT[(byte)(sa[2] >> (3 * 8))], (3 * 8)); |
948 | | sa[2] = rk[1][2] ^ sb[2]; |
949 | | |
950 | | sb[3] ^= rol(encT[(byte)(sa[3] >> (0 * 8))], (0 * 8)); |
951 | | sa[2] ^= rol(encT[(byte)(sa[3] >> (1 * 8))], (1 * 8)); |
952 | | sa[1] ^= rol(encT[(byte)(sa[3] >> (2 * 8))], (2 * 8)); |
953 | | sa[0] ^= rol(encT[(byte)(sa[3] >> (3 * 8))], (3 * 8)); |
954 | | sa[3] = rk[1][3] ^ sb[3]; |
955 | | |
956 | | for (r = 2; r < rounds; r++) |
957 | | { |
958 | | sb[0] = rol(encT[(byte)(sa[0] >> (0 * 8))], (0 * 8)); |
959 | | sb[3] = rol(encT[(byte)(sa[0] >> (1 * 8))], (1 * 8)); |
960 | | sb[2] = rol(encT[(byte)(sa[0] >> (2 * 8))], (2 * 8)); |
961 | | sb[1] = rol(encT[(byte)(sa[0] >> (3 * 8))], (3 * 8)); |
962 | | sa[0] = rk[r][0] ^ sb[0]; |
963 | | |
964 | | sb[1] ^= rol(encT[(byte)(sa[1] >> (0 * 8))], (0 * 8)); |
965 | | sa[0] ^= rol(encT[(byte)(sa[1] >> (1 * 8))], (1 * 8)); |
966 | | sb[3] ^= rol(encT[(byte)(sa[1] >> (2 * 8))], (2 * 8)); |
967 | | sb[2] ^= rol(encT[(byte)(sa[1] >> (3 * 8))], (3 * 8)); |
968 | | sa[1] = rk[r][1] ^ sb[1]; |
969 | | |
970 | | sb[2] ^= rol(encT[(byte)(sa[2] >> (0 * 8))], (0 * 8)); |
971 | | sa[1] ^= rol(encT[(byte)(sa[2] >> (1 * 8))], (1 * 8)); |
972 | | sa[0] ^= rol(encT[(byte)(sa[2] >> (2 * 8))], (2 * 8)); |
973 | | sb[3] ^= rol(encT[(byte)(sa[2] >> (3 * 8))], (3 * 8)); |
974 | | sa[2] = rk[r][2] ^ sb[2]; |
975 | | |
976 | | sb[3] ^= rol(encT[(byte)(sa[3] >> (0 * 8))], (0 * 8)); |
977 | | sa[2] ^= rol(encT[(byte)(sa[3] >> (1 * 8))], (1 * 8)); |
978 | | sa[1] ^= rol(encT[(byte)(sa[3] >> (2 * 8))], (2 * 8)); |
979 | | sa[0] ^= rol(encT[(byte)(sa[3] >> (3 * 8))], (3 * 8)); |
980 | | sa[3] = rk[r][3] ^ sb[3]; |
981 | | |
982 | | r++; |
983 | | |
984 | | sb[0] = rol(encT[(byte)(sa[0] >> (0 * 8))], (0 * 8)); |
985 | | sb[3] = rol(encT[(byte)(sa[0] >> (1 * 8))], (1 * 8)); |
986 | | sb[2] = rol(encT[(byte)(sa[0] >> (2 * 8))], (2 * 8)); |
987 | | sb[1] = rol(encT[(byte)(sa[0] >> (3 * 8))], (3 * 8)); |
988 | | sa[0] = rk[r][0] ^ sb[0]; |
989 | | |
990 | | sb[1] ^= rol(encT[(byte)(sa[1] >> (0 * 8))], (0 * 8)); |
991 | | sa[0] ^= rol(encT[(byte)(sa[1] >> (1 * 8))], (1 * 8)); |
992 | | sb[3] ^= rol(encT[(byte)(sa[1] >> (2 * 8))], (2 * 8)); |
993 | | sb[2] ^= rol(encT[(byte)(sa[1] >> (3 * 8))], (3 * 8)); |
994 | | sa[1] = rk[r][1] ^ sb[1]; |
995 | | |
996 | | sb[2] ^= rol(encT[(byte)(sa[2] >> (0 * 8))], (0 * 8)); |
997 | | sa[1] ^= rol(encT[(byte)(sa[2] >> (1 * 8))], (1 * 8)); |
998 | | sa[0] ^= rol(encT[(byte)(sa[2] >> (2 * 8))], (2 * 8)); |
999 | | sb[3] ^= rol(encT[(byte)(sa[2] >> (3 * 8))], (3 * 8)); |
1000 | | sa[2] = rk[r][2] ^ sb[2]; |
1001 | | |
1002 | | sb[3] ^= rol(encT[(byte)(sa[3] >> (0 * 8))], (0 * 8)); |
1003 | | sa[2] ^= rol(encT[(byte)(sa[3] >> (1 * 8))], (1 * 8)); |
1004 | | sa[1] ^= rol(encT[(byte)(sa[3] >> (2 * 8))], (2 * 8)); |
1005 | | sa[0] ^= rol(encT[(byte)(sa[3] >> (3 * 8))], (3 * 8)); |
1006 | | sa[3] = rk[r][3] ^ sb[3]; |
1007 | | } |
1008 | | |
1009 | | /* Last round is special. */ |
1010 | | |
1011 | | sb[0] = ((u32)sbox[(byte)(sa[0] >> (0 * 8)) * 4]) << (0 * 8); |
1012 | | sb[3] = ((u32)sbox[(byte)(sa[0] >> (1 * 8)) * 4]) << (1 * 8); |
1013 | | sb[2] = ((u32)sbox[(byte)(sa[0] >> (2 * 8)) * 4]) << (2 * 8); |
1014 | | sb[1] = ((u32)sbox[(byte)(sa[0] >> (3 * 8)) * 4]) << (3 * 8); |
1015 | | sa[0] = rk[r][0] ^ sb[0]; |
1016 | | |
1017 | | sb[1] ^= ((u32)sbox[(byte)(sa[1] >> (0 * 8)) * 4]) << (0 * 8); |
1018 | | sa[0] ^= ((u32)sbox[(byte)(sa[1] >> (1 * 8)) * 4]) << (1 * 8); |
1019 | | sb[3] ^= ((u32)sbox[(byte)(sa[1] >> (2 * 8)) * 4]) << (2 * 8); |
1020 | | sb[2] ^= ((u32)sbox[(byte)(sa[1] >> (3 * 8)) * 4]) << (3 * 8); |
1021 | | sa[1] = rk[r][1] ^ sb[1]; |
1022 | | |
1023 | | sb[2] ^= ((u32)sbox[(byte)(sa[2] >> (0 * 8)) * 4]) << (0 * 8); |
1024 | | sa[1] ^= ((u32)sbox[(byte)(sa[2] >> (1 * 8)) * 4]) << (1 * 8); |
1025 | | sa[0] ^= ((u32)sbox[(byte)(sa[2] >> (2 * 8)) * 4]) << (2 * 8); |
1026 | | sb[3] ^= ((u32)sbox[(byte)(sa[2] >> (3 * 8)) * 4]) << (3 * 8); |
1027 | | sa[2] = rk[r][2] ^ sb[2]; |
1028 | | |
1029 | | sb[3] ^= ((u32)sbox[(byte)(sa[3] >> (0 * 8)) * 4]) << (0 * 8); |
1030 | | sa[2] ^= ((u32)sbox[(byte)(sa[3] >> (1 * 8)) * 4]) << (1 * 8); |
1031 | | sa[1] ^= ((u32)sbox[(byte)(sa[3] >> (2 * 8)) * 4]) << (2 * 8); |
1032 | | sa[0] ^= ((u32)sbox[(byte)(sa[3] >> (3 * 8)) * 4]) << (3 * 8); |
1033 | | sa[3] = rk[r][3] ^ sb[3]; |
1034 | | |
1035 | | buf_put_le32(b + 0, sa[0]); |
1036 | | buf_put_le32(b + 4, sa[1]); |
1037 | | buf_put_le32(b + 8, sa[2]); |
1038 | | buf_put_le32(b + 12, sa[3]); |
1039 | | #undef rk |
1040 | | |
1041 | | return (56 + 2*sizeof(int)); |
1042 | | } |
1043 | | #endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/ |
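do_encrypt_fn() is the classic single-T-table construction: each encT entry holds the S-box output pre-multiplied by the MixColumns column (02 01 01 03), so SubBytes, ShiftRows and MixColumns for one output word cost four lookups, three rotations and four XORs, with rol() standing in for the usual four separate tables. A sketch of the assumed packing, consistent with sbox = (const byte *)encT + 1 above (for S(0) = 0x63 it yields 0xa56363c6); enc_table_entry is an illustrative name, the real table lives in rijndael-tables.h:

    static u32
    enc_table_entry (byte sx)     /* sx = S(x), the S-box output */
    {
      byte sx2 = (byte)((sx << 1) ^ ((sx & 0x80) ? 0x1b : 0));  /* 02 * S(x) */
      /* Bytes, LSB first: 02*S(x), S(x), S(x), 03*S(x)  (03 = 02 ^ 01). */
      return (u32)sx2 | (u32)sx << 8 | (u32)sx << 16 | (u32)(sx2 ^ sx) << 24;
    }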
1044 | | |
1045 | | |
1046 | | static unsigned int |
1047 | | do_encrypt (const RIJNDAEL_context *ctx, |
1048 | | unsigned char *bx, const unsigned char *ax) |
1049 | 0 | { |
1050 | 0 | #ifdef USE_AMD64_ASM |
1051 | 0 | return _gcry_aes_amd64_encrypt_block(ctx->keyschenc, bx, ax, ctx->rounds, |
1052 | 0 | enc_tables.T); |
1053 | | #elif defined(USE_ARM_ASM) |
1054 | | return _gcry_aes_arm_encrypt_block(ctx->keyschenc, bx, ax, ctx->rounds, |
1055 | | enc_tables.T); |
1056 | | #else |
1057 | | return do_encrypt_fn (ctx, bx, ax); |
1058 | | #endif /* !USE_ARM_ASM && !USE_AMD64_ASM*/ |
1059 | 0 | } |
1060 | | |
1061 | | |
1062 | | static unsigned int |
1063 | | rijndael_encrypt (void *context, byte *b, const byte *a) |
1064 | 403 | { |
1065 | 403 | RIJNDAEL_context *ctx = context; |
1066 | | |
1067 | 403 | if (ctx->prefetch_enc_fn) |
1068 | 0 | ctx->prefetch_enc_fn(); |
1069 | | |
1070 | 403 | return ctx->encrypt_fn (ctx, b, a); |
1071 | 403 | } |
1072 | | |
1073 | | |
1074 | | /* Bulk encryption of complete blocks in CFB mode. Caller needs to |
1075 | | make sure that IV is aligned on an unsigned long boundary. This |
1076 | | function is only intended for the bulk encryption feature of |
1077 | | cipher.c. */ |
1078 | | static void |
1079 | | _gcry_aes_cfb_enc (void *context, unsigned char *iv, |
1080 | | void *outbuf_arg, const void *inbuf_arg, |
1081 | | size_t nblocks) |
1082 | 0 | { |
1083 | 0 | RIJNDAEL_context *ctx = context; |
1084 | 0 | unsigned char *outbuf = outbuf_arg; |
1085 | 0 | const unsigned char *inbuf = inbuf_arg; |
1086 | 0 | unsigned int burn_depth = 0; |
1087 | 0 | rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; |
1088 | |
1089 | 0 | if (ctx->prefetch_enc_fn) |
1090 | 0 | ctx->prefetch_enc_fn(); |
1091 | |
1092 | 0 | for ( ;nblocks; nblocks-- ) |
1093 | 0 | { |
1094 | | /* Encrypt the IV. */ |
1095 | 0 | burn_depth = encrypt_fn (ctx, iv, iv); |
1096 | | /* XOR the input with the encrypted IV; the ciphertext becomes the new IV. */ |
1097 | 0 | cipher_block_xor_2dst(outbuf, iv, inbuf, BLOCKSIZE); |
1098 | 0 | outbuf += BLOCKSIZE; |
1099 | 0 | inbuf += BLOCKSIZE; |
1100 | 0 | } |
1101 | |
1102 | 0 | if (burn_depth) |
1103 | 0 | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); |
1104 | 0 | } |
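This is CFB from SP800-38A: C_i = P_i XOR E_K(C_{i-1}) with C_0 = IV. The helper writes C_i both to the output and back into iv so the next iteration chains on it. A sketch of the assumed cipher_block_xor_2dst() semantics (the real helper in bufhelp.h works a word at a time):

    static void
    xor_2dst_sketch (byte *out, byte *iv, const byte *in, size_t len)
    {
      size_t i;
      for (i = 0; i < len; i++)
        out[i] = iv[i] = (byte)(iv[i] ^ in[i]);  /* C_i to both output and IV */
    }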
1105 | | |
1106 | | |
1107 | | /* Bulk encryption of complete blocks in CBC mode. Caller needs to |
1108 | | make sure that IV is aligned on an unsigned long boundary. This |
1109 | | function is only intended for the bulk encryption feature of |
1110 | | cipher.c. */ |
1111 | | static void |
1112 | | _gcry_aes_cbc_enc (void *context, unsigned char *iv, |
1113 | | void *outbuf_arg, const void *inbuf_arg, |
1114 | | size_t nblocks, int cbc_mac) |
1115 | 0 | { |
1116 | 0 | RIJNDAEL_context *ctx = context; |
1117 | 0 | unsigned char *outbuf = outbuf_arg; |
1118 | 0 | const unsigned char *inbuf = inbuf_arg; |
1119 | 0 | unsigned char *last_iv; |
1120 | 0 | unsigned int burn_depth = 0; |
1121 | 0 | rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; |
1122 | |
1123 | 0 | if (ctx->prefetch_enc_fn) |
1124 | 0 | ctx->prefetch_enc_fn(); |
1125 | |
1126 | 0 | last_iv = iv; |
1127 | |
1128 | 0 | for ( ;nblocks; nblocks-- ) |
1129 | 0 | { |
1130 | 0 | cipher_block_xor(outbuf, inbuf, last_iv, BLOCKSIZE); |
1131 | |
1132 | 0 | burn_depth = encrypt_fn (ctx, outbuf, outbuf); |
1133 | |
1134 | 0 | last_iv = outbuf; |
1135 | 0 | inbuf += BLOCKSIZE; |
1136 | 0 | if (!cbc_mac) |
1137 | 0 | outbuf += BLOCKSIZE; |
1138 | 0 | } |
1139 | |
1140 | 0 | if (last_iv != iv) |
1141 | 0 | cipher_block_cpy (iv, last_iv, BLOCKSIZE); |
1142 | |
1143 | 0 | if (burn_depth) |
1144 | 0 | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); |
1145 | 0 | } |
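This is CBC from SP800-38A: C_0 = IV and C_i = E_K(P_i XOR C_{i-1}). When cbc_mac is non-zero, outbuf is deliberately not advanced, so each C_i overwrites its predecessor and the 16 bytes left in the first output block are C_n, the CBC-MAC tag; last_iv still tracks the most recent ciphertext block, so iv is updated correctly in both cases.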
1146 | | |
1147 | | |
1148 | | /* Bulk encryption of complete blocks in CTR mode. Caller needs to |
1149 | | make sure that CTR is aligned on a 16 byte boundary when AES-NI is used; |
1150 | | the minimum alignment is that of a u32. This function is only intended |
1151 | | for the bulk encryption feature of cipher.c. CTR is expected to be |
1152 | | of size BLOCKSIZE. */ |
1153 | | static void |
1154 | | _gcry_aes_ctr_enc (void *context, unsigned char *ctr, |
1155 | | void *outbuf_arg, const void *inbuf_arg, |
1156 | | size_t nblocks) |
1157 | 0 | { |
1158 | 0 | RIJNDAEL_context *ctx = context; |
1159 | 0 | unsigned char *outbuf = outbuf_arg; |
1160 | 0 | const unsigned char *inbuf = inbuf_arg; |
1161 | 0 | unsigned int burn_depth = 0; |
1162 | 0 | union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } tmp; |
1163 | 0 | rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; |
1164 | |
1165 | 0 | if (ctx->prefetch_enc_fn) |
1166 | 0 | ctx->prefetch_enc_fn(); |
1167 | |
1168 | 0 | for ( ;nblocks; nblocks-- ) |
1169 | 0 | { |
1170 | | /* Encrypt the counter. */ |
1171 | 0 | burn_depth = encrypt_fn (ctx, tmp.x1, ctr); |
1172 | | /* XOR the input with the encrypted counter and store in output. */ |
1173 | 0 | cipher_block_xor(outbuf, tmp.x1, inbuf, BLOCKSIZE); |
1174 | 0 | outbuf += BLOCKSIZE; |
1175 | 0 | inbuf += BLOCKSIZE; |
1176 | | /* Increment the counter. */ |
1177 | 0 | cipher_block_add(ctr, 1, BLOCKSIZE); |
1178 | 0 | } |
1179 | |
1180 | 0 | wipememory(&tmp, sizeof(tmp)); |
1181 | |
1182 | 0 | if (burn_depth) |
1183 | 0 | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); |
1184 | 0 | } |
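The assumed behaviour of cipher_block_add(ctr, 1, BLOCKSIZE) is a big-endian increment with carry over the whole 16-byte counter, per SP800-38A; a sketch:

    static void
    ctr_add_sketch (byte *ctr, size_t blocksize)
    {
      size_t i;
      for (i = blocksize; i > 0; i--)
        if (++ctr[i - 1] != 0)   /* stop once a byte does not wrap to zero */
          break;
    }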
1185 | | |
1186 | | |
1187 | | |
1188 | | #if !defined(USE_ARM_ASM) && !defined(USE_AMD64_ASM) |
1189 | | /* Decrypt one block. A and B may be the same. */ |
1190 | | static unsigned int |
1191 | | do_decrypt_fn (const RIJNDAEL_context *ctx, unsigned char *b, |
1192 | | const unsigned char *a) |
1193 | | { |
1194 | | #define rk (ctx->keyschdec32) |
1195 | | int rounds = ctx->rounds; |
1196 | | int r; |
1197 | | u32 sa[4]; |
1198 | | u32 sb[4]; |
1199 | | |
1200 | | sb[0] = buf_get_le32(a + 0); |
1201 | | sb[1] = buf_get_le32(a + 4); |
1202 | | sb[2] = buf_get_le32(a + 8); |
1203 | | sb[3] = buf_get_le32(a + 12); |
1204 | | |
1205 | | sa[0] = sb[0] ^ rk[rounds][0]; |
1206 | | sa[1] = sb[1] ^ rk[rounds][1]; |
1207 | | sa[2] = sb[2] ^ rk[rounds][2]; |
1208 | | sa[3] = sb[3] ^ rk[rounds][3]; |
1209 | | |
1210 | | for (r = rounds - 1; r > 1; r--) |
1211 | | { |
1212 | | sb[0] = rol(decT[(byte)(sa[0] >> (0 * 8))], (0 * 8)); |
1213 | | sb[1] = rol(decT[(byte)(sa[0] >> (1 * 8))], (1 * 8)); |
1214 | | sb[2] = rol(decT[(byte)(sa[0] >> (2 * 8))], (2 * 8)); |
1215 | | sb[3] = rol(decT[(byte)(sa[0] >> (3 * 8))], (3 * 8)); |
1216 | | sa[0] = rk[r][0] ^ sb[0]; |
1217 | | |
1218 | | sb[1] ^= rol(decT[(byte)(sa[1] >> (0 * 8))], (0 * 8)); |
1219 | | sb[2] ^= rol(decT[(byte)(sa[1] >> (1 * 8))], (1 * 8)); |
1220 | | sb[3] ^= rol(decT[(byte)(sa[1] >> (2 * 8))], (2 * 8)); |
1221 | | sa[0] ^= rol(decT[(byte)(sa[1] >> (3 * 8))], (3 * 8)); |
1222 | | sa[1] = rk[r][1] ^ sb[1]; |
1223 | | |
1224 | | sb[2] ^= rol(decT[(byte)(sa[2] >> (0 * 8))], (0 * 8)); |
1225 | | sb[3] ^= rol(decT[(byte)(sa[2] >> (1 * 8))], (1 * 8)); |
1226 | | sa[0] ^= rol(decT[(byte)(sa[2] >> (2 * 8))], (2 * 8)); |
1227 | | sa[1] ^= rol(decT[(byte)(sa[2] >> (3 * 8))], (3 * 8)); |
1228 | | sa[2] = rk[r][2] ^ sb[2]; |
1229 | | |
1230 | | sb[3] ^= rol(decT[(byte)(sa[3] >> (0 * 8))], (0 * 8)); |
1231 | | sa[0] ^= rol(decT[(byte)(sa[3] >> (1 * 8))], (1 * 8)); |
1232 | | sa[1] ^= rol(decT[(byte)(sa[3] >> (2 * 8))], (2 * 8)); |
1233 | | sa[2] ^= rol(decT[(byte)(sa[3] >> (3 * 8))], (3 * 8)); |
1234 | | sa[3] = rk[r][3] ^ sb[3]; |
1235 | | |
1236 | | r--; |
1237 | | |
1238 | | sb[0] = rol(decT[(byte)(sa[0] >> (0 * 8))], (0 * 8)); |
1239 | | sb[1] = rol(decT[(byte)(sa[0] >> (1 * 8))], (1 * 8)); |
1240 | | sb[2] = rol(decT[(byte)(sa[0] >> (2 * 8))], (2 * 8)); |
1241 | | sb[3] = rol(decT[(byte)(sa[0] >> (3 * 8))], (3 * 8)); |
1242 | | sa[0] = rk[r][0] ^ sb[0]; |
1243 | | |
1244 | | sb[1] ^= rol(decT[(byte)(sa[1] >> (0 * 8))], (0 * 8)); |
1245 | | sb[2] ^= rol(decT[(byte)(sa[1] >> (1 * 8))], (1 * 8)); |
1246 | | sb[3] ^= rol(decT[(byte)(sa[1] >> (2 * 8))], (2 * 8)); |
1247 | | sa[0] ^= rol(decT[(byte)(sa[1] >> (3 * 8))], (3 * 8)); |
1248 | | sa[1] = rk[r][1] ^ sb[1]; |
1249 | | |
1250 | | sb[2] ^= rol(decT[(byte)(sa[2] >> (0 * 8))], (0 * 8)); |
1251 | | sb[3] ^= rol(decT[(byte)(sa[2] >> (1 * 8))], (1 * 8)); |
1252 | | sa[0] ^= rol(decT[(byte)(sa[2] >> (2 * 8))], (2 * 8)); |
1253 | | sa[1] ^= rol(decT[(byte)(sa[2] >> (3 * 8))], (3 * 8)); |
1254 | | sa[2] = rk[r][2] ^ sb[2]; |
1255 | | |
1256 | | sb[3] ^= rol(decT[(byte)(sa[3] >> (0 * 8))], (0 * 8)); |
1257 | | sa[0] ^= rol(decT[(byte)(sa[3] >> (1 * 8))], (1 * 8)); |
1258 | | sa[1] ^= rol(decT[(byte)(sa[3] >> (2 * 8))], (2 * 8)); |
1259 | | sa[2] ^= rol(decT[(byte)(sa[3] >> (3 * 8))], (3 * 8)); |
1260 | | sa[3] = rk[r][3] ^ sb[3]; |
1261 | | } |
1262 | | |
1263 | | sb[0] = rol(decT[(byte)(sa[0] >> (0 * 8))], (0 * 8)); |
1264 | | sb[1] = rol(decT[(byte)(sa[0] >> (1 * 8))], (1 * 8)); |
1265 | | sb[2] = rol(decT[(byte)(sa[0] >> (2 * 8))], (2 * 8)); |
1266 | | sb[3] = rol(decT[(byte)(sa[0] >> (3 * 8))], (3 * 8)); |
1267 | | sa[0] = rk[1][0] ^ sb[0]; |
1268 | | |
1269 | | sb[1] ^= rol(decT[(byte)(sa[1] >> (0 * 8))], (0 * 8)); |
1270 | | sb[2] ^= rol(decT[(byte)(sa[1] >> (1 * 8))], (1 * 8)); |
1271 | | sb[3] ^= rol(decT[(byte)(sa[1] >> (2 * 8))], (2 * 8)); |
1272 | | sa[0] ^= rol(decT[(byte)(sa[1] >> (3 * 8))], (3 * 8)); |
1273 | | sa[1] = rk[1][1] ^ sb[1]; |
1274 | | |
1275 | | sb[2] ^= rol(decT[(byte)(sa[2] >> (0 * 8))], (0 * 8)); |
1276 | | sb[3] ^= rol(decT[(byte)(sa[2] >> (1 * 8))], (1 * 8)); |
1277 | | sa[0] ^= rol(decT[(byte)(sa[2] >> (2 * 8))], (2 * 8)); |
1278 | | sa[1] ^= rol(decT[(byte)(sa[2] >> (3 * 8))], (3 * 8)); |
1279 | | sa[2] = rk[1][2] ^ sb[2]; |
1280 | | |
1281 | | sb[3] ^= rol(decT[(byte)(sa[3] >> (0 * 8))], (0 * 8)); |
1282 | | sa[0] ^= rol(decT[(byte)(sa[3] >> (1 * 8))], (1 * 8)); |
1283 | | sa[1] ^= rol(decT[(byte)(sa[3] >> (2 * 8))], (2 * 8)); |
1284 | | sa[2] ^= rol(decT[(byte)(sa[3] >> (3 * 8))], (3 * 8)); |
1285 | | sa[3] = rk[1][3] ^ sb[3]; |
1286 | | |
1287 | | /* Last round is special. */ |
1288 | | sb[0] = (u32)inv_sbox[(byte)(sa[0] >> (0 * 8))] << (0 * 8); |
1289 | | sb[1] = (u32)inv_sbox[(byte)(sa[0] >> (1 * 8))] << (1 * 8); |
1290 | | sb[2] = (u32)inv_sbox[(byte)(sa[0] >> (2 * 8))] << (2 * 8); |
1291 | | sb[3] = (u32)inv_sbox[(byte)(sa[0] >> (3 * 8))] << (3 * 8); |
1292 | | sa[0] = sb[0] ^ rk[0][0]; |
1293 | | |
1294 | | sb[1] ^= (u32)inv_sbox[(byte)(sa[1] >> (0 * 8))] << (0 * 8); |
1295 | | sb[2] ^= (u32)inv_sbox[(byte)(sa[1] >> (1 * 8))] << (1 * 8); |
1296 | | sb[3] ^= (u32)inv_sbox[(byte)(sa[1] >> (2 * 8))] << (2 * 8); |
1297 | | sa[0] ^= (u32)inv_sbox[(byte)(sa[1] >> (3 * 8))] << (3 * 8); |
1298 | | sa[1] = sb[1] ^ rk[0][1]; |
1299 | | |
1300 | | sb[2] ^= (u32)inv_sbox[(byte)(sa[2] >> (0 * 8))] << (0 * 8); |
1301 | | sb[3] ^= (u32)inv_sbox[(byte)(sa[2] >> (1 * 8))] << (1 * 8); |
1302 | | sa[0] ^= (u32)inv_sbox[(byte)(sa[2] >> (2 * 8))] << (2 * 8); |
1303 | | sa[1] ^= (u32)inv_sbox[(byte)(sa[2] >> (3 * 8))] << (3 * 8); |
1304 | | sa[2] = sb[2] ^ rk[0][2]; |
1305 | | |
1306 | | sb[3] ^= (u32)inv_sbox[(byte)(sa[3] >> (0 * 8))] << (0 * 8); |
1307 | | sa[0] ^= (u32)inv_sbox[(byte)(sa[3] >> (1 * 8))] << (1 * 8); |
1308 | | sa[1] ^= (u32)inv_sbox[(byte)(sa[3] >> (2 * 8))] << (2 * 8); |
1309 | | sa[2] ^= (u32)inv_sbox[(byte)(sa[3] >> (3 * 8))] << (3 * 8); |
1310 | | sa[3] = sb[3] ^ rk[0][3]; |
1311 | | |
1312 | | buf_put_le32(b + 0, sa[0]); |
1313 | | buf_put_le32(b + 4, sa[1]); |
1314 | | buf_put_le32(b + 8, sa[2]); |
1315 | | buf_put_le32(b + 12, sa[3]); |
1316 | | #undef rk |
1317 | | |
1318 | | return (56+2*sizeof(int)); |
1319 | | } |
1320 | | #endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/ |
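 | | /* By convention these per-block functions return an estimate of the
 | |  stack bytes they dirtied (the 56+2*sizeof(int) above); callers feed
 | |  that to _gcry_burn_stack() so key-dependent temporaries do not
 | |  linger on the stack. */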
1321 | | |
1322 | | |
1323 | | /* Decrypt one block. AX and BX may be the same. */ |
1324 | | static unsigned int |
1325 | | do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx, |
1326 | | const unsigned char *ax) |
1327 | 0 | { |
1328 | 0 | #ifdef USE_AMD64_ASM |
1329 | 0 | return _gcry_aes_amd64_decrypt_block(ctx->keyschdec, bx, ax, ctx->rounds, |
1330 | 0 | dec_tables.T); |
1331 | | #elif defined(USE_ARM_ASM) |
1332 | | return _gcry_aes_arm_decrypt_block(ctx->keyschdec, bx, ax, ctx->rounds, |
1333 | | dec_tables.T); |
1334 | | #else |
1335 | | return do_decrypt_fn (ctx, bx, ax); |
1336 | | #endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/ |
1337 | 0 | } |
1338 | | |
1339 | | |
1340 | | static inline void |
1341 | | check_decryption_preparation (RIJNDAEL_context *ctx) |
1342 | 15 | { |
1343 | 15 | if ( !ctx->decryption_prepared ) |
1344 | 15 | { |
1345 | 15 | ctx->prepare_decryption ( ctx ); |
1346 | 15 | ctx->decryption_prepared = 1; |
1347 | 15 | } |
1348 | 15 | } |
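 | | /* The decryption key schedule is derived lazily on first use: as the
 | |  CFB and OCB-auth paths below show, several modes only ever run the
 | |  forward cipher, and such contexts never pay for
 | |  prepare_decryption. */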
1349 | | |
1350 | | |
1351 | | static unsigned int |
1352 | | rijndael_decrypt (void *context, byte *b, const byte *a) |
1353 | 15 | { |
1354 | 15 | RIJNDAEL_context *ctx = context; |
1355 | | |
1356 | 15 | check_decryption_preparation (ctx); |
1357 | | |
1358 | 15 | if (ctx->prefetch_dec_fn) |
1359 | 0 | ctx->prefetch_dec_fn(); |
1360 | | |
1361 | 15 | return ctx->decrypt_fn (ctx, b, a); |
1362 | 15 | } |
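 | | /* When set, prefetch_dec_fn touches the decryption tables before the
 | |  first lookup, which is meant to blunt cache-timing leakage of the
 | |  table-driven software path; the hardware-backed paths leave these
 | |  hooks NULL. */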
1363 | | |
1364 | | |
1365 | | /* Bulk decryption of complete blocks in CFB mode. Caller needs to |
1366 | | make sure that IV is aligned on an unsigned long boundary. This |
1367 | | function is only intended for the bulk decryption feature of
1368 | | cipher.c. */ |
1369 | | static void |
1370 | | _gcry_aes_cfb_dec (void *context, unsigned char *iv, |
1371 | | void *outbuf_arg, const void *inbuf_arg, |
1372 | | size_t nblocks) |
1373 | 0 | { |
1374 | 0 | RIJNDAEL_context *ctx = context; |
1375 | 0 | unsigned char *outbuf = outbuf_arg; |
1376 | 0 | const unsigned char *inbuf = inbuf_arg; |
1377 | 0 | unsigned int burn_depth = 0; |
1378 | 0 | rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; |
1379 | |
1380 | 0 | if (ctx->prefetch_enc_fn) |
1381 | 0 | ctx->prefetch_enc_fn(); |
1382 | |
1383 | 0 | for ( ;nblocks; nblocks-- ) |
1384 | 0 | { |
1385 | 0 | burn_depth = encrypt_fn (ctx, iv, iv); |
1386 | 0 | cipher_block_xor_n_copy(outbuf, iv, inbuf, BLOCKSIZE); |
1387 | 0 | outbuf += BLOCKSIZE; |
1388 | 0 | inbuf += BLOCKSIZE; |
1389 | 0 | } |
1390 | |
1391 | 0 | if (burn_depth) |
1392 | 0 | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); |
1393 | 0 | } |
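 | | /* CFB decryption needs only the forward cipher:
 | |      P_i = C_i xor E_K(C_{i-1}),   with C_0 = IV.
 | |  cipher_block_xor_n_copy() emits P_i and retires C_i into IV in one
 | |  pass, so no decryption key schedule is ever prepared here. */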
1394 | | |
1395 | | |
1396 | | /* Bulk decryption of complete blocks in CBC mode. Caller needs to |
1397 | | make sure that IV is aligned on an unsigned long boundary. This |
1398 | | function is only intended for the bulk decryption feature of
1399 | | cipher.c. */ |
1400 | | static void |
1401 | | _gcry_aes_cbc_dec (void *context, unsigned char *iv, |
1402 | | void *outbuf_arg, const void *inbuf_arg, |
1403 | | size_t nblocks) |
1404 | 0 | { |
1405 | 0 | RIJNDAEL_context *ctx = context; |
1406 | 0 | unsigned char *outbuf = outbuf_arg; |
1407 | 0 | const unsigned char *inbuf = inbuf_arg; |
1408 | 0 | unsigned int burn_depth = 0; |
1409 | 0 | unsigned char savebuf[BLOCKSIZE] ATTR_ALIGNED_16; |
1410 | 0 | rijndael_cryptfn_t decrypt_fn = ctx->decrypt_fn; |
1411 | |
1412 | 0 | check_decryption_preparation (ctx); |
1413 | |
1414 | 0 | if (ctx->prefetch_dec_fn) |
1415 | 0 | ctx->prefetch_dec_fn(); |
1416 | |
1417 | 0 | for ( ;nblocks; nblocks-- ) |
1418 | 0 | { |
1419 | | /* INBUF is needed later and it may be identical to OUTBUF, so store |
1420 | | the intermediate result to SAVEBUF. */ |
1421 | |
1422 | 0 | burn_depth = decrypt_fn (ctx, savebuf, inbuf); |
1423 | |
1424 | 0 | cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, BLOCKSIZE); |
1425 | 0 | inbuf += BLOCKSIZE; |
1426 | 0 | outbuf += BLOCKSIZE; |
1427 | 0 | } |
1428 | |
1429 | 0 | wipememory(savebuf, sizeof(savebuf)); |
1430 | |
1431 | 0 | if (burn_depth) |
1432 | 0 | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); |
1433 | 0 | } |
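 | | /* CBC decryption computes P_i = D_K(C_i) xor C_{i-1}.  SAVEBUF holds
 | |  D_K(C_i) because INBUF may alias OUTBUF;
 | |  cipher_block_xor_n_copy_2() then emits P_i and moves C_i into IV
 | |  for the next block.  SAVEBUF is wiped afterwards because it held
 | |  raw block-cipher output. */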
1434 | | |
1435 | | |
1436 | | |
1437 | | /* Bulk encryption/decryption of complete blocks in OCB mode. */ |
1438 | | static size_t |
1439 | | _gcry_aes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, |
1440 | | const void *inbuf_arg, size_t nblocks, int encrypt) |
1441 | 0 | { |
1442 | 0 | RIJNDAEL_context *ctx = (void *)&c->context.c; |
1443 | 0 | unsigned char *outbuf = outbuf_arg; |
1444 | 0 | const unsigned char *inbuf = inbuf_arg; |
1445 | 0 | unsigned int burn_depth = 0; |
1446 | |
1447 | 0 | if (encrypt) |
1448 | 0 | { |
1449 | 0 | union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp; |
1450 | 0 | rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; |
1451 | |
1452 | 0 | if (ctx->prefetch_enc_fn) |
1453 | 0 | ctx->prefetch_enc_fn(); |
1454 | |
1455 | 0 | for ( ;nblocks; nblocks-- ) |
1456 | 0 | { |
1457 | 0 | u64 i = ++c->u_mode.ocb.data_nblocks; |
1458 | 0 | const unsigned char *l = ocb_get_l(c, i); |
1459 | | |
1460 | | /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ |
1461 | 0 | cipher_block_xor_1 (c->u_iv.iv, l, BLOCKSIZE); |
1462 | 0 | cipher_block_cpy (l_tmp.x1, inbuf, BLOCKSIZE); |
1463 | | /* Checksum_i = Checksum_{i-1} xor P_i */ |
1464 | 0 | cipher_block_xor_1 (c->u_ctr.ctr, l_tmp.x1, BLOCKSIZE); |
1465 | | /* C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i) */ |
1466 | 0 | cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); |
1467 | 0 | burn_depth = encrypt_fn (ctx, l_tmp.x1, l_tmp.x1); |
1468 | 0 | cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); |
1469 | 0 | cipher_block_cpy (outbuf, l_tmp.x1, BLOCKSIZE); |
1470 | |
1471 | 0 | inbuf += BLOCKSIZE; |
1472 | 0 | outbuf += BLOCKSIZE; |
1473 | 0 | } |
1474 | 0 | } |
1475 | 0 | else |
1476 | 0 | { |
1477 | 0 | union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp; |
1478 | 0 | rijndael_cryptfn_t decrypt_fn = ctx->decrypt_fn; |
1479 | |
1480 | 0 | check_decryption_preparation (ctx); |
1481 | |
1482 | 0 | if (ctx->prefetch_dec_fn) |
1483 | 0 | ctx->prefetch_dec_fn(); |
1484 | |
1485 | 0 | for ( ;nblocks; nblocks-- ) |
1486 | 0 | { |
1487 | 0 | u64 i = ++c->u_mode.ocb.data_nblocks; |
1488 | 0 | const unsigned char *l = ocb_get_l(c, i); |
1489 | | |
1490 | | /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ |
1491 | 0 | cipher_block_xor_1 (c->u_iv.iv, l, BLOCKSIZE); |
1492 | 0 | cipher_block_cpy (l_tmp.x1, inbuf, BLOCKSIZE); |
1493 | | /* P_i = Offset_i xor DECIPHER(K, C_i xor Offset_i) */
1494 | 0 | cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); |
1495 | 0 | burn_depth = decrypt_fn (ctx, l_tmp.x1, l_tmp.x1); |
1496 | 0 | cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); |
1497 | | /* Checksum_i = Checksum_{i-1} xor P_i */ |
1498 | 0 | cipher_block_xor_1 (c->u_ctr.ctr, l_tmp.x1, BLOCKSIZE); |
1499 | 0 | cipher_block_cpy (outbuf, l_tmp.x1, BLOCKSIZE); |
1500 | |
1501 | 0 | inbuf += BLOCKSIZE; |
1502 | 0 | outbuf += BLOCKSIZE; |
1503 | 0 | } |
1504 | 0 | } |
1505 | |
1506 | 0 | if (burn_depth) |
1507 | 0 | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); |
1508 | |
1509 | 0 | return 0; |
1510 | 0 | } |
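 | | /* Note the asymmetry between the two OCB branches: the checksum is
 | |  defined over the plaintext, so encryption folds P_i in before the
 | |  cipher call and decryption folds it in after.  Staging each block
 | |  through the aligned l_tmp union keeps in-place operation
 | |  (outbuf == inbuf) safe. */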
1511 | | |
1512 | | |
1513 | | /* Bulk authentication of complete blocks in OCB mode. */ |
1514 | | static size_t |
1515 | | _gcry_aes_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, size_t nblocks) |
1516 | 0 | { |
1517 | 0 | RIJNDAEL_context *ctx = (void *)&c->context.c; |
1518 | 0 | const unsigned char *abuf = abuf_arg; |
1519 | 0 | unsigned int burn_depth = 0; |
1520 | 0 | union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp; |
1521 | 0 | rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; |
1522 | |
1523 | 0 | if (ctx->prefetch_enc_fn) |
1524 | 0 | ctx->prefetch_enc_fn(); |
1525 | |
1526 | 0 | for ( ;nblocks; nblocks-- ) |
1527 | 0 | { |
1528 | 0 | u64 i = ++c->u_mode.ocb.aad_nblocks; |
1529 | 0 | const unsigned char *l = ocb_get_l(c, i); |
1530 | | |
1531 | | /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ |
1532 | 0 | cipher_block_xor_1 (c->u_mode.ocb.aad_offset, l, BLOCKSIZE); |
1533 | | /* Sum_i = Sum_{i-1} xor ENCIPHER(K, A_i xor Offset_i) */ |
1534 | 0 | cipher_block_xor (l_tmp.x1, c->u_mode.ocb.aad_offset, abuf, |
1535 | 0 | BLOCKSIZE); |
1536 | 0 | burn_depth = encrypt_fn (ctx, l_tmp.x1, l_tmp.x1); |
1537 | 0 | cipher_block_xor_1 (c->u_mode.ocb.aad_sum, l_tmp.x1, BLOCKSIZE); |
1538 | |
1539 | 0 | abuf += BLOCKSIZE; |
1540 | 0 | } |
1541 | |
1542 | 0 | wipememory(&l_tmp, sizeof(l_tmp)); |
1543 | |
1544 | 0 | if (burn_depth) |
1545 | 0 | _gcry_burn_stack (burn_depth + 4 * sizeof(void *)); |
1546 | |
1547 | 0 | return 0; |
1548 | 0 | } |
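 | | /* AAD processing mirrors the data path but only accumulates
 | |      Sum_i = Sum_{i-1} xor E_K(A_i xor Offset_i);
 | |  nothing is written out, and l_tmp is wiped because it briefly held
 | |  raw cipher output. */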
1549 | | |
1550 | | |
1551 | | /* Bulk encryption/decryption of complete blocks in XTS mode. */ |
1552 | | static void |
1553 | | _gcry_aes_xts_crypt (void *context, unsigned char *tweak, |
1554 | | void *outbuf_arg, const void *inbuf_arg, |
1555 | | size_t nblocks, int encrypt) |
1556 | 0 | { |
1557 | 0 | RIJNDAEL_context *ctx = context; |
1558 | 0 | unsigned char *outbuf = outbuf_arg; |
1559 | 0 | const unsigned char *inbuf = inbuf_arg; |
1560 | 0 | unsigned int burn_depth = 0; |
1561 | 0 | rijndael_cryptfn_t crypt_fn; |
1562 | 0 | u64 tweak_lo, tweak_hi, tweak_next_lo, tweak_next_hi, tmp_lo, tmp_hi, carry; |
1563 | |
1564 | 0 | if (encrypt) |
1565 | 0 | { |
1566 | 0 | if (ctx->prefetch_enc_fn) |
1567 | 0 | ctx->prefetch_enc_fn(); |
1568 | |
1569 | 0 | crypt_fn = ctx->encrypt_fn; |
1570 | 0 | } |
1571 | 0 | else |
1572 | 0 | { |
1573 | 0 | check_decryption_preparation (ctx); |
1574 | |
1575 | 0 | if (ctx->prefetch_dec_fn) |
1576 | 0 | ctx->prefetch_dec_fn(); |
1577 | |
1578 | 0 | crypt_fn = ctx->decrypt_fn; |
1579 | 0 | } |
1580 | |
1581 | 0 | tweak_next_lo = buf_get_le64 (tweak + 0); |
1582 | 0 | tweak_next_hi = buf_get_le64 (tweak + 8); |
1583 | |
1584 | 0 | while (nblocks) |
1585 | 0 | { |
1586 | 0 | tweak_lo = tweak_next_lo; |
1587 | 0 | tweak_hi = tweak_next_hi; |
1588 | | |
1589 | | /* Xor-Encrypt/Decrypt-Xor block. */ |
1590 | 0 | tmp_lo = buf_get_le64 (inbuf + 0) ^ tweak_lo; |
1591 | 0 | tmp_hi = buf_get_le64 (inbuf + 8) ^ tweak_hi; |
1592 | |
1593 | 0 | buf_put_le64 (outbuf + 0, tmp_lo); |
1594 | 0 | buf_put_le64 (outbuf + 8, tmp_hi); |
1595 | | |
1596 | | /* Generate next tweak. */ |
1597 | 0 | carry = -(tweak_next_hi >> 63) & 0x87; |
1598 | 0 | tweak_next_hi = (tweak_next_hi << 1) + (tweak_next_lo >> 63); |
1599 | 0 | tweak_next_lo = (tweak_next_lo << 1) ^ carry; |
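 | |       /* The three lines above multiply the 128-bit tweak by x in
 | |          GF(2^128) without branching: CARRY is 0x87 exactly when
 | |          bit 127 is set, folding the reduction polynomial
 | |          x^128 + x^7 + x^2 + x + 1 into the low byte.  E.g. with
 | |          tweak_next_hi = 0x8000000000000000 and tweak_next_lo = 0,
 | |          the result is hi = 0, lo = 0x87. */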
1600 | |
1601 | 0 | burn_depth = crypt_fn (ctx, outbuf, outbuf); |
1602 | |
1603 | 0 | buf_put_le64 (outbuf + 0, buf_get_le64 (outbuf + 0) ^ tweak_lo); |
1604 | 0 | buf_put_le64 (outbuf + 8, buf_get_le64 (outbuf + 8) ^ tweak_hi); |
1605 | |
1606 | 0 | outbuf += GCRY_XTS_BLOCK_LEN; |
1607 | 0 | inbuf += GCRY_XTS_BLOCK_LEN; |
1608 | 0 | nblocks--; |
1609 | 0 | } |
1610 | |
1611 | 0 | buf_put_le64 (tweak + 0, tweak_next_lo); |
1612 | 0 | buf_put_le64 (tweak + 8, tweak_next_hi); |
1613 | |
1614 | 0 | if (burn_depth) |
1615 | 0 | _gcry_burn_stack (burn_depth + 5 * sizeof(void *)); |
1616 | 0 | } |
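 | | /* Per block this is the XTS xor-encrypt-xor construction:
 | |      C_i = E_K1(P_i xor T_i) xor T_i   (D_K1 when decrypting),
 | |  where the tweak handed in is expected to have been encrypted under
 | |  the tweak key already by the generic XTS layer; storing the stepped
 | |  tweak back allows a later call to continue the same data unit. */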
1617 | | |
1618 | | |
1619 | | /* Run the self-tests for AES 128. Returns NULL on success. */ |
1620 | | static const char* |
1621 | | selftest_basic_128 (void) |
1622 | 5 | { |
1623 | 5 | RIJNDAEL_context *ctx; |
1624 | 5 | unsigned char ctxmem[sizeof(*ctx) + 16]; |
1625 | 5 | unsigned char scratch[16]; |
1626 | 5 | cipher_bulk_ops_t bulk_ops; |
1627 | | |
1628 | | /* The test vectors are from the reference AES distribution; more or
1629 | | less randomly taken from ecb_tbl.txt (I=42,81,14). */
1630 | 5 | #if 1 |
1631 | 5 | static const unsigned char plaintext_128[16] = |
1632 | 5 | { |
1633 | 5 | 0x01,0x4B,0xAF,0x22,0x78,0xA6,0x9D,0x33, |
1634 | 5 | 0x1D,0x51,0x80,0x10,0x36,0x43,0xE9,0x9A |
1635 | 5 | }; |
1636 | 5 | static const unsigned char key_128[16] = |
1637 | 5 | { |
1638 | 5 | 0xE8,0xE9,0xEA,0xEB,0xED,0xEE,0xEF,0xF0, |
1639 | 5 | 0xF2,0xF3,0xF4,0xF5,0xF7,0xF8,0xF9,0xFA |
1640 | 5 | }; |
1641 | 5 | static const unsigned char ciphertext_128[16] = |
1642 | 5 | { |
1643 | 5 | 0x67,0x43,0xC3,0xD1,0x51,0x9A,0xB4,0xF2, |
1644 | 5 | 0xCD,0x9A,0x78,0xAB,0x09,0xA5,0x11,0xBD |
1645 | 5 | }; |
1646 | | #else |
1647 | | /* Test vectors from fips-197, appendix C. */ |
1648 | | # warning debug test vectors in use |
1649 | | static const unsigned char plaintext_128[16] = |
1650 | | { |
1651 | | 0x00,0x11,0x22,0x33,0x44,0x55,0x66,0x77, |
1652 | | 0x88,0x99,0xaa,0xbb,0xcc,0xdd,0xee,0xff |
1653 | | }; |
1654 | | static const unsigned char key_128[16] = |
1655 | | { |
1656 | | 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07, |
1657 | | 0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f |
1658 | | /* 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, */ |
1659 | | /* 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c */ |
1660 | | }; |
1661 | | static const unsigned char ciphertext_128[16] = |
1662 | | { |
1663 | | 0x69,0xc4,0xe0,0xd8,0x6a,0x7b,0x04,0x30, |
1664 | | 0xd8,0xcd,0xb7,0x80,0x70,0xb4,0xc5,0x5a |
1665 | | }; |
1666 | | #endif |
1667 | | |
1668 | 5 | ctx = (void *)(ctxmem + ((16 - ((uintptr_t)ctxmem & 15)) & 15)); |
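 | | /* CTXMEM over-allocates by 16 bytes so the context can be bumped up
 | |  to the next 16-byte boundary: the expression adds
 | |  (16 - addr mod 16) mod 16, i.e. 0..15 bytes, giving the alignment
 | |  RIJNDAEL_context wants.  The 192- and 256-bit tests below use the
 | |  same trick. */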
1669 | | |
1670 | 5 | rijndael_setkey (ctx, key_128, sizeof (key_128), &bulk_ops); |
1671 | 5 | rijndael_encrypt (ctx, scratch, plaintext_128); |
1672 | 5 | if (memcmp (scratch, ciphertext_128, sizeof (ciphertext_128))) |
1673 | 0 | { |
1674 | 0 | return "AES-128 test encryption failed."; |
1675 | 0 | } |
1676 | 5 | rijndael_decrypt (ctx, scratch, scratch); |
1677 | 5 | if (memcmp (scratch, plaintext_128, sizeof (plaintext_128))) |
1678 | 0 | return "AES-128 test decryption failed."; |
1679 | | |
1680 | 5 | return NULL; |
1681 | 5 | } |
1682 | | |
1683 | | /* Run the self-tests for AES 192. Returns NULL on success. */ |
1684 | | static const char* |
1685 | | selftest_basic_192 (void) |
1686 | 5 | { |
1687 | 5 | RIJNDAEL_context *ctx; |
1688 | 5 | unsigned char ctxmem[sizeof(*ctx) + 16]; |
1689 | 5 | unsigned char scratch[16]; |
1690 | 5 | cipher_bulk_ops_t bulk_ops; |
1691 | | |
1692 | 5 | static const unsigned char plaintext_192[16] =
1693 | 5 | { |
1694 | 5 | 0x76,0x77,0x74,0x75,0xF1,0xF2,0xF3,0xF4, |
1695 | 5 | 0xF8,0xF9,0xE6,0xE7,0x77,0x70,0x71,0x72 |
1696 | 5 | }; |
1697 | 5 | static const unsigned char key_192[24] =
1698 | 5 | { |
1699 | 5 | 0x04,0x05,0x06,0x07,0x09,0x0A,0x0B,0x0C, |
1700 | 5 | 0x0E,0x0F,0x10,0x11,0x13,0x14,0x15,0x16, |
1701 | 5 | 0x18,0x19,0x1A,0x1B,0x1D,0x1E,0x1F,0x20 |
1702 | 5 | }; |
1703 | 5 | static const unsigned char ciphertext_192[16] = |
1704 | 5 | { |
1705 | 5 | 0x5D,0x1E,0xF2,0x0D,0xCE,0xD6,0xBC,0xBC, |
1706 | 5 | 0x12,0x13,0x1A,0xC7,0xC5,0x47,0x88,0xAA |
1707 | 5 | }; |
1708 | | |
1709 | 5 | ctx = (void *)(ctxmem + ((16 - ((uintptr_t)ctxmem & 15)) & 15)); |
1710 | | |
1711 | 5 | rijndael_setkey (ctx, key_192, sizeof(key_192), &bulk_ops); |
1712 | 5 | rijndael_encrypt (ctx, scratch, plaintext_192); |
1713 | 5 | if (memcmp (scratch, ciphertext_192, sizeof (ciphertext_192))) |
1714 | 0 | { |
1715 | 0 | return "AES-192 test encryption failed."; |
1716 | 0 | } |
1717 | 5 | rijndael_decrypt (ctx, scratch, scratch); |
1718 | 5 | if (memcmp (scratch, plaintext_192, sizeof (plaintext_192))) |
1719 | 0 | return "AES-192 test decryption failed."; |
1720 | | |
1721 | 5 | return NULL; |
1722 | 5 | } |
1723 | | |
1724 | | |
1725 | | /* Run the self-tests for AES 256. Returns NULL on success. */ |
1726 | | static const char* |
1727 | | selftest_basic_256 (void) |
1728 | 5 | { |
1729 | 5 | RIJNDAEL_context *ctx; |
1730 | 5 | unsigned char ctxmem[sizeof(*ctx) + 16]; |
1731 | 5 | unsigned char scratch[16]; |
1732 | 5 | cipher_bulk_ops_t bulk_ops; |
1733 | | |
1734 | 5 | static const unsigned char plaintext_256[16] =
1735 | 5 | { |
1736 | 5 | 0x06,0x9A,0x00,0x7F,0xC7,0x6A,0x45,0x9F, |
1737 | 5 | 0x98,0xBA,0xF9,0x17,0xFE,0xDF,0x95,0x21 |
1738 | 5 | }; |
1739 | 5 | static const unsigned char key_256[32] =
1740 | 5 | { |
1741 | 5 | 0x08,0x09,0x0A,0x0B,0x0D,0x0E,0x0F,0x10, |
1742 | 5 | 0x12,0x13,0x14,0x15,0x17,0x18,0x19,0x1A, |
1743 | 5 | 0x1C,0x1D,0x1E,0x1F,0x21,0x22,0x23,0x24, |
1744 | 5 | 0x26,0x27,0x28,0x29,0x2B,0x2C,0x2D,0x2E |
1745 | 5 | }; |
1746 | 5 | static const unsigned char ciphertext_256[16] = |
1747 | 5 | { |
1748 | 5 | 0x08,0x0E,0x95,0x17,0xEB,0x16,0x77,0x71, |
1749 | 5 | 0x9A,0xCF,0x72,0x80,0x86,0x04,0x0A,0xE3 |
1750 | 5 | }; |
1751 | | |
1752 | 5 | ctx = (void *)(ctxmem + ((16 - ((uintptr_t)ctxmem & 15)) & 15)); |
1753 | | |
1754 | 5 | rijndael_setkey (ctx, key_256, sizeof(key_256), &bulk_ops); |
1755 | 5 | rijndael_encrypt (ctx, scratch, plaintext_256); |
1756 | 5 | if (memcmp (scratch, ciphertext_256, sizeof (ciphertext_256))) |
1757 | 0 | { |
1758 | 0 | return "AES-256 test encryption failed."; |
1759 | 0 | } |
1760 | 5 | rijndael_decrypt (ctx, scratch, scratch); |
1761 | 5 | if (memcmp (scratch, plaintext_256, sizeof (plaintext_256))) |
1762 | 0 | return "AES-256 test decryption failed."; |
1763 | | |
1764 | 5 | return NULL; |
1765 | 5 | } |
1766 | | |
1767 | | |
1768 | | /* Run all the self-tests and return NULL on success. This function |
1769 | | is used for the on-the-fly self-tests. */ |
1770 | | static const char * |
1771 | | selftest (void) |
1772 | 5 | { |
1773 | 5 | const char *r; |
1774 | | |
1775 | 5 | if ( (r = selftest_basic_128 ()) |
1776 | 5 | || (r = selftest_basic_192 ()) |
1777 | 5 | || (r = selftest_basic_256 ()) ) |
1778 | 0 | return r; |
1779 | | |
1780 | 5 | return r; |
1781 | 5 | } |
1782 | | |
1783 | | |
1784 | | /* Self-tests for AES-128 using the CFB and OFB vectors from SP800-38a.pdf. */
1785 | | static const char * |
1786 | | selftest_fips_128_38a (int requested_mode) |
1787 | 0 | { |
1788 | 0 | static const struct tv |
1789 | 0 | { |
1790 | 0 | int mode; |
1791 | 0 | const unsigned char key[16]; |
1792 | 0 | const unsigned char iv[16]; |
1793 | 0 | struct |
1794 | 0 | { |
1795 | 0 | const unsigned char input[16]; |
1796 | 0 | const unsigned char output[16]; |
1797 | 0 | } data[4]; |
1798 | 0 | } tv[2] = |
1799 | 0 | { |
1800 | 0 | { |
1801 | 0 | GCRY_CIPHER_MODE_CFB, /* F.3.13, CFB128-AES128 */ |
1802 | 0 | { 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, |
1803 | 0 | 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c }, |
1804 | 0 | { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, |
1805 | 0 | 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f }, |
1806 | 0 | { |
1807 | 0 | { { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, |
1808 | 0 | 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a }, |
1809 | 0 | { 0x3b, 0x3f, 0xd9, 0x2e, 0xb7, 0x2d, 0xad, 0x20, |
1810 | 0 | 0x33, 0x34, 0x49, 0xf8, 0xe8, 0x3c, 0xfb, 0x4a } }, |
1811 | |
1812 | 0 | { { 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c, |
1813 | 0 | 0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51 }, |
1814 | 0 | { 0xc8, 0xa6, 0x45, 0x37, 0xa0, 0xb3, 0xa9, 0x3f, |
1815 | 0 | 0xcd, 0xe3, 0xcd, 0xad, 0x9f, 0x1c, 0xe5, 0x8b } }, |
1816 | |
1817 | 0 | { { 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11, |
1818 | 0 | 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef }, |
1819 | 0 | { 0x26, 0x75, 0x1f, 0x67, 0xa3, 0xcb, 0xb1, 0x40, |
1820 | 0 | 0xb1, 0x80, 0x8c, 0xf1, 0x87, 0xa4, 0xf4, 0xdf } }, |
1821 | |
1822 | 0 | { { 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17, |
1823 | 0 | 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 }, |
1824 | 0 | { 0xc0, 0x4b, 0x05, 0x35, 0x7c, 0x5d, 0x1c, 0x0e, |
1825 | 0 | 0xea, 0xc4, 0xc6, 0x6f, 0x9f, 0xf7, 0xf2, 0xe6 } } |
1826 | 0 | } |
1827 | 0 | }, |
1828 | 0 | { |
1829 | 0 | GCRY_CIPHER_MODE_OFB, |
1830 | 0 | { 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, |
1831 | 0 | 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c }, |
1832 | 0 | { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, |
1833 | 0 | 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f }, |
1834 | 0 | { |
1835 | 0 | { { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, |
1836 | 0 | 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a }, |
1837 | 0 | { 0x3b, 0x3f, 0xd9, 0x2e, 0xb7, 0x2d, 0xad, 0x20, |
1838 | 0 | 0x33, 0x34, 0x49, 0xf8, 0xe8, 0x3c, 0xfb, 0x4a } }, |
1839 | |
1840 | 0 | { { 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c, |
1841 | 0 | 0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51 }, |
1842 | 0 | { 0x77, 0x89, 0x50, 0x8d, 0x16, 0x91, 0x8f, 0x03, |
1843 | 0 | 0xf5, 0x3c, 0x52, 0xda, 0xc5, 0x4e, 0xd8, 0x25 } }, |
1844 | |
1845 | 0 | { { 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11, |
1846 | 0 | 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef }, |
1847 | 0 | { 0x97, 0x40, 0x05, 0x1e, 0x9c, 0x5f, 0xec, 0xf6, |
1848 | 0 | 0x43, 0x44, 0xf7, 0xa8, 0x22, 0x60, 0xed, 0xcc } }, |
1849 | |
1850 | 0 | { { 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17, |
1851 | 0 | 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 }, |
1852 | 0 | { 0x30, 0x4c, 0x65, 0x28, 0xf6, 0x59, 0xc7, 0x78, |
1853 | 0 | 0x66, 0xa5, 0x10, 0xd9, 0xc1, 0xd6, 0xae, 0x5e } }, |
1854 | 0 | } |
1855 | 0 | } |
1856 | 0 | }; |
1857 | 0 | unsigned char scratch[16]; |
1858 | 0 | gpg_error_t err; |
1859 | 0 | int tvi, idx; |
1860 | 0 | gcry_cipher_hd_t hdenc = NULL; |
1861 | 0 | gcry_cipher_hd_t hddec = NULL; |
1862 | |
1863 | 0 | #define Fail(a) do { \ |
1864 | 0 | _gcry_cipher_close (hdenc); \ |
1865 | 0 | _gcry_cipher_close (hddec); \ |
1866 | 0 | return a; \ |
1867 | 0 | } while (0) |
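 | | /* The do/while(0) wrapper makes Fail() act as a single statement
 | |  (safe under an unbraced if), and routing every error exit through
 | |  it guarantees both cipher handles are closed before returning. */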
1868 | |
1869 | 0 | gcry_assert (sizeof tv[0].data[0].input == sizeof scratch); |
1870 | 0 | gcry_assert (sizeof tv[0].data[0].output == sizeof scratch); |
1871 | | |
1872 | 0 | for (tvi=0; tvi < DIM (tv); tvi++) |
1873 | 0 | if (tv[tvi].mode == requested_mode) |
1874 | 0 | break; |
1875 | 0 | if (tvi == DIM (tv)) |
1876 | 0 | Fail ("no test data for this mode"); |
1877 | | |
1878 | 0 | err = _gcry_cipher_open (&hdenc, GCRY_CIPHER_AES, tv[tvi].mode, 0); |
1879 | 0 | if (err) |
1880 | 0 | Fail ("open"); |
1881 | 0 | err = _gcry_cipher_open (&hddec, GCRY_CIPHER_AES, tv[tvi].mode, 0); |
1882 | 0 | if (err) |
1883 | 0 | Fail ("open"); |
1884 | 0 | err = _gcry_cipher_setkey (hdenc, tv[tvi].key, sizeof tv[tvi].key); |
1885 | 0 | if (!err) |
1886 | 0 | err = _gcry_cipher_setkey (hddec, tv[tvi].key, sizeof tv[tvi].key); |
1887 | 0 | if (err) |
1888 | 0 | Fail ("set key"); |
1889 | 0 | err = _gcry_cipher_setiv (hdenc, tv[tvi].iv, sizeof tv[tvi].iv); |
1890 | 0 | if (!err) |
1891 | 0 | err = _gcry_cipher_setiv (hddec, tv[tvi].iv, sizeof tv[tvi].iv); |
1892 | 0 | if (err) |
1893 | 0 | Fail ("set IV"); |
1894 | 0 | for (idx=0; idx < DIM (tv[tvi].data); idx++) |
1895 | 0 | { |
1896 | 0 | err = _gcry_cipher_encrypt (hdenc, scratch, sizeof scratch, |
1897 | 0 | tv[tvi].data[idx].input, |
1898 | 0 | sizeof tv[tvi].data[idx].input); |
1899 | 0 | if (err) |
1900 | 0 | Fail ("encrypt command"); |
1901 | 0 | if (memcmp (scratch, tv[tvi].data[idx].output, sizeof scratch)) |
1902 | 0 | Fail ("encrypt mismatch"); |
1903 | 0 | err = _gcry_cipher_decrypt (hddec, scratch, sizeof scratch, |
1904 | 0 | tv[tvi].data[idx].output, |
1905 | 0 | sizeof tv[tvi].data[idx].output); |
1906 | 0 | if (err) |
1907 | 0 | Fail ("decrypt command"); |
1908 | 0 | if (memcmp (scratch, tv[tvi].data[idx].input, sizeof scratch)) |
1909 | 0 | Fail ("decrypt mismatch"); |
1910 | 0 | } |
1911 | | |
1912 | 0 | #undef Fail |
1913 | 0 | _gcry_cipher_close (hdenc); |
1914 | 0 | _gcry_cipher_close (hddec); |
1915 | 0 | return NULL; |
1916 | 0 | } |
1917 | | |
1918 | | |
1919 | | /* Complete selftest for AES-128 with all modes and driver code. */ |
1920 | | static gpg_err_code_t |
1921 | | selftest_fips_128 (int extended, selftest_report_func_t report) |
1922 | 0 | { |
1923 | 0 | const char *what; |
1924 | 0 | const char *errtxt; |
1925 | |
1926 | 0 | what = "low-level"; |
1927 | 0 | errtxt = selftest_basic_128 (); |
1928 | 0 | if (errtxt) |
1929 | 0 | goto failed; |
1930 | | |
1931 | 0 | if (extended) |
1932 | 0 | { |
1933 | 0 | what = "cfb"; |
1934 | 0 | errtxt = selftest_fips_128_38a (GCRY_CIPHER_MODE_CFB); |
1935 | 0 | if (errtxt) |
1936 | 0 | goto failed; |
1937 | | |
1938 | 0 | what = "ofb"; |
1939 | 0 | errtxt = selftest_fips_128_38a (GCRY_CIPHER_MODE_OFB); |
1940 | 0 | if (errtxt) |
1941 | 0 | goto failed; |
1942 | 0 | } |
1943 | | |
1944 | 0 | return 0; /* Succeeded. */ |
1945 | | |
1946 | 0 | failed: |
1947 | 0 | if (report) |
1948 | 0 | report ("cipher", GCRY_CIPHER_AES128, what, errtxt); |
1949 | 0 | return GPG_ERR_SELFTEST_FAILED; |
1950 | 0 | } |
1951 | | |
1952 | | /* Complete selftest for AES-192. */ |
1953 | | static gpg_err_code_t |
1954 | | selftest_fips_192 (int extended, selftest_report_func_t report) |
1955 | 0 | { |
1956 | 0 | const char *what; |
1957 | 0 | const char *errtxt; |
1958 | |
1959 | 0 | (void)extended; /* No extended tests available. */ |
1960 | |
1961 | 0 | what = "low-level"; |
1962 | 0 | errtxt = selftest_basic_192 (); |
1963 | 0 | if (errtxt) |
1964 | 0 | goto failed; |
1965 | | |
1966 | | |
1967 | 0 | return 0; /* Succeeded. */ |
1968 | | |
1969 | 0 | failed: |
1970 | 0 | if (report) |
1971 | 0 | report ("cipher", GCRY_CIPHER_AES192, what, errtxt); |
1972 | 0 | return GPG_ERR_SELFTEST_FAILED; |
1973 | 0 | } |
1974 | | |
1975 | | |
1976 | | /* Complete selftest for AES-256. */ |
1977 | | static gpg_err_code_t |
1978 | | selftest_fips_256 (int extended, selftest_report_func_t report) |
1979 | 0 | { |
1980 | 0 | const char *what; |
1981 | 0 | const char *errtxt; |
1982 | |
1983 | 0 | (void)extended; /* No extended tests available. */ |
1984 | |
1985 | 0 | what = "low-level"; |
1986 | 0 | errtxt = selftest_basic_256 (); |
1987 | 0 | if (errtxt) |
1988 | 0 | goto failed; |
1989 | | |
1990 | 0 | return 0; /* Succeeded. */ |
1991 | | |
1992 | 0 | failed: |
1993 | 0 | if (report) |
1994 | 0 | report ("cipher", GCRY_CIPHER_AES256, what, errtxt); |
1995 | 0 | return GPG_ERR_SELFTEST_FAILED; |
1996 | 0 | } |
1997 | | |
1998 | | |
1999 | | |
2000 | | /* Run a full self-test for ALGO and return 0 on success. */ |
2001 | | static gpg_err_code_t |
2002 | | run_selftests (int algo, int extended, selftest_report_func_t report) |
2003 | 0 | { |
2004 | 0 | gpg_err_code_t ec; |
2005 | |
2006 | 0 | switch (algo) |
2007 | 0 | { |
2008 | 0 | case GCRY_CIPHER_AES128: |
2009 | 0 | ec = selftest_fips_128 (extended, report); |
2010 | 0 | break; |
2011 | 0 | case GCRY_CIPHER_AES192: |
2012 | 0 | ec = selftest_fips_192 (extended, report); |
2013 | 0 | break; |
2014 | 0 | case GCRY_CIPHER_AES256: |
2015 | 0 | ec = selftest_fips_256 (extended, report); |
2016 | 0 | break; |
2017 | 0 | default: |
2018 | 0 | ec = GPG_ERR_CIPHER_ALGO; |
2019 | 0 | break; |
2020 | |
2021 | 0 | } |
2022 | 0 | return ec; |
2023 | 0 | } |
2024 | | |
2025 | | |
2026 | | |
2027 | | |
2028 | | static const char *rijndael_names[] = |
2029 | | { |
2030 | | "RIJNDAEL", |
2031 | | "AES128", |
2032 | | "AES-128", |
2033 | | NULL |
2034 | | }; |
2035 | | |
2036 | | static const gcry_cipher_oid_spec_t rijndael_oids[] = |
2037 | | { |
2038 | | { "2.16.840.1.101.3.4.1.1", GCRY_CIPHER_MODE_ECB }, |
2039 | | { "2.16.840.1.101.3.4.1.2", GCRY_CIPHER_MODE_CBC }, |
2040 | | { "2.16.840.1.101.3.4.1.3", GCRY_CIPHER_MODE_OFB }, |
2041 | | { "2.16.840.1.101.3.4.1.4", GCRY_CIPHER_MODE_CFB }, |
2042 | | { "2.16.840.1.101.3.4.1.6", GCRY_CIPHER_MODE_GCM }, |
2043 | | { "2.16.840.1.101.3.4.1.7", GCRY_CIPHER_MODE_CCM }, |
2044 | | { NULL } |
2045 | | }; |
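 | | /* These OIDs sit under the NIST CSOR arc 2.16.840.1.101.3.4.1 (aes):
 | |  .1-.4, .6 and .7 name the AES-128 ECB, CBC, OFB, CFB, GCM and CCM
 | |  modes; .5 (AES key wrap) is not a cipher mode and is therefore
 | |  absent.  The 192- and 256-bit tables below follow the same pattern
 | |  at .2x and .4x. */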
2046 | | |
2047 | | gcry_cipher_spec_t _gcry_cipher_spec_aes = |
2048 | | { |
2049 | | GCRY_CIPHER_AES, {0, 1}, |
2050 | | "AES", rijndael_names, rijndael_oids, 16, 128, |
2051 | | sizeof (RIJNDAEL_context), |
2052 | | rijndael_setkey, rijndael_encrypt, rijndael_decrypt, |
2053 | | NULL, NULL, |
2054 | | run_selftests |
2055 | | }; |
2056 | | |
2057 | | |
2058 | | static const char *rijndael192_names[] = |
2059 | | { |
2060 | | "RIJNDAEL192", |
2061 | | "AES-192", |
2062 | | NULL |
2063 | | }; |
2064 | | |
2065 | | static const gcry_cipher_oid_spec_t rijndael192_oids[] = |
2066 | | { |
2067 | | { "2.16.840.1.101.3.4.1.21", GCRY_CIPHER_MODE_ECB }, |
2068 | | { "2.16.840.1.101.3.4.1.22", GCRY_CIPHER_MODE_CBC }, |
2069 | | { "2.16.840.1.101.3.4.1.23", GCRY_CIPHER_MODE_OFB }, |
2070 | | { "2.16.840.1.101.3.4.1.24", GCRY_CIPHER_MODE_CFB }, |
2071 | | { "2.16.840.1.101.3.4.1.26", GCRY_CIPHER_MODE_GCM }, |
2072 | | { "2.16.840.1.101.3.4.1.27", GCRY_CIPHER_MODE_CCM }, |
2073 | | { NULL } |
2074 | | }; |
2075 | | |
2076 | | gcry_cipher_spec_t _gcry_cipher_spec_aes192 = |
2077 | | { |
2078 | | GCRY_CIPHER_AES192, {0, 1}, |
2079 | | "AES192", rijndael192_names, rijndael192_oids, 16, 192, |
2080 | | sizeof (RIJNDAEL_context), |
2081 | | rijndael_setkey, rijndael_encrypt, rijndael_decrypt, |
2082 | | NULL, NULL, |
2083 | | run_selftests |
2084 | | }; |
2085 | | |
2086 | | |
2087 | | static const char *rijndael256_names[] = |
2088 | | { |
2089 | | "RIJNDAEL256", |
2090 | | "AES-256", |
2091 | | NULL |
2092 | | }; |
2093 | | |
2094 | | static const gcry_cipher_oid_spec_t rijndael256_oids[] = |
2095 | | { |
2096 | | { "2.16.840.1.101.3.4.1.41", GCRY_CIPHER_MODE_ECB }, |
2097 | | { "2.16.840.1.101.3.4.1.42", GCRY_CIPHER_MODE_CBC }, |
2098 | | { "2.16.840.1.101.3.4.1.43", GCRY_CIPHER_MODE_OFB }, |
2099 | | { "2.16.840.1.101.3.4.1.44", GCRY_CIPHER_MODE_CFB }, |
2100 | | { "2.16.840.1.101.3.4.1.46", GCRY_CIPHER_MODE_GCM }, |
2101 | | { "2.16.840.1.101.3.4.1.47", GCRY_CIPHER_MODE_CCM }, |
2102 | | { NULL } |
2103 | | }; |
2104 | | |
2105 | | gcry_cipher_spec_t _gcry_cipher_spec_aes256 = |
2106 | | { |
2107 | | GCRY_CIPHER_AES256, {0, 1}, |
2108 | | "AES256", rijndael256_names, rijndael256_oids, 16, 256, |
2109 | | sizeof (RIJNDAEL_context), |
2110 | | rijndael_setkey, rijndael_encrypt, rijndael_decrypt, |
2111 | | NULL, NULL, |
2112 | | run_selftests |
2113 | | }; |