Coverage Report

Created: 2022-12-08 06:10

/src/libgcrypt/cipher/rijndael.c
Line
Count
Source
1
/* Rijndael (AES) for GnuPG
2
 * Copyright (C) 2000, 2001, 2002, 2003, 2007,
3
 *               2008, 2011, 2012 Free Software Foundation, Inc.
4
 *
5
 * This file is part of Libgcrypt.
6
 *
7
 * Libgcrypt is free software; you can redistribute it and/or modify
8
 * it under the terms of the GNU Lesser General Public License as
9
 * published by the Free Software Foundation; either version 2.1 of
10
 * the License, or (at your option) any later version.
11
 *
12
 * Libgcrypt is distributed in the hope that it will be useful,
13
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15
 * GNU Lesser General Public License for more details.
16
 *
17
 * You should have received a copy of the GNU Lesser General Public
18
 * License along with this program; if not, see <http://www.gnu.org/licenses/>.
19
 *******************************************************************
20
 * The code here is based on the optimized implementation taken from
21
 * http://www.esat.kuleuven.ac.be/~rijmen/rijndael/ on Oct 2, 2000,
22
 * which carries this notice:
23
 *------------------------------------------
24
 * rijndael-alg-fst.c   v2.3   April '2000
25
 *
26
 * Optimised ANSI C code
27
 *
28
 * authors: v1.0: Antoon Bosselaers
29
 *          v2.0: Vincent Rijmen
30
 *          v2.3: Paulo Barreto
31
 *
32
 * This code is placed in the public domain.
33
 *------------------------------------------
34
 *
35
 * The SP800-38a document is available at:
36
 *   http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf
37
 *
38
 */
39
40
#include <config.h>
41
#include <stdio.h>
42
#include <stdlib.h>
43
#include <string.h> /* for memcmp() */
44
45
#include "types.h"  /* for byte and u32 typedefs */
46
#include "g10lib.h"
47
#include "cipher.h"
48
#include "bufhelp.h"
49
#include "rijndael-internal.h"
50
#include "./cipher-internal.h"
51
52
53
#ifdef USE_AMD64_ASM
54
/* AMD64 assembly implementations of AES */
55
extern unsigned int _gcry_aes_amd64_encrypt_block(const void *keysched_enc,
56
                                                  unsigned char *out,
57
                                                  const unsigned char *in,
58
                                                  int rounds,
59
                                                  const void *encT);
60
61
extern unsigned int _gcry_aes_amd64_decrypt_block(const void *keysched_dec,
62
                                                  unsigned char *out,
63
                                                  const unsigned char *in,
64
                                                  int rounds,
65
                                                  const void *decT);
66
#endif /*USE_AMD64_ASM*/
67
68
#ifdef USE_AESNI
69
/* AES-NI (AMD64 & i386) accelerated implementations of AES */
70
extern void _gcry_aes_aesni_do_setkey(RIJNDAEL_context *ctx, const byte *key);
71
extern void _gcry_aes_aesni_prepare_decryption(RIJNDAEL_context *ctx);
72
73
extern unsigned int _gcry_aes_aesni_encrypt (const RIJNDAEL_context *ctx,
74
                                             unsigned char *dst,
75
                                             const unsigned char *src);
76
extern unsigned int _gcry_aes_aesni_decrypt (const RIJNDAEL_context *ctx,
77
                                             unsigned char *dst,
78
                                             const unsigned char *src);
79
extern void _gcry_aes_aesni_cfb_enc (void *context, unsigned char *iv,
80
                                     void *outbuf_arg, const void *inbuf_arg,
81
                                     size_t nblocks);
82
extern void _gcry_aes_aesni_cbc_enc (void *context, unsigned char *iv,
83
                                     void *outbuf_arg, const void *inbuf_arg,
84
                                     size_t nblocks, int cbc_mac);
85
extern void _gcry_aes_aesni_ctr_enc (void *context, unsigned char *ctr,
86
                                     void *outbuf_arg, const void *inbuf_arg,
87
                                     size_t nblocks);
88
extern void _gcry_aes_aesni_ctr32le_enc (void *context, unsigned char *ctr,
89
           void *outbuf_arg,
90
           const void *inbuf_arg, size_t nblocks);
91
extern void _gcry_aes_aesni_cfb_dec (void *context, unsigned char *iv,
92
                                     void *outbuf_arg, const void *inbuf_arg,
93
                                     size_t nblocks);
94
extern void _gcry_aes_aesni_cbc_dec (void *context, unsigned char *iv,
95
                                     void *outbuf_arg, const void *inbuf_arg,
96
                                     size_t nblocks);
97
extern size_t _gcry_aes_aesni_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
98
                                         const void *inbuf_arg, size_t nblocks,
99
                                         int encrypt);
100
extern size_t _gcry_aes_aesni_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg,
101
                                        size_t nblocks);
102
extern void _gcry_aes_aesni_xts_crypt (void *context, unsigned char *tweak,
103
                                       void *outbuf_arg, const void *inbuf_arg,
104
                                       size_t nblocks, int encrypt);
105
extern void _gcry_aes_aesni_ecb_crypt (void *context, void *outbuf_arg,
106
               const void *inbuf_arg, size_t nblocks,
107
               int encrypt);
108
#endif
109
110
#ifdef USE_VAES
111
/* VAES (AMD64) accelerated implementation of AES */
112
113
extern void _gcry_aes_vaes_cfb_dec (void *context, unsigned char *iv,
114
            void *outbuf_arg, const void *inbuf_arg,
115
            size_t nblocks);
116
extern void _gcry_aes_vaes_cbc_dec (void *context, unsigned char *iv,
117
            void *outbuf_arg, const void *inbuf_arg,
118
            size_t nblocks);
119
extern void _gcry_aes_vaes_ctr_enc (void *context, unsigned char *ctr,
120
            void *outbuf_arg, const void *inbuf_arg,
121
            size_t nblocks);
122
extern void _gcry_aes_vaes_ctr32le_enc (void *context, unsigned char *ctr,
123
          void *outbuf_arg, const void *inbuf_arg,
124
          size_t nblocks);
125
extern size_t _gcry_aes_vaes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
126
          const void *inbuf_arg, size_t nblocks,
127
          int encrypt);
128
extern void _gcry_aes_vaes_xts_crypt (void *context, unsigned char *tweak,
129
              void *outbuf_arg, const void *inbuf_arg,
130
              size_t nblocks, int encrypt);
131
extern void _gcry_aes_vaes_ecb_crypt (void *context, void *outbuf_arg,
132
              const void *inbuf_arg, size_t nblocks,
133
              int encrypt);
134
#endif
135
136
#ifdef USE_SSSE3
137
/* SSSE3 (AMD64) vector permutation implementation of AES */
138
extern void _gcry_aes_ssse3_do_setkey(RIJNDAEL_context *ctx, const byte *key);
139
extern void _gcry_aes_ssse3_prepare_decryption(RIJNDAEL_context *ctx);
140
141
extern unsigned int _gcry_aes_ssse3_encrypt (const RIJNDAEL_context *ctx,
142
                                             unsigned char *dst,
143
                                             const unsigned char *src);
144
extern unsigned int _gcry_aes_ssse3_decrypt (const RIJNDAEL_context *ctx,
145
                                             unsigned char *dst,
146
                                             const unsigned char *src);
147
extern void _gcry_aes_ssse3_cfb_enc (void *context, unsigned char *iv,
148
                                     void *outbuf_arg, const void *inbuf_arg,
149
                                     size_t nblocks);
150
extern void _gcry_aes_ssse3_cbc_enc (void *context, unsigned char *iv,
151
                                     void *outbuf_arg, const void *inbuf_arg,
152
                                     size_t nblocks,
153
                                     int cbc_mac);
154
extern void _gcry_aes_ssse3_ctr_enc (void *context, unsigned char *ctr,
155
                                     void *outbuf_arg, const void *inbuf_arg,
156
                                     size_t nblocks);
157
extern void _gcry_aes_ssse3_cfb_dec (void *context, unsigned char *iv,
158
                                     void *outbuf_arg, const void *inbuf_arg,
159
                                     size_t nblocks);
160
extern void _gcry_aes_ssse3_cbc_dec (void *context, unsigned char *iv,
161
                                     void *outbuf_arg, const void *inbuf_arg,
162
                                     size_t nblocks);
163
extern size_t _gcry_aes_ssse3_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
164
                                         const void *inbuf_arg, size_t nblocks,
165
                                         int encrypt);
166
extern size_t _gcry_aes_ssse3_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg,
167
                                        size_t nblocks);
168
#endif
169
170
#ifdef USE_PADLOCK
171
extern unsigned int _gcry_aes_padlock_encrypt (const RIJNDAEL_context *ctx,
172
                                               unsigned char *bx,
173
                                               const unsigned char *ax);
174
extern unsigned int _gcry_aes_padlock_decrypt (const RIJNDAEL_context *ctx,
175
                                               unsigned char *bx,
176
                                               const unsigned char *ax);
177
extern void _gcry_aes_padlock_prepare_decryption (RIJNDAEL_context *ctx);
178
#endif
179
180
#ifdef USE_ARM_ASM
181
/* ARM assembly implementations of AES */
182
extern unsigned int _gcry_aes_arm_encrypt_block(const void *keysched_enc,
183
                                                unsigned char *out,
184
                                                const unsigned char *in,
185
                                                int rounds,
186
                                                const void *encT);
187
188
extern unsigned int _gcry_aes_arm_decrypt_block(const void *keysched_dec,
189
                                                unsigned char *out,
190
                                                const unsigned char *in,
191
                                                int rounds,
192
                                                const void *decT);
193
#endif /*USE_ARM_ASM*/
194
195
#ifdef USE_ARM_CE
196
/* ARMv8 Crypto Extension implementations of AES */
197
extern void _gcry_aes_armv8_ce_setkey(RIJNDAEL_context *ctx, const byte *key);
198
extern void _gcry_aes_armv8_ce_prepare_decryption(RIJNDAEL_context *ctx);
199
200
extern unsigned int _gcry_aes_armv8_ce_encrypt(const RIJNDAEL_context *ctx,
201
                                               unsigned char *dst,
202
                                               const unsigned char *src);
203
extern unsigned int _gcry_aes_armv8_ce_decrypt(const RIJNDAEL_context *ctx,
204
                                               unsigned char *dst,
205
                                               const unsigned char *src);
206
207
extern void _gcry_aes_armv8_ce_cfb_enc (void *context, unsigned char *iv,
208
                                        void *outbuf_arg, const void *inbuf_arg,
209
                                        size_t nblocks);
210
extern void _gcry_aes_armv8_ce_cbc_enc (void *context, unsigned char *iv,
211
                                        void *outbuf_arg, const void *inbuf_arg,
212
                                        size_t nblocks,
213
                                        int cbc_mac);
214
extern void _gcry_aes_armv8_ce_ctr_enc (void *context, unsigned char *ctr,
215
                                        void *outbuf_arg, const void *inbuf_arg,
216
                                        size_t nblocks);
217
extern void _gcry_aes_armv8_ce_ctr32le_enc (void *context, unsigned char *ctr,
218
                                            void *outbuf_arg,
219
                                            const void *inbuf_arg,
220
                                            size_t nblocks);
221
extern void _gcry_aes_armv8_ce_cfb_dec (void *context, unsigned char *iv,
222
                                        void *outbuf_arg, const void *inbuf_arg,
223
                                        size_t nblocks);
224
extern void _gcry_aes_armv8_ce_cbc_dec (void *context, unsigned char *iv,
225
                                        void *outbuf_arg, const void *inbuf_arg,
226
                                        size_t nblocks);
227
extern size_t _gcry_aes_armv8_ce_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
228
                                            const void *inbuf_arg, size_t nblocks,
229
                                            int encrypt);
230
extern size_t _gcry_aes_armv8_ce_ocb_auth (gcry_cipher_hd_t c,
231
                                           const void *abuf_arg, size_t nblocks);
232
extern void _gcry_aes_armv8_ce_xts_crypt (void *context, unsigned char *tweak,
233
                                          void *outbuf_arg,
234
                                          const void *inbuf_arg,
235
                                          size_t nblocks, int encrypt);
236
extern void _gcry_aes_armv8_ce_ecb_crypt (void *context, void *outbuf_arg,
237
                                          const void *inbuf_arg, size_t nblocks,
238
                                          int encrypt);
239
#endif /*USE_ARM_CE*/
240
241
#ifdef USE_PPC_CRYPTO
242
/* PowerPC Crypto implementations of AES */
243
extern void _gcry_aes_ppc8_setkey(RIJNDAEL_context *ctx, const byte *key);
244
extern void _gcry_aes_ppc8_prepare_decryption(RIJNDAEL_context *ctx);
245
246
extern unsigned int _gcry_aes_ppc8_encrypt(const RIJNDAEL_context *ctx,
247
             unsigned char *dst,
248
             const unsigned char *src);
249
extern unsigned int _gcry_aes_ppc8_decrypt(const RIJNDAEL_context *ctx,
250
             unsigned char *dst,
251
             const unsigned char *src);
252
253
extern void _gcry_aes_ppc8_cfb_enc (void *context, unsigned char *iv,
254
            void *outbuf_arg, const void *inbuf_arg,
255
            size_t nblocks);
256
extern void _gcry_aes_ppc8_cbc_enc (void *context, unsigned char *iv,
257
            void *outbuf_arg, const void *inbuf_arg,
258
            size_t nblocks, int cbc_mac);
259
extern void _gcry_aes_ppc8_ctr_enc (void *context, unsigned char *ctr,
260
            void *outbuf_arg, const void *inbuf_arg,
261
            size_t nblocks);
262
extern void _gcry_aes_ppc8_cfb_dec (void *context, unsigned char *iv,
263
            void *outbuf_arg, const void *inbuf_arg,
264
            size_t nblocks);
265
extern void _gcry_aes_ppc8_cbc_dec (void *context, unsigned char *iv,
266
            void *outbuf_arg, const void *inbuf_arg,
267
            size_t nblocks);
268
269
extern size_t _gcry_aes_ppc8_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
270
          const void *inbuf_arg, size_t nblocks,
271
          int encrypt);
272
extern size_t _gcry_aes_ppc8_ocb_auth (gcry_cipher_hd_t c,
273
               const void *abuf_arg, size_t nblocks);
274
275
extern void _gcry_aes_ppc8_xts_crypt (void *context, unsigned char *tweak,
276
              void *outbuf_arg,
277
              const void *inbuf_arg,
278
              size_t nblocks, int encrypt);
279
#endif /*USE_PPC_CRYPTO*/
280
281
#ifdef USE_PPC_CRYPTO_WITH_PPC9LE
282
/* Power9 little-endian crypto implementations of AES */
283
extern unsigned int _gcry_aes_ppc9le_encrypt(const RIJNDAEL_context *ctx,
284
              unsigned char *dst,
285
              const unsigned char *src);
286
extern unsigned int _gcry_aes_ppc9le_decrypt(const RIJNDAEL_context *ctx,
287
              unsigned char *dst,
288
              const unsigned char *src);
289
290
extern void _gcry_aes_ppc9le_cfb_enc (void *context, unsigned char *iv,
291
              void *outbuf_arg, const void *inbuf_arg,
292
              size_t nblocks);
293
extern void _gcry_aes_ppc9le_cbc_enc (void *context, unsigned char *iv,
294
              void *outbuf_arg, const void *inbuf_arg,
295
              size_t nblocks, int cbc_mac);
296
extern void _gcry_aes_ppc9le_ctr_enc (void *context, unsigned char *ctr,
297
              void *outbuf_arg, const void *inbuf_arg,
298
              size_t nblocks);
299
extern void _gcry_aes_ppc9le_cfb_dec (void *context, unsigned char *iv,
300
              void *outbuf_arg, const void *inbuf_arg,
301
              size_t nblocks);
302
extern void _gcry_aes_ppc9le_cbc_dec (void *context, unsigned char *iv,
303
              void *outbuf_arg, const void *inbuf_arg,
304
              size_t nblocks);
305
306
extern size_t _gcry_aes_ppc9le_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
307
            const void *inbuf_arg, size_t nblocks,
308
            int encrypt);
309
extern size_t _gcry_aes_ppc9le_ocb_auth (gcry_cipher_hd_t c,
310
          const void *abuf_arg, size_t nblocks);
311
312
extern void _gcry_aes_ppc9le_xts_crypt (void *context, unsigned char *tweak,
313
          void *outbuf_arg,
314
          const void *inbuf_arg,
315
          size_t nblocks, int encrypt);
316
317
extern size_t _gcry_aes_p10le_gcm_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
318
           const void *inbuf_arg,
319
           size_t nblocks, int encrypt);
320
#endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/
321
322
#ifdef USE_S390X_CRYPTO
323
/* zSeries crypto implementations of AES */
324
extern int _gcry_aes_s390x_setup_acceleration(RIJNDAEL_context *ctx,
325
                unsigned int keylen,
326
                unsigned int hwfeatures,
327
                cipher_bulk_ops_t *bulk_ops);
328
extern void _gcry_aes_s390x_setkey(RIJNDAEL_context *ctx, const byte *key);
329
extern void _gcry_aes_s390x_prepare_decryption(RIJNDAEL_context *ctx);
330
331
extern unsigned int _gcry_aes_s390x_encrypt(const RIJNDAEL_context *ctx,
332
              unsigned char *dst,
333
              const unsigned char *src);
334
extern unsigned int _gcry_aes_s390x_decrypt(const RIJNDAEL_context *ctx,
335
              unsigned char *dst,
336
              const unsigned char *src);
337
338
#endif /*USE_S390X_CRYPTO*/
339
340
static unsigned int do_encrypt (const RIJNDAEL_context *ctx, unsigned char *bx,
341
                                const unsigned char *ax);
342
static unsigned int do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx,
343
                                const unsigned char *ax);
344
345
static void _gcry_aes_cfb_enc (void *context, unsigned char *iv,
346
             void *outbuf, const void *inbuf,
347
             size_t nblocks);
348
static void _gcry_aes_cfb_dec (void *context, unsigned char *iv,
349
             void *outbuf_arg, const void *inbuf_arg,
350
             size_t nblocks);
351
static void _gcry_aes_cbc_enc (void *context, unsigned char *iv,
352
             void *outbuf_arg, const void *inbuf_arg,
353
             size_t nblocks, int cbc_mac);
354
static void _gcry_aes_cbc_dec (void *context, unsigned char *iv,
355
             void *outbuf_arg, const void *inbuf_arg,
356
             size_t nblocks);
357
static void _gcry_aes_ctr_enc (void *context, unsigned char *ctr,
358
             void *outbuf_arg, const void *inbuf_arg,
359
             size_t nblocks);
360
static size_t _gcry_aes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
361
           const void *inbuf_arg, size_t nblocks,
362
           int encrypt);
363
static size_t _gcry_aes_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg,
364
          size_t nblocks);
365
static void _gcry_aes_xts_crypt (void *context, unsigned char *tweak,
366
         void *outbuf_arg, const void *inbuf_arg,
367
         size_t nblocks, int encrypt);
368

369
370
/* All the numbers.  */
371
#include "rijndael-tables.h"
372
373
374

375
376
/* Function prototypes.  */
377
static const char *selftest(void);
378
static void prepare_decryption(RIJNDAEL_context *ctx);
379
380
381

382
/* Prefetching for encryption/decryption tables. */
383
static inline void prefetch_table(const volatile byte *tab, size_t len)
384
0
{
385
0
  size_t i;
386
387
0
  for (i = 0; len - i >= 8 * 32; i += 8 * 32)
388
0
    {
389
0
      (void)tab[i + 0 * 32];
390
0
      (void)tab[i + 1 * 32];
391
0
      (void)tab[i + 2 * 32];
392
0
      (void)tab[i + 3 * 32];
393
0
      (void)tab[i + 4 * 32];
394
0
      (void)tab[i + 5 * 32];
395
0
      (void)tab[i + 6 * 32];
396
0
      (void)tab[i + 7 * 32];
397
0
    }
398
0
  for (; i < len; i += 32)
399
0
    {
400
0
      (void)tab[i];
401
0
    }
402
403
0
  (void)tab[len - 1];
404
0
}
405
406
static void prefetch_enc(void)
407
0
{
408
  /* Modify counters to trigger copy-on-write and unsharing if physical pages
409
   * of look-up table are shared between processes.  Modifying counters also
410
 * causes checksums for pages to change and hints to the same-page merging algorithm
411
   * that these pages are frequently changing.  */
412
0
  enc_tables.counter_head++;
413
0
  enc_tables.counter_tail++;
414
415
  /* Prefetch look-up tables to cache.  */
416
0
  prefetch_table((const void *)&enc_tables, sizeof(enc_tables));
417
0
}
418
419
static void prefetch_dec(void)
420
0
{
421
  /* Modify counters to trigger copy-on-write and unsharing if physical pages
422
   * of look-up table are shared between processes.  Modifying counters also
423
   * causes checksums for pages to change and hint same-page merging algorithm
424
   * that these pages are frequently changing.  */
425
0
  dec_tables.counter_head++;
426
0
  dec_tables.counter_tail++;
427
428
  /* Prefetch look-up tables to cache.  */
429
0
  prefetch_table((const void *)&dec_tables, sizeof(dec_tables));
430
0
}
431
432
433

434
static inline u32
435
sbox4(u32 inb4)
436
0
{
437
0
  u32 out;
438
0
  out =  (encT[(inb4 >> 0) & 0xffU] & 0xff00U) >> 8;
439
0
  out |= (encT[(inb4 >> 8) & 0xffU] & 0xff00U) >> 0;
440
0
  out |= (encT[(inb4 >> 16) & 0xffU] & 0xff0000U) << 0;
441
0
  out |= (encT[(inb4 >> 24) & 0xffU] & 0xff0000U) << 8;
442
0
  return out;
443
0
}
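
The helper above pulls plain S-box bytes out of the combined encryption T-table: assuming, as the rest of this file does, that the S-box value S(x) sits in byte 1 of each 32-bit encT entry (this is why later code forms a byte-wise S-box pointer as ((const byte *)encT) + 1), sbox4() is equivalent to the following per-byte form. sbox4_ref is a name introduced here purely for illustration:

static u32
sbox4_ref (u32 w)
{
  const byte *sbox = ((const byte *)encT) + 1;   /* S(x) lives at byte offset 1 */

  return  ((u32)sbox[((w >>  0) & 0xffU) * 4] <<  0)
        | ((u32)sbox[((w >>  8) & 0xffU) * 4] <<  8)
        | ((u32)sbox[((w >> 16) & 0xffU) * 4] << 16)
        | ((u32)sbox[((w >> 24) & 0xffU) * 4] << 24);
}
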
444
445
/* Perform the key setup.  */
446
static gcry_err_code_t
447
do_setkey (RIJNDAEL_context *ctx, const byte *key, const unsigned keylen,
448
           cipher_bulk_ops_t *bulk_ops)
449
0
{
450
0
  static int initialized = 0;
451
0
  static const char *selftest_failed = 0;
452
0
  void (*hw_setkey)(RIJNDAEL_context *ctx, const byte *key) = NULL;
453
0
  int rounds;
454
0
  unsigned int KC;
455
0
  unsigned int hwfeatures;
456
457
  /* The on-the-fly self tests are only run in non-fips mode. In fips
458
     mode explicit self-tests are required.  Actually the on-the-fly
459
     self-tests are not fully thread-safe and it might happen that a
460
     failed self-test won't get noticed in another thread.
461
462
     FIXME: We might want to have a central registry of succeeded
463
     self-tests. */
464
0
  if (!fips_mode () && !initialized)
465
0
    {
466
0
      initialized = 1;
467
0
      selftest_failed = selftest ();
468
0
      if (selftest_failed)
469
0
        log_error ("%s\n", selftest_failed );
470
0
    }
471
0
  if (selftest_failed)
472
0
    return GPG_ERR_SELFTEST_FAILED;
473
474
0
  if( keylen == 128/8 )
475
0
    {
476
0
      rounds = 10;
477
0
      KC = 4;
478
0
    }
479
0
  else if ( keylen == 192/8 )
480
0
    {
481
0
      rounds = 12;
482
0
      KC = 6;
483
0
    }
484
0
  else if ( keylen == 256/8 )
485
0
    {
486
0
      rounds = 14;
487
0
      KC = 8;
488
0
    }
489
0
  else
490
0
    return GPG_ERR_INV_KEYLEN;
491
492
0
  ctx->rounds = rounds;
493
0
  hwfeatures = _gcry_get_hw_features ();
494
495
0
  ctx->decryption_prepared = 0;
496
497
  /* Setup default bulk encryption routines.  */
498
0
  memset (bulk_ops, 0, sizeof(*bulk_ops));
499
0
  bulk_ops->cfb_enc = _gcry_aes_cfb_enc;
500
0
  bulk_ops->cfb_dec = _gcry_aes_cfb_dec;
501
0
  bulk_ops->cbc_enc = _gcry_aes_cbc_enc;
502
0
  bulk_ops->cbc_dec = _gcry_aes_cbc_dec;
503
0
  bulk_ops->ctr_enc = _gcry_aes_ctr_enc;
504
0
  bulk_ops->ocb_crypt = _gcry_aes_ocb_crypt;
505
0
  bulk_ops->ocb_auth  = _gcry_aes_ocb_auth;
506
0
  bulk_ops->xts_crypt = _gcry_aes_xts_crypt;
507
508
0
  (void)hwfeatures;
509
510
0
  if (0)
511
0
    {
512
0
      ;
513
0
    }
514
0
#ifdef USE_AESNI
515
0
  else if (hwfeatures & HWF_INTEL_AESNI)
516
0
    {
517
0
      hw_setkey = _gcry_aes_aesni_do_setkey;
518
0
      ctx->encrypt_fn = _gcry_aes_aesni_encrypt;
519
0
      ctx->decrypt_fn = _gcry_aes_aesni_decrypt;
520
0
      ctx->prefetch_enc_fn = NULL;
521
0
      ctx->prefetch_dec_fn = NULL;
522
0
      ctx->prepare_decryption = _gcry_aes_aesni_prepare_decryption;
523
0
      ctx->use_avx = !!(hwfeatures & HWF_INTEL_AVX);
524
0
      ctx->use_avx2 = !!(hwfeatures & HWF_INTEL_AVX2);
525
526
      /* Setup AES-NI bulk encryption routines.  */
527
0
      bulk_ops->cfb_enc = _gcry_aes_aesni_cfb_enc;
528
0
      bulk_ops->cfb_dec = _gcry_aes_aesni_cfb_dec;
529
0
      bulk_ops->cbc_enc = _gcry_aes_aesni_cbc_enc;
530
0
      bulk_ops->cbc_dec = _gcry_aes_aesni_cbc_dec;
531
0
      bulk_ops->ctr_enc = _gcry_aes_aesni_ctr_enc;
532
0
      bulk_ops->ctr32le_enc = _gcry_aes_aesni_ctr32le_enc;
533
0
      bulk_ops->ocb_crypt = _gcry_aes_aesni_ocb_crypt;
534
0
      bulk_ops->ocb_auth = _gcry_aes_aesni_ocb_auth;
535
0
      bulk_ops->xts_crypt = _gcry_aes_aesni_xts_crypt;
536
0
      bulk_ops->ecb_crypt = _gcry_aes_aesni_ecb_crypt;
537
538
0
#ifdef USE_VAES
539
0
      if ((hwfeatures & HWF_INTEL_VAES_VPCLMUL) &&
540
0
    (hwfeatures & HWF_INTEL_AVX2))
541
0
  {
542
    /* Setup VAES bulk encryption routines.  */
543
0
    bulk_ops->cfb_dec = _gcry_aes_vaes_cfb_dec;
544
0
    bulk_ops->cbc_dec = _gcry_aes_vaes_cbc_dec;
545
0
    bulk_ops->ctr_enc = _gcry_aes_vaes_ctr_enc;
546
0
    bulk_ops->ctr32le_enc = _gcry_aes_vaes_ctr32le_enc;
547
0
    bulk_ops->ocb_crypt = _gcry_aes_vaes_ocb_crypt;
548
0
    bulk_ops->xts_crypt = _gcry_aes_vaes_xts_crypt;
549
0
    bulk_ops->ecb_crypt = _gcry_aes_vaes_ecb_crypt;
550
0
  }
551
0
#endif
552
0
    }
553
0
#endif
554
0
#ifdef USE_PADLOCK
555
0
  else if ((hwfeatures & HWF_PADLOCK_AES) && keylen == 128/8)
556
0
    {
557
0
      ctx->encrypt_fn = _gcry_aes_padlock_encrypt;
558
0
      ctx->decrypt_fn = _gcry_aes_padlock_decrypt;
559
0
      ctx->prefetch_enc_fn = NULL;
560
0
      ctx->prefetch_dec_fn = NULL;
561
0
      ctx->prepare_decryption = _gcry_aes_padlock_prepare_decryption;
562
0
      memcpy (ctx->padlockkey, key, keylen);
563
0
    }
564
0
#endif
565
0
#ifdef USE_SSSE3
566
0
  else if (hwfeatures & HWF_INTEL_SSSE3)
567
0
    {
568
0
      hw_setkey = _gcry_aes_ssse3_do_setkey;
569
0
      ctx->encrypt_fn = _gcry_aes_ssse3_encrypt;
570
0
      ctx->decrypt_fn = _gcry_aes_ssse3_decrypt;
571
0
      ctx->prefetch_enc_fn = NULL;
572
0
      ctx->prefetch_dec_fn = NULL;
573
0
      ctx->prepare_decryption = _gcry_aes_ssse3_prepare_decryption;
574
575
      /* Setup SSSE3 bulk encryption routines.  */
576
0
      bulk_ops->cfb_enc = _gcry_aes_ssse3_cfb_enc;
577
0
      bulk_ops->cfb_dec = _gcry_aes_ssse3_cfb_dec;
578
0
      bulk_ops->cbc_enc = _gcry_aes_ssse3_cbc_enc;
579
0
      bulk_ops->cbc_dec = _gcry_aes_ssse3_cbc_dec;
580
0
      bulk_ops->ctr_enc = _gcry_aes_ssse3_ctr_enc;
581
0
      bulk_ops->ocb_crypt = _gcry_aes_ssse3_ocb_crypt;
582
0
      bulk_ops->ocb_auth = _gcry_aes_ssse3_ocb_auth;
583
0
    }
584
0
#endif
585
#ifdef USE_ARM_CE
586
  else if (hwfeatures & HWF_ARM_AES)
587
    {
588
      hw_setkey = _gcry_aes_armv8_ce_setkey;
589
      ctx->encrypt_fn = _gcry_aes_armv8_ce_encrypt;
590
      ctx->decrypt_fn = _gcry_aes_armv8_ce_decrypt;
591
      ctx->prefetch_enc_fn = NULL;
592
      ctx->prefetch_dec_fn = NULL;
593
      ctx->prepare_decryption = _gcry_aes_armv8_ce_prepare_decryption;
594
595
      /* Setup ARM-CE bulk encryption routines.  */
596
      bulk_ops->cfb_enc = _gcry_aes_armv8_ce_cfb_enc;
597
      bulk_ops->cfb_dec = _gcry_aes_armv8_ce_cfb_dec;
598
      bulk_ops->cbc_enc = _gcry_aes_armv8_ce_cbc_enc;
599
      bulk_ops->cbc_dec = _gcry_aes_armv8_ce_cbc_dec;
600
      bulk_ops->ctr_enc = _gcry_aes_armv8_ce_ctr_enc;
601
      bulk_ops->ctr32le_enc = _gcry_aes_armv8_ce_ctr32le_enc;
602
      bulk_ops->ocb_crypt = _gcry_aes_armv8_ce_ocb_crypt;
603
      bulk_ops->ocb_auth = _gcry_aes_armv8_ce_ocb_auth;
604
      bulk_ops->xts_crypt = _gcry_aes_armv8_ce_xts_crypt;
605
      bulk_ops->ecb_crypt = _gcry_aes_armv8_ce_ecb_crypt;
606
    }
607
#endif
608
#ifdef USE_PPC_CRYPTO_WITH_PPC9LE
609
  else if ((hwfeatures & HWF_PPC_VCRYPTO) && (hwfeatures & HWF_PPC_ARCH_3_00))
610
    {
611
      hw_setkey = _gcry_aes_ppc8_setkey;
612
      ctx->encrypt_fn = _gcry_aes_ppc9le_encrypt;
613
      ctx->decrypt_fn = _gcry_aes_ppc9le_decrypt;
614
      ctx->prefetch_enc_fn = NULL;
615
      ctx->prefetch_dec_fn = NULL;
616
      ctx->prepare_decryption = _gcry_aes_ppc8_prepare_decryption;
617
618
      /* Setup PPC9LE bulk encryption routines.  */
619
      bulk_ops->cfb_enc = _gcry_aes_ppc9le_cfb_enc;
620
      bulk_ops->cfb_dec = _gcry_aes_ppc9le_cfb_dec;
621
      bulk_ops->cbc_enc = _gcry_aes_ppc9le_cbc_enc;
622
      bulk_ops->cbc_dec = _gcry_aes_ppc9le_cbc_dec;
623
      bulk_ops->ctr_enc = _gcry_aes_ppc9le_ctr_enc;
624
      bulk_ops->ocb_crypt = _gcry_aes_ppc9le_ocb_crypt;
625
      bulk_ops->ocb_auth = _gcry_aes_ppc9le_ocb_auth;
626
      bulk_ops->xts_crypt = _gcry_aes_ppc9le_xts_crypt;
627
      if (hwfeatures & HWF_PPC_ARCH_3_10)  /* for P10 */
628
        bulk_ops->gcm_crypt = _gcry_aes_p10le_gcm_crypt;
629
# ifdef ENABLE_FORCE_SOFT_HWFEATURES
630
      /* HWF_PPC_ARCH_3_10 above is used as a soft HW-feature indicator for P10.
631
       * The actual implementation also works with HWF_PPC_ARCH_3_00. */
632
      if (hwfeatures & HWF_PPC_ARCH_3_00)
633
        bulk_ops->gcm_crypt = _gcry_aes_p10le_gcm_crypt;
634
# endif
635
    }
636
#endif
637
#ifdef USE_PPC_CRYPTO
638
  else if (hwfeatures & HWF_PPC_VCRYPTO)
639
    {
640
      hw_setkey = _gcry_aes_ppc8_setkey;
641
      ctx->encrypt_fn = _gcry_aes_ppc8_encrypt;
642
      ctx->decrypt_fn = _gcry_aes_ppc8_decrypt;
643
      ctx->prefetch_enc_fn = NULL;
644
      ctx->prefetch_dec_fn = NULL;
645
      ctx->prepare_decryption = _gcry_aes_ppc8_prepare_decryption;
646
647
      /* Setup PPC8 bulk encryption routines.  */
648
      bulk_ops->cfb_enc = _gcry_aes_ppc8_cfb_enc;
649
      bulk_ops->cfb_dec = _gcry_aes_ppc8_cfb_dec;
650
      bulk_ops->cbc_enc = _gcry_aes_ppc8_cbc_enc;
651
      bulk_ops->cbc_dec = _gcry_aes_ppc8_cbc_dec;
652
      bulk_ops->ctr_enc = _gcry_aes_ppc8_ctr_enc;
653
      bulk_ops->ocb_crypt = _gcry_aes_ppc8_ocb_crypt;
654
      bulk_ops->ocb_auth = _gcry_aes_ppc8_ocb_auth;
655
      bulk_ops->xts_crypt = _gcry_aes_ppc8_xts_crypt;
656
    }
657
#endif
658
#ifdef USE_S390X_CRYPTO
659
  else if (_gcry_aes_s390x_setup_acceleration (ctx, keylen, hwfeatures,
660
                 bulk_ops))
661
  {
662
      hw_setkey = _gcry_aes_s390x_setkey;
663
      ctx->encrypt_fn = _gcry_aes_s390x_encrypt;
664
      ctx->decrypt_fn = _gcry_aes_s390x_decrypt;
665
      ctx->prefetch_enc_fn = NULL;
666
      ctx->prefetch_dec_fn = NULL;
667
      ctx->prepare_decryption = _gcry_aes_s390x_prepare_decryption;
668
    }
669
#endif
670
0
  else
671
0
    {
672
0
      ctx->encrypt_fn = do_encrypt;
673
0
      ctx->decrypt_fn = do_decrypt;
674
0
      ctx->prefetch_enc_fn = prefetch_enc;
675
0
      ctx->prefetch_dec_fn = prefetch_dec;
676
0
      ctx->prepare_decryption = prepare_decryption;
677
0
    }
678
679
  /* NB: We don't yet support Padlock hardware key generation.  */
680
681
0
  if (hw_setkey)
682
0
    {
683
0
      hw_setkey (ctx, key);
684
0
    }
685
0
  else
686
0
    {
687
0
      u32 W_prev;
688
0
      u32 *W_u32 = ctx->keyschenc32b;
689
0
      byte rcon = 1;
690
0
      unsigned int i, j;
691
692
0
      prefetch_enc();
693
694
0
      for (i = 0; i < KC; i += 2)
695
0
        {
696
0
          W_u32[i + 0] = buf_get_le32(key + i * 4 + 0);
697
0
          W_u32[i + 1] = buf_get_le32(key + i * 4 + 4);
698
0
        }
699
700
0
      for (i = KC, j = KC, W_prev = W_u32[KC - 1];
701
0
           i < 4 * (rounds + 1);
702
0
           i += 2, j += 2)
703
0
        {
704
0
          u32 temp0 = W_prev;
705
0
          u32 temp1;
706
707
0
          if (j == KC)
708
0
            {
709
0
              j = 0;
710
0
              temp0 = sbox4(rol(temp0, 24)) ^ rcon;
711
0
              rcon = ((rcon << 1) ^ (-(rcon >> 7) & 0x1b)) & 0xff;
712
0
            }
713
0
          else if (KC == 8 && j == 4)
714
0
            {
715
0
              temp0 = sbox4(temp0);
716
0
            }
717
718
0
          temp1 = W_u32[i - KC + 0];
719
720
0
          W_u32[i + 0] = temp0 ^ temp1;
721
0
          W_u32[i + 1] = W_u32[i - KC + 1] ^ temp0 ^ temp1;
722
0
          W_prev = W_u32[i + 1];
723
0
        }
724
0
    }
725
726
0
  return 0;
727
0
}
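
In the software key expansion above, the round-constant update rcon = ((rcon << 1) ^ (-(rcon >> 7) & 0x1b)) & 0xff is a branchless multiplication by x in GF(2^8), reduced by the AES polynomial 0x11b. A plainer (branching) form of the same step; xtime is a name introduced here for illustration and is not part of this file:

static byte
xtime (byte b)
{
  /* Multiply by x in GF(2^8); reduce by x^8 + x^4 + x^3 + x + 1 (0x1b)
     whenever the bit shifted out of the top was set.  */
  return (byte)(((b << 1) ^ ((b & 0x80) ? 0x1b : 0x00)) & 0xff);
}
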
728
729
730
static gcry_err_code_t
731
rijndael_setkey (void *context, const byte *key, const unsigned keylen,
732
                 cipher_bulk_ops_t *bulk_ops)
733
0
{
734
0
  RIJNDAEL_context *ctx = context;
735
0
  return do_setkey (ctx, key, keylen, bulk_ops);
736
0
}
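
rijndael_setkey() and the bulk_ops table filled in above are not called directly; they are reached through the generic cipher layer. A minimal sketch of driving this module through the public libgcrypt API (error handling omitted; key, iv and buf are caller-provided; the function name is illustrative):

#include <gcrypt.h>

void
encrypt_cbc_example (const unsigned char key[16], const unsigned char iv[16],
                     unsigned char *buf, size_t len /* multiple of 16 */)
{
  gcry_cipher_hd_t hd;

  gcry_check_version (NULL);                      /* initialize libgcrypt      */
  gcry_cipher_open (&hd, GCRY_CIPHER_AES128,
                    GCRY_CIPHER_MODE_CBC, 0);     /* selects this AES module   */
  gcry_cipher_setkey (hd, key, 16);               /* reaches rijndael_setkey() */
  gcry_cipher_setiv (hd, iv, 16);
  gcry_cipher_encrypt (hd, buf, len, NULL, 0);    /* in-place, uses bulk CBC   */
  gcry_cipher_close (hd);
}
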
737
738
739
/* Make a decryption key from an encryption key. */
740
static void
741
prepare_decryption( RIJNDAEL_context *ctx )
742
0
{
743
0
  const byte *sbox = ((const byte *)encT) + 1;
744
0
  int r;
745
746
0
  prefetch_enc();
747
0
  prefetch_dec();
748
749
0
  ctx->keyschdec32[0][0] = ctx->keyschenc32[0][0];
750
0
  ctx->keyschdec32[0][1] = ctx->keyschenc32[0][1];
751
0
  ctx->keyschdec32[0][2] = ctx->keyschenc32[0][2];
752
0
  ctx->keyschdec32[0][3] = ctx->keyschenc32[0][3];
753
754
0
  for (r = 1; r < ctx->rounds; r++)
755
0
    {
756
0
      u32 *wi = ctx->keyschenc32[r];
757
0
      u32 *wo = ctx->keyschdec32[r];
758
0
      u32 wt;
759
760
0
      wt = wi[0];
761
0
      wo[0] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0)
762
0
        ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1)
763
0
        ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2)
764
0
        ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3);
765
766
0
      wt = wi[1];
767
0
      wo[1] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0)
768
0
        ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1)
769
0
        ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2)
770
0
        ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3);
771
772
0
      wt = wi[2];
773
0
      wo[2] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0)
774
0
        ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1)
775
0
        ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2)
776
0
        ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3);
777
778
0
      wt = wi[3];
779
0
      wo[3] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0)
780
0
        ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1)
781
0
        ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2)
782
0
        ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3);
783
0
    }
784
785
0
  ctx->keyschdec32[r][0] = ctx->keyschenc32[r][0];
786
0
  ctx->keyschdec32[r][1] = ctx->keyschenc32[r][1];
787
0
  ctx->keyschdec32[r][2] = ctx->keyschenc32[r][2];
788
0
  ctx->keyschdec32[r][3] = ctx->keyschenc32[r][3];
789
0
}
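
A reading aid for the loop above (this follows from the table definitions in rijndael-tables.h and is not stated in the file itself): decT folds InvSubBytes and InvMixColumns together, so feeding it S(b) cancels the InvSubBytes part and decT[sbox[b * 4]] is the InvMixColumns image of b. The middle rounds of the decryption schedule are therefore the standard equivalent-inverse-cipher transform, with the first and last round keys copied unchanged:

/* For r = 1 .. rounds-1:
 *   keyschdec32[r] = InvMixColumns (keyschenc32[r])
 * computed byte-wise via the identity decT[ S(x) ] = InvMixColumns (x).  */
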
790
791

792
#if !defined(USE_ARM_ASM) && !defined(USE_AMD64_ASM)
793
/* Encrypt one block. A and B may be the same. */
794
static unsigned int
795
do_encrypt_fn (const RIJNDAEL_context *ctx, unsigned char *b,
796
               const unsigned char *a)
797
{
798
#define rk (ctx->keyschenc32)
799
  const byte *sbox = ((const byte *)encT) + 1;
800
  int rounds = ctx->rounds;
801
  int r;
802
  u32 sa[4];
803
  u32 sb[4];
804
805
  sb[0] = buf_get_le32(a + 0);
806
  sb[1] = buf_get_le32(a + 4);
807
  sb[2] = buf_get_le32(a + 8);
808
  sb[3] = buf_get_le32(a + 12);
809
810
  sa[0] = sb[0] ^ rk[0][0];
811
  sa[1] = sb[1] ^ rk[0][1];
812
  sa[2] = sb[2] ^ rk[0][2];
813
  sa[3] = sb[3] ^ rk[0][3];
814
815
  sb[0] = rol(encT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
816
  sb[3] = rol(encT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
817
  sb[2] = rol(encT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
818
  sb[1] = rol(encT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
819
  sa[0] = rk[1][0] ^ sb[0];
820
821
  sb[1] ^= rol(encT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
822
  sa[0] ^= rol(encT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
823
  sb[3] ^= rol(encT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
824
  sb[2] ^= rol(encT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
825
  sa[1] = rk[1][1] ^ sb[1];
826
827
  sb[2] ^= rol(encT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
828
  sa[1] ^= rol(encT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
829
  sa[0] ^= rol(encT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
830
  sb[3] ^= rol(encT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
831
  sa[2] = rk[1][2] ^ sb[2];
832
833
  sb[3] ^= rol(encT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
834
  sa[2] ^= rol(encT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
835
  sa[1] ^= rol(encT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
836
  sa[0] ^= rol(encT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
837
  sa[3] = rk[1][3] ^ sb[3];
838
839
  for (r = 2; r < rounds; r++)
840
    {
841
      sb[0] = rol(encT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
842
      sb[3] = rol(encT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
843
      sb[2] = rol(encT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
844
      sb[1] = rol(encT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
845
      sa[0] = rk[r][0] ^ sb[0];
846
847
      sb[1] ^= rol(encT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
848
      sa[0] ^= rol(encT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
849
      sb[3] ^= rol(encT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
850
      sb[2] ^= rol(encT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
851
      sa[1] = rk[r][1] ^ sb[1];
852
853
      sb[2] ^= rol(encT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
854
      sa[1] ^= rol(encT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
855
      sa[0] ^= rol(encT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
856
      sb[3] ^= rol(encT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
857
      sa[2] = rk[r][2] ^ sb[2];
858
859
      sb[3] ^= rol(encT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
860
      sa[2] ^= rol(encT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
861
      sa[1] ^= rol(encT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
862
      sa[0] ^= rol(encT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
863
      sa[3] = rk[r][3] ^ sb[3];
864
865
      r++;
866
867
      sb[0] = rol(encT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
868
      sb[3] = rol(encT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
869
      sb[2] = rol(encT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
870
      sb[1] = rol(encT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
871
      sa[0] = rk[r][0] ^ sb[0];
872
873
      sb[1] ^= rol(encT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
874
      sa[0] ^= rol(encT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
875
      sb[3] ^= rol(encT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
876
      sb[2] ^= rol(encT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
877
      sa[1] = rk[r][1] ^ sb[1];
878
879
      sb[2] ^= rol(encT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
880
      sa[1] ^= rol(encT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
881
      sa[0] ^= rol(encT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
882
      sb[3] ^= rol(encT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
883
      sa[2] = rk[r][2] ^ sb[2];
884
885
      sb[3] ^= rol(encT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
886
      sa[2] ^= rol(encT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
887
      sa[1] ^= rol(encT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
888
      sa[0] ^= rol(encT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
889
      sa[3] = rk[r][3] ^ sb[3];
890
    }
891
892
  /* Last round is special. */
893
894
  sb[0] = ((u32)sbox[(byte)(sa[0] >> (0 * 8)) * 4]) << (0 * 8);
895
  sb[3] = ((u32)sbox[(byte)(sa[0] >> (1 * 8)) * 4]) << (1 * 8);
896
  sb[2] = ((u32)sbox[(byte)(sa[0] >> (2 * 8)) * 4]) << (2 * 8);
897
  sb[1] = ((u32)sbox[(byte)(sa[0] >> (3 * 8)) * 4]) << (3 * 8);
898
  sa[0] = rk[r][0] ^ sb[0];
899
900
  sb[1] ^= ((u32)sbox[(byte)(sa[1] >> (0 * 8)) * 4]) << (0 * 8);
901
  sa[0] ^= ((u32)sbox[(byte)(sa[1] >> (1 * 8)) * 4]) << (1 * 8);
902
  sb[3] ^= ((u32)sbox[(byte)(sa[1] >> (2 * 8)) * 4]) << (2 * 8);
903
  sb[2] ^= ((u32)sbox[(byte)(sa[1] >> (3 * 8)) * 4]) << (3 * 8);
904
  sa[1] = rk[r][1] ^ sb[1];
905
906
  sb[2] ^= ((u32)sbox[(byte)(sa[2] >> (0 * 8)) * 4]) << (0 * 8);
907
  sa[1] ^= ((u32)sbox[(byte)(sa[2] >> (1 * 8)) * 4]) << (1 * 8);
908
  sa[0] ^= ((u32)sbox[(byte)(sa[2] >> (2 * 8)) * 4]) << (2 * 8);
909
  sb[3] ^= ((u32)sbox[(byte)(sa[2] >> (3 * 8)) * 4]) << (3 * 8);
910
  sa[2] = rk[r][2] ^ sb[2];
911
912
  sb[3] ^= ((u32)sbox[(byte)(sa[3] >> (0 * 8)) * 4]) << (0 * 8);
913
  sa[2] ^= ((u32)sbox[(byte)(sa[3] >> (1 * 8)) * 4]) << (1 * 8);
914
  sa[1] ^= ((u32)sbox[(byte)(sa[3] >> (2 * 8)) * 4]) << (2 * 8);
915
  sa[0] ^= ((u32)sbox[(byte)(sa[3] >> (3 * 8)) * 4]) << (3 * 8);
916
  sa[3] = rk[r][3] ^ sb[3];
917
918
  buf_put_le32(b + 0, sa[0]);
919
  buf_put_le32(b + 4, sa[1]);
920
  buf_put_le32(b + 8, sa[2]);
921
  buf_put_le32(b + 12, sa[3]);
922
#undef rk
923
924
  return (56 + 2*sizeof(int));
925
}
926
#endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/
927
928
929
static unsigned int
930
do_encrypt (const RIJNDAEL_context *ctx,
931
            unsigned char *bx, const unsigned char *ax)
932
0
{
933
0
#ifdef USE_AMD64_ASM
934
0
  return _gcry_aes_amd64_encrypt_block(ctx->keyschenc, bx, ax, ctx->rounds,
935
0
               enc_tables.T);
936
#elif defined(USE_ARM_ASM)
937
  return _gcry_aes_arm_encrypt_block(ctx->keyschenc, bx, ax, ctx->rounds,
938
             enc_tables.T);
939
#else
940
  return do_encrypt_fn (ctx, bx, ax);
941
#endif /* !USE_ARM_ASM && !USE_AMD64_ASM*/
942
0
}
943
944
945
static unsigned int
946
rijndael_encrypt (void *context, byte *b, const byte *a)
947
0
{
948
0
  RIJNDAEL_context *ctx = context;
949
950
0
  if (ctx->prefetch_enc_fn)
951
0
    ctx->prefetch_enc_fn();
952
953
0
  return ctx->encrypt_fn (ctx, b, a);
954
0
}
955
956
957
/* Bulk encryption of complete blocks in CFB mode.  Caller needs to
958
   make sure that IV is aligned on an unsigned long boundary.  This
959
   function is only intended for the bulk encryption feature of
960
   cipher.c. */
961
static void
962
_gcry_aes_cfb_enc (void *context, unsigned char *iv,
963
                   void *outbuf_arg, const void *inbuf_arg,
964
                   size_t nblocks)
965
0
{
966
0
  RIJNDAEL_context *ctx = context;
967
0
  unsigned char *outbuf = outbuf_arg;
968
0
  const unsigned char *inbuf = inbuf_arg;
969
0
  unsigned int burn_depth = 0;
970
0
  rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;
971
972
0
  if (ctx->prefetch_enc_fn)
973
0
    ctx->prefetch_enc_fn();
974
975
0
  for ( ;nblocks; nblocks-- )
976
0
    {
977
      /* Encrypt the IV. */
978
0
      burn_depth = encrypt_fn (ctx, iv, iv);
979
      /* XOR the input with the IV and store input into IV.  */
980
0
      cipher_block_xor_2dst(outbuf, iv, inbuf, BLOCKSIZE);
981
0
      outbuf += BLOCKSIZE;
982
0
      inbuf  += BLOCKSIZE;
983
0
    }
984
985
0
  if (burn_depth)
986
0
    _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
987
0
}
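
For reference, the recurrence implemented above is C_i = P_i XOR E_K(C_{i-1}) with C_0 = IV; cipher_block_xor_2dst performs the XOR and leaves the fresh ciphertext block in the IV buffer in one pass. A spelled-out per-block sketch under the same assumptions; cfb_enc_ref and its enc_block parameter are illustrative names only, and rijndael_cryptfn_t is the callback type used throughout this file:

static void
cfb_enc_ref (rijndael_cryptfn_t enc_block, RIJNDAEL_context *ctx,
             unsigned char iv[BLOCKSIZE],
             unsigned char *out, const unsigned char *in, size_t nblocks)
{
  size_t i;

  for (; nblocks; nblocks--)
    {
      enc_block (ctx, iv, iv);              /* iv := E_K(C_{i-1})            */
      for (i = 0; i < BLOCKSIZE; i++)
        iv[i] = out[i] = in[i] ^ iv[i];     /* C_i := P_i XOR E_K(C_{i-1})   */
      in  += BLOCKSIZE;
      out += BLOCKSIZE;
    }
}
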
988
989
990
/* Bulk encryption of complete blocks in CBC mode.  Caller needs to
991
   make sure that IV is aligned on an unsigned long boundary.  This
992
   function is only intended for the bulk encryption feature of
993
   cipher.c. */
994
static void
995
_gcry_aes_cbc_enc (void *context, unsigned char *iv,
996
                   void *outbuf_arg, const void *inbuf_arg,
997
                   size_t nblocks, int cbc_mac)
998
0
{
999
0
  RIJNDAEL_context *ctx = context;
1000
0
  unsigned char *outbuf = outbuf_arg;
1001
0
  const unsigned char *inbuf = inbuf_arg;
1002
0
  unsigned char *last_iv;
1003
0
  unsigned int burn_depth = 0;
1004
0
  rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;
1005
1006
0
  if (ctx->prefetch_enc_fn)
1007
0
    ctx->prefetch_enc_fn();
1008
1009
0
  last_iv = iv;
1010
1011
0
  for ( ;nblocks; nblocks-- )
1012
0
    {
1013
0
      cipher_block_xor(outbuf, inbuf, last_iv, BLOCKSIZE);
1014
1015
0
      burn_depth = encrypt_fn (ctx, outbuf, outbuf);
1016
1017
0
      last_iv = outbuf;
1018
0
      inbuf += BLOCKSIZE;
1019
0
      if (!cbc_mac)
1020
0
  outbuf += BLOCKSIZE;
1021
0
    }
1022
1023
0
  if (last_iv != iv)
1024
0
    cipher_block_cpy (iv, last_iv, BLOCKSIZE);
1025
1026
0
  if (burn_depth)
1027
0
    _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
1028
0
}
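
The recurrence here is C_i = E_K(P_i XOR C_{i-1}) with C_0 = IV. Two details worth spelling out: the chaining value is tracked through last_iv so the caller's IV is written only once at the end, and with cbc_mac set the output pointer is never advanced, so successive blocks overwrite each other and only the final block (the CBC-MAC value) is left in OUTBUF. A compressed restatement of the code above, nothing new added:

/* for i = 1 .. n:  C_i = E_K (P_i XOR C_{i-1})        (C_0 = IV)
 * normal CBC:      OUTBUF receives C_1 .. C_n, then IV := C_n
 * cbc_mac != 0:    OUTBUF receives only C_n (the MAC), then IV := C_n  */
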
1029
1030
1031
/* Bulk encryption of complete blocks in CTR mode.  Caller needs to
1032
   make sure that CTR is aligned on a 16 byte boundary if AESNI; the
1033
   minimum alignment is for a u32.  This function is only intended
1034
   for the bulk encryption feature of cipher.c.  CTR is expected to be
1035
   of size BLOCKSIZE. */
1036
static void
1037
_gcry_aes_ctr_enc (void *context, unsigned char *ctr,
1038
                   void *outbuf_arg, const void *inbuf_arg,
1039
                   size_t nblocks)
1040
0
{
1041
0
  RIJNDAEL_context *ctx = context;
1042
0
  unsigned char *outbuf = outbuf_arg;
1043
0
  const unsigned char *inbuf = inbuf_arg;
1044
0
  unsigned int burn_depth = 0;
1045
0
  union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } tmp;
1046
0
  rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;
1047
1048
0
  if (ctx->prefetch_enc_fn)
1049
0
    ctx->prefetch_enc_fn();
1050
1051
0
  for ( ;nblocks; nblocks-- )
1052
0
    {
1053
      /* Encrypt the counter. */
1054
0
      burn_depth = encrypt_fn (ctx, tmp.x1, ctr);
1055
      /* XOR the input with the encrypted counter and store in output.  */
1056
0
      cipher_block_xor(outbuf, tmp.x1, inbuf, BLOCKSIZE);
1057
0
      outbuf += BLOCKSIZE;
1058
0
      inbuf  += BLOCKSIZE;
1059
      /* Increment the counter.  */
1060
0
      cipher_block_add(ctr, 1, BLOCKSIZE);
1061
0
    }
1062
1063
0
  wipememory(&tmp, sizeof(tmp));
1064
1065
0
  if (burn_depth)
1066
0
    _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
1067
0
}
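
cipher_block_add (ctr, 1, BLOCKSIZE) treats the counter block as one big-endian integer and adds one with carry across all 16 bytes. An equivalent open-coded increment for the add-by-one case; ctr_inc_be is an illustrative name, the real helper lives in the shared cipher headers:

static void
ctr_inc_be (unsigned char ctr[BLOCKSIZE])
{
  int i;

  for (i = BLOCKSIZE - 1; i >= 0; i--)
    if (++ctr[i] != 0)
      break;              /* stop as soon as a byte does not wrap to zero */
}
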
1068
1069
1070

1071
#if !defined(USE_ARM_ASM) && !defined(USE_AMD64_ASM)
1072
/* Decrypt one block.  A and B may be the same. */
1073
static unsigned int
1074
do_decrypt_fn (const RIJNDAEL_context *ctx, unsigned char *b,
1075
               const unsigned char *a)
1076
{
1077
#define rk (ctx->keyschdec32)
1078
  int rounds = ctx->rounds;
1079
  int r;
1080
  u32 sa[4];
1081
  u32 sb[4];
1082
1083
  sb[0] = buf_get_le32(a + 0);
1084
  sb[1] = buf_get_le32(a + 4);
1085
  sb[2] = buf_get_le32(a + 8);
1086
  sb[3] = buf_get_le32(a + 12);
1087
1088
  sa[0] = sb[0] ^ rk[rounds][0];
1089
  sa[1] = sb[1] ^ rk[rounds][1];
1090
  sa[2] = sb[2] ^ rk[rounds][2];
1091
  sa[3] = sb[3] ^ rk[rounds][3];
1092
1093
  for (r = rounds - 1; r > 1; r--)
1094
    {
1095
      sb[0] = rol(decT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
1096
      sb[1] = rol(decT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
1097
      sb[2] = rol(decT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
1098
      sb[3] = rol(decT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
1099
      sa[0] = rk[r][0] ^ sb[0];
1100
1101
      sb[1] ^= rol(decT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
1102
      sb[2] ^= rol(decT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
1103
      sb[3] ^= rol(decT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
1104
      sa[0] ^= rol(decT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
1105
      sa[1] = rk[r][1] ^ sb[1];
1106
1107
      sb[2] ^= rol(decT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
1108
      sb[3] ^= rol(decT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
1109
      sa[0] ^= rol(decT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
1110
      sa[1] ^= rol(decT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
1111
      sa[2] = rk[r][2] ^ sb[2];
1112
1113
      sb[3] ^= rol(decT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
1114
      sa[0] ^= rol(decT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
1115
      sa[1] ^= rol(decT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
1116
      sa[2] ^= rol(decT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
1117
      sa[3] = rk[r][3] ^ sb[3];
1118
1119
      r--;
1120
1121
      sb[0] = rol(decT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
1122
      sb[1] = rol(decT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
1123
      sb[2] = rol(decT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
1124
      sb[3] = rol(decT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
1125
      sa[0] = rk[r][0] ^ sb[0];
1126
1127
      sb[1] ^= rol(decT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
1128
      sb[2] ^= rol(decT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
1129
      sb[3] ^= rol(decT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
1130
      sa[0] ^= rol(decT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
1131
      sa[1] = rk[r][1] ^ sb[1];
1132
1133
      sb[2] ^= rol(decT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
1134
      sb[3] ^= rol(decT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
1135
      sa[0] ^= rol(decT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
1136
      sa[1] ^= rol(decT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
1137
      sa[2] = rk[r][2] ^ sb[2];
1138
1139
      sb[3] ^= rol(decT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
1140
      sa[0] ^= rol(decT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
1141
      sa[1] ^= rol(decT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
1142
      sa[2] ^= rol(decT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
1143
      sa[3] = rk[r][3] ^ sb[3];
1144
    }
1145
1146
  sb[0] = rol(decT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
1147
  sb[1] = rol(decT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
1148
  sb[2] = rol(decT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
1149
  sb[3] = rol(decT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
1150
  sa[0] = rk[1][0] ^ sb[0];
1151
1152
  sb[1] ^= rol(decT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
1153
  sb[2] ^= rol(decT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
1154
  sb[3] ^= rol(decT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
1155
  sa[0] ^= rol(decT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
1156
  sa[1] = rk[1][1] ^ sb[1];
1157
1158
  sb[2] ^= rol(decT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
1159
  sb[3] ^= rol(decT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
1160
  sa[0] ^= rol(decT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
1161
  sa[1] ^= rol(decT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
1162
  sa[2] = rk[1][2] ^ sb[2];
1163
1164
  sb[3] ^= rol(decT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
1165
  sa[0] ^= rol(decT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
1166
  sa[1] ^= rol(decT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
1167
  sa[2] ^= rol(decT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
1168
  sa[3] = rk[1][3] ^ sb[3];
1169
1170
  /* Last round is special. */
1171
  sb[0] = (u32)inv_sbox[(byte)(sa[0] >> (0 * 8))] << (0 * 8);
1172
  sb[1] = (u32)inv_sbox[(byte)(sa[0] >> (1 * 8))] << (1 * 8);
1173
  sb[2] = (u32)inv_sbox[(byte)(sa[0] >> (2 * 8))] << (2 * 8);
1174
  sb[3] = (u32)inv_sbox[(byte)(sa[0] >> (3 * 8))] << (3 * 8);
1175
  sa[0] = sb[0] ^ rk[0][0];
1176
1177
  sb[1] ^= (u32)inv_sbox[(byte)(sa[1] >> (0 * 8))] << (0 * 8);
1178
  sb[2] ^= (u32)inv_sbox[(byte)(sa[1] >> (1 * 8))] << (1 * 8);
1179
  sb[3] ^= (u32)inv_sbox[(byte)(sa[1] >> (2 * 8))] << (2 * 8);
1180
  sa[0] ^= (u32)inv_sbox[(byte)(sa[1] >> (3 * 8))] << (3 * 8);
1181
  sa[1] = sb[1] ^ rk[0][1];
1182
1183
  sb[2] ^= (u32)inv_sbox[(byte)(sa[2] >> (0 * 8))] << (0 * 8);
1184
  sb[3] ^= (u32)inv_sbox[(byte)(sa[2] >> (1 * 8))] << (1 * 8);
1185
  sa[0] ^= (u32)inv_sbox[(byte)(sa[2] >> (2 * 8))] << (2 * 8);
1186
  sa[1] ^= (u32)inv_sbox[(byte)(sa[2] >> (3 * 8))] << (3 * 8);
1187
  sa[2] = sb[2] ^ rk[0][2];
1188
1189
  sb[3] ^= (u32)inv_sbox[(byte)(sa[3] >> (0 * 8))] << (0 * 8);
1190
  sa[0] ^= (u32)inv_sbox[(byte)(sa[3] >> (1 * 8))] << (1 * 8);
1191
  sa[1] ^= (u32)inv_sbox[(byte)(sa[3] >> (2 * 8))] << (2 * 8);
1192
  sa[2] ^= (u32)inv_sbox[(byte)(sa[3] >> (3 * 8))] << (3 * 8);
1193
  sa[3] = sb[3] ^ rk[0][3];
1194
1195
  buf_put_le32(b + 0, sa[0]);
1196
  buf_put_le32(b + 4, sa[1]);
1197
  buf_put_le32(b + 8, sa[2]);
1198
  buf_put_le32(b + 12, sa[3]);
1199
#undef rk
1200
1201
  return (56+2*sizeof(int));
1202
}
1203
#endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/
1204
1205
1206
/* Decrypt one block.  AX and BX may be the same. */
1207
static unsigned int
1208
do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx,
1209
            const unsigned char *ax)
1210
0
{
1211
0
#ifdef USE_AMD64_ASM
1212
0
  return _gcry_aes_amd64_decrypt_block(ctx->keyschdec, bx, ax, ctx->rounds,
1213
0
               dec_tables.T);
1214
#elif defined(USE_ARM_ASM)
1215
  return _gcry_aes_arm_decrypt_block(ctx->keyschdec, bx, ax, ctx->rounds,
1216
             dec_tables.T);
1217
#else
1218
  return do_decrypt_fn (ctx, bx, ax);
1219
#endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/
1220
0
}
1221
1222
1223
static inline void
1224
check_decryption_preparation (RIJNDAEL_context *ctx)
1225
0
{
1226
0
  if ( !ctx->decryption_prepared )
1227
0
    {
1228
0
      ctx->prepare_decryption ( ctx );
1229
0
      ctx->decryption_prepared = 1;
1230
0
    }
1231
0
}
1232
1233
1234
static unsigned int
1235
rijndael_decrypt (void *context, byte *b, const byte *a)
1236
0
{
1237
0
  RIJNDAEL_context *ctx = context;
1238
1239
0
  check_decryption_preparation (ctx);
1240
1241
0
  if (ctx->prefetch_dec_fn)
1242
0
    ctx->prefetch_dec_fn();
1243
1244
0
  return ctx->decrypt_fn (ctx, b, a);
1245
0
}
1246
1247
1248
/* Bulk decryption of complete blocks in CFB mode.  Caller needs to
1249
   make sure that IV is aligned on an unsigned long boundary.  This
1250
   function is only intended for the bulk encryption feature of
1251
   cipher.c. */
1252
static void
1253
_gcry_aes_cfb_dec (void *context, unsigned char *iv,
1254
                   void *outbuf_arg, const void *inbuf_arg,
1255
                   size_t nblocks)
1256
0
{
1257
0
  RIJNDAEL_context *ctx = context;
1258
0
  unsigned char *outbuf = outbuf_arg;
1259
0
  const unsigned char *inbuf = inbuf_arg;
1260
0
  unsigned int burn_depth = 0;
1261
0
  rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;
1262
1263
0
  if (ctx->prefetch_enc_fn)
1264
0
    ctx->prefetch_enc_fn();
1265
1266
0
  for ( ;nblocks; nblocks-- )
1267
0
    {
1268
0
      burn_depth = encrypt_fn (ctx, iv, iv);
1269
0
      cipher_block_xor_n_copy(outbuf, iv, inbuf, BLOCKSIZE);
1270
0
      outbuf += BLOCKSIZE;
1271
0
      inbuf  += BLOCKSIZE;
1272
0
    }
1273
1274
0
  if (burn_depth)
1275
0
    _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
1276
0
}
1277
1278
1279
/* Bulk decryption of complete blocks in CBC mode.  Caller needs to
1280
   make sure that IV is aligned on an unsigned long boundary.  This
1281
   function is only intended for the bulk encryption feature of
1282
   cipher.c. */
1283
static void
1284
_gcry_aes_cbc_dec (void *context, unsigned char *iv,
1285
                   void *outbuf_arg, const void *inbuf_arg,
1286
                   size_t nblocks)
1287
0
{
1288
0
  RIJNDAEL_context *ctx = context;
1289
0
  unsigned char *outbuf = outbuf_arg;
1290
0
  const unsigned char *inbuf = inbuf_arg;
1291
0
  unsigned int burn_depth = 0;
1292
0
  unsigned char savebuf[BLOCKSIZE] ATTR_ALIGNED_16;
1293
0
  rijndael_cryptfn_t decrypt_fn = ctx->decrypt_fn;
1294
1295
0
  check_decryption_preparation (ctx);
1296
1297
0
  if (ctx->prefetch_dec_fn)
1298
0
    ctx->prefetch_dec_fn();
1299
1300
0
  for ( ;nblocks; nblocks-- )
1301
0
    {
1302
      /* INBUF is needed later and it may be identical to OUTBUF, so store
1303
    the intermediate result to SAVEBUF.  */
1304
1305
0
      burn_depth = decrypt_fn (ctx, savebuf, inbuf);
1306
1307
0
      cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, BLOCKSIZE);
1308
0
      inbuf += BLOCKSIZE;
1309
0
      outbuf += BLOCKSIZE;
1310
0
    }
1311
1312
0
  wipememory(savebuf, sizeof(savebuf));
1313
1314
0
  if (burn_depth)
1315
0
    _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
1316
0
}
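
The SAVEBUF dance above exists because INBUF and OUTBUF may be the same buffer: the raw block decryption D_K(C_i) must not clobber C_i before C_i has both been XORed away and saved as the next chaining value. As I read the bufhelp helper, the single call to cipher_block_xor_n_copy_2 expands to roughly the following; cbc_dec_combine is an illustrative name and <string.h> is already included by this file:

static void
cbc_dec_combine (unsigned char *outbuf, const unsigned char *savebuf,
                 unsigned char *iv, const unsigned char *inbuf)
{
  unsigned char c_i[BLOCKSIZE];
  unsigned int k;

  memcpy (c_i, inbuf, BLOCKSIZE);        /* keep C_i before OUTBUF overwrites INBUF */
  for (k = 0; k < BLOCKSIZE; k++)
    outbuf[k] = savebuf[k] ^ iv[k];      /* P_i = D_K(C_i) XOR C_{i-1}              */
  memcpy (iv, c_i, BLOCKSIZE);           /* C_i becomes the next chaining value     */
}
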
1317
1318
1319

1320
/* Bulk encryption/decryption of complete blocks in OCB mode. */
1321
static size_t
1322
_gcry_aes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
1323
                     const void *inbuf_arg, size_t nblocks, int encrypt)
1324
0
{
1325
0
  RIJNDAEL_context *ctx = (void *)&c->context.c;
1326
0
  unsigned char *outbuf = outbuf_arg;
1327
0
  const unsigned char *inbuf = inbuf_arg;
1328
0
  unsigned int burn_depth = 0;
1329
1330
0
  if (encrypt)
1331
0
    {
1332
0
      union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp;
1333
0
      rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;
1334
1335
0
      if (ctx->prefetch_enc_fn)
1336
0
        ctx->prefetch_enc_fn();
1337
1338
0
      for ( ;nblocks; nblocks-- )
1339
0
        {
1340
0
          u64 i = ++c->u_mode.ocb.data_nblocks;
1341
0
          const unsigned char *l = ocb_get_l(c, i);
1342
1343
          /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */
1344
0
          cipher_block_xor_1 (c->u_iv.iv, l, BLOCKSIZE);
1345
0
          cipher_block_cpy (l_tmp.x1, inbuf, BLOCKSIZE);
1346
          /* Checksum_i = Checksum_{i-1} xor P_i  */
1347
0
          cipher_block_xor_1 (c->u_ctr.ctr, l_tmp.x1, BLOCKSIZE);
1348
          /* C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i)  */
1349
0
          cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE);
1350
0
          burn_depth = encrypt_fn (ctx, l_tmp.x1, l_tmp.x1);
1351
0
          cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE);
1352
0
          cipher_block_cpy (outbuf, l_tmp.x1, BLOCKSIZE);
1353
1354
0
          inbuf += BLOCKSIZE;
1355
0
          outbuf += BLOCKSIZE;
1356
0
        }
1357
0
    }
1358
0
  else
1359
0
    {
1360
0
      union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp;
1361
0
      rijndael_cryptfn_t decrypt_fn = ctx->decrypt_fn;
1362
1363
0
      check_decryption_preparation (ctx);
1364
1365
0
      if (ctx->prefetch_dec_fn)
1366
0
        ctx->prefetch_dec_fn();
1367
1368
0
      for ( ;nblocks; nblocks-- )
1369
0
        {
1370
0
          u64 i = ++c->u_mode.ocb.data_nblocks;
1371
0
          const unsigned char *l = ocb_get_l(c, i);
1372
1373
          /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */
1374
0
          cipher_block_xor_1 (c->u_iv.iv, l, BLOCKSIZE);
1375
0
          cipher_block_cpy (l_tmp.x1, inbuf, BLOCKSIZE);
1376
          /* C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i)  */
1377
0
          cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE);
1378
0
          burn_depth = decrypt_fn (ctx, l_tmp.x1, l_tmp.x1);
1379
0
          cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE);
1380
          /* Checksum_i = Checksum_{i-1} xor P_i  */
1381
0
          cipher_block_xor_1 (c->u_ctr.ctr, l_tmp.x1, BLOCKSIZE);
1382
0
          cipher_block_cpy (outbuf, l_tmp.x1, BLOCKSIZE);
1383
1384
0
          inbuf += BLOCKSIZE;
1385
0
          outbuf += BLOCKSIZE;
1386
0
        }
1387
0
    }
1388
1389
0
  if (burn_depth)
1390
0
    _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
1391
1392
0
  return 0;
1393
0
}
1394
1395
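
The encryption branch above implements the per-block OCB relations Offset_i = Offset_{i-1} xor L_{ntz(i)}, Checksum_i = Checksum_{i-1} xor P_i and C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i). A standalone sketch of one encryption step, with encrypt_one_block() as an illustrative placeholder for ctx->encrypt_fn and the precomputed L value handed in by the caller (the lookup ocb_get_l() performs in the code above):

#include <string.h>

#define BLOCKSIZE 16

/* Illustrative placeholder for the raw AES block encryption. */
void encrypt_one_block (void *ctx, unsigned char *out, const unsigned char *in);

static void
xor_block (unsigned char *dst, const unsigned char *src)
{
  int i;
  for (i = 0; i < BLOCKSIZE; i++)
    dst[i] ^= src[i];
}

/* One OCB encryption step; OFFSET, CHECKSUM and L_NTZ_I are maintained
   by the caller, as cipher.c does for the bulk routine above.  */
static void
ocb_enc_block (void *ctx, unsigned char *offset, unsigned char *checksum,
               const unsigned char *l_ntz_i,
               unsigned char *c_out, const unsigned char *p_in)
{
  unsigned char tmp[BLOCKSIZE];

  xor_block (offset, l_ntz_i);     /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */
  memcpy (tmp, p_in, BLOCKSIZE);
  xor_block (checksum, tmp);       /* Checksum_i = Checksum_{i-1} xor P_i */
  xor_block (tmp, offset);         /* P_i xor Offset_i */
  encrypt_one_block (ctx, tmp, tmp);
  xor_block (tmp, offset);         /* C_i = Offset_i xor ENCIPHER(...) */
  memcpy (c_out, tmp, BLOCKSIZE);
}
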
1396
/* Bulk authentication of complete blocks in OCB mode. */
1397
static size_t
1398
_gcry_aes_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, size_t nblocks)
1399
0
{
1400
0
  RIJNDAEL_context *ctx = (void *)&c->context.c;
1401
0
  const unsigned char *abuf = abuf_arg;
1402
0
  unsigned int burn_depth = 0;
1403
0
  union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp;
1404
0
  rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;
1405
1406
0
  if (ctx->prefetch_enc_fn)
1407
0
    ctx->prefetch_enc_fn();
1408
1409
0
  for ( ;nblocks; nblocks-- )
1410
0
    {
1411
0
      u64 i = ++c->u_mode.ocb.aad_nblocks;
1412
0
      const unsigned char *l = ocb_get_l(c, i);
1413
1414
      /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */
1415
0
      cipher_block_xor_1 (c->u_mode.ocb.aad_offset, l, BLOCKSIZE);
1416
      /* Sum_i = Sum_{i-1} xor ENCIPHER(K, A_i xor Offset_i)  */
1417
0
      cipher_block_xor (l_tmp.x1, c->u_mode.ocb.aad_offset, abuf,
1418
0
                        BLOCKSIZE);
1419
0
      burn_depth = encrypt_fn (ctx, l_tmp.x1, l_tmp.x1);
1420
0
      cipher_block_xor_1 (c->u_mode.ocb.aad_sum, l_tmp.x1, BLOCKSIZE);
1421
1422
0
      abuf += BLOCKSIZE;
1423
0
    }
1424
1425
0
  wipememory(&l_tmp, sizeof(l_tmp));
1426
1427
0
  if (burn_depth)
1428
0
    _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
1429
1430
0
  return 0;
1431
0
}
1432
1433
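
Authentication follows the same offset chain as the data path but folds the encrypted block into the running tag instead of emitting ciphertext: Sum_i = Sum_{i-1} xor ENCIPHER(K, A_i xor Offset_i). A compact sketch of that step, again with an illustrative encrypt_one_block() placeholder rather than a real libgcrypt call:

#include <string.h>

#define BLOCKSIZE 16

/* Illustrative placeholder for the raw AES block encryption. */
void encrypt_one_block (void *ctx, unsigned char *out, const unsigned char *in);

/* One OCB AAD step:  Offset ^= L_{ntz(i)};  Sum ^= E_K(A_i xor Offset).  */
static void
ocb_auth_block (void *ctx, unsigned char *aad_offset, unsigned char *aad_sum,
                const unsigned char *l_ntz_i, const unsigned char *a_in)
{
  unsigned char tmp[BLOCKSIZE];
  int i;

  for (i = 0; i < BLOCKSIZE; i++)
    aad_offset[i] ^= l_ntz_i[i];          /* advance the AAD offset */
  for (i = 0; i < BLOCKSIZE; i++)
    tmp[i] = a_in[i] ^ aad_offset[i];     /* A_i xor Offset_i */
  encrypt_one_block (ctx, tmp, tmp);
  for (i = 0; i < BLOCKSIZE; i++)
    aad_sum[i] ^= tmp[i];                 /* accumulate into Sum */
}
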
1434
/* Bulk encryption/decryption of complete blocks in XTS mode. */
1435
static void
1436
_gcry_aes_xts_crypt (void *context, unsigned char *tweak,
1437
                     void *outbuf_arg, const void *inbuf_arg,
1438
                     size_t nblocks, int encrypt)
1439
0
{
1440
0
  RIJNDAEL_context *ctx = context;
1441
0
  unsigned char *outbuf = outbuf_arg;
1442
0
  const unsigned char *inbuf = inbuf_arg;
1443
0
  unsigned int burn_depth = 0;
1444
0
  rijndael_cryptfn_t crypt_fn;
1445
0
  u64 tweak_lo, tweak_hi, tweak_next_lo, tweak_next_hi, tmp_lo, tmp_hi, carry;
1446
1447
0
  if (encrypt)
1448
0
    {
1449
0
      if (ctx->prefetch_enc_fn)
1450
0
        ctx->prefetch_enc_fn();
1451
1452
0
      crypt_fn = ctx->encrypt_fn;
1453
0
    }
1454
0
  else
1455
0
    {
1456
0
      check_decryption_preparation (ctx);
1457
1458
0
      if (ctx->prefetch_dec_fn)
1459
0
        ctx->prefetch_dec_fn();
1460
1461
0
      crypt_fn = ctx->decrypt_fn;
1462
0
    }
1463
1464
0
  tweak_next_lo = buf_get_le64 (tweak + 0);
1465
0
  tweak_next_hi = buf_get_le64 (tweak + 8);
1466
1467
0
  while (nblocks)
1468
0
    {
1469
0
      tweak_lo = tweak_next_lo;
1470
0
      tweak_hi = tweak_next_hi;
1471
1472
      /* Xor-Encrypt/Decrypt-Xor block. */
1473
0
      tmp_lo = buf_get_le64 (inbuf + 0) ^ tweak_lo;
1474
0
      tmp_hi = buf_get_le64 (inbuf + 8) ^ tweak_hi;
1475
1476
0
      buf_put_le64 (outbuf + 0, tmp_lo);
1477
0
      buf_put_le64 (outbuf + 8, tmp_hi);
1478
1479
      /* Generate next tweak. */
1480
0
      carry = -(tweak_next_hi >> 63) & 0x87;
1481
0
      tweak_next_hi = (tweak_next_hi << 1) + (tweak_next_lo >> 63);
1482
0
      tweak_next_lo = (tweak_next_lo << 1) ^ carry;
1483
1484
0
      burn_depth = crypt_fn (ctx, outbuf, outbuf);
1485
1486
0
      buf_put_le64 (outbuf + 0, buf_get_le64 (outbuf + 0) ^ tweak_lo);
1487
0
      buf_put_le64 (outbuf + 8, buf_get_le64 (outbuf + 8) ^ tweak_hi);
1488
1489
0
      outbuf += GCRY_XTS_BLOCK_LEN;
1490
0
      inbuf += GCRY_XTS_BLOCK_LEN;
1491
0
      nblocks--;
1492
0
    }
1493
1494
0
  buf_put_le64 (tweak + 0, tweak_next_lo);
1495
0
  buf_put_le64 (tweak + 8, tweak_next_hi);
1496
1497
0
  if (burn_depth)
1498
0
    _gcry_burn_stack (burn_depth + 5 * sizeof(void *));
1499
0
}
1500
1501
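
The "generate next tweak" step above multiplies the 128-bit tweak by x in GF(2^128) with the XTS reduction polynomial x^128 + x^7 + x^2 + x + 1: the whole value shifts left one bit, and if the bit shifted out of the top was set, the constant 0x87 is folded into the low byte. The branch-free form computes carry = -(hi >> 63) & 0x87, which is 0x87 exactly when the top bit is set. A standalone sketch of just that step (it assumes a little-endian host for brevity; the code above uses the endian-neutral buf_get_le64/buf_put_le64 helpers instead):

#include <stdint.h>
#include <string.h>

/* Multiply a 128-bit XTS tweak (stored little-endian) by x in GF(2^128),
   mirroring the branch-free carry trick used in the loop above.  */
static void
xts_gfmul_by_x (unsigned char tweak[16])
{
  uint64_t lo, hi, carry;

  memcpy (&lo, tweak + 0, 8);       /* little-endian host assumed here */
  memcpy (&hi, tweak + 8, 8);

  carry = -(hi >> 63) & 0x87;       /* 0x87 iff the top bit is set */
  hi = (hi << 1) | (lo >> 63);
  lo = (lo << 1) ^ carry;

  memcpy (tweak + 0, &lo, 8);
  memcpy (tweak + 8, &hi, 8);
}
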

1502
/* Run the self-tests for AES 128.  Returns NULL on success. */
1503
static const char*
1504
selftest_basic_128 (void)
1505
0
{
1506
0
  RIJNDAEL_context *ctx;
1507
0
  unsigned char ctxmem[sizeof(*ctx) + 16];
1508
0
  unsigned char scratch[16];
1509
0
  cipher_bulk_ops_t bulk_ops;
1510
1511
  /* The test vectors are from the AES supplied ones; more or less
1512
     randomly taken from ecb_tbl.txt (I=42,81,14) */
1513
0
#if 1
1514
0
  static const unsigned char plaintext_128[16] =
1515
0
    {
1516
0
      0x01,0x4B,0xAF,0x22,0x78,0xA6,0x9D,0x33,
1517
0
      0x1D,0x51,0x80,0x10,0x36,0x43,0xE9,0x9A
1518
0
    };
1519
0
  static const unsigned char key_128[16] =
1520
0
    {
1521
0
      0xE8,0xE9,0xEA,0xEB,0xED,0xEE,0xEF,0xF0,
1522
0
      0xF2,0xF3,0xF4,0xF5,0xF7,0xF8,0xF9,0xFA
1523
0
    };
1524
0
  static const unsigned char ciphertext_128[16] =
1525
0
    {
1526
0
      0x67,0x43,0xC3,0xD1,0x51,0x9A,0xB4,0xF2,
1527
0
      0xCD,0x9A,0x78,0xAB,0x09,0xA5,0x11,0xBD
1528
0
    };
1529
#else
1530
  /* Test vectors from fips-197, appendix C. */
1531
# warning debug test vectors in use
1532
  static const unsigned char plaintext_128[16] =
1533
    {
1534
      0x00,0x11,0x22,0x33,0x44,0x55,0x66,0x77,
1535
      0x88,0x99,0xaa,0xbb,0xcc,0xdd,0xee,0xff
1536
    };
1537
  static const unsigned char key_128[16] =
1538
    {
1539
      0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,
1540
      0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
1541
      /* 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, */
1542
      /* 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c */
1543
    };
1544
  static const unsigned char ciphertext_128[16] =
1545
    {
1546
      0x69,0xc4,0xe0,0xd8,0x6a,0x7b,0x04,0x30,
1547
      0xd8,0xcd,0xb7,0x80,0x70,0xb4,0xc5,0x5a
1548
    };
1549
#endif
1550
1551
0
  ctx = (void *)(ctxmem + ((16 - ((uintptr_t)ctxmem & 15)) & 15));
1552
1553
0
  rijndael_setkey (ctx, key_128, sizeof (key_128), &bulk_ops);
1554
0
  rijndael_encrypt (ctx, scratch, plaintext_128);
1555
0
  if (memcmp (scratch, ciphertext_128, sizeof (ciphertext_128)))
1556
0
    {
1557
0
      return "AES-128 test encryption failed.";
1558
0
    }
1559
0
  rijndael_decrypt (ctx, scratch, scratch);
1560
0
  if (memcmp (scratch, plaintext_128, sizeof (plaintext_128)))
1561
0
    return "AES-128 test decryption failed.";
1562
1563
0
  return NULL;
1564
0
}
1565
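
The ctxmem/ctx dance above carves a 16-byte-aligned context out of an over-allocated stack buffer: the expression adds however many bytes are needed to round the address up to the next multiple of 16, or nothing if it is already aligned. The same idiom in isolation, with an illustrative helper name:

#include <stdint.h>

/* Round PTR up to the next 16-byte boundary inside a buffer that was
   over-allocated by at least 15 bytes, as the selftests above do.  */
static void *
align16 (void *ptr)
{
  uintptr_t p = (uintptr_t)ptr;
  return (void *)(p + ((16 - (p & 15)) & 15));
}

/* Usage sketch:
     unsigned char mem[sizeof (RIJNDAEL_context) + 16];
     RIJNDAEL_context *ctx = align16 (mem);    now ((uintptr_t)ctx & 15) == 0 */
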
1566
/* Run the self-tests for AES 192.  Returns NULL on success. */
1567
static const char*
1568
selftest_basic_192 (void)
1569
0
{
1570
0
  RIJNDAEL_context *ctx;
1571
0
  unsigned char ctxmem[sizeof(*ctx) + 16];
1572
0
  unsigned char scratch[16];
1573
0
  cipher_bulk_ops_t bulk_ops;
1574
1575
0
  static unsigned char plaintext_192[16] =
1576
0
    {
1577
0
      0x76,0x77,0x74,0x75,0xF1,0xF2,0xF3,0xF4,
1578
0
      0xF8,0xF9,0xE6,0xE7,0x77,0x70,0x71,0x72
1579
0
    };
1580
0
  static unsigned char key_192[24] =
1581
0
    {
1582
0
      0x04,0x05,0x06,0x07,0x09,0x0A,0x0B,0x0C,
1583
0
      0x0E,0x0F,0x10,0x11,0x13,0x14,0x15,0x16,
1584
0
      0x18,0x19,0x1A,0x1B,0x1D,0x1E,0x1F,0x20
1585
0
    };
1586
0
  static const unsigned char ciphertext_192[16] =
1587
0
    {
1588
0
      0x5D,0x1E,0xF2,0x0D,0xCE,0xD6,0xBC,0xBC,
1589
0
      0x12,0x13,0x1A,0xC7,0xC5,0x47,0x88,0xAA
1590
0
    };
1591
1592
0
  ctx = (void *)(ctxmem + ((16 - ((uintptr_t)ctxmem & 15)) & 15));
1593
1594
0
  rijndael_setkey (ctx, key_192, sizeof(key_192), &bulk_ops);
1595
0
  rijndael_encrypt (ctx, scratch, plaintext_192);
1596
0
  if (memcmp (scratch, ciphertext_192, sizeof (ciphertext_192)))
1597
0
    {
1598
0
      return "AES-192 test encryption failed.";
1599
0
    }
1600
0
  rijndael_decrypt (ctx, scratch, scratch);
1601
0
  if (memcmp (scratch, plaintext_192, sizeof (plaintext_192)))
1602
0
    return "AES-192 test decryption failed.";
1603
1604
0
  return NULL;
1605
0
}
1606
1607
1608
/* Run the self-tests for AES 256.  Returns NULL on success. */
1609
static const char*
1610
selftest_basic_256 (void)
1611
0
{
1612
0
  RIJNDAEL_context *ctx;
1613
0
  unsigned char ctxmem[sizeof(*ctx) + 16];
1614
0
  unsigned char scratch[16];
1615
0
  cipher_bulk_ops_t bulk_ops;
1616
1617
0
  static unsigned char plaintext_256[16] =
1618
0
    {
1619
0
      0x06,0x9A,0x00,0x7F,0xC7,0x6A,0x45,0x9F,
1620
0
      0x98,0xBA,0xF9,0x17,0xFE,0xDF,0x95,0x21
1621
0
    };
1622
0
  static unsigned char key_256[32] =
1623
0
    {
1624
0
      0x08,0x09,0x0A,0x0B,0x0D,0x0E,0x0F,0x10,
1625
0
      0x12,0x13,0x14,0x15,0x17,0x18,0x19,0x1A,
1626
0
      0x1C,0x1D,0x1E,0x1F,0x21,0x22,0x23,0x24,
1627
0
      0x26,0x27,0x28,0x29,0x2B,0x2C,0x2D,0x2E
1628
0
    };
1629
0
  static const unsigned char ciphertext_256[16] =
1630
0
    {
1631
0
      0x08,0x0E,0x95,0x17,0xEB,0x16,0x77,0x71,
1632
0
      0x9A,0xCF,0x72,0x80,0x86,0x04,0x0A,0xE3
1633
0
    };
1634
1635
0
  ctx = (void *)(ctxmem + ((16 - ((uintptr_t)ctxmem & 15)) & 15));
1636
1637
0
  rijndael_setkey (ctx, key_256, sizeof(key_256), &bulk_ops);
1638
0
  rijndael_encrypt (ctx, scratch, plaintext_256);
1639
0
  if (memcmp (scratch, ciphertext_256, sizeof (ciphertext_256)))
1640
0
    {
1641
0
      return "AES-256 test encryption failed.";
1642
0
    }
1643
0
  rijndael_decrypt (ctx, scratch, scratch);
1644
0
  if (memcmp (scratch, plaintext_256, sizeof (plaintext_256)))
1645
0
    return "AES-256 test decryption failed.";
1646
1647
0
  return NULL;
1648
0
}
1649
1650
1651
/* Run all the self-tests and return NULL on success.  This function
1652
   is used for the on-the-fly self-tests. */
1653
static const char *
1654
selftest (void)
1655
0
{
1656
0
  const char *r;
1657
1658
0
  if ( (r = selftest_basic_128 ())
1659
0
       || (r = selftest_basic_192 ())
1660
0
       || (r = selftest_basic_256 ()) )
1661
0
    return r;
1662
1663
0
  return r;
1664
0
}
1665
1666
1667
/* SP800-38a.pdf for AES-128.  */
1668
static const char *
1669
selftest_fips_128_38a (int requested_mode)
1670
0
{
1671
0
  static const struct tv
1672
0
  {
1673
0
    int mode;
1674
0
    const unsigned char key[16];
1675
0
    const unsigned char iv[16];
1676
0
    struct
1677
0
    {
1678
0
      const unsigned char input[16];
1679
0
      const unsigned char output[16];
1680
0
    } data[4];
1681
0
  } tv[2] =
1682
0
    {
1683
0
      {
1684
0
        GCRY_CIPHER_MODE_CFB,  /* F.3.13, CFB128-AES128 */
1685
0
        { 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6,
1686
0
          0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c },
1687
0
        { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
1688
0
          0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
1689
0
        {
1690
0
          { { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
1691
0
              0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a },
1692
0
            { 0x3b, 0x3f, 0xd9, 0x2e, 0xb7, 0x2d, 0xad, 0x20,
1693
0
              0x33, 0x34, 0x49, 0xf8, 0xe8, 0x3c, 0xfb, 0x4a } },
1694
1695
0
          { { 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
1696
0
              0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51 },
1697
0
            { 0xc8, 0xa6, 0x45, 0x37, 0xa0, 0xb3, 0xa9, 0x3f,
1698
0
              0xcd, 0xe3, 0xcd, 0xad, 0x9f, 0x1c, 0xe5, 0x8b } },
1699
1700
0
          { { 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
1701
0
              0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef },
1702
0
            { 0x26, 0x75, 0x1f, 0x67, 0xa3, 0xcb, 0xb1, 0x40,
1703
0
              0xb1, 0x80, 0x8c, 0xf1, 0x87, 0xa4, 0xf4, 0xdf } },
1704
1705
0
          { { 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
1706
0
              0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
1707
0
            { 0xc0, 0x4b, 0x05, 0x35, 0x7c, 0x5d, 0x1c, 0x0e,
1708
0
              0xea, 0xc4, 0xc6, 0x6f, 0x9f, 0xf7, 0xf2, 0xe6 } }
1709
0
        }
1710
0
      },
1711
0
      {
1712
0
        GCRY_CIPHER_MODE_OFB,
1713
0
        { 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6,
1714
0
          0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c },
1715
0
        { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
1716
0
          0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
1717
0
        {
1718
0
          { { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
1719
0
              0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a },
1720
0
            { 0x3b, 0x3f, 0xd9, 0x2e, 0xb7, 0x2d, 0xad, 0x20,
1721
0
              0x33, 0x34, 0x49, 0xf8, 0xe8, 0x3c, 0xfb, 0x4a } },
1722
1723
0
          { { 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
1724
0
              0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51 },
1725
0
            { 0x77, 0x89, 0x50, 0x8d, 0x16, 0x91, 0x8f, 0x03,
1726
0
              0xf5, 0x3c, 0x52, 0xda, 0xc5, 0x4e, 0xd8, 0x25 } },
1727
1728
0
          { { 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
1729
0
              0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef },
1730
0
            { 0x97, 0x40, 0x05, 0x1e, 0x9c, 0x5f, 0xec, 0xf6,
1731
0
              0x43, 0x44, 0xf7, 0xa8, 0x22, 0x60, 0xed, 0xcc } },
1732
1733
0
          { { 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
1734
0
              0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
1735
0
            { 0x30, 0x4c, 0x65, 0x28, 0xf6, 0x59, 0xc7, 0x78,
1736
0
              0x66, 0xa5, 0x10, 0xd9, 0xc1, 0xd6, 0xae, 0x5e } },
1737
0
        }
1738
0
      }
1739
0
    };
1740
0
  unsigned char scratch[16];
1741
0
  gpg_error_t err;
1742
0
  int tvi, idx;
1743
0
  gcry_cipher_hd_t hdenc = NULL;
1744
0
  gcry_cipher_hd_t hddec = NULL;
1745
1746
0
#define Fail(a) do {           \
1747
0
    _gcry_cipher_close (hdenc);  \
1748
0
    _gcry_cipher_close (hddec);  \
1749
0
    return a;                    \
1750
0
  } while (0)
1751
1752
0
  gcry_assert (sizeof tv[0].data[0].input == sizeof scratch);
1753
0
  gcry_assert (sizeof tv[0].data[0].output == sizeof scratch);
1754
1755
0
  for (tvi=0; tvi < DIM (tv); tvi++)
1756
0
    if (tv[tvi].mode == requested_mode)
1757
0
      break;
1758
0
  if (tvi == DIM (tv))
1759
0
    Fail ("no test data for this mode");
1760
1761
0
  err = _gcry_cipher_open (&hdenc, GCRY_CIPHER_AES, tv[tvi].mode, 0);
1762
0
  if (err)
1763
0
    Fail ("open");
1764
0
  err = _gcry_cipher_open (&hddec, GCRY_CIPHER_AES, tv[tvi].mode, 0);
1765
0
  if (err)
1766
0
    Fail ("open");
1767
0
  err = _gcry_cipher_setkey (hdenc, tv[tvi].key,  sizeof tv[tvi].key);
1768
0
  if (!err)
1769
0
    err = _gcry_cipher_setkey (hddec, tv[tvi].key, sizeof tv[tvi].key);
1770
0
  if (err)
1771
0
    Fail ("set key");
1772
0
  err = _gcry_cipher_setiv (hdenc, tv[tvi].iv, sizeof tv[tvi].iv);
1773
0
  if (!err)
1774
0
    err = _gcry_cipher_setiv (hddec, tv[tvi].iv, sizeof tv[tvi].iv);
1775
0
  if (err)
1776
0
    Fail ("set IV");
1777
0
  for (idx=0; idx < DIM (tv[tvi].data); idx++)
1778
0
    {
1779
0
      err = _gcry_cipher_encrypt (hdenc, scratch, sizeof scratch,
1780
0
                                  tv[tvi].data[idx].input,
1781
0
                                  sizeof tv[tvi].data[idx].input);
1782
0
      if (err)
1783
0
        Fail ("encrypt command");
1784
0
      if (memcmp (scratch, tv[tvi].data[idx].output, sizeof scratch))
1785
0
        Fail ("encrypt mismatch");
1786
0
      err = _gcry_cipher_decrypt (hddec, scratch, sizeof scratch,
1787
0
                                  tv[tvi].data[idx].output,
1788
0
                                  sizeof tv[tvi].data[idx].output);
1789
0
      if (err)
1790
0
        Fail ("decrypt command");
1791
0
      if (memcmp (scratch, tv[tvi].data[idx].input, sizeof scratch))
1792
0
        Fail ("decrypt mismatch");
1793
0
    }
1794
1795
0
#undef Fail
1796
0
  _gcry_cipher_close (hdenc);
1797
0
  _gcry_cipher_close (hddec);
1798
0
  return NULL;
1799
0
}
1800
1801
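
The known-answer loop above drives the internal _gcry_cipher_* entry points directly. The same F.3.13 CFB128-AES128 vector can be checked through the public libgcrypt API; a minimal usage sketch, with error handling reduced to asserts and the values taken from the first table entry above:

#include <assert.h>
#include <string.h>
#include <gcrypt.h>

int
main (void)
{
  static const unsigned char key[16] =
    { 0x2b,0x7e,0x15,0x16,0x28,0xae,0xd2,0xa6,
      0xab,0xf7,0x15,0x88,0x09,0xcf,0x4f,0x3c };
  static const unsigned char iv[16] =
    { 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,
      0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f };
  static const unsigned char plain[16] =
    { 0x6b,0xc1,0xbe,0xe2,0x2e,0x40,0x9f,0x96,
      0xe9,0x3d,0x7e,0x11,0x73,0x93,0x17,0x2a };
  static const unsigned char expect[16] =
    { 0x3b,0x3f,0xd9,0x2e,0xb7,0x2d,0xad,0x20,
      0x33,0x34,0x49,0xf8,0xe8,0x3c,0xfb,0x4a };
  unsigned char out[16];
  gcry_cipher_hd_t hd;

  assert (gcry_check_version (NULL));
  assert (!gcry_cipher_open (&hd, GCRY_CIPHER_AES, GCRY_CIPHER_MODE_CFB, 0));
  assert (!gcry_cipher_setkey (hd, key, sizeof key));
  assert (!gcry_cipher_setiv (hd, iv, sizeof iv));
  assert (!gcry_cipher_encrypt (hd, out, sizeof out, plain, sizeof plain));
  assert (!memcmp (out, expect, sizeof expect));
  gcry_cipher_close (hd);
  return 0;
}
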
1802
/* Complete selftest for AES-128 with all modes and driver code.  */
1803
static gpg_err_code_t
1804
selftest_fips_128 (int extended, selftest_report_func_t report)
1805
0
{
1806
0
  const char *what;
1807
0
  const char *errtxt;
1808
1809
0
  what = "low-level";
1810
0
  errtxt = selftest_basic_128 ();
1811
0
  if (errtxt)
1812
0
    goto failed;
1813
1814
0
  if (extended)
1815
0
    {
1816
0
      what = "cfb";
1817
0
      errtxt = selftest_fips_128_38a (GCRY_CIPHER_MODE_CFB);
1818
0
      if (errtxt)
1819
0
        goto failed;
1820
1821
0
      what = "ofb";
1822
0
      errtxt = selftest_fips_128_38a (GCRY_CIPHER_MODE_OFB);
1823
0
      if (errtxt)
1824
0
        goto failed;
1825
0
    }
1826
1827
0
  return 0; /* Succeeded. */
1828
1829
0
 failed:
1830
0
  if (report)
1831
0
    report ("cipher", GCRY_CIPHER_AES128, what, errtxt);
1832
0
  return GPG_ERR_SELFTEST_FAILED;
1833
0
}
1834
1835
/* Complete selftest for AES-192.  */
1836
static gpg_err_code_t
1837
selftest_fips_192 (int extended, selftest_report_func_t report)
1838
0
{
1839
0
  const char *what;
1840
0
  const char *errtxt;
1841
1842
0
  (void)extended; /* No extended tests available.  */
1843
1844
0
  what = "low-level";
1845
0
  errtxt = selftest_basic_192 ();
1846
0
  if (errtxt)
1847
0
    goto failed;
1848
1849
1850
0
  return 0; /* Succeeded. */
1851
1852
0
 failed:
1853
0
  if (report)
1854
0
    report ("cipher", GCRY_CIPHER_AES192, what, errtxt);
1855
0
  return GPG_ERR_SELFTEST_FAILED;
1856
0
}
1857
1858
1859
/* Complete selftest for AES-256.  */
1860
static gpg_err_code_t
1861
selftest_fips_256 (int extended, selftest_report_func_t report)
1862
0
{
1863
0
  const char *what;
1864
0
  const char *errtxt;
1865
1866
0
  (void)extended; /* No extended tests available.  */
1867
1868
0
  what = "low-level";
1869
0
  errtxt = selftest_basic_256 ();
1870
0
  if (errtxt)
1871
0
    goto failed;
1872
1873
0
  return 0; /* Succeeded. */
1874
1875
0
 failed:
1876
0
  if (report)
1877
0
    report ("cipher", GCRY_CIPHER_AES256, what, errtxt);
1878
0
  return GPG_ERR_SELFTEST_FAILED;
1879
0
}
1880
1881
1882
1883
/* Run a full self-test for ALGO and return 0 on success.  */
1884
static gpg_err_code_t
1885
run_selftests (int algo, int extended, selftest_report_func_t report)
1886
0
{
1887
0
  gpg_err_code_t ec;
1888
1889
0
  switch (algo)
1890
0
    {
1891
0
    case GCRY_CIPHER_AES128:
1892
0
      ec = selftest_fips_128 (extended, report);
1893
0
      break;
1894
0
    case GCRY_CIPHER_AES192:
1895
0
      ec = selftest_fips_192 (extended, report);
1896
0
      break;
1897
0
    case GCRY_CIPHER_AES256:
1898
0
      ec = selftest_fips_256 (extended, report);
1899
0
      break;
1900
0
    default:
1901
0
      ec = GPG_ERR_CIPHER_ALGO;
1902
0
      break;
1903
1904
0
    }
1905
0
  return ec;
1906
0
}
1907
1908
1909

1910
1911
static const char *rijndael_names[] =
1912
  {
1913
    "RIJNDAEL",
1914
    "AES128",
1915
    "AES-128",
1916
    NULL
1917
  };
1918
1919
static const gcry_cipher_oid_spec_t rijndael_oids[] =
1920
  {
1921
    { "2.16.840.1.101.3.4.1.1", GCRY_CIPHER_MODE_ECB },
1922
    { "2.16.840.1.101.3.4.1.2", GCRY_CIPHER_MODE_CBC },
1923
    { "2.16.840.1.101.3.4.1.3", GCRY_CIPHER_MODE_OFB },
1924
    { "2.16.840.1.101.3.4.1.4", GCRY_CIPHER_MODE_CFB },
1925
    { "2.16.840.1.101.3.4.1.6", GCRY_CIPHER_MODE_GCM },
1926
    { "2.16.840.1.101.3.4.1.7", GCRY_CIPHER_MODE_CCM },
1927
    { NULL }
1928
  };
1929
1930
gcry_cipher_spec_t _gcry_cipher_spec_aes =
1931
  {
1932
    GCRY_CIPHER_AES, {0, 1},
1933
    "AES", rijndael_names, rijndael_oids, 16, 128,
1934
    sizeof (RIJNDAEL_context),
1935
    rijndael_setkey, rijndael_encrypt, rijndael_decrypt,
1936
    NULL, NULL,
1937
    run_selftests
1938
  };
1939
1940
1941
static const char *rijndael192_names[] =
1942
  {
1943
    "RIJNDAEL192",
1944
    "AES-192",
1945
    NULL
1946
  };
1947
1948
static const gcry_cipher_oid_spec_t rijndael192_oids[] =
1949
  {
1950
    { "2.16.840.1.101.3.4.1.21", GCRY_CIPHER_MODE_ECB },
1951
    { "2.16.840.1.101.3.4.1.22", GCRY_CIPHER_MODE_CBC },
1952
    { "2.16.840.1.101.3.4.1.23", GCRY_CIPHER_MODE_OFB },
1953
    { "2.16.840.1.101.3.4.1.24", GCRY_CIPHER_MODE_CFB },
1954
    { "2.16.840.1.101.3.4.1.26", GCRY_CIPHER_MODE_GCM },
1955
    { "2.16.840.1.101.3.4.1.27", GCRY_CIPHER_MODE_CCM },
1956
    { NULL }
1957
  };
1958
1959
gcry_cipher_spec_t _gcry_cipher_spec_aes192 =
1960
  {
1961
    GCRY_CIPHER_AES192, {0, 1},
1962
    "AES192", rijndael192_names, rijndael192_oids, 16, 192,
1963
    sizeof (RIJNDAEL_context),
1964
    rijndael_setkey, rijndael_encrypt, rijndael_decrypt,
1965
    NULL, NULL,
1966
    run_selftests
1967
  };
1968
1969
1970
static const char *rijndael256_names[] =
1971
  {
1972
    "RIJNDAEL256",
1973
    "AES-256",
1974
    NULL
1975
  };
1976
1977
static const gcry_cipher_oid_spec_t rijndael256_oids[] =
1978
  {
1979
    { "2.16.840.1.101.3.4.1.41", GCRY_CIPHER_MODE_ECB },
1980
    { "2.16.840.1.101.3.4.1.42", GCRY_CIPHER_MODE_CBC },
1981
    { "2.16.840.1.101.3.4.1.43", GCRY_CIPHER_MODE_OFB },
1982
    { "2.16.840.1.101.3.4.1.44", GCRY_CIPHER_MODE_CFB },
1983
    { "2.16.840.1.101.3.4.1.46", GCRY_CIPHER_MODE_GCM },
1984
    { "2.16.840.1.101.3.4.1.47", GCRY_CIPHER_MODE_CCM },
1985
    { NULL }
1986
  };
1987
1988
gcry_cipher_spec_t _gcry_cipher_spec_aes256 =
1989
  {
1990
    GCRY_CIPHER_AES256, {0, 1},
1991
    "AES256", rijndael256_names, rijndael256_oids, 16, 256,
1992
    sizeof (RIJNDAEL_context),
1993
    rijndael_setkey, rijndael_encrypt, rijndael_decrypt,
1994
    NULL, NULL,
1995
    run_selftests
1996
  };
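
The name and OID tables above are what the generic lookup layer consults when an application refers to AES by string. A short usage sketch with the public helpers gcry_cipher_map_name() and gcry_cipher_mode_from_oid(), which resolve a textual name or dotted OID against tables like these:

#include <stdio.h>
#include <gcrypt.h>

int
main (void)
{
  /* "AES-128" appears in rijndael_names above. */
  int algo = gcry_cipher_map_name ("AES-128");

  /* 2.16.840.1.101.3.4.1.2 is the aes128-CBC entry in rijndael_oids above. */
  int algo_from_oid = gcry_cipher_map_name ("2.16.840.1.101.3.4.1.2");
  int mode = gcry_cipher_mode_from_oid ("2.16.840.1.101.3.4.1.2");

  printf ("algo=%d (%s), algo_from_oid=%d, mode=%d (CBC=%d)\n",
          algo, gcry_cipher_algo_name (algo), algo_from_oid,
          mode, GCRY_CIPHER_MODE_CBC);
  return 0;
}
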