Coverage Report

Created: 2025-12-10 06:24

/src/openssl/crypto/evp/e_aes.c
Line | Count | Source
1
/*
2
 * Copyright 2001-2025 The OpenSSL Project Authors. All Rights Reserved.
3
 *
4
 * Licensed under the Apache License 2.0 (the "License").  You may not use
5
 * this file except in compliance with the License.  You can obtain a copy
6
 * in the file LICENSE in the source distribution or at
7
 * https://www.openssl.org/source/license.html
8
 */
9
10
/*
11
 * This file uses the low-level AES functions (which are deprecated for
12
 * non-internal use) in order to implement the EVP AES ciphers.
13
 */
14
#include "internal/deprecated.h"
15
16
#include <string.h>
17
#include <assert.h>
18
#include <openssl/opensslconf.h>
19
#include <openssl/crypto.h>
20
#include <openssl/evp.h>
21
#include <openssl/err.h>
22
#include <openssl/aes.h>
23
#include <openssl/rand.h>
24
#include <openssl/cmac.h>
25
#include "crypto/evp.h"
26
#include "internal/cryptlib.h"
27
#include "crypto/modes.h"
28
#include "crypto/siv.h"
29
#include "crypto/aes_platform.h"
30
#include "evp_local.h"
31
32
typedef struct {
33
    union {
34
        OSSL_UNION_ALIGN;
35
        AES_KEY ks;
36
    } ks;
37
    block128_f block;
38
    union {
39
        cbc128_f cbc;
40
        ctr128_f ctr;
41
    } stream;
42
} EVP_AES_KEY;
43
44
typedef struct {
45
    union {
46
        OSSL_UNION_ALIGN;
47
        AES_KEY ks;
48
    } ks; /* AES key schedule to use */
49
    int key_set; /* Set if key initialised */
50
    int iv_set; /* Set if an iv is set */
51
    GCM128_CONTEXT gcm;
52
    unsigned char *iv; /* Temporary IV store */
53
    int ivlen; /* IV length */
54
    int taglen;
55
    int iv_gen; /* It is OK to generate IVs */
56
    int iv_gen_rand; /* No IV was specified, so generate a rand IV */
57
    int tls_aad_len; /* TLS AAD length */
58
    uint64_t tls_enc_records; /* Number of TLS records encrypted */
59
    ctr128_f ctr;
60
} EVP_AES_GCM_CTX;
61
62
typedef struct {
63
    union {
64
        OSSL_UNION_ALIGN;
65
        AES_KEY ks;
66
    } ks1, ks2; /* AES key schedules to use */
67
    XTS128_CONTEXT xts;
68
    void (*stream)(const unsigned char *in,
69
        unsigned char *out, size_t length,
70
        const AES_KEY *key1, const AES_KEY *key2,
71
        const unsigned char iv[16]);
72
} EVP_AES_XTS_CTX;
73
74
#ifdef FIPS_MODULE
75
static const int allow_insecure_decrypt = 0;
76
#else
77
static const int allow_insecure_decrypt = 1;
78
#endif
79
80
typedef struct {
81
    union {
82
        OSSL_UNION_ALIGN;
83
        AES_KEY ks;
84
    } ks; /* AES key schedule to use */
85
    int key_set; /* Set if key initialised */
86
    int iv_set; /* Set if an iv is set */
87
    int tag_set; /* Set if tag is valid */
88
    int len_set; /* Set if message length set */
89
    int L, M; /* L and M parameters from RFC3610 */
90
    int tls_aad_len; /* TLS AAD length */
91
    CCM128_CONTEXT ccm;
92
    ccm128_f str;
93
} EVP_AES_CCM_CTX;
94
95
#ifndef OPENSSL_NO_OCB
96
typedef struct {
97
    union {
98
        OSSL_UNION_ALIGN;
99
        AES_KEY ks;
100
    } ksenc; /* AES key schedule to use for encryption */
101
    union {
102
        OSSL_UNION_ALIGN;
103
        AES_KEY ks;
104
    } ksdec; /* AES key schedule to use for decryption */
105
    int key_set; /* Set if key initialised */
106
    int iv_set; /* Set if an iv is set */
107
    OCB128_CONTEXT ocb;
108
    unsigned char *iv; /* Temporary IV store */
109
    unsigned char tag[16];
110
    unsigned char data_buf[16]; /* Store partial data blocks */
111
    unsigned char aad_buf[16]; /* Store partial AAD blocks */
112
    int data_buf_len;
113
    int aad_buf_len;
114
    int ivlen; /* IV length */
115
    int taglen;
116
} EVP_AES_OCB_CTX;
117
#endif
118
119
0
#define MAXBITCHUNK ((size_t)1 << (sizeof(size_t) * 8 - 4))
120
121
/* increment counter (64-bit int) by 1 */
122
static void ctr64_inc(unsigned char *counter)
123
0
{
124
0
    int n = 8;
125
0
    unsigned char c;
126
127
0
    do {
128
0
        --n;
129
0
        c = counter[n];
130
0
        ++c;
131
0
        counter[n] = c;
132
0
        if (c)
133
0
            return;
134
0
    } while (n);
135
0
}
136
137
#if defined(AESNI_CAPABLE)
138
#if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
139
#define AES_GCM_ASM2(gctx) (gctx->gcm.block == (block128_f)aesni_encrypt && gctx->gcm.ghash == gcm_ghash_avx)
140
#undef AES_GCM_ASM2 /* minor size optimization */
141
#endif
142
143
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
144
    const unsigned char *iv, int enc)
145
{
146
    int ret, mode;
147
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
148
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
149
150
    if (keylen <= 0) {
151
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
152
        return 0;
153
    }
154
    mode = EVP_CIPHER_CTX_get_mode(ctx);
155
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
156
        && !enc) {
157
        ret = aesni_set_decrypt_key(key, keylen, &dat->ks.ks);
158
        dat->block = (block128_f)aesni_decrypt;
159
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
160
    } else {
161
        ret = aesni_set_encrypt_key(key, keylen, &dat->ks.ks);
162
        dat->block = (block128_f)aesni_encrypt;
163
        if (mode == EVP_CIPH_CBC_MODE)
164
            dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
165
        else if (mode == EVP_CIPH_CTR_MODE)
166
            dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
167
        else
168
            dat->stream.cbc = NULL;
169
    }
170
171
    if (ret < 0) {
172
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
173
        return 0;
174
    }
175
176
    return 1;
177
}
178
179
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
180
    const unsigned char *in, size_t len)
181
{
182
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY, ctx)->ks.ks,
183
        ctx->iv, EVP_CIPHER_CTX_is_encrypting(ctx));
184
185
    return 1;
186
}
187
188
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
189
    const unsigned char *in, size_t len)
190
{
191
    size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);
192
193
    if (len < bl)
194
        return 1;
195
196
    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY, ctx)->ks.ks,
197
        EVP_CIPHER_CTX_is_encrypting(ctx));
198
199
    return 1;
200
}
201
202
#define aesni_ofb_cipher aes_ofb_cipher
203
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
204
    const unsigned char *in, size_t len);
205
206
#define aesni_cfb_cipher aes_cfb_cipher
207
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
208
    const unsigned char *in, size_t len);
209
210
#define aesni_cfb8_cipher aes_cfb8_cipher
211
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
212
    const unsigned char *in, size_t len);
213
214
#define aesni_cfb1_cipher aes_cfb1_cipher
215
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
216
    const unsigned char *in, size_t len);
217
218
#define aesni_ctr_cipher aes_ctr_cipher
219
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
220
    const unsigned char *in, size_t len);
221
222
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
223
    const unsigned char *iv, int enc)
224
{
225
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
226
227
    if (iv == NULL && key == NULL)
228
        return 1;
229
230
    if (key) {
231
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
232
233
        if (keylen <= 0) {
234
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
235
            return 0;
236
        }
237
        aesni_set_encrypt_key(key, keylen, &gctx->ks.ks);
238
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt);
239
        gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
240
        /*
241
         * If we have an iv we can set it directly, otherwise use saved IV.
242
         */
243
        if (iv == NULL && gctx->iv_set)
244
            iv = gctx->iv;
245
        if (iv) {
246
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
247
            gctx->iv_set = 1;
248
        }
249
        gctx->key_set = 1;
250
    } else {
251
        /* If the key is set, use the IV; otherwise copy it */
252
        if (gctx->key_set)
253
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
254
        else
255
            memcpy(gctx->iv, iv, gctx->ivlen);
256
        gctx->iv_set = 1;
257
        gctx->iv_gen = 0;
258
    }
259
    return 1;
260
}
261
262
#define aesni_gcm_cipher aes_gcm_cipher
263
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
264
    const unsigned char *in, size_t len);
265
266
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
267
    const unsigned char *iv, int enc)
268
{
269
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
270
271
    if (iv == NULL && key == NULL)
272
        return 1;
273
274
    if (key) {
275
        /* The key is two half-length keys in reality */
276
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
277
        const int bytes = keylen / 2;
278
        const int bits = bytes * 8;
279
280
        if (keylen <= 0) {
281
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
282
            return 0;
283
        }
284
        /*
285
         * Verify that the two keys are different.
286
         *
287
         * This addresses Rogaway's vulnerability.
288
         * See comment in aes_xts_init_key() below.
289
         */
290
        if ((!allow_insecure_decrypt || enc)
291
            && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
292
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
293
            return 0;
294
        }
295
296
        /* key_len is two AES keys */
297
        if (enc) {
298
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
299
            xctx->xts.block1 = (block128_f)aesni_encrypt;
300
            xctx->stream = aesni_xts_encrypt;
301
        } else {
302
            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
303
            xctx->xts.block1 = (block128_f)aesni_decrypt;
304
            xctx->stream = aesni_xts_decrypt;
305
        }
306
307
        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
308
        xctx->xts.block2 = (block128_f)aesni_encrypt;
309
310
        xctx->xts.key1 = &xctx->ks1;
311
    }
312
313
    if (iv) {
314
        xctx->xts.key2 = &xctx->ks2;
315
        memcpy(ctx->iv, iv, 16);
316
    }
317
318
    return 1;
319
}
320
321
#define aesni_xts_cipher aes_xts_cipher
322
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
323
    const unsigned char *in, size_t len);
324
325
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
326
    const unsigned char *iv, int enc)
327
{
328
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
329
330
    if (iv == NULL && key == NULL)
331
        return 1;
332
333
    if (key != NULL) {
334
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
335
336
        if (keylen <= 0) {
337
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
338
            return 0;
339
        }
340
        aesni_set_encrypt_key(key, keylen, &cctx->ks.ks);
341
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
342
            &cctx->ks, (block128_f)aesni_encrypt);
343
        cctx->str = enc ? (ccm128_f)aesni_ccm64_encrypt_blocks : (ccm128_f)aesni_ccm64_decrypt_blocks;
344
        cctx->key_set = 1;
345
    }
346
    if (iv) {
347
        memcpy(ctx->iv, iv, 15 - cctx->L);
348
        cctx->iv_set = 1;
349
    }
350
    return 1;
351
}
352
353
#define aesni_ccm_cipher aes_ccm_cipher
354
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
355
    const unsigned char *in, size_t len);
356
357
#ifndef OPENSSL_NO_OCB
358
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
359
    const unsigned char *iv, int enc)
360
{
361
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
362
363
    if (iv == NULL && key == NULL)
364
        return 1;
365
366
    if (key != NULL) {
367
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
368
369
        if (keylen <= 0) {
370
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
371
            return 0;
372
        }
373
        do {
374
            /*
375
             * We set both the encrypt and decrypt key here because decrypt
376
             * needs both. We could possibly optimise to remove setting the
377
             * decrypt for an encryption operation.
378
             */
379
            aesni_set_encrypt_key(key, keylen, &octx->ksenc.ks);
380
            aesni_set_decrypt_key(key, keylen, &octx->ksdec.ks);
381
            if (!CRYPTO_ocb128_init(&octx->ocb,
382
                    &octx->ksenc.ks, &octx->ksdec.ks,
383
                    (block128_f)aesni_encrypt,
384
                    (block128_f)aesni_decrypt,
385
                    enc ? aesni_ocb_encrypt
386
                        : aesni_ocb_decrypt))
387
                return 0;
388
        } while (0);
389
390
        /*
391
         * If we have an iv we can set it directly, otherwise use saved IV.
392
         */
393
        if (iv == NULL && octx->iv_set)
394
            iv = octx->iv;
395
        if (iv) {
396
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
397
                != 1)
398
                return 0;
399
            octx->iv_set = 1;
400
        }
401
        octx->key_set = 1;
402
    } else {
403
        /* If the key is set, use the IV; otherwise copy it */
404
        if (octx->key_set)
405
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
406
        else
407
            memcpy(octx->iv, iv, octx->ivlen);
408
        octx->iv_set = 1;
409
    }
410
    return 1;
411
}
412
413
#define aesni_ocb_cipher aes_ocb_cipher
414
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
415
    const unsigned char *in, size_t len);
416
#endif /* OPENSSL_NO_OCB */
417
418
#define BLOCK_CIPHER_generic(nid, keylen, blocksize, ivlen, nmode, mode, MODE, flags) \
419
    static const EVP_CIPHER aesni_##keylen##_##mode = {                               \
420
        nid##_##keylen##_##nmode, blocksize, keylen / 8, ivlen,                       \
421
        flags | EVP_CIPH_##MODE##_MODE,                                               \
422
        EVP_ORIG_GLOBAL,                                                              \
423
        aesni_init_key,                                                               \
424
        aesni_##mode##_cipher,                                                        \
425
        NULL,                                                                         \
426
        sizeof(EVP_AES_KEY),                                                          \
427
        NULL, NULL, NULL, NULL                                                        \
428
    };                                                                                \
429
    static const EVP_CIPHER aes_##keylen##_##mode = {                                 \
430
        nid##_##keylen##_##nmode, blocksize,                                          \
431
        keylen / 8, ivlen,                                                            \
432
        flags | EVP_CIPH_##MODE##_MODE,                                               \
433
        EVP_ORIG_GLOBAL,                                                              \
434
        aes_init_key,                                                                 \
435
        aes_##mode##_cipher,                                                          \
436
        NULL,                                                                         \
437
        sizeof(EVP_AES_KEY),                                                          \
438
        NULL, NULL, NULL, NULL                                                        \
439
    };                                                                                \
440
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                 \
441
    {                                                                                 \
442
        return AESNI_CAPABLE ? &aesni_##keylen##_##mode : &aes_##keylen##_##mode;     \
443
    }
444
445
#define BLOCK_CIPHER_custom(nid, keylen, blocksize, ivlen, mode, MODE, flags)                                              \
446
    static const EVP_CIPHER aesni_##keylen##_##mode = {                                                                    \
447
        nid##_##keylen##_##mode, blocksize,                                                                                \
448
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
449
        ivlen,                                                                                                             \
450
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
451
        EVP_ORIG_GLOBAL,                                                                                                   \
452
        aesni_##mode##_init_key,                                                                                           \
453
        aesni_##mode##_cipher,                                                                                             \
454
        aes_##mode##_cleanup,                                                                                              \
455
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
456
        NULL, NULL, aes_##mode##_ctrl, NULL                                                                                \
457
    };                                                                                                                     \
458
    static const EVP_CIPHER aes_##keylen##_##mode = {                                                                      \
459
        nid##_##keylen##_##mode, blocksize,                                                                                \
460
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
461
        ivlen,                                                                                                             \
462
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
463
        EVP_ORIG_GLOBAL,                                                                                                   \
464
        aes_##mode##_init_key,                                                                                             \
465
        aes_##mode##_cipher,                                                                                               \
466
        aes_##mode##_cleanup,                                                                                              \
467
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
468
        NULL, NULL, aes_##mode##_ctrl, NULL                                                                                \
469
    };                                                                                                                     \
470
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                                                      \
471
    {                                                                                                                      \
472
        return AESNI_CAPABLE ? &aesni_##keylen##_##mode : &aes_##keylen##_##mode;                                          \
473
    }
474
475
#elif defined(SPARC_AES_CAPABLE)
476
477
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
478
    const unsigned char *iv, int enc)
479
{
480
    int ret, mode, bits;
481
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
482
483
    mode = EVP_CIPHER_CTX_get_mode(ctx);
484
    bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
485
    if (bits <= 0) {
486
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
487
        return 0;
488
    }
489
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
490
        && !enc) {
491
        ret = 0;
492
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
493
        dat->block = (block128_f)aes_t4_decrypt;
494
        switch (bits) {
495
        case 128:
496
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aes128_t4_cbc_decrypt : NULL;
497
            break;
498
        case 192:
499
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aes192_t4_cbc_decrypt : NULL;
500
            break;
501
        case 256:
502
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aes256_t4_cbc_decrypt : NULL;
503
            break;
504
        default:
505
            ret = -1;
506
        }
507
    } else {
508
        ret = 0;
509
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
510
        dat->block = (block128_f)aes_t4_encrypt;
511
        switch (bits) {
512
        case 128:
513
            if (mode == EVP_CIPH_CBC_MODE)
514
                dat->stream.cbc = (cbc128_f)aes128_t4_cbc_encrypt;
515
            else if (mode == EVP_CIPH_CTR_MODE)
516
                dat->stream.ctr = (ctr128_f)aes128_t4_ctr32_encrypt;
517
            else
518
                dat->stream.cbc = NULL;
519
            break;
520
        case 192:
521
            if (mode == EVP_CIPH_CBC_MODE)
522
                dat->stream.cbc = (cbc128_f)aes192_t4_cbc_encrypt;
523
            else if (mode == EVP_CIPH_CTR_MODE)
524
                dat->stream.ctr = (ctr128_f)aes192_t4_ctr32_encrypt;
525
            else
526
                dat->stream.cbc = NULL;
527
            break;
528
        case 256:
529
            if (mode == EVP_CIPH_CBC_MODE)
530
                dat->stream.cbc = (cbc128_f)aes256_t4_cbc_encrypt;
531
            else if (mode == EVP_CIPH_CTR_MODE)
532
                dat->stream.ctr = (ctr128_f)aes256_t4_ctr32_encrypt;
533
            else
534
                dat->stream.cbc = NULL;
535
            break;
536
        default:
537
            ret = -1;
538
        }
539
    }
540
541
    if (ret < 0) {
542
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
543
        return 0;
544
    }
545
546
    return 1;
547
}
548
549
#define aes_t4_cbc_cipher aes_cbc_cipher
550
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
551
    const unsigned char *in, size_t len);
552
553
#define aes_t4_ecb_cipher aes_ecb_cipher
554
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
555
    const unsigned char *in, size_t len);
556
557
#define aes_t4_ofb_cipher aes_ofb_cipher
558
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
559
    const unsigned char *in, size_t len);
560
561
#define aes_t4_cfb_cipher aes_cfb_cipher
562
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
563
    const unsigned char *in, size_t len);
564
565
#define aes_t4_cfb8_cipher aes_cfb8_cipher
566
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
567
    const unsigned char *in, size_t len);
568
569
#define aes_t4_cfb1_cipher aes_cfb1_cipher
570
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
571
    const unsigned char *in, size_t len);
572
573
#define aes_t4_ctr_cipher aes_ctr_cipher
574
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
575
    const unsigned char *in, size_t len);
576
577
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
578
    const unsigned char *iv, int enc)
579
{
580
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
581
582
    if (iv == NULL && key == NULL)
583
        return 1;
584
    if (key) {
585
        const int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
586
587
        if (bits <= 0) {
588
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
589
            return 0;
590
        }
591
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
592
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
593
            (block128_f)aes_t4_encrypt);
594
        switch (bits) {
595
        case 128:
596
            gctx->ctr = (ctr128_f)aes128_t4_ctr32_encrypt;
597
            break;
598
        case 192:
599
            gctx->ctr = (ctr128_f)aes192_t4_ctr32_encrypt;
600
            break;
601
        case 256:
602
            gctx->ctr = (ctr128_f)aes256_t4_ctr32_encrypt;
603
            break;
604
        default:
605
            return 0;
606
        }
607
        /*
608
         * If we have an iv we can set it directly, otherwise use saved IV.
609
         */
610
        if (iv == NULL && gctx->iv_set)
611
            iv = gctx->iv;
612
        if (iv) {
613
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
614
            gctx->iv_set = 1;
615
        }
616
        gctx->key_set = 1;
617
    } else {
618
        /* If the key is set, use the IV; otherwise copy it */
619
        if (gctx->key_set)
620
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
621
        else
622
            memcpy(gctx->iv, iv, gctx->ivlen);
623
        gctx->iv_set = 1;
624
        gctx->iv_gen = 0;
625
    }
626
    return 1;
627
}
628
629
#define aes_t4_gcm_cipher aes_gcm_cipher
630
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
631
    const unsigned char *in, size_t len);
632
633
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
634
    const unsigned char *iv, int enc)
635
{
636
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
637
638
    if (!iv && !key)
639
        return 1;
640
641
    if (key) {
642
        /* The key is two half-length keys in reality */
643
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
644
        const int bytes = keylen / 2;
645
        const int bits = bytes * 8;
646
647
        if (keylen <= 0) {
648
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
649
            return 0;
650
        }
651
        /*
652
         * Verify that the two keys are different.
653
         *
654
         * This addresses Rogaway's vulnerability.
655
         * See comment in aes_xts_init_key() below.
656
         */
657
        if ((!allow_insecure_decrypt || enc)
658
            && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
659
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
660
            return 0;
661
        }
662
663
        xctx->stream = NULL;
664
        /* key_len is two AES keys */
665
        if (enc) {
666
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
667
            xctx->xts.block1 = (block128_f)aes_t4_encrypt;
668
            switch (bits) {
669
            case 128:
670
                xctx->stream = aes128_t4_xts_encrypt;
671
                break;
672
            case 256:
673
                xctx->stream = aes256_t4_xts_encrypt;
674
                break;
675
            default:
676
                return 0;
677
            }
678
        } else {
679
            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
680
            xctx->xts.block1 = (block128_f)aes_t4_decrypt;
681
            switch (bits) {
682
            case 128:
683
                xctx->stream = aes128_t4_xts_decrypt;
684
                break;
685
            case 256:
686
                xctx->stream = aes256_t4_xts_decrypt;
687
                break;
688
            default:
689
                return 0;
690
            }
691
        }
692
693
        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
694
        xctx->xts.block2 = (block128_f)aes_t4_encrypt;
695
696
        xctx->xts.key1 = &xctx->ks1;
697
    }
698
699
    if (iv) {
700
        xctx->xts.key2 = &xctx->ks2;
701
        memcpy(ctx->iv, iv, 16);
702
    }
703
704
    return 1;
705
}
706
707
#define aes_t4_xts_cipher aes_xts_cipher
708
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
709
    const unsigned char *in, size_t len);
710
711
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
712
    const unsigned char *iv, int enc)
713
{
714
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
715
716
    if (iv == NULL && key == NULL)
717
        return 1;
718
719
    if (key != NULL) {
720
        const int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
721
722
        if (bits <= 0) {
723
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
724
            return 0;
725
        }
726
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
727
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
728
            &cctx->ks, (block128_f)aes_t4_encrypt);
729
        cctx->str = NULL;
730
        cctx->key_set = 1;
731
    }
732
    if (iv) {
733
        memcpy(ctx->iv, iv, 15 - cctx->L);
734
        cctx->iv_set = 1;
735
    }
736
    return 1;
737
}
738
739
#define aes_t4_ccm_cipher aes_ccm_cipher
740
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
741
    const unsigned char *in, size_t len);
742
743
#ifndef OPENSSL_NO_OCB
744
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
745
    const unsigned char *iv, int enc)
746
{
747
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
748
749
    if (iv == NULL && key == NULL)
750
        return 1;
751
752
    if (key != NULL) {
753
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
754
755
        if (keylen <= 0) {
756
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
757
            return 0;
758
        }
759
        do {
760
            /*
761
             * We set both the encrypt and decrypt key here because decrypt
762
             * needs both. We could possibly optimise to remove setting the
763
             * decrypt for an encryption operation.
764
             */
765
            aes_t4_set_encrypt_key(key, keylen, &octx->ksenc.ks);
766
            aes_t4_set_decrypt_key(key, keylen, &octx->ksdec.ks);
767
            if (!CRYPTO_ocb128_init(&octx->ocb,
768
                    &octx->ksenc.ks, &octx->ksdec.ks,
769
                    (block128_f)aes_t4_encrypt,
770
                    (block128_f)aes_t4_decrypt,
771
                    NULL))
772
                return 0;
773
        } while (0);
774
775
        /*
776
         * If we have an iv we can set it directly, otherwise use saved IV.
777
         */
778
        if (iv == NULL && octx->iv_set)
779
            iv = octx->iv;
780
        if (iv) {
781
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
782
                != 1)
783
                return 0;
784
            octx->iv_set = 1;
785
        }
786
        octx->key_set = 1;
787
    } else {
788
        /* If the key is set, use the IV; otherwise copy it */
789
        if (octx->key_set)
790
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
791
        else
792
            memcpy(octx->iv, iv, octx->ivlen);
793
        octx->iv_set = 1;
794
    }
795
    return 1;
796
}
797
798
#define aes_t4_ocb_cipher aes_ocb_cipher
799
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
800
    const unsigned char *in, size_t len);
801
#endif /* OPENSSL_NO_OCB */
802
803
#ifndef OPENSSL_NO_SIV
804
#define aes_t4_siv_init_key aes_siv_init_key
805
#define aes_t4_siv_cipher aes_siv_cipher
806
#endif /* OPENSSL_NO_SIV */
807
808
#define BLOCK_CIPHER_generic(nid, keylen, blocksize, ivlen, nmode, mode, MODE, flags)  \
809
    static const EVP_CIPHER aes_t4_##keylen##_##mode = {                               \
810
        nid##_##keylen##_##nmode, blocksize, keylen / 8, ivlen,                        \
811
        flags | EVP_CIPH_##MODE##_MODE,                                                \
812
        EVP_ORIG_GLOBAL,                                                               \
813
        aes_t4_init_key,                                                               \
814
        aes_t4_##mode##_cipher,                                                        \
815
        NULL,                                                                          \
816
        sizeof(EVP_AES_KEY),                                                           \
817
        NULL, NULL, NULL, NULL                                                         \
818
    };                                                                                 \
819
    static const EVP_CIPHER aes_##keylen##_##mode = {                                  \
820
        nid##_##keylen##_##nmode, blocksize,                                           \
821
        keylen / 8, ivlen,                                                             \
822
        flags | EVP_CIPH_##MODE##_MODE,                                                \
823
        EVP_ORIG_GLOBAL,                                                               \
824
        aes_init_key,                                                                  \
825
        aes_##mode##_cipher,                                                           \
826
        NULL,                                                                          \
827
        sizeof(EVP_AES_KEY),                                                           \
828
        NULL, NULL, NULL, NULL                                                         \
829
    };                                                                                 \
830
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                  \
831
    {                                                                                  \
832
        return SPARC_AES_CAPABLE ? &aes_t4_##keylen##_##mode : &aes_##keylen##_##mode; \
833
    }
834
835
#define BLOCK_CIPHER_custom(nid, keylen, blocksize, ivlen, mode, MODE, flags)                                              \
836
    static const EVP_CIPHER aes_t4_##keylen##_##mode = {                                                                   \
837
        nid##_##keylen##_##mode, blocksize,                                                                                \
838
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
839
        ivlen,                                                                                                             \
840
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
841
        EVP_ORIG_GLOBAL,                                                                                                   \
842
        aes_t4_##mode##_init_key,                                                                                          \
843
        aes_t4_##mode##_cipher,                                                                                            \
844
        aes_##mode##_cleanup,                                                                                              \
845
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
846
        NULL, NULL, aes_##mode##_ctrl, NULL                                                                                \
847
    };                                                                                                                     \
848
    static const EVP_CIPHER aes_##keylen##_##mode = {                                                                      \
849
        nid##_##keylen##_##mode, blocksize,                                                                                \
850
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
851
        ivlen,                                                                                                             \
852
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
853
        EVP_ORIG_GLOBAL,                                                                                                   \
854
        aes_##mode##_init_key,                                                                                             \
855
        aes_##mode##_cipher,                                                                                               \
856
        aes_##mode##_cleanup,                                                                                              \
857
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
858
        NULL, NULL, aes_##mode##_ctrl, NULL                                                                                \
859
    };                                                                                                                     \
860
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                                                      \
861
    {                                                                                                                      \
862
        return SPARC_AES_CAPABLE ? &aes_t4_##keylen##_##mode : &aes_##keylen##_##mode;                                     \
863
    }
864
865
#elif defined(S390X_aes_128_CAPABLE)
866
/* IBM S390X support */
867
typedef struct {
868
    union {
869
        OSSL_UNION_ALIGN;
870
        /*-
871
         * KM-AES parameter block - begin
872
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
873
         */
874
        struct {
875
            unsigned char k[32];
876
        } param;
877
        /* KM-AES parameter block - end */
878
    } km;
879
    unsigned int fc;
880
} S390X_AES_ECB_CTX;
881
882
typedef struct {
883
    union {
884
        OSSL_UNION_ALIGN;
885
        /*-
886
         * KMO-AES parameter block - begin
887
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
888
         */
889
        struct {
890
            unsigned char cv[16];
891
            unsigned char k[32];
892
        } param;
893
        /* KMO-AES parameter block - end */
894
    } kmo;
895
    unsigned int fc;
896
} S390X_AES_OFB_CTX;
897
898
typedef struct {
899
    union {
900
        OSSL_UNION_ALIGN;
901
        /*-
902
         * KMF-AES parameter block - begin
903
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
904
         */
905
        struct {
906
            unsigned char cv[16];
907
            unsigned char k[32];
908
        } param;
909
        /* KMF-AES parameter block - end */
910
    } kmf;
911
    unsigned int fc;
912
} S390X_AES_CFB_CTX;
913
914
typedef struct {
915
    union {
916
        OSSL_UNION_ALIGN;
917
        /*-
918
         * KMA-GCM-AES parameter block - begin
919
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
920
         */
921
        struct {
922
            unsigned char reserved[12];
923
            union {
924
                unsigned int w;
925
                unsigned char b[4];
926
            } cv;
927
            union {
928
                unsigned long long g[2];
929
                unsigned char b[16];
930
            } t;
931
            unsigned char h[16];
932
            unsigned long long taadl;
933
            unsigned long long tpcl;
934
            union {
935
                unsigned long long g[2];
936
                unsigned int w[4];
937
            } j0;
938
            unsigned char k[32];
939
        } param;
940
        /* KMA-GCM-AES parameter block - end */
941
    } kma;
942
    unsigned int fc;
943
    int key_set;
944
945
    unsigned char *iv;
946
    int ivlen;
947
    int iv_set;
948
    int iv_gen;
949
950
    int taglen;
951
952
    unsigned char ares[16];
953
    unsigned char mres[16];
954
    unsigned char kres[16];
955
    int areslen;
956
    int mreslen;
957
    int kreslen;
958
959
    int tls_aad_len;
960
    uint64_t tls_enc_records; /* Number of TLS records encrypted */
961
} S390X_AES_GCM_CTX;
962
963
typedef struct {
964
    union {
965
        OSSL_UNION_ALIGN;
966
        /*-
967
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
968
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
969
         * rounds field is used to store the function code and that the key
970
         * schedule is not stored (if aes hardware support is detected).
971
         */
972
        struct {
973
            unsigned char pad[16];
974
            AES_KEY k;
975
        } key;
976
977
        struct {
978
            /*-
979
             * KMAC-AES parameter block - begin
980
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
981
             */
982
            struct {
983
                union {
984
                    unsigned long long g[2];
985
                    unsigned char b[16];
986
                } icv;
987
                unsigned char k[32];
988
            } kmac_param;
989
            /* KMAC-AES parameter block - end */
990
991
            union {
992
                unsigned long long g[2];
993
                unsigned char b[16];
994
            } nonce;
995
            union {
996
                unsigned long long g[2];
997
                unsigned char b[16];
998
            } buf;
999
1000
            unsigned long long blocks;
1001
            int l;
1002
            int m;
1003
            int tls_aad_len;
1004
            int iv_set;
1005
            int tag_set;
1006
            int len_set;
1007
            int key_set;
1008
1009
            unsigned char pad[140];
1010
            unsigned int fc;
1011
        } ccm;
1012
    } aes;
1013
} S390X_AES_CCM_CTX;
1014
1015
#define s390x_aes_init_key aes_init_key
1016
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1017
    const unsigned char *iv, int enc);
1018
1019
#define S390X_AES_CBC_CTX EVP_AES_KEY
1020
1021
#define s390x_aes_cbc_init_key aes_init_key
1022
1023
#define s390x_aes_cbc_cipher aes_cbc_cipher
1024
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1025
    const unsigned char *in, size_t len);
1026
1027
static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
1028
    const unsigned char *key,
1029
    const unsigned char *iv, int enc)
1030
{
1031
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1032
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1033
1034
    if (keylen <= 0) {
1035
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
1036
        return 0;
1037
    }
1038
    cctx->fc = S390X_AES_FC(keylen);
1039
    if (!enc)
1040
        cctx->fc |= S390X_DECRYPT;
1041
1042
    memcpy(cctx->km.param.k, key, keylen);
1043
    return 1;
1044
}
1045
1046
static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1047
    const unsigned char *in, size_t len)
1048
{
1049
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
1050
1051
    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
1052
    return 1;
1053
}
1054
1055
static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
1056
    const unsigned char *key,
1057
    const unsigned char *ivec, int enc)
1058
{
1059
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1060
    const unsigned char *iv = ctx->oiv;
1061
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1062
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1063
1064
    if (keylen <= 0) {
1065
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
1066
        return 0;
1067
    }
1068
    if (ivlen <= 0) {
1069
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
1070
        return 0;
1071
    }
1072
    memcpy(cctx->kmo.param.cv, iv, ivlen);
1073
    memcpy(cctx->kmo.param.k, key, keylen);
1074
    cctx->fc = S390X_AES_FC(keylen);
1075
    return 1;
1076
}
1077
1078
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1079
    const unsigned char *in, size_t len)
1080
{
1081
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1082
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1083
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
1084
    int n = ctx->num;
1085
    int rem;
1086
1087
    memcpy(cctx->kmo.param.cv, iv, ivlen);
1088
    while (n && len) {
1089
        *out = *in ^ cctx->kmo.param.cv[n];
1090
        n = (n + 1) & 0xf;
1091
        --len;
1092
        ++in;
1093
        ++out;
1094
    }
1095
1096
    rem = len & 0xf;
1097
1098
    len &= ~(size_t)0xf;
1099
    if (len) {
1100
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
1101
1102
        out += len;
1103
        in += len;
1104
    }
1105
1106
    if (rem) {
1107
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
1108
            cctx->kmo.param.k);
1109
1110
        while (rem--) {
1111
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
1112
            ++n;
1113
        }
1114
    }
1115
1116
    memcpy(iv, cctx->kmo.param.cv, ivlen);
1117
    ctx->num = n;
1118
    return 1;
1119
}
1120
1121
static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1122
    const unsigned char *key,
1123
    const unsigned char *ivec, int enc)
1124
{
1125
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1126
    const unsigned char *iv = ctx->oiv;
1127
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1128
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1129
1130
    if (keylen <= 0) {
1131
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
1132
        return 0;
1133
    }
1134
    if (ivlen <= 0) {
1135
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
1136
        return 0;
1137
    }
1138
    cctx->fc = S390X_AES_FC(keylen);
1139
    cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
1140
    if (!enc)
1141
        cctx->fc |= S390X_DECRYPT;
1142
1143
    memcpy(cctx->kmf.param.cv, iv, ivlen);
1144
    memcpy(cctx->kmf.param.k, key, keylen);
1145
    return 1;
1146
}
1147
1148
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1149
    const unsigned char *in, size_t len)
1150
{
1151
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1152
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1153
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1154
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1155
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
1156
    int n = ctx->num;
1157
    int rem;
1158
    unsigned char tmp;
1159
1160
    if (keylen <= 0) {
1161
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
1162
        return 0;
1163
    }
1164
    if (ivlen <= 0) {
1165
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
1166
        return 0;
1167
    }
1168
    memcpy(cctx->kmf.param.cv, iv, ivlen);
1169
    while (n && len) {
1170
        tmp = *in;
1171
        *out = cctx->kmf.param.cv[n] ^ tmp;
1172
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
1173
        n = (n + 1) & 0xf;
1174
        --len;
1175
        ++in;
1176
        ++out;
1177
    }
1178
1179
    rem = len & 0xf;
1180
1181
    len &= ~(size_t)0xf;
1182
    if (len) {
1183
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1184
1185
        out += len;
1186
        in += len;
1187
    }
1188
1189
    if (rem) {
1190
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
1191
            S390X_AES_FC(keylen), cctx->kmf.param.k);
1192
1193
        while (rem--) {
1194
            tmp = in[n];
1195
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
1196
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
1197
            ++n;
1198
        }
1199
    }
1200
1201
    memcpy(iv, cctx->kmf.param.cv, ivlen);
1202
    ctx->num = n;
1203
    return 1;
1204
}
1205
1206
static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1207
    const unsigned char *key,
1208
    const unsigned char *ivec, int enc)
1209
{
1210
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1211
    const unsigned char *iv = ctx->oiv;
1212
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1213
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1214
1215
    if (keylen <= 0) {
1216
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
1217
        return 0;
1218
    }
1219
    if (ivlen <= 0) {
1220
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
1221
        return 0;
1222
    }
1223
    cctx->fc = S390X_AES_FC(keylen);
1224
    cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
1225
    if (!enc)
1226
        cctx->fc |= S390X_DECRYPT;
1227
1228
    memcpy(cctx->kmf.param.cv, iv, ivlen);
1229
    memcpy(cctx->kmf.param.k, key, keylen);
1230
    return 1;
1231
}
1232
1233
static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1234
    const unsigned char *in, size_t len)
1235
{
1236
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1237
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1238
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
1239
1240
    memcpy(cctx->kmf.param.cv, iv, ivlen);
1241
    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1242
    memcpy(iv, cctx->kmf.param.cv, ivlen);
1243
    return 1;
1244
}
1245
1246
#define s390x_aes_cfb1_init_key aes_init_key
1247
1248
#define s390x_aes_cfb1_cipher aes_cfb1_cipher
1249
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1250
    const unsigned char *in, size_t len);
1251
1252
#define S390X_AES_CTR_CTX EVP_AES_KEY
1253
1254
#define s390x_aes_ctr_init_key aes_init_key
1255
1256
#define s390x_aes_ctr_cipher aes_ctr_cipher
1257
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1258
    const unsigned char *in, size_t len);
1259
1260
/* iv + padding length for iv lengths != 12 */
1261
#define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
1262
1263
/*-
1264
 * Process additional authenticated data. Returns 0 on success. Code is
1265
 * big-endian.
1266
 */
1267
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1268
    size_t len)
1269
{
1270
    unsigned long long alen;
1271
    int n, rem;
1272
1273
    if (ctx->kma.param.tpcl)
1274
        return -2;
1275
1276
    alen = ctx->kma.param.taadl + len;
1277
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1278
        return -1;
1279
    ctx->kma.param.taadl = alen;
1280
1281
    n = ctx->areslen;
1282
    if (n) {
1283
        while (n && len) {
1284
            ctx->ares[n] = *aad;
1285
            n = (n + 1) & 0xf;
1286
            ++aad;
1287
            --len;
1288
        }
1289
        /* ctx->ares contains a complete block if offset has wrapped around */
1290
        if (!n) {
1291
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1292
            ctx->fc |= S390X_KMA_HS;
1293
        }
1294
        ctx->areslen = n;
1295
    }
1296
1297
    rem = len & 0xf;
1298
1299
    len &= ~(size_t)0xf;
1300
    if (len) {
1301
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1302
        aad += len;
1303
        ctx->fc |= S390X_KMA_HS;
1304
    }
1305
1306
    if (rem) {
1307
        ctx->areslen = rem;
1308
1309
        do {
1310
            --rem;
1311
            ctx->ares[rem] = aad[rem];
1312
        } while (rem);
1313
    }
1314
    return 0;
1315
}
1316
1317
/*-
1318
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1319
 * success. Code is big-endian.
1320
 */
1321
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1322
    unsigned char *out, size_t len)
1323
{
1324
    const unsigned char *inptr;
1325
    unsigned long long mlen;
1326
    union {
1327
        unsigned int w[4];
1328
        unsigned char b[16];
1329
    } buf;
1330
    size_t inlen;
1331
    int n, rem, i;
1332
1333
    mlen = ctx->kma.param.tpcl + len;
1334
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1335
        return -1;
1336
    ctx->kma.param.tpcl = mlen;
1337
1338
    n = ctx->mreslen;
1339
    if (n) {
1340
        inptr = in;
1341
        inlen = len;
1342
        while (n && inlen) {
1343
            ctx->mres[n] = *inptr;
1344
            n = (n + 1) & 0xf;
1345
            ++inptr;
1346
            --inlen;
1347
        }
1348
        /* ctx->mres contains a complete block if offset has wrapped around */
1349
        if (!n) {
1350
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1351
                ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1352
            ctx->fc |= S390X_KMA_HS;
1353
            ctx->areslen = 0;
1354
1355
            /* previous call already encrypted/decrypted its remainder,
1356
             * see comment below */
1357
            n = ctx->mreslen;
1358
            while (n) {
1359
                *out = buf.b[n];
1360
                n = (n + 1) & 0xf;
1361
                ++out;
1362
                ++in;
1363
                --len;
1364
            }
1365
            ctx->mreslen = 0;
1366
        }
1367
    }
1368
1369
    rem = len & 0xf;
1370
1371
    len &= ~(size_t)0xf;
1372
    if (len) {
1373
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1374
            ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1375
        in += len;
1376
        out += len;
1377
        ctx->fc |= S390X_KMA_HS;
1378
        ctx->areslen = 0;
1379
    }
1380
1381
    /*-
1382
     * If there is a remainder, it has to be saved such that it can be
1383
     * processed by kma later. However, we also have to do the for-now
1384
     * unauthenticated encryption/decryption part here and now...
1385
     */
1386
    if (rem) {
1387
        if (!ctx->mreslen) {
1388
            buf.w[0] = ctx->kma.param.j0.w[0];
1389
            buf.w[1] = ctx->kma.param.j0.w[1];
1390
            buf.w[2] = ctx->kma.param.j0.w[2];
1391
            buf.w[3] = ctx->kma.param.cv.w + 1;
1392
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1393
        }
1394
1395
        n = ctx->mreslen;
1396
        for (i = 0; i < rem; i++) {
1397
            ctx->mres[n + i] = in[i];
1398
            out[i] = in[i] ^ ctx->kres[n + i];
1399
        }
1400
1401
        ctx->mreslen += rem;
1402
    }
1403
    return 0;
1404
}
1405
1406
/*-
1407
 * Initialize context structure. Code is big-endian.
1408
 */
1409
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1410
    const unsigned char *iv)
1411
{
1412
    ctx->kma.param.t.g[0] = 0;
1413
    ctx->kma.param.t.g[1] = 0;
1414
    ctx->kma.param.tpcl = 0;
1415
    ctx->kma.param.taadl = 0;
1416
    ctx->mreslen = 0;
1417
    ctx->areslen = 0;
1418
    ctx->kreslen = 0;
1419
1420
    if (ctx->ivlen == 12) {
1421
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1422
        ctx->kma.param.j0.w[3] = 1;
1423
        ctx->kma.param.cv.w = 1;
1424
    } else {
1425
        /* ctx->iv has the right size and is already padded. */
1426
        memcpy(ctx->iv, iv, ctx->ivlen);
1427
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1428
            ctx->fc, &ctx->kma.param);
1429
        ctx->fc |= S390X_KMA_HS;
1430
1431
        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1432
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1433
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1434
        ctx->kma.param.t.g[0] = 0;
1435
        ctx->kma.param.t.g[1] = 0;
1436
    }
1437
}
1438
1439
/*-
1440
 * Performs various operations on the context structure depending on control
1441
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1442
 * Code is big-endian.
1443
 */
1444
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1445
{
1446
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1447
    S390X_AES_GCM_CTX *gctx_out;
1448
    EVP_CIPHER_CTX *out;
1449
    unsigned char *buf;
1450
    int ivlen, enc, len;
1451
1452
    switch (type) {
1453
    case EVP_CTRL_INIT:
1454
        ivlen = EVP_CIPHER_get_iv_length(c->cipher);
1455
        gctx->key_set = 0;
1456
        gctx->iv_set = 0;
1457
        gctx->ivlen = ivlen;
1458
        gctx->iv = c->iv;
1459
        gctx->taglen = -1;
1460
        gctx->iv_gen = 0;
1461
        gctx->tls_aad_len = -1;
1462
        return 1;
1463
1464
    case EVP_CTRL_GET_IVLEN:
1465
        *(int *)ptr = gctx->ivlen;
1466
        return 1;
1467
1468
    case EVP_CTRL_AEAD_SET_IVLEN:
1469
        if (arg <= 0)
1470
            return 0;
1471
1472
        if (arg != 12) {
1473
            len = S390X_gcm_ivpadlen(arg);
1474
1475
            /* Allocate memory for iv if needed. */
1476
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1477
                if (gctx->iv != c->iv)
1478
                    OPENSSL_free(gctx->iv);
1479
1480
                if ((gctx->iv = OPENSSL_malloc(len)) == NULL)
1481
                    return 0;
1482
            }
1483
            /* Add padding. */
1484
            memset(gctx->iv + arg, 0, len - arg - 8);
1485
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1486
        }
1487
        gctx->ivlen = arg;
1488
        return 1;
1489
1490
    case EVP_CTRL_AEAD_SET_TAG:
1491
        buf = EVP_CIPHER_CTX_buf_noconst(c);
1492
        enc = EVP_CIPHER_CTX_is_encrypting(c);
1493
        if (arg <= 0 || arg > 16 || enc)
1494
            return 0;
1495
1496
        memcpy(buf, ptr, arg);
1497
        gctx->taglen = arg;
1498
        return 1;
1499
1500
    case EVP_CTRL_AEAD_GET_TAG:
1501
        enc = EVP_CIPHER_CTX_is_encrypting(c);
1502
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1503
            return 0;
1504
1505
        memcpy(ptr, gctx->kma.param.t.b, arg);
1506
        return 1;
1507
1508
    case EVP_CTRL_GCM_SET_IV_FIXED:
1509
        /* Special case: -1 length restores whole iv */
1510
        if (arg == -1) {
1511
            memcpy(gctx->iv, ptr, gctx->ivlen);
1512
            gctx->iv_gen = 1;
1513
            return 1;
1514
        }
1515
        /*
1516
         * Fixed field must be at least 4 bytes and invocation field at least
1517
         * 8.
1518
         */
1519
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
1520
            return 0;
1521
1522
        if (arg)
1523
            memcpy(gctx->iv, ptr, arg);
1524
1525
        enc = EVP_CIPHER_CTX_is_encrypting(c);
1526
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1527
            return 0;
1528
1529
        gctx->iv_gen = 1;
1530
        return 1;
1531
1532
    case EVP_CTRL_GCM_IV_GEN:
1533
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
1534
            return 0;
1535
1536
        s390x_aes_gcm_setiv(gctx, gctx->iv);
1537
1538
        if (arg <= 0 || arg > gctx->ivlen)
1539
            arg = gctx->ivlen;
1540
1541
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1542
        /*
1543
         * Invocation field will be at least 8 bytes in size and so no need
1544
         * to check wrap around or increment more than last 8 bytes.
1545
         */
1546
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
1547
        gctx->iv_set = 1;
1548
        return 1;
1549
1550
    case EVP_CTRL_GCM_SET_IV_INV:
1551
        enc = EVP_CIPHER_CTX_is_encrypting(c);
1552
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1553
            return 0;
1554
1555
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1556
        s390x_aes_gcm_setiv(gctx, gctx->iv);
1557
        gctx->iv_set = 1;
1558
        return 1;
1559
1560
    case EVP_CTRL_AEAD_TLS1_AAD:
1561
        /* Save the aad for later use. */
1562
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
1563
            return 0;
1564
1565
        buf = EVP_CIPHER_CTX_buf_noconst(c);
1566
        memcpy(buf, ptr, arg);
1567
        gctx->tls_aad_len = arg;
1568
        gctx->tls_enc_records = 0;
1569
1570
        len = buf[arg - 2] << 8 | buf[arg - 1];
1571
        /* Correct length for explicit iv. */
1572
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1573
            return 0;
1574
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1575
1576
        /* If decrypting correct for tag too. */
1577
        enc = EVP_CIPHER_CTX_is_encrypting(c);
1578
        if (!enc) {
1579
            if (len < EVP_GCM_TLS_TAG_LEN)
1580
                return 0;
1581
            len -= EVP_GCM_TLS_TAG_LEN;
1582
        }
1583
        buf[arg - 2] = len >> 8;
1584
        buf[arg - 1] = len & 0xff;
1585
        /* Extra padding: tag appended to record. */
1586
        return EVP_GCM_TLS_TAG_LEN;
1587
1588
    case EVP_CTRL_COPY:
1589
        out = ptr;
1590
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1591
1592
        if (gctx->iv == c->iv) {
1593
            gctx_out->iv = out->iv;
1594
        } else {
1595
            len = S390X_gcm_ivpadlen(gctx->ivlen);
1596
1597
            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL)
1598
                return 0;
1599
1600
            memcpy(gctx_out->iv, gctx->iv, len);
1601
        }
1602
        return 1;
1603
1604
    default:
1605
        return -1;
1606
    }
1607
}
1608
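These control codes are normally reached through EVP_CIPHER_CTX_ctrl() from application code. A hedged caller-side sketch exercising EVP_CTRL_AEAD_SET_IVLEN and EVP_CTRL_AEAD_GET_TAG; the function name and the 32/16-byte buffer sizes are assumptions of the example, not part of e_aes.c:

#include <openssl/evp.h>

/* Caller-side sketch: AES-256-GCM with a non-default 16-byte IV. */
static int gcm_encrypt_with_16_byte_iv(const unsigned char key[32],
                                       const unsigned char iv[16],
                                       const unsigned char *pt, int ptlen,
                                       unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok = 0;

    if (c == NULL)
        return 0;
    /* Select the cipher first, then adjust the IV length via ctrl. */
    if (EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, NULL, NULL) == 1
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 16, NULL) == 1
        && EVP_EncryptInit_ex(c, NULL, NULL, key, iv) == 1
        && EVP_EncryptUpdate(c, ct, &len, pt, ptlen) == 1
        && EVP_EncryptFinal_ex(c, ct + len, &len) == 1
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag) == 1)
        ok = 1;
    EVP_CIPHER_CTX_free(c);
    return ok;
}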
1609
/*-
1610
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1611
 */
1612
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1613
    const unsigned char *key,
1614
    const unsigned char *iv, int enc)
1615
{
1616
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1617
    int keylen;
1618
1619
    if (iv == NULL && key == NULL)
1620
        return 1;
1621
1622
    if (key != NULL) {
1623
        keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1624
        if (keylen <= 0) {
1625
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
1626
            return 0;
1627
        }
1628
1629
        memcpy(&gctx->kma.param.k, key, keylen);
1630
1631
        gctx->fc = S390X_AES_FC(keylen);
1632
        if (!enc)
1633
            gctx->fc |= S390X_DECRYPT;
1634
1635
        if (iv == NULL && gctx->iv_set)
1636
            iv = gctx->iv;
1637
1638
        if (iv != NULL) {
1639
            s390x_aes_gcm_setiv(gctx, iv);
1640
            gctx->iv_set = 1;
1641
        }
1642
        gctx->key_set = 1;
1643
    } else {
1644
        if (gctx->key_set)
1645
            s390x_aes_gcm_setiv(gctx, iv);
1646
        else
1647
            memcpy(gctx->iv, iv, gctx->ivlen);
1648
1649
        gctx->iv_set = 1;
1650
        gctx->iv_gen = 0;
1651
    }
1652
    return 1;
1653
}
1654
1655
/*-
1656
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1657
 * if successful. Otherwise -1 is returned. Code is big-endian.
1658
 */
1659
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1660
    const unsigned char *in, size_t len)
1661
{
1662
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1663
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1664
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1665
    int rv = -1;
1666
1667
    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1668
        return -1;
1669
1670
    /*
1671
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
1672
     * Requirements from SP 800-38D".  The requirement is for one party to the
1673
     * communication to fail after 2^64 - 1 keys.  We do this on the encrypting
1674
     * side only.
1675
     */
1676
    if (enc && ++gctx->tls_enc_records == 0) {
1677
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
1678
        goto err;
1679
    }
1680
1681
    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
1682
            EVP_GCM_TLS_EXPLICIT_IV_LEN, out)
1683
        <= 0)
1684
        goto err;
1685
1686
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1687
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1688
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1689
1690
    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1691
    gctx->kma.param.tpcl = len << 3;
1692
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1693
        gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1694
1695
    if (enc) {
1696
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1697
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1698
    } else {
1699
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1700
                EVP_GCM_TLS_TAG_LEN)) {
1701
            OPENSSL_cleanse(out, len);
1702
            goto err;
1703
        }
1704
        rv = len;
1705
    }
1706
err:
1707
    gctx->iv_set = 0;
1708
    gctx->tls_aad_len = -1;
1709
    return rv;
1710
}
1711
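The TLS path operates in place on a buffer laid out as explicit IV || payload || tag, with EVP_GCM_TLS_EXPLICIT_IV_LEN == 8 and EVP_GCM_TLS_TAG_LEN == 16. A trivial hypothetical helper showing the same length bookkeeping the decrypt branch performs (tls12_gcm_payload_len is illustrative, not part of the source):

#include <stddef.h>

/* Hypothetical: plaintext length recoverable from a TLS 1.2 AES-GCM record. */
static size_t tls12_gcm_payload_len(size_t record_len)
{
    const size_t overhead = 8 + 16;     /* explicit IV + tag */

    return record_len < overhead ? 0 : record_len - overhead;
}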
1712
/*-
1713
 * Called from EVP layer to initialize context, process additional
1714
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1715
 * ciphertext or process a TLS packet, depending on context. Returns bytes
1716
 * written on success. Otherwise -1 is returned. Code is big-endian.
1717
 */
1718
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1719
    const unsigned char *in, size_t len)
1720
{
1721
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1722
    unsigned char *buf, tmp[16];
1723
    int enc;
1724
1725
    if (!gctx->key_set)
1726
        return -1;
1727
1728
    if (gctx->tls_aad_len >= 0)
1729
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1730
1731
    if (!gctx->iv_set)
1732
        return -1;
1733
1734
    if (in != NULL) {
1735
        if (out == NULL) {
1736
            if (s390x_aes_gcm_aad(gctx, in, len))
1737
                return -1;
1738
        } else {
1739
            if (s390x_aes_gcm(gctx, in, out, len))
1740
                return -1;
1741
        }
1742
        return len;
1743
    } else {
1744
        gctx->kma.param.taadl <<= 3;
1745
        gctx->kma.param.tpcl <<= 3;
1746
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1747
            gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1748
        /* recall that we already did en-/decrypt gctx->mres
1749
         * and returned it to caller... */
1750
        OPENSSL_cleanse(tmp, gctx->mreslen);
1751
        gctx->iv_set = 0;
1752
1753
        enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1754
        if (enc) {
1755
            gctx->taglen = 16;
1756
        } else {
1757
            if (gctx->taglen < 0)
1758
                return -1;
1759
1760
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1761
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1762
                return -1;
1763
        }
1764
        return 0;
1765
    }
1766
}
1767
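From the EVP layer the calling convention is: out == NULL means "this is AAD", in == NULL means "finalize and check/emit the tag". A hedged caller-side decrypt sketch using the public API, assuming a 12-byte IV (the default for EVP_aes_128_gcm); the helper name and buffer sizes are assumptions of the example:

#include <openssl/evp.h>

/* Caller-side decrypt sketch; the tag must be set before the final call. */
static int gcm_decrypt(const unsigned char *key, const unsigned char *iv,
                       const unsigned char *aad, int aadlen,
                       const unsigned char *ct, int ctlen,
                       const unsigned char tag[16], unsigned char *pt)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok = 0;

    if (c == NULL)
        return 0;
    if (EVP_DecryptInit_ex(c, EVP_aes_128_gcm(), NULL, key, iv) == 1
        && EVP_DecryptUpdate(c, NULL, &len, aad, aadlen) == 1   /* AAD pass */
        && EVP_DecryptUpdate(c, pt, &len, ct, ctlen) == 1
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 16, (void *)tag) == 1
        && EVP_DecryptFinal_ex(c, pt + len, &len) == 1)          /* tag check */
        ok = 1;
    EVP_CIPHER_CTX_free(c);
    return ok;
}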
1768
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1769
{
1770
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1771
1772
    if (gctx == NULL)
1773
        return 0;
1774
1775
    if (gctx->iv != c->iv)
1776
        OPENSSL_free(gctx->iv);
1777
1778
    OPENSSL_cleanse(gctx, sizeof(*gctx));
1779
    return 1;
1780
}
1781
1782
#define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
1783
1784
#define s390x_aes_xts_init_key aes_xts_init_key
1785
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1786
    const unsigned char *key,
1787
    const unsigned char *iv, int enc);
1788
#define s390x_aes_xts_cipher aes_xts_cipher
1789
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1790
    const unsigned char *in, size_t len);
1791
#define s390x_aes_xts_ctrl aes_xts_ctrl
1792
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1793
#define s390x_aes_xts_cleanup aes_xts_cleanup
1794
1795
/*-
1796
 * Set nonce and length fields. Code is big-endian.
1797
 */
1798
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1799
    const unsigned char *nonce,
1800
    size_t mlen)
1801
{
1802
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1803
    ctx->aes.ccm.nonce.g[1] = mlen;
1804
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1805
}
1806
1807
/*-
1808
 * Process additional authenticated data. Code is big-endian.
1809
 */
1810
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1811
    size_t alen)
1812
{
1813
    unsigned char *ptr;
1814
    int i, rem;
1815
1816
    if (!alen)
1817
        return;
1818
1819
    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1820
1821
    /* Suppress 'type-punned pointer dereference' warning. */
1822
    ptr = ctx->aes.ccm.buf.b;
1823
1824
    if (alen < ((1 << 16) - (1 << 8))) {
1825
        *(uint16_t *)ptr = alen;
1826
        i = 2;
1827
    } else if (sizeof(alen) == 8
1828
        && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1829
        *(uint16_t *)ptr = 0xffff;
1830
        *(uint64_t *)(ptr + 2) = alen;
1831
        i = 10;
1832
    } else {
1833
        *(uint16_t *)ptr = 0xfffe;
1834
        *(uint32_t *)(ptr + 2) = alen;
1835
        i = 6;
1836
    }
1837
1838
    while (i < 16 && alen) {
1839
        ctx->aes.ccm.buf.b[i] = *aad;
1840
        ++aad;
1841
        --alen;
1842
        ++i;
1843
    }
1844
    while (i < 16) {
1845
        ctx->aes.ccm.buf.b[i] = 0;
1846
        ++i;
1847
    }
1848
1849
    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1850
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1851
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1852
        &ctx->aes.ccm.kmac_param);
1853
    ctx->aes.ccm.blocks += 2;
1854
1855
    rem = alen & 0xf;
1856
    alen &= ~(size_t)0xf;
1857
    if (alen) {
1858
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1859
        ctx->aes.ccm.blocks += alen >> 4;
1860
        aad += alen;
1861
    }
1862
    if (rem) {
1863
        for (i = 0; i < rem; i++)
1864
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1865
1866
        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1867
            ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1868
            ctx->aes.ccm.kmac_param.k);
1869
        ctx->aes.ccm.blocks++;
1870
    }
1871
}
1872
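The first 2, 6, or 10 bytes of ctx->aes.ccm.buf encode the AAD length exactly as RFC 3610 specifies; the code above can store the integers directly because s390x is big-endian. A portable, hedged sketch of the same encoding (ccm_encode_aad_len is illustrative, not part of the source):

#include <stddef.h>
#include <stdint.h>

/* RFC 3610 AAD length encoding, written out byte-by-byte for illustration. */
static size_t ccm_encode_aad_len(uint8_t out[10], uint64_t alen)
{
    if (alen < 0x10000 - 0x100) {                 /* plain 2-byte form */
        out[0] = (uint8_t)(alen >> 8);
        out[1] = (uint8_t)alen;
        return 2;
    }
    if (alen <= 0xffffffffULL) {                  /* 0xff 0xfe + 4 bytes */
        out[0] = 0xff; out[1] = 0xfe;
        out[2] = (uint8_t)(alen >> 24); out[3] = (uint8_t)(alen >> 16);
        out[4] = (uint8_t)(alen >> 8);  out[5] = (uint8_t)alen;
        return 6;
    }
    out[0] = 0xff; out[1] = 0xff;                 /* 0xff 0xff + 8 bytes */
    out[2] = (uint8_t)(alen >> 56); out[3] = (uint8_t)(alen >> 48);
    out[4] = (uint8_t)(alen >> 40); out[5] = (uint8_t)(alen >> 32);
    out[6] = (uint8_t)(alen >> 24); out[7] = (uint8_t)(alen >> 16);
    out[8] = (uint8_t)(alen >> 8);  out[9] = (uint8_t)alen;
    return 10;
}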
1873
/*-
1874
 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1875
 * success.
1876
 */
1877
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1878
    unsigned char *out, size_t len, int enc)
1879
{
1880
    size_t n, rem;
1881
    unsigned int i, l, num;
1882
    unsigned char flags;
1883
1884
    flags = ctx->aes.ccm.nonce.b[0];
1885
    if (!(flags & S390X_CCM_AAD_FLAG)) {
1886
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1887
            ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1888
        ctx->aes.ccm.blocks++;
1889
    }
1890
    l = flags & 0x7;
1891
    ctx->aes.ccm.nonce.b[0] = l;
1892
1893
    /*-
1894
     * Reconstruct length from encoded length field
1895
     * and initialize it with counter value.
1896
     */
1897
    n = 0;
1898
    for (i = 15 - l; i < 15; i++) {
1899
        n |= ctx->aes.ccm.nonce.b[i];
1900
        ctx->aes.ccm.nonce.b[i] = 0;
1901
        n <<= 8;
1902
    }
1903
    n |= ctx->aes.ccm.nonce.b[15];
1904
    ctx->aes.ccm.nonce.b[15] = 1;
1905
1906
    if (n != len)
1907
        return -1; /* length mismatch */
1908
1909
    if (enc) {
1910
        /* Two operations per block plus one for tag encryption */
1911
        ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1912
        if (ctx->aes.ccm.blocks > (1ULL << 61))
1913
            return -2; /* too much data */
1914
    }
1915
1916
    num = 0;
1917
    rem = len & 0xf;
1918
    len &= ~(size_t)0xf;
1919
1920
    if (enc) {
1921
        /* mac-then-encrypt */
1922
        if (len)
1923
            s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1924
        if (rem) {
1925
            for (i = 0; i < rem; i++)
1926
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1927
1928
            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1929
                ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1930
                ctx->aes.ccm.kmac_param.k);
1931
        }
1932
1933
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1934
            ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1935
            &num, (ctr128_f)AES_ctr32_encrypt);
1936
    } else {
1937
        /* decrypt-then-mac */
1938
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1939
            ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1940
            &num, (ctr128_f)AES_ctr32_encrypt);
1941
1942
        if (len)
1943
            s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1944
        if (rem) {
1945
            for (i = 0; i < rem; i++)
1946
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1947
1948
            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1949
                ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1950
                ctx->aes.ccm.kmac_param.k);
1951
        }
1952
    }
1953
    /* encrypt tag */
1954
    for (i = 15 - l; i < 16; i++)
1955
        ctx->aes.ccm.nonce.b[i] = 0;
1956
1957
    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1958
        ctx->aes.ccm.kmac_param.k);
1959
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1960
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1961
1962
    ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
1963
    return 0;
1964
}
1965
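The keystream and the final tag encryption both use RFC 3610 counter blocks A_i: a flags byte holding only L-1, the nonce, and a big-endian counter (i = 0 encrypts the CBC-MAC into the tag, i >= 1 encrypts the payload). A hedged sketch of that block layout (ccm_format_ctr_block is illustrative, not part of the source):

#include <string.h>
#include <stdint.h>

/* Illustrative RFC 3610 counter block A_i (L = size of the length field). */
static void ccm_format_ctr_block(uint8_t a[16], const uint8_t *nonce,
                                 unsigned int L, uint64_t i)
{
    unsigned int j;

    memset(a, 0, 16);
    a[0] = (uint8_t)(L - 1);           /* flags: only L' = L - 1 */
    memcpy(a + 1, nonce, 15 - L);      /* nonce occupies bytes 1 .. 15-L */
    for (j = 0; j < L; j++)            /* counter i, big-endian, last L bytes */
        a[15 - j] = (uint8_t)(i >> (8 * j));
}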
1966
/*-
1967
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1968
 * if successful. Otherwise -1 is returned.
1969
 */
1970
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1971
    const unsigned char *in, size_t len)
1972
{
1973
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1974
    unsigned char *ivec = ctx->iv;
1975
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1976
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1977
1978
    if (out != in
1979
        || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1980
        return -1;
1981
1982
    if (enc) {
1983
        /* Set explicit iv (sequence number). */
1984
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1985
    }
1986
1987
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1988
    /*-
1989
     * Get explicit iv (sequence number). We already have fixed iv
1990
     * (server/client_write_iv) here.
1991
     */
1992
    memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1993
    s390x_aes_ccm_setiv(cctx, ivec, len);
1994
1995
    /* Process aad (sequence number|type|version|length) */
1996
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1997
1998
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1999
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2000
2001
    if (enc) {
2002
        if (s390x_aes_ccm(cctx, in, out, len, enc))
2003
            return -1;
2004
2005
        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2006
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2007
    } else {
2008
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2009
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
2010
                    cctx->aes.ccm.m))
2011
                return len;
2012
        }
2013
2014
        OPENSSL_cleanse(out, len);
2015
        return -1;
2016
    }
2017
}
2018
2019
/*-
2020
 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
2021
 * returned.
2022
 */
2023
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
2024
    const unsigned char *key,
2025
    const unsigned char *iv, int enc)
2026
{
2027
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2028
    int keylen;
2029
2030
    if (iv == NULL && key == NULL)
2031
        return 1;
2032
2033
    if (key != NULL) {
2034
        keylen = EVP_CIPHER_CTX_get_key_length(ctx);
2035
        if (keylen <= 0) {
2036
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
2037
            return 0;
2038
        }
2039
2040
        cctx->aes.ccm.fc = S390X_AES_FC(keylen);
2041
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
2042
2043
        /* Store encoded m and l. */
2044
        cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
2045
            | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
2046
        memset(cctx->aes.ccm.nonce.b + 1, 0,
2047
            sizeof(cctx->aes.ccm.nonce.b));
2048
        cctx->aes.ccm.blocks = 0;
2049
2050
        cctx->aes.ccm.key_set = 1;
2051
    }
2052
2053
    if (iv != NULL) {
2054
        memcpy(ctx->iv, iv, 15 - cctx->aes.ccm.l);
2055
2056
        cctx->aes.ccm.iv_set = 1;
2057
    }
2058
2059
    return 1;
2060
}
2061
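The nonce flags byte written above is the RFC 3610 B_0 flags field: bits 0-2 hold L-1, bits 3-5 hold (M-2)/2, and bit 6 (S390X_CCM_AAD_FLAG) is ORed in later when AAD is present. A small hedged helper spelling that out (ccm_b0_flags is illustrative, not part of the source):

#include <stdint.h>

/* RFC 3610 B_0 flags byte; M = tag length, L = length-field size. */
static uint8_t ccm_b0_flags(unsigned int M, unsigned int L, int have_aad)
{
    return (uint8_t)((have_aad ? 0x40 : 0)
                     | ((((M - 2) / 2) & 0x7) << 3)
                     | ((L - 1) & 0x7));
}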
2062
/*-
2063
 * Called from EVP layer to initialize context, process additional
2064
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
2065
 * plaintext or process a TLS packet, depending on context. Returns bytes
2066
 * written on success. Otherwise -1 is returned.
2067
 */
2068
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2069
    const unsigned char *in, size_t len)
2070
{
2071
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2072
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
2073
    int rv;
2074
    unsigned char *buf;
2075
2076
    if (!cctx->aes.ccm.key_set)
2077
        return -1;
2078
2079
    if (cctx->aes.ccm.tls_aad_len >= 0)
2080
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
2081
2082
    /*-
2083
     * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
2084
     * so integrity must be checked already at Update() i.e., before
2085
     * potentially corrupted data is output.
2086
     */
2087
    if (in == NULL && out != NULL)
2088
        return 0;
2089
2090
    if (!cctx->aes.ccm.iv_set)
2091
        return -1;
2092
2093
    if (out == NULL) {
2094
        /* Update(): Pass message length. */
2095
        if (in == NULL) {
2096
            s390x_aes_ccm_setiv(cctx, ctx->iv, len);
2097
2098
            cctx->aes.ccm.len_set = 1;
2099
            return len;
2100
        }
2101
2102
        /* Update(): Process aad. */
2103
        if (!cctx->aes.ccm.len_set && len)
2104
            return -1;
2105
2106
        s390x_aes_ccm_aad(cctx, in, len);
2107
        return len;
2108
    }
2109
2110
    /* The tag must be set before actually decrypting data */
2111
    if (!enc && !cctx->aes.ccm.tag_set)
2112
        return -1;
2113
2114
    /* Update(): Process message. */
2115
2116
    if (!cctx->aes.ccm.len_set) {
2117
        /*-
2118
         * In case message length was not previously set explicitly via
2119
         * Update(), set it now.
2120
         */
2121
        s390x_aes_ccm_setiv(cctx, ctx->iv, len);
2122
2123
        cctx->aes.ccm.len_set = 1;
2124
    }
2125
2126
    if (enc) {
2127
        if (s390x_aes_ccm(cctx, in, out, len, enc))
2128
            return -1;
2129
2130
        cctx->aes.ccm.tag_set = 1;
2131
        return len;
2132
    } else {
2133
        rv = -1;
2134
2135
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2136
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2137
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2138
                    cctx->aes.ccm.m))
2139
                rv = len;
2140
        }
2141
2142
        if (rv == -1)
2143
            OPENSSL_cleanse(out, len);
2144
2145
        cctx->aes.ccm.iv_set = 0;
2146
        cctx->aes.ccm.tag_set = 0;
2147
        cctx->aes.ccm.len_set = 0;
2148
        return rv;
2149
    }
2150
}
2151
2152
/*-
2153
 * Performs various operations on the context structure depending on control
2154
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2155
 * Code is big-endian.
2156
 */
2157
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2158
{
2159
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2160
    unsigned char *buf;
2161
    int enc, len;
2162
2163
    switch (type) {
2164
    case EVP_CTRL_INIT:
2165
        cctx->aes.ccm.key_set = 0;
2166
        cctx->aes.ccm.iv_set = 0;
2167
        cctx->aes.ccm.l = 8;
2168
        cctx->aes.ccm.m = 12;
2169
        cctx->aes.ccm.tag_set = 0;
2170
        cctx->aes.ccm.len_set = 0;
2171
        cctx->aes.ccm.tls_aad_len = -1;
2172
        return 1;
2173
2174
    case EVP_CTRL_GET_IVLEN:
2175
        *(int *)ptr = 15 - cctx->aes.ccm.l;
2176
        return 1;
2177
2178
    case EVP_CTRL_AEAD_TLS1_AAD:
2179
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
2180
            return 0;
2181
2182
        /* Save the aad for later use. */
2183
        buf = EVP_CIPHER_CTX_buf_noconst(c);
2184
        memcpy(buf, ptr, arg);
2185
        cctx->aes.ccm.tls_aad_len = arg;
2186
2187
        len = buf[arg - 2] << 8 | buf[arg - 1];
2188
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2189
            return 0;
2190
2191
        /* Correct length for explicit iv. */
2192
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2193
2194
        enc = EVP_CIPHER_CTX_is_encrypting(c);
2195
        if (!enc) {
2196
            if (len < cctx->aes.ccm.m)
2197
                return 0;
2198
2199
            /* Correct length for tag. */
2200
            len -= cctx->aes.ccm.m;
2201
        }
2202
2203
        buf[arg - 2] = len >> 8;
2204
        buf[arg - 1] = len & 0xff;
2205
2206
        /* Extra padding: tag appended to record. */
2207
        return cctx->aes.ccm.m;
2208
2209
    case EVP_CTRL_CCM_SET_IV_FIXED:
2210
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2211
            return 0;
2212
2213
        /* Copy to first part of the iv. */
2214
        memcpy(c->iv, ptr, arg);
2215
        return 1;
2216
2217
    case EVP_CTRL_AEAD_SET_IVLEN:
2218
        arg = 15 - arg;
2219
        /* fall-through */
2220
2221
    case EVP_CTRL_CCM_SET_L:
2222
        if (arg < 2 || arg > 8)
2223
            return 0;
2224
2225
        cctx->aes.ccm.l = arg;
2226
        return 1;
2227
2228
    case EVP_CTRL_AEAD_SET_TAG:
2229
        if ((arg & 1) || arg < 4 || arg > 16)
2230
            return 0;
2231
2232
        enc = EVP_CIPHER_CTX_is_encrypting(c);
2233
        if (enc && ptr)
2234
            return 0;
2235
2236
        if (ptr) {
2237
            cctx->aes.ccm.tag_set = 1;
2238
            buf = EVP_CIPHER_CTX_buf_noconst(c);
2239
            memcpy(buf, ptr, arg);
2240
        }
2241
2242
        cctx->aes.ccm.m = arg;
2243
        return 1;
2244
2245
    case EVP_CTRL_AEAD_GET_TAG:
2246
        enc = EVP_CIPHER_CTX_is_encrypting(c);
2247
        if (!enc || !cctx->aes.ccm.tag_set)
2248
            return 0;
2249
2250
        if (arg < cctx->aes.ccm.m)
2251
            return 0;
2252
2253
        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2254
        cctx->aes.ccm.tag_set = 0;
2255
        cctx->aes.ccm.iv_set = 0;
2256
        cctx->aes.ccm.len_set = 0;
2257
        return 1;
2258
2259
    case EVP_CTRL_COPY:
2260
        return 1;
2261
2262
    default:
2263
        return -1;
2264
    }
2265
}
2266
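The ordering that this ctrl handler and the cipher entry point enforce is visible from the caller side: IV length and tag length must be set before the key, and the total plaintext length must be supplied (an Update call with both data pointers NULL) before any AAD. A hedged caller-side sketch for AES-128-CCM; the 7-byte nonce, 8-byte tag, and helper name are assumptions of the example:

#include <openssl/evp.h>

/* Caller-side CCM encrypt sketch (7-byte nonce, 8-byte tag assumed). */
static int ccm_encrypt(const unsigned char key[16], const unsigned char nonce[7],
                       const unsigned char *aad, int aadlen,
                       const unsigned char *pt, int ptlen,
                       unsigned char *ct, unsigned char tag[8])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok = 0;

    if (c == NULL)
        return 0;
    if (EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL) == 1
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 7, NULL) == 1
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 8, NULL) == 1
        && EVP_EncryptInit_ex(c, NULL, NULL, key, nonce) == 1
        && EVP_EncryptUpdate(c, NULL, &len, NULL, ptlen) == 1  /* total length */
        && EVP_EncryptUpdate(c, NULL, &len, aad, aadlen) == 1  /* AAD */
        && EVP_EncryptUpdate(c, ct, &len, pt, ptlen) == 1
        && EVP_EncryptFinal_ex(c, ct + len, &len) == 1
        && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 8, tag) == 1)
        ok = 1;
    EVP_CIPHER_CTX_free(c);
    return ok;
}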
2267
#define s390x_aes_ccm_cleanup aes_ccm_cleanup
2268
2269
#ifndef OPENSSL_NO_OCB
2270
#define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2271
2272
#define s390x_aes_ocb_init_key aes_ocb_init_key
2273
static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2274
    const unsigned char *iv, int enc);
2275
#define s390x_aes_ocb_cipher aes_ocb_cipher
2276
static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2277
    const unsigned char *in, size_t len);
2278
#define s390x_aes_ocb_cleanup aes_ocb_cleanup
2279
static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2280
#define s390x_aes_ocb_ctrl aes_ocb_ctrl
2281
static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2282
#endif
2283
2284
#ifndef OPENSSL_NO_SIV
2285
#define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
2286
2287
#define s390x_aes_siv_init_key aes_siv_init_key
2288
#define s390x_aes_siv_cipher aes_siv_cipher
2289
#define s390x_aes_siv_cleanup aes_siv_cleanup
2290
#define s390x_aes_siv_ctrl aes_siv_ctrl
2291
#endif
2292
2293
#define BLOCK_CIPHER_generic(nid, keylen, blocksize, ivlen, nmode, mode,                                      \
2294
    MODE, flags)                                                                                              \
2295
    static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                                                   \
2296
        nid##_##keylen##_##nmode, blocksize,                                                                  \
2297
        keylen / 8,                                                                                           \
2298
        ivlen,                                                                                                \
2299
        flags | EVP_CIPH_##MODE##_MODE,                                                                       \
2300
        EVP_ORIG_GLOBAL,                                                                                      \
2301
        s390x_aes_##mode##_init_key,                                                                          \
2302
        s390x_aes_##mode##_cipher,                                                                            \
2303
        NULL,                                                                                                 \
2304
        sizeof(S390X_AES_##MODE##_CTX),                                                                       \
2305
        NULL,                                                                                                 \
2306
        NULL,                                                                                                 \
2307
        NULL,                                                                                                 \
2308
        NULL                                                                                                  \
2309
    };                                                                                                        \
2310
    static const EVP_CIPHER aes_##keylen##_##mode = {                                                         \
2311
        nid##_##keylen##_##nmode,                                                                             \
2312
        blocksize,                                                                                            \
2313
        keylen / 8,                                                                                           \
2314
        ivlen,                                                                                                \
2315
        flags | EVP_CIPH_##MODE##_MODE,                                                                       \
2316
        EVP_ORIG_GLOBAL,                                                                                      \
2317
        aes_init_key,                                                                                         \
2318
        aes_##mode##_cipher,                                                                                  \
2319
        NULL,                                                                                                 \
2320
        sizeof(EVP_AES_KEY),                                                                                  \
2321
        NULL,                                                                                                 \
2322
        NULL,                                                                                                 \
2323
        NULL,                                                                                                 \
2324
        NULL                                                                                                  \
2325
    };                                                                                                        \
2326
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                                         \
2327
    {                                                                                                         \
2328
        return S390X_aes_##keylen##_##mode##_CAPABLE ? &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2329
    }
2330
2331
#define BLOCK_CIPHER_custom(nid, keylen, blocksize, ivlen, mode, MODE, flags)                                              \
2332
    static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                                                                \
2333
        nid##_##keylen##_##mode,                                                                                           \
2334
        blocksize,                                                                                                         \
2335
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2336
        ivlen,                                                                                                             \
2337
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
2338
        EVP_ORIG_GLOBAL,                                                                                                   \
2339
        s390x_aes_##mode##_init_key,                                                                                       \
2340
        s390x_aes_##mode##_cipher,                                                                                         \
2341
        s390x_aes_##mode##_cleanup,                                                                                        \
2342
        sizeof(S390X_AES_##MODE##_CTX),                                                                                    \
2343
        NULL,                                                                                                              \
2344
        NULL,                                                                                                              \
2345
        s390x_aes_##mode##_ctrl,                                                                                           \
2346
        NULL                                                                                                               \
2347
    };                                                                                                                     \
2348
    static const EVP_CIPHER aes_##keylen##_##mode = {                                                                      \
2349
        nid##_##keylen##_##mode, blocksize,                                                                                \
2350
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2351
        ivlen,                                                                                                             \
2352
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
2353
        EVP_ORIG_GLOBAL,                                                                                                   \
2354
        aes_##mode##_init_key,                                                                                             \
2355
        aes_##mode##_cipher,                                                                                               \
2356
        aes_##mode##_cleanup,                                                                                              \
2357
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
2358
        NULL,                                                                                                              \
2359
        NULL,                                                                                                              \
2360
        aes_##mode##_ctrl,                                                                                                 \
2361
        NULL                                                                                                               \
2362
    };                                                                                                                     \
2363
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                                                      \
2364
    {                                                                                                                      \
2365
        return S390X_aes_##keylen##_##mode##_CAPABLE ? &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;              \
2366
    }
2367
2368
#else
2369
2370
#define BLOCK_CIPHER_generic(nid, keylen, blocksize, ivlen, nmode, mode, MODE, flags) \
2371
    static const EVP_CIPHER aes_##keylen##_##mode = {                                 \
2372
        nid##_##keylen##_##nmode, blocksize, keylen / 8, ivlen,                       \
2373
        flags | EVP_CIPH_##MODE##_MODE,                                               \
2374
        EVP_ORIG_GLOBAL,                                                              \
2375
        aes_init_key,                                                                 \
2376
        aes_##mode##_cipher,                                                          \
2377
        NULL,                                                                         \
2378
        sizeof(EVP_AES_KEY),                                                          \
2379
        NULL, NULL, NULL, NULL                                                        \
2380
    };                                                                                \
2381
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                 \
2382
63
    {                                                                                 \
2383
63
        return &aes_##keylen##_##mode;                                                \
2384
63
    }
Per-instantiation coverage for the macro body at lines 2382-2384 (each line of
every instantiation below executed 3 times):
EVP_aes_128_cbc, EVP_aes_128_ecb, EVP_aes_128_ofb, EVP_aes_128_cfb128,
EVP_aes_128_cfb1, EVP_aes_128_cfb8, EVP_aes_128_ctr,
EVP_aes_192_cbc, EVP_aes_192_ecb, EVP_aes_192_ofb, EVP_aes_192_cfb128,
EVP_aes_192_cfb1, EVP_aes_192_cfb8, EVP_aes_192_ctr,
EVP_aes_256_cbc, EVP_aes_256_ecb, EVP_aes_256_ofb, EVP_aes_256_cfb128,
EVP_aes_256_cfb1, EVP_aes_256_cfb8, EVP_aes_256_ctr
2385
2386
#define BLOCK_CIPHER_custom(nid, keylen, blocksize, ivlen, mode, MODE, flags)                                              \
2387
    static const EVP_CIPHER aes_##keylen##_##mode = {                                                                      \
2388
        nid##_##keylen##_##mode, blocksize,                                                                                \
2389
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2390
        ivlen,                                                                                                             \
2391
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
2392
        EVP_ORIG_GLOBAL,                                                                                                   \
2393
        aes_##mode##_init_key,                                                                                             \
2394
        aes_##mode##_cipher,                                                                                               \
2395
        aes_##mode##_cleanup,                                                                                              \
2396
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
2397
        NULL, NULL, aes_##mode##_ctrl, NULL                                                                                \
2398
    };                                                                                                                     \
2399
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                                                      \
2400
33
    {                                                                                                                      \
2401
33
        return &aes_##keylen##_##mode;                                                                                     \
2402
33
    }
Per-instantiation coverage for the macro body at lines 2400-2402 (each line of
every instantiation below executed 3 times):
EVP_aes_128_gcm, EVP_aes_192_gcm, EVP_aes_256_gcm,
EVP_aes_128_xts, EVP_aes_256_xts,
EVP_aes_128_ccm, EVP_aes_192_ccm, EVP_aes_256_ccm,
EVP_aes_128_ocb, EVP_aes_192_ocb, EVP_aes_256_ocb
2403
2404
#endif
2405
2406
#define BLOCK_CIPHER_generic_pack(nid, keylen, flags)                                                          \
2407
    BLOCK_CIPHER_generic(nid, keylen, 16, 16, cbc, cbc, CBC, flags | EVP_CIPH_FLAG_DEFAULT_ASN1)               \
2408
        BLOCK_CIPHER_generic(nid, keylen, 16, 0, ecb, ecb, ECB, flags | EVP_CIPH_FLAG_DEFAULT_ASN1)            \
2409
            BLOCK_CIPHER_generic(nid, keylen, 1, 16, ofb128, ofb, OFB, flags | EVP_CIPH_FLAG_DEFAULT_ASN1)     \
2410
                BLOCK_CIPHER_generic(nid, keylen, 1, 16, cfb128, cfb, CFB, flags | EVP_CIPH_FLAG_DEFAULT_ASN1) \
2411
                    BLOCK_CIPHER_generic(nid, keylen, 1, 16, cfb1, cfb1, CFB, flags)                           \
2412
                        BLOCK_CIPHER_generic(nid, keylen, 1, 16, cfb8, cfb8, CFB, flags)                       \
2413
                            BLOCK_CIPHER_generic(nid, keylen, 1, 16, ctr, ctr, CTR, flags)
2414
2415
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2416
    const unsigned char *iv, int enc)
2417
0
{
2418
0
    int ret, mode;
2419
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2420
0
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
2421
2422
0
    if (keylen <= 0) {
2423
0
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
2424
0
        return 0;
2425
0
    }
2426
2427
0
    mode = EVP_CIPHER_CTX_get_mode(ctx);
2428
0
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2429
0
        && !enc) {
2430
#ifdef HWAES_CAPABLE
2431
        if (HWAES_CAPABLE) {
2432
            ret = HWAES_set_decrypt_key(key, keylen, &dat->ks.ks);
2433
            dat->block = (block128_f)HWAES_decrypt;
2434
            dat->stream.cbc = NULL;
2435
#ifdef HWAES_cbc_encrypt
2436
            if (mode == EVP_CIPH_CBC_MODE)
2437
                dat->stream.cbc = (cbc128_f)HWAES_cbc_encrypt;
2438
#endif
2439
        } else
2440
#endif
2441
#ifdef BSAES_CAPABLE
2442
            if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2443
            ret = AES_set_decrypt_key(key, keylen, &dat->ks.ks);
2444
            dat->block = (block128_f)AES_decrypt;
2445
            dat->stream.cbc = (cbc128_f)ossl_bsaes_cbc_encrypt;
2446
        } else
2447
#endif
2448
#ifdef VPAES_CAPABLE
2449
            if (VPAES_CAPABLE) {
2450
            ret = vpaes_set_decrypt_key(key, keylen, &dat->ks.ks);
2451
            dat->block = (block128_f)vpaes_decrypt;
2452
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
2453
        } else
2454
#endif
2455
0
        {
2456
0
            ret = AES_set_decrypt_key(key, keylen, &dat->ks.ks);
2457
0
            dat->block = (block128_f)AES_decrypt;
2458
0
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
2459
0
        }
2460
0
    } else
2461
#ifdef HWAES_CAPABLE
2462
        if (HWAES_CAPABLE) {
2463
        ret = HWAES_set_encrypt_key(key, keylen, &dat->ks.ks);
2464
        dat->block = (block128_f)HWAES_encrypt;
2465
        dat->stream.cbc = NULL;
2466
#ifdef HWAES_cbc_encrypt
2467
        if (mode == EVP_CIPH_CBC_MODE)
2468
            dat->stream.cbc = (cbc128_f)HWAES_cbc_encrypt;
2469
        else
2470
#endif
2471
#ifdef HWAES_ctr32_encrypt_blocks
2472
            if (mode == EVP_CIPH_CTR_MODE)
2473
            dat->stream.ctr = (ctr128_f)HWAES_ctr32_encrypt_blocks;
2474
        else
2475
#endif
2476
            (void)0; /* terminate potentially open 'else' */
2477
    } else
2478
#endif
2479
#ifdef BSAES_CAPABLE
2480
        if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2481
        ret = AES_set_encrypt_key(key, keylen, &dat->ks.ks);
2482
        dat->block = (block128_f)AES_encrypt;
2483
        dat->stream.ctr = (ctr128_f)ossl_bsaes_ctr32_encrypt_blocks;
2484
    } else
2485
#endif
2486
#ifdef VPAES_CAPABLE
2487
        if (VPAES_CAPABLE) {
2488
        ret = vpaes_set_encrypt_key(key, keylen, &dat->ks.ks);
2489
        dat->block = (block128_f)vpaes_encrypt;
2490
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
2491
    } else
2492
#endif
2493
0
    {
2494
0
        ret = AES_set_encrypt_key(key, keylen, &dat->ks.ks);
2495
0
        dat->block = (block128_f)AES_encrypt;
2496
0
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
2497
#ifdef AES_CTR_ASM
2498
        if (mode == EVP_CIPH_CTR_MODE)
2499
            dat->stream.ctr = (ctr128_f)AES_ctr32_encrypt;
2500
#endif
2501
0
    }
2502
2503
0
    if (ret < 0) {
2504
0
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
2505
0
        return 0;
2506
0
    }
2507
2508
0
    return 1;
2509
0
}
2510
2511
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2512
    const unsigned char *in, size_t len)
2513
0
{
2514
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2515
2516
0
    if (dat->stream.cbc)
2517
0
        (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv,
2518
0
            EVP_CIPHER_CTX_is_encrypting(ctx));
2519
0
    else if (EVP_CIPHER_CTX_is_encrypting(ctx))
2520
0
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv,
2521
0
            dat->block);
2522
0
    else
2523
0
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2524
0
            ctx->iv, dat->block);
2525
2526
0
    return 1;
2527
0
}
2528
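aes_cbc_cipher is reached via EVP_EncryptUpdate()/EVP_DecryptUpdate() once a key has been scheduled by aes_init_key(). A hedged caller-side sketch; the helper name is an assumption of the example, and the ct buffer is assumed to have room for one extra block of PKCS#7 padding:

#include <openssl/evp.h>

/* Caller-side AES-128-CBC encrypt sketch. */
static int cbc_encrypt(const unsigned char key[16], const unsigned char iv[16],
                       const unsigned char *pt, int ptlen,
                       unsigned char *ct, int *ctlen)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int len, ok = 0;

    if (c == NULL)
        return 0;
    if (EVP_EncryptInit_ex(c, EVP_aes_128_cbc(), NULL, key, iv) == 1
        && EVP_EncryptUpdate(c, ct, &len, pt, ptlen) == 1) {
        *ctlen = len;
        if (EVP_EncryptFinal_ex(c, ct + len, &len) == 1) {   /* adds padding */
            *ctlen += len;
            ok = 1;
        }
    }
    EVP_CIPHER_CTX_free(c);
    return ok;
}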
2529
static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2530
    const unsigned char *in, size_t len)
2531
0
{
2532
0
    size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);
2533
0
    size_t i;
2534
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2535
2536
0
    if (len < bl)
2537
0
        return 1;
2538
2539
0
    for (i = 0, len -= bl; i <= len; i += bl)
2540
0
        (*dat->block)(in + i, out + i, &dat->ks);
2541
2542
0
    return 1;
2543
0
}
2544
2545
static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2546
    const unsigned char *in, size_t len)
2547
0
{
2548
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2549
2550
0
    int num = EVP_CIPHER_CTX_get_num(ctx);
2551
0
    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2552
0
        ctx->iv, &num, dat->block);
2553
0
    EVP_CIPHER_CTX_set_num(ctx, num);
2554
0
    return 1;
2555
0
}
2556
2557
static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2558
    const unsigned char *in, size_t len)
2559
0
{
2560
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2561
2562
0
    int num = EVP_CIPHER_CTX_get_num(ctx);
2563
0
    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2564
0
        ctx->iv, &num,
2565
0
        EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2566
0
    EVP_CIPHER_CTX_set_num(ctx, num);
2567
0
    return 1;
2568
0
}
2569
2570
static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2571
    const unsigned char *in, size_t len)
2572
0
{
2573
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2574
2575
0
    int num = EVP_CIPHER_CTX_get_num(ctx);
2576
0
    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2577
0
        ctx->iv, &num,
2578
0
        EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2579
0
    EVP_CIPHER_CTX_set_num(ctx, num);
2580
0
    return 1;
2581
0
}
2582
2583
static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2584
    const unsigned char *in, size_t len)
2585
0
{
2586
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2587
2588
0
    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2589
0
        int num = EVP_CIPHER_CTX_get_num(ctx);
2590
0
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2591
0
            ctx->iv, &num,
2592
0
            EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2593
0
        EVP_CIPHER_CTX_set_num(ctx, num);
2594
0
        return 1;
2595
0
    }
2596
2597
0
    while (len >= MAXBITCHUNK) {
2598
0
        int num = EVP_CIPHER_CTX_get_num(ctx);
2599
0
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2600
0
            ctx->iv, &num,
2601
0
            EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2602
0
        EVP_CIPHER_CTX_set_num(ctx, num);
2603
0
        len -= MAXBITCHUNK;
2604
0
        out += MAXBITCHUNK;
2605
0
        in += MAXBITCHUNK;
2606
0
    }
2607
0
    if (len) {
2608
0
        int num = EVP_CIPHER_CTX_get_num(ctx);
2609
0
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2610
0
            ctx->iv, &num,
2611
0
            EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2612
0
        EVP_CIPHER_CTX_set_num(ctx, num);
2613
0
    }
2614
2615
0
    return 1;
2616
0
}
2617
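/*
 * Caller-side sketch, not part of e_aes.c: aes_cfb1_cipher() above accepts
 * byte counts by default but treats the length as a bit count when
 * EVP_CIPH_FLAG_LENGTH_BITS is set on the context.  The function name, key,
 * IV and buffer sizes below are illustrative assumptions only, and the out
 * buffer is assumed to have room for the corresponding whole bytes.
 */
#include <openssl/evp.h>

static int cfb1_encrypt_bits(const unsigned char key[16],
                             const unsigned char iv[16],
                             const unsigned char *in, int nbits,
                             unsigned char *out)
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int outl = 0, ok = 0;

    if (ctx == NULL)
        return 0;
    if (EVP_EncryptInit_ex(ctx, EVP_aes_128_cfb1(), NULL, key, iv) == 1) {
        /* Ask the CFB1 implementation to interpret lengths in bits */
        EVP_CIPHER_CTX_set_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS);
        ok = EVP_EncryptUpdate(ctx, out, &outl, in, nbits) == 1;
    }
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}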
2618
static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2619
    const unsigned char *in, size_t len)
2620
0
{
2621
0
    int n = EVP_CIPHER_CTX_get_num(ctx);
2622
0
    unsigned int num;
2623
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2624
2625
0
    if (n < 0)
2626
0
        return 0;
2627
0
    num = (unsigned int)n;
2628
2629
0
    if (dat->stream.ctr)
2630
0
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2631
0
            ctx->iv,
2632
0
            EVP_CIPHER_CTX_buf_noconst(ctx),
2633
0
            &num, dat->stream.ctr);
2634
0
    else
2635
0
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2636
0
            ctx->iv,
2637
0
            EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2638
0
            dat->block);
2639
0
    EVP_CIPHER_CTX_set_num(ctx, num);
2640
0
    return 1;
2641
0
}
2642
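/*
 * Caller-side sketch, not part of e_aes.c: the 'num' value saved with
 * EVP_CIPHER_CTX_set_num() above records the offset into the current
 * keystream block, so CTR mode can be fed arbitrary chunk sizes.  The
 * function name, key, IV, lengths and split point are illustrative only.
 */
#include <string.h>
#include <openssl/evp.h>

static int ctr_chunked_equals_oneshot(const unsigned char key[16],
                                      const unsigned char iv[16],
                                      const unsigned char *msg, int len)
{
    unsigned char a[256], b[256];
    int n = 0, m = 0, ok = 0;
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();

    if (ctx == NULL || len <= 5 || len > (int)sizeof(a))
        goto end;

    /* One-shot encryption */
    if (EVP_EncryptInit_ex(ctx, EVP_aes_128_ctr(), NULL, key, iv) != 1
        || EVP_EncryptUpdate(ctx, a, &n, msg, len) != 1)
        goto end;

    /*
     * Same data split at an odd offset: the partial keystream block is
     * carried over between calls via the saved 'num'.
     */
    if (EVP_EncryptInit_ex(ctx, EVP_aes_128_ctr(), NULL, key, iv) != 1
        || EVP_EncryptUpdate(ctx, b, &n, msg, 5) != 1
        || EVP_EncryptUpdate(ctx, b + n, &m, msg + 5, len - 5) != 1)
        goto end;

    ok = memcmp(a, b, len) == 0;
 end:
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}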
2643
BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2644
BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2645
BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2646
2647
static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)

2648
0
{
2649
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, c);
2650
0
    if (gctx == NULL)
2651
0
        return 0;
2652
0
    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2653
0
    if (gctx->iv != c->iv)
2654
0
        OPENSSL_free(gctx->iv);
2655
0
    return 1;
2656
0
}
2657
2658
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2659
0
{
2660
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, c);
2661
0
    switch (type) {
2662
0
    case EVP_CTRL_INIT:
2663
0
        gctx->key_set = 0;
2664
0
        gctx->iv_set = 0;
2665
0
        gctx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
2666
0
        gctx->iv = c->iv;
2667
0
        gctx->taglen = -1;
2668
0
        gctx->iv_gen = 0;
2669
0
        gctx->tls_aad_len = -1;
2670
0
        return 1;
2671
2672
0
    case EVP_CTRL_GET_IVLEN:
2673
0
        *(int *)ptr = gctx->ivlen;
2674
0
        return 1;
2675
2676
0
    case EVP_CTRL_AEAD_SET_IVLEN:
2677
0
        if (arg <= 0)
2678
0
            return 0;
2679
        /* Allocate memory for IV if needed */
2680
0
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2681
0
            if (gctx->iv != c->iv)
2682
0
                OPENSSL_free(gctx->iv);
2683
0
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL)
2684
0
                return 0;
2685
0
        }
2686
0
        gctx->ivlen = arg;
2687
0
        return 1;
2688
2689
0
    case EVP_CTRL_AEAD_SET_TAG:
2690
0
        if (arg <= 0 || arg > 16 || c->encrypt)
2691
0
            return 0;
2692
0
        memcpy(c->buf, ptr, arg);
2693
0
        gctx->taglen = arg;
2694
0
        return 1;
2695
2696
0
    case EVP_CTRL_AEAD_GET_TAG:
2697
0
        if (arg <= 0 || arg > 16 || !c->encrypt
2698
0
            || gctx->taglen < 0)
2699
0
            return 0;
2700
0
        memcpy(ptr, c->buf, arg);
2701
0
        return 1;
2702
2703
0
    case EVP_CTRL_GCM_SET_IV_FIXED:
2704
        /* Special case: -1 length restores whole IV */
2705
0
        if (arg == -1) {
2706
0
            memcpy(gctx->iv, ptr, gctx->ivlen);
2707
0
            gctx->iv_gen = 1;
2708
0
            return 1;
2709
0
        }
2710
        /*
2711
         * Fixed field must be at least 4 bytes and invocation field at least
2712
         * 8.
2713
         */
2714
0
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
2715
0
            return 0;
2716
0
        if (arg)
2717
0
            memcpy(gctx->iv, ptr, arg);
2718
0
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2719
0
            return 0;
2720
0
        gctx->iv_gen = 1;
2721
0
        return 1;
2722
2723
0
    case EVP_CTRL_GCM_IV_GEN:
2724
0
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
2725
0
            return 0;
2726
0
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2727
0
        if (arg <= 0 || arg > gctx->ivlen)
2728
0
            arg = gctx->ivlen;
2729
0
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2730
        /*
2731
         * Invocation field will be at least 8 bytes in size and so no need
2732
         * to check wrap around or increment more than last 8 bytes.
2733
         */
2734
0
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
2735
0
        gctx->iv_set = 1;
2736
0
        return 1;
2737
2738
0
    case EVP_CTRL_GCM_SET_IV_INV:
2739
0
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2740
0
            return 0;
2741
0
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2742
0
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2743
0
        gctx->iv_set = 1;
2744
0
        return 1;
2745
2746
0
    case EVP_CTRL_AEAD_TLS1_AAD:
2747
        /* Save the AAD for later use */
2748
0
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
2749
0
            return 0;
2750
0
        memcpy(c->buf, ptr, arg);
2751
0
        gctx->tls_aad_len = arg;
2752
0
        gctx->tls_enc_records = 0;
2753
0
        {
2754
0
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2755
            /* Correct length for explicit IV */
2756
0
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2757
0
                return 0;
2758
0
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2759
            /* If decrypting correct for tag too */
2760
0
            if (!c->encrypt) {
2761
0
                if (len < EVP_GCM_TLS_TAG_LEN)
2762
0
                    return 0;
2763
0
                len -= EVP_GCM_TLS_TAG_LEN;
2764
0
            }
2765
0
            c->buf[arg - 2] = len >> 8;
2766
0
            c->buf[arg - 1] = len & 0xff;
2767
0
        }
2768
        /* Extra padding: tag appended to record */
2769
0
        return EVP_GCM_TLS_TAG_LEN;
2770
2771
0
    case EVP_CTRL_COPY: {
2772
0
        EVP_CIPHER_CTX *out = ptr;
2773
0
        EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX, out);
2774
0
        if (gctx->gcm.key) {
2775
0
            if (gctx->gcm.key != &gctx->ks)
2776
0
                return 0;
2777
0
            gctx_out->gcm.key = &gctx_out->ks;
2778
0
        }
2779
0
        if (gctx->iv == c->iv)
2780
0
            gctx_out->iv = out->iv;
2781
0
        else {
2782
0
            if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL)
2783
0
                return 0;
2784
0
            memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2785
0
        }
2786
0
        return 1;
2787
0
    }
2788
2789
0
    default:
2790
0
        return -1;
2791
0
    }
2792
0
}
2793
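/*
 * Caller-side sketch, not part of e_aes.c, of the ctrl operations handled
 * by aes_gcm_ctrl() above: set the IV length before the key/IV, feed AAD
 * with a NULL output pointer, and fetch the 16-byte tag after
 * EVP_EncryptFinal_ex().  The function name, key size, IV size and buffer
 * names are illustrative assumptions only.
 */
#include <openssl/evp.h>

static int gcm_encrypt(const unsigned char key[32],
                       const unsigned char iv[12],
                       const unsigned char *aad, int aadlen,
                       const unsigned char *pt, int ptlen,
                       unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int len = 0, ok = 0;

    if (ctx == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(ctx, EVP_aes_256_gcm(), NULL, NULL, NULL) == 1
         /* EVP_CTRL_AEAD_SET_IVLEN is handled by the ctrl above */
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL) == 1
         && EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv) == 1
         /* AAD pass: output pointer is NULL */
         && EVP_EncryptUpdate(ctx, NULL, &len, aad, aadlen) == 1
         && EVP_EncryptUpdate(ctx, ct, &len, pt, ptlen) == 1
         && EVP_EncryptFinal_ex(ctx, ct + len, &len) == 1
         /* EVP_CTRL_AEAD_GET_TAG is only valid when encrypting */
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag) == 1;
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}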
2794
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2795
    const unsigned char *iv, int enc)
2796
0
{
2797
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
2798
2799
0
    if (iv == NULL && key == NULL)
2800
0
        return 1;
2801
2802
0
    if (key != NULL) {
2803
0
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
2804
2805
0
        if (keylen <= 0) {
2806
0
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
2807
0
            return 0;
2808
0
        }
2809
0
        do {
2810
#ifdef HWAES_CAPABLE
2811
            if (HWAES_CAPABLE) {
2812
                HWAES_set_encrypt_key(key, keylen, &gctx->ks.ks);
2813
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2814
                    (block128_f)HWAES_encrypt);
2815
#ifdef HWAES_ctr32_encrypt_blocks
2816
                gctx->ctr = (ctr128_f)HWAES_ctr32_encrypt_blocks;
2817
#else
2818
                gctx->ctr = NULL;
2819
#endif
2820
                break;
2821
            } else
2822
#endif
2823
#ifdef BSAES_CAPABLE
2824
                if (BSAES_CAPABLE) {
2825
                AES_set_encrypt_key(key, keylen, &gctx->ks.ks);
2826
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2827
                    (block128_f)AES_encrypt);
2828
                gctx->ctr = (ctr128_f)ossl_bsaes_ctr32_encrypt_blocks;
2829
                break;
2830
            } else
2831
#endif
2832
#ifdef VPAES_CAPABLE
2833
                if (VPAES_CAPABLE) {
2834
                vpaes_set_encrypt_key(key, keylen, &gctx->ks.ks);
2835
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2836
                    (block128_f)vpaes_encrypt);
2837
                gctx->ctr = NULL;
2838
                break;
2839
            } else
2840
#endif
2841
0
                (void)0; /* terminate potentially open 'else' */
2842
2843
0
            AES_set_encrypt_key(key, keylen, &gctx->ks.ks);
2844
0
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2845
0
                (block128_f)AES_encrypt);
2846
#ifdef AES_CTR_ASM
2847
            gctx->ctr = (ctr128_f)AES_ctr32_encrypt;
2848
#else
2849
0
            gctx->ctr = NULL;
2850
0
#endif
2851
0
        } while (0);
2852
2853
        /*
2854
         * If we have an iv we can set it directly, otherwise use saved IV.
2855
         */
2856
0
        if (iv == NULL && gctx->iv_set)
2857
0
            iv = gctx->iv;
2858
0
        if (iv) {
2859
0
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2860
0
            gctx->iv_set = 1;
2861
0
        }
2862
0
        gctx->key_set = 1;
2863
0
    } else {
2864
        /* If key set use IV, otherwise copy */
2865
0
        if (gctx->key_set)
2866
0
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2867
0
        else
2868
0
            memcpy(gctx->iv, iv, gctx->ivlen);
2869
0
        gctx->iv_set = 1;
2870
0
        gctx->iv_gen = 0;
2871
0
    }
2872
0
    return 1;
2873
0
}
2874
2875
/*
2876
 * Handle TLS GCM packet format. This consists of the last portion of the IV
2877
 * followed by the payload and finally the tag. On encrypt generate IV,
2878
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
2879
 * and verify tag.
2880
 */
2881
2882
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2883
    const unsigned char *in, size_t len)
2884
0
{
2885
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
2886
0
    int rv = -1;
2887
    /* Encrypt/decrypt must be performed in place */
2888
0
    if (out != in
2889
0
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2890
0
        return -1;
2891
2892
    /*
2893
     * Check for too many records as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
2894
     * Requirements from SP 800-38D".  The requirement is for one party to the
2895
     * communication to fail after 2^64 - 1 records.  We do this on the encrypting
2896
     * side only.
2897
     */
2898
0
    if (EVP_CIPHER_CTX_is_encrypting(ctx) && ++gctx->tls_enc_records == 0) {
2899
0
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
2900
0
        goto err;
2901
0
    }
2902
2903
    /*
2904
     * Set IV from start of buffer or generate IV and write to start of
2905
     * buffer.
2906
     */
2907
0
    if (EVP_CIPHER_CTX_ctrl(ctx,
2908
0
            EVP_CIPHER_CTX_is_encrypting(ctx) ? EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
2909
0
            EVP_GCM_TLS_EXPLICIT_IV_LEN, out)
2910
0
        <= 0)
2911
0
        goto err;
2912
    /* Use saved AAD */
2913
0
    if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
2914
0
            gctx->tls_aad_len))
2915
0
        goto err;
2916
    /* Fix buffer and length to point to payload */
2917
0
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2918
0
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2919
0
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2920
0
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
2921
        /* Encrypt payload */
2922
0
        if (gctx->ctr) {
2923
0
            size_t bulk = 0;
2924
#if defined(AES_GCM_ASM)
2925
            if (len >= 32 && AES_GCM_ASM(gctx)) {
2926
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2927
                    return -1;
2928
2929
                bulk = AES_gcm_encrypt(in, out, len,
2930
                    gctx->gcm.key,
2931
                    gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2932
                gctx->gcm.len.u[1] += bulk;
2933
            }
2934
#endif
2935
0
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2936
0
                    in + bulk,
2937
0
                    out + bulk,
2938
0
                    len - bulk, gctx->ctr))
2939
0
                goto err;
2940
0
        } else {
2941
0
            size_t bulk = 0;
2942
#if defined(AES_GCM_ASM2)
2943
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
2944
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2945
                    return -1;
2946
2947
                bulk = AES_gcm_encrypt(in, out, len,
2948
                    gctx->gcm.key,
2949
                    gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2950
                gctx->gcm.len.u[1] += bulk;
2951
            }
2952
#endif
2953
0
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2954
0
                    in + bulk, out + bulk, len - bulk))
2955
0
                goto err;
2956
0
        }
2957
0
        out += len;
2958
        /* Finally write tag */
2959
0
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2960
0
        rv = (int)(len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN);
2961
0
    } else {
2962
        /* Decrypt */
2963
0
        if (gctx->ctr) {
2964
0
            size_t bulk = 0;
2965
#if defined(AES_GCM_ASM)
2966
            if (len >= 16 && AES_GCM_ASM(gctx)) {
2967
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2968
                    return -1;
2969
2970
                bulk = AES_gcm_decrypt(in, out, len,
2971
                    gctx->gcm.key,
2972
                    gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2973
                gctx->gcm.len.u[1] += bulk;
2974
            }
2975
#endif
2976
0
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2977
0
                    in + bulk,
2978
0
                    out + bulk,
2979
0
                    len - bulk, gctx->ctr))
2980
0
                goto err;
2981
0
        } else {
2982
0
            size_t bulk = 0;
2983
#if defined(AES_GCM_ASM2)
2984
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
2985
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2986
                    return -1;
2987
2988
                bulk = AES_gcm_decrypt(in, out, len,
2989
                    gctx->gcm.key,
2990
                    gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2991
                gctx->gcm.len.u[1] += bulk;
2992
            }
2993
#endif
2994
0
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2995
0
                    in + bulk, out + bulk, len - bulk))
2996
0
                goto err;
2997
0
        }
2998
        /* Retrieve tag */
2999
0
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
3000
0
            EVP_GCM_TLS_TAG_LEN);
3001
        /* If tag mismatch wipe buffer */
3002
0
        if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
3003
0
                EVP_GCM_TLS_TAG_LEN)) {
3004
0
            OPENSSL_cleanse(out, len);
3005
0
            goto err;
3006
0
        }
3007
0
        rv = (int)len;
3008
0
    }
3009
3010
0
err:
3011
0
    gctx->iv_set = 0;
3012
0
    gctx->tls_aad_len = -1;
3013
0
    return rv;
3014
0
}
3015
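/*
 * Sizing sketch, not part of e_aes.c: aes_gcm_tls_cipher() above works on a
 * record laid out as 8-byte explicit IV || ciphertext || 16-byte tag, so the
 * on-the-wire record is the plaintext length plus 24 bytes, and the length
 * field inside the saved TLS AAD is reduced accordingly (by 8 when
 * encrypting, by 8 + 16 when decrypting) by EVP_CTRL_AEAD_TLS1_AAD.  The
 * helper names below are illustrative only.
 */
#include <stddef.h>
#include <openssl/evp.h>

static size_t gcm_tls_record_len(size_t plaintext_len)
{
    /* 8-byte explicit IV + payload + 16-byte tag */
    return EVP_GCM_TLS_EXPLICIT_IV_LEN + plaintext_len + EVP_GCM_TLS_TAG_LEN;
}

static size_t gcm_tls_plaintext_len(size_t record_len)
{
    /* Inverse direction used on the decrypt side (record must be >= 24) */
    return record_len - EVP_GCM_TLS_EXPLICIT_IV_LEN - EVP_GCM_TLS_TAG_LEN;
}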
3016
#ifdef FIPS_MODULE
3017
/*
3018
 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVS and keys"
3019
 *
3020
 * See also 8.2.2 RBG-based construction.
3021
 * Random construction consists of a free field (which can be NULL) and a
3022
 * random field which will use a DRBG that can return at least 96 bits of
3023
 * entropy strength. (The DRBG must be seeded by the FIPS module).
3024
 */
3025
static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
3026
{
3027
    int sz = gctx->ivlen - offset;
3028
3029
    /* Must be at least 96 bits */
3030
    if (sz <= 0 || gctx->ivlen < 12)
3031
        return 0;
3032
3033
    /* Use DRBG to generate random iv */
3034
    if (RAND_bytes(gctx->iv + offset, sz) <= 0)
3035
        return 0;
3036
    return 1;
3037
}
3038
#endif /* FIPS_MODULE */
3039
3040
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3041
    const unsigned char *in, size_t len)
3042
0
{
3043
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
3044
3045
    /* If not set up, return error */
3046
0
    if (!gctx->key_set)
3047
0
        return -1;
3048
3049
0
    if (gctx->tls_aad_len >= 0)
3050
0
        return aes_gcm_tls_cipher(ctx, out, in, len);
3051
3052
#ifdef FIPS_MODULE
3053
    /*
3054
     * FIPS requires generation of AES-GCM IVs inside the FIPS module.
3055
     * The IV can still be set externally (the security policy will state that
3056
     * this is not FIPS compliant). There are some applications
3057
     * where setting the IV externally is the only option available.
3058
     */
3059
    if (!gctx->iv_set) {
3060
        if (!EVP_CIPHER_CTX_is_encrypting(ctx) || !aes_gcm_iv_generate(gctx, 0))
3061
            return -1;
3062
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
3063
        gctx->iv_set = 1;
3064
        gctx->iv_gen_rand = 1;
3065
    }
3066
#else
3067
0
    if (!gctx->iv_set)
3068
0
        return -1;
3069
0
#endif /* FIPS_MODULE */
3070
3071
0
    if (in) {
3072
0
        if (out == NULL) {
3073
0
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
3074
0
                return -1;
3075
0
        } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3076
0
            if (gctx->ctr) {
3077
0
                size_t bulk = 0;
3078
#if defined(AES_GCM_ASM)
3079
                if (len >= 32 && AES_GCM_ASM(gctx)) {
3080
                    size_t res = (16 - gctx->gcm.mres) % 16;
3081
3082
                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3083
                        return -1;
3084
3085
                    bulk = AES_gcm_encrypt(in + res,
3086
                        out + res, len - res,
3087
                        gctx->gcm.key, gctx->gcm.Yi.c,
3088
                        gctx->gcm.Xi.u);
3089
                    gctx->gcm.len.u[1] += bulk;
3090
                    bulk += res;
3091
                }
3092
#endif
3093
0
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3094
0
                        in + bulk,
3095
0
                        out + bulk,
3096
0
                        len - bulk, gctx->ctr))
3097
0
                    return -1;
3098
0
            } else {
3099
0
                size_t bulk = 0;
3100
#if defined(AES_GCM_ASM2)
3101
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
3102
                    size_t res = (16 - gctx->gcm.mres) % 16;
3103
3104
                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3105
                        return -1;
3106
3107
                    bulk = AES_gcm_encrypt(in + res,
3108
                        out + res, len - res,
3109
                        gctx->gcm.key, gctx->gcm.Yi.c,
3110
                        gctx->gcm.Xi.u);
3111
                    gctx->gcm.len.u[1] += bulk;
3112
                    bulk += res;
3113
                }
3114
#endif
3115
0
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3116
0
                        in + bulk, out + bulk, len - bulk))
3117
0
                    return -1;
3118
0
            }
3119
0
        } else {
3120
0
            if (gctx->ctr) {
3121
0
                size_t bulk = 0;
3122
#if defined(AES_GCM_ASM)
3123
                if (len >= 16 && AES_GCM_ASM(gctx)) {
3124
                    size_t res = (16 - gctx->gcm.mres) % 16;
3125
3126
                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3127
                        return -1;
3128
3129
                    bulk = AES_gcm_decrypt(in + res,
3130
                        out + res, len - res,
3131
                        gctx->gcm.key,
3132
                        gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3133
                    gctx->gcm.len.u[1] += bulk;
3134
                    bulk += res;
3135
                }
3136
#endif
3137
0
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3138
0
                        in + bulk,
3139
0
                        out + bulk,
3140
0
                        len - bulk, gctx->ctr))
3141
0
                    return -1;
3142
0
            } else {
3143
0
                size_t bulk = 0;
3144
#if defined(AES_GCM_ASM2)
3145
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
3146
                    size_t res = (16 - gctx->gcm.mres) % 16;
3147
3148
                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3149
                        return -1;
3150
3151
                    bulk = AES_gcm_decrypt(in + res,
3152
                        out + res, len - res,
3153
                        gctx->gcm.key,
3154
                        gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3155
                    gctx->gcm.len.u[1] += bulk;
3156
                    bulk += res;
3157
                }
3158
#endif
3159
0
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3160
0
                        in + bulk, out + bulk, len - bulk))
3161
0
                    return -1;
3162
0
            }
3163
0
        }
3164
0
        return (int)len;
3165
0
    } else {
3166
0
        if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
3167
0
            if (gctx->taglen < 0)
3168
0
                return -1;
3169
0
            if (CRYPTO_gcm128_finish(&gctx->gcm,
3170
0
                    EVP_CIPHER_CTX_buf_noconst(ctx),
3171
0
                    gctx->taglen)
3172
0
                != 0)
3173
0
                return -1;
3174
0
            gctx->iv_set = 0;
3175
0
            return 0;
3176
0
        }
3177
0
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
3178
0
        gctx->taglen = 16;
3179
        /* Don't reuse the IV */
3180
0
        gctx->iv_set = 0;
3181
0
        return 0;
3182
0
    }
3183
0
}
3184
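/*
 * Caller-side decrypt sketch, not part of e_aes.c: aes_gcm_cipher() above
 * only verifies the tag during the final call, so the caller must treat a
 * failing EVP_DecryptFinal_ex() as an authentication failure and discard the
 * output.  The function name, key size, IV size and buffer names are
 * illustrative assumptions only.
 */
#include <openssl/evp.h>

static int gcm_decrypt(const unsigned char key[32],
                       const unsigned char iv[12],
                       const unsigned char *aad, int aadlen,
                       const unsigned char *ct, int ctlen,
                       const unsigned char tag[16], unsigned char *pt)
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int len = 0, ok = 0;

    if (ctx == NULL)
        return 0;
    ok = EVP_DecryptInit_ex(ctx, EVP_aes_256_gcm(), NULL, key, iv) == 1
         && EVP_DecryptUpdate(ctx, NULL, &len, aad, aadlen) == 1
         && EVP_DecryptUpdate(ctx, pt, &len, ct, ctlen) == 1
         /* The expected tag must be supplied before the final call */
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16,
                                (void *)tag) == 1
         && EVP_DecryptFinal_ex(ctx, pt + len, &len) == 1; /* tag check */
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}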
3185
#define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1       \
3186
    | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3187
    | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT   \
3188
    | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
3189
3190
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3191
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3192
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3193
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3194
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3195
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3196
3197
static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3198
0
{
3199
0
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3200
3201
0
    if (type == EVP_CTRL_COPY) {
3202
0
        EVP_CIPHER_CTX *out = ptr;
3203
0
        EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX, out);
3204
3205
0
        if (xctx->xts.key1) {
3206
0
            if (xctx->xts.key1 != &xctx->ks1)
3207
0
                return 0;
3208
0
            xctx_out->xts.key1 = &xctx_out->ks1;
3209
0
        }
3210
0
        if (xctx->xts.key2) {
3211
0
            if (xctx->xts.key2 != &xctx->ks2)
3212
0
                return 0;
3213
0
            xctx_out->xts.key2 = &xctx_out->ks2;
3214
0
        }
3215
0
        return 1;
3216
0
    } else if (type != EVP_CTRL_INIT)
3217
0
        return -1;
3218
    /* key1 and key2 are used as indicators that both key and IV are set */
3219
0
    xctx->xts.key1 = NULL;
3220
0
    xctx->xts.key2 = NULL;
3221
0
    return 1;
3222
0
}
3223
3224
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3225
    const unsigned char *iv, int enc)
3226
0
{
3227
0
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
3228
3229
0
    if (iv == NULL && key == NULL)
3230
0
        return 1;
3231
3232
0
    if (key != NULL) {
3233
0
        do {
3234
            /* The key is two half length keys in reality */
3235
0
            const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
3236
0
            const int bytes = keylen / 2;
3237
0
            const int bits = bytes * 8;
3238
3239
0
            if (keylen <= 0) {
3240
0
                ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
3241
0
                return 0;
3242
0
            }
3243
            /*
3244
             * Verify that the two keys are different.
3245
             *
3246
             * This addresses the vulnerability described in Rogaway's
3247
             * September 2004 paper:
3248
             *
3249
             *      "Efficient Instantiations of Tweakable Blockciphers and
3250
             *       Refinements to Modes OCB and PMAC".
3251
             *      (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3252
             *
3253
             * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3254
             * that:
3255
             *      "The check for Key_1 != Key_2 shall be done at any place
3256
             *       BEFORE using the keys in the XTS-AES algorithm to process
3257
             *       data with them."
3258
             */
3259
0
            if ((!allow_insecure_decrypt || enc)
3260
0
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
3261
0
                ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
3262
0
                return 0;
3263
0
            }
3264
3265
#ifdef AES_XTS_ASM
3266
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3267
#else
3268
0
            xctx->stream = NULL;
3269
0
#endif
3270
            /* key_len is two AES keys */
3271
#ifdef HWAES_CAPABLE
3272
            if (HWAES_CAPABLE) {
3273
                if (enc) {
3274
                    HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3275
                    xctx->xts.block1 = (block128_f)HWAES_encrypt;
3276
#ifdef HWAES_xts_encrypt
3277
                    xctx->stream = HWAES_xts_encrypt;
3278
#endif
3279
                } else {
3280
                    HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3281
                    xctx->xts.block1 = (block128_f)HWAES_decrypt;
3282
#ifdef HWAES_xts_decrypt
3283
                    xctx->stream = HWAES_xts_decrypt;
3284
#endif
3285
                }
3286
3287
                HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3288
                xctx->xts.block2 = (block128_f)HWAES_encrypt;
3289
3290
                xctx->xts.key1 = &xctx->ks1;
3291
                break;
3292
            } else
3293
#endif
3294
#ifdef BSAES_CAPABLE
3295
                if (BSAES_CAPABLE)
3296
                xctx->stream = enc ? ossl_bsaes_xts_encrypt : ossl_bsaes_xts_decrypt;
3297
            else
3298
#endif
3299
#ifdef VPAES_CAPABLE
3300
                if (VPAES_CAPABLE) {
3301
                if (enc) {
3302
                    vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
3303
                    xctx->xts.block1 = (block128_f)vpaes_encrypt;
3304
                } else {
3305
                    vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
3306
                    xctx->xts.block1 = (block128_f)vpaes_decrypt;
3307
                }
3308
3309
                vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3310
                xctx->xts.block2 = (block128_f)vpaes_encrypt;
3311
3312
                xctx->xts.key1 = &xctx->ks1;
3313
                break;
3314
            } else
3315
#endif
3316
0
                (void)0; /* terminate potentially open 'else' */
3317
3318
0
            if (enc) {
3319
0
                AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3320
0
                xctx->xts.block1 = (block128_f)AES_encrypt;
3321
0
            } else {
3322
0
                AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3323
0
                xctx->xts.block1 = (block128_f)AES_decrypt;
3324
0
            }
3325
3326
0
            AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3327
0
            xctx->xts.block2 = (block128_f)AES_encrypt;
3328
3329
0
            xctx->xts.key1 = &xctx->ks1;
3330
0
        } while (0);
3331
0
    }
3332
3333
0
    if (iv) {
3334
0
        xctx->xts.key2 = &xctx->ks2;
3335
0
        memcpy(ctx->iv, iv, 16);
3336
0
    }
3337
3338
0
    return 1;
3339
0
}
3340
3341
static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3342
    const unsigned char *in, size_t len)
3343
0
{
3344
0
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
3345
3346
0
    if (xctx->xts.key1 == NULL
3347
0
        || xctx->xts.key2 == NULL
3348
0
        || out == NULL
3349
0
        || in == NULL
3350
0
        || len < AES_BLOCK_SIZE)
3351
0
        return 0;
3352
3353
    /*
3354
     * Impose a limit of 2^20 blocks per data unit as specified by
3355
     * IEEE Std 1619-2018.  The earlier and obsolete IEEE Std 1619-2007
3356
     * indicated that this was a SHOULD NOT rather than a MUST NOT.
3357
     * NIST SP 800-38E mandates the same limit.
3358
     */
3359
0
    if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
3360
0
        ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
3361
0
        return 0;
3362
0
    }
3363
3364
0
    if (xctx->stream)
3365
0
        (*xctx->stream)(in, out, len,
3366
0
            xctx->xts.key1, xctx->xts.key2,
3367
0
            ctx->iv);
3368
0
    else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
3369
0
                 EVP_CIPHER_CTX_is_encrypting(ctx)))
3370
0
        return 0;
3371
0
    return 1;
3372
0
}
3373
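/*
 * Caller-side sketch, not part of e_aes.c: aes_xts_init_key() above rejects
 * a key whose two halves are identical, and aes_xts_cipher() rejects data
 * units shorter than one block or longer than 2^20 blocks (16 MiB), so a
 * caller can pre-check those conditions.  The function name, key layout and
 * buffer names below are illustrative assumptions only.
 */
#include <stddef.h>
#include <openssl/aes.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>

static int xts256_encrypt_data_unit(const unsigned char key[64], /* two AES-256 keys */
                                    const unsigned char tweak[16],
                                    const unsigned char *in, int inlen,
                                    unsigned char *out)
{
    EVP_CIPHER_CTX *ctx;
    int len = 0, ok = 0;

    /* Mirror the checks made by the implementation above */
    if (CRYPTO_memcmp(key, key + 32, 32) == 0)            /* duplicated halves */
        return 0;
    if (inlen < AES_BLOCK_SIZE
        || (size_t)inlen > (size_t)(1 << 20) * AES_BLOCK_SIZE)
        return 0;

    if ((ctx = EVP_CIPHER_CTX_new()) == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(ctx, EVP_aes_256_xts(), NULL, key, tweak) == 1
         && EVP_EncryptUpdate(ctx, out, &len, in, inlen) == 1
         && EVP_EncryptFinal_ex(ctx, out + len, &len) == 1;
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}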
3374
#define aes_xts_cleanup NULL
3375
3376
#define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3377
    | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT               \
3378
    | EVP_CIPH_CUSTOM_COPY)
3379
3380
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3381
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
3382
3383
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3384
0
{
3385
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, c);
3386
0
    switch (type) {
3387
0
    case EVP_CTRL_INIT:
3388
0
        cctx->key_set = 0;
3389
0
        cctx->iv_set = 0;
3390
0
        cctx->L = 8;
3391
0
        cctx->M = 12;
3392
0
        cctx->tag_set = 0;
3393
0
        cctx->len_set = 0;
3394
0
        cctx->tls_aad_len = -1;
3395
0
        return 1;
3396
3397
0
    case EVP_CTRL_GET_IVLEN:
3398
0
        *(int *)ptr = 15 - cctx->L;
3399
0
        return 1;
3400
3401
0
    case EVP_CTRL_AEAD_TLS1_AAD:
3402
        /* Save the AAD for later use */
3403
0
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
3404
0
            return 0;
3405
0
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3406
0
        cctx->tls_aad_len = arg;
3407
0
        {
3408
0
            uint16_t len = EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3409
0
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3410
            /* Correct length for explicit IV */
3411
0
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3412
0
                return 0;
3413
0
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3414
            /* If decrypting correct for tag too */
3415
0
            if (!EVP_CIPHER_CTX_is_encrypting(c)) {
3416
0
                if (len < cctx->M)
3417
0
                    return 0;
3418
0
                len -= cctx->M;
3419
0
            }
3420
0
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3421
0
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3422
0
        }
3423
        /* Extra padding: tag appended to record */
3424
0
        return cctx->M;
3425
3426
0
    case EVP_CTRL_CCM_SET_IV_FIXED:
3427
        /* Sanity check length */
3428
0
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3429
0
            return 0;
3430
        /* Just copy to first part of IV */
3431
0
        memcpy(c->iv, ptr, arg);
3432
0
        return 1;
3433
3434
0
    case EVP_CTRL_AEAD_SET_IVLEN:
3435
0
        arg = 15 - arg;
3436
        /* fall through */
3437
0
    case EVP_CTRL_CCM_SET_L:
3438
0
        if (arg < 2 || arg > 8)
3439
0
            return 0;
3440
0
        cctx->L = arg;
3441
0
        return 1;
3442
3443
0
    case EVP_CTRL_AEAD_SET_TAG:
3444
0
        if ((arg & 1) || arg < 4 || arg > 16)
3445
0
            return 0;
3446
0
        if (EVP_CIPHER_CTX_is_encrypting(c) && ptr)
3447
0
            return 0;
3448
0
        if (ptr) {
3449
0
            cctx->tag_set = 1;
3450
0
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3451
0
        }
3452
0
        cctx->M = arg;
3453
0
        return 1;
3454
3455
0
    case EVP_CTRL_AEAD_GET_TAG:
3456
0
        if (!EVP_CIPHER_CTX_is_encrypting(c) || !cctx->tag_set)
3457
0
            return 0;
3458
0
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3459
0
            return 0;
3460
0
        cctx->tag_set = 0;
3461
0
        cctx->iv_set = 0;
3462
0
        cctx->len_set = 0;
3463
0
        return 1;
3464
3465
0
    case EVP_CTRL_COPY: {
3466
0
        EVP_CIPHER_CTX *out = ptr;
3467
0
        EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX, out);
3468
0
        if (cctx->ccm.key) {
3469
0
            if (cctx->ccm.key != &cctx->ks)
3470
0
                return 0;
3471
0
            cctx_out->ccm.key = &cctx_out->ks;
3472
0
        }
3473
0
        return 1;
3474
0
    }
3475
3476
0
    default:
3477
0
        return -1;
3478
0
    }
3479
0
}
3480
3481
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3482
    const unsigned char *iv, int enc)
3483
0
{
3484
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
3485
3486
0
    if (iv == NULL && key == NULL)
3487
0
        return 1;
3488
3489
0
    if (key != NULL) {
3490
0
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
3491
3492
0
        if (keylen <= 0) {
3493
0
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
3494
0
            return 0;
3495
0
        }
3496
0
        do {
3497
#ifdef HWAES_CAPABLE
3498
            if (HWAES_CAPABLE) {
3499
                HWAES_set_encrypt_key(key, keylen, &cctx->ks.ks);
3500
3501
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3502
                    &cctx->ks, (block128_f)HWAES_encrypt);
3503
                cctx->str = NULL;
3504
                cctx->key_set = 1;
3505
                break;
3506
            } else
3507
#endif
3508
#ifdef VPAES_CAPABLE
3509
                if (VPAES_CAPABLE) {
3510
                vpaes_set_encrypt_key(key, keylen, &cctx->ks.ks);
3511
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3512
                    &cctx->ks, (block128_f)vpaes_encrypt);
3513
                cctx->str = NULL;
3514
                cctx->key_set = 1;
3515
                break;
3516
            }
3517
#endif
3518
0
            AES_set_encrypt_key(key, keylen, &cctx->ks.ks);
3519
0
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3520
0
                &cctx->ks, (block128_f)AES_encrypt);
3521
0
            cctx->str = NULL;
3522
0
            cctx->key_set = 1;
3523
0
        } while (0);
3524
0
    }
3525
0
    if (iv != NULL) {
3526
0
        memcpy(ctx->iv, iv, 15 - cctx->L);
3527
0
        cctx->iv_set = 1;
3528
0
    }
3529
0
    return 1;
3530
0
}
3531
3532
static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3533
    const unsigned char *in, size_t len)
3534
0
{
3535
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
3536
0
    CCM128_CONTEXT *ccm = &cctx->ccm;
3537
    /* Encrypt/decrypt must be performed in place */
3538
0
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3539
0
        return -1;
3540
    /* If encrypting set explicit IV from sequence number (start of AAD) */
3541
0
    if (EVP_CIPHER_CTX_is_encrypting(ctx))
3542
0
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3543
0
            EVP_CCM_TLS_EXPLICIT_IV_LEN);
3544
    /* Get rest of IV from explicit IV */
3545
0
    memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
3546
0
        EVP_CCM_TLS_EXPLICIT_IV_LEN);
3547
    /* Correct length value */
3548
0
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3549
0
    if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
3550
0
            len))
3551
0
        return -1;
3552
    /* Use saved AAD */
3553
0
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx),
3554
0
        cctx->tls_aad_len);
3555
    /* Fix buffer to point to payload */
3556
0
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3557
0
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3558
0
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3559
0
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3560
0
                            cctx->str)
3561
0
                      : CRYPTO_ccm128_encrypt(ccm, in, out, len))
3562
0
            return -1;
3563
0
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3564
0
            return -1;
3565
0
        return (int)(len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M);
3566
0
    } else {
3567
0
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3568
0
                            cctx->str)
3569
0
                      : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3570
0
            unsigned char tag[16];
3571
0
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3572
0
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3573
0
                    return (int)len;
3574
0
            }
3575
0
        }
3576
0
        OPENSSL_cleanse(out, len);
3577
0
        return -1;
3578
0
    }
3579
0
}
3580
3581
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3582
    const unsigned char *in, size_t len)
3583
0
{
3584
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
3585
0
    CCM128_CONTEXT *ccm = &cctx->ccm;
3586
    /* If not set up, return error */
3587
0
    if (!cctx->key_set)
3588
0
        return -1;
3589
3590
0
    if (cctx->tls_aad_len >= 0)
3591
0
        return aes_ccm_tls_cipher(ctx, out, in, len);
3592
3593
    /* EVP_*Final() doesn't return any data */
3594
0
    if (in == NULL && out != NULL)
3595
0
        return 0;
3596
3597
0
    if (!cctx->iv_set)
3598
0
        return -1;
3599
3600
0
    if (!out) {
3601
0
        if (!in) {
3602
0
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv,
3603
0
                    15 - cctx->L, len))
3604
0
                return -1;
3605
0
            cctx->len_set = 1;
3606
0
            return (int)len;
3607
0
        }
3608
        /* If we have AAD, the message length must have been set already */
3609
0
        if (!cctx->len_set && len)
3610
0
            return -1;
3611
0
        CRYPTO_ccm128_aad(ccm, in, len);
3612
0
        return (int)len;
3613
0
    }
3614
3615
    /* The tag must be set before actually decrypting data */
3616
0
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && !cctx->tag_set)
3617
0
        return -1;
3618
3619
    /* If the length has not been set yet, do it now */
3620
0
    if (!cctx->len_set) {
3621
0
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
3622
0
            return -1;
3623
0
        cctx->len_set = 1;
3624
0
    }
3625
0
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3626
0
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3627
0
                            cctx->str)
3628
0
                      : CRYPTO_ccm128_encrypt(ccm, in, out, len))
3629
0
            return -1;
3630
0
        cctx->tag_set = 1;
3631
0
        return (int)len;
3632
0
    } else {
3633
0
        int rv = -1;
3634
0
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3635
0
                            cctx->str)
3636
0
                      : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3637
0
            unsigned char tag[16];
3638
0
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3639
0
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3640
0
                        cctx->M))
3641
0
                    rv = (int)len;
3642
0
            }
3643
0
        }
3644
0
        if (rv == -1)
3645
0
            OPENSSL_cleanse(out, len);
3646
0
        cctx->iv_set = 0;
3647
0
        cctx->tag_set = 0;
3648
0
        cctx->len_set = 0;
3649
0
        return rv;
3650
0
    }
3651
0
}
3652
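/*
 * Caller-side sketch, not part of e_aes.c: the len_set logic in
 * aes_ccm_cipher() above means the total message length must be declared
 * before any AAD, and tag/IV lengths are set through ctrls before the key.
 * The function name, key size, nonce size and tag length are illustrative
 * assumptions; the nonce length must equal 15 - L.
 */
#include <openssl/evp.h>

static int ccm_encrypt(const unsigned char key[16],
                       const unsigned char nonce[12],      /* 15 - L with L = 3 */
                       const unsigned char *aad, int aadlen,
                       const unsigned char *pt, int ptlen,
                       unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int len = 0, ok = 0;

    if (ctx == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(ctx, EVP_aes_128_ccm(), NULL, NULL, NULL) == 1
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL) == 1
         /* When encrypting, SET_TAG takes the tag length and a NULL ptr */
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 16, NULL) == 1
         && EVP_EncryptInit_ex(ctx, NULL, NULL, key, nonce) == 1
         /* Declare the total plaintext length before the AAD */
         && EVP_EncryptUpdate(ctx, NULL, &len, NULL, ptlen) == 1
         && EVP_EncryptUpdate(ctx, NULL, &len, aad, aadlen) == 1
         && EVP_EncryptUpdate(ctx, ct, &len, pt, ptlen) == 1
         && EVP_EncryptFinal_ex(ctx, ct + len, &len) == 1
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag) == 1;
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}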
3653
#define aes_ccm_cleanup NULL
3654
3655
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3656
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3657
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3658
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3659
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3660
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3661
3662
typedef struct {
3663
    union {
3664
        OSSL_UNION_ALIGN;
3665
        AES_KEY ks;
3666
    } ks;
3667
    /* Non-NULL when an IV has been set */
3668
    unsigned char *iv;
3669
} EVP_AES_WRAP_CTX;
3670
3671
static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3672
    const unsigned char *iv, int enc)
3673
0
{
3674
0
    int len;
3675
0
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX, ctx);
3676
3677
0
    if (iv == NULL && key == NULL)
3678
0
        return 1;
3679
0
    if (key != NULL) {
3680
0
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
3681
3682
0
        if (keylen <= 0) {
3683
0
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
3684
0
            return 0;
3685
0
        }
3686
0
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
3687
0
            AES_set_encrypt_key(key, keylen, &wctx->ks.ks);
3688
0
        else
3689
0
            AES_set_decrypt_key(key, keylen, &wctx->ks.ks);
3690
0
        if (iv == NULL)
3691
0
            wctx->iv = NULL;
3692
0
    }
3693
0
    if (iv != NULL) {
3694
0
        if ((len = EVP_CIPHER_CTX_get_iv_length(ctx)) < 0)
3695
0
            return 0;
3696
0
        memcpy(ctx->iv, iv, len);
3697
0
        wctx->iv = ctx->iv;
3698
0
    }
3699
0
    return 1;
3700
0
}
3701
3702
static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3703
    const unsigned char *in, size_t inlen)
3704
0
{
3705
0
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX, ctx);
3706
0
    size_t rv;
3707
    /* AES wrap with padding has IV length of 4, without padding 8 */
3708
0
    int pad = EVP_CIPHER_CTX_get_iv_length(ctx) == 4;
3709
    /* No final operation so always return zero length */
3710
0
    if (!in)
3711
0
        return 0;
3712
    /* Input length must always be non-zero */
3713
0
    if (!inlen)
3714
0
        return -1;
3715
    /* If decrypting need at least 16 bytes and multiple of 8 */
3716
0
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3717
0
        return -1;
3718
    /* If not padding input must be multiple of 8 */
3719
0
    if (!pad && inlen & 0x7)
3720
0
        return -1;
3721
0
    if (ossl_is_partially_overlapping(out, in, (int)inlen)) {
3722
0
        ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
3723
0
        return 0;
3724
0
    }
3725
0
    if (!out) {
3726
0
        if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3727
            /* If padding round up to multiple of 8 */
3728
0
            if (pad)
3729
0
                inlen = (inlen + 7) / 8 * 8;
3730
            /* 8 byte prefix */
3731
0
            return (int)(inlen + 8);
3732
0
        } else {
3733
            /*
3734
             * If not padding output will be exactly 8 bytes smaller than
3735
             * input. If padding it will be at least 8 bytes smaller but we
3736
             * don't know how much.
3737
             */
3738
0
            return (int)(inlen - 8);
3739
0
        }
3740
0
    }
3741
0
    if (pad) {
3742
0
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
3743
0
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3744
0
                out, in, inlen,
3745
0
                (block128_f)AES_encrypt);
3746
0
        else
3747
0
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3748
0
                out, in, inlen,
3749
0
                (block128_f)AES_decrypt);
3750
0
    } else {
3751
0
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
3752
0
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3753
0
                out, in, inlen, (block128_f)AES_encrypt);
3754
0
        else
3755
0
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3756
0
                out, in, inlen, (block128_f)AES_decrypt);
3757
0
    }
3758
0
    return rv ? (int)rv : -1;
3759
0
}
3760
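/*
 * Caller-side sketch, not part of e_aes.c: the wrap ciphers refuse to run
 * unless the caller opts in with EVP_CIPHER_CTX_FLAG_WRAP_ALLOW, a NULL IV
 * selects the default RFC 3394 IV, and the wrapped output is the input
 * length plus 8 bytes.  The function name, key-encryption-key size and
 * buffer names are illustrative assumptions only.
 */
#include <openssl/evp.h>

static int wrap_key(const unsigned char kek[32],             /* key-encryption key */
                    const unsigned char *key_in, int key_in_len, /* multiple of 8 */
                    unsigned char *wrapped)                   /* key_in_len + 8 bytes */
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int len = 0, ok = 0;

    if (ctx == NULL)
        return 0;
    /* Wrap mode must be explicitly allowed on the context */
    EVP_CIPHER_CTX_set_flags(ctx, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
    ok = EVP_EncryptInit_ex(ctx, EVP_aes_256_wrap(), NULL, kek, NULL) == 1
         && EVP_EncryptUpdate(ctx, wrapped, &len, key_in, key_in_len) == 1
         && EVP_EncryptFinal_ex(ctx, wrapped + len, &len) == 1;
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}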
3761
#define WRAP_FLAGS (EVP_CIPH_WRAP_MODE                 \
3762
    | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3763
    | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3764
3765
static const EVP_CIPHER aes_128_wrap = {
3766
    NID_id_aes128_wrap,
3767
    8, 16, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3768
    aes_wrap_init_key, aes_wrap_cipher,
3769
    NULL,
3770
    sizeof(EVP_AES_WRAP_CTX),
3771
    NULL, NULL, NULL, NULL
3772
};
3773
3774
const EVP_CIPHER *EVP_aes_128_wrap(void)
3775
3
{
3776
3
    return &aes_128_wrap;
3777
3
}
3778
3779
static const EVP_CIPHER aes_192_wrap = {
3780
    NID_id_aes192_wrap,
3781
    8, 24, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3782
    aes_wrap_init_key, aes_wrap_cipher,
3783
    NULL,
3784
    sizeof(EVP_AES_WRAP_CTX),
3785
    NULL, NULL, NULL, NULL
3786
};
3787
3788
const EVP_CIPHER *EVP_aes_192_wrap(void)
3789
3
{
3790
3
    return &aes_192_wrap;
3791
3
}
3792
3793
static const EVP_CIPHER aes_256_wrap = {
3794
    NID_id_aes256_wrap,
3795
    8, 32, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3796
    aes_wrap_init_key, aes_wrap_cipher,
3797
    NULL,
3798
    sizeof(EVP_AES_WRAP_CTX),
3799
    NULL, NULL, NULL, NULL
3800
};
3801
3802
const EVP_CIPHER *EVP_aes_256_wrap(void)
3803
3
{
3804
3
    return &aes_256_wrap;
3805
3
}
3806
3807
static const EVP_CIPHER aes_128_wrap_pad = {
3808
    NID_id_aes128_wrap_pad,
3809
    8, 16, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3810
    aes_wrap_init_key, aes_wrap_cipher,
3811
    NULL,
3812
    sizeof(EVP_AES_WRAP_CTX),
3813
    NULL, NULL, NULL, NULL
3814
};
3815
3816
const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3817
3
{
3818
3
    return &aes_128_wrap_pad;
3819
3
}
3820
3821
static const EVP_CIPHER aes_192_wrap_pad = {
3822
    NID_id_aes192_wrap_pad,
3823
    8, 24, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3824
    aes_wrap_init_key, aes_wrap_cipher,
3825
    NULL,
3826
    sizeof(EVP_AES_WRAP_CTX),
3827
    NULL, NULL, NULL, NULL
3828
};
3829
3830
const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3831
3
{
3832
3
    return &aes_192_wrap_pad;
3833
3
}
3834
3835
static const EVP_CIPHER aes_256_wrap_pad = {
3836
    NID_id_aes256_wrap_pad,
3837
    8, 32, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3838
    aes_wrap_init_key, aes_wrap_cipher,
3839
    NULL,
3840
    sizeof(EVP_AES_WRAP_CTX),
3841
    NULL, NULL, NULL, NULL
3842
};
3843
3844
const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3845
3
{
3846
3
    return &aes_256_wrap_pad;
3847
3
}
3848
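/*
 * Sizing sketch, not part of e_aes.c: the *_wrap_pad ciphers (RFC 5649)
 * accept any non-zero input length; as aes_wrap_cipher() computes above, the
 * wrapped size is the input rounded up to a multiple of 8 plus an 8-byte
 * prefix.  The helper name below is illustrative only.
 */
#include <stddef.h>

static size_t wrap_pad_output_len(size_t inlen)
{
    return (inlen + 7) / 8 * 8 + 8;   /* round up, then add the 8-byte prefix */
}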
3849
#ifndef OPENSSL_NO_OCB
3850
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3851
0
{
3852
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, c);
3853
0
    EVP_CIPHER_CTX *newc;
3854
0
    EVP_AES_OCB_CTX *new_octx;
3855
3856
0
    switch (type) {
3857
0
    case EVP_CTRL_INIT:
3858
0
        octx->key_set = 0;
3859
0
        octx->iv_set = 0;
3860
0
        octx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
3861
0
        octx->iv = c->iv;
3862
0
        octx->taglen = 16;
3863
0
        octx->data_buf_len = 0;
3864
0
        octx->aad_buf_len = 0;
3865
0
        return 1;
3866
3867
0
    case EVP_CTRL_GET_IVLEN:
3868
0
        *(int *)ptr = octx->ivlen;
3869
0
        return 1;
3870
3871
0
    case EVP_CTRL_AEAD_SET_IVLEN:
3872
        /* IV len must be 1 to 15 */
3873
0
        if (arg <= 0 || arg > 15)
3874
0
            return 0;
3875
3876
0
        octx->ivlen = arg;
3877
0
        return 1;
3878
3879
0
    case EVP_CTRL_AEAD_SET_TAG:
3880
0
        if (ptr == NULL) {
3881
            /* Tag len must be 0 to 16 */
3882
0
            if (arg < 0 || arg > 16)
3883
0
                return 0;
3884
3885
0
            octx->taglen = arg;
3886
0
            return 1;
3887
0
        }
3888
0
        if (arg != octx->taglen || EVP_CIPHER_CTX_is_encrypting(c))
3889
0
            return 0;
3890
0
        memcpy(octx->tag, ptr, arg);
3891
0
        return 1;
3892
3893
0
    case EVP_CTRL_AEAD_GET_TAG:
3894
0
        if (arg != octx->taglen || !EVP_CIPHER_CTX_is_encrypting(c))
3895
0
            return 0;
3896
3897
0
        memcpy(ptr, octx->tag, arg);
3898
0
        return 1;
3899
3900
0
    case EVP_CTRL_COPY:
3901
0
        newc = (EVP_CIPHER_CTX *)ptr;
3902
0
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX, newc);
3903
0
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
3904
0
            &new_octx->ksenc.ks,
3905
0
            &new_octx->ksdec.ks);
3906
3907
0
    default:
3908
0
        return -1;
3909
0
    }
3910
0
}
3911
3912
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3913
    const unsigned char *iv, int enc)
3914
0
{
3915
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
3916
3917
0
    if (iv == NULL && key == NULL)
3918
0
        return 1;
3919
3920
0
    if (key != NULL) {
3921
0
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
3922
3923
0
        if (keylen <= 0) {
3924
0
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
3925
0
            return 0;
3926
0
        }
3927
0
        do {
3928
            /*
3929
             * We set both the encrypt and decrypt key here because decrypt
3930
             * needs both. We could possibly optimise to remove setting the
3931
             * decrypt for an encryption operation.
3932
             */
3933
#ifdef HWAES_CAPABLE
3934
            if (HWAES_CAPABLE) {
3935
                HWAES_set_encrypt_key(key, keylen, &octx->ksenc.ks);
3936
                HWAES_set_decrypt_key(key, keylen, &octx->ksdec.ks);
3937
                if (!CRYPTO_ocb128_init(&octx->ocb,
3938
                        &octx->ksenc.ks, &octx->ksdec.ks,
3939
                        (block128_f)HWAES_encrypt,
3940
                        (block128_f)HWAES_decrypt,
3941
                        enc ? HWAES_ocb_encrypt
3942
                            : HWAES_ocb_decrypt))
3943
                    return 0;
3944
                break;
3945
            }
3946
#endif
3947
#ifdef VPAES_CAPABLE
3948
            if (VPAES_CAPABLE) {
3949
                vpaes_set_encrypt_key(key, keylen, &octx->ksenc.ks);
3950
                vpaes_set_decrypt_key(key, keylen, &octx->ksdec.ks);
3951
                if (!CRYPTO_ocb128_init(&octx->ocb,
3952
                        &octx->ksenc.ks, &octx->ksdec.ks,
3953
                        (block128_f)vpaes_encrypt,
3954
                        (block128_f)vpaes_decrypt,
3955
                        NULL))
3956
                    return 0;
3957
                break;
3958
            }
3959
#endif
3960
0
            AES_set_encrypt_key(key, keylen, &octx->ksenc.ks);
3961
0
            AES_set_decrypt_key(key, keylen, &octx->ksdec.ks);
3962
0
            if (!CRYPTO_ocb128_init(&octx->ocb,
3963
0
                    &octx->ksenc.ks, &octx->ksdec.ks,
3964
0
                    (block128_f)AES_encrypt,
3965
0
                    (block128_f)AES_decrypt,
3966
0
                    NULL))
3967
0
                return 0;
3968
0
        } while (0);
3969
3970
        /*
3971
         * If we have an iv we can set it directly, otherwise use saved IV.
3972
         */
3973
0
        if (iv == NULL && octx->iv_set)
3974
0
            iv = octx->iv;
3975
0
        if (iv) {
3976
0
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
3977
0
                != 1)
3978
0
                return 0;
3979
0
            octx->iv_set = 1;
3980
0
        }
3981
0
        octx->key_set = 1;
3982
0
    } else {
3983
        /* If key set use IV, otherwise copy */
3984
0
        if (octx->key_set)
3985
0
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
3986
0
        else
3987
0
            memcpy(octx->iv, iv, octx->ivlen);
3988
0
        octx->iv_set = 1;
3989
0
    }
3990
0
    return 1;
3991
0
}
3992
3993
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3994
    const unsigned char *in, size_t len)
3995
0
{
3996
0
    unsigned char *buf;
3997
0
    int *buf_len;
3998
0
    int written_len = 0;
3999
0
    size_t trailing_len;
4000
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
4001
4002
    /* If IV or Key not set then return error */
4003
0
    if (!octx->iv_set)
4004
0
        return -1;
4005
4006
0
    if (!octx->key_set)
4007
0
        return -1;
4008
4009
0
    if (in != NULL) {
4010
        /*
4011
         * Need to ensure we are only passing full blocks to low-level OCB
4012
         * routines. We do it here rather than in EVP_EncryptUpdate/
4013
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
4014
         * and those routines don't support that.
4015
         */
4016
4017
        /* Are we dealing with AAD or normal data here? */
4018
0
        if (out == NULL) {
4019
0
            buf = octx->aad_buf;
4020
0
            buf_len = &(octx->aad_buf_len);
4021
0
        } else {
4022
0
            buf = octx->data_buf;
4023
0
            buf_len = &(octx->data_buf_len);
4024
4025
0
            if (ossl_is_partially_overlapping(out + *buf_len, in, (int)len)) {
4026
0
                ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
4027
0
                return 0;
4028
0
            }
4029
0
        }
4030
4031
        /*
4032
         * If we've got a partially filled buffer from a previous call then
4033
         * use that data first
4034
         */
4035
0
        if (*buf_len > 0) {
4036
0
            unsigned int remaining;
4037
4038
0
            remaining = AES_BLOCK_SIZE - (*buf_len);
4039
0
            if (remaining > len) {
4040
0
                memcpy(buf + (*buf_len), in, len);
4041
0
                *(buf_len) += (int)len;
4042
0
                return 0;
4043
0
            }
4044
0
            memcpy(buf + (*buf_len), in, remaining);
4045
4046
            /*
4047
             * If we get here we've filled the buffer, so process it
4048
             */
4049
0
            len -= remaining;
4050
0
            in += remaining;
4051
0
            if (out == NULL) {
4052
0
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
4053
0
                    return -1;
4054
0
            } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
4055
0
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
4056
0
                        AES_BLOCK_SIZE))
4057
0
                    return -1;
4058
0
            } else {
4059
0
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
4060
0
                        AES_BLOCK_SIZE))
4061
0
                    return -1;
4062
0
            }
4063
0
            written_len = AES_BLOCK_SIZE;
4064
0
            *buf_len = 0;
4065
0
            if (out != NULL)
4066
0
                out += AES_BLOCK_SIZE;
4067
0
        }
4068
4069
        /* Do we have a partial block to handle at the end? */
4070
0
        trailing_len = len % AES_BLOCK_SIZE;
4071
4072
        /*
4073
         * If we've got some full blocks to handle, then process these first
4074
         */
4075
0
        if (len != trailing_len) {
4076
0
            if (out == NULL) {
4077
0
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
4078
0
                    return -1;
4079
0
            } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
4080
0
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, in, out, len - trailing_len))
4081
0
                    return -1;
4082
0
            } else {
4083
0
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, in, out, len - trailing_len))
4084
0
                    return -1;
4085
0
            }
4086
0
            written_len += (int)(len - trailing_len);
4087
0
            in += len - trailing_len;
4088
0
        }
4089
4090
        /* Handle any trailing partial block */
4091
0
        if (trailing_len > 0) {
4092
0
            memcpy(buf, in, trailing_len);
4093
0
            *buf_len = (int)trailing_len;
4094
0
        }
4095
4096
0
        return written_len;
4097
0
    } else {
4098
        /*
4099
         * First of all empty the buffer of any partial block that we might
4100
         * have been provided - both for data and AAD
4101
         */
4102
0
        if (octx->data_buf_len > 0) {
4103
0
            if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
4104
0
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
4105
0
                        octx->data_buf_len))
4106
0
                    return -1;
4107
0
            } else {
4108
0
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
4109
0
                        octx->data_buf_len))
4110
0
                    return -1;
4111
0
            }
4112
0
            written_len = octx->data_buf_len;
4113
0
            octx->data_buf_len = 0;
4114
0
        }
4115
0
        if (octx->aad_buf_len > 0) {
4116
0
            if (!CRYPTO_ocb128_aad(&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4117
0
                return -1;
4118
0
            octx->aad_buf_len = 0;
4119
0
        }
4120
        /* If decrypting then verify */
4121
0
        if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
4122
0
            if (octx->taglen < 0)
4123
0
                return -1;
4124
0
            if (CRYPTO_ocb128_finish(&octx->ocb,
4125
0
                    octx->tag, octx->taglen)
4126
0
                != 0)
4127
0
                return -1;
4128
0
            octx->iv_set = 0;
4129
0
            return written_len;
4130
0
        }
4131
        /* If encrypting then just get the tag */
4132
0
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4133
0
            return -1;
4134
        /* Don't reuse the IV */
4135
0
        octx->iv_set = 0;
4136
0
        return written_len;
4137
0
    }
4138
0
}
4139
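/*
 * Caller-side sketch, not part of e_aes.c and only meaningful when OCB is
 * enabled: aes_ocb_cipher() above buffers partial blocks internally, so each
 * EVP_EncryptUpdate() may report fewer bytes than it was given and the
 * remainder is flushed by a later call or by EVP_EncryptFinal_ex().  The
 * function name, key size, nonce size and tag length are illustrative
 * assumptions only.
 */
#include <openssl/evp.h>

static int ocb_encrypt(const unsigned char key[16],
                       const unsigned char nonce[12],
                       const unsigned char *aad, int aadlen,
                       const unsigned char *pt, int ptlen,
                       unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int len = 0, ctlen = 0, ok = 0;

    if (ctx == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(ctx, EVP_aes_128_ocb(), NULL, NULL, NULL) == 1
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL) == 1
         && EVP_EncryptInit_ex(ctx, NULL, NULL, key, nonce) == 1
         && EVP_EncryptUpdate(ctx, NULL, &len, aad, aadlen) == 1
         && EVP_EncryptUpdate(ctx, ct, &len, pt, ptlen) == 1;
    ctlen = len;
    ok = ok
         && EVP_EncryptFinal_ex(ctx, ct + ctlen, &len) == 1 /* flushes any partial block */
         && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag) == 1;
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}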
4140
static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4141
0
{
4142
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, c);
4143
0
    CRYPTO_ocb128_cleanup(&octx->ocb);
4144
0
    return 1;
4145
0
}
4146
4147
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4148
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4149
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4150
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4151
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4152
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4153
#endif /* OPENSSL_NO_OCB */