Coverage Report

Created: 2025-12-31 06:58

/src/openssl30/crypto/evp/e_aes.c
Line
Count
Source
1
/*
2
 * Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved.
3
 *
4
 * Licensed under the Apache License 2.0 (the "License").  You may not use
5
 * this file except in compliance with the License.  You can obtain a copy
6
 * in the file LICENSE in the source distribution or at
7
 * https://www.openssl.org/source/license.html
8
 */
9
10
/*
11
 * This file uses the low level AES functions (which are deprecated for
12
 * non-internal use) in order to implement the EVP AES ciphers.
13
 */
14
#include "internal/deprecated.h"
15
16
#include <string.h>
17
#include <assert.h>
18
#include <openssl/opensslconf.h>
19
#include <openssl/crypto.h>
20
#include <openssl/evp.h>
21
#include <openssl/err.h>
22
#include <openssl/aes.h>
23
#include <openssl/rand.h>
24
#include <openssl/cmac.h>
25
#include "crypto/evp.h"
26
#include "internal/cryptlib.h"
27
#include "crypto/modes.h"
28
#include "crypto/siv.h"
29
#include "crypto/aes_platform.h"
30
#include "evp_local.h"
31
32
typedef struct {
33
    union {
34
        OSSL_UNION_ALIGN;
35
        AES_KEY ks;
36
    } ks;
37
    block128_f block;
38
    union {
39
        cbc128_f cbc;
40
        ctr128_f ctr;
41
    } stream;
42
} EVP_AES_KEY;
43
44
typedef struct {
45
    union {
46
        OSSL_UNION_ALIGN;
47
        AES_KEY ks;
48
    } ks; /* AES key schedule to use */
49
    int key_set; /* Set if key initialised */
50
    int iv_set; /* Set if an iv is set */
51
    GCM128_CONTEXT gcm;
52
    unsigned char *iv; /* Temporary IV store */
53
    int ivlen; /* IV length */
54
    int taglen;
55
    int iv_gen; /* It is OK to generate IVs */
56
    int iv_gen_rand; /* No IV was specified, so generate a rand IV */
57
    int tls_aad_len; /* TLS AAD length */
58
    uint64_t tls_enc_records; /* Number of TLS records encrypted */
59
    ctr128_f ctr;
60
} EVP_AES_GCM_CTX;
61
62
typedef struct {
63
    union {
64
        OSSL_UNION_ALIGN;
65
        AES_KEY ks;
66
    } ks1, ks2; /* AES key schedules to use */
67
    XTS128_CONTEXT xts;
68
    void (*stream)(const unsigned char *in,
69
        unsigned char *out, size_t length,
70
        const AES_KEY *key1, const AES_KEY *key2,
71
        const unsigned char iv[16]);
72
} EVP_AES_XTS_CTX;
73
74
#ifdef FIPS_MODULE
75
static const int allow_insecure_decrypt = 0;
76
#else
77
static const int allow_insecure_decrypt = 1;
78
#endif
79
80
typedef struct {
81
    union {
82
        OSSL_UNION_ALIGN;
83
        AES_KEY ks;
84
    } ks; /* AES key schedule to use */
85
    int key_set; /* Set if key initialised */
86
    int iv_set; /* Set if an iv is set */
87
    int tag_set; /* Set if tag is valid */
88
    int len_set; /* Set if message length set */
89
    int L, M; /* L and M parameters from RFC3610 */
90
    int tls_aad_len; /* TLS AAD length */
91
    CCM128_CONTEXT ccm;
92
    ccm128_f str;
93
} EVP_AES_CCM_CTX;
94
95
#ifndef OPENSSL_NO_OCB
96
typedef struct {
97
    union {
98
        OSSL_UNION_ALIGN;
99
        AES_KEY ks;
100
    } ksenc; /* AES key schedule to use for encryption */
101
    union {
102
        OSSL_UNION_ALIGN;
103
        AES_KEY ks;
104
    } ksdec; /* AES key schedule to use for decryption */
105
    int key_set; /* Set if key initialised */
106
    int iv_set; /* Set if an iv is set */
107
    OCB128_CONTEXT ocb;
108
    unsigned char *iv; /* Temporary IV store */
109
    unsigned char tag[16];
110
    unsigned char data_buf[16]; /* Store partial data blocks */
111
    unsigned char aad_buf[16]; /* Store partial AAD blocks */
112
    int data_buf_len;
113
    int aad_buf_len;
114
    int ivlen; /* IV length */
115
    int taglen;
116
} EVP_AES_OCB_CTX;
117
#endif
118
119
0
#define MAXBITCHUNK ((size_t)1 << (sizeof(size_t) * 8 - 4))
120
121
/* increment counter (64-bit int) by 1 */
122
static void ctr64_inc(unsigned char *counter)
123
0
{
124
0
    int n = 8;
125
0
    unsigned char c;
126
127
0
    do {
128
0
        --n;
129
0
        c = counter[n];
130
0
        ++c;
131
0
        counter[n] = c;
132
0
        if (c)
133
0
            return;
134
0
    } while (n);
135
0
}
136
137
#if defined(AESNI_CAPABLE)
138
#if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
139
#define AES_GCM_ASM2(gctx) (gctx->gcm.block == (block128_f)aesni_encrypt && gctx->gcm.ghash == gcm_ghash_avx)
140
#undef AES_GCM_ASM2 /* minor size optimization */
141
#endif
142
143
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
144
    const unsigned char *iv, int enc)
145
0
{
146
0
    int ret, mode;
147
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
148
149
0
    mode = EVP_CIPHER_CTX_get_mode(ctx);
150
0
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
151
0
        && !enc) {
152
0
        ret = aesni_set_decrypt_key(key,
153
0
            EVP_CIPHER_CTX_get_key_length(ctx) * 8,
154
0
            &dat->ks.ks);
155
0
        dat->block = (block128_f)aesni_decrypt;
156
0
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
157
0
    } else {
158
0
        ret = aesni_set_encrypt_key(key,
159
0
            EVP_CIPHER_CTX_get_key_length(ctx) * 8,
160
0
            &dat->ks.ks);
161
0
        dat->block = (block128_f)aesni_encrypt;
162
0
        if (mode == EVP_CIPH_CBC_MODE)
163
0
            dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
164
0
        else if (mode == EVP_CIPH_CTR_MODE)
165
0
            dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
166
0
        else
167
0
            dat->stream.cbc = NULL;
168
0
    }
169
170
0
    if (ret < 0) {
171
0
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
172
0
        return 0;
173
0
    }
174
175
0
    return 1;
176
0
}
177
178
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
179
    const unsigned char *in, size_t len)
180
0
{
181
0
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY, ctx)->ks.ks,
182
0
        ctx->iv, EVP_CIPHER_CTX_is_encrypting(ctx));
183
184
0
    return 1;
185
0
}
186
187
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
188
    const unsigned char *in, size_t len)
189
0
{
190
0
    size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);
191
192
0
    if (len < bl)
193
0
        return 1;
194
195
0
    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY, ctx)->ks.ks,
196
0
        EVP_CIPHER_CTX_is_encrypting(ctx));
197
198
0
    return 1;
199
0
}
200
201
#define aesni_ofb_cipher aes_ofb_cipher
202
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
203
    const unsigned char *in, size_t len);
204
205
#define aesni_cfb_cipher aes_cfb_cipher
206
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
207
    const unsigned char *in, size_t len);
208
209
#define aesni_cfb8_cipher aes_cfb8_cipher
210
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
211
    const unsigned char *in, size_t len);
212
213
#define aesni_cfb1_cipher aes_cfb1_cipher
214
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
215
    const unsigned char *in, size_t len);
216
217
#define aesni_ctr_cipher aes_ctr_cipher
218
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
219
    const unsigned char *in, size_t len);
220
221
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
222
    const unsigned char *iv, int enc)
223
0
{
224
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
225
0
    if (!iv && !key)
226
0
        return 1;
227
0
    if (key) {
228
0
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
229
0
            &gctx->ks.ks);
230
0
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt);
231
0
        gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
232
        /*
233
         * If we have an iv can set it directly, otherwise use saved IV.
234
         */
235
0
        if (iv == NULL && gctx->iv_set)
236
0
            iv = gctx->iv;
237
0
        if (iv) {
238
0
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
239
0
            gctx->iv_set = 1;
240
0
        }
241
0
        gctx->key_set = 1;
242
0
    } else {
243
        /* If key set use IV, otherwise copy */
244
0
        if (gctx->key_set)
245
0
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
246
0
        else
247
0
            memcpy(gctx->iv, iv, gctx->ivlen);
248
0
        gctx->iv_set = 1;
249
0
        gctx->iv_gen = 0;
250
0
    }
251
0
    return 1;
252
0
}
253
254
#define aesni_gcm_cipher aes_gcm_cipher
255
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
256
    const unsigned char *in, size_t len);
257
258
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
259
    const unsigned char *iv, int enc)
260
0
{
261
0
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
262
263
0
    if (!iv && !key)
264
0
        return 1;
265
266
0
    if (key) {
267
        /* The key is two half length keys in reality */
268
0
        const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
269
0
        const int bits = bytes * 8;
270
271
        /*
272
         * Verify that the two keys are different.
273
         *
274
         * This addresses Rogaway's vulnerability.
275
         * See comment in aes_xts_init_key() below.
276
         */
277
0
        if ((!allow_insecure_decrypt || enc)
278
0
            && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
279
0
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
280
0
            return 0;
281
0
        }
282
283
        /* key_len is two AES keys */
284
0
        if (enc) {
285
0
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
286
0
            xctx->xts.block1 = (block128_f)aesni_encrypt;
287
0
            xctx->stream = aesni_xts_encrypt;
288
0
        } else {
289
0
            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
290
0
            xctx->xts.block1 = (block128_f)aesni_decrypt;
291
0
            xctx->stream = aesni_xts_decrypt;
292
0
        }
293
294
0
        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
295
0
        xctx->xts.block2 = (block128_f)aesni_encrypt;
296
297
0
        xctx->xts.key1 = &xctx->ks1;
298
0
    }
299
300
0
    if (iv) {
301
0
        xctx->xts.key2 = &xctx->ks2;
302
0
        memcpy(ctx->iv, iv, 16);
303
0
    }
304
305
0
    return 1;
306
0
}
307
308
#define aesni_xts_cipher aes_xts_cipher
309
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
310
    const unsigned char *in, size_t len);
311
312
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
313
    const unsigned char *iv, int enc)
314
0
{
315
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
316
0
    if (!iv && !key)
317
0
        return 1;
318
0
    if (key) {
319
0
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
320
0
            &cctx->ks.ks);
321
0
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
322
0
            &cctx->ks, (block128_f)aesni_encrypt);
323
0
        cctx->str = enc ? (ccm128_f)aesni_ccm64_encrypt_blocks : (ccm128_f)aesni_ccm64_decrypt_blocks;
324
0
        cctx->key_set = 1;
325
0
    }
326
0
    if (iv) {
327
0
        memcpy(ctx->iv, iv, 15 - cctx->L);
328
0
        cctx->iv_set = 1;
329
0
    }
330
0
    return 1;
331
0
}
332
333
#define aesni_ccm_cipher aes_ccm_cipher
334
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
335
    const unsigned char *in, size_t len);
336
337
#ifndef OPENSSL_NO_OCB
338
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
339
    const unsigned char *iv, int enc)
340
0
{
341
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
342
0
    if (!iv && !key)
343
0
        return 1;
344
0
    if (key) {
345
0
        do {
346
            /*
347
             * We set both the encrypt and decrypt key here because decrypt
348
             * needs both. We could possibly optimise to remove setting the
349
             * decrypt for an encryption operation.
350
             */
351
0
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
352
0
                &octx->ksenc.ks);
353
0
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
354
0
                &octx->ksdec.ks);
355
0
            if (!CRYPTO_ocb128_init(&octx->ocb,
356
0
                    &octx->ksenc.ks, &octx->ksdec.ks,
357
0
                    (block128_f)aesni_encrypt,
358
0
                    (block128_f)aesni_decrypt,
359
0
                    enc ? aesni_ocb_encrypt
360
0
                        : aesni_ocb_decrypt))
361
0
                return 0;
362
0
        } while (0);
363
364
        /*
365
         * If we have an iv we can set it directly, otherwise use saved IV.
366
         */
367
0
        if (iv == NULL && octx->iv_set)
368
0
            iv = octx->iv;
369
0
        if (iv) {
370
0
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
371
0
                != 1)
372
0
                return 0;
373
0
            octx->iv_set = 1;
374
0
        }
375
0
        octx->key_set = 1;
376
0
    } else {
377
        /* If key set use IV, otherwise copy */
378
0
        if (octx->key_set)
379
0
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
380
0
        else
381
0
            memcpy(octx->iv, iv, octx->ivlen);
382
0
        octx->iv_set = 1;
383
0
    }
384
0
    return 1;
385
0
}
386
387
#define aesni_ocb_cipher aes_ocb_cipher
388
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
389
    const unsigned char *in, size_t len);
390
#endif /* OPENSSL_NO_OCB */
391
392
#define BLOCK_CIPHER_generic(nid, keylen, blocksize, ivlen, nmode, mode, MODE, flags) \
393
    static const EVP_CIPHER aesni_##keylen##_##mode = {                               \
394
        nid##_##keylen##_##nmode, blocksize, keylen / 8, ivlen,                       \
395
        flags | EVP_CIPH_##MODE##_MODE,                                               \
396
        EVP_ORIG_GLOBAL,                                                              \
397
        aesni_init_key,                                                               \
398
        aesni_##mode##_cipher,                                                        \
399
        NULL,                                                                         \
400
        sizeof(EVP_AES_KEY),                                                          \
401
        NULL, NULL, NULL, NULL                                                        \
402
    };                                                                                \
403
    static const EVP_CIPHER aes_##keylen##_##mode = {                                 \
404
        nid##_##keylen##_##nmode, blocksize,                                          \
405
        keylen / 8, ivlen,                                                            \
406
        flags | EVP_CIPH_##MODE##_MODE,                                               \
407
        EVP_ORIG_GLOBAL,                                                              \
408
        aes_init_key,                                                                 \
409
        aes_##mode##_cipher,                                                          \
410
        NULL,                                                                         \
411
        sizeof(EVP_AES_KEY),                                                          \
412
        NULL, NULL, NULL, NULL                                                        \
413
    };                                                                                \
414
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                 \
415
3.44k
    {                                                                                 \
416
3.44k
        return AESNI_CAPABLE ? &aesni_##keylen##_##mode : &aes_##keylen##_##mode;     \
417
3.44k
    }
Expansions of the EVP_aes_##keylen##_##mode definition at lines 415-417, one per instantiation, each with count 164:
EVP_aes_128_cbc, EVP_aes_128_ecb, EVP_aes_128_ofb, EVP_aes_128_cfb128, EVP_aes_128_cfb1, EVP_aes_128_cfb8, EVP_aes_128_ctr,
EVP_aes_192_cbc, EVP_aes_192_ecb, EVP_aes_192_ofb, EVP_aes_192_cfb128, EVP_aes_192_cfb1, EVP_aes_192_cfb8, EVP_aes_192_ctr,
EVP_aes_256_cbc, EVP_aes_256_ecb, EVP_aes_256_ofb, EVP_aes_256_cfb128, EVP_aes_256_cfb1, EVP_aes_256_cfb8, EVP_aes_256_ctr
418
419
#define BLOCK_CIPHER_custom(nid, keylen, blocksize, ivlen, mode, MODE, flags)                                              \
420
    static const EVP_CIPHER aesni_##keylen##_##mode = {                                                                    \
421
        nid##_##keylen##_##mode, blocksize,                                                                                \
422
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
423
        ivlen,                                                                                                             \
424
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
425
        EVP_ORIG_GLOBAL,                                                                                                   \
426
        aesni_##mode##_init_key,                                                                                           \
427
        aesni_##mode##_cipher,                                                                                             \
428
        aes_##mode##_cleanup,                                                                                              \
429
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
430
        NULL, NULL, aes_##mode##_ctrl, NULL                                                                                \
431
    };                                                                                                                     \
432
    static const EVP_CIPHER aes_##keylen##_##mode = {                                                                      \
433
        nid##_##keylen##_##mode, blocksize,                                                                                \
434
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
435
        ivlen,                                                                                                             \
436
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
437
        EVP_ORIG_GLOBAL,                                                                                                   \
438
        aes_##mode##_init_key,                                                                                             \
439
        aes_##mode##_cipher,                                                                                               \
440
        aes_##mode##_cleanup,                                                                                              \
441
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
442
        NULL, NULL, aes_##mode##_ctrl, NULL                                                                                \
443
    };                                                                                                                     \
444
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                                                      \
445
1.80k
    {                                                                                                                      \
446
1.80k
        return AESNI_CAPABLE ? &aesni_##keylen##_##mode : &aes_##keylen##_##mode;                                          \
447
1.80k
    }
Expansions of the EVP_aes_##keylen##_##mode definition at lines 445-447, one per instantiation, each with count 164:
EVP_aes_128_gcm, EVP_aes_192_gcm, EVP_aes_256_gcm,
EVP_aes_128_xts, EVP_aes_256_xts,
EVP_aes_128_ccm, EVP_aes_192_ccm, EVP_aes_256_ccm,
EVP_aes_128_ocb, EVP_aes_192_ocb, EVP_aes_256_ocb
448
449
#elif defined(SPARC_AES_CAPABLE)
450
451
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
452
    const unsigned char *iv, int enc)
453
{
454
    int ret, mode, bits;
455
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
456
457
    mode = EVP_CIPHER_CTX_get_mode(ctx);
458
    bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
459
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
460
        && !enc) {
461
        ret = 0;
462
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
463
        dat->block = (block128_f)aes_t4_decrypt;
464
        switch (bits) {
465
        case 128:
466
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aes128_t4_cbc_decrypt : NULL;
467
            break;
468
        case 192:
469
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aes192_t4_cbc_decrypt : NULL;
470
            break;
471
        case 256:
472
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aes256_t4_cbc_decrypt : NULL;
473
            break;
474
        default:
475
            ret = -1;
476
        }
477
    } else {
478
        ret = 0;
479
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
480
        dat->block = (block128_f)aes_t4_encrypt;
481
        switch (bits) {
482
        case 128:
483
            if (mode == EVP_CIPH_CBC_MODE)
484
                dat->stream.cbc = (cbc128_f)aes128_t4_cbc_encrypt;
485
            else if (mode == EVP_CIPH_CTR_MODE)
486
                dat->stream.ctr = (ctr128_f)aes128_t4_ctr32_encrypt;
487
            else
488
                dat->stream.cbc = NULL;
489
            break;
490
        case 192:
491
            if (mode == EVP_CIPH_CBC_MODE)
492
                dat->stream.cbc = (cbc128_f)aes192_t4_cbc_encrypt;
493
            else if (mode == EVP_CIPH_CTR_MODE)
494
                dat->stream.ctr = (ctr128_f)aes192_t4_ctr32_encrypt;
495
            else
496
                dat->stream.cbc = NULL;
497
            break;
498
        case 256:
499
            if (mode == EVP_CIPH_CBC_MODE)
500
                dat->stream.cbc = (cbc128_f)aes256_t4_cbc_encrypt;
501
            else if (mode == EVP_CIPH_CTR_MODE)
502
                dat->stream.ctr = (ctr128_f)aes256_t4_ctr32_encrypt;
503
            else
504
                dat->stream.cbc = NULL;
505
            break;
506
        default:
507
            ret = -1;
508
        }
509
    }
510
511
    if (ret < 0) {
512
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
513
        return 0;
514
    }
515
516
    return 1;
517
}
518
519
#define aes_t4_cbc_cipher aes_cbc_cipher
520
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
521
    const unsigned char *in, size_t len);
522
523
#define aes_t4_ecb_cipher aes_ecb_cipher
524
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
525
    const unsigned char *in, size_t len);
526
527
#define aes_t4_ofb_cipher aes_ofb_cipher
528
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
529
    const unsigned char *in, size_t len);
530
531
#define aes_t4_cfb_cipher aes_cfb_cipher
532
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
533
    const unsigned char *in, size_t len);
534
535
#define aes_t4_cfb8_cipher aes_cfb8_cipher
536
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
537
    const unsigned char *in, size_t len);
538
539
#define aes_t4_cfb1_cipher aes_cfb1_cipher
540
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
541
    const unsigned char *in, size_t len);
542
543
#define aes_t4_ctr_cipher aes_ctr_cipher
544
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
545
    const unsigned char *in, size_t len);
546
547
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
548
    const unsigned char *iv, int enc)
549
{
550
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
551
    if (!iv && !key)
552
        return 1;
553
    if (key) {
554
        int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
555
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
556
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
557
            (block128_f)aes_t4_encrypt);
558
        switch (bits) {
559
        case 128:
560
            gctx->ctr = (ctr128_f)aes128_t4_ctr32_encrypt;
561
            break;
562
        case 192:
563
            gctx->ctr = (ctr128_f)aes192_t4_ctr32_encrypt;
564
            break;
565
        case 256:
566
            gctx->ctr = (ctr128_f)aes256_t4_ctr32_encrypt;
567
            break;
568
        default:
569
            return 0;
570
        }
571
        /*
572
         * If we have an iv can set it directly, otherwise use saved IV.
573
         */
574
        if (iv == NULL && gctx->iv_set)
575
            iv = gctx->iv;
576
        if (iv) {
577
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
578
            gctx->iv_set = 1;
579
        }
580
        gctx->key_set = 1;
581
    } else {
582
        /* If key set use IV, otherwise copy */
583
        if (gctx->key_set)
584
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
585
        else
586
            memcpy(gctx->iv, iv, gctx->ivlen);
587
        gctx->iv_set = 1;
588
        gctx->iv_gen = 0;
589
    }
590
    return 1;
591
}
592
593
#define aes_t4_gcm_cipher aes_gcm_cipher
594
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
595
    const unsigned char *in, size_t len);
596
597
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
598
    const unsigned char *iv, int enc)
599
{
600
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
601
602
    if (!iv && !key)
603
        return 1;
604
605
    if (key) {
606
        /* The key is two half length keys in reality */
607
        const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
608
        const int bits = bytes * 8;
609
610
        /*
611
         * Verify that the two keys are different.
612
         *
613
         * This addresses Rogaway's vulnerability.
614
         * See comment in aes_xts_init_key() below.
615
         */
616
        if ((!allow_insecure_decrypt || enc)
617
            && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
618
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
619
            return 0;
620
        }
621
622
        xctx->stream = NULL;
623
        /* key_len is two AES keys */
624
        if (enc) {
625
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
626
            xctx->xts.block1 = (block128_f)aes_t4_encrypt;
627
            switch (bits) {
628
            case 128:
629
                xctx->stream = aes128_t4_xts_encrypt;
630
                break;
631
            case 256:
632
                xctx->stream = aes256_t4_xts_encrypt;
633
                break;
634
            default:
635
                return 0;
636
            }
637
        } else {
638
            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
639
            xctx->xts.block1 = (block128_f)aes_t4_decrypt;
640
            switch (bits) {
641
            case 128:
642
                xctx->stream = aes128_t4_xts_decrypt;
643
                break;
644
            case 256:
645
                xctx->stream = aes256_t4_xts_decrypt;
646
                break;
647
            default:
648
                return 0;
649
            }
650
        }
651
652
        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
653
        xctx->xts.block2 = (block128_f)aes_t4_encrypt;
654
655
        xctx->xts.key1 = &xctx->ks1;
656
    }
657
658
    if (iv) {
659
        xctx->xts.key2 = &xctx->ks2;
660
        memcpy(ctx->iv, iv, 16);
661
    }
662
663
    return 1;
664
}
665
666
#define aes_t4_xts_cipher aes_xts_cipher
667
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
668
    const unsigned char *in, size_t len);
669
670
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
671
    const unsigned char *iv, int enc)
672
{
673
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
674
    if (!iv && !key)
675
        return 1;
676
    if (key) {
677
        int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
678
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
679
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
680
            &cctx->ks, (block128_f)aes_t4_encrypt);
681
        cctx->str = NULL;
682
        cctx->key_set = 1;
683
    }
684
    if (iv) {
685
        memcpy(ctx->iv, iv, 15 - cctx->L);
686
        cctx->iv_set = 1;
687
    }
688
    return 1;
689
}
690
691
#define aes_t4_ccm_cipher aes_ccm_cipher
692
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
693
    const unsigned char *in, size_t len);
694
695
#ifndef OPENSSL_NO_OCB
696
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
697
    const unsigned char *iv, int enc)
698
{
699
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
700
    if (!iv && !key)
701
        return 1;
702
    if (key) {
703
        do {
704
            /*
705
             * We set both the encrypt and decrypt key here because decrypt
706
             * needs both. We could possibly optimise to remove setting the
707
             * decrypt for an encryption operation.
708
             */
709
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
710
                &octx->ksenc.ks);
711
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
712
                &octx->ksdec.ks);
713
            if (!CRYPTO_ocb128_init(&octx->ocb,
714
                    &octx->ksenc.ks, &octx->ksdec.ks,
715
                    (block128_f)aes_t4_encrypt,
716
                    (block128_f)aes_t4_decrypt,
717
                    NULL))
718
                return 0;
719
        } while (0);
720
721
        /*
722
         * If we have an iv we can set it directly, otherwise use saved IV.
723
         */
724
        if (iv == NULL && octx->iv_set)
725
            iv = octx->iv;
726
        if (iv) {
727
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
728
                != 1)
729
                return 0;
730
            octx->iv_set = 1;
731
        }
732
        octx->key_set = 1;
733
    } else {
734
        /* If key set use IV, otherwise copy */
735
        if (octx->key_set)
736
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
737
        else
738
            memcpy(octx->iv, iv, octx->ivlen);
739
        octx->iv_set = 1;
740
    }
741
    return 1;
742
}
743
744
#define aes_t4_ocb_cipher aes_ocb_cipher
745
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
746
    const unsigned char *in, size_t len);
747
#endif /* OPENSSL_NO_OCB */
748
749
#ifndef OPENSSL_NO_SIV
750
#define aes_t4_siv_init_key aes_siv_init_key
751
#define aes_t4_siv_cipher aes_siv_cipher
752
#endif /* OPENSSL_NO_SIV */
753
754
#define BLOCK_CIPHER_generic(nid, keylen, blocksize, ivlen, nmode, mode, MODE, flags)  \
755
    static const EVP_CIPHER aes_t4_##keylen##_##mode = {                               \
756
        nid##_##keylen##_##nmode, blocksize, keylen / 8, ivlen,                        \
757
        flags | EVP_CIPH_##MODE##_MODE,                                                \
758
        EVP_ORIG_GLOBAL,                                                               \
759
        aes_t4_init_key,                                                               \
760
        aes_t4_##mode##_cipher,                                                        \
761
        NULL,                                                                          \
762
        sizeof(EVP_AES_KEY),                                                           \
763
        NULL, NULL, NULL, NULL                                                         \
764
    };                                                                                 \
765
    static const EVP_CIPHER aes_##keylen##_##mode = {                                  \
766
        nid##_##keylen##_##nmode, blocksize,                                           \
767
        keylen / 8, ivlen,                                                             \
768
        flags | EVP_CIPH_##MODE##_MODE,                                                \
769
        EVP_ORIG_GLOBAL,                                                               \
770
        aes_init_key,                                                                  \
771
        aes_##mode##_cipher,                                                           \
772
        NULL,                                                                          \
773
        sizeof(EVP_AES_KEY),                                                           \
774
        NULL, NULL, NULL, NULL                                                         \
775
    };                                                                                 \
776
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                  \
777
    {                                                                                  \
778
        return SPARC_AES_CAPABLE ? &aes_t4_##keylen##_##mode : &aes_##keylen##_##mode; \
779
    }
780
781
#define BLOCK_CIPHER_custom(nid, keylen, blocksize, ivlen, mode, MODE, flags)                                              \
782
    static const EVP_CIPHER aes_t4_##keylen##_##mode = {                                                                   \
783
        nid##_##keylen##_##mode, blocksize,                                                                                \
784
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
785
        ivlen,                                                                                                             \
786
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
787
        EVP_ORIG_GLOBAL,                                                                                                   \
788
        aes_t4_##mode##_init_key,                                                                                          \
789
        aes_t4_##mode##_cipher,                                                                                            \
790
        aes_##mode##_cleanup,                                                                                              \
791
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
792
        NULL, NULL, aes_##mode##_ctrl, NULL                                                                                \
793
    };                                                                                                                     \
794
    static const EVP_CIPHER aes_##keylen##_##mode = {                                                                      \
795
        nid##_##keylen##_##mode, blocksize,                                                                                \
796
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
797
        ivlen,                                                                                                             \
798
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
799
        EVP_ORIG_GLOBAL,                                                                                                   \
800
        aes_##mode##_init_key,                                                                                             \
801
        aes_##mode##_cipher,                                                                                               \
802
        aes_##mode##_cleanup,                                                                                              \
803
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
804
        NULL, NULL, aes_##mode##_ctrl, NULL                                                                                \
805
    };                                                                                                                     \
806
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                                                      \
807
    {                                                                                                                      \
808
        return SPARC_AES_CAPABLE ? &aes_t4_##keylen##_##mode : &aes_##keylen##_##mode;                                     \
809
    }
810
811
#elif defined(S390X_aes_128_CAPABLE)
812
/* IBM S390X support */
813
typedef struct {
814
    union {
815
        OSSL_UNION_ALIGN;
816
        /*-
817
         * KM-AES parameter block - begin
818
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
819
         */
820
        struct {
821
            unsigned char k[32];
822
        } param;
823
        /* KM-AES parameter block - end */
824
    } km;
825
    unsigned int fc;
826
} S390X_AES_ECB_CTX;
827
828
typedef struct {
829
    union {
830
        OSSL_UNION_ALIGN;
831
        /*-
832
         * KMO-AES parameter block - begin
833
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
834
         */
835
        struct {
836
            unsigned char cv[16];
837
            unsigned char k[32];
838
        } param;
839
        /* KMO-AES parameter block - end */
840
    } kmo;
841
    unsigned int fc;
842
} S390X_AES_OFB_CTX;
843
844
typedef struct {
845
    union {
846
        OSSL_UNION_ALIGN;
847
        /*-
848
         * KMF-AES parameter block - begin
849
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
850
         */
851
        struct {
852
            unsigned char cv[16];
853
            unsigned char k[32];
854
        } param;
855
        /* KMF-AES parameter block - end */
856
    } kmf;
857
    unsigned int fc;
858
} S390X_AES_CFB_CTX;
859
860
typedef struct {
861
    union {
862
        OSSL_UNION_ALIGN;
863
        /*-
864
         * KMA-GCM-AES parameter block - begin
865
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
866
         */
867
        struct {
868
            unsigned char reserved[12];
869
            union {
870
                unsigned int w;
871
                unsigned char b[4];
872
            } cv;
873
            union {
874
                unsigned long long g[2];
875
                unsigned char b[16];
876
            } t;
877
            unsigned char h[16];
878
            unsigned long long taadl;
879
            unsigned long long tpcl;
880
            union {
881
                unsigned long long g[2];
882
                unsigned int w[4];
883
            } j0;
884
            unsigned char k[32];
885
        } param;
886
        /* KMA-GCM-AES parameter block - end */
887
    } kma;
888
    unsigned int fc;
889
    int key_set;
890
891
    unsigned char *iv;
892
    int ivlen;
893
    int iv_set;
894
    int iv_gen;
895
896
    int taglen;
897
898
    unsigned char ares[16];
899
    unsigned char mres[16];
900
    unsigned char kres[16];
901
    int areslen;
902
    int mreslen;
903
    int kreslen;
904
905
    int tls_aad_len;
906
    uint64_t tls_enc_records; /* Number of TLS records encrypted */
907
} S390X_AES_GCM_CTX;
908
909
typedef struct {
910
    union {
911
        OSSL_UNION_ALIGN;
912
        /*-
913
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
914
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
915
         * rounds field is used to store the function code and that the key
916
         * schedule is not stored (if aes hardware support is detected).
917
         */
918
        struct {
919
            unsigned char pad[16];
920
            AES_KEY k;
921
        } key;
922
923
        struct {
924
            /*-
925
             * KMAC-AES parameter block - begin
926
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
927
             */
928
            struct {
929
                union {
930
                    unsigned long long g[2];
931
                    unsigned char b[16];
932
                } icv;
933
                unsigned char k[32];
934
            } kmac_param;
935
            /* KMAC-AES parameter block - end */
936
937
            union {
938
                unsigned long long g[2];
939
                unsigned char b[16];
940
            } nonce;
941
            union {
942
                unsigned long long g[2];
943
                unsigned char b[16];
944
            } buf;
945
946
            unsigned long long blocks;
947
            int l;
948
            int m;
949
            int tls_aad_len;
950
            int iv_set;
951
            int tag_set;
952
            int len_set;
953
            int key_set;
954
955
            unsigned char pad[140];
956
            unsigned int fc;
957
        } ccm;
958
    } aes;
959
} S390X_AES_CCM_CTX;
960
961
#define s390x_aes_init_key aes_init_key
962
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
963
    const unsigned char *iv, int enc);
964
965
#define S390X_AES_CBC_CTX EVP_AES_KEY
966
967
#define s390x_aes_cbc_init_key aes_init_key
968
969
#define s390x_aes_cbc_cipher aes_cbc_cipher
970
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
971
    const unsigned char *in, size_t len);
972
973
static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
974
    const unsigned char *key,
975
    const unsigned char *iv, int enc)
976
{
977
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
978
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
979
980
    cctx->fc = S390X_AES_FC(keylen);
981
    if (!enc)
982
        cctx->fc |= S390X_DECRYPT;
983
984
    memcpy(cctx->km.param.k, key, keylen);
985
    return 1;
986
}
987
988
static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
989
    const unsigned char *in, size_t len)
990
{
991
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
992
993
    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
994
    return 1;
995
}
996
997
static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
998
    const unsigned char *key,
999
    const unsigned char *ivec, int enc)
1000
{
1001
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1002
    const unsigned char *iv = ctx->oiv;
1003
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1004
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1005
1006
    memcpy(cctx->kmo.param.cv, iv, ivlen);
1007
    memcpy(cctx->kmo.param.k, key, keylen);
1008
    cctx->fc = S390X_AES_FC(keylen);
1009
    return 1;
1010
}
1011
1012
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1013
    const unsigned char *in, size_t len)
1014
{
1015
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
1016
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1017
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
1018
    int n = ctx->num;
1019
    int rem;
1020
1021
    memcpy(cctx->kmo.param.cv, iv, ivlen);
1022
    while (n && len) {
1023
        *out = *in ^ cctx->kmo.param.cv[n];
1024
        n = (n + 1) & 0xf;
1025
        --len;
1026
        ++in;
1027
        ++out;
1028
    }
1029
1030
    rem = len & 0xf;
1031
1032
    len &= ~(size_t)0xf;
1033
    if (len) {
1034
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);
1035
1036
        out += len;
1037
        in += len;
1038
    }
1039
1040
    if (rem) {
1041
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
1042
            cctx->kmo.param.k);
1043
1044
        while (rem--) {
1045
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
1046
            ++n;
1047
        }
1048
    }
1049
1050
    memcpy(iv, cctx->kmo.param.cv, ivlen);
1051
    ctx->num = n;
1052
    return 1;
1053
}
1054
1055
static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
1056
    const unsigned char *key,
1057
    const unsigned char *ivec, int enc)
1058
{
1059
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1060
    const unsigned char *iv = ctx->oiv;
1061
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1062
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1063
1064
    cctx->fc = S390X_AES_FC(keylen);
1065
    cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
1066
    if (!enc)
1067
        cctx->fc |= S390X_DECRYPT;
1068
1069
    memcpy(cctx->kmf.param.cv, iv, ivlen);
1070
    memcpy(cctx->kmf.param.k, key, keylen);
1071
    return 1;
1072
}
1073
1074
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1075
    const unsigned char *in, size_t len)
1076
{
1077
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1078
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1079
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1080
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1081
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
1082
    int n = ctx->num;
1083
    int rem;
1084
    unsigned char tmp;
1085
1086
    memcpy(cctx->kmf.param.cv, iv, ivlen);
1087
    while (n && len) {
1088
        tmp = *in;
1089
        *out = cctx->kmf.param.cv[n] ^ tmp;
1090
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
1091
        n = (n + 1) & 0xf;
1092
        --len;
1093
        ++in;
1094
        ++out;
1095
    }
1096
1097
    rem = len & 0xf;
1098
1099
    len &= ~(size_t)0xf;
1100
    if (len) {
1101
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1102
1103
        out += len;
1104
        in += len;
1105
    }
1106
1107
    if (rem) {
1108
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
1109
            S390X_AES_FC(keylen), cctx->kmf.param.k);
1110
1111
        while (rem--) {
1112
            tmp = in[n];
1113
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
1114
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
1115
            ++n;
1116
        }
1117
    }
1118
1119
    memcpy(iv, cctx->kmf.param.cv, ivlen);
1120
    ctx->num = n;
1121
    return 1;
1122
}
1123
1124
static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
1125
    const unsigned char *key,
1126
    const unsigned char *ivec, int enc)
1127
{
1128
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1129
    const unsigned char *iv = ctx->oiv;
1130
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1131
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1132
1133
    cctx->fc = S390X_AES_FC(keylen);
1134
    cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
1135
    if (!enc)
1136
        cctx->fc |= S390X_DECRYPT;
1137
1138
    memcpy(cctx->kmf.param.cv, iv, ivlen);
1139
    memcpy(cctx->kmf.param.k, key, keylen);
1140
    return 1;
1141
}
1142
1143
static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1144
    const unsigned char *in, size_t len)
1145
{
1146
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
1147
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
1148
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
1149
1150
    memcpy(cctx->kmf.param.cv, iv, ivlen);
1151
    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
1152
    memcpy(iv, cctx->kmf.param.cv, ivlen);
1153
    return 1;
1154
}
1155
1156
#define s390x_aes_cfb1_init_key aes_init_key
1157
1158
#define s390x_aes_cfb1_cipher aes_cfb1_cipher
1159
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1160
    const unsigned char *in, size_t len);
1161
1162
#define S390X_AES_CTR_CTX EVP_AES_KEY
1163
1164
#define s390x_aes_ctr_init_key aes_init_key
1165
1166
#define s390x_aes_ctr_cipher aes_ctr_cipher
1167
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1168
    const unsigned char *in, size_t len);
1169
1170
/* iv + padding length for iv lengths != 12 */
1171
#define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
1172
1173
/*-
1174
 * Process additional authenticated data. Returns 0 on success. Code is
1175
 * big-endian.
1176
 */
1177
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
1178
    size_t len)
1179
{
1180
    unsigned long long alen;
1181
    int n, rem;
1182
1183
    if (ctx->kma.param.tpcl)
1184
        return -2;
1185
1186
    alen = ctx->kma.param.taadl + len;
1187
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
1188
        return -1;
1189
    ctx->kma.param.taadl = alen;
1190
1191
    n = ctx->areslen;
1192
    if (n) {
1193
        while (n && len) {
1194
            ctx->ares[n] = *aad;
1195
            n = (n + 1) & 0xf;
1196
            ++aad;
1197
            --len;
1198
        }
1199
        /* ctx->ares contains a complete block if offset has wrapped around */
1200
        if (!n) {
1201
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1202
            ctx->fc |= S390X_KMA_HS;
1203
        }
1204
        ctx->areslen = n;
1205
    }
1206
1207
    rem = len & 0xf;
1208
1209
    len &= ~(size_t)0xf;
1210
    if (len) {
1211
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
1212
        aad += len;
1213
        ctx->fc |= S390X_KMA_HS;
1214
    }
1215
1216
    if (rem) {
1217
        ctx->areslen = rem;
1218
1219
        do {
1220
            --rem;
1221
            ctx->ares[rem] = aad[rem];
1222
        } while (rem);
1223
    }
1224
    return 0;
1225
}
1226
1227
/*-
1228
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1229
 * success. Code is big-endian.
1230
 */
1231
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
1232
    unsigned char *out, size_t len)
1233
{
1234
    const unsigned char *inptr;
1235
    unsigned long long mlen;
1236
    union {
1237
        unsigned int w[4];
1238
        unsigned char b[16];
1239
    } buf;
1240
    size_t inlen;
1241
    int n, rem, i;
1242
1243
    mlen = ctx->kma.param.tpcl + len;
1244
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
1245
        return -1;
1246
    ctx->kma.param.tpcl = mlen;
1247
1248
    n = ctx->mreslen;
1249
    if (n) {
1250
        inptr = in;
1251
        inlen = len;
1252
        while (n && inlen) {
1253
            ctx->mres[n] = *inptr;
1254
            n = (n + 1) & 0xf;
1255
            ++inptr;
1256
            --inlen;
1257
        }
1258
        /* ctx->mres contains a complete block if offset has wrapped around */
1259
        if (!n) {
1260
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
1261
                ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1262
            ctx->fc |= S390X_KMA_HS;
1263
            ctx->areslen = 0;
1264
1265
            /* previous call already encrypted/decrypted its remainder,
1266
             * see comment below */
1267
            n = ctx->mreslen;
1268
            while (n) {
1269
                *out = buf.b[n];
1270
                n = (n + 1) & 0xf;
1271
                ++out;
1272
                ++in;
1273
                --len;
1274
            }
1275
            ctx->mreslen = 0;
1276
        }
1277
    }
1278
1279
    rem = len & 0xf;
1280
1281
    len &= ~(size_t)0xf;
1282
    if (len) {
1283
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
1284
            ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
1285
        in += len;
1286
        out += len;
1287
        ctx->fc |= S390X_KMA_HS;
1288
        ctx->areslen = 0;
1289
    }
1290
1291
    /*-
1292
     * If there is a remainder, it has to be saved such that it can be
1293
     * processed by kma later. However, we also have to do the for-now
1294
     * unauthenticated encryption/decryption part here and now...
1295
     */
1296
    if (rem) {
1297
        if (!ctx->mreslen) {
1298
            buf.w[0] = ctx->kma.param.j0.w[0];
1299
            buf.w[1] = ctx->kma.param.j0.w[1];
1300
            buf.w[2] = ctx->kma.param.j0.w[2];
1301
            buf.w[3] = ctx->kma.param.cv.w + 1;
1302
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
1303
        }
1304
1305
        n = ctx->mreslen;
1306
        for (i = 0; i < rem; i++) {
1307
            ctx->mres[n + i] = in[i];
1308
            out[i] = in[i] ^ ctx->kres[n + i];
1309
        }
1310
1311
        ctx->mreslen += rem;
1312
    }
1313
    return 0;
1314
}
1315
1316
/*-
1317
 * Initialize context structure. Code is big-endian.
1318
 */
1319
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
1320
    const unsigned char *iv)
1321
{
1322
    ctx->kma.param.t.g[0] = 0;
1323
    ctx->kma.param.t.g[1] = 0;
1324
    ctx->kma.param.tpcl = 0;
1325
    ctx->kma.param.taadl = 0;
1326
    ctx->mreslen = 0;
1327
    ctx->areslen = 0;
1328
    ctx->kreslen = 0;
1329
1330
    if (ctx->ivlen == 12) {
1331
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
1332
        ctx->kma.param.j0.w[3] = 1;
1333
        ctx->kma.param.cv.w = 1;
1334
    } else {
1335
        /* ctx->iv has the right size and is already padded. */
1336
        memcpy(ctx->iv, iv, ctx->ivlen);
1337
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
1338
            ctx->fc, &ctx->kma.param);
1339
        ctx->fc |= S390X_KMA_HS;
1340
1341
        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
1342
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
1343
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
1344
        ctx->kma.param.t.g[0] = 0;
1345
        ctx->kma.param.t.g[1] = 0;
1346
    }
1347
}
1348
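For the common 12-byte (96-bit) IV, GCM's pre-counter block J0 is simply IV || 0^31 || 1, which is what the first branch builds (j0.w[3] = 1 on this big-endian target). A byte-wise sketch of the same layout, with an assumed helper name:

#include <string.h>

/* J0 for a 96-bit GCM IV: the IV followed by a 32-bit counter set to 1. */
static void gcm_j0_96bit(const unsigned char iv[12], unsigned char j0[16])
{
    memcpy(j0, iv, 12);
    j0[12] = 0;
    j0[13] = 0;
    j0[14] = 0;
    j0[15] = 1;
}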
1349
/*-
1350
 * Performs various operations on the context structure depending on control
1351
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1352
 * Code is big-endian.
1353
 */
1354
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
1355
{
1356
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1357
    S390X_AES_GCM_CTX *gctx_out;
1358
    EVP_CIPHER_CTX *out;
1359
    unsigned char *buf;
1360
    int ivlen, enc, len;
1361
1362
    switch (type) {
1363
    case EVP_CTRL_INIT:
1364
        ivlen = EVP_CIPHER_get_iv_length(c->cipher);
1365
        gctx->key_set = 0;
1366
        gctx->iv_set = 0;
1367
        gctx->ivlen = ivlen;
1368
        gctx->iv = c->iv;
1369
        gctx->taglen = -1;
1370
        gctx->iv_gen = 0;
1371
        gctx->tls_aad_len = -1;
1372
        return 1;
1373
1374
    case EVP_CTRL_GET_IVLEN:
1375
        *(int *)ptr = gctx->ivlen;
1376
        return 1;
1377
1378
    case EVP_CTRL_AEAD_SET_IVLEN:
1379
        if (arg <= 0)
1380
            return 0;
1381
1382
        if (arg != 12) {
1383
            len = S390X_gcm_ivpadlen(arg);
1384
1385
            /* Allocate memory for iv if needed. */
1386
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
1387
                if (gctx->iv != c->iv)
1388
                    OPENSSL_free(gctx->iv);
1389
1390
                if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
1391
                    ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
1392
                    return 0;
1393
                }
1394
            }
1395
            /* Add padding. */
1396
            memset(gctx->iv + arg, 0, len - arg - 8);
1397
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
1398
        }
1399
        gctx->ivlen = arg;
1400
        return 1;
1401
1402
    case EVP_CTRL_AEAD_SET_TAG:
1403
        buf = EVP_CIPHER_CTX_buf_noconst(c);
1404
        enc = EVP_CIPHER_CTX_is_encrypting(c);
1405
        if (arg <= 0 || arg > 16 || enc)
1406
            return 0;
1407
1408
        memcpy(buf, ptr, arg);
1409
        gctx->taglen = arg;
1410
        return 1;
1411
1412
    case EVP_CTRL_AEAD_GET_TAG:
1413
        enc = EVP_CIPHER_CTX_is_encrypting(c);
1414
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
1415
            return 0;
1416
1417
        memcpy(ptr, gctx->kma.param.t.b, arg);
1418
        return 1;
1419
1420
    case EVP_CTRL_GCM_SET_IV_FIXED:
1421
        /* Special case: -1 length restores whole iv */
1422
        if (arg == -1) {
1423
            memcpy(gctx->iv, ptr, gctx->ivlen);
1424
            gctx->iv_gen = 1;
1425
            return 1;
1426
        }
1427
        /*
1428
         * Fixed field must be at least 4 bytes and invocation field at least
1429
         * 8.
1430
         */
1431
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
1432
            return 0;
1433
1434
        if (arg)
1435
            memcpy(gctx->iv, ptr, arg);
1436
1437
        enc = EVP_CIPHER_CTX_is_encrypting(c);
1438
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
1439
            return 0;
1440
1441
        gctx->iv_gen = 1;
1442
        return 1;
1443
1444
    case EVP_CTRL_GCM_IV_GEN:
1445
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
1446
            return 0;
1447
1448
        s390x_aes_gcm_setiv(gctx, gctx->iv);
1449
1450
        if (arg <= 0 || arg > gctx->ivlen)
1451
            arg = gctx->ivlen;
1452
1453
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
1454
        /*
1455
         * Invocation field will be at least 8 bytes in size and so no need
1456
         * to check wrap around or increment more than last 8 bytes.
1457
         */
1458
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
1459
        gctx->iv_set = 1;
1460
        return 1;
1461
1462
    case EVP_CTRL_GCM_SET_IV_INV:
1463
        enc = EVP_CIPHER_CTX_is_encrypting(c);
1464
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
1465
            return 0;
1466
1467
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
1468
        s390x_aes_gcm_setiv(gctx, gctx->iv);
1469
        gctx->iv_set = 1;
1470
        return 1;
1471
1472
    case EVP_CTRL_AEAD_TLS1_AAD:
1473
        /* Save the aad for later use. */
1474
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
1475
            return 0;
1476
1477
        buf = EVP_CIPHER_CTX_buf_noconst(c);
1478
        memcpy(buf, ptr, arg);
1479
        gctx->tls_aad_len = arg;
1480
        gctx->tls_enc_records = 0;
1481
1482
        len = buf[arg - 2] << 8 | buf[arg - 1];
1483
        /* Correct length for explicit iv. */
1484
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
1485
            return 0;
1486
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
1487
1488
        /* If decrypting correct for tag too. */
1489
        enc = EVP_CIPHER_CTX_is_encrypting(c);
1490
        if (!enc) {
1491
            if (len < EVP_GCM_TLS_TAG_LEN)
1492
                return 0;
1493
            len -= EVP_GCM_TLS_TAG_LEN;
1494
        }
1495
        buf[arg - 2] = len >> 8;
1496
        buf[arg - 1] = len & 0xff;
1497
        /* Extra padding: tag appended to record. */
1498
        return EVP_GCM_TLS_TAG_LEN;
1499
1500
    case EVP_CTRL_COPY:
1501
        out = ptr;
1502
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
1503
1504
        if (gctx->iv == c->iv) {
1505
            gctx_out->iv = out->iv;
1506
        } else {
1507
            len = S390X_gcm_ivpadlen(gctx->ivlen);
1508
1509
            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
1510
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
1511
                return 0;
1512
            }
1513
1514
            memcpy(gctx_out->iv, gctx->iv, len);
1515
        }
1516
        return 1;
1517
1518
    default:
1519
        return -1;
1520
    }
1521
}
1522
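These controls are normally reached through EVP_CIPHER_CTX_ctrl(). A minimal sketch of the two most common ones, assuming the GCM cipher has already been attached to ctx and that gcm_ctrl_example is just an illustrative wrapper:

#include <openssl/evp.h>

/* Set a 16-byte IV length before the key/IV init, fetch the tag afterwards. */
static int gcm_ctrl_example(EVP_CIPHER_CTX *ctx, unsigned char tag[16])
{
    if (!EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 16, NULL))
        return 0;
    /* ... key/IV init, AAD, encryption and EVP_EncryptFinal_ex() go here ... */
    return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag);
}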
1523
/*-
1524
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1525
 */
1526
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
1527
    const unsigned char *key,
1528
    const unsigned char *iv, int enc)
1529
{
1530
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1531
    int keylen;
1532
1533
    if (iv == NULL && key == NULL)
1534
        return 1;
1535
1536
    if (key != NULL) {
1537
        keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1538
        memcpy(&gctx->kma.param.k, key, keylen);
1539
1540
        gctx->fc = S390X_AES_FC(keylen);
1541
        if (!enc)
1542
            gctx->fc |= S390X_DECRYPT;
1543
1544
        if (iv == NULL && gctx->iv_set)
1545
            iv = gctx->iv;
1546
1547
        if (iv != NULL) {
1548
            s390x_aes_gcm_setiv(gctx, iv);
1549
            gctx->iv_set = 1;
1550
        }
1551
        gctx->key_set = 1;
1552
    } else {
1553
        if (gctx->key_set)
1554
            s390x_aes_gcm_setiv(gctx, iv);
1555
        else
1556
            memcpy(gctx->iv, iv, gctx->ivlen);
1557
1558
        gctx->iv_set = 1;
1559
        gctx->iv_gen = 0;
1560
    }
1561
    return 1;
1562
}
1563
1564
/*-
1565
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1566
 * if successful. Otherwise -1 is returned. Code is big-endian.
1567
 */
1568
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1569
    const unsigned char *in, size_t len)
1570
{
1571
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1572
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1573
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1574
    int rv = -1;
1575
1576
    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1577
        return -1;
1578
1579
    /*
1580
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
1581
     * Requirements from SP 800-38D".  The requirement is for one party to the
1582
     * communication to fail after 2^64 - 1 keys.  We do this on the encrypting
1583
     * side only.
1584
     */
1585
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
1586
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
1587
        goto err;
1588
    }
1589
1590
    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
1591
            EVP_GCM_TLS_EXPLICIT_IV_LEN, out)
1592
        <= 0)
1593
        goto err;
1594
1595
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1596
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1597
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1598
1599
    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1600
    gctx->kma.param.tpcl = len << 3;
1601
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1602
        gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1603
1604
    if (enc) {
1605
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1606
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1607
    } else {
1608
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1609
                EVP_GCM_TLS_TAG_LEN)) {
1610
            OPENSSL_cleanse(out, len);
1611
            goto err;
1612
        }
1613
        rv = len;
1614
    }
1615
err:
1616
    gctx->iv_set = 0;
1617
    gctx->tls_aad_len = -1;
1618
    return rv;
1619
}
1620
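The TLS path consumes and produces records laid out as explicit_iv || payload || tag, so the size overhead per record is fixed. A quick check with the public constants used above:

#include <openssl/evp.h>
#include <assert.h>

int main(void)
{
    assert(EVP_GCM_TLS_EXPLICIT_IV_LEN == 8);
    assert(EVP_GCM_TLS_TAG_LEN == 16);
    /* A 100-byte plaintext becomes a 124-byte GCM TLS record. */
    assert(100 + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN == 124);
    return 0;
}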
1621
/*-
1622
 * Called from EVP layer to initialize context, process additional
1623
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1624
 * ciphertext or process a TLS packet, depending on context. Returns bytes
1625
 * written on success. Otherwise -1 is returned. Code is big-endian.
1626
 */
1627
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1628
    const unsigned char *in, size_t len)
1629
{
1630
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1631
    unsigned char *buf, tmp[16];
1632
    int enc;
1633
1634
    if (!gctx->key_set)
1635
        return -1;
1636
1637
    if (gctx->tls_aad_len >= 0)
1638
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1639
1640
    if (!gctx->iv_set)
1641
        return -1;
1642
1643
    if (in != NULL) {
1644
        if (out == NULL) {
1645
            if (s390x_aes_gcm_aad(gctx, in, len))
1646
                return -1;
1647
        } else {
1648
            if (s390x_aes_gcm(gctx, in, out, len))
1649
                return -1;
1650
        }
1651
        return len;
1652
    } else {
1653
        gctx->kma.param.taadl <<= 3;
1654
        gctx->kma.param.tpcl <<= 3;
1655
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1656
            gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1657
        /* recall that we already did en-/decrypt gctx->mres
1658
         * and returned it to caller... */
1659
        OPENSSL_cleanse(tmp, gctx->mreslen);
1660
        gctx->iv_set = 0;
1661
1662
        enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1663
        if (enc) {
1664
            gctx->taglen = 16;
1665
        } else {
1666
            if (gctx->taglen < 0)
1667
                return -1;
1668
1669
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1670
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1671
                return -1;
1672
        }
1673
        return 0;
1674
    }
1675
}
1676
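The dispatch above (AAD when out is NULL, en/de-cryption otherwise, tag handling when in is NULL) is driven by the ordinary EVP calls. A hedged end-to-end sketch for AES-256-GCM encryption, with error handling reduced to the minimum and gcm_encrypt_sketch an assumed name:

#include <openssl/evp.h>

/* One-shot AES-256-GCM encrypt; returns 1 on success, 0 otherwise. */
static int gcm_encrypt_sketch(const unsigned char key[32],
                              const unsigned char iv[12],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int outl, ok = 0;

    if (ctx == NULL)
        return 0;
    if (EVP_EncryptInit_ex(ctx, EVP_aes_256_gcm(), NULL, key, iv)
            && EVP_EncryptUpdate(ctx, NULL, &outl, aad, aadlen)  /* out == NULL: AAD */
            && EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen)      /* payload */
            && EVP_EncryptFinal_ex(ctx, ct + outl, &outl)        /* finalize */
            && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag))
        ok = 1;
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}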
1677
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1678
{
1679
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1680
1681
    if (gctx == NULL)
1682
        return 0;
1683
1684
    if (gctx->iv != c->iv)
1685
        OPENSSL_free(gctx->iv);
1686
1687
    OPENSSL_cleanse(gctx, sizeof(*gctx));
1688
    return 1;
1689
}
1690
1691
#define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
1692
1693
#define s390x_aes_xts_init_key aes_xts_init_key
1694
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1695
    const unsigned char *key,
1696
    const unsigned char *iv, int enc);
1697
#define s390x_aes_xts_cipher aes_xts_cipher
1698
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1699
    const unsigned char *in, size_t len);
1700
#define s390x_aes_xts_ctrl aes_xts_ctrl
1701
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1702
#define s390x_aes_xts_cleanup aes_xts_cleanup
1703
1704
/*-
1705
 * Set nonce and length fields. Code is big-endian.
1706
 */
1707
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1708
    const unsigned char *nonce,
1709
    size_t mlen)
1710
{
1711
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1712
    ctx->aes.ccm.nonce.g[1] = mlen;
1713
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1714
}
1715
1716
/*-
1717
 * Process additional authenticated data. Code is big-endian.
1718
 */
1719
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1720
    size_t alen)
1721
{
1722
    unsigned char *ptr;
1723
    int i, rem;
1724
1725
    if (!alen)
1726
        return;
1727
1728
    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1729
1730
    /* Suppress 'type-punned pointer dereference' warning. */
1731
    ptr = ctx->aes.ccm.buf.b;
1732
1733
    if (alen < ((1 << 16) - (1 << 8))) {
1734
        *(uint16_t *)ptr = alen;
1735
        i = 2;
1736
    } else if (sizeof(alen) == 8
1737
        && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1738
        *(uint16_t *)ptr = 0xffff;
1739
        *(uint64_t *)(ptr + 2) = alen;
1740
        i = 10;
1741
    } else {
1742
        *(uint16_t *)ptr = 0xfffe;
1743
        *(uint32_t *)(ptr + 2) = alen;
1744
        i = 6;
1745
    }
1746
1747
    while (i < 16 && alen) {
1748
        ctx->aes.ccm.buf.b[i] = *aad;
1749
        ++aad;
1750
        --alen;
1751
        ++i;
1752
    }
1753
    while (i < 16) {
1754
        ctx->aes.ccm.buf.b[i] = 0;
1755
        ++i;
1756
    }
1757
1758
    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
1759
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
1760
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
1761
        &ctx->aes.ccm.kmac_param);
1762
    ctx->aes.ccm.blocks += 2;
1763
1764
    rem = alen & 0xf;
1765
    alen &= ~(size_t)0xf;
1766
    if (alen) {
1767
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1768
        ctx->aes.ccm.blocks += alen >> 4;
1769
        aad += alen;
1770
    }
1771
    if (rem) {
1772
        for (i = 0; i < rem; i++)
1773
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
1774
1775
        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1776
            ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1777
            ctx->aes.ccm.kmac_param.k);
1778
        ctx->aes.ccm.blocks++;
1779
    }
1780
}
1781
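The prefix written into the first bytes of the buffer above is the RFC 3610 encoding of the AAD length: 2 bytes for small lengths, a 0xff 0xfe marker plus 4 bytes below 2^32, and a 0xff 0xff marker plus 8 bytes beyond that. A standalone big-endian sketch of just that encoding (illustrative helper name):

#include <stdint.h>

/* Returns the number of prefix bytes written into out (2, 6 or 10). */
static int ccm_encode_aad_len(uint64_t alen, unsigned char out[10])
{
    int i;

    if (alen < 0xff00) {                    /* < 2^16 - 2^8 */
        out[0] = (unsigned char)(alen >> 8);
        out[1] = (unsigned char)alen;
        return 2;
    }
    if (alen <= 0xffffffffULL) {            /* 0xff 0xfe marker + 32-bit length */
        out[0] = 0xff;
        out[1] = 0xfe;
        for (i = 0; i < 4; i++)
            out[2 + i] = (unsigned char)(alen >> (8 * (3 - i)));
        return 6;
    }
    out[0] = 0xff;                          /* 0xff 0xff marker + 64-bit length */
    out[1] = 0xff;
    for (i = 0; i < 8; i++)
        out[2 + i] = (unsigned char)(alen >> (8 * (7 - i)));
    return 10;
}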
1782
/*-
1783
 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1784
 * success.
1785
 */
1786
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
1787
    unsigned char *out, size_t len, int enc)
1788
{
1789
    size_t n, rem;
1790
    unsigned int i, l, num;
1791
    unsigned char flags;
1792
1793
    flags = ctx->aes.ccm.nonce.b[0];
1794
    if (!(flags & S390X_CCM_AAD_FLAG)) {
1795
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
1796
            ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
1797
        ctx->aes.ccm.blocks++;
1798
    }
1799
    l = flags & 0x7;
1800
    ctx->aes.ccm.nonce.b[0] = l;
1801
1802
    /*-
1803
     * Reconstruct length from encoded length field
1804
     * and initialize it with counter value.
1805
     */
1806
    n = 0;
1807
    for (i = 15 - l; i < 15; i++) {
1808
        n |= ctx->aes.ccm.nonce.b[i];
1809
        ctx->aes.ccm.nonce.b[i] = 0;
1810
        n <<= 8;
1811
    }
1812
    n |= ctx->aes.ccm.nonce.b[15];
1813
    ctx->aes.ccm.nonce.b[15] = 1;
1814
1815
    if (n != len)
1816
        return -1; /* length mismatch */
1817
1818
    if (enc) {
1819
        /* Two operations per block plus one for tag encryption */
1820
        ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
1821
        if (ctx->aes.ccm.blocks > (1ULL << 61))
1822
            return -2; /* too much data */
1823
    }
1824
1825
    num = 0;
1826
    rem = len & 0xf;
1827
    len &= ~(size_t)0xf;
1828
1829
    if (enc) {
1830
        /* mac-then-encrypt */
1831
        if (len)
1832
            s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1833
        if (rem) {
1834
            for (i = 0; i < rem; i++)
1835
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
1836
1837
            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1838
                ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1839
                ctx->aes.ccm.kmac_param.k);
1840
        }
1841
1842
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1843
            ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1844
            &num, (ctr128_f)AES_ctr32_encrypt);
1845
    } else {
1846
        /* decrypt-then-mac */
1847
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
1848
            ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
1849
            &num, (ctr128_f)AES_ctr32_encrypt);
1850
1851
        if (len)
1852
            s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
1853
        if (rem) {
1854
            for (i = 0; i < rem; i++)
1855
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
1856
1857
            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
1858
                ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
1859
                ctx->aes.ccm.kmac_param.k);
1860
        }
1861
    }
1862
    /* encrypt tag */
1863
    for (i = 15 - l; i < 16; i++)
1864
        ctx->aes.ccm.nonce.b[i] = 0;
1865
1866
    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
1867
        ctx->aes.ccm.kmac_param.k);
1868
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
1869
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
1870
1871
    ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
1872
    return 0;
1873
}
1874
1875
/*-
1876
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1877
 * if successful. Otherwise -1 is returned.
1878
 */
1879
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1880
    const unsigned char *in, size_t len)
1881
{
1882
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1883
    unsigned char *ivec = ctx->iv;
1884
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1885
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1886
1887
    if (out != in
1888
        || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
1889
        return -1;
1890
1891
    if (enc) {
1892
        /* Set explicit iv (sequence number). */
1893
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1894
    }
1895
1896
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1897
    /*-
1898
     * Get explicit iv (sequence number). We already have fixed iv
1899
     * (server/client_write_iv) here.
1900
     */
1901
    memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
1902
    s390x_aes_ccm_setiv(cctx, ivec, len);
1903
1904
    /* Process aad (sequence number|type|version|length) */
1905
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
1906
1907
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1908
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
1909
1910
    if (enc) {
1911
        if (s390x_aes_ccm(cctx, in, out, len, enc))
1912
            return -1;
1913
1914
        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
1915
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
1916
    } else {
1917
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
1918
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
1919
                    cctx->aes.ccm.m))
1920
                return len;
1921
        }
1922
1923
        OPENSSL_cleanse(out, len);
1924
        return -1;
1925
    }
1926
}
1927
1928
/*-
1929
 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
1930
 * returned.
1931
 */
1932
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
1933
    const unsigned char *key,
1934
    const unsigned char *iv, int enc)
1935
{
1936
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1937
    int keylen;
1938
1939
    if (iv == NULL && key == NULL)
1940
        return 1;
1941
1942
    if (key != NULL) {
1943
        keylen = EVP_CIPHER_CTX_get_key_length(ctx);
1944
        cctx->aes.ccm.fc = S390X_AES_FC(keylen);
1945
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
1946
1947
        /* Store encoded m and l. */
1948
        cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
1949
            | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
1950
        memset(cctx->aes.ccm.nonce.b + 1, 0,
1951
            sizeof(cctx->aes.ccm.nonce.b));
1952
        cctx->aes.ccm.blocks = 0;
1953
1954
        cctx->aes.ccm.key_set = 1;
1955
    }
1956
1957
    if (iv != NULL) {
1958
        memcpy(ctx->iv, iv, 15 - cctx->aes.ccm.l);
1959
1960
        cctx->aes.ccm.iv_set = 1;
1961
    }
1962
1963
    return 1;
1964
}
1965
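The first nonce byte written above packs the CCM parameters as in RFC 3610: bits 0-2 hold L-1 and bits 3-5 hold (M-2)/2; the AAD flag (0x40) is OR-ed in separately by the aad routine. A one-line illustrative helper:

/* RFC 3610 flags byte from L (length-field size) and M (tag length).
 * The defaults above, L = 8 and M = 12, give 0x2f. */
static unsigned char ccm_flags(int l, int m)
{
    return (unsigned char)(((l - 1) & 0x7) | ((((m - 2) >> 1) & 0x7) << 3));
}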
1966
/*-
1967
 * Called from EVP layer to initialize context, process additional
1968
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1969
 * plaintext or process a TLS packet, depending on context. Returns bytes
1970
 * written on success. Otherwise -1 is returned.
1971
 */
1972
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1973
    const unsigned char *in, size_t len)
1974
{
1975
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
1976
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
1977
    int rv;
1978
    unsigned char *buf;
1979
1980
    if (!cctx->aes.ccm.key_set)
1981
        return -1;
1982
1983
    if (cctx->aes.ccm.tls_aad_len >= 0)
1984
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
1985
1986
    /*-
1987
     * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
1988
     * so integrity must be checked already at Update() i.e., before
1989
     * potentially corrupted data is output.
1990
     */
1991
    if (in == NULL && out != NULL)
1992
        return 0;
1993
1994
    if (!cctx->aes.ccm.iv_set)
1995
        return -1;
1996
1997
    if (out == NULL) {
1998
        /* Update(): Pass message length. */
1999
        if (in == NULL) {
2000
            s390x_aes_ccm_setiv(cctx, ctx->iv, len);
2001
2002
            cctx->aes.ccm.len_set = 1;
2003
            return len;
2004
        }
2005
2006
        /* Update(): Process aad. */
2007
        if (!cctx->aes.ccm.len_set && len)
2008
            return -1;
2009
2010
        s390x_aes_ccm_aad(cctx, in, len);
2011
        return len;
2012
    }
2013
2014
    /* The tag must be set before actually decrypting data */
2015
    if (!enc && !cctx->aes.ccm.tag_set)
2016
        return -1;
2017
2018
    /* Update(): Process message. */
2019
2020
    if (!cctx->aes.ccm.len_set) {
2021
        /*-
2022
         * In case message length was not previously set explicitly via
2023
         * Update(), set it now.
2024
         */
2025
        s390x_aes_ccm_setiv(cctx, ctx->iv, len);
2026
2027
        cctx->aes.ccm.len_set = 1;
2028
    }
2029
2030
    if (enc) {
2031
        if (s390x_aes_ccm(cctx, in, out, len, enc))
2032
            return -1;
2033
2034
        cctx->aes.ccm.tag_set = 1;
2035
        return len;
2036
    } else {
2037
        rv = -1;
2038
2039
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2040
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2041
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2042
                    cctx->aes.ccm.m))
2043
                rv = len;
2044
        }
2045
2046
        if (rv == -1)
2047
            OPENSSL_cleanse(out, len);
2048
2049
        cctx->aes.ccm.iv_set = 0;
2050
        cctx->aes.ccm.tag_set = 0;
2051
        cctx->aes.ccm.len_set = 0;
2052
        return rv;
2053
    }
2054
}
2055
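At the EVP layer the call order for CCM mirrors the dispatch above: IV length and tag length must be set before the key/IV init, and the total plaintext length must be passed (in == NULL, out == NULL) before any AAD or data. A hedged sketch for AES-128-CCM with a 7-byte nonce and 12-byte tag (ccm_encrypt_sketch is an assumed name):

#include <openssl/evp.h>

static int ccm_encrypt_sketch(const unsigned char key[16],
                              const unsigned char nonce[7],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct, unsigned char tag[12])
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int outl, ok = 0;

    if (ctx == NULL)
        return 0;
    if (EVP_EncryptInit_ex(ctx, EVP_aes_128_ccm(), NULL, NULL, NULL)
            && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 7, NULL)
            && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 12, NULL) /* length only */
            && EVP_EncryptInit_ex(ctx, NULL, NULL, key, nonce)
            && EVP_EncryptUpdate(ctx, NULL, &outl, NULL, ptlen)   /* total length */
            && EVP_EncryptUpdate(ctx, NULL, &outl, aad, aadlen)   /* AAD */
            && EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen)
            && EVP_EncryptFinal_ex(ctx, ct + outl, &outl)
            && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 12, tag))
        ok = 1;
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}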
2056
/*-
2057
 * Performs various operations on the context structure depending on control
2058
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2059
 * Code is big-endian.
2060
 */
2061
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2062
{
2063
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2064
    unsigned char *buf;
2065
    int enc, len;
2066
2067
    switch (type) {
2068
    case EVP_CTRL_INIT:
2069
        cctx->aes.ccm.key_set = 0;
2070
        cctx->aes.ccm.iv_set = 0;
2071
        cctx->aes.ccm.l = 8;
2072
        cctx->aes.ccm.m = 12;
2073
        cctx->aes.ccm.tag_set = 0;
2074
        cctx->aes.ccm.len_set = 0;
2075
        cctx->aes.ccm.tls_aad_len = -1;
2076
        return 1;
2077
2078
    case EVP_CTRL_GET_IVLEN:
2079
        *(int *)ptr = 15 - cctx->aes.ccm.l;
2080
        return 1;
2081
2082
    case EVP_CTRL_AEAD_TLS1_AAD:
2083
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
2084
            return 0;
2085
2086
        /* Save the aad for later use. */
2087
        buf = EVP_CIPHER_CTX_buf_noconst(c);
2088
        memcpy(buf, ptr, arg);
2089
        cctx->aes.ccm.tls_aad_len = arg;
2090
2091
        len = buf[arg - 2] << 8 | buf[arg - 1];
2092
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2093
            return 0;
2094
2095
        /* Correct length for explicit iv. */
2096
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2097
2098
        enc = EVP_CIPHER_CTX_is_encrypting(c);
2099
        if (!enc) {
2100
            if (len < cctx->aes.ccm.m)
2101
                return 0;
2102
2103
            /* Correct length for tag. */
2104
            len -= cctx->aes.ccm.m;
2105
        }
2106
2107
        buf[arg - 2] = len >> 8;
2108
        buf[arg - 1] = len & 0xff;
2109
2110
        /* Extra padding: tag appended to record. */
2111
        return cctx->aes.ccm.m;
2112
2113
    case EVP_CTRL_CCM_SET_IV_FIXED:
2114
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2115
            return 0;
2116
2117
        /* Copy to first part of the iv. */
2118
        memcpy(c->iv, ptr, arg);
2119
        return 1;
2120
2121
    case EVP_CTRL_AEAD_SET_IVLEN:
2122
        arg = 15 - arg;
2123
        /* fall-through */
2124
2125
    case EVP_CTRL_CCM_SET_L:
2126
        if (arg < 2 || arg > 8)
2127
            return 0;
2128
2129
        cctx->aes.ccm.l = arg;
2130
        return 1;
2131
2132
    case EVP_CTRL_AEAD_SET_TAG:
2133
        if ((arg & 1) || arg < 4 || arg > 16)
2134
            return 0;
2135
2136
        enc = EVP_CIPHER_CTX_is_encrypting(c);
2137
        if (enc && ptr)
2138
            return 0;
2139
2140
        if (ptr) {
2141
            cctx->aes.ccm.tag_set = 1;
2142
            buf = EVP_CIPHER_CTX_buf_noconst(c);
2143
            memcpy(buf, ptr, arg);
2144
        }
2145
2146
        cctx->aes.ccm.m = arg;
2147
        return 1;
2148
2149
    case EVP_CTRL_AEAD_GET_TAG:
2150
        enc = EVP_CIPHER_CTX_is_encrypting(c);
2151
        if (!enc || !cctx->aes.ccm.tag_set)
2152
            return 0;
2153
2154
        if (arg < cctx->aes.ccm.m)
2155
            return 0;
2156
2157
        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2158
        cctx->aes.ccm.tag_set = 0;
2159
        cctx->aes.ccm.iv_set = 0;
2160
        cctx->aes.ccm.len_set = 0;
2161
        return 1;
2162
2163
    case EVP_CTRL_COPY:
2164
        return 1;
2165
2166
    default:
2167
        return -1;
2168
    }
2169
}
2170
2171
#define s390x_aes_ccm_cleanup aes_ccm_cleanup
2172
2173
#ifndef OPENSSL_NO_OCB
2174
#define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2175
2176
#define s390x_aes_ocb_init_key aes_ocb_init_key
2177
static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2178
    const unsigned char *iv, int enc);
2179
#define s390x_aes_ocb_cipher aes_ocb_cipher
2180
static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2181
    const unsigned char *in, size_t len);
2182
#define s390x_aes_ocb_cleanup aes_ocb_cleanup
2183
static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2184
#define s390x_aes_ocb_ctrl aes_ocb_ctrl
2185
static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2186
#endif
2187
2188
#ifndef OPENSSL_NO_SIV
2189
#define S390X_AES_SIV_CTX EVP_AES_SIV_CTX
2190
2191
#define s390x_aes_siv_init_key aes_siv_init_key
2192
#define s390x_aes_siv_cipher aes_siv_cipher
2193
#define s390x_aes_siv_cleanup aes_siv_cleanup
2194
#define s390x_aes_siv_ctrl aes_siv_ctrl
2195
#endif
2196
2197
#define BLOCK_CIPHER_generic(nid, keylen, blocksize, ivlen, nmode, mode,                                      \
2198
    MODE, flags)                                                                                              \
2199
    static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                                                   \
2200
        nid##_##keylen##_##nmode, blocksize,                                                                  \
2201
        keylen / 8,                                                                                           \
2202
        ivlen,                                                                                                \
2203
        flags | EVP_CIPH_##MODE##_MODE,                                                                       \
2204
        EVP_ORIG_GLOBAL,                                                                                      \
2205
        s390x_aes_##mode##_init_key,                                                                          \
2206
        s390x_aes_##mode##_cipher,                                                                            \
2207
        NULL,                                                                                                 \
2208
        sizeof(S390X_AES_##MODE##_CTX),                                                                       \
2209
        NULL,                                                                                                 \
2210
        NULL,                                                                                                 \
2211
        NULL,                                                                                                 \
2212
        NULL                                                                                                  \
2213
    };                                                                                                        \
2214
    static const EVP_CIPHER aes_##keylen##_##mode = {                                                         \
2215
        nid##_##keylen##_##nmode,                                                                             \
2216
        blocksize,                                                                                            \
2217
        keylen / 8,                                                                                           \
2218
        ivlen,                                                                                                \
2219
        flags | EVP_CIPH_##MODE##_MODE,                                                                       \
2220
        EVP_ORIG_GLOBAL,                                                                                      \
2221
        aes_init_key,                                                                                         \
2222
        aes_##mode##_cipher,                                                                                  \
2223
        NULL,                                                                                                 \
2224
        sizeof(EVP_AES_KEY),                                                                                  \
2225
        NULL,                                                                                                 \
2226
        NULL,                                                                                                 \
2227
        NULL,                                                                                                 \
2228
        NULL                                                                                                  \
2229
    };                                                                                                        \
2230
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                                         \
2231
    {                                                                                                         \
2232
        return S390X_aes_##keylen##_##mode##_CAPABLE ? &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2233
    }
2234
2235
#define BLOCK_CIPHER_custom(nid, keylen, blocksize, ivlen, mode, MODE, flags)                                              \
2236
    static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                                                                \
2237
        nid##_##keylen##_##mode,                                                                                           \
2238
        blocksize,                                                                                                         \
2239
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2240
        ivlen,                                                                                                             \
2241
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
2242
        EVP_ORIG_GLOBAL,                                                                                                   \
2243
        s390x_aes_##mode##_init_key,                                                                                       \
2244
        s390x_aes_##mode##_cipher,                                                                                         \
2245
        s390x_aes_##mode##_cleanup,                                                                                        \
2246
        sizeof(S390X_AES_##MODE##_CTX),                                                                                    \
2247
        NULL,                                                                                                              \
2248
        NULL,                                                                                                              \
2249
        s390x_aes_##mode##_ctrl,                                                                                           \
2250
        NULL                                                                                                               \
2251
    };                                                                                                                     \
2252
    static const EVP_CIPHER aes_##keylen##_##mode = {                                                                      \
2253
        nid##_##keylen##_##mode, blocksize,                                                                                \
2254
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2255
        ivlen,                                                                                                             \
2256
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
2257
        EVP_ORIG_GLOBAL,                                                                                                   \
2258
        aes_##mode##_init_key,                                                                                             \
2259
        aes_##mode##_cipher,                                                                                               \
2260
        aes_##mode##_cleanup,                                                                                              \
2261
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
2262
        NULL,                                                                                                              \
2263
        NULL,                                                                                                              \
2264
        aes_##mode##_ctrl,                                                                                                 \
2265
        NULL                                                                                                               \
2266
    };                                                                                                                     \
2267
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                                                      \
2268
    {                                                                                                                      \
2269
        return S390X_aes_##keylen##_##mode##_CAPABLE ? &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;              \
2270
    }
2271
2272
#else
2273
2274
#define BLOCK_CIPHER_generic(nid, keylen, blocksize, ivlen, nmode, mode, MODE, flags) \
2275
    static const EVP_CIPHER aes_##keylen##_##mode = {                                 \
2276
        nid##_##keylen##_##nmode, blocksize, keylen / 8, ivlen,                       \
2277
        flags | EVP_CIPH_##MODE##_MODE,                                               \
2278
        EVP_ORIG_GLOBAL,                                                              \
2279
        aes_init_key,                                                                 \
2280
        aes_##mode##_cipher,                                                          \
2281
        NULL,                                                                         \
2282
        sizeof(EVP_AES_KEY),                                                          \
2283
        NULL, NULL, NULL, NULL                                                        \
2284
    };                                                                                \
2285
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                 \
2286
    {                                                                                 \
2287
        return &aes_##keylen##_##mode;                                                \
2288
    }
2289
2290
#define BLOCK_CIPHER_custom(nid, keylen, blocksize, ivlen, mode, MODE, flags)                                              \
2291
    static const EVP_CIPHER aes_##keylen##_##mode = {                                                                      \
2292
        nid##_##keylen##_##mode, blocksize,                                                                                \
2293
        (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE || EVP_CIPH_##MODE##_MODE == EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8, \
2294
        ivlen,                                                                                                             \
2295
        flags | EVP_CIPH_##MODE##_MODE,                                                                                    \
2296
        EVP_ORIG_GLOBAL,                                                                                                   \
2297
        aes_##mode##_init_key,                                                                                             \
2298
        aes_##mode##_cipher,                                                                                               \
2299
        aes_##mode##_cleanup,                                                                                              \
2300
        sizeof(EVP_AES_##MODE##_CTX),                                                                                      \
2301
        NULL, NULL, aes_##mode##_ctrl, NULL                                                                                \
2302
    };                                                                                                                     \
2303
    const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                                                                      \
2304
    {                                                                                                                      \
2305
        return &aes_##keylen##_##mode;                                                                                     \
2306
    }
2307
2308
#endif
2309
2310
#define BLOCK_CIPHER_generic_pack(nid, keylen, flags)                                                          \
2311
    BLOCK_CIPHER_generic(nid, keylen, 16, 16, cbc, cbc, CBC, flags | EVP_CIPH_FLAG_DEFAULT_ASN1)               \
2312
        BLOCK_CIPHER_generic(nid, keylen, 16, 0, ecb, ecb, ECB, flags | EVP_CIPH_FLAG_DEFAULT_ASN1)            \
2313
            BLOCK_CIPHER_generic(nid, keylen, 1, 16, ofb128, ofb, OFB, flags | EVP_CIPH_FLAG_DEFAULT_ASN1)     \
2314
                BLOCK_CIPHER_generic(nid, keylen, 1, 16, cfb128, cfb, CFB, flags | EVP_CIPH_FLAG_DEFAULT_ASN1) \
2315
                    BLOCK_CIPHER_generic(nid, keylen, 1, 16, cfb1, cfb1, CFB, flags)                           \
2316
                        BLOCK_CIPHER_generic(nid, keylen, 1, 16, cfb8, cfb8, CFB, flags)                       \
2317
                            BLOCK_CIPHER_generic(nid, keylen, 1, 16, ctr, ctr, CTR, flags)
2318
2319
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2320
    const unsigned char *iv, int enc)
2321
0
{
2322
0
    int ret, mode;
2323
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2324
2325
0
    mode = EVP_CIPHER_CTX_get_mode(ctx);
2326
0
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2327
0
        && !enc) {
2328
#ifdef HWAES_CAPABLE
2329
        if (HWAES_CAPABLE) {
2330
            ret = HWAES_set_decrypt_key(key,
2331
                EVP_CIPHER_CTX_get_key_length(ctx) * 8,
2332
                &dat->ks.ks);
2333
            dat->block = (block128_f)HWAES_decrypt;
2334
            dat->stream.cbc = NULL;
2335
#ifdef HWAES_cbc_encrypt
2336
            if (mode == EVP_CIPH_CBC_MODE)
2337
                dat->stream.cbc = (cbc128_f)HWAES_cbc_encrypt;
2338
#endif
2339
        } else
2340
#endif
2341
0
#ifdef BSAES_CAPABLE
2342
0
            if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2343
0
            ret = AES_set_decrypt_key(key,
2344
0
                EVP_CIPHER_CTX_get_key_length(ctx) * 8,
2345
0
                &dat->ks.ks);
2346
0
            dat->block = (block128_f)AES_decrypt;
2347
0
            dat->stream.cbc = (cbc128_f)ossl_bsaes_cbc_encrypt;
2348
0
        } else
2349
0
#endif
2350
0
#ifdef VPAES_CAPABLE
2351
0
            if (VPAES_CAPABLE) {
2352
0
            ret = vpaes_set_decrypt_key(key,
2353
0
                EVP_CIPHER_CTX_get_key_length(ctx) * 8,
2354
0
                &dat->ks.ks);
2355
0
            dat->block = (block128_f)vpaes_decrypt;
2356
0
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
2357
0
        } else
2358
0
#endif
2359
0
        {
2360
0
            ret = AES_set_decrypt_key(key,
2361
0
                EVP_CIPHER_CTX_get_key_length(ctx) * 8,
2362
0
                &dat->ks.ks);
2363
0
            dat->block = (block128_f)AES_decrypt;
2364
0
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
2365
0
        }
2366
0
    } else
2367
#ifdef HWAES_CAPABLE
2368
        if (HWAES_CAPABLE) {
2369
        ret = HWAES_set_encrypt_key(key,
2370
            EVP_CIPHER_CTX_get_key_length(ctx) * 8,
2371
            &dat->ks.ks);
2372
        dat->block = (block128_f)HWAES_encrypt;
2373
        dat->stream.cbc = NULL;
2374
#ifdef HWAES_cbc_encrypt
2375
        if (mode == EVP_CIPH_CBC_MODE)
2376
            dat->stream.cbc = (cbc128_f)HWAES_cbc_encrypt;
2377
        else
2378
#endif
2379
#ifdef HWAES_ctr32_encrypt_blocks
2380
            if (mode == EVP_CIPH_CTR_MODE)
2381
            dat->stream.ctr = (ctr128_f)HWAES_ctr32_encrypt_blocks;
2382
        else
2383
#endif
2384
            (void)0; /* terminate potentially open 'else' */
2385
    } else
2386
#endif
2387
0
#ifdef BSAES_CAPABLE
2388
0
        if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2389
0
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
2390
0
            &dat->ks.ks);
2391
0
        dat->block = (block128_f)AES_encrypt;
2392
0
        dat->stream.ctr = (ctr128_f)ossl_bsaes_ctr32_encrypt_blocks;
2393
0
    } else
2394
0
#endif
2395
0
#ifdef VPAES_CAPABLE
2396
0
        if (VPAES_CAPABLE) {
2397
0
        ret = vpaes_set_encrypt_key(key,
2398
0
            EVP_CIPHER_CTX_get_key_length(ctx) * 8,
2399
0
            &dat->ks.ks);
2400
0
        dat->block = (block128_f)vpaes_encrypt;
2401
0
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
2402
0
    } else
2403
0
#endif
2404
0
    {
2405
0
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
2406
0
            &dat->ks.ks);
2407
0
        dat->block = (block128_f)AES_encrypt;
2408
0
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
2409
#ifdef AES_CTR_ASM
2410
        if (mode == EVP_CIPH_CTR_MODE)
2411
            dat->stream.ctr = (ctr128_f)AES_ctr32_encrypt;
2412
#endif
2413
0
    }
2414
2415
0
    if (ret < 0) {
2416
0
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
2417
0
        return 0;
2418
0
    }
2419
2420
0
    return 1;
2421
0
}
2422
2423
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2424
    const unsigned char *in, size_t len)
2425
0
{
2426
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2427
2428
0
    if (dat->stream.cbc)
2429
0
        (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv,
2430
0
            EVP_CIPHER_CTX_is_encrypting(ctx));
2431
0
    else if (EVP_CIPHER_CTX_is_encrypting(ctx))
2432
0
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv,
2433
0
            dat->block);
2434
0
    else
2435
0
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2436
0
            ctx->iv, dat->block);
2437
2438
0
    return 1;
2439
0
}
2440
2441
static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2442
    const unsigned char *in, size_t len)
2443
0
{
2444
0
    size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);
2445
0
    size_t i;
2446
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2447
2448
0
    if (len < bl)
2449
0
        return 1;
2450
2451
0
    for (i = 0, len -= bl; i <= len; i += bl)
2452
0
        (*dat->block)(in + i, out + i, &dat->ks);
2453
2454
0
    return 1;
2455
0
}
2456
2457
static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2458
    const unsigned char *in, size_t len)
2459
0
{
2460
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2461
2462
0
    int num = EVP_CIPHER_CTX_get_num(ctx);
2463
0
    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2464
0
        ctx->iv, &num, dat->block);
2465
0
    EVP_CIPHER_CTX_set_num(ctx, num);
2466
0
    return 1;
2467
0
}
2468
2469
static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2470
    const unsigned char *in, size_t len)
2471
0
{
2472
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2473
2474
0
    int num = EVP_CIPHER_CTX_get_num(ctx);
2475
0
    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2476
0
        ctx->iv, &num,
2477
0
        EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2478
0
    EVP_CIPHER_CTX_set_num(ctx, num);
2479
0
    return 1;
2480
0
}
2481
2482
static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2483
    const unsigned char *in, size_t len)
2484
0
{
2485
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2486
2487
0
    int num = EVP_CIPHER_CTX_get_num(ctx);
2488
0
    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2489
0
        ctx->iv, &num,
2490
0
        EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2491
0
    EVP_CIPHER_CTX_set_num(ctx, num);
2492
0
    return 1;
2493
0
}
2494
2495
static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2496
    const unsigned char *in, size_t len)
2497
0
{
2498
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2499
2500
0
    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2501
0
        int num = EVP_CIPHER_CTX_get_num(ctx);
2502
0
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2503
0
            ctx->iv, &num,
2504
0
            EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2505
0
        EVP_CIPHER_CTX_set_num(ctx, num);
2506
0
        return 1;
2507
0
    }
2508
2509
0
    while (len >= MAXBITCHUNK) {
2510
0
        int num = EVP_CIPHER_CTX_get_num(ctx);
2511
0
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2512
0
            ctx->iv, &num,
2513
0
            EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2514
0
        EVP_CIPHER_CTX_set_num(ctx, num);
2515
0
        len -= MAXBITCHUNK;
2516
0
        out += MAXBITCHUNK;
2517
0
        in += MAXBITCHUNK;
2518
0
    }
2519
0
    if (len) {
2520
0
        int num = EVP_CIPHER_CTX_get_num(ctx);
2521
0
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2522
0
            ctx->iv, &num,
2523
0
            EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
2524
0
        EVP_CIPHER_CTX_set_num(ctx, num);
2525
0
    }
2526
2527
0
    return 1;
2528
0
}
2529
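The first branch honours EVP_CIPH_FLAG_LENGTH_BITS, in which case len counts bits rather than bytes. A hedged sketch of requesting that behaviour through the public API; whether the flag is honoured on a given build should be verified, and cfb1_bits_sketch is an assumed name:

#include <openssl/evp.h>

/* Encrypt 10 bits with AES-128-CFB1, treating lengths as bit counts. */
static int cfb1_bits_sketch(const unsigned char key[16],
                            const unsigned char iv[16],
                            const unsigned char *in_bits,
                            unsigned char *out_bits)
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int outl, ok = 0;

    if (ctx != NULL && EVP_EncryptInit_ex(ctx, EVP_aes_128_cfb1(), NULL, key, iv)) {
        EVP_CIPHER_CTX_set_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS);
        ok = EVP_EncryptUpdate(ctx, out_bits, &outl, in_bits, 10);
    }
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}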
2530
static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2531
    const unsigned char *in, size_t len)
2532
0
{
2533
0
    int n = EVP_CIPHER_CTX_get_num(ctx);
2534
0
    unsigned int num;
2535
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
2536
2537
0
    if (n < 0)
2538
0
        return 0;
2539
0
    num = (unsigned int)n;
2540
2541
0
    if (dat->stream.ctr)
2542
0
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2543
0
            ctx->iv,
2544
0
            EVP_CIPHER_CTX_buf_noconst(ctx),
2545
0
            &num, dat->stream.ctr);
2546
0
    else
2547
0
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2548
0
            ctx->iv,
2549
0
            EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2550
0
            dat->block);
2551
0
    EVP_CIPHER_CTX_set_num(ctx, num);
2552
0
    return 1;
2553
0
}
2554
2555
BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2556
    BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2557
        BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2558
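Each pack above instantiates the per-mode EVP_CIPHER tables and their public accessors (EVP_aes_128_cbc(), EVP_aes_128_ctr(), and so on) for 128-, 192- and 256-bit keys. A small sanity check against the macro arguments, as a hypothetical standalone test:

#include <openssl/evp.h>
#include <assert.h>

int main(void)
{
    assert(EVP_CIPHER_get_key_length(EVP_aes_128_ctr()) == 16);
    assert(EVP_CIPHER_get_block_size(EVP_aes_128_ctr()) == 1);  /* stream-like */
    assert(EVP_CIPHER_get_block_size(EVP_aes_128_cbc()) == 16);
    return 0;
}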
2559
            static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2560
0
{
2561
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, c);
2562
0
    if (gctx == NULL)
2563
0
        return 0;
2564
0
    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2565
0
    if (gctx->iv != c->iv)
2566
0
        OPENSSL_free(gctx->iv);
2567
0
    return 1;
2568
0
}
2569
2570
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2571
0
{
2572
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, c);
2573
0
    switch (type) {
2574
0
    case EVP_CTRL_INIT:
2575
0
        gctx->key_set = 0;
2576
0
        gctx->iv_set = 0;
2577
0
        gctx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
2578
0
        gctx->iv = c->iv;
2579
0
        gctx->taglen = -1;
2580
0
        gctx->iv_gen = 0;
2581
0
        gctx->tls_aad_len = -1;
2582
0
        return 1;
2583
2584
0
    case EVP_CTRL_GET_IVLEN:
2585
0
        *(int *)ptr = gctx->ivlen;
2586
0
        return 1;
2587
2588
0
    case EVP_CTRL_AEAD_SET_IVLEN:
2589
0
        if (arg <= 0)
2590
0
            return 0;
2591
        /* Allocate memory for IV if needed */
2592
0
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2593
0
            if (gctx->iv != c->iv)
2594
0
                OPENSSL_free(gctx->iv);
2595
0
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2596
0
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
2597
0
                return 0;
2598
0
            }
2599
0
        }
2600
0
        gctx->ivlen = arg;
2601
0
        return 1;
2602
2603
0
    case EVP_CTRL_AEAD_SET_TAG:
2604
0
        if (arg <= 0 || arg > 16 || c->encrypt)
2605
0
            return 0;
2606
0
        memcpy(c->buf, ptr, arg);
2607
0
        gctx->taglen = arg;
2608
0
        return 1;
2609
2610
0
    case EVP_CTRL_AEAD_GET_TAG:
2611
0
        if (arg <= 0 || arg > 16 || !c->encrypt
2612
0
            || gctx->taglen < 0)
2613
0
            return 0;
2614
0
        memcpy(ptr, c->buf, arg);
2615
0
        return 1;
2616
2617
0
    case EVP_CTRL_GCM_SET_IV_FIXED:
2618
        /* Special case: -1 length restores whole IV */
2619
0
        if (arg == -1) {
2620
0
            memcpy(gctx->iv, ptr, gctx->ivlen);
2621
0
            gctx->iv_gen = 1;
2622
0
            return 1;
2623
0
        }
2624
        /*
2625
         * Fixed field must be at least 4 bytes and invocation field at least
2626
         * 8.
2627
         */
2628
0
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
2629
0
            return 0;
2630
0
        if (arg)
2631
0
            memcpy(gctx->iv, ptr, arg);
2632
0
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2633
0
            return 0;
2634
0
        gctx->iv_gen = 1;
2635
0
        return 1;
2636
2637
0
    case EVP_CTRL_GCM_IV_GEN:
2638
0
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
2639
0
            return 0;
2640
0
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2641
0
        if (arg <= 0 || arg > gctx->ivlen)
2642
0
            arg = gctx->ivlen;
2643
0
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2644
        /*
2645
         * The invocation field will be at least 8 bytes in size, so there is
2646
         * no need to check for wrap around or to increment more than the last 8 bytes.
2647
         */
2648
0
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
2649
0
        gctx->iv_set = 1;
2650
0
        return 1;
2651
2652
0
    case EVP_CTRL_GCM_SET_IV_INV:
2653
0
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2654
0
            return 0;
2655
0
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2656
0
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2657
0
        gctx->iv_set = 1;
2658
0
        return 1;
2659
2660
0
    case EVP_CTRL_AEAD_TLS1_AAD:
2661
        /* Save the AAD for later use */
2662
0
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
2663
0
            return 0;
2664
0
        memcpy(c->buf, ptr, arg);
2665
0
        gctx->tls_aad_len = arg;
2666
0
        gctx->tls_enc_records = 0;
2667
0
        {
2668
0
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2669
            /* Correct length for explicit IV */
2670
0
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2671
0
                return 0;
2672
0
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2673
            /* If decrypting, correct for the tag too */
2674
0
            if (!c->encrypt) {
2675
0
                if (len < EVP_GCM_TLS_TAG_LEN)
2676
0
                    return 0;
2677
0
                len -= EVP_GCM_TLS_TAG_LEN;
2678
0
            }
2679
0
            c->buf[arg - 2] = len >> 8;
2680
0
            c->buf[arg - 1] = len & 0xff;
2681
0
        }
2682
        /* Extra padding: tag appended to record */
2683
0
        return EVP_GCM_TLS_TAG_LEN;
2684
2685
0
    case EVP_CTRL_COPY: {
2686
0
        EVP_CIPHER_CTX *out = ptr;
2687
0
        EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX, out);
2688
0
        if (gctx->gcm.key) {
2689
0
            if (gctx->gcm.key != &gctx->ks)
2690
0
                return 0;
2691
0
            gctx_out->gcm.key = &gctx_out->ks;
2692
0
        }
2693
0
        if (gctx->iv == c->iv)
2694
0
            gctx_out->iv = out->iv;
2695
0
        else {
2696
0
            if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
2697
0
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
2698
0
                return 0;
2699
0
            }
2700
0
            memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2701
0
        }
2702
0
        return 1;
2703
0
    }
2704
2705
0
    default:
2706
0
        return -1;
2707
0
    }
2708
0
}
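/*
 * A hypothetical sketch of how the EVP_CTRL_GCM_SET_IV_FIXED /
 * EVP_CTRL_GCM_IV_GEN pair handled above is typically driven: install a
 * 4-byte fixed field once (the remaining 8 invocation bytes are randomised
 * on the encrypting side), then fetch the per-record explicit IV, which
 * also bumps the 64-bit invocation counter.  Buffer names and sizes are
 * illustrative only.
 */
#include <openssl/evp.h>

static int gcm_iv_gen_sketch(const unsigned char key[32],
                             const unsigned char fixed[4],
                             unsigned char explicit_iv[8])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int ok = 0;

    if (c == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, key, NULL)
         /* fixed field must be at least 4 bytes, invocation field at least 8 */
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_GCM_SET_IV_FIXED, 4, (void *)fixed)
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_GCM_IV_GEN, 8, explicit_iv);
    EVP_CIPHER_CTX_free(c);
    return ok;
}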
2709
2710
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2711
    const unsigned char *iv, int enc)
2712
0
{
2713
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
2714
0
    if (!iv && !key)
2715
0
        return 1;
2716
0
    if (key) {
2717
0
        do {
2718
#ifdef HWAES_CAPABLE
2719
            if (HWAES_CAPABLE) {
2720
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2721
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2722
                    (block128_f)HWAES_encrypt);
2723
#ifdef HWAES_ctr32_encrypt_blocks
2724
                gctx->ctr = (ctr128_f)HWAES_ctr32_encrypt_blocks;
2725
#else
2726
                gctx->ctr = NULL;
2727
#endif
2728
                break;
2729
            } else
2730
#endif
2731
0
#ifdef BSAES_CAPABLE
2732
0
                if (BSAES_CAPABLE) {
2733
0
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2734
0
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2735
0
                    (block128_f)AES_encrypt);
2736
0
                gctx->ctr = (ctr128_f)ossl_bsaes_ctr32_encrypt_blocks;
2737
0
                break;
2738
0
            } else
2739
0
#endif
2740
0
#ifdef VPAES_CAPABLE
2741
0
                if (VPAES_CAPABLE) {
2742
0
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2743
0
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2744
0
                    (block128_f)vpaes_encrypt);
2745
0
                gctx->ctr = NULL;
2746
0
                break;
2747
0
            } else
2748
0
#endif
2749
0
                (void)0; /* terminate potentially open 'else' */
2750
2751
0
            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
2752
0
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
2753
0
                (block128_f)AES_encrypt);
2754
#ifdef AES_CTR_ASM
2755
            gctx->ctr = (ctr128_f)AES_ctr32_encrypt;
2756
#else
2757
0
            gctx->ctr = NULL;
2758
0
#endif
2759
0
        } while (0);
2760
2761
        /*
2762
         * If we have an IV we can set it directly, otherwise use the saved IV.
2763
         */
2764
0
        if (iv == NULL && gctx->iv_set)
2765
0
            iv = gctx->iv;
2766
0
        if (iv) {
2767
0
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2768
0
            gctx->iv_set = 1;
2769
0
        }
2770
0
        gctx->key_set = 1;
2771
0
    } else {
2772
        /* If key set use IV, otherwise copy */
2773
0
        if (gctx->key_set)
2774
0
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
2775
0
        else
2776
0
            memcpy(gctx->iv, iv, gctx->ivlen);
2777
0
        gctx->iv_set = 1;
2778
0
        gctx->iv_gen = 0;
2779
0
    }
2780
0
    return 1;
2781
0
}
2782
2783
/*
2784
 * Handle TLS GCM packet format. This consists of the last portion of the IV
2785
 * followed by the payload and finally the tag. On encrypt generate IV,
2786
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
2787
 * and verify tag.
2788
 */
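/*
 * In other words (a sketch of the arithmetic, using the standard
 * EVP_GCM_TLS_* constants), each record looks like
 *
 *     explicit IV (8 bytes) | payload | tag (16 bytes)
 *
 * so at least 24 bytes of input are required and the payload length is
 * recovered as below; the helper name is hypothetical.
 */
static size_t gcm_tls_payload_len(size_t len)
{
    /* caller must already have checked len >= 8 + 16 */
    return len - EVP_GCM_TLS_EXPLICIT_IV_LEN - EVP_GCM_TLS_TAG_LEN;
}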
2789
2790
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2791
    const unsigned char *in, size_t len)
2792
0
{
2793
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
2794
0
    int rv = -1;
2795
    /* Encrypt/decrypt must be performed in place */
2796
0
    if (out != in
2797
0
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
2798
0
        return -1;
2799
2800
    /*
2801
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
2802
     * Requirements from SP 800-38D".  The requirement is for one party to the
2803
     * communication to fail after 2^64 - 1 records.  We do this on the encrypting
2804
     * side only.
2805
     */
2806
0
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
2807
0
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
2808
0
        goto err;
2809
0
    }
2810
2811
    /*
2812
     * Set IV from start of buffer or generate IV and write to start of
2813
     * buffer.
2814
     */
2815
0
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
2816
0
            EVP_GCM_TLS_EXPLICIT_IV_LEN, out)
2817
0
        <= 0)
2818
0
        goto err;
2819
    /* Use saved AAD */
2820
0
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
2821
0
        goto err;
2822
    /* Fix buffer and length to point to payload */
2823
0
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2824
0
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
2825
0
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2826
0
    if (ctx->encrypt) {
2827
        /* Encrypt payload */
2828
0
        if (gctx->ctr) {
2829
0
            size_t bulk = 0;
2830
0
#if defined(AES_GCM_ASM)
2831
0
            if (len >= 32 && AES_GCM_ASM(gctx)) {
2832
0
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2833
0
                    return -1;
2834
2835
0
                bulk = AES_gcm_encrypt(in, out, len,
2836
0
                    gctx->gcm.key,
2837
0
                    gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2838
0
                gctx->gcm.len.u[1] += bulk;
2839
0
            }
2840
0
#endif
2841
0
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2842
0
                    in + bulk,
2843
0
                    out + bulk,
2844
0
                    len - bulk, gctx->ctr))
2845
0
                goto err;
2846
0
        } else {
2847
0
            size_t bulk = 0;
2848
#if defined(AES_GCM_ASM2)
2849
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
2850
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
2851
                    return -1;
2852
2853
                bulk = AES_gcm_encrypt(in, out, len,
2854
                    gctx->gcm.key,
2855
                    gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2856
                gctx->gcm.len.u[1] += bulk;
2857
            }
2858
#endif
2859
0
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
2860
0
                    in + bulk, out + bulk, len - bulk))
2861
0
                goto err;
2862
0
        }
2863
0
        out += len;
2864
        /* Finally write tag */
2865
0
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
2866
0
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
2867
0
    } else {
2868
        /* Decrypt */
2869
0
        if (gctx->ctr) {
2870
0
            size_t bulk = 0;
2871
0
#if defined(AES_GCM_ASM)
2872
0
            if (len >= 16 && AES_GCM_ASM(gctx)) {
2873
0
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2874
0
                    return -1;
2875
2876
0
                bulk = AES_gcm_decrypt(in, out, len,
2877
0
                    gctx->gcm.key,
2878
0
                    gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2879
0
                gctx->gcm.len.u[1] += bulk;
2880
0
            }
2881
0
#endif
2882
0
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
2883
0
                    in + bulk,
2884
0
                    out + bulk,
2885
0
                    len - bulk, gctx->ctr))
2886
0
                goto err;
2887
0
        } else {
2888
0
            size_t bulk = 0;
2889
#if defined(AES_GCM_ASM2)
2890
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
2891
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
2892
                    return -1;
2893
2894
                bulk = AES_gcm_decrypt(in, out, len,
2895
                    gctx->gcm.key,
2896
                    gctx->gcm.Yi.c, gctx->gcm.Xi.u);
2897
                gctx->gcm.len.u[1] += bulk;
2898
            }
2899
#endif
2900
0
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
2901
0
                    in + bulk, out + bulk, len - bulk))
2902
0
                goto err;
2903
0
        }
2904
        /* Retrieve tag */
2905
0
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
2906
        /* If tag mismatch wipe buffer */
2907
0
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
2908
0
            OPENSSL_cleanse(out, len);
2909
0
            goto err;
2910
0
        }
2911
0
        rv = len;
2912
0
    }
2913
2914
0
err:
2915
0
    gctx->iv_set = 0;
2916
0
    gctx->tls_aad_len = -1;
2917
0
    return rv;
2918
0
}
2919
2920
#ifdef FIPS_MODULE
2921
/*
2922
 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVS and keys"
2923
 *
2924
 * See also 8.2.2 RBG-based construction.
2925
 * Random construction consists of a free field (which can be NULL) and a
2926
 * random field which will use a DRBG that can return at least 96 bits of
2927
 * entropy strength. (The DRBG must be seeded by the FIPS module).
2928
 */
2929
static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
2930
{
2931
    int sz = gctx->ivlen - offset;
2932
2933
    /* Must be at least 96 bits */
2934
    if (sz <= 0 || gctx->ivlen < 12)
2935
        return 0;
2936
2937
    /* Use DRBG to generate random iv */
2938
    if (RAND_bytes(gctx->iv + offset, sz) <= 0)
2939
        return 0;
2940
    return 1;
2941
}
2942
#endif /* FIPS_MODULE */
2943
2944
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2945
    const unsigned char *in, size_t len)
2946
0
{
2947
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
2948
2949
    /* If not set up, return error */
2950
0
    if (!gctx->key_set)
2951
0
        return -1;
2952
2953
0
    if (gctx->tls_aad_len >= 0)
2954
0
        return aes_gcm_tls_cipher(ctx, out, in, len);
2955
2956
#ifdef FIPS_MODULE
2957
    /*
2958
     * FIPS requires generation of AES-GCM IVs inside the FIPS module.
2959
     * The IV can still be set externally (the security policy will state that
2960
     * this is not FIPS compliant). There are some applications
2961
     * where setting the IV externally is the only option available.
2962
     */
2963
    if (!gctx->iv_set) {
2964
        if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
2965
            return -1;
2966
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2967
        gctx->iv_set = 1;
2968
        gctx->iv_gen_rand = 1;
2969
    }
2970
#else
2971
0
    if (!gctx->iv_set)
2972
0
        return -1;
2973
0
#endif /* FIPS_MODULE */
2974
2975
0
    if (in) {
2976
0
        if (out == NULL) {
2977
0
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
2978
0
                return -1;
2979
0
        } else if (ctx->encrypt) {
2980
0
            if (gctx->ctr) {
2981
0
                size_t bulk = 0;
2982
0
#if defined(AES_GCM_ASM)
2983
0
                if (len >= 32 && AES_GCM_ASM(gctx)) {
2984
0
                    size_t res = (16 - gctx->gcm.mres) % 16;
2985
2986
0
                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
2987
0
                        return -1;
2988
2989
0
                    bulk = AES_gcm_encrypt(in + res,
2990
0
                        out + res, len - res,
2991
0
                        gctx->gcm.key, gctx->gcm.Yi.c,
2992
0
                        gctx->gcm.Xi.u);
2993
0
                    gctx->gcm.len.u[1] += bulk;
2994
0
                    bulk += res;
2995
0
                }
2996
0
#endif
2997
0
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
2998
0
                        in + bulk,
2999
0
                        out + bulk,
3000
0
                        len - bulk, gctx->ctr))
3001
0
                    return -1;
3002
0
            } else {
3003
0
                size_t bulk = 0;
3004
#if defined(AES_GCM_ASM2)
3005
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
3006
                    size_t res = (16 - gctx->gcm.mres) % 16;
3007
3008
                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3009
                        return -1;
3010
3011
                    bulk = AES_gcm_encrypt(in + res,
3012
                        out + res, len - res,
3013
                        gctx->gcm.key, gctx->gcm.Yi.c,
3014
                        gctx->gcm.Xi.u);
3015
                    gctx->gcm.len.u[1] += bulk;
3016
                    bulk += res;
3017
                }
3018
#endif
3019
0
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3020
0
                        in + bulk, out + bulk, len - bulk))
3021
0
                    return -1;
3022
0
            }
3023
0
        } else {
3024
0
            if (gctx->ctr) {
3025
0
                size_t bulk = 0;
3026
0
#if defined(AES_GCM_ASM)
3027
0
                if (len >= 16 && AES_GCM_ASM(gctx)) {
3028
0
                    size_t res = (16 - gctx->gcm.mres) % 16;
3029
3030
0
                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3031
0
                        return -1;
3032
3033
0
                    bulk = AES_gcm_decrypt(in + res,
3034
0
                        out + res, len - res,
3035
0
                        gctx->gcm.key,
3036
0
                        gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3037
0
                    gctx->gcm.len.u[1] += bulk;
3038
0
                    bulk += res;
3039
0
                }
3040
0
#endif
3041
0
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3042
0
                        in + bulk,
3043
0
                        out + bulk,
3044
0
                        len - bulk, gctx->ctr))
3045
0
                    return -1;
3046
0
            } else {
3047
0
                size_t bulk = 0;
3048
#if defined(AES_GCM_ASM2)
3049
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
3050
                    size_t res = (16 - gctx->gcm.mres) % 16;
3051
3052
                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3053
                        return -1;
3054
3055
                    bulk = AES_gcm_decrypt(in + res,
3056
                        out + res, len - res,
3057
                        gctx->gcm.key,
3058
                        gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3059
                    gctx->gcm.len.u[1] += bulk;
3060
                    bulk += res;
3061
                }
3062
#endif
3063
0
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3064
0
                        in + bulk, out + bulk, len - bulk))
3065
0
                    return -1;
3066
0
            }
3067
0
        }
3068
0
        return len;
3069
0
    } else {
3070
0
        if (!ctx->encrypt) {
3071
0
            if (gctx->taglen < 0)
3072
0
                return -1;
3073
0
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3074
0
                return -1;
3075
0
            gctx->iv_set = 0;
3076
0
            return 0;
3077
0
        }
3078
0
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3079
0
        gctx->taglen = 16;
3080
        /* Don't reuse the IV */
3081
0
        gctx->iv_set = 0;
3082
0
        return 0;
3083
0
    }
3084
0
}
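/*
 * A minimal, hypothetical sketch of the ordinary (non-TLS) AEAD flow that
 * reaches the cipher routine above: AAD is supplied with a NULL output
 * pointer, then the plaintext, and the tag is collected after
 * finalisation.  A 12-byte IV and 16-byte tag are assumed; buffer names
 * are illustrative only.
 */
#include <openssl/evp.h>

static int gcm_encrypt_sketch(const unsigned char key[32],
                              const unsigned char iv[12],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl, tmplen, ok = 0;

    if (c == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(c, EVP_aes_256_gcm(), NULL, key, iv)
         /* AAD pass: out == NULL selects the CRYPTO_gcm128_aad() branch */
         && EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen)
         && EVP_EncryptUpdate(c, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(c, ct + outl, &tmplen)
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(c);
    return ok;
}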
3085
3086
#define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1       \
3087
    | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3088
    | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT   \
3089
    | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
3090
3091
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3092
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3093
    BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3094
        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3095
        BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3096
            EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3097
3098
            static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3099
0
{
3100
0
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3101
3102
0
    if (type == EVP_CTRL_COPY) {
3103
0
        EVP_CIPHER_CTX *out = ptr;
3104
0
        EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX, out);
3105
3106
0
        if (xctx->xts.key1) {
3107
0
            if (xctx->xts.key1 != &xctx->ks1)
3108
0
                return 0;
3109
0
            xctx_out->xts.key1 = &xctx_out->ks1;
3110
0
        }
3111
0
        if (xctx->xts.key2) {
3112
0
            if (xctx->xts.key2 != &xctx->ks2)
3113
0
                return 0;
3114
0
            xctx_out->xts.key2 = &xctx_out->ks2;
3115
0
        }
3116
0
        return 1;
3117
0
    } else if (type != EVP_CTRL_INIT)
3118
0
        return -1;
3119
    /* key1 and key2 are used as an indicator both key and IV are set */
3120
0
    xctx->xts.key1 = NULL;
3121
0
    xctx->xts.key2 = NULL;
3122
0
    return 1;
3123
0
}
3124
3125
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3126
    const unsigned char *iv, int enc)
3127
0
{
3128
0
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
3129
3130
0
    if (!iv && !key)
3131
0
        return 1;
3132
3133
0
    if (key) {
3134
0
        do {
3135
            /* The key is really two half-length keys */
3136
0
            const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
3137
0
            const int bits = bytes * 8;
3138
3139
            /*
3140
             * Verify that the two keys are different.
3141
             *
3142
             * This addresses the vulnerability described in Rogaway's
3143
             * September 2004 paper:
3144
             *
3145
             *      "Efficient Instantiations of Tweakable Blockciphers and
3146
             *       Refinements to Modes OCB and PMAC".
3147
             *      (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3148
             *
3149
             * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3150
             * that:
3151
             *      "The check for Key_1 != Key_2 shall be done at any place
3152
             *       BEFORE using the keys in the XTS-AES algorithm to process
3153
             *       data with them."
3154
             */
3155
0
            if ((!allow_insecure_decrypt || enc)
3156
0
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
3157
0
                ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
3158
0
                return 0;
3159
0
            }
3160
3161
#ifdef AES_XTS_ASM
3162
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3163
#else
3164
0
            xctx->stream = NULL;
3165
0
#endif
3166
            /* key_len is two AES keys */
3167
#ifdef HWAES_CAPABLE
3168
            if (HWAES_CAPABLE) {
3169
                if (enc) {
3170
                    HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3171
                    xctx->xts.block1 = (block128_f)HWAES_encrypt;
3172
#ifdef HWAES_xts_encrypt
3173
                    xctx->stream = HWAES_xts_encrypt;
3174
#endif
3175
                } else {
3176
                    HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3177
                    xctx->xts.block1 = (block128_f)HWAES_decrypt;
3178
#ifdef HWAES_xts_decrypt
3179
                    xctx->stream = HWAES_xts_decrypt;
3180
#endif
3181
                }
3182
3183
                HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3184
                xctx->xts.block2 = (block128_f)HWAES_encrypt;
3185
3186
                xctx->xts.key1 = &xctx->ks1;
3187
                break;
3188
            } else
3189
#endif
3190
0
#ifdef BSAES_CAPABLE
3191
0
                if (BSAES_CAPABLE)
3192
0
                xctx->stream = enc ? ossl_bsaes_xts_encrypt : ossl_bsaes_xts_decrypt;
3193
0
            else
3194
0
#endif
3195
0
#ifdef VPAES_CAPABLE
3196
0
                if (VPAES_CAPABLE) {
3197
0
                if (enc) {
3198
0
                    vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
3199
0
                    xctx->xts.block1 = (block128_f)vpaes_encrypt;
3200
0
                } else {
3201
0
                    vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
3202
0
                    xctx->xts.block1 = (block128_f)vpaes_decrypt;
3203
0
                }
3204
3205
0
                vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3206
0
                xctx->xts.block2 = (block128_f)vpaes_encrypt;
3207
3208
0
                xctx->xts.key1 = &xctx->ks1;
3209
0
                break;
3210
0
            } else
3211
0
#endif
3212
0
                (void)0; /* terminate potentially open 'else' */
3213
3214
0
            if (enc) {
3215
0
                AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
3216
0
                xctx->xts.block1 = (block128_f)AES_encrypt;
3217
0
            } else {
3218
0
                AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
3219
0
                xctx->xts.block1 = (block128_f)AES_decrypt;
3220
0
            }
3221
3222
0
            AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
3223
0
            xctx->xts.block2 = (block128_f)AES_encrypt;
3224
3225
0
            xctx->xts.key1 = &xctx->ks1;
3226
0
        } while (0);
3227
0
    }
3228
3229
0
    if (iv) {
3230
0
        xctx->xts.key2 = &xctx->ks2;
3231
0
        memcpy(ctx->iv, iv, 16);
3232
0
    }
3233
3234
0
    return 1;
3235
0
}
3236
3237
static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3238
    const unsigned char *in, size_t len)
3239
0
{
3240
0
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
3241
3242
0
    if (xctx->xts.key1 == NULL
3243
0
        || xctx->xts.key2 == NULL
3244
0
        || out == NULL
3245
0
        || in == NULL
3246
0
        || len < AES_BLOCK_SIZE)
3247
0
        return 0;
3248
3249
    /*
3250
     * Impose a limit of 2^20 blocks per data unit as specified by
3251
     * IEEE Std 1619-2018.  The earlier and obsolete IEEE Std 1619-2007
3252
     * indicated that this was a SHOULD NOT rather than a MUST NOT.
3253
     * NIST SP 800-38E mandates the same limit.
3254
     */
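    /*
     * Worked out: 2^20 blocks of 16 bytes each cap a single data unit at
     * 2^24 bytes, i.e. 1048576 * 16 = 16777216 bytes (16 MiB).
     */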
3255
0
    if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
3256
0
        ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
3257
0
        return 0;
3258
0
    }
3259
3260
0
    if (xctx->stream)
3261
0
        (*xctx->stream)(in, out, len,
3262
0
            xctx->xts.key1, xctx->xts.key2,
3263
0
            ctx->iv);
3264
0
    else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
3265
0
                 EVP_CIPHER_CTX_is_encrypting(ctx)))
3266
0
        return 0;
3267
0
    return 1;
3268
0
}
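/*
 * A minimal, hypothetical XTS usage sketch.  The EVP key is double length
 * (64 bytes for aes-256-xts) and, as checked in aes_xts_init_key(), its
 * two halves must differ; the 16-byte "IV" is the tweak, normally the
 * sector or data-unit number.  Buffer names are illustrative only and the
 * input must be at least one block (16 bytes).
 */
#include <openssl/evp.h>

static int xts_encrypt_sketch(const unsigned char key[64],
                              const unsigned char tweak[16],
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct)
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl, tmplen, ok = 0;

    if (c == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(c, EVP_aes_256_xts(), NULL, key, tweak)
         && EVP_EncryptUpdate(c, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(c, ct + outl, &tmplen);
    EVP_CIPHER_CTX_free(c);
    return ok;
}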
3269
3270
#define aes_xts_cleanup NULL
3271
3272
#define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3273
    | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT               \
3274
    | EVP_CIPH_CUSTOM_COPY)
3275
3276
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3277
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
3278
3279
        static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3280
0
{
3281
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, c);
3282
0
    switch (type) {
3283
0
    case EVP_CTRL_INIT:
3284
0
        cctx->key_set = 0;
3285
0
        cctx->iv_set = 0;
3286
0
        cctx->L = 8;
3287
0
        cctx->M = 12;
3288
0
        cctx->tag_set = 0;
3289
0
        cctx->len_set = 0;
3290
0
        cctx->tls_aad_len = -1;
3291
0
        return 1;
3292
3293
0
    case EVP_CTRL_GET_IVLEN:
3294
0
        *(int *)ptr = 15 - cctx->L;
3295
0
        return 1;
3296
3297
0
    case EVP_CTRL_AEAD_TLS1_AAD:
3298
        /* Save the AAD for later use */
3299
0
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
3300
0
            return 0;
3301
0
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3302
0
        cctx->tls_aad_len = arg;
3303
0
        {
3304
0
            uint16_t len = EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3305
0
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3306
            /* Correct length for explicit IV */
3307
0
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3308
0
                return 0;
3309
0
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3310
            /* If decrypting, correct for the tag too */
3311
0
            if (!EVP_CIPHER_CTX_is_encrypting(c)) {
3312
0
                if (len < cctx->M)
3313
0
                    return 0;
3314
0
                len -= cctx->M;
3315
0
            }
3316
0
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3317
0
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3318
0
        }
3319
        /* Extra padding: tag appended to record */
3320
0
        return cctx->M;
3321
3322
0
    case EVP_CTRL_CCM_SET_IV_FIXED:
3323
        /* Sanity check length */
3324
0
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3325
0
            return 0;
3326
        /* Just copy to first part of IV */
3327
0
        memcpy(c->iv, ptr, arg);
3328
0
        return 1;
3329
3330
0
    case EVP_CTRL_AEAD_SET_IVLEN:
3331
0
        arg = 15 - arg;
3332
        /* fall thru */
3333
0
    case EVP_CTRL_CCM_SET_L:
3334
0
        if (arg < 2 || arg > 8)
3335
0
            return 0;
3336
0
        cctx->L = arg;
3337
0
        return 1;
3338
3339
0
    case EVP_CTRL_AEAD_SET_TAG:
3340
0
        if ((arg & 1) || arg < 4 || arg > 16)
3341
0
            return 0;
3342
0
        if (EVP_CIPHER_CTX_is_encrypting(c) && ptr)
3343
0
            return 0;
3344
0
        if (ptr) {
3345
0
            cctx->tag_set = 1;
3346
0
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3347
0
        }
3348
0
        cctx->M = arg;
3349
0
        return 1;
3350
3351
0
    case EVP_CTRL_AEAD_GET_TAG:
3352
0
        if (!EVP_CIPHER_CTX_is_encrypting(c) || !cctx->tag_set)
3353
0
            return 0;
3354
0
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3355
0
            return 0;
3356
0
        cctx->tag_set = 0;
3357
0
        cctx->iv_set = 0;
3358
0
        cctx->len_set = 0;
3359
0
        return 1;
3360
3361
0
    case EVP_CTRL_COPY: {
3362
0
        EVP_CIPHER_CTX *out = ptr;
3363
0
        EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX, out);
3364
0
        if (cctx->ccm.key) {
3365
0
            if (cctx->ccm.key != &cctx->ks)
3366
0
                return 0;
3367
0
            cctx_out->ccm.key = &cctx_out->ks;
3368
0
        }
3369
0
        return 1;
3370
0
    }
3371
3372
0
    default:
3373
0
        return -1;
3374
0
    }
3375
0
}
3376
3377
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3378
    const unsigned char *iv, int enc)
3379
0
{
3380
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
3381
0
    if (!iv && !key)
3382
0
        return 1;
3383
0
    if (key)
3384
0
        do {
3385
#ifdef HWAES_CAPABLE
3386
            if (HWAES_CAPABLE) {
3387
                HWAES_set_encrypt_key(key,
3388
                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3389
                    &cctx->ks.ks);
3390
3391
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3392
                    &cctx->ks, (block128_f)HWAES_encrypt);
3393
                cctx->str = NULL;
3394
                cctx->key_set = 1;
3395
                break;
3396
            } else
3397
#endif
3398
0
#ifdef VPAES_CAPABLE
3399
0
                if (VPAES_CAPABLE) {
3400
0
                vpaes_set_encrypt_key(key,
3401
0
                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3402
0
                    &cctx->ks.ks);
3403
0
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3404
0
                    &cctx->ks, (block128_f)vpaes_encrypt);
3405
0
                cctx->str = NULL;
3406
0
                cctx->key_set = 1;
3407
0
                break;
3408
0
            }
3409
0
#endif
3410
0
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3411
0
                &cctx->ks.ks);
3412
0
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3413
0
                &cctx->ks, (block128_f)AES_encrypt);
3414
0
            cctx->str = NULL;
3415
0
            cctx->key_set = 1;
3416
0
        } while (0);
3417
0
    if (iv) {
3418
0
        memcpy(ctx->iv, iv, 15 - cctx->L);
3419
0
        cctx->iv_set = 1;
3420
0
    }
3421
0
    return 1;
3422
0
}
3423
3424
static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3425
    const unsigned char *in, size_t len)
3426
0
{
3427
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
3428
0
    CCM128_CONTEXT *ccm = &cctx->ccm;
3429
    /* Encrypt/decrypt must be performed in place */
3430
0
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3431
0
        return -1;
3432
    /* If encrypting set explicit IV from sequence number (start of AAD) */
3433
0
    if (EVP_CIPHER_CTX_is_encrypting(ctx))
3434
0
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3435
0
            EVP_CCM_TLS_EXPLICIT_IV_LEN);
3436
    /* Get rest of IV from explicit IV */
3437
0
    memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
3438
0
        EVP_CCM_TLS_EXPLICIT_IV_LEN);
3439
    /* Correct length value */
3440
0
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3441
0
    if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
3442
0
            len))
3443
0
        return -1;
3444
    /* Use saved AAD */
3445
0
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx),
3446
0
        cctx->tls_aad_len);
3447
    /* Fix buffer to point to payload */
3448
0
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3449
0
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3450
0
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3451
0
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3452
0
                            cctx->str)
3453
0
                      : CRYPTO_ccm128_encrypt(ccm, in, out, len))
3454
0
            return -1;
3455
0
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3456
0
            return -1;
3457
0
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3458
0
    } else {
3459
0
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3460
0
                            cctx->str)
3461
0
                      : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3462
0
            unsigned char tag[16];
3463
0
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3464
0
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3465
0
                    return len;
3466
0
            }
3467
0
        }
3468
0
        OPENSSL_cleanse(out, len);
3469
0
        return -1;
3470
0
    }
3471
0
}
3472
3473
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3474
    const unsigned char *in, size_t len)
3475
0
{
3476
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX, ctx);
3477
0
    CCM128_CONTEXT *ccm = &cctx->ccm;
3478
    /* If not set up, return error */
3479
0
    if (!cctx->key_set)
3480
0
        return -1;
3481
3482
0
    if (cctx->tls_aad_len >= 0)
3483
0
        return aes_ccm_tls_cipher(ctx, out, in, len);
3484
3485
    /* EVP_*Final() doesn't return any data */
3486
0
    if (in == NULL && out != NULL)
3487
0
        return 0;
3488
3489
0
    if (!cctx->iv_set)
3490
0
        return -1;
3491
3492
0
    if (!out) {
3493
0
        if (!in) {
3494
0
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv,
3495
0
                    15 - cctx->L, len))
3496
0
                return -1;
3497
0
            cctx->len_set = 1;
3498
0
            return len;
3499
0
        }
3500
        /* If we have AAD we need the message length */
3501
0
        if (!cctx->len_set && len)
3502
0
            return -1;
3503
0
        CRYPTO_ccm128_aad(ccm, in, len);
3504
0
        return len;
3505
0
    }
3506
3507
    /* The tag must be set before actually decrypting data */
3508
0
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && !cctx->tag_set)
3509
0
        return -1;
3510
3511
    /* If not set length yet do it */
3512
0
    if (!cctx->len_set) {
3513
0
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
3514
0
            return -1;
3515
0
        cctx->len_set = 1;
3516
0
    }
3517
0
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3518
0
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3519
0
                            cctx->str)
3520
0
                      : CRYPTO_ccm128_encrypt(ccm, in, out, len))
3521
0
            return -1;
3522
0
        cctx->tag_set = 1;
3523
0
        return len;
3524
0
    } else {
3525
0
        int rv = -1;
3526
0
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3527
0
                            cctx->str)
3528
0
                      : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3529
0
            unsigned char tag[16];
3530
0
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3531
0
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
3532
0
                        cctx->M))
3533
0
                    rv = len;
3534
0
            }
3535
0
        }
3536
0
        if (rv == -1)
3537
0
            OPENSSL_cleanse(out, len);
3538
0
        cctx->iv_set = 0;
3539
0
        cctx->tag_set = 0;
3540
0
        cctx->len_set = 0;
3541
0
        return rv;
3542
0
    }
3543
0
}
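/*
 * A hypothetical sketch of the CCM call sequence that drives the cipher
 * routine above.  CCM needs the nonce length (15 - L), the tag length and
 * the total plaintext length up front, which is why the length-only
 * EVP_EncryptUpdate() call with both buffers NULL comes before the AAD.
 * The 7-byte nonce and 14-byte tag are illustrative choices.
 */
#include <openssl/evp.h>

static int ccm_encrypt_sketch(const unsigned char key[16],
                              const unsigned char nonce[7],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct, unsigned char tag[14])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl, tmplen, ok = 0;

    if (c == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(c, EVP_aes_128_ccm(), NULL, NULL, NULL)
         /* 7-byte nonce, so L = 15 - 7 = 8 */
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_IVLEN, 7, NULL)
         /* tag length only (NULL pointer) when encrypting */
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_SET_TAG, 14, NULL)
         && EVP_EncryptInit_ex(c, NULL, NULL, key, nonce)
         /* announce the total plaintext length before any AAD or data */
         && EVP_EncryptUpdate(c, NULL, &outl, NULL, ptlen)
         && EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen)
         && EVP_EncryptUpdate(c, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(c, ct + outl, &tmplen)
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 14, tag);
    EVP_CIPHER_CTX_free(c);
    return ok;
}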
3544
3545
#define aes_ccm_cleanup NULL
3546
3547
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3548
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3549
    BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3550
        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3551
        BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3552
            EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3553
3554
            typedef struct {
3555
    union {
3556
        OSSL_UNION_ALIGN;
3557
        AES_KEY ks;
3558
    } ks;
3559
    /* Pointer to the IV; NULL indicates the IV has not been set */
3560
    unsigned char *iv;
3561
} EVP_AES_WRAP_CTX;
3562
3563
static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3564
    const unsigned char *iv, int enc)
3565
0
{
3566
0
    int len;
3567
0
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX, ctx);
3568
3569
0
    if (iv == NULL && key == NULL)
3570
0
        return 1;
3571
0
    if (key != NULL) {
3572
0
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
3573
0
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3574
0
                &wctx->ks.ks);
3575
0
        else
3576
0
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3577
0
                &wctx->ks.ks);
3578
0
        if (iv == NULL)
3579
0
            wctx->iv = NULL;
3580
0
    }
3581
0
    if (iv != NULL) {
3582
0
        if ((len = EVP_CIPHER_CTX_get_iv_length(ctx)) < 0)
3583
0
            return 0;
3584
0
        memcpy(ctx->iv, iv, len);
3585
0
        wctx->iv = ctx->iv;
3586
0
    }
3587
0
    return 1;
3588
0
}
3589
3590
static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3591
    const unsigned char *in, size_t inlen)
3592
0
{
3593
0
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX, ctx);
3594
0
    size_t rv;
3595
    /* AES wrap with padding has IV length of 4, without padding 8 */
3596
0
    int pad = EVP_CIPHER_CTX_get_iv_length(ctx) == 4;
3597
    /* No final operation so always return zero length */
3598
0
    if (!in)
3599
0
        return 0;
3600
    /* Input length must always be non-zero */
3601
0
    if (!inlen)
3602
0
        return -1;
3603
    /* If decrypting need at least 16 bytes and multiple of 8 */
3604
0
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
3605
0
        return -1;
3606
    /* If not padding input must be multiple of 8 */
3607
0
    if (!pad && inlen & 0x7)
3608
0
        return -1;
3609
0
    if (ossl_is_partially_overlapping(out, in, inlen)) {
3610
0
        ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
3611
0
        return 0;
3612
0
    }
3613
0
    if (!out) {
3614
0
        if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3615
            /* If padding round up to multiple of 8 */
3616
0
            if (pad)
3617
0
                inlen = (inlen + 7) / 8 * 8;
3618
            /* 8 byte prefix */
3619
0
            return inlen + 8;
3620
0
        } else {
3621
            /*
3622
             * If not padding, the output will be exactly 8 bytes smaller than
3623
             * the input. If padding, it will be at least 8 bytes smaller, but we
3624
             * don't know by how much.
3625
             */
3626
0
            return inlen - 8;
3627
0
        }
3628
0
    }
3629
0
    if (pad) {
3630
0
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
3631
0
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
3632
0
                out, in, inlen,
3633
0
                (block128_f)AES_encrypt);
3634
0
        else
3635
0
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
3636
0
                out, in, inlen,
3637
0
                (block128_f)AES_decrypt);
3638
0
    } else {
3639
0
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
3640
0
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
3641
0
                out, in, inlen, (block128_f)AES_encrypt);
3642
0
        else
3643
0
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
3644
0
                out, in, inlen, (block128_f)AES_decrypt);
3645
0
    }
3646
0
    return rv ? (int)rv : -1;
3647
0
}
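/*
 * A minimal, hypothetical key-wrap sketch: wrapping 32 bytes of key
 * material under an AES-256 KEK.  The unpadded variant needs input that is
 * a multiple of 8 bytes and produces inlen + 8 bytes of output (the 8-byte
 * integrity prefix); the *_wrap_pad ciphers first round the input up to a
 * multiple of 8.  The legacy code path also requires the
 * EVP_CIPHER_CTX_FLAG_WRAP_ALLOW flag.  Buffer names are illustrative.
 */
#include <openssl/evp.h>

static int wrap_sketch(const unsigned char kek[32],
                       const unsigned char keydata[32],
                       unsigned char wrapped[40])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl, ok = 0;

    if (c == NULL)
        return 0;
    EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
    ok = EVP_EncryptInit_ex(c, EVP_aes_256_wrap(), NULL, kek, NULL)
         && EVP_EncryptUpdate(c, wrapped, &outl, keydata, 32); /* outl == 40 */
    EVP_CIPHER_CTX_free(c);
    return ok;
}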
3648
3649
#define WRAP_FLAGS (EVP_CIPH_WRAP_MODE                 \
3650
    | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3651
    | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3652
3653
static const EVP_CIPHER aes_128_wrap = {
3654
    NID_id_aes128_wrap,
3655
    8, 16, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3656
    aes_wrap_init_key, aes_wrap_cipher,
3657
    NULL,
3658
    sizeof(EVP_AES_WRAP_CTX),
3659
    NULL, NULL, NULL, NULL
3660
};
3661
3662
const EVP_CIPHER *EVP_aes_128_wrap(void)
3663
164
{
3664
164
    return &aes_128_wrap;
3665
164
}
3666
3667
static const EVP_CIPHER aes_192_wrap = {
3668
    NID_id_aes192_wrap,
3669
    8, 24, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3670
    aes_wrap_init_key, aes_wrap_cipher,
3671
    NULL,
3672
    sizeof(EVP_AES_WRAP_CTX),
3673
    NULL, NULL, NULL, NULL
3674
};
3675
3676
const EVP_CIPHER *EVP_aes_192_wrap(void)
3677
164
{
3678
164
    return &aes_192_wrap;
3679
164
}
3680
3681
static const EVP_CIPHER aes_256_wrap = {
3682
    NID_id_aes256_wrap,
3683
    8, 32, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3684
    aes_wrap_init_key, aes_wrap_cipher,
3685
    NULL,
3686
    sizeof(EVP_AES_WRAP_CTX),
3687
    NULL, NULL, NULL, NULL
3688
};
3689
3690
const EVP_CIPHER *EVP_aes_256_wrap(void)
3691
164
{
3692
164
    return &aes_256_wrap;
3693
164
}
3694
3695
static const EVP_CIPHER aes_128_wrap_pad = {
3696
    NID_id_aes128_wrap_pad,
3697
    8, 16, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3698
    aes_wrap_init_key, aes_wrap_cipher,
3699
    NULL,
3700
    sizeof(EVP_AES_WRAP_CTX),
3701
    NULL, NULL, NULL, NULL
3702
};
3703
3704
const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
3705
164
{
3706
164
    return &aes_128_wrap_pad;
3707
164
}
3708
3709
static const EVP_CIPHER aes_192_wrap_pad = {
3710
    NID_id_aes192_wrap_pad,
3711
    8, 24, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3712
    aes_wrap_init_key, aes_wrap_cipher,
3713
    NULL,
3714
    sizeof(EVP_AES_WRAP_CTX),
3715
    NULL, NULL, NULL, NULL
3716
};
3717
3718
const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
3719
164
{
3720
164
    return &aes_192_wrap_pad;
3721
164
}
3722
3723
static const EVP_CIPHER aes_256_wrap_pad = {
3724
    NID_id_aes256_wrap_pad,
3725
    8, 32, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
3726
    aes_wrap_init_key, aes_wrap_cipher,
3727
    NULL,
3728
    sizeof(EVP_AES_WRAP_CTX),
3729
    NULL, NULL, NULL, NULL
3730
};
3731
3732
const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
3733
164
{
3734
164
    return &aes_256_wrap_pad;
3735
164
}
3736
3737
#ifndef OPENSSL_NO_OCB
3738
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3739
0
{
3740
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, c);
3741
0
    EVP_CIPHER_CTX *newc;
3742
0
    EVP_AES_OCB_CTX *new_octx;
3743
3744
0
    switch (type) {
3745
0
    case EVP_CTRL_INIT:
3746
0
        octx->key_set = 0;
3747
0
        octx->iv_set = 0;
3748
0
        octx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
3749
0
        octx->iv = c->iv;
3750
0
        octx->taglen = 16;
3751
0
        octx->data_buf_len = 0;
3752
0
        octx->aad_buf_len = 0;
3753
0
        return 1;
3754
3755
0
    case EVP_CTRL_GET_IVLEN:
3756
0
        *(int *)ptr = octx->ivlen;
3757
0
        return 1;
3758
3759
0
    case EVP_CTRL_AEAD_SET_IVLEN:
3760
        /* IV len must be 1 to 15 */
3761
0
        if (arg <= 0 || arg > 15)
3762
0
            return 0;
3763
3764
0
        octx->ivlen = arg;
3765
0
        return 1;
3766
3767
0
    case EVP_CTRL_AEAD_SET_TAG:
3768
0
        if (ptr == NULL) {
3769
            /* Tag len must be 0 to 16 */
3770
0
            if (arg < 0 || arg > 16)
3771
0
                return 0;
3772
3773
0
            octx->taglen = arg;
3774
0
            return 1;
3775
0
        }
3776
0
        if (arg != octx->taglen || EVP_CIPHER_CTX_is_encrypting(c))
3777
0
            return 0;
3778
0
        memcpy(octx->tag, ptr, arg);
3779
0
        return 1;
3780
3781
0
    case EVP_CTRL_AEAD_GET_TAG:
3782
0
        if (arg != octx->taglen || !EVP_CIPHER_CTX_is_encrypting(c))
3783
0
            return 0;
3784
3785
0
        memcpy(ptr, octx->tag, arg);
3786
0
        return 1;
3787
3788
0
    case EVP_CTRL_COPY:
3789
0
        newc = (EVP_CIPHER_CTX *)ptr;
3790
0
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX, newc);
3791
0
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
3792
0
            &new_octx->ksenc.ks,
3793
0
            &new_octx->ksdec.ks);
3794
3795
0
    default:
3796
0
        return -1;
3797
0
    }
3798
0
}
3799
3800
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3801
    const unsigned char *iv, int enc)
3802
0
{
3803
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
3804
0
    if (!iv && !key)
3805
0
        return 1;
3806
0
    if (key) {
3807
0
        do {
3808
            /*
3809
             * We set both the encrypt and decrypt key here because decrypt
3810
             * needs both. We could possibly optimise to remove setting the
3811
             * decrypt for an encryption operation.
3812
             */
3813
#ifdef HWAES_CAPABLE
3814
            if (HWAES_CAPABLE) {
3815
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3816
                    &octx->ksenc.ks);
3817
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3818
                    &octx->ksdec.ks);
3819
                if (!CRYPTO_ocb128_init(&octx->ocb,
3820
                        &octx->ksenc.ks, &octx->ksdec.ks,
3821
                        (block128_f)HWAES_encrypt,
3822
                        (block128_f)HWAES_decrypt,
3823
                        enc ? HWAES_ocb_encrypt
3824
                            : HWAES_ocb_decrypt))
3825
                    return 0;
3826
                break;
3827
            }
3828
#endif
3829
0
#ifdef VPAES_CAPABLE
3830
0
            if (VPAES_CAPABLE) {
3831
0
                vpaes_set_encrypt_key(key,
3832
0
                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3833
0
                    &octx->ksenc.ks);
3834
0
                vpaes_set_decrypt_key(key,
3835
0
                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3836
0
                    &octx->ksdec.ks);
3837
0
                if (!CRYPTO_ocb128_init(&octx->ocb,
3838
0
                        &octx->ksenc.ks, &octx->ksdec.ks,
3839
0
                        (block128_f)vpaes_encrypt,
3840
0
                        (block128_f)vpaes_decrypt,
3841
0
                        NULL))
3842
0
                    return 0;
3843
0
                break;
3844
0
            }
3845
0
#endif
3846
0
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3847
0
                &octx->ksenc.ks);
3848
0
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
3849
0
                &octx->ksdec.ks);
3850
0
            if (!CRYPTO_ocb128_init(&octx->ocb,
3851
0
                    &octx->ksenc.ks, &octx->ksdec.ks,
3852
0
                    (block128_f)AES_encrypt,
3853
0
                    (block128_f)AES_decrypt,
3854
0
                    NULL))
3855
0
                return 0;
3856
0
        } while (0);
3857
3858
        /*
3859
         * If we have an IV we can set it directly, otherwise use the saved IV.
3860
         */
3861
0
        if (iv == NULL && octx->iv_set)
3862
0
            iv = octx->iv;
3863
0
        if (iv) {
3864
0
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
3865
0
                != 1)
3866
0
                return 0;
3867
0
            octx->iv_set = 1;
3868
0
        }
3869
0
        octx->key_set = 1;
3870
0
    } else {
3871
        /* If key set use IV, otherwise copy */
3872
0
        if (octx->key_set)
3873
0
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
3874
0
        else
3875
0
            memcpy(octx->iv, iv, octx->ivlen);
3876
0
        octx->iv_set = 1;
3877
0
    }
3878
0
    return 1;
3879
0
}
3880
3881
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3882
    const unsigned char *in, size_t len)
3883
0
{
3884
0
    unsigned char *buf;
3885
0
    int *buf_len;
3886
0
    int written_len = 0;
3887
0
    size_t trailing_len;
3888
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, ctx);
3889
3890
    /* If IV or Key not set then return error */
3891
0
    if (!octx->iv_set)
3892
0
        return -1;
3893
3894
0
    if (!octx->key_set)
3895
0
        return -1;
3896
3897
0
    if (in != NULL) {
3898
        /*
3899
         * Need to ensure we are only passing full blocks to low level OCB
3900
         * routines. We do it here rather than in EVP_EncryptUpdate/
3901
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
3902
         * and those routines don't support that
3903
         */
3904
3905
        /* Are we dealing with AAD or normal data here? */
3906
0
        if (out == NULL) {
3907
0
            buf = octx->aad_buf;
3908
0
            buf_len = &(octx->aad_buf_len);
3909
0
        } else {
3910
0
            buf = octx->data_buf;
3911
0
            buf_len = &(octx->data_buf_len);
3912
3913
0
            if (ossl_is_partially_overlapping(out + *buf_len, in, len)) {
3914
0
                ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
3915
0
                return 0;
3916
0
            }
3917
0
        }
3918
3919
        /*
3920
         * If we've got a partially filled buffer from a previous call then
3921
         * use that data first
3922
         */
3923
0
        if (*buf_len > 0) {
3924
0
            unsigned int remaining;
3925
3926
0
            remaining = AES_BLOCK_SIZE - (*buf_len);
3927
0
            if (remaining > len) {
3928
0
                memcpy(buf + (*buf_len), in, len);
3929
0
                *(buf_len) += len;
3930
0
                return 0;
3931
0
            }
3932
0
            memcpy(buf + (*buf_len), in, remaining);
3933
3934
            /*
3935
             * If we get here we've filled the buffer, so process it
3936
             */
3937
0
            len -= remaining;
3938
0
            in += remaining;
3939
0
            if (out == NULL) {
3940
0
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
3941
0
                    return -1;
3942
0
            } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3943
0
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
3944
0
                        AES_BLOCK_SIZE))
3945
0
                    return -1;
3946
0
            } else {
3947
0
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
3948
0
                        AES_BLOCK_SIZE))
3949
0
                    return -1;
3950
0
            }
3951
0
            written_len = AES_BLOCK_SIZE;
3952
0
            *buf_len = 0;
3953
0
            if (out != NULL)
3954
0
                out += AES_BLOCK_SIZE;
3955
0
        }
3956
3957
        /* Do we have a partial block to handle at the end? */
3958
0
        trailing_len = len % AES_BLOCK_SIZE;
3959
3960
        /*
3961
         * If we've got some full blocks to handle, then process these first
3962
         */
3963
0
        if (len != trailing_len) {
3964
0
            if (out == NULL) {
3965
0
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
3966
0
                    return -1;
3967
0
            } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3968
0
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, in, out, len - trailing_len))
3969
0
                    return -1;
3970
0
            } else {
3971
0
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, in, out, len - trailing_len))
3972
0
                    return -1;
3973
0
            }
3974
0
            written_len += len - trailing_len;
3975
0
            in += len - trailing_len;
3976
0
        }
3977
3978
        /* Handle any trailing partial block */
3979
0
        if (trailing_len > 0) {
3980
0
            memcpy(buf, in, trailing_len);
3981
0
            *buf_len = trailing_len;
3982
0
        }
3983
3984
0
        return written_len;
3985
0
    } else {
3986
        /*
3987
         * First of all empty the buffer of any partial block that we might
3988
         * have been provided - both for data and AAD
3989
         */
3990
0
        if (octx->data_buf_len > 0) {
3991
0
            if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
3992
0
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
3993
0
                        octx->data_buf_len))
3994
0
                    return -1;
3995
0
            } else {
3996
0
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
3997
0
                        octx->data_buf_len))
3998
0
                    return -1;
3999
0
            }
4000
0
            written_len = octx->data_buf_len;
4001
0
            octx->data_buf_len = 0;
4002
0
        }
4003
0
        if (octx->aad_buf_len > 0) {
4004
0
            if (!CRYPTO_ocb128_aad(&octx->ocb, octx->aad_buf, octx->aad_buf_len))
4005
0
                return -1;
4006
0
            octx->aad_buf_len = 0;
4007
0
        }
4008
        /* If decrypting then verify */
4009
0
        if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
4010
0
            if (octx->taglen < 0)
4011
0
                return -1;
4012
0
            if (CRYPTO_ocb128_finish(&octx->ocb,
4013
0
                    octx->tag, octx->taglen)
4014
0
                != 0)
4015
0
                return -1;
4016
0
            octx->iv_set = 0;
4017
0
            return written_len;
4018
0
        }
4019
        /* If encrypting then just get the tag */
4020
0
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
4021
0
            return -1;
4022
        /* Don't reuse the IV */
4023
0
        octx->iv_set = 0;
4024
0
        return written_len;
4025
0
    }
4026
0
}
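/*
 * A minimal, hypothetical OCB sketch exercising the block-buffering logic
 * above.  The tag length defaults to 16 bytes and can be reduced with
 * EVP_CTRL_AEAD_SET_TAG (NULL pointer) before the key is set; a 12-byte
 * nonce and 16-byte tag are assumed here and buffer names are illustrative
 * only.
 */
#include <openssl/evp.h>

static int ocb_encrypt_sketch(const unsigned char key[16],
                              const unsigned char iv[12],
                              const unsigned char *aad, int aadlen,
                              const unsigned char *pt, int ptlen,
                              unsigned char *ct, unsigned char tag[16])
{
    EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
    int outl, tmplen, ok = 0;

    if (c == NULL)
        return 0;
    ok = EVP_EncryptInit_ex(c, EVP_aes_128_ocb(), NULL, key, iv)
         && EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen)  /* AAD pass */
         && EVP_EncryptUpdate(c, ct, &outl, pt, ptlen)
         && EVP_EncryptFinal_ex(c, ct + outl, &tmplen)
         && EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_free(c);
    return ok;
}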
4027
4028
static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
4029
0
{
4030
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX, c);
4031
0
    CRYPTO_ocb128_cleanup(&octx->ocb);
4032
0
    return 1;
4033
0
}
4034
4035
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4036
    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4037
    BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4038
        EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4039
        BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4040
            EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4041
#endif /* OPENSSL_NO_OCB */