Coverage Report

Created: 2025-06-13 06:58

/src/openssl31/crypto/evp/e_aes.c
Line
Count
Source (jump to first uncovered line)
1
/*
2
 * Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved.
3
 *
4
 * Licensed under the Apache License 2.0 (the "License").  You may not use
5
 * this file except in compliance with the License.  You can obtain a copy
6
 * in the file LICENSE in the source distribution or at
7
 * https://www.openssl.org/source/license.html
8
 */
9
10
/*
11
 * This file uses the low-level AES functions (which are deprecated for
12
 * non-internal use) in order to implement the EVP AES ciphers.
13
 */
14
#include "internal/deprecated.h"
15
16
#include <string.h>
17
#include <assert.h>
18
#include <openssl/opensslconf.h>
19
#include <openssl/crypto.h>
20
#include <openssl/evp.h>
21
#include <openssl/err.h>
22
#include <openssl/aes.h>
23
#include <openssl/rand.h>
24
#include <openssl/cmac.h>
25
#include "crypto/evp.h"
26
#include "internal/cryptlib.h"
27
#include "crypto/modes.h"
28
#include "crypto/siv.h"
29
#include "crypto/aes_platform.h"
30
#include "evp_local.h"
31
32
typedef struct {
33
    union {
34
        OSSL_UNION_ALIGN;
35
        AES_KEY ks;
36
    } ks;
37
    block128_f block;
38
    union {
39
        cbc128_f cbc;
40
        ctr128_f ctr;
41
    } stream;
42
} EVP_AES_KEY;
43
44
typedef struct {
45
    union {
46
        OSSL_UNION_ALIGN;
47
        AES_KEY ks;
48
    } ks;                       /* AES key schedule to use */
49
    int key_set;                /* Set if key initialised */
50
    int iv_set;                 /* Set if an iv is set */
51
    GCM128_CONTEXT gcm;
52
    unsigned char *iv;          /* Temporary IV store */
53
    int ivlen;                  /* IV length */
54
    int taglen;
55
    int iv_gen;                 /* It is OK to generate IVs */
56
    int iv_gen_rand;            /* No IV was specified, so generate a rand IV */
57
    int tls_aad_len;            /* TLS AAD length */
58
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
59
    ctr128_f ctr;
60
} EVP_AES_GCM_CTX;
61
62
typedef struct {
63
    union {
64
        OSSL_UNION_ALIGN;
65
        AES_KEY ks;
66
    } ks1, ks2;                 /* AES key schedules to use */
67
    XTS128_CONTEXT xts;
68
    void (*stream) (const unsigned char *in,
69
                    unsigned char *out, size_t length,
70
                    const AES_KEY *key1, const AES_KEY *key2,
71
                    const unsigned char iv[16]);
72
} EVP_AES_XTS_CTX;
73
74
#ifdef FIPS_MODULE
75
static const int allow_insecure_decrypt = 0;
76
#else
77
static const int allow_insecure_decrypt = 1;
78
#endif
79
80
typedef struct {
81
    union {
82
        OSSL_UNION_ALIGN;
83
        AES_KEY ks;
84
    } ks;                       /* AES key schedule to use */
85
    int key_set;                /* Set if key initialised */
86
    int iv_set;                 /* Set if an iv is set */
87
    int tag_set;                /* Set if tag is valid */
88
    int len_set;                /* Set if message length set */
89
    int L, M;                   /* L and M parameters from RFC3610 */
90
    int tls_aad_len;            /* TLS AAD length */
91
    CCM128_CONTEXT ccm;
92
    ccm128_f str;
93
} EVP_AES_CCM_CTX;
94
95
#ifndef OPENSSL_NO_OCB
96
typedef struct {
97
    union {
98
        OSSL_UNION_ALIGN;
99
        AES_KEY ks;
100
    } ksenc;                    /* AES key schedule to use for encryption */
101
    union {
102
        OSSL_UNION_ALIGN;
103
        AES_KEY ks;
104
    } ksdec;                    /* AES key schedule to use for decryption */
105
    int key_set;                /* Set if key initialised */
106
    int iv_set;                 /* Set if an iv is set */
107
    OCB128_CONTEXT ocb;
108
    unsigned char *iv;          /* Temporary IV store */
109
    unsigned char tag[16];
110
    unsigned char data_buf[16]; /* Store partial data blocks */
111
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
112
    int data_buf_len;
113
    int aad_buf_len;
114
    int ivlen;                  /* IV length */
115
    int taglen;
116
} EVP_AES_OCB_CTX;
117
#endif
118
119
0
#define MAXBITCHUNK     ((size_t)1<<(sizeof(size_t)*8-4))
120
121
/* increment counter (64-bit int) by 1 */
122
static void ctr64_inc(unsigned char *counter)
123
0
{
124
0
    int n = 8;
125
0
    unsigned char c;
126
127
0
    do {
128
0
        --n;
129
0
        c = counter[n];
130
0
        ++c;
131
0
        counter[n] = c;
132
0
        if (c)
133
0
            return;
134
0
    } while (n);
135
0
}
136
137
#if defined(AESNI_CAPABLE)
138
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
139
#  define AES_GCM_ASM2(gctx)      (gctx->gcm.block==(block128_f)aesni_encrypt && \
140
                                 gctx->gcm.ghash==gcm_ghash_avx)
141
#  undef AES_GCM_ASM2          /* minor size optimization */
142
# endif
143
144
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
145
                          const unsigned char *iv, int enc)
146
0
{
147
0
    int ret, mode;
148
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
149
0
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
150
151
0
    if (keylen <= 0) {
152
0
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
153
0
        return 0;
154
0
    }
155
0
    mode = EVP_CIPHER_CTX_get_mode(ctx);
156
0
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
157
0
        && !enc) {
158
0
        ret = aesni_set_decrypt_key(key, keylen, &dat->ks.ks);
159
0
        dat->block = (block128_f) aesni_decrypt;
160
0
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
161
0
            (cbc128_f) aesni_cbc_encrypt : NULL;
162
0
    } else {
163
0
        ret = aesni_set_encrypt_key(key, keylen, &dat->ks.ks);
164
0
        dat->block = (block128_f) aesni_encrypt;
165
0
        if (mode == EVP_CIPH_CBC_MODE)
166
0
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
167
0
        else if (mode == EVP_CIPH_CTR_MODE)
168
0
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
169
0
        else
170
0
            dat->stream.cbc = NULL;
171
0
    }
172
173
0
    if (ret < 0) {
174
0
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
175
0
        return 0;
176
0
    }
177
178
0
    return 1;
179
0
}
180
181
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
182
                            const unsigned char *in, size_t len)
183
0
{
184
0
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
185
0
                      ctx->iv, EVP_CIPHER_CTX_is_encrypting(ctx));
186
187
0
    return 1;
188
0
}
189
190
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
191
                            const unsigned char *in, size_t len)
192
0
{
193
0
    size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);
194
195
0
    if (len < bl)
196
0
        return 1;
197
198
0
    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
199
0
                      EVP_CIPHER_CTX_is_encrypting(ctx));
200
201
0
    return 1;
202
0
}
203
204
# define aesni_ofb_cipher aes_ofb_cipher
205
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
206
                            const unsigned char *in, size_t len);
207
208
# define aesni_cfb_cipher aes_cfb_cipher
209
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
210
                            const unsigned char *in, size_t len);
211
212
# define aesni_cfb8_cipher aes_cfb8_cipher
213
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
214
                             const unsigned char *in, size_t len);
215
216
# define aesni_cfb1_cipher aes_cfb1_cipher
217
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
218
                             const unsigned char *in, size_t len);
219
220
# define aesni_ctr_cipher aes_ctr_cipher
221
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
222
                            const unsigned char *in, size_t len);
223
224
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
225
                              const unsigned char *iv, int enc)
226
0
{
227
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, ctx);
228
229
0
    if (iv == NULL && key == NULL)
230
0
        return 1;
231
232
0
    if (key) {
233
0
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
234
235
0
        if (keylen <= 0) {
236
0
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
237
0
            return 0;
238
0
        }
239
0
        aesni_set_encrypt_key(key, keylen, &gctx->ks.ks);
240
0
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
241
0
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
242
        /*
243
         * If we have an iv can set it directly, otherwise use saved IV.
244
         */
245
0
        if (iv == NULL && gctx->iv_set)
246
0
            iv = gctx->iv;
247
0
        if (iv) {
248
0
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
249
0
            gctx->iv_set = 1;
250
0
        }
251
0
        gctx->key_set = 1;
252
0
    } else {
253
        /* If key set use IV, otherwise copy */
254
0
        if (gctx->key_set)
255
0
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
256
0
        else
257
0
            memcpy(gctx->iv, iv, gctx->ivlen);
258
0
        gctx->iv_set = 1;
259
0
        gctx->iv_gen = 0;
260
0
    }
261
0
    return 1;
262
0
}
263
264
# define aesni_gcm_cipher aes_gcm_cipher
265
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
266
                            const unsigned char *in, size_t len);
267
268
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
269
                              const unsigned char *iv, int enc)
270
0
{
271
0
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
272
273
0
    if (iv == NULL && key == NULL)
274
0
        return 1;
275
276
0
    if (key) {
277
        /* The key is two half length keys in reality */
278
0
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
279
0
        const int bytes = keylen / 2;
280
0
        const int bits = bytes * 8;
281
282
0
        if (keylen <= 0) {
283
0
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
284
0
            return 0;
285
0
        }
286
        /*
287
         * Verify that the two keys are different.
288
         *
289
         * This addresses Rogaway's vulnerability.
290
         * See comment in aes_xts_init_key() below.
291
         */
292
0
        if ((!allow_insecure_decrypt || enc)
293
0
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
294
0
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
295
0
            return 0;
296
0
        }
297
298
        /* key_len is two AES keys */
299
0
        if (enc) {
300
0
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
301
0
            xctx->xts.block1 = (block128_f) aesni_encrypt;
302
0
            xctx->stream = aesni_xts_encrypt;
303
0
        } else {
304
0
            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
305
0
            xctx->xts.block1 = (block128_f) aesni_decrypt;
306
0
            xctx->stream = aesni_xts_decrypt;
307
0
        }
308
309
0
        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
310
0
        xctx->xts.block2 = (block128_f) aesni_encrypt;
311
312
0
        xctx->xts.key1 = &xctx->ks1;
313
0
    }
314
315
0
    if (iv) {
316
0
        xctx->xts.key2 = &xctx->ks2;
317
0
        memcpy(ctx->iv, iv, 16);
318
0
    }
319
320
0
    return 1;
321
0
}
322
323
# define aesni_xts_cipher aes_xts_cipher
324
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
325
                            const unsigned char *in, size_t len);
326
327
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
328
                              const unsigned char *iv, int enc)
329
0
{
330
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
331
332
0
    if (iv == NULL && key == NULL)
333
0
        return 1;
334
335
0
    if (key != NULL) {
336
0
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
337
338
0
        if (keylen <= 0) {
339
0
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
340
0
            return 0;
341
0
        }
342
0
        aesni_set_encrypt_key(key, keylen, &cctx->ks.ks);
343
0
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
344
0
                           &cctx->ks, (block128_f) aesni_encrypt);
345
0
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
346
0
            (ccm128_f) aesni_ccm64_decrypt_blocks;
347
0
        cctx->key_set = 1;
348
0
    }
349
0
    if (iv) {
350
0
        memcpy(ctx->iv, iv, 15 - cctx->L);
351
0
        cctx->iv_set = 1;
352
0
    }
353
0
    return 1;
354
0
}
355
356
# define aesni_ccm_cipher aes_ccm_cipher
357
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
358
                            const unsigned char *in, size_t len);
359
360
# ifndef OPENSSL_NO_OCB
361
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
362
                              const unsigned char *iv, int enc)
363
0
{
364
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
365
366
0
    if (iv == NULL && key == NULL)
367
0
        return 1;
368
369
0
    if (key != NULL) {
370
0
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
371
372
0
        if (keylen <= 0) {
373
0
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
374
0
            return 0;
375
0
        }
376
0
        do {
377
            /*
378
             * We set both the encrypt and decrypt key here because decrypt
379
             * needs both. We could possibly optimise to remove setting the
380
             * decrypt for an encryption operation.
381
             */
382
0
            aesni_set_encrypt_key(key, keylen, &octx->ksenc.ks);
383
0
            aesni_set_decrypt_key(key, keylen, &octx->ksdec.ks);
384
0
            if (!CRYPTO_ocb128_init(&octx->ocb,
385
0
                                    &octx->ksenc.ks, &octx->ksdec.ks,
386
0
                                    (block128_f) aesni_encrypt,
387
0
                                    (block128_f) aesni_decrypt,
388
0
                                    enc ? aesni_ocb_encrypt
389
0
                                        : aesni_ocb_decrypt))
390
0
                return 0;
391
0
        }
392
0
        while (0);
393
394
        /*
395
         * If we have an iv we can set it directly, otherwise use saved IV.
396
         */
397
0
        if (iv == NULL && octx->iv_set)
398
0
            iv = octx->iv;
399
0
        if (iv) {
400
0
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
401
0
                != 1)
402
0
                return 0;
403
0
            octx->iv_set = 1;
404
0
        }
405
0
        octx->key_set = 1;
406
0
    } else {
407
        /* If key set use IV, otherwise copy */
408
0
        if (octx->key_set)
409
0
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
410
0
        else
411
0
            memcpy(octx->iv, iv, octx->ivlen);
412
0
        octx->iv_set = 1;
413
0
    }
414
0
    return 1;
415
0
}
416
417
#  define aesni_ocb_cipher aes_ocb_cipher
418
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
419
                            const unsigned char *in, size_t len);
420
# endif                        /* OPENSSL_NO_OCB */
421
422
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
423
static const EVP_CIPHER aesni_##keylen##_##mode = { \
424
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
425
        flags|EVP_CIPH_##MODE##_MODE,   \
426
        EVP_ORIG_GLOBAL,                \
427
        aesni_init_key,                 \
428
        aesni_##mode##_cipher,          \
429
        NULL,                           \
430
        sizeof(EVP_AES_KEY),            \
431
        NULL,NULL,NULL,NULL }; \
432
static const EVP_CIPHER aes_##keylen##_##mode = { \
433
        nid##_##keylen##_##nmode,blocksize,     \
434
        keylen/8,ivlen,                 \
435
        flags|EVP_CIPH_##MODE##_MODE,   \
436
        EVP_ORIG_GLOBAL,                 \
437
        aes_init_key,                   \
438
        aes_##mode##_cipher,            \
439
        NULL,                           \
440
        sizeof(EVP_AES_KEY),            \
441
        NULL,NULL,NULL,NULL }; \
442
1.49k
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1.49k
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_cbc
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_ecb
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_ofb
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_cfb128
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_cfb1
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_cfb8
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_ctr
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_cbc
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_ecb
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_ofb
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_cfb128
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_cfb1
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_cfb8
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_ctr
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_cbc
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_ecb
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_ofb
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_cfb128
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_cfb1
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_cfb8
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_ctr
Line
Count
Source
442
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
444
445
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
446
static const EVP_CIPHER aesni_##keylen##_##mode = { \
447
        nid##_##keylen##_##mode,blocksize, \
448
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
449
        ivlen,                          \
450
        flags|EVP_CIPH_##MODE##_MODE,   \
451
        EVP_ORIG_GLOBAL,                \
452
        aesni_##mode##_init_key,        \
453
        aesni_##mode##_cipher,          \
454
        aes_##mode##_cleanup,           \
455
        sizeof(EVP_AES_##MODE##_CTX),   \
456
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
457
static const EVP_CIPHER aes_##keylen##_##mode = { \
458
        nid##_##keylen##_##mode,blocksize, \
459
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
460
        ivlen,                          \
461
        flags|EVP_CIPH_##MODE##_MODE,   \
462
        EVP_ORIG_GLOBAL,                \
463
        aes_##mode##_init_key,          \
464
        aes_##mode##_cipher,            \
465
        aes_##mode##_cleanup,           \
466
        sizeof(EVP_AES_##MODE##_CTX),   \
467
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
468
781
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
781
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_gcm
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_gcm
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_gcm
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_xts
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_xts
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_ccm
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_ccm
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_ccm
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_ocb
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_ocb
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_ocb
Line
Count
Source
468
71
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
469
71
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
470
471
#elif defined(SPARC_AES_CAPABLE)
472
473
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
474
                           const unsigned char *iv, int enc)
475
{
476
    int ret, mode, bits;
477
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
478
479
    mode = EVP_CIPHER_CTX_get_mode(ctx);
480
    bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
481
    if (bits <= 0) {
482
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
483
        return 0;
484
    }
485
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
486
        && !enc) {
487
        ret = 0;
488
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
489
        dat->block = (block128_f) aes_t4_decrypt;
490
        switch (bits) {
491
        case 128:
492
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
493
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
494
            break;
495
        case 192:
496
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
497
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
498
            break;
499
        case 256:
500
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
501
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
502
            break;
503
        default:
504
            ret = -1;
505
        }
506
    } else {
507
        ret = 0;
508
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
509
        dat->block = (block128_f) aes_t4_encrypt;
510
        switch (bits) {
511
        case 128:
512
            if (mode == EVP_CIPH_CBC_MODE)
513
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
514
            else if (mode == EVP_CIPH_CTR_MODE)
515
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
516
            else
517
                dat->stream.cbc = NULL;
518
            break;
519
        case 192:
520
            if (mode == EVP_CIPH_CBC_MODE)
521
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
522
            else if (mode == EVP_CIPH_CTR_MODE)
523
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
524
            else
525
                dat->stream.cbc = NULL;
526
            break;
527
        case 256:
528
            if (mode == EVP_CIPH_CBC_MODE)
529
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
530
            else if (mode == EVP_CIPH_CTR_MODE)
531
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
532
            else
533
                dat->stream.cbc = NULL;
534
            break;
535
        default:
536
            ret = -1;
537
        }
538
    }
539
540
    if (ret < 0) {
541
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
542
        return 0;
543
    }
544
545
    return 1;
546
}
547
548
# define aes_t4_cbc_cipher aes_cbc_cipher
549
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
550
                             const unsigned char *in, size_t len);
551
552
# define aes_t4_ecb_cipher aes_ecb_cipher
553
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
554
                             const unsigned char *in, size_t len);
555
556
# define aes_t4_ofb_cipher aes_ofb_cipher
557
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
558
                             const unsigned char *in, size_t len);
559
560
# define aes_t4_cfb_cipher aes_cfb_cipher
561
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
562
                             const unsigned char *in, size_t len);
563
564
# define aes_t4_cfb8_cipher aes_cfb8_cipher
565
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
566
                              const unsigned char *in, size_t len);
567
568
# define aes_t4_cfb1_cipher aes_cfb1_cipher
569
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
570
                              const unsigned char *in, size_t len);
571
572
# define aes_t4_ctr_cipher aes_ctr_cipher
573
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
574
                             const unsigned char *in, size_t len);
575
576
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
577
                               const unsigned char *iv, int enc)
578
{
579
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
580
581
    if (iv == NULL && key == NULL)
582
        return 1;
583
    if (key) {
584
        const int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
585
586
        if (bits <= 0) {
587
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
588
            return 0;
589
        }
590
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
591
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
592
                           (block128_f) aes_t4_encrypt);
593
        switch (bits) {
594
        case 128:
595
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
596
            break;
597
        case 192:
598
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
599
            break;
600
        case 256:
601
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
602
            break;
603
        default:
604
            return 0;
605
        }
606
        /*
607
         * If we have an iv can set it directly, otherwise use saved IV.
608
         */
609
        if (iv == NULL && gctx->iv_set)
610
            iv = gctx->iv;
611
        if (iv) {
612
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
613
            gctx->iv_set = 1;
614
        }
615
        gctx->key_set = 1;
616
    } else {
617
        /* If key set use IV, otherwise copy */
618
        if (gctx->key_set)
619
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
620
        else
621
            memcpy(gctx->iv, iv, gctx->ivlen);
622
        gctx->iv_set = 1;
623
        gctx->iv_gen = 0;
624
    }
625
    return 1;
626
}
627
628
# define aes_t4_gcm_cipher aes_gcm_cipher
629
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
630
                             const unsigned char *in, size_t len);
631
632
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
633
                               const unsigned char *iv, int enc)
634
{
635
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
636
637
    if (!iv && !key)
638
        return 1;
639
640
    if (key) {
641
        /* The key is two half length keys in reality */
642
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
643
        const int bytes = keylen / 2;
644
        const int bits = bytes * 8;
645
646
        if (keylen <= 0) {
647
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
648
            return 0;
649
        }
650
        /*
651
         * Verify that the two keys are different.
652
         *
653
         * This addresses Rogaway's vulnerability.
654
         * See comment in aes_xts_init_key() below.
655
         */
656
        if ((!allow_insecure_decrypt || enc)
657
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
658
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
659
            return 0;
660
        }
661
662
        xctx->stream = NULL;
663
        /* key_len is two AES keys */
664
        if (enc) {
665
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
666
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
667
            switch (bits) {
668
            case 128:
669
                xctx->stream = aes128_t4_xts_encrypt;
670
                break;
671
            case 256:
672
                xctx->stream = aes256_t4_xts_encrypt;
673
                break;
674
            default:
675
                return 0;
676
            }
677
        } else {
678
            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
679
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
680
            switch (bits) {
681
            case 128:
682
                xctx->stream = aes128_t4_xts_decrypt;
683
                break;
684
            case 256:
685
                xctx->stream = aes256_t4_xts_decrypt;
686
                break;
687
            default:
688
                return 0;
689
            }
690
        }
691
692
        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
693
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;
694
695
        xctx->xts.key1 = &xctx->ks1;
696
    }
697
698
    if (iv) {
699
        xctx->xts.key2 = &xctx->ks2;
700
        memcpy(ctx->iv, iv, 16);
701
    }
702
703
    return 1;
704
}
705
706
# define aes_t4_xts_cipher aes_xts_cipher
707
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
708
                             const unsigned char *in, size_t len);
709
710
/*
 * Set up the key and/or IV for the SPARC T4 accelerated AES-CCM cipher.
 * Returns 1 on success, 0 on error.
 */
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *ccm_ctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

    /* Nothing to do if neither a key nor an IV was supplied. */
    if (key == NULL && iv == NULL)
        return 1;

    if (key != NULL) {
        const int keybits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (keybits <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        /* CCM only ever uses the forward (encrypt) key schedule. */
        aes_t4_set_encrypt_key(key, keybits, &ccm_ctx->ks.ks);
        CRYPTO_ccm128_init(&ccm_ctx->ccm, ccm_ctx->M, ccm_ctx->L,
                           &ccm_ctx->ks, (block128_f) aes_t4_encrypt);
        ccm_ctx->str = NULL;
        ccm_ctx->key_set = 1;
    }

    if (iv != NULL) {
        /* The CCM nonce occupies 15 - L bytes of the IV buffer. */
        memcpy(ctx->iv, iv, 15 - ccm_ctx->L);
        ccm_ctx->iv_set = 1;
    }
    return 1;
}
737
738
# define aes_t4_ccm_cipher aes_ccm_cipher
739
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
740
                             const unsigned char *in, size_t len);
741
742
# ifndef OPENSSL_NO_OCB
743
/*
 * Set up the key and/or IV for the SPARC T4 accelerated AES-OCB cipher.
 * Returns 1 on success, 0 on error.
 */
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    if (key == NULL && iv == NULL)
        return 1;

    if (key == NULL) {
        /* IV only: apply directly if the key is set, otherwise save a copy. */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
        return 1;
    }

    {
        const int keybits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (keybits <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        /*
         * Both schedules are set up because decryption needs both. Setting
         * the decrypt schedule could be skipped for a pure encryption
         * operation.
         */
        aes_t4_set_encrypt_key(key, keybits, &octx->ksenc.ks);
        aes_t4_set_decrypt_key(key, keybits, &octx->ksdec.ks);
        if (!CRYPTO_ocb128_init(&octx->ocb,
                                &octx->ksenc.ks, &octx->ksdec.ks,
                                (block128_f) aes_t4_encrypt,
                                (block128_f) aes_t4_decrypt,
                                NULL))
            return 0;
    }

    /* If no IV was supplied now, fall back to a previously saved one. */
    if (iv == NULL && octx->iv_set)
        iv = octx->iv;
    if (iv != NULL) {
        if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
            != 1)
            return 0;
        octx->iv_set = 1;
    }
    octx->key_set = 1;
    return 1;
}
797
798
#  define aes_t4_ocb_cipher aes_ocb_cipher
799
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
800
                             const unsigned char *in, size_t len);
801
# endif                        /* OPENSSL_NO_OCB */
802
803
# ifndef OPENSSL_NO_SIV
804
#  define aes_t4_siv_init_key aes_siv_init_key
805
#  define aes_t4_siv_cipher aes_siv_cipher
806
# endif /* OPENSSL_NO_SIV */
807
808
/*
 * Define a generic (fixed key length) AES cipher for the given mode in two
 * flavours -- plain C and SPARC T4 accelerated -- plus the public
 * EVP_aes_<keylen>_<mode>() getter that picks between them at run time
 * based on SPARC_AES_CAPABLE.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_t4_init_key,                \
        aes_t4_##mode##_cipher,         \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
830
831
/*
 * Same as BLOCK_CIPHER_generic but for modes with a custom context, init,
 * cleanup and ctrl (GCM/XTS/CCM/OCB/SIV).  XTS and SIV take a double-length
 * key, hence the key-length multiplier in the cipher definition.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_t4_##mode##_init_key,       \
        aes_t4_##mode##_cipher,         \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
856
857
#elif defined(S390X_aes_128_CAPABLE)
858
/* IBM S390X support */
859
/* Context for the s390x KM-instruction based AES-ECB implementation. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        struct {
            unsigned char k[32];        /* cipher key */
        } param;
        /* KM-AES parameter block - end */
    } km;
    unsigned int fc;                    /* KM function code (incl. decrypt bit) */
} S390X_AES_ECB_CTX;

/* Context for the s390x KMO-instruction based AES-OFB implementation. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];       /* chaining value */
            unsigned char k[32];        /* cipher key */
        } param;
        /* KMO-AES parameter block - end */
    } kmo;
    unsigned int fc;                    /* KMO function code */
} S390X_AES_OFB_CTX;

/* Context for the s390x KMF-instruction based AES-CFB implementation. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];       /* chaining value */
            unsigned char k[32];        /* cipher key */
        } param;
        /* KMF-AES parameter block - end */
    } kmf;
    unsigned int fc;                    /* KMF function code (incl. feedback width) */
} S390X_AES_CFB_CTX;

/* Context for the s390x KMA-instruction based AES-GCM implementation. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;                       /* 32-bit counter value */
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;                        /* tag */
            unsigned char h[16];        /* hash subkey */
            unsigned long long taadl;   /* total AAD length (bits) */
            unsigned long long tpcl;    /* total plain-/ciphertext length (bits) */
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;                       /* initial counter block */
            unsigned char k[32];        /* cipher key */
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;                    /* KMA function code */
    int key_set;                        /* Set if key initialised */

    unsigned char *iv;                  /* Temporary IV store */
    int ivlen;                          /* IV length */
    int iv_set;                         /* Set if an iv is set */
    int iv_gen;                         /* It is OK to generate IVs */

    int taglen;

    unsigned char ares[16];             /* partial AAD block residue */
    unsigned char mres[16];             /* partial message block residue */
    unsigned char kres[16];             /* key stream for the partial block */
    int areslen;
    int mreslen;
    int kreslen;

    int tls_aad_len;                    /* TLS AAD length */
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
} S390X_AES_GCM_CTX;

/* Context for the s390x KMAC-instruction based AES-CCM implementation. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;                  /* intermediate/output MAC */
                unsigned char k[32];    /* cipher key */
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;

            unsigned long long blocks;
            int l;                      /* length-field size L */
            int m;                      /* tag length M */
            int tls_aad_len;
            int iv_set;
            int tag_set;
            int len_set;
            int key_set;

            /* pad so kmac_param.k / fc overlay key.k / key.k.rounds above */
            unsigned char pad[140];
            unsigned int fc;
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;
1006
1007
# define s390x_aes_init_key aes_init_key
1008
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
1009
                              const unsigned char *iv, int enc);
1010
1011
# define S390X_AES_CBC_CTX              EVP_AES_KEY
1012
1013
# define s390x_aes_cbc_init_key aes_init_key
1014
1015
# define s390x_aes_cbc_cipher aes_cbc_cipher
1016
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1017
                                const unsigned char *in, size_t len);
1018
1019
/*
 * Set up the KM parameter block for AES-ECB on s390x.
 * Returns 1 on success, 0 on error.
 */
static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_ECB_CTX *ectx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int klen = EVP_CIPHER_CTX_get_key_length(ctx);

    if (klen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }

    /* Select the KM function code for this key size; OR in the
     * decrypt modifier when decrypting. */
    ectx->fc = S390X_AES_FC(klen) | (enc ? 0 : S390X_DECRYPT);

    memcpy(ectx->km.param.k, key, klen);
    return 1;
}
1037
1038
/* Encrypt/decrypt len bytes with a single KM invocation; the direction
 * is encoded in the function code set at init time. Always returns 1. */
static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_ECB_CTX *ectx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, ectx->fc, &ectx->km.param);
    return 1;
}
1046
1047
/*
 * Set up the KMO parameter block for AES-OFB on s390x, loading the
 * chaining value from ctx->oiv. Returns 1 on success, 0 on error.
 */
static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_OFB_CTX *octx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *iv = ctx->oiv;
    const int klen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int ilen = EVP_CIPHER_CTX_get_iv_length(ctx);

    if (klen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    if (ilen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
        return 0;
    }

    /* Load chaining value and key into the KMO parameter block. */
    memcpy(octx->kmo.param.cv, iv, ilen);
    memcpy(octx->kmo.param.k, key, klen);
    /* OFB uses the same function code for both directions. */
    octx->fc = S390X_AES_FC(klen);
    return 1;
}
1069
1070
/*
 * AES-OFB for s390x: bulk work is done by the KMO instruction; partial
 * blocks are handled in software around it. ctx->num tracks the offset
 * into the current key-stream block between calls. Always returns 1.
 */
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = ctx->num;
    int rem;

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    /* First consume any key-stream bytes left over from the previous call. */
    while (n && len) {
        *out = *in ^ cctx->kmo.param.cv[n];
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    /* Process all whole blocks with a single KMO invocation. */
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        out += len;
        in += len;
    }

    /* Trailing partial block: generate one more key-stream block with KM
     * and XOR the remainder by hand. */
    if (rem) {
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);

        while (rem--) {
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
            ++n;
        }
    }

    /* Persist chaining value and key-stream offset for the next call. */
    memcpy(iv, cctx->kmo.param.cv, ivlen);
    ctx->num = n;
    return 1;
}
1112
1113
/*
 * Set up the KMF parameter block for AES-CFB128 on s390x, loading the
 * chaining value from ctx->oiv. Returns 1 on success, 0 on error.
 */
static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *fctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = ctx->oiv;
    const int klen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int ilen = EVP_CIPHER_CTX_get_iv_length(ctx);

    if (klen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    if (ilen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
        return 0;
    }

    /* Function code: key size, 16-byte cipher feedback, decrypt bit. */
    fctx->fc = S390X_AES_FC(klen);
    fctx->fc |= 16 << 24;   /* 16 bytes cipher feedback */
    if (!enc)
        fctx->fc |= S390X_DECRYPT;

    memcpy(fctx->kmf.param.cv, iv, ilen);
    memcpy(fctx->kmf.param.k, key, klen);
    return 1;
}
1139
1140
/*
 * AES-CFB128 for s390x: bulk work is done by the KMF instruction; partial
 * blocks are handled in software around it. ctx->num tracks the offset into
 * the current feedback block between calls. Returns 1 on success, 0 on error.
 */
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = ctx->num;
    int rem;
    unsigned char tmp;

    if (keylen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    if (ivlen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
        return 0;
    }
    memcpy(cctx->kmf.param.cv, iv, ivlen);
    /*
     * First consume any bytes left over from the previous call. In CFB the
     * feedback block is refilled with ciphertext: the output when
     * encrypting, the input when decrypting.
     */
    while (n && len) {
        tmp = *in;
        *out = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    /* Process all whole blocks with a single KMF invocation. */
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        out += len;
        in += len;
    }

    /*
     * Trailing partial block: encrypt the chaining value with KM (plain
     * forward function code, no feedback/decrypt bits) and XOR by hand.
     */
    if (rem) {
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        while (rem--) {
            tmp = in[n];
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
            ++n;
        }
    }

    /* Persist chaining value and offset for the next call. */
    memcpy(iv, cctx->kmf.param.cv, ivlen);
    ctx->num = n;
    return 1;
}
1197
1198
/*
 * Set up the KMF parameter block for AES-CFB8 on s390x, loading the
 * chaining value from ctx->oiv. Returns 1 on success, 0 on error.
 */
static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *fctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = ctx->oiv;
    const int klen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int ilen = EVP_CIPHER_CTX_get_iv_length(ctx);

    if (klen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }
    if (ilen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH);
        return 0;
    }

    /* Function code: key size, 1-byte cipher feedback, decrypt bit. */
    fctx->fc = S390X_AES_FC(klen);
    fctx->fc |= 1 << 24;   /* 1 byte cipher feedback */
    if (!enc)
        fctx->fc |= S390X_DECRYPT;

    memcpy(fctx->kmf.param.cv, iv, ilen);
    memcpy(fctx->kmf.param.k, key, klen);
    return 1;
}
1224
1225
/* AES-CFB8: KMF handles any length directly (1-byte feedback), so the
 * whole buffer is processed in one call. Always returns 1. */
static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *fctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int iv_len = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);

    /* Shuttle the chaining value through the KMF parameter block. */
    memcpy(fctx->kmf.param.cv, iv, iv_len);
    s390x_kmf(in, len, out, fctx->fc, &fctx->kmf.param);
    memcpy(iv, fctx->kmf.param.cv, iv_len);
    return 1;
}
1237
1238
# define s390x_aes_cfb1_init_key aes_init_key
1239
1240
# define s390x_aes_cfb1_cipher aes_cfb1_cipher
1241
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1242
                                 const unsigned char *in, size_t len);
1243
1244
# define S390X_AES_CTR_CTX              EVP_AES_KEY
1245
1246
# define s390x_aes_ctr_init_key aes_init_key
1247
1248
# define s390x_aes_ctr_cipher aes_ctr_cipher
1249
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1250
                                const unsigned char *in, size_t len);
1251
1252
/* iv + padding length for iv lengths != 12 */
1253
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
1254
1255
/*-
1256
 * Process additional authenticated data. Returns 0 on success. Code is
1257
 * big-endian.
1258
 */
1259
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    /* AAD must be supplied before any plain-/ciphertext has been seen. */
    if (ctx->kma.param.tpcl)
        return -2;

    /* Reject AAD totals beyond 2^61 bytes and byte-count overflow. */
    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    /* Fill up a previously buffered partial AAD block first. */
    n = ctx->areslen;
    if (n) {
        while (n && len) {
            ctx->ares[n] = *aad;
            n = (n + 1) & 0xf;
            ++aad;
            --len;
        }
        /* ctx->ares contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
        }
        ctx->areslen = n;
    }

    rem = len & 0xf;

    /* Hash all whole blocks with a single KMA invocation. */
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    /* Buffer any trailing partial block for the next call. */
    if (rem) {
        ctx->areslen = rem;

        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }
    return 0;
}
1308
1309
/*-
1310
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1311
 * success. Code is big-endian.
1312
 */
1313
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    /* Enforce the GCM plaintext limit (in bytes) and catch overflow. */
    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    /* Fill up a previously buffered partial message block first. */
    n = ctx->mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            ctx->areslen = 0;

            /* previous call already encrypted/decrypted its remainder,
             * see comment below */
            n = ctx->mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            ctx->mreslen = 0;
        }
    }

    rem = len & 0xf;

    /* Process all whole blocks with a single KMA invocation, flushing any
     * buffered AAD residue along with them. */
    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            /* Derive the key stream for this counter block with KM. */
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
    return 0;
}
1397
1398
/*-
1399
 * Initialize context structure. Code is big-endian.
1400
 */
1401
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
{
    /* Reset per-message state: tag, lengths and all residue buffers. */
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;
    ctx->mreslen = 0;
    ctx->areslen = 0;
    ctx->kreslen = 0;

    if (ctx->ivlen == 12) {
        /* 96-bit IV: J0 = IV || 0^31 || 1, per GCM. */
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
    } else {
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        /* GHASH the padded IV via KMA to derive J0. */
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        /* Clear the tag again: the GHASH above was only for J0. */
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}
1430
1431
/*-
1432
 * Performs various operations on the context structure depending on control
1433
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1434
 * Code is big-endian.
1435
 */
1436
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf;
    int ivlen, enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset the context to its defaults; IV initially aliases c->iv. */
        ivlen = EVP_CIPHER_get_iv_length(c->cipher);
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = ivlen;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;

        /* Non-96-bit IVs need a padded buffer for the J0 GHASH. */
        if (arg != 12) {
            len = S390X_gcm_ivpadlen(arg);

            /* Allocate memory for iv if needed. */
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                if (gctx->iv != c->iv)
                    OPENSSL_free(gctx->iv);

                if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                    ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
            }
            /* Add padding. */
            memset(gctx->iv + arg, 0, len - arg - 8);
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Expected tag is only meaningful when decrypting. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Computed tag can only be read back when encrypting. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        if (arg)
            memcpy(gctx->iv, ptr, arg);

        /* Randomize the invocation field when encrypting. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;

        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* Install the peer-supplied invocation field (decrypt side only). */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;

        /* Record length lives in the last two AAD bytes (big-endian). */
        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        /* Deep-copy the IV buffer when it does not alias the EVP ctx IV. */
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);

        if (gctx->iv == c->iv) {
            gctx_out->iv = out->iv;
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                return 0;
            }

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}
1604
1605
/*-
1606
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1607
 */
1608
/*-
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    int klen;

    if (key == NULL && iv == NULL)
        return 1;

    if (key == NULL) {
        /* IV only: apply it now if the key is set, otherwise save it. */
        if (gctx->key_set)
            s390x_aes_gcm_setiv(gctx, iv);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);

        gctx->iv_set = 1;
        gctx->iv_gen = 0;
        return 1;
    }

    klen = EVP_CIPHER_CTX_get_key_length(ctx);
    if (klen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }

    memcpy(&gctx->kma.param.k, key, klen);

    /* Select the KMA function code; OR in the decrypt modifier if needed. */
    gctx->fc = S390X_AES_FC(klen);
    if (!enc)
        gctx->fc |= S390X_DECRYPT;

    /* Fall back to a previously saved IV if none was supplied now. */
    if (iv == NULL && gctx->iv_set)
        iv = gctx->iv;

    if (iv != NULL) {
        s390x_aes_gcm_setiv(gctx, iv);
        gctx->iv_set = 1;
    }
    gctx->key_set = 1;
    return 1;
}
1650
1651
/*-
1652
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1653
 * if successful. Otherwise -1 is returned. Code is big-endian.
1654
 */
1655
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
    int rv = -1;

    /* TLS records are processed in place and must be long enough to hold
     * at least the explicit IV and the tag. */
    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D".  The requirements is for one party to the
     * communication to fail after 2^64 - 1 keys.  We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
        goto err;
    }

    /* Encrypt: generate the explicit IV into the record head.
     * Decrypt: consume the explicit IV found there. */
    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    /* Skip past the explicit IV; len becomes the payload length. */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    /* The KMA parameter block takes AAD/payload lengths in bits. */
    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        /* Append the freshly computed tag after the ciphertext. */
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Constant-time tag comparison; wipe the plaintext on mismatch so
         * unauthenticated data is never exposed. */
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
err:
    /* Each record needs a fresh IV; also reset the AAD state. */
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
1707
1708
/*-
1709
 * Called from EVP layer to initialize context, process additional
1710
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1711
 * ciphertext or process a TLS packet, depending on context. Returns bytes
1712
 * written on success. Otherwise -1 is returned. Code is big-endian.
1713
 */
1714
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    /* A key must have been set up before any processing. */
    if (!gctx->key_set)
        return -1;

    /* tls_aad_len >= 0 selects the one-shot TLS record path. */
    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;

    if (in != NULL) {
        if (out == NULL) {
            /* Update() with out == NULL feeds additional authenticated
             * data only. */
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else {
            /* Update(): en-/decrypt a chunk of payload. */
            if (s390x_aes_gcm(gctx, in, out, len))
                return -1;
        }
        return len;
    } else {
        /*-
         * Final(): flush buffered AAD/message residues and compute the tag.
         * Total lengths are converted from bytes to bits for the KMA
         * parameter block.
         */
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /* recall that we already did en-/decrypt gctx->mres
         * and returned it to caller... */
        OPENSSL_cleanse(tmp, gctx->mreslen);
        gctx->iv_set = 0;

        enc = EVP_CIPHER_CTX_is_encrypting(ctx);
        if (enc) {
            /* Encrypting: the full 16-byte tag is now available. */
            gctx->taglen = 16;
        } else {
            /* Decrypting: a tag must have been supplied beforehand;
             * compare it in constant time. */
            if (gctx->taglen < 0)
                return -1;

            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
                return -1;
        }
        return 0;
    }
}
1763
1764
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *state = EVP_C_DATA(S390X_AES_GCM_CTX, c);

    if (state == NULL)
        return 0;

    /* The IV may live in a separately allocated buffer; free it only
     * when it does not alias the context's own IV storage. */
    if (state->iv != c->iv)
        OPENSSL_free(state->iv);

    /* Wipe key material and all intermediate state before release. */
    OPENSSL_cleanse(state, sizeof(*state));
    return 1;
}
1777
1778
/*-
 * XTS has no dedicated s390x path here: the aliases below map the s390x
 * entry points onto the generic software implementation declared further
 * down in this file.
 */
# define S390X_AES_XTS_CTX              EVP_AES_XTS_CTX

# define s390x_aes_xts_init_key aes_xts_init_key
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc);
# define s390x_aes_xts_cipher aes_xts_cipher
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
# define s390x_aes_xts_ctrl aes_xts_ctrl
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# define s390x_aes_xts_cleanup aes_xts_cleanup
1790
1791
/*-
1792
 * Set nonce and length fields. Code is big-endian.
1793
 */
1794
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                          const unsigned char *nonce,
                                          size_t mlen)
{
    /* Clear the AAD flag; it is set again if AAD is processed later. */
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    /* Store the message length in the trailing 64-bit word; the nonce copy
     * below then overwrites all but its low-order l bytes (the encoded
     * length field). */
    ctx->aes.ccm.nonce.g[1] = mlen;
    /* The nonce occupies bytes 1 .. 15 - l of the B0 block. */
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
}
1802
1803
/*-
1804
 * Process additional authenticated data. Code is big-endian.
1805
 */
1806
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
                              size_t alen)
{
    unsigned char *ptr;
    int i, rem;

    /* CCM encodes zero-length AAD by simply omitting it. */
    if (!alen)
        return;

    /* Record in B0 that AAD is present. */
    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;

    /*-
     * Encode the AAD length per the CCM format:
     *   < 0xff00            -> 2-byte length,
     *   >= 2^32 (64-bit)    -> 0xffff marker + 8-byte length,
     *   otherwise           -> 0xfffe marker + 4-byte length.
     */
    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
        i = 2;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        i = 10;
    } else {
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;
        i = 6;
    }

    /* Fill the rest of the first block with AAD bytes, zero-padding. */
    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ++aad;
        --alen;
        ++i;
    }
    while (i < 16) {
        ctx->aes.ccm.buf.b[i] = 0;
        ++i;
    }

    /* MAC B0 plus the first AAD block (32 bytes) with a zero ICV. */
    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;

    /* MAC the remaining full blocks of AAD, then the final partial block
     * (xor into the ICV and encrypt once more). */
    rem = alen & 0xf;
    alen &= ~(size_t)0xf;
    if (alen) {
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;
        aad += alen;
    }
    if (rem) {
        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];

        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
}
1868
1869
/*-
1870
 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1871
 * success.
1872
 */
1873
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len, int enc)
{
    size_t n, rem;
    unsigned int i, l, num;
    unsigned char flags;

    /* If no AAD was processed, the B0 block has not been MACed yet. */
    flags = ctx->aes.ccm.nonce.b[0];
    if (!(flags & S390X_CCM_AAD_FLAG)) {
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
                 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
    /* Turn B0 into the initial counter block A0 (flags byte = l - 1). */
    l = flags & 0x7;
    ctx->aes.ccm.nonce.b[0] = l;

    /*-
     * Reconstruct length from encoded length field
     * and initialize it with counter value.
     */
    n = 0;
    for (i = 15 - l; i < 15; i++) {
        n |= ctx->aes.ccm.nonce.b[i];
        ctx->aes.ccm.nonce.b[i] = 0;
        n <<= 8;
    }
    n |= ctx->aes.ccm.nonce.b[15];
    ctx->aes.ccm.nonce.b[15] = 1;

    if (n != len)
        return -1;              /* length mismatch */

    if (enc) {
        /* Two operations per block plus one for tag encryption */
        ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
        if (ctx->aes.ccm.blocks > (1ULL << 61))
            return -2;          /* too much data */
    }

    num = 0;
    rem = len & 0xf;
    len &= ~(size_t)0xf;

    if (enc) {
        /* mac-then-encrypt */
        if (len)
            s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            /* Final partial block: xor into ICV and encrypt once. */
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }

        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
    } else {
        /* decrypt-then-mac */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);

        if (len)
            s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }
    }
    /* encrypt tag */
    for (i = 15 - l; i < 16; i++)
        ctx->aes.ccm.nonce.b[i] = 0;

    /* Tag = CBC-MAC xor E(K, A0); result stays in kmac_param.icv. */
    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
             ctx->aes.ccm.kmac_param.k);
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];

    ctx->aes.ccm.nonce.b[0] = flags;    /* restore flags field */
    return 0;
}
1961
1962
/*-
1963
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1964
 * if successful. Otherwise -1 is returned.
1965
 */
1966
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec = ctx->iv;
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);

    /* Records are processed in place and must cover at least the explicit
     * IV plus the m-byte tag. */
    if (out != in
            || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
        return -1;

    if (enc) {
        /* Set explicit iv (sequence number). */
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    }

    /* len now holds only the payload length. */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    /*-
     * Get explicit iv (sequence number). We already have fixed iv
     * (server/client_write_iv) here.
     */
    memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    s390x_aes_ccm_setiv(cctx, ivec, len);

    /* Process aad (sequence number|type|version|length) */
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);

    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        /* Append the tag after the ciphertext. */
        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    } else {
        /* Verify the tag in constant time; wipe plaintext on any failure. */
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
                               cctx->aes.ccm.m))
                return len;
        }

        OPENSSL_cleanse(out, len);
        return -1;
    }
}
2014
2015
/*-
2016
 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
2017
 * returned.
2018
 */
2019
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    int keylen;

    /* Nothing to do if neither key nor iv is supplied. */
    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_get_key_length(ctx);
        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }

        /* Select the function code for this key size and load the key. */
        cctx->aes.ccm.fc = S390X_AES_FC(keylen);
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);

        /* Store encoded m and l. */
        cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
                                 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
        /*
         * Zero the remainder of the nonce block. The fill starts at
         * offset 1, so only sizeof(nonce.b) - 1 bytes remain; using the
         * full array size here wrote one byte past the end of nonce.b.
         */
        memset(cctx->aes.ccm.nonce.b + 1, 0,
               sizeof(cctx->aes.ccm.nonce.b) - 1);
        cctx->aes.ccm.blocks = 0;

        cctx->aes.ccm.key_set = 1;
    }

    if (iv != NULL) {
        /* The nonce occupies the first 15 - l bytes of the context IV. */
        memcpy(ctx->iv, iv, 15 - cctx->aes.ccm.l);

        cctx->aes.ccm.iv_set = 1;
    }

    return 1;
}
2057
2058
/*-
2059
 * Called from EVP layer to initialize context, process additional
2060
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
2061
 * plaintext or process a TLS packet, depending on context. Returns bytes
2062
 * written on success. Otherwise -1 is returned.
2063
 */
2064
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
    int rv;
    unsigned char *buf;

    /* A key must be in place before any processing. */
    if (!cctx->aes.ccm.key_set)
        return -1;

    /* tls_aad_len >= 0 selects the one-shot TLS record path. */
    if (cctx->aes.ccm.tls_aad_len >= 0)
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);

    /*-
     * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
     * so integrity must be checked already at Update() i.e., before
     * potentially corrupted data is output.
     */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->aes.ccm.iv_set)
        return -1;

    if (out == NULL) {
        /* Update(): Pass message length. */
        if (in == NULL) {
            s390x_aes_ccm_setiv(cctx, ctx->iv, len);

            cctx->aes.ccm.len_set = 1;
            return len;
        }

        /* Update(): Process aad. */
        if (!cctx->aes.ccm.len_set && len)
            return -1;

        s390x_aes_ccm_aad(cctx, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!enc && !cctx->aes.ccm.tag_set)
        return -1;

    /* Update(): Process message. */

    if (!cctx->aes.ccm.len_set) {
        /*-
         * In case message length was not previously set explicitly via
         * Update(), set it now.
         */
        s390x_aes_ccm_setiv(cctx, ctx->iv, len);

        cctx->aes.ccm.len_set = 1;
    }

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        cctx->aes.ccm.tag_set = 1;
        return len;
    } else {
        rv = -1;

        /* Decrypt, then verify the tag in constant time; release the
         * plaintext only if it authenticates. */
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
                               cctx->aes.ccm.m))
                rv = len;
        }

        if (rv == -1)
            OPENSSL_cleanse(out, len);

        /* CCM state is single-use on the decrypt side. */
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        return rv;
    }
}
2147
2148
/*-
2149
 * Performs various operations on the context structure depending on control
2150
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2151
 * Code is big-endian.
2152
 */
2153
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf;
    int enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset to CCM defaults: l = 8 (so 7-byte nonce), 12-byte tag. */
        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        /* Nonce length is 15 - l by the CCM formatting rules. */
        *(int *)ptr = 15 - cctx->aes.ccm.l;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        /* Save the aad for later use. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        cctx->aes.ccm.tls_aad_len = arg;

        /* The last two AAD bytes hold the record length (big-endian). */
        len = buf[arg - 2] << 8 | buf[arg - 1];
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
            return 0;

        /* Correct length for explicit iv. */
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;

        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (!enc) {
            if (len < cctx->aes.ccm.m)
                return 0;

            /* Correct length for tag. */
            len -= cctx->aes.ccm.m;
        }

        /* Write the adjusted payload length back into the stored AAD. */
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;

        /* Extra padding: tag appended to record. */
        return cctx->aes.ccm.m;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;

        /* Copy to first part of the iv. */
        memcpy(c->iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* Convert requested nonce length to the equivalent l value. */
        arg = 15 - arg;
        /* fall-through */

    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;

        cctx->aes.ccm.l = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length must be an even value in 4..16. */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;

        /* A tag value may only be supplied when decrypting. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (enc && ptr)
            return 0;

        if (ptr) {
            cctx->aes.ccm.tag_set = 1;
            buf = EVP_CIPHER_CTX_buf_noconst(c);
            memcpy(buf, ptr, arg);
        }

        cctx->aes.ccm.m = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Only valid after a successful encryption pass. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (!enc || !cctx->aes.ccm.tag_set)
            return 0;

        if(arg < cctx->aes.ccm.m)
            return 0;

        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        /* The tag is single-use; require fresh IV/length for the next op. */
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        return 1;

    default:
        return -1;
    }
}
2262
2263
/* CCM cleanup is shared with the generic implementation. */
# define s390x_aes_ccm_cleanup aes_ccm_cleanup

/*-
 * OCB has no dedicated s390x path; alias the s390x entry points onto the
 * generic software implementation declared later in this file.
 */
# ifndef OPENSSL_NO_OCB
#  define S390X_AES_OCB_CTX             EVP_AES_OCB_CTX

#  define s390x_aes_ocb_init_key aes_ocb_init_key
static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                                  const unsigned char *iv, int enc);
#  define s390x_aes_ocb_cipher aes_ocb_cipher
static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);
#  define s390x_aes_ocb_cleanup aes_ocb_cleanup
static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
#  define s390x_aes_ocb_ctrl aes_ocb_ctrl
static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
# endif

/* Likewise, SIV falls back to the generic implementation. */
# ifndef OPENSSL_NO_SIV
#  define S390X_AES_SIV_CTX             EVP_AES_SIV_CTX

#  define s390x_aes_siv_init_key aes_siv_init_key
#  define s390x_aes_siv_cipher aes_siv_cipher
#  define s390x_aes_siv_cleanup aes_siv_cleanup
#  define s390x_aes_siv_ctrl aes_siv_ctrl
# endif
2288
2289
/*-
 * Expands to two EVP_CIPHER tables (s390x-accelerated and generic software)
 * for one AES mode/key size, plus the EVP_aes_<keylen>_<mode>() accessor
 * that picks between them at run time based on CPU capability.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,    \
                              MODE,flags)                               \
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                 \
    nid##_##keylen##_##nmode,blocksize,                                 \
    keylen / 8,                                                         \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    EVP_ORIG_GLOBAL,                                                    \
    s390x_aes_##mode##_init_key,                                        \
    s390x_aes_##mode##_cipher,                                          \
    NULL,                                                               \
    sizeof(S390X_AES_##MODE##_CTX),                                     \
    NULL,                                                               \
    NULL,                                                               \
    NULL,                                                               \
    NULL                                                                \
};                                                                      \
static const EVP_CIPHER aes_##keylen##_##mode = {                       \
    nid##_##keylen##_##nmode,                                           \
    blocksize,                                                          \
    keylen / 8,                                                         \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    EVP_ORIG_GLOBAL,                                                    \
    aes_init_key,                                                       \
    aes_##mode##_cipher,                                                \
    NULL,                                                               \
    sizeof(EVP_AES_KEY),                                                \
    NULL,                                                               \
    NULL,                                                               \
    NULL,                                                               \
    NULL                                                                \
};                                                                      \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                       \
{                                                                       \
    return S390X_aes_##keylen##_##mode##_CAPABLE ?                      \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;       \
}
2327
2328
/*-
 * Same as BLOCK_CIPHER_generic but for modes with custom ctrl/cleanup
 * handlers. XTS and SIV take a double-length key, hence the factor-of-two
 * key length expression.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                 \
    nid##_##keylen##_##mode,                                            \
    blocksize,                                                          \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8,        \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    EVP_ORIG_GLOBAL,                                                    \
    s390x_aes_##mode##_init_key,                                        \
    s390x_aes_##mode##_cipher,                                          \
    s390x_aes_##mode##_cleanup,                                         \
    sizeof(S390X_AES_##MODE##_CTX),                                     \
    NULL,                                                               \
    NULL,                                                               \
    s390x_aes_##mode##_ctrl,                                            \
    NULL                                                                \
};                                                                      \
static const EVP_CIPHER aes_##keylen##_##mode = {                       \
    nid##_##keylen##_##mode,blocksize,                                  \
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8,        \
    ivlen,                                                              \
    flags | EVP_CIPH_##MODE##_MODE,                                     \
    EVP_ORIG_GLOBAL,                                                    \
    aes_##mode##_init_key,                                              \
    aes_##mode##_cipher,                                                \
    aes_##mode##_cleanup,                                               \
    sizeof(EVP_AES_##MODE##_CTX),                                       \
    NULL,                                                               \
    NULL,                                                               \
    aes_##mode##_ctrl,                                                  \
    NULL                                                                \
};                                                                      \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                       \
{                                                                       \
    return S390X_aes_##keylen##_##mode##_CAPABLE ?                      \
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;       \
}
2365
2366
#else
2367
2368
/*-
 * Non-s390x build: a single generic EVP_CIPHER table per mode/key size and
 * an accessor that returns it unconditionally.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
2380
2381
/*-
 * Non-s390x build: custom-mode variant with per-mode init/cipher/cleanup/ctrl
 * handlers. XTS and SIV take a double-length key, hence the factor of two.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
2395
2396
#endif
2397
2398
/*-
 * Instantiates BLOCK_CIPHER_generic for every basic AES mode (CBC, ECB,
 * OFB, CFB128/1/8, CTR) at a given key size.
 */
#define BLOCK_CIPHER_generic_pack(nid,keylen,flags)             \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)       \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
2406
2407
/*
 * Initialise the generic (ECB/CBC/OFB/CFB/CTR) AES key schedule for |ctx|.
 *
 * Selects the fastest available implementation at run time: hardware AES
 * (HWAES), bit-sliced AES (BSAES), vector-permutation AES (VPAES), falling
 * back to the portable C AES_* routines.  A decrypt schedule is only needed
 * for ECB/CBC decryption; every other mode uses the encrypt schedule.
 *
 * Returns 1 on success, 0 on failure (bad key length or key setup error).
 */
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    /* key length in bits, as required by the low-level set-key routines */
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

    if (keylen <= 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
        return 0;
    }

    mode = EVP_CIPHER_CTX_get_mode(ctx);
    /* Only ECB/CBC decryption needs a decryption key schedule */
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
#ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key, keylen, &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
#endif
#ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, keylen, &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) ossl_bsaes_cbc_encrypt;
        } else
#endif
#ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key, keylen, &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
#endif
        {
            /* Portable C fallback */
            ret = AES_set_decrypt_key(key, keylen, &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key, keylen, &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
#endif
#ifdef BSAES_CAPABLE
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, keylen, &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks;
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key, keylen, &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
#endif
    {
        /* Portable C fallback */
        ret = AES_set_encrypt_key(key, keylen, &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
#ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
#endif
    }

    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
2506
2507
/*
 * CBC-mode dispatch: prefer the platform-specific combined CBC routine
 * installed by aes_init_key(); otherwise fall back to the generic
 * CRYPTO_cbc128_* helpers driven by the single-block cipher function.
 * Always returns 1.
 */
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
    const int encrypting = EVP_CIPHER_CTX_is_encrypting(ctx);

    if (dat->stream.cbc != NULL) {
        (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv, encrypting);
    } else if (encrypting) {
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
    } else {
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
    }

    return 1;
}
2524
2525
/*
 * ECB mode: run the selected single-block cipher independently over each
 * full block of the input.  Any trailing partial block is ignored (the EVP
 * layer only hands down whole blocks).  Always returns 1.
 */
static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
    size_t blocksize = EVP_CIPHER_CTX_get_block_size(ctx);
    size_t offset;

    if (len < blocksize)
        return 1;

    /* After the subtraction, offsets up to and including len are full blocks */
    len -= blocksize;
    for (offset = 0; offset <= len; offset += blocksize)
        (*dat->block) (in + offset, out + offset, &dat->ks);

    return 1;
}
2540
2541
/*
 * OFB mode: delegate to the generic OFB helper, persisting the keystream
 * position in the context's num field across calls.  Always returns 1.
 */
static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
    int stream_pos = EVP_CIPHER_CTX_get_num(ctx);

    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv,
                          &stream_pos, dat->block);
    EVP_CIPHER_CTX_set_num(ctx, stream_pos);

    return 1;
}
2552
2553
/*
 * CFB128 mode: delegate to the generic CFB helper; the direction flag
 * distinguishes encrypt from decrypt, and the keystream position is kept
 * in the context's num field.  Always returns 1.
 */
static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
    int stream_pos = EVP_CIPHER_CTX_get_num(ctx);

    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks, ctx->iv, &stream_pos,
                          EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, stream_pos);

    return 1;
}
2565
2566
/*
 * CFB8 mode (8-bit feedback): delegate to the generic CFB-8 helper,
 * persisting the keystream position in the context's num field.
 * Always returns 1.
 */
static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
    int stream_pos = EVP_CIPHER_CTX_get_num(ctx);

    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks, ctx->iv, &stream_pos,
                            EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, stream_pos);

    return 1;
}
2578
2579
/*
 * CFB1 mode (1-bit feedback).  With EVP_CIPH_FLAG_LENGTH_BITS set, |len|
 * is a bit count and is processed directly.  Otherwise |len| is a byte
 * count and is processed in chunks of at most MAXBITCHUNK bytes so the
 * byte-to-bit conversion (len * 8) cannot overflow.  Always returns 1.
 */
static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
    const int encrypting = EVP_CIPHER_CTX_is_encrypting(ctx);
    int stream_pos;

    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
        /* |len| already counts bits in this configuration */
        stream_pos = EVP_CIPHER_CTX_get_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks, ctx->iv,
                                &stream_pos, encrypting, dat->block);
        EVP_CIPHER_CTX_set_num(ctx, stream_pos);
        return 1;
    }

    /* |len| counts bytes: consume bounded chunks, then the remainder */
    for (; len >= MAXBITCHUNK;
         len -= MAXBITCHUNK, in += MAXBITCHUNK, out += MAXBITCHUNK) {
        stream_pos = EVP_CIPHER_CTX_get_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                ctx->iv, &stream_pos, encrypting, dat->block);
        EVP_CIPHER_CTX_set_num(ctx, stream_pos);
    }
    if (len > 0) {
        stream_pos = EVP_CIPHER_CTX_get_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                ctx->iv, &stream_pos, encrypting, dat->block);
        EVP_CIPHER_CTX_set_num(ctx, stream_pos);
    }

    return 1;
}
2613
2614
/*
 * CTR mode: prefer the platform ctr32 stream routine when one was
 * installed, otherwise fall back to the generic CTR helper.  The partial
 * keystream block offset lives in the context's num field.
 * Returns 1 on success, 0 if the stored counter offset is invalid.
 */
static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY, ctx);
    int ctr_pos = EVP_CIPHER_CTX_get_num(ctx);
    unsigned int num;

    /* A negative stored offset means the context is unusable */
    if (ctr_pos < 0)
        return 0;
    num = (unsigned int)ctr_pos;

    if (dat->stream.ctr != NULL)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv,
                                    EVP_CIPHER_CTX_buf_noconst(ctx),
                                    &num, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv,
                              EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                              dat->block);
    EVP_CIPHER_CTX_set_num(ctx, num);

    return 1;
}
2638
2639
/*
 * Instantiate the EVP_CIPHER definitions for AES-128/192/256 in the
 * generic modes (ECB/CBC/OFB/CFB/CFB1/CFB8/CTR) via the macro pack above.
 */
BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2643
/*
 * Release the GCM context: scrub the GCM state (it holds key-derived
 * material) and free the IV buffer if it was separately allocated rather
 * than aliasing the EVP context's built-in IV storage.
 * Returns 1 on success, 0 if no cipher data is present.
 */
static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX, c);

    if (gctx == NULL)
        return 0;

    /* Wipe key-derived GCM state before the memory is reused */
    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));

    /* The IV pointer may alias c->iv; only free a heap-allocated one */
    if (gctx->iv != c->iv)
        OPENSSL_free(gctx->iv);

    return 1;
}
2653
2654
/*
 * Control operations for the AES-GCM EVP cipher.
 *
 * Handles context (re)initialisation, IV length/tag management, the
 * fixed/invocation IV scheme used by TLS (RFC 5288 style), TLS AAD
 * processing, and context copying.  Returns 1 (or a positive value for
 * EVP_CTRL_AEAD_TLS1_AAD) on success, 0 on failure, -1 for unknown ctrls.
 */
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset all per-context GCM state; IV initially aliases c->iv */
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Expected tag may only be set when decrypting; stored in c->buf */
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Computed tag is only available after encryption has finalised */
        if (arg <= 0 || arg > 16 || !c->encrypt
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        /* Encrypt side seeds the invocation field with random bytes */
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        /* Requires a prior SET_IV_FIXED and an initialised key */
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* Decrypt side installs the peer's explicit invocation field */
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;
        {
            /* Last two AAD bytes hold the TLS record length (big-endian) */
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
            /* Internal pointers must be re-targeted at the copy's storage */
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
2795
2796
/*
 * Initialise the AES-GCM key schedule and/or IV for |ctx|.
 *
 * When |key| is supplied the fastest available AES implementation is
 * selected (HWAES > BSAES > VPAES > portable C) via the do/while(0) +
 * break dispatch pattern, and the GCM state is (re)initialised.  When only
 * |iv| is supplied it is either installed directly (key already set) or
 * stashed for later.  Returns 1 on success, 0 on failure.
 */
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        /* key length in bits, as required by the set-key routines */
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, keylen, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
                gctx->ctr = NULL;
# endif
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, keylen, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, keylen, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            /* Portable C fallback */
            AES_set_encrypt_key(key, keylen, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
#ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
#else
            gctx->ctr = NULL;
#endif
        } while (0);

        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
2876
2877
/*
 * Handle TLS GCM packet format. This consists of the last portion of the IV
 * followed by the payload and finally the tag. On encrypt generate IV,
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 * and verify tag.
 */

/*
 * Returns the number of bytes written on success, -1 on any failure
 * (including tag mismatch, in which case the output is wiped).  The IV and
 * TLS AAD state are invalidated on every exit so each record needs fresh
 * setup.
 */
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    int rv = -1;
    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D".  The requirements is for one party to the
     * communication to fail after 2^64 - 1 keys.  We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
        goto err;
    }

    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
                                              : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            /* Process the bulk of the data with the stitched ASM routine */
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
3013
3014
#ifdef FIPS_MODULE
/*
 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVS and keys"
 *
 * See also 8.2.2 RBG-based construction.
 * Random construction consists of a free field (which can be NULL) and a
 * random field which will use a DRBG that can return at least 96 bits of
 * entropy strength. (The DRBG must be seeded by the FIPS module).
 *
 * Fills gctx->iv from |offset| onwards with DRBG output.  Returns 1 on
 * success, 0 if the IV is too short or the DRBG fails.
 */
static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
{
    int rand_len = gctx->ivlen - offset;

    /* The overall IV must be at least 96 bits and leave room past offset */
    if (rand_len <= 0 || gctx->ivlen < 12)
        return 0;

    /* Use DRBG to generate random iv */
    return RAND_bytes(gctx->iv + offset, rand_len) > 0;
}
#endif /* FIPS_MODULE */
3037
3038
/*
 * Core AES-GCM update/final entry point.
 *
 * With |in| non-NULL: |out| == NULL feeds AAD, otherwise encrypts or
 * decrypts |len| bytes (returning |len|).  With |in| == NULL it finalises:
 * on decrypt it verifies the expected tag previously stored in ctx->buf,
 * on encrypt it computes the 16-byte tag into ctx->buf.  TLS records are
 * routed to aes_gcm_tls_cipher().  Returns byte count or 0 on success,
 * -1 on error.
 */
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

#ifdef FIPS_MODULE
    /*
     * FIPS requires generation of AES-GCM IV's inside the FIPS module.
     * The IV can still be set externally (the security policy will state that
     * this is not FIPS compliant). There are some applications
     * where setting the IV externally is the only option available.
     */
    if (!gctx->iv_set) {
        if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
            return -1;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen_rand = 1;
    }
#else
    if (!gctx->iv_set)
        return -1;
#endif /* FIPS_MODULE */

    if (in) {
        if (out == NULL) {
            /* NULL output means this chunk is additional authenticated data */
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    /* Align to a block boundary before the bulk ASM path */
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    /* Align to a block boundary before the bulk ASM path */
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        if (!ctx->encrypt) {
            /* Final call on decrypt: verify the caller-supplied tag */
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        /* Final call on encrypt: emit the 16-byte tag into ctx->buf */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }

}
3180
3181
/*
 * Flags shared by the custom AES cipher definitions below: the cipher
 * manages its own IV and IV length, implements its own cipher routine and
 * copy/ctrl handling, and must always be called at init time.
 */
#define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)

/* AES-GCM cipher definitions for 128/192/256-bit keys, 12-byte default IV */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3192
3193
/*
 * Control operations for AES-XTS.  Only EVP_CTRL_INIT (clear the key1/key2
 * "both key and IV set" indicators) and EVP_CTRL_COPY (re-target internal
 * key-schedule pointers at the destination context's own storage) are
 * supported.  Returns 1 on success, 0 on bad internal pointers, -1 for
 * unknown ctrls.
 */
static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);

    switch (type) {
    case EVP_CTRL_COPY: {
        EVP_CIPHER_CTX *dst = ptr;
        EVP_AES_XTS_CTX *xctx_dst = EVP_C_DATA(EVP_AES_XTS_CTX, dst);

        /* Pointers may only refer to the context's own key schedules */
        if (xctx->xts.key1 != NULL) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_dst->xts.key1 = &xctx_dst->ks1;
        }
        if (xctx->xts.key2 != NULL) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_dst->xts.key2 = &xctx_dst->ks2;
        }
        return 1;
    }
    case EVP_CTRL_INIT:
        /* key1 and key2 are used as an indicator both key and IV are set */
        xctx->xts.key1 = NULL;
        xctx->xts.key2 = NULL;
        return 1;
    default:
        return -1;
    }
}
3219
3220
/*
 * Initialise an AES-XTS context.  The combined key is split into two
 * half-length AES keys (key1 for data, key2 for the tweak).  A NULL key or
 * iv leaves the corresponding half of the context untouched, so key and IV
 * may be supplied in separate calls.  Returns 1 on success, 0 on error.
 */
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    /* Nothing to do: keep any previously configured key/IV. */
    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        /* do/while(0) so accelerated paths can 'break' past the sw fallback */
        do {
            /* The key is two half length keys in reality */
            const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
            const int bytes = keylen / 2;
            const int bits = bytes * 8;

            if (keylen <= 0) {
                ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
                return 0;
            }
            /*
             * Verify that the two keys are different.
             *
             * This addresses the vulnerability described in Rogaway's
             * September 2004 paper:
             *
             *      "Efficient Instantiations of Tweakable Blockciphers and
             *       Refinements to Modes OCB and PMAC".
             *      (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
             *
             * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
             * that:
             *      "The check for Key_1 != Key_2 shall be done at any place
             *       BEFORE using the keys in the XTS-AES algorithm to process
             *       data with them."
             */
            if ((!allow_insecure_decrypt || enc)
                    && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
                ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
                return 0;
            }

#ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
#else
            xctx->stream = NULL;
#endif
            /* key_len is two AES keys */
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
# ifdef HWAES_xts_encrypt
                    xctx->stream = HWAES_xts_encrypt;
# endif
                } else {
                    HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
# ifdef HWAES_xts_decrypt
                    xctx->stream = HWAES_xts_decrypt;
#endif
                }

                /* Tweak key is always used in the encrypt direction. */
                HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? ossl_bsaes_xts_encrypt : ossl_bsaes_xts_decrypt;
            else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            /* Generic C fallback. */
            if (enc) {
                AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);
    }

    if (iv) {
        /* key2 doubles as the "IV set" indicator (see aes_xts_cipher). */
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}
3336
3337
/*
 * Encrypt or decrypt one XTS data unit.  Both keys must already be set
 * (key1/key2 non-NULL), the buffers must be valid and the unit must be at
 * least one AES block long.  Returns 1 on success, 0 on failure.
 */
static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
    int ok;

    /* Reject uninitialised contexts and undersized or missing buffers. */
    if (xctx->xts.key1 == NULL || xctx->xts.key2 == NULL)
        return 0;
    if (out == NULL || in == NULL)
        return 0;
    if (len < AES_BLOCK_SIZE)
        return 0;

    /*
     * Impose a limit of 2^20 blocks per data unit as specified by
     * IEEE Std 1619-2018.  The earlier and obsolete IEEE Std 1619-2007
     * indicated that this was a SHOULD NOT rather than a MUST NOT.
     * NIST SP 800-38E mandates the same limit.
     */
    if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
        ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
        return 0;
    }

    if (xctx->stream != NULL) {
        /* Optimised one-shot assembly path; reports no errors. */
        (*xctx->stream) (in, out, len,
                         xctx->xts.key1, xctx->xts.key2,
                         ctx->iv);
        ok = 1;
    } else {
        /* Generic path: CRYPTO_xts128_encrypt returns nonzero on failure. */
        ok = CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
                                   EVP_CIPHER_CTX_is_encrypting(ctx)) == 0;
    }
    return ok;
}
3369
3370
/* XTS keeps no secondary state outside the ctx, so no cleanup hook needed. */
#define aes_xts_cleanup NULL

#define XTS_FLAGS       (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY)

/* Instantiate the EVP_CIPHER method tables for AES-128-XTS and AES-256-XTS. */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
3378
3379
/*
 * Control operations for AES-CCM.  Handles init/reset, IV/L parameter
 * negotiation, tag get/set, TLS AAD processing and context copy.
 * Returns 1 (or a positive pad length for TLS1_AAD) on success, 0 on
 * failure, -1 for unknown control types.
 */
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* Defaults: L = 8 gives a 7-byte nonce; M = 12 byte tag. */
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        /* CCM nonce length is 15 - L by construction. */
        *(int *)ptr = 15 - cctx->L;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            /* Last two AAD bytes hold the TLS record length (big endian). */
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_is_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            /* Write the corrected plaintext length back into the AAD. */
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(c->iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* Convert requested nonce length into the equivalent L value. */
        arg = 15 - arg;
        /* fall through */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length must be even and within [4,16] per CCM spec. */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        /* An expected-tag value only makes sense when decrypting. */
        if (EVP_CIPHER_CTX_is_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag retrieval is only valid after an encrypt operation. */
        if (!EVP_CIPHER_CTX_is_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        /* Reading the tag invalidates the state: force a fresh IV. */
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
            /* Re-point the copied CCM key at the copy's own key schedule. */
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;

    }
}
3479
3480
/*
 * Initialise an AES-CCM context.  Key and IV may be supplied together or
 * in separate calls; a NULL key or iv leaves that part of the state
 * untouched.  Selects the fastest available AES implementation for the
 * key schedule.  Returns 1 on success, 0 on error.
 */
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        /* Key length in bits, as required by the *_set_encrypt_key APIs. */
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        /* do/while(0) so accelerated paths can 'break' past the fallback */
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, keylen, &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, keylen, &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
#endif
            /* Generic C fallback. */
            AES_set_encrypt_key(key, keylen, &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    }
    if (iv != NULL) {
        /* Nonce length is 15 - L bytes. */
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
3530
3531
/*
 * One-shot AES-CCM processing of a complete TLS record.  The record layout
 * is: 8-byte explicit IV || payload || M-byte tag.  Operates strictly
 * in place (out must equal in).  Returns the number of output bytes on
 * success, or -1 on any failure (including tag mismatch on decrypt).
 */
static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_is_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    /* CCM needs the payload length up front to encode it into block 0. */
    if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
                            len))
            return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx),
                      cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
        /* Prefer the stream (ctr64) variant when available. */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        /* Append the authentication tag after the ciphertext. */
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            /* Constant-time compare of the computed vs received tag. */
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        /* Authentication failed: wipe the plaintext before returning. */
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
3579
3580
/*
 * Streaming AES-CCM update/final entry point.  The EVP calling convention
 * multiplexes several operations through (out, in):
 *   out == NULL && in == NULL : set the total plaintext length (len)
 *   out == NULL && in != NULL : provide AAD
 *   out != NULL && in != NULL : encrypt/decrypt payload
 *   out != NULL && in == NULL : EVP_*Final - returns 0 bytes
 * Returns the number of bytes processed, or -1 on error.
 */
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    /* TLS mode takes a completely separate one-shot path. */
    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            /* Length-setting call: CCM must know the length in advance. */
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv,
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
        /* Prefer the stream (ctr64) variant when available. */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            /* Compare computed tag with the expected tag saved via ctrl. */
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        /* On authentication failure wipe any plaintext produced. */
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        /* Decrypt consumes the IV/tag state: require fresh ones next time. */
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
3651
3652
/* CCM keeps no secondary state outside the ctx, so no cleanup hook needed. */
#define aes_ccm_cleanup NULL

/* Instantiate EVP_CIPHER method tables for AES-{128,192,256}-CCM. */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3660
3661
/* Per-context state for the AES key-wrap (RFC 3394/5649) ciphers. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;             /* AES key schedule */
    } ks;
    /* Indicates if IV has been set */
    unsigned char *iv;          /* NULL = not set; otherwise points at ctx->iv */
} EVP_AES_WRAP_CTX;
3669
3670
/*
 * Initialise an AES key-wrap context.  Key and IV may be supplied in the
 * same or separate calls; supplying a key without an IV clears any
 * previously set IV pointer.  Returns 1 on success, 0 on error.
 */
static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int len;
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;
    if (key != NULL) {
        /* Key length in bits, as required by AES_set_*_key. */
        const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
            AES_set_encrypt_key(key, keylen, &wctx->ks.ks);
        else
            AES_set_decrypt_key(key, keylen, &wctx->ks.ks);
        /* A key-only call invalidates any previously configured IV. */
        if (iv == NULL)
            wctx->iv = NULL;
    }
    if (iv != NULL) {
        if ((len = EVP_CIPHER_CTX_get_iv_length(ctx)) < 0)
            return 0;
        memcpy(ctx->iv, iv, len);
        /* Non-NULL wctx->iv doubles as the "IV has been set" flag. */
        wctx->iv = ctx->iv;
    }
    return 1;
}
3700
3701
/*
 * Perform AES key wrap/unwrap (with or without padding, distinguished by
 * the configured IV length).  A NULL out is a size query: the function
 * returns the worst-case output length without processing anything.
 * Returns the output length on success, 0 or -1 on error (0 for overlap /
 * final-call, -1 for invalid input lengths or wrap failure).
 */
static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_get_iv_length(ctx) == 4;
    /* No final operation so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (ossl_is_partially_overlapping(out, in, inlen)) {
        ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }
    if (!out) {
        if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding output will be exactly 8 bytes smaller than
             * input. If padding it will be at least 8 bytes smaller but we
             * don't know how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        /* RFC 5649 wrap-with-padding variants. */
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        /* RFC 3394 classic key wrap. */
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    /* The CRYPTO_128_* helpers return 0 on failure. */
    return rv ? (int)rv : -1;
}
3759
3760
#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

/*
 * Static EVP_CIPHER method tables and accessors for the six AES key-wrap
 * variants: {128,192,256}-bit keys, with IV length 8 (RFC 3394, no
 * padding) or 4 (RFC 5649, with padding).  Table fields are:
 * nid, block size, key length, IV length, flags, origin, init, do_cipher,
 * cleanup, ctx_size, then four unused hooks.
 */
static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}

static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}
3847
3848
#ifndef OPENSSL_NO_OCB
3849
/*
 * Control operations for AES-OCB: init/reset, IV length, tag get/set,
 * and context copy.  Returns 1 on success, 0 on failure, -1 for unknown
 * control types.
 */
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        octx->key_set = 0;
        octx->iv_set = 0;
        /* Defaults come from the cipher definition: 12-byte IV, 16-byte tag. */
        octx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
        octx->iv = c->iv;
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = octx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;

        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (ptr == NULL) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;

            octx->taglen = arg;
            return 1;
        }
        /* Setting an expected tag value is only valid when decrypting. */
        if (arg != octx->taglen || EVP_CIPHER_CTX_is_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Retrieving the computed tag is only valid when encrypting. */
        if (arg != octx->taglen || !EVP_CIPHER_CTX_is_encrypting(c))
            return 0;

        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        /* Deep-copy the OCB state, rebinding it to the copy's key schedules. */
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;

    }
}
3911
3912
/*
 * Initialise an AES-OCB context.  Key and IV may be supplied separately;
 * when the IV arrives before the key it is saved and applied once the key
 * is set.  Returns 1 on success, 0 on error.
 */
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
       const int keylen = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (keylen <= 0) {
            ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH);
            return 0;
        }
        /* do/while(0) so accelerated paths can 'break' past the fallback */
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, keylen, &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, keylen, &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, keylen, &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, keylen, &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            /* Generic C fallback. */
            AES_set_encrypt_key(key, keylen, &octx->ksenc.ks);
            AES_set_decrypt_key(key, keylen, &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
3993
3994
/*
 * Streaming AES-OCB update/final entry point.  out == NULL means 'in' is
 * AAD; in == NULL means finalise (flush buffers, then verify or emit the
 * tag).  Partial blocks are buffered in the context because the low-level
 * OCB routines only accept whole AES blocks.  Returns the number of bytes
 * written (may be 0 while buffering), or -1 on error.
 */
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low-level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (ossl_is_partially_overlapping(out + *buf_len, in, len)) {
                ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                /* Still not a full block: just accumulate and return. */
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
4142
4143
/* Release the low-level OCB state held inside the cipher context. */
static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    CRYPTO_ocb128_cleanup(&EVP_C_DATA(EVP_AES_OCB_CTX, c)->ocb);
    return 1;
}
4149
4150
/* Instantiate EVP_CIPHER method tables for AES-{128,192,256}-OCB. */
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif                         /* OPENSSL_NO_OCB */