Coverage Report

Created: 2023-06-08 06:40

/src/openssl30/crypto/evp/e_aes.c
Line
Count
Source (jump to first uncovered line)
1
/*
2
 * Copyright 2001-2021 The OpenSSL Project Authors. All Rights Reserved.
3
 *
4
 * Licensed under the Apache License 2.0 (the "License").  You may not use
5
 * this file except in compliance with the License.  You can obtain a copy
6
 * in the file LICENSE in the source distribution or at
7
 * https://www.openssl.org/source/license.html
8
 */
9
10
/*
11
 * This file uses the low level AES functions (which are deprecated for
12
 * non-internal use) in order to implement the EVP AES ciphers.
13
 */
14
#include "internal/deprecated.h"
15
16
#include <string.h>
17
#include <assert.h>
18
#include <openssl/opensslconf.h>
19
#include <openssl/crypto.h>
20
#include <openssl/evp.h>
21
#include <openssl/err.h>
22
#include <openssl/aes.h>
23
#include <openssl/rand.h>
24
#include <openssl/cmac.h>
25
#include "crypto/evp.h"
26
#include "internal/cryptlib.h"
27
#include "crypto/modes.h"
28
#include "crypto/siv.h"
29
#include "crypto/aes_platform.h"
30
#include "evp_local.h"
31
32
typedef struct {
33
    union {
34
        OSSL_UNION_ALIGN;
35
        AES_KEY ks;
36
    } ks;
37
    block128_f block;
38
    union {
39
        cbc128_f cbc;
40
        ctr128_f ctr;
41
    } stream;
42
} EVP_AES_KEY;
43
44
typedef struct {
45
    union {
46
        OSSL_UNION_ALIGN;
47
        AES_KEY ks;
48
    } ks;                       /* AES key schedule to use */
49
    int key_set;                /* Set if key initialised */
50
    int iv_set;                 /* Set if an iv is set */
51
    GCM128_CONTEXT gcm;
52
    unsigned char *iv;          /* Temporary IV store */
53
    int ivlen;                  /* IV length */
54
    int taglen;
55
    int iv_gen;                 /* It is OK to generate IVs */
56
    int iv_gen_rand;            /* No IV was specified, so generate a rand IV */
57
    int tls_aad_len;            /* TLS AAD length */
58
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
59
    ctr128_f ctr;
60
} EVP_AES_GCM_CTX;
61
62
typedef struct {
63
    union {
64
        OSSL_UNION_ALIGN;
65
        AES_KEY ks;
66
    } ks1, ks2;                 /* AES key schedules to use */
67
    XTS128_CONTEXT xts;
68
    void (*stream) (const unsigned char *in,
69
                    unsigned char *out, size_t length,
70
                    const AES_KEY *key1, const AES_KEY *key2,
71
                    const unsigned char iv[16]);
72
} EVP_AES_XTS_CTX;
73
74
#ifdef FIPS_MODULE
75
static const int allow_insecure_decrypt = 0;
76
#else
77
static const int allow_insecure_decrypt = 1;
78
#endif
79
80
typedef struct {
81
    union {
82
        OSSL_UNION_ALIGN;
83
        AES_KEY ks;
84
    } ks;                       /* AES key schedule to use */
85
    int key_set;                /* Set if key initialised */
86
    int iv_set;                 /* Set if an iv is set */
87
    int tag_set;                /* Set if tag is valid */
88
    int len_set;                /* Set if message length set */
89
    int L, M;                   /* L and M parameters from RFC3610 */
90
    int tls_aad_len;            /* TLS AAD length */
91
    CCM128_CONTEXT ccm;
92
    ccm128_f str;
93
} EVP_AES_CCM_CTX;
94
95
#ifndef OPENSSL_NO_OCB
96
typedef struct {
97
    union {
98
        OSSL_UNION_ALIGN;
99
        AES_KEY ks;
100
    } ksenc;                    /* AES key schedule to use for encryption */
101
    union {
102
        OSSL_UNION_ALIGN;
103
        AES_KEY ks;
104
    } ksdec;                    /* AES key schedule to use for decryption */
105
    int key_set;                /* Set if key initialised */
106
    int iv_set;                 /* Set if an iv is set */
107
    OCB128_CONTEXT ocb;
108
    unsigned char *iv;          /* Temporary IV store */
109
    unsigned char tag[16];
110
    unsigned char data_buf[16]; /* Store partial data blocks */
111
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
112
    int data_buf_len;
113
    int aad_buf_len;
114
    int ivlen;                  /* IV length */
115
    int taglen;
116
} EVP_AES_OCB_CTX;
117
#endif
118
119
0
#define MAXBITCHUNK     ((size_t)1<<(sizeof(size_t)*8-4))
120
121
/* increment counter (64-bit int) by 1 */
122
static void ctr64_inc(unsigned char *counter)
123
0
{
124
0
    int n = 8;
125
0
    unsigned char c;
126
127
0
    do {
128
0
        --n;
129
0
        c = counter[n];
130
0
        ++c;
131
0
        counter[n] = c;
132
0
        if (c)
133
0
            return;
134
0
    } while (n);
135
0
}
136
137
#if defined(AESNI_CAPABLE)
138
# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
139
#  define AES_GCM_ASM2(gctx)      (gctx->gcm.block==(block128_f)aesni_encrypt && \
140
                                 gctx->gcm.ghash==gcm_ghash_avx)
141
#  undef AES_GCM_ASM2          /* minor size optimization */
142
# endif
143
144
static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
145
                          const unsigned char *iv, int enc)
146
0
{
147
0
    int ret, mode;
148
0
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
149
150
0
    mode = EVP_CIPHER_CTX_get_mode(ctx);
151
0
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
152
0
        && !enc) {
153
0
        ret = aesni_set_decrypt_key(key,
154
0
                                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
155
0
                                    &dat->ks.ks);
156
0
        dat->block = (block128_f) aesni_decrypt;
157
0
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
158
0
            (cbc128_f) aesni_cbc_encrypt : NULL;
159
0
    } else {
160
0
        ret = aesni_set_encrypt_key(key,
161
0
                                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
162
0
                                    &dat->ks.ks);
163
0
        dat->block = (block128_f) aesni_encrypt;
164
0
        if (mode == EVP_CIPH_CBC_MODE)
165
0
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
166
0
        else if (mode == EVP_CIPH_CTR_MODE)
167
0
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
168
0
        else
169
0
            dat->stream.cbc = NULL;
170
0
    }
171
172
0
    if (ret < 0) {
173
0
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
174
0
        return 0;
175
0
    }
176
177
0
    return 1;
178
0
}
179
180
static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
181
                            const unsigned char *in, size_t len)
182
0
{
183
0
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
184
0
                      ctx->iv, EVP_CIPHER_CTX_is_encrypting(ctx));
185
186
0
    return 1;
187
0
}
188
189
static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
190
                            const unsigned char *in, size_t len)
191
0
{
192
0
    size_t bl = EVP_CIPHER_CTX_get_block_size(ctx);
193
194
0
    if (len < bl)
195
0
        return 1;
196
197
0
    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
198
0
                      EVP_CIPHER_CTX_is_encrypting(ctx));
199
200
0
    return 1;
201
0
}
202
203
# define aesni_ofb_cipher aes_ofb_cipher
204
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
205
                            const unsigned char *in, size_t len);
206
207
# define aesni_cfb_cipher aes_cfb_cipher
208
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
209
                            const unsigned char *in, size_t len);
210
211
# define aesni_cfb8_cipher aes_cfb8_cipher
212
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
213
                             const unsigned char *in, size_t len);
214
215
# define aesni_cfb1_cipher aes_cfb1_cipher
216
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
217
                             const unsigned char *in, size_t len);
218
219
# define aesni_ctr_cipher aes_ctr_cipher
220
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
221
                            const unsigned char *in, size_t len);
222
223
static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
224
                              const unsigned char *iv, int enc)
225
0
{
226
0
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
227
0
    if (!iv && !key)
228
0
        return 1;
229
0
    if (key) {
230
0
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
231
0
                              &gctx->ks.ks);
232
0
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
233
0
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
234
        /*
235
         * If we have an iv can set it directly, otherwise use saved IV.
236
         */
237
0
        if (iv == NULL && gctx->iv_set)
238
0
            iv = gctx->iv;
239
0
        if (iv) {
240
0
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
241
0
            gctx->iv_set = 1;
242
0
        }
243
0
        gctx->key_set = 1;
244
0
    } else {
245
        /* If key set use IV, otherwise copy */
246
0
        if (gctx->key_set)
247
0
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
248
0
        else
249
0
            memcpy(gctx->iv, iv, gctx->ivlen);
250
0
        gctx->iv_set = 1;
251
0
        gctx->iv_gen = 0;
252
0
    }
253
0
    return 1;
254
0
}
255
256
# define aesni_gcm_cipher aes_gcm_cipher
257
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
258
                            const unsigned char *in, size_t len);
259
260
static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
261
                              const unsigned char *iv, int enc)
262
0
{
263
0
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
264
265
0
    if (!iv && !key)
266
0
        return 1;
267
268
0
    if (key) {
269
        /* The key is two half length keys in reality */
270
0
        const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
271
0
        const int bits = bytes * 8;
272
273
        /*
274
         * Verify that the two keys are different.
275
         *
276
         * This addresses Rogaway's vulnerability.
277
         * See comment in aes_xts_init_key() below.
278
         */
279
0
        if ((!allow_insecure_decrypt || enc)
280
0
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
281
0
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
282
0
            return 0;
283
0
        }
284
285
        /* key_len is two AES keys */
286
0
        if (enc) {
287
0
            aesni_set_encrypt_key(key, bits, &xctx->ks1.ks);
288
0
            xctx->xts.block1 = (block128_f) aesni_encrypt;
289
0
            xctx->stream = aesni_xts_encrypt;
290
0
        } else {
291
0
            aesni_set_decrypt_key(key, bits, &xctx->ks1.ks);
292
0
            xctx->xts.block1 = (block128_f) aesni_decrypt;
293
0
            xctx->stream = aesni_xts_decrypt;
294
0
        }
295
296
0
        aesni_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
297
0
        xctx->xts.block2 = (block128_f) aesni_encrypt;
298
299
0
        xctx->xts.key1 = &xctx->ks1;
300
0
    }
301
302
0
    if (iv) {
303
0
        xctx->xts.key2 = &xctx->ks2;
304
0
        memcpy(ctx->iv, iv, 16);
305
0
    }
306
307
0
    return 1;
308
0
}
309
310
# define aesni_xts_cipher aes_xts_cipher
311
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
312
                            const unsigned char *in, size_t len);
313
314
static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
315
                              const unsigned char *iv, int enc)
316
0
{
317
0
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
318
0
    if (!iv && !key)
319
0
        return 1;
320
0
    if (key) {
321
0
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
322
0
                              &cctx->ks.ks);
323
0
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
324
0
                           &cctx->ks, (block128_f) aesni_encrypt);
325
0
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
326
0
            (ccm128_f) aesni_ccm64_decrypt_blocks;
327
0
        cctx->key_set = 1;
328
0
    }
329
0
    if (iv) {
330
0
        memcpy(ctx->iv, iv, 15 - cctx->L);
331
0
        cctx->iv_set = 1;
332
0
    }
333
0
    return 1;
334
0
}
335
336
# define aesni_ccm_cipher aes_ccm_cipher
337
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
338
                            const unsigned char *in, size_t len);
339
340
# ifndef OPENSSL_NO_OCB
341
static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
342
                              const unsigned char *iv, int enc)
343
0
{
344
0
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
345
0
    if (!iv && !key)
346
0
        return 1;
347
0
    if (key) {
348
0
        do {
349
            /*
350
             * We set both the encrypt and decrypt key here because decrypt
351
             * needs both. We could possibly optimise to remove setting the
352
             * decrypt for an encryption operation.
353
             */
354
0
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
355
0
                                  &octx->ksenc.ks);
356
0
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
357
0
                                  &octx->ksdec.ks);
358
0
            if (!CRYPTO_ocb128_init(&octx->ocb,
359
0
                                    &octx->ksenc.ks, &octx->ksdec.ks,
360
0
                                    (block128_f) aesni_encrypt,
361
0
                                    (block128_f) aesni_decrypt,
362
0
                                    enc ? aesni_ocb_encrypt
363
0
                                        : aesni_ocb_decrypt))
364
0
                return 0;
365
0
        }
366
0
        while (0);
367
368
        /*
369
         * If we have an iv we can set it directly, otherwise use saved IV.
370
         */
371
0
        if (iv == NULL && octx->iv_set)
372
0
            iv = octx->iv;
373
0
        if (iv) {
374
0
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
375
0
                != 1)
376
0
                return 0;
377
0
            octx->iv_set = 1;
378
0
        }
379
0
        octx->key_set = 1;
380
0
    } else {
381
        /* If key set use IV, otherwise copy */
382
0
        if (octx->key_set)
383
0
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
384
0
        else
385
0
            memcpy(octx->iv, iv, octx->ivlen);
386
0
        octx->iv_set = 1;
387
0
    }
388
0
    return 1;
389
0
}
390
391
#  define aesni_ocb_cipher aes_ocb_cipher
392
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
393
                            const unsigned char *in, size_t len);
394
# endif                        /* OPENSSL_NO_OCB */
395
396
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
397
static const EVP_CIPHER aesni_##keylen##_##mode = { \
398
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
399
        flags|EVP_CIPH_##MODE##_MODE,   \
400
        EVP_ORIG_GLOBAL,                \
401
        aesni_init_key,                 \
402
        aesni_##mode##_cipher,          \
403
        NULL,                           \
404
        sizeof(EVP_AES_KEY),            \
405
        NULL,NULL,NULL,NULL }; \
406
static const EVP_CIPHER aes_##keylen##_##mode = { \
407
        nid##_##keylen##_##nmode,blocksize,     \
408
        keylen/8,ivlen,                 \
409
        flags|EVP_CIPH_##MODE##_MODE,   \
410
        EVP_ORIG_GLOBAL,                 \
411
        aes_init_key,                   \
412
        aes_##mode##_cipher,            \
413
        NULL,                           \
414
        sizeof(EVP_AES_KEY),            \
415
        NULL,NULL,NULL,NULL }; \
416
21
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
21
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_cbc
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_ecb
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_ofb
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_cfb128
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_cfb1
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_cfb8
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_ctr
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_cbc
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_ecb
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_ofb
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_cfb128
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_cfb1
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_cfb8
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_ctr
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_cbc
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_ecb
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_ofb
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_cfb128
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_cfb1
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_cfb8
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_ctr
Line
Count
Source
416
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
417
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
418
419
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
420
static const EVP_CIPHER aesni_##keylen##_##mode = { \
421
        nid##_##keylen##_##mode,blocksize, \
422
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
423
        ivlen,                          \
424
        flags|EVP_CIPH_##MODE##_MODE,   \
425
        EVP_ORIG_GLOBAL,                \
426
        aesni_##mode##_init_key,        \
427
        aesni_##mode##_cipher,          \
428
        aes_##mode##_cleanup,           \
429
        sizeof(EVP_AES_##MODE##_CTX),   \
430
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
431
static const EVP_CIPHER aes_##keylen##_##mode = { \
432
        nid##_##keylen##_##mode,blocksize, \
433
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
434
        ivlen,                          \
435
        flags|EVP_CIPH_##MODE##_MODE,   \
436
        EVP_ORIG_GLOBAL,                \
437
        aes_##mode##_init_key,          \
438
        aes_##mode##_cipher,            \
439
        aes_##mode##_cleanup,           \
440
        sizeof(EVP_AES_##MODE##_CTX),   \
441
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
442
11
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
11
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_gcm
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_gcm
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_gcm
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_xts
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_xts
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_ccm
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_ccm
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_ccm
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_128_ocb
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_192_ocb
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
EVP_aes_256_ocb
Line
Count
Source
442
1
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
443
1
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
444
445
#elif defined(SPARC_AES_CAPABLE)
446
447
static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
448
                           const unsigned char *iv, int enc)
449
{
450
    int ret, mode, bits;
451
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
452
453
    mode = EVP_CIPHER_CTX_get_mode(ctx);
454
    bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
455
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
456
        && !enc) {
457
        ret = 0;
458
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
459
        dat->block = (block128_f) aes_t4_decrypt;
460
        switch (bits) {
461
        case 128:
462
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
463
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
464
            break;
465
        case 192:
466
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
467
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
468
            break;
469
        case 256:
470
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
471
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
472
            break;
473
        default:
474
            ret = -1;
475
        }
476
    } else {
477
        ret = 0;
478
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
479
        dat->block = (block128_f) aes_t4_encrypt;
480
        switch (bits) {
481
        case 128:
482
            if (mode == EVP_CIPH_CBC_MODE)
483
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
484
            else if (mode == EVP_CIPH_CTR_MODE)
485
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
486
            else
487
                dat->stream.cbc = NULL;
488
            break;
489
        case 192:
490
            if (mode == EVP_CIPH_CBC_MODE)
491
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
492
            else if (mode == EVP_CIPH_CTR_MODE)
493
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
494
            else
495
                dat->stream.cbc = NULL;
496
            break;
497
        case 256:
498
            if (mode == EVP_CIPH_CBC_MODE)
499
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
500
            else if (mode == EVP_CIPH_CTR_MODE)
501
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
502
            else
503
                dat->stream.cbc = NULL;
504
            break;
505
        default:
506
            ret = -1;
507
        }
508
    }
509
510
    if (ret < 0) {
511
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
512
        return 0;
513
    }
514
515
    return 1;
516
}
517
518
# define aes_t4_cbc_cipher aes_cbc_cipher
519
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
520
                             const unsigned char *in, size_t len);
521
522
# define aes_t4_ecb_cipher aes_ecb_cipher
523
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
524
                             const unsigned char *in, size_t len);
525
526
# define aes_t4_ofb_cipher aes_ofb_cipher
527
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
528
                             const unsigned char *in, size_t len);
529
530
# define aes_t4_cfb_cipher aes_cfb_cipher
531
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
532
                             const unsigned char *in, size_t len);
533
534
# define aes_t4_cfb8_cipher aes_cfb8_cipher
535
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
536
                              const unsigned char *in, size_t len);
537
538
# define aes_t4_cfb1_cipher aes_cfb1_cipher
539
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
540
                              const unsigned char *in, size_t len);
541
542
# define aes_t4_ctr_cipher aes_ctr_cipher
543
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
544
                             const unsigned char *in, size_t len);
545
546
static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
547
                               const unsigned char *iv, int enc)
548
{
549
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
550
    if (!iv && !key)
551
        return 1;
552
    if (key) {
553
        int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
554
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
555
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
556
                           (block128_f) aes_t4_encrypt);
557
        switch (bits) {
558
        case 128:
559
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
560
            break;
561
        case 192:
562
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
563
            break;
564
        case 256:
565
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
566
            break;
567
        default:
568
            return 0;
569
        }
570
        /*
571
         * If we have an iv can set it directly, otherwise use saved IV.
572
         */
573
        if (iv == NULL && gctx->iv_set)
574
            iv = gctx->iv;
575
        if (iv) {
576
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
577
            gctx->iv_set = 1;
578
        }
579
        gctx->key_set = 1;
580
    } else {
581
        /* If key set use IV, otherwise copy */
582
        if (gctx->key_set)
583
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
584
        else
585
            memcpy(gctx->iv, iv, gctx->ivlen);
586
        gctx->iv_set = 1;
587
        gctx->iv_gen = 0;
588
    }
589
    return 1;
590
}
591
592
# define aes_t4_gcm_cipher aes_gcm_cipher
593
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
594
                             const unsigned char *in, size_t len);
595
596
static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
597
                               const unsigned char *iv, int enc)
598
{
599
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
600
601
    if (!iv && !key)
602
        return 1;
603
604
    if (key) {
605
        /* The key is two half length keys in reality */
606
        const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
607
        const int bits = bytes * 8;
608
609
        /*
610
         * Verify that the two keys are different.
611
         *
612
         * This addresses Rogaway's vulnerability.
613
         * See comment in aes_xts_init_key() below.
614
         */
615
        if ((!allow_insecure_decrypt || enc)
616
                && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
617
            ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
618
            return 0;
619
        }
620
621
        xctx->stream = NULL;
622
        /* key_len is two AES keys */
623
        if (enc) {
624
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
625
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
626
            switch (bits) {
627
            case 128:
628
                xctx->stream = aes128_t4_xts_encrypt;
629
                break;
630
            case 256:
631
                xctx->stream = aes256_t4_xts_encrypt;
632
                break;
633
            default:
634
                return 0;
635
            }
636
        } else {
637
            aes_t4_set_decrypt_key(key, bits, &xctx->ks1.ks);
638
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
639
            switch (bits) {
640
            case 128:
641
                xctx->stream = aes128_t4_xts_decrypt;
642
                break;
643
            case 256:
644
                xctx->stream = aes256_t4_xts_decrypt;
645
                break;
646
            default:
647
                return 0;
648
            }
649
        }
650
651
        aes_t4_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
652
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;
653
654
        xctx->xts.key1 = &xctx->ks1;
655
    }
656
657
    if (iv) {
658
        xctx->xts.key2 = &xctx->ks2;
659
        memcpy(ctx->iv, iv, 16);
660
    }
661
662
    return 1;
663
}
664
665
# define aes_t4_xts_cipher aes_xts_cipher
666
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
667
                             const unsigned char *in, size_t len);
668
669
/*
 * Set up a CCM context for SPARC T4: install the key and/or the nonce.
 * Either argument may be NULL to leave that part of the state untouched.
 * Always returns 1.
 */
static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    /* Nothing to do if neither key nor IV is supplied. */
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
        /* CCM only ever uses the block cipher in the encrypt direction. */
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        /* CCM nonce length is 15 - L octets. */
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
689
690
# define aes_t4_ccm_cipher aes_ccm_cipher
691
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
692
                             const unsigned char *in, size_t len);
693
694
# ifndef OPENSSL_NO_OCB
695
/*
 * Set up an OCB context for SPARC T4: install the key (both encrypt and
 * decrypt schedules) and/or the IV. Returns 1 on success, 0 on failure.
 */
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    /* Nothing to do if neither key nor IV is supplied. */
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                   &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                   &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
743
744
#  define aes_t4_ocb_cipher aes_ocb_cipher
745
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
746
                             const unsigned char *in, size_t len);
747
# endif                        /* OPENSSL_NO_OCB */
748
749
# ifndef OPENSSL_NO_SIV
750
#  define aes_t4_siv_init_key aes_siv_init_key
751
#  define aes_t4_siv_cipher aes_siv_cipher
752
# endif /* OPENSSL_NO_SIV */
753
754
/*
 * Emit two EVP_CIPHER tables for a generic (non-custom-ctx) AES mode — one
 * backed by the SPARC T4 routines and one by the software routines — plus
 * the public EVP_aes_<keylen>_<mode>() getter, which selects the T4 variant
 * at runtime when SPARC_AES_CAPABLE is true.
 */
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_t4_init_key,                \
        aes_t4_##mode##_cipher,         \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
776
777
/*
 * Same as BLOCK_CIPHER_generic but for modes with a custom context, cleanup
 * and ctrl handler (GCM/XTS/CCM/OCB/SIV). XTS and SIV take a double-length
 * key, hence the keylen adjustment in the key-length slot.
 */
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_t4_##mode##_init_key,       \
        aes_t4_##mode##_cipher,         \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
        ivlen,                          \
        flags|EVP_CIPH_##MODE##_MODE,   \
        EVP_ORIG_GLOBAL,                \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }
802
803
#elif defined(S390X_aes_128_CAPABLE)
804
/* IBM S390X support */
805
/* Cipher context for AES-ECB via the s390x KM instruction. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        struct {
            unsigned char k[32];        /* raw AES key (up to 256 bits) */
        } param;
        /* KM-AES parameter block - end */
    } km;
    unsigned int fc;                    /* KM function code (incl. decrypt bit) */
} S390X_AES_ECB_CTX;
819
820
/* Cipher context for AES-OFB via the s390x KMO instruction. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];       /* chaining value / keystream block */
            unsigned char k[32];        /* raw AES key */
        } param;
        /* KMO-AES parameter block - end */
    } kmo;
    unsigned int fc;                    /* KMO function code */

    int res;                            /* offset of unused keystream bytes
                                         * carried over to the next call */
} S390X_AES_OFB_CTX;
837
838
/* Cipher context for AES-CFB via the s390x KMF instruction. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];       /* chaining value (feedback register) */
            unsigned char k[32];        /* raw AES key */
        } param;
        /* KMF-AES parameter block - end */
    } kmf;
    unsigned int fc;                    /* KMF function code (incl. feedback
                                         * width in bits 0-7 and decrypt bit) */

    int res;                            /* offset into the current feedback
                                         * block carried over between calls */
} S390X_AES_CFB_CTX;
855
856
/* Cipher context for AES-GCM via the s390x KMA instruction. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;                       /* 32-bit counter value */
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;                        /* tag / intermediate GHASH value */
            unsigned char h[16];        /* per KMA spec: hash subkey —
                                         * maintained by hardware */
            unsigned long long taadl;   /* total AAD length in bits */
            unsigned long long tpcl;    /* total text length in bits */
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;                       /* initial counter block J0 */
            unsigned char k[32];        /* raw AES key */
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;                    /* KMA function code + flag bits */
    int key_set;                        /* set if key initialised */

    unsigned char *iv;                  /* points at EVP ctx iv, or at a
                                         * malloc'd padded buffer when
                                         * ivlen != 12 */
    int ivlen;
    int iv_set;
    int iv_gen;                         /* it is OK to generate IVs */

    int taglen;

    unsigned char ares[16];             /* partial AAD block carried over */
    unsigned char mres[16];             /* partial message block carried over */
    unsigned char kres[16];             /* keystream for the partial block */
    int areslen;
    int mreslen;
    int kreslen;

    int tls_aad_len;
    uint64_t tls_enc_records;   /* Number of TLS records encrypted */
} S390X_AES_GCM_CTX;
904
905
/* Cipher context for AES-CCM via the s390x KMAC/KM instructions. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;                  /* intermediate chaining value */
                unsigned char k[32];    /* raw AES key; overlaps key.k */
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;

            unsigned long long blocks;
            int l;                      /* presumably CCM's L (length-field
                                         * size) — confirm against the s390x
                                         * CCM code */
            int m;                      /* presumably CCM's M (tag length) */
            int tls_aad_len;
            int iv_set;
            int tag_set;
            int len_set;
            int key_set;

            unsigned char pad[140];
            unsigned int fc;            /* function code; overlaps
                                         * key.k.rounds by design */
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;
956
957
# define s390x_aes_init_key aes_init_key
958
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
959
                              const unsigned char *iv, int enc);
960
961
# define S390X_AES_CBC_CTX              EVP_AES_KEY
962
963
# define s390x_aes_cbc_init_key aes_init_key
964
965
# define s390x_aes_cbc_cipher aes_cbc_cipher
966
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
967
                                const unsigned char *in, size_t len);
968
969
/*
 * Install the raw AES key for ECB and derive the KM function code from the
 * key length; the decrypt bit is folded in for decryption contexts.
 * Always returns 1.
 */
static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_ECB_CTX *ectx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int klen = EVP_CIPHER_CTX_get_key_length(ctx);
    unsigned int fc = S390X_AES_FC(klen);

    if (enc == 0)
        fc |= S390X_DECRYPT;

    ectx->fc = fc;
    memcpy(ectx->km.param.k, key, klen);
    return 1;
}
983
984
/*
 * ECB en/decrypt via the s390x KM instruction; the direction is encoded in
 * cctx->fc (decrypt bit set at init time). Always returns 1.
 */
static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
    return 1;
}
992
993
/*
 * Initialise the OFB state: load the chaining value from the original IV,
 * install the raw key, select the KMO function code and clear the
 * partial-block offset. Always returns 1.
 */
static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_OFB_CTX *octx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const int klen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int vlen = EVP_CIPHER_CTX_get_iv_length(ctx);

    octx->fc = S390X_AES_FC(klen);
    octx->res = 0;
    memcpy(octx->kmo.param.k, key, klen);
    memcpy(octx->kmo.param.cv, ctx->oiv, vlen);
    return 1;
}
1008
1009
/*
 * OFB en/decrypt. Keystream bytes buffered from the previous call
 * (cctx->res) are consumed first, all complete blocks are handed to the
 * KMO instruction in one go, and a trailing partial block is handled by
 * generating one extra keystream block with KM and XORing byte-wise. The
 * chaining value round-trips through the EVP ctx IV buffer between calls.
 * Always returns 1.
 */
static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = cctx->res;      /* offset into the buffered keystream block */
    int rem;

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    /* Use up the buffered keystream bytes first. */
    while (n && len) {
        *out = *in ^ cctx->kmo.param.cv[n];
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        /* Bulk-process all complete blocks in one KMO invocation. */
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        out += len;
        in += len;
    }

    if (rem) {
        /* Generate one more keystream block for the trailing partial block. */
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);

        while (rem--) {
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
            ++n;
        }
    }

    memcpy(iv, cctx->kmo.param.cv, ivlen);
    cctx->res = n;
    return 1;
}
1051
1052
/*
 * Initialise the CFB-128 state: select the KMF function code (16-byte
 * feedback width, decrypt bit as needed), clear the partial-block offset
 * and load the feedback register and raw key. Always returns 1.
 */
static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *fctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int klen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int vlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned int fc = S390X_AES_FC(klen) | (16 << 24); /* 16-byte feedback */

    if (enc == 0)
        fc |= S390X_DECRYPT;

    fctx->fc = fc;
    fctx->res = 0;
    memcpy(fctx->kmf.param.cv, ctx->oiv, vlen);
    memcpy(fctx->kmf.param.k, key, klen);
    return 1;
}
1071
1072
/*
 * CFB-128 en/decrypt. Feedback-register bytes buffered from the previous
 * call (cctx->res) are consumed first, complete blocks go through the KMF
 * instruction, and a trailing partial block is handled by encrypting the
 * feedback register once with KM. The feedback register round-trips
 * through the EVP ctx IV buffer between calls. Always returns 1.
 */
static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
    const int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    int n = cctx->res;      /* offset into the current feedback block */
    int rem;
    unsigned char tmp;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    /* Use up the buffered feedback bytes first. */
    while (n && len) {
        tmp = *in;
        *out = cctx->kmf.param.cv[n] ^ tmp;
        /* Feedback is the ciphertext byte: the output when encrypting,
         * the input when decrypting. */
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        out += len;
        in += len;
    }

    if (rem) {
        /* Encrypt the feedback register once for the trailing partial
         * block; note the plain (encrypt-direction) function code. */
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        while (rem--) {
            tmp = in[n];
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
            ++n;
        }
    }

    memcpy(iv, cctx->kmf.param.cv, ivlen);
    cctx->res = n;
    return 1;
}
1121
1122
/*
 * Initialise the CFB-8 state: select the KMF function code (1-byte
 * feedback width, decrypt bit as needed) and load the feedback register
 * and raw key. Always returns 1.
 */
static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *fctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int klen = EVP_CIPHER_CTX_get_key_length(ctx);
    const int vlen = EVP_CIPHER_CTX_get_iv_length(ctx);
    unsigned int fc = S390X_AES_FC(klen) | (1 << 24); /* 1-byte feedback */

    if (enc == 0)
        fc |= S390X_DECRYPT;

    fctx->fc = fc;
    memcpy(fctx->kmf.param.cv, ctx->oiv, vlen);
    memcpy(fctx->kmf.param.k, key, klen);
    return 1;
}
1140
1141
/*
 * CFB-8 en/decrypt: the KMF instruction handles the byte-wise feedback
 * entirely in hardware, so this just shuttles the feedback register
 * between the EVP ctx IV buffer and the parameter block around the call.
 * Always returns 1.
 */
static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *fctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    unsigned char *ivbuf = EVP_CIPHER_CTX_iv_noconst(ctx);
    const int vlen = EVP_CIPHER_CTX_get_iv_length(ctx);

    memcpy(fctx->kmf.param.cv, ivbuf, vlen);
    s390x_kmf(in, len, out, fctx->fc, &fctx->kmf.param);
    memcpy(ivbuf, fctx->kmf.param.cv, vlen);
    return 1;
}
1153
1154
# define s390x_aes_cfb1_init_key aes_init_key
1155
1156
# define s390x_aes_cfb1_cipher aes_cfb1_cipher
1157
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1158
                                 const unsigned char *in, size_t len);
1159
1160
# define S390X_AES_CTR_CTX              EVP_AES_KEY
1161
1162
# define s390x_aes_ctr_init_key aes_init_key
1163
1164
# define s390x_aes_ctr_cipher aes_ctr_cipher
1165
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1166
                                const unsigned char *in, size_t len);
1167
1168
/* iv + padding length for iv lengths != 12 */
1169
# define S390X_gcm_ivpadlen(i)  ((((i) + 15) >> 4 << 4) + 16)
1170
1171
/*-
1172
 * Process additional authenticated data. Returns 0 on success. Code is
1173
 * big-endian.
1174
 */
1175
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    /* AAD must be supplied before any plaintext/ciphertext is processed. */
    if (ctx->kma.param.tpcl)
        return -2;

    /* Enforce the GCM limit on total AAD length; also catch overflow. */
    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    n = ctx->areslen;
    if (n) {
        /* Top up the buffered partial AAD block first. */
        while (n && len) {
            ctx->ares[n] = *aad;
            n = (n + 1) & 0xf;
            ++aad;
            --len;
        }
        /* ctx->ares contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
        }
        ctx->areslen = n;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        /* Hash all complete AAD blocks in one KMA invocation. */
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    if (rem) {
        /* Save the trailing partial block for a later call. */
        ctx->areslen = rem;

        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }
    return 0;
}
1224
1225
/*-
1226
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
1227
 * success. Code is big-endian.
1228
 */
1229
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    /* Enforce the GCM limit on total text length; also catch overflow. */
    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    n = ctx->mreslen;
    if (n) {
        /* Top up the buffered partial message block. */
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            /* Authenticate the now-complete block (plus any pending AAD). */
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            ctx->areslen = 0;

            /* previous call already encrypted/decrypted its remainder,
             * see comment below */
            n = ctx->mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            ctx->mreslen = 0;
        }
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        /* Bulk-process all remaining complete blocks with KMA. */
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            /* Generate a fresh keystream block (counter + 1) with KM. */
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
    return 0;
}
1313
1314
/*-
1315
 * Initialize context structure. Code is big-endian.
1316
 */
1317
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
{
    /* Reset per-message state: tag, lengths and buffered remainders. */
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;
    ctx->mreslen = 0;
    ctx->areslen = 0;
    ctx->kreslen = 0;

    if (ctx->ivlen == 12) {
        /* 96-bit IV: J0 = IV || 0^31 || 1, no hashing needed. */
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
    } else {
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        /* J0 is the GHASH of the padded IV; seed the counter from its
         * last word, then clear the tag accumulator again. */
        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}
1346
1347
/*-
1348
 * Performs various operations on the context structure depending on control
1349
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
1350
 * Code is big-endian.
1351
 */
1352
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf;
    int ivlen, enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset the context to its default state; iv aliases the EVP
         * ctx iv buffer until a non-default IV length is set. */
        ivlen = EVP_CIPHER_get_iv_length(c->cipher);
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = ivlen;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;

        /* Non-96-bit IVs need a padded buffer for GHASH-ing the IV. */
        if (arg != 12) {
            len = S390X_gcm_ivpadlen(arg);

            /* Allocate memory for iv if needed. */
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                if (gctx->iv != c->iv)
                    OPENSSL_free(gctx->iv);

                if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                    ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
            }
            /* Add padding. */
            memset(gctx->iv + arg, 0, len - arg - 8);
            /* Trailing 64-bit big-endian IV bit-length, as GHASH expects. */
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Expected tag (decrypt only) is stashed in the EVP ctx buffer. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Computed tag (encrypt only) comes from the KMA parameter block. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        if (arg)
            memcpy(gctx->iv, ptr, arg);

        /* When encrypting, randomise the invocation field. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;

        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        /* Hand the trailing arg bytes of the IV back to the caller. */
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* Decrypt side: install the peer's invocation field. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;

        /* Record length lives in the last two AAD bytes (big-endian). */
        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);

        if (gctx->iv == c->iv) {
            /* iv aliases the EVP ctx buffer; point the copy at its own. */
            gctx_out->iv = out->iv;
        } else {
            /* iv was malloc'd (padded buffer); deep-copy it. */
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                return 0;
            }

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}
1520
1521
/*-
1522
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
1523
 */
1524
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    int keylen;

    /* Nothing to do if neither key nor IV is supplied. */
    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        /* Raw key goes straight into the KMA parameter block — the
         * hardware needs no expanded key schedule. */
        keylen = EVP_CIPHER_CTX_get_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);

        gctx->fc = S390X_AES_FC(keylen);
        if (!enc)
            gctx->fc |= S390X_DECRYPT;

        /* No IV supplied now: fall back to a previously saved one. */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;

        if (iv != NULL) {
            s390x_aes_gcm_setiv(gctx, iv);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* IV only: apply it now if the key is set, otherwise save it. */
        if (gctx->key_set)
            s390x_aes_gcm_setiv(gctx, iv);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);

        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
1561
1562
/*-
1563
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1564
 * if successful. Otherwise -1 is returned. Code is big-endian.
1565
 */
1566
/*
 * En/de-crypt and authenticate one TLS record in place (out must equal in).
 * The record layout is: explicit IV | payload | tag.  Returns the number of
 * bytes written on success, -1 on failure.  On exit the IV is invalidated
 * and the stored TLS AAD length is reset, so each record needs fresh setup.
 */
static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
    int rv = -1;

    /* Only in-place operation is supported, and the record must at least
     * hold the explicit IV and the tag. */
    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D".  The requirements is for one party to the
     * communication to fail after 2^64 - 1 keys.  We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
        goto err;
    }

    /* Encrypt: generate the next explicit IV into the record header.
     * Decrypt: take the explicit IV from the record header. */
    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
                                     : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;

    /* Skip the explicit IV; 'len' becomes the payload length only. */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;

    /* KMA wants total AAD/payload lengths in bits; the AAD (TLS record
     * header) was stashed in the EVP buffer by the AAD ctrl. */
    gctx->kma.param.taadl = gctx->tls_aad_len << 3;
    gctx->kma.param.tpcl = len << 3;
    s390x_kma(buf, gctx->tls_aad_len, in, len, out,
              gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);

    if (enc) {
        /* Append the computed tag and report the full record length. */
        memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Constant-time tag check; wipe the plaintext on mismatch. */
        if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }
err:
    /* Force fresh IV/AAD setup for the next record, success or not. */
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
1618
1619
/*-
1620
 * Called from EVP layer to initialize context, process additional
1621
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1622
 * ciphertext or process a TLS packet, depending on context. Returns bytes
1623
 * written on success. Otherwise -1 is returned. Code is big-endian.
1624
 */
1625
/*
 * EVP update/final entry point for s390x AES-GCM.  Dispatches on its
 * arguments: (in!=NULL, out==NULL) feeds AAD, (in!=NULL, out!=NULL)
 * en/de-crypts payload, (in==NULL) finalizes and checks/produces the tag.
 * A stored TLS AAD length diverts the whole call to the TLS fast path.
 * Returns bytes written on success, -1 otherwise.
 */
static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    unsigned char *buf, tmp[16];
    int enc;

    if (!gctx->key_set)
        return -1;

    /* tls_aad_len >= 0 means EVP_CTRL_AEAD_TLS1_AAD was issued. */
    if (gctx->tls_aad_len >= 0)
        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;

    if (in != NULL) {
        if (out == NULL) {
            /* Update(): additional authenticated data only. */
            if (s390x_aes_gcm_aad(gctx, in, len))
                return -1;
        } else {
            /* Update(): en/de-crypt payload. */
            if (s390x_aes_gcm(gctx, in, out, len))
                return -1;
        }
        return len;
    } else {
        /* Final(): flush buffered residues through KMA to close the
         * computation; lengths switch from bytes to bits here. */
        gctx->kma.param.taadl <<= 3;
        gctx->kma.param.tpcl <<= 3;
        s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
                  gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
        /* recall that we already did en-/decrypt gctx->mres
         * and returned it to caller... */
        OPENSSL_cleanse(tmp, gctx->mreslen);
        gctx->iv_set = 0;

        enc = EVP_CIPHER_CTX_is_encrypting(ctx);
        if (enc) {
            /* Tag now available via EVP_CTRL_AEAD_GET_TAG (full 16 bytes). */
            gctx->taglen = 16;
        } else {
            /* A tag must have been supplied before Final(). */
            if (gctx->taglen < 0)
                return -1;

            /* Constant-time compare against the caller-provided tag. */
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
                return -1;
        }
        return 0;
    }
}
1674
1675
/*
 * Tear down the GCM context: release a separately allocated IV buffer
 * (the IV may instead alias the EVP ctx's own storage, in which case it
 * must not be freed) and scrub all key material.  Returns 1 on success,
 * 0 when no cipher data is attached.
 */
static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    S390X_AES_GCM_CTX *gcm = EVP_C_DATA(S390X_AES_GCM_CTX, c);

    if (gcm == NULL)
        return 0;
    if (gcm->iv != c->iv)
        OPENSSL_free(gcm->iv);
    OPENSSL_cleanse(gcm, sizeof(*gcm));
    return 1;
}
1688
1689
# define S390X_AES_XTS_CTX              EVP_AES_XTS_CTX
1690
1691
# define s390x_aes_xts_init_key aes_xts_init_key
1692
static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1693
                                  const unsigned char *key,
1694
                                  const unsigned char *iv, int enc);
1695
# define s390x_aes_xts_cipher aes_xts_cipher
1696
static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1697
                                const unsigned char *in, size_t len);
1698
# define s390x_aes_xts_ctrl aes_xts_ctrl
1699
static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1700
# define s390x_aes_xts_cleanup aes_xts_cleanup
1701
1702
/*-
1703
 * Set nonce and length fields. Code is big-endian.
1704
 */
1705
/*
 * Program the CCM B0/counter block: clear the AAD flag, store the message
 * length mlen, then copy the 15-l byte nonce.  Statement order matters:
 * the 64-bit length store covers nonce bytes 8..15, and for small l the
 * subsequent memcpy deliberately overwrites its upper bytes with nonce
 * material, leaving only the low l bytes as the length field.
 */
static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
                                          const unsigned char *nonce,
                                          size_t mlen)
{
    /* Drop the "AAD present" flag; the aad routine re-sets it if needed. */
    ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
    /* Message length into the trailing 8 bytes (big-endian machine). */
    ctx->aes.ccm.nonce.g[1] = mlen;
    /* Nonce occupies bytes 1..15-l, partially overwriting the above. */
    memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
}
1713
1714
/*-
1715
 * Process additional authenticated data. Code is big-endian.
1716
 */
1717
/*
 * Absorb additional authenticated data into the CBC-MAC.  Encodes the AAD
 * length per the CCM spec (2, 6, or 10 bytes depending on magnitude),
 * packs as much AAD as fits into the first block, then MACs full blocks
 * via KMAC and a final partial block by XOR + single-block KM.
 * Code is big-endian.
 */
static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
                              size_t alen)
{
    unsigned char *ptr;
    int i, rem;

    if (!alen)
        return;

    /* Record in B0 that AAD is present. */
    ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;

    /* Suppress 'type-punned pointer dereference' warning. */
    ptr = ctx->aes.ccm.buf.b;

    /* CCM length-of-AAD encoding: plain 16-bit, or 0xfffe + 32-bit,
     * or 0xffff + 64-bit marker forms. */
    if (alen < ((1 << 16) - (1 << 8))) {
        *(uint16_t *)ptr = alen;
        i = 2;
    } else if (sizeof(alen) == 8
               && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
        *(uint16_t *)ptr = 0xffff;
        *(uint64_t *)(ptr + 2) = alen;
        i = 10;
    } else {
        *(uint16_t *)ptr = 0xfffe;
        *(uint32_t *)(ptr + 2) = alen;
        i = 6;
    }

    /* Fill the remainder of the first MAC block with AAD bytes... */
    while (i < 16 && alen) {
        ctx->aes.ccm.buf.b[i] = *aad;
        ++aad;
        --alen;
        ++i;
    }
    /* ...and zero-pad it. */
    while (i < 16) {
        ctx->aes.ccm.buf.b[i] = 0;
        ++i;
    }

    /* MAC B0 and the length/AAD block together (2 blocks, fresh ICV). */
    ctx->aes.ccm.kmac_param.icv.g[0] = 0;
    ctx->aes.ccm.kmac_param.icv.g[1] = 0;
    s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
               &ctx->aes.ccm.kmac_param);
    ctx->aes.ccm.blocks += 2;

    /* MAC the remaining AAD: full blocks via KMAC... */
    rem = alen & 0xf;
    alen &= ~(size_t)0xf;
    if (alen) {
        s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        ctx->aes.ccm.blocks += alen >> 4;
        aad += alen;
    }
    /* ...and a trailing partial block by XOR into the ICV + one KM. */
    if (rem) {
        for (i = 0; i < rem; i++)
            ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];

        s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                 ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
}
1779
1780
/*-
1781
 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
1782
 * success.
1783
 */
1784
/*
 * CCM payload processing: CBC-MAC over the plaintext plus CTR-mode
 * en/de-cryption (mac-then-encrypt when enc, decrypt-then-mac otherwise),
 * finishing with the tag block (A0) encryption XORed into the ICV.
 * Returns 0 on success, -1 on length mismatch, -2 when the block-count
 * limit is exceeded.
 */
static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len, int enc)
{
    size_t n, rem;
    unsigned int i, l, num;
    unsigned char flags;

    flags = ctx->aes.ccm.nonce.b[0];
    /* No AAD was fed: B0 has not been MACed yet, do it now. */
    if (!(flags & S390X_CCM_AAD_FLAG)) {
        s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
                 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
        ctx->aes.ccm.blocks++;
    }
    /* Convert B0 into the initial counter block A1 (flags := l-1). */
    l = flags & 0x7;
    ctx->aes.ccm.nonce.b[0] = l;

    /*-
     * Reconstruct length from encoded length field
     * and initialize it with counter value.
     */
    n = 0;
    for (i = 15 - l; i < 15; i++) {
        n |= ctx->aes.ccm.nonce.b[i];
        ctx->aes.ccm.nonce.b[i] = 0;
        n <<= 8;
    }
    n |= ctx->aes.ccm.nonce.b[15];
    ctx->aes.ccm.nonce.b[15] = 1;

    if (n != len)
        return -1;              /* length mismatch */

    if (enc) {
        /* Two operations per block plus one for tag encryption */
        ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
        if (ctx->aes.ccm.blocks > (1ULL << 61))
            return -2;          /* too much data */
    }

    num = 0;
    rem = len & 0xf;
    len &= ~(size_t)0xf;

    if (enc) {
        /* mac-then-encrypt */
        if (len)
            s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        /* Partial final block: XOR into ICV, then one KM. */
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }

        /* CTR-encrypt the whole payload starting at counter value 1. */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);
    } else {
        /* decrypt-then-mac */
        CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
                                    ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
                                    &num, (ctr128_f)AES_ctr32_encrypt);

        /* MAC is computed over the recovered plaintext. */
        if (len)
            s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
        if (rem) {
            for (i = 0; i < rem; i++)
                ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];

            s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
                     ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
                     ctx->aes.ccm.kmac_param.k);
        }
    }
    /* encrypt tag */
    for (i = 15 - l; i < 16; i++)
        ctx->aes.ccm.nonce.b[i] = 0;

    /* E(K, A0) XOR CBC-MAC gives the final tag in the ICV field. */
    s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
             ctx->aes.ccm.kmac_param.k);
    ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
    ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];

    ctx->aes.ccm.nonce.b[0] = flags;    /* restore flags field */
    return 0;
}
1872
1873
/*-
1874
 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1875
 * if successful. Otherwise -1 is returned.
1876
 */
1877
/*
 * En/de-crypt and authenticate one TLS CCM record in place (out must equal
 * in).  Record layout: explicit IV (sequence number) | payload | tag (m
 * bytes).  Returns the number of bytes written on success, -1 otherwise;
 * on decrypt failure the plaintext is wiped.
 */
static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                    const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    unsigned char *ivec = ctx->iv;
    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);

    /* In-place only, and the record must hold explicit IV + tag. */
    if (out != in
            || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
        return -1;

    if (enc) {
        /* Set explicit iv (sequence number). */
        memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    }

    /* 'len' becomes the payload length only. */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    /*-
     * Get explicit iv (sequence number). We already have fixed iv
     * (server/client_write_iv) here.
     */
    memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
    s390x_aes_ccm_setiv(cctx, ivec, len);

    /* Process aad (sequence number|type|version|length) */
    s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);

    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        /* Append the m-byte tag and report the full record length. */
        memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
    } else {
        /* Constant-time tag check; wipe the plaintext on any failure. */
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
                               cctx->aes.ccm.m))
                return len;
        }

        OPENSSL_cleanse(out, len);
        return -1;
    }
}
1925
1926
/*-
1927
 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
1928
 * returned.
1929
 */
1930
/*-
 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
 * returned.
 *
 * The key install also writes the CCM B0 flags octet (encoded l and m) and
 * resets the block counter; the IV install copies the 15-l byte nonce into
 * the EVP ctx's IV storage.
 */
static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_get_key_length(ctx);
        cctx->aes.ccm.fc = S390X_AES_FC(keylen);
        memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);

        /* Store encoded m and l. */
        cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
                                 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
        /*
         * Clear bytes 1..15 only: the memset starts at offset 1, so it
         * must cover one byte less than the full 16-byte field or it
         * would write one byte past the end of nonce.b.
         */
        memset(cctx->aes.ccm.nonce.b + 1, 0,
               sizeof(cctx->aes.ccm.nonce.b) - 1);
        cctx->aes.ccm.blocks = 0;

        cctx->aes.ccm.key_set = 1;
    }

    if (iv != NULL) {
        /* Nonce length is 15 - l per the CCM parameter encoding. */
        memcpy(ctx->iv, iv, 15 - cctx->aes.ccm.l);

        cctx->aes.ccm.iv_set = 1;
    }

    return 1;
}
1963
1964
/*-
1965
 * Called from EVP layer to initialize context, process additional
1966
 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1967
 * plaintext or process a TLS packet, depending on context. Returns bytes
1968
 * written on success. Otherwise -1 is returned.
1969
 */
1970
/*
 * EVP update/final entry point for s390x AES-CCM.  Dispatch on arguments:
 * (in==NULL, out==NULL) pre-declares the total message length; (in!=NULL,
 * out==NULL) feeds AAD; (in!=NULL, out!=NULL) processes the payload in one
 * shot; (in==NULL, out!=NULL) is Final() and returns no data, since the
 * tag was already verified during Update().  A stored TLS AAD length
 * diverts to the TLS fast path.  Returns bytes written or -1.
 */
static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
    const int enc = EVP_CIPHER_CTX_is_encrypting(ctx);
    int rv;
    unsigned char *buf;

    if (!cctx->aes.ccm.key_set)
        return -1;

    if (cctx->aes.ccm.tls_aad_len >= 0)
        return s390x_aes_ccm_tls_cipher(ctx, out, in, len);

    /*-
     * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
     * so integrity must be checked already at Update() i.e., before
     * potentially corrupted data is output.
     */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->aes.ccm.iv_set)
        return -1;

    if (out == NULL) {
        /* Update(): Pass message length. */
        if (in == NULL) {
            s390x_aes_ccm_setiv(cctx, ctx->iv, len);

            cctx->aes.ccm.len_set = 1;
            return len;
        }

        /* Update(): Process aad. */
        if (!cctx->aes.ccm.len_set && len)
            return -1;

        s390x_aes_ccm_aad(cctx, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!enc && !cctx->aes.ccm.tag_set)
        return -1;

    /* Update(): Process message. */

    if (!cctx->aes.ccm.len_set) {
        /*-
         * In case message length was not previously set explicitly via
         * Update(), set it now.
         */
        s390x_aes_ccm_setiv(cctx, ctx->iv, len);

        cctx->aes.ccm.len_set = 1;
    }

    if (enc) {
        if (s390x_aes_ccm(cctx, in, out, len, enc))
            return -1;

        /* Tag is now available in the ICV for EVP_CTRL_AEAD_GET_TAG. */
        cctx->aes.ccm.tag_set = 1;
        return len;
    } else {
        rv = -1;

        /* Decrypt, then compare the computed tag against the one the
         * caller stored in the EVP buffer via EVP_CTRL_AEAD_SET_TAG. */
        if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
            buf = EVP_CIPHER_CTX_buf_noconst(ctx);
            if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
                               cctx->aes.ccm.m))
                rv = len;
        }

        /* Never hand unauthenticated plaintext to the caller. */
        if (rv == -1)
            OPENSSL_cleanse(out, len);

        /* A decrypt consumes the IV/tag/length state. */
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        return rv;
    }
}
2053
2054
/*-
2055
 * Performs various operations on the context structure depending on control
2056
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2057
 * Code is big-endian.
2058
 */
2059
/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
    unsigned char *buf;
    int enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        /* Defaults: l = 8 (so 7-byte nonce), tag length m = 12. */
        cctx->aes.ccm.key_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.l = 8;
        cctx->aes.ccm.m = 12;
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.len_set = 0;
        cctx->aes.ccm.tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        /* Nonce length is 15 - l by the CCM parameter encoding. */
        *(int *)ptr = 15 - cctx->aes.ccm.l;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        /* Save the aad for later use. */
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        cctx->aes.ccm.tls_aad_len = arg;

        /* Last two AAD bytes hold the record length (big-endian). */
        len = buf[arg - 2] << 8 | buf[arg - 1];
        if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
            return 0;

        /* Correct length for explicit iv. */
        len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;

        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (!enc) {
            if (len < cctx->aes.ccm.m)
                return 0;

            /* Correct length for tag. */
            len -= cctx->aes.ccm.m;
        }

        /* Patch the adjusted plaintext length back into the AAD. */
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;

        /* Extra padding: tag appended to record. */
        return cctx->aes.ccm.m;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;

        /* Copy to first part of the iv. */
        memcpy(c->iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV length and l are complementary: l = 15 - ivlen. */
        arg = 15 - arg;
        /* fall-through */

    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;

        cctx->aes.ccm.l = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length must be even and within 4..16. */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;

        /* A concrete tag value may only be supplied for decryption. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (enc && ptr)
            return 0;

        if (ptr) {
            /* Stash the expected tag for verification during Update(). */
            cctx->aes.ccm.tag_set = 1;
            buf = EVP_CIPHER_CTX_buf_noconst(c);
            memcpy(buf, ptr, arg);
        }

        cctx->aes.ccm.m = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Only valid after a successful encryption produced a tag. */
        enc = EVP_CIPHER_CTX_is_encrypting(c);
        if (!enc || !cctx->aes.ccm.tag_set)
            return 0;

        if(arg < cctx->aes.ccm.m)
            return 0;

        memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
        /* Reading the tag consumes the per-message state. */
        cctx->aes.ccm.tag_set = 0;
        cctx->aes.ccm.iv_set = 0;
        cctx->aes.ccm.len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        return 1;

    default:
        return -1;
    }
}
2168
2169
# define s390x_aes_ccm_cleanup aes_ccm_cleanup
2170
2171
# ifndef OPENSSL_NO_OCB
2172
#  define S390X_AES_OCB_CTX             EVP_AES_OCB_CTX
2173
2174
#  define s390x_aes_ocb_init_key aes_ocb_init_key
2175
static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2176
                                  const unsigned char *iv, int enc);
2177
#  define s390x_aes_ocb_cipher aes_ocb_cipher
2178
static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2179
                                const unsigned char *in, size_t len);
2180
#  define s390x_aes_ocb_cleanup aes_ocb_cleanup
2181
static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2182
#  define s390x_aes_ocb_ctrl aes_ocb_ctrl
2183
static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2184
# endif
2185
2186
# ifndef OPENSSL_NO_SIV
2187
#  define S390X_AES_SIV_CTX             EVP_AES_SIV_CTX
2188
2189
#  define s390x_aes_siv_init_key aes_siv_init_key
2190
#  define s390x_aes_siv_cipher aes_siv_cipher
2191
#  define s390x_aes_siv_cleanup aes_siv_cleanup
2192
#  define s390x_aes_siv_ctrl aes_siv_ctrl
2193
# endif
2194
2195
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,    \
2196
                              MODE,flags)                               \
2197
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                 \
2198
    nid##_##keylen##_##nmode,blocksize,                                 \
2199
    keylen / 8,                                                         \
2200
    ivlen,                                                              \
2201
    flags | EVP_CIPH_##MODE##_MODE,                                     \
2202
    EVP_ORIG_GLOBAL,                                                    \
2203
    s390x_aes_##mode##_init_key,                                        \
2204
    s390x_aes_##mode##_cipher,                                          \
2205
    NULL,                                                               \
2206
    sizeof(S390X_AES_##MODE##_CTX),                                     \
2207
    NULL,                                                               \
2208
    NULL,                                                               \
2209
    NULL,                                                               \
2210
    NULL                                                                \
2211
};                                                                      \
2212
static const EVP_CIPHER aes_##keylen##_##mode = {                       \
2213
    nid##_##keylen##_##nmode,                                           \
2214
    blocksize,                                                          \
2215
    keylen / 8,                                                         \
2216
    ivlen,                                                              \
2217
    flags | EVP_CIPH_##MODE##_MODE,                                     \
2218
    EVP_ORIG_GLOBAL,                                                    \
2219
    aes_init_key,                                                       \
2220
    aes_##mode##_cipher,                                                \
2221
    NULL,                                                               \
2222
    sizeof(EVP_AES_KEY),                                                \
2223
    NULL,                                                               \
2224
    NULL,                                                               \
2225
    NULL,                                                               \
2226
    NULL                                                                \
2227
};                                                                      \
2228
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                       \
2229
{                                                                       \
2230
    return S390X_aes_##keylen##_##mode##_CAPABLE ?                      \
2231
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;       \
2232
}
2233
2234
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2235
static const EVP_CIPHER s390x_aes_##keylen##_##mode = {                 \
2236
    nid##_##keylen##_##mode,                                            \
2237
    blocksize,                                                          \
2238
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8,        \
2239
    ivlen,                                                              \
2240
    flags | EVP_CIPH_##MODE##_MODE,                                     \
2241
    EVP_ORIG_GLOBAL,                                                    \
2242
    s390x_aes_##mode##_init_key,                                        \
2243
    s390x_aes_##mode##_cipher,                                          \
2244
    s390x_aes_##mode##_cleanup,                                         \
2245
    sizeof(S390X_AES_##MODE##_CTX),                                     \
2246
    NULL,                                                               \
2247
    NULL,                                                               \
2248
    s390x_aes_##mode##_ctrl,                                            \
2249
    NULL                                                                \
2250
};                                                                      \
2251
static const EVP_CIPHER aes_##keylen##_##mode = {                       \
2252
    nid##_##keylen##_##mode,blocksize,                                  \
2253
    (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE ? 2 : 1) * keylen / 8,        \
2254
    ivlen,                                                              \
2255
    flags | EVP_CIPH_##MODE##_MODE,                                     \
2256
    EVP_ORIG_GLOBAL,                                                    \
2257
    aes_##mode##_init_key,                                              \
2258
    aes_##mode##_cipher,                                                \
2259
    aes_##mode##_cleanup,                                               \
2260
    sizeof(EVP_AES_##MODE##_CTX),                                       \
2261
    NULL,                                                               \
2262
    NULL,                                                               \
2263
    aes_##mode##_ctrl,                                                  \
2264
    NULL                                                                \
2265
};                                                                      \
2266
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void)                       \
2267
{                                                                       \
2268
    return S390X_aes_##keylen##_##mode##_CAPABLE ?                      \
2269
           &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode;       \
2270
}
2271
2272
#else
2273
2274
# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2275
static const EVP_CIPHER aes_##keylen##_##mode = { \
2276
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2277
        flags|EVP_CIPH_##MODE##_MODE,   \
2278
        EVP_ORIG_GLOBAL,                \
2279
        aes_init_key,                   \
2280
        aes_##mode##_cipher,            \
2281
        NULL,                           \
2282
        sizeof(EVP_AES_KEY),            \
2283
        NULL,NULL,NULL,NULL }; \
2284
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2285
{ return &aes_##keylen##_##mode; }
2286
2287
# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2288
static const EVP_CIPHER aes_##keylen##_##mode = { \
2289
        nid##_##keylen##_##mode,blocksize, \
2290
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE||EVP_CIPH_##MODE##_MODE==EVP_CIPH_SIV_MODE?2:1)*keylen/8, \
2291
        ivlen,                          \
2292
        flags|EVP_CIPH_##MODE##_MODE,   \
2293
        EVP_ORIG_GLOBAL,                \
2294
        aes_##mode##_init_key,          \
2295
        aes_##mode##_cipher,            \
2296
        aes_##mode##_cleanup,           \
2297
        sizeof(EVP_AES_##MODE##_CTX),   \
2298
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
2299
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2300
{ return &aes_##keylen##_##mode; }
2301
2302
#endif
2303
2304
#define BLOCK_CIPHER_generic_pack(nid,keylen,flags)             \
2305
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
2306
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)      \
2307
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
2308
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)   \
2309
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)       \
2310
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)       \
2311
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
2312
2313
/*
 * Set up the AES key schedule and block/stream function pointers for the
 * generic (non-AEAD) AES modes.
 *
 * A decrypt-direction key schedule is only needed for ECB and CBC; every
 * other mode (CTR, OFB, CFB) only ever uses the forward AES transform, so
 * it always installs an encrypt schedule.  Within each direction the
 * fastest available backend is chosen at run time in priority order:
 * hardware AES (HWAES) > bit-sliced (BSAES, mode-restricted) > vector
 * permutation (VPAES) > portable C tables.
 *
 * Returns 1 on success, 0 (with EVP_R_AES_KEY_SETUP_FAILED raised) if the
 * low-level set-key routine reported an error.
 */
static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_get_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        /* Decrypt direction: needs the inverse key schedule */
#ifdef HWAES_CAPABLE
        if (HWAES_CAPABLE) {
            ret = HWAES_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) HWAES_decrypt;
            dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
# endif
        } else
#endif
#ifdef BSAES_CAPABLE
        /* BSAES only provides a CBC-decrypt fast path, hence the mode check */
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) ossl_bsaes_cbc_encrypt;
        } else
#endif
#ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key,
                                        EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                        &dat->ks.ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
#endif
        {
            /* Portable fallback */
            ret = AES_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &dat->ks.ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        }
    } else
        /* Encrypt direction (and all stream-like modes) */
#ifdef HWAES_CAPABLE
    if (HWAES_CAPABLE) {
        ret = HWAES_set_encrypt_key(key,
                                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) HWAES_encrypt;
        dat->stream.cbc = NULL;
# ifdef HWAES_cbc_encrypt
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
        else
# endif
# ifdef HWAES_ctr32_encrypt_blocks
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
        else
# endif
            (void)0;            /* terminate potentially open 'else' */
    } else
#endif
#ifdef BSAES_CAPABLE
    /* BSAES encrypt side only accelerates CTR */
    if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks;
    } else
#endif
#ifdef VPAES_CAPABLE
    if (VPAES_CAPABLE) {
        ret = vpaes_set_encrypt_key(key,
                                    EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) vpaes_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) vpaes_cbc_encrypt : NULL;
    } else
#endif
    {
        /* Portable fallback */
        ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                  &dat->ks.ks);
        dat->block = (block128_f) AES_encrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) AES_cbc_encrypt : NULL;
#ifdef AES_CTR_ASM
        if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
#endif
    }

    if (ret < 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}
2420
2421
/*
 * CBC-mode cipher body: prefer the platform-specific combined CBC routine
 * when one was installed at key setup, otherwise fall back to the generic
 * CRYPTO_cbc128_* helpers driven by the raw block function.
 */
static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    const int encrypting = EVP_CIPHER_CTX_is_encrypting(ctx);

    if (dat->stream.cbc != NULL) {
        /* One call handles both directions via the final flag */
        (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv, encrypting);
        return 1;
    }

    if (encrypting)
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
    else
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);

    return 1;
}
2438
2439
/*
 * ECB-mode cipher body: run the raw block function over every complete
 * block.  Input shorter than one block is silently accepted and produces
 * no output, matching the historical EVP behaviour.
 */
static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    size_t blocksize = EVP_CIPHER_CTX_get_block_size(ctx);
    size_t last, off;

    if (len < blocksize)
        return 1;

    /* Index of the final whole block; len >= blocksize so this is safe */
    last = len - blocksize;
    for (off = 0; off <= last; off += blocksize)
        (*dat->block) (in + off, out + off, &dat->ks);

    return 1;
}
2454
2455
/*
 * OFB-mode cipher body: delegate to the generic OFB helper, round-tripping
 * the keystream offset through the EVP context's 'num' field.
 */
static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    int keystream_pos = EVP_CIPHER_CTX_get_num(ctx);

    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv,
                          &keystream_pos, dat->block);
    EVP_CIPHER_CTX_set_num(ctx, keystream_pos);
    return 1;
}
2466
2467
/*
 * CFB128-mode cipher body: delegate to the generic CFB helper; direction
 * is selected by the context's encrypting flag and the feedback offset is
 * persisted in the context's 'num' field.
 */
static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    int feedback_pos = EVP_CIPHER_CTX_get_num(ctx);

    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks, ctx->iv, &feedback_pos,
                          EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, feedback_pos);
    return 1;
}
2479
2480
/*
 * CFB8-mode cipher body (8-bit feedback): same shape as CFB128 but using
 * the byte-granular CRYPTO_cfb128_8_encrypt helper.
 */
static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    int feedback_pos = EVP_CIPHER_CTX_get_num(ctx);

    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks, ctx->iv, &feedback_pos,
                            EVP_CIPHER_CTX_is_encrypting(ctx), dat->block);
    EVP_CIPHER_CTX_set_num(ctx, feedback_pos);
    return 1;
}
2492
2493
/*
 * CFB1-mode cipher body (1-bit feedback).
 *
 * When EVP_CIPH_FLAG_LENGTH_BITS is set, 'len' is already a bit count and
 * is handed to the helper directly.  Otherwise 'len' counts bytes and must
 * be multiplied by 8; processing is chunked at MAXBITCHUNK bytes so that
 * len * 8 cannot overflow size_t.
 */
static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    const int encrypting = EVP_CIPHER_CTX_is_encrypting(ctx);
    int feedback_pos;

    if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
        /* Bit-count interface: single call, no scaling */
        feedback_pos = EVP_CIPHER_CTX_get_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks, ctx->iv,
                                &feedback_pos, encrypting, dat->block);
        EVP_CIPHER_CTX_set_num(ctx, feedback_pos);
        return 1;
    }

    /* Byte-count interface: bounded chunks keep the bit count in range */
    while (len > 0) {
        size_t chunk = len >= MAXBITCHUNK ? MAXBITCHUNK : len;

        feedback_pos = EVP_CIPHER_CTX_get_num(ctx);
        CRYPTO_cfb128_1_encrypt(in, out, chunk * 8, &dat->ks, ctx->iv,
                                &feedback_pos, encrypting, dat->block);
        EVP_CIPHER_CTX_set_num(ctx, feedback_pos);
        in += chunk;
        out += chunk;
        len -= chunk;
    }

    return 1;
}
2527
2528
/*
 * CTR-mode cipher body: use the 32-bit-counter fast path when a ctr128_f
 * stream routine was installed at key setup, otherwise the generic
 * CRYPTO_ctr128_encrypt driven by the raw block function.  The keystream
 * position lives in the context's 'num' field; a negative stored value is
 * rejected before the (unsigned) conversion.
 */
static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
    int stored = EVP_CIPHER_CTX_get_num(ctx);
    unsigned int pos;

    if (stored < 0)
        return 0;
    pos = (unsigned int)stored;

    if (dat->stream.ctr != NULL)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv,
                                    EVP_CIPHER_CTX_buf_noconst(ctx),
                                    &pos, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv,
                              EVP_CIPHER_CTX_buf_noconst(ctx), &pos,
                              dat->block);
    EVP_CIPHER_CTX_set_num(ctx, pos);
    return 1;
}
2552
2553
/*
 * Instantiate the EVP_CIPHER method tables for the generic AES modes
 * (ECB/CBC/OFB/CFB/CFB1/CFB8/CTR) at each of the three key sizes.
 */
BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
    BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2556
2557
/*
 * GCM context teardown: scrub the GCM state (it embeds key-derived
 * material) and release the IV buffer if it was separately heap-allocated
 * rather than aliasing the EVP context's built-in IV storage.
 */
static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);

    if (gctx != NULL) {
        OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
        if (gctx->iv != c->iv)
            OPENSSL_free(gctx->iv);
        return 1;
    }
    return 0;
}
2567
2568
/*
 * Control handler for the AES-GCM EVP ciphers.
 *
 * Notable invariants visible in this code:
 *  - c->buf is shared storage: it holds the expected tag on decrypt
 *    (AEAD_SET_TAG), the computed tag on encrypt (written elsewhere), and
 *    the saved TLS AAD (AEAD_TLS1_AAD).
 *  - gctx->iv either aliases c->iv or points to a heap buffer allocated
 *    here when an IV longer than EVP_MAX_IV_LENGTH is requested.
 *
 * Returns 1 on success, 0 on failure, -1 for unknown ctrl types; the
 * AEAD_TLS1_AAD case returns the TLS tag length instead.
 */
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* Reset all per-key state; IV initially aliases the EVP context's */
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Expected tag is only meaningful when decrypting */
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag can only be read after an encrypt finalised (taglen >= 0) */
        if (arg <= 0 || arg > 16 || !c->encrypt
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        /* Hand out the current invocation IV and bump the counter */
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        /* Decrypt-only: install the peer's explicit invocation field */
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        gctx->tls_enc_records = 0;
        {
            /* Last two AAD bytes are the big-endian TLS record length */
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
            /* Self-referential pointers must be re-targeted at the copy */
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                /* Heap-allocated IV needs its own duplicate */
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    ERR_raise(ERR_LIB_EVP, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}
2709
2710
/*
 * Key/IV initialisation for AES-GCM.
 *
 * GCM only ever uses the forward AES transform, so an encrypt key schedule
 * is installed regardless of direction.  The do { } while (0) with 'break'
 * statements implements the backend priority chain (HWAES > BSAES > VPAES >
 * portable C) while sidestepping dangling-else issues between the #ifdef
 * sections.  Either argument may be NULL: a NULL key with a non-NULL iv
 * (re)sets only the IV, and vice versa.
 *
 * Always returns 1.
 */
static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) HWAES_encrypt);
# ifdef HWAES_ctr32_encrypt_blocks
                gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
# else
                gctx->ctr = NULL;
# endif
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) ossl_bsaes_ctr32_encrypt_blocks;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            /* Portable fallback when no accelerated backend claimed the key */
            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
#ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
#else
            gctx->ctr = NULL;
#endif
        } while (0);

        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
2782
2783
/*
2784
 * Handle TLS GCM packet format. This consists of the last portion of the IV
2785
 * followed by the payload and finally the tag. On encrypt generate IV,
2786
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
2787
 * and verify tag.
2788
 */
2789
2790
/*
 * One-shot AES-GCM processing of a full TLS record.
 *
 * Layout of the buffer: explicit IV (8 bytes) || payload || tag (16 bytes).
 * On encrypt: generates the explicit IV, encrypts the payload and appends
 * the tag; returns the total record length.  On decrypt: installs the
 * received explicit IV, decrypts and verifies the tag, wiping the output
 * on mismatch; returns the payload length.  Returns -1 on any failure.
 * The AAD must have been saved earlier via EVP_CTRL_AEAD_TLS1_AAD.
 * In all cases the IV and saved AAD are invalidated before returning, so
 * each record requires fresh setup.
 */
static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    int rv = -1;
    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;

    /*
     * Check for too many keys as per FIPS 140-2 IG A.5 "Key/IV Pair Uniqueness
     * Requirements from SP 800-38D".  The requirements is for one party to the
     * communication to fail after 2^64 - 1 keys.  We do this on the encrypting
     * side only.
     */
    if (ctx->encrypt && ++gctx->tls_enc_records == 0) {
        ERR_raise(ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS);
        goto err;
    }

    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
                                              : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            /* Assembler fast path handles the bulk, ctr32 mops up the tail */
            if (len >= 32 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 32 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_encrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            size_t bulk = 0;
#if defined(AES_GCM_ASM)
            if (len >= 16 && AES_GCM_ASM(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in + bulk,
                                            out + bulk,
                                            len - bulk, gctx->ctr))
                goto err;
        } else {
            size_t bulk = 0;
#if defined(AES_GCM_ASM2)
            if (len >= 16 && AES_GCM_ASM2(gctx)) {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
                    return -1;

                bulk = AES_gcm_decrypt(in, out, len,
                                       gctx->gcm.key,
                                       gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                gctx->gcm.len.u[1] += bulk;
            }
#endif
            if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                      in + bulk, out + bulk, len - bulk))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    /* Single-use state: force fresh IV/AAD setup for the next record */
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}
2919
2920
#ifdef FIPS_MODULE
2921
/*
2922
 * See SP800-38D (GCM) Section 8 "Uniqueness requirement on IVS and keys"
2923
 *
2924
 * See also 8.2.2 RBG-based construction.
2925
 * Random construction consists of a free field (which can be NULL) and a
2926
 * random field which will use a DRBG that can return at least 96 bits of
2927
 * entropy strength. (The DRBG must be seeded by the FIPS module).
2928
 */
2929
/*
 * FIPS-mode IV construction: keep the caller-supplied fixed field of
 * 'offset' bytes and fill the remainder of the IV from the DRBG.  The IV
 * as a whole must be at least 96 bits and the random field non-empty.
 * Returns 1 on success, 0 on failure.
 */
static int aes_gcm_iv_generate(EVP_AES_GCM_CTX *gctx, int offset)
{
    int rand_len = gctx->ivlen - offset;

    /* Must be at least 96 bits overall, with room for randomness */
    if (rand_len <= 0 || gctx->ivlen < 12)
        return 0;

    /* Use DRBG to generate random iv */
    return RAND_bytes(gctx->iv + offset, rand_len) > 0;
}
2942
#endif /* FIPS_MODULE */
2943
2944
/*
 * Streaming AES-GCM EVP cipher body.
 *
 * Dispatch rules, in order:
 *  - saved TLS AAD present -> whole-record TLS path (aes_gcm_tls_cipher);
 *  - in != NULL, out == NULL -> 'in' is additional authenticated data;
 *  - in != NULL, out != NULL -> encrypt/decrypt payload, returns len;
 *  - in == NULL -> finalisation: on decrypt verify the tag set earlier
 *    via EVP_CTRL_AEAD_SET_TAG, on encrypt compute the tag into ctx->buf.
 * Returns -1 on error, and the IV is marked unusable after finalisation
 * so it cannot be silently reused.
 */
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

#ifdef FIPS_MODULE
    /*
     * FIPS requires generation of AES-GCM IV's inside the FIPS module.
     * The IV can still be set externally (the security policy will state that
     * this is not FIPS compliant). There are some applications
     * where setting the IV externally is the only option available.
     */
    if (!gctx->iv_set) {
        if (!ctx->encrypt || !aes_gcm_iv_generate(gctx, 0))
            return -1;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen_rand = 1;
    }
#else
    if (!gctx->iv_set)
        return -1;
#endif /* FIPS_MODULE */

    if (in) {
        if (out == NULL) {
            /* NULL output marks the input as AAD */
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 32 && AES_GCM_ASM(gctx)) {
                    /* First align to a block boundary for the asm routine */
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 32 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_encrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key, gctx->gcm.Yi.c,
                                           gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_encrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                size_t bulk = 0;
#if defined(AES_GCM_ASM)
                if (len >= 16 && AES_GCM_ASM(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in + bulk,
                                                out + bulk,
                                                len - bulk, gctx->ctr))
                    return -1;
            } else {
                size_t bulk = 0;
#if defined(AES_GCM_ASM2)
                if (len >= 16 && AES_GCM_ASM2(gctx)) {
                    size_t res = (16 - gctx->gcm.mres) % 16;

                    if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
                        return -1;

                    bulk = AES_gcm_decrypt(in + res,
                                           out + res, len - res,
                                           gctx->gcm.key,
                                           gctx->gcm.Yi.c, gctx->gcm.Xi.u);
                    gctx->gcm.len.u[1] += bulk;
                    bulk += res;
                }
#endif
                if (CRYPTO_gcm128_decrypt(&gctx->gcm,
                                          in + bulk, out + bulk, len - bulk))
                    return -1;
            }
        }
        return len;
    } else {
        /* Finalisation: no more data */
        if (!ctx->encrypt) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        /* Compute the tag for later retrieval via EVP_CTRL_AEAD_GET_TAG */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }

}
3086
3087
/*
 * Flag set shared by the custom (AEAD/XTS-style) AES ciphers: custom IV
 * handling and copy semantics, cipher-managed buffering, and init/ctrl
 * hooks that must always run.
 */
#define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)

/* Instantiate the AES-GCM EVP_CIPHERs (12-byte default IV) for each key size */
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3098
3099
/*
 * Control handler for AES-XTS: supports context initialisation and
 * duplication only.  key1/key2 double as the "key and IV are both set"
 * indicator, so EVP_CTRL_INIT clears them.  On EVP_CTRL_COPY the
 * self-referential key-schedule pointers must be re-targeted at the copy;
 * contexts whose pointers do not reference their own embedded schedules
 * cannot be duplicated.  Returns 1 on success, 0 on failure, -1 for
 * unsupported ctrl types.
 */
static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);

    switch (type) {
    case EVP_CTRL_INIT:
        /* key1 and key2 are used as an indicator both key and IV are set */
        xctx->xts.key1 = NULL;
        xctx->xts.key2 = NULL;
        return 1;

    case EVP_CTRL_COPY: {
        EVP_CIPHER_CTX *dst = ptr;
        EVP_AES_XTS_CTX *xctx_dst = EVP_C_DATA(EVP_AES_XTS_CTX, dst);

        if (xctx->xts.key1 != NULL) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_dst->xts.key1 = &xctx_dst->ks1;
        }
        if (xctx->xts.key2 != NULL) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_dst->xts.key2 = &xctx_dst->ks2;
        }
        return 1;
    }

    default:
        return -1;
    }
}
3125
3126
/*
 * Initialise the XTS context from |key| and/or |iv|.
 *
 * The EVP key is really two half-length AES keys: the first half (key1)
 * encrypts the data blocks, the second half (key2) encrypts the tweak.
 * Selects a hardware/assembler implementation where available, otherwise
 * falls back to the portable AES code.  Returns 1 on success, 0 if the
 * two half-keys are identical (rejected for security, see below).
 */
static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    /* EVP_CIPH_ALWAYS_CALL_INIT: we may be called with nothing to do */
    if (!iv && !key)
        return 1;

    if (key) {
        do {
            /* The key is two half length keys in reality */
            const int bytes = EVP_CIPHER_CTX_get_key_length(ctx) / 2;
            const int bits = bytes * 8;

            /*
             * Verify that the two keys are different.
             *
             * This addresses the vulnerability described in Rogaway's
             * September 2004 paper:
             *
             *      "Efficient Instantiations of Tweakable Blockciphers and
             *       Refinements to Modes OCB and PMAC".
             *      (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
             *
             * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
             * that:
             *      "The check for Key_1 != Key_2 shall be done at any place
             *       BEFORE using the keys in the XTS-AES algorithm to process
             *       data with them."
             */
            if ((!allow_insecure_decrypt || enc)
                    && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
                ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS);
                return 0;
            }

#ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
#else
            xctx->stream = NULL;
#endif
            /* key_len is two AES keys */
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                if (enc) {
                    HWAES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_encrypt;
# ifdef HWAES_xts_encrypt
                    xctx->stream = HWAES_xts_encrypt;
# endif
                } else {
                    HWAES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) HWAES_decrypt;
# ifdef HWAES_xts_decrypt
                    xctx->stream = HWAES_xts_decrypt;
#endif
                }

                /* The tweak key is always used in the encrypt direction */
                HWAES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) HWAES_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
#ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? ossl_bsaes_xts_encrypt : ossl_bsaes_xts_decrypt;
            else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, bits, &xctx->ks1.ks);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
#endif
                (void)0;        /* terminate potentially open 'else' */

            /* Portable fallback implementation */
            if (enc) {
                AES_set_encrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, bits, &xctx->ks1.ks);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + bytes, bits, &xctx->ks2.ks);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);
    }

    if (iv) {
        /* key2 doubles as the "key and IV are both set" indicator */
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}
3237
3238
/*
 * Encrypt or decrypt one XTS data unit of |len| bytes from |in| to |out|.
 * Returns 1 on success, 0 on error (keys/IV not set, short input, or a
 * data unit exceeding the standard's block limit).
 */
static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, ctx);
    const int keys_ready = xctx->xts.key1 != NULL && xctx->xts.key2 != NULL;

    /* Both half-keys must be installed and we need at least one block. */
    if (!keys_ready || out == NULL || in == NULL || len < AES_BLOCK_SIZE)
        return 0;

    /*
     * Impose a limit of 2^20 blocks per data unit as specified by
     * IEEE Std 1619-2018.  The earlier and obsolete IEEE Std 1619-2007
     * indicated that this was a SHOULD NOT rather than a MUST NOT.
     * NIST SP 800-38E mandates the same limit.
     */
    if (len > XTS_MAX_BLOCKS_PER_DATA_UNIT * AES_BLOCK_SIZE) {
        ERR_raise(ERR_LIB_EVP, EVP_R_XTS_DATA_UNIT_IS_TOO_LARGE);
        return 0;
    }

    if (xctx->stream != NULL) {
        /* Optimised stream routine processes the whole data unit at once. */
        (*xctx->stream) (in, out, len, xctx->xts.key1, xctx->xts.key2,
                         ctx->iv);
        return 1;
    }

    /* Generic path: CRYPTO_xts128_encrypt returns nonzero on failure. */
    return CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
                                 EVP_CIPHER_CTX_is_encrypting(ctx)) ? 0 : 1;
}
3270
3271
#define aes_xts_cleanup NULL
3272
3273
#define XTS_FLAGS       (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3274
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3275
                         | EVP_CIPH_CUSTOM_COPY)
3276
3277
BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3278
    BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
3279
3280
/*
 * Control-operation dispatcher for the AES-CCM EVP ciphers.
 *
 * Handles context (re)initialisation, IV/tag length negotiation
 * (CCM's nonce length is 15 - L bytes), TLS AAD processing and
 * context copying.  Returns 1 on success, 0 on failure, -1 for an
 * unsupported control code (except EVP_CTRL_AEAD_TLS1_AAD which
 * returns the tag length M on success).
 */
static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
    switch (type) {
    case EVP_CTRL_INIT:
        /* Defaults: L = 8 (so a 7-byte nonce), 12-byte tag, no TLS AAD */
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        /* CCM nonce length is derived from L */
        *(int *)ptr = 15 - cctx->L;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            /* Last two AAD bytes hold the record length (big endian) */
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_is_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            /* Write the corrected plaintext length back into the AAD */
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(c->iv, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* Convert requested nonce length to the equivalent L value */
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        /* Tag length must be an even value in [4, 16] */
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        /* An expected-tag value only makes sense when decrypting */
        if (EVP_CIPHER_CTX_is_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Tag is only available after encrypting */
        if (!EVP_CIPHER_CTX_is_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        /* Reset state so the context must be re-IVed before reuse */
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
            /* Fix up the copied key pointer to point into the new context */
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;

    }
}
3380
3381
/*
 * Initialise the CCM context: schedule the AES key (hardware-accelerated
 * where available) and/or store the nonce (15 - L bytes).  Always
 * returns 1; a call with neither key nor IV is a no-op.
 */
static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key)
        do {
            /* CCM only ever uses the encrypt direction of the block cipher */
#ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &cctx->ks.ks);

                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) HWAES_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            } else
#endif
#ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &cctx->ks.ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
#endif
            /* Portable fallback */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                &cctx->ks.ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        /* Nonce length is 15 - L bytes */
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}
3427
3428
/*
 * One-shot encrypt/decrypt of a whole TLS record with AES-CCM.
 *
 * |in|/|out| must alias (in-place operation) and |len| covers the
 * explicit IV, payload and (when decrypting) the tag.  On encrypt the
 * explicit IV and tag are emitted as part of the output.  Returns the
 * number of bytes written, or -1 on any failure (output is cleansed on
 * a failed decrypt so no unauthenticated plaintext leaks).
 */
static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_is_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(ctx->iv + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L,
                            len))
            return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx),
                      cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
        /* cctx->str is the optional accelerated CCM64 stream routine */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        /* Append the authentication tag after the ciphertext */
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            /* Verify the received tag in constant time */
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        /* Authentication failed: wipe the bogus plaintext */
        OPENSSL_cleanse(out, len);
        return -1;
    }
}
3476
3477
/*
 * EVP cipher callback for AES-CCM in the non-TLS case.
 *
 * CCM is not online: the total message length must be fixed before any
 * payload is processed, which is what the len_set state tracks.  The
 * calling conventions follow the EVP AEAD pattern:
 *   - in == NULL, out == NULL: set the total plaintext length |len|
 *   - in != NULL, out == NULL: supply AAD
 *   - in != NULL, out != NULL: encrypt/decrypt payload
 *   - in == NULL, out != NULL: finalise (no data returned)
 * Returns the number of bytes processed, or -1 on error.
 */
static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            /* Message length declaration: bind IV and total length now */
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv,
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
        /* cctx->str is the optional accelerated CCM64 stream routine */
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            /* Compare the computed tag with the one stashed by SET_TAG */
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        /* On failed authentication, wipe the unauthenticated plaintext */
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        /* Single-use: force a fresh IV/tag/length for the next message */
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
3548
3549
#define aes_ccm_cleanup NULL
3550
3551
BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
3552
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3553
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
3554
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3555
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
3556
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3557
3558
/* Per-context state for the AES key-wrap (RFC 3394/5649) EVP ciphers. */
typedef struct {
    union {
        OSSL_UNION_ALIGN;
        AES_KEY ks;             /* AES key schedule */
    } ks;
    /*
     * Points at ctx->iv once an IV has been supplied, NULL otherwise —
     * i.e. it doubles as the "IV has been set" flag.  NOTE(review): when
     * NULL the CRYPTO_128_(un)wrap* routines presumably substitute the
     * default ICV — confirm against crypto/modes/wrap128.c.
     */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;
3566
3567
/*
 * Initialise the AES key-wrap context: schedule the AES key in the
 * direction required and/or record the ICV ("IV").  Returns 1 on
 * success, 0 if the cipher's IV length cannot be determined.
 */
static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX, ctx);

    /* EVP_CIPH_ALWAYS_CALL_INIT: a call with nothing to do is fine. */
    if (key == NULL && iv == NULL)
        return 1;

    if (key != NULL) {
        const int keybits = EVP_CIPHER_CTX_get_key_length(ctx) * 8;

        if (EVP_CIPHER_CTX_is_encrypting(ctx))
            AES_set_encrypt_key(key, keybits, &wctx->ks.ks);
        else
            AES_set_decrypt_key(key, keybits, &wctx->ks.ks);

        /* A new key without an accompanying IV clears any stored one. */
        if (iv == NULL)
            wctx->iv = NULL;
    }

    if (iv != NULL) {
        int ivlen = EVP_CIPHER_CTX_get_iv_length(ctx);

        if (ivlen < 0)
            return 0;
        memcpy(ctx->iv, iv, ivlen);
        wctx->iv = ctx->iv;     /* also marks the IV as set */
    }
    return 1;
}
3593
3594
/*
 * Perform AES key wrap or unwrap (with or without padding) on |inlen|
 * bytes.  With out == NULL this is a length query: it returns the output
 * size the operation would produce.  Returns the number of bytes written
 * on success, 0 on overlap error, -1 on invalid input or wrap failure.
 */
static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_get_iv_length(ctx) == 4;
    /* No final operation so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!EVP_CIPHER_CTX_is_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (ossl_is_partially_overlapping(out, in, inlen)) {
        ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }
    /* out == NULL: caller is asking for the required output size */
    if (!out) {
        if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding output will be exactly 8 bytes smaller than
             * input. If padding it will be at least 8 bytes smaller but we
             * don't know how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_is_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    /* The wrap primitives return 0 on failure, output length on success */
    return rv ? (int)rv : -1;
}
3652
3653
#define WRAP_FLAGS      (EVP_CIPH_WRAP_MODE \
3654
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3655
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)
3656
3657
/*
 * AES key-wrap (RFC 3394, no padding) cipher definitions and their
 * public accessors.  Block size 8, IV ("ICV") length 8.
 */
static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}
3698
3699
/*
 * AES key-wrap with padding (RFC 5649) cipher definitions and their
 * public accessors.  IV length 4 — aes_wrap_cipher uses this to
 * distinguish the padded from the unpadded variant.
 */
static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS, EVP_ORIG_GLOBAL,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}
3740
3741
#ifndef OPENSSL_NO_OCB
3742
/*
 * Control-operation dispatcher for the AES-OCB EVP ciphers: context
 * initialisation, IV/tag length configuration, tag get/set and context
 * copying.  Returns 1 on success, 0 on failure, -1 for an unsupported
 * control code.
 */
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        /* Defaults: cipher's standard IV length, 16-byte tag */
        octx->key_set = 0;
        octx->iv_set = 0;
        octx->ivlen = EVP_CIPHER_get_iv_length(c->cipher);
        octx->iv = c->iv;
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = octx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;

        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (ptr == NULL) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;

            octx->taglen = arg;
            return 1;
        }
        /* Supplying an expected tag is only valid when decrypting */
        if (arg != octx->taglen || EVP_CIPHER_CTX_is_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        /* Reading the computed tag is only valid when encrypting */
        if (arg != octx->taglen || !EVP_CIPHER_CTX_is_encrypting(c))
            return 0;

        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        /* Deep-copy the OCB state, rebinding the key schedule pointers */
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;

    }
}
3804
3805
/*
 * Initialise the OCB context from |key| and/or |iv|.  Both encrypt and
 * decrypt key schedules are installed (OCB decryption needs both), using
 * a hardware implementation when available.  Returns 1 on success, 0 on
 * failure from the underlying OCB setup routines.
 */
static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key,
                                      EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            /* Portable fallback */
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_get_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}
3886
3887
/*
 * EVP cipher callback for AES-OCB.
 *
 * Buffers partial AES blocks (for both AAD and payload) so that only
 * whole blocks reach the low-level OCB routines, which require that.
 * Calling conventions:
 *   - in != NULL, out == NULL: supply AAD
 *   - in != NULL, out != NULL: encrypt/decrypt payload
 *   - in == NULL:              finalise (flush buffers, verify/emit tag)
 * Returns the number of bytes written, 0 when input was merely buffered
 * (or on overlap error), or -1 on failure.
 */
static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (ossl_is_partially_overlapping(out + *buf_len, in, len)) {
                ERR_raise(ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                /* Still not a full block: stash the input and wait for more */
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_is_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_is_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
4035
4036
/* Release the OCB-internal state held by the context; always succeeds. */
static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}
4042
4043
BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
4044
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4045
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
4046
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4047
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
4048
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
4049
#endif                         /* OPENSSL_NO_OCB */