Coverage Report

Created: 2025-06-13 06:56

/src/openssl/engines/e_padlock.c
Every instrumented line in this file reported an execution count of 0: the PadLock engine code was never exercised in this run. The source is reproduced below without the per-line Line/Count columns.

/*
 * Copyright 2004-2023 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License 2.0 (the "License").  You may not use
 * this file except in compliance with the License.  You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

/*
 * This file uses the low level AES and engine functions (which are deprecated
 * for non-internal use) in order to implement the padlock engine AES ciphers.
 */
#define OPENSSL_SUPPRESS_DEPRECATED

#include <stdio.h>
#include <string.h>

#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/engine.h>
#include <openssl/evp.h>
#include <openssl/aes.h>
#include <openssl/rand.h>
#include <openssl/err.h>
#include <openssl/modes.h>

#ifndef OPENSSL_NO_PADLOCKENG

/*
 * VIA PadLock AES is available *ONLY* on some x86 CPUs. Not only that it
 * doesn't exist elsewhere, but it even can't be compiled on other platforms!
 */

# undef COMPILE_PADLOCKENG
# if defined(PADLOCK_ASM)
#  define COMPILE_PADLOCKENG
#  ifdef OPENSSL_NO_DYNAMIC_ENGINE
static ENGINE *ENGINE_padlock(void);
#  endif
# endif

# ifdef OPENSSL_NO_DYNAMIC_ENGINE
void engine_load_padlock_int(void);
void engine_load_padlock_int(void)
{
    /* On non-x86 CPUs it just returns. */
#  ifdef COMPILE_PADLOCKENG
    ENGINE *toadd = ENGINE_padlock();
    if (!toadd)
        return;
    ERR_set_mark();
    ENGINE_add(toadd);
    /*
     * If the "add" worked, it gets a structural reference. So either way, we
     * release our just-created reference.
     */
    ENGINE_free(toadd);
    /*
     * If the "add" didn't work, it was probably a conflict because it was
     * already added (eg. someone calling ENGINE_load_blah then calling
     * ENGINE_load_builtin_engines() perhaps).
     */
    ERR_pop_to_mark();
#  endif
}

# endif
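
For orientation, here is a minimal usage sketch (not part of e_padlock.c) of how an application would pick up the statically linked engine that the loader above registers. It assumes the legacy ENGINE API, which OpenSSL 3.x still ships but deprecates; error handling is reduced to the bare minimum.

/* Illustrative only -- not part of e_padlock.c. */
#include <stdio.h>
#include <openssl/engine.h>

static int use_padlock_if_present(void)
{
    ENGINE *e;

    ENGINE_load_builtin_engines();   /* runs engine_load_padlock_int() in static builds */
    e = ENGINE_by_id("padlock");     /* matches padlock_id below */
    if (e == NULL)
        return 0;                    /* engine not compiled in or not registered */
    if (!ENGINE_init(e)) {           /* padlock_init(): fails without ACE/RNG hardware */
        ENGINE_free(e);
        return 0;
    }
    if (!ENGINE_set_default(e, ENGINE_METHOD_CIPHERS))
        fprintf(stderr, "padlock found but could not be made the default\n");
    ENGINE_finish(e);                /* release the functional reference */
    ENGINE_free(e);                  /* release the structural reference */
    return 1;
}
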

# ifdef COMPILE_PADLOCKENG

/* Function for ENGINE detection and control */
static int padlock_available(void);
static int padlock_init(ENGINE *e);

/* RNG Stuff */
static RAND_METHOD padlock_rand;

/* Cipher Stuff */
static int padlock_ciphers(ENGINE *e, const EVP_CIPHER **cipher,
                           const int **nids, int nid);

/* Engine names */
static const char *padlock_id = "padlock";
static char padlock_name[100];

/* Available features */
static int padlock_use_ace = 0; /* Advanced Cryptography Engine */
static int padlock_use_rng = 0; /* Random Number Generator */

/* ===== Engine "management" functions ===== */

/* Prepare the ENGINE structure for registration */
static int padlock_bind_helper(ENGINE *e)
{
    /* Check available features */
    padlock_available();

    /*
     * RNG is currently disabled for reasons discussed in commentary just
     * before padlock_rand_bytes function.
     */
    padlock_use_rng = 0;

    /* Generate a nice engine name with available features */
    BIO_snprintf(padlock_name, sizeof(padlock_name),
                 "VIA PadLock (%s, %s)",
                 padlock_use_rng ? "RNG" : "no-RNG",
                 padlock_use_ace ? "ACE" : "no-ACE");

    /* Register everything or return with an error */
    if (!ENGINE_set_id(e, padlock_id) ||
        !ENGINE_set_name(e, padlock_name) ||
        !ENGINE_set_init_function(e, padlock_init) ||
        (padlock_use_ace && !ENGINE_set_ciphers(e, padlock_ciphers)) ||
        (padlock_use_rng && !ENGINE_set_RAND(e, &padlock_rand))) {
        return 0;
    }

    /* Everything looks good */
    return 1;
}

#  ifdef OPENSSL_NO_DYNAMIC_ENGINE
/* Constructor */
static ENGINE *ENGINE_padlock(void)
{
    ENGINE *eng = ENGINE_new();

    if (eng == NULL) {
        return NULL;
    }

    if (!padlock_bind_helper(eng)) {
        ENGINE_free(eng);
        return NULL;
    }

    return eng;
}
#  endif

/* Check availability of the engine */
static int padlock_init(ENGINE *e)
{
    return (padlock_use_rng || padlock_use_ace);
}

#  ifndef AES_ASM
static int padlock_aes_set_encrypt_key(const unsigned char *userKey,
                                       const int bits,
                                       AES_KEY *key);
static int padlock_aes_set_decrypt_key(const unsigned char *userKey,
                                       const int bits,
                                       AES_KEY *key);
#   define AES_ASM
#   define AES_set_encrypt_key padlock_aes_set_encrypt_key
#   define AES_set_decrypt_key padlock_aes_set_decrypt_key
#   include "../crypto/aes/aes_core.c"
#  endif

/*
 * This stuff is needed if this ENGINE is being compiled into a
 * self-contained shared-library.
 */
#  ifndef OPENSSL_NO_DYNAMIC_ENGINE
static int padlock_bind_fn(ENGINE *e, const char *id)
{
    if (id && (strcmp(id, padlock_id) != 0)) {
        return 0;
    }

    if (!padlock_bind_helper(e)) {
        return 0;
    }

    return 1;
}

IMPLEMENT_DYNAMIC_CHECK_FN()
IMPLEMENT_DYNAMIC_BIND_FN(padlock_bind_fn)
#  endif                       /* !OPENSSL_NO_DYNAMIC_ENGINE */
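
When the engine is built as a standalone shared object instead, the bind function above is what the generic "dynamic" engine calls after loading it. A sketch of that run-time loading path follows (not part of e_padlock.c); the SO_PATH value is a placeholder, and the real module location depends on the installation (the ENGINESDIR reported by `openssl version -e`).

/* Illustrative only -- not part of e_padlock.c. */
#include <openssl/engine.h>

static ENGINE *load_padlock_dynamically(const char *so_path)
{
    ENGINE *e = ENGINE_by_id("dynamic");

    if (e == NULL)
        return NULL;
    if (!ENGINE_ctrl_cmd_string(e, "SO_PATH", so_path, 0)   /* placeholder path to the padlock module */
        || !ENGINE_ctrl_cmd_string(e, "ID", "padlock", 0)
        || !ENGINE_ctrl_cmd_string(e, "LOAD", NULL, 0)) {   /* ends up in padlock_bind_fn() */
        ENGINE_free(e);
        return NULL;
    }
    return e;                   /* caller still needs ENGINE_init() before use */
}
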
/* ===== Here comes the "real" engine ===== */

/* Some AES-related constants */
#  define AES_BLOCK_SIZE          16
#  define AES_KEY_SIZE_128        16
#  define AES_KEY_SIZE_192        24
#  define AES_KEY_SIZE_256        32
    /*
     * Here we store the status information relevant to the current context.
     */
    /*
     * BIG FAT WARNING: Inline assembler in PADLOCK_XCRYPT_ASM() depends on
     * the order of items in this structure.  Don't blindly modify, reorder,
     * etc!
     */
struct padlock_cipher_data {
    unsigned char iv[AES_BLOCK_SIZE]; /* Initialization vector */
    union {
        unsigned int pad[4];
        struct {
            int rounds:4;
            int dgst:1;         /* n/a in C3 */
            int align:1;        /* n/a in C3 */
            int ciphr:1;        /* n/a in C3 */
            unsigned int keygen:1;
            int interm:1;
            unsigned int encdec:1;
            int ksize:2;
        } b;
    } cword;                    /* Control word */
    AES_KEY ks;                 /* Encryption key */
};

/* Interface to assembler module */
unsigned int padlock_capability(void);
void padlock_key_bswap(AES_KEY *key);
void padlock_verify_context(struct padlock_cipher_data *ctx);
void padlock_reload_key(void);
void padlock_aes_block(void *out, const void *inp,
                       struct padlock_cipher_data *ctx);
int padlock_ecb_encrypt(void *out, const void *inp,
                        struct padlock_cipher_data *ctx, size_t len);
int padlock_cbc_encrypt(void *out, const void *inp,
                        struct padlock_cipher_data *ctx, size_t len);
int padlock_cfb_encrypt(void *out, const void *inp,
                        struct padlock_cipher_data *ctx, size_t len);
int padlock_ofb_encrypt(void *out, const void *inp,
                        struct padlock_cipher_data *ctx, size_t len);
int padlock_ctr32_encrypt(void *out, const void *inp,
                          struct padlock_cipher_data *ctx, size_t len);
int padlock_xstore(void *out, int edx);
void padlock_sha1_oneshot(void *ctx, const void *inp, size_t len);
void padlock_sha1(void *ctx, const void *inp, size_t len);
void padlock_sha256_oneshot(void *ctx, const void *inp, size_t len);
void padlock_sha256(void *ctx, const void *inp, size_t len);

/*
 * Load supported features of the CPU to see if the PadLock is available.
 */
static int padlock_available(void)
{
    unsigned int edx = padlock_capability();

    /* Fill up some flags */
    padlock_use_ace = ((edx & (0x3 << 6)) == (0x3 << 6));
    padlock_use_rng = ((edx & (0x3 << 2)) == (0x3 << 2));

    return padlock_use_ace + padlock_use_rng;
}
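
Each feature is reported by a pair of adjacent EDX bits, and the tests above insist on both being set before the unit is used; the usual reading of the pair is "present" and "enabled". A small restatement of the same decoding (illustrative only, the macro and function names are not from this file):

/* Illustrative only -- mirrors the expressions in padlock_available(). */
#define PADLOCK_RNG_MASK (0x3 << 2)   /* RNG bits 2 and 3 */
#define PADLOCK_ACE_MASK (0x3 << 6)   /* ACE bits 6 and 7 */

static int padlock_has_ace(unsigned int edx)
{
    return (edx & PADLOCK_ACE_MASK) == PADLOCK_ACE_MASK;
}

static int padlock_has_rng(unsigned int edx)
{
    return (edx & PADLOCK_RNG_MASK) == PADLOCK_RNG_MASK;
}
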

/* ===== AES encryption/decryption ===== */

#  if defined(NID_aes_128_cfb128) && ! defined (NID_aes_128_cfb)
#   define NID_aes_128_cfb NID_aes_128_cfb128
#  endif

#  if defined(NID_aes_128_ofb128) && ! defined (NID_aes_128_ofb)
#   define NID_aes_128_ofb NID_aes_128_ofb128
#  endif

#  if defined(NID_aes_192_cfb128) && ! defined (NID_aes_192_cfb)
#   define NID_aes_192_cfb NID_aes_192_cfb128
#  endif

#  if defined(NID_aes_192_ofb128) && ! defined (NID_aes_192_ofb)
#   define NID_aes_192_ofb NID_aes_192_ofb128
#  endif

#  if defined(NID_aes_256_cfb128) && ! defined (NID_aes_256_cfb)
#   define NID_aes_256_cfb NID_aes_256_cfb128
#  endif

#  if defined(NID_aes_256_ofb128) && ! defined (NID_aes_256_ofb)
#   define NID_aes_256_ofb NID_aes_256_ofb128
#  endif

/* List of supported ciphers. */
static const int padlock_cipher_nids[] = {
    NID_aes_128_ecb,
    NID_aes_128_cbc,
    NID_aes_128_cfb,
    NID_aes_128_ofb,
    NID_aes_128_ctr,

    NID_aes_192_ecb,
    NID_aes_192_cbc,
    NID_aes_192_cfb,
    NID_aes_192_ofb,
    NID_aes_192_ctr,

    NID_aes_256_ecb,
    NID_aes_256_cbc,
    NID_aes_256_cfb,
    NID_aes_256_ofb,
    NID_aes_256_ctr
};

static int padlock_cipher_nids_num = (sizeof(padlock_cipher_nids) /
                                      sizeof(padlock_cipher_nids[0]));

/* Function prototypes ... */
static int padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                                const unsigned char *iv, int enc);

#  define NEAREST_ALIGNED(ptr) ( (unsigned char *)(ptr) +         \
        ( (0x10 - ((size_t)(ptr) & 0x0F)) & 0x0F )      )
#  define ALIGNED_CIPHER_DATA(ctx) ((struct padlock_cipher_data *)\
        NEAREST_ALIGNED(EVP_CIPHER_CTX_get_cipher_data(ctx)))
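
The PadLock xcrypt path expects the control word, key and IV to sit on a 16-byte boundary, while EVP's cipher-data allocation carries no such guarantee, so NEAREST_ALIGNED() rounds the pointer up to the next multiple of 16 (and the implementation context is allocated 16 bytes of slack further down, in DECLARE_AES_EVP). As a worked example: for a pointer whose low byte is 0x17, (0x10 - (0x17 & 0x0F)) & 0x0F = 9, and 0x17 + 9 = 0x20, the next 16-byte boundary; for an already aligned pointer the offset evaluates to (0x10 - 0) & 0x0F = 0, so it is left untouched.
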

static int
padlock_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
                   const unsigned char *in_arg, size_t nbytes)
{
    return padlock_ecb_encrypt(out_arg, in_arg,
                               ALIGNED_CIPHER_DATA(ctx), nbytes);
}

static int
padlock_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
                   const unsigned char *in_arg, size_t nbytes)
{
    struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
    int ret;

    memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);
    if ((ret = padlock_cbc_encrypt(out_arg, in_arg, cdata, nbytes)))
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);
    return ret;
}

static int
padlock_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
                   const unsigned char *in_arg, size_t nbytes)
{
    struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
    size_t chunk;

    if ((chunk = EVP_CIPHER_CTX_get_num(ctx))) {   /* borrow chunk variable */
        unsigned char *ivp = EVP_CIPHER_CTX_iv_noconst(ctx);

        if (chunk >= AES_BLOCK_SIZE)
            return 0;           /* bogus value */

        if (EVP_CIPHER_CTX_is_encrypting(ctx))
            while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
                ivp[chunk] = *(out_arg++) = *(in_arg++) ^ ivp[chunk];
                chunk++, nbytes--;
            }
        else
            while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
                unsigned char c = *(in_arg++);
                *(out_arg++) = c ^ ivp[chunk];
                ivp[chunk++] = c, nbytes--;
            }

        EVP_CIPHER_CTX_set_num(ctx, chunk % AES_BLOCK_SIZE);
    }

    if (nbytes == 0)
        return 1;

    memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);

    if ((chunk = nbytes & ~(AES_BLOCK_SIZE - 1))) {
        if (!padlock_cfb_encrypt(out_arg, in_arg, cdata, chunk))
            return 0;
        nbytes -= chunk;
    }

    if (nbytes) {
        unsigned char *ivp = cdata->iv;

        out_arg += chunk;
        in_arg += chunk;
        EVP_CIPHER_CTX_set_num(ctx, nbytes);
        if (cdata->cword.b.encdec) {
            cdata->cword.b.encdec = 0;
            padlock_reload_key();
            padlock_aes_block(ivp, ivp, cdata);
            cdata->cword.b.encdec = 1;
            padlock_reload_key();
            while (nbytes) {
                unsigned char c = *(in_arg++);
                *(out_arg++) = c ^ *ivp;
                *(ivp++) = c, nbytes--;
            }
        } else {
            padlock_reload_key();
            padlock_aes_block(ivp, ivp, cdata);
            padlock_reload_key();
            while (nbytes) {
                *ivp = *(out_arg++) = *(in_arg++) ^ *ivp;
                ivp++, nbytes--;
            }
        }
    }

    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);

    return 1;
}

static int
padlock_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
                   const unsigned char *in_arg, size_t nbytes)
{
    struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
    size_t chunk;

    /*
     * ctx->num is maintained in byte-oriented modes, such as CFB and OFB...
     */
    if ((chunk = EVP_CIPHER_CTX_get_num(ctx))) {   /* borrow chunk variable */
        unsigned char *ivp = EVP_CIPHER_CTX_iv_noconst(ctx);

        if (chunk >= AES_BLOCK_SIZE)
            return 0;           /* bogus value */

        while (chunk < AES_BLOCK_SIZE && nbytes != 0) {
            *(out_arg++) = *(in_arg++) ^ ivp[chunk];
            chunk++, nbytes--;
        }

        EVP_CIPHER_CTX_set_num(ctx, chunk % AES_BLOCK_SIZE);
    }

    if (nbytes == 0)
        return 1;

    memcpy(cdata->iv, EVP_CIPHER_CTX_iv(ctx), AES_BLOCK_SIZE);

    if ((chunk = nbytes & ~(AES_BLOCK_SIZE - 1))) {
        if (!padlock_ofb_encrypt(out_arg, in_arg, cdata, chunk))
            return 0;
        nbytes -= chunk;
    }

    if (nbytes) {
        unsigned char *ivp = cdata->iv;

        out_arg += chunk;
        in_arg += chunk;
        EVP_CIPHER_CTX_set_num(ctx, nbytes);
        padlock_reload_key();   /* empirically found */
        padlock_aes_block(ivp, ivp, cdata);
        padlock_reload_key();   /* empirically found */
        while (nbytes) {
            *(out_arg++) = *(in_arg++) ^ *ivp;
            ivp++, nbytes--;
        }
    }

    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), cdata->iv, AES_BLOCK_SIZE);

    return 1;
}

static void padlock_ctr32_encrypt_glue(const unsigned char *in,
                                       unsigned char *out, size_t blocks,
                                       struct padlock_cipher_data *ctx,
                                       const unsigned char *ivec)
{
    memcpy(ctx->iv, ivec, AES_BLOCK_SIZE);
    padlock_ctr32_encrypt(out, in, ctx, AES_BLOCK_SIZE * blocks);
}

static int
padlock_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out_arg,
                   const unsigned char *in_arg, size_t nbytes)
{
    struct padlock_cipher_data *cdata = ALIGNED_CIPHER_DATA(ctx);
    int n = EVP_CIPHER_CTX_get_num(ctx);
    unsigned int num;

    if (n < 0)
        return 0;
    num = (unsigned int)n;

    CRYPTO_ctr128_encrypt_ctr32(in_arg, out_arg, nbytes,
                                cdata, EVP_CIPHER_CTX_iv_noconst(ctx),
                                EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                                (ctr128_f) padlock_ctr32_encrypt_glue);

    EVP_CIPHER_CTX_set_num(ctx, (size_t)num);
    return 1;
}
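
The hardware CTR primitive only processes whole 16-byte blocks and only advances the low 32 bits of the counter, so padlock_ctr_cipher() delegates the byte-level bookkeeping to CRYPTO_ctr128_encrypt_ctr32() from <openssl/modes.h>: that helper keeps partial-block state in the context's num/buf fields, handles overflow of the 32-bit counter itself, and calls the glue function above once per run of full blocks. The cast works because the glue matches the callback shape declared in <openssl/modes.h>, reproduced here for reference only:

/* From <openssl/modes.h>; shown for reference, not redeclared in e_padlock.c. */
typedef void (*ctr128_f)(const unsigned char *in, unsigned char *out,
                         size_t blocks, const void *key,
                         const unsigned char ivec[16]);

The glue only reorders the argument convention of the assembler routine to fit this shape and copies the caller's counter block into the aligned context first.
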

#  define EVP_CIPHER_block_size_ECB       AES_BLOCK_SIZE
#  define EVP_CIPHER_block_size_CBC       AES_BLOCK_SIZE
#  define EVP_CIPHER_block_size_OFB       1
#  define EVP_CIPHER_block_size_CFB       1
#  define EVP_CIPHER_block_size_CTR       1

/*
 * Declaring so many ciphers by hand would be a pain. Instead introduce a bit
 * of preprocessor magic :-)
 */
#  define DECLARE_AES_EVP(ksize,lmode,umode)      \
static EVP_CIPHER *_hidden_aes_##ksize##_##lmode = NULL; \
static const EVP_CIPHER *padlock_aes_##ksize##_##lmode(void) \
{                                                                       \
    if (_hidden_aes_##ksize##_##lmode == NULL                           \
        && ((_hidden_aes_##ksize##_##lmode =                            \
             EVP_CIPHER_meth_new(NID_aes_##ksize##_##lmode,             \
                                 EVP_CIPHER_block_size_##umode,         \
                                 AES_KEY_SIZE_##ksize)) == NULL         \
            || !EVP_CIPHER_meth_set_iv_length(_hidden_aes_##ksize##_##lmode, \
                                              AES_BLOCK_SIZE)           \
            || !EVP_CIPHER_meth_set_flags(_hidden_aes_##ksize##_##lmode, \
                                          0 | EVP_CIPH_##umode##_MODE)  \
            || !EVP_CIPHER_meth_set_init(_hidden_aes_##ksize##_##lmode, \
                                         padlock_aes_init_key)          \
            || !EVP_CIPHER_meth_set_do_cipher(_hidden_aes_##ksize##_##lmode, \
                                              padlock_##lmode##_cipher) \
            || !EVP_CIPHER_meth_set_impl_ctx_size(_hidden_aes_##ksize##_##lmode, \
                                                  sizeof(struct padlock_cipher_data) + 16) \
            || !EVP_CIPHER_meth_set_set_asn1_params(_hidden_aes_##ksize##_##lmode, \
                                                    EVP_CIPHER_set_asn1_iv) \
            || !EVP_CIPHER_meth_set_get_asn1_params(_hidden_aes_##ksize##_##lmode, \
                                                    EVP_CIPHER_get_asn1_iv))) { \
        EVP_CIPHER_meth_free(_hidden_aes_##ksize##_##lmode);            \
        _hidden_aes_##ksize##_##lmode = NULL;                           \
    }                                                                   \
    return _hidden_aes_##ksize##_##lmode;                               \
}
Unexecuted instantiation: e_padlock.c:padlock_aes_128_ecb
Unexecuted instantiation: e_padlock.c:padlock_aes_128_cbc
Unexecuted instantiation: e_padlock.c:padlock_aes_128_cfb
Unexecuted instantiation: e_padlock.c:padlock_aes_128_ofb
Unexecuted instantiation: e_padlock.c:padlock_aes_128_ctr
Unexecuted instantiation: e_padlock.c:padlock_aes_192_ecb
Unexecuted instantiation: e_padlock.c:padlock_aes_192_cbc
Unexecuted instantiation: e_padlock.c:padlock_aes_192_cfb
Unexecuted instantiation: e_padlock.c:padlock_aes_192_ofb
Unexecuted instantiation: e_padlock.c:padlock_aes_192_ctr
Unexecuted instantiation: e_padlock.c:padlock_aes_256_ecb
Unexecuted instantiation: e_padlock.c:padlock_aes_256_cbc
Unexecuted instantiation: e_padlock.c:padlock_aes_256_cfb
Unexecuted instantiation: e_padlock.c:padlock_aes_256_ofb
Unexecuted instantiation: e_padlock.c:padlock_aes_256_ctr

DECLARE_AES_EVP(128, ecb, ECB)
DECLARE_AES_EVP(128, cbc, CBC)
DECLARE_AES_EVP(128, cfb, CFB)
DECLARE_AES_EVP(128, ofb, OFB)
DECLARE_AES_EVP(128, ctr, CTR)

DECLARE_AES_EVP(192, ecb, ECB)
DECLARE_AES_EVP(192, cbc, CBC)
DECLARE_AES_EVP(192, cfb, CFB)
DECLARE_AES_EVP(192, ofb, OFB)
DECLARE_AES_EVP(192, ctr, CTR)

DECLARE_AES_EVP(256, ecb, ECB)
DECLARE_AES_EVP(256, cbc, CBC)
DECLARE_AES_EVP(256, cfb, CFB)
DECLARE_AES_EVP(256, ofb, OFB)
DECLARE_AES_EVP(256, ctr, CTR)
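
To make the macro concrete, DECLARE_AES_EVP(128, cbc, CBC) expands to roughly the following (hand-expanded sketch, not literal preprocessor output); the extra 16 bytes of impl_ctx_size are the slack that ALIGNED_CIPHER_DATA() needs to realign the context:

/* Hand-expanded sketch of DECLARE_AES_EVP(128, cbc, CBC) -- illustrative only. */
static EVP_CIPHER *_hidden_aes_128_cbc = NULL;
static const EVP_CIPHER *padlock_aes_128_cbc(void)
{
    if (_hidden_aes_128_cbc == NULL
        && ((_hidden_aes_128_cbc =
             EVP_CIPHER_meth_new(NID_aes_128_cbc,
                                 AES_BLOCK_SIZE /* block size */,
                                 AES_KEY_SIZE_128 /* key size */)) == NULL
            || !EVP_CIPHER_meth_set_iv_length(_hidden_aes_128_cbc, AES_BLOCK_SIZE)
            || !EVP_CIPHER_meth_set_flags(_hidden_aes_128_cbc, EVP_CIPH_CBC_MODE)
            || !EVP_CIPHER_meth_set_init(_hidden_aes_128_cbc, padlock_aes_init_key)
            || !EVP_CIPHER_meth_set_do_cipher(_hidden_aes_128_cbc, padlock_cbc_cipher)
            || !EVP_CIPHER_meth_set_impl_ctx_size(_hidden_aes_128_cbc,
                                                  sizeof(struct padlock_cipher_data) + 16)
            || !EVP_CIPHER_meth_set_set_asn1_params(_hidden_aes_128_cbc,
                                                    EVP_CIPHER_set_asn1_iv)
            || !EVP_CIPHER_meth_set_get_asn1_params(_hidden_aes_128_cbc,
                                                    EVP_CIPHER_get_asn1_iv))) {
        EVP_CIPHER_meth_free(_hidden_aes_128_cbc);
        _hidden_aes_128_cbc = NULL;
    }
    return _hidden_aes_128_cbc;
}
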

static int
padlock_ciphers(ENGINE *e, const EVP_CIPHER **cipher, const int **nids,
                int nid)
{
    /* No specific cipher => return a list of supported nids ... */
    if (!cipher) {
        *nids = padlock_cipher_nids;
        return padlock_cipher_nids_num;
    }

    /* ... or the requested "cipher" otherwise */
    switch (nid) {
    case NID_aes_128_ecb:
        *cipher = padlock_aes_128_ecb();
        break;
    case NID_aes_128_cbc:
        *cipher = padlock_aes_128_cbc();
        break;
    case NID_aes_128_cfb:
        *cipher = padlock_aes_128_cfb();
        break;
    case NID_aes_128_ofb:
        *cipher = padlock_aes_128_ofb();
        break;
    case NID_aes_128_ctr:
        *cipher = padlock_aes_128_ctr();
        break;

    case NID_aes_192_ecb:
        *cipher = padlock_aes_192_ecb();
        break;
    case NID_aes_192_cbc:
        *cipher = padlock_aes_192_cbc();
        break;
    case NID_aes_192_cfb:
        *cipher = padlock_aes_192_cfb();
        break;
    case NID_aes_192_ofb:
        *cipher = padlock_aes_192_ofb();
        break;
    case NID_aes_192_ctr:
        *cipher = padlock_aes_192_ctr();
        break;

    case NID_aes_256_ecb:
        *cipher = padlock_aes_256_ecb();
        break;
    case NID_aes_256_cbc:
        *cipher = padlock_aes_256_cbc();
        break;
    case NID_aes_256_cfb:
        *cipher = padlock_aes_256_cfb();
        break;
    case NID_aes_256_ofb:
        *cipher = padlock_aes_256_ofb();
        break;
    case NID_aes_256_ctr:
        *cipher = padlock_aes_256_ctr();
        break;

    default:
        /* Sorry, we don't support this NID */
        *cipher = NULL;
        return 0;
    }

    return 1;
}

/* Prepare the encryption key for PadLock usage */
static int
padlock_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                     const unsigned char *iv, int enc)
{
    struct padlock_cipher_data *cdata;
    int key_len = EVP_CIPHER_CTX_get_key_length(ctx) * 8;
    unsigned long mode = EVP_CIPHER_CTX_get_mode(ctx);

    if (key == NULL)
        return 0;               /* ERROR */

    cdata = ALIGNED_CIPHER_DATA(ctx);
    memset(cdata, 0, sizeof(*cdata));

    /* Prepare Control word. */
    if (mode == EVP_CIPH_OFB_MODE || mode == EVP_CIPH_CTR_MODE)
        cdata->cword.b.encdec = 0;
    else
        cdata->cword.b.encdec = (EVP_CIPHER_CTX_is_encrypting(ctx) == 0);
    cdata->cword.b.rounds = 10 + (key_len - 128) / 32;
    cdata->cword.b.ksize = (key_len - 128) / 64;

    switch (key_len) {
    case 128:
        /*
         * PadLock can generate an extended key for AES128 in hardware
         */
        memcpy(cdata->ks.rd_key, key, AES_KEY_SIZE_128);
        cdata->cword.b.keygen = 0;
        break;

    case 192:
    case 256:
        /*
         * Generate an extended AES key in software. Needed for AES192/AES256
         */
        /*
         * Well, the above applies to Stepping 8 CPUs and is listed as
         * hardware errata. They most likely will fix it at some point and
         * then a check for stepping would be due here.
         */
        if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
            && !enc)
            AES_set_decrypt_key(key, key_len, &cdata->ks);
        else
            AES_set_encrypt_key(key, key_len, &cdata->ks);
        /*
         * OpenSSL C functions use byte-swapped extended key.
         */
        padlock_key_bswap(&cdata->ks);
        cdata->cword.b.keygen = 1;
        break;

    default:
        /* ERROR */
        return 0;
    }

    /*
     * This is done to cover for cases when the user reuses the
     * context for a new key. The catch is that if we don't do
     * this, the padlock_*_cipher handlers might proceed with the old key...
     */
    padlock_reload_key();

    return 1;
}
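
Plugging the three supported key lengths into the control-word arithmetic above gives the values the hardware expects: a 128-bit key gets rounds = 10 + (128 - 128)/32 = 10 and ksize = 0, with keygen = 0 because the CPU can expand a 128-bit key itself; a 192-bit key gets rounds = 12 and ksize = 1; a 256-bit key gets rounds = 14 and ksize = 2, the latter two with keygen = 1 and a software-expanded key schedule that padlock_key_bswap() converts to the byte order the hardware wants. These round counts match the standard AES schedule (10/12/14 rounds).
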

/* ===== Random Number Generator ===== */
/*
 * This code is not engaged. The reason is that it does not comply
 * with recommendations for VIA RNG usage for secure applications
 * (posted at http://www.via.com.tw/en/viac3/c3.jsp) nor does it
 * provide meaningful error control...
 */
/*
 * Wrapper that provides an interface between the API and the raw PadLock
 * RNG
 */
static int padlock_rand_bytes(unsigned char *output, int count)
{
    unsigned int eax, buf;

    while (count >= 8) {
        eax = padlock_xstore(output, 0);
        if (!(eax & (1 << 6)))
            return 0;           /* RNG disabled */
        /* this ---vv--- covers DC bias, Raw Bits and String Filter */
        if (eax & (0x1F << 10))
            return 0;
        if ((eax & 0x1F) == 0)
            continue;           /* no data, retry... */
        if ((eax & 0x1F) != 8)
            return 0;           /* fatal failure...  */
        output += 8;
        count -= 8;
    }
    while (count > 0) {
        eax = padlock_xstore(&buf, 3);
        if (!(eax & (1 << 6)))
            return 0;           /* RNG disabled */
        /* this ---vv--- covers DC bias, Raw Bits and String Filter */
        if (eax & (0x1F << 10))
            return 0;
        if ((eax & 0x1F) == 0)
            continue;           /* no data, retry... */
        if ((eax & 0x1F) != 1)
            return 0;           /* fatal failure...  */
        *output++ = (unsigned char)buf;
        count--;
    }
    OPENSSL_cleanse(&buf, sizeof(buf));

    return 1;
}
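
The checks above read the status word that the PadLock XSTORE instruction leaves in EAX. Restated as masks (illustrative names, not from this file): bits 0-4 give the number of bytes actually stored, bit 6 says the RNG unit is switched on, and bits 10-14 are the failure flags the in-line comment refers to (DC bias, raw-bits and string-filter conditions). The second loop calls padlock_xstore() with 3 instead of 0; judging by the checks, that mode returns one byte at a time where the bulk loop expects eight.

/* Illustrative only -- how padlock_rand_bytes() interprets the XSTORE status. */
#define XSTORE_COUNT_MASK   0x1F          /* bits 0-4: bytes returned by this store */
#define XSTORE_RNG_ENABLED  (1 << 6)      /* RNG unit enabled */
#define XSTORE_FAILURE_MASK (0x1F << 10)  /* DC bias / raw bits / string filter flags */
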

/* Dummy but necessary function */
static int padlock_rand_status(void)
{
    return 1;
}

/* Prepare structure for registration */
static RAND_METHOD padlock_rand = {
    NULL,                       /* seed */
    padlock_rand_bytes,         /* bytes */
    NULL,                       /* cleanup */
    NULL,                       /* add */
    padlock_rand_bytes,         /* pseudorand */
    padlock_rand_status,        /* rand status */
};

# endif                        /* COMPILE_PADLOCKENG */
#endif                         /* !OPENSSL_NO_PADLOCKENG */

#if defined(OPENSSL_NO_PADLOCKENG) || !defined(COMPILE_PADLOCKENG)
# ifndef OPENSSL_NO_DYNAMIC_ENGINE
OPENSSL_EXPORT
    int bind_engine(ENGINE *e, const char *id, const dynamic_fns *fns);
OPENSSL_EXPORT
    int bind_engine(ENGINE *e, const char *id, const dynamic_fns *fns)
{
    return 0;
}

IMPLEMENT_DYNAMIC_CHECK_FN()
# endif
#endif