Coverage Report

Created: 2023-03-26 08:33

/src/gnutls/lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c
All instrumented lines in this file have an execution count of 0; nothing below was covered.

Source:
/*
 * Copyright (C) 2011-2016 Free Software Foundation, Inc.
 * Copyright (C) 2015-2018 Red Hat, Inc.
 *
 * Author: Nikos Mavrogiannopoulos
 *
 * This file is part of GnuTLS.
 *
 * The GnuTLS is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program.  If not, see <https://www.gnu.org/licenses/>
 *
 */

/*
 * The following code is an implementation of the AES-GCM cipher
 * (with 128-, 192- and 256-bit keys) using Intel's AES instruction set.
 */

#include "errors.h"
#include "gnutls_int.h"
#include <gnutls/crypto.h>
#include <aes-x86.h>
#include <x86-common.h>
#include <nettle/memxor.h>
#include <byteswap.h>

#define GCM_BLOCK_SIZE 16

/* GCM mode with PCLMUL and AVX optimization */

typedef struct {
	uint64_t hi, lo;
} u128;

/* This is the gcm128 structure used in openssl. It
 * is compatible with the included assembly code.
 */
struct gcm128_context {
	union {
		uint64_t u[2];
		uint32_t d[4];
		uint8_t c[16];
		size_t t[16 / sizeof(size_t)];
	} Yi, EKi, EK0, len, Xi, H;
	u128 Htable[16];
};

struct aes_gcm_ctx {
	AES_KEY expanded_key;
	struct gcm128_context gcm;
	unsigned finished;
	unsigned auth_finished;
	size_t rekey_counter;
};

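/*
 * The three routines below are implemented in assembly and perform the
 * GHASH computation over GF(2^128), folding each input block into the
 * running digest as Xi = (Xi ^ block) * H.  gcm_init_avx() precomputes
 * Htable from the hash key H = E_K(0^128) (a table of powers of H, as
 * in OpenSSL's gcm128 code with which this structure is compatible),
 * gcm_ghash_avx() folds `len' bytes of input into Xi, and
 * gcm_gmult_avx() multiplies Xi by H once.
 */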
void gcm_init_avx(u128 Htable[16], const uint64_t Xi[2]);
void gcm_ghash_avx(uint64_t Xi[2], const u128 Htable[16], const uint8_t *in,
		   size_t len);
void gcm_gmult_avx(uint64_t Xi[2], const u128 Htable[16]);

static void aes_gcm_deinit(void *_ctx)
{
	struct aes_gcm_ctx *ctx = _ctx;

	zeroize_temp_key(ctx, sizeof(*ctx));
	gnutls_free(ctx);
}

static int
aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx, int enc)
{
	/* we use key size to distinguish */
	if (algorithm != GNUTLS_CIPHER_AES_128_GCM &&
	    algorithm != GNUTLS_CIPHER_AES_192_GCM &&
	    algorithm != GNUTLS_CIPHER_AES_256_GCM)
		return GNUTLS_E_INVALID_REQUEST;

	*_ctx = gnutls_calloc(1, sizeof(struct aes_gcm_ctx));
	if (*_ctx == NULL) {
		gnutls_assert();
		return GNUTLS_E_MEMORY_ERROR;
	}

	return 0;
}

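/*
 * Key setup: expand the user key with AES-NI and derive the GHASH key
 * H = E_K(0^128).  The context is zero-initialized by gnutls_calloc(),
 * so encrypting H.c in place encrypts the all-zero block.  The two
 * 64-bit halves are then byte-swapped into the representation the
 * assembly expects before the multiplication table is precomputed.
 */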
static int
aes_gcm_cipher_setkey(void *_ctx, const void *userkey, size_t keysize)
{
	struct aes_gcm_ctx *ctx = _ctx;
	int ret;

	CHECK_AES_KEYSIZE(keysize);

	ret = aesni_set_encrypt_key(userkey, keysize * 8,
				    ALIGN16(&ctx->expanded_key));
	if (ret != 0)
		return gnutls_assert_val(GNUTLS_E_ENCRYPTION_FAILED);

	aesni_ecb_encrypt(ctx->gcm.H.c, ctx->gcm.H.c,
			  GCM_BLOCK_SIZE, ALIGN16(&ctx->expanded_key), 1);

	ctx->gcm.H.u[0] = bswap_64(ctx->gcm.H.u[0]);
	ctx->gcm.H.u[1] = bswap_64(ctx->gcm.H.u[1]);

	gcm_init_avx(ctx->gcm.Htable, ctx->gcm.H.u);

	ctx->rekey_counter = 0;
	return 0;
}

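/*
 * IV setup: only the 96-bit nonce size is accepted, for which GCM
 * defines the initial counter block J0 = IV || 0^31 || 1.  EK0 = E_K(J0),
 * XORed into the tag at the end, is computed here, and the counter is
 * advanced to 2 for the first data block.
 */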
static int aes_gcm_setiv(void *_ctx, const void *iv, size_t iv_size)
{
	struct aes_gcm_ctx *ctx = _ctx;

	if (iv_size != GCM_BLOCK_SIZE - 4)
		return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

	memset(ctx->gcm.Xi.c, 0, sizeof(ctx->gcm.Xi.c));
	memset(ctx->gcm.len.c, 0, sizeof(ctx->gcm.len.c));

	memcpy(ctx->gcm.Yi.c, iv, GCM_BLOCK_SIZE - 4);
	ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 4] = 0;
	ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 3] = 0;
	ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 2] = 0;
	ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 1] = 1;

	aesni_ecb_encrypt(ctx->gcm.Yi.c, ctx->gcm.EK0.c,
			  GCM_BLOCK_SIZE, ALIGN16(&ctx->expanded_key), 1);
	ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 1] = 2;
	ctx->finished = 0;
	ctx->auth_finished = 0;
	ctx->rekey_counter = 0;
	return 0;
}

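/*
 * Fold src into the GHASH digest: whole blocks go through the bulk
 * assembly routine; a trailing partial block is implicitly zero-padded
 * by XORing only `rest' bytes into Xi before one final multiplication.
 */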
static void
gcm_ghash(struct aes_gcm_ctx *ctx, const uint8_t *src, size_t src_size)
{
	size_t rest = src_size % GCM_BLOCK_SIZE;
	size_t aligned_size = src_size - rest;

	if (aligned_size > 0)
		gcm_ghash_avx(ctx->gcm.Xi.u, ctx->gcm.Htable, src,
			      aligned_size);

	if (rest > 0) {
		memxor(ctx->gcm.Xi.c, src + aligned_size, rest);
		gcm_gmult_avx(ctx->gcm.Xi.u, ctx->gcm.Htable);
	}
}

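/*
 * Encrypt a final partial block: the remaining bytes are copied into a
 * full-size bounce buffer so the counter-mode assembly can process one
 * whole block, and only `length' output bytes are copied back out.
 */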
static inline void
ctr_encrypt_last(struct aes_gcm_ctx *ctx, const uint8_t *src,
		 uint8_t *dst, size_t pos, size_t length)
{
	uint8_t tmp[GCM_BLOCK_SIZE];
	uint8_t out[GCM_BLOCK_SIZE];

	memcpy(tmp, &src[pos], length);
	aesni_ctr32_encrypt_blocks(tmp, out, 1,
				   ALIGN16(&ctx->expanded_key), ctx->gcm.Yi.c);

	memcpy(&dst[pos], out, length);
}

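/*
 * Encrypt whole blocks in counter mode, then advance the 32-bit
 * big-endian counter in the last word of Yi by the number of blocks
 * consumed.  record_aes_gcm_encrypt_size() bounds how much data may be
 * encrypted under one key.  A trailing partial block sets ctx->finished,
 * since the stream cannot be resumed after one; GHASH is then computed
 * over the produced ciphertext.
 */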
static int
aes_gcm_encrypt(void *_ctx, const void *src, size_t src_size,
		void *dst, size_t length)
{
	struct aes_gcm_ctx *ctx = _ctx;
	int blocks = src_size / GCM_BLOCK_SIZE;
	int exp_blocks = blocks * GCM_BLOCK_SIZE;
	int rest = src_size - exp_blocks;
	uint32_t counter;
	int ret;

	if (unlikely(ctx->finished))
		return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

	if (unlikely(length < src_size))
		return gnutls_assert_val(GNUTLS_E_SHORT_MEMORY_BUFFER);

	ret = record_aes_gcm_encrypt_size(&ctx->rekey_counter, src_size);
	if (ret < 0)
		return gnutls_assert_val(ret);

	if (blocks > 0) {
		aesni_ctr32_encrypt_blocks(src, dst, blocks,
					   ALIGN16(&ctx->expanded_key),
					   ctx->gcm.Yi.c);

		counter = _gnutls_read_uint32(ctx->gcm.Yi.c + 12);
		counter += blocks;
		_gnutls_write_uint32(counter, ctx->gcm.Yi.c + 12);
	}

	if (rest > 0) {		/* last incomplete block */
		ctr_encrypt_last(ctx, src, dst, exp_blocks, rest);
		ctx->finished = 1;
	}

	gcm_ghash(ctx, dst, src_size);
	ctx->gcm.len.u[1] += src_size;

	return 0;
}

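/*
 * The mirror image of aes_gcm_encrypt(), except that GHASH is taken
 * over the incoming ciphertext before it is decrypted.
 */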
static int
aes_gcm_decrypt(void *_ctx, const void *src, size_t src_size,
		void *dst, size_t dst_size)
{
	struct aes_gcm_ctx *ctx = _ctx;
	int blocks = src_size / GCM_BLOCK_SIZE;
	int exp_blocks = blocks * GCM_BLOCK_SIZE;
	int rest = src_size - exp_blocks;
	uint32_t counter;

	if (unlikely(ctx->finished))
		return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

	if (unlikely(dst_size < src_size))
		return gnutls_assert_val(GNUTLS_E_SHORT_MEMORY_BUFFER);

	gcm_ghash(ctx, src, src_size);
	ctx->gcm.len.u[1] += src_size;

	if (blocks > 0) {
		aesni_ctr32_encrypt_blocks(src, dst, blocks,
					   ALIGN16(&ctx->expanded_key),
					   ctx->gcm.Yi.c);

		counter = _gnutls_read_uint32(ctx->gcm.Yi.c + 12);
		counter += blocks;
		_gnutls_write_uint32(counter, ctx->gcm.Yi.c + 12);
	}

	if (rest > 0) {		/* last incomplete block */
		ctr_encrypt_last(ctx, src, dst, exp_blocks, rest);
		ctx->finished = 1;
	}

	return 0;
}

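/*
 * Absorb additional authenticated data (AAD) into the digest.  Its
 * length is tracked separately in len.u[0]; a partial block ends the
 * AAD phase, since the implicit zero padding rules out further input.
 */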
static int aes_gcm_auth(void *_ctx, const void *src, size_t src_size)
{
	struct aes_gcm_ctx *ctx = _ctx;

	if (unlikely(ctx->auth_finished))
		return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

	gcm_ghash(ctx, src, src_size);
	ctx->gcm.len.u[0] += src_size;

	if (src_size % GCM_BLOCK_SIZE != 0)
		ctx->auth_finished = 1;

	return 0;
}

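/*
 * Finalize the digest with the bit lengths of the AAD and ciphertext,
 * encoded as len(A) || len(C), then XOR with EK0 to form the tag,
 * truncated to tagsize if a shorter tag was requested.
 */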
static void aes_gcm_tag(void *_ctx, void *tag, size_t tagsize)
{
	struct aes_gcm_ctx *ctx = _ctx;
	uint8_t buffer[GCM_BLOCK_SIZE];
	uint64_t alen, clen;

	alen = ctx->gcm.len.u[0] * 8;
	clen = ctx->gcm.len.u[1] * 8;

	_gnutls_write_uint64(alen, buffer);
	_gnutls_write_uint64(clen, &buffer[8]);

	gcm_ghash_avx(ctx->gcm.Xi.u, ctx->gcm.Htable, buffer, GCM_BLOCK_SIZE);

	ctx->gcm.Xi.u[0] ^= ctx->gcm.EK0.u[0];
	ctx->gcm.Xi.u[1] ^= ctx->gcm.EK0.u[1];

	memcpy(tag, ctx->gcm.Xi.c, MIN(GCM_BLOCK_SIZE, tagsize));
}

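/*
 * On x86-64 a dedicated AES-GCM assembly routine handles the bulk of
 * the data (presumably interleaving counter-mode encryption with
 * GHASH).  It is only invoked for inputs of at least 96 bytes; it
 * processes as many bytes as it can and returns that count, and the
 * plain C paths above handle the remainder.
 */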
#ifdef ASM_X86_64
/* requires AVX */
static int
aesni_gcm_aead_encrypt(void *_ctx,
		       const void *nonce, size_t nonce_size,
		       const void *auth, size_t auth_size,
		       size_t tag_size,
		       const void *plain, size_t plain_size,
		       void *encr, size_t encr_size)
{
	struct aes_gcm_ctx *ctx = _ctx;
	size_t s = 0;

	if (encr_size < plain_size + tag_size)
		return gnutls_assert_val(GNUTLS_E_SHORT_MEMORY_BUFFER);

	aes_gcm_setiv(ctx, nonce, nonce_size);
	aes_gcm_auth(ctx, auth, auth_size);

	if (plain_size >= 96) {
		s = aesni_gcm_encrypt(plain, encr, plain_size,
				      ALIGN16(&ctx->expanded_key),
				      ctx->gcm.Yi.c, ctx->gcm.Xi.u);
		ctx->gcm.len.u[1] += s;
	}

	if ((plain_size - s) > 0)
		aes_gcm_encrypt(ctx, ((uint8_t *) plain) + s, plain_size - s,
				((uint8_t *) encr) + s, encr_size - s);

	aes_gcm_tag(ctx, ((uint8_t *) encr) + plain_size, tag_size);

	return 0;
}

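/*
 * AEAD decrypt: the tag is recomputed over the received ciphertext and
 * compared against its trailing tag_size bytes with gnutls_memcmp(),
 * a timing-independent comparison; on mismatch
 * GNUTLS_E_DECRYPTION_FAILED is returned.
 */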
static int
aesni_gcm_aead_decrypt(void *_ctx,
		       const void *nonce, size_t nonce_size,
		       const void *auth, size_t auth_size,
		       size_t tag_size,
		       const void *encr, size_t encr_size,
		       void *plain, size_t plain_size)
{
	struct aes_gcm_ctx *ctx = _ctx;
	uint8_t tag[MAX_HASH_SIZE];
	size_t s = 0;

	if (unlikely(encr_size < tag_size))
		return gnutls_assert_val(GNUTLS_E_DECRYPTION_FAILED);

	if (unlikely(plain_size < encr_size - tag_size))
		return gnutls_assert_val(GNUTLS_E_SHORT_MEMORY_BUFFER);

	aes_gcm_setiv(ctx, nonce, nonce_size);
	aes_gcm_auth(ctx, auth, auth_size);

	encr_size -= tag_size;

	if (encr_size >= 96) {
		s = aesni_gcm_decrypt(encr, plain, encr_size,
				      ALIGN16(&ctx->expanded_key),
				      ctx->gcm.Yi.c, ctx->gcm.Xi.u);
		ctx->gcm.len.u[1] += s;
	}

	if ((encr_size - s) > 0) {
		aes_gcm_decrypt(ctx, ((uint8_t *) encr) + s, encr_size - s,
				((uint8_t *) plain) + s, plain_size - s);
	}

	aes_gcm_tag(ctx, tag, tag_size);

	if (gnutls_memcmp(((uint8_t *) encr) + encr_size, tag, tag_size) != 0)
		return gnutls_assert_val(GNUTLS_E_DECRYPTION_FAILED);

	return 0;
}
#else
# define aesni_gcm_aead_decrypt aes_gcm_aead_decrypt
# define aesni_gcm_aead_encrypt aes_gcm_aead_encrypt
# include "aes-gcm-aead.h"
#endif

const gnutls_crypto_cipher_st _gnutls_aes_gcm_pclmul_avx = {
	.init = aes_gcm_cipher_init,
	.setkey = aes_gcm_cipher_setkey,
	.setiv = aes_gcm_setiv,
	.aead_encrypt = aesni_gcm_aead_encrypt,
	.aead_decrypt = aesni_gcm_aead_decrypt,
	.encrypt = aes_gcm_encrypt,
	.decrypt = aes_gcm_decrypt,
	.deinit = aes_gcm_deinit,
	.tag = aes_gcm_tag,
	.auth = aes_gcm_auth,
};
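
Usage sketch: this vtable is not called directly; GnuTLS selects it during
x86 capability detection (PCLMUL + AVX), and applications reach it through
the public AEAD API. A minimal sketch, assuming a zeroed demo key and the
12-byte (GCM_BLOCK_SIZE - 4) nonce that aes_gcm_setiv() requires:

#include <gnutls/gnutls.h>
#include <gnutls/crypto.h>
#include <stdint.h>

static int aead_example(void)
{
	gnutls_aead_cipher_hd_t h;
	uint8_t key[16] = { 0 };	/* demo only; use a random key */
	uint8_t nonce[12] = { 0 };	/* demo only; never reuse a nonce */
	uint8_t pt[64] = { 0 };
	uint8_t ct[sizeof(pt) + 16];	/* ciphertext plus 16-byte tag */
	size_t ct_size = sizeof(ct);
	gnutls_datum_t dkey = { key, sizeof(key) };
	int ret;

	ret = gnutls_aead_cipher_init(&h, GNUTLS_CIPHER_AES_128_GCM, &dkey);
	if (ret < 0)
		return ret;

	/* the tag is appended to the ciphertext, matching the
	 * encr_size >= plain_size + tag_size check above */
	ret = gnutls_aead_cipher_encrypt(h, nonce, sizeof(nonce),
					 NULL, 0 /* no AAD */, 16,
					 pt, sizeof(pt), ct, &ct_size);

	gnutls_aead_cipher_deinit(h);
	return ret;
}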