/src/gnutls/lib/accelerated/x86/aes-gcm-x86-pclmul.c
Line | Count | Source |
1 | | /* |
2 | | * Copyright (C) 2011-2012 Free Software Foundation, Inc. |
3 | | * Copyright (C) 2018 Red Hat, Inc. |
4 | | * |
5 | | * Author: Nikos Mavrogiannopoulos |
6 | | * |
7 | | * This file is part of GnuTLS. |
8 | | * |
9 | | * GnuTLS is free software; you can redistribute it and/or |
10 | | * modify it under the terms of the GNU Lesser General Public License |
11 | | * as published by the Free Software Foundation; either version 2.1 of |
12 | | * the License, or (at your option) any later version. |
13 | | * |
14 | | * This library is distributed in the hope that it will be useful, but |
15 | | * WITHOUT ANY WARRANTY; without even the implied warranty of |
16 | | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
17 | | * Lesser General Public License for more details. |
18 | | * |
19 | | * You should have received a copy of the GNU Lesser General Public License |
20 | | * along with this program. If not, see <https://www.gnu.org/licenses/> |
21 | | * |
22 | | */ |
23 | | |
24 | | /* |
25 | | * The following code is an implementation of the AES-GCM ciphers |
26 | | * (128/192/256-bit keys) using Intel's AES-NI and PCLMUL instructions. |
27 | | */ |
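| | /* Note: this backend appears to be registered by the x86 acceleration |
| |  * probing code (x86-common.c) only when CPUID reports both AES-NI and |
| |  * PCLMULQDQ; on other CPUs GnuTLS falls back to a different AES-GCM |
| |  * implementation. |
| |  */ |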
28 | | |
29 | | #include "errors.h" |
30 | | #include "gnutls_int.h" |
31 | | #include <gnutls/crypto.h> |
32 | | #include "errors.h" |
33 | | #include <aes-x86.h> |
34 | | #include <x86-common.h> |
35 | | #include <nettle/memxor.h> |
36 | | #include <byteswap.h> |
37 | | |
38 | 0 | #define GCM_BLOCK_SIZE 16 |
39 | | |
40 | | /* GCM mode */ |
41 | | |
42 | | typedef struct { |
43 | | uint64_t hi, lo; |
44 | | } u128; |
45 | | |
46 | | /* This is the gcm128 structure used in openssl. It |
47 | | * is compatible with the included assembly code. |
48 | | */ |
49 | | struct gcm128_context { |
50 | | union { |
51 | | uint64_t u[2]; |
52 | | uint32_t d[4]; |
53 | | uint8_t c[16]; |
54 | | } Yi, EKi, EK0, len, Xi, H; |
55 | | u128 Htable[16]; |
56 | | }; |
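| | /* Field roles, as used below: Yi is the current counter block, EK0 the |
| |  * encrypted initial counter that masks the tag, Xi the running GHASH |
| |  * accumulator, H the hash subkey E_K(0^128), len.u[0] and len.u[1] the |
| |  * AAD and ciphertext byte counts, and Htable the PCLMUL precomputation |
| |  * derived from H. EKi is unused here and kept for layout compatibility. |
| |  */ |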
57 | | |
58 | | struct aes_gcm_ctx { |
59 | | AES_KEY expanded_key; |
60 | | struct gcm128_context gcm; |
61 | | unsigned finished; |
62 | | unsigned auth_finished; |
63 | | size_t rekey_counter; |
64 | | }; |
65 | | |
66 | | void gcm_init_clmul(u128 Htable[16], const uint64_t Xi[2]); |
67 | | void gcm_ghash_clmul(uint64_t Xi[2], const u128 Htable[16], |
68 | | const uint8_t * inp, size_t len); |
69 | | void gcm_gmult_clmul(uint64_t Xi[2], const u128 Htable[16]); |
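| | /* These routines are provided by the bundled OpenSSL-derived GHASH |
| |  * assembly. As used in this file: gcm_init_clmul precomputes Htable from |
| |  * the hash subkey H, gcm_gmult_clmul replaces Xi with Xi*H, and |
| |  * gcm_ghash_clmul folds len bytes of complete blocks into Xi, computing |
| |  * (Xi ^ block)*H per 16-byte block. |
| |  */ |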
70 | | |
71 | | static void aes_gcm_deinit(void *_ctx) |
72 | 0 | { |
73 | 0 | struct aes_gcm_ctx *ctx = _ctx; |
74 | |
75 | 0 | zeroize_temp_key(ctx, sizeof(*ctx)); |
76 | 0 | gnutls_free(ctx); |
77 | 0 | } |
78 | | |
79 | | static int |
80 | | aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx, int enc) |
81 | 0 | { |
82 | | /* we use key size to distinguish */ |
83 | 0 | if (algorithm != GNUTLS_CIPHER_AES_128_GCM && |
84 | 0 | algorithm != GNUTLS_CIPHER_AES_192_GCM && |
85 | 0 | algorithm != GNUTLS_CIPHER_AES_256_GCM) |
86 | 0 | return GNUTLS_E_INVALID_REQUEST; |
87 | | |
88 | 0 | *_ctx = gnutls_calloc(1, sizeof(struct aes_gcm_ctx)); |
89 | 0 | if (*_ctx == NULL) { |
90 | 0 | gnutls_assert(); |
91 | 0 | return GNUTLS_E_MEMORY_ERROR; |
92 | 0 | } |
93 | | |
94 | 0 | return 0; |
95 | 0 | } |
96 | | |
97 | | static int |
98 | | aes_gcm_cipher_setkey(void *_ctx, const void *userkey, size_t keysize) |
99 | 0 | { |
100 | 0 | struct aes_gcm_ctx *ctx = _ctx; |
101 | 0 | int ret; |
102 | |
103 | 0 | CHECK_AES_KEYSIZE(keysize); |
104 | | |
105 | 0 | ret = |
106 | 0 | aesni_set_encrypt_key(userkey, keysize * 8, |
107 | 0 | ALIGN16(&ctx->expanded_key)); |
108 | 0 | if (ret != 0) |
109 | 0 | return gnutls_assert_val(GNUTLS_E_ENCRYPTION_FAILED); |
110 | | |
111 | 0 | aesni_ecb_encrypt(ctx->gcm.H.c, ctx->gcm.H.c, |
112 | 0 | GCM_BLOCK_SIZE, ALIGN16(&ctx->expanded_key), 1); |
113 | |
114 | 0 | ctx->gcm.H.u[0] = bswap_64(ctx->gcm.H.u[0]); |
115 | 0 | ctx->gcm.H.u[1] = bswap_64(ctx->gcm.H.u[1]); |
116 | |
117 | 0 | gcm_init_clmul(ctx->gcm.Htable, ctx->gcm.H.u); |
118 | |
119 | 0 | ctx->rekey_counter = 0; |
120 | 0 | return 0; |
121 | 0 | } |
122 | | |
123 | | static int aes_gcm_setiv(void *_ctx, const void *iv, size_t iv_size) |
124 | 0 | { |
125 | 0 | struct aes_gcm_ctx *ctx = _ctx; |
126 | |
127 | 0 | if (iv_size != GCM_BLOCK_SIZE - 4) |
128 | 0 | return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST); |
129 | | |
130 | 0 | memset(ctx->gcm.Xi.c, 0, sizeof(ctx->gcm.Xi.c)); |
131 | 0 | memset(ctx->gcm.len.c, 0, sizeof(ctx->gcm.len.c)); |
132 | |
133 | 0 | memcpy(ctx->gcm.Yi.c, iv, GCM_BLOCK_SIZE - 4); |
134 | 0 | ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 4] = 0; |
135 | 0 | ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 3] = 0; |
136 | 0 | ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 2] = 0; |
137 | 0 | ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 1] = 1; |
138 | |
139 | 0 | aesni_ecb_encrypt(ctx->gcm.Yi.c, ctx->gcm.EK0.c, |
140 | 0 | GCM_BLOCK_SIZE, ALIGN16(&ctx->expanded_key), 1); |
141 | 0 | ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 1] = 2; |
142 | 0 | ctx->finished = 0; |
143 | 0 | ctx->auth_finished = 0; |
144 | 0 | ctx->rekey_counter = 0; |
145 | 0 | return 0; |
146 | 0 | } |
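| | /* This is the 96-bit IV path of GCM (SP 800-38D): Y0 = IV || 0x00000001, |
| |  * EK0 = E_K(Y0) is kept to mask the final tag, and the counter is then |
| |  * advanced to 2 so the first data block is encrypted with Y1. IVs of any |
| |  * other length are rejected rather than run through the GHASH-based |
| |  * derivation of J0. |
| |  */ |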
147 | | |
148 | | static void |
149 | | gcm_ghash(struct aes_gcm_ctx *ctx, const uint8_t * src, size_t src_size) |
150 | 0 | { |
151 | 0 | size_t rest = src_size % GCM_BLOCK_SIZE; |
152 | 0 | size_t aligned_size = src_size - rest; |
153 | |
154 | 0 | if (aligned_size > 0) |
155 | 0 | gcm_ghash_clmul(ctx->gcm.Xi.u, ctx->gcm.Htable, src, |
156 | 0 | aligned_size); |
157 | |
158 | 0 | if (rest > 0) { |
159 | 0 | memxor(ctx->gcm.Xi.c, src + aligned_size, rest); |
160 | 0 | gcm_gmult_clmul(ctx->gcm.Xi.u, ctx->gcm.Htable); |
161 | 0 | } |
162 | 0 | } |
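| | /* A trailing partial block is XORed into Xi and multiplied by H once. |
| |  * Because only `rest' bytes are XORed in, this matches the zero-padding |
| |  * of the final block that GHASH requires. |
| |  */ |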
163 | | |
164 | | static inline void |
165 | | ctr_encrypt_last(struct aes_gcm_ctx *ctx, const uint8_t * src, |
166 | | uint8_t * dst, size_t pos, size_t length) |
167 | 0 | { |
168 | 0 | uint8_t tmp[GCM_BLOCK_SIZE]; |
169 | 0 | uint8_t out[GCM_BLOCK_SIZE]; |
170 | |
171 | 0 | memcpy(tmp, &src[pos], length); |
172 | 0 | aesni_ctr32_encrypt_blocks(tmp, out, 1, |
173 | 0 | ALIGN16(&ctx->expanded_key), ctx->gcm.Yi.c); |
174 | |
175 | 0 | memcpy(&dst[pos], out, length); |
176 | |
177 | 0 | } |
178 | | |
179 | | static int |
180 | | aes_gcm_encrypt(void *_ctx, const void *src, size_t src_size, |
181 | | void *dst, size_t length) |
182 | 0 | { |
183 | 0 | struct aes_gcm_ctx *ctx = _ctx; |
184 | 0 | int blocks = src_size / GCM_BLOCK_SIZE; |
185 | 0 | int exp_blocks = blocks * GCM_BLOCK_SIZE; |
186 | 0 | int rest = src_size - (exp_blocks); |
187 | 0 | uint32_t counter; |
188 | 0 | int ret; |
189 | |
190 | 0 | if (unlikely(ctx->finished)) |
191 | 0 | return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST); |
192 | | |
193 | 0 | if (unlikely(length < src_size)) |
194 | 0 | return gnutls_assert_val(GNUTLS_E_SHORT_MEMORY_BUFFER); |
195 | | |
196 | 0 | ret = record_aes_gcm_encrypt_size(&ctx->rekey_counter, src_size); |
197 | 0 | if (ret < 0) { |
198 | 0 | return gnutls_assert_val(ret); |
199 | 0 | } |
200 | | |
201 | 0 | if (blocks > 0) { |
202 | 0 | aesni_ctr32_encrypt_blocks(src, dst, |
203 | 0 | blocks, |
204 | 0 | ALIGN16(&ctx->expanded_key), |
205 | 0 | ctx->gcm.Yi.c); |
206 | |
207 | 0 | counter = _gnutls_read_uint32(ctx->gcm.Yi.c + 12); |
208 | 0 | counter += blocks; |
209 | 0 | _gnutls_write_uint32(counter, ctx->gcm.Yi.c + 12); |
210 | 0 | } |
211 | |
212 | 0 | if (rest > 0) { /* last incomplete block */ |
213 | 0 | ctr_encrypt_last(ctx, src, dst, exp_blocks, rest); |
214 | 0 | ctx->finished = 1; |
215 | 0 | } |
216 | |
|
217 | 0 | gcm_ghash(ctx, dst, src_size); |
218 | 0 | ctx->gcm.len.u[1] += src_size; |
219 | |
220 | 0 | return 0; |
221 | 0 | } |
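| | /* aesni_ctr32_encrypt_blocks does not store the advanced counter back |
| |  * into Yi, so the 32-bit big-endian counter at Yi.c[12..15] is updated by |
| |  * hand after each call. Once a partial block has been emitted, `finished' |
| |  * rejects further encryption, since the CTR keystream would no longer be |
| |  * block-aligned. record_aes_gcm_encrypt_size() appears to enforce a limit |
| |  * on the amount of data encrypted under one key via rekey_counter. |
| |  */ |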
222 | | |
223 | | static int |
224 | | aes_gcm_decrypt(void *_ctx, const void *src, size_t src_size, |
225 | | void *dst, size_t dst_size) |
226 | 0 | { |
227 | 0 | struct aes_gcm_ctx *ctx = _ctx; |
228 | 0 | int blocks = src_size / GCM_BLOCK_SIZE; |
229 | 0 | int exp_blocks = blocks * GCM_BLOCK_SIZE; |
230 | 0 | int rest = src_size - (exp_blocks); |
231 | 0 | uint32_t counter; |
232 | |
233 | 0 | if (unlikely(ctx->finished)) |
234 | 0 | return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST); |
235 | | |
236 | 0 | if (unlikely(dst_size < src_size)) |
237 | 0 | return gnutls_assert_val(GNUTLS_E_SHORT_MEMORY_BUFFER); |
238 | | |
239 | 0 | gcm_ghash(ctx, src, src_size); |
240 | 0 | ctx->gcm.len.u[1] += src_size; |
241 | |
242 | 0 | if (blocks > 0) { |
243 | 0 | aesni_ctr32_encrypt_blocks(src, dst, |
244 | 0 | blocks, |
245 | 0 | ALIGN16(&ctx->expanded_key), |
246 | 0 | ctx->gcm.Yi.c); |
247 | |
248 | 0 | counter = _gnutls_read_uint32(ctx->gcm.Yi.c + 12); |
249 | 0 | counter += blocks; |
250 | 0 | _gnutls_write_uint32(counter, ctx->gcm.Yi.c + 12); |
251 | 0 | } |
252 | |
253 | 0 | if (rest > 0) { /* last incomplete block */ |
254 | 0 | ctr_encrypt_last(ctx, src, dst, exp_blocks, rest); |
255 | 0 | ctx->finished = 1; |
256 | 0 | } |
257 | |
258 | 0 | return 0; |
259 | 0 | } |
260 | | |
261 | | static int aes_gcm_auth(void *_ctx, const void *src, size_t src_size) |
262 | 0 | { |
263 | 0 | struct aes_gcm_ctx *ctx = _ctx; |
264 | |
265 | 0 | if (unlikely(ctx->auth_finished)) |
266 | 0 | return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST); |
267 | |
269 | 0 | ctx->gcm.len.u[0] += src_size; |
270 | |
271 | 0 | if (src_size % GCM_BLOCK_SIZE != 0) |
272 | 0 | ctx->auth_finished = 1; |
273 | |
274 | 0 | return 0; |
275 | 0 | } |
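| | /* AAD may arrive in several calls as long as every chunk except the last |
| |  * is a multiple of 16 bytes; a partial chunk sets `auth_finished' because |
| |  * GHASH has then already absorbed the implicit zero padding. |
| |  */ |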
276 | | |
277 | | static void aes_gcm_tag(void *_ctx, void *tag, size_t tagsize) |
278 | 0 | { |
279 | 0 | struct aes_gcm_ctx *ctx = _ctx; |
280 | 0 | uint8_t buffer[GCM_BLOCK_SIZE]; |
281 | 0 | uint64_t alen, clen; |
282 | |
283 | 0 | alen = ctx->gcm.len.u[0] * 8; |
284 | 0 | clen = ctx->gcm.len.u[1] * 8; |
285 | |
286 | 0 | _gnutls_write_uint64(alen, buffer); |
287 | 0 | _gnutls_write_uint64(clen, &buffer[8]); |
288 | |
289 | 0 | gcm_ghash_clmul(ctx->gcm.Xi.u, ctx->gcm.Htable, buffer, GCM_BLOCK_SIZE); |
290 | |
291 | 0 | ctx->gcm.Xi.u[0] ^= ctx->gcm.EK0.u[0]; |
292 | 0 | ctx->gcm.Xi.u[1] ^= ctx->gcm.EK0.u[1]; |
293 | |
294 | 0 | memcpy(tag, ctx->gcm.Xi.c, MIN(GCM_BLOCK_SIZE, tagsize)); |
295 | 0 | } |
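| | /* Standard GCM finalisation: the AAD and ciphertext lengths, in bits, are |
| |  * hashed as one last block and the result is masked with EK0 = E_K(Y0). |
| |  * The tag is truncated to tagsize, at most 16 bytes. |
| |  */ |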
296 | | |
297 | | #include "aes-gcm-aead.h" |
298 | | |
299 | | const gnutls_crypto_cipher_st _gnutls_aes_gcm_pclmul = { |
300 | | .init = aes_gcm_cipher_init, |
301 | | .setkey = aes_gcm_cipher_setkey, |
302 | | .setiv = aes_gcm_setiv, |
303 | | .aead_encrypt = aes_gcm_aead_encrypt, |
304 | | .aead_decrypt = aes_gcm_aead_decrypt, |
305 | | .encrypt = aes_gcm_encrypt, |
306 | | .decrypt = aes_gcm_decrypt, |
307 | | .deinit = aes_gcm_deinit, |
308 | | .tag = aes_gcm_tag, |
309 | | .auth = aes_gcm_auth, |
310 | | }; |
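
For orientation, a minimal usage sketch follows. It is not part of this file: it shows one way to reach AES-GCM through the public gnutls_aead_cipher_* API, which dispatches to this PCLMUL backend only when the CPU supports it. The key, nonce and buffer contents below are placeholder values.

/* Sketch: encrypt one buffer with AES-128-GCM via the public AEAD API.
 * All data is illustrative; error handling is kept to the minimum. */
#include <gnutls/gnutls.h>
#include <gnutls/crypto.h>

int encrypt_example(void)
{
	unsigned char key[16] = { 0 };		/* placeholder key */
	unsigned char nonce[12] = { 0 };	/* 96-bit IV, as required above */
	unsigned char ptext[32] = "example plaintext";
	unsigned char ctext[32 + 16];		/* ciphertext plus 16-byte tag */
	size_t clen = sizeof(ctext);
	gnutls_datum_t dkey = { key, sizeof(key) };
	gnutls_aead_cipher_hd_t h;
	int ret;

	ret = gnutls_aead_cipher_init(&h, GNUTLS_CIPHER_AES_128_GCM, &dkey);
	if (ret < 0)
		return ret;

	ret = gnutls_aead_cipher_encrypt(h, nonce, sizeof(nonce),
					 NULL, 0,	/* no AAD */
					 16,		/* tag size */
					 ptext, sizeof(ptext),
					 ctext, &clen);
	gnutls_aead_cipher_deinit(h);
	return ret;
}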