/src/gnutls/lib/accelerated/x86/aes-gcm-x86-pclmul.c
/*
 * Copyright (C) 2011-2012 Free Software Foundation, Inc.
 * Copyright (C) 2018 Red Hat, Inc.
 *
 * Author: Nikos Mavrogiannopoulos
 *
 * This file is part of GnuTLS.
 *
 * The GnuTLS is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>
 *
 */

/*
 * The following code is an implementation of the AES-GCM ciphers
 * (AES-128/192/256-GCM) using Intel's AES-NI and PCLMUL instruction sets.
 */

#include "errors.h"
#include "gnutls_int.h"
#include <gnutls/crypto.h>
#include "aes-x86.h"
#include "x86-common.h"
#include <nettle/memxor.h>
#include <byteswap.h>

#define GCM_BLOCK_SIZE 16

/* GCM mode */

typedef struct {
        uint64_t hi, lo;
} u128;
/* This is the gcm128 structure used in openssl. It
 * is compatible with the included assembly code.
 */
struct gcm128_context {
        union {
                uint64_t u[2];
                uint32_t d[4];
                uint8_t c[16];
        } Yi, EKi, EK0, len, Xi, H;
        u128 Htable[16];
};

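/* Per-instance cipher state: the expanded AES round keys plus the GCM
 * running state. The finished/auth_finished flags record that a partial
 * block has already been processed, after which no further data or AAD
 * is accepted.
 */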
struct aes_gcm_ctx {
        AES_KEY expanded_key;
        struct gcm128_context gcm;
        unsigned finished;
        unsigned auth_finished;
        size_t rekey_counter;
};

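/* GHASH primitives provided by the included PCLMUL assembly:
 * gcm_init_clmul() precomputes Htable from the hash key H,
 * gcm_gmult_clmul() multiplies Xi by H, and gcm_ghash_clmul()
 * folds a run of complete 16-byte blocks into Xi.
 */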
void gcm_init_clmul(u128 Htable[16], const uint64_t Xi[2]);
void gcm_ghash_clmul(uint64_t Xi[2], const u128 Htable[16], const uint8_t *inp,
                     size_t len);
void gcm_gmult_clmul(uint64_t Xi[2], const u128 Htable[16]);

static void aes_gcm_deinit(void *_ctx)
{
        struct aes_gcm_ctx *ctx = _ctx;

        zeroize_temp_key(ctx, sizeof(*ctx));
        gnutls_free(ctx);
}

static int aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx,
                               int enc)
{
        /* we use key size to distinguish */
        if (algorithm != GNUTLS_CIPHER_AES_128_GCM &&
            algorithm != GNUTLS_CIPHER_AES_192_GCM &&
            algorithm != GNUTLS_CIPHER_AES_256_GCM)
                return GNUTLS_E_INVALID_REQUEST;

        *_ctx = gnutls_calloc(1, sizeof(struct aes_gcm_ctx));
        if (*_ctx == NULL) {
                gnutls_assert();
                return GNUTLS_E_MEMORY_ERROR;
        }

        return 0;
}

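/* Expand the AES key and derive the GHASH key H = E_K(0^128); the H block
 * is all-zero at this point because the context was allocated with calloc().
 * H is byte-swapped to host order and expanded into Htable for the PCLMUL
 * routines.
 */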
static int aes_gcm_cipher_setkey(void *_ctx, const void *userkey,
                                 size_t keysize)
{
        struct aes_gcm_ctx *ctx = _ctx;
        int ret;

        CHECK_AES_KEYSIZE(keysize);

        ret = aesni_set_encrypt_key(userkey, keysize * 8,
                                    ALIGN16(&ctx->expanded_key));
        if (ret != 0)
                return gnutls_assert_val(GNUTLS_E_ENCRYPTION_FAILED);

        aesni_ecb_encrypt(ctx->gcm.H.c, ctx->gcm.H.c, GCM_BLOCK_SIZE,
                          ALIGN16(&ctx->expanded_key), 1);

        ctx->gcm.H.u[0] = bswap_64(ctx->gcm.H.u[0]);
        ctx->gcm.H.u[1] = bswap_64(ctx->gcm.H.u[1]);

        gcm_init_clmul(ctx->gcm.Htable, ctx->gcm.H.u);

        ctx->rekey_counter = 0;
        return 0;
}

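/* A 12-byte (96-bit) IV is required. The counter block Yi is set to
 * IV || 0x00000001; EK0 = E_K(Yi) is kept for the final tag computation,
 * and the counter is then advanced to 2 for the first data block.
 */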
static int aes_gcm_setiv(void *_ctx, const void *iv, size_t iv_size)
{
        struct aes_gcm_ctx *ctx = _ctx;

        if (iv_size != GCM_BLOCK_SIZE - 4)
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        memset(ctx->gcm.Xi.c, 0, sizeof(ctx->gcm.Xi.c));
        memset(ctx->gcm.len.c, 0, sizeof(ctx->gcm.len.c));

        memcpy(ctx->gcm.Yi.c, iv, GCM_BLOCK_SIZE - 4);
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 4] = 0;
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 3] = 0;
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 2] = 0;
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 1] = 1;

        aesni_ecb_encrypt(ctx->gcm.Yi.c, ctx->gcm.EK0.c, GCM_BLOCK_SIZE,
                          ALIGN16(&ctx->expanded_key), 1);
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 1] = 2;
        ctx->finished = 0;
        ctx->auth_finished = 0;
        ctx->rekey_counter = 0;
        return 0;
}

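/* Fold src into the GHASH accumulator Xi: complete blocks go through the
 * PCLMUL bulk routine, while a trailing partial block is implicitly
 * zero-padded by XORing it into Xi followed by a single multiplication by H.
 */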
static void gcm_ghash(struct aes_gcm_ctx *ctx, const uint8_t *src,
                      size_t src_size)
{
        size_t rest = src_size % GCM_BLOCK_SIZE;
        size_t aligned_size = src_size - rest;

        if (aligned_size > 0)
                gcm_ghash_clmul(ctx->gcm.Xi.u, ctx->gcm.Htable, src,
                                aligned_size);

        if (rest > 0) {
                memxor(ctx->gcm.Xi.c, src + aligned_size, rest);
                gcm_gmult_clmul(ctx->gcm.Xi.u, ctx->gcm.Htable);
        }
}

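/* Encrypt the final partial block: copy it into a full-size scratch block,
 * run one counter-mode block, and copy back only the `length` bytes that
 * are actually needed.
 */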
static inline void ctr_encrypt_last(struct aes_gcm_ctx *ctx, const uint8_t *src,
                                    uint8_t *dst, size_t pos, size_t length)
{
        uint8_t tmp[GCM_BLOCK_SIZE];
        uint8_t out[GCM_BLOCK_SIZE];

        memcpy(tmp, &src[pos], length);
        aesni_ctr32_encrypt_blocks(tmp, out, 1, ALIGN16(&ctx->expanded_key),
                                   ctx->gcm.Yi.c);

        memcpy(&dst[pos], out, length);
}

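/* Encrypt-then-MAC: full blocks are encrypted in counter mode via
 * aesni_ctr32_encrypt_blocks() (whose 32-bit big-endian counter in Yi is
 * advanced by hand afterwards), the resulting ciphertext is folded into
 * GHASH, and its byte length is accumulated in len.u[1].
 * record_aes_gcm_encrypt_size() tracks how much data has been processed
 * under the current key and rejects the call once the limit is exceeded.
 */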
static int aes_gcm_encrypt(void *_ctx, const void *src, size_t src_size,
                           void *dst, size_t length)
{
        struct aes_gcm_ctx *ctx = _ctx;
        int blocks = src_size / GCM_BLOCK_SIZE;
        int exp_blocks = blocks * GCM_BLOCK_SIZE;
        int rest = src_size - (exp_blocks);
        uint32_t counter;
        int ret;

        if (unlikely(ctx->finished))
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        if (unlikely(length < src_size))
                return gnutls_assert_val(GNUTLS_E_SHORT_MEMORY_BUFFER);

        ret = record_aes_gcm_encrypt_size(&ctx->rekey_counter, src_size);
        if (ret < 0) {
                return gnutls_assert_val(ret);
        }

        if (blocks > 0) {
                aesni_ctr32_encrypt_blocks(src, dst, blocks,
                                           ALIGN16(&ctx->expanded_key),
                                           ctx->gcm.Yi.c);

                counter = _gnutls_read_uint32(ctx->gcm.Yi.c + 12);
                counter += blocks;
                _gnutls_write_uint32(counter, ctx->gcm.Yi.c + 12);
        }

        if (rest > 0) { /* last incomplete block */
                ctr_encrypt_last(ctx, src, dst, exp_blocks, rest);
                ctx->finished = 1;
        }

        gcm_ghash(ctx, dst, src_size);
        ctx->gcm.len.u[1] += src_size;

        return 0;
}

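/* Decryption mirrors encryption, except that the input ciphertext is folded
 * into GHASH before it is decrypted in counter mode, so the authenticator is
 * always computed over the ciphertext.
 */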
static int aes_gcm_decrypt(void *_ctx, const void *src, size_t src_size,
                           void *dst, size_t dst_size)
{
        struct aes_gcm_ctx *ctx = _ctx;
        int blocks = src_size / GCM_BLOCK_SIZE;
        int exp_blocks = blocks * GCM_BLOCK_SIZE;
        int rest = src_size - (exp_blocks);
        uint32_t counter;

        if (unlikely(ctx->finished))
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        if (unlikely(dst_size < src_size))
                return gnutls_assert_val(GNUTLS_E_SHORT_MEMORY_BUFFER);

        gcm_ghash(ctx, src, src_size);
        ctx->gcm.len.u[1] += src_size;

        if (blocks > 0) {
                aesni_ctr32_encrypt_blocks(src, dst, blocks,
                                           ALIGN16(&ctx->expanded_key),
                                           ctx->gcm.Yi.c);

                counter = _gnutls_read_uint32(ctx->gcm.Yi.c + 12);
                counter += blocks;
                _gnutls_write_uint32(counter, ctx->gcm.Yi.c + 12);
        }

        if (rest > 0) { /* last incomplete block */
                ctr_encrypt_last(ctx, src, dst, exp_blocks, rest);
                ctx->finished = 1;
        }

        return 0;
}

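/* Absorb additional authenticated data (AAD) into GHASH and account for it
 * in len.u[0]. Once a partial block has been supplied, no further AAD is
 * accepted, since the padding of the AAD segment is then fixed.
 */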
static int aes_gcm_auth(void *_ctx, const void *src, size_t src_size)
{
        struct aes_gcm_ctx *ctx = _ctx;

        if (unlikely(ctx->auth_finished))
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        gcm_ghash(ctx, src, src_size);
        ctx->gcm.len.u[0] += src_size;

        if (src_size % GCM_BLOCK_SIZE != 0)
                ctx->auth_finished = 1;

        return 0;
}

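/* Finish GHASH with the standard length block (AAD and ciphertext lengths
 * in bits), then XOR the result with EK0 = E_K(IV || 1) to produce the tag.
 * At most GCM_BLOCK_SIZE bytes of tag are emitted.
 */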
static void aes_gcm_tag(void *_ctx, void *tag, size_t tagsize)
{
        struct aes_gcm_ctx *ctx = _ctx;
        uint8_t buffer[GCM_BLOCK_SIZE];
        uint64_t alen, clen;

        alen = ctx->gcm.len.u[0] * 8;
        clen = ctx->gcm.len.u[1] * 8;

        _gnutls_write_uint64(alen, buffer);
        _gnutls_write_uint64(clen, &buffer[8]);

        gcm_ghash_clmul(ctx->gcm.Xi.u, ctx->gcm.Htable, buffer, GCM_BLOCK_SIZE);

        ctx->gcm.Xi.u[0] ^= ctx->gcm.EK0.u[0];
        ctx->gcm.Xi.u[1] ^= ctx->gcm.EK0.u[1];

        memcpy(tag, ctx->gcm.Xi.c, MIN(GCM_BLOCK_SIZE, tagsize));
}

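/* aes-gcm-aead.h supplies the aes_gcm_aead_encrypt()/aes_gcm_aead_decrypt()
 * one-shot AEAD wrappers referenced in the vtable below; it is shared with
 * the other accelerated GCM implementations.
 */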
#include "aes-gcm-aead.h"

const gnutls_crypto_cipher_st _gnutls_aes_gcm_pclmul = {
        .init = aes_gcm_cipher_init,
        .setkey = aes_gcm_cipher_setkey,
        .setiv = aes_gcm_setiv,
        .aead_encrypt = aes_gcm_aead_encrypt,
        .aead_decrypt = aes_gcm_aead_decrypt,
        .encrypt = aes_gcm_encrypt,
        .decrypt = aes_gcm_decrypt,
        .deinit = aes_gcm_deinit,
        .tag = aes_gcm_tag,
        .auth = aes_gcm_auth,
};
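
/* Usage sketch (an illustrative assumption, not part of this file): this
 * backend is selected by the x86 capability-detection code when AES-NI and
 * PCLMUL are available, and is normally exercised through the public AEAD
 * API rather than directly, e.g.:
 *
 *   gnutls_aead_cipher_hd_t h;
 *   gnutls_datum_t key = { key_bytes, 16 };
 *
 *   gnutls_aead_cipher_init(&h, GNUTLS_CIPHER_AES_128_GCM, &key);
 *   gnutls_aead_cipher_encrypt(h, nonce, 12, aad, aad_len, 16,
 *                              ptext, ptext_len, ctext, &ctext_len);
 *   gnutls_aead_cipher_deinit(h);
 */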