/src/libgcrypt/cipher/cipher-gcm.c
Line | Count | Source (jump to first uncovered line) |
1 | | /* cipher-gcm.c - Generic Galois Counter Mode implementation |
2 | | * Copyright (C) 2013 Dmitry Eremin-Solenikov |
3 | | * Copyright (C) 2013, 2018-2019 Jussi Kivilinna <jussi.kivilinna@iki.fi> |
4 | | * |
5 | | * This file is part of Libgcrypt. |
6 | | * |
7 | | * Libgcrypt is free software; you can redistribute it and/or modify |
8 | | * it under the terms of the GNU Lesser General Public License as |
9 | | * published by the Free Software Foundation; either version 2.1 of |
10 | | * the License, or (at your option) any later version. |
11 | | * |
12 | | * Libgcrypt is distributed in the hope that it will be useful, |
13 | | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
14 | | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
15 | | * GNU Lesser General Public License for more details. |
16 | | * |
17 | | * You should have received a copy of the GNU Lesser General Public |
18 | | * License along with this program; if not, see <http://www.gnu.org/licenses/>. |
19 | | */ |
20 | | |
21 | | #include <config.h> |
22 | | #include <stdio.h> |
23 | | #include <stdlib.h> |
24 | | #include <string.h> |
25 | | #include <errno.h> |
26 | | |
27 | | #include "g10lib.h" |
28 | | #include "cipher.h" |
29 | | #include "bufhelp.h" |
30 | | #include "./cipher-internal.h" |
31 | | |
32 | | |
33 | | static gcry_err_code_t _gcry_cipher_gcm_setiv_zero (gcry_cipher_hd_t c); |
34 | | |
35 | | /* Helper macro to force alignment to 16 or 64 bytes. */ |
36 | | #ifdef HAVE_GCC_ATTRIBUTE_ALIGNED |
37 | | # define ATTR_ALIGNED_64 __attribute__ ((aligned (64))) |
38 | | #else |
39 | | # define ATTR_ALIGNED_64 |
40 | | #endif |
41 | | |
42 | | |
43 | | #ifdef GCM_USE_INTEL_PCLMUL |
44 | | extern void _gcry_ghash_setup_intel_pclmul (gcry_cipher_hd_t c, |
45 | | unsigned int hw_features); |
46 | | #endif |
47 | | |
48 | | #ifdef GCM_USE_ARM_PMULL |
49 | | extern void _gcry_ghash_setup_armv8_ce_pmull (void *gcm_key, void *gcm_table); |
50 | | |
51 | | extern unsigned int _gcry_ghash_armv8_ce_pmull (void *gcm_key, byte *result, |
52 | | const byte *buf, size_t nblocks, |
53 | | void *gcm_table); |
54 | | |
55 | | extern unsigned int _gcry_polyval_armv8_ce_pmull (void *gcm_key, byte *result, |
56 | | const byte *buf, |
57 | | size_t nblocks, |
58 | | void *gcm_table); |
59 | | |
60 | | static void |
61 | | ghash_setup_armv8_ce_pmull (gcry_cipher_hd_t c) |
62 | | { |
63 | | _gcry_ghash_setup_armv8_ce_pmull(c->u_mode.gcm.u_ghash_key.key, |
64 | | c->u_mode.gcm.gcm_table); |
65 | | } |
66 | | |
67 | | static unsigned int |
68 | | ghash_armv8_ce_pmull (gcry_cipher_hd_t c, byte *result, const byte *buf, |
69 | | size_t nblocks) |
70 | | { |
71 | | return _gcry_ghash_armv8_ce_pmull(c->u_mode.gcm.u_ghash_key.key, result, buf, |
72 | | nblocks, c->u_mode.gcm.gcm_table); |
73 | | } |
74 | | |
75 | | static unsigned int |
76 | | polyval_armv8_ce_pmull (gcry_cipher_hd_t c, byte *result, const byte *buf, |
77 | | size_t nblocks) |
78 | | { |
79 | | return _gcry_polyval_armv8_ce_pmull(c->u_mode.gcm.u_ghash_key.key, result, |
80 | | buf, nblocks, c->u_mode.gcm.gcm_table); |
81 | | } |
82 | | #endif /* GCM_USE_ARM_PMULL */ |
83 | | |
84 | | #ifdef GCM_USE_ARM_NEON |
85 | | extern void _gcry_ghash_setup_armv7_neon (void *gcm_key); |
86 | | |
87 | | extern unsigned int _gcry_ghash_armv7_neon (void *gcm_key, byte *result, |
88 | | const byte *buf, size_t nblocks); |
89 | | |
90 | | static void |
91 | | ghash_setup_armv7_neon (gcry_cipher_hd_t c) |
92 | | { |
93 | | _gcry_ghash_setup_armv7_neon(c->u_mode.gcm.u_ghash_key.key); |
94 | | } |
95 | | |
96 | | static unsigned int |
97 | | ghash_armv7_neon (gcry_cipher_hd_t c, byte *result, const byte *buf, |
98 | | size_t nblocks) |
99 | | { |
100 | | return _gcry_ghash_armv7_neon(c->u_mode.gcm.u_ghash_key.key, result, buf, |
101 | | nblocks); |
102 | | } |
103 | | #endif /* GCM_USE_ARM_NEON */ |
104 | | |
105 | | #ifdef GCM_USE_AARCH64 |
106 | | extern void _gcry_ghash_setup_aarch64_simd(gcry_cipher_hd_t c); |
107 | | |
108 | | extern unsigned int _gcry_ghash_aarch64_simd(gcry_cipher_hd_t c, byte *result, |
109 | | const byte *buf, size_t nblocks); |
110 | | #endif /* GCM_USE_AARCH64 */ |
111 | | |
112 | | #ifdef GCM_USE_S390X_CRYPTO |
113 | | #include "asm-inline-s390x.h" |
114 | | |
115 | | static unsigned int |
116 | | ghash_s390x_kimd (gcry_cipher_hd_t c, byte *result, const byte *buf, |
117 | | size_t nblocks) |
118 | | { |
119 | | u128_t params[2]; |
120 | | |
121 | | memcpy (¶ms[0], result, 16); |
122 | | memcpy (¶ms[1], c->u_mode.gcm.u_ghash_key.key, 16); |
123 | | |
124 | | kimd_execute (KMID_FUNCTION_GHASH, ¶ms, buf, nblocks * 16); |
125 | | |
126 | | memcpy (result, ¶ms[0], 16); |
127 | | wipememory (params, sizeof(params)); |
128 | | return 0; |
129 | | } |
130 | | #endif /* GCM_USE_S390X_CRYPTO*/ |
131 | | |
132 | | #ifdef GCM_USE_PPC_VPMSUM |
133 | | extern void _gcry_ghash_setup_ppc_vpmsum (void *gcm_table, void *gcm_key); |
134 | | |
135 | | /* result is 128-bits */ |
136 | | extern unsigned int _gcry_ghash_ppc_vpmsum (byte *result, void *gcm_table, |
137 | | const byte *buf, size_t nblocks); |
138 | | |
139 | | static void |
140 | | ghash_setup_ppc_vpmsum (gcry_cipher_hd_t c) |
141 | | { |
142 | | _gcry_ghash_setup_ppc_vpmsum(c->u_mode.gcm.gcm_table, |
143 | | c->u_mode.gcm.u_ghash_key.key); |
144 | | } |
145 | | |
146 | | static unsigned int |
147 | | ghash_ppc_vpmsum (gcry_cipher_hd_t c, byte *result, const byte *buf, |
148 | | size_t nblocks) |
149 | | { |
150 | | return _gcry_ghash_ppc_vpmsum(result, c->u_mode.gcm.gcm_table, buf, |
151 | | nblocks); |
152 | | } |
153 | | #endif /* GCM_USE_PPC_VPMSUM */ |
154 | | |
155 | | #ifdef GCM_USE_TABLES |
156 | | static struct |
157 | | { |
158 | | volatile u32 counter_head; |
159 | | u32 cacheline_align[64 / 4 - 1]; |
160 | | u16 R[256]; |
161 | | volatile u32 counter_tail; |
162 | | } gcm_table ATTR_ALIGNED_64 = |
163 | | { |
164 | | 0, |
165 | | { 0, }, |
166 | | { |
167 | | 0x0000, 0x01c2, 0x0384, 0x0246, 0x0708, 0x06ca, 0x048c, 0x054e, |
168 | | 0x0e10, 0x0fd2, 0x0d94, 0x0c56, 0x0918, 0x08da, 0x0a9c, 0x0b5e, |
169 | | 0x1c20, 0x1de2, 0x1fa4, 0x1e66, 0x1b28, 0x1aea, 0x18ac, 0x196e, |
170 | | 0x1230, 0x13f2, 0x11b4, 0x1076, 0x1538, 0x14fa, 0x16bc, 0x177e, |
171 | | 0x3840, 0x3982, 0x3bc4, 0x3a06, 0x3f48, 0x3e8a, 0x3ccc, 0x3d0e, |
172 | | 0x3650, 0x3792, 0x35d4, 0x3416, 0x3158, 0x309a, 0x32dc, 0x331e, |
173 | | 0x2460, 0x25a2, 0x27e4, 0x2626, 0x2368, 0x22aa, 0x20ec, 0x212e, |
174 | | 0x2a70, 0x2bb2, 0x29f4, 0x2836, 0x2d78, 0x2cba, 0x2efc, 0x2f3e, |
175 | | 0x7080, 0x7142, 0x7304, 0x72c6, 0x7788, 0x764a, 0x740c, 0x75ce, |
176 | | 0x7e90, 0x7f52, 0x7d14, 0x7cd6, 0x7998, 0x785a, 0x7a1c, 0x7bde, |
177 | | 0x6ca0, 0x6d62, 0x6f24, 0x6ee6, 0x6ba8, 0x6a6a, 0x682c, 0x69ee, |
178 | | 0x62b0, 0x6372, 0x6134, 0x60f6, 0x65b8, 0x647a, 0x663c, 0x67fe, |
179 | | 0x48c0, 0x4902, 0x4b44, 0x4a86, 0x4fc8, 0x4e0a, 0x4c4c, 0x4d8e, |
180 | | 0x46d0, 0x4712, 0x4554, 0x4496, 0x41d8, 0x401a, 0x425c, 0x439e, |
181 | | 0x54e0, 0x5522, 0x5764, 0x56a6, 0x53e8, 0x522a, 0x506c, 0x51ae, |
182 | | 0x5af0, 0x5b32, 0x5974, 0x58b6, 0x5df8, 0x5c3a, 0x5e7c, 0x5fbe, |
183 | | 0xe100, 0xe0c2, 0xe284, 0xe346, 0xe608, 0xe7ca, 0xe58c, 0xe44e, |
184 | | 0xef10, 0xeed2, 0xec94, 0xed56, 0xe818, 0xe9da, 0xeb9c, 0xea5e, |
185 | | 0xfd20, 0xfce2, 0xfea4, 0xff66, 0xfa28, 0xfbea, 0xf9ac, 0xf86e, |
186 | | 0xf330, 0xf2f2, 0xf0b4, 0xf176, 0xf438, 0xf5fa, 0xf7bc, 0xf67e, |
187 | | 0xd940, 0xd882, 0xdac4, 0xdb06, 0xde48, 0xdf8a, 0xddcc, 0xdc0e, |
188 | | 0xd750, 0xd692, 0xd4d4, 0xd516, 0xd058, 0xd19a, 0xd3dc, 0xd21e, |
189 | | 0xc560, 0xc4a2, 0xc6e4, 0xc726, 0xc268, 0xc3aa, 0xc1ec, 0xc02e, |
190 | | 0xcb70, 0xcab2, 0xc8f4, 0xc936, 0xcc78, 0xcdba, 0xcffc, 0xce3e, |
191 | | 0x9180, 0x9042, 0x9204, 0x93c6, 0x9688, 0x974a, 0x950c, 0x94ce, |
192 | | 0x9f90, 0x9e52, 0x9c14, 0x9dd6, 0x9898, 0x995a, 0x9b1c, 0x9ade, |
193 | | 0x8da0, 0x8c62, 0x8e24, 0x8fe6, 0x8aa8, 0x8b6a, 0x892c, 0x88ee, |
194 | | 0x83b0, 0x8272, 0x8034, 0x81f6, 0x84b8, 0x857a, 0x873c, 0x86fe, |
195 | | 0xa9c0, 0xa802, 0xaa44, 0xab86, 0xaec8, 0xaf0a, 0xad4c, 0xac8e, |
196 | | 0xa7d0, 0xa612, 0xa454, 0xa596, 0xa0d8, 0xa11a, 0xa35c, 0xa29e, |
197 | | 0xb5e0, 0xb422, 0xb664, 0xb7a6, 0xb2e8, 0xb32a, 0xb16c, 0xb0ae, |
198 | | 0xbbf0, 0xba32, 0xb874, 0xb9b6, 0xbcf8, 0xbd3a, 0xbf7c, 0xbebe, |
199 | | }, |
200 | | 0 |
201 | | }; |
202 | | |
203 | 0 | #define gcmR gcm_table.R |
204 | | |
205 | | static inline |
206 | | void prefetch_table(const void *tab, size_t len) |
207 | 0 | { |
208 | 0 | const volatile byte *vtab = tab; |
209 | 0 | size_t i; |
210 | |
|
211 | 0 | for (i = 0; len - i >= 8 * 32; i += 8 * 32) |
212 | 0 | { |
213 | 0 | (void)vtab[i + 0 * 32]; |
214 | 0 | (void)vtab[i + 1 * 32]; |
215 | 0 | (void)vtab[i + 2 * 32]; |
216 | 0 | (void)vtab[i + 3 * 32]; |
217 | 0 | (void)vtab[i + 4 * 32]; |
218 | 0 | (void)vtab[i + 5 * 32]; |
219 | 0 | (void)vtab[i + 6 * 32]; |
220 | 0 | (void)vtab[i + 7 * 32]; |
221 | 0 | } |
222 | 0 | for (; i < len; i += 32) |
223 | 0 | { |
224 | 0 | (void)vtab[i]; |
225 | 0 | } |
226 | |
|
227 | 0 | (void)vtab[len - 1]; |
228 | 0 | } |
229 | | |
230 | | static inline void |
231 | | do_prefetch_tables (const void *gcmM, size_t gcmM_size) |
232 | 0 | { |
233 | | /* Modify counters to trigger copy-on-write and unsharing if physical pages |
234 | | * of look-up table are shared between processes. Modifying counters also |
235 | | * causes checksums for pages to change and hint same-page merging algorithm |
236 | | * that these pages are frequently changing. */ |
237 | 0 | gcm_table.counter_head++; |
238 | 0 | gcm_table.counter_tail++; |
239 | | |
240 | | /* Prefetch look-up tables to cache. */ |
241 | 0 | prefetch_table(gcmM, gcmM_size); |
242 | 0 | prefetch_table(&gcm_table, sizeof(gcm_table)); |
243 | 0 | } |
244 | | |
245 | | #ifdef GCM_TABLES_USE_U64 |
246 | | static void |
247 | | bshift (u64 * b0, u64 * b1) |
248 | 0 | { |
249 | 0 | u64 t[2], mask; |
250 | |
|
251 | 0 | t[0] = *b0; |
252 | 0 | t[1] = *b1; |
253 | 0 | mask = -(t[1] & 1) & 0xe1; |
254 | 0 | mask <<= 56; |
255 | |
|
256 | 0 | *b1 = (t[1] >> 1) ^ (t[0] << 63); |
257 | 0 | *b0 = (t[0] >> 1) ^ mask; |
258 | 0 | } |
259 | | |
260 | | static void |
261 | | do_fillM (unsigned char *h, u64 *M) |
262 | 0 | { |
263 | 0 | int i, j; |
264 | |
|
265 | 0 | M[0 + 0] = 0; |
266 | 0 | M[0 + 16] = 0; |
267 | |
|
268 | 0 | M[8 + 0] = buf_get_be64 (h + 0); |
269 | 0 | M[8 + 16] = buf_get_be64 (h + 8); |
270 | |
|
271 | 0 | for (i = 4; i > 0; i /= 2) |
272 | 0 | { |
273 | 0 | M[i + 0] = M[2 * i + 0]; |
274 | 0 | M[i + 16] = M[2 * i + 16]; |
275 | |
|
276 | 0 | bshift (&M[i], &M[i + 16]); |
277 | 0 | } |
278 | |
|
279 | 0 | for (i = 2; i < 16; i *= 2) |
280 | 0 | for (j = 1; j < i; j++) |
281 | 0 | { |
282 | 0 | M[(i + j) + 0] = M[i + 0] ^ M[j + 0]; |
283 | 0 | M[(i + j) + 16] = M[i + 16] ^ M[j + 16]; |
284 | 0 | } |
285 | |
|
286 | 0 | for (i = 0; i < 16; i++) |
287 | 0 | { |
288 | 0 | M[i + 32] = (M[i + 0] >> 4) ^ ((u64) gcmR[(M[i + 16] & 0xf) << 4] << 48); |
289 | 0 | M[i + 48] = (M[i + 16] >> 4) ^ (M[i + 0] << 60); |
290 | 0 | } |
291 | 0 | } |
292 | | |
293 | | static inline unsigned int |
294 | | do_ghash (unsigned char *result, const unsigned char *buf, const u64 *gcmM) |
295 | 0 | { |
296 | 0 | u64 V[2]; |
297 | 0 | u64 tmp[2]; |
298 | 0 | const u64 *M; |
299 | 0 | u64 T; |
300 | 0 | u32 A; |
301 | 0 | int i; |
302 | |
|
303 | 0 | cipher_block_xor (V, result, buf, 16); |
304 | 0 | V[0] = be_bswap64 (V[0]); |
305 | 0 | V[1] = be_bswap64 (V[1]); |
306 | | |
307 | | /* First round can be manually tweaked based on fact that 'tmp' is zero. */ |
308 | 0 | M = &gcmM[(V[1] & 0xf) + 32]; |
309 | 0 | V[1] >>= 4; |
310 | 0 | tmp[0] = M[0]; |
311 | 0 | tmp[1] = M[16]; |
312 | 0 | tmp[0] ^= gcmM[(V[1] & 0xf) + 0]; |
313 | 0 | tmp[1] ^= gcmM[(V[1] & 0xf) + 16]; |
314 | 0 | V[1] >>= 4; |
315 | |
|
316 | 0 | i = 6; |
317 | 0 | while (1) |
318 | 0 | { |
319 | 0 | M = &gcmM[(V[1] & 0xf) + 32]; |
320 | 0 | V[1] >>= 4; |
321 | |
|
322 | 0 | A = tmp[1] & 0xff; |
323 | 0 | T = tmp[0]; |
324 | 0 | tmp[0] = (T >> 8) ^ ((u64) gcmR[A] << 48) ^ gcmM[(V[1] & 0xf) + 0]; |
325 | 0 | tmp[1] = (T << 56) ^ (tmp[1] >> 8) ^ gcmM[(V[1] & 0xf) + 16]; |
326 | |
|
327 | 0 | tmp[0] ^= M[0]; |
328 | 0 | tmp[1] ^= M[16]; |
329 | |
|
330 | 0 | if (i == 0) |
331 | 0 | break; |
332 | | |
333 | 0 | V[1] >>= 4; |
334 | 0 | --i; |
335 | 0 | } |
336 | |
|
337 | 0 | i = 7; |
338 | 0 | while (1) |
339 | 0 | { |
340 | 0 | M = &gcmM[(V[0] & 0xf) + 32]; |
341 | 0 | V[0] >>= 4; |
342 | |
|
343 | 0 | A = tmp[1] & 0xff; |
344 | 0 | T = tmp[0]; |
345 | 0 | tmp[0] = (T >> 8) ^ ((u64) gcmR[A] << 48) ^ gcmM[(V[0] & 0xf) + 0]; |
346 | 0 | tmp[1] = (T << 56) ^ (tmp[1] >> 8) ^ gcmM[(V[0] & 0xf) + 16]; |
347 | |
|
348 | 0 | tmp[0] ^= M[0]; |
349 | 0 | tmp[1] ^= M[16]; |
350 | |
|
351 | 0 | if (i == 0) |
352 | 0 | break; |
353 | | |
354 | 0 | V[0] >>= 4; |
355 | 0 | --i; |
356 | 0 | } |
357 | |
|
358 | 0 | buf_put_be64 (result + 0, tmp[0]); |
359 | 0 | buf_put_be64 (result + 8, tmp[1]); |
360 | |
|
361 | 0 | return (sizeof(V) + sizeof(T) + sizeof(tmp) + |
362 | 0 | sizeof(int)*2 + sizeof(void*)*5); |
363 | 0 | } |
364 | | |
365 | | #else /*!GCM_TABLES_USE_U64*/ |
366 | | |
367 | | static void |
368 | | bshift (u32 * M, int i) |
369 | | { |
370 | | u32 t[4], mask; |
371 | | |
372 | | t[0] = M[i * 4 + 0]; |
373 | | t[1] = M[i * 4 + 1]; |
374 | | t[2] = M[i * 4 + 2]; |
375 | | t[3] = M[i * 4 + 3]; |
376 | | mask = -(t[3] & 1) & 0xe1; |
377 | | |
378 | | M[i * 4 + 3] = (t[3] >> 1) ^ (t[2] << 31); |
379 | | M[i * 4 + 2] = (t[2] >> 1) ^ (t[1] << 31); |
380 | | M[i * 4 + 1] = (t[1] >> 1) ^ (t[0] << 31); |
381 | | M[i * 4 + 0] = (t[0] >> 1) ^ (mask << 24); |
382 | | } |
383 | | |
384 | | static void |
385 | | do_fillM (unsigned char *h, u32 *M) |
386 | | { |
387 | | int i, j; |
388 | | |
389 | | M[0 * 4 + 0] = 0; |
390 | | M[0 * 4 + 1] = 0; |
391 | | M[0 * 4 + 2] = 0; |
392 | | M[0 * 4 + 3] = 0; |
393 | | |
394 | | M[8 * 4 + 0] = buf_get_be32 (h + 0); |
395 | | M[8 * 4 + 1] = buf_get_be32 (h + 4); |
396 | | M[8 * 4 + 2] = buf_get_be32 (h + 8); |
397 | | M[8 * 4 + 3] = buf_get_be32 (h + 12); |
398 | | |
399 | | for (i = 4; i > 0; i /= 2) |
400 | | { |
401 | | M[i * 4 + 0] = M[2 * i * 4 + 0]; |
402 | | M[i * 4 + 1] = M[2 * i * 4 + 1]; |
403 | | M[i * 4 + 2] = M[2 * i * 4 + 2]; |
404 | | M[i * 4 + 3] = M[2 * i * 4 + 3]; |
405 | | |
406 | | bshift (M, i); |
407 | | } |
408 | | |
409 | | for (i = 2; i < 16; i *= 2) |
410 | | for (j = 1; j < i; j++) |
411 | | { |
412 | | M[(i + j) * 4 + 0] = M[i * 4 + 0] ^ M[j * 4 + 0]; |
413 | | M[(i + j) * 4 + 1] = M[i * 4 + 1] ^ M[j * 4 + 1]; |
414 | | M[(i + j) * 4 + 2] = M[i * 4 + 2] ^ M[j * 4 + 2]; |
415 | | M[(i + j) * 4 + 3] = M[i * 4 + 3] ^ M[j * 4 + 3]; |
416 | | } |
417 | | |
418 | | for (i = 0; i < 4 * 16; i += 4) |
419 | | { |
420 | | M[i + 0 + 64] = (M[i + 0] >> 4) |
421 | | ^ ((u64) gcmR[(M[i + 3] << 4) & 0xf0] << 16); |
422 | | M[i + 1 + 64] = (M[i + 1] >> 4) ^ (M[i + 0] << 28); |
423 | | M[i + 2 + 64] = (M[i + 2] >> 4) ^ (M[i + 1] << 28); |
424 | | M[i + 3 + 64] = (M[i + 3] >> 4) ^ (M[i + 2] << 28); |
425 | | } |
426 | | } |
427 | | |
428 | | static inline unsigned int |
429 | | do_ghash (unsigned char *result, const unsigned char *buf, const u32 *gcmM) |
430 | | { |
431 | | byte V[16]; |
432 | | u32 tmp[4]; |
433 | | u32 v; |
434 | | const u32 *M, *m; |
435 | | u32 T[3]; |
436 | | int i; |
437 | | |
438 | | cipher_block_xor (V, result, buf, 16); /* V is big-endian */ |
439 | | |
440 | | /* First round can be manually tweaked based on fact that 'tmp' is zero. */ |
441 | | i = 15; |
442 | | |
443 | | v = V[i]; |
444 | | M = &gcmM[(v & 0xf) * 4 + 64]; |
445 | | v = (v & 0xf0) >> 4; |
446 | | m = &gcmM[v * 4]; |
447 | | v = V[--i]; |
448 | | |
449 | | tmp[0] = M[0] ^ m[0]; |
450 | | tmp[1] = M[1] ^ m[1]; |
451 | | tmp[2] = M[2] ^ m[2]; |
452 | | tmp[3] = M[3] ^ m[3]; |
453 | | |
454 | | while (1) |
455 | | { |
456 | | M = &gcmM[(v & 0xf) * 4 + 64]; |
457 | | v = (v & 0xf0) >> 4; |
458 | | m = &gcmM[v * 4]; |
459 | | |
460 | | T[0] = tmp[0]; |
461 | | T[1] = tmp[1]; |
462 | | T[2] = tmp[2]; |
463 | | tmp[0] = (T[0] >> 8) ^ ((u32) gcmR[tmp[3] & 0xff] << 16) ^ m[0]; |
464 | | tmp[1] = (T[0] << 24) ^ (tmp[1] >> 8) ^ m[1]; |
465 | | tmp[2] = (T[1] << 24) ^ (tmp[2] >> 8) ^ m[2]; |
466 | | tmp[3] = (T[2] << 24) ^ (tmp[3] >> 8) ^ m[3]; |
467 | | |
468 | | tmp[0] ^= M[0]; |
469 | | tmp[1] ^= M[1]; |
470 | | tmp[2] ^= M[2]; |
471 | | tmp[3] ^= M[3]; |
472 | | |
473 | | if (i == 0) |
474 | | break; |
475 | | |
476 | | v = V[--i]; |
477 | | } |
478 | | |
479 | | buf_put_be32 (result + 0, tmp[0]); |
480 | | buf_put_be32 (result + 4, tmp[1]); |
481 | | buf_put_be32 (result + 8, tmp[2]); |
482 | | buf_put_be32 (result + 12, tmp[3]); |
483 | | |
484 | | return (sizeof(V) + sizeof(T) + sizeof(tmp) + |
485 | | sizeof(int)*2 + sizeof(void*)*6); |
486 | | } |
487 | | #endif /*!GCM_TABLES_USE_U64*/ |
488 | | |
489 | | #define fillM(c) \ |
490 | 0 | do_fillM (c->u_mode.gcm.u_ghash_key.key, c->u_mode.gcm.gcm_table) |
491 | 0 | #define GHASH(c, result, buf) do_ghash (result, buf, c->u_mode.gcm.gcm_table) |
492 | | #define prefetch_tables(c) \ |
493 | 0 | do_prefetch_tables(c->u_mode.gcm.gcm_table, sizeof(c->u_mode.gcm.gcm_table)) |
494 | | |
495 | | #else |
496 | | |
497 | | static unsigned long |
498 | | bshift (unsigned long *b) |
499 | | { |
500 | | unsigned long c; |
501 | | int i; |
502 | | c = b[3] & 1; |
503 | | for (i = 3; i > 0; i--) |
504 | | { |
505 | | b[i] = (b[i] >> 1) | (b[i - 1] << 31); |
506 | | } |
507 | | b[i] >>= 1; |
508 | | return c; |
509 | | } |
510 | | |
511 | | static unsigned int |
512 | | do_ghash (unsigned char *hsub, unsigned char *result, const unsigned char *buf) |
513 | | { |
514 | | unsigned long V[4]; |
515 | | int i, j; |
516 | | byte *p; |
517 | | |
518 | | #ifdef WORDS_BIGENDIAN |
519 | | p = result; |
520 | | #else |
521 | | unsigned long T[4]; |
522 | | |
523 | | cipher_block_xor (V, result, buf, 16); |
524 | | for (i = 0; i < 4; i++) |
525 | | { |
526 | | V[i] = (V[i] & 0x00ff00ff) << 8 | (V[i] & 0xff00ff00) >> 8; |
527 | | V[i] = (V[i] & 0x0000ffff) << 16 | (V[i] & 0xffff0000) >> 16; |
528 | | } |
529 | | p = (byte *) T; |
530 | | #endif |
531 | | |
532 | | memset (p, 0, 16); |
533 | | |
534 | | for (i = 0; i < 16; i++) |
535 | | { |
536 | | for (j = 0x80; j; j >>= 1) |
537 | | { |
538 | | if (hsub[i] & j) |
539 | | cipher_block_xor (p, p, V, 16); |
540 | | if (bshift (V)) |
541 | | V[0] ^= 0xe1000000; |
542 | | } |
543 | | } |
544 | | #ifndef WORDS_BIGENDIAN |
545 | | for (i = 0, p = (byte *) T; i < 16; i += 4, p += 4) |
546 | | { |
547 | | result[i + 0] = p[3]; |
548 | | result[i + 1] = p[2]; |
549 | | result[i + 2] = p[1]; |
550 | | result[i + 3] = p[0]; |
551 | | } |
552 | | #endif |
553 | | |
554 | | return (sizeof(V) + sizeof(T) + sizeof(int)*2 + sizeof(void*)*5); |
555 | | } |
556 | | |
557 | | #define fillM(c) do { } while (0) |
558 | | #define GHASH(c, result, buf) do_ghash (c->u_mode.gcm.u_ghash_key.key, result, buf) |
559 | | #define prefetch_tables(c) do {} while (0) |
560 | | |
561 | | #endif /* !GCM_USE_TABLES */ |
562 | | |
563 | | |
564 | | static unsigned int |
565 | | ghash_internal (gcry_cipher_hd_t c, byte *result, const byte *buf, |
566 | | size_t nblocks) |
567 | 0 | { |
568 | 0 | const unsigned int blocksize = GCRY_GCM_BLOCK_LEN; |
569 | 0 | unsigned int burn = 0; |
570 | |
|
571 | 0 | prefetch_tables (c); |
572 | |
|
573 | 0 | while (nblocks) |
574 | 0 | { |
575 | 0 | burn = GHASH (c, result, buf); |
576 | 0 | buf += blocksize; |
577 | 0 | nblocks--; |
578 | 0 | } |
579 | |
|
580 | 0 | return burn + (burn ? 5*sizeof(void*) : 0); |
581 | 0 | } |
582 | | |
583 | | |
584 | | static void |
585 | | setupM (gcry_cipher_hd_t c) |
586 | 34 | { |
587 | 34 | unsigned int features = _gcry_get_hw_features (); |
588 | | |
589 | 34 | c->u_mode.gcm.ghash_fn = NULL; |
590 | 34 | c->u_mode.gcm.polyval_fn = NULL; |
591 | | |
592 | 34 | if (0) |
593 | 0 | { |
594 | 0 | (void)features; |
595 | 0 | } |
596 | 34 | #ifdef GCM_USE_INTEL_PCLMUL |
597 | 34 | else if (features & HWF_INTEL_PCLMUL) |
598 | 34 | { |
599 | 34 | _gcry_ghash_setup_intel_pclmul (c, features); |
600 | 34 | } |
601 | 34 | #endif |
602 | | #ifdef GCM_USE_ARM_PMULL |
603 | | else if (features & HWF_ARM_PMULL) |
604 | | { |
605 | | c->u_mode.gcm.ghash_fn = ghash_armv8_ce_pmull; |
606 | | c->u_mode.gcm.polyval_fn = polyval_armv8_ce_pmull; |
607 | | ghash_setup_armv8_ce_pmull (c); |
608 | | } |
609 | | #endif |
610 | | #ifdef GCM_USE_ARM_NEON |
611 | | else if (features & HWF_ARM_NEON) |
612 | | { |
613 | | c->u_mode.gcm.ghash_fn = ghash_armv7_neon; |
614 | | ghash_setup_armv7_neon (c); |
615 | | } |
616 | | #endif |
617 | | #ifdef GCM_USE_AARCH64 |
618 | | else if (features & HWF_ARM_NEON) |
619 | | { |
620 | | c->u_mode.gcm.ghash_fn = _gcry_ghash_aarch64_simd; |
621 | | _gcry_ghash_setup_aarch64_simd (c); |
622 | | } |
623 | | #endif |
624 | | #ifdef GCM_USE_PPC_VPMSUM |
625 | | else if (features & HWF_PPC_VCRYPTO) |
626 | | { |
627 | | c->u_mode.gcm.ghash_fn = ghash_ppc_vpmsum; |
628 | | ghash_setup_ppc_vpmsum (c); |
629 | | } |
630 | | #endif |
631 | | #ifdef GCM_USE_S390X_CRYPTO |
632 | | else if (features & HWF_S390X_MSA) |
633 | | { |
634 | | if (kimd_query () & km_function_to_mask (KMID_FUNCTION_GHASH)) |
635 | | { |
636 | | c->u_mode.gcm.ghash_fn = ghash_s390x_kimd; |
637 | | } |
638 | | } |
639 | | #endif |
640 | | |
641 | 34 | if (c->u_mode.gcm.ghash_fn == NULL) |
642 | 0 | { |
643 | 0 | c->u_mode.gcm.ghash_fn = ghash_internal; |
644 | 0 | fillM (c); |
645 | 0 | } |
646 | 34 | } |
647 | | |
648 | | |
649 | | static inline void |
650 | | gcm_bytecounter_add (u32 ctr[2], size_t add) |
651 | 82 | { |
652 | 82 | if (sizeof(add) > sizeof(u32)) |
653 | 82 | { |
654 | 82 | u32 high_add = ((add >> 31) >> 1) & 0xffffffff; |
655 | 82 | ctr[1] += high_add; |
656 | 82 | } |
657 | | |
658 | 82 | ctr[0] += add; |
659 | 82 | if (ctr[0] >= add) |
660 | 82 | return; |
661 | 0 | ++ctr[1]; |
662 | 0 | } |
663 | | |
664 | | |
665 | | static inline u32 |
666 | | gcm_add32_be128 (byte *ctr, unsigned int add) |
667 | 74 | { |
668 | | /* 'ctr' must be aligned to four bytes. */ |
669 | 74 | const unsigned int blocksize = GCRY_GCM_BLOCK_LEN; |
670 | 74 | u32 *pval = (u32 *)(void *)(ctr + blocksize - sizeof(u32)); |
671 | 74 | u32 val; |
672 | | |
673 | 74 | val = be_bswap32(*pval) + add; |
674 | 74 | *pval = be_bswap32(val); |
675 | | |
676 | 74 | return val; /* return result as host-endian value */ |
677 | 74 | } |
678 | | |
679 | | |
680 | | static inline int |
681 | | gcm_check_datalen (u32 ctr[2]) |
682 | 48 | { |
683 | | /* len(plaintext) <= 2^39-256 bits == 2^36-32 bytes == 2^32-2 blocks */ |
684 | 48 | if (ctr[1] > 0xfU) |
685 | 0 | return 0; |
686 | 48 | if (ctr[1] < 0xfU) |
687 | 48 | return 1; |
688 | | |
689 | 0 | if (ctr[0] <= 0xffffffe0U) |
690 | 0 | return 1; |
691 | | |
692 | 0 | return 0; |
693 | 0 | } |
694 | | |
695 | | |
696 | | static inline int |
697 | | gcm_check_aadlen_or_ivlen (u32 ctr[2]) |
698 | 34 | { |
699 | | /* len(aad/iv) <= 2^64-1 bits ~= 2^61-1 bytes */ |
700 | 34 | if (ctr[1] > 0x1fffffffU) |
701 | 0 | return 0; |
702 | 34 | if (ctr[1] < 0x1fffffffU) |
703 | 34 | return 1; |
704 | | |
705 | 0 | if (ctr[0] <= 0xffffffffU) |
706 | 0 | return 1; |
707 | | |
708 | 0 | return 0; |
709 | 0 | } |
710 | | |
711 | | |
712 | | static void |
713 | | do_ghash_buf(gcry_cipher_hd_t c, byte *hash, const byte *buf, |
714 | | size_t buflen, int do_padding) |
715 | 150 | { |
716 | 150 | unsigned int blocksize = GCRY_GCM_BLOCK_LEN; |
717 | 150 | unsigned int unused = c->u_mode.gcm.mac_unused; |
718 | 150 | ghash_fn_t ghash_fn = c->u_mode.gcm.ghash_fn; |
719 | 150 | size_t nblocks, n; |
720 | 150 | unsigned int burn = 0; |
721 | | |
722 | 150 | if (buflen == 0 && (unused == 0 || !do_padding)) |
723 | 34 | return; |
724 | | |
725 | 116 | do |
726 | 155 | { |
727 | 155 | if (buflen > 0 && (buflen + unused < blocksize || unused > 0)) |
728 | 52 | { |
729 | 52 | n = blocksize - unused; |
730 | 52 | n = n < buflen ? n : buflen; |
731 | | |
732 | 52 | buf_cpy (&c->u_mode.gcm.macbuf[unused], buf, n); |
733 | | |
734 | 52 | unused += n; |
735 | 52 | buf += n; |
736 | 52 | buflen -= n; |
737 | 52 | } |
738 | 155 | if (!buflen) |
739 | 47 | { |
740 | 47 | if (!do_padding && unused < blocksize) |
741 | 47 | { |
742 | 47 | break; |
743 | 47 | } |
744 | | |
745 | 0 | n = blocksize - unused; |
746 | 0 | if (n > 0) |
747 | 0 | { |
748 | 0 | memset (&c->u_mode.gcm.macbuf[unused], 0, n); |
749 | 0 | unused = blocksize; |
750 | 0 | } |
751 | 0 | } |
752 | | |
753 | 108 | if (unused > 0) |
754 | 5 | { |
755 | 5 | gcry_assert (unused == blocksize); |
756 | | |
757 | | /* Process one block from macbuf. */ |
758 | 0 | burn = ghash_fn (c, hash, c->u_mode.gcm.macbuf, 1); |
759 | 5 | unused = 0; |
760 | 5 | } |
761 | | |
762 | 0 | nblocks = buflen / blocksize; |
763 | | |
764 | 108 | if (nblocks) |
765 | 107 | { |
766 | 107 | burn = ghash_fn (c, hash, buf, nblocks); |
767 | 107 | buf += blocksize * nblocks; |
768 | 107 | buflen -= blocksize * nblocks; |
769 | 107 | } |
770 | 108 | } |
771 | 116 | while (buflen > 0); |
772 | | |
773 | 0 | c->u_mode.gcm.mac_unused = unused; |
774 | | |
775 | 116 | if (burn) |
776 | 0 | _gcry_burn_stack (burn); |
777 | 116 | } |
778 | | |
779 | | |
780 | | static gcry_err_code_t |
781 | | gcm_ctr_encrypt (gcry_cipher_hd_t c, byte *outbuf, size_t outbuflen, |
782 | | const byte *inbuf, size_t inbuflen) |
783 | 48 | { |
784 | 48 | gcry_err_code_t err = 0; |
785 | | |
786 | 96 | while (inbuflen) |
787 | 48 | { |
788 | 48 | u32 nblocks_to_overflow; |
789 | 48 | u32 num_ctr_increments; |
790 | 48 | u32 curr_ctr_low; |
791 | 48 | size_t currlen = inbuflen; |
792 | 48 | byte ctr_copy[GCRY_GCM_BLOCK_LEN]; |
793 | 48 | int fix_ctr = 0; |
794 | | |
795 | | /* GCM CTR increments only least significant 32-bits, without carry |
796 | | * to upper 96-bits of counter. Using generic CTR implementation |
797 | | * directly would carry 32-bit overflow to upper 96-bit. Detect |
798 | | * if input length is long enough to cause overflow, and limit |
799 | | * input length so that CTR overflow happen but updated CTR value is |
800 | | * not used to encrypt further input. After overflow, upper 96 bits |
801 | | * of CTR are restored to cancel out modification done by generic CTR |
802 | | * encryption. */ |
803 | | |
804 | 48 | if (inbuflen > c->unused) |
805 | 40 | { |
806 | 40 | curr_ctr_low = gcm_add32_be128 (c->u_ctr.ctr, 0); |
807 | | |
808 | | /* Number of CTR increments this inbuflen would cause. */ |
809 | 40 | num_ctr_increments = (inbuflen - c->unused) / GCRY_GCM_BLOCK_LEN + |
810 | 40 | !!((inbuflen - c->unused) % GCRY_GCM_BLOCK_LEN); |
811 | | |
812 | 40 | if ((u32)(num_ctr_increments + curr_ctr_low) < curr_ctr_low) |
813 | 0 | { |
814 | 0 | nblocks_to_overflow = 0xffffffffU - curr_ctr_low + 1; |
815 | 0 | currlen = nblocks_to_overflow * GCRY_GCM_BLOCK_LEN + c->unused; |
816 | 0 | if (currlen > inbuflen) |
817 | 0 | { |
818 | 0 | currlen = inbuflen; |
819 | 0 | } |
820 | |
|
821 | 0 | fix_ctr = 1; |
822 | 0 | cipher_block_cpy(ctr_copy, c->u_ctr.ctr, GCRY_GCM_BLOCK_LEN); |
823 | 0 | } |
824 | 40 | } |
825 | | |
826 | 48 | err = _gcry_cipher_ctr_encrypt(c, outbuf, outbuflen, inbuf, currlen); |
827 | 48 | if (err != 0) |
828 | 0 | return err; |
829 | | |
830 | 48 | if (fix_ctr) |
831 | 0 | { |
832 | | /* Lower 32-bits of CTR should now be zero. */ |
833 | 0 | gcry_assert(gcm_add32_be128 (c->u_ctr.ctr, 0) == 0); |
834 | | |
835 | | /* Restore upper part of CTR. */ |
836 | 0 | buf_cpy(c->u_ctr.ctr, ctr_copy, GCRY_GCM_BLOCK_LEN - sizeof(u32)); |
837 | |
|
838 | 0 | wipememory(ctr_copy, sizeof(ctr_copy)); |
839 | 0 | } |
840 | | |
841 | 0 | inbuflen -= currlen; |
842 | 48 | inbuf += currlen; |
843 | 48 | outbuflen -= currlen; |
844 | 48 | outbuf += currlen; |
845 | 48 | } |
846 | | |
847 | 48 | return err; |
848 | 48 | } |
849 | | |
850 | | |
851 | | static gcry_err_code_t |
852 | | gcm_crypt_inner (gcry_cipher_hd_t c, byte *outbuf, size_t outbuflen, |
853 | | const byte *inbuf, size_t inbuflen, int encrypt) |
854 | 48 | { |
855 | 48 | gcry_err_code_t err; |
856 | | |
857 | 96 | while (inbuflen) |
858 | 48 | { |
859 | 48 | size_t currlen = inbuflen; |
860 | | |
861 | | /* Use a bulk method if available. */ |
862 | 48 | if (c->bulk.gcm_crypt) |
863 | 0 | { |
864 | | /* Bulk method requires that there is no cached data. */ |
865 | 0 | if (inbuflen >= GCRY_GCM_BLOCK_LEN && c->u_mode.gcm.mac_unused == 0) |
866 | 0 | { |
867 | 0 | size_t nblks = inbuflen / GCRY_GCM_BLOCK_LEN; |
868 | 0 | size_t nleft; |
869 | 0 | size_t ndone; |
870 | |
|
871 | 0 | nleft = c->bulk.gcm_crypt (c, outbuf, inbuf, nblks, encrypt); |
872 | 0 | ndone = nblks - nleft; |
873 | |
|
874 | 0 | inbuf += ndone * GCRY_GCM_BLOCK_LEN; |
875 | 0 | outbuf += ndone * GCRY_GCM_BLOCK_LEN; |
876 | 0 | inbuflen -= ndone * GCRY_GCM_BLOCK_LEN; |
877 | 0 | outbuflen -= ndone * GCRY_GCM_BLOCK_LEN; |
878 | |
|
879 | 0 | if (inbuflen == 0) |
880 | 0 | break; |
881 | | |
882 | 0 | currlen = inbuflen; |
883 | 0 | } |
884 | 0 | else if (c->u_mode.gcm.mac_unused > 0 |
885 | 0 | && inbuflen >= GCRY_GCM_BLOCK_LEN |
886 | 0 | + (16 - c->u_mode.gcm.mac_unused)) |
887 | 0 | { |
888 | | /* Handle just enough data so that cache is depleted, and on |
889 | | * next loop iteration use bulk method. */ |
890 | 0 | currlen = 16 - c->u_mode.gcm.mac_unused; |
891 | |
|
892 | 0 | gcry_assert(currlen); |
893 | 0 | } |
894 | 0 | } |
895 | | |
896 | | /* Since checksumming is done after/before encryption/decryption, |
897 | | * process input in 24KiB chunks to keep data loaded in L1 cache for |
898 | | * checksumming/decryption. However only do splitting if input is |
899 | | * large enough so that last chunks does not end up being short. */ |
900 | 48 | if (currlen > 32 * 1024) |
901 | 0 | currlen = 24 * 1024; |
902 | | |
903 | 48 | if (!encrypt) |
904 | 18 | do_ghash_buf(c, c->u_mode.gcm.u_tag.tag, inbuf, currlen, 0); |
905 | | |
906 | 48 | err = gcm_ctr_encrypt(c, outbuf, outbuflen, inbuf, currlen); |
907 | 48 | if (err != 0) |
908 | 0 | return err; |
909 | | |
910 | 48 | if (encrypt) |
911 | 30 | do_ghash_buf(c, c->u_mode.gcm.u_tag.tag, outbuf, currlen, 0); |
912 | | |
913 | 48 | outbuf += currlen; |
914 | 48 | inbuf += currlen; |
915 | 48 | outbuflen -= currlen; |
916 | 48 | inbuflen -= currlen; |
917 | 48 | } |
918 | | |
919 | 48 | return 0; |
920 | 48 | } |
921 | | |
922 | | |
923 | | gcry_err_code_t |
924 | | _gcry_cipher_gcm_encrypt (gcry_cipher_hd_t c, |
925 | | byte *outbuf, size_t outbuflen, |
926 | | const byte *inbuf, size_t inbuflen) |
927 | 30 | { |
928 | 30 | if (c->spec->blocksize != GCRY_GCM_BLOCK_LEN) |
929 | 0 | return GPG_ERR_CIPHER_ALGO; |
930 | 30 | if (outbuflen < inbuflen) |
931 | 0 | return GPG_ERR_BUFFER_TOO_SHORT; |
932 | 30 | if (c->u_mode.gcm.datalen_over_limits) |
933 | 0 | return GPG_ERR_INV_LENGTH; |
934 | 30 | if (c->marks.tag |
935 | 30 | || c->u_mode.gcm.ghash_data_finalized |
936 | 30 | || !c->u_mode.gcm.ghash_fn) |
937 | 0 | return GPG_ERR_INV_STATE; |
938 | | |
939 | 30 | if (!c->marks.iv) |
940 | 0 | _gcry_cipher_gcm_setiv_zero (c); |
941 | | |
942 | 30 | if (c->u_mode.gcm.disallow_encryption_because_of_setiv_in_fips_mode) |
943 | 0 | return GPG_ERR_INV_STATE; |
944 | | |
945 | 30 | if (!c->u_mode.gcm.ghash_aad_finalized) |
946 | 16 | { |
947 | | /* Start of encryption marks end of AAD stream. */ |
948 | 16 | do_ghash_buf(c, c->u_mode.gcm.u_tag.tag, NULL, 0, 1); |
949 | 16 | c->u_mode.gcm.ghash_aad_finalized = 1; |
950 | 16 | } |
951 | | |
952 | 30 | gcm_bytecounter_add(c->u_mode.gcm.datalen, inbuflen); |
953 | 30 | if (!gcm_check_datalen(c->u_mode.gcm.datalen)) |
954 | 0 | { |
955 | 0 | c->u_mode.gcm.datalen_over_limits = 1; |
956 | 0 | return GPG_ERR_INV_LENGTH; |
957 | 0 | } |
958 | | |
959 | 30 | return gcm_crypt_inner (c, outbuf, outbuflen, inbuf, inbuflen, 1); |
960 | 30 | } |
961 | | |
962 | | |
963 | | gcry_err_code_t |
964 | | _gcry_cipher_gcm_decrypt (gcry_cipher_hd_t c, |
965 | | byte *outbuf, size_t outbuflen, |
966 | | const byte *inbuf, size_t inbuflen) |
967 | 18 | { |
968 | 18 | if (c->spec->blocksize != GCRY_GCM_BLOCK_LEN) |
969 | 0 | return GPG_ERR_CIPHER_ALGO; |
970 | 18 | if (outbuflen < inbuflen) |
971 | 0 | return GPG_ERR_BUFFER_TOO_SHORT; |
972 | 18 | if (c->u_mode.gcm.datalen_over_limits) |
973 | 0 | return GPG_ERR_INV_LENGTH; |
974 | 18 | if (c->marks.tag |
975 | 18 | || c->u_mode.gcm.ghash_data_finalized |
976 | 18 | || !c->u_mode.gcm.ghash_fn) |
977 | 0 | return GPG_ERR_INV_STATE; |
978 | | |
979 | 18 | if (!c->marks.iv) |
980 | 0 | _gcry_cipher_gcm_setiv_zero (c); |
981 | | |
982 | 18 | if (!c->u_mode.gcm.ghash_aad_finalized) |
983 | 18 | { |
984 | | /* Start of decryption marks end of AAD stream. */ |
985 | 18 | do_ghash_buf(c, c->u_mode.gcm.u_tag.tag, NULL, 0, 1); |
986 | 18 | c->u_mode.gcm.ghash_aad_finalized = 1; |
987 | 18 | } |
988 | | |
989 | 18 | gcm_bytecounter_add(c->u_mode.gcm.datalen, inbuflen); |
990 | 18 | if (!gcm_check_datalen(c->u_mode.gcm.datalen)) |
991 | 0 | { |
992 | 0 | c->u_mode.gcm.datalen_over_limits = 1; |
993 | 0 | return GPG_ERR_INV_LENGTH; |
994 | 0 | } |
995 | | |
996 | 18 | return gcm_crypt_inner (c, outbuf, outbuflen, inbuf, inbuflen, 0); |
997 | 18 | } |
998 | | |
999 | | |
1000 | | gcry_err_code_t |
1001 | | _gcry_cipher_gcm_authenticate (gcry_cipher_hd_t c, |
1002 | | const byte * aadbuf, size_t aadbuflen) |
1003 | 0 | { |
1004 | 0 | if (c->spec->blocksize != GCRY_GCM_BLOCK_LEN) |
1005 | 0 | return GPG_ERR_CIPHER_ALGO; |
1006 | 0 | if (c->u_mode.gcm.datalen_over_limits) |
1007 | 0 | return GPG_ERR_INV_LENGTH; |
1008 | 0 | if (c->marks.tag |
1009 | 0 | || c->u_mode.gcm.ghash_aad_finalized |
1010 | 0 | || c->u_mode.gcm.ghash_data_finalized |
1011 | 0 | || !c->u_mode.gcm.ghash_fn) |
1012 | 0 | return GPG_ERR_INV_STATE; |
1013 | | |
1014 | 0 | if (!c->marks.iv) |
1015 | 0 | _gcry_cipher_gcm_setiv_zero (c); |
1016 | |
|
1017 | 0 | gcm_bytecounter_add(c->u_mode.gcm.aadlen, aadbuflen); |
1018 | 0 | if (!gcm_check_aadlen_or_ivlen(c->u_mode.gcm.aadlen)) |
1019 | 0 | { |
1020 | 0 | c->u_mode.gcm.datalen_over_limits = 1; |
1021 | 0 | return GPG_ERR_INV_LENGTH; |
1022 | 0 | } |
1023 | | |
1024 | 0 | do_ghash_buf(c, c->u_mode.gcm.u_tag.tag, aadbuf, aadbuflen, 0); |
1025 | |
|
1026 | 0 | return 0; |
1027 | 0 | } |
1028 | | |
1029 | | |
1030 | | void |
1031 | | _gcry_cipher_gcm_setupM (gcry_cipher_hd_t c) |
1032 | 0 | { |
1033 | 0 | setupM (c); |
1034 | 0 | } |
1035 | | |
1036 | | |
1037 | | void |
1038 | | _gcry_cipher_gcm_setkey (gcry_cipher_hd_t c) |
1039 | 34 | { |
1040 | 34 | memset (c->u_mode.gcm.u_ghash_key.key, 0, GCRY_GCM_BLOCK_LEN); |
1041 | | |
1042 | 34 | c->spec->encrypt (&c->context.c, c->u_mode.gcm.u_ghash_key.key, |
1043 | 34 | c->u_mode.gcm.u_ghash_key.key); |
1044 | 34 | setupM (c); |
1045 | 34 | } |
1046 | | |
1047 | | |
1048 | | static gcry_err_code_t |
1049 | | _gcry_cipher_gcm_initiv (gcry_cipher_hd_t c, const byte *iv, size_t ivlen) |
1050 | 34 | { |
1051 | 34 | memset (c->u_mode.gcm.aadlen, 0, sizeof(c->u_mode.gcm.aadlen)); |
1052 | 34 | memset (c->u_mode.gcm.datalen, 0, sizeof(c->u_mode.gcm.datalen)); |
1053 | 34 | memset (c->u_mode.gcm.u_tag.tag, 0, GCRY_GCM_BLOCK_LEN); |
1054 | 34 | c->u_mode.gcm.datalen_over_limits = 0; |
1055 | 34 | c->u_mode.gcm.ghash_data_finalized = 0; |
1056 | 34 | c->u_mode.gcm.ghash_aad_finalized = 0; |
1057 | | |
1058 | 34 | if (ivlen == 0) |
1059 | 0 | return GPG_ERR_INV_LENGTH; |
1060 | | |
1061 | 34 | if (ivlen != GCRY_GCM_BLOCK_LEN - 4) |
1062 | 34 | { |
1063 | 34 | u32 iv_bytes[2] = {0, 0}; |
1064 | 34 | u32 bitlengths[2][2]; |
1065 | | |
1066 | 34 | if (!c->u_mode.gcm.ghash_fn) |
1067 | 0 | return GPG_ERR_INV_STATE; |
1068 | | |
1069 | 34 | memset(c->u_ctr.ctr, 0, GCRY_GCM_BLOCK_LEN); |
1070 | | |
1071 | 34 | gcm_bytecounter_add(iv_bytes, ivlen); |
1072 | 34 | if (!gcm_check_aadlen_or_ivlen(iv_bytes)) |
1073 | 0 | { |
1074 | 0 | c->u_mode.gcm.datalen_over_limits = 1; |
1075 | 0 | return GPG_ERR_INV_LENGTH; |
1076 | 0 | } |
1077 | | |
1078 | 34 | do_ghash_buf(c, c->u_ctr.ctr, iv, ivlen, 1); |
1079 | | |
1080 | | /* iv length, 64-bit */ |
1081 | 34 | bitlengths[1][1] = be_bswap32(iv_bytes[0] << 3); |
1082 | 34 | bitlengths[1][0] = be_bswap32((iv_bytes[0] >> 29) | |
1083 | 34 | (iv_bytes[1] << 3)); |
1084 | | /* zeros, 64-bit */ |
1085 | 34 | bitlengths[0][1] = 0; |
1086 | 34 | bitlengths[0][0] = 0; |
1087 | | |
1088 | 34 | do_ghash_buf(c, c->u_ctr.ctr, (byte*)bitlengths, GCRY_GCM_BLOCK_LEN, 1); |
1089 | | |
1090 | 34 | wipememory (iv_bytes, sizeof iv_bytes); |
1091 | 34 | wipememory (bitlengths, sizeof bitlengths); |
1092 | 34 | } |
1093 | 0 | else |
1094 | 0 | { |
1095 | | /* 96-bit IV is handled differently. */ |
1096 | 0 | memcpy (c->u_ctr.ctr, iv, ivlen); |
1097 | 0 | c->u_ctr.ctr[12] = c->u_ctr.ctr[13] = c->u_ctr.ctr[14] = 0; |
1098 | 0 | c->u_ctr.ctr[15] = 1; |
1099 | 0 | } |
1100 | | |
1101 | 34 | c->spec->encrypt (&c->context.c, c->u_mode.gcm.tagiv, c->u_ctr.ctr); |
1102 | | |
1103 | 34 | gcm_add32_be128 (c->u_ctr.ctr, 1); |
1104 | | |
1105 | 34 | c->unused = 0; |
1106 | 34 | c->marks.iv = 1; |
1107 | 34 | c->marks.tag = 0; |
1108 | | |
1109 | 34 | return 0; |
1110 | 34 | } |
1111 | | |
1112 | | |
1113 | | gcry_err_code_t |
1114 | | _gcry_cipher_gcm_setiv (gcry_cipher_hd_t c, const byte *iv, size_t ivlen) |
1115 | 34 | { |
1116 | 34 | c->marks.iv = 0; |
1117 | 34 | c->marks.tag = 0; |
1118 | | |
1119 | 34 | return _gcry_cipher_gcm_initiv (c, iv, ivlen); |
1120 | 34 | } |
1121 | | |
1122 | | static gcry_err_code_t |
1123 | | _gcry_cipher_gcm_setiv_zero (gcry_cipher_hd_t c) |
1124 | 0 | { |
1125 | 0 | static const unsigned char zerobuf[MAX_BLOCKSIZE]; |
1126 | |
|
1127 | 0 | c->u_mode.gcm.disallow_encryption_because_of_setiv_in_fips_mode = 0; |
1128 | |
|
1129 | 0 | if (fips_mode ()) |
1130 | 0 | { |
1131 | | /* Direct invocation of GCM setiv in FIPS mode disables encryption. */ |
1132 | 0 | c->u_mode.gcm.disallow_encryption_because_of_setiv_in_fips_mode = 1; |
1133 | 0 | } |
1134 | |
|
1135 | 0 | return _gcry_cipher_gcm_setiv (c, zerobuf, GCRY_GCM_BLOCK_LEN); |
1136 | 0 | } |
1137 | | |
1138 | | |
1139 | | #if 0 && TODO |
1140 | | void |
1141 | | _gcry_cipher_gcm_geniv (gcry_cipher_hd_t c, |
1142 | | byte *ivout, size_t ivoutlen, const byte *nonce, |
1143 | | size_t noncelen) |
1144 | | { |
1145 | | /* nonce: user provided part (might be null) */ |
1146 | | /* noncelen: check if proper length (if nonce not null) */ |
1147 | | /* ivout: iv used to initialize gcm, output to user */ |
1148 | | /* ivoutlen: check correct size */ |
1149 | | byte iv[IVLEN]; |
1150 | | |
1151 | | if (!ivout) |
1152 | | return GPG_ERR_INV_ARG; |
1153 | | if (ivoutlen != IVLEN) |
1154 | | return GPG_ERR_INV_LENGTH; |
1155 | | if (nonce != NULL && !is_nonce_ok_len(noncelen)) |
1156 | | return GPG_ERR_INV_ARG; |
1157 | | |
1158 | | gcm_generate_iv(iv, nonce, noncelen); |
1159 | | |
1160 | | c->marks.iv = 0; |
1161 | | c->marks.tag = 0; |
1162 | | c->u_mode.gcm.disallow_encryption_because_of_setiv_in_fips_mode = 0; |
1163 | | |
1164 | | _gcry_cipher_gcm_initiv (c, iv, IVLEN); |
1165 | | |
1166 | | buf_cpy(ivout, iv, IVLEN); |
1167 | | wipememory(iv, sizeof(iv)); |
1168 | | } |
1169 | | #endif |
1170 | | |
1171 | | |
1172 | | static int |
1173 | | is_tag_length_valid(size_t taglen) |
1174 | 0 | { |
1175 | 0 | switch (taglen) |
1176 | 0 | { |
1177 | | /* Allowed tag lengths from NIST SP 800-38D. */ |
1178 | 0 | case 128 / 8: /* GCRY_GCM_BLOCK_LEN */ |
1179 | 0 | case 120 / 8: |
1180 | 0 | case 112 / 8: |
1181 | 0 | case 104 / 8: |
1182 | 0 | case 96 / 8: |
1183 | 0 | case 64 / 8: |
1184 | 0 | case 32 / 8: |
1185 | 0 | return 1; |
1186 | | |
1187 | 0 | default: |
1188 | 0 | return 0; |
1189 | 0 | } |
1190 | 0 | } |
1191 | | |
1192 | | static gcry_err_code_t |
1193 | | _gcry_cipher_gcm_tag (gcry_cipher_hd_t c, |
1194 | | byte * outbuf, size_t outbuflen, int check) |
1195 | 0 | { |
1196 | 0 | if (!(is_tag_length_valid (outbuflen) || outbuflen >= GCRY_GCM_BLOCK_LEN)) |
1197 | 0 | return GPG_ERR_INV_LENGTH; |
1198 | 0 | if (c->u_mode.gcm.datalen_over_limits) |
1199 | 0 | return GPG_ERR_INV_LENGTH; |
1200 | | |
1201 | 0 | if (!c->marks.tag) |
1202 | 0 | { |
1203 | 0 | u32 bitlengths[2][2]; |
1204 | |
|
1205 | 0 | if (!c->u_mode.gcm.ghash_fn) |
1206 | 0 | return GPG_ERR_INV_STATE; |
1207 | | |
1208 | | /* aad length */ |
1209 | 0 | bitlengths[0][1] = be_bswap32(c->u_mode.gcm.aadlen[0] << 3); |
1210 | 0 | bitlengths[0][0] = be_bswap32((c->u_mode.gcm.aadlen[0] >> 29) | |
1211 | 0 | (c->u_mode.gcm.aadlen[1] << 3)); |
1212 | | /* data length */ |
1213 | 0 | bitlengths[1][1] = be_bswap32(c->u_mode.gcm.datalen[0] << 3); |
1214 | 0 | bitlengths[1][0] = be_bswap32((c->u_mode.gcm.datalen[0] >> 29) | |
1215 | 0 | (c->u_mode.gcm.datalen[1] << 3)); |
1216 | | |
1217 | | /* Finalize data-stream. */ |
1218 | 0 | do_ghash_buf(c, c->u_mode.gcm.u_tag.tag, NULL, 0, 1); |
1219 | 0 | c->u_mode.gcm.ghash_aad_finalized = 1; |
1220 | 0 | c->u_mode.gcm.ghash_data_finalized = 1; |
1221 | | |
1222 | | /* Add bitlengths to tag. */ |
1223 | 0 | do_ghash_buf(c, c->u_mode.gcm.u_tag.tag, (byte*)bitlengths, |
1224 | 0 | GCRY_GCM_BLOCK_LEN, 1); |
1225 | 0 | cipher_block_xor (c->u_mode.gcm.u_tag.tag, c->u_mode.gcm.tagiv, |
1226 | 0 | c->u_mode.gcm.u_tag.tag, GCRY_GCM_BLOCK_LEN); |
1227 | 0 | c->marks.tag = 1; |
1228 | |
|
1229 | 0 | wipememory (bitlengths, sizeof (bitlengths)); |
1230 | 0 | wipememory (c->u_mode.gcm.macbuf, GCRY_GCM_BLOCK_LEN); |
1231 | 0 | wipememory (c->u_mode.gcm.tagiv, GCRY_GCM_BLOCK_LEN); |
1232 | 0 | wipememory (c->u_mode.gcm.aadlen, sizeof (c->u_mode.gcm.aadlen)); |
1233 | 0 | wipememory (c->u_mode.gcm.datalen, sizeof (c->u_mode.gcm.datalen)); |
1234 | 0 | } |
1235 | | |
1236 | 0 | if (!check) |
1237 | 0 | { |
1238 | 0 | if (outbuflen > GCRY_GCM_BLOCK_LEN) |
1239 | 0 | outbuflen = GCRY_GCM_BLOCK_LEN; |
1240 | | |
1241 | | /* NB: We already checked that OUTBUF is large enough to hold |
1242 | | * the result or has valid truncated length. */ |
1243 | 0 | memcpy (outbuf, c->u_mode.gcm.u_tag.tag, outbuflen); |
1244 | 0 | } |
1245 | 0 | else |
1246 | 0 | { |
1247 | | /* OUTBUFLEN gives the length of the user supplied tag in OUTBUF |
1248 | | * and thus we need to compare its length first. */ |
1249 | 0 | if (!is_tag_length_valid (outbuflen) |
1250 | 0 | || !buf_eq_const (outbuf, c->u_mode.gcm.u_tag.tag, outbuflen)) |
1251 | 0 | return GPG_ERR_CHECKSUM; |
1252 | 0 | } |
1253 | | |
1254 | 0 | return 0; |
1255 | 0 | } |
1256 | | |
1257 | | |
1258 | | gcry_err_code_t |
1259 | | _gcry_cipher_gcm_get_tag (gcry_cipher_hd_t c, unsigned char *outtag, |
1260 | | size_t taglen) |
1261 | 0 | { |
1262 | | /* Outputting authentication tag is part of encryption. */ |
1263 | 0 | if (c->u_mode.gcm.disallow_encryption_because_of_setiv_in_fips_mode) |
1264 | 0 | return GPG_ERR_INV_STATE; |
1265 | | |
1266 | 0 | return _gcry_cipher_gcm_tag (c, outtag, taglen, 0); |
1267 | 0 | } |
1268 | | |
1269 | | gcry_err_code_t |
1270 | | _gcry_cipher_gcm_check_tag (gcry_cipher_hd_t c, const unsigned char *intag, |
1271 | | size_t taglen) |
1272 | 0 | { |
1273 | 0 | return _gcry_cipher_gcm_tag (c, (unsigned char *) intag, taglen, 1); |
1274 | 0 | } |