/src/gnutls/lib/accelerated/x86/sha-padlock.c
Line | Count | Source |
1 | | /* |
2 | | * Copyright (C) 2011-2012 Free Software Foundation, Inc. |
3 | | * Portions Copyright (C) 2001 Niels Moeller |
4 | | * |
5 | | * Author: Nikos Mavrogiannopoulos |
6 | | * |
7 | | * This file is part of GNUTLS. |
8 | | * |
9 | | * The GNUTLS library is free software; you can redistribute it and/or |
10 | | * modify it under the terms of the GNU Lesser General Public License |
11 | | * as published by the Free Software Foundation; either version 2.1 of |
12 | | * the License, or (at your option) any later version. |
13 | | * |
14 | | * This library is distributed in the hope that it will be useful, but |
15 | | * WITHOUT ANY WARRANTY; without even the implied warranty of |
16 | | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
17 | | * Lesser General Public License for more details. |
18 | | * |
19 | | * You should have received a copy of the GNU Lesser General Public License |
20 | | * along with this program. If not, see <https://www.gnu.org/licenses/> |
21 | | * |
22 | | */ |
23 | | |
24 | | #include "gnutls_int.h" |
25 | | #include <hash_int.h> |
26 | | #include "errors.h" |
27 | | #include <nettle/sha.h> |
28 | | #include <nettle/hmac.h> |
29 | | #include <nettle/macros.h> |
30 | | #include <aes-padlock.h> |
31 | | #include <assert.h> |
32 | | #include <sha-padlock.h> |
33 | | #include <x86-common.h> |
34 | | |
35 | | #ifdef HAVE_LIBNETTLE |
36 | | |
37 | | typedef void (*update_func)(void *, size_t, const uint8_t *); |
38 | | typedef void (*digest_func)(void *, size_t, uint8_t *); |
39 | | typedef void (*set_key_func)(void *, size_t, const uint8_t *); |
40 | | typedef void (*init_func)(void *); |
41 | | |
42 | | struct padlock_hash_ctx { |
43 | | union { |
44 | | struct sha1_ctx sha1; |
45 | | struct sha224_ctx sha224; |
46 | | struct sha256_ctx sha256; |
47 | | struct sha384_ctx sha384; |
48 | | struct sha512_ctx sha512; |
49 | | } ctx; |
50 | | void *ctx_ptr; |
51 | | gnutls_digest_algorithm_t algo; |
52 | | size_t length; |
53 | | update_func update; |
54 | | digest_func digest; |
55 | | init_func init; |
56 | | }; |
57 | | |
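The struct above is the core of this backend: a union holding whichever nettle context is in use, plus function pointers selected at init time, so one set of gnutls wrappers drives SHA-1 through SHA-512 without per-algorithm code paths. A minimal standalone sketch of the same dispatch pattern follows; every name in it (toy_ctx, toy_a, toy_b and friends) is illustrative and not part of gnutls or nettle.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef void (*toy_update)(void *st, size_t len, const uint8_t *data);

struct toy_a { uint32_t sum; };    /* stands in for struct sha1_ctx */
struct toy_b { uint64_t xored; };  /* stands in for struct sha512_ctx */

static void toy_a_update(void *st, size_t len, const uint8_t *d)
{
        struct toy_a *a = st;
        while (len--)
                a->sum += *d++;
}

static void toy_b_update(void *st, size_t len, const uint8_t *d)
{
        struct toy_b *b = st;
        while (len--)
                b->xored ^= *d++;
}

struct toy_ctx {
        union { struct toy_a a; struct toy_b b; } st; /* one member active */
        void *st_ptr;       /* points into the union, like ctx_ptr above */
        toy_update update;  /* chosen once, like ctx->update */
};

int main(void)
{
        struct toy_ctx c = { 0 };

        c.st_ptr = &c.st.a;
        c.update = toy_a_update;
        c.update(c.st_ptr, 5, (const uint8_t *)"hello");
        printf("sum = %u\n", (unsigned)c.st.a.sum);    /* 532 */

        /* The same wrapper call works unchanged for the other algorithm. */
        c.st.b.xored = 0;
        c.st_ptr = &c.st.b;
        c.update = toy_b_update;
        c.update(c.st_ptr, 5, (const uint8_t *)"hello");
        printf("xor = 0x%02x\n", (unsigned)c.st.b.xored);
        return 0;
}
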
58 | | static int |
59 | | wrap_padlock_hash_update(void *_ctx, const void *text, size_t textsize) |
60 | 0 | { |
61 | 0 | struct padlock_hash_ctx *ctx = _ctx; |
62 | |
63 | 0 | ctx->update(ctx->ctx_ptr, textsize, text); |
64 | |
65 | 0 | return GNUTLS_E_SUCCESS; |
66 | 0 | } |
67 | | |
68 | | static void wrap_padlock_hash_deinit(void *hd) |
69 | 0 | { |
70 | 0 | gnutls_free(hd); |
71 | 0 | } |
72 | | |
73 | | # define MD1_INCR(c) (c->count++) |
74 | 0 | # define SHA1_COMPRESS(ctx, data) (padlock_sha1_blocks((void*)(ctx)->state, data, 1)) |
75 | 0 | # define SHA256_COMPRESS(ctx, data) (padlock_sha256_blocks((void*)(ctx)->state, data, 1)) |
76 | 0 | # define SHA512_COMPRESS(ctx, data) (padlock_sha512_blocks((void*)(ctx)->state, data, 1)) |
77 | | |
78 | | void |
79 | | padlock_sha1_update(struct sha1_ctx *ctx, size_t length, const uint8_t * data) |
80 | 0 | { |
81 | 0 | MD_UPDATE(ctx, length, data, SHA1_COMPRESS, MD1_INCR(ctx)); |
82 | 0 | } |
83 | | |
84 | | void |
85 | | padlock_sha256_update(struct sha256_ctx *ctx, |
86 | | size_t length, const uint8_t * data) |
87 | 0 | { |
88 | 0 | MD_UPDATE(ctx, length, data, SHA256_COMPRESS, MD1_INCR(ctx)); |
89 | 0 | } |
90 | | |
91 | | void |
92 | | padlock_sha512_update(struct sha512_ctx *ctx, |
93 | | size_t length, const uint8_t * data) |
94 | 0 | { |
95 | 0 | MD_UPDATE(ctx, length, data, SHA512_COMPRESS, MD_INCR(ctx)); |
96 | 0 | } |
97 | | |
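The three update wrappers above delegate block buffering to nettle's MD_UPDATE macro from <nettle/macros.h>, but point it at the PadLock block routines (padlock_sha1_blocks() and friends, implemented outside this file) rather than nettle's C compression functions. The sketch below shows, in plain C, the buffering that the macro performs, assuming a 64-byte block and a caller-supplied compress callback; the names md_buf, md_update and count_block are illustrative only.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 64

struct md_buf {
        uint8_t block[BLOCK_SIZE];  /* partial-block buffer */
        unsigned index;             /* bytes currently buffered */
        uint64_t count;             /* full blocks consumed so far */
};

typedef void (*compress_fn)(void *state, const uint8_t *block);

static void md_update(struct md_buf *c, void *state, compress_fn compress,
                      size_t length, const uint8_t *data)
{
        /* Top up a partially filled block first. */
        if (c->index) {
                unsigned left = BLOCK_SIZE - c->index;
                if (length < left) {
                        memcpy(c->block + c->index, data, length);
                        c->index += length;
                        return;
                }
                memcpy(c->block + c->index, data, left);
                compress(state, c->block);
                c->count++;
                data += left;
                length -= left;
                c->index = 0;
        }
        /* Feed whole blocks straight from the input. */
        while (length >= BLOCK_SIZE) {
                compress(state, data);
                c->count++;
                data += BLOCK_SIZE;
                length -= BLOCK_SIZE;
        }
        /* Stash the tail for the next call (or for final padding). */
        memcpy(c->block, data, length);
        c->index = length;
}

static void count_block(void *state, const uint8_t *block)
{
        (void)state;
        (void)block;  /* a real compress() would mix 'block' into 'state' */
}

int main(void)
{
        struct md_buf c = { .index = 0 };
        uint8_t data[100] = { 0 };

        md_update(&c, NULL, count_block, 100, data);  /* 1 block, 36 buffered */
        md_update(&c, NULL, count_block, 100, data);  /* 2 more full blocks */
        printf("%u blocks, %u bytes buffered\n", (unsigned)c.count, c.index);
        return 0;
}
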
98 | | static void _nettle_write_be32(unsigned length, uint8_t * dst, uint32_t * src) |
99 | 0 | { |
100 | 0 | unsigned i; |
101 | 0 | unsigned words; |
102 | 0 | unsigned leftover; |
103 | |
104 | 0 | words = length / 4; |
105 | 0 | leftover = length % 4; |
106 | |
107 | 0 | for (i = 0; i < words; i++, dst += 4) |
108 | 0 | WRITE_UINT32(dst, src[i]); |
109 | |
110 | 0 | if (leftover) { |
111 | 0 | uint32_t word; |
112 | 0 | unsigned j = leftover; |
113 | |
114 | 0 | word = src[i]; |
115 | |
116 | 0 | switch (leftover) { |
117 | 0 | default: |
118 | 0 | abort(); |
119 | 0 | case 3: |
120 | 0 | dst[--j] = (word >> 8) & 0xff; |
121 | 0 | FALLTHROUGH; |
122 | 0 | case 2: |
123 | 0 | dst[--j] = (word >> 16) & 0xff; |
124 | 0 | FALLTHROUGH; |
125 | 0 | case 1: |
126 | 0 | dst[--j] = (word >> 24) & 0xff; |
127 | 0 | } |
128 | 0 | } |
129 | 0 | } |
130 | | |
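_nettle_write_be32() above reimplements nettle's internal byte-order helper of the same name: it serializes 32-bit state words big-endian and copes with a final partial word when the requested length is not a multiple of four. Below is a standalone check of the same ordering and truncation behaviour; write_be32_truncated() is an illustrative name, not a gnutls or nettle function.

#include <stdint.h>
#include <stdio.h>

static void write_be32_truncated(unsigned length, uint8_t *dst,
                                 const uint32_t *src)
{
        unsigned i;

        /* Whole words, most significant byte first. */
        for (i = 0; i + 4 <= length; i += 4) {
                dst[i]     = (src[i / 4] >> 24) & 0xff;
                dst[i + 1] = (src[i / 4] >> 16) & 0xff;
                dst[i + 2] = (src[i / 4] >> 8) & 0xff;
                dst[i + 3] = src[i / 4] & 0xff;
        }
        /* Leftover bytes come from the high end of the next word. */
        for (unsigned j = 0; i + j < length; j++)
                dst[i + j] = (src[i / 4] >> (24 - 8 * j)) & 0xff;
}

int main(void)
{
        uint32_t words[2] = { 0x01020304, 0x05060708 };
        uint8_t out[6];

        write_be32_truncated(6, out, words);
        for (unsigned k = 0; k < 6; k++)
                printf("%02x ", out[k]);  /* prints: 01 02 03 04 05 06 */
        printf("\n");
        return 0;
}
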
131 | | static void |
132 | | padlock_sha1_digest(struct sha1_ctx *ctx, size_t length, uint8_t * digest) |
133 | 0 | { |
134 | 0 | uint64_t bit_count; |
135 | |
136 | 0 | assert(length <= SHA1_DIGEST_SIZE); |
137 | | |
138 | 0 | MD_PAD(ctx, 8, SHA1_COMPRESS); |
139 | | |
140 | | /* There are 512 = 2^9 bits in one block */ |
141 | 0 | bit_count = (ctx->count << 9) | (ctx->index << 3); |
142 | | |
143 | | /* append the 64 bit count */ |
144 | 0 | WRITE_UINT64(ctx->block + (SHA1_BLOCK_SIZE - 8), bit_count); |
145 | 0 | SHA1_COMPRESS(ctx, ctx->block); |
146 | |
147 | 0 | _nettle_write_be32(length, digest, ctx->state); |
148 | 0 | } |
149 | | |
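Finalization above is the standard Merkle-Damgård scheme: MD_PAD() writes the 0x80 marker byte and zero padding, and the last eight bytes of the final block carry the processed length in bits. Since ctx->count holds complete 64-byte blocks and ctx->index the bytes still buffered, that length is count*512 + index*8; the shift/OR expression computes exactly that, and because index is always smaller than the block size the two halves cannot overlap. A quick arithmetic check:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
        uint64_t count = 3;   /* three full 64-byte blocks already compressed */
        unsigned index = 13;  /* 13 bytes still buffered */

        uint64_t bits = (count << 9) | ((uint64_t)index << 3);
        assert(bits == count * 512 + index * 8);
        printf("%llu bits\n", (unsigned long long)bits);  /* 1640 bits */
        return 0;
}
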
150 | | static void |
151 | | padlock_sha256_digest(struct sha256_ctx *ctx, size_t length, uint8_t * digest) |
152 | 0 | { |
153 | 0 | uint64_t bit_count; |
154 | |
155 | 0 | assert(length <= SHA256_DIGEST_SIZE); |
156 | | |
157 | 0 | MD_PAD(ctx, 8, SHA256_COMPRESS); |
158 | | |
159 | | /* There are 512 = 2^9 bits in one block */ |
160 | 0 | bit_count = (ctx->count << 9) | (ctx->index << 3); |
161 | | |
162 | | /* This is slightly inefficient, as the numbers are converted to |
163 | | big-endian format, and will be converted back by the compression |
164 | | function. It's probably not worth the effort to fix this. */ |
165 | 0 | WRITE_UINT64(ctx->block + (SHA256_BLOCK_SIZE - 8), bit_count); |
166 | 0 | SHA256_COMPRESS(ctx, ctx->block); |
167 | |
168 | 0 | _nettle_write_be32(length, digest, ctx->state); |
169 | 0 | } |
170 | | |
171 | | static void |
172 | | padlock_sha512_digest(struct sha512_ctx *ctx, size_t length, uint8_t * digest) |
173 | 0 | { |
174 | 0 | uint64_t high, low; |
175 | |
176 | 0 | unsigned i; |
177 | 0 | unsigned words; |
178 | 0 | unsigned leftover; |
179 | |
180 | 0 | assert(length <= SHA512_DIGEST_SIZE); |
181 | | |
182 | 0 | MD_PAD(ctx, 16, SHA512_COMPRESS); |
183 | | |
184 | | /* There are 1024 = 2^10 bits in one block */ |
185 | 0 | high = (ctx->count_high << 10) | (ctx->count_low >> 54); |
186 | 0 | low = (ctx->count_low << 10) | (ctx->index << 3); |
187 | | |
188 | | /* This is slightly inefficient, as the numbers are converted to |
189 | | big-endian format, and will be converted back by the compression |
190 | | function. It's probably not worth the effort to fix this. */ |
191 | 0 | WRITE_UINT64(ctx->block + (SHA512_DATA_SIZE - 16), high); |
192 | 0 | WRITE_UINT64(ctx->block + (SHA512_DATA_SIZE - 8), low); |
193 | 0 | SHA512_COMPRESS(ctx, ctx->block); |
194 | |
195 | 0 | words = length / 8; |
196 | 0 | leftover = length % 8; |
197 | |
198 | 0 | for (i = 0; i < words; i++, digest += 8) |
199 | 0 | WRITE_UINT64(digest, ctx->state[i]); |
200 | |
201 | 0 | if (leftover) { |
202 | | /* Truncate to the right size */ |
203 | 0 | uint64_t word = ctx->state[i] >> (8 * (8 - leftover)); |
204 | |
205 | 0 | do { |
206 | 0 | digest[--leftover] = word & 0xff; |
207 | 0 | word >>= 8; |
208 | 0 | } while (leftover); |
209 | 0 | } |
210 | 0 | } |
211 | | |
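SHA-384/512 use 128-byte blocks and a 128-bit length field, so the block counter is spread across count_low and count_high. Multiplying that 128-bit counter by 1024 bits per block is a left shift by 10; the expressions above do exactly that, carrying the top ten bits of count_low (bits 54..63) into high and filling the freed low bits with index*8. A small check against a 128-bit reference follows; it assumes a compiler providing unsigned __int128 (GCC or Clang), which is not something this file itself relies on.

#include <assert.h>
#include <stdint.h>

int main(void)
{
        uint64_t count_high = 0;
        uint64_t count_low = (1ULL << 54) | 5;  /* forces a carry into 'high' */
        unsigned index = 7;                     /* bytes still buffered */

        /* The two 64-bit words written into the final block, as computed above. */
        uint64_t high = (count_high << 10) | (count_low >> 54);
        uint64_t low = (count_low << 10) | ((uint64_t)index << 3);

        /* Reference: the full 128-bit "length in bits" value. */
        unsigned __int128 bits =
            ((((unsigned __int128)count_high << 64) | count_low) << 10)
            | (index << 3);

        assert(high == (uint64_t)(bits >> 64));
        assert(low == (uint64_t)bits);
        return 0;
}
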
212 | | static int _ctx_init(gnutls_digest_algorithm_t algo, |
213 | | struct padlock_hash_ctx *ctx) |
214 | 0 | { |
215 | 0 | switch (algo) { |
216 | 0 | case GNUTLS_DIG_SHA1: |
217 | 0 | sha1_init(&ctx->ctx.sha1); |
218 | 0 | ctx->update = (update_func) padlock_sha1_update; |
219 | 0 | ctx->digest = (digest_func) padlock_sha1_digest; |
220 | 0 | ctx->init = (init_func) sha1_init; |
221 | 0 | ctx->ctx_ptr = &ctx->ctx.sha1; |
222 | 0 | ctx->length = SHA1_DIGEST_SIZE; |
223 | 0 | break; |
224 | 0 | case GNUTLS_DIG_SHA224: |
225 | 0 | sha224_init(&ctx->ctx.sha224); |
226 | 0 | ctx->update = (update_func) padlock_sha256_update; |
227 | 0 | ctx->digest = (digest_func) padlock_sha256_digest; |
228 | 0 | ctx->init = (init_func) sha224_init; |
229 | 0 | ctx->ctx_ptr = &ctx->ctx.sha224; |
230 | 0 | ctx->length = SHA224_DIGEST_SIZE; |
231 | 0 | break; |
232 | 0 | case GNUTLS_DIG_SHA256: |
233 | 0 | sha256_init(&ctx->ctx.sha256); |
234 | 0 | ctx->update = (update_func) padlock_sha256_update; |
235 | 0 | ctx->digest = (digest_func) padlock_sha256_digest; |
236 | 0 | ctx->init = (init_func) sha256_init; |
237 | 0 | ctx->ctx_ptr = &ctx->ctx.sha256; |
238 | 0 | ctx->length = SHA256_DIGEST_SIZE; |
239 | 0 | break; |
240 | 0 | case GNUTLS_DIG_SHA384: |
241 | 0 | sha384_init(&ctx->ctx.sha384); |
242 | 0 | ctx->update = (update_func) padlock_sha512_update; |
243 | 0 | ctx->digest = (digest_func) padlock_sha512_digest; |
244 | 0 | ctx->init = (init_func) sha384_init; |
245 | 0 | ctx->ctx_ptr = &ctx->ctx.sha384; |
246 | 0 | ctx->length = SHA384_DIGEST_SIZE; |
247 | 0 | break; |
248 | 0 | case GNUTLS_DIG_SHA512: |
249 | 0 | sha512_init(&ctx->ctx.sha512); |
250 | 0 | ctx->update = (update_func) padlock_sha512_update; |
251 | 0 | ctx->digest = (digest_func) padlock_sha512_digest; |
252 | 0 | ctx->init = (init_func) sha512_init; |
253 | 0 | ctx->ctx_ptr = &ctx->ctx.sha512; |
254 | 0 | ctx->length = SHA512_DIGEST_SIZE; |
255 | 0 | break; |
256 | 0 | default: |
257 | 0 | gnutls_assert(); |
258 | 0 | return GNUTLS_E_INVALID_REQUEST; |
259 | 0 | } |
260 | | |
261 | 0 | return 0; |
262 | 0 | } |
263 | | |
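_ctx_init() is the single table mapping an algorithm to its nettle context, its PadLock-backed update/digest routines and its digest length; it is shared by the allocating init path just below and by the stack-based one-shot path in wrap_padlock_hash_fast() further down. The sketch that follows is illustrative only (example_sha256_once is a hypothetical name, not gnutls API) and simply shows how those pieces combine for one SHA-256 computation inside this translation unit:

/* Illustrative only: exercising the helpers of this file the same way
 * wrap_padlock_hash_fast() does, for a single SHA-256 digest. */
static int example_sha256_once(const void *data, size_t len, uint8_t out[32])
{
        struct padlock_hash_ctx ctx;
        int ret;

        ret = _ctx_init(GNUTLS_DIG_SHA256, &ctx);
        if (ret < 0)
                return gnutls_assert_val(ret);

        ctx.update(ctx.ctx_ptr, len, data);        /* padlock_sha256_update() */
        ctx.digest(ctx.ctx_ptr, ctx.length, out);  /* padlock_sha256_digest() */
        return 0;
}
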
264 | | static int wrap_padlock_hash_init(gnutls_digest_algorithm_t algo, void **_ctx) |
265 | 0 | { |
266 | 0 | struct padlock_hash_ctx *ctx; |
267 | 0 | int ret; |
268 | |
269 | 0 | ctx = gnutls_malloc(sizeof(struct padlock_hash_ctx)); |
270 | 0 | if (ctx == NULL) { |
271 | 0 | gnutls_assert(); |
272 | 0 | return GNUTLS_E_MEMORY_ERROR; |
273 | 0 | } |
274 | | |
275 | 0 | ctx->algo = algo; |
276 | |
277 | 0 | if ((ret = _ctx_init(algo, ctx)) < 0) { |
278 | 0 | gnutls_assert(); |
279 | 0 | return ret; |
280 | 0 | } |
281 | | |
282 | 0 | *_ctx = ctx; |
283 | |
284 | 0 | return 0; |
285 | 0 | } |
286 | | |
287 | | static void *wrap_padlock_hash_copy(const void *_ctx) |
288 | 0 | { |
289 | 0 | struct padlock_hash_ctx *new_ctx; |
290 | 0 | const struct padlock_hash_ctx *ctx = _ctx; |
291 | 0 | ptrdiff_t off = (uint8_t *) ctx->ctx_ptr - (uint8_t *) (&ctx->ctx); |
292 | |
293 | 0 | new_ctx = gnutls_malloc(sizeof(struct padlock_hash_ctx)); |
294 | 0 | if (new_ctx == NULL) { |
295 | 0 | gnutls_assert(); |
296 | 0 | return NULL; |
297 | 0 | } |
298 | | |
299 | 0 | memcpy(new_ctx, ctx, sizeof(*new_ctx)); |
300 | 0 | new_ctx->ctx_ptr = (uint8_t *) & new_ctx->ctx + off; |
301 | |
302 | 0 | return new_ctx; |
303 | 0 | } |
304 | | |
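The copy routine above has one subtlety: ctx_ptr points into the union inside the same allocation, so a plain byte copy would leave the new context pointing at the old object's state. The code therefore records ctx_ptr's offset within the source and re-applies it to the copy. Below is a standalone sketch of that fix-up, with illustrative names (self_ref, copy_self_ref):

#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

struct self_ref {
        int slots[4];
        int *active;  /* points at one of the slots above, i.e. into itself */
};

static void copy_self_ref(struct self_ref *dst, const struct self_ref *src)
{
        ptrdiff_t off = (const uint8_t *)src->active - (const uint8_t *)src;

        memcpy(dst, src, sizeof(*dst));
        dst->active = (int *)((uint8_t *)dst + off);  /* rebase into dst */
}

int main(void)
{
        struct self_ref a = { .slots = { 1, 2, 3, 4 } };
        struct self_ref b;

        a.active = &a.slots[2];
        copy_self_ref(&b, &a);

        assert(b.active == &b.slots[2]);  /* points into b, not into a */
        return 0;
}
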
305 | | static int |
306 | | wrap_padlock_hash_output(void *src_ctx, void *digest, size_t digestsize) |
307 | 0 | { |
308 | 0 | struct padlock_hash_ctx *ctx; |
309 | 0 | ctx = src_ctx; |
310 | |
311 | 0 | if (digestsize < ctx->length) |
312 | 0 | return gnutls_assert_val(GNUTLS_E_SHORT_MEMORY_BUFFER); |
313 | | |
314 | 0 | ctx->digest(ctx->ctx_ptr, digestsize, digest); |
315 | |
316 | 0 | ctx->init(ctx->ctx_ptr); |
317 | |
318 | 0 | return 0; |
319 | 0 | } |
320 | | |
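Note that wrap_padlock_hash_output() calls ctx->init() after emitting the digest, so a handle can be fed a second message immediately without an explicit reset. An illustrative in-file sketch of that reuse (example_two_digests is a hypothetical name, and error checking is abbreviated):

/* Illustrative only: the output call also resets the state, so one handle
 * serves two messages back to back. */
static int example_two_digests(const uint8_t *m1, size_t l1,
                               const uint8_t *m2, size_t l2,
                               uint8_t d1[20], uint8_t d2[20])
{
        void *h;
        int ret;

        ret = wrap_padlock_hash_init(GNUTLS_DIG_SHA1, &h);
        if (ret < 0)
                return ret;

        wrap_padlock_hash_update(h, m1, l1);
        ret = wrap_padlock_hash_output(h, d1, 20);  /* also re-initializes */
        if (ret == 0) {
                wrap_padlock_hash_update(h, m2, l2);
                ret = wrap_padlock_hash_output(h, d2, 20);
        }

        wrap_padlock_hash_deinit(h);
        return ret;
}
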
321 | | int wrap_padlock_hash_fast(gnutls_digest_algorithm_t algo, |
322 | | const void *text, size_t text_size, void *digest) |
323 | 0 | { |
324 | 0 | if (text_size == 0 && text == NULL) |
325 | 0 | text = digest; |
326 | 0 | if (algo == GNUTLS_DIG_SHA1) { |
327 | 0 | uint32_t iv[5] = { |
328 | 0 | 0x67452301UL, |
329 | 0 | 0xEFCDAB89UL, |
330 | 0 | 0x98BADCFEUL, |
331 | 0 | 0x10325476UL, |
332 | 0 | 0xC3D2E1F0UL, |
333 | 0 | }; |
334 | 0 | padlock_sha1_oneshot(iv, text, text_size); |
335 | 0 | _nettle_write_be32(20, digest, iv); |
336 | 0 | } else if (algo == GNUTLS_DIG_SHA256) { |
337 | 0 | uint32_t iv[8] = { |
338 | 0 | 0x6a09e667UL, 0xbb67ae85UL, 0x3c6ef372UL, |
339 | 0 | 0xa54ff53aUL, |
340 | 0 | 0x510e527fUL, 0x9b05688cUL, 0x1f83d9abUL, |
341 | 0 | 0x5be0cd19UL, |
342 | 0 | }; |
343 | 0 | padlock_sha256_oneshot(iv, text, text_size); |
344 | 0 | _nettle_write_be32(32, digest, iv); |
345 | 0 | } else { |
346 | 0 | struct padlock_hash_ctx ctx; |
347 | 0 | int ret; |
348 | |
349 | 0 | ret = _ctx_init(algo, &ctx); |
350 | 0 | if (ret < 0) |
351 | 0 | return gnutls_assert_val(ret); |
352 | 0 | ctx.algo = algo; |
353 | |
354 | 0 | wrap_padlock_hash_update(&ctx, text, text_size); |
355 | |
356 | 0 | wrap_padlock_hash_output(&ctx, digest, ctx.length); |
357 | 0 | } |
358 | | |
359 | 0 | return 0; |
360 | 0 | } |
361 | | |
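The literal iv arrays above are the standard FIPS 180-4 initial hash values for SHA-1 and SHA-256; padlock_sha1_oneshot() and padlock_sha256_oneshot() (implemented outside this file, presumably over the PadLock XSHA instructions) update that state in place, and the result is then serialized with the same big-endian helper. Below is a small sanity check that the constants match what nettle's own init functions install; it assumes nettle's sha1_ctx/sha256_ctx expose their state arrays, which the SHA*_COMPRESS macros earlier in this file already rely on, and it needs to be linked against libnettle.

#include <assert.h>
#include <stdint.h>
#include <string.h>
#include <nettle/sha.h>

int main(void)
{
        static const uint32_t sha1_iv[5] = {
                0x67452301UL, 0xEFCDAB89UL, 0x98BADCFEUL,
                0x10325476UL, 0xC3D2E1F0UL,
        };
        static const uint32_t sha256_iv[8] = {
                0x6a09e667UL, 0xbb67ae85UL, 0x3c6ef372UL, 0xa54ff53aUL,
                0x510e527fUL, 0x9b05688cUL, 0x1f83d9abUL, 0x5be0cd19UL,
        };
        struct sha1_ctx s1;
        struct sha256_ctx s256;

        sha1_init(&s1);
        sha256_init(&s256);

        assert(memcmp(s1.state, sha1_iv, sizeof(sha1_iv)) == 0);
        assert(memcmp(s256.state, sha256_iv, sizeof(sha256_iv)) == 0);
        return 0;
}
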
362 | | const struct nettle_hash padlock_sha1 = |
363 | | NN_HASH(sha1, padlock_sha1_update, padlock_sha1_digest, SHA1); |
364 | | const struct nettle_hash padlock_sha224 = |
365 | | NN_HASH(sha224, padlock_sha256_update, padlock_sha256_digest, SHA224); |
366 | | const struct nettle_hash padlock_sha256 = |
367 | | NN_HASH(sha256, padlock_sha256_update, padlock_sha256_digest, SHA256); |
368 | | const struct nettle_hash padlock_sha384 = |
369 | | NN_HASH(sha384, padlock_sha512_update, padlock_sha512_digest, SHA384); |
370 | | const struct nettle_hash padlock_sha512 = |
371 | | NN_HASH(sha512, padlock_sha512_update, padlock_sha512_digest, SHA512); |
372 | | |
373 | | const gnutls_crypto_digest_st _gnutls_sha_padlock_oneshot = { |
374 | | .init = NULL, |
375 | | .hash = NULL, |
376 | | .output = NULL, |
377 | | .deinit = NULL, |
378 | | .fast = wrap_padlock_hash_fast |
379 | | }; |
380 | | |
381 | | const gnutls_crypto_digest_st _gnutls_sha_padlock = { |
382 | | .init = wrap_padlock_hash_init, |
383 | | .hash = wrap_padlock_hash_update, |
384 | | .output = wrap_padlock_hash_output, |
385 | | .copy = wrap_padlock_hash_copy, |
386 | | .deinit = wrap_padlock_hash_deinit, |
387 | | .fast = wrap_padlock_hash_fast, |
388 | | }; |
389 | | |
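The two gnutls_crypto_digest_st tables are what the rest of the library consumes: _gnutls_sha_padlock_oneshot provides only the .fast entry point, while _gnutls_sha_padlock implements the full streaming interface plus .copy; hooking them into gnutls happens in the backend's registration code outside this file. A one-line illustrative use of the one-shot table (example_oneshot_sha256 is a hypothetical name):

/* Illustrative only: the one-shot table exposes just .fast, computing a
 * digest in a single call; its other members are intentionally NULL. */
static int example_oneshot_sha256(const void *buf, size_t len, uint8_t out[32])
{
        return _gnutls_sha_padlock_oneshot.fast(GNUTLS_DIG_SHA256, buf, len, out);
}
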
390 | | #endif /* HAVE_LIBNETTLE */ |