Coverage Report

Created: 2025-07-11 06:59

/src/qpdf/libqpdf/sha2.c

 Line |   Count | Source
    1 |         | /* clang-format off */
    2 |         | /* $Id: sha2.c 227 2010-06-16 17:28:38Z tp $ */
    3 |         | /*
    4 |         |  * SHA-224 / SHA-256 implementation.
    5 |         |  *
    6 |         |  * ==========================(LICENSE BEGIN)============================
    7 |         |  *
    8 |         |  * Copyright (c) 2007-2010  Projet RNRT SAPHIR
    9 |         |  *
   10 |         |  * Permission is hereby granted, free of charge, to any person obtaining
   11 |         |  * a copy of this software and associated documentation files (the
   12 |         |  * "Software"), to deal in the Software without restriction, including
   13 |         |  * without limitation the rights to use, copy, modify, merge, publish,
   14 |         |  * distribute, sublicense, and/or sell copies of the Software, and to
   15 |         |  * permit persons to whom the Software is furnished to do so, subject to
   16 |         |  * the following conditions:
   17 |         |  *
   18 |         |  * The above copyright notice and this permission notice shall be
   19 |         |  * included in all copies or substantial portions of the Software.
   20 |         |  *
   21 |         |  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
   22 |         |  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
   23 |         |  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
   24 |         |  * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
   25 |         |  * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
   26 |         |  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
   27 |         |  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
   28 |         |  *
   29 |         |  * ===========================(LICENSE END)=============================
   30 |         |  *
   31 |         |  * @author   Thomas Pornin <thomas.pornin@cryptolog.com>
   32 |         |  */
   33 |         |
   34 |         | #include <stddef.h>
   35 |         | #include <string.h>
   36 |         |
   37 |         | #include "sph/sph_sha2.h"
   38 |         |
   39 |         | #if SPH_SMALL_FOOTPRINT && !defined SPH_SMALL_FOOTPRINT_SHA2
   40 |         | #define SPH_SMALL_FOOTPRINT_SHA2   1
   41 |         | #endif
   42 |         |
   43 |         | #define CH(X, Y, Z)    ((((Y) ^ (Z)) & (X)) ^ (Z))
   44 |         | #define MAJ(X, Y, Z)   (((Y) & (Z)) | (((Y) | (Z)) & (X)))
   45 |         |
   46 |         | #define ROTR    SPH_ROTR32
   47 |         |
   48 |         | #define BSG2_0(x)      (ROTR(x, 2) ^ ROTR(x, 13) ^ ROTR(x, 22))
   49 |         | #define BSG2_1(x)      (ROTR(x, 6) ^ ROTR(x, 11) ^ ROTR(x, 25))
   50 |         | #define SSG2_0(x)      (ROTR(x, 7) ^ ROTR(x, 18) ^ SPH_T32((x) >> 3))
   51 |         | #define SSG2_1(x)      (ROTR(x, 17) ^ ROTR(x, 19) ^ SPH_T32((x) >> 10))
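
Note: CH and MAJ above are reduced-operation forms of the Ch and Maj functions
from FIPS 180-4: CH needs three logical operations instead of four, MAJ four
instead of five. A standalone sketch checking that the rewritten expressions
agree bitwise with the textbook definitions (plain C99, with uint32_t standing
in for sph_u32; not part of sha2.c):

    #include <assert.h>
    #include <stdint.h>

    /* Reduced forms, copied from sha2.c above. */
    #define CH(X, Y, Z)    ((((Y) ^ (Z)) & (X)) ^ (Z))
    #define MAJ(X, Y, Z)   (((Y) & (Z)) | (((Y) | (Z)) & (X)))

    /* Textbook forms from FIPS 180-4. */
    static uint32_t ch_ref(uint32_t x, uint32_t y, uint32_t z)
    {
        return (x & y) ^ (~x & z);              /* x selects between y and z */
    }

    static uint32_t maj_ref(uint32_t x, uint32_t y, uint32_t z)
    {
        return (x & y) ^ (x & z) ^ (y & z);     /* bitwise majority vote */
    }

    int main(void)
    {
        /* The identities hold per bit, so a few probe values suffice. */
        static const uint32_t v[4] = {
            0x00000000u, 0xFFFFFFFFu, 0xDEADBEEFu, 0x12345678u
        };
        for (int i = 0; i < 4; i++)
            for (int j = 0; j < 4; j++)
                for (int k = 0; k < 4; k++) {
                    assert(CH(v[i], v[j], v[k]) == ch_ref(v[i], v[j], v[k]));
                    assert(MAJ(v[i], v[j], v[k]) == maj_ref(v[i], v[j], v[k]));
                }
        return 0;
    }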
   52 |         |
   53 |         | static const sph_u32 H224[8] = {
   54 |         |         SPH_C32(0xC1059ED8), SPH_C32(0x367CD507), SPH_C32(0x3070DD17),
   55 |         |         SPH_C32(0xF70E5939), SPH_C32(0xFFC00B31), SPH_C32(0x68581511),
   56 |         |         SPH_C32(0x64F98FA7), SPH_C32(0xBEFA4FA4)
   57 |         | };
   58 |         |
   59 |         | static const sph_u32 H256[8] = {
   60 |         |         SPH_C32(0x6A09E667), SPH_C32(0xBB67AE85), SPH_C32(0x3C6EF372),
   61 |         |         SPH_C32(0xA54FF53A), SPH_C32(0x510E527F), SPH_C32(0x9B05688C),
   62 |         |         SPH_C32(0x1F83D9AB), SPH_C32(0x5BE0CD19)
   63 |         | };
   64 |         |
   65 |         | /*
   66 |         |  * The SHA2_ROUND_BODY macro defines the body of a SHA-224 / SHA-256
   67 |         |  * compression function implementation. The "in" parameter should
   68 |         |  * evaluate, when applied to a numerical input parameter from 0 to 15,
   69 |         |  * to an expression which yields the corresponding input block. The "r"
   70 |         |  * parameter should evaluate to an array or pointer expression
   71 |         |  * designating the array of 8 words which contains the input and output
   72 |         |  * of the compression function.
   73 |         |  */
   74 |         |
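
Note: the convention described in that comment is instantiated twice later in
this file: sha2_round (line 609) feeds big-endian words decoded from a byte
buffer, and sph_sha224_comp (line 686) feeds words from an sph_u32 array. A
stripped-down sketch of the pattern, assuming it sits in this translation unit
where SHA2_ROUND_BODY and sph_u32 are in scope (compress_one_block is a
hypothetical name, for illustration only):

    /* Compress one 16-word message block "msg" into the 8-word state "val". */
    static void
    compress_one_block(const sph_u32 msg[16], sph_u32 val[8])
    {
    #define SHA2_IN(x)   msg[x]             /* "in": index 0..15 -> input word */
            SHA2_ROUND_BODY(SHA2_IN, val);  /* "r": state, updated in place */
    #undef SHA2_IN
    }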
   75 |         | #if SPH_SMALL_FOOTPRINT_SHA2
   76 |         |
   77 |         | static const sph_u32 K[64] = {
   78 |         |         SPH_C32(0x428A2F98), SPH_C32(0x71374491),
   79 |         |         SPH_C32(0xB5C0FBCF), SPH_C32(0xE9B5DBA5),
   80 |         |         SPH_C32(0x3956C25B), SPH_C32(0x59F111F1),
   81 |         |         SPH_C32(0x923F82A4), SPH_C32(0xAB1C5ED5),
   82 |         |         SPH_C32(0xD807AA98), SPH_C32(0x12835B01),
   83 |         |         SPH_C32(0x243185BE), SPH_C32(0x550C7DC3),
   84 |         |         SPH_C32(0x72BE5D74), SPH_C32(0x80DEB1FE),
   85 |         |         SPH_C32(0x9BDC06A7), SPH_C32(0xC19BF174),
   86 |         |         SPH_C32(0xE49B69C1), SPH_C32(0xEFBE4786),
   87 |         |         SPH_C32(0x0FC19DC6), SPH_C32(0x240CA1CC),
   88 |         |         SPH_C32(0x2DE92C6F), SPH_C32(0x4A7484AA),
   89 |         |         SPH_C32(0x5CB0A9DC), SPH_C32(0x76F988DA),
   90 |         |         SPH_C32(0x983E5152), SPH_C32(0xA831C66D),
   91 |         |         SPH_C32(0xB00327C8), SPH_C32(0xBF597FC7),
   92 |         |         SPH_C32(0xC6E00BF3), SPH_C32(0xD5A79147),
   93 |         |         SPH_C32(0x06CA6351), SPH_C32(0x14292967),
   94 |         |         SPH_C32(0x27B70A85), SPH_C32(0x2E1B2138),
   95 |         |         SPH_C32(0x4D2C6DFC), SPH_C32(0x53380D13),
   96 |         |         SPH_C32(0x650A7354), SPH_C32(0x766A0ABB),
   97 |         |         SPH_C32(0x81C2C92E), SPH_C32(0x92722C85),
   98 |         |         SPH_C32(0xA2BFE8A1), SPH_C32(0xA81A664B),
   99 |         |         SPH_C32(0xC24B8B70), SPH_C32(0xC76C51A3),
  100 |         |         SPH_C32(0xD192E819), SPH_C32(0xD6990624),
  101 |         |         SPH_C32(0xF40E3585), SPH_C32(0x106AA070),
  102 |         |         SPH_C32(0x19A4C116), SPH_C32(0x1E376C08),
  103 |         |         SPH_C32(0x2748774C), SPH_C32(0x34B0BCB5),
  104 |         |         SPH_C32(0x391C0CB3), SPH_C32(0x4ED8AA4A),
  105 |         |         SPH_C32(0x5B9CCA4F), SPH_C32(0x682E6FF3),
  106 |         |         SPH_C32(0x748F82EE), SPH_C32(0x78A5636F),
  107 |         |         SPH_C32(0x84C87814), SPH_C32(0x8CC70208),
  108 |         |         SPH_C32(0x90BEFFFA), SPH_C32(0xA4506CEB),
  109 |         |         SPH_C32(0xBEF9A3F7), SPH_C32(0xC67178F2)
  110 |         | };
  111 |         |
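
Note: K holds the 64 SHA-256 round constants of FIPS 180-4, the first 32 bits
of the fractional parts of the cube roots of the first 64 primes (just as H256
above holds the corresponding square-root bits for the first 8 primes). A
standalone spot-check of the first two entries (illustrative; it assumes
double-precision cbrt is accurate enough for the top 32 fractional bits, which
holds comfortably for these values):

    #include <assert.h>
    #include <math.h>
    #include <stdint.h>

    int main(void)
    {
        /* floor(frac(cbrt(p)) * 2^32) for the first two primes, 2 and 3. */
        assert((uint32_t)(fmod(cbrt(2.0), 1.0) * 4294967296.0) == 0x428A2F98u);
        assert((uint32_t)(fmod(cbrt(3.0), 1.0) * 4294967296.0) == 0x71374491u);
        return 0;
    }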
  112 |         | #define SHA2_MEXP1(in, pc)   do { \
  113 |         |                 W[pc] = in(pc); \
  114 |         |         } while (0)
  115 |         |
  116 |         | #define SHA2_MEXP2(in, pc)   do { \
  117 |         |                 W[(pc) & 0x0F] = SPH_T32(SSG2_1(W[((pc) - 2) & 0x0F]) \
  118 |         |                         + W[((pc) - 7) & 0x0F] \
  119 |         |                         + SSG2_0(W[((pc) - 15) & 0x0F]) + W[(pc) & 0x0F]); \
  120 |         |         } while (0)
  121 |         |
  122 |         | #define SHA2_STEPn(n, a, b, c, d, e, f, g, h, in, pc)   do { \
  123 |         |                 sph_u32 t1, t2; \
  124 |         |                 SHA2_MEXP ## n(in, pc); \
  125 |         |                 t1 = SPH_T32(h + BSG2_1(e) + CH(e, f, g) \
  126 |         |                         + K[pcount + (pc)] + W[(pc) & 0x0F]); \
  127 |         |                 t2 = SPH_T32(BSG2_0(a) + MAJ(a, b, c)); \
  128 |         |                 d = SPH_T32(d + t1); \
  129 |         |                 h = SPH_T32(t1 + t2); \
  130 |         |         } while (0)
  131 |         |
  132 |         | #define SHA2_STEP1(a, b, c, d, e, f, g, h, in, pc) \
  133 |         |         SHA2_STEPn(1, a, b, c, d, e, f, g, h, in, pc)
  134 |         | #define SHA2_STEP2(a, b, c, d, e, f, g, h, in, pc) \
  135 |         |         SHA2_STEPn(2, a, b, c, d, e, f, g, h, in, pc)
  136 |         |
  137 |         | #define SHA2_ROUND_BODY(in, r)   do { \
  138 |         |                 sph_u32 A, B, C, D, E, F, G, H; \
  139 |         |                 sph_u32 W[16]; \
  140 |         |                 unsigned pcount; \
  141 |         |  \
  142 |         |                 A = (r)[0]; \
  143 |         |                 B = (r)[1]; \
  144 |         |                 C = (r)[2]; \
  145 |         |                 D = (r)[3]; \
  146 |         |                 E = (r)[4]; \
  147 |         |                 F = (r)[5]; \
  148 |         |                 G = (r)[6]; \
  149 |         |                 H = (r)[7]; \
  150 |         |                 pcount = 0; \
  151 |         |                 SHA2_STEP1(A, B, C, D, E, F, G, H, in,  0); \
  152 |         |                 SHA2_STEP1(H, A, B, C, D, E, F, G, in,  1); \
  153 |         |                 SHA2_STEP1(G, H, A, B, C, D, E, F, in,  2); \
  154 |         |                 SHA2_STEP1(F, G, H, A, B, C, D, E, in,  3); \
  155 |         |                 SHA2_STEP1(E, F, G, H, A, B, C, D, in,  4); \
  156 |         |                 SHA2_STEP1(D, E, F, G, H, A, B, C, in,  5); \
  157 |         |                 SHA2_STEP1(C, D, E, F, G, H, A, B, in,  6); \
  158 |         |                 SHA2_STEP1(B, C, D, E, F, G, H, A, in,  7); \
  159 |         |                 SHA2_STEP1(A, B, C, D, E, F, G, H, in,  8); \
  160 |         |                 SHA2_STEP1(H, A, B, C, D, E, F, G, in,  9); \
  161 |         |                 SHA2_STEP1(G, H, A, B, C, D, E, F, in, 10); \
  162 |         |                 SHA2_STEP1(F, G, H, A, B, C, D, E, in, 11); \
  163 |         |                 SHA2_STEP1(E, F, G, H, A, B, C, D, in, 12); \
  164 |         |                 SHA2_STEP1(D, E, F, G, H, A, B, C, in, 13); \
  165 |         |                 SHA2_STEP1(C, D, E, F, G, H, A, B, in, 14); \
  166 |         |                 SHA2_STEP1(B, C, D, E, F, G, H, A, in, 15); \
  167 |         |                 for (pcount = 16; pcount < 64; pcount += 16) { \
  168 |         |                         SHA2_STEP2(A, B, C, D, E, F, G, H, in,  0); \
  169 |         |                         SHA2_STEP2(H, A, B, C, D, E, F, G, in,  1); \
  170 |         |                         SHA2_STEP2(G, H, A, B, C, D, E, F, in,  2); \
  171 |         |                         SHA2_STEP2(F, G, H, A, B, C, D, E, in,  3); \
  172 |         |                         SHA2_STEP2(E, F, G, H, A, B, C, D, in,  4); \
  173 |         |                         SHA2_STEP2(D, E, F, G, H, A, B, C, in,  5); \
  174 |         |                         SHA2_STEP2(C, D, E, F, G, H, A, B, in,  6); \
  175 |         |                         SHA2_STEP2(B, C, D, E, F, G, H, A, in,  7); \
  176 |         |                         SHA2_STEP2(A, B, C, D, E, F, G, H, in,  8); \
  177 |         |                         SHA2_STEP2(H, A, B, C, D, E, F, G, in,  9); \
  178 |         |                         SHA2_STEP2(G, H, A, B, C, D, E, F, in, 10); \
  179 |         |                         SHA2_STEP2(F, G, H, A, B, C, D, E, in, 11); \
  180 |         |                         SHA2_STEP2(E, F, G, H, A, B, C, D, in, 12); \
  181 |         |                         SHA2_STEP2(D, E, F, G, H, A, B, C, in, 13); \
  182 |         |                         SHA2_STEP2(C, D, E, F, G, H, A, B, in, 14); \
  183 |         |                         SHA2_STEP2(B, C, D, E, F, G, H, A, in, 15); \
  184 |         |                 } \
  185 |         |                 (r)[0] = SPH_T32((r)[0] + A); \
  186 |         |                 (r)[1] = SPH_T32((r)[1] + B); \
  187 |         |                 (r)[2] = SPH_T32((r)[2] + C); \
  188 |         |                 (r)[3] = SPH_T32((r)[3] + D); \
  189 |         |                 (r)[4] = SPH_T32((r)[4] + E); \
  190 |         |                 (r)[5] = SPH_T32((r)[5] + F); \
  191 |         |                 (r)[6] = SPH_T32((r)[6] + G); \
  192 |         |                 (r)[7] = SPH_T32((r)[7] + H); \
  193 |         |         } while (0)
  194 |         |
  195 |         | #else
  196 |         |
  197 |   2.56M | #define SHA2_ROUND_BODY(in, r)   do { \
  198 |   2.56M |                 sph_u32 A, B, C, D, E, F, G, H, T1, T2; \
  199 |   2.56M |                 sph_u32 W00, W01, W02, W03, W04, W05, W06, W07; \
  200 |   2.56M |                 sph_u32 W08, W09, W10, W11, W12, W13, W14, W15; \
  201 |   2.56M |  \
  202 |   2.56M |                 A = (r)[0]; \
  203 |   2.56M |                 B = (r)[1]; \
  204 |   2.56M |                 C = (r)[2]; \
  205 |   2.56M |                 D = (r)[3]; \
  206 |   2.56M |                 E = (r)[4]; \
  207 |   2.56M |                 F = (r)[5]; \
  208 |   2.56M |                 G = (r)[6]; \
  209 |   2.56M |                 H = (r)[7]; \
  210 |   2.56M |                 W00 = in(0); \
  211 |   2.56M |                 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
  212 |   2.56M |                         + SPH_C32(0x428A2F98) + W00); \
  213 |   2.56M |                 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
  214 |   2.56M |                 D = SPH_T32(D + T1); \
  215 |   2.56M |                 H = SPH_T32(T1 + T2); \
  216 |   2.56M |                 W01 = in(1); \
  217 |   2.56M |                 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
  218 |   2.56M |                         + SPH_C32(0x71374491) + W01); \
  219 |   2.56M |                 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
  220 |   2.56M |                 C = SPH_T32(C + T1); \
  221 |   2.56M |                 G = SPH_T32(T1 + T2); \
  222 |   2.56M |                 W02 = in(2); \
  223 |   2.56M |                 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
  224 |   2.56M |                         + SPH_C32(0xB5C0FBCF) + W02); \
  225 |   2.56M |                 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
  226 |   2.56M |                 B = SPH_T32(B + T1); \
  227 |   2.56M |                 F = SPH_T32(T1 + T2); \
  228 |   2.56M |                 W03 = in(3); \
  229 |   2.56M |                 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
  230 |   2.56M |                         + SPH_C32(0xE9B5DBA5) + W03); \
  231 |   2.56M |                 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
  232 |   2.56M |                 A = SPH_T32(A + T1); \
  233 |   2.56M |                 E = SPH_T32(T1 + T2); \
  234 |   2.56M |                 W04 = in(4); \
  235 |   2.56M |                 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
  236 |   2.56M |                         + SPH_C32(0x3956C25B) + W04); \
  237 |   2.56M |                 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
  238 |   2.56M |                 H = SPH_T32(H + T1); \
  239 |   2.56M |                 D = SPH_T32(T1 + T2); \
  240 |   2.56M |                 W05 = in(5); \
  241 |   2.56M |                 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
  242 |   2.56M |                         + SPH_C32(0x59F111F1) + W05); \
  243 |   2.56M |                 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
  244 |   2.56M |                 G = SPH_T32(G + T1); \
  245 |   2.56M |                 C = SPH_T32(T1 + T2); \
  246 |   2.56M |                 W06 = in(6); \
  247 |   2.56M |                 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
  248 |   2.56M |                         + SPH_C32(0x923F82A4) + W06); \
  249 |   2.56M |                 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
  250 |   2.56M |                 F = SPH_T32(F + T1); \
  251 |   2.56M |                 B = SPH_T32(T1 + T2); \
  252 |   2.56M |                 W07 = in(7); \
  253 |   2.56M |                 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
  254 |   2.56M |                         + SPH_C32(0xAB1C5ED5) + W07); \
  255 |   2.56M |                 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
  256 |   2.56M |                 E = SPH_T32(E + T1); \
  257 |   2.56M |                 A = SPH_T32(T1 + T2); \
  258 |   2.56M |                 W08 = in(8); \
  259 |   2.56M |                 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
  260 |   2.56M |                         + SPH_C32(0xD807AA98) + W08); \
  261 |   2.56M |                 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
  262 |   2.56M |                 D = SPH_T32(D + T1); \
  263 |   2.56M |                 H = SPH_T32(T1 + T2); \
  264 |   2.56M |                 W09 = in(9); \
  265 |   2.56M |                 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
  266 |   2.56M |                         + SPH_C32(0x12835B01) + W09); \
  267 |   2.56M |                 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
  268 |   2.56M |                 C = SPH_T32(C + T1); \
  269 |   2.56M |                 G = SPH_T32(T1 + T2); \
  270 |   2.56M |                 W10 = in(10); \
  271 |   2.56M |                 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
  272 |   2.56M |                         + SPH_C32(0x243185BE) + W10); \
  273 |   2.56M |                 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
  274 |   2.56M |                 B = SPH_T32(B + T1); \
  275 |   2.56M |                 F = SPH_T32(T1 + T2); \
  276 |   2.56M |                 W11 = in(11); \
  277 |   2.56M |                 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
  278 |   2.56M |                         + SPH_C32(0x550C7DC3) + W11); \
  279 |   2.56M |                 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
  280 |   2.56M |                 A = SPH_T32(A + T1); \
  281 |   2.56M |                 E = SPH_T32(T1 + T2); \
  282 |   2.56M |                 W12 = in(12); \
  283 |   2.56M |                 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
  284 |   2.56M |                         + SPH_C32(0x72BE5D74) + W12); \
  285 |   2.56M |                 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
  286 |   2.56M |                 H = SPH_T32(H + T1); \
  287 |   2.56M |                 D = SPH_T32(T1 + T2); \
  288 |   2.56M |                 W13 = in(13); \
  289 |   2.56M |                 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
  290 |   2.56M |                         + SPH_C32(0x80DEB1FE) + W13); \
  291 |   2.56M |                 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
  292 |   2.56M |                 G = SPH_T32(G + T1); \
  293 |   2.56M |                 C = SPH_T32(T1 + T2); \
  294 |   2.56M |                 W14 = in(14); \
  295 |   2.56M |                 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
  296 |   2.56M |                         + SPH_C32(0x9BDC06A7) + W14); \
  297 |   2.56M |                 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
  298 |   2.56M |                 F = SPH_T32(F + T1); \
  299 |   2.56M |                 B = SPH_T32(T1 + T2); \
  300 |   2.56M |                 W15 = in(15); \
  301 |   2.56M |                 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
  302 |   2.56M |                         + SPH_C32(0xC19BF174) + W15); \
  303 |   2.56M |                 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
  304 |   2.56M |                 E = SPH_T32(E + T1); \
  305 |   2.56M |                 A = SPH_T32(T1 + T2); \
  306 |   2.56M |                 W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \
  307 |   2.56M |                 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
  308 |   2.56M |                         + SPH_C32(0xE49B69C1) + W00); \
  309 |   2.56M |                 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
  310 |   2.56M |                 D = SPH_T32(D + T1); \
  311 |   2.56M |                 H = SPH_T32(T1 + T2); \
  312 |   2.56M |                 W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \
  313 |   2.56M |                 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
  314 |   2.56M |                         + SPH_C32(0xEFBE4786) + W01); \
  315 |   2.56M |                 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
  316 |   2.56M |                 C = SPH_T32(C + T1); \
  317 |   2.56M |                 G = SPH_T32(T1 + T2); \
  318 |   2.56M |                 W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \
  319 |   2.56M |                 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
  320 |   2.56M |                         + SPH_C32(0x0FC19DC6) + W02); \
  321 |   2.56M |                 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
  322 |   2.56M |                 B = SPH_T32(B + T1); \
  323 |   2.56M |                 F = SPH_T32(T1 + T2); \
  324 |   2.56M |                 W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \
  325 |   2.56M |                 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
  326 |   2.56M |                         + SPH_C32(0x240CA1CC) + W03); \
  327 |   2.56M |                 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
  328 |   2.56M |                 A = SPH_T32(A + T1); \
  329 |   2.56M |                 E = SPH_T32(T1 + T2); \
  330 |   2.56M |                 W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \
  331 |   2.56M |                 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
  332 |   2.56M |                         + SPH_C32(0x2DE92C6F) + W04); \
  333 |   2.56M |                 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
  334 |   2.56M |                 H = SPH_T32(H + T1); \
  335 |   2.56M |                 D = SPH_T32(T1 + T2); \
  336 |   2.56M |                 W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \
  337 |   2.56M |                 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
  338 |   2.56M |                         + SPH_C32(0x4A7484AA) + W05); \
  339 |   2.56M |                 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
  340 |   2.56M |                 G = SPH_T32(G + T1); \
  341 |   2.56M |                 C = SPH_T32(T1 + T2); \
  342 |   2.56M |                 W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \
  343 |   2.56M |                 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
  344 |   2.56M |                         + SPH_C32(0x5CB0A9DC) + W06); \
  345 |   2.56M |                 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
  346 |   2.56M |                 F = SPH_T32(F + T1); \
  347 |   2.56M |                 B = SPH_T32(T1 + T2); \
  348 |   2.56M |                 W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \
  349 |   2.56M |                 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
  350 |   2.56M |                         + SPH_C32(0x76F988DA) + W07); \
  351 |   2.56M |                 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
  352 |   2.56M |                 E = SPH_T32(E + T1); \
  353 |   2.56M |                 A = SPH_T32(T1 + T2); \
  354 |   2.56M |                 W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \
  355 |   2.56M |                 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
  356 |   2.56M |                         + SPH_C32(0x983E5152) + W08); \
  357 |   2.56M |                 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
  358 |   2.56M |                 D = SPH_T32(D + T1); \
  359 |   2.56M |                 H = SPH_T32(T1 + T2); \
  360 |   2.56M |                 W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \
  361 |   2.56M |                 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
  362 |   2.56M |                         + SPH_C32(0xA831C66D) + W09); \
  363 |   2.56M |                 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
  364 |   2.56M |                 C = SPH_T32(C + T1); \
  365 |   2.56M |                 G = SPH_T32(T1 + T2); \
  366 |   2.56M |                 W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \
  367 |   2.56M |                 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
  368 |   2.56M |                         + SPH_C32(0xB00327C8) + W10); \
  369 |   2.56M |                 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
  370 |   2.56M |                 B = SPH_T32(B + T1); \
  371 |   2.56M |                 F = SPH_T32(T1 + T2); \
  372 |   2.56M |                 W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \
  373 |   2.56M |                 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
  374 |   2.56M |                         + SPH_C32(0xBF597FC7) + W11); \
  375 |   2.56M |                 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
  376 |   2.56M |                 A = SPH_T32(A + T1); \
  377 |   2.56M |                 E = SPH_T32(T1 + T2); \
  378 |   2.56M |                 W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \
  379 |   2.56M |                 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
  380 |   2.56M |                         + SPH_C32(0xC6E00BF3) + W12); \
  381 |   2.56M |                 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
  382 |   2.56M |                 H = SPH_T32(H + T1); \
  383 |   2.56M |                 D = SPH_T32(T1 + T2); \
  384 |   2.56M |                 W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \
  385 |   2.56M |                 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
  386 |   2.56M |                         + SPH_C32(0xD5A79147) + W13); \
  387 |   2.56M |                 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
  388 |   2.56M |                 G = SPH_T32(G + T1); \
  389 |   2.56M |                 C = SPH_T32(T1 + T2); \
  390 |   2.56M |                 W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \
  391 |   2.56M |                 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
  392 |   2.56M |                         + SPH_C32(0x06CA6351) + W14); \
  393 |   2.56M |                 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
  394 |   2.56M |                 F = SPH_T32(F + T1); \
  395 |   2.56M |                 B = SPH_T32(T1 + T2); \
  396 |   2.56M |                 W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \
  397 |   2.56M |                 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
  398 |   2.56M |                         + SPH_C32(0x14292967) + W15); \
  399 |   2.56M |                 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
  400 |   2.56M |                 E = SPH_T32(E + T1); \
  401 |   2.56M |                 A = SPH_T32(T1 + T2); \
  402 |   2.56M |                 W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \
  403 |   2.56M |                 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
  404 |   2.56M |                         + SPH_C32(0x27B70A85) + W00); \
  405 |   2.56M |                 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
  406 |   2.56M |                 D = SPH_T32(D + T1); \
  407 |   2.56M |                 H = SPH_T32(T1 + T2); \
  408 |   2.56M |                 W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \
  409 |   2.56M |                 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
  410 |   2.56M |                         + SPH_C32(0x2E1B2138) + W01); \
  411 |   2.56M |                 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
  412 |   2.56M |                 C = SPH_T32(C + T1); \
  413 |   2.56M |                 G = SPH_T32(T1 + T2); \
  414 |   2.56M |                 W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \
  415 |   2.56M |                 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
  416 |   2.56M |                         + SPH_C32(0x4D2C6DFC) + W02); \
  417 |   2.56M |                 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
  418 |   2.56M |                 B = SPH_T32(B + T1); \
  419 |   2.56M |                 F = SPH_T32(T1 + T2); \
  420 |   2.56M |                 W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \
  421 |   2.56M |                 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
  422 |   2.56M |                         + SPH_C32(0x53380D13) + W03); \
  423 |   2.56M |                 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
  424 |   2.56M |                 A = SPH_T32(A + T1); \
  425 |   2.56M |                 E = SPH_T32(T1 + T2); \
  426 |   2.56M |                 W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \
  427 |   2.56M |                 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
  428 |   2.56M |                         + SPH_C32(0x650A7354) + W04); \
  429 |   2.56M |                 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
  430 |   2.56M |                 H = SPH_T32(H + T1); \
  431 |   2.56M |                 D = SPH_T32(T1 + T2); \
  432 |   2.56M |                 W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \
  433 |   2.56M |                 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
  434 |   2.56M |                         + SPH_C32(0x766A0ABB) + W05); \
  435 |   2.56M |                 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
  436 |   2.56M |                 G = SPH_T32(G + T1); \
  437 |   2.56M |                 C = SPH_T32(T1 + T2); \
  438 |   2.56M |                 W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \
  439 |   2.56M |                 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
  440 |   2.56M |                         + SPH_C32(0x81C2C92E) + W06); \
  441 |   2.56M |                 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
  442 |   2.56M |                 F = SPH_T32(F + T1); \
  443 |   2.56M |                 B = SPH_T32(T1 + T2); \
  444 |   2.56M |                 W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \
  445 |   2.56M |                 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
  446 |   2.56M |                         + SPH_C32(0x92722C85) + W07); \
  447 |   2.56M |                 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
  448 |   2.56M |                 E = SPH_T32(E + T1); \
  449 |   2.56M |                 A = SPH_T32(T1 + T2); \
  450 |   2.56M |                 W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \
  451 |   2.56M |                 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
  452 |   2.56M |                         + SPH_C32(0xA2BFE8A1) + W08); \
  453 |   2.56M |                 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
  454 |   2.56M |                 D = SPH_T32(D + T1); \
  455 |   2.56M |                 H = SPH_T32(T1 + T2); \
  456 |   2.56M |                 W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \
  457 |   2.56M |                 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
  458 |   2.56M |                         + SPH_C32(0xA81A664B) + W09); \
  459 |   2.56M |                 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
  460 |   2.56M |                 C = SPH_T32(C + T1); \
  461 |   2.56M |                 G = SPH_T32(T1 + T2); \
  462 |   2.56M |                 W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \
  463 |   2.56M |                 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
  464 |   2.56M |                         + SPH_C32(0xC24B8B70) + W10); \
  465 |   2.56M |                 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
  466 |   2.56M |                 B = SPH_T32(B + T1); \
  467 |   2.56M |                 F = SPH_T32(T1 + T2); \
  468 |   2.56M |                 W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \
  469 |   2.56M |                 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
  470 |   2.56M |                         + SPH_C32(0xC76C51A3) + W11); \
  471 |   2.56M |                 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
  472 |   2.56M |                 A = SPH_T32(A + T1); \
  473 |   2.56M |                 E = SPH_T32(T1 + T2); \
  474 |   2.56M |                 W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \
  475 |   2.56M |                 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
  476 |   2.56M |                         + SPH_C32(0xD192E819) + W12); \
  477 |   2.56M |                 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
  478 |   2.56M |                 H = SPH_T32(H + T1); \
  479 |   2.56M |                 D = SPH_T32(T1 + T2); \
  480 |   2.56M |                 W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \
  481 |   2.56M |                 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
  482 |   2.56M |                         + SPH_C32(0xD6990624) + W13); \
  483 |   2.56M |                 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
  484 |   2.56M |                 G = SPH_T32(G + T1); \
  485 |   2.56M |                 C = SPH_T32(T1 + T2); \
  486 |   2.56M |                 W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \
  487 |   2.56M |                 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
  488 |   2.56M |                         + SPH_C32(0xF40E3585) + W14); \
  489 |   2.56M |                 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
  490 |   2.56M |                 F = SPH_T32(F + T1); \
  491 |   2.56M |                 B = SPH_T32(T1 + T2); \
  492 |   2.56M |                 W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \
  493 |   2.56M |                 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
  494 |   2.56M |                         + SPH_C32(0x106AA070) + W15); \
  495 |   2.56M |                 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
  496 |   2.56M |                 E = SPH_T32(E + T1); \
  497 |   2.56M |                 A = SPH_T32(T1 + T2); \
  498 |   2.56M |                 W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \
  499 |   2.56M |                 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
  500 |   2.56M |                         + SPH_C32(0x19A4C116) + W00); \
  501 |   2.56M |                 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
  502 |   2.56M |                 D = SPH_T32(D + T1); \
  503 |   2.56M |                 H = SPH_T32(T1 + T2); \
  504 |   2.56M |                 W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \
  505 |   2.56M |                 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
  506 |   2.56M |                         + SPH_C32(0x1E376C08) + W01); \
  507 |   2.56M |                 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
  508 |   2.56M |                 C = SPH_T32(C + T1); \
  509 |   2.56M |                 G = SPH_T32(T1 + T2); \
  510 |   2.56M |                 W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \
  511 |   2.56M |                 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
  512 |   2.56M |                         + SPH_C32(0x2748774C) + W02); \
  513 |   2.56M |                 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
  514 |   2.56M |                 B = SPH_T32(B + T1); \
  515 |   2.56M |                 F = SPH_T32(T1 + T2); \
  516 |   2.56M |                 W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \
  517 |   2.56M |                 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
  518 |   2.56M |                         + SPH_C32(0x34B0BCB5) + W03); \
  519 |   2.56M |                 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
  520 |   2.56M |                 A = SPH_T32(A + T1); \
  521 |   2.56M |                 E = SPH_T32(T1 + T2); \
  522 |   2.56M |                 W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \
  523 |   2.56M |                 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
  524 |   2.56M |                         + SPH_C32(0x391C0CB3) + W04); \
  525 |   2.56M |                 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
  526 |   2.56M |                 H = SPH_T32(H + T1); \
  527 |   2.56M |                 D = SPH_T32(T1 + T2); \
  528 |   2.56M |                 W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \
  529 |   2.56M |                 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
  530 |   2.56M |                         + SPH_C32(0x4ED8AA4A) + W05); \
  531 |   2.56M |                 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
  532 |   2.56M |                 G = SPH_T32(G + T1); \
  533 |   2.56M |                 C = SPH_T32(T1 + T2); \
  534 |   2.56M |                 W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \
  535 |   2.56M |                 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
  536 |   2.56M |                         + SPH_C32(0x5B9CCA4F) + W06); \
  537 |   2.56M |                 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
  538 |   2.56M |                 F = SPH_T32(F + T1); \
  539 |   2.56M |                 B = SPH_T32(T1 + T2); \
  540 |   2.56M |                 W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \
  541 |   2.56M |                 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
  542 |   2.56M |                         + SPH_C32(0x682E6FF3) + W07); \
  543 |   2.56M |                 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
  544 |   2.56M |                 E = SPH_T32(E + T1); \
  545 |   2.56M |                 A = SPH_T32(T1 + T2); \
  546 |   2.56M |                 W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \
  547 |   2.56M |                 T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
  548 |   2.56M |                         + SPH_C32(0x748F82EE) + W08); \
  549 |   2.56M |                 T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
  550 |   2.56M |                 D = SPH_T32(D + T1); \
  551 |   2.56M |                 H = SPH_T32(T1 + T2); \
  552 |   2.56M |                 W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \
  553 |   2.56M |                 T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
  554 |   2.56M |                         + SPH_C32(0x78A5636F) + W09); \
  555 |   2.56M |                 T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
  556 |   2.56M |                 C = SPH_T32(C + T1); \
  557 |   2.56M |                 G = SPH_T32(T1 + T2); \
  558 |   2.56M |                 W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \
  559 |   2.56M |                 T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
  560 |   2.56M |                         + SPH_C32(0x84C87814) + W10); \
  561 |   2.56M |                 T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
  562 |   2.56M |                 B = SPH_T32(B + T1); \
  563 |   2.56M |                 F = SPH_T32(T1 + T2); \
  564 |   2.56M |                 W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \
  565 |   2.56M |                 T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
  566 |   2.56M |                         + SPH_C32(0x8CC70208) + W11); \
  567 |   2.56M |                 T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
  568 |   2.56M |                 A = SPH_T32(A + T1); \
  569 |   2.56M |                 E = SPH_T32(T1 + T2); \
  570 |   2.56M |                 W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \
  571 |   2.56M |                 T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
  572 |   2.56M |                         + SPH_C32(0x90BEFFFA) + W12); \
  573 |   2.56M |                 T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
  574 |   2.56M |                 H = SPH_T32(H + T1); \
  575 |   2.56M |                 D = SPH_T32(T1 + T2); \
  576 |   2.56M |                 W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \
  577 |   2.56M |                 T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
  578 |   2.56M |                         + SPH_C32(0xA4506CEB) + W13); \
  579 |   2.56M |                 T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
  580 |   2.56M |                 G = SPH_T32(G + T1); \
  581 |   2.56M |                 C = SPH_T32(T1 + T2); \
  582 |   2.56M |                 W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \
  583 |   2.56M |                 T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
  584 |   2.56M |                         + SPH_C32(0xBEF9A3F7) + W14); \
  585 |   2.56M |                 T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
  586 |   2.56M |                 F = SPH_T32(F + T1); \
  587 |   2.56M |                 B = SPH_T32(T1 + T2); \
  588 |   2.56M |                 W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \
  589 |   2.56M |                 T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
  590 |   2.56M |                         + SPH_C32(0xC67178F2) + W15); \
  591 |   2.56M |                 T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
  592 |   2.56M |                 E = SPH_T32(E + T1); \
  593 |   2.56M |                 A = SPH_T32(T1 + T2); \
  594 |   2.56M |                 (r)[0] = SPH_T32((r)[0] + A); \
  595 |   2.56M |                 (r)[1] = SPH_T32((r)[1] + B); \
  596 |   2.56M |                 (r)[2] = SPH_T32((r)[2] + C); \
  597 |   2.56M |                 (r)[3] = SPH_T32((r)[3] + D); \
  598 |   2.56M |                 (r)[4] = SPH_T32((r)[4] + E); \
  599 |   2.56M |                 (r)[5] = SPH_T32((r)[5] + F); \
  600 |   2.56M |                 (r)[6] = SPH_T32((r)[6] + G); \
  601 |   2.56M |                 (r)[7] = SPH_T32((r)[7] + H); \
  602 |   2.56M |         } while (0)
  603 |         |
  604 |         | #endif
  605 |         |
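
Note: both branches of the #if compute the same 64-round schedule; the
small-footprint variant rotates the roles of A..H by permuting macro
arguments, while the unrolled variant above renames registers in the source
text instead of moving data. For reference, one round in the rotating-variable
style of FIPS 180-4 (a sketch in plain C, with uint32_t in place of sph_u32
and rotr standing in for SPH_ROTR32; sha256_round_ref is a hypothetical name):

    #include <stdint.h>

    static uint32_t rotr(uint32_t x, unsigned n)
    {
        return (x >> n) | (x << (32 - n));      /* n is in 1..31 here */
    }

    /* One round: s[0..7] = a..h, w = W[t] (expanded word), k = K[t]. */
    static void sha256_round_ref(uint32_t s[8], uint32_t w, uint32_t k)
    {
        uint32_t e = s[4], a = s[0];
        uint32_t t1 = s[7]
                + (rotr(e, 6) ^ rotr(e, 11) ^ rotr(e, 25))      /* BSG2_1 */
                + ((e & s[5]) ^ (~e & s[6]))                    /* CH */
                + k + w;
        uint32_t t2 = (rotr(a, 2) ^ rotr(a, 13) ^ rotr(a, 22))  /* BSG2_0 */
                + ((a & s[1]) ^ (a & s[2]) ^ (s[1] & s[2]));    /* MAJ */

        s[7] = s[6]; s[6] = s[5]; s[5] = s[4];  /* h, g, f shift down */
        s[4] = s[3] + t1;                       /* e = d + T1 */
        s[3] = s[2]; s[2] = s[1]; s[1] = s[0];  /* d, c, b shift down */
        s[0] = t1 + t2;                         /* a = T1 + T2 */
    }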
  606 |         | /*
  607 |         |  * One round of SHA-224 / SHA-256. The data must be aligned for 32-bit access.
  608 |         |  */
  609 |         | static void
  610 |         | sha2_round(const unsigned char *data, sph_u32 r[8])
  611 |   2.56M | {
  612 |   40.9M | #define SHA2_IN(x)   sph_dec32be_aligned(data + (4 * (x)))
  613 |   40.9M |         SHA2_ROUND_BODY(SHA2_IN, r);
  614 |   2.56M | #undef SHA2_IN
  615 |   2.56M | }
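
Note: SHA2_IN(x) pulls message word x out of the 64-byte block in big-endian
order; the 40.9M count on line 612 is consistent with 16 SHA2_IN expansions
per 2.56M compression calls. sph_dec32be_aligned is sphlib's aligned
big-endian 32-bit load; a portable equivalent looks like this (illustrative
only; sphlib's real definition may use platform intrinsics instead):

    static uint32_t dec32be(const unsigned char *buf)
    {
        /* Assemble a 32-bit word from 4 bytes, most significant first. */
        return ((uint32_t)buf[0] << 24)
             | ((uint32_t)buf[1] << 16)
             | ((uint32_t)buf[2] << 8)
             | (uint32_t)buf[3];
    }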
  616 |         |
  617 |         | /* see sph_sha2.h */
  618 |         | void
  619 |         | sph_sha224_init(void *cc)
  620 |       0 | {
  621 |       0 |         sph_sha224_context *sc;
  622 |         |
  623 |       0 |         sc = cc;
  624 |       0 |         memcpy(sc->val, H224, sizeof H224);
  625 |       0 | #if SPH_64
  626 |       0 |         sc->count = 0;
  627 |         | #else
  628 |         |         sc->count_high = sc->count_low = 0;
  629 |         | #endif
  630 |       0 | }
  631 |         |
  632 |         | /* see sph_sha2.h */
  633 |         | void
  634 |         | sph_sha256_init(void *cc)
  635 |   75.2k | {
  636 |   75.2k |         sph_sha256_context *sc;
  637 |         |
  638 |   75.2k |         sc = cc;
  639 |   75.2k |         memcpy(sc->val, H256, sizeof H256);
  640 |   75.2k | #if SPH_64
  641 |   75.2k |         sc->count = 0;
  642 |         | #else
  643 |         |         sc->count_high = sc->count_low = 0;
  644 |         | #endif
  645 |   75.2k | }
  646 |         |
  647 |   2.56M | #define RFUN   sha2_round
  648 |         | #define HASH   sha224
  649 |         | #define BE32   1
  650 |         | #include "sph/md_helper.c"
  651 |         |
  652 |         | /* see sph_sha2.h */
  653 |         | void
  654 |         | sph_sha224_close(void *cc, void *dst)
  655 |       0 | {
  656 |       0 |         sha224_close(cc, dst, 7);
  657 |       0 |         sph_sha224_init(cc);
  658 |       0 | }
  659 |         |
  660 |         | /* see sph_sha2.h */
  661 |         | void
  662 |         | sph_sha224_addbits_and_close(void *cc, unsigned ub, unsigned n, void *dst)
  663 |       0 | {
  664 |       0 |         sha224_addbits_and_close(cc, ub, n, dst, 7);
  665 |       0 |         sph_sha224_init(cc);
  666 |       0 | }
  667 |         |
  668 |         | /* see sph_sha2.h */
  669 |         | void
  670 |         | sph_sha256_close(void *cc, void *dst)
  671 |   37.6k | {
  672 |   37.6k |         sha224_close(cc, dst, 8);
  673 |   37.6k |         sph_sha256_init(cc);
  674 |   37.6k | }
  675 |         |
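
Note: the counters tell the usage story: qpdf's workload exercises only the
SHA-256 entry points (75.2k inits, 37.6k closes, 2.56M block compressions),
while every SHA-224 function body shows a count of 0. A minimal sketch of the
public API these wrappers implement, per sph_sha2.h's standard sphlib calling
convention (the sph_sha256 update function itself is generated via
md_helper.c and does not appear in this listing):

    #include <stdio.h>

    #include "sph/sph_sha2.h"

    int main(void)
    {
        sph_sha256_context cc;
        unsigned char digest[32];           /* SHA-256 output is 32 bytes */

        sph_sha256_init(&cc);               /* state := H256 */
        sph_sha256(&cc, "abc", 3);          /* absorb message bytes */
        sph_sha256_close(&cc, digest);      /* pad, finalize, re-init */

        for (int i = 0; i < 32; i++)
            printf("%02x", digest[i]);
        printf("\n");    /* expect ba7816bf8f01cfea414140de5dae2223... */
        return 0;
    }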
  676 |         | /* see sph_sha2.h */
  677 |         | void
  678 |         | sph_sha256_addbits_and_close(void *cc, unsigned ub, unsigned n, void *dst)
  679 |       0 | {
  680 |       0 |         sha224_addbits_and_close(cc, ub, n, dst, 8);
  681 |       0 |         sph_sha256_init(cc);
  682 |       0 | }
  683 |         |
  684 |         | /* see sph_sha2.h */
  685 |         | void
  686 |         | sph_sha224_comp(const sph_u32 msg[16], sph_u32 val[8])
  687 |       0 | {
  688 |       0 | #define SHA2_IN(x)   msg[x]
  689 |       0 |         SHA2_ROUND_BODY(SHA2_IN, val);
  690 |       0 | #undef SHA2_IN
  691 |       0 | }