Coverage Report

Created: 2026-02-26 06:46

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/openssh/sntrup761.c
Line
Count
Source
1
/*  $OpenBSD: sntrup761.c,v 1.9 2026/01/20 22:56:11 dtucker Exp $ */
2
3
/*
4
 * Public Domain, Authors:
5
 * - Daniel J. Bernstein
6
 * - Chitchanok Chuengsatiansup
7
 * - Tanja Lange
8
 * - Christine van Vredendaal
9
 */
10
11
#include "includes.h"
12
13
#ifdef USE_SNTRUP761X25519
14
15
#include <string.h>
16
#include "crypto_api.h"
17
18
826
#define crypto_declassify(x, y) do {} while (0)
19
20
#define int8 crypto_int8
21
#define uint8 crypto_uint8
22
#define int16 crypto_int16
23
#define uint16 crypto_uint16
24
2.56M
#define int32 crypto_int32
25
#define uint32 crypto_uint32
26
#define int64 crypto_int64
27
#define uint64 crypto_uint64
28
extern volatile crypto_int16 crypto_int16_optblocker;
29
extern volatile crypto_int32 crypto_int32_optblocker;
30
extern volatile crypto_int64 crypto_int64_optblocker;
31
32
/* from supercop-20240808/cryptoint/crypto_int16.h */
33
/* auto-generated: cd cryptoint; ./autogen */
34
/* cryptoint 20240806 */
35
36
#ifndef crypto_int16_h
37
#define crypto_int16_h
38
39
2.51M
#define crypto_int16 int16_t
40
#define crypto_int16_unsigned uint16_t
41
42
43
44
__attribute__((unused))
45
static inline
46
0
crypto_int16 crypto_int16_load(const unsigned char *crypto_int16_s) {
47
0
  crypto_int16 crypto_int16_z = 0;
48
0
  crypto_int16_z |= ((crypto_int16) (*crypto_int16_s++)) << 0;
49
0
  crypto_int16_z |= ((crypto_int16) (*crypto_int16_s++)) << 8;
50
0
  return crypto_int16_z;
51
0
}
52
53
__attribute__((unused))
54
static inline
55
0
void crypto_int16_store(unsigned char *crypto_int16_s,crypto_int16 crypto_int16_x) {
56
0
  *crypto_int16_s++ = crypto_int16_x >> 0;
57
0
  *crypto_int16_s++ = crypto_int16_x >> 8;
58
0
}
59
60
__attribute__((unused))
61
static inline
62
2.51M
crypto_int16 crypto_int16_negative_mask(crypto_int16 crypto_int16_x) {
63
2.51M
#if defined(__GNUC__) && defined(__x86_64__)
64
2.51M
  __asm__ ("sarw $15,%0" : "+r"(crypto_int16_x) : : "cc");
65
2.51M
  return crypto_int16_x;
66
#elif defined(__GNUC__) && defined(__aarch64__)
67
  crypto_int16 crypto_int16_y;
68
  __asm__ ("sbfx %w0,%w1,15,1" : "=r"(crypto_int16_y) : "r"(crypto_int16_x) : );
69
  return crypto_int16_y;
70
#else
71
  crypto_int16_x >>= 16-6;
72
  crypto_int16_x ^= crypto_int16_optblocker;
73
  crypto_int16_x >>= 5;
74
  return crypto_int16_x;
75
#endif
76
2.51M
}
77
78
__attribute__((unused))
79
static inline
80
0
crypto_int16_unsigned crypto_int16_unsigned_topbit_01(crypto_int16_unsigned crypto_int16_x) {
81
0
#if defined(__GNUC__) && defined(__x86_64__)
82
0
  __asm__ ("shrw $15,%0" : "+r"(crypto_int16_x) : : "cc");
83
0
  return crypto_int16_x;
84
0
#elif defined(__GNUC__) && defined(__aarch64__)
85
0
  crypto_int16 crypto_int16_y;
86
0
  __asm__ ("ubfx %w0,%w1,15,1" : "=r"(crypto_int16_y) : "r"(crypto_int16_x) : );
87
0
  return crypto_int16_y;
88
0
#else
89
0
  crypto_int16_x >>= 16-6;
90
0
  crypto_int16_x ^= crypto_int16_optblocker;
91
0
  crypto_int16_x >>= 5;
92
0
  return crypto_int16_x;
93
0
#endif
94
0
}
95
96
__attribute__((unused))
97
static inline
98
0
crypto_int16 crypto_int16_negative_01(crypto_int16 crypto_int16_x) {
99
0
  return crypto_int16_unsigned_topbit_01(crypto_int16_x);
100
0
}
101
102
__attribute__((unused))
103
static inline
104
0
crypto_int16 crypto_int16_topbit_mask(crypto_int16 crypto_int16_x) {
105
0
  return crypto_int16_negative_mask(crypto_int16_x);
106
0
}
107
108
__attribute__((unused))
109
static inline
110
0
crypto_int16 crypto_int16_topbit_01(crypto_int16 crypto_int16_x) {
111
0
  return crypto_int16_unsigned_topbit_01(crypto_int16_x);
112
0
}
113
114
__attribute__((unused))
115
static inline
116
0
crypto_int16 crypto_int16_bottombit_mask(crypto_int16 crypto_int16_x) {
117
0
#if defined(__GNUC__) && defined(__x86_64__)
118
0
  __asm__ ("andw $1,%0" : "+r"(crypto_int16_x) : : "cc");
119
0
  return -crypto_int16_x;
120
0
#elif defined(__GNUC__) && defined(__aarch64__)
121
0
  crypto_int16 crypto_int16_y;
122
0
  __asm__ ("sbfx %w0,%w1,0,1" : "=r"(crypto_int16_y) : "r"(crypto_int16_x) : );
123
0
  return crypto_int16_y;
124
0
#else
125
0
  crypto_int16_x &= 1 ^ crypto_int16_optblocker;
126
0
  return -crypto_int16_x;
127
0
#endif
128
0
}
129
130
__attribute__((unused))
131
static inline
132
0
crypto_int16 crypto_int16_bottombit_01(crypto_int16 crypto_int16_x) {
133
0
#if defined(__GNUC__) && defined(__x86_64__)
134
0
  __asm__ ("andw $1,%0" : "+r"(crypto_int16_x) : : "cc");
135
0
  return crypto_int16_x;
136
0
#elif defined(__GNUC__) && defined(__aarch64__)
137
0
  crypto_int16 crypto_int16_y;
138
0
  __asm__ ("ubfx %w0,%w1,0,1" : "=r"(crypto_int16_y) : "r"(crypto_int16_x) : );
139
0
  return crypto_int16_y;
140
0
#else
141
0
  crypto_int16_x &= 1 ^ crypto_int16_optblocker;
142
0
  return crypto_int16_x;
143
0
#endif
144
0
}
145
146
__attribute__((unused))
147
static inline
148
0
crypto_int16 crypto_int16_bitinrangepublicpos_mask(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_s) {
149
0
#if defined(__GNUC__) && defined(__x86_64__)
150
0
  __asm__ ("sarw %%cl,%0" : "+r"(crypto_int16_x) : "c"(crypto_int16_s) : "cc");
151
0
#elif defined(__GNUC__) && defined(__aarch64__)
152
0
  __asm__ ("sxth %w0,%w0\n asr %w0,%w0,%w1" : "+&r"(crypto_int16_x) : "r"(crypto_int16_s) : );
153
0
#else
154
0
  crypto_int16_x >>= crypto_int16_s ^ crypto_int16_optblocker;
155
0
#endif
156
0
  return crypto_int16_bottombit_mask(crypto_int16_x);
157
0
}
158
159
__attribute__((unused))
160
static inline
161
0
crypto_int16 crypto_int16_bitinrangepublicpos_01(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_s) {
162
0
#if defined(__GNUC__) && defined(__x86_64__)
163
0
  __asm__ ("sarw %%cl,%0" : "+r"(crypto_int16_x) : "c"(crypto_int16_s) : "cc");
164
0
#elif defined(__GNUC__) && defined(__aarch64__)
165
0
  __asm__ ("sxth %w0,%w0\n asr %w0,%w0,%w1" : "+&r"(crypto_int16_x) : "r"(crypto_int16_s) : );
166
0
#else
167
0
  crypto_int16_x >>= crypto_int16_s ^ crypto_int16_optblocker;
168
0
#endif
169
0
  return crypto_int16_bottombit_01(crypto_int16_x);
170
0
}
171
172
__attribute__((unused))
173
static inline
174
0
crypto_int16 crypto_int16_shlmod(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_s) {
175
0
#if defined(__GNUC__) && defined(__x86_64__)
176
0
  crypto_int16_s &= 15;
177
0
  __asm__ ("shlw %%cl,%0" : "+r"(crypto_int16_x) : "c"(crypto_int16_s) : "cc");
178
0
#elif defined(__GNUC__) && defined(__aarch64__)
179
0
  __asm__ ("and %w0,%w0,15\n and %w1,%w1,65535\n lsl %w1,%w1,%w0" : "+&r"(crypto_int16_s), "+r"(crypto_int16_x) : : );
180
0
#else
181
0
  int crypto_int16_k, crypto_int16_l;
182
0
  for (crypto_int16_l = 0,crypto_int16_k = 1;crypto_int16_k < 16;++crypto_int16_l,crypto_int16_k *= 2)
183
0
    crypto_int16_x ^= (crypto_int16_x ^ (crypto_int16_x << crypto_int16_k)) & crypto_int16_bitinrangepublicpos_mask(crypto_int16_s,crypto_int16_l);
184
0
#endif
185
0
  return crypto_int16_x;
186
0
}
187
188
__attribute__((unused))
189
static inline
190
0
crypto_int16 crypto_int16_shrmod(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_s) {
191
0
#if defined(__GNUC__) && defined(__x86_64__)
192
0
  crypto_int16_s &= 15;
193
0
  __asm__ ("sarw %%cl,%0" : "+r"(crypto_int16_x) : "c"(crypto_int16_s) : "cc");
194
0
#elif defined(__GNUC__) && defined(__aarch64__)
195
0
  __asm__ ("and %w0,%w0,15\n sxth %w1,%w1\n asr %w1,%w1,%w0" : "+&r"(crypto_int16_s), "+r"(crypto_int16_x) : : );
196
0
#else
197
0
  int crypto_int16_k, crypto_int16_l;
198
0
  for (crypto_int16_l = 0,crypto_int16_k = 1;crypto_int16_k < 16;++crypto_int16_l,crypto_int16_k *= 2)
199
0
    crypto_int16_x ^= (crypto_int16_x ^ (crypto_int16_x >> crypto_int16_k)) & crypto_int16_bitinrangepublicpos_mask(crypto_int16_s,crypto_int16_l);
200
0
#endif
201
0
  return crypto_int16_x;
202
0
}
203
204
__attribute__((unused))
205
static inline
206
0
crypto_int16 crypto_int16_bitmod_mask(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_s) {
207
0
  crypto_int16_x = crypto_int16_shrmod(crypto_int16_x,crypto_int16_s);
208
0
  return crypto_int16_bottombit_mask(crypto_int16_x);
209
0
}
210
211
__attribute__((unused))
212
static inline
213
0
crypto_int16 crypto_int16_bitmod_01(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_s) {
214
0
  crypto_int16_x = crypto_int16_shrmod(crypto_int16_x,crypto_int16_s);
215
0
  return crypto_int16_bottombit_01(crypto_int16_x);
216
0
}
217
218
__attribute__((unused))
219
static inline
220
2.51M
crypto_int16 crypto_int16_nonzero_mask(crypto_int16 crypto_int16_x) {
221
2.51M
#if defined(__GNUC__) && defined(__x86_64__)
222
2.51M
  crypto_int16 crypto_int16_q,crypto_int16_z;
223
2.51M
  __asm__ ("xorw %0,%0\n movw $-1,%1\n testw %2,%2\n cmovnew %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x) : "cc");
224
2.51M
  return crypto_int16_z;
225
#elif defined(__GNUC__) && defined(__aarch64__)
226
  crypto_int16 crypto_int16_z;
227
  __asm__ ("tst %w1,65535\n csetm %w0,ne" : "=r"(crypto_int16_z) : "r"(crypto_int16_x) : "cc");
228
  return crypto_int16_z;
229
#else
230
  crypto_int16_x |= -crypto_int16_x;
231
  return crypto_int16_negative_mask(crypto_int16_x);
232
#endif
233
2.51M
}
234
235
__attribute__((unused))
236
static inline
237
0
crypto_int16 crypto_int16_nonzero_01(crypto_int16 crypto_int16_x) {
238
0
#if defined(__GNUC__) && defined(__x86_64__)
239
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
240
0
  __asm__ ("xorw %0,%0\n movw $1,%1\n testw %2,%2\n cmovnew %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x) : "cc");
241
0
  return crypto_int16_z;
242
0
#elif defined(__GNUC__) && defined(__aarch64__)
243
0
  crypto_int16 crypto_int16_z;
244
0
  __asm__ ("tst %w1,65535\n cset %w0,ne" : "=r"(crypto_int16_z) : "r"(crypto_int16_x) : "cc");
245
0
  return crypto_int16_z;
246
0
#else
247
0
  crypto_int16_x |= -crypto_int16_x;
248
0
  return crypto_int16_unsigned_topbit_01(crypto_int16_x);
249
0
#endif
250
0
}
251
252
__attribute__((unused))
253
static inline
254
0
crypto_int16 crypto_int16_positive_mask(crypto_int16 crypto_int16_x) {
255
0
#if defined(__GNUC__) && defined(__x86_64__)
256
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
257
0
  __asm__ ("xorw %0,%0\n movw $-1,%1\n testw %2,%2\n cmovgw %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x) : "cc");
258
0
  return crypto_int16_z;
259
0
#elif defined(__GNUC__) && defined(__aarch64__)
260
0
  crypto_int16 crypto_int16_z;
261
0
  __asm__ ("sxth %w0,%w1\n cmp %w0,0\n csetm %w0,gt" : "=r"(crypto_int16_z) : "r"(crypto_int16_x) : "cc");
262
0
  return crypto_int16_z;
263
0
#else
264
0
  crypto_int16 crypto_int16_z = -crypto_int16_x;
265
0
  crypto_int16_z ^= crypto_int16_x & crypto_int16_z;
266
0
  return crypto_int16_negative_mask(crypto_int16_z);
267
0
#endif
268
0
}
269
270
__attribute__((unused))
271
static inline
272
0
crypto_int16 crypto_int16_positive_01(crypto_int16 crypto_int16_x) {
273
0
#if defined(__GNUC__) && defined(__x86_64__)
274
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
275
0
  __asm__ ("xorw %0,%0\n movw $1,%1\n testw %2,%2\n cmovgw %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x) : "cc");
276
0
  return crypto_int16_z;
277
0
#elif defined(__GNUC__) && defined(__aarch64__)
278
0
  crypto_int16 crypto_int16_z;
279
0
  __asm__ ("sxth %w0,%w1\n cmp %w0,0\n cset %w0,gt" : "=r"(crypto_int16_z) : "r"(crypto_int16_x) : "cc");
280
0
  return crypto_int16_z;
281
0
#else
282
0
  crypto_int16 crypto_int16_z = -crypto_int16_x;
283
0
  crypto_int16_z ^= crypto_int16_x & crypto_int16_z;
284
0
  return crypto_int16_unsigned_topbit_01(crypto_int16_z);
285
0
#endif
286
0
}
287
288
__attribute__((unused))
289
static inline
290
0
crypto_int16 crypto_int16_zero_mask(crypto_int16 crypto_int16_x) {
291
0
#if defined(__GNUC__) && defined(__x86_64__)
292
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
293
0
  __asm__ ("xorw %0,%0\n movw $-1,%1\n testw %2,%2\n cmovew %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x) : "cc");
294
0
  return crypto_int16_z;
295
0
#elif defined(__GNUC__) && defined(__aarch64__)
296
0
  crypto_int16 crypto_int16_z;
297
0
  __asm__ ("tst %w1,65535\n csetm %w0,eq" : "=r"(crypto_int16_z) : "r"(crypto_int16_x) : "cc");
298
0
  return crypto_int16_z;
299
0
#else
300
0
  return ~crypto_int16_nonzero_mask(crypto_int16_x);
301
0
#endif
302
0
}
303
304
__attribute__((unused))
305
static inline
306
0
crypto_int16 crypto_int16_zero_01(crypto_int16 crypto_int16_x) {
307
0
#if defined(__GNUC__) && defined(__x86_64__)
308
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
309
0
  __asm__ ("xorw %0,%0\n movw $1,%1\n testw %2,%2\n cmovew %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x) : "cc");
310
0
  return crypto_int16_z;
311
0
#elif defined(__GNUC__) && defined(__aarch64__)
312
0
  crypto_int16 crypto_int16_z;
313
0
  __asm__ ("tst %w1,65535\n cset %w0,eq" : "=r"(crypto_int16_z) : "r"(crypto_int16_x) : "cc");
314
0
  return crypto_int16_z;
315
0
#else
316
0
  return 1-crypto_int16_nonzero_01(crypto_int16_x);
317
0
#endif
318
0
}
319
320
__attribute__((unused))
321
static inline
322
0
crypto_int16 crypto_int16_unequal_mask(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_y) {
323
0
#if defined(__GNUC__) && defined(__x86_64__)
324
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
325
0
  __asm__ ("xorw %0,%0\n movw $-1,%1\n cmpw %3,%2\n cmovnew %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
326
0
  return crypto_int16_z;
327
0
#elif defined(__GNUC__) && defined(__aarch64__)
328
0
  crypto_int16 crypto_int16_z;
329
0
  __asm__ ("and %w0,%w1,65535\n cmp %w0,%w2,uxth\n csetm %w0,ne" : "=&r"(crypto_int16_z) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
330
0
  return crypto_int16_z;
331
0
#else
332
0
  return crypto_int16_nonzero_mask(crypto_int16_x ^ crypto_int16_y);
333
0
#endif
334
0
}
335
336
__attribute__((unused))
337
static inline
338
0
crypto_int16 crypto_int16_unequal_01(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_y) {
339
0
#if defined(__GNUC__) && defined(__x86_64__)
340
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
341
0
  __asm__ ("xorw %0,%0\n movw $1,%1\n cmpw %3,%2\n cmovnew %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
342
0
  return crypto_int16_z;
343
0
#elif defined(__GNUC__) && defined(__aarch64__)
344
0
  crypto_int16 crypto_int16_z;
345
0
  __asm__ ("and %w0,%w1,65535\n cmp %w0,%w2,uxth\n cset %w0,ne" : "=&r"(crypto_int16_z) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
346
0
  return crypto_int16_z;
347
0
#else
348
0
  return crypto_int16_nonzero_01(crypto_int16_x ^ crypto_int16_y);
349
0
#endif
350
0
}
351
352
__attribute__((unused))
353
static inline
354
0
crypto_int16 crypto_int16_equal_mask(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_y) {
355
0
#if defined(__GNUC__) && defined(__x86_64__)
356
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
357
0
  __asm__ ("xorw %0,%0\n movw $-1,%1\n cmpw %3,%2\n cmovew %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
358
0
  return crypto_int16_z;
359
0
#elif defined(__GNUC__) && defined(__aarch64__)
360
0
  crypto_int16 crypto_int16_z;
361
0
  __asm__ ("and %w0,%w1,65535\n cmp %w0,%w2,uxth\n csetm %w0,eq" : "=&r"(crypto_int16_z) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
362
0
  return crypto_int16_z;
363
0
#else
364
0
  return ~crypto_int16_unequal_mask(crypto_int16_x,crypto_int16_y);
365
0
#endif
366
0
}
367
368
__attribute__((unused))
369
static inline
370
0
crypto_int16 crypto_int16_equal_01(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_y) {
371
0
#if defined(__GNUC__) && defined(__x86_64__)
372
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
373
0
  __asm__ ("xorw %0,%0\n movw $1,%1\n cmpw %3,%2\n cmovew %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
374
0
  return crypto_int16_z;
375
0
#elif defined(__GNUC__) && defined(__aarch64__)
376
0
  crypto_int16 crypto_int16_z;
377
0
  __asm__ ("and %w0,%w1,65535\n cmp %w0,%w2,uxth\n cset %w0,eq" : "=&r"(crypto_int16_z) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
378
0
  return crypto_int16_z;
379
0
#else
380
0
  return 1-crypto_int16_unequal_01(crypto_int16_x,crypto_int16_y);
381
0
#endif
382
0
}
383
384
__attribute__((unused))
385
static inline
386
0
crypto_int16 crypto_int16_min(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_y) {
387
0
#if defined(__GNUC__) && defined(__x86_64__)
388
0
  __asm__ ("cmpw %1,%0\n cmovgw %1,%0" : "+r"(crypto_int16_x) : "r"(crypto_int16_y) : "cc");
389
0
  return crypto_int16_x;
390
0
#elif defined(__GNUC__) && defined(__aarch64__)
391
0
  __asm__ ("sxth %w0,%w0\n cmp %w0,%w1,sxth\n csel %w0,%w0,%w1,lt" : "+&r"(crypto_int16_x) : "r"(crypto_int16_y) : "cc");
392
0
  return crypto_int16_x;
393
0
#else
394
0
  crypto_int16 crypto_int16_r = crypto_int16_y ^ crypto_int16_x;
395
0
  crypto_int16 crypto_int16_z = crypto_int16_y - crypto_int16_x;
396
0
  crypto_int16_z ^= crypto_int16_r & (crypto_int16_z ^ crypto_int16_y);
397
0
  crypto_int16_z = crypto_int16_negative_mask(crypto_int16_z);
398
0
  crypto_int16_z &= crypto_int16_r;
399
0
  return crypto_int16_x ^ crypto_int16_z;
400
0
#endif
401
0
}
402
403
__attribute__((unused))
404
static inline
405
0
crypto_int16 crypto_int16_max(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_y) {
406
0
#if defined(__GNUC__) && defined(__x86_64__)
407
0
  __asm__ ("cmpw %1,%0\n cmovlw %1,%0" : "+r"(crypto_int16_x) : "r"(crypto_int16_y) : "cc");
408
0
  return crypto_int16_x;
409
0
#elif defined(__GNUC__) && defined(__aarch64__)
410
0
  __asm__ ("sxth %w0,%w0\n cmp %w0,%w1,sxth\n csel %w0,%w1,%w0,lt" : "+&r"(crypto_int16_x) : "r"(crypto_int16_y) : "cc");
411
0
  return crypto_int16_x;
412
0
#else
413
0
  crypto_int16 crypto_int16_r = crypto_int16_y ^ crypto_int16_x;
414
0
  crypto_int16 crypto_int16_z = crypto_int16_y - crypto_int16_x;
415
0
  crypto_int16_z ^= crypto_int16_r & (crypto_int16_z ^ crypto_int16_y);
416
0
  crypto_int16_z = crypto_int16_negative_mask(crypto_int16_z);
417
0
  crypto_int16_z &= crypto_int16_r;
418
0
  return crypto_int16_y ^ crypto_int16_z;
419
0
#endif
420
0
}
421
422
__attribute__((unused))
423
static inline
424
0
void crypto_int16_minmax(crypto_int16 *crypto_int16_p,crypto_int16 *crypto_int16_q) {
425
0
  crypto_int16 crypto_int16_x = *crypto_int16_p;
426
0
  crypto_int16 crypto_int16_y = *crypto_int16_q;
427
0
#if defined(__GNUC__) && defined(__x86_64__)
428
0
  crypto_int16 crypto_int16_z;
429
0
  __asm__ ("cmpw %2,%1\n movw %1,%0\n cmovgw %2,%1\n cmovgw %0,%2" : "=&r"(crypto_int16_z), "+&r"(crypto_int16_x), "+r"(crypto_int16_y) : : "cc");
430
0
  *crypto_int16_p = crypto_int16_x;
431
0
  *crypto_int16_q = crypto_int16_y;
432
0
#elif defined(__GNUC__) && defined(__aarch64__)
433
0
  crypto_int16 crypto_int16_r, crypto_int16_s;
434
0
  __asm__ ("sxth %w0,%w0\n cmp %w0,%w3,sxth\n csel %w1,%w0,%w3,lt\n csel %w2,%w3,%w0,lt" : "+&r"(crypto_int16_x), "=&r"(crypto_int16_r), "=r"(crypto_int16_s) : "r"(crypto_int16_y) : "cc");
435
0
  *crypto_int16_p = crypto_int16_r;
436
0
  *crypto_int16_q = crypto_int16_s;
437
0
#else
438
0
  crypto_int16 crypto_int16_r = crypto_int16_y ^ crypto_int16_x;
439
0
  crypto_int16 crypto_int16_z = crypto_int16_y - crypto_int16_x;
440
0
  crypto_int16_z ^= crypto_int16_r & (crypto_int16_z ^ crypto_int16_y);
441
0
  crypto_int16_z = crypto_int16_negative_mask(crypto_int16_z);
442
0
  crypto_int16_z &= crypto_int16_r;
443
0
  crypto_int16_x ^= crypto_int16_z;
444
0
  crypto_int16_y ^= crypto_int16_z;
445
0
  *crypto_int16_p = crypto_int16_x;
446
0
  *crypto_int16_q = crypto_int16_y;
447
0
#endif
448
0
}
449
450
__attribute__((unused))
451
static inline
452
0
crypto_int16 crypto_int16_smaller_mask(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_y) {
453
0
#if defined(__GNUC__) && defined(__x86_64__)
454
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
455
0
  __asm__ ("xorw %0,%0\n movw $-1,%1\n cmpw %3,%2\n cmovlw %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
456
0
  return crypto_int16_z;
457
0
#elif defined(__GNUC__) && defined(__aarch64__)
458
0
  crypto_int16 crypto_int16_z;
459
0
  __asm__ ("sxth %w0,%w1\n cmp %w0,%w2,sxth\n csetm %w0,lt" : "=&r"(crypto_int16_z) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
460
0
  return crypto_int16_z;
461
0
#else
462
0
  crypto_int16 crypto_int16_r = crypto_int16_x ^ crypto_int16_y;
463
0
  crypto_int16 crypto_int16_z = crypto_int16_x - crypto_int16_y;
464
0
  crypto_int16_z ^= crypto_int16_r & (crypto_int16_z ^ crypto_int16_x);
465
0
  return crypto_int16_negative_mask(crypto_int16_z);
466
0
#endif
467
0
}
468
469
__attribute__((unused))
470
static inline
471
0
crypto_int16 crypto_int16_smaller_01(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_y) {
472
0
#if defined(__GNUC__) && defined(__x86_64__)
473
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
474
0
  __asm__ ("xorw %0,%0\n movw $1,%1\n cmpw %3,%2\n cmovlw %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
475
0
  return crypto_int16_z;
476
0
#elif defined(__GNUC__) && defined(__aarch64__)
477
0
  crypto_int16 crypto_int16_z;
478
0
  __asm__ ("sxth %w0,%w1\n cmp %w0,%w2,sxth\n cset %w0,lt" : "=&r"(crypto_int16_z) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
479
0
  return crypto_int16_z;
480
0
#else
481
0
  crypto_int16 crypto_int16_r = crypto_int16_x ^ crypto_int16_y;
482
0
  crypto_int16 crypto_int16_z = crypto_int16_x - crypto_int16_y;
483
0
  crypto_int16_z ^= crypto_int16_r & (crypto_int16_z ^ crypto_int16_x);
484
0
  return crypto_int16_unsigned_topbit_01(crypto_int16_z);
485
0
#endif
486
0
}
487
488
__attribute__((unused))
489
static inline
490
0
crypto_int16 crypto_int16_leq_mask(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_y) {
491
0
#if defined(__GNUC__) && defined(__x86_64__)
492
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
493
0
  __asm__ ("xorw %0,%0\n movw $-1,%1\n cmpw %3,%2\n cmovlew %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
494
0
  return crypto_int16_z;
495
0
#elif defined(__GNUC__) && defined(__aarch64__)
496
0
  crypto_int16 crypto_int16_z;
497
0
  __asm__ ("sxth %w0,%w1\n cmp %w0,%w2,sxth\n csetm %w0,le" : "=&r"(crypto_int16_z) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
498
0
  return crypto_int16_z;
499
0
#else
500
0
  return ~crypto_int16_smaller_mask(crypto_int16_y,crypto_int16_x);
501
0
#endif
502
0
}
503
504
__attribute__((unused))
505
static inline
506
0
crypto_int16 crypto_int16_leq_01(crypto_int16 crypto_int16_x,crypto_int16 crypto_int16_y) {
507
0
#if defined(__GNUC__) && defined(__x86_64__)
508
0
  crypto_int16 crypto_int16_q,crypto_int16_z;
509
0
  __asm__ ("xorw %0,%0\n movw $1,%1\n cmpw %3,%2\n cmovlew %1,%0" : "=&r"(crypto_int16_z), "=&r"(crypto_int16_q) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
510
0
  return crypto_int16_z;
511
0
#elif defined(__GNUC__) && defined(__aarch64__)
512
0
  crypto_int16 crypto_int16_z;
513
0
  __asm__ ("sxth %w0,%w1\n cmp %w0,%w2,sxth\n cset %w0,le" : "=&r"(crypto_int16_z) : "r"(crypto_int16_x), "r"(crypto_int16_y) : "cc");
514
0
  return crypto_int16_z;
515
0
#else
516
0
  return 1-crypto_int16_smaller_01(crypto_int16_y,crypto_int16_x);
517
0
#endif
518
0
}
519
520
__attribute__((unused))
521
static inline
522
0
int crypto_int16_ones_num(crypto_int16 crypto_int16_x) {
523
0
  crypto_int16_unsigned crypto_int16_y = crypto_int16_x;
524
0
  const crypto_int16 C0 = 0x5555;
525
0
  const crypto_int16 C1 = 0x3333;
526
0
  const crypto_int16 C2 = 0x0f0f;
527
0
  crypto_int16_y -= ((crypto_int16_y >> 1) & C0);
528
0
  crypto_int16_y = (crypto_int16_y & C1) + ((crypto_int16_y >> 2) & C1);
529
0
  crypto_int16_y = (crypto_int16_y + (crypto_int16_y >> 4)) & C2;
530
0
  crypto_int16_y = (crypto_int16_y + (crypto_int16_y >> 8)) & 0xff;
531
0
  return crypto_int16_y;
532
0
}
533
534
__attribute__((unused))
535
static inline
536
0
int crypto_int16_bottomzeros_num(crypto_int16 crypto_int16_x) {
537
0
#if defined(__GNUC__) && defined(__x86_64__)
538
0
  crypto_int16 fallback = 16;
539
0
  __asm__ ("bsfw %0,%0\n cmovew %1,%0" : "+&r"(crypto_int16_x) : "r"(fallback) : "cc");
540
0
  return crypto_int16_x;
541
0
#elif defined(__GNUC__) && defined(__aarch64__)
542
0
  int64_t crypto_int16_y;
543
0
  __asm__ ("orr %w0,%w1,-65536\n rbit %w0,%w0\n clz %w0,%w0" : "=r"(crypto_int16_y) : "r"(crypto_int16_x) : );
544
0
  return crypto_int16_y;
545
0
#else
546
0
  crypto_int16 crypto_int16_y = crypto_int16_x ^ (crypto_int16_x-1);
547
0
  crypto_int16_y = ((crypto_int16) crypto_int16_y) >> 1;
548
0
  crypto_int16_y &= ~(crypto_int16_x & (((crypto_int16) 1) << (16-1)));
549
0
  return crypto_int16_ones_num(crypto_int16_y);
550
0
#endif
551
0
}
552
553
#endif
554
555
/* from supercop-20240808/cryptoint/crypto_int32.h */
556
/* auto-generated: cd cryptoint; ./autogen */
557
/* cryptoint 20240806 */
558
559
#ifndef crypto_int32_h
560
#define crypto_int32_h
561
562
44.8M
#define crypto_int32 int32_t
563
#define crypto_int32_unsigned uint32_t
564
565
566
567
__attribute__((unused))
568
static inline
569
0
crypto_int32 crypto_int32_load(const unsigned char *crypto_int32_s) {
570
0
  crypto_int32 crypto_int32_z = 0;
571
0
  crypto_int32_z |= ((crypto_int32) (*crypto_int32_s++)) << 0;
572
0
  crypto_int32_z |= ((crypto_int32) (*crypto_int32_s++)) << 8;
573
0
  crypto_int32_z |= ((crypto_int32) (*crypto_int32_s++)) << 16;
574
0
  crypto_int32_z |= ((crypto_int32) (*crypto_int32_s++)) << 24;
575
0
  return crypto_int32_z;
576
0
}
577
578
__attribute__((unused))
579
static inline
580
0
void crypto_int32_store(unsigned char *crypto_int32_s,crypto_int32 crypto_int32_x) {
581
0
  *crypto_int32_s++ = crypto_int32_x >> 0;
582
0
  *crypto_int32_s++ = crypto_int32_x >> 8;
583
0
  *crypto_int32_s++ = crypto_int32_x >> 16;
584
0
  *crypto_int32_s++ = crypto_int32_x >> 24;
585
0
}
586
587
__attribute__((unused))
588
static inline
589
36.5k
crypto_int32 crypto_int32_negative_mask(crypto_int32 crypto_int32_x) {
590
36.5k
#if defined(__GNUC__) && defined(__x86_64__)
591
36.5k
  __asm__ ("sarl $31,%0" : "+r"(crypto_int32_x) : : "cc");
592
36.5k
  return crypto_int32_x;
593
#elif defined(__GNUC__) && defined(__aarch64__)
594
  crypto_int32 crypto_int32_y;
595
  __asm__ ("asr %w0,%w1,31" : "=r"(crypto_int32_y) : "r"(crypto_int32_x) : );
596
  return crypto_int32_y;
597
#else
598
  crypto_int32_x >>= 32-6;
599
  crypto_int32_x ^= crypto_int32_optblocker;
600
  crypto_int32_x >>= 5;
601
  return crypto_int32_x;
602
#endif
603
36.5k
}
604
605
__attribute__((unused))
606
static inline
607
0
crypto_int32_unsigned crypto_int32_unsigned_topbit_01(crypto_int32_unsigned crypto_int32_x) {
608
0
#if defined(__GNUC__) && defined(__x86_64__)
609
0
  __asm__ ("shrl $31,%0" : "+r"(crypto_int32_x) : : "cc");
610
0
  return crypto_int32_x;
611
0
#elif defined(__GNUC__) && defined(__aarch64__)
612
0
  crypto_int32 crypto_int32_y;
613
0
  __asm__ ("lsr %w0,%w1,31" : "=r"(crypto_int32_y) : "r"(crypto_int32_x) : );
614
0
  return crypto_int32_y;
615
0
#else
616
0
  crypto_int32_x >>= 32-6;
617
0
  crypto_int32_x ^= crypto_int32_optblocker;
618
0
  crypto_int32_x >>= 5;
619
0
  return crypto_int32_x;
620
0
#endif
621
0
}
622
623
__attribute__((unused))
624
static inline
625
0
crypto_int32 crypto_int32_negative_01(crypto_int32 crypto_int32_x) {
626
0
  return crypto_int32_unsigned_topbit_01(crypto_int32_x);
627
0
}
628
629
__attribute__((unused))
630
static inline
631
0
crypto_int32 crypto_int32_topbit_mask(crypto_int32 crypto_int32_x) {
632
0
  return crypto_int32_negative_mask(crypto_int32_x);
633
0
}
634
635
__attribute__((unused))
636
static inline
637
0
crypto_int32 crypto_int32_topbit_01(crypto_int32 crypto_int32_x) {
638
0
  return crypto_int32_unsigned_topbit_01(crypto_int32_x);
639
0
}
640
641
__attribute__((unused))
642
static inline
643
0
crypto_int32 crypto_int32_bottombit_mask(crypto_int32 crypto_int32_x) {
644
0
#if defined(__GNUC__) && defined(__x86_64__)
645
0
  __asm__ ("andl $1,%0" : "+r"(crypto_int32_x) : : "cc");
646
0
  return -crypto_int32_x;
647
0
#elif defined(__GNUC__) && defined(__aarch64__)
648
0
  crypto_int32 crypto_int32_y;
649
0
  __asm__ ("sbfx %w0,%w1,0,1" : "=r"(crypto_int32_y) : "r"(crypto_int32_x) : );
650
0
  return crypto_int32_y;
651
0
#else
652
0
  crypto_int32_x &= 1 ^ crypto_int32_optblocker;
653
0
  return -crypto_int32_x;
654
0
#endif
655
0
}
656
657
__attribute__((unused))
658
static inline
659
0
crypto_int32 crypto_int32_bottombit_01(crypto_int32 crypto_int32_x) {
660
0
#if defined(__GNUC__) && defined(__x86_64__)
661
0
  __asm__ ("andl $1,%0" : "+r"(crypto_int32_x) : : "cc");
662
0
  return crypto_int32_x;
663
0
#elif defined(__GNUC__) && defined(__aarch64__)
664
0
  crypto_int32 crypto_int32_y;
665
0
  __asm__ ("ubfx %w0,%w1,0,1" : "=r"(crypto_int32_y) : "r"(crypto_int32_x) : );
666
0
  return crypto_int32_y;
667
0
#else
668
0
  crypto_int32_x &= 1 ^ crypto_int32_optblocker;
669
0
  return crypto_int32_x;
670
0
#endif
671
0
}
672
673
__attribute__((unused))
674
static inline
675
0
crypto_int32 crypto_int32_bitinrangepublicpos_mask(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_s) {
676
0
#if defined(__GNUC__) && defined(__x86_64__)
677
0
  __asm__ ("sarl %%cl,%0" : "+r"(crypto_int32_x) : "c"(crypto_int32_s) : "cc");
678
0
#elif defined(__GNUC__) && defined(__aarch64__)
679
0
  __asm__ ("asr %w0,%w0,%w1" : "+r"(crypto_int32_x) : "r"(crypto_int32_s) : );
680
0
#else
681
0
  crypto_int32_x >>= crypto_int32_s ^ crypto_int32_optblocker;
682
0
#endif
683
0
  return crypto_int32_bottombit_mask(crypto_int32_x);
684
0
}
685
686
__attribute__((unused))
687
static inline
688
0
crypto_int32 crypto_int32_bitinrangepublicpos_01(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_s) {
689
0
#if defined(__GNUC__) && defined(__x86_64__)
690
0
  __asm__ ("sarl %%cl,%0" : "+r"(crypto_int32_x) : "c"(crypto_int32_s) : "cc");
691
0
#elif defined(__GNUC__) && defined(__aarch64__)
692
0
  __asm__ ("asr %w0,%w0,%w1" : "+r"(crypto_int32_x) : "r"(crypto_int32_s) : );
693
0
#else
694
0
  crypto_int32_x >>= crypto_int32_s ^ crypto_int32_optblocker;
695
0
#endif
696
0
  return crypto_int32_bottombit_01(crypto_int32_x);
697
0
}
698
699
__attribute__((unused))
700
static inline
701
0
crypto_int32 crypto_int32_shlmod(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_s) {
702
0
#if defined(__GNUC__) && defined(__x86_64__)
703
0
  __asm__ ("shll %%cl,%0" : "+r"(crypto_int32_x) : "c"(crypto_int32_s) : "cc");
704
0
#elif defined(__GNUC__) && defined(__aarch64__)
705
0
  __asm__ ("lsl %w0,%w0,%w1" : "+r"(crypto_int32_x) : "r"(crypto_int32_s) : );
706
0
#else
707
0
  int crypto_int32_k, crypto_int32_l;
708
0
  for (crypto_int32_l = 0,crypto_int32_k = 1;crypto_int32_k < 32;++crypto_int32_l,crypto_int32_k *= 2)
709
0
    crypto_int32_x ^= (crypto_int32_x ^ (crypto_int32_x << crypto_int32_k)) & crypto_int32_bitinrangepublicpos_mask(crypto_int32_s,crypto_int32_l);
710
0
#endif
711
0
  return crypto_int32_x;
712
0
}
713
714
/* Constant-time arithmetic right shift of x by s mod 32; s may be secret.
 * Mirror image of crypto_int32_shlmod. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_shrmod(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_s) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("sarl %%cl,%0" : "+r"(crypto_int32_x) : "c"(crypto_int32_s) : "cc");
#elif defined(__GNUC__) && defined(__aarch64__)
  __asm__ ("asr %w0,%w0,%w1" : "+r"(crypto_int32_x) : "r"(crypto_int32_s) : );
#else
  int crypto_int32_k, crypto_int32_l;
  for (crypto_int32_l = 0,crypto_int32_k = 1;crypto_int32_k < 32;++crypto_int32_l,crypto_int32_k *= 2)
    crypto_int32_x ^= (crypto_int32_x ^ (crypto_int32_x >> crypto_int32_k)) & crypto_int32_bitinrangepublicpos_mask(crypto_int32_s,crypto_int32_l);
#endif
  return crypto_int32_x;
}
728
729
/* Returns bit (s mod 32) of x as an all-ones/zero mask; both x and s
 * may be secret (shrmod handles a secret shift count). */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_bitmod_mask(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_s) {
  crypto_int32_x = crypto_int32_shrmod(crypto_int32_x,crypto_int32_s);
  return crypto_int32_bottombit_mask(crypto_int32_x);
}
735
736
/* Returns bit (s mod 32) of x as 0 or 1; both x and s may be secret. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_bitmod_01(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_s) {
  crypto_int32_x = crypto_int32_shrmod(crypto_int32_x,crypto_int32_s);
  return crypto_int32_bottombit_01(crypto_int32_x);
}
742
743
/* Branchless test: returns -1 (all ones) if x != 0, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_nonzero_mask(crypto_int32 crypto_int32_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $-1,%1\n testl %2,%2\n cmovnel %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,0\n csetm %w0,ne" : "=r"(crypto_int32_z) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#else
  /* x | -x has its sign bit set exactly when x != 0. */
  crypto_int32_x |= -crypto_int32_x;
  return crypto_int32_negative_mask(crypto_int32_x);
#endif
}
759
760
/* Branchless test: returns 1 if x != 0, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_nonzero_01(crypto_int32 crypto_int32_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $1,%1\n testl %2,%2\n cmovnel %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,0\n cset %w0,ne" : "=r"(crypto_int32_z) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#else
  crypto_int32_x |= -crypto_int32_x;
  return crypto_int32_unsigned_topbit_01(crypto_int32_x);
#endif
}
776
777
/* Branchless signed test: returns -1 (all ones) if x > 0, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_positive_mask(crypto_int32 crypto_int32_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $-1,%1\n testl %2,%2\n cmovgl %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,0\n csetm %w0,gt" : "=r"(crypto_int32_z) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#else
  /* z = -x with the bits shared with x cleared; its sign bit is set
   * exactly when x > 0. */
  crypto_int32 crypto_int32_z = -crypto_int32_x;
  crypto_int32_z ^= crypto_int32_x & crypto_int32_z;
  return crypto_int32_negative_mask(crypto_int32_z);
#endif
}
794
795
/* Branchless signed test: returns 1 if x > 0, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_positive_01(crypto_int32 crypto_int32_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $1,%1\n testl %2,%2\n cmovgl %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,0\n cset %w0,gt" : "=r"(crypto_int32_z) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#else
  crypto_int32 crypto_int32_z = -crypto_int32_x;
  crypto_int32_z ^= crypto_int32_x & crypto_int32_z;
  return crypto_int32_unsigned_topbit_01(crypto_int32_z);
#endif
}
812
813
/* Branchless test: returns -1 (all ones) if x == 0, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_zero_mask(crypto_int32 crypto_int32_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $-1,%1\n testl %2,%2\n cmovel %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,0\n csetm %w0,eq" : "=r"(crypto_int32_z) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#else
  return ~crypto_int32_nonzero_mask(crypto_int32_x);
#endif
}
828
829
/* Branchless test: returns 1 if x == 0, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_zero_01(crypto_int32 crypto_int32_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $1,%1\n testl %2,%2\n cmovel %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,0\n cset %w0,eq" : "=r"(crypto_int32_z) : "r"(crypto_int32_x) : "cc");
  return crypto_int32_z;
#else
  return 1-crypto_int32_nonzero_01(crypto_int32_x);
#endif
}
844
845
/* Branchless comparison: returns -1 (all ones) if x != y, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_unequal_mask(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $-1,%1\n cmpl %3,%2\n cmovnel %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,%w2\n csetm %w0,ne" : "=r"(crypto_int32_z) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#else
  return crypto_int32_nonzero_mask(crypto_int32_x ^ crypto_int32_y);
#endif
}
860
861
/* Branchless comparison: returns 1 if x != y, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_unequal_01(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $1,%1\n cmpl %3,%2\n cmovnel %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,%w2\n cset %w0,ne" : "=r"(crypto_int32_z) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#else
  return crypto_int32_nonzero_01(crypto_int32_x ^ crypto_int32_y);
#endif
}
876
877
/* Branchless comparison: returns -1 (all ones) if x == y, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_equal_mask(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $-1,%1\n cmpl %3,%2\n cmovel %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,%w2\n csetm %w0,eq" : "=r"(crypto_int32_z) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#else
  return ~crypto_int32_unequal_mask(crypto_int32_x,crypto_int32_y);
#endif
}
892
893
/* Branchless comparison: returns 1 if x == y, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_equal_01(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $1,%1\n cmpl %3,%2\n cmovel %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,%w2\n cset %w0,eq" : "=r"(crypto_int32_z) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#else
  return 1-crypto_int32_unequal_01(crypto_int32_x,crypto_int32_y);
#endif
}
908
909
/* Branchless signed minimum of x and y. The generic path widens to 64
 * bits so y - x cannot overflow, derives a sign mask for (y < x), and
 * conditionally XOR-swaps. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_min(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("cmpl %1,%0\n cmovgl %1,%0" : "+r"(crypto_int32_x) : "r"(crypto_int32_y) : "cc");
  return crypto_int32_x;
#elif defined(__GNUC__) && defined(__aarch64__)
  __asm__ ("cmp %w0,%w1\n csel %w0,%w0,%w1,lt" : "+r"(crypto_int32_x) : "r"(crypto_int32_y) : "cc");
  return crypto_int32_x;
#else
  crypto_int64 crypto_int32_r = (crypto_int64)crypto_int32_y ^ (crypto_int64)crypto_int32_x;
  crypto_int64 crypto_int32_z = (crypto_int64)crypto_int32_y - (crypto_int64)crypto_int32_x;
  crypto_int32_z ^= crypto_int32_r & (crypto_int32_z ^ crypto_int32_y);
  crypto_int32_z = crypto_int32_negative_mask(crypto_int32_z);
  crypto_int32_z &= crypto_int32_r;
  return crypto_int32_x ^ crypto_int32_z;
#endif
}
927
928
/* Branchless signed maximum of x and y; mirror of crypto_int32_min
 * (returns y ^ mask instead of x ^ mask in the generic path). */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_max(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("cmpl %1,%0\n cmovll %1,%0" : "+r"(crypto_int32_x) : "r"(crypto_int32_y) : "cc");
  return crypto_int32_x;
#elif defined(__GNUC__) && defined(__aarch64__)
  __asm__ ("cmp %w0,%w1\n csel %w0,%w1,%w0,lt" : "+r"(crypto_int32_x) : "r"(crypto_int32_y) : "cc");
  return crypto_int32_x;
#else
  crypto_int64 crypto_int32_r = (crypto_int64)crypto_int32_y ^ (crypto_int64)crypto_int32_x;
  crypto_int64 crypto_int32_z = (crypto_int64)crypto_int32_y - (crypto_int64)crypto_int32_x;
  crypto_int32_z ^= crypto_int32_r & (crypto_int32_z ^ crypto_int32_y);
  crypto_int32_z = crypto_int32_negative_mask(crypto_int32_z);
  crypto_int32_z &= crypto_int32_r;
  return crypto_int32_y ^ crypto_int32_z;
#endif
}
946
947
/* Branchless compare-and-swap: on return *p <= *q (signed).
 * This is the hot sorting-network primitive (heavily executed per the
 * surrounding coverage counts). */
__attribute__((unused))
static inline
void crypto_int32_minmax(crypto_int32 *crypto_int32_p,crypto_int32 *crypto_int32_q) {
  crypto_int32 crypto_int32_x = *crypto_int32_p;
  crypto_int32 crypto_int32_y = *crypto_int32_q;
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_z;
  /* Two cmovg's swap x and y when x > y, without a branch. */
  __asm__ ("cmpl %2,%1\n movl %1,%0\n cmovgl %2,%1\n cmovgl %0,%2" : "=&r"(crypto_int32_z), "+&r"(crypto_int32_x), "+r"(crypto_int32_y) : : "cc");
  *crypto_int32_p = crypto_int32_x;
  *crypto_int32_q = crypto_int32_y;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_r, crypto_int32_s;
  __asm__ ("cmp %w2,%w3\n csel %w0,%w2,%w3,lt\n csel %w1,%w3,%w2,lt" : "=&r"(crypto_int32_r), "=r"(crypto_int32_s) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  *crypto_int32_p = crypto_int32_r;
  *crypto_int32_q = crypto_int32_s;
#else
  /* 64-bit-widened sign-mask XOR swap, as in crypto_int32_min/max. */
  crypto_int64 crypto_int32_r = (crypto_int64)crypto_int32_y ^ (crypto_int64)crypto_int32_x;
  crypto_int64 crypto_int32_z = (crypto_int64)crypto_int32_y - (crypto_int64)crypto_int32_x;
  crypto_int32_z ^= crypto_int32_r & (crypto_int32_z ^ crypto_int32_y);
  crypto_int32_z = crypto_int32_negative_mask(crypto_int32_z);
  crypto_int32_z &= crypto_int32_r;
  crypto_int32_x ^= crypto_int32_z;
  crypto_int32_y ^= crypto_int32_z;
  *crypto_int32_p = crypto_int32_x;
  *crypto_int32_q = crypto_int32_y;
#endif
}
974
975
/* Branchless signed comparison: returns -1 (all ones) if x < y, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_smaller_mask(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $-1,%1\n cmpl %3,%2\n cmovll %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,%w2\n csetm %w0,lt" : "=r"(crypto_int32_z) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#else
  /* Overflow-safe: correct sign of x - y recovered via the xor mask. */
  crypto_int32 crypto_int32_r = crypto_int32_x ^ crypto_int32_y;
  crypto_int32 crypto_int32_z = crypto_int32_x - crypto_int32_y;
  crypto_int32_z ^= crypto_int32_r & (crypto_int32_z ^ crypto_int32_x);
  return crypto_int32_negative_mask(crypto_int32_z);
#endif
}
993
994
/* Branchless signed comparison: returns 1 if x < y, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_smaller_01(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $1,%1\n cmpl %3,%2\n cmovll %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,%w2\n cset %w0,lt" : "=r"(crypto_int32_z) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#else
  crypto_int32 crypto_int32_r = crypto_int32_x ^ crypto_int32_y;
  crypto_int32 crypto_int32_z = crypto_int32_x - crypto_int32_y;
  crypto_int32_z ^= crypto_int32_r & (crypto_int32_z ^ crypto_int32_x);
  return crypto_int32_unsigned_topbit_01(crypto_int32_z);
#endif
}
1012
1013
/* Branchless signed comparison: returns -1 (all ones) if x <= y, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_leq_mask(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $-1,%1\n cmpl %3,%2\n cmovlel %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,%w2\n csetm %w0,le" : "=r"(crypto_int32_z) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#else
  /* x <= y  <=>  !(y < x) */
  return ~crypto_int32_smaller_mask(crypto_int32_y,crypto_int32_x);
#endif
}
1028
1029
/* Branchless signed comparison: returns 1 if x <= y, else 0. */
__attribute__((unused))
static inline
crypto_int32 crypto_int32_leq_01(crypto_int32 crypto_int32_x,crypto_int32 crypto_int32_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 crypto_int32_q,crypto_int32_z;
  __asm__ ("xorl %0,%0\n movl $1,%1\n cmpl %3,%2\n cmovlel %1,%0" : "=&r"(crypto_int32_z), "=&r"(crypto_int32_q) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int32 crypto_int32_z;
  __asm__ ("cmp %w1,%w2\n cset %w0,le" : "=r"(crypto_int32_z) : "r"(crypto_int32_x), "r"(crypto_int32_y) : "cc");
  return crypto_int32_z;
#else
  return 1-crypto_int32_smaller_01(crypto_int32_y,crypto_int32_x);
#endif
}
1044
1045
/* Population count: number of 1 bits in x (0..32), computed with the
 * classic branch-free SWAR reduction on the unsigned reinterpretation. */
__attribute__((unused))
static inline
int crypto_int32_ones_num(crypto_int32 crypto_int32_x) {
  crypto_int32_unsigned crypto_int32_y = crypto_int32_x;
  const crypto_int32 C0 = 0x55555555;
  const crypto_int32 C1 = 0x33333333;
  const crypto_int32 C2 = 0x0f0f0f0f;
  /* pairwise bit sums */
  crypto_int32_y -= ((crypto_int32_y >> 1) & C0);
  /* nibble sums */
  crypto_int32_y = (crypto_int32_y & C1) + ((crypto_int32_y >> 2) & C1);
  /* byte sums */
  crypto_int32_y = (crypto_int32_y + (crypto_int32_y >> 4)) & C2;
  crypto_int32_y += crypto_int32_y >> 8;
  crypto_int32_y = (crypto_int32_y + (crypto_int32_y >> 16)) & 0xff;
  return crypto_int32_y;
}
1059
1060
/* Count of trailing zero bits in x; returns 32 when x == 0 (explicit
 * cmove fallback on x86, and the generic path yields popcount(-1)=32). */
__attribute__((unused))
static inline
int crypto_int32_bottomzeros_num(crypto_int32 crypto_int32_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int32 fallback = 32;
  __asm__ ("bsfl %0,%0\n cmovel %1,%0" : "+&r"(crypto_int32_x) : "r"(fallback) : "cc");
  return crypto_int32_x;
#elif defined(__GNUC__) && defined(__aarch64__)
  int64_t crypto_int32_y;
  __asm__ ("rbit %w0,%w1\n clz %w0,%w0" : "=r"(crypto_int32_y) : "r"(crypto_int32_x) : );
  return crypto_int32_y;
#else
  /* x ^ (x-1) = ones through the lowest set bit; shift and count them. */
  crypto_int32 crypto_int32_y = crypto_int32_x ^ (crypto_int32_x-1);
  crypto_int32_y = ((crypto_int32) crypto_int32_y) >> 1;
  /* clear the sign-extension artifact when only the top bit was set */
  crypto_int32_y &= ~(crypto_int32_x & (((crypto_int32) 1) << (32-1)));
  return crypto_int32_ones_num(crypto_int32_y);
#endif
}
1078
1079
#endif
1080
1081
/* from supercop-20240808/cryptoint/crypto_int64.h */
1082
/* auto-generated: cd cryptoint; ./autogen */
1083
/* cryptoint 20240806 */
1084
1085
#ifndef crypto_int64_h
1086
#define crypto_int64_h
1087
1088
#define crypto_int64 int64_t
1089
#define crypto_int64_unsigned uint64_t
1090
1091
1092
1093
/* Loads a 64-bit little-endian value from 8 bytes at s. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_load(const unsigned char *crypto_int64_s) {
  crypto_int64 crypto_int64_z = 0;
  crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 0;
  crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 8;
  crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 16;
  crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 24;
  crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 32;
  crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 40;
  crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 48;
  crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 56;
  return crypto_int64_z;
}
1107
1108
/* Stores x as 8 little-endian bytes at s. */
__attribute__((unused))
static inline
void crypto_int64_store(unsigned char *crypto_int64_s,crypto_int64 crypto_int64_x) {
  *crypto_int64_s++ = crypto_int64_x >> 0;
  *crypto_int64_s++ = crypto_int64_x >> 8;
  *crypto_int64_s++ = crypto_int64_x >> 16;
  *crypto_int64_s++ = crypto_int64_x >> 24;
  *crypto_int64_s++ = crypto_int64_x >> 32;
  *crypto_int64_s++ = crypto_int64_x >> 40;
  *crypto_int64_s++ = crypto_int64_x >> 48;
  *crypto_int64_s++ = crypto_int64_x >> 56;
}
1120
1121
/* Returns -1 (all ones) if x < 0, else 0: arithmetic shift right by 63.
 * The generic path splits the shift (58 then 5) around an opaque
 * volatile read to keep the compiler from "optimizing" the pattern. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_negative_mask(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("sarq $63,%0" : "+r"(crypto_int64_x) : : "cc");
  return crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_y;
  __asm__ ("asr %0,%1,63" : "=r"(crypto_int64_y) : "r"(crypto_int64_x) : );
  return crypto_int64_y;
#else
  crypto_int64_x >>= 64-6;
  crypto_int64_x ^= crypto_int64_optblocker;
  crypto_int64_x >>= 5;
  return crypto_int64_x;
#endif
}
1138
1139
/* Returns the top bit of x as 0 or 1 (logical shift right by 63; the
 * parameter is unsigned, so the split generic shift is logical too). */
__attribute__((unused))
static inline
crypto_int64_unsigned crypto_int64_unsigned_topbit_01(crypto_int64_unsigned crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("shrq $63,%0" : "+r"(crypto_int64_x) : : "cc");
  return crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_y;
  __asm__ ("lsr %0,%1,63" : "=r"(crypto_int64_y) : "r"(crypto_int64_x) : );
  return crypto_int64_y;
#else
  crypto_int64_x >>= 64-6;
  crypto_int64_x ^= crypto_int64_optblocker;
  crypto_int64_x >>= 5;
  return crypto_int64_x;
#endif
}
1156
1157
/* Returns 1 if x < 0, else 0 (sign bit as 0/1). */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_negative_01(crypto_int64 crypto_int64_x) {
  return crypto_int64_unsigned_topbit_01(crypto_int64_x);
}
1162
1163
/* Top bit of x replicated to all 64 bits (alias of negative_mask). */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_topbit_mask(crypto_int64 crypto_int64_x) {
  return crypto_int64_negative_mask(crypto_int64_x);
}
1168
1169
/* Top bit of x as 0 or 1 (alias of unsigned_topbit_01). */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_topbit_01(crypto_int64 crypto_int64_x) {
  return crypto_int64_unsigned_topbit_01(crypto_int64_x);
}
1174
1175
/* Bottom bit of x replicated to all 64 bits: -1 if x is odd, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_bottombit_mask(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("andq $1,%0" : "+r"(crypto_int64_x) : : "cc");
  return -crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_y;
  __asm__ ("sbfx %0,%1,0,1" : "=r"(crypto_int64_y) : "r"(crypto_int64_x) : );
  return crypto_int64_y;
#else
  crypto_int64_x &= 1 ^ crypto_int64_optblocker;
  return -crypto_int64_x;
#endif
}
1190
1191
/* Bottom bit of x as 0 or 1. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_bottombit_01(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("andq $1,%0" : "+r"(crypto_int64_x) : : "cc");
  return crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_y;
  __asm__ ("ubfx %0,%1,0,1" : "=r"(crypto_int64_y) : "r"(crypto_int64_x) : );
  return crypto_int64_y;
#else
  crypto_int64_x &= 1 ^ crypto_int64_optblocker;
  return crypto_int64_x;
#endif
}
1206
1207
/* Returns bit s of x as an all-ones/zero mask; the position s must be
 * public (in 0..63), only the value x is secret. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_bitinrangepublicpos_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_s) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("sarq %%cl,%0" : "+r"(crypto_int64_x) : "c"(crypto_int64_s) : "cc");
#elif defined(__GNUC__) && defined(__aarch64__)
  __asm__ ("asr %0,%0,%1" : "+r"(crypto_int64_x) : "r"(crypto_int64_s) : );
#else
  crypto_int64_x >>= crypto_int64_s ^ crypto_int64_optblocker;
#endif
  return crypto_int64_bottombit_mask(crypto_int64_x);
}
1219
1220
/* Returns bit s of x as 0 or 1; the position s must be public (0..63). */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_bitinrangepublicpos_01(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_s) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("sarq %%cl,%0" : "+r"(crypto_int64_x) : "c"(crypto_int64_s) : "cc");
#elif defined(__GNUC__) && defined(__aarch64__)
  __asm__ ("asr %0,%0,%1" : "+r"(crypto_int64_x) : "r"(crypto_int64_s) : );
#else
  crypto_int64_x >>= crypto_int64_s ^ crypto_int64_optblocker;
#endif
  return crypto_int64_bottombit_01(crypto_int64_x);
}
1232
1233
/* Constant-time left shift of x by s mod 64; s may be secret. Generic
 * path composes power-of-two shifts masked by the bits of s. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_shlmod(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_s) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("shlq %%cl,%0" : "+r"(crypto_int64_x) : "c"(crypto_int64_s) : "cc");
#elif defined(__GNUC__) && defined(__aarch64__)
  __asm__ ("lsl %0,%0,%1" : "+r"(crypto_int64_x) : "r"(crypto_int64_s) : );
#else
  int crypto_int64_k, crypto_int64_l;
  for (crypto_int64_l = 0,crypto_int64_k = 1;crypto_int64_k < 64;++crypto_int64_l,crypto_int64_k *= 2)
    crypto_int64_x ^= (crypto_int64_x ^ (crypto_int64_x << crypto_int64_k)) & crypto_int64_bitinrangepublicpos_mask(crypto_int64_s,crypto_int64_l);
#endif
  return crypto_int64_x;
}
1247
1248
/* Constant-time arithmetic right shift of x by s mod 64; s may be secret. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_shrmod(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_s) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("sarq %%cl,%0" : "+r"(crypto_int64_x) : "c"(crypto_int64_s) : "cc");
#elif defined(__GNUC__) && defined(__aarch64__)
  __asm__ ("asr %0,%0,%1" : "+r"(crypto_int64_x) : "r"(crypto_int64_s) : );
#else
  int crypto_int64_k, crypto_int64_l;
  for (crypto_int64_l = 0,crypto_int64_k = 1;crypto_int64_k < 64;++crypto_int64_l,crypto_int64_k *= 2)
    crypto_int64_x ^= (crypto_int64_x ^ (crypto_int64_x >> crypto_int64_k)) & crypto_int64_bitinrangepublicpos_mask(crypto_int64_s,crypto_int64_l);
#endif
  return crypto_int64_x;
}
1262
1263
/* Returns bit (s mod 64) of x as an all-ones/zero mask; x and s may
 * both be secret. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_bitmod_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_s) {
  crypto_int64_x = crypto_int64_shrmod(crypto_int64_x,crypto_int64_s);
  return crypto_int64_bottombit_mask(crypto_int64_x);
}
1269
1270
/* Returns bit (s mod 64) of x as 0 or 1; x and s may both be secret. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_bitmod_01(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_s) {
  crypto_int64_x = crypto_int64_shrmod(crypto_int64_x,crypto_int64_s);
  return crypto_int64_bottombit_01(crypto_int64_x);
}
1276
1277
/* Branchless test: returns -1 (all ones) if x != 0, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_nonzero_mask(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $-1,%1\n testq %2,%2\n cmovneq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,0\n csetm %0,ne" : "=r"(crypto_int64_z) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#else
  crypto_int64_x |= -crypto_int64_x;
  return crypto_int64_negative_mask(crypto_int64_x);
#endif
}
1293
1294
/* Branchless test: returns 1 if x != 0, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_nonzero_01(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $1,%1\n testq %2,%2\n cmovneq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,0\n cset %0,ne" : "=r"(crypto_int64_z) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#else
  crypto_int64_x |= -crypto_int64_x;
  return crypto_int64_unsigned_topbit_01(crypto_int64_x);
#endif
}
1310
1311
/* Branchless signed test: returns -1 (all ones) if x > 0, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_positive_mask(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $-1,%1\n testq %2,%2\n cmovgq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,0\n csetm %0,gt" : "=r"(crypto_int64_z) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#else
  crypto_int64 crypto_int64_z = -crypto_int64_x;
  crypto_int64_z ^= crypto_int64_x & crypto_int64_z;
  return crypto_int64_negative_mask(crypto_int64_z);
#endif
}
1328
1329
/* Branchless signed test: returns 1 if x > 0, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_positive_01(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $1,%1\n testq %2,%2\n cmovgq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,0\n cset %0,gt" : "=r"(crypto_int64_z) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#else
  crypto_int64 crypto_int64_z = -crypto_int64_x;
  crypto_int64_z ^= crypto_int64_x & crypto_int64_z;
  return crypto_int64_unsigned_topbit_01(crypto_int64_z);
#endif
}
1346
1347
/* Branchless test: returns -1 (all ones) if x == 0, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_zero_mask(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $-1,%1\n testq %2,%2\n cmoveq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,0\n csetm %0,eq" : "=r"(crypto_int64_z) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#else
  return ~crypto_int64_nonzero_mask(crypto_int64_x);
#endif
}
1362
1363
/* Branchless test: returns 1 if x == 0, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_zero_01(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $1,%1\n testq %2,%2\n cmoveq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,0\n cset %0,eq" : "=r"(crypto_int64_z) : "r"(crypto_int64_x) : "cc");
  return crypto_int64_z;
#else
  return 1-crypto_int64_nonzero_01(crypto_int64_x);
#endif
}
1378
1379
/* Branchless comparison: returns -1 (all ones) if x != y, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_unequal_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $-1,%1\n cmpq %3,%2\n cmovneq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,%2\n csetm %0,ne" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#else
  return crypto_int64_nonzero_mask(crypto_int64_x ^ crypto_int64_y);
#endif
}
1394
1395
/* Branchless comparison: returns 1 if x != y, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_unequal_01(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $1,%1\n cmpq %3,%2\n cmovneq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,%2\n cset %0,ne" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#else
  return crypto_int64_nonzero_01(crypto_int64_x ^ crypto_int64_y);
#endif
}
1410
1411
/* Branchless comparison: returns -1 (all ones) if x == y, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_equal_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $-1,%1\n cmpq %3,%2\n cmoveq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,%2\n csetm %0,eq" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#else
  return ~crypto_int64_unequal_mask(crypto_int64_x,crypto_int64_y);
#endif
}
1426
1427
/* Constant-time compare: 1 if x == y, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_equal_01(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $1,%1\n cmpq %3,%2\n cmoveq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,%2\n cset %0,eq" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#else
  return 1-crypto_int64_unequal_01(crypto_int64_x,crypto_int64_y);
#endif
}
1442
1443
/* Constant-time signed minimum of x and y. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_min(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("cmpq %1,%0\n cmovgq %1,%0" : "+r"(crypto_int64_x) : "r"(crypto_int64_y) : "cc");
  return crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
  __asm__ ("cmp %0,%1\n csel %0,%0,%1,lt" : "+r"(crypto_int64_x) : "r"(crypto_int64_y) : "cc");
  return crypto_int64_x;
#else
  /* Branch-free fallback: build an overflow-safe sign of (y - x), then
     conditionally swap x toward y via masked XOR. */
  crypto_int64 crypto_int64_r = crypto_int64_y ^ crypto_int64_x;
  crypto_int64 crypto_int64_z = crypto_int64_y - crypto_int64_x;
  crypto_int64_z ^= crypto_int64_r & (crypto_int64_z ^ crypto_int64_y);
  crypto_int64_z = crypto_int64_negative_mask(crypto_int64_z);
  crypto_int64_z &= crypto_int64_r;
  return crypto_int64_x ^ crypto_int64_z;
#endif
}
1461
1462
/* Constant-time signed maximum of x and y. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_max(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  __asm__ ("cmpq %1,%0\n cmovlq %1,%0" : "+r"(crypto_int64_x) : "r"(crypto_int64_y) : "cc");
  return crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
  __asm__ ("cmp %0,%1\n csel %0,%1,%0,lt" : "+r"(crypto_int64_x) : "r"(crypto_int64_y) : "cc");
  return crypto_int64_x;
#else
  /* Same masked-XOR trick as crypto_int64_min, but resolved toward y. */
  crypto_int64 crypto_int64_r = crypto_int64_y ^ crypto_int64_x;
  crypto_int64 crypto_int64_z = crypto_int64_y - crypto_int64_x;
  crypto_int64_z ^= crypto_int64_r & (crypto_int64_z ^ crypto_int64_y);
  crypto_int64_z = crypto_int64_negative_mask(crypto_int64_z);
  crypto_int64_z &= crypto_int64_r;
  return crypto_int64_y ^ crypto_int64_z;
#endif
}
1480
1481
/* Constant-time compare-exchange: on return *p <= *q (signed), values swapped if needed. */
__attribute__((unused))
static inline
void crypto_int64_minmax(crypto_int64 *crypto_int64_p,crypto_int64 *crypto_int64_q) {
  crypto_int64 crypto_int64_x = *crypto_int64_p;
  crypto_int64 crypto_int64_y = *crypto_int64_q;
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmpq %2,%1\n movq %1,%0\n cmovgq %2,%1\n cmovgq %0,%2" : "=&r"(crypto_int64_z), "+&r"(crypto_int64_x), "+r"(crypto_int64_y) : : "cc");
  *crypto_int64_p = crypto_int64_x;
  *crypto_int64_q = crypto_int64_y;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_r, crypto_int64_s;
  __asm__ ("cmp %2,%3\n csel %0,%2,%3,lt\n csel %1,%3,%2,lt" : "=&r"(crypto_int64_r), "=r"(crypto_int64_s) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  *crypto_int64_p = crypto_int64_r;
  *crypto_int64_q = crypto_int64_s;
#else
  /* Branch-free conditional swap: z is all-ones iff y < x, so XORing z into
     both values exchanges them exactly when they are out of order. */
  crypto_int64 crypto_int64_r = crypto_int64_y ^ crypto_int64_x;
  crypto_int64 crypto_int64_z = crypto_int64_y - crypto_int64_x;
  crypto_int64_z ^= crypto_int64_r & (crypto_int64_z ^ crypto_int64_y);
  crypto_int64_z = crypto_int64_negative_mask(crypto_int64_z);
  crypto_int64_z &= crypto_int64_r;
  crypto_int64_x ^= crypto_int64_z;
  crypto_int64_y ^= crypto_int64_z;
  *crypto_int64_p = crypto_int64_x;
  *crypto_int64_q = crypto_int64_y;
#endif
}
1508
1509
/* Constant-time signed compare: all-ones (-1) if x < y, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_smaller_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $-1,%1\n cmpq %3,%2\n cmovlq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,%2\n csetm %0,lt" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#else
  /* Overflow-safe sign of (x - y): the XOR correction handles the case where
     x and y have different signs and the subtraction would overflow. */
  crypto_int64 crypto_int64_r = crypto_int64_x ^ crypto_int64_y;
  crypto_int64 crypto_int64_z = crypto_int64_x - crypto_int64_y;
  crypto_int64_z ^= crypto_int64_r & (crypto_int64_z ^ crypto_int64_x);
  return crypto_int64_negative_mask(crypto_int64_z);
#endif
}
1527
1528
/* Constant-time signed compare: 1 if x < y, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_smaller_01(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $1,%1\n cmpq %3,%2\n cmovlq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,%2\n cset %0,lt" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#else
  /* Same overflow-safe sign computation as smaller_mask; top bit extracted as 0/1. */
  crypto_int64 crypto_int64_r = crypto_int64_x ^ crypto_int64_y;
  crypto_int64 crypto_int64_z = crypto_int64_x - crypto_int64_y;
  crypto_int64_z ^= crypto_int64_r & (crypto_int64_z ^ crypto_int64_x);
  return crypto_int64_unsigned_topbit_01(crypto_int64_z);
#endif
}
1546
1547
/* Constant-time signed compare: all-ones (-1) if x <= y, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_leq_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $-1,%1\n cmpq %3,%2\n cmovleq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,%2\n csetm %0,le" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#else
  /* x <= y is the complement of y < x. */
  return ~crypto_int64_smaller_mask(crypto_int64_y,crypto_int64_x);
#endif
}
1562
1563
/* Constant-time signed compare: 1 if x <= y, else 0. */
__attribute__((unused))
static inline
crypto_int64 crypto_int64_leq_01(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 crypto_int64_q,crypto_int64_z;
  __asm__ ("xorq %0,%0\n movq $1,%1\n cmpq %3,%2\n cmovleq %1,%0" : "=&r"(crypto_int64_z), "=&r"(crypto_int64_q) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#elif defined(__GNUC__) && defined(__aarch64__)
  crypto_int64 crypto_int64_z;
  __asm__ ("cmp %1,%2\n cset %0,le" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
  return crypto_int64_z;
#else
  return 1-crypto_int64_smaller_01(crypto_int64_y,crypto_int64_x);
#endif
}
1578
1579
/* Population count: number of set bits in x, computed branch-free via
 * SWAR bit-slicing (pairwise sums, then nibbles, then byte folding). */
__attribute__((unused))
static inline
int crypto_int64_ones_num(crypto_int64 crypto_int64_x) {
  crypto_int64_unsigned crypto_int64_y = crypto_int64_x;
  const crypto_int64 C0 = 0x5555555555555555;
  const crypto_int64 C1 = 0x3333333333333333;
  const crypto_int64 C2 = 0x0f0f0f0f0f0f0f0f;
  crypto_int64_y -= ((crypto_int64_y >> 1) & C0);
  crypto_int64_y = (crypto_int64_y & C1) + ((crypto_int64_y >> 2) & C1);
  crypto_int64_y = (crypto_int64_y + (crypto_int64_y >> 4)) & C2;
  crypto_int64_y += crypto_int64_y >> 8;
  crypto_int64_y += crypto_int64_y >> 16;
  crypto_int64_y = (crypto_int64_y + (crypto_int64_y >> 32)) & 0xff;
  return crypto_int64_y;
}
1594
1595
/* Count of trailing zero bits in x; returns 64 when x == 0
 * (explicit cmove fallback on x86, rbit+clz on aarch64, popcount trick otherwise). */
__attribute__((unused))
static inline
int crypto_int64_bottomzeros_num(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
  crypto_int64 fallback = 64;
  __asm__ ("bsfq %0,%0\n cmoveq %1,%0" : "+&r"(crypto_int64_x) : "r"(fallback) : "cc");
  return crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
  int64_t crypto_int64_y;
  __asm__ ("rbit %0,%1\n clz %0,%0" : "=r"(crypto_int64_y) : "r"(crypto_int64_x) : );
  return crypto_int64_y;
#else
  /* x ^ (x-1) isolates the trailing zeros plus the lowest set bit; count them. */
  crypto_int64 crypto_int64_y = crypto_int64_x ^ (crypto_int64_x-1);
  crypto_int64_y = ((crypto_int64) crypto_int64_y) >> 1;
  crypto_int64_y &= ~(crypto_int64_x & (((crypto_int64) 1) << (64-1)));
  return crypto_int64_ones_num(crypto_int64_y);
#endif
}
1613
1614
#endif
1615
1616
/* from supercop-20240808/crypto_sort/int32/portable4/sort.c */
1617
14.1M
#define int32_MINMAX(a,b) crypto_int32_minmax(&a,&b)
1618
1619
/*
 * In-place sort of n signed 32-bit integers using a data-independent
 * sequence of compare-exchanges (int32_MINMAX): the indices compared
 * depend only on n, never on the data, so memory access pattern and
 * instruction trace are independent of the values being sorted.
 * NOTE(review): structure matches djb's merge-sort-network "portable4"
 * int32 sort from supercop; do not reorder statements.
 */
static void crypto_sort_int32(void *array,long long n)
{
  long long top,p,q,r,i,j;
  int32 *x = array;

  if (n < 2) return;
  /* top = largest power of 2 with top < n - top. */
  top = 1;
  while (top < n - top) top += top;

  for (p = top;p >= 1;p >>= 1) {
    /* Compare-exchange pass at distance p over full 2p-wide chunks... */
    i = 0;
    while (i + 2 * p <= n) {
      for (j = i;j < i + p;++j)
        int32_MINMAX(x[j],x[j+p]);
      i += 2 * p;
    }
    /* ...and the partial tail chunk. */
    for (j = i;j < n - p;++j)
      int32_MINMAX(x[j],x[j+p]);

    /* Merge passes at decreasing distances q = top, top/2, ..., 2p. */
    i = 0;
    j = 0;
    for (q = top;q > p;q >>= 1) {
      if (j != i) for (;;) {
        if (j == n - q) goto done;
        int32 a = x[j + p];
        for (r = q;r > p;r >>= 1)
          int32_MINMAX(a,x[j + r]);
        x[j + p] = a;
        ++j;
        if (j == i + p) {
          i += 2 * p;
          break;
        }
      }
      while (i + p <= n - q) {
        for (j = i;j < i + p;++j) {
          int32 a = x[j + p];
          for (r = q;r > p;r >>= 1)
            int32_MINMAX(a,x[j+r]);
          x[j + p] = a;
        }
        i += 2 * p;
      }
      /* now i + p > n - q */
      j = i;
      while (j < n - q) {
        int32 a = x[j + p];
        for (r = q;r > p;r >>= 1)
          int32_MINMAX(a,x[j+r]);
        x[j + p] = a;
        ++j;
      }

      done: ;
    }
  }
}
1676
1677
/* from supercop-20240808/crypto_sort/uint32/useint32/sort.c */
1678
1679
/* can save time by vectorizing xor loops */
1680
/* can save time by integrating xor loops with int32_sort */
1681
1682
/*
 * In-place sort of n unsigned 32-bit integers: flip the sign bit so
 * unsigned order matches signed order, sort as signed, flip back.
 */
static void crypto_sort_uint32(void *array,long long n)
{
  crypto_uint32 *x = array;
  long long j;
  for (j = 0;j < n;++j) x[j] ^= 0x80000000;
  crypto_sort_int32(array,n);
  for (j = 0;j < n;++j) x[j] ^= 0x80000000;
}
1690
1691
/* from supercop-20240808/crypto_kem/sntrup761/compact/kem.c */
1692
// 20240806 djb: some automated conversion to cryptoint
1693
1694
8.18G
#define p 761
1695
17.3G
#define q 4591
1696
243k
#define w 286
1697
661k
#define q12 ((q - 1) / 2)
1698
typedef int8_t small;
1699
typedef int16_t Fq;
1700
20.9k
#define Hash_bytes 32
1701
3.49k
#define Small_bytes ((p + 3) / 4)
1702
typedef small Inputs[p];
1703
831
#define SecretKeys_bytes (2 * Small_bytes)
1704
19
#define Confirm_bytes 32
1705
1706
1.91G
/* Reduce x mod 3 to the centered range {-1,0,1}, branch-free (10923 ~= 2^15/3). */
static small F3_freeze(int16_t x) { return x - 3 * ((10923 * x + 16384) >> 15); }
1707
1708
1.92G
/*
 * Reduce x mod q (q = 4591) to a centered representative, branch-free.
 * Three successive approximate-quotient subtractions (scaled reciprocals of q
 * at 16, 20 and 28 bits) shrink x; the final rounding constant 0x8000000
 * centers the result around 0.
 */
static Fq Fq_freeze(int32_t x) {
  const int32_t q16 = (0x10000 + q / 2) / q;
  const int32_t q20 = (0x100000 + q / 2) / q;
  const int32_t q28 = (0x10000000 + q / 2) / q;
  x -= q * ((q16 * x) >> 16);
  x -= q * ((q20 * x) >> 20);
  return x - q * ((q28 * x + 0x8000000) >> 28);
}
1716
1717
5
/*
 * Weight check: returns 0 if exactly w of the p entries of r have their
 * bottom bit set (i.e. are odd), all-ones (-1) otherwise. Constant-time.
 */
static int Weightw_mask(small *r) {
  int i, weight = 0;
  for (i = 0; i < p; ++i) weight += crypto_int64_bottombit_01(r[i]);
  return crypto_int16_nonzero_mask(weight - w);
}
1722
1723
36.5k
/*
 * Constant-time division: *Q = x / m, *r = x % m, for 14-bit m.
 * Two rounds of multiplication by the scaled reciprocal v = 2^31/m leave a
 * small remainder; the final subtract-and-masked-add corrects the off-by-one.
 */
static void uint32_divmod_uint14(uint32_t *Q, uint16_t *r, uint32_t x, uint16_t m) {
  uint32_t qpart, mask, v = 0x80000000 / m;
  qpart = (x * (uint64_t)v) >> 31;
  x -= qpart * m;
  *Q = qpart;
  qpart = (x * (uint64_t)v) >> 31;
  x -= qpart * m;
  *Q += qpart;
  /* Speculatively subtract one more m, then undo it iff x went negative. */
  x -= m;
  *Q += 1;
  mask = crypto_int32_negative_mask(x);
  x += mask & (uint32_t)m;
  *Q += mask;
  *r = x;
}
1738
1739
18.2k
/* Constant-time x % m for 14-bit m; thin wrapper over uint32_divmod_uint14. */
static uint16_t uint32_mod_uint14(uint32_t x, uint16_t m) {
  uint32_t Q;
  uint16_t r;
  uint32_divmod_uint14(&Q, &r, x, m);
  return r;
}
1745
1746
9.29k
/*
 * Recursive mixed-radix encoder: serializes R[i] (with 0 <= R[i] < M[i])
 * into bytes. Adjacent pairs are combined into a single value with modulus
 * M[i]*M[i+1]; full bytes are emitted while the running modulus is >= 16384,
 * and the halved sequence recurses until one value remains.
 * NOTE(review): uses C99 VLAs R2/M2 of size (len+1)/2 — recursion depth and
 * stack use are bounded by len halving each level.
 */
static void Encode(unsigned char *out, const uint16_t *R, const uint16_t *M, long long len) {
  if (len == 1) {
    /* Base case: emit remaining bytes of the single residue. */
    uint16_t r = R[0], m = M[0];
    while (m > 1) {
      *out++ = r;
      r >>= 8;
      m = (m + 255) >> 8;
    }
  }
  if (len > 1) {
    uint16_t R2[(len + 1) / 2], M2[(len + 1) / 2];
    long long i;
    for (i = 0; i < len - 1; i += 2) {
      uint32_t m0 = M[i];
      uint32_t r = R[i] + R[i + 1] * m0;
      uint32_t m = M[i + 1] * m0;
      while (m >= 16384) {
        *out++ = r;
        r >>= 8;
        m = (m + 255) >> 8;
      }
      R2[i / 2] = r;
      M2[i / 2] = m;
    }
    /* Odd leftover element passes through unchanged. */
    if (i < len) {
      R2[i / 2] = R[i];
      M2[i / 2] = M[i];
    }
    Encode(out, R2, M2, (len + 1) / 2);
  }
}
1777
1778
264
/*
 * Recursive mixed-radix decoder: inverse of Encode. For each pair it decides
 * from the pair modulus m = M[i]*M[i+1] how many bytes (0, 1 or 2) Encode
 * consumed at this level, records them (bottomt/bottomr), recurses on the
 * halved problem, then splits each recovered value back into the two
 * residues with uint32_divmod_uint14.
 */
static void Decode(uint16_t *out, const unsigned char *S, const uint16_t *M, long long len) {
  if (len == 1) {
    if (M[0] == 1)
      *out = 0;
    else if (M[0] <= 256)
      *out = uint32_mod_uint14(S[0], M[0]);
    else
      *out = uint32_mod_uint14(S[0] + (((uint16_t)S[1]) << 8), M[0]);
  }
  if (len > 1) {
    uint16_t R2[(len + 1) / 2], M2[(len + 1) / 2], bottomr[len / 2];
    uint32_t bottomt[len / 2];
    long long i;
    for (i = 0; i < len - 1; i += 2) {
      uint32_t m = M[i] * (uint32_t)M[i + 1];
      if (m > 256 * 16383) {
        /* Encode emitted two bytes at this level. */
        bottomt[i / 2] = 256 * 256;
        bottomr[i / 2] = S[0] + 256 * S[1];
        S += 2;
        M2[i / 2] = (((m + 255) >> 8) + 255) >> 8;
      } else if (m >= 16384) {
        /* Encode emitted one byte. */
        bottomt[i / 2] = 256;
        bottomr[i / 2] = S[0];
        S += 1;
        M2[i / 2] = (m + 255) >> 8;
      } else {
        /* No bytes emitted; pair modulus carried upward intact. */
        bottomt[i / 2] = 1;
        bottomr[i / 2] = 0;
        M2[i / 2] = m;
      }
    }
    if (i < len) M2[i / 2] = M[i];
    Decode(R2, S, M2, (len + 1) / 2);
    for (i = 0; i < len - 1; i += 2) {
      uint32_t r1, r = bottomr[i / 2];
      uint16_t r0;
      r += bottomt[i / 2] * R2[i / 2];
      uint32_divmod_uint14(&r1, &r0, r, M[i]);
      r1 = uint32_mod_uint14(r1, M[i + 1]);
      *out++ = r0;
      *out++ = r1;
    }
    if (i < len) *out++ = R2[i / 2];
  }
}
1823
1824
5
/* Map each mod-q coefficient of r to its mod-3 centered representative in {-1,0,1}. */
static void R3_fromRq(small *out, const Fq *r) {
  int i;
  for (i = 0; i < p; ++i) out[i] = F3_freeze(r[i]);
}
1828
1829
5
/*
 * h = f*g in R3 = Z3[x]/(x^p - x - 1): schoolbook polynomial product,
 * then reduction of the high terms via x^p -> x + 1 (two folding loops),
 * then coefficient-wise freeze into {-1,0,1}.
 */
static void R3_mult(small *h, const small *f, const small *g) {
  int16_t fg[p + p - 1];
  int i, j;
  for (i = 0; i < p + p - 1; ++i) fg[i] = 0;
  for (i = 0; i < p; ++i)
    for (j = 0; j < p; ++j) fg[i + j] += f[i] * (int16_t)g[j];
  for (i = p; i < p + p - 1; ++i) fg[i - p] += fg[i];
  for (i = p; i < p + p - 1; ++i) fg[i - p + 1] += fg[i];
  for (i = 0; i < p; ++i) h[i] = F3_freeze(fg[i]);
}
1839
1840
826
/*
 * Constant-time inverse in R3 = Z3[x]/(x^p - x - 1) via the divsteps
 * (safegcd-style) algorithm: 2p-1 identical iterations, each performing a
 * masked conditional swap of (f,v) with (g,r) and an elimination step, with
 * no data-dependent branches or memory accesses.
 * Returns 0 when delta == 0 at the end (inverse found, written to out),
 * nonzero mask otherwise. out = v scaled by f[0], reversed.
 */
static int R3_recip(small *out, const small *in) {
  small f[p + 1], g[p + 1], v[p + 1], r[p + 1];
  int sign, swap, t, i, loop, delta = 1;
  for (i = 0; i < p + 1; ++i) v[i] = 0;
  for (i = 0; i < p + 1; ++i) r[i] = 0;
  r[0] = 1;
  /* f = x^p - x - 1 (the ring modulus); g = reversal of the input. */
  for (i = 0; i < p; ++i) f[i] = 0;
  f[0] = 1;
  f[p - 1] = f[p] = -1;
  for (i = 0; i < p; ++i) g[p - 1 - i] = in[i];
  g[p] = 0;
  for (loop = 0; loop < 2 * p - 1; ++loop) {
    for (i = p; i > 0; --i) v[i] = v[i - 1];
    v[0] = 0;
    sign = -g[0] * f[0];
    /* swap = all-ones iff delta > 0 and g[0] != 0; applied branch-free below. */
    swap = crypto_int16_negative_mask(-delta) & crypto_int16_nonzero_mask(g[0]);
    delta ^= swap & (delta ^ -delta);
    delta += 1;
    for (i = 0; i < p + 1; ++i) {
      t = swap & (f[i] ^ g[i]);
      f[i] ^= t;
      g[i] ^= t;
      t = swap & (v[i] ^ r[i]);
      v[i] ^= t;
      r[i] ^= t;
    }
    /* Eliminate g's constant term, then divide g by x (shift down). */
    for (i = 0; i < p + 1; ++i) g[i] = F3_freeze(g[i] + sign * f[i]);
    for (i = 0; i < p + 1; ++i) r[i] = F3_freeze(r[i] + sign * v[i]);
    for (i = 0; i < p; ++i) g[i] = g[i + 1];
    g[p] = 0;
  }
  sign = f[0];
  for (i = 0; i < p; ++i) out[i] = sign * v[p - 1 - i];
  return crypto_int16_nonzero_mask(delta);
}
1875
1876
850
/*
 * h = f*g in Rq = Zq[x]/(x^p - x - 1), where g has small ({-1,0,1})
 * coefficients: schoolbook product in 32-bit accumulators, fold the high
 * terms via x^p -> x + 1, then Fq_freeze each coefficient.
 */
static void Rq_mult_small(Fq *h, const Fq *f, const small *g) {
  int32_t fg[p + p - 1];
  int i, j;
  for (i = 0; i < p + p - 1; ++i) fg[i] = 0;
  for (i = 0; i < p; ++i)
    for (j = 0; j < p; ++j) fg[i + j] += f[i] * (int32_t)g[j];
  for (i = p; i < p + p - 1; ++i) fg[i - p] += fg[i];
  for (i = p; i < p + p - 1; ++i) fg[i - p + 1] += fg[i];
  for (i = 0; i < p; ++i) h[i] = Fq_freeze(fg[i]);
}
1886
1887
5
/* h = 3*f in Rq, coefficient-wise, re-frozen into centered range. */
static void Rq_mult3(Fq *h, const Fq *f) {
  int i;
  for (i = 0; i < p; ++i) h[i] = Fq_freeze(3 * f[i]);
}
1891
1892
1.65k
/*
 * Inverse of a1 in Fq by Fermat's little theorem: the loop multiplies a1
 * into ai exactly q-3 times, yielding a1^(q-2) = a1^(-1) mod q.
 * Deliberately a fixed-length loop (constant-time), not fast exponentiation.
 */
static Fq Fq_recip(Fq a1) {
  int i = 1;
  Fq ai = a1;
  while (i < q - 2) {
    ai = Fq_freeze(a1 * (int32_t)ai);
    i += 1;
  }
  return ai;
}
1901
1902
826
/*
 * Constant-time computation of (3*in)^(-1) in Rq = Zq[x]/(x^p - x - 1),
 * using the same branch-free divsteps structure as R3_recip but with
 * mod-q cross-multiplication elimination (f0*g - g0*f).
 * r starts at 1/3 so the result is the reciprocal of 3*in.
 * Returns 0 when delta == 0 (inverse found, written to out), nonzero
 * mask otherwise.
 */
static int Rq_recip3(Fq *out, const small *in) {
  Fq f[p + 1], g[p + 1], v[p + 1], r[p + 1], scale;
  int swap, t, i, loop, delta = 1;
  int32_t f0, g0;
  for (i = 0; i < p + 1; ++i) v[i] = 0;
  for (i = 0; i < p + 1; ++i) r[i] = 0;
  r[0] = Fq_recip(3);
  /* f = x^p - x - 1 (ring modulus); g = reversal of the input. */
  for (i = 0; i < p; ++i) f[i] = 0;
  f[0] = 1;
  f[p - 1] = f[p] = -1;
  for (i = 0; i < p; ++i) g[p - 1 - i] = in[i];
  g[p] = 0;
  for (loop = 0; loop < 2 * p - 1; ++loop) {
    for (i = p; i > 0; --i) v[i] = v[i - 1];
    v[0] = 0;
    /* swap = all-ones iff delta > 0 and g[0] != 0; applied branch-free below. */
    swap = crypto_int16_negative_mask(-delta) & crypto_int16_nonzero_mask(g[0]);
    delta ^= swap & (delta ^ -delta);
    delta += 1;
    for (i = 0; i < p + 1; ++i) {
      t = swap & (f[i] ^ g[i]);
      f[i] ^= t;
      g[i] ^= t;
      t = swap & (v[i] ^ r[i]);
      v[i] ^= t;
      r[i] ^= t;
    }
    /* Eliminate g's constant term, then divide g by x (shift down). */
    f0 = f[0];
    g0 = g[0];
    for (i = 0; i < p + 1; ++i) g[i] = Fq_freeze(f0 * g[i] - g0 * f[i]);
    for (i = 0; i < p + 1; ++i) r[i] = Fq_freeze(f0 * r[i] - g0 * v[i]);
    for (i = 0; i < p; ++i) g[i] = g[i + 1];
    g[p] = 0;
  }
  scale = Fq_recip(f[0]);
  for (i = 0; i < p; ++i) out[i] = Fq_freeze(scale * (int32_t)v[p - 1 - i]);
  return crypto_int16_nonzero_mask(delta);
}
1939
1940
19
/* Round each coefficient of a to the nearest multiple of 3 (subtracting its mod-3 part). */
static void Round(Fq *out, const Fq *a) {
  int i;
  for (i = 0; i < p; ++i) out[i] = a[i] - F3_freeze(a[i]);
}
1944
1945
840
/*
 * Derive a weight-w "short" vector from p random 32-bit words: force the
 * low bits so the first w entries decode to +/-1 and the rest to 0, sort
 * (data-independently) to shuffle the positions by the random high bits,
 * then map each word's low 2 bits to a coefficient in {-1,0,1}.
 */
static void Short_fromlist(small *out, const uint32_t *in) {
  uint32_t L[p];
  int i;
  for (i = 0; i < w; ++i) L[i] = in[i] & (uint32_t)-2;
  for (i = w; i < p; ++i) L[i] = (in[i] & (uint32_t)-3) | 1;
  crypto_sort_uint32(L, p);
  for (i = 0; i < p; ++i) out[i] = (L[i] & 3) - 1;
}
1953
1954
916
/*
 * Domain-separated hash: out = first 32 bytes of SHA-512(b || in),
 * where b is a one-byte prefix selecting the hash's role.
 * NOTE(review): uses a VLA of size inlen+1 — inlen is always a small
 * fixed protocol length at the call sites in this file.
 */
static void Hash_prefix(unsigned char *out, int b, const unsigned char *in, int inlen) {
  unsigned char x[inlen + 1], h[64];
  int i;
  x[0] = b;
  for (i = 0; i < inlen; ++i) x[i + 1] = in[i];
  crypto_hash_sha512(h, x, inlen + 1);
  for (i = 0; i < 32; ++i) out[i] = h[i];
}
1962
1963
1964
840
/* Sample a random weight-w short vector; scrubs the random words afterwards. */
static void Short_random(small *out) {
  uint32_t L[p];
  randombytes(L, sizeof(L));
  Short_fromlist(out, L);
  explicit_bzero(L, sizeof(L));
}
1970
826
/*
 * Sample p independent coefficients uniformly from {-1,0,1}
 * (30 random bits scaled by 3, top two bits select the trit);
 * scrubs the random words afterwards.
 */
static void Small_random(small *out) {
  int i;
  uint32_t L[p];
  randombytes(L, sizeof(L));
  for (i = 0; i < p; ++i) out[i] = (((L[i] & 0x3fffffff) * 3) >> 30) - 1;
  explicit_bzero(L, sizeof(L));
}
1977
826
/*
 * Streamlined NTRU Prime key generation: pick random small g until it is
 * invertible in R3 (the retry condition is declassified — it leaks only
 * that a rejected g occurred, not its value), then pick short f and compute
 * the public key h = g/(3f) in Rq.
 * Outputs: h (public), f and ginv = 1/g (secret).
 */
static void KeyGen(Fq *h, small *f, small *ginv) {
  small g[p];
  Fq finv[p];
  for (;;) {
    int result;
    Small_random(g);
    result = R3_recip(ginv, g);
    crypto_declassify(&result, sizeof result);
    if (result == 0) break;
  }
  Short_random(f);
  Rq_recip3(finv, f);  /* finv = 1/(3f) in Rq */
  Rq_mult_small(h, finv, g);
}
1991
1992
19
/* Core encryption: c = Round(h*r), i.e. multiply public key by the short message r
 * in Rq and round coefficients to multiples of 3. */
static void Encrypt(Fq *c, const small *r, const Fq *h) {
  Fq hr[p];
  Rq_mult_small(hr, h, r);
  Round(c, hr);
}
1997
1998
5
/*
 * Core decryption: e = (3*c*f mod q) mod 3, then r = e * ginv in R3.
 * The weight of the candidate is checked in constant time; if it is not
 * exactly w, the masked writes below force the fixed fallback vector
 * (w ones followed by zeros) instead, without branching on secrets.
 */
static void Decrypt(small *r, const Fq *c, const small *f, const small *ginv) {
  Fq cf[p], cf3[p];
  small e[p], ev[p];
  int mask, i;
  Rq_mult_small(cf, c, f);
  Rq_mult3(cf3, cf);
  R3_fromRq(e, cf3);
  R3_mult(ev, e, ginv);
  mask = Weightw_mask(ev);  /* 0 if weight is w, else -1 */
  for (i = 0; i < w; ++i) r[i] = ((ev[i] ^ 1) & ~mask) ^ 1;
  for (i = w; i < p; ++i) r[i] = ev[i] & ~mask;
}
2010
2011
1.67k
/*
 * Pack p trits (each in {-1,0,1}, stored as f[i]+1 in 2 bits) four per
 * byte, with the final leftover coefficient in the last byte.
 */
static void Small_encode(unsigned char *s, const small *f) {
  int i, j;
  for (i = 0; i < p / 4; ++i) {
    small x = 0;
    for (j = 0;j < 4;++j) x += (*f++ + 1) << (2 * j);
    *s++ = x;
  }
  *s = *f++ + 1;
}
2020
2021
10
/* Unpack p trits from the 2-bits-per-coefficient byte packing of Small_encode. */
static void Small_decode(small *f, const unsigned char *s) {
  int i, j;
  for (i = 0; i < p / 4; ++i) {
    unsigned char x = *s++;
    for (j = 0;j < 4;++j) *f++ = ((small)((x >> (2 * j)) & 3)) - 1;
  }
  *f++ = ((small)(*s & 3)) - 1;
}
2029
2030
826
/* Serialize an Rq element: shift coefficients to [0,q-1] and mixed-radix encode with modulus q. */
static void Rq_encode(unsigned char *s, const Fq *r) {
  uint16_t R[p], M[p];
  int i;
  for (i = 0; i < p; ++i) R[i] = r[i] + q12;
  for (i = 0; i < p; ++i) M[i] = q;
  Encode(s, R, M, p);
}
2037
2038
19
/* Deserialize an Rq element: mixed-radix decode with modulus q, shift back to centered range. */
static void Rq_decode(Fq *r, const unsigned char *s) {
  uint16_t R[p], M[p];
  int i;
  for (i = 0; i < p; ++i) M[i] = q;
  Decode(R, s, M, p);
  for (i = 0; i < p; ++i) r[i] = ((Fq)R[i]) - q12;
}
2045
2046
19
/*
 * Serialize a rounded Rq element: coefficients are multiples of 3, so divide
 * by 3 (multiply by 10923 ~= 2^15/3 and shift) and encode with modulus (q+2)/3.
 */
static void Rounded_encode(unsigned char *s, const Fq *r) {
  uint16_t R[p], M[p];
  int i;
  for (i = 0; i < p; ++i) R[i] = ((r[i] + q12) * 10923) >> 15;
  for (i = 0; i < p; ++i) M[i] = (q + 2) / 3;
  Encode(s, R, M, p);
}
2053
2054
5
/* Deserialize a rounded Rq element: decode with modulus (q+2)/3, multiply back by 3, recenter. */
static void Rounded_decode(Fq *r, const unsigned char *s) {
  uint16_t R[p], M[p];
  int i;
  for (i = 0; i < p; ++i) M[i] = (q + 2) / 3;
  Decode(R, s, M, p);
  for (i = 0; i < p; ++i) r[i] = R[i] * 3 - q12;
}
2061
2062
826
/* Serialized key generation: pk = encoded h; sk = encoded f followed by encoded 1/g. */
static void ZKeyGen(unsigned char *pk, unsigned char *sk) {
  Fq h[p];
  small f[p], v[p];
  KeyGen(h, f, v);
  Rq_encode(pk, h);
  Small_encode(sk, f);
  Small_encode(sk + Small_bytes, v);
}
2070
2071
19
/* Serialized encryption: decode pk, encrypt short vector r, encode the rounded ciphertext. */
static void ZEncrypt(unsigned char *C, const Inputs r, const unsigned char *pk) {
  Fq h[p], c[p];
  Rq_decode(h, pk);
  Encrypt(c, r, h);
  Rounded_encode(C, c);
}
2077
2078
5
/* Serialized decryption: decode sk halves (f, 1/g) and ciphertext, then core Decrypt. */
static void ZDecrypt(Inputs r, const unsigned char *C, const unsigned char *sk) {
  small f[p], v[p];
  Fq c[p];
  Small_decode(f, sk);
  Small_decode(v, sk + Small_bytes);
  Rounded_decode(c, C);
  Decrypt(r, c, f, v);
}
2086
2087
19
/*
 * Ciphertext confirmation hash: h = Hash2(Hash3(r_enc) || cache),
 * where cache is the precomputed Hash4(pk). The prefix bytes (3, 2)
 * provide domain separation between the hash roles.
 */
static void HashConfirm(unsigned char *h, const unsigned char *r, const unsigned char *cache) {
  unsigned char x[Hash_bytes * 2];
  int i;
  Hash_prefix(x, 3, r, Small_bytes);
  for (i = 0; i < Hash_bytes; ++i) x[Hash_bytes + i] = cache[i];
  Hash_prefix(h, 2, x, sizeof x);
}
2094
2095
19
/*
 * Session key derivation: k = Hash_b(Hash3(y) || z), where z is the full
 * ciphertext and b is 1 on success / 0 on implicit rejection (see caller).
 */
static void HashSession(unsigned char *k, int b, const unsigned char *y, const unsigned char *z) {
  unsigned char x[Hash_bytes + crypto_kem_sntrup761_CIPHERTEXTBYTES];
  int i;
  Hash_prefix(x, 3, y, Small_bytes);
  for (i = 0; i < crypto_kem_sntrup761_CIPHERTEXTBYTES; ++i) x[Hash_bytes + i] = z[i];
  Hash_prefix(k, b, x, sizeof x);
}
2102
2103
826
/*
 * KEM keypair. sk layout: encoded (f, 1/g) || copy of pk || random rho
 * (for implicit rejection) || cached Hash4(pk). Always returns 0.
 */
int crypto_kem_sntrup761_keypair(unsigned char *pk, unsigned char *sk) {
  int i;
  ZKeyGen(pk, sk);
  sk += SecretKeys_bytes;
  for (i = 0; i < crypto_kem_sntrup761_PUBLICKEYBYTES; ++i) *sk++ = pk[i];
  randombytes(sk, Small_bytes);
  Hash_prefix(sk + Small_bytes, 4, pk, crypto_kem_sntrup761_PUBLICKEYBYTES);
  return 0;
}
2112
2113
19
/* Encapsulation core: encode r, encrypt it under pk, and append the confirmation hash
 * to the ciphertext. r_enc receives the encoded r for session-key derivation. */
static void Hide(unsigned char *c, unsigned char *r_enc, const Inputs r, const unsigned char *pk, const unsigned char *cache) {
  Small_encode(r_enc, r);
  ZEncrypt(c, r, pk);
  HashConfirm(c + crypto_kem_sntrup761_CIPHERTEXTBYTES - Confirm_bytes, r_enc, cache);
}
2118
2119
14
/*
 * KEM encapsulation: sample a random short vector, build ciphertext c
 * (encryption + confirmation hash), derive session key k with success
 * tag b = 1. Always returns 0.
 */
int crypto_kem_sntrup761_enc(unsigned char *c, unsigned char *k, const unsigned char *pk) {
  Inputs r;
  unsigned char r_enc[Small_bytes], cache[Hash_bytes];
  Hash_prefix(cache, 4, pk, crypto_kem_sntrup761_PUBLICKEYBYTES);
  Short_random(r);
  Hide(c, r_enc, r, pk, cache);
  HashSession(k, 1, r_enc, c);
  return 0;
}
2128
2129
5
/*
 * Constant-time ciphertext comparison: returns 0 if c and c2 are byte-for-byte
 * equal, all-ones (-1) otherwise. differentbits ORs all byte differences
 * (range 0..255); the bitmod trick turns "differentbits == 0" into 0/-1
 * without a branch (presumably bitmod_01(x,8) extracts bit 8 — borrow bit of
 * the subtraction — TODO confirm against cryptoint definition).
 */
static int Ciphertexts_diff_mask(const unsigned char *c, const unsigned char *c2) {
  uint16_t differentbits = 0;
  int len = crypto_kem_sntrup761_CIPHERTEXTBYTES;
  while (len-- > 0) differentbits |= (*c++) ^ (*c2++);
  return (crypto_int64_bitmod_01((differentbits - 1),8)) - 1;
}
2135
2136
5
/*
 * KEM decapsulation with implicit rejection (Fujisaki-Okamoto style):
 * decrypt to a candidate r, deterministically re-encrypt, and compare the
 * re-encryption with the received ciphertext in constant time. On mismatch
 * (mask = -1), r_enc is replaced by the secret random rho and the session
 * key is derived with tag 1+mask = 0 instead of 1 — all without branching
 * on the comparison result. Always returns 0.
 */
int crypto_kem_sntrup761_dec(unsigned char *k, const unsigned char *c, const unsigned char *sk) {
  const unsigned char *pk = sk + SecretKeys_bytes;
  const unsigned char *rho = pk + crypto_kem_sntrup761_PUBLICKEYBYTES;
  const unsigned char *cache = rho + Small_bytes;
  Inputs r;
  unsigned char r_enc[Small_bytes], cnew[crypto_kem_sntrup761_CIPHERTEXTBYTES];
  int mask, i;
  ZDecrypt(r, c, sk);
  Hide(cnew, r_enc, r, pk, cache);
  mask = Ciphertexts_diff_mask(c, cnew);
  for (i = 0; i < Small_bytes; ++i) r_enc[i] ^= mask & (r_enc[i] ^ rho[i]);
  HashSession(k, 1 + mask, r_enc, c);
  return 0;
}
2150
2151
#endif /* USE_SNTRUP761X25519 */