/src/libgcrypt/cipher/rijndael-vaes.c
/* VAES/AVX2 accelerated AES for Libgcrypt
 * Copyright (C) 2021 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This file is part of Libgcrypt.
 *
 * Libgcrypt is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * Libgcrypt is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this program; if not, see <http://www.gnu.org/licenses/>.
 *
 */

#include <config.h>
#include <stdio.h>
#include <stdlib.h>

#include "types.h"  /* for byte and u32 typedefs */
#include "g10lib.h"
#include "cipher.h"
#include "bufhelp.h"
#include "rijndael-internal.h"
#include "./cipher-internal.h"


#ifdef USE_VAES


# ifdef HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS
#  define ASM_FUNC_ABI __attribute__((sysv_abi))
# else
#  define ASM_FUNC_ABI
# endif


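/* Expansion of the decryption key schedule is shared with the AES-NI
 * implementation.  */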
extern void _gcry_aes_aesni_prepare_decryption(RIJNDAEL_context *ctx);


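/* Bulk AES entry points implemented in AMD64 assembly using VAES/AVX2
 * instructions.  ASM_FUNC_ABI forces the SysV calling convention on Win64
 * builds, matching the convention the assembly code expects.  */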
extern void _gcry_vaes_avx2_cbc_dec_amd64 (const void *keysched,
                                           unsigned char *iv,
                                           void *outbuf_arg,
                                           const void *inbuf_arg,
                                           size_t nblocks,
                                           unsigned int nrounds) ASM_FUNC_ABI;

extern void _gcry_vaes_avx2_cfb_dec_amd64 (const void *keysched,
                                           unsigned char *iv,
                                           void *outbuf_arg,
                                           const void *inbuf_arg,
                                           size_t nblocks,
                                           unsigned int nrounds) ASM_FUNC_ABI;

extern void _gcry_vaes_avx2_ctr_enc_amd64 (const void *keysched,
                                           unsigned char *ctr,
                                           void *outbuf_arg,
                                           const void *inbuf_arg,
                                           size_t nblocks,
                                           unsigned int nrounds) ASM_FUNC_ABI;

extern void _gcry_vaes_avx2_ctr32le_enc_amd64 (const void *keysched,
                                               unsigned char *ctr,
                                               void *outbuf_arg,
                                               const void *inbuf_arg,
                                               size_t nblocks,
                                               unsigned int nrounds)
                                               ASM_FUNC_ABI;

extern void _gcry_vaes_avx2_ocb_crypt_amd64 (const void *keysched,
                                             unsigned int blkn,
                                             void *outbuf_arg,
                                             const void *inbuf_arg,
                                             size_t nblocks,
                                             unsigned int nrounds,
                                             unsigned char *offset,
                                             unsigned char *checksum,
                                             unsigned char *L_table,
                                             int encrypt) ASM_FUNC_ABI;

extern void _gcry_vaes_avx2_xts_crypt_amd64 (const void *keysched,
                                             unsigned char *tweak,
                                             void *outbuf_arg,
                                             const void *inbuf_arg,
                                             size_t nblocks,
                                             unsigned int nrounds,
                                             int encrypt) ASM_FUNC_ABI;

extern void _gcry_vaes_avx2_ecb_crypt_amd64 (const void *keysched,
                                             int encrypt,
                                             void *outbuf_arg,
                                             const void *inbuf_arg,
                                             size_t nblocks,
                                             unsigned int nrounds) ASM_FUNC_ABI;


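/* Bulk ECB encryption/decryption.  The decryption key schedule is prepared
 * lazily on first decrypting use.  */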
void
_gcry_aes_vaes_ecb_crypt (void *context, void *outbuf,
                          const void *inbuf, size_t nblocks,
                          int encrypt)
{
  RIJNDAEL_context *ctx = context;
  const void *keysched = encrypt ? ctx->keyschenc32 : ctx->keyschdec32;
  unsigned int nrounds = ctx->rounds;

  if (!encrypt && !ctx->decryption_prepared)
    {
      _gcry_aes_aesni_prepare_decryption (ctx);
      ctx->decryption_prepared = 1;
    }

  _gcry_vaes_avx2_ecb_crypt_amd64 (keysched, encrypt, outbuf, inbuf,
                                   nblocks, nrounds);
}

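/* Bulk CBC decryption.  */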
void
_gcry_aes_vaes_cbc_dec (void *context, unsigned char *iv,
                        void *outbuf, const void *inbuf,
                        size_t nblocks)
{
  RIJNDAEL_context *ctx = context;
  const void *keysched = ctx->keyschdec32;
  unsigned int nrounds = ctx->rounds;

  if (!ctx->decryption_prepared)
    {
      _gcry_aes_aesni_prepare_decryption (ctx);
      ctx->decryption_prepared = 1;
    }

  _gcry_vaes_avx2_cbc_dec_amd64 (keysched, iv, outbuf, inbuf, nblocks, nrounds);
}

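/* Bulk CFB decryption.  Uses the encryption key schedule, as CFB decryption
 * only runs the block cipher forward.  */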
void
_gcry_aes_vaes_cfb_dec (void *context, unsigned char *iv,
                        void *outbuf, const void *inbuf,
                        size_t nblocks)
{
  RIJNDAEL_context *ctx = context;
  const void *keysched = ctx->keyschenc32;
  unsigned int nrounds = ctx->rounds;

  _gcry_vaes_avx2_cfb_dec_amd64 (keysched, iv, outbuf, inbuf, nblocks, nrounds);
}

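/* Bulk CTR mode encryption.  */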
void
_gcry_aes_vaes_ctr_enc (void *context, unsigned char *iv,
                        void *outbuf, const void *inbuf,
                        size_t nblocks)
{
  RIJNDAEL_context *ctx = context;
  const void *keysched = ctx->keyschenc32;
  unsigned int nrounds = ctx->rounds;

  _gcry_vaes_avx2_ctr_enc_amd64 (keysched, iv, outbuf, inbuf, nblocks, nrounds);
}

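/* Bulk CTR encryption with little-endian 32-bit counter increment (CTR32LE),
 * as used by GCM-SIV mode.  */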
void
_gcry_aes_vaes_ctr32le_enc (void *context, unsigned char *iv,
                            void *outbuf, const void *inbuf,
                            size_t nblocks)
{
  RIJNDAEL_context *ctx = context;
  const void *keysched = ctx->keyschenc32;
  unsigned int nrounds = ctx->rounds;

  _gcry_vaes_avx2_ctr32le_enc_amd64 (keysched, iv, outbuf, inbuf, nblocks,
                                     nrounds);
}

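/* Bulk OCB encryption/decryption.  Advances the data block counter in the
 * cipher handle; the assembly updates the offset and checksum state.  */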
size_t
_gcry_aes_vaes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
                          const void *inbuf_arg, size_t nblocks,
                          int encrypt)
{
  RIJNDAEL_context *ctx = (void *)&c->context.c;
  const void *keysched = encrypt ? ctx->keyschenc32 : ctx->keyschdec32;
  unsigned char *outbuf = outbuf_arg;
  const unsigned char *inbuf = inbuf_arg;
  unsigned int nrounds = ctx->rounds;
  u64 blkn = c->u_mode.ocb.data_nblocks;

  if (!encrypt && !ctx->decryption_prepared)
    {
      _gcry_aes_aesni_prepare_decryption (ctx);
      ctx->decryption_prepared = 1;
    }

  c->u_mode.ocb.data_nblocks = blkn + nblocks;

  _gcry_vaes_avx2_ocb_crypt_amd64 (keysched, (unsigned int)blkn, outbuf, inbuf,
                                   nblocks, nrounds, c->u_iv.iv, c->u_ctr.ctr,
                                   c->u_mode.ocb.L[0], encrypt);

  return 0;
}

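/* Bulk XTS encryption/decryption.  */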
void
_gcry_aes_vaes_xts_crypt (void *context, unsigned char *tweak,
                          void *outbuf, const void *inbuf,
                          size_t nblocks, int encrypt)
{
  RIJNDAEL_context *ctx = context;
  const void *keysched = encrypt ? ctx->keyschenc32 : ctx->keyschdec32;
  unsigned int nrounds = ctx->rounds;

  if (!encrypt && !ctx->decryption_prepared)
    {
      _gcry_aes_aesni_prepare_decryption (ctx);
      ctx->decryption_prepared = 1;
    }

  _gcry_vaes_avx2_xts_crypt_amd64 (keysched, tweak, outbuf, inbuf, nblocks,
                                   nrounds, encrypt);
}

#endif /* USE_VAES */