Botan 3.3.0
Crypto and TLS for C++
aes_power8.cpp
/*
* AES using POWER8/POWER9 crypto extensions
*
* Contributed by Jeffrey Walton
*
* Further changes
* (C) 2018,2019 Jack Lloyd
*
* Botan is released under the Simplified BSD License (see license.txt)
*/

#include <botan/internal/aes.h>

#include <botan/internal/cpuid.h>

#include <altivec.h>
#undef vector
#undef bool

namespace Botan {

typedef __vector unsigned long long Altivec64x2;
typedef __vector unsigned int Altivec32x4;
typedef __vector unsigned char Altivec8x16;

namespace {

// On little-endian targets, reverse the byte order so that blocks and round
// keys are presented to the crypto instructions in big-endian element order.
inline Altivec8x16 reverse_vec(Altivec8x16 src) {
   if(CPUID::is_little_endian()) {
      const Altivec8x16 mask = {15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0};
      const Altivec8x16 zero = {0};
      return vec_perm(src, zero, mask);
   } else {
      return src;
   }
}

// Loads and stores go through reverse_vec() so the data reaches the crypto
// instructions in a consistent order regardless of host endianness.
BOTAN_FUNC_ISA("vsx") inline Altivec64x2 load_key(const uint32_t key[]) {
   return reinterpret_cast<Altivec64x2>(reverse_vec(reinterpret_cast<Altivec8x16>(vec_vsx_ld(0, key))));
}

BOTAN_FUNC_ISA("vsx") inline Altivec64x2 load_block(const uint8_t src[]) {
   return reinterpret_cast<Altivec64x2>(reverse_vec(vec_vsx_ld(0, src)));
}

BOTAN_FUNC_ISA("vsx") inline void store_block(Altivec64x2 src, uint8_t dest[]) {
   vec_vsx_st(reverse_vec(reinterpret_cast<Altivec8x16>(src)), 0, dest);
}

inline void store_blocks(Altivec64x2 B0, Altivec64x2 B1, Altivec64x2 B2, Altivec64x2 B3, uint8_t out[]) {
   store_block(B0, out);
   store_block(B1, out + 16);
   store_block(B2, out + 16 * 2);
   store_block(B3, out + 16 * 3);
}

// Helpers that apply the initial key XOR, a middle round, or the final round
// of AES to four blocks in parallel.
#define AES_XOR_4(B0, B1, B2, B3, K) \
   do {                              \
      B0 = vec_xor(B0, K);           \
      B1 = vec_xor(B1, K);           \
      B2 = vec_xor(B2, K);           \
      B3 = vec_xor(B3, K);           \
   } while(0)

#define AES_ENCRYPT_4(B0, B1, B2, B3, K)    \
   do {                                     \
      B0 = __builtin_crypto_vcipher(B0, K); \
      B1 = __builtin_crypto_vcipher(B1, K); \
      B2 = __builtin_crypto_vcipher(B2, K); \
      B3 = __builtin_crypto_vcipher(B3, K); \
   } while(0)

#define AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K)   \
   do {                                         \
      B0 = __builtin_crypto_vcipherlast(B0, K); \
      B1 = __builtin_crypto_vcipherlast(B1, K); \
      B2 = __builtin_crypto_vcipherlast(B2, K); \
      B3 = __builtin_crypto_vcipherlast(B3, K); \
   } while(0)

#define AES_DECRYPT_4(B0, B1, B2, B3, K)     \
   do {                                      \
      B0 = __builtin_crypto_vncipher(B0, K); \
      B1 = __builtin_crypto_vncipher(B1, K); \
      B2 = __builtin_crypto_vncipher(B2, K); \
      B3 = __builtin_crypto_vncipher(B3, K); \
   } while(0)

#define AES_DECRYPT_4_LAST(B0, B1, B2, B3, K)    \
   do {                                          \
      B0 = __builtin_crypto_vncipherlast(B0, K); \
      B1 = __builtin_crypto_vncipherlast(B1, K); \
      B2 = __builtin_crypto_vncipherlast(B2, K); \
      B3 = __builtin_crypto_vncipherlast(B3, K); \
   } while(0)

} // namespace

BOTAN_FUNC_ISA("crypto,vsx") void AES_128::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   // Load the expanded round keys once per call
   const Altivec64x2 K0 = load_key(&m_EK[0]);
   const Altivec64x2 K1 = load_key(&m_EK[4]);
   const Altivec64x2 K2 = load_key(&m_EK[8]);
   const Altivec64x2 K3 = load_key(&m_EK[12]);
   const Altivec64x2 K4 = load_key(&m_EK[16]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[32]);
   const Altivec64x2 K9 = load_key(&m_EK[36]);
   const Altivec64x2 K10 = load_key(&m_EK[40]);

   // Process four blocks per iteration while enough input remains
   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_ENCRYPT_4(B0, B1, B2, B3, K1);
      AES_ENCRYPT_4(B0, B1, B2, B3, K2);
      AES_ENCRYPT_4(B0, B1, B2, B3, K3);
      AES_ENCRYPT_4(B0, B1, B2, B3, K4);
      AES_ENCRYPT_4(B0, B1, B2, B3, K5);
      AES_ENCRYPT_4(B0, B1, B2, B3, K6);
      AES_ENCRYPT_4(B0, B1, B2, B3, K7);
      AES_ENCRYPT_4(B0, B1, B2, B3, K8);
      AES_ENCRYPT_4(B0, B1, B2, B3, K9);
      AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K10);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   // Handle any remaining blocks one at a time
   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vcipher(B, K1);
      B = __builtin_crypto_vcipher(B, K2);
      B = __builtin_crypto_vcipher(B, K3);
      B = __builtin_crypto_vcipher(B, K4);
      B = __builtin_crypto_vcipher(B, K5);
      B = __builtin_crypto_vcipher(B, K6);
      B = __builtin_crypto_vcipher(B, K7);
      B = __builtin_crypto_vcipher(B, K8);
      B = __builtin_crypto_vcipher(B, K9);
      B = __builtin_crypto_vcipherlast(B, K10);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

BOTAN_FUNC_ISA("crypto,vsx") void AES_128::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   // For decryption the round keys are applied in reverse order
   const Altivec64x2 K0 = load_key(&m_EK[40]);
   const Altivec64x2 K1 = load_key(&m_EK[36]);
   const Altivec64x2 K2 = load_key(&m_EK[32]);
   const Altivec64x2 K3 = load_key(&m_EK[28]);
   const Altivec64x2 K4 = load_key(&m_EK[24]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[16]);
   const Altivec64x2 K7 = load_key(&m_EK[12]);
   const Altivec64x2 K8 = load_key(&m_EK[8]);
   const Altivec64x2 K9 = load_key(&m_EK[4]);
   const Altivec64x2 K10 = load_key(&m_EK[0]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_DECRYPT_4(B0, B1, B2, B3, K1);
      AES_DECRYPT_4(B0, B1, B2, B3, K2);
      AES_DECRYPT_4(B0, B1, B2, B3, K3);
      AES_DECRYPT_4(B0, B1, B2, B3, K4);
      AES_DECRYPT_4(B0, B1, B2, B3, K5);
      AES_DECRYPT_4(B0, B1, B2, B3, K6);
      AES_DECRYPT_4(B0, B1, B2, B3, K7);
      AES_DECRYPT_4(B0, B1, B2, B3, K8);
      AES_DECRYPT_4(B0, B1, B2, B3, K9);
      AES_DECRYPT_4_LAST(B0, B1, B2, B3, K10);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vncipher(B, K1);
      B = __builtin_crypto_vncipher(B, K2);
      B = __builtin_crypto_vncipher(B, K3);
      B = __builtin_crypto_vncipher(B, K4);
      B = __builtin_crypto_vncipher(B, K5);
      B = __builtin_crypto_vncipher(B, K6);
      B = __builtin_crypto_vncipher(B, K7);
      B = __builtin_crypto_vncipher(B, K8);
      B = __builtin_crypto_vncipher(B, K9);
      B = __builtin_crypto_vncipherlast(B, K10);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

BOTAN_FUNC_ISA("crypto,vsx") void AES_192::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   const Altivec64x2 K0 = load_key(&m_EK[0]);
   const Altivec64x2 K1 = load_key(&m_EK[4]);
   const Altivec64x2 K2 = load_key(&m_EK[8]);
   const Altivec64x2 K3 = load_key(&m_EK[12]);
   const Altivec64x2 K4 = load_key(&m_EK[16]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[32]);
   const Altivec64x2 K9 = load_key(&m_EK[36]);
   const Altivec64x2 K10 = load_key(&m_EK[40]);
   const Altivec64x2 K11 = load_key(&m_EK[44]);
   const Altivec64x2 K12 = load_key(&m_EK[48]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_ENCRYPT_4(B0, B1, B2, B3, K1);
      AES_ENCRYPT_4(B0, B1, B2, B3, K2);
      AES_ENCRYPT_4(B0, B1, B2, B3, K3);
      AES_ENCRYPT_4(B0, B1, B2, B3, K4);
      AES_ENCRYPT_4(B0, B1, B2, B3, K5);
      AES_ENCRYPT_4(B0, B1, B2, B3, K6);
      AES_ENCRYPT_4(B0, B1, B2, B3, K7);
      AES_ENCRYPT_4(B0, B1, B2, B3, K8);
      AES_ENCRYPT_4(B0, B1, B2, B3, K9);
      AES_ENCRYPT_4(B0, B1, B2, B3, K10);
      AES_ENCRYPT_4(B0, B1, B2, B3, K11);
      AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K12);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vcipher(B, K1);
      B = __builtin_crypto_vcipher(B, K2);
      B = __builtin_crypto_vcipher(B, K3);
      B = __builtin_crypto_vcipher(B, K4);
      B = __builtin_crypto_vcipher(B, K5);
      B = __builtin_crypto_vcipher(B, K6);
      B = __builtin_crypto_vcipher(B, K7);
      B = __builtin_crypto_vcipher(B, K8);
      B = __builtin_crypto_vcipher(B, K9);
      B = __builtin_crypto_vcipher(B, K10);
      B = __builtin_crypto_vcipher(B, K11);
      B = __builtin_crypto_vcipherlast(B, K12);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

BOTAN_FUNC_ISA("crypto,vsx") void AES_192::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   const Altivec64x2 K0 = load_key(&m_EK[48]);
   const Altivec64x2 K1 = load_key(&m_EK[44]);
   const Altivec64x2 K2 = load_key(&m_EK[40]);
   const Altivec64x2 K3 = load_key(&m_EK[36]);
   const Altivec64x2 K4 = load_key(&m_EK[32]);
   const Altivec64x2 K5 = load_key(&m_EK[28]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[20]);
   const Altivec64x2 K8 = load_key(&m_EK[16]);
   const Altivec64x2 K9 = load_key(&m_EK[12]);
   const Altivec64x2 K10 = load_key(&m_EK[8]);
   const Altivec64x2 K11 = load_key(&m_EK[4]);
   const Altivec64x2 K12 = load_key(&m_EK[0]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_DECRYPT_4(B0, B1, B2, B3, K1);
      AES_DECRYPT_4(B0, B1, B2, B3, K2);
      AES_DECRYPT_4(B0, B1, B2, B3, K3);
      AES_DECRYPT_4(B0, B1, B2, B3, K4);
      AES_DECRYPT_4(B0, B1, B2, B3, K5);
      AES_DECRYPT_4(B0, B1, B2, B3, K6);
      AES_DECRYPT_4(B0, B1, B2, B3, K7);
      AES_DECRYPT_4(B0, B1, B2, B3, K8);
      AES_DECRYPT_4(B0, B1, B2, B3, K9);
      AES_DECRYPT_4(B0, B1, B2, B3, K10);
      AES_DECRYPT_4(B0, B1, B2, B3, K11);
      AES_DECRYPT_4_LAST(B0, B1, B2, B3, K12);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vncipher(B, K1);
      B = __builtin_crypto_vncipher(B, K2);
      B = __builtin_crypto_vncipher(B, K3);
      B = __builtin_crypto_vncipher(B, K4);
      B = __builtin_crypto_vncipher(B, K5);
      B = __builtin_crypto_vncipher(B, K6);
      B = __builtin_crypto_vncipher(B, K7);
      B = __builtin_crypto_vncipher(B, K8);
      B = __builtin_crypto_vncipher(B, K9);
      B = __builtin_crypto_vncipher(B, K10);
      B = __builtin_crypto_vncipher(B, K11);
      B = __builtin_crypto_vncipherlast(B, K12);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

BOTAN_FUNC_ISA("crypto,vsx") void AES_256::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   const Altivec64x2 K0 = load_key(&m_EK[0]);
   const Altivec64x2 K1 = load_key(&m_EK[4]);
   const Altivec64x2 K2 = load_key(&m_EK[8]);
   const Altivec64x2 K3 = load_key(&m_EK[12]);
   const Altivec64x2 K4 = load_key(&m_EK[16]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[32]);
   const Altivec64x2 K9 = load_key(&m_EK[36]);
   const Altivec64x2 K10 = load_key(&m_EK[40]);
   const Altivec64x2 K11 = load_key(&m_EK[44]);
   const Altivec64x2 K12 = load_key(&m_EK[48]);
   const Altivec64x2 K13 = load_key(&m_EK[52]);
   const Altivec64x2 K14 = load_key(&m_EK[56]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_ENCRYPT_4(B0, B1, B2, B3, K1);
      AES_ENCRYPT_4(B0, B1, B2, B3, K2);
      AES_ENCRYPT_4(B0, B1, B2, B3, K3);
      AES_ENCRYPT_4(B0, B1, B2, B3, K4);
      AES_ENCRYPT_4(B0, B1, B2, B3, K5);
      AES_ENCRYPT_4(B0, B1, B2, B3, K6);
      AES_ENCRYPT_4(B0, B1, B2, B3, K7);
      AES_ENCRYPT_4(B0, B1, B2, B3, K8);
      AES_ENCRYPT_4(B0, B1, B2, B3, K9);
      AES_ENCRYPT_4(B0, B1, B2, B3, K10);
      AES_ENCRYPT_4(B0, B1, B2, B3, K11);
      AES_ENCRYPT_4(B0, B1, B2, B3, K12);
      AES_ENCRYPT_4(B0, B1, B2, B3, K13);
      AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K14);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vcipher(B, K1);
      B = __builtin_crypto_vcipher(B, K2);
      B = __builtin_crypto_vcipher(B, K3);
      B = __builtin_crypto_vcipher(B, K4);
      B = __builtin_crypto_vcipher(B, K5);
      B = __builtin_crypto_vcipher(B, K6);
      B = __builtin_crypto_vcipher(B, K7);
      B = __builtin_crypto_vcipher(B, K8);
      B = __builtin_crypto_vcipher(B, K9);
      B = __builtin_crypto_vcipher(B, K10);
      B = __builtin_crypto_vcipher(B, K11);
      B = __builtin_crypto_vcipher(B, K12);
      B = __builtin_crypto_vcipher(B, K13);
      B = __builtin_crypto_vcipherlast(B, K14);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

BOTAN_FUNC_ISA("crypto,vsx") void AES_256::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   const Altivec64x2 K0 = load_key(&m_EK[56]);
   const Altivec64x2 K1 = load_key(&m_EK[52]);
   const Altivec64x2 K2 = load_key(&m_EK[48]);
   const Altivec64x2 K3 = load_key(&m_EK[44]);
   const Altivec64x2 K4 = load_key(&m_EK[40]);
   const Altivec64x2 K5 = load_key(&m_EK[36]);
   const Altivec64x2 K6 = load_key(&m_EK[32]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[24]);
   const Altivec64x2 K9 = load_key(&m_EK[20]);
   const Altivec64x2 K10 = load_key(&m_EK[16]);
   const Altivec64x2 K11 = load_key(&m_EK[12]);
   const Altivec64x2 K12 = load_key(&m_EK[8]);
   const Altivec64x2 K13 = load_key(&m_EK[4]);
   const Altivec64x2 K14 = load_key(&m_EK[0]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_DECRYPT_4(B0, B1, B2, B3, K1);
      AES_DECRYPT_4(B0, B1, B2, B3, K2);
      AES_DECRYPT_4(B0, B1, B2, B3, K3);
      AES_DECRYPT_4(B0, B1, B2, B3, K4);
      AES_DECRYPT_4(B0, B1, B2, B3, K5);
      AES_DECRYPT_4(B0, B1, B2, B3, K6);
      AES_DECRYPT_4(B0, B1, B2, B3, K7);
      AES_DECRYPT_4(B0, B1, B2, B3, K8);
      AES_DECRYPT_4(B0, B1, B2, B3, K9);
      AES_DECRYPT_4(B0, B1, B2, B3, K10);
      AES_DECRYPT_4(B0, B1, B2, B3, K11);
      AES_DECRYPT_4(B0, B1, B2, B3, K12);
      AES_DECRYPT_4(B0, B1, B2, B3, K13);
      AES_DECRYPT_4_LAST(B0, B1, B2, B3, K14);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vncipher(B, K1);
      B = __builtin_crypto_vncipher(B, K2);
      B = __builtin_crypto_vncipher(B, K3);
      B = __builtin_crypto_vncipher(B, K4);
      B = __builtin_crypto_vncipher(B, K5);
      B = __builtin_crypto_vncipher(B, K6);
      B = __builtin_crypto_vncipher(B, K7);
      B = __builtin_crypto_vncipher(B, K8);
      B = __builtin_crypto_vncipher(B, K9);
      B = __builtin_crypto_vncipher(B, K10);
      B = __builtin_crypto_vncipher(B, K11);
      B = __builtin_crypto_vncipher(B, K12);
      B = __builtin_crypto_vncipher(B, K13);
      B = __builtin_crypto_vncipherlast(B, K14);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

#undef AES_XOR_4
#undef AES_ENCRYPT_4
#undef AES_ENCRYPT_4_LAST
#undef AES_DECRYPT_4
#undef AES_DECRYPT_4_LAST

} // namespace Botan
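
The hw_aes_*_n member functions above are internal: Botan selects them at runtime when its CPU feature detection reports the POWER crypto extensions, and callers reach them only through the public BlockCipher interface. Below is a minimal usage sketch under that assumption; the key bytes and buffer contents are placeholders for illustration, and on other hardware the same calls simply run one of the portable AES implementations instead.

#include <botan/block_cipher.h>

#include <cstdint>
#include <vector>

int main() {
   // Request AES-128; the library picks the fastest available backend,
   // which on POWER8/POWER9 is the hardware path defined in this file.
   auto aes = Botan::BlockCipher::create_or_throw("AES-128");

   const std::vector<uint8_t> key(16, 0x42);  // placeholder 128-bit key
   aes->set_key(key.data(), key.size());

   std::vector<uint8_t> buf(4 * 16, 0);  // four 16-byte blocks
   aes->encrypt_n(buf.data(), buf.data(), 4);  // encrypt in place
   return 0;
}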