aes_power8.cpp
/*
* AES using POWER8/POWER9 crypto extensions
*
* Contributed by Jeffrey Walton
*
* Further changes
* (C) 2018,2019 Jack Lloyd
*
* Botan is released under the Simplified BSD License (see license.txt)
*/

#include <botan/internal/aes.h>

#include <botan/compiler.h>
#include <botan/internal/cpuid.h>

// altivec.h defines "vector" and "bool" as macros; undefine them so they
// do not collide with the C++ keyword and type of the same names.
#include <altivec.h>
#undef vector
#undef bool

namespace Botan {

typedef __vector unsigned long long Altivec64x2;
typedef __vector unsigned int Altivec32x4;
typedef __vector unsigned char Altivec8x16;

namespace {

// The vcipher/vncipher builtins operate on the AES state in big-endian byte
// order, so on little-endian targets every block and round key is
// byte-reversed on load and store.
inline Altivec8x16 reverse_vec(Altivec8x16 src) {
   if(CPUID::is_little_endian()) {
      const Altivec8x16 mask = {15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0};
      const Altivec8x16 zero = {0};
      return vec_perm(src, zero, mask);
   } else {
      return src;
   }
}

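// vec_vsx_ld / vec_vsx_st perform unaligned 16-byte accesses, so the key,
// input, and output pointers do not need any particular alignment.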
BOTAN_FUNC_ISA("vsx") inline Altivec64x2 load_key(const uint32_t key[]) {
   return reinterpret_cast<Altivec64x2>(reverse_vec(reinterpret_cast<Altivec8x16>(vec_vsx_ld(0, key))));
}

BOTAN_FUNC_ISA("vsx") inline Altivec64x2 load_block(const uint8_t src[]) {
   return reinterpret_cast<Altivec64x2>(reverse_vec(vec_vsx_ld(0, src)));
}

BOTAN_FUNC_ISA("vsx") inline void store_block(Altivec64x2 src, uint8_t dest[]) {
   vec_vsx_st(reverse_vec(reinterpret_cast<Altivec8x16>(src)), 0, dest);
}

inline void store_blocks(Altivec64x2 B0, Altivec64x2 B1, Altivec64x2 B2, Altivec64x2 B3, uint8_t out[]) {
   store_block(B0, out);
   store_block(B1, out + 16);
   store_block(B2, out + 16 * 2);
   store_block(B3, out + 16 * 3);
}

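// Round-step helpers: each macro applies a single AES round operation to four
// blocks at a time using the POWER8 crypto builtins.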
#define AES_XOR_4(B0, B1, B2, B3, K) \
   do {                              \
      B0 = vec_xor(B0, K);           \
      B1 = vec_xor(B1, K);           \
      B2 = vec_xor(B2, K);           \
      B3 = vec_xor(B3, K);           \
   } while(0)

#define AES_ENCRYPT_4(B0, B1, B2, B3, K)    \
   do {                                     \
      B0 = __builtin_crypto_vcipher(B0, K); \
      B1 = __builtin_crypto_vcipher(B1, K); \
      B2 = __builtin_crypto_vcipher(B2, K); \
      B3 = __builtin_crypto_vcipher(B3, K); \
   } while(0)

#define AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K)   \
   do {                                         \
      B0 = __builtin_crypto_vcipherlast(B0, K); \
      B1 = __builtin_crypto_vcipherlast(B1, K); \
      B2 = __builtin_crypto_vcipherlast(B2, K); \
      B3 = __builtin_crypto_vcipherlast(B3, K); \
   } while(0)

#define AES_DECRYPT_4(B0, B1, B2, B3, K)     \
   do {                                      \
      B0 = __builtin_crypto_vncipher(B0, K); \
      B1 = __builtin_crypto_vncipher(B1, K); \
      B2 = __builtin_crypto_vncipher(B2, K); \
      B3 = __builtin_crypto_vncipher(B3, K); \
   } while(0)

#define AES_DECRYPT_4_LAST(B0, B1, B2, B3, K)    \
   do {                                          \
      B0 = __builtin_crypto_vncipherlast(B0, K); \
      B1 = __builtin_crypto_vncipherlast(B1, K); \
      B2 = __builtin_crypto_vncipherlast(B2, K); \
      B3 = __builtin_crypto_vncipherlast(B3, K); \
   } while(0)

}  // namespace

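// Each routine below processes four blocks per iteration while at least four
// remain, then handles any leftover blocks one at a time.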
BOTAN_FUNC_ISA("crypto,vsx") void AES_128::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   const Altivec64x2 K0 = load_key(&m_EK[0]);
   const Altivec64x2 K1 = load_key(&m_EK[4]);
   const Altivec64x2 K2 = load_key(&m_EK[8]);
   const Altivec64x2 K3 = load_key(&m_EK[12]);
   const Altivec64x2 K4 = load_key(&m_EK[16]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[32]);
   const Altivec64x2 K9 = load_key(&m_EK[36]);
   const Altivec64x2 K10 = load_key(&m_EK[40]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_ENCRYPT_4(B0, B1, B2, B3, K1);
      AES_ENCRYPT_4(B0, B1, B2, B3, K2);
      AES_ENCRYPT_4(B0, B1, B2, B3, K3);
      AES_ENCRYPT_4(B0, B1, B2, B3, K4);
      AES_ENCRYPT_4(B0, B1, B2, B3, K5);
      AES_ENCRYPT_4(B0, B1, B2, B3, K6);
      AES_ENCRYPT_4(B0, B1, B2, B3, K7);
      AES_ENCRYPT_4(B0, B1, B2, B3, K8);
      AES_ENCRYPT_4(B0, B1, B2, B3, K9);
      AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K10);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vcipher(B, K1);
      B = __builtin_crypto_vcipher(B, K2);
      B = __builtin_crypto_vcipher(B, K3);
      B = __builtin_crypto_vcipher(B, K4);
      B = __builtin_crypto_vcipher(B, K5);
      B = __builtin_crypto_vcipher(B, K6);
      B = __builtin_crypto_vcipher(B, K7);
      B = __builtin_crypto_vcipher(B, K8);
      B = __builtin_crypto_vcipher(B, K9);
      B = __builtin_crypto_vcipherlast(B, K10);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

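// The decryption routines reuse the encryption key schedule (m_EK) in reverse
// order; the vncipher builtins are specified to work with the regular
// schedule, so no separate decryption key schedule is needed.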
BOTAN_FUNC_ISA("crypto,vsx") void AES_128::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   const Altivec64x2 K0 = load_key(&m_EK[40]);
   const Altivec64x2 K1 = load_key(&m_EK[36]);
   const Altivec64x2 K2 = load_key(&m_EK[32]);
   const Altivec64x2 K3 = load_key(&m_EK[28]);
   const Altivec64x2 K4 = load_key(&m_EK[24]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[16]);
   const Altivec64x2 K7 = load_key(&m_EK[12]);
   const Altivec64x2 K8 = load_key(&m_EK[8]);
   const Altivec64x2 K9 = load_key(&m_EK[4]);
   const Altivec64x2 K10 = load_key(&m_EK[0]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_DECRYPT_4(B0, B1, B2, B3, K1);
      AES_DECRYPT_4(B0, B1, B2, B3, K2);
      AES_DECRYPT_4(B0, B1, B2, B3, K3);
      AES_DECRYPT_4(B0, B1, B2, B3, K4);
      AES_DECRYPT_4(B0, B1, B2, B3, K5);
      AES_DECRYPT_4(B0, B1, B2, B3, K6);
      AES_DECRYPT_4(B0, B1, B2, B3, K7);
      AES_DECRYPT_4(B0, B1, B2, B3, K8);
      AES_DECRYPT_4(B0, B1, B2, B3, K9);
      AES_DECRYPT_4_LAST(B0, B1, B2, B3, K10);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vncipher(B, K1);
      B = __builtin_crypto_vncipher(B, K2);
      B = __builtin_crypto_vncipher(B, K3);
      B = __builtin_crypto_vncipher(B, K4);
      B = __builtin_crypto_vncipher(B, K5);
      B = __builtin_crypto_vncipher(B, K6);
      B = __builtin_crypto_vncipher(B, K7);
      B = __builtin_crypto_vncipher(B, K8);
      B = __builtin_crypto_vncipher(B, K9);
      B = __builtin_crypto_vncipherlast(B, K10);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

BOTAN_FUNC_ISA("crypto,vsx") void AES_192::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   const Altivec64x2 K0 = load_key(&m_EK[0]);
   const Altivec64x2 K1 = load_key(&m_EK[4]);
   const Altivec64x2 K2 = load_key(&m_EK[8]);
   const Altivec64x2 K3 = load_key(&m_EK[12]);
   const Altivec64x2 K4 = load_key(&m_EK[16]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[32]);
   const Altivec64x2 K9 = load_key(&m_EK[36]);
   const Altivec64x2 K10 = load_key(&m_EK[40]);
   const Altivec64x2 K11 = load_key(&m_EK[44]);
   const Altivec64x2 K12 = load_key(&m_EK[48]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_ENCRYPT_4(B0, B1, B2, B3, K1);
      AES_ENCRYPT_4(B0, B1, B2, B3, K2);
      AES_ENCRYPT_4(B0, B1, B2, B3, K3);
      AES_ENCRYPT_4(B0, B1, B2, B3, K4);
      AES_ENCRYPT_4(B0, B1, B2, B3, K5);
      AES_ENCRYPT_4(B0, B1, B2, B3, K6);
      AES_ENCRYPT_4(B0, B1, B2, B3, K7);
      AES_ENCRYPT_4(B0, B1, B2, B3, K8);
      AES_ENCRYPT_4(B0, B1, B2, B3, K9);
      AES_ENCRYPT_4(B0, B1, B2, B3, K10);
      AES_ENCRYPT_4(B0, B1, B2, B3, K11);
      AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K12);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vcipher(B, K1);
      B = __builtin_crypto_vcipher(B, K2);
      B = __builtin_crypto_vcipher(B, K3);
      B = __builtin_crypto_vcipher(B, K4);
      B = __builtin_crypto_vcipher(B, K5);
      B = __builtin_crypto_vcipher(B, K6);
      B = __builtin_crypto_vcipher(B, K7);
      B = __builtin_crypto_vcipher(B, K8);
      B = __builtin_crypto_vcipher(B, K9);
      B = __builtin_crypto_vcipher(B, K10);
      B = __builtin_crypto_vcipher(B, K11);
      B = __builtin_crypto_vcipherlast(B, K12);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

BOTAN_FUNC_ISA("crypto,vsx") void AES_192::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   const Altivec64x2 K0 = load_key(&m_EK[48]);
   const Altivec64x2 K1 = load_key(&m_EK[44]);
   const Altivec64x2 K2 = load_key(&m_EK[40]);
   const Altivec64x2 K3 = load_key(&m_EK[36]);
   const Altivec64x2 K4 = load_key(&m_EK[32]);
   const Altivec64x2 K5 = load_key(&m_EK[28]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[20]);
   const Altivec64x2 K8 = load_key(&m_EK[16]);
   const Altivec64x2 K9 = load_key(&m_EK[12]);
   const Altivec64x2 K10 = load_key(&m_EK[8]);
   const Altivec64x2 K11 = load_key(&m_EK[4]);
   const Altivec64x2 K12 = load_key(&m_EK[0]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_DECRYPT_4(B0, B1, B2, B3, K1);
      AES_DECRYPT_4(B0, B1, B2, B3, K2);
      AES_DECRYPT_4(B0, B1, B2, B3, K3);
      AES_DECRYPT_4(B0, B1, B2, B3, K4);
      AES_DECRYPT_4(B0, B1, B2, B3, K5);
      AES_DECRYPT_4(B0, B1, B2, B3, K6);
      AES_DECRYPT_4(B0, B1, B2, B3, K7);
      AES_DECRYPT_4(B0, B1, B2, B3, K8);
      AES_DECRYPT_4(B0, B1, B2, B3, K9);
      AES_DECRYPT_4(B0, B1, B2, B3, K10);
      AES_DECRYPT_4(B0, B1, B2, B3, K11);
      AES_DECRYPT_4_LAST(B0, B1, B2, B3, K12);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vncipher(B, K1);
      B = __builtin_crypto_vncipher(B, K2);
      B = __builtin_crypto_vncipher(B, K3);
      B = __builtin_crypto_vncipher(B, K4);
      B = __builtin_crypto_vncipher(B, K5);
      B = __builtin_crypto_vncipher(B, K6);
      B = __builtin_crypto_vncipher(B, K7);
      B = __builtin_crypto_vncipher(B, K8);
      B = __builtin_crypto_vncipher(B, K9);
      B = __builtin_crypto_vncipher(B, K10);
      B = __builtin_crypto_vncipher(B, K11);
      B = __builtin_crypto_vncipherlast(B, K12);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

BOTAN_FUNC_ISA("crypto,vsx") void AES_256::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   const Altivec64x2 K0 = load_key(&m_EK[0]);
   const Altivec64x2 K1 = load_key(&m_EK[4]);
   const Altivec64x2 K2 = load_key(&m_EK[8]);
   const Altivec64x2 K3 = load_key(&m_EK[12]);
   const Altivec64x2 K4 = load_key(&m_EK[16]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[32]);
   const Altivec64x2 K9 = load_key(&m_EK[36]);
   const Altivec64x2 K10 = load_key(&m_EK[40]);
   const Altivec64x2 K11 = load_key(&m_EK[44]);
   const Altivec64x2 K12 = load_key(&m_EK[48]);
   const Altivec64x2 K13 = load_key(&m_EK[52]);
   const Altivec64x2 K14 = load_key(&m_EK[56]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_ENCRYPT_4(B0, B1, B2, B3, K1);
      AES_ENCRYPT_4(B0, B1, B2, B3, K2);
      AES_ENCRYPT_4(B0, B1, B2, B3, K3);
      AES_ENCRYPT_4(B0, B1, B2, B3, K4);
      AES_ENCRYPT_4(B0, B1, B2, B3, K5);
      AES_ENCRYPT_4(B0, B1, B2, B3, K6);
      AES_ENCRYPT_4(B0, B1, B2, B3, K7);
      AES_ENCRYPT_4(B0, B1, B2, B3, K8);
      AES_ENCRYPT_4(B0, B1, B2, B3, K9);
      AES_ENCRYPT_4(B0, B1, B2, B3, K10);
      AES_ENCRYPT_4(B0, B1, B2, B3, K11);
      AES_ENCRYPT_4(B0, B1, B2, B3, K12);
      AES_ENCRYPT_4(B0, B1, B2, B3, K13);
      AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K14);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vcipher(B, K1);
      B = __builtin_crypto_vcipher(B, K2);
      B = __builtin_crypto_vcipher(B, K3);
      B = __builtin_crypto_vcipher(B, K4);
      B = __builtin_crypto_vcipher(B, K5);
      B = __builtin_crypto_vcipher(B, K6);
      B = __builtin_crypto_vcipher(B, K7);
      B = __builtin_crypto_vcipher(B, K8);
      B = __builtin_crypto_vcipher(B, K9);
      B = __builtin_crypto_vcipher(B, K10);
      B = __builtin_crypto_vcipher(B, K11);
      B = __builtin_crypto_vcipher(B, K12);
      B = __builtin_crypto_vcipher(B, K13);
      B = __builtin_crypto_vcipherlast(B, K14);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

BOTAN_FUNC_ISA("crypto,vsx") void AES_256::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
   const Altivec64x2 K0 = load_key(&m_EK[56]);
   const Altivec64x2 K1 = load_key(&m_EK[52]);
   const Altivec64x2 K2 = load_key(&m_EK[48]);
   const Altivec64x2 K3 = load_key(&m_EK[44]);
   const Altivec64x2 K4 = load_key(&m_EK[40]);
   const Altivec64x2 K5 = load_key(&m_EK[36]);
   const Altivec64x2 K6 = load_key(&m_EK[32]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[24]);
   const Altivec64x2 K9 = load_key(&m_EK[20]);
   const Altivec64x2 K10 = load_key(&m_EK[16]);
   const Altivec64x2 K11 = load_key(&m_EK[12]);
   const Altivec64x2 K12 = load_key(&m_EK[8]);
   const Altivec64x2 K13 = load_key(&m_EK[4]);
   const Altivec64x2 K14 = load_key(&m_EK[0]);

   while(blocks >= 4) {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in + 16);
      Altivec64x2 B2 = load_block(in + 16 * 2);
      Altivec64x2 B3 = load_block(in + 16 * 3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_DECRYPT_4(B0, B1, B2, B3, K1);
      AES_DECRYPT_4(B0, B1, B2, B3, K2);
      AES_DECRYPT_4(B0, B1, B2, B3, K3);
      AES_DECRYPT_4(B0, B1, B2, B3, K4);
      AES_DECRYPT_4(B0, B1, B2, B3, K5);
      AES_DECRYPT_4(B0, B1, B2, B3, K6);
      AES_DECRYPT_4(B0, B1, B2, B3, K7);
      AES_DECRYPT_4(B0, B1, B2, B3, K8);
      AES_DECRYPT_4(B0, B1, B2, B3, K9);
      AES_DECRYPT_4(B0, B1, B2, B3, K10);
      AES_DECRYPT_4(B0, B1, B2, B3, K11);
      AES_DECRYPT_4(B0, B1, B2, B3, K12);
      AES_DECRYPT_4(B0, B1, B2, B3, K13);
      AES_DECRYPT_4_LAST(B0, B1, B2, B3, K14);

      store_blocks(B0, B1, B2, B3, out);

      out += 4 * 16;
      in += 4 * 16;
      blocks -= 4;
   }

   for(size_t i = 0; i != blocks; ++i) {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vncipher(B, K1);
      B = __builtin_crypto_vncipher(B, K2);
      B = __builtin_crypto_vncipher(B, K3);
      B = __builtin_crypto_vncipher(B, K4);
      B = __builtin_crypto_vncipher(B, K5);
      B = __builtin_crypto_vncipher(B, K6);
      B = __builtin_crypto_vncipher(B, K7);
      B = __builtin_crypto_vncipher(B, K8);
      B = __builtin_crypto_vncipher(B, K9);
      B = __builtin_crypto_vncipher(B, K10);
      B = __builtin_crypto_vncipher(B, K11);
      B = __builtin_crypto_vncipher(B, K12);
      B = __builtin_crypto_vncipher(B, K13);
      B = __builtin_crypto_vncipherlast(B, K14);

      store_block(B, out);

      out += 16;
      in += 16;
   }
}

#undef AES_XOR_4
#undef AES_ENCRYPT_4
#undef AES_ENCRYPT_4_LAST
#undef AES_DECRYPT_4
#undef AES_DECRYPT_4_LAST

}  // namespace Botan
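
/*
* Usage sketch: these hw_aes_* routines are normally reached through Botan's
* generic BlockCipher interface, which dispatches to them when the CPU
* reports support for the POWER8 crypto extensions. A minimal example is
* sketched below; the wrapper function name and the 16-byte key length are
* illustrative only.
*
*   #include <botan/block_cipher.h>
*
*   void encrypt_blocks(const uint8_t key[16], const uint8_t in[], uint8_t out[], size_t blocks) {
*      auto aes = Botan::BlockCipher::create_or_throw("AES-128");
*      aes->set_key(key, 16);
*      aes->encrypt_n(in, out, blocks);  // uses hw_aes_encrypt_n when available
*   }
*/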