Botan 3.0.0-alpha0
Crypto and TLS for C++
aes_power8.cpp
/*
* AES using POWER8/POWER9 crypto extensions
*
* Contributed by Jeffrey Walton
*
* Further changes
* (C) 2018,2019 Jack Lloyd
*
* Botan is released under the Simplified BSD License (see license.txt)
*/

#include <botan/internal/aes.h>
#include <botan/internal/cpuid.h>

#include <altivec.h>
#undef vector
#undef bool

namespace Botan {

typedef __vector unsigned long long Altivec64x2;
typedef __vector unsigned int Altivec32x4;
typedef __vector unsigned char Altivec8x16;

namespace {

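/*
* The vcipher/vncipher builtins operate on the 16-byte block in big-endian
* lane order. On little-endian targets the loaded bytes are therefore
* reversed with a permute; on big-endian targets the vector is already in
* the expected order.
*/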
inline Altivec8x16 reverse_vec(Altivec8x16 src)
   {
   if(CPUID::is_little_endian())
      {
      const Altivec8x16 mask = {15,14,13,12, 11,10,9,8, 7,6,5,4, 3,2,1,0};
      const Altivec8x16 zero = {0};
      return vec_perm(src, zero, mask);
      }
   else
      {
      return src;
      }
   }

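/*
* Unaligned VSX loads/stores of round keys and data blocks, combined with
* the byte reversal above so that each 128-bit value ends up in the register
* layout the crypto instructions expect.
*/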
BOTAN_FUNC_ISA("vsx")
inline Altivec64x2 load_key(const uint32_t key[])
   {
   return reinterpret_cast<Altivec64x2>(reverse_vec(reinterpret_cast<Altivec8x16>(vec_vsx_ld(0, key))));
   }

BOTAN_FUNC_ISA("vsx")
inline Altivec64x2 load_block(const uint8_t src[])
   {
   return reinterpret_cast<Altivec64x2>(reverse_vec(vec_vsx_ld(0, src)));
   }

BOTAN_FUNC_ISA("vsx")
inline void store_block(Altivec64x2 src, uint8_t dest[])
   {
   vec_vsx_st(reverse_vec(reinterpret_cast<Altivec8x16>(src)), 0, dest);
   }

inline void store_blocks(Altivec64x2 B0, Altivec64x2 B1,
                         Altivec64x2 B2, Altivec64x2 B3,
                         uint8_t out[])
   {
   store_block(B0, out);
   store_block(B1, out+16);
   store_block(B2, out+16*2);
   store_block(B3, out+16*3);
   }

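/*
* One AES round applied to four blocks at a time. Keeping four independent
* blocks in flight helps hide the latency of the vcipher/vncipher
* instructions.
*/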
#define AES_XOR_4(B0, B1, B2, B3, K) do {      \
   B0 = vec_xor(B0, K);                        \
   B1 = vec_xor(B1, K);                        \
   B2 = vec_xor(B2, K);                        \
   B3 = vec_xor(B3, K);                        \
   } while(0)

#define AES_ENCRYPT_4(B0, B1, B2, B3, K) do {  \
   B0 = __builtin_crypto_vcipher(B0, K);       \
   B1 = __builtin_crypto_vcipher(B1, K);       \
   B2 = __builtin_crypto_vcipher(B2, K);       \
   B3 = __builtin_crypto_vcipher(B3, K);       \
   } while(0)

#define AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K) do { \
   B0 = __builtin_crypto_vcipherlast(B0, K);       \
   B1 = __builtin_crypto_vcipherlast(B1, K);       \
   B2 = __builtin_crypto_vcipherlast(B2, K);       \
   B3 = __builtin_crypto_vcipherlast(B3, K);       \
   } while(0)

#define AES_DECRYPT_4(B0, B1, B2, B3, K) do {  \
   B0 = __builtin_crypto_vncipher(B0, K);      \
   B1 = __builtin_crypto_vncipher(B1, K);      \
   B2 = __builtin_crypto_vncipher(B2, K);      \
   B3 = __builtin_crypto_vncipher(B3, K);      \
   } while(0)

#define AES_DECRYPT_4_LAST(B0, B1, B2, B3, K) do { \
   B0 = __builtin_crypto_vncipherlast(B0, K);      \
   B1 = __builtin_crypto_vncipherlast(B1, K);      \
   B2 = __builtin_crypto_vncipherlast(B2, K);      \
   B3 = __builtin_crypto_vncipherlast(B3, K);      \
   } while(0)

}

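// AES-128 encryption: 11 round keys (K0..K10); blocks are handled four at a
// time, with any remainder processed one block at a time below.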
BOTAN_FUNC_ISA("crypto,vsx")
void AES_128::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
   {
   const Altivec64x2 K0 = load_key(&m_EK[0]);
   const Altivec64x2 K1 = load_key(&m_EK[4]);
   const Altivec64x2 K2 = load_key(&m_EK[8]);
   const Altivec64x2 K3 = load_key(&m_EK[12]);
   const Altivec64x2 K4 = load_key(&m_EK[16]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[32]);
   const Altivec64x2 K9 = load_key(&m_EK[36]);
   const Altivec64x2 K10 = load_key(&m_EK[40]);

   while(blocks >= 4)
      {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in+16);
      Altivec64x2 B2 = load_block(in+16*2);
      Altivec64x2 B3 = load_block(in+16*3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_ENCRYPT_4(B0, B1, B2, B3, K1);
      AES_ENCRYPT_4(B0, B1, B2, B3, K2);
      AES_ENCRYPT_4(B0, B1, B2, B3, K3);
      AES_ENCRYPT_4(B0, B1, B2, B3, K4);
      AES_ENCRYPT_4(B0, B1, B2, B3, K5);
      AES_ENCRYPT_4(B0, B1, B2, B3, K6);
      AES_ENCRYPT_4(B0, B1, B2, B3, K7);
      AES_ENCRYPT_4(B0, B1, B2, B3, K8);
      AES_ENCRYPT_4(B0, B1, B2, B3, K9);
      AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K10);

      store_blocks(B0, B1, B2, B3, out);

      out += 4*16;
      in += 4*16;
      blocks -= 4;
      }

   for(size_t i = 0; i != blocks; ++i)
      {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vcipher(B, K1);
      B = __builtin_crypto_vcipher(B, K2);
      B = __builtin_crypto_vcipher(B, K3);
      B = __builtin_crypto_vcipher(B, K4);
      B = __builtin_crypto_vcipher(B, K5);
      B = __builtin_crypto_vcipher(B, K6);
      B = __builtin_crypto_vcipher(B, K7);
      B = __builtin_crypto_vcipher(B, K8);
      B = __builtin_crypto_vcipher(B, K9);
      B = __builtin_crypto_vcipherlast(B, K10);

      store_block(B, out);

      out += 16;
      in += 16;
      }
   }

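// AES-128 decryption: the encryption key schedule is loaded in reverse
// order for use with vncipher/vncipherlast.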
BOTAN_FUNC_ISA("crypto,vsx")
void AES_128::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
   {
   const Altivec64x2 K0 = load_key(&m_EK[40]);
   const Altivec64x2 K1 = load_key(&m_EK[36]);
   const Altivec64x2 K2 = load_key(&m_EK[32]);
   const Altivec64x2 K3 = load_key(&m_EK[28]);
   const Altivec64x2 K4 = load_key(&m_EK[24]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[16]);
   const Altivec64x2 K7 = load_key(&m_EK[12]);
   const Altivec64x2 K8 = load_key(&m_EK[8]);
   const Altivec64x2 K9 = load_key(&m_EK[4]);
   const Altivec64x2 K10 = load_key(&m_EK[0]);

   while(blocks >= 4)
      {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in+16);
      Altivec64x2 B2 = load_block(in+16*2);
      Altivec64x2 B3 = load_block(in+16*3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_DECRYPT_4(B0, B1, B2, B3, K1);
      AES_DECRYPT_4(B0, B1, B2, B3, K2);
      AES_DECRYPT_4(B0, B1, B2, B3, K3);
      AES_DECRYPT_4(B0, B1, B2, B3, K4);
      AES_DECRYPT_4(B0, B1, B2, B3, K5);
      AES_DECRYPT_4(B0, B1, B2, B3, K6);
      AES_DECRYPT_4(B0, B1, B2, B3, K7);
      AES_DECRYPT_4(B0, B1, B2, B3, K8);
      AES_DECRYPT_4(B0, B1, B2, B3, K9);
      AES_DECRYPT_4_LAST(B0, B1, B2, B3, K10);

      store_blocks(B0, B1, B2, B3, out);

      out += 4*16;
      in += 4*16;
      blocks -= 4;
      }

   for(size_t i = 0; i != blocks; ++i)
      {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vncipher(B, K1);
      B = __builtin_crypto_vncipher(B, K2);
      B = __builtin_crypto_vncipher(B, K3);
      B = __builtin_crypto_vncipher(B, K4);
      B = __builtin_crypto_vncipher(B, K5);
      B = __builtin_crypto_vncipher(B, K6);
      B = __builtin_crypto_vncipher(B, K7);
      B = __builtin_crypto_vncipher(B, K8);
      B = __builtin_crypto_vncipher(B, K9);
      B = __builtin_crypto_vncipherlast(B, K10);

      store_block(B, out);

      out += 16;
      in += 16;
      }
   }

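// AES-192 encryption: 12 rounds, using round keys K0..K12.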
BOTAN_FUNC_ISA("crypto,vsx")
void AES_192::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
   {
   const Altivec64x2 K0 = load_key(&m_EK[0]);
   const Altivec64x2 K1 = load_key(&m_EK[4]);
   const Altivec64x2 K2 = load_key(&m_EK[8]);
   const Altivec64x2 K3 = load_key(&m_EK[12]);
   const Altivec64x2 K4 = load_key(&m_EK[16]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[32]);
   const Altivec64x2 K9 = load_key(&m_EK[36]);
   const Altivec64x2 K10 = load_key(&m_EK[40]);
   const Altivec64x2 K11 = load_key(&m_EK[44]);
   const Altivec64x2 K12 = load_key(&m_EK[48]);

   while(blocks >= 4)
      {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in+16);
      Altivec64x2 B2 = load_block(in+16*2);
      Altivec64x2 B3 = load_block(in+16*3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_ENCRYPT_4(B0, B1, B2, B3, K1);
      AES_ENCRYPT_4(B0, B1, B2, B3, K2);
      AES_ENCRYPT_4(B0, B1, B2, B3, K3);
      AES_ENCRYPT_4(B0, B1, B2, B3, K4);
      AES_ENCRYPT_4(B0, B1, B2, B3, K5);
      AES_ENCRYPT_4(B0, B1, B2, B3, K6);
      AES_ENCRYPT_4(B0, B1, B2, B3, K7);
      AES_ENCRYPT_4(B0, B1, B2, B3, K8);
      AES_ENCRYPT_4(B0, B1, B2, B3, K9);
      AES_ENCRYPT_4(B0, B1, B2, B3, K10);
      AES_ENCRYPT_4(B0, B1, B2, B3, K11);
      AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K12);

      store_blocks(B0, B1, B2, B3, out);

      out += 4*16;
      in += 4*16;
      blocks -= 4;
      }

   for(size_t i = 0; i != blocks; ++i)
      {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vcipher(B, K1);
      B = __builtin_crypto_vcipher(B, K2);
      B = __builtin_crypto_vcipher(B, K3);
      B = __builtin_crypto_vcipher(B, K4);
      B = __builtin_crypto_vcipher(B, K5);
      B = __builtin_crypto_vcipher(B, K6);
      B = __builtin_crypto_vcipher(B, K7);
      B = __builtin_crypto_vcipher(B, K8);
      B = __builtin_crypto_vcipher(B, K9);
      B = __builtin_crypto_vcipher(B, K10);
      B = __builtin_crypto_vcipher(B, K11);
      B = __builtin_crypto_vcipherlast(B, K12);

      store_block(B, out);

      out += 16;
      in += 16;
      }
   }

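// AES-192 decryption, with the key schedule taken in reverse order.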
BOTAN_FUNC_ISA("crypto,vsx")
void AES_192::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
   {
   const Altivec64x2 K0 = load_key(&m_EK[48]);
   const Altivec64x2 K1 = load_key(&m_EK[44]);
   const Altivec64x2 K2 = load_key(&m_EK[40]);
   const Altivec64x2 K3 = load_key(&m_EK[36]);
   const Altivec64x2 K4 = load_key(&m_EK[32]);
   const Altivec64x2 K5 = load_key(&m_EK[28]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[20]);
   const Altivec64x2 K8 = load_key(&m_EK[16]);
   const Altivec64x2 K9 = load_key(&m_EK[12]);
   const Altivec64x2 K10 = load_key(&m_EK[8]);
   const Altivec64x2 K11 = load_key(&m_EK[4]);
   const Altivec64x2 K12 = load_key(&m_EK[0]);

   while(blocks >= 4)
      {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in+16);
      Altivec64x2 B2 = load_block(in+16*2);
      Altivec64x2 B3 = load_block(in+16*3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_DECRYPT_4(B0, B1, B2, B3, K1);
      AES_DECRYPT_4(B0, B1, B2, B3, K2);
      AES_DECRYPT_4(B0, B1, B2, B3, K3);
      AES_DECRYPT_4(B0, B1, B2, B3, K4);
      AES_DECRYPT_4(B0, B1, B2, B3, K5);
      AES_DECRYPT_4(B0, B1, B2, B3, K6);
      AES_DECRYPT_4(B0, B1, B2, B3, K7);
      AES_DECRYPT_4(B0, B1, B2, B3, K8);
      AES_DECRYPT_4(B0, B1, B2, B3, K9);
      AES_DECRYPT_4(B0, B1, B2, B3, K10);
      AES_DECRYPT_4(B0, B1, B2, B3, K11);
      AES_DECRYPT_4_LAST(B0, B1, B2, B3, K12);

      store_blocks(B0, B1, B2, B3, out);

      out += 4*16;
      in += 4*16;
      blocks -= 4;
      }

   for(size_t i = 0; i != blocks; ++i)
      {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vncipher(B, K1);
      B = __builtin_crypto_vncipher(B, K2);
      B = __builtin_crypto_vncipher(B, K3);
      B = __builtin_crypto_vncipher(B, K4);
      B = __builtin_crypto_vncipher(B, K5);
      B = __builtin_crypto_vncipher(B, K6);
      B = __builtin_crypto_vncipher(B, K7);
      B = __builtin_crypto_vncipher(B, K8);
      B = __builtin_crypto_vncipher(B, K9);
      B = __builtin_crypto_vncipher(B, K10);
      B = __builtin_crypto_vncipher(B, K11);
      B = __builtin_crypto_vncipherlast(B, K12);

      store_block(B, out);

      out += 16;
      in += 16;
      }
   }

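// AES-256 encryption: 14 rounds, using round keys K0..K14.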
BOTAN_FUNC_ISA("crypto,vsx")
void AES_256::hw_aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
   {
   const Altivec64x2 K0 = load_key(&m_EK[0]);
   const Altivec64x2 K1 = load_key(&m_EK[4]);
   const Altivec64x2 K2 = load_key(&m_EK[8]);
   const Altivec64x2 K3 = load_key(&m_EK[12]);
   const Altivec64x2 K4 = load_key(&m_EK[16]);
   const Altivec64x2 K5 = load_key(&m_EK[20]);
   const Altivec64x2 K6 = load_key(&m_EK[24]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[32]);
   const Altivec64x2 K9 = load_key(&m_EK[36]);
   const Altivec64x2 K10 = load_key(&m_EK[40]);
   const Altivec64x2 K11 = load_key(&m_EK[44]);
   const Altivec64x2 K12 = load_key(&m_EK[48]);
   const Altivec64x2 K13 = load_key(&m_EK[52]);
   const Altivec64x2 K14 = load_key(&m_EK[56]);

   while(blocks >= 4)
      {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in+16);
      Altivec64x2 B2 = load_block(in+16*2);
      Altivec64x2 B3 = load_block(in+16*3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_ENCRYPT_4(B0, B1, B2, B3, K1);
      AES_ENCRYPT_4(B0, B1, B2, B3, K2);
      AES_ENCRYPT_4(B0, B1, B2, B3, K3);
      AES_ENCRYPT_4(B0, B1, B2, B3, K4);
      AES_ENCRYPT_4(B0, B1, B2, B3, K5);
      AES_ENCRYPT_4(B0, B1, B2, B3, K6);
      AES_ENCRYPT_4(B0, B1, B2, B3, K7);
      AES_ENCRYPT_4(B0, B1, B2, B3, K8);
      AES_ENCRYPT_4(B0, B1, B2, B3, K9);
      AES_ENCRYPT_4(B0, B1, B2, B3, K10);
      AES_ENCRYPT_4(B0, B1, B2, B3, K11);
      AES_ENCRYPT_4(B0, B1, B2, B3, K12);
      AES_ENCRYPT_4(B0, B1, B2, B3, K13);
      AES_ENCRYPT_4_LAST(B0, B1, B2, B3, K14);

      store_blocks(B0, B1, B2, B3, out);

      out += 4*16;
      in += 4*16;
      blocks -= 4;
      }

   for(size_t i = 0; i != blocks; ++i)
      {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vcipher(B, K1);
      B = __builtin_crypto_vcipher(B, K2);
      B = __builtin_crypto_vcipher(B, K3);
      B = __builtin_crypto_vcipher(B, K4);
      B = __builtin_crypto_vcipher(B, K5);
      B = __builtin_crypto_vcipher(B, K6);
      B = __builtin_crypto_vcipher(B, K7);
      B = __builtin_crypto_vcipher(B, K8);
      B = __builtin_crypto_vcipher(B, K9);
      B = __builtin_crypto_vcipher(B, K10);
      B = __builtin_crypto_vcipher(B, K11);
      B = __builtin_crypto_vcipher(B, K12);
      B = __builtin_crypto_vcipher(B, K13);
      B = __builtin_crypto_vcipherlast(B, K14);

      store_block(B, out);

      out += 16;
      in += 16;
      }
   }

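// AES-256 decryption, with the key schedule taken in reverse order.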
BOTAN_FUNC_ISA("crypto,vsx")
void AES_256::hw_aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
   {
   const Altivec64x2 K0 = load_key(&m_EK[56]);
   const Altivec64x2 K1 = load_key(&m_EK[52]);
   const Altivec64x2 K2 = load_key(&m_EK[48]);
   const Altivec64x2 K3 = load_key(&m_EK[44]);
   const Altivec64x2 K4 = load_key(&m_EK[40]);
   const Altivec64x2 K5 = load_key(&m_EK[36]);
   const Altivec64x2 K6 = load_key(&m_EK[32]);
   const Altivec64x2 K7 = load_key(&m_EK[28]);
   const Altivec64x2 K8 = load_key(&m_EK[24]);
   const Altivec64x2 K9 = load_key(&m_EK[20]);
   const Altivec64x2 K10 = load_key(&m_EK[16]);
   const Altivec64x2 K11 = load_key(&m_EK[12]);
   const Altivec64x2 K12 = load_key(&m_EK[8]);
   const Altivec64x2 K13 = load_key(&m_EK[4]);
   const Altivec64x2 K14 = load_key(&m_EK[0]);

   while(blocks >= 4)
      {
      Altivec64x2 B0 = load_block(in);
      Altivec64x2 B1 = load_block(in+16);
      Altivec64x2 B2 = load_block(in+16*2);
      Altivec64x2 B3 = load_block(in+16*3);

      AES_XOR_4(B0, B1, B2, B3, K0);
      AES_DECRYPT_4(B0, B1, B2, B3, K1);
      AES_DECRYPT_4(B0, B1, B2, B3, K2);
      AES_DECRYPT_4(B0, B1, B2, B3, K3);
      AES_DECRYPT_4(B0, B1, B2, B3, K4);
      AES_DECRYPT_4(B0, B1, B2, B3, K5);
      AES_DECRYPT_4(B0, B1, B2, B3, K6);
      AES_DECRYPT_4(B0, B1, B2, B3, K7);
      AES_DECRYPT_4(B0, B1, B2, B3, K8);
      AES_DECRYPT_4(B0, B1, B2, B3, K9);
      AES_DECRYPT_4(B0, B1, B2, B3, K10);
      AES_DECRYPT_4(B0, B1, B2, B3, K11);
      AES_DECRYPT_4(B0, B1, B2, B3, K12);
      AES_DECRYPT_4(B0, B1, B2, B3, K13);
      AES_DECRYPT_4_LAST(B0, B1, B2, B3, K14);

      store_blocks(B0, B1, B2, B3, out);

      out += 4*16;
      in += 4*16;
      blocks -= 4;
      }

   for(size_t i = 0; i != blocks; ++i)
      {
      Altivec64x2 B = load_block(in);

      B = vec_xor(B, K0);
      B = __builtin_crypto_vncipher(B, K1);
      B = __builtin_crypto_vncipher(B, K2);
      B = __builtin_crypto_vncipher(B, K3);
      B = __builtin_crypto_vncipher(B, K4);
      B = __builtin_crypto_vncipher(B, K5);
      B = __builtin_crypto_vncipher(B, K6);
      B = __builtin_crypto_vncipher(B, K7);
      B = __builtin_crypto_vncipher(B, K8);
      B = __builtin_crypto_vncipher(B, K9);
      B = __builtin_crypto_vncipher(B, K10);
      B = __builtin_crypto_vncipher(B, K11);
      B = __builtin_crypto_vncipher(B, K12);
      B = __builtin_crypto_vncipher(B, K13);
      B = __builtin_crypto_vncipherlast(B, K14);

      store_block(B, out);

      out += 16;
      in += 16;
      }
   }

#undef AES_XOR_4
#undef AES_ENCRYPT_4
#undef AES_ENCRYPT_4_LAST
#undef AES_DECRYPT_4
#undef AES_DECRYPT_4_LAST

}