Diffstat (limited to 'linux-aarch64')
-rw-r--r--  linux-aarch64/crypto/aes/aesv8-armx64.S (renamed from linux-aarch64/crypto/aes/aesv8-armx.S)         |  4
-rw-r--r--  linux-aarch64/crypto/modes/ghashv8-armx64.S (renamed from linux-aarch64/crypto/modes/ghashv8-armx.S) | 26
-rw-r--r--  linux-aarch64/crypto/sha/sha1-armv8.S                                                                |  4
-rw-r--r--  linux-aarch64/crypto/sha/sha256-armv8.S                                                              |  4
-rw-r--r--  linux-aarch64/crypto/sha/sha512-armv8.S                                                              |  4
5 files changed, 26 insertions, 16 deletions
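All five files get the same treatment, which the diffstat summarizes: the AES and GHASH files are renamed from -armx.S to -armx64.S, the local "arm_arch.h" include becomes <openssl/arm_arch.h>, and each file's entire body is wrapped in an architecture guard. Schematically, each .S file now has the shape below (a sketch, not the verbatim contents). Because the compiler driver runs the C preprocessor over .S inputs, the guard makes the file assemble to an empty object on non-AArch64 targets, so a single source list can be fed to the build for every architecture:

#if defined(__aarch64__)
#include <openssl/arm_arch.h>

// ... generated AArch64 assembly, e.g. aes_v8_ctr32_encrypt_blocks ...

#endif  // on non-AArch64 targets the file assembles to nothing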
diff --git a/linux-aarch64/crypto/aes/aesv8-armx.S b/linux-aarch64/crypto/aes/aesv8-armx64.S
index 9c63291..fa2abbc 100644
--- a/linux-aarch64/crypto/aes/aesv8-armx.S
+++ b/linux-aarch64/crypto/aes/aesv8-armx64.S
@@ -1,4 +1,5 @@
-#include "arm_arch.h"
+#if defined(__aarch64__)
+#include <openssl/arm_arch.h>
 
 #if __ARM_MAX_ARCH__>=7
 .text
@@ -747,3 +748,4 @@ aes_v8_ctr32_encrypt_blocks:
 	ret
 .size	aes_v8_ctr32_encrypt_blocks,.-aes_v8_ctr32_encrypt_blocks
 #endif
+#endif
\ No newline at end of file
diff --git a/linux-aarch64/crypto/modes/ghashv8-armx.S b/linux-aarch64/crypto/modes/ghashv8-armx64.S
index ad19074..8d44667 100644
--- a/linux-aarch64/crypto/modes/ghashv8-armx.S
+++ b/linux-aarch64/crypto/modes/ghashv8-armx64.S
@@ -1,4 +1,5 @@
-#include "arm_arch.h"
+#if defined(__aarch64__)
+#include <openssl/arm_arch.h>
 
 .text
 #if !defined(__clang__)
@@ -67,10 +68,10 @@ gcm_gmult_v8:
 #endif
 	ext	v3.16b,v17.16b,v17.16b,#8
 
-	pmull	v0.1q,v20.1d,v3.1d	//H.lo·Xi.lo
+	pmull	v0.1q,v20.1d,v3.1d		//H.lo·Xi.lo
 	eor	v17.16b,v17.16b,v3.16b	//Karatsuba pre-processing
-	pmull2	v2.1q,v20.2d,v3.2d	//H.hi·Xi.hi
-	pmull	v1.1q,v21.1d,v17.1d	//(H.lo+H.hi)·(Xi.lo+Xi.hi)
+	pmull2	v2.1q,v20.2d,v3.2d		//H.hi·Xi.hi
+	pmull	v1.1q,v21.1d,v17.1d		//(H.lo+H.hi)·(Xi.lo+Xi.hi)
 
 	ext	v17.16b,v0.16b,v2.16b,#8	//Karatsuba post-processing
 	eor	v18.16b,v0.16b,v2.16b
@@ -134,7 +135,7 @@ gcm_ghash_v8:
 #endif
 	ext	v7.16b,v17.16b,v17.16b,#8
 	eor	v3.16b,v3.16b,v0.16b	//I[i]^=Xi
-	pmull	v4.1q,v20.1d,v7.1d	//H·Ii+1
+	pmull	v4.1q,v20.1d,v7.1d		//H·Ii+1
 	eor	v17.16b,v17.16b,v7.16b	//Karatsuba pre-processing
 	pmull2	v6.1q,v20.2d,v7.2d
 	b	.Loop_mod2x_v8
@@ -143,14 +144,14 @@ gcm_ghash_v8:
 .Loop_mod2x_v8:
 	ext	v18.16b,v3.16b,v3.16b,#8
 	subs	x3,x3,#32	//is there more data?
-	pmull	v0.1q,v22.1d,v3.1d	//H^2.lo·Xi.lo
+	pmull	v0.1q,v22.1d,v3.1d		//H^2.lo·Xi.lo
 	csel	x12,xzr,x12,lo	//is it time to zero x12?
 
 	pmull	v5.1q,v21.1d,v17.1d
 	eor	v18.16b,v18.16b,v3.16b	//Karatsuba pre-processing
-	pmull2	v2.1q,v22.2d,v3.2d	//H^2.hi·Xi.hi
+	pmull2	v2.1q,v22.2d,v3.2d		//H^2.hi·Xi.hi
 	eor	v0.16b,v0.16b,v4.16b	//accumulate
-	pmull2	v1.1q,v21.2d,v18.2d	//(H^2.lo+H^2.hi)·(Xi.lo+Xi.hi)
+	pmull2	v1.1q,v21.2d,v18.2d		//(H^2.lo+H^2.hi)·(Xi.lo+Xi.hi)
 	ld1	{v16.2d},[x2],x12	//load [rotated] I[i+2]
 
 	eor	v2.16b,v2.16b,v6.16b
@@ -175,7 +176,7 @@ gcm_ghash_v8:
 	ext	v7.16b,v17.16b,v17.16b,#8
 	ext	v3.16b,v16.16b,v16.16b,#8
 	eor	v0.16b,v1.16b,v18.16b
-	pmull	v4.1q,v20.1d,v7.1d	//H·Ii+1
+	pmull	v4.1q,v20.1d,v7.1d		//H·Ii+1
 
 	eor	v3.16b,v3.16b,v2.16b	//accumulate v3.16b early
 	ext	v18.16b,v0.16b,v0.16b,#8	//2nd phase of reduction
@@ -196,10 +197,10 @@ gcm_ghash_v8:
 	eor	v3.16b,v3.16b,v0.16b	//inp^=Xi
 	eor	v17.16b,v16.16b,v18.16b	//v17.16b is rotated inp^Xi
 
-	pmull	v0.1q,v20.1d,v3.1d	//H.lo·Xi.lo
+	pmull	v0.1q,v20.1d,v3.1d		//H.lo·Xi.lo
 	eor	v17.16b,v17.16b,v3.16b	//Karatsuba pre-processing
-	pmull2	v2.1q,v20.2d,v3.2d	//H.hi·Xi.hi
-	pmull	v1.1q,v21.1d,v17.1d	//(H.lo+H.hi)·(Xi.lo+Xi.hi)
+	pmull2	v2.1q,v20.2d,v3.2d		//H.hi·Xi.hi
+	pmull	v1.1q,v21.1d,v17.1d		//(H.lo+H.hi)·(Xi.lo+Xi.hi)
 
 	ext	v17.16b,v0.16b,v2.16b,#8	//Karatsuba post-processing
 	eor	v18.16b,v0.16b,v2.16b
@@ -228,3 +229,4 @@ gcm_ghash_v8:
 .byte	71,72,65,83,72,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
 .align	2
 .align	2
+#endif
\ No newline at end of file
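The pmull/pmull2 lines touched above compute each GHASH multiplication with Karatsuba: three 64×64-bit carry-less products (H.lo·Xi.lo, H.hi·Xi.hi, and (H.lo+H.hi)·(Xi.lo+Xi.hi), where + is XOR) replace the four a schoolbook multiply would need. A minimal C sketch of the same structure follows; clmul64 is a portable stand-in for the PMULL/PMULL2 instructions, and all names here are illustrative, not taken from the source:

#include <stdint.h>

typedef struct { uint64_t lo, hi; } u128;

/* Portable 64x64 -> 128-bit carry-less multiply; stands in for PMULL. */
static u128 clmul64(uint64_t a, uint64_t b)
{
    u128 r = {0, 0};
    for (int i = 0; i < 64; i++)
        if ((b >> i) & 1) {
            r.lo ^= a << i;
            if (i) r.hi ^= a >> (64 - i);  /* avoid undefined shift by 64 */
        }
    return r;
}

/* Karatsuba 128x128 carry-less multiply: three clmul64 calls, mirroring
 * the pmull/pmull2 comments in gcm_gmult_v8. Result is 256 bits. */
static void clmul128(uint64_t h_lo, uint64_t h_hi,
                     uint64_t x_lo, uint64_t x_hi,
                     u128 *r_lo, u128 *r_hi)
{
    u128 lo  = clmul64(h_lo, x_lo);               /* H.lo·Xi.lo  (pmull)  */
    u128 hi  = clmul64(h_hi, x_hi);               /* H.hi·Xi.hi  (pmull2) */
    u128 mid = clmul64(h_lo ^ h_hi, x_lo ^ x_hi); /* (H.lo+H.hi)·(Xi.lo+Xi.hi) */

    /* Karatsuba post-processing: recover the true middle term, then fold
     * it into bits 64..191 of the 256-bit product. */
    mid.lo ^= lo.lo ^ hi.lo;
    mid.hi ^= lo.hi ^ hi.hi;
    r_lo->lo = lo.lo;
    r_lo->hi = lo.hi ^ mid.lo;
    r_hi->lo = hi.lo ^ mid.hi;
    r_hi->hi = hi.hi;
}

The assembly then reduces this 256-bit product modulo the GHASH polynomial (the "2nd phase of reduction" comments above); the sketch omits that step.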
diff --git a/linux-aarch64/crypto/sha/sha1-armv8.S b/linux-aarch64/crypto/sha/sha1-armv8.S
index ab6aa98..6cf9877 100644
--- a/linux-aarch64/crypto/sha/sha1-armv8.S
+++ b/linux-aarch64/crypto/sha/sha1-armv8.S
@@ -1,4 +1,5 @@
-#include "arm_arch.h"
+#if defined(__aarch64__)
+#include <openssl/arm_arch.h>
 
 .text
 
@@ -1211,3 +1212,4 @@ sha1_block_armv8:
 .align	2
 .align	2
 .comm	OPENSSL_armcap_P,4,4
+#endif
\ No newline at end of file
diff --git a/linux-aarch64/crypto/sha/sha256-armv8.S b/linux-aarch64/crypto/sha/sha256-armv8.S
index ec572e9..0fad009 100644
--- a/linux-aarch64/crypto/sha/sha256-armv8.S
+++ b/linux-aarch64/crypto/sha/sha256-armv8.S
@@ -1,4 +1,5 @@
-#include "arm_arch.h"
+#if defined(__aarch64__)
+#include <openssl/arm_arch.h>
 
 .text
 
@@ -1141,3 +1142,4 @@ sha256_block_armv8:
 	ret
 .size	sha256_block_armv8,.-sha256_block_armv8
 .comm	OPENSSL_armcap_P,4,4
+#endif
\ No newline at end of file
diff --git a/linux-aarch64/crypto/sha/sha512-armv8.S b/linux-aarch64/crypto/sha/sha512-armv8.S
index 8fc342a..517c033 100644
--- a/linux-aarch64/crypto/sha/sha512-armv8.S
+++ b/linux-aarch64/crypto/sha/sha512-armv8.S
@@ -1,4 +1,5 @@
-#include "arm_arch.h"
+#if defined(__aarch64__)
+#include <openssl/arm_arch.h>
 
 .text
 
@@ -1021,3 +1022,4 @@ sha512_block_data_order:
 .align	2
 .align	2
 .comm	OPENSSL_armcap_P,4,4
+#endif
\ No newline at end of file