cachepc-linux

Fork of AMDESE/linux with modifications for the CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux

arch/x86/crypto/Makefile (4756B)


# SPDX-License-Identifier: GPL-2.0
#
# x86 crypto algorithms
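
# Each accelerated algorithm below is built as a composite object: the
# obj-$(CONFIG_FOO) line selects it as built-in or modular, and the
# matching <name>-y list combines the assembly implementation(s) with a
# C "glue" file that registers the algorithm with the kernel crypto API.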
obj-$(CONFIG_CRYPTO_TWOFISH_586) += twofish-i586.o
twofish-i586-y := twofish-i586-asm_32.o twofish_glue.o
obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o
twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o
obj-$(CONFIG_CRYPTO_TWOFISH_X86_64_3WAY) += twofish-x86_64-3way.o
twofish-x86_64-3way-y := twofish-x86_64-asm_64-3way.o twofish_glue_3way.o
obj-$(CONFIG_CRYPTO_TWOFISH_AVX_X86_64) += twofish-avx-x86_64.o
twofish-avx-x86_64-y := twofish-avx-x86_64-asm_64.o twofish_avx_glue.o

obj-$(CONFIG_CRYPTO_SERPENT_SSE2_586) += serpent-sse2-i586.o
serpent-sse2-i586-y := serpent-sse2-i586-asm_32.o serpent_sse2_glue.o
obj-$(CONFIG_CRYPTO_SERPENT_SSE2_X86_64) += serpent-sse2-x86_64.o
serpent-sse2-x86_64-y := serpent-sse2-x86_64-asm_64.o serpent_sse2_glue.o
obj-$(CONFIG_CRYPTO_SERPENT_AVX_X86_64) += serpent-avx-x86_64.o
serpent-avx-x86_64-y := serpent-avx-x86_64-asm_64.o serpent_avx_glue.o
obj-$(CONFIG_CRYPTO_SERPENT_AVX2_X86_64) += serpent-avx2.o
serpent-avx2-y := serpent-avx2-asm_64.o serpent_avx2_glue.o

obj-$(CONFIG_CRYPTO_DES3_EDE_X86_64) += des3_ede-x86_64.o
des3_ede-x86_64-y := des3_ede-asm_64.o des3_ede_glue.o

obj-$(CONFIG_CRYPTO_CAMELLIA_X86_64) += camellia-x86_64.o
camellia-x86_64-y := camellia-x86_64-asm_64.o camellia_glue.o
obj-$(CONFIG_CRYPTO_CAMELLIA_AESNI_AVX_X86_64) += camellia-aesni-avx-x86_64.o
camellia-aesni-avx-x86_64-y := camellia-aesni-avx-asm_64.o camellia_aesni_avx_glue.o
obj-$(CONFIG_CRYPTO_CAMELLIA_AESNI_AVX2_X86_64) += camellia-aesni-avx2.o
camellia-aesni-avx2-y := camellia-aesni-avx2-asm_64.o camellia_aesni_avx2_glue.o

obj-$(CONFIG_CRYPTO_BLOWFISH_X86_64) += blowfish-x86_64.o
blowfish-x86_64-y := blowfish-x86_64-asm_64.o blowfish_glue.o

obj-$(CONFIG_CRYPTO_CAST5_AVX_X86_64) += cast5-avx-x86_64.o
cast5-avx-x86_64-y := cast5-avx-x86_64-asm_64.o cast5_avx_glue.o

obj-$(CONFIG_CRYPTO_CAST6_AVX_X86_64) += cast6-avx-x86_64.o
cast6-avx-x86_64-y := cast6-avx-x86_64-asm_64.o cast6_avx_glue.o

obj-$(CONFIG_CRYPTO_AEGIS128_AESNI_SSE2) += aegis128-aesni.o
aegis128-aesni-y := aegis128-aesni-asm.o aegis128-aesni-glue.o
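
# Extra objects can be added to a composite only when the toolchain or
# configuration supports them, via <name>-$(CONFIG_...) lists (e.g. the
# AVX-512VL ChaCha code below is built only with CONFIG_AS_AVX512, and
# some objects only with CONFIG_64BIT).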
obj-$(CONFIG_CRYPTO_CHACHA20_X86_64) += chacha-x86_64.o
chacha-x86_64-y := chacha-avx2-x86_64.o chacha-ssse3-x86_64.o chacha_glue.o
chacha-x86_64-$(CONFIG_AS_AVX512) += chacha-avx512vl-x86_64.o

obj-$(CONFIG_CRYPTO_AES_NI_INTEL) += aesni-intel.o
aesni-intel-y := aesni-intel_asm.o aesni-intel_glue.o
aesni-intel-$(CONFIG_64BIT) += aesni-intel_avx-x86_64.o aes_ctrby8_avx-x86_64.o

obj-$(CONFIG_CRYPTO_SHA1_SSSE3) += sha1-ssse3.o
sha1-ssse3-y := sha1_avx2_x86_64_asm.o sha1_ssse3_asm.o sha1_ssse3_glue.o
sha1-ssse3-$(CONFIG_AS_SHA1_NI) += sha1_ni_asm.o

obj-$(CONFIG_CRYPTO_SHA256_SSSE3) += sha256-ssse3.o
sha256-ssse3-y := sha256-ssse3-asm.o sha256-avx-asm.o sha256-avx2-asm.o sha256_ssse3_glue.o
sha256-ssse3-$(CONFIG_AS_SHA256_NI) += sha256_ni_asm.o

obj-$(CONFIG_CRYPTO_SHA512_SSSE3) += sha512-ssse3.o
sha512-ssse3-y := sha512-ssse3-asm.o sha512-avx-asm.o sha512-avx2-asm.o sha512_ssse3_glue.o

obj-$(CONFIG_CRYPTO_BLAKE2S_X86) += blake2s-x86_64.o
blake2s-x86_64-y := blake2s-shash.o
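# $(if ...) expands to y whenever CONFIG_CRYPTO_BLAKE2S_X86 is enabled
# (=y or =m), so the BLAKE2s library code is always built into the kernel
# rather than as a module.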
obj-$(if $(CONFIG_CRYPTO_BLAKE2S_X86),y) += libblake2s-x86_64.o
libblake2s-x86_64-y := blake2s-core.o blake2s-glue.o

obj-$(CONFIG_CRYPTO_GHASH_CLMUL_NI_INTEL) += ghash-clmulni-intel.o
ghash-clmulni-intel-y := ghash-clmulni-intel_asm.o ghash-clmulni-intel_glue.o

obj-$(CONFIG_CRYPTO_CRC32C_INTEL) += crc32c-intel.o
crc32c-intel-y := crc32c-intel_glue.o
crc32c-intel-$(CONFIG_64BIT) += crc32c-pcl-intel-asm_64.o

obj-$(CONFIG_CRYPTO_CRC32_PCLMUL) += crc32-pclmul.o
crc32-pclmul-y := crc32-pclmul_asm.o crc32-pclmul_glue.o

obj-$(CONFIG_CRYPTO_CRCT10DIF_PCLMUL) += crct10dif-pclmul.o
crct10dif-pclmul-y := crct10dif-pcl-asm_64.o crct10dif-pclmul_glue.o

obj-$(CONFIG_CRYPTO_POLY1305_X86_64) += poly1305-x86_64.o
poly1305-x86_64-y := poly1305-x86_64-cryptogams.o poly1305_glue.o
targets += poly1305-x86_64-cryptogams.S

obj-$(CONFIG_CRYPTO_NHPOLY1305_SSE2) += nhpoly1305-sse2.o
nhpoly1305-sse2-y := nh-sse2-x86_64.o nhpoly1305-sse2-glue.o
obj-$(CONFIG_CRYPTO_NHPOLY1305_AVX2) += nhpoly1305-avx2.o
nhpoly1305-avx2-y := nh-avx2-x86_64.o nhpoly1305-avx2-glue.o

obj-$(CONFIG_CRYPTO_CURVE25519_X86) += curve25519-x86_64.o

obj-$(CONFIG_CRYPTO_SM3_AVX_X86_64) += sm3-avx-x86_64.o
sm3-avx-x86_64-y := sm3-avx-asm_64.o sm3_avx_glue.o

obj-$(CONFIG_CRYPTO_SM4_AESNI_AVX_X86_64) += sm4-aesni-avx-x86_64.o
sm4-aesni-avx-x86_64-y := sm4-aesni-avx-asm_64.o sm4_aesni_avx_glue.o

obj-$(CONFIG_CRYPTO_SM4_AESNI_AVX2_X86_64) += sm4-aesni-avx2-x86_64.o
sm4-aesni-avx2-x86_64-y := sm4-aesni-avx2-asm_64.o sm4_aesni_avx2_glue.o
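
# poly1305-x86_64-cryptogams.S is generated at build time from the
# corresponding CRYPTOGAMS perl script by the pattern rule below; the
# targets += line above registers the generated file with Kbuild's
# dependency tracking.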
quiet_cmd_perlasm = PERLASM $@
      cmd_perlasm = $(PERL) $< > $@
$(obj)/%.S: $(src)/%.pl FORCE
	$(call if_changed,perlasm)