Diffstat (limited to 'crypto')
-rw-r--r--  crypto/Kconfig             |   19
-rw-r--r--  crypto/Makefile            |    3
-rw-r--r--  crypto/ablkcipher.c        |    5
-rw-r--r--  crypto/acompress.c         |    3
-rw-r--r--  crypto/aead.c              |    3
-rw-r--r--  crypto/aes_generic.c       |   64
-rw-r--r--  crypto/aes_ti.c            |  375
-rw-r--r--  crypto/ahash.c             |    3
-rw-r--r--  crypto/akcipher.c          |    3
-rw-r--r--  crypto/algapi.c            |   68
-rw-r--r--  crypto/algif_hash.c        |    2
-rw-r--r--  crypto/blkcipher.c         |    7
-rw-r--r--  crypto/cbc.c               |    3
-rw-r--r--  crypto/ccm.c               |  386
-rw-r--r--  crypto/chacha20_generic.c  |   73
-rw-r--r--  crypto/cmac.c              |    3
-rw-r--r--  crypto/ctr.c               |    2
-rw-r--r--  crypto/cts.c               |    8
-rw-r--r--  crypto/kpp.c               |    3
-rw-r--r--  crypto/pcbc.c              |    6
-rw-r--r--  crypto/rng.c               |    3
-rw-r--r--  crypto/scompress.c         |    3
-rw-r--r--  crypto/seqiv.c             |    2
-rw-r--r--  crypto/shash.c             |    9
-rw-r--r--  crypto/skcipher.c          |   23
-rw-r--r--  crypto/tcrypt.c            |    6
-rw-r--r--  crypto/testmgr.c           | 1055
-rw-r--r--  crypto/testmgr.h           |  330
28 files changed, 1087 insertions(+), 1383 deletions(-)
diff --git a/crypto/Kconfig b/crypto/Kconfig index 160f08e721cc..f37e9cca50e1 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -263,6 +263,7 @@ comment "Authenticated Encryption with Associated Data" config CRYPTO_CCM tristate "CCM support" select CRYPTO_CTR + select CRYPTO_HASH select CRYPTO_AEAD help Support for Counter with CBC MAC. Required for IPsec. @@ -374,6 +375,7 @@ config CRYPTO_XTS select CRYPTO_BLKCIPHER select CRYPTO_MANAGER select CRYPTO_GF128MUL + select CRYPTO_ECB help XTS: IEEE1619/D16 narrow block cipher use with aes-xts-plain, key size 256, 384 or 512 bits. This implementation currently @@ -895,6 +897,23 @@ config CRYPTO_AES See <http://csrc.nist.gov/CryptoToolkit/aes/> for more information. +config CRYPTO_AES_TI + tristate "Fixed time AES cipher" + select CRYPTO_ALGAPI + help + This is a generic implementation of AES that attempts to eliminate + data dependent latencies as much as possible without affecting + performance too much. It is intended for use by the generic CCM + and GCM drivers, and other CTR or CMAC/XCBC based modes that rely + solely on encryption (although decryption is supported as well, but + with a more dramatic performance hit) + + Instead of using 16 lookup tables of 1 KB each, (8 for encryption and + 8 for decryption), this implementation only uses just two S-boxes of + 256 bytes each, and attempts to eliminate data dependent latencies by + prefetching the entire table into the cache at the start of each + block. + config CRYPTO_AES_586 tristate "AES cipher algorithms (i586)" depends on (X86 || UML_X86) && !64BIT diff --git a/crypto/Makefile b/crypto/Makefile index b8f0e3eb0791..8a44057240d5 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -75,6 +75,7 @@ obj-$(CONFIG_CRYPTO_SHA256) += sha256_generic.o obj-$(CONFIG_CRYPTO_SHA512) += sha512_generic.o obj-$(CONFIG_CRYPTO_SHA3) += sha3_generic.o obj-$(CONFIG_CRYPTO_WP512) += wp512.o +CFLAGS_wp512.o := $(call cc-option,-fno-schedule-insns) # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=79149 obj-$(CONFIG_CRYPTO_TGR192) += tgr192.o obj-$(CONFIG_CRYPTO_GF128MUL) += gf128mul.o obj-$(CONFIG_CRYPTO_ECB) += ecb.o @@ -98,7 +99,9 @@ obj-$(CONFIG_CRYPTO_BLOWFISH_COMMON) += blowfish_common.o obj-$(CONFIG_CRYPTO_TWOFISH) += twofish_generic.o obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o +CFLAGS_serpent_generic.o := $(call cc-option,-fsched-pressure) # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=79149 obj-$(CONFIG_CRYPTO_AES) += aes_generic.o +obj-$(CONFIG_CRYPTO_AES_TI) += aes_ti.o obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia_generic.o obj-$(CONFIG_CRYPTO_CAST_COMMON) += cast_common.o obj-$(CONFIG_CRYPTO_CAST5) += cast5_generic.o diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c index d676fc59521a..d880a4897159 100644 --- a/crypto/ablkcipher.c +++ b/crypto/ablkcipher.c @@ -19,6 +19,7 @@ #include <linux/slab.h> #include <linux/seq_file.h> #include <linux/cryptouser.h> +#include <linux/compiler.h> #include <net/netlink.h> #include <crypto/scatterwalk.h> @@ -394,7 +395,7 @@ static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg) { struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher; @@ -468,7 +469,7 @@ static int crypto_givcipher_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void 
crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg) { struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher; diff --git a/crypto/acompress.c b/crypto/acompress.c index 887783d8e9a9..47d11627cd20 100644 --- a/crypto/acompress.c +++ b/crypto/acompress.c @@ -20,6 +20,7 @@ #include <linux/crypto.h> #include <crypto/algapi.h> #include <linux/cryptouser.h> +#include <linux/compiler.h> #include <net/netlink.h> #include <crypto/internal/acompress.h> #include <crypto/internal/scompress.h> @@ -50,7 +51,7 @@ static int crypto_acomp_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void crypto_acomp_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_acomp_show(struct seq_file *m, struct crypto_alg *alg) { diff --git a/crypto/aead.c b/crypto/aead.c index 3f5c5ff004ab..f794b30a9407 100644 --- a/crypto/aead.c +++ b/crypto/aead.c @@ -24,6 +24,7 @@ #include <linux/slab.h> #include <linux/seq_file.h> #include <linux/cryptouser.h> +#include <linux/compiler.h> #include <net/netlink.h> #include "internal.h" @@ -132,7 +133,7 @@ static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg) { struct aead_alg *aead = container_of(alg, struct aead_alg, base); diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c index 3dd101144a58..ca554d57d01e 100644 --- a/crypto/aes_generic.c +++ b/crypto/aes_generic.c @@ -54,6 +54,7 @@ #include <linux/errno.h> #include <linux/crypto.h> #include <asm/byteorder.h> +#include <asm/unaligned.h> static inline u8 byte(const u32 x, const unsigned n) { @@ -1216,7 +1217,6 @@ EXPORT_SYMBOL_GPL(crypto_il_tab); int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key, unsigned int key_len) { - const __le32 *key = (const __le32 *)in_key; u32 i, t, u, v, w, j; if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 && @@ -1225,10 +1225,15 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key, ctx->key_length = key_len; - ctx->key_dec[key_len + 24] = ctx->key_enc[0] = le32_to_cpu(key[0]); - ctx->key_dec[key_len + 25] = ctx->key_enc[1] = le32_to_cpu(key[1]); - ctx->key_dec[key_len + 26] = ctx->key_enc[2] = le32_to_cpu(key[2]); - ctx->key_dec[key_len + 27] = ctx->key_enc[3] = le32_to_cpu(key[3]); + ctx->key_enc[0] = get_unaligned_le32(in_key); + ctx->key_enc[1] = get_unaligned_le32(in_key + 4); + ctx->key_enc[2] = get_unaligned_le32(in_key + 8); + ctx->key_enc[3] = get_unaligned_le32(in_key + 12); + + ctx->key_dec[key_len + 24] = ctx->key_enc[0]; + ctx->key_dec[key_len + 25] = ctx->key_enc[1]; + ctx->key_dec[key_len + 26] = ctx->key_enc[2]; + ctx->key_dec[key_len + 27] = ctx->key_enc[3]; switch (key_len) { case AES_KEYSIZE_128: @@ -1238,17 +1243,17 @@ int crypto_aes_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key, break; case AES_KEYSIZE_192: - ctx->key_enc[4] = le32_to_cpu(key[4]); - t = ctx->key_enc[5] = le32_to_cpu(key[5]); + ctx->key_enc[4] = get_unaligned_le32(in_key + 16); + t = ctx->key_enc[5] = get_unaligned_le32(in_key + 20); for (i = 0; i < 8; ++i) loop6(i); break; case AES_KEYSIZE_256: - ctx->key_enc[4] = le32_to_cpu(key[4]); - ctx->key_enc[5] = le32_to_cpu(key[5]); - ctx->key_enc[6] = le32_to_cpu(key[6]); - t = 
ctx->key_enc[7] = le32_to_cpu(key[7]); + ctx->key_enc[4] = get_unaligned_le32(in_key + 16); + ctx->key_enc[5] = get_unaligned_le32(in_key + 20); + ctx->key_enc[6] = get_unaligned_le32(in_key + 24); + t = ctx->key_enc[7] = get_unaligned_le32(in_key + 28); for (i = 0; i < 6; ++i) loop8(i); loop8tophalf(i); @@ -1329,16 +1334,14 @@ EXPORT_SYMBOL_GPL(crypto_aes_set_key); static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); - const __le32 *src = (const __le32 *)in; - __le32 *dst = (__le32 *)out; u32 b0[4], b1[4]; const u32 *kp = ctx->key_enc + 4; const int key_len = ctx->key_length; - b0[0] = le32_to_cpu(src[0]) ^ ctx->key_enc[0]; - b0[1] = le32_to_cpu(src[1]) ^ ctx->key_enc[1]; - b0[2] = le32_to_cpu(src[2]) ^ ctx->key_enc[2]; - b0[3] = le32_to_cpu(src[3]) ^ ctx->key_enc[3]; + b0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in); + b0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4); + b0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8); + b0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12); if (key_len > 24) { f_nround(b1, b0, kp); @@ -1361,10 +1364,10 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) f_nround(b1, b0, kp); f_lround(b0, b1, kp); - dst[0] = cpu_to_le32(b0[0]); - dst[1] = cpu_to_le32(b0[1]); - dst[2] = cpu_to_le32(b0[2]); - dst[3] = cpu_to_le32(b0[3]); + put_unaligned_le32(b0[0], out); + put_unaligned_le32(b0[1], out + 4); + put_unaligned_le32(b0[2], out + 8); + put_unaligned_le32(b0[3], out + 12); } /* decrypt a block of text */ @@ -1401,16 +1404,14 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) { const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); - const __le32 *src = (const __le32 *)in; - __le32 *dst = (__le32 *)out; u32 b0[4], b1[4]; const int key_len = ctx->key_length; const u32 *kp = ctx->key_dec + 4; - b0[0] = le32_to_cpu(src[0]) ^ ctx->key_dec[0]; - b0[1] = le32_to_cpu(src[1]) ^ ctx->key_dec[1]; - b0[2] = le32_to_cpu(src[2]) ^ ctx->key_dec[2]; - b0[3] = le32_to_cpu(src[3]) ^ ctx->key_dec[3]; + b0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in); + b0[1] = ctx->key_dec[1] ^ get_unaligned_le32(in + 4); + b0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8); + b0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12); if (key_len > 24) { i_nround(b1, b0, kp); @@ -1433,10 +1434,10 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) i_nround(b1, b0, kp); i_lround(b0, b1, kp); - dst[0] = cpu_to_le32(b0[0]); - dst[1] = cpu_to_le32(b0[1]); - dst[2] = cpu_to_le32(b0[2]); - dst[3] = cpu_to_le32(b0[3]); + put_unaligned_le32(b0[0], out); + put_unaligned_le32(b0[1], out + 4); + put_unaligned_le32(b0[2], out + 8); + put_unaligned_le32(b0[3], out + 12); } static struct crypto_alg aes_alg = { @@ -1446,7 +1447,6 @@ static struct crypto_alg aes_alg = { .cra_flags = CRYPTO_ALG_TYPE_CIPHER, .cra_blocksize = AES_BLOCK_SIZE, .cra_ctxsize = sizeof(struct crypto_aes_ctx), - .cra_alignmask = 3, .cra_module = THIS_MODULE, .cra_u = { .cipher = { diff --git a/crypto/aes_ti.c b/crypto/aes_ti.c new file mode 100644 index 000000000000..92644fd1ac19 --- /dev/null +++ b/crypto/aes_ti.c @@ -0,0 +1,375 @@ +/* + * Scalar fixed time AES core transform + * + * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org> + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software 
Foundation. + */ + +#include <crypto/aes.h> +#include <linux/crypto.h> +#include <linux/module.h> +#include <asm/unaligned.h> + +/* + * Emit the sbox as volatile const to prevent the compiler from doing + * constant folding on sbox references involving fixed indexes. + */ +static volatile const u8 __cacheline_aligned __aesti_sbox[] = { + 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, + 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76, + 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, + 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, + 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, + 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, + 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, + 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, + 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, + 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, + 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, + 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, + 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, + 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8, + 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, + 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, + 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, + 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, + 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, + 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, + 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, + 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, + 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, + 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, + 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, + 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a, + 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, + 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, + 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, + 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, + 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, + 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16, +}; + +static volatile const u8 __cacheline_aligned __aesti_inv_sbox[] = { + 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, + 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, + 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, + 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, + 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, + 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e, + 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, + 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, + 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, + 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92, + 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, + 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, + 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, + 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06, + 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, + 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, + 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, + 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73, + 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, + 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, + 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, + 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b, + 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, + 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4, + 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, + 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, + 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, + 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef, + 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, + 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, + 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 
0xd6, 0x26, + 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d, +}; + +static u32 mul_by_x(u32 w) +{ + u32 x = w & 0x7f7f7f7f; + u32 y = w & 0x80808080; + + /* multiply by polynomial 'x' (0b10) in GF(2^8) */ + return (x << 1) ^ (y >> 7) * 0x1b; +} + +static u32 mul_by_x2(u32 w) +{ + u32 x = w & 0x3f3f3f3f; + u32 y = w & 0x80808080; + u32 z = w & 0x40404040; + + /* multiply by polynomial 'x^2' (0b100) in GF(2^8) */ + return (x << 2) ^ (y >> 7) * 0x36 ^ (z >> 6) * 0x1b; +} + +static u32 mix_columns(u32 x) +{ + /* + * Perform the following matrix multiplication in GF(2^8) + * + * | 0x2 0x3 0x1 0x1 | | x[0] | + * | 0x1 0x2 0x3 0x1 | | x[1] | + * | 0x1 0x1 0x2 0x3 | x | x[2] | + * | 0x3 0x1 0x1 0x3 | | x[3] | + */ + u32 y = mul_by_x(x) ^ ror32(x, 16); + + return y ^ ror32(x ^ y, 8); +} + +static u32 inv_mix_columns(u32 x) +{ + /* + * Perform the following matrix multiplication in GF(2^8) + * + * | 0xe 0xb 0xd 0x9 | | x[0] | + * | 0x9 0xe 0xb 0xd | | x[1] | + * | 0xd 0x9 0xe 0xb | x | x[2] | + * | 0xb 0xd 0x9 0xe | | x[3] | + * + * which can conveniently be reduced to + * + * | 0x2 0x3 0x1 0x1 | | 0x5 0x0 0x4 0x0 | | x[0] | + * | 0x1 0x2 0x3 0x1 | | 0x0 0x5 0x0 0x4 | | x[1] | + * | 0x1 0x1 0x2 0x3 | x | 0x4 0x0 0x5 0x0 | x | x[2] | + * | 0x3 0x1 0x1 0x2 | | 0x0 0x4 0x0 0x5 | | x[3] | + */ + u32 y = mul_by_x2(x); + + return mix_columns(x ^ y ^ ror32(y, 16)); +} + +static __always_inline u32 subshift(u32 in[], int pos) +{ + return (__aesti_sbox[in[pos] & 0xff]) ^ + (__aesti_sbox[(in[(pos + 1) % 4] >> 8) & 0xff] << 8) ^ + (__aesti_sbox[(in[(pos + 2) % 4] >> 16) & 0xff] << 16) ^ + (__aesti_sbox[(in[(pos + 3) % 4] >> 24) & 0xff] << 24); +} + +static __always_inline u32 inv_subshift(u32 in[], int pos) +{ + return (__aesti_inv_sbox[in[pos] & 0xff]) ^ + (__aesti_inv_sbox[(in[(pos + 3) % 4] >> 8) & 0xff] << 8) ^ + (__aesti_inv_sbox[(in[(pos + 2) % 4] >> 16) & 0xff] << 16) ^ + (__aesti_inv_sbox[(in[(pos + 1) % 4] >> 24) & 0xff] << 24); +} + +static u32 subw(u32 in) +{ + return (__aesti_sbox[in & 0xff]) ^ + (__aesti_sbox[(in >> 8) & 0xff] << 8) ^ + (__aesti_sbox[(in >> 16) & 0xff] << 16) ^ + (__aesti_sbox[(in >> 24) & 0xff] << 24); +} + +static int aesti_expand_key(struct crypto_aes_ctx *ctx, const u8 *in_key, + unsigned int key_len) +{ + u32 kwords = key_len / sizeof(u32); + u32 rc, i, j; + + if (key_len != AES_KEYSIZE_128 && + key_len != AES_KEYSIZE_192 && + key_len != AES_KEYSIZE_256) + return -EINVAL; + + ctx->key_length = key_len; + + for (i = 0; i < kwords; i++) + ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32)); + + for (i = 0, rc = 1; i < 10; i++, rc = mul_by_x(rc)) { + u32 *rki = ctx->key_enc + (i * kwords); + u32 *rko = rki + kwords; + + rko[0] = ror32(subw(rki[kwords - 1]), 8) ^ rc ^ rki[0]; + rko[1] = rko[0] ^ rki[1]; + rko[2] = rko[1] ^ rki[2]; + rko[3] = rko[2] ^ rki[3]; + + if (key_len == 24) { + if (i >= 7) + break; + rko[4] = rko[3] ^ rki[4]; + rko[5] = rko[4] ^ rki[5]; + } else if (key_len == 32) { + if (i >= 6) + break; + rko[4] = subw(rko[3]) ^ rki[4]; + rko[5] = rko[4] ^ rki[5]; + rko[6] = rko[5] ^ rki[6]; + rko[7] = rko[6] ^ rki[7]; + } + } + + /* + * Generate the decryption keys for the Equivalent Inverse Cipher. + * This involves reversing the order of the round keys, and applying + * the Inverse Mix Columns transformation to all but the first and + * the last one. 
+ */ + ctx->key_dec[0] = ctx->key_enc[key_len + 24]; + ctx->key_dec[1] = ctx->key_enc[key_len + 25]; + ctx->key_dec[2] = ctx->key_enc[key_len + 26]; + ctx->key_dec[3] = ctx->key_enc[key_len + 27]; + + for (i = 4, j = key_len + 20; j > 0; i += 4, j -= 4) { + ctx->key_dec[i] = inv_mix_columns(ctx->key_enc[j]); + ctx->key_dec[i + 1] = inv_mix_columns(ctx->key_enc[j + 1]); + ctx->key_dec[i + 2] = inv_mix_columns(ctx->key_enc[j + 2]); + ctx->key_dec[i + 3] = inv_mix_columns(ctx->key_enc[j + 3]); + } + + ctx->key_dec[i] = ctx->key_enc[0]; + ctx->key_dec[i + 1] = ctx->key_enc[1]; + ctx->key_dec[i + 2] = ctx->key_enc[2]; + ctx->key_dec[i + 3] = ctx->key_enc[3]; + + return 0; +} + +static int aesti_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); + int err; + + err = aesti_expand_key(ctx, in_key, key_len); + if (err) + return err; + + /* + * In order to force the compiler to emit data independent Sbox lookups + * at the start of each block, xor the first round key with values at + * fixed indexes in the Sbox. This will need to be repeated each time + * the key is used, which will pull the entire Sbox into the D-cache + * before any data dependent Sbox lookups are performed. + */ + ctx->key_enc[0] ^= __aesti_sbox[ 0] ^ __aesti_sbox[128]; + ctx->key_enc[1] ^= __aesti_sbox[32] ^ __aesti_sbox[160]; + ctx->key_enc[2] ^= __aesti_sbox[64] ^ __aesti_sbox[192]; + ctx->key_enc[3] ^= __aesti_sbox[96] ^ __aesti_sbox[224]; + + ctx->key_dec[0] ^= __aesti_inv_sbox[ 0] ^ __aesti_inv_sbox[128]; + ctx->key_dec[1] ^= __aesti_inv_sbox[32] ^ __aesti_inv_sbox[160]; + ctx->key_dec[2] ^= __aesti_inv_sbox[64] ^ __aesti_inv_sbox[192]; + ctx->key_dec[3] ^= __aesti_inv_sbox[96] ^ __aesti_inv_sbox[224]; + + return 0; +} + +static void aesti_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); + const u32 *rkp = ctx->key_enc + 4; + int rounds = 6 + ctx->key_length / 4; + u32 st0[4], st1[4]; + int round; + + st0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in); + st0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4); + st0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8); + st0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12); + + st0[0] ^= __aesti_sbox[ 0] ^ __aesti_sbox[128]; + st0[1] ^= __aesti_sbox[32] ^ __aesti_sbox[160]; + st0[2] ^= __aesti_sbox[64] ^ __aesti_sbox[192]; + st0[3] ^= __aesti_sbox[96] ^ __aesti_sbox[224]; + + for (round = 0;; round += 2, rkp += 8) { + st1[0] = mix_columns(subshift(st0, 0)) ^ rkp[0]; + st1[1] = mix_columns(subshift(st0, 1)) ^ rkp[1]; + st1[2] = mix_columns(subshift(st0, 2)) ^ rkp[2]; + st1[3] = mix_columns(subshift(st0, 3)) ^ rkp[3]; + + if (round == rounds - 2) + break; + + st0[0] = mix_columns(subshift(st1, 0)) ^ rkp[4]; + st0[1] = mix_columns(subshift(st1, 1)) ^ rkp[5]; + st0[2] = mix_columns(subshift(st1, 2)) ^ rkp[6]; + st0[3] = mix_columns(subshift(st1, 3)) ^ rkp[7]; + } + + put_unaligned_le32(subshift(st1, 0) ^ rkp[4], out); + put_unaligned_le32(subshift(st1, 1) ^ rkp[5], out + 4); + put_unaligned_le32(subshift(st1, 2) ^ rkp[6], out + 8); + put_unaligned_le32(subshift(st1, 3) ^ rkp[7], out + 12); +} + +static void aesti_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm); + const u32 *rkp = ctx->key_dec + 4; + int rounds = 6 + ctx->key_length / 4; + u32 st0[4], st1[4]; + int round; + + st0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in); + st0[1] = ctx->key_dec[1] ^ 
get_unaligned_le32(in + 4); + st0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8); + st0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12); + + st0[0] ^= __aesti_inv_sbox[ 0] ^ __aesti_inv_sbox[128]; + st0[1] ^= __aesti_inv_sbox[32] ^ __aesti_inv_sbox[160]; + st0[2] ^= __aesti_inv_sbox[64] ^ __aesti_inv_sbox[192]; + st0[3] ^= __aesti_inv_sbox[96] ^ __aesti_inv_sbox[224]; + + for (round = 0;; round += 2, rkp += 8) { + st1[0] = inv_mix_columns(inv_subshift(st0, 0)) ^ rkp[0]; + st1[1] = inv_mix_columns(inv_subshift(st0, 1)) ^ rkp[1]; + st1[2] = inv_mix_columns(inv_subshift(st0, 2)) ^ rkp[2]; + st1[3] = inv_mix_columns(inv_subshift(st0, 3)) ^ rkp[3]; + + if (round == rounds - 2) + break; + + st0[0] = inv_mix_columns(inv_subshift(st1, 0)) ^ rkp[4]; + st0[1] = inv_mix_columns(inv_subshift(st1, 1)) ^ rkp[5]; + st0[2] = inv_mix_columns(inv_subshift(st1, 2)) ^ rkp[6]; + st0[3] = inv_mix_columns(inv_subshift(st1, 3)) ^ rkp[7]; + } + + put_unaligned_le32(inv_subshift(st1, 0) ^ rkp[4], out); + put_unaligned_le32(inv_subshift(st1, 1) ^ rkp[5], out + 4); + put_unaligned_le32(inv_subshift(st1, 2) ^ rkp[6], out + 8); + put_unaligned_le32(inv_subshift(st1, 3) ^ rkp[7], out + 12); +} + +static struct crypto_alg aes_alg = { + .cra_name = "aes", + .cra_driver_name = "aes-fixed-time", + .cra_priority = 100 + 1, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = AES_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct crypto_aes_ctx), + .cra_module = THIS_MODULE, + + .cra_cipher.cia_min_keysize = AES_MIN_KEY_SIZE, + .cra_cipher.cia_max_keysize = AES_MAX_KEY_SIZE, + .cra_cipher.cia_setkey = aesti_set_key, + .cra_cipher.cia_encrypt = aesti_encrypt, + .cra_cipher.cia_decrypt = aesti_decrypt +}; + +static int __init aes_init(void) +{ + return crypto_register_alg(&aes_alg); +} + +static void __exit aes_fini(void) +{ + crypto_unregister_alg(&aes_alg); +} + +module_init(aes_init); +module_exit(aes_fini); + +MODULE_DESCRIPTION("Generic fixed time AES"); +MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>"); +MODULE_LICENSE("GPL v2"); diff --git a/crypto/ahash.c b/crypto/ahash.c index 2ce8bcb9049c..e58c4970c22b 100644 --- a/crypto/ahash.c +++ b/crypto/ahash.c @@ -23,6 +23,7 @@ #include <linux/slab.h> #include <linux/seq_file.h> #include <linux/cryptouser.h> +#include <linux/compiler.h> #include <net/netlink.h> #include "internal.h" @@ -493,7 +494,7 @@ static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg) { seq_printf(m, "type : ahash\n"); diff --git a/crypto/akcipher.c b/crypto/akcipher.c index def301ed1288..cfbdb06d8ca8 100644 --- a/crypto/akcipher.c +++ b/crypto/akcipher.c @@ -17,6 +17,7 @@ #include <linux/slab.h> #include <linux/string.h> #include <linux/crypto.h> +#include <linux/compiler.h> #include <crypto/algapi.h> #include <linux/cryptouser.h> #include <net/netlink.h> @@ -47,7 +48,7 @@ static int crypto_akcipher_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void crypto_akcipher_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_akcipher_show(struct seq_file *m, struct crypto_alg *alg) { diff --git a/crypto/algapi.c b/crypto/algapi.c index 1fad2a6b3bbb..6b52e8f0b95f 100644 --- a/crypto/algapi.c +++ b/crypto/algapi.c @@ -962,34 +962,66 @@ void crypto_inc(u8 *a, unsigned int size) __be32 *b = (__be32 *)(a + 
size); u32 c; - for (; size >= 4; size -= 4) { - c = be32_to_cpu(*--b) + 1; - *b = cpu_to_be32(c); - if (c) - return; - } + if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) || + !((unsigned long)b & (__alignof__(*b) - 1))) + for (; size >= 4; size -= 4) { + c = be32_to_cpu(*--b) + 1; + *b = cpu_to_be32(c); + if (c) + return; + } crypto_inc_byte(a, size); } EXPORT_SYMBOL_GPL(crypto_inc); -static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size) +void __crypto_xor(u8 *dst, const u8 *src, unsigned int len) { - for (; size; size--) - *a++ ^= *b++; -} + int relalign = 0; + + if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) { + int size = sizeof(unsigned long); + int d = ((unsigned long)dst ^ (unsigned long)src) & (size - 1); + + relalign = d ? 1 << __ffs(d) : size; + + /* + * If we care about alignment, process as many bytes as + * needed to advance dst and src to values whose alignments + * equal their relative alignment. This will allow us to + * process the remainder of the input using optimal strides. + */ + while (((unsigned long)dst & (relalign - 1)) && len > 0) { + *dst++ ^= *src++; + len--; + } + } -void crypto_xor(u8 *dst, const u8 *src, unsigned int size) -{ - u32 *a = (u32 *)dst; - u32 *b = (u32 *)src; + while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) { + *(u64 *)dst ^= *(u64 *)src; + dst += 8; + src += 8; + len -= 8; + } - for (; size >= 4; size -= 4) - *a++ ^= *b++; + while (len >= 4 && !(relalign & 3)) { + *(u32 *)dst ^= *(u32 *)src; + dst += 4; + src += 4; + len -= 4; + } + + while (len >= 2 && !(relalign & 1)) { + *(u16 *)dst ^= *(u16 *)src; + dst += 2; + src += 2; + len -= 2; + } - crypto_xor_byte((u8 *)a, (u8 *)b, size); + while (len--) + *dst++ ^= *src++; } -EXPORT_SYMBOL_GPL(crypto_xor); +EXPORT_SYMBOL_GPL(__crypto_xor); unsigned int crypto_alg_extsize(struct crypto_alg *alg) { diff --git a/crypto/algif_hash.c b/crypto/algif_hash.c index d19b09cdf284..54fc90e8339c 100644 --- a/crypto/algif_hash.c +++ b/crypto/algif_hash.c @@ -245,7 +245,7 @@ static int hash_accept(struct socket *sock, struct socket *newsock, int flags) struct alg_sock *ask = alg_sk(sk); struct hash_ctx *ctx = ask->private; struct ahash_request *req = &ctx->req; - char state[crypto_ahash_statesize(crypto_ahash_reqtfm(req))]; + char state[crypto_ahash_statesize(crypto_ahash_reqtfm(req)) ? : 1]; struct sock *sk2; struct alg_sock *ask2; struct hash_ctx *ctx2; diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c index a832426820e8..6c43a0a17a55 100644 --- a/crypto/blkcipher.c +++ b/crypto/blkcipher.c @@ -1,6 +1,6 @@ /* * Block chaining cipher operations. - * + * * Generic encrypt/decrypt wrapper for ciphers, handles operations across * multiple page boundaries by using temporary blocks. In user context, * the kernel is given a chance to schedule us once per page. @@ -9,7 +9,7 @@ * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free - * Software Foundation; either version 2 of the License, or (at your option) + * Software Foundation; either version 2 of the License, or (at your option) * any later version. 
* */ @@ -25,6 +25,7 @@ #include <linux/slab.h> #include <linux/string.h> #include <linux/cryptouser.h> +#include <linux/compiler.h> #include <net/netlink.h> #include "internal.h" @@ -534,7 +535,7 @@ static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg) { seq_printf(m, "type : blkcipher\n"); diff --git a/crypto/cbc.c b/crypto/cbc.c index 68f751a41a84..bc160a3186dc 100644 --- a/crypto/cbc.c +++ b/crypto/cbc.c @@ -145,9 +145,6 @@ static int crypto_cbc_create(struct crypto_template *tmpl, struct rtattr **tb) inst->alg.base.cra_blocksize = alg->cra_blocksize; inst->alg.base.cra_alignmask = alg->cra_alignmask; - /* We access the data as u32s when xoring. */ - inst->alg.base.cra_alignmask |= __alignof__(u32) - 1; - inst->alg.ivsize = alg->cra_blocksize; inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize; inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize; diff --git a/crypto/ccm.c b/crypto/ccm.c index 26b924d1e582..442848807a52 100644 --- a/crypto/ccm.c +++ b/crypto/ccm.c @@ -11,6 +11,7 @@ */ #include <crypto/internal/aead.h> +#include <crypto/internal/hash.h> #include <crypto/internal/skcipher.h> #include <crypto/scatterwalk.h> #include <linux/err.h> @@ -23,11 +24,11 @@ struct ccm_instance_ctx { struct crypto_skcipher_spawn ctr; - struct crypto_spawn cipher; + struct crypto_ahash_spawn mac; }; struct crypto_ccm_ctx { - struct crypto_cipher *cipher; + struct crypto_ahash *mac; struct crypto_skcipher *ctr; }; @@ -44,15 +45,21 @@ struct crypto_rfc4309_req_ctx { struct crypto_ccm_req_priv_ctx { u8 odata[16]; - u8 idata[16]; u8 auth_tag[16]; - u32 ilen; u32 flags; struct scatterlist src[3]; struct scatterlist dst[3]; struct skcipher_request skreq; }; +struct cbcmac_tfm_ctx { + struct crypto_cipher *child; +}; + +struct cbcmac_desc_ctx { + unsigned int len; +}; + static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx( struct aead_request *req) { @@ -84,7 +91,7 @@ static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key, { struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead); struct crypto_skcipher *ctr = ctx->ctr; - struct crypto_cipher *tfm = ctx->cipher; + struct crypto_ahash *mac = ctx->mac; int err = 0; crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK); @@ -96,11 +103,11 @@ static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key, if (err) goto out; - crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK); - crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) & + crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK); + crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) & CRYPTO_TFM_REQ_MASK); - err = crypto_cipher_setkey(tfm, key, keylen); - crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) & + err = crypto_ahash_setkey(mac, key, keylen); + crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) & CRYPTO_TFM_RES_MASK); out: @@ -167,119 +174,61 @@ static int format_adata(u8 *adata, unsigned int a) return len; } -static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n, - struct crypto_ccm_req_priv_ctx *pctx) -{ - unsigned int bs = 16; - u8 *odata = pctx->odata; - u8 *idata = pctx->idata; - int datalen, getlen; - - datalen = n; - - /* first time in here, block may be partially filled. 
*/ - getlen = bs - pctx->ilen; - if (datalen >= getlen) { - memcpy(idata + pctx->ilen, data, getlen); - crypto_xor(odata, idata, bs); - crypto_cipher_encrypt_one(tfm, odata, odata); - datalen -= getlen; - data += getlen; - pctx->ilen = 0; - } - - /* now encrypt rest of data */ - while (datalen >= bs) { - crypto_xor(odata, data, bs); - crypto_cipher_encrypt_one(tfm, odata, odata); - - datalen -= bs; - data += bs; - } - - /* check and see if there's leftover data that wasn't - * enough to fill a block. - */ - if (datalen) { - memcpy(idata + pctx->ilen, data, datalen); - pctx->ilen += datalen; - } -} - -static void get_data_to_compute(struct crypto_cipher *tfm, - struct crypto_ccm_req_priv_ctx *pctx, - struct scatterlist *sg, unsigned int len) -{ - struct scatter_walk walk; - u8 *data_src; - int n; - - scatterwalk_start(&walk, sg); - - while (len) { - n = scatterwalk_clamp(&walk, len); - if (!n) { - scatterwalk_start(&walk, sg_next(walk.sg)); - n = scatterwalk_clamp(&walk, len); - } - data_src = scatterwalk_map(&walk); - - compute_mac(tfm, data_src, n, pctx); - len -= n; - - scatterwalk_unmap(data_src); - scatterwalk_advance(&walk, n); - scatterwalk_done(&walk, 0, len); - if (len) - crypto_yield(pctx->flags); - } - - /* any leftover needs padding and then encrypted */ - if (pctx->ilen) { - int padlen; - u8 *odata = pctx->odata; - u8 *idata = pctx->idata; - - padlen = 16 - pctx->ilen; - memset(idata + pctx->ilen, 0, padlen); - crypto_xor(odata, idata, 16); - crypto_cipher_encrypt_one(tfm, odata, odata); - pctx->ilen = 0; - } -} - static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain, unsigned int cryptlen) { + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); struct crypto_aead *aead = crypto_aead_reqtfm(req); struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead); - struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); - struct crypto_cipher *cipher = ctx->cipher; + AHASH_REQUEST_ON_STACK(ahreq, ctx->mac); unsigned int assoclen = req->assoclen; - u8 *odata = pctx->odata; - u8 *idata = pctx->idata; - int err; + struct scatterlist sg[3]; + u8 odata[16]; + u8 idata[16]; + int ilen, err; /* format control data for input */ err = format_input(odata, req, cryptlen); if (err) goto out; - /* encrypt first block to use as start in computing mac */ - crypto_cipher_encrypt_one(cipher, odata, odata); + sg_init_table(sg, 3); + sg_set_buf(&sg[0], odata, 16); /* format associated data and compute into mac */ if (assoclen) { - pctx->ilen = format_adata(idata, assoclen); - get_data_to_compute(cipher, pctx, req->src, req->assoclen); + ilen = format_adata(idata, assoclen); + sg_set_buf(&sg[1], idata, ilen); + sg_chain(sg, 3, req->src); } else { - pctx->ilen = 0; + ilen = 0; + sg_chain(sg, 2, req->src); } - /* compute plaintext into mac */ - if (cryptlen) - get_data_to_compute(cipher, pctx, plain, cryptlen); + ahash_request_set_tfm(ahreq, ctx->mac); + ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL); + ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16); + err = crypto_ahash_init(ahreq); + if (err) + goto out; + err = crypto_ahash_update(ahreq); + if (err) + goto out; + /* we need to pad the MAC input to a round multiple of the block size */ + ilen = 16 - (assoclen + ilen) % 16; + if (ilen < 16) { + memset(idata, 0, ilen); + sg_init_table(sg, 2); + sg_set_buf(&sg[0], idata, ilen); + if (plain) + sg_chain(sg, 2, plain); + plain = sg; + cryptlen += ilen; + } + + ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen); + err = 
crypto_ahash_finup(ahreq); out: return err; } @@ -453,21 +402,21 @@ static int crypto_ccm_init_tfm(struct crypto_aead *tfm) struct aead_instance *inst = aead_alg_instance(tfm); struct ccm_instance_ctx *ictx = aead_instance_ctx(inst); struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm); - struct crypto_cipher *cipher; + struct crypto_ahash *mac; struct crypto_skcipher *ctr; unsigned long align; int err; - cipher = crypto_spawn_cipher(&ictx->cipher); - if (IS_ERR(cipher)) - return PTR_ERR(cipher); + mac = crypto_spawn_ahash(&ictx->mac); + if (IS_ERR(mac)) + return PTR_ERR(mac); ctr = crypto_spawn_skcipher(&ictx->ctr); err = PTR_ERR(ctr); if (IS_ERR(ctr)) - goto err_free_cipher; + goto err_free_mac; - ctx->cipher = cipher; + ctx->mac = mac; ctx->ctr = ctr; align = crypto_aead_alignmask(tfm); @@ -479,8 +428,8 @@ static int crypto_ccm_init_tfm(struct crypto_aead *tfm) return 0; -err_free_cipher: - crypto_free_cipher(cipher); +err_free_mac: + crypto_free_ahash(mac); return err; } @@ -488,7 +437,7 @@ static void crypto_ccm_exit_tfm(struct crypto_aead *tfm) { struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm); - crypto_free_cipher(ctx->cipher); + crypto_free_ahash(ctx->mac); crypto_free_skcipher(ctx->ctr); } @@ -496,7 +445,7 @@ static void crypto_ccm_free(struct aead_instance *inst) { struct ccm_instance_ctx *ctx = aead_instance_ctx(inst); - crypto_drop_spawn(&ctx->cipher); + crypto_drop_ahash(&ctx->mac); crypto_drop_skcipher(&ctx->ctr); kfree(inst); } @@ -505,12 +454,13 @@ static int crypto_ccm_create_common(struct crypto_template *tmpl, struct rtattr **tb, const char *full_name, const char *ctr_name, - const char *cipher_name) + const char *mac_name) { struct crypto_attr_type *algt; struct aead_instance *inst; struct skcipher_alg *ctr; - struct crypto_alg *cipher; + struct crypto_alg *mac_alg; + struct hash_alg_common *mac; struct ccm_instance_ctx *ictx; int err; @@ -521,25 +471,26 @@ static int crypto_ccm_create_common(struct crypto_template *tmpl, if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) return -EINVAL; - cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER, - CRYPTO_ALG_TYPE_MASK); - if (IS_ERR(cipher)) - return PTR_ERR(cipher); + mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type, + CRYPTO_ALG_TYPE_HASH, + CRYPTO_ALG_TYPE_AHASH_MASK | + CRYPTO_ALG_ASYNC); + if (IS_ERR(mac_alg)) + return PTR_ERR(mac_alg); + mac = __crypto_hash_alg_common(mac_alg); err = -EINVAL; - if (cipher->cra_blocksize != 16) - goto out_put_cipher; + if (mac->digestsize != 16) + goto out_put_mac; inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL); err = -ENOMEM; if (!inst) - goto out_put_cipher; + goto out_put_mac; ictx = aead_instance_ctx(inst); - - err = crypto_init_spawn(&ictx->cipher, cipher, - aead_crypto_instance(inst), - CRYPTO_ALG_TYPE_MASK); + err = crypto_init_ahash_spawn(&ictx->mac, mac, + aead_crypto_instance(inst)); if (err) goto err_free_inst; @@ -548,7 +499,7 @@ static int crypto_ccm_create_common(struct crypto_template *tmpl, crypto_requires_sync(algt->type, algt->mask)); if (err) - goto err_drop_cipher; + goto err_drop_mac; ctr = crypto_spawn_skcipher_alg(&ictx->ctr); @@ -564,18 +515,17 @@ static int crypto_ccm_create_common(struct crypto_template *tmpl, err = -ENAMETOOLONG; if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)", ctr->base.cra_driver_name, - cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) + mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) goto err_drop_ctr; memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME); 
inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC; - inst->alg.base.cra_priority = (cipher->cra_priority + + inst->alg.base.cra_priority = (mac->base.cra_priority + ctr->base.cra_priority) / 2; inst->alg.base.cra_blocksize = 1; - inst->alg.base.cra_alignmask = cipher->cra_alignmask | - ctr->base.cra_alignmask | - (__alignof__(u32) - 1); + inst->alg.base.cra_alignmask = mac->base.cra_alignmask | + ctr->base.cra_alignmask; inst->alg.ivsize = 16; inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr); inst->alg.maxauthsize = 16; @@ -593,23 +543,24 @@ static int crypto_ccm_create_common(struct crypto_template *tmpl, if (err) goto err_drop_ctr; -out_put_cipher: - crypto_mod_put(cipher); +out_put_mac: + crypto_mod_put(mac_alg); return err; err_drop_ctr: crypto_drop_skcipher(&ictx->ctr); -err_drop_cipher: - crypto_drop_spawn(&ictx->cipher); +err_drop_mac: + crypto_drop_ahash(&ictx->mac); err_free_inst: kfree(inst); - goto out_put_cipher; + goto out_put_mac; } static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb) { const char *cipher_name; char ctr_name[CRYPTO_MAX_ALG_NAME]; + char mac_name[CRYPTO_MAX_ALG_NAME]; char full_name[CRYPTO_MAX_ALG_NAME]; cipher_name = crypto_attr_alg_name(tb[1]); @@ -620,12 +571,16 @@ static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb) cipher_name) >= CRYPTO_MAX_ALG_NAME) return -ENAMETOOLONG; + if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)", + cipher_name) >= CRYPTO_MAX_ALG_NAME) + return -ENAMETOOLONG; + if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >= CRYPTO_MAX_ALG_NAME) return -ENAMETOOLONG; return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name, - cipher_name); + mac_name); } static struct crypto_template crypto_ccm_tmpl = { @@ -899,14 +854,164 @@ static struct crypto_template crypto_rfc4309_tmpl = { .module = THIS_MODULE, }; +static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent, + const u8 *inkey, unsigned int keylen) +{ + struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent); + + return crypto_cipher_setkey(ctx->child, inkey, keylen); +} + +static int crypto_cbcmac_digest_init(struct shash_desc *pdesc) +{ + struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc); + int bs = crypto_shash_digestsize(pdesc->tfm); + u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs; + + ctx->len = 0; + memset(dg, 0, bs); + + return 0; +} + +static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p, + unsigned int len) +{ + struct crypto_shash *parent = pdesc->tfm; + struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent); + struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc); + struct crypto_cipher *tfm = tctx->child; + int bs = crypto_shash_digestsize(parent); + u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs; + + while (len > 0) { + unsigned int l = min(len, bs - ctx->len); + + crypto_xor(dg + ctx->len, p, l); + ctx->len +=l; + len -= l; + p += l; + + if (ctx->len == bs) { + crypto_cipher_encrypt_one(tfm, dg, dg); + ctx->len = 0; + } + } + + return 0; +} + +static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out) +{ + struct crypto_shash *parent = pdesc->tfm; + struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent); + struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc); + struct crypto_cipher *tfm = tctx->child; + int bs = crypto_shash_digestsize(parent); + u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs; + + if (ctx->len) + crypto_cipher_encrypt_one(tfm, dg, dg); + + memcpy(out, dg, bs); + return 0; +} 
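The cbcmac template above keeps a one-block running state, XORs each chunk of input into it and runs the underlying block cipher once per full block. A rough, self-contained sketch of the same construction follows (plain C, outside the kernel API; block_encrypt() is a toy placeholder standing in for crypto_cipher_encrypt_one(), not a real cipher):

#include <stdint.h>
#include <string.h>

#define BS 16 /* AES block size */

/* placeholder permutation, only so the sketch compiles and runs */
static void block_encrypt(const uint8_t key[BS], uint8_t blk[BS])
{
	for (int i = 0; i < BS; i++)
		blk[i] = (uint8_t)((blk[i] ^ key[i]) * 167u + i);
}

static void cbcmac(const uint8_t key[BS], const uint8_t *msg, size_t len,
		   uint8_t mac[BS])
{
	uint8_t dg[BS] = { 0 };		/* running state, like 'dg' above */

	/* absorb full blocks: XOR in, then one block-cipher call,
	 * mirroring crypto_cbcmac_digest_update() */
	while (len >= BS) {
		for (int i = 0; i < BS; i++)
			dg[i] ^= msg[i];
		block_encrypt(key, dg);
		msg += BS;
		len -= BS;
	}

	/* a trailing partial block is XORed in and encrypted as-is,
	 * mirroring crypto_cbcmac_digest_final() */
	if (len) {
		for (size_t i = 0; i < len; i++)
			dg[i] ^= msg[i];
		block_encrypt(key, dg);
	}

	memcpy(mac, dg, BS);
}

Raw CBC-MAC is only safe for fixed-length messages; CCM deals with that by encoding the message length into the first block (format_input() above) and by padding the MAC input to a multiple of the block size before it reaches this hash.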
+ +static int cbcmac_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_cipher *cipher; + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm); + + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctx->child = cipher; + + return 0; +}; + +static void cbcmac_exit_tfm(struct crypto_tfm *tfm) +{ + struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm); + crypto_free_cipher(ctx->child); +} + +static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb) +{ + struct shash_instance *inst; + struct crypto_alg *alg; + int err; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH); + if (err) + return err; + + alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + if (IS_ERR(alg)) + return PTR_ERR(alg); + + inst = shash_alloc_instance("cbcmac", alg); + err = PTR_ERR(inst); + if (IS_ERR(inst)) + goto out_put_alg; + + err = crypto_init_spawn(shash_instance_ctx(inst), alg, + shash_crypto_instance(inst), + CRYPTO_ALG_TYPE_MASK); + if (err) + goto out_free_inst; + + inst->alg.base.cra_priority = alg->cra_priority; + inst->alg.base.cra_blocksize = 1; + + inst->alg.digestsize = alg->cra_blocksize; + inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx), + alg->cra_alignmask + 1) + + alg->cra_blocksize; + + inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx); + inst->alg.base.cra_init = cbcmac_init_tfm; + inst->alg.base.cra_exit = cbcmac_exit_tfm; + + inst->alg.init = crypto_cbcmac_digest_init; + inst->alg.update = crypto_cbcmac_digest_update; + inst->alg.final = crypto_cbcmac_digest_final; + inst->alg.setkey = crypto_cbcmac_digest_setkey; + + err = shash_register_instance(tmpl, inst); + +out_free_inst: + if (err) + shash_free_instance(shash_crypto_instance(inst)); + +out_put_alg: + crypto_mod_put(alg); + return err; +} + +static struct crypto_template crypto_cbcmac_tmpl = { + .name = "cbcmac", + .create = cbcmac_create, + .free = shash_free_instance, + .module = THIS_MODULE, +}; + static int __init crypto_ccm_module_init(void) { int err; - err = crypto_register_template(&crypto_ccm_base_tmpl); + err = crypto_register_template(&crypto_cbcmac_tmpl); if (err) goto out; + err = crypto_register_template(&crypto_ccm_base_tmpl); + if (err) + goto out_undo_cbcmac; + err = crypto_register_template(&crypto_ccm_tmpl); if (err) goto out_undo_base; @@ -922,6 +1027,8 @@ out_undo_ccm: crypto_unregister_template(&crypto_ccm_tmpl); out_undo_base: crypto_unregister_template(&crypto_ccm_base_tmpl); +out_undo_cbcmac: + crypto_register_template(&crypto_cbcmac_tmpl); goto out; } @@ -930,6 +1037,7 @@ static void __exit crypto_ccm_module_exit(void) crypto_unregister_template(&crypto_rfc4309_tmpl); crypto_unregister_template(&crypto_ccm_tmpl); crypto_unregister_template(&crypto_ccm_base_tmpl); + crypto_unregister_template(&crypto_cbcmac_tmpl); } module_init(crypto_ccm_module_init); diff --git a/crypto/chacha20_generic.c b/crypto/chacha20_generic.c index 1cab83146e33..8b3c04d625c3 100644 --- a/crypto/chacha20_generic.c +++ b/crypto/chacha20_generic.c @@ -10,10 +10,9 @@ */ #include <crypto/algapi.h> -#include <linux/crypto.h> -#include <linux/kernel.h> -#include <linux/module.h> #include <crypto/chacha20.h> +#include <crypto/internal/skcipher.h> +#include <linux/module.h> static inline u32 le32_to_cpuvp(const void *p) { @@ -63,10 +62,10 @@ void crypto_chacha20_init(u32 *state, struct chacha20_ctx *ctx, u8 *iv) } 
EXPORT_SYMBOL_GPL(crypto_chacha20_init); -int crypto_chacha20_setkey(struct crypto_tfm *tfm, const u8 *key, +int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keysize) { - struct chacha20_ctx *ctx = crypto_tfm_ctx(tfm); + struct chacha20_ctx *ctx = crypto_skcipher_ctx(tfm); int i; if (keysize != CHACHA20_KEY_SIZE) @@ -79,66 +78,54 @@ int crypto_chacha20_setkey(struct crypto_tfm *tfm, const u8 *key, } EXPORT_SYMBOL_GPL(crypto_chacha20_setkey); -int crypto_chacha20_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, - struct scatterlist *src, unsigned int nbytes) +int crypto_chacha20_crypt(struct skcipher_request *req) { - struct blkcipher_walk walk; + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); + struct chacha20_ctx *ctx = crypto_skcipher_ctx(tfm); + struct skcipher_walk walk; u32 state[16]; int err; - blkcipher_walk_init(&walk, dst, src, nbytes); - err = blkcipher_walk_virt_block(desc, &walk, CHACHA20_BLOCK_SIZE); - - crypto_chacha20_init(state, crypto_blkcipher_ctx(desc->tfm), walk.iv); + err = skcipher_walk_virt(&walk, req, true); - while (walk.nbytes >= CHACHA20_BLOCK_SIZE) { - chacha20_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr, - rounddown(walk.nbytes, CHACHA20_BLOCK_SIZE)); - err = blkcipher_walk_done(desc, &walk, - walk.nbytes % CHACHA20_BLOCK_SIZE); - } + crypto_chacha20_init(state, ctx, walk.iv); - if (walk.nbytes) { + while (walk.nbytes > 0) { chacha20_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr, walk.nbytes); - err = blkcipher_walk_done(desc, &walk, 0); + err = skcipher_walk_done(&walk, 0); } return err; } EXPORT_SYMBOL_GPL(crypto_chacha20_crypt); -static struct crypto_alg alg = { - .cra_name = "chacha20", - .cra_driver_name = "chacha20-generic", - .cra_priority = 100, - .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, - .cra_blocksize = 1, - .cra_type = &crypto_blkcipher_type, - .cra_ctxsize = sizeof(struct chacha20_ctx), - .cra_alignmask = sizeof(u32) - 1, - .cra_module = THIS_MODULE, - .cra_u = { - .blkcipher = { - .min_keysize = CHACHA20_KEY_SIZE, - .max_keysize = CHACHA20_KEY_SIZE, - .ivsize = CHACHA20_IV_SIZE, - .geniv = "seqiv", - .setkey = crypto_chacha20_setkey, - .encrypt = crypto_chacha20_crypt, - .decrypt = crypto_chacha20_crypt, - }, - }, +static struct skcipher_alg alg = { + .base.cra_name = "chacha20", + .base.cra_driver_name = "chacha20-generic", + .base.cra_priority = 100, + .base.cra_blocksize = 1, + .base.cra_ctxsize = sizeof(struct chacha20_ctx), + .base.cra_alignmask = sizeof(u32) - 1, + .base.cra_module = THIS_MODULE, + + .min_keysize = CHACHA20_KEY_SIZE, + .max_keysize = CHACHA20_KEY_SIZE, + .ivsize = CHACHA20_IV_SIZE, + .chunksize = CHACHA20_BLOCK_SIZE, + .setkey = crypto_chacha20_setkey, + .encrypt = crypto_chacha20_crypt, + .decrypt = crypto_chacha20_crypt, }; static int __init chacha20_generic_mod_init(void) { - return crypto_register_alg(&alg); + return crypto_register_skcipher(&alg); } static void __exit chacha20_generic_mod_fini(void) { - crypto_unregister_alg(&alg); + crypto_unregister_skcipher(&alg); } module_init(chacha20_generic_mod_init); diff --git a/crypto/cmac.c b/crypto/cmac.c index 04080dca8f0c..16301f52858c 100644 --- a/crypto/cmac.c +++ b/crypto/cmac.c @@ -260,8 +260,7 @@ static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb) if (err) goto out_free_inst; - /* We access the data as u32s when xoring. 
*/ - alignmask = alg->cra_alignmask | (__alignof__(u32) - 1); + alignmask = alg->cra_alignmask; inst->alg.base.cra_alignmask = alignmask; inst->alg.base.cra_priority = alg->cra_priority; inst->alg.base.cra_blocksize = alg->cra_blocksize; diff --git a/crypto/ctr.c b/crypto/ctr.c index a9a7a44f2783..a4f4a8983169 100644 --- a/crypto/ctr.c +++ b/crypto/ctr.c @@ -209,7 +209,7 @@ static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb) inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER; inst->alg.cra_priority = alg->cra_priority; inst->alg.cra_blocksize = 1; - inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1); + inst->alg.cra_alignmask = alg->cra_alignmask; inst->alg.cra_type = &crypto_blkcipher_type; inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; diff --git a/crypto/cts.c b/crypto/cts.c index 00254d76b21b..243f591dc409 100644 --- a/crypto/cts.c +++ b/crypto/cts.c @@ -49,6 +49,7 @@ #include <linux/scatterlist.h> #include <crypto/scatterwalk.h> #include <linux/slab.h> +#include <linux/compiler.h> struct crypto_cts_ctx { struct crypto_skcipher *child; @@ -103,7 +104,7 @@ static int cts_cbc_encrypt(struct skcipher_request *req) struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); struct skcipher_request *subreq = &rctx->subreq; int bsize = crypto_skcipher_blocksize(tfm); - u8 d[bsize * 2] __attribute__ ((aligned(__alignof__(u32)))); + u8 d[bsize * 2] __aligned(__alignof__(u32)); struct scatterlist *sg; unsigned int offset; int lastn; @@ -183,7 +184,7 @@ static int cts_cbc_decrypt(struct skcipher_request *req) struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); struct skcipher_request *subreq = &rctx->subreq; int bsize = crypto_skcipher_blocksize(tfm); - u8 d[bsize * 2] __attribute__ ((aligned(__alignof__(u32)))); + u8 d[bsize * 2] __aligned(__alignof__(u32)); struct scatterlist *sg; unsigned int offset; u8 *space; @@ -373,9 +374,6 @@ static int crypto_cts_create(struct crypto_template *tmpl, struct rtattr **tb) inst->alg.base.cra_blocksize = alg->base.cra_blocksize; inst->alg.base.cra_alignmask = alg->base.cra_alignmask; - /* We access the data as u32s when xoring. 
*/ - inst->alg.base.cra_alignmask |= __alignof__(u32) - 1; - inst->alg.ivsize = alg->base.cra_blocksize; inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg); inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg); diff --git a/crypto/kpp.c b/crypto/kpp.c index d36ce05eee43..a90edc27af77 100644 --- a/crypto/kpp.c +++ b/crypto/kpp.c @@ -19,6 +19,7 @@ #include <linux/crypto.h> #include <crypto/algapi.h> #include <linux/cryptouser.h> +#include <linux/compiler.h> #include <net/netlink.h> #include <crypto/kpp.h> #include <crypto/internal/kpp.h> @@ -47,7 +48,7 @@ static int crypto_kpp_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void crypto_kpp_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_kpp_show(struct seq_file *m, struct crypto_alg *alg) { diff --git a/crypto/pcbc.c b/crypto/pcbc.c index e4538e07f7ca..29dd2b4a3b85 100644 --- a/crypto/pcbc.c +++ b/crypto/pcbc.c @@ -20,6 +20,7 @@ #include <linux/kernel.h> #include <linux/module.h> #include <linux/slab.h> +#include <linux/compiler.h> struct crypto_pcbc_ctx { struct crypto_cipher *child; @@ -146,7 +147,7 @@ static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req, unsigned int nbytes = walk->nbytes; u8 *src = walk->src.virt.addr; u8 *iv = walk->iv; - u8 tmpbuf[bsize] __attribute__ ((aligned(__alignof__(u32)))); + u8 tmpbuf[bsize] __aligned(__alignof__(u32)); do { memcpy(tmpbuf, src, bsize); @@ -259,9 +260,6 @@ static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb) inst->alg.base.cra_blocksize = alg->cra_blocksize; inst->alg.base.cra_alignmask = alg->cra_alignmask; - /* We access the data as u32s when xoring. */ - inst->alg.base.cra_alignmask |= __alignof__(u32) - 1; - inst->alg.ivsize = alg->cra_blocksize; inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize; inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize; diff --git a/crypto/rng.c b/crypto/rng.c index b81cffb13bab..f46dac5288b9 100644 --- a/crypto/rng.c +++ b/crypto/rng.c @@ -23,6 +23,7 @@ #include <linux/slab.h> #include <linux/string.h> #include <linux/cryptouser.h> +#include <linux/compiler.h> #include <net/netlink.h> #include "internal.h" @@ -95,7 +96,7 @@ static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg) { seq_printf(m, "type : rng\n"); diff --git a/crypto/scompress.c b/crypto/scompress.c index 35e396d154b7..6b048b36312d 100644 --- a/crypto/scompress.c +++ b/crypto/scompress.c @@ -18,6 +18,7 @@ #include <linux/slab.h> #include <linux/string.h> #include <linux/crypto.h> +#include <linux/compiler.h> #include <linux/vmalloc.h> #include <crypto/algapi.h> #include <linux/cryptouser.h> @@ -57,7 +58,7 @@ static int crypto_scomp_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void crypto_scomp_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_scomp_show(struct seq_file *m, struct crypto_alg *alg) { diff --git a/crypto/seqiv.c b/crypto/seqiv.c index c7049231861f..570b7d1aa0ca 100644 --- a/crypto/seqiv.c +++ b/crypto/seqiv.c @@ -153,8 +153,6 @@ static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb) if (IS_ERR(inst)) return PTR_ERR(inst); - inst->alg.base.cra_alignmask |= __alignof__(u32) - 1; - spawn = aead_instance_ctx(inst); alg = 
crypto_spawn_aead_alg(spawn); diff --git a/crypto/shash.c b/crypto/shash.c index a051541a4a17..5e31c8d776df 100644 --- a/crypto/shash.c +++ b/crypto/shash.c @@ -19,6 +19,7 @@ #include <linux/seq_file.h> #include <linux/cryptouser.h> #include <net/netlink.h> +#include <linux/compiler.h> #include "internal.h" @@ -67,7 +68,7 @@ EXPORT_SYMBOL_GPL(crypto_shash_setkey); static inline unsigned int shash_align_buffer_size(unsigned len, unsigned long mask) { - typedef u8 __attribute__ ((aligned)) u8_aligned; + typedef u8 __aligned_largest u8_aligned; return len + (mask & ~(__alignof__(u8_aligned) - 1)); } @@ -80,7 +81,7 @@ static int shash_update_unaligned(struct shash_desc *desc, const u8 *data, unsigned int unaligned_len = alignmask + 1 - ((unsigned long)data & alignmask); u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)] - __attribute__ ((aligned)); + __aligned_largest; u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1); int err; @@ -116,7 +117,7 @@ static int shash_final_unaligned(struct shash_desc *desc, u8 *out) struct shash_alg *shash = crypto_shash_alg(tfm); unsigned int ds = crypto_shash_digestsize(tfm); u8 ubuf[shash_align_buffer_size(ds, alignmask)] - __attribute__ ((aligned)); + __aligned_largest; u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1); int err; @@ -403,7 +404,7 @@ static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg) #endif static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg) { struct shash_alg *salg = __crypto_shash_alg(alg); diff --git a/crypto/skcipher.c b/crypto/skcipher.c index 0e1e6c35188e..014af741fc6a 100644 --- a/crypto/skcipher.c +++ b/crypto/skcipher.c @@ -19,6 +19,7 @@ #include <crypto/scatterwalk.h> #include <linux/bug.h> #include <linux/cryptouser.h> +#include <linux/compiler.h> #include <linux/list.h> #include <linux/module.h> #include <linux/rtnetlink.h> @@ -185,12 +186,12 @@ void skcipher_walk_complete(struct skcipher_walk *walk, int err) data = p->data; if (!data) { data = PTR_ALIGN(&p->buffer[0], walk->alignmask + 1); - data = skcipher_get_spot(data, walk->chunksize); + data = skcipher_get_spot(data, walk->stride); } scatterwalk_copychunks(data, &p->dst, p->len, 1); - if (offset_in_page(p->data) + p->len + walk->chunksize > + if (offset_in_page(p->data) + p->len + walk->stride > PAGE_SIZE) free_page((unsigned long)p->data); @@ -299,7 +300,7 @@ static int skcipher_next_copy(struct skcipher_walk *walk) p->len = walk->nbytes; skcipher_queue_write(walk, p); - if (offset_in_page(walk->page) + walk->nbytes + walk->chunksize > + if (offset_in_page(walk->page) + walk->nbytes + walk->stride > PAGE_SIZE) walk->page = NULL; else @@ -344,7 +345,7 @@ static int skcipher_walk_next(struct skcipher_walk *walk) SKCIPHER_WALK_DIFF); n = walk->total; - bsize = min(walk->chunksize, max(n, walk->blocksize)); + bsize = min(walk->stride, max(n, walk->blocksize)); n = scatterwalk_clamp(&walk->in, n); n = scatterwalk_clamp(&walk->out, n); @@ -393,7 +394,7 @@ static int skcipher_copy_iv(struct skcipher_walk *walk) unsigned a = crypto_tfm_ctx_alignment() - 1; unsigned alignmask = walk->alignmask; unsigned ivsize = walk->ivsize; - unsigned bs = walk->chunksize; + unsigned bs = walk->stride; unsigned aligned_bs; unsigned size; u8 *iv; @@ -463,7 +464,7 @@ static int skcipher_walk_skcipher(struct skcipher_walk *walk, SKCIPHER_WALK_SLEEP : 0; walk->blocksize = crypto_skcipher_blocksize(tfm); - walk->chunksize = 
crypto_skcipher_chunksize(tfm); + walk->stride = crypto_skcipher_walksize(tfm); walk->ivsize = crypto_skcipher_ivsize(tfm); walk->alignmask = crypto_skcipher_alignmask(tfm); @@ -525,7 +526,7 @@ static int skcipher_walk_aead_common(struct skcipher_walk *walk, walk->flags &= ~SKCIPHER_WALK_SLEEP; walk->blocksize = crypto_aead_blocksize(tfm); - walk->chunksize = crypto_aead_chunksize(tfm); + walk->stride = crypto_aead_chunksize(tfm); walk->ivsize = crypto_aead_ivsize(tfm); walk->alignmask = crypto_aead_alignmask(tfm); @@ -807,7 +808,7 @@ static void crypto_skcipher_free_instance(struct crypto_instance *inst) } static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg) - __attribute__ ((unused)); + __maybe_unused; static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg) { struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg, @@ -821,6 +822,7 @@ static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg) seq_printf(m, "max keysize : %u\n", skcipher->max_keysize); seq_printf(m, "ivsize : %u\n", skcipher->ivsize); seq_printf(m, "chunksize : %u\n", skcipher->chunksize); + seq_printf(m, "walksize : %u\n", skcipher->walksize); } #ifdef CONFIG_NET @@ -893,11 +895,14 @@ static int skcipher_prepare_alg(struct skcipher_alg *alg) { struct crypto_alg *base = &alg->base; - if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8) + if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8 || + alg->walksize > PAGE_SIZE / 8) return -EINVAL; if (!alg->chunksize) alg->chunksize = base->cra_blocksize; + if (!alg->walksize) + alg->walksize = alg->chunksize; base->cra_type = &crypto_skcipher_type2; base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index ae22f05d5936..9a11f3c2bf98 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -22,6 +22,8 @@ * */ +#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt + #include <crypto/aead.h> #include <crypto/hash.h> #include <crypto/skcipher.h> @@ -1010,6 +1012,8 @@ static inline int tcrypt_test(const char *alg) { int ret; + pr_debug("testing %s\n", alg); + ret = alg_test(alg, alg, 0, 0); /* non-fips algs return -EINVAL in fips mode */ if (fips_enabled && ret == -EINVAL) @@ -2059,6 +2063,8 @@ static int __init tcrypt_mod_init(void) if (err) { printk(KERN_ERR "tcrypt: one or more tests failed!\n"); goto err_free_tv; + } else { + pr_debug("all tests passed\n"); } /* We intentionaly return -EAGAIN to prevent keeping the module, diff --git a/crypto/testmgr.c b/crypto/testmgr.c index 44e888b0b041..f9c378af3907 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -265,6 +265,7 @@ static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, const int align_offset) { const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); + size_t digest_size = crypto_ahash_digestsize(tfm); unsigned int i, j, k, temp; struct scatterlist sg[8]; char *result; @@ -275,7 +276,7 @@ static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, char *xbuf[XBUFSIZE]; int ret = -ENOMEM; - result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL); + result = kmalloc(digest_size, GFP_KERNEL); if (!result) return ret; key = kmalloc(MAX_KEYLEN, GFP_KERNEL); @@ -305,7 +306,7 @@ static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, goto out; j++; - memset(result, 0, MAX_DIGEST_SIZE); + memset(result, 0, digest_size); hash_buff = xbuf[0]; hash_buff += align_offset; @@ -380,7 +381,7 @@ static int __test_hash(struct crypto_ahash 
*tfm, struct hash_testvec *template, continue; j++; - memset(result, 0, MAX_DIGEST_SIZE); + memset(result, 0, digest_size); temp = 0; sg_init_table(sg, template[i].np); @@ -458,7 +459,7 @@ static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, continue; j++; - memset(result, 0, MAX_DIGEST_SIZE); + memset(result, 0, digest_size); ret = -EINVAL; hash_buff = xbuf[0]; @@ -1463,13 +1464,12 @@ static int test_acomp(struct crypto_acomp *tfm, struct comp_testvec *ctemplate, int ilen = ctemplate[i].inlen; void *input_vec; - input_vec = kmalloc(ilen, GFP_KERNEL); + input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL); if (!input_vec) { ret = -ENOMEM; goto out; } - memcpy(input_vec, ctemplate[i].input, ilen); memset(output, 0, dlen); init_completion(&result.completion); sg_init_one(&src, input_vec, ilen); @@ -1525,13 +1525,12 @@ static int test_acomp(struct crypto_acomp *tfm, struct comp_testvec *ctemplate, int ilen = dtemplate[i].inlen; void *input_vec; - input_vec = kmalloc(ilen, GFP_KERNEL); + input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL); if (!input_vec) { ret = -ENOMEM; goto out; } - memcpy(input_vec, dtemplate[i].input, ilen); memset(output, 0, dlen); init_completion(&result.completion); sg_init_one(&src, input_vec, ilen); @@ -2251,30 +2250,23 @@ static int alg_test_null(const struct alg_test_desc *desc, return 0; } +#define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) } + /* Please keep this list sorted by algorithm name. */ static const struct alg_test_desc alg_test_descs[] = { { .alg = "ansi_cprng", .test = alg_test_cprng, .suite = { - .cprng = { - .vecs = ansi_cprng_aes_tv_template, - .count = ANSI_CPRNG_AES_TEST_VECTORS - } + .cprng = __VECS(ansi_cprng_aes_tv_template) } }, { .alg = "authenc(hmac(md5),ecb(cipher_null))", .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = hmac_md5_ecb_cipher_null_enc_tv_template, - .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS - }, - .dec = { - .vecs = hmac_md5_ecb_cipher_null_dec_tv_template, - .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS - } + .enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template), + .dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template) } } }, { @@ -2282,12 +2274,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha1_aes_cbc_enc_tv_temp, - .count = - HMAC_SHA1_AES_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp) } } }, { @@ -2295,12 +2282,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha1_des_cbc_enc_tv_temp, - .count = - HMAC_SHA1_DES_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp) } } }, { @@ -2309,12 +2291,7 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha1_des3_ede_cbc_enc_tv_temp, - .count = - HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp) } } }, { @@ -2326,18 +2303,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha1_ecb_cipher_null_enc_tv_temp, - .count = - HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC - }, - .dec = { - .vecs = - hmac_sha1_ecb_cipher_null_dec_tv_temp, - .count = - HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC - } + .enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp), + .dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp) } } }, { @@ -2349,12 
+2316,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha224_des_cbc_enc_tv_temp, - .count = - HMAC_SHA224_DES_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp) } } }, { @@ -2363,12 +2325,7 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha224_des3_ede_cbc_enc_tv_temp, - .count = - HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp) } } }, { @@ -2377,12 +2334,7 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha256_aes_cbc_enc_tv_temp, - .count = - HMAC_SHA256_AES_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp) } } }, { @@ -2390,12 +2342,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha256_des_cbc_enc_tv_temp, - .count = - HMAC_SHA256_DES_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp) } } }, { @@ -2404,12 +2351,7 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha256_des3_ede_cbc_enc_tv_temp, - .count = - HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp) } } }, { @@ -2425,12 +2367,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha384_des_cbc_enc_tv_temp, - .count = - HMAC_SHA384_DES_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp) } } }, { @@ -2439,12 +2376,7 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha384_des3_ede_cbc_enc_tv_temp, - .count = - HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp) } } }, { @@ -2461,12 +2393,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha512_aes_cbc_enc_tv_temp, - .count = - HMAC_SHA512_AES_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp) } } }, { @@ -2474,12 +2401,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha512_des_cbc_enc_tv_temp, - .count = - HMAC_SHA512_DES_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp) } } }, { @@ -2488,12 +2410,7 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .aead = { - .enc = { - .vecs = - hmac_sha512_des3_ede_cbc_enc_tv_temp, - .count = - HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC - } + .enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp) } } }, { @@ -2510,14 +2427,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .cipher = { - .enc = { - .vecs = aes_cbc_enc_tv_template, - .count = AES_CBC_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_cbc_dec_tv_template, - .count = AES_CBC_DEC_TEST_VECTORS - } + .enc = __VECS(aes_cbc_enc_tv_template), + .dec = __VECS(aes_cbc_dec_tv_template) } } }, { @@ -2525,14 +2436,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = anubis_cbc_enc_tv_template, - .count = ANUBIS_CBC_ENC_TEST_VECTORS - }, - .dec = { - .vecs = anubis_cbc_dec_tv_template, - .count = 
ANUBIS_CBC_DEC_TEST_VECTORS - } + .enc = __VECS(anubis_cbc_enc_tv_template), + .dec = __VECS(anubis_cbc_dec_tv_template) } } }, { @@ -2540,14 +2445,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = bf_cbc_enc_tv_template, - .count = BF_CBC_ENC_TEST_VECTORS - }, - .dec = { - .vecs = bf_cbc_dec_tv_template, - .count = BF_CBC_DEC_TEST_VECTORS - } + .enc = __VECS(bf_cbc_enc_tv_template), + .dec = __VECS(bf_cbc_dec_tv_template) } } }, { @@ -2555,14 +2454,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = camellia_cbc_enc_tv_template, - .count = CAMELLIA_CBC_ENC_TEST_VECTORS - }, - .dec = { - .vecs = camellia_cbc_dec_tv_template, - .count = CAMELLIA_CBC_DEC_TEST_VECTORS - } + .enc = __VECS(camellia_cbc_enc_tv_template), + .dec = __VECS(camellia_cbc_dec_tv_template) } } }, { @@ -2570,14 +2463,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = cast5_cbc_enc_tv_template, - .count = CAST5_CBC_ENC_TEST_VECTORS - }, - .dec = { - .vecs = cast5_cbc_dec_tv_template, - .count = CAST5_CBC_DEC_TEST_VECTORS - } + .enc = __VECS(cast5_cbc_enc_tv_template), + .dec = __VECS(cast5_cbc_dec_tv_template) } } }, { @@ -2585,14 +2472,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = cast6_cbc_enc_tv_template, - .count = CAST6_CBC_ENC_TEST_VECTORS - }, - .dec = { - .vecs = cast6_cbc_dec_tv_template, - .count = CAST6_CBC_DEC_TEST_VECTORS - } + .enc = __VECS(cast6_cbc_enc_tv_template), + .dec = __VECS(cast6_cbc_dec_tv_template) } } }, { @@ -2600,14 +2481,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = des_cbc_enc_tv_template, - .count = DES_CBC_ENC_TEST_VECTORS - }, - .dec = { - .vecs = des_cbc_dec_tv_template, - .count = DES_CBC_DEC_TEST_VECTORS - } + .enc = __VECS(des_cbc_enc_tv_template), + .dec = __VECS(des_cbc_dec_tv_template) } } }, { @@ -2616,14 +2491,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .cipher = { - .enc = { - .vecs = des3_ede_cbc_enc_tv_template, - .count = DES3_EDE_CBC_ENC_TEST_VECTORS - }, - .dec = { - .vecs = des3_ede_cbc_dec_tv_template, - .count = DES3_EDE_CBC_DEC_TEST_VECTORS - } + .enc = __VECS(des3_ede_cbc_enc_tv_template), + .dec = __VECS(des3_ede_cbc_dec_tv_template) } } }, { @@ -2631,14 +2500,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = serpent_cbc_enc_tv_template, - .count = SERPENT_CBC_ENC_TEST_VECTORS - }, - .dec = { - .vecs = serpent_cbc_dec_tv_template, - .count = SERPENT_CBC_DEC_TEST_VECTORS - } + .enc = __VECS(serpent_cbc_enc_tv_template), + .dec = __VECS(serpent_cbc_dec_tv_template) } } }, { @@ -2646,30 +2509,25 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = tf_cbc_enc_tv_template, - .count = TF_CBC_ENC_TEST_VECTORS - }, - .dec = { - .vecs = tf_cbc_dec_tv_template, - .count = TF_CBC_DEC_TEST_VECTORS - } + .enc = __VECS(tf_cbc_enc_tv_template), + .dec = __VECS(tf_cbc_dec_tv_template) } } }, { + .alg = "cbcmac(aes)", + .fips_allowed = 1, + .test = alg_test_hash, + .suite = { + .hash = __VECS(aes_cbcmac_tv_template) + } + }, { .alg = "ccm(aes)", .test = alg_test_aead, .fips_allowed 
= 1, .suite = { .aead = { - .enc = { - .vecs = aes_ccm_enc_tv_template, - .count = AES_CCM_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_ccm_dec_tv_template, - .count = AES_CCM_DEC_TEST_VECTORS - } + .enc = __VECS(aes_ccm_enc_tv_template), + .dec = __VECS(aes_ccm_dec_tv_template) } } }, { @@ -2677,14 +2535,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = chacha20_enc_tv_template, - .count = CHACHA20_ENC_TEST_VECTORS - }, - .dec = { - .vecs = chacha20_enc_tv_template, - .count = CHACHA20_ENC_TEST_VECTORS - }, + .enc = __VECS(chacha20_enc_tv_template), + .dec = __VECS(chacha20_enc_tv_template), } } }, { @@ -2692,20 +2544,14 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .test = alg_test_hash, .suite = { - .hash = { - .vecs = aes_cmac128_tv_template, - .count = CMAC_AES_TEST_VECTORS - } + .hash = __VECS(aes_cmac128_tv_template) } }, { .alg = "cmac(des3_ede)", .fips_allowed = 1, .test = alg_test_hash, .suite = { - .hash = { - .vecs = des3_ede_cmac64_tv_template, - .count = CMAC_DES3_EDE_TEST_VECTORS - } + .hash = __VECS(des3_ede_cmac64_tv_template) } }, { .alg = "compress_null", @@ -2714,30 +2560,21 @@ static const struct alg_test_desc alg_test_descs[] = { .alg = "crc32", .test = alg_test_hash, .suite = { - .hash = { - .vecs = crc32_tv_template, - .count = CRC32_TEST_VECTORS - } + .hash = __VECS(crc32_tv_template) } }, { .alg = "crc32c", .test = alg_test_crc32c, .fips_allowed = 1, .suite = { - .hash = { - .vecs = crc32c_tv_template, - .count = CRC32C_TEST_VECTORS - } + .hash = __VECS(crc32c_tv_template) } }, { .alg = "crct10dif", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = crct10dif_tv_template, - .count = CRCT10DIF_TEST_VECTORS - } + .hash = __VECS(crct10dif_tv_template) } }, { .alg = "ctr(aes)", @@ -2745,14 +2582,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .cipher = { - .enc = { - .vecs = aes_ctr_enc_tv_template, - .count = AES_CTR_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_ctr_dec_tv_template, - .count = AES_CTR_DEC_TEST_VECTORS - } + .enc = __VECS(aes_ctr_enc_tv_template), + .dec = __VECS(aes_ctr_dec_tv_template) } } }, { @@ -2760,14 +2591,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = bf_ctr_enc_tv_template, - .count = BF_CTR_ENC_TEST_VECTORS - }, - .dec = { - .vecs = bf_ctr_dec_tv_template, - .count = BF_CTR_DEC_TEST_VECTORS - } + .enc = __VECS(bf_ctr_enc_tv_template), + .dec = __VECS(bf_ctr_dec_tv_template) } } }, { @@ -2775,14 +2600,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = camellia_ctr_enc_tv_template, - .count = CAMELLIA_CTR_ENC_TEST_VECTORS - }, - .dec = { - .vecs = camellia_ctr_dec_tv_template, - .count = CAMELLIA_CTR_DEC_TEST_VECTORS - } + .enc = __VECS(camellia_ctr_enc_tv_template), + .dec = __VECS(camellia_ctr_dec_tv_template) } } }, { @@ -2790,14 +2609,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = cast5_ctr_enc_tv_template, - .count = CAST5_CTR_ENC_TEST_VECTORS - }, - .dec = { - .vecs = cast5_ctr_dec_tv_template, - .count = CAST5_CTR_DEC_TEST_VECTORS - } + .enc = __VECS(cast5_ctr_enc_tv_template), + .dec = __VECS(cast5_ctr_dec_tv_template) } } }, { @@ -2805,14 +2618,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = 
alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = cast6_ctr_enc_tv_template, - .count = CAST6_CTR_ENC_TEST_VECTORS - }, - .dec = { - .vecs = cast6_ctr_dec_tv_template, - .count = CAST6_CTR_DEC_TEST_VECTORS - } + .enc = __VECS(cast6_ctr_enc_tv_template), + .dec = __VECS(cast6_ctr_dec_tv_template) } } }, { @@ -2820,14 +2627,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = des_ctr_enc_tv_template, - .count = DES_CTR_ENC_TEST_VECTORS - }, - .dec = { - .vecs = des_ctr_dec_tv_template, - .count = DES_CTR_DEC_TEST_VECTORS - } + .enc = __VECS(des_ctr_enc_tv_template), + .dec = __VECS(des_ctr_dec_tv_template) } } }, { @@ -2835,14 +2636,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = des3_ede_ctr_enc_tv_template, - .count = DES3_EDE_CTR_ENC_TEST_VECTORS - }, - .dec = { - .vecs = des3_ede_ctr_dec_tv_template, - .count = DES3_EDE_CTR_DEC_TEST_VECTORS - } + .enc = __VECS(des3_ede_ctr_enc_tv_template), + .dec = __VECS(des3_ede_ctr_dec_tv_template) } } }, { @@ -2850,14 +2645,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = serpent_ctr_enc_tv_template, - .count = SERPENT_CTR_ENC_TEST_VECTORS - }, - .dec = { - .vecs = serpent_ctr_dec_tv_template, - .count = SERPENT_CTR_DEC_TEST_VECTORS - } + .enc = __VECS(serpent_ctr_enc_tv_template), + .dec = __VECS(serpent_ctr_dec_tv_template) } } }, { @@ -2865,14 +2654,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = tf_ctr_enc_tv_template, - .count = TF_CTR_ENC_TEST_VECTORS - }, - .dec = { - .vecs = tf_ctr_dec_tv_template, - .count = TF_CTR_DEC_TEST_VECTORS - } + .enc = __VECS(tf_ctr_enc_tv_template), + .dec = __VECS(tf_ctr_dec_tv_template) } } }, { @@ -2880,14 +2663,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = cts_mode_enc_tv_template, - .count = CTS_MODE_ENC_TEST_VECTORS - }, - .dec = { - .vecs = cts_mode_dec_tv_template, - .count = CTS_MODE_DEC_TEST_VECTORS - } + .enc = __VECS(cts_mode_enc_tv_template), + .dec = __VECS(cts_mode_dec_tv_template) } } }, { @@ -2896,14 +2673,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .comp = { - .comp = { - .vecs = deflate_comp_tv_template, - .count = DEFLATE_COMP_TEST_VECTORS - }, - .decomp = { - .vecs = deflate_decomp_tv_template, - .count = DEFLATE_DECOMP_TEST_VECTORS - } + .comp = __VECS(deflate_comp_tv_template), + .decomp = __VECS(deflate_decomp_tv_template) } } }, { @@ -2911,10 +2682,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_kpp, .fips_allowed = 1, .suite = { - .kpp = { - .vecs = dh_tv_template, - .count = DH_TEST_VECTORS - } + .kpp = __VECS(dh_tv_template) } }, { .alg = "digest_null", @@ -2924,30 +2692,21 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_drbg, .fips_allowed = 1, .suite = { - .drbg = { - .vecs = drbg_nopr_ctr_aes128_tv_template, - .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template) - } + .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template) } }, { .alg = "drbg_nopr_ctr_aes192", .test = alg_test_drbg, .fips_allowed = 1, .suite = { - .drbg = { - .vecs = drbg_nopr_ctr_aes192_tv_template, - .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template) - } + .drbg = 
__VECS(drbg_nopr_ctr_aes192_tv_template) } }, { .alg = "drbg_nopr_ctr_aes256", .test = alg_test_drbg, .fips_allowed = 1, .suite = { - .drbg = { - .vecs = drbg_nopr_ctr_aes256_tv_template, - .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template) - } + .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template) } }, { /* @@ -2962,11 +2721,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_drbg, .fips_allowed = 1, .suite = { - .drbg = { - .vecs = drbg_nopr_hmac_sha256_tv_template, - .count = - ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template) - } + .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template) } }, { /* covered by drbg_nopr_hmac_sha256 test */ @@ -2986,10 +2741,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_drbg, .fips_allowed = 1, .suite = { - .drbg = { - .vecs = drbg_nopr_sha256_tv_template, - .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template) - } + .drbg = __VECS(drbg_nopr_sha256_tv_template) } }, { /* covered by drbg_nopr_sha256 test */ @@ -3005,10 +2757,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_drbg, .fips_allowed = 1, .suite = { - .drbg = { - .vecs = drbg_pr_ctr_aes128_tv_template, - .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template) - } + .drbg = __VECS(drbg_pr_ctr_aes128_tv_template) } }, { /* covered by drbg_pr_ctr_aes128 test */ @@ -3028,10 +2777,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_drbg, .fips_allowed = 1, .suite = { - .drbg = { - .vecs = drbg_pr_hmac_sha256_tv_template, - .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template) - } + .drbg = __VECS(drbg_pr_hmac_sha256_tv_template) } }, { /* covered by drbg_pr_hmac_sha256 test */ @@ -3051,10 +2797,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_drbg, .fips_allowed = 1, .suite = { - .drbg = { - .vecs = drbg_pr_sha256_tv_template, - .count = ARRAY_SIZE(drbg_pr_sha256_tv_template) - } + .drbg = __VECS(drbg_pr_sha256_tv_template) } }, { /* covered by drbg_pr_sha256 test */ @@ -3071,14 +2814,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .cipher = { - .enc = { - .vecs = aes_enc_tv_template, - .count = AES_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_dec_tv_template, - .count = AES_DEC_TEST_VECTORS - } + .enc = __VECS(aes_enc_tv_template), + .dec = __VECS(aes_dec_tv_template) } } }, { @@ -3086,14 +2823,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = anubis_enc_tv_template, - .count = ANUBIS_ENC_TEST_VECTORS - }, - .dec = { - .vecs = anubis_dec_tv_template, - .count = ANUBIS_DEC_TEST_VECTORS - } + .enc = __VECS(anubis_enc_tv_template), + .dec = __VECS(anubis_dec_tv_template) } } }, { @@ -3101,14 +2832,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = arc4_enc_tv_template, - .count = ARC4_ENC_TEST_VECTORS - }, - .dec = { - .vecs = arc4_dec_tv_template, - .count = ARC4_DEC_TEST_VECTORS - } + .enc = __VECS(arc4_enc_tv_template), + .dec = __VECS(arc4_dec_tv_template) } } }, { @@ -3116,14 +2841,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = bf_enc_tv_template, - .count = BF_ENC_TEST_VECTORS - }, - .dec = { - .vecs = bf_dec_tv_template, - .count = BF_DEC_TEST_VECTORS - } + .enc = __VECS(bf_enc_tv_template), + .dec = __VECS(bf_dec_tv_template) } } }, { @@ -3131,14 +2850,8 @@ static const struct alg_test_desc 
alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = camellia_enc_tv_template, - .count = CAMELLIA_ENC_TEST_VECTORS - }, - .dec = { - .vecs = camellia_dec_tv_template, - .count = CAMELLIA_DEC_TEST_VECTORS - } + .enc = __VECS(camellia_enc_tv_template), + .dec = __VECS(camellia_dec_tv_template) } } }, { @@ -3146,14 +2859,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = cast5_enc_tv_template, - .count = CAST5_ENC_TEST_VECTORS - }, - .dec = { - .vecs = cast5_dec_tv_template, - .count = CAST5_DEC_TEST_VECTORS - } + .enc = __VECS(cast5_enc_tv_template), + .dec = __VECS(cast5_dec_tv_template) } } }, { @@ -3161,14 +2868,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = cast6_enc_tv_template, - .count = CAST6_ENC_TEST_VECTORS - }, - .dec = { - .vecs = cast6_dec_tv_template, - .count = CAST6_DEC_TEST_VECTORS - } + .enc = __VECS(cast6_enc_tv_template), + .dec = __VECS(cast6_dec_tv_template) } } }, { @@ -3179,14 +2880,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = des_enc_tv_template, - .count = DES_ENC_TEST_VECTORS - }, - .dec = { - .vecs = des_dec_tv_template, - .count = DES_DEC_TEST_VECTORS - } + .enc = __VECS(des_enc_tv_template), + .dec = __VECS(des_dec_tv_template) } } }, { @@ -3195,14 +2890,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .cipher = { - .enc = { - .vecs = des3_ede_enc_tv_template, - .count = DES3_EDE_ENC_TEST_VECTORS - }, - .dec = { - .vecs = des3_ede_dec_tv_template, - .count = DES3_EDE_DEC_TEST_VECTORS - } + .enc = __VECS(des3_ede_enc_tv_template), + .dec = __VECS(des3_ede_dec_tv_template) } } }, { @@ -3225,14 +2914,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = khazad_enc_tv_template, - .count = KHAZAD_ENC_TEST_VECTORS - }, - .dec = { - .vecs = khazad_dec_tv_template, - .count = KHAZAD_DEC_TEST_VECTORS - } + .enc = __VECS(khazad_enc_tv_template), + .dec = __VECS(khazad_dec_tv_template) } } }, { @@ -3240,14 +2923,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = seed_enc_tv_template, - .count = SEED_ENC_TEST_VECTORS - }, - .dec = { - .vecs = seed_dec_tv_template, - .count = SEED_DEC_TEST_VECTORS - } + .enc = __VECS(seed_enc_tv_template), + .dec = __VECS(seed_dec_tv_template) } } }, { @@ -3255,14 +2932,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = serpent_enc_tv_template, - .count = SERPENT_ENC_TEST_VECTORS - }, - .dec = { - .vecs = serpent_dec_tv_template, - .count = SERPENT_DEC_TEST_VECTORS - } + .enc = __VECS(serpent_enc_tv_template), + .dec = __VECS(serpent_dec_tv_template) } } }, { @@ -3270,14 +2941,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = tea_enc_tv_template, - .count = TEA_ENC_TEST_VECTORS - }, - .dec = { - .vecs = tea_dec_tv_template, - .count = TEA_DEC_TEST_VECTORS - } + .enc = __VECS(tea_enc_tv_template), + .dec = __VECS(tea_dec_tv_template) } } }, { @@ -3285,14 +2950,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = 
tnepres_enc_tv_template, - .count = TNEPRES_ENC_TEST_VECTORS - }, - .dec = { - .vecs = tnepres_dec_tv_template, - .count = TNEPRES_DEC_TEST_VECTORS - } + .enc = __VECS(tnepres_enc_tv_template), + .dec = __VECS(tnepres_dec_tv_template) } } }, { @@ -3300,14 +2959,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = tf_enc_tv_template, - .count = TF_ENC_TEST_VECTORS - }, - .dec = { - .vecs = tf_dec_tv_template, - .count = TF_DEC_TEST_VECTORS - } + .enc = __VECS(tf_enc_tv_template), + .dec = __VECS(tf_dec_tv_template) } } }, { @@ -3315,14 +2968,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = xeta_enc_tv_template, - .count = XETA_ENC_TEST_VECTORS - }, - .dec = { - .vecs = xeta_dec_tv_template, - .count = XETA_DEC_TEST_VECTORS - } + .enc = __VECS(xeta_enc_tv_template), + .dec = __VECS(xeta_dec_tv_template) } } }, { @@ -3330,14 +2977,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = xtea_enc_tv_template, - .count = XTEA_ENC_TEST_VECTORS - }, - .dec = { - .vecs = xtea_dec_tv_template, - .count = XTEA_DEC_TEST_VECTORS - } + .enc = __VECS(xtea_enc_tv_template), + .dec = __VECS(xtea_dec_tv_template) } } }, { @@ -3345,10 +2986,7 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_kpp, .fips_allowed = 1, .suite = { - .kpp = { - .vecs = ecdh_tv_template, - .count = ECDH_TEST_VECTORS - } + .kpp = __VECS(ecdh_tv_template) } }, { .alg = "gcm(aes)", @@ -3356,14 +2994,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .aead = { - .enc = { - .vecs = aes_gcm_enc_tv_template, - .count = AES_GCM_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_gcm_dec_tv_template, - .count = AES_GCM_DEC_TEST_VECTORS - } + .enc = __VECS(aes_gcm_enc_tv_template), + .dec = __VECS(aes_gcm_dec_tv_template) } } }, { @@ -3371,136 +3003,94 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = ghash_tv_template, - .count = GHASH_TEST_VECTORS - } + .hash = __VECS(ghash_tv_template) } }, { .alg = "hmac(crc32)", .test = alg_test_hash, .suite = { - .hash = { - .vecs = bfin_crc_tv_template, - .count = BFIN_CRC_TEST_VECTORS - } + .hash = __VECS(bfin_crc_tv_template) } }, { .alg = "hmac(md5)", .test = alg_test_hash, .suite = { - .hash = { - .vecs = hmac_md5_tv_template, - .count = HMAC_MD5_TEST_VECTORS - } + .hash = __VECS(hmac_md5_tv_template) } }, { .alg = "hmac(rmd128)", .test = alg_test_hash, .suite = { - .hash = { - .vecs = hmac_rmd128_tv_template, - .count = HMAC_RMD128_TEST_VECTORS - } + .hash = __VECS(hmac_rmd128_tv_template) } }, { .alg = "hmac(rmd160)", .test = alg_test_hash, .suite = { - .hash = { - .vecs = hmac_rmd160_tv_template, - .count = HMAC_RMD160_TEST_VECTORS - } + .hash = __VECS(hmac_rmd160_tv_template) } }, { .alg = "hmac(sha1)", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = hmac_sha1_tv_template, - .count = HMAC_SHA1_TEST_VECTORS - } + .hash = __VECS(hmac_sha1_tv_template) } }, { .alg = "hmac(sha224)", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = hmac_sha224_tv_template, - .count = HMAC_SHA224_TEST_VECTORS - } + .hash = __VECS(hmac_sha224_tv_template) } }, { .alg = "hmac(sha256)", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = hmac_sha256_tv_template, - .count = 
HMAC_SHA256_TEST_VECTORS - } + .hash = __VECS(hmac_sha256_tv_template) } }, { .alg = "hmac(sha3-224)", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = hmac_sha3_224_tv_template, - .count = HMAC_SHA3_224_TEST_VECTORS - } + .hash = __VECS(hmac_sha3_224_tv_template) } }, { .alg = "hmac(sha3-256)", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = hmac_sha3_256_tv_template, - .count = HMAC_SHA3_256_TEST_VECTORS - } + .hash = __VECS(hmac_sha3_256_tv_template) } }, { .alg = "hmac(sha3-384)", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = hmac_sha3_384_tv_template, - .count = HMAC_SHA3_384_TEST_VECTORS - } + .hash = __VECS(hmac_sha3_384_tv_template) } }, { .alg = "hmac(sha3-512)", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = hmac_sha3_512_tv_template, - .count = HMAC_SHA3_512_TEST_VECTORS - } + .hash = __VECS(hmac_sha3_512_tv_template) } }, { .alg = "hmac(sha384)", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = hmac_sha384_tv_template, - .count = HMAC_SHA384_TEST_VECTORS - } + .hash = __VECS(hmac_sha384_tv_template) } }, { .alg = "hmac(sha512)", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = hmac_sha512_tv_template, - .count = HMAC_SHA512_TEST_VECTORS - } + .hash = __VECS(hmac_sha512_tv_template) } }, { .alg = "jitterentropy_rng", @@ -3512,14 +3102,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .cipher = { - .enc = { - .vecs = aes_kw_enc_tv_template, - .count = ARRAY_SIZE(aes_kw_enc_tv_template) - }, - .dec = { - .vecs = aes_kw_dec_tv_template, - .count = ARRAY_SIZE(aes_kw_dec_tv_template) - } + .enc = __VECS(aes_kw_enc_tv_template), + .dec = __VECS(aes_kw_dec_tv_template) } } }, { @@ -3527,14 +3111,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = aes_lrw_enc_tv_template, - .count = AES_LRW_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_lrw_dec_tv_template, - .count = AES_LRW_DEC_TEST_VECTORS - } + .enc = __VECS(aes_lrw_enc_tv_template), + .dec = __VECS(aes_lrw_dec_tv_template) } } }, { @@ -3542,14 +3120,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = camellia_lrw_enc_tv_template, - .count = CAMELLIA_LRW_ENC_TEST_VECTORS - }, - .dec = { - .vecs = camellia_lrw_dec_tv_template, - .count = CAMELLIA_LRW_DEC_TEST_VECTORS - } + .enc = __VECS(camellia_lrw_enc_tv_template), + .dec = __VECS(camellia_lrw_dec_tv_template) } } }, { @@ -3557,14 +3129,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = cast6_lrw_enc_tv_template, - .count = CAST6_LRW_ENC_TEST_VECTORS - }, - .dec = { - .vecs = cast6_lrw_dec_tv_template, - .count = CAST6_LRW_DEC_TEST_VECTORS - } + .enc = __VECS(cast6_lrw_enc_tv_template), + .dec = __VECS(cast6_lrw_dec_tv_template) } } }, { @@ -3572,14 +3138,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = serpent_lrw_enc_tv_template, - .count = SERPENT_LRW_ENC_TEST_VECTORS - }, - .dec = { - .vecs = serpent_lrw_dec_tv_template, - .count = SERPENT_LRW_DEC_TEST_VECTORS - } + .enc = __VECS(serpent_lrw_enc_tv_template), + .dec = __VECS(serpent_lrw_dec_tv_template) } } }, { @@ -3587,14 +3147,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, 
.suite = { .cipher = { - .enc = { - .vecs = tf_lrw_enc_tv_template, - .count = TF_LRW_ENC_TEST_VECTORS - }, - .dec = { - .vecs = tf_lrw_dec_tv_template, - .count = TF_LRW_DEC_TEST_VECTORS - } + .enc = __VECS(tf_lrw_enc_tv_template), + .dec = __VECS(tf_lrw_dec_tv_template) } } }, { @@ -3603,14 +3157,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .comp = { - .comp = { - .vecs = lz4_comp_tv_template, - .count = LZ4_COMP_TEST_VECTORS - }, - .decomp = { - .vecs = lz4_decomp_tv_template, - .count = LZ4_DECOMP_TEST_VECTORS - } + .comp = __VECS(lz4_comp_tv_template), + .decomp = __VECS(lz4_decomp_tv_template) } } }, { @@ -3619,14 +3167,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .comp = { - .comp = { - .vecs = lz4hc_comp_tv_template, - .count = LZ4HC_COMP_TEST_VECTORS - }, - .decomp = { - .vecs = lz4hc_decomp_tv_template, - .count = LZ4HC_DECOMP_TEST_VECTORS - } + .comp = __VECS(lz4hc_comp_tv_template), + .decomp = __VECS(lz4hc_decomp_tv_template) } } }, { @@ -3635,42 +3177,27 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .comp = { - .comp = { - .vecs = lzo_comp_tv_template, - .count = LZO_COMP_TEST_VECTORS - }, - .decomp = { - .vecs = lzo_decomp_tv_template, - .count = LZO_DECOMP_TEST_VECTORS - } + .comp = __VECS(lzo_comp_tv_template), + .decomp = __VECS(lzo_decomp_tv_template) } } }, { .alg = "md4", .test = alg_test_hash, .suite = { - .hash = { - .vecs = md4_tv_template, - .count = MD4_TEST_VECTORS - } + .hash = __VECS(md4_tv_template) } }, { .alg = "md5", .test = alg_test_hash, .suite = { - .hash = { - .vecs = md5_tv_template, - .count = MD5_TEST_VECTORS - } + .hash = __VECS(md5_tv_template) } }, { .alg = "michael_mic", .test = alg_test_hash, .suite = { - .hash = { - .vecs = michael_mic_tv_template, - .count = MICHAEL_MIC_TEST_VECTORS - } + .hash = __VECS(michael_mic_tv_template) } }, { .alg = "ofb(aes)", @@ -3678,14 +3205,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .cipher = { - .enc = { - .vecs = aes_ofb_enc_tv_template, - .count = AES_OFB_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_ofb_dec_tv_template, - .count = AES_OFB_DEC_TEST_VECTORS - } + .enc = __VECS(aes_ofb_enc_tv_template), + .dec = __VECS(aes_ofb_dec_tv_template) } } }, { @@ -3693,24 +3214,15 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = fcrypt_pcbc_enc_tv_template, - .count = FCRYPT_ENC_TEST_VECTORS - }, - .dec = { - .vecs = fcrypt_pcbc_dec_tv_template, - .count = FCRYPT_DEC_TEST_VECTORS - } + .enc = __VECS(fcrypt_pcbc_enc_tv_template), + .dec = __VECS(fcrypt_pcbc_dec_tv_template) } } }, { .alg = "poly1305", .test = alg_test_hash, .suite = { - .hash = { - .vecs = poly1305_tv_template, - .count = POLY1305_TEST_VECTORS - } + .hash = __VECS(poly1305_tv_template) } }, { .alg = "rfc3686(ctr(aes))", @@ -3718,14 +3230,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .cipher = { - .enc = { - .vecs = aes_ctr_rfc3686_enc_tv_template, - .count = AES_CTR_3686_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_ctr_rfc3686_dec_tv_template, - .count = AES_CTR_3686_DEC_TEST_VECTORS - } + .enc = __VECS(aes_ctr_rfc3686_enc_tv_template), + .dec = __VECS(aes_ctr_rfc3686_dec_tv_template) } } }, { @@ -3734,14 +3240,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .aead = { - .enc = { - .vecs = 
aes_gcm_rfc4106_enc_tv_template, - .count = AES_GCM_4106_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_gcm_rfc4106_dec_tv_template, - .count = AES_GCM_4106_DEC_TEST_VECTORS - } + .enc = __VECS(aes_gcm_rfc4106_enc_tv_template), + .dec = __VECS(aes_gcm_rfc4106_dec_tv_template) } } }, { @@ -3750,14 +3250,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .aead = { - .enc = { - .vecs = aes_ccm_rfc4309_enc_tv_template, - .count = AES_CCM_4309_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_ccm_rfc4309_dec_tv_template, - .count = AES_CCM_4309_DEC_TEST_VECTORS - } + .enc = __VECS(aes_ccm_rfc4309_enc_tv_template), + .dec = __VECS(aes_ccm_rfc4309_dec_tv_template) } } }, { @@ -3765,14 +3259,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = aes_gcm_rfc4543_enc_tv_template, - .count = AES_GCM_4543_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_gcm_rfc4543_dec_tv_template, - .count = AES_GCM_4543_DEC_TEST_VECTORS - }, + .enc = __VECS(aes_gcm_rfc4543_enc_tv_template), + .dec = __VECS(aes_gcm_rfc4543_dec_tv_template), } } }, { @@ -3780,14 +3268,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = rfc7539_enc_tv_template, - .count = RFC7539_ENC_TEST_VECTORS - }, - .dec = { - .vecs = rfc7539_dec_tv_template, - .count = RFC7539_DEC_TEST_VECTORS - }, + .enc = __VECS(rfc7539_enc_tv_template), + .dec = __VECS(rfc7539_dec_tv_template), } } }, { @@ -3795,71 +3277,47 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_aead, .suite = { .aead = { - .enc = { - .vecs = rfc7539esp_enc_tv_template, - .count = RFC7539ESP_ENC_TEST_VECTORS - }, - .dec = { - .vecs = rfc7539esp_dec_tv_template, - .count = RFC7539ESP_DEC_TEST_VECTORS - }, + .enc = __VECS(rfc7539esp_enc_tv_template), + .dec = __VECS(rfc7539esp_dec_tv_template), } } }, { .alg = "rmd128", .test = alg_test_hash, .suite = { - .hash = { - .vecs = rmd128_tv_template, - .count = RMD128_TEST_VECTORS - } + .hash = __VECS(rmd128_tv_template) } }, { .alg = "rmd160", .test = alg_test_hash, .suite = { - .hash = { - .vecs = rmd160_tv_template, - .count = RMD160_TEST_VECTORS - } + .hash = __VECS(rmd160_tv_template) } }, { .alg = "rmd256", .test = alg_test_hash, .suite = { - .hash = { - .vecs = rmd256_tv_template, - .count = RMD256_TEST_VECTORS - } + .hash = __VECS(rmd256_tv_template) } }, { .alg = "rmd320", .test = alg_test_hash, .suite = { - .hash = { - .vecs = rmd320_tv_template, - .count = RMD320_TEST_VECTORS - } + .hash = __VECS(rmd320_tv_template) } }, { .alg = "rsa", .test = alg_test_akcipher, .fips_allowed = 1, .suite = { - .akcipher = { - .vecs = rsa_tv_template, - .count = RSA_TEST_VECTORS - } + .akcipher = __VECS(rsa_tv_template) } }, { .alg = "salsa20", .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = salsa20_stream_enc_tv_template, - .count = SALSA20_STREAM_ENC_TEST_VECTORS - } + .enc = __VECS(salsa20_stream_enc_tv_template) } } }, { @@ -3867,162 +3325,111 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = sha1_tv_template, - .count = SHA1_TEST_VECTORS - } + .hash = __VECS(sha1_tv_template) } }, { .alg = "sha224", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = sha224_tv_template, - .count = SHA224_TEST_VECTORS - } + .hash = __VECS(sha224_tv_template) } }, { .alg = "sha256", .test = alg_test_hash, .fips_allowed = 1, .suite = { - 
.hash = { - .vecs = sha256_tv_template, - .count = SHA256_TEST_VECTORS - } + .hash = __VECS(sha256_tv_template) } }, { .alg = "sha3-224", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = sha3_224_tv_template, - .count = SHA3_224_TEST_VECTORS - } + .hash = __VECS(sha3_224_tv_template) } }, { .alg = "sha3-256", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = sha3_256_tv_template, - .count = SHA3_256_TEST_VECTORS - } + .hash = __VECS(sha3_256_tv_template) } }, { .alg = "sha3-384", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = sha3_384_tv_template, - .count = SHA3_384_TEST_VECTORS - } + .hash = __VECS(sha3_384_tv_template) } }, { .alg = "sha3-512", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = sha3_512_tv_template, - .count = SHA3_512_TEST_VECTORS - } + .hash = __VECS(sha3_512_tv_template) } }, { .alg = "sha384", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = sha384_tv_template, - .count = SHA384_TEST_VECTORS - } + .hash = __VECS(sha384_tv_template) } }, { .alg = "sha512", .test = alg_test_hash, .fips_allowed = 1, .suite = { - .hash = { - .vecs = sha512_tv_template, - .count = SHA512_TEST_VECTORS - } + .hash = __VECS(sha512_tv_template) } }, { .alg = "tgr128", .test = alg_test_hash, .suite = { - .hash = { - .vecs = tgr128_tv_template, - .count = TGR128_TEST_VECTORS - } + .hash = __VECS(tgr128_tv_template) } }, { .alg = "tgr160", .test = alg_test_hash, .suite = { - .hash = { - .vecs = tgr160_tv_template, - .count = TGR160_TEST_VECTORS - } + .hash = __VECS(tgr160_tv_template) } }, { .alg = "tgr192", .test = alg_test_hash, .suite = { - .hash = { - .vecs = tgr192_tv_template, - .count = TGR192_TEST_VECTORS - } + .hash = __VECS(tgr192_tv_template) } }, { .alg = "vmac(aes)", .test = alg_test_hash, .suite = { - .hash = { - .vecs = aes_vmac128_tv_template, - .count = VMAC_AES_TEST_VECTORS - } + .hash = __VECS(aes_vmac128_tv_template) } }, { .alg = "wp256", .test = alg_test_hash, .suite = { - .hash = { - .vecs = wp256_tv_template, - .count = WP256_TEST_VECTORS - } + .hash = __VECS(wp256_tv_template) } }, { .alg = "wp384", .test = alg_test_hash, .suite = { - .hash = { - .vecs = wp384_tv_template, - .count = WP384_TEST_VECTORS - } + .hash = __VECS(wp384_tv_template) } }, { .alg = "wp512", .test = alg_test_hash, .suite = { - .hash = { - .vecs = wp512_tv_template, - .count = WP512_TEST_VECTORS - } + .hash = __VECS(wp512_tv_template) } }, { .alg = "xcbc(aes)", .test = alg_test_hash, .suite = { - .hash = { - .vecs = aes_xcbc128_tv_template, - .count = XCBC_AES_TEST_VECTORS - } + .hash = __VECS(aes_xcbc128_tv_template) } }, { .alg = "xts(aes)", @@ -4030,14 +3437,8 @@ static const struct alg_test_desc alg_test_descs[] = { .fips_allowed = 1, .suite = { .cipher = { - .enc = { - .vecs = aes_xts_enc_tv_template, - .count = AES_XTS_ENC_TEST_VECTORS - }, - .dec = { - .vecs = aes_xts_dec_tv_template, - .count = AES_XTS_DEC_TEST_VECTORS - } + .enc = __VECS(aes_xts_enc_tv_template), + .dec = __VECS(aes_xts_dec_tv_template) } } }, { @@ -4045,14 +3446,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = camellia_xts_enc_tv_template, - .count = CAMELLIA_XTS_ENC_TEST_VECTORS - }, - .dec = { - .vecs = camellia_xts_dec_tv_template, - .count = CAMELLIA_XTS_DEC_TEST_VECTORS - } + .enc = __VECS(camellia_xts_enc_tv_template), + .dec = __VECS(camellia_xts_dec_tv_template) } } }, { @@ -4060,14 +3455,8 @@ static const 
struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = cast6_xts_enc_tv_template, - .count = CAST6_XTS_ENC_TEST_VECTORS - }, - .dec = { - .vecs = cast6_xts_dec_tv_template, - .count = CAST6_XTS_DEC_TEST_VECTORS - } + .enc = __VECS(cast6_xts_enc_tv_template), + .dec = __VECS(cast6_xts_dec_tv_template) } } }, { @@ -4075,14 +3464,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = serpent_xts_enc_tv_template, - .count = SERPENT_XTS_ENC_TEST_VECTORS - }, - .dec = { - .vecs = serpent_xts_dec_tv_template, - .count = SERPENT_XTS_DEC_TEST_VECTORS - } + .enc = __VECS(serpent_xts_enc_tv_template), + .dec = __VECS(serpent_xts_dec_tv_template) } } }, { @@ -4090,14 +3473,8 @@ static const struct alg_test_desc alg_test_descs[] = { .test = alg_test_skcipher, .suite = { .cipher = { - .enc = { - .vecs = tf_xts_enc_tv_template, - .count = TF_XTS_ENC_TEST_VECTORS - }, - .dec = { - .vecs = tf_xts_dec_tv_template, - .count = TF_XTS_DEC_TEST_VECTORS - } + .enc = __VECS(tf_xts_enc_tv_template), + .dec = __VECS(tf_xts_dec_tv_template) } } } diff --git a/crypto/testmgr.h b/crypto/testmgr.h index 9b656be7f52f..f85e51cf7dcc 100644 --- a/crypto/testmgr.h +++ b/crypto/testmgr.h @@ -151,11 +151,6 @@ static char zeroed_string[48]; /* * RSA test vectors. Borrowed from openSSL. */ -#ifdef CONFIG_CRYPTO_FIPS -#define RSA_TEST_VECTORS 2 -#else -#define RSA_TEST_VECTORS 5 -#endif static struct akcipher_testvec rsa_tv_template[] = { { #ifndef CONFIG_CRYPTO_FIPS @@ -340,6 +335,7 @@ static struct akcipher_testvec rsa_tv_template[] = { .m_size = 8, .c_size = 256, .public_key_vec = true, +#ifndef CONFIG_CRYPTO_FIPS }, { .key = "\x30\x82\x09\x29" /* sequence of 2345 bytes */ @@ -538,11 +534,10 @@ static struct akcipher_testvec rsa_tv_template[] = { .key_len = 2349, .m_size = 8, .c_size = 512, +#endif } }; -#define DH_TEST_VECTORS 2 - struct kpp_testvec dh_tv_template[] = { { .secret = @@ -760,11 +755,6 @@ struct kpp_testvec dh_tv_template[] = { } }; -#ifdef CONFIG_CRYPTO_FIPS -#define ECDH_TEST_VECTORS 1 -#else -#define ECDH_TEST_VECTORS 2 -#endif struct kpp_testvec ecdh_tv_template[] = { { #ifndef CONFIG_CRYPTO_FIPS @@ -856,8 +846,6 @@ struct kpp_testvec ecdh_tv_template[] = { /* * MD4 test vectors from RFC1320 */ -#define MD4_TEST_VECTORS 7 - static struct hash_testvec md4_tv_template [] = { { .plaintext = "", @@ -899,7 +887,6 @@ static struct hash_testvec md4_tv_template [] = { }, }; -#define SHA3_224_TEST_VECTORS 3 static struct hash_testvec sha3_224_tv_template[] = { { .plaintext = "", @@ -925,7 +912,6 @@ static struct hash_testvec sha3_224_tv_template[] = { }, }; -#define SHA3_256_TEST_VECTORS 3 static struct hash_testvec sha3_256_tv_template[] = { { .plaintext = "", @@ -952,7 +938,6 @@ static struct hash_testvec sha3_256_tv_template[] = { }; -#define SHA3_384_TEST_VECTORS 3 static struct hash_testvec sha3_384_tv_template[] = { { .plaintext = "", @@ -985,7 +970,6 @@ static struct hash_testvec sha3_384_tv_template[] = { }; -#define SHA3_512_TEST_VECTORS 3 static struct hash_testvec sha3_512_tv_template[] = { { .plaintext = "", @@ -1027,8 +1011,6 @@ static struct hash_testvec sha3_512_tv_template[] = { /* * MD5 test vectors from RFC1321 */ -#define MD5_TEST_VECTORS 7 - static struct hash_testvec md5_tv_template[] = { { .digest = "\xd4\x1d\x8c\xd9\x8f\x00\xb2\x04" @@ -1073,8 +1055,6 @@ static struct hash_testvec md5_tv_template[] = { /* * RIPEMD-128 test vectors from ISO/IEC 10118-3:2004(E) */ 
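(An aside on the conversion running through the testmgr.c hunks above and the testmgr.h hunks below: every open-coded { .vecs = ..., .count = ... } initializer is replaced by the new __VECS() helper, and the hand-maintained *_TEST_VECTORS count macros those initializers referenced are dropped. The stand-alone sketch that follows uses simplified stand-in types and a sample array — nothing beyond the __VECS() definition itself is taken from the kernel — to show why the count can no longer drift out of sync with the vector array:)

#include <stdio.h>

#define ARRAY_SIZE(arr) (sizeof(arr) / sizeof((arr)[0]))

/* Simplified stand-ins for the kernel's hash_testvec / test-suite types. */
struct hash_testvec {
	const char *plaintext;
	unsigned int psize;
};

struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

/* Same shape as the helper added to crypto/testmgr.c. */
#define __VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }

static const struct hash_testvec sample_tv_template[] = {
	{ .plaintext = "",    .psize = 0 },
	{ .plaintext = "abc", .psize = 3 },
};

int main(void)
{
	/* Old style: .count pointed at a hand-maintained *_TEST_VECTORS
	 * macro that had to be bumped whenever a vector was added.
	 * New style: the count is derived from the array definition. */
	struct hash_test_suite suite = __VECS(sample_tv_template);

	printf("count = %u\n", suite.count);	/* prints 2 */
	return 0;
}

Because the count is computed with ARRAY_SIZE at the point of use, the large block of *_TEST_VECTORS #define removals in testmgr.h that follows is purely mechanical cleanup.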
-#define RMD128_TEST_VECTORS 10 - static struct hash_testvec rmd128_tv_template[] = { { .digest = "\xcd\xf2\x62\x13\xa1\x50\xdc\x3e" @@ -1137,8 +1117,6 @@ static struct hash_testvec rmd128_tv_template[] = { /* * RIPEMD-160 test vectors from ISO/IEC 10118-3:2004(E) */ -#define RMD160_TEST_VECTORS 10 - static struct hash_testvec rmd160_tv_template[] = { { .digest = "\x9c\x11\x85\xa5\xc5\xe9\xfc\x54\x61\x28" @@ -1201,8 +1179,6 @@ static struct hash_testvec rmd160_tv_template[] = { /* * RIPEMD-256 test vectors */ -#define RMD256_TEST_VECTORS 8 - static struct hash_testvec rmd256_tv_template[] = { { .digest = "\x02\xba\x4c\x4e\x5f\x8e\xcd\x18" @@ -1269,8 +1245,6 @@ static struct hash_testvec rmd256_tv_template[] = { /* * RIPEMD-320 test vectors */ -#define RMD320_TEST_VECTORS 8 - static struct hash_testvec rmd320_tv_template[] = { { .digest = "\x22\xd6\x5d\x56\x61\x53\x6c\xdc\x75\xc1" @@ -1334,7 +1308,6 @@ static struct hash_testvec rmd320_tv_template[] = { } }; -#define CRCT10DIF_TEST_VECTORS ARRAY_SIZE(crct10dif_tv_template) static struct hash_testvec crct10dif_tv_template[] = { { .plaintext = "abc", @@ -1385,8 +1358,6 @@ static struct hash_testvec crct10dif_tv_template[] = { * SHA1 test vectors from from FIPS PUB 180-1 * Long vector from CAVS 5.0 */ -#define SHA1_TEST_VECTORS 6 - static struct hash_testvec sha1_tv_template[] = { { .plaintext = "", @@ -1577,8 +1548,6 @@ static struct hash_testvec sha1_tv_template[] = { /* * SHA224 test vectors from from FIPS PUB 180-2 */ -#define SHA224_TEST_VECTORS 5 - static struct hash_testvec sha224_tv_template[] = { { .plaintext = "", @@ -1751,8 +1720,6 @@ static struct hash_testvec sha224_tv_template[] = { /* * SHA256 test vectors from from NIST */ -#define SHA256_TEST_VECTORS 5 - static struct hash_testvec sha256_tv_template[] = { { .plaintext = "", @@ -1924,8 +1891,6 @@ static struct hash_testvec sha256_tv_template[] = { /* * SHA384 test vectors from from NIST and kerneli */ -#define SHA384_TEST_VECTORS 6 - static struct hash_testvec sha384_tv_template[] = { { .plaintext = "", @@ -2118,8 +2083,6 @@ static struct hash_testvec sha384_tv_template[] = { /* * SHA512 test vectors from from NIST and kerneli */ -#define SHA512_TEST_VECTORS 6 - static struct hash_testvec sha512_tv_template[] = { { .plaintext = "", @@ -2327,8 +2290,6 @@ static struct hash_testvec sha512_tv_template[] = { * by Vincent Rijmen and Paulo S. L. M. 
Barreto as part of the NESSIE * submission */ -#define WP512_TEST_VECTORS 8 - static struct hash_testvec wp512_tv_template[] = { { .plaintext = "", @@ -2425,8 +2386,6 @@ static struct hash_testvec wp512_tv_template[] = { }, }; -#define WP384_TEST_VECTORS 8 - static struct hash_testvec wp384_tv_template[] = { { .plaintext = "", @@ -2507,8 +2466,6 @@ static struct hash_testvec wp384_tv_template[] = { }, }; -#define WP256_TEST_VECTORS 8 - static struct hash_testvec wp256_tv_template[] = { { .plaintext = "", @@ -2576,8 +2533,6 @@ static struct hash_testvec wp256_tv_template[] = { /* * TIGER test vectors from Tiger website */ -#define TGR192_TEST_VECTORS 6 - static struct hash_testvec tgr192_tv_template[] = { { .plaintext = "", @@ -2621,8 +2576,6 @@ static struct hash_testvec tgr192_tv_template[] = { }, }; -#define TGR160_TEST_VECTORS 6 - static struct hash_testvec tgr160_tv_template[] = { { .plaintext = "", @@ -2666,8 +2619,6 @@ static struct hash_testvec tgr160_tv_template[] = { }, }; -#define TGR128_TEST_VECTORS 6 - static struct hash_testvec tgr128_tv_template[] = { { .plaintext = "", @@ -2705,8 +2656,6 @@ static struct hash_testvec tgr128_tv_template[] = { }, }; -#define GHASH_TEST_VECTORS 6 - static struct hash_testvec ghash_tv_template[] = { { @@ -2822,8 +2771,6 @@ static struct hash_testvec ghash_tv_template[] = * HMAC-MD5 test vectors from RFC2202 * (These need to be fixed to not use strlen). */ -#define HMAC_MD5_TEST_VECTORS 7 - static struct hash_testvec hmac_md5_tv_template[] = { { @@ -2904,8 +2851,6 @@ static struct hash_testvec hmac_md5_tv_template[] = /* * HMAC-RIPEMD128 test vectors from RFC2286 */ -#define HMAC_RMD128_TEST_VECTORS 7 - static struct hash_testvec hmac_rmd128_tv_template[] = { { .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b", @@ -2985,8 +2930,6 @@ static struct hash_testvec hmac_rmd128_tv_template[] = { /* * HMAC-RIPEMD160 test vectors from RFC2286 */ -#define HMAC_RMD160_TEST_VECTORS 7 - static struct hash_testvec hmac_rmd160_tv_template[] = { { .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b", @@ -3066,8 +3009,6 @@ static struct hash_testvec hmac_rmd160_tv_template[] = { /* * HMAC-SHA1 test vectors from RFC2202 */ -#define HMAC_SHA1_TEST_VECTORS 7 - static struct hash_testvec hmac_sha1_tv_template[] = { { .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b", @@ -3149,8 +3090,6 @@ static struct hash_testvec hmac_sha1_tv_template[] = { /* * SHA224 HMAC test vectors from RFC4231 */ -#define HMAC_SHA224_TEST_VECTORS 4 - static struct hash_testvec hmac_sha224_tv_template[] = { { .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" @@ -3264,8 +3203,6 @@ static struct hash_testvec hmac_sha224_tv_template[] = { * HMAC-SHA256 test vectors from * draft-ietf-ipsec-ciph-sha-256-01.txt */ -#define HMAC_SHA256_TEST_VECTORS 10 - static struct hash_testvec hmac_sha256_tv_template[] = { { .key = "\x01\x02\x03\x04\x05\x06\x07\x08" @@ -3401,8 +3338,6 @@ static struct hash_testvec hmac_sha256_tv_template[] = { }, }; -#define CMAC_AES_TEST_VECTORS 6 - static struct hash_testvec aes_cmac128_tv_template[] = { { /* From NIST Special Publication 800-38B, AES-128 */ .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6" @@ -3478,7 +3413,65 @@ static struct hash_testvec aes_cmac128_tv_template[] = { } }; -#define CMAC_DES3_EDE_TEST_VECTORS 4 +static struct hash_testvec aes_cbcmac_tv_template[] = { + { + .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6" + "\xab\xf7\x15\x88\x09\xcf\x4f\x3c", + .plaintext = 
"\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a", + .digest = "\x3a\xd7\x7b\xb4\x0d\x7a\x36\x60" + "\xa8\x9e\xca\xf3\x24\x66\xef\x97", + .psize = 16, + .ksize = 16, + }, { + .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6" + "\xab\xf7\x15\x88\x09\xcf\x4f\x3c", + .plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30", + .digest = "\x9d\x0d\xd0\x63\xfb\xcb\x24\x43" + "\xf8\xf2\x76\x03\xac\x39\xb0\x9d", + .psize = 33, + .ksize = 16, + .np = 2, + .tap = { 7, 26 }, + }, { + .key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6" + "\xab\xf7\x15\x88\x09\xcf\x4f\x3c", + .plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37", + .digest = "\xc0\x71\x73\xb8\xa0\x2c\x11\x7c" + "\xaf\xdc\xb2\xf8\x89\x32\xa3\x3a", + .psize = 63, + .ksize = 16, + }, { + .key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe" + "\x2b\x73\xae\xf0\x85\x7d\x77\x81" + "\x1f\x35\x2c\x07\x3b\x61\x08\xd7" + "\x2d\x98\x10\xa3\x09\x14\xdf\xf4", + .plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96" + "\xe9\x3d\x7e\x11\x73\x93\x17\x2a" + "\xae\x2d\x8a\x57\x1e\x03\xac\x9c" + "\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" + "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11" + "\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" + "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17" + "\xad\x2b\x41\x7b\xe6\x6c\x37\x10" + "\x1c", + .digest = "\x6a\x4e\xdb\x21\x47\x51\xdf\x4f" + "\xa8\x4d\x4c\x10\x3b\x72\x7d\xd6", + .psize = 65, + .ksize = 32, + } +}; static struct hash_testvec des3_ede_cmac64_tv_template[] = { /* @@ -3526,8 +3519,6 @@ static struct hash_testvec des3_ede_cmac64_tv_template[] = { } }; -#define XCBC_AES_TEST_VECTORS 6 - static struct hash_testvec aes_xcbc128_tv_template[] = { { .key = "\x00\x01\x02\x03\x04\x05\x06\x07" @@ -3594,7 +3585,6 @@ static struct hash_testvec aes_xcbc128_tv_template[] = { } }; -#define VMAC_AES_TEST_VECTORS 11 static char vmac_string1[128] = {'\x01', '\x01', '\x01', '\x01', '\x02', '\x03', '\x02', '\x02', '\x02', '\x04', '\x01', '\x07', @@ -3701,8 +3691,6 @@ static struct hash_testvec aes_vmac128_tv_template[] = { * SHA384 HMAC test vectors from RFC4231 */ -#define HMAC_SHA384_TEST_VECTORS 4 - static struct hash_testvec hmac_sha384_tv_template[] = { { .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" @@ -3801,8 +3789,6 @@ static struct hash_testvec hmac_sha384_tv_template[] = { * SHA512 HMAC test vectors from RFC4231 */ -#define HMAC_SHA512_TEST_VECTORS 4 - static struct hash_testvec hmac_sha512_tv_template[] = { { .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" @@ -3908,8 +3894,6 @@ static struct hash_testvec hmac_sha512_tv_template[] = { }, }; -#define HMAC_SHA3_224_TEST_VECTORS 4 - static struct hash_testvec hmac_sha3_224_tv_template[] = { { .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" @@ -3999,8 +3983,6 @@ static struct hash_testvec hmac_sha3_224_tv_template[] = { }, }; -#define HMAC_SHA3_256_TEST_VECTORS 4 - static struct hash_testvec hmac_sha3_256_tv_template[] = { { .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" @@ -4090,8 +4072,6 @@ static struct hash_testvec hmac_sha3_256_tv_template[] = { }, }; -#define HMAC_SHA3_384_TEST_VECTORS 4 - static struct hash_testvec hmac_sha3_384_tv_template[] = { { .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" @@ -4189,8 +4169,6 @@ static struct hash_testvec hmac_sha3_384_tv_template[] = { }, }; -#define 
HMAC_SHA3_512_TEST_VECTORS 4 - static struct hash_testvec hmac_sha3_512_tv_template[] = { { .key = "\x0b\x0b\x0b\x0b\x0b\x0b\x0b\x0b" @@ -4300,8 +4278,6 @@ static struct hash_testvec hmac_sha3_512_tv_template[] = { * Poly1305 test vectors from RFC7539 A.3. */ -#define POLY1305_TEST_VECTORS 11 - static struct hash_testvec poly1305_tv_template[] = { { /* Test Vector #1 */ .plaintext = "\x00\x00\x00\x00\x00\x00\x00\x00" @@ -4547,19 +4523,6 @@ static struct hash_testvec poly1305_tv_template[] = { /* * DES test vectors. */ -#define DES_ENC_TEST_VECTORS 11 -#define DES_DEC_TEST_VECTORS 5 -#define DES_CBC_ENC_TEST_VECTORS 6 -#define DES_CBC_DEC_TEST_VECTORS 5 -#define DES_CTR_ENC_TEST_VECTORS 2 -#define DES_CTR_DEC_TEST_VECTORS 2 -#define DES3_EDE_ENC_TEST_VECTORS 4 -#define DES3_EDE_DEC_TEST_VECTORS 4 -#define DES3_EDE_CBC_ENC_TEST_VECTORS 2 -#define DES3_EDE_CBC_DEC_TEST_VECTORS 2 -#define DES3_EDE_CTR_ENC_TEST_VECTORS 2 -#define DES3_EDE_CTR_DEC_TEST_VECTORS 2 - static struct cipher_testvec des_enc_tv_template[] = { { /* From Applied Cryptography */ .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", @@ -6620,13 +6583,6 @@ static struct cipher_testvec des3_ede_ctr_dec_tv_template[] = { /* * Blowfish test vectors. */ -#define BF_ENC_TEST_VECTORS 7 -#define BF_DEC_TEST_VECTORS 7 -#define BF_CBC_ENC_TEST_VECTORS 2 -#define BF_CBC_DEC_TEST_VECTORS 2 -#define BF_CTR_ENC_TEST_VECTORS 2 -#define BF_CTR_DEC_TEST_VECTORS 2 - static struct cipher_testvec bf_enc_tv_template[] = { { /* DES test vectors from OpenSSL */ .key = "\x00\x00\x00\x00\x00\x00\x00\x00", @@ -8152,17 +8108,6 @@ static struct cipher_testvec bf_ctr_dec_tv_template[] = { /* * Twofish test vectors. */ -#define TF_ENC_TEST_VECTORS 4 -#define TF_DEC_TEST_VECTORS 4 -#define TF_CBC_ENC_TEST_VECTORS 5 -#define TF_CBC_DEC_TEST_VECTORS 5 -#define TF_CTR_ENC_TEST_VECTORS 2 -#define TF_CTR_DEC_TEST_VECTORS 2 -#define TF_LRW_ENC_TEST_VECTORS 8 -#define TF_LRW_DEC_TEST_VECTORS 8 -#define TF_XTS_ENC_TEST_VECTORS 5 -#define TF_XTS_DEC_TEST_VECTORS 5 - static struct cipher_testvec tf_enc_tv_template[] = { { .key = zeroed_string, @@ -10881,24 +10826,6 @@ static struct cipher_testvec tf_xts_dec_tv_template[] = { * Serpent test vectors. These are backwards because Serpent writes * octet sequences in right-to-left mode. 
*/ -#define SERPENT_ENC_TEST_VECTORS 5 -#define SERPENT_DEC_TEST_VECTORS 5 - -#define TNEPRES_ENC_TEST_VECTORS 4 -#define TNEPRES_DEC_TEST_VECTORS 4 - -#define SERPENT_CBC_ENC_TEST_VECTORS 1 -#define SERPENT_CBC_DEC_TEST_VECTORS 1 - -#define SERPENT_CTR_ENC_TEST_VECTORS 2 -#define SERPENT_CTR_DEC_TEST_VECTORS 2 - -#define SERPENT_LRW_ENC_TEST_VECTORS 8 -#define SERPENT_LRW_DEC_TEST_VECTORS 8 - -#define SERPENT_XTS_ENC_TEST_VECTORS 5 -#define SERPENT_XTS_DEC_TEST_VECTORS 5 - static struct cipher_testvec serpent_enc_tv_template[] = { { .input = "\x00\x01\x02\x03\x04\x05\x06\x07" @@ -13637,17 +13564,6 @@ static struct cipher_testvec serpent_xts_dec_tv_template[] = { }; /* Cast6 test vectors from RFC 2612 */ -#define CAST6_ENC_TEST_VECTORS 4 -#define CAST6_DEC_TEST_VECTORS 4 -#define CAST6_CBC_ENC_TEST_VECTORS 1 -#define CAST6_CBC_DEC_TEST_VECTORS 1 -#define CAST6_CTR_ENC_TEST_VECTORS 2 -#define CAST6_CTR_DEC_TEST_VECTORS 2 -#define CAST6_LRW_ENC_TEST_VECTORS 1 -#define CAST6_LRW_DEC_TEST_VECTORS 1 -#define CAST6_XTS_ENC_TEST_VECTORS 1 -#define CAST6_XTS_DEC_TEST_VECTORS 1 - static struct cipher_testvec cast6_enc_tv_template[] = { { .key = "\x23\x42\xbb\x9e\xfa\x38\x54\x2c" @@ -15182,38 +15098,6 @@ static struct cipher_testvec cast6_xts_dec_tv_template[] = { /* * AES test vectors. */ -#define AES_ENC_TEST_VECTORS 4 -#define AES_DEC_TEST_VECTORS 4 -#define AES_CBC_ENC_TEST_VECTORS 5 -#define AES_CBC_DEC_TEST_VECTORS 5 -#define HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS 2 -#define HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS 2 -#define HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC 2 -#define HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC 2 -#define HMAC_SHA1_AES_CBC_ENC_TEST_VEC 7 -#define HMAC_SHA256_AES_CBC_ENC_TEST_VEC 7 -#define HMAC_SHA512_AES_CBC_ENC_TEST_VEC 7 -#define AES_LRW_ENC_TEST_VECTORS 8 -#define AES_LRW_DEC_TEST_VECTORS 8 -#define AES_XTS_ENC_TEST_VECTORS 5 -#define AES_XTS_DEC_TEST_VECTORS 5 -#define AES_CTR_ENC_TEST_VECTORS 5 -#define AES_CTR_DEC_TEST_VECTORS 5 -#define AES_OFB_ENC_TEST_VECTORS 1 -#define AES_OFB_DEC_TEST_VECTORS 1 -#define AES_CTR_3686_ENC_TEST_VECTORS 7 -#define AES_CTR_3686_DEC_TEST_VECTORS 6 -#define AES_GCM_ENC_TEST_VECTORS 9 -#define AES_GCM_DEC_TEST_VECTORS 8 -#define AES_GCM_4106_ENC_TEST_VECTORS 23 -#define AES_GCM_4106_DEC_TEST_VECTORS 23 -#define AES_GCM_4543_ENC_TEST_VECTORS 1 -#define AES_GCM_4543_DEC_TEST_VECTORS 2 -#define AES_CCM_ENC_TEST_VECTORS 8 -#define AES_CCM_DEC_TEST_VECTORS 7 -#define AES_CCM_4309_ENC_TEST_VECTORS 7 -#define AES_CCM_4309_DEC_TEST_VECTORS 10 - static struct cipher_testvec aes_enc_tv_template[] = { { /* From FIPS-197 */ .key = "\x00\x01\x02\x03\x04\x05\x06\x07" @@ -17069,8 +16953,6 @@ static struct aead_testvec hmac_sha512_aes_cbc_enc_tv_temp[] = { }, }; -#define HMAC_SHA1_DES_CBC_ENC_TEST_VEC 1 - static struct aead_testvec hmac_sha1_des_cbc_enc_tv_temp[] = { { /*Generated with cryptopp*/ #ifdef __LITTLE_ENDIAN @@ -17130,8 +17012,6 @@ static struct aead_testvec hmac_sha1_des_cbc_enc_tv_temp[] = { }, }; -#define HMAC_SHA224_DES_CBC_ENC_TEST_VEC 1 - static struct aead_testvec hmac_sha224_des_cbc_enc_tv_temp[] = { { /*Generated with cryptopp*/ #ifdef __LITTLE_ENDIAN @@ -17191,8 +17071,6 @@ static struct aead_testvec hmac_sha224_des_cbc_enc_tv_temp[] = { }, }; -#define HMAC_SHA256_DES_CBC_ENC_TEST_VEC 1 - static struct aead_testvec hmac_sha256_des_cbc_enc_tv_temp[] = { { /*Generated with cryptopp*/ #ifdef __LITTLE_ENDIAN @@ -17254,8 +17132,6 @@ static struct aead_testvec hmac_sha256_des_cbc_enc_tv_temp[] = { }, }; -#define 
HMAC_SHA384_DES_CBC_ENC_TEST_VEC 1 - static struct aead_testvec hmac_sha384_des_cbc_enc_tv_temp[] = { { /*Generated with cryptopp*/ #ifdef __LITTLE_ENDIAN @@ -17321,8 +17197,6 @@ static struct aead_testvec hmac_sha384_des_cbc_enc_tv_temp[] = { }, }; -#define HMAC_SHA512_DES_CBC_ENC_TEST_VEC 1 - static struct aead_testvec hmac_sha512_des_cbc_enc_tv_temp[] = { { /*Generated with cryptopp*/ #ifdef __LITTLE_ENDIAN @@ -17392,8 +17266,6 @@ static struct aead_testvec hmac_sha512_des_cbc_enc_tv_temp[] = { }, }; -#define HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC 1 - static struct aead_testvec hmac_sha1_des3_ede_cbc_enc_tv_temp[] = { { /*Generated with cryptopp*/ #ifdef __LITTLE_ENDIAN @@ -17455,8 +17327,6 @@ static struct aead_testvec hmac_sha1_des3_ede_cbc_enc_tv_temp[] = { }, }; -#define HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC 1 - static struct aead_testvec hmac_sha224_des3_ede_cbc_enc_tv_temp[] = { { /*Generated with cryptopp*/ #ifdef __LITTLE_ENDIAN @@ -17518,8 +17388,6 @@ static struct aead_testvec hmac_sha224_des3_ede_cbc_enc_tv_temp[] = { }, }; -#define HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC 1 - static struct aead_testvec hmac_sha256_des3_ede_cbc_enc_tv_temp[] = { { /*Generated with cryptopp*/ #ifdef __LITTLE_ENDIAN @@ -17583,8 +17451,6 @@ static struct aead_testvec hmac_sha256_des3_ede_cbc_enc_tv_temp[] = { }, }; -#define HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC 1 - static struct aead_testvec hmac_sha384_des3_ede_cbc_enc_tv_temp[] = { { /*Generated with cryptopp*/ #ifdef __LITTLE_ENDIAN @@ -17652,8 +17518,6 @@ static struct aead_testvec hmac_sha384_des3_ede_cbc_enc_tv_temp[] = { }, }; -#define HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC 1 - static struct aead_testvec hmac_sha512_des3_ede_cbc_enc_tv_temp[] = { { /*Generated with cryptopp*/ #ifdef __LITTLE_ENDIAN @@ -24434,8 +24298,6 @@ static struct aead_testvec aes_ccm_rfc4309_dec_tv_template[] = { /* * ChaCha20-Poly1305 AEAD test vectors from RFC7539 2.8.2./A.5. */ -#define RFC7539_ENC_TEST_VECTORS 2 -#define RFC7539_DEC_TEST_VECTORS 2 static struct aead_testvec rfc7539_enc_tv_template[] = { { .key = "\x80\x81\x82\x83\x84\x85\x86\x87" @@ -24703,8 +24565,6 @@ static struct aead_testvec rfc7539_dec_tv_template[] = { /* * draft-irtf-cfrg-chacha20-poly1305 */ -#define RFC7539ESP_DEC_TEST_VECTORS 1 -#define RFC7539ESP_ENC_TEST_VECTORS 1 static struct aead_testvec rfc7539esp_enc_tv_template[] = { { .key = "\x1c\x92\x40\xa5\xeb\x55\xd3\x8a" @@ -24927,8 +24787,6 @@ static struct cipher_testvec aes_kw_dec_tv_template[] = { * http://csrc.nist.gov/groups/STM/cavp/documents/rng/RNGVS.pdf * Only AES-128 is supported at this time. 
*/ -#define ANSI_CPRNG_AES_TEST_VECTORS 6 - static struct cprng_testvec ansi_cprng_aes_tv_template[] = { { .key = "\xf3\xb1\x66\x6d\x13\x60\x72\x42" @@ -25846,13 +25704,6 @@ static struct drbg_testvec drbg_nopr_ctr_aes128_tv_template[] = { }; /* Cast5 test vectors from RFC 2144 */ -#define CAST5_ENC_TEST_VECTORS 4 -#define CAST5_DEC_TEST_VECTORS 4 -#define CAST5_CBC_ENC_TEST_VECTORS 1 -#define CAST5_CBC_DEC_TEST_VECTORS 1 -#define CAST5_CTR_ENC_TEST_VECTORS 2 -#define CAST5_CTR_DEC_TEST_VECTORS 2 - static struct cipher_testvec cast5_enc_tv_template[] = { { .key = "\x01\x23\x45\x67\x12\x34\x56\x78" @@ -26756,9 +26607,6 @@ static struct cipher_testvec cast5_ctr_dec_tv_template[] = { /* * ARC4 test vectors from OpenSSL */ -#define ARC4_ENC_TEST_VECTORS 7 -#define ARC4_DEC_TEST_VECTORS 7 - static struct cipher_testvec arc4_enc_tv_template[] = { { .key = "\x01\x23\x45\x67\x89\xab\xcd\xef", @@ -26894,9 +26742,6 @@ static struct cipher_testvec arc4_dec_tv_template[] = { /* * TEA test vectors */ -#define TEA_ENC_TEST_VECTORS 4 -#define TEA_DEC_TEST_VECTORS 4 - static struct cipher_testvec tea_enc_tv_template[] = { { .key = zeroed_string, @@ -26986,9 +26831,6 @@ static struct cipher_testvec tea_dec_tv_template[] = { /* * XTEA test vectors */ -#define XTEA_ENC_TEST_VECTORS 4 -#define XTEA_DEC_TEST_VECTORS 4 - static struct cipher_testvec xtea_enc_tv_template[] = { { .key = zeroed_string, @@ -27078,9 +26920,6 @@ static struct cipher_testvec xtea_dec_tv_template[] = { /* * KHAZAD test vectors. */ -#define KHAZAD_ENC_TEST_VECTORS 5 -#define KHAZAD_DEC_TEST_VECTORS 5 - static struct cipher_testvec khazad_enc_tv_template[] = { { .key = "\x80\x00\x00\x00\x00\x00\x00\x00" @@ -27177,11 +27016,6 @@ static struct cipher_testvec khazad_dec_tv_template[] = { * Anubis test vectors. */ -#define ANUBIS_ENC_TEST_VECTORS 5 -#define ANUBIS_DEC_TEST_VECTORS 5 -#define ANUBIS_CBC_ENC_TEST_VECTORS 2 -#define ANUBIS_CBC_DEC_TEST_VECTORS 2 - static struct cipher_testvec anubis_enc_tv_template[] = { { .key = "\xfe\xfe\xfe\xfe\xfe\xfe\xfe\xfe" @@ -27381,9 +27215,6 @@ static struct cipher_testvec anubis_cbc_dec_tv_template[] = { /* * XETA test vectors */ -#define XETA_ENC_TEST_VECTORS 4 -#define XETA_DEC_TEST_VECTORS 4 - static struct cipher_testvec xeta_enc_tv_template[] = { { .key = zeroed_string, @@ -27473,9 +27304,6 @@ static struct cipher_testvec xeta_dec_tv_template[] = { /* * FCrypt test vectors */ -#define FCRYPT_ENC_TEST_VECTORS ARRAY_SIZE(fcrypt_pcbc_enc_tv_template) -#define FCRYPT_DEC_TEST_VECTORS ARRAY_SIZE(fcrypt_pcbc_dec_tv_template) - static struct cipher_testvec fcrypt_pcbc_enc_tv_template[] = { { /* http://www.openafs.org/pipermail/openafs-devel/2000-December/005320.html */ .key = "\x00\x00\x00\x00\x00\x00\x00\x00", @@ -27601,17 +27429,6 @@ static struct cipher_testvec fcrypt_pcbc_dec_tv_template[] = { /* * CAMELLIA test vectors. 
*/ -#define CAMELLIA_ENC_TEST_VECTORS 4 -#define CAMELLIA_DEC_TEST_VECTORS 4 -#define CAMELLIA_CBC_ENC_TEST_VECTORS 3 -#define CAMELLIA_CBC_DEC_TEST_VECTORS 3 -#define CAMELLIA_CTR_ENC_TEST_VECTORS 2 -#define CAMELLIA_CTR_DEC_TEST_VECTORS 2 -#define CAMELLIA_LRW_ENC_TEST_VECTORS 8 -#define CAMELLIA_LRW_DEC_TEST_VECTORS 8 -#define CAMELLIA_XTS_ENC_TEST_VECTORS 5 -#define CAMELLIA_XTS_DEC_TEST_VECTORS 5 - static struct cipher_testvec camellia_enc_tv_template[] = { { .key = "\x01\x23\x45\x67\x89\xab\xcd\xef" @@ -31331,9 +31148,6 @@ static struct cipher_testvec camellia_xts_dec_tv_template[] = { /* * SEED test vectors */ -#define SEED_ENC_TEST_VECTORS 4 -#define SEED_DEC_TEST_VECTORS 4 - static struct cipher_testvec seed_enc_tv_template[] = { { .key = zeroed_string, @@ -31418,7 +31232,6 @@ static struct cipher_testvec seed_dec_tv_template[] = { } }; -#define SALSA20_STREAM_ENC_TEST_VECTORS 5 static struct cipher_testvec salsa20_stream_enc_tv_template[] = { /* * Testvectors from verified.test-vectors submitted to ECRYPT. @@ -32588,7 +32401,6 @@ static struct cipher_testvec salsa20_stream_enc_tv_template[] = { }, }; -#define CHACHA20_ENC_TEST_VECTORS 4 static struct cipher_testvec chacha20_enc_tv_template[] = { { /* RFC7539 A.2. Test Vector #1 */ .key = "\x00\x00\x00\x00\x00\x00\x00\x00" @@ -33100,8 +32912,6 @@ static struct cipher_testvec chacha20_enc_tv_template[] = { /* * CTS (Cipher Text Stealing) mode tests */ -#define CTS_MODE_ENC_TEST_VECTORS 6 -#define CTS_MODE_DEC_TEST_VECTORS 6 static struct cipher_testvec cts_mode_enc_tv_template[] = { { /* from rfc3962 */ .klen = 16, @@ -33322,9 +33132,6 @@ struct comp_testvec { * Params: winbits=-11, Z_DEFAULT_COMPRESSION, MAX_MEM_LEVEL. */ -#define DEFLATE_COMP_TEST_VECTORS 2 -#define DEFLATE_DECOMP_TEST_VECTORS 2 - static struct comp_testvec deflate_comp_tv_template[] = { { .inlen = 70, @@ -33400,9 +33207,6 @@ static struct comp_testvec deflate_decomp_tv_template[] = { /* * LZO test vectors (null-terminated strings). */ -#define LZO_COMP_TEST_VECTORS 2 -#define LZO_DECOMP_TEST_VECTORS 2 - static struct comp_testvec lzo_comp_tv_template[] = { { .inlen = 70, @@ -33534,8 +33338,6 @@ static struct hash_testvec michael_mic_tv_template[] = { /* * CRC32 test vectors */ -#define CRC32_TEST_VECTORS 14 - static struct hash_testvec crc32_tv_template[] = { { .key = "\x87\xa9\xcb\xed", @@ -33968,8 +33770,6 @@ static struct hash_testvec crc32_tv_template[] = { /* * CRC32C test vectors */ -#define CRC32C_TEST_VECTORS 15 - static struct hash_testvec crc32c_tv_template[] = { { .psize = 0, @@ -34406,8 +34206,6 @@ static struct hash_testvec crc32c_tv_template[] = { /* * Blakcifn CRC test vectors */ -#define BFIN_CRC_TEST_VECTORS 6 - static struct hash_testvec bfin_crc_tv_template[] = { { .psize = 0, @@ -34493,9 +34291,6 @@ static struct hash_testvec bfin_crc_tv_template[] = { }; -#define LZ4_COMP_TEST_VECTORS 1 -#define LZ4_DECOMP_TEST_VECTORS 1 - static struct comp_testvec lz4_comp_tv_template[] = { { .inlen = 70, @@ -34526,9 +34321,6 @@ static struct comp_testvec lz4_decomp_tv_template[] = { }, }; -#define LZ4HC_COMP_TEST_VECTORS 1 -#define LZ4HC_DECOMP_TEST_VECTORS 1 - static struct comp_testvec lz4hc_comp_tv_template[] = { { .inlen = 70, |
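The new aes_cbcmac_tv_template entries above exercise plain CBC-MAC over AES, the authentication primitive used by CCM. CBC-MAC is just AES-CBC encryption starting from an all-zero IV, keeping only the final ciphertext block, so the first vector (single-block message under the NIST SP 800-38B key) can be checked independently. The sketch below is not part of this patch; it is a userspace reproduction using OpenSSL's EVP API, included only to show where the expected digest 3a d7 7b b4 ... 66 ef 97 comes from.

	/*
	 * Userspace sketch (not part of this patch): reproduce the first
	 * aes_cbcmac_tv_template entry with OpenSSL. CBC-MAC == AES-CBC
	 * encryption with a zero IV, keeping only the last block.
	 * Build: gcc cbcmac.c -lcrypto
	 */
	#include <stdio.h>
	#include <openssl/evp.h>

	int main(void)
	{
		static const unsigned char key[16] = {
			0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6,
			0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c,
		};
		static const unsigned char msg[16] = {
			0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
			0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a,
		};
		unsigned char iv[16] = { 0 };	/* CBC-MAC always starts from a zero IV */
		unsigned char out[32];
		int len, total = 0, i;
		EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();

		EVP_EncryptInit_ex(ctx, EVP_aes_128_cbc(), NULL, key, iv);
		EVP_CIPHER_CTX_set_padding(ctx, 0);	/* input is already block aligned */
		EVP_EncryptUpdate(ctx, out, &len, msg, sizeof(msg));
		total += len;
		EVP_EncryptFinal_ex(ctx, out + total, &len);
		total += len;
		EVP_CIPHER_CTX_free(ctx);

		/* The MAC is the last ciphertext block; expect 3ad77bb4...2466ef97 */
		for (i = total - 16; i < total; i++)
			printf("%02x", out[i]);
		printf("\n");
		return 0;
	}

For the longer vectors the same computation applies block by block; the .np/.tap fields only describe how the test manager splits the input across scatterlist segments, not the MAC itself.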
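The other change running through this whole hunk is the removal of every hand-maintained *_TEST_VECTORS count macro: the number of vectors is now taken from the array itself, so the count can no longer drift out of sync when vectors are added or removed. The fragment below is only an illustration of that counting idiom; the struct and macro names are hypothetical and are not quoted from testmgr.c.

	/*
	 * Illustrative sketch only: once the vector count is derived from the
	 * array, the per-algorithm *_TEST_VECTORS defines become redundant.
	 * Names below are hypothetical, not taken from testmgr.c.
	 */
	#include <stddef.h>

	#define ARRAY_SIZE(arr)	(sizeof(arr) / sizeof((arr)[0]))

	struct hash_testvec;	/* vectors as declared in testmgr.h */

	struct hash_test_suite {
		const struct hash_testvec *vecs;
		unsigned int count;
	};

	/* Bind an array and its length in one step, so they cannot drift apart. */
	#define HASH_VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }

	/* Hypothetical usage: .suite = HASH_VECS(wp512_tv_template) */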