2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
38 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
41 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
51 * Need slab memory for testing (size in number of pages).
56 * Indexes into the xbuf to simulate cross-page access.
68 * Used by test_cipher()
/*
 * Synchronization context for async crypto requests: tcrypt_complete()
 * signals @completion when a request finishes (fields elided in this view).
 */
73 struct tcrypt_result {
74 struct completion completion;
/*
 * Per-algorithm-class test vector suites.  Each pairs a vector array with
 * a count (count fields not visible in this excerpt).
 */
78 struct aead_test_suite {
80 struct aead_testvec *vecs;
85 struct cipher_test_suite {
87 struct cipher_testvec *vecs;
92 struct comp_test_suite {
94 struct comp_testvec *vecs;
99 struct hash_test_suite {
100 struct hash_testvec *vecs;
104 struct cprng_test_suite {
105 struct cprng_testvec *vecs;
109 struct drbg_test_suite {
110 struct drbg_testvec *vecs;
114 struct akcipher_test_suite {
115 struct akcipher_testvec *vecs;
/*
 * Registry entry binding an algorithm name to its test routine and the
 * union/struct of suites below.  @test is invoked by alg_test().
 */
119 struct alg_test_desc {
121 int (*test)(const struct alg_test_desc *desc, const char *driver,
123 int fips_allowed; /* set if alg is allowed in fips mode */
126 struct aead_test_suite aead;
127 struct cipher_test_suite cipher;
128 struct comp_test_suite comp;
129 struct hash_test_suite hash;
130 struct cprng_test_suite cprng;
131 struct drbg_test_suite drbg;
132 struct akcipher_test_suite akcipher;
/*
 * Offsets into the xbuf page array used to simulate cross-page scatterlist
 * access (IDX1..IDX8 defined in elided lines above).
 */
136 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
/* Dump @len bytes of @buf to the log as an offset-prefixed hex dump. */
138 static void hexdump(unsigned char *buf, unsigned int len)
140 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
/*
 * Async completion callback shared by all tests.  -EINPROGRESS only means
 * the request was accepted by a backlog queue, so it is ignored; any other
 * status wakes the synchronous waiter via the embedded completion.
 */
145 static void tcrypt_complete(struct crypto_async_request *req, int err)
147 struct tcrypt_result *res = req->data;
149 if (err == -EINPROGRESS)
153 complete(&res->completion);
/*
 * Allocate XBUFSIZE single pages into @buf for scatterlist tests.
 * On allocation failure the already-allocated pages are released
 * (the free_page() below is presumably the unwind path — lines elided).
 */
156 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
160 for (i = 0; i < XBUFSIZE; i++) {
161 buf[i] = (void *)__get_free_page(GFP_KERNEL);
170 free_page((unsigned long)buf[i]);
/* Release all XBUFSIZE pages previously obtained by testmgr_alloc_buf(). */
175 static void testmgr_free_buf(char *buf[XBUFSIZE])
179 for (i = 0; i < XBUFSIZE; i++)
180 free_page((unsigned long)buf[i]);
/*
 * Turn an async crypto call into a synchronous one: if the operation was
 * queued (-EINPROGRESS) or backlogged (-EBUSY), block until the callback
 * completes, then re-arm the completion for the next operation.
 * NOTE(review): the final return value after the wait (presumably taken
 * from tr->err) is in elided lines — confirm against the full source.
 */
183 static int wait_async_op(struct tcrypt_result *tr, int ret)
185 if (ret == -EINPROGRESS || ret == -EBUSY) {
186 wait_for_completion(&tr->completion);
187 reinit_completion(&tr->completion);
/*
 * ahash_partial_update() - exercise the hash export()/import() interface.
 *
 * Saves the partial state of *preq with crypto_ahash_export(), frees the
 * old request, allocates a fresh one, restores the state with
 * crypto_ahash_import(), and feeds it the next chunk (tap[k] bytes at
 * offset @temp of the template plaintext).  Returns 0 or negative errno.
 *
 * NOTE(review): @req appears to be read (crypto_ahash_reqtfm(req)) before
 * any visible assignment — presumably "req = *preq;" sits in an elided
 * line; confirm against the full source.  The "alt: hash:" message
 * prefixes below look like typos for "alg: hash:".
 */
193 static int ahash_partial_update(struct ahash_request **preq,
194 struct crypto_ahash *tfm, struct hash_testvec *template,
195 void *hash_buff, int k, int temp, struct scatterlist *sg,
196 const char *algo, char *result, struct tcrypt_result *tresult)
199 struct ahash_request *req;
200 int statesize, ret = -EINVAL;
203 statesize = crypto_ahash_statesize(
204 crypto_ahash_reqtfm(req));
205 state = kmalloc(statesize, GFP_KERNEL);
207 pr_err("alt: hash: Failed to alloc state for %s\n", algo);
210 ret = crypto_ahash_export(req, state);
212 pr_err("alt: hash: Failed to export() for %s\n", algo);
/* Replace the request entirely to prove import() works on a fresh one. */
215 ahash_request_free(req);
216 req = ahash_request_alloc(tfm, GFP_KERNEL);
218 pr_err("alg: hash: Failed to alloc request for %s\n", algo);
221 ahash_request_set_callback(req,
222 CRYPTO_TFM_REQ_MAY_BACKLOG,
223 tcrypt_complete, tresult);
225 memcpy(hash_buff, template->plaintext + temp,
227 sg_init_one(&sg[0], hash_buff, template->tap[k]);
228 ahash_request_set_crypt(req, sg, result, template->tap[k]);
229 ret = crypto_ahash_import(req, state);
231 pr_err("alg: hash: Failed to import() for %s\n", algo);
234 ret = wait_async_op(tresult, crypto_ahash_update(req));
241 ahash_request_free(req);
/*
 * __test_hash() - run all hash test vectors against @tfm.
 *
 * Three phases over the @template array of @tcount vectors:
 *   1. contiguous-buffer tests, via digest() when @use_digest, else via
 *      the init()/update()/final() sequence;
 *   2. chunked scatterlist ("chunking") tests using the cross-page IDX
 *      offsets, skipped when @align_offset != 0;
 *   3. partial-update tests driving export()/import() through
 *      ahash_partial_update(), likewise skipped for unaligned runs.
 * @align_offset shifts the input buffer to probe alignment handling.
 * Returns 0 on success, negative errno on the first failure.
 *
 * NOTE(review): the "alt: hash:" message prefixes below look like typos
 * for "alg: hash:".  Many original lines are elided from this view.
 */
248 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
249 unsigned int tcount, bool use_digest,
250 const int align_offset)
252 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
253 unsigned int i, j, k, temp;
254 struct scatterlist sg[8];
257 struct ahash_request *req;
258 struct tcrypt_result tresult;
260 char *xbuf[XBUFSIZE];
263 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
266 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
269 if (testmgr_alloc_buf(xbuf))
272 init_completion(&tresult.completion);
274 req = ahash_request_alloc(tfm, GFP_KERNEL);
276 printk(KERN_ERR "alg: hash: Failed to allocate request for "
280 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
281 tcrypt_complete, &tresult);
/* Phase 1: contiguous single-buffer vectors. */
284 for (i = 0; i < tcount; i++) {
289 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
293 memset(result, 0, MAX_DIGEST_SIZE);
296 hash_buff += align_offset;
298 memcpy(hash_buff, template[i].plaintext, template[i].psize);
299 sg_init_one(&sg[0], hash_buff, template[i].psize);
301 if (template[i].ksize) {
302 crypto_ahash_clear_flags(tfm, ~0);
303 if (template[i].ksize > MAX_KEYLEN) {
304 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
305 j, algo, template[i].ksize, MAX_KEYLEN);
309 memcpy(key, template[i].key, template[i].ksize);
310 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
312 printk(KERN_ERR "alg: hash: setkey failed on "
313 "test %d for %s: ret=%d\n", j, algo,
319 ahash_request_set_crypt(req, sg, result, template[i].psize);
321 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
323 pr_err("alg: hash: digest failed on test %d "
324 "for %s: ret=%d\n", j, algo, -ret);
/* Non-digest path: exercise init/update/final separately. */
328 ret = wait_async_op(&tresult, crypto_ahash_init(req));
330 pr_err("alt: hash: init failed on test %d "
331 "for %s: ret=%d\n", j, algo, -ret);
334 ret = wait_async_op(&tresult, crypto_ahash_update(req));
336 pr_err("alt: hash: update failed on test %d "
337 "for %s: ret=%d\n", j, algo, -ret);
340 ret = wait_async_op(&tresult, crypto_ahash_final(req));
342 pr_err("alt: hash: final failed on test %d "
343 "for %s: ret=%d\n", j, algo, -ret);
348 if (memcmp(result, template[i].digest,
349 crypto_ahash_digestsize(tfm))) {
350 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
352 hexdump(result, crypto_ahash_digestsize(tfm));
/* Phase 2: chunked (multi-entry scatterlist) vectors across pages. */
359 for (i = 0; i < tcount; i++) {
360 /* alignment tests are only done with continuous buffers */
361 if (align_offset != 0)
368 memset(result, 0, MAX_DIGEST_SIZE);
371 sg_init_table(sg, template[i].np);
373 for (k = 0; k < template[i].np; k++) {
374 if (WARN_ON(offset_in_page(IDX[k]) +
375 template[i].tap[k] > PAGE_SIZE))
378 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
379 offset_in_page(IDX[k]),
380 template[i].plaintext + temp,
383 temp += template[i].tap[k];
386 if (template[i].ksize) {
387 if (template[i].ksize > MAX_KEYLEN) {
388 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
389 j, algo, template[i].ksize, MAX_KEYLEN);
393 crypto_ahash_clear_flags(tfm, ~0);
394 memcpy(key, template[i].key, template[i].ksize);
395 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
398 printk(KERN_ERR "alg: hash: setkey "
399 "failed on chunking test %d "
400 "for %s: ret=%d\n", j, algo, -ret);
405 ahash_request_set_crypt(req, sg, result, template[i].psize);
406 ret = crypto_ahash_digest(req);
412 wait_for_completion(&tresult.completion);
413 reinit_completion(&tresult.completion);
419 printk(KERN_ERR "alg: hash: digest failed "
420 "on chunking test %d for %s: "
421 "ret=%d\n", j, algo, -ret);
425 if (memcmp(result, template[i].digest,
426 crypto_ahash_digestsize(tfm))) {
427 printk(KERN_ERR "alg: hash: Chunking test %d "
428 "failed for %s\n", j, algo);
429 hexdump(result, crypto_ahash_digestsize(tfm));
/* Phase 3: export()/import() round-trips between update() calls. */
435 /* partial update exercise */
437 for (i = 0; i < tcount; i++) {
438 /* alignment tests are only done with continuous buffers */
439 if (align_offset != 0)
442 if (template[i].np < 2)
446 memset(result, 0, MAX_DIGEST_SIZE);
450 memcpy(hash_buff, template[i].plaintext,
452 sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
454 if (template[i].ksize) {
455 crypto_ahash_clear_flags(tfm, ~0);
456 if (template[i].ksize > MAX_KEYLEN) {
457 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
458 j, algo, template[i].ksize, MAX_KEYLEN);
462 memcpy(key, template[i].key, template[i].ksize);
463 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
465 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
471 ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
472 ret = wait_async_op(&tresult, crypto_ahash_init(req));
474 pr_err("alt: hash: init failed on test %d for %s: ret=%d\n",
478 ret = wait_async_op(&tresult, crypto_ahash_update(req));
480 pr_err("alt: hash: update failed on test %d for %s: ret=%d\n",
485 temp = template[i].tap[0];
486 for (k = 1; k < template[i].np; k++) {
487 ret = ahash_partial_update(&req, tfm, &template[i],
488 hash_buff, k, temp, &sg[0], algo, result,
491 pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
495 temp += template[i].tap[k];
497 ret = wait_async_op(&tresult, crypto_ahash_final(req));
499 pr_err("alt: hash: final failed on test %d for %s: ret=%d\n",
503 if (memcmp(result, template[i].digest,
504 crypto_ahash_digestsize(tfm))) {
505 pr_err("alg: hash: Partial Test %d failed for %s\n",
507 hexdump(result, crypto_ahash_digestsize(tfm));
516 ahash_request_free(req);
518 testmgr_free_buf(xbuf);
/*
 * test_hash() - driver for __test_hash() over several buffer alignments:
 * offset 0 (aligned), offset 1 (deliberately misaligned), and an offset
 * derived from the tfm's alignmask to verify the mask is honoured.
 * Returns 0 on success or the first failing __test_hash() result.
 */
525 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
526 unsigned int tcount, bool use_digest)
528 unsigned int alignmask;
531 ret = __test_hash(tfm, template, tcount, use_digest, 0);
535 /* test unaligned buffers, check with one byte offset */
536 ret = __test_hash(tfm, template, tcount, use_digest, 1);
540 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
542 /* Check if alignment mask for tfm is correctly set. */
543 ret = __test_hash(tfm, template, tcount, use_digest,
/*
 * __test_aead() - run AEAD encrypt/decrypt test vectors against @tfm.
 *
 * Two phases over @template[0..tcount):
 *   1. contiguous buffers (optionally writing to a separate destination
 *      when @diff_dst, shifted by @align_offset for alignment checks);
 *   2. chunked scatterlists built from the cross-page IDX/atap/tap
 *      layouts, with poison/guard-byte checks after the operation.
 * @enc selects encryption vs decryption; decrypt vectors flagged .novrfy
 * are expected to fail authentication with -EBADMSG.
 * Returns 0 on success, negative errno on the first failure.
 *
 * NOTE(review): "!ret == template[i].fail" parses as "(!ret) == fail" —
 * it only behaves as intended because .fail is 0/1; consider the
 * explicit form.  Lines around original 781-785 appear garbled in this
 * excerpt (a memcpy/sg_set_buf pair collapsed together) — consult the
 * full source.  Many original lines are elided from this view.
 */
552 static int __test_aead(struct crypto_aead *tfm, int enc,
553 struct aead_testvec *template, unsigned int tcount,
554 const bool diff_dst, const int align_offset)
556 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
557 unsigned int i, j, k, n, temp;
561 struct aead_request *req;
562 struct scatterlist *sg;
563 struct scatterlist *sgout;
565 struct tcrypt_result result;
566 unsigned int authsize, iv_len;
571 char *xbuf[XBUFSIZE];
572 char *xoutbuf[XBUFSIZE];
573 char *axbuf[XBUFSIZE];
575 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
578 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
581 if (testmgr_alloc_buf(xbuf))
583 if (testmgr_alloc_buf(axbuf))
585 if (diff_dst && testmgr_alloc_buf(xoutbuf))
588 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
589 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
604 init_completion(&result.completion);
606 req = aead_request_alloc(tfm, GFP_KERNEL);
608 pr_err("alg: aead%s: Failed to allocate request for %s\n",
613 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
614 tcrypt_complete, &result);
/* Phase 1: contiguous-buffer vectors. */
616 for (i = 0, j = 0; i < tcount; i++) {
622 /* some templates have no input data but they will
626 input += align_offset;
630 if (WARN_ON(align_offset + template[i].ilen >
631 PAGE_SIZE || template[i].alen > PAGE_SIZE))
634 memcpy(input, template[i].input, template[i].ilen);
635 memcpy(assoc, template[i].assoc, template[i].alen);
636 iv_len = crypto_aead_ivsize(tfm);
638 memcpy(iv, template[i].iv, iv_len);
640 memset(iv, 0, iv_len);
642 crypto_aead_clear_flags(tfm, ~0);
644 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
646 if (template[i].klen > MAX_KEYLEN) {
647 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
648 d, j, algo, template[i].klen,
653 memcpy(key, template[i].key, template[i].klen);
655 ret = crypto_aead_setkey(tfm, key, template[i].klen);
656 if (!ret == template[i].fail) {
657 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
658 d, j, algo, crypto_aead_get_flags(tfm));
/* Tag length is the difference between result and input lengths. */
663 authsize = abs(template[i].rlen - template[i].ilen);
664 ret = crypto_aead_setauthsize(tfm, authsize);
666 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
667 d, authsize, j, algo);
671 k = !!template[i].alen;
672 sg_init_table(sg, k + 1);
673 sg_set_buf(&sg[0], assoc, template[i].alen);
674 sg_set_buf(&sg[k], input,
675 template[i].ilen + (enc ? authsize : 0));
679 sg_init_table(sgout, k + 1);
680 sg_set_buf(&sgout[0], assoc, template[i].alen);
683 output += align_offset;
684 sg_set_buf(&sgout[k], output,
685 template[i].rlen + (enc ? 0 : authsize));
688 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
689 template[i].ilen, iv);
691 aead_request_set_ad(req, template[i].alen);
693 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
697 if (template[i].novrfy) {
698 /* verification was supposed to fail */
699 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
701 /* so really, we got a bad message */
708 wait_for_completion(&result.completion);
709 reinit_completion(&result.completion);
714 if (template[i].novrfy)
715 /* verification failure was expected */
719 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
720 d, e, j, algo, -ret);
725 if (memcmp(q, template[i].result, template[i].rlen)) {
726 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
728 hexdump(q, template[i].rlen);
/* Phase 2: chunked scatterlist vectors (contiguous-buffer runs only). */
734 for (i = 0, j = 0; i < tcount; i++) {
735 /* alignment tests are only done with continuous buffers */
736 if (align_offset != 0)
745 memcpy(iv, template[i].iv, MAX_IVLEN);
747 memset(iv, 0, MAX_IVLEN);
749 crypto_aead_clear_flags(tfm, ~0);
751 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
752 if (template[i].klen > MAX_KEYLEN) {
753 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
754 d, j, algo, template[i].klen, MAX_KEYLEN);
758 memcpy(key, template[i].key, template[i].klen);
760 ret = crypto_aead_setkey(tfm, key, template[i].klen);
761 if (!ret == template[i].fail) {
762 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
763 d, j, algo, crypto_aead_get_flags(tfm));
768 authsize = abs(template[i].rlen - template[i].ilen);
771 sg_init_table(sg, template[i].anp + template[i].np);
773 sg_init_table(sgout, template[i].anp + template[i].np);
/* Scatter the associated data across pages per the atap[] layout. */
776 for (k = 0, temp = 0; k < template[i].anp; k++) {
777 if (WARN_ON(offset_in_page(IDX[k]) +
778 template[i].atap[k] > PAGE_SIZE))
781 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
782 offset_in_page(IDX[k]),
783 template[i].assoc + temp,
784 template[i].atap[k]),
785 template[i].atap[k]);
787 sg_set_buf(&sgout[k],
788 axbuf[IDX[k] >> PAGE_SHIFT] +
789 offset_in_page(IDX[k]),
790 template[i].atap[k]);
791 temp += template[i].atap[k];
/* Scatter the plaintext/ciphertext per the tap[] layout. */
794 for (k = 0, temp = 0; k < template[i].np; k++) {
795 if (WARN_ON(offset_in_page(IDX[k]) +
796 template[i].tap[k] > PAGE_SIZE))
799 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
800 memcpy(q, template[i].input + temp, template[i].tap[k]);
801 sg_set_buf(&sg[template[i].anp + k],
802 q, template[i].tap[k]);
805 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
806 offset_in_page(IDX[k]);
808 memset(q, 0, template[i].tap[k]);
810 sg_set_buf(&sgout[template[i].anp + k],
811 q, template[i].tap[k]);
814 n = template[i].tap[k];
815 if (k == template[i].np - 1 && enc)
817 if (offset_in_page(q) + n < PAGE_SIZE)
820 temp += template[i].tap[k];
823 ret = crypto_aead_setauthsize(tfm, authsize);
825 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
826 d, authsize, j, algo);
/* On encrypt, grow the last segment to make room for the auth tag. */
831 if (WARN_ON(sg[template[i].anp + k - 1].offset +
832 sg[template[i].anp + k - 1].length +
833 authsize > PAGE_SIZE)) {
839 sgout[template[i].anp + k - 1].length +=
841 sg[template[i].anp + k - 1].length += authsize;
844 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
848 aead_request_set_ad(req, template[i].alen);
850 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
854 if (template[i].novrfy) {
855 /* verification was supposed to fail */
856 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
858 /* so really, we got a bad message */
865 wait_for_completion(&result.completion);
866 reinit_completion(&result.completion);
871 if (template[i].novrfy)
872 /* verification failure was expected */
876 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
877 d, e, j, algo, -ret);
/* Verify each output chunk, then check guard bytes past each chunk. */
882 for (k = 0, temp = 0; k < template[i].np; k++) {
884 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
885 offset_in_page(IDX[k]);
887 q = xbuf[IDX[k] >> PAGE_SHIFT] +
888 offset_in_page(IDX[k]);
890 n = template[i].tap[k];
891 if (k == template[i].np - 1)
892 n += enc ? authsize : -authsize;
894 if (memcmp(q, template[i].result + temp, n)) {
895 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
902 if (k == template[i].np - 1 && !enc) {
904 memcmp(q, template[i].input +
910 for (n = 0; offset_in_page(q + n) && q[n]; n++)
914 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
915 d, j, e, k, algo, n);
920 temp += template[i].tap[k];
927 aead_request_free(req);
931 testmgr_free_buf(xoutbuf);
933 testmgr_free_buf(axbuf);
935 testmgr_free_buf(xbuf);
/*
 * test_aead() - driver for __test_aead() over the four configurations:
 * in-place aligned, out-of-place aligned, out-of-place with a one-byte
 * misalignment, and out-of-place with an alignmask-derived offset.
 * Returns 0 on success or the first failing __test_aead() result.
 */
942 static int test_aead(struct crypto_aead *tfm, int enc,
943 struct aead_testvec *template, unsigned int tcount)
945 unsigned int alignmask;
948 /* test 'dst == src' case */
949 ret = __test_aead(tfm, enc, template, tcount, false, 0);
953 /* test 'dst != src' case */
954 ret = __test_aead(tfm, enc, template, tcount, true, 0);
958 /* test unaligned buffers, check with one byte offset */
959 ret = __test_aead(tfm, enc, template, tcount, true, 1);
963 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
965 /* Check if alignment mask for tfm is correctly set. */
966 ret = __test_aead(tfm, enc, template, tcount, true,
/*
 * test_cipher() - test a single-block (raw) cipher transform.
 *
 * For each vector: copy the input into a test page, set the key, then
 * encrypt or decrypt one cipher block at a time with
 * crypto_cipher_{en,de}crypt_one() and compare against the expected
 * result.  Returns 0 on success, negative errno on failure.
 *
 * NOTE(review): "!ret == template[i].fail" parses as "(!ret) == fail";
 * correct only because .fail is 0/1 — consider the explicit form.
 */
975 static int test_cipher(struct crypto_cipher *tfm, int enc,
976 struct cipher_testvec *template, unsigned int tcount)
978 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
979 unsigned int i, j, k;
983 char *xbuf[XBUFSIZE];
986 if (testmgr_alloc_buf(xbuf))
995 for (i = 0; i < tcount; i++) {
1002 if (WARN_ON(template[i].ilen > PAGE_SIZE))
1006 memcpy(data, template[i].input, template[i].ilen);
1008 crypto_cipher_clear_flags(tfm, ~0);
1010 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1012 ret = crypto_cipher_setkey(tfm, template[i].key,
1014 if (!ret == template[i].fail) {
1015 printk(KERN_ERR "alg: cipher: setkey failed "
1016 "on test %d for %s: flags=%x\n", j,
1017 algo, crypto_cipher_get_flags(tfm));
/* Process the buffer one cipher block at a time, in place. */
1022 for (k = 0; k < template[i].ilen;
1023 k += crypto_cipher_blocksize(tfm)) {
1025 crypto_cipher_encrypt_one(tfm, data + k,
1028 crypto_cipher_decrypt_one(tfm, data + k,
1033 if (memcmp(q, template[i].result, template[i].rlen)) {
1034 printk(KERN_ERR "alg: cipher: Test %d failed "
1035 "on %s for %s\n", j, e, algo);
1036 hexdump(q, template[i].rlen);
1045 testmgr_free_buf(xbuf);
/*
 * __test_skcipher() - run symmetric-cipher test vectors against @tfm.
 *
 * Two phases over @template[0..tcount):
 *   1. contiguous buffers (optionally to a separate destination when
 *      @diff_dst, shifted by @align_offset), verifying both the output
 *      and, when the vector provides iv_out, the chained output IV;
 *   2. chunked scatterlists across pages per tap[], with guard-byte
 *      checks after each chunk (contiguous-buffer runs only).
 * Returns 0 on success, negative errno on the first failure.
 *
 * NOTE(review): "!ret == template[i].fail" parses as "(!ret) == fail";
 * correct only because .fail is 0/1.  Many original lines are elided
 * from this view.
 */
1050 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1051 struct cipher_testvec *template, unsigned int tcount,
1052 const bool diff_dst, const int align_offset)
1055 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1056 unsigned int i, j, k, n, temp;
1058 struct skcipher_request *req;
1059 struct scatterlist sg[8];
1060 struct scatterlist sgout[8];
1062 struct tcrypt_result result;
1065 char *xbuf[XBUFSIZE];
1066 char *xoutbuf[XBUFSIZE];
1068 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1070 if (testmgr_alloc_buf(xbuf))
1073 if (diff_dst && testmgr_alloc_buf(xoutbuf))
1086 init_completion(&result.completion);
1088 req = skcipher_request_alloc(tfm, GFP_KERNEL);
1090 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1095 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1096 tcrypt_complete, &result);
/* Phase 1: contiguous-buffer vectors. */
1099 for (i = 0; i < tcount; i++) {
1100 if (template[i].np && !template[i].also_non_np)
1104 memcpy(iv, template[i].iv, ivsize);
1106 memset(iv, 0, MAX_IVLEN);
1110 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1114 data += align_offset;
1115 memcpy(data, template[i].input, template[i].ilen);
1117 crypto_skcipher_clear_flags(tfm, ~0);
1119 crypto_skcipher_set_flags(tfm,
1120 CRYPTO_TFM_REQ_WEAK_KEY);
1122 ret = crypto_skcipher_setkey(tfm, template[i].key,
1124 if (!ret == template[i].fail) {
1125 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1126 d, j, algo, crypto_skcipher_get_flags(tfm));
1131 sg_init_one(&sg[0], data, template[i].ilen);
1134 data += align_offset;
1135 sg_init_one(&sgout[0], data, template[i].ilen);
1138 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1139 template[i].ilen, iv);
1140 ret = enc ? crypto_skcipher_encrypt(req) :
1141 crypto_skcipher_decrypt(req);
1148 wait_for_completion(&result.completion);
1149 reinit_completion(&result.completion);
1155 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1156 d, e, j, algo, -ret);
1161 if (memcmp(q, template[i].result, template[i].rlen)) {
1162 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1164 hexdump(q, template[i].rlen);
/* Vectors with iv_out also pin the chained IV left in the request. */
1169 if (template[i].iv_out &&
1170 memcmp(iv, template[i].iv_out,
1171 crypto_skcipher_ivsize(tfm))) {
1172 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1174 hexdump(iv, crypto_skcipher_ivsize(tfm));
/* Phase 2: chunked scatterlist vectors (contiguous-buffer runs only). */
1181 for (i = 0; i < tcount; i++) {
1182 /* alignment tests are only done with continuous buffers */
1183 if (align_offset != 0)
1186 if (!template[i].np)
1190 memcpy(iv, template[i].iv, ivsize);
1192 memset(iv, 0, MAX_IVLEN);
1195 crypto_skcipher_clear_flags(tfm, ~0);
1197 crypto_skcipher_set_flags(tfm,
1198 CRYPTO_TFM_REQ_WEAK_KEY);
1200 ret = crypto_skcipher_setkey(tfm, template[i].key,
1202 if (!ret == template[i].fail) {
1203 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1204 d, j, algo, crypto_skcipher_get_flags(tfm));
1211 sg_init_table(sg, template[i].np);
1213 sg_init_table(sgout, template[i].np);
1214 for (k = 0; k < template[i].np; k++) {
1215 if (WARN_ON(offset_in_page(IDX[k]) +
1216 template[i].tap[k] > PAGE_SIZE))
1219 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1221 memcpy(q, template[i].input + temp, template[i].tap[k]);
/* Plant a NUL guard byte just past the chunk when it fits in the page. */
1223 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1224 q[template[i].tap[k]] = 0;
1226 sg_set_buf(&sg[k], q, template[i].tap[k]);
1228 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1229 offset_in_page(IDX[k]);
1231 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1233 memset(q, 0, template[i].tap[k]);
1234 if (offset_in_page(q) +
1235 template[i].tap[k] < PAGE_SIZE)
1236 q[template[i].tap[k]] = 0;
1239 temp += template[i].tap[k];
1242 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1243 template[i].ilen, iv);
1245 ret = enc ? crypto_skcipher_encrypt(req) :
1246 crypto_skcipher_decrypt(req);
1253 wait_for_completion(&result.completion);
1254 reinit_completion(&result.completion);
1260 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1261 d, e, j, algo, -ret);
/* Verify each output chunk, then scan the trailing guard bytes. */
1267 for (k = 0; k < template[i].np; k++) {
1269 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1270 offset_in_page(IDX[k]);
1272 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1273 offset_in_page(IDX[k]);
1275 if (memcmp(q, template[i].result + temp,
1276 template[i].tap[k])) {
1277 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1279 hexdump(q, template[i].tap[k]);
1283 q += template[i].tap[k];
1284 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1287 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1288 d, j, e, k, algo, n);
1292 temp += template[i].tap[k];
1299 skcipher_request_free(req);
1301 testmgr_free_buf(xoutbuf);
1303 testmgr_free_buf(xbuf);
/*
 * test_skcipher() - driver for __test_skcipher() over four configurations:
 * in-place aligned, out-of-place aligned, out-of-place one-byte
 * misaligned, and out-of-place with an alignmask-derived offset.
 * Returns 0 on success or the first failing __test_skcipher() result.
 */
1308 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1309 struct cipher_testvec *template, unsigned int tcount)
1311 unsigned int alignmask;
1314 /* test 'dst == src' case */
1315 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1319 /* test 'dst != src' case */
1320 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1324 /* test unaligned buffers, check with one byte offset */
1325 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1329 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1331 /* Check if alignment mask for tfm is correctly set. */
1332 ret = __test_skcipher(tfm, enc, template, tcount, true,
/*
 * test_comp() - test a compression transform in both directions.
 *
 * Compresses each of @ctcount vectors and decompresses each of @dtcount
 * vectors into a COMP_BUF_SIZE stack buffer, checking both the produced
 * length and the produced bytes against the expected output.
 * Returns 0 on success, negative errno on the first failure.
 */
1341 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1342 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1344 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1346 char result[COMP_BUF_SIZE];
1349 for (i = 0; i < ctcount; i++) {
1351 unsigned int dlen = COMP_BUF_SIZE;
1353 memset(result, 0, sizeof (result));
1355 ilen = ctemplate[i].inlen;
1356 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1357 ilen, result, &dlen);
1359 printk(KERN_ERR "alg: comp: compression failed "
1360 "on test %d for %s: ret=%d\n", i + 1, algo,
1365 if (dlen != ctemplate[i].outlen) {
1366 printk(KERN_ERR "alg: comp: Compression test %d "
1367 "failed for %s: output len = %d\n", i + 1, algo,
1373 if (memcmp(result, ctemplate[i].output, dlen)) {
1374 printk(KERN_ERR "alg: comp: Compression test %d "
1375 "failed for %s\n", i + 1, algo);
1376 hexdump(result, dlen);
1382 for (i = 0; i < dtcount; i++) {
1384 unsigned int dlen = COMP_BUF_SIZE;
1386 memset(result, 0, sizeof (result));
1388 ilen = dtemplate[i].inlen;
1389 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1390 ilen, result, &dlen);
1392 printk(KERN_ERR "alg: comp: decompression failed "
1393 "on test %d for %s: ret=%d\n", i + 1, algo,
1398 if (dlen != dtemplate[i].outlen) {
1399 printk(KERN_ERR "alg: comp: Decompression test %d "
1400 "failed for %s: output len = %d\n", i + 1, algo,
1406 if (memcmp(result, dtemplate[i].output, dlen)) {
1407 printk(KERN_ERR "alg: comp: Decompression test %d "
1408 "failed for %s\n", i + 1, algo);
1409 hexdump(result, dlen);
/*
 * test_cprng() - test a deterministic (ANSI X9.31-style) PRNG.
 *
 * For each vector, builds the seed as V || key || DT, resets the RNG
 * with it, draws template[i].loops blocks of output and compares the
 * final block against the expected result.  Returns 0 or negative errno.
 */
1421 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1422 unsigned int tcount)
1424 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1425 int err = 0, i, j, seedsize;
1429 seedsize = crypto_rng_seedsize(tfm);
1431 seed = kmalloc(seedsize, GFP_KERNEL);
1433 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1438 for (i = 0; i < tcount; i++) {
1439 memset(result, 0, 32);
/* Seed layout: V, then key, then DT, back to back. */
1441 memcpy(seed, template[i].v, template[i].vlen);
1442 memcpy(seed + template[i].vlen, template[i].key,
1444 memcpy(seed + template[i].vlen + template[i].klen,
1445 template[i].dt, template[i].dtlen);
1447 err = crypto_rng_reset(tfm, seed, seedsize);
1449 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1454 for (j = 0; j < template[i].loops; j++) {
1455 err = crypto_rng_get_bytes(tfm, result,
1458 printk(KERN_ERR "alg: cprng: Failed to obtain "
1459 "the correct amount of random data for "
1460 "%s (requested %d)\n", algo,
1466 err = memcmp(result, template[i].result,
1469 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1471 hexdump(result, template[i].rlen);
/*
 * alg_test_aead() - allocate the AEAD transform for @driver and run its
 * encryption and (if present) decryption vector suites via test_aead().
 * Returns 0 on success or a negative errno.
 */
1482 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1485 struct crypto_aead *tfm;
1488 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1490 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1491 "%ld\n", driver, PTR_ERR(tfm));
1492 return PTR_ERR(tfm);
1495 if (desc->suite.aead.enc.vecs) {
1496 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1497 desc->suite.aead.enc.count);
1502 if (!err && desc->suite.aead.dec.vecs)
1503 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1504 desc->suite.aead.dec.count);
1507 crypto_free_aead(tfm);
/*
 * alg_test_cipher() - allocate the raw cipher transform for @driver and
 * run its encryption and (if present) decryption suites via test_cipher().
 * Returns 0 on success or a negative errno.
 */
1511 static int alg_test_cipher(const struct alg_test_desc *desc,
1512 const char *driver, u32 type, u32 mask)
1514 struct crypto_cipher *tfm;
1517 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1519 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1520 "%s: %ld\n", driver, PTR_ERR(tfm));
1521 return PTR_ERR(tfm);
1524 if (desc->suite.cipher.enc.vecs) {
1525 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1526 desc->suite.cipher.enc.count);
1531 if (desc->suite.cipher.dec.vecs)
1532 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1533 desc->suite.cipher.dec.count);
1536 crypto_free_cipher(tfm);
/*
 * alg_test_skcipher() - allocate the skcipher transform for @driver and
 * run its encryption and (if present) decryption suites via
 * test_skcipher().  Returns 0 on success or a negative errno.
 */
1540 static int alg_test_skcipher(const struct alg_test_desc *desc,
1541 const char *driver, u32 type, u32 mask)
1543 struct crypto_skcipher *tfm;
1546 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1548 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1549 "%s: %ld\n", driver, PTR_ERR(tfm));
1550 return PTR_ERR(tfm);
1553 if (desc->suite.cipher.enc.vecs) {
1554 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1555 desc->suite.cipher.enc.count);
1560 if (desc->suite.cipher.dec.vecs)
1561 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1562 desc->suite.cipher.dec.count);
1565 crypto_free_skcipher(tfm);
/*
 * alg_test_comp() - allocate the compression transform for @driver and
 * run both its compression and decompression suites via test_comp().
 * Returns 0 on success or a negative errno.
 */
1569 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1572 struct crypto_comp *tfm;
1575 tfm = crypto_alloc_comp(driver, type, mask);
1577 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1578 "%ld\n", driver, PTR_ERR(tfm));
1579 return PTR_ERR(tfm);
1582 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1583 desc->suite.comp.decomp.vecs,
1584 desc->suite.comp.comp.count,
1585 desc->suite.comp.decomp.count);
1587 crypto_free_comp(tfm);
/*
 * alg_test_hash() - allocate the ahash transform for @driver and run the
 * hash suite twice: once via digest() and once via init/update/final
 * (the use_digest flag of test_hash()).  Returns 0 or a negative errno.
 */
1591 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1594 struct crypto_ahash *tfm;
1597 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1599 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1600 "%ld\n", driver, PTR_ERR(tfm));
1601 return PTR_ERR(tfm);
1604 err = test_hash(tfm, desc->suite.hash.vecs,
1605 desc->suite.hash.count, true);
1607 err = test_hash(tfm, desc->suite.hash.vecs,
1608 desc->suite.hash.count, false);
1610 crypto_free_ahash(tfm);
/*
 * alg_test_crc32c() - standard hash tests plus a crc32c-specific check:
 * seed the shash context with a known intermediate CRC state and verify
 * that final() produces the matching value.
 *
 * NOTE(review): 420553207 is presumably the precomputed little-endian
 * CRC state whose finalisation yields ~420553207; the le32_to_cpu() on
 * a host-order constant looks endianness-suspect — confirm against the
 * full source and the crc32c shash's state layout.
 */
1614 static int alg_test_crc32c(const struct alg_test_desc *desc,
1615 const char *driver, u32 type, u32 mask)
1617 struct crypto_shash *tfm;
1621 err = alg_test_hash(desc, driver, type, mask);
1625 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1627 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1628 "%ld\n", driver, PTR_ERR(tfm));
1634 SHASH_DESC_ON_STACK(shash, tfm);
1635 u32 *ctx = (u32 *)shash_desc_ctx(shash);
1640 *ctx = le32_to_cpu(420553207);
1641 err = crypto_shash_final(shash, (u8 *)&val);
1643 printk(KERN_ERR "alg: crc32c: Operation failed for "
1644 "%s: %d\n", driver, err);
1648 if (val != ~420553207) {
1649 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1650 "%d\n", driver, val);
1655 crypto_free_shash(tfm);
/*
 * alg_test_cprng() - allocate the RNG transform for @driver and run its
 * deterministic-PRNG vector suite via test_cprng().
 * Returns 0 on success or a negative errno.
 */
1661 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1664 struct crypto_rng *rng;
1667 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1669 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1670 "%ld\n", driver, PTR_ERR(rng));
1671 return PTR_ERR(rng);
1674 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1676 crypto_free_rng(rng);
/*
 * drbg_cavs_test() - run one NIST CAVS vector against a DRBG instance.
 *
 * Instantiates the DRBG with the vector's test entropy and
 * personalization string, then performs two generate operations (using
 * the prediction-resistance entropy inputs when @pr is set) and compares
 * the second output block against the expected data.
 * Returns 0 on match, non-zero otherwise.
 */
1682 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1683 const char *driver, u32 type, u32 mask)
1686 struct crypto_rng *drng;
1687 struct drbg_test_data test_data;
1688 struct drbg_string addtl, pers, testentropy;
1689 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1694 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1696 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
/* Instantiate with the vector's entropy and personalization string. */
1702 test_data.testentropy = &testentropy;
1703 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1704 drbg_string_fill(&pers, test->pers, test->perslen);
1705 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1707 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
/* First generate: additional input A, PR entropy A when @pr. */
1711 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1713 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1714 ret = crypto_drbg_get_bytes_addtl_test(drng,
1715 buf, test->expectedlen, &addtl, &test_data);
1717 ret = crypto_drbg_get_bytes_addtl(drng,
1718 buf, test->expectedlen, &addtl);
1721 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1722 "driver %s\n", driver);
/* Second generate: additional input B, PR entropy B when @pr. */
1726 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1728 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1729 ret = crypto_drbg_get_bytes_addtl_test(drng,
1730 buf, test->expectedlen, &addtl, &test_data);
1732 ret = crypto_drbg_get_bytes_addtl(drng,
1733 buf, test->expectedlen, &addtl);
1736 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1737 "driver %s\n", driver);
1741 ret = memcmp(test->expected, buf, test->expectedlen);
1744 crypto_free_rng(drng);
/*
 * alg_test_drbg() - run every DRBG test vector in desc->suite.drbg.
 * Prediction resistance is inferred from the driver name prefix
 * ("drbg_pr_" names are the PR-enabled instantiations).
 *
 * NOTE(review): elided listing -- intermediate lines are not shown.
 */
1750 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1756 struct drbg_testvec *template = desc->suite.drbg.vecs;
1757 unsigned int tcount = desc->suite.drbg.count;
/* "drbg_pr_" prefix selects the prediction-resistance test path. */
1759 if (0 == memcmp(driver, "drbg_pr_", 8))
1762 for (i = 0; i < tcount; i++) {
1763 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1765 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
/*
 * do_test_rsa() - run one RSA known-answer vector through an akcipher tfm:
 * encrypt the message m and compare against the expected ciphertext c,
 * then (unless the vector carries only a public key) decrypt c and compare
 * against m.
 *
 * NOTE(review): elided listing -- error checks, labels and frees between
 * the excerpted lines are not shown.
 */
1775 static int do_test_rsa(struct crypto_akcipher *tfm,
1776 struct akcipher_testvec *vecs)
1778 struct akcipher_request *req;
1779 void *outbuf_enc = NULL;
1780 void *outbuf_dec = NULL;
1781 struct tcrypt_result result;
1782 unsigned int out_len_max, out_len = 0;
1784 struct scatterlist src, dst, src_tab[2];
1786 req = akcipher_request_alloc(tfm, GFP_KERNEL);
1790 init_completion(&result.completion);
/* Load whichever key the vector provides: public or private. */
1792 if (vecs->public_key_vec)
1793 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
1796 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
/* Output buffers sized to the transform's maximum result length. */
1801 out_len_max = crypto_akcipher_maxsize(tfm);
1802 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
/* Deliberately split the message across two sg entries (first 8 bytes,
 * then the rest) to exercise multi-entry scatterlist handling. */
1806 sg_init_table(src_tab, 2);
1807 sg_set_buf(&src_tab[0], vecs->m, 8);
1808 sg_set_buf(&src_tab[1], vecs->m + 8, vecs->m_size - 8);
1809 sg_init_one(&dst, outbuf_enc, out_len_max);
1810 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
1812 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1813 tcrypt_complete, &result);
1815 /* Run RSA encrypt - c = m^e mod n;*/
1816 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1818 pr_err("alg: rsa: encrypt test failed. err %d\n", err);
1821 if (req->dst_len != vecs->c_size) {
1822 pr_err("alg: rsa: encrypt test failed. Invalid output len\n");
1826 /* verify that encrypted message is equal to expected */
1827 if (memcmp(vecs->c, sg_virt(req->dst), vecs->c_size)) {
1828 pr_err("alg: rsa: encrypt test failed. Invalid output\n");
1832 /* Don't invoke decrypt for vectors with public key */
1833 if (vecs->public_key_vec) {
1837 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1842 sg_init_one(&src, vecs->c, vecs->c_size);
1843 sg_init_one(&dst, outbuf_dec, out_len_max);
1844 init_completion(&result.completion);
1845 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
1847 /* Run RSA decrypt - m = c^d mod n;*/
1848 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
1850 pr_err("alg: rsa: decrypt test failed. err %d\n", err);
1853 out_len = req->dst_len;
1854 if (out_len != vecs->m_size) {
1855 pr_err("alg: rsa: decrypt test failed. Invalid output len\n");
1859 /* verify that decrypted message is equal to the original msg */
1860 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) {
1861 pr_err("alg: rsa: decrypt test failed. Invalid output\n");
1868 akcipher_request_free(req);
/*
 * test_rsa() - run every RSA test vector in @vecs through do_test_rsa(),
 * reporting the index of the first failing vector.
 *
 * NOTE(review): elided listing -- intermediate lines are not shown.
 */
1872 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs,
1873 unsigned int tcount)
1877 for (i = 0; i < tcount; i++) {
1878 ret = do_test_rsa(tfm, vecs++);
1880 pr_err("alg: rsa: test failed on vector %d, err=%d\n",
/*
 * test_akcipher() - dispatch akcipher test vectors by algorithm name.
 * Currently only the "rsa" family is recognized.
 */
1888 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
1889 struct akcipher_testvec *vecs, unsigned int tcount)
1891 if (strncmp(alg, "rsa", 3) == 0)
1892 return test_rsa(tfm, vecs, tcount);
/*
 * alg_test_akcipher() - allocate the akcipher transform registered under
 * @driver and run the suite's vectors (if any) through test_akcipher().
 *
 * NOTE(review): elided listing -- intermediate lines are not shown.
 */
1897 static int alg_test_akcipher(const struct alg_test_desc *desc,
1898 const char *driver, u32 type, u32 mask)
1900 struct crypto_akcipher *tfm;
1903 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1905 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
1906 driver, PTR_ERR(tfm));
1907 return PTR_ERR(tfm);
/* A suite with no vectors is treated as nothing-to-test. */
1909 if (desc->suite.akcipher.vecs)
1910 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
1911 desc->suite.akcipher.count);
1913 crypto_free_akcipher(tfm);
/*
 * alg_test_null() - no-op test used by table entries for algorithms that
 * need no direct vectors (e.g. internal helper implementations).
 * NOTE(review): body elided in this listing; presumably returns 0.
 */
1917 static int alg_test_null(const struct alg_test_desc *desc,
1918 const char *driver, u32 type, u32 mask)
1923 /* Please keep this list sorted by algorithm name. */
/*
 * Self-test descriptor table: maps an algorithm name to its test function
 * and vector suite.  Sorted order is required by the binary search in
 * alg_find_test() and verified at runtime by alg_test_descs_check_order().
 * NOTE(review): elided listing -- entry braces and some .alg/.suite lines
 * between the excerpted lines are not shown.
 */
1924 static const struct alg_test_desc alg_test_descs[] = {
/* Internal-only helper implementations: tested via their wrappers, so null tests. */
1926 .alg = "__cbc-cast5-avx",
1927 .test = alg_test_null,
1929 .alg = "__cbc-cast6-avx",
1930 .test = alg_test_null,
1932 .alg = "__cbc-serpent-avx",
1933 .test = alg_test_null,
1935 .alg = "__cbc-serpent-avx2",
1936 .test = alg_test_null,
1938 .alg = "__cbc-serpent-sse2",
1939 .test = alg_test_null,
1941 .alg = "__cbc-twofish-avx",
1942 .test = alg_test_null,
1944 .alg = "__driver-cbc-aes-aesni",
1945 .test = alg_test_null,
1948 .alg = "__driver-cbc-camellia-aesni",
1949 .test = alg_test_null,
1951 .alg = "__driver-cbc-camellia-aesni-avx2",
1952 .test = alg_test_null,
1954 .alg = "__driver-cbc-cast5-avx",
1955 .test = alg_test_null,
1957 .alg = "__driver-cbc-cast6-avx",
1958 .test = alg_test_null,
1960 .alg = "__driver-cbc-serpent-avx",
1961 .test = alg_test_null,
1963 .alg = "__driver-cbc-serpent-avx2",
1964 .test = alg_test_null,
1966 .alg = "__driver-cbc-serpent-sse2",
1967 .test = alg_test_null,
1969 .alg = "__driver-cbc-twofish-avx",
1970 .test = alg_test_null,
1972 .alg = "__driver-ecb-aes-aesni",
1973 .test = alg_test_null,
1976 .alg = "__driver-ecb-camellia-aesni",
1977 .test = alg_test_null,
1979 .alg = "__driver-ecb-camellia-aesni-avx2",
1980 .test = alg_test_null,
1982 .alg = "__driver-ecb-cast5-avx",
1983 .test = alg_test_null,
1985 .alg = "__driver-ecb-cast6-avx",
1986 .test = alg_test_null,
1988 .alg = "__driver-ecb-serpent-avx",
1989 .test = alg_test_null,
1991 .alg = "__driver-ecb-serpent-avx2",
1992 .test = alg_test_null,
1994 .alg = "__driver-ecb-serpent-sse2",
1995 .test = alg_test_null,
1997 .alg = "__driver-ecb-twofish-avx",
1998 .test = alg_test_null,
2000 .alg = "__driver-gcm-aes-aesni",
2001 .test = alg_test_null,
2004 .alg = "__ghash-pclmulqdqni",
2005 .test = alg_test_null,
/* Deterministic RNG (ANSI X9.31 CPRNG). */
2008 .alg = "ansi_cprng",
2009 .test = alg_test_cprng,
2012 .vecs = ansi_cprng_aes_tv_template,
2013 .count = ANSI_CPRNG_AES_TEST_VECTORS
/* AEAD authenc() combinations: HMAC + block cipher mode. */
2017 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2018 .test = alg_test_aead,
2022 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2023 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2026 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2027 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2032 .alg = "authenc(hmac(sha1),cbc(aes))",
2033 .test = alg_test_aead,
2038 hmac_sha1_aes_cbc_enc_tv_temp,
2040 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2045 .alg = "authenc(hmac(sha1),cbc(des))",
2046 .test = alg_test_aead,
2051 hmac_sha1_des_cbc_enc_tv_temp,
2053 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2058 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2059 .test = alg_test_aead,
2064 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2066 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2071 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2072 .test = alg_test_aead,
2077 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2079 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2083 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2085 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2090 .alg = "authenc(hmac(sha224),cbc(des))",
2091 .test = alg_test_aead,
2096 hmac_sha224_des_cbc_enc_tv_temp,
2098 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2103 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2104 .test = alg_test_aead,
2109 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2111 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2116 .alg = "authenc(hmac(sha256),cbc(aes))",
2117 .test = alg_test_aead,
2122 hmac_sha256_aes_cbc_enc_tv_temp,
2124 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2129 .alg = "authenc(hmac(sha256),cbc(des))",
2130 .test = alg_test_aead,
2135 hmac_sha256_des_cbc_enc_tv_temp,
2137 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2142 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2143 .test = alg_test_aead,
2148 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2150 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2155 .alg = "authenc(hmac(sha384),cbc(des))",
2156 .test = alg_test_aead,
2161 hmac_sha384_des_cbc_enc_tv_temp,
2163 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2168 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2169 .test = alg_test_aead,
2174 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2176 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2181 .alg = "authenc(hmac(sha512),cbc(aes))",
2182 .test = alg_test_aead,
2187 hmac_sha512_aes_cbc_enc_tv_temp,
2189 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2194 .alg = "authenc(hmac(sha512),cbc(des))",
2195 .test = alg_test_aead,
2200 hmac_sha512_des_cbc_enc_tv_temp,
2202 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2207 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2208 .test = alg_test_aead,
2213 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2215 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
/* CBC-mode skciphers (enc/dec vector pairs). */
2221 .test = alg_test_skcipher,
2226 .vecs = aes_cbc_enc_tv_template,
2227 .count = AES_CBC_ENC_TEST_VECTORS
2230 .vecs = aes_cbc_dec_tv_template,
2231 .count = AES_CBC_DEC_TEST_VECTORS
2236 .alg = "cbc(anubis)",
2237 .test = alg_test_skcipher,
2241 .vecs = anubis_cbc_enc_tv_template,
2242 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2245 .vecs = anubis_cbc_dec_tv_template,
2246 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2251 .alg = "cbc(blowfish)",
2252 .test = alg_test_skcipher,
2256 .vecs = bf_cbc_enc_tv_template,
2257 .count = BF_CBC_ENC_TEST_VECTORS
2260 .vecs = bf_cbc_dec_tv_template,
2261 .count = BF_CBC_DEC_TEST_VECTORS
2266 .alg = "cbc(camellia)",
2267 .test = alg_test_skcipher,
2271 .vecs = camellia_cbc_enc_tv_template,
2272 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2275 .vecs = camellia_cbc_dec_tv_template,
2276 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2281 .alg = "cbc(cast5)",
2282 .test = alg_test_skcipher,
2286 .vecs = cast5_cbc_enc_tv_template,
2287 .count = CAST5_CBC_ENC_TEST_VECTORS
2290 .vecs = cast5_cbc_dec_tv_template,
2291 .count = CAST5_CBC_DEC_TEST_VECTORS
2296 .alg = "cbc(cast6)",
2297 .test = alg_test_skcipher,
2301 .vecs = cast6_cbc_enc_tv_template,
2302 .count = CAST6_CBC_ENC_TEST_VECTORS
2305 .vecs = cast6_cbc_dec_tv_template,
2306 .count = CAST6_CBC_DEC_TEST_VECTORS
2312 .test = alg_test_skcipher,
2316 .vecs = des_cbc_enc_tv_template,
2317 .count = DES_CBC_ENC_TEST_VECTORS
2320 .vecs = des_cbc_dec_tv_template,
2321 .count = DES_CBC_DEC_TEST_VECTORS
2326 .alg = "cbc(des3_ede)",
2327 .test = alg_test_skcipher,
2332 .vecs = des3_ede_cbc_enc_tv_template,
2333 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2336 .vecs = des3_ede_cbc_dec_tv_template,
2337 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2342 .alg = "cbc(serpent)",
2343 .test = alg_test_skcipher,
2347 .vecs = serpent_cbc_enc_tv_template,
2348 .count = SERPENT_CBC_ENC_TEST_VECTORS
2351 .vecs = serpent_cbc_dec_tv_template,
2352 .count = SERPENT_CBC_DEC_TEST_VECTORS
2357 .alg = "cbc(twofish)",
2358 .test = alg_test_skcipher,
2362 .vecs = tf_cbc_enc_tv_template,
2363 .count = TF_CBC_ENC_TEST_VECTORS
2366 .vecs = tf_cbc_dec_tv_template,
2367 .count = TF_CBC_DEC_TEST_VECTORS
2373 .test = alg_test_aead,
2378 .vecs = aes_ccm_enc_tv_template,
2379 .count = AES_CCM_ENC_TEST_VECTORS
2382 .vecs = aes_ccm_dec_tv_template,
2383 .count = AES_CCM_DEC_TEST_VECTORS
/* chacha20 is a stream cipher: the same vectors serve enc and dec. */
2389 .test = alg_test_skcipher,
2393 .vecs = chacha20_enc_tv_template,
2394 .count = CHACHA20_ENC_TEST_VECTORS
2397 .vecs = chacha20_enc_tv_template,
2398 .count = CHACHA20_ENC_TEST_VECTORS
2405 .test = alg_test_hash,
2408 .vecs = aes_cmac128_tv_template,
2409 .count = CMAC_AES_TEST_VECTORS
2413 .alg = "cmac(des3_ede)",
2415 .test = alg_test_hash,
2418 .vecs = des3_ede_cmac64_tv_template,
2419 .count = CMAC_DES3_EDE_TEST_VECTORS
2423 .alg = "compress_null",
2424 .test = alg_test_null,
2427 .test = alg_test_hash,
2430 .vecs = crc32_tv_template,
2431 .count = CRC32_TEST_VECTORS
2436 .test = alg_test_crc32c,
2440 .vecs = crc32c_tv_template,
2441 .count = CRC32C_TEST_VECTORS
2446 .test = alg_test_hash,
2450 .vecs = crct10dif_tv_template,
2451 .count = CRCT10DIF_TEST_VECTORS
2455 .alg = "cryptd(__driver-cbc-aes-aesni)",
2456 .test = alg_test_null,
2459 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2460 .test = alg_test_null,
2462 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2463 .test = alg_test_null,
2465 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2466 .test = alg_test_null,
2468 .alg = "cryptd(__driver-ecb-aes-aesni)",
2469 .test = alg_test_null,
2472 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2473 .test = alg_test_null,
2475 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2476 .test = alg_test_null,
2478 .alg = "cryptd(__driver-ecb-cast5-avx)",
2479 .test = alg_test_null,
2481 .alg = "cryptd(__driver-ecb-cast6-avx)",
2482 .test = alg_test_null,
2484 .alg = "cryptd(__driver-ecb-serpent-avx)",
2485 .test = alg_test_null,
2487 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2488 .test = alg_test_null,
2490 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2491 .test = alg_test_null,
2493 .alg = "cryptd(__driver-ecb-twofish-avx)",
2494 .test = alg_test_null,
2496 .alg = "cryptd(__driver-gcm-aes-aesni)",
2497 .test = alg_test_null,
2500 .alg = "cryptd(__ghash-pclmulqdqni)",
2501 .test = alg_test_null,
/* CTR-mode skciphers. */
2505 .test = alg_test_skcipher,
2510 .vecs = aes_ctr_enc_tv_template,
2511 .count = AES_CTR_ENC_TEST_VECTORS
2514 .vecs = aes_ctr_dec_tv_template,
2515 .count = AES_CTR_DEC_TEST_VECTORS
2520 .alg = "ctr(blowfish)",
2521 .test = alg_test_skcipher,
2525 .vecs = bf_ctr_enc_tv_template,
2526 .count = BF_CTR_ENC_TEST_VECTORS
2529 .vecs = bf_ctr_dec_tv_template,
2530 .count = BF_CTR_DEC_TEST_VECTORS
2535 .alg = "ctr(camellia)",
2536 .test = alg_test_skcipher,
2540 .vecs = camellia_ctr_enc_tv_template,
2541 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2544 .vecs = camellia_ctr_dec_tv_template,
2545 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2550 .alg = "ctr(cast5)",
2551 .test = alg_test_skcipher,
2555 .vecs = cast5_ctr_enc_tv_template,
2556 .count = CAST5_CTR_ENC_TEST_VECTORS
2559 .vecs = cast5_ctr_dec_tv_template,
2560 .count = CAST5_CTR_DEC_TEST_VECTORS
2565 .alg = "ctr(cast6)",
2566 .test = alg_test_skcipher,
2570 .vecs = cast6_ctr_enc_tv_template,
2571 .count = CAST6_CTR_ENC_TEST_VECTORS
2574 .vecs = cast6_ctr_dec_tv_template,
2575 .count = CAST6_CTR_DEC_TEST_VECTORS
2581 .test = alg_test_skcipher,
2585 .vecs = des_ctr_enc_tv_template,
2586 .count = DES_CTR_ENC_TEST_VECTORS
2589 .vecs = des_ctr_dec_tv_template,
2590 .count = DES_CTR_DEC_TEST_VECTORS
2595 .alg = "ctr(des3_ede)",
2596 .test = alg_test_skcipher,
2600 .vecs = des3_ede_ctr_enc_tv_template,
2601 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2604 .vecs = des3_ede_ctr_dec_tv_template,
2605 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2610 .alg = "ctr(serpent)",
2611 .test = alg_test_skcipher,
2615 .vecs = serpent_ctr_enc_tv_template,
2616 .count = SERPENT_CTR_ENC_TEST_VECTORS
2619 .vecs = serpent_ctr_dec_tv_template,
2620 .count = SERPENT_CTR_DEC_TEST_VECTORS
2625 .alg = "ctr(twofish)",
2626 .test = alg_test_skcipher,
2630 .vecs = tf_ctr_enc_tv_template,
2631 .count = TF_CTR_ENC_TEST_VECTORS
2634 .vecs = tf_ctr_dec_tv_template,
2635 .count = TF_CTR_DEC_TEST_VECTORS
2640 .alg = "cts(cbc(aes))",
2641 .test = alg_test_skcipher,
2645 .vecs = cts_mode_enc_tv_template,
2646 .count = CTS_MODE_ENC_TEST_VECTORS
2649 .vecs = cts_mode_dec_tv_template,
2650 .count = CTS_MODE_DEC_TEST_VECTORS
/* Compression algorithms (comp/decomp vector pairs). */
2656 .test = alg_test_comp,
2661 .vecs = deflate_comp_tv_template,
2662 .count = DEFLATE_COMP_TEST_VECTORS
2665 .vecs = deflate_decomp_tv_template,
2666 .count = DEFLATE_DECOMP_TEST_VECTORS
2671 .alg = "digest_null",
2672 .test = alg_test_null,
/* NIST SP 800-90A DRBG instantiations; "nopr"/"pr" = without/with
 * prediction resistance.  Some variants are covered indirectly. */
2674 .alg = "drbg_nopr_ctr_aes128",
2675 .test = alg_test_drbg,
2679 .vecs = drbg_nopr_ctr_aes128_tv_template,
2680 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2684 .alg = "drbg_nopr_ctr_aes192",
2685 .test = alg_test_drbg,
2689 .vecs = drbg_nopr_ctr_aes192_tv_template,
2690 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2694 .alg = "drbg_nopr_ctr_aes256",
2695 .test = alg_test_drbg,
2699 .vecs = drbg_nopr_ctr_aes256_tv_template,
2700 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2705 * There is no need to specifically test the DRBG with every
2706 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2708 .alg = "drbg_nopr_hmac_sha1",
2710 .test = alg_test_null,
2712 .alg = "drbg_nopr_hmac_sha256",
2713 .test = alg_test_drbg,
2717 .vecs = drbg_nopr_hmac_sha256_tv_template,
2719 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2723 /* covered by drbg_nopr_hmac_sha256 test */
2724 .alg = "drbg_nopr_hmac_sha384",
2726 .test = alg_test_null,
2728 .alg = "drbg_nopr_hmac_sha512",
2729 .test = alg_test_null,
2732 .alg = "drbg_nopr_sha1",
2734 .test = alg_test_null,
2736 .alg = "drbg_nopr_sha256",
2737 .test = alg_test_drbg,
2741 .vecs = drbg_nopr_sha256_tv_template,
2742 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2746 /* covered by drbg_nopr_sha256 test */
2747 .alg = "drbg_nopr_sha384",
2749 .test = alg_test_null,
2751 .alg = "drbg_nopr_sha512",
2753 .test = alg_test_null,
2755 .alg = "drbg_pr_ctr_aes128",
2756 .test = alg_test_drbg,
2760 .vecs = drbg_pr_ctr_aes128_tv_template,
2761 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2765 /* covered by drbg_pr_ctr_aes128 test */
2766 .alg = "drbg_pr_ctr_aes192",
2768 .test = alg_test_null,
2770 .alg = "drbg_pr_ctr_aes256",
2772 .test = alg_test_null,
2774 .alg = "drbg_pr_hmac_sha1",
2776 .test = alg_test_null,
2778 .alg = "drbg_pr_hmac_sha256",
2779 .test = alg_test_drbg,
2783 .vecs = drbg_pr_hmac_sha256_tv_template,
2784 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2788 /* covered by drbg_pr_hmac_sha256 test */
2789 .alg = "drbg_pr_hmac_sha384",
2791 .test = alg_test_null,
2793 .alg = "drbg_pr_hmac_sha512",
2794 .test = alg_test_null,
2797 .alg = "drbg_pr_sha1",
2799 .test = alg_test_null,
2801 .alg = "drbg_pr_sha256",
2802 .test = alg_test_drbg,
2806 .vecs = drbg_pr_sha256_tv_template,
2807 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2811 /* covered by drbg_pr_sha256 test */
2812 .alg = "drbg_pr_sha384",
2814 .test = alg_test_null,
2816 .alg = "drbg_pr_sha512",
2818 .test = alg_test_null,
/* ECB-mode skciphers. */
2820 .alg = "ecb(__aes-aesni)",
2821 .test = alg_test_null,
2825 .test = alg_test_skcipher,
2830 .vecs = aes_enc_tv_template,
2831 .count = AES_ENC_TEST_VECTORS
2834 .vecs = aes_dec_tv_template,
2835 .count = AES_DEC_TEST_VECTORS
2840 .alg = "ecb(anubis)",
2841 .test = alg_test_skcipher,
2845 .vecs = anubis_enc_tv_template,
2846 .count = ANUBIS_ENC_TEST_VECTORS
2849 .vecs = anubis_dec_tv_template,
2850 .count = ANUBIS_DEC_TEST_VECTORS
2856 .test = alg_test_skcipher,
2860 .vecs = arc4_enc_tv_template,
2861 .count = ARC4_ENC_TEST_VECTORS
2864 .vecs = arc4_dec_tv_template,
2865 .count = ARC4_DEC_TEST_VECTORS
2870 .alg = "ecb(blowfish)",
2871 .test = alg_test_skcipher,
2875 .vecs = bf_enc_tv_template,
2876 .count = BF_ENC_TEST_VECTORS
2879 .vecs = bf_dec_tv_template,
2880 .count = BF_DEC_TEST_VECTORS
2885 .alg = "ecb(camellia)",
2886 .test = alg_test_skcipher,
2890 .vecs = camellia_enc_tv_template,
2891 .count = CAMELLIA_ENC_TEST_VECTORS
2894 .vecs = camellia_dec_tv_template,
2895 .count = CAMELLIA_DEC_TEST_VECTORS
2900 .alg = "ecb(cast5)",
2901 .test = alg_test_skcipher,
2905 .vecs = cast5_enc_tv_template,
2906 .count = CAST5_ENC_TEST_VECTORS
2909 .vecs = cast5_dec_tv_template,
2910 .count = CAST5_DEC_TEST_VECTORS
2915 .alg = "ecb(cast6)",
2916 .test = alg_test_skcipher,
2920 .vecs = cast6_enc_tv_template,
2921 .count = CAST6_ENC_TEST_VECTORS
2924 .vecs = cast6_dec_tv_template,
2925 .count = CAST6_DEC_TEST_VECTORS
2930 .alg = "ecb(cipher_null)",
2931 .test = alg_test_null,
2934 .test = alg_test_skcipher,
2938 .vecs = des_enc_tv_template,
2939 .count = DES_ENC_TEST_VECTORS
2942 .vecs = des_dec_tv_template,
2943 .count = DES_DEC_TEST_VECTORS
2948 .alg = "ecb(des3_ede)",
2949 .test = alg_test_skcipher,
2954 .vecs = des3_ede_enc_tv_template,
2955 .count = DES3_EDE_ENC_TEST_VECTORS
2958 .vecs = des3_ede_dec_tv_template,
2959 .count = DES3_EDE_DEC_TEST_VECTORS
2964 .alg = "ecb(fcrypt)",
2965 .test = alg_test_skcipher,
2969 .vecs = fcrypt_pcbc_enc_tv_template,
2973 .vecs = fcrypt_pcbc_dec_tv_template,
2979 .alg = "ecb(khazad)",
2980 .test = alg_test_skcipher,
2984 .vecs = khazad_enc_tv_template,
2985 .count = KHAZAD_ENC_TEST_VECTORS
2988 .vecs = khazad_dec_tv_template,
2989 .count = KHAZAD_DEC_TEST_VECTORS
2995 .test = alg_test_skcipher,
2999 .vecs = seed_enc_tv_template,
3000 .count = SEED_ENC_TEST_VECTORS
3003 .vecs = seed_dec_tv_template,
3004 .count = SEED_DEC_TEST_VECTORS
3009 .alg = "ecb(serpent)",
3010 .test = alg_test_skcipher,
3014 .vecs = serpent_enc_tv_template,
3015 .count = SERPENT_ENC_TEST_VECTORS
3018 .vecs = serpent_dec_tv_template,
3019 .count = SERPENT_DEC_TEST_VECTORS
3025 .test = alg_test_skcipher,
3029 .vecs = tea_enc_tv_template,
3030 .count = TEA_ENC_TEST_VECTORS
3033 .vecs = tea_dec_tv_template,
3034 .count = TEA_DEC_TEST_VECTORS
3039 .alg = "ecb(tnepres)",
3040 .test = alg_test_skcipher,
3044 .vecs = tnepres_enc_tv_template,
3045 .count = TNEPRES_ENC_TEST_VECTORS
3048 .vecs = tnepres_dec_tv_template,
3049 .count = TNEPRES_DEC_TEST_VECTORS
3054 .alg = "ecb(twofish)",
3055 .test = alg_test_skcipher,
3059 .vecs = tf_enc_tv_template,
3060 .count = TF_ENC_TEST_VECTORS
3063 .vecs = tf_dec_tv_template,
3064 .count = TF_DEC_TEST_VECTORS
3070 .test = alg_test_skcipher,
3074 .vecs = xeta_enc_tv_template,
3075 .count = XETA_ENC_TEST_VECTORS
3078 .vecs = xeta_dec_tv_template,
3079 .count = XETA_DEC_TEST_VECTORS
3085 .test = alg_test_skcipher,
3089 .vecs = xtea_enc_tv_template,
3090 .count = XTEA_ENC_TEST_VECTORS
3093 .vecs = xtea_dec_tv_template,
3094 .count = XTEA_DEC_TEST_VECTORS
3100 .test = alg_test_aead,
3105 .vecs = aes_gcm_enc_tv_template,
3106 .count = AES_GCM_ENC_TEST_VECTORS
3109 .vecs = aes_gcm_dec_tv_template,
3110 .count = AES_GCM_DEC_TEST_VECTORS
3116 .test = alg_test_hash,
3120 .vecs = ghash_tv_template,
3121 .count = GHASH_TEST_VECTORS
/* HMAC / keyed-hash entries. */
3125 .alg = "hmac(crc32)",
3126 .test = alg_test_hash,
3129 .vecs = bfin_crc_tv_template,
3130 .count = BFIN_CRC_TEST_VECTORS
3135 .test = alg_test_hash,
3138 .vecs = hmac_md5_tv_template,
3139 .count = HMAC_MD5_TEST_VECTORS
3143 .alg = "hmac(rmd128)",
3144 .test = alg_test_hash,
3147 .vecs = hmac_rmd128_tv_template,
3148 .count = HMAC_RMD128_TEST_VECTORS
3152 .alg = "hmac(rmd160)",
3153 .test = alg_test_hash,
3156 .vecs = hmac_rmd160_tv_template,
3157 .count = HMAC_RMD160_TEST_VECTORS
3161 .alg = "hmac(sha1)",
3162 .test = alg_test_hash,
3166 .vecs = hmac_sha1_tv_template,
3167 .count = HMAC_SHA1_TEST_VECTORS
3171 .alg = "hmac(sha224)",
3172 .test = alg_test_hash,
3176 .vecs = hmac_sha224_tv_template,
3177 .count = HMAC_SHA224_TEST_VECTORS
3181 .alg = "hmac(sha256)",
3182 .test = alg_test_hash,
3186 .vecs = hmac_sha256_tv_template,
3187 .count = HMAC_SHA256_TEST_VECTORS
3191 .alg = "hmac(sha384)",
3192 .test = alg_test_hash,
3196 .vecs = hmac_sha384_tv_template,
3197 .count = HMAC_SHA384_TEST_VECTORS
3201 .alg = "hmac(sha512)",
3202 .test = alg_test_hash,
3206 .vecs = hmac_sha512_tv_template,
3207 .count = HMAC_SHA512_TEST_VECTORS
3211 .alg = "jitterentropy_rng",
3213 .test = alg_test_null,
3216 .test = alg_test_skcipher,
3221 .vecs = aes_kw_enc_tv_template,
3222 .count = ARRAY_SIZE(aes_kw_enc_tv_template)
3225 .vecs = aes_kw_dec_tv_template,
3226 .count = ARRAY_SIZE(aes_kw_dec_tv_template)
/* LRW-mode skciphers. */
3232 .test = alg_test_skcipher,
3236 .vecs = aes_lrw_enc_tv_template,
3237 .count = AES_LRW_ENC_TEST_VECTORS
3240 .vecs = aes_lrw_dec_tv_template,
3241 .count = AES_LRW_DEC_TEST_VECTORS
3246 .alg = "lrw(camellia)",
3247 .test = alg_test_skcipher,
3251 .vecs = camellia_lrw_enc_tv_template,
3252 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3255 .vecs = camellia_lrw_dec_tv_template,
3256 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3261 .alg = "lrw(cast6)",
3262 .test = alg_test_skcipher,
3266 .vecs = cast6_lrw_enc_tv_template,
3267 .count = CAST6_LRW_ENC_TEST_VECTORS
3270 .vecs = cast6_lrw_dec_tv_template,
3271 .count = CAST6_LRW_DEC_TEST_VECTORS
3276 .alg = "lrw(serpent)",
3277 .test = alg_test_skcipher,
3281 .vecs = serpent_lrw_enc_tv_template,
3282 .count = SERPENT_LRW_ENC_TEST_VECTORS
3285 .vecs = serpent_lrw_dec_tv_template,
3286 .count = SERPENT_LRW_DEC_TEST_VECTORS
3291 .alg = "lrw(twofish)",
3292 .test = alg_test_skcipher,
3296 .vecs = tf_lrw_enc_tv_template,
3297 .count = TF_LRW_ENC_TEST_VECTORS
3300 .vecs = tf_lrw_dec_tv_template,
3301 .count = TF_LRW_DEC_TEST_VECTORS
3307 .test = alg_test_comp,
3312 .vecs = lz4_comp_tv_template,
3313 .count = LZ4_COMP_TEST_VECTORS
3316 .vecs = lz4_decomp_tv_template,
3317 .count = LZ4_DECOMP_TEST_VECTORS
3323 .test = alg_test_comp,
3328 .vecs = lz4hc_comp_tv_template,
3329 .count = LZ4HC_COMP_TEST_VECTORS
3332 .vecs = lz4hc_decomp_tv_template,
3333 .count = LZ4HC_DECOMP_TEST_VECTORS
3339 .test = alg_test_comp,
3344 .vecs = lzo_comp_tv_template,
3345 .count = LZO_COMP_TEST_VECTORS
3348 .vecs = lzo_decomp_tv_template,
3349 .count = LZO_DECOMP_TEST_VECTORS
3355 .test = alg_test_hash,
3358 .vecs = md4_tv_template,
3359 .count = MD4_TEST_VECTORS
3364 .test = alg_test_hash,
3367 .vecs = md5_tv_template,
3368 .count = MD5_TEST_VECTORS
3372 .alg = "michael_mic",
3373 .test = alg_test_hash,
3376 .vecs = michael_mic_tv_template,
3377 .count = MICHAEL_MIC_TEST_VECTORS
3382 .test = alg_test_skcipher,
3387 .vecs = aes_ofb_enc_tv_template,
3388 .count = AES_OFB_ENC_TEST_VECTORS
3391 .vecs = aes_ofb_dec_tv_template,
3392 .count = AES_OFB_DEC_TEST_VECTORS
3397 .alg = "pcbc(fcrypt)",
3398 .test = alg_test_skcipher,
3402 .vecs = fcrypt_pcbc_enc_tv_template,
3403 .count = FCRYPT_ENC_TEST_VECTORS
3406 .vecs = fcrypt_pcbc_dec_tv_template,
3407 .count = FCRYPT_DEC_TEST_VECTORS
3413 .test = alg_test_hash,
3416 .vecs = poly1305_tv_template,
3417 .count = POLY1305_TEST_VECTORS
/* RFC-specified AEAD/skcipher wrappers. */
3421 .alg = "rfc3686(ctr(aes))",
3422 .test = alg_test_skcipher,
3427 .vecs = aes_ctr_rfc3686_enc_tv_template,
3428 .count = AES_CTR_3686_ENC_TEST_VECTORS
3431 .vecs = aes_ctr_rfc3686_dec_tv_template,
3432 .count = AES_CTR_3686_DEC_TEST_VECTORS
3437 .alg = "rfc4106(gcm(aes))",
3438 .test = alg_test_aead,
3443 .vecs = aes_gcm_rfc4106_enc_tv_template,
3444 .count = AES_GCM_4106_ENC_TEST_VECTORS
3447 .vecs = aes_gcm_rfc4106_dec_tv_template,
3448 .count = AES_GCM_4106_DEC_TEST_VECTORS
3453 .alg = "rfc4309(ccm(aes))",
3454 .test = alg_test_aead,
3459 .vecs = aes_ccm_rfc4309_enc_tv_template,
3460 .count = AES_CCM_4309_ENC_TEST_VECTORS
3463 .vecs = aes_ccm_rfc4309_dec_tv_template,
3464 .count = AES_CCM_4309_DEC_TEST_VECTORS
3469 .alg = "rfc4543(gcm(aes))",
3470 .test = alg_test_aead,
3474 .vecs = aes_gcm_rfc4543_enc_tv_template,
3475 .count = AES_GCM_4543_ENC_TEST_VECTORS
3478 .vecs = aes_gcm_rfc4543_dec_tv_template,
3479 .count = AES_GCM_4543_DEC_TEST_VECTORS
3484 .alg = "rfc7539(chacha20,poly1305)",
3485 .test = alg_test_aead,
3489 .vecs = rfc7539_enc_tv_template,
3490 .count = RFC7539_ENC_TEST_VECTORS
3493 .vecs = rfc7539_dec_tv_template,
3494 .count = RFC7539_DEC_TEST_VECTORS
3499 .alg = "rfc7539esp(chacha20,poly1305)",
3500 .test = alg_test_aead,
3504 .vecs = rfc7539esp_enc_tv_template,
3505 .count = RFC7539ESP_ENC_TEST_VECTORS
3508 .vecs = rfc7539esp_dec_tv_template,
3509 .count = RFC7539ESP_DEC_TEST_VECTORS
3515 .test = alg_test_hash,
3518 .vecs = rmd128_tv_template,
3519 .count = RMD128_TEST_VECTORS
3524 .test = alg_test_hash,
3527 .vecs = rmd160_tv_template,
3528 .count = RMD160_TEST_VECTORS
3533 .test = alg_test_hash,
3536 .vecs = rmd256_tv_template,
3537 .count = RMD256_TEST_VECTORS
3542 .test = alg_test_hash,
3545 .vecs = rmd320_tv_template,
3546 .count = RMD320_TEST_VECTORS
3551 .test = alg_test_akcipher,
3555 .vecs = rsa_tv_template,
3556 .count = RSA_TEST_VECTORS
3561 .test = alg_test_skcipher,
3565 .vecs = salsa20_stream_enc_tv_template,
3566 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3572 .test = alg_test_hash,
3576 .vecs = sha1_tv_template,
3577 .count = SHA1_TEST_VECTORS
3582 .test = alg_test_hash,
3586 .vecs = sha224_tv_template,
3587 .count = SHA224_TEST_VECTORS
3592 .test = alg_test_hash,
3596 .vecs = sha256_tv_template,
3597 .count = SHA256_TEST_VECTORS
3602 .test = alg_test_hash,
3606 .vecs = sha384_tv_template,
3607 .count = SHA384_TEST_VECTORS
3612 .test = alg_test_hash,
3616 .vecs = sha512_tv_template,
3617 .count = SHA512_TEST_VECTORS
3622 .test = alg_test_hash,
3625 .vecs = tgr128_tv_template,
3626 .count = TGR128_TEST_VECTORS
3631 .test = alg_test_hash,
3634 .vecs = tgr160_tv_template,
3635 .count = TGR160_TEST_VECTORS
3640 .test = alg_test_hash,
3643 .vecs = tgr192_tv_template,
3644 .count = TGR192_TEST_VECTORS
3649 .test = alg_test_hash,
3652 .vecs = aes_vmac128_tv_template,
3653 .count = VMAC_AES_TEST_VECTORS
3658 .test = alg_test_hash,
3661 .vecs = wp256_tv_template,
3662 .count = WP256_TEST_VECTORS
3667 .test = alg_test_hash,
3670 .vecs = wp384_tv_template,
3671 .count = WP384_TEST_VECTORS
3676 .test = alg_test_hash,
3679 .vecs = wp512_tv_template,
3680 .count = WP512_TEST_VECTORS
3685 .test = alg_test_hash,
3688 .vecs = aes_xcbc128_tv_template,
3689 .count = XCBC_AES_TEST_VECTORS
/* XTS-mode skciphers. */
3694 .test = alg_test_skcipher,
3699 .vecs = aes_xts_enc_tv_template,
3700 .count = AES_XTS_ENC_TEST_VECTORS
3703 .vecs = aes_xts_dec_tv_template,
3704 .count = AES_XTS_DEC_TEST_VECTORS
3709 .alg = "xts(camellia)",
3710 .test = alg_test_skcipher,
3714 .vecs = camellia_xts_enc_tv_template,
3715 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3718 .vecs = camellia_xts_dec_tv_template,
3719 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3724 .alg = "xts(cast6)",
3725 .test = alg_test_skcipher,
3729 .vecs = cast6_xts_enc_tv_template,
3730 .count = CAST6_XTS_ENC_TEST_VECTORS
3733 .vecs = cast6_xts_dec_tv_template,
3734 .count = CAST6_XTS_DEC_TEST_VECTORS
3739 .alg = "xts(serpent)",
3740 .test = alg_test_skcipher,
3744 .vecs = serpent_xts_enc_tv_template,
3745 .count = SERPENT_XTS_ENC_TEST_VECTORS
3748 .vecs = serpent_xts_dec_tv_template,
3749 .count = SERPENT_XTS_DEC_TEST_VECTORS
3754 .alg = "xts(twofish)",
3755 .test = alg_test_skcipher,
3759 .vecs = tf_xts_enc_tv_template,
3760 .count = TF_XTS_ENC_TEST_VECTORS
3763 .vecs = tf_xts_dec_tv_template,
3764 .count = TF_XTS_DEC_TEST_VECTORS
/* Set once the table order has been validated; makes the check one-shot. */
3771 static bool alg_test_descs_checked;
/*
 * alg_test_descs_check_order() - verify at first use that alg_test_descs[]
 * is strictly sorted by .alg (required by the binary search in
 * alg_find_test()) and contains no duplicates; WARN and log otherwise.
 */
3773 static void alg_test_descs_check_order(void)
3777 /* only check once */
3778 if (alg_test_descs_checked)
3781 alg_test_descs_checked = true;
/* Compare each adjacent pair; diff > 0 means out of order, 0 a duplicate. */
3783 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3784 int diff = strcmp(alg_test_descs[i - 1].alg,
3785 alg_test_descs[i].alg);
3787 if (WARN_ON(diff > 0)) {
3788 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3789 alg_test_descs[i - 1].alg,
3790 alg_test_descs[i].alg);
3793 if (WARN_ON(diff == 0)) {
3794 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3795 alg_test_descs[i].alg);
/*
 * alg_find_test() - binary-search alg_test_descs[] for @alg.
 * NOTE(review): tail of the loop (narrowing and return) is elided in this
 * listing; presumably returns the matching index or a negative value.
 */
3800 static int alg_find_test(const char *alg)
3803 int end = ARRAY_SIZE(alg_test_descs);
3805 while (start < end) {
3806 int i = (start + end) / 2;
3807 int diff = strcmp(alg_test_descs[i].alg, alg);
/*
 * alg_test() - main self-test entry point called when an algorithm is
 * registered.  Looks up tests for both the generic name @alg and the
 * implementation name @driver, runs whichever exist, and enforces FIPS
 * policy (skip non-allowed algorithms; panic on failure in FIPS mode).
 *
 * NOTE(review): elided listing -- intermediate lines are not shown.
 */
3825 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3831 alg_test_descs_check_order();
/* Bare cipher algorithms are tested through their ecb(...) wrapper entry. */
3833 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3834 char nalg[CRYPTO_MAX_ALG_NAME];
3836 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3838 return -ENAMETOOLONG;
3840 i = alg_find_test(nalg);
3844 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3847 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
/* Look up tests under both the generic and the driver-specific name. */
3851 i = alg_find_test(alg);
3852 j = alg_find_test(driver);
3856 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3857 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3862 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
/* Avoid running the same descriptor twice when both names resolve to it. */
3864 if (j >= 0 && j != i)
3865 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
/* FIPS mode: any self-test failure is fatal; success is logged. */
3869 if (fips_enabled && rc)
3870 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3872 if (fips_enabled && !rc)
3873 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3878 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3884 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3886 EXPORT_SYMBOL_GPL(alg_test);