arch/x86/crypto/blake2s-glue.c
// SPDX-License-Identifier: GPL-2.0 OR MIT
/*
 * Copyright (C) 2015-2019 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 */

#include <crypto/internal/blake2s.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/hash.h>

#include <linux/types.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>

#include <asm/cpufeature.h>
#include <asm/fpu/api.h>
#include <asm/processor.h>
#include <asm/simd.h>

asmlinkage void blake2s_compress_ssse3(struct blake2s_state *state,
                                       const u8 *block, const size_t nblocks,
                                       const u32 inc);
asmlinkage void blake2s_compress_avx512(struct blake2s_state *state,
                                        const u8 *block, const size_t nblocks,
                                        const u32 inc);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(blake2s_use_ssse3);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(blake2s_use_avx512);

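/*
 * Compress nblocks message blocks, preferring the AVX-512 routine, then the
 * SSSE3 routine, and falling back to the generic C implementation when the
 * CPU lacks the features or the FPU cannot be used in the current context.
 * SIMD work is bounded to one page of input per kernel_fpu_begin()/end()
 * pair so that preemption is never disabled for too long.
 */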
void blake2s_compress_arch(struct blake2s_state *state,
                           const u8 *block, size_t nblocks,
                           const u32 inc)
{
        /* SIMD disables preemption, so relax after processing each page. */
        BUILD_BUG_ON(PAGE_SIZE / BLAKE2S_BLOCK_SIZE < 8);

        if (!static_branch_likely(&blake2s_use_ssse3) || !crypto_simd_usable()) {
                blake2s_compress_generic(state, block, nblocks, inc);
                return;
        }

        for (;;) {
                const size_t blocks = min_t(size_t, nblocks,
                                            PAGE_SIZE / BLAKE2S_BLOCK_SIZE);

                kernel_fpu_begin();
                if (IS_ENABLED(CONFIG_AS_AVX512) &&
                    static_branch_likely(&blake2s_use_avx512))
                        blake2s_compress_avx512(state, block, blocks, inc);
                else
                        blake2s_compress_ssse3(state, block, blocks, inc);
                kernel_fpu_end();

                nblocks -= blocks;
                if (!nblocks)
                        break;
                block += blocks * BLAKE2S_BLOCK_SIZE;
        }
}
EXPORT_SYMBOL(blake2s_compress_arch);

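/* Accept keys of 1..BLAKE2S_KEY_SIZE bytes and stash them in the tfm context. */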
static int crypto_blake2s_setkey(struct crypto_shash *tfm, const u8 *key,
                                 unsigned int keylen)
{
        struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(tfm);

        if (keylen == 0 || keylen > BLAKE2S_KEY_SIZE) {
                crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        memcpy(tctx->key, key, keylen);
        tctx->keylen = keylen;

        return 0;
}

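/* Start a hash of the requested digest size, keyed if a key has been set. */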
static int crypto_blake2s_init(struct shash_desc *desc)
{
        struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct blake2s_state *state = shash_desc_ctx(desc);
        const int outlen = crypto_shash_digestsize(desc->tfm);

        if (tctx->keylen)
                blake2s_init_key(state, outlen, tctx->key, tctx->keylen);
        else
                blake2s_init(state, outlen);

        return 0;
}

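/*
 * Absorb input data: fill and compress the internal buffer first, then
 * compress whole blocks directly from the input. Between one byte and one
 * full block is always left buffered so that crypto_blake2s_final() can
 * flag it as the last block.
 */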
static int crypto_blake2s_update(struct shash_desc *desc, const u8 *in,
                                 unsigned int inlen)
{
        struct blake2s_state *state = shash_desc_ctx(desc);
        const size_t fill = BLAKE2S_BLOCK_SIZE - state->buflen;

        if (unlikely(!inlen))
                return 0;
        if (inlen > fill) {
                memcpy(state->buf + state->buflen, in, fill);
                blake2s_compress_arch(state, state->buf, 1, BLAKE2S_BLOCK_SIZE);
                state->buflen = 0;
                in += fill;
                inlen -= fill;
        }
        if (inlen > BLAKE2S_BLOCK_SIZE) {
                const size_t nblocks = DIV_ROUND_UP(inlen, BLAKE2S_BLOCK_SIZE);
                /* Hash one less (full) block than strictly possible */
                blake2s_compress_arch(state, in, nblocks - 1, BLAKE2S_BLOCK_SIZE);
                in += BLAKE2S_BLOCK_SIZE * (nblocks - 1);
                inlen -= BLAKE2S_BLOCK_SIZE * (nblocks - 1);
        }
        memcpy(state->buf + state->buflen, in, inlen);
        state->buflen += inlen;

        return 0;
}

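/*
 * Zero-pad and compress the buffered last block with the final-block flag
 * set, then copy out the little-endian digest and wipe the hash state.
 */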
static int crypto_blake2s_final(struct shash_desc *desc, u8 *out)
{
        struct blake2s_state *state = shash_desc_ctx(desc);

        blake2s_set_lastblock(state);
        memset(state->buf + state->buflen, 0,
               BLAKE2S_BLOCK_SIZE - state->buflen); /* Padding */
        blake2s_compress_arch(state, state->buf, 1, state->buflen);
        cpu_to_le32_array(state->h, ARRAY_SIZE(state->h));
        memcpy(out, state->h, state->outlen);
        memzero_explicit(state, sizeof(*state));

        return 0;
}

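/*
 * shash descriptors for the four standard BLAKE2s digest sizes (128, 160,
 * 224 and 256 bits); they differ only in name and digestsize and share the
 * same setkey/init/update/final callbacks.
 */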
static struct shash_alg blake2s_algs[] = {{
        .base.cra_name          = "blake2s-128",
        .base.cra_driver_name   = "blake2s-128-x86",
        .base.cra_flags         = CRYPTO_ALG_OPTIONAL_KEY,
        .base.cra_ctxsize       = sizeof(struct blake2s_tfm_ctx),
        .base.cra_priority      = 200,
        .base.cra_blocksize     = BLAKE2S_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = BLAKE2S_128_HASH_SIZE,
        .setkey                 = crypto_blake2s_setkey,
        .init                   = crypto_blake2s_init,
        .update                 = crypto_blake2s_update,
        .final                  = crypto_blake2s_final,
        .descsize               = sizeof(struct blake2s_state),
}, {
        .base.cra_name          = "blake2s-160",
        .base.cra_driver_name   = "blake2s-160-x86",
        .base.cra_flags         = CRYPTO_ALG_OPTIONAL_KEY,
        .base.cra_ctxsize       = sizeof(struct blake2s_tfm_ctx),
        .base.cra_priority      = 200,
        .base.cra_blocksize     = BLAKE2S_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = BLAKE2S_160_HASH_SIZE,
        .setkey                 = crypto_blake2s_setkey,
        .init                   = crypto_blake2s_init,
        .update                 = crypto_blake2s_update,
        .final                  = crypto_blake2s_final,
        .descsize               = sizeof(struct blake2s_state),
}, {
        .base.cra_name          = "blake2s-224",
        .base.cra_driver_name   = "blake2s-224-x86",
        .base.cra_flags         = CRYPTO_ALG_OPTIONAL_KEY,
        .base.cra_ctxsize       = sizeof(struct blake2s_tfm_ctx),
        .base.cra_priority      = 200,
        .base.cra_blocksize     = BLAKE2S_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = BLAKE2S_224_HASH_SIZE,
        .setkey                 = crypto_blake2s_setkey,
        .init                   = crypto_blake2s_init,
        .update                 = crypto_blake2s_update,
        .final                  = crypto_blake2s_final,
        .descsize               = sizeof(struct blake2s_state),
}, {
        .base.cra_name          = "blake2s-256",
        .base.cra_driver_name   = "blake2s-256-x86",
        .base.cra_flags         = CRYPTO_ALG_OPTIONAL_KEY,
        .base.cra_ctxsize       = sizeof(struct blake2s_tfm_ctx),
        .base.cra_priority      = 200,
        .base.cra_blocksize     = BLAKE2S_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = BLAKE2S_256_HASH_SIZE,
        .setkey                 = crypto_blake2s_setkey,
        .init                   = crypto_blake2s_init,
        .update                 = crypto_blake2s_update,
        .final                  = crypto_blake2s_final,
        .descsize               = sizeof(struct blake2s_state),
}};

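/*
 * Enable the SSSE3 path only on CPUs that support it, and the AVX-512 path
 * only when all required AVX/AVX-512 features and XSAVE state components
 * are present; then register the shash algorithms if the crypto hash API
 * is available.
 */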
static int __init blake2s_mod_init(void)
{
        if (!boot_cpu_has(X86_FEATURE_SSSE3))
                return 0;

        static_branch_enable(&blake2s_use_ssse3);

        if (IS_ENABLED(CONFIG_AS_AVX512) &&
            boot_cpu_has(X86_FEATURE_AVX) &&
            boot_cpu_has(X86_FEATURE_AVX2) &&
            boot_cpu_has(X86_FEATURE_AVX512F) &&
            boot_cpu_has(X86_FEATURE_AVX512VL) &&
            cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM |
                              XFEATURE_MASK_AVX512, NULL))
                static_branch_enable(&blake2s_use_avx512);

        return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
                crypto_register_shashes(blake2s_algs,
                                        ARRAY_SIZE(blake2s_algs)) : 0;
}

static void __exit blake2s_mod_exit(void)
{
        if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && boot_cpu_has(X86_FEATURE_SSSE3))
                crypto_unregister_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
}

module_init(blake2s_mod_init);
module_exit(blake2s_mod_exit);

MODULE_ALIAS_CRYPTO("blake2s-128");
MODULE_ALIAS_CRYPTO("blake2s-128-x86");
MODULE_ALIAS_CRYPTO("blake2s-160");
MODULE_ALIAS_CRYPTO("blake2s-160-x86");
MODULE_ALIAS_CRYPTO("blake2s-224");
MODULE_ALIAS_CRYPTO("blake2s-224-x86");
MODULE_ALIAS_CRYPTO("blake2s-256");
MODULE_ALIAS_CRYPTO("blake2s-256-x86");
MODULE_LICENSE("GPL v2");