// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-cipher.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <clabbe.montjoie@gmail.com>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit
 * keysizes in CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arm/sunxi/README
 */

#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>
#include "sun8i-ss.h"

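/*
 * Decide whether a request must go to the software fallback. The SS
 * hardware only handles lengths that are non-zero multiples of the
 * 16-byte block size, at most 8 scatterlist entries per side, entries
 * that are block-aligned and block-sized, and source/destination lists
 * with the same number of entries of the same lengths.
 */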
static bool sun8i_ss_need_fallback(struct skcipher_request *areq)
{
	struct scatterlist *in_sg = areq->src;
	struct scatterlist *out_sg = areq->dst;
	struct scatterlist *sg;

	if (areq->cryptlen == 0 || areq->cryptlen % 16)
		return true;

	if (sg_nents(areq->src) > 8 || sg_nents(areq->dst) > 8)
		return true;

	sg = areq->src;
	while (sg) {
		if ((sg->length % 16) != 0)
			return true;
		if ((sg_dma_len(sg) % 16) != 0)
			return true;
		if (!IS_ALIGNED(sg->offset, 16))
			return true;
		sg = sg_next(sg);
	}
	sg = areq->dst;
	while (sg) {
		if ((sg->length % 16) != 0)
			return true;
		if ((sg_dma_len(sg) % 16) != 0)
			return true;
		if (!IS_ALIGNED(sg->offset, 16))
			return true;
		sg = sg_next(sg);
	}

	/*
	 * The SS hardware needs the same number of SG entries, with the
	 * same lengths, for source and destination.
	 */
	in_sg = areq->src;
	out_sg = areq->dst;
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length)
			return true;
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}
	if (in_sg || out_sg)
		return true;
	return false;
}

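/*
 * Process the request with the software fallback tfm, preserving the
 * requested direction; the fallback counter is incremented when
 * CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG is enabled.
 */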
static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;

	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, op->fallback_tfm);
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	algt->stat_fb++;
#endif
	skcipher_request_set_sync_tfm(subreq, op->fallback_tfm);
	skcipher_request_set_callback(subreq, areq->base.flags, NULL, NULL);
	skcipher_request_set_crypt(subreq, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & SS_DECRYPTION)
		err = crypto_skcipher_decrypt(subreq);
	else
		err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);
	return err;
}

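/*
 * Prepare and run one request on the SS hardware: DMA-map the key, an
 * IV bounce buffer (saving the last ciphertext block of the source when
 * decrypting, so the chained IV can be restored afterwards) and the
 * source/destination scatterlists, then fill the task descriptors.
 * Descriptor lengths are in 32-bit words, hence the todo / 4 below.
 */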
static int sun8i_ss_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	void *backup_iv = NULL;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int i;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);

	dev_dbg(ss->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt->stat_req++;
#endif

	rctx->op_mode = ss->variant->op_mode[algt->ss_blockmode];
	rctx->method = ss->variant->alg_cipher[algt->ss_algo_id];
	rctx->keylen = op->keylen;

	rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ss->dev, rctx->p_key)) {
		dev_err(ss->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && ivsize > 0) {
		rctx->ivlen = ivsize;
		rctx->biv = kzalloc(ivsize, GFP_KERNEL | GFP_DMA);
		if (!rctx->biv) {
			err = -ENOMEM;
			goto theend_key;
		}
		if (rctx->op_dir & SS_DECRYPTION) {
			backup_iv = kzalloc(ivsize, GFP_KERNEL);
			if (!backup_iv) {
				err = -ENOMEM;
				/* do not leak the IV bounce buffer */
				kfree(rctx->biv);
				rctx->biv = NULL;
				goto theend_key;
			}
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(backup_iv, areq->src, offset,
						 ivsize, 0);
		}
		memcpy(rctx->biv, areq->iv, ivsize);
		rctx->p_iv = dma_map_single(ss->dev, rctx->biv, rctx->ivlen,
					    DMA_TO_DEVICE);
		if (dma_mapping_error(ss->dev, rctx->p_iv)) {
			dev_err(ss->dev, "Cannot DMA MAP IV\n");
			/* keep the "not mapped" sentinel so cleanup skips the unmap */
			rctx->p_iv = 0;
			err = -ENOMEM;
			goto theend_iv;
		}
	}
	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src),
				    DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src),
				    DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ss->dev, areq->dst, sg_nents(areq->dst),
				    DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->src;
	while (i < nr_sgs && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgs_next;
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_src[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgs_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->dst;
	while (i < nr_sgd && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgd_next;
		rctx->t_dst[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_dst[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgd_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ss->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ss->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
		dma_unmap_sg(ss->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE);
	}

theend_iv:
	if (rctx->p_iv)
		dma_unmap_single(ss->dev, rctx->p_iv, rctx->ivlen,
				 DMA_TO_DEVICE);

	if (areq->iv && ivsize > 0) {
		if (rctx->biv) {
			offset = areq->cryptlen - ivsize;
			if (rctx->op_dir & SS_DECRYPTION) {
				memcpy(areq->iv, backup_iv, ivsize);
				memzero_explicit(backup_iv, ivsize);
				kzfree(backup_iv);
			} else {
				scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
							 ivsize, 0);
			}
			kfree(rctx->biv);
		}
	}

theend_key:
	dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}

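/*
 * Crypto engine callback: run one queued skcipher request and report
 * the result through crypto_finalize_skcipher_request().
 */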
static int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = sun8i_ss_cipher(breq);
	crypto_finalize_skcipher_request(engine, breq, err);

	return 0;
}

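/*
 * Queue a decryption request on one of the SS flows via the crypto
 * engine, or handle it directly with the software fallback when the
 * hardware cannot process it.
 */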
int sun8i_ss_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_DECRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

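/* Same as sun8i_ss_skdecrypt(), but for the encryption direction. */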
int sun8i_ss_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_ENCRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

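/*
 * Initialize the transform context: resolve the owning SS device from
 * the algorithm template, allocate the software fallback tfm, set up
 * the crypto engine callbacks and take a runtime PM reference that is
 * held for the lifetime of the tfm.
 */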
int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ss_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	op->ss = algt->ss;

	sktfm->reqsize = sizeof(struct sun8i_cipher_req_ctx);

	op->fallback_tfm = crypto_alloc_sync_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	dev_info(op->ss->dev, "Fallback for %s is %s\n",
		 crypto_tfm_alg_driver_name(&sktfm->base),
		 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(&op->fallback_tfm->base)));

	op->enginectx.op.do_one_request = sun8i_ss_handle_cipher_request;
	op->enginectx.op.prepare_request = NULL;
	op->enginectx.op.unprepare_request = NULL;

	err = pm_runtime_get_sync(op->ss->dev);
	if (err < 0) {
		dev_err(op->ss->dev, "pm error %d\n", err);
		goto error_pm;
	}

	return 0;
error_pm:
	crypto_free_sync_skcipher(op->fallback_tfm);
	return err;
}

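/*
 * Undo sun8i_ss_cipher_init(): zero and free the key, release the
 * fallback tfm and drop the runtime PM reference.
 */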
void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	if (op->key) {
		memzero_explicit(op->key, op->keylen);
		kfree(op->key);
	}
	crypto_free_sync_skcipher(op->fallback_tfm);
	pm_runtime_put_sync(op->ss->dev);
}

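/*
 * Set the AES key. Only 128, 192 and 256 bit keys are accepted; the key
 * is kept in a GFP_DMA buffer for later DMA mapping and is also
 * propagated to the fallback tfm.
 */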
int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	switch (keylen) {
	case 128 / 8:
		break;
	case 192 / 8:
		break;
	case 256 / 8:
		break;
	default:
		dev_dbg(ss->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	if (op->key) {
		memzero_explicit(op->key, op->keylen);
		kfree(op->key);
	}
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_sync_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_sync_skcipher_setkey(op->fallback_tfm, key, keylen);
}

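/*
 * Set the DES3 key (exactly 3 * DES_KEY_SIZE bytes); otherwise this
 * behaves like sun8i_ss_aes_setkey().
 */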
int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	if (unlikely(keylen != 3 * DES_KEY_SIZE)) {
		dev_dbg(ss->dev, "Invalid keylen %u\n", keylen);
		return -EINVAL;
	}

	if (op->key) {
		memzero_explicit(op->key, op->keylen);
		kfree(op->key);
	}
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_sync_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_sync_skcipher_setkey(op->fallback_tfm, key, keylen);
}