// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This file implements the Linux crypto algorithms.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

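/*
 * Populate SA command word 0: hash and cipher algorithm selection,
 * hash-state and IV load/save behaviour, header processing, padding,
 * opcode and transfer direction.
 */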
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
                                     u32 save_iv, u32 ld_h, u32 ld_iv,
                                     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
                                     u32 op_grp, u32 op, u32 dir)
{
        sa->sa_command_0.w = 0;
        sa->sa_command_0.bf.save_hash_state = save_h;
        sa->sa_command_0.bf.save_iv = save_iv;
        sa->sa_command_0.bf.load_hash_state = ld_h;
        sa->sa_command_0.bf.load_iv = ld_iv;
        sa->sa_command_0.bf.hdr_proc = hdr_proc;
        sa->sa_command_0.bf.hash_alg = h;
        sa->sa_command_0.bf.cipher_alg = c;
        sa->sa_command_0.bf.pad_type = pad_type & 3;
        sa->sa_command_0.bf.extend_pad = pad_type >> 2;
        sa->sa_command_0.bf.op_group = op_grp;
        sa->sa_command_0.bf.opcode = op;
        sa->sa_command_0.bf.dir = dir;
}

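/*
 * Populate SA command word 1: crypto mode bits (split across two
 * fields), feedback mode, HMAC muting, sequence-number handling and
 * the copy pad/payload/header flags.
 */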
static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
                                     u32 hmac_mc, u32 cfb, u32 esn,
                                     u32 sn_mask, u32 mute, u32 cp_pad,
                                     u32 cp_pay, u32 cp_hdr)
{
        sa->sa_command_1.w = 0;
        sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
        sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
        sa->sa_command_1.bf.feedback_mode = cfb;
        sa->sa_command_1.bf.sa_rev = 1;
        sa->sa_command_1.bf.hmac_muting = hmac_mc;
        sa->sa_command_1.bf.extended_seq_num = esn;
        sa->sa_command_1.bf.seq_num_mask = sn_mask;
        sa->sa_command_1.bf.mutable_bit_proc = mute;
        sa->sa_command_1.bf.copy_pad = cp_pad;
        sa->sa_command_1.bf.copy_payload = cp_pay;
        sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

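/*
 * Common skcipher entry point: convert the request IV to the
 * little-endian format expected by the engine and hand the request
 * to the packet-descriptor builder with the matching SA.
 */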
static inline int crypto4xx_crypt(struct skcipher_request *req,
                                  const unsigned int ivlen, bool decrypt)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        __le32 iv[AES_IV_SIZE];

        if (ivlen)
                crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
                ctx->sa_len, 0, NULL);
}

int crypto4xx_encrypt_noiv(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, 0, false);
}

int crypto4xx_encrypt_iv(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, AES_IV_SIZE, false);
}

int crypto4xx_decrypt_noiv(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, 0, true);
}

int crypto4xx_decrypt_iv(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, AES_IV_SIZE, true);
}

/*
 * AES Functions
 */
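/*
 * Build the inbound (decrypt) SA for the given key/mode/feedback
 * combination, then mirror it for the outbound (encrypt) direction.
 */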
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
                                const u8 *key,
                                unsigned int keylen,
                                unsigned char cm,
                                u8 fb)
{
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        struct dynamic_sa_ctl *sa;
        int rc;

        if (keylen != AES_KEYSIZE_256 &&
                keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_128) {
                crypto_skcipher_set_flags(cipher,
                                CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        /* Create SA */
        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen - 16) / 4);
        if (rc)
                return rc;

        /* Setup SA */
        sa = ctx->sa_in;

        set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
                                 SA_NOT_SAVE_IV : SA_SAVE_IV),
                                 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
                                 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
                                 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
                                 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
                                 DIR_INBOUND);

        set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
                                 fb, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);
        crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
                                 key, keylen);
        sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
        sa->sa_command_1.bf.key_len = keylen >> 3;

        memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
        sa = ctx->sa_out;
        sa->sa_command_0.bf.dir = DIR_OUTBOUND;
        /*
         * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT.
         * It's the DIR_(IN|OUT)BOUND that matters.
         */
        sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

        return 0;
}

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
                                    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
                                    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
                                    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
                                    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}

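/*
 * RFC 3686 keys carry a 4 byte nonce after the AES key proper; strip
 * it off for the engine key and keep it for per-request IV assembly.
 */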
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        int rc;

        rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
                CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
        if (rc)
                return rc;

        ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
                                                 CTR_RFC3686_NONCE_SIZE]);

        return 0;
}

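/*
 * The on-the-wire RFC 3686 counter block is nonce || IV || counter,
 * with the 32-bit block counter starting at 1.
 */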
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        __le32 iv[AES_IV_SIZE / 4] = {
                ctx->iv_nonce,
                cpu_to_le32p((u32 *) req->iv),
                cpu_to_le32p((u32 *) (req->iv + 4)),
                cpu_to_le32(1) };

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  req->cryptlen, iv, AES_IV_SIZE,
                                  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        __le32 iv[AES_IV_SIZE / 4] = {
                ctx->iv_nonce,
                cpu_to_le32p((u32 *) req->iv),
                cpu_to_le32p((u32 *) (req->iv + 4)),
                cpu_to_le32(1) };

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  req->cryptlen, iv, AES_IV_SIZE,
                                  ctx->sa_out, ctx->sa_len, 0, NULL);
}

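/*
 * Plain CTR: the engine only increments the low 32 bits of the
 * counter block, so requests that would wrap it are diverted to the
 * software fallback.
 */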
static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        size_t iv_len = crypto_skcipher_ivsize(cipher);
        unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
        unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
                        AES_BLOCK_SIZE;

        /*
         * The hardware uses only the last 32 bits as the counter while the
         * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
         * the whole IV is a counter.  So fall back if the counter is going to
         * overflow.
         */
        if (counter + nblks < counter) {
                SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
                int ret;

                skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
                skcipher_request_set_callback(subreq, req->base.flags,
                        NULL, NULL);
                skcipher_request_set_crypt(subreq, req->src, req->dst,
                        req->cryptlen, req->iv);
                ret = encrypt ? crypto_skcipher_encrypt(subreq)
                        : crypto_skcipher_decrypt(subreq);
                skcipher_request_zero(subreq);
                return ret;
        }

        return encrypt ? crypto4xx_encrypt_iv(req)
                       : crypto4xx_decrypt_iv(req);
}

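/*
 * Propagate the request flags to the software fallback tfm, set its
 * key, and mirror any result flags back onto the hardware tfm.
 */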
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
                                       struct crypto_skcipher *cipher,
                                       const u8 *key,
                                       unsigned int keylen)
{
        int rc;

        crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
                                    CRYPTO_TFM_REQ_MASK);
        crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
                crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
        rc = crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
        crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
        crypto_skcipher_set_flags(cipher,
                crypto_sync_skcipher_get_flags(ctx->sw_cipher.cipher) &
                        CRYPTO_TFM_RES_MASK);

        return rc;
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        int rc;

        rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;

        return crypto4xx_setkey_aes(cipher, key, keylen,
                CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
        return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
        return crypto4xx_ctr_crypt(req, false);
}

static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
                                                unsigned int len,
                                                bool is_ccm, bool decrypt)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);

        /* authsize has to be a multiple of 4 */
        if (aead->authsize & 3)
                return true;

        /*
         * The hardware does not handle cases where the plaintext
         * is less than a block.
         */
        if (len < AES_BLOCK_SIZE)
                return true;

        /* assoc len needs to be a multiple of 4 and <= 1020 */
        if (req->assoclen & 0x3 || req->assoclen > 1020)
                return true;

        /* CCM supports only counter field lengths of 2 and 4 bytes */
        if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
                return true;

        return false;
}

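/*
 * Hand the request unchanged to the software AEAD implementation
 * whenever the engine cannot process it.
 */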
static int crypto4xx_aead_fallback(struct aead_request *req,
        struct crypto4xx_ctx *ctx, bool do_decrypt)
{
        struct aead_request *subreq = aead_request_ctx(req);

        aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
        aead_request_set_callback(subreq, req->base.flags,
                                  req->base.complete, req->base.data);
        aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
                               req->iv);
        aead_request_set_ad(subreq, req->assoclen);
        return do_decrypt ? crypto_aead_decrypt(subreq) :
                            crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
                                         struct crypto_aead *cipher,
                                         const u8 *key,
                                         unsigned int keylen)
{
        int rc;

        crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
        crypto_aead_set_flags(ctx->sw_cipher.aead,
                crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
        rc = crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
        crypto_aead_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
        crypto_aead_set_flags(cipher,
                crypto_aead_get_flags(ctx->sw_cipher.aead) &
                        CRYPTO_TFM_RES_MASK);

        return rc;
}

/*
 * AES-CCM Functions
 */

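/*
 * Unlike the skcipher SAs, the CCM inbound and outbound SAs differ in
 * more than the direction bit, so both command words are set up
 * explicitly for each copy.
 */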
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
        struct dynamic_sa_ctl *sa;
        int rc = 0;

        rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;

        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
        if (rc)
                return rc;

        /* Setup SA */
        sa = (struct dynamic_sa_ctl *) ctx->sa_in;
        sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

        set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
                                 SA_CIPHER_ALG_AES,
                                 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
                                 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

        set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);

        sa->sa_command_1.bf.key_len = keylen >> 3;

        crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

        memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
        sa = (struct dynamic_sa_ctl *) ctx->sa_out;

        set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
                                 SA_CIPHER_ALG_AES,
                                 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
                                 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

        set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_COPY_PAD, SA_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);

        sa->sa_command_1.bf.key_len = keylen >> 3;
        return 0;
}

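/*
 * The SA is copied to the stack so that the per-request digest length
 * (and, for short counter fields, the ICM crypto mode) can be patched
 * in without touching the tfm-wide SA.
 */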
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        __le32 iv[16];
        u32 tmp_sa[SA_AES128_CCM_LEN + 4];
        struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
        unsigned int len = req->cryptlen;

        if (decrypt)
                len -= crypto_aead_authsize(aead);

        if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
                return crypto4xx_aead_fallback(req, ctx, decrypt);

        memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
        sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

        if (req->iv[0] == 1) {
                /* CRYPTO_MODE_AES_ICM */
                sa->sa_command_1.bf.crypto_mode9_8 = 1;
        }

        iv[3] = cpu_to_le32(0);
        crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  len, iv, sizeof(iv),
                                  sa, ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
                               unsigned int authsize)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

        return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/*
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
        switch (keylen) {
        case 16:
        case 24:
        case 32:
                return 0;
        default:
                return -EINVAL;
        }
}

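/*
 * Compute the GHASH key H = E_K(0^128) in software with a one-off
 * AES cipher transform; the engine expects it preloaded into the
 * SA's inner digest.
 */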
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
                                             unsigned int keylen)
{
        struct crypto_cipher *aes_tfm = NULL;
        uint8_t src[16] = { 0 };
        int rc = 0;

        aes_tfm = crypto_alloc_cipher("aes", 0, CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(aes_tfm)) {
                rc = PTR_ERR(aes_tfm);
                pr_warn("could not load aes cipher driver: %d\n", rc);
                return rc;
        }

        rc = crypto_cipher_setkey(aes_tfm, key, keylen);
        if (rc) {
                pr_err("setkey() failed: %d\n", rc);
                goto out;
        }

        crypto_cipher_encrypt_one(aes_tfm, src, src);
        crypto4xx_memcpy_to_le32(hash_start, src, 16);
out:
        crypto_free_cipher(aes_tfm);
        return rc;
}

int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
                             const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
        struct dynamic_sa_ctl *sa;
        int rc = 0;

        if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0) {
                crypto_aead_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;

        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
        if (rc)
                return rc;

        sa = (struct dynamic_sa_ctl *) ctx->sa_in;

        sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
        set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
                                 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
                                 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
                                 DIR_INBOUND);
        set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_ON, SA_MC_DISABLE,
                                 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);

        sa->sa_command_1.bf.key_len = keylen >> 3;

        crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
                                 key, keylen);

        rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
                key, keylen);
        if (rc) {
                pr_err("GCM hash key setting failed = %d\n", rc);
                goto err;
        }

        memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
        sa = (struct dynamic_sa_ctl *) ctx->sa_out;
        sa->sa_command_0.bf.dir = DIR_OUTBOUND;
        sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

        return 0;
err:
        crypto4xx_free_sa(ctx);
        return rc;
}

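/*
 * GCM: the initial counter block is the 96-bit IV with the 32-bit
 * block counter appended, starting at 1.
 */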
static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
                                          bool decrypt)
{
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
        __le32 iv[4];
        unsigned int len = req->cryptlen;

        if (decrypt)
                len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

        if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
                return crypto4xx_aead_fallback(req, ctx, decrypt);

        crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
        iv[3] = cpu_to_le32(1);

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  len, iv, sizeof(iv),
                                  decrypt ? ctx->sa_in : ctx->sa_out,
                                  ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_gcm(req, true);
}

/*
 * HASH SHA1 Functions
 */
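/*
 * Common ahash init: look up the owning device, allocate the SA and
 * program it for a plain (non-HMAC) hash with a zeroed digest state.
 */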
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
                                   unsigned int sa_len,
                                   unsigned char ha,
                                   unsigned char hm)
{
        struct crypto_alg *alg = tfm->__crt_alg;
        struct crypto4xx_alg *my_alg;
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
        struct dynamic_sa_hash160 *sa;
        int rc;

        my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
                              alg.u.hash);
        ctx->dev = my_alg->dev;

        /* Create SA */
        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, sa_len);
        if (rc)
                return rc;

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct crypto4xx_ctx));
        sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
        set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
                                 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
                                 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
                                 SA_OPCODE_HASH, DIR_INBOUND);
        set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);
        /* Need to zero hash digest in SA */
        memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
        memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

        return 0;
}

int crypto4xx_hash_init(struct ahash_request *req)
{
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        int ds;
        struct dynamic_sa_ctl *sa;

        sa = ctx->sa_in;
        ds = crypto_ahash_digestsize(
                        __crypto_ahash_cast(req->base.tfm));
        sa->sa_command_0.bf.digest_len = ds >> 2;
        sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

        return 0;
}

int crypto4xx_hash_update(struct ahash_request *req)
{
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct scatterlist dst;
        unsigned int ds = crypto_ahash_digestsize(ahash);

        sg_init_one(&dst, req->result, ds);

        return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
                                  req->nbytes, NULL, 0, ctx->sa_in,
                                  ctx->sa_len, 0, NULL);
}

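/*
 * The digest was already written out to req->result by the preceding
 * update, so there is nothing left to do here.
 */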
int crypto4xx_hash_final(struct ahash_request *req)
{
        return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct scatterlist dst;
        unsigned int ds = crypto_ahash_digestsize(ahash);

        sg_init_one(&dst, req->result, ds);

        return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
                                  req->nbytes, NULL, 0, ctx->sa_in,
                                  ctx->sa_len, 0, NULL);
}

/*
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
        return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
                                       SA_HASH_MODE_HASH);
}