1 // SPDX-License-Identifier: GPL-2.0+
3 * caam - Freescale FSL CAAM support for crypto API
5 * Copyright 2008-2011 Freescale Semiconductor, Inc.
6 * Copyright 2016-2018 NXP
8 * Based on talitos crypto API driver.
10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
12 * --------------- ---------------
13 * | JobDesc #1 |-------------------->| ShareDesc |
14 * | *(packet 1) | | (PDB) |
15 * --------------- |------------->| (hashKey) |
17 * . | |-------->| (operation) |
18 * --------------- | | ---------------
19 * | JobDesc #2 |------| |
25 * | JobDesc #3 |------------
29 * The SharedDesc never changes for a connection unless rekeyed, but
30 * each packet will likely be in a different place. So all we need
31 * to know to process the packet is where the input is, where the
32 * output goes, and what context we want to process with. Context is
33 * in the SharedDesc, packet references in the JobDesc.
35 * So, a job desc looks like:
37 * ---------------------
39 * | ShareDesc Pointer |
46 * ---------------------
53 #include "desc_constr.h"
56 #include "sg_sw_sec4.h"
58 #include "caamalg_desc.h"
/* Priority used when registering CAAM algorithms with the crypto API */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

/* Per-algorithm job-descriptor I/O overhead on top of DESC_JOB_IO_LEN */
#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
#define AUTHENC_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + \

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

/* Bytes/words a shared descriptor may use in the 64-word h/w buffer */
#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
/*
 * NOTE(review): two definitions of debug() — presumably selected by an
 * #ifdef DEBUG / #else pair; confirm the surrounding preprocessor guards.
 */
#define debug(format, arg...)
/* Per-algorithm CAAM template parameters (shared by AEAD and skcipher) */
struct caam_alg_entry {

/* AEAD algorithm wrapper: generic crypto API alg plus CAAM template data */
struct caam_aead_alg {
	struct caam_alg_entry caam;	/* CAAM-specific parameters */

/* skcipher algorithm wrapper: generic crypto API alg plus CAAM template data */
struct caam_skcipher_alg {
	struct skcipher_alg skcipher;	/* crypto API algorithm definition */
	struct caam_alg_entry caam;	/* CAAM-specific parameters */
/*
 * per-session context
 */
	u32 sh_desc_enc[DESC_MAX_USED_LEN];	/* encrypt shared descriptor */
	u32 sh_desc_dec[DESC_MAX_USED_LEN];	/* decrypt shared descriptor */
	u8 key[CAAM_MAX_KEY_SIZE];		/* key material (split auth key + enc key) */
	dma_addr_t sh_desc_enc_dma;		/* DMA address of sh_desc_enc */
	dma_addr_t sh_desc_dec_dma;		/* DMA address of sh_desc_dec */
	enum dma_data_direction dir;		/* direction used to sync ctx buffers */
	struct device *jrdev;			/* job ring device this session uses */
	struct alginfo adata;			/* authentication algorithm details */
	struct alginfo cdata;			/* cipher algorithm details */
	unsigned int authsize;			/* ICV (auth tag) length in bytes */
/*
 * aead_null_set_sh_desc - build encrypt/decrypt shared descriptors for
 * authentication-only ("null encryption") AEAD transforms.
 *
 * For each direction, decides whether the split authentication key fits
 * inline in the shared descriptor or must be referenced by DMA address,
 * then constructs the descriptor and syncs it to the device.
 */
static int aead_null_set_sh_desc(struct crypto_aead *aead)
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	/* space left once job-descriptor I/O and the padded key are counted */
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
		/* key too large to inline: reference it via DMA address */
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
	/* make the freshly built descriptor visible to the device */
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);
/*
 * aead_set_sh_desc - build shared descriptors for authenc-style AEAD
 * (cipher + authentication), including the CTR/RFC3686 variants.
 *
 * Uses desc_inline_query() to decide, per direction, which of the auth
 * and cipher keys can be inlined into the shared descriptor; falls back
 * to aead_null_set_sh_desc() when there is no cipher key.
 */
static int aead_set_sh_desc(struct crypto_aead *aead)
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc, *nonce = NULL;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	/*
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* RFC3686 nonce is stored right after the two keys */
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);

	/* candidate lengths for inlining: padded auth key, cipher key */
	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)

		ctx->adata.key_virt = ctx->key;
		ctx->adata.key_dma = ctx->key_dma;

		/* cipher key sits after the padded auth key in ctx->key */
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	/* bit 0: auth key inlined, bit 1: cipher key inlined */
	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)

		ctx->adata.key_virt = ctx->key;
		ctx->adata.key_dma = ctx->key_dma;

		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	/* no IV generation requested: the encrypt descriptor above suffices */
	if (!alg->caam.geniv)

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)

		ctx->adata.key_virt = ctx->key;
		ctx->adata.key_dma = ctx->key_dma;

		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor (overwrites plain encrypt one) */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);
/*
 * aead_setauthsize - record the requested ICV length and rebuild the
 * shared descriptors to match.
 */
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	/* NOTE(review): aead_set_sh_desc() return value is ignored here */
	aead_set_sh_desc(authenc);
/*
 * gcm_set_sh_desc - build encrypt/decrypt shared descriptors for AES-GCM.
 *
 * No-op until both a key and an authsize have been set; per direction,
 * inlines the key when the descriptor still fits in the h/w buffer.
 */
static int gcm_set_sh_desc(struct crypto_aead *aead)
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -

	/* descriptors cannot be built before both key and authsize are known */
	if (!ctx->cdata.keylen || !ctx->authsize)

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);
/* Record the GCM ICV length and rebuild the shared descriptors. */
static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	/* NOTE(review): gcm_set_sh_desc() return value is ignored here */
	gcm_set_sh_desc(authenc);
/*
 * rfc4106_set_sh_desc - build encrypt/decrypt shared descriptors for
 * RFC4106 (GCM-ESP) AEAD. Mirrors gcm_set_sh_desc() with the RFC4106
 * descriptor constructors.
 */
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -

	/* need both key and authsize before descriptors can be built */
	if (!ctx->cdata.keylen || !ctx->authsize)

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);
/* Record the RFC4106 ICV length and rebuild the shared descriptors. */
static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	/* NOTE(review): rfc4106_set_sh_desc() return value is ignored here */
	rfc4106_set_sh_desc(authenc);
/*
 * rfc4543_set_sh_desc - build encrypt/decrypt shared descriptors for
 * RFC4543 (GMAC-ESP) AEAD. Mirrors gcm_set_sh_desc() with the RFC4543
 * descriptor constructors.
 */
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -

	/* need both key and authsize before descriptors can be built */
	if (!ctx->cdata.keylen || !ctx->authsize)

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);
/* Record the RFC4543 ICV length and rebuild the shared descriptors. */
static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	/* NOTE(review): rfc4543_set_sh_desc() return value is ignored here */
	rfc4543_set_sh_desc(authenc);
/*
 * chachapoly_set_sh_desc - build encrypt/decrypt shared descriptors for
 * ChaCha20-Poly1305. Unlike the AES paths there is no key-inlining
 * decision here; both descriptors are always (re)built once key and
 * authsize are known.
 */
static int chachapoly_set_sh_desc(struct crypto_aead *aead)
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);

	/* need both key and authsize before descriptors can be built */
	if (!ctx->cdata.keylen || !ctx->authsize)

	/* encrypt shared descriptor (3rd-from-last arg true = encap) */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, true, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, false, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);
/*
 * chachapoly_setauthsize - only the full Poly1305 tag length is
 * supported; anything else is rejected.
 */
static int chachapoly_setauthsize(struct crypto_aead *aead,
				  unsigned int authsize)
	struct caam_ctx *ctx = crypto_aead_ctx(aead);

	if (authsize != POLY1305_DIGEST_SIZE)

	ctx->authsize = authsize;
	return chachapoly_set_sh_desc(aead);
/*
 * chachapoly_setkey - accept key material consisting of the ChaCha20 key
 * plus an optional trailing salt (for the IPsec/rfc7539esp variant, where
 * ivsize is shorter than CHACHAPOLY_IV_SIZE).
 */
static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;

	if (keylen != CHACHA_KEY_SIZE + saltlen) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);

	/* key stays in caller memory; only the ChaCha part counts as key */
	ctx->cdata.key_virt = key;
	ctx->cdata.keylen = keylen - saltlen;

	return chachapoly_set_sh_desc(aead);
/*
 * aead_setkey - split the combined authenc key blob into authentication
 * and encryption keys and install them in the session context.
 *
 * On CAAM era >= 6 the Derived Key Protocol (DKP) lets the descriptor
 * derive the split key itself, so the raw auth key is stored; on older
 * hardware gen_split_key() is used to precompute the split key.
 */
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)

	/* debug dump of the incoming key material */
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &

		/* both keys must fit in the fixed-size ctx->key buffer */
		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);

	/* pre-DKP hardware: compute the split key on a job ring */
	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);

	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);

	ctx->cdata.keylen = keys.enckeylen;
	/* scrub the stack copy of the key material before returning */
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);

	/* error path: flag bad key and scrub the stack copy as well */
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
/*
 * gcm_setkey - copy the AES key into the session context, sync it to the
 * device and rebuild the GCM shared descriptors.
 */
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
/*
 * rfc4106_setkey - RFC4106 key material is the AES key followed by a
 * 4-byte salt; store the whole blob but count only the AES part as key.
 */
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
	return rfc4106_set_sh_desc(aead);
/*
 * rfc4543_setkey - same layout as RFC4106: AES key followed by a 4-byte
 * salt; store the blob, count only the AES part as key.
 */
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
	return rfc4543_set_sh_desc(aead);
/*
 * skcipher_setkey - install a symmetric cipher key and rebuild the
 * encrypt/decrypt shared descriptors. For RFC3686 (CTR with nonce) the
 * trailing nonce bytes are split off the key and the IV offset in
 * CONTEXT1 is adjusted.
 */
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	/*
	 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 * | *key = {KEY, NONCE}
	 */
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* nonce is carried at the end of the key blob, not a key */
		keylen -= CTR_RFC3686_NONCE_SIZE;

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);
/*
 * xts_skcipher_setkey - install an XTS key (two AES keys concatenated,
 * so valid lengths are 2 * AES_MIN_KEY_SIZE or 2 * AES_MAX_KEY_SIZE)
 * and rebuild both XTS shared descriptors.
 */
static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct skcipher_edesc {
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
/*
 * caam_unmap - undo all DMA mappings made for one request: the src/dst
 * scatterlists (TO_DEVICE/FROM_DEVICE when distinct, BIDIRECTIONAL when
 * in-place), the IV single mapping, and the sec4 link table.
 */
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		/* distinct src/dst: each was mapped one-directionally */
		dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
		/* in-place operation: single bidirectional mapping */
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);

		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
/* Unmap everything mapped for an AEAD request (no IV mapping is used). */
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/* Unmap everything mapped for an skcipher request, including the IV. */
static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/*
 * aead_encrypt_done - job ring completion callback for AEAD encryption:
 * translate the h/w status, unmap DMA resources and complete the request.
 */
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
	struct aead_request *req = context;
	struct aead_edesc *edesc;

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	/* recover the extended descriptor from the embedded h/w descriptor */
	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	aead_request_complete(req, err);
/*
 * aead_decrypt_done - job ring completion callback for AEAD decryption;
 * like aead_encrypt_done() but maps an ICV-check failure to -EBADMSG.
 */
static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
	struct aead_request *req = context;
	struct aead_edesc *edesc;

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)

	aead_request_complete(req, err);
/*
 * skcipher_encrypt_done - job ring completion callback for skcipher
 * encryption: unmap resources, copy the last ciphertext block back into
 * req->iv (crypto API contract, used e.g. by CBC/CTS chaining), complete.
 */
static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);

		caam_jr_strstatus(jrdev, err);

	/* debug dumps of the resulting IV and destination data */
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);

	caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen - ivsize,

	skcipher_request_complete(req, err);
/*
 * skcipher_decrypt_done - job ring completion callback for skcipher
 * decryption: translate status, dump debug output, unmap and complete.
 */
static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
	caam_jr_strstatus(jrdev, err);

	/* debug dumps of the resulting IV and destination data */
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);

	caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	skcipher_unmap(jrdev, edesc, req);

	skcipher_request_complete(req, err);
/*
 * Fill in aead job descriptor
 *
 * Builds the job descriptor that points at the appropriate shared
 * descriptor (encrypt or decrypt) and sets up SEQ IN/OUT pointers for
 * source and destination, using the sec4 link table when the data is
 * not contiguous. On encrypt the output grows by authsize (appended
 * ICV); on decrypt it shrinks by authsize (consumed ICV).
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

	/* select the shared descriptor for the requested direction */
	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

		/* contiguous source: point straight at it (0 if empty) */
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		/* scattered source: go through the sec4 link table */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,

	/* in-place default: reuse the input pointer/options for output */
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			/* dst entries follow the src entries in the table */
			dst_dma = edesc->sec4_sg_dma +
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;

		/* encrypt: output carries the appended ICV */
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
		/* decrypt: ICV is verified and stripped from the output */
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
/*
 * init_gcm_job - extend the generic AEAD job descriptor with GCM
 * specifics: stash assoclen in REG3 and load the IV (flagged LAST1 for
 * the degenerate zero-length input case so class 1 flushes correctly).
 */
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);

		/* non-generic GCM: 4-byte salt stored right after the key */
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
		append_data(desc, req->iv, ivsize);
	/* End of blank commands */
/*
 * init_chachapoly_job - extend the generic AEAD job descriptor for
 * ChaCha20-Poly1305: record assoclen in REG3 and load the nonce/IV into
 * CONTEXT1 at the offset appropriate for the IPsec vs RFC7539 variant.
 */
static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12 bytes nonce in a single operation
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
/*
 * init_authenc_job - extend the generic AEAD job descriptor for authenc
 * transforms: pass assoclen via REG3 (era < 3) or DPOVRD, and load the
 * IV into CONTEXT1 at the CTR/RFC3686-adjusted offset when required.
 */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	/*
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
		/* RFC3686: IV goes after the 16-byte offset plus the nonce */
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
/*
 * Fill in skcipher job descriptor
 *
 * Points the job descriptor at the direction-appropriate shared
 * descriptor and sets SEQ IN (always via the sec4 link table, IV first)
 * and SEQ OUT pointers; in-place requests reuse the input table with the
 * IV entry skipped.
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0;
	dma_addr_t dst_dma, ptr;

	/* debug dumps of pre-operation IV and source data */
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	pr_err("asked=%d, cryptlen%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);

	caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	/* input always goes through the link table: IV entry + data */
	append_seq_in_ptr(desc, edesc->sec4_sg_dma, req->cryptlen + ivsize,

	if (likely(req->src == req->dst)) {
		/* in-place: reuse the input table, skipping the IV entry */
		dst_dma = edesc->sec4_sg_dma + sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			/* dst entries follow the (IV + src) entries */
			dst_dma = edesc->sec4_sg_dma + (edesc->src_nents + 1) *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;

	append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
/*
 * allocate and map the aead extended descriptor
 *
 * Counts and DMA-maps the source/destination scatterlists (handling
 * in-place, distinct src/dst, and zero-length input), allocates the
 * extended descriptor with room for the h/w job descriptor and sec4
 * link table, fills the table, and maps it for the device.
 * Returns the edesc or an ERR_PTR on failure.
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	/* may sleep only if the caller said so */
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		/* distinct buffers: count src and dst segments separately */
		src_nents = sg_nents_for_len(req->src, req->assoclen +
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);

		/* dst must also hold (+authsize on encrypt, - on decrypt) */
		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     (encrypt ? authsize :
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		/* in-place: a single count covers input and output */
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);

		/* Cover also the case of null (zero length) input data */
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			mapped_src_nents = 0;

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			/* unwind the source mapping before bailing out */
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);

	/* link-table entries only needed for multi-segment src/dst */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
		return ERR_PTR(-ENOMEM);

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	/* link table lives right after the edesc + job descriptor space */
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
	*all_contig_ptr = !(mapped_src_nents > 1);

	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		return ERR_PTR(-ENOMEM);

	edesc->sec4_sg_bytes = sec4_sg_bytes;
/*
 * gcm_encrypt() - submit a GCM(AES) encryption job to the CAAM job ring.
 *
 * Allocates the extended descriptor, builds the GCM job descriptor
 * (init_gcm_job(..., true) == encrypt) and enqueues it; completion is
 * reported asynchronously through aead_encrypt_done().  On enqueue failure
 * the request's DMA mappings are released via aead_unmap() (the
 * kfree/return tail is elided in this extract).
 */
1348 static int gcm_encrypt(struct aead_request *req)
1350 struct aead_edesc *edesc;
1351 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1352 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1353 struct device *jrdev = ctx->jrdev;
1358 /* allocate extended descriptor */
1359 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
1361 return PTR_ERR(edesc);
1363 /* Create and submit job descriptor */
1364 init_gcm_job(req, edesc, all_contig, true);
/* Debug dump of the constructed HW job descriptor. */
1366 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1367 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1368 desc_bytes(edesc->hw_desc), 1);
1371 desc = edesc->hw_desc;
1372 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
/* Enqueue failed: undo the DMA mappings set up by aead_edesc_alloc(). */
1376 aead_unmap(jrdev, edesc, req);
/*
 * chachapoly_encrypt() - submit a ChaCha20-Poly1305 encryption job.
 *
 * Same shape as gcm_encrypt(): allocate the extended descriptor, build the
 * job descriptor (init_chachapoly_job(..., true) == encrypt), enqueue, and
 * unmap on enqueue failure.  Completion arrives via aead_encrypt_done().
 */
1383 static int chachapoly_encrypt(struct aead_request *req)
1385 struct aead_edesc *edesc;
1386 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1387 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1388 struct device *jrdev = ctx->jrdev;
1393 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1396 return PTR_ERR(edesc);
1398 desc = edesc->hw_desc;
1400 init_chachapoly_job(req, edesc, all_contig, true);
/* Compile-time-gated debug dump of the job descriptor. */
1401 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1402 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1405 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
/* Enqueue failed: release the request's DMA state. */
1409 aead_unmap(jrdev, edesc, req);
/*
 * chachapoly_decrypt() - submit a ChaCha20-Poly1305 decryption job.
 *
 * Mirror of chachapoly_encrypt() with the decrypt flavor of the job
 * descriptor (init_chachapoly_job(..., false)) and completion routed to
 * aead_decrypt_done().
 */
1416 static int chachapoly_decrypt(struct aead_request *req)
1418 struct aead_edesc *edesc;
1419 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1420 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1421 struct device *jrdev = ctx->jrdev;
1426 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1429 return PTR_ERR(edesc);
1431 desc = edesc->hw_desc;
1433 init_chachapoly_job(req, edesc, all_contig, false);
/* Compile-time-gated debug dump of the job descriptor. */
1434 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1435 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1438 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
/* Enqueue failed: release the request's DMA state. */
1442 aead_unmap(jrdev, edesc, req);
1449 static int ipsec_gcm_encrypt(struct aead_request *req)
1451 if (req->assoclen < 8)
1454 return gcm_encrypt(req);
/*
 * aead_encrypt() - submit an authenc (IPsec ESP style) encryption job.
 *
 * Allocates the extended descriptor sized for the authenc descriptor
 * layout, builds the job (init_authenc_job(..., true) == encrypt) and
 * enqueues it; completion arrives via aead_encrypt_done().  On enqueue
 * failure the request is unmapped (kfree/return tail elided here).
 */
1457 static int aead_encrypt(struct aead_request *req)
1459 struct aead_edesc *edesc;
1460 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1461 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1462 struct device *jrdev = ctx->jrdev;
1467 /* allocate extended descriptor */
1468 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1471 return PTR_ERR(edesc);
1473 /* Create and submit job descriptor */
1474 init_authenc_job(req, edesc, all_contig, true);
/* Debug dump of the constructed HW job descriptor. */
1476 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1477 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1478 desc_bytes(edesc->hw_desc), 1);
1481 desc = edesc->hw_desc;
1482 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
/* Enqueue failed: undo the DMA mappings. */
1486 aead_unmap(jrdev, edesc, req);
/*
 * gcm_decrypt() - submit a GCM(AES) decryption job to the CAAM job ring.
 *
 * Mirror of gcm_encrypt() with the decrypt flavor of the job descriptor
 * (init_gcm_job(..., false)) and completion routed to aead_decrypt_done().
 * On enqueue failure the request is unmapped (tail elided here).
 */
1493 static int gcm_decrypt(struct aead_request *req)
1495 struct aead_edesc *edesc;
1496 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1497 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1498 struct device *jrdev = ctx->jrdev;
1503 /* allocate extended descriptor */
1504 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1506 return PTR_ERR(edesc);
1508 /* Create and submit job descriptor*/
1509 init_gcm_job(req, edesc, all_contig, false);
/* Debug dump of the constructed HW job descriptor. */
1511 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1512 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1513 desc_bytes(edesc->hw_desc), 1);
1516 desc = edesc->hw_desc;
1517 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
/* Enqueue failed: undo the DMA mappings. */
1521 aead_unmap(jrdev, edesc, req);
1528 static int ipsec_gcm_decrypt(struct aead_request *req)
1530 if (req->assoclen < 8)
1533 return gcm_decrypt(req);
/*
 * aead_decrypt() - submit an authenc (IPsec ESP style) decryption job.
 *
 * Mirror of aead_encrypt() with the decrypt flavor of the descriptor
 * (init_authenc_job(..., false)) and completion via aead_decrypt_done().
 * The ICV check is performed by the hardware as part of the job.
 */
1536 static int aead_decrypt(struct aead_request *req)
1538 struct aead_edesc *edesc;
1539 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1540 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1541 struct device *jrdev = ctx->jrdev;
/* Debug dump of the incoming ciphertext + associated data. */
1546 caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
1547 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1548 req->assoclen + req->cryptlen, 1);
1550 /* allocate extended descriptor */
1551 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1552 &all_contig, false);
1554 return PTR_ERR(edesc);
1556 /* Create and submit job descriptor*/
1557 init_authenc_job(req, edesc, all_contig, false);
/* Debug dump of the constructed HW job descriptor. */
1559 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1560 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1561 desc_bytes(edesc->hw_desc), 1);
1564 desc = edesc->hw_desc;
1565 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
/* Enqueue failed: undo the DMA mappings. */
1569 aead_unmap(jrdev, edesc, req);
1577 * allocate and map the skcipher extended descriptor for skcipher
/*
 * Builds the per-request extended descriptor for a symmetric-cipher job:
 * counts and DMA-maps src/dst, allocates edesc + HW descriptor + link
 * table + a DMA-able IV copy in one kzalloc, and assembles the sec4 link
 * table with the IV as its first entry followed by the source segments.
 * Returns the edesc, or ERR_PTR() on failure (partial mappings unwound).
 *
 * NOTE(review): else-arms, closing braces and the trailing return are
 * elided in this extract; comments describe only the visible code.
 */
1579 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1582 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1583 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1584 struct device *jrdev = ctx->jrdev;
/* May sleep only if the caller allows it; otherwise atomic allocation. */
1585 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1586 GFP_KERNEL : GFP_ATOMIC;
1587 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1588 struct skcipher_edesc *edesc;
1591 int ivsize = crypto_skcipher_ivsize(skcipher);
1592 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
/* Only cryptlen bytes are processed — skciphers carry no assoc data. */
1594 src_nents = sg_nents_for_len(req->src, req->cryptlen);
1595 if (unlikely(src_nents < 0)) {
1596 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1598 return ERR_PTR(src_nents);
1601 if (req->dst != req->src) {
1602 dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1603 if (unlikely(dst_nents < 0)) {
1604 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1606 return ERR_PTR(dst_nents);
/*
 * In-place: one mapping serves both directions (direction argument elided
 * here — presumably DMA_BIDIRECTIONAL; confirm upstream).
 */
1610 if (likely(req->src == req->dst)) {
1611 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1613 if (unlikely(!mapped_src_nents)) {
1614 dev_err(jrdev, "unable to map source\n");
1615 return ERR_PTR(-ENOMEM);
1618 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1620 if (unlikely(!mapped_src_nents)) {
1621 dev_err(jrdev, "unable to map source\n");
1622 return ERR_PTR(-ENOMEM);
1625 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1627 if (unlikely(!mapped_dst_nents)) {
1628 dev_err(jrdev, "unable to map destination\n");
/* Unwind the already-mapped source before bailing out. */
1629 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1630 return ERR_PTR(-ENOMEM);
/* Link table: 1 entry for the IV + source segments (+ dst if multi-seg). */
1634 sec4_sg_ents = 1 + mapped_src_nents;
1635 dst_sg_idx = sec4_sg_ents;
1636 sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1637 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1640 * allocate space for base edesc and hw desc commands, link tables, IV
1642 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
1645 dev_err(jrdev, "could not allocate extended descriptor\n");
/* Allocation failed: unmap both scatterlists (no IV/table mapped yet). */
1646 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1648 return ERR_PTR(-ENOMEM);
1651 edesc->src_nents = src_nents;
1652 edesc->dst_nents = dst_nents;
1653 edesc->sec4_sg_bytes = sec4_sg_bytes;
/* Link table sits in the same allocation, right after the HW descriptor. */
1654 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1657 /* Make sure IV is located in a DMAable area */
1658 iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
1659 memcpy(iv, req->iv, ivsize);
1661 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
1662 if (dma_mapping_error(jrdev, iv_dma)) {
1663 dev_err(jrdev, "unable to map IV\n");
1664 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1667 return ERR_PTR(-ENOMEM);
/* Table layout: [0] = IV, [1..] = src segments, [dst_sg_idx..] = dst. */
1670 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1671 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg + 1, 0);
1673 if (mapped_dst_nents > 1) {
1674 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1675 edesc->sec4_sg + dst_sg_idx, 0);
/* Hand the assembled link table to the device. */
1678 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1679 sec4_sg_bytes, DMA_TO_DEVICE);
1680 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1681 dev_err(jrdev, "unable to map S/G table\n");
/* Unwind scatterlists AND the just-mapped IV. */
1682 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1683 iv_dma, ivsize, 0, 0);
1685 return ERR_PTR(-ENOMEM);
1688 edesc->iv_dma = iv_dma;
/* Debug dump of the final link table contents. */
1691 print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ",
1692 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
/*
 * skcipher_encrypt() - submit a symmetric-cipher encryption job.
 *
 * Allocates the extended descriptor, builds the encrypt job descriptor
 * (init_skcipher_job(..., true)) and enqueues it; completion arrives via
 * skcipher_encrypt_done().  On enqueue failure the request is unmapped
 * (kfree/return tail elided in this extract).
 */
1699 static int skcipher_encrypt(struct skcipher_request *req)
1701 struct skcipher_edesc *edesc;
1702 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1703 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1704 struct device *jrdev = ctx->jrdev;
1708 /* allocate extended descriptor */
1709 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1711 return PTR_ERR(edesc);
1713 /* Create and submit job descriptor*/
1714 init_skcipher_job(req, edesc, true);
/* Debug dump of the constructed HW job descriptor. */
1716 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
1717 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1718 desc_bytes(edesc->hw_desc), 1);
1720 desc = edesc->hw_desc;
1721 ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);
/* Enqueue failed: undo the DMA mappings. */
1726 skcipher_unmap(jrdev, edesc, req);
/*
 * skcipher_decrypt() - submit a symmetric-cipher decryption job.
 *
 * Before submitting, the last ciphertext block is saved into req->iv so
 * the crypto API's IV-chaining contract is met (see the in-body comment);
 * this must happen before the hardware overwrites src in the in-place
 * case.  Completion arrives via skcipher_decrypt_done(); on enqueue
 * failure the request is unmapped (tail elided in this extract).
 */
1733 static int skcipher_decrypt(struct skcipher_request *req)
1735 struct skcipher_edesc *edesc;
1736 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1737 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1738 int ivsize = crypto_skcipher_ivsize(skcipher);
1739 struct device *jrdev = ctx->jrdev;
1743 /* allocate extended descriptor */
1744 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1746 return PTR_ERR(edesc);
1749 * The crypto API expects us to set the IV (req->iv) to the last
/* Copy the final ciphertext block out of req->src into req->iv. */
1752 scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen - ivsize,
1755 /* Create and submit job descriptor*/
1756 init_skcipher_job(req, edesc, false);
1757 desc = edesc->hw_desc;
/* Debug dump of the constructed HW job descriptor. */
1759 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
1760 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1761 desc_bytes(edesc->hw_desc), 1);
1764 ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
/* Enqueue failed: undo the DMA mappings. */
1768 skcipher_unmap(jrdev, edesc, req);
/*
 * Table of skcipher algorithms this driver registers with the crypto API.
 * Each entry pairs the generic skcipher_alg template (names, key/IV sizes,
 * entry points) with the CAAM-specific .caam.class1_alg_type selecting the
 * hardware algorithm + mode (OP_ALG_ALGSEL_* | OP_ALG_AAI_*).
 *
 * NOTE(review): per-entry delimiter braces are elided in this extract.
 */
1775 static struct caam_skcipher_alg driver_algs[] = {
/* AES-CBC */
1779 .cra_name = "cbc(aes)",
1780 .cra_driver_name = "cbc-aes-caam",
1781 .cra_blocksize = AES_BLOCK_SIZE,
1783 .setkey = skcipher_setkey,
1784 .encrypt = skcipher_encrypt,
1785 .decrypt = skcipher_decrypt,
1786 .min_keysize = AES_MIN_KEY_SIZE,
1787 .max_keysize = AES_MAX_KEY_SIZE,
1788 .ivsize = AES_BLOCK_SIZE,
1790 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
/* 3DES-CBC */
1795 .cra_name = "cbc(des3_ede)",
1796 .cra_driver_name = "cbc-3des-caam",
1797 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1799 .setkey = skcipher_setkey,
1800 .encrypt = skcipher_encrypt,
1801 .decrypt = skcipher_decrypt,
1802 .min_keysize = DES3_EDE_KEY_SIZE,
1803 .max_keysize = DES3_EDE_KEY_SIZE,
1804 .ivsize = DES3_EDE_BLOCK_SIZE,
1806 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
/* DES-CBC */
1811 .cra_name = "cbc(des)",
1812 .cra_driver_name = "cbc-des-caam",
1813 .cra_blocksize = DES_BLOCK_SIZE,
1815 .setkey = skcipher_setkey,
1816 .encrypt = skcipher_encrypt,
1817 .decrypt = skcipher_decrypt,
1818 .min_keysize = DES_KEY_SIZE,
1819 .max_keysize = DES_KEY_SIZE,
1820 .ivsize = DES_BLOCK_SIZE,
1822 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
/* AES-CTR (stream cipher: chunksize set, blocksize line elided) */
1827 .cra_name = "ctr(aes)",
1828 .cra_driver_name = "ctr-aes-caam",
1831 .setkey = skcipher_setkey,
1832 .encrypt = skcipher_encrypt,
1833 .decrypt = skcipher_decrypt,
1834 .min_keysize = AES_MIN_KEY_SIZE,
1835 .max_keysize = AES_MAX_KEY_SIZE,
1836 .ivsize = AES_BLOCK_SIZE,
1837 .chunksize = AES_BLOCK_SIZE,
1839 .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1840 OP_ALG_AAI_CTR_MOD128,
/* RFC3686 CTR(AES): key carries an extra 4-byte nonce */
1845 .cra_name = "rfc3686(ctr(aes))",
1846 .cra_driver_name = "rfc3686-ctr-aes-caam",
1849 .setkey = skcipher_setkey,
1850 .encrypt = skcipher_encrypt,
1851 .decrypt = skcipher_decrypt,
1852 .min_keysize = AES_MIN_KEY_SIZE +
1853 CTR_RFC3686_NONCE_SIZE,
1854 .max_keysize = AES_MAX_KEY_SIZE +
1855 CTR_RFC3686_NONCE_SIZE,
1856 .ivsize = CTR_RFC3686_IV_SIZE,
1857 .chunksize = AES_BLOCK_SIZE,
1860 .class1_alg_type = OP_ALG_ALGSEL_AES |
1861 OP_ALG_AAI_CTR_MOD128,
/* AES-XTS: two keys, hence doubled key sizes and a dedicated setkey */
1868 .cra_name = "xts(aes)",
1869 .cra_driver_name = "xts-aes-caam",
1870 .cra_blocksize = AES_BLOCK_SIZE,
1872 .setkey = xts_skcipher_setkey,
1873 .encrypt = skcipher_encrypt,
1874 .decrypt = skcipher_decrypt,
1875 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1876 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1877 .ivsize = AES_BLOCK_SIZE,
1879 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1883 static struct caam_aead_alg driver_aeads[] = {
1887 .cra_name = "rfc4106(gcm(aes))",
1888 .cra_driver_name = "rfc4106-gcm-aes-caam",
1891 .setkey = rfc4106_setkey,
1892 .setauthsize = rfc4106_setauthsize,
1893 .encrypt = ipsec_gcm_encrypt,
1894 .decrypt = ipsec_gcm_decrypt,
1895 .ivsize = GCM_RFC4106_IV_SIZE,
1896 .maxauthsize = AES_BLOCK_SIZE,
1899 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1905 .cra_name = "rfc4543(gcm(aes))",
1906 .cra_driver_name = "rfc4543-gcm-aes-caam",
1909 .setkey = rfc4543_setkey,
1910 .setauthsize = rfc4543_setauthsize,
1911 .encrypt = ipsec_gcm_encrypt,
1912 .decrypt = ipsec_gcm_decrypt,
1913 .ivsize = GCM_RFC4543_IV_SIZE,
1914 .maxauthsize = AES_BLOCK_SIZE,
1917 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1920 /* Galois Counter Mode */
1924 .cra_name = "gcm(aes)",
1925 .cra_driver_name = "gcm-aes-caam",
1928 .setkey = gcm_setkey,
1929 .setauthsize = gcm_setauthsize,
1930 .encrypt = gcm_encrypt,
1931 .decrypt = gcm_decrypt,
1932 .ivsize = GCM_AES_IV_SIZE,
1933 .maxauthsize = AES_BLOCK_SIZE,
1936 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1939 /* single-pass ipsec_esp descriptor */
1943 .cra_name = "authenc(hmac(md5),"
1944 "ecb(cipher_null))",
1945 .cra_driver_name = "authenc-hmac-md5-"
1946 "ecb-cipher_null-caam",
1947 .cra_blocksize = NULL_BLOCK_SIZE,
1949 .setkey = aead_setkey,
1950 .setauthsize = aead_setauthsize,
1951 .encrypt = aead_encrypt,
1952 .decrypt = aead_decrypt,
1953 .ivsize = NULL_IV_SIZE,
1954 .maxauthsize = MD5_DIGEST_SIZE,
1957 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1958 OP_ALG_AAI_HMAC_PRECOMP,
1964 .cra_name = "authenc(hmac(sha1),"
1965 "ecb(cipher_null))",
1966 .cra_driver_name = "authenc-hmac-sha1-"
1967 "ecb-cipher_null-caam",
1968 .cra_blocksize = NULL_BLOCK_SIZE,
1970 .setkey = aead_setkey,
1971 .setauthsize = aead_setauthsize,
1972 .encrypt = aead_encrypt,
1973 .decrypt = aead_decrypt,
1974 .ivsize = NULL_IV_SIZE,
1975 .maxauthsize = SHA1_DIGEST_SIZE,
1978 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1979 OP_ALG_AAI_HMAC_PRECOMP,
1985 .cra_name = "authenc(hmac(sha224),"
1986 "ecb(cipher_null))",
1987 .cra_driver_name = "authenc-hmac-sha224-"
1988 "ecb-cipher_null-caam",
1989 .cra_blocksize = NULL_BLOCK_SIZE,
1991 .setkey = aead_setkey,
1992 .setauthsize = aead_setauthsize,
1993 .encrypt = aead_encrypt,
1994 .decrypt = aead_decrypt,
1995 .ivsize = NULL_IV_SIZE,
1996 .maxauthsize = SHA224_DIGEST_SIZE,
1999 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2000 OP_ALG_AAI_HMAC_PRECOMP,
2006 .cra_name = "authenc(hmac(sha256),"
2007 "ecb(cipher_null))",
2008 .cra_driver_name = "authenc-hmac-sha256-"
2009 "ecb-cipher_null-caam",
2010 .cra_blocksize = NULL_BLOCK_SIZE,
2012 .setkey = aead_setkey,
2013 .setauthsize = aead_setauthsize,
2014 .encrypt = aead_encrypt,
2015 .decrypt = aead_decrypt,
2016 .ivsize = NULL_IV_SIZE,
2017 .maxauthsize = SHA256_DIGEST_SIZE,
2020 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2021 OP_ALG_AAI_HMAC_PRECOMP,
2027 .cra_name = "authenc(hmac(sha384),"
2028 "ecb(cipher_null))",
2029 .cra_driver_name = "authenc-hmac-sha384-"
2030 "ecb-cipher_null-caam",
2031 .cra_blocksize = NULL_BLOCK_SIZE,
2033 .setkey = aead_setkey,
2034 .setauthsize = aead_setauthsize,
2035 .encrypt = aead_encrypt,
2036 .decrypt = aead_decrypt,
2037 .ivsize = NULL_IV_SIZE,
2038 .maxauthsize = SHA384_DIGEST_SIZE,
2041 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2042 OP_ALG_AAI_HMAC_PRECOMP,
2048 .cra_name = "authenc(hmac(sha512),"
2049 "ecb(cipher_null))",
2050 .cra_driver_name = "authenc-hmac-sha512-"
2051 "ecb-cipher_null-caam",
2052 .cra_blocksize = NULL_BLOCK_SIZE,
2054 .setkey = aead_setkey,
2055 .setauthsize = aead_setauthsize,
2056 .encrypt = aead_encrypt,
2057 .decrypt = aead_decrypt,
2058 .ivsize = NULL_IV_SIZE,
2059 .maxauthsize = SHA512_DIGEST_SIZE,
2062 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2063 OP_ALG_AAI_HMAC_PRECOMP,
2069 .cra_name = "authenc(hmac(md5),cbc(aes))",
2070 .cra_driver_name = "authenc-hmac-md5-"
2072 .cra_blocksize = AES_BLOCK_SIZE,
2074 .setkey = aead_setkey,
2075 .setauthsize = aead_setauthsize,
2076 .encrypt = aead_encrypt,
2077 .decrypt = aead_decrypt,
2078 .ivsize = AES_BLOCK_SIZE,
2079 .maxauthsize = MD5_DIGEST_SIZE,
2082 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2083 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2084 OP_ALG_AAI_HMAC_PRECOMP,
2090 .cra_name = "echainiv(authenc(hmac(md5),"
2092 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2094 .cra_blocksize = AES_BLOCK_SIZE,
2096 .setkey = aead_setkey,
2097 .setauthsize = aead_setauthsize,
2098 .encrypt = aead_encrypt,
2099 .decrypt = aead_decrypt,
2100 .ivsize = AES_BLOCK_SIZE,
2101 .maxauthsize = MD5_DIGEST_SIZE,
2104 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2105 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2106 OP_ALG_AAI_HMAC_PRECOMP,
2113 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2114 .cra_driver_name = "authenc-hmac-sha1-"
2116 .cra_blocksize = AES_BLOCK_SIZE,
2118 .setkey = aead_setkey,
2119 .setauthsize = aead_setauthsize,
2120 .encrypt = aead_encrypt,
2121 .decrypt = aead_decrypt,
2122 .ivsize = AES_BLOCK_SIZE,
2123 .maxauthsize = SHA1_DIGEST_SIZE,
2126 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2127 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2128 OP_ALG_AAI_HMAC_PRECOMP,
2134 .cra_name = "echainiv(authenc(hmac(sha1),"
2136 .cra_driver_name = "echainiv-authenc-"
2137 "hmac-sha1-cbc-aes-caam",
2138 .cra_blocksize = AES_BLOCK_SIZE,
2140 .setkey = aead_setkey,
2141 .setauthsize = aead_setauthsize,
2142 .encrypt = aead_encrypt,
2143 .decrypt = aead_decrypt,
2144 .ivsize = AES_BLOCK_SIZE,
2145 .maxauthsize = SHA1_DIGEST_SIZE,
2148 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2149 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2150 OP_ALG_AAI_HMAC_PRECOMP,
2157 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2158 .cra_driver_name = "authenc-hmac-sha224-"
2160 .cra_blocksize = AES_BLOCK_SIZE,
2162 .setkey = aead_setkey,
2163 .setauthsize = aead_setauthsize,
2164 .encrypt = aead_encrypt,
2165 .decrypt = aead_decrypt,
2166 .ivsize = AES_BLOCK_SIZE,
2167 .maxauthsize = SHA224_DIGEST_SIZE,
2170 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2171 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2172 OP_ALG_AAI_HMAC_PRECOMP,
2178 .cra_name = "echainiv(authenc(hmac(sha224),"
2180 .cra_driver_name = "echainiv-authenc-"
2181 "hmac-sha224-cbc-aes-caam",
2182 .cra_blocksize = AES_BLOCK_SIZE,
2184 .setkey = aead_setkey,
2185 .setauthsize = aead_setauthsize,
2186 .encrypt = aead_encrypt,
2187 .decrypt = aead_decrypt,
2188 .ivsize = AES_BLOCK_SIZE,
2189 .maxauthsize = SHA224_DIGEST_SIZE,
2192 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2193 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2194 OP_ALG_AAI_HMAC_PRECOMP,
2201 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2202 .cra_driver_name = "authenc-hmac-sha256-"
2204 .cra_blocksize = AES_BLOCK_SIZE,
2206 .setkey = aead_setkey,
2207 .setauthsize = aead_setauthsize,
2208 .encrypt = aead_encrypt,
2209 .decrypt = aead_decrypt,
2210 .ivsize = AES_BLOCK_SIZE,
2211 .maxauthsize = SHA256_DIGEST_SIZE,
2214 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2215 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2216 OP_ALG_AAI_HMAC_PRECOMP,
2222 .cra_name = "echainiv(authenc(hmac(sha256),"
2224 .cra_driver_name = "echainiv-authenc-"
2225 "hmac-sha256-cbc-aes-caam",
2226 .cra_blocksize = AES_BLOCK_SIZE,
2228 .setkey = aead_setkey,
2229 .setauthsize = aead_setauthsize,
2230 .encrypt = aead_encrypt,
2231 .decrypt = aead_decrypt,
2232 .ivsize = AES_BLOCK_SIZE,
2233 .maxauthsize = SHA256_DIGEST_SIZE,
2236 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2237 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2238 OP_ALG_AAI_HMAC_PRECOMP,
2245 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2246 .cra_driver_name = "authenc-hmac-sha384-"
2248 .cra_blocksize = AES_BLOCK_SIZE,
2250 .setkey = aead_setkey,
2251 .setauthsize = aead_setauthsize,
2252 .encrypt = aead_encrypt,
2253 .decrypt = aead_decrypt,
2254 .ivsize = AES_BLOCK_SIZE,
2255 .maxauthsize = SHA384_DIGEST_SIZE,
2258 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2259 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2260 OP_ALG_AAI_HMAC_PRECOMP,
2266 .cra_name = "echainiv(authenc(hmac(sha384),"
2268 .cra_driver_name = "echainiv-authenc-"
2269 "hmac-sha384-cbc-aes-caam",
2270 .cra_blocksize = AES_BLOCK_SIZE,
2272 .setkey = aead_setkey,
2273 .setauthsize = aead_setauthsize,
2274 .encrypt = aead_encrypt,
2275 .decrypt = aead_decrypt,
2276 .ivsize = AES_BLOCK_SIZE,
2277 .maxauthsize = SHA384_DIGEST_SIZE,
2280 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2281 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2282 OP_ALG_AAI_HMAC_PRECOMP,
2289 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2290 .cra_driver_name = "authenc-hmac-sha512-"
2292 .cra_blocksize = AES_BLOCK_SIZE,
2294 .setkey = aead_setkey,
2295 .setauthsize = aead_setauthsize,
2296 .encrypt = aead_encrypt,
2297 .decrypt = aead_decrypt,
2298 .ivsize = AES_BLOCK_SIZE,
2299 .maxauthsize = SHA512_DIGEST_SIZE,
2302 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2303 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2304 OP_ALG_AAI_HMAC_PRECOMP,
2310 .cra_name = "echainiv(authenc(hmac(sha512),"
2312 .cra_driver_name = "echainiv-authenc-"
2313 "hmac-sha512-cbc-aes-caam",
2314 .cra_blocksize = AES_BLOCK_SIZE,
2316 .setkey = aead_setkey,
2317 .setauthsize = aead_setauthsize,
2318 .encrypt = aead_encrypt,
2319 .decrypt = aead_decrypt,
2320 .ivsize = AES_BLOCK_SIZE,
2321 .maxauthsize = SHA512_DIGEST_SIZE,
2324 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2325 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2326 OP_ALG_AAI_HMAC_PRECOMP,
2333 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2334 .cra_driver_name = "authenc-hmac-md5-"
2335 "cbc-des3_ede-caam",
2336 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2338 .setkey = aead_setkey,
2339 .setauthsize = aead_setauthsize,
2340 .encrypt = aead_encrypt,
2341 .decrypt = aead_decrypt,
2342 .ivsize = DES3_EDE_BLOCK_SIZE,
2343 .maxauthsize = MD5_DIGEST_SIZE,
2346 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2347 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2348 OP_ALG_AAI_HMAC_PRECOMP,
2354 .cra_name = "echainiv(authenc(hmac(md5),"
2356 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2357 "cbc-des3_ede-caam",
2358 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2360 .setkey = aead_setkey,
2361 .setauthsize = aead_setauthsize,
2362 .encrypt = aead_encrypt,
2363 .decrypt = aead_decrypt,
2364 .ivsize = DES3_EDE_BLOCK_SIZE,
2365 .maxauthsize = MD5_DIGEST_SIZE,
2368 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2369 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2370 OP_ALG_AAI_HMAC_PRECOMP,
2377 .cra_name = "authenc(hmac(sha1),"
2379 .cra_driver_name = "authenc-hmac-sha1-"
2380 "cbc-des3_ede-caam",
2381 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2383 .setkey = aead_setkey,
2384 .setauthsize = aead_setauthsize,
2385 .encrypt = aead_encrypt,
2386 .decrypt = aead_decrypt,
2387 .ivsize = DES3_EDE_BLOCK_SIZE,
2388 .maxauthsize = SHA1_DIGEST_SIZE,
2391 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2392 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2393 OP_ALG_AAI_HMAC_PRECOMP,
2399 .cra_name = "echainiv(authenc(hmac(sha1),"
2401 .cra_driver_name = "echainiv-authenc-"
2403 "cbc-des3_ede-caam",
2404 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2406 .setkey = aead_setkey,
2407 .setauthsize = aead_setauthsize,
2408 .encrypt = aead_encrypt,
2409 .decrypt = aead_decrypt,
2410 .ivsize = DES3_EDE_BLOCK_SIZE,
2411 .maxauthsize = SHA1_DIGEST_SIZE,
2414 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2415 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2416 OP_ALG_AAI_HMAC_PRECOMP,
2423 .cra_name = "authenc(hmac(sha224),"
2425 .cra_driver_name = "authenc-hmac-sha224-"
2426 "cbc-des3_ede-caam",
2427 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2429 .setkey = aead_setkey,
2430 .setauthsize = aead_setauthsize,
2431 .encrypt = aead_encrypt,
2432 .decrypt = aead_decrypt,
2433 .ivsize = DES3_EDE_BLOCK_SIZE,
2434 .maxauthsize = SHA224_DIGEST_SIZE,
2437 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2438 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2439 OP_ALG_AAI_HMAC_PRECOMP,
2445 .cra_name = "echainiv(authenc(hmac(sha224),"
2447 .cra_driver_name = "echainiv-authenc-"
2449 "cbc-des3_ede-caam",
2450 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2452 .setkey = aead_setkey,
2453 .setauthsize = aead_setauthsize,
2454 .encrypt = aead_encrypt,
2455 .decrypt = aead_decrypt,
2456 .ivsize = DES3_EDE_BLOCK_SIZE,
2457 .maxauthsize = SHA224_DIGEST_SIZE,
2460 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2461 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2462 OP_ALG_AAI_HMAC_PRECOMP,
2469 .cra_name = "authenc(hmac(sha256),"
2471 .cra_driver_name = "authenc-hmac-sha256-"
2472 "cbc-des3_ede-caam",
2473 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2475 .setkey = aead_setkey,
2476 .setauthsize = aead_setauthsize,
2477 .encrypt = aead_encrypt,
2478 .decrypt = aead_decrypt,
2479 .ivsize = DES3_EDE_BLOCK_SIZE,
2480 .maxauthsize = SHA256_DIGEST_SIZE,
2483 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2484 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2485 OP_ALG_AAI_HMAC_PRECOMP,
2491 .cra_name = "echainiv(authenc(hmac(sha256),"
2493 .cra_driver_name = "echainiv-authenc-"
2495 "cbc-des3_ede-caam",
2496 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2498 .setkey = aead_setkey,
2499 .setauthsize = aead_setauthsize,
2500 .encrypt = aead_encrypt,
2501 .decrypt = aead_decrypt,
2502 .ivsize = DES3_EDE_BLOCK_SIZE,
2503 .maxauthsize = SHA256_DIGEST_SIZE,
2506 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2507 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2508 OP_ALG_AAI_HMAC_PRECOMP,
2515 .cra_name = "authenc(hmac(sha384),"
2517 .cra_driver_name = "authenc-hmac-sha384-"
2518 "cbc-des3_ede-caam",
2519 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2521 .setkey = aead_setkey,
2522 .setauthsize = aead_setauthsize,
2523 .encrypt = aead_encrypt,
2524 .decrypt = aead_decrypt,
2525 .ivsize = DES3_EDE_BLOCK_SIZE,
2526 .maxauthsize = SHA384_DIGEST_SIZE,
2529 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2530 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2531 OP_ALG_AAI_HMAC_PRECOMP,
2537 .cra_name = "echainiv(authenc(hmac(sha384),"
2539 .cra_driver_name = "echainiv-authenc-"
2541 "cbc-des3_ede-caam",
2542 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2544 .setkey = aead_setkey,
2545 .setauthsize = aead_setauthsize,
2546 .encrypt = aead_encrypt,
2547 .decrypt = aead_decrypt,
2548 .ivsize = DES3_EDE_BLOCK_SIZE,
2549 .maxauthsize = SHA384_DIGEST_SIZE,
2552 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2553 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2554 OP_ALG_AAI_HMAC_PRECOMP,
2561 .cra_name = "authenc(hmac(sha512),"
2563 .cra_driver_name = "authenc-hmac-sha512-"
2564 "cbc-des3_ede-caam",
2565 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2567 .setkey = aead_setkey,
2568 .setauthsize = aead_setauthsize,
2569 .encrypt = aead_encrypt,
2570 .decrypt = aead_decrypt,
2571 .ivsize = DES3_EDE_BLOCK_SIZE,
2572 .maxauthsize = SHA512_DIGEST_SIZE,
2575 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2576 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2577 OP_ALG_AAI_HMAC_PRECOMP,
2583 .cra_name = "echainiv(authenc(hmac(sha512),"
2585 .cra_driver_name = "echainiv-authenc-"
2587 "cbc-des3_ede-caam",
2588 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2590 .setkey = aead_setkey,
2591 .setauthsize = aead_setauthsize,
2592 .encrypt = aead_encrypt,
2593 .decrypt = aead_decrypt,
2594 .ivsize = DES3_EDE_BLOCK_SIZE,
2595 .maxauthsize = SHA512_DIGEST_SIZE,
2598 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2599 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2600 OP_ALG_AAI_HMAC_PRECOMP,
2607 .cra_name = "authenc(hmac(md5),cbc(des))",
2608 .cra_driver_name = "authenc-hmac-md5-"
2610 .cra_blocksize = DES_BLOCK_SIZE,
2612 .setkey = aead_setkey,
2613 .setauthsize = aead_setauthsize,
2614 .encrypt = aead_encrypt,
2615 .decrypt = aead_decrypt,
2616 .ivsize = DES_BLOCK_SIZE,
2617 .maxauthsize = MD5_DIGEST_SIZE,
2620 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2621 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2622 OP_ALG_AAI_HMAC_PRECOMP,
2628 .cra_name = "echainiv(authenc(hmac(md5),"
2630 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2632 .cra_blocksize = DES_BLOCK_SIZE,
2634 .setkey = aead_setkey,
2635 .setauthsize = aead_setauthsize,
2636 .encrypt = aead_encrypt,
2637 .decrypt = aead_decrypt,
2638 .ivsize = DES_BLOCK_SIZE,
2639 .maxauthsize = MD5_DIGEST_SIZE,
2642 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2643 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2644 OP_ALG_AAI_HMAC_PRECOMP,
2651 .cra_name = "authenc(hmac(sha1),cbc(des))",
2652 .cra_driver_name = "authenc-hmac-sha1-"
2654 .cra_blocksize = DES_BLOCK_SIZE,
2656 .setkey = aead_setkey,
2657 .setauthsize = aead_setauthsize,
2658 .encrypt = aead_encrypt,
2659 .decrypt = aead_decrypt,
2660 .ivsize = DES_BLOCK_SIZE,
2661 .maxauthsize = SHA1_DIGEST_SIZE,
2664 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2665 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2666 OP_ALG_AAI_HMAC_PRECOMP,
2672 .cra_name = "echainiv(authenc(hmac(sha1),"
2674 .cra_driver_name = "echainiv-authenc-"
2675 "hmac-sha1-cbc-des-caam",
2676 .cra_blocksize = DES_BLOCK_SIZE,
2678 .setkey = aead_setkey,
2679 .setauthsize = aead_setauthsize,
2680 .encrypt = aead_encrypt,
2681 .decrypt = aead_decrypt,
2682 .ivsize = DES_BLOCK_SIZE,
2683 .maxauthsize = SHA1_DIGEST_SIZE,
2686 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2687 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2688 OP_ALG_AAI_HMAC_PRECOMP,
2695 .cra_name = "authenc(hmac(sha224),cbc(des))",
2696 .cra_driver_name = "authenc-hmac-sha224-"
2698 .cra_blocksize = DES_BLOCK_SIZE,
2700 .setkey = aead_setkey,
2701 .setauthsize = aead_setauthsize,
2702 .encrypt = aead_encrypt,
2703 .decrypt = aead_decrypt,
2704 .ivsize = DES_BLOCK_SIZE,
2705 .maxauthsize = SHA224_DIGEST_SIZE,
2708 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2709 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2710 OP_ALG_AAI_HMAC_PRECOMP,
2716 .cra_name = "echainiv(authenc(hmac(sha224),"
2718 .cra_driver_name = "echainiv-authenc-"
2719 "hmac-sha224-cbc-des-caam",
2720 .cra_blocksize = DES_BLOCK_SIZE,
2722 .setkey = aead_setkey,
2723 .setauthsize = aead_setauthsize,
2724 .encrypt = aead_encrypt,
2725 .decrypt = aead_decrypt,
2726 .ivsize = DES_BLOCK_SIZE,
2727 .maxauthsize = SHA224_DIGEST_SIZE,
2730 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2731 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2732 OP_ALG_AAI_HMAC_PRECOMP,
2739 .cra_name = "authenc(hmac(sha256),cbc(des))",
2740 .cra_driver_name = "authenc-hmac-sha256-"
2742 .cra_blocksize = DES_BLOCK_SIZE,
2744 .setkey = aead_setkey,
2745 .setauthsize = aead_setauthsize,
2746 .encrypt = aead_encrypt,
2747 .decrypt = aead_decrypt,
2748 .ivsize = DES_BLOCK_SIZE,
2749 .maxauthsize = SHA256_DIGEST_SIZE,
2752 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2753 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2754 OP_ALG_AAI_HMAC_PRECOMP,
2760 .cra_name = "echainiv(authenc(hmac(sha256),"
2762 .cra_driver_name = "echainiv-authenc-"
2763 "hmac-sha256-cbc-des-caam",
2764 .cra_blocksize = DES_BLOCK_SIZE,
2766 .setkey = aead_setkey,
2767 .setauthsize = aead_setauthsize,
2768 .encrypt = aead_encrypt,
2769 .decrypt = aead_decrypt,
2770 .ivsize = DES_BLOCK_SIZE,
2771 .maxauthsize = SHA256_DIGEST_SIZE,
2774 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2775 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2776 OP_ALG_AAI_HMAC_PRECOMP,
2783 .cra_name = "authenc(hmac(sha384),cbc(des))",
2784 .cra_driver_name = "authenc-hmac-sha384-"
2786 .cra_blocksize = DES_BLOCK_SIZE,
2788 .setkey = aead_setkey,
2789 .setauthsize = aead_setauthsize,
2790 .encrypt = aead_encrypt,
2791 .decrypt = aead_decrypt,
2792 .ivsize = DES_BLOCK_SIZE,
2793 .maxauthsize = SHA384_DIGEST_SIZE,
2796 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2797 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2798 OP_ALG_AAI_HMAC_PRECOMP,
2804 .cra_name = "echainiv(authenc(hmac(sha384),"
2806 .cra_driver_name = "echainiv-authenc-"
2807 "hmac-sha384-cbc-des-caam",
2808 .cra_blocksize = DES_BLOCK_SIZE,
2810 .setkey = aead_setkey,
2811 .setauthsize = aead_setauthsize,
2812 .encrypt = aead_encrypt,
2813 .decrypt = aead_decrypt,
2814 .ivsize = DES_BLOCK_SIZE,
2815 .maxauthsize = SHA384_DIGEST_SIZE,
2818 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2819 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2820 OP_ALG_AAI_HMAC_PRECOMP,
2827 .cra_name = "authenc(hmac(sha512),cbc(des))",
2828 .cra_driver_name = "authenc-hmac-sha512-"
2830 .cra_blocksize = DES_BLOCK_SIZE,
2832 .setkey = aead_setkey,
2833 .setauthsize = aead_setauthsize,
2834 .encrypt = aead_encrypt,
2835 .decrypt = aead_decrypt,
2836 .ivsize = DES_BLOCK_SIZE,
2837 .maxauthsize = SHA512_DIGEST_SIZE,
2840 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2841 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2842 OP_ALG_AAI_HMAC_PRECOMP,
2848 .cra_name = "echainiv(authenc(hmac(sha512),"
2850 .cra_driver_name = "echainiv-authenc-"
2851 "hmac-sha512-cbc-des-caam",
2852 .cra_blocksize = DES_BLOCK_SIZE,
2854 .setkey = aead_setkey,
2855 .setauthsize = aead_setauthsize,
2856 .encrypt = aead_encrypt,
2857 .decrypt = aead_decrypt,
2858 .ivsize = DES_BLOCK_SIZE,
2859 .maxauthsize = SHA512_DIGEST_SIZE,
2862 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2863 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2864 OP_ALG_AAI_HMAC_PRECOMP,
2871 .cra_name = "authenc(hmac(md5),"
2872 "rfc3686(ctr(aes)))",
2873 .cra_driver_name = "authenc-hmac-md5-"
2874 "rfc3686-ctr-aes-caam",
2877 .setkey = aead_setkey,
2878 .setauthsize = aead_setauthsize,
2879 .encrypt = aead_encrypt,
2880 .decrypt = aead_decrypt,
2881 .ivsize = CTR_RFC3686_IV_SIZE,
2882 .maxauthsize = MD5_DIGEST_SIZE,
2885 .class1_alg_type = OP_ALG_ALGSEL_AES |
2886 OP_ALG_AAI_CTR_MOD128,
2887 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2888 OP_ALG_AAI_HMAC_PRECOMP,
2895 .cra_name = "seqiv(authenc("
2896 "hmac(md5),rfc3686(ctr(aes))))",
2897 .cra_driver_name = "seqiv-authenc-hmac-md5-"
2898 "rfc3686-ctr-aes-caam",
2901 .setkey = aead_setkey,
2902 .setauthsize = aead_setauthsize,
2903 .encrypt = aead_encrypt,
2904 .decrypt = aead_decrypt,
2905 .ivsize = CTR_RFC3686_IV_SIZE,
2906 .maxauthsize = MD5_DIGEST_SIZE,
2909 .class1_alg_type = OP_ALG_ALGSEL_AES |
2910 OP_ALG_AAI_CTR_MOD128,
2911 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2912 OP_ALG_AAI_HMAC_PRECOMP,
2920 .cra_name = "authenc(hmac(sha1),"
2921 "rfc3686(ctr(aes)))",
2922 .cra_driver_name = "authenc-hmac-sha1-"
2923 "rfc3686-ctr-aes-caam",
2926 .setkey = aead_setkey,
2927 .setauthsize = aead_setauthsize,
2928 .encrypt = aead_encrypt,
2929 .decrypt = aead_decrypt,
2930 .ivsize = CTR_RFC3686_IV_SIZE,
2931 .maxauthsize = SHA1_DIGEST_SIZE,
2934 .class1_alg_type = OP_ALG_ALGSEL_AES |
2935 OP_ALG_AAI_CTR_MOD128,
2936 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2937 OP_ALG_AAI_HMAC_PRECOMP,
2944 .cra_name = "seqiv(authenc("
2945 "hmac(sha1),rfc3686(ctr(aes))))",
2946 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
2947 "rfc3686-ctr-aes-caam",
2950 .setkey = aead_setkey,
2951 .setauthsize = aead_setauthsize,
2952 .encrypt = aead_encrypt,
2953 .decrypt = aead_decrypt,
2954 .ivsize = CTR_RFC3686_IV_SIZE,
2955 .maxauthsize = SHA1_DIGEST_SIZE,
2958 .class1_alg_type = OP_ALG_ALGSEL_AES |
2959 OP_ALG_AAI_CTR_MOD128,
2960 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2961 OP_ALG_AAI_HMAC_PRECOMP,
2969 .cra_name = "authenc(hmac(sha224),"
2970 "rfc3686(ctr(aes)))",
2971 .cra_driver_name = "authenc-hmac-sha224-"
2972 "rfc3686-ctr-aes-caam",
2975 .setkey = aead_setkey,
2976 .setauthsize = aead_setauthsize,
2977 .encrypt = aead_encrypt,
2978 .decrypt = aead_decrypt,
2979 .ivsize = CTR_RFC3686_IV_SIZE,
2980 .maxauthsize = SHA224_DIGEST_SIZE,
2983 .class1_alg_type = OP_ALG_ALGSEL_AES |
2984 OP_ALG_AAI_CTR_MOD128,
2985 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2986 OP_ALG_AAI_HMAC_PRECOMP,
2993 .cra_name = "seqiv(authenc("
2994 "hmac(sha224),rfc3686(ctr(aes))))",
2995 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
2996 "rfc3686-ctr-aes-caam",
2999 .setkey = aead_setkey,
3000 .setauthsize = aead_setauthsize,
3001 .encrypt = aead_encrypt,
3002 .decrypt = aead_decrypt,
3003 .ivsize = CTR_RFC3686_IV_SIZE,
3004 .maxauthsize = SHA224_DIGEST_SIZE,
3007 .class1_alg_type = OP_ALG_ALGSEL_AES |
3008 OP_ALG_AAI_CTR_MOD128,
3009 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3010 OP_ALG_AAI_HMAC_PRECOMP,
3018 .cra_name = "authenc(hmac(sha256),"
3019 "rfc3686(ctr(aes)))",
3020 .cra_driver_name = "authenc-hmac-sha256-"
3021 "rfc3686-ctr-aes-caam",
3024 .setkey = aead_setkey,
3025 .setauthsize = aead_setauthsize,
3026 .encrypt = aead_encrypt,
3027 .decrypt = aead_decrypt,
3028 .ivsize = CTR_RFC3686_IV_SIZE,
3029 .maxauthsize = SHA256_DIGEST_SIZE,
3032 .class1_alg_type = OP_ALG_ALGSEL_AES |
3033 OP_ALG_AAI_CTR_MOD128,
3034 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3035 OP_ALG_AAI_HMAC_PRECOMP,
3042 .cra_name = "seqiv(authenc(hmac(sha256),"
3043 "rfc3686(ctr(aes))))",
3044 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3045 "rfc3686-ctr-aes-caam",
3048 .setkey = aead_setkey,
3049 .setauthsize = aead_setauthsize,
3050 .encrypt = aead_encrypt,
3051 .decrypt = aead_decrypt,
3052 .ivsize = CTR_RFC3686_IV_SIZE,
3053 .maxauthsize = SHA256_DIGEST_SIZE,
3056 .class1_alg_type = OP_ALG_ALGSEL_AES |
3057 OP_ALG_AAI_CTR_MOD128,
3058 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3059 OP_ALG_AAI_HMAC_PRECOMP,
3067 .cra_name = "authenc(hmac(sha384),"
3068 "rfc3686(ctr(aes)))",
3069 .cra_driver_name = "authenc-hmac-sha384-"
3070 "rfc3686-ctr-aes-caam",
3073 .setkey = aead_setkey,
3074 .setauthsize = aead_setauthsize,
3075 .encrypt = aead_encrypt,
3076 .decrypt = aead_decrypt,
3077 .ivsize = CTR_RFC3686_IV_SIZE,
3078 .maxauthsize = SHA384_DIGEST_SIZE,
3081 .class1_alg_type = OP_ALG_ALGSEL_AES |
3082 OP_ALG_AAI_CTR_MOD128,
3083 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3084 OP_ALG_AAI_HMAC_PRECOMP,
3091 .cra_name = "seqiv(authenc(hmac(sha384),"
3092 "rfc3686(ctr(aes))))",
3093 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3094 "rfc3686-ctr-aes-caam",
3097 .setkey = aead_setkey,
3098 .setauthsize = aead_setauthsize,
3099 .encrypt = aead_encrypt,
3100 .decrypt = aead_decrypt,
3101 .ivsize = CTR_RFC3686_IV_SIZE,
3102 .maxauthsize = SHA384_DIGEST_SIZE,
3105 .class1_alg_type = OP_ALG_ALGSEL_AES |
3106 OP_ALG_AAI_CTR_MOD128,
3107 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3108 OP_ALG_AAI_HMAC_PRECOMP,
3116 .cra_name = "authenc(hmac(sha512),"
3117 "rfc3686(ctr(aes)))",
3118 .cra_driver_name = "authenc-hmac-sha512-"
3119 "rfc3686-ctr-aes-caam",
3122 .setkey = aead_setkey,
3123 .setauthsize = aead_setauthsize,
3124 .encrypt = aead_encrypt,
3125 .decrypt = aead_decrypt,
3126 .ivsize = CTR_RFC3686_IV_SIZE,
3127 .maxauthsize = SHA512_DIGEST_SIZE,
3130 .class1_alg_type = OP_ALG_ALGSEL_AES |
3131 OP_ALG_AAI_CTR_MOD128,
3132 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3133 OP_ALG_AAI_HMAC_PRECOMP,
3140 .cra_name = "seqiv(authenc(hmac(sha512),"
3141 "rfc3686(ctr(aes))))",
3142 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3143 "rfc3686-ctr-aes-caam",
3146 .setkey = aead_setkey,
3147 .setauthsize = aead_setauthsize,
3148 .encrypt = aead_encrypt,
3149 .decrypt = aead_decrypt,
3150 .ivsize = CTR_RFC3686_IV_SIZE,
3151 .maxauthsize = SHA512_DIGEST_SIZE,
3154 .class1_alg_type = OP_ALG_ALGSEL_AES |
3155 OP_ALG_AAI_CTR_MOD128,
3156 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3157 OP_ALG_AAI_HMAC_PRECOMP,
3165 .cra_name = "rfc7539(chacha20,poly1305)",
3166 .cra_driver_name = "rfc7539-chacha20-poly1305-"
3170 .setkey = chachapoly_setkey,
3171 .setauthsize = chachapoly_setauthsize,
3172 .encrypt = chachapoly_encrypt,
3173 .decrypt = chachapoly_decrypt,
3174 .ivsize = CHACHAPOLY_IV_SIZE,
3175 .maxauthsize = POLY1305_DIGEST_SIZE,
3178 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3180 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3187 .cra_name = "rfc7539esp(chacha20,poly1305)",
3188 .cra_driver_name = "rfc7539esp-chacha20-"
3192 .setkey = chachapoly_setkey,
3193 .setauthsize = chachapoly_setauthsize,
3194 .encrypt = chachapoly_encrypt,
3195 .decrypt = chachapoly_decrypt,
3197 .maxauthsize = POLY1305_DIGEST_SIZE,
3200 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3202 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
/*
 * caam_init_common() - per-transform setup shared by the skcipher and AEAD
 * .init hooks.
 *
 * Visible steps:
 *  - allocate a job ring device for this transform;
 *  - pick the DMA direction for the context: DMA_BIDIRECTIONAL when the
 *    controller era is >= 6 and uses_dkp is set, DMA_TO_DEVICE otherwise;
 *  - DMA-map the shared descriptors and key as one contiguous region of
 *    struct caam_ctx, then derive the per-field DMA addresses from it;
 *  - copy the class1/class2 algorithm-type templates from @caam into the
 *    context's cdata/adata headers.
 *
 * NOTE(review): this excerpt elides intermediate lines (the "bool uses_dkp"
 * parameter line, closing braces, the error return after dma_mapping_error,
 * and the final "return 0") - confirm against the complete source file.
 */
3208 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3211 dma_addr_t dma_addr;
3212 struct caam_drv_private *priv;
/* One job ring per transform; released again in caam_exit_common(). */
3214 ctx->jrdev = caam_jr_alloc();
3215 if (IS_ERR(ctx->jrdev)) {
3216 pr_err("Job Ring Device allocation for transform failed\n");
3217 return PTR_ERR(ctx->jrdev);
3220 priv = dev_get_drvdata(ctx->jrdev->parent);
/*
 * Era >= 6 with uses_dkp: map bidirectionally (presumably because the
 * device writes the derived key back into the context - TODO confirm).
 */
3221 if (priv->era >= 6 && uses_dkp)
3222 ctx->dir = DMA_BIDIRECTIONAL;
3224 ctx->dir = DMA_TO_DEVICE;
/*
 * Single mapping starting at sh_desc_enc; the end offset is an elided
 * offsetof() field (see the unmap size in caam_exit_common()).
 * DMA_ATTR_SKIP_CPU_SYNC defers cache maintenance to explicit syncs.
 */
3226 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3227 offsetof(struct caam_ctx,
3229 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3230 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3231 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3232 caam_jr_free(ctx->jrdev);
/* Derive the individual DMA addresses from the one mapping. */
3236 ctx->sh_desc_enc_dma = dma_addr;
3237 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3239 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3241 /* copy descriptor header template value */
3242 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3243 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
/*
 * caam_cra_init() - skcipher .init hook: recover the enclosing
 * caam_skcipher_alg from the generic skcipher_alg via container_of(),
 * then run the common context setup.
 * NOTE(review): the final argument line of the caam_init_common() call
 * is elided in this excerpt.
 */
3248 static int caam_cra_init(struct crypto_skcipher *tfm)
3250 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3251 struct caam_skcipher_alg *caam_alg =
3252 container_of(alg, typeof(*caam_alg), skcipher);
3254 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
/*
 * caam_aead_init() - AEAD .init hook: locate the enclosing caam_aead_alg
 * and run the common context setup.
 */
3258 static int caam_aead_init(struct crypto_aead *tfm)
3260 struct aead_alg *alg = crypto_aead_alg(tfm);
3261 struct caam_aead_alg *caam_alg =
3262 container_of(alg, struct caam_aead_alg, aead);
3263 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
/*
 * The last argument feeds caam_init_common()'s uses_dkp: true exactly
 * when this alg uses the shared aead_setkey implementation.
 */
3265 return caam_init_common(ctx, &caam_alg->caam,
3266 alg->setkey == aead_setkey);
/*
 * caam_exit_common() - undo caam_init_common(): unmap the shared
 * descriptor/key region and release the job ring.
 */
3269 static void caam_exit_common(struct caam_ctx *ctx)
/*
 * Unmap size is everything in struct caam_ctx before sh_desc_enc_dma,
 * mirroring the single mapping made at init time.
 */
3271 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3272 offsetof(struct caam_ctx, sh_desc_enc_dma),
3273 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3274 caam_jr_free(ctx->jrdev);
/* skcipher .exit hook - delegates teardown to caam_exit_common(). */
3277 static void caam_cra_exit(struct crypto_skcipher *tfm)
3279 caam_exit_common(crypto_skcipher_ctx(tfm));
/* AEAD .exit hook - delegates teardown to caam_exit_common(). */
3282 static void caam_aead_exit(struct crypto_aead *tfm)
3284 caam_exit_common(crypto_aead_ctx(tfm));
/*
 * caam_algapi_exit() - module unload: unregister every AEAD and skcipher
 * algorithm that was successfully registered by caam_algapi_init().
 */
3287 static void __exit caam_algapi_exit(void)
3291 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3292 struct caam_aead_alg *t_alg = driver_aeads + i;
/* Only entries whose registration succeeded carry registered == true. */
3294 if (t_alg->registered)
3295 crypto_unregister_aead(&t_alg->aead);
3298 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3299 struct caam_skcipher_alg *t_alg = driver_algs + i;
3301 if (t_alg->registered)
3302 crypto_unregister_skcipher(&t_alg->skcipher);
/*
 * caam_skcipher_alg_init() - fill in the crypto_alg boilerplate shared by
 * every CAAM skcipher template just before registration.
 */
3306 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3308 struct skcipher_alg *alg = &t_alg->skcipher;
3310 alg->base.cra_module = THIS_MODULE;
3311 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3312 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
/* Marked asynchronous and kernel-driver-only. */
3313 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3315 alg->init = caam_cra_init;
3316 alg->exit = caam_cra_exit;
/*
 * caam_aead_alg_init() - fill in the crypto_alg boilerplate shared by
 * every CAAM AEAD template just before registration.
 */
3319 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3321 struct aead_alg *alg = &t_alg->aead;
3323 alg->base.cra_module = THIS_MODULE;
3324 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3325 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
/* Marked asynchronous and kernel-driver-only. */
3326 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3328 alg->init = caam_aead_init;
3329 alg->exit = caam_aead_exit;
/*
 * caam_algapi_init() - module entry point.
 *
 * Locates the CAAM controller device-tree node, reads the hardware's
 * CHA (crypto hardware accelerator) version/instantiation registers to
 * discover which engines are present (DES, AES, MD, and on era >= 10
 * also ChaCha and Poly1305), then registers only the skcipher and AEAD
 * algorithms the hardware can actually run.
 *
 * NOTE(review): this excerpt elides several lines (the NULL-priv bailout,
 * the "else" introducing the era >= 10 register layout, goto/cleanup on
 * registration failure, and the final return) - confirm against the
 * complete source file.
 */
3332 static int __init caam_algapi_init(void)
3334 struct device_node *dev_node;
3335 struct platform_device *pdev;
3336 struct device *ctrldev;
3337 struct caam_drv_private *priv;
3339 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
3340 unsigned int md_limit = SHA512_DIGEST_SIZE;
3341 bool registered = false;
/* Try the current compatible string first, then the legacy spelling. */
3343 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3345 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3350 pdev = of_find_device_by_node(dev_node);
3352 of_node_put(dev_node);
3356 ctrldev = &pdev->dev;
3357 priv = dev_get_drvdata(ctrldev);
3358 of_node_put(dev_node);
3361 * If priv is NULL, it's probably because the caam driver wasn't
3362 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3369 * Register crypto algorithms the device supports.
3370 * First, detect presence and attributes of DES, AES, and MD blocks.
/* Pre-era-10 parts expose CHA version/instance counts in packed LS regs. */
3372 if (priv->era < 10) {
3373 u32 cha_vid, cha_inst;
3375 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3376 aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
3377 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3379 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3380 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
3381 CHA_ID_LS_DES_SHIFT;
3382 aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
3383 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
/* Era >= 10 (elided "else" branch): per-engine version registers. */
3389 aesa = rd_reg32(&priv->ctrl->vreg.aesa);
3390 mdha = rd_reg32(&priv->ctrl->vreg.mdha);
3392 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3393 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3395 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
3396 aes_inst = aesa & CHA_VER_NUM_MASK;
3397 md_inst = mdha & CHA_VER_NUM_MASK;
3398 ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
3399 ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
3402 /* If MD is present, limit digest size based on LP256 */
3403 if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
3404 md_limit = SHA256_DIGEST_SIZE;
/* Pass 1: skcipher templates, filtered by detected engine support. */
3406 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3407 struct caam_skcipher_alg *t_alg = driver_algs + i;
3408 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
3410 /* Skip DES algorithms if not supported by device */
3412 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3413 (alg_sel == OP_ALG_ALGSEL_DES)))
3416 /* Skip AES algorithms if not supported by device */
3417 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3421 * Check support for AES modes not available
/* Low-power AES blocks lack some AAI modes (elided comparison target). */
3424 if (aes_vid == CHA_VER_VID_AES_LP &&
3425 (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
3429 caam_skcipher_alg_init(t_alg);
3431 err = crypto_register_skcipher(&t_alg->skcipher);
3433 pr_warn("%s alg registration failed\n",
3434 t_alg->skcipher.base.cra_driver_name);
3438 t_alg->registered = true;
/* Pass 2: AEAD templates, with additional class2 (auth) checks. */
3442 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3443 struct caam_aead_alg *t_alg = driver_aeads + i;
3444 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3446 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3448 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3450 /* Skip DES algorithms if not supported by device */
3452 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3453 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3456 /* Skip AES algorithms if not supported by device */
3457 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3460 /* Skip CHACHA20 algorithms if not supported by device */
3461 if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
3464 /* Skip POLY1305 algorithms if not supported by device */
3465 if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
3469 * Check support for AES algorithms not available
/* Low-power AES cannot do GCM. */
3472 if (aes_vid == CHA_VER_VID_AES_LP && alg_aai == OP_ALG_AAI_GCM)
3476 * Skip algorithms requiring message digests
3477 * if MD or MD size is not supported by device.
3479 if (is_mdha(c2_alg_sel) &&
3480 (!md_inst || t_alg->aead.maxauthsize > md_limit))
3483 caam_aead_alg_init(t_alg);
3485 err = crypto_register_aead(&t_alg->aead);
3487 pr_warn("%s alg registration failed\n",
3488 t_alg->aead.base.cra_driver_name);
3492 t_alg->registered = true;
3497 pr_info("caam algorithms registered in /proc/crypto\n");
/* Module entry/exit hookup and standard module metadata. */
3502 module_init(caam_algapi_init);
3503 module_exit(caam_algapi_exit);
3505 MODULE_LICENSE("GPL");
3506 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
3507 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");