2 * caam - Freescale FSL CAAM support for crypto API
4 * Copyright 2008-2011 Freescale Semiconductor, Inc.
6 * Based on talitos crypto API driver.
8 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
10 * ---------------                     ---------------
11 * | JobDesc #1  |-------------------->|  ShareDesc  |
12 * | *(packet 1) |                     |   (PDB)     |
13 * ---------------      |------------->|  (hashKey)  |
14 *       .              |              | (cipherKey) |
15 *       .              |    |-------->| (operation) |
16 * ---------------      |    |         ---------------
17 * | JobDesc #2  |------|    |
18 * | *(packet 2) |           |
19 * ---------------           |
20 *       .                   |
21 *       .                   |
22 * ---------------           |
23 * | JobDesc #3  |------------
24 * | *(packet 3) |
25 * ---------------
27 * The SharedDesc never changes for a connection unless rekeyed, but
28 * each packet will likely be in a different place. So all we need
29 * to know to process the packet is where the input is, where the
30 * output goes, and what context we want to process with. Context is
31 * in the SharedDesc, packet references in the JobDesc.
33 * So, a job desc looks like:
35 * ---------------------
36 * | Header            |
37 * | ShareDesc Pointer |
38 * | SEQ_OUT_PTR       |
39 * | (output buffer)   |
40 * | (output length)   |
41 * | SEQ_IN_PTR        |
42 * | (input buffer)    |
43 * | (input length)    |
44 * ---------------------
51 #include "desc_constr.h"
54 #include "sg_sw_sec4.h"
60 #define CAAM_CRA_PRIORITY 3000
61 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
62 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
63 CTR_RFC3686_NONCE_SIZE + \
64 SHA512_DIGEST_SIZE * 2)
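/*
 * Worked example of the bound above (illustrative, assuming the standard
 * kernel constants): AES_MAX_KEY_SIZE (32) + CTR_RFC3686_NONCE_SIZE (4) +
 * SHA512_DIGEST_SIZE * 2 (128) = 164 bytes. This covers the largest
 * combination stored in ctx->key: a padded MDHA split key (the ipad/opad
 * digests) followed by the raw cipher key and, for RFC3686, the nonce.
 */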
65 /* max IV is max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
66 #define CAAM_MAX_IV_LENGTH 16
68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
70 CAAM_CMD_SZ * 4)
71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
72 CAAM_CMD_SZ * 5)
74 /* length of descriptors text */
75 #define DESC_AEAD_BASE (4 * CAAM_CMD_SZ)
76 #define DESC_AEAD_ENC_LEN (DESC_AEAD_BASE + 11 * CAAM_CMD_SZ)
77 #define DESC_AEAD_DEC_LEN (DESC_AEAD_BASE + 15 * CAAM_CMD_SZ)
78 #define DESC_AEAD_GIVENC_LEN (DESC_AEAD_ENC_LEN + 9 * CAAM_CMD_SZ)
80 /* Note: Nonce is counted in enckeylen */
81 #define DESC_AEAD_CTR_RFC3686_LEN (4 * CAAM_CMD_SZ)
83 #define DESC_AEAD_NULL_BASE (3 * CAAM_CMD_SZ)
84 #define DESC_AEAD_NULL_ENC_LEN (DESC_AEAD_NULL_BASE + 11 * CAAM_CMD_SZ)
85 #define DESC_AEAD_NULL_DEC_LEN (DESC_AEAD_NULL_BASE + 13 * CAAM_CMD_SZ)
87 #define DESC_GCM_BASE (3 * CAAM_CMD_SZ)
88 #define DESC_GCM_ENC_LEN (DESC_GCM_BASE + 16 * CAAM_CMD_SZ)
89 #define DESC_GCM_DEC_LEN (DESC_GCM_BASE + 12 * CAAM_CMD_SZ)
91 #define DESC_RFC4106_BASE (3 * CAAM_CMD_SZ)
92 #define DESC_RFC4106_ENC_LEN (DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
93 #define DESC_RFC4106_DEC_LEN (DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
95 #define DESC_RFC4543_BASE (3 * CAAM_CMD_SZ)
96 #define DESC_RFC4543_ENC_LEN (DESC_RFC4543_BASE + 11 * CAAM_CMD_SZ)
97 #define DESC_RFC4543_DEC_LEN (DESC_RFC4543_BASE + 12 * CAAM_CMD_SZ)
99 #define DESC_ABLKCIPHER_BASE (3 * CAAM_CMD_SZ)
100 #define DESC_ABLKCIPHER_ENC_LEN (DESC_ABLKCIPHER_BASE + \
102 #define DESC_ABLKCIPHER_DEC_LEN (DESC_ABLKCIPHER_BASE + \
105 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
106 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
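/*
 * Rough sizing model used throughout this file (sketch): a shared
 * descriptor, the job-descriptor I/O commands and any immediate key
 * material must together fit in the SEC's 64-word (256-byte) descriptor
 * buffer, i.e. CAAM_DESC_BYTES_MAX. The DESC_*_LEN values above are
 * upper bounds on the command text; each "keys_fit_inline" check below
 * adds the key bytes to the relevant bound and, if the total still fits,
 * embeds the keys as immediate data instead of referencing them through
 * ctx->key_dma.
 */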
108 #ifdef DEBUG
109 /* for print_hex_dumps with line references */
110 #define debug(format, arg...) printk(format, arg)
111 #else
112 #define debug(format, arg...)
113 #endif
114 static struct list_head alg_list;
116 struct caam_alg_entry {
124 struct caam_aead_alg {
125 struct aead_alg aead;
126 struct caam_alg_entry caam;
130 /* Set DK bit in class 1 operation if shared */
131 static inline void append_dec_op1(u32 *desc, u32 type)
133 u32 *jump_cmd, *uncond_jump_cmd;
135 /* DK bit is valid only for AES */
136 if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
137 append_operation(desc, type | OP_ALG_AS_INITFINAL |
138 OP_ALG_DECRYPT);
139 return;
140 }
142 jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
143 append_operation(desc, type | OP_ALG_AS_INITFINAL |
145 uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
146 set_jump_tgt_here(desc, jump_cmd);
147 append_operation(desc, type | OP_ALG_AS_INITFINAL |
148 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
149 set_jump_tgt_here(desc, uncond_jump_cmd);
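/*
 * Rationale (sketch, as read from the descriptor logic): for AES the
 * class 1 key register ends up holding the decryption key schedule once
 * a decrypt operation has run. When the shared descriptor is entered
 * with the SHRD condition set (another job on this ring already executed
 * it), that schedule is assumed to still be in place, so the
 * OP_ALG_AAI_DK ("decrypt key") variant is selected instead of deriving
 * it again; otherwise a plain decrypt operation is used. Non-AES
 * algorithms have no DK bit, hence the early return above.
 */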
153 * For aead functions, read payload and write payload,
154 * both of which are specified in req->src and req->dst
156 static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
158 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
159 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
160 KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
164 * For ablkcipher encrypt and decrypt, read from req->src and
165 * write to req->dst
167 static inline void ablkcipher_append_src_dst(u32 *desc)
169 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
170 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
171 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
172 KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
173 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
177 * per-session context
180 struct device *jrdev;
181 u32 sh_desc_enc[DESC_MAX_USED_LEN];
182 u32 sh_desc_dec[DESC_MAX_USED_LEN];
183 u32 sh_desc_givenc[DESC_MAX_USED_LEN];
184 dma_addr_t sh_desc_enc_dma;
185 dma_addr_t sh_desc_dec_dma;
186 dma_addr_t sh_desc_givenc_dma;
190 u8 key[CAAM_MAX_KEY_SIZE];
192 unsigned int enckeylen;
193 unsigned int split_key_len;
194 unsigned int split_key_pad_len;
195 unsigned int authsize;
198 static void append_key_aead(u32 *desc, struct caam_ctx *ctx,
199 int keys_fit_inline, bool is_rfc3686)
202 unsigned int enckeylen = ctx->enckeylen;
206 * | ctx->key = {AUTH_KEY, ENC_KEY, NONCE}
207 * | enckeylen = encryption key size + nonce size
209 if (is_rfc3686)
210 enckeylen -= CTR_RFC3686_NONCE_SIZE;
212 if (keys_fit_inline) {
213 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
214 ctx->split_key_len, CLASS_2 |
215 KEY_DEST_MDHA_SPLIT | KEY_ENC);
216 append_key_as_imm(desc, (void *)ctx->key +
217 ctx->split_key_pad_len, enckeylen,
218 enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
220 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
221 KEY_DEST_MDHA_SPLIT | KEY_ENC);
222 append_key(desc, ctx->key_dma + ctx->split_key_pad_len,
223 enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
226 /* For RFC3686, load the nonce into CONTEXT1 reg */
228 nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len +
230 append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
231 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
234 MOVE_DEST_CLASS1CTX |
235 (16 << MOVE_OFFSET_SHIFT) |
236 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
240 static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
241 int keys_fit_inline, bool is_rfc3686)
245 /* Note: Context registers are saved. */
246 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
248 /* Skip if already shared */
249 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
252 append_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
254 set_jump_tgt_here(desc, key_jump_cmd);
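/*
 * Pattern used by all shared descriptors in this file (sketch): the key
 * commands are wrapped in a conditional JUMP on the SHRD flag, so keys
 * are (re)loaded only the first time the shared descriptor runs on a
 * job ring; when the descriptor is shared from a previous job the keys
 * are already in the class 1/2 key registers and the load is skipped.
 */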
257 static int aead_null_set_sh_desc(struct crypto_aead *aead)
259 struct caam_ctx *ctx = crypto_aead_ctx(aead);
260 struct device *jrdev = ctx->jrdev;
261 bool keys_fit_inline = false;
262 u32 *key_jump_cmd, *jump_cmd, *read_move_cmd, *write_move_cmd;
266 * Job Descriptor and Shared Descriptors
267 * must all fit into the 64-word Descriptor h/w Buffer
269 if (DESC_AEAD_NULL_ENC_LEN + AEAD_DESC_JOB_IO_LEN +
270 ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
271 keys_fit_inline = true;
273 /* aead_encrypt shared descriptor */
274 desc = ctx->sh_desc_enc;
276 init_sh_desc(desc, HDR_SHARE_SERIAL);
278 /* Skip if already shared */
279 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
282 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
283 ctx->split_key_len, CLASS_2 |
284 KEY_DEST_MDHA_SPLIT | KEY_ENC);
286 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
287 KEY_DEST_MDHA_SPLIT | KEY_ENC);
288 set_jump_tgt_here(desc, key_jump_cmd);
290 /* assoclen + cryptlen = seqinlen */
291 append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
293 /* Prepare to read and write cryptlen + assoclen bytes */
294 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
295 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
298 * MOVE_LEN opcode is not available in all SEC HW revisions,
299 * thus we need to do some magic, i.e. self-patch the descriptor
300 * buffer.
302 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
304 (0x6 << MOVE_LEN_SHIFT));
305 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
308 (0x8 << MOVE_LEN_SHIFT));
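/*
 * Sketch of the self-patching above: read_move_cmd copies part of this
 * descriptor (the MOVE command that later drains the input FIFO into the
 * output FIFO) into a MATH register, the length computed at run time is
 * merged in, and write_move_cmd writes the patched command back into the
 * descriptor buffer. This emulates MOVE_LEN (move with length taken from
 * a math register) on SEC revisions that lack that opcode; the same
 * trick recurs in the other descriptors built below.
 */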
310 /* Class 2 operation */
311 append_operation(desc, ctx->class2_alg_type |
312 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
314 /* Read and write cryptlen bytes */
315 aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
317 set_move_tgt_here(desc, read_move_cmd);
318 set_move_tgt_here(desc, write_move_cmd);
319 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
320 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
324 append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
325 LDST_SRCDST_BYTE_CONTEXT);
327 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
330 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
331 dev_err(jrdev, "unable to map shared descriptor\n");
335 print_hex_dump(KERN_ERR,
336 "aead null enc shdesc@"__stringify(__LINE__)": ",
337 DUMP_PREFIX_ADDRESS, 16, 4, desc,
338 desc_bytes(desc), 1);
342 * Job Descriptor and Shared Descriptors
343 * must all fit into the 64-word Descriptor h/w Buffer
345 keys_fit_inline = false;
346 if (DESC_AEAD_NULL_DEC_LEN + DESC_JOB_IO_LEN +
347 ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
348 keys_fit_inline = true;
350 desc = ctx->sh_desc_dec;
352 /* aead_decrypt shared descriptor */
353 init_sh_desc(desc, HDR_SHARE_SERIAL);
355 /* Skip if already shared */
356 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
359 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
360 ctx->split_key_len, CLASS_2 |
361 KEY_DEST_MDHA_SPLIT | KEY_ENC);
363 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
364 KEY_DEST_MDHA_SPLIT | KEY_ENC);
365 set_jump_tgt_here(desc, key_jump_cmd);
367 /* Class 2 operation */
368 append_operation(desc, ctx->class2_alg_type |
369 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
371 /* assoclen + cryptlen = seqoutlen */
372 append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
374 /* Prepare to read and write cryptlen + assoclen bytes */
375 append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
376 append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
379 * MOVE_LEN opcode is not available in all SEC HW revisions,
380 * thus we need to do some magic, i.e. self-patch the descriptor
381 * buffer.
383 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
385 (0x6 << MOVE_LEN_SHIFT));
386 write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
389 (0x8 << MOVE_LEN_SHIFT));
391 /* Read and write cryptlen bytes */
392 aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
395 * Insert a NOP here, since we need at least 4 instructions between
396 * code patching the descriptor buffer and the location being patched.
398 jump_cmd = append_jump(desc, JUMP_TEST_ALL);
399 set_jump_tgt_here(desc, jump_cmd);
401 set_move_tgt_here(desc, read_move_cmd);
402 set_move_tgt_here(desc, write_move_cmd);
403 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
404 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
406 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
409 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
410 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
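/*
 * On decrypt the ICV from the input sequence is loaded with
 * FIFOLD_TYPE_ICV so the hardware compares it against the MAC it just
 * computed; a mismatch shows up as an ICV-check error in the job status,
 * which the completion callbacks below translate into -EBADMSG (see
 * aead_decrypt_done()).
 */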
412 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
415 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
416 dev_err(jrdev, "unable to map shared descriptor\n");
420 print_hex_dump(KERN_ERR,
421 "aead null dec shdesc@"__stringify(__LINE__)": ",
422 DUMP_PREFIX_ADDRESS, 16, 4, desc,
423 desc_bytes(desc), 1);
429 static int aead_set_sh_desc(struct crypto_aead *aead)
431 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
432 struct caam_aead_alg, aead);
433 unsigned int ivsize = crypto_aead_ivsize(aead);
434 struct caam_ctx *ctx = crypto_aead_ctx(aead);
435 struct device *jrdev = ctx->jrdev;
436 bool keys_fit_inline;
440 const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
441 OP_ALG_AAI_CTR_MOD128);
442 const bool is_rfc3686 = alg->caam.rfc3686;
447 /* NULL encryption / decryption */
448 if (!ctx->enckeylen)
449 return aead_null_set_sh_desc(aead);
452 * AES-CTR needs to load IV in CONTEXT1 reg
453 * at an offset of 128bits (16bytes)
454 * CONTEXT1[255:128] = IV
461 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
463 if (is_rfc3686)
464 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
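/*
 * Resulting CONTEXT1 layout for the counter-mode cases handled here
 * (byte offsets; illustrative): plain AES-CTR loads the caller's 16-byte
 * IV at offset 16, while RFC3686 places the 4-byte nonce at offset 16,
 * the 8-byte IV at offset 20 (ctx1_iv_off) and a 4-byte big-endian
 * counter, initialised to 1 further down, at offset 28.
 */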
470 * Job Descriptor and Shared Descriptors
471 * must all fit into the 64-word Descriptor h/w Buffer
473 keys_fit_inline = false;
474 if (DESC_AEAD_ENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
475 ctx->split_key_pad_len + ctx->enckeylen +
476 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
478 keys_fit_inline = true;
480 /* aead_encrypt shared descriptor */
481 desc = ctx->sh_desc_enc;
483 /* Note: Context registers are saved. */
484 init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
486 /* Class 2 operation */
487 append_operation(desc, ctx->class2_alg_type |
488 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
490 /* Read and write assoclen bytes */
491 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
492 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
494 /* Skip assoc data */
495 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
497 /* read assoc before reading payload */
498 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
501 /* Load Counter into CONTEXT1 reg */
503 append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
505 LDST_SRCDST_BYTE_CONTEXT |
506 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
509 /* Class 1 operation */
510 append_operation(desc, ctx->class1_alg_type |
511 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
513 /* Read and write cryptlen bytes */
514 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
515 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
516 aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
519 append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
520 LDST_SRCDST_BYTE_CONTEXT);
522 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
525 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
526 dev_err(jrdev, "unable to map shared descriptor\n");
530 print_hex_dump(KERN_ERR, "aead enc shdesc@"__stringify(__LINE__)": ",
531 DUMP_PREFIX_ADDRESS, 16, 4, desc,
532 desc_bytes(desc), 1);
537 * Job Descriptor and Shared Descriptors
538 * must all fit into the 64-word Descriptor h/w Buffer
540 keys_fit_inline = false;
541 if (DESC_AEAD_DEC_LEN + AUTHENC_DESC_JOB_IO_LEN +
542 ctx->split_key_pad_len + ctx->enckeylen +
543 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
545 keys_fit_inline = true;
547 /* aead_decrypt shared descriptor */
548 desc = ctx->sh_desc_dec;
550 /* Note: Context registers are saved. */
551 init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
553 /* Class 2 operation */
554 append_operation(desc, ctx->class2_alg_type |
555 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
557 /* Read and write assoclen bytes */
558 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
559 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
561 /* Skip assoc data */
562 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
564 /* read assoc before reading payload */
565 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
568 /* Load Counter into CONTEXT1 reg */
570 append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
572 LDST_SRCDST_BYTE_CONTEXT |
573 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
576 /* Choose operation */
577 if (ctr_mode)
578 append_operation(desc, ctx->class1_alg_type |
579 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
580 else
581 append_dec_op1(desc, ctx->class1_alg_type);
583 /* Read and write cryptlen bytes */
584 append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
585 append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
586 aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
589 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
590 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
592 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
595 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
596 dev_err(jrdev, "unable to map shared descriptor\n");
600 print_hex_dump(KERN_ERR, "aead dec shdesc@"__stringify(__LINE__)": ",
601 DUMP_PREFIX_ADDRESS, 16, 4, desc,
602 desc_bytes(desc), 1);
605 if (!alg->caam.geniv)
606 return 0;
609 * Job Descriptor and Shared Descriptors
610 * must all fit into the 64-word Descriptor h/w Buffer
612 keys_fit_inline = false;
613 if (DESC_AEAD_GIVENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
614 ctx->split_key_pad_len + ctx->enckeylen +
615 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
617 keys_fit_inline = true;
619 /* aead_givencrypt shared descriptor */
620 desc = ctx->sh_desc_enc;
622 /* Note: Context registers are saved. */
623 init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
629 geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
630 NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
631 NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
632 append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
633 LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
634 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
635 append_move(desc, MOVE_WAITCOMP |
636 MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
637 (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
638 (ivsize << MOVE_LEN_SHIFT));
639 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
642 /* Copy the generated IV from class 1 context to the output FIFO */
643 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
644 (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
645 (ivsize << MOVE_LEN_SHIFT));
647 /* Return to encryption */
648 append_operation(desc, ctx->class2_alg_type |
649 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
651 /* Read and write assoclen bytes */
652 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
653 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
655 /* ivsize + cryptlen = seqoutlen - authsize */
656 append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);
658 /* Skip assoc data */
659 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
661 /* read assoc before reading payload */
662 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
665 /* Copy iv from outfifo to class 2 fifo */
666 moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
667 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
668 append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
669 LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
670 append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
671 LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
673 /* Load Counter into CONTEXT1 reg */
675 append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
677 LDST_SRCDST_BYTE_CONTEXT |
678 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
681 /* Class 1 operation */
682 append_operation(desc, ctx->class1_alg_type |
683 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
685 /* Will write ivsize + cryptlen */
686 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
688 /* No need to reload the IV - skip it in the input sequence */
689 append_seq_fifo_load(desc, ivsize,
690 FIFOLD_CLASS_SKIP);
692 /* Will read cryptlen */
693 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
694 aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
697 append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
698 LDST_SRCDST_BYTE_CONTEXT);
700 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
703 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
704 dev_err(jrdev, "unable to map shared descriptor\n");
708 print_hex_dump(KERN_ERR, "aead givenc shdesc@"__stringify(__LINE__)": ",
709 DUMP_PREFIX_ADDRESS, 16, 4, desc,
710 desc_bytes(desc), 1);
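/*
 * Note: for algorithms flagged with caam.geniv the IV-generating variant
 * built above is written into ctx->sh_desc_enc (and mapped to
 * sh_desc_enc_dma), replacing the plain encrypt descriptor, so encrypt
 * requests on such tfms always run the IV-generation path.
 */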
717 static int aead_setauthsize(struct crypto_aead *authenc,
718 unsigned int authsize)
720 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
722 ctx->authsize = authsize;
723 aead_set_sh_desc(authenc);
728 static int gcm_set_sh_desc(struct crypto_aead *aead)
730 struct caam_ctx *ctx = crypto_aead_ctx(aead);
731 struct device *jrdev = ctx->jrdev;
732 bool keys_fit_inline = false;
733 u32 *key_jump_cmd, *zero_payload_jump_cmd,
734 *zero_assoc_jump_cmd1, *zero_assoc_jump_cmd2;
737 if (!ctx->enckeylen || !ctx->authsize)
738 return 0;
741 * AES GCM encrypt shared descriptor
742 * Job Descriptor and Shared Descriptor
743 * must fit into the 64-word Descriptor h/w Buffer
745 if (DESC_GCM_ENC_LEN + GCM_DESC_JOB_IO_LEN +
746 ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
747 keys_fit_inline = true;
749 desc = ctx->sh_desc_enc;
751 init_sh_desc(desc, HDR_SHARE_SERIAL);
753 /* skip key loading if they are loaded due to sharing */
754 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
755 JUMP_COND_SHRD | JUMP_COND_SELF);
757 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
758 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
760 append_key(desc, ctx->key_dma, ctx->enckeylen,
761 CLASS_1 | KEY_DEST_CLASS_REG);
762 set_jump_tgt_here(desc, key_jump_cmd);
764 /* class 1 operation */
765 append_operation(desc, ctx->class1_alg_type |
766 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
768 /* if assoclen + cryptlen is ZERO, skip to ICV write */
769 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
770 zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
773 /* if assoclen is ZERO, skip reading the assoc data */
774 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
775 zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
778 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
780 /* skip assoc data */
781 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
783 /* cryptlen = seqinlen - assoclen */
784 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
786 /* if cryptlen is ZERO jump to zero-payload commands */
787 zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
790 /* read assoc data */
791 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
792 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
793 set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
795 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
797 /* write encrypted data */
798 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
800 /* read payload data */
801 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
802 FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
804 /* jump over the zero-payload commands */
805 append_jump(desc, JUMP_TEST_ALL | 2);
807 /* zero-payload commands */
808 set_jump_tgt_here(desc, zero_payload_jump_cmd);
810 /* read assoc data */
811 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
812 FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
814 /* There is no input data */
815 set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
818 append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
819 LDST_SRCDST_BYTE_CONTEXT);
821 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
824 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
825 dev_err(jrdev, "unable to map shared descriptor\n");
829 print_hex_dump(KERN_ERR, "gcm enc shdesc@"__stringify(__LINE__)": ",
830 DUMP_PREFIX_ADDRESS, 16, 4, desc,
831 desc_bytes(desc), 1);
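/*
 * The jump targets above handle the GCM corner cases: if
 * assoclen + cryptlen == 0 the descriptor skips straight to writing the
 * tag (GHASH over the empty string must still produce an ICV); if only
 * the payload is empty the AAD is read with LAST1 set; and a zero
 * assoclen skips the AAD read entirely.
 */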
835 * Job Descriptor and Shared Descriptors
836 * must all fit into the 64-word Descriptor h/w Buffer
838 keys_fit_inline = false;
839 if (DESC_GCM_DEC_LEN + GCM_DESC_JOB_IO_LEN +
840 ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
841 keys_fit_inline = true;
843 desc = ctx->sh_desc_dec;
845 init_sh_desc(desc, HDR_SHARE_SERIAL);
847 /* skip key loading if they are loaded due to sharing */
848 key_jump_cmd = append_jump(desc, JUMP_JSL |
849 JUMP_TEST_ALL | JUMP_COND_SHRD |
852 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
853 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
855 append_key(desc, ctx->key_dma, ctx->enckeylen,
856 CLASS_1 | KEY_DEST_CLASS_REG);
857 set_jump_tgt_here(desc, key_jump_cmd);
859 /* class 1 operation */
860 append_operation(desc, ctx->class1_alg_type |
861 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
863 /* if assoclen is ZERO, skip reading the assoc data */
864 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
865 zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
868 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
870 /* skip assoc data */
871 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
873 /* read assoc data */
874 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
875 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
877 set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
879 /* cryptlen = seqoutlen - assoclen */
880 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
882 /* jump to zero-payload command if cryptlen is zero */
883 zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
886 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
888 /* store encrypted data */
889 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
891 /* read payload data */
892 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
893 FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
895 /* zero-payload command */
896 set_jump_tgt_here(desc, zero_payload_jump_cmd);
899 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
900 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
902 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
905 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
906 dev_err(jrdev, "unable to map shared descriptor\n");
910 print_hex_dump(KERN_ERR, "gcm dec shdesc@"__stringify(__LINE__)": ",
911 DUMP_PREFIX_ADDRESS, 16, 4, desc,
912 desc_bytes(desc), 1);
918 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
920 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
922 ctx->authsize = authsize;
923 gcm_set_sh_desc(authenc);
928 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
930 struct caam_ctx *ctx = crypto_aead_ctx(aead);
931 struct device *jrdev = ctx->jrdev;
932 bool keys_fit_inline = false;
936 if (!ctx->enckeylen || !ctx->authsize)
937 return 0;
940 * RFC4106 encrypt shared descriptor
941 * Job Descriptor and Shared Descriptor
942 * must fit into the 64-word Descriptor h/w Buffer
944 if (DESC_RFC4106_ENC_LEN + GCM_DESC_JOB_IO_LEN +
945 ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
946 keys_fit_inline = true;
948 desc = ctx->sh_desc_enc;
950 init_sh_desc(desc, HDR_SHARE_SERIAL);
952 /* Skip key loading if it is loaded due to sharing */
953 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
956 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
957 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
959 append_key(desc, ctx->key_dma, ctx->enckeylen,
960 CLASS_1 | KEY_DEST_CLASS_REG);
961 set_jump_tgt_here(desc, key_jump_cmd);
963 /* Class 1 operation */
964 append_operation(desc, ctx->class1_alg_type |
965 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
967 append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
968 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
970 /* Read assoc data */
971 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
972 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
975 append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
977 /* Will read cryptlen bytes */
978 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
980 /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
981 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
983 /* Skip assoc data */
984 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
986 /* cryptlen = seqoutlen - assoclen */
987 append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);
989 /* Write encrypted data */
990 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
992 /* Read payload data */
993 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
994 FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
997 append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
998 LDST_SRCDST_BYTE_CONTEXT);
1000 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
1003 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1004 dev_err(jrdev, "unable to map shared descriptor\n");
1008 print_hex_dump(KERN_ERR, "rfc4106 enc shdesc@"__stringify(__LINE__)": ",
1009 DUMP_PREFIX_ADDRESS, 16, 4, desc,
1010 desc_bytes(desc), 1);
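/*
 * RFC4106 note (sketch): the sequence input carries the IPsec assoc
 * data followed by the 8-byte explicit IV, so the descriptor
 * authenticates assoclen - 8 bytes of AAD and skips the trailing
 * 8 bytes; the actual GCM IV (4-byte salt from the key + 8-byte
 * explicit IV) is supplied by the job descriptor, see init_gcm_job().
 */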
1014 * Job Descriptor and Shared Descriptors
1015 * must all fit into the 64-word Descriptor h/w Buffer
1017 keys_fit_inline = false;
1018 if (DESC_RFC4106_DEC_LEN + DESC_JOB_IO_LEN +
1019 ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
1020 keys_fit_inline = true;
1022 desc = ctx->sh_desc_dec;
1024 init_sh_desc(desc, HDR_SHARE_SERIAL);
1026 /* Skip key loading if it is loaded due to sharing */
1027 key_jump_cmd = append_jump(desc, JUMP_JSL |
1028 JUMP_TEST_ALL | JUMP_COND_SHRD);
1029 if (keys_fit_inline)
1030 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1031 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1033 append_key(desc, ctx->key_dma, ctx->enckeylen,
1034 CLASS_1 | KEY_DEST_CLASS_REG);
1035 set_jump_tgt_here(desc, key_jump_cmd);
1037 /* Class 1 operation */
1038 append_operation(desc, ctx->class1_alg_type |
1039 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1041 append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
1042 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1044 /* Read assoc data */
1045 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1046 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
1049 append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1051 /* Will read cryptlen bytes */
1052 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
1054 /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
1055 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
1057 /* Skip assoc data */
1058 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
1060 /* Will write cryptlen bytes */
1061 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1063 /* Store payload data */
1064 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1066 /* Read encrypted data */
1067 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1068 FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
1071 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
1072 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1074 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
1077 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1078 dev_err(jrdev, "unable to map shared descriptor\n");
1082 print_hex_dump(KERN_ERR, "rfc4106 dec shdesc@"__stringify(__LINE__)": ",
1083 DUMP_PREFIX_ADDRESS, 16, 4, desc,
1084 desc_bytes(desc), 1);
1090 static int rfc4106_setauthsize(struct crypto_aead *authenc,
1091 unsigned int authsize)
1093 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
1095 ctx->authsize = authsize;
1096 rfc4106_set_sh_desc(authenc);
1101 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
1103 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1104 struct device *jrdev = ctx->jrdev;
1105 bool keys_fit_inline = false;
1107 u32 *read_move_cmd, *write_move_cmd;
1110 if (!ctx->enckeylen || !ctx->authsize)
1111 return 0;
1114 * RFC4543 encrypt shared descriptor
1115 * Job Descriptor and Shared Descriptor
1116 * must fit into the 64-word Descriptor h/w Buffer
1118 if (DESC_RFC4543_ENC_LEN + GCM_DESC_JOB_IO_LEN +
1119 ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
1120 keys_fit_inline = true;
1122 desc = ctx->sh_desc_enc;
1124 init_sh_desc(desc, HDR_SHARE_SERIAL);
1126 /* Skip key loading if it is loaded due to sharing */
1127 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1129 if (keys_fit_inline)
1130 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1131 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1133 append_key(desc, ctx->key_dma, ctx->enckeylen,
1134 CLASS_1 | KEY_DEST_CLASS_REG);
1135 set_jump_tgt_here(desc, key_jump_cmd);
1137 /* Class 1 operation */
1138 append_operation(desc, ctx->class1_alg_type |
1139 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
1141 /* assoclen + cryptlen = seqinlen */
1142 append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
1145 * MOVE_LEN opcode is not available in all SEC HW revisions,
1146 * thus we need to do some magic, i.e. self-patch the descriptor
1147 * buffer.
1149 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1150 (0x6 << MOVE_LEN_SHIFT));
1151 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1152 (0x8 << MOVE_LEN_SHIFT));
1154 /* Will read assoclen + cryptlen bytes */
1155 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1157 /* Will write assoclen + cryptlen bytes */
1158 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1160 /* Read and write assoclen + cryptlen bytes */
1161 aead_append_src_dst(desc, FIFOLD_TYPE_AAD);
1163 set_move_tgt_here(desc, read_move_cmd);
1164 set_move_tgt_here(desc, write_move_cmd);
1165 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1166 /* Move payload data to OFIFO */
1167 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1170 append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
1171 LDST_SRCDST_BYTE_CONTEXT);
1173 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
1176 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1177 dev_err(jrdev, "unable to map shared descriptor\n");
1181 print_hex_dump(KERN_ERR, "rfc4543 enc shdesc@"__stringify(__LINE__)": ",
1182 DUMP_PREFIX_ADDRESS, 16, 4, desc,
1183 desc_bytes(desc), 1);
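/*
 * RFC4543 (GMAC) note: nothing is encrypted - the whole
 * assoclen + cryptlen stream is read as AAD so it is only authenticated,
 * while the self-patched MOVE above copies the payload bytes unmodified
 * from the input FIFO to the output FIFO so they still reach the
 * destination buffer; only the ICV is produced (encrypt) or checked
 * (decrypt).
 */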
1187 * Job Descriptor and Shared Descriptors
1188 * must all fit into the 64-word Descriptor h/w Buffer
1190 keys_fit_inline = false;
1191 if (DESC_RFC4543_DEC_LEN + GCM_DESC_JOB_IO_LEN +
1192 ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
1193 keys_fit_inline = true;
1195 desc = ctx->sh_desc_dec;
1197 init_sh_desc(desc, HDR_SHARE_SERIAL);
1199 /* Skip key loading if it is loaded due to sharing */
1200 key_jump_cmd = append_jump(desc, JUMP_JSL |
1201 JUMP_TEST_ALL | JUMP_COND_SHRD);
1202 if (keys_fit_inline)
1203 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1204 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1206 append_key(desc, ctx->key_dma, ctx->enckeylen,
1207 CLASS_1 | KEY_DEST_CLASS_REG);
1208 set_jump_tgt_here(desc, key_jump_cmd);
1210 /* Class 1 operation */
1211 append_operation(desc, ctx->class1_alg_type |
1212 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1214 /* assoclen + cryptlen = seqoutlen */
1215 append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1218 * MOVE_LEN opcode is not available in all SEC HW revisions,
1219 * thus we need to do some magic, i.e. self-patch the descriptor
1220 * buffer.
1222 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1223 (0x6 << MOVE_LEN_SHIFT));
1224 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1225 (0x8 << MOVE_LEN_SHIFT));
1227 /* Will read assoclen + cryptlen bytes */
1228 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1230 /* Will write assoclen + cryptlen bytes */
1231 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1233 /* Store payload data */
1234 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1236 /* In-snoop assoclen + cryptlen data */
1237 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
1238 FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);
1240 set_move_tgt_here(desc, read_move_cmd);
1241 set_move_tgt_here(desc, write_move_cmd);
1242 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1243 /* Move payload data to OFIFO */
1244 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1245 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1248 append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
1249 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1251 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
1254 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1255 dev_err(jrdev, "unable to map shared descriptor\n");
1259 print_hex_dump(KERN_ERR, "rfc4543 dec shdesc@"__stringify(__LINE__)": ",
1260 DUMP_PREFIX_ADDRESS, 16, 4, desc,
1261 desc_bytes(desc), 1);
1267 static int rfc4543_setauthsize(struct crypto_aead *authenc,
1268 unsigned int authsize)
1270 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
1272 ctx->authsize = authsize;
1273 rfc4543_set_sh_desc(authenc);
1278 static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in,
1281 return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len,
1282 ctx->split_key_pad_len, key_in, authkeylen,
1286 static int aead_setkey(struct crypto_aead *aead,
1287 const u8 *key, unsigned int keylen)
1289 /* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */
1290 static const u8 mdpadlen[] = { 16, 20, 32, 32, 64, 64 };
1291 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1292 struct device *jrdev = ctx->jrdev;
1293 struct crypto_authenc_keys keys;
1296 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
1297 goto badkey;
1299 /* Pick class 2 key length from algorithm submask */
1300 ctx->split_key_len = mdpadlen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >>
1301 OP_ALG_ALGSEL_SHIFT] * 2;
1302 ctx->split_key_pad_len = ALIGN(ctx->split_key_len, 16);
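/*
 * Worked example (illustrative): for authenc(hmac(sha256),cbc(aes)) the
 * OP_ALG_ALGSEL submask picks the SHA-256 entry (32), so
 * split_key_len = 2 * 32 = 64 (the precomputed ipad and opad digests
 * produced by gen_split_key()) and split_key_pad_len is rounded up to a
 * 16-byte multiple, here 64 as well; for hmac(sha1) the values would be
 * 40 and 48.
 */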
1304 if (ctx->split_key_pad_len + keys.enckeylen > CAAM_MAX_KEY_SIZE)
1305 goto badkey;
1308 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
1309 keys.authkeylen + keys.enckeylen, keys.enckeylen,
1311 printk(KERN_ERR "split_key_len %d split_key_pad_len %d\n",
1312 ctx->split_key_len, ctx->split_key_pad_len);
1313 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1314 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1317 ret = gen_split_aead_key(ctx, keys.authkey, keys.authkeylen);
1322 /* append the encryption key after the auth split key */
1323 memcpy(ctx->key + ctx->split_key_pad_len, keys.enckey, keys.enckeylen);
1325 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len +
1326 keys.enckeylen, DMA_TO_DEVICE);
1327 if (dma_mapping_error(jrdev, ctx->key_dma)) {
1328 dev_err(jrdev, "unable to map key i/o memory\n");
1332 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
1333 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
1334 ctx->split_key_pad_len + keys.enckeylen, 1);
1337 ctx->enckeylen = keys.enckeylen;
1339 ret = aead_set_sh_desc(aead);
1341 dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len +
1342 keys.enckeylen, DMA_TO_DEVICE);
1347 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
1351 static int gcm_setkey(struct crypto_aead *aead,
1352 const u8 *key, unsigned int keylen)
1354 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1355 struct device *jrdev = ctx->jrdev;
1359 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1360 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1363 memcpy(ctx->key, key, keylen);
1364 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
1366 if (dma_mapping_error(jrdev, ctx->key_dma)) {
1367 dev_err(jrdev, "unable to map key i/o memory\n");
1370 ctx->enckeylen = keylen;
1372 ret = gcm_set_sh_desc(aead);
1374 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
1381 static int rfc4106_setkey(struct crypto_aead *aead,
1382 const u8 *key, unsigned int keylen)
1384 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1385 struct device *jrdev = ctx->jrdev;
1392 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1393 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1396 memcpy(ctx->key, key, keylen);
1399 * The last four bytes of the key material are used as the salt value
1400 * in the nonce. Update the AES key length.
1402 ctx->enckeylen = keylen - 4;
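/*
 * Only the AES key proper (keylen - 4 bytes) is DMA mapped and loaded by
 * the shared descriptors; the trailing 4 salt bytes stay in ctx->key at
 * offset ctx->enckeylen and are appended in front of the per-request IV
 * when the job descriptor is built (init_gcm_job()).
 */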
1404 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
1406 if (dma_mapping_error(jrdev, ctx->key_dma)) {
1407 dev_err(jrdev, "unable to map key i/o memory\n");
1411 ret = rfc4106_set_sh_desc(aead);
1413 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
1420 static int rfc4543_setkey(struct crypto_aead *aead,
1421 const u8 *key, unsigned int keylen)
1423 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1424 struct device *jrdev = ctx->jrdev;
1431 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1432 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1435 memcpy(ctx->key, key, keylen);
1438 * The last four bytes of the key material are used as the salt value
1439 * in the nonce. Update the AES key length.
1441 ctx->enckeylen = keylen - 4;
1443 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
1445 if (dma_mapping_error(jrdev, ctx->key_dma)) {
1446 dev_err(jrdev, "unable to map key i/o memory\n");
1450 ret = rfc4543_set_sh_desc(aead);
1452 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
1459 static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
1460 const u8 *key, unsigned int keylen)
1462 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1463 struct ablkcipher_tfm *crt = &ablkcipher->base.crt_ablkcipher;
1464 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
1465 const char *alg_name = crypto_tfm_alg_name(tfm);
1466 struct device *jrdev = ctx->jrdev;
1472 u32 ctx1_iv_off = 0;
1473 const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
1474 OP_ALG_AAI_CTR_MOD128);
1475 const bool is_rfc3686 = (ctr_mode &&
1476 (strstr(alg_name, "rfc3686") != NULL));
1479 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1480 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1483 * AES-CTR needs to load IV in CONTEXT1 reg
1484 * at an offset of 128bits (16bytes)
1485 * CONTEXT1[255:128] = IV
1492 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1493 * | *key = {KEY, NONCE}
1495 if (is_rfc3686) {
1496 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
1497 keylen -= CTR_RFC3686_NONCE_SIZE;
1498 }
1500 memcpy(ctx->key, key, keylen);
1501 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
1503 if (dma_mapping_error(jrdev, ctx->key_dma)) {
1504 dev_err(jrdev, "unable to map key i/o memory\n");
1507 ctx->enckeylen = keylen;
1509 /* ablkcipher_encrypt shared descriptor */
1510 desc = ctx->sh_desc_enc;
1511 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1512 /* Skip if already shared */
1513 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1516 /* Load class1 key only */
1517 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1518 ctx->enckeylen, CLASS_1 |
1519 KEY_DEST_CLASS_REG);
1521 /* Load nonce into CONTEXT1 reg */
1523 nonce = (u32 *)(key + keylen);
1524 append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
1525 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1526 append_move(desc, MOVE_WAITCOMP |
1528 MOVE_DEST_CLASS1CTX |
1529 (16 << MOVE_OFFSET_SHIFT) |
1530 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1533 set_jump_tgt_here(desc, key_jump_cmd);
1536 append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
1537 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1539 /* Load counter into CONTEXT1 reg */
1541 append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
1543 LDST_SRCDST_BYTE_CONTEXT |
1544 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1545 LDST_OFFSET_SHIFT));
1547 /* Load operation */
1548 append_operation(desc, ctx->class1_alg_type |
1549 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
1551 /* Perform operation */
1552 ablkcipher_append_src_dst(desc);
1554 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
1557 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1558 dev_err(jrdev, "unable to map shared descriptor\n");
1562 print_hex_dump(KERN_ERR,
1563 "ablkcipher enc shdesc@"__stringify(__LINE__)": ",
1564 DUMP_PREFIX_ADDRESS, 16, 4, desc,
1565 desc_bytes(desc), 1);
1567 /* ablkcipher_decrypt shared descriptor */
1568 desc = ctx->sh_desc_dec;
1570 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1571 /* Skip if already shared */
1572 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1575 /* Load class1 key only */
1576 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1577 ctx->enckeylen, CLASS_1 |
1578 KEY_DEST_CLASS_REG);
1580 /* Load nonce into CONTEXT1 reg */
1582 nonce = (u32 *)(key + keylen);
1583 append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
1584 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1585 append_move(desc, MOVE_WAITCOMP |
1587 MOVE_DEST_CLASS1CTX |
1588 (16 << MOVE_OFFSET_SHIFT) |
1589 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1592 set_jump_tgt_here(desc, key_jump_cmd);
1595 append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
1596 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1598 /* Load counter into CONTEXT1 reg */
1600 append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
1602 LDST_SRCDST_BYTE_CONTEXT |
1603 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1604 LDST_OFFSET_SHIFT));
1606 /* Choose operation */
1607 if (ctr_mode)
1608 append_operation(desc, ctx->class1_alg_type |
1609 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
1610 else
1611 append_dec_op1(desc, ctx->class1_alg_type);
1613 /* Perform operation */
1614 ablkcipher_append_src_dst(desc);
1616 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
1619 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1620 dev_err(jrdev, "unable to map shared descriptor\n");
1625 print_hex_dump(KERN_ERR,
1626 "ablkcipher dec shdesc@"__stringify(__LINE__)": ",
1627 DUMP_PREFIX_ADDRESS, 16, 4, desc,
1628 desc_bytes(desc), 1);
1630 /* ablkcipher_givencrypt shared descriptor */
1631 desc = ctx->sh_desc_givenc;
1633 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1634 /* Skip if already shared */
1635 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1638 /* Load class1 key only */
1639 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1640 ctx->enckeylen, CLASS_1 |
1641 KEY_DEST_CLASS_REG);
1643 /* Load Nonce into CONTEXT1 reg */
1645 nonce = (u32 *)(key + keylen);
1646 append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
1647 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1648 append_move(desc, MOVE_WAITCOMP |
1650 MOVE_DEST_CLASS1CTX |
1651 (16 << MOVE_OFFSET_SHIFT) |
1652 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1654 set_jump_tgt_here(desc, key_jump_cmd);
1657 geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
1658 NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
1659 NFIFOENTRY_PTYPE_RND | (crt->ivsize << NFIFOENTRY_DLEN_SHIFT);
1660 append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
1661 LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
1662 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1663 append_move(desc, MOVE_WAITCOMP |
1665 MOVE_DEST_CLASS1CTX |
1666 (crt->ivsize << MOVE_LEN_SHIFT) |
1667 (ctx1_iv_off << MOVE_OFFSET_SHIFT));
1668 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1670 /* Copy generated IV to memory */
1671 append_seq_store(desc, crt->ivsize,
1672 LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1673 (ctx1_iv_off << LDST_OFFSET_SHIFT));
1675 /* Load Counter into CONTEXT1 reg */
1677 append_load_imm_u32(desc, (u32)1, LDST_IMM |
1679 LDST_SRCDST_BYTE_CONTEXT |
1680 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1681 LDST_OFFSET_SHIFT));
1684 append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
1685 (1 << JUMP_OFFSET_SHIFT));
1687 /* Load operation */
1688 append_operation(desc, ctx->class1_alg_type |
1689 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
1691 /* Perform operation */
1692 ablkcipher_append_src_dst(desc);
1694 ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
1697 if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
1698 dev_err(jrdev, "unable to map shared descriptor\n");
1702 print_hex_dump(KERN_ERR,
1703 "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
1704 DUMP_PREFIX_ADDRESS, 16, 4, desc,
1705 desc_bytes(desc), 1);
1711 static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
1712 const u8 *key, unsigned int keylen)
1714 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1715 struct device *jrdev = ctx->jrdev;
1716 u32 *key_jump_cmd, *desc;
1717 __be64 sector_size = cpu_to_be64(512);
1719 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
1720 crypto_ablkcipher_set_flags(ablkcipher,
1721 CRYPTO_TFM_RES_BAD_KEY_LEN);
1722 dev_err(jrdev, "key size mismatch\n");
1723 return -EINVAL;
1724 }
1726 memcpy(ctx->key, key, keylen);
1727 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE);
1728 if (dma_mapping_error(jrdev, ctx->key_dma)) {
1729 dev_err(jrdev, "unable to map key i/o memory\n");
1732 ctx->enckeylen = keylen;
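/*
 * XTS notes (sketch): the key blob holds two AES keys of equal size (the
 * data key and the tweak key), hence the 2 * AES_*_KEY_SIZE check above.
 * The descriptors below assume a fixed 512-byte sector: the sector size
 * is loaded as an immediate at CONTEXT1 offset 0x28, and the upper
 * 8 bytes of the request IV are loaded at offset 0x20 as the sector
 * index.
 */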
1734 /* xts_ablkcipher_encrypt shared descriptor */
1735 desc = ctx->sh_desc_enc;
1736 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1737 /* Skip if already shared */
1738 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1741 /* Load class1 keys only */
1742 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1743 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1745 /* Load sector size with index 40 bytes (0x28) */
1746 append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
1747 LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
1748 append_data(desc, (void *)§or_size, 8);
1750 set_jump_tgt_here(desc, key_jump_cmd);
1753 * create sequence for loading the sector index
1754 * Upper 8B of IV - will be used as sector index
1755 * Lower 8B of IV - will be discarded
1757 append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
1758 LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
1759 append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1761 /* Load operation */
1762 append_operation(desc, ctx->class1_alg_type | OP_ALG_AS_INITFINAL |
1765 /* Perform operation */
1766 ablkcipher_append_src_dst(desc);
1768 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
1770 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1771 dev_err(jrdev, "unable to map shared descriptor\n");
1775 print_hex_dump(KERN_ERR,
1776 "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
1777 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1780 /* xts_ablkcipher_decrypt shared descriptor */
1781 desc = ctx->sh_desc_dec;
1783 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1784 /* Skip if already shared */
1785 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1788 /* Load class1 key only */
1789 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1790 ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1792 /* Load sector size with index 40 bytes (0x28) */
1793 append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
1794 LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
1795 append_data(desc, (void *)§or_size, 8);
1797 set_jump_tgt_here(desc, key_jump_cmd);
1800 * create sequence for loading the sector index
1801 * Upper 8B of IV - will be used as sector index
1802 * Lower 8B of IV - will be discarded
1804 append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
1805 LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
1806 append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1808 /* Load operation */
1809 append_dec_op1(desc, ctx->class1_alg_type);
1811 /* Perform operation */
1812 ablkcipher_append_src_dst(desc);
1814 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
1816 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1817 dma_unmap_single(jrdev, ctx->sh_desc_enc_dma,
1818 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
1819 dev_err(jrdev, "unable to map shared descriptor\n");
1823 print_hex_dump(KERN_ERR,
1824 "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
1825 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1832 * aead_edesc - s/w-extended aead descriptor
1833 * @assoc_nents: number of segments in associated data (SPI+Seq) scatterlist
1834 * @src_nents: number of segments in input scatterlist
1835 * @dst_nents: number of segments in output scatterlist
1836 * @iv_dma: dma address of iv for checking continuity and link table
1837 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
1838 * @sec4_sg_bytes: length of dma mapped sec4_sg space
1839 * @sec4_sg_dma: bus physical mapped address of h/w link table
1840 * @hw_desc: the h/w job descriptor followed by any referenced link tables
1848 dma_addr_t sec4_sg_dma;
1849 struct sec4_sg_entry *sec4_sg;
1854 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
1855 * @src_nents: number of segments in input scatterlist
1856 * @dst_nents: number of segments in output scatterlist
1857 * @iv_dma: dma address of iv for checking continuity and link table
1858 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
1859 * @sec4_sg_bytes: length of dma mapped sec4_sg space
1860 * @sec4_sg_dma: bus physical mapped address of h/w link table
1861 * @hw_desc: the h/w job descriptor followed by any referenced link tables
1863 struct ablkcipher_edesc {
1868 dma_addr_t sec4_sg_dma;
1869 struct sec4_sg_entry *sec4_sg;
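/*
 * Both *_edesc structures follow the same layout idea (sketch): the
 * software bookkeeping (scatterlist segment counts, IV and sec4_sg DMA
 * addresses) is followed, in the same allocation, by hw_desc[] - the
 * actual job descriptor handed to the job ring - and by the sec4
 * scatter/gather table it may reference. The completion callbacks
 * recover the edesc from the descriptor address with
 * container_of()/offsetof() as seen below.
 */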
1873 static void caam_unmap(struct device *dev, struct scatterlist *src,
1874 struct scatterlist *dst, int src_nents,
1876 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
1880 dma_unmap_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
1881 dma_unmap_sg(dev, dst, dst_nents ? : 1, DMA_FROM_DEVICE);
1883 dma_unmap_sg(dev, src, src_nents ? : 1, DMA_BIDIRECTIONAL);
1887 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
1889 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
1893 static void aead_unmap(struct device *dev,
1894 struct aead_edesc *edesc,
1895 struct aead_request *req)
1897 caam_unmap(dev, req->src, req->dst,
1898 edesc->src_nents, edesc->dst_nents, 0, 0,
1899 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
1902 static void ablkcipher_unmap(struct device *dev,
1903 struct ablkcipher_edesc *edesc,
1904 struct ablkcipher_request *req)
1906 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1907 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1909 caam_unmap(dev, req->src, req->dst,
1910 edesc->src_nents, edesc->dst_nents,
1911 edesc->iv_dma, ivsize,
1912 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
1915 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
1918 struct aead_request *req = context;
1919 struct aead_edesc *edesc;
1922 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1925 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
1928 caam_jr_strstatus(jrdev, err);
1930 aead_unmap(jrdev, edesc, req);
1934 aead_request_complete(req, err);
1937 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
1940 struct aead_request *req = context;
1941 struct aead_edesc *edesc;
1944 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1947 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
1950 caam_jr_strstatus(jrdev, err);
1952 aead_unmap(jrdev, edesc, req);
1955 * Verify that the hw auth check passed; otherwise return -EBADMSG
1957 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
1962 aead_request_complete(req, err);
1965 static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
1968 struct ablkcipher_request *req = context;
1969 struct ablkcipher_edesc *edesc;
1971 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1972 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1974 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1977 edesc = (struct ablkcipher_edesc *)((char *)desc -
1978 offsetof(struct ablkcipher_edesc, hw_desc));
1981 caam_jr_strstatus(jrdev, err);
1984 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
1985 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1986 edesc->src_nents > 1 ? 100 : ivsize, 1);
1987 print_hex_dump(KERN_ERR, "dst @"__stringify(__LINE__)": ",
1988 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
1989 edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
1992 ablkcipher_unmap(jrdev, edesc, req);
1995 ablkcipher_request_complete(req, err);
1998 static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
2001 struct ablkcipher_request *req = context;
2002 struct ablkcipher_edesc *edesc;
2004 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2005 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2007 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
2010 edesc = (struct ablkcipher_edesc *)((char *)desc -
2011 offsetof(struct ablkcipher_edesc, hw_desc));
2013 caam_jr_strstatus(jrdev, err);
2016 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
2017 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
2019 print_hex_dump(KERN_ERR, "dst @"__stringify(__LINE__)": ",
2020 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2021 edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
2024 ablkcipher_unmap(jrdev, edesc, req);
2027 ablkcipher_request_complete(req, err);
2031 * Fill in aead job descriptor
2033 static void init_aead_job(struct aead_request *req,
2034 struct aead_edesc *edesc,
2035 bool all_contig, bool encrypt)
2037 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2038 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2039 int authsize = ctx->authsize;
2040 u32 *desc = edesc->hw_desc;
2041 u32 out_options, in_options;
2042 dma_addr_t dst_dma, src_dma;
        int len, sec4_sg_index = 0;
        dma_addr_t ptr;
        u32 *sh_desc;

        sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
        ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

        len = desc_len(sh_desc);
        init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

        if (all_contig) {
                src_dma = sg_dma_address(req->src);
                in_options = 0;
        } else {
                src_dma = edesc->sec4_sg_dma;
                sec4_sg_index += edesc->src_nents;
                in_options = LDST_SGF;
        }

        append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
                          in_options);

        dst_dma = src_dma;
        out_options = in_options;
        if (unlikely(req->src != req->dst)) {
                if (!edesc->dst_nents) {
                        dst_dma = sg_dma_address(req->dst);
                } else {
                        dst_dma = edesc->sec4_sg_dma +
                                  sec4_sg_index *
                                  sizeof(struct sec4_sg_entry);
                        out_options = LDST_SGF;
                }
        }

        if (encrypt)
                append_seq_out_ptr(desc, dst_dma,
                                   req->assoclen + req->cryptlen + authsize,
                                   out_options);
        else
                append_seq_out_ptr(desc, dst_dma,
                                   req->assoclen + req->cryptlen - authsize,
                                   out_options);
        /* REG3 = assoclen */
        append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}
2092 static void init_gcm_job(struct aead_request *req,
2093 struct aead_edesc *edesc,
2094 bool all_contig, bool encrypt)
2096 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2097 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2098 unsigned int ivsize = crypto_aead_ivsize(aead);
2099 u32 *desc = edesc->hw_desc;
        bool generic_gcm = (ivsize == 12);
        unsigned int last;

        init_aead_job(req, edesc, all_contig, encrypt);

        /* BUG This should not be specific to generic GCM. */
        last = 0;
        if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
                last = FIFOLD_TYPE_LAST1;

        /* Read GCM IV */
        append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
                   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
        /* Append Salt */
        if (!generic_gcm)
                append_data(desc, ctx->key + ctx->enckeylen, 4);
        /* Append IV */
        append_data(desc, req->iv, ivsize);
        /* End of blank commands */
}
2121 static void init_authenc_job(struct aead_request *req,
2122 struct aead_edesc *edesc,
2123 bool all_contig, bool encrypt)
2125 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2126 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
2127 struct caam_aead_alg, aead);
2128 unsigned int ivsize = crypto_aead_ivsize(aead);
2129 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2130 const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
2131 OP_ALG_AAI_CTR_MOD128);
2132 const bool is_rfc3686 = alg->caam.rfc3686;
        u32 *desc = edesc->hw_desc;
        u32 ivoffset = 0;

        /*
         * AES-CTR needs to load IV in CONTEXT1 reg
         * at an offset of 128bits (16bytes)
         * CONTEXT1[255:128] = IV
         */
        if (ctr_mode)
                ivoffset = 16;
        /*
         * RFC3686 specific:
         *      CONTEXT1[255:128] = {NONCE, IV, COUNTER}
         */
        if (is_rfc3686)
                ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

        init_aead_job(req, edesc, all_contig, encrypt);

        if (ivsize && (is_rfc3686 || !(alg->caam.geniv && encrypt)))
                append_load_as_imm(desc, req->iv, ivsize,
                                   LDST_CLASS_1_CCB |
                                   LDST_SRCDST_BYTE_CONTEXT |
                                   (ivoffset << LDST_OFFSET_SHIFT));
}
/*
 * Fill in ablkcipher job descriptor
 */
2163 static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
2164 struct ablkcipher_edesc *edesc,
2165 struct ablkcipher_request *req,
2168 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2169 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2170 u32 *desc = edesc->hw_desc;
2171 u32 out_options = 0, in_options;
2172 dma_addr_t dst_dma, src_dma;
2173 int len, sec4_sg_index = 0;
2176 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
                       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
                       ivsize, 1);
2179 print_hex_dump(KERN_ERR, "src @"__stringify(__LINE__)": ",
2180 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2181 edesc->src_nents ? 100 : req->nbytes, 1);
        len = desc_len(sh_desc);
        init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

        if (iv_contig) {
                src_dma = edesc->iv_dma;
                in_options = 0;
        } else {
                src_dma = edesc->sec4_sg_dma;
                sec4_sg_index += edesc->src_nents + 1;
                in_options = LDST_SGF;
        }
        append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);
        if (likely(req->src == req->dst)) {
                if (!edesc->src_nents && iv_contig) {
                        dst_dma = sg_dma_address(req->src);
                } else {
                        dst_dma = edesc->sec4_sg_dma +
                                sizeof(struct sec4_sg_entry);
                        out_options = LDST_SGF;
                }
        } else {
                if (!edesc->dst_nents) {
                        dst_dma = sg_dma_address(req->dst);
                } else {
                        dst_dma = edesc->sec4_sg_dma +
                                sec4_sg_index * sizeof(struct sec4_sg_entry);
                        out_options = LDST_SGF;
                }
        }
        append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
/*
 * Fill in ablkcipher givencrypt job descriptor
 */
2220 static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
2221 struct ablkcipher_edesc *edesc,
2222 struct ablkcipher_request *req,
2225 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2226 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2227 u32 *desc = edesc->hw_desc;
2228 u32 out_options, in_options;
2229 dma_addr_t dst_dma, src_dma;
2230 int len, sec4_sg_index = 0;
2233 print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
                       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
                       ivsize, 1);
2236 print_hex_dump(KERN_ERR, "src @" __stringify(__LINE__) ": ",
2237 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2238 edesc->src_nents ? 100 : req->nbytes, 1);
2241 len = desc_len(sh_desc);
        init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

        if (!edesc->src_nents) {
                src_dma = sg_dma_address(req->src);
                in_options = 0;
        } else {
                src_dma = edesc->sec4_sg_dma;
                sec4_sg_index += edesc->src_nents;
                in_options = LDST_SGF;
        }
        append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

        if (iv_contig) {
                dst_dma = edesc->iv_dma;
                out_options = 0;
        } else {
                dst_dma = edesc->sec4_sg_dma +
                          sec4_sg_index * sizeof(struct sec4_sg_entry);
                out_options = LDST_SGF;
        }
        append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}
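/*
 * The "extended descriptor" (edesc) is the per-request software state:
 * it carries the hardware job descriptor, the DMA-mapped sec4
 * scatter/gather table and the src/dst nents bookkeeping needed to
 * unmap everything from the completion callback.
 */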
/*
 * allocate and map the aead extended descriptor
 */
2268 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
2269 int desc_bytes, bool *all_contig_ptr,
2272 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2273 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2274 struct device *jrdev = ctx->jrdev;
2275 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2276 CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
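        /*
         * Requests allowed to sleep or back-log get GFP_KERNEL allocations;
         * everything else must use GFP_ATOMIC.
         */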
2277 int src_nents, dst_nents = 0;
2278 struct aead_edesc *edesc;
2280 bool all_contig = true;
2281 int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
2282 unsigned int authsize = ctx->authsize;
2284 if (unlikely(req->dst != req->src)) {
2285 src_nents = sg_count(req->src, req->assoclen + req->cryptlen);
2286 dst_nents = sg_count(req->dst,
2287 req->assoclen + req->cryptlen +
2288 (encrypt ? authsize : (-authsize)));
2290 src_nents = sg_count(req->src,
2291 req->assoclen + req->cryptlen +
2292 (encrypt ? authsize : 0));
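        /*
         * On encryption the destination must additionally hold the ICV the
         * engine appends (+authsize); on decryption the ICV is consumed by
         * the engine, so the output is authsize bytes shorter than the input.
         */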
        /* Check if data are contiguous. */
        all_contig = !src_nents;
        if (!all_contig) {
                src_nents = src_nents ? : 1;
                sec4_sg_len = src_nents;
        }

        sec4_sg_len += dst_nents;

        sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
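        /*
         * A single allocation backs the edesc header, the hw_desc command
         * buffer and the sec4 link table; edesc->sec4_sg is pointed just
         * past the descriptor commands below.
         */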
        /* allocate space for base edesc and hw desc commands, link tables */
        edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
                        GFP_DMA | flags);
        if (!edesc) {
                dev_err(jrdev, "could not allocate extended descriptor\n");
                return ERR_PTR(-ENOMEM);
        }
2314 if (likely(req->src == req->dst)) {
2315 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2317 if (unlikely(!sgc)) {
2318 dev_err(jrdev, "unable to map source\n");
2320 return ERR_PTR(-ENOMEM);
2323 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2325 if (unlikely(!sgc)) {
2326 dev_err(jrdev, "unable to map source\n");
2328 return ERR_PTR(-ENOMEM);
2331 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2333 if (unlikely(!sgc)) {
2334 dev_err(jrdev, "unable to map destination\n");
2335 dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
2338 return ERR_PTR(-ENOMEM);
2342 edesc->src_nents = src_nents;
2343 edesc->dst_nents = dst_nents;
2344 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
2346 *all_contig_ptr = all_contig;
2350 sg_to_sec4_sg_last(req->src, src_nents,
2351 edesc->sec4_sg + sec4_sg_index, 0);
2352 sec4_sg_index += src_nents;
2355 sg_to_sec4_sg_last(req->dst, dst_nents,
2356 edesc->sec4_sg + sec4_sg_index, 0);
2362 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2363 sec4_sg_bytes, DMA_TO_DEVICE);
2364 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2365 dev_err(jrdev, "unable to map S/G table\n");
2366 aead_unmap(jrdev, edesc, req);
2368 return ERR_PTR(-ENOMEM);
        edesc->sec4_sg_bytes = sec4_sg_bytes;

        return edesc;
}
2376 static int gcm_encrypt(struct aead_request *req)
2378 struct aead_edesc *edesc;
2379 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2380 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2381 struct device *jrdev = ctx->jrdev;
2386 /* allocate extended descriptor */
2387 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);
2391 /* Create and submit job descriptor */
2392 init_gcm_job(req, edesc, all_contig, true);
2394 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2395 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2396 desc_bytes(edesc->hw_desc), 1);
2399 desc = edesc->hw_desc;
2400 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
2404 aead_unmap(jrdev, edesc, req);
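/*
 * The IPsec ESP (rfc4106/rfc4543) entry points expect at least the
 * 8-byte SPI + sequence number header as associated data, so anything
 * shorter is rejected before reaching the common GCM paths.
 */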
2411 static int ipsec_gcm_encrypt(struct aead_request *req)
        if (req->assoclen < 8)
                return -EINVAL;

        return gcm_encrypt(req);
}
2419 static int aead_encrypt(struct aead_request *req)
2421 struct aead_edesc *edesc;
2422 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2423 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2424 struct device *jrdev = ctx->jrdev;
2429 /* allocate extended descriptor */
2430 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);
2435 /* Create and submit job descriptor */
2436 init_authenc_job(req, edesc, all_contig, true);
2438 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2439 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2440 desc_bytes(edesc->hw_desc), 1);
2443 desc = edesc->hw_desc;
2444 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
2448 aead_unmap(jrdev, edesc, req);
2455 static int gcm_decrypt(struct aead_request *req)
2457 struct aead_edesc *edesc;
2458 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2459 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2460 struct device *jrdev = ctx->jrdev;
2465 /* allocate extended descriptor */
2466 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);
2470 /* Create and submit job descriptor*/
2471 init_gcm_job(req, edesc, all_contig, false);
2473 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2474 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2475 desc_bytes(edesc->hw_desc), 1);
2478 desc = edesc->hw_desc;
2479 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
2483 aead_unmap(jrdev, edesc, req);
2490 static int ipsec_gcm_decrypt(struct aead_request *req)
        if (req->assoclen < 8)
                return -EINVAL;

        return gcm_decrypt(req);
}
2498 static int aead_decrypt(struct aead_request *req)
2500 struct aead_edesc *edesc;
2501 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2502 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2503 struct device *jrdev = ctx->jrdev;
2508 /* allocate extended descriptor */
2509 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
2510 &all_contig, false);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);
2515 print_hex_dump(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
2516 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2517 req->assoclen + req->cryptlen, 1);
2520 /* Create and submit job descriptor*/
2521 init_authenc_job(req, edesc, all_contig, false);
2523 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2524 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2525 desc_bytes(edesc->hw_desc), 1);
2528 desc = edesc->hw_desc;
2529 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
2533 aead_unmap(jrdev, edesc, req);
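/*
 * For the geniv templates the IV generated at encryption time travels in
 * front of the ciphertext; on the decrypt side it is re-classified as
 * trailing associated data (cryptlen shrinks, assoclen grows by ivsize)
 * before running the normal authenc decrypt path.
 */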
2540 static int aead_givdecrypt(struct aead_request *req)
2542 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2543 unsigned int ivsize = crypto_aead_ivsize(aead);
        if (req->cryptlen < ivsize)
                return -EINVAL;

        req->cryptlen -= ivsize;
2549 req->assoclen += ivsize;
2551 return aead_decrypt(req);
/*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 */
2557 static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
2558 *req, int desc_bytes,
2559 bool *iv_contig_out)
2561 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2562 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2563 struct device *jrdev = ctx->jrdev;
2564 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2565 CRYPTO_TFM_REQ_MAY_SLEEP)) ?
2566 GFP_KERNEL : GFP_ATOMIC;
2567 int src_nents, dst_nents = 0, sec4_sg_bytes;
2568 struct ablkcipher_edesc *edesc;
2569 dma_addr_t iv_dma = 0;
2570 bool iv_contig = false;
2572 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2575 src_nents = sg_count(req->src, req->nbytes);
2577 if (req->dst != req->src)
2578 dst_nents = sg_count(req->dst, req->nbytes);
2580 if (likely(req->src == req->dst)) {
2581 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2584 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2586 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2590 iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
2591 if (dma_mapping_error(jrdev, iv_dma)) {
2592 dev_err(jrdev, "unable to map IV\n");
2593 return ERR_PTR(-ENOMEM);
        /*
         * Check if iv can be contiguous with source and destination.
         * If so, include it. If not, create scatterlist.
         */
        if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src))
                iv_contig = true;
        else
                src_nents = src_nents ? : 1;
2604 sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
2605 sizeof(struct sec4_sg_entry);
        /* allocate space for base edesc and hw desc commands, link tables */
        edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
                        GFP_DMA | flags);
        if (!edesc) {
                dev_err(jrdev, "could not allocate extended descriptor\n");
                return ERR_PTR(-ENOMEM);
        }
2615 edesc->src_nents = src_nents;
2616 edesc->dst_nents = dst_nents;
2617 edesc->sec4_sg_bytes = sec4_sg_bytes;
        edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
                         desc_bytes;

        sec4_sg_index = 0;
        if (!iv_contig) {
                dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
                sg_to_sec4_sg_last(req->src, src_nents,
                                   edesc->sec4_sg + 1, 0);
                sec4_sg_index += 1 + src_nents;
        }
2630 sg_to_sec4_sg_last(req->dst, dst_nents,
2631 edesc->sec4_sg + sec4_sg_index, 0);
2634 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2635 sec4_sg_bytes, DMA_TO_DEVICE);
2636 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2637 dev_err(jrdev, "unable to map S/G table\n");
2638 return ERR_PTR(-ENOMEM);
2641 edesc->iv_dma = iv_dma;
2644 print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
2645 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
        *iv_contig_out = iv_contig;
        return edesc;
}
2653 static int ablkcipher_encrypt(struct ablkcipher_request *req)
2655 struct ablkcipher_edesc *edesc;
2656 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2657 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2658 struct device *jrdev = ctx->jrdev;
2663 /* allocate extended descriptor */
2664 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
2665 CAAM_CMD_SZ, &iv_contig);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);
2669 /* Create and submit job descriptor*/
2670 init_ablkcipher_job(ctx->sh_desc_enc,
2671 ctx->sh_desc_enc_dma, edesc, req, iv_contig);
2673 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
2674 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2675 desc_bytes(edesc->hw_desc), 1);
2677 desc = edesc->hw_desc;
2678 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
2683 ablkcipher_unmap(jrdev, edesc, req);
2690 static int ablkcipher_decrypt(struct ablkcipher_request *req)
2692 struct ablkcipher_edesc *edesc;
2693 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2694 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2695 struct device *jrdev = ctx->jrdev;
2700 /* allocate extended descriptor */
2701 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
2702 CAAM_CMD_SZ, &iv_contig);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);
2706 /* Create and submit job descriptor*/
2707 init_ablkcipher_job(ctx->sh_desc_dec,
2708 ctx->sh_desc_dec_dma, edesc, req, iv_contig);
2709 desc = edesc->hw_desc;
2711 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
2712 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2713 desc_bytes(edesc->hw_desc), 1);
2716 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
2720 ablkcipher_unmap(jrdev, edesc, req);
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
2731 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
2732 struct skcipher_givcrypt_request *greq,
2734 bool *iv_contig_out)
2736 struct ablkcipher_request *req = &greq->creq;
2737 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2738 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2739 struct device *jrdev = ctx->jrdev;
2740 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2741 CRYPTO_TFM_REQ_MAY_SLEEP)) ?
2742 GFP_KERNEL : GFP_ATOMIC;
2743 int src_nents, dst_nents = 0, sec4_sg_bytes;
2744 struct ablkcipher_edesc *edesc;
2745 dma_addr_t iv_dma = 0;
2746 bool iv_contig = false;
2748 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2751 src_nents = sg_count(req->src, req->nbytes);
2753 if (unlikely(req->dst != req->src))
2754 dst_nents = sg_count(req->dst, req->nbytes);
2756 if (likely(req->src == req->dst)) {
2757 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2760 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2762 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
        /*
         * Check if iv can be contiguous with source and destination.
         * If so, include it. If not, create scatterlist.
         */
2770 iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
2771 if (dma_mapping_error(jrdev, iv_dma)) {
2772 dev_err(jrdev, "unable to map IV\n");
2773 return ERR_PTR(-ENOMEM);
        if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst))
                iv_contig = true;
        else
                dst_nents = dst_nents ? : 1;
2780 sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
2781 sizeof(struct sec4_sg_entry);
        /* allocate space for base edesc and hw desc commands, link tables */
        edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
                        GFP_DMA | flags);
        if (!edesc) {
                dev_err(jrdev, "could not allocate extended descriptor\n");
                return ERR_PTR(-ENOMEM);
        }
2791 edesc->src_nents = src_nents;
2792 edesc->dst_nents = dst_nents;
2793 edesc->sec4_sg_bytes = sec4_sg_bytes;
2794 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
2799 sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0);
2800 sec4_sg_index += src_nents;
2804 dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
2807 sg_to_sec4_sg_last(req->dst, dst_nents,
2808 edesc->sec4_sg + sec4_sg_index, 0);
2811 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2812 sec4_sg_bytes, DMA_TO_DEVICE);
2813 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2814 dev_err(jrdev, "unable to map S/G table\n");
2815 return ERR_PTR(-ENOMEM);
2817 edesc->iv_dma = iv_dma;
2820 print_hex_dump(KERN_ERR,
2821 "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
2822 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
        *iv_contig_out = iv_contig;
        return edesc;
}
2830 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
2832 struct ablkcipher_request *req = &creq->creq;
2833 struct ablkcipher_edesc *edesc;
2834 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2835 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2836 struct device *jrdev = ctx->jrdev;
2841 /* allocate extended descriptor */
2842 edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
2843 CAAM_CMD_SZ, &iv_contig);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);
2847 /* Create and submit job descriptor*/
2848 init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
2849 edesc, req, iv_contig);
2851 print_hex_dump(KERN_ERR,
2852 "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
2853 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2854 desc_bytes(edesc->hw_desc), 1);
2856 desc = edesc->hw_desc;
2857 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
2862 ablkcipher_unmap(jrdev, edesc, req);
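/*
 * Algorithm templates: driver_algs[] describes the plain block cipher modes
 * (registered through the ablkcipher/givcipher interface) and driver_aeads[]
 * the AEAD combinations. Entries are instantiated at module init only when
 * the required CHAs (DES, AES, MD) are present in the part.
 */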
2869 #define template_aead template_u.aead
2870 #define template_ablkcipher template_u.ablkcipher
2871 struct caam_alg_template {
2872 char name[CRYPTO_MAX_ALG_NAME];
2873 char driver_name[CRYPTO_MAX_ALG_NAME];
2874 unsigned int blocksize;
2877 struct ablkcipher_alg ablkcipher;
2879 u32 class1_alg_type;
2880 u32 class2_alg_type;
2884 static struct caam_alg_template driver_algs[] = {
2885 /* ablkcipher descriptor */
2888 .driver_name = "cbc-aes-caam",
2889 .blocksize = AES_BLOCK_SIZE,
2890 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2891 .template_ablkcipher = {
2892 .setkey = ablkcipher_setkey,
2893 .encrypt = ablkcipher_encrypt,
2894 .decrypt = ablkcipher_decrypt,
2895 .givencrypt = ablkcipher_givencrypt,
2896 .geniv = "<built-in>",
2897 .min_keysize = AES_MIN_KEY_SIZE,
2898 .max_keysize = AES_MAX_KEY_SIZE,
2899 .ivsize = AES_BLOCK_SIZE,
2901 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2904 .name = "cbc(des3_ede)",
2905 .driver_name = "cbc-3des-caam",
2906 .blocksize = DES3_EDE_BLOCK_SIZE,
2907 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2908 .template_ablkcipher = {
2909 .setkey = ablkcipher_setkey,
2910 .encrypt = ablkcipher_encrypt,
2911 .decrypt = ablkcipher_decrypt,
2912 .givencrypt = ablkcipher_givencrypt,
2913 .geniv = "<built-in>",
2914 .min_keysize = DES3_EDE_KEY_SIZE,
2915 .max_keysize = DES3_EDE_KEY_SIZE,
2916 .ivsize = DES3_EDE_BLOCK_SIZE,
2918 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2922 .driver_name = "cbc-des-caam",
2923 .blocksize = DES_BLOCK_SIZE,
2924 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2925 .template_ablkcipher = {
2926 .setkey = ablkcipher_setkey,
2927 .encrypt = ablkcipher_encrypt,
2928 .decrypt = ablkcipher_decrypt,
2929 .givencrypt = ablkcipher_givencrypt,
2930 .geniv = "<built-in>",
2931 .min_keysize = DES_KEY_SIZE,
2932 .max_keysize = DES_KEY_SIZE,
2933 .ivsize = DES_BLOCK_SIZE,
2935 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2939 .driver_name = "ctr-aes-caam",
2941 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2942 .template_ablkcipher = {
2943 .setkey = ablkcipher_setkey,
2944 .encrypt = ablkcipher_encrypt,
2945 .decrypt = ablkcipher_decrypt,
2947 .min_keysize = AES_MIN_KEY_SIZE,
2948 .max_keysize = AES_MAX_KEY_SIZE,
2949 .ivsize = AES_BLOCK_SIZE,
2951 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
2954 .name = "rfc3686(ctr(aes))",
2955 .driver_name = "rfc3686-ctr-aes-caam",
2957 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2958 .template_ablkcipher = {
2959 .setkey = ablkcipher_setkey,
2960 .encrypt = ablkcipher_encrypt,
2961 .decrypt = ablkcipher_decrypt,
2962 .givencrypt = ablkcipher_givencrypt,
2963 .geniv = "<built-in>",
2964 .min_keysize = AES_MIN_KEY_SIZE +
2965 CTR_RFC3686_NONCE_SIZE,
2966 .max_keysize = AES_MAX_KEY_SIZE +
2967 CTR_RFC3686_NONCE_SIZE,
2968 .ivsize = CTR_RFC3686_IV_SIZE,
2970 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
2974 .driver_name = "xts-aes-caam",
2975 .blocksize = AES_BLOCK_SIZE,
2976 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2977 .template_ablkcipher = {
2978 .setkey = xts_ablkcipher_setkey,
2979 .encrypt = ablkcipher_encrypt,
2980 .decrypt = ablkcipher_decrypt,
2982 .min_keysize = 2 * AES_MIN_KEY_SIZE,
2983 .max_keysize = 2 * AES_MAX_KEY_SIZE,
2984 .ivsize = AES_BLOCK_SIZE,
                .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
        },
};
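/*
 * AEAD algorithms use the aead_alg interface; the rfc4106/rfc4543 entries
 * go through the ipsec_gcm_* wrappers so the associated data length can be
 * sanity-checked before the generic GCM code runs.
 */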
2990 static struct caam_aead_alg driver_aeads[] = {
2994 .cra_name = "rfc4106(gcm(aes))",
2995 .cra_driver_name = "rfc4106-gcm-aes-caam",
2998 .setkey = rfc4106_setkey,
2999 .setauthsize = rfc4106_setauthsize,
3000 .encrypt = ipsec_gcm_encrypt,
3001 .decrypt = ipsec_gcm_decrypt,
3003 .maxauthsize = AES_BLOCK_SIZE,
3006 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3012 .cra_name = "rfc4543(gcm(aes))",
3013 .cra_driver_name = "rfc4543-gcm-aes-caam",
3016 .setkey = rfc4543_setkey,
3017 .setauthsize = rfc4543_setauthsize,
3018 .encrypt = ipsec_gcm_encrypt,
3019 .decrypt = ipsec_gcm_decrypt,
3021 .maxauthsize = AES_BLOCK_SIZE,
3024 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3027 /* Galois Counter Mode */
3031 .cra_name = "gcm(aes)",
3032 .cra_driver_name = "gcm-aes-caam",
3035 .setkey = gcm_setkey,
3036 .setauthsize = gcm_setauthsize,
3037 .encrypt = gcm_encrypt,
3038 .decrypt = gcm_decrypt,
3040 .maxauthsize = AES_BLOCK_SIZE,
3043 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3046 /* single-pass ipsec_esp descriptor */
3050 .cra_name = "authenc(hmac(md5),"
3051 "ecb(cipher_null))",
3052 .cra_driver_name = "authenc-hmac-md5-"
3053 "ecb-cipher_null-caam",
3054 .cra_blocksize = NULL_BLOCK_SIZE,
3056 .setkey = aead_setkey,
3057 .setauthsize = aead_setauthsize,
3058 .encrypt = aead_encrypt,
3059 .decrypt = aead_decrypt,
3060 .ivsize = NULL_IV_SIZE,
3061 .maxauthsize = MD5_DIGEST_SIZE,
3064 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3065 OP_ALG_AAI_HMAC_PRECOMP,
3066 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3072 .cra_name = "authenc(hmac(sha1),"
3073 "ecb(cipher_null))",
3074 .cra_driver_name = "authenc-hmac-sha1-"
3075 "ecb-cipher_null-caam",
3076 .cra_blocksize = NULL_BLOCK_SIZE,
3078 .setkey = aead_setkey,
3079 .setauthsize = aead_setauthsize,
3080 .encrypt = aead_encrypt,
3081 .decrypt = aead_decrypt,
3082 .ivsize = NULL_IV_SIZE,
3083 .maxauthsize = SHA1_DIGEST_SIZE,
3086 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3087 OP_ALG_AAI_HMAC_PRECOMP,
3088 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3094 .cra_name = "authenc(hmac(sha224),"
3095 "ecb(cipher_null))",
3096 .cra_driver_name = "authenc-hmac-sha224-"
3097 "ecb-cipher_null-caam",
3098 .cra_blocksize = NULL_BLOCK_SIZE,
3100 .setkey = aead_setkey,
3101 .setauthsize = aead_setauthsize,
3102 .encrypt = aead_encrypt,
3103 .decrypt = aead_decrypt,
3104 .ivsize = NULL_IV_SIZE,
3105 .maxauthsize = SHA224_DIGEST_SIZE,
3108 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3109 OP_ALG_AAI_HMAC_PRECOMP,
3110 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3116 .cra_name = "authenc(hmac(sha256),"
3117 "ecb(cipher_null))",
3118 .cra_driver_name = "authenc-hmac-sha256-"
3119 "ecb-cipher_null-caam",
3120 .cra_blocksize = NULL_BLOCK_SIZE,
3122 .setkey = aead_setkey,
3123 .setauthsize = aead_setauthsize,
3124 .encrypt = aead_encrypt,
3125 .decrypt = aead_decrypt,
3126 .ivsize = NULL_IV_SIZE,
3127 .maxauthsize = SHA256_DIGEST_SIZE,
3130 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3131 OP_ALG_AAI_HMAC_PRECOMP,
3132 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3138 .cra_name = "authenc(hmac(sha384),"
3139 "ecb(cipher_null))",
3140 .cra_driver_name = "authenc-hmac-sha384-"
3141 "ecb-cipher_null-caam",
3142 .cra_blocksize = NULL_BLOCK_SIZE,
3144 .setkey = aead_setkey,
3145 .setauthsize = aead_setauthsize,
3146 .encrypt = aead_encrypt,
3147 .decrypt = aead_decrypt,
3148 .ivsize = NULL_IV_SIZE,
3149 .maxauthsize = SHA384_DIGEST_SIZE,
3152 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3153 OP_ALG_AAI_HMAC_PRECOMP,
3154 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3160 .cra_name = "authenc(hmac(sha512),"
3161 "ecb(cipher_null))",
3162 .cra_driver_name = "authenc-hmac-sha512-"
3163 "ecb-cipher_null-caam",
3164 .cra_blocksize = NULL_BLOCK_SIZE,
3166 .setkey = aead_setkey,
3167 .setauthsize = aead_setauthsize,
3168 .encrypt = aead_encrypt,
3169 .decrypt = aead_decrypt,
3170 .ivsize = NULL_IV_SIZE,
3171 .maxauthsize = SHA512_DIGEST_SIZE,
3174 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3175 OP_ALG_AAI_HMAC_PRECOMP,
3176 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3182 .cra_name = "authenc(hmac(md5),cbc(aes))",
3183 .cra_driver_name = "authenc-hmac-md5-"
3185 .cra_blocksize = AES_BLOCK_SIZE,
3187 .setkey = aead_setkey,
3188 .setauthsize = aead_setauthsize,
3189 .encrypt = aead_encrypt,
3190 .decrypt = aead_decrypt,
3191 .ivsize = AES_BLOCK_SIZE,
3192 .maxauthsize = MD5_DIGEST_SIZE,
3195 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3196 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3197 OP_ALG_AAI_HMAC_PRECOMP,
3198 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3204 .cra_name = "echainiv(authenc(hmac(md5),"
3206 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3208 .cra_blocksize = AES_BLOCK_SIZE,
3210 .setkey = aead_setkey,
3211 .setauthsize = aead_setauthsize,
3212 .encrypt = aead_encrypt,
3213 .decrypt = aead_givdecrypt,
3214 .ivsize = AES_BLOCK_SIZE,
3215 .maxauthsize = MD5_DIGEST_SIZE,
3218 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3219 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3220 OP_ALG_AAI_HMAC_PRECOMP,
3221 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3228 .cra_name = "authenc(hmac(sha1),cbc(aes))",
3229 .cra_driver_name = "authenc-hmac-sha1-"
3231 .cra_blocksize = AES_BLOCK_SIZE,
3233 .setkey = aead_setkey,
3234 .setauthsize = aead_setauthsize,
3235 .encrypt = aead_encrypt,
3236 .decrypt = aead_decrypt,
3237 .ivsize = AES_BLOCK_SIZE,
3238 .maxauthsize = SHA1_DIGEST_SIZE,
3241 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3242 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3243 OP_ALG_AAI_HMAC_PRECOMP,
3244 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3250 .cra_name = "echainiv(authenc(hmac(sha1),"
3252 .cra_driver_name = "echainiv-authenc-"
3253 "hmac-sha1-cbc-aes-caam",
3254 .cra_blocksize = AES_BLOCK_SIZE,
3256 .setkey = aead_setkey,
3257 .setauthsize = aead_setauthsize,
3258 .encrypt = aead_encrypt,
3259 .decrypt = aead_givdecrypt,
3260 .ivsize = AES_BLOCK_SIZE,
3261 .maxauthsize = SHA1_DIGEST_SIZE,
3264 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3265 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3266 OP_ALG_AAI_HMAC_PRECOMP,
3267 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3274 .cra_name = "authenc(hmac(sha224),cbc(aes))",
3275 .cra_driver_name = "authenc-hmac-sha224-"
3277 .cra_blocksize = AES_BLOCK_SIZE,
3279 .setkey = aead_setkey,
3280 .setauthsize = aead_setauthsize,
3281 .encrypt = aead_encrypt,
3282 .decrypt = aead_decrypt,
3283 .ivsize = AES_BLOCK_SIZE,
3284 .maxauthsize = SHA224_DIGEST_SIZE,
3287 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3288 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3289 OP_ALG_AAI_HMAC_PRECOMP,
3290 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3296 .cra_name = "echainiv(authenc(hmac(sha224),"
3298 .cra_driver_name = "echainiv-authenc-"
3299 "hmac-sha224-cbc-aes-caam",
3300 .cra_blocksize = AES_BLOCK_SIZE,
3302 .setkey = aead_setkey,
3303 .setauthsize = aead_setauthsize,
3304 .encrypt = aead_encrypt,
3305 .decrypt = aead_givdecrypt,
3306 .ivsize = AES_BLOCK_SIZE,
3307 .maxauthsize = SHA224_DIGEST_SIZE,
3310 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3311 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3312 OP_ALG_AAI_HMAC_PRECOMP,
3313 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3320 .cra_name = "authenc(hmac(sha256),cbc(aes))",
3321 .cra_driver_name = "authenc-hmac-sha256-"
3323 .cra_blocksize = AES_BLOCK_SIZE,
3325 .setkey = aead_setkey,
3326 .setauthsize = aead_setauthsize,
3327 .encrypt = aead_encrypt,
3328 .decrypt = aead_decrypt,
3329 .ivsize = AES_BLOCK_SIZE,
3330 .maxauthsize = SHA256_DIGEST_SIZE,
3333 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3334 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3335 OP_ALG_AAI_HMAC_PRECOMP,
3336 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3342 .cra_name = "echainiv(authenc(hmac(sha256),"
3344 .cra_driver_name = "echainiv-authenc-"
3345 "hmac-sha256-cbc-aes-caam",
3346 .cra_blocksize = AES_BLOCK_SIZE,
3348 .setkey = aead_setkey,
3349 .setauthsize = aead_setauthsize,
3350 .encrypt = aead_encrypt,
3351 .decrypt = aead_givdecrypt,
3352 .ivsize = AES_BLOCK_SIZE,
3353 .maxauthsize = SHA256_DIGEST_SIZE,
3356 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3357 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3358 OP_ALG_AAI_HMAC_PRECOMP,
3359 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3366 .cra_name = "authenc(hmac(sha384),cbc(aes))",
3367 .cra_driver_name = "authenc-hmac-sha384-"
3369 .cra_blocksize = AES_BLOCK_SIZE,
3371 .setkey = aead_setkey,
3372 .setauthsize = aead_setauthsize,
3373 .encrypt = aead_encrypt,
3374 .decrypt = aead_decrypt,
3375 .ivsize = AES_BLOCK_SIZE,
3376 .maxauthsize = SHA384_DIGEST_SIZE,
3379 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3380 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3381 OP_ALG_AAI_HMAC_PRECOMP,
3382 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3388 .cra_name = "echainiv(authenc(hmac(sha384),"
3390 .cra_driver_name = "echainiv-authenc-"
3391 "hmac-sha384-cbc-aes-caam",
3392 .cra_blocksize = AES_BLOCK_SIZE,
3394 .setkey = aead_setkey,
3395 .setauthsize = aead_setauthsize,
3396 .encrypt = aead_encrypt,
3397 .decrypt = aead_givdecrypt,
3398 .ivsize = AES_BLOCK_SIZE,
3399 .maxauthsize = SHA384_DIGEST_SIZE,
3402 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3403 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3404 OP_ALG_AAI_HMAC_PRECOMP,
3405 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3412 .cra_name = "authenc(hmac(sha512),cbc(aes))",
3413 .cra_driver_name = "authenc-hmac-sha512-"
3415 .cra_blocksize = AES_BLOCK_SIZE,
3417 .setkey = aead_setkey,
3418 .setauthsize = aead_setauthsize,
3419 .encrypt = aead_encrypt,
3420 .decrypt = aead_decrypt,
3421 .ivsize = AES_BLOCK_SIZE,
3422 .maxauthsize = SHA512_DIGEST_SIZE,
3425 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3426 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3427 OP_ALG_AAI_HMAC_PRECOMP,
3428 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3434 .cra_name = "echainiv(authenc(hmac(sha512),"
3436 .cra_driver_name = "echainiv-authenc-"
3437 "hmac-sha512-cbc-aes-caam",
3438 .cra_blocksize = AES_BLOCK_SIZE,
3440 .setkey = aead_setkey,
3441 .setauthsize = aead_setauthsize,
3442 .encrypt = aead_encrypt,
3443 .decrypt = aead_givdecrypt,
3444 .ivsize = AES_BLOCK_SIZE,
3445 .maxauthsize = SHA512_DIGEST_SIZE,
3448 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3449 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3450 OP_ALG_AAI_HMAC_PRECOMP,
3451 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3458 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
3459 .cra_driver_name = "authenc-hmac-md5-"
3460 "cbc-des3_ede-caam",
3461 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3463 .setkey = aead_setkey,
3464 .setauthsize = aead_setauthsize,
3465 .encrypt = aead_encrypt,
3466 .decrypt = aead_decrypt,
3467 .ivsize = DES3_EDE_BLOCK_SIZE,
3468 .maxauthsize = MD5_DIGEST_SIZE,
3471 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3472 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3473 OP_ALG_AAI_HMAC_PRECOMP,
3474 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3480 .cra_name = "echainiv(authenc(hmac(md5),"
3482 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3483 "cbc-des3_ede-caam",
3484 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3486 .setkey = aead_setkey,
3487 .setauthsize = aead_setauthsize,
3488 .encrypt = aead_encrypt,
3489 .decrypt = aead_givdecrypt,
3490 .ivsize = DES3_EDE_BLOCK_SIZE,
3491 .maxauthsize = MD5_DIGEST_SIZE,
3494 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3495 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3496 OP_ALG_AAI_HMAC_PRECOMP,
3497 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3504 .cra_name = "authenc(hmac(sha1),"
3506 .cra_driver_name = "authenc-hmac-sha1-"
3507 "cbc-des3_ede-caam",
3508 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3510 .setkey = aead_setkey,
3511 .setauthsize = aead_setauthsize,
3512 .encrypt = aead_encrypt,
3513 .decrypt = aead_decrypt,
3514 .ivsize = DES3_EDE_BLOCK_SIZE,
3515 .maxauthsize = SHA1_DIGEST_SIZE,
3518 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3519 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3520 OP_ALG_AAI_HMAC_PRECOMP,
3521 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3527 .cra_name = "echainiv(authenc(hmac(sha1),"
3529 .cra_driver_name = "echainiv-authenc-"
3531 "cbc-des3_ede-caam",
3532 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3534 .setkey = aead_setkey,
3535 .setauthsize = aead_setauthsize,
3536 .encrypt = aead_encrypt,
3537 .decrypt = aead_givdecrypt,
3538 .ivsize = DES3_EDE_BLOCK_SIZE,
3539 .maxauthsize = SHA1_DIGEST_SIZE,
3542 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3543 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3544 OP_ALG_AAI_HMAC_PRECOMP,
3545 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3552 .cra_name = "authenc(hmac(sha224),"
3554 .cra_driver_name = "authenc-hmac-sha224-"
3555 "cbc-des3_ede-caam",
3556 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3558 .setkey = aead_setkey,
3559 .setauthsize = aead_setauthsize,
3560 .encrypt = aead_encrypt,
3561 .decrypt = aead_decrypt,
3562 .ivsize = DES3_EDE_BLOCK_SIZE,
3563 .maxauthsize = SHA224_DIGEST_SIZE,
3566 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3567 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3568 OP_ALG_AAI_HMAC_PRECOMP,
3569 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3575 .cra_name = "echainiv(authenc(hmac(sha224),"
3577 .cra_driver_name = "echainiv-authenc-"
3579 "cbc-des3_ede-caam",
3580 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3582 .setkey = aead_setkey,
3583 .setauthsize = aead_setauthsize,
3584 .encrypt = aead_encrypt,
3585 .decrypt = aead_givdecrypt,
3586 .ivsize = DES3_EDE_BLOCK_SIZE,
3587 .maxauthsize = SHA224_DIGEST_SIZE,
3590 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3591 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3592 OP_ALG_AAI_HMAC_PRECOMP,
3593 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3600 .cra_name = "authenc(hmac(sha256),"
3602 .cra_driver_name = "authenc-hmac-sha256-"
3603 "cbc-des3_ede-caam",
3604 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3606 .setkey = aead_setkey,
3607 .setauthsize = aead_setauthsize,
3608 .encrypt = aead_encrypt,
3609 .decrypt = aead_decrypt,
3610 .ivsize = DES3_EDE_BLOCK_SIZE,
3611 .maxauthsize = SHA256_DIGEST_SIZE,
3614 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3615 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3616 OP_ALG_AAI_HMAC_PRECOMP,
3617 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3623 .cra_name = "echainiv(authenc(hmac(sha256),"
3625 .cra_driver_name = "echainiv-authenc-"
3627 "cbc-des3_ede-caam",
3628 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3630 .setkey = aead_setkey,
3631 .setauthsize = aead_setauthsize,
3632 .encrypt = aead_encrypt,
3633 .decrypt = aead_givdecrypt,
3634 .ivsize = DES3_EDE_BLOCK_SIZE,
3635 .maxauthsize = SHA256_DIGEST_SIZE,
3638 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3639 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3640 OP_ALG_AAI_HMAC_PRECOMP,
3641 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3648 .cra_name = "authenc(hmac(sha384),"
3650 .cra_driver_name = "authenc-hmac-sha384-"
3651 "cbc-des3_ede-caam",
3652 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3654 .setkey = aead_setkey,
3655 .setauthsize = aead_setauthsize,
3656 .encrypt = aead_encrypt,
3657 .decrypt = aead_decrypt,
3658 .ivsize = DES3_EDE_BLOCK_SIZE,
3659 .maxauthsize = SHA384_DIGEST_SIZE,
3662 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3663 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3664 OP_ALG_AAI_HMAC_PRECOMP,
3665 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3671 .cra_name = "echainiv(authenc(hmac(sha384),"
3673 .cra_driver_name = "echainiv-authenc-"
3675 "cbc-des3_ede-caam",
3676 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3678 .setkey = aead_setkey,
3679 .setauthsize = aead_setauthsize,
3680 .encrypt = aead_encrypt,
3681 .decrypt = aead_givdecrypt,
3682 .ivsize = DES3_EDE_BLOCK_SIZE,
3683 .maxauthsize = SHA384_DIGEST_SIZE,
3686 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3687 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3688 OP_ALG_AAI_HMAC_PRECOMP,
3689 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3696 .cra_name = "authenc(hmac(sha512),"
3698 .cra_driver_name = "authenc-hmac-sha512-"
3699 "cbc-des3_ede-caam",
3700 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3702 .setkey = aead_setkey,
3703 .setauthsize = aead_setauthsize,
3704 .encrypt = aead_encrypt,
3705 .decrypt = aead_decrypt,
3706 .ivsize = DES3_EDE_BLOCK_SIZE,
3707 .maxauthsize = SHA512_DIGEST_SIZE,
3710 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3711 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3712 OP_ALG_AAI_HMAC_PRECOMP,
3713 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3719 .cra_name = "echainiv(authenc(hmac(sha512),"
3721 .cra_driver_name = "echainiv-authenc-"
3723 "cbc-des3_ede-caam",
3724 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3726 .setkey = aead_setkey,
3727 .setauthsize = aead_setauthsize,
3728 .encrypt = aead_encrypt,
3729 .decrypt = aead_givdecrypt,
3730 .ivsize = DES3_EDE_BLOCK_SIZE,
3731 .maxauthsize = SHA512_DIGEST_SIZE,
3734 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3735 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3736 OP_ALG_AAI_HMAC_PRECOMP,
3737 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3744 .cra_name = "authenc(hmac(md5),cbc(des))",
3745 .cra_driver_name = "authenc-hmac-md5-"
3747 .cra_blocksize = DES_BLOCK_SIZE,
3749 .setkey = aead_setkey,
3750 .setauthsize = aead_setauthsize,
3751 .encrypt = aead_encrypt,
3752 .decrypt = aead_decrypt,
3753 .ivsize = DES_BLOCK_SIZE,
3754 .maxauthsize = MD5_DIGEST_SIZE,
3757 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3758 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3759 OP_ALG_AAI_HMAC_PRECOMP,
3760 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3766 .cra_name = "echainiv(authenc(hmac(md5),"
3768 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3770 .cra_blocksize = DES_BLOCK_SIZE,
3772 .setkey = aead_setkey,
3773 .setauthsize = aead_setauthsize,
3774 .encrypt = aead_encrypt,
3775 .decrypt = aead_givdecrypt,
3776 .ivsize = DES_BLOCK_SIZE,
3777 .maxauthsize = MD5_DIGEST_SIZE,
3780 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3781 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3782 OP_ALG_AAI_HMAC_PRECOMP,
3783 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3790 .cra_name = "authenc(hmac(sha1),cbc(des))",
3791 .cra_driver_name = "authenc-hmac-sha1-"
3793 .cra_blocksize = DES_BLOCK_SIZE,
3795 .setkey = aead_setkey,
3796 .setauthsize = aead_setauthsize,
3797 .encrypt = aead_encrypt,
3798 .decrypt = aead_decrypt,
3799 .ivsize = DES_BLOCK_SIZE,
3800 .maxauthsize = SHA1_DIGEST_SIZE,
3803 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3804 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3805 OP_ALG_AAI_HMAC_PRECOMP,
3806 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3812 .cra_name = "echainiv(authenc(hmac(sha1),"
3814 .cra_driver_name = "echainiv-authenc-"
3815 "hmac-sha1-cbc-des-caam",
3816 .cra_blocksize = DES_BLOCK_SIZE,
3818 .setkey = aead_setkey,
3819 .setauthsize = aead_setauthsize,
3820 .encrypt = aead_encrypt,
3821 .decrypt = aead_givdecrypt,
3822 .ivsize = DES_BLOCK_SIZE,
3823 .maxauthsize = SHA1_DIGEST_SIZE,
3826 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3827 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3828 OP_ALG_AAI_HMAC_PRECOMP,
3829 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3836 .cra_name = "authenc(hmac(sha224),cbc(des))",
3837 .cra_driver_name = "authenc-hmac-sha224-"
3839 .cra_blocksize = DES_BLOCK_SIZE,
3841 .setkey = aead_setkey,
3842 .setauthsize = aead_setauthsize,
3843 .encrypt = aead_encrypt,
3844 .decrypt = aead_decrypt,
3845 .ivsize = DES_BLOCK_SIZE,
3846 .maxauthsize = SHA224_DIGEST_SIZE,
3849 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3850 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3851 OP_ALG_AAI_HMAC_PRECOMP,
3852 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3858 .cra_name = "echainiv(authenc(hmac(sha224),"
3860 .cra_driver_name = "echainiv-authenc-"
3861 "hmac-sha224-cbc-des-caam",
3862 .cra_blocksize = DES_BLOCK_SIZE,
3864 .setkey = aead_setkey,
3865 .setauthsize = aead_setauthsize,
3866 .encrypt = aead_encrypt,
3867 .decrypt = aead_givdecrypt,
3868 .ivsize = DES_BLOCK_SIZE,
3869 .maxauthsize = SHA224_DIGEST_SIZE,
3872 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3873 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3874 OP_ALG_AAI_HMAC_PRECOMP,
3875 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3882 .cra_name = "authenc(hmac(sha256),cbc(des))",
3883 .cra_driver_name = "authenc-hmac-sha256-"
3885 .cra_blocksize = DES_BLOCK_SIZE,
3887 .setkey = aead_setkey,
3888 .setauthsize = aead_setauthsize,
3889 .encrypt = aead_encrypt,
3890 .decrypt = aead_decrypt,
3891 .ivsize = DES_BLOCK_SIZE,
3892 .maxauthsize = SHA256_DIGEST_SIZE,
3895 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3896 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3897 OP_ALG_AAI_HMAC_PRECOMP,
3898 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3904 .cra_name = "echainiv(authenc(hmac(sha256),"
3906 .cra_driver_name = "echainiv-authenc-"
3907 "hmac-sha256-cbc-des-caam",
3908 .cra_blocksize = DES_BLOCK_SIZE,
3910 .setkey = aead_setkey,
3911 .setauthsize = aead_setauthsize,
3912 .encrypt = aead_encrypt,
3913 .decrypt = aead_givdecrypt,
3914 .ivsize = DES_BLOCK_SIZE,
3915 .maxauthsize = SHA256_DIGEST_SIZE,
3918 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3919 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3920 OP_ALG_AAI_HMAC_PRECOMP,
3921 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3928 .cra_name = "authenc(hmac(sha384),cbc(des))",
3929 .cra_driver_name = "authenc-hmac-sha384-"
3931 .cra_blocksize = DES_BLOCK_SIZE,
3933 .setkey = aead_setkey,
3934 .setauthsize = aead_setauthsize,
3935 .encrypt = aead_encrypt,
3936 .decrypt = aead_decrypt,
3937 .ivsize = DES_BLOCK_SIZE,
3938 .maxauthsize = SHA384_DIGEST_SIZE,
3941 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3942 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3943 OP_ALG_AAI_HMAC_PRECOMP,
3944 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3950 .cra_name = "echainiv(authenc(hmac(sha384),"
3952 .cra_driver_name = "echainiv-authenc-"
3953 "hmac-sha384-cbc-des-caam",
3954 .cra_blocksize = DES_BLOCK_SIZE,
3956 .setkey = aead_setkey,
3957 .setauthsize = aead_setauthsize,
3958 .encrypt = aead_encrypt,
3959 .decrypt = aead_givdecrypt,
3960 .ivsize = DES_BLOCK_SIZE,
3961 .maxauthsize = SHA384_DIGEST_SIZE,
3964 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3965 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3966 OP_ALG_AAI_HMAC_PRECOMP,
3967 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3974 .cra_name = "authenc(hmac(sha512),cbc(des))",
3975 .cra_driver_name = "authenc-hmac-sha512-"
3977 .cra_blocksize = DES_BLOCK_SIZE,
3979 .setkey = aead_setkey,
3980 .setauthsize = aead_setauthsize,
3981 .encrypt = aead_encrypt,
3982 .decrypt = aead_decrypt,
3983 .ivsize = DES_BLOCK_SIZE,
3984 .maxauthsize = SHA512_DIGEST_SIZE,
3987 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3988 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3989 OP_ALG_AAI_HMAC_PRECOMP,
3990 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3996 .cra_name = "echainiv(authenc(hmac(sha512),"
3998 .cra_driver_name = "echainiv-authenc-"
3999 "hmac-sha512-cbc-des-caam",
4000 .cra_blocksize = DES_BLOCK_SIZE,
4002 .setkey = aead_setkey,
4003 .setauthsize = aead_setauthsize,
4004 .encrypt = aead_encrypt,
4005 .decrypt = aead_givdecrypt,
4006 .ivsize = DES_BLOCK_SIZE,
4007 .maxauthsize = SHA512_DIGEST_SIZE,
4010 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4011 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4012 OP_ALG_AAI_HMAC_PRECOMP,
4013 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4020 .cra_name = "authenc(hmac(md5),"
4021 "rfc3686(ctr(aes)))",
4022 .cra_driver_name = "authenc-hmac-md5-"
4023 "rfc3686-ctr-aes-caam",
4026 .setkey = aead_setkey,
4027 .setauthsize = aead_setauthsize,
4028 .encrypt = aead_encrypt,
4029 .decrypt = aead_decrypt,
4030 .ivsize = CTR_RFC3686_IV_SIZE,
4031 .maxauthsize = MD5_DIGEST_SIZE,
4034 .class1_alg_type = OP_ALG_ALGSEL_AES |
4035 OP_ALG_AAI_CTR_MOD128,
4036 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4037 OP_ALG_AAI_HMAC_PRECOMP,
4038 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4045 .cra_name = "seqiv(authenc("
4046 "hmac(md5),rfc3686(ctr(aes))))",
4047 .cra_driver_name = "seqiv-authenc-hmac-md5-"
4048 "rfc3686-ctr-aes-caam",
4051 .setkey = aead_setkey,
4052 .setauthsize = aead_setauthsize,
4053 .encrypt = aead_encrypt,
4054 .decrypt = aead_givdecrypt,
4055 .ivsize = CTR_RFC3686_IV_SIZE,
4056 .maxauthsize = MD5_DIGEST_SIZE,
4059 .class1_alg_type = OP_ALG_ALGSEL_AES |
4060 OP_ALG_AAI_CTR_MOD128,
4061 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4062 OP_ALG_AAI_HMAC_PRECOMP,
4063 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4071 .cra_name = "authenc(hmac(sha1),"
4072 "rfc3686(ctr(aes)))",
4073 .cra_driver_name = "authenc-hmac-sha1-"
4074 "rfc3686-ctr-aes-caam",
4077 .setkey = aead_setkey,
4078 .setauthsize = aead_setauthsize,
4079 .encrypt = aead_encrypt,
4080 .decrypt = aead_decrypt,
4081 .ivsize = CTR_RFC3686_IV_SIZE,
4082 .maxauthsize = SHA1_DIGEST_SIZE,
4085 .class1_alg_type = OP_ALG_ALGSEL_AES |
4086 OP_ALG_AAI_CTR_MOD128,
4087 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4088 OP_ALG_AAI_HMAC_PRECOMP,
4089 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4096 .cra_name = "seqiv(authenc("
4097 "hmac(sha1),rfc3686(ctr(aes))))",
4098 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
4099 "rfc3686-ctr-aes-caam",
4102 .setkey = aead_setkey,
4103 .setauthsize = aead_setauthsize,
4104 .encrypt = aead_encrypt,
4105 .decrypt = aead_givdecrypt,
4106 .ivsize = CTR_RFC3686_IV_SIZE,
4107 .maxauthsize = SHA1_DIGEST_SIZE,
4110 .class1_alg_type = OP_ALG_ALGSEL_AES |
4111 OP_ALG_AAI_CTR_MOD128,
4112 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4113 OP_ALG_AAI_HMAC_PRECOMP,
4114 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4122 .cra_name = "authenc(hmac(sha224),"
4123 "rfc3686(ctr(aes)))",
4124 .cra_driver_name = "authenc-hmac-sha224-"
4125 "rfc3686-ctr-aes-caam",
4128 .setkey = aead_setkey,
4129 .setauthsize = aead_setauthsize,
4130 .encrypt = aead_encrypt,
4131 .decrypt = aead_decrypt,
4132 .ivsize = CTR_RFC3686_IV_SIZE,
4133 .maxauthsize = SHA224_DIGEST_SIZE,
4136 .class1_alg_type = OP_ALG_ALGSEL_AES |
4137 OP_ALG_AAI_CTR_MOD128,
4138 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4139 OP_ALG_AAI_HMAC_PRECOMP,
4140 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4147 .cra_name = "seqiv(authenc("
4148 "hmac(sha224),rfc3686(ctr(aes))))",
4149 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
4150 "rfc3686-ctr-aes-caam",
4153 .setkey = aead_setkey,
4154 .setauthsize = aead_setauthsize,
4155 .encrypt = aead_encrypt,
4156 .decrypt = aead_givdecrypt,
4157 .ivsize = CTR_RFC3686_IV_SIZE,
4158 .maxauthsize = SHA224_DIGEST_SIZE,
4161 .class1_alg_type = OP_ALG_ALGSEL_AES |
4162 OP_ALG_AAI_CTR_MOD128,
4163 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4164 OP_ALG_AAI_HMAC_PRECOMP,
4165 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4173 .cra_name = "authenc(hmac(sha256),"
4174 "rfc3686(ctr(aes)))",
4175 .cra_driver_name = "authenc-hmac-sha256-"
4176 "rfc3686-ctr-aes-caam",
4179 .setkey = aead_setkey,
4180 .setauthsize = aead_setauthsize,
4181 .encrypt = aead_encrypt,
4182 .decrypt = aead_decrypt,
4183 .ivsize = CTR_RFC3686_IV_SIZE,
4184 .maxauthsize = SHA256_DIGEST_SIZE,
4187 .class1_alg_type = OP_ALG_ALGSEL_AES |
4188 OP_ALG_AAI_CTR_MOD128,
4189 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4190 OP_ALG_AAI_HMAC_PRECOMP,
4191 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4198 .cra_name = "seqiv(authenc(hmac(sha256),"
4199 "rfc3686(ctr(aes))))",
4200 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
4201 "rfc3686-ctr-aes-caam",
4204 .setkey = aead_setkey,
4205 .setauthsize = aead_setauthsize,
4206 .encrypt = aead_encrypt,
4207 .decrypt = aead_givdecrypt,
4208 .ivsize = CTR_RFC3686_IV_SIZE,
4209 .maxauthsize = SHA256_DIGEST_SIZE,
4212 .class1_alg_type = OP_ALG_ALGSEL_AES |
4213 OP_ALG_AAI_CTR_MOD128,
4214 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4215 OP_ALG_AAI_HMAC_PRECOMP,
4216 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4224 .cra_name = "authenc(hmac(sha384),"
4225 "rfc3686(ctr(aes)))",
4226 .cra_driver_name = "authenc-hmac-sha384-"
4227 "rfc3686-ctr-aes-caam",
4230 .setkey = aead_setkey,
4231 .setauthsize = aead_setauthsize,
4232 .encrypt = aead_encrypt,
4233 .decrypt = aead_decrypt,
4234 .ivsize = CTR_RFC3686_IV_SIZE,
4235 .maxauthsize = SHA384_DIGEST_SIZE,
4238 .class1_alg_type = OP_ALG_ALGSEL_AES |
4239 OP_ALG_AAI_CTR_MOD128,
4240 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4241 OP_ALG_AAI_HMAC_PRECOMP,
4242 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4249 .cra_name = "seqiv(authenc(hmac(sha384),"
4250 "rfc3686(ctr(aes))))",
4251 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
4252 "rfc3686-ctr-aes-caam",
4255 .setkey = aead_setkey,
4256 .setauthsize = aead_setauthsize,
4257 .encrypt = aead_encrypt,
4258 .decrypt = aead_givdecrypt,
4259 .ivsize = CTR_RFC3686_IV_SIZE,
4260 .maxauthsize = SHA384_DIGEST_SIZE,
4263 .class1_alg_type = OP_ALG_ALGSEL_AES |
4264 OP_ALG_AAI_CTR_MOD128,
4265 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4266 OP_ALG_AAI_HMAC_PRECOMP,
4267 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4275 .cra_name = "authenc(hmac(sha512),"
4276 "rfc3686(ctr(aes)))",
4277 .cra_driver_name = "authenc-hmac-sha512-"
4278 "rfc3686-ctr-aes-caam",
4281 .setkey = aead_setkey,
4282 .setauthsize = aead_setauthsize,
4283 .encrypt = aead_encrypt,
4284 .decrypt = aead_decrypt,
4285 .ivsize = CTR_RFC3686_IV_SIZE,
4286 .maxauthsize = SHA512_DIGEST_SIZE,
4289 .class1_alg_type = OP_ALG_ALGSEL_AES |
4290 OP_ALG_AAI_CTR_MOD128,
4291 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4292 OP_ALG_AAI_HMAC_PRECOMP,
4293 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4300 .cra_name = "seqiv(authenc(hmac(sha512),"
4301 "rfc3686(ctr(aes))))",
4302 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
4303 "rfc3686-ctr-aes-caam",
4306 .setkey = aead_setkey,
4307 .setauthsize = aead_setauthsize,
4308 .encrypt = aead_encrypt,
4309 .decrypt = aead_givdecrypt,
4310 .ivsize = CTR_RFC3686_IV_SIZE,
4311 .maxauthsize = SHA512_DIGEST_SIZE,
4314 .class1_alg_type = OP_ALG_ALGSEL_AES |
4315 OP_ALG_AAI_CTR_MOD128,
4316 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4317 OP_ALG_AAI_HMAC_PRECOMP,
4318 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4325 struct caam_crypto_alg {
4326 struct crypto_alg crypto_alg;
4327 struct list_head entry;
        struct caam_alg_entry caam;
};
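/*
 * Common tfm init: every transform gets its own job ring through
 * caam_jr_alloc() and caches the descriptor-header template values it will
 * need when building shared descriptors; caam_exit_common() undoes the DMA
 * mappings and returns the ring.
 */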
4331 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
4333 ctx->jrdev = caam_jr_alloc();
4334 if (IS_ERR(ctx->jrdev)) {
4335 pr_err("Job Ring Device allocation for transform failed\n");
4336 return PTR_ERR(ctx->jrdev);
4339 /* copy descriptor header template value */
4340 ctx->class1_alg_type = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
4341 ctx->class2_alg_type = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
        ctx->alg_op = OP_TYPE_CLASS2_ALG | caam->alg_op;

        return 0;
}
4347 static int caam_cra_init(struct crypto_tfm *tfm)
4349 struct crypto_alg *alg = tfm->__crt_alg;
4350 struct caam_crypto_alg *caam_alg =
4351 container_of(alg, struct caam_crypto_alg, crypto_alg);
4352 struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
4354 return caam_init_common(ctx, &caam_alg->caam);
4357 static int caam_aead_init(struct crypto_aead *tfm)
4359 struct aead_alg *alg = crypto_aead_alg(tfm);
4360 struct caam_aead_alg *caam_alg =
4361 container_of(alg, struct caam_aead_alg, aead);
4362 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
4364 return caam_init_common(ctx, &caam_alg->caam);
4367 static void caam_exit_common(struct caam_ctx *ctx)
4369 if (ctx->sh_desc_enc_dma &&
4370 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
4371 dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
4372 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
4373 if (ctx->sh_desc_dec_dma &&
4374 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma))
4375 dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma,
4376 desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE);
4377 if (ctx->sh_desc_givenc_dma &&
4378 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma))
4379 dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma,
                                 desc_bytes(ctx->sh_desc_givenc),
                                 DMA_TO_DEVICE);
        if (ctx->key_dma &&
            !dma_mapping_error(ctx->jrdev, ctx->key_dma))
                dma_unmap_single(ctx->jrdev, ctx->key_dma,
                                 ctx->enckeylen + ctx->split_key_pad_len,
                                 DMA_TO_DEVICE);
4388 caam_jr_free(ctx->jrdev);
4391 static void caam_cra_exit(struct crypto_tfm *tfm)
4393 caam_exit_common(crypto_tfm_ctx(tfm));
4396 static void caam_aead_exit(struct crypto_aead *tfm)
        caam_exit_common(crypto_aead_ctx(tfm));
}
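/*
 * Module teardown unregisters in two passes: first any AEADs that were
 * successfully registered, then the legacy crypto_alg entries collected on
 * alg_list at init time.
 */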
4401 static void __exit caam_algapi_exit(void)
4404 struct caam_crypto_alg *t_alg, *n;
4407 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
4408 struct caam_aead_alg *t_alg = driver_aeads + i;
4410 if (t_alg->registered)
4411 crypto_unregister_aead(&t_alg->aead);
4417 list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
4418 crypto_unregister_alg(&t_alg->crypto_alg);
4419 list_del(&t_alg->entry);
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;
	t_alg->caam.alg_op = template->alg_op;

	return t_alg;
}
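
/* Fill in the aead_alg base fields common to all CAAM AEAD algorithms */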
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
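
/*
 * Module init: locate the CAAM controller through the device tree, read the
 * CHA version/instantiation registers, and register only those algorithms
 * that the DES, AES and MD blocks on this device can support.
 */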
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n", alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");