crypto: talitos - fix max key size for sha384 and sha512
drivers/crypto/talitos.c

// SPDX-License-Identifier: GPL-2.0+
/*
 * talitos - Freescale Integrated Security Engine (SEC) device driver
 *
 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
 *
 * Scatterlist Crypto API glue code copied from files with the following:
 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Crypto algorithm registration code copied from hifn driver:
 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
 * All rights reserved.
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/crypto.h>
#include <linux/hw_random.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>

#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/md5.h>
#include <crypto/internal/aead.h>
#include <crypto/authenc.h>
#include <crypto/skcipher.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>

#include "talitos.h"

static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
			   unsigned int len, bool is_sec1)
{
	ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
	if (is_sec1) {
		ptr->len1 = cpu_to_be16(len);
	} else {
		ptr->len = cpu_to_be16(len);
		ptr->eptr = upper_32_bits(dma_addr);
	}
}

static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
			     struct talitos_ptr *src_ptr, bool is_sec1)
{
	dst_ptr->ptr = src_ptr->ptr;
	if (is_sec1) {
		dst_ptr->len1 = src_ptr->len1;
	} else {
		dst_ptr->len = src_ptr->len;
		dst_ptr->eptr = src_ptr->eptr;
	}
}

static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
					   bool is_sec1)
{
	if (is_sec1)
		return be16_to_cpu(ptr->len1);
	else
		return be16_to_cpu(ptr->len);
}

static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
				   bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent = val;
}

static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent |= val;
}

/*
 * map virtual single (contiguous) pointer to h/w descriptor pointer
 */
static void __map_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     unsigned int len, void *data,
				     enum dma_data_direction dir,
				     unsigned long attrs)
{
	dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	to_talitos_ptr(ptr, dma_addr, len, is_sec1);
}

static void map_single_talitos_ptr(struct device *dev,
				   struct talitos_ptr *ptr,
				   unsigned int len, void *data,
				   enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
}

static void map_single_talitos_ptr_nosync(struct device *dev,
					  struct talitos_ptr *ptr,
					  unsigned int len, void *data,
					  enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir,
				 DMA_ATTR_SKIP_CPU_SYNC);
}

/*
 * unmap bus single (contiguous) h/w descriptor pointer
 */
static void unmap_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     enum dma_data_direction dir)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
			 from_talitos_ptr_len(ptr, is_sec1), dir);
}

static int reset_channel(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1) {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS1_CCCR_LO_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
			TALITOS1_CCCR_LO_RESET) && --timeout)
			cpu_relax();
	} else {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR,
			  TALITOS2_CCCR_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			TALITOS2_CCCR_RESET) && --timeout)
			cpu_relax();
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset channel %d\n", ch);
		return -EIO;
	}

	/* set 36-bit addressing, done writeback enable and done IRQ enable */
	setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
		  TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
	/* enable chaining descriptors */
	if (is_sec1)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_NE);

	/* and ICCR writeback, if available */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_IWSE);

	return 0;
}

static int reset_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);
	u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;

	setbits32(priv->reg + TALITOS_MCR, mcr);

	while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
	       && --timeout)
		cpu_relax();

	if (priv->irq[1]) {
		mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
		setbits32(priv->reg + TALITOS_MCR, mcr);
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset device\n");
		return -EIO;
	}

	return 0;
}

/*
 * Reset and initialize the device
 */
static int init_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int ch, err;
	bool is_sec1 = has_ftr_sec1(priv);

	/*
	 * Master reset
	 * errata documentation: warning: certain SEC interrupts
	 * are not fully cleared by writing the MCR:SWR bit,
	 * set bit twice to completely reset
	 */
	err = reset_device(dev);
	if (err)
		return err;

	err = reset_device(dev);
	if (err)
		return err;

	/* reset channels */
	for (ch = 0; ch < priv->num_channels; ch++) {
		err = reset_channel(dev, ch);
		if (err)
			return err;
	}

	/* enable channel done and error interrupts */
	if (is_sec1) {
		clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
		clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
		/* disable parity error check in DEU (erroneous? test vect.) */
		setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
	} else {
		setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
		setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
	}

	/* disable integrity check error interrupts (use writeback instead) */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
			  TALITOS_MDEUICR_LO_ICE);

	return 0;
}

/**
 * talitos_submit - submits a descriptor to the device for processing
 * @dev: the SEC device to be used
 * @ch: the SEC device channel to be used
 * @desc: the descriptor to be processed by the device
 * @callback: whom to call when processing is complete
 * @context: a handle for use by caller (optional)
 *
 * desc must contain valid dma-mapped (bus physical) address pointers.
 * callback must check err and feedback in descriptor header
 * for device processing status.
 */
static int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
			  void (*callback)(struct device *dev,
					   struct talitos_desc *desc,
					   void *context, int error),
			  void *context)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request;
	unsigned long flags;
	int head;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].head_lock, flags);

	if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
		/* h/w fifo is full */
		spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
		return -EAGAIN;
	}

	head = priv->chan[ch].head;
	request = &priv->chan[ch].fifo[head];

	/* map descriptor and save caller data */
	if (is_sec1) {
		desc->hdr1 = desc->hdr;
		request->dma_desc = dma_map_single(dev, &desc->hdr1,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	} else {
		request->dma_desc = dma_map_single(dev, desc,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	}
	request->callback = callback;
	request->context = context;

	/* increment fifo head */
	priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);

	smp_wmb();
	request->desc = desc;

	/* GO! */
	wmb();
	out_be32(priv->chan[ch].reg + TALITOS_FF,
		 upper_32_bits(request->dma_desc));
	out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
		 lower_32_bits(request->dma_desc));

	spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);

	return -EINPROGRESS;
}

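/*
 * Illustrative usage sketch (not part of the driver): a caller holding a
 * filled-in, dma-mappable descriptor queues it roughly as follows, where
 * my_done() is a hypothetical completion callback:
 *
 *	static void my_done(struct device *dev, struct talitos_desc *desc,
 *			    void *context, int error)
 *	{
 *		complete(context);
 *	}
 *
 *	ret = talitos_submit(dev, ch, desc, my_done, &done);
 *	if (ret != -EINPROGRESS)
 *		return ret;	(-EAGAIN means the channel fifo was full)
 */
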
/*
 * process what was done, notify callback of error if not
 */
static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request, saved_req;
	unsigned long flags;
	int tail, status;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);

	tail = priv->chan[ch].tail;
	while (priv->chan[ch].fifo[tail].desc) {
		__be32 hdr;

		request = &priv->chan[ch].fifo[tail];

		/* descriptors with their done bits set don't get the error */
		rmb();
		if (!is_sec1)
			hdr = request->desc->hdr;
		else if (request->desc->next_desc)
			hdr = (request->desc + 1)->hdr1;
		else
			hdr = request->desc->hdr1;

		if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
			status = 0;
		else
			if (!error)
				break;
			else
				status = error;

		dma_unmap_single(dev, request->dma_desc,
				 TALITOS_DESC_SIZE,
				 DMA_BIDIRECTIONAL);

		/* copy entries so we can call callback outside lock */
		saved_req.desc = request->desc;
		saved_req.callback = request->callback;
		saved_req.context = request->context;

		/* release request entry in fifo */
		smp_wmb();
		request->desc = NULL;

		/* increment fifo tail */
		priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);

		spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);

		atomic_dec(&priv->chan[ch].submit_count);

		saved_req.callback(dev, saved_req.desc, saved_req.context,
				   status);
		/* channel may resume processing in single desc error case */
		if (error && !reset_ch && status == error)
			return;
		spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
		tail = priv->chan[ch].tail;
	}

	spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
}

/*
 * process completed requests for channels that have done status
 */
#define DEF_TALITOS1_DONE(name, ch_done_mask)				\
static void talitos1_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 0x10000000)					\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & 0x40000000)					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & 0x00010000)					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & 0x00040000)					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */ \
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)

#define DEF_TALITOS2_DONE(name, ch_done_mask)				\
static void talitos2_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 1)						\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & (1 << 2))					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & (1 << 4))					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & (1 << 6))					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */ \
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	setbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)

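/*
 * Illustrative expansion sketch (not generated code): DEF_TALITOS2_DONE(4ch,
 * TALITOS2_ISR_4CHDONE) above produces roughly:
 *
 *	static void talitos2_done_4ch(unsigned long data)
 *	{
 *		...flush_channel(dev, 0..3, 0, 0) for each done bit set...
 *		setbits32(priv->reg + TALITOS_IMR, TALITOS2_ISR_4CHDONE);
 *	}
 *
 * i.e. one tasklet body per interrupt source, re-enabling the done
 * interrupts that the interrupt handler masked before scheduling it.
 */
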
/*
 * locate current (offending) descriptor
 */
static u32 current_desc_hdr(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int tail, iter;
	dma_addr_t cur_desc;

	cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
	cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);

	if (!cur_desc) {
		dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
		return 0;
	}

	tail = priv->chan[ch].tail;

	iter = tail;
	while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
	       priv->chan[ch].fifo[iter].desc->next_desc != cur_desc) {
		iter = (iter + 1) & (priv->fifo_len - 1);
		if (iter == tail) {
			dev_err(dev, "couldn't locate current descriptor\n");
			return 0;
		}
	}

	if (priv->chan[ch].fifo[iter].desc->next_desc == cur_desc)
		return (priv->chan[ch].fifo[iter].desc + 1)->hdr;

	return priv->chan[ch].fifo[iter].desc->hdr;
}

/*
 * user diagnostics; report root cause of error based on execution unit status
 */
static void report_eu_error(struct device *dev, int ch, u32 desc_hdr)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int i;

	if (!desc_hdr)
		desc_hdr = in_be32(priv->chan[ch].reg + TALITOS_DESCBUF);

	switch (desc_hdr & DESC_HDR_SEL0_MASK) {
	case DESC_HDR_SEL0_AFEU:
		dev_err(dev, "AFEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_afeu + TALITOS_EUISR),
			in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_DEU:
		dev_err(dev, "DEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_deu + TALITOS_EUISR),
			in_be32(priv->reg_deu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_MDEUA:
	case DESC_HDR_SEL0_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_RNG:
		dev_err(dev, "RNGUISR 0x%08x_%08x\n",
			in_be32(priv->reg_rngu + TALITOS_ISR),
			in_be32(priv->reg_rngu + TALITOS_ISR_LO));
		break;
	case DESC_HDR_SEL0_PKEU:
		dev_err(dev, "PKEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_AESU:
		dev_err(dev, "AESUISR 0x%08x_%08x\n",
			in_be32(priv->reg_aesu + TALITOS_EUISR),
			in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_KEU:
		dev_err(dev, "KEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	}

	switch (desc_hdr & DESC_HDR_SEL1_MASK) {
	case DESC_HDR_SEL1_MDEUA:
	case DESC_HDR_SEL1_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL1_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	}

	for (i = 0; i < 8; i++)
		dev_err(dev, "DESCBUF 0x%08x_%08x\n",
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
}

/*
 * recover from error interrupts
 */
static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	int ch, error, reset_dev = 0;
	u32 v_lo;
	bool is_sec1 = has_ftr_sec1(priv);
	int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */

	for (ch = 0; ch < priv->num_channels; ch++) {
		/* skip channels without errors */
		if (is_sec1) {
			/* bits 29, 31, 17, 19 */
			if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
				continue;
		} else {
			if (!(isr & (1 << (ch * 2 + 1))))
				continue;
		}

		error = -EINVAL;

		v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);

		if (v_lo & TALITOS_CCPSR_LO_DOF) {
			dev_err(dev, "double fetch fifo overflow error\n");
			error = -EAGAIN;
			reset_ch = 1;
		}
		if (v_lo & TALITOS_CCPSR_LO_SOF) {
			/* h/w dropped descriptor */
			dev_err(dev, "single fetch fifo overflow error\n");
			error = -EAGAIN;
		}
		if (v_lo & TALITOS_CCPSR_LO_MDTE)
			dev_err(dev, "master data transfer error\n");
		if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
			dev_err(dev, is_sec1 ? "pointer not complete error\n"
					     : "s/g data length zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_FPZ)
			dev_err(dev, is_sec1 ? "parity error\n"
					     : "fetch pointer zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_IDH)
			dev_err(dev, "illegal descriptor header error\n");
		if (v_lo & TALITOS_CCPSR_LO_IEU)
			dev_err(dev, is_sec1 ? "static assignment error\n"
					     : "invalid exec unit error\n");
		if (v_lo & TALITOS_CCPSR_LO_EU)
			report_eu_error(dev, ch, current_desc_hdr(dev, ch));
		if (!is_sec1) {
			if (v_lo & TALITOS_CCPSR_LO_GB)
				dev_err(dev, "gather boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_GRL)
				dev_err(dev, "gather return/length error\n");
			if (v_lo & TALITOS_CCPSR_LO_SB)
				dev_err(dev, "scatter boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_SRL)
				dev_err(dev, "scatter return/length error\n");
		}

		flush_channel(dev, ch, error, reset_ch);

		if (reset_ch) {
			reset_channel(dev, ch);
		} else {
			setbits32(priv->chan[ch].reg + TALITOS_CCCR,
				  TALITOS2_CCCR_CONT);
			setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
			while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			       TALITOS2_CCCR_CONT) && --timeout)
				cpu_relax();
			if (timeout == 0) {
				dev_err(dev, "failed to restart channel %d\n",
					ch);
				reset_dev = 1;
			}
		}
	}
	if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
	    (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
		if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
			dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
				isr, isr_lo);
		else
			dev_err(dev, "done overflow, internal time out, or "
				"rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);

		/* purge request queues */
		for (ch = 0; ch < priv->num_channels; ch++)
			flush_channel(dev, ch, -EIO, 1);

		/* reset and reinitialize the device */
		init_device(dev);
	}
}

#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	       \
static irqreturn_t talitos1_interrupt_##name(int irq, void *data)	       \
{									       \
	struct device *dev = data;					       \
	struct talitos_private *priv = dev_get_drvdata(dev);		       \
	u32 isr, isr_lo;						       \
	unsigned long flags;						       \
									       \
	spin_lock_irqsave(&priv->reg_lock, flags);			       \
	isr = in_be32(priv->reg + TALITOS_ISR);				       \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			       \
	/* Acknowledge interrupt */					       \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			       \
									       \
	if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) {    \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
		talitos_error(dev, isr & ch_err_mask, isr_lo);		       \
	}								       \
	else {								       \
		if (likely(isr & ch_done_mask)) {			       \
			/* mask further done interrupts. */		       \
			setbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
			/* done_task will unmask done interrupts at exit */    \
			tasklet_schedule(&priv->done_task[tlet]);	       \
		}							       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
	}								       \
									       \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
								IRQ_NONE;      \
}

DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)

#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	       \
static irqreturn_t talitos2_interrupt_##name(int irq, void *data)	       \
{									       \
	struct device *dev = data;					       \
	struct talitos_private *priv = dev_get_drvdata(dev);		       \
	u32 isr, isr_lo;						       \
	unsigned long flags;						       \
									       \
	spin_lock_irqsave(&priv->reg_lock, flags);			       \
	isr = in_be32(priv->reg + TALITOS_ISR);				       \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			       \
	/* Acknowledge interrupt */					       \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			       \
									       \
	if (unlikely(isr & ch_err_mask || isr_lo)) {			       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
		talitos_error(dev, isr & ch_err_mask, isr_lo);		       \
	}								       \
	else {								       \
		if (likely(isr & ch_done_mask)) {			       \
			/* mask further done interrupts. */		       \
			clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
			/* done_task will unmask done interrupts at exit */    \
			tasklet_schedule(&priv->done_task[tlet]);	       \
		}							       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
	}								       \
									       \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
								IRQ_NONE;      \
}

DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
		       0)
DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
		       1)

/*
 * hwrng
 */
static int talitos_rng_data_present(struct hwrng *rng, int wait)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	u32 ofl;
	int i;

	for (i = 0; i < 20; i++) {
		ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
		      TALITOS_RNGUSR_LO_OFL;
		if (ofl || !wait)
			break;
		udelay(10);
	}

	return !!ofl;
}

static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);

	/* rng fifo requires 64-bit accesses */
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);

	return sizeof(u32);
}

static int talitos_rng_init(struct hwrng *rng)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;

	setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
	while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
		 & TALITOS_RNGUSR_LO_RD)
	       && --timeout)
		cpu_relax();
	if (timeout == 0) {
		dev_err(dev, "failed to reset rng hw\n");
		return -ENODEV;
	}

	/* start generating */
	setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);

	return 0;
}

static int talitos_register_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int err;

	priv->rng.name = dev_driver_string(dev),
	priv->rng.init = talitos_rng_init,
	priv->rng.data_present = talitos_rng_data_present,
	priv->rng.data_read = talitos_rng_data_read,
	priv->rng.priv = (unsigned long)dev;

	err = hwrng_register(&priv->rng);
	if (!err)
		priv->rng_registered = true;

	return err;
}

static void talitos_unregister_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);

	if (!priv->rng_registered)
		return;

	hwrng_unregister(&priv->rng);
	priv->rng_registered = false;
}

/*
 * crypto alg
 */
#define TALITOS_CRA_PRIORITY		3000
/*
 * Defines a priority for doing AEAD with descriptors type
 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
 */
#define TALITOS_CRA_PRIORITY_AEAD_HSNA	(TALITOS_CRA_PRIORITY - 1)
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
#else
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
#endif
#define TALITOS_MAX_IV_LENGTH		16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */

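/*
 * Sizing note (illustrative arithmetic, not new definitions): with SEC2/3
 * support the authenc key buffer must hold an AES key plus an HMAC key that
 * may be as long as a SHA-384/SHA-512 block, i.e. up to
 * AES_MAX_KEY_SIZE (32) + SHA512_BLOCK_SIZE (128) = 160 bytes; SEC1-only
 * builds cap it at 32 + SHA256_BLOCK_SIZE (64) = 96 bytes.
 */
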
struct talitos_ctx {
	struct device *dev;
	int ch;
	__be32 desc_hdr_template;
	u8 key[TALITOS_MAX_KEY_SIZE];
	u8 iv[TALITOS_MAX_IV_LENGTH];
	dma_addr_t dma_key;
	unsigned int keylen;
	unsigned int enckeylen;
	unsigned int authkeylen;
};

#define HASH_MAX_BLOCK_SIZE		SHA512_BLOCK_SIZE
#define TALITOS_MDEU_MAX_CONTEXT_SIZE	TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512

struct talitos_ahash_req_ctx {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	unsigned int hw_context_size;
	u8 buf[2][HASH_MAX_BLOCK_SIZE];
	int buf_idx;
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
	struct scatterlist bufsl[2];
	struct scatterlist *psrc;
};

struct talitos_export_state {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	u8 buf[HASH_MAX_BLOCK_SIZE];
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
};

static int aead_setkey(struct crypto_aead *authenc,
		       const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

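/*
 * Key layout sketch (for reference only): after aead_setkey() the single
 * DMA-mapped ctx->key buffer holds both keys back to back:
 *
 *	ctx->key: [ authkey (authkeylen bytes) | enckey (enckeylen bytes) ]
 *
 * which is why the descriptor later points at ctx->dma_key for the HMAC key
 * and at ctx->dma_key + ctx->authkeylen for the cipher key.
 */
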
static int aead_des3_setkey(struct crypto_aead *authenc,
			    const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;
	u32 flags;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto badkey;

	err = -EINVAL;
	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	if (keys.enckeylen != DES3_EDE_KEY_SIZE)
		goto badkey;

	flags = crypto_aead_get_flags(authenc);
	err = __des3_verify_key(&flags, keys.enckey);
	if (unlikely(err)) {
		crypto_aead_set_flags(authenc, flags);
		goto out;
	}

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;

badkey:
	crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
	goto out;
}

/*
 * talitos_edesc - s/w-extended descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @icv_ool: whether ICV is out-of-line
 * @iv_dma: dma address of iv for checking continuity and link table
 * @dma_len: length of dma mapped link_tbl space
 * @dma_link_tbl: bus physical address of link_tbl/buf
 * @desc: h/w descriptor
 * @link_tbl: input and output h/w link tables (if {src,dst}_nents > 1) (SEC2)
 * @buf: input and output buffer (if {src,dst}_nents > 1) (SEC1)
 *
 * if decrypting (with authcheck), or either one of src_nents or dst_nents
 * is greater than 1, an integrity check value is concatenated to the end
 * of link_tbl data
 */
struct talitos_edesc {
	int src_nents;
	int dst_nents;
	bool icv_ool;
	dma_addr_t iv_dma;
	int dma_len;
	dma_addr_t dma_link_tbl;
	struct talitos_desc desc;
	union {
		struct talitos_ptr link_tbl[0];
		u8 buf[0];
	};
};

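/*
 * Layout sketch (illustrative): a single kmalloc'd talitos_edesc is followed
 * in memory by the optional DMA area, the stashed ICV and the copied IV,
 * roughly:
 *
 *	[ struct talitos_edesc | link_tbl[]/buf[] (dma_len, incl. ICV) |
 *	  stashed ICV (optional) | iv ]
 *
 * talitos_edesc_alloc() below computes alloc_len accordingly.
 */
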
static void talitos_sg_unmap(struct device *dev,
			     struct talitos_edesc *edesc,
			     struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int len, unsigned int offset)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	unsigned int src_nents = edesc->src_nents ? : 1;
	unsigned int dst_nents = edesc->dst_nents ? : 1;

	if (is_sec1 && dst && dst_nents > 1) {
		dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
					   len, DMA_FROM_DEVICE);
		sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
				     offset);
	}
	if (src != dst) {
		if (src_nents == 1 || !is_sec1)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);

		if (dst && (dst_nents == 1 || !is_sec1))
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else if (src_nents == 1 || !is_sec1) {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}

static void ipsec_esp_unmap(struct device *dev,
			    struct talitos_edesc *edesc,
			    struct aead_request *areq, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];

	if (is_ipsec_esp)
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
					 DMA_FROM_DEVICE);
	unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
			 cryptlen + authsize, areq->assoclen);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (!is_ipsec_esp) {
		unsigned int dst_nents = edesc->dst_nents ? : 1;

		sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
				   areq->assoclen + cryptlen - ivsize);
	}
}

/*
 * ipsec_esp descriptor callbacks
 */
static void ipsec_esp_encrypt_done(struct device *dev,
				   struct talitos_desc *desc, void *context,
				   int err)
{
	struct aead_request *areq = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, areq, true);

	dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);

	kfree(edesc);

	aead_request_complete(areq, err);
}

static void ipsec_esp_decrypt_swauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_edesc *edesc;
	char *oicv, *icv;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req, false);

	if (!err) {
		/* auth check */
		oicv = edesc->buf + edesc->dma_len;
		icv = oicv - authsize;

		err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
	}

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req, false);

	/* check ICV auth status */
	if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
		     DESC_HDR_LO_ICCR1_PASS))
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

/*
 * convert scatterlist to SEC h/w link table format
 * stop at cryptlen bytes
 */
static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
				 unsigned int offset, int datalen, int elen,
				 struct talitos_ptr *link_tbl_ptr)
{
	int n_sg = elen ? sg_count + 1 : sg_count;
	int count = 0;
	int cryptlen = datalen + elen;

	while (cryptlen && sg && n_sg--) {
		unsigned int len = sg_dma_len(sg);

		if (offset >= len) {
			offset -= len;
			goto next;
		}

		len -= offset;

		if (len > cryptlen)
			len = cryptlen;

		if (datalen > 0 && len > datalen) {
			to_talitos_ptr(link_tbl_ptr + count,
				       sg_dma_address(sg) + offset, datalen, 0);
			to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
			count++;
			len -= datalen;
			offset += datalen;
		}
		to_talitos_ptr(link_tbl_ptr + count,
			       sg_dma_address(sg) + offset, len, 0);
		to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
		count++;
		cryptlen -= len;
		datalen -= len;
		offset = 0;

next:
		sg = sg_next(sg);
	}

	/* tag end of link table */
	if (count > 0)
		to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
				       DESC_PTR_LNKTBL_RET, 0);

	return count;
}

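/*
 * Illustrative example (not driver code): for a three-segment scatterlist
 * covering the requested data, the generated link table looks roughly like:
 *
 *	link_tbl[0] = { ptr = sg0 address, len = part of data }
 *	link_tbl[1] = { ptr = sg1 address, len = part of data }
 *	link_tbl[2] = { ptr = sg2 address, len = remainder,
 *			j_extent = DESC_PTR_LNKTBL_RET }
 *
 * i.e. one talitos_ptr per covered segment, with the last entry tagged as
 * the end of the table.
 */
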
static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
			      unsigned int len, struct talitos_edesc *edesc,
			      struct talitos_ptr *ptr, int sg_count,
			      unsigned int offset, int tbl_off, int elen,
			      bool force)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	if (!src) {
		to_talitos_ptr(ptr, 0, 0, is_sec1);
		return 1;
	}
	to_talitos_ptr_ext_set(ptr, elen, is_sec1);
	if (sg_count == 1 && !force) {
		to_talitos_ptr(ptr, sg_dma_address(src) + offset, len, is_sec1);
		return sg_count;
	}
	if (is_sec1) {
		to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, len, is_sec1);
		return sg_count;
	}
	sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len, elen,
					 &edesc->link_tbl[tbl_off]);
	if (sg_count == 1 && !force) {
		/* Only one segment now, so no link tbl needed */
		copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
		return sg_count;
	}
	to_talitos_ptr(ptr, edesc->dma_link_tbl +
			    tbl_off * sizeof(struct talitos_ptr), len, is_sec1);
	to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);

	return sg_count;
}

static int talitos_sg_map(struct device *dev, struct scatterlist *src,
			  unsigned int len, struct talitos_edesc *edesc,
			  struct talitos_ptr *ptr, int sg_count,
			  unsigned int offset, int tbl_off)
{
	return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
				  tbl_off, 0, false);
}

KP
1213/*
1214 * fill in and submit ipsec_esp descriptor
1215 */
56af8cd4 1216static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
7ede4c36 1217 bool encrypt,
aeb4c132
HX
1218 void (*callback)(struct device *dev,
1219 struct talitos_desc *desc,
1220 void *context, int error))
9c4a7965
KP
1221{
1222 struct crypto_aead *aead = crypto_aead_reqtfm(areq);
aeb4c132 1223 unsigned int authsize = crypto_aead_authsize(aead);
9c4a7965
KP
1224 struct talitos_ctx *ctx = crypto_aead_ctx(aead);
1225 struct device *dev = ctx->dev;
1226 struct talitos_desc *desc = &edesc->desc;
7ede4c36 1227 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
e41256f1 1228 unsigned int ivsize = crypto_aead_ivsize(aead);
aeb4c132 1229 int tbl_off = 0;
fa86a267 1230 int sg_count, ret;
2b122730 1231 int elen = 0;
549bd8bc
LC
1232 bool sync_needed = false;
1233 struct talitos_private *priv = dev_get_drvdata(dev);
1234 bool is_sec1 = has_ftr_sec1(priv);
9a655608
LC
1235 bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
1236 struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
1237 struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];
e345177d 1238 dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;
9c4a7965
KP
1239
1240 /* hmac key */
2e13ce08 1241 to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);
79fd31d3 1242
549bd8bc
LC
1243 sg_count = edesc->src_nents ?: 1;
1244 if (is_sec1 && sg_count > 1)
1245 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1246 areq->assoclen + cryptlen);
1247 else
1248 sg_count = dma_map_sg(dev, areq->src, sg_count,
1249 (areq->src == areq->dst) ?
1250 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
79fd31d3 1251
549bd8bc
LC
1252 /* hmac data */
1253 ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
1254 &desc->ptr[1], sg_count, 0, tbl_off);
340ff60a 1255
549bd8bc 1256 if (ret > 1) {
340ff60a 1257 tbl_off += ret;
549bd8bc 1258 sync_needed = true;
79fd31d3
HG
1259 }
1260
9c4a7965 1261 /* cipher iv */
9a655608 1262 to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);
9c4a7965
KP
1263
1264 /* cipher key */
2e13ce08
LC
1265 to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
1266 ctx->enckeylen, is_sec1);
9c4a7965
KP
1267
1268 /*
1269 * cipher in
1270 * map and adjust cipher len to aead request cryptlen.
1271 * extent is bytes of HMAC postpended to ciphertext,
1272 * typically 12 for ipsec
1273 */
2b122730
LC
1274 if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
1275 elen = authsize;
9c4a7965 1276
2b122730 1277 ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
e345177d
CL
1278 sg_count, areq->assoclen, tbl_off, elen,
1279 false);
549bd8bc 1280
ec8c7d14
LC
1281 if (ret > 1) {
1282 tbl_off += ret;
549bd8bc
LC
1283 sync_needed = true;
1284 }
9c4a7965 1285
549bd8bc
LC
1286 /* cipher out */
1287 if (areq->src != areq->dst) {
1288 sg_count = edesc->dst_nents ? : 1;
1289 if (!is_sec1 || sg_count == 1)
1290 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1291 }
9c4a7965 1292
e345177d
CL
1293 if (is_ipsec_esp && encrypt)
1294 elen = authsize;
1295 else
1296 elen = 0;
1297 ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
1298 sg_count, areq->assoclen, tbl_off, elen,
1299 is_ipsec_esp && !encrypt);
1300 tbl_off += ret;
aeb4c132 1301
e04a61be 1302 /* ICV data */
e345177d 1303 edesc->icv_ool = !encrypt;
549bd8bc 1304
e345177d
CL
1305 if (!encrypt && is_ipsec_esp) {
1306 struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
549bd8bc 1307
e345177d
CL
1308 /* Add an entry to the link table for ICV data */
1309 to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
1310 to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RET, is_sec1);
e04a61be 1311
e345177d
CL
1312 /* icv data follows link tables */
1313 to_talitos_ptr(tbl_ptr, dma_icv, authsize, is_sec1);
1314 to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
1315 sync_needed = true;
1316 } else if (!encrypt) {
1317 to_talitos_ptr(&desc->ptr[6], dma_icv, authsize, is_sec1);
1318 sync_needed = true;
9a655608 1319 } else if (!is_ipsec_esp) {
e345177d
CL
1320 talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
1321 sg_count, areq->assoclen + cryptlen, tbl_off);
549bd8bc
LC
1322 }
1323
9c4a7965 1324 /* iv out */
9a655608 1325 if (is_ipsec_esp)
549bd8bc
LC
1326 map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
1327 DMA_FROM_DEVICE);
1328
1329 if (sync_needed)
1330 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1331 edesc->dma_len,
1332 DMA_BIDIRECTIONAL);
9c4a7965 1333
5228f0f7 1334 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
fa86a267 1335 if (ret != -EINPROGRESS) {
7ede4c36 1336 ipsec_esp_unmap(dev, edesc, areq, encrypt);
fa86a267
KP
1337 kfree(edesc);
1338 }
1339 return ret;
9c4a7965
KP
1340}
1341
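/*
 * Descriptor pointer usage sketch for the IPSEC_ESP type, as filled in by
 * ipsec_esp() above (for reference; the HSNA type swaps ptr[2]/ptr[3] and
 * uses ptr[6] for the ICV instead of the output IV):
 *
 *	ptr[0] HMAC key		ptr[1] associated data
 *	ptr[2] cipher IV	ptr[3] cipher key
 *	ptr[4] cipher input	ptr[5] cipher output (+ inline/linked ICV)
 *	ptr[6] IV out
 */
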
/*
 * allocate and map the extended descriptor
 */
static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
						 struct scatterlist *src,
						 struct scatterlist *dst,
						 u8 *iv,
						 unsigned int assoclen,
						 unsigned int cryptlen,
						 unsigned int authsize,
						 unsigned int ivsize,
						 int icv_stashing,
						 u32 cryptoflags,
						 bool encrypt)
{
	struct talitos_edesc *edesc;
	int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
	dma_addr_t iv_dma = 0;
	gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		      GFP_ATOMIC;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;

	if (cryptlen + authsize > max_len) {
		dev_err(dev, "length exceeds h/w max limit\n");
		return ERR_PTR(-EINVAL);
	}

	if (!dst || dst == src) {
		src_len = assoclen + cryptlen + authsize;
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_nents = dst ? src_nents : 0;
		dst_len = 0;
	} else { /* dst && dst != src */
		src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
		dst_nents = sg_nents_for_len(dst, dst_len);
		if (dst_nents < 0) {
			dev_err(dev, "Invalid number of dst SG.\n");
			return ERR_PTR(-EINVAL);
		}
		dst_nents = (dst_nents == 1) ? 0 : dst_nents;
	}

	/*
	 * allocate space for base edesc plus the link tables,
	 * allowing for two separate entries for AD and generated ICV (+ 2),
	 * and space for two sets of ICVs (stashed and generated)
	 */
	alloc_len = sizeof(struct talitos_edesc);
	if (src_nents || dst_nents || !encrypt) {
		if (is_sec1)
			dma_len = (src_nents ? src_len : 0) +
				  (dst_nents ? dst_len : 0) + authsize;
		else
			dma_len = (src_nents + dst_nents + 2) *
				  sizeof(struct talitos_ptr) + authsize;
		alloc_len += dma_len;
	} else {
		dma_len = 0;
	}
	alloc_len += icv_stashing ? authsize : 0;

	/* if it's an ahash, add space for a second desc next to the first one */
	if (is_sec1 && !dst)
		alloc_len += sizeof(struct talitos_desc);
	alloc_len += ivsize;

	edesc = kmalloc(alloc_len, GFP_DMA | flags);
	if (!edesc)
		return ERR_PTR(-ENOMEM);
	if (ivsize) {
		iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
		iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
	}
	memset(&edesc->desc, 0, sizeof(edesc->desc));

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->dma_len = dma_len;
	if (dma_len) {
		void *addr = &edesc->link_tbl[0];

		if (is_sec1 && !dst)
			addr += sizeof(struct talitos_desc);
		edesc->dma_link_tbl = dma_map_single(dev, addr,
						     edesc->dma_len,
						     DMA_BIDIRECTIONAL);
	}
	return edesc;
}

static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
					      int icv_stashing, bool encrypt)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);

	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
				   iv, areq->assoclen, cryptlen,
				   authsize, ivsize, icv_stashing,
				   areq->base.flags, encrypt);
}

static int aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_edesc *edesc;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 0, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* set encrypt */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;

	return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
}

static int aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
	struct talitos_edesc *edesc;
	void *icvdata;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 1, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
	    (priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
	    ((!edesc->src_nents && !edesc->dst_nents) ||
	     priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {

		/* decrypt and check the ICV */
		edesc->desc.hdr = ctx->desc_hdr_template |
				  DESC_HDR_DIR_INBOUND |
				  DESC_HDR_MODE1_MDEU_CICV;

		/* reset integrity check result bits */

		return ipsec_esp(edesc, req, false,
				 ipsec_esp_decrypt_hwauth_done);
	}

	/* Have to check the ICV with software */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;

	/* stash incoming ICV for later cmp with ICV generated by the h/w */
	icvdata = edesc->buf + edesc->dma_len;

	sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
			   req->assoclen + req->cryptlen - authsize);

	return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
}

static int ablkcipher_setkey(struct crypto_ablkcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct device *dev = ctx->dev;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(&ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);

	return 0;
}

static int ablkcipher_des_setkey(struct crypto_ablkcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	u32 tmp[DES_EXPKEY_WORDS];

	if (unlikely(crypto_ablkcipher_get_flags(cipher) &
		     CRYPTO_TFM_REQ_FORBID_WEAK_KEYS) &&
	    !des_ekey(tmp, key)) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_WEAK_KEY);
		return -EINVAL;
	}

	return ablkcipher_setkey(cipher, key, keylen);
}

static int ablkcipher_des3_setkey(struct crypto_ablkcipher *cipher,
				  const u8 *key, unsigned int keylen)
{
	u32 flags;
	int err;

	flags = crypto_ablkcipher_get_flags(cipher);
	err = __des3_verify_key(&flags, key);
	if (unlikely(err)) {
		crypto_ablkcipher_set_flags(cipher, flags);
		return err;
	}

	return ablkcipher_setkey(cipher, key, keylen);
}

static int ablkcipher_aes_setkey(struct crypto_ablkcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	if (keylen == AES_KEYSIZE_128 || keylen == AES_KEYSIZE_192 ||
	    keylen == AES_KEYSIZE_256)
		return ablkcipher_setkey(cipher, key, keylen);

	crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);

	return -EINVAL;
}

static void common_nonsnoop_unmap(struct device *dev,
				  struct talitos_edesc *edesc,
				  struct ablkcipher_request *areq)
{
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);
}

static void ablkcipher_done(struct device *dev,
			    struct talitos_desc *desc, void *context,
			    int err)
{
	struct ablkcipher_request *areq = context;
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	common_nonsnoop_unmap(dev, edesc, areq);
	memcpy(areq->info, ctx->iv, ivsize);

	kfree(edesc);

	areq->base.complete(&areq->base, err);
}

1615static int common_nonsnoop(struct talitos_edesc *edesc,
1616 struct ablkcipher_request *areq,
4de9d0b5
LN
1617 void (*callback) (struct device *dev,
1618 struct talitos_desc *desc,
1619 void *context, int error))
1620{
1621 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1622 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1623 struct device *dev = ctx->dev;
1624 struct talitos_desc *desc = &edesc->desc;
1625 unsigned int cryptlen = areq->nbytes;
79fd31d3 1626 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
4de9d0b5 1627 int sg_count, ret;
6a1e8d14 1628 bool sync_needed = false;
922f9dc8
LC
1629 struct talitos_private *priv = dev_get_drvdata(dev);
1630 bool is_sec1 = has_ftr_sec1(priv);
4de9d0b5
LN
1631
1632 /* first DWORD empty */
4de9d0b5
LN
1633
1634 /* cipher iv */
da9de146 1635 to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);
4de9d0b5
LN
1636
1637 /* cipher key */
2e13ce08 1638 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);
4de9d0b5 1639
6a1e8d14
LC
1640 sg_count = edesc->src_nents ?: 1;
1641 if (is_sec1 && sg_count > 1)
1642 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1643 cryptlen);
1644 else
1645 sg_count = dma_map_sg(dev, areq->src, sg_count,
1646 (areq->src == areq->dst) ?
1647 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
4de9d0b5
LN
1648 /*
1649 * cipher in
1650 */
6a1e8d14
LC
1651 sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
1652 &desc->ptr[3], sg_count, 0, 0);
1653 if (sg_count > 1)
1654 sync_needed = true;
4de9d0b5
LN
1655
1656 /* cipher out */
6a1e8d14
LC
1657 if (areq->src != areq->dst) {
1658 sg_count = edesc->dst_nents ? : 1;
1659 if (!is_sec1 || sg_count == 1)
1660 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1661 }
1662
1663 ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
1664 sg_count, 0, (edesc->src_nents + 1));
1665 if (ret > 1)
1666 sync_needed = true;
4de9d0b5
LN
1667
1668 /* iv out */
a2b35aa8 1669 map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
4de9d0b5
LN
1670 DMA_FROM_DEVICE);
1671
1672 /* last DWORD empty */
4de9d0b5 1673
6a1e8d14
LC
1674 if (sync_needed)
1675 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1676 edesc->dma_len, DMA_BIDIRECTIONAL);
1677
5228f0f7 1678 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
4de9d0b5
LN
1679 if (ret != -EINPROGRESS) {
1680 common_nonsnoop_unmap(dev, edesc, areq);
1681 kfree(edesc);
1682 }
1683 return ret;
1684}
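/*
 * Pointer layout of the descriptor built in common_nonsnoop() above:
 * ptr[0] and ptr[6] unused, ptr[1] IV in, ptr[2] cipher key,
 * ptr[3] data in, ptr[4] data out, ptr[5] IV out.
 */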
1685
e938e465 1686static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request *
62293a37 1687 areq, bool encrypt)
4de9d0b5
LN
1688{
1689 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1690 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
79fd31d3 1691 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
4de9d0b5 1692
aeb4c132 1693 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
79fd31d3 1694 areq->info, 0, areq->nbytes, 0, ivsize, 0,
62293a37 1695 areq->base.flags, encrypt);
4de9d0b5
LN
1696}
1697
1698static int ablkcipher_encrypt(struct ablkcipher_request *areq)
1699{
1700 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1701 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1702 struct talitos_edesc *edesc;
ee483d32
CL
1703 unsigned int blocksize =
1704 crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(cipher));
1705
1706 if (!areq->nbytes)
1707 return 0;
1708
1709 if (areq->nbytes % blocksize)
1710 return -EINVAL;
4de9d0b5
LN
1711
1712 /* allocate extended descriptor */
62293a37 1713 edesc = ablkcipher_edesc_alloc(areq, true);
4de9d0b5
LN
1714 if (IS_ERR(edesc))
1715 return PTR_ERR(edesc);
1716
1717 /* set encrypt */
1718 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1719
febec542 1720 return common_nonsnoop(edesc, areq, ablkcipher_done);
4de9d0b5
LN
1721}
1722
1723static int ablkcipher_decrypt(struct ablkcipher_request *areq)
1724{
1725 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1726 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1727 struct talitos_edesc *edesc;
ee483d32
CL
1728 unsigned int blocksize =
1729 crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(cipher));
1730
1731 if (!areq->nbytes)
1732 return 0;
1733
1734 if (areq->nbytes % blocksize)
1735 return -EINVAL;
4de9d0b5
LN
1736
1737 /* allocate extended descriptor */
62293a37 1738 edesc = ablkcipher_edesc_alloc(areq, false);
4de9d0b5
LN
1739 if (IS_ERR(edesc))
1740 return PTR_ERR(edesc);
1741
1742 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1743
febec542 1744 return common_nonsnoop(edesc, areq, ablkcipher_done);
4de9d0b5
LN
1745}
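/*
 * Illustrative usage sketch (not part of this driver): these handlers are
 * reached through the generic crypto API.  A kernel user would normally go
 * through the skcipher interface, which can be backed by this ablkcipher
 * implementation, roughly:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, nbytes, iv);
 *	crypto_skcipher_encrypt(req);	(lands in ablkcipher_encrypt())
 *
 * Error handling, completion callbacks and freeing are omitted; key, iv,
 * src_sg, dst_sg and nbytes are placeholders.
 */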
1746
497f2e6b
LN
1747static void common_nonsnoop_hash_unmap(struct device *dev,
1748 struct talitos_edesc *edesc,
1749 struct ahash_request *areq)
1750{
1751 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
ad4cd51f
LC
1752 struct talitos_private *priv = dev_get_drvdata(dev);
1753 bool is_sec1 = has_ftr_sec1(priv);
1754 struct talitos_desc *desc = &edesc->desc;
1755 struct talitos_desc *desc2 = desc + 1;
1756
1757 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1758 if (desc->next_desc &&
1759 desc->ptr[5].ptr != desc2->ptr[5].ptr)
1760 unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
497f2e6b 1761
6a1e8d14 1762 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
032d197e 1763
ad4cd51f
LC
1764 /* When using hashctx-in, must unmap it. */
1765 if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
1766 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
1767 DMA_TO_DEVICE);
1768 else if (desc->next_desc)
1769 unmap_single_talitos_ptr(dev, &desc2->ptr[1],
1770 DMA_TO_DEVICE);
1771
1772 if (is_sec1 && req_ctx->nbuf)
1773 unmap_single_talitos_ptr(dev, &desc->ptr[3],
1774 DMA_TO_DEVICE);
1775
497f2e6b
LN
1776 if (edesc->dma_len)
1777 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1778 DMA_BIDIRECTIONAL);
1779
37b5e889
LC
1780 if (edesc->desc.next_desc)
1781 dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
1782 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
497f2e6b
LN
1783}
1784
1785static void ahash_done(struct device *dev,
1786 struct talitos_desc *desc, void *context,
1787 int err)
1788{
1789 struct ahash_request *areq = context;
1790 struct talitos_edesc *edesc =
1791 container_of(desc, struct talitos_edesc, desc);
1792 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1793
1794 if (!req_ctx->last && req_ctx->to_hash_later) {
1795 /* Position any partial block for next update/final/finup */
3c0dd190 1796 req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
5e833bc4 1797 req_ctx->nbuf = req_ctx->to_hash_later;
497f2e6b
LN
1798 }
1799 common_nonsnoop_hash_unmap(dev, edesc, areq);
1800
1801 kfree(edesc);
1802
1803 areq->base.complete(&areq->base, err);
1804}
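/*
 * Bytes held back as to_hash_later were staged into the other half of the
 * request context's double buffer by ahash_process_req(); flipping buf_idx
 * here makes the next update/final/finup pick them up as buffered data.
 */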
1805
2d02905e
LC
1806/*
1808  * SEC1 doesn't like hashing a 0-sized message, so we do the padding
1808  * ourselves and submit a padded block
1809 */
5b2cf268 1810static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
2d02905e
LC
1811 struct talitos_edesc *edesc,
1812 struct talitos_ptr *ptr)
1813{
1814 static u8 padded_hash[64] = {
1815 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1816 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1817 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1818 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1819 };
1820
1821 pr_err_once("Bug in SEC1, padding ourself\n");
1822 edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1823 map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1824 (char *)padded_hash, DMA_TO_DEVICE);
1825}
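/*
 * The substituted block above is the standard MD5/SHA padding of an empty
 * message: a single 0x80 byte followed by zeroes (the 64-bit length field
 * is zero as well), submitted with hardware padding turned off.
 */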
1826
497f2e6b
LN
1827static int common_nonsnoop_hash(struct talitos_edesc *edesc,
1828 struct ahash_request *areq, unsigned int length,
37b5e889 1829 unsigned int offset,
497f2e6b
LN
1830 void (*callback) (struct device *dev,
1831 struct talitos_desc *desc,
1832 void *context, int error))
1833{
1834 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1835 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1836 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1837 struct device *dev = ctx->dev;
1838 struct talitos_desc *desc = &edesc->desc;
032d197e 1839 int ret;
6a1e8d14 1840 bool sync_needed = false;
922f9dc8
LC
1841 struct talitos_private *priv = dev_get_drvdata(dev);
1842 bool is_sec1 = has_ftr_sec1(priv);
6a1e8d14 1843 int sg_count;
497f2e6b
LN
1844
1845 /* first DWORD empty */
497f2e6b 1846
60f208d7
KP
1847 /* hash context in */
1848 if (!req_ctx->first || req_ctx->swinit) {
6a4967c3
LC
1849 map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
1850 req_ctx->hw_context_size,
1851 req_ctx->hw_context,
1852 DMA_TO_DEVICE);
60f208d7 1853 req_ctx->swinit = 0;
497f2e6b 1854 }
afd62fa2
LC
1855 /* Indicate next op is not the first. */
1856 req_ctx->first = 0;
497f2e6b
LN
1857
1858 /* HMAC key */
1859 if (ctx->keylen)
2e13ce08
LC
1860 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
1861 is_sec1);
497f2e6b 1862
37b5e889
LC
1863 if (is_sec1 && req_ctx->nbuf)
1864 length -= req_ctx->nbuf;
1865
6a1e8d14
LC
1866 sg_count = edesc->src_nents ?: 1;
1867 if (is_sec1 && sg_count > 1)
37b5e889
LC
1868 sg_pcopy_to_buffer(req_ctx->psrc, sg_count,
1869 edesc->buf + sizeof(struct talitos_desc),
1870 length, req_ctx->nbuf);
1871 else if (length)
6a1e8d14
LC
1872 sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
1873 DMA_TO_DEVICE);
497f2e6b
LN
1874 /*
1875 * data in
1876 */
37b5e889 1877 if (is_sec1 && req_ctx->nbuf) {
ad4cd51f
LC
1878 map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
1879 req_ctx->buf[req_ctx->buf_idx],
1880 DMA_TO_DEVICE);
37b5e889
LC
1881 } else {
1882 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1883 &desc->ptr[3], sg_count, offset, 0);
1884 if (sg_count > 1)
1885 sync_needed = true;
1886 }
497f2e6b
LN
1887
1888 /* fifth DWORD empty */
497f2e6b
LN
1889
1890 /* hash/HMAC out -or- hash context out */
1891 if (req_ctx->last)
1892 map_single_talitos_ptr(dev, &desc->ptr[5],
1893 crypto_ahash_digestsize(tfm),
a2b35aa8 1894 areq->result, DMA_FROM_DEVICE);
497f2e6b 1895 else
6a4967c3
LC
1896 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1897 req_ctx->hw_context_size,
1898 req_ctx->hw_context,
1899 DMA_FROM_DEVICE);
497f2e6b
LN
1900
1901 /* last DWORD empty */
497f2e6b 1902
2d02905e
LC
1903 if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
1904 talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
1905
37b5e889
LC
1906 if (is_sec1 && req_ctx->nbuf && length) {
1907 struct talitos_desc *desc2 = desc + 1;
1908 dma_addr_t next_desc;
1909
1910 memset(desc2, 0, sizeof(*desc2));
1911 desc2->hdr = desc->hdr;
1912 desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
1913 desc2->hdr1 = desc2->hdr;
1914 desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1915 desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
1916 desc->hdr &= ~DESC_HDR_DONE_NOTIFY;
1917
ad4cd51f
LC
1918 if (desc->ptr[1].ptr)
1919 copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
1920 is_sec1);
1921 else
6a4967c3
LC
1922 map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
1923 req_ctx->hw_context_size,
1924 req_ctx->hw_context,
1925 DMA_TO_DEVICE);
37b5e889
LC
1926 copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
1927 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1928 &desc2->ptr[3], sg_count, offset, 0);
1929 if (sg_count > 1)
1930 sync_needed = true;
1931 copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
1932 if (req_ctx->last)
6a4967c3
LC
1933 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1934 req_ctx->hw_context_size,
1935 req_ctx->hw_context,
1936 DMA_FROM_DEVICE);
37b5e889
LC
1937
1938 next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
1939 DMA_BIDIRECTIONAL);
1940 desc->next_desc = cpu_to_be32(next_desc);
1941 }
1942
6a1e8d14
LC
1943 if (sync_needed)
1944 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1945 edesc->dma_len, DMA_BIDIRECTIONAL);
1946
5228f0f7 1947 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
497f2e6b
LN
1948 if (ret != -EINPROGRESS) {
1949 common_nonsnoop_hash_unmap(dev, edesc, areq);
1950 kfree(edesc);
1951 }
1952 return ret;
1953}
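/*
 * SEC1 note: when previously buffered bytes and new scatterlist data must
 * be hashed in one go, a second descriptor is chained via desc->next_desc.
 * The first descriptor digests the buffered block (continuation mode, no
 * padding, no done notification); the chained one processes the remaining
 * data and delivers the final result or intermediate context.
 */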
1954
1955static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1956 unsigned int nbytes)
1957{
1958 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1959 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1960 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
37b5e889
LC
1961 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1962 bool is_sec1 = has_ftr_sec1(priv);
1963
1964 if (is_sec1)
1965 nbytes -= req_ctx->nbuf;
497f2e6b 1966
aeb4c132 1967 return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
62293a37 1968 nbytes, 0, 0, 0, areq->base.flags, false);
497f2e6b
LN
1969}
1970
1971static int ahash_init(struct ahash_request *areq)
1972{
1973 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
6a4967c3
LC
1974 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1975 struct device *dev = ctx->dev;
497f2e6b 1976 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
49f9783b 1977 unsigned int size;
6a4967c3 1978 dma_addr_t dma;
497f2e6b
LN
1979
1980 /* Initialize the context */
3c0dd190 1981 req_ctx->buf_idx = 0;
5e833bc4 1982 req_ctx->nbuf = 0;
60f208d7
KP
1983 req_ctx->first = 1; /* first indicates h/w must init its context */
1984 req_ctx->swinit = 0; /* assume h/w init of context */
49f9783b 1985 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
497f2e6b
LN
1986 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
1987 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 1988 req_ctx->hw_context_size = size;
497f2e6b 1989
6a4967c3
LC
1990 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
1991 DMA_TO_DEVICE);
1992 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
1993
497f2e6b
LN
1994 return 0;
1995}
1996
60f208d7
KP
1997/*
1998 * on h/w without explicit sha224 support, we initialize h/w context
1999 * manually with sha224 constants, and tell it to run sha256.
2000 */
2001static int ahash_init_sha224_swinit(struct ahash_request *areq)
2002{
2003 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2004
a752447a
KP
2005 req_ctx->hw_context[0] = SHA224_H0;
2006 req_ctx->hw_context[1] = SHA224_H1;
2007 req_ctx->hw_context[2] = SHA224_H2;
2008 req_ctx->hw_context[3] = SHA224_H3;
2009 req_ctx->hw_context[4] = SHA224_H4;
2010 req_ctx->hw_context[5] = SHA224_H5;
2011 req_ctx->hw_context[6] = SHA224_H6;
2012 req_ctx->hw_context[7] = SHA224_H7;
60f208d7
KP
2013
2014 /* init 64-bit count */
2015 req_ctx->hw_context[8] = 0;
2016 req_ctx->hw_context[9] = 0;
2017
6a4967c3
LC
2018 ahash_init(areq);
2019 req_ctx->swinit = 1;/* prevent h/w initting context with sha256 values*/
2020
60f208d7
KP
2021 return 0;
2022}
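/*
 * SHA224_H0..SHA224_H7 are the standard SHA-224 initial hash values, and
 * the two extra words clear the 64-bit message length counter kept in the
 * hardware context, so the MDEU effectively runs SHA-256 from a SHA-224 IV.
 */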
2023
497f2e6b
LN
2024static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
2025{
2026 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2027 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2028 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2029 struct talitos_edesc *edesc;
2030 unsigned int blocksize =
2031 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2032 unsigned int nbytes_to_hash;
2033 unsigned int to_hash_later;
5e833bc4 2034 unsigned int nsg;
8e409fe1 2035 int nents;
37b5e889
LC
2036 struct device *dev = ctx->dev;
2037 struct talitos_private *priv = dev_get_drvdata(dev);
2038 bool is_sec1 = has_ftr_sec1(priv);
2039 int offset = 0;
3c0dd190 2040 u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];
497f2e6b 2041
5e833bc4
LN
2042 if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
2043 /* Buffer up to one whole block */
8e409fe1
LC
2044 nents = sg_nents_for_len(areq->src, nbytes);
2045 if (nents < 0) {
2046 dev_err(ctx->dev, "Invalid number of src SG.\n");
2047 return nents;
2048 }
2049 sg_copy_to_buffer(areq->src, nents,
3c0dd190 2050 ctx_buf + req_ctx->nbuf, nbytes);
5e833bc4 2051 req_ctx->nbuf += nbytes;
497f2e6b
LN
2052 return 0;
2053 }
2054
5e833bc4
LN
2055 /* At least (blocksize + 1) bytes are available to hash */
2056 nbytes_to_hash = nbytes + req_ctx->nbuf;
2057 to_hash_later = nbytes_to_hash & (blocksize - 1);
2058
2059 if (req_ctx->last)
2060 to_hash_later = 0;
2061 else if (to_hash_later)
2062 /* There is a partial block. Hash the full block(s) now */
2063 nbytes_to_hash -= to_hash_later;
2064 else {
2065 /* Keep one block buffered */
2066 nbytes_to_hash -= blocksize;
2067 to_hash_later = blocksize;
2068 }
2069
2070 /* Chain in any previously buffered data */
37b5e889 2071 if (!is_sec1 && req_ctx->nbuf) {
5e833bc4
LN
2072 nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
2073 sg_init_table(req_ctx->bufsl, nsg);
3c0dd190 2074 sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
5e833bc4 2075 if (nsg > 1)
c56f6d12 2076 sg_chain(req_ctx->bufsl, 2, areq->src);
497f2e6b 2077 req_ctx->psrc = req_ctx->bufsl;
37b5e889
LC
2078 } else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
2079 if (nbytes_to_hash > blocksize)
2080 offset = blocksize - req_ctx->nbuf;
2081 else
2082 offset = nbytes_to_hash - req_ctx->nbuf;
2083 nents = sg_nents_for_len(areq->src, offset);
2084 if (nents < 0) {
2085 dev_err(ctx->dev, "Invalid number of src SG.\n");
2086 return nents;
2087 }
2088 sg_copy_to_buffer(areq->src, nents,
3c0dd190 2089 ctx_buf + req_ctx->nbuf, offset);
37b5e889
LC
2090 req_ctx->nbuf += offset;
2091 req_ctx->psrc = areq->src;
5e833bc4 2092 } else
497f2e6b 2093 req_ctx->psrc = areq->src;
5e833bc4
LN
2094
2095 if (to_hash_later) {
8e409fe1
LC
2096 nents = sg_nents_for_len(areq->src, nbytes);
2097 if (nents < 0) {
2098 dev_err(ctx->dev, "Invalid number of src SG.\n");
2099 return nents;
2100 }
d0525723 2101 sg_pcopy_to_buffer(areq->src, nents,
3c0dd190 2102 req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
5e833bc4
LN
2103 to_hash_later,
2104 nbytes - to_hash_later);
497f2e6b 2105 }
5e833bc4 2106 req_ctx->to_hash_later = to_hash_later;
497f2e6b 2107
5e833bc4 2108 /* Allocate extended descriptor */
497f2e6b
LN
2109 edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
2110 if (IS_ERR(edesc))
2111 return PTR_ERR(edesc);
2112
2113 edesc->desc.hdr = ctx->desc_hdr_template;
2114
2115 /* On last one, request SEC to pad; otherwise continue */
2116 if (req_ctx->last)
2117 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
2118 else
2119 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
2120
60f208d7
KP
2121 /* request SEC to INIT hash. */
2122 if (req_ctx->first && !req_ctx->swinit)
497f2e6b
LN
2123 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
2124
2125 /* When the tfm context has a keylen, it's an HMAC.
2126 	 * A first or last (i.e. not a middle one) descriptor must request HMAC.
2127 */
2128 if (ctx->keylen && (req_ctx->first || req_ctx->last))
2129 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
2130
37b5e889 2131 return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, offset,
497f2e6b
LN
2132 ahash_done);
2133}
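/*
 * Buffering strategy: unless this is the final request, at least the
 * trailing partial block (or one full block when the data is block
 * aligned) is held back in the request context, so that hardware padding
 * is only ever requested from final()/finup().
 */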
2134
2135static int ahash_update(struct ahash_request *areq)
2136{
2137 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2138
2139 req_ctx->last = 0;
2140
2141 return ahash_process_req(areq, areq->nbytes);
2142}
2143
2144static int ahash_final(struct ahash_request *areq)
2145{
2146 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2147
2148 req_ctx->last = 1;
2149
2150 return ahash_process_req(areq, 0);
2151}
2152
2153static int ahash_finup(struct ahash_request *areq)
2154{
2155 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2156
2157 req_ctx->last = 1;
2158
2159 return ahash_process_req(areq, areq->nbytes);
2160}
2161
2162static int ahash_digest(struct ahash_request *areq)
2163{
2164 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
60f208d7 2165 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
497f2e6b 2166
60f208d7 2167 ahash->init(areq);
497f2e6b
LN
2168 req_ctx->last = 1;
2169
2170 return ahash_process_req(areq, areq->nbytes);
2171}
2172
3639ca84
HG
2173static int ahash_export(struct ahash_request *areq, void *out)
2174{
2175 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2176 struct talitos_export_state *export = out;
6a4967c3
LC
2177 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2178 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2179 struct device *dev = ctx->dev;
2180 dma_addr_t dma;
2181
2182 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2183 DMA_FROM_DEVICE);
2184 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);
3639ca84
HG
2185
2186 memcpy(export->hw_context, req_ctx->hw_context,
2187 req_ctx->hw_context_size);
3c0dd190 2188 memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
3639ca84
HG
2189 export->swinit = req_ctx->swinit;
2190 export->first = req_ctx->first;
2191 export->last = req_ctx->last;
2192 export->to_hash_later = req_ctx->to_hash_later;
2193 export->nbuf = req_ctx->nbuf;
2194
2195 return 0;
2196}
2197
2198static int ahash_import(struct ahash_request *areq, const void *in)
2199{
2200 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2201 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
6a4967c3
LC
2202 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2203 struct device *dev = ctx->dev;
3639ca84 2204 const struct talitos_export_state *export = in;
49f9783b 2205 unsigned int size;
6a4967c3 2206 dma_addr_t dma;
3639ca84
HG
2207
2208 memset(req_ctx, 0, sizeof(*req_ctx));
49f9783b 2209 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
3639ca84
HG
2210 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2211 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 2212 req_ctx->hw_context_size = size;
49f9783b 2213 memcpy(req_ctx->hw_context, export->hw_context, size);
3c0dd190 2214 memcpy(req_ctx->buf[0], export->buf, export->nbuf);
3639ca84
HG
2215 req_ctx->swinit = export->swinit;
2216 req_ctx->first = export->first;
2217 req_ctx->last = export->last;
2218 req_ctx->to_hash_later = export->to_hash_later;
2219 req_ctx->nbuf = export->nbuf;
2220
6a4967c3
LC
2221 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2222 DMA_TO_DEVICE);
2223 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
2224
3639ca84
HG
2225 return 0;
2226}
2227
79b3a418
LN
2228static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2229 u8 *hash)
2230{
2231 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2232
2233 struct scatterlist sg[1];
2234 struct ahash_request *req;
f1c90ac3 2235 struct crypto_wait wait;
79b3a418
LN
2236 int ret;
2237
f1c90ac3 2238 crypto_init_wait(&wait);
79b3a418
LN
2239
2240 req = ahash_request_alloc(tfm, GFP_KERNEL);
2241 if (!req)
2242 return -ENOMEM;
2243
2244 /* Keep tfm keylen == 0 during hash of the long key */
2245 ctx->keylen = 0;
2246 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
f1c90ac3 2247 crypto_req_done, &wait);
79b3a418
LN
2248
2249 sg_init_one(&sg[0], key, keylen);
2250
2251 ahash_request_set_crypt(req, sg, hash, keylen);
f1c90ac3
GBY
2252 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2253
79b3a418
LN
2254 ahash_request_free(req);
2255
2256 return ret;
2257}
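/*
 * As with HMAC (RFC 2104), a key longer than the block size is first
 * replaced by its digest; ahash_setkey() below uses this helper for that
 * case and stores the (possibly shortened) key in the tfm context.
 */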
2258
2259static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2260 unsigned int keylen)
2261{
2262 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2e13ce08 2263 struct device *dev = ctx->dev;
79b3a418
LN
2264 unsigned int blocksize =
2265 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2266 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2267 unsigned int keysize = keylen;
2268 u8 hash[SHA512_DIGEST_SIZE];
2269 int ret;
2270
2271 if (keylen <= blocksize)
2272 memcpy(ctx->key, key, keysize);
2273 else {
2274 /* Must get the hash of the long key */
2275 ret = keyhash(tfm, key, keylen, hash);
2276
2277 if (ret) {
2278 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2279 return -EINVAL;
2280 }
2281
2282 keysize = digestsize;
2283 memcpy(ctx->key, hash, digestsize);
2284 }
2285
2e13ce08
LC
2286 if (ctx->keylen)
2287 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2288
79b3a418 2289 ctx->keylen = keysize;
2e13ce08 2290 ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
79b3a418
LN
2291
2292 return 0;
2293}
2294
2295
9c4a7965 2296struct talitos_alg_template {
d5e4aaef 2297 u32 type;
b0057763 2298 u32 priority;
d5e4aaef
LN
2299 union {
2300 struct crypto_alg crypto;
acbf7c62 2301 struct ahash_alg hash;
aeb4c132 2302 struct aead_alg aead;
d5e4aaef 2303 } alg;
9c4a7965
KP
2304 __be32 desc_hdr_template;
2305};
2306
2307static struct talitos_alg_template driver_algs[] = {
991155ba 2308 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
d5e4aaef 2309 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2310 .alg.aead = {
2311 .base = {
2312 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2313 .cra_driver_name = "authenc-hmac-sha1-"
2314 "cbc-aes-talitos",
2315 .cra_blocksize = AES_BLOCK_SIZE,
2316 .cra_flags = CRYPTO_ALG_ASYNC,
2317 },
2318 .ivsize = AES_BLOCK_SIZE,
2319 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2320 },
9c4a7965
KP
2321 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2322 DESC_HDR_SEL0_AESU |
2323 DESC_HDR_MODE0_AESU_CBC |
2324 DESC_HDR_SEL1_MDEUA |
2325 DESC_HDR_MODE1_MDEU_INIT |
2326 DESC_HDR_MODE1_MDEU_PAD |
2327 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
70bcaca7 2328 },
7405c8d7
LC
2329 { .type = CRYPTO_ALG_TYPE_AEAD,
2330 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2331 .alg.aead = {
2332 .base = {
2333 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2334 .cra_driver_name = "authenc-hmac-sha1-"
a1a42f84 2335 "cbc-aes-talitos-hsna",
7405c8d7
LC
2336 .cra_blocksize = AES_BLOCK_SIZE,
2337 .cra_flags = CRYPTO_ALG_ASYNC,
2338 },
2339 .ivsize = AES_BLOCK_SIZE,
2340 .maxauthsize = SHA1_DIGEST_SIZE,
2341 },
2342 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2343 DESC_HDR_SEL0_AESU |
2344 DESC_HDR_MODE0_AESU_CBC |
2345 DESC_HDR_SEL1_MDEUA |
2346 DESC_HDR_MODE1_MDEU_INIT |
2347 DESC_HDR_MODE1_MDEU_PAD |
2348 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2349 },
d5e4aaef 2350 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2351 .alg.aead = {
2352 .base = {
2353 .cra_name = "authenc(hmac(sha1),"
2354 "cbc(des3_ede))",
2355 .cra_driver_name = "authenc-hmac-sha1-"
2356 "cbc-3des-talitos",
2357 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2358 .cra_flags = CRYPTO_ALG_ASYNC,
2359 },
2360 .ivsize = DES3_EDE_BLOCK_SIZE,
2361 .maxauthsize = SHA1_DIGEST_SIZE,
ef7c5c85 2362 .setkey = aead_des3_setkey,
56af8cd4 2363 },
70bcaca7
LN
2364 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2365 DESC_HDR_SEL0_DEU |
2366 DESC_HDR_MODE0_DEU_CBC |
2367 DESC_HDR_MODE0_DEU_3DES |
2368 DESC_HDR_SEL1_MDEUA |
2369 DESC_HDR_MODE1_MDEU_INIT |
2370 DESC_HDR_MODE1_MDEU_PAD |
2371 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
3952f17e 2372 },
7405c8d7
LC
2373 { .type = CRYPTO_ALG_TYPE_AEAD,
2374 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2375 .alg.aead = {
2376 .base = {
2377 .cra_name = "authenc(hmac(sha1),"
2378 "cbc(des3_ede))",
2379 .cra_driver_name = "authenc-hmac-sha1-"
a1a42f84 2380 "cbc-3des-talitos-hsna",
7405c8d7
LC
2381 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2382 .cra_flags = CRYPTO_ALG_ASYNC,
2383 },
2384 .ivsize = DES3_EDE_BLOCK_SIZE,
2385 .maxauthsize = SHA1_DIGEST_SIZE,
ef7c5c85 2386 .setkey = aead_des3_setkey,
7405c8d7
LC
2387 },
2388 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2389 DESC_HDR_SEL0_DEU |
2390 DESC_HDR_MODE0_DEU_CBC |
2391 DESC_HDR_MODE0_DEU_3DES |
2392 DESC_HDR_SEL1_MDEUA |
2393 DESC_HDR_MODE1_MDEU_INIT |
2394 DESC_HDR_MODE1_MDEU_PAD |
2395 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2396 },
357fb605 2397 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2398 .alg.aead = {
2399 .base = {
2400 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2401 .cra_driver_name = "authenc-hmac-sha224-"
2402 "cbc-aes-talitos",
2403 .cra_blocksize = AES_BLOCK_SIZE,
2404 .cra_flags = CRYPTO_ALG_ASYNC,
2405 },
2406 .ivsize = AES_BLOCK_SIZE,
2407 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2408 },
2409 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2410 DESC_HDR_SEL0_AESU |
2411 DESC_HDR_MODE0_AESU_CBC |
2412 DESC_HDR_SEL1_MDEUA |
2413 DESC_HDR_MODE1_MDEU_INIT |
2414 DESC_HDR_MODE1_MDEU_PAD |
2415 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2416 },
7405c8d7
LC
2417 { .type = CRYPTO_ALG_TYPE_AEAD,
2418 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2419 .alg.aead = {
2420 .base = {
2421 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2422 .cra_driver_name = "authenc-hmac-sha224-"
a1a42f84 2423 "cbc-aes-talitos-hsna",
7405c8d7
LC
2424 .cra_blocksize = AES_BLOCK_SIZE,
2425 .cra_flags = CRYPTO_ALG_ASYNC,
2426 },
2427 .ivsize = AES_BLOCK_SIZE,
2428 .maxauthsize = SHA224_DIGEST_SIZE,
2429 },
2430 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2431 DESC_HDR_SEL0_AESU |
2432 DESC_HDR_MODE0_AESU_CBC |
2433 DESC_HDR_SEL1_MDEUA |
2434 DESC_HDR_MODE1_MDEU_INIT |
2435 DESC_HDR_MODE1_MDEU_PAD |
2436 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2437 },
357fb605 2438 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2439 .alg.aead = {
2440 .base = {
2441 .cra_name = "authenc(hmac(sha224),"
2442 "cbc(des3_ede))",
2443 .cra_driver_name = "authenc-hmac-sha224-"
2444 "cbc-3des-talitos",
2445 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2446 .cra_flags = CRYPTO_ALG_ASYNC,
2447 },
2448 .ivsize = DES3_EDE_BLOCK_SIZE,
2449 .maxauthsize = SHA224_DIGEST_SIZE,
ef7c5c85 2450 .setkey = aead_des3_setkey,
357fb605
HG
2451 },
2452 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2453 DESC_HDR_SEL0_DEU |
2454 DESC_HDR_MODE0_DEU_CBC |
2455 DESC_HDR_MODE0_DEU_3DES |
2456 DESC_HDR_SEL1_MDEUA |
2457 DESC_HDR_MODE1_MDEU_INIT |
2458 DESC_HDR_MODE1_MDEU_PAD |
2459 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2460 },
7405c8d7
LC
2461 { .type = CRYPTO_ALG_TYPE_AEAD,
2462 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2463 .alg.aead = {
2464 .base = {
2465 .cra_name = "authenc(hmac(sha224),"
2466 "cbc(des3_ede))",
2467 .cra_driver_name = "authenc-hmac-sha224-"
a1a42f84 2468 "cbc-3des-talitos-hsna",
7405c8d7
LC
2469 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2470 .cra_flags = CRYPTO_ALG_ASYNC,
2471 },
2472 .ivsize = DES3_EDE_BLOCK_SIZE,
2473 .maxauthsize = SHA224_DIGEST_SIZE,
ef7c5c85 2474 .setkey = aead_des3_setkey,
7405c8d7
LC
2475 },
2476 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2477 DESC_HDR_SEL0_DEU |
2478 DESC_HDR_MODE0_DEU_CBC |
2479 DESC_HDR_MODE0_DEU_3DES |
2480 DESC_HDR_SEL1_MDEUA |
2481 DESC_HDR_MODE1_MDEU_INIT |
2482 DESC_HDR_MODE1_MDEU_PAD |
2483 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2484 },
d5e4aaef 2485 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2486 .alg.aead = {
2487 .base = {
2488 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2489 .cra_driver_name = "authenc-hmac-sha256-"
2490 "cbc-aes-talitos",
2491 .cra_blocksize = AES_BLOCK_SIZE,
2492 .cra_flags = CRYPTO_ALG_ASYNC,
2493 },
2494 .ivsize = AES_BLOCK_SIZE,
2495 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2496 },
3952f17e
LN
2497 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2498 DESC_HDR_SEL0_AESU |
2499 DESC_HDR_MODE0_AESU_CBC |
2500 DESC_HDR_SEL1_MDEUA |
2501 DESC_HDR_MODE1_MDEU_INIT |
2502 DESC_HDR_MODE1_MDEU_PAD |
2503 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2504 },
7405c8d7
LC
2505 { .type = CRYPTO_ALG_TYPE_AEAD,
2506 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2507 .alg.aead = {
2508 .base = {
2509 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2510 .cra_driver_name = "authenc-hmac-sha256-"
a1a42f84 2511 "cbc-aes-talitos-hsna",
7405c8d7
LC
2512 .cra_blocksize = AES_BLOCK_SIZE,
2513 .cra_flags = CRYPTO_ALG_ASYNC,
2514 },
2515 .ivsize = AES_BLOCK_SIZE,
2516 .maxauthsize = SHA256_DIGEST_SIZE,
2517 },
2518 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2519 DESC_HDR_SEL0_AESU |
2520 DESC_HDR_MODE0_AESU_CBC |
2521 DESC_HDR_SEL1_MDEUA |
2522 DESC_HDR_MODE1_MDEU_INIT |
2523 DESC_HDR_MODE1_MDEU_PAD |
2524 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2525 },
d5e4aaef 2526 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2527 .alg.aead = {
2528 .base = {
2529 .cra_name = "authenc(hmac(sha256),"
2530 "cbc(des3_ede))",
2531 .cra_driver_name = "authenc-hmac-sha256-"
2532 "cbc-3des-talitos",
2533 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2534 .cra_flags = CRYPTO_ALG_ASYNC,
2535 },
2536 .ivsize = DES3_EDE_BLOCK_SIZE,
2537 .maxauthsize = SHA256_DIGEST_SIZE,
ef7c5c85 2538 .setkey = aead_des3_setkey,
56af8cd4 2539 },
3952f17e
LN
2540 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2541 DESC_HDR_SEL0_DEU |
2542 DESC_HDR_MODE0_DEU_CBC |
2543 DESC_HDR_MODE0_DEU_3DES |
2544 DESC_HDR_SEL1_MDEUA |
2545 DESC_HDR_MODE1_MDEU_INIT |
2546 DESC_HDR_MODE1_MDEU_PAD |
2547 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2548 },
7405c8d7
LC
2549 { .type = CRYPTO_ALG_TYPE_AEAD,
2550 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2551 .alg.aead = {
2552 .base = {
2553 .cra_name = "authenc(hmac(sha256),"
2554 "cbc(des3_ede))",
2555 .cra_driver_name = "authenc-hmac-sha256-"
a1a42f84 2556 "cbc-3des-talitos-hsna",
7405c8d7
LC
2557 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2558 .cra_flags = CRYPTO_ALG_ASYNC,
2559 },
2560 .ivsize = DES3_EDE_BLOCK_SIZE,
2561 .maxauthsize = SHA256_DIGEST_SIZE,
ef7c5c85 2562 .setkey = aead_des3_setkey,
7405c8d7
LC
2563 },
2564 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2565 DESC_HDR_SEL0_DEU |
2566 DESC_HDR_MODE0_DEU_CBC |
2567 DESC_HDR_MODE0_DEU_3DES |
2568 DESC_HDR_SEL1_MDEUA |
2569 DESC_HDR_MODE1_MDEU_INIT |
2570 DESC_HDR_MODE1_MDEU_PAD |
2571 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2572 },
d5e4aaef 2573 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2574 .alg.aead = {
2575 .base = {
2576 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2577 .cra_driver_name = "authenc-hmac-sha384-"
2578 "cbc-aes-talitos",
2579 .cra_blocksize = AES_BLOCK_SIZE,
2580 .cra_flags = CRYPTO_ALG_ASYNC,
2581 },
2582 .ivsize = AES_BLOCK_SIZE,
2583 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2584 },
2585 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2586 DESC_HDR_SEL0_AESU |
2587 DESC_HDR_MODE0_AESU_CBC |
2588 DESC_HDR_SEL1_MDEUB |
2589 DESC_HDR_MODE1_MDEU_INIT |
2590 DESC_HDR_MODE1_MDEU_PAD |
2591 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2592 },
2593 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2594 .alg.aead = {
2595 .base = {
2596 .cra_name = "authenc(hmac(sha384),"
2597 "cbc(des3_ede))",
2598 .cra_driver_name = "authenc-hmac-sha384-"
2599 "cbc-3des-talitos",
2600 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2601 .cra_flags = CRYPTO_ALG_ASYNC,
2602 },
2603 .ivsize = DES3_EDE_BLOCK_SIZE,
2604 .maxauthsize = SHA384_DIGEST_SIZE,
ef7c5c85 2605 .setkey = aead_des3_setkey,
357fb605
HG
2606 },
2607 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2608 DESC_HDR_SEL0_DEU |
2609 DESC_HDR_MODE0_DEU_CBC |
2610 DESC_HDR_MODE0_DEU_3DES |
2611 DESC_HDR_SEL1_MDEUB |
2612 DESC_HDR_MODE1_MDEU_INIT |
2613 DESC_HDR_MODE1_MDEU_PAD |
2614 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2615 },
2616 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2617 .alg.aead = {
2618 .base = {
2619 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2620 .cra_driver_name = "authenc-hmac-sha512-"
2621 "cbc-aes-talitos",
2622 .cra_blocksize = AES_BLOCK_SIZE,
2623 .cra_flags = CRYPTO_ALG_ASYNC,
2624 },
2625 .ivsize = AES_BLOCK_SIZE,
2626 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2627 },
2628 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2629 DESC_HDR_SEL0_AESU |
2630 DESC_HDR_MODE0_AESU_CBC |
2631 DESC_HDR_SEL1_MDEUB |
2632 DESC_HDR_MODE1_MDEU_INIT |
2633 DESC_HDR_MODE1_MDEU_PAD |
2634 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2635 },
2636 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2637 .alg.aead = {
2638 .base = {
2639 .cra_name = "authenc(hmac(sha512),"
2640 "cbc(des3_ede))",
2641 .cra_driver_name = "authenc-hmac-sha512-"
2642 "cbc-3des-talitos",
2643 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2644 .cra_flags = CRYPTO_ALG_ASYNC,
2645 },
2646 .ivsize = DES3_EDE_BLOCK_SIZE,
2647 .maxauthsize = SHA512_DIGEST_SIZE,
ef7c5c85 2648 .setkey = aead_des3_setkey,
357fb605
HG
2649 },
2650 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2651 DESC_HDR_SEL0_DEU |
2652 DESC_HDR_MODE0_DEU_CBC |
2653 DESC_HDR_MODE0_DEU_3DES |
2654 DESC_HDR_SEL1_MDEUB |
2655 DESC_HDR_MODE1_MDEU_INIT |
2656 DESC_HDR_MODE1_MDEU_PAD |
2657 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2658 },
2659 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2660 .alg.aead = {
2661 .base = {
2662 .cra_name = "authenc(hmac(md5),cbc(aes))",
2663 .cra_driver_name = "authenc-hmac-md5-"
2664 "cbc-aes-talitos",
2665 .cra_blocksize = AES_BLOCK_SIZE,
2666 .cra_flags = CRYPTO_ALG_ASYNC,
2667 },
2668 .ivsize = AES_BLOCK_SIZE,
2669 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2670 },
3952f17e
LN
2671 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2672 DESC_HDR_SEL0_AESU |
2673 DESC_HDR_MODE0_AESU_CBC |
2674 DESC_HDR_SEL1_MDEUA |
2675 DESC_HDR_MODE1_MDEU_INIT |
2676 DESC_HDR_MODE1_MDEU_PAD |
2677 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2678 },
7405c8d7
LC
2679 { .type = CRYPTO_ALG_TYPE_AEAD,
2680 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2681 .alg.aead = {
2682 .base = {
2683 .cra_name = "authenc(hmac(md5),cbc(aes))",
2684 .cra_driver_name = "authenc-hmac-md5-"
a1a42f84 2685 "cbc-aes-talitos-hsna",
7405c8d7
LC
2686 .cra_blocksize = AES_BLOCK_SIZE,
2687 .cra_flags = CRYPTO_ALG_ASYNC,
2688 },
2689 .ivsize = AES_BLOCK_SIZE,
2690 .maxauthsize = MD5_DIGEST_SIZE,
2691 },
2692 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2693 DESC_HDR_SEL0_AESU |
2694 DESC_HDR_MODE0_AESU_CBC |
2695 DESC_HDR_SEL1_MDEUA |
2696 DESC_HDR_MODE1_MDEU_INIT |
2697 DESC_HDR_MODE1_MDEU_PAD |
2698 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2699 },
d5e4aaef 2700 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2701 .alg.aead = {
2702 .base = {
2703 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2704 .cra_driver_name = "authenc-hmac-md5-"
2705 "cbc-3des-talitos",
2706 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2707 .cra_flags = CRYPTO_ALG_ASYNC,
2708 },
2709 .ivsize = DES3_EDE_BLOCK_SIZE,
2710 .maxauthsize = MD5_DIGEST_SIZE,
ef7c5c85 2711 .setkey = aead_des3_setkey,
56af8cd4 2712 },
3952f17e
LN
2713 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2714 DESC_HDR_SEL0_DEU |
2715 DESC_HDR_MODE0_DEU_CBC |
2716 DESC_HDR_MODE0_DEU_3DES |
2717 DESC_HDR_SEL1_MDEUA |
2718 DESC_HDR_MODE1_MDEU_INIT |
2719 DESC_HDR_MODE1_MDEU_PAD |
2720 DESC_HDR_MODE1_MDEU_MD5_HMAC,
4de9d0b5 2721 },
7405c8d7
LC
2722 { .type = CRYPTO_ALG_TYPE_AEAD,
2723 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2724 .alg.aead = {
2725 .base = {
2726 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2727 .cra_driver_name = "authenc-hmac-md5-"
a1a42f84 2728 "cbc-3des-talitos-hsna",
7405c8d7
LC
2729 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2730 .cra_flags = CRYPTO_ALG_ASYNC,
2731 },
2732 .ivsize = DES3_EDE_BLOCK_SIZE,
2733 .maxauthsize = MD5_DIGEST_SIZE,
ef7c5c85 2734 .setkey = aead_des3_setkey,
7405c8d7
LC
2735 },
2736 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2737 DESC_HDR_SEL0_DEU |
2738 DESC_HDR_MODE0_DEU_CBC |
2739 DESC_HDR_MODE0_DEU_3DES |
2740 DESC_HDR_SEL1_MDEUA |
2741 DESC_HDR_MODE1_MDEU_INIT |
2742 DESC_HDR_MODE1_MDEU_PAD |
2743 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2744 },
4de9d0b5 2745 /* ABLKCIPHER algorithms. */
5e75ae1b
LC
2746 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2747 .alg.crypto = {
2748 .cra_name = "ecb(aes)",
2749 .cra_driver_name = "ecb-aes-talitos",
2750 .cra_blocksize = AES_BLOCK_SIZE,
2751 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2752 CRYPTO_ALG_ASYNC,
2753 .cra_ablkcipher = {
2754 .min_keysize = AES_MIN_KEY_SIZE,
2755 .max_keysize = AES_MAX_KEY_SIZE,
1ba34e71 2756 .setkey = ablkcipher_aes_setkey,
5e75ae1b
LC
2757 }
2758 },
2759 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2760 DESC_HDR_SEL0_AESU,
2761 },
d5e4aaef
LN
2762 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2763 .alg.crypto = {
4de9d0b5
LN
2764 .cra_name = "cbc(aes)",
2765 .cra_driver_name = "cbc-aes-talitos",
2766 .cra_blocksize = AES_BLOCK_SIZE,
2767 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2768 CRYPTO_ALG_ASYNC,
4de9d0b5 2769 .cra_ablkcipher = {
4de9d0b5
LN
2770 .min_keysize = AES_MIN_KEY_SIZE,
2771 .max_keysize = AES_MAX_KEY_SIZE,
2772 .ivsize = AES_BLOCK_SIZE,
1ba34e71 2773 .setkey = ablkcipher_aes_setkey,
4de9d0b5
LN
2774 }
2775 },
2776 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2777 DESC_HDR_SEL0_AESU |
2778 DESC_HDR_MODE0_AESU_CBC,
2779 },
5e75ae1b
LC
2780 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2781 .alg.crypto = {
2782 .cra_name = "ctr(aes)",
2783 .cra_driver_name = "ctr-aes-talitos",
b9a05b60 2784 .cra_blocksize = 1,
5e75ae1b
LC
2785 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2786 CRYPTO_ALG_ASYNC,
2787 .cra_ablkcipher = {
2788 .min_keysize = AES_MIN_KEY_SIZE,
2789 .max_keysize = AES_MAX_KEY_SIZE,
2790 .ivsize = AES_BLOCK_SIZE,
1ba34e71 2791 .setkey = ablkcipher_aes_setkey,
5e75ae1b
LC
2792 }
2793 },
70d355cc 2794 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
5e75ae1b
LC
2795 DESC_HDR_SEL0_AESU |
2796 DESC_HDR_MODE0_AESU_CTR,
2797 },
2798 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2799 .alg.crypto = {
2800 .cra_name = "ecb(des)",
2801 .cra_driver_name = "ecb-des-talitos",
2802 .cra_blocksize = DES_BLOCK_SIZE,
2803 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2804 CRYPTO_ALG_ASYNC,
2805 .cra_ablkcipher = {
2806 .min_keysize = DES_KEY_SIZE,
2807 .max_keysize = DES_KEY_SIZE,
ef7c5c85 2808 .setkey = ablkcipher_des_setkey,
5e75ae1b
LC
2809 }
2810 },
2811 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2812 DESC_HDR_SEL0_DEU,
2813 },
2814 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2815 .alg.crypto = {
2816 .cra_name = "cbc(des)",
2817 .cra_driver_name = "cbc-des-talitos",
2818 .cra_blocksize = DES_BLOCK_SIZE,
2819 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2820 CRYPTO_ALG_ASYNC,
2821 .cra_ablkcipher = {
2822 .min_keysize = DES_KEY_SIZE,
2823 .max_keysize = DES_KEY_SIZE,
2824 .ivsize = DES_BLOCK_SIZE,
ef7c5c85 2825 .setkey = ablkcipher_des_setkey,
5e75ae1b
LC
2826 }
2827 },
2828 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2829 DESC_HDR_SEL0_DEU |
2830 DESC_HDR_MODE0_DEU_CBC,
2831 },
2832 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2833 .alg.crypto = {
2834 .cra_name = "ecb(des3_ede)",
2835 .cra_driver_name = "ecb-3des-talitos",
2836 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2837 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2838 CRYPTO_ALG_ASYNC,
2839 .cra_ablkcipher = {
2840 .min_keysize = DES3_EDE_KEY_SIZE,
2841 .max_keysize = DES3_EDE_KEY_SIZE,
ef7c5c85 2842 .setkey = ablkcipher_des3_setkey,
5e75ae1b
LC
2843 }
2844 },
2845 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2846 DESC_HDR_SEL0_DEU |
2847 DESC_HDR_MODE0_DEU_3DES,
2848 },
d5e4aaef
LN
2849 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2850 .alg.crypto = {
4de9d0b5
LN
2851 .cra_name = "cbc(des3_ede)",
2852 .cra_driver_name = "cbc-3des-talitos",
2853 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2854 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2855 CRYPTO_ALG_ASYNC,
4de9d0b5 2856 .cra_ablkcipher = {
4de9d0b5
LN
2857 .min_keysize = DES3_EDE_KEY_SIZE,
2858 .max_keysize = DES3_EDE_KEY_SIZE,
2859 .ivsize = DES3_EDE_BLOCK_SIZE,
ef7c5c85 2860 .setkey = ablkcipher_des3_setkey,
4de9d0b5
LN
2861 }
2862 },
2863 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2864 DESC_HDR_SEL0_DEU |
2865 DESC_HDR_MODE0_DEU_CBC |
2866 DESC_HDR_MODE0_DEU_3DES,
497f2e6b
LN
2867 },
2868 /* AHASH algorithms. */
2869 { .type = CRYPTO_ALG_TYPE_AHASH,
2870 .alg.hash = {
497f2e6b 2871 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2872 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2873 .halg.base = {
2874 .cra_name = "md5",
2875 .cra_driver_name = "md5-talitos",
b3988618 2876 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
6a38f622 2877 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2878 }
2879 },
2880 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2881 DESC_HDR_SEL0_MDEUA |
2882 DESC_HDR_MODE0_MDEU_MD5,
2883 },
2884 { .type = CRYPTO_ALG_TYPE_AHASH,
2885 .alg.hash = {
497f2e6b 2886 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2887 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2888 .halg.base = {
2889 .cra_name = "sha1",
2890 .cra_driver_name = "sha1-talitos",
2891 .cra_blocksize = SHA1_BLOCK_SIZE,
6a38f622 2892 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2893 }
2894 },
2895 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2896 DESC_HDR_SEL0_MDEUA |
2897 DESC_HDR_MODE0_MDEU_SHA1,
2898 },
60f208d7
KP
2899 { .type = CRYPTO_ALG_TYPE_AHASH,
2900 .alg.hash = {
60f208d7 2901 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2902 .halg.statesize = sizeof(struct talitos_export_state),
60f208d7
KP
2903 .halg.base = {
2904 .cra_name = "sha224",
2905 .cra_driver_name = "sha224-talitos",
2906 .cra_blocksize = SHA224_BLOCK_SIZE,
6a38f622 2907 .cra_flags = CRYPTO_ALG_ASYNC,
60f208d7
KP
2908 }
2909 },
2910 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2911 DESC_HDR_SEL0_MDEUA |
2912 DESC_HDR_MODE0_MDEU_SHA224,
2913 },
497f2e6b
LN
2914 { .type = CRYPTO_ALG_TYPE_AHASH,
2915 .alg.hash = {
497f2e6b 2916 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2917 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2918 .halg.base = {
2919 .cra_name = "sha256",
2920 .cra_driver_name = "sha256-talitos",
2921 .cra_blocksize = SHA256_BLOCK_SIZE,
6a38f622 2922 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2923 }
2924 },
2925 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2926 DESC_HDR_SEL0_MDEUA |
2927 DESC_HDR_MODE0_MDEU_SHA256,
2928 },
2929 { .type = CRYPTO_ALG_TYPE_AHASH,
2930 .alg.hash = {
497f2e6b 2931 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2932 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2933 .halg.base = {
2934 .cra_name = "sha384",
2935 .cra_driver_name = "sha384-talitos",
2936 .cra_blocksize = SHA384_BLOCK_SIZE,
6a38f622 2937 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2938 }
2939 },
2940 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2941 DESC_HDR_SEL0_MDEUB |
2942 DESC_HDR_MODE0_MDEUB_SHA384,
2943 },
2944 { .type = CRYPTO_ALG_TYPE_AHASH,
2945 .alg.hash = {
497f2e6b 2946 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2947 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2948 .halg.base = {
2949 .cra_name = "sha512",
2950 .cra_driver_name = "sha512-talitos",
2951 .cra_blocksize = SHA512_BLOCK_SIZE,
6a38f622 2952 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2953 }
2954 },
2955 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2956 DESC_HDR_SEL0_MDEUB |
2957 DESC_HDR_MODE0_MDEUB_SHA512,
2958 },
79b3a418
LN
2959 { .type = CRYPTO_ALG_TYPE_AHASH,
2960 .alg.hash = {
79b3a418 2961 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2962 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2963 .halg.base = {
2964 .cra_name = "hmac(md5)",
2965 .cra_driver_name = "hmac-md5-talitos",
b3988618 2966 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
6a38f622 2967 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
2968 }
2969 },
2970 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2971 DESC_HDR_SEL0_MDEUA |
2972 DESC_HDR_MODE0_MDEU_MD5,
2973 },
2974 { .type = CRYPTO_ALG_TYPE_AHASH,
2975 .alg.hash = {
79b3a418 2976 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2977 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2978 .halg.base = {
2979 .cra_name = "hmac(sha1)",
2980 .cra_driver_name = "hmac-sha1-talitos",
2981 .cra_blocksize = SHA1_BLOCK_SIZE,
6a38f622 2982 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
2983 }
2984 },
2985 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2986 DESC_HDR_SEL0_MDEUA |
2987 DESC_HDR_MODE0_MDEU_SHA1,
2988 },
2989 { .type = CRYPTO_ALG_TYPE_AHASH,
2990 .alg.hash = {
79b3a418 2991 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2992 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2993 .halg.base = {
2994 .cra_name = "hmac(sha224)",
2995 .cra_driver_name = "hmac-sha224-talitos",
2996 .cra_blocksize = SHA224_BLOCK_SIZE,
6a38f622 2997 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
2998 }
2999 },
3000 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3001 DESC_HDR_SEL0_MDEUA |
3002 DESC_HDR_MODE0_MDEU_SHA224,
3003 },
3004 { .type = CRYPTO_ALG_TYPE_AHASH,
3005 .alg.hash = {
79b3a418 3006 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 3007 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3008 .halg.base = {
3009 .cra_name = "hmac(sha256)",
3010 .cra_driver_name = "hmac-sha256-talitos",
3011 .cra_blocksize = SHA256_BLOCK_SIZE,
6a38f622 3012 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3013 }
3014 },
3015 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3016 DESC_HDR_SEL0_MDEUA |
3017 DESC_HDR_MODE0_MDEU_SHA256,
3018 },
3019 { .type = CRYPTO_ALG_TYPE_AHASH,
3020 .alg.hash = {
79b3a418 3021 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 3022 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3023 .halg.base = {
3024 .cra_name = "hmac(sha384)",
3025 .cra_driver_name = "hmac-sha384-talitos",
3026 .cra_blocksize = SHA384_BLOCK_SIZE,
6a38f622 3027 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3028 }
3029 },
3030 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3031 DESC_HDR_SEL0_MDEUB |
3032 DESC_HDR_MODE0_MDEUB_SHA384,
3033 },
3034 { .type = CRYPTO_ALG_TYPE_AHASH,
3035 .alg.hash = {
79b3a418 3036 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 3037 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3038 .halg.base = {
3039 .cra_name = "hmac(sha512)",
3040 .cra_driver_name = "hmac-sha512-talitos",
3041 .cra_blocksize = SHA512_BLOCK_SIZE,
6a38f622 3042 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3043 }
3044 },
3045 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3046 DESC_HDR_SEL0_MDEUB |
3047 DESC_HDR_MODE0_MDEUB_SHA512,
3048 }
9c4a7965
KP
3049};
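/*
 * The entries above are only templates: talitos_probe(), later in this
 * file, walks driver_algs[], skips entries the hardware cannot handle
 * (see hw_supports()) and registers the rest via talitos_alg_alloc().
 * Each desc_hdr_template selects the descriptor type and execution
 * unit(s); per-request mode bits such as DESC_HDR_MODE0_ENCRYPT or
 * DESC_HDR_MODE0_MDEU_PAD are OR-ed in at request time.
 */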
3050
3051struct talitos_crypto_alg {
3052 struct list_head entry;
3053 struct device *dev;
acbf7c62 3054 struct talitos_alg_template algt;
9c4a7965
KP
3055};
3056
89d124cb
JE
3057static int talitos_init_common(struct talitos_ctx *ctx,
3058 struct talitos_crypto_alg *talitos_alg)
9c4a7965 3059{
5228f0f7 3060 struct talitos_private *priv;
9c4a7965
KP
3061
3062 /* update context with ptr to dev */
3063 ctx->dev = talitos_alg->dev;
19bbbc63 3064
5228f0f7
KP
3065 /* assign SEC channel to tfm in round-robin fashion */
3066 priv = dev_get_drvdata(ctx->dev);
3067 ctx->ch = atomic_inc_return(&priv->last_chan) &
3068 (priv->num_channels - 1);
3069
9c4a7965 3070 /* copy descriptor header template value */
acbf7c62 3071 ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
9c4a7965 3072
602dba5a
KP
3073 /* select done notification */
3074 ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
3075
497f2e6b
LN
3076 return 0;
3077}
3078
89d124cb
JE
3079static int talitos_cra_init(struct crypto_tfm *tfm)
3080{
3081 struct crypto_alg *alg = tfm->__crt_alg;
3082 struct talitos_crypto_alg *talitos_alg;
3083 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3084
3085 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_AHASH)
3086 talitos_alg = container_of(__crypto_ahash_alg(alg),
3087 struct talitos_crypto_alg,
3088 algt.alg.hash);
3089 else
3090 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3091 algt.alg.crypto);
3092
3093 return talitos_init_common(ctx, talitos_alg);
3094}
3095
aeb4c132 3096static int talitos_cra_init_aead(struct crypto_aead *tfm)
497f2e6b 3097{
89d124cb
JE
3098 struct aead_alg *alg = crypto_aead_alg(tfm);
3099 struct talitos_crypto_alg *talitos_alg;
3100 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3101
3102 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3103 algt.alg.aead);
3104
3105 return talitos_init_common(ctx, talitos_alg);
9c4a7965
KP
3106}
3107
497f2e6b
LN
3108static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3109{
3110 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3111
3112 talitos_cra_init(tfm);
3113
3114 ctx->keylen = 0;
3115 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3116 sizeof(struct talitos_ahash_req_ctx));
3117
3118 return 0;
3119}
3120
2e13ce08
LC
3121static void talitos_cra_exit(struct crypto_tfm *tfm)
3122{
3123 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3124 struct device *dev = ctx->dev;
3125
3126 if (ctx->keylen)
3127 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3128}
3129
9c4a7965
KP
3130/*
3131 * given the alg's descriptor header template, determine whether descriptor
3132 * type and primary/secondary execution units required match the hw
3133 * capabilities description provided in the device tree node.
3134 */
3135static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3136{
3137 struct talitos_private *priv = dev_get_drvdata(dev);
3138 int ret;
3139
3140 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3141 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3142
3143 if (SECONDARY_EU(desc_hdr_template))
3144 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3145 & priv->exec_units);
3146
3147 return ret;
3148}
3149
2dc11581 3150static int talitos_remove(struct platform_device *ofdev)
9c4a7965
KP
3151{
3152 struct device *dev = &ofdev->dev;
3153 struct talitos_private *priv = dev_get_drvdata(dev);
3154 struct talitos_crypto_alg *t_alg, *n;
3155 int i;
3156
3157 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
acbf7c62
LN
3158 switch (t_alg->algt.type) {
3159 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62 3160 break;
aeb4c132
HX
3161 case CRYPTO_ALG_TYPE_AEAD:
 3162 			crypto_unregister_aead(&t_alg->algt.alg.aead);
			break;
acbf7c62
LN
3163 case CRYPTO_ALG_TYPE_AHASH:
3164 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3165 break;
3166 }
9c4a7965 3167 list_del(&t_alg->entry);
9c4a7965
KP
3168 }
3169
3170 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3171 talitos_unregister_rng(dev);
3172
c3e337f8 3173 for (i = 0; i < 2; i++)
2cdba3cf 3174 if (priv->irq[i]) {
c3e337f8
KP
3175 free_irq(priv->irq[i], dev);
3176 irq_dispose_mapping(priv->irq[i]);
3177 }
9c4a7965 3178
c3e337f8 3179 tasklet_kill(&priv->done_task[0]);
2cdba3cf 3180 if (priv->irq[1])
c3e337f8 3181 tasklet_kill(&priv->done_task[1]);
9c4a7965 3182
9c4a7965
KP
3183 return 0;
3184}
3185
3186static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3187 struct talitos_alg_template
3188 *template)
3189{
60f208d7 3190 struct talitos_private *priv = dev_get_drvdata(dev);
9c4a7965
KP
3191 struct talitos_crypto_alg *t_alg;
3192 struct crypto_alg *alg;
3193
24b92ff2
LC
3194 t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
3195 GFP_KERNEL);
9c4a7965
KP
3196 if (!t_alg)
3197 return ERR_PTR(-ENOMEM);
3198
acbf7c62
LN
3199 t_alg->algt = *template;
3200
3201 switch (t_alg->algt.type) {
3202 case CRYPTO_ALG_TYPE_ABLKCIPHER:
497f2e6b
LN
3203 alg = &t_alg->algt.alg.crypto;
3204 alg->cra_init = talitos_cra_init;
2e13ce08 3205 alg->cra_exit = talitos_cra_exit;
d4cd3283 3206 alg->cra_type = &crypto_ablkcipher_type;
ef7c5c85
HX
3207 alg->cra_ablkcipher.setkey = alg->cra_ablkcipher.setkey ?:
3208 ablkcipher_setkey;
b286e003
KP
3209 alg->cra_ablkcipher.encrypt = ablkcipher_encrypt;
3210 alg->cra_ablkcipher.decrypt = ablkcipher_decrypt;
497f2e6b 3211 break;
acbf7c62 3212 case CRYPTO_ALG_TYPE_AEAD:
aeb4c132 3213 alg = &t_alg->algt.alg.aead.base;
2e13ce08 3214 alg->cra_exit = talitos_cra_exit;
aeb4c132 3215 t_alg->algt.alg.aead.init = talitos_cra_init_aead;
ef7c5c85
HX
3216 t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
3217 aead_setkey;
aeb4c132
HX
3218 t_alg->algt.alg.aead.encrypt = aead_encrypt;
3219 t_alg->algt.alg.aead.decrypt = aead_decrypt;
6cda075a
LC
3220 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3221 !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
24b92ff2 3222 devm_kfree(dev, t_alg);
6cda075a
LC
3223 return ERR_PTR(-ENOTSUPP);
3224 }
acbf7c62
LN
3225 break;
3226 case CRYPTO_ALG_TYPE_AHASH:
3227 alg = &t_alg->algt.alg.hash.halg.base;
497f2e6b 3228 alg->cra_init = talitos_cra_init_ahash;
ad4cd51f 3229 alg->cra_exit = talitos_cra_exit;
b286e003
KP
3230 t_alg->algt.alg.hash.init = ahash_init;
3231 t_alg->algt.alg.hash.update = ahash_update;
3232 t_alg->algt.alg.hash.final = ahash_final;
3233 t_alg->algt.alg.hash.finup = ahash_finup;
3234 t_alg->algt.alg.hash.digest = ahash_digest;
56136631
LC
3235 if (!strncmp(alg->cra_name, "hmac", 4))
3236 t_alg->algt.alg.hash.setkey = ahash_setkey;
3639ca84
HG
3237 t_alg->algt.alg.hash.import = ahash_import;
3238 t_alg->algt.alg.hash.export = ahash_export;
b286e003 3239
79b3a418 3240 if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
0b2730d8 3241 !strncmp(alg->cra_name, "hmac", 4)) {
24b92ff2 3242 devm_kfree(dev, t_alg);
79b3a418 3243 return ERR_PTR(-ENOTSUPP);
0b2730d8 3244 }
60f208d7 3245 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
79b3a418
LN
3246 (!strcmp(alg->cra_name, "sha224") ||
3247 !strcmp(alg->cra_name, "hmac(sha224)"))) {
60f208d7
KP
3248 t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
3249 t_alg->algt.desc_hdr_template =
3250 DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3251 DESC_HDR_SEL0_MDEUA |
3252 DESC_HDR_MODE0_MDEU_SHA256;
3253 }
497f2e6b 3254 break;
1d11911a
KP
3255 default:
3256 dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
24b92ff2 3257 devm_kfree(dev, t_alg);
1d11911a 3258 return ERR_PTR(-EINVAL);
acbf7c62 3259 }
9c4a7965 3260
9c4a7965 3261 alg->cra_module = THIS_MODULE;
b0057763
LC
3262 if (t_alg->algt.priority)
3263 alg->cra_priority = t_alg->algt.priority;
3264 else
3265 alg->cra_priority = TALITOS_CRA_PRIORITY;
c9cca703
CL
3266 if (has_ftr_sec1(priv))
3267 alg->cra_alignmask = 3;
3268 else
3269 alg->cra_alignmask = 0;
9c4a7965 3270 alg->cra_ctxsize = sizeof(struct talitos_ctx);
d912bb76 3271 alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
9c4a7965 3272
9c4a7965
KP
3273 t_alg->dev = dev;
3274
3275 return t_alg;
3276}
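/*
 * For reference, a minimal sketch of the kind of driver_algs[] entry the
 * switch above consumes.  The field paths (.type, .alg.hash.halg.base,
 * .desc_hdr_template) match the accesses in talitos_alg_alloc(); the
 * name, sizes and flags below are illustrative only, and this sketch is
 * not wired into the real template table.
 */
static struct talitos_alg_template example_sha256_template = {
	.type = CRYPTO_ALG_TYPE_AHASH,
	.alg.hash = {
		.halg.digestsize = SHA256_DIGEST_SIZE,
		.halg.base = {
			.cra_name = "sha256",
			.cra_driver_name = "sha256-talitos",
			.cra_blocksize = SHA256_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		}
	},
	.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			     DESC_HDR_SEL0_MDEUA |
			     DESC_HDR_MODE0_MDEU_SHA256,
};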
3277
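/*
 * IRQ wiring, as implemented below: SEC1 parts use a single interrupt
 * line for all channels; SEC2+ parts may expose either one interrupt
 * (serviced by talitos2_interrupt_4ch) or two, in which case channels
 * 0/2 are handled on irq[0] and channels 1/3 on irq[1].
 */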
c3e337f8
KP
3278static int talitos_probe_irq(struct platform_device *ofdev)
3279{
3280 struct device *dev = &ofdev->dev;
3281 struct device_node *np = ofdev->dev.of_node;
3282 struct talitos_private *priv = dev_get_drvdata(dev);
3283 int err;
dd3c0987 3284 bool is_sec1 = has_ftr_sec1(priv);
c3e337f8
KP
3285
3286 priv->irq[0] = irq_of_parse_and_map(np, 0);
2cdba3cf 3287 if (!priv->irq[0]) {
c3e337f8
KP
3288 dev_err(dev, "failed to map irq\n");
3289 return -EINVAL;
3290 }
dd3c0987
LC
3291 if (is_sec1) {
3292 err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
3293 dev_driver_string(dev), dev);
3294 goto primary_out;
3295 }
c3e337f8
KP
3296
3297 priv->irq[1] = irq_of_parse_and_map(np, 1);
3298
3299 /* get the primary irq line */
2cdba3cf 3300 if (!priv->irq[1]) {
dd3c0987 3301 err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
c3e337f8
KP
3302 dev_driver_string(dev), dev);
3303 goto primary_out;
3304 }
3305
dd3c0987 3306 err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
c3e337f8
KP
3307 dev_driver_string(dev), dev);
3308 if (err)
3309 goto primary_out;
3310
3311 /* get the secondary irq line */
dd3c0987 3312 err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
c3e337f8
KP
3313 dev_driver_string(dev), dev);
3314 if (err) {
3315 dev_err(dev, "failed to request secondary irq\n");
3316 irq_dispose_mapping(priv->irq[1]);
2cdba3cf 3317 priv->irq[1] = 0;
c3e337f8
KP
3318 }
3319
3320 return err;
3321
3322primary_out:
3323 if (err) {
3324 dev_err(dev, "failed to request primary irq\n");
3325 irq_dispose_mapping(priv->irq[0]);
2cdba3cf 3326 priv->irq[0] = 0;
c3e337f8
KP
3327 }
3328
3329 return err;
3330}
3331
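/*
 * talitos_probe() - bring-up sequence: map the register block, read the
 * SEC capabilities from the device tree, translate the "compatible"
 * string into feature flags and execution-unit offsets, hook up IRQs and
 * done-tasklets, allocate the per-channel request FIFOs, reset and
 * initialize the hardware, then register the RNG and every algorithm
 * template the engine advertises support for.
 */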
1c48a5c9 3332static int talitos_probe(struct platform_device *ofdev)
9c4a7965
KP
3333{
3334 struct device *dev = &ofdev->dev;
61c7a080 3335 struct device_node *np = ofdev->dev.of_node;
9c4a7965 3336 struct talitos_private *priv;
9c4a7965 3337 int i, err;
5fa7fa14 3338 int stride;
fd5ea7f0 3339 struct resource *res;
9c4a7965 3340
24b92ff2 3341 priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
9c4a7965
KP
3342 if (!priv)
3343 return -ENOMEM;
3344
f3de9cb1
KH
3345 INIT_LIST_HEAD(&priv->alg_list);
3346
9c4a7965
KP
3347 dev_set_drvdata(dev, priv);
3348
3349 priv->ofdev = ofdev;
3350
511d63cb
HG
3351 spin_lock_init(&priv->reg_lock);
3352
fd5ea7f0
LC
3353 res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3354 if (!res)
3355 return -ENXIO;
3356 priv->reg = devm_ioremap(dev, res->start, resource_size(res));
9c4a7965
KP
3357 if (!priv->reg) {
3358 dev_err(dev, "failed to of_iomap\n");
3359 err = -ENOMEM;
3360 goto err_out;
3361 }
3362
3363 /* get SEC version capabilities from device tree */
fa14c6cf
LC
3364 of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3365 of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3366 of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3367 of_property_read_u32(np, "fsl,descriptor-types-mask",
3368 &priv->desc_types);
9c4a7965
KP
3369
3370 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3371 !priv->exec_units || !priv->desc_types) {
3372 dev_err(dev, "invalid property data in device tree node\n");
3373 err = -EINVAL;
3374 goto err_out;
3375 }
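	/*
	 * For illustration only, a device-tree node shaped the way the
	 * reads and checks above expect.  The values are hypothetical,
	 * chosen to satisfy the power-of-two and non-zero tests, and are
	 * not taken from any particular board:
	 *
	 *	crypto@30000 {
	 *		compatible = "fsl,sec2.1";
	 *		reg = <0x30000 0x10000>;
	 *		interrupts = <29 2>;
	 *		fsl,num-channels = <4>;
	 *		fsl,channel-fifo-len = <24>;
	 *		fsl,exec-units-mask = <0xfe>;
	 *		fsl,descriptor-types-mask = <0x12b0ebf>;
	 *	};
	 */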
3376
f3c85bc1
LN
3377 if (of_device_is_compatible(np, "fsl,sec3.0"))
3378 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3379
fe5720e2 3380 if (of_device_is_compatible(np, "fsl,sec2.1"))
60f208d7 3381 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
79b3a418
LN
3382 TALITOS_FTR_SHA224_HWINIT |
3383 TALITOS_FTR_HMAC_OK;
fe5720e2 3384
21590888
LC
3385 if (of_device_is_compatible(np, "fsl,sec1.0"))
3386 priv->features |= TALITOS_FTR_SEC1;
3387
5fa7fa14
LC
3388 if (of_device_is_compatible(np, "fsl,sec1.2")) {
3389 priv->reg_deu = priv->reg + TALITOS12_DEU;
3390 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3391 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3392 stride = TALITOS1_CH_STRIDE;
3393 } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3394 priv->reg_deu = priv->reg + TALITOS10_DEU;
3395 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3396 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3397 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3398 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3399 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3400 stride = TALITOS1_CH_STRIDE;
3401 } else {
3402 priv->reg_deu = priv->reg + TALITOS2_DEU;
3403 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3404 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3405 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3406 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3407 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3408 priv->reg_keu = priv->reg + TALITOS2_KEU;
3409 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3410 stride = TALITOS2_CH_STRIDE;
3411 }
3412
dd3c0987
LC
3413 err = talitos_probe_irq(ofdev);
3414 if (err)
3415 goto err_out;
3416
3417 if (of_device_is_compatible(np, "fsl,sec1.0")) {
9c02e285
LC
3418 if (priv->num_channels == 1)
3419 tasklet_init(&priv->done_task[0], talitos1_done_ch0,
dd3c0987 3420 (unsigned long)dev);
9c02e285
LC
3421 else
3422 tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3423 (unsigned long)dev);
3424 } else {
3425 if (priv->irq[1]) {
dd3c0987
LC
3426 tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3427 (unsigned long)dev);
3428 tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3429 (unsigned long)dev);
9c02e285
LC
3430 } else if (priv->num_channels == 1) {
3431 tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3432 (unsigned long)dev);
3433 } else {
3434 tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3435 (unsigned long)dev);
dd3c0987
LC
3436 }
3437 }
3438
a86854d0
KC
3439 priv->chan = devm_kcalloc(dev,
3440 priv->num_channels,
3441 sizeof(struct talitos_channel),
3442 GFP_KERNEL);
4b992628
KP
3443 if (!priv->chan) {
3444 dev_err(dev, "failed to allocate channel management space\n");
9c4a7965
KP
3445 err = -ENOMEM;
3446 goto err_out;
3447 }
3448
f641dddd
MH
3449 priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3450
c3e337f8 3451 for (i = 0; i < priv->num_channels; i++) {
5fa7fa14 3452 priv->chan[i].reg = priv->reg + stride * (i + 1);
2cdba3cf 3453 if (!priv->irq[1] || !(i & 1))
c3e337f8 3454 priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
ad42d5fc 3455
4b992628
KP
3456 spin_lock_init(&priv->chan[i].head_lock);
3457 spin_lock_init(&priv->chan[i].tail_lock);
9c4a7965 3458
a86854d0
KC
3459 priv->chan[i].fifo = devm_kcalloc(dev,
3460 priv->fifo_len,
3461 sizeof(struct talitos_request),
3462 GFP_KERNEL);
4b992628 3463 if (!priv->chan[i].fifo) {
9c4a7965
KP
3464 dev_err(dev, "failed to allocate request fifo %d\n", i);
3465 err = -ENOMEM;
3466 goto err_out;
3467 }
9c4a7965 3468
4b992628
KP
3469 atomic_set(&priv->chan[i].submit_count,
3470 -(priv->chfifo_len - 1));
f641dddd 3471 }
9c4a7965 3472
81eb024c
KP
3473 dma_set_mask(dev, DMA_BIT_MASK(36));
3474
9c4a7965
KP
3475 /* reset and initialize the h/w */
3476 err = init_device(dev);
3477 if (err) {
3478 dev_err(dev, "failed to initialize device\n");
3479 goto err_out;
3480 }
3481
3482 /* register the RNG, if available */
3483 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3484 err = talitos_register_rng(dev);
3485 if (err) {
3486 dev_err(dev, "failed to register hwrng: %d\n", err);
3487 goto err_out;
3488 } else
3489 dev_info(dev, "hwrng\n");
3490 }
3491
3492 /* register crypto algorithms the device supports */
9c4a7965
KP
3493 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3494 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3495 struct talitos_crypto_alg *t_alg;
aeb4c132 3496 struct crypto_alg *alg = NULL;
9c4a7965
KP
3497
3498 t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3499 if (IS_ERR(t_alg)) {
3500 err = PTR_ERR(t_alg);
0b2730d8 3501 if (err == -ENOTSUPP)
79b3a418 3502 continue;
9c4a7965
KP
3503 goto err_out;
3504 }
3505
acbf7c62
LN
3506 switch (t_alg->algt.type) {
3507 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62
LN
3508 err = crypto_register_alg(
3509 &t_alg->algt.alg.crypto);
aeb4c132 3510 alg = &t_alg->algt.alg.crypto;
acbf7c62 3511 break;
aeb4c132
HX
3512
3513 case CRYPTO_ALG_TYPE_AEAD:
3514 err = crypto_register_aead(
3515 &t_alg->algt.alg.aead);
3516 alg = &t_alg->algt.alg.aead.base;
3517 break;
3518
acbf7c62
LN
3519 case CRYPTO_ALG_TYPE_AHASH:
3520 err = crypto_register_ahash(
3521 &t_alg->algt.alg.hash);
aeb4c132 3522 alg = &t_alg->algt.alg.hash.halg.base;
acbf7c62
LN
3523 break;
3524 }
9c4a7965
KP
3525 if (err) {
3526 dev_err(dev, "%s alg registration failed\n",
aeb4c132 3527 alg->cra_driver_name);
24b92ff2 3528 devm_kfree(dev, t_alg);
991155ba 3529 } else
9c4a7965 3530 list_add_tail(&t_alg->entry, &priv->alg_list);
9c4a7965
KP
3531 }
3532 }
5b859b6e
KP
3533 if (!list_empty(&priv->alg_list))
3534 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3535 (char *)of_get_property(np, "compatible", NULL));
9c4a7965
KP
3536
3537 return 0;
3538
3539err_out:
3540 talitos_remove(ofdev);
9c4a7965
KP
3541
3542 return err;
3543}
3544
6c3f975a 3545static const struct of_device_id talitos_match[] = {
0635b7db
LC
3546#ifdef CONFIG_CRYPTO_DEV_TALITOS1
3547 {
3548 .compatible = "fsl,sec1.0",
3549 },
3550#endif
3551#ifdef CONFIG_CRYPTO_DEV_TALITOS2
9c4a7965
KP
3552 {
3553 .compatible = "fsl,sec2.0",
3554 },
0635b7db 3555#endif
9c4a7965
KP
3556 {},
3557};
3558MODULE_DEVICE_TABLE(of, talitos_match);
3559
1c48a5c9 3560static struct platform_driver talitos_driver = {
4018294b
GL
3561 .driver = {
3562 .name = "talitos",
4018294b
GL
3563 .of_match_table = talitos_match,
3564 },
9c4a7965 3565 .probe = talitos_probe,
596f1034 3566 .remove = talitos_remove,
9c4a7965
KP
3567};
3568
741e8c2d 3569module_platform_driver(talitos_driver);
9c4a7965
KP
3570
3571MODULE_LICENSE("GPL");
3572MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3573MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");