// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * talitos - Freescale Integrated Security Engine (SEC) device driver
 *
 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
 *
 * Scatterlist Crypto API glue code copied from files with the following:
 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Crypto algorithm registration code copied from hifn driver:
 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
 * All rights reserved.
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/crypto.h>
#include <linux/hw_random.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>

#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/md5.h>
#include <crypto/internal/aead.h>
#include <crypto/authenc.h>
#include <crypto/internal/skcipher.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>

#include "talitos.h"

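/*
 * Note on the helpers below (descriptive summary of the code that follows):
 * they fill in the h/w descriptor pointer fields, whose layout differs
 * between controller generations. SEC1 keeps a 16-bit length in len1 and has
 * no extended-address byte, while SEC2/3 use len plus eptr for the upper
 * 32 address bits and a j_extent byte for link-table flags. The is_sec1
 * flag (from has_ftr_sec1()) selects the layout at run time.
 */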
static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
			   unsigned int len, bool is_sec1)
{
	ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
	if (is_sec1) {
		ptr->len1 = cpu_to_be16(len);
	} else {
		ptr->len = cpu_to_be16(len);
		ptr->eptr = upper_32_bits(dma_addr);
	}
}

static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
			     struct talitos_ptr *src_ptr, bool is_sec1)
{
	dst_ptr->ptr = src_ptr->ptr;
	if (is_sec1) {
		dst_ptr->len1 = src_ptr->len1;
	} else {
		dst_ptr->len = src_ptr->len;
		dst_ptr->eptr = src_ptr->eptr;
	}
}

static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
					   bool is_sec1)
{
	if (is_sec1)
		return be16_to_cpu(ptr->len1);
	else
		return be16_to_cpu(ptr->len);
}

static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
				   bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent = val;
}

static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent |= val;
}

/*
 * map virtual single (contiguous) pointer to h/w descriptor pointer
 */
static void __map_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     unsigned int len, void *data,
				     enum dma_data_direction dir,
				     unsigned long attrs)
{
	dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	to_talitos_ptr(ptr, dma_addr, len, is_sec1);
}

static void map_single_talitos_ptr(struct device *dev,
				   struct talitos_ptr *ptr,
				   unsigned int len, void *data,
				   enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
}

static void map_single_talitos_ptr_nosync(struct device *dev,
					  struct talitos_ptr *ptr,
					  unsigned int len, void *data,
					  enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir,
				 DMA_ATTR_SKIP_CPU_SYNC);
}

/*
 * unmap bus single (contiguous) h/w descriptor pointer
 */
static void unmap_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     enum dma_data_direction dir)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
			 from_talitos_ptr_len(ptr, is_sec1), dir);
}

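/*
 * Per-channel reset (summary of the function below): the reset request and
 * completion polling use different registers/bits on SEC1 (CCCR_LO) and
 * SEC2+ (CCCR). After reset the channel is reconfigured for 36-bit
 * addressing, done writeback and done IRQ, plus ICCR writeback when the
 * h/w supports authentication check offload.
 */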
static int reset_channel(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1) {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS1_CCCR_LO_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
			TALITOS1_CCCR_LO_RESET) && --timeout)
			cpu_relax();
	} else {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR,
			  TALITOS2_CCCR_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			TALITOS2_CCCR_RESET) && --timeout)
			cpu_relax();
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset channel %d\n", ch);
		return -EIO;
	}

	/* set 36-bit addressing, done writeback enable and done IRQ enable */
	setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
		  TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
	/* enable chaining descriptors */
	if (is_sec1)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_NE);

	/* and ICCR writeback, if available */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_IWSE);

	return 0;
}

static int reset_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);
	u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;

	setbits32(priv->reg + TALITOS_MCR, mcr);

	while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
	       && --timeout)
		cpu_relax();

	if (priv->irq[1]) {
		mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
		setbits32(priv->reg + TALITOS_MCR, mcr);
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset device\n");
		return -EIO;
	}

	return 0;
}

/*
 * Reset and initialize the device
 */
static int init_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int ch, err;
	bool is_sec1 = has_ftr_sec1(priv);

	/*
	 * Master reset
	 * errata documentation: warning: certain SEC interrupts
	 * are not fully cleared by writing the MCR:SWR bit,
	 * set bit twice to completely reset
	 */
	err = reset_device(dev);
	if (err)
		return err;

	err = reset_device(dev);
	if (err)
		return err;

	/* reset channels */
	for (ch = 0; ch < priv->num_channels; ch++) {
		err = reset_channel(dev, ch);
		if (err)
			return err;
	}

	/* enable channel done and error interrupts */
	if (is_sec1) {
		clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
		clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
		/* disable parity error check in DEU (erroneous? test vect.) */
		setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
	} else {
		setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
		setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
	}

	/* disable integrity check error interrupts (use writeback instead) */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
			  TALITOS_MDEUICR_LO_ICE);

	return 0;
}

/**
 * talitos_submit - submits a descriptor to the device for processing
 * @dev:	the SEC device to be used
 * @ch:		the SEC device channel to be used
 * @desc:	the descriptor to be processed by the device
 * @callback:	whom to call when processing is complete
 * @context:	a handle for use by caller (optional)
 *
 * desc must contain valid dma-mapped (bus physical) address pointers.
 * callback must check err and feedback in descriptor header
 * for device processing status.
 */
static int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
			  void (*callback)(struct device *dev,
					   struct talitos_desc *desc,
					   void *context, int error),
			  void *context)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request;
	unsigned long flags;
	int head;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].head_lock, flags);

	if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
		/* h/w fifo is full */
		spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
		return -EAGAIN;
	}

	head = priv->chan[ch].head;
	request = &priv->chan[ch].fifo[head];

	/* map descriptor and save caller data */
	if (is_sec1) {
		desc->hdr1 = desc->hdr;
		request->dma_desc = dma_map_single(dev, &desc->hdr1,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	} else {
		request->dma_desc = dma_map_single(dev, desc,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	}
	request->callback = callback;
	request->context = context;

	/* increment fifo head */
	priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);

	smp_wmb();
	request->desc = desc;

	/* GO! */
	wmb();
	out_be32(priv->chan[ch].reg + TALITOS_FF,
		 upper_32_bits(request->dma_desc));
	out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
		 lower_32_bits(request->dma_desc));

	spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);

	return -EINPROGRESS;
}

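/*
 * talitos_submit() usage sketch, as the request paths later in this file
 * use it:
 *
 *	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
 *	if (ret != -EINPROGRESS) {
 *		...unmap and kfree the extended descriptor...
 *	}
 *
 * -EINPROGRESS means the descriptor was queued and the callback will be
 * invoked from the channel's done tasklet; any other return value means
 * the descriptor was not accepted (e.g. -EAGAIN when the fifo is full).
 */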
static __be32 get_request_hdr(struct talitos_request *request, bool is_sec1)
{
	struct talitos_edesc *edesc;

	if (!is_sec1)
		return request->desc->hdr;

	if (!request->desc->next_desc)
		return request->desc->hdr1;

	edesc = container_of(request->desc, struct talitos_edesc, desc);

	return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
}

/*
 * process what was done, notify callback of error if not
 */
static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request, saved_req;
	unsigned long flags;
	int tail, status;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);

	tail = priv->chan[ch].tail;
	while (priv->chan[ch].fifo[tail].desc) {
		__be32 hdr;

		request = &priv->chan[ch].fifo[tail];

		/* descriptors with their done bits set don't get the error */
		rmb();
		hdr = get_request_hdr(request, is_sec1);

		if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
			status = 0;
		else
			if (!error)
				break;
			else
				status = error;

		dma_unmap_single(dev, request->dma_desc,
				 TALITOS_DESC_SIZE,
				 DMA_BIDIRECTIONAL);

		/* copy entries so we can call callback outside lock */
		saved_req.desc = request->desc;
		saved_req.callback = request->callback;
		saved_req.context = request->context;

		/* release request entry in fifo */
		smp_wmb();
		request->desc = NULL;

		/* increment fifo tail */
		priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);

		spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);

		atomic_dec(&priv->chan[ch].submit_count);

		saved_req.callback(dev, saved_req.desc, saved_req.context,
				   status);
		/* channel may resume processing in single desc error case */
		if (error && !reset_ch && status == error)
			return;
		spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
		tail = priv->chan[ch].tail;
	}

	spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
}

/*
 * process completed requests for channels that have done status
 */
#define DEF_TALITOS1_DONE(name, ch_done_mask)				\
static void talitos1_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 0x10000000)					\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & 0x40000000)					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & 0x00010000)					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & 0x00040000)					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */ \
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)

#define DEF_TALITOS2_DONE(name, ch_done_mask)				\
static void talitos2_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 1)						\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & (1 << 2))					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & (1 << 4))					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & (1 << 6))					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */ \
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	setbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)

/*
 * locate current (offending) descriptor
 */
static __be32 current_desc_hdr(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int tail, iter;
	dma_addr_t cur_desc;

	cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
	cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);

	if (!cur_desc) {
		dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
		return 0;
	}

	tail = priv->chan[ch].tail;

	iter = tail;
	while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
	       priv->chan[ch].fifo[iter].desc->next_desc != cpu_to_be32(cur_desc)) {
		iter = (iter + 1) & (priv->fifo_len - 1);
		if (iter == tail) {
			dev_err(dev, "couldn't locate current descriptor\n");
			return 0;
		}
	}

	if (priv->chan[ch].fifo[iter].desc->next_desc == cpu_to_be32(cur_desc)) {
		struct talitos_edesc *edesc;

		edesc = container_of(priv->chan[ch].fifo[iter].desc,
				     struct talitos_edesc, desc);
		return ((struct talitos_desc *)
			(edesc->buf + edesc->dma_len))->hdr;
	}

	return priv->chan[ch].fifo[iter].desc->hdr;
}

/*
 * user diagnostics; report root cause of error based on execution unit status
 */
static void report_eu_error(struct device *dev, int ch, __be32 desc_hdr)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int i;

	if (!desc_hdr)
		desc_hdr = cpu_to_be32(in_be32(priv->chan[ch].reg + TALITOS_DESCBUF));

	switch (desc_hdr & DESC_HDR_SEL0_MASK) {
	case DESC_HDR_SEL0_AFEU:
		dev_err(dev, "AFEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_afeu + TALITOS_EUISR),
			in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_DEU:
		dev_err(dev, "DEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_deu + TALITOS_EUISR),
			in_be32(priv->reg_deu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_MDEUA:
	case DESC_HDR_SEL0_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_RNG:
		dev_err(dev, "RNGUISR 0x%08x_%08x\n",
			in_be32(priv->reg_rngu + TALITOS_ISR),
			in_be32(priv->reg_rngu + TALITOS_ISR_LO));
		break;
	case DESC_HDR_SEL0_PKEU:
		dev_err(dev, "PKEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_AESU:
		dev_err(dev, "AESUISR 0x%08x_%08x\n",
			in_be32(priv->reg_aesu + TALITOS_EUISR),
			in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_KEU:
		dev_err(dev, "KEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	}

	switch (desc_hdr & DESC_HDR_SEL1_MASK) {
	case DESC_HDR_SEL1_MDEUA:
	case DESC_HDR_SEL1_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL1_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	}

	for (i = 0; i < 8; i++)
		dev_err(dev, "DESCBUF 0x%08x_%08x\n",
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
}

/*
 * recover from error interrupts
 */
static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	int ch, error, reset_dev = 0;
	u32 v_lo;
	bool is_sec1 = has_ftr_sec1(priv);
	int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */

	for (ch = 0; ch < priv->num_channels; ch++) {
		/* skip channels without errors */
		if (is_sec1) {
			/* bits 29, 31, 17, 19 */
			if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
				continue;
		} else {
			if (!(isr & (1 << (ch * 2 + 1))))
				continue;
		}

		error = -EINVAL;

		v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);

		if (v_lo & TALITOS_CCPSR_LO_DOF) {
			dev_err(dev, "double fetch fifo overflow error\n");
			error = -EAGAIN;
			reset_ch = 1;
		}
		if (v_lo & TALITOS_CCPSR_LO_SOF) {
			/* h/w dropped descriptor */
			dev_err(dev, "single fetch fifo overflow error\n");
			error = -EAGAIN;
		}
		if (v_lo & TALITOS_CCPSR_LO_MDTE)
			dev_err(dev, "master data transfer error\n");
		if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
			dev_err(dev, is_sec1 ? "pointer not complete error\n"
					     : "s/g data length zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_FPZ)
			dev_err(dev, is_sec1 ? "parity error\n"
					     : "fetch pointer zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_IDH)
			dev_err(dev, "illegal descriptor header error\n");
		if (v_lo & TALITOS_CCPSR_LO_IEU)
			dev_err(dev, is_sec1 ? "static assignment error\n"
					     : "invalid exec unit error\n");
		if (v_lo & TALITOS_CCPSR_LO_EU)
			report_eu_error(dev, ch, current_desc_hdr(dev, ch));
		if (!is_sec1) {
			if (v_lo & TALITOS_CCPSR_LO_GB)
				dev_err(dev, "gather boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_GRL)
				dev_err(dev, "gather return/length error\n");
			if (v_lo & TALITOS_CCPSR_LO_SB)
				dev_err(dev, "scatter boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_SRL)
				dev_err(dev, "scatter return/length error\n");
		}

		flush_channel(dev, ch, error, reset_ch);

		if (reset_ch) {
			reset_channel(dev, ch);
		} else {
			setbits32(priv->chan[ch].reg + TALITOS_CCCR,
				  TALITOS2_CCCR_CONT);
			setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
			while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			       TALITOS2_CCCR_CONT) && --timeout)
				cpu_relax();
			if (timeout == 0) {
				dev_err(dev, "failed to restart channel %d\n",
					ch);
				reset_dev = 1;
			}
		}
	}
	if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
	    (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
		if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
			dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
				isr, isr_lo);
		else
			dev_err(dev, "done overflow, internal time out, or "
				"rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);

		/* purge request queues */
		for (ch = 0; ch < priv->num_channels; ch++)
			flush_channel(dev, ch, -EIO, 1);

		/* reset and reinitialize the device */
		init_device(dev);
	}
}

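/*
 * Interrupt handlers (generated below for SEC1 and SEC2+): read and
 * acknowledge ISR/ISR_LO, hand error bits to talitos_error(), and for done
 * bits mask further done interrupts and schedule the done tasklet, which
 * flushes completed requests and unmasks them again on exit.
 */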
#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	       \
static irqreturn_t talitos1_interrupt_##name(int irq, void *data)	       \
{									       \
	struct device *dev = data;					       \
	struct talitos_private *priv = dev_get_drvdata(dev);		       \
	u32 isr, isr_lo;						       \
	unsigned long flags;						       \
									       \
	spin_lock_irqsave(&priv->reg_lock, flags);			       \
	isr = in_be32(priv->reg + TALITOS_ISR);				       \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			       \
	/* Acknowledge interrupt */					       \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			       \
									       \
	if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) {    \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
		talitos_error(dev, isr & ch_err_mask, isr_lo);		       \
	}								       \
	else {								       \
		if (likely(isr & ch_done_mask)) {			       \
			/* mask further done interrupts. */		       \
			setbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
			/* done_task will unmask done interrupts at exit */    \
			tasklet_schedule(&priv->done_task[tlet]);	       \
		}							       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
	}								       \
									       \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
								IRQ_NONE;      \
}

DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)

#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	       \
static irqreturn_t talitos2_interrupt_##name(int irq, void *data)	       \
{									       \
	struct device *dev = data;					       \
	struct talitos_private *priv = dev_get_drvdata(dev);		       \
	u32 isr, isr_lo;						       \
	unsigned long flags;						       \
									       \
	spin_lock_irqsave(&priv->reg_lock, flags);			       \
	isr = in_be32(priv->reg + TALITOS_ISR);				       \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			       \
	/* Acknowledge interrupt */					       \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			       \
									       \
	if (unlikely(isr & ch_err_mask || isr_lo)) {			       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
		talitos_error(dev, isr & ch_err_mask, isr_lo);		       \
	}								       \
	else {								       \
		if (likely(isr & ch_done_mask)) {			       \
			/* mask further done interrupts. */		       \
			clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
			/* done_task will unmask done interrupts at exit */    \
			tasklet_schedule(&priv->done_task[tlet]);	       \
		}							       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
	}								       \
									       \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
								IRQ_NONE;      \
}

DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
		       0)
DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
		       1)

/*
 * hwrng
 */
static int talitos_rng_data_present(struct hwrng *rng, int wait)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	u32 ofl;
	int i;

	for (i = 0; i < 20; i++) {
		ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
		      TALITOS_RNGUSR_LO_OFL;
		if (ofl || !wait)
			break;
		udelay(10);
	}

	return !!ofl;
}

static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);

	/* rng fifo requires 64-bit accesses */
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);

	return sizeof(u32);
}

static int talitos_rng_init(struct hwrng *rng)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;

	setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
	while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
		 & TALITOS_RNGUSR_LO_RD)
	       && --timeout)
		cpu_relax();
	if (timeout == 0) {
		dev_err(dev, "failed to reset rng hw\n");
		return -ENODEV;
	}

	/* start generating */
	setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);

	return 0;
}

static int talitos_register_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int err;

	priv->rng.name		= dev_driver_string(dev);
	priv->rng.init		= talitos_rng_init;
	priv->rng.data_present	= talitos_rng_data_present;
	priv->rng.data_read	= talitos_rng_data_read;
	priv->rng.priv		= (unsigned long)dev;

	err = hwrng_register(&priv->rng);
	if (!err)
		priv->rng_registered = true;

	return err;
}

static void talitos_unregister_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);

	if (!priv->rng_registered)
		return;

	hwrng_unregister(&priv->rng);
	priv->rng_registered = false;
}

/*
 * crypto alg
 */
#define TALITOS_CRA_PRIORITY		3000
/*
 * Defines a priority for doing AEAD with descriptors type
 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
 */
#define TALITOS_CRA_PRIORITY_AEAD_HSNA	(TALITOS_CRA_PRIORITY - 1)
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
#else
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
#endif
#define TALITOS_MAX_IV_LENGTH		16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */

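/*
 * Per-transform context. For AEAD, key[] holds the authentication key
 * followed by the encryption key (see aead_setkey() below) and dma_key is
 * its streaming DMA mapping; keylen/enckeylen/authkeylen track the three
 * lengths separately so a descriptor can point at each part.
 */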
struct talitos_ctx {
	struct device *dev;
	int ch;
	__be32 desc_hdr_template;
	u8 key[TALITOS_MAX_KEY_SIZE];
	u8 iv[TALITOS_MAX_IV_LENGTH];
	dma_addr_t dma_key;
	unsigned int keylen;
	unsigned int enckeylen;
	unsigned int authkeylen;
};

#define HASH_MAX_BLOCK_SIZE		SHA512_BLOCK_SIZE
#define TALITOS_MDEU_MAX_CONTEXT_SIZE	TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512

struct talitos_ahash_req_ctx {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	unsigned int hw_context_size;
	u8 buf[2][HASH_MAX_BLOCK_SIZE];
	int buf_idx;
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
	struct scatterlist bufsl[2];
	struct scatterlist *psrc;
};

struct talitos_export_state {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	u8 buf[HASH_MAX_BLOCK_SIZE];
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
};

static int aead_setkey(struct crypto_aead *authenc,
		       const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

static int aead_des3_setkey(struct crypto_aead *authenc,
			    const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto out;

	err = -EINVAL;
	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto out;

	err = verify_aead_des3_key(authenc, keys.enckey, keys.enckeylen);
	if (err)
		goto out;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static void talitos_sg_unmap(struct device *dev,
			     struct talitos_edesc *edesc,
			     struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int len, unsigned int offset)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	unsigned int src_nents = edesc->src_nents ? : 1;
	unsigned int dst_nents = edesc->dst_nents ? : 1;

	if (is_sec1 && dst && dst_nents > 1) {
		dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
					   len, DMA_FROM_DEVICE);
		sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
				     offset);
	}
	if (src != dst) {
		if (src_nents == 1 || !is_sec1)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);

		if (dst && (dst_nents == 1 || !is_sec1))
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else if (src_nents == 1 || !is_sec1) {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}

static void ipsec_esp_unmap(struct device *dev,
			    struct talitos_edesc *edesc,
			    struct aead_request *areq, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];

	if (is_ipsec_esp)
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
					 DMA_FROM_DEVICE);
	unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
			 cryptlen + authsize, areq->assoclen);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (!is_ipsec_esp) {
		unsigned int dst_nents = edesc->dst_nents ? : 1;

		sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
				   areq->assoclen + cryptlen - ivsize);
	}
}

/*
 * ipsec_esp descriptor callbacks
 */
static void ipsec_esp_encrypt_done(struct device *dev,
				   struct talitos_desc *desc, void *context,
				   int err)
{
	struct aead_request *areq = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, areq, true);

	dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);

	kfree(edesc);

	aead_request_complete(areq, err);
}

static void ipsec_esp_decrypt_swauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_edesc *edesc;
	char *oicv, *icv;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req, false);

	if (!err) {
		/* auth check */
		oicv = edesc->buf + edesc->dma_len;
		icv = oicv - authsize;

		err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
	}

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req, false);

	/* check ICV auth status */
	if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
		     DESC_HDR_LO_ICCR1_PASS))
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

/*
 * convert scatterlist to SEC h/w link table format
 * stop at cryptlen bytes
 */
static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
				 unsigned int offset, int datalen, int elen,
				 struct talitos_ptr *link_tbl_ptr, int align)
{
	int n_sg = elen ? sg_count + 1 : sg_count;
	int count = 0;
	int cryptlen = datalen + elen;
	int padding = ALIGN(cryptlen, align) - cryptlen;

	while (cryptlen && sg && n_sg--) {
		unsigned int len = sg_dma_len(sg);

		if (offset >= len) {
			offset -= len;
			goto next;
		}

		len -= offset;

		if (len > cryptlen)
			len = cryptlen;

		if (datalen > 0 && len > datalen) {
			to_talitos_ptr(link_tbl_ptr + count,
				       sg_dma_address(sg) + offset, datalen, 0);
			to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
			count++;
			len -= datalen;
			offset += datalen;
		}
		to_talitos_ptr(link_tbl_ptr + count,
			       sg_dma_address(sg) + offset,
			       sg_next(sg) ? len : len + padding, 0);
		to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
		count++;
		cryptlen -= len;
		datalen -= len;
		offset = 0;

next:
		sg = sg_next(sg);
	}

	/* tag end of link table */
	if (count > 0)
		to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
				       DESC_PTR_LNKTBL_RET, 0);

	return count;
}

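/*
 * talitos_sg_map_ext()/talitos_sg_map() point a descriptor entry at the
 * request data: a single segment is referenced directly, while multiple
 * segments go through the link table built by sg_to_link_tbl_offset()
 * above, with DESC_PTR_LNKTBL_JUMP set on the descriptor pointer and
 * DESC_PTR_LNKTBL_RET tagging the last table entry. On SEC1 the data has
 * already been linearized into edesc->buf, so no link table is needed.
 */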
static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
			      unsigned int len, struct talitos_edesc *edesc,
			      struct talitos_ptr *ptr, int sg_count,
			      unsigned int offset, int tbl_off, int elen,
			      bool force, int align)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int aligned_len = ALIGN(len, align);

	if (!src) {
		to_talitos_ptr(ptr, 0, 0, is_sec1);
		return 1;
	}
	to_talitos_ptr_ext_set(ptr, elen, is_sec1);
	if (sg_count == 1 && !force) {
		to_talitos_ptr(ptr, sg_dma_address(src) + offset, aligned_len, is_sec1);
		return sg_count;
	}
	if (is_sec1) {
		to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, aligned_len, is_sec1);
		return sg_count;
	}
	sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len, elen,
					 &edesc->link_tbl[tbl_off], align);
	if (sg_count == 1 && !force) {
		/* Only one segment now, so no link tbl needed */
		copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
		return sg_count;
	}
	to_talitos_ptr(ptr, edesc->dma_link_tbl +
			    tbl_off * sizeof(struct talitos_ptr),
		       aligned_len, is_sec1);
	to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);

	return sg_count;
}

static int talitos_sg_map(struct device *dev, struct scatterlist *src,
			  unsigned int len, struct talitos_edesc *edesc,
			  struct talitos_ptr *ptr, int sg_count,
			  unsigned int offset, int tbl_off)
{
	return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
				  tbl_off, 0, false, 1);
}

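/*
 * AEAD descriptor pointer usage in ipsec_esp() below:
 *	ptr[0] - HMAC key		ptr[1] - associated data
 *	ptr[2]/ptr[3] - cipher IV and cipher key (order depends on whether
 *			the descriptor type is IPSEC_ESP or HSNA)
 *	ptr[4] - cipher input		ptr[5] - cipher output
 *	ptr[6] - ICV in/out or IV out, depending on direction and type
 */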
/*
 * fill in and submit ipsec_esp descriptor
 */
static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
		     bool encrypt,
		     void (*callback)(struct device *dev,
				      struct talitos_desc *desc,
				      void *context, int error))
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int tbl_off = 0;
	int sg_count, ret;
	int elen = 0;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
	struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];
	dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;

	/* hmac key */
	to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  areq->assoclen + cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);

	/* hmac data */
	ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
			     &desc->ptr[1], sg_count, 0, tbl_off);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher iv */
	to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
		       ctx->enckeylen, is_sec1);

	/*
	 * cipher in
	 * map and adjust cipher len to aead request cryptlen.
	 * extent is bytes of HMAC postpended to ciphertext,
	 * typically 12 for ipsec
	 */
	if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
		elen = authsize;

	ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
				 sg_count, areq->assoclen, tbl_off, elen,
				 false, 1);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	if (is_ipsec_esp && encrypt)
		elen = authsize;
	else
		elen = 0;
	ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
				 sg_count, areq->assoclen, tbl_off, elen,
				 is_ipsec_esp && !encrypt, 1);
	tbl_off += ret;

	if (!encrypt && is_ipsec_esp) {
		struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];

		/* Add an entry to the link table for ICV data */
		to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
		to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RET, is_sec1);

		/* icv data follows link tables */
		to_talitos_ptr(tbl_ptr, dma_icv, authsize, is_sec1);
		to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
		sync_needed = true;
	} else if (!encrypt) {
		to_talitos_ptr(&desc->ptr[6], dma_icv, authsize, is_sec1);
		sync_needed = true;
	} else if (!is_ipsec_esp) {
		talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
			       sg_count, areq->assoclen + cryptlen, tbl_off);
	}

	/* iv out */
	if (is_ipsec_esp)
		map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
				       DMA_FROM_DEVICE);

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len,
					   DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		ipsec_esp_unmap(dev, edesc, areq, encrypt);
		kfree(edesc);
	}
	return ret;
}

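/*
 * The extended descriptor allocated below is a single buffer holding the
 * h/w descriptor itself, the optional link tables (or a linearization
 * buffer on SEC1), space for a stashed/generated ICV, and a copy of the
 * IV at the very end; dma_link_tbl is the DMA mapping of that trailing
 * area.
 */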
/*
 * allocate and map the extended descriptor
 */
static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
						 struct scatterlist *src,
						 struct scatterlist *dst,
						 u8 *iv,
						 unsigned int assoclen,
						 unsigned int cryptlen,
						 unsigned int authsize,
						 unsigned int ivsize,
						 int icv_stashing,
						 u32 cryptoflags,
						 bool encrypt)
{
	struct talitos_edesc *edesc;
	int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
	dma_addr_t iv_dma = 0;
	gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		      GFP_ATOMIC;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;

	if (cryptlen + authsize > max_len) {
		dev_err(dev, "length exceeds h/w max limit\n");
		return ERR_PTR(-EINVAL);
	}

	if (!dst || dst == src) {
		src_len = assoclen + cryptlen + authsize;
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_nents = dst ? src_nents : 0;
		dst_len = 0;
	} else { /* dst && dst != src*/
		src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
		dst_nents = sg_nents_for_len(dst, dst_len);
		if (dst_nents < 0) {
			dev_err(dev, "Invalid number of dst SG.\n");
			return ERR_PTR(-EINVAL);
		}
		dst_nents = (dst_nents == 1) ? 0 : dst_nents;
	}

	/*
	 * allocate space for base edesc plus the link tables,
	 * allowing for two separate entries for AD and generated ICV (+ 2),
	 * and space for two sets of ICVs (stashed and generated)
	 */
	alloc_len = sizeof(struct talitos_edesc);
	if (src_nents || dst_nents || !encrypt) {
		if (is_sec1)
			dma_len = (src_nents ? src_len : 0) +
				  (dst_nents ? dst_len : 0) + authsize;
		else
			dma_len = (src_nents + dst_nents + 2) *
				  sizeof(struct talitos_ptr) + authsize;
		alloc_len += dma_len;
	} else {
		dma_len = 0;
	}
	alloc_len += icv_stashing ? authsize : 0;

	/* if it's an ahash, add space for a second desc next to the first one */
	if (is_sec1 && !dst)
		alloc_len += sizeof(struct talitos_desc);
	alloc_len += ivsize;

	edesc = kmalloc(alloc_len, GFP_DMA | flags);
	if (!edesc)
		return ERR_PTR(-ENOMEM);
	if (ivsize) {
		iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
		iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
	}
	memset(&edesc->desc, 0, sizeof(edesc->desc));

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->dma_len = dma_len;
	if (dma_len)
		edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
						     edesc->dma_len,
						     DMA_BIDIRECTIONAL);

	return edesc;
}

static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
					      int icv_stashing, bool encrypt)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);

	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
				   iv, areq->assoclen, cryptlen,
				   authsize, ivsize, icv_stashing,
				   areq->base.flags, encrypt);
}

static int aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_edesc *edesc;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 0, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* set encrypt */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;

	return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
}

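/*
 * aead_decrypt() prefers the h/w ICV check (DESC_HDR_MODE1_MDEU_CICV)
 * when the device advertises TALITOS_FTR_HW_AUTH_CHECK and the buffer
 * layout allows it; otherwise it stashes the received ICV and the
 * comparison is done in software in ipsec_esp_decrypt_swauth_done().
 */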
static int aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
	struct talitos_edesc *edesc;
	void *icvdata;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 1, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
	    (priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
	    ((!edesc->src_nents && !edesc->dst_nents) ||
	     priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {

		/* decrypt and check the ICV */
		edesc->desc.hdr = ctx->desc_hdr_template |
				  DESC_HDR_DIR_INBOUND |
				  DESC_HDR_MODE1_MDEU_CICV;

		/* reset integrity check result bits */

		return ipsec_esp(edesc, req, false,
				 ipsec_esp_decrypt_hwauth_done);
	}

	/* Have to check the ICV with software */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;

	/* stash incoming ICV for later cmp with ICV generated by the h/w */
	icvdata = edesc->buf + edesc->dma_len;

	sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
			   req->assoclen + req->cryptlen - authsize);

	return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
}

static int skcipher_setkey(struct crypto_skcipher *cipher,
			   const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct device *dev = ctx->dev;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(&ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);

	return 0;
}

static int skcipher_des_setkey(struct crypto_skcipher *cipher,
			       const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des_key(cipher, key) ?:
	       skcipher_setkey(cipher, key, keylen);
}

static int skcipher_des3_setkey(struct crypto_skcipher *cipher,
				const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des3_key(cipher, key) ?:
	       skcipher_setkey(cipher, key, keylen);
}

static int skcipher_aes_setkey(struct crypto_skcipher *cipher,
			       const u8 *key, unsigned int keylen)
{
	if (keylen == AES_KEYSIZE_128 || keylen == AES_KEYSIZE_192 ||
	    keylen == AES_KEYSIZE_256)
		return skcipher_setkey(cipher, key, keylen);

	return -EINVAL;
}

static void common_nonsnoop_unmap(struct device *dev,
				  struct talitos_edesc *edesc,
				  struct skcipher_request *areq)
{
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen, 0);
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);
}

static void skcipher_done(struct device *dev,
			  struct talitos_desc *desc, void *context,
			  int err)
{
	struct skcipher_request *areq = context;
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	unsigned int ivsize = crypto_skcipher_ivsize(cipher);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	common_nonsnoop_unmap(dev, edesc, areq);
	memcpy(areq->iv, ctx->iv, ivsize);

	kfree(edesc);

	areq->base.complete(&areq->base, err);
}

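/*
 * Skcipher descriptor pointer usage in common_nonsnoop() below:
 *	ptr[0] - unused			ptr[1] - IV in
 *	ptr[2] - cipher key		ptr[3] - cipher input
 *	ptr[4] - cipher output		ptr[5] - IV out
 *	ptr[6] - unused
 * AES-CTR input is padded to a 16-byte multiple via the align argument of
 * talitos_sg_map_ext().
 */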
static int common_nonsnoop(struct talitos_edesc *edesc,
			   struct skcipher_request *areq,
			   void (*callback) (struct device *dev,
					     struct talitos_desc *desc,
					     void *context, int error))
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->cryptlen;
	unsigned int ivsize = crypto_skcipher_ivsize(cipher);
	int sg_count, ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	bool is_ctr = (desc->hdr & DESC_HDR_SEL0_MASK) == DESC_HDR_SEL0_AESU &&
		      (desc->hdr & DESC_HDR_MODE0_AESU_MASK) == DESC_HDR_MODE0_AESU_CTR;

	/* first DWORD empty */

	/* cipher iv */
	to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
	/*
	 * cipher in
	 */
	sg_count = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[3],
				      sg_count, 0, 0, 0, false, is_ctr ? 16 : 1);
	if (sg_count > 1)
		sync_needed = true;

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
			     sg_count, 0, (edesc->src_nents + 1));
	if (ret > 1)
		sync_needed = true;

	/* iv out */
	map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
			       DMA_FROM_DEVICE);

	/* last DWORD empty */

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		common_nonsnoop_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}

static struct talitos_edesc *skcipher_edesc_alloc(struct skcipher_request *
						  areq, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	unsigned int ivsize = crypto_skcipher_ivsize(cipher);

	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
				   areq->iv, 0, areq->cryptlen, 0, ivsize, 0,
				   areq->base.flags, encrypt);
}

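/*
 * Encrypt/decrypt entry points: zero-length requests complete immediately
 * and lengths that are not a multiple of the cipher block size are
 * rejected with -EINVAL before any descriptor is built.
 */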
static int skcipher_encrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct talitos_edesc *edesc;
	unsigned int blocksize =
			crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));

	if (!areq->cryptlen)
		return 0;

	if (areq->cryptlen % blocksize)
		return -EINVAL;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(areq, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* set encrypt */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;

	return common_nonsnoop(edesc, areq, skcipher_done);
}

static int skcipher_decrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct talitos_edesc *edesc;
	unsigned int blocksize =
			crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));

	if (!areq->cryptlen)
		return 0;

	if (areq->cryptlen % blocksize)
		return -EINVAL;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(areq, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;

	return common_nonsnoop(edesc, areq, skcipher_done);
}

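/*
 * Teardown for hash requests.  On SEC1 a second chained descriptor (desc2)
 * may sit in edesc->buf past the link table; its pointers are unmapped
 * here too when they were mapped separately from the first descriptor.
 * On the final operation the digest is copied out to areq->result.
 */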
static void common_nonsnoop_hash_unmap(struct device *dev,
				       struct talitos_edesc *edesc,
				       struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	struct talitos_desc *desc = &edesc->desc;
	struct talitos_desc *desc2 = (struct talitos_desc *)
				     (edesc->buf + edesc->dma_len);

	unmap_single_talitos_ptr(dev, &desc->ptr[5], DMA_FROM_DEVICE);
	if (desc->next_desc &&
	    desc->ptr[5].ptr != desc2->ptr[5].ptr)
		unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
	if (req_ctx->last)
		memcpy(areq->result, req_ctx->hw_context,
		       crypto_ahash_digestsize(tfm));

	if (req_ctx->psrc)
		talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);

	/* When using hashctx-in, must unmap it. */
	if (from_talitos_ptr_len(&desc->ptr[1], is_sec1))
		unmap_single_talitos_ptr(dev, &desc->ptr[1],
					 DMA_TO_DEVICE);
	else if (desc->next_desc)
		unmap_single_talitos_ptr(dev, &desc2->ptr[1],
					 DMA_TO_DEVICE);

	if (is_sec1 && req_ctx->nbuf)
		unmap_single_talitos_ptr(dev, &desc->ptr[3],
					 DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (desc->next_desc)
		dma_unmap_single(dev, be32_to_cpu(desc->next_desc),
				 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
}

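/*
 * Completion callback for hash requests: if this was not the final
 * operation and bytes were held back, flip to the other bounce buffer and
 * record them for the next update/final/finup before unmapping.
 */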
static void ahash_done(struct device *dev,
		       struct talitos_desc *desc, void *context,
		       int err)
{
	struct ahash_request *areq = context;
	struct talitos_edesc *edesc =
		container_of(desc, struct talitos_edesc, desc);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);

	if (!req_ctx->last && req_ctx->to_hash_later) {
		/* Position any partial block for next update/final/finup */
		req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
		req_ctx->nbuf = req_ctx->to_hash_later;
	}
	common_nonsnoop_hash_unmap(dev, edesc, areq);

	kfree(edesc);

	areq->base.complete(&areq->base, err);
}

/*
 * SEC1 doesn't like hashing a zero-sized message, so we do the padding
 * ourselves and submit a padded block
 */
static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
				      struct talitos_edesc *edesc,
				      struct talitos_ptr *ptr)
{
	static u8 padded_hash[64] = {
		0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	};

	pr_err_once("Bug in SEC1, padding ourself\n");
	edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
	map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
			       (char *)padded_hash, DMA_TO_DEVICE);
}

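/*
 * Build and submit a hash descriptor: ptr[1] carries the hash context in
 * (unless this is the first op and the unit initializes itself), ptr[2]
 * the HMAC key if any, ptr[3] the data to hash and ptr[5] the digest or
 * intermediate context out.  On SEC1, previously buffered bytes are fed
 * through ptr[3] of the first descriptor and the remaining scatterlist
 * data through a second descriptor chained via next_desc.
 */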
static int common_nonsnoop_hash(struct talitos_edesc *edesc,
				struct ahash_request *areq, unsigned int length,
				void (*callback) (struct device *dev,
						  struct talitos_desc *desc,
						  void *context, int error))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	int ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int sg_count;

	/* first DWORD empty */

	/* hash context in */
	if (!req_ctx->first || req_ctx->swinit) {
		map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
					      req_ctx->hw_context_size,
					      req_ctx->hw_context,
					      DMA_TO_DEVICE);
		req_ctx->swinit = 0;
	}
	/* Indicate next op is not the first. */
	req_ctx->first = 0;

	/* HMAC key */
	if (ctx->keylen)
		to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
			       is_sec1);

	if (is_sec1 && req_ctx->nbuf)
		length -= req_ctx->nbuf;

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
	else if (length)
		sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
				      DMA_TO_DEVICE);
	/*
	 * data in
	 */
	if (is_sec1 && req_ctx->nbuf) {
		map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
				       req_ctx->buf[req_ctx->buf_idx],
				       DMA_TO_DEVICE);
	} else {
		sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
					  &desc->ptr[3], sg_count, 0, 0);
		if (sg_count > 1)
			sync_needed = true;
	}

	/* fifth DWORD empty */

	/* hash/HMAC out -or- hash context out */
	if (req_ctx->last)
		map_single_talitos_ptr(dev, &desc->ptr[5],
				       crypto_ahash_digestsize(tfm),
				       req_ctx->hw_context, DMA_FROM_DEVICE);
	else
		map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
					      req_ctx->hw_context_size,
					      req_ctx->hw_context,
					      DMA_FROM_DEVICE);

	/* last DWORD empty */

	if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
		talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);

	if (is_sec1 && req_ctx->nbuf && length) {
		struct talitos_desc *desc2 = (struct talitos_desc *)
					     (edesc->buf + edesc->dma_len);
		dma_addr_t next_desc;

		memset(desc2, 0, sizeof(*desc2));
		desc2->hdr = desc->hdr;
		desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
		desc2->hdr1 = desc2->hdr;
		desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
		desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
		desc->hdr &= ~DESC_HDR_DONE_NOTIFY;

		if (desc->ptr[1].ptr)
			copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
					 is_sec1);
		else
			map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
						      req_ctx->hw_context_size,
						      req_ctx->hw_context,
						      DMA_TO_DEVICE);
		copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
		sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
					  &desc2->ptr[3], sg_count, 0, 0);
		if (sg_count > 1)
			sync_needed = true;
		copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
		if (req_ctx->last)
			map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
						      req_ctx->hw_context_size,
						      req_ctx->hw_context,
						      DMA_FROM_DEVICE);

		next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
					   DMA_BIDIRECTIONAL);
		desc->next_desc = cpu_to_be32(next_desc);
	}

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		common_nonsnoop_hash_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}

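/*
 * On SEC1 the bytes already buffered in the request context are mapped
 * through a separate descriptor pointer, so they are subtracted here and
 * not counted when sizing the extended descriptor for scatterlist data.
 */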
static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
					       unsigned int nbytes)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1)
		nbytes -= req_ctx->nbuf;

	return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
				   nbytes, 0, 0, 0, areq->base.flags, false);
}

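/*
 * Reset the request context and size the hardware context from the digest
 * size.  The dma_map_single()/dma_unmap_single() pair below syncs the
 * CPU-written hw_context for the device; later descriptor mappings use
 * the _nosync variant.
 */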
static int ahash_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	unsigned int size;
	dma_addr_t dma;

	/* Initialize the context */
	req_ctx->buf_idx = 0;
	req_ctx->nbuf = 0;
	req_ctx->first = 1; /* first indicates h/w must init its context */
	req_ctx->swinit = 0; /* assume h/w init of context */
	size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
		? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
		: TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
	req_ctx->hw_context_size = size;

	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_TO_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);

	return 0;
}

/*
 * on h/w without explicit sha224 support, we initialize h/w context
 * manually with sha224 constants, and tell it to run sha256.
 */
static int ahash_init_sha224_swinit(struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);

	req_ctx->hw_context[0] = SHA224_H0;
	req_ctx->hw_context[1] = SHA224_H1;
	req_ctx->hw_context[2] = SHA224_H2;
	req_ctx->hw_context[3] = SHA224_H3;
	req_ctx->hw_context[4] = SHA224_H4;
	req_ctx->hw_context[5] = SHA224_H5;
	req_ctx->hw_context[6] = SHA224_H6;
	req_ctx->hw_context[7] = SHA224_H7;

	/* init 64-bit count */
	req_ctx->hw_context[8] = 0;
	req_ctx->hw_context[9] = 0;

	ahash_init(areq);
	req_ctx->swinit = 1; /* prevent h/w initting context with sha256 values */

	return 0;
}

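/*
 * Core update/final path: data is accumulated in one of two bounce
 * buffers until more than a block is available; full blocks are hashed
 * and any partial block (or, on non-final updates, one whole block) is
 * carried over for the next call.
 */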
static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_edesc *edesc;
	unsigned int blocksize =
			crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	unsigned int nbytes_to_hash;
	unsigned int to_hash_later;
	unsigned int nsg;
	int nents;
	struct device *dev = ctx->dev;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];

	if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
		/* Buffer up to one whole block */
		nents = sg_nents_for_len(areq->src, nbytes);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_copy_to_buffer(areq->src, nents,
				  ctx_buf + req_ctx->nbuf, nbytes);
		req_ctx->nbuf += nbytes;
		return 0;
	}

	/* At least (blocksize + 1) bytes are available to hash */
	nbytes_to_hash = nbytes + req_ctx->nbuf;
	to_hash_later = nbytes_to_hash & (blocksize - 1);

	if (req_ctx->last)
		to_hash_later = 0;
	else if (to_hash_later)
		/* There is a partial block. Hash the full block(s) now */
		nbytes_to_hash -= to_hash_later;
	else {
		/* Keep one block buffered */
		nbytes_to_hash -= blocksize;
		to_hash_later = blocksize;
	}

	/* Chain in any previously buffered data */
	if (!is_sec1 && req_ctx->nbuf) {
		nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
		sg_init_table(req_ctx->bufsl, nsg);
		sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
		if (nsg > 1)
			sg_chain(req_ctx->bufsl, 2, areq->src);
		req_ctx->psrc = req_ctx->bufsl;
	} else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
		int offset;

		if (nbytes_to_hash > blocksize)
			offset = blocksize - req_ctx->nbuf;
		else
			offset = nbytes_to_hash - req_ctx->nbuf;
		nents = sg_nents_for_len(areq->src, offset);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_copy_to_buffer(areq->src, nents,
				  ctx_buf + req_ctx->nbuf, offset);
		req_ctx->nbuf += offset;
		req_ctx->psrc = scatterwalk_ffwd(req_ctx->bufsl, areq->src,
						 offset);
	} else
		req_ctx->psrc = areq->src;

	if (to_hash_later) {
		nents = sg_nents_for_len(areq->src, nbytes);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_pcopy_to_buffer(areq->src, nents,
				   req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
				   to_hash_later,
				   nbytes - to_hash_later);
	}
	req_ctx->to_hash_later = to_hash_later;

	/* Allocate extended descriptor */
	edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	edesc->desc.hdr = ctx->desc_hdr_template;

	/* On last one, request SEC to pad; otherwise continue */
	if (req_ctx->last)
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
	else
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;

	/* request SEC to INIT hash. */
	if (req_ctx->first && !req_ctx->swinit)
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;

	/* When the tfm context has a keylen, it's an HMAC.
	 * A first or last (ie. not middle) descriptor must request HMAC.
	 */
	if (ctx->keylen && (req_ctx->first || req_ctx->last))
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;

	return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
}

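/*
 * ahash entry points: update buffers or hashes incoming data, final and
 * finup mark the request as last so the engine pads and emits the digest,
 * and digest runs init followed by a one-shot finup.
 */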
static int ahash_update(struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);

	req_ctx->last = 0;

	return ahash_process_req(areq, areq->nbytes);
}

static int ahash_final(struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);

	req_ctx->last = 1;

	return ahash_process_req(areq, 0);
}

static int ahash_finup(struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);

	req_ctx->last = 1;

	return ahash_process_req(areq, areq->nbytes);
}

static int ahash_digest(struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);

	ahash->init(areq);
	req_ctx->last = 1;

	return ahash_process_req(areq, areq->nbytes);
}

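/*
 * export/import serialize the hardware hash context plus the partially
 * buffered block and request flags so an in-progress hash can be saved
 * and restored.  The map/unmap pairs sync hw_context between CPU and
 * device around the copies.
 */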
static int ahash_export(struct ahash_request *areq, void *out)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_export_state *export = out;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	dma_addr_t dma;

	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_FROM_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);

	memcpy(export->hw_context, req_ctx->hw_context,
	       req_ctx->hw_context_size);
	memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
	export->swinit = req_ctx->swinit;
	export->first = req_ctx->first;
	export->last = req_ctx->last;
	export->to_hash_later = req_ctx->to_hash_later;
	export->nbuf = req_ctx->nbuf;

	return 0;
}

static int ahash_import(struct ahash_request *areq, const void *in)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	const struct talitos_export_state *export = in;
	unsigned int size;
	dma_addr_t dma;

	memset(req_ctx, 0, sizeof(*req_ctx));
	size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
		? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
		: TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
	req_ctx->hw_context_size = size;
	memcpy(req_ctx->hw_context, export->hw_context, size);
	memcpy(req_ctx->buf[0], export->buf, export->nbuf);
	req_ctx->swinit = export->swinit;
	req_ctx->first = export->first;
	req_ctx->last = export->last;
	req_ctx->to_hash_later = export->to_hash_later;
	req_ctx->nbuf = export->nbuf;

	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_TO_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);

	return 0;
}

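/*
 * Digest an over-long HMAC key with the same tfm so the result can be
 * used as the actual key, as the HMAC construction requires.  keylen is
 * kept at 0 while hashing so the operation runs unkeyed.
 */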
static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
		   u8 *hash)
{
	struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));

	struct scatterlist sg[1];
	struct ahash_request *req;
	struct crypto_wait wait;
	int ret;

	crypto_init_wait(&wait);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	/* Keep tfm keylen == 0 during hash of the long key */
	ctx->keylen = 0;
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	sg_init_one(&sg[0], key, keylen);

	ahash_request_set_crypt(req, sg, hash, keylen);
	ret = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);

	return ret;
}

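/*
 * HMAC setkey: keys up to one block are used as-is, longer keys are first
 * digested down to the digest size.  The resulting key material is DMA
 * mapped once here and referenced by subsequent descriptors.
 */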
static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct device *dev = ctx->dev;
	unsigned int blocksize =
			crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	unsigned int digestsize = crypto_ahash_digestsize(tfm);
	unsigned int keysize = keylen;
	u8 hash[SHA512_DIGEST_SIZE];
	int ret;

	if (keylen <= blocksize)
		memcpy(ctx->key, key, keysize);
	else {
		/* Must get the hash of the long key */
		ret = keyhash(tfm, key, keylen, hash);

		if (ret)
			return -EINVAL;

		keysize = digestsize;
		memcpy(ctx->key, hash, digestsize);
	}

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	ctx->keylen = keysize;
	ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);

	return 0;
}

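/*
 * Each template below pairs a crypto API algorithm definition with the
 * SEC descriptor header bits (descriptor type, execution units, modes)
 * used to run it; registration later checks these bits against the
 * device capabilities via hw_supports().
 */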
struct talitos_alg_template {
	u32 type;
	u32 priority;
	union {
		struct skcipher_alg skcipher;
		struct ahash_alg hash;
		struct aead_alg aead;
	} alg;
	__be32 desc_hdr_template;
};

2263static struct talitos_alg_template driver_algs[] = {
991155ba 2264 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
d5e4aaef 2265 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2266 .alg.aead = {
2267 .base = {
2268 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2269 .cra_driver_name = "authenc-hmac-sha1-"
2270 "cbc-aes-talitos",
2271 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2272 .cra_flags = CRYPTO_ALG_ASYNC |
2273 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2274 },
2275 .ivsize = AES_BLOCK_SIZE,
2276 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2277 },
9c4a7965
KP
2278 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2279 DESC_HDR_SEL0_AESU |
2280 DESC_HDR_MODE0_AESU_CBC |
2281 DESC_HDR_SEL1_MDEUA |
2282 DESC_HDR_MODE1_MDEU_INIT |
2283 DESC_HDR_MODE1_MDEU_PAD |
2284 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
70bcaca7 2285 },
7405c8d7
LC
2286 { .type = CRYPTO_ALG_TYPE_AEAD,
2287 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2288 .alg.aead = {
2289 .base = {
2290 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2291 .cra_driver_name = "authenc-hmac-sha1-"
a1a42f84 2292 "cbc-aes-talitos-hsna",
7405c8d7 2293 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2294 .cra_flags = CRYPTO_ALG_ASYNC |
2295 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2296 },
2297 .ivsize = AES_BLOCK_SIZE,
2298 .maxauthsize = SHA1_DIGEST_SIZE,
2299 },
2300 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2301 DESC_HDR_SEL0_AESU |
2302 DESC_HDR_MODE0_AESU_CBC |
2303 DESC_HDR_SEL1_MDEUA |
2304 DESC_HDR_MODE1_MDEU_INIT |
2305 DESC_HDR_MODE1_MDEU_PAD |
2306 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2307 },
d5e4aaef 2308 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2309 .alg.aead = {
2310 .base = {
2311 .cra_name = "authenc(hmac(sha1),"
2312 "cbc(des3_ede))",
2313 .cra_driver_name = "authenc-hmac-sha1-"
2314 "cbc-3des-talitos",
2315 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2316 .cra_flags = CRYPTO_ALG_ASYNC |
2317 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2318 },
2319 .ivsize = DES3_EDE_BLOCK_SIZE,
2320 .maxauthsize = SHA1_DIGEST_SIZE,
ef7c5c85 2321 .setkey = aead_des3_setkey,
56af8cd4 2322 },
70bcaca7
LN
2323 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2324 DESC_HDR_SEL0_DEU |
2325 DESC_HDR_MODE0_DEU_CBC |
2326 DESC_HDR_MODE0_DEU_3DES |
2327 DESC_HDR_SEL1_MDEUA |
2328 DESC_HDR_MODE1_MDEU_INIT |
2329 DESC_HDR_MODE1_MDEU_PAD |
2330 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
3952f17e 2331 },
7405c8d7
LC
2332 { .type = CRYPTO_ALG_TYPE_AEAD,
2333 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2334 .alg.aead = {
2335 .base = {
2336 .cra_name = "authenc(hmac(sha1),"
2337 "cbc(des3_ede))",
2338 .cra_driver_name = "authenc-hmac-sha1-"
a1a42f84 2339 "cbc-3des-talitos-hsna",
7405c8d7 2340 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2341 .cra_flags = CRYPTO_ALG_ASYNC |
2342 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2343 },
2344 .ivsize = DES3_EDE_BLOCK_SIZE,
2345 .maxauthsize = SHA1_DIGEST_SIZE,
ef7c5c85 2346 .setkey = aead_des3_setkey,
7405c8d7
LC
2347 },
2348 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2349 DESC_HDR_SEL0_DEU |
2350 DESC_HDR_MODE0_DEU_CBC |
2351 DESC_HDR_MODE0_DEU_3DES |
2352 DESC_HDR_SEL1_MDEUA |
2353 DESC_HDR_MODE1_MDEU_INIT |
2354 DESC_HDR_MODE1_MDEU_PAD |
2355 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2356 },
357fb605 2357 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2358 .alg.aead = {
2359 .base = {
2360 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2361 .cra_driver_name = "authenc-hmac-sha224-"
2362 "cbc-aes-talitos",
2363 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2364 .cra_flags = CRYPTO_ALG_ASYNC |
2365 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2366 },
2367 .ivsize = AES_BLOCK_SIZE,
2368 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2369 },
2370 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2371 DESC_HDR_SEL0_AESU |
2372 DESC_HDR_MODE0_AESU_CBC |
2373 DESC_HDR_SEL1_MDEUA |
2374 DESC_HDR_MODE1_MDEU_INIT |
2375 DESC_HDR_MODE1_MDEU_PAD |
2376 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2377 },
7405c8d7
LC
2378 { .type = CRYPTO_ALG_TYPE_AEAD,
2379 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2380 .alg.aead = {
2381 .base = {
2382 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2383 .cra_driver_name = "authenc-hmac-sha224-"
a1a42f84 2384 "cbc-aes-talitos-hsna",
7405c8d7 2385 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2386 .cra_flags = CRYPTO_ALG_ASYNC |
2387 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2388 },
2389 .ivsize = AES_BLOCK_SIZE,
2390 .maxauthsize = SHA224_DIGEST_SIZE,
2391 },
2392 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2393 DESC_HDR_SEL0_AESU |
2394 DESC_HDR_MODE0_AESU_CBC |
2395 DESC_HDR_SEL1_MDEUA |
2396 DESC_HDR_MODE1_MDEU_INIT |
2397 DESC_HDR_MODE1_MDEU_PAD |
2398 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2399 },
357fb605 2400 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2401 .alg.aead = {
2402 .base = {
2403 .cra_name = "authenc(hmac(sha224),"
2404 "cbc(des3_ede))",
2405 .cra_driver_name = "authenc-hmac-sha224-"
2406 "cbc-3des-talitos",
2407 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2408 .cra_flags = CRYPTO_ALG_ASYNC |
2409 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2410 },
2411 .ivsize = DES3_EDE_BLOCK_SIZE,
2412 .maxauthsize = SHA224_DIGEST_SIZE,
ef7c5c85 2413 .setkey = aead_des3_setkey,
357fb605
HG
2414 },
2415 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2416 DESC_HDR_SEL0_DEU |
2417 DESC_HDR_MODE0_DEU_CBC |
2418 DESC_HDR_MODE0_DEU_3DES |
2419 DESC_HDR_SEL1_MDEUA |
2420 DESC_HDR_MODE1_MDEU_INIT |
2421 DESC_HDR_MODE1_MDEU_PAD |
2422 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2423 },
7405c8d7
LC
2424 { .type = CRYPTO_ALG_TYPE_AEAD,
2425 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2426 .alg.aead = {
2427 .base = {
2428 .cra_name = "authenc(hmac(sha224),"
2429 "cbc(des3_ede))",
2430 .cra_driver_name = "authenc-hmac-sha224-"
a1a42f84 2431 "cbc-3des-talitos-hsna",
7405c8d7 2432 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2433 .cra_flags = CRYPTO_ALG_ASYNC |
2434 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2435 },
2436 .ivsize = DES3_EDE_BLOCK_SIZE,
2437 .maxauthsize = SHA224_DIGEST_SIZE,
ef7c5c85 2438 .setkey = aead_des3_setkey,
7405c8d7
LC
2439 },
2440 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2441 DESC_HDR_SEL0_DEU |
2442 DESC_HDR_MODE0_DEU_CBC |
2443 DESC_HDR_MODE0_DEU_3DES |
2444 DESC_HDR_SEL1_MDEUA |
2445 DESC_HDR_MODE1_MDEU_INIT |
2446 DESC_HDR_MODE1_MDEU_PAD |
2447 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2448 },
d5e4aaef 2449 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2450 .alg.aead = {
2451 .base = {
2452 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2453 .cra_driver_name = "authenc-hmac-sha256-"
2454 "cbc-aes-talitos",
2455 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2456 .cra_flags = CRYPTO_ALG_ASYNC |
2457 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2458 },
2459 .ivsize = AES_BLOCK_SIZE,
2460 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2461 },
3952f17e
LN
2462 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2463 DESC_HDR_SEL0_AESU |
2464 DESC_HDR_MODE0_AESU_CBC |
2465 DESC_HDR_SEL1_MDEUA |
2466 DESC_HDR_MODE1_MDEU_INIT |
2467 DESC_HDR_MODE1_MDEU_PAD |
2468 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2469 },
7405c8d7
LC
2470 { .type = CRYPTO_ALG_TYPE_AEAD,
2471 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2472 .alg.aead = {
2473 .base = {
2474 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2475 .cra_driver_name = "authenc-hmac-sha256-"
a1a42f84 2476 "cbc-aes-talitos-hsna",
7405c8d7 2477 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2478 .cra_flags = CRYPTO_ALG_ASYNC |
2479 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2480 },
2481 .ivsize = AES_BLOCK_SIZE,
2482 .maxauthsize = SHA256_DIGEST_SIZE,
2483 },
2484 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2485 DESC_HDR_SEL0_AESU |
2486 DESC_HDR_MODE0_AESU_CBC |
2487 DESC_HDR_SEL1_MDEUA |
2488 DESC_HDR_MODE1_MDEU_INIT |
2489 DESC_HDR_MODE1_MDEU_PAD |
2490 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2491 },
d5e4aaef 2492 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2493 .alg.aead = {
2494 .base = {
2495 .cra_name = "authenc(hmac(sha256),"
2496 "cbc(des3_ede))",
2497 .cra_driver_name = "authenc-hmac-sha256-"
2498 "cbc-3des-talitos",
2499 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2500 .cra_flags = CRYPTO_ALG_ASYNC |
2501 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2502 },
2503 .ivsize = DES3_EDE_BLOCK_SIZE,
2504 .maxauthsize = SHA256_DIGEST_SIZE,
ef7c5c85 2505 .setkey = aead_des3_setkey,
56af8cd4 2506 },
3952f17e
LN
2507 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2508 DESC_HDR_SEL0_DEU |
2509 DESC_HDR_MODE0_DEU_CBC |
2510 DESC_HDR_MODE0_DEU_3DES |
2511 DESC_HDR_SEL1_MDEUA |
2512 DESC_HDR_MODE1_MDEU_INIT |
2513 DESC_HDR_MODE1_MDEU_PAD |
2514 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2515 },
7405c8d7
LC
2516 { .type = CRYPTO_ALG_TYPE_AEAD,
2517 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2518 .alg.aead = {
2519 .base = {
2520 .cra_name = "authenc(hmac(sha256),"
2521 "cbc(des3_ede))",
2522 .cra_driver_name = "authenc-hmac-sha256-"
a1a42f84 2523 "cbc-3des-talitos-hsna",
7405c8d7 2524 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2525 .cra_flags = CRYPTO_ALG_ASYNC |
2526 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2527 },
2528 .ivsize = DES3_EDE_BLOCK_SIZE,
2529 .maxauthsize = SHA256_DIGEST_SIZE,
ef7c5c85 2530 .setkey = aead_des3_setkey,
7405c8d7
LC
2531 },
2532 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2533 DESC_HDR_SEL0_DEU |
2534 DESC_HDR_MODE0_DEU_CBC |
2535 DESC_HDR_MODE0_DEU_3DES |
2536 DESC_HDR_SEL1_MDEUA |
2537 DESC_HDR_MODE1_MDEU_INIT |
2538 DESC_HDR_MODE1_MDEU_PAD |
2539 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2540 },
d5e4aaef 2541 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2542 .alg.aead = {
2543 .base = {
2544 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2545 .cra_driver_name = "authenc-hmac-sha384-"
2546 "cbc-aes-talitos",
2547 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2548 .cra_flags = CRYPTO_ALG_ASYNC |
2549 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2550 },
2551 .ivsize = AES_BLOCK_SIZE,
2552 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2553 },
2554 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2555 DESC_HDR_SEL0_AESU |
2556 DESC_HDR_MODE0_AESU_CBC |
2557 DESC_HDR_SEL1_MDEUB |
2558 DESC_HDR_MODE1_MDEU_INIT |
2559 DESC_HDR_MODE1_MDEU_PAD |
2560 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2561 },
2562 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2563 .alg.aead = {
2564 .base = {
2565 .cra_name = "authenc(hmac(sha384),"
2566 "cbc(des3_ede))",
2567 .cra_driver_name = "authenc-hmac-sha384-"
2568 "cbc-3des-talitos",
2569 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2570 .cra_flags = CRYPTO_ALG_ASYNC |
2571 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2572 },
2573 .ivsize = DES3_EDE_BLOCK_SIZE,
2574 .maxauthsize = SHA384_DIGEST_SIZE,
ef7c5c85 2575 .setkey = aead_des3_setkey,
357fb605
HG
2576 },
2577 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2578 DESC_HDR_SEL0_DEU |
2579 DESC_HDR_MODE0_DEU_CBC |
2580 DESC_HDR_MODE0_DEU_3DES |
2581 DESC_HDR_SEL1_MDEUB |
2582 DESC_HDR_MODE1_MDEU_INIT |
2583 DESC_HDR_MODE1_MDEU_PAD |
2584 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2585 },
2586 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2587 .alg.aead = {
2588 .base = {
2589 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2590 .cra_driver_name = "authenc-hmac-sha512-"
2591 "cbc-aes-talitos",
2592 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2593 .cra_flags = CRYPTO_ALG_ASYNC |
2594 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2595 },
2596 .ivsize = AES_BLOCK_SIZE,
2597 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2598 },
2599 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2600 DESC_HDR_SEL0_AESU |
2601 DESC_HDR_MODE0_AESU_CBC |
2602 DESC_HDR_SEL1_MDEUB |
2603 DESC_HDR_MODE1_MDEU_INIT |
2604 DESC_HDR_MODE1_MDEU_PAD |
2605 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2606 },
2607 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2608 .alg.aead = {
2609 .base = {
2610 .cra_name = "authenc(hmac(sha512),"
2611 "cbc(des3_ede))",
2612 .cra_driver_name = "authenc-hmac-sha512-"
2613 "cbc-3des-talitos",
2614 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2615 .cra_flags = CRYPTO_ALG_ASYNC |
2616 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2617 },
2618 .ivsize = DES3_EDE_BLOCK_SIZE,
2619 .maxauthsize = SHA512_DIGEST_SIZE,
ef7c5c85 2620 .setkey = aead_des3_setkey,
357fb605
HG
2621 },
2622 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2623 DESC_HDR_SEL0_DEU |
2624 DESC_HDR_MODE0_DEU_CBC |
2625 DESC_HDR_MODE0_DEU_3DES |
2626 DESC_HDR_SEL1_MDEUB |
2627 DESC_HDR_MODE1_MDEU_INIT |
2628 DESC_HDR_MODE1_MDEU_PAD |
2629 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2630 },
2631 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2632 .alg.aead = {
2633 .base = {
2634 .cra_name = "authenc(hmac(md5),cbc(aes))",
2635 .cra_driver_name = "authenc-hmac-md5-"
2636 "cbc-aes-talitos",
2637 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2638 .cra_flags = CRYPTO_ALG_ASYNC |
2639 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2640 },
2641 .ivsize = AES_BLOCK_SIZE,
2642 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2643 },
3952f17e
LN
2644 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2645 DESC_HDR_SEL0_AESU |
2646 DESC_HDR_MODE0_AESU_CBC |
2647 DESC_HDR_SEL1_MDEUA |
2648 DESC_HDR_MODE1_MDEU_INIT |
2649 DESC_HDR_MODE1_MDEU_PAD |
2650 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2651 },
7405c8d7
LC
2652 { .type = CRYPTO_ALG_TYPE_AEAD,
2653 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2654 .alg.aead = {
2655 .base = {
2656 .cra_name = "authenc(hmac(md5),cbc(aes))",
2657 .cra_driver_name = "authenc-hmac-md5-"
a1a42f84 2658 "cbc-aes-talitos-hsna",
7405c8d7 2659 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2660 .cra_flags = CRYPTO_ALG_ASYNC |
2661 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2662 },
2663 .ivsize = AES_BLOCK_SIZE,
2664 .maxauthsize = MD5_DIGEST_SIZE,
2665 },
2666 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2667 DESC_HDR_SEL0_AESU |
2668 DESC_HDR_MODE0_AESU_CBC |
2669 DESC_HDR_SEL1_MDEUA |
2670 DESC_HDR_MODE1_MDEU_INIT |
2671 DESC_HDR_MODE1_MDEU_PAD |
2672 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2673 },
d5e4aaef 2674 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2675 .alg.aead = {
2676 .base = {
2677 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2678 .cra_driver_name = "authenc-hmac-md5-"
2679 "cbc-3des-talitos",
2680 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2681 .cra_flags = CRYPTO_ALG_ASYNC |
2682 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2683 },
2684 .ivsize = DES3_EDE_BLOCK_SIZE,
2685 .maxauthsize = MD5_DIGEST_SIZE,
ef7c5c85 2686 .setkey = aead_des3_setkey,
56af8cd4 2687 },
3952f17e
LN
2688 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2689 DESC_HDR_SEL0_DEU |
2690 DESC_HDR_MODE0_DEU_CBC |
2691 DESC_HDR_MODE0_DEU_3DES |
2692 DESC_HDR_SEL1_MDEUA |
2693 DESC_HDR_MODE1_MDEU_INIT |
2694 DESC_HDR_MODE1_MDEU_PAD |
2695 DESC_HDR_MODE1_MDEU_MD5_HMAC,
4de9d0b5 2696 },
7405c8d7
LC
2697 { .type = CRYPTO_ALG_TYPE_AEAD,
2698 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2699 .alg.aead = {
2700 .base = {
2701 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2702 .cra_driver_name = "authenc-hmac-md5-"
a1a42f84 2703 "cbc-3des-talitos-hsna",
7405c8d7 2704 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2705 .cra_flags = CRYPTO_ALG_ASYNC |
2706 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2707 },
2708 .ivsize = DES3_EDE_BLOCK_SIZE,
2709 .maxauthsize = MD5_DIGEST_SIZE,
ef7c5c85 2710 .setkey = aead_des3_setkey,
7405c8d7
LC
2711 },
2712 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2713 DESC_HDR_SEL0_DEU |
2714 DESC_HDR_MODE0_DEU_CBC |
2715 DESC_HDR_MODE0_DEU_3DES |
2716 DESC_HDR_SEL1_MDEUA |
2717 DESC_HDR_MODE1_MDEU_INIT |
2718 DESC_HDR_MODE1_MDEU_PAD |
2719 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2720 },
373960d7
AB
2721 /* SKCIPHER algorithms. */
2722 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2723 .alg.skcipher = {
2724 .base.cra_name = "ecb(aes)",
2725 .base.cra_driver_name = "ecb-aes-talitos",
2726 .base.cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2727 .base.cra_flags = CRYPTO_ALG_ASYNC |
2728 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2729 .min_keysize = AES_MIN_KEY_SIZE,
2730 .max_keysize = AES_MAX_KEY_SIZE,
2731 .setkey = skcipher_aes_setkey,
5e75ae1b
LC
2732 },
2733 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2734 DESC_HDR_SEL0_AESU,
2735 },
373960d7
AB
2736 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2737 .alg.skcipher = {
2738 .base.cra_name = "cbc(aes)",
2739 .base.cra_driver_name = "cbc-aes-talitos",
2740 .base.cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2741 .base.cra_flags = CRYPTO_ALG_ASYNC |
2742 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2743 .min_keysize = AES_MIN_KEY_SIZE,
2744 .max_keysize = AES_MAX_KEY_SIZE,
2745 .ivsize = AES_BLOCK_SIZE,
2746 .setkey = skcipher_aes_setkey,
4de9d0b5
LN
2747 },
2748 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2749 DESC_HDR_SEL0_AESU |
2750 DESC_HDR_MODE0_AESU_CBC,
2751 },
373960d7
AB
2752 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2753 .alg.skcipher = {
2754 .base.cra_name = "ctr(aes)",
2755 .base.cra_driver_name = "ctr-aes-talitos",
2756 .base.cra_blocksize = 1,
b8aa7dc5
MP
2757 .base.cra_flags = CRYPTO_ALG_ASYNC |
2758 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2759 .min_keysize = AES_MIN_KEY_SIZE,
2760 .max_keysize = AES_MAX_KEY_SIZE,
2761 .ivsize = AES_BLOCK_SIZE,
2762 .setkey = skcipher_aes_setkey,
5e75ae1b 2763 },
70d355cc 2764 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
5e75ae1b
LC
2765 DESC_HDR_SEL0_AESU |
2766 DESC_HDR_MODE0_AESU_CTR,
2767 },
43a942d2
CL
2768 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2769 .alg.skcipher = {
2770 .base.cra_name = "ctr(aes)",
2771 .base.cra_driver_name = "ctr-aes-talitos",
2772 .base.cra_blocksize = 1,
2773 .base.cra_flags = CRYPTO_ALG_ASYNC |
2774 CRYPTO_ALG_ALLOCATES_MEMORY,
2775 .min_keysize = AES_MIN_KEY_SIZE,
2776 .max_keysize = AES_MAX_KEY_SIZE,
2777 .ivsize = AES_BLOCK_SIZE,
2778 .setkey = skcipher_aes_setkey,
2779 },
2780 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2781 DESC_HDR_SEL0_AESU |
2782 DESC_HDR_MODE0_AESU_CTR,
2783 },
373960d7
AB
2784 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2785 .alg.skcipher = {
2786 .base.cra_name = "ecb(des)",
2787 .base.cra_driver_name = "ecb-des-talitos",
2788 .base.cra_blocksize = DES_BLOCK_SIZE,
b8aa7dc5
MP
2789 .base.cra_flags = CRYPTO_ALG_ASYNC |
2790 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2791 .min_keysize = DES_KEY_SIZE,
2792 .max_keysize = DES_KEY_SIZE,
2793 .setkey = skcipher_des_setkey,
5e75ae1b
LC
2794 },
2795 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2796 DESC_HDR_SEL0_DEU,
2797 },
373960d7
AB
2798 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2799 .alg.skcipher = {
2800 .base.cra_name = "cbc(des)",
2801 .base.cra_driver_name = "cbc-des-talitos",
2802 .base.cra_blocksize = DES_BLOCK_SIZE,
b8aa7dc5
MP
2803 .base.cra_flags = CRYPTO_ALG_ASYNC |
2804 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2805 .min_keysize = DES_KEY_SIZE,
2806 .max_keysize = DES_KEY_SIZE,
2807 .ivsize = DES_BLOCK_SIZE,
2808 .setkey = skcipher_des_setkey,
5e75ae1b
LC
2809 },
2810 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2811 DESC_HDR_SEL0_DEU |
2812 DESC_HDR_MODE0_DEU_CBC,
2813 },
373960d7
AB
2814 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2815 .alg.skcipher = {
2816 .base.cra_name = "ecb(des3_ede)",
2817 .base.cra_driver_name = "ecb-3des-talitos",
2818 .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2819 .base.cra_flags = CRYPTO_ALG_ASYNC |
2820 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2821 .min_keysize = DES3_EDE_KEY_SIZE,
2822 .max_keysize = DES3_EDE_KEY_SIZE,
2823 .setkey = skcipher_des3_setkey,
5e75ae1b
LC
2824 },
2825 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2826 DESC_HDR_SEL0_DEU |
2827 DESC_HDR_MODE0_DEU_3DES,
2828 },
373960d7
AB
2829 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2830 .alg.skcipher = {
2831 .base.cra_name = "cbc(des3_ede)",
2832 .base.cra_driver_name = "cbc-3des-talitos",
2833 .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2834 .base.cra_flags = CRYPTO_ALG_ASYNC |
2835 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2836 .min_keysize = DES3_EDE_KEY_SIZE,
2837 .max_keysize = DES3_EDE_KEY_SIZE,
2838 .ivsize = DES3_EDE_BLOCK_SIZE,
2839 .setkey = skcipher_des3_setkey,
4de9d0b5
LN
2840 },
2841 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2842 DESC_HDR_SEL0_DEU |
2843 DESC_HDR_MODE0_DEU_CBC |
2844 DESC_HDR_MODE0_DEU_3DES,
497f2e6b
LN
2845 },
2846 /* AHASH algorithms. */
2847 { .type = CRYPTO_ALG_TYPE_AHASH,
2848 .alg.hash = {
497f2e6b 2849 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2850 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2851 .halg.base = {
2852 .cra_name = "md5",
2853 .cra_driver_name = "md5-talitos",
b3988618 2854 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
b8aa7dc5
MP
2855 .cra_flags = CRYPTO_ALG_ASYNC |
2856 CRYPTO_ALG_ALLOCATES_MEMORY,
497f2e6b
LN
2857 }
2858 },
2859 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2860 DESC_HDR_SEL0_MDEUA |
2861 DESC_HDR_MODE0_MDEU_MD5,
2862 },
2863 { .type = CRYPTO_ALG_TYPE_AHASH,
2864 .alg.hash = {
497f2e6b 2865 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2866 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2867 .halg.base = {
2868 .cra_name = "sha1",
2869 .cra_driver_name = "sha1-talitos",
2870 .cra_blocksize = SHA1_BLOCK_SIZE,
b8aa7dc5
MP
2871 .cra_flags = CRYPTO_ALG_ASYNC |
2872 CRYPTO_ALG_ALLOCATES_MEMORY,
497f2e6b
LN
2873 }
2874 },
2875 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2876 DESC_HDR_SEL0_MDEUA |
2877 DESC_HDR_MODE0_MDEU_SHA1,
2878 },
60f208d7
KP
2879 { .type = CRYPTO_ALG_TYPE_AHASH,
2880 .alg.hash = {
60f208d7 2881 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2882 .halg.statesize = sizeof(struct talitos_export_state),
60f208d7
KP
2883 .halg.base = {
2884 .cra_name = "sha224",
2885 .cra_driver_name = "sha224-talitos",
2886 .cra_blocksize = SHA224_BLOCK_SIZE,
b8aa7dc5
MP
2887 .cra_flags = CRYPTO_ALG_ASYNC |
2888 CRYPTO_ALG_ALLOCATES_MEMORY,
60f208d7
KP
2889 }
2890 },
2891 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2892 DESC_HDR_SEL0_MDEUA |
2893 DESC_HDR_MODE0_MDEU_SHA224,
2894 },
497f2e6b
LN
2895 { .type = CRYPTO_ALG_TYPE_AHASH,
2896 .alg.hash = {
497f2e6b 2897 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2898 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2899 .halg.base = {
2900 .cra_name = "sha256",
2901 .cra_driver_name = "sha256-talitos",
2902 .cra_blocksize = SHA256_BLOCK_SIZE,
b8aa7dc5
MP
2903 .cra_flags = CRYPTO_ALG_ASYNC |
2904 CRYPTO_ALG_ALLOCATES_MEMORY,
497f2e6b
LN
2905 }
2906 },
2907 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2908 DESC_HDR_SEL0_MDEUA |
2909 DESC_HDR_MODE0_MDEU_SHA256,
2910 },
2911 { .type = CRYPTO_ALG_TYPE_AHASH,
2912 .alg.hash = {
497f2e6b 2913 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2914 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2915 .halg.base = {
2916 .cra_name = "sha384",
2917 .cra_driver_name = "sha384-talitos",
2918 .cra_blocksize = SHA384_BLOCK_SIZE,
b8aa7dc5
MP
2919 .cra_flags = CRYPTO_ALG_ASYNC |
2920 CRYPTO_ALG_ALLOCATES_MEMORY,
497f2e6b
LN
2921 }
2922 },
2923 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2924 DESC_HDR_SEL0_MDEUB |
2925 DESC_HDR_MODE0_MDEUB_SHA384,
2926 },
2927 { .type = CRYPTO_ALG_TYPE_AHASH,
2928 .alg.hash = {
497f2e6b 2929 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2930 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2931 .halg.base = {
2932 .cra_name = "sha512",
2933 .cra_driver_name = "sha512-talitos",
2934 .cra_blocksize = SHA512_BLOCK_SIZE,
b8aa7dc5
MP
2935 .cra_flags = CRYPTO_ALG_ASYNC |
2936 CRYPTO_ALG_ALLOCATES_MEMORY,
497f2e6b
LN
2937 }
2938 },
2939 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2940 DESC_HDR_SEL0_MDEUB |
2941 DESC_HDR_MODE0_MDEUB_SHA512,
2942 },
79b3a418
LN
2943 { .type = CRYPTO_ALG_TYPE_AHASH,
2944 .alg.hash = {
79b3a418 2945 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2946 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2947 .halg.base = {
2948 .cra_name = "hmac(md5)",
2949 .cra_driver_name = "hmac-md5-talitos",
b3988618 2950 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
b8aa7dc5
MP
2951 .cra_flags = CRYPTO_ALG_ASYNC |
2952 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
2953 }
2954 },
2955 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2956 DESC_HDR_SEL0_MDEUA |
2957 DESC_HDR_MODE0_MDEU_MD5,
2958 },
2959 { .type = CRYPTO_ALG_TYPE_AHASH,
2960 .alg.hash = {
79b3a418 2961 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2962 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2963 .halg.base = {
2964 .cra_name = "hmac(sha1)",
2965 .cra_driver_name = "hmac-sha1-talitos",
2966 .cra_blocksize = SHA1_BLOCK_SIZE,
b8aa7dc5
MP
2967 .cra_flags = CRYPTO_ALG_ASYNC |
2968 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
2969 }
2970 },
2971 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2972 DESC_HDR_SEL0_MDEUA |
2973 DESC_HDR_MODE0_MDEU_SHA1,
2974 },
2975 { .type = CRYPTO_ALG_TYPE_AHASH,
2976 .alg.hash = {
79b3a418 2977 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2978 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2979 .halg.base = {
2980 .cra_name = "hmac(sha224)",
2981 .cra_driver_name = "hmac-sha224-talitos",
2982 .cra_blocksize = SHA224_BLOCK_SIZE,
b8aa7dc5
MP
2983 .cra_flags = CRYPTO_ALG_ASYNC |
2984 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
2985 }
2986 },
2987 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2988 DESC_HDR_SEL0_MDEUA |
2989 DESC_HDR_MODE0_MDEU_SHA224,
2990 },
2991 { .type = CRYPTO_ALG_TYPE_AHASH,
2992 .alg.hash = {
79b3a418 2993 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2994 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2995 .halg.base = {
2996 .cra_name = "hmac(sha256)",
2997 .cra_driver_name = "hmac-sha256-talitos",
2998 .cra_blocksize = SHA256_BLOCK_SIZE,
b8aa7dc5
MP
2999 .cra_flags = CRYPTO_ALG_ASYNC |
3000 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
3001 }
3002 },
3003 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3004 DESC_HDR_SEL0_MDEUA |
3005 DESC_HDR_MODE0_MDEU_SHA256,
3006 },
3007 { .type = CRYPTO_ALG_TYPE_AHASH,
3008 .alg.hash = {
79b3a418 3009 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 3010 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3011 .halg.base = {
3012 .cra_name = "hmac(sha384)",
3013 .cra_driver_name = "hmac-sha384-talitos",
3014 .cra_blocksize = SHA384_BLOCK_SIZE,
b8aa7dc5
MP
3015 .cra_flags = CRYPTO_ALG_ASYNC |
3016 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
3017 }
3018 },
3019 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3020 DESC_HDR_SEL0_MDEUB |
3021 DESC_HDR_MODE0_MDEUB_SHA384,
3022 },
3023 { .type = CRYPTO_ALG_TYPE_AHASH,
3024 .alg.hash = {
79b3a418 3025 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 3026 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3027 .halg.base = {
3028 .cra_name = "hmac(sha512)",
3029 .cra_driver_name = "hmac-sha512-talitos",
3030 .cra_blocksize = SHA512_BLOCK_SIZE,
b8aa7dc5
MP
3031 .cra_flags = CRYPTO_ALG_ASYNC |
3032 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
3033 }
3034 },
3035 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3036 DESC_HDR_SEL0_MDEUB |
3037 DESC_HDR_MODE0_MDEUB_SHA512,
3038 }
9c4a7965
KP
3039};
3040
3041struct talitos_crypto_alg {
3042 struct list_head entry;
3043 struct device *dev;
acbf7c62 3044 struct talitos_alg_template algt;
9c4a7965
KP
3045};
3046
89d124cb
JE
3047static int talitos_init_common(struct talitos_ctx *ctx,
3048 struct talitos_crypto_alg *talitos_alg)
9c4a7965 3049{
5228f0f7 3050 struct talitos_private *priv;
9c4a7965
KP
3051
3052 /* update context with ptr to dev */
3053 ctx->dev = talitos_alg->dev;
19bbbc63 3054
5228f0f7
KP
3055 /* assign SEC channel to tfm in round-robin fashion */
3056 priv = dev_get_drvdata(ctx->dev);
3057 ctx->ch = atomic_inc_return(&priv->last_chan) &
3058 (priv->num_channels - 1);
3059
9c4a7965 3060 /* copy descriptor header template value */
acbf7c62 3061 ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
9c4a7965 3062
602dba5a
KP
3063 /* select done notification */
3064 ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
3065
497f2e6b
LN
3066 return 0;
3067}
3068
373960d7 3069static int talitos_cra_init_aead(struct crypto_aead *tfm)
89d124cb 3070{
373960d7 3071 struct aead_alg *alg = crypto_aead_alg(tfm);
89d124cb 3072 struct talitos_crypto_alg *talitos_alg;
373960d7 3073 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
89d124cb 3074
373960d7
AB
3075 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3076 algt.alg.aead);
89d124cb
JE
3077
3078 return talitos_init_common(ctx, talitos_alg);
3079}
3080
373960d7 3081static int talitos_cra_init_skcipher(struct crypto_skcipher *tfm)
497f2e6b 3082{
373960d7 3083 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
89d124cb 3084 struct talitos_crypto_alg *talitos_alg;
373960d7 3085 struct talitos_ctx *ctx = crypto_skcipher_ctx(tfm);
89d124cb
JE
3086
3087 talitos_alg = container_of(alg, struct talitos_crypto_alg,
373960d7 3088 algt.alg.skcipher);
89d124cb
JE
3089
3090 return talitos_init_common(ctx, talitos_alg);
9c4a7965
KP
3091}
3092
497f2e6b
LN
3093static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3094{
373960d7
AB
3095 struct crypto_alg *alg = tfm->__crt_alg;
3096 struct talitos_crypto_alg *talitos_alg;
497f2e6b
LN
3097 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3098
373960d7
AB
3099 talitos_alg = container_of(__crypto_ahash_alg(alg),
3100 struct talitos_crypto_alg,
3101 algt.alg.hash);
497f2e6b
LN
3102
3103 ctx->keylen = 0;
3104 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3105 sizeof(struct talitos_ahash_req_ctx));
3106
373960d7 3107 return talitos_init_common(ctx, talitos_alg);
497f2e6b
LN
3108}
3109
2e13ce08
LC
3110static void talitos_cra_exit(struct crypto_tfm *tfm)
3111{
3112 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3113 struct device *dev = ctx->dev;
3114
3115 if (ctx->keylen)
3116 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3117}
3118
9c4a7965
KP
3119/*
3120 * given the alg's descriptor header template, determine whether descriptor
3121 * type and primary/secondary execution units required match the hw
3122 * capabilities description provided in the device tree node.
3123 */
3124static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3125{
3126 struct talitos_private *priv = dev_get_drvdata(dev);
3127 int ret;
3128
3129 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3130 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3131
3132 if (SECONDARY_EU(desc_hdr_template))
3133 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3134 & priv->exec_units);
3135
3136 return ret;
3137}
3138
2dc11581 3139static int talitos_remove(struct platform_device *ofdev)
9c4a7965
KP
3140{
3141 struct device *dev = &ofdev->dev;
3142 struct talitos_private *priv = dev_get_drvdata(dev);
3143 struct talitos_crypto_alg *t_alg, *n;
3144 int i;
3145
3146 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
acbf7c62 3147 switch (t_alg->algt.type) {
373960d7
AB
3148 case CRYPTO_ALG_TYPE_SKCIPHER:
3149 crypto_unregister_skcipher(&t_alg->algt.alg.skcipher);
acbf7c62 3150 break;
aeb4c132
HX
3151 case CRYPTO_ALG_TYPE_AEAD:
3152 crypto_unregister_aead(&t_alg->algt.alg.aead);
5fc194ea 3153 break;
acbf7c62
LN
3154 case CRYPTO_ALG_TYPE_AHASH:
3155 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3156 break;
3157 }
9c4a7965 3158 list_del(&t_alg->entry);
9c4a7965
KP
3159 }
3160
3161 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3162 talitos_unregister_rng(dev);
3163
c3e337f8 3164 for (i = 0; i < 2; i++)
2cdba3cf 3165 if (priv->irq[i]) {
c3e337f8
KP
3166 free_irq(priv->irq[i], dev);
3167 irq_dispose_mapping(priv->irq[i]);
3168 }
9c4a7965 3169
c3e337f8 3170 tasklet_kill(&priv->done_task[0]);
2cdba3cf 3171 if (priv->irq[1])
c3e337f8 3172 tasklet_kill(&priv->done_task[1]);
9c4a7965 3173
9c4a7965
KP
3174 return 0;
3175}
3176
3177static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3178 struct talitos_alg_template
3179 *template)
3180{
60f208d7 3181 struct talitos_private *priv = dev_get_drvdata(dev);
9c4a7965
KP
3182 struct talitos_crypto_alg *t_alg;
3183 struct crypto_alg *alg;
3184
24b92ff2
LC
3185 t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
3186 GFP_KERNEL);
9c4a7965
KP
3187 if (!t_alg)
3188 return ERR_PTR(-ENOMEM);
3189
acbf7c62
LN
3190 t_alg->algt = *template;
3191
3192 switch (t_alg->algt.type) {
373960d7
AB
3193 case CRYPTO_ALG_TYPE_SKCIPHER:
3194 alg = &t_alg->algt.alg.skcipher.base;
2e13ce08 3195 alg->cra_exit = talitos_cra_exit;
373960d7
AB
3196 t_alg->algt.alg.skcipher.init = talitos_cra_init_skcipher;
3197 t_alg->algt.alg.skcipher.setkey =
3198 t_alg->algt.alg.skcipher.setkey ?: skcipher_setkey;
3199 t_alg->algt.alg.skcipher.encrypt = skcipher_encrypt;
3200 t_alg->algt.alg.skcipher.decrypt = skcipher_decrypt;
43a942d2
CL
                if (!strcmp(alg->cra_name, "ctr(aes)") && !has_ftr_sec1(priv) &&
                    DESC_TYPE(t_alg->algt.desc_hdr_template) !=
                    DESC_TYPE(DESC_HDR_TYPE_AESU_CTR_NONSNOOP)) {
                        devm_kfree(dev, t_alg);
                        return ERR_PTR(-ENOTSUPP);
                }
                break;
        case CRYPTO_ALG_TYPE_AEAD:
                alg = &t_alg->algt.alg.aead.base;
                alg->cra_exit = talitos_cra_exit;
                t_alg->algt.alg.aead.init = talitos_cra_init_aead;
                t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
                                              aead_setkey;
                t_alg->algt.alg.aead.encrypt = aead_encrypt;
                t_alg->algt.alg.aead.decrypt = aead_decrypt;
                if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
                    !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
                        devm_kfree(dev, t_alg);
                        return ERR_PTR(-ENOTSUPP);
                }
                break;
        case CRYPTO_ALG_TYPE_AHASH:
                alg = &t_alg->algt.alg.hash.halg.base;
                alg->cra_init = talitos_cra_init_ahash;
                alg->cra_exit = talitos_cra_exit;
                t_alg->algt.alg.hash.init = ahash_init;
                t_alg->algt.alg.hash.update = ahash_update;
                t_alg->algt.alg.hash.final = ahash_final;
                t_alg->algt.alg.hash.finup = ahash_finup;
                t_alg->algt.alg.hash.digest = ahash_digest;
                if (!strncmp(alg->cra_name, "hmac", 4))
                        t_alg->algt.alg.hash.setkey = ahash_setkey;
                t_alg->algt.alg.hash.import = ahash_import;
                t_alg->algt.alg.hash.export = ahash_export;

                if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
                    !strncmp(alg->cra_name, "hmac", 4)) {
                        devm_kfree(dev, t_alg);
                        return ERR_PTR(-ENOTSUPP);
                }
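                /*
                 * without TALITOS_FTR_SHA224_HWINIT the MDEU cannot load the
                 * SHA-224 initial state itself: seed the state from software
                 * (ahash_init_sha224_swinit) and run the unit in SHA-256 mode.
                 */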
                if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
                    (!strcmp(alg->cra_name, "sha224") ||
                     !strcmp(alg->cra_name, "hmac(sha224)"))) {
                        t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
                        t_alg->algt.desc_hdr_template =
                                        DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
                                        DESC_HDR_SEL0_MDEUA |
                                        DESC_HDR_MODE0_MDEU_SHA256;
                }
                break;
        default:
                dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
                devm_kfree(dev, t_alg);
                return ERR_PTR(-EINVAL);
        }

        alg->cra_module = THIS_MODULE;
        if (t_alg->algt.priority)
                alg->cra_priority = t_alg->algt.priority;
        else
                alg->cra_priority = TALITOS_CRA_PRIORITY;
        if (has_ftr_sec1(priv))
                alg->cra_alignmask = 3;
        else
                alg->cra_alignmask = 0;
        alg->cra_ctxsize = sizeof(struct talitos_ctx);
        alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;

        t_alg->dev = dev;

        return t_alg;
}

static int talitos_probe_irq(struct platform_device *ofdev)
{
        struct device *dev = &ofdev->dev;
        struct device_node *np = ofdev->dev.of_node;
        struct talitos_private *priv = dev_get_drvdata(dev);
        int err;
        bool is_sec1 = has_ftr_sec1(priv);

        priv->irq[0] = irq_of_parse_and_map(np, 0);
        if (!priv->irq[0]) {
                dev_err(dev, "failed to map irq\n");
                return -EINVAL;
        }
        if (is_sec1) {
                err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
                                  dev_driver_string(dev), dev);
                goto primary_out;
        }

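        /*
         * SEC2+ controllers may supply a second interrupt line; when present,
         * irq[0] serves channels 0 and 2 and irq[1] serves channels 1 and 3,
         * otherwise the single line covers all four channels.
         */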
        priv->irq[1] = irq_of_parse_and_map(np, 1);

        /* get the primary irq line */
        if (!priv->irq[1]) {
                err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
                                  dev_driver_string(dev), dev);
                goto primary_out;
        }

        err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
                          dev_driver_string(dev), dev);
        if (err)
                goto primary_out;

        /* get the secondary irq line */
        err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
                          dev_driver_string(dev), dev);
        if (err) {
                dev_err(dev, "failed to request secondary irq\n");
                irq_dispose_mapping(priv->irq[1]);
                priv->irq[1] = 0;
        }

        return err;

primary_out:
        if (err) {
                dev_err(dev, "failed to request primary irq\n");
                irq_dispose_mapping(priv->irq[0]);
                priv->irq[0] = 0;
        }

        return err;
}

static int talitos_probe(struct platform_device *ofdev)
{
        struct device *dev = &ofdev->dev;
        struct device_node *np = ofdev->dev.of_node;
        struct talitos_private *priv;
        int i, err;
        int stride;
        struct resource *res;

        priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
        if (!priv)
                return -ENOMEM;

        INIT_LIST_HEAD(&priv->alg_list);

        dev_set_drvdata(dev, priv);

        priv->ofdev = ofdev;

        spin_lock_init(&priv->reg_lock);

        res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
        if (!res)
                return -ENXIO;
        priv->reg = devm_ioremap(dev, res->start, resource_size(res));
        if (!priv->reg) {
                dev_err(dev, "failed to ioremap\n");
                err = -ENOMEM;
                goto err_out;
        }

        /* get SEC version capabilities from device tree */
        of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
        of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
        of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
        of_property_read_u32(np, "fsl,descriptor-types-mask",
                             &priv->desc_types);

        if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
            !priv->exec_units || !priv->desc_types) {
                dev_err(dev, "invalid property data in device tree node\n");
                err = -EINVAL;
                goto err_out;
        }

        if (of_device_is_compatible(np, "fsl,sec3.0"))
                priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;

        if (of_device_is_compatible(np, "fsl,sec2.1"))
                priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
                                  TALITOS_FTR_SHA224_HWINIT |
                                  TALITOS_FTR_HMAC_OK;

        if (of_device_is_compatible(np, "fsl,sec1.0"))
                priv->features |= TALITOS_FTR_SEC1;

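        /*
         * the execution-unit register blocks sit at different offsets on
         * each SEC generation, as does the per-channel register stride, so
         * pick the layout from the compatible string.
         */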
        if (of_device_is_compatible(np, "fsl,sec1.2")) {
                priv->reg_deu = priv->reg + TALITOS12_DEU;
                priv->reg_aesu = priv->reg + TALITOS12_AESU;
                priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
                stride = TALITOS1_CH_STRIDE;
        } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
                priv->reg_deu = priv->reg + TALITOS10_DEU;
                priv->reg_aesu = priv->reg + TALITOS10_AESU;
                priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
                priv->reg_afeu = priv->reg + TALITOS10_AFEU;
                priv->reg_rngu = priv->reg + TALITOS10_RNGU;
                priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
                stride = TALITOS1_CH_STRIDE;
        } else {
                priv->reg_deu = priv->reg + TALITOS2_DEU;
                priv->reg_aesu = priv->reg + TALITOS2_AESU;
                priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
                priv->reg_afeu = priv->reg + TALITOS2_AFEU;
                priv->reg_rngu = priv->reg + TALITOS2_RNGU;
                priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
                priv->reg_keu = priv->reg + TALITOS2_KEU;
                priv->reg_crcu = priv->reg + TALITOS2_CRCU;
                stride = TALITOS2_CH_STRIDE;
        }

        err = talitos_probe_irq(ofdev);
        if (err)
                goto err_out;

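        /*
         * completion ("done") processing runs in tasklets whose layout
         * mirrors the interrupt wiring: one per irq line, or a single
         * tasklet covering every channel.
         */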
        if (has_ftr_sec1(priv)) {
                if (priv->num_channels == 1)
                        tasklet_init(&priv->done_task[0], talitos1_done_ch0,
                                     (unsigned long)dev);
                else
                        tasklet_init(&priv->done_task[0], talitos1_done_4ch,
                                     (unsigned long)dev);
        } else {
                if (priv->irq[1]) {
                        tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
                                     (unsigned long)dev);
                        tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
                                     (unsigned long)dev);
                } else if (priv->num_channels == 1) {
                        tasklet_init(&priv->done_task[0], talitos2_done_ch0,
                                     (unsigned long)dev);
                } else {
                        tasklet_init(&priv->done_task[0], talitos2_done_4ch,
                                     (unsigned long)dev);
                }
        }

        priv->chan = devm_kcalloc(dev,
                                  priv->num_channels,
                                  sizeof(struct talitos_channel),
                                  GFP_KERNEL);
        if (!priv->chan) {
                dev_err(dev, "failed to allocate channel management space\n");
                err = -ENOMEM;
                goto err_out;
        }

        priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);

        for (i = 0; i < priv->num_channels; i++) {
                priv->chan[i].reg = priv->reg + stride * (i + 1);
                if (!priv->irq[1] || !(i & 1))
                        priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;

                spin_lock_init(&priv->chan[i].head_lock);
                spin_lock_init(&priv->chan[i].tail_lock);

                priv->chan[i].fifo = devm_kcalloc(dev,
                                                  priv->fifo_len,
                                                  sizeof(struct talitos_request),
                                                  GFP_KERNEL);
                if (!priv->chan[i].fifo) {
                        dev_err(dev, "failed to allocate request fifo %d\n", i);
                        err = -ENOMEM;
                        goto err_out;
                }

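                /*
                 * start the per-channel submit counter negative so that it
                 * reaches zero once chfifo_len - 1 requests are outstanding;
                 * the submit path uses this to detect a full channel fifo.
                 */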
                atomic_set(&priv->chan[i].submit_count,
                           -(priv->chfifo_len - 1));
        }

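        /*
         * descriptor pointers can carry a 36-bit bus address (the upper bits
         * go in the extended-pointer field on SEC2+), so widen the DMA mask
         * accordingly.
         */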
3469 dma_set_mask(dev, DMA_BIT_MASK(36));
3470
9c4a7965
KP
3471 /* reset and initialize the h/w */
3472 err = init_device(dev);
3473 if (err) {
3474 dev_err(dev, "failed to initialize device\n");
3475 goto err_out;
3476 }
3477
3478 /* register the RNG, if available */
3479 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3480 err = talitos_register_rng(dev);
3481 if (err) {
3482 dev_err(dev, "failed to register hwrng: %d\n", err);
3483 goto err_out;
3484 } else
3485 dev_info(dev, "hwrng\n");
3486 }
3487
3488 /* register crypto algorithms the device supports */
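        /*
         * templates that talitos_alg_alloc() rejects with -ENOTSUPP are
         * simply skipped; any other failure aborts the probe.
         */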
        for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
                if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
                        struct talitos_crypto_alg *t_alg;
                        struct crypto_alg *alg = NULL;

                        t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
                        if (IS_ERR(t_alg)) {
                                err = PTR_ERR(t_alg);
                                if (err == -ENOTSUPP)
                                        continue;
                                goto err_out;
                        }

                        switch (t_alg->algt.type) {
                        case CRYPTO_ALG_TYPE_SKCIPHER:
                                err = crypto_register_skcipher(
                                                &t_alg->algt.alg.skcipher);
                                alg = &t_alg->algt.alg.skcipher.base;
                                break;

                        case CRYPTO_ALG_TYPE_AEAD:
                                err = crypto_register_aead(
                                                &t_alg->algt.alg.aead);
                                alg = &t_alg->algt.alg.aead.base;
                                break;

                        case CRYPTO_ALG_TYPE_AHASH:
                                err = crypto_register_ahash(
                                                &t_alg->algt.alg.hash);
                                alg = &t_alg->algt.alg.hash.halg.base;
                                break;
                        }
                        if (err) {
                                dev_err(dev, "%s alg registration failed\n",
                                        alg->cra_driver_name);
                                devm_kfree(dev, t_alg);
                        } else
                                list_add_tail(&t_alg->entry, &priv->alg_list);
                }
        }
        if (!list_empty(&priv->alg_list))
                dev_info(dev, "%s algorithms registered in /proc/crypto\n",
                         (char *)of_get_property(np, "compatible", NULL));

        return 0;

err_out:
        talitos_remove(ofdev);

        return err;
}

static const struct of_device_id talitos_match[] = {
#ifdef CONFIG_CRYPTO_DEV_TALITOS1
        {
                .compatible = "fsl,sec1.0",
        },
#endif
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
        {
                .compatible = "fsl,sec2.0",
        },
#endif
        {},
};
MODULE_DEVICE_TABLE(of, talitos_match);

static struct platform_driver talitos_driver = {
        .driver = {
                .name = "talitos",
                .of_match_table = talitos_match,
        },
        .probe = talitos_probe,
        .remove = talitos_remove,
};

module_platform_driver(talitos_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");