/*
 * talitos - Freescale Integrated Security Engine (SEC) device driver
 *
 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
 *
 * Scatterlist Crypto API glue code copied from files with the following:
 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Crypto algorithm registration code copied from hifn driver:
 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
 * All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/crypto.h>
#include <linux/hw_random.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>

#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/md5.h>
#include <crypto/internal/aead.h>
#include <crypto/authenc.h>
#include <crypto/skcipher.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>

#include "talitos.h"

static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
			   unsigned int len, bool is_sec1)
{
	ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
	if (is_sec1) {
		ptr->len1 = cpu_to_be16(len);
	} else {
		ptr->len = cpu_to_be16(len);
		ptr->eptr = upper_32_bits(dma_addr);
	}
}

static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
			     struct talitos_ptr *src_ptr, bool is_sec1)
{
	dst_ptr->ptr = src_ptr->ptr;
	if (is_sec1) {
		dst_ptr->len1 = src_ptr->len1;
	} else {
		dst_ptr->len = src_ptr->len;
		dst_ptr->eptr = src_ptr->eptr;
	}
}

static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
					   bool is_sec1)
{
	if (is_sec1)
		return be16_to_cpu(ptr->len1);
	else
		return be16_to_cpu(ptr->len);
}

static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
				   bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent = val;
}

static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent |= val;
}
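
/*
 * Note (added for clarity, not from the hardware manual): the helpers above
 * hide the two descriptor-pointer layouts.  On SEC1 a pointer carries only a
 * 16-bit length (len1) and a 32-bit address, while on SEC2/3 it also carries
 * an extent/jump byte (j_extent) and an extended-address byte (eptr) used for
 * 36-bit addressing and link-table chaining.
 */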

/*
 * map virtual single (contiguous) pointer to h/w descriptor pointer
 */
static void __map_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     unsigned int len, void *data,
				     enum dma_data_direction dir,
				     unsigned long attrs)
{
	dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	to_talitos_ptr(ptr, dma_addr, len, is_sec1);
}

static void map_single_talitos_ptr(struct device *dev,
				   struct talitos_ptr *ptr,
				   unsigned int len, void *data,
				   enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
}

static void map_single_talitos_ptr_nosync(struct device *dev,
					  struct talitos_ptr *ptr,
					  unsigned int len, void *data,
					  enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir,
				 DMA_ATTR_SKIP_CPU_SYNC);
}

/*
 * unmap bus single (contiguous) h/w descriptor pointer
 */
static void unmap_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     enum dma_data_direction dir)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
			 from_talitos_ptr_len(ptr, is_sec1), dir);
}

static int reset_channel(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1) {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS1_CCCR_LO_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
			TALITOS1_CCCR_LO_RESET) && --timeout)
			cpu_relax();
	} else {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR,
			  TALITOS2_CCCR_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			TALITOS2_CCCR_RESET) && --timeout)
			cpu_relax();
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset channel %d\n", ch);
		return -EIO;
	}

	/* set 36-bit addressing, done writeback enable and done IRQ enable */
	setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
		  TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
	/* enable chaining descriptors */
	if (is_sec1)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_NE);

	/* and ICCR writeback, if available */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_IWSE);

	return 0;
}

static int reset_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);
	u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;

	setbits32(priv->reg + TALITOS_MCR, mcr);

	while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
	       && --timeout)
		cpu_relax();

	if (priv->irq[1]) {
		mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
		setbits32(priv->reg + TALITOS_MCR, mcr);
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset device\n");
		return -EIO;
	}

	return 0;
}

/*
 * Reset and initialize the device
 */
static int init_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int ch, err;
	bool is_sec1 = has_ftr_sec1(priv);

	/*
	 * Master reset
	 * errata documentation: warning: certain SEC interrupts
	 * are not fully cleared by writing the MCR:SWR bit,
	 * set bit twice to completely reset
	 */
	err = reset_device(dev);
	if (err)
		return err;

	err = reset_device(dev);
	if (err)
		return err;

	/* reset channels */
	for (ch = 0; ch < priv->num_channels; ch++) {
		err = reset_channel(dev, ch);
		if (err)
			return err;
	}

	/* enable channel done and error interrupts */
	if (is_sec1) {
		clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
		clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
		/* disable parity error check in DEU (erroneous? test vect.) */
		setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
	} else {
		setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
		setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
	}

	/* disable integrity check error interrupts (use writeback instead) */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
			  TALITOS_MDEUICR_LO_ICE);

	return 0;
}

/**
 * talitos_submit - submits a descriptor to the device for processing
 * @dev:	the SEC device to be used
 * @ch:		the SEC device channel to be used
 * @desc:	the descriptor to be processed by the device
 * @callback:	whom to call when processing is complete
 * @context:	a handle for use by caller (optional)
 *
 * desc must contain valid dma-mapped (bus physical) address pointers.
 * callback must check err and feedback in descriptor header
 * for device processing status.
 */
int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
		   void (*callback)(struct device *dev,
				    struct talitos_desc *desc,
				    void *context, int error),
		   void *context)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request;
	unsigned long flags;
	int head;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].head_lock, flags);

	if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
		/* h/w fifo is full */
		spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
		return -EAGAIN;
	}

	head = priv->chan[ch].head;
	request = &priv->chan[ch].fifo[head];

	/* map descriptor and save caller data */
	if (is_sec1) {
		desc->hdr1 = desc->hdr;
		request->dma_desc = dma_map_single(dev, &desc->hdr1,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	} else {
		request->dma_desc = dma_map_single(dev, desc,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	}
	request->callback = callback;
	request->context = context;

	/* increment fifo head */
	priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);

	smp_wmb();
	request->desc = desc;

	/* GO! */
	wmb();
	out_be32(priv->chan[ch].reg + TALITOS_FF,
		 upper_32_bits(request->dma_desc));
	out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
		 lower_32_bits(request->dma_desc));

	spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);

	return -EINPROGRESS;
}
EXPORT_SYMBOL(talitos_submit);
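
/*
 * Illustrative usage sketch (not part of the driver): callers such as
 * ipsec_esp() and common_nonsnoop() below queue work roughly like this,
 * treating -EINPROGRESS as success and anything else as "unmap and free":
 *
 *	err = talitos_submit(dev, ctx->ch, &edesc->desc, my_done_cb, req);
 *	if (err != -EINPROGRESS) {
 *		... unmap DMA resources ...
 *		kfree(edesc);
 *	}
 *
 * my_done_cb() is a hypothetical completion callback; it is invoked from the
 * done tasklet via flush_channel() and must inspect the descriptor header
 * feedback bits for the device processing status.
 */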

/*
 * process what was done, notify callback of error if not
 */
static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request, saved_req;
	unsigned long flags;
	int tail, status;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);

	tail = priv->chan[ch].tail;
	while (priv->chan[ch].fifo[tail].desc) {
		__be32 hdr;

		request = &priv->chan[ch].fifo[tail];

		/* descriptors with their done bits set don't get the error */
		rmb();
		if (!is_sec1)
			hdr = request->desc->hdr;
		else if (request->desc->next_desc)
			hdr = (request->desc + 1)->hdr1;
		else
			hdr = request->desc->hdr1;

		if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
			status = 0;
		else
			if (!error)
				break;
			else
				status = error;

		dma_unmap_single(dev, request->dma_desc,
				 TALITOS_DESC_SIZE,
				 DMA_BIDIRECTIONAL);

		/* copy entries so we can call callback outside lock */
		saved_req.desc = request->desc;
		saved_req.callback = request->callback;
		saved_req.context = request->context;

		/* release request entry in fifo */
		smp_wmb();
		request->desc = NULL;

		/* increment fifo tail */
		priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);

		spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);

		atomic_dec(&priv->chan[ch].submit_count);

		saved_req.callback(dev, saved_req.desc, saved_req.context,
				   status);
		/* channel may resume processing in single desc error case */
		if (error && !reset_ch && status == error)
			return;
		spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
		tail = priv->chan[ch].tail;
	}

	spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
}

/*
 * process completed requests for channels that have done status
 */
#define DEF_TALITOS1_DONE(name, ch_done_mask) \
static void talitos1_done_##name(unsigned long data) \
{ \
	struct device *dev = (struct device *)data; \
	struct talitos_private *priv = dev_get_drvdata(dev); \
	unsigned long flags; \
 \
	if (ch_done_mask & 0x10000000) \
		flush_channel(dev, 0, 0, 0); \
	if (ch_done_mask & 0x40000000) \
		flush_channel(dev, 1, 0, 0); \
	if (ch_done_mask & 0x00010000) \
		flush_channel(dev, 2, 0, 0); \
	if (ch_done_mask & 0x00040000) \
		flush_channel(dev, 3, 0, 0); \
 \
	/* At this point, all completed channels have been processed */ \
	/* Unmask done interrupts for channels completed later on. */ \
	spin_lock_irqsave(&priv->reg_lock, flags); \
	clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
	clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT); \
	spin_unlock_irqrestore(&priv->reg_lock, flags); \
}

DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)

#define DEF_TALITOS2_DONE(name, ch_done_mask) \
static void talitos2_done_##name(unsigned long data) \
{ \
	struct device *dev = (struct device *)data; \
	struct talitos_private *priv = dev_get_drvdata(dev); \
	unsigned long flags; \
 \
	if (ch_done_mask & 1) \
		flush_channel(dev, 0, 0, 0); \
	if (ch_done_mask & (1 << 2)) \
		flush_channel(dev, 1, 0, 0); \
	if (ch_done_mask & (1 << 4)) \
		flush_channel(dev, 2, 0, 0); \
	if (ch_done_mask & (1 << 6)) \
		flush_channel(dev, 3, 0, 0); \
 \
	/* At this point, all completed channels have been processed */ \
	/* Unmask done interrupts for channels completed later on. */ \
	spin_lock_irqsave(&priv->reg_lock, flags); \
	setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
	setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT); \
	spin_unlock_irqrestore(&priv->reg_lock, flags); \
}

DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
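
/*
 * For reference (added comment, not generated code): each DEF_TALITOS*_DONE()
 * instance above expands to a tasklet body; e.g. DEF_TALITOS2_DONE(4ch, ...)
 * produces a function roughly equivalent to:
 *
 *	static void talitos2_done_4ch(unsigned long data)
 *	{
 *		struct device *dev = (struct device *)data;
 *		...
 *		if (TALITOS2_ISR_4CHDONE & 1)
 *			flush_channel(dev, 0, 0, 0);
 *		...
 *		...unmask done interrupts under priv->reg_lock...
 *	}
 */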

/*
 * locate current (offending) descriptor
 */
static u32 current_desc_hdr(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int tail, iter;
	dma_addr_t cur_desc;

	cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
	cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);

	if (!cur_desc) {
		dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
		return 0;
	}

	tail = priv->chan[ch].tail;

	iter = tail;
	while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
	       priv->chan[ch].fifo[iter].desc->next_desc != cur_desc) {
		iter = (iter + 1) & (priv->fifo_len - 1);
		if (iter == tail) {
			dev_err(dev, "couldn't locate current descriptor\n");
			return 0;
		}
	}

	if (priv->chan[ch].fifo[iter].desc->next_desc == cur_desc)
		return (priv->chan[ch].fifo[iter].desc + 1)->hdr;

	return priv->chan[ch].fifo[iter].desc->hdr;
}

/*
 * user diagnostics; report root cause of error based on execution unit status
 */
static void report_eu_error(struct device *dev, int ch, u32 desc_hdr)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int i;

	if (!desc_hdr)
		desc_hdr = in_be32(priv->chan[ch].reg + TALITOS_DESCBUF);

	switch (desc_hdr & DESC_HDR_SEL0_MASK) {
	case DESC_HDR_SEL0_AFEU:
		dev_err(dev, "AFEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_afeu + TALITOS_EUISR),
			in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_DEU:
		dev_err(dev, "DEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_deu + TALITOS_EUISR),
			in_be32(priv->reg_deu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_MDEUA:
	case DESC_HDR_SEL0_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_RNG:
		dev_err(dev, "RNGUISR 0x%08x_%08x\n",
			in_be32(priv->reg_rngu + TALITOS_ISR),
			in_be32(priv->reg_rngu + TALITOS_ISR_LO));
		break;
	case DESC_HDR_SEL0_PKEU:
		dev_err(dev, "PKEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_AESU:
		dev_err(dev, "AESUISR 0x%08x_%08x\n",
			in_be32(priv->reg_aesu + TALITOS_EUISR),
			in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_KEU:
		dev_err(dev, "KEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	}

	switch (desc_hdr & DESC_HDR_SEL1_MASK) {
	case DESC_HDR_SEL1_MDEUA:
	case DESC_HDR_SEL1_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL1_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	}

	for (i = 0; i < 8; i++)
		dev_err(dev, "DESCBUF 0x%08x_%08x\n",
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
}

/*
 * recover from error interrupts
 */
static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	int ch, error, reset_dev = 0;
	u32 v_lo;
	bool is_sec1 = has_ftr_sec1(priv);
	int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */

	for (ch = 0; ch < priv->num_channels; ch++) {
		/* skip channels without errors */
		if (is_sec1) {
			/* bits 29, 31, 17, 19 */
			if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
				continue;
		} else {
			if (!(isr & (1 << (ch * 2 + 1))))
				continue;
		}

		error = -EINVAL;

		v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);

		if (v_lo & TALITOS_CCPSR_LO_DOF) {
			dev_err(dev, "double fetch fifo overflow error\n");
			error = -EAGAIN;
			reset_ch = 1;
		}
		if (v_lo & TALITOS_CCPSR_LO_SOF) {
			/* h/w dropped descriptor */
			dev_err(dev, "single fetch fifo overflow error\n");
			error = -EAGAIN;
		}
		if (v_lo & TALITOS_CCPSR_LO_MDTE)
			dev_err(dev, "master data transfer error\n");
		if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
			dev_err(dev, is_sec1 ? "pointer not complete error\n"
					     : "s/g data length zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_FPZ)
			dev_err(dev, is_sec1 ? "parity error\n"
					     : "fetch pointer zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_IDH)
			dev_err(dev, "illegal descriptor header error\n");
		if (v_lo & TALITOS_CCPSR_LO_IEU)
			dev_err(dev, is_sec1 ? "static assignment error\n"
					     : "invalid exec unit error\n");
		if (v_lo & TALITOS_CCPSR_LO_EU)
			report_eu_error(dev, ch, current_desc_hdr(dev, ch));
		if (!is_sec1) {
			if (v_lo & TALITOS_CCPSR_LO_GB)
				dev_err(dev, "gather boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_GRL)
				dev_err(dev, "gather return/length error\n");
			if (v_lo & TALITOS_CCPSR_LO_SB)
				dev_err(dev, "scatter boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_SRL)
				dev_err(dev, "scatter return/length error\n");
		}

		flush_channel(dev, ch, error, reset_ch);

		if (reset_ch) {
			reset_channel(dev, ch);
		} else {
			setbits32(priv->chan[ch].reg + TALITOS_CCCR,
				  TALITOS2_CCCR_CONT);
			setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
			while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
				TALITOS2_CCCR_CONT) && --timeout)
				cpu_relax();
			if (timeout == 0) {
				dev_err(dev, "failed to restart channel %d\n",
					ch);
				reset_dev = 1;
			}
		}
	}
	if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
	    (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
		if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
			dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
				isr, isr_lo);
		else
			dev_err(dev, "done overflow, internal time out, or "
				"rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);

		/* purge request queues */
		for (ch = 0; ch < priv->num_channels; ch++)
			flush_channel(dev, ch, -EIO, 1);

		/* reset and reinitialize the device */
		init_device(dev);
	}
}
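
/*
 * Added note: for SEC1 the per-channel error bits tested above work out as
 * 29 + (ch & 1) * 2 - (ch & 2) * 6, i.e. bit 29 for channel 0, bit 31 for
 * channel 1, bit 17 for channel 2 and bit 19 for channel 3, matching the
 * "bits 29, 31, 17, 19" comment in talitos_error().
 */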

#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
static irqreturn_t talitos1_interrupt_##name(int irq, void *data) \
{ \
	struct device *dev = data; \
	struct talitos_private *priv = dev_get_drvdata(dev); \
	u32 isr, isr_lo; \
	unsigned long flags; \
 \
	spin_lock_irqsave(&priv->reg_lock, flags); \
	isr = in_be32(priv->reg + TALITOS_ISR); \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
	/* Acknowledge interrupt */ \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
 \
	if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) { \
		spin_unlock_irqrestore(&priv->reg_lock, flags); \
		talitos_error(dev, isr & ch_err_mask, isr_lo); \
	} \
	else { \
		if (likely(isr & ch_done_mask)) { \
			/* mask further done interrupts. */ \
			setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
			/* done_task will unmask done interrupts at exit */ \
			tasklet_schedule(&priv->done_task[tlet]); \
		} \
		spin_unlock_irqrestore(&priv->reg_lock, flags); \
	} \
 \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
								IRQ_NONE; \
}

DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)

#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
static irqreturn_t talitos2_interrupt_##name(int irq, void *data) \
{ \
	struct device *dev = data; \
	struct talitos_private *priv = dev_get_drvdata(dev); \
	u32 isr, isr_lo; \
	unsigned long flags; \
 \
	spin_lock_irqsave(&priv->reg_lock, flags); \
	isr = in_be32(priv->reg + TALITOS_ISR); \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
	/* Acknowledge interrupt */ \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
 \
	if (unlikely(isr & ch_err_mask || isr_lo)) { \
		spin_unlock_irqrestore(&priv->reg_lock, flags); \
		talitos_error(dev, isr & ch_err_mask, isr_lo); \
	} \
	else { \
		if (likely(isr & ch_done_mask)) { \
			/* mask further done interrupts. */ \
			clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
			/* done_task will unmask done interrupts at exit */ \
			tasklet_schedule(&priv->done_task[tlet]); \
		} \
		spin_unlock_irqrestore(&priv->reg_lock, flags); \
	} \
 \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
								IRQ_NONE; \
}

DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
		       0)
DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
		       1)

/*
 * hwrng
 */
static int talitos_rng_data_present(struct hwrng *rng, int wait)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	u32 ofl;
	int i;

	for (i = 0; i < 20; i++) {
		ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
		      TALITOS_RNGUSR_LO_OFL;
		if (ofl || !wait)
			break;
		udelay(10);
	}

	return !!ofl;
}

static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);

	/* rng fifo requires 64-bit accesses */
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);

	return sizeof(u32);
}

static int talitos_rng_init(struct hwrng *rng)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;

	setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
	while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
		 & TALITOS_RNGUSR_LO_RD)
	       && --timeout)
		cpu_relax();
	if (timeout == 0) {
		dev_err(dev, "failed to reset rng hw\n");
		return -ENODEV;
	}

	/* start generating */
	setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);

	return 0;
}

static int talitos_register_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int err;

	priv->rng.name		= dev_driver_string(dev),
	priv->rng.init		= talitos_rng_init,
	priv->rng.data_present	= talitos_rng_data_present,
	priv->rng.data_read	= talitos_rng_data_read,
	priv->rng.priv		= (unsigned long)dev;

	err = hwrng_register(&priv->rng);
	if (!err)
		priv->rng_registered = true;

	return err;
}

static void talitos_unregister_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);

	if (!priv->rng_registered)
		return;

	hwrng_unregister(&priv->rng);
	priv->rng_registered = false;
}

/*
 * crypto alg
 */
#define TALITOS_CRA_PRIORITY		3000
/*
 * Defines a priority for doing AEAD with descriptors type
 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
 */
#define TALITOS_CRA_PRIORITY_AEAD_HSNA	(TALITOS_CRA_PRIORITY - 1)
#ifdef CONFIG_CRYPTO_DEV_TALITOS_SEC2
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
#else
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
#endif
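/*
 * Added note: with AES_MAX_KEY_SIZE = 32 this evaluates to 32 + 128 = 160
 * bytes when SEC2/SEC3 support is built in (SHA512 block size) and to
 * 32 + 64 = 96 bytes for SEC1-only builds (SHA256 block size); the smaller
 * SEC1 limit is presumably sufficient because SEC1 hardware does not
 * implement SHA-384/512 and so never needs the larger HMAC block size.
 */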
#define TALITOS_MAX_IV_LENGTH		16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */

struct talitos_ctx {
	struct device *dev;
	int ch;
	__be32 desc_hdr_template;
	u8 key[TALITOS_MAX_KEY_SIZE];
	u8 iv[TALITOS_MAX_IV_LENGTH];
	dma_addr_t dma_key;
	unsigned int keylen;
	unsigned int enckeylen;
	unsigned int authkeylen;
};

#define HASH_MAX_BLOCK_SIZE		SHA512_BLOCK_SIZE
#define TALITOS_MDEU_MAX_CONTEXT_SIZE	TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512

struct talitos_ahash_req_ctx {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	unsigned int hw_context_size;
	u8 buf[2][HASH_MAX_BLOCK_SIZE];
	int buf_idx;
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
	struct scatterlist bufsl[2];
	struct scatterlist *psrc;
};

struct talitos_export_state {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	u8 buf[HASH_MAX_BLOCK_SIZE];
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
};

static int aead_setkey(struct crypto_aead *authenc,
		       const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
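
/*
 * Added note: after a successful aead_setkey() the ctx->key buffer holds the
 * two keys back to back, [ authkey (authkeylen) | enckey (enckeylen) ], and
 * ctx->dma_key maps that whole region once; ipsec_esp() later points ptr[0]
 * at the HMAC key and the cipher-key pointer at dma_key + authkeylen.
 */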

static int aead_des3_setkey(struct crypto_aead *authenc,
			    const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;
	u32 flags;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto badkey;

	err = -EINVAL;
	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	if (keys.enckeylen != DES3_EDE_KEY_SIZE)
		goto badkey;

	flags = crypto_aead_get_flags(authenc);
	err = __des3_verify_key(&flags, keys.enckey);
	if (unlikely(err)) {
		crypto_aead_set_flags(authenc, flags);
		goto out;
	}

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;

badkey:
	crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
	goto out;
}

/*
 * talitos_edesc - s/w-extended descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @icv_ool: whether ICV is out-of-line
 * @iv_dma: dma address of iv for checking continuity and link table
 * @dma_len: length of dma mapped link_tbl space
 * @dma_link_tbl: bus physical address of link_tbl/buf
 * @desc: h/w descriptor
 * @link_tbl: input and output h/w link tables (if {src,dst}_nents > 1) (SEC2)
 * @buf: input and output buffer (if {src,dst}_nents > 1) (SEC1)
 *
 * if decrypting (with authcheck), or either one of src_nents or dst_nents
 * is greater than 1, an integrity check value is concatenated to the end
 * of link_tbl data
 */
struct talitos_edesc {
	int src_nents;
	int dst_nents;
	bool icv_ool;
	dma_addr_t iv_dma;
	int dma_len;
	dma_addr_t dma_link_tbl;
	struct talitos_desc desc;
	union {
		struct talitos_ptr link_tbl[0];
		u8 buf[0];
	};
};
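
/*
 * Added note: talitos_edesc_alloc() below lays out the allocation as the
 * struct itself, then dma_len bytes of link tables (SEC2+) or bounce buffer
 * (SEC1), with the per-request IV copied into the last ivsize bytes and
 * mapped separately as iv_dma.
 */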

static void talitos_sg_unmap(struct device *dev,
			     struct talitos_edesc *edesc,
			     struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int len, unsigned int offset)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	unsigned int src_nents = edesc->src_nents ? : 1;
	unsigned int dst_nents = edesc->dst_nents ? : 1;

	if (is_sec1 && dst && dst_nents > 1) {
		dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
					   len, DMA_FROM_DEVICE);
		sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
				     offset);
	}
	if (src != dst) {
		if (src_nents == 1 || !is_sec1)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);

		if (dst && (dst_nents == 1 || !is_sec1))
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else if (src_nents == 1 || !is_sec1) {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}

static void ipsec_esp_unmap(struct device *dev,
			    struct talitos_edesc *edesc,
			    struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];

	if (is_ipsec_esp)
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
					 DMA_FROM_DEVICE);
	unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen,
			 areq->assoclen);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (!is_ipsec_esp) {
		unsigned int dst_nents = edesc->dst_nents ? : 1;

		sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
				   areq->assoclen + areq->cryptlen - ivsize);
	}
}

/*
 * ipsec_esp descriptor callbacks
 */
static void ipsec_esp_encrypt_done(struct device *dev,
				   struct talitos_desc *desc, void *context,
				   int err)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	struct aead_request *areq = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(authenc);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	struct talitos_edesc *edesc;
	struct scatterlist *sg;
	void *icvdata;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, areq);

	/* copy the generated ICV to dst */
	if (edesc->icv_ool) {
		if (is_sec1)
			icvdata = edesc->buf + areq->assoclen + areq->cryptlen;
		else
			icvdata = &edesc->link_tbl[edesc->src_nents +
						   edesc->dst_nents + 2];
		sg = sg_last(areq->dst, edesc->dst_nents);
		memcpy((char *)sg_virt(sg) + sg->length - authsize,
		       icvdata, authsize);
	}

	dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);

	kfree(edesc);

	aead_request_complete(areq, err);
}

static void ipsec_esp_decrypt_swauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_edesc *edesc;
	struct scatterlist *sg;
	char *oicv, *icv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req);

	if (!err) {
		/* auth check */
		sg = sg_last(req->dst, edesc->dst_nents ? : 1);
		icv = (char *)sg_virt(sg) + sg->length - authsize;

		if (edesc->dma_len) {
			if (is_sec1)
				oicv = (char *)&edesc->dma_link_tbl +
				       req->assoclen + req->cryptlen;
			else
				oicv = (char *)
				       &edesc->link_tbl[edesc->src_nents +
							edesc->dst_nents + 2];
			if (edesc->icv_ool)
				icv = oicv + authsize;
		} else
			oicv = (char *)&edesc->link_tbl[0];

		err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
	}

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req);

	/* check ICV auth status */
	if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
		     DESC_HDR_LO_ICCR1_PASS))
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
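
/*
 * Added note: the two decrypt completion paths above correspond to the two
 * ICV strategies chosen in aead_decrypt(): hardware checking (hwauth, the
 * engine compares the ICV itself and reports the result through the ICCR1
 * bits in the descriptor header) and software checking (swauth, the incoming
 * ICV is stashed at allocation time and compared here with crypto_memneq()).
 */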

/*
 * convert scatterlist to SEC h/w link table format
 * stop at cryptlen bytes
 */
static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
				 unsigned int offset, int cryptlen,
				 struct talitos_ptr *link_tbl_ptr)
{
	int n_sg = sg_count;
	int count = 0;

	while (cryptlen && sg && n_sg--) {
		unsigned int len = sg_dma_len(sg);

		if (offset >= len) {
			offset -= len;
			goto next;
		}

		len -= offset;

		if (len > cryptlen)
			len = cryptlen;

		to_talitos_ptr(link_tbl_ptr + count,
			       sg_dma_address(sg) + offset, len, 0);
		to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
		count++;
		cryptlen -= len;
		offset = 0;

next:
		sg = sg_next(sg);
	}

	/* tag end of link table */
	if (count > 0)
		to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
				       DESC_PTR_LNKTBL_RETURN, 0);

	return count;
}

static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
			      unsigned int len, struct talitos_edesc *edesc,
			      struct talitos_ptr *ptr, int sg_count,
			      unsigned int offset, int tbl_off, int elen)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	if (!src) {
		to_talitos_ptr(ptr, 0, 0, is_sec1);
		return 1;
	}
	to_talitos_ptr_ext_set(ptr, elen, is_sec1);
	if (sg_count == 1) {
		to_talitos_ptr(ptr, sg_dma_address(src) + offset, len, is_sec1);
		return sg_count;
	}
	if (is_sec1) {
		to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, len, is_sec1);
		return sg_count;
	}
	sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len + elen,
					 &edesc->link_tbl[tbl_off]);
	if (sg_count == 1) {
		/* Only one segment now, so no link tbl needed */
		copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
		return sg_count;
	}
	to_talitos_ptr(ptr, edesc->dma_link_tbl +
			    tbl_off * sizeof(struct talitos_ptr), len, is_sec1);
	to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);

	return sg_count;
}

static int talitos_sg_map(struct device *dev, struct scatterlist *src,
			  unsigned int len, struct talitos_edesc *edesc,
			  struct talitos_ptr *ptr, int sg_count,
			  unsigned int offset, int tbl_off)
{
	return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
				  tbl_off, 0);
}
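
/*
 * Added note: the elen ("extent length") argument of talitos_sg_map_ext() is
 * only non-zero on the IPsec ESP decrypt-with-hardware-ICV path, where the
 * authsize bytes appended to the ciphertext must be covered by the cipher-in
 * pointer as extent data; talitos_sg_map() is the common elen == 0 wrapper.
 */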

/*
 * fill in and submit ipsec_esp descriptor
 */
static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
		     void (*callback)(struct device *dev,
				      struct talitos_desc *desc,
				      void *context, int error))
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->cryptlen;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int tbl_off = 0;
	int sg_count, ret;
	int elen = 0;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
	struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];

	/* hmac key */
	to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  areq->assoclen + cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);

	/* hmac data */
	ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
			     &desc->ptr[1], sg_count, 0, tbl_off);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher iv */
	to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
		       ctx->enckeylen, is_sec1);

	/*
	 * cipher in
	 * map and adjust cipher len to aead request cryptlen.
	 * extent is bytes of HMAC postpended to ciphertext,
	 * typically 12 for ipsec
	 */
	if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
		elen = authsize;

	ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
				 sg_count, areq->assoclen, tbl_off, elen);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
			     sg_count, areq->assoclen, tbl_off);

	if (is_ipsec_esp)
		to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);

	/* ICV data */
	if (ret > 1) {
		tbl_off += ret;
		edesc->icv_ool = true;
		sync_needed = true;

		if (is_ipsec_esp) {
			struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
			int offset = (edesc->src_nents + edesc->dst_nents + 2) *
				     sizeof(struct talitos_ptr) + authsize;

			/* Add an entry to the link table for ICV data */
			to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
			to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RETURN,
					       is_sec1);

			/* icv data follows link tables */
			to_talitos_ptr(tbl_ptr, edesc->dma_link_tbl + offset,
				       authsize, is_sec1);
		} else {
			dma_addr_t addr = edesc->dma_link_tbl;

			if (is_sec1)
				addr += areq->assoclen + cryptlen;
			else
				addr += sizeof(struct talitos_ptr) * tbl_off;

			to_talitos_ptr(&desc->ptr[6], addr, authsize, is_sec1);
		}
	} else if (!is_ipsec_esp) {
		ret = talitos_sg_map(dev, areq->dst, authsize, edesc,
				     &desc->ptr[6], sg_count, areq->assoclen +
						    cryptlen,
				     tbl_off);
		if (ret > 1) {
			tbl_off += ret;
			edesc->icv_ool = true;
			sync_needed = true;
		} else {
			edesc->icv_ool = false;
		}
	} else {
		edesc->icv_ool = false;
	}

	/* iv out */
	if (is_ipsec_esp)
		map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
				       DMA_FROM_DEVICE);

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len,
					   DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		ipsec_esp_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}

/*
 * allocate and map the extended descriptor
 */
static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
						 struct scatterlist *src,
						 struct scatterlist *dst,
						 u8 *iv,
						 unsigned int assoclen,
						 unsigned int cryptlen,
						 unsigned int authsize,
						 unsigned int ivsize,
						 int icv_stashing,
						 u32 cryptoflags,
						 bool encrypt)
{
	struct talitos_edesc *edesc;
	int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
	dma_addr_t iv_dma = 0;
	gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		      GFP_ATOMIC;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;

	if (cryptlen + authsize > max_len) {
		dev_err(dev, "length exceeds h/w max limit\n");
		return ERR_PTR(-EINVAL);
	}

	if (!dst || dst == src) {
		src_len = assoclen + cryptlen + authsize;
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_nents = dst ? src_nents : 0;
		dst_len = 0;
	} else { /* dst && dst != src */
		src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
		dst_nents = sg_nents_for_len(dst, dst_len);
		if (dst_nents < 0) {
			dev_err(dev, "Invalid number of dst SG.\n");
			return ERR_PTR(-EINVAL);
		}
		dst_nents = (dst_nents == 1) ? 0 : dst_nents;
	}

	/*
	 * allocate space for base edesc plus the link tables,
	 * allowing for two separate entries for AD and generated ICV (+ 2),
	 * and space for two sets of ICVs (stashed and generated)
	 */
	alloc_len = sizeof(struct talitos_edesc);
	if (src_nents || dst_nents) {
		if (is_sec1)
			dma_len = (src_nents ? src_len : 0) +
				  (dst_nents ? dst_len : 0);
		else
			dma_len = (src_nents + dst_nents + 2) *
				  sizeof(struct talitos_ptr) + authsize * 2;
		alloc_len += dma_len;
	} else {
		dma_len = 0;
		alloc_len += icv_stashing ? authsize : 0;
	}

	/* if it's an ahash, add space for a second desc next to the first one */
	if (is_sec1 && !dst)
		alloc_len += sizeof(struct talitos_desc);
	alloc_len += ivsize;

	edesc = kmalloc(alloc_len, GFP_DMA | flags);
	if (!edesc)
		return ERR_PTR(-ENOMEM);
	if (ivsize) {
		iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
		iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
	}
	memset(&edesc->desc, 0, sizeof(edesc->desc));

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->dma_len = dma_len;
	if (dma_len) {
		void *addr = &edesc->link_tbl[0];

		if (is_sec1 && !dst)
			addr += sizeof(struct talitos_desc);
		edesc->dma_link_tbl = dma_map_single(dev, addr,
						     edesc->dma_len,
						     DMA_BIDIRECTIONAL);
	}
	return edesc;
}

static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
					      int icv_stashing, bool encrypt)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	unsigned int ivsize = crypto_aead_ivsize(authenc);

	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
				   iv, areq->assoclen, areq->cryptlen,
				   authsize, ivsize, icv_stashing,
				   areq->base.flags, encrypt);
}

static int aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_edesc *edesc;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 0, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* set encrypt */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;

	return ipsec_esp(edesc, req, ipsec_esp_encrypt_done);
}

static int aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
	struct talitos_edesc *edesc;
	struct scatterlist *sg;
	void *icvdata;

	req->cryptlen -= authsize;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 1, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	if ((priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
	    ((!edesc->src_nents && !edesc->dst_nents) ||
	     priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {

		/* decrypt and check the ICV */
		edesc->desc.hdr = ctx->desc_hdr_template |
				  DESC_HDR_DIR_INBOUND |
				  DESC_HDR_MODE1_MDEU_CICV;

		/* reset integrity check result bits */

		return ipsec_esp(edesc, req, ipsec_esp_decrypt_hwauth_done);
	}

	/* Have to check the ICV with software */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;

	/* stash incoming ICV for later cmp with ICV generated by the h/w */
	if (edesc->dma_len)
		icvdata = (char *)&edesc->link_tbl[edesc->src_nents +
						   edesc->dst_nents + 2];
	else
		icvdata = &edesc->link_tbl[0];

	sg = sg_last(req->src, edesc->src_nents ? : 1);

	memcpy(icvdata, (char *)sg_virt(sg) + sg->length - authsize, authsize);

	return ipsec_esp(edesc, req, ipsec_esp_decrypt_swauth_done);
}

static int ablkcipher_setkey(struct crypto_ablkcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct device *dev = ctx->dev;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(&ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);

	return 0;
}

static int ablkcipher_des_setkey(struct crypto_ablkcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	u32 tmp[DES_EXPKEY_WORDS];

	if (unlikely(crypto_ablkcipher_get_flags(cipher) &
		     CRYPTO_TFM_REQ_FORBID_WEAK_KEYS) &&
	    !des_ekey(tmp, key)) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_WEAK_KEY);
		return -EINVAL;
	}

	return ablkcipher_setkey(cipher, key, keylen);
}

static int ablkcipher_des3_setkey(struct crypto_ablkcipher *cipher,
				  const u8 *key, unsigned int keylen)
{
	u32 flags;
	int err;

	flags = crypto_ablkcipher_get_flags(cipher);
	err = __des3_verify_key(&flags, key);
	if (unlikely(err)) {
		crypto_ablkcipher_set_flags(cipher, flags);
		return err;
	}

	return ablkcipher_setkey(cipher, key, keylen);
}

static void common_nonsnoop_unmap(struct device *dev,
				  struct talitos_edesc *edesc,
				  struct ablkcipher_request *areq)
{
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);
}

static void ablkcipher_done(struct device *dev,
			    struct talitos_desc *desc, void *context,
			    int err)
{
	struct ablkcipher_request *areq = context;
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	common_nonsnoop_unmap(dev, edesc, areq);
	memcpy(areq->info, ctx->iv, ivsize);

	kfree(edesc);

	areq->base.complete(&areq->base, err);
}
1658
1659static int common_nonsnoop(struct talitos_edesc *edesc,
1660 struct ablkcipher_request *areq,
4de9d0b5
LN
1661 void (*callback) (struct device *dev,
1662 struct talitos_desc *desc,
1663 void *context, int error))
1664{
1665 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1666 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1667 struct device *dev = ctx->dev;
1668 struct talitos_desc *desc = &edesc->desc;
1669 unsigned int cryptlen = areq->nbytes;
79fd31d3 1670 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
4de9d0b5 1671 int sg_count, ret;
6a1e8d14 1672 bool sync_needed = false;
922f9dc8
LC
1673 struct talitos_private *priv = dev_get_drvdata(dev);
1674 bool is_sec1 = has_ftr_sec1(priv);
4de9d0b5
LN
1675
1676 /* first DWORD empty */
4de9d0b5
LN
1677
1678 /* cipher iv */
da9de146 1679 to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);
4de9d0b5
LN
1680
1681 /* cipher key */
2e13ce08 1682 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);
4de9d0b5 1683
6a1e8d14
LC
1684 sg_count = edesc->src_nents ?: 1;
1685 if (is_sec1 && sg_count > 1)
1686 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1687 cryptlen);
1688 else
1689 sg_count = dma_map_sg(dev, areq->src, sg_count,
1690 (areq->src == areq->dst) ?
1691 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
4de9d0b5
LN
1692 /*
1693 * cipher in
1694 */
6a1e8d14
LC
1695 sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
1696 &desc->ptr[3], sg_count, 0, 0);
1697 if (sg_count > 1)
1698 sync_needed = true;
4de9d0b5
LN
1699
1700 /* cipher out */
6a1e8d14
LC
1701 if (areq->src != areq->dst) {
1702 sg_count = edesc->dst_nents ? : 1;
1703 if (!is_sec1 || sg_count == 1)
1704 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1705 }
1706
1707 ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
1708 sg_count, 0, (edesc->src_nents + 1));
1709 if (ret > 1)
1710 sync_needed = true;
4de9d0b5
LN
1711
1712 /* iv out */
a2b35aa8 1713 map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
4de9d0b5
LN
1714 DMA_FROM_DEVICE);
1715
1716 /* last DWORD empty */
4de9d0b5 1717
6a1e8d14
LC
1718 if (sync_needed)
1719 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1720 edesc->dma_len, DMA_BIDIRECTIONAL);
1721
5228f0f7 1722 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
4de9d0b5
LN
1723 if (ret != -EINPROGRESS) {
1724 common_nonsnoop_unmap(dev, edesc, areq);
1725 kfree(edesc);
1726 }
1727 return ret;
1728}
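For reference, the seven pointer slots of the non-snooping cipher descriptor assembled above map as follows (a summary of the code, using the same names):

    /*
     * ptr[0] - empty (first DWORD)
     * ptr[1] - cipher IV in     (edesc->iv_dma, ivsize)
     * ptr[2] - cipher key       (ctx->dma_key, ctx->keylen)
     * ptr[3] - cipher data in   (areq->src, cryptlen)
     * ptr[4] - cipher data out  (areq->dst, cryptlen)
     * ptr[5] - IV out           (ctx->iv, copied back to areq->info on completion)
     * ptr[6] - empty (last DWORD)
     */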
1729
e938e465 1730static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request *
62293a37 1731 areq, bool encrypt)
4de9d0b5
LN
1732{
1733 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1734 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
79fd31d3 1735 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
4de9d0b5 1736
aeb4c132 1737 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
79fd31d3 1738 areq->info, 0, areq->nbytes, 0, ivsize, 0,
62293a37 1739 areq->base.flags, encrypt);
4de9d0b5
LN
1740}
1741
1742static int ablkcipher_encrypt(struct ablkcipher_request *areq)
1743{
1744 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1745 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1746 struct talitos_edesc *edesc;
1747
1748 /* allocate extended descriptor */
62293a37 1749 edesc = ablkcipher_edesc_alloc(areq, true);
4de9d0b5
LN
1750 if (IS_ERR(edesc))
1751 return PTR_ERR(edesc);
1752
1753 /* set encrypt */
1754 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1755
febec542 1756 return common_nonsnoop(edesc, areq, ablkcipher_done);
4de9d0b5
LN
1757}
1758
1759static int ablkcipher_decrypt(struct ablkcipher_request *areq)
1760{
1761 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1762 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1763 struct talitos_edesc *edesc;
1764
1765 /* allocate extended descriptor */
62293a37 1766 edesc = ablkcipher_edesc_alloc(areq, false);
4de9d0b5
LN
1767 if (IS_ERR(edesc))
1768 return PTR_ERR(edesc);
1769
1770 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1771
febec542 1772 return common_nonsnoop(edesc, areq, ablkcipher_done);
4de9d0b5
LN
1773}
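The encrypt/decrypt entry points above are only reached through the generic kernel crypto API. A minimal consumer-side sketch (not part of the driver) that can end up bound to cbc-aes-talitos when this driver is selected; the function name is hypothetical and error handling is abbreviated:

    #include <crypto/skcipher.h>
    #include <linux/scatterlist.h>

    /* Hypothetical consumer: one-shot CBC-AES encryption via the generic API. */
    static int example_cbc_aes_encrypt(const u8 *key, unsigned int keylen,
                                       struct scatterlist *src,
                                       struct scatterlist *dst,
                                       unsigned int len, u8 *iv)
    {
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        DECLARE_CRYPTO_WAIT(wait);
        int err;

        tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
        if (IS_ERR(tfm))
            return PTR_ERR(tfm);

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
            crypto_free_skcipher(tfm);
            return -ENOMEM;
        }

        err = crypto_skcipher_setkey(tfm, key, keylen);
        if (!err) {
            skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                          crypto_req_done, &wait);
            skcipher_request_set_crypt(req, src, dst, len, iv);
            /* returns -EINPROGRESS for this async driver; wait for completion */
            err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
        }

        skcipher_request_free(req);
        crypto_free_skcipher(tfm);
        return err;
    }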
1774
497f2e6b
LN
1775static void common_nonsnoop_hash_unmap(struct device *dev,
1776 struct talitos_edesc *edesc,
1777 struct ahash_request *areq)
1778{
1779 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
ad4cd51f
LC
1780 struct talitos_private *priv = dev_get_drvdata(dev);
1781 bool is_sec1 = has_ftr_sec1(priv);
1782 struct talitos_desc *desc = &edesc->desc;
1783 struct talitos_desc *desc2 = desc + 1;
1784
1785 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1786 if (desc->next_desc &&
1787 desc->ptr[5].ptr != desc2->ptr[5].ptr)
1788 unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
497f2e6b 1789
6a1e8d14 1790 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
032d197e 1791
ad4cd51f
LC
1792 /* When using hashctx-in, must unmap it. */
1793 if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
1794 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
1795 DMA_TO_DEVICE);
1796 else if (desc->next_desc)
1797 unmap_single_talitos_ptr(dev, &desc2->ptr[1],
1798 DMA_TO_DEVICE);
1799
1800 if (is_sec1 && req_ctx->nbuf)
1801 unmap_single_talitos_ptr(dev, &desc->ptr[3],
1802 DMA_TO_DEVICE);
1803
497f2e6b
LN
1804 if (edesc->dma_len)
1805 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1806 DMA_BIDIRECTIONAL);
1807
37b5e889
LC
1808 if (edesc->desc.next_desc)
1809 dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
1810 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
497f2e6b
LN
1811}
1812
1813static void ahash_done(struct device *dev,
1814 struct talitos_desc *desc, void *context,
1815 int err)
1816{
1817 struct ahash_request *areq = context;
1818 struct talitos_edesc *edesc =
1819 container_of(desc, struct talitos_edesc, desc);
1820 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1821
1822 if (!req_ctx->last && req_ctx->to_hash_later) {
1823 /* Position any partial block for next update/final/finup */
3c0dd190 1824 req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
5e833bc4 1825 req_ctx->nbuf = req_ctx->to_hash_later;
497f2e6b
LN
1826 }
1827 common_nonsnoop_hash_unmap(dev, edesc, areq);
1828
1829 kfree(edesc);
1830
1831 areq->base.complete(&areq->base, err);
1832}
1833
2d02905e
LC
1834/*
1835 * SEC1 doesn't like hashing a 0-sized message, so we do the padding
1836 * ourselves and submit a padded block
1837 */
5b2cf268 1838static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
2d02905e
LC
1839 struct talitos_edesc *edesc,
1840 struct talitos_ptr *ptr)
1841{
1842 static u8 padded_hash[64] = {
1843 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1844 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1845 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1846 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1847 };
1848
1849 pr_err_once("Bug in SEC1, padding ourself\n");
1850 edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1851 map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1852 (char *)padded_hash, DMA_TO_DEVICE);
1853}
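The 64-byte padded_hash block above is just the Merkle-Damgard padding of an empty message for the 64-byte-block digests (MD5, SHA-1, SHA-224/256): one 0x80 byte, zeros, and a zero 64-bit length field. An illustrative sketch of how such a block is formed (the helper is hypothetical):

    /* Illustrative: padding of a zero-length message for 64-byte-block digests. */
    static void build_empty_msg_padding(u8 block[64])
    {
        memset(block, 0, 64);
        block[0] = 0x80;   /* the mandatory '1' padding bit */
        /* the trailing 8 bytes hold the message bit length, which is 0 here */
    }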
1854
497f2e6b
LN
1855static int common_nonsnoop_hash(struct talitos_edesc *edesc,
1856 struct ahash_request *areq, unsigned int length,
37b5e889 1857 unsigned int offset,
497f2e6b
LN
1858 void (*callback) (struct device *dev,
1859 struct talitos_desc *desc,
1860 void *context, int error))
1861{
1862 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1863 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1864 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1865 struct device *dev = ctx->dev;
1866 struct talitos_desc *desc = &edesc->desc;
032d197e 1867 int ret;
6a1e8d14 1868 bool sync_needed = false;
922f9dc8
LC
1869 struct talitos_private *priv = dev_get_drvdata(dev);
1870 bool is_sec1 = has_ftr_sec1(priv);
6a1e8d14 1871 int sg_count;
497f2e6b
LN
1872
1873 /* first DWORD empty */
497f2e6b 1874
60f208d7
KP
1875 /* hash context in */
1876 if (!req_ctx->first || req_ctx->swinit) {
6a4967c3
LC
1877 map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
1878 req_ctx->hw_context_size,
1879 req_ctx->hw_context,
1880 DMA_TO_DEVICE);
60f208d7 1881 req_ctx->swinit = 0;
497f2e6b 1882 }
afd62fa2
LC
1883 /* Indicate next op is not the first. */
1884 req_ctx->first = 0;
497f2e6b
LN
1885
1886 /* HMAC key */
1887 if (ctx->keylen)
2e13ce08
LC
1888 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
1889 is_sec1);
497f2e6b 1890
37b5e889
LC
1891 if (is_sec1 && req_ctx->nbuf)
1892 length -= req_ctx->nbuf;
1893
6a1e8d14
LC
1894 sg_count = edesc->src_nents ?: 1;
1895 if (is_sec1 && sg_count > 1)
37b5e889
LC
1896 sg_pcopy_to_buffer(req_ctx->psrc, sg_count,
1897 edesc->buf + sizeof(struct talitos_desc),
1898 length, req_ctx->nbuf);
1899 else if (length)
6a1e8d14
LC
1900 sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
1901 DMA_TO_DEVICE);
497f2e6b
LN
1902 /*
1903 * data in
1904 */
37b5e889 1905 if (is_sec1 && req_ctx->nbuf) {
ad4cd51f
LC
1906 map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
1907 req_ctx->buf[req_ctx->buf_idx],
1908 DMA_TO_DEVICE);
37b5e889
LC
1909 } else {
1910 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1911 &desc->ptr[3], sg_count, offset, 0);
1912 if (sg_count > 1)
1913 sync_needed = true;
1914 }
497f2e6b
LN
1915
1916 /* fifth DWORD empty */
497f2e6b
LN
1917
1918 /* hash/HMAC out -or- hash context out */
1919 if (req_ctx->last)
1920 map_single_talitos_ptr(dev, &desc->ptr[5],
1921 crypto_ahash_digestsize(tfm),
a2b35aa8 1922 areq->result, DMA_FROM_DEVICE);
497f2e6b 1923 else
6a4967c3
LC
1924 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1925 req_ctx->hw_context_size,
1926 req_ctx->hw_context,
1927 DMA_FROM_DEVICE);
497f2e6b
LN
1928
1929 /* last DWORD empty */
497f2e6b 1930
2d02905e
LC
1931 if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
1932 talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
1933
37b5e889
LC
1934 if (is_sec1 && req_ctx->nbuf && length) {
1935 struct talitos_desc *desc2 = desc + 1;
1936 dma_addr_t next_desc;
1937
1938 memset(desc2, 0, sizeof(*desc2));
1939 desc2->hdr = desc->hdr;
1940 desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
1941 desc2->hdr1 = desc2->hdr;
1942 desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1943 desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
1944 desc->hdr &= ~DESC_HDR_DONE_NOTIFY;
1945
ad4cd51f
LC
1946 if (desc->ptr[1].ptr)
1947 copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
1948 is_sec1);
1949 else
6a4967c3
LC
1950 map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
1951 req_ctx->hw_context_size,
1952 req_ctx->hw_context,
1953 DMA_TO_DEVICE);
37b5e889
LC
1954 copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
1955 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1956 &desc2->ptr[3], sg_count, offset, 0);
1957 if (sg_count > 1)
1958 sync_needed = true;
1959 copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
1960 if (req_ctx->last)
6a4967c3
LC
1961 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1962 req_ctx->hw_context_size,
1963 req_ctx->hw_context,
1964 DMA_FROM_DEVICE);
37b5e889
LC
1965
1966 next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
1967 DMA_BIDIRECTIONAL);
1968 desc->next_desc = cpu_to_be32(next_desc);
1969 }
1970
6a1e8d14
LC
1971 if (sync_needed)
1972 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1973 edesc->dma_len, DMA_BIDIRECTIONAL);
1974
5228f0f7 1975 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
497f2e6b
LN
1976 if (ret != -EINPROGRESS) {
1977 common_nonsnoop_hash_unmap(dev, edesc, areq);
1978 kfree(edesc);
1979 }
1980 return ret;
1981}
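On SEC1, when both previously buffered bytes and fresh scatterlist data must be hashed in one request, the code above chains a second descriptor through desc->next_desc. A schematic summary of that chain (same names as the code):

    /*
     * desc   ptr[3] = buffered bytes      hdr: MDEU_CONT set, PAD cleared,
     *   |                                      DONE_NOTIFY cleared
     *   +-> next_desc
     *
     * desc2  ptr[3] = scatterlist data    hdr: MDEU_INIT cleared, PAD kept
     *                                          when this is the last piece,
     *                                          completion is notified here
     */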
1982
1983static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1984 unsigned int nbytes)
1985{
1986 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1987 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1988 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
37b5e889
LC
1989 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1990 bool is_sec1 = has_ftr_sec1(priv);
1991
1992 if (is_sec1)
1993 nbytes -= req_ctx->nbuf;
497f2e6b 1994
aeb4c132 1995 return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
62293a37 1996 nbytes, 0, 0, 0, areq->base.flags, false);
497f2e6b
LN
1997}
1998
1999static int ahash_init(struct ahash_request *areq)
2000{
2001 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
6a4967c3
LC
2002 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2003 struct device *dev = ctx->dev;
497f2e6b 2004 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
49f9783b 2005 unsigned int size;
6a4967c3 2006 dma_addr_t dma;
497f2e6b
LN
2007
2008 /* Initialize the context */
3c0dd190 2009 req_ctx->buf_idx = 0;
5e833bc4 2010 req_ctx->nbuf = 0;
60f208d7
KP
2011 req_ctx->first = 1; /* first indicates h/w must init its context */
2012 req_ctx->swinit = 0; /* assume h/w init of context */
49f9783b 2013 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
497f2e6b
LN
2014 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2015 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 2016 req_ctx->hw_context_size = size;
497f2e6b 2017
6a4967c3
LC
2018 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2019 DMA_TO_DEVICE);
2020 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
2021
497f2e6b
LN
2022 return 0;
2023}
2024
60f208d7
KP
2025/*
2026 * on h/w without explicit sha224 support, we initialize h/w context
2027 * manually with sha224 constants, and tell it to run sha256.
2028 */
2029static int ahash_init_sha224_swinit(struct ahash_request *areq)
2030{
2031 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2032
a752447a
KP
2033 req_ctx->hw_context[0] = SHA224_H0;
2034 req_ctx->hw_context[1] = SHA224_H1;
2035 req_ctx->hw_context[2] = SHA224_H2;
2036 req_ctx->hw_context[3] = SHA224_H3;
2037 req_ctx->hw_context[4] = SHA224_H4;
2038 req_ctx->hw_context[5] = SHA224_H5;
2039 req_ctx->hw_context[6] = SHA224_H6;
2040 req_ctx->hw_context[7] = SHA224_H7;
60f208d7
KP
2041
2042 /* init 64-bit count */
2043 req_ctx->hw_context[8] = 0;
2044 req_ctx->hw_context[9] = 0;
2045
6a4967c3
LC
2046 ahash_init(areq);
2047 req_ctx->swinit = 1; /* prevent h/w initting context with sha256 values */
2048
60f208d7
KP
2049 return 0;
2050}
2051
497f2e6b
LN
2052static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
2053{
2054 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2055 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2056 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2057 struct talitos_edesc *edesc;
2058 unsigned int blocksize =
2059 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2060 unsigned int nbytes_to_hash;
2061 unsigned int to_hash_later;
5e833bc4 2062 unsigned int nsg;
8e409fe1 2063 int nents;
37b5e889
LC
2064 struct device *dev = ctx->dev;
2065 struct talitos_private *priv = dev_get_drvdata(dev);
2066 bool is_sec1 = has_ftr_sec1(priv);
2067 int offset = 0;
3c0dd190 2068 u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];
497f2e6b 2069
5e833bc4
LN
2070 if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
2071 /* Buffer up to one whole block */
8e409fe1
LC
2072 nents = sg_nents_for_len(areq->src, nbytes);
2073 if (nents < 0) {
2074 dev_err(ctx->dev, "Invalid number of src SG.\n");
2075 return nents;
2076 }
2077 sg_copy_to_buffer(areq->src, nents,
3c0dd190 2078 ctx_buf + req_ctx->nbuf, nbytes);
5e833bc4 2079 req_ctx->nbuf += nbytes;
497f2e6b
LN
2080 return 0;
2081 }
2082
5e833bc4
LN
2083 /* At least (blocksize + 1) bytes are available to hash */
2084 nbytes_to_hash = nbytes + req_ctx->nbuf;
2085 to_hash_later = nbytes_to_hash & (blocksize - 1);
2086
2087 if (req_ctx->last)
2088 to_hash_later = 0;
2089 else if (to_hash_later)
2090 /* There is a partial block. Hash the full block(s) now */
2091 nbytes_to_hash -= to_hash_later;
2092 else {
2093 /* Keep one block buffered */
2094 nbytes_to_hash -= blocksize;
2095 to_hash_later = blocksize;
2096 }
2097
2098 /* Chain in any previously buffered data */
37b5e889 2099 if (!is_sec1 && req_ctx->nbuf) {
5e833bc4
LN
2100 nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
2101 sg_init_table(req_ctx->bufsl, nsg);
3c0dd190 2102 sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
5e833bc4 2103 if (nsg > 1)
c56f6d12 2104 sg_chain(req_ctx->bufsl, 2, areq->src);
497f2e6b 2105 req_ctx->psrc = req_ctx->bufsl;
37b5e889
LC
2106 } else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
2107 if (nbytes_to_hash > blocksize)
2108 offset = blocksize - req_ctx->nbuf;
2109 else
2110 offset = nbytes_to_hash - req_ctx->nbuf;
2111 nents = sg_nents_for_len(areq->src, offset);
2112 if (nents < 0) {
2113 dev_err(ctx->dev, "Invalid number of src SG.\n");
2114 return nents;
2115 }
2116 sg_copy_to_buffer(areq->src, nents,
3c0dd190 2117 ctx_buf + req_ctx->nbuf, offset);
37b5e889
LC
2118 req_ctx->nbuf += offset;
2119 req_ctx->psrc = areq->src;
5e833bc4 2120 } else
497f2e6b 2121 req_ctx->psrc = areq->src;
5e833bc4
LN
2122
2123 if (to_hash_later) {
8e409fe1
LC
2124 nents = sg_nents_for_len(areq->src, nbytes);
2125 if (nents < 0) {
2126 dev_err(ctx->dev, "Invalid number of src SG.\n");
2127 return nents;
2128 }
d0525723 2129 sg_pcopy_to_buffer(areq->src, nents,
3c0dd190 2130 req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
5e833bc4
LN
2131 to_hash_later,
2132 nbytes - to_hash_later);
497f2e6b 2133 }
5e833bc4 2134 req_ctx->to_hash_later = to_hash_later;
497f2e6b 2135
5e833bc4 2136 /* Allocate extended descriptor */
497f2e6b
LN
2137 edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
2138 if (IS_ERR(edesc))
2139 return PTR_ERR(edesc);
2140
2141 edesc->desc.hdr = ctx->desc_hdr_template;
2142
2143 /* On last one, request SEC to pad; otherwise continue */
2144 if (req_ctx->last)
2145 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
2146 else
2147 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
2148
60f208d7
KP
2149 /* request SEC to INIT hash. */
2150 if (req_ctx->first && !req_ctx->swinit)
497f2e6b
LN
2151 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
2152
2153 /* When the tfm context has a keylen, it's an HMAC.
2154 * A first or last (i.e. not middle) descriptor must request HMAC.
2155 */
2156 if (ctx->keylen && (req_ctx->first || req_ctx->last))
2157 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
2158
37b5e889 2159 return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, offset,
497f2e6b
LN
2160 ahash_done);
2161}
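The nbytes_to_hash / to_hash_later split above always keeps at least one block buffered on a non-final update so that final()/finup() still has data to pad. A minimal sketch of that arithmetic, assuming the block size is a power of two (true for every digest handled here); the helper is hypothetical. Example: blocksize 64, 10 bytes already buffered, 200 new bytes gives 192 bytes hashed now and 18 kept for later.

    /* Hypothetical sketch of the split computed inline above. */
    static void split_for_hash(unsigned int nbuf, unsigned int nbytes,
                               unsigned int blocksize, bool last,
                               unsigned int *hash_now, unsigned int *keep)
    {
        unsigned int total = nbuf + nbytes;

        *keep = last ? 0 : total & (blocksize - 1);
        if (!last && !*keep)
            *keep = blocksize;   /* always hold one block back for final */
        *hash_now = total - *keep;
    }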
2162
2163static int ahash_update(struct ahash_request *areq)
2164{
2165 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2166
2167 req_ctx->last = 0;
2168
2169 return ahash_process_req(areq, areq->nbytes);
2170}
2171
2172static int ahash_final(struct ahash_request *areq)
2173{
2174 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2175
2176 req_ctx->last = 1;
2177
2178 return ahash_process_req(areq, 0);
2179}
2180
2181static int ahash_finup(struct ahash_request *areq)
2182{
2183 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2184
2185 req_ctx->last = 1;
2186
2187 return ahash_process_req(areq, areq->nbytes);
2188}
2189
2190static int ahash_digest(struct ahash_request *areq)
2191{
2192 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
60f208d7 2193 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
497f2e6b 2194
60f208d7 2195 ahash->init(areq);
497f2e6b
LN
2196 req_ctx->last = 1;
2197
2198 return ahash_process_req(areq, areq->nbytes);
2199}
2200
3639ca84
HG
2201static int ahash_export(struct ahash_request *areq, void *out)
2202{
2203 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2204 struct talitos_export_state *export = out;
6a4967c3
LC
2205 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2206 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2207 struct device *dev = ctx->dev;
2208 dma_addr_t dma;
2209
2210 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2211 DMA_FROM_DEVICE);
2212 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);
3639ca84
HG
2213
2214 memcpy(export->hw_context, req_ctx->hw_context,
2215 req_ctx->hw_context_size);
3c0dd190 2216 memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
3639ca84
HG
2217 export->swinit = req_ctx->swinit;
2218 export->first = req_ctx->first;
2219 export->last = req_ctx->last;
2220 export->to_hash_later = req_ctx->to_hash_later;
2221 export->nbuf = req_ctx->nbuf;
2222
2223 return 0;
2224}
2225
2226static int ahash_import(struct ahash_request *areq, const void *in)
2227{
2228 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2229 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
6a4967c3
LC
2230 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2231 struct device *dev = ctx->dev;
3639ca84 2232 const struct talitos_export_state *export = in;
49f9783b 2233 unsigned int size;
6a4967c3 2234 dma_addr_t dma;
3639ca84
HG
2235
2236 memset(req_ctx, 0, sizeof(*req_ctx));
49f9783b 2237 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
3639ca84
HG
2238 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2239 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 2240 req_ctx->hw_context_size = size;
49f9783b 2241 memcpy(req_ctx->hw_context, export->hw_context, size);
3c0dd190 2242 memcpy(req_ctx->buf[0], export->buf, export->nbuf);
3639ca84
HG
2243 req_ctx->swinit = export->swinit;
2244 req_ctx->first = export->first;
2245 req_ctx->last = export->last;
2246 req_ctx->to_hash_later = export->to_hash_later;
2247 req_ctx->nbuf = export->nbuf;
2248
6a4967c3
LC
2249 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2250 DMA_TO_DEVICE);
2251 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
2252
3639ca84
HG
2253 return 0;
2254}
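ahash_export()/ahash_import() let a caller freeze a partially hashed stream and resume it later, on the same or another request. A consumer-side sketch using the generic API only (state sizing via crypto_ahash_statesize() is the standard contract, not something specific to this driver); the function name is hypothetical:

    /* Hypothetical: hash two scatterlists across an export/import boundary. */
    static int example_split_digest(struct crypto_ahash *tfm,
                                    struct scatterlist *sg1, unsigned int len1,
                                    struct scatterlist *sg2, unsigned int len2,
                                    u8 *out)
    {
        struct ahash_request *req;
        DECLARE_CRYPTO_WAIT(wait);
        void *state;
        int err = -ENOMEM;

        state = kmalloc(crypto_ahash_statesize(tfm), GFP_KERNEL);
        req = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!state || !req)
            goto out;

        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   crypto_req_done, &wait);

        ahash_request_set_crypt(req, sg1, NULL, len1);
        err = crypto_wait_req(crypto_ahash_init(req), &wait);
        if (!err)
            err = crypto_wait_req(crypto_ahash_update(req), &wait);
        if (!err)
            err = crypto_ahash_export(req, state);   /* freeze mid-stream */

        if (!err)
            err = crypto_ahash_import(req, state);   /* resume */
        if (!err) {
            ahash_request_set_crypt(req, sg2, out, len2);
            err = crypto_wait_req(crypto_ahash_finup(req), &wait);
        }
    out:
        ahash_request_free(req);
        kfree(state);
        return err;
    }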
2255
79b3a418
LN
2256static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2257 u8 *hash)
2258{
2259 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2260
2261 struct scatterlist sg[1];
2262 struct ahash_request *req;
f1c90ac3 2263 struct crypto_wait wait;
79b3a418
LN
2264 int ret;
2265
f1c90ac3 2266 crypto_init_wait(&wait);
79b3a418
LN
2267
2268 req = ahash_request_alloc(tfm, GFP_KERNEL);
2269 if (!req)
2270 return -ENOMEM;
2271
2272 /* Keep tfm keylen == 0 during hash of the long key */
2273 ctx->keylen = 0;
2274 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
f1c90ac3 2275 crypto_req_done, &wait);
79b3a418
LN
2276
2277 sg_init_one(&sg[0], key, keylen);
2278
2279 ahash_request_set_crypt(req, sg, hash, keylen);
f1c90ac3
GBY
2280 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2281
79b3a418
LN
2282 ahash_request_free(req);
2283
2284 return ret;
2285}
2286
2287static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2288 unsigned int keylen)
2289{
2290 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2e13ce08 2291 struct device *dev = ctx->dev;
79b3a418
LN
2292 unsigned int blocksize =
2293 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2294 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2295 unsigned int keysize = keylen;
2296 u8 hash[SHA512_DIGEST_SIZE];
2297 int ret;
2298
2299 if (keylen <= blocksize)
2300 memcpy(ctx->key, key, keysize);
2301 else {
2302 /* Must get the hash of the long key */
2303 ret = keyhash(tfm, key, keylen, hash);
2304
2305 if (ret) {
2306 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2307 return -EINVAL;
2308 }
2309
2310 keysize = digestsize;
2311 memcpy(ctx->key, hash, digestsize);
2312 }
2313
2e13ce08
LC
2314 if (ctx->keylen)
2315 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2316
79b3a418 2317 ctx->keylen = keysize;
2e13ce08 2318 ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
79b3a418
LN
2319
2320 return 0;
2321}
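A worked example of the long-key rule implemented above, for hmac(sha1)-talitos (blocksize 64, digestsize 20):

    /*
     * keylen = 48  -> ctx->key holds the key as-is,  ctx->keylen = 48
     * keylen = 100 -> ctx->key holds SHA-1(key),     ctx->keylen = 20
     * i.e. the standard RFC 2104 rule: keys longer than the block size are
     * first digested, everything else is used verbatim.
     */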
2322
2323
9c4a7965 2324struct talitos_alg_template {
d5e4aaef 2325 u32 type;
b0057763 2326 u32 priority;
d5e4aaef
LN
2327 union {
2328 struct crypto_alg crypto;
acbf7c62 2329 struct ahash_alg hash;
aeb4c132 2330 struct aead_alg aead;
d5e4aaef 2331 } alg;
9c4a7965
KP
2332 __be32 desc_hdr_template;
2333};
2334
2335static struct talitos_alg_template driver_algs[] = {
991155ba 2336 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
d5e4aaef 2337 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2338 .alg.aead = {
2339 .base = {
2340 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2341 .cra_driver_name = "authenc-hmac-sha1-"
2342 "cbc-aes-talitos",
2343 .cra_blocksize = AES_BLOCK_SIZE,
2344 .cra_flags = CRYPTO_ALG_ASYNC,
2345 },
2346 .ivsize = AES_BLOCK_SIZE,
2347 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2348 },
9c4a7965
KP
2349 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2350 DESC_HDR_SEL0_AESU |
2351 DESC_HDR_MODE0_AESU_CBC |
2352 DESC_HDR_SEL1_MDEUA |
2353 DESC_HDR_MODE1_MDEU_INIT |
2354 DESC_HDR_MODE1_MDEU_PAD |
2355 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
70bcaca7 2356 },
7405c8d7
LC
2357 { .type = CRYPTO_ALG_TYPE_AEAD,
2358 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2359 .alg.aead = {
2360 .base = {
2361 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2362 .cra_driver_name = "authenc-hmac-sha1-"
a1a42f84 2363 "cbc-aes-talitos-hsna",
7405c8d7
LC
2364 .cra_blocksize = AES_BLOCK_SIZE,
2365 .cra_flags = CRYPTO_ALG_ASYNC,
2366 },
2367 .ivsize = AES_BLOCK_SIZE,
2368 .maxauthsize = SHA1_DIGEST_SIZE,
2369 },
2370 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2371 DESC_HDR_SEL0_AESU |
2372 DESC_HDR_MODE0_AESU_CBC |
2373 DESC_HDR_SEL1_MDEUA |
2374 DESC_HDR_MODE1_MDEU_INIT |
2375 DESC_HDR_MODE1_MDEU_PAD |
2376 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2377 },
d5e4aaef 2378 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2379 .alg.aead = {
2380 .base = {
2381 .cra_name = "authenc(hmac(sha1),"
2382 "cbc(des3_ede))",
2383 .cra_driver_name = "authenc-hmac-sha1-"
2384 "cbc-3des-talitos",
2385 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2386 .cra_flags = CRYPTO_ALG_ASYNC,
2387 },
2388 .ivsize = DES3_EDE_BLOCK_SIZE,
2389 .maxauthsize = SHA1_DIGEST_SIZE,
ef7c5c85 2390 .setkey = aead_des3_setkey,
56af8cd4 2391 },
70bcaca7
LN
2392 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2393 DESC_HDR_SEL0_DEU |
2394 DESC_HDR_MODE0_DEU_CBC |
2395 DESC_HDR_MODE0_DEU_3DES |
2396 DESC_HDR_SEL1_MDEUA |
2397 DESC_HDR_MODE1_MDEU_INIT |
2398 DESC_HDR_MODE1_MDEU_PAD |
2399 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
3952f17e 2400 },
7405c8d7
LC
2401 { .type = CRYPTO_ALG_TYPE_AEAD,
2402 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2403 .alg.aead = {
2404 .base = {
2405 .cra_name = "authenc(hmac(sha1),"
2406 "cbc(des3_ede))",
2407 .cra_driver_name = "authenc-hmac-sha1-"
a1a42f84 2408 "cbc-3des-talitos-hsna",
7405c8d7
LC
2409 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2410 .cra_flags = CRYPTO_ALG_ASYNC,
2411 },
2412 .ivsize = DES3_EDE_BLOCK_SIZE,
2413 .maxauthsize = SHA1_DIGEST_SIZE,
ef7c5c85 2414 .setkey = aead_des3_setkey,
7405c8d7
LC
2415 },
2416 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2417 DESC_HDR_SEL0_DEU |
2418 DESC_HDR_MODE0_DEU_CBC |
2419 DESC_HDR_MODE0_DEU_3DES |
2420 DESC_HDR_SEL1_MDEUA |
2421 DESC_HDR_MODE1_MDEU_INIT |
2422 DESC_HDR_MODE1_MDEU_PAD |
2423 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2424 },
357fb605 2425 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2426 .alg.aead = {
2427 .base = {
2428 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2429 .cra_driver_name = "authenc-hmac-sha224-"
2430 "cbc-aes-talitos",
2431 .cra_blocksize = AES_BLOCK_SIZE,
2432 .cra_flags = CRYPTO_ALG_ASYNC,
2433 },
2434 .ivsize = AES_BLOCK_SIZE,
2435 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2436 },
2437 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2438 DESC_HDR_SEL0_AESU |
2439 DESC_HDR_MODE0_AESU_CBC |
2440 DESC_HDR_SEL1_MDEUA |
2441 DESC_HDR_MODE1_MDEU_INIT |
2442 DESC_HDR_MODE1_MDEU_PAD |
2443 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2444 },
7405c8d7
LC
2445 { .type = CRYPTO_ALG_TYPE_AEAD,
2446 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2447 .alg.aead = {
2448 .base = {
2449 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2450 .cra_driver_name = "authenc-hmac-sha224-"
a1a42f84 2451 "cbc-aes-talitos-hsna",
7405c8d7
LC
2452 .cra_blocksize = AES_BLOCK_SIZE,
2453 .cra_flags = CRYPTO_ALG_ASYNC,
2454 },
2455 .ivsize = AES_BLOCK_SIZE,
2456 .maxauthsize = SHA224_DIGEST_SIZE,
2457 },
2458 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2459 DESC_HDR_SEL0_AESU |
2460 DESC_HDR_MODE0_AESU_CBC |
2461 DESC_HDR_SEL1_MDEUA |
2462 DESC_HDR_MODE1_MDEU_INIT |
2463 DESC_HDR_MODE1_MDEU_PAD |
2464 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2465 },
357fb605 2466 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2467 .alg.aead = {
2468 .base = {
2469 .cra_name = "authenc(hmac(sha224),"
2470 "cbc(des3_ede))",
2471 .cra_driver_name = "authenc-hmac-sha224-"
2472 "cbc-3des-talitos",
2473 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2474 .cra_flags = CRYPTO_ALG_ASYNC,
2475 },
2476 .ivsize = DES3_EDE_BLOCK_SIZE,
2477 .maxauthsize = SHA224_DIGEST_SIZE,
ef7c5c85 2478 .setkey = aead_des3_setkey,
357fb605
HG
2479 },
2480 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2481 DESC_HDR_SEL0_DEU |
2482 DESC_HDR_MODE0_DEU_CBC |
2483 DESC_HDR_MODE0_DEU_3DES |
2484 DESC_HDR_SEL1_MDEUA |
2485 DESC_HDR_MODE1_MDEU_INIT |
2486 DESC_HDR_MODE1_MDEU_PAD |
2487 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2488 },
7405c8d7
LC
2489 { .type = CRYPTO_ALG_TYPE_AEAD,
2490 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2491 .alg.aead = {
2492 .base = {
2493 .cra_name = "authenc(hmac(sha224),"
2494 "cbc(des3_ede))",
2495 .cra_driver_name = "authenc-hmac-sha224-"
a1a42f84 2496 "cbc-3des-talitos-hsna",
7405c8d7
LC
2497 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2498 .cra_flags = CRYPTO_ALG_ASYNC,
2499 },
2500 .ivsize = DES3_EDE_BLOCK_SIZE,
2501 .maxauthsize = SHA224_DIGEST_SIZE,
ef7c5c85 2502 .setkey = aead_des3_setkey,
7405c8d7
LC
2503 },
2504 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2505 DESC_HDR_SEL0_DEU |
2506 DESC_HDR_MODE0_DEU_CBC |
2507 DESC_HDR_MODE0_DEU_3DES |
2508 DESC_HDR_SEL1_MDEUA |
2509 DESC_HDR_MODE1_MDEU_INIT |
2510 DESC_HDR_MODE1_MDEU_PAD |
2511 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2512 },
d5e4aaef 2513 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2514 .alg.aead = {
2515 .base = {
2516 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2517 .cra_driver_name = "authenc-hmac-sha256-"
2518 "cbc-aes-talitos",
2519 .cra_blocksize = AES_BLOCK_SIZE,
2520 .cra_flags = CRYPTO_ALG_ASYNC,
2521 },
2522 .ivsize = AES_BLOCK_SIZE,
2523 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2524 },
3952f17e
LN
2525 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2526 DESC_HDR_SEL0_AESU |
2527 DESC_HDR_MODE0_AESU_CBC |
2528 DESC_HDR_SEL1_MDEUA |
2529 DESC_HDR_MODE1_MDEU_INIT |
2530 DESC_HDR_MODE1_MDEU_PAD |
2531 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2532 },
7405c8d7
LC
2533 { .type = CRYPTO_ALG_TYPE_AEAD,
2534 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2535 .alg.aead = {
2536 .base = {
2537 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2538 .cra_driver_name = "authenc-hmac-sha256-"
a1a42f84 2539 "cbc-aes-talitos-hsna",
7405c8d7
LC
2540 .cra_blocksize = AES_BLOCK_SIZE,
2541 .cra_flags = CRYPTO_ALG_ASYNC,
2542 },
2543 .ivsize = AES_BLOCK_SIZE,
2544 .maxauthsize = SHA256_DIGEST_SIZE,
2545 },
2546 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2547 DESC_HDR_SEL0_AESU |
2548 DESC_HDR_MODE0_AESU_CBC |
2549 DESC_HDR_SEL1_MDEUA |
2550 DESC_HDR_MODE1_MDEU_INIT |
2551 DESC_HDR_MODE1_MDEU_PAD |
2552 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2553 },
d5e4aaef 2554 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2555 .alg.aead = {
2556 .base = {
2557 .cra_name = "authenc(hmac(sha256),"
2558 "cbc(des3_ede))",
2559 .cra_driver_name = "authenc-hmac-sha256-"
2560 "cbc-3des-talitos",
2561 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2562 .cra_flags = CRYPTO_ALG_ASYNC,
2563 },
2564 .ivsize = DES3_EDE_BLOCK_SIZE,
2565 .maxauthsize = SHA256_DIGEST_SIZE,
ef7c5c85 2566 .setkey = aead_des3_setkey,
56af8cd4 2567 },
3952f17e
LN
2568 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2569 DESC_HDR_SEL0_DEU |
2570 DESC_HDR_MODE0_DEU_CBC |
2571 DESC_HDR_MODE0_DEU_3DES |
2572 DESC_HDR_SEL1_MDEUA |
2573 DESC_HDR_MODE1_MDEU_INIT |
2574 DESC_HDR_MODE1_MDEU_PAD |
2575 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2576 },
7405c8d7
LC
2577 { .type = CRYPTO_ALG_TYPE_AEAD,
2578 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2579 .alg.aead = {
2580 .base = {
2581 .cra_name = "authenc(hmac(sha256),"
2582 "cbc(des3_ede))",
2583 .cra_driver_name = "authenc-hmac-sha256-"
a1a42f84 2584 "cbc-3des-talitos-hsna",
7405c8d7
LC
2585 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2586 .cra_flags = CRYPTO_ALG_ASYNC,
2587 },
2588 .ivsize = DES3_EDE_BLOCK_SIZE,
2589 .maxauthsize = SHA256_DIGEST_SIZE,
ef7c5c85 2590 .setkey = aead_des3_setkey,
7405c8d7
LC
2591 },
2592 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2593 DESC_HDR_SEL0_DEU |
2594 DESC_HDR_MODE0_DEU_CBC |
2595 DESC_HDR_MODE0_DEU_3DES |
2596 DESC_HDR_SEL1_MDEUA |
2597 DESC_HDR_MODE1_MDEU_INIT |
2598 DESC_HDR_MODE1_MDEU_PAD |
2599 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2600 },
d5e4aaef 2601 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2602 .alg.aead = {
2603 .base = {
2604 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2605 .cra_driver_name = "authenc-hmac-sha384-"
2606 "cbc-aes-talitos",
2607 .cra_blocksize = AES_BLOCK_SIZE,
2608 .cra_flags = CRYPTO_ALG_ASYNC,
2609 },
2610 .ivsize = AES_BLOCK_SIZE,
2611 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2612 },
2613 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2614 DESC_HDR_SEL0_AESU |
2615 DESC_HDR_MODE0_AESU_CBC |
2616 DESC_HDR_SEL1_MDEUB |
2617 DESC_HDR_MODE1_MDEU_INIT |
2618 DESC_HDR_MODE1_MDEU_PAD |
2619 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2620 },
2621 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2622 .alg.aead = {
2623 .base = {
2624 .cra_name = "authenc(hmac(sha384),"
2625 "cbc(des3_ede))",
2626 .cra_driver_name = "authenc-hmac-sha384-"
2627 "cbc-3des-talitos",
2628 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2629 .cra_flags = CRYPTO_ALG_ASYNC,
2630 },
2631 .ivsize = DES3_EDE_BLOCK_SIZE,
2632 .maxauthsize = SHA384_DIGEST_SIZE,
ef7c5c85 2633 .setkey = aead_des3_setkey,
357fb605
HG
2634 },
2635 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2636 DESC_HDR_SEL0_DEU |
2637 DESC_HDR_MODE0_DEU_CBC |
2638 DESC_HDR_MODE0_DEU_3DES |
2639 DESC_HDR_SEL1_MDEUB |
2640 DESC_HDR_MODE1_MDEU_INIT |
2641 DESC_HDR_MODE1_MDEU_PAD |
2642 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2643 },
2644 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2645 .alg.aead = {
2646 .base = {
2647 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2648 .cra_driver_name = "authenc-hmac-sha512-"
2649 "cbc-aes-talitos",
2650 .cra_blocksize = AES_BLOCK_SIZE,
2651 .cra_flags = CRYPTO_ALG_ASYNC,
2652 },
2653 .ivsize = AES_BLOCK_SIZE,
2654 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2655 },
2656 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2657 DESC_HDR_SEL0_AESU |
2658 DESC_HDR_MODE0_AESU_CBC |
2659 DESC_HDR_SEL1_MDEUB |
2660 DESC_HDR_MODE1_MDEU_INIT |
2661 DESC_HDR_MODE1_MDEU_PAD |
2662 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2663 },
2664 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2665 .alg.aead = {
2666 .base = {
2667 .cra_name = "authenc(hmac(sha512),"
2668 "cbc(des3_ede))",
2669 .cra_driver_name = "authenc-hmac-sha512-"
2670 "cbc-3des-talitos",
2671 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2672 .cra_flags = CRYPTO_ALG_ASYNC,
2673 },
2674 .ivsize = DES3_EDE_BLOCK_SIZE,
2675 .maxauthsize = SHA512_DIGEST_SIZE,
ef7c5c85 2676 .setkey = aead_des3_setkey,
357fb605
HG
2677 },
2678 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2679 DESC_HDR_SEL0_DEU |
2680 DESC_HDR_MODE0_DEU_CBC |
2681 DESC_HDR_MODE0_DEU_3DES |
2682 DESC_HDR_SEL1_MDEUB |
2683 DESC_HDR_MODE1_MDEU_INIT |
2684 DESC_HDR_MODE1_MDEU_PAD |
2685 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2686 },
2687 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2688 .alg.aead = {
2689 .base = {
2690 .cra_name = "authenc(hmac(md5),cbc(aes))",
2691 .cra_driver_name = "authenc-hmac-md5-"
2692 "cbc-aes-talitos",
2693 .cra_blocksize = AES_BLOCK_SIZE,
2694 .cra_flags = CRYPTO_ALG_ASYNC,
2695 },
2696 .ivsize = AES_BLOCK_SIZE,
2697 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2698 },
3952f17e
LN
2699 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2700 DESC_HDR_SEL0_AESU |
2701 DESC_HDR_MODE0_AESU_CBC |
2702 DESC_HDR_SEL1_MDEUA |
2703 DESC_HDR_MODE1_MDEU_INIT |
2704 DESC_HDR_MODE1_MDEU_PAD |
2705 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2706 },
7405c8d7
LC
2707 { .type = CRYPTO_ALG_TYPE_AEAD,
2708 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2709 .alg.aead = {
2710 .base = {
2711 .cra_name = "authenc(hmac(md5),cbc(aes))",
2712 .cra_driver_name = "authenc-hmac-md5-"
a1a42f84 2713 "cbc-aes-talitos-hsna",
7405c8d7
LC
2714 .cra_blocksize = AES_BLOCK_SIZE,
2715 .cra_flags = CRYPTO_ALG_ASYNC,
2716 },
2717 .ivsize = AES_BLOCK_SIZE,
2718 .maxauthsize = MD5_DIGEST_SIZE,
2719 },
2720 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2721 DESC_HDR_SEL0_AESU |
2722 DESC_HDR_MODE0_AESU_CBC |
2723 DESC_HDR_SEL1_MDEUA |
2724 DESC_HDR_MODE1_MDEU_INIT |
2725 DESC_HDR_MODE1_MDEU_PAD |
2726 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2727 },
d5e4aaef 2728 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2729 .alg.aead = {
2730 .base = {
2731 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2732 .cra_driver_name = "authenc-hmac-md5-"
2733 "cbc-3des-talitos",
2734 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2735 .cra_flags = CRYPTO_ALG_ASYNC,
2736 },
2737 .ivsize = DES3_EDE_BLOCK_SIZE,
2738 .maxauthsize = MD5_DIGEST_SIZE,
ef7c5c85 2739 .setkey = aead_des3_setkey,
56af8cd4 2740 },
3952f17e
LN
2741 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2742 DESC_HDR_SEL0_DEU |
2743 DESC_HDR_MODE0_DEU_CBC |
2744 DESC_HDR_MODE0_DEU_3DES |
2745 DESC_HDR_SEL1_MDEUA |
2746 DESC_HDR_MODE1_MDEU_INIT |
2747 DESC_HDR_MODE1_MDEU_PAD |
2748 DESC_HDR_MODE1_MDEU_MD5_HMAC,
4de9d0b5 2749 },
7405c8d7
LC
2750 { .type = CRYPTO_ALG_TYPE_AEAD,
2751 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2752 .alg.aead = {
2753 .base = {
2754 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2755 .cra_driver_name = "authenc-hmac-md5-"
a1a42f84 2756 "cbc-3des-talitos-hsna",
7405c8d7
LC
2757 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2758 .cra_flags = CRYPTO_ALG_ASYNC,
2759 },
2760 .ivsize = DES3_EDE_BLOCK_SIZE,
2761 .maxauthsize = MD5_DIGEST_SIZE,
ef7c5c85 2762 .setkey = aead_des3_setkey,
7405c8d7
LC
2763 },
2764 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2765 DESC_HDR_SEL0_DEU |
2766 DESC_HDR_MODE0_DEU_CBC |
2767 DESC_HDR_MODE0_DEU_3DES |
2768 DESC_HDR_SEL1_MDEUA |
2769 DESC_HDR_MODE1_MDEU_INIT |
2770 DESC_HDR_MODE1_MDEU_PAD |
2771 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2772 },
4de9d0b5 2773 /* ABLKCIPHER algorithms. */
5e75ae1b
LC
2774 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2775 .alg.crypto = {
2776 .cra_name = "ecb(aes)",
2777 .cra_driver_name = "ecb-aes-talitos",
2778 .cra_blocksize = AES_BLOCK_SIZE,
2779 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2780 CRYPTO_ALG_ASYNC,
2781 .cra_ablkcipher = {
2782 .min_keysize = AES_MIN_KEY_SIZE,
2783 .max_keysize = AES_MAX_KEY_SIZE,
2784 .ivsize = AES_BLOCK_SIZE,
2785 }
2786 },
2787 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2788 DESC_HDR_SEL0_AESU,
2789 },
d5e4aaef
LN
2790 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2791 .alg.crypto = {
4de9d0b5
LN
2792 .cra_name = "cbc(aes)",
2793 .cra_driver_name = "cbc-aes-talitos",
2794 .cra_blocksize = AES_BLOCK_SIZE,
2795 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2796 CRYPTO_ALG_ASYNC,
4de9d0b5 2797 .cra_ablkcipher = {
4de9d0b5
LN
2798 .min_keysize = AES_MIN_KEY_SIZE,
2799 .max_keysize = AES_MAX_KEY_SIZE,
2800 .ivsize = AES_BLOCK_SIZE,
2801 }
2802 },
2803 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2804 DESC_HDR_SEL0_AESU |
2805 DESC_HDR_MODE0_AESU_CBC,
2806 },
5e75ae1b
LC
2807 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2808 .alg.crypto = {
2809 .cra_name = "ctr(aes)",
2810 .cra_driver_name = "ctr-aes-talitos",
2811 .cra_blocksize = AES_BLOCK_SIZE,
2812 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2813 CRYPTO_ALG_ASYNC,
2814 .cra_ablkcipher = {
2815 .min_keysize = AES_MIN_KEY_SIZE,
2816 .max_keysize = AES_MAX_KEY_SIZE,
2817 .ivsize = AES_BLOCK_SIZE,
2818 }
2819 },
70d355cc 2820 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
5e75ae1b
LC
2821 DESC_HDR_SEL0_AESU |
2822 DESC_HDR_MODE0_AESU_CTR,
2823 },
2824 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2825 .alg.crypto = {
2826 .cra_name = "ecb(des)",
2827 .cra_driver_name = "ecb-des-talitos",
2828 .cra_blocksize = DES_BLOCK_SIZE,
2829 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2830 CRYPTO_ALG_ASYNC,
2831 .cra_ablkcipher = {
2832 .min_keysize = DES_KEY_SIZE,
2833 .max_keysize = DES_KEY_SIZE,
2834 .ivsize = DES_BLOCK_SIZE,
ef7c5c85 2835 .setkey = ablkcipher_des_setkey,
5e75ae1b
LC
2836 }
2837 },
2838 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2839 DESC_HDR_SEL0_DEU,
2840 },
2841 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2842 .alg.crypto = {
2843 .cra_name = "cbc(des)",
2844 .cra_driver_name = "cbc-des-talitos",
2845 .cra_blocksize = DES_BLOCK_SIZE,
2846 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2847 CRYPTO_ALG_ASYNC,
2848 .cra_ablkcipher = {
2849 .min_keysize = DES_KEY_SIZE,
2850 .max_keysize = DES_KEY_SIZE,
2851 .ivsize = DES_BLOCK_SIZE,
ef7c5c85 2852 .setkey = ablkcipher_des_setkey,
5e75ae1b
LC
2853 }
2854 },
2855 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2856 DESC_HDR_SEL0_DEU |
2857 DESC_HDR_MODE0_DEU_CBC,
2858 },
2859 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2860 .alg.crypto = {
2861 .cra_name = "ecb(des3_ede)",
2862 .cra_driver_name = "ecb-3des-talitos",
2863 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2864 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2865 CRYPTO_ALG_ASYNC,
2866 .cra_ablkcipher = {
2867 .min_keysize = DES3_EDE_KEY_SIZE,
2868 .max_keysize = DES3_EDE_KEY_SIZE,
2869 .ivsize = DES3_EDE_BLOCK_SIZE,
ef7c5c85 2870 .setkey = ablkcipher_des3_setkey,
5e75ae1b
LC
2871 }
2872 },
2873 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2874 DESC_HDR_SEL0_DEU |
2875 DESC_HDR_MODE0_DEU_3DES,
2876 },
d5e4aaef
LN
2877 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2878 .alg.crypto = {
4de9d0b5
LN
2879 .cra_name = "cbc(des3_ede)",
2880 .cra_driver_name = "cbc-3des-talitos",
2881 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2882 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2883 CRYPTO_ALG_ASYNC,
4de9d0b5 2884 .cra_ablkcipher = {
4de9d0b5
LN
2885 .min_keysize = DES3_EDE_KEY_SIZE,
2886 .max_keysize = DES3_EDE_KEY_SIZE,
2887 .ivsize = DES3_EDE_BLOCK_SIZE,
ef7c5c85 2888 .setkey = ablkcipher_des3_setkey,
4de9d0b5
LN
2889 }
2890 },
2891 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2892 DESC_HDR_SEL0_DEU |
2893 DESC_HDR_MODE0_DEU_CBC |
2894 DESC_HDR_MODE0_DEU_3DES,
497f2e6b
LN
2895 },
2896 /* AHASH algorithms. */
2897 { .type = CRYPTO_ALG_TYPE_AHASH,
2898 .alg.hash = {
497f2e6b 2899 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2900 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2901 .halg.base = {
2902 .cra_name = "md5",
2903 .cra_driver_name = "md5-talitos",
b3988618 2904 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
6a38f622 2905 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2906 }
2907 },
2908 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2909 DESC_HDR_SEL0_MDEUA |
2910 DESC_HDR_MODE0_MDEU_MD5,
2911 },
2912 { .type = CRYPTO_ALG_TYPE_AHASH,
2913 .alg.hash = {
497f2e6b 2914 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2915 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2916 .halg.base = {
2917 .cra_name = "sha1",
2918 .cra_driver_name = "sha1-talitos",
2919 .cra_blocksize = SHA1_BLOCK_SIZE,
6a38f622 2920 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2921 }
2922 },
2923 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2924 DESC_HDR_SEL0_MDEUA |
2925 DESC_HDR_MODE0_MDEU_SHA1,
2926 },
60f208d7
KP
2927 { .type = CRYPTO_ALG_TYPE_AHASH,
2928 .alg.hash = {
60f208d7 2929 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2930 .halg.statesize = sizeof(struct talitos_export_state),
60f208d7
KP
2931 .halg.base = {
2932 .cra_name = "sha224",
2933 .cra_driver_name = "sha224-talitos",
2934 .cra_blocksize = SHA224_BLOCK_SIZE,
6a38f622 2935 .cra_flags = CRYPTO_ALG_ASYNC,
60f208d7
KP
2936 }
2937 },
2938 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2939 DESC_HDR_SEL0_MDEUA |
2940 DESC_HDR_MODE0_MDEU_SHA224,
2941 },
497f2e6b
LN
2942 { .type = CRYPTO_ALG_TYPE_AHASH,
2943 .alg.hash = {
497f2e6b 2944 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2945 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2946 .halg.base = {
2947 .cra_name = "sha256",
2948 .cra_driver_name = "sha256-talitos",
2949 .cra_blocksize = SHA256_BLOCK_SIZE,
6a38f622 2950 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2951 }
2952 },
2953 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2954 DESC_HDR_SEL0_MDEUA |
2955 DESC_HDR_MODE0_MDEU_SHA256,
2956 },
2957 { .type = CRYPTO_ALG_TYPE_AHASH,
2958 .alg.hash = {
497f2e6b 2959 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2960 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2961 .halg.base = {
2962 .cra_name = "sha384",
2963 .cra_driver_name = "sha384-talitos",
2964 .cra_blocksize = SHA384_BLOCK_SIZE,
6a38f622 2965 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2966 }
2967 },
2968 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2969 DESC_HDR_SEL0_MDEUB |
2970 DESC_HDR_MODE0_MDEUB_SHA384,
2971 },
2972 { .type = CRYPTO_ALG_TYPE_AHASH,
2973 .alg.hash = {
497f2e6b 2974 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2975 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2976 .halg.base = {
2977 .cra_name = "sha512",
2978 .cra_driver_name = "sha512-talitos",
2979 .cra_blocksize = SHA512_BLOCK_SIZE,
6a38f622 2980 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2981 }
2982 },
2983 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2984 DESC_HDR_SEL0_MDEUB |
2985 DESC_HDR_MODE0_MDEUB_SHA512,
2986 },
79b3a418
LN
2987 { .type = CRYPTO_ALG_TYPE_AHASH,
2988 .alg.hash = {
79b3a418 2989 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2990 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2991 .halg.base = {
2992 .cra_name = "hmac(md5)",
2993 .cra_driver_name = "hmac-md5-talitos",
b3988618 2994 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
6a38f622 2995 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
2996 }
2997 },
2998 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2999 DESC_HDR_SEL0_MDEUA |
3000 DESC_HDR_MODE0_MDEU_MD5,
3001 },
3002 { .type = CRYPTO_ALG_TYPE_AHASH,
3003 .alg.hash = {
79b3a418 3004 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 3005 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3006 .halg.base = {
3007 .cra_name = "hmac(sha1)",
3008 .cra_driver_name = "hmac-sha1-talitos",
3009 .cra_blocksize = SHA1_BLOCK_SIZE,
6a38f622 3010 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3011 }
3012 },
3013 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3014 DESC_HDR_SEL0_MDEUA |
3015 DESC_HDR_MODE0_MDEU_SHA1,
3016 },
3017 { .type = CRYPTO_ALG_TYPE_AHASH,
3018 .alg.hash = {
79b3a418 3019 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 3020 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3021 .halg.base = {
3022 .cra_name = "hmac(sha224)",
3023 .cra_driver_name = "hmac-sha224-talitos",
3024 .cra_blocksize = SHA224_BLOCK_SIZE,
6a38f622 3025 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3026 }
3027 },
3028 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3029 DESC_HDR_SEL0_MDEUA |
3030 DESC_HDR_MODE0_MDEU_SHA224,
3031 },
3032 { .type = CRYPTO_ALG_TYPE_AHASH,
3033 .alg.hash = {
79b3a418 3034 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 3035 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3036 .halg.base = {
3037 .cra_name = "hmac(sha256)",
3038 .cra_driver_name = "hmac-sha256-talitos",
3039 .cra_blocksize = SHA256_BLOCK_SIZE,
6a38f622 3040 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3041 }
3042 },
3043 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3044 DESC_HDR_SEL0_MDEUA |
3045 DESC_HDR_MODE0_MDEU_SHA256,
3046 },
3047 { .type = CRYPTO_ALG_TYPE_AHASH,
3048 .alg.hash = {
79b3a418 3049 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 3050 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3051 .halg.base = {
3052 .cra_name = "hmac(sha384)",
3053 .cra_driver_name = "hmac-sha384-talitos",
3054 .cra_blocksize = SHA384_BLOCK_SIZE,
6a38f622 3055 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3056 }
3057 },
3058 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3059 DESC_HDR_SEL0_MDEUB |
3060 DESC_HDR_MODE0_MDEUB_SHA384,
3061 },
3062 { .type = CRYPTO_ALG_TYPE_AHASH,
3063 .alg.hash = {
79b3a418 3064 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 3065 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3066 .halg.base = {
3067 .cra_name = "hmac(sha512)",
3068 .cra_driver_name = "hmac-sha512-talitos",
3069 .cra_blocksize = SHA512_BLOCK_SIZE,
6a38f622 3070 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3071 }
3072 },
3073 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3074 DESC_HDR_SEL0_MDEUB |
3075 DESC_HDR_MODE0_MDEUB_SHA512,
3076 }
9c4a7965
KP
3077};
3078
3079struct talitos_crypto_alg {
3080 struct list_head entry;
3081 struct device *dev;
acbf7c62 3082 struct talitos_alg_template algt;
9c4a7965
KP
3083};
3084
89d124cb
JE
3085static int talitos_init_common(struct talitos_ctx *ctx,
3086 struct talitos_crypto_alg *talitos_alg)
9c4a7965 3087{
5228f0f7 3088 struct talitos_private *priv;
9c4a7965
KP
3089
3090 /* update context with ptr to dev */
3091 ctx->dev = talitos_alg->dev;
19bbbc63 3092
5228f0f7
KP
3093 /* assign SEC channel to tfm in round-robin fashion */
3094 priv = dev_get_drvdata(ctx->dev);
3095 ctx->ch = atomic_inc_return(&priv->last_chan) &
3096 (priv->num_channels - 1);
3097
9c4a7965 3098 /* copy descriptor header template value */
acbf7c62 3099 ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
9c4a7965 3100
602dba5a
KP
3101 /* select done notification */
3102 ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
3103
497f2e6b
LN
3104 return 0;
3105}
3106
89d124cb
JE
3107static int talitos_cra_init(struct crypto_tfm *tfm)
3108{
3109 struct crypto_alg *alg = tfm->__crt_alg;
3110 struct talitos_crypto_alg *talitos_alg;
3111 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3112
3113 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_AHASH)
3114 talitos_alg = container_of(__crypto_ahash_alg(alg),
3115 struct talitos_crypto_alg,
3116 algt.alg.hash);
3117 else
3118 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3119 algt.alg.crypto);
3120
3121 return talitos_init_common(ctx, talitos_alg);
3122}
3123
aeb4c132 3124static int talitos_cra_init_aead(struct crypto_aead *tfm)
497f2e6b 3125{
89d124cb
JE
3126 struct aead_alg *alg = crypto_aead_alg(tfm);
3127 struct talitos_crypto_alg *talitos_alg;
3128 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3129
3130 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3131 algt.alg.aead);
3132
3133 return talitos_init_common(ctx, talitos_alg);
9c4a7965
KP
3134}
3135
497f2e6b
LN
3136static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3137{
3138 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3139
3140 talitos_cra_init(tfm);
3141
3142 ctx->keylen = 0;
3143 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3144 sizeof(struct talitos_ahash_req_ctx));
3145
3146 return 0;
3147}
3148
2e13ce08
LC
3149static void talitos_cra_exit(struct crypto_tfm *tfm)
3150{
3151 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3152 struct device *dev = ctx->dev;
3153
3154 if (ctx->keylen)
3155 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3156}
3157
9c4a7965
KP
3158/*
3159 * given the alg's descriptor header template, determine whether the
3160 * descriptor type and the primary/secondary execution units it requires
3161 * match the hw capabilities description provided in the device tree node.
3162 */
3163static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3164{
3165 struct talitos_private *priv = dev_get_drvdata(dev);
3166 int ret;
3167
3168 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3169 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3170
3171 if (SECONDARY_EU(desc_hdr_template))
3172 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3173 & priv->exec_units);
3174
3175 return ret;
3176}
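hw_supports() is a pure bit test of the algorithm template against the capability masks read from the SEC device-tree node (the exec-units and descriptor-types masks). A worked example in the template's own terms; the mapping below just restates what the macros extract:

    /*
     * For the "authenc(hmac(sha1),cbc(aes))" template:
     *   DESC_TYPE(hdr)    -> the IPSEC_ESP descriptor type
     *   PRIMARY_EU(hdr)   -> AESU
     *   SECONDARY_EU(hdr) -> MDEU-A
     * The template is usable only if priv->desc_types has the IPSEC_ESP bit
     * set and priv->exec_units has both the AESU and MDEU-A bits set.
     */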
3177
2dc11581 3178static int talitos_remove(struct platform_device *ofdev)
9c4a7965
KP
3179{
3180 struct device *dev = &ofdev->dev;
3181 struct talitos_private *priv = dev_get_drvdata(dev);
3182 struct talitos_crypto_alg *t_alg, *n;
3183 int i;
3184
3185 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
acbf7c62
LN
3186 switch (t_alg->algt.type) {
3187 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62 3188 break;
aeb4c132
HX
3189 case CRYPTO_ALG_TYPE_AEAD:
3190 crypto_unregister_aead(&t_alg->algt.alg.aead);
 break;
acbf7c62
LN
3191 case CRYPTO_ALG_TYPE_AHASH:
3192 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3193 break;
3194 }
9c4a7965 3195 list_del(&t_alg->entry);
9c4a7965
KP
3196 }
3197
3198 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3199 talitos_unregister_rng(dev);
3200
c3e337f8 3201 for (i = 0; i < 2; i++)
2cdba3cf 3202 if (priv->irq[i]) {
c3e337f8
KP
3203 free_irq(priv->irq[i], dev);
3204 irq_dispose_mapping(priv->irq[i]);
3205 }
9c4a7965 3206
c3e337f8 3207 tasklet_kill(&priv->done_task[0]);
2cdba3cf 3208 if (priv->irq[1])
c3e337f8 3209 tasklet_kill(&priv->done_task[1]);
9c4a7965 3210
9c4a7965
KP
3211 return 0;
3212}
3213
3214static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3215 struct talitos_alg_template
3216 *template)
3217{
60f208d7 3218 struct talitos_private *priv = dev_get_drvdata(dev);
9c4a7965
KP
3219 struct talitos_crypto_alg *t_alg;
3220 struct crypto_alg *alg;
3221
24b92ff2
LC
3222 t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
3223 GFP_KERNEL);
9c4a7965
KP
3224 if (!t_alg)
3225 return ERR_PTR(-ENOMEM);
3226
acbf7c62
LN
3227 t_alg->algt = *template;
3228
3229 switch (t_alg->algt.type) {
3230 case CRYPTO_ALG_TYPE_ABLKCIPHER:
497f2e6b
LN
3231 alg = &t_alg->algt.alg.crypto;
3232 alg->cra_init = talitos_cra_init;
2e13ce08 3233 alg->cra_exit = talitos_cra_exit;
d4cd3283 3234 alg->cra_type = &crypto_ablkcipher_type;
ef7c5c85
HX
3235 alg->cra_ablkcipher.setkey = alg->cra_ablkcipher.setkey ?:
3236 ablkcipher_setkey;
b286e003
KP
3237 alg->cra_ablkcipher.encrypt = ablkcipher_encrypt;
3238 alg->cra_ablkcipher.decrypt = ablkcipher_decrypt;
497f2e6b 3239 break;
acbf7c62 3240 case CRYPTO_ALG_TYPE_AEAD:
aeb4c132 3241 alg = &t_alg->algt.alg.aead.base;
2e13ce08 3242 alg->cra_exit = talitos_cra_exit;
aeb4c132 3243 t_alg->algt.alg.aead.init = talitos_cra_init_aead;
ef7c5c85
HX
3244 t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
3245 aead_setkey;
aeb4c132
HX
3246 t_alg->algt.alg.aead.encrypt = aead_encrypt;
3247 t_alg->algt.alg.aead.decrypt = aead_decrypt;
6cda075a
LC
3248 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3249 !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
24b92ff2 3250 devm_kfree(dev, t_alg);
6cda075a
LC
3251 return ERR_PTR(-ENOTSUPP);
3252 }
acbf7c62
LN
3253 break;
3254 case CRYPTO_ALG_TYPE_AHASH:
3255 alg = &t_alg->algt.alg.hash.halg.base;
497f2e6b 3256 alg->cra_init = talitos_cra_init_ahash;
ad4cd51f 3257 alg->cra_exit = talitos_cra_exit;
b286e003
KP
3258 t_alg->algt.alg.hash.init = ahash_init;
3259 t_alg->algt.alg.hash.update = ahash_update;
3260 t_alg->algt.alg.hash.final = ahash_final;
3261 t_alg->algt.alg.hash.finup = ahash_finup;
3262 t_alg->algt.alg.hash.digest = ahash_digest;
56136631
LC
3263 if (!strncmp(alg->cra_name, "hmac", 4))
3264 t_alg->algt.alg.hash.setkey = ahash_setkey;
3639ca84
HG
3265 t_alg->algt.alg.hash.import = ahash_import;
3266 t_alg->algt.alg.hash.export = ahash_export;
b286e003 3267
79b3a418 3268 if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
0b2730d8 3269 !strncmp(alg->cra_name, "hmac", 4)) {
24b92ff2 3270 devm_kfree(dev, t_alg);
79b3a418 3271 return ERR_PTR(-ENOTSUPP);
0b2730d8 3272 }
60f208d7 3273 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
79b3a418
LN
3274 (!strcmp(alg->cra_name, "sha224") ||
3275 !strcmp(alg->cra_name, "hmac(sha224)"))) {
60f208d7
KP
3276 t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
3277 t_alg->algt.desc_hdr_template =
3278 DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3279 DESC_HDR_SEL0_MDEUA |
3280 DESC_HDR_MODE0_MDEU_SHA256;
3281 }
497f2e6b 3282 break;
1d11911a
KP
3283 default:
3284 dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
24b92ff2 3285 devm_kfree(dev, t_alg);
1d11911a 3286 return ERR_PTR(-EINVAL);
acbf7c62 3287 }
9c4a7965 3288
9c4a7965 3289 alg->cra_module = THIS_MODULE;
b0057763
LC
3290 if (t_alg->algt.priority)
3291 alg->cra_priority = t_alg->algt.priority;
3292 else
3293 alg->cra_priority = TALITOS_CRA_PRIORITY;
9c4a7965 3294 alg->cra_alignmask = 0;
9c4a7965 3295 alg->cra_ctxsize = sizeof(struct talitos_ctx);
d912bb76 3296 alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
9c4a7965 3297
9c4a7965
KP
3298 t_alg->dev = dev;
3299
3300 return t_alg;
3301}
3302
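/*
 * Map and request the controller interrupt(s).  SEC1 uses a single IRQ
 * for all channels.  SEC2+ may expose two lines: the primary one then
 * services channels 0 and 2 and the secondary one channels 1 and 3;
 * with only one line wired up, a combined 4-channel handler is used.
 */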
c3e337f8
KP
3303static int talitos_probe_irq(struct platform_device *ofdev)
3304{
3305 struct device *dev = &ofdev->dev;
3306 struct device_node *np = ofdev->dev.of_node;
3307 struct talitos_private *priv = dev_get_drvdata(dev);
3308 int err;
dd3c0987 3309 bool is_sec1 = has_ftr_sec1(priv);
c3e337f8
KP
3310
3311 priv->irq[0] = irq_of_parse_and_map(np, 0);
2cdba3cf 3312 if (!priv->irq[0]) {
c3e337f8
KP
3313 dev_err(dev, "failed to map irq\n");
3314 return -EINVAL;
3315 }
dd3c0987
LC
3316 if (is_sec1) {
3317 err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
3318 dev_driver_string(dev), dev);
3319 goto primary_out;
3320 }
c3e337f8
KP
3321
3322 priv->irq[1] = irq_of_parse_and_map(np, 1);
3323
3324 /* get the primary irq line */
2cdba3cf 3325 if (!priv->irq[1]) {
dd3c0987 3326 err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
c3e337f8
KP
3327 dev_driver_string(dev), dev);
3328 goto primary_out;
3329 }
3330
dd3c0987 3331 err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
c3e337f8
KP
3332 dev_driver_string(dev), dev);
3333 if (err)
3334 goto primary_out;
3335
3336 /* get the secondary irq line */
dd3c0987 3337 err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
c3e337f8
KP
3338 dev_driver_string(dev), dev);
3339 if (err) {
3340 dev_err(dev, "failed to request secondary irq\n");
3341 irq_dispose_mapping(priv->irq[1]);
2cdba3cf 3342 priv->irq[1] = 0;
c3e337f8
KP
3343 }
3344
3345 return err;
3346
3347primary_out:
3348 if (err) {
3349 dev_err(dev, "failed to request primary irq\n");
3350 irq_dispose_mapping(priv->irq[0]);
2cdba3cf 3351 priv->irq[0] = 0;
c3e337f8
KP
3352 }
3353
3354 return err;
3355}
3356
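/*
 * Probe: map the register block, read the SEC capabilities from the
 * device tree, set up interrupts and completion tasklets, allocate the
 * per-channel request fifos, reset the hardware and finally register
 * the RNG and every algorithm the engine advertises support for.
 */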
1c48a5c9 3357static int talitos_probe(struct platform_device *ofdev)
9c4a7965
KP
3358{
3359 struct device *dev = &ofdev->dev;
61c7a080 3360 struct device_node *np = ofdev->dev.of_node;
9c4a7965 3361 struct talitos_private *priv;
9c4a7965 3362 int i, err;
5fa7fa14 3363 int stride;
fd5ea7f0 3364 struct resource *res;
9c4a7965 3365
24b92ff2 3366 priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
9c4a7965
KP
3367 if (!priv)
3368 return -ENOMEM;
3369
f3de9cb1
KH
3370 INIT_LIST_HEAD(&priv->alg_list);
3371
9c4a7965
KP
3372 dev_set_drvdata(dev, priv);
3373
3374 priv->ofdev = ofdev;
3375
511d63cb
HG
3376 spin_lock_init(&priv->reg_lock);
3377
fd5ea7f0
LC
3378 res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3379 if (!res)
3380 return -ENXIO;
3381 priv->reg = devm_ioremap(dev, res->start, resource_size(res));
9c4a7965
KP
3382 if (!priv->reg) {
3383 dev_err(dev, "failed to ioremap\n");
3384 err = -ENOMEM;
3385 goto err_out;
3386 }
3387
3388 /* get SEC version capabilities from device tree */
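/*
 * Example node (values are illustrative; see the fsl,sec device tree
 * binding for the exact numbers of a given SoC):
 *
 *	crypto@30000 {
 *		compatible = "fsl,sec2.1", "fsl,sec2.0";
 *		fsl,num-channels = <4>;
 *		fsl,channel-fifo-len = <24>;
 *		fsl,exec-units-mask = <0xfe>;
 *		fsl,descriptor-types-mask = <0x12b0ebf>;
 *	};
 */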
fa14c6cf
LC
3389 of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3390 of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3391 of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3392 of_property_read_u32(np, "fsl,descriptor-types-mask",
3393 &priv->desc_types);
9c4a7965
KP
3394
3395 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3396 !priv->exec_units || !priv->desc_types) {
3397 dev_err(dev, "invalid property data in device tree node\n");
3398 err = -EINVAL;
3399 goto err_out;
3400 }
3401
f3c85bc1
LN
3402 if (of_device_is_compatible(np, "fsl,sec3.0"))
3403 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3404
fe5720e2 3405 if (of_device_is_compatible(np, "fsl,sec2.1"))
60f208d7 3406 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
79b3a418
LN
3407 TALITOS_FTR_SHA224_HWINIT |
3408 TALITOS_FTR_HMAC_OK;
fe5720e2 3409
21590888
LC
3410 if (of_device_is_compatible(np, "fsl,sec1.0"))
3411 priv->features |= TALITOS_FTR_SEC1;
3412
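/*
 * Execution-unit register offsets and the per-channel stride differ
 * between SEC1 (sec1.0/sec1.2) and SEC2+ parts; pick the layout that
 * matches the compatible string.
 */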
5fa7fa14
LC
3413 if (of_device_is_compatible(np, "fsl,sec1.2")) {
3414 priv->reg_deu = priv->reg + TALITOS12_DEU;
3415 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3416 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3417 stride = TALITOS1_CH_STRIDE;
3418 } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3419 priv->reg_deu = priv->reg + TALITOS10_DEU;
3420 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3421 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3422 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3423 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3424 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3425 stride = TALITOS1_CH_STRIDE;
3426 } else {
3427 priv->reg_deu = priv->reg + TALITOS2_DEU;
3428 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3429 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3430 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3431 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3432 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3433 priv->reg_keu = priv->reg + TALITOS2_KEU;
3434 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3435 stride = TALITOS2_CH_STRIDE;
3436 }
3437
dd3c0987
LC
3438 err = talitos_probe_irq(ofdev);
3439 if (err)
3440 goto err_out;
3441
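/*
 * Completion handling runs in tasklet context; install the done
 * tasklet(s) that match the interrupt wiring chosen above (two
 * tasklets only when a SEC2+ part has both IRQ lines).
 */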
3442 if (of_device_is_compatible(np, "fsl,sec1.0")) {
9c02e285
LC
3443 if (priv->num_channels == 1)
3444 tasklet_init(&priv->done_task[0], talitos1_done_ch0,
dd3c0987 3445 (unsigned long)dev);
9c02e285
LC
3446 else
3447 tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3448 (unsigned long)dev);
3449 } else {
3450 if (priv->irq[1]) {
dd3c0987
LC
3451 tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3452 (unsigned long)dev);
3453 tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3454 (unsigned long)dev);
9c02e285
LC
3455 } else if (priv->num_channels == 1) {
3456 tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3457 (unsigned long)dev);
3458 } else {
3459 tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3460 (unsigned long)dev);
dd3c0987
LC
3461 }
3462 }
3463
a86854d0
KC
3464 priv->chan = devm_kcalloc(dev,
3465 priv->num_channels,
3466 sizeof(struct talitos_channel),
3467 GFP_KERNEL);
4b992628
KP
3468 if (!priv->chan) {
3469 dev_err(dev, "failed to allocate channel management space\n");
9c4a7965
KP
3470 err = -ENOMEM;
3471 goto err_out;
3472 }
3473
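/*
 * Round the software request fifo up to a power of two so head/tail
 * indices can wrap with a simple mask.
 */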
f641dddd
MH
3474 priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3475
c3e337f8 3476 for (i = 0; i < priv->num_channels; i++) {
5fa7fa14 3477 priv->chan[i].reg = priv->reg + stride * (i + 1);
2cdba3cf 3478 if (!priv->irq[1] || !(i & 1))
c3e337f8 3479 priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
ad42d5fc 3480
4b992628
KP
3481 spin_lock_init(&priv->chan[i].head_lock);
3482 spin_lock_init(&priv->chan[i].tail_lock);
9c4a7965 3483
a86854d0
KC
3484 priv->chan[i].fifo = devm_kcalloc(dev,
3485 priv->fifo_len,
3486 sizeof(struct talitos_request),
3487 GFP_KERNEL);
4b992628 3488 if (!priv->chan[i].fifo) {
9c4a7965
KP
3489 dev_err(dev, "failed to allocate request fifo %d\n", i);
3490 err = -ENOMEM;
3491 goto err_out;
3492 }
9c4a7965 3493
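/*
 * submit_count starts at -(chfifo_len - 1); the submission path bumps
 * it with atomic_inc_not_zero(), so reaching zero marks the channel
 * fifo as full.
 */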
4b992628
KP
3494 atomic_set(&priv->chan[i].submit_count,
3495 -(priv->chfifo_len - 1));
f641dddd 3496 }
9c4a7965 3497
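/* the SEC can generate 36-bit DMA addresses, so widen the default mask */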
81eb024c
KP
3498 dma_set_mask(dev, DMA_BIT_MASK(36));
3499
9c4a7965
KP
3500 /* reset and initialize the h/w */
3501 err = init_device(dev);
3502 if (err) {
3503 dev_err(dev, "failed to initialize device\n");
3504 goto err_out;
3505 }
3506
3507 /* register the RNG, if available */
3508 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3509 err = talitos_register_rng(dev);
3510 if (err) {
3511 dev_err(dev, "failed to register hwrng: %d\n", err);
3512 goto err_out;
3513 } else
3514 dev_info(dev, "hwrng\n");
3515 }
3516
3517 /* register crypto algorithms the device supports */
9c4a7965
KP
3518 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3519 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3520 struct talitos_crypto_alg *t_alg;
aeb4c132 3521 struct crypto_alg *alg = NULL;
9c4a7965
KP
3522
3523 t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3524 if (IS_ERR(t_alg)) {
3525 err = PTR_ERR(t_alg);
0b2730d8 3526 if (err == -ENOTSUPP)
79b3a418 3527 continue;
9c4a7965
KP
3528 goto err_out;
3529 }
3530
acbf7c62
LN
3531 switch (t_alg->algt.type) {
3532 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62
LN
3533 err = crypto_register_alg(
3534 &t_alg->algt.alg.crypto);
aeb4c132 3535 alg = &t_alg->algt.alg.crypto;
acbf7c62 3536 break;
aeb4c132
HX
3537
3538 case CRYPTO_ALG_TYPE_AEAD:
3539 err = crypto_register_aead(
3540 &t_alg->algt.alg.aead);
3541 alg = &t_alg->algt.alg.aead.base;
3542 break;
3543
acbf7c62
LN
3544 case CRYPTO_ALG_TYPE_AHASH:
3545 err = crypto_register_ahash(
3546 &t_alg->algt.alg.hash);
aeb4c132 3547 alg = &t_alg->algt.alg.hash.halg.base;
acbf7c62
LN
3548 break;
3549 }
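/*
 * Failure to register one algorithm is not fatal: log it, free the
 * instance and carry on with the rest of the list.
 */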
9c4a7965
KP
3550 if (err) {
3551 dev_err(dev, "%s alg registration failed\n",
aeb4c132 3552 alg->cra_driver_name);
24b92ff2 3553 devm_kfree(dev, t_alg);
991155ba 3554 } else
9c4a7965 3555 list_add_tail(&t_alg->entry, &priv->alg_list);
9c4a7965
KP
3556 }
3557 }
5b859b6e
KP
3558 if (!list_empty(&priv->alg_list))
3559 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3560 (char *)of_get_property(np, "compatible", NULL));
9c4a7965
KP
3561
3562 return 0;
3563
3564err_out:
3565 talitos_remove(ofdev);
9c4a7965
KP
3566
3567 return err;
3568}
3569
6c3f975a 3570static const struct of_device_id talitos_match[] = {
0635b7db
LC
3571#ifdef CONFIG_CRYPTO_DEV_TALITOS1
3572 {
3573 .compatible = "fsl,sec1.0",
3574 },
3575#endif
3576#ifdef CONFIG_CRYPTO_DEV_TALITOS2
9c4a7965
KP
3577 {
3578 .compatible = "fsl,sec2.0",
3579 },
0635b7db 3580#endif
9c4a7965
KP
3581 {},
3582};
3583MODULE_DEVICE_TABLE(of, talitos_match);
3584
1c48a5c9 3585static struct platform_driver talitos_driver = {
4018294b
GL
3586 .driver = {
3587 .name = "talitos",
4018294b
GL
3588 .of_match_table = talitos_match,
3589 },
9c4a7965 3590 .probe = talitos_probe,
596f1034 3591 .remove = talitos_remove,
9c4a7965
KP
3592};
3593
741e8c2d 3594module_platform_driver(talitos_driver);
9c4a7965
KP
3595
3596MODULE_LICENSE("GPL");
3597MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3598MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");