crypto: talitos - fix AEAD processing.
[linux-2.6-block.git] / drivers / crypto / talitos.c
9c4a7965
KP
1/*
2 * talitos - Freescale Integrated Security Engine (SEC) device driver
3 *
5228f0f7 4 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
9c4a7965
KP
5 *
6 * Scatterlist Crypto API glue code copied from files with the following:
7 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Crypto algorithm registration code copied from hifn driver:
10 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
11 * All rights reserved.
12 *
13 * This program is free software; you can redistribute it and/or modify
14 * it under the terms of the GNU General Public License as published by
15 * the Free Software Foundation; either version 2 of the License, or
16 * (at your option) any later version.
17 *
18 * This program is distributed in the hope that it will be useful,
19 * but WITHOUT ANY WARRANTY; without even the implied warranty of
20 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 * GNU General Public License for more details.
22 *
23 * You should have received a copy of the GNU General Public License
24 * along with this program; if not, write to the Free Software
25 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
26 */
27
28#include <linux/kernel.h>
29#include <linux/module.h>
30#include <linux/mod_devicetable.h>
31#include <linux/device.h>
32#include <linux/interrupt.h>
33#include <linux/crypto.h>
34#include <linux/hw_random.h>
5af50730
RH
35#include <linux/of_address.h>
36#include <linux/of_irq.h>
9c4a7965
KP
37#include <linux/of_platform.h>
38#include <linux/dma-mapping.h>
39#include <linux/io.h>
40#include <linux/spinlock.h>
41#include <linux/rtnetlink.h>
5a0e3ad6 42#include <linux/slab.h>
9c4a7965
KP
43
44#include <crypto/algapi.h>
45#include <crypto/aes.h>
3952f17e 46#include <crypto/des.h>
9c4a7965 47#include <crypto/sha.h>
497f2e6b 48#include <crypto/md5.h>
e98014ab 49#include <crypto/internal/aead.h>
9c4a7965 50#include <crypto/authenc.h>
4de9d0b5 51#include <crypto/skcipher.h>
acbf7c62
LN
52#include <crypto/hash.h>
53#include <crypto/internal/hash.h>
4de9d0b5 54#include <crypto/scatterwalk.h>
9c4a7965
KP
55
56#include "talitos.h"
57
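/*
 * write a bus (DMA) address and length into a h/w descriptor pointer;
 * SEC1 keeps the length in len1 and has no extended-address byte,
 * SEC2+ also carries the upper address bits in eptr
 */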
922f9dc8 58static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
da9de146 59 unsigned int len, bool is_sec1)
81eb024c 60{
edc6bd69 61 ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
da9de146
LC
62 if (is_sec1) {
63 ptr->len1 = cpu_to_be16(len);
64 } else {
65 ptr->len = cpu_to_be16(len);
922f9dc8 66 ptr->eptr = upper_32_bits(dma_addr);
da9de146 67 }
81eb024c
KP
68}
69
340ff60a
HG
70static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
71 struct talitos_ptr *src_ptr, bool is_sec1)
72{
73 dst_ptr->ptr = src_ptr->ptr;
922f9dc8 74 if (is_sec1) {
da9de146 75 dst_ptr->len1 = src_ptr->len1;
922f9dc8 76 } else {
da9de146
LC
77 dst_ptr->len = src_ptr->len;
78 dst_ptr->eptr = src_ptr->eptr;
922f9dc8 79 }
538caf83
LC
80}
81
922f9dc8
LC
82static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
83 bool is_sec1)
538caf83 84{
922f9dc8
LC
85 if (is_sec1)
86 return be16_to_cpu(ptr->len1);
87 else
88 return be16_to_cpu(ptr->len);
538caf83
LC
89}
90
b096b544
LC
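/*
 * helpers for the pointer's extent/j_extent byte (link table flags and
 * extra length); SEC1 descriptor pointers have no such field
 */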
91static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
92 bool is_sec1)
185eb79f 93{
922f9dc8 94 if (!is_sec1)
b096b544
LC
95 ptr->j_extent = val;
96}
97
98static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
99{
100 if (!is_sec1)
101 ptr->j_extent |= val;
185eb79f
LC
102}
103
9c4a7965
KP
104/*
105 * map virtual single (contiguous) pointer to h/w descriptor pointer
106 */
6a4967c3
LC
107static void __map_single_talitos_ptr(struct device *dev,
108 struct talitos_ptr *ptr,
109 unsigned int len, void *data,
110 enum dma_data_direction dir,
111 unsigned long attrs)
112{
113 dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
114 struct talitos_private *priv = dev_get_drvdata(dev);
115 bool is_sec1 = has_ftr_sec1(priv);
116
117 to_talitos_ptr(ptr, dma_addr, len, is_sec1);
118}
119
9c4a7965 120static void map_single_talitos_ptr(struct device *dev,
edc6bd69 121 struct talitos_ptr *ptr,
42e8b0d7 122 unsigned int len, void *data,
9c4a7965
KP
123 enum dma_data_direction dir)
124{
6a4967c3
LC
125 __map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
126}
81eb024c 127
6a4967c3
LC
128static void map_single_talitos_ptr_nosync(struct device *dev,
129 struct talitos_ptr *ptr,
130 unsigned int len, void *data,
131 enum dma_data_direction dir)
132{
133 __map_single_talitos_ptr(dev, ptr, len, data, dir,
134 DMA_ATTR_SKIP_CPU_SYNC);
9c4a7965
KP
135}
136
137/*
138 * unmap bus single (contiguous) h/w descriptor pointer
139 */
140static void unmap_single_talitos_ptr(struct device *dev,
edc6bd69 141 struct talitos_ptr *ptr,
9c4a7965
KP
142 enum dma_data_direction dir)
143{
922f9dc8
LC
144 struct talitos_private *priv = dev_get_drvdata(dev);
145 bool is_sec1 = has_ftr_sec1(priv);
146
edc6bd69 147 dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
922f9dc8 148 from_talitos_ptr_len(ptr, is_sec1), dir);
9c4a7965
KP
149}
150
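/*
 * reset one channel and restore its configuration: 36-bit addressing,
 * done writeback and done IRQ, plus ICCR writeback when the h/w
 * supports hardware auth check
 */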
151static int reset_channel(struct device *dev, int ch)
152{
153 struct talitos_private *priv = dev_get_drvdata(dev);
154 unsigned int timeout = TALITOS_TIMEOUT;
dd3c0987 155 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965 156
dd3c0987
LC
157 if (is_sec1) {
158 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
159 TALITOS1_CCCR_LO_RESET);
9c4a7965 160
dd3c0987
LC
161 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
162 TALITOS1_CCCR_LO_RESET) && --timeout)
163 cpu_relax();
164 } else {
165 setbits32(priv->chan[ch].reg + TALITOS_CCCR,
166 TALITOS2_CCCR_RESET);
167
168 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
169 TALITOS2_CCCR_RESET) && --timeout)
170 cpu_relax();
171 }
9c4a7965
KP
172
173 if (timeout == 0) {
174 dev_err(dev, "failed to reset channel %d\n", ch);
175 return -EIO;
176 }
177
81eb024c 178 /* set 36-bit addressing, done writeback enable and done IRQ enable */
ad42d5fc 179 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
81eb024c 180 TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
37b5e889
LC
181 /* enable chaining descriptors */
182 if (is_sec1)
183 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
184 TALITOS_CCCR_LO_NE);
9c4a7965 185
fe5720e2
KP
186 /* and ICCR writeback, if available */
187 if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
ad42d5fc 188 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
fe5720e2
KP
189 TALITOS_CCCR_LO_IWSE);
190
9c4a7965
KP
191 return 0;
192}
193
194static int reset_device(struct device *dev)
195{
196 struct talitos_private *priv = dev_get_drvdata(dev);
197 unsigned int timeout = TALITOS_TIMEOUT;
dd3c0987
LC
198 bool is_sec1 = has_ftr_sec1(priv);
199 u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;
9c4a7965 200
c3e337f8 201 setbits32(priv->reg + TALITOS_MCR, mcr);
9c4a7965 202
dd3c0987 203 while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
9c4a7965
KP
204 && --timeout)
205 cpu_relax();
206
2cdba3cf 207 if (priv->irq[1]) {
c3e337f8
KP
208 mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
209 setbits32(priv->reg + TALITOS_MCR, mcr);
210 }
211
9c4a7965
KP
212 if (timeout == 0) {
213 dev_err(dev, "failed to reset device\n");
214 return -EIO;
215 }
216
217 return 0;
218}
219
220/*
221 * Reset and initialize the device
222 */
223static int init_device(struct device *dev)
224{
225 struct talitos_private *priv = dev_get_drvdata(dev);
226 int ch, err;
dd3c0987 227 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965
KP
228
229 /*
230 * Master reset
231 * errata documentation: warning: certain SEC interrupts
232 * are not fully cleared by writing the MCR:SWR bit,
233 * set bit twice to completely reset
234 */
235 err = reset_device(dev);
236 if (err)
237 return err;
238
239 err = reset_device(dev);
240 if (err)
241 return err;
242
243 /* reset channels */
244 for (ch = 0; ch < priv->num_channels; ch++) {
245 err = reset_channel(dev, ch);
246 if (err)
247 return err;
248 }
249
250 /* enable channel done and error interrupts */
dd3c0987
LC
251 if (is_sec1) {
252 clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
253 clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
254 /* disable parity error check in DEU (erroneous? test vect.) */
255 setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
256 } else {
257 setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
258 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
259 }
9c4a7965 260
fe5720e2
KP
261 /* disable integrity check error interrupts (use writeback instead) */
262 if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
5fa7fa14 263 setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
fe5720e2
KP
264 TALITOS_MDEUICR_LO_ICE);
265
9c4a7965
KP
266 return 0;
267}
268
269/**
270 * talitos_submit - submits a descriptor to the device for processing
271 * @dev: the SEC device to be used
5228f0f7 272 * @ch: the SEC device channel to be used
9c4a7965
KP
273 * @desc: the descriptor to be processed by the device
274 * @callback: whom to call when processing is complete
275 * @context: a handle for use by caller (optional)
276 *
277 * desc must contain valid dma-mapped (bus physical) address pointers.
278 * callback must check err and feedback in descriptor header
279 * for device processing status.
280 */
865d5061
HG
281int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
282 void (*callback)(struct device *dev,
283 struct talitos_desc *desc,
284 void *context, int error),
285 void *context)
9c4a7965
KP
286{
287 struct talitos_private *priv = dev_get_drvdata(dev);
288 struct talitos_request *request;
5228f0f7 289 unsigned long flags;
9c4a7965 290 int head;
7d607c6a 291 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965 292
4b992628 293 spin_lock_irqsave(&priv->chan[ch].head_lock, flags);
9c4a7965 294
4b992628 295 if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
ec6644d6 296 /* h/w fifo is full */
4b992628 297 spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
9c4a7965
KP
298 return -EAGAIN;
299 }
300
4b992628
KP
301 head = priv->chan[ch].head;
302 request = &priv->chan[ch].fifo[head];
ec6644d6 303
9c4a7965 304 /* map descriptor and save caller data */
7d607c6a
LC
305 if (is_sec1) {
306 desc->hdr1 = desc->hdr;
7d607c6a
LC
307 request->dma_desc = dma_map_single(dev, &desc->hdr1,
308 TALITOS_DESC_SIZE,
309 DMA_BIDIRECTIONAL);
310 } else {
311 request->dma_desc = dma_map_single(dev, desc,
312 TALITOS_DESC_SIZE,
313 DMA_BIDIRECTIONAL);
314 }
9c4a7965
KP
315 request->callback = callback;
316 request->context = context;
317
318 /* increment fifo head */
4b992628 319 priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);
9c4a7965
KP
320
321 smp_wmb();
322 request->desc = desc;
323
324 /* GO! */
325 wmb();
ad42d5fc
KP
326 out_be32(priv->chan[ch].reg + TALITOS_FF,
327 upper_32_bits(request->dma_desc));
328 out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
a752447a 329 lower_32_bits(request->dma_desc));
9c4a7965 330
4b992628 331 spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
9c4a7965
KP
332
333 return -EINPROGRESS;
334}
865d5061 335EXPORT_SYMBOL(talitos_submit);
9c4a7965
KP
336
337/*
338 * process what was done, notify callback of error if not
339 */
340static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
341{
342 struct talitos_private *priv = dev_get_drvdata(dev);
343 struct talitos_request *request, saved_req;
344 unsigned long flags;
345 int tail, status;
7d607c6a 346 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965 347
4b992628 348 spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
9c4a7965 349
4b992628
KP
350 tail = priv->chan[ch].tail;
351 while (priv->chan[ch].fifo[tail].desc) {
7d607c6a
LC
352 __be32 hdr;
353
4b992628 354 request = &priv->chan[ch].fifo[tail];
9c4a7965
KP
355
356 /* descriptors with their done bits set don't get the error */
357 rmb();
37b5e889
LC
358 if (!is_sec1)
359 hdr = request->desc->hdr;
360 else if (request->desc->next_desc)
361 hdr = (request->desc + 1)->hdr1;
362 else
363 hdr = request->desc->hdr1;
7d607c6a
LC
364
365 if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
9c4a7965 366 status = 0;
ca38a814 367 else
9c4a7965
KP
368 if (!error)
369 break;
370 else
371 status = error;
372
373 dma_unmap_single(dev, request->dma_desc,
7d607c6a 374 TALITOS_DESC_SIZE,
e938e465 375 DMA_BIDIRECTIONAL);
9c4a7965
KP
376
377 /* copy entries so we can call callback outside lock */
378 saved_req.desc = request->desc;
379 saved_req.callback = request->callback;
380 saved_req.context = request->context;
381
382 /* release request entry in fifo */
383 smp_wmb();
384 request->desc = NULL;
385
386 /* increment fifo tail */
4b992628 387 priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);
9c4a7965 388
4b992628 389 spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
ec6644d6 390
4b992628 391 atomic_dec(&priv->chan[ch].submit_count);
ec6644d6 392
9c4a7965
KP
393 saved_req.callback(dev, saved_req.desc, saved_req.context,
394 status);
395 /* channel may resume processing in single desc error case */
396 if (error && !reset_ch && status == error)
397 return;
4b992628
KP
398 spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
399 tail = priv->chan[ch].tail;
9c4a7965
KP
400 }
401
4b992628 402 spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
9c4a7965
KP
403}
404
405/*
406 * process completed requests for channels that have done status
407 */
dd3c0987
LC
408#define DEF_TALITOS1_DONE(name, ch_done_mask) \
409static void talitos1_done_##name(unsigned long data) \
410{ \
411 struct device *dev = (struct device *)data; \
412 struct talitos_private *priv = dev_get_drvdata(dev); \
413 unsigned long flags; \
414 \
415 if (ch_done_mask & 0x10000000) \
416 flush_channel(dev, 0, 0, 0); \
dd3c0987
LC
417 if (ch_done_mask & 0x40000000) \
418 flush_channel(dev, 1, 0, 0); \
419 if (ch_done_mask & 0x00010000) \
420 flush_channel(dev, 2, 0, 0); \
421 if (ch_done_mask & 0x00040000) \
422 flush_channel(dev, 3, 0, 0); \
423 \
dd3c0987
LC
424 /* At this point, all completed channels have been processed */ \
425 /* Unmask done interrupts for channels completed later on. */ \
426 spin_lock_irqsave(&priv->reg_lock, flags); \
427 clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
428 clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT); \
429 spin_unlock_irqrestore(&priv->reg_lock, flags); \
430}
431
432DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
9c02e285 433DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)
dd3c0987
LC
434
435#define DEF_TALITOS2_DONE(name, ch_done_mask) \
436static void talitos2_done_##name(unsigned long data) \
c3e337f8
KP
437{ \
438 struct device *dev = (struct device *)data; \
439 struct talitos_private *priv = dev_get_drvdata(dev); \
511d63cb 440 unsigned long flags; \
c3e337f8
KP
441 \
442 if (ch_done_mask & 1) \
443 flush_channel(dev, 0, 0, 0); \
c3e337f8
KP
444 if (ch_done_mask & (1 << 2)) \
445 flush_channel(dev, 1, 0, 0); \
446 if (ch_done_mask & (1 << 4)) \
447 flush_channel(dev, 2, 0, 0); \
448 if (ch_done_mask & (1 << 6)) \
449 flush_channel(dev, 3, 0, 0); \
450 \
c3e337f8
KP
451 /* At this point, all completed channels have been processed */ \
452 /* Unmask done interrupts for channels completed later on. */ \
511d63cb 453 spin_lock_irqsave(&priv->reg_lock, flags); \
c3e337f8 454 setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
dd3c0987 455 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT); \
511d63cb 456 spin_unlock_irqrestore(&priv->reg_lock, flags); \
9c4a7965 457}
dd3c0987
LC
458
459DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
9c02e285 460DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
dd3c0987
LC
461DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
462DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
9c4a7965
KP
463
464/*
465 * locate current (offending) descriptor
466 */
3e721aeb 467static u32 current_desc_hdr(struct device *dev, int ch)
9c4a7965
KP
468{
469 struct talitos_private *priv = dev_get_drvdata(dev);
b62ffd8c 470 int tail, iter;
9c4a7965
KP
471 dma_addr_t cur_desc;
472
b62ffd8c
HG
473 cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
474 cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);
9c4a7965 475
b62ffd8c
HG
476 if (!cur_desc) {
477 dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
478 return 0;
479 }
480
481 tail = priv->chan[ch].tail;
482
483 iter = tail;
37b5e889
LC
484 while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
485 priv->chan[ch].fifo[iter].desc->next_desc != cur_desc) {
b62ffd8c
HG
486 iter = (iter + 1) & (priv->fifo_len - 1);
487 if (iter == tail) {
9c4a7965 488 dev_err(dev, "couldn't locate current descriptor\n");
3e721aeb 489 return 0;
9c4a7965
KP
490 }
491 }
492
37b5e889
LC
493 if (priv->chan[ch].fifo[iter].desc->next_desc == cur_desc)
494 return (priv->chan[ch].fifo[iter].desc + 1)->hdr;
495
b62ffd8c 496 return priv->chan[ch].fifo[iter].desc->hdr;
9c4a7965
KP
497}
498
499/*
500 * user diagnostics; report root cause of error based on execution unit status
501 */
3e721aeb 502static void report_eu_error(struct device *dev, int ch, u32 desc_hdr)
9c4a7965
KP
503{
504 struct talitos_private *priv = dev_get_drvdata(dev);
505 int i;
506
3e721aeb 507 if (!desc_hdr)
ad42d5fc 508 desc_hdr = in_be32(priv->chan[ch].reg + TALITOS_DESCBUF);
3e721aeb
KP
509
510 switch (desc_hdr & DESC_HDR_SEL0_MASK) {
9c4a7965
KP
511 case DESC_HDR_SEL0_AFEU:
512 dev_err(dev, "AFEUISR 0x%08x_%08x\n",
5fa7fa14
LC
513 in_be32(priv->reg_afeu + TALITOS_EUISR),
514 in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
9c4a7965
KP
515 break;
516 case DESC_HDR_SEL0_DEU:
517 dev_err(dev, "DEUISR 0x%08x_%08x\n",
5fa7fa14
LC
518 in_be32(priv->reg_deu + TALITOS_EUISR),
519 in_be32(priv->reg_deu + TALITOS_EUISR_LO));
9c4a7965
KP
520 break;
521 case DESC_HDR_SEL0_MDEUA:
522 case DESC_HDR_SEL0_MDEUB:
523 dev_err(dev, "MDEUISR 0x%08x_%08x\n",
5fa7fa14
LC
524 in_be32(priv->reg_mdeu + TALITOS_EUISR),
525 in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
9c4a7965
KP
526 break;
527 case DESC_HDR_SEL0_RNG:
528 dev_err(dev, "RNGUISR 0x%08x_%08x\n",
5fa7fa14
LC
529 in_be32(priv->reg_rngu + TALITOS_ISR),
530 in_be32(priv->reg_rngu + TALITOS_ISR_LO));
9c4a7965
KP
531 break;
532 case DESC_HDR_SEL0_PKEU:
533 dev_err(dev, "PKEUISR 0x%08x_%08x\n",
5fa7fa14
LC
534 in_be32(priv->reg_pkeu + TALITOS_EUISR),
535 in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
9c4a7965
KP
536 break;
537 case DESC_HDR_SEL0_AESU:
538 dev_err(dev, "AESUISR 0x%08x_%08x\n",
5fa7fa14
LC
539 in_be32(priv->reg_aesu + TALITOS_EUISR),
540 in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
9c4a7965
KP
541 break;
542 case DESC_HDR_SEL0_CRCU:
543 dev_err(dev, "CRCUISR 0x%08x_%08x\n",
5fa7fa14
LC
544 in_be32(priv->reg_crcu + TALITOS_EUISR),
545 in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
9c4a7965
KP
546 break;
547 case DESC_HDR_SEL0_KEU:
548 dev_err(dev, "KEUISR 0x%08x_%08x\n",
5fa7fa14
LC
549 in_be32(priv->reg_pkeu + TALITOS_EUISR),
550 in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
9c4a7965
KP
551 break;
552 }
553
3e721aeb 554 switch (desc_hdr & DESC_HDR_SEL1_MASK) {
9c4a7965
KP
555 case DESC_HDR_SEL1_MDEUA:
556 case DESC_HDR_SEL1_MDEUB:
557 dev_err(dev, "MDEUISR 0x%08x_%08x\n",
5fa7fa14
LC
558 in_be32(priv->reg_mdeu + TALITOS_EUISR),
559 in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
9c4a7965
KP
560 break;
561 case DESC_HDR_SEL1_CRCU:
562 dev_err(dev, "CRCUISR 0x%08x_%08x\n",
5fa7fa14
LC
563 in_be32(priv->reg_crcu + TALITOS_EUISR),
564 in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
9c4a7965
KP
565 break;
566 }
567
568 for (i = 0; i < 8; i++)
569 dev_err(dev, "DESCBUF 0x%08x_%08x\n",
ad42d5fc
KP
570 in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
571 in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
9c4a7965
KP
572}
573
574/*
575 * recover from error interrupts
576 */
5e718a09 577static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
9c4a7965 578{
9c4a7965
KP
579 struct talitos_private *priv = dev_get_drvdata(dev);
580 unsigned int timeout = TALITOS_TIMEOUT;
dd3c0987 581 int ch, error, reset_dev = 0;
42e8b0d7 582 u32 v_lo;
dd3c0987
LC
583 bool is_sec1 = has_ftr_sec1(priv);
584 int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */
9c4a7965
KP
585
586 for (ch = 0; ch < priv->num_channels; ch++) {
587 /* skip channels without errors */
dd3c0987
LC
588 if (is_sec1) {
589 /* bits 29, 31, 17, 19 */
590 if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
591 continue;
592 } else {
593 if (!(isr & (1 << (ch * 2 + 1))))
594 continue;
595 }
9c4a7965
KP
596
597 error = -EINVAL;
598
ad42d5fc 599 v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);
9c4a7965
KP
600
601 if (v_lo & TALITOS_CCPSR_LO_DOF) {
602 dev_err(dev, "double fetch fifo overflow error\n");
603 error = -EAGAIN;
604 reset_ch = 1;
605 }
606 if (v_lo & TALITOS_CCPSR_LO_SOF) {
607 /* h/w dropped descriptor */
608 dev_err(dev, "single fetch fifo overflow error\n");
609 error = -EAGAIN;
610 }
611 if (v_lo & TALITOS_CCPSR_LO_MDTE)
612 dev_err(dev, "master data transfer error\n");
613 if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
4d9b3a5b 614 dev_err(dev, is_sec1 ? "pointer not complete error\n"
dd3c0987 615 : "s/g data length zero error\n");
9c4a7965 616 if (v_lo & TALITOS_CCPSR_LO_FPZ)
dd3c0987
LC
617 dev_err(dev, is_sec1 ? "parity error\n"
618 : "fetch pointer zero error\n");
9c4a7965
KP
619 if (v_lo & TALITOS_CCPSR_LO_IDH)
620 dev_err(dev, "illegal descriptor header error\n");
621 if (v_lo & TALITOS_CCPSR_LO_IEU)
dd3c0987
LC
622 dev_err(dev, is_sec1 ? "static assignment error\n"
623 : "invalid exec unit error\n");
9c4a7965 624 if (v_lo & TALITOS_CCPSR_LO_EU)
3e721aeb 625 report_eu_error(dev, ch, current_desc_hdr(dev, ch));
dd3c0987
LC
626 if (!is_sec1) {
627 if (v_lo & TALITOS_CCPSR_LO_GB)
628 dev_err(dev, "gather boundary error\n");
629 if (v_lo & TALITOS_CCPSR_LO_GRL)
630 dev_err(dev, "gather return/length error\n");
631 if (v_lo & TALITOS_CCPSR_LO_SB)
632 dev_err(dev, "scatter boundary error\n");
633 if (v_lo & TALITOS_CCPSR_LO_SRL)
634 dev_err(dev, "scatter return/length error\n");
635 }
9c4a7965
KP
636
637 flush_channel(dev, ch, error, reset_ch);
638
639 if (reset_ch) {
640 reset_channel(dev, ch);
641 } else {
ad42d5fc 642 setbits32(priv->chan[ch].reg + TALITOS_CCCR,
dd3c0987 643 TALITOS2_CCCR_CONT);
ad42d5fc
KP
644 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
645 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
dd3c0987 646 TALITOS2_CCCR_CONT) && --timeout)
9c4a7965
KP
647 cpu_relax();
648 if (timeout == 0) {
649 dev_err(dev, "failed to restart channel %d\n",
650 ch);
651 reset_dev = 1;
652 }
653 }
654 }
dd3c0987
LC
655 if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
656 (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
657 if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
658 dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
659 isr, isr_lo);
660 else
661 dev_err(dev, "done overflow, internal time out, or "
662 "rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);
9c4a7965
KP
663
664 /* purge request queues */
665 for (ch = 0; ch < priv->num_channels; ch++)
666 flush_channel(dev, ch, -EIO, 1);
667
668 /* reset and reinitialize the device */
669 init_device(dev);
670 }
671}
672
dd3c0987
LC
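/*
 * per-IRQ interrupt handlers: acknowledge the interrupt, report any
 * channel errors via talitos_error(), and defer done processing to the
 * tasklets defined above
 */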
673#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
674static irqreturn_t talitos1_interrupt_##name(int irq, void *data) \
675{ \
676 struct device *dev = data; \
677 struct talitos_private *priv = dev_get_drvdata(dev); \
678 u32 isr, isr_lo; \
679 unsigned long flags; \
680 \
681 spin_lock_irqsave(&priv->reg_lock, flags); \
682 isr = in_be32(priv->reg + TALITOS_ISR); \
683 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
684 /* Acknowledge interrupt */ \
685 out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
686 out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
687 \
688 if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) { \
689 spin_unlock_irqrestore(&priv->reg_lock, flags); \
690 talitos_error(dev, isr & ch_err_mask, isr_lo); \
691 } \
692 else { \
693 if (likely(isr & ch_done_mask)) { \
694 /* mask further done interrupts. */ \
695 setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
696 /* done_task will unmask done interrupts at exit */ \
697 tasklet_schedule(&priv->done_task[tlet]); \
698 } \
699 spin_unlock_irqrestore(&priv->reg_lock, flags); \
700 } \
701 \
702 return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
703 IRQ_NONE; \
704}
705
706DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)
707
708#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
709static irqreturn_t talitos2_interrupt_##name(int irq, void *data) \
c3e337f8
KP
710{ \
711 struct device *dev = data; \
712 struct talitos_private *priv = dev_get_drvdata(dev); \
713 u32 isr, isr_lo; \
511d63cb 714 unsigned long flags; \
c3e337f8 715 \
511d63cb 716 spin_lock_irqsave(&priv->reg_lock, flags); \
c3e337f8
KP
717 isr = in_be32(priv->reg + TALITOS_ISR); \
718 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
719 /* Acknowledge interrupt */ \
720 out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
721 out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
722 \
511d63cb
HG
723 if (unlikely(isr & ch_err_mask || isr_lo)) { \
724 spin_unlock_irqrestore(&priv->reg_lock, flags); \
725 talitos_error(dev, isr & ch_err_mask, isr_lo); \
726 } \
727 else { \
c3e337f8
KP
728 if (likely(isr & ch_done_mask)) { \
729 /* mask further done interrupts. */ \
730 clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
731 /* done_task will unmask done interrupts at exit */ \
732 tasklet_schedule(&priv->done_task[tlet]); \
733 } \
511d63cb
HG
734 spin_unlock_irqrestore(&priv->reg_lock, flags); \
735 } \
c3e337f8
KP
736 \
737 return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
738 IRQ_NONE; \
9c4a7965 739}
dd3c0987
LC
740
741DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
742DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
743 0)
744DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
745 1)
9c4a7965
KP
746
747/*
748 * hwrng
749 */
750static int talitos_rng_data_present(struct hwrng *rng, int wait)
751{
752 struct device *dev = (struct device *)rng->priv;
753 struct talitos_private *priv = dev_get_drvdata(dev);
754 u32 ofl;
755 int i;
756
757 for (i = 0; i < 20; i++) {
5fa7fa14 758 ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
9c4a7965
KP
759 TALITOS_RNGUSR_LO_OFL;
760 if (ofl || !wait)
761 break;
762 udelay(10);
763 }
764
765 return !!ofl;
766}
767
768static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
769{
770 struct device *dev = (struct device *)rng->priv;
771 struct talitos_private *priv = dev_get_drvdata(dev);
772
773 /* rng fifo requires 64-bit accesses */
5fa7fa14
LC
774 *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
775 *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);
9c4a7965
KP
776
777 return sizeof(u32);
778}
779
780static int talitos_rng_init(struct hwrng *rng)
781{
782 struct device *dev = (struct device *)rng->priv;
783 struct talitos_private *priv = dev_get_drvdata(dev);
784 unsigned int timeout = TALITOS_TIMEOUT;
785
5fa7fa14
LC
786 setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
787 while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
788 & TALITOS_RNGUSR_LO_RD)
9c4a7965
KP
789 && --timeout)
790 cpu_relax();
791 if (timeout == 0) {
792 dev_err(dev, "failed to reset rng hw\n");
793 return -ENODEV;
794 }
795
796 /* start generating */
5fa7fa14 797 setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);
9c4a7965
KP
798
799 return 0;
800}
801
802static int talitos_register_rng(struct device *dev)
803{
804 struct talitos_private *priv = dev_get_drvdata(dev);
35a3bb3d 805 int err;
9c4a7965
KP
806
807 priv->rng.name = dev_driver_string(dev),
808 priv->rng.init = talitos_rng_init,
809 priv->rng.data_present = talitos_rng_data_present,
810 priv->rng.data_read = talitos_rng_data_read,
811 priv->rng.priv = (unsigned long)dev;
812
35a3bb3d
AS
813 err = hwrng_register(&priv->rng);
814 if (!err)
815 priv->rng_registered = true;
816
817 return err;
9c4a7965
KP
818}
819
820static void talitos_unregister_rng(struct device *dev)
821{
822 struct talitos_private *priv = dev_get_drvdata(dev);
823
35a3bb3d
AS
824 if (!priv->rng_registered)
825 return;
826
9c4a7965 827 hwrng_unregister(&priv->rng);
35a3bb3d 828 priv->rng_registered = false;
9c4a7965
KP
829}
830
831/*
832 * crypto alg
833 */
834#define TALITOS_CRA_PRIORITY 3000
7405c8d7
LC
835/*
836 * Defines a priority for doing AEAD with descriptor type
837 * HMAC_SNOOP_NO_AFEU (HSNA) instead of type IPSEC_ESP
838 */
839#define TALITOS_CRA_PRIORITY_AEAD_HSNA (TALITOS_CRA_PRIORITY - 1)
b8fbdc2b 840#ifdef CONFIG_CRYPTO_DEV_TALITOS_SEC2
03d2c511 841#define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
b8fbdc2b
CL
842#else
843#define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
844#endif
3952f17e 845#define TALITOS_MAX_IV_LENGTH 16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
70bcaca7 846
9c4a7965
KP
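/*
 * per-tfm context: assigned channel, descriptor header template and
 * the DMA-mapped key material
 */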
847struct talitos_ctx {
848 struct device *dev;
5228f0f7 849 int ch;
9c4a7965
KP
850 __be32 desc_hdr_template;
851 u8 key[TALITOS_MAX_KEY_SIZE];
70bcaca7 852 u8 iv[TALITOS_MAX_IV_LENGTH];
2e13ce08 853 dma_addr_t dma_key;
9c4a7965
KP
854 unsigned int keylen;
855 unsigned int enckeylen;
856 unsigned int authkeylen;
9c4a7965
KP
857};
858
497f2e6b
LN
859#define HASH_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE
860#define TALITOS_MDEU_MAX_CONTEXT_SIZE TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512
861
862struct talitos_ahash_req_ctx {
60f208d7 863 u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
497f2e6b 864 unsigned int hw_context_size;
3c0dd190
LC
865 u8 buf[2][HASH_MAX_BLOCK_SIZE];
866 int buf_idx;
60f208d7 867 unsigned int swinit;
497f2e6b
LN
868 unsigned int first;
869 unsigned int last;
870 unsigned int to_hash_later;
42e8b0d7 871 unsigned int nbuf;
497f2e6b
LN
872 struct scatterlist bufsl[2];
873 struct scatterlist *psrc;
874};
875
3639ca84
HG
876struct talitos_export_state {
877 u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
878 u8 buf[HASH_MAX_BLOCK_SIZE];
879 unsigned int swinit;
880 unsigned int first;
881 unsigned int last;
882 unsigned int to_hash_later;
883 unsigned int nbuf;
884};
885
56af8cd4
LN
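/*
 * split the authenc() key blob into auth and cipher keys, store them
 * back to back in ctx->key and map the concatenation for DMA
 */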
886static int aead_setkey(struct crypto_aead *authenc,
887 const u8 *key, unsigned int keylen)
9c4a7965
KP
888{
889 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
2e13ce08 890 struct device *dev = ctx->dev;
c306a98d 891 struct crypto_authenc_keys keys;
9c4a7965 892
c306a98d 893 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
9c4a7965
KP
894 goto badkey;
895
c306a98d 896 if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
9c4a7965
KP
897 goto badkey;
898
2e13ce08
LC
899 if (ctx->keylen)
900 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
901
c306a98d
MK
902 memcpy(ctx->key, keys.authkey, keys.authkeylen);
903 memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
9c4a7965 904
c306a98d
MK
905 ctx->keylen = keys.authkeylen + keys.enckeylen;
906 ctx->enckeylen = keys.enckeylen;
907 ctx->authkeylen = keys.authkeylen;
2e13ce08
LC
908 ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
909 DMA_TO_DEVICE);
9c4a7965 910
8f0691fc 911 memzero_explicit(&keys, sizeof(keys));
9c4a7965
KP
912 return 0;
913
914badkey:
915 crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
8f0691fc 916 memzero_explicit(&keys, sizeof(keys));
9c4a7965
KP
917 return -EINVAL;
918}
919
ef7c5c85
HX
920static int aead_des3_setkey(struct crypto_aead *authenc,
921 const u8 *key, unsigned int keylen)
922{
923 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
924 struct device *dev = ctx->dev;
925 struct crypto_authenc_keys keys;
926 u32 flags;
927 int err;
928
929 err = crypto_authenc_extractkeys(&keys, key, keylen);
930 if (unlikely(err))
931 goto badkey;
932
933 err = -EINVAL;
934 if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
935 goto badkey;
936
937 if (keys.enckeylen != DES3_EDE_KEY_SIZE)
938 goto badkey;
939
940 flags = crypto_aead_get_flags(authenc);
941 err = __des3_verify_key(&flags, keys.enckey);
942 if (unlikely(err)) {
943 crypto_aead_set_flags(authenc, flags);
944 goto out;
945 }
946
947 if (ctx->keylen)
948 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
949
950 memcpy(ctx->key, keys.authkey, keys.authkeylen);
951 memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
952
953 ctx->keylen = keys.authkeylen + keys.enckeylen;
954 ctx->enckeylen = keys.enckeylen;
955 ctx->authkeylen = keys.authkeylen;
956 ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
957 DMA_TO_DEVICE);
958
959out:
960 memzero_explicit(&keys, sizeof(keys));
961 return err;
962
963badkey:
964 crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
965 goto out;
966}
967
9c4a7965 968/*
56af8cd4 969 * talitos_edesc - s/w-extended descriptor
9c4a7965
KP
970 * @src_nents: number of segments in input scatterlist
971 * @dst_nents: number of segments in output scatterlist
aeb4c132 972 * @icv_ool: whether ICV is out-of-line
79fd31d3 973 * @iv_dma: dma address of iv for checking continuity and link table
9c4a7965 974 * @dma_len: length of dma mapped link_tbl space
6f65f6ac 975 * @dma_link_tbl: bus physical address of link_tbl/buf
9c4a7965 976 * @desc: h/w descriptor
6f65f6ac
LC
977 * @link_tbl: input and output h/w link tables (if {src,dst}_nents > 1) (SEC2)
978 * @buf: input and output buffer (if {src,dst}_nents > 1) (SEC1)
9c4a7965
KP
979 *
980 * if decrypting (with authcheck), or if either of src_nents or dst_nents
981 * is greater than 1, an integrity check value is concatenated to the end
982 * of link_tbl data
983 */
56af8cd4 984struct talitos_edesc {
9c4a7965
KP
985 int src_nents;
986 int dst_nents;
aeb4c132 987 bool icv_ool;
79fd31d3 988 dma_addr_t iv_dma;
9c4a7965
KP
989 int dma_len;
990 dma_addr_t dma_link_tbl;
991 struct talitos_desc desc;
6f65f6ac
LC
992 union {
993 struct talitos_ptr link_tbl[0];
994 u8 buf[0];
995 };
9c4a7965
KP
996};
997
4de9d0b5
LN
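/*
 * unmap the src/dst scatterlists of a request; on SEC1 a multi-segment
 * dst was bounced through edesc->buf, so copy the result back out first
 */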
998static void talitos_sg_unmap(struct device *dev,
999 struct talitos_edesc *edesc,
1000 struct scatterlist *src,
6a1e8d14
LC
1001 struct scatterlist *dst,
1002 unsigned int len, unsigned int offset)
4de9d0b5 1003{
6a1e8d14
LC
1004 struct talitos_private *priv = dev_get_drvdata(dev);
1005 bool is_sec1 = has_ftr_sec1(priv);
4de9d0b5
LN
1006 unsigned int src_nents = edesc->src_nents ? : 1;
1007 unsigned int dst_nents = edesc->dst_nents ? : 1;
1008
6a1e8d14
LC
1009 if (is_sec1 && dst && dst_nents > 1) {
1010 dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
1011 len, DMA_FROM_DEVICE);
1012 sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
1013 offset);
1014 }
4de9d0b5 1015 if (src != dst) {
6a1e8d14
LC
1016 if (src_nents == 1 || !is_sec1)
1017 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
4de9d0b5 1018
6a1e8d14 1019 if (dst && (dst_nents == 1 || !is_sec1))
b8a011d4 1020 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
6a1e8d14 1021 } else if (src_nents == 1 || !is_sec1) {
b8a011d4 1022 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
246a87cd
LC
1023 }
1024}
1025
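/*
 * undo the DMA mappings set up by ipsec_esp(); for HMAC_SNOOP_NO_AFEU
 * descriptors the next IV is read back from the tail of the ciphertext
 */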
9c4a7965 1026static void ipsec_esp_unmap(struct device *dev,
56af8cd4 1027 struct talitos_edesc *edesc,
7ede4c36 1028 struct aead_request *areq, bool encrypt)
9c4a7965 1029{
549bd8bc
LC
1030 struct crypto_aead *aead = crypto_aead_reqtfm(areq);
1031 struct talitos_ctx *ctx = crypto_aead_ctx(aead);
1032 unsigned int ivsize = crypto_aead_ivsize(aead);
7ede4c36
CL
1033 unsigned int authsize = crypto_aead_authsize(aead);
1034 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
9a655608
LC
1035 bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
1036 struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];
549bd8bc 1037
9a655608 1038 if (is_ipsec_esp)
549bd8bc
LC
1039 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
1040 DMA_FROM_DEVICE);
9a655608 1041 unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);
9c4a7965 1042
e345177d
CL
1043 talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
1044 cryptlen + authsize, areq->assoclen);
9c4a7965
KP
1045
1046 if (edesc->dma_len)
1047 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1048 DMA_BIDIRECTIONAL);
549bd8bc 1049
9a655608 1050 if (!is_ipsec_esp) {
549bd8bc
LC
1051 unsigned int dst_nents = edesc->dst_nents ? : 1;
1052
1053 sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
7ede4c36 1054 areq->assoclen + cryptlen - ivsize);
549bd8bc 1055 }
9c4a7965
KP
1056}
1057
1058/*
1059 * ipsec_esp descriptor callbacks
1060 */
1061static void ipsec_esp_encrypt_done(struct device *dev,
1062 struct talitos_desc *desc, void *context,
1063 int err)
1064{
1065 struct aead_request *areq = context;
9c4a7965 1066 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
2e13ce08 1067 unsigned int ivsize = crypto_aead_ivsize(authenc);
19bbbc63 1068 struct talitos_edesc *edesc;
9c4a7965 1069
19bbbc63
KP
1070 edesc = container_of(desc, struct talitos_edesc, desc);
1071
7ede4c36 1072 ipsec_esp_unmap(dev, edesc, areq, true);
9c4a7965 1073
2e13ce08
LC
1074 dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);
1075
9c4a7965
KP
1076 kfree(edesc);
1077
1078 aead_request_complete(areq, err);
1079}
1080
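/*
 * s/w ICV check: compare the ICV generated by the h/w (at the end of
 * the link table area) with the received ICV stashed just past it by
 * aead_decrypt()
 */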
fe5720e2 1081static void ipsec_esp_decrypt_swauth_done(struct device *dev,
e938e465
KP
1082 struct talitos_desc *desc,
1083 void *context, int err)
9c4a7965
KP
1084{
1085 struct aead_request *req = context;
9c4a7965 1086 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
aeb4c132 1087 unsigned int authsize = crypto_aead_authsize(authenc);
19bbbc63 1088 struct talitos_edesc *edesc;
aeb4c132 1089 char *oicv, *icv;
9c4a7965 1090
19bbbc63
KP
1091 edesc = container_of(desc, struct talitos_edesc, desc);
1092
7ede4c36 1093 ipsec_esp_unmap(dev, edesc, req, false);
9c4a7965
KP
1094
1095 if (!err) {
1096 /* auth check */
e345177d
CL
1097 oicv = edesc->buf + edesc->dma_len;
1098 icv = oicv - authsize;
aeb4c132 1099
79960943 1100 err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
9c4a7965
KP
1101 }
1102
1103 kfree(edesc);
1104
1105 aead_request_complete(req, err);
1106}
1107
fe5720e2 1108static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
e938e465
KP
1109 struct talitos_desc *desc,
1110 void *context, int err)
fe5720e2
KP
1111{
1112 struct aead_request *req = context;
19bbbc63
KP
1113 struct talitos_edesc *edesc;
1114
1115 edesc = container_of(desc, struct talitos_edesc, desc);
fe5720e2 1116
7ede4c36 1117 ipsec_esp_unmap(dev, edesc, req, false);
fe5720e2
KP
1118
1119 /* check ICV auth status */
e938e465
KP
1120 if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
1121 DESC_HDR_LO_ICCR1_PASS))
1122 err = -EBADMSG;
fe5720e2
KP
1123
1124 kfree(edesc);
1125
1126 aead_request_complete(req, err);
1127}
1128
9c4a7965
KP
1129/*
1130 * convert scatterlist to SEC h/w link table format
1131 * stop at cryptlen bytes
1132 */
aeb4c132 1133static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
e345177d 1134 unsigned int offset, int datalen, int elen,
aeb4c132 1135 struct talitos_ptr *link_tbl_ptr)
9c4a7965 1136{
e345177d 1137 int n_sg = elen ? sg_count + 1 : sg_count;
aeb4c132 1138 int count = 0;
e345177d 1139 int cryptlen = datalen + elen;
70bcaca7 1140
aeb4c132
HX
1141 while (cryptlen && sg && n_sg--) {
1142 unsigned int len = sg_dma_len(sg);
9c4a7965 1143
aeb4c132
HX
1144 if (offset >= len) {
1145 offset -= len;
1146 goto next;
1147 }
1148
1149 len -= offset;
1150
1151 if (len > cryptlen)
1152 len = cryptlen;
1153
e345177d
CL
1154 if (datalen > 0 && len > datalen) {
1155 to_talitos_ptr(link_tbl_ptr + count,
1156 sg_dma_address(sg) + offset, datalen, 0);
1157 to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
1158 count++;
1159 len -= datalen;
1160 offset += datalen;
1161 }
aeb4c132 1162 to_talitos_ptr(link_tbl_ptr + count,
da9de146 1163 sg_dma_address(sg) + offset, len, 0);
b096b544 1164 to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
aeb4c132
HX
1165 count++;
1166 cryptlen -= len;
e345177d 1167 datalen -= len;
aeb4c132
HX
1168 offset = 0;
1169
1170next:
1171 sg = sg_next(sg);
70bcaca7 1172 }
9c4a7965
KP
1173
1174 /* tag end of link table */
aeb4c132 1175 if (count > 0)
b096b544 1176 to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
e345177d 1177 DESC_PTR_LNKTBL_RET, 0);
70bcaca7 1178
aeb4c132
HX
1179 return count;
1180}
1181
2b122730
LC
1182static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
1183 unsigned int len, struct talitos_edesc *edesc,
1184 struct talitos_ptr *ptr, int sg_count,
e345177d
CL
1185 unsigned int offset, int tbl_off, int elen,
1186 bool force)
246a87cd 1187{
246a87cd
LC
1188 struct talitos_private *priv = dev_get_drvdata(dev);
1189 bool is_sec1 = has_ftr_sec1(priv);
1190
87a81dce
LC
1191 if (!src) {
1192 to_talitos_ptr(ptr, 0, 0, is_sec1);
1193 return 1;
1194 }
2b122730 1195 to_talitos_ptr_ext_set(ptr, elen, is_sec1);
e345177d 1196 if (sg_count == 1 && !force) {
da9de146 1197 to_talitos_ptr(ptr, sg_dma_address(src) + offset, len, is_sec1);
6a1e8d14 1198 return sg_count;
246a87cd 1199 }
246a87cd 1200 if (is_sec1) {
da9de146 1201 to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, len, is_sec1);
6a1e8d14 1202 return sg_count;
246a87cd 1203 }
e345177d 1204 sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len, elen,
6a1e8d14 1205 &edesc->link_tbl[tbl_off]);
e345177d 1206 if (sg_count == 1 && !force) {
6a1e8d14
LC
1207	/* Only one segment now, so no link tbl needed */
1208 copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
1209 return sg_count;
1210 }
1211 to_talitos_ptr(ptr, edesc->dma_link_tbl +
da9de146 1212 tbl_off * sizeof(struct talitos_ptr), len, is_sec1);
6a1e8d14
LC
1213 to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);
1214
1215 return sg_count;
246a87cd
LC
1216}
1217
2b122730
LC
1218static int talitos_sg_map(struct device *dev, struct scatterlist *src,
1219 unsigned int len, struct talitos_edesc *edesc,
1220 struct talitos_ptr *ptr, int sg_count,
1221 unsigned int offset, int tbl_off)
1222{
1223 return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
e345177d 1224 tbl_off, 0, false);
2b122730
LC
1225}
1226
9c4a7965
KP
1227/*
1228 * fill in and submit ipsec_esp descriptor
1229 */
56af8cd4 1230static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
7ede4c36 1231 bool encrypt,
aeb4c132
HX
1232 void (*callback)(struct device *dev,
1233 struct talitos_desc *desc,
1234 void *context, int error))
9c4a7965
KP
1235{
1236 struct crypto_aead *aead = crypto_aead_reqtfm(areq);
aeb4c132 1237 unsigned int authsize = crypto_aead_authsize(aead);
9c4a7965
KP
1238 struct talitos_ctx *ctx = crypto_aead_ctx(aead);
1239 struct device *dev = ctx->dev;
1240 struct talitos_desc *desc = &edesc->desc;
7ede4c36 1241 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
e41256f1 1242 unsigned int ivsize = crypto_aead_ivsize(aead);
aeb4c132 1243 int tbl_off = 0;
fa86a267 1244 int sg_count, ret;
2b122730 1245 int elen = 0;
549bd8bc
LC
1246 bool sync_needed = false;
1247 struct talitos_private *priv = dev_get_drvdata(dev);
1248 bool is_sec1 = has_ftr_sec1(priv);
9a655608
LC
1249 bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
1250 struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
1251 struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];
e345177d 1252 dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;
9c4a7965
KP
1253
1254 /* hmac key */
2e13ce08 1255 to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);
79fd31d3 1256
549bd8bc
LC
1257 sg_count = edesc->src_nents ?: 1;
1258 if (is_sec1 && sg_count > 1)
1259 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1260 areq->assoclen + cryptlen);
1261 else
1262 sg_count = dma_map_sg(dev, areq->src, sg_count,
1263 (areq->src == areq->dst) ?
1264 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
79fd31d3 1265
549bd8bc
LC
1266 /* hmac data */
1267 ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
1268 &desc->ptr[1], sg_count, 0, tbl_off);
340ff60a 1269
549bd8bc 1270 if (ret > 1) {
340ff60a 1271 tbl_off += ret;
549bd8bc 1272 sync_needed = true;
79fd31d3
HG
1273 }
1274
9c4a7965 1275 /* cipher iv */
9a655608 1276 to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);
9c4a7965
KP
1277
1278 /* cipher key */
2e13ce08
LC
1279 to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
1280 ctx->enckeylen, is_sec1);
9c4a7965
KP
1281
1282 /*
1283 * cipher in
1284 * map and adjust cipher len to aead request cryptlen.
1285 * extent is bytes of HMAC postpended to ciphertext,
1286 * typically 12 for ipsec
1287 */
2b122730
LC
1288 if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
1289 elen = authsize;
9c4a7965 1290
2b122730 1291 ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
e345177d
CL
1292 sg_count, areq->assoclen, tbl_off, elen,
1293 false);
549bd8bc 1294
ec8c7d14
LC
1295 if (ret > 1) {
1296 tbl_off += ret;
549bd8bc
LC
1297 sync_needed = true;
1298 }
9c4a7965 1299
549bd8bc
LC
1300 /* cipher out */
1301 if (areq->src != areq->dst) {
1302 sg_count = edesc->dst_nents ? : 1;
1303 if (!is_sec1 || sg_count == 1)
1304 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1305 }
9c4a7965 1306
e345177d
CL
1307 if (is_ipsec_esp && encrypt)
1308 elen = authsize;
1309 else
1310 elen = 0;
1311 ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
1312 sg_count, areq->assoclen, tbl_off, elen,
1313 is_ipsec_esp && !encrypt);
1314 tbl_off += ret;
aeb4c132 1315
e04a61be 1316 /* ICV data */
e345177d 1317 edesc->icv_ool = !encrypt;
549bd8bc 1318
e345177d
CL
1319 if (!encrypt && is_ipsec_esp) {
1320 struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
549bd8bc 1321
e345177d
CL
1322 /* Add an entry to the link table for ICV data */
1323 to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
1324 to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RET, is_sec1);
e04a61be 1325
e345177d
CL
1326 /* icv data follows link tables */
1327 to_talitos_ptr(tbl_ptr, dma_icv, authsize, is_sec1);
1328 to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
1329 sync_needed = true;
1330 } else if (!encrypt) {
1331 to_talitos_ptr(&desc->ptr[6], dma_icv, authsize, is_sec1);
1332 sync_needed = true;
9a655608 1333 } else if (!is_ipsec_esp) {
e345177d
CL
1334 talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
1335 sg_count, areq->assoclen + cryptlen, tbl_off);
549bd8bc
LC
1336 }
1337
9c4a7965 1338 /* iv out */
9a655608 1339 if (is_ipsec_esp)
549bd8bc
LC
1340 map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
1341 DMA_FROM_DEVICE);
1342
1343 if (sync_needed)
1344 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1345 edesc->dma_len,
1346 DMA_BIDIRECTIONAL);
9c4a7965 1347
5228f0f7 1348 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
fa86a267 1349 if (ret != -EINPROGRESS) {
7ede4c36 1350 ipsec_esp_unmap(dev, edesc, areq, encrypt);
fa86a267
KP
1351 kfree(edesc);
1352 }
1353 return ret;
9c4a7965
KP
1354}
1355
9c4a7965 1356/*
56af8cd4 1357 * allocate and map the extended descriptor
9c4a7965 1358 */
4de9d0b5
LN
1359static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
1360 struct scatterlist *src,
1361 struct scatterlist *dst,
79fd31d3
HG
1362 u8 *iv,
1363 unsigned int assoclen,
4de9d0b5
LN
1364 unsigned int cryptlen,
1365 unsigned int authsize,
79fd31d3 1366 unsigned int ivsize,
4de9d0b5 1367 int icv_stashing,
62293a37
HG
1368 u32 cryptoflags,
1369 bool encrypt)
9c4a7965 1370{
56af8cd4 1371 struct talitos_edesc *edesc;
6a1e8d14 1372 int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
79fd31d3 1373 dma_addr_t iv_dma = 0;
4de9d0b5 1374 gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
586725f8 1375 GFP_ATOMIC;
6f65f6ac
LC
1376 struct talitos_private *priv = dev_get_drvdata(dev);
1377 bool is_sec1 = has_ftr_sec1(priv);
1378 int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;
9c4a7965 1379
6f65f6ac 1380 if (cryptlen + authsize > max_len) {
4de9d0b5 1381 dev_err(dev, "length exceeds h/w max limit\n");
9c4a7965
KP
1382 return ERR_PTR(-EINVAL);
1383 }
1384
62293a37 1385 if (!dst || dst == src) {
6a1e8d14
LC
1386 src_len = assoclen + cryptlen + authsize;
1387 src_nents = sg_nents_for_len(src, src_len);
8e409fe1
LC
1388 if (src_nents < 0) {
1389 dev_err(dev, "Invalid number of src SG.\n");
c56c2e17 1390 return ERR_PTR(-EINVAL);
8e409fe1 1391 }
62293a37
HG
1392 src_nents = (src_nents == 1) ? 0 : src_nents;
1393 dst_nents = dst ? src_nents : 0;
6a1e8d14 1394 dst_len = 0;
62293a37 1395 } else { /* dst && dst != src*/
6a1e8d14
LC
1396 src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
1397 src_nents = sg_nents_for_len(src, src_len);
8e409fe1
LC
1398 if (src_nents < 0) {
1399 dev_err(dev, "Invalid number of src SG.\n");
c56c2e17 1400 return ERR_PTR(-EINVAL);
8e409fe1 1401 }
62293a37 1402 src_nents = (src_nents == 1) ? 0 : src_nents;
6a1e8d14
LC
1403 dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
1404 dst_nents = sg_nents_for_len(dst, dst_len);
8e409fe1
LC
1405 if (dst_nents < 0) {
1406 dev_err(dev, "Invalid number of dst SG.\n");
c56c2e17 1407 return ERR_PTR(-EINVAL);
8e409fe1 1408 }
62293a37 1409 dst_nents = (dst_nents == 1) ? 0 : dst_nents;
9c4a7965
KP
1410 }
1411
1412 /*
1413 * allocate space for base edesc plus the link tables,
aeb4c132
HX
1414 * allowing for two separate entries for AD and generated ICV (+ 2),
1415 * and space for two sets of ICVs (stashed and generated)
9c4a7965 1416 */
56af8cd4 1417 alloc_len = sizeof(struct talitos_edesc);
e345177d 1418 if (src_nents || dst_nents || !encrypt) {
6f65f6ac 1419 if (is_sec1)
6a1e8d14 1420 dma_len = (src_nents ? src_len : 0) +
e345177d 1421 (dst_nents ? dst_len : 0) + authsize;
6f65f6ac 1422 else
aeb4c132 1423 dma_len = (src_nents + dst_nents + 2) *
e345177d 1424 sizeof(struct talitos_ptr) + authsize;
9c4a7965
KP
1425 alloc_len += dma_len;
1426 } else {
1427 dma_len = 0;
9c4a7965 1428 }
e345177d 1429 alloc_len += icv_stashing ? authsize : 0;
9c4a7965 1430
37b5e889
LC
1431	/* if it's an ahash, add space for a second desc next to the first one */
1432 if (is_sec1 && !dst)
1433 alloc_len += sizeof(struct talitos_desc);
1bea445b 1434 alloc_len += ivsize;
37b5e889 1435
586725f8 1436 edesc = kmalloc(alloc_len, GFP_DMA | flags);
c56c2e17
CL
1437 if (!edesc)
1438 return ERR_PTR(-ENOMEM);
1bea445b
CL
1439 if (ivsize) {
1440 iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
c56c2e17 1441 iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
1bea445b 1442 }
e4a647c4 1443 memset(&edesc->desc, 0, sizeof(edesc->desc));
9c4a7965
KP
1444
1445 edesc->src_nents = src_nents;
1446 edesc->dst_nents = dst_nents;
79fd31d3 1447 edesc->iv_dma = iv_dma;
9c4a7965 1448 edesc->dma_len = dma_len;
37b5e889
LC
1449 if (dma_len) {
1450 void *addr = &edesc->link_tbl[0];
1451
1452 if (is_sec1 && !dst)
1453 addr += sizeof(struct talitos_desc);
1454 edesc->dma_link_tbl = dma_map_single(dev, addr,
497f2e6b
LN
1455 edesc->dma_len,
1456 DMA_BIDIRECTIONAL);
37b5e889 1457 }
9c4a7965
KP
1458 return edesc;
1459}
1460
79fd31d3 1461static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
62293a37 1462 int icv_stashing, bool encrypt)
4de9d0b5
LN
1463{
1464 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
aeb4c132 1465 unsigned int authsize = crypto_aead_authsize(authenc);
4de9d0b5 1466 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
79fd31d3 1467 unsigned int ivsize = crypto_aead_ivsize(authenc);
7ede4c36 1468 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
4de9d0b5 1469
aeb4c132 1470 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
7ede4c36 1471 iv, areq->assoclen, cryptlen,
aeb4c132 1472 authsize, ivsize, icv_stashing,
62293a37 1473 areq->base.flags, encrypt);
4de9d0b5
LN
1474}
1475
56af8cd4 1476static int aead_encrypt(struct aead_request *req)
9c4a7965
KP
1477{
1478 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1479 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
56af8cd4 1480 struct talitos_edesc *edesc;
9c4a7965
KP
1481
1482 /* allocate extended descriptor */
62293a37 1483 edesc = aead_edesc_alloc(req, req->iv, 0, true);
9c4a7965
KP
1484 if (IS_ERR(edesc))
1485 return PTR_ERR(edesc);
1486
1487 /* set encrypt */
70bcaca7 1488 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
9c4a7965 1489
7ede4c36 1490 return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
9c4a7965
KP
1491}
1492
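/*
 * decrypt: let the h/w check the ICV when the descriptor type and SEC
 * features allow it, otherwise stash the received ICV and compare it in
 * the completion callback
 */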
56af8cd4 1493static int aead_decrypt(struct aead_request *req)
9c4a7965
KP
1494{
1495 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
aeb4c132 1496 unsigned int authsize = crypto_aead_authsize(authenc);
9c4a7965 1497 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
fe5720e2 1498 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
56af8cd4 1499 struct talitos_edesc *edesc;
9c4a7965
KP
1500 void *icvdata;
1501
9c4a7965 1502 /* allocate extended descriptor */
62293a37 1503 edesc = aead_edesc_alloc(req, req->iv, 1, false);
9c4a7965
KP
1504 if (IS_ERR(edesc))
1505 return PTR_ERR(edesc);
1506
4bbfb839
CL
1507 if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
1508 (priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
e938e465
KP
1509 ((!edesc->src_nents && !edesc->dst_nents) ||
1510 priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {
9c4a7965 1511
fe5720e2 1512 /* decrypt and check the ICV */
e938e465
KP
1513 edesc->desc.hdr = ctx->desc_hdr_template |
1514 DESC_HDR_DIR_INBOUND |
fe5720e2 1515 DESC_HDR_MODE1_MDEU_CICV;
9c4a7965 1516
fe5720e2 1517 /* reset integrity check result bits */
9c4a7965 1518
7ede4c36
CL
1519 return ipsec_esp(edesc, req, false,
1520 ipsec_esp_decrypt_hwauth_done);
e938e465 1521 }
fe5720e2 1522
e938e465
KP
1523 /* Have to check the ICV with software */
1524 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
fe5720e2 1525
e938e465 1526 /* stash incoming ICV for later cmp with ICV generated by the h/w */
e345177d 1527 icvdata = edesc->buf + edesc->dma_len;
fe5720e2 1528
eae55a58
CL
1529 sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
1530 req->assoclen + req->cryptlen - authsize);
9c4a7965 1531
7ede4c36 1532 return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
9c4a7965
KP
1533}
1534
4de9d0b5
LN
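/*
 * store the raw cipher key and map it for DMA; key size / weak key
 * policy is enforced by the des, des3 and aes wrappers below
 */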
1535static int ablkcipher_setkey(struct crypto_ablkcipher *cipher,
1536 const u8 *key, unsigned int keylen)
1537{
1538 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
2e13ce08 1539 struct device *dev = ctx->dev;
4de9d0b5 1540
ef7c5c85
HX
1541 if (ctx->keylen)
1542 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
1543
1544 memcpy(&ctx->key, key, keylen);
1545 ctx->keylen = keylen;
1546
1547 ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);
1548
1549 return 0;
1550}
1551
1552static int ablkcipher_des_setkey(struct crypto_ablkcipher *cipher,
1553 const u8 *key, unsigned int keylen)
1554{
1555 u32 tmp[DES_EXPKEY_WORDS];
03d2c511 1556
f384cdc4 1557 if (unlikely(crypto_ablkcipher_get_flags(cipher) &
231baecd 1558 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS) &&
f384cdc4
LC
1559 !des_ekey(tmp, key)) {
1560 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_WEAK_KEY);
1561 return -EINVAL;
1562 }
1563
ef7c5c85
HX
1564 return ablkcipher_setkey(cipher, key, keylen);
1565}
2e13ce08 1566
ef7c5c85
HX
1567static int ablkcipher_des3_setkey(struct crypto_ablkcipher *cipher,
1568 const u8 *key, unsigned int keylen)
1569{
1570 u32 flags;
1571 int err;
4de9d0b5 1572
ef7c5c85
HX
1573 flags = crypto_ablkcipher_get_flags(cipher);
1574 err = __des3_verify_key(&flags, key);
1575 if (unlikely(err)) {
1576 crypto_ablkcipher_set_flags(cipher, flags);
1577 return err;
1578 }
2e13ce08 1579
ef7c5c85 1580 return ablkcipher_setkey(cipher, key, keylen);
4de9d0b5
LN
1581}
1582
1ba34e71
CL
1583static int ablkcipher_aes_setkey(struct crypto_ablkcipher *cipher,
1584 const u8 *key, unsigned int keylen)
1585{
1586 if (keylen == AES_KEYSIZE_128 || keylen == AES_KEYSIZE_192 ||
1587 keylen == AES_KEYSIZE_256)
1588 return ablkcipher_setkey(cipher, key, keylen);
1589
1590 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
1591
1592 return -EINVAL;
1593}
1594
4de9d0b5
LN
1595static void common_nonsnoop_unmap(struct device *dev,
1596 struct talitos_edesc *edesc,
1597 struct ablkcipher_request *areq)
1598{
1599 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
032d197e 1600
6a1e8d14 1601 talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
4de9d0b5
LN
1602 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);
1603
4de9d0b5
LN
1604 if (edesc->dma_len)
1605 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1606 DMA_BIDIRECTIONAL);
1607}
1608
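/* unmap the request, save the output IV for chaining and complete */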
1609static void ablkcipher_done(struct device *dev,
1610 struct talitos_desc *desc, void *context,
1611 int err)
1612{
1613 struct ablkcipher_request *areq = context;
3e03e792
CL
1614 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1615 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1616 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
19bbbc63
KP
1617 struct talitos_edesc *edesc;
1618
1619 edesc = container_of(desc, struct talitos_edesc, desc);
4de9d0b5
LN
1620
1621 common_nonsnoop_unmap(dev, edesc, areq);
3e03e792 1622 memcpy(areq->info, ctx->iv, ivsize);
4de9d0b5
LN
1623
1624 kfree(edesc);
1625
1626 areq->base.complete(&areq->base, err);
1627}
1628
1629static int common_nonsnoop(struct talitos_edesc *edesc,
1630 struct ablkcipher_request *areq,
4de9d0b5
LN
1631 void (*callback) (struct device *dev,
1632 struct talitos_desc *desc,
1633 void *context, int error))
1634{
1635 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1636 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1637 struct device *dev = ctx->dev;
1638 struct talitos_desc *desc = &edesc->desc;
1639 unsigned int cryptlen = areq->nbytes;
79fd31d3 1640 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
4de9d0b5 1641 int sg_count, ret;
6a1e8d14 1642 bool sync_needed = false;
922f9dc8
LC
1643 struct talitos_private *priv = dev_get_drvdata(dev);
1644 bool is_sec1 = has_ftr_sec1(priv);
4de9d0b5
LN
1645
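	/*
	 * Descriptor pointer layout used below: ptr[0] and ptr[6] stay
	 * empty, ptr[1] holds the IV, ptr[2] the key, ptr[3] the cipher
	 * input, ptr[4] the cipher output and ptr[5] receives the
	 * output IV.
	 */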
1646 /* first DWORD empty */
4de9d0b5
LN
1647
1648 /* cipher iv */
da9de146 1649 to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);
4de9d0b5
LN
1650
1651 /* cipher key */
2e13ce08 1652 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);
4de9d0b5 1653
6a1e8d14
LC
1654 sg_count = edesc->src_nents ?: 1;
1655 if (is_sec1 && sg_count > 1)
1656 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1657 cryptlen);
1658 else
1659 sg_count = dma_map_sg(dev, areq->src, sg_count,
1660 (areq->src == areq->dst) ?
1661 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
4de9d0b5
LN
1662 /*
1663 * cipher in
1664 */
6a1e8d14
LC
1665 sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
1666 &desc->ptr[3], sg_count, 0, 0);
1667 if (sg_count > 1)
1668 sync_needed = true;
4de9d0b5
LN
1669
1670 /* cipher out */
6a1e8d14
LC
1671 if (areq->src != areq->dst) {
1672 sg_count = edesc->dst_nents ? : 1;
1673 if (!is_sec1 || sg_count == 1)
1674 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1675 }
1676
1677 ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
1678 sg_count, 0, (edesc->src_nents + 1));
1679 if (ret > 1)
1680 sync_needed = true;
4de9d0b5
LN
1681
1682 /* iv out */
a2b35aa8 1683 map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
4de9d0b5
LN
1684 DMA_FROM_DEVICE);
1685
1686 /* last DWORD empty */
4de9d0b5 1687
6a1e8d14
LC
1688 if (sync_needed)
1689 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1690 edesc->dma_len, DMA_BIDIRECTIONAL);
1691
5228f0f7 1692 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
4de9d0b5
LN
1693 if (ret != -EINPROGRESS) {
1694 common_nonsnoop_unmap(dev, edesc, areq);
1695 kfree(edesc);
1696 }
1697 return ret;
1698}
1699
e938e465 1700static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request *
62293a37 1701 areq, bool encrypt)
4de9d0b5
LN
1702{
1703 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1704 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
79fd31d3 1705 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
4de9d0b5 1706
aeb4c132 1707 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
79fd31d3 1708 areq->info, 0, areq->nbytes, 0, ivsize, 0,
62293a37 1709 areq->base.flags, encrypt);
4de9d0b5
LN
1710}
1711
1712static int ablkcipher_encrypt(struct ablkcipher_request *areq)
1713{
1714 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1715 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1716 struct talitos_edesc *edesc;
ee483d32
CL
1717 unsigned int blocksize =
1718 crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(cipher));
1719
1720 if (!areq->nbytes)
1721 return 0;
1722
1723 if (areq->nbytes % blocksize)
1724 return -EINVAL;
4de9d0b5
LN
1725
1726 /* allocate extended descriptor */
62293a37 1727 edesc = ablkcipher_edesc_alloc(areq, true);
4de9d0b5
LN
1728 if (IS_ERR(edesc))
1729 return PTR_ERR(edesc);
1730
1731 /* set encrypt */
1732 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1733
febec542 1734 return common_nonsnoop(edesc, areq, ablkcipher_done);
4de9d0b5
LN
1735}
1736
1737static int ablkcipher_decrypt(struct ablkcipher_request *areq)
1738{
1739 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1740 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1741 struct talitos_edesc *edesc;
ee483d32
CL
1742 unsigned int blocksize =
1743 crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(cipher));
1744
1745 if (!areq->nbytes)
1746 return 0;
1747
1748 if (areq->nbytes % blocksize)
1749 return -EINVAL;
4de9d0b5
LN
1750
1751 /* allocate extended descriptor */
62293a37 1752 edesc = ablkcipher_edesc_alloc(areq, false);
4de9d0b5
LN
1753 if (IS_ERR(edesc))
1754 return PTR_ERR(edesc);
1755
1756 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1757
febec542 1758 return common_nonsnoop(edesc, areq, ablkcipher_done);
4de9d0b5
LN
1759}
1760
497f2e6b
LN
1761static void common_nonsnoop_hash_unmap(struct device *dev,
1762 struct talitos_edesc *edesc,
1763 struct ahash_request *areq)
1764{
1765 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
ad4cd51f
LC
1766 struct talitos_private *priv = dev_get_drvdata(dev);
1767 bool is_sec1 = has_ftr_sec1(priv);
1768 struct talitos_desc *desc = &edesc->desc;
1769 struct talitos_desc *desc2 = desc + 1;
1770
1771 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1772 if (desc->next_desc &&
1773 desc->ptr[5].ptr != desc2->ptr[5].ptr)
1774 unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
497f2e6b 1775
6a1e8d14 1776 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
032d197e 1777
ad4cd51f
LC
1778 /* When using hashctx-in, must unmap it. */
1779 if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
1780 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
1781 DMA_TO_DEVICE);
1782 else if (desc->next_desc)
1783 unmap_single_talitos_ptr(dev, &desc2->ptr[1],
1784 DMA_TO_DEVICE);
1785
1786 if (is_sec1 && req_ctx->nbuf)
1787 unmap_single_talitos_ptr(dev, &desc->ptr[3],
1788 DMA_TO_DEVICE);
1789
497f2e6b
LN
1790 if (edesc->dma_len)
1791 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1792 DMA_BIDIRECTIONAL);
1793
37b5e889
LC
1794 if (edesc->desc.next_desc)
1795 dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
1796 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
497f2e6b
LN
1797}
1798
1799static void ahash_done(struct device *dev,
1800 struct talitos_desc *desc, void *context,
1801 int err)
1802{
1803 struct ahash_request *areq = context;
1804 struct talitos_edesc *edesc =
1805 container_of(desc, struct talitos_edesc, desc);
1806 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1807
1808 if (!req_ctx->last && req_ctx->to_hash_later) {
1809 /* Position any partial block for next update/final/finup */
3c0dd190 1810 req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
5e833bc4 1811 req_ctx->nbuf = req_ctx->to_hash_later;
497f2e6b
LN
1812 }
1813 common_nonsnoop_hash_unmap(dev, edesc, areq);
1814
1815 kfree(edesc);
1816
1817 areq->base.complete(&areq->base, err);
1818}
1819
2d02905e
LC
1820/*
1821 * SEC1 doesn't like hashing a zero-sized message, so we do the padding
1822 * ourselves and submit a padded block
1823 */
5b2cf268 1824static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
2d02905e
LC
1825 struct talitos_edesc *edesc,
1826 struct talitos_ptr *ptr)
1827{
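	/*
	 * A zero-length message padded out to one 64-byte block: the
	 * mandatory 0x80 terminator followed by zeros (the 64-bit length
	 * field is zero too), as used by MD5/SHA-1/SHA-224/SHA-256.
	 */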
1828 static u8 padded_hash[64] = {
1829 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1830 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1831 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1832 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1833 };
1834
1835 pr_err_once("Bug in SEC1, padding ourself\n");
1836 edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1837 map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1838 (char *)padded_hash, DMA_TO_DEVICE);
1839}
1840
497f2e6b
LN
1841static int common_nonsnoop_hash(struct talitos_edesc *edesc,
1842 struct ahash_request *areq, unsigned int length,
37b5e889 1843 unsigned int offset,
497f2e6b
LN
1844 void (*callback) (struct device *dev,
1845 struct talitos_desc *desc,
1846 void *context, int error))
1847{
1848 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1849 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1850 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1851 struct device *dev = ctx->dev;
1852 struct talitos_desc *desc = &edesc->desc;
032d197e 1853 int ret;
6a1e8d14 1854 bool sync_needed = false;
922f9dc8
LC
1855 struct talitos_private *priv = dev_get_drvdata(dev);
1856 bool is_sec1 = has_ftr_sec1(priv);
6a1e8d14 1857 int sg_count;
497f2e6b
LN
1858
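	/*
	 * Descriptor pointer layout used below: ptr[1] optionally carries
	 * the incoming hash context, ptr[2] the HMAC key, ptr[3] the data
	 * to hash and ptr[5] the digest or outgoing context; ptr[0],
	 * ptr[4] and ptr[6] stay empty.
	 */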
1859 /* first DWORD empty */
497f2e6b 1860
60f208d7
KP
1861 /* hash context in */
1862 if (!req_ctx->first || req_ctx->swinit) {
6a4967c3
LC
1863 map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
1864 req_ctx->hw_context_size,
1865 req_ctx->hw_context,
1866 DMA_TO_DEVICE);
60f208d7 1867 req_ctx->swinit = 0;
497f2e6b 1868 }
afd62fa2
LC
1869 /* Indicate next op is not the first. */
1870 req_ctx->first = 0;
497f2e6b
LN
1871
1872 /* HMAC key */
1873 if (ctx->keylen)
2e13ce08
LC
1874 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
1875 is_sec1);
497f2e6b 1876
37b5e889
LC
1877 if (is_sec1 && req_ctx->nbuf)
1878 length -= req_ctx->nbuf;
1879
6a1e8d14
LC
1880 sg_count = edesc->src_nents ?: 1;
1881 if (is_sec1 && sg_count > 1)
37b5e889
LC
1882 sg_pcopy_to_buffer(req_ctx->psrc, sg_count,
1883 edesc->buf + sizeof(struct talitos_desc),
1884 length, req_ctx->nbuf);
1885 else if (length)
6a1e8d14
LC
1886 sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
1887 DMA_TO_DEVICE);
497f2e6b
LN
1888 /*
1889 * data in
1890 */
37b5e889 1891 if (is_sec1 && req_ctx->nbuf) {
ad4cd51f
LC
1892 map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
1893 req_ctx->buf[req_ctx->buf_idx],
1894 DMA_TO_DEVICE);
37b5e889
LC
1895 } else {
1896 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1897 &desc->ptr[3], sg_count, offset, 0);
1898 if (sg_count > 1)
1899 sync_needed = true;
1900 }
497f2e6b
LN
1901
1902 /* fifth DWORD empty */
497f2e6b
LN
1903
1904 /* hash/HMAC out -or- hash context out */
1905 if (req_ctx->last)
1906 map_single_talitos_ptr(dev, &desc->ptr[5],
1907 crypto_ahash_digestsize(tfm),
a2b35aa8 1908 areq->result, DMA_FROM_DEVICE);
497f2e6b 1909 else
6a4967c3
LC
1910 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1911 req_ctx->hw_context_size,
1912 req_ctx->hw_context,
1913 DMA_FROM_DEVICE);
497f2e6b
LN
1914
1915 /* last DWORD empty */
497f2e6b 1916
2d02905e
LC
1917 if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
1918 talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
1919
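	/*
	 * On SEC1, when both previously buffered bytes and fresh
	 * scatterlist data must be hashed, the first descriptor handles
	 * the buffered bytes and a second descriptor, chained via
	 * next_desc, continues over the scatterlist data.
	 */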
37b5e889
LC
1920 if (is_sec1 && req_ctx->nbuf && length) {
1921 struct talitos_desc *desc2 = desc + 1;
1922 dma_addr_t next_desc;
1923
1924 memset(desc2, 0, sizeof(*desc2));
1925 desc2->hdr = desc->hdr;
1926 desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
1927 desc2->hdr1 = desc2->hdr;
1928 desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1929 desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
1930 desc->hdr &= ~DESC_HDR_DONE_NOTIFY;
1931
ad4cd51f
LC
1932 if (desc->ptr[1].ptr)
1933 copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
1934 is_sec1);
1935 else
6a4967c3
LC
1936 map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
1937 req_ctx->hw_context_size,
1938 req_ctx->hw_context,
1939 DMA_TO_DEVICE);
37b5e889
LC
1940 copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
1941 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1942 &desc2->ptr[3], sg_count, offset, 0);
1943 if (sg_count > 1)
1944 sync_needed = true;
1945 copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
1946 if (req_ctx->last)
6a4967c3
LC
1947 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1948 req_ctx->hw_context_size,
1949 req_ctx->hw_context,
1950 DMA_FROM_DEVICE);
37b5e889
LC
1951
1952 next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
1953 DMA_BIDIRECTIONAL);
1954 desc->next_desc = cpu_to_be32(next_desc);
1955 }
1956
6a1e8d14
LC
1957 if (sync_needed)
1958 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1959 edesc->dma_len, DMA_BIDIRECTIONAL);
1960
5228f0f7 1961 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
497f2e6b
LN
1962 if (ret != -EINPROGRESS) {
1963 common_nonsnoop_hash_unmap(dev, edesc, areq);
1964 kfree(edesc);
1965 }
1966 return ret;
1967}
1968
1969static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1970 unsigned int nbytes)
1971{
1972 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1973 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1974 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
37b5e889
LC
1975 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1976 bool is_sec1 = has_ftr_sec1(priv);
1977
1978 if (is_sec1)
1979 nbytes -= req_ctx->nbuf;
497f2e6b 1980
aeb4c132 1981 return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
62293a37 1982 nbytes, 0, 0, 0, areq->base.flags, false);
497f2e6b
LN
1983}
1984
1985static int ahash_init(struct ahash_request *areq)
1986{
1987 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
6a4967c3
LC
1988 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1989 struct device *dev = ctx->dev;
497f2e6b 1990 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
49f9783b 1991 unsigned int size;
6a4967c3 1992 dma_addr_t dma;
497f2e6b
LN
1993
1994 /* Initialize the context */
3c0dd190 1995 req_ctx->buf_idx = 0;
5e833bc4 1996 req_ctx->nbuf = 0;
60f208d7
KP
1997 req_ctx->first = 1; /* first indicates h/w must init its context */
1998 req_ctx->swinit = 0; /* assume h/w init of context */
49f9783b 1999 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
497f2e6b
LN
2000 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2001 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 2002 req_ctx->hw_context_size = size;
497f2e6b 2003
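	/*
	 * Map and immediately unmap the context once so the freshly
	 * initialized buffer is synced toward the device; descriptor
	 * setup later relies on the _nosync mapping helpers.
	 */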
6a4967c3
LC
2004 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2005 DMA_TO_DEVICE);
2006 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
2007
497f2e6b
LN
2008 return 0;
2009}
2010
60f208d7
KP
2011/*
2012 * on h/w without explicit sha224 support, we initialize h/w context
2013 * manually with sha224 constants, and tell it to run sha256.
2014 */
2015static int ahash_init_sha224_swinit(struct ahash_request *areq)
2016{
2017 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2018
a752447a
KP
2019 req_ctx->hw_context[0] = SHA224_H0;
2020 req_ctx->hw_context[1] = SHA224_H1;
2021 req_ctx->hw_context[2] = SHA224_H2;
2022 req_ctx->hw_context[3] = SHA224_H3;
2023 req_ctx->hw_context[4] = SHA224_H4;
2024 req_ctx->hw_context[5] = SHA224_H5;
2025 req_ctx->hw_context[6] = SHA224_H6;
2026 req_ctx->hw_context[7] = SHA224_H7;
60f208d7
KP
2027
2028 /* init 64-bit count */
2029 req_ctx->hw_context[8] = 0;
2030 req_ctx->hw_context[9] = 0;
2031
6a4967c3
LC
2032 ahash_init(areq);
2033	req_ctx->swinit = 1; /* prevent h/w from initializing context with sha256 values */
2034
60f208d7
KP
2035 return 0;
2036}
2037
497f2e6b
LN
2038static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
2039{
2040 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2041 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2042 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2043 struct talitos_edesc *edesc;
2044 unsigned int blocksize =
2045 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2046 unsigned int nbytes_to_hash;
2047 unsigned int to_hash_later;
5e833bc4 2048 unsigned int nsg;
8e409fe1 2049 int nents;
37b5e889
LC
2050 struct device *dev = ctx->dev;
2051 struct talitos_private *priv = dev_get_drvdata(dev);
2052 bool is_sec1 = has_ftr_sec1(priv);
2053 int offset = 0;
3c0dd190 2054 u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];
497f2e6b 2055
5e833bc4
LN
2056 if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
2057 /* Buffer up to one whole block */
8e409fe1
LC
2058 nents = sg_nents_for_len(areq->src, nbytes);
2059 if (nents < 0) {
2060 dev_err(ctx->dev, "Invalid number of src SG.\n");
2061 return nents;
2062 }
2063 sg_copy_to_buffer(areq->src, nents,
3c0dd190 2064 ctx_buf + req_ctx->nbuf, nbytes);
5e833bc4 2065 req_ctx->nbuf += nbytes;
497f2e6b
LN
2066 return 0;
2067 }
2068
5e833bc4
LN
2069 /* At least (blocksize + 1) bytes are available to hash */
2070 nbytes_to_hash = nbytes + req_ctx->nbuf;
2071 to_hash_later = nbytes_to_hash & (blocksize - 1);
2072
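	/*
	 * Hash only whole blocks now and keep the remainder (or one full
	 * block when the length is block-aligned) buffered, so the final
	 * submission always has data to hash and pad.
	 */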
2073 if (req_ctx->last)
2074 to_hash_later = 0;
2075 else if (to_hash_later)
2076 /* There is a partial block. Hash the full block(s) now */
2077 nbytes_to_hash -= to_hash_later;
2078 else {
2079 /* Keep one block buffered */
2080 nbytes_to_hash -= blocksize;
2081 to_hash_later = blocksize;
2082 }
2083
2084 /* Chain in any previously buffered data */
37b5e889 2085 if (!is_sec1 && req_ctx->nbuf) {
5e833bc4
LN
2086 nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
2087 sg_init_table(req_ctx->bufsl, nsg);
3c0dd190 2088 sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
5e833bc4 2089 if (nsg > 1)
c56f6d12 2090 sg_chain(req_ctx->bufsl, 2, areq->src);
497f2e6b 2091 req_ctx->psrc = req_ctx->bufsl;
37b5e889
LC
2092 } else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
2093 if (nbytes_to_hash > blocksize)
2094 offset = blocksize - req_ctx->nbuf;
2095 else
2096 offset = nbytes_to_hash - req_ctx->nbuf;
2097 nents = sg_nents_for_len(areq->src, offset);
2098 if (nents < 0) {
2099 dev_err(ctx->dev, "Invalid number of src SG.\n");
2100 return nents;
2101 }
2102 sg_copy_to_buffer(areq->src, nents,
3c0dd190 2103 ctx_buf + req_ctx->nbuf, offset);
37b5e889
LC
2104 req_ctx->nbuf += offset;
2105 req_ctx->psrc = areq->src;
5e833bc4 2106 } else
497f2e6b 2107 req_ctx->psrc = areq->src;
5e833bc4
LN
2108
2109 if (to_hash_later) {
8e409fe1
LC
2110 nents = sg_nents_for_len(areq->src, nbytes);
2111 if (nents < 0) {
2112 dev_err(ctx->dev, "Invalid number of src SG.\n");
2113 return nents;
2114 }
d0525723 2115 sg_pcopy_to_buffer(areq->src, nents,
3c0dd190 2116 req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
5e833bc4
LN
2117 to_hash_later,
2118 nbytes - to_hash_later);
497f2e6b 2119 }
5e833bc4 2120 req_ctx->to_hash_later = to_hash_later;
497f2e6b 2121
5e833bc4 2122 /* Allocate extended descriptor */
497f2e6b
LN
2123 edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
2124 if (IS_ERR(edesc))
2125 return PTR_ERR(edesc);
2126
2127 edesc->desc.hdr = ctx->desc_hdr_template;
2128
2129 /* On last one, request SEC to pad; otherwise continue */
2130 if (req_ctx->last)
2131 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
2132 else
2133 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
2134
60f208d7
KP
2135 /* request SEC to INIT hash. */
2136 if (req_ctx->first && !req_ctx->swinit)
497f2e6b
LN
2137 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
2138
2139 /* When the tfm context has a keylen, it's an HMAC.
2140	 * A first or last (i.e. not middle) descriptor must request HMAC.
2141 */
2142 if (ctx->keylen && (req_ctx->first || req_ctx->last))
2143 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
2144
37b5e889 2145 return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, offset,
497f2e6b
LN
2146 ahash_done);
2147}
2148
2149static int ahash_update(struct ahash_request *areq)
2150{
2151 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2152
2153 req_ctx->last = 0;
2154
2155 return ahash_process_req(areq, areq->nbytes);
2156}
2157
2158static int ahash_final(struct ahash_request *areq)
2159{
2160 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2161
2162 req_ctx->last = 1;
2163
2164 return ahash_process_req(areq, 0);
2165}
2166
2167static int ahash_finup(struct ahash_request *areq)
2168{
2169 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2170
2171 req_ctx->last = 1;
2172
2173 return ahash_process_req(areq, areq->nbytes);
2174}
2175
2176static int ahash_digest(struct ahash_request *areq)
2177{
2178 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
60f208d7 2179 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
497f2e6b 2180
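	/* a digest is an init followed by a single, final update over all data */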
60f208d7 2181 ahash->init(areq);
497f2e6b
LN
2182 req_ctx->last = 1;
2183
2184 return ahash_process_req(areq, areq->nbytes);
2185}
2186
3639ca84
HG
2187static int ahash_export(struct ahash_request *areq, void *out)
2188{
2189 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2190 struct talitos_export_state *export = out;
6a4967c3
LC
2191 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2192 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2193 struct device *dev = ctx->dev;
2194 dma_addr_t dma;
2195
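	/*
	 * A map/unmap cycle with DMA_FROM_DEVICE makes the context last
	 * written by the hardware visible to the CPU before copying it out.
	 */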
2196 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2197 DMA_FROM_DEVICE);
2198 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);
3639ca84
HG
2199
2200 memcpy(export->hw_context, req_ctx->hw_context,
2201 req_ctx->hw_context_size);
3c0dd190 2202 memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
3639ca84
HG
2203 export->swinit = req_ctx->swinit;
2204 export->first = req_ctx->first;
2205 export->last = req_ctx->last;
2206 export->to_hash_later = req_ctx->to_hash_later;
2207 export->nbuf = req_ctx->nbuf;
2208
2209 return 0;
2210}
2211
2212static int ahash_import(struct ahash_request *areq, const void *in)
2213{
2214 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2215 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
6a4967c3
LC
2216 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2217 struct device *dev = ctx->dev;
3639ca84 2218 const struct talitos_export_state *export = in;
49f9783b 2219 unsigned int size;
6a4967c3 2220 dma_addr_t dma;
3639ca84
HG
2221
2222 memset(req_ctx, 0, sizeof(*req_ctx));
49f9783b 2223 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
3639ca84
HG
2224 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2225 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 2226 req_ctx->hw_context_size = size;
49f9783b 2227 memcpy(req_ctx->hw_context, export->hw_context, size);
3c0dd190 2228 memcpy(req_ctx->buf[0], export->buf, export->nbuf);
3639ca84
HG
2229 req_ctx->swinit = export->swinit;
2230 req_ctx->first = export->first;
2231 req_ctx->last = export->last;
2232 req_ctx->to_hash_later = export->to_hash_later;
2233 req_ctx->nbuf = export->nbuf;
2234
6a4967c3
LC
2235 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2236 DMA_TO_DEVICE);
2237 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
2238
3639ca84
HG
2239 return 0;
2240}
2241
79b3a418
LN
2242static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2243 u8 *hash)
2244{
2245 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2246
2247 struct scatterlist sg[1];
2248 struct ahash_request *req;
f1c90ac3 2249 struct crypto_wait wait;
79b3a418
LN
2250 int ret;
2251
f1c90ac3 2252 crypto_init_wait(&wait);
79b3a418
LN
2253
2254 req = ahash_request_alloc(tfm, GFP_KERNEL);
2255 if (!req)
2256 return -ENOMEM;
2257
2258 /* Keep tfm keylen == 0 during hash of the long key */
2259 ctx->keylen = 0;
2260 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
f1c90ac3 2261 crypto_req_done, &wait);
79b3a418
LN
2262
2263 sg_init_one(&sg[0], key, keylen);
2264
2265 ahash_request_set_crypt(req, sg, hash, keylen);
f1c90ac3
GBY
2266 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2267
79b3a418
LN
2268 ahash_request_free(req);
2269
2270 return ret;
2271}
2272
2273static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2274 unsigned int keylen)
2275{
2276 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2e13ce08 2277 struct device *dev = ctx->dev;
79b3a418
LN
2278 unsigned int blocksize =
2279 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2280 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2281 unsigned int keysize = keylen;
2282 u8 hash[SHA512_DIGEST_SIZE];
2283 int ret;
2284
2285 if (keylen <= blocksize)
2286 memcpy(ctx->key, key, keysize);
2287 else {
2288 /* Must get the hash of the long key */
2289 ret = keyhash(tfm, key, keylen, hash);
2290
2291 if (ret) {
2292 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2293 return -EINVAL;
2294 }
2295
2296 keysize = digestsize;
2297 memcpy(ctx->key, hash, digestsize);
2298 }
2299
2e13ce08
LC
2300 if (ctx->keylen)
2301 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2302
79b3a418 2303 ctx->keylen = keysize;
2e13ce08 2304 ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
79b3a418
LN
2305
2306 return 0;
2307}
2308
2309
9c4a7965 2310struct talitos_alg_template {
d5e4aaef 2311 u32 type;
b0057763 2312 u32 priority;
d5e4aaef
LN
2313 union {
2314 struct crypto_alg crypto;
acbf7c62 2315 struct ahash_alg hash;
aeb4c132 2316 struct aead_alg aead;
d5e4aaef 2317 } alg;
9c4a7965
KP
2318 __be32 desc_hdr_template;
2319};
2320
2321static struct talitos_alg_template driver_algs[] = {
991155ba 2322 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
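	/*
	 * Most modes below are also registered as a lower-priority "-hsna"
	 * variant that uses the two-pass HMAC_SNOOP_NO_AFEU descriptor type
	 * instead of the single-pass IPSEC_ESP descriptor.
	 */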
d5e4aaef 2323 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2324 .alg.aead = {
2325 .base = {
2326 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2327 .cra_driver_name = "authenc-hmac-sha1-"
2328 "cbc-aes-talitos",
2329 .cra_blocksize = AES_BLOCK_SIZE,
2330 .cra_flags = CRYPTO_ALG_ASYNC,
2331 },
2332 .ivsize = AES_BLOCK_SIZE,
2333 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2334 },
9c4a7965
KP
2335 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2336 DESC_HDR_SEL0_AESU |
2337 DESC_HDR_MODE0_AESU_CBC |
2338 DESC_HDR_SEL1_MDEUA |
2339 DESC_HDR_MODE1_MDEU_INIT |
2340 DESC_HDR_MODE1_MDEU_PAD |
2341 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
70bcaca7 2342 },
7405c8d7
LC
2343 { .type = CRYPTO_ALG_TYPE_AEAD,
2344 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2345 .alg.aead = {
2346 .base = {
2347 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2348 .cra_driver_name = "authenc-hmac-sha1-"
a1a42f84 2349 "cbc-aes-talitos-hsna",
7405c8d7
LC
2350 .cra_blocksize = AES_BLOCK_SIZE,
2351 .cra_flags = CRYPTO_ALG_ASYNC,
2352 },
2353 .ivsize = AES_BLOCK_SIZE,
2354 .maxauthsize = SHA1_DIGEST_SIZE,
2355 },
2356 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2357 DESC_HDR_SEL0_AESU |
2358 DESC_HDR_MODE0_AESU_CBC |
2359 DESC_HDR_SEL1_MDEUA |
2360 DESC_HDR_MODE1_MDEU_INIT |
2361 DESC_HDR_MODE1_MDEU_PAD |
2362 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2363 },
d5e4aaef 2364 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2365 .alg.aead = {
2366 .base = {
2367 .cra_name = "authenc(hmac(sha1),"
2368 "cbc(des3_ede))",
2369 .cra_driver_name = "authenc-hmac-sha1-"
2370 "cbc-3des-talitos",
2371 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2372 .cra_flags = CRYPTO_ALG_ASYNC,
2373 },
2374 .ivsize = DES3_EDE_BLOCK_SIZE,
2375 .maxauthsize = SHA1_DIGEST_SIZE,
ef7c5c85 2376 .setkey = aead_des3_setkey,
56af8cd4 2377 },
70bcaca7
LN
2378 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2379 DESC_HDR_SEL0_DEU |
2380 DESC_HDR_MODE0_DEU_CBC |
2381 DESC_HDR_MODE0_DEU_3DES |
2382 DESC_HDR_SEL1_MDEUA |
2383 DESC_HDR_MODE1_MDEU_INIT |
2384 DESC_HDR_MODE1_MDEU_PAD |
2385 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
3952f17e 2386 },
7405c8d7
LC
2387 { .type = CRYPTO_ALG_TYPE_AEAD,
2388 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2389 .alg.aead = {
2390 .base = {
2391 .cra_name = "authenc(hmac(sha1),"
2392 "cbc(des3_ede))",
2393 .cra_driver_name = "authenc-hmac-sha1-"
a1a42f84 2394 "cbc-3des-talitos-hsna",
7405c8d7
LC
2395 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2396 .cra_flags = CRYPTO_ALG_ASYNC,
2397 },
2398 .ivsize = DES3_EDE_BLOCK_SIZE,
2399 .maxauthsize = SHA1_DIGEST_SIZE,
ef7c5c85 2400 .setkey = aead_des3_setkey,
7405c8d7
LC
2401 },
2402 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2403 DESC_HDR_SEL0_DEU |
2404 DESC_HDR_MODE0_DEU_CBC |
2405 DESC_HDR_MODE0_DEU_3DES |
2406 DESC_HDR_SEL1_MDEUA |
2407 DESC_HDR_MODE1_MDEU_INIT |
2408 DESC_HDR_MODE1_MDEU_PAD |
2409 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2410 },
357fb605 2411 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2412 .alg.aead = {
2413 .base = {
2414 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2415 .cra_driver_name = "authenc-hmac-sha224-"
2416 "cbc-aes-talitos",
2417 .cra_blocksize = AES_BLOCK_SIZE,
2418 .cra_flags = CRYPTO_ALG_ASYNC,
2419 },
2420 .ivsize = AES_BLOCK_SIZE,
2421 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2422 },
2423 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2424 DESC_HDR_SEL0_AESU |
2425 DESC_HDR_MODE0_AESU_CBC |
2426 DESC_HDR_SEL1_MDEUA |
2427 DESC_HDR_MODE1_MDEU_INIT |
2428 DESC_HDR_MODE1_MDEU_PAD |
2429 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2430 },
7405c8d7
LC
2431 { .type = CRYPTO_ALG_TYPE_AEAD,
2432 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2433 .alg.aead = {
2434 .base = {
2435 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2436 .cra_driver_name = "authenc-hmac-sha224-"
a1a42f84 2437 "cbc-aes-talitos-hsna",
7405c8d7
LC
2438 .cra_blocksize = AES_BLOCK_SIZE,
2439 .cra_flags = CRYPTO_ALG_ASYNC,
2440 },
2441 .ivsize = AES_BLOCK_SIZE,
2442 .maxauthsize = SHA224_DIGEST_SIZE,
2443 },
2444 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2445 DESC_HDR_SEL0_AESU |
2446 DESC_HDR_MODE0_AESU_CBC |
2447 DESC_HDR_SEL1_MDEUA |
2448 DESC_HDR_MODE1_MDEU_INIT |
2449 DESC_HDR_MODE1_MDEU_PAD |
2450 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2451 },
357fb605 2452 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2453 .alg.aead = {
2454 .base = {
2455 .cra_name = "authenc(hmac(sha224),"
2456 "cbc(des3_ede))",
2457 .cra_driver_name = "authenc-hmac-sha224-"
2458 "cbc-3des-talitos",
2459 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2460 .cra_flags = CRYPTO_ALG_ASYNC,
2461 },
2462 .ivsize = DES3_EDE_BLOCK_SIZE,
2463 .maxauthsize = SHA224_DIGEST_SIZE,
ef7c5c85 2464 .setkey = aead_des3_setkey,
357fb605
HG
2465 },
2466 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2467 DESC_HDR_SEL0_DEU |
2468 DESC_HDR_MODE0_DEU_CBC |
2469 DESC_HDR_MODE0_DEU_3DES |
2470 DESC_HDR_SEL1_MDEUA |
2471 DESC_HDR_MODE1_MDEU_INIT |
2472 DESC_HDR_MODE1_MDEU_PAD |
2473 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2474 },
7405c8d7
LC
2475 { .type = CRYPTO_ALG_TYPE_AEAD,
2476 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2477 .alg.aead = {
2478 .base = {
2479 .cra_name = "authenc(hmac(sha224),"
2480 "cbc(des3_ede))",
2481 .cra_driver_name = "authenc-hmac-sha224-"
a1a42f84 2482 "cbc-3des-talitos-hsna",
7405c8d7
LC
2483 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2484 .cra_flags = CRYPTO_ALG_ASYNC,
2485 },
2486 .ivsize = DES3_EDE_BLOCK_SIZE,
2487 .maxauthsize = SHA224_DIGEST_SIZE,
ef7c5c85 2488 .setkey = aead_des3_setkey,
7405c8d7
LC
2489 },
2490 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2491 DESC_HDR_SEL0_DEU |
2492 DESC_HDR_MODE0_DEU_CBC |
2493 DESC_HDR_MODE0_DEU_3DES |
2494 DESC_HDR_SEL1_MDEUA |
2495 DESC_HDR_MODE1_MDEU_INIT |
2496 DESC_HDR_MODE1_MDEU_PAD |
2497 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2498 },
d5e4aaef 2499 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2500 .alg.aead = {
2501 .base = {
2502 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2503 .cra_driver_name = "authenc-hmac-sha256-"
2504 "cbc-aes-talitos",
2505 .cra_blocksize = AES_BLOCK_SIZE,
2506 .cra_flags = CRYPTO_ALG_ASYNC,
2507 },
2508 .ivsize = AES_BLOCK_SIZE,
2509 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2510 },
3952f17e
LN
2511 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2512 DESC_HDR_SEL0_AESU |
2513 DESC_HDR_MODE0_AESU_CBC |
2514 DESC_HDR_SEL1_MDEUA |
2515 DESC_HDR_MODE1_MDEU_INIT |
2516 DESC_HDR_MODE1_MDEU_PAD |
2517 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2518 },
7405c8d7
LC
2519 { .type = CRYPTO_ALG_TYPE_AEAD,
2520 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2521 .alg.aead = {
2522 .base = {
2523 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2524 .cra_driver_name = "authenc-hmac-sha256-"
a1a42f84 2525 "cbc-aes-talitos-hsna",
7405c8d7
LC
2526 .cra_blocksize = AES_BLOCK_SIZE,
2527 .cra_flags = CRYPTO_ALG_ASYNC,
2528 },
2529 .ivsize = AES_BLOCK_SIZE,
2530 .maxauthsize = SHA256_DIGEST_SIZE,
2531 },
2532 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2533 DESC_HDR_SEL0_AESU |
2534 DESC_HDR_MODE0_AESU_CBC |
2535 DESC_HDR_SEL1_MDEUA |
2536 DESC_HDR_MODE1_MDEU_INIT |
2537 DESC_HDR_MODE1_MDEU_PAD |
2538 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2539 },
d5e4aaef 2540 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2541 .alg.aead = {
2542 .base = {
2543 .cra_name = "authenc(hmac(sha256),"
2544 "cbc(des3_ede))",
2545 .cra_driver_name = "authenc-hmac-sha256-"
2546 "cbc-3des-talitos",
2547 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2548 .cra_flags = CRYPTO_ALG_ASYNC,
2549 },
2550 .ivsize = DES3_EDE_BLOCK_SIZE,
2551 .maxauthsize = SHA256_DIGEST_SIZE,
ef7c5c85 2552 .setkey = aead_des3_setkey,
56af8cd4 2553 },
3952f17e
LN
2554 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2555 DESC_HDR_SEL0_DEU |
2556 DESC_HDR_MODE0_DEU_CBC |
2557 DESC_HDR_MODE0_DEU_3DES |
2558 DESC_HDR_SEL1_MDEUA |
2559 DESC_HDR_MODE1_MDEU_INIT |
2560 DESC_HDR_MODE1_MDEU_PAD |
2561 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2562 },
7405c8d7
LC
2563 { .type = CRYPTO_ALG_TYPE_AEAD,
2564 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2565 .alg.aead = {
2566 .base = {
2567 .cra_name = "authenc(hmac(sha256),"
2568 "cbc(des3_ede))",
2569 .cra_driver_name = "authenc-hmac-sha256-"
a1a42f84 2570 "cbc-3des-talitos-hsna",
7405c8d7
LC
2571 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2572 .cra_flags = CRYPTO_ALG_ASYNC,
2573 },
2574 .ivsize = DES3_EDE_BLOCK_SIZE,
2575 .maxauthsize = SHA256_DIGEST_SIZE,
ef7c5c85 2576 .setkey = aead_des3_setkey,
7405c8d7
LC
2577 },
2578 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2579 DESC_HDR_SEL0_DEU |
2580 DESC_HDR_MODE0_DEU_CBC |
2581 DESC_HDR_MODE0_DEU_3DES |
2582 DESC_HDR_SEL1_MDEUA |
2583 DESC_HDR_MODE1_MDEU_INIT |
2584 DESC_HDR_MODE1_MDEU_PAD |
2585 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2586 },
d5e4aaef 2587 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2588 .alg.aead = {
2589 .base = {
2590 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2591 .cra_driver_name = "authenc-hmac-sha384-"
2592 "cbc-aes-talitos",
2593 .cra_blocksize = AES_BLOCK_SIZE,
2594 .cra_flags = CRYPTO_ALG_ASYNC,
2595 },
2596 .ivsize = AES_BLOCK_SIZE,
2597 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2598 },
2599 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2600 DESC_HDR_SEL0_AESU |
2601 DESC_HDR_MODE0_AESU_CBC |
2602 DESC_HDR_SEL1_MDEUB |
2603 DESC_HDR_MODE1_MDEU_INIT |
2604 DESC_HDR_MODE1_MDEU_PAD |
2605 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2606 },
2607 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2608 .alg.aead = {
2609 .base = {
2610 .cra_name = "authenc(hmac(sha384),"
2611 "cbc(des3_ede))",
2612 .cra_driver_name = "authenc-hmac-sha384-"
2613 "cbc-3des-talitos",
2614 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2615 .cra_flags = CRYPTO_ALG_ASYNC,
2616 },
2617 .ivsize = DES3_EDE_BLOCK_SIZE,
2618 .maxauthsize = SHA384_DIGEST_SIZE,
ef7c5c85 2619 .setkey = aead_des3_setkey,
357fb605
HG
2620 },
2621 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2622 DESC_HDR_SEL0_DEU |
2623 DESC_HDR_MODE0_DEU_CBC |
2624 DESC_HDR_MODE0_DEU_3DES |
2625 DESC_HDR_SEL1_MDEUB |
2626 DESC_HDR_MODE1_MDEU_INIT |
2627 DESC_HDR_MODE1_MDEU_PAD |
2628 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2629 },
2630 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2631 .alg.aead = {
2632 .base = {
2633 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2634 .cra_driver_name = "authenc-hmac-sha512-"
2635 "cbc-aes-talitos",
2636 .cra_blocksize = AES_BLOCK_SIZE,
2637 .cra_flags = CRYPTO_ALG_ASYNC,
2638 },
2639 .ivsize = AES_BLOCK_SIZE,
2640 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2641 },
2642 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2643 DESC_HDR_SEL0_AESU |
2644 DESC_HDR_MODE0_AESU_CBC |
2645 DESC_HDR_SEL1_MDEUB |
2646 DESC_HDR_MODE1_MDEU_INIT |
2647 DESC_HDR_MODE1_MDEU_PAD |
2648 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2649 },
2650 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2651 .alg.aead = {
2652 .base = {
2653 .cra_name = "authenc(hmac(sha512),"
2654 "cbc(des3_ede))",
2655 .cra_driver_name = "authenc-hmac-sha512-"
2656 "cbc-3des-talitos",
2657 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2658 .cra_flags = CRYPTO_ALG_ASYNC,
2659 },
2660 .ivsize = DES3_EDE_BLOCK_SIZE,
2661 .maxauthsize = SHA512_DIGEST_SIZE,
ef7c5c85 2662 .setkey = aead_des3_setkey,
357fb605
HG
2663 },
2664 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2665 DESC_HDR_SEL0_DEU |
2666 DESC_HDR_MODE0_DEU_CBC |
2667 DESC_HDR_MODE0_DEU_3DES |
2668 DESC_HDR_SEL1_MDEUB |
2669 DESC_HDR_MODE1_MDEU_INIT |
2670 DESC_HDR_MODE1_MDEU_PAD |
2671 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2672 },
2673 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2674 .alg.aead = {
2675 .base = {
2676 .cra_name = "authenc(hmac(md5),cbc(aes))",
2677 .cra_driver_name = "authenc-hmac-md5-"
2678 "cbc-aes-talitos",
2679 .cra_blocksize = AES_BLOCK_SIZE,
2680 .cra_flags = CRYPTO_ALG_ASYNC,
2681 },
2682 .ivsize = AES_BLOCK_SIZE,
2683 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2684 },
3952f17e
LN
2685 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2686 DESC_HDR_SEL0_AESU |
2687 DESC_HDR_MODE0_AESU_CBC |
2688 DESC_HDR_SEL1_MDEUA |
2689 DESC_HDR_MODE1_MDEU_INIT |
2690 DESC_HDR_MODE1_MDEU_PAD |
2691 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2692 },
7405c8d7
LC
2693 { .type = CRYPTO_ALG_TYPE_AEAD,
2694 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2695 .alg.aead = {
2696 .base = {
2697 .cra_name = "authenc(hmac(md5),cbc(aes))",
2698 .cra_driver_name = "authenc-hmac-md5-"
a1a42f84 2699 "cbc-aes-talitos-hsna",
7405c8d7
LC
2700 .cra_blocksize = AES_BLOCK_SIZE,
2701 .cra_flags = CRYPTO_ALG_ASYNC,
2702 },
2703 .ivsize = AES_BLOCK_SIZE,
2704 .maxauthsize = MD5_DIGEST_SIZE,
2705 },
2706 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2707 DESC_HDR_SEL0_AESU |
2708 DESC_HDR_MODE0_AESU_CBC |
2709 DESC_HDR_SEL1_MDEUA |
2710 DESC_HDR_MODE1_MDEU_INIT |
2711 DESC_HDR_MODE1_MDEU_PAD |
2712 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2713 },
d5e4aaef 2714 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2715 .alg.aead = {
2716 .base = {
2717 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2718 .cra_driver_name = "authenc-hmac-md5-"
2719 "cbc-3des-talitos",
2720 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2721 .cra_flags = CRYPTO_ALG_ASYNC,
2722 },
2723 .ivsize = DES3_EDE_BLOCK_SIZE,
2724 .maxauthsize = MD5_DIGEST_SIZE,
ef7c5c85 2725 .setkey = aead_des3_setkey,
56af8cd4 2726 },
3952f17e
LN
2727 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2728 DESC_HDR_SEL0_DEU |
2729 DESC_HDR_MODE0_DEU_CBC |
2730 DESC_HDR_MODE0_DEU_3DES |
2731 DESC_HDR_SEL1_MDEUA |
2732 DESC_HDR_MODE1_MDEU_INIT |
2733 DESC_HDR_MODE1_MDEU_PAD |
2734 DESC_HDR_MODE1_MDEU_MD5_HMAC,
4de9d0b5 2735 },
7405c8d7
LC
2736 { .type = CRYPTO_ALG_TYPE_AEAD,
2737 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2738 .alg.aead = {
2739 .base = {
2740 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2741 .cra_driver_name = "authenc-hmac-md5-"
a1a42f84 2742 "cbc-3des-talitos-hsna",
7405c8d7
LC
2743 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2744 .cra_flags = CRYPTO_ALG_ASYNC,
2745 },
2746 .ivsize = DES3_EDE_BLOCK_SIZE,
2747 .maxauthsize = MD5_DIGEST_SIZE,
ef7c5c85 2748 .setkey = aead_des3_setkey,
7405c8d7
LC
2749 },
2750 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2751 DESC_HDR_SEL0_DEU |
2752 DESC_HDR_MODE0_DEU_CBC |
2753 DESC_HDR_MODE0_DEU_3DES |
2754 DESC_HDR_SEL1_MDEUA |
2755 DESC_HDR_MODE1_MDEU_INIT |
2756 DESC_HDR_MODE1_MDEU_PAD |
2757 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2758 },
4de9d0b5 2759 /* ABLKCIPHER algorithms. */
5e75ae1b
LC
2760 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2761 .alg.crypto = {
2762 .cra_name = "ecb(aes)",
2763 .cra_driver_name = "ecb-aes-talitos",
2764 .cra_blocksize = AES_BLOCK_SIZE,
2765 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2766 CRYPTO_ALG_ASYNC,
2767 .cra_ablkcipher = {
2768 .min_keysize = AES_MIN_KEY_SIZE,
2769 .max_keysize = AES_MAX_KEY_SIZE,
1ba34e71 2770 .setkey = ablkcipher_aes_setkey,
5e75ae1b
LC
2771 }
2772 },
2773 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2774 DESC_HDR_SEL0_AESU,
2775 },
d5e4aaef
LN
2776 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2777 .alg.crypto = {
4de9d0b5
LN
2778 .cra_name = "cbc(aes)",
2779 .cra_driver_name = "cbc-aes-talitos",
2780 .cra_blocksize = AES_BLOCK_SIZE,
2781 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2782 CRYPTO_ALG_ASYNC,
4de9d0b5 2783 .cra_ablkcipher = {
4de9d0b5
LN
2784 .min_keysize = AES_MIN_KEY_SIZE,
2785 .max_keysize = AES_MAX_KEY_SIZE,
2786 .ivsize = AES_BLOCK_SIZE,
1ba34e71 2787 .setkey = ablkcipher_aes_setkey,
4de9d0b5
LN
2788 }
2789 },
2790 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2791 DESC_HDR_SEL0_AESU |
2792 DESC_HDR_MODE0_AESU_CBC,
2793 },
5e75ae1b
LC
2794 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2795 .alg.crypto = {
2796 .cra_name = "ctr(aes)",
2797 .cra_driver_name = "ctr-aes-talitos",
b9a05b60 2798 .cra_blocksize = 1,
5e75ae1b
LC
2799 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2800 CRYPTO_ALG_ASYNC,
2801 .cra_ablkcipher = {
2802 .min_keysize = AES_MIN_KEY_SIZE,
2803 .max_keysize = AES_MAX_KEY_SIZE,
2804 .ivsize = AES_BLOCK_SIZE,
1ba34e71 2805 .setkey = ablkcipher_aes_setkey,
5e75ae1b
LC
2806 }
2807 },
70d355cc 2808 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
5e75ae1b
LC
2809 DESC_HDR_SEL0_AESU |
2810 DESC_HDR_MODE0_AESU_CTR,
2811 },
2812 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2813 .alg.crypto = {
2814 .cra_name = "ecb(des)",
2815 .cra_driver_name = "ecb-des-talitos",
2816 .cra_blocksize = DES_BLOCK_SIZE,
2817 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2818 CRYPTO_ALG_ASYNC,
2819 .cra_ablkcipher = {
2820 .min_keysize = DES_KEY_SIZE,
2821 .max_keysize = DES_KEY_SIZE,
ef7c5c85 2822 .setkey = ablkcipher_des_setkey,
5e75ae1b
LC
2823 }
2824 },
2825 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2826 DESC_HDR_SEL0_DEU,
2827 },
2828 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2829 .alg.crypto = {
2830 .cra_name = "cbc(des)",
2831 .cra_driver_name = "cbc-des-talitos",
2832 .cra_blocksize = DES_BLOCK_SIZE,
2833 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2834 CRYPTO_ALG_ASYNC,
2835 .cra_ablkcipher = {
2836 .min_keysize = DES_KEY_SIZE,
2837 .max_keysize = DES_KEY_SIZE,
2838 .ivsize = DES_BLOCK_SIZE,
ef7c5c85 2839 .setkey = ablkcipher_des_setkey,
5e75ae1b
LC
2840 }
2841 },
2842 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2843 DESC_HDR_SEL0_DEU |
2844 DESC_HDR_MODE0_DEU_CBC,
2845 },
2846 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2847 .alg.crypto = {
2848 .cra_name = "ecb(des3_ede)",
2849 .cra_driver_name = "ecb-3des-talitos",
2850 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2851 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2852 CRYPTO_ALG_ASYNC,
2853 .cra_ablkcipher = {
2854 .min_keysize = DES3_EDE_KEY_SIZE,
2855 .max_keysize = DES3_EDE_KEY_SIZE,
ef7c5c85 2856 .setkey = ablkcipher_des3_setkey,
5e75ae1b
LC
2857 }
2858 },
2859 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2860 DESC_HDR_SEL0_DEU |
2861 DESC_HDR_MODE0_DEU_3DES,
2862 },
d5e4aaef
LN
2863 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2864 .alg.crypto = {
4de9d0b5
LN
2865 .cra_name = "cbc(des3_ede)",
2866 .cra_driver_name = "cbc-3des-talitos",
2867 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2868 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2869 CRYPTO_ALG_ASYNC,
4de9d0b5 2870 .cra_ablkcipher = {
4de9d0b5
LN
2871 .min_keysize = DES3_EDE_KEY_SIZE,
2872 .max_keysize = DES3_EDE_KEY_SIZE,
2873 .ivsize = DES3_EDE_BLOCK_SIZE,
ef7c5c85 2874 .setkey = ablkcipher_des3_setkey,
4de9d0b5
LN
2875 }
2876 },
2877 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2878 DESC_HDR_SEL0_DEU |
2879 DESC_HDR_MODE0_DEU_CBC |
2880 DESC_HDR_MODE0_DEU_3DES,
497f2e6b
LN
2881 },
2882 /* AHASH algorithms. */
2883 { .type = CRYPTO_ALG_TYPE_AHASH,
2884 .alg.hash = {
497f2e6b 2885 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2886 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2887 .halg.base = {
2888 .cra_name = "md5",
2889 .cra_driver_name = "md5-talitos",
b3988618 2890 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
6a38f622 2891 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2892 }
2893 },
2894 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2895 DESC_HDR_SEL0_MDEUA |
2896 DESC_HDR_MODE0_MDEU_MD5,
2897 },
2898 { .type = CRYPTO_ALG_TYPE_AHASH,
2899 .alg.hash = {
497f2e6b 2900 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2901 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2902 .halg.base = {
2903 .cra_name = "sha1",
2904 .cra_driver_name = "sha1-talitos",
2905 .cra_blocksize = SHA1_BLOCK_SIZE,
6a38f622 2906 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2907 }
2908 },
2909 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2910 DESC_HDR_SEL0_MDEUA |
2911 DESC_HDR_MODE0_MDEU_SHA1,
2912 },
60f208d7
KP
2913 { .type = CRYPTO_ALG_TYPE_AHASH,
2914 .alg.hash = {
60f208d7 2915 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2916 .halg.statesize = sizeof(struct talitos_export_state),
60f208d7
KP
2917 .halg.base = {
2918 .cra_name = "sha224",
2919 .cra_driver_name = "sha224-talitos",
2920 .cra_blocksize = SHA224_BLOCK_SIZE,
6a38f622 2921 .cra_flags = CRYPTO_ALG_ASYNC,
60f208d7
KP
2922 }
2923 },
2924 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2925 DESC_HDR_SEL0_MDEUA |
2926 DESC_HDR_MODE0_MDEU_SHA224,
2927 },
497f2e6b
LN
2928 { .type = CRYPTO_ALG_TYPE_AHASH,
2929 .alg.hash = {
497f2e6b 2930 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2931 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2932 .halg.base = {
2933 .cra_name = "sha256",
2934 .cra_driver_name = "sha256-talitos",
2935 .cra_blocksize = SHA256_BLOCK_SIZE,
6a38f622 2936 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2937 }
2938 },
2939 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2940 DESC_HDR_SEL0_MDEUA |
2941 DESC_HDR_MODE0_MDEU_SHA256,
2942 },
2943 { .type = CRYPTO_ALG_TYPE_AHASH,
2944 .alg.hash = {
497f2e6b 2945 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2946 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2947 .halg.base = {
2948 .cra_name = "sha384",
2949 .cra_driver_name = "sha384-talitos",
2950 .cra_blocksize = SHA384_BLOCK_SIZE,
6a38f622 2951 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2952 }
2953 },
2954 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2955 DESC_HDR_SEL0_MDEUB |
2956 DESC_HDR_MODE0_MDEUB_SHA384,
2957 },
2958 { .type = CRYPTO_ALG_TYPE_AHASH,
2959 .alg.hash = {
497f2e6b 2960 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2961 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2962 .halg.base = {
2963 .cra_name = "sha512",
2964 .cra_driver_name = "sha512-talitos",
2965 .cra_blocksize = SHA512_BLOCK_SIZE,
6a38f622 2966 .cra_flags = CRYPTO_ALG_ASYNC,
497f2e6b
LN
2967 }
2968 },
2969 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2970 DESC_HDR_SEL0_MDEUB |
2971 DESC_HDR_MODE0_MDEUB_SHA512,
2972 },
79b3a418
LN
2973 { .type = CRYPTO_ALG_TYPE_AHASH,
2974 .alg.hash = {
79b3a418 2975 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2976 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2977 .halg.base = {
2978 .cra_name = "hmac(md5)",
2979 .cra_driver_name = "hmac-md5-talitos",
b3988618 2980 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
6a38f622 2981 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
2982 }
2983 },
2984 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2985 DESC_HDR_SEL0_MDEUA |
2986 DESC_HDR_MODE0_MDEU_MD5,
2987 },
2988 { .type = CRYPTO_ALG_TYPE_AHASH,
2989 .alg.hash = {
79b3a418 2990 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2991 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2992 .halg.base = {
2993 .cra_name = "hmac(sha1)",
2994 .cra_driver_name = "hmac-sha1-talitos",
2995 .cra_blocksize = SHA1_BLOCK_SIZE,
6a38f622 2996 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
2997 }
2998 },
2999 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3000 DESC_HDR_SEL0_MDEUA |
3001 DESC_HDR_MODE0_MDEU_SHA1,
3002 },
3003 { .type = CRYPTO_ALG_TYPE_AHASH,
3004 .alg.hash = {
79b3a418 3005 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 3006 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3007 .halg.base = {
3008 .cra_name = "hmac(sha224)",
3009 .cra_driver_name = "hmac-sha224-talitos",
3010 .cra_blocksize = SHA224_BLOCK_SIZE,
6a38f622 3011 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3012 }
3013 },
3014 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3015 DESC_HDR_SEL0_MDEUA |
3016 DESC_HDR_MODE0_MDEU_SHA224,
3017 },
3018 { .type = CRYPTO_ALG_TYPE_AHASH,
3019 .alg.hash = {
79b3a418 3020 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 3021 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3022 .halg.base = {
3023 .cra_name = "hmac(sha256)",
3024 .cra_driver_name = "hmac-sha256-talitos",
3025 .cra_blocksize = SHA256_BLOCK_SIZE,
6a38f622 3026 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3027 }
3028 },
3029 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3030 DESC_HDR_SEL0_MDEUA |
3031 DESC_HDR_MODE0_MDEU_SHA256,
3032 },
3033 { .type = CRYPTO_ALG_TYPE_AHASH,
3034 .alg.hash = {
79b3a418 3035 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 3036 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3037 .halg.base = {
3038 .cra_name = "hmac(sha384)",
3039 .cra_driver_name = "hmac-sha384-talitos",
3040 .cra_blocksize = SHA384_BLOCK_SIZE,
6a38f622 3041 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3042 }
3043 },
3044 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3045 DESC_HDR_SEL0_MDEUB |
3046 DESC_HDR_MODE0_MDEUB_SHA384,
3047 },
3048 { .type = CRYPTO_ALG_TYPE_AHASH,
3049 .alg.hash = {
79b3a418 3050 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 3051 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3052 .halg.base = {
3053 .cra_name = "hmac(sha512)",
3054 .cra_driver_name = "hmac-sha512-talitos",
3055 .cra_blocksize = SHA512_BLOCK_SIZE,
6a38f622 3056 .cra_flags = CRYPTO_ALG_ASYNC,
79b3a418
LN
3057 }
3058 },
3059 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3060 DESC_HDR_SEL0_MDEUB |
3061 DESC_HDR_MODE0_MDEUB_SHA512,
3062 }
9c4a7965
KP
3063};
3064
3065struct talitos_crypto_alg {
3066 struct list_head entry;
3067 struct device *dev;
acbf7c62 3068 struct talitos_alg_template algt;
9c4a7965
KP
3069};
3070
89d124cb
JE
3071static int talitos_init_common(struct talitos_ctx *ctx,
3072 struct talitos_crypto_alg *talitos_alg)
9c4a7965 3073{
5228f0f7 3074 struct talitos_private *priv;
9c4a7965
KP
3075
3076 /* update context with ptr to dev */
3077 ctx->dev = talitos_alg->dev;
19bbbc63 3078
5228f0f7
KP
3079 /* assign SEC channel to tfm in round-robin fashion */
3080 priv = dev_get_drvdata(ctx->dev);
3081 ctx->ch = atomic_inc_return(&priv->last_chan) &
3082 (priv->num_channels - 1);
3083
9c4a7965 3084 /* copy descriptor header template value */
acbf7c62 3085 ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
9c4a7965 3086
602dba5a
KP
3087 /* select done notification */
3088 ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
3089
497f2e6b
LN
3090 return 0;
3091}
3092
89d124cb
JE
3093static int talitos_cra_init(struct crypto_tfm *tfm)
3094{
3095 struct crypto_alg *alg = tfm->__crt_alg;
3096 struct talitos_crypto_alg *talitos_alg;
3097 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3098
3099 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_AHASH)
3100 talitos_alg = container_of(__crypto_ahash_alg(alg),
3101 struct talitos_crypto_alg,
3102 algt.alg.hash);
3103 else
3104 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3105 algt.alg.crypto);
3106
3107 return talitos_init_common(ctx, talitos_alg);
3108}
3109
aeb4c132 3110static int talitos_cra_init_aead(struct crypto_aead *tfm)
497f2e6b 3111{
89d124cb
JE
3112 struct aead_alg *alg = crypto_aead_alg(tfm);
3113 struct talitos_crypto_alg *talitos_alg;
3114 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3115
3116 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3117 algt.alg.aead);
3118
3119 return talitos_init_common(ctx, talitos_alg);
9c4a7965
KP
3120}
3121
497f2e6b
LN
3122static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3123{
3124 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3125
3126 talitos_cra_init(tfm);
3127
3128 ctx->keylen = 0;
3129 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3130 sizeof(struct talitos_ahash_req_ctx));
3131
3132 return 0;
3133}
3134
2e13ce08
LC
3135static void talitos_cra_exit(struct crypto_tfm *tfm)
3136{
3137 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3138 struct device *dev = ctx->dev;
3139
3140 if (ctx->keylen)
3141 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3142}
3143
9c4a7965
KP
3144/*
3145 * given the alg's descriptor header template, determine whether descriptor
3146 * type and primary/secondary execution units required match the hw
3147 * capabilities description provided in the device tree node.
3148 */
3149static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3150{
3151 struct talitos_private *priv = dev_get_drvdata(dev);
3152 int ret;
3153
3154 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3155 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3156
3157 if (SECONDARY_EU(desc_hdr_template))
3158 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3159 & priv->exec_units);
3160
3161 return ret;
3162}
3163
2dc11581 3164static int talitos_remove(struct platform_device *ofdev)
9c4a7965
KP
3165{
3166 struct device *dev = &ofdev->dev;
3167 struct talitos_private *priv = dev_get_drvdata(dev);
3168 struct talitos_crypto_alg *t_alg, *n;
3169 int i;
3170
3171 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
acbf7c62
LN
3172 switch (t_alg->algt.type) {
3173 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62 3174 break;
aeb4c132
HX
3175 case CRYPTO_ALG_TYPE_AEAD:
3176 crypto_unregister_aead(&t_alg->algt.alg.aead);
acbf7c62
LN
3177 case CRYPTO_ALG_TYPE_AHASH:
3178 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3179 break;
3180 }
9c4a7965 3181 list_del(&t_alg->entry);
9c4a7965
KP
3182 }
3183
3184 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3185 talitos_unregister_rng(dev);
3186
c3e337f8 3187 for (i = 0; i < 2; i++)
2cdba3cf 3188 if (priv->irq[i]) {
c3e337f8
KP
3189 free_irq(priv->irq[i], dev);
3190 irq_dispose_mapping(priv->irq[i]);
3191 }
9c4a7965 3192
c3e337f8 3193 tasklet_kill(&priv->done_task[0]);
2cdba3cf 3194 if (priv->irq[1])
c3e337f8 3195 tasklet_kill(&priv->done_task[1]);
9c4a7965 3196
9c4a7965
KP
3197 return 0;
3198}
3199
static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
						     struct talitos_alg_template
						            *template)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
			     GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	t_alg->algt = *template;

	switch (t_alg->algt.type) {
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg = &t_alg->algt.alg.crypto;
		alg->cra_init = talitos_cra_init;
		alg->cra_exit = talitos_cra_exit;
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher.setkey = alg->cra_ablkcipher.setkey ?:
					     ablkcipher_setkey;
		alg->cra_ablkcipher.encrypt = ablkcipher_encrypt;
		alg->cra_ablkcipher.decrypt = ablkcipher_decrypt;
		break;
	case CRYPTO_ALG_TYPE_AEAD:
		alg = &t_alg->algt.alg.aead.base;
		alg->cra_exit = talitos_cra_exit;
		t_alg->algt.alg.aead.init = talitos_cra_init_aead;
		t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
					      aead_setkey;
		t_alg->algt.alg.aead.encrypt = aead_encrypt;
		t_alg->algt.alg.aead.decrypt = aead_decrypt;
		if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
		    !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
			devm_kfree(dev, t_alg);
			return ERR_PTR(-ENOTSUPP);
		}
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		alg = &t_alg->algt.alg.hash.halg.base;
		alg->cra_init = talitos_cra_init_ahash;
		alg->cra_exit = talitos_cra_exit;
		t_alg->algt.alg.hash.init = ahash_init;
		t_alg->algt.alg.hash.update = ahash_update;
		t_alg->algt.alg.hash.final = ahash_final;
		t_alg->algt.alg.hash.finup = ahash_finup;
		t_alg->algt.alg.hash.digest = ahash_digest;
		if (!strncmp(alg->cra_name, "hmac", 4))
			t_alg->algt.alg.hash.setkey = ahash_setkey;
		t_alg->algt.alg.hash.import = ahash_import;
		t_alg->algt.alg.hash.export = ahash_export;

		if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
		    !strncmp(alg->cra_name, "hmac", 4)) {
			devm_kfree(dev, t_alg);
			return ERR_PTR(-ENOTSUPP);
		}
		if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
		    (!strcmp(alg->cra_name, "sha224") ||
		     !strcmp(alg->cra_name, "hmac(sha224)"))) {
			t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
			t_alg->algt.desc_hdr_template =
					DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
					DESC_HDR_SEL0_MDEUA |
					DESC_HDR_MODE0_MDEU_SHA256;
		}
		break;
	default:
		dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
		devm_kfree(dev, t_alg);
		return ERR_PTR(-EINVAL);
	}

	alg->cra_module = THIS_MODULE;
	if (t_alg->algt.priority)
		alg->cra_priority = t_alg->algt.priority;
	else
		alg->cra_priority = TALITOS_CRA_PRIORITY;
	if (has_ftr_sec1(priv))
		alg->cra_alignmask = 3;
	else
		alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct talitos_ctx);
	alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->dev = dev;

	return t_alg;
}

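/*
 * Map and request the SEC interrupt lines.  SEC1 uses a single IRQ for
 * all channels; SEC2+ may provide a second IRQ, in which case the
 * primary line serves channels 0/2 and the secondary serves channels 1/3.
 */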
static int talitos_probe_irq(struct platform_device *ofdev)
{
	struct device *dev = &ofdev->dev;
	struct device_node *np = ofdev->dev.of_node;
	struct talitos_private *priv = dev_get_drvdata(dev);
	int err;
	bool is_sec1 = has_ftr_sec1(priv);

	priv->irq[0] = irq_of_parse_and_map(np, 0);
	if (!priv->irq[0]) {
		dev_err(dev, "failed to map irq\n");
		return -EINVAL;
	}
	if (is_sec1) {
		err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
				  dev_driver_string(dev), dev);
		goto primary_out;
	}

	priv->irq[1] = irq_of_parse_and_map(np, 1);

	/* get the primary irq line */
	if (!priv->irq[1]) {
		err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
				  dev_driver_string(dev), dev);
		goto primary_out;
	}

	err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
			  dev_driver_string(dev), dev);
	if (err)
		goto primary_out;

	/* get the secondary irq line */
	err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
			  dev_driver_string(dev), dev);
	if (err) {
		dev_err(dev, "failed to request secondary irq\n");
		irq_dispose_mapping(priv->irq[1]);
		priv->irq[1] = 0;
	}

	return err;

primary_out:
	if (err) {
		dev_err(dev, "failed to request primary irq\n");
		irq_dispose_mapping(priv->irq[0]);
		priv->irq[0] = 0;
	}

	return err;
}

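/*
 * Probe: map the controller registers, read the SEC capabilities from
 * the device tree, set up interrupts and completion tasklets, allocate
 * per-channel request fifos, reset the hardware, then register the RNG
 * (if present) and every algorithm the execution units support.
 */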
1c48a5c9 3346static int talitos_probe(struct platform_device *ofdev)
9c4a7965
KP
3347{
3348 struct device *dev = &ofdev->dev;
61c7a080 3349 struct device_node *np = ofdev->dev.of_node;
9c4a7965 3350 struct talitos_private *priv;
9c4a7965 3351 int i, err;
5fa7fa14 3352 int stride;
fd5ea7f0 3353 struct resource *res;
9c4a7965 3354
24b92ff2 3355 priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
9c4a7965
KP
3356 if (!priv)
3357 return -ENOMEM;
3358
f3de9cb1
KH
3359 INIT_LIST_HEAD(&priv->alg_list);
3360
9c4a7965
KP
3361 dev_set_drvdata(dev, priv);
3362
3363 priv->ofdev = ofdev;
3364
511d63cb
HG
3365 spin_lock_init(&priv->reg_lock);
3366
fd5ea7f0
LC
3367 res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3368 if (!res)
3369 return -ENXIO;
3370 priv->reg = devm_ioremap(dev, res->start, resource_size(res));
9c4a7965
KP
3371 if (!priv->reg) {
3372 dev_err(dev, "failed to of_iomap\n");
3373 err = -ENOMEM;
3374 goto err_out;
3375 }
3376
3377 /* get SEC version capabilities from device tree */
fa14c6cf
LC
3378 of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3379 of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3380 of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3381 of_property_read_u32(np, "fsl,descriptor-types-mask",
3382 &priv->desc_types);
9c4a7965
KP
3383
3384 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3385 !priv->exec_units || !priv->desc_types) {
3386 dev_err(dev, "invalid property data in device tree node\n");
3387 err = -EINVAL;
3388 goto err_out;
3389 }
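	/*
	 * Illustrative device tree node for a SEC 2.0 as found on an
	 * MPC83xx-class SoC; the values shown are only an example and the
	 * real ones come from the SoC's dtsi:
	 *
	 *	crypto@30000 {
	 *		compatible = "fsl,sec2.0";
	 *		reg = <0x30000 0x10000>;
	 *		interrupts = <11 0x8>;
	 *		interrupt-parent = <&ipic>;
	 *		fsl,num-channels = <4>;
	 *		fsl,channel-fifo-len = <24>;
	 *		fsl,exec-units-mask = <0x7e>;
	 *		fsl,descriptor-types-mask = <0x01010ebf>;
	 *	};
	 */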

	if (of_device_is_compatible(np, "fsl,sec3.0"))
		priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;

	if (of_device_is_compatible(np, "fsl,sec2.1"))
		priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
				  TALITOS_FTR_SHA224_HWINIT |
				  TALITOS_FTR_HMAC_OK;

	if (of_device_is_compatible(np, "fsl,sec1.0"))
		priv->features |= TALITOS_FTR_SEC1;

	if (of_device_is_compatible(np, "fsl,sec1.2")) {
		priv->reg_deu = priv->reg + TALITOS12_DEU;
		priv->reg_aesu = priv->reg + TALITOS12_AESU;
		priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
		stride = TALITOS1_CH_STRIDE;
	} else if (of_device_is_compatible(np, "fsl,sec1.0")) {
		priv->reg_deu = priv->reg + TALITOS10_DEU;
		priv->reg_aesu = priv->reg + TALITOS10_AESU;
		priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
		priv->reg_afeu = priv->reg + TALITOS10_AFEU;
		priv->reg_rngu = priv->reg + TALITOS10_RNGU;
		priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
		stride = TALITOS1_CH_STRIDE;
	} else {
		priv->reg_deu = priv->reg + TALITOS2_DEU;
		priv->reg_aesu = priv->reg + TALITOS2_AESU;
		priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
		priv->reg_afeu = priv->reg + TALITOS2_AFEU;
		priv->reg_rngu = priv->reg + TALITOS2_RNGU;
		priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
		priv->reg_keu = priv->reg + TALITOS2_KEU;
		priv->reg_crcu = priv->reg + TALITOS2_CRCU;
		stride = TALITOS2_CH_STRIDE;
	}

	err = talitos_probe_irq(ofdev);
	if (err)
		goto err_out;

	if (of_device_is_compatible(np, "fsl,sec1.0")) {
		if (priv->num_channels == 1)
			tasklet_init(&priv->done_task[0], talitos1_done_ch0,
				     (unsigned long)dev);
		else
			tasklet_init(&priv->done_task[0], talitos1_done_4ch,
				     (unsigned long)dev);
	} else {
		if (priv->irq[1]) {
			tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
				     (unsigned long)dev);
			tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
				     (unsigned long)dev);
		} else if (priv->num_channels == 1) {
			tasklet_init(&priv->done_task[0], talitos2_done_ch0,
				     (unsigned long)dev);
		} else {
			tasklet_init(&priv->done_task[0], talitos2_done_4ch,
				     (unsigned long)dev);
		}
	}

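	/*
	 * Allocate per-channel bookkeeping and a request fifo sized to the
	 * next power of two above the hardware channel fifo length.
	 */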
	priv->chan = devm_kcalloc(dev,
				  priv->num_channels,
				  sizeof(struct talitos_channel),
				  GFP_KERNEL);
	if (!priv->chan) {
		dev_err(dev, "failed to allocate channel management space\n");
		err = -ENOMEM;
		goto err_out;
	}

	priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);

	for (i = 0; i < priv->num_channels; i++) {
		priv->chan[i].reg = priv->reg + stride * (i + 1);
		if (!priv->irq[1] || !(i & 1))
			priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;

		spin_lock_init(&priv->chan[i].head_lock);
		spin_lock_init(&priv->chan[i].tail_lock);

		priv->chan[i].fifo = devm_kcalloc(dev,
						  priv->fifo_len,
						  sizeof(struct talitos_request),
						  GFP_KERNEL);
		if (!priv->chan[i].fifo) {
			dev_err(dev, "failed to allocate request fifo %d\n", i);
			err = -ENOMEM;
			goto err_out;
		}

		atomic_set(&priv->chan[i].submit_count,
			   -(priv->chfifo_len - 1));
	}

	dma_set_mask(dev, DMA_BIT_MASK(36));

	/* reset and initialize the h/w */
	err = init_device(dev);
	if (err) {
		dev_err(dev, "failed to initialize device\n");
		goto err_out;
	}

	/* register the RNG, if available */
	if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
		err = talitos_register_rng(dev);
		if (err) {
			dev_err(dev, "failed to register hwrng: %d\n", err);
			goto err_out;
		} else
			dev_info(dev, "hwrng\n");
	}

	/* register crypto algorithms the device supports */
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
			struct talitos_crypto_alg *t_alg;
			struct crypto_alg *alg = NULL;

			t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
			if (IS_ERR(t_alg)) {
				err = PTR_ERR(t_alg);
				if (err == -ENOTSUPP)
					continue;
				goto err_out;
			}

			switch (t_alg->algt.type) {
			case CRYPTO_ALG_TYPE_ABLKCIPHER:
				err = crypto_register_alg(
						&t_alg->algt.alg.crypto);
				alg = &t_alg->algt.alg.crypto;
				break;

			case CRYPTO_ALG_TYPE_AEAD:
				err = crypto_register_aead(
						&t_alg->algt.alg.aead);
				alg = &t_alg->algt.alg.aead.base;
				break;

			case CRYPTO_ALG_TYPE_AHASH:
				err = crypto_register_ahash(
						&t_alg->algt.alg.hash);
				alg = &t_alg->algt.alg.hash.halg.base;
				break;
			}
			if (err) {
				dev_err(dev, "%s alg registration failed\n",
					alg->cra_driver_name);
				devm_kfree(dev, t_alg);
			} else
				list_add_tail(&t_alg->entry, &priv->alg_list);
		}
	}
	if (!list_empty(&priv->alg_list))
		dev_info(dev, "%s algorithms registered in /proc/crypto\n",
			 (char *)of_get_property(np, "compatible", NULL));

	return 0;

err_out:
	talitos_remove(ofdev);

	return err;
}

static const struct of_device_id talitos_match[] = {
#ifdef CONFIG_CRYPTO_DEV_TALITOS1
	{
		.compatible = "fsl,sec1.0",
	},
#endif
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
	{
		.compatible = "fsl,sec2.0",
	},
#endif
	{},
};
MODULE_DEVICE_TABLE(of, talitos_match);

static struct platform_driver talitos_driver = {
	.driver = {
		.name = "talitos",
		.of_match_table = talitos_match,
	},
	.probe = talitos_probe,
	.remove = talitos_remove,
};

module_platform_driver(talitos_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");