/*
 * talitos - Freescale Integrated Security Engine (SEC) device driver
 *
 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
 *
 * Scatterlist Crypto API glue code copied from files with the following:
 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Crypto algorithm registration code copied from hifn driver:
 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
 * All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/crypto.h>
#include <linux/hw_random.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>

#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/md5.h>
#include <crypto/internal/aead.h>
#include <crypto/authenc.h>
#include <crypto/skcipher.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>

#include "talitos.h"

static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
                           unsigned int len, bool is_sec1)
{
    ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
    if (is_sec1) {
        ptr->len1 = cpu_to_be16(len);
    } else {
        ptr->len = cpu_to_be16(len);
        ptr->eptr = upper_32_bits(dma_addr);
    }
}

static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
                             struct talitos_ptr *src_ptr, bool is_sec1)
{
    dst_ptr->ptr = src_ptr->ptr;
    if (is_sec1) {
        dst_ptr->len1 = src_ptr->len1;
    } else {
        dst_ptr->len = src_ptr->len;
        dst_ptr->eptr = src_ptr->eptr;
    }
}

static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
                                           bool is_sec1)
{
    if (is_sec1)
        return be16_to_cpu(ptr->len1);
    else
        return be16_to_cpu(ptr->len);
}

static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
                                   bool is_sec1)
{
    if (!is_sec1)
        ptr->j_extent = val;
}

static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
{
    if (!is_sec1)
        ptr->j_extent |= val;
}

/*
 * map virtual single (contiguous) pointer to h/w descriptor pointer
 */
static void map_single_talitos_ptr(struct device *dev,
                                   struct talitos_ptr *ptr,
                                   unsigned int len, void *data,
                                   enum dma_data_direction dir)
{
    dma_addr_t dma_addr = dma_map_single(dev, data, len, dir);
    struct talitos_private *priv = dev_get_drvdata(dev);
    bool is_sec1 = has_ftr_sec1(priv);

    to_talitos_ptr(ptr, dma_addr, len, is_sec1);
}

/*
 * unmap bus single (contiguous) h/w descriptor pointer
 */
static void unmap_single_talitos_ptr(struct device *dev,
                                     struct talitos_ptr *ptr,
                                     enum dma_data_direction dir)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    bool is_sec1 = has_ftr_sec1(priv);

    dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
                     from_talitos_ptr_len(ptr, is_sec1), dir);
}

static int reset_channel(struct device *dev, int ch)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    unsigned int timeout = TALITOS_TIMEOUT;
    bool is_sec1 = has_ftr_sec1(priv);

    if (is_sec1) {
        setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
                  TALITOS1_CCCR_LO_RESET);

        while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
                TALITOS1_CCCR_LO_RESET) && --timeout)
            cpu_relax();
    } else {
        setbits32(priv->chan[ch].reg + TALITOS_CCCR,
                  TALITOS2_CCCR_RESET);

        while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
                TALITOS2_CCCR_RESET) && --timeout)
            cpu_relax();
    }

    if (timeout == 0) {
        dev_err(dev, "failed to reset channel %d\n", ch);
        return -EIO;
    }

    /* set 36-bit addressing, done writeback enable and done IRQ enable */
    setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
              TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
    /* enable chaining descriptors */
    if (is_sec1)
        setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
                  TALITOS_CCCR_LO_NE);

    /* and ICCR writeback, if available */
    if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
        setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
                  TALITOS_CCCR_LO_IWSE);

    return 0;
}

static int reset_device(struct device *dev)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    unsigned int timeout = TALITOS_TIMEOUT;
    bool is_sec1 = has_ftr_sec1(priv);
    u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;

    setbits32(priv->reg + TALITOS_MCR, mcr);

    while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
           && --timeout)
        cpu_relax();

    if (priv->irq[1]) {
        mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
        setbits32(priv->reg + TALITOS_MCR, mcr);
    }

    if (timeout == 0) {
        dev_err(dev, "failed to reset device\n");
        return -EIO;
    }

    return 0;
}

/*
 * Reset and initialize the device
 */
static int init_device(struct device *dev)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    int ch, err;
    bool is_sec1 = has_ftr_sec1(priv);

    /*
     * Master reset
     * errata documentation: warning: certain SEC interrupts
     * are not fully cleared by writing the MCR:SWR bit,
     * set bit twice to completely reset
     */
    err = reset_device(dev);
    if (err)
        return err;

    err = reset_device(dev);
    if (err)
        return err;

    /* reset channels */
    for (ch = 0; ch < priv->num_channels; ch++) {
        err = reset_channel(dev, ch);
        if (err)
            return err;
    }

    /* enable channel done and error interrupts */
    if (is_sec1) {
        clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
        clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
        /* disable parity error check in DEU (erroneous? test vect.) */
        setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
    } else {
        setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
        setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
    }

    /* disable integrity check error interrupts (use writeback instead) */
    if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
        setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
                  TALITOS_MDEUICR_LO_ICE);

    return 0;
}

/**
 * talitos_submit - submits a descriptor to the device for processing
 * @dev:      the SEC device to be used
 * @ch:       the SEC device channel to be used
 * @desc:     the descriptor to be processed by the device
 * @callback: whom to call when processing is complete
 * @context:  a handle for use by caller (optional)
 *
 * desc must contain valid dma-mapped (bus physical) address pointers.
 * callback must check err and feedback in descriptor header
 * for device processing status.
 */
int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
                   void (*callback)(struct device *dev,
                                    struct talitos_desc *desc,
                                    void *context, int error),
                   void *context)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    struct talitos_request *request;
    unsigned long flags;
    int head;
    bool is_sec1 = has_ftr_sec1(priv);

    spin_lock_irqsave(&priv->chan[ch].head_lock, flags);

    if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
        /* h/w fifo is full */
        spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
        return -EAGAIN;
    }

    head = priv->chan[ch].head;
    request = &priv->chan[ch].fifo[head];

    /* map descriptor and save caller data */
    if (is_sec1) {
        desc->hdr1 = desc->hdr;
        request->dma_desc = dma_map_single(dev, &desc->hdr1,
                                           TALITOS_DESC_SIZE,
                                           DMA_BIDIRECTIONAL);
    } else {
        request->dma_desc = dma_map_single(dev, desc,
                                           TALITOS_DESC_SIZE,
                                           DMA_BIDIRECTIONAL);
    }
    request->callback = callback;
    request->context = context;

    /* increment fifo head */
    priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);

    smp_wmb();
    request->desc = desc;

    /* GO! */
    wmb();
    out_be32(priv->chan[ch].reg + TALITOS_FF,
             upper_32_bits(request->dma_desc));
    out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
             lower_32_bits(request->dma_desc));

    spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);

    return -EINPROGRESS;
}
EXPORT_SYMBOL(talitos_submit);
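
/*
 * Example (illustrative only, not part of this driver): a minimal caller of
 * talitos_submit().  The caller owns a fully dma-mapped descriptor and
 * finishes its own request state from the callback.  The names my_done,
 * my_ctx, complete_my_request and handle_submit_failure are hypothetical.
 *
 *    static void my_done(struct device *dev, struct talitos_desc *desc,
 *                        void *context, int error)
 *    {
 *        struct my_ctx *c = context;
 *
 *        // error is 0 when the descriptor completed with DONE set,
 *        // negative on a channel or execution-unit error
 *        complete_my_request(c, error);
 *    }
 *
 *    ret = talitos_submit(dev, ch, desc, my_done, c);
 *    if (ret != -EINPROGRESS)
 *        // -EAGAIN means the channel fifo is full; the caller may retry
 *        handle_submit_failure(c, ret);
 */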

/*
 * process what was done, notify callback of error if not
 */
static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    struct talitos_request *request, saved_req;
    unsigned long flags;
    int tail, status;
    bool is_sec1 = has_ftr_sec1(priv);

    spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);

    tail = priv->chan[ch].tail;
    while (priv->chan[ch].fifo[tail].desc) {
        __be32 hdr;

        request = &priv->chan[ch].fifo[tail];

        /* descriptors with their done bits set don't get the error */
        rmb();
        if (!is_sec1)
            hdr = request->desc->hdr;
        else if (request->desc->next_desc)
            hdr = (request->desc + 1)->hdr1;
        else
            hdr = request->desc->hdr1;

        if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
            status = 0;
        else
            if (!error)
                break;
            else
                status = error;

        dma_unmap_single(dev, request->dma_desc,
                         TALITOS_DESC_SIZE,
                         DMA_BIDIRECTIONAL);

        /* copy entries so we can call callback outside lock */
        saved_req.desc = request->desc;
        saved_req.callback = request->callback;
        saved_req.context = request->context;

        /* release request entry in fifo */
        smp_wmb();
        request->desc = NULL;

        /* increment fifo tail */
        priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);

        spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);

        atomic_dec(&priv->chan[ch].submit_count);

        saved_req.callback(dev, saved_req.desc, saved_req.context,
                           status);
        /* channel may resume processing in single desc error case */
        if (error && !reset_ch && status == error)
            return;
        spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
        tail = priv->chan[ch].tail;
    }

    spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
}

/*
 * process completed requests for channels that have done status
 */
#define DEF_TALITOS1_DONE(name, ch_done_mask) \
static void talitos1_done_##name(unsigned long data) \
{ \
    struct device *dev = (struct device *)data; \
    struct talitos_private *priv = dev_get_drvdata(dev); \
    unsigned long flags; \
 \
    if (ch_done_mask & 0x10000000) \
        flush_channel(dev, 0, 0, 0); \
    if (ch_done_mask & 0x40000000) \
        flush_channel(dev, 1, 0, 0); \
    if (ch_done_mask & 0x00010000) \
        flush_channel(dev, 2, 0, 0); \
    if (ch_done_mask & 0x00040000) \
        flush_channel(dev, 3, 0, 0); \
 \
    /* At this point, all completed channels have been processed */ \
    /* Unmask done interrupts for channels completed later on. */ \
    spin_lock_irqsave(&priv->reg_lock, flags); \
    clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
    clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT); \
    spin_unlock_irqrestore(&priv->reg_lock, flags); \
}

DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)

#define DEF_TALITOS2_DONE(name, ch_done_mask) \
static void talitos2_done_##name(unsigned long data) \
{ \
    struct device *dev = (struct device *)data; \
    struct talitos_private *priv = dev_get_drvdata(dev); \
    unsigned long flags; \
 \
    if (ch_done_mask & 1) \
        flush_channel(dev, 0, 0, 0); \
    if (ch_done_mask & (1 << 2)) \
        flush_channel(dev, 1, 0, 0); \
    if (ch_done_mask & (1 << 4)) \
        flush_channel(dev, 2, 0, 0); \
    if (ch_done_mask & (1 << 6)) \
        flush_channel(dev, 3, 0, 0); \
 \
    /* At this point, all completed channels have been processed */ \
    /* Unmask done interrupts for channels completed later on. */ \
    spin_lock_irqsave(&priv->reg_lock, flags); \
    setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
    setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT); \
    spin_unlock_irqrestore(&priv->reg_lock, flags); \
}

DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)

/*
 * locate current (offending) descriptor
 */
static u32 current_desc_hdr(struct device *dev, int ch)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    int tail, iter;
    dma_addr_t cur_desc;

    cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
    cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);

    if (!cur_desc) {
        dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
        return 0;
    }

    tail = priv->chan[ch].tail;

    iter = tail;
    while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
           priv->chan[ch].fifo[iter].desc->next_desc != cur_desc) {
        iter = (iter + 1) & (priv->fifo_len - 1);
        if (iter == tail) {
            dev_err(dev, "couldn't locate current descriptor\n");
            return 0;
        }
    }

    if (priv->chan[ch].fifo[iter].desc->next_desc == cur_desc)
        return (priv->chan[ch].fifo[iter].desc + 1)->hdr;

    return priv->chan[ch].fifo[iter].desc->hdr;
}

/*
 * user diagnostics; report root cause of error based on execution unit status
 */
static void report_eu_error(struct device *dev, int ch, u32 desc_hdr)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    int i;

    if (!desc_hdr)
        desc_hdr = in_be32(priv->chan[ch].reg + TALITOS_DESCBUF);

    switch (desc_hdr & DESC_HDR_SEL0_MASK) {
    case DESC_HDR_SEL0_AFEU:
        dev_err(dev, "AFEUISR 0x%08x_%08x\n",
                in_be32(priv->reg_afeu + TALITOS_EUISR),
                in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
        break;
    case DESC_HDR_SEL0_DEU:
        dev_err(dev, "DEUISR 0x%08x_%08x\n",
                in_be32(priv->reg_deu + TALITOS_EUISR),
                in_be32(priv->reg_deu + TALITOS_EUISR_LO));
        break;
    case DESC_HDR_SEL0_MDEUA:
    case DESC_HDR_SEL0_MDEUB:
        dev_err(dev, "MDEUISR 0x%08x_%08x\n",
                in_be32(priv->reg_mdeu + TALITOS_EUISR),
                in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
        break;
    case DESC_HDR_SEL0_RNG:
        dev_err(dev, "RNGUISR 0x%08x_%08x\n",
                in_be32(priv->reg_rngu + TALITOS_ISR),
                in_be32(priv->reg_rngu + TALITOS_ISR_LO));
        break;
    case DESC_HDR_SEL0_PKEU:
        dev_err(dev, "PKEUISR 0x%08x_%08x\n",
                in_be32(priv->reg_pkeu + TALITOS_EUISR),
                in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
        break;
    case DESC_HDR_SEL0_AESU:
        dev_err(dev, "AESUISR 0x%08x_%08x\n",
                in_be32(priv->reg_aesu + TALITOS_EUISR),
                in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
        break;
    case DESC_HDR_SEL0_CRCU:
        dev_err(dev, "CRCUISR 0x%08x_%08x\n",
                in_be32(priv->reg_crcu + TALITOS_EUISR),
                in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
        break;
    case DESC_HDR_SEL0_KEU:
        dev_err(dev, "KEUISR 0x%08x_%08x\n",
                in_be32(priv->reg_pkeu + TALITOS_EUISR),
                in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
        break;
    }

    switch (desc_hdr & DESC_HDR_SEL1_MASK) {
    case DESC_HDR_SEL1_MDEUA:
    case DESC_HDR_SEL1_MDEUB:
        dev_err(dev, "MDEUISR 0x%08x_%08x\n",
                in_be32(priv->reg_mdeu + TALITOS_EUISR),
                in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
        break;
    case DESC_HDR_SEL1_CRCU:
        dev_err(dev, "CRCUISR 0x%08x_%08x\n",
                in_be32(priv->reg_crcu + TALITOS_EUISR),
                in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
        break;
    }

    for (i = 0; i < 8; i++)
        dev_err(dev, "DESCBUF 0x%08x_%08x\n",
                in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
                in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
}

/*
 * recover from error interrupts
 */
static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    unsigned int timeout = TALITOS_TIMEOUT;
    int ch, error, reset_dev = 0;
    u32 v_lo;
    bool is_sec1 = has_ftr_sec1(priv);
    int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */

    for (ch = 0; ch < priv->num_channels; ch++) {
        /* skip channels without errors */
        if (is_sec1) {
            /* bits 29, 31, 17, 19 */
            if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
                continue;
        } else {
            if (!(isr & (1 << (ch * 2 + 1))))
                continue;
        }

        error = -EINVAL;

        v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);

        if (v_lo & TALITOS_CCPSR_LO_DOF) {
            dev_err(dev, "double fetch fifo overflow error\n");
            error = -EAGAIN;
            reset_ch = 1;
        }
        if (v_lo & TALITOS_CCPSR_LO_SOF) {
            /* h/w dropped descriptor */
            dev_err(dev, "single fetch fifo overflow error\n");
            error = -EAGAIN;
        }
        if (v_lo & TALITOS_CCPSR_LO_MDTE)
            dev_err(dev, "master data transfer error\n");
        if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
            dev_err(dev, is_sec1 ? "pointer not complete error\n"
                                 : "s/g data length zero error\n");
        if (v_lo & TALITOS_CCPSR_LO_FPZ)
            dev_err(dev, is_sec1 ? "parity error\n"
                                 : "fetch pointer zero error\n");
        if (v_lo & TALITOS_CCPSR_LO_IDH)
            dev_err(dev, "illegal descriptor header error\n");
        if (v_lo & TALITOS_CCPSR_LO_IEU)
            dev_err(dev, is_sec1 ? "static assignment error\n"
                                 : "invalid exec unit error\n");
        if (v_lo & TALITOS_CCPSR_LO_EU)
            report_eu_error(dev, ch, current_desc_hdr(dev, ch));
        if (!is_sec1) {
            if (v_lo & TALITOS_CCPSR_LO_GB)
                dev_err(dev, "gather boundary error\n");
            if (v_lo & TALITOS_CCPSR_LO_GRL)
                dev_err(dev, "gather return/length error\n");
            if (v_lo & TALITOS_CCPSR_LO_SB)
                dev_err(dev, "scatter boundary error\n");
            if (v_lo & TALITOS_CCPSR_LO_SRL)
                dev_err(dev, "scatter return/length error\n");
        }

        flush_channel(dev, ch, error, reset_ch);

        if (reset_ch) {
            reset_channel(dev, ch);
        } else {
            setbits32(priv->chan[ch].reg + TALITOS_CCCR,
                      TALITOS2_CCCR_CONT);
            setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
            while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
                   TALITOS2_CCCR_CONT) && --timeout)
                cpu_relax();
            if (timeout == 0) {
                dev_err(dev, "failed to restart channel %d\n",
                        ch);
                reset_dev = 1;
            }
        }
    }
    if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
        (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
        if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
            dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
                    isr, isr_lo);
        else
            dev_err(dev, "done overflow, internal time out, or "
                    "rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);

        /* purge request queues */
        for (ch = 0; ch < priv->num_channels; ch++)
            flush_channel(dev, ch, -EIO, 1);

        /* reset and reinitialize the device */
        init_device(dev);
    }
}

#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
static irqreturn_t talitos1_interrupt_##name(int irq, void *data) \
{ \
    struct device *dev = data; \
    struct talitos_private *priv = dev_get_drvdata(dev); \
    u32 isr, isr_lo; \
    unsigned long flags; \
 \
    spin_lock_irqsave(&priv->reg_lock, flags); \
    isr = in_be32(priv->reg + TALITOS_ISR); \
    isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
    /* Acknowledge interrupt */ \
    out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
    out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
 \
    if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) { \
        spin_unlock_irqrestore(&priv->reg_lock, flags); \
        talitos_error(dev, isr & ch_err_mask, isr_lo); \
    } \
    else { \
        if (likely(isr & ch_done_mask)) { \
            /* mask further done interrupts. */ \
            setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
            /* done_task will unmask done interrupts at exit */ \
            tasklet_schedule(&priv->done_task[tlet]); \
        } \
        spin_unlock_irqrestore(&priv->reg_lock, flags); \
    } \
 \
    return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
                                                            IRQ_NONE; \
}

DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)

#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
static irqreturn_t talitos2_interrupt_##name(int irq, void *data) \
{ \
    struct device *dev = data; \
    struct talitos_private *priv = dev_get_drvdata(dev); \
    u32 isr, isr_lo; \
    unsigned long flags; \
 \
    spin_lock_irqsave(&priv->reg_lock, flags); \
    isr = in_be32(priv->reg + TALITOS_ISR); \
    isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
    /* Acknowledge interrupt */ \
    out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
    out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
 \
    if (unlikely(isr & ch_err_mask || isr_lo)) { \
        spin_unlock_irqrestore(&priv->reg_lock, flags); \
        talitos_error(dev, isr & ch_err_mask, isr_lo); \
    } \
    else { \
        if (likely(isr & ch_done_mask)) { \
            /* mask further done interrupts. */ \
            clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
            /* done_task will unmask done interrupts at exit */ \
            tasklet_schedule(&priv->done_task[tlet]); \
        } \
        spin_unlock_irqrestore(&priv->reg_lock, flags); \
    } \
 \
    return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
                                                            IRQ_NONE; \
}

DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
                       0)
DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
                       1)

/*
 * hwrng
 */
static int talitos_rng_data_present(struct hwrng *rng, int wait)
{
    struct device *dev = (struct device *)rng->priv;
    struct talitos_private *priv = dev_get_drvdata(dev);
    u32 ofl;
    int i;

    for (i = 0; i < 20; i++) {
        ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
              TALITOS_RNGUSR_LO_OFL;
        if (ofl || !wait)
            break;
        udelay(10);
    }

    return !!ofl;
}

static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
{
    struct device *dev = (struct device *)rng->priv;
    struct talitos_private *priv = dev_get_drvdata(dev);

    /* rng fifo requires 64-bit accesses */
    *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
    *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);

    return sizeof(u32);
}

static int talitos_rng_init(struct hwrng *rng)
{
    struct device *dev = (struct device *)rng->priv;
    struct talitos_private *priv = dev_get_drvdata(dev);
    unsigned int timeout = TALITOS_TIMEOUT;

    setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
    while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
             & TALITOS_RNGUSR_LO_RD)
           && --timeout)
        cpu_relax();
    if (timeout == 0) {
        dev_err(dev, "failed to reset rng hw\n");
        return -ENODEV;
    }

    /* start generating */
    setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);

    return 0;
}

static int talitos_register_rng(struct device *dev)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    int err;

    priv->rng.name = dev_driver_string(dev),
    priv->rng.init = talitos_rng_init,
    priv->rng.data_present = talitos_rng_data_present,
    priv->rng.data_read = talitos_rng_data_read,
    priv->rng.priv = (unsigned long)dev;

    err = hwrng_register(&priv->rng);
    if (!err)
        priv->rng_registered = true;

    return err;
}

static void talitos_unregister_rng(struct device *dev)
{
    struct talitos_private *priv = dev_get_drvdata(dev);

    if (!priv->rng_registered)
        return;

    hwrng_unregister(&priv->rng);
    priv->rng_registered = false;
}

/*
 * crypto alg
 */
#define TALITOS_CRA_PRIORITY 3000
/*
 * Defines a priority for doing AEAD with descriptors type
 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
 */
#define TALITOS_CRA_PRIORITY_AEAD_HSNA (TALITOS_CRA_PRIORITY - 1)
#define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
#define TALITOS_MAX_IV_LENGTH 16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */

struct talitos_ctx {
    struct device *dev;
    int ch;
    __be32 desc_hdr_template;
    u8 key[TALITOS_MAX_KEY_SIZE];
    u8 iv[TALITOS_MAX_IV_LENGTH];
    dma_addr_t dma_key;
    unsigned int keylen;
    unsigned int enckeylen;
    unsigned int authkeylen;
    dma_addr_t dma_buf;
    dma_addr_t dma_hw_context;
};

#define HASH_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE
#define TALITOS_MDEU_MAX_CONTEXT_SIZE TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512

struct talitos_ahash_req_ctx {
    u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
    unsigned int hw_context_size;
    u8 buf[2][HASH_MAX_BLOCK_SIZE];
    int buf_idx;
    unsigned int swinit;
    unsigned int first;
    unsigned int last;
    unsigned int to_hash_later;
    unsigned int nbuf;
    struct scatterlist bufsl[2];
    struct scatterlist *psrc;
};

struct talitos_export_state {
    u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
    u8 buf[HASH_MAX_BLOCK_SIZE];
    unsigned int swinit;
    unsigned int first;
    unsigned int last;
    unsigned int to_hash_later;
    unsigned int nbuf;
};

static int aead_setkey(struct crypto_aead *authenc,
                       const u8 *key, unsigned int keylen)
{
    struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
    struct device *dev = ctx->dev;
    struct crypto_authenc_keys keys;

    if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
        goto badkey;

    if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
        goto badkey;

    if (ctx->keylen)
        dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

    memcpy(ctx->key, keys.authkey, keys.authkeylen);
    memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

    ctx->keylen = keys.authkeylen + keys.enckeylen;
    ctx->enckeylen = keys.enckeylen;
    ctx->authkeylen = keys.authkeylen;
    ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
                                  DMA_TO_DEVICE);

    return 0;

badkey:
    crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
    return -EINVAL;
}
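
/*
 * Layout of ctx->key after a successful aead_setkey(), shown as an
 * illustrative sketch (it follows directly from the memcpy()s above):
 *
 *    ctx->key:  [ authkey (authkeylen bytes) | enckey (enckeylen bytes) ]
 *
 * ipsec_esp() later points the HMAC key pointer (ptr[0]) at ctx->dma_key
 * and the cipher key pointer at ctx->dma_key + ctx->authkeylen.
 */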

/*
 * talitos_edesc - s/w-extended descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @icv_ool: whether ICV is out-of-line
 * @iv_dma: dma address of iv for checking continuity and link table
 * @dma_len: length of dma mapped link_tbl space
 * @dma_link_tbl: bus physical address of link_tbl/buf
 * @desc: h/w descriptor
 * @link_tbl: input and output h/w link tables (if {src,dst}_nents > 1) (SEC2)
 * @buf: input and output buffer (if {src,dst}_nents > 1) (SEC1)
 *
 * if decrypting (with authcheck), or either one of src_nents or dst_nents
 * is greater than 1, an integrity check value is concatenated to the end
 * of link_tbl data
 */
struct talitos_edesc {
    int src_nents;
    int dst_nents;
    bool icv_ool;
    dma_addr_t iv_dma;
    int dma_len;
    dma_addr_t dma_link_tbl;
    struct talitos_desc desc;
    union {
        struct talitos_ptr link_tbl[0];
        u8 buf[0];
    };
};
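
/*
 * Rough layout of one talitos_edesc allocation (illustrative sketch derived
 * from talitos_edesc_alloc() below; exact sizes depend on SEC1 vs SEC2+ and
 * on the src/dst segment counts):
 *
 *    SEC2+: [ edesc fields | desc | link_tbl[src+dst+2 entries] | 2 x ICV ]
 *    SEC1:  [ edesc fields | desc | bounce buffer for src/dst data ]
 */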

static void talitos_sg_unmap(struct device *dev,
                             struct talitos_edesc *edesc,
                             struct scatterlist *src,
                             struct scatterlist *dst,
                             unsigned int len, unsigned int offset)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    bool is_sec1 = has_ftr_sec1(priv);
    unsigned int src_nents = edesc->src_nents ? : 1;
    unsigned int dst_nents = edesc->dst_nents ? : 1;

    if (is_sec1 && dst && dst_nents > 1) {
        dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
                                   len, DMA_FROM_DEVICE);
        sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
                             offset);
    }
    if (src != dst) {
        if (src_nents == 1 || !is_sec1)
            dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);

        if (dst && (dst_nents == 1 || !is_sec1))
            dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
    } else if (src_nents == 1 || !is_sec1) {
        dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
    }
}

static void ipsec_esp_unmap(struct device *dev,
                            struct talitos_edesc *edesc,
                            struct aead_request *areq)
{
    struct crypto_aead *aead = crypto_aead_reqtfm(areq);
    struct talitos_ctx *ctx = crypto_aead_ctx(aead);
    unsigned int ivsize = crypto_aead_ivsize(aead);
    bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
    struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];

    if (is_ipsec_esp)
        unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
                                 DMA_FROM_DEVICE);
    unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);

    talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen,
                     areq->assoclen);

    if (edesc->dma_len)
        dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
                         DMA_BIDIRECTIONAL);

    if (!is_ipsec_esp) {
        unsigned int dst_nents = edesc->dst_nents ? : 1;

        sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
                           areq->assoclen + areq->cryptlen - ivsize);
    }
}

/*
 * ipsec_esp descriptor callbacks
 */
static void ipsec_esp_encrypt_done(struct device *dev,
                                   struct talitos_desc *desc, void *context,
                                   int err)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    bool is_sec1 = has_ftr_sec1(priv);
    struct aead_request *areq = context;
    struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
    unsigned int authsize = crypto_aead_authsize(authenc);
    unsigned int ivsize = crypto_aead_ivsize(authenc);
    struct talitos_edesc *edesc;
    struct scatterlist *sg;
    void *icvdata;

    edesc = container_of(desc, struct talitos_edesc, desc);

    ipsec_esp_unmap(dev, edesc, areq);

    /* copy the generated ICV to dst */
    if (edesc->icv_ool) {
        if (is_sec1)
            icvdata = edesc->buf + areq->assoclen + areq->cryptlen;
        else
            icvdata = &edesc->link_tbl[edesc->src_nents +
                                       edesc->dst_nents + 2];
        sg = sg_last(areq->dst, edesc->dst_nents);
        memcpy((char *)sg_virt(sg) + sg->length - authsize,
               icvdata, authsize);
    }

    dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);

    kfree(edesc);

    aead_request_complete(areq, err);
}

static void ipsec_esp_decrypt_swauth_done(struct device *dev,
                                          struct talitos_desc *desc,
                                          void *context, int err)
{
    struct aead_request *req = context;
    struct crypto_aead *authenc = crypto_aead_reqtfm(req);
    unsigned int authsize = crypto_aead_authsize(authenc);
    struct talitos_edesc *edesc;
    struct scatterlist *sg;
    char *oicv, *icv;
    struct talitos_private *priv = dev_get_drvdata(dev);
    bool is_sec1 = has_ftr_sec1(priv);

    edesc = container_of(desc, struct talitos_edesc, desc);

    ipsec_esp_unmap(dev, edesc, req);

    if (!err) {
        /* auth check */
        sg = sg_last(req->dst, edesc->dst_nents ? : 1);
        icv = (char *)sg_virt(sg) + sg->length - authsize;

        if (edesc->dma_len) {
            if (is_sec1)
                oicv = (char *)&edesc->dma_link_tbl +
                       req->assoclen + req->cryptlen;
            else
                oicv = (char *)
                       &edesc->link_tbl[edesc->src_nents +
                                        edesc->dst_nents + 2];
            if (edesc->icv_ool)
                icv = oicv + authsize;
        } else
            oicv = (char *)&edesc->link_tbl[0];

        err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
    }

    kfree(edesc);

    aead_request_complete(req, err);
}

static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
                                          struct talitos_desc *desc,
                                          void *context, int err)
{
    struct aead_request *req = context;
    struct talitos_edesc *edesc;

    edesc = container_of(desc, struct talitos_edesc, desc);

    ipsec_esp_unmap(dev, edesc, req);

    /* check ICV auth status */
    if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
                 DESC_HDR_LO_ICCR1_PASS))
        err = -EBADMSG;

    kfree(edesc);

    aead_request_complete(req, err);
}

/*
 * convert scatterlist to SEC h/w link table format
 * stop at cryptlen bytes
 */
static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
                                 unsigned int offset, int cryptlen,
                                 struct talitos_ptr *link_tbl_ptr)
{
    int n_sg = sg_count;
    int count = 0;

    while (cryptlen && sg && n_sg--) {
        unsigned int len = sg_dma_len(sg);

        if (offset >= len) {
            offset -= len;
            goto next;
        }

        len -= offset;

        if (len > cryptlen)
            len = cryptlen;

        to_talitos_ptr(link_tbl_ptr + count,
                       sg_dma_address(sg) + offset, len, 0);
        to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
        count++;
        cryptlen -= len;
        offset = 0;

next:
        sg = sg_next(sg);
    }

    /* tag end of link table */
    if (count > 0)
        to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
                               DESC_PTR_LNKTBL_RETURN, 0);

    return count;
}
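
/*
 * Worked example (illustrative, hypothetical numbers): for a scatterlist
 * with DMA segments of 64, 64 and 32 bytes, offset = 16 and cryptlen = 100,
 * sg_to_link_tbl_offset() emits two link-table entries of 48 and 52 bytes,
 * tags the second one with DESC_PTR_LNKTBL_RETURN, and returns 2.
 */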

static int talitos_sg_map(struct device *dev, struct scatterlist *src,
                          unsigned int len, struct talitos_edesc *edesc,
                          struct talitos_ptr *ptr,
                          int sg_count, unsigned int offset, int tbl_off)
{
    struct talitos_private *priv = dev_get_drvdata(dev);
    bool is_sec1 = has_ftr_sec1(priv);

    if (!src) {
        to_talitos_ptr(ptr, 0, 0, is_sec1);
        return 1;
    }
    if (sg_count == 1) {
        to_talitos_ptr(ptr, sg_dma_address(src) + offset, len, is_sec1);
        return sg_count;
    }
    if (is_sec1) {
        to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, len, is_sec1);
        return sg_count;
    }
    sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len,
                                     &edesc->link_tbl[tbl_off]);
    if (sg_count == 1) {
        /* Only one segment now, so no link tbl needed */
        copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
        return sg_count;
    }
    to_talitos_ptr(ptr, edesc->dma_link_tbl +
                        tbl_off * sizeof(struct talitos_ptr), len, is_sec1);
    to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);

    return sg_count;
}

/*
 * fill in and submit ipsec_esp descriptor
 */
static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
                     void (*callback)(struct device *dev,
                                      struct talitos_desc *desc,
                                      void *context, int error))
{
    struct crypto_aead *aead = crypto_aead_reqtfm(areq);
    unsigned int authsize = crypto_aead_authsize(aead);
    struct talitos_ctx *ctx = crypto_aead_ctx(aead);
    struct device *dev = ctx->dev;
    struct talitos_desc *desc = &edesc->desc;
    unsigned int cryptlen = areq->cryptlen;
    unsigned int ivsize = crypto_aead_ivsize(aead);
    int tbl_off = 0;
    int sg_count, ret;
    int sg_link_tbl_len;
    bool sync_needed = false;
    struct talitos_private *priv = dev_get_drvdata(dev);
    bool is_sec1 = has_ftr_sec1(priv);
    bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
    struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
    struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];

    /* hmac key */
    to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);

    sg_count = edesc->src_nents ?: 1;
    if (is_sec1 && sg_count > 1)
        sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
                          areq->assoclen + cryptlen);
    else
        sg_count = dma_map_sg(dev, areq->src, sg_count,
                              (areq->src == areq->dst) ?
                              DMA_BIDIRECTIONAL : DMA_TO_DEVICE);

    /* hmac data */
    ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
                         &desc->ptr[1], sg_count, 0, tbl_off);

    if (ret > 1) {
        tbl_off += ret;
        sync_needed = true;
    }

    /* cipher iv */
    to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);

    /* cipher key */
    to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
                   ctx->enckeylen, is_sec1);

    /*
     * cipher in
     * map and adjust cipher len to aead request cryptlen.
     * extent is bytes of HMAC postpended to ciphertext,
     * typically 12 for ipsec
     */
    sg_link_tbl_len = cryptlen;

    if (is_ipsec_esp) {
        to_talitos_ptr_ext_set(&desc->ptr[4], authsize, is_sec1);

        if (desc->hdr & DESC_HDR_MODE1_MDEU_CICV)
            sg_link_tbl_len += authsize;
    }

    ret = talitos_sg_map(dev, areq->src, sg_link_tbl_len, edesc,
                         &desc->ptr[4], sg_count, areq->assoclen, tbl_off);

    if (ret > 1) {
        tbl_off += ret;
        sync_needed = true;
    }

    /* cipher out */
    if (areq->src != areq->dst) {
        sg_count = edesc->dst_nents ? : 1;
        if (!is_sec1 || sg_count == 1)
            dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
    }

    ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
                         sg_count, areq->assoclen, tbl_off);

    if (is_ipsec_esp)
        to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);

    /* ICV data */
    if (ret > 1) {
        tbl_off += ret;
        edesc->icv_ool = true;
        sync_needed = true;

        if (is_ipsec_esp) {
            struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
            int offset = (edesc->src_nents + edesc->dst_nents + 2) *
                         sizeof(struct talitos_ptr) + authsize;

            /* Add an entry to the link table for ICV data */
            to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
            to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RETURN,
                                   is_sec1);

            /* icv data follows link tables */
            to_talitos_ptr(tbl_ptr, edesc->dma_link_tbl + offset,
                           authsize, is_sec1);
        } else {
            dma_addr_t addr = edesc->dma_link_tbl;

            if (is_sec1)
                addr += areq->assoclen + cryptlen;
            else
                addr += sizeof(struct talitos_ptr) * tbl_off;

            to_talitos_ptr(&desc->ptr[6], addr, authsize, is_sec1);
        }
    } else if (!is_ipsec_esp) {
        ret = talitos_sg_map(dev, areq->dst, authsize, edesc,
                             &desc->ptr[6], sg_count, areq->assoclen +
                                            cryptlen,
                             tbl_off);
        if (ret > 1) {
            tbl_off += ret;
            edesc->icv_ool = true;
            sync_needed = true;
        } else {
            edesc->icv_ool = false;
        }
    } else {
        edesc->icv_ool = false;
    }

    /* iv out */
    if (is_ipsec_esp)
        map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
                               DMA_FROM_DEVICE);

    if (sync_needed)
        dma_sync_single_for_device(dev, edesc->dma_link_tbl,
                                   edesc->dma_len,
                                   DMA_BIDIRECTIONAL);

    ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
    if (ret != -EINPROGRESS) {
        ipsec_esp_unmap(dev, edesc, areq);
        kfree(edesc);
    }
    return ret;
}

/*
 * allocate and map the extended descriptor
 */
static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
                                                 struct scatterlist *src,
                                                 struct scatterlist *dst,
                                                 u8 *iv,
                                                 unsigned int assoclen,
                                                 unsigned int cryptlen,
                                                 unsigned int authsize,
                                                 unsigned int ivsize,
                                                 int icv_stashing,
                                                 u32 cryptoflags,
                                                 bool encrypt)
{
    struct talitos_edesc *edesc;
    int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
    dma_addr_t iv_dma = 0;
    gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
                  GFP_ATOMIC;
    struct talitos_private *priv = dev_get_drvdata(dev);
    bool is_sec1 = has_ftr_sec1(priv);
    int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;
    void *err;

    if (cryptlen + authsize > max_len) {
        dev_err(dev, "length exceeds h/w max limit\n");
        return ERR_PTR(-EINVAL);
    }

    if (ivsize)
        iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);

    if (!dst || dst == src) {
        src_len = assoclen + cryptlen + authsize;
        src_nents = sg_nents_for_len(src, src_len);
        if (src_nents < 0) {
            dev_err(dev, "Invalid number of src SG.\n");
            err = ERR_PTR(-EINVAL);
            goto error_sg;
        }
        src_nents = (src_nents == 1) ? 0 : src_nents;
        dst_nents = dst ? src_nents : 0;
        dst_len = 0;
    } else { /* dst && dst != src */
        src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
        src_nents = sg_nents_for_len(src, src_len);
        if (src_nents < 0) {
            dev_err(dev, "Invalid number of src SG.\n");
            err = ERR_PTR(-EINVAL);
            goto error_sg;
        }
        src_nents = (src_nents == 1) ? 0 : src_nents;
        dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
        dst_nents = sg_nents_for_len(dst, dst_len);
        if (dst_nents < 0) {
            dev_err(dev, "Invalid number of dst SG.\n");
            err = ERR_PTR(-EINVAL);
            goto error_sg;
        }
        dst_nents = (dst_nents == 1) ? 0 : dst_nents;
    }

    /*
     * allocate space for base edesc plus the link tables,
     * allowing for two separate entries for AD and generated ICV (+ 2),
     * and space for two sets of ICVs (stashed and generated)
     */
    alloc_len = sizeof(struct talitos_edesc);
    if (src_nents || dst_nents) {
        if (is_sec1)
            dma_len = (src_nents ? src_len : 0) +
                      (dst_nents ? dst_len : 0);
        else
            dma_len = (src_nents + dst_nents + 2) *
                      sizeof(struct talitos_ptr) + authsize * 2;
        alloc_len += dma_len;
    } else {
        dma_len = 0;
        alloc_len += icv_stashing ? authsize : 0;
    }

    /* if it's an ahash, add space for a second desc next to the first one */
    if (is_sec1 && !dst)
        alloc_len += sizeof(struct talitos_desc);

    edesc = kmalloc(alloc_len, GFP_DMA | flags);
    if (!edesc) {
        dev_err(dev, "could not allocate edescriptor\n");
        err = ERR_PTR(-ENOMEM);
        goto error_sg;
    }
    memset(&edesc->desc, 0, sizeof(edesc->desc));

    edesc->src_nents = src_nents;
    edesc->dst_nents = dst_nents;
    edesc->iv_dma = iv_dma;
    edesc->dma_len = dma_len;
    if (dma_len) {
        void *addr = &edesc->link_tbl[0];

        if (is_sec1 && !dst)
            addr += sizeof(struct talitos_desc);
        edesc->dma_link_tbl = dma_map_single(dev, addr,
                                             edesc->dma_len,
                                             DMA_BIDIRECTIONAL);
    }
    return edesc;
error_sg:
    if (iv_dma)
        dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
    return err;
}
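
/*
 * Illustrative sizing example (SEC2+, hypothetical numbers): with
 * src_nents = 3, dst_nents = 2 and authsize = 16, the code above computes
 * dma_len = (3 + 2 + 2) * sizeof(struct talitos_ptr) + 2 * 16, allocates
 * alloc_len = sizeof(struct talitos_edesc) + dma_len from GFP_DMA memory,
 * and maps the link-table area with dma_map_single().
 */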
1432
79fd31d3 1433static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
62293a37 1434 int icv_stashing, bool encrypt)
4de9d0b5
LN
1435{
1436 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
aeb4c132 1437 unsigned int authsize = crypto_aead_authsize(authenc);
4de9d0b5 1438 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
79fd31d3 1439 unsigned int ivsize = crypto_aead_ivsize(authenc);
4de9d0b5 1440
aeb4c132 1441 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
79fd31d3 1442 iv, areq->assoclen, areq->cryptlen,
aeb4c132 1443 authsize, ivsize, icv_stashing,
62293a37 1444 areq->base.flags, encrypt);
4de9d0b5
LN
1445}
1446
56af8cd4 1447static int aead_encrypt(struct aead_request *req)
9c4a7965
KP
1448{
1449 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1450 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
56af8cd4 1451 struct talitos_edesc *edesc;
9c4a7965
KP
1452
1453 /* allocate extended descriptor */
62293a37 1454 edesc = aead_edesc_alloc(req, req->iv, 0, true);
9c4a7965
KP
1455 if (IS_ERR(edesc))
1456 return PTR_ERR(edesc);
1457
1458 /* set encrypt */
70bcaca7 1459 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
9c4a7965 1460
aeb4c132 1461 return ipsec_esp(edesc, req, ipsec_esp_encrypt_done);
9c4a7965
KP
1462}
1463
56af8cd4 1464static int aead_decrypt(struct aead_request *req)
9c4a7965
KP
1465{
1466 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
aeb4c132 1467 unsigned int authsize = crypto_aead_authsize(authenc);
9c4a7965 1468 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
fe5720e2 1469 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
56af8cd4 1470 struct talitos_edesc *edesc;
9c4a7965
KP
1471 struct scatterlist *sg;
1472 void *icvdata;
1473
1474 req->cryptlen -= authsize;
1475
1476 /* allocate extended descriptor */
62293a37 1477 edesc = aead_edesc_alloc(req, req->iv, 1, false);
9c4a7965
KP
1478 if (IS_ERR(edesc))
1479 return PTR_ERR(edesc);
1480
fe5720e2 1481 if ((priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
e938e465
KP
1482 ((!edesc->src_nents && !edesc->dst_nents) ||
1483 priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {
9c4a7965 1484
fe5720e2 1485 /* decrypt and check the ICV */
e938e465
KP
1486 edesc->desc.hdr = ctx->desc_hdr_template |
1487 DESC_HDR_DIR_INBOUND |
fe5720e2 1488 DESC_HDR_MODE1_MDEU_CICV;
9c4a7965 1489
fe5720e2 1490 /* reset integrity check result bits */
9c4a7965 1491
aeb4c132 1492 return ipsec_esp(edesc, req, ipsec_esp_decrypt_hwauth_done);
e938e465 1493 }
fe5720e2 1494
e938e465
KP
1495 /* Have to check the ICV with software */
1496 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
fe5720e2 1497
e938e465
KP
1498 /* stash incoming ICV for later cmp with ICV generated by the h/w */
1499 if (edesc->dma_len)
aeb4c132
HX
1500 icvdata = (char *)&edesc->link_tbl[edesc->src_nents +
1501 edesc->dst_nents + 2];
e938e465
KP
1502 else
1503 icvdata = &edesc->link_tbl[0];
fe5720e2 1504
e938e465 1505 sg = sg_last(req->src, edesc->src_nents ? : 1);
fe5720e2 1506
aeb4c132 1507 memcpy(icvdata, (char *)sg_virt(sg) + sg->length - authsize, authsize);
9c4a7965 1508
aeb4c132 1509 return ipsec_esp(edesc, req, ipsec_esp_decrypt_swauth_done);
9c4a7965
KP
1510}
1511
4de9d0b5
LN
1512static int ablkcipher_setkey(struct crypto_ablkcipher *cipher,
1513 const u8 *key, unsigned int keylen)
1514{
1515 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
2e13ce08 1516 struct device *dev = ctx->dev;
f384cdc4 1517 u32 tmp[DES_EXPKEY_WORDS];
4de9d0b5 1518
03d2c511
MH
1519 if (keylen > TALITOS_MAX_KEY_SIZE) {
1520 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
1521 return -EINVAL;
1522 }
1523
f384cdc4
LC
1524 if (unlikely(crypto_ablkcipher_get_flags(cipher) &
1525 CRYPTO_TFM_REQ_WEAK_KEY) &&
1526 !des_ekey(tmp, key)) {
1527 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_WEAK_KEY);
1528 return -EINVAL;
1529 }
1530
2e13ce08
LC
1531 if (ctx->keylen)
1532 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
1533
4de9d0b5
LN
1534 memcpy(&ctx->key, key, keylen);
1535 ctx->keylen = keylen;
1536
2e13ce08
LC
1537 ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);
1538
4de9d0b5 1539 return 0;
4de9d0b5
LN
1540}
1541
1542static void common_nonsnoop_unmap(struct device *dev,
1543 struct talitos_edesc *edesc,
1544 struct ablkcipher_request *areq)
1545{
1546 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
032d197e 1547
6a1e8d14 1548 talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
4de9d0b5
LN
1549 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);
1550
4de9d0b5
LN
1551 if (edesc->dma_len)
1552 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1553 DMA_BIDIRECTIONAL);
1554}
1555
1556static void ablkcipher_done(struct device *dev,
1557 struct talitos_desc *desc, void *context,
1558 int err)
1559{
1560 struct ablkcipher_request *areq = context;
19bbbc63
KP
1561 struct talitos_edesc *edesc;
1562
1563 edesc = container_of(desc, struct talitos_edesc, desc);
4de9d0b5
LN
1564
1565 common_nonsnoop_unmap(dev, edesc, areq);
1566
1567 kfree(edesc);
1568
1569 areq->base.complete(&areq->base, err);
1570}
1571
1572static int common_nonsnoop(struct talitos_edesc *edesc,
1573 struct ablkcipher_request *areq,
4de9d0b5
LN
1574 void (*callback) (struct device *dev,
1575 struct talitos_desc *desc,
1576 void *context, int error))
1577{
1578 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1579 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1580 struct device *dev = ctx->dev;
1581 struct talitos_desc *desc = &edesc->desc;
1582 unsigned int cryptlen = areq->nbytes;
79fd31d3 1583 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
4de9d0b5 1584 int sg_count, ret;
6a1e8d14 1585 bool sync_needed = false;
922f9dc8
LC
1586 struct talitos_private *priv = dev_get_drvdata(dev);
1587 bool is_sec1 = has_ftr_sec1(priv);
4de9d0b5
LN
1588
1589 /* first DWORD empty */
4de9d0b5
LN
1590
1591 /* cipher iv */
da9de146 1592 to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);
4de9d0b5
LN
1593
1594 /* cipher key */
2e13ce08 1595 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);
4de9d0b5 1596
6a1e8d14
LC
1597 sg_count = edesc->src_nents ?: 1;
1598 if (is_sec1 && sg_count > 1)
1599 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1600 cryptlen);
1601 else
1602 sg_count = dma_map_sg(dev, areq->src, sg_count,
1603 (areq->src == areq->dst) ?
1604 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
4de9d0b5
LN
1605 /*
1606 * cipher in
1607 */
6a1e8d14
LC
1608 sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
1609 &desc->ptr[3], sg_count, 0, 0);
1610 if (sg_count > 1)
1611 sync_needed = true;
4de9d0b5
LN
1612
1613 /* cipher out */
6a1e8d14
LC
1614 if (areq->src != areq->dst) {
1615 sg_count = edesc->dst_nents ? : 1;
1616 if (!is_sec1 || sg_count == 1)
1617 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1618 }
1619
1620 ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
1621 sg_count, 0, (edesc->src_nents + 1));
1622 if (ret > 1)
1623 sync_needed = true;
4de9d0b5
LN
1624
1625 /* iv out */
a2b35aa8 1626 map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
4de9d0b5
LN
1627 DMA_FROM_DEVICE);
1628
1629 /* last DWORD empty */
4de9d0b5 1630
6a1e8d14
LC
1631 if (sync_needed)
1632 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1633 edesc->dma_len, DMA_BIDIRECTIONAL);
1634
5228f0f7 1635 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
4de9d0b5
LN
1636 if (ret != -EINPROGRESS) {
1637 common_nonsnoop_unmap(dev, edesc, areq);
1638 kfree(edesc);
1639 }
1640 return ret;
1641}
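/*
 * Descriptor pointer layout built by common_nonsnoop() above for ablkcipher
 * requests:
 *
 *	ptr[0] - unused ("first DWORD empty")
 *	ptr[1] - IV in (edesc->iv_dma)
 *	ptr[2] - cipher key (ctx->dma_key)
 *	ptr[3] - cipher input (direct pointer, or link table for multiple SGs)
 *	ptr[4] - cipher output
 *	ptr[5] - IV out
 *	ptr[6] - unused ("last DWORD empty")
 *
 * Whenever talitos_sg_map() had to build a link table in edesc->dma_link_tbl,
 * the table is synced to the device before the descriptor is submitted.
 */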
1642
e938e465 1643static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request *
62293a37 1644 areq, bool encrypt)
4de9d0b5
LN
1645{
1646 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1647 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
79fd31d3 1648 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
4de9d0b5 1649
aeb4c132 1650 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
79fd31d3 1651 areq->info, 0, areq->nbytes, 0, ivsize, 0,
62293a37 1652 areq->base.flags, encrypt);
4de9d0b5
LN
1653}
1654
1655static int ablkcipher_encrypt(struct ablkcipher_request *areq)
1656{
1657 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1658 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1659 struct talitos_edesc *edesc;
1660
1661 /* allocate extended descriptor */
62293a37 1662 edesc = ablkcipher_edesc_alloc(areq, true);
4de9d0b5
LN
1663 if (IS_ERR(edesc))
1664 return PTR_ERR(edesc);
1665
1666 /* set encrypt */
1667 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1668
febec542 1669 return common_nonsnoop(edesc, areq, ablkcipher_done);
4de9d0b5
LN
1670}
1671
1672static int ablkcipher_decrypt(struct ablkcipher_request *areq)
1673{
1674 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1675 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1676 struct talitos_edesc *edesc;
1677
1678 /* allocate extended descriptor */
62293a37 1679 edesc = ablkcipher_edesc_alloc(areq, false);
4de9d0b5
LN
1680 if (IS_ERR(edesc))
1681 return PTR_ERR(edesc);
1682
1683 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1684
febec542 1685 return common_nonsnoop(edesc, areq, ablkcipher_done);
4de9d0b5
LN
1686}
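/*
 * Minimal illustrative sketch (not part of the driver): how a kernel caller
 * of this era reaches ablkcipher_encrypt()/ablkcipher_decrypt() above through
 * the generic ablkcipher API.  When "cbc-aes-talitos" has the highest
 * priority, crypto_alloc_ablkcipher("cbc(aes)", 0, 0) resolves to it.  The
 * function name and the buf/len/key/iv parameters are made up for the
 * example; buf and iv are assumed to be DMA-able (e.g. kmalloc'd), len a
 * multiple of AES_BLOCK_SIZE, and error handling is kept to a minimum.
 */
static int example_cbc_aes_encrypt(u8 *buf, unsigned int len,
				   const u8 *key, u8 *iv)
{
	struct crypto_ablkcipher *tfm;
	struct ablkcipher_request *req;
	struct scatterlist sg;
	struct crypto_wait wait;
	int ret;

	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
	if (ret)
		goto out_free_tfm;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	crypto_init_wait(&wait);
	sg_init_one(&sg, buf, len);
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					crypto_req_done, &wait);
	ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* usually returns -EINPROGRESS; wait until ablkcipher_done() fires */
	ret = crypto_wait_req(crypto_ablkcipher_encrypt(req), &wait);

	ablkcipher_request_free(req);
out_free_tfm:
	crypto_free_ablkcipher(tfm);
	return ret;
}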
1687
497f2e6b
LN
1688static void common_nonsnoop_hash_unmap(struct device *dev,
1689 struct talitos_edesc *edesc,
1690 struct ahash_request *areq)
1691{
1692 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
497f2e6b 1693
6a1e8d14 1694 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
032d197e 1695
497f2e6b
LN
1696 if (edesc->dma_len)
1697 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1698 DMA_BIDIRECTIONAL);
1699
37b5e889
LC
1700 if (edesc->desc.next_desc)
1701 dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
1702 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
497f2e6b
LN
1703}
1704
1705static void ahash_done(struct device *dev,
1706 struct talitos_desc *desc, void *context,
1707 int err)
1708{
1709 struct ahash_request *areq = context;
1710 struct talitos_edesc *edesc =
1711 container_of(desc, struct talitos_edesc, desc);
1712 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1713
1714 if (!req_ctx->last && req_ctx->to_hash_later) {
1715 /* Position any partial block for next update/final/finup */
3c0dd190 1716 req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
5e833bc4 1717 req_ctx->nbuf = req_ctx->to_hash_later;
497f2e6b
LN
1718 }
1719 common_nonsnoop_hash_unmap(dev, edesc, areq);
1720
1721 kfree(edesc);
1722
1723 areq->base.complete(&areq->base, err);
1724}
1725
2d02905e
LC
1726/*
1727 * SEC1 doesn't like hashing a zero-sized message, so we do the padding
1728 * ourselves and submit a padded block
1729 */
5b2cf268 1730static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
2d02905e
LC
1731 struct talitos_edesc *edesc,
1732 struct talitos_ptr *ptr)
1733{
1734 static u8 padded_hash[64] = {
1735 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1736 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1737 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1738 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1739 };
1740
1741 pr_err_once("Bug in SEC1, padding ourself\n");
1742 edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1743 map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1744 (char *)padded_hash, DMA_TO_DEVICE);
1745}
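/*
 * Note on the workaround above: padded_hash[] is exactly the padded form of
 * an empty message for the 64-byte-block hashes handled here (MD5, SHA-1,
 * SHA-224/256) - a 0x80 byte followed by zeros, with the trailing 64-bit
 * length field equal to zero.  Automatic padding is disabled in the
 * descriptor header and this pre-padded block is fed to the MDEU instead,
 * so hashing an empty message still yields the correct digest on SEC1.
 */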
1746
497f2e6b
LN
1747static int common_nonsnoop_hash(struct talitos_edesc *edesc,
1748 struct ahash_request *areq, unsigned int length,
37b5e889 1749 unsigned int offset,
497f2e6b
LN
1750 void (*callback) (struct device *dev,
1751 struct talitos_desc *desc,
1752 void *context, int error))
1753{
1754 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1755 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1756 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1757 struct device *dev = ctx->dev;
1758 struct talitos_desc *desc = &edesc->desc;
032d197e 1759 int ret;
6a1e8d14 1760 bool sync_needed = false;
922f9dc8
LC
1761 struct talitos_private *priv = dev_get_drvdata(dev);
1762 bool is_sec1 = has_ftr_sec1(priv);
6a1e8d14 1763 int sg_count;
497f2e6b
LN
1764
1765 /* first DWORD empty */
497f2e6b 1766
60f208d7
KP
1767 /* hash context in */
1768 if (!req_ctx->first || req_ctx->swinit) {
49f9783b
LC
1769 to_talitos_ptr(&desc->ptr[1], ctx->dma_hw_context,
1770 req_ctx->hw_context_size, is_sec1);
60f208d7 1771 req_ctx->swinit = 0;
497f2e6b 1772 }
afd62fa2
LC
1773 /* Indicate next op is not the first. */
1774 req_ctx->first = 0;
497f2e6b
LN
1775
1776 /* HMAC key */
1777 if (ctx->keylen)
2e13ce08
LC
1778 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
1779 is_sec1);
497f2e6b 1780
37b5e889
LC
1781 if (is_sec1 && req_ctx->nbuf)
1782 length -= req_ctx->nbuf;
1783
6a1e8d14
LC
1784 sg_count = edesc->src_nents ?: 1;
1785 if (is_sec1 && sg_count > 1)
37b5e889
LC
1786 sg_pcopy_to_buffer(req_ctx->psrc, sg_count,
1787 edesc->buf + sizeof(struct talitos_desc),
1788 length, req_ctx->nbuf);
1789 else if (length)
6a1e8d14
LC
1790 sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
1791 DMA_TO_DEVICE);
497f2e6b
LN
1792 /*
1793 * data in
1794 */
37b5e889 1795 if (is_sec1 && req_ctx->nbuf) {
3c0dd190
LC
1796 dma_addr_t dma_buf = ctx->dma_buf + req_ctx->buf_idx *
1797 HASH_MAX_BLOCK_SIZE;
1798
1799 to_talitos_ptr(&desc->ptr[3], dma_buf, req_ctx->nbuf, is_sec1);
37b5e889
LC
1800 } else {
1801 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1802 &desc->ptr[3], sg_count, offset, 0);
1803 if (sg_count > 1)
1804 sync_needed = true;
1805 }
497f2e6b
LN
1806
1807 /* fifth DWORD empty */
497f2e6b
LN
1808
1809 /* hash/HMAC out -or- hash context out */
1810 if (req_ctx->last)
1811 map_single_talitos_ptr(dev, &desc->ptr[5],
1812 crypto_ahash_digestsize(tfm),
a2b35aa8 1813 areq->result, DMA_FROM_DEVICE);
497f2e6b 1814 else
49f9783b
LC
1815 to_talitos_ptr(&desc->ptr[5], ctx->dma_hw_context,
1816 req_ctx->hw_context_size, is_sec1);
497f2e6b
LN
1817
1818 /* last DWORD empty */
497f2e6b 1819
2d02905e
LC
1820 if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
1821 talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
1822
37b5e889
LC
1823 if (is_sec1 && req_ctx->nbuf && length) {
1824 struct talitos_desc *desc2 = desc + 1;
1825 dma_addr_t next_desc;
1826
1827 memset(desc2, 0, sizeof(*desc2));
1828 desc2->hdr = desc->hdr;
1829 desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
1830 desc2->hdr1 = desc2->hdr;
1831 desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1832 desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
1833 desc->hdr &= ~DESC_HDR_DONE_NOTIFY;
1834
1835 to_talitos_ptr(&desc2->ptr[1], ctx->dma_hw_context,
1836 req_ctx->hw_context_size, is_sec1);
1837
1838 copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
1839 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1840 &desc2->ptr[3], sg_count, offset, 0);
1841 if (sg_count > 1)
1842 sync_needed = true;
1843 copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
1844 if (req_ctx->last)
1845 to_talitos_ptr(&desc->ptr[5], ctx->dma_hw_context,
1846 req_ctx->hw_context_size, is_sec1);
1847
1848 next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
1849 DMA_BIDIRECTIONAL);
1850 desc->next_desc = cpu_to_be32(next_desc);
1851 }
1852
6a1e8d14
LC
1853 if (sync_needed)
1854 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1855 edesc->dma_len, DMA_BIDIRECTIONAL);
1856
5228f0f7 1857 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
497f2e6b
LN
1858 if (ret != -EINPROGRESS) {
1859 common_nonsnoop_hash_unmap(dev, edesc, areq);
1860 kfree(edesc);
1861 }
1862 return ret;
1863}
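/*
 * Hash descriptor layout built by common_nonsnoop_hash() above:
 *
 *	ptr[1] - hash context in (skipped on the very first pass unless swinit)
 *	ptr[2] - HMAC key, when the tfm has one
 *	ptr[3] - data in (previously buffered bytes on SEC1, or the request SG)
 *	ptr[5] - digest out on the final pass, otherwise hash context out
 *
 * On SEC1, when both buffered bytes and fresh request data have to be hashed,
 * a second descriptor is chained through desc->next_desc: the first one
 * consumes the buffered block, the second continues with the scatterlist
 * data and inherits the key and output pointers of the first.
 */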
1864
1865static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1866 unsigned int nbytes)
1867{
1868 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1869 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1870 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
37b5e889
LC
1871 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1872 bool is_sec1 = has_ftr_sec1(priv);
1873
1874 if (is_sec1)
1875 nbytes -= req_ctx->nbuf;
497f2e6b 1876
aeb4c132 1877 return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
62293a37 1878 nbytes, 0, 0, 0, areq->base.flags, false);
497f2e6b
LN
1879}
1880
1881static int ahash_init(struct ahash_request *areq)
1882{
1883 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
49f9783b
LC
1884 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1885 struct device *dev = ctx->dev;
497f2e6b 1886 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
49f9783b 1887 unsigned int size;
37b5e889
LC
1888 struct talitos_private *priv = dev_get_drvdata(dev);
1889 bool is_sec1 = has_ftr_sec1(priv);
497f2e6b
LN
1890
1891 /* Initialize the context */
3c0dd190 1892 req_ctx->buf_idx = 0;
5e833bc4 1893 req_ctx->nbuf = 0;
60f208d7
KP
1894 req_ctx->first = 1; /* first indicates h/w must init its context */
1895 req_ctx->swinit = 0; /* assume h/w init of context */
49f9783b 1896 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
497f2e6b
LN
1897 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
1898 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 1899 req_ctx->hw_context_size = size;
497f2e6b 1900
49f9783b
LC
1901 if (ctx->dma_hw_context)
1902 dma_unmap_single(dev, ctx->dma_hw_context, size,
1903 DMA_BIDIRECTIONAL);
1904 ctx->dma_hw_context = dma_map_single(dev, req_ctx->hw_context, size,
1905 DMA_BIDIRECTIONAL);
37b5e889
LC
1906 if (ctx->dma_buf)
1907 dma_unmap_single(dev, ctx->dma_buf, sizeof(req_ctx->buf),
1908 DMA_TO_DEVICE);
1909 if (is_sec1)
1910 ctx->dma_buf = dma_map_single(dev, req_ctx->buf,
1911 sizeof(req_ctx->buf),
1912 DMA_TO_DEVICE);
497f2e6b
LN
1913 return 0;
1914}
1915
60f208d7
KP
1916/*
1917 * on h/w without explicit sha224 support, we initialize h/w context
1918 * manually with sha224 constants, and tell it to run sha256.
1919 */
1920static int ahash_init_sha224_swinit(struct ahash_request *areq)
1921{
1922 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
49f9783b
LC
1923 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1924 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1925 struct device *dev = ctx->dev;
60f208d7
KP
1926
1927 ahash_init(areq);
1928 req_ctx->swinit = 1; /* prevent h/w initting context with sha256 values */
1929
a752447a
KP
1930 req_ctx->hw_context[0] = SHA224_H0;
1931 req_ctx->hw_context[1] = SHA224_H1;
1932 req_ctx->hw_context[2] = SHA224_H2;
1933 req_ctx->hw_context[3] = SHA224_H3;
1934 req_ctx->hw_context[4] = SHA224_H4;
1935 req_ctx->hw_context[5] = SHA224_H5;
1936 req_ctx->hw_context[6] = SHA224_H6;
1937 req_ctx->hw_context[7] = SHA224_H7;
60f208d7
KP
1938
1939 /* init 64-bit count */
1940 req_ctx->hw_context[8] = 0;
1941 req_ctx->hw_context[9] = 0;
1942
49f9783b
LC
1943 dma_sync_single_for_device(dev, ctx->dma_hw_context,
1944 req_ctx->hw_context_size, DMA_TO_DEVICE);
1945
60f208d7
KP
1946 return 0;
1947}
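/*
 * With swinit set, ahash_process_req() leaves DESC_HDR_MODE0_MDEU_INIT clear,
 * so the MDEU starts from the SHA-224 initial values written above instead
 * of loading its own SHA-256 constants.  talitos_alg_alloc() pairs this init
 * routine with a SHA-256 descriptor header on hardware lacking
 * TALITOS_FTR_SHA224_HWINIT, and the 28-byte ahash digestsize truncates the
 * result to SHA-224.
 */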
1948
497f2e6b
LN
1949static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
1950{
1951 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1952 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1953 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1954 struct talitos_edesc *edesc;
1955 unsigned int blocksize =
1956 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1957 unsigned int nbytes_to_hash;
1958 unsigned int to_hash_later;
5e833bc4 1959 unsigned int nsg;
8e409fe1 1960 int nents;
37b5e889
LC
1961 struct device *dev = ctx->dev;
1962 struct talitos_private *priv = dev_get_drvdata(dev);
1963 bool is_sec1 = has_ftr_sec1(priv);
1964 int offset = 0;
3c0dd190 1965 u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];
497f2e6b 1966
5e833bc4
LN
1967 if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
1968 /* Buffer up to one whole block */
8e409fe1
LC
1969 nents = sg_nents_for_len(areq->src, nbytes);
1970 if (nents < 0) {
1971 dev_err(ctx->dev, "Invalid number of src SG.\n");
1972 return nents;
1973 }
1974 sg_copy_to_buffer(areq->src, nents,
3c0dd190 1975 ctx_buf + req_ctx->nbuf, nbytes);
5e833bc4 1976 req_ctx->nbuf += nbytes;
497f2e6b
LN
1977 return 0;
1978 }
1979
5e833bc4
LN
1980 /* At least (blocksize + 1) bytes are available to hash */
1981 nbytes_to_hash = nbytes + req_ctx->nbuf;
1982 to_hash_later = nbytes_to_hash & (blocksize - 1);
1983
1984 if (req_ctx->last)
1985 to_hash_later = 0;
1986 else if (to_hash_later)
1987 /* There is a partial block. Hash the full block(s) now */
1988 nbytes_to_hash -= to_hash_later;
1989 else {
1990 /* Keep one block buffered */
1991 nbytes_to_hash -= blocksize;
1992 to_hash_later = blocksize;
1993 }
1994
1995 /* Chain in any previously buffered data */
37b5e889 1996 if (!is_sec1 && req_ctx->nbuf) {
5e833bc4
LN
1997 nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
1998 sg_init_table(req_ctx->bufsl, nsg);
3c0dd190 1999 sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
5e833bc4 2000 if (nsg > 1)
c56f6d12 2001 sg_chain(req_ctx->bufsl, 2, areq->src);
497f2e6b 2002 req_ctx->psrc = req_ctx->bufsl;
37b5e889
LC
2003 } else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
2004 if (nbytes_to_hash > blocksize)
2005 offset = blocksize - req_ctx->nbuf;
2006 else
2007 offset = nbytes_to_hash - req_ctx->nbuf;
2008 nents = sg_nents_for_len(areq->src, offset);
2009 if (nents < 0) {
2010 dev_err(ctx->dev, "Invalid number of src SG.\n");
2011 return nents;
2012 }
2013 sg_copy_to_buffer(areq->src, nents,
3c0dd190 2014 ctx_buf + req_ctx->nbuf, offset);
37b5e889
LC
2015 req_ctx->nbuf += offset;
2016 req_ctx->psrc = areq->src;
5e833bc4 2017 } else
497f2e6b 2018 req_ctx->psrc = areq->src;
5e833bc4
LN
2019
2020 if (to_hash_later) {
8e409fe1
LC
2021 nents = sg_nents_for_len(areq->src, nbytes);
2022 if (nents < 0) {
2023 dev_err(ctx->dev, "Invalid number of src SG.\n");
2024 return nents;
2025 }
d0525723 2026 sg_pcopy_to_buffer(areq->src, nents,
3c0dd190 2027 req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
5e833bc4
LN
2028 to_hash_later,
2029 nbytes - to_hash_later);
497f2e6b 2030 }
5e833bc4 2031 req_ctx->to_hash_later = to_hash_later;
497f2e6b 2032
5e833bc4 2033 /* Allocate extended descriptor */
497f2e6b
LN
2034 edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
2035 if (IS_ERR(edesc))
2036 return PTR_ERR(edesc);
2037
2038 edesc->desc.hdr = ctx->desc_hdr_template;
2039
2040 /* On last one, request SEC to pad; otherwise continue */
2041 if (req_ctx->last)
2042 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
2043 else
2044 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
2045
60f208d7
KP
2046 /* request SEC to INIT hash. */
2047 if (req_ctx->first && !req_ctx->swinit)
497f2e6b 2048 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
3c0dd190
LC
2049 if (is_sec1) {
2050 dma_addr_t dma_buf = ctx->dma_buf + req_ctx->buf_idx *
2051 HASH_MAX_BLOCK_SIZE;
2052
2053 dma_sync_single_for_device(dev, dma_buf,
37b5e889 2054 req_ctx->nbuf, DMA_TO_DEVICE);
3c0dd190 2055 }
497f2e6b
LN
2056
2057 /* When the tfm context has a keylen, it's an HMAC.
2058 * A first or last (i.e. not middle) descriptor must request HMAC.
2059 */
2060 if (ctx->keylen && (req_ctx->first || req_ctx->last))
2061 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
2062
37b5e889 2063 return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, offset,
497f2e6b
LN
2064 ahash_done);
2065}
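/*
 * Worked example of the buffering arithmetic above, assuming a 64-byte block
 * size (e.g. sha256) and an update that is not the final one:
 *
 *	nbuf = 10 buffered bytes, nbytes = 150 new bytes
 *	nbytes_to_hash = 160, to_hash_later = 160 & 63 = 32
 *	=> hash 128 bytes now, keep 32 bytes buffered for the next call
 *
 * If the total is an exact multiple of the block size (say 192), one full
 * block is still held back so that the final call always has data left to
 * pad and terminate the hash with.
 */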
2066
2067static int ahash_update(struct ahash_request *areq)
2068{
2069 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2070
2071 req_ctx->last = 0;
2072
2073 return ahash_process_req(areq, areq->nbytes);
2074}
2075
2076static int ahash_final(struct ahash_request *areq)
2077{
2078 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2079
2080 req_ctx->last = 1;
2081
2082 return ahash_process_req(areq, 0);
2083}
2084
2085static int ahash_finup(struct ahash_request *areq)
2086{
2087 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2088
2089 req_ctx->last = 1;
2090
2091 return ahash_process_req(areq, areq->nbytes);
2092}
2093
2094static int ahash_digest(struct ahash_request *areq)
2095{
2096 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
60f208d7 2097 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
497f2e6b 2098
60f208d7 2099 ahash->init(areq);
497f2e6b
LN
2100 req_ctx->last = 1;
2101
2102 return ahash_process_req(areq, areq->nbytes);
2103}
2104
3639ca84
HG
2105static int ahash_export(struct ahash_request *areq, void *out)
2106{
2107 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2108 struct talitos_export_state *export = out;
49f9783b
LC
2109 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
2110 struct talitos_ctx *ctx = crypto_ahash_ctx(ahash);
2111 struct device *dev = ctx->dev;
3639ca84 2112
49f9783b
LC
2113 dma_sync_single_for_cpu(dev, ctx->dma_hw_context,
2114 req_ctx->hw_context_size, DMA_FROM_DEVICE);
3639ca84
HG
2115 memcpy(export->hw_context, req_ctx->hw_context,
2116 req_ctx->hw_context_size);
3c0dd190 2117 memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
3639ca84
HG
2118 export->swinit = req_ctx->swinit;
2119 export->first = req_ctx->first;
2120 export->last = req_ctx->last;
2121 export->to_hash_later = req_ctx->to_hash_later;
2122 export->nbuf = req_ctx->nbuf;
2123
2124 return 0;
2125}
2126
2127static int ahash_import(struct ahash_request *areq, const void *in)
2128{
2129 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2130 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2131 const struct talitos_export_state *export = in;
49f9783b
LC
2132 unsigned int size;
2133 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2134 struct device *dev = ctx->dev;
37b5e889
LC
2135 struct talitos_private *priv = dev_get_drvdata(dev);
2136 bool is_sec1 = has_ftr_sec1(priv);
3639ca84
HG
2137
2138 memset(req_ctx, 0, sizeof(*req_ctx));
49f9783b 2139 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
3639ca84
HG
2140 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2141 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b
LC
2142 req_ctx->hw_context_size = size;
2143 if (ctx->dma_hw_context)
2144 dma_unmap_single(dev, ctx->dma_hw_context, size,
2145 DMA_BIDIRECTIONAL);
2146
2147 memcpy(req_ctx->hw_context, export->hw_context, size);
2148 ctx->dma_hw_context = dma_map_single(dev, req_ctx->hw_context, size,
2149 DMA_BIDIRECTIONAL);
37b5e889
LC
2150 if (ctx->dma_buf)
2151 dma_unmap_single(dev, ctx->dma_buf, sizeof(req_ctx->buf),
2152 DMA_TO_DEVICE);
3c0dd190 2153 memcpy(req_ctx->buf[0], export->buf, export->nbuf);
37b5e889
LC
2154 if (is_sec1)
2155 ctx->dma_buf = dma_map_single(dev, req_ctx->buf,
2156 sizeof(req_ctx->buf),
2157 DMA_TO_DEVICE);
3639ca84
HG
2158 req_ctx->swinit = export->swinit;
2159 req_ctx->first = export->first;
2160 req_ctx->last = export->last;
2161 req_ctx->to_hash_later = export->to_hash_later;
2162 req_ctx->nbuf = export->nbuf;
2163
2164 return 0;
2165}
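/*
 * ahash_export()/ahash_import() let the generic crypto layer suspend and
 * resume an in-progress hash: the snapshot carries the raw MDEU context
 * (synced back from ctx->dma_hw_context), the partially filled block buffer
 * and the first/last/swinit bookkeeping, so the operation can be continued
 * later, possibly from a different request.
 */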
2166
79b3a418
LN
2167static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2168 u8 *hash)
2169{
2170 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2171
2172 struct scatterlist sg[1];
2173 struct ahash_request *req;
f1c90ac3 2174 struct crypto_wait wait;
79b3a418
LN
2175 int ret;
2176
f1c90ac3 2177 crypto_init_wait(&wait);
79b3a418
LN
2178
2179 req = ahash_request_alloc(tfm, GFP_KERNEL);
2180 if (!req)
2181 return -ENOMEM;
2182
2183 /* Keep tfm keylen == 0 during hash of the long key */
2184 ctx->keylen = 0;
2185 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
f1c90ac3 2186 crypto_req_done, &wait);
79b3a418
LN
2187
2188 sg_init_one(&sg[0], key, keylen);
2189
2190 ahash_request_set_crypt(req, sg, hash, keylen);
f1c90ac3
GBY
2191 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2192
79b3a418
LN
2193 ahash_request_free(req);
2194
2195 return ret;
2196}
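/*
 * keyhash() runs a one-shot synchronous digest on top of the driver's own
 * asynchronous ahash: crypto_ahash_digest() usually returns -EINPROGRESS and
 * crypto_wait_req() sleeps until ahash_done() completes the request through
 * crypto_req_done().  It is only used by ahash_setkey() below to shrink
 * over-long HMAC keys.
 */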
2197
2198static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2199 unsigned int keylen)
2200{
2201 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2e13ce08 2202 struct device *dev = ctx->dev;
79b3a418
LN
2203 unsigned int blocksize =
2204 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2205 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2206 unsigned int keysize = keylen;
2207 u8 hash[SHA512_DIGEST_SIZE];
2208 int ret;
2209
2210 if (keylen <= blocksize)
2211 memcpy(ctx->key, key, keysize);
2212 else {
2213 /* Must get the hash of the long key */
2214 ret = keyhash(tfm, key, keylen, hash);
2215
2216 if (ret) {
2217 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2218 return -EINVAL;
2219 }
2220
2221 keysize = digestsize;
2222 memcpy(ctx->key, hash, digestsize);
2223 }
2224
2e13ce08
LC
2225 if (ctx->keylen)
2226 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2227
79b3a418 2228 ctx->keylen = keysize;
2e13ce08 2229 ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
79b3a418
LN
2230
2231 return 0;
2232}
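/*
 * This follows the usual HMAC key rule (RFC 2104): for hmac(sha256), with a
 * 64-byte block size, a 48-byte key is copied and DMA-mapped as-is, while a
 * 100-byte key is first digested down to 32 bytes and that digest becomes
 * the key material referenced by ctx->dma_key.
 */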
2233
2234
9c4a7965 2235struct talitos_alg_template {
d5e4aaef 2236 u32 type;
b0057763 2237 u32 priority;
d5e4aaef
LN
2238 union {
2239 struct crypto_alg crypto;
acbf7c62 2240 struct ahash_alg hash;
aeb4c132 2241 struct aead_alg aead;
d5e4aaef 2242 } alg;
9c4a7965
KP
2243 __be32 desc_hdr_template;
2244};
2245
2246static struct talitos_alg_template driver_algs[] = {
991155ba 2247 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
d5e4aaef 2248 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2249 .alg.aead = {
2250 .base = {
2251 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2252 .cra_driver_name = "authenc-hmac-sha1-"
2253 "cbc-aes-talitos",
2254 .cra_blocksize = AES_BLOCK_SIZE,
2255 .cra_flags = CRYPTO_ALG_ASYNC,
2256 },
2257 .ivsize = AES_BLOCK_SIZE,
2258 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2259 },
9c4a7965
KP
2260 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2261 DESC_HDR_SEL0_AESU |
2262 DESC_HDR_MODE0_AESU_CBC |
2263 DESC_HDR_SEL1_MDEUA |
2264 DESC_HDR_MODE1_MDEU_INIT |
2265 DESC_HDR_MODE1_MDEU_PAD |
2266 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
70bcaca7 2267 },
7405c8d7
LC
2268 { .type = CRYPTO_ALG_TYPE_AEAD,
2269 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2270 .alg.aead = {
2271 .base = {
2272 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2273 .cra_driver_name = "authenc-hmac-sha1-"
2274 "cbc-aes-talitos",
2275 .cra_blocksize = AES_BLOCK_SIZE,
2276 .cra_flags = CRYPTO_ALG_ASYNC,
2277 },
2278 .ivsize = AES_BLOCK_SIZE,
2279 .maxauthsize = SHA1_DIGEST_SIZE,
2280 },
2281 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2282 DESC_HDR_SEL0_AESU |
2283 DESC_HDR_MODE0_AESU_CBC |
2284 DESC_HDR_SEL1_MDEUA |
2285 DESC_HDR_MODE1_MDEU_INIT |
2286 DESC_HDR_MODE1_MDEU_PAD |
2287 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2288 },
d5e4aaef 2289 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2290 .alg.aead = {
2291 .base = {
2292 .cra_name = "authenc(hmac(sha1),"
2293 "cbc(des3_ede))",
2294 .cra_driver_name = "authenc-hmac-sha1-"
2295 "cbc-3des-talitos",
2296 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2297 .cra_flags = CRYPTO_ALG_ASYNC,
2298 },
2299 .ivsize = DES3_EDE_BLOCK_SIZE,
2300 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2301 },
70bcaca7
LN
2302 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2303 DESC_HDR_SEL0_DEU |
2304 DESC_HDR_MODE0_DEU_CBC |
2305 DESC_HDR_MODE0_DEU_3DES |
2306 DESC_HDR_SEL1_MDEUA |
2307 DESC_HDR_MODE1_MDEU_INIT |
2308 DESC_HDR_MODE1_MDEU_PAD |
2309 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
3952f17e 2310 },
7405c8d7
LC
2311 { .type = CRYPTO_ALG_TYPE_AEAD,
2312 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2313 .alg.aead = {
2314 .base = {
2315 .cra_name = "authenc(hmac(sha1),"
2316 "cbc(des3_ede))",
2317 .cra_driver_name = "authenc-hmac-sha1-"
2318 "cbc-3des-talitos",
2319 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2320 .cra_flags = CRYPTO_ALG_ASYNC,
2321 },
2322 .ivsize = DES3_EDE_BLOCK_SIZE,
2323 .maxauthsize = SHA1_DIGEST_SIZE,
2324 },
2325 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2326 DESC_HDR_SEL0_DEU |
2327 DESC_HDR_MODE0_DEU_CBC |
2328 DESC_HDR_MODE0_DEU_3DES |
2329 DESC_HDR_SEL1_MDEUA |
2330 DESC_HDR_MODE1_MDEU_INIT |
2331 DESC_HDR_MODE1_MDEU_PAD |
2332 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2333 },
357fb605 2334 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2335 .alg.aead = {
2336 .base = {
2337 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2338 .cra_driver_name = "authenc-hmac-sha224-"
2339 "cbc-aes-talitos",
2340 .cra_blocksize = AES_BLOCK_SIZE,
2341 .cra_flags = CRYPTO_ALG_ASYNC,
2342 },
2343 .ivsize = AES_BLOCK_SIZE,
2344 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2345 },
2346 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2347 DESC_HDR_SEL0_AESU |
2348 DESC_HDR_MODE0_AESU_CBC |
2349 DESC_HDR_SEL1_MDEUA |
2350 DESC_HDR_MODE1_MDEU_INIT |
2351 DESC_HDR_MODE1_MDEU_PAD |
2352 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2353 },
7405c8d7
LC
2354 { .type = CRYPTO_ALG_TYPE_AEAD,
2355 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2356 .alg.aead = {
2357 .base = {
2358 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2359 .cra_driver_name = "authenc-hmac-sha224-"
2360 "cbc-aes-talitos",
2361 .cra_blocksize = AES_BLOCK_SIZE,
2362 .cra_flags = CRYPTO_ALG_ASYNC,
2363 },
2364 .ivsize = AES_BLOCK_SIZE,
2365 .maxauthsize = SHA224_DIGEST_SIZE,
2366 },
2367 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2368 DESC_HDR_SEL0_AESU |
2369 DESC_HDR_MODE0_AESU_CBC |
2370 DESC_HDR_SEL1_MDEUA |
2371 DESC_HDR_MODE1_MDEU_INIT |
2372 DESC_HDR_MODE1_MDEU_PAD |
2373 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2374 },
357fb605 2375 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2376 .alg.aead = {
2377 .base = {
2378 .cra_name = "authenc(hmac(sha224),"
2379 "cbc(des3_ede))",
2380 .cra_driver_name = "authenc-hmac-sha224-"
2381 "cbc-3des-talitos",
2382 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2383 .cra_flags = CRYPTO_ALG_ASYNC,
2384 },
2385 .ivsize = DES3_EDE_BLOCK_SIZE,
2386 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2387 },
2388 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2389 DESC_HDR_SEL0_DEU |
2390 DESC_HDR_MODE0_DEU_CBC |
2391 DESC_HDR_MODE0_DEU_3DES |
2392 DESC_HDR_SEL1_MDEUA |
2393 DESC_HDR_MODE1_MDEU_INIT |
2394 DESC_HDR_MODE1_MDEU_PAD |
2395 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2396 },
7405c8d7
LC
2397 { .type = CRYPTO_ALG_TYPE_AEAD,
2398 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2399 .alg.aead = {
2400 .base = {
2401 .cra_name = "authenc(hmac(sha224),"
2402 "cbc(des3_ede))",
2403 .cra_driver_name = "authenc-hmac-sha224-"
2404 "cbc-3des-talitos",
2405 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2406 .cra_flags = CRYPTO_ALG_ASYNC,
2407 },
2408 .ivsize = DES3_EDE_BLOCK_SIZE,
2409 .maxauthsize = SHA224_DIGEST_SIZE,
2410 },
2411 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2412 DESC_HDR_SEL0_DEU |
2413 DESC_HDR_MODE0_DEU_CBC |
2414 DESC_HDR_MODE0_DEU_3DES |
2415 DESC_HDR_SEL1_MDEUA |
2416 DESC_HDR_MODE1_MDEU_INIT |
2417 DESC_HDR_MODE1_MDEU_PAD |
2418 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2419 },
d5e4aaef 2420 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2421 .alg.aead = {
2422 .base = {
2423 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2424 .cra_driver_name = "authenc-hmac-sha256-"
2425 "cbc-aes-talitos",
2426 .cra_blocksize = AES_BLOCK_SIZE,
2427 .cra_flags = CRYPTO_ALG_ASYNC,
2428 },
2429 .ivsize = AES_BLOCK_SIZE,
2430 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2431 },
3952f17e
LN
2432 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2433 DESC_HDR_SEL0_AESU |
2434 DESC_HDR_MODE0_AESU_CBC |
2435 DESC_HDR_SEL1_MDEUA |
2436 DESC_HDR_MODE1_MDEU_INIT |
2437 DESC_HDR_MODE1_MDEU_PAD |
2438 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2439 },
7405c8d7
LC
2440 { .type = CRYPTO_ALG_TYPE_AEAD,
2441 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2442 .alg.aead = {
2443 .base = {
2444 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2445 .cra_driver_name = "authenc-hmac-sha256-"
2446 "cbc-aes-talitos",
2447 .cra_blocksize = AES_BLOCK_SIZE,
2448 .cra_flags = CRYPTO_ALG_ASYNC,
2449 },
2450 .ivsize = AES_BLOCK_SIZE,
2451 .maxauthsize = SHA256_DIGEST_SIZE,
2452 },
2453 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2454 DESC_HDR_SEL0_AESU |
2455 DESC_HDR_MODE0_AESU_CBC |
2456 DESC_HDR_SEL1_MDEUA |
2457 DESC_HDR_MODE1_MDEU_INIT |
2458 DESC_HDR_MODE1_MDEU_PAD |
2459 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2460 },
d5e4aaef 2461 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2462 .alg.aead = {
2463 .base = {
2464 .cra_name = "authenc(hmac(sha256),"
2465 "cbc(des3_ede))",
2466 .cra_driver_name = "authenc-hmac-sha256-"
2467 "cbc-3des-talitos",
2468 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2469 .cra_flags = CRYPTO_ALG_ASYNC,
2470 },
2471 .ivsize = DES3_EDE_BLOCK_SIZE,
2472 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2473 },
3952f17e
LN
2474 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2475 DESC_HDR_SEL0_DEU |
2476 DESC_HDR_MODE0_DEU_CBC |
2477 DESC_HDR_MODE0_DEU_3DES |
2478 DESC_HDR_SEL1_MDEUA |
2479 DESC_HDR_MODE1_MDEU_INIT |
2480 DESC_HDR_MODE1_MDEU_PAD |
2481 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2482 },
7405c8d7
LC
2483 { .type = CRYPTO_ALG_TYPE_AEAD,
2484 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2485 .alg.aead = {
2486 .base = {
2487 .cra_name = "authenc(hmac(sha256),"
2488 "cbc(des3_ede))",
2489 .cra_driver_name = "authenc-hmac-sha256-"
2490 "cbc-3des-talitos",
2491 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2492 .cra_flags = CRYPTO_ALG_ASYNC,
2493 },
2494 .ivsize = DES3_EDE_BLOCK_SIZE,
2495 .maxauthsize = SHA256_DIGEST_SIZE,
2496 },
2497 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2498 DESC_HDR_SEL0_DEU |
2499 DESC_HDR_MODE0_DEU_CBC |
2500 DESC_HDR_MODE0_DEU_3DES |
2501 DESC_HDR_SEL1_MDEUA |
2502 DESC_HDR_MODE1_MDEU_INIT |
2503 DESC_HDR_MODE1_MDEU_PAD |
2504 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2505 },
d5e4aaef 2506 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2507 .alg.aead = {
2508 .base = {
2509 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2510 .cra_driver_name = "authenc-hmac-sha384-"
2511 "cbc-aes-talitos",
2512 .cra_blocksize = AES_BLOCK_SIZE,
2513 .cra_flags = CRYPTO_ALG_ASYNC,
2514 },
2515 .ivsize = AES_BLOCK_SIZE,
2516 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2517 },
2518 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2519 DESC_HDR_SEL0_AESU |
2520 DESC_HDR_MODE0_AESU_CBC |
2521 DESC_HDR_SEL1_MDEUB |
2522 DESC_HDR_MODE1_MDEU_INIT |
2523 DESC_HDR_MODE1_MDEU_PAD |
2524 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2525 },
2526 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2527 .alg.aead = {
2528 .base = {
2529 .cra_name = "authenc(hmac(sha384),"
2530 "cbc(des3_ede))",
2531 .cra_driver_name = "authenc-hmac-sha384-"
2532 "cbc-3des-talitos",
2533 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2534 .cra_flags = CRYPTO_ALG_ASYNC,
2535 },
2536 .ivsize = DES3_EDE_BLOCK_SIZE,
2537 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2538 },
2539 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2540 DESC_HDR_SEL0_DEU |
2541 DESC_HDR_MODE0_DEU_CBC |
2542 DESC_HDR_MODE0_DEU_3DES |
2543 DESC_HDR_SEL1_MDEUB |
2544 DESC_HDR_MODE1_MDEU_INIT |
2545 DESC_HDR_MODE1_MDEU_PAD |
2546 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2547 },
2548 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2549 .alg.aead = {
2550 .base = {
2551 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2552 .cra_driver_name = "authenc-hmac-sha512-"
2553 "cbc-aes-talitos",
2554 .cra_blocksize = AES_BLOCK_SIZE,
2555 .cra_flags = CRYPTO_ALG_ASYNC,
2556 },
2557 .ivsize = AES_BLOCK_SIZE,
2558 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2559 },
2560 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2561 DESC_HDR_SEL0_AESU |
2562 DESC_HDR_MODE0_AESU_CBC |
2563 DESC_HDR_SEL1_MDEUB |
2564 DESC_HDR_MODE1_MDEU_INIT |
2565 DESC_HDR_MODE1_MDEU_PAD |
2566 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2567 },
2568 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2569 .alg.aead = {
2570 .base = {
2571 .cra_name = "authenc(hmac(sha512),"
2572 "cbc(des3_ede))",
2573 .cra_driver_name = "authenc-hmac-sha512-"
2574 "cbc-3des-talitos",
2575 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2576 .cra_flags = CRYPTO_ALG_ASYNC,
2577 },
2578 .ivsize = DES3_EDE_BLOCK_SIZE,
2579 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2580 },
2581 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2582 DESC_HDR_SEL0_DEU |
2583 DESC_HDR_MODE0_DEU_CBC |
2584 DESC_HDR_MODE0_DEU_3DES |
2585 DESC_HDR_SEL1_MDEUB |
2586 DESC_HDR_MODE1_MDEU_INIT |
2587 DESC_HDR_MODE1_MDEU_PAD |
2588 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2589 },
2590 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2591 .alg.aead = {
2592 .base = {
2593 .cra_name = "authenc(hmac(md5),cbc(aes))",
2594 .cra_driver_name = "authenc-hmac-md5-"
2595 "cbc-aes-talitos",
2596 .cra_blocksize = AES_BLOCK_SIZE,
2597 .cra_flags = CRYPTO_ALG_ASYNC,
2598 },
2599 .ivsize = AES_BLOCK_SIZE,
2600 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2601 },
3952f17e
LN
2602 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2603 DESC_HDR_SEL0_AESU |
2604 DESC_HDR_MODE0_AESU_CBC |
2605 DESC_HDR_SEL1_MDEUA |
2606 DESC_HDR_MODE1_MDEU_INIT |
2607 DESC_HDR_MODE1_MDEU_PAD |
2608 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2609 },
7405c8d7
LC
2610 { .type = CRYPTO_ALG_TYPE_AEAD,
2611 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2612 .alg.aead = {
2613 .base = {
2614 .cra_name = "authenc(hmac(md5),cbc(aes))",
2615 .cra_driver_name = "authenc-hmac-md5-"
2616 "cbc-aes-talitos",
2617 .cra_blocksize = AES_BLOCK_SIZE,
2618 .cra_flags = CRYPTO_ALG_ASYNC,
2619 },
2620 .ivsize = AES_BLOCK_SIZE,
2621 .maxauthsize = MD5_DIGEST_SIZE,
2622 },
2623 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2624 DESC_HDR_SEL0_AESU |
2625 DESC_HDR_MODE0_AESU_CBC |
2626 DESC_HDR_SEL1_MDEUA |
2627 DESC_HDR_MODE1_MDEU_INIT |
2628 DESC_HDR_MODE1_MDEU_PAD |
2629 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2630 },
d5e4aaef 2631 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2632 .alg.aead = {
2633 .base = {
2634 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2635 .cra_driver_name = "authenc-hmac-md5-"
2636 "cbc-3des-talitos",
2637 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2638 .cra_flags = CRYPTO_ALG_ASYNC,
2639 },
2640 .ivsize = DES3_EDE_BLOCK_SIZE,
2641 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2642 },
3952f17e
LN
2643 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2644 DESC_HDR_SEL0_DEU |
2645 DESC_HDR_MODE0_DEU_CBC |
2646 DESC_HDR_MODE0_DEU_3DES |
2647 DESC_HDR_SEL1_MDEUA |
2648 DESC_HDR_MODE1_MDEU_INIT |
2649 DESC_HDR_MODE1_MDEU_PAD |
2650 DESC_HDR_MODE1_MDEU_MD5_HMAC,
4de9d0b5 2651 },
7405c8d7
LC
2652 { .type = CRYPTO_ALG_TYPE_AEAD,
2653 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2654 .alg.aead = {
2655 .base = {
2656 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2657 .cra_driver_name = "authenc-hmac-md5-"
2658 "cbc-3des-talitos",
2659 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2660 .cra_flags = CRYPTO_ALG_ASYNC,
2661 },
2662 .ivsize = DES3_EDE_BLOCK_SIZE,
2663 .maxauthsize = MD5_DIGEST_SIZE,
2664 },
2665 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2666 DESC_HDR_SEL0_DEU |
2667 DESC_HDR_MODE0_DEU_CBC |
2668 DESC_HDR_MODE0_DEU_3DES |
2669 DESC_HDR_SEL1_MDEUA |
2670 DESC_HDR_MODE1_MDEU_INIT |
2671 DESC_HDR_MODE1_MDEU_PAD |
2672 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2673 },
4de9d0b5 2674 /* ABLKCIPHER algorithms. */
5e75ae1b
LC
2675 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2676 .alg.crypto = {
2677 .cra_name = "ecb(aes)",
2678 .cra_driver_name = "ecb-aes-talitos",
2679 .cra_blocksize = AES_BLOCK_SIZE,
2680 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2681 CRYPTO_ALG_ASYNC,
2682 .cra_ablkcipher = {
2683 .min_keysize = AES_MIN_KEY_SIZE,
2684 .max_keysize = AES_MAX_KEY_SIZE,
2685 .ivsize = AES_BLOCK_SIZE,
2686 }
2687 },
2688 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2689 DESC_HDR_SEL0_AESU,
2690 },
d5e4aaef
LN
2691 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2692 .alg.crypto = {
4de9d0b5
LN
2693 .cra_name = "cbc(aes)",
2694 .cra_driver_name = "cbc-aes-talitos",
2695 .cra_blocksize = AES_BLOCK_SIZE,
2696 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2697 CRYPTO_ALG_ASYNC,
4de9d0b5 2698 .cra_ablkcipher = {
4de9d0b5
LN
2699 .min_keysize = AES_MIN_KEY_SIZE,
2700 .max_keysize = AES_MAX_KEY_SIZE,
2701 .ivsize = AES_BLOCK_SIZE,
2702 }
2703 },
2704 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2705 DESC_HDR_SEL0_AESU |
2706 DESC_HDR_MODE0_AESU_CBC,
2707 },
5e75ae1b
LC
2708 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2709 .alg.crypto = {
2710 .cra_name = "ctr(aes)",
2711 .cra_driver_name = "ctr-aes-talitos",
2712 .cra_blocksize = AES_BLOCK_SIZE,
2713 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2714 CRYPTO_ALG_ASYNC,
2715 .cra_ablkcipher = {
2716 .min_keysize = AES_MIN_KEY_SIZE,
2717 .max_keysize = AES_MAX_KEY_SIZE,
2718 .ivsize = AES_BLOCK_SIZE,
2719 }
2720 },
70d355cc 2721 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
5e75ae1b
LC
2722 DESC_HDR_SEL0_AESU |
2723 DESC_HDR_MODE0_AESU_CTR,
2724 },
2725 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2726 .alg.crypto = {
2727 .cra_name = "ecb(des)",
2728 .cra_driver_name = "ecb-des-talitos",
2729 .cra_blocksize = DES_BLOCK_SIZE,
2730 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2731 CRYPTO_ALG_ASYNC,
2732 .cra_ablkcipher = {
2733 .min_keysize = DES_KEY_SIZE,
2734 .max_keysize = DES_KEY_SIZE,
2735 .ivsize = DES_BLOCK_SIZE,
2736 }
2737 },
2738 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2739 DESC_HDR_SEL0_DEU,
2740 },
2741 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2742 .alg.crypto = {
2743 .cra_name = "cbc(des)",
2744 .cra_driver_name = "cbc-des-talitos",
2745 .cra_blocksize = DES_BLOCK_SIZE,
2746 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2747 CRYPTO_ALG_ASYNC,
2748 .cra_ablkcipher = {
2749 .min_keysize = DES_KEY_SIZE,
2750 .max_keysize = DES_KEY_SIZE,
2751 .ivsize = DES_BLOCK_SIZE,
2752 }
2753 },
2754 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2755 DESC_HDR_SEL0_DEU |
2756 DESC_HDR_MODE0_DEU_CBC,
2757 },
2758 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2759 .alg.crypto = {
2760 .cra_name = "ecb(des3_ede)",
2761 .cra_driver_name = "ecb-3des-talitos",
2762 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2763 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2764 CRYPTO_ALG_ASYNC,
2765 .cra_ablkcipher = {
2766 .min_keysize = DES3_EDE_KEY_SIZE,
2767 .max_keysize = DES3_EDE_KEY_SIZE,
2768 .ivsize = DES3_EDE_BLOCK_SIZE,
2769 }
2770 },
2771 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2772 DESC_HDR_SEL0_DEU |
2773 DESC_HDR_MODE0_DEU_3DES,
2774 },
d5e4aaef
LN
2775 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2776 .alg.crypto = {
4de9d0b5
LN
2777 .cra_name = "cbc(des3_ede)",
2778 .cra_driver_name = "cbc-3des-talitos",
2779 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2780 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2781 CRYPTO_ALG_ASYNC,
4de9d0b5 2782 .cra_ablkcipher = {
4de9d0b5
LN
2783 .min_keysize = DES3_EDE_KEY_SIZE,
2784 .max_keysize = DES3_EDE_KEY_SIZE,
2785 .ivsize = DES3_EDE_BLOCK_SIZE,
2786 }
2787 },
2788 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2789 DESC_HDR_SEL0_DEU |
2790 DESC_HDR_MODE0_DEU_CBC |
2791 DESC_HDR_MODE0_DEU_3DES,
497f2e6b
LN
2792 },
2793 /* AHASH algorithms. */
2794 { .type = CRYPTO_ALG_TYPE_AHASH,
2795 .alg.hash = {
497f2e6b 2796 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2797 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2798 .halg.base = {
2799 .cra_name = "md5",
2800 .cra_driver_name = "md5-talitos",
b3988618 2801 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
497f2e6b
LN
2802 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2803 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2804 }
2805 },
2806 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2807 DESC_HDR_SEL0_MDEUA |
2808 DESC_HDR_MODE0_MDEU_MD5,
2809 },
2810 { .type = CRYPTO_ALG_TYPE_AHASH,
2811 .alg.hash = {
497f2e6b 2812 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2813 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2814 .halg.base = {
2815 .cra_name = "sha1",
2816 .cra_driver_name = "sha1-talitos",
2817 .cra_blocksize = SHA1_BLOCK_SIZE,
2818 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2819 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2820 }
2821 },
2822 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2823 DESC_HDR_SEL0_MDEUA |
2824 DESC_HDR_MODE0_MDEU_SHA1,
2825 },
60f208d7
KP
2826 { .type = CRYPTO_ALG_TYPE_AHASH,
2827 .alg.hash = {
60f208d7 2828 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2829 .halg.statesize = sizeof(struct talitos_export_state),
60f208d7
KP
2830 .halg.base = {
2831 .cra_name = "sha224",
2832 .cra_driver_name = "sha224-talitos",
2833 .cra_blocksize = SHA224_BLOCK_SIZE,
2834 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2835 CRYPTO_ALG_ASYNC,
60f208d7
KP
2836 }
2837 },
2838 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2839 DESC_HDR_SEL0_MDEUA |
2840 DESC_HDR_MODE0_MDEU_SHA224,
2841 },
497f2e6b
LN
2842 { .type = CRYPTO_ALG_TYPE_AHASH,
2843 .alg.hash = {
497f2e6b 2844 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2845 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2846 .halg.base = {
2847 .cra_name = "sha256",
2848 .cra_driver_name = "sha256-talitos",
2849 .cra_blocksize = SHA256_BLOCK_SIZE,
2850 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2851 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2852 }
2853 },
2854 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2855 DESC_HDR_SEL0_MDEUA |
2856 DESC_HDR_MODE0_MDEU_SHA256,
2857 },
2858 { .type = CRYPTO_ALG_TYPE_AHASH,
2859 .alg.hash = {
497f2e6b 2860 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2861 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2862 .halg.base = {
2863 .cra_name = "sha384",
2864 .cra_driver_name = "sha384-talitos",
2865 .cra_blocksize = SHA384_BLOCK_SIZE,
2866 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2867 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2868 }
2869 },
2870 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2871 DESC_HDR_SEL0_MDEUB |
2872 DESC_HDR_MODE0_MDEUB_SHA384,
2873 },
2874 { .type = CRYPTO_ALG_TYPE_AHASH,
2875 .alg.hash = {
497f2e6b 2876 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2877 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2878 .halg.base = {
2879 .cra_name = "sha512",
2880 .cra_driver_name = "sha512-talitos",
2881 .cra_blocksize = SHA512_BLOCK_SIZE,
2882 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2883 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2884 }
2885 },
2886 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2887 DESC_HDR_SEL0_MDEUB |
2888 DESC_HDR_MODE0_MDEUB_SHA512,
2889 },
79b3a418
LN
2890 { .type = CRYPTO_ALG_TYPE_AHASH,
2891 .alg.hash = {
79b3a418 2892 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2893 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2894 .halg.base = {
2895 .cra_name = "hmac(md5)",
2896 .cra_driver_name = "hmac-md5-talitos",
b3988618 2897 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
79b3a418
LN
2898 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2899 CRYPTO_ALG_ASYNC,
79b3a418
LN
2900 }
2901 },
2902 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2903 DESC_HDR_SEL0_MDEUA |
2904 DESC_HDR_MODE0_MDEU_MD5,
2905 },
2906 { .type = CRYPTO_ALG_TYPE_AHASH,
2907 .alg.hash = {
79b3a418 2908 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2909 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2910 .halg.base = {
2911 .cra_name = "hmac(sha1)",
2912 .cra_driver_name = "hmac-sha1-talitos",
2913 .cra_blocksize = SHA1_BLOCK_SIZE,
2914 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2915 CRYPTO_ALG_ASYNC,
79b3a418
LN
2916 }
2917 },
2918 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2919 DESC_HDR_SEL0_MDEUA |
2920 DESC_HDR_MODE0_MDEU_SHA1,
2921 },
2922 { .type = CRYPTO_ALG_TYPE_AHASH,
2923 .alg.hash = {
79b3a418 2924 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2925 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2926 .halg.base = {
2927 .cra_name = "hmac(sha224)",
2928 .cra_driver_name = "hmac-sha224-talitos",
2929 .cra_blocksize = SHA224_BLOCK_SIZE,
2930 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2931 CRYPTO_ALG_ASYNC,
79b3a418
LN
2932 }
2933 },
2934 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2935 DESC_HDR_SEL0_MDEUA |
2936 DESC_HDR_MODE0_MDEU_SHA224,
2937 },
2938 { .type = CRYPTO_ALG_TYPE_AHASH,
2939 .alg.hash = {
79b3a418 2940 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2941 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2942 .halg.base = {
2943 .cra_name = "hmac(sha256)",
2944 .cra_driver_name = "hmac-sha256-talitos",
2945 .cra_blocksize = SHA256_BLOCK_SIZE,
2946 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2947 CRYPTO_ALG_ASYNC,
79b3a418
LN
2948 }
2949 },
2950 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2951 DESC_HDR_SEL0_MDEUA |
2952 DESC_HDR_MODE0_MDEU_SHA256,
2953 },
2954 { .type = CRYPTO_ALG_TYPE_AHASH,
2955 .alg.hash = {
79b3a418 2956 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2957 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2958 .halg.base = {
2959 .cra_name = "hmac(sha384)",
2960 .cra_driver_name = "hmac-sha384-talitos",
2961 .cra_blocksize = SHA384_BLOCK_SIZE,
2962 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2963 CRYPTO_ALG_ASYNC,
79b3a418
LN
2964 }
2965 },
2966 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2967 DESC_HDR_SEL0_MDEUB |
2968 DESC_HDR_MODE0_MDEUB_SHA384,
2969 },
2970 { .type = CRYPTO_ALG_TYPE_AHASH,
2971 .alg.hash = {
79b3a418 2972 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2973 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2974 .halg.base = {
2975 .cra_name = "hmac(sha512)",
2976 .cra_driver_name = "hmac-sha512-talitos",
2977 .cra_blocksize = SHA512_BLOCK_SIZE,
2978 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2979 CRYPTO_ALG_ASYNC,
79b3a418
LN
2980 }
2981 },
2982 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2983 DESC_HDR_SEL0_MDEUB |
2984 DESC_HDR_MODE0_MDEUB_SHA512,
2985 }
9c4a7965
KP
2986};
2987
2988struct talitos_crypto_alg {
2989 struct list_head entry;
2990 struct device *dev;
acbf7c62 2991 struct talitos_alg_template algt;
9c4a7965
KP
2992};
2993
89d124cb
JE
2994static int talitos_init_common(struct talitos_ctx *ctx,
2995 struct talitos_crypto_alg *talitos_alg)
9c4a7965 2996{
5228f0f7 2997 struct talitos_private *priv;
9c4a7965
KP
2998
2999 /* update context with ptr to dev */
3000 ctx->dev = talitos_alg->dev;
19bbbc63 3001
5228f0f7
KP
3002 /* assign SEC channel to tfm in round-robin fashion */
3003 priv = dev_get_drvdata(ctx->dev);
3004 ctx->ch = atomic_inc_return(&priv->last_chan) &
3005 (priv->num_channels - 1);
3006
9c4a7965 3007 /* copy descriptor header template value */
acbf7c62 3008 ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
9c4a7965 3009
602dba5a
KP
3010 /* select done notification */
3011 ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
3012
497f2e6b
LN
3013 return 0;
3014}
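/*
 * The round-robin channel assignment above relies on priv->num_channels
 * being a power of two (enforced in talitos_probe()), so the bitwise AND is
 * a cheap modulo: with 4 channels, successive tfm inits see
 * atomic_inc_return() values 1, 2, 3, 4, 5, ... and land on channels
 * 1, 2, 3, 0, 1, ...
 */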
3015
89d124cb
JE
3016static int talitos_cra_init(struct crypto_tfm *tfm)
3017{
3018 struct crypto_alg *alg = tfm->__crt_alg;
3019 struct talitos_crypto_alg *talitos_alg;
3020 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3021
3022 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_AHASH)
3023 talitos_alg = container_of(__crypto_ahash_alg(alg),
3024 struct talitos_crypto_alg,
3025 algt.alg.hash);
3026 else
3027 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3028 algt.alg.crypto);
3029
3030 return talitos_init_common(ctx, talitos_alg);
3031}
3032
aeb4c132 3033static int talitos_cra_init_aead(struct crypto_aead *tfm)
497f2e6b 3034{
89d124cb
JE
3035 struct aead_alg *alg = crypto_aead_alg(tfm);
3036 struct talitos_crypto_alg *talitos_alg;
3037 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3038
3039 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3040 algt.alg.aead);
3041
3042 return talitos_init_common(ctx, talitos_alg);
9c4a7965
KP
3043}
3044
497f2e6b
LN
3045static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3046{
3047 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3048
3049 talitos_cra_init(tfm);
3050
3051 ctx->keylen = 0;
3052 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3053 sizeof(struct talitos_ahash_req_ctx));
3054
3055 return 0;
3056}
3057
2e13ce08
LC
3058static void talitos_cra_exit(struct crypto_tfm *tfm)
3059{
3060 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3061 struct device *dev = ctx->dev;
3062
3063 if (ctx->keylen)
3064 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3065}
3066
49f9783b
LC
3067static void talitos_cra_exit_ahash(struct crypto_tfm *tfm)
3068{
3069 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3070 struct device *dev = ctx->dev;
3071 unsigned int size;
3072
3073 talitos_cra_exit(tfm);
3074
3075 size = (crypto_ahash_digestsize(__crypto_ahash_cast(tfm)) <=
3076 SHA256_DIGEST_SIZE)
3077 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
3078 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
3079
3080 if (ctx->dma_hw_context)
3081 dma_unmap_single(dev, ctx->dma_hw_context, size,
3082 DMA_BIDIRECTIONAL);
37b5e889 3083 if (ctx->dma_buf)
3c0dd190 3084 dma_unmap_single(dev, ctx->dma_buf, HASH_MAX_BLOCK_SIZE * 2,
37b5e889 3085 DMA_TO_DEVICE);
49f9783b
LC
3086}
3087
9c4a7965
KP
3088/*
3089 * given the alg's descriptor header template, determine whether descriptor
3090 * type and primary/secondary execution units required match the hw
3091 * capabilities description provided in the device tree node.
3092 */
3093static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3094{
3095 struct talitos_private *priv = dev_get_drvdata(dev);
3096 int ret;
3097
3098 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3099 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3100
3101 if (SECONDARY_EU(desc_hdr_template))
3102 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3103 & priv->exec_units);
3104
3105 return ret;
3106}
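/*
 * Example: for the plain sha256 template, DESC_TYPE() selects the
 * common/non-snooping descriptor type and PRIMARY_EU() selects MDEU-A; the
 * algorithm is only registered if both corresponding bits are set in the
 * "fsl,descriptor-types-mask" and "fsl,exec-units-mask" device tree
 * properties read in talitos_probe().  AEAD templates additionally require
 * their secondary (authentication) execution unit to be present.
 */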
3107
2dc11581 3108static int talitos_remove(struct platform_device *ofdev)
9c4a7965
KP
3109{
3110 struct device *dev = &ofdev->dev;
3111 struct talitos_private *priv = dev_get_drvdata(dev);
3112 struct talitos_crypto_alg *t_alg, *n;
3113 int i;
3114
3115 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
acbf7c62
LN
3116 switch (t_alg->algt.type) {
3117 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62 3118 break;
aeb4c132
HX
3119 case CRYPTO_ALG_TYPE_AEAD:
3120 crypto_unregister_aead(&t_alg->algt.alg.aead);
acbf7c62
LN
3121 case CRYPTO_ALG_TYPE_AHASH:
3122 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3123 break;
3124 }
9c4a7965 3125 list_del(&t_alg->entry);
9c4a7965
KP
3126 }
3127
3128 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3129 talitos_unregister_rng(dev);
3130
c3e337f8 3131 for (i = 0; i < 2; i++)
2cdba3cf 3132 if (priv->irq[i]) {
c3e337f8
KP
3133 free_irq(priv->irq[i], dev);
3134 irq_dispose_mapping(priv->irq[i]);
3135 }
9c4a7965 3136
c3e337f8 3137 tasklet_kill(&priv->done_task[0]);
2cdba3cf 3138 if (priv->irq[1])
c3e337f8 3139 tasklet_kill(&priv->done_task[1]);
9c4a7965 3140
9c4a7965
KP
3141 return 0;
3142}
3143
3144static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3145 struct talitos_alg_template
3146 *template)
3147{
60f208d7 3148 struct talitos_private *priv = dev_get_drvdata(dev);
9c4a7965
KP
3149 struct talitos_crypto_alg *t_alg;
3150 struct crypto_alg *alg;
3151
24b92ff2
LC
3152 t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
3153 GFP_KERNEL);
9c4a7965
KP
3154 if (!t_alg)
3155 return ERR_PTR(-ENOMEM);
3156
acbf7c62
LN
3157 t_alg->algt = *template;
3158
3159 switch (t_alg->algt.type) {
3160 case CRYPTO_ALG_TYPE_ABLKCIPHER:
497f2e6b
LN
3161 alg = &t_alg->algt.alg.crypto;
3162 alg->cra_init = talitos_cra_init;
2e13ce08 3163 alg->cra_exit = talitos_cra_exit;
d4cd3283 3164 alg->cra_type = &crypto_ablkcipher_type;
b286e003
KP
3165 alg->cra_ablkcipher.setkey = ablkcipher_setkey;
3166 alg->cra_ablkcipher.encrypt = ablkcipher_encrypt;
3167 alg->cra_ablkcipher.decrypt = ablkcipher_decrypt;
3168 alg->cra_ablkcipher.geniv = "eseqiv";
497f2e6b 3169 break;
acbf7c62 3170 case CRYPTO_ALG_TYPE_AEAD:
aeb4c132 3171 alg = &t_alg->algt.alg.aead.base;
2e13ce08 3172 alg->cra_exit = talitos_cra_exit;
aeb4c132
HX
3173 t_alg->algt.alg.aead.init = talitos_cra_init_aead;
3174 t_alg->algt.alg.aead.setkey = aead_setkey;
3175 t_alg->algt.alg.aead.encrypt = aead_encrypt;
3176 t_alg->algt.alg.aead.decrypt = aead_decrypt;
6cda075a
LC
3177 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3178 !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
24b92ff2 3179 devm_kfree(dev, t_alg);
6cda075a
LC
3180 return ERR_PTR(-ENOTSUPP);
3181 }
acbf7c62
LN
3182 break;
3183 case CRYPTO_ALG_TYPE_AHASH:
3184 alg = &t_alg->algt.alg.hash.halg.base;
497f2e6b 3185 alg->cra_init = talitos_cra_init_ahash;
49f9783b 3186 alg->cra_exit = talitos_cra_exit_ahash;
d4cd3283 3187 alg->cra_type = &crypto_ahash_type;
b286e003
KP
3188 t_alg->algt.alg.hash.init = ahash_init;
3189 t_alg->algt.alg.hash.update = ahash_update;
3190 t_alg->algt.alg.hash.final = ahash_final;
3191 t_alg->algt.alg.hash.finup = ahash_finup;
3192 t_alg->algt.alg.hash.digest = ahash_digest;
56136631
LC
3193 if (!strncmp(alg->cra_name, "hmac", 4))
3194 t_alg->algt.alg.hash.setkey = ahash_setkey;
3639ca84
HG
3195 t_alg->algt.alg.hash.import = ahash_import;
3196 t_alg->algt.alg.hash.export = ahash_export;
b286e003 3197
79b3a418 3198 if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
0b2730d8 3199 !strncmp(alg->cra_name, "hmac", 4)) {
24b92ff2 3200 devm_kfree(dev, t_alg);
79b3a418 3201 return ERR_PTR(-ENOTSUPP);
0b2730d8 3202 }
60f208d7 3203 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
79b3a418
LN
3204 (!strcmp(alg->cra_name, "sha224") ||
3205 !strcmp(alg->cra_name, "hmac(sha224)"))) {
60f208d7
KP
3206 t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
3207 t_alg->algt.desc_hdr_template =
3208 DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3209 DESC_HDR_SEL0_MDEUA |
3210 DESC_HDR_MODE0_MDEU_SHA256;
3211 }
497f2e6b 3212 break;
1d11911a
KP
3213 default:
3214 dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
24b92ff2 3215 devm_kfree(dev, t_alg);
1d11911a 3216 return ERR_PTR(-EINVAL);
acbf7c62 3217 }
9c4a7965 3218
9c4a7965 3219 alg->cra_module = THIS_MODULE;
b0057763
LC
3220 if (t_alg->algt.priority)
3221 alg->cra_priority = t_alg->algt.priority;
3222 else
3223 alg->cra_priority = TALITOS_CRA_PRIORITY;
9c4a7965 3224 alg->cra_alignmask = 0;
9c4a7965 3225 alg->cra_ctxsize = sizeof(struct talitos_ctx);
d912bb76 3226 alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
9c4a7965 3227
9c4a7965
KP
3228 t_alg->dev = dev;
3229
3230 return t_alg;
3231}
3232
c3e337f8
KP
3233static int talitos_probe_irq(struct platform_device *ofdev)
3234{
3235 struct device *dev = &ofdev->dev;
3236 struct device_node *np = ofdev->dev.of_node;
3237 struct talitos_private *priv = dev_get_drvdata(dev);
3238 int err;
dd3c0987 3239 bool is_sec1 = has_ftr_sec1(priv);
c3e337f8
KP
3240
3241 priv->irq[0] = irq_of_parse_and_map(np, 0);
2cdba3cf 3242 if (!priv->irq[0]) {
c3e337f8
KP
3243 dev_err(dev, "failed to map irq\n");
3244 return -EINVAL;
3245 }
dd3c0987
LC
3246 if (is_sec1) {
3247 err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
3248 dev_driver_string(dev), dev);
3249 goto primary_out;
3250 }
c3e337f8
KP
3251
3252 priv->irq[1] = irq_of_parse_and_map(np, 1);
3253
3254 /* get the primary irq line */
2cdba3cf 3255 if (!priv->irq[1]) {
dd3c0987 3256 err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
c3e337f8
KP
3257 dev_driver_string(dev), dev);
3258 goto primary_out;
3259 }
3260
dd3c0987 3261 err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
c3e337f8
KP
3262 dev_driver_string(dev), dev);
3263 if (err)
3264 goto primary_out;
3265
3266 /* get the secondary irq line */
dd3c0987 3267 err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
c3e337f8
KP
3268 dev_driver_string(dev), dev);
3269 if (err) {
3270 dev_err(dev, "failed to request secondary irq\n");
3271 irq_dispose_mapping(priv->irq[1]);
2cdba3cf 3272 priv->irq[1] = 0;
c3e337f8
KP
3273 }
3274
3275 return err;
3276
3277primary_out:
3278 if (err) {
3279 dev_err(dev, "failed to request primary irq\n");
3280 irq_dispose_mapping(priv->irq[0]);
2cdba3cf 3281 priv->irq[0] = 0;
c3e337f8
KP
3282 }
3283
3284 return err;
3285}
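/*
 * IRQ topology handled above: SEC1 uses a single interrupt line for all of
 * its channels; SEC2+ parts either provide one combined interrupt or a pair,
 * in which case channels 0/2 are serviced via irq[0] and channels 1/3 via
 * irq[1], matching the pair of done tasklets set up in talitos_probe().
 */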
3286
1c48a5c9 3287static int talitos_probe(struct platform_device *ofdev)
9c4a7965
KP
3288{
3289 struct device *dev = &ofdev->dev;
61c7a080 3290 struct device_node *np = ofdev->dev.of_node;
9c4a7965 3291 struct talitos_private *priv;
9c4a7965 3292 int i, err;
5fa7fa14 3293 int stride;
fd5ea7f0 3294 struct resource *res;
9c4a7965 3295
24b92ff2 3296 priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
9c4a7965
KP
3297 if (!priv)
3298 return -ENOMEM;
3299
f3de9cb1
KH
3300 INIT_LIST_HEAD(&priv->alg_list);
3301
9c4a7965
KP
3302 dev_set_drvdata(dev, priv);
3303
3304 priv->ofdev = ofdev;
3305
511d63cb
HG
3306 spin_lock_init(&priv->reg_lock);
3307
fd5ea7f0
LC
3308 res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3309 if (!res)
3310 return -ENXIO;
3311 priv->reg = devm_ioremap(dev, res->start, resource_size(res));
9c4a7965
KP
3312 if (!priv->reg) {
3313 		dev_err(dev, "failed to ioremap\n");
3314 err = -ENOMEM;
3315 goto err_out;
3316 }
3317
3318 /* get SEC version capabilities from device tree */
fa14c6cf
LC
3319 of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3320 of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3321 of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3322 of_property_read_u32(np, "fsl,descriptor-types-mask",
3323 &priv->desc_types);
9c4a7965
KP
3324
3325 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3326 !priv->exec_units || !priv->desc_types) {
3327 dev_err(dev, "invalid property data in device tree node\n");
3328 err = -EINVAL;
3329 goto err_out;
3330 }
3331
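	/*
	 * For illustration only: a SEC node carrying the properties read
	 * above might look like the following (example values, not taken
	 * from any particular board):
	 *
	 *	crypto@30000 {
	 *		compatible = "fsl,sec2.1", "fsl,sec2.0";
	 *		reg = <0x30000 0x10000>;
	 *		interrupts = <29 2>;
	 *		fsl,num-channels = <4>;
	 *		fsl,channel-fifo-len = <24>;
	 *		fsl,exec-units-mask = <0xfe>;
	 *		fsl,descriptor-types-mask = <0x12b0ebf>;
	 *	};
	 */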
f3c85bc1
LN
3332 if (of_device_is_compatible(np, "fsl,sec3.0"))
3333 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3334
fe5720e2 3335 if (of_device_is_compatible(np, "fsl,sec2.1"))
60f208d7 3336 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
79b3a418
LN
3337 TALITOS_FTR_SHA224_HWINIT |
3338 TALITOS_FTR_HMAC_OK;
fe5720e2 3339
21590888
LC
3340 if (of_device_is_compatible(np, "fsl,sec1.0"))
3341 priv->features |= TALITOS_FTR_SEC1;
3342
5fa7fa14
LC
3343 if (of_device_is_compatible(np, "fsl,sec1.2")) {
3344 priv->reg_deu = priv->reg + TALITOS12_DEU;
3345 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3346 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3347 stride = TALITOS1_CH_STRIDE;
3348 } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3349 priv->reg_deu = priv->reg + TALITOS10_DEU;
3350 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3351 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3352 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3353 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3354 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3355 stride = TALITOS1_CH_STRIDE;
3356 } else {
3357 priv->reg_deu = priv->reg + TALITOS2_DEU;
3358 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3359 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3360 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3361 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3362 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3363 priv->reg_keu = priv->reg + TALITOS2_KEU;
3364 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3365 stride = TALITOS2_CH_STRIDE;
3366 }
3367
dd3c0987
LC
3368 err = talitos_probe_irq(ofdev);
3369 if (err)
3370 goto err_out;
3371
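	/*
	 * The completion tasklets below mirror the irq wiring chosen in
	 * talitos_probe_irq() above.
	 */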
3372 if (of_device_is_compatible(np, "fsl,sec1.0")) {
9c02e285
LC
3373 if (priv->num_channels == 1)
3374 tasklet_init(&priv->done_task[0], talitos1_done_ch0,
dd3c0987 3375 (unsigned long)dev);
9c02e285
LC
3376 else
3377 tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3378 (unsigned long)dev);
3379 } else {
3380 if (priv->irq[1]) {
dd3c0987
LC
3381 tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3382 (unsigned long)dev);
3383 tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3384 (unsigned long)dev);
9c02e285
LC
3385 } else if (priv->num_channels == 1) {
3386 tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3387 (unsigned long)dev);
3388 } else {
3389 tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3390 (unsigned long)dev);
dd3c0987
LC
3391 }
3392 }
3393
24b92ff2
LC
3394 	priv->chan = devm_kcalloc(dev, priv->num_channels,
3395 				  sizeof(struct talitos_channel), GFP_KERNEL);
4b992628
KP
3396 if (!priv->chan) {
3397 dev_err(dev, "failed to allocate channel management space\n");
9c4a7965
KP
3398 err = -ENOMEM;
3399 goto err_out;
3400 }
3401
f641dddd
MH
3402 priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3403
c3e337f8 3404 for (i = 0; i < priv->num_channels; i++) {
5fa7fa14 3405 priv->chan[i].reg = priv->reg + stride * (i + 1);
2cdba3cf 3406 if (!priv->irq[1] || !(i & 1))
c3e337f8 3407 priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
ad42d5fc 3408
4b992628
KP
3409 spin_lock_init(&priv->chan[i].head_lock);
3410 spin_lock_init(&priv->chan[i].tail_lock);
9c4a7965 3411
24b92ff2
LC
3412 		priv->chan[i].fifo = devm_kcalloc(dev, priv->fifo_len,
3413 						  sizeof(struct talitos_request),
3414 						  GFP_KERNEL);
4b992628 3415 if (!priv->chan[i].fifo) {
9c4a7965
KP
3416 dev_err(dev, "failed to allocate request fifo %d\n", i);
3417 err = -ENOMEM;
3418 goto err_out;
3419 }
9c4a7965 3420
4b992628
KP
3421 atomic_set(&priv->chan[i].submit_count,
3422 -(priv->chfifo_len - 1));
f641dddd 3423 }
9c4a7965 3424
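	/* the SEC can master 36-bit physical addresses */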
81eb024c
KP
3425 dma_set_mask(dev, DMA_BIT_MASK(36));
3426
9c4a7965
KP
3427 /* reset and initialize the h/w */
3428 err = init_device(dev);
3429 if (err) {
3430 dev_err(dev, "failed to initialize device\n");
3431 goto err_out;
3432 }
3433
3434 /* register the RNG, if available */
3435 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3436 err = talitos_register_rng(dev);
3437 if (err) {
3438 dev_err(dev, "failed to register hwrng: %d\n", err);
3439 goto err_out;
3440 } else
3441 dev_info(dev, "hwrng\n");
3442 }
3443
3444 /* register crypto algorithms the device supports */
9c4a7965
KP
3445 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3446 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3447 struct talitos_crypto_alg *t_alg;
aeb4c132 3448 struct crypto_alg *alg = NULL;
9c4a7965
KP
3449
3450 t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3451 if (IS_ERR(t_alg)) {
3452 err = PTR_ERR(t_alg);
0b2730d8 3453 if (err == -ENOTSUPP)
79b3a418 3454 continue;
9c4a7965
KP
3455 goto err_out;
3456 }
3457
acbf7c62
LN
3458 switch (t_alg->algt.type) {
3459 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62
LN
3460 err = crypto_register_alg(
3461 &t_alg->algt.alg.crypto);
aeb4c132 3462 alg = &t_alg->algt.alg.crypto;
acbf7c62 3463 break;
aeb4c132
HX
3464
3465 case CRYPTO_ALG_TYPE_AEAD:
3466 err = crypto_register_aead(
3467 &t_alg->algt.alg.aead);
3468 alg = &t_alg->algt.alg.aead.base;
3469 break;
3470
acbf7c62
LN
3471 case CRYPTO_ALG_TYPE_AHASH:
3472 err = crypto_register_ahash(
3473 &t_alg->algt.alg.hash);
aeb4c132 3474 alg = &t_alg->algt.alg.hash.halg.base;
acbf7c62
LN
3475 break;
3476 }
9c4a7965
KP
3477 if (err) {
3478 dev_err(dev, "%s alg registration failed\n",
aeb4c132 3479 alg->cra_driver_name);
24b92ff2 3480 devm_kfree(dev, t_alg);
991155ba 3481 } else
9c4a7965 3482 list_add_tail(&t_alg->entry, &priv->alg_list);
9c4a7965
KP
3483 }
3484 }
5b859b6e
KP
3485 if (!list_empty(&priv->alg_list))
3486 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3487 (char *)of_get_property(np, "compatible", NULL));
9c4a7965
KP
3488
3489 return 0;
3490
3491err_out:
3492 talitos_remove(ofdev);
9c4a7965
KP
3493
3494 return err;
3495}
3496
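/*
 * Usage sketch (illustrative only, not part of this driver): once the probe
 * above has registered its algorithms, they are reachable through the
 * generic crypto API.  The helper below shows one way a client could hash a
 * DMA-able buffer with "sha256"; whether the talitos implementation is
 * selected depends on its cra_priority relative to other providers (to force
 * it, a client would request the implementation's cra_driver_name instead of
 * the generic name).  It assumes a kernel recent enough to provide
 * DECLARE_CRYPTO_WAIT()/crypto_wait_req().
 */
#if 0	/* example client code, not built with the driver */
#include <crypto/hash.h>
#include <linux/scatterlist.h>

static int example_sha256_digest(const u8 *buf, unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_ahash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_ahash(tfm);
		return -ENOMEM;
	}

	/* buf must be DMA-able (not on the stack) for h/w offload drivers */
	sg_init_one(&sg, buf, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);
	ahash_request_set_crypt(req, &sg, out, len);

	/* submit the request and sleep until the completion fires */
	err = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);
	crypto_free_ahash(tfm);
	return err;
}
#endif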
6c3f975a 3497static const struct of_device_id talitos_match[] = {
0635b7db
LC
3498#ifdef CONFIG_CRYPTO_DEV_TALITOS1
3499 {
3500 .compatible = "fsl,sec1.0",
3501 },
3502#endif
3503#ifdef CONFIG_CRYPTO_DEV_TALITOS2
9c4a7965
KP
3504 {
3505 .compatible = "fsl,sec2.0",
3506 },
0635b7db 3507#endif
9c4a7965
KP
3508 {},
3509};
3510MODULE_DEVICE_TABLE(of, talitos_match);
3511
1c48a5c9 3512static struct platform_driver talitos_driver = {
4018294b
GL
3513 .driver = {
3514 .name = "talitos",
4018294b
GL
3515 .of_match_table = talitos_match,
3516 },
9c4a7965 3517 .probe = talitos_probe,
596f1034 3518 .remove = talitos_remove,
9c4a7965
KP
3519};
3520
741e8c2d 3521module_platform_driver(talitos_driver);
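/*
 * module_platform_driver() above generates the usual module init/exit
 * boilerplate; it is roughly equivalent to:
 *
 *	static int __init talitos_driver_init(void)
 *	{
 *		return platform_driver_register(&talitos_driver);
 *	}
 *	module_init(talitos_driver_init);
 *
 *	static void __exit talitos_driver_exit(void)
 *	{
 *		platform_driver_unregister(&talitos_driver);
 *	}
 *	module_exit(talitos_driver_exit);
 */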
9c4a7965
KP
3522
3523MODULE_LICENSE("GPL");
3524MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3525MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");