// SPDX-License-Identifier: GPL-2.0-only
/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 */

#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/hash.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <crypto/gcm.h>
#include <crypto/hash.h>
#include "internal.h"
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct gcm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn ghash;
};

struct crypto_gcm_ctx {
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
};

struct crypto_rfc4106_ctx {
	struct crypto_aead *child;
	u8 nonce[4];
};

struct crypto_rfc4106_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

struct crypto_rfc4543_instance_ctx {
	struct crypto_aead_spawn aead;
};

struct crypto_rfc4543_ctx {
	struct crypto_aead *child;
	struct crypto_sync_skcipher *null;
	u8 nonce[4];
};

struct crypto_rfc4543_req_ctx {
	struct aead_request subreq;
};

struct crypto_gcm_ghash_ctx {
	unsigned int cryptlen;
	struct scatterlist *src;
	int (*complete)(struct aead_request *req, u32 flags);
};

struct crypto_gcm_req_priv_ctx {
	u8 iv[16];
	u8 auth_tag[16];
	u8 iauth_tag[16];
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct scatterlist sg;
	struct crypto_gcm_ghash_ctx ghash_ctx;
	union {
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	} u;
};

static struct {
	u8 buf[16];
	struct scatterlist sg;
} *gcm_zeroes;

static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc);

static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

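/*
 * Derive the hash subkey by encrypting a zero block with the CTR cipher
 * using a zero IV (H = E_K(0^128)), then program it as the key of the
 * ghash transform.
 */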
static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ahash *ghash = ctx->ghash;
	struct crypto_skcipher *ctr = ctx->ctr;
	struct {
		be128 hash;
		u8 iv[16];

		struct crypto_wait wait;

		struct scatterlist sg[1];
		struct skcipher_request req;
	} *data;
	int err;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	if (err)
		return err;

	data = kzalloc(sizeof(*data) + crypto_skcipher_reqsize(ctr),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	crypto_init_wait(&data->wait);
	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
	skcipher_request_set_tfm(&data->req, ctr);
	skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
						  CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done,
				      &data->wait);
	skcipher_request_set_crypt(&data->req, data->sg, data->sg,
				   sizeof(data->hash), data->iv);

	err = crypto_wait_req(crypto_skcipher_encrypt(&data->req),
			      &data->wait);

	if (err)
		goto out;

	crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
out:
	kzfree(data);
	return err;
}

static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

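/*
 * Build the per-request IV (the 12-byte nonce followed by a 32-bit counter
 * starting at 1) and scatterlists that prepend the 16-byte auth_tag buffer
 * to the data, so the first CTR block E_K(Y0) is written into auth_tag
 * during the cipher pass.
 */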
static void crypto_gcm_init_common(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	__be32 counter = cpu_to_be32(1);
	struct scatterlist *sg;

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	memcpy(pctx->iv, req->iv, GCM_AES_IV_SIZE);
	memcpy(pctx->iv + GCM_AES_IV_SIZE, &counter, 4);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}
}

static void crypto_gcm_init_crypt(struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct scatterlist *dst;

	dst = req->src == req->dst ? pctx->src : pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_crypt(skreq, pctx->src, dst,
				   cryptlen + sizeof(pctx->auth_tag),
				   pctx->iv);
}

static inline unsigned int gcm_remain(unsigned int len)
{
	len &= 0xfU;
	return len ? 16 - len : 0;
}

static void gcm_hash_len_done(struct crypto_async_request *areq, int err);

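/*
 * GHASH is driven as a chain of update/finup steps over the AAD, the
 * ciphertext, zero padding to a 16-byte boundary, and finally the length
 * block.  Each step may complete asynchronously; the *_done callbacks
 * resume the chain through the matching *_continue helpers.
 */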
static int gcm_hash_update(struct aead_request *req,
			   crypto_completion_t compl,
			   struct scatterlist *src,
			   unsigned int len, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;

	ahash_request_set_callback(ahreq, flags, compl, req);
	ahash_request_set_crypt(ahreq, src, NULL, len);

	return crypto_ahash_update(ahreq);
}

static int gcm_hash_remain(struct aead_request *req,
			   unsigned int remain,
			   crypto_completion_t compl, u32 flags)
{
	return gcm_hash_update(req, compl, &gcm_zeroes->sg, remain, flags);
}

static int gcm_hash_len(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	be128 lengths;

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(gctx->cryptlen * 8);
	memcpy(pctx->iauth_tag, &lengths, 16);
	sg_init_one(&pctx->sg, pctx->iauth_tag, 16);
	ahash_request_set_callback(ahreq, flags, gcm_hash_len_done, req);
	ahash_request_set_crypt(ahreq, &pctx->sg,
				pctx->iauth_tag, sizeof(lengths));

	return crypto_ahash_finup(ahreq);
}

static int gcm_hash_len_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	return gctx->complete(req, flags);
}

static void gcm_hash_len_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_len_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_crypt_remain_continue(struct aead_request *req, u32 flags)
{
	return gcm_hash_len(req, flags) ?:
	       gcm_hash_len_continue(req, flags);
}

static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_crypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int remain;

	remain = gcm_remain(gctx->cryptlen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_crypt_remain_done, flags) ?:
		       gcm_hash_crypt_remain_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}

static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_assoc_remain_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	if (gctx->cryptlen)
		return gcm_hash_update(req, gcm_hash_crypt_done,
				       gctx->src, gctx->cryptlen, flags) ?:
		       gcm_hash_crypt_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}

static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_assoc_continue(struct aead_request *req, u32 flags)
{
	unsigned int remain;

	remain = gcm_remain(req->assoclen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_assoc_remain_done, flags) ?:
		       gcm_hash_assoc_remain_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}

static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_init_continue(struct aead_request *req, u32 flags)
{
	if (req->assoclen)
		return gcm_hash_update(req, gcm_hash_assoc_done,
				       req->src, req->assoclen, flags) ?:
		       gcm_hash_assoc_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}

static void gcm_hash_init_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_init_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));

	ahash_request_set_tfm(ahreq, ctx->ghash);

	ahash_request_set_callback(ahreq, flags, gcm_hash_init_done, req);
	return crypto_ahash_init(ahreq) ?:
	       gcm_hash_init_continue(req, flags);
}

static int gcm_enc_copy_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;

	crypto_xor(auth_tag, pctx->iauth_tag, 16);
	scatterwalk_map_and_copy(auth_tag, req->dst,
				 req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}

static int gcm_encrypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	gctx->src = sg_next(req->src == req->dst ? pctx->src : pctx->dst);
	gctx->cryptlen = req->cryptlen;
	gctx->complete = gcm_enc_copy_hash;

	return gcm_hash(req, flags);
}

static void gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_encrypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

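/*
 * Encryption runs CTR over auth_tag + plaintext first, then hashes the AAD
 * and the resulting ciphertext and copies the final tag to the destination.
 */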
static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	u32 flags = aead_request_flags(req);

	crypto_gcm_init_common(req);
	crypto_gcm_init_crypt(req, req->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_encrypt_done, req);

	return crypto_skcipher_encrypt(skreq) ?:
	       gcm_encrypt_continue(req, flags);
}

static int crypto_gcm_verify(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	crypto_xor(auth_tag, iauth_tag, 16);
	scatterwalk_map_and_copy(iauth_tag, req->src,
				 req->assoclen + cryptlen, authsize, 0);
	return crypto_memneq(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}

static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_verify(req);

	aead_request_complete(req, err);
}

static int gcm_dec_hash_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	crypto_gcm_init_crypt(req, gctx->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_decrypt_done, req);
	return crypto_skcipher_decrypt(skreq) ?: crypto_gcm_verify(req);
}

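/*
 * Decryption hashes the AAD and ciphertext first; the CTR pass and the
 * constant-time tag comparison run from gcm_dec_hash_continue() once the
 * hash has completed.
 */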
static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u32 flags = aead_request_flags(req);

	cryptlen -= authsize;

	crypto_gcm_init_common(req);

	gctx->src = sg_next(pctx->src);
	gctx->cryptlen = cryptlen;
	gctx->complete = gcm_dec_hash_continue;

	return gcm_hash(req, flags);
}

static int crypto_gcm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct gcm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
	unsigned long align;
	int err;

	ghash = crypto_spawn_ahash(&ictx->ghash);
	if (IS_ERR(ghash))
		return PTR_ERR(ghash);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_hash;

	ctx->ctr = ctr;
	ctx->ghash = ghash;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(tfm,
		align + offsetof(struct crypto_gcm_req_priv_ctx, u) +
		max(sizeof(struct skcipher_request) +
		    crypto_skcipher_reqsize(ctr),
		    sizeof(struct ahash_request) +
		    crypto_ahash_reqsize(ghash)));

	return 0;

err_free_hash:
	crypto_free_ahash(ghash);
	return err;
}

static void crypto_gcm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->ghash);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_gcm_free(struct aead_instance *inst)
{
	struct gcm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->ctr);
	crypto_drop_ahash(&ctx->ghash);
	kfree(inst);
}

static int crypto_gcm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *ctr_name,
				    const char *ghash_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *ghash_alg;
	struct hash_alg_common *ghash;
	struct gcm_instance_ctx *ctx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
				    CRYPTO_ALG_TYPE_HASH,
				    CRYPTO_ALG_TYPE_AHASH_MASK |
				    crypto_requires_sync(algt->type,
							 algt->mask));
	if (IS_ERR(ghash_alg))
		return PTR_ERR(ghash_alg);

	ghash = __crypto_hash_alg_common(ghash_alg);

	err = -ENOMEM;
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		goto out_put_ghash;

	ctx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ctx->ghash, ghash,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	err = -EINVAL;
	if (strcmp(ghash->base.cra_name, "ghash") != 0 ||
	    ghash->digestsize != 16)
		goto err_drop_ghash;

	crypto_set_skcipher_spawn(&ctx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_ghash;

	ctr = crypto_spawn_skcipher_alg(&ctx->ctr);

	/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
	err = -EINVAL;
	if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
	    crypto_skcipher_alg_ivsize(ctr) != 16 ||
	    ctr->base.cra_blocksize != 1)
		goto out_put_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "gcm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm_base(%s,%s)", ctr->base.cra_driver_name,
		     ghash_alg->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	inst->alg.base.cra_flags = (ghash->base.cra_flags |
				    ctr->base.cra_flags) & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (ghash->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = ghash->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.ivsize = GCM_AES_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.init = crypto_gcm_init_tfm;
	inst->alg.exit = crypto_gcm_exit_tfm;
	inst->alg.setkey = crypto_gcm_setkey;
	inst->alg.setauthsize = crypto_gcm_setauthsize;
	inst->alg.encrypt = crypto_gcm_encrypt;
	inst->alg.decrypt = crypto_gcm_decrypt;

	inst->free = crypto_gcm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_put_ctr;

out_put_ghash:
	crypto_mod_put(ghash_alg);
	return err;

out_put_ctr:
	crypto_drop_skcipher(&ctx->ctr);
err_drop_ghash:
	crypto_drop_ahash(&ctx->ghash);
err_free_inst:
	kfree(inst);
	goto out_put_ghash;
}

static int crypto_gcm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_gcm_create_common(tmpl, tb, ctr_name, "ghash");
}

static int crypto_gcm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *ghash_name;

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	ghash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(ghash_name))
		return PTR_ERR(ghash_name);

	return crypto_gcm_create_common(tmpl, tb, ctr_name, ghash_name);
}

static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, keylen);
}

static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	return crypto_aead_setauthsize(ctx->child, authsize);
}

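/*
 * Convert an RFC 4106 request into a plain GCM request: the child's 12-byte
 * IV is the 4-byte key nonce followed by the 8-byte explicit IV from the
 * request, and the first assoclen - 8 bytes of associated data are copied
 * next to the IV so the child sees them as one contiguous buffer.
 */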
static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
{
	struct crypto_rfc4106_req_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	scatterwalk_map_and_copy(iv + GCM_AES_IV_SIZE, req->src, 0, req->assoclen - 8, 0);

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}

static int crypto_rfc4106_encrypt(struct aead_request *req)
{
	int err;

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	req = crypto_rfc4106_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4106_decrypt(struct aead_request *req)
{
	int err;

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	req = crypto_rfc4106_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4106_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4106_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 24);

	return 0;
}

static void crypto_rfc4106_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4106_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}

static int crypto_rfc4106_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);

	inst->alg.ivsize = GCM_RFC4106_IV_SIZE;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4106_init_tfm;
	inst->alg.exit = crypto_rfc4106_exit_tfm;

	inst->alg.setkey = crypto_rfc4106_setkey;
	inst->alg.setauthsize = crypto_rfc4106_setauthsize;
	inst->alg.encrypt = crypto_rfc4106_encrypt;
	inst->alg.decrypt = crypto_rfc4106_decrypt;

	inst->free = crypto_rfc4106_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, keylen);
}

static int crypto_rfc4543_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);

	if (authsize != 16)
		return -EINVAL;

	return crypto_aead_setauthsize(ctx->child, authsize);
}

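/*
 * RFC 4543 (GMAC) authenticates the associated data and the payload but
 * encrypts nothing: everything is handed to the child GCM as associated
 * data, and the child only generates (encrypt) or verifies (decrypt) the
 * 16-byte tag.
 */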
static int crypto_rfc4543_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_rfc4543_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	unsigned int authsize = crypto_aead_authsize(aead);
	u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),
			   crypto_aead_alignmask(ctx->child) + 1);
	int err;

	if (req->src != req->dst) {
		err = crypto_rfc4543_copy_src_to_dst(req, enc);
		if (err)
			return err;
	}

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, ctx->child);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       enc ? 0 : authsize, iv);
	aead_request_set_ad(subreq, req->assoclen + req->cryptlen -
				    subreq->cryptlen);

	return enc ? crypto_aead_encrypt(subreq) : crypto_aead_decrypt(subreq);
}

static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int nbytes = req->assoclen + req->cryptlen -
			      (enc ? 0 : authsize);
	SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->null);

	skcipher_request_set_sync_tfm(nreq, ctx->null);
	skcipher_request_set_callback(nreq, req->base.flags, NULL, NULL);
	skcipher_request_set_crypt(nreq, req->src, req->dst, nbytes, NULL);

	return crypto_skcipher_encrypt(nreq);
}

static int crypto_rfc4543_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       crypto_rfc4543_crypt(req, true);
}

static int crypto_rfc4543_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       crypto_rfc4543_crypt(req, false);
}

static int crypto_rfc4543_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_rfc4543_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead;
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	struct crypto_sync_skcipher *null;
	unsigned long align;
	int err = 0;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	null = crypto_get_default_null_skcipher();
	err = PTR_ERR(null);
	if (IS_ERR(null))
		goto err_free_aead;

	ctx->child = aead;
	ctx->null = null;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4543_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + GCM_AES_IV_SIZE);

	return 0;

err_free_aead:
	crypto_free_aead(aead);
	return err;
}

static void crypto_rfc4543_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
	crypto_put_default_null_skcipher();
}

static void crypto_rfc4543_free(struct aead_instance *inst)
{
	struct crypto_rfc4543_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead);

	kfree(inst);
}

static int crypto_rfc4543_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	struct crypto_rfc4543_instance_ctx *ctx;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	spawn = &ctx->aead;
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4543(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4543(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx);

	inst->alg.ivsize = GCM_RFC4543_IV_SIZE;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4543_init_tfm;
	inst->alg.exit = crypto_rfc4543_exit_tfm;

	inst->alg.setkey = crypto_rfc4543_setkey;
	inst->alg.setauthsize = crypto_rfc4543_setauthsize;
	inst->alg.encrypt = crypto_rfc4543_encrypt;
	inst->alg.decrypt = crypto_rfc4543_decrypt;

	inst->free = crypto_rfc4543_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_gcm_tmpls[] = {
	{
		.name = "gcm_base",
		.create = crypto_gcm_base_create,
		.module = THIS_MODULE,
	}, {
		.name = "gcm",
		.create = crypto_gcm_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4106",
		.create = crypto_rfc4106_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4543",
		.create = crypto_rfc4543_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_gcm_module_init(void)
{
	int err;

	gcm_zeroes = kzalloc(sizeof(*gcm_zeroes), GFP_KERNEL);
	if (!gcm_zeroes)
		return -ENOMEM;

	sg_init_one(&gcm_zeroes->sg, gcm_zeroes->buf, sizeof(gcm_zeroes->buf));

	err = crypto_register_templates(crypto_gcm_tmpls,
					ARRAY_SIZE(crypto_gcm_tmpls));
	if (err)
		kfree(gcm_zeroes);

	return err;
}

static void __exit crypto_gcm_module_exit(void)
{
	kfree(gcm_zeroes);
	crypto_unregister_templates(crypto_gcm_tmpls,
				    ARRAY_SIZE(crypto_gcm_tmpls));
}

subsys_initcall(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
MODULE_ALIAS_CRYPTO("gcm_base");
MODULE_ALIAS_CRYPTO("rfc4106");
MODULE_ALIAS_CRYPTO("rfc4543");
MODULE_ALIAS_CRYPTO("gcm");