ccm.c 22 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893
  1. /*
  2. * CCM: Counter with CBC-MAC
  3. *
  4. * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
  5. *
  6. * This program is free software; you can redistribute it and/or modify it
  7. * under the terms of the GNU General Public License as published by the Free
  8. * Software Foundation; either version 2 of the License, or (at your option)
  9. * any later version.
  10. *
  11. */
  12. #include <crypto/internal/aead.h>
  13. #include <crypto/internal/skcipher.h>
  14. #include <crypto/scatterwalk.h>
  15. #include <linux/err.h>
  16. #include <linux/init.h>
  17. #include <linux/kernel.h>
  18. #include <linux/module.h>
  19. #include <linux/slab.h>
  20. #include "internal.h"
/* Per-instance context for the "ccm"/"ccm_base" templates: spawns for
 * the CTR-mode skcipher (payload encryption) and the raw block cipher
 * (CBC-MAC), instantiated per-tfm in crypto_ccm_init_tfm().
 */
struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_spawn cipher;
};
/* Per-tfm context: the two child transforms keyed in crypto_ccm_setkey(). */
struct crypto_ccm_ctx {
	struct crypto_cipher *cipher;	/* raw block cipher for the CBC-MAC */
	struct crypto_ablkcipher *ctr;	/* CTR mode for payload + tag encryption */
};
/* Per-tfm context for the rfc4309 wrapper. */
struct crypto_rfc4309_ctx {
	struct crypto_aead *child;	/* inner CCM aead */
	u8 nonce[3];			/* salt taken from the key tail in setkey */
};
/* Per-request private context, placed (aligned) in the aead request ctx. */
struct crypto_ccm_req_priv_ctx {
	u8 odata[16];		/* running CBC-MAC / tag block */
	u8 idata[16];		/* buffer for a partial input block */
	u8 auth_tag[16];	/* received tag saved during decrypt */
	u32 ilen;		/* valid bytes currently buffered in idata */
	u32 flags;		/* request flags cached for crypto_yield() */
	struct scatterlist src[2];	/* tag block chained before req->src */
	struct scatterlist dst[2];	/* tag block chained before req->dst */
	/* Must stay last: crypto_ccm_init_tfm() sizes the request ctx as
	 * this struct plus the CTR child's request size.
	 */
	struct ablkcipher_request abreq;
};
  43. static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
  44. struct aead_request *req)
  45. {
  46. unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
  47. return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
  48. }
  49. static int set_msg_len(u8 *block, unsigned int msglen, int csize)
  50. {
  51. __be32 data;
  52. memset(block, 0, csize);
  53. block += csize;
  54. if (csize >= 4)
  55. csize = 4;
  56. else if (msglen > (1 << (8 * csize)))
  57. return -EOVERFLOW;
  58. data = cpu_to_be32(msglen);
  59. memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
  60. return 0;
  61. }
  62. static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
  63. unsigned int keylen)
  64. {
  65. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  66. struct crypto_ablkcipher *ctr = ctx->ctr;
  67. struct crypto_cipher *tfm = ctx->cipher;
  68. int err = 0;
  69. crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
  70. crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
  71. CRYPTO_TFM_REQ_MASK);
  72. err = crypto_ablkcipher_setkey(ctr, key, keylen);
  73. crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
  74. CRYPTO_TFM_RES_MASK);
  75. if (err)
  76. goto out;
  77. crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK);
  78. crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) &
  79. CRYPTO_TFM_REQ_MASK);
  80. err = crypto_cipher_setkey(tfm, key, keylen);
  81. crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) &
  82. CRYPTO_TFM_RES_MASK);
  83. out:
  84. return err;
  85. }
  86. static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
  87. unsigned int authsize)
  88. {
  89. switch (authsize) {
  90. case 4:
  91. case 6:
  92. case 8:
  93. case 10:
  94. case 12:
  95. case 14:
  96. case 16:
  97. break;
  98. default:
  99. return -EINVAL;
  100. }
  101. return 0;
  102. }
/* Build the B_0 block for the CBC-MAC from the request IV.
 *
 * On entry req->iv holds the flags octet (whose low bits carry
 * L' = L - 1) followed by the nonce; the trailing L bytes are
 * overwritten here with the encoded message length.
 *
 * Returns 0 or -EOVERFLOW from set_msg_len().
 */
static int format_input(u8 *info, struct aead_request *req,
			unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int lp = req->iv[0];	/* L' from the flags octet */
	unsigned int l = lp + 1;	/* L: width of the length field */
	unsigned int m;

	m = crypto_aead_authsize(aead);

	memcpy(info, req->iv, 16);

	/* format control info per RFC 3610 and
	 * NIST Special Publication 800-38C
	 */
	*info |= (8 * ((m - 2) / 2));	/* encode tag length M */
	if (req->assoclen)
		*info |= 64;		/* Adata flag */

	return set_msg_len(info + 16 - l, cryptlen, l);
}
  120. static int format_adata(u8 *adata, unsigned int a)
  121. {
  122. int len = 0;
  123. /* add control info for associated data
  124. * RFC 3610 and NIST Special Publication 800-38C
  125. */
  126. if (a < 65280) {
  127. *(__be16 *)adata = cpu_to_be16(a);
  128. len = 2;
  129. } else {
  130. *(__be16 *)adata = cpu_to_be16(0xfffe);
  131. *(__be32 *)&adata[2] = cpu_to_be32(a);
  132. len = 6;
  133. }
  134. return len;
  135. }
/* Fold @n bytes of @data into the running CBC-MAC held in pctx->odata.
 *
 * pctx->idata buffers a partial block between calls (pctx->ilen valid
 * bytes); only full 16-byte blocks are XORed and encrypted here, any
 * tail is stashed for the next call or for final padding in
 * get_data_to_compute().
 */
static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n,
			struct crypto_ccm_req_priv_ctx *pctx)
{
	unsigned int bs = 16;
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int datalen, getlen;

	datalen = n;

	/* first time in here, block may be partially filled. */
	getlen = bs - pctx->ilen;
	if (datalen >= getlen) {
		/* Complete the buffered block and absorb it. */
		memcpy(idata + pctx->ilen, data, getlen);
		crypto_xor(odata, idata, bs);
		crypto_cipher_encrypt_one(tfm, odata, odata);
		datalen -= getlen;
		data += getlen;
		pctx->ilen = 0;
	}

	/* now encrypt rest of data */
	while (datalen >= bs) {
		crypto_xor(odata, data, bs);
		crypto_cipher_encrypt_one(tfm, odata, odata);
		datalen -= bs;
		data += bs;
	}

	/* check and see if there's leftover data that wasn't
	 * enough to fill a block.
	 */
	if (datalen) {
		memcpy(idata + pctx->ilen, data, datalen);
		pctx->ilen += datalen;
	}
}
/* Walk the scatterlist @sg and feed @len bytes through compute_mac(),
 * then zero-pad and absorb any buffered tail so the MAC state ends on
 * a block boundary.
 */
static void get_data_to_compute(struct crypto_cipher *tfm,
				struct crypto_ccm_req_priv_ctx *pctx,
				struct scatterlist *sg, unsigned int len)
{
	struct scatter_walk walk;
	u8 *data_src;
	int n;

	scatterwalk_start(&walk, sg);

	while (len) {
		n = scatterwalk_clamp(&walk, len);
		if (!n) {
			/* Hit a segment boundary: move to the next entry. */
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		data_src = scatterwalk_map(&walk);

		compute_mac(tfm, data_src, n, pctx);
		len -= n;

		scatterwalk_unmap(data_src);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
		if (len)
			crypto_yield(pctx->flags);
	}

	/* any leftover needs padding and then encrypted */
	if (pctx->ilen) {
		int padlen;
		u8 *odata = pctx->odata;
		u8 *idata = pctx->idata;

		padlen = 16 - pctx->ilen;
		memset(idata + pctx->ilen, 0, padlen);
		crypto_xor(odata, idata, 16);
		crypto_cipher_encrypt_one(tfm, odata, odata);
		pctx->ilen = 0;
	}
}
/* Compute the CBC-MAC over B_0, the formatted associated data and the
 * plaintext @plain, leaving the (not yet encrypted) tag in pctx->odata.
 *
 * Returns 0 or a negative errno from format_input() (e.g. -EOVERFLOW
 * when @cryptlen does not fit the IV's length field).
 */
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_cipher *cipher = ctx->cipher;
	unsigned int assoclen = req->assoclen;
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	/* encrypt first block to use as start in computing mac */
	crypto_cipher_encrypt_one(cipher, odata, odata);

	/* format associated data and compute into mac */
	if (assoclen) {
		/* Length header is buffered in idata; the data itself
		 * follows via the scatterlist walk.
		 */
		pctx->ilen = format_adata(idata, assoclen);
		get_data_to_compute(cipher, pctx, req->assoc, req->assoclen);
	} else {
		pctx->ilen = 0;
	}

	/* compute plaintext into mac */
	if (cryptlen)
		get_data_to_compute(cipher, pctx, plain, cryptlen);

out:
	return err;
}
  234. static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
  235. {
  236. struct aead_request *req = areq->data;
  237. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  238. struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  239. u8 *odata = pctx->odata;
  240. if (!err)
  241. scatterwalk_map_and_copy(odata, req->dst, req->cryptlen,
  242. crypto_aead_authsize(aead), 1);
  243. aead_request_complete(req, err);
  244. }
  245. static inline int crypto_ccm_check_iv(const u8 *iv)
  246. {
  247. /* 2 <= L <= 8, so 1 <= L' <= 7. */
  248. if (1 > iv[0] || iv[0] > 7)
  249. return -EINVAL;
  250. return 0;
  251. }
/* CCM encrypt: CBC-MAC the plaintext, then CTR-encrypt tag and
 * payload in one pass by chaining the tag block in front of the data.
 *
 * Returns 0, a negative errno, or -EINPROGRESS/-EBUSY when the CTR
 * child completes asynchronously (crypto_ccm_encrypt_done then copies
 * the tag).
 */
static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* MAC first; the unencrypted tag lands in odata. */
	err = crypto_ccm_auth(req, req->src, cryptlen);
	if (err)
		return err;

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	/* Chain the 16-byte tag block before the payload so CTR block 0
	 * encrypts the tag and blocks 1..n encrypt the data.
	 */
	sg_init_table(pctx->src, 2);
	sg_set_buf(pctx->src, odata, 16);
	scatterwalk_sg_chain(pctx->src, 2, req->src);

	dst = pctx->src;
	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 2);
		sg_set_buf(pctx->dst, odata, 16);
		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
		dst = pctx->dst;
	}

	ablkcipher_request_set_tfm(abreq, ctx->ctr);
	ablkcipher_request_set_callback(abreq, pctx->flags,
					crypto_ccm_encrypt_done, req);
	ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_ablkcipher_encrypt(abreq);
	if (err)
		/* Includes -EINPROGRESS/-EBUSY: the callback finishes up. */
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, req->dst, cryptlen,
				 crypto_aead_authsize(aead), 1);

	return err;
}
/* Async completion for the CTR decrypt pass: recompute the CBC-MAC
 * over the recovered plaintext in req->dst and compare it in constant
 * time against the transmitted tag saved in pctx->auth_tag.
 */
static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
				    int err)
{
	struct aead_request *req = areq->data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	if (!err) {
		err = crypto_ccm_auth(req, req->dst, cryptlen);
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}
/* CCM decrypt: CTR-decrypt tag + ciphertext, then recompute the
 * CBC-MAC over the plaintext and verify it against the received tag.
 *
 * Returns 0, -EBADMSG on authentication failure, -EINVAL when the
 * input is shorter than the tag, or another negative errno (including
 * -EINPROGRESS/-EBUSY from the async CTR step, in which case
 * crypto_ccm_decrypt_done finishes the verification).
 */
static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct ablkcipher_request *abreq = &pctx->abreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	if (cryptlen < authsize)
		return -EINVAL;
	cryptlen -= authsize;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Save the transmitted tag, then decrypt it with counter 0 by
	 * chaining it in front of the ciphertext.
	 */
	scatterwalk_map_and_copy(authtag, req->src, cryptlen, authsize, 0);

	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 2);
	sg_set_buf(pctx->src, authtag, 16);
	scatterwalk_sg_chain(pctx->src, 2, req->src);

	dst = pctx->src;
	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 2);
		sg_set_buf(pctx->dst, authtag, 16);
		scatterwalk_sg_chain(pctx->dst, 2, req->dst);
		dst = pctx->dst;
	}

	ablkcipher_request_set_tfm(abreq, ctx->ctr);
	ablkcipher_request_set_callback(abreq, pctx->flags,
					crypto_ccm_decrypt_done, req);
	ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_ablkcipher_decrypt(abreq);
	if (err)
		return err;

	/* Synchronous path: MAC the recovered plaintext ourselves. */
	err = crypto_ccm_auth(req, req->dst, cryptlen);
	if (err)
		return err;

	/* verify */
	if (crypto_memneq(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}
/* Instantiate the child transforms and size the per-request context.
 *
 * Returns 0, or a negative errno when a child cannot be allocated
 * (the already-created cipher is freed if the CTR spawn fails).
 */
static int crypto_ccm_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct ccm_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;
	struct crypto_ablkcipher *ctr;
	unsigned long align;
	int err;

	cipher = crypto_spawn_cipher(&ictx->cipher);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_cipher;

	ctx->cipher = cipher;
	ctx->ctr = ctr;

	/* Request ctx holds the aligned private context followed by the
	 * CTR child's own request (abreq must be the struct's last field).
	 */
	align = crypto_tfm_alg_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = align +
				sizeof(struct crypto_ccm_req_priv_ctx) +
				crypto_ablkcipher_reqsize(ctr);

	return 0;

err_free_cipher:
	crypto_free_cipher(cipher);
	return err;
}
  386. static void crypto_ccm_exit_tfm(struct crypto_tfm *tfm)
  387. {
  388. struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm);
  389. crypto_free_cipher(ctx->cipher);
  390. crypto_free_ablkcipher(ctx->ctr);
  391. }
/* Common instance constructor for the "ccm" and "ccm_base" templates.
 *
 * Looks up the raw block cipher, grabs the CTR skcipher, validates
 * both (16-byte block cipher; byte-granular CTR with a 16-byte IV)
 * and fills in the aead algorithm descriptor.
 *
 * Returns the new instance or an ERR_PTR; the module reference taken
 * on the cipher by the lookup is always dropped before returning.
 */
static struct crypto_instance *crypto_ccm_alloc_common(struct rtattr **tb,
						       const char *full_name,
						       const char *ctr_name,
						       const char *cipher_name)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_alg *ctr;
	struct crypto_alg *cipher;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER,
				       CRYPTO_ALG_TYPE_MASK);
	err = PTR_ERR(cipher);
	if (IS_ERR(cipher))
		return ERR_PTR(err);

	/* CCM is only defined over 128-bit block ciphers. */
	err = -EINVAL;
	if (cipher->cra_blocksize != 16)
		goto out_put_cipher;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_cipher;

	ictx = crypto_instance_ctx(inst);

	err = crypto_init_spawn(&ictx->cipher, cipher, inst,
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, inst);
	err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_cipher;

	ctr = crypto_skcipher_spawn_alg(&ictx->ctr);

	/* Not a stream cipher? */
	err = -EINVAL;
	if (ctr->cra_blocksize != 1)
		goto err_drop_ctr;

	/* We want the real thing! */
	if (ctr->cra_ablkcipher.ivsize != 16)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->cra_driver_name,
		     cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
	inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = cipher->cra_priority + ctr->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = cipher->cra_alignmask | ctr->cra_alignmask |
				  (__alignof__(u32) - 1);
	inst->alg.cra_type = &crypto_aead_type;
	inst->alg.cra_aead.ivsize = 16;
	inst->alg.cra_aead.maxauthsize = 16;
	inst->alg.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.cra_init = crypto_ccm_init_tfm;
	inst->alg.cra_exit = crypto_ccm_exit_tfm;
	inst->alg.cra_aead.setkey = crypto_ccm_setkey;
	inst->alg.cra_aead.setauthsize = crypto_ccm_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_ccm_encrypt;
	inst->alg.cra_aead.decrypt = crypto_ccm_decrypt;

out:
	crypto_mod_put(cipher);
	return inst;

err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_cipher:
	crypto_drop_spawn(&ictx->cipher);
err_free_inst:
	kfree(inst);
out_put_cipher:
	inst = ERR_PTR(err);
	goto out;
}
  475. static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb)
  476. {
  477. int err;
  478. const char *cipher_name;
  479. char ctr_name[CRYPTO_MAX_ALG_NAME];
  480. char full_name[CRYPTO_MAX_ALG_NAME];
  481. cipher_name = crypto_attr_alg_name(tb[1]);
  482. err = PTR_ERR(cipher_name);
  483. if (IS_ERR(cipher_name))
  484. return ERR_PTR(err);
  485. if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
  486. cipher_name) >= CRYPTO_MAX_ALG_NAME)
  487. return ERR_PTR(-ENAMETOOLONG);
  488. if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
  489. CRYPTO_MAX_ALG_NAME)
  490. return ERR_PTR(-ENAMETOOLONG);
  491. return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name);
  492. }
  493. static void crypto_ccm_free(struct crypto_instance *inst)
  494. {
  495. struct ccm_instance_ctx *ctx = crypto_instance_ctx(inst);
  496. crypto_drop_spawn(&ctx->cipher);
  497. crypto_drop_skcipher(&ctx->ctr);
  498. kfree(inst);
  499. }
/* "ccm(cipher)" template: CTR transform name derived internally. */
static struct crypto_template crypto_ccm_tmpl = {
	.name = "ccm",
	.alloc = crypto_ccm_alloc,
	.free = crypto_ccm_free,
	.module = THIS_MODULE,
};
  506. static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb)
  507. {
  508. int err;
  509. const char *ctr_name;
  510. const char *cipher_name;
  511. char full_name[CRYPTO_MAX_ALG_NAME];
  512. ctr_name = crypto_attr_alg_name(tb[1]);
  513. err = PTR_ERR(ctr_name);
  514. if (IS_ERR(ctr_name))
  515. return ERR_PTR(err);
  516. cipher_name = crypto_attr_alg_name(tb[2]);
  517. err = PTR_ERR(cipher_name);
  518. if (IS_ERR(cipher_name))
  519. return ERR_PTR(err);
  520. if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
  521. ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
  522. return ERR_PTR(-ENAMETOOLONG);
  523. return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name);
  524. }
/* "ccm_base(ctr,cipher)" template: both children named by the caller. */
static struct crypto_template crypto_ccm_base_tmpl = {
	.name = "ccm_base",
	.alloc = crypto_ccm_base_alloc,
	.free = crypto_ccm_free,
	.module = THIS_MODULE,
};
  531. static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
  532. unsigned int keylen)
  533. {
  534. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
  535. struct crypto_aead *child = ctx->child;
  536. int err;
  537. if (keylen < 3)
  538. return -EINVAL;
  539. keylen -= 3;
  540. memcpy(ctx->nonce, key + keylen, 3);
  541. crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  542. crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
  543. CRYPTO_TFM_REQ_MASK);
  544. err = crypto_aead_setkey(child, key, keylen);
  545. crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
  546. CRYPTO_TFM_RES_MASK);
  547. return err;
  548. }
  549. static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
  550. unsigned int authsize)
  551. {
  552. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
  553. switch (authsize) {
  554. case 8:
  555. case 12:
  556. case 16:
  557. break;
  558. default:
  559. return -EINVAL;
  560. }
  561. return crypto_aead_setauthsize(ctx->child, authsize);
  562. }
/* Convert an RFC 4309 request into a request for the inner CCM aead.
 *
 * Builds the 16-byte CCM IV — flags octet with L' = 3, the 3-byte
 * nonce saved from the key, then the 8-byte per-request IV — in space
 * reserved after the subrequest by crypto_rfc4309_init_tfm(), and
 * mirrors src/dst/assoc and completion info onto the subrequest.
 */
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;
	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
	aead_request_set_assoc(subreq, req->assoc, req->assoclen);

	return subreq;
}
  582. static int crypto_rfc4309_encrypt(struct aead_request *req)
  583. {
  584. req = crypto_rfc4309_crypt(req);
  585. return crypto_aead_encrypt(req);
  586. }
  587. static int crypto_rfc4309_decrypt(struct aead_request *req)
  588. {
  589. req = crypto_rfc4309_crypt(req);
  590. return crypto_aead_decrypt(req);
  591. }
/* Instantiate the inner CCM aead and reserve per-request space for
 * the subrequest plus an aligned 16-byte IV (laid out as consumed by
 * crypto_rfc4309_crypt()).
 */
static int crypto_rfc4309_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = sizeof(struct aead_request) +
				ALIGN(crypto_aead_reqsize(aead),
				      crypto_tfm_ctx_alignment()) +
				align + 16;

	return 0;
}
/* Drop the inner CCM aead acquired in crypto_rfc4309_init_tfm(). */
static void crypto_rfc4309_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->child);
}
/* "rfc4309(ccm(...))" template constructor: wraps a CCM aead for
 * IPsec ESP, validating that the inner algorithm is a byte-granular
 * transform with a 16-byte IV before filling in the descriptor.
 *
 * Returns the new instance or an ERR_PTR.
 */
static struct crypto_instance *crypto_rfc4309_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct crypto_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		return ERR_PTR(err);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return ERR_PTR(-EINVAL);

	ccm_name = crypto_attr_alg_name(tb[1]);
	err = PTR_ERR(ccm_name);
	if (IS_ERR(ccm_name))
		return ERR_PTR(err);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = crypto_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, inst);
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_aead_spawn_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (alg->cra_aead.ivsize != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
	inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_nivaead_type;

	/* 8-byte explicit per-packet IV; nonce comes from the key. */
	inst->alg.cra_aead.ivsize = 8;
	inst->alg.cra_aead.maxauthsize = 16;

	inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.cra_init = crypto_rfc4309_init_tfm;
	inst->alg.cra_exit = crypto_rfc4309_exit_tfm;

	inst->alg.cra_aead.setkey = crypto_rfc4309_setkey;
	inst->alg.cra_aead.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.cra_aead.encrypt = crypto_rfc4309_encrypt;
	inst->alg.cra_aead.decrypt = crypto_rfc4309_decrypt;

	inst->alg.cra_aead.geniv = "seqiv";

out:
	return inst;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}
  683. static void crypto_rfc4309_free(struct crypto_instance *inst)
  684. {
  685. crypto_drop_spawn(crypto_instance_ctx(inst));
  686. kfree(inst);
  687. }
/* "rfc4309(aead)" template: CCM for IPsec ESP. */
static struct crypto_template crypto_rfc4309_tmpl = {
	.name = "rfc4309",
	.alloc = crypto_rfc4309_alloc,
	.free = crypto_rfc4309_free,
	.module = THIS_MODULE,
};
  694. static int __init crypto_ccm_module_init(void)
  695. {
  696. int err;
  697. err = crypto_register_template(&crypto_ccm_base_tmpl);
  698. if (err)
  699. goto out;
  700. err = crypto_register_template(&crypto_ccm_tmpl);
  701. if (err)
  702. goto out_undo_base;
  703. err = crypto_register_template(&crypto_rfc4309_tmpl);
  704. if (err)
  705. goto out_undo_ccm;
  706. out:
  707. return err;
  708. out_undo_ccm:
  709. crypto_unregister_template(&crypto_ccm_tmpl);
  710. out_undo_base:
  711. crypto_unregister_template(&crypto_ccm_base_tmpl);
  712. goto out;
  713. }
/* Unregister the templates in reverse registration order. */
static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc4309_tmpl);
	crypto_unregister_template(&crypto_ccm_tmpl);
	crypto_unregister_template(&crypto_ccm_base_tmpl);
}
module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
/* Allow auto-loading by the template names instantiated above. */
MODULE_ALIAS("ccm_base");
MODULE_ALIAS("rfc4309");