gcm.c 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380
  1. /*
  2. * GCM: Galois/Counter Mode.
  3. *
  4. * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
  5. *
  6. * This program is free software; you can redistribute it and/or modify it
  7. * under the terms of the GNU General Public License version 2 as published
  8. * by the Free Software Foundation.
  9. */
  10. #include <crypto/gf128mul.h>
  11. #include <crypto/internal/aead.h>
  12. #include <crypto/internal/skcipher.h>
  13. #include <crypto/internal/hash.h>
  14. #include <crypto/scatterwalk.h>
  15. #include <crypto/hash.h>
  16. #include "internal.h"
  17. #include <linux/completion.h>
  18. #include <linux/err.h>
  19. #include <linux/init.h>
  20. #include <linux/kernel.h>
  21. #include <linux/module.h>
  22. #include <linux/slab.h>
  23. struct gcm_instance_ctx {
  24. struct crypto_skcipher_spawn ctr;
  25. struct crypto_ahash_spawn ghash;
  26. };
  27. struct crypto_gcm_ctx {
  28. struct crypto_ablkcipher *ctr;
  29. struct crypto_ahash *ghash;
  30. };
  31. struct crypto_rfc4106_ctx {
  32. struct crypto_aead *child;
  33. u8 nonce[4];
  34. };
  35. struct crypto_rfc4543_ctx {
  36. struct crypto_aead *child;
  37. u8 nonce[4];
  38. };
  39. struct crypto_rfc4543_req_ctx {
  40. u8 auth_tag[16];
  41. u8 assocbuf[32];
  42. struct scatterlist cipher[1];
  43. struct scatterlist payload[2];
  44. struct scatterlist assoc[2];
  45. struct aead_request subreq;
  46. };
  47. struct crypto_gcm_ghash_ctx {
  48. unsigned int cryptlen;
  49. struct scatterlist *src;
  50. void (*complete)(struct aead_request *req, int err);
  51. };
  52. struct crypto_gcm_req_priv_ctx {
  53. u8 auth_tag[16];
  54. u8 iauth_tag[16];
  55. struct scatterlist src[2];
  56. struct scatterlist dst[2];
  57. struct crypto_gcm_ghash_ctx ghash_ctx;
  58. union {
  59. struct ahash_request ahreq;
  60. struct ablkcipher_request abreq;
  61. } u;
  62. };
  63. struct crypto_gcm_setkey_result {
  64. int err;
  65. struct completion completion;
  66. };
  67. static void *gcm_zeroes;
  68. static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
  69. struct aead_request *req)
  70. {
  71. unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
  72. return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
  73. }
  74. static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
  75. {
  76. struct crypto_gcm_setkey_result *result = req->data;
  77. if (err == -EINPROGRESS)
  78. return;
  79. result->err = err;
  80. complete(&result->completion);
  81. }
  82. static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
  83. unsigned int keylen)
  84. {
  85. struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
  86. struct crypto_ahash *ghash = ctx->ghash;
  87. struct crypto_ablkcipher *ctr = ctx->ctr;
  88. struct {
  89. be128 hash;
  90. u8 iv[8];
  91. struct crypto_gcm_setkey_result result;
  92. struct scatterlist sg[1];
  93. struct ablkcipher_request req;
  94. } *data;
  95. int err;
  96. crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
  97. crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
  98. CRYPTO_TFM_REQ_MASK);
  99. err = crypto_ablkcipher_setkey(ctr, key, keylen);
  100. if (err)
  101. return err;
  102. crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
  103. CRYPTO_TFM_RES_MASK);
  104. data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
  105. GFP_KERNEL);
  106. if (!data)
  107. return -ENOMEM;
  108. init_completion(&data->result.completion);
  109. sg_init_one(data->sg, &data->hash, sizeof(data->hash));
  110. ablkcipher_request_set_tfm(&data->req, ctr);
  111. ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
  112. CRYPTO_TFM_REQ_MAY_BACKLOG,
  113. crypto_gcm_setkey_done,
  114. &data->result);
  115. ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
  116. sizeof(data->hash), data->iv);
  117. err = crypto_ablkcipher_encrypt(&data->req);
  118. if (err == -EINPROGRESS || err == -EBUSY) {
  119. err = wait_for_completion_interruptible(
  120. &data->result.completion);
  121. if (!err)
  122. err = data->result.err;
  123. }
  124. if (err)
  125. goto out;
  126. crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
  127. crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
  128. CRYPTO_TFM_REQ_MASK);
  129. err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
  130. crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) &
  131. CRYPTO_TFM_RES_MASK);
  132. out:
  133. kfree(data);
  134. return err;
  135. }
  136. static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
  137. unsigned int authsize)
  138. {
  139. switch (authsize) {
  140. case 4:
  141. case 8:
  142. case 12:
  143. case 13:
  144. case 14:
  145. case 15:
  146. case 16:
  147. break;
  148. default:
  149. return -EINVAL;
  150. }
  151. return 0;
  152. }
  153. static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
  154. struct aead_request *req,
  155. unsigned int cryptlen)
  156. {
  157. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  158. struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
  159. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  160. struct scatterlist *dst;
  161. __be32 counter = cpu_to_be32(1);
  162. memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
  163. memcpy(req->iv + 12, &counter, 4);
  164. sg_init_table(pctx->src, 2);
  165. sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
  166. scatterwalk_sg_chain(pctx->src, 2, req->src);
  167. dst = pctx->src;
  168. if (req->src != req->dst) {
  169. sg_init_table(pctx->dst, 2);
  170. sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
  171. scatterwalk_sg_chain(pctx->dst, 2, req->dst);
  172. dst = pctx->dst;
  173. }
  174. ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
  175. ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
  176. cryptlen + sizeof(pctx->auth_tag),
  177. req->iv);
  178. }
  179. static inline unsigned int gcm_remain(unsigned int len)
  180. {
  181. len &= 0xfU;
  182. return len ? 16 - len : 0;
  183. }
  184. static void gcm_hash_len_done(struct crypto_async_request *areq, int err);
  185. static void gcm_hash_final_done(struct crypto_async_request *areq, int err);
  186. static int gcm_hash_update(struct aead_request *req,
  187. struct crypto_gcm_req_priv_ctx *pctx,
  188. crypto_completion_t complete,
  189. struct scatterlist *src,
  190. unsigned int len)
  191. {
  192. struct ahash_request *ahreq = &pctx->u.ahreq;
  193. ahash_request_set_callback(ahreq, aead_request_flags(req),
  194. complete, req);
  195. ahash_request_set_crypt(ahreq, src, NULL, len);
  196. return crypto_ahash_update(ahreq);
  197. }
  198. static int gcm_hash_remain(struct aead_request *req,
  199. struct crypto_gcm_req_priv_ctx *pctx,
  200. unsigned int remain,
  201. crypto_completion_t complete)
  202. {
  203. struct ahash_request *ahreq = &pctx->u.ahreq;
  204. ahash_request_set_callback(ahreq, aead_request_flags(req),
  205. complete, req);
  206. sg_init_one(pctx->src, gcm_zeroes, remain);
  207. ahash_request_set_crypt(ahreq, pctx->src, NULL, remain);
  208. return crypto_ahash_update(ahreq);
  209. }
  210. static int gcm_hash_len(struct aead_request *req,
  211. struct crypto_gcm_req_priv_ctx *pctx)
  212. {
  213. struct ahash_request *ahreq = &pctx->u.ahreq;
  214. struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
  215. u128 lengths;
  216. lengths.a = cpu_to_be64(req->assoclen * 8);
  217. lengths.b = cpu_to_be64(gctx->cryptlen * 8);
  218. memcpy(pctx->iauth_tag, &lengths, 16);
  219. sg_init_one(pctx->src, pctx->iauth_tag, 16);
  220. ahash_request_set_callback(ahreq, aead_request_flags(req),
  221. gcm_hash_len_done, req);
  222. ahash_request_set_crypt(ahreq, pctx->src,
  223. NULL, sizeof(lengths));
  224. return crypto_ahash_update(ahreq);
  225. }
  226. static int gcm_hash_final(struct aead_request *req,
  227. struct crypto_gcm_req_priv_ctx *pctx)
  228. {
  229. struct ahash_request *ahreq = &pctx->u.ahreq;
  230. ahash_request_set_callback(ahreq, aead_request_flags(req),
  231. gcm_hash_final_done, req);
  232. ahash_request_set_crypt(ahreq, NULL, pctx->iauth_tag, 0);
  233. return crypto_ahash_final(ahreq);
  234. }
  235. static void __gcm_hash_final_done(struct aead_request *req, int err)
  236. {
  237. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  238. struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
  239. if (!err)
  240. crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
  241. gctx->complete(req, err);
  242. }
  243. static void gcm_hash_final_done(struct crypto_async_request *areq, int err)
  244. {
  245. struct aead_request *req = areq->data;
  246. __gcm_hash_final_done(req, err);
  247. }
  248. static void __gcm_hash_len_done(struct aead_request *req, int err)
  249. {
  250. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  251. if (!err) {
  252. err = gcm_hash_final(req, pctx);
  253. if (err == -EINPROGRESS || err == -EBUSY)
  254. return;
  255. }
  256. __gcm_hash_final_done(req, err);
  257. }
  258. static void gcm_hash_len_done(struct crypto_async_request *areq, int err)
  259. {
  260. struct aead_request *req = areq->data;
  261. __gcm_hash_len_done(req, err);
  262. }
  263. static void __gcm_hash_crypt_remain_done(struct aead_request *req, int err)
  264. {
  265. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  266. if (!err) {
  267. err = gcm_hash_len(req, pctx);
  268. if (err == -EINPROGRESS || err == -EBUSY)
  269. return;
  270. }
  271. __gcm_hash_len_done(req, err);
  272. }
  273. static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
  274. int err)
  275. {
  276. struct aead_request *req = areq->data;
  277. __gcm_hash_crypt_remain_done(req, err);
  278. }
  279. static void __gcm_hash_crypt_done(struct aead_request *req, int err)
  280. {
  281. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  282. struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
  283. unsigned int remain;
  284. if (!err) {
  285. remain = gcm_remain(gctx->cryptlen);
  286. BUG_ON(!remain);
  287. err = gcm_hash_remain(req, pctx, remain,
  288. gcm_hash_crypt_remain_done);
  289. if (err == -EINPROGRESS || err == -EBUSY)
  290. return;
  291. }
  292. __gcm_hash_crypt_remain_done(req, err);
  293. }
  294. static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err)
  295. {
  296. struct aead_request *req = areq->data;
  297. __gcm_hash_crypt_done(req, err);
  298. }
  299. static void __gcm_hash_assoc_remain_done(struct aead_request *req, int err)
  300. {
  301. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  302. struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
  303. crypto_completion_t complete;
  304. unsigned int remain = 0;
  305. if (!err && gctx->cryptlen) {
  306. remain = gcm_remain(gctx->cryptlen);
  307. complete = remain ? gcm_hash_crypt_done :
  308. gcm_hash_crypt_remain_done;
  309. err = gcm_hash_update(req, pctx, complete,
  310. gctx->src, gctx->cryptlen);
  311. if (err == -EINPROGRESS || err == -EBUSY)
  312. return;
  313. }
  314. if (remain)
  315. __gcm_hash_crypt_done(req, err);
  316. else
  317. __gcm_hash_crypt_remain_done(req, err);
  318. }
  319. static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
  320. int err)
  321. {
  322. struct aead_request *req = areq->data;
  323. __gcm_hash_assoc_remain_done(req, err);
  324. }
  325. static void __gcm_hash_assoc_done(struct aead_request *req, int err)
  326. {
  327. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  328. unsigned int remain;
  329. if (!err) {
  330. remain = gcm_remain(req->assoclen);
  331. BUG_ON(!remain);
  332. err = gcm_hash_remain(req, pctx, remain,
  333. gcm_hash_assoc_remain_done);
  334. if (err == -EINPROGRESS || err == -EBUSY)
  335. return;
  336. }
  337. __gcm_hash_assoc_remain_done(req, err);
  338. }
  339. static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err)
  340. {
  341. struct aead_request *req = areq->data;
  342. __gcm_hash_assoc_done(req, err);
  343. }
  344. static void __gcm_hash_init_done(struct aead_request *req, int err)
  345. {
  346. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  347. crypto_completion_t complete;
  348. unsigned int remain = 0;
  349. if (!err && req->assoclen) {
  350. remain = gcm_remain(req->assoclen);
  351. complete = remain ? gcm_hash_assoc_done :
  352. gcm_hash_assoc_remain_done;
  353. err = gcm_hash_update(req, pctx, complete,
  354. req->assoc, req->assoclen);
  355. if (err == -EINPROGRESS || err == -EBUSY)
  356. return;
  357. }
  358. if (remain)
  359. __gcm_hash_assoc_done(req, err);
  360. else
  361. __gcm_hash_assoc_remain_done(req, err);
  362. }
  363. static void gcm_hash_init_done(struct crypto_async_request *areq, int err)
  364. {
  365. struct aead_request *req = areq->data;
  366. __gcm_hash_init_done(req, err);
  367. }
  368. static int gcm_hash(struct aead_request *req,
  369. struct crypto_gcm_req_priv_ctx *pctx)
  370. {
  371. struct ahash_request *ahreq = &pctx->u.ahreq;
  372. struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
  373. struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
  374. unsigned int remain;
  375. crypto_completion_t complete;
  376. int err;
  377. ahash_request_set_tfm(ahreq, ctx->ghash);
  378. ahash_request_set_callback(ahreq, aead_request_flags(req),
  379. gcm_hash_init_done, req);
  380. err = crypto_ahash_init(ahreq);
  381. if (err)
  382. return err;
  383. remain = gcm_remain(req->assoclen);
  384. complete = remain ? gcm_hash_assoc_done : gcm_hash_assoc_remain_done;
  385. err = gcm_hash_update(req, pctx, complete, req->assoc, req->assoclen);
  386. if (err)
  387. return err;
  388. if (remain) {
  389. err = gcm_hash_remain(req, pctx, remain,
  390. gcm_hash_assoc_remain_done);
  391. if (err)
  392. return err;
  393. }
  394. remain = gcm_remain(gctx->cryptlen);
  395. complete = remain ? gcm_hash_crypt_done : gcm_hash_crypt_remain_done;
  396. err = gcm_hash_update(req, pctx, complete, gctx->src, gctx->cryptlen);
  397. if (err)
  398. return err;
  399. if (remain) {
  400. err = gcm_hash_remain(req, pctx, remain,
  401. gcm_hash_crypt_remain_done);
  402. if (err)
  403. return err;
  404. }
  405. err = gcm_hash_len(req, pctx);
  406. if (err)
  407. return err;
  408. err = gcm_hash_final(req, pctx);
  409. if (err)
  410. return err;
  411. return 0;
  412. }
  413. static void gcm_enc_copy_hash(struct aead_request *req,
  414. struct crypto_gcm_req_priv_ctx *pctx)
  415. {
  416. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  417. u8 *auth_tag = pctx->auth_tag;
  418. scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
  419. crypto_aead_authsize(aead), 1);
  420. }
  421. static void gcm_enc_hash_done(struct aead_request *req, int err)
  422. {
  423. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  424. if (!err)
  425. gcm_enc_copy_hash(req, pctx);
  426. aead_request_complete(req, err);
  427. }
  428. static void gcm_encrypt_done(struct crypto_async_request *areq, int err)
  429. {
  430. struct aead_request *req = areq->data;
  431. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  432. if (!err) {
  433. err = gcm_hash(req, pctx);
  434. if (err == -EINPROGRESS || err == -EBUSY)
  435. return;
  436. else if (!err) {
  437. crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
  438. gcm_enc_copy_hash(req, pctx);
  439. }
  440. }
  441. aead_request_complete(req, err);
  442. }
  443. static int crypto_gcm_encrypt(struct aead_request *req)
  444. {
  445. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  446. struct ablkcipher_request *abreq = &pctx->u.abreq;
  447. struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
  448. int err;
  449. crypto_gcm_init_crypt(abreq, req, req->cryptlen);
  450. ablkcipher_request_set_callback(abreq, aead_request_flags(req),
  451. gcm_encrypt_done, req);
  452. gctx->src = req->dst;
  453. gctx->cryptlen = req->cryptlen;
  454. gctx->complete = gcm_enc_hash_done;
  455. err = crypto_ablkcipher_encrypt(abreq);
  456. if (err)
  457. return err;
  458. err = gcm_hash(req, pctx);
  459. if (err)
  460. return err;
  461. crypto_xor(pctx->auth_tag, pctx->iauth_tag, 16);
  462. gcm_enc_copy_hash(req, pctx);
  463. return 0;
  464. }
  465. static int crypto_gcm_verify(struct aead_request *req,
  466. struct crypto_gcm_req_priv_ctx *pctx)
  467. {
  468. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  469. u8 *auth_tag = pctx->auth_tag;
  470. u8 *iauth_tag = pctx->iauth_tag;
  471. unsigned int authsize = crypto_aead_authsize(aead);
  472. unsigned int cryptlen = req->cryptlen - authsize;
  473. crypto_xor(auth_tag, iauth_tag, 16);
  474. scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
  475. return crypto_memneq(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
  476. }
  477. static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
  478. {
  479. struct aead_request *req = areq->data;
  480. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  481. if (!err)
  482. err = crypto_gcm_verify(req, pctx);
  483. aead_request_complete(req, err);
  484. }
  485. static void gcm_dec_hash_done(struct aead_request *req, int err)
  486. {
  487. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  488. struct ablkcipher_request *abreq = &pctx->u.abreq;
  489. struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
  490. if (!err) {
  491. ablkcipher_request_set_callback(abreq, aead_request_flags(req),
  492. gcm_decrypt_done, req);
  493. crypto_gcm_init_crypt(abreq, req, gctx->cryptlen);
  494. err = crypto_ablkcipher_decrypt(abreq);
  495. if (err == -EINPROGRESS || err == -EBUSY)
  496. return;
  497. else if (!err)
  498. err = crypto_gcm_verify(req, pctx);
  499. }
  500. aead_request_complete(req, err);
  501. }
  502. static int crypto_gcm_decrypt(struct aead_request *req)
  503. {
  504. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  505. struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
  506. struct ablkcipher_request *abreq = &pctx->u.abreq;
  507. struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
  508. unsigned int authsize = crypto_aead_authsize(aead);
  509. unsigned int cryptlen = req->cryptlen;
  510. int err;
  511. if (cryptlen < authsize)
  512. return -EINVAL;
  513. cryptlen -= authsize;
  514. gctx->src = req->src;
  515. gctx->cryptlen = cryptlen;
  516. gctx->complete = gcm_dec_hash_done;
  517. err = gcm_hash(req, pctx);
  518. if (err)
  519. return err;
  520. ablkcipher_request_set_callback(abreq, aead_request_flags(req),
  521. gcm_decrypt_done, req);
  522. crypto_gcm_init_crypt(abreq, req, cryptlen);
  523. err = crypto_ablkcipher_decrypt(abreq);
  524. if (err)
  525. return err;
  526. return crypto_gcm_verify(req, pctx);
  527. }
  528. static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
  529. {
  530. struct crypto_instance *inst = (void *)tfm->__crt_alg;
  531. struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
  532. struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
  533. struct crypto_ablkcipher *ctr;
  534. struct crypto_ahash *ghash;
  535. unsigned long align;
  536. int err;
  537. ghash = crypto_spawn_ahash(&ictx->ghash);
  538. if (IS_ERR(ghash))
  539. return PTR_ERR(ghash);
  540. ctr = crypto_spawn_skcipher(&ictx->ctr);
  541. err = PTR_ERR(ctr);
  542. if (IS_ERR(ctr))
  543. goto err_free_hash;
  544. ctx->ctr = ctr;
  545. ctx->ghash = ghash;
  546. align = crypto_tfm_alg_alignmask(tfm);
  547. align &= ~(crypto_tfm_ctx_alignment() - 1);
  548. tfm->crt_aead.reqsize = align +
  549. offsetof(struct crypto_gcm_req_priv_ctx, u) +
  550. max(sizeof(struct ablkcipher_request) +
  551. crypto_ablkcipher_reqsize(ctr),
  552. sizeof(struct ahash_request) +
  553. crypto_ahash_reqsize(ghash));
  554. return 0;
  555. err_free_hash:
  556. crypto_free_ahash(ghash);
  557. return err;
  558. }
  559. static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
  560. {
  561. struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
  562. crypto_free_ahash(ctx->ghash);
  563. crypto_free_ablkcipher(ctx->ctr);
  564. }
  565. static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
  566. const char *full_name,
  567. const char *ctr_name,
  568. const char *ghash_name)
  569. {
  570. struct crypto_attr_type *algt;
  571. struct crypto_instance *inst;
  572. struct crypto_alg *ctr;
  573. struct crypto_alg *ghash_alg;
  574. struct ahash_alg *ghash_ahash_alg;
  575. struct gcm_instance_ctx *ctx;
  576. int err;
  577. algt = crypto_get_attr_type(tb);
  578. err = PTR_ERR(algt);
  579. if (IS_ERR(algt))
  580. return ERR_PTR(err);
  581. if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
  582. return ERR_PTR(-EINVAL);
  583. ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
  584. CRYPTO_ALG_TYPE_HASH,
  585. CRYPTO_ALG_TYPE_AHASH_MASK);
  586. err = PTR_ERR(ghash_alg);
  587. if (IS_ERR(ghash_alg))
  588. return ERR_PTR(err);
  589. err = -ENOMEM;
  590. inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
  591. if (!inst)
  592. goto out_put_ghash;
  593. ctx = crypto_instance_ctx(inst);
  594. ghash_ahash_alg = container_of(ghash_alg, struct ahash_alg, halg.base);
  595. err = crypto_init_ahash_spawn(&ctx->ghash, &ghash_ahash_alg->halg,
  596. inst);
  597. if (err)
  598. goto err_free_inst;
  599. crypto_set_skcipher_spawn(&ctx->ctr, inst);
  600. err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
  601. crypto_requires_sync(algt->type,
  602. algt->mask));
  603. if (err)
  604. goto err_drop_ghash;
  605. ctr = crypto_skcipher_spawn_alg(&ctx->ctr);
  606. /* We only support 16-byte blocks. */
  607. if (ctr->cra_ablkcipher.ivsize != 16)
  608. goto out_put_ctr;
  609. /* Not a stream cipher? */
  610. err = -EINVAL;
  611. if (ctr->cra_blocksize != 1)
  612. goto out_put_ctr;
  613. err = -ENAMETOOLONG;
  614. if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
  615. "gcm_base(%s,%s)", ctr->cra_driver_name,
  616. ghash_alg->cra_driver_name) >=
  617. CRYPTO_MAX_ALG_NAME)
  618. goto out_put_ctr;
  619. memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);
  620. inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
  621. inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
  622. inst->alg.cra_priority = ctr->cra_priority;
  623. inst->alg.cra_blocksize = 1;
  624. inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
  625. inst->alg.cra_type = &crypto_aead_type;
  626. inst->alg.cra_aead.ivsize = 16;
  627. inst->alg.cra_aead.maxauthsize = 16;
  628. inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
  629. inst->alg.cra_init = crypto_gcm_init_tfm;
  630. inst->alg.cra_exit = crypto_gcm_exit_tfm;
  631. inst->alg.cra_aead.setkey = crypto_gcm_setkey;
  632. inst->alg.cra_aead.setauthsize = crypto_gcm_setauthsize;
  633. inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
  634. inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
  635. out:
  636. crypto_mod_put(ghash_alg);
  637. return inst;
  638. out_put_ctr:
  639. crypto_drop_skcipher(&ctx->ctr);
  640. err_drop_ghash:
  641. crypto_drop_ahash(&ctx->ghash);
  642. err_free_inst:
  643. kfree(inst);
  644. out_put_ghash:
  645. inst = ERR_PTR(err);
  646. goto out;
  647. }
  648. static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
  649. {
  650. int err;
  651. const char *cipher_name;
  652. char ctr_name[CRYPTO_MAX_ALG_NAME];
  653. char full_name[CRYPTO_MAX_ALG_NAME];
  654. cipher_name = crypto_attr_alg_name(tb[1]);
  655. err = PTR_ERR(cipher_name);
  656. if (IS_ERR(cipher_name))
  657. return ERR_PTR(err);
  658. if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
  659. CRYPTO_MAX_ALG_NAME)
  660. return ERR_PTR(-ENAMETOOLONG);
  661. if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm(%s)", cipher_name) >=
  662. CRYPTO_MAX_ALG_NAME)
  663. return ERR_PTR(-ENAMETOOLONG);
  664. return crypto_gcm_alloc_common(tb, full_name, ctr_name, "ghash");
  665. }
  666. static void crypto_gcm_free(struct crypto_instance *inst)
  667. {
  668. struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);
  669. crypto_drop_skcipher(&ctx->ctr);
  670. crypto_drop_ahash(&ctx->ghash);
  671. kfree(inst);
  672. }
  673. static struct crypto_template crypto_gcm_tmpl = {
  674. .name = "gcm",
  675. .alloc = crypto_gcm_alloc,
  676. .free = crypto_gcm_free,
  677. .module = THIS_MODULE,
  678. };
  679. static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
  680. {
  681. int err;
  682. const char *ctr_name;
  683. const char *ghash_name;
  684. char full_name[CRYPTO_MAX_ALG_NAME];
  685. ctr_name = crypto_attr_alg_name(tb[1]);
  686. err = PTR_ERR(ctr_name);
  687. if (IS_ERR(ctr_name))
  688. return ERR_PTR(err);
  689. ghash_name = crypto_attr_alg_name(tb[2]);
  690. err = PTR_ERR(ghash_name);
  691. if (IS_ERR(ghash_name))
  692. return ERR_PTR(err);
  693. if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s,%s)",
  694. ctr_name, ghash_name) >= CRYPTO_MAX_ALG_NAME)
  695. return ERR_PTR(-ENAMETOOLONG);
  696. return crypto_gcm_alloc_common(tb, full_name, ctr_name, ghash_name);
  697. }
  698. static struct crypto_template crypto_gcm_base_tmpl = {
  699. .name = "gcm_base",
  700. .alloc = crypto_gcm_base_alloc,
  701. .free = crypto_gcm_free,
  702. .module = THIS_MODULE,
  703. };
  704. static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
  705. unsigned int keylen)
  706. {
  707. struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
  708. struct crypto_aead *child = ctx->child;
  709. int err;
  710. if (keylen < 4)
  711. return -EINVAL;
  712. keylen -= 4;
  713. memcpy(ctx->nonce, key + keylen, 4);
  714. crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  715. crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
  716. CRYPTO_TFM_REQ_MASK);
  717. err = crypto_aead_setkey(child, key, keylen);
  718. crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
  719. CRYPTO_TFM_RES_MASK);
  720. return err;
  721. }
  722. static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
  723. unsigned int authsize)
  724. {
  725. struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
  726. switch (authsize) {
  727. case 8:
  728. case 12:
  729. case 16:
  730. break;
  731. default:
  732. return -EINVAL;
  733. }
  734. return crypto_aead_setauthsize(ctx->child, authsize);
  735. }
/*
 * Prepare the inner GCM request for an rfc4106 operation: build the
 * 12-byte GCM IV (4-byte salt from setkey + 8-byte per-request IV) and
 * forward src/dst/assoc unchanged to the child transform.
 */
static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	/* The IV lives in the request context behind the child's own
	 * request area, aligned for the child transform; this layout is
	 * sized in crypto_rfc4106_init_tfm() (the "align + 16" slack). */
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* IV = fixed salt || explicit per-request IV. */
	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
	aead_request_set_assoc(subreq, req->assoc, req->assoclen);

	return subreq;
}
  753. static int crypto_rfc4106_encrypt(struct aead_request *req)
  754. {
  755. req = crypto_rfc4106_crypt(req);
  756. return crypto_aead_encrypt(req);
  757. }
  758. static int crypto_rfc4106_decrypt(struct aead_request *req)
  759. {
  760. req = crypto_rfc4106_crypt(req);
  761. return crypto_aead_decrypt(req);
  762. }
/*
 * Instantiate the child aead and size our per-request context as:
 *   [struct aead_request][child reqsize, ctx-aligned][pad][16-byte IV]
 * The "align + 16" tail leaves room for the IV at the child's alignment;
 * crypto_rfc4106_crypt() relies on exactly this layout via PTR_ALIGN.
 */
static int crypto_rfc4106_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	/* Extra alignment slack beyond what ctx alignment already grants. */
	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = sizeof(struct aead_request) +
				ALIGN(crypto_aead_reqsize(aead),
				      crypto_tfm_ctx_alignment()) +
				align + 16;

	return 0;
}
  782. static void crypto_rfc4106_exit_tfm(struct crypto_tfm *tfm)
  783. {
  784. struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm);
  785. crypto_free_aead(ctx->child);
  786. }
  787. static struct crypto_instance *crypto_rfc4106_alloc(struct rtattr **tb)
  788. {
  789. struct crypto_attr_type *algt;
  790. struct crypto_instance *inst;
  791. struct crypto_aead_spawn *spawn;
  792. struct crypto_alg *alg;
  793. const char *ccm_name;
  794. int err;
  795. algt = crypto_get_attr_type(tb);
  796. err = PTR_ERR(algt);
  797. if (IS_ERR(algt))
  798. return ERR_PTR(err);
  799. if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
  800. return ERR_PTR(-EINVAL);
  801. ccm_name = crypto_attr_alg_name(tb[1]);
  802. err = PTR_ERR(ccm_name);
  803. if (IS_ERR(ccm_name))
  804. return ERR_PTR(err);
  805. inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
  806. if (!inst)
  807. return ERR_PTR(-ENOMEM);
  808. spawn = crypto_instance_ctx(inst);
  809. crypto_set_aead_spawn(spawn, inst);
  810. err = crypto_grab_aead(spawn, ccm_name, 0,
  811. crypto_requires_sync(algt->type, algt->mask));
  812. if (err)
  813. goto out_free_inst;
  814. alg = crypto_aead_spawn_alg(spawn);
  815. err = -EINVAL;
  816. /* We only support 16-byte blocks. */
  817. if (alg->cra_aead.ivsize != 16)
  818. goto out_drop_alg;
  819. /* Not a stream cipher? */
  820. if (alg->cra_blocksize != 1)
  821. goto out_drop_alg;
  822. err = -ENAMETOOLONG;
  823. if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
  824. "rfc4106(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
  825. snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
  826. "rfc4106(%s)", alg->cra_driver_name) >=
  827. CRYPTO_MAX_ALG_NAME)
  828. goto out_drop_alg;
  829. inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
  830. inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
  831. inst->alg.cra_priority = alg->cra_priority;
  832. inst->alg.cra_blocksize = 1;
  833. inst->alg.cra_alignmask = alg->cra_alignmask;
  834. inst->alg.cra_type = &crypto_nivaead_type;
  835. inst->alg.cra_aead.ivsize = 8;
  836. inst->alg.cra_aead.maxauthsize = 16;
  837. inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);
  838. inst->alg.cra_init = crypto_rfc4106_init_tfm;
  839. inst->alg.cra_exit = crypto_rfc4106_exit_tfm;
  840. inst->alg.cra_aead.setkey = crypto_rfc4106_setkey;
  841. inst->alg.cra_aead.setauthsize = crypto_rfc4106_setauthsize;
  842. inst->alg.cra_aead.encrypt = crypto_rfc4106_encrypt;
  843. inst->alg.cra_aead.decrypt = crypto_rfc4106_decrypt;
  844. inst->alg.cra_aead.geniv = "seqiv";
  845. out:
  846. return inst;
  847. out_drop_alg:
  848. crypto_drop_aead(spawn);
  849. out_free_inst:
  850. kfree(inst);
  851. inst = ERR_PTR(err);
  852. goto out;
  853. }
  854. static void crypto_rfc4106_free(struct crypto_instance *inst)
  855. {
  856. crypto_drop_spawn(crypto_instance_ctx(inst));
  857. kfree(inst);
  858. }
/* "rfc4106(...)" template: GCM with RFC 4106 salt/IV handling. */
static struct crypto_template crypto_rfc4106_tmpl = {
	.name = "rfc4106",
	.alloc = crypto_rfc4106_alloc,
	.free = crypto_rfc4106_free,
	.module = THIS_MODULE,
};
/*
 * Locate the rfc4543 request context within the aead request area,
 * rounded up to this transform's alignment mask.
 */
static inline struct crypto_rfc4543_req_ctx *crypto_rfc4543_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}
  871. static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
  872. unsigned int keylen)
  873. {
  874. struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
  875. struct crypto_aead *child = ctx->child;
  876. int err;
  877. if (keylen < 4)
  878. return -EINVAL;
  879. keylen -= 4;
  880. memcpy(ctx->nonce, key + keylen, 4);
  881. crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  882. crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
  883. CRYPTO_TFM_REQ_MASK);
  884. err = crypto_aead_setkey(child, key, keylen);
  885. crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
  886. CRYPTO_TFM_RES_MASK);
  887. return err;
  888. }
  889. static int crypto_rfc4543_setauthsize(struct crypto_aead *parent,
  890. unsigned int authsize)
  891. {
  892. struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
  893. if (authsize != 16)
  894. return -EINVAL;
  895. return crypto_aead_setauthsize(ctx->child, authsize);
  896. }
/*
 * Prepare the inner GCM request for an rfc4543 (GMAC) operation.
 *
 * RFC 4543 authenticates the payload but does not encrypt it: the whole
 * payload is fed to the child GCM as associated data, and the only data
 * the child en/decrypts is the 16-byte authentication tag.  The AAD
 * scatterlist chained up here is: assoc || (explicit IV ++ payload).
 *
 * NOTE(review): the payload half of the AAD is built from req->dst (see
 * dstp/vdst below), which appears to assume an in-place operation where
 * the data is already present in dst — confirm callers guarantee
 * src == dst before relying on out-of-place behavior.
 */
static struct aead_request *crypto_rfc4543_crypt(struct aead_request *req,
						 int enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_rfc4543_req_ctx *rctx = crypto_rfc4543_reqctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct scatterlist *dst = req->dst;
	struct scatterlist *cipher = rctx->cipher;
	struct scatterlist *payload = rctx->payload;
	struct scatterlist *assoc = rctx->assoc;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int assoclen = req->assoclen;
	struct page *dstp;
	u8 *vdst;
	/* IV sits behind the child's request area, aligned for the child
	 * (layout sized in crypto_rfc4543_init_tfm()). */
	u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),
			   crypto_aead_alignmask(ctx->child) + 1);

	/* IV = fixed salt || explicit per-request IV. */
	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	/* construct cipher/plaintext: on encrypt the tag is computed over
	 * zeroes; on decrypt the received tag is lifted out of dst. */
	if (enc)
		memset(rctx->auth_tag, 0, authsize);
	else
		scatterwalk_map_and_copy(rctx->auth_tag, dst,
					 req->cryptlen - authsize,
					 authsize, 0);

	sg_init_one(cipher, rctx->auth_tag, authsize);

	/* construct the aad: explicit IV chained ahead of the payload.
	 * The vdst check collapses the chain when the IV is already
	 * contiguous with the payload in low memory. */
	dstp = sg_page(dst);
	vdst = PageHighMem(dstp) ? NULL : page_address(dstp) + dst->offset;

	sg_init_table(payload, 2);
	sg_set_buf(payload, req->iv, 8);
	scatterwalk_crypto_chain(payload, dst, vdst == req->iv + 8, 2);
	assoclen += 8 + req->cryptlen - (enc ? 0 : authsize);

	if (req->assoc->length == req->assoclen) {
		/* Single-entry assoc list: reference the page directly. */
		sg_init_table(assoc, 2);
		sg_set_page(assoc, sg_page(req->assoc), req->assoc->length,
			    req->assoc->offset);
	} else {
		/* Multi-entry assoc: linearize into the bounce buffer. */
		BUG_ON(req->assoclen > sizeof(rctx->assocbuf));

		scatterwalk_map_and_copy(rctx->assocbuf, req->assoc, 0,
					 req->assoclen, 0);

		sg_init_table(assoc, 2);
		sg_set_buf(assoc, rctx->assocbuf, req->assoclen);
	}
	scatterwalk_crypto_chain(assoc, payload, 0, 2);

	aead_request_set_tfm(subreq, ctx->child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	/* Only the tag passes through the child's crypt path (0 bytes on
	 * encrypt: GCM appends the tag itself). */
	aead_request_set_crypt(subreq, cipher, cipher, enc ? 0 : authsize, iv);
	aead_request_set_assoc(subreq, assoc, assoclen);

	return subreq;
}
/*
 * GMAC encrypt: run the child GCM over the AAD-only request, then append
 * the computed tag to the destination after the (unencrypted) payload.
 *
 * NOTE(review): if the child is asynchronous and returns -EINPROGRESS,
 * the early return below skips the tag copy and nothing performs it at
 * completion time — confirm the child here is always synchronous, or a
 * completion callback is needed.
 */
static int crypto_rfc4543_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_req_ctx *rctx = crypto_rfc4543_reqctx(req);
	struct aead_request *subreq;
	int err;

	subreq = crypto_rfc4543_crypt(req, 1);
	err = crypto_aead_encrypt(subreq);
	if (err)
		return err;

	/* Append the tag computed into rctx->auth_tag to dst. */
	scatterwalk_map_and_copy(rctx->auth_tag, req->dst, req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}
  964. static int crypto_rfc4543_decrypt(struct aead_request *req)
  965. {
  966. req = crypto_rfc4543_crypt(req, 0);
  967. return crypto_aead_decrypt(req);
  968. }
/*
 * Instantiate the child aead and size our per-request context as:
 *   [struct crypto_rfc4543_req_ctx][child reqsize, ctx-aligned][pad][16-byte IV]
 * The "align + 16" tail leaves room for the IV at the child's alignment;
 * crypto_rfc4543_crypt() relies on exactly this layout via PTR_ALIGN.
 */
static int crypto_rfc4543_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_rfc4543_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	/* Extra alignment slack beyond what ctx alignment already grants. */
	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	tfm->crt_aead.reqsize = sizeof(struct crypto_rfc4543_req_ctx) +
				ALIGN(crypto_aead_reqsize(aead),
				      crypto_tfm_ctx_alignment()) +
				align + 16;

	return 0;
}
  988. static void crypto_rfc4543_exit_tfm(struct crypto_tfm *tfm)
  989. {
  990. struct crypto_rfc4543_ctx *ctx = crypto_tfm_ctx(tfm);
  991. crypto_free_aead(ctx->child);
  992. }
  993. static struct crypto_instance *crypto_rfc4543_alloc(struct rtattr **tb)
  994. {
  995. struct crypto_attr_type *algt;
  996. struct crypto_instance *inst;
  997. struct crypto_aead_spawn *spawn;
  998. struct crypto_alg *alg;
  999. const char *ccm_name;
  1000. int err;
  1001. algt = crypto_get_attr_type(tb);
  1002. err = PTR_ERR(algt);
  1003. if (IS_ERR(algt))
  1004. return ERR_PTR(err);
  1005. if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
  1006. return ERR_PTR(-EINVAL);
  1007. ccm_name = crypto_attr_alg_name(tb[1]);
  1008. err = PTR_ERR(ccm_name);
  1009. if (IS_ERR(ccm_name))
  1010. return ERR_PTR(err);
  1011. inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
  1012. if (!inst)
  1013. return ERR_PTR(-ENOMEM);
  1014. spawn = crypto_instance_ctx(inst);
  1015. crypto_set_aead_spawn(spawn, inst);
  1016. err = crypto_grab_aead(spawn, ccm_name, 0,
  1017. crypto_requires_sync(algt->type, algt->mask));
  1018. if (err)
  1019. goto out_free_inst;
  1020. alg = crypto_aead_spawn_alg(spawn);
  1021. err = -EINVAL;
  1022. /* We only support 16-byte blocks. */
  1023. if (alg->cra_aead.ivsize != 16)
  1024. goto out_drop_alg;
  1025. /* Not a stream cipher? */
  1026. if (alg->cra_blocksize != 1)
  1027. goto out_drop_alg;
  1028. err = -ENAMETOOLONG;
  1029. if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
  1030. "rfc4543(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
  1031. snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
  1032. "rfc4543(%s)", alg->cra_driver_name) >=
  1033. CRYPTO_MAX_ALG_NAME)
  1034. goto out_drop_alg;
  1035. inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
  1036. inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
  1037. inst->alg.cra_priority = alg->cra_priority;
  1038. inst->alg.cra_blocksize = 1;
  1039. inst->alg.cra_alignmask = alg->cra_alignmask;
  1040. inst->alg.cra_type = &crypto_nivaead_type;
  1041. inst->alg.cra_aead.ivsize = 8;
  1042. inst->alg.cra_aead.maxauthsize = 16;
  1043. inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx);
  1044. inst->alg.cra_init = crypto_rfc4543_init_tfm;
  1045. inst->alg.cra_exit = crypto_rfc4543_exit_tfm;
  1046. inst->alg.cra_aead.setkey = crypto_rfc4543_setkey;
  1047. inst->alg.cra_aead.setauthsize = crypto_rfc4543_setauthsize;
  1048. inst->alg.cra_aead.encrypt = crypto_rfc4543_encrypt;
  1049. inst->alg.cra_aead.decrypt = crypto_rfc4543_decrypt;
  1050. inst->alg.cra_aead.geniv = "seqiv";
  1051. out:
  1052. return inst;
  1053. out_drop_alg:
  1054. crypto_drop_aead(spawn);
  1055. out_free_inst:
  1056. kfree(inst);
  1057. inst = ERR_PTR(err);
  1058. goto out;
  1059. }
  1060. static void crypto_rfc4543_free(struct crypto_instance *inst)
  1061. {
  1062. crypto_drop_spawn(crypto_instance_ctx(inst));
  1063. kfree(inst);
  1064. }
/* "rfc4543(...)" template: GMAC (authentication-only GCM). */
static struct crypto_template crypto_rfc4543_tmpl = {
	.name = "rfc4543",
	.alloc = crypto_rfc4543_alloc,
	.free = crypto_rfc4543_free,
	.module = THIS_MODULE,
};
  1071. static int __init crypto_gcm_module_init(void)
  1072. {
  1073. int err;
  1074. gcm_zeroes = kzalloc(16, GFP_KERNEL);
  1075. if (!gcm_zeroes)
  1076. return -ENOMEM;
  1077. err = crypto_register_template(&crypto_gcm_base_tmpl);
  1078. if (err)
  1079. goto out;
  1080. err = crypto_register_template(&crypto_gcm_tmpl);
  1081. if (err)
  1082. goto out_undo_base;
  1083. err = crypto_register_template(&crypto_rfc4106_tmpl);
  1084. if (err)
  1085. goto out_undo_gcm;
  1086. err = crypto_register_template(&crypto_rfc4543_tmpl);
  1087. if (err)
  1088. goto out_undo_rfc4106;
  1089. return 0;
  1090. out_undo_rfc4106:
  1091. crypto_unregister_template(&crypto_rfc4106_tmpl);
  1092. out_undo_gcm:
  1093. crypto_unregister_template(&crypto_gcm_tmpl);
  1094. out_undo_base:
  1095. crypto_unregister_template(&crypto_gcm_base_tmpl);
  1096. out:
  1097. kfree(gcm_zeroes);
  1098. return err;
  1099. }
/*
 * Module exit: free the shared zero block and unregister the templates
 * in reverse registration order.
 */
static void __exit crypto_gcm_module_exit(void)
{
	kfree(gcm_zeroes);
	crypto_unregister_template(&crypto_rfc4543_tmpl);
	crypto_unregister_template(&crypto_rfc4106_tmpl);
	crypto_unregister_template(&crypto_gcm_tmpl);
	crypto_unregister_template(&crypto_gcm_base_tmpl);
}
module_init(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
/* Aliases let request_module() load this module for the template names. */
MODULE_ALIAS("gcm_base");
MODULE_ALIAS("rfc4106");
MODULE_ALIAS("rfc4543");