/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <asm/neon.h>
#include <asm/hwcap.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"
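
/*
 * This file is built twice: once with USE_V8_CRYPTO_EXTENSIONS defined,
 * producing the ARMv8 Crypto Extensions backed module ("ce", priority 300),
 * and once without, producing the plain NEON module ("neon", priority 200).
 * The macros below map the generic names used in this file onto the
 * matching set of assembler routines.
 */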
#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE                    "ce"
#define PRIO                    300
#define aes_setkey              ce_aes_setkey
#define aes_expandkey           ce_aes_expandkey
#define aes_ecb_encrypt         ce_aes_ecb_encrypt
#define aes_ecb_decrypt         ce_aes_ecb_decrypt
#define aes_cbc_encrypt         ce_aes_cbc_encrypt
#define aes_cbc_decrypt         ce_aes_cbc_decrypt
#define aes_ctr_encrypt         ce_aes_ctr_encrypt
#define aes_xts_encrypt         ce_aes_xts_encrypt
#define aes_xts_decrypt         ce_aes_xts_decrypt
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE                    "neon"
#define PRIO                    200
#define aes_setkey              crypto_aes_set_key
#define aes_expandkey           crypto_aes_expand_key
#define aes_ecb_encrypt         neon_aes_ecb_encrypt
#define aes_ecb_decrypt         neon_aes_ecb_decrypt
#define aes_cbc_encrypt         neon_aes_cbc_encrypt
#define aes_cbc_decrypt         neon_aes_cbc_decrypt
#define aes_ctr_encrypt         neon_aes_ctr_encrypt
#define aes_xts_encrypt         neon_aes_xts_encrypt
#define aes_xts_decrypt         neon_aes_xts_decrypt
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
#endif

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, int first);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, int first);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, u8 iv[], int first);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, u8 ctr[], int first);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
                                int rounds, int blocks, u8 const rk2[], u8 iv[],
                                int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
                                int rounds, int blocks, u8 const rk2[], u8 iv[],
                                int first);
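
/*
 * The 'first' argument is nonzero only for the first chunk of a blkcipher
 * walk. Since kernel_neon_begin()/kernel_neon_end() bracket the whole walk
 * in the handlers below, the NEON register state stays live across calls,
 * which lets the assembler load per-request state such as the round keys
 * and IV only once.
 */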

struct crypto_aes_xts_ctx {
        struct crypto_aes_ctx key1;
        struct crypto_aes_ctx __aligned(8) key2;
};

static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = xts_check_key(tfm, in_key, key_len);
        if (ret)
                return ret;

        /* the first half of the key encrypts the data, the second half
         * generates the tweak */
        ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
        if (!ret)
                ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
                                    key_len / 2);
        if (!ret)
                return 0;

        tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
        return -EINVAL;
}
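
/*
 * Every mode handler below derives the round count from the expanded key:
 * rounds = 6 + key_length / 4, i.e. 10, 12 or 14 rounds for 16, 24 or
 * 32 byte keys respectively, as mandated by the AES specification.
 */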

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = 6 + ctx->key_length / 4;
        struct blkcipher_walk walk;
        unsigned int blocks;

        /* the walk is completed inside a kernel_neon_begin/end section,
         * which must not sleep */
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_enc, rounds, blocks, first);
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = 6 + ctx->key_length / 4;
        struct blkcipher_walk walk;
        unsigned int blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_dec, rounds, blocks, first);
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = 6 + ctx->key_length / 4;
        struct blkcipher_walk walk;
        unsigned int blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_enc, rounds, blocks, walk.iv,
                                first);
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = 6 + ctx->key_length / 4;
        struct blkcipher_walk walk;
        unsigned int blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_dec, rounds, blocks, walk.iv,
                                first);
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}
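
/*
 * CTR is a stream cipher, so unlike the other modes ctr_encrypt() must also
 * handle a trailing partial block. As a worked example (hypothetical request
 * size), nbytes == 20 yields one full block through the main loop plus a
 * 4-byte tail, which is encrypted into a stack buffer and copied out.
 */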

static int ctr_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = 6 + ctx->key_length / 4;
        struct blkcipher_walk walk;
        int blocks;             /* signed: the tail handling may pass -1 */

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

        first = 1;
        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_enc, rounds, blocks, walk.iv,
                                first);
                first = 0;
                nbytes -= blocks * AES_BLOCK_SIZE;
                if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE)
                        break;
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        if (walk.nbytes % AES_BLOCK_SIZE) {
                u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
                u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
                u8 __aligned(8) tail[AES_BLOCK_SIZE];

                /*
                 * Minimum alignment is 8 bytes, so if nbytes is <= 8, we
                 * need to tell aes_ctr_encrypt() to only read half a block.
                 */
                blocks = (nbytes <= 8) ? -1 : 1;

                aes_ctr_encrypt(tail, tsrc, (u8 *)ctx->key_enc, rounds,
                                blocks, walk.iv, first);
                memcpy(tdst, tail, nbytes);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        kernel_neon_end();

        return err;
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = 6 + ctx->key1.key_length / 4;
        struct blkcipher_walk walk;
        unsigned int blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key1.key_enc, rounds, blocks,
                                (u8 *)ctx->key2.key_enc, walk.iv, first);
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = 6 + ctx->key1.key_length / 4;
        struct blkcipher_walk walk;
        unsigned int blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key1.key_dec, rounds, blocks,
                                (u8 *)ctx->key2.key_enc, walk.iv, first);
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}
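
/*
 * Each mode is registered twice below: once as a synchronous blkcipher
 * flagged CRYPTO_ALG_INTERNAL (the "__" prefixed names), which may only be
 * invoked where the NEON unit is usable, and once as an ablk_helper based
 * async wrapper ("ecb(aes)" etc.) that defers the request to cryptd when
 * NEON cannot be used directly (see crypto/ablk_helper.c).
 */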

static struct crypto_alg aes_algs[] = { {
        .cra_name               = "__ecb-aes-" MODE,
        .cra_driver_name        = "__driver-ecb-aes-" MODE,
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = 0,
                .setkey         = aes_setkey,
                .encrypt        = ecb_encrypt,
                .decrypt        = ecb_decrypt,
        },
}, {
        .cra_name               = "__cbc-aes-" MODE,
        .cra_driver_name        = "__driver-cbc-aes-" MODE,
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = aes_setkey,
                .encrypt        = cbc_encrypt,
                .decrypt        = cbc_decrypt,
        },
}, {
        .cra_name               = "__ctr-aes-" MODE,
        .cra_driver_name        = "__driver-ctr-aes-" MODE,
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = aes_setkey,
                .encrypt        = ctr_encrypt,
                .decrypt        = ctr_encrypt,
        },
}, {
        .cra_name               = "__xts-aes-" MODE,
        .cra_driver_name        = "__driver-xts-aes-" MODE,
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_xts_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = xts_set_key,
                .encrypt        = xts_encrypt,
                .decrypt        = xts_decrypt,
        },
}, {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-" MODE,
        .cra_priority           = PRIO,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = 0,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-" MODE,
        .cra_priority           = PRIO,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-" MODE,
        .cra_priority           = PRIO,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-" MODE,
        .cra_priority           = PRIO,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
} };
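
/*
 * Usage sketch (illustrative only, not part of this driver): a kernel
 * caller reaches these algorithms through the generic crypto API, e.g.
 *
 *      struct crypto_ablkcipher *tfm =
 *              crypto_alloc_ablkcipher("ctr(aes)", 0, 0);
 *
 *      if (!IS_ERR(tfm)) {
 *              crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *              ...
 *              crypto_free_ablkcipher(tfm);
 *      }
 *
 * which resolves to "ctr-aes-" MODE whenever this module provides the
 * highest priority implementation of "ctr(aes)".
 */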

static int __init aes_init(void)
{
        return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit aes_exit(void)
{
        crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
#endif
module_exit(aes_exit);