/* crypto/ctr.c */
  1. /*
  2. * CTR: Counter mode
  3. *
  4. * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
  5. *
  6. * This program is free software; you can redistribute it and/or modify it
  7. * under the terms of the GNU General Public License as published by the Free
  8. * Software Foundation; either version 2 of the License, or (at your option)
  9. * any later version.
  10. *
  11. */
  12. #include <crypto/algapi.h>
  13. #include <crypto/ctr.h>
  14. #include <linux/err.h>
  15. #include <linux/init.h>
  16. #include <linux/kernel.h>
  17. #include <linux/module.h>
  18. #include <linux/random.h>
  19. #include <linux/scatterlist.h>
  20. #include <linux/slab.h>
/* Per-tfm context for the "ctr" template: wraps a single-block cipher. */
struct crypto_ctr_ctx {
	struct crypto_cipher *child;	/* underlying block cipher used to generate keystream */
};

/* Per-tfm context for the "rfc3686" template: CTR blkcipher plus nonce. */
struct crypto_rfc3686_ctx {
	struct crypto_blkcipher *child;		/* underlying "ctr(...)" blkcipher */
	u8 nonce[CTR_RFC3686_NONCE_SIZE];	/* nonce stripped from the tail of the key */
};
  28. static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
  29. unsigned int keylen)
  30. {
  31. struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
  32. struct crypto_cipher *child = ctx->child;
  33. int err;
  34. crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  35. crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
  36. CRYPTO_TFM_REQ_MASK);
  37. err = crypto_cipher_setkey(child, key, keylen);
  38. crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
  39. CRYPTO_TFM_RES_MASK);
  40. return err;
  41. }
/*
 * Handle the final partial block (walk->nbytes < blocksize).  CTR is a
 * stream mode, so one extra keystream block is generated and only the
 * remaining bytes are XORed into the output.
 */
static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	/* Over-allocated stack buffer so the keystream can satisfy the
	 * cipher's alignment requirement. */
	u8 tmp[bsize + alignmask];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	/* keystream = E_K(counter); dst = src XOR keystream (nbytes only). */
	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);

	/* Keep the counter consistent even after a partial block. */
	crypto_inc(ctrblk, bsize);
}
/*
 * Process full blocks when source and destination buffers are distinct:
 * the keystream is encrypted directly into dst, then XORed with src.
 * Returns the number of leftover bytes (< bsize) for the walk machinery.
 */
static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	/* Call the raw cia_encrypt to avoid per-block wrapper overhead;
	 * the walk already guarantees suitably aligned buffers. */
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
/*
 * Process full blocks when src and dst are the same buffer: the keystream
 * must go to a temporary, aligned stack buffer and is then XORed into the
 * data in place.  Returns the number of leftover bytes (< bsize).
 */
static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	/* Raw cia_encrypt, as in crypto_ctr_crypt_segment(). */
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	/* Over-allocated stack buffer so the keystream can be aligned. */
	u8 tmp[bsize + alignmask];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
/*
 * Top-level CTR transform (encryption and decryption are identical in
 * counter mode).  Walks the scatterlists in block-sized chunks, dispatches
 * to the in-place or out-of-place helper, then finishes any trailing
 * partial block.
 */
static int crypto_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	unsigned int bsize = crypto_cipher_blocksize(child);
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	/* Ask the walker for at least one full cipher block per step. */
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, child);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, child);

		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	/* Anything left is a final partial block. */
	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, child);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}
  126. static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
  127. {
  128. struct crypto_instance *inst = (void *)tfm->__crt_alg;
  129. struct crypto_spawn *spawn = crypto_instance_ctx(inst);
  130. struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
  131. struct crypto_cipher *cipher;
  132. cipher = crypto_spawn_cipher(spawn);
  133. if (IS_ERR(cipher))
  134. return PTR_ERR(cipher);
  135. ctx->child = cipher;
  136. return 0;
  137. }
  138. static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
  139. {
  140. struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
  141. crypto_free_cipher(ctx->child);
  142. }
/*
 * Template constructor for "ctr(cipher)".  Validates that the wrapped
 * algorithm is a plain block cipher whose block size is a non-zero
 * multiple of 4 (required by crypto_inc()'s u32 arithmetic), then builds
 * a blkcipher instance around it.
 */
static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
			      CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_put_alg;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_put_alg;

	inst = crypto_alloc_instance("ctr", alg);
	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	/* CTR turns the block cipher into a stream cipher. */
	inst->alg.cra_blocksize = 1;
	/* Ensure u32-aligned counter blocks for crypto_inc(). */
	inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1);
	inst->alg.cra_type = &crypto_blkcipher_type;

	/* IV is a full counter block of the underlying cipher. */
	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);

	inst->alg.cra_init = crypto_ctr_init_tfm;
	inst->alg.cra_exit = crypto_ctr_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;

	inst->alg.cra_blkcipher.geniv = "chainiv";

out:
	/* Drop the reference taken by crypto_attr_alg() in all paths. */
	crypto_mod_put(alg);
	return inst;

out_put_alg:
	inst = ERR_PTR(err);
	goto out;
}
/* Destructor shared by the "ctr" and "rfc3686" templates: drop the spawn's
 * reference to the wrapped algorithm, then free the instance itself. */
static void crypto_ctr_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}
/* Registration record for the "ctr" template. */
static struct crypto_template crypto_ctr_tmpl = {
	.name = "ctr",
	.alloc = crypto_ctr_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};
/*
 * RFC 3686 setkey: the caller-supplied key is the cipher key with the
 * 4-byte nonce appended.  Split off the nonce into the context, then key
 * the underlying CTR blkcipher with the remainder, propagating flags both
 * ways as usual.
 */
static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	/* the nonce is stored in bytes at end of key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) &
				     CRYPTO_TFM_RES_MASK);

	return err;
}
/*
 * RFC 3686 encrypt/decrypt: build the 16-byte counter block as
 * nonce (4) || IV (8) || counter (4, big-endian, starting at 1), then
 * delegate to the underlying CTR blkcipher.  desc->tfm and desc->info are
 * temporarily redirected to the child and restored before returning.
 */
static int crypto_rfc3686_crypt(struct blkcipher_desc *desc,
				struct scatterlist *dst,
				struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_rfc3686_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_blkcipher *child = ctx->child;
	unsigned long alignmask = crypto_blkcipher_alignmask(tfm);
	/* Over-allocated stack buffer so the counter block can be aligned. */
	u8 ivblk[CTR_RFC3686_BLOCK_SIZE + alignmask];
	u8 *iv = PTR_ALIGN(ivblk + 0, alignmask + 1);
	u8 *info = desc->info;
	int err;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	desc->tfm = child;
	desc->info = iv;
	err = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
	desc->tfm = tfm;
	desc->info = info;

	return err;
}
  243. static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm)
  244. {
  245. struct crypto_instance *inst = (void *)tfm->__crt_alg;
  246. struct crypto_spawn *spawn = crypto_instance_ctx(inst);
  247. struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
  248. struct crypto_blkcipher *cipher;
  249. cipher = crypto_spawn_blkcipher(spawn);
  250. if (IS_ERR(cipher))
  251. return PTR_ERR(cipher);
  252. ctx->child = cipher;
  253. return 0;
  254. }
/* Tear-down counterpart of crypto_rfc3686_init_tfm(): free the child. */
static void crypto_rfc3686_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}
  260. static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb)
  261. {
  262. struct crypto_instance *inst;
  263. struct crypto_alg *alg;
  264. int err;
  265. err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
  266. if (err)
  267. return ERR_PTR(err);
  268. alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
  269. CRYPTO_ALG_TYPE_MASK);
  270. err = PTR_ERR(alg);
  271. if (IS_ERR(alg))
  272. return ERR_PTR(err);
  273. /* We only support 16-byte blocks. */
  274. err = -EINVAL;
  275. if (alg->cra_blkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE)
  276. goto out_put_alg;
  277. /* Not a stream cipher? */
  278. if (alg->cra_blocksize != 1)
  279. goto out_put_alg;
  280. inst = crypto_alloc_instance("rfc3686", alg);
  281. if (IS_ERR(inst))
  282. goto out;
  283. inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
  284. inst->alg.cra_priority = alg->cra_priority;
  285. inst->alg.cra_blocksize = 1;
  286. inst->alg.cra_alignmask = alg->cra_alignmask;
  287. inst->alg.cra_type = &crypto_blkcipher_type;
  288. inst->alg.cra_blkcipher.ivsize = CTR_RFC3686_IV_SIZE;
  289. inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize
  290. + CTR_RFC3686_NONCE_SIZE;
  291. inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize
  292. + CTR_RFC3686_NONCE_SIZE;
  293. inst->alg.cra_blkcipher.geniv = "seqiv";
  294. inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);
  295. inst->alg.cra_init = crypto_rfc3686_init_tfm;
  296. inst->alg.cra_exit = crypto_rfc3686_exit_tfm;
  297. inst->alg.cra_blkcipher.setkey = crypto_rfc3686_setkey;
  298. inst->alg.cra_blkcipher.encrypt = crypto_rfc3686_crypt;
  299. inst->alg.cra_blkcipher.decrypt = crypto_rfc3686_crypt;
  300. out:
  301. crypto_mod_put(alg);
  302. return inst;
  303. out_put_alg:
  304. inst = ERR_PTR(err);
  305. goto out;
  306. }
/* Registration record for the "rfc3686" template; reuses the generic
 * crypto_ctr_free() destructor. */
static struct crypto_template crypto_rfc3686_tmpl = {
	.name = "rfc3686",
	.alloc = crypto_rfc3686_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};
  313. static int __init crypto_ctr_module_init(void)
  314. {
  315. int err;
  316. err = crypto_register_template(&crypto_ctr_tmpl);
  317. if (err)
  318. goto out;
  319. err = crypto_register_template(&crypto_rfc3686_tmpl);
  320. if (err)
  321. goto out_drop_ctr;
  322. out:
  323. return err;
  324. out_drop_ctr:
  325. crypto_unregister_template(&crypto_ctr_tmpl);
  326. goto out;
  327. }
/* Unregister in reverse order of registration. */
static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc3686_tmpl);
	crypto_unregister_template(&crypto_ctr_tmpl);
}
module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR Counter block mode");
/* Allow auto-loading when the "rfc3686" template is requested. */
MODULE_ALIAS("rfc3686");