/* serpent.c */
/*
 * Cryptographic API.
 *
 * Serpent Cipher Algorithm.
 *
 * Copyright (C) 2002 Dag Arne Osvik <osvik@ii.uib.no>
 *               2003 Herbert Valerio Riedel <hvr@gnu.org>
 *
 * Added tnepres support: Ruben Jesus Garcia Hernandez <ruben@ugr.es>, 18.10.2004
 * Based on code by hvr
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/errno.h>
#include <asm/byteorder.h>
#include <linux/crypto.h>
#include <linux/types.h>

/* Key is padded to the maximum of 256 bits before round key generation.
 * Any key length <= 256 bits (32 bytes) is allowed by the algorithm.
 */
#define SERPENT_MIN_KEY_SIZE	0
#define SERPENT_MAX_KEY_SIZE	32
#define SERPENT_EXPKEY_WORDS	132
#define SERPENT_BLOCK_SIZE	16

#define PHI 0x9e3779b9UL
/*
 * One step of the key-schedule recurrence: XOR three earlier words
 * (a, c, d) into b along with PHI and the word index i, rotate left
 * by 11, and store the result as expanded-key word k[j].  The result
 * also remains in b for use by subsequent steps.
 */
#define keyiter(a,b,c,d,i,j) \
	b ^= d; b ^= c; b ^= a; b ^= PHI ^ i; b = rol32(b,11); k[j] = b;

/* Load four consecutive expanded-key words k[i..i+3] into x0..x3. */
#define loadkeys(x0,x1,x2,x3,i) \
	x0=k[i]; x1=k[i+1]; x2=k[i+2]; x3=k[i+3];

/* Store x0..x3 back as four consecutive expanded-key words k[i..i+3]. */
#define storekeys(x0,x1,x2,x3,i) \
	k[i]=x0; k[i+1]=x1; k[i+2]=x2; k[i+3]=x3;
/* K: XOR the four words of round key i into the data words x0..x3. */
#define K(x0,x1,x2,x3,i) \
	x3 ^= k[4*(i)+3]; x2 ^= k[4*(i)+2]; \
	x1 ^= k[4*(i)+1]; x0 ^= k[4*(i)+0];

/*
 * LK: Serpent's linear transformation (rotate/shift/XOR mixing of the
 * four data words, using x4 as scratch) followed by XORing in round
 * key i.  Used between S-box layers during encryption.
 */
#define LK(x0,x1,x2,x3,x4,i) \
	x0=rol32(x0,13);\
	x2=rol32(x2,3); x1 ^= x0; x4 = x0 << 3; \
	x3 ^= x2; x1 ^= x2; \
	x1=rol32(x1,1); x3 ^= x4; \
	x3=rol32(x3,7); x4 = x1; \
	x0 ^= x1; x4 <<= 7; x2 ^= x3; \
	x0 ^= x3; x2 ^= x4; x3 ^= k[4*i+3]; \
	x1 ^= k[4*i+1]; x0=rol32(x0,5); x2=rol32(x2,22);\
	x0 ^= k[4*i+0]; x2 ^= k[4*i+2];

/*
 * KL: the inverse of LK — XOR out round key i, then undo the linear
 * transformation (same operations in reverse order with ror32 in
 * place of rol32).  Used between inverse S-box layers in decryption.
 */
#define KL(x0,x1,x2,x3,x4,i) \
	x0 ^= k[4*i+0]; x1 ^= k[4*i+1]; x2 ^= k[4*i+2]; \
	x3 ^= k[4*i+3]; x0=ror32(x0,5); x2=ror32(x2,22);\
	x4 = x1; x2 ^= x3; x0 ^= x3; \
	x4 <<= 7; x0 ^= x1; x1=ror32(x1,1); \
	x2 ^= x4; x3=ror32(x3,7); x4 = x0 << 3; \
	x1 ^= x0; x3 ^= x4; x0=ror32(x0,13);\
	x1 ^= x2; x3 ^= x2; x2=ror32(x2,3);
/*
 * Bitsliced Serpent S-boxes S0..S7 (Osvik's boolean formulation).
 * Each transforms the four data words x0..x3 in place using x4 as a
 * scratch register; after expansion the logical outputs end up in a
 * box-specific permutation of the registers, which the call sites in
 * serpent_setkey()/serpent_encrypt() account for.  Do not reorder any
 * statement: every operation depends on the values left by the one
 * before it.
 */
#define S0(x0,x1,x2,x3,x4) \
	x4 = x3; \
	x3 |= x0; x0 ^= x4; x4 ^= x2; \
	x4 =~ x4; x3 ^= x1; x1 &= x0; \
	x1 ^= x4; x2 ^= x0; x0 ^= x3; \
	x4 |= x0; x0 ^= x2; x2 &= x1; \
	x3 ^= x2; x1 =~ x1; x2 ^= x4; \
	x1 ^= x2;

#define S1(x0,x1,x2,x3,x4) \
	x4 = x1; \
	x1 ^= x0; x0 ^= x3; x3 =~ x3; \
	x4 &= x1; x0 |= x1; x3 ^= x2; \
	x0 ^= x3; x1 ^= x3; x3 ^= x4; \
	x1 |= x4; x4 ^= x2; x2 &= x0; \
	x2 ^= x1; x1 |= x0; x0 =~ x0; \
	x0 ^= x2; x4 ^= x1;

#define S2(x0,x1,x2,x3,x4) \
	x3 =~ x3; \
	x1 ^= x0; x4 = x0; x0 &= x2; \
	x0 ^= x3; x3 |= x4; x2 ^= x1; \
	x3 ^= x1; x1 &= x0; x0 ^= x2; \
	x2 &= x3; x3 |= x1; x0 =~ x0; \
	x3 ^= x0; x4 ^= x0; x0 ^= x2; \
	x1 |= x2;

#define S3(x0,x1,x2,x3,x4) \
	x4 = x1; \
	x1 ^= x3; x3 |= x0; x4 &= x0; \
	x0 ^= x2; x2 ^= x1; x1 &= x3; \
	x2 ^= x3; x0 |= x4; x4 ^= x3; \
	x1 ^= x0; x0 &= x3; x3 &= x4; \
	x3 ^= x2; x4 |= x1; x2 &= x1; \
	x4 ^= x3; x0 ^= x3; x3 ^= x2;

#define S4(x0,x1,x2,x3,x4) \
	x4 = x3; \
	x3 &= x0; x0 ^= x4; \
	x3 ^= x2; x2 |= x4; x0 ^= x1; \
	x4 ^= x3; x2 |= x0; \
	x2 ^= x1; x1 &= x0; \
	x1 ^= x4; x4 &= x2; x2 ^= x3; \
	x4 ^= x0; x3 |= x1; x1 =~ x1; \
	x3 ^= x0;

#define S5(x0,x1,x2,x3,x4) \
	x4 = x1; x1 |= x0; \
	x2 ^= x1; x3 =~ x3; x4 ^= x0; \
	x0 ^= x2; x1 &= x4; x4 |= x3; \
	x4 ^= x0; x0 &= x3; x1 ^= x3; \
	x3 ^= x2; x0 ^= x1; x2 &= x4; \
	x1 ^= x2; x2 &= x0; \
	x3 ^= x2;

#define S6(x0,x1,x2,x3,x4) \
	x4 = x1; \
	x3 ^= x0; x1 ^= x2; x2 ^= x0; \
	x0 &= x3; x1 |= x3; x4 =~ x4; \
	x0 ^= x1; x1 ^= x2; \
	x3 ^= x4; x4 ^= x0; x2 &= x0; \
	x4 ^= x1; x2 ^= x3; x3 &= x1; \
	x3 ^= x0; x1 ^= x2;

#define S7(x0,x1,x2,x3,x4) \
	x1 =~ x1; \
	x4 = x1; x0 =~ x0; x1 &= x2; \
	x1 ^= x3; x3 |= x4; x4 ^= x2; \
	x2 ^= x3; x3 ^= x0; x0 |= x1; \
	x2 &= x0; x0 ^= x4; x4 ^= x3; \
	x3 &= x0; x4 ^= x1; \
	x2 ^= x4; x3 ^= x1; x4 |= x0; \
	x4 ^= x1;
/*
 * Inverse S-boxes SI0..SI7, used by serpent_decrypt().  Same bitsliced
 * scheme as S0..S7: x0..x3 are transformed in place with x4 as scratch,
 * and each box leaves its outputs in a box-specific register
 * permutation that the decryption call sites undo.  Statement order is
 * load-bearing — do not reorder.
 */
#define SI0(x0,x1,x2,x3,x4) \
	x4 = x3; x1 ^= x0; \
	x3 |= x1; x4 ^= x1; x0 =~ x0; \
	x2 ^= x3; x3 ^= x0; x0 &= x1; \
	x0 ^= x2; x2 &= x3; x3 ^= x4; \
	x2 ^= x3; x1 ^= x3; x3 &= x0; \
	x1 ^= x0; x0 ^= x2; x4 ^= x3;

#define SI1(x0,x1,x2,x3,x4) \
	x1 ^= x3; x4 = x0; \
	x0 ^= x2; x2 =~ x2; x4 |= x1; \
	x4 ^= x3; x3 &= x1; x1 ^= x2; \
	x2 &= x4; x4 ^= x1; x1 |= x3; \
	x3 ^= x0; x2 ^= x0; x0 |= x4; \
	x2 ^= x4; x1 ^= x0; \
	x4 ^= x1;

#define SI2(x0,x1,x2,x3,x4) \
	x2 ^= x1; x4 = x3; x3 =~ x3; \
	x3 |= x2; x2 ^= x4; x4 ^= x0; \
	x3 ^= x1; x1 |= x2; x2 ^= x0; \
	x1 ^= x4; x4 |= x3; x2 ^= x3; \
	x4 ^= x2; x2 &= x1; \
	x2 ^= x3; x3 ^= x4; x4 ^= x0;

#define SI3(x0,x1,x2,x3,x4) \
	x2 ^= x1; \
	x4 = x1; x1 &= x2; \
	x1 ^= x0; x0 |= x4; x4 ^= x3; \
	x0 ^= x3; x3 |= x1; x1 ^= x2; \
	x1 ^= x3; x0 ^= x2; x2 ^= x3; \
	x3 &= x1; x1 ^= x0; x0 &= x2; \
	x4 ^= x3; x3 ^= x0; x0 ^= x1;

#define SI4(x0,x1,x2,x3,x4) \
	x2 ^= x3; x4 = x0; x0 &= x1; \
	x0 ^= x2; x2 |= x3; x4 =~ x4; \
	x1 ^= x0; x0 ^= x2; x2 &= x4; \
	x2 ^= x0; x0 |= x4; \
	x0 ^= x3; x3 &= x2; \
	x4 ^= x3; x3 ^= x1; x1 &= x0; \
	x4 ^= x1; x0 ^= x3;

#define SI5(x0,x1,x2,x3,x4) \
	x4 = x1; x1 |= x2; \
	x2 ^= x4; x1 ^= x3; x3 &= x4; \
	x2 ^= x3; x3 |= x0; x0 =~ x0; \
	x3 ^= x2; x2 |= x0; x4 ^= x1; \
	x2 ^= x4; x4 &= x0; x0 ^= x1; \
	x1 ^= x3; x0 &= x2; x2 ^= x3; \
	x0 ^= x2; x2 ^= x4; x4 ^= x3;

#define SI6(x0,x1,x2,x3,x4) \
	x0 ^= x2; \
	x4 = x0; x0 &= x3; x2 ^= x3; \
	x0 ^= x2; x3 ^= x1; x2 |= x4; \
	x2 ^= x3; x3 &= x0; x0 =~ x0; \
	x3 ^= x1; x1 &= x2; x4 ^= x0; \
	x3 ^= x4; x4 ^= x2; x0 ^= x1; \
	x2 ^= x0;

#define SI7(x0,x1,x2,x3,x4) \
	x4 = x3; x3 &= x0; x0 ^= x2; \
	x2 |= x4; x4 ^= x1; x0 =~ x0; \
	x1 |= x3; x4 ^= x0; x0 &= x2; \
	x0 ^= x1; x1 &= x2; x3 ^= x2; \
	x4 ^= x3; x2 &= x3; x3 |= x0; \
	x1 ^= x4; x3 ^= x4; x4 &= x0; \
	x4 ^= x2;
/* Per-transform context: the fully expanded key (132 round-key words). */
struct serpent_ctx {
	u32 expkey[SERPENT_EXPKEY_WORDS];
};
  189. static int serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
  190. unsigned int keylen)
  191. {
  192. struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
  193. u32 *k = ctx->expkey;
  194. u8 *k8 = (u8 *)k;
  195. u32 r0,r1,r2,r3,r4;
  196. int i;
  197. /* Copy key, add padding */
  198. for (i = 0; i < keylen; ++i)
  199. k8[i] = key[i];
  200. if (i < SERPENT_MAX_KEY_SIZE)
  201. k8[i++] = 1;
  202. while (i < SERPENT_MAX_KEY_SIZE)
  203. k8[i++] = 0;
  204. /* Expand key using polynomial */
  205. r0 = le32_to_cpu(k[3]);
  206. r1 = le32_to_cpu(k[4]);
  207. r2 = le32_to_cpu(k[5]);
  208. r3 = le32_to_cpu(k[6]);
  209. r4 = le32_to_cpu(k[7]);
  210. keyiter(le32_to_cpu(k[0]),r0,r4,r2,0,0);
  211. keyiter(le32_to_cpu(k[1]),r1,r0,r3,1,1);
  212. keyiter(le32_to_cpu(k[2]),r2,r1,r4,2,2);
  213. keyiter(le32_to_cpu(k[3]),r3,r2,r0,3,3);
  214. keyiter(le32_to_cpu(k[4]),r4,r3,r1,4,4);
  215. keyiter(le32_to_cpu(k[5]),r0,r4,r2,5,5);
  216. keyiter(le32_to_cpu(k[6]),r1,r0,r3,6,6);
  217. keyiter(le32_to_cpu(k[7]),r2,r1,r4,7,7);
  218. keyiter(k[ 0],r3,r2,r0, 8, 8); keyiter(k[ 1],r4,r3,r1, 9, 9);
  219. keyiter(k[ 2],r0,r4,r2, 10, 10); keyiter(k[ 3],r1,r0,r3, 11, 11);
  220. keyiter(k[ 4],r2,r1,r4, 12, 12); keyiter(k[ 5],r3,r2,r0, 13, 13);
  221. keyiter(k[ 6],r4,r3,r1, 14, 14); keyiter(k[ 7],r0,r4,r2, 15, 15);
  222. keyiter(k[ 8],r1,r0,r3, 16, 16); keyiter(k[ 9],r2,r1,r4, 17, 17);
  223. keyiter(k[ 10],r3,r2,r0, 18, 18); keyiter(k[ 11],r4,r3,r1, 19, 19);
  224. keyiter(k[ 12],r0,r4,r2, 20, 20); keyiter(k[ 13],r1,r0,r3, 21, 21);
  225. keyiter(k[ 14],r2,r1,r4, 22, 22); keyiter(k[ 15],r3,r2,r0, 23, 23);
  226. keyiter(k[ 16],r4,r3,r1, 24, 24); keyiter(k[ 17],r0,r4,r2, 25, 25);
  227. keyiter(k[ 18],r1,r0,r3, 26, 26); keyiter(k[ 19],r2,r1,r4, 27, 27);
  228. keyiter(k[ 20],r3,r2,r0, 28, 28); keyiter(k[ 21],r4,r3,r1, 29, 29);
  229. keyiter(k[ 22],r0,r4,r2, 30, 30); keyiter(k[ 23],r1,r0,r3, 31, 31);
  230. k += 50;
  231. keyiter(k[-26],r2,r1,r4, 32,-18); keyiter(k[-25],r3,r2,r0, 33,-17);
  232. keyiter(k[-24],r4,r3,r1, 34,-16); keyiter(k[-23],r0,r4,r2, 35,-15);
  233. keyiter(k[-22],r1,r0,r3, 36,-14); keyiter(k[-21],r2,r1,r4, 37,-13);
  234. keyiter(k[-20],r3,r2,r0, 38,-12); keyiter(k[-19],r4,r3,r1, 39,-11);
  235. keyiter(k[-18],r0,r4,r2, 40,-10); keyiter(k[-17],r1,r0,r3, 41, -9);
  236. keyiter(k[-16],r2,r1,r4, 42, -8); keyiter(k[-15],r3,r2,r0, 43, -7);
  237. keyiter(k[-14],r4,r3,r1, 44, -6); keyiter(k[-13],r0,r4,r2, 45, -5);
  238. keyiter(k[-12],r1,r0,r3, 46, -4); keyiter(k[-11],r2,r1,r4, 47, -3);
  239. keyiter(k[-10],r3,r2,r0, 48, -2); keyiter(k[ -9],r4,r3,r1, 49, -1);
  240. keyiter(k[ -8],r0,r4,r2, 50, 0); keyiter(k[ -7],r1,r0,r3, 51, 1);
  241. keyiter(k[ -6],r2,r1,r4, 52, 2); keyiter(k[ -5],r3,r2,r0, 53, 3);
  242. keyiter(k[ -4],r4,r3,r1, 54, 4); keyiter(k[ -3],r0,r4,r2, 55, 5);
  243. keyiter(k[ -2],r1,r0,r3, 56, 6); keyiter(k[ -1],r2,r1,r4, 57, 7);
  244. keyiter(k[ 0],r3,r2,r0, 58, 8); keyiter(k[ 1],r4,r3,r1, 59, 9);
  245. keyiter(k[ 2],r0,r4,r2, 60, 10); keyiter(k[ 3],r1,r0,r3, 61, 11);
  246. keyiter(k[ 4],r2,r1,r4, 62, 12); keyiter(k[ 5],r3,r2,r0, 63, 13);
  247. keyiter(k[ 6],r4,r3,r1, 64, 14); keyiter(k[ 7],r0,r4,r2, 65, 15);
  248. keyiter(k[ 8],r1,r0,r3, 66, 16); keyiter(k[ 9],r2,r1,r4, 67, 17);
  249. keyiter(k[ 10],r3,r2,r0, 68, 18); keyiter(k[ 11],r4,r3,r1, 69, 19);
  250. keyiter(k[ 12],r0,r4,r2, 70, 20); keyiter(k[ 13],r1,r0,r3, 71, 21);
  251. keyiter(k[ 14],r2,r1,r4, 72, 22); keyiter(k[ 15],r3,r2,r0, 73, 23);
  252. keyiter(k[ 16],r4,r3,r1, 74, 24); keyiter(k[ 17],r0,r4,r2, 75, 25);
  253. keyiter(k[ 18],r1,r0,r3, 76, 26); keyiter(k[ 19],r2,r1,r4, 77, 27);
  254. keyiter(k[ 20],r3,r2,r0, 78, 28); keyiter(k[ 21],r4,r3,r1, 79, 29);
  255. keyiter(k[ 22],r0,r4,r2, 80, 30); keyiter(k[ 23],r1,r0,r3, 81, 31);
  256. k += 50;
  257. keyiter(k[-26],r2,r1,r4, 82,-18); keyiter(k[-25],r3,r2,r0, 83,-17);
  258. keyiter(k[-24],r4,r3,r1, 84,-16); keyiter(k[-23],r0,r4,r2, 85,-15);
  259. keyiter(k[-22],r1,r0,r3, 86,-14); keyiter(k[-21],r2,r1,r4, 87,-13);
  260. keyiter(k[-20],r3,r2,r0, 88,-12); keyiter(k[-19],r4,r3,r1, 89,-11);
  261. keyiter(k[-18],r0,r4,r2, 90,-10); keyiter(k[-17],r1,r0,r3, 91, -9);
  262. keyiter(k[-16],r2,r1,r4, 92, -8); keyiter(k[-15],r3,r2,r0, 93, -7);
  263. keyiter(k[-14],r4,r3,r1, 94, -6); keyiter(k[-13],r0,r4,r2, 95, -5);
  264. keyiter(k[-12],r1,r0,r3, 96, -4); keyiter(k[-11],r2,r1,r4, 97, -3);
  265. keyiter(k[-10],r3,r2,r0, 98, -2); keyiter(k[ -9],r4,r3,r1, 99, -1);
  266. keyiter(k[ -8],r0,r4,r2,100, 0); keyiter(k[ -7],r1,r0,r3,101, 1);
  267. keyiter(k[ -6],r2,r1,r4,102, 2); keyiter(k[ -5],r3,r2,r0,103, 3);
  268. keyiter(k[ -4],r4,r3,r1,104, 4); keyiter(k[ -3],r0,r4,r2,105, 5);
  269. keyiter(k[ -2],r1,r0,r3,106, 6); keyiter(k[ -1],r2,r1,r4,107, 7);
  270. keyiter(k[ 0],r3,r2,r0,108, 8); keyiter(k[ 1],r4,r3,r1,109, 9);
  271. keyiter(k[ 2],r0,r4,r2,110, 10); keyiter(k[ 3],r1,r0,r3,111, 11);
  272. keyiter(k[ 4],r2,r1,r4,112, 12); keyiter(k[ 5],r3,r2,r0,113, 13);
  273. keyiter(k[ 6],r4,r3,r1,114, 14); keyiter(k[ 7],r0,r4,r2,115, 15);
  274. keyiter(k[ 8],r1,r0,r3,116, 16); keyiter(k[ 9],r2,r1,r4,117, 17);
  275. keyiter(k[ 10],r3,r2,r0,118, 18); keyiter(k[ 11],r4,r3,r1,119, 19);
  276. keyiter(k[ 12],r0,r4,r2,120, 20); keyiter(k[ 13],r1,r0,r3,121, 21);
  277. keyiter(k[ 14],r2,r1,r4,122, 22); keyiter(k[ 15],r3,r2,r0,123, 23);
  278. keyiter(k[ 16],r4,r3,r1,124, 24); keyiter(k[ 17],r0,r4,r2,125, 25);
  279. keyiter(k[ 18],r1,r0,r3,126, 26); keyiter(k[ 19],r2,r1,r4,127, 27);
  280. keyiter(k[ 20],r3,r2,r0,128, 28); keyiter(k[ 21],r4,r3,r1,129, 29);
  281. keyiter(k[ 22],r0,r4,r2,130, 30); keyiter(k[ 23],r1,r0,r3,131, 31);
  282. /* Apply S-boxes */
  283. S3(r3,r4,r0,r1,r2); storekeys(r1,r2,r4,r3, 28); loadkeys(r1,r2,r4,r3, 24);
  284. S4(r1,r2,r4,r3,r0); storekeys(r2,r4,r3,r0, 24); loadkeys(r2,r4,r3,r0, 20);
  285. S5(r2,r4,r3,r0,r1); storekeys(r1,r2,r4,r0, 20); loadkeys(r1,r2,r4,r0, 16);
  286. S6(r1,r2,r4,r0,r3); storekeys(r4,r3,r2,r0, 16); loadkeys(r4,r3,r2,r0, 12);
  287. S7(r4,r3,r2,r0,r1); storekeys(r1,r2,r0,r4, 12); loadkeys(r1,r2,r0,r4, 8);
  288. S0(r1,r2,r0,r4,r3); storekeys(r0,r2,r4,r1, 8); loadkeys(r0,r2,r4,r1, 4);
  289. S1(r0,r2,r4,r1,r3); storekeys(r3,r4,r1,r0, 4); loadkeys(r3,r4,r1,r0, 0);
  290. S2(r3,r4,r1,r0,r2); storekeys(r2,r4,r3,r0, 0); loadkeys(r2,r4,r3,r0, -4);
  291. S3(r2,r4,r3,r0,r1); storekeys(r0,r1,r4,r2, -4); loadkeys(r0,r1,r4,r2, -8);
  292. S4(r0,r1,r4,r2,r3); storekeys(r1,r4,r2,r3, -8); loadkeys(r1,r4,r2,r3,-12);
  293. S5(r1,r4,r2,r3,r0); storekeys(r0,r1,r4,r3,-12); loadkeys(r0,r1,r4,r3,-16);
  294. S6(r0,r1,r4,r3,r2); storekeys(r4,r2,r1,r3,-16); loadkeys(r4,r2,r1,r3,-20);
  295. S7(r4,r2,r1,r3,r0); storekeys(r0,r1,r3,r4,-20); loadkeys(r0,r1,r3,r4,-24);
  296. S0(r0,r1,r3,r4,r2); storekeys(r3,r1,r4,r0,-24); loadkeys(r3,r1,r4,r0,-28);
  297. k -= 50;
  298. S1(r3,r1,r4,r0,r2); storekeys(r2,r4,r0,r3, 22); loadkeys(r2,r4,r0,r3, 18);
  299. S2(r2,r4,r0,r3,r1); storekeys(r1,r4,r2,r3, 18); loadkeys(r1,r4,r2,r3, 14);
  300. S3(r1,r4,r2,r3,r0); storekeys(r3,r0,r4,r1, 14); loadkeys(r3,r0,r4,r1, 10);
  301. S4(r3,r0,r4,r1,r2); storekeys(r0,r4,r1,r2, 10); loadkeys(r0,r4,r1,r2, 6);
  302. S5(r0,r4,r1,r2,r3); storekeys(r3,r0,r4,r2, 6); loadkeys(r3,r0,r4,r2, 2);
  303. S6(r3,r0,r4,r2,r1); storekeys(r4,r1,r0,r2, 2); loadkeys(r4,r1,r0,r2, -2);
  304. S7(r4,r1,r0,r2,r3); storekeys(r3,r0,r2,r4, -2); loadkeys(r3,r0,r2,r4, -6);
  305. S0(r3,r0,r2,r4,r1); storekeys(r2,r0,r4,r3, -6); loadkeys(r2,r0,r4,r3,-10);
  306. S1(r2,r0,r4,r3,r1); storekeys(r1,r4,r3,r2,-10); loadkeys(r1,r4,r3,r2,-14);
  307. S2(r1,r4,r3,r2,r0); storekeys(r0,r4,r1,r2,-14); loadkeys(r0,r4,r1,r2,-18);
  308. S3(r0,r4,r1,r2,r3); storekeys(r2,r3,r4,r0,-18); loadkeys(r2,r3,r4,r0,-22);
  309. k -= 50;
  310. S4(r2,r3,r4,r0,r1); storekeys(r3,r4,r0,r1, 28); loadkeys(r3,r4,r0,r1, 24);
  311. S5(r3,r4,r0,r1,r2); storekeys(r2,r3,r4,r1, 24); loadkeys(r2,r3,r4,r1, 20);
  312. S6(r2,r3,r4,r1,r0); storekeys(r4,r0,r3,r1, 20); loadkeys(r4,r0,r3,r1, 16);
  313. S7(r4,r0,r3,r1,r2); storekeys(r2,r3,r1,r4, 16); loadkeys(r2,r3,r1,r4, 12);
  314. S0(r2,r3,r1,r4,r0); storekeys(r1,r3,r4,r2, 12); loadkeys(r1,r3,r4,r2, 8);
  315. S1(r1,r3,r4,r2,r0); storekeys(r0,r4,r2,r1, 8); loadkeys(r0,r4,r2,r1, 4);
  316. S2(r0,r4,r2,r1,r3); storekeys(r3,r4,r0,r1, 4); loadkeys(r3,r4,r0,r1, 0);
  317. S3(r3,r4,r0,r1,r2); storekeys(r1,r2,r4,r3, 0);
  318. return 0;
  319. }
/*
 * Encrypt one 16-byte block: load four little-endian words, run the
 * 32 rounds (S-box + linear transform, with a final key mixing instead
 * of a last linear transform), store the result little-endian.  The
 * register arguments of each S/LK pair encode the output permutation
 * left by the previous S-box.
 */
static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
	const u32 *k = ctx->expkey;
	const __le32 *s = (const __le32 *)src;
	__le32 *d = (__le32 *)dst;
	u32 r0, r1, r2, r3, r4;

	/*
	 * Note: The conversions between u8* and u32* might cause trouble
	 * on architectures with stricter alignment rules than x86
	 * (cra_alignmask = 3 tells the crypto core to align buffers).
	 */
	r0 = le32_to_cpu(s[0]);
	r1 = le32_to_cpu(s[1]);
	r2 = le32_to_cpu(s[2]);
	r3 = le32_to_cpu(s[3]);

	K(r0,r1,r2,r3,0);
	S0(r0,r1,r2,r3,r4);	LK(r2,r1,r3,r0,r4,1);
	S1(r2,r1,r3,r0,r4);	LK(r4,r3,r0,r2,r1,2);
	S2(r4,r3,r0,r2,r1);	LK(r1,r3,r4,r2,r0,3);
	S3(r1,r3,r4,r2,r0);	LK(r2,r0,r3,r1,r4,4);
	S4(r2,r0,r3,r1,r4);	LK(r0,r3,r1,r4,r2,5);
	S5(r0,r3,r1,r4,r2);	LK(r2,r0,r3,r4,r1,6);
	S6(r2,r0,r3,r4,r1);	LK(r3,r1,r0,r4,r2,7);
	S7(r3,r1,r0,r4,r2);	LK(r2,r0,r4,r3,r1,8);
	S0(r2,r0,r4,r3,r1);	LK(r4,r0,r3,r2,r1,9);
	S1(r4,r0,r3,r2,r1);	LK(r1,r3,r2,r4,r0,10);
	S2(r1,r3,r2,r4,r0);	LK(r0,r3,r1,r4,r2,11);
	S3(r0,r3,r1,r4,r2);	LK(r4,r2,r3,r0,r1,12);
	S4(r4,r2,r3,r0,r1);	LK(r2,r3,r0,r1,r4,13);
	S5(r2,r3,r0,r1,r4);	LK(r4,r2,r3,r1,r0,14);
	S6(r4,r2,r3,r1,r0);	LK(r3,r0,r2,r1,r4,15);
	S7(r3,r0,r2,r1,r4);	LK(r4,r2,r1,r3,r0,16);
	S0(r4,r2,r1,r3,r0);	LK(r1,r2,r3,r4,r0,17);
	S1(r1,r2,r3,r4,r0);	LK(r0,r3,r4,r1,r2,18);
	S2(r0,r3,r4,r1,r2);	LK(r2,r3,r0,r1,r4,19);
	S3(r2,r3,r0,r1,r4);	LK(r1,r4,r3,r2,r0,20);
	S4(r1,r4,r3,r2,r0);	LK(r4,r3,r2,r0,r1,21);
	S5(r4,r3,r2,r0,r1);	LK(r1,r4,r3,r0,r2,22);
	S6(r1,r4,r3,r0,r2);	LK(r3,r2,r4,r0,r1,23);
	S7(r3,r2,r4,r0,r1);	LK(r1,r4,r0,r3,r2,24);
	S0(r1,r4,r0,r3,r2);	LK(r0,r4,r3,r1,r2,25);
	S1(r0,r4,r3,r1,r2);	LK(r2,r3,r1,r0,r4,26);
	S2(r2,r3,r1,r0,r4);	LK(r4,r3,r2,r0,r1,27);
	S3(r4,r3,r2,r0,r1);	LK(r0,r1,r3,r4,r2,28);
	S4(r0,r1,r3,r4,r2);	LK(r1,r3,r4,r2,r0,29);
	S5(r1,r3,r4,r2,r0);	LK(r0,r1,r3,r2,r4,30);
	S6(r0,r1,r3,r2,r4);	LK(r3,r4,r1,r2,r0,31);
	S7(r3,r4,r1,r2,r0);	K(r0,r1,r2,r3,32);

	d[0] = cpu_to_le32(r0);
	d[1] = cpu_to_le32(r1);
	d[2] = cpu_to_le32(r2);
	d[3] = cpu_to_le32(r3);
}
  374. static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  375. {
  376. struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
  377. const u32
  378. *k = ((struct serpent_ctx *)ctx)->expkey;
  379. const __le32 *s = (const __le32 *)src;
  380. __le32 *d = (__le32 *)dst;
  381. u32 r0, r1, r2, r3, r4;
  382. r0 = le32_to_cpu(s[0]);
  383. r1 = le32_to_cpu(s[1]);
  384. r2 = le32_to_cpu(s[2]);
  385. r3 = le32_to_cpu(s[3]);
  386. K(r0,r1,r2,r3,32);
  387. SI7(r0,r1,r2,r3,r4); KL(r1,r3,r0,r4,r2,31);
  388. SI6(r1,r3,r0,r4,r2); KL(r0,r2,r4,r1,r3,30);
  389. SI5(r0,r2,r4,r1,r3); KL(r2,r3,r0,r4,r1,29);
  390. SI4(r2,r3,r0,r4,r1); KL(r2,r0,r1,r4,r3,28);
  391. SI3(r2,r0,r1,r4,r3); KL(r1,r2,r3,r4,r0,27);
  392. SI2(r1,r2,r3,r4,r0); KL(r2,r0,r4,r3,r1,26);
  393. SI1(r2,r0,r4,r3,r1); KL(r1,r0,r4,r3,r2,25);
  394. SI0(r1,r0,r4,r3,r2); KL(r4,r2,r0,r1,r3,24);
  395. SI7(r4,r2,r0,r1,r3); KL(r2,r1,r4,r3,r0,23);
  396. SI6(r2,r1,r4,r3,r0); KL(r4,r0,r3,r2,r1,22);
  397. SI5(r4,r0,r3,r2,r1); KL(r0,r1,r4,r3,r2,21);
  398. SI4(r0,r1,r4,r3,r2); KL(r0,r4,r2,r3,r1,20);
  399. SI3(r0,r4,r2,r3,r1); KL(r2,r0,r1,r3,r4,19);
  400. SI2(r2,r0,r1,r3,r4); KL(r0,r4,r3,r1,r2,18);
  401. SI1(r0,r4,r3,r1,r2); KL(r2,r4,r3,r1,r0,17);
  402. SI0(r2,r4,r3,r1,r0); KL(r3,r0,r4,r2,r1,16);
  403. SI7(r3,r0,r4,r2,r1); KL(r0,r2,r3,r1,r4,15);
  404. SI6(r0,r2,r3,r1,r4); KL(r3,r4,r1,r0,r2,14);
  405. SI5(r3,r4,r1,r0,r2); KL(r4,r2,r3,r1,r0,13);
  406. SI4(r4,r2,r3,r1,r0); KL(r4,r3,r0,r1,r2,12);
  407. SI3(r4,r3,r0,r1,r2); KL(r0,r4,r2,r1,r3,11);
  408. SI2(r0,r4,r2,r1,r3); KL(r4,r3,r1,r2,r0,10);
  409. SI1(r4,r3,r1,r2,r0); KL(r0,r3,r1,r2,r4,9);
  410. SI0(r0,r3,r1,r2,r4); KL(r1,r4,r3,r0,r2,8);
  411. SI7(r1,r4,r3,r0,r2); KL(r4,r0,r1,r2,r3,7);
  412. SI6(r4,r0,r1,r2,r3); KL(r1,r3,r2,r4,r0,6);
  413. SI5(r1,r3,r2,r4,r0); KL(r3,r0,r1,r2,r4,5);
  414. SI4(r3,r0,r1,r2,r4); KL(r3,r1,r4,r2,r0,4);
  415. SI3(r3,r1,r4,r2,r0); KL(r4,r3,r0,r2,r1,3);
  416. SI2(r4,r3,r0,r2,r1); KL(r3,r1,r2,r0,r4,2);
  417. SI1(r3,r1,r2,r0,r4); KL(r4,r1,r2,r0,r3,1);
  418. SI0(r4,r1,r2,r0,r3); K(r2,r3,r1,r4,0);
  419. d[0] = cpu_to_le32(r2);
  420. d[1] = cpu_to_le32(r3);
  421. d[2] = cpu_to_le32(r1);
  422. d[3] = cpu_to_le32(r4);
  423. }
/* "serpent": standard Serpent, little-endian data and key order. */
static struct crypto_alg serpent_alg = {
	.cra_name		= "serpent",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 3,	/* serpent_*crypt() casts u8* to u32* */
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(serpent_alg.cra_list),
	.cra_u			= { .cipher = {
	.cia_min_keysize	= SERPENT_MIN_KEY_SIZE,
	.cia_max_keysize	= SERPENT_MAX_KEY_SIZE,
	.cia_setkey		= serpent_setkey,
	.cia_encrypt		= serpent_encrypt,
	.cia_decrypt		= serpent_decrypt } }
};
  439. static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key,
  440. unsigned int keylen)
  441. {
  442. u8 rev_key[SERPENT_MAX_KEY_SIZE];
  443. int i;
  444. for (i = 0; i < keylen; ++i)
  445. rev_key[keylen - i - 1] = key[i];
  446. return serpent_setkey(tfm, rev_key, keylen);
  447. }
  448. static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  449. {
  450. const u32 * const s = (const u32 * const)src;
  451. u32 * const d = (u32 * const)dst;
  452. u32 rs[4], rd[4];
  453. rs[0] = swab32(s[3]);
  454. rs[1] = swab32(s[2]);
  455. rs[2] = swab32(s[1]);
  456. rs[3] = swab32(s[0]);
  457. serpent_encrypt(tfm, (u8 *)rd, (u8 *)rs);
  458. d[0] = swab32(rd[3]);
  459. d[1] = swab32(rd[2]);
  460. d[2] = swab32(rd[1]);
  461. d[3] = swab32(rd[0]);
  462. }
  463. static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
  464. {
  465. const u32 * const s = (const u32 * const)src;
  466. u32 * const d = (u32 * const)dst;
  467. u32 rs[4], rd[4];
  468. rs[0] = swab32(s[3]);
  469. rs[1] = swab32(s[2]);
  470. rs[2] = swab32(s[1]);
  471. rs[3] = swab32(s[0]);
  472. serpent_decrypt(tfm, (u8 *)rd, (u8 *)rs);
  473. d[0] = swab32(rd[3]);
  474. d[1] = swab32(rd[2]);
  475. d[2] = swab32(rd[1]);
  476. d[3] = swab32(rd[0]);
  477. }
  478. static struct crypto_alg tnepres_alg = {
  479. .cra_name = "tnepres",
  480. .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
  481. .cra_blocksize = SERPENT_BLOCK_SIZE,
  482. .cra_ctxsize = sizeof(struct serpent_ctx),
  483. .cra_alignmask = 3,
  484. .cra_module = THIS_MODULE,
  485. .cra_list = LIST_HEAD_INIT(serpent_alg.cra_list),
  486. .cra_u = { .cipher = {
  487. .cia_min_keysize = SERPENT_MIN_KEY_SIZE,
  488. .cia_max_keysize = SERPENT_MAX_KEY_SIZE,
  489. .cia_setkey = tnepres_setkey,
  490. .cia_encrypt = tnepres_encrypt,
  491. .cia_decrypt = tnepres_decrypt } }
  492. };
  493. static int __init serpent_mod_init(void)
  494. {
  495. int ret = crypto_register_alg(&serpent_alg);
  496. if (ret)
  497. return ret;
  498. ret = crypto_register_alg(&tnepres_alg);
  499. if (ret)
  500. crypto_unregister_alg(&serpent_alg);
  501. return ret;
  502. }
/* Unregister both algorithms on module unload. */
static void __exit serpent_mod_fini(void)
{
	crypto_unregister_alg(&tnepres_alg);
	crypto_unregister_alg(&serpent_alg);
}
module_init(serpent_mod_init);
module_exit(serpent_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Serpent and tnepres (kerneli compatible serpent reversed) Cipher Algorithm");
MODULE_AUTHOR("Dag Arne Osvik <osvik@ii.uib.no>");
MODULE_ALIAS("tnepres");