caamalg_desc.c 47 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392
  1. /*
  2. * Shared descriptors for aead, ablkcipher algorithms
  3. *
  4. * Copyright 2016 NXP
  5. */
  6. #include "compat.h"
  7. #include "desc_constr.h"
  8. #include "caamalg_desc.h"
  9. /*
  10. * For aead functions, read payload and write payload,
  11. * both of which are specified in req->src and req->dst
  12. */
  13. static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
  14. {
  15. append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
  16. append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
  17. KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
  18. }
  19. /* Set DK bit in class 1 operation if shared */
  20. static inline void append_dec_op1(u32 *desc, u32 type)
  21. {
  22. u32 *jump_cmd, *uncond_jump_cmd;
  23. /* DK bit is valid only for AES */
  24. if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
  25. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  26. OP_ALG_DECRYPT);
  27. return;
  28. }
  29. jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
  30. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  31. OP_ALG_DECRYPT);
  32. uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
  33. set_jump_tgt_here(desc, jump_cmd);
  34. append_operation(desc, type | OP_ALG_AS_INITFINAL |
  35. OP_ALG_DECRYPT | OP_ALG_AAI_DK);
  36. set_jump_tgt_here(desc, uncond_jump_cmd);
  37. }
/**
 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
 *                               (non-protocol) with no (null) encryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is already loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (adata->key_inline)
		append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
				  adata->keylen, CLASS_2 | KEY_DEST_MDHA_SPLIT |
				  KEY_ENC);
	else
		append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Patch the self-modifying MOVE commands appended above */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Pass the (unmodified) input data through to the output FIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
/**
 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
 *                               (non-protocol) with no (null) decryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is already loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (adata->key_inline)
		append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
				  adata->keylen, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation - ICV is checked against the computed MAC */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	/* Patch the self-modifying MOVE commands appended above */
	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Pass the (unmodified) input data through to the output FIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
/*
 * init_sh_desc_key_aead - initialize a shared descriptor header and load the
 * authentication (class 2, MDHA split) and encryption (class 1) keys, skipping
 * the key loads when the descriptor is shared (keys already present).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 * @adata: pointer to authentication transform definitions (split key)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce (only used when is_rfc3686 is true)
 */
static void init_sh_desc_key_aead(u32 * const desc,
				  struct alginfo * const cdata,
				  struct alginfo * const adata,
				  const bool is_rfc3686, u32 *nonce)
{
	u32 *key_jump_cmd;
	unsigned int enckeylen = cdata->keylen;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * RFC3686 specific:
	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 * so the trailing nonce is not loaded as part of the class 1 key.
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (adata->key_inline)
		append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
				  adata->keylen, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);

	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		/* Route the nonce through the output FIFO into CONTEXT1 */
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);
}
/**
 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool is_rfc3686,
			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/*
		 * NOTE(review): jump waits on CALM/NCP/NOP/NIP/NIFP,
		 * presumably so the assoclen load above settles before the
		 * IV is loaded - confirm against the SEC reference manual.
		 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Load IV into CONTEXT1 at the caller-specified offset */
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data on the output side */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
/**
 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @geniv: whether the IV was HW-generated at encapsulation time (givencrypt);
 *         changes how the IV is loaded and how seqoutlen is accounted
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool geniv,
			    const bool is_rfc3686, u32 *nonce,
			    const u32 ctx1_iv_off, const bool is_qi)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);

	/* Class 2 operation - ICV is checked against the computed MAC */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/*
		 * NOTE(review): jump waits on CALM/NCP/NOP/NIP/NIFP,
		 * presumably so the assoclen load above settles before the
		 * IV is loaded - confirm against the SEC reference manual.
		 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		if (!geniv)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	if (geniv)
		/* Output also carries the IV: account for ivsize extra bytes */
		append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
	else
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data on the output side */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (geniv) {
		/* Load IV into CONTEXT1, then feed it to class 2 (auth) */
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation - DK bit only needed when IV sits at offset 0 */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
/**
 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
 *                             (non-protocol) with HW-generated initialization
 *                             vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions. Note that since a
 *         split key is to be used, the size of the split key itself is
 *         specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
 *         SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 *
 * Note: Requires an MDHA split key.
 */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
			       struct alginfo *adata, unsigned int ivsize,
			       unsigned int icvsize, const bool is_rfc3686,
			       u32 *nonce, const u32 ctx1_iv_off,
			       const bool is_qi)
{
	u32 geniv, moveiv;
	u32 *wait_cmd;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		/*
		 * NOTE(review): jump waits on CALM/NCP/NOP/NIP/NIFP,
		 * presumably so the assoclen load above settles before
		 * continuing - confirm against the SEC reference manual.
		 */
		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);
	}

	/* rfc3686: IV comes from the request, not the RNG */
	if (is_rfc3686) {
		if (is_qi)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));
		goto copy_iv;
	}

	/* Generate IV (ivsize random pad bytes) into CONTEXT1 */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV from class 1 context to the output FIFO */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Class 2 operation (authentication) */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data on the output side */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* No need to reload the IV - skip over it in the input sequence */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * Wait for IV transfer (ofifo -> class2) to finish before starting
	 * ciphertext transfer (ofifo -> external memory).
	 */
	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
	set_jump_tgt_here(desc, wait_cmd);

	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead givenc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
/**
 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
			   unsigned int icvsize)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
	    *zero_assoc_jump_cmd2;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data on the output side */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* jump over the zero-payload commands (local offset 2) */
	append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	/* write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
			   unsigned int icvsize)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation - ICV is checked against the computed tag */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data on the output side */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Builds the rfc4106 (AES-GCM in ESP) encrypt descriptor.
 * NOTE(review): REG3 is assumed to hold assoclen *including* the 8-byte
 * IV (the descriptor subtracts 8 and later skips 8 bytes) - confirm
 * against the caller.
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* assoc data to read = assoclen - 8 (IV handled separately) */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV (computed tag lives in CONTEXT1) */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Builds the rfc4106 (AES-GCM in ESP) decrypt descriptor with ICV
 * verification in hardware.
 * NOTE(review): as in the encap variant, REG3 is assumed to hold
 * assoclen including the 8-byte IV - confirm against the caller.
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 |
				  KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoc data to read = assoclen - 8 (IV handled separately) */
	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV - hardware verifies it against the computed tag */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Builds the rfc4543 (GMAC) encrypt descriptor. GMAC authenticates the
 * whole input; the payload must also be copied to the output unchanged,
 * which is done via a self-patching MOVE (see comment below).
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer. The two MOVE commands below have their target offsets
	 * patched in at set_move_tgt_here() time, so their position in
	 * the descriptor must not change.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);

	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV (computed tag lives in CONTEXT1) */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 *
 * Builds the rfc4543 (GMAC) decrypt descriptor with hardware ICV check.
 * Mirrors the encap variant: the in-snooped payload is passed through to
 * the output via a self-patching MOVE (see comment below).
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int icvsize)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer. The two MOVE commands below have their target offsets
	 * patched in at set_move_tgt_here() time, so their position in
	 * the descriptor must not change.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);

	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV - hardware verifies it against the computed tag */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst.
 *
 * Sets both variable sequence lengths from SEQINLEN (input and output
 * are the same size for a block cipher), then reads the whole payload
 * into class 1 and stores the processed message data.
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
/**
 * cnstr_shdsc_ablkcipher_encap - ablkcipher encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 *
 * For rfc3686, the nonce is expected to be stored immediately after the
 * key in cdata->key_virt (hence nonce = key_virt + keylen).
 */
void cnstr_shdsc_ablkcipher_encap(u32 * const desc, struct alginfo *cdata,
				  unsigned int ivsize, const bool is_rfc3686,
				  const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg (bounced through the OFIFO) */
	if (is_rfc3686) {
		u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load iv */
	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg (initial block counter = 1) */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher enc shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_encap);
/**
 * cnstr_shdsc_ablkcipher_decap - ablkcipher decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 *
 * Mirrors cnstr_shdsc_ablkcipher_encap() but selects the decrypt
 * direction. For rfc3686, the nonce is expected to be stored immediately
 * after the key in cdata->key_virt.
 */
void cnstr_shdsc_ablkcipher_decap(u32 * const desc, struct alginfo *cdata,
				  unsigned int ivsize, const bool is_rfc3686,
				  const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg (bounced through the OFIFO) */
	if (is_rfc3686) {
		u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* load IV */
	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg (initial block counter = 1) */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/*
	 * Choose operation: a non-zero ctx1_iv_off indicates CTR mode,
	 * where decrypt == encrypt; otherwise use the dedicated decrypt
	 * operation helper.
	 */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher dec shdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_decap);
/**
 * cnstr_shdsc_ablkcipher_givencap - ablkcipher encapsulation shared descriptor
 *                                   with HW-generated initialization vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC.
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 *
 * Like cnstr_shdsc_ablkcipher_encap(), but the IV is generated by the
 * hardware RNG (via an info-FIFO pad entry) instead of being read from
 * the input sequence, and the generated IV is stored back to memory for
 * the caller.
 */
void cnstr_shdsc_ablkcipher_givencap(u32 * const desc, struct alginfo *cdata,
				     unsigned int ivsize, const bool is_rfc3686,
				     const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd, geniv;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg (bounced through the OFIFO) */
	if (is_rfc3686) {
		u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Generate IV: random pad entry routed into the info FIFO */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 | NFIFOENTRY_PTYPE_RND |
		(ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	/* Manually move the random bytes into CONTEXT1 at the IV offset */
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP | MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX | (ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
			 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg (initial block counter = 1) */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/*
	 * NOTE(review): this jump appears to wait for the prior context
	 * load to complete (JUMP_COND_NCP) before starting the operation
	 * in CTR mode - confirm against the SEC reference manual.
	 */
	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_givencap);
/**
 * cnstr_shdsc_xts_ablkcipher_encap - xts ablkcipher encapsulation shared
 *                                    descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 *
 * The XTS key material (both keys) is expected inlined in
 * cdata->key_virt; the sector index is taken from the upper 8 bytes of
 * the IV supplied in the input sequence.
 */
void cnstr_shdsc_xts_ablkcipher_encap(u32 * const desc, struct alginfo *cdata)
{
	/*
	 * Set sector size to a big value, practically disabling
	 * sector size segmentation in xts implementation. We cannot
	 * take full advantage of this HW feature with existing
	 * crypto API / dm-crypt SW architecture.
	 */
	__be64 sector_size = cpu_to_be64(BIT(15));
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_encap);
/**
 * cnstr_shdsc_xts_ablkcipher_decap - xts ablkcipher decapsulation shared
 *                                    descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 *
 * Mirrors cnstr_shdsc_xts_ablkcipher_encap() but selects the decrypt
 * direction via append_dec_op1().
 */
void cnstr_shdsc_xts_ablkcipher_decap(u32 * const desc, struct alginfo *cdata)
{
	/*
	 * Set sector size to a big value, practically disabling
	 * sector size segmentation in xts implementation. We cannot
	 * take full advantage of this HW feature with existing
	 * crypto API / dm-crypt SW architecture.
	 */
	__be64 sector_size = cpu_to_be64(BIT(15));
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif
}
EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_decap);
/* Module metadata */
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM descriptor support");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");