caamalg.c

/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |-----------|
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
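
/*
 * A minimal sketch (not a complete flow from this driver) of how such a
 * job descriptor is assembled with the desc_constr.h helpers used below,
 * given an already-constructed shared descriptor at sh_desc/sh_desc_dma:
 *
 *	len = desc_len(sh_desc);
 *	init_job_desc_shared(desc, sh_desc_dma, len,
 *			     HDR_SHARE_DEFER | HDR_REVERSE);
 *	append_seq_in_ptr(desc, src_dma, in_len, in_options);
 *	append_seq_out_ptr(desc, dst_dma, out_len, out_options);
 *
 * init_aead_job() and init_ablkcipher_job() below follow exactly this
 * pattern, with in_options/out_options set to LDST_SGF whenever the
 * buffer is described by a scatter/gather (sec4_sg) table.
 */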
#include "compat.h"
#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
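
/*
 * Worked example of the sizing above (assuming CAAM_CMD_SZ == 4, i.e. one
 * 32-bit descriptor command word, and a 64-word descriptor buffer, so
 * CAAM_DESC_BYTES_MAX == 256): DESC_MAX_USED_BYTES is the room left for a
 * shared descriptor once the job descriptor's I/O commands are accounted
 * for. The rem_bytes checks below compare this remaining space against the
 * precomputed DESC_*_LEN of each shared-descriptor template to decide
 * whether key material can be inlined into the descriptor or must instead
 * be referenced by DMA address.
 */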
#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif

static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	dma_addr_t key_dma;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};
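
/*
 * For authenc algorithms the key[] buffer above holds two concatenated
 * items, and the offsets used throughout this file follow that layout:
 *
 *	key[0]                          key[adata.keylen_pad]
 *	 |                               |
 *	 [ split authentication key ... ][ encryption key (cdata.keylen) ]
 *
 * For rfc3686 a 4-byte nonce trails the encryption key; for rfc4106/4543
 * a 4-byte salt trails the AES key. This is why cdata.key_virt is set to
 * ctx->key + ctx->adata.keylen_pad in the setkey/set_sh_desc code below.
 */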
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_givenc:
	return 0;
}
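
/*
 * desc_inline_query(), used above, reports per data item passed in
 * data_len[] whether it still fits inline in the remaining descriptor
 * space: in the returned mask, bit 0 covers data_len[0] (the split
 * authentication key) and bit 1 covers data_len[1] (the encryption key).
 * For example, inl_mask == 3 means both keys are inlined, while
 * inl_mask == 1 means only the auth key is inlined and the cipher key is
 * referenced through key_dma.
 */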
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* append encryption key after the auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, DMA_TO_DEVICE);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

	ctx->cdata.keylen = keys.enckeylen;
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);
	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);
	return rfc4543_set_sh_desc(aead);
}
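
/*
 * For rfc4106/rfc4543 the key handed to the setkey handlers above is laid
 * out as {AES key, 4-byte salt}; the salt stays at ctx->key +
 * cdata.keylen and is later appended to the job descriptor by
 * init_gcm_job(), so the hardware sees the full 12-byte GCM IV of
 * {salt, 8-byte per-request IV}.
 */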
static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

	memcpy(ctx->key, key, keylen);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;
	cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
					ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}
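
/*
 * Example: for xts(aes) the key passed in is two concatenated AES keys
 * (the data-unit key and the tweak key), so the only lengths accepted
 * above are 2 * AES_MIN_KEY_SIZE (32 bytes) and 2 * AES_MAX_KEY_SIZE
 * (64 bytes).
 */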
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @iv_dir: DMA mapping direction for IV
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	enum dma_data_direction iv_dir;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
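
/*
 * Both extended descriptors are carved out of a single allocation; for
 * the ablkcipher case the layout (see the IV recovery in
 * ablkcipher_encrypt_done() below) is:
 *
 *	[ struct ablkcipher_edesc | hw_desc[] | sec4_sg table | IV ]
 *
 * which is why the IV pointer is recomputed there as
 * hw_desc + desc_bytes(hw_desc) + sec4_sg_bytes.
 */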
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize,
		       enum dma_data_direction iv_dir, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, iv_dir);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0, DMA_NONE,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, edesc->iv_dir,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->nbytes, 1);

	ablkcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->info) to the last
	 * ciphertext block when running in CBC mode.
	 */
	if ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == OP_ALG_AAI_CBC)
		scatterwalk_map_and_copy(req->info, req->dst, req->nbytes -
					 ivsize, ivsize, 0);

	/* In case initial IV was generated, copy it in GIVCIPHER request */
	if (edesc->iv_dir == DMA_FROM_DEVICE) {
		u8 *iv;
		struct skcipher_givcrypt_request *greq;

		greq = container_of(req, struct skcipher_givcrypt_request,
				    creq);
		iv = (u8 *)edesc->hw_desc + desc_bytes(edesc->hw_desc) +
		     edesc->sec4_sg_bytes;
		memcpy(greq->giv, iv, ivsize);
	}

	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->nbytes, 1);

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
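
/*
 * Example of the IV FIFO load built above for rfc4106 (ivsize == 8): the
 * descriptor carries the 4-byte salt taken from the end of the key,
 * followed by the 8-byte per-request IV, i.e. exactly the 12 bytes the
 * FIFOLD command declares. For generic gcm(aes), where ivsize is already
 * 12, the salt append is skipped and req->iv supplies all 12 bytes.
 */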
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0;
	dma_addr_t dst_dma;
	int len;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	pr_err("asked=%d, nbytes=%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
#endif
	caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->nbytes, 1);

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	append_seq_in_ptr(desc, edesc->sec4_sg_dma, req->nbytes + ivsize,
			  LDST_SGF);

	if (likely(req->src == req->dst)) {
		dst_dma = edesc->sec4_sg_dma + sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			dst_dma = edesc->sec4_sg_dma + (edesc->src_nents + 1) *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->nbytes, 1);

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (edesc->src_nents == 1) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
		  sizeof(struct sec4_sg_entry);
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, LDST_SGF);
}
/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, DMA_NONE, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
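
/*
 * sec4_sg table layout produced above when both sides are scattered:
 *
 *	entries 0 .. mapped_src_nents-1 : source segments
 *	entries mapped_src_nents ..     : destination segments
 *
 * (sg_to_sec4_sg_last() marks the final entry of each run.) A side with a
 * single mapped segment gets no table entries at all; it is addressed
 * directly and the LDST_SGF option is left clear for it in the job
 * descriptor.
 */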
static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
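
/*
 * For reference, a caller reaches gcm_encrypt() above through the generic
 * kernel AEAD API; a minimal sketch (error handling elided) looks like:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *r = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, 16);
 *	aead_request_set_ad(r, assoclen);
 *	aead_request_set_crypt(r, src, dst, cryptlen, iv);
 *	crypto_aead_encrypt(r);
 *
 * The -EINPROGRESS return above is how asynchronous completion is
 * signalled back through that API; aead_encrypt_done() later completes
 * the request.
 */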
  1120. static int ipsec_gcm_encrypt(struct aead_request *req)
  1121. {
  1122. if (req->assoclen < 8)
  1123. return -EINVAL;
  1124. return gcm_encrypt(req);
  1125. }
  1126. static int aead_encrypt(struct aead_request *req)
  1127. {
  1128. struct aead_edesc *edesc;
  1129. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  1130. struct caam_ctx *ctx = crypto_aead_ctx(aead);
  1131. struct device *jrdev = ctx->jrdev;
  1132. bool all_contig;
  1133. u32 *desc;
  1134. int ret = 0;
  1135. /* allocate extended descriptor */
  1136. edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
  1137. &all_contig, true);
  1138. if (IS_ERR(edesc))
  1139. return PTR_ERR(edesc);
  1140. /* Create and submit job descriptor */
  1141. init_authenc_job(req, edesc, all_contig, true);
  1142. #ifdef DEBUG
  1143. print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
  1144. DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
  1145. desc_bytes(edesc->hw_desc), 1);
  1146. #endif
  1147. desc = edesc->hw_desc;
  1148. ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
  1149. if (!ret) {
  1150. ret = -EINPROGRESS;
  1151. } else {
  1152. aead_unmap(jrdev, edesc, req);
  1153. kfree(edesc);
  1154. }
  1155. return ret;
  1156. }
static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}

static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     req->assoclen + req->cryptlen, 1);

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map the ablkcipher extended descriptor
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma;
	u8 *iv;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	sec4_sg_ents = 1 + mapped_src_nents;
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, DMA_NONE, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);
	edesc->iv_dir = DMA_TO_DEVICE;

	/* Make sure IV is located in a DMAable area */
	iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
	memcpy(iv, req->info, ivsize);

	iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, DMA_NONE, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg + 1, 0);

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, DMA_TO_DEVICE, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	return edesc;
}

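/*
 * ablkcipher_encrypt() - queue a symmetric cipher encryption job built
 * around the encryption shared descriptor (ctx->sh_desc_enc).
 */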
static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_enc, ctx->sh_desc_enc_dma, edesc, req);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/*
	 * The crypto API expects us to set the IV (req->info) to the last
	 * ciphertext block when running in CBC mode.
	 */
	if ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == OP_ALG_AAI_CBC)
		scatterwalk_map_and_copy(req->info, req->src, req->nbytes -
					 ivsize, ivsize, 0);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_dec, ctx->sh_desc_dec_dma, edesc, req);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma;
	u8 *iv;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = src_nents;
		mapped_dst_nents = src_nents;
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += 1 + mapped_dst_nents;

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, DMA_NONE, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);
	edesc->iv_dir = DMA_FROM_DEVICE;

	/* Make sure IV is located in a DMAable area */
	iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
	iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_FROM_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, DMA_NONE, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	if (mapped_src_nents > 1)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
				   0);

	dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx, iv_dma, ivsize, 0);
	sg_to_sec4_sg_last(req->dst, mapped_dst_nents, edesc->sec4_sg +
			   dst_sg_idx + 1, 0);

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, DMA_FROM_DEVICE, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}
	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	return edesc;
}

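/*
 * ablkcipher_givencrypt() - encrypt with a CAAM-generated IV; the IV
 * buffer is mapped DMA_FROM_DEVICE so the engine can hand the generated
 * IV back to the driver along with the ciphertext.
 */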
static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
	struct ablkcipher_request *req = &creq->creq;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
				edesc, req);
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher

struct caam_alg_template {
	char name[CRYPTO_MAX_ALG_NAME];
	char driver_name[CRYPTO_MAX_ALG_NAME];
	unsigned int blocksize;
	u32 type;
	union {
		struct ablkcipher_alg ablkcipher;
	} template_u;
	u32 class1_alg_type;
	u32 class2_alg_type;
};

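/*
 * Template table for the ablkcipher/givcipher algorithms; each entry is
 * expanded into a full crypto_alg by caam_alg_alloc() before being
 * registered with the crypto API.
 */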
static struct caam_alg_template driver_algs[] = {
	/* ablkcipher descriptor */
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-caam",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-caam",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "chainiv",
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "rfc3686(ctr(aes))",
		.driver_name = "rfc3686-ctr-aes-caam",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
		.template_ablkcipher = {
			.setkey = ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.givencrypt = ablkcipher_givencrypt,
			.geniv = "<built-in>",
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-caam",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = xts_ablkcipher_setkey,
			.encrypt = ablkcipher_encrypt,
			.decrypt = ablkcipher_decrypt,
			.geniv = "eseqiv",
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};

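/*
 * AEAD algorithms, described directly through the aead_alg interface in
 * struct caam_aead_alg instead of the template table above.
 */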
static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = gcm_encrypt,
			.decrypt = gcm_decrypt,
			.ivsize = 12,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
};

struct caam_crypto_alg {
	struct crypto_alg crypto_alg;
	struct list_head entry;
	struct caam_alg_entry caam;
};

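/*
 * Common transform init: allocate a job ring for this tfm, map the
 * shared descriptors and key of the context with one DMA mapping, and
 * derive the individual DMA addresses from their offsets in caam_ctx.
 */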
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
{
	dma_addr_t dma_addr;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
						      sh_desc_givenc);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
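
/*
 * The two init callbacks below differ only in how they recover the enclosing
 * driver template: container_of() maps the generic algorithm back to its
 * caam_crypto_alg/caam_aead_alg wrapper before delegating to
 * caam_init_common().
 */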
static int caam_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct caam_crypto_alg *caam_alg =
		container_of(alg, struct caam_crypto_alg, crypto_alg);
	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam);
}
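
/*
 * Teardown mirror of caam_init_common(): unmap the descriptor/key region and
 * give the job ring back.
 */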
static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}
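
/*
 * Module unload: unregister every AEAD that made it through registration
 * (tracked by the per-entry 'registered' flag), then drain alg_list. The
 * alg_list.next check covers the case where the list was never initialized.
 */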
static void __exit caam_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	if (!alg_list.next)
		return;

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}
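
/*
 * Build a caam_crypto_alg from one driver_algs template: fill in the fields
 * common to every entry, pick the cipher ops according to the template type
 * and copy the CAAM class 1/2 types for later descriptor construction.
 */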
static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;

	return t_alg;
}
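
/*
 * driver_aeads entries are statically initialized; only the fields common to
 * all of them are patched in here just before registration.
 */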
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
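
/*
 * Module init: locate the CAAM controller, read the CHA version and
 * instantiation registers, then register only the algorithms the hardware
 * can actually run - skipping DES/AES/MD-dependent entries when the block is
 * absent and XTS/GCM on low-power (LP) parts that lack them.
 */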
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
			     OP_ALG_AAI_XTS)
				continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n", alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}
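
	/*
	 * Same filtering for the AEAD table: GCM is the LP-device exclusion
	 * here, and digest-based entries are dropped when MD is absent or
	 * its maximum digest size is below what the algorithm needs.
	 */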
	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");