tree-vect-patterns.c

/* Analysis Utilities for Loop Vectorization.
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Dorit Nuzman <dorit@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "target.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "cfgloop.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "params.h"
#include "tree-data-ref.h"
#include "tree-vectorizer.h"
#include "recog.h"		/* FIXME: for insn_data */
#include "diagnostic-core.h"
#include "dumpfile.h"
#include "builtins.h"
/* Pattern recognition functions  */
static gimple vect_recog_widen_sum_pattern (vec<gimple> *, tree *, tree *);
static gimple vect_recog_widen_mult_pattern (vec<gimple> *, tree *, tree *);
static gimple vect_recog_dot_prod_pattern (vec<gimple> *, tree *, tree *);
static gimple vect_recog_sad_pattern (vec<gimple> *, tree *, tree *);
static gimple vect_recog_pow_pattern (vec<gimple> *, tree *, tree *);
static gimple vect_recog_over_widening_pattern (vec<gimple> *, tree *, tree *);
static gimple vect_recog_widen_shift_pattern (vec<gimple> *, tree *, tree *);
static gimple vect_recog_rotate_pattern (vec<gimple> *, tree *, tree *);
static gimple vect_recog_vector_vector_shift_pattern (vec<gimple> *,
                                                      tree *, tree *);
static gimple vect_recog_divmod_pattern (vec<gimple> *, tree *, tree *);
static gimple vect_recog_mixed_size_cond_pattern (vec<gimple> *,
                                                  tree *, tree *);
static gimple vect_recog_bool_pattern (vec<gimple> *, tree *, tree *);

static vect_recog_func_ptr vect_vect_recog_func_ptrs[NUM_PATTERNS] = {
	vect_recog_widen_mult_pattern,
	vect_recog_widen_sum_pattern,
	vect_recog_dot_prod_pattern,
	vect_recog_sad_pattern,
	vect_recog_pow_pattern,
	vect_recog_widen_shift_pattern,
	vect_recog_over_widening_pattern,
	vect_recog_rotate_pattern,
	vect_recog_vector_vector_shift_pattern,
	vect_recog_divmod_pattern,
	vect_recog_mixed_size_cond_pattern,
	vect_recog_bool_pattern};
static inline void
append_pattern_def_seq (stmt_vec_info stmt_info, gimple stmt)
{
  gimple_seq_add_stmt_without_update (&STMT_VINFO_PATTERN_DEF_SEQ (stmt_info),
				      stmt);
}

static inline void
new_pattern_def_seq (stmt_vec_info stmt_info, gimple stmt)
{
  STMT_VINFO_PATTERN_DEF_SEQ (stmt_info) = NULL;
  append_pattern_def_seq (stmt_info, stmt);
}
/* Check whether STMT2 is in the same loop or basic block as STMT1.
   Which of the two applies depends on whether we're currently doing
   loop-based or basic-block-based vectorization, as determined by
   the vinfo_for_stmt for STMT1 (which must be defined).

   If this returns true, vinfo_for_stmt for STMT2 is guaranteed
   to be defined as well.  */

static bool
vect_same_loop_or_bb_p (gimple stmt1, gimple stmt2)
{
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt1);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);

  if (!gimple_bb (stmt2))
    return false;

  if (loop_vinfo)
    {
      struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
      if (!flow_bb_inside_loop_p (loop, gimple_bb (stmt2)))
	return false;
    }
  else
    {
      if (gimple_bb (stmt2) != BB_VINFO_BB (bb_vinfo)
	  || gimple_code (stmt2) == GIMPLE_PHI)
	return false;
    }

  gcc_assert (vinfo_for_stmt (stmt2));
  return true;
}
/* If the LHS of DEF_STMT has a single use, and that statement is
   in the same loop or basic block, return it.  */

static gimple
vect_single_imm_use (gimple def_stmt)
{
  tree lhs = gimple_assign_lhs (def_stmt);
  use_operand_p use_p;
  gimple use_stmt;

  if (!single_imm_use (lhs, &use_p, &use_stmt))
    return NULL;

  if (!vect_same_loop_or_bb_p (def_stmt, use_stmt))
    return NULL;

  return use_stmt;
}
/* Check whether NAME, an ssa-name used in USE_STMT,
   is a result of a type promotion, such that:
     DEF_STMT: NAME = NOP (name0)
   If CHECK_SIGN is TRUE, check that either both types are signed or both are
   unsigned.  */
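/* For illustration only (a hypothetical example, not part of the original
   source): given

       short s_t;
       int i_T = (int) s_t;

   a call with NAME = i_T from a statement that uses i_T would find the
   conversion as *DEF_STMT, set *ORIG_TYPE to short, and set *PROMOTION to
   true, because int has at least twice the precision of short.  */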
static bool
type_conversion_p (tree name, gimple use_stmt, bool check_sign,
		   tree *orig_type, gimple *def_stmt, bool *promotion)
{
  tree dummy;
  gimple dummy_gimple;
  loop_vec_info loop_vinfo;
  stmt_vec_info stmt_vinfo;
  tree type = TREE_TYPE (name);
  tree oprnd0;
  enum vect_def_type dt;
  tree def;
  bb_vec_info bb_vinfo;

  stmt_vinfo = vinfo_for_stmt (use_stmt);
  loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
  if (!vect_is_simple_use (name, use_stmt, loop_vinfo, bb_vinfo, def_stmt,
			   &def, &dt))
    return false;

  if (dt != vect_internal_def
      && dt != vect_external_def && dt != vect_constant_def)
    return false;

  if (!*def_stmt)
    return false;

  if (!is_gimple_assign (*def_stmt))
    return false;

  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (*def_stmt)))
    return false;

  oprnd0 = gimple_assign_rhs1 (*def_stmt);

  *orig_type = TREE_TYPE (oprnd0);
  if (!INTEGRAL_TYPE_P (type) || !INTEGRAL_TYPE_P (*orig_type)
      || ((TYPE_UNSIGNED (type) != TYPE_UNSIGNED (*orig_type)) && check_sign))
    return false;

  if (TYPE_PRECISION (type) >= (TYPE_PRECISION (*orig_type) * 2))
    *promotion = true;
  else
    *promotion = false;

  if (!vect_is_simple_use (oprnd0, *def_stmt, loop_vinfo,
			   bb_vinfo, &dummy_gimple, &dummy, &dt))
    return false;

  return true;
}
/* Helper to return a new temporary for pattern of TYPE for STMT.  If STMT
   is NULL, the caller must set SSA_NAME_DEF_STMT for the returned SSA var.  */

static tree
vect_recog_temp_ssa_var (tree type, gimple stmt)
{
  return make_temp_ssa_name (type, stmt, "patt");
}
/* Function vect_recog_dot_prod_pattern

   Try to find the following pattern:

     type x_t, y_t;
     TYPE1 prod;
     TYPE2 sum = init;
   loop:
     sum_0 = phi <init, sum_1>
     S1  x_t = ...
     S2  y_t = ...
     S3  x_T = (TYPE1) x_t;
     S4  y_T = (TYPE1) y_t;
     S5  prod = x_T * y_T;
     [S6  prod = (TYPE2) prod;  #optional]
     S7  sum_1 = prod + sum_0;

   where 'TYPE1' is exactly double the size of type 'type', and 'TYPE2' is the
   same size as 'TYPE1' or bigger.  This is a special case of a reduction
   computation.

   Input:

   * STMTS: Contains a stmt from which the pattern search begins.  In the
   example, when this function is called with S7, the pattern {S3,S4,S5,S6,S7}
   will be detected.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the sequence of
   stmts that constitute the pattern.  In this case it will be:
        WIDEN_DOT_PRODUCT <x_t, y_t, sum_0>

   Note: The dot-prod idiom is a widening reduction pattern that is
         vectorized without preserving all the intermediate results.  It
         produces only N/2 (widened) results (by summing up pairs of
         intermediate results) rather than all N results.  Therefore, we
         cannot allow this pattern when we want to get all the results and in
         the correct order (as is the case when this computation is in an
         inner-loop nested in an outer-loop that is being vectorized).  */
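/* For illustration only (a hypothetical C-level example of the shape the
   comment above describes; the array and function names are made up):

       signed char X[N], Y[N];

       int
       dot_prod (int len)
       {
         int i, result = 0;
         short prod;
         for (i = 0; i < len; i++)
           {
             prod = X[i] * Y[i];    (S3-S5: chars widened to short)
             result += prod;        (S6-S7: short widened to int)
           }
         return result;
       }

   Here 'type' is signed char, 'TYPE1' is short (exactly double) and
   'TYPE2' is int.  */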
static gimple
vect_recog_dot_prod_pattern (vec<gimple> *stmts, tree *type_in,
			     tree *type_out)
{
  gimple stmt, last_stmt = (*stmts)[0];
  tree oprnd0, oprnd1;
  tree oprnd00, oprnd01;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
  tree type, half_type;
  gimple pattern_stmt;
  tree prod_type;
  loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  struct loop *loop;
  tree var;
  bool promotion;

  if (!loop_info)
    return NULL;

  loop = LOOP_VINFO_LOOP (loop_info);

  if (!is_gimple_assign (last_stmt))
    return NULL;

  type = gimple_expr_type (last_stmt);

  /* Look for the following pattern
          DX = (TYPE1) X;
          DY = (TYPE1) Y;
          DPROD = DX * DY;
          DDPROD = (TYPE2) DPROD;
          sum_1 = DDPROD + sum_0;
     In which
     - DX is double the size of X
     - DY is double the size of Y
     - DX, DY, DPROD all have the same type
     - sum is the same size as DPROD or bigger
     - sum has been recognized as a reduction variable.

     This is equivalent to:
       DPROD = X w* Y;          #widen mult
       sum_1 = DPROD w+ sum_0;  #widen summation
     or
       DPROD = X w* Y;          #widen mult
       sum_1 = DPROD + sum_0;   #summation
   */

  /* Starting from LAST_STMT, follow the defs of its uses in search
     of the above pattern.  */

  if (gimple_assign_rhs_code (last_stmt) != PLUS_EXPR)
    return NULL;

  if (STMT_VINFO_IN_PATTERN_P (stmt_vinfo))
    {
      /* Has been detected as widening-summation?  */

      stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
      type = gimple_expr_type (stmt);
      if (gimple_assign_rhs_code (stmt) != WIDEN_SUM_EXPR)
        return NULL;
      oprnd0 = gimple_assign_rhs1 (stmt);
      oprnd1 = gimple_assign_rhs2 (stmt);
      half_type = TREE_TYPE (oprnd0);
    }
  else
    {
      gimple def_stmt;

      if (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)
        return NULL;
      oprnd0 = gimple_assign_rhs1 (last_stmt);
      oprnd1 = gimple_assign_rhs2 (last_stmt);
      if (!types_compatible_p (TREE_TYPE (oprnd0), type)
	  || !types_compatible_p (TREE_TYPE (oprnd1), type))
        return NULL;
      stmt = last_stmt;

      if (type_conversion_p (oprnd0, stmt, true, &half_type, &def_stmt,
			     &promotion)
	  && promotion)
        {
          stmt = def_stmt;
          oprnd0 = gimple_assign_rhs1 (stmt);
        }
      else
        half_type = type;
    }

  /* So far so good.  Since last_stmt was detected as a (summation) reduction,
     we know that oprnd1 is the reduction variable (defined by a loop-header
     phi), and oprnd0 is an ssa-name defined by a stmt in the loop body.
     Left to check that oprnd0 is defined by a (widen_)mult_expr  */
  if (TREE_CODE (oprnd0) != SSA_NAME)
    return NULL;

  prod_type = half_type;
  stmt = SSA_NAME_DEF_STMT (oprnd0);

  /* It could not be the dot_prod pattern if the stmt is outside the loop.  */
  if (!gimple_bb (stmt) || !flow_bb_inside_loop_p (loop, gimple_bb (stmt)))
    return NULL;

  /* FORNOW.  Can continue analyzing the def-use chain when this stmt is a phi
     inside the loop (in case we are analyzing an outer-loop).  */
  if (!is_gimple_assign (stmt))
    return NULL;
  stmt_vinfo = vinfo_for_stmt (stmt);
  gcc_assert (stmt_vinfo);
  if (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_internal_def)
    return NULL;
  if (gimple_assign_rhs_code (stmt) != MULT_EXPR)
    return NULL;
  if (STMT_VINFO_IN_PATTERN_P (stmt_vinfo))
    {
      /* Has been detected as a widening multiplication?  */

      stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
      if (gimple_assign_rhs_code (stmt) != WIDEN_MULT_EXPR)
        return NULL;
      stmt_vinfo = vinfo_for_stmt (stmt);
      gcc_assert (stmt_vinfo);
      gcc_assert (STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_internal_def);
      oprnd00 = gimple_assign_rhs1 (stmt);
      oprnd01 = gimple_assign_rhs2 (stmt);
      STMT_VINFO_PATTERN_DEF_SEQ (vinfo_for_stmt (last_stmt))
	= STMT_VINFO_PATTERN_DEF_SEQ (stmt_vinfo);
    }
  else
    {
      tree half_type0, half_type1;
      gimple def_stmt;
      tree oprnd0, oprnd1;

      oprnd0 = gimple_assign_rhs1 (stmt);
      oprnd1 = gimple_assign_rhs2 (stmt);
      if (!types_compatible_p (TREE_TYPE (oprnd0), prod_type)
          || !types_compatible_p (TREE_TYPE (oprnd1), prod_type))
        return NULL;
      if (!type_conversion_p (oprnd0, stmt, true, &half_type0, &def_stmt,
			      &promotion)
	  || !promotion)
        return NULL;
      oprnd00 = gimple_assign_rhs1 (def_stmt);
      if (!type_conversion_p (oprnd1, stmt, true, &half_type1, &def_stmt,
			      &promotion)
	  || !promotion)
        return NULL;
      oprnd01 = gimple_assign_rhs1 (def_stmt);
      if (!types_compatible_p (half_type0, half_type1))
        return NULL;
      if (TYPE_PRECISION (prod_type) != TYPE_PRECISION (half_type0) * 2)
        return NULL;
    }

  half_type = TREE_TYPE (oprnd00);
  *type_in = half_type;
  *type_out = type;

  /* Pattern detected.  Create a stmt to be used to replace the pattern: */
  var = vect_recog_temp_ssa_var (type, NULL);
  pattern_stmt = gimple_build_assign (var, DOT_PROD_EXPR,
				      oprnd00, oprnd01, oprnd1);

  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_NOTE, vect_location,
                       "vect_recog_dot_prod_pattern: detected: ");
      dump_gimple_stmt (MSG_NOTE, TDF_SLIM, pattern_stmt, 0);
      dump_printf (MSG_NOTE, "\n");
    }

  /* We don't allow changing the order of the computation in the inner-loop
     when doing outer-loop vectorization.  */
  gcc_assert (!nested_in_vect_loop_p (loop, last_stmt));

  return pattern_stmt;
}
/* Function vect_recog_sad_pattern

   Try to find the following Sum of Absolute Difference (SAD) pattern:

     type x_t, y_t;
     signed TYPE1 diff, abs_diff;
     TYPE2 sum = init;
   loop:
     sum_0 = phi <init, sum_1>
     S1  x_t = ...
     S2  y_t = ...
     S3  x_T = (TYPE1) x_t;
     S4  y_T = (TYPE1) y_t;
     S5  diff = x_T - y_T;
     S6  abs_diff = ABS_EXPR <diff>;
     [S7  abs_diff = (TYPE2) abs_diff;  #optional]
     S8  sum_1 = abs_diff + sum_0;

   where 'TYPE1' is at least double the size of type 'type', and 'TYPE2' is the
   same size as 'TYPE1' or bigger.  This is a special case of a reduction
   computation.

   Input:

   * STMTS: Contains a stmt from which the pattern search begins.  In the
   example, when this function is called with S8, the pattern
   {S3,S4,S5,S6,S7,S8} will be detected.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the sequence of
   stmts that constitute the pattern.  In this case it will be:
        SAD_EXPR <x_t, y_t, sum_0>
  */
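/* For illustration only (a hypothetical C-level example of the shape the
   comment above describes; names are made up, and abs () is assumed to come
   from <stdlib.h>):

       unsigned char X[N], Y[N];

       int
       usad (int len)
       {
         int i, sum = 0;
         for (i = 0; i < len; i++)
           sum += abs (X[i] - Y[i]);   (S3-S8, with the operands promoted
                                        from unsigned char to int)
         return sum;
       }

   Here 'type' is unsigned char and 'TYPE1' and 'TYPE2' are both int.  */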
static gimple
vect_recog_sad_pattern (vec<gimple> *stmts, tree *type_in,
			tree *type_out)
{
  gimple last_stmt = (*stmts)[0];
  tree sad_oprnd0, sad_oprnd1;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
  tree half_type;
  loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  struct loop *loop;
  bool promotion;

  if (!loop_info)
    return NULL;

  loop = LOOP_VINFO_LOOP (loop_info);

  if (!is_gimple_assign (last_stmt))
    return NULL;

  tree sum_type = gimple_expr_type (last_stmt);

  /* Look for the following pattern
          DX = (TYPE1) X;
          DY = (TYPE1) Y;
          DDIFF = DX - DY;
          DAD = ABS_EXPR <DDIFF>;
          [DAD = (TYPE2) DAD;  #optional]
          sum_1 = DAD + sum_0;
     In which
     - DX is at least double the size of X
     - DY is at least double the size of Y
     - DX, DY, DDIFF, DAD all have the same type
     - sum is the same size as DAD or bigger
     - sum has been recognized as a reduction variable.

     This is equivalent to:
       DDIFF = X w- Y;          #widen sub
       DAD = ABS_EXPR <DDIFF>;
       sum_1 = DAD w+ sum_0;    #widen summation
     or
       DDIFF = X w- Y;          #widen sub
       DAD = ABS_EXPR <DDIFF>;
       sum_1 = DAD + sum_0;     #summation
   */

  /* Starting from LAST_STMT, follow the defs of its uses in search
     of the above pattern.  */

  if (gimple_assign_rhs_code (last_stmt) != PLUS_EXPR)
    return NULL;

  tree plus_oprnd0, plus_oprnd1;

  if (STMT_VINFO_IN_PATTERN_P (stmt_vinfo))
    {
      /* Has been detected as widening-summation?  */

      gimple stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
      sum_type = gimple_expr_type (stmt);
      if (gimple_assign_rhs_code (stmt) != WIDEN_SUM_EXPR)
        return NULL;
      plus_oprnd0 = gimple_assign_rhs1 (stmt);
      plus_oprnd1 = gimple_assign_rhs2 (stmt);
      half_type = TREE_TYPE (plus_oprnd0);
    }
  else
    {
      gimple def_stmt;

      if (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)
        return NULL;
      plus_oprnd0 = gimple_assign_rhs1 (last_stmt);
      plus_oprnd1 = gimple_assign_rhs2 (last_stmt);
      if (!types_compatible_p (TREE_TYPE (plus_oprnd0), sum_type)
	  || !types_compatible_p (TREE_TYPE (plus_oprnd1), sum_type))
        return NULL;

      /* The type conversion could be promotion, demotion,
         or just signed -> unsigned.  */
      if (type_conversion_p (plus_oprnd0, last_stmt, false,
                             &half_type, &def_stmt, &promotion))
        plus_oprnd0 = gimple_assign_rhs1 (def_stmt);
      else
        half_type = sum_type;
    }

  /* So far so good.  Since last_stmt was detected as a (summation) reduction,
     we know that plus_oprnd1 is the reduction variable (defined by a
     loop-header phi), and plus_oprnd0 is an ssa-name defined by a stmt in the
     loop body.
     Then check that plus_oprnd0 is defined by an abs_expr.  */

  if (TREE_CODE (plus_oprnd0) != SSA_NAME)
    return NULL;

  tree abs_type = half_type;
  gimple abs_stmt = SSA_NAME_DEF_STMT (plus_oprnd0);

  /* It could not be the sad pattern if the abs_stmt is outside the loop.  */
  if (!gimple_bb (abs_stmt)
      || !flow_bb_inside_loop_p (loop, gimple_bb (abs_stmt)))
    return NULL;

  /* FORNOW.  Can continue analyzing the def-use chain when this stmt is a phi
     inside the loop (in case we are analyzing an outer-loop).  */
  if (!is_gimple_assign (abs_stmt))
    return NULL;

  stmt_vec_info abs_stmt_vinfo = vinfo_for_stmt (abs_stmt);
  gcc_assert (abs_stmt_vinfo);
  if (STMT_VINFO_DEF_TYPE (abs_stmt_vinfo) != vect_internal_def)
    return NULL;
  if (gimple_assign_rhs_code (abs_stmt) != ABS_EXPR)
    return NULL;

  tree abs_oprnd = gimple_assign_rhs1 (abs_stmt);
  if (!types_compatible_p (TREE_TYPE (abs_oprnd), abs_type))
    return NULL;
  if (TYPE_UNSIGNED (abs_type))
    return NULL;

  /* We then detect if the operand of abs_expr is defined by a minus_expr.  */

  if (TREE_CODE (abs_oprnd) != SSA_NAME)
    return NULL;

  gimple diff_stmt = SSA_NAME_DEF_STMT (abs_oprnd);

  /* It could not be the sad pattern if the diff_stmt is outside the loop.  */
  if (!gimple_bb (diff_stmt)
      || !flow_bb_inside_loop_p (loop, gimple_bb (diff_stmt)))
    return NULL;

  /* FORNOW.  Can continue analyzing the def-use chain when this stmt is a phi
     inside the loop (in case we are analyzing an outer-loop).  */
  if (!is_gimple_assign (diff_stmt))
    return NULL;

  stmt_vec_info diff_stmt_vinfo = vinfo_for_stmt (diff_stmt);
  gcc_assert (diff_stmt_vinfo);
  if (STMT_VINFO_DEF_TYPE (diff_stmt_vinfo) != vect_internal_def)
    return NULL;
  if (gimple_assign_rhs_code (diff_stmt) != MINUS_EXPR)
    return NULL;

  tree half_type0, half_type1;
  gimple def_stmt;

  tree minus_oprnd0 = gimple_assign_rhs1 (diff_stmt);
  tree minus_oprnd1 = gimple_assign_rhs2 (diff_stmt);

  if (!types_compatible_p (TREE_TYPE (minus_oprnd0), abs_type)
      || !types_compatible_p (TREE_TYPE (minus_oprnd1), abs_type))
    return NULL;
  if (!type_conversion_p (minus_oprnd0, diff_stmt, false,
                          &half_type0, &def_stmt, &promotion)
      || !promotion)
    return NULL;
  sad_oprnd0 = gimple_assign_rhs1 (def_stmt);

  if (!type_conversion_p (minus_oprnd1, diff_stmt, false,
                          &half_type1, &def_stmt, &promotion)
      || !promotion)
    return NULL;
  sad_oprnd1 = gimple_assign_rhs1 (def_stmt);

  if (!types_compatible_p (half_type0, half_type1))
    return NULL;
  if (TYPE_PRECISION (abs_type) < TYPE_PRECISION (half_type0) * 2
      || TYPE_PRECISION (sum_type) < TYPE_PRECISION (half_type0) * 2)
    return NULL;

  *type_in = TREE_TYPE (sad_oprnd0);
  *type_out = sum_type;

  /* Pattern detected.  Create a stmt to be used to replace the pattern: */
  tree var = vect_recog_temp_ssa_var (sum_type, NULL);
  gimple pattern_stmt = gimple_build_assign (var, SAD_EXPR, sad_oprnd0,
					     sad_oprnd1, plus_oprnd1);

  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_NOTE, vect_location,
                       "vect_recog_sad_pattern: detected: ");
      dump_gimple_stmt (MSG_NOTE, TDF_SLIM, pattern_stmt, 0);
      dump_printf (MSG_NOTE, "\n");
    }

  /* We don't allow changing the order of the computation in the inner-loop
     when doing outer-loop vectorization.  */
  gcc_assert (!nested_in_vect_loop_p (loop, last_stmt));

  return pattern_stmt;
}
/* Handle widening operation by a constant.  At the moment we support MULT_EXPR
   and LSHIFT_EXPR.

   For MULT_EXPR we check that CONST_OPRND fits HALF_TYPE, and for LSHIFT_EXPR
   we check that CONST_OPRND is less than or equal to the size of HALF_TYPE.

   Otherwise, if the type of the result (TYPE) is at least 4 times bigger than
   HALF_TYPE, and there is an intermediate type (2 times smaller than TYPE)
   that satisfies the above restrictions, we can perform a widening operation
   from the intermediate type to TYPE and replace a_T = (TYPE) a_t;
   with a_it = (interm_type) a_t;  Store such operation in *WSTMT.  */
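/* For illustration only (a hypothetical example of the intermediate-type
   case described above):

       char a_t;
       int a_T = (int) a_t;
       int prod_T = a_T * 300;       (300 does not fit char)

   Here TYPE (int) is 4 times bigger than HALF_TYPE (char), but 300 fits the
   intermediate type short, so the promotion is replaced with

       short a_it = (short) a_t;

   and the multiplication becomes a widening multiply from short to int.  */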
static bool
vect_handle_widen_op_by_const (gimple stmt, enum tree_code code,
			       tree const_oprnd, tree *oprnd,
			       gimple *wstmt, tree type,
			       tree *half_type, gimple def_stmt)
{
  tree new_type, new_oprnd;

  if (code != MULT_EXPR && code != LSHIFT_EXPR)
    return false;

  if (((code == MULT_EXPR && int_fits_type_p (const_oprnd, *half_type))
       || (code == LSHIFT_EXPR
	   && compare_tree_int (const_oprnd, TYPE_PRECISION (*half_type))
	      != 1))
      && TYPE_PRECISION (type) == (TYPE_PRECISION (*half_type) * 2))
    {
      /* CONST_OPRND is a constant of HALF_TYPE.  */
      *oprnd = gimple_assign_rhs1 (def_stmt);
      return true;
    }

  if (TYPE_PRECISION (type) < (TYPE_PRECISION (*half_type) * 4))
    return false;

  if (!vect_same_loop_or_bb_p (stmt, def_stmt))
    return false;

  /* TYPE is 4 times bigger than HALF_TYPE, try widening operation for
     a type 2 times bigger than HALF_TYPE.  */
  new_type = build_nonstandard_integer_type (TYPE_PRECISION (type) / 2,
					     TYPE_UNSIGNED (type));
  if ((code == MULT_EXPR && !int_fits_type_p (const_oprnd, new_type))
      || (code == LSHIFT_EXPR
	  && compare_tree_int (const_oprnd, TYPE_PRECISION (new_type)) == 1))
    return false;

  /* Use NEW_TYPE for widening operation and create a_T = (NEW_TYPE) a_t;  */
  *oprnd = gimple_assign_rhs1 (def_stmt);
  new_oprnd = make_ssa_name (new_type);
  *wstmt = gimple_build_assign (new_oprnd, NOP_EXPR, *oprnd);
  *oprnd = new_oprnd;

  *half_type = new_type;
  return true;
}
/* Function vect_recog_widen_mult_pattern

   Try to find the following pattern:

     type1 a_t;
     type2 b_t;
     TYPE a_T, b_T, prod_T;

     S1  a_t = ;
     S2  b_t = ;
     S3  a_T = (TYPE) a_t;
     S4  b_T = (TYPE) b_t;
     S5  prod_T = a_T * b_T;

   where type 'TYPE' is at least double the size of type 'type1' and 'type2'.

   Also detect unsigned cases:

     unsigned type1 a_t;
     unsigned type2 b_t;
     unsigned TYPE u_prod_T;
     TYPE a_T, b_T, prod_T;

     S1  a_t = ;
     S2  b_t = ;
     S3  a_T = (TYPE) a_t;
     S4  b_T = (TYPE) b_t;
     S5  prod_T = a_T * b_T;
     S6  u_prod_T = (unsigned TYPE) prod_T;

   and multiplication by constants:

     type a_t;
     TYPE a_T, prod_T;

     S1  a_t = ;
     S3  a_T = (TYPE) a_t;
     S5  prod_T = a_T * CONST;

   A special case of multiplication by constants is when 'TYPE' is 4 times
   bigger than 'type', but CONST fits an intermediate type 2 times smaller
   than 'TYPE'.  In that case we create an additional pattern stmt for S3
   to create a variable of the intermediate type, and perform widen-mult
   on the intermediate type as well:

     type a_t;
     interm_type a_it;
     TYPE a_T, prod_T, prod_T';

     S1  a_t = ;
     S3  a_T = (TYPE) a_t;
           '--> a_it = (interm_type) a_t;
     S5  prod_T = a_T * CONST;
           '--> prod_T' = a_it w* CONST;

   Input/Output:

   * STMTS: Contains a stmt from which the pattern search begins.  In the
   example, when this function is called with S5, the pattern {S3,S4,S5,(S6)}
   is detected.  In case of unsigned widen-mult, the original stmt (S5) is
   replaced with S6 in STMTS.  In case of multiplication by a constant
   of an intermediate type (the last case above), STMTS also contains S3
   (inserted before S5).

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the sequence of
   stmts that constitute the pattern.  In this case it will be:
        WIDEN_MULT <a_t, b_t>
   If the result of WIDEN_MULT needs to be converted to a larger type, the
   returned stmt will be this type conversion stmt.
*/
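/* For illustration only (a hypothetical C loop matching the basic signed
   case above; names are made up):

       short a[N], b[N];
       int c[N];

       for (int i = 0; i < N; i++)
         c[i] = a[i] * b[i];     (S3-S5: both operands promoted from
                                  short to int before the multiply)

   The products are then computed as WIDEN_MULT <a_t, b_t>.  */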
static gimple
vect_recog_widen_mult_pattern (vec<gimple> *stmts,
			       tree *type_in, tree *type_out)
{
  gimple last_stmt = stmts->pop ();
  gimple def_stmt0, def_stmt1;
  tree oprnd0, oprnd1;
  tree type, half_type0, half_type1;
  gimple new_stmt = NULL, pattern_stmt = NULL;
  tree vectype, vecitype;
  tree var;
  enum tree_code dummy_code;
  int dummy_int;
  vec<tree> dummy_vec;
  bool op1_ok;
  bool promotion;

  if (!is_gimple_assign (last_stmt))
    return NULL;

  type = gimple_expr_type (last_stmt);

  /* Starting from LAST_STMT, follow the defs of its uses in search
     of the above pattern.  */

  if (gimple_assign_rhs_code (last_stmt) != MULT_EXPR)
    return NULL;

  oprnd0 = gimple_assign_rhs1 (last_stmt);
  oprnd1 = gimple_assign_rhs2 (last_stmt);
  if (!types_compatible_p (TREE_TYPE (oprnd0), type)
      || !types_compatible_p (TREE_TYPE (oprnd1), type))
    return NULL;

  /* Check argument 0.  */
  if (!type_conversion_p (oprnd0, last_stmt, false, &half_type0, &def_stmt0,
			  &promotion)
      || !promotion)
    return NULL;
  /* Check argument 1.  */
  op1_ok = type_conversion_p (oprnd1, last_stmt, false, &half_type1,
			      &def_stmt1, &promotion);

  if (op1_ok && promotion)
    {
      oprnd0 = gimple_assign_rhs1 (def_stmt0);
      oprnd1 = gimple_assign_rhs1 (def_stmt1);
    }
  else
    {
      if (TREE_CODE (oprnd1) == INTEGER_CST
	  && TREE_CODE (half_type0) == INTEGER_TYPE
	  && vect_handle_widen_op_by_const (last_stmt, MULT_EXPR, oprnd1,
					    &oprnd0, &new_stmt, type,
					    &half_type0, def_stmt0))
	{
	  half_type1 = half_type0;
	  oprnd1 = fold_convert (half_type1, oprnd1);
	}
      else
	return NULL;
    }

  /* If the two arguments have different sizes, convert the one with
     the smaller type into the larger type.  */
  if (TYPE_PRECISION (half_type0) != TYPE_PRECISION (half_type1))
    {
      /* If we already used up the single-stmt slot give up.  */
      if (new_stmt)
	return NULL;

      tree *oprnd = NULL;
      gimple def_stmt = NULL;

      if (TYPE_PRECISION (half_type0) < TYPE_PRECISION (half_type1))
	{
	  def_stmt = def_stmt0;
	  half_type0 = half_type1;
	  oprnd = &oprnd0;
	}
      else
	{
	  def_stmt = def_stmt1;
	  half_type1 = half_type0;
	  oprnd = &oprnd1;
	}

      tree old_oprnd = gimple_assign_rhs1 (def_stmt);
      tree new_oprnd = make_ssa_name (half_type0);
      new_stmt = gimple_build_assign (new_oprnd, NOP_EXPR, old_oprnd);
      *oprnd = new_oprnd;
    }

  /* Handle unsigned case.  Look for
     S6  u_prod_T = (unsigned TYPE) prod_T;
     Use unsigned TYPE as the type for WIDEN_MULT_EXPR.  */
  if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (half_type0))
    {
      gimple use_stmt;
      tree use_lhs;
      tree use_type;

      if (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (half_type1))
	return NULL;

      use_stmt = vect_single_imm_use (last_stmt);
      if (!use_stmt || !is_gimple_assign (use_stmt)
	  || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
	return NULL;

      use_lhs = gimple_assign_lhs (use_stmt);
      use_type = TREE_TYPE (use_lhs);
      if (!INTEGRAL_TYPE_P (use_type)
	  || (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (use_type))
	  || (TYPE_PRECISION (type) != TYPE_PRECISION (use_type)))
	return NULL;

      type = use_type;
      last_stmt = use_stmt;
    }

  if (!types_compatible_p (half_type0, half_type1))
    return NULL;

  /* If TYPE is more than twice larger than HALF_TYPE, we use WIDEN_MULT
     to get an intermediate result of type ITYPE.  In this case we need
     to build a statement to convert this intermediate result to type TYPE.  */
  tree itype = type;
  if (TYPE_PRECISION (type) > TYPE_PRECISION (half_type0) * 2)
    itype = build_nonstandard_integer_type
	      (GET_MODE_BITSIZE (TYPE_MODE (half_type0)) * 2,
	       TYPE_UNSIGNED (type));

  /* Pattern detected.  */
  if (dump_enabled_p ())
    dump_printf_loc (MSG_NOTE, vect_location,
		     "vect_recog_widen_mult_pattern: detected:\n");

  /* Check target support  */
  vectype = get_vectype_for_scalar_type (half_type0);
  vecitype = get_vectype_for_scalar_type (itype);
  if (!vectype
      || !vecitype
      || !supportable_widening_operation (WIDEN_MULT_EXPR, last_stmt,
					  vecitype, vectype,
					  &dummy_code, &dummy_code,
					  &dummy_int, &dummy_vec))
    return NULL;

  *type_in = vectype;
  *type_out = get_vectype_for_scalar_type (type);

  /* Pattern supported.  Create a stmt to be used to replace the pattern: */
  var = vect_recog_temp_ssa_var (itype, NULL);
  pattern_stmt = gimple_build_assign (var, WIDEN_MULT_EXPR, oprnd0, oprnd1);

  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
  STMT_VINFO_PATTERN_DEF_SEQ (stmt_vinfo) = NULL;

  /* If the original two operands have different sizes, we may need to convert
     the smaller one into the larger type.  If this is the case, at this point
     the new stmt is already built.  */
  if (new_stmt)
    {
      append_pattern_def_seq (stmt_vinfo, new_stmt);
      stmt_vec_info new_stmt_info
	= new_stmt_vec_info (new_stmt, loop_vinfo, bb_vinfo);
      set_vinfo_for_stmt (new_stmt, new_stmt_info);
      STMT_VINFO_VECTYPE (new_stmt_info) = vectype;
    }

  /* If ITYPE is not TYPE, we need to build a type conversion stmt to convert
     the result of the widen-mult operation into type TYPE.  */
  if (itype != type)
    {
      append_pattern_def_seq (stmt_vinfo, pattern_stmt);
      stmt_vec_info pattern_stmt_info
	= new_stmt_vec_info (pattern_stmt, loop_vinfo, bb_vinfo);
      set_vinfo_for_stmt (pattern_stmt, pattern_stmt_info);
      STMT_VINFO_VECTYPE (pattern_stmt_info) = vecitype;
      pattern_stmt = gimple_build_assign (vect_recog_temp_ssa_var (type, NULL),
					  NOP_EXPR,
					  gimple_assign_lhs (pattern_stmt));
    }

  if (dump_enabled_p ())
    dump_gimple_stmt_loc (MSG_NOTE, vect_location, TDF_SLIM, pattern_stmt, 0);

  stmts->safe_push (last_stmt);
  return pattern_stmt;
}
/* Function vect_recog_pow_pattern

   Try to find the following pattern:

     x = POW (y, N);

   with POW being one of pow, powf, powi, powif and N being
   either 2 or 0.5.

   Input:

   * LAST_STMT: A stmt from which the pattern search begins.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the sequence of
   stmts that constitute the pattern.  In this case it will be:
        x = x * x
   or
        x = sqrt (x)
*/
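/* For illustration only, both rewrites the comment above describes,
   written at the C level:

       x = pow (y, 2.0);      is replaced by     x = y * y;
       x = pow (y, 0.5);      is replaced by     x = sqrt (y);

   (The sqrt replacement is only emitted when the target can vectorize
   the sqrt call, as checked below.)  */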
static gimple
vect_recog_pow_pattern (vec<gimple> *stmts, tree *type_in,
			tree *type_out)
{
  gimple last_stmt = (*stmts)[0];
  tree fn, base, exp = NULL;
  gimple stmt;
  tree var;

  if (!is_gimple_call (last_stmt) || gimple_call_lhs (last_stmt) == NULL)
    return NULL;

  fn = gimple_call_fndecl (last_stmt);
  if (fn == NULL_TREE || DECL_BUILT_IN_CLASS (fn) != BUILT_IN_NORMAL)
    return NULL;

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_POWIF:
    case BUILT_IN_POWI:
    case BUILT_IN_POWF:
    case BUILT_IN_POW:
      base = gimple_call_arg (last_stmt, 0);
      exp = gimple_call_arg (last_stmt, 1);
      if (TREE_CODE (exp) != REAL_CST
	  && TREE_CODE (exp) != INTEGER_CST)
        return NULL;
      break;

    default:
      return NULL;
    }

  /* We now have a pow or powi builtin function call with a constant
     exponent.  */

  *type_out = NULL_TREE;

  /* Catch squaring.  */
  if ((tree_fits_shwi_p (exp)
       && tree_to_shwi (exp) == 2)
      || (TREE_CODE (exp) == REAL_CST
	  && REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst2)))
    {
      *type_in = TREE_TYPE (base);

      var = vect_recog_temp_ssa_var (TREE_TYPE (base), NULL);
      stmt = gimple_build_assign (var, MULT_EXPR, base, base);
      return stmt;
    }

  /* Catch square root.  */
  if (TREE_CODE (exp) == REAL_CST
      && REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconsthalf))
    {
      tree newfn = mathfn_built_in (TREE_TYPE (base), BUILT_IN_SQRT);
      *type_in = get_vectype_for_scalar_type (TREE_TYPE (base));
      if (*type_in)
	{
	  gcall *stmt = gimple_build_call (newfn, 1, base);
	  if (vectorizable_function (stmt, *type_in, *type_in)
	      != NULL_TREE)
	    {
	      var = vect_recog_temp_ssa_var (TREE_TYPE (base), stmt);
	      gimple_call_set_lhs (stmt, var);
	      return stmt;
	    }
	}
    }

  return NULL;
}
/* Function vect_recog_widen_sum_pattern

   Try to find the following pattern:

     type x_t;
     TYPE x_T, sum = init;
   loop:
     sum_0 = phi <init, sum_1>
     S1  x_t = *p;
     S2  x_T = (TYPE) x_t;
     S3  sum_1 = x_T + sum_0;

   where type 'TYPE' is at least double the size of type 'type', i.e. we're
   summing elements of type 'type' into an accumulator of type 'TYPE'.  This is
   a special case of a reduction computation.

   Input:

   * LAST_STMT: A stmt from which the pattern search begins.  In the example,
   when this function is called with S3, the pattern {S2,S3} will be detected.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the sequence of
   stmts that constitute the pattern.  In this case it will be:
        WIDEN_SUM <x_t, sum_0>

   Note: The widening-sum idiom is a widening reduction pattern that is
	 vectorized without preserving all the intermediate results.  It
	 produces only N/2 (widened) results (by summing up pairs of
	 intermediate results) rather than all N results.  Therefore, we
	 cannot allow this pattern when we want to get all the results and in
	 the correct order (as is the case when this computation is in an
	 inner-loop nested in an outer-loop that is being vectorized).  */
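/* For illustration only (a hypothetical C loop of the shape described
   above; names are made up):

       short x[N];

       int
       widen_sum (int len)
       {
         int i, sum = 0;
         for (i = 0; i < len; i++)
           sum += x[i];            (S1-S3: each element widened from
                                    short to int before the add)
         return sum;
       }

   Here 'type' is short and 'TYPE' is int, both signed.  */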
static gimple
vect_recog_widen_sum_pattern (vec<gimple> *stmts, tree *type_in,
			      tree *type_out)
{
  gimple stmt, last_stmt = (*stmts)[0];
  tree oprnd0, oprnd1;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
  tree type, half_type;
  gimple pattern_stmt;
  loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  struct loop *loop;
  tree var;
  bool promotion;

  if (!loop_info)
    return NULL;

  loop = LOOP_VINFO_LOOP (loop_info);

  if (!is_gimple_assign (last_stmt))
    return NULL;

  type = gimple_expr_type (last_stmt);

  /* Look for the following pattern
          DX = (TYPE) X;
          sum_1 = DX + sum_0;
     In which DX is at least double the size of X, and sum_1 has been
     recognized as a reduction variable.
   */

  /* Starting from LAST_STMT, follow the defs of its uses in search
     of the above pattern.  */

  if (gimple_assign_rhs_code (last_stmt) != PLUS_EXPR)
    return NULL;

  if (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)
    return NULL;

  oprnd0 = gimple_assign_rhs1 (last_stmt);
  oprnd1 = gimple_assign_rhs2 (last_stmt);
  if (!types_compatible_p (TREE_TYPE (oprnd0), type)
      || !types_compatible_p (TREE_TYPE (oprnd1), type))
    return NULL;

  /* So far so good.  Since last_stmt was detected as a (summation) reduction,
     we know that oprnd1 is the reduction variable (defined by a loop-header
     phi), and oprnd0 is an ssa-name defined by a stmt in the loop body.
     Left to check that oprnd0 is defined by a cast from type 'type' to type
     'TYPE'.  */

  if (!type_conversion_p (oprnd0, last_stmt, true, &half_type, &stmt,
			  &promotion)
      || !promotion)
    return NULL;

  oprnd0 = gimple_assign_rhs1 (stmt);
  *type_in = half_type;
  *type_out = type;

  /* Pattern detected.  Create a stmt to be used to replace the pattern: */
  var = vect_recog_temp_ssa_var (type, NULL);
  pattern_stmt = gimple_build_assign (var, WIDEN_SUM_EXPR, oprnd0, oprnd1);

  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_NOTE, vect_location,
                       "vect_recog_widen_sum_pattern: detected: ");
      dump_gimple_stmt (MSG_NOTE, TDF_SLIM, pattern_stmt, 0);
      dump_printf (MSG_NOTE, "\n");
    }

  /* We don't allow changing the order of the computation in the inner-loop
     when doing outer-loop vectorization.  */
  gcc_assert (!nested_in_vect_loop_p (loop, last_stmt));

  return pattern_stmt;
}
/* Return TRUE if the operation in STMT can be performed on a smaller type.

   Input:
   STMT - a statement to check.
   DEF - we support operations with two operands, one of which is constant.
         The other operand can be defined by a demotion operation, or by a
         previous statement in a sequence of over-promoted operations.  In the
         latter case DEF is used to replace that operand.  (It is defined by a
         pattern statement we created for the previous statement in the
         sequence).

   Input/output:
   NEW_TYPE - Output: a smaller type that we are trying to use.  Input: if not
         NULL, it's the type of DEF.
   STMTS - additional pattern statements.  If a pattern statement (type
         conversion) is created in this function, its original statement is
         added to STMTS.

   Output:
   OP0, OP1 - if the operation fits a smaller type, OP0 and OP1 are the new
         operands to use in the new pattern statement for STMT (will be created
         in vect_recog_over_widening_pattern ()).
   NEW_DEF_STMT - in case DEF has to be promoted, we create two pattern
         statements for STMT: the first one is a type promotion and the second
         one is the operation itself.  We return the type promotion statement
         in NEW_DEF_STMT and further store it in STMT_VINFO_PATTERN_DEF_SEQ of
         the second pattern statement.  */
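/* For illustration only (a hypothetical over-promoted sequence; each
   operation has one constant operand, as required above):

       unsigned char a[N], b[N];

       for (int i = 0; i < N; i++)
         b[i] = (a[i] >> 2) & 0x3f;

   C promotion rules perform the shift and the bitwise AND in int, but the
   result is truncated back to unsigned char, so both operations fit a
   smaller type and the promotions can be dropped.  */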
static bool
vect_operation_fits_smaller_type (gimple stmt, tree def, tree *new_type,
				  tree *op0, tree *op1, gimple *new_def_stmt,
				  vec<gimple> *stmts)
{
  enum tree_code code;
  tree const_oprnd, oprnd;
  tree interm_type = NULL_TREE, half_type, new_oprnd, type;
  gimple def_stmt, new_stmt;
  bool first = false;
  bool promotion;

  *op0 = NULL_TREE;
  *op1 = NULL_TREE;
  *new_def_stmt = NULL;

  if (!is_gimple_assign (stmt))
    return false;

  code = gimple_assign_rhs_code (stmt);
  if (code != LSHIFT_EXPR && code != RSHIFT_EXPR
      && code != BIT_IOR_EXPR && code != BIT_XOR_EXPR && code != BIT_AND_EXPR)
    return false;

  oprnd = gimple_assign_rhs1 (stmt);
  const_oprnd = gimple_assign_rhs2 (stmt);
  type = gimple_expr_type (stmt);

  if (TREE_CODE (oprnd) != SSA_NAME
      || TREE_CODE (const_oprnd) != INTEGER_CST)
    return false;

  /* If oprnd has other uses besides that in stmt we cannot mark it
     as being part of a pattern only.  */
  if (!has_single_use (oprnd))
    return false;

  /* If we are in the middle of a sequence, we use DEF from a previous
     statement.  Otherwise, OPRND has to be a result of type promotion.  */
  if (*new_type)
    {
      half_type = *new_type;
      oprnd = def;
    }
  else
    {
      first = true;
      if (!type_conversion_p (oprnd, stmt, false, &half_type, &def_stmt,
			      &promotion)
	  || !promotion
	  || !vect_same_loop_or_bb_p (stmt, def_stmt))
        return false;
    }

  /* Can we perform the operation on a smaller type?  */
  switch (code)
    {
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      if (!int_fits_type_p (const_oprnd, half_type))
        {
          /* HALF_TYPE is not enough.  Try a bigger type if possible.  */
          if (TYPE_PRECISION (type) < (TYPE_PRECISION (half_type) * 4))
            return false;

          interm_type = build_nonstandard_integer_type (
                        TYPE_PRECISION (half_type) * 2, TYPE_UNSIGNED (type));
          if (!int_fits_type_p (const_oprnd, interm_type))
            return false;
        }

      break;

    case LSHIFT_EXPR:
      /* Try intermediate type - HALF_TYPE is not enough for sure.  */
      if (TYPE_PRECISION (type) < (TYPE_PRECISION (half_type) * 4))
        return false;

      /* Check that HALF_TYPE size + shift amount <= INTERM_TYPE size.
         (e.g., if the original value was char, the shift amount is at most 8
         if we want to use short).  */
      if (compare_tree_int (const_oprnd, TYPE_PRECISION (half_type)) == 1)
        return false;

      interm_type = build_nonstandard_integer_type (
                    TYPE_PRECISION (half_type) * 2, TYPE_UNSIGNED (type));
      if (!vect_supportable_shift (code, interm_type))
        return false;

      break;

    case RSHIFT_EXPR:
      if (vect_supportable_shift (code, half_type))
        break;

      /* Try intermediate type - HALF_TYPE is not supported.  */
      if (TYPE_PRECISION (type) < (TYPE_PRECISION (half_type) * 4))
        return false;

      interm_type = build_nonstandard_integer_type (
                    TYPE_PRECISION (half_type) * 2, TYPE_UNSIGNED (type));
      if (!vect_supportable_shift (code, interm_type))
        return false;

      break;

    default:
      gcc_unreachable ();
    }
  1151. /* There are four possible cases:
  1152. 1. OPRND is defined by a type promotion (in that case FIRST is TRUE, it's
  1153. the first statement in the sequence)
  1154. a. The original, HALF_TYPE, is not enough - we replace the promotion
  1155. from HALF_TYPE to TYPE with a promotion to INTERM_TYPE.
  1156. b. HALF_TYPE is sufficient, OPRND is set as the RHS of the original
  1157. promotion.
  1158. 2. OPRND is defined by a pattern statement we created.
  1159. a. Its type is not sufficient for the operation, we create a new stmt:
  1160. a type conversion for OPRND from HALF_TYPE to INTERM_TYPE. We store
  1161. this statement in NEW_DEF_STMT, and it is later put in
  1162. STMT_VINFO_PATTERN_DEF_SEQ of the pattern statement for STMT.
  1163. b. OPRND is good to use in the new statement. */
  1164. if (first)
  1165. {
  1166. if (interm_type)
  1167. {
  1168. /* Replace the original type conversion HALF_TYPE->TYPE with
  1169. HALF_TYPE->INTERM_TYPE. */
  1170. if (STMT_VINFO_RELATED_STMT (vinfo_for_stmt (def_stmt)))
  1171. {
  1172. new_stmt = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (def_stmt));
  1173. /* Check if the already created pattern stmt is what we need. */
  1174. if (!is_gimple_assign (new_stmt)
  1175. || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (new_stmt))
  1176. || TREE_TYPE (gimple_assign_lhs (new_stmt)) != interm_type)
  1177. return false;
  1178. stmts->safe_push (def_stmt);
  1179. oprnd = gimple_assign_lhs (new_stmt);
  1180. }
  1181. else
  1182. {
  1183. /* Create NEW_OPRND = (INTERM_TYPE) OPRND. */
  1184. oprnd = gimple_assign_rhs1 (def_stmt);
  1185. new_oprnd = make_ssa_name (interm_type);
  1186. new_stmt = gimple_build_assign (new_oprnd, NOP_EXPR, oprnd);
  1187. STMT_VINFO_RELATED_STMT (vinfo_for_stmt (def_stmt)) = new_stmt;
  1188. stmts->safe_push (def_stmt);
  1189. oprnd = new_oprnd;
  1190. }
  1191. }
  1192. else
  1193. {
  1194. /* Retrieve the operand before the type promotion. */
  1195. oprnd = gimple_assign_rhs1 (def_stmt);
  1196. }
  1197. }
  1198. else
  1199. {
  1200. if (interm_type)
  1201. {
  1202. /* Create a type conversion HALF_TYPE->INTERM_TYPE. */
  1203. new_oprnd = make_ssa_name (interm_type);
  1204. new_stmt = gimple_build_assign (new_oprnd, NOP_EXPR, oprnd);
  1205. oprnd = new_oprnd;
  1206. *new_def_stmt = new_stmt;
  1207. }
  1208. /* Otherwise, OPRND is already set. */
  1209. }
  1210. if (interm_type)
  1211. *new_type = interm_type;
  1212. else
  1213. *new_type = half_type;
  1214. *op0 = oprnd;
  1215. *op1 = fold_convert (*new_type, const_oprnd);
  1216. return true;
  1217. }

/* Try to find a statement or a sequence of statements that can be performed
   on a smaller type:

     type x_t;
     TYPE x_T, res0_T, res1_T;
   loop:
     S1  x_t = *p;
     S2  x_T = (TYPE) x_t;
     S3  res0_T = op (x_T, C0);
     S4  res1_T = op (res0_T, C1);
     S5  ... = () res1_T;  - type demotion

   where type 'TYPE' is at least double the size of type 'type', C0 and C1 are
   constants.
   Check if S3 and S4 can be done on a smaller type than 'TYPE', it can either
   be 'type' or some intermediate type.  For now, we expect S5 to be a type
   demotion operation.  We also check that S3 and S4 have only one use.  */
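
/* Illustrative sketch (hypothetical example, not from the original
   sources): C's integer promotion rules routinely create this shape,

     unsigned char in[N], out[N];
     for (i = 0; i < N; i++)
       out[i] = (unsigned char) ((in[i] << 1) | 3);

   where in[i] is promoted to 'int', shifted and or-ed there, and the
   result demoted back to 'unsigned char'.  The recognizer below lets the
   shift and ior be done in a narrower type, so each vector holds more
   elements.  */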
static gimple
vect_recog_over_widening_pattern (vec<gimple> *stmts,
                                  tree *type_in, tree *type_out)
{
  gimple stmt = stmts->pop ();
  gimple pattern_stmt = NULL, new_def_stmt, prev_stmt = NULL, use_stmt = NULL;
  tree op0, op1, vectype = NULL_TREE, use_lhs, use_type;
  tree var = NULL_TREE, new_type = NULL_TREE, new_oprnd;
  bool first;
  tree type = NULL;

  first = true;
  while (1)
    {
      if (!vinfo_for_stmt (stmt)
          || STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (stmt)))
        return NULL;

      new_def_stmt = NULL;
      if (!vect_operation_fits_smaller_type (stmt, var, &new_type,
                                             &op0, &op1, &new_def_stmt,
                                             stmts))
        {
          if (first)
            return NULL;
          else
            break;
        }

      /* STMT can be performed on a smaller type.  Check its uses.  */
      use_stmt = vect_single_imm_use (stmt);
      if (!use_stmt || !is_gimple_assign (use_stmt))
        return NULL;

      /* Create pattern statement for STMT.  */
      vectype = get_vectype_for_scalar_type (new_type);
      if (!vectype)
        return NULL;

      /* We want to collect all the statements for which we create pattern
         statements, except for the case when the last statement in the
         sequence doesn't have a corresponding pattern statement.  In such
         a case we associate the last pattern statement with the last statement
         in the sequence.  Therefore, we only add the original statement to
         the list if we know that it is not the last.  */
      if (prev_stmt)
        stmts->safe_push (prev_stmt);

      var = vect_recog_temp_ssa_var (new_type, NULL);
      pattern_stmt
        = gimple_build_assign (var, gimple_assign_rhs_code (stmt), op0, op1);
      STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt)) = pattern_stmt;
      new_pattern_def_seq (vinfo_for_stmt (stmt), new_def_stmt);

      if (dump_enabled_p ())
        {
          dump_printf_loc (MSG_NOTE, vect_location,
                           "created pattern stmt: ");
          dump_gimple_stmt (MSG_NOTE, TDF_SLIM, pattern_stmt, 0);
          dump_printf (MSG_NOTE, "\n");
        }

      type = gimple_expr_type (stmt);
      prev_stmt = stmt;
      stmt = use_stmt;

      first = false;
    }

  /* We got a sequence.  We expect it to end with a type demotion operation.
     Otherwise, we quit (for now).  There are three possible cases: the
     conversion is to NEW_TYPE (we don't do anything), the conversion is to
     a type bigger than NEW_TYPE and/or the signedness of USE_TYPE and
     NEW_TYPE differs (we create a new conversion statement).  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
    {
      use_lhs = gimple_assign_lhs (use_stmt);
      use_type = TREE_TYPE (use_lhs);
      /* Support only type demotion or signedness change.  */
      if (!INTEGRAL_TYPE_P (use_type)
          || TYPE_PRECISION (type) <= TYPE_PRECISION (use_type))
        return NULL;

      /* Check that NEW_TYPE is not bigger than the conversion result.  */
      if (TYPE_PRECISION (new_type) > TYPE_PRECISION (use_type))
        return NULL;

      if (TYPE_UNSIGNED (new_type) != TYPE_UNSIGNED (use_type)
          || TYPE_PRECISION (new_type) != TYPE_PRECISION (use_type))
        {
          /* Create NEW_TYPE->USE_TYPE conversion.  */
          new_oprnd = make_ssa_name (use_type);
          pattern_stmt = gimple_build_assign (new_oprnd, NOP_EXPR, var);
          STMT_VINFO_RELATED_STMT (vinfo_for_stmt (use_stmt)) = pattern_stmt;

          *type_in = get_vectype_for_scalar_type (new_type);
          *type_out = get_vectype_for_scalar_type (use_type);

          /* We created a pattern statement for the last statement in the
             sequence, so we don't need to associate it with the pattern
             statement created for PREV_STMT.  Therefore, we add PREV_STMT
             to the list in order to mark it later in vect_pattern_recog_1.  */
          if (prev_stmt)
            stmts->safe_push (prev_stmt);
        }
      else
        {
          if (prev_stmt)
            STMT_VINFO_PATTERN_DEF_SEQ (vinfo_for_stmt (use_stmt))
              = STMT_VINFO_PATTERN_DEF_SEQ (vinfo_for_stmt (prev_stmt));

          *type_in = vectype;
          *type_out = NULL_TREE;
        }

      stmts->safe_push (use_stmt);
    }
  else
    /* TODO: support general case, create a conversion to the correct type.  */
    return NULL;

  /* Pattern detected.  */
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_NOTE, vect_location,
                       "vect_recog_over_widening_pattern: detected: ");
      dump_gimple_stmt (MSG_NOTE, TDF_SLIM, pattern_stmt, 0);
      dump_printf (MSG_NOTE, "\n");
    }

  return pattern_stmt;
}

/* Detect widening shift pattern:

   type a_t;
   TYPE a_T, res_T;

   S1 a_t = ;
   S2 a_T = (TYPE) a_t;
   S3 res_T = a_T << CONST;

   where type 'TYPE' is at least double the size of type 'type'.

   Also detect cases where the shift result is immediately converted
   to another type 'result_type' that is no larger in size than 'TYPE'.
   In those cases we perform a widen-shift that directly results in
   'result_type', to avoid a possible over-widening situation:

   type a_t;
   TYPE a_T, res_T;
   result_type res_result;

   S1 a_t = ;
   S2 a_T = (TYPE) a_t;
   S3 res_T = a_T << CONST;
   S4 res_result = (result_type) res_T;
       '--> res_result' = a_t w<< CONST;

   And a case when 'TYPE' is 4 times bigger than 'type'.  In that case we
   create an additional pattern stmt for S2 to create a variable of an
   intermediate type, and perform widen-shift on the intermediate type:

   type a_t;
   interm_type a_it;
   TYPE a_T, res_T, res_T';

   S1 a_t = ;
   S2 a_T = (TYPE) a_t;
       '--> a_it = (interm_type) a_t;
   S3 res_T = a_T << CONST;
       '--> res_T' = a_it <<* CONST;

   Input/Output:

   * STMTS: Contains a stmt from which the pattern search begins.
     In case of unsigned widen-shift, the original stmt (S3) is replaced
     with S4 in STMTS.  When an intermediate type is used and a pattern
     statement is created for S2, we also put S2 here (before S3).

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the sequence of
     stmts that constitute the pattern.  In this case it will be:
     WIDEN_LSHIFT_EXPR <a_t, CONST>.  */
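
/* Illustrative sketch (hypothetical example, not from the original
   sources):

     unsigned char a[N];
     unsigned short out[N];
     for (i = 0; i < N; i++)
       out[i] = (unsigned short) (a[i] << 3);

   Here a[i] is promoted to 'int' before the shift and the result is
   demoted to 'unsigned short'; the recognizer below turns this into a
   single widening shift a[i] w<< 3, assuming the target implements
   WIDEN_LSHIFT_EXPR (e.g. via an instruction such as ARM NEON's vshll).  */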
static gimple
vect_recog_widen_shift_pattern (vec<gimple> *stmts,
                                tree *type_in, tree *type_out)
{
  gimple last_stmt = stmts->pop ();
  gimple def_stmt0;
  tree oprnd0, oprnd1;
  tree type, half_type0;
  gimple pattern_stmt;
  tree vectype, vectype_out = NULL_TREE;
  tree var;
  enum tree_code dummy_code;
  int dummy_int;
  vec<tree> dummy_vec;
  gimple use_stmt;
  bool promotion;

  if (!is_gimple_assign (last_stmt) || !vinfo_for_stmt (last_stmt))
    return NULL;

  if (STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (last_stmt)))
    return NULL;

  if (gimple_assign_rhs_code (last_stmt) != LSHIFT_EXPR)
    return NULL;

  oprnd0 = gimple_assign_rhs1 (last_stmt);
  oprnd1 = gimple_assign_rhs2 (last_stmt);
  if (TREE_CODE (oprnd0) != SSA_NAME || TREE_CODE (oprnd1) != INTEGER_CST)
    return NULL;

  /* Check operand 0: it has to be defined by a type promotion.  */
  if (!type_conversion_p (oprnd0, last_stmt, false, &half_type0, &def_stmt0,
                          &promotion)
      || !promotion)
    return NULL;

  /* Check operand 1: has to be positive.  We check that it fits the type
     in vect_handle_widen_op_by_const ().  */
  if (tree_int_cst_compare (oprnd1, size_zero_node) <= 0)
    return NULL;

  oprnd0 = gimple_assign_rhs1 (def_stmt0);
  type = gimple_expr_type (last_stmt);

  /* Check for subsequent conversion to another type.  */
  use_stmt = vect_single_imm_use (last_stmt);
  if (use_stmt && is_gimple_assign (use_stmt)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
      && !STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (use_stmt)))
    {
      tree use_lhs = gimple_assign_lhs (use_stmt);
      tree use_type = TREE_TYPE (use_lhs);

      if (INTEGRAL_TYPE_P (use_type)
          && TYPE_PRECISION (use_type) <= TYPE_PRECISION (type))
        {
          last_stmt = use_stmt;
          type = use_type;
        }
    }

  /* Check if this is a widening operation.  */
  gimple wstmt = NULL;
  if (!vect_handle_widen_op_by_const (last_stmt, LSHIFT_EXPR, oprnd1,
                                      &oprnd0, &wstmt,
                                      type, &half_type0, def_stmt0))
    return NULL;

  /* Pattern detected.  */
  if (dump_enabled_p ())
    dump_printf_loc (MSG_NOTE, vect_location,
                     "vect_recog_widen_shift_pattern: detected:\n");

  /* Check target support.  */
  vectype = get_vectype_for_scalar_type (half_type0);
  vectype_out = get_vectype_for_scalar_type (type);

  if (!vectype
      || !vectype_out
      || !supportable_widening_operation (WIDEN_LSHIFT_EXPR, last_stmt,
                                          vectype_out, vectype,
                                          &dummy_code, &dummy_code,
                                          &dummy_int, &dummy_vec))
    return NULL;

  *type_in = vectype;
  *type_out = vectype_out;

  /* Pattern supported.  Create a stmt to be used to replace the pattern.  */
  var = vect_recog_temp_ssa_var (type, NULL);
  pattern_stmt =
    gimple_build_assign (var, WIDEN_LSHIFT_EXPR, oprnd0, oprnd1);

  if (wstmt)
    {
      stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
      loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
      bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
      new_pattern_def_seq (stmt_vinfo, wstmt);
      stmt_vec_info new_stmt_info
        = new_stmt_vec_info (wstmt, loop_vinfo, bb_vinfo);
      set_vinfo_for_stmt (wstmt, new_stmt_info);
      STMT_VINFO_VECTYPE (new_stmt_info) = vectype;
    }

  if (dump_enabled_p ())
    dump_gimple_stmt_loc (MSG_NOTE, vect_location, TDF_SLIM, pattern_stmt, 0);

  stmts->safe_push (last_stmt);
  return pattern_stmt;
}

/* Detect a rotate pattern that wouldn't be otherwise vectorized:

   type a_t, b_t, c_t;

   S0 a_t = b_t r<< c_t;

   Input/Output:

   * STMTS: Contains a stmt from which the pattern search begins,
     i.e. the shift/rotate stmt.  The original stmt (S0) is replaced
     with a sequence:

     S1 d_t = -c_t;
     S2 e_t = d_t & (B - 1);
     S3 f_t = b_t << c_t;
     S4 g_t = b_t >> e_t;
     S0 a_t = f_t | g_t;

     where B is the element bitsize of the type.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the rotate
     S0 stmt.  */
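
/* Illustrative sketch (hypothetical example, not from the original
   sources): for a 32-bit unsigned element, the S1-S4 expansion above is
   the usual shift-based rotate

     unsigned int rotl (unsigned int b, unsigned int c)
     {
       return (b << c) | (b >> (-c & 31));
     }

   which only needs vector shifts, ands and ors when the target lacks a
   vector rotate instruction.  */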
static gimple
vect_recog_rotate_pattern (vec<gimple> *stmts, tree *type_in, tree *type_out)
{
  gimple last_stmt = stmts->pop ();
  tree oprnd0, oprnd1, lhs, var, var1, var2, vectype, type, stype, def, def2;
  gimple pattern_stmt, def_stmt;
  enum tree_code rhs_code;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
  enum vect_def_type dt;
  optab optab1, optab2;
  edge ext_def = NULL;

  if (!is_gimple_assign (last_stmt))
    return NULL;

  rhs_code = gimple_assign_rhs_code (last_stmt);
  switch (rhs_code)
    {
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      break;
    default:
      return NULL;
    }

  if (STMT_VINFO_IN_PATTERN_P (stmt_vinfo))
    return NULL;

  lhs = gimple_assign_lhs (last_stmt);
  oprnd0 = gimple_assign_rhs1 (last_stmt);
  type = TREE_TYPE (oprnd0);
  oprnd1 = gimple_assign_rhs2 (last_stmt);
  if (TREE_CODE (oprnd0) != SSA_NAME
      || TYPE_PRECISION (TREE_TYPE (lhs)) != TYPE_PRECISION (type)
      || !INTEGRAL_TYPE_P (type)
      || !TYPE_UNSIGNED (type))
    return NULL;

  if (!vect_is_simple_use (oprnd1, last_stmt, loop_vinfo, bb_vinfo, &def_stmt,
                           &def, &dt))
    return NULL;

  if (dt != vect_internal_def
      && dt != vect_constant_def
      && dt != vect_external_def)
    return NULL;

  vectype = get_vectype_for_scalar_type (type);
  if (vectype == NULL_TREE)
    return NULL;

  /* If vector/vector or vector/scalar rotate is supported by the target,
     don't do anything here.  */
  optab1 = optab_for_tree_code (rhs_code, vectype, optab_vector);
  if (optab1
      && optab_handler (optab1, TYPE_MODE (vectype)) != CODE_FOR_nothing)
    return NULL;

  if (bb_vinfo != NULL || dt != vect_internal_def)
    {
      optab2 = optab_for_tree_code (rhs_code, vectype, optab_scalar);
      if (optab2
          && optab_handler (optab2, TYPE_MODE (vectype)) != CODE_FOR_nothing)
        return NULL;
    }

  /* If vector/vector or vector/scalar shifts aren't supported by the target,
     don't do anything here either.  */
  optab1 = optab_for_tree_code (LSHIFT_EXPR, vectype, optab_vector);
  optab2 = optab_for_tree_code (RSHIFT_EXPR, vectype, optab_vector);
  if (!optab1
      || optab_handler (optab1, TYPE_MODE (vectype)) == CODE_FOR_nothing
      || !optab2
      || optab_handler (optab2, TYPE_MODE (vectype)) == CODE_FOR_nothing)
    {
      if (bb_vinfo == NULL && dt == vect_internal_def)
        return NULL;
      optab1 = optab_for_tree_code (LSHIFT_EXPR, vectype, optab_scalar);
      optab2 = optab_for_tree_code (RSHIFT_EXPR, vectype, optab_scalar);
      if (!optab1
          || optab_handler (optab1, TYPE_MODE (vectype)) == CODE_FOR_nothing
          || !optab2
          || optab_handler (optab2, TYPE_MODE (vectype)) == CODE_FOR_nothing)
        return NULL;
    }

  *type_in = vectype;
  *type_out = vectype;
  if (*type_in == NULL_TREE)
    return NULL;

  if (dt == vect_external_def
      && TREE_CODE (oprnd1) == SSA_NAME
      && loop_vinfo)
    {
      struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
      ext_def = loop_preheader_edge (loop);
      if (!SSA_NAME_IS_DEFAULT_DEF (oprnd1))
        {
          basic_block bb = gimple_bb (SSA_NAME_DEF_STMT (oprnd1));
          if (bb == NULL
              || !dominated_by_p (CDI_DOMINATORS, ext_def->dest, bb))
            ext_def = NULL;
        }
    }

  def = NULL_TREE;
  if (TREE_CODE (oprnd1) == INTEGER_CST
      || TYPE_MODE (TREE_TYPE (oprnd1)) == TYPE_MODE (type))
    def = oprnd1;
  else if (def_stmt && gimple_assign_cast_p (def_stmt))
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      if (TYPE_MODE (TREE_TYPE (rhs1)) == TYPE_MODE (type)
          && TYPE_PRECISION (TREE_TYPE (rhs1))
             == TYPE_PRECISION (type))
        def = rhs1;
    }

  STMT_VINFO_PATTERN_DEF_SEQ (stmt_vinfo) = NULL;
  if (def == NULL_TREE)
    {
      def = vect_recog_temp_ssa_var (type, NULL);
      def_stmt = gimple_build_assign (def, NOP_EXPR, oprnd1);
      if (ext_def)
        {
          basic_block new_bb
            = gsi_insert_on_edge_immediate (ext_def, def_stmt);
          gcc_assert (!new_bb);
        }
      else
        append_pattern_def_seq (stmt_vinfo, def_stmt);
    }
  stype = TREE_TYPE (def);

  if (TREE_CODE (def) == INTEGER_CST)
    {
      if (!tree_fits_uhwi_p (def)
          || tree_to_uhwi (def) >= GET_MODE_PRECISION (TYPE_MODE (type))
          || integer_zerop (def))
        return NULL;
      def2 = build_int_cst (stype,
                            GET_MODE_PRECISION (TYPE_MODE (type))
                            - tree_to_uhwi (def));
    }
  else
    {
      tree vecstype = get_vectype_for_scalar_type (stype);
      stmt_vec_info def_stmt_vinfo;

      if (vecstype == NULL_TREE)
        return NULL;
      def2 = vect_recog_temp_ssa_var (stype, NULL);
      def_stmt = gimple_build_assign (def2, NEGATE_EXPR, def);
      if (ext_def)
        {
          basic_block new_bb
            = gsi_insert_on_edge_immediate (ext_def, def_stmt);
          gcc_assert (!new_bb);
        }
      else
        {
          def_stmt_vinfo = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
          set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
          STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecstype;
          append_pattern_def_seq (stmt_vinfo, def_stmt);
        }

      def2 = vect_recog_temp_ssa_var (stype, NULL);
      tree mask
        = build_int_cst (stype, GET_MODE_PRECISION (TYPE_MODE (stype)) - 1);
      def_stmt = gimple_build_assign (def2, BIT_AND_EXPR,
                                      gimple_assign_lhs (def_stmt), mask);
      if (ext_def)
        {
          basic_block new_bb
            = gsi_insert_on_edge_immediate (ext_def, def_stmt);
          gcc_assert (!new_bb);
        }
      else
        {
          def_stmt_vinfo = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
          set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
          STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecstype;
          append_pattern_def_seq (stmt_vinfo, def_stmt);
        }
    }

  var1 = vect_recog_temp_ssa_var (type, NULL);
  def_stmt = gimple_build_assign (var1, rhs_code == LROTATE_EXPR
                                        ? LSHIFT_EXPR : RSHIFT_EXPR,
                                  oprnd0, def);
  append_pattern_def_seq (stmt_vinfo, def_stmt);

  var2 = vect_recog_temp_ssa_var (type, NULL);
  def_stmt = gimple_build_assign (var2, rhs_code == LROTATE_EXPR
                                        ? RSHIFT_EXPR : LSHIFT_EXPR,
                                  oprnd0, def2);
  append_pattern_def_seq (stmt_vinfo, def_stmt);

  /* Pattern detected.  */
  if (dump_enabled_p ())
    dump_printf_loc (MSG_NOTE, vect_location,
                     "vect_recog_rotate_pattern: detected:\n");

  /* Pattern supported.  Create a stmt to be used to replace the pattern.  */
  var = vect_recog_temp_ssa_var (type, NULL);
  pattern_stmt = gimple_build_assign (var, BIT_IOR_EXPR, var1, var2);

  if (dump_enabled_p ())
    dump_gimple_stmt_loc (MSG_NOTE, vect_location, TDF_SLIM, pattern_stmt, 0);

  stmts->safe_push (last_stmt);
  return pattern_stmt;
}

/* Detect a vector by vector shift pattern that wouldn't be otherwise
   vectorized:

   type a_t;
   TYPE b_T, res_T;

   S1 a_t = ;
   S2 b_T = ;
   S3 res_T = b_T op a_t;

   where type 'TYPE' is a type with different size than 'type',
   and op is <<, >> or rotate.

   Also detect cases:

   type a_t;
   TYPE b_T, c_T, res_T;

   S0 c_T = ;
   S1 a_t = (type) c_T;
   S2 b_T = ;
   S3 res_T = b_T op a_t;

   Input/Output:

   * STMTS: Contains a stmt from which the pattern search begins,
     i.e. the shift/rotate stmt.  The original stmt (S3) is replaced
     with a shift/rotate which has the same type on both operands, in the
     second case just b_T op c_T, in the first case with added cast
     from a_t to c_T in STMT_VINFO_PATTERN_DEF_SEQ.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the shift/rotate
     S3 stmt.  */
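
/* Illustrative sketch (hypothetical example, not from the original
   sources): shifting 'int' elements by counts kept in a narrower array,

     int x[N];
     unsigned char s[N];
     for (i = 0; i < N; i++)
       x[i] = x[i] << s[i];

   is handled by giving both shift operands the element type of x, with
   the widening cast of the count carried in STMT_VINFO_PATTERN_DEF_SEQ.  */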
static gimple
vect_recog_vector_vector_shift_pattern (vec<gimple> *stmts,
                                        tree *type_in, tree *type_out)
{
  gimple last_stmt = stmts->pop ();
  tree oprnd0, oprnd1, lhs, var;
  gimple pattern_stmt, def_stmt;
  enum tree_code rhs_code;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
  enum vect_def_type dt;
  tree def;

  if (!is_gimple_assign (last_stmt))
    return NULL;

  rhs_code = gimple_assign_rhs_code (last_stmt);
  switch (rhs_code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      break;
    default:
      return NULL;
    }

  if (STMT_VINFO_IN_PATTERN_P (stmt_vinfo))
    return NULL;

  lhs = gimple_assign_lhs (last_stmt);
  oprnd0 = gimple_assign_rhs1 (last_stmt);
  oprnd1 = gimple_assign_rhs2 (last_stmt);
  if (TREE_CODE (oprnd0) != SSA_NAME
      || TREE_CODE (oprnd1) != SSA_NAME
      || TYPE_MODE (TREE_TYPE (oprnd0)) == TYPE_MODE (TREE_TYPE (oprnd1))
      || TYPE_PRECISION (TREE_TYPE (oprnd1))
         != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (oprnd1)))
      || TYPE_PRECISION (TREE_TYPE (lhs))
         != TYPE_PRECISION (TREE_TYPE (oprnd0)))
    return NULL;

  if (!vect_is_simple_use (oprnd1, last_stmt, loop_vinfo, bb_vinfo, &def_stmt,
                           &def, &dt))
    return NULL;

  if (dt != vect_internal_def)
    return NULL;

  *type_in = get_vectype_for_scalar_type (TREE_TYPE (oprnd0));
  *type_out = *type_in;
  if (*type_in == NULL_TREE)
    return NULL;

  def = NULL_TREE;
  if (gimple_assign_cast_p (def_stmt))
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      if (TYPE_MODE (TREE_TYPE (rhs1)) == TYPE_MODE (TREE_TYPE (oprnd0))
          && TYPE_PRECISION (TREE_TYPE (rhs1))
             == TYPE_PRECISION (TREE_TYPE (oprnd0)))
        def = rhs1;
    }

  if (def == NULL_TREE)
    {
      def = vect_recog_temp_ssa_var (TREE_TYPE (oprnd0), NULL);
      def_stmt = gimple_build_assign (def, NOP_EXPR, oprnd1);
      new_pattern_def_seq (stmt_vinfo, def_stmt);
    }

  /* Pattern detected.  */
  if (dump_enabled_p ())
    dump_printf_loc (MSG_NOTE, vect_location,
                     "vect_recog_vector_vector_shift_pattern: detected:\n");

  /* Pattern supported.  Create a stmt to be used to replace the pattern.  */
  var = vect_recog_temp_ssa_var (TREE_TYPE (oprnd0), NULL);
  pattern_stmt = gimple_build_assign (var, rhs_code, oprnd0, def);

  if (dump_enabled_p ())
    dump_gimple_stmt_loc (MSG_NOTE, vect_location, TDF_SLIM, pattern_stmt, 0);

  stmts->safe_push (last_stmt);
  return pattern_stmt;
}

/* Detect a signed division by a constant that wouldn't be
   otherwise vectorized:

   type a_t, b_t;

   S1 a_t = b_t / N;

   where type 'type' is an integral type and N is a constant.

   Similarly handle modulo by a constant:

   S4 a_t = b_t % N;

   Input/Output:

   * STMTS: Contains a stmt from which the pattern search begins,
     i.e. the division stmt.  S1 is replaced, if N is a power
     of two constant and type is signed, by:
   S3  y_t = b_t < 0 ? N - 1 : 0;
   S2  x_t = b_t + y_t;
   S1' a_t = x_t >> log2 (N);

     S4 is replaced, if N is a power of two constant and
     type is signed, by (where *_T temporaries have unsigned type):
   S9  y_T = b_t < 0 ? -1U : 0U;
   S8  z_T = y_T >> (sizeof (type_t) * CHAR_BIT - log2 (N));
   S7  z_t = (type) z_T;
   S6  w_t = b_t + z_t;
   S5  x_t = w_t & (N - 1);
   S4' a_t = x_t - z_t;

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the division
     S1 or modulo S4 stmt.  */
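
/* Illustrative sketch (hypothetical example, not from the original
   sources): for signed 'int' b and N = 8, the S3/S2/S1' sequence above
   computes

     a = (b + (b < 0 ? 7 : 0)) >> 3;

   A plain arithmetic shift would round toward negative infinity, while
   C's '/' truncates toward zero; the conditional addend corrects the
   negative cases.  */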
static gimple
vect_recog_divmod_pattern (vec<gimple> *stmts,
                           tree *type_in, tree *type_out)
{
  gimple last_stmt = stmts->pop ();
  tree oprnd0, oprnd1, vectype, itype, cond;
  gimple pattern_stmt, def_stmt;
  enum tree_code rhs_code;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
  optab optab;
  tree q;
  int dummy_int, prec;
  stmt_vec_info def_stmt_vinfo;

  if (!is_gimple_assign (last_stmt))
    return NULL;

  rhs_code = gimple_assign_rhs_code (last_stmt);
  switch (rhs_code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      break;
    default:
      return NULL;
    }

  if (STMT_VINFO_IN_PATTERN_P (stmt_vinfo))
    return NULL;

  oprnd0 = gimple_assign_rhs1 (last_stmt);
  oprnd1 = gimple_assign_rhs2 (last_stmt);
  itype = TREE_TYPE (oprnd0);
  if (TREE_CODE (oprnd0) != SSA_NAME
      || TREE_CODE (oprnd1) != INTEGER_CST
      || TREE_CODE (itype) != INTEGER_TYPE
      || TYPE_PRECISION (itype) != GET_MODE_PRECISION (TYPE_MODE (itype)))
    return NULL;

  vectype = get_vectype_for_scalar_type (itype);
  if (vectype == NULL_TREE)
    return NULL;

  /* If the target can handle vectorized division or modulo natively,
     don't attempt to optimize this.  */
  optab = optab_for_tree_code (rhs_code, vectype, optab_default);
  if (optab != unknown_optab)
    {
      machine_mode vec_mode = TYPE_MODE (vectype);
      int icode = (int) optab_handler (optab, vec_mode);
      if (icode != CODE_FOR_nothing)
        return NULL;
    }

  prec = TYPE_PRECISION (itype);
  if (integer_pow2p (oprnd1))
    {
      if (TYPE_UNSIGNED (itype) || tree_int_cst_sgn (oprnd1) != 1)
        return NULL;

      /* Pattern detected.  */
      if (dump_enabled_p ())
        dump_printf_loc (MSG_NOTE, vect_location,
                         "vect_recog_divmod_pattern: detected:\n");

      cond = build2 (LT_EXPR, boolean_type_node, oprnd0,
                     build_int_cst (itype, 0));
      if (rhs_code == TRUNC_DIV_EXPR)
        {
          tree var = vect_recog_temp_ssa_var (itype, NULL);
          tree shift;
          def_stmt
            = gimple_build_assign (var, COND_EXPR, cond,
                                   fold_build2 (MINUS_EXPR, itype, oprnd1,
                                                build_int_cst (itype, 1)),
                                   build_int_cst (itype, 0));
          new_pattern_def_seq (stmt_vinfo, def_stmt);
          var = vect_recog_temp_ssa_var (itype, NULL);
          def_stmt
            = gimple_build_assign (var, PLUS_EXPR, oprnd0,
                                   gimple_assign_lhs (def_stmt));
          append_pattern_def_seq (stmt_vinfo, def_stmt);

          shift = build_int_cst (itype, tree_log2 (oprnd1));
          pattern_stmt
            = gimple_build_assign (vect_recog_temp_ssa_var (itype, NULL),
                                   RSHIFT_EXPR, var, shift);
        }
      else
        {
          tree signmask;
          STMT_VINFO_PATTERN_DEF_SEQ (stmt_vinfo) = NULL;
          if (compare_tree_int (oprnd1, 2) == 0)
            {
              signmask = vect_recog_temp_ssa_var (itype, NULL);
              def_stmt = gimple_build_assign (signmask, COND_EXPR, cond,
                                              build_int_cst (itype, 1),
                                              build_int_cst (itype, 0));
              append_pattern_def_seq (stmt_vinfo, def_stmt);
            }
          else
            {
              tree utype
                = build_nonstandard_integer_type (prec, 1);
              tree vecutype = get_vectype_for_scalar_type (utype);
              tree shift
                = build_int_cst (utype, GET_MODE_BITSIZE (TYPE_MODE (itype))
                                        - tree_log2 (oprnd1));
              tree var = vect_recog_temp_ssa_var (utype, NULL);

              def_stmt = gimple_build_assign (var, COND_EXPR, cond,
                                              build_int_cst (utype, -1),
                                              build_int_cst (utype, 0));
              def_stmt_vinfo
                = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
              set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
              STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecutype;
              append_pattern_def_seq (stmt_vinfo, def_stmt);
              var = vect_recog_temp_ssa_var (utype, NULL);
              def_stmt = gimple_build_assign (var, RSHIFT_EXPR,
                                              gimple_assign_lhs (def_stmt),
                                              shift);
              def_stmt_vinfo
                = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
              set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
              STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecutype;
              append_pattern_def_seq (stmt_vinfo, def_stmt);
              signmask = vect_recog_temp_ssa_var (itype, NULL);
              def_stmt
                = gimple_build_assign (signmask, NOP_EXPR, var);
              append_pattern_def_seq (stmt_vinfo, def_stmt);
            }
          def_stmt
            = gimple_build_assign (vect_recog_temp_ssa_var (itype, NULL),
                                   PLUS_EXPR, oprnd0, signmask);
          append_pattern_def_seq (stmt_vinfo, def_stmt);
          def_stmt
            = gimple_build_assign (vect_recog_temp_ssa_var (itype, NULL),
                                   BIT_AND_EXPR, gimple_assign_lhs (def_stmt),
                                   fold_build2 (MINUS_EXPR, itype, oprnd1,
                                                build_int_cst (itype, 1)));
          append_pattern_def_seq (stmt_vinfo, def_stmt);

          pattern_stmt
            = gimple_build_assign (vect_recog_temp_ssa_var (itype, NULL),
                                   MINUS_EXPR, gimple_assign_lhs (def_stmt),
                                   signmask);
        }

      if (dump_enabled_p ())
        dump_gimple_stmt_loc (MSG_NOTE, vect_location, TDF_SLIM, pattern_stmt,
                              0);

      stmts->safe_push (last_stmt);

      *type_in = vectype;
      *type_out = vectype;
      return pattern_stmt;
    }
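
  /* Illustrative note (hypothetical example, not from the original
     sources): the rest of this function handles general constant
     divisors via a high-part multiply.  E.g. for unsigned 32-bit x,
     x / 3 can be computed as (x h* 0xAAAAAAAB) >> 1, assuming the
     target supports MULT_HIGHPART_EXPR on the chosen vector type;
     choose_multiplier picks the magic constant and shift counts.  */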

  if (prec > HOST_BITS_PER_WIDE_INT
      || integer_zerop (oprnd1))
    return NULL;

  if (!can_mult_highpart_p (TYPE_MODE (vectype), TYPE_UNSIGNED (itype)))
    return NULL;

  STMT_VINFO_PATTERN_DEF_SEQ (stmt_vinfo) = NULL;

  if (TYPE_UNSIGNED (itype))
    {
      unsigned HOST_WIDE_INT mh, ml;
      int pre_shift, post_shift;
      unsigned HOST_WIDE_INT d = (TREE_INT_CST_LOW (oprnd1)
                                  & GET_MODE_MASK (TYPE_MODE (itype)));
      tree t1, t2, t3, t4;

      if (d >= ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
        /* FIXME: Can transform this into oprnd0 >= oprnd1 ? 1 : 0.  */
        return NULL;

      /* Find a suitable multiplier and right shift count
         instead of multiplying with D.  */
      mh = choose_multiplier (d, prec, prec, &ml, &post_shift, &dummy_int);

      /* If the suggested multiplier is more than SIZE bits, we can do better
         for even divisors, using an initial right shift.  */
      if (mh != 0 && (d & 1) == 0)
        {
          pre_shift = floor_log2 (d & -d);
          mh = choose_multiplier (d >> pre_shift, prec, prec - pre_shift,
                                  &ml, &post_shift, &dummy_int);
          gcc_assert (!mh);
        }
      else
        pre_shift = 0;

      if (mh != 0)
        {
          if (post_shift - 1 >= prec)
            return NULL;

          /* t1 = oprnd0 h* ml;
             t2 = oprnd0 - t1;
             t3 = t2 >> 1;
             t4 = t1 + t3;
             q = t4 >> (post_shift - 1);  */
          t1 = vect_recog_temp_ssa_var (itype, NULL);
          def_stmt = gimple_build_assign (t1, MULT_HIGHPART_EXPR, oprnd0,
                                          build_int_cst (itype, ml));
          append_pattern_def_seq (stmt_vinfo, def_stmt);

          t2 = vect_recog_temp_ssa_var (itype, NULL);
          def_stmt
            = gimple_build_assign (t2, MINUS_EXPR, oprnd0, t1);
          append_pattern_def_seq (stmt_vinfo, def_stmt);

          t3 = vect_recog_temp_ssa_var (itype, NULL);
          def_stmt
            = gimple_build_assign (t3, RSHIFT_EXPR, t2, integer_one_node);
          append_pattern_def_seq (stmt_vinfo, def_stmt);

          t4 = vect_recog_temp_ssa_var (itype, NULL);
          def_stmt
            = gimple_build_assign (t4, PLUS_EXPR, t1, t3);

          if (post_shift != 1)
            {
              append_pattern_def_seq (stmt_vinfo, def_stmt);

              q = vect_recog_temp_ssa_var (itype, NULL);
              pattern_stmt
                = gimple_build_assign (q, RSHIFT_EXPR, t4,
                                       build_int_cst (itype, post_shift - 1));
            }
          else
            {
              q = t4;
              pattern_stmt = def_stmt;
            }
        }
      else
        {
          if (pre_shift >= prec || post_shift >= prec)
            return NULL;

          /* t1 = oprnd0 >> pre_shift;
             t2 = t1 h* ml;
             q = t2 >> post_shift;  */
          if (pre_shift)
            {
              t1 = vect_recog_temp_ssa_var (itype, NULL);
              def_stmt
                = gimple_build_assign (t1, RSHIFT_EXPR, oprnd0,
                                       build_int_cst (NULL, pre_shift));
              append_pattern_def_seq (stmt_vinfo, def_stmt);
            }
          else
            t1 = oprnd0;

          t2 = vect_recog_temp_ssa_var (itype, NULL);
          def_stmt = gimple_build_assign (t2, MULT_HIGHPART_EXPR, t1,
                                          build_int_cst (itype, ml));

          if (post_shift)
            {
              append_pattern_def_seq (stmt_vinfo, def_stmt);

              q = vect_recog_temp_ssa_var (itype, NULL);
              def_stmt
                = gimple_build_assign (q, RSHIFT_EXPR, t2,
                                       build_int_cst (itype, post_shift));
            }
          else
            q = t2;

          pattern_stmt = def_stmt;
        }
    }
  else
    {
      unsigned HOST_WIDE_INT ml;
      int post_shift;
      HOST_WIDE_INT d = TREE_INT_CST_LOW (oprnd1);
      unsigned HOST_WIDE_INT abs_d;
      bool add = false;
      tree t1, t2, t3, t4;

      /* Give up for -1.  */
      if (d == -1)
        return NULL;

      /* Since d might be INT_MIN, we have to cast to
         unsigned HOST_WIDE_INT before negating to avoid
         undefined signed overflow.  */
      abs_d = (d >= 0
               ? (unsigned HOST_WIDE_INT) d
               : - (unsigned HOST_WIDE_INT) d);

      /* n rem d = n rem -d  */
      if (rhs_code == TRUNC_MOD_EXPR && d < 0)
        {
          d = abs_d;
          oprnd1 = build_int_cst (itype, abs_d);
        }
      else if (HOST_BITS_PER_WIDE_INT >= prec
               && abs_d == (unsigned HOST_WIDE_INT) 1 << (prec - 1))
        /* This case is not handled correctly below.  */
        return NULL;

      choose_multiplier (abs_d, prec, prec - 1, &ml, &post_shift, &dummy_int);
      if (ml >= (unsigned HOST_WIDE_INT) 1 << (prec - 1))
        {
          add = true;
          ml |= (~(unsigned HOST_WIDE_INT) 0) << (prec - 1);
        }
      if (post_shift >= prec)
        return NULL;

      /* t1 = oprnd0 h* ml;  */
      t1 = vect_recog_temp_ssa_var (itype, NULL);
      def_stmt = gimple_build_assign (t1, MULT_HIGHPART_EXPR, oprnd0,
                                      build_int_cst (itype, ml));

      if (add)
        {
          /* t2 = t1 + oprnd0;  */
          append_pattern_def_seq (stmt_vinfo, def_stmt);
          t2 = vect_recog_temp_ssa_var (itype, NULL);
          def_stmt = gimple_build_assign (t2, PLUS_EXPR, t1, oprnd0);
        }
      else
        t2 = t1;

      if (post_shift)
        {
          /* t3 = t2 >> post_shift;  */
          append_pattern_def_seq (stmt_vinfo, def_stmt);
          t3 = vect_recog_temp_ssa_var (itype, NULL);
          def_stmt = gimple_build_assign (t3, RSHIFT_EXPR, t2,
                                          build_int_cst (itype, post_shift));
        }
      else
        t3 = t2;

      wide_int oprnd0_min, oprnd0_max;
      int msb = 1;
      if (get_range_info (oprnd0, &oprnd0_min, &oprnd0_max) == VR_RANGE)
        {
          if (!wi::neg_p (oprnd0_min, TYPE_SIGN (itype)))
            msb = 0;
          else if (wi::neg_p (oprnd0_max, TYPE_SIGN (itype)))
            msb = -1;
        }

      if (msb == 0 && d >= 0)
        {
          /* q = t3;  */
          q = t3;
          pattern_stmt = def_stmt;
        }
      else
        {
          /* t4 = oprnd0 >> (prec - 1);
             or if we know from VRP that oprnd0 >= 0
             t4 = 0;
             or if we know from VRP that oprnd0 < 0
             t4 = -1;  */
          append_pattern_def_seq (stmt_vinfo, def_stmt);
          t4 = vect_recog_temp_ssa_var (itype, NULL);
          if (msb != 1)
            def_stmt = gimple_build_assign (t4, INTEGER_CST,
                                            build_int_cst (itype, msb));
          else
            def_stmt = gimple_build_assign (t4, RSHIFT_EXPR, oprnd0,
                                            build_int_cst (itype, prec - 1));
          append_pattern_def_seq (stmt_vinfo, def_stmt);

          /* q = t3 - t4;  or q = t4 - t3;  */
          q = vect_recog_temp_ssa_var (itype, NULL);
          pattern_stmt = gimple_build_assign (q, MINUS_EXPR, d < 0 ? t4 : t3,
                                              d < 0 ? t3 : t4);
        }
    }

  if (rhs_code == TRUNC_MOD_EXPR)
    {
      tree r, t1;

      /* We divided.  Now finish by:
         t1 = q * oprnd1;
         r = oprnd0 - t1;  */
      append_pattern_def_seq (stmt_vinfo, pattern_stmt);

      t1 = vect_recog_temp_ssa_var (itype, NULL);
      def_stmt = gimple_build_assign (t1, MULT_EXPR, q, oprnd1);
      append_pattern_def_seq (stmt_vinfo, def_stmt);

      r = vect_recog_temp_ssa_var (itype, NULL);
      pattern_stmt = gimple_build_assign (r, MINUS_EXPR, oprnd0, t1);
    }

  /* Pattern detected.  */
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_NOTE, vect_location,
                       "vect_recog_divmod_pattern: detected: ");
      dump_gimple_stmt (MSG_NOTE, TDF_SLIM, pattern_stmt, 0);
      dump_printf (MSG_NOTE, "\n");
    }

  stmts->safe_push (last_stmt);

  *type_in = vectype;
  *type_out = vectype;
  return pattern_stmt;
}

/* Function vect_recog_mixed_size_cond_pattern

   Try to find the following pattern:

     type x_t, y_t;
     TYPE a_T, b_T, c_T;
   loop:
     S1  a_T = x_t CMP y_t ? b_T : c_T;

   where type 'TYPE' is an integral type which has different size
   from 'type'.  b_T and c_T are either constants (and if 'TYPE' is wider
   than 'type', the constants need to fit into an integer type
   with the same width as 'type') or results of conversion from 'type'.

   Input:

   * LAST_STMT: A stmt from which the pattern search begins.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the pattern.
     Additionally a def_stmt is added.

     a_it = x_t CMP y_t ? b_it : c_it;
     a_T = (TYPE) a_it;  */
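
/* Illustrative sketch (hypothetical example, not from the original
   sources):

     short x[N];
     int out[N];
     for (i = 0; i < N; i++)
       out[i] = x[i] < 16 ? 1 : 2;

   The COND_EXPR is carried out in a short-sized intermediate type and
   its result then widened to 'int', so the comparison and the selection
   use equally sized vector elements.  */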
static gimple
vect_recog_mixed_size_cond_pattern (vec<gimple> *stmts, tree *type_in,
                                    tree *type_out)
{
  gimple last_stmt = (*stmts)[0];
  tree cond_expr, then_clause, else_clause;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt), def_stmt_info;
  tree type, vectype, comp_vectype, itype = NULL_TREE, vecitype;
  machine_mode cmpmode;
  gimple pattern_stmt, def_stmt;
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
  tree orig_type0 = NULL_TREE, orig_type1 = NULL_TREE;
  gimple def_stmt0 = NULL, def_stmt1 = NULL;
  bool promotion;
  tree comp_scalar_type;

  if (!is_gimple_assign (last_stmt)
      || gimple_assign_rhs_code (last_stmt) != COND_EXPR
      || STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_internal_def)
    return NULL;

  cond_expr = gimple_assign_rhs1 (last_stmt);
  then_clause = gimple_assign_rhs2 (last_stmt);
  else_clause = gimple_assign_rhs3 (last_stmt);

  if (!COMPARISON_CLASS_P (cond_expr))
    return NULL;

  comp_scalar_type = TREE_TYPE (TREE_OPERAND (cond_expr, 0));
  comp_vectype = get_vectype_for_scalar_type (comp_scalar_type);
  if (comp_vectype == NULL_TREE)
    return NULL;

  type = gimple_expr_type (last_stmt);
  if (types_compatible_p (type, comp_scalar_type)
      || ((TREE_CODE (then_clause) != INTEGER_CST
           || TREE_CODE (else_clause) != INTEGER_CST)
          && !INTEGRAL_TYPE_P (comp_scalar_type))
      || !INTEGRAL_TYPE_P (type))
    return NULL;

  if ((TREE_CODE (then_clause) != INTEGER_CST
       && !type_conversion_p (then_clause, last_stmt, false, &orig_type0,
                              &def_stmt0, &promotion))
      || (TREE_CODE (else_clause) != INTEGER_CST
          && !type_conversion_p (else_clause, last_stmt, false, &orig_type1,
                                 &def_stmt1, &promotion)))
    return NULL;

  if (orig_type0 && orig_type1
      && !types_compatible_p (orig_type0, orig_type1))
    return NULL;

  if (orig_type0)
    {
      if (!types_compatible_p (orig_type0, comp_scalar_type))
        return NULL;
      then_clause = gimple_assign_rhs1 (def_stmt0);
      itype = orig_type0;
    }

  if (orig_type1)
    {
      if (!types_compatible_p (orig_type1, comp_scalar_type))
        return NULL;
      else_clause = gimple_assign_rhs1 (def_stmt1);
      itype = orig_type1;
    }

  cmpmode = GET_MODE_INNER (TYPE_MODE (comp_vectype));

  if (GET_MODE_BITSIZE (TYPE_MODE (type)) == GET_MODE_BITSIZE (cmpmode))
    return NULL;

  vectype = get_vectype_for_scalar_type (type);
  if (vectype == NULL_TREE)
    return NULL;

  if (expand_vec_cond_expr_p (vectype, comp_vectype))
    return NULL;

  if (itype == NULL_TREE)
    itype = build_nonstandard_integer_type (GET_MODE_BITSIZE (cmpmode),
                                            TYPE_UNSIGNED (type));

  if (itype == NULL_TREE
      || GET_MODE_BITSIZE (TYPE_MODE (itype)) != GET_MODE_BITSIZE (cmpmode))
    return NULL;

  vecitype = get_vectype_for_scalar_type (itype);
  if (vecitype == NULL_TREE)
    return NULL;

  if (!expand_vec_cond_expr_p (vecitype, comp_vectype))
    return NULL;

  if (GET_MODE_BITSIZE (TYPE_MODE (type)) > GET_MODE_BITSIZE (cmpmode))
    {
      if ((TREE_CODE (then_clause) == INTEGER_CST
           && !int_fits_type_p (then_clause, itype))
          || (TREE_CODE (else_clause) == INTEGER_CST
              && !int_fits_type_p (else_clause, itype)))
        return NULL;
    }

  def_stmt = gimple_build_assign (vect_recog_temp_ssa_var (itype, NULL),
                                  COND_EXPR, unshare_expr (cond_expr),
                                  fold_convert (itype, then_clause),
                                  fold_convert (itype, else_clause));
  pattern_stmt = gimple_build_assign (vect_recog_temp_ssa_var (type, NULL),
                                      NOP_EXPR, gimple_assign_lhs (def_stmt));

  new_pattern_def_seq (stmt_vinfo, def_stmt);
  def_stmt_info = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
  set_vinfo_for_stmt (def_stmt, def_stmt_info);
  STMT_VINFO_VECTYPE (def_stmt_info) = vecitype;
  *type_in = vecitype;
  *type_out = vectype;

  if (dump_enabled_p ())
    dump_printf_loc (MSG_NOTE, vect_location,
                     "vect_recog_mixed_size_cond_pattern: detected:\n");

  return pattern_stmt;
}

/* Helper function of vect_recog_bool_pattern.  Called recursively, return
   true if bool VAR can be optimized that way.  */
static bool
check_bool_pattern (tree var, loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
{
  gimple def_stmt;
  enum vect_def_type dt;
  tree def, rhs1;
  enum tree_code rhs_code;

  if (!vect_is_simple_use (var, NULL, loop_vinfo, bb_vinfo, &def_stmt, &def,
                           &dt))
    return false;

  if (dt != vect_internal_def)
    return false;

  if (!is_gimple_assign (def_stmt))
    return false;

  if (!has_single_use (def))
    return false;

  rhs1 = gimple_assign_rhs1 (def_stmt);
  rhs_code = gimple_assign_rhs_code (def_stmt);
  switch (rhs_code)
    {
    case SSA_NAME:
      return check_bool_pattern (rhs1, loop_vinfo, bb_vinfo);

    CASE_CONVERT:
      if ((TYPE_PRECISION (TREE_TYPE (rhs1)) != 1
           || !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
          && TREE_CODE (TREE_TYPE (rhs1)) != BOOLEAN_TYPE)
        return false;
      return check_bool_pattern (rhs1, loop_vinfo, bb_vinfo);

    case BIT_NOT_EXPR:
      return check_bool_pattern (rhs1, loop_vinfo, bb_vinfo);

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (!check_bool_pattern (rhs1, loop_vinfo, bb_vinfo))
        return false;
      return check_bool_pattern (gimple_assign_rhs2 (def_stmt), loop_vinfo,
                                 bb_vinfo);

    default:
      if (TREE_CODE_CLASS (rhs_code) == tcc_comparison)
        {
          tree vecitype, comp_vectype;

          /* If the comparison can throw, then is_gimple_condexpr will be
             false and we can't make a COND_EXPR/VEC_COND_EXPR out of it.  */
          if (stmt_could_throw_p (def_stmt))
            return false;

          comp_vectype = get_vectype_for_scalar_type (TREE_TYPE (rhs1));
          if (comp_vectype == NULL_TREE)
            return false;

          if (TREE_CODE (TREE_TYPE (rhs1)) != INTEGER_TYPE)
            {
              machine_mode mode = TYPE_MODE (TREE_TYPE (rhs1));
              tree itype
                = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode), 1);
              vecitype = get_vectype_for_scalar_type (itype);
              if (vecitype == NULL_TREE)
                return false;
            }
          else
            vecitype = comp_vectype;
          return expand_vec_cond_expr_p (vecitype, comp_vectype);
        }
      return false;
    }
}

/* Helper function of adjust_bool_pattern.  Add a cast to TYPE to a previous
   stmt (SSA_NAME_DEF_STMT of VAR) by moving the COND_EXPR from RELATED_STMT
   to PATTERN_DEF_SEQ and adding a cast as RELATED_STMT.  */

static tree
adjust_bool_pattern_cast (tree type, tree var)
{
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (SSA_NAME_DEF_STMT (var));
  gimple cast_stmt, pattern_stmt;

  gcc_assert (!STMT_VINFO_PATTERN_DEF_SEQ (stmt_vinfo));
  pattern_stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
  new_pattern_def_seq (stmt_vinfo, pattern_stmt);
  cast_stmt = gimple_build_assign (vect_recog_temp_ssa_var (type, NULL),
                                   NOP_EXPR, gimple_assign_lhs (pattern_stmt));
  STMT_VINFO_RELATED_STMT (stmt_vinfo) = cast_stmt;
  return gimple_assign_lhs (cast_stmt);
}

/* Helper function of vect_recog_bool_pattern.  Do the actual transformations,
   recursively.  VAR is an SSA_NAME that should be transformed from bool
   to a wider integer type, OUT_TYPE is the desired final integer type of
   the whole pattern, TRUEVAL should be NULL unless optimizing
   BIT_AND_EXPR into a COND_EXPR with one integer from one of the operands
   in the then_clause, STMTS is where statements with added pattern stmts
   should be pushed to.  */
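
/* Illustrative sketch (hypothetical example, not from the original
   sources): for

     bool b = x < y;
     int r = (int) b;

   the comparison is rebuilt as r' = x < y ? 1 : 0 in an unsigned integer
   type of the comparison's width, which maps onto a VEC_COND_EXPR.  */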
  2403. static tree
  2404. adjust_bool_pattern (tree var, tree out_type, tree trueval,
  2405. vec<gimple> *stmts)
  2406. {
  2407. gimple stmt = SSA_NAME_DEF_STMT (var);
  2408. enum tree_code rhs_code, def_rhs_code;
  2409. tree itype, cond_expr, rhs1, rhs2, irhs1, irhs2;
  2410. location_t loc;
  2411. gimple pattern_stmt, def_stmt;
  2412. rhs1 = gimple_assign_rhs1 (stmt);
  2413. rhs2 = gimple_assign_rhs2 (stmt);
  2414. rhs_code = gimple_assign_rhs_code (stmt);
  2415. loc = gimple_location (stmt);
  2416. switch (rhs_code)
  2417. {
  2418. case SSA_NAME:
  2419. CASE_CONVERT:
  2420. irhs1 = adjust_bool_pattern (rhs1, out_type, NULL_TREE, stmts);
  2421. itype = TREE_TYPE (irhs1);
  2422. pattern_stmt
  2423. = gimple_build_assign (vect_recog_temp_ssa_var (itype, NULL),
  2424. SSA_NAME, irhs1);
  2425. break;
  2426. case BIT_NOT_EXPR:
  2427. irhs1 = adjust_bool_pattern (rhs1, out_type, NULL_TREE, stmts);
  2428. itype = TREE_TYPE (irhs1);
  2429. pattern_stmt
  2430. = gimple_build_assign (vect_recog_temp_ssa_var (itype, NULL),
  2431. BIT_XOR_EXPR, irhs1, build_int_cst (itype, 1));
  2432. break;
  2433. case BIT_AND_EXPR:
  2434. /* Try to optimize x = y & (a < b ? 1 : 0); into
  2435. x = (a < b ? y : 0);
  2436. E.g. for:
  2437. bool a_b, b_b, c_b;
  2438. TYPE d_T;
  2439. S1 a_b = x1 CMP1 y1;
  2440. S2 b_b = x2 CMP2 y2;
  2441. S3 c_b = a_b & b_b;
  2442. S4 d_T = (TYPE) c_b;
  2443. we would normally emit:
  2444. S1' a_T = x1 CMP1 y1 ? 1 : 0;
  2445. S2' b_T = x2 CMP2 y2 ? 1 : 0;
  2446. S3' c_T = a_T & b_T;
  2447. S4' d_T = c_T;
  2448. but we can save one stmt by using the
  2449. result of one of the COND_EXPRs in the other COND_EXPR and leave
  2450. BIT_AND_EXPR stmt out:
  2451. S1' a_T = x1 CMP1 y1 ? 1 : 0;
  2452. S3' c_T = x2 CMP2 y2 ? a_T : 0;
  2453. S4' f_T = c_T;
  2454. At least when VEC_COND_EXPR is implemented using masks
  2455. cond ? 1 : 0 is as expensive as cond ? var : 0, in both cases it
  2456. computes the comparison masks and ands it, in one case with
  2457. all ones vector, in the other case with a vector register.
  2458. Don't do this for BIT_IOR_EXPR, because cond ? 1 : var; is
  2459. often more expensive. */
  2460. def_stmt = SSA_NAME_DEF_STMT (rhs2);
  2461. def_rhs_code = gimple_assign_rhs_code (def_stmt);
  2462. if (TREE_CODE_CLASS (def_rhs_code) == tcc_comparison)
  2463. {
  2464. tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
  2465. irhs1 = adjust_bool_pattern (rhs1, out_type, NULL_TREE, stmts);
  2466. if (TYPE_PRECISION (TREE_TYPE (irhs1))
  2467. == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (def_rhs1))))
  2468. {
  2469. gimple tstmt;
  2470. stmt_vec_info stmt_def_vinfo = vinfo_for_stmt (def_stmt);
  2471. irhs2 = adjust_bool_pattern (rhs2, out_type, irhs1, stmts);
  2472. tstmt = stmts->pop ();
  2473. gcc_assert (tstmt == def_stmt);
  2474. stmts->quick_push (stmt);
  2475. STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt))
  2476. = STMT_VINFO_RELATED_STMT (stmt_def_vinfo);
  2477. gcc_assert (!STMT_VINFO_PATTERN_DEF_SEQ (stmt_def_vinfo));
  2478. STMT_VINFO_RELATED_STMT (stmt_def_vinfo) = NULL;
  2479. return irhs2;
  2480. }
  2481. else
  2482. irhs2 = adjust_bool_pattern (rhs2, out_type, NULL_TREE, stmts);
  2483. goto and_ior_xor;
  2484. }
      def_stmt = SSA_NAME_DEF_STMT (rhs1);
      def_rhs_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_rhs_code) == tcc_comparison)
        {
          tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
          irhs2 = adjust_bool_pattern (rhs2, out_type, NULL_TREE, stmts);
          if (TYPE_PRECISION (TREE_TYPE (irhs2))
              == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (def_rhs1))))
            {
              gimple tstmt;
              stmt_vec_info stmt_def_vinfo = vinfo_for_stmt (def_stmt);
              irhs1 = adjust_bool_pattern (rhs1, out_type, irhs2, stmts);
              tstmt = stmts->pop ();
              gcc_assert (tstmt == def_stmt);
              stmts->quick_push (stmt);
              STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt))
                = STMT_VINFO_RELATED_STMT (stmt_def_vinfo);
              gcc_assert (!STMT_VINFO_PATTERN_DEF_SEQ (stmt_def_vinfo));
              STMT_VINFO_RELATED_STMT (stmt_def_vinfo) = NULL;
              return irhs1;
            }
          else
            irhs1 = adjust_bool_pattern (rhs1, out_type, NULL_TREE, stmts);
          goto and_ior_xor;
        }
      /* FALLTHRU */
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      irhs1 = adjust_bool_pattern (rhs1, out_type, NULL_TREE, stmts);
      irhs2 = adjust_bool_pattern (rhs2, out_type, NULL_TREE, stmts);
    and_ior_xor:
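      /* If the integer-typed copies of the two operands differ in
         precision, cast the operand whose precision is farther from
         OUT_TYPE's to the other operand's type; on a tie, cast both
         to OUT_TYPE.  */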
      if (TYPE_PRECISION (TREE_TYPE (irhs1))
          != TYPE_PRECISION (TREE_TYPE (irhs2)))
        {
          int prec1 = TYPE_PRECISION (TREE_TYPE (irhs1));
          int prec2 = TYPE_PRECISION (TREE_TYPE (irhs2));
          int out_prec = TYPE_PRECISION (out_type);
          if (absu_hwi (out_prec - prec1) < absu_hwi (out_prec - prec2))
            irhs2 = adjust_bool_pattern_cast (TREE_TYPE (irhs1), rhs2);
          else if (absu_hwi (out_prec - prec1) > absu_hwi (out_prec - prec2))
            irhs1 = adjust_bool_pattern_cast (TREE_TYPE (irhs2), rhs1);
          else
            {
              irhs1 = adjust_bool_pattern_cast (out_type, rhs1);
              irhs2 = adjust_bool_pattern_cast (out_type, rhs2);
            }
        }
      itype = TREE_TYPE (irhs1);
      pattern_stmt
        = gimple_build_assign (vect_recog_temp_ssa_var (itype, NULL),
                               rhs_code, irhs1, irhs2);
      break;
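
    /* A plain comparison: materialize it as cond ? TRUEVAL : 0, with
       TRUEVAL defaulting to 1, in an unsigned integer type that has
       the full precision of the comparison operands' mode.  */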
    default:
      gcc_assert (TREE_CODE_CLASS (rhs_code) == tcc_comparison);
      if (TREE_CODE (TREE_TYPE (rhs1)) != INTEGER_TYPE
          || !TYPE_UNSIGNED (TREE_TYPE (rhs1))
          || (TYPE_PRECISION (TREE_TYPE (rhs1))
              != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1)))))
        {
          machine_mode mode = TYPE_MODE (TREE_TYPE (rhs1));
          itype
            = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode), 1);
        }
      else
        itype = TREE_TYPE (rhs1);
      cond_expr = build2_loc (loc, rhs_code, itype, rhs1, rhs2);
      if (trueval == NULL_TREE)
        trueval = build_int_cst (itype, 1);
      else
        gcc_checking_assert (useless_type_conversion_p (itype,
                                                        TREE_TYPE (trueval)));
      pattern_stmt
        = gimple_build_assign (vect_recog_temp_ssa_var (itype, NULL),
                               COND_EXPR, cond_expr, trueval,
                               build_int_cst (itype, 0));
      break;
    }

  stmts->safe_push (stmt);
  gimple_set_location (pattern_stmt, loc);
  STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt)) = pattern_stmt;
  return gimple_assign_lhs (pattern_stmt);
}

/* Function vect_recog_bool_pattern

   Try to find a pattern like the following:

     bool a_b, b_b, c_b, d_b, e_b;
     TYPE f_T;
   loop:
     S1  a_b = x1 CMP1 y1;
     S2  b_b = x2 CMP2 y2;
     S3  c_b = a_b & b_b;
     S4  d_b = x3 CMP3 y3;
     S5  e_b = c_b | d_b;
     S6  f_T = (TYPE) e_b;

   where type 'TYPE' is an integral type.  Or a similar pattern
   ending in

     S6  f_Y = e_b ? r_Y : s_Y;

   as results from if-conversion of a complex condition.

   Input:

   * LAST_STMT: A stmt at the end from which the pattern
                search begins, i.e. cast of a bool to
                an integer type.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the pattern.

   Assuming the size of TYPE is the same as the size of all comparisons
   (otherwise some casts would be added where needed), for the above
   sequence we create related pattern stmts:

     S1'  a_T = x1 CMP1 y1 ? 1 : 0;
     S3'  c_T = x2 CMP2 y2 ? a_T : 0;
     S4'  d_T = x3 CMP3 y3 ? 1 : 0;
     S5'  e_T = c_T | d_T;
     S6'  f_T = e_T;

   Instead of the above S3' we could emit:

     S2'  b_T = x2 CMP2 y2 ? 1 : 0;
     S3'  c_T = a_T & b_T;

   but the above is more efficient.  */

static gimple
vect_recog_bool_pattern (vec<gimple> *stmts, tree *type_in,
                         tree *type_out)
{
  gimple last_stmt = stmts->pop ();
  enum tree_code rhs_code;
  tree var, lhs, rhs, vectype;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
  gimple pattern_stmt;

  if (!is_gimple_assign (last_stmt))
    return NULL;

  var = gimple_assign_rhs1 (last_stmt);
  lhs = gimple_assign_lhs (last_stmt);

  if ((TYPE_PRECISION (TREE_TYPE (var)) != 1
       || !TYPE_UNSIGNED (TREE_TYPE (var)))
      && TREE_CODE (TREE_TYPE (var)) != BOOLEAN_TYPE)
    return NULL;

  rhs_code = gimple_assign_rhs_code (last_stmt);
  if (CONVERT_EXPR_CODE_P (rhs_code))
    {
      if (TREE_CODE (TREE_TYPE (lhs)) != INTEGER_TYPE
          || TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
        return NULL;
      vectype = get_vectype_for_scalar_type (TREE_TYPE (lhs));
      if (vectype == NULL_TREE)
        return NULL;

      if (!check_bool_pattern (var, loop_vinfo, bb_vinfo))
        return NULL;

      rhs = adjust_bool_pattern (var, TREE_TYPE (lhs), NULL_TREE, stmts);
      lhs = vect_recog_temp_ssa_var (TREE_TYPE (lhs), NULL);
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
        pattern_stmt = gimple_build_assign (lhs, SSA_NAME, rhs);
      else
        pattern_stmt
          = gimple_build_assign (lhs, NOP_EXPR, rhs);
      *type_out = vectype;
      *type_in = vectype;
      stmts->safe_push (last_stmt);
      if (dump_enabled_p ())
        dump_printf_loc (MSG_NOTE, vect_location,
                         "vect_recog_bool_pattern: detected:\n");

      return pattern_stmt;
    }
  else if (rhs_code == COND_EXPR
           && TREE_CODE (var) == SSA_NAME)
    {
      vectype = get_vectype_for_scalar_type (TREE_TYPE (lhs));
      if (vectype == NULL_TREE)
        return NULL;

      /* Build a scalar type for the boolean result that when
         vectorized matches the vector type of the result in
         size and number of elements.  */
      unsigned prec
        = wi::udiv_trunc (TYPE_SIZE (vectype),
                          TYPE_VECTOR_SUBPARTS (vectype)).to_uhwi ();
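      /* E.g. a 128-bit vector of four elements yields PREC == 32
         here.  */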
      tree type
        = build_nonstandard_integer_type (prec,
                                          TYPE_UNSIGNED (TREE_TYPE (var)));
      if (get_vectype_for_scalar_type (type) == NULL_TREE)
        return NULL;

      if (!check_bool_pattern (var, loop_vinfo, bb_vinfo))
        return NULL;

      rhs = adjust_bool_pattern (var, type, NULL_TREE, stmts);
      lhs = vect_recog_temp_ssa_var (TREE_TYPE (lhs), NULL);
      pattern_stmt
        = gimple_build_assign (lhs, COND_EXPR,
                               build2 (NE_EXPR, boolean_type_node,
                                       rhs, build_int_cst (type, 0)),
                               gimple_assign_rhs2 (last_stmt),
                               gimple_assign_rhs3 (last_stmt));
      *type_out = vectype;
      *type_in = vectype;
      stmts->safe_push (last_stmt);
      if (dump_enabled_p ())
        dump_printf_loc (MSG_NOTE, vect_location,
                         "vect_recog_bool_pattern: detected:\n");

      return pattern_stmt;
    }
  else if (rhs_code == SSA_NAME
           && STMT_VINFO_DATA_REF (stmt_vinfo))
    {
      stmt_vec_info pattern_stmt_info;
      vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
      gcc_assert (vectype != NULL_TREE);
      if (!VECTOR_MODE_P (TYPE_MODE (vectype)))
        return NULL;
      if (!check_bool_pattern (var, loop_vinfo, bb_vinfo))
        return NULL;

      rhs = adjust_bool_pattern (var, TREE_TYPE (vectype), NULL_TREE, stmts);
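      /* View-convert the bool destination so that the pattern stmt
         can store the integer-typed RHS to the same location.  */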
      lhs = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (vectype), lhs);
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
        {
          tree rhs2 = vect_recog_temp_ssa_var (TREE_TYPE (lhs), NULL);
          gimple cast_stmt = gimple_build_assign (rhs2, NOP_EXPR, rhs);
          new_pattern_def_seq (stmt_vinfo, cast_stmt);
          rhs = rhs2;
        }
      pattern_stmt = gimple_build_assign (lhs, SSA_NAME, rhs);
      pattern_stmt_info = new_stmt_vec_info (pattern_stmt, loop_vinfo,
                                             bb_vinfo);
      set_vinfo_for_stmt (pattern_stmt, pattern_stmt_info);
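      /* The pattern stmt takes over the store, so transfer the data
         reference info from the original stmt to it.  */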
      STMT_VINFO_DATA_REF (pattern_stmt_info)
        = STMT_VINFO_DATA_REF (stmt_vinfo);
      STMT_VINFO_DR_BASE_ADDRESS (pattern_stmt_info)
        = STMT_VINFO_DR_BASE_ADDRESS (stmt_vinfo);
      STMT_VINFO_DR_INIT (pattern_stmt_info) = STMT_VINFO_DR_INIT (stmt_vinfo);
      STMT_VINFO_DR_OFFSET (pattern_stmt_info)
        = STMT_VINFO_DR_OFFSET (stmt_vinfo);
      STMT_VINFO_DR_STEP (pattern_stmt_info) = STMT_VINFO_DR_STEP (stmt_vinfo);
      STMT_VINFO_DR_ALIGNED_TO (pattern_stmt_info)
        = STMT_VINFO_DR_ALIGNED_TO (stmt_vinfo);
      DR_STMT (STMT_VINFO_DATA_REF (stmt_vinfo)) = pattern_stmt;
      *type_out = vectype;
      *type_in = vectype;
      stmts->safe_push (last_stmt);
      if (dump_enabled_p ())
        dump_printf_loc (MSG_NOTE, vect_location,
                         "vect_recog_bool_pattern: detected:\n");
      return pattern_stmt;
    }
  else
    return NULL;
}

/* Mark statements that are involved in a pattern.  */

static inline void
vect_mark_pattern_stmts (gimple orig_stmt, gimple pattern_stmt,
                         tree pattern_vectype)
{
  stmt_vec_info pattern_stmt_info, def_stmt_info;
  stmt_vec_info orig_stmt_info = vinfo_for_stmt (orig_stmt);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (orig_stmt_info);
  bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (orig_stmt_info);
  gimple def_stmt;

  pattern_stmt_info = vinfo_for_stmt (pattern_stmt);
  if (pattern_stmt_info == NULL)
    {
      pattern_stmt_info = new_stmt_vec_info (pattern_stmt, loop_vinfo,
                                             bb_vinfo);
      set_vinfo_for_stmt (pattern_stmt, pattern_stmt_info);
    }
  gimple_set_bb (pattern_stmt, gimple_bb (orig_stmt));

  STMT_VINFO_RELATED_STMT (pattern_stmt_info) = orig_stmt;
  STMT_VINFO_DEF_TYPE (pattern_stmt_info)
    = STMT_VINFO_DEF_TYPE (orig_stmt_info);
  STMT_VINFO_VECTYPE (pattern_stmt_info) = pattern_vectype;
  STMT_VINFO_IN_PATTERN_P (orig_stmt_info) = true;
  STMT_VINFO_RELATED_STMT (orig_stmt_info) = pattern_stmt;
  STMT_VINFO_PATTERN_DEF_SEQ (pattern_stmt_info)
    = STMT_VINFO_PATTERN_DEF_SEQ (orig_stmt_info);
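  /* Stmts in the pattern def sequence belong to the pattern too:
     give each one a stmt_vec_info and mark it as an internal def.  */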
  if (STMT_VINFO_PATTERN_DEF_SEQ (pattern_stmt_info))
    {
      gimple_stmt_iterator si;
      for (si = gsi_start (STMT_VINFO_PATTERN_DEF_SEQ (pattern_stmt_info));
           !gsi_end_p (si); gsi_next (&si))
        {
          def_stmt = gsi_stmt (si);
          def_stmt_info = vinfo_for_stmt (def_stmt);
          if (def_stmt_info == NULL)
            {
              def_stmt_info = new_stmt_vec_info (def_stmt, loop_vinfo,
                                                 bb_vinfo);
              set_vinfo_for_stmt (def_stmt, def_stmt_info);
            }
          gimple_set_bb (def_stmt, gimple_bb (orig_stmt));
          STMT_VINFO_RELATED_STMT (def_stmt_info) = orig_stmt;
          STMT_VINFO_DEF_TYPE (def_stmt_info) = vect_internal_def;
          if (STMT_VINFO_VECTYPE (def_stmt_info) == NULL_TREE)
            STMT_VINFO_VECTYPE (def_stmt_info) = pattern_vectype;
        }
    }
}

/* Function vect_pattern_recog_1

   Input:
   PATTERN_RECOG_FUNC: A pointer to a function that detects a certain
        computation pattern.
   STMT: A stmt from which the pattern search should start.

   If PATTERN_RECOG_FUNC successfully detected the pattern, it creates an
   expression that computes the same functionality and can be used to
   replace the sequence of stmts that are involved in the pattern.

   Output:
   This function checks if the expression returned by PATTERN_RECOG_FUNC is
   supported in vector form by the target.  We use 'TYPE_IN' to obtain the
   relevant vector type.  If 'TYPE_IN' is already a vector type, then this
   indicates that target support had already been checked by
   PATTERN_RECOG_FUNC.  If 'TYPE_OUT' is also returned by PATTERN_RECOG_FUNC,
   we check that it fits the available target pattern.

   This function also does some bookkeeping, as explained in the
   documentation for vect_pattern_recog.  */

static void
vect_pattern_recog_1 (vect_recog_func_ptr vect_recog_func,
                      gimple_stmt_iterator si,
                      vec<gimple> *stmts_to_replace)
{
  gimple stmt = gsi_stmt (si), pattern_stmt;
  stmt_vec_info stmt_info;
  loop_vec_info loop_vinfo;
  tree pattern_vectype;
  tree type_in, type_out;
  enum tree_code code;
  int i;
  gimple next;

  stmts_to_replace->truncate (0);
  stmts_to_replace->quick_push (stmt);
  pattern_stmt = (* vect_recog_func) (stmts_to_replace, &type_in, &type_out);
  if (!pattern_stmt)
    return;

  stmt = stmts_to_replace->last ();
  stmt_info = vinfo_for_stmt (stmt);
  loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);

  if (VECTOR_MODE_P (TYPE_MODE (type_in)))
    {
      /* No need to check target support (already checked by the pattern
         recognition function).  */
      pattern_vectype = type_out ? type_out : type_in;
    }
  else
    {
      machine_mode vec_mode;
      enum insn_code icode;
      optab optab;

      /* Check target support  */
      type_in = get_vectype_for_scalar_type (type_in);
      if (!type_in)
        return;
      if (type_out)
        type_out = get_vectype_for_scalar_type (type_out);
      else
        type_out = type_in;
      if (!type_out)
        return;
      pattern_vectype = type_out;

      if (is_gimple_assign (pattern_stmt))
        code = gimple_assign_rhs_code (pattern_stmt);
      else
        {
          gcc_assert (is_gimple_call (pattern_stmt));
          code = CALL_EXPR;
        }
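
      /* The pattern is only usable if the target has an instruction
         for CODE at the input vector mode whose result mode matches
         TYPE_OUT.  */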
      optab = optab_for_tree_code (code, type_in, optab_default);
      vec_mode = TYPE_MODE (type_in);
      if (!optab
          || (icode = optab_handler (optab, vec_mode)) == CODE_FOR_nothing
          || (insn_data[icode].operand[0].mode != TYPE_MODE (type_out)))
        return;
    }

  /* Found a vectorizable pattern.  */
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_NOTE, vect_location,
                       "pattern recognized: ");
      dump_gimple_stmt (MSG_NOTE, TDF_SLIM, pattern_stmt, 0);
    }

  /* Mark the stmts that are involved in the pattern.  */
  vect_mark_pattern_stmts (stmt, pattern_stmt, pattern_vectype);

  /* Patterns cannot be vectorized using SLP, because they change the order of
     computation.  */
  if (loop_vinfo)
    FOR_EACH_VEC_ELT (LOOP_VINFO_REDUCTIONS (loop_vinfo), i, next)
      if (next == stmt)
        LOOP_VINFO_REDUCTIONS (loop_vinfo).ordered_remove (i);

  /* It is possible that additional pattern stmts are created and inserted in
     STMTS_TO_REPLACE.  We create a stmt_info for each of them, and mark the
     relevant statements.  */
  for (i = 0; stmts_to_replace->iterate (i, &stmt)
              && (unsigned) i < (stmts_to_replace->length () - 1);
       i++)
    {
      stmt_info = vinfo_for_stmt (stmt);
      pattern_stmt = STMT_VINFO_RELATED_STMT (stmt_info);
      if (dump_enabled_p ())
        {
          dump_printf_loc (MSG_NOTE, vect_location,
                           "additional pattern stmt: ");
          dump_gimple_stmt (MSG_NOTE, TDF_SLIM, pattern_stmt, 0);
        }

      vect_mark_pattern_stmts (stmt, pattern_stmt, NULL_TREE);
    }
}

/* Function vect_pattern_recog

   Input:
   LOOP_VINFO - a struct_loop_info of a loop in which we want to look for
        computation idioms.

   Output - for each computation idiom that is detected we create a new stmt
        that provides the same functionality and that can be vectorized.  We
        also record some information in the struct_stmt_info of the relevant
        stmts, as explained below:

   At the entry to this function we have the following stmts, with the
   following initial value in the STMT_VINFO fields:

         stmt                     in_pattern_p  related_stmt    vec_stmt
         S1: a_i = ....                 -       -               -
         S2: a_2 = ..use(a_i)..         -       -               -
         S3: a_1 = ..use(a_2)..         -       -               -
         S4: a_0 = ..use(a_1)..         -       -               -
         S5: ... = ..use(a_0)..         -       -               -

   Say the sequence {S1,S2,S3,S4} was detected as a pattern that can be
   represented by a single stmt.  We then:
   - create a new stmt S6 equivalent to the pattern (the stmt is not
     inserted into the code)
   - fill in the STMT_VINFO fields as follows:

                                  in_pattern_p  related_stmt    vec_stmt
         S1: a_i = ....                 -       -               -
         S2: a_2 = ..use(a_i)..         -       -               -
         S3: a_1 = ..use(a_2)..         -       -               -
         S4: a_0 = ..use(a_1)..         true    S6              -
           '---> S6: a_new = ....       -       S4              -
         S5: ... = ..use(a_0)..         -       -               -

   (the last stmt in the pattern (S4) and the new pattern stmt (S6) point
   to each other through the RELATED_STMT field).

   S6 will be marked as relevant in vect_mark_stmts_to_be_vectorized instead
   of S4 because it will replace all its uses.  Stmts {S1,S2,S3} will
   remain irrelevant unless used by stmts other than S4.

   If vectorization succeeds, vect_transform_stmt will skip over {S1,S2,S3}
   (because they are marked as irrelevant).  It will vectorize S6, and record
   a pointer to the new vector stmt VS6 from S6 (as usual).
   S4 will be skipped, and S5 will be vectorized as usual:

                                  in_pattern_p  related_stmt    vec_stmt
         S1: a_i = ....                 -       -               -
         S2: a_2 = ..use(a_i)..         -       -               -
         S3: a_1 = ..use(a_2)..         -       -               -
       > VS6: va_new = ....             -       -               -
         S4: a_0 = ..use(a_1)..         true    S6              VS6
           '---> S6: a_new = ....       -       S4              VS6
       > VS5: ... = ..vuse(va_new)..    -       -               -
         S5: ... = ..use(a_0)..         -       -               -

   DCE could then get rid of {S1,S2,S3,S4,S5} (if their defs are not used
   elsewhere), and we'll end up with:

        VS6: va_new = ....
        VS5: ... = ..vuse(va_new)..

   In case of more than one pattern statement, e.g., widen-mult with
   intermediate type:

     S1  a_t = ;
     S2  a_T = (TYPE) a_t;
       '--> S3: a_it = (interm_type) a_t;
     S4  prod_T = a_T * CONST;
       '--> S5: prod_T' = a_it w* CONST;

   there may be other users of a_T outside the pattern.  In that case S2 will
   be marked as relevant (as well as S3), and both S2 and S3 will be analyzed
   and vectorized.  The vector stmt VS2 will be recorded in S2, and VS3 will
   be recorded in S3.  */

void
vect_pattern_recog (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
{
  struct loop *loop;
  basic_block *bbs;
  unsigned int nbbs;
  gimple_stmt_iterator si;
  unsigned int i, j;
  vect_recog_func_ptr vect_recog_func;
  auto_vec<gimple, 1> stmts_to_replace;
  gimple stmt;

  if (dump_enabled_p ())
    dump_printf_loc (MSG_NOTE, vect_location,
                     "=== vect_pattern_recog ===\n");

  if (loop_vinfo)
    {
      loop = LOOP_VINFO_LOOP (loop_vinfo);
      bbs = LOOP_VINFO_BBS (loop_vinfo);
      nbbs = loop->num_nodes;
    }
  else
    {
      bbs = &BB_VINFO_BB (bb_vinfo);
      nbbs = 1;
    }

  /* Scan through the loop stmts, applying the pattern recognition
     functions starting at each stmt visited:  */
  for (i = 0; i < nbbs; i++)
    {
      basic_block bb = bbs[i];
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
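          /* When doing basic-block vectorization, skip stmts already
             marked as not vectorizable.  */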
          if (bb_vinfo && (stmt = gsi_stmt (si))
              && vinfo_for_stmt (stmt)
              && !STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (stmt)))
            continue;

          /* Scan over all generic vect_recog_xxx_pattern functions.  */
          for (j = 0; j < NUM_PATTERNS; j++)
            {
              vect_recog_func = vect_vect_recog_func_ptrs[j];
              vect_pattern_recog_1 (vect_recog_func, si,
                                    &stmts_to_replace);
            }
        }
    }
}