vm-engine.c

  1. /* Copyright (C) 2001, 2009, 2010, 2011, 2012, 2013,
  2. * 2014, 2015 Free Software Foundation, Inc.
  3. *
  4. * This library is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public License
  6. * as published by the Free Software Foundation; either version 3 of
  7. * the License, or (at your option) any later version.
  8. *
  9. * This library is distributed in the hope that it will be useful, but
  10. * WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with this library; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
  17. * 02110-1301 USA
  18. */
  19. /* This file is included in vm.c multiple times. */
  20. #define UNPACK_8_8_8(op,a,b,c) \
  21. do \
  22. { \
  23. a = (op >> 8) & 0xff; \
  24. b = (op >> 16) & 0xff; \
  25. c = op >> 24; \
  26. } \
  27. while (0)
  28. #define UNPACK_8_16(op,a,b) \
  29. do \
  30. { \
  31. a = (op >> 8) & 0xff; \
  32. b = op >> 16; \
  33. } \
  34. while (0)
  35. #define UNPACK_16_8(op,a,b) \
  36. do \
  37. { \
  38. a = (op >> 8) & 0xffff; \
  39. b = op >> 24; \
  40. } \
  41. while (0)
  42. #define UNPACK_12_12(op,a,b) \
  43. do \
  44. { \
  45. a = (op >> 8) & 0xfff; \
  46. b = op >> 20; \
  47. } \
  48. while (0)
  49. #define UNPACK_24(op,a) \
  50. do \
  51. { \
  52. a = op >> 8; \
  53. } \
  54. while (0)
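/* Illustrative note, not part of the upstream source: each 32-bit
   instruction word keeps its opcode in the low 8 bits and packs the
   operands above it; the UNPACK_* helpers decode those fields.  For a
   hypothetical word op = 0x030201aa:

     UNPACK_8_8_8 (op, a, b, c);   a == 0x01, b == 0x02, c == 0x03
     UNPACK_12_12 (op, a, b);      a == 0x201, b == 0x030
     UNPACK_24 (op, a);            a == 0x030201
*/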
  55. /* Assign some registers by hand. There used to be a bigger list here,
  56. but it was never tested, and in the case of x86-32, was a source of
  57. compilation failures. It can be revived if it's useful, but my naive
  58. hope is that simply annotating the locals with "register" will be a
  59. sufficient hint to the compiler. */
  60. #ifdef __GNUC__
  61. # if defined __x86_64__
  62. /* GCC 4.6 chooses %rbp for IP_REG and %rbx for SP_REG, which works
  63. well. Tell it to keep the jump table in r12, which is
  64. callee-saved. */
  65. # define JT_REG asm ("r12")
  66. # endif
  67. #endif
  68. #ifndef IP_REG
  69. # define IP_REG
  70. #endif
  71. #ifndef FP_REG
  72. # define FP_REG
  73. #endif
  74. #ifndef JT_REG
  75. # define JT_REG
  76. #endif
  77. #define VM_ASSERT(condition, handler) \
  78. do { \
  79. if (SCM_UNLIKELY (!(condition))) \
  80. { \
  81. SYNC_IP(); \
  82. handler; \
  83. } \
  84. } while (0)
  85. #ifdef VM_ENABLE_ASSERTIONS
  86. # define ASSERT(condition) VM_ASSERT (condition, abort())
  87. #else
  88. # define ASSERT(condition)
  89. #endif
  90. #if VM_USE_HOOKS
  91. #define RUN_HOOK(exp) \
  92. do { \
  93. if (SCM_UNLIKELY (vp->trace_level > 0)) \
  94. { \
  95. SYNC_IP (); \
  96. exp; \
  97. CACHE_SP (); \
  98. } \
  99. } while (0)
  100. #else
  101. #define RUN_HOOK(exp)
  102. #endif
  103. #define RUN_HOOK0(h) RUN_HOOK (vm_dispatch_##h##_hook (vp))
  104. #define RUN_HOOK1(h, arg) RUN_HOOK (vm_dispatch_##h##_hook (vp, arg))
  105. #define APPLY_HOOK() \
  106. RUN_HOOK0 (apply)
  107. #define PUSH_CONTINUATION_HOOK() \
  108. RUN_HOOK0 (push_continuation)
  109. #define POP_CONTINUATION_HOOK(old_fp) \
  110. RUN_HOOK1 (pop_continuation, old_fp)
  111. #define NEXT_HOOK() \
  112. RUN_HOOK0 (next)
  113. #define ABORT_CONTINUATION_HOOK() \
  114. RUN_HOOK0 (abort)
  115. #define VM_HANDLE_INTERRUPTS \
  116. SCM_ASYNC_TICK_WITH_GUARD_CODE (thread, SYNC_IP (), CACHE_SP ())
  117. /* Virtual Machine
  118. The VM has three state bits: the instruction pointer (IP), the frame
  119. pointer (FP), and the stack pointer (SP). We cache the IP in a
  120. machine register, local to the VM, because it is used extensively by
  121. the VM. We do the same for SP. The FP is used more by code outside
  122. the VM than by the VM itself, so we don't bother caching it locally.
  123. Keeping vp->ip in sync with the local IP would be a big loss, as it
  124. is updated so often. Instead of updating vp->ip all the time, we
  125. call SYNC_IP whenever we would need to know the IP of the top frame.
  126. In practice, we need to SYNC_IP whenever we call out of the VM to a
  127. function that would like to walk the stack, perhaps as the result of
  128. an exception. On the other hand, we do always keep vp->sp in sync
  129. with the local SP.
  130. One more thing. We allow the stack to move, when it expands.
  131. Therefore if you call out to a C procedure that could call Scheme
  132. code, or otherwise push anything on the stack, you will need to
  133. CACHE_SP afterwards to restore the possibly-changed stack pointer. */
  134. #define SYNC_IP() vp->ip = (ip)
  135. #define CACHE_SP() sp = vp->sp
  136. #define CACHE_REGISTER() \
  137. do { \
  138. ip = vp->ip; \
  139. CACHE_SP (); \
  140. } while (0)
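/* Illustrative sketch, not part of the upstream source: the usual
   pattern when calling out to C code that may run Scheme or grow the
   stack is

     SYNC_IP ();                  publish the local IP
     result = some_c_call (...);  hypothetical call-out
     CACHE_SP ();                 re-load the possibly relocated SP

   which is the same shape RUN_HOOK above and RETURN_EXP below use. */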
  141. /* Reserve stack space for a frame. Will check that there is sufficient
  142. stack space for N locals, including the procedure. Invoke after
  143. preparing the new frame and setting the fp and ip.
  144. If there is not enough space for this frame, we try to expand the
  145. stack, possibly relocating it somewhere else in the address space.
  146. Because of the possible relocation, no pointer into the stack besides
  147. FP is valid across an ALLOC_FRAME call. Be careful! */
  148. #define ALLOC_FRAME(n) \
  149. do { \
  150. sp = vp->fp - (n); \
  151. if (sp < vp->sp_min_since_gc) \
  152. { \
  153. if (SCM_UNLIKELY (sp < vp->stack_limit)) \
  154. { \
  155. SYNC_IP (); \
  156. vm_expand_stack (vp, sp); \
  157. CACHE_SP (); \
  158. } \
  159. else \
  160. vp->sp_min_since_gc = vp->sp = sp; \
  161. } \
  162. else \
  163. vp->sp = sp; \
  164. } while (0)
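/* Descriptive note, not part of the upstream source: the VM stack
   grows downward, so ALLOC_FRAME computes the new SP as FP minus the
   local count and detects overflow when SP would drop below
   vp->stack_limit, expanding (and possibly relocating) the stack in
   that case. */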
  165. /* Reset the current frame to hold N locals. Used when we know that no
  166. stack expansion is needed. */
  167. #define RESET_FRAME(n) \
  168. do { \
  169. vp->sp = sp = vp->fp - (n); \
  170. if (sp < vp->sp_min_since_gc) \
  171. vp->sp_min_since_gc = sp; \
  172. } while (0)
  173. /* Compute the number of locals in the frame. At a call, this is equal
  174. to the number of actual arguments when a function is first called,
  175. plus one for the function. */
  176. #define FRAME_LOCALS_COUNT() (vp->fp - sp)
  177. #define FRAME_LOCALS_COUNT_FROM(slot) (FRAME_LOCALS_COUNT () - slot)
  178. /* Restore registers after returning from a frame. */
  179. #define RESTORE_FRAME() \
  180. do { \
  181. } while (0)
  182. #ifdef HAVE_LABELS_AS_VALUES
  183. # define BEGIN_DISPATCH_SWITCH /* */
  184. # define END_DISPATCH_SWITCH /* */
  185. # define NEXT(n) \
  186. do \
  187. { \
  188. ip += n; \
  189. NEXT_HOOK (); \
  190. op = *ip; \
  191. goto *jump_table[op & 0xff]; \
  192. } \
  193. while (0)
  194. # define VM_DEFINE_OP(opcode, tag, name, meta) \
  195. op_##tag:
  196. #else
  197. # define BEGIN_DISPATCH_SWITCH \
  198. vm_start: \
  199. NEXT_HOOK (); \
  200. op = *ip; \
  201. switch (op & 0xff) \
  202. {
  203. # define END_DISPATCH_SWITCH \
  204. }
  205. # define NEXT(n) \
  206. do \
  207. { \
  208. ip += n; \
  209. goto vm_start; \
  210. } \
  211. while (0)
  212. # define VM_DEFINE_OP(opcode, tag, name, meta) \
  213. op_##tag: \
  214. case opcode:
  215. #endif
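/* Descriptive note, not part of the upstream source: with GCC's
   labels-as-values extension, NEXT ends every instruction with an
   indirect "goto *jump_table[op & 0xff]", so each opcode dispatches
   directly to the next one.  The portable fallback instead funnels
   every NEXT back through the single switch at vm_start.  In both
   cases the low 8 bits of the instruction word select the opcode. */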
  216. #define FP_SLOT(i) SCM_FRAME_SLOT (vp->fp, i)
  217. #define FP_REF(i) SCM_FRAME_LOCAL (vp->fp, i)
  218. #define FP_SET(i,o) SCM_FRAME_LOCAL (vp->fp, i) = o
  219. #define SP_REF(i) (sp[i].as_scm)
  220. #define SP_SET(i,o) (sp[i].as_scm = o)
  221. #define SP_REF_F64(i) (sp[i].as_f64)
  222. #define SP_SET_F64(i,o) (sp[i].as_f64 = o)
  223. #define SP_REF_U64(i) (sp[i].as_u64)
  224. #define SP_SET_U64(i,o) (sp[i].as_u64 = o)
  225. #define SP_REF_S64(i) (sp[i].as_s64)
  226. #define SP_SET_S64(i,o) (sp[i].as_s64 = o)
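/* Descriptive note, not part of the upstream source: stack slots are
   a union (union scm_vm_stack_element), so the SP_REF_F64 / _U64 /
   _S64 accessors let instructions keep unboxed doubles and 64-bit
   integers directly in locals alongside ordinary SCM values. */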
  227. #define VARIABLE_REF(v) SCM_VARIABLE_REF (v)
  228. #define VARIABLE_SET(v,o) SCM_VARIABLE_SET (v, o)
  229. #define VARIABLE_BOUNDP(v) (!scm_is_eq (VARIABLE_REF (v), SCM_UNDEFINED))
  230. #define RETURN_ONE_VALUE(ret) \
  231. do { \
  232. SCM val = ret; \
  233. union scm_vm_stack_element *old_fp; \
  234. VM_HANDLE_INTERRUPTS; \
  235. ALLOC_FRAME (2); \
  236. old_fp = vp->fp; \
  237. ip = SCM_FRAME_RETURN_ADDRESS (old_fp); \
  238. vp->fp = SCM_FRAME_DYNAMIC_LINK (old_fp); \
  239. /* Clear frame. */ \
  240. old_fp[0].as_scm = SCM_BOOL_F; \
  241. old_fp[1].as_scm = SCM_BOOL_F; \
  242. /* Leave proc. */ \
  243. SP_SET (0, val); \
  244. POP_CONTINUATION_HOOK (old_fp); \
  245. NEXT (0); \
  246. } while (0)
  247. /* While we could generate the list-unrolling code here, it's fine for
  248. now to just tail-call (apply values vals). */
  249. #define RETURN_VALUE_LIST(vals_) \
  250. do { \
  251. SCM vals = vals_; \
  252. VM_HANDLE_INTERRUPTS; \
  253. ALLOC_FRAME (3); \
  254. SP_SET (2, vm_builtin_apply); \
  255. SP_SET (1, vm_builtin_values); \
  256. SP_SET (0, vals); \
  257. ip = (scm_t_uint32 *) vm_builtin_apply_code; \
  258. goto op_tail_apply; \
  259. } while (0)
  260. #define BR_NARGS(rel) \
  261. scm_t_uint32 expected; \
  262. UNPACK_24 (op, expected); \
  263. if (FRAME_LOCALS_COUNT() rel expected) \
  264. { \
  265. scm_t_int32 offset = ip[1]; \
  266. offset >>= 8; /* Sign-extending shift. */ \
  267. NEXT (offset); \
  268. } \
  269. NEXT (2)
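/* Illustrative note, not part of the upstream source: branch
   instructions store a signed 24-bit offset in the top 24 bits of a
   word.  Loading the whole word into a signed 32-bit variable and
   shifting right by 8 sign-extends it; for example, a word of
   0xffffff00 decodes to the offset -1.  In the BR_UNARY and BR_BINARY
   helpers below, backward branches (offset <= 0) also poll for
   interrupts so that loops remain interruptible. */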
  270. #define BR_UNARY(x, exp) \
  271. scm_t_uint32 test; \
  272. SCM x; \
  273. UNPACK_24 (op, test); \
  274. x = SP_REF (test); \
  275. if ((ip[1] & 0x1) ? !(exp) : (exp)) \
  276. { \
  277. scm_t_int32 offset = ip[1]; \
  278. offset >>= 8; /* Sign-extending shift. */ \
  279. if (offset <= 0) \
  280. VM_HANDLE_INTERRUPTS; \
  281. NEXT (offset); \
  282. } \
  283. NEXT (2)
  284. #define BR_BINARY(x, y, exp) \
  285. scm_t_uint32 a, b; \
  286. SCM x, y; \
  287. UNPACK_24 (op, a); \
  288. UNPACK_24 (ip[1], b); \
  289. x = SP_REF (a); \
  290. y = SP_REF (b); \
  291. if ((ip[2] & 0x1) ? !(exp) : (exp)) \
  292. { \
  293. scm_t_int32 offset = ip[2]; \
  294. offset >>= 8; /* Sign-extending shift. */ \
  295. if (offset <= 0) \
  296. VM_HANDLE_INTERRUPTS; \
  297. NEXT (offset); \
  298. } \
  299. NEXT (3)
  300. #define BR_ARITHMETIC(crel,srel) \
  301. { \
  302. scm_t_uint32 a, b; \
  303. SCM x, y; \
  304. UNPACK_24 (op, a); \
  305. UNPACK_24 (ip[1], b); \
  306. x = SP_REF (a); \
  307. y = SP_REF (b); \
  308. if (SCM_I_INUMP (x) && SCM_I_INUMP (y)) \
  309. { \
  310. scm_t_signed_bits x_bits = SCM_UNPACK (x); \
  311. scm_t_signed_bits y_bits = SCM_UNPACK (y); \
  312. if ((ip[2] & 0x1) ? !(x_bits crel y_bits) : (x_bits crel y_bits)) \
  313. { \
  314. scm_t_int32 offset = ip[2]; \
  315. offset >>= 8; /* Sign-extending shift. */ \
  316. if (offset <= 0) \
  317. VM_HANDLE_INTERRUPTS; \
  318. NEXT (offset); \
  319. } \
  320. NEXT (3); \
  321. } \
  322. else \
  323. { \
  324. SCM res; \
  325. SYNC_IP (); \
  326. res = srel (x, y); \
  327. CACHE_SP (); \
  328. if ((ip[2] & 0x1) ? scm_is_false (res) : scm_is_true (res)) \
  329. { \
  330. scm_t_int32 offset = ip[2]; \
  331. offset >>= 8; /* Sign-extending shift. */ \
  332. if (offset <= 0) \
  333. VM_HANDLE_INTERRUPTS; \
  334. NEXT (offset); \
  335. } \
  336. NEXT (3); \
  337. } \
  338. }
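/* Descriptive note, not part of the upstream source: the fast path
   above compares the raw SCM_UNPACK bits of two fixnums directly.
   Assuming the usual fixnum encoding (the value shifted left over a
   constant tag in the low bits), two fixnums order the same way as
   their raw words, so no untagging is needed; anything that is not a
   pair of fixnums falls back to the generic predicate (scm_less_p
   etc.) with the IP synced around the call. */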
  339. #define BR_U64_ARITHMETIC(crel) \
  340. { \
  341. scm_t_uint32 a, b; \
  342. scm_t_uint64 x, y; \
  343. UNPACK_24 (op, a); \
  344. UNPACK_24 (ip[1], b); \
  345. x = SP_REF_U64 (a); \
  346. y = SP_REF_U64 (b); \
  347. if ((ip[2] & 0x1) ? !(x crel y) : (x crel y)) \
  348. { \
  349. scm_t_int32 offset = ip[2]; \
  350. offset >>= 8; /* Sign-extending shift. */ \
  351. if (offset <= 0) \
  352. VM_HANDLE_INTERRUPTS; \
  353. NEXT (offset); \
  354. } \
  355. NEXT (3); \
  356. }
  357. #define ARGS1(a1) \
  358. scm_t_uint16 dst, src; \
  359. SCM a1; \
  360. UNPACK_12_12 (op, dst, src); \
  361. a1 = SP_REF (src)
  362. #define ARGS2(a1, a2) \
  363. scm_t_uint8 dst, src1, src2; \
  364. SCM a1, a2; \
  365. UNPACK_8_8_8 (op, dst, src1, src2); \
  366. a1 = SP_REF (src1); \
  367. a2 = SP_REF (src2)
  368. #define RETURN(x) \
  369. do { SP_SET (dst, x); NEXT (1); } while (0)
  370. #define RETURN_EXP(exp) \
  371. do { SCM __x; SYNC_IP (); __x = exp; CACHE_SP (); RETURN (__x); } while (0)
  372. /* The maximum/minimum tagged integers. */
  373. #define INUM_MAX \
  374. ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_POSITIVE_FIXNUM)))
  375. #define INUM_MIN \
  376. ((scm_t_signed_bits) SCM_UNPACK (SCM_I_MAKINUM (SCM_MOST_NEGATIVE_FIXNUM)))
  377. #define INUM_STEP \
  378. ((scm_t_signed_bits) SCM_UNPACK (SCM_INUM1) \
  379. - (scm_t_signed_bits) SCM_UNPACK (SCM_INUM0))
  380. #define BINARY_INTEGER_OP(CFUNC,SFUNC) \
  381. { \
  382. ARGS2 (x, y); \
  383. if (SCM_I_INUMP (x) && SCM_I_INUMP (y)) \
  384. { \
  385. scm_t_int64 n = SCM_I_INUM (x) CFUNC SCM_I_INUM (y); \
  386. if (SCM_FIXABLE (n)) \
  387. RETURN (SCM_I_MAKINUM (n)); \
  388. } \
  389. RETURN_EXP (SFUNC (x, y)); \
  390. }
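/* Illustrative note, not part of the upstream source: the arithmetic
   opcodes later in this file expand this macro, pairing a C operator
   with the generic Scheme fallback, roughly

     BINARY_INTEGER_OP (+, scm_sum)

   so two fixnums are combined in 64-bit arithmetic, re-tagged with
   SCM_I_MAKINUM if the result is still SCM_FIXABLE, and everything
   else (overflow, bignums, flonums, ...) is handed to the generic
   procedure via RETURN_EXP. */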
  391. #define VM_VALIDATE_PAIR(x, proc) \
  392. VM_ASSERT (scm_is_pair (x), vm_error_not_a_pair (proc, x))
  393. #define VM_VALIDATE_STRUCT(obj, proc) \
  394. VM_ASSERT (SCM_STRUCTP (obj), vm_error_not_a_pair (proc, obj))
  395. #define VM_VALIDATE_BYTEVECTOR(x, proc) \
  396. VM_ASSERT (SCM_BYTEVECTOR_P (x), vm_error_not_a_bytevector (proc, x))
  397. /* Return true (non-zero) if PTR has suitable alignment for TYPE. */
  398. #define ALIGNED_P(ptr, type) \
  399. ((scm_t_uintptr) (ptr) % alignof_type (type) == 0)
  400. static SCM
  401. VM_NAME (scm_i_thread *thread, struct scm_vm *vp,
  402. scm_i_jmp_buf *registers, int resume)
  403. {
  404. /* Instruction pointer: A pointer to the opcode that is currently
  405. running. */
  406. register scm_t_uint32 *ip IP_REG;
  407. /* Stack pointer: A pointer to the hot end of the stack, off of which
  408. we index arguments and local variables. Pushed at function calls,
  409. popped on returns. */
  410. register union scm_vm_stack_element *sp FP_REG;
  411. /* Current opcode: A cache of *ip. */
  412. register scm_t_uint32 op;
  413. #ifdef HAVE_LABELS_AS_VALUES
  414. static const void *jump_table_[256] = {
  415. #define LABEL_ADDR(opcode, tag, name, meta) &&op_##tag,
  416. FOR_EACH_VM_OPERATION(LABEL_ADDR)
  417. #undef LABEL_ADDR
  418. };
  419. register const void **jump_table JT_REG;
  420. /* Attempt to keep JUMP_TABLE_POINTER in a register. This saves one
  421. load instruction at each instruction dispatch. */
  422. jump_table = jump_table_;
  423. #endif
  424. /* Load VM registers. */
  425. CACHE_REGISTER ();
  426. VM_HANDLE_INTERRUPTS;
  427. /* Usually a call to the VM happens on application, with the boot
  428. continuation on the next frame. Sometimes it happens after a
  429. non-local exit however; in that case the VM state is all set up,
  430. and we have but to jump to the next opcode. */
  431. if (SCM_UNLIKELY (resume))
  432. NEXT (0);
  433. if (SCM_LIKELY (SCM_PROGRAM_P (FP_REF (0))))
  434. ip = SCM_PROGRAM_CODE (FP_REF (0));
  435. else
  436. ip = (scm_t_uint32 *) vm_apply_non_program_code;
  437. APPLY_HOOK ();
  438. NEXT (0);
  439. BEGIN_DISPATCH_SWITCH;
  440. /*
  441. * Call and return
  442. */
  443. /* halt _:24
  444. *
  445. * Bring the VM to a halt, returning all the values from the stack.
  446. */
  447. VM_DEFINE_OP (0, halt, "halt", OP1 (X32))
  448. {
  449. /* Boot closure in r0, empty frame in r1/r2, proc in r3, values from r4. */
  450. scm_t_uint32 nvals = FRAME_LOCALS_COUNT_FROM (4);
  451. SCM ret;
  452. if (nvals == 1)
  453. ret = FP_REF (4);
  454. else
  455. {
  456. scm_t_uint32 n;
  457. ret = SCM_EOL;
  458. for (n = nvals; n > 0; n--)
  459. ret = scm_inline_cons (thread, FP_REF (4 + n - 1), ret);
  460. ret = scm_values (ret);
  461. }
  462. vp->ip = SCM_FRAME_RETURN_ADDRESS (vp->fp);
  463. vp->sp = SCM_FRAME_PREVIOUS_SP (vp->fp);
  464. vp->fp = SCM_FRAME_DYNAMIC_LINK (vp->fp);
  465. return ret;
  466. }
  467. /* call proc:24 _:8 nlocals:24
  468. *
  469. * Call a procedure. PROC is the local corresponding to a procedure.
  470. * The two values below PROC will be overwritten by the saved call
  471. * frame data. The new frame will have space for NLOCALS locals: one
  472. * for the procedure, and the rest for the arguments which should
  473. * already have been pushed on.
  474. *
  475. * When the call returns, execution proceeds with the next
  476. * instruction. There may be any number of values on the return
  477. * stack; the precise number can be had by subtracting the address of
  478. * PROC from the post-call SP.
  479. */
  480. VM_DEFINE_OP (1, call, "call", OP2 (X8_F24, X8_C24))
  481. {
  482. scm_t_uint32 proc, nlocals;
  483. union scm_vm_stack_element *old_fp;
  484. UNPACK_24 (op, proc);
  485. UNPACK_24 (ip[1], nlocals);
  486. VM_HANDLE_INTERRUPTS;
  487. PUSH_CONTINUATION_HOOK ();
  488. old_fp = vp->fp;
  489. vp->fp = SCM_FRAME_SLOT (old_fp, proc - 1);
  490. SCM_FRAME_SET_DYNAMIC_LINK (vp->fp, old_fp);
  491. SCM_FRAME_SET_RETURN_ADDRESS (vp->fp, ip + 2);
  492. RESET_FRAME (nlocals);
  493. if (SCM_LIKELY (SCM_PROGRAM_P (FP_REF (0))))
  494. ip = SCM_PROGRAM_CODE (FP_REF (0));
  495. else
  496. ip = (scm_t_uint32 *) vm_apply_non_program_code;
  497. APPLY_HOOK ();
  498. NEXT (0);
  499. }
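/* Descriptive note, not part of the upstream source: the new frame
   pointer is derived from PROC's slot in the caller's frame, the
   saved dynamic link (the caller's FP) and the return address (the
   instruction after this two-word call) are written into the two
   slots the doc comment above says get overwritten, and the callee
   then sees PROC as its local 0 with the arguments following. */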
  500. /* call-label proc:24 _:8 nlocals:24 label:32
  501. *
  502. * Call a procedure in the same compilation unit.
  503. *
  504. * This instruction is just like "call", except that instead of
  505. * dereferencing PROC to find the call target, the call target is
  506. * known to be at LABEL, a signed 32-bit offset in 32-bit units from
  507. * the current IP. Since PROC is not dereferenced, it may be some
  508. * other representation of the closure.
  509. */
  510. VM_DEFINE_OP (2, call_label, "call-label", OP3 (X8_F24, X8_C24, L32))
  511. {
  512. scm_t_uint32 proc, nlocals;
  513. scm_t_int32 label;
  514. union scm_vm_stack_element *old_fp;
  515. UNPACK_24 (op, proc);
  516. UNPACK_24 (ip[1], nlocals);
  517. label = ip[2];
  518. VM_HANDLE_INTERRUPTS;
  519. PUSH_CONTINUATION_HOOK ();
  520. old_fp = vp->fp;
  521. vp->fp = SCM_FRAME_SLOT (old_fp, proc - 1);
  522. SCM_FRAME_SET_DYNAMIC_LINK (vp->fp, old_fp);
  523. SCM_FRAME_SET_RETURN_ADDRESS (vp->fp, ip + 3);
  524. RESET_FRAME (nlocals);
  525. ip += label;
  526. APPLY_HOOK ();
  527. NEXT (0);
  528. }
  529. /* tail-call nlocals:24
  530. *
  531. * Tail-call a procedure. Requires that the procedure and all of the
  532. * arguments have already been shuffled into position. Will reset the
  533. * frame to NLOCALS.
  534. */
  535. VM_DEFINE_OP (3, tail_call, "tail-call", OP1 (X8_C24))
  536. {
  537. scm_t_uint32 nlocals;
  538. UNPACK_24 (op, nlocals);
  539. VM_HANDLE_INTERRUPTS;
  540. RESET_FRAME (nlocals);
  541. if (SCM_LIKELY (SCM_PROGRAM_P (FP_REF (0))))
  542. ip = SCM_PROGRAM_CODE (FP_REF (0));
  543. else
  544. ip = (scm_t_uint32 *) vm_apply_non_program_code;
  545. APPLY_HOOK ();
  546. NEXT (0);
  547. }
  548. /* tail-call-label nlocals:24 label:32
  549. *
  550. * Tail-call a known procedure. As call is to call-label, tail-call
  551. * is to tail-call-label.
  552. */
  553. VM_DEFINE_OP (4, tail_call_label, "tail-call-label", OP2 (X8_C24, L32))
  554. {
  555. scm_t_uint32 nlocals;
  556. scm_t_int32 label;
  557. UNPACK_24 (op, nlocals);
  558. label = ip[1];
  559. VM_HANDLE_INTERRUPTS;
  560. RESET_FRAME (nlocals);
  561. ip += label;
  562. APPLY_HOOK ();
  563. NEXT (0);
  564. }
  565. /* tail-call/shuffle from:24
  566. *
  567. * Tail-call a procedure. The procedure should already be set to slot
  568. * 0. The rest of the args are taken from the frame, starting at
  569. * FROM, shuffled down to start at slot 0. This is part of the
  570. * implementation of the call-with-values builtin.
  571. */
  572. VM_DEFINE_OP (5, tail_call_shuffle, "tail-call/shuffle", OP1 (X8_F24))
  573. {
  574. scm_t_uint32 n, from, nlocals;
  575. UNPACK_24 (op, from);
  576. VM_HANDLE_INTERRUPTS;
  577. VM_ASSERT (from > 0, abort ());
  578. nlocals = FRAME_LOCALS_COUNT ();
  579. for (n = 0; from + n < nlocals; n++)
  580. FP_SET (n + 1, FP_REF (from + n));
  581. RESET_FRAME (n + 1);
  582. if (SCM_LIKELY (SCM_PROGRAM_P (FP_REF (0))))
  583. ip = SCM_PROGRAM_CODE (FP_REF (0));
  584. else
  585. ip = (scm_t_uint32 *) vm_apply_non_program_code;
  586. APPLY_HOOK ();
  587. NEXT (0);
  588. }
  589. /* receive dst:12 proc:12 _:8 nlocals:24
  590. *
  591. * Receive a single return value from a call whose procedure was in
  592. * PROC, asserting that the call actually returned at least one
  593. * value. Afterwards, resets the frame to NLOCALS locals.
  594. */
  595. VM_DEFINE_OP (6, receive, "receive", OP2 (X8_F12_F12, X8_C24) | OP_DST)
  596. {
  597. scm_t_uint16 dst, proc;
  598. scm_t_uint32 nlocals;
  599. UNPACK_12_12 (op, dst, proc);
  600. UNPACK_24 (ip[1], nlocals);
  601. VM_ASSERT (FRAME_LOCALS_COUNT () > proc + 1, vm_error_no_values ());
  602. FP_SET (dst, FP_REF (proc + 1));
  603. RESET_FRAME (nlocals);
  604. NEXT (2);
  605. }
  606. /* receive-values proc:24 allow-extra?:1 _:7 nvalues:24
  607. *
  608. * Receive a return of multiple values from a call whose procedure was
  609. * in PROC. If fewer than NVALUES values were returned, signal an
  610. * error. Unless ALLOW-EXTRA? is true, require that the number of
  611. * return values equals NVALUES exactly. After receive-values has
  612. * run, the values can be copied down via `mov'.
  613. */
  614. VM_DEFINE_OP (7, receive_values, "receive-values", OP2 (X8_F24, B1_X7_C24))
  615. {
  616. scm_t_uint32 proc, nvalues;
  617. UNPACK_24 (op, proc);
  618. UNPACK_24 (ip[1], nvalues);
  619. if (ip[1] & 0x1)
  620. VM_ASSERT (FRAME_LOCALS_COUNT () > proc + nvalues,
  621. vm_error_not_enough_values ());
  622. else
  623. VM_ASSERT (FRAME_LOCALS_COUNT () == proc + 1 + nvalues,
  624. vm_error_wrong_number_of_values (nvalues));
  625. NEXT (2);
  626. }
  627. VM_DEFINE_OP (8, unused_8, NULL, NOP)
  628. {
  629. vm_error_bad_instruction (op);
  630. abort (); /* never reached */
  631. }
  632. /* return-values nlocals:24
  633. *
  634. * Return a number of values from a call frame. This opcode
  635. * corresponds to an application of `values' in tail position. As
  636. * with tail calls, we expect that the values have already been
  637. * shuffled down to a contiguous array starting at slot 1.
  638. * If NLOCALS is not zero, we also reset the frame to hold NLOCALS
  639. * values.
  640. */
  641. VM_DEFINE_OP (9, return_values, "return-values", OP1 (X8_C24))
  642. {
  643. union scm_vm_stack_element *old_fp;
  644. scm_t_uint32 nlocals;
  645. VM_HANDLE_INTERRUPTS;
  646. UNPACK_24 (op, nlocals);
  647. if (nlocals)
  648. RESET_FRAME (nlocals);
  649. old_fp = vp->fp;
  650. ip = SCM_FRAME_RETURN_ADDRESS (vp->fp);
  651. vp->fp = SCM_FRAME_DYNAMIC_LINK (vp->fp);
  652. /* Clear stack frame. */
  653. old_fp[0].as_scm = SCM_BOOL_F;
  654. old_fp[1].as_scm = SCM_BOOL_F;
  655. POP_CONTINUATION_HOOK (old_fp);
  656. NEXT (0);
  657. }
  658. /*
  659. * Specialized call stubs
  660. */
  661. /* subr-call _:24
  662. *
  663. * Call a subr, passing all locals in this frame as arguments. Return
  664. * from the calling frame. This instruction is part of the
  665. * trampolines created in gsubr.c, and is not generated by the
  666. * compiler.
  667. */
  668. VM_DEFINE_OP (10, subr_call, "subr-call", OP1 (X32))
  669. {
  670. SCM ret;
  671. SYNC_IP ();
  672. ret = scm_apply_subr (sp, FRAME_LOCALS_COUNT ());
  673. CACHE_SP ();
  674. if (SCM_UNLIKELY (SCM_VALUESP (ret)))
  675. /* multiple values returned to continuation */
  676. RETURN_VALUE_LIST (scm_struct_ref (ret, SCM_INUM0));
  677. else
  678. RETURN_ONE_VALUE (ret);
  679. }
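/* Descriptive note, not part of the upstream source: if the subr
   returns a multiple-values object (SCM_VALUESP), the list of values
   is returned by tail-calling the values/apply builtins via
   RETURN_VALUE_LIST; a single value takes the cheaper
   RETURN_ONE_VALUE path, which pops this frame directly. */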
  680. /* foreign-call cif-idx:12 ptr-idx:12
  681. *
  682. * Call a foreign function. Fetch the CIF and foreign pointer from
  683. * CIF-IDX and PTR-IDX, both free variables. Return from the calling
  684. * frame. Arguments are taken from the stack. This instruction is
  685. * part of the trampolines created by the FFI, and is not generated by
  686. * the compiler.
  687. */
  688. VM_DEFINE_OP (11, foreign_call, "foreign-call", OP1 (X8_C12_C12))
  689. {
  690. scm_t_uint16 cif_idx, ptr_idx;
  691. SCM closure, cif, pointer, ret;
  692. UNPACK_12_12 (op, cif_idx, ptr_idx);
  693. closure = FP_REF (0);
  694. cif = SCM_PROGRAM_FREE_VARIABLE_REF (closure, cif_idx);
  695. pointer = SCM_PROGRAM_FREE_VARIABLE_REF (closure, ptr_idx);
  696. SYNC_IP ();
  697. // FIXME: separate args
  698. ret = scm_i_foreign_call (scm_inline_cons (thread, cif, pointer), sp);
  699. CACHE_SP ();
  700. if (SCM_UNLIKELY (SCM_VALUESP (ret)))
  701. /* multiple values returned to continuation */
  702. RETURN_VALUE_LIST (scm_struct_ref (ret, SCM_INUM0));
  703. else
  704. RETURN_ONE_VALUE (ret);
  705. }
  706. /* continuation-call contregs:24
  707. *
  708. * Return to a continuation, nonlocally. The arguments to the
  709. * continuation are taken from the stack. CONTREGS is a free variable
  710. * containing the reified continuation. This instruction is part of
  711. * the implementation of undelimited continuations, and is not
  712. * generated by the compiler.
  713. */
  714. VM_DEFINE_OP (12, continuation_call, "continuation-call", OP1 (X8_C24))
  715. {
  716. SCM contregs;
  717. scm_t_uint32 contregs_idx;
  718. UNPACK_24 (op, contregs_idx);
  719. contregs =
  720. SCM_PROGRAM_FREE_VARIABLE_REF (FP_REF (0), contregs_idx);
  721. SYNC_IP ();
  722. scm_i_check_continuation (contregs);
  723. vm_return_to_continuation (scm_i_contregs_vp (contregs),
  724. scm_i_contregs_vm_cont (contregs),
  725. FRAME_LOCALS_COUNT_FROM (1),
  726. sp);
  727. scm_i_reinstate_continuation (contregs);
  728. /* no NEXT */
  729. abort ();
  730. }
  731. /* compose-continuation cont:24
  732. *
  733. * Compose a partial continuation with the current continuation. The
  734. * arguments to the continuation are taken from the stack. CONT is a
  735. * free variable containing the reified continuation. This
  736. * instruction is part of the implementation of partial continuations,
  737. * and is not generated by the compiler.
  738. */
  739. VM_DEFINE_OP (13, compose_continuation, "compose-continuation", OP1 (X8_C24))
  740. {
  741. SCM vmcont;
  742. scm_t_uint32 cont_idx;
  743. UNPACK_24 (op, cont_idx);
  744. vmcont = SCM_PROGRAM_FREE_VARIABLE_REF (FP_REF (0), cont_idx);
  745. SYNC_IP ();
  746. VM_ASSERT (SCM_VM_CONT_REWINDABLE_P (vmcont),
  747. vm_error_continuation_not_rewindable (vmcont));
  748. vm_reinstate_partial_continuation (vp, vmcont, FRAME_LOCALS_COUNT_FROM (1),
  749. &thread->dynstack, registers);
  750. CACHE_REGISTER ();
  751. NEXT (0);
  752. }
  753. /* tail-apply _:24
  754. *
  755. * Tail-apply the procedure in local slot 0 to the rest of the
  756. * arguments. This instruction is part of the implementation of
  757. * `apply', and is not generated by the compiler.
  758. */
  759. VM_DEFINE_OP (14, tail_apply, "tail-apply", OP1 (X32))
  760. {
  761. int i, list_idx, list_len, nlocals;
  762. SCM list;
  763. VM_HANDLE_INTERRUPTS;
  764. nlocals = FRAME_LOCALS_COUNT ();
  765. // At a minimum, there should be apply, f, and the list.
  766. VM_ASSERT (nlocals >= 3, abort ());
  767. list_idx = nlocals - 1;
  768. list = FP_REF (list_idx);
  769. list_len = scm_ilength (list);
  770. VM_ASSERT (list_len >= 0, vm_error_apply_to_non_list (list));
  771. nlocals = nlocals - 2 + list_len;
  772. ALLOC_FRAME (nlocals);
  773. for (i = 1; i < list_idx; i++)
  774. FP_SET (i - 1, FP_REF (i));
  775. /* Null out these slots, just in case there are fewer than 2 elements
  776. in the list. */
  777. FP_SET (list_idx - 1, SCM_UNDEFINED);
  778. FP_SET (list_idx, SCM_UNDEFINED);
  779. for (i = 0; i < list_len; i++, list = SCM_CDR (list))
  780. FP_SET (list_idx - 1 + i, SCM_CAR (list));
  781. if (SCM_LIKELY (SCM_PROGRAM_P (FP_REF (0))))
  782. ip = SCM_PROGRAM_CODE (FP_REF (0));
  783. else
  784. ip = (scm_t_uint32 *) vm_apply_non_program_code;
  785. APPLY_HOOK ();
  786. NEXT (0);
  787. }
  788. /* call/cc _:24
  789. *
  790. * Capture the current continuation, and tail-apply the procedure in
  791. * local slot 1 to it. This instruction is part of the implementation
  792. * of `call/cc', and is not generated by the compiler.
  793. */
  794. VM_DEFINE_OP (15, call_cc, "call/cc", OP1 (X32))
  795. {
  796. SCM vm_cont, cont;
  797. scm_t_dynstack *dynstack;
  798. int first;
  799. VM_HANDLE_INTERRUPTS;
  800. SYNC_IP ();
  801. dynstack = scm_dynstack_capture_all (&thread->dynstack);
  802. vm_cont = scm_i_vm_capture_stack (vp->stack_top,
  803. SCM_FRAME_DYNAMIC_LINK (vp->fp),
  804. SCM_FRAME_PREVIOUS_SP (vp->fp),
  805. SCM_FRAME_RETURN_ADDRESS (vp->fp),
  806. dynstack,
  807. 0);
  808. /* FIXME: Seems silly to capture the registers here, when they are
  809. already captured in the registers local, which here we are
  810. copying out to the heap; and likewise, the setjmp(&registers)
  811. code already has the non-local return handler. But oh
  812. well! */
  813. cont = scm_i_make_continuation (&first, vp, vm_cont);
  814. if (first)
  815. {
  816. RESET_FRAME (2);
  817. SP_SET (1, SP_REF (0));
  818. SP_SET (0, cont);
  819. if (SCM_LIKELY (SCM_PROGRAM_P (SP_REF (1))))
  820. ip = SCM_PROGRAM_CODE (SP_REF (1));
  821. else
  822. ip = (scm_t_uint32 *) vm_apply_non_program_code;
  823. APPLY_HOOK ();
  824. NEXT (0);
  825. }
  826. else
  827. {
  828. CACHE_REGISTER ();
  829. ABORT_CONTINUATION_HOOK ();
  830. NEXT (0);
  831. }
  832. }
  833. /* abort _:24
  834. *
  835. * Abort to a prompt handler. The tag is expected in r1, and the rest
  836. * of the values in the frame are returned to the prompt handler.
  837. * This corresponds to a tail application of abort-to-prompt.
  838. */
  839. VM_DEFINE_OP (16, abort, "abort", OP1 (X32))
  840. {
  841. scm_t_uint32 nlocals = FRAME_LOCALS_COUNT ();
  842. ASSERT (nlocals >= 2);
  843. /* FIXME: Really we should capture the caller's registers. Until
  844. then, manually advance the IP so that when the prompt resumes,
  845. it continues with the next instruction. */
  846. ip++;
  847. SYNC_IP ();
  848. vm_abort (vp, FP_REF (1), nlocals - 2, registers);
  849. /* vm_abort should not return */
  850. abort ();
  851. }
  852. /* builtin-ref dst:12 idx:12
  853. *
  854. * Load a builtin stub by index into DST.
  855. */
  856. VM_DEFINE_OP (17, builtin_ref, "builtin-ref", OP1 (X8_S12_C12) | OP_DST)
  857. {
  858. scm_t_uint16 dst, idx;
  859. UNPACK_12_12 (op, dst, idx);
  860. SP_SET (dst, scm_vm_builtin_ref (idx));
  861. NEXT (1);
  862. }
  863. /*
  864. * Function prologues
  865. */
  866. /* br-if-nargs-ne expected:24 _:8 offset:24
  867. * br-if-nargs-lt expected:24 _:8 offset:24
  868. * br-if-nargs-gt expected:24 _:8 offset:24
  869. *
  870. * If the number of actual arguments is not equal, less than, or greater
  871. * than EXPECTED, respectively, add OFFSET, a signed 24-bit number, to
  872. * the current instruction pointer.
  873. */
  874. VM_DEFINE_OP (18, br_if_nargs_ne, "br-if-nargs-ne", OP2 (X8_C24, X8_L24))
  875. {
  876. BR_NARGS (!=);
  877. }
  878. VM_DEFINE_OP (19, br_if_nargs_lt, "br-if-nargs-lt", OP2 (X8_C24, X8_L24))
  879. {
  880. BR_NARGS (<);
  881. }
  882. VM_DEFINE_OP (20, br_if_nargs_gt, "br-if-nargs-gt", OP2 (X8_C24, X8_L24))
  883. {
  884. BR_NARGS (>);
  885. }
  886. /* assert-nargs-ee expected:24
  887. * assert-nargs-ge expected:24
  888. * assert-nargs-le expected:24
  889. *
  890. * If the number of actual arguments is not ==, >=, or <= EXPECTED,
  891. * respectively, signal an error.
  892. */
  893. VM_DEFINE_OP (21, assert_nargs_ee, "assert-nargs-ee", OP1 (X8_C24))
  894. {
  895. scm_t_uint32 expected;
  896. UNPACK_24 (op, expected);
  897. VM_ASSERT (FRAME_LOCALS_COUNT () == expected,
  898. vm_error_wrong_num_args (FP_REF (0)));
  899. NEXT (1);
  900. }
  901. VM_DEFINE_OP (22, assert_nargs_ge, "assert-nargs-ge", OP1 (X8_C24))
  902. {
  903. scm_t_uint32 expected;
  904. UNPACK_24 (op, expected);
  905. VM_ASSERT (FRAME_LOCALS_COUNT () >= expected,
  906. vm_error_wrong_num_args (FP_REF (0)));
  907. NEXT (1);
  908. }
  909. VM_DEFINE_OP (23, assert_nargs_le, "assert-nargs-le", OP1 (X8_C24))
  910. {
  911. scm_t_uint32 expected;
  912. UNPACK_24 (op, expected);
  913. VM_ASSERT (FRAME_LOCALS_COUNT () <= expected,
  914. vm_error_wrong_num_args (FP_REF (0)));
  915. NEXT (1);
  916. }
  917. /* alloc-frame nlocals:24
  918. *
  919. * Ensure that there is space on the stack for NLOCALS local variables,
  920. * setting them all to SCM_UNDEFINED, except the values that were already
  921. * passed as the procedure and its arguments.
  922. */
  923. VM_DEFINE_OP (24, alloc_frame, "alloc-frame", OP1 (X8_C24))
  924. {
  925. scm_t_uint32 nlocals, nargs;
  926. UNPACK_24 (op, nlocals);
  927. nargs = FRAME_LOCALS_COUNT ();
  928. ALLOC_FRAME (nlocals);
  929. while (nlocals-- > nargs)
  930. FP_SET (nlocals, SCM_UNDEFINED);
  931. NEXT (1);
  932. }
  933. /* reset-frame nlocals:24
  934. *
  935. * Like alloc-frame, but doesn't check that the stack is big enough.
  936. * Used to reset the frame size to something less than the size that
  937. * was previously set via alloc-frame.
  938. */
  939. VM_DEFINE_OP (25, reset_frame, "reset-frame", OP1 (X8_C24))
  940. {
  941. scm_t_uint32 nlocals;
  942. UNPACK_24 (op, nlocals);
  943. RESET_FRAME (nlocals);
  944. NEXT (1);
  945. }
  946. /* push src:24
  947. *
  948. * Push SRC onto the stack.
  949. */
  950. VM_DEFINE_OP (26, push, "push", OP1 (X8_S24))
  951. {
  952. scm_t_uint32 src;
  953. SCM val;
  954. UNPACK_24 (op, src);
  955. val = SP_REF (src);
  956. ALLOC_FRAME (FRAME_LOCALS_COUNT () + 1);
  957. SP_SET (0, val);
  958. NEXT (1);
  959. }
  960. /* pop dst:24
  961. *
  962. * Pop the stack, storing to DST.
  963. */
  964. VM_DEFINE_OP (27, pop, "pop", OP1 (X8_S24) | OP_DST)
  965. {
  966. scm_t_uint32 dst;
  967. SCM val;
  968. UNPACK_24 (op, dst);
  969. val = SP_REF (0);
  970. vp->sp = sp = sp + 1;
  971. SP_SET (dst, val);
  972. NEXT (1);
  973. }
  974. /* drop count:24
  975. *
  976. * Drop some number of values from the stack.
  977. */
  978. VM_DEFINE_OP (28, drop, "drop", OP1 (X8_C24))
  979. {
  980. scm_t_uint32 count;
  981. UNPACK_24 (op, count);
  982. vp->sp = sp = sp + count;
  983. NEXT (1);
  984. }
  985. /* assert-nargs-ee/locals expected:12 nlocals:12
  986. *
  987. * Equivalent to a sequence of assert-nargs-ee and reserve-locals. The
  988. * number of locals reserved is EXPECTED + NLOCALS.
  989. */
  990. VM_DEFINE_OP (29, assert_nargs_ee_locals, "assert-nargs-ee/locals", OP1 (X8_C12_C12))
  991. {
  992. scm_t_uint16 expected, nlocals;
  993. UNPACK_12_12 (op, expected, nlocals);
  994. VM_ASSERT (FRAME_LOCALS_COUNT () == expected,
  995. vm_error_wrong_num_args (FP_REF (0)));
  996. ALLOC_FRAME (expected + nlocals);
  997. while (nlocals--)
  998. SP_SET (nlocals, SCM_UNDEFINED);
  999. NEXT (1);
  1000. }
  1001. /* br-if-npos-gt nreq:24 _:8 npos:24 _:8 offset:24
  1002. *
  1003. * Find the first positional argument after NREQ. If it is greater
  1004. * than NPOS, jump to OFFSET.
  1005. *
  1006. * This instruction is only emitted for functions with multiple
  1007. * clauses, and an earlier clause has keywords and no rest arguments.
  1008. * See "Case-lambda" in the manual, for more on how case-lambda
  1009. * chooses the clause to apply.
  1010. */
  1011. VM_DEFINE_OP (30, br_if_npos_gt, "br-if-npos-gt", OP3 (X8_C24, X8_C24, X8_L24))
  1012. {
  1013. scm_t_uint32 nreq, npos;
  1014. UNPACK_24 (op, nreq);
  1015. UNPACK_24 (ip[1], npos);
  1016. /* We can only have too many positionals if there are more
  1017. arguments than NPOS. */
  1018. if (FRAME_LOCALS_COUNT() > npos)
  1019. {
  1020. scm_t_uint32 n;
  1021. for (n = nreq; n < npos; n++)
  1022. if (scm_is_keyword (FP_REF (n)))
  1023. break;
  1024. if (n == npos && !scm_is_keyword (FP_REF (n)))
  1025. {
  1026. scm_t_int32 offset = ip[2];
  1027. offset >>= 8; /* Sign-extending shift. */
  1028. NEXT (offset);
  1029. }
  1030. }
  1031. NEXT (3);
  1032. }
  1033. /* bind-kwargs nreq:24 flags:8 nreq-and-opt:24 _:8 ntotal:24 kw-offset:32
  1034. *
  1035. * flags := allow-other-keys:1 has-rest:1 _:6
  1036. *
  1037. * Find the last positional argument, and shuffle all the rest above
  1038. * NTOTAL. Initialize the intervening locals to SCM_UNDEFINED. Then
  1039. * load the constant at KW-OFFSET words from the current IP, and use it
  1040. * to bind keyword arguments. If HAS-REST, collect all shuffled
  1041. * arguments into a list, and store it in NREQ-AND-OPT. Finally, clear
  1042. * the arguments that we shuffled up.
  1043. *
  1044. * A macro-mega-instruction.
  1045. */
  1046. VM_DEFINE_OP (31, bind_kwargs, "bind-kwargs", OP4 (X8_C24, C8_C24, X8_C24, N32))
  1047. {
  1048. scm_t_uint32 nreq, nreq_and_opt, ntotal, npositional, nkw, n, nargs;
  1049. scm_t_int32 kw_offset;
  1050. scm_t_bits kw_bits;
  1051. SCM kw;
  1052. char allow_other_keys, has_rest;
  1053. UNPACK_24 (op, nreq);
  1054. allow_other_keys = ip[1] & 0x1;
  1055. has_rest = ip[1] & 0x2;
  1056. UNPACK_24 (ip[1], nreq_and_opt);
  1057. UNPACK_24 (ip[2], ntotal);
  1058. kw_offset = ip[3];
  1059. kw_bits = (scm_t_bits) (ip + kw_offset);
  1060. VM_ASSERT (!(kw_bits & 0x7), abort());
  1061. kw = SCM_PACK (kw_bits);
  1062. nargs = FRAME_LOCALS_COUNT ();
  1063. /* look in optionals for first keyword or last positional */
  1064. /* starting after the last required positional arg */
  1065. npositional = nreq;
  1066. while (/* while we have args */
  1067. npositional < nargs
  1068. /* and we still have positionals to fill */
  1069. && npositional < nreq_and_opt
  1070. /* and we haven't reached a keyword yet */
  1071. && !scm_is_keyword (FP_REF (npositional)))
  1072. /* bind this optional arg (by leaving it in place) */
  1073. npositional++;
  1074. nkw = nargs - npositional;
  1075. /* shuffle non-positional arguments above ntotal */
  1076. ALLOC_FRAME (ntotal + nkw);
  1077. n = nkw;
  1078. while (n--)
  1079. FP_SET (ntotal + n, FP_REF (npositional + n));
  1080. /* and fill optionals & keyword args with SCM_UNDEFINED */
  1081. n = npositional;
  1082. while (n < ntotal)
  1083. FP_SET (n++, SCM_UNDEFINED);
  1084. VM_ASSERT (has_rest || (nkw % 2) == 0,
  1085. vm_error_kwargs_length_not_even (FP_REF (0)));
  1086. /* Now bind keywords, in the order given. */
  1087. for (n = 0; n < nkw; n++)
  1088. if (scm_is_keyword (FP_REF (ntotal + n)))
  1089. {
  1090. SCM walk;
  1091. for (walk = kw; scm_is_pair (walk); walk = SCM_CDR (walk))
  1092. if (scm_is_eq (SCM_CAAR (walk), FP_REF (ntotal + n)))
  1093. {
  1094. SCM si = SCM_CDAR (walk);
  1095. FP_SET (SCM_I_INUMP (si) ? SCM_I_INUM (si) : scm_to_uint32 (si),
  1096. FP_REF (ntotal + n + 1));
  1097. break;
  1098. }
  1099. VM_ASSERT (scm_is_pair (walk) || allow_other_keys,
  1100. vm_error_kwargs_unrecognized_keyword (FP_REF (0),
  1101. FP_REF (ntotal + n)));
  1102. n++;
  1103. }
  1104. else
  1105. VM_ASSERT (has_rest, vm_error_kwargs_invalid_keyword (FP_REF (0),
  1106. FP_REF (ntotal + n)));
  1107. if (has_rest)
  1108. {
  1109. SCM rest = SCM_EOL;
  1110. n = nkw;
  1111. while (n--)
  1112. rest = scm_inline_cons (thread, FP_REF (ntotal + n), rest);
  1113. FP_SET (nreq_and_opt, rest);
  1114. }
  1115. RESET_FRAME (ntotal);
  1116. NEXT (4);
  1117. }
  1118. /* bind-rest dst:24
  1119. *
  1120. * Collect any arguments at or above DST into a list, and store that
  1121. * list at DST.
  1122. */
  1123. VM_DEFINE_OP (32, bind_rest, "bind-rest", OP1 (X8_F24) | OP_DST)
  1124. {
  1125. scm_t_uint32 dst, nargs;
  1126. SCM rest = SCM_EOL;
  1127. UNPACK_24 (op, dst);
  1128. nargs = FRAME_LOCALS_COUNT ();
  1129. if (nargs <= dst)
  1130. {
  1131. ALLOC_FRAME (dst + 1);
  1132. while (nargs < dst)
  1133. FP_SET (nargs++, SCM_UNDEFINED);
  1134. }
  1135. else
  1136. {
  1137. while (nargs-- > dst)
  1138. {
  1139. rest = scm_inline_cons (thread, FP_REF (nargs), rest);
  1140. FP_SET (nargs, SCM_UNDEFINED);
  1141. }
  1142. RESET_FRAME (dst + 1);
  1143. }
  1144. FP_SET (dst, rest);
  1145. NEXT (1);
  1146. }
  1147. /*
  1148. * Branching instructions
  1149. */
  1150. /* br offset:24
  1151. *
  1152. * Add OFFSET, a signed 24-bit number, to the current instruction
  1153. * pointer.
  1154. */
  1155. VM_DEFINE_OP (33, br, "br", OP1 (X8_L24))
  1156. {
  1157. scm_t_int32 offset = op;
  1158. offset >>= 8; /* Sign-extending shift. */
  1159. if (offset <= 0)
  1160. VM_HANDLE_INTERRUPTS;
  1161. NEXT (offset);
  1162. }
  1163. /* br-if-true test:24 invert:1 _:7 offset:24
  1164. *
  1165. * If the value in TEST is true for the purposes of Scheme, add
  1166. * OFFSET, a signed 24-bit number, to the current instruction pointer.
  1167. */
  1168. VM_DEFINE_OP (34, br_if_true, "br-if-true", OP2 (X8_S24, B1_X7_L24))
  1169. {
  1170. BR_UNARY (x, scm_is_true (x));
  1171. }
  1172. /* br-if-null test:24 invert:1 _:7 offset:24
  1173. *
  1174. * If the value in TEST is the end-of-list or Lisp nil, add OFFSET, a
  1175. * signed 24-bit number, to the current instruction pointer.
  1176. */
  1177. VM_DEFINE_OP (35, br_if_null, "br-if-null", OP2 (X8_S24, B1_X7_L24))
  1178. {
  1179. BR_UNARY (x, scm_is_null (x));
  1180. }
  1181. /* br-if-nil test:24 invert:1 _:7 offset:24
  1182. *
  1183. * If the value in TEST is false to Lisp, add OFFSET, a signed 24-bit
  1184. * number, to the current instruction pointer.
  1185. */
  1186. VM_DEFINE_OP (36, br_if_nil, "br-if-nil", OP2 (X8_S24, B1_X7_L24))
  1187. {
  1188. BR_UNARY (x, scm_is_lisp_false (x));
  1189. }
  1190. /* br-if-pair test:24 invert:1 _:7 offset:24
  1191. *
  1192. * If the value in TEST is a pair, add OFFSET, a signed 24-bit number,
  1193. * to the current instruction pointer.
  1194. */
  1195. VM_DEFINE_OP (37, br_if_pair, "br-if-pair", OP2 (X8_S24, B1_X7_L24))
  1196. {
  1197. BR_UNARY (x, scm_is_pair (x));
  1198. }
  1199. /* br-if-struct test:24 invert:1 _:7 offset:24
  1200. *
  1201. * If the value in TEST is a struct, add OFFSET, a signed 24-bit
  1202. * number, to the current instruction pointer.
  1203. */
  1204. VM_DEFINE_OP (38, br_if_struct, "br-if-struct", OP2 (X8_S24, B1_X7_L24))
  1205. {
  1206. BR_UNARY (x, SCM_STRUCTP (x));
  1207. }
  1208. /* br-if-char test:24 invert:1 _:7 offset:24
  1209. *
  1210. * If the value in TEST is a char, add OFFSET, a signed 24-bit number,
  1211. * to the current instruction pointer.
  1212. */
  1213. VM_DEFINE_OP (39, br_if_char, "br-if-char", OP2 (X8_S24, B1_X7_L24))
  1214. {
  1215. BR_UNARY (x, SCM_CHARP (x));
  1216. }
  1217. /* br-if-tc7 test:24 invert:1 tc7:7 offset:24
  1218. *
  1219. * If the value in TEST has the TC7 given in the second word, add
  1220. * OFFSET, a signed 24-bit number, to the current instruction pointer.
  1221. */
  1222. VM_DEFINE_OP (40, br_if_tc7, "br-if-tc7", OP2 (X8_S24, B1_C7_L24))
  1223. {
  1224. BR_UNARY (x, SCM_HAS_TYP7 (x, (ip[1] >> 1) & 0x7f));
  1225. }
1226. /* br-if-eq a:24 _:8 b:24 invert:1 _:7 offset:24
  1227. *
  1228. * If the value in A is eq? to the value in B, add OFFSET, a signed
  1229. * 24-bit number, to the current instruction pointer.
  1230. */
  1231. VM_DEFINE_OP (41, br_if_eq, "br-if-eq", OP3 (X8_S24, X8_S24, B1_X7_L24))
  1232. {
  1233. BR_BINARY (x, y, scm_is_eq (x, y));
  1234. }
1235. /* br-if-eqv a:24 _:8 b:24 invert:1 _:7 offset:24
  1236. *
  1237. * If the value in A is eqv? to the value in B, add OFFSET, a signed
  1238. * 24-bit number, to the current instruction pointer.
  1239. */
  1240. VM_DEFINE_OP (42, br_if_eqv, "br-if-eqv", OP3 (X8_S24, X8_S24, B1_X7_L24))
  1241. {
  1242. BR_BINARY (x, y,
  1243. scm_is_eq (x, y)
  1244. || (SCM_NIMP (x) && SCM_NIMP (y)
  1245. && scm_is_true (scm_eqv_p (x, y))));
  1246. }
  1247. VM_DEFINE_OP (43, unused_43, NULL, NOP)
  1248. {
  1249. abort ();
  1250. }
  1251. /* br-if-logtest a:24 _:8 b:24 invert:1 _:7 offset:24
  1252. *
  1253. * If the exact integer in A has any bits in common with the exact
  1254. * integer in B, add OFFSET, a signed 24-bit number, to the current
  1255. * instruction pointer.
  1256. */
  1257. VM_DEFINE_OP (44, br_if_logtest, "br-if-logtest", OP3 (X8_S24, X8_S24, B1_X7_L24))
  1258. {
  1259. BR_BINARY (x, y,
  1260. ((SCM_I_INUMP (x) && SCM_I_INUMP (y))
  1261. ? (SCM_UNPACK (x) & SCM_UNPACK (y) & ~scm_tc2_int)
  1262. : scm_is_true (scm_logtest (x, y))));
  1263. }
1264. /* br-if-= a:24 _:8 b:24 invert:1 _:7 offset:24
  1265. *
  1266. * If the value in A is = to the value in B, add OFFSET, a signed
  1267. * 24-bit number, to the current instruction pointer.
  1268. */
  1269. VM_DEFINE_OP (45, br_if_ee, "br-if-=", OP3 (X8_S24, X8_S24, B1_X7_L24))
  1270. {
  1271. BR_ARITHMETIC (==, scm_num_eq_p);
  1272. }
1273. /* br-if-< a:24 _:8 b:24 invert:1 _:7 offset:24
  1274. *
  1275. * If the value in A is < to the value in B, add OFFSET, a signed
  1276. * 24-bit number, to the current instruction pointer.
  1277. */
  1278. VM_DEFINE_OP (46, br_if_lt, "br-if-<", OP3 (X8_S24, X8_S24, B1_X7_L24))
  1279. {
  1280. BR_ARITHMETIC (<, scm_less_p);
  1281. }
1282. /* br-if-<= a:24 _:8 b:24 invert:1 _:7 offset:24
  1283. *
  1284. * If the value in A is <= to the value in B, add OFFSET, a signed
  1285. * 24-bit number, to the current instruction pointer.
  1286. */
  1287. VM_DEFINE_OP (47, br_if_le, "br-if-<=", OP3 (X8_S24, X8_S24, B1_X7_L24))
  1288. {
  1289. BR_ARITHMETIC (<=, scm_leq_p);
  1290. }
  1291. /*
  1292. * Lexical binding instructions
  1293. */
  1294. /* mov dst:12 src:12
  1295. *
  1296. * Copy a value from one local slot to another.
  1297. */
  1298. VM_DEFINE_OP (48, mov, "mov", OP1 (X8_S12_S12) | OP_DST)
  1299. {
  1300. scm_t_uint16 dst;
  1301. scm_t_uint16 src;
  1302. UNPACK_12_12 (op, dst, src);
  1303. SP_SET (dst, SP_REF (src));
  1304. NEXT (1);
  1305. }
  1306. /* long-mov dst:24 _:8 src:24
  1307. *
  1308. * Copy a value from one local slot to another.
  1309. */
  1310. VM_DEFINE_OP (49, long_mov, "long-mov", OP2 (X8_S24, X8_S24) | OP_DST)
  1311. {
  1312. scm_t_uint32 dst;
  1313. scm_t_uint32 src;
  1314. UNPACK_24 (op, dst);
  1315. UNPACK_24 (ip[1], src);
  1316. SP_SET (dst, SP_REF (src));
  1317. NEXT (2);
  1318. }
  1319. /* long-fmov dst:24 _:8 src:24
  1320. *
  1321. * Copy a value from one local slot to another. Slot indexes are
  1322. * relative to the FP.
  1323. */
  1324. VM_DEFINE_OP (50, long_fmov, "long-fmov", OP2 (X8_F24, X8_F24) | OP_DST)
  1325. {
  1326. scm_t_uint32 dst;
  1327. scm_t_uint32 src;
  1328. UNPACK_24 (op, dst);
  1329. UNPACK_24 (ip[1], src);
  1330. FP_SET (dst, FP_REF (src));
  1331. NEXT (2);
  1332. }
  1333. /* box dst:12 src:12
  1334. *
  1335. * Create a new variable holding SRC, and place it in DST.
  1336. */
  1337. VM_DEFINE_OP (51, box, "box", OP1 (X8_S12_S12) | OP_DST)
  1338. {
  1339. scm_t_uint16 dst, src;
  1340. UNPACK_12_12 (op, dst, src);
  1341. SP_SET (dst, scm_inline_cell (thread, scm_tc7_variable,
  1342. SCM_UNPACK (SP_REF (src))));
  1343. NEXT (1);
  1344. }
  1345. /* box-ref dst:12 src:12
  1346. *
  1347. * Unpack the variable at SRC into DST, asserting that the variable is
  1348. * actually bound.
  1349. */
  1350. VM_DEFINE_OP (52, box_ref, "box-ref", OP1 (X8_S12_S12) | OP_DST)
  1351. {
  1352. scm_t_uint16 dst, src;
  1353. SCM var;
  1354. UNPACK_12_12 (op, dst, src);
  1355. var = SP_REF (src);
  1356. VM_ASSERT (SCM_VARIABLEP (var),
  1357. vm_error_not_a_variable ("variable-ref", var));
  1358. VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (var));
  1359. SP_SET (dst, VARIABLE_REF (var));
  1360. NEXT (1);
  1361. }
  1362. /* box-set! dst:12 src:12
  1363. *
1364. * Set the contents of the variable at DST to the value in SRC.
  1365. */
  1366. VM_DEFINE_OP (53, box_set, "box-set!", OP1 (X8_S12_S12))
  1367. {
  1368. scm_t_uint16 dst, src;
  1369. SCM var;
  1370. UNPACK_12_12 (op, dst, src);
  1371. var = SP_REF (dst);
  1372. VM_ASSERT (SCM_VARIABLEP (var),
  1373. vm_error_not_a_variable ("variable-set!", var));
  1374. VARIABLE_SET (var, SP_REF (src));
  1375. NEXT (1);
  1376. }
  1377. /* make-closure dst:24 offset:32 _:8 nfree:24
  1378. *
  1379. * Make a new closure, and write it to DST. The code for the closure
  1380. * will be found at OFFSET words from the current IP. OFFSET is a
  1381. * signed 32-bit integer. Space for NFREE free variables will be
  1382. * allocated.
  1383. */
  1384. VM_DEFINE_OP (54, make_closure, "make-closure", OP3 (X8_S24, L32, X8_C24) | OP_DST)
  1385. {
  1386. scm_t_uint32 dst, nfree, n;
  1387. scm_t_int32 offset;
  1388. SCM closure;
  1389. UNPACK_24 (op, dst);
  1390. offset = ip[1];
  1391. UNPACK_24 (ip[2], nfree);
1392. /* FIXME: Assert range of nfree?  */
  1393. closure = scm_inline_words (thread, scm_tc7_program | (nfree << 16),
  1394. nfree + 2);
  1395. SCM_SET_CELL_WORD_1 (closure, ip + offset);
1396. /* FIXME: Elide these initializations?  */
  1397. for (n = 0; n < nfree; n++)
  1398. SCM_PROGRAM_FREE_VARIABLE_SET (closure, n, SCM_BOOL_F);
  1399. SP_SET (dst, closure);
  1400. NEXT (3);
  1401. }
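/* A rough sketch, for illustration: in

       (define (adder x) (lambda (y) (+ x y)))

   the inner lambda has one free variable, X, so the compiler would
   typically emit make-closure with NFREE = 1 followed by a free-set!
   that copies X into free-variable slot 0 of the new closure.  */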
  1402. /* free-ref dst:12 src:12 _:8 idx:24
  1403. *
  1404. * Load free variable IDX from the closure SRC into local slot DST.
  1405. */
  1406. VM_DEFINE_OP (55, free_ref, "free-ref", OP2 (X8_S12_S12, X8_C24) | OP_DST)
  1407. {
  1408. scm_t_uint16 dst, src;
  1409. scm_t_uint32 idx;
  1410. UNPACK_12_12 (op, dst, src);
  1411. UNPACK_24 (ip[1], idx);
  1412. /* CHECK_FREE_VARIABLE (src); */
  1413. SP_SET (dst, SCM_PROGRAM_FREE_VARIABLE_REF (SP_REF (src), idx));
  1414. NEXT (2);
  1415. }
  1416. /* free-set! dst:12 src:12 _:8 idx:24
  1417. *
1418. * Set free variable IDX of the closure in DST to the value in SRC.
  1419. */
  1420. VM_DEFINE_OP (56, free_set, "free-set!", OP2 (X8_S12_S12, X8_C24))
  1421. {
  1422. scm_t_uint16 dst, src;
  1423. scm_t_uint32 idx;
  1424. UNPACK_12_12 (op, dst, src);
  1425. UNPACK_24 (ip[1], idx);
  1426. /* CHECK_FREE_VARIABLE (src); */
  1427. SCM_PROGRAM_FREE_VARIABLE_SET (SP_REF (dst), idx, SP_REF (src));
  1428. NEXT (2);
  1429. }
  1430. /*
  1431. * Immediates and statically allocated non-immediates
  1432. */
  1433. /* make-short-immediate dst:8 low-bits:16
  1434. *
  1435. * Make an immediate whose low bits are LOW-BITS, and whose top bits are
  1436. * 0.
  1437. */
  1438. VM_DEFINE_OP (57, make_short_immediate, "make-short-immediate", OP1 (X8_S8_I16) | OP_DST)
  1439. {
  1440. scm_t_uint8 dst;
  1441. scm_t_bits val;
  1442. UNPACK_8_16 (op, dst, val);
  1443. SP_SET (dst, SCM_PACK (val));
  1444. NEXT (1);
  1445. }
  1446. /* make-long-immediate dst:24 low-bits:32
  1447. *
  1448. * Make an immediate whose low bits are LOW-BITS, and whose top bits are
  1449. * 0.
  1450. */
  1451. VM_DEFINE_OP (58, make_long_immediate, "make-long-immediate", OP2 (X8_S24, I32) | OP_DST)
  1452. {
  1453. scm_t_uint32 dst;
  1454. scm_t_bits val;
  1455. UNPACK_24 (op, dst);
  1456. val = ip[1];
  1457. SP_SET (dst, SCM_PACK (val));
  1458. NEXT (2);
  1459. }
  1460. /* make-long-long-immediate dst:24 high-bits:32 low-bits:32
  1461. *
  1462. * Make an immediate with HIGH-BITS and LOW-BITS.
  1463. */
  1464. VM_DEFINE_OP (59, make_long_long_immediate, "make-long-long-immediate", OP3 (X8_S24, A32, B32) | OP_DST)
  1465. {
  1466. scm_t_uint32 dst;
  1467. scm_t_bits val;
  1468. UNPACK_24 (op, dst);
  1469. #if SIZEOF_SCM_T_BITS > 4
  1470. val = ip[1];
  1471. val <<= 32;
  1472. val |= ip[2];
  1473. #else
  1474. ASSERT (ip[1] == 0);
  1475. val = ip[2];
  1476. #endif
  1477. SP_SET (dst, SCM_PACK (val));
  1478. NEXT (3);
  1479. }
  1480. /* make-non-immediate dst:24 offset:32
  1481. *
  1482. * Load a pointer to statically allocated memory into DST. The
1483. * object's memory will be found OFFSET 32-bit words away from the
  1484. * current instruction pointer. OFFSET is a signed value. The
  1485. * intention here is that the compiler would produce an object file
  1486. * containing the words of a non-immediate object, and this
  1487. * instruction creates a pointer to that memory, effectively
  1488. * resurrecting that object.
  1489. *
  1490. * Whether the object is mutable or immutable depends on where it was
  1491. * allocated by the compiler, and loaded by the loader.
  1492. */
  1493. VM_DEFINE_OP (60, make_non_immediate, "make-non-immediate", OP2 (X8_S24, N32) | OP_DST)
  1494. {
  1495. scm_t_uint32 dst;
  1496. scm_t_int32 offset;
  1497. scm_t_uint32* loc;
  1498. scm_t_bits unpacked;
  1499. UNPACK_24 (op, dst);
  1500. offset = ip[1];
  1501. loc = ip + offset;
  1502. unpacked = (scm_t_bits) loc;
  1503. VM_ASSERT (!(unpacked & 0x7), abort());
  1504. SP_SET (dst, SCM_PACK (unpacked));
  1505. NEXT (2);
  1506. }
  1507. /* static-ref dst:24 offset:32
  1508. *
  1509. * Load a SCM value into DST. The SCM value will be fetched from
  1510. * memory, OFFSET 32-bit words away from the current instruction
  1511. * pointer. OFFSET is a signed value.
  1512. *
  1513. * The intention is for this instruction to be used to load constants
  1514. * that the compiler is unable to statically allocate, like symbols.
  1515. * These values would be initialized when the object file loads.
  1516. */
  1517. VM_DEFINE_OP (61, static_ref, "static-ref", OP2 (X8_S24, R32) | OP_DST)
  1518. {
  1519. scm_t_uint32 dst;
  1520. scm_t_int32 offset;
  1521. scm_t_uint32* loc;
  1522. scm_t_uintptr loc_bits;
  1523. UNPACK_24 (op, dst);
  1524. offset = ip[1];
  1525. loc = ip + offset;
  1526. loc_bits = (scm_t_uintptr) loc;
  1527. VM_ASSERT (ALIGNED_P (loc, SCM), abort());
  1528. SP_SET (dst, *((SCM *) loc_bits));
  1529. NEXT (2);
  1530. }
  1531. /* static-set! src:24 offset:32
  1532. *
  1533. * Store a SCM value into memory, OFFSET 32-bit words away from the
  1534. * current instruction pointer. OFFSET is a signed value.
  1535. */
  1536. VM_DEFINE_OP (62, static_set, "static-set!", OP2 (X8_S24, LO32))
  1537. {
  1538. scm_t_uint32 src;
  1539. scm_t_int32 offset;
  1540. scm_t_uint32* loc;
  1541. UNPACK_24 (op, src);
  1542. offset = ip[1];
  1543. loc = ip + offset;
  1544. VM_ASSERT (ALIGNED_P (loc, SCM), abort());
  1545. *((SCM *) loc) = SP_REF (src);
  1546. NEXT (2);
  1547. }
  1548. /* static-patch! _:24 dst-offset:32 src-offset:32
  1549. *
  1550. * Patch a pointer at DST-OFFSET to point to SRC-OFFSET. Both offsets
  1551. * are signed 32-bit values, indicating a memory address as a number
  1552. * of 32-bit words away from the current instruction pointer.
  1553. */
  1554. VM_DEFINE_OP (63, static_patch, "static-patch!", OP3 (X32, LO32, L32))
  1555. {
  1556. scm_t_int32 dst_offset, src_offset;
  1557. void *src;
  1558. void** dst_loc;
  1559. dst_offset = ip[1];
  1560. src_offset = ip[2];
  1561. dst_loc = (void **) (ip + dst_offset);
  1562. src = ip + src_offset;
  1563. VM_ASSERT (ALIGNED_P (dst_loc, void*), abort());
  1564. *dst_loc = src;
  1565. NEXT (3);
  1566. }
  1567. /*
  1568. * Mutable top-level bindings
  1569. */
  1570. /* There are three slightly different ways to resolve toplevel
  1571. variables.
  1572. 1. A toplevel reference outside of a function. These need to be
1573. looked up when the expression is evaluated -- no later, and no
1574. earlier. They are looked up relative to the module that is
  1575. current when the expression is evaluated. For example:
  1576. (if (foo) a b)
  1577. The "resolve" instruction resolves the variable (box), and then
  1578. access is via box-ref or box-set!.
  1579. 2. A toplevel reference inside a function. These are looked up
  1580. relative to the module that was current when the function was
  1581. defined. Unlike code at the toplevel, which is usually run only
  1582. once, these bindings benefit from memoized lookup, in which the
  1583. variable resulting from the lookup is cached in the function.
  1584. (lambda () (if (foo) a b))
  1585. The toplevel-box instruction is equivalent to "resolve", but
  1586. caches the resulting variable in statically allocated memory.
  1587. 3. A reference to an identifier with respect to a particular
  1588. module. This can happen for primitive references, and
  1589. references residualized by macro expansions. These can always
  1590. be cached. Use module-box for these.
  1591. */
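/* As a sketch of case 3: a reference written with Guile's module-ref
   syntax, e.g.

       ((@ (ice-9 format) format) #f "~a" x)

   names both the module and the identifier explicitly, so the lookup
   can be cached unconditionally via module-box.  */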
  1592. /* current-module dst:24
  1593. *
  1594. * Store the current module in DST.
  1595. */
  1596. VM_DEFINE_OP (64, current_module, "current-module", OP1 (X8_S24) | OP_DST)
  1597. {
  1598. scm_t_uint32 dst;
  1599. UNPACK_24 (op, dst);
  1600. SYNC_IP ();
  1601. SP_SET (dst, scm_current_module ());
  1602. NEXT (1);
  1603. }
  1604. /* resolve dst:24 bound?:1 _:7 sym:24
  1605. *
  1606. * Resolve SYM in the current module, and place the resulting variable
  1607. * in DST.
  1608. */
  1609. VM_DEFINE_OP (65, resolve, "resolve", OP2 (X8_S24, B1_X7_S24) | OP_DST)
  1610. {
  1611. scm_t_uint32 dst;
  1612. scm_t_uint32 sym;
  1613. SCM var;
  1614. UNPACK_24 (op, dst);
  1615. UNPACK_24 (ip[1], sym);
  1616. SYNC_IP ();
  1617. var = scm_lookup (SP_REF (sym));
  1618. CACHE_SP ();
  1619. if (ip[1] & 0x1)
  1620. VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (SP_REF (sym)));
  1621. SP_SET (dst, var);
  1622. NEXT (2);
  1623. }
  1624. /* define! sym:12 val:12
  1625. *
  1626. * Look up a binding for SYM in the current module, creating it if
  1627. * necessary. Set its value to VAL.
  1628. */
  1629. VM_DEFINE_OP (66, define, "define!", OP1 (X8_S12_S12))
  1630. {
  1631. scm_t_uint16 sym, val;
  1632. UNPACK_12_12 (op, sym, val);
  1633. SYNC_IP ();
  1634. scm_define (SP_REF (sym), SP_REF (val));
  1635. CACHE_SP ();
  1636. NEXT (1);
  1637. }
  1638. /* toplevel-box dst:24 var-offset:32 mod-offset:32 sym-offset:32 bound?:1 _:31
  1639. *
  1640. * Load a SCM value. The SCM value will be fetched from memory,
  1641. * VAR-OFFSET 32-bit words away from the current instruction pointer.
  1642. * VAR-OFFSET is a signed value. Up to here, toplevel-box is like
  1643. * static-ref.
  1644. *
  1645. * Then, if the loaded value is a variable, it is placed in DST, and control
  1646. * flow continues.
  1647. *
  1648. * Otherwise, we have to resolve the variable. In that case we load
  1649. * the module from MOD-OFFSET, just as we loaded the variable.
  1650. * Usually the module gets set when the closure is created. The name
  1651. * is an offset to a symbol.
  1652. *
  1653. * We use the module and the symbol to resolve the variable, placing it in
  1654. * DST, and caching the resolved variable so that we will hit the cache next
  1655. * time.
  1656. */
  1657. VM_DEFINE_OP (67, toplevel_box, "toplevel-box", OP5 (X8_S24, R32, R32, N32, B1_X31) | OP_DST)
  1658. {
  1659. scm_t_uint32 dst;
  1660. scm_t_int32 var_offset;
  1661. scm_t_uint32* var_loc_u32;
  1662. SCM *var_loc;
  1663. SCM var;
  1664. UNPACK_24 (op, dst);
  1665. var_offset = ip[1];
  1666. var_loc_u32 = ip + var_offset;
  1667. VM_ASSERT (ALIGNED_P (var_loc_u32, SCM), abort());
  1668. var_loc = (SCM *) var_loc_u32;
  1669. var = *var_loc;
  1670. if (SCM_UNLIKELY (!SCM_VARIABLEP (var)))
  1671. {
  1672. SCM mod, sym;
  1673. scm_t_int32 mod_offset = ip[2]; /* signed */
  1674. scm_t_int32 sym_offset = ip[3]; /* signed */
  1675. scm_t_uint32 *mod_loc = ip + mod_offset;
  1676. scm_t_uint32 *sym_loc = ip + sym_offset;
  1677. SYNC_IP ();
  1678. VM_ASSERT (ALIGNED_P (mod_loc, SCM), abort());
  1679. VM_ASSERT (ALIGNED_P (sym_loc, SCM), abort());
  1680. mod = *((SCM *) mod_loc);
  1681. sym = *((SCM *) sym_loc);
  1682. /* If the toplevel scope was captured before modules were
  1683. booted, use the root module. */
  1684. if (scm_is_false (mod))
  1685. mod = scm_the_root_module ();
  1686. var = scm_module_lookup (mod, sym);
  1687. CACHE_SP ();
  1688. if (ip[4] & 0x1)
  1689. VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (sym));
  1690. *var_loc = var;
  1691. }
  1692. SP_SET (dst, var);
  1693. NEXT (5);
  1694. }
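/* Illustration of the caching above: the cell at VAR-OFFSET initially
   holds some non-variable value from the object file, so the first
   execution takes the slow path, resolves the binding, and overwrites
   the cell with the variable object; subsequent executions satisfy
   SCM_VARIABLEP and go straight to SP_SET.  */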
  1695. /* module-box dst:24 var-offset:32 mod-offset:32 sym-offset:32 bound?:1 _:31
  1696. *
  1697. * Like toplevel-box, except MOD-OFFSET points at the name of a module
  1698. * instead of the module itself.
  1699. */
  1700. VM_DEFINE_OP (68, module_box, "module-box", OP5 (X8_S24, R32, N32, N32, B1_X31) | OP_DST)
  1701. {
  1702. scm_t_uint32 dst;
  1703. scm_t_int32 var_offset;
  1704. scm_t_uint32* var_loc_u32;
  1705. SCM *var_loc;
  1706. SCM var;
  1707. UNPACK_24 (op, dst);
  1708. var_offset = ip[1];
  1709. var_loc_u32 = ip + var_offset;
  1710. VM_ASSERT (ALIGNED_P (var_loc_u32, SCM), abort());
  1711. var_loc = (SCM *) var_loc_u32;
  1712. var = *var_loc;
  1713. if (SCM_UNLIKELY (!SCM_VARIABLEP (var)))
  1714. {
  1715. SCM modname, sym;
  1716. scm_t_int32 modname_offset = ip[2]; /* signed */
  1717. scm_t_int32 sym_offset = ip[3]; /* signed */
  1718. scm_t_uint32 *modname_words = ip + modname_offset;
  1719. scm_t_uint32 *sym_loc = ip + sym_offset;
  1720. SYNC_IP ();
  1721. VM_ASSERT (!(((scm_t_uintptr) modname_words) & 0x7), abort());
  1722. VM_ASSERT (ALIGNED_P (sym_loc, SCM), abort());
  1723. modname = SCM_PACK ((scm_t_bits) modname_words);
  1724. sym = *((SCM *) sym_loc);
  1725. if (!scm_module_system_booted_p)
  1726. {
1727. ASSERT (scm_is_true
1728. (scm_equal_p (modname,
1729. scm_list_2
1730. (SCM_BOOL_T,
1731. scm_from_utf8_symbol ("guile")))));
  1732. var = scm_lookup (sym);
  1733. }
  1734. else if (scm_is_true (SCM_CAR (modname)))
  1735. var = scm_public_lookup (SCM_CDR (modname), sym);
  1736. else
  1737. var = scm_private_lookup (SCM_CDR (modname), sym);
  1738. CACHE_SP ();
  1739. if (ip[4] & 0x1)
  1740. VM_ASSERT (VARIABLE_BOUNDP (var), vm_error_unbound (sym));
  1741. *var_loc = var;
  1742. }
  1743. SP_SET (dst, var);
  1744. NEXT (5);
  1745. }
  1746. /*
  1747. * The dynamic environment
  1748. */
  1749. /* prompt tag:24 escape-only?:1 _:7 proc-slot:24 _:8 handler-offset:24
  1750. *
  1751. * Push a new prompt on the dynamic stack, with a tag from TAG and a
  1752. * handler at HANDLER-OFFSET words from the current IP. The handler
  1753. * will expect a multiple-value return as if from a call with the
  1754. * procedure at PROC-SLOT.
  1755. */
  1756. VM_DEFINE_OP (69, prompt, "prompt", OP3 (X8_S24, B1_X7_F24, X8_L24))
  1757. {
  1758. scm_t_uint32 tag, proc_slot;
  1759. scm_t_int32 offset;
  1760. scm_t_uint8 escape_only_p;
  1761. scm_t_dynstack_prompt_flags flags;
  1762. UNPACK_24 (op, tag);
  1763. escape_only_p = ip[1] & 0x1;
  1764. UNPACK_24 (ip[1], proc_slot);
  1765. offset = ip[2];
  1766. offset >>= 8; /* Sign extension */
  1767. /* Push the prompt onto the dynamic stack. */
  1768. flags = escape_only_p ? SCM_F_DYNSTACK_PROMPT_ESCAPE_ONLY : 0;
  1769. scm_dynstack_push_prompt (&thread->dynstack, flags,
  1770. SP_REF (tag),
  1771. vp->stack_top - vp->fp,
  1772. vp->stack_top - FP_SLOT (proc_slot),
  1773. ip + offset,
  1774. registers);
  1775. NEXT (3);
  1776. }
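/* A rough sketch of the usual pairing:

       (call-with-prompt 'tag
         (lambda () (abort-to-prompt 'tag 42))
         (lambda (k v) v))

   pushes a prompt like the one above around the body thunk; an
   abort-to-prompt with a matching tag unwinds to the saved frame and
   jumps to the handler code at HANDLER-OFFSET, which receives the
   continuation and the aborted values as if returned to PROC-SLOT.  */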
  1777. /* wind winder:12 unwinder:12
  1778. *
  1779. * Push wind and unwind procedures onto the dynamic stack. Note that
  1780. * neither are actually called; the compiler should emit calls to wind
  1781. * and unwind for the normal dynamic-wind control flow. Also note that
1782. * the compiler should have inserted checks that the wind and unwind
  1783. * procs are thunks, if it could not prove that to be the case.
  1784. */
  1785. VM_DEFINE_OP (70, wind, "wind", OP1 (X8_S12_S12))
  1786. {
  1787. scm_t_uint16 winder, unwinder;
  1788. UNPACK_12_12 (op, winder, unwinder);
  1789. scm_dynstack_push_dynwind (&thread->dynstack,
  1790. SP_REF (winder), SP_REF (unwinder));
  1791. NEXT (1);
  1792. }
  1793. /* unwind _:24
  1794. *
  1795. * A normal exit from the dynamic extent of an expression. Pop the top
  1796. * entry off of the dynamic stack.
  1797. */
  1798. VM_DEFINE_OP (71, unwind, "unwind", OP1 (X32))
  1799. {
  1800. scm_dynstack_pop (&thread->dynstack);
  1801. NEXT (1);
  1802. }
  1803. /* push-fluid fluid:12 value:12
  1804. *
  1805. * Dynamically bind VALUE to FLUID.
  1806. */
  1807. VM_DEFINE_OP (72, push_fluid, "push-fluid", OP1 (X8_S12_S12))
  1808. {
  1809. scm_t_uint32 fluid, value;
  1810. UNPACK_12_12 (op, fluid, value);
  1811. scm_dynstack_push_fluid (&thread->dynstack,
  1812. SP_REF (fluid), SP_REF (value),
  1813. thread->dynamic_state);
  1814. NEXT (1);
  1815. }
  1816. /* pop-fluid _:24
  1817. *
  1818. * Leave the dynamic extent of a with-fluid* expression, restoring the
  1819. * fluid to its previous value.
  1820. */
  1821. VM_DEFINE_OP (73, pop_fluid, "pop-fluid", OP1 (X32))
  1822. {
  1823. /* This function must not allocate. */
  1824. scm_dynstack_unwind_fluid (&thread->dynstack,
  1825. thread->dynamic_state);
  1826. NEXT (1);
  1827. }
  1828. /* fluid-ref dst:12 src:12
  1829. *
  1830. * Reference the fluid in SRC, and place the value in DST.
  1831. */
  1832. VM_DEFINE_OP (74, fluid_ref, "fluid-ref", OP1 (X8_S12_S12) | OP_DST)
  1833. {
  1834. scm_t_uint16 dst, src;
  1835. size_t num;
  1836. SCM fluid, fluids;
  1837. UNPACK_12_12 (op, dst, src);
  1838. fluid = SP_REF (src);
  1839. fluids = SCM_I_DYNAMIC_STATE_FLUIDS (thread->dynamic_state);
  1840. if (SCM_UNLIKELY (!SCM_FLUID_P (fluid))
  1841. || ((num = SCM_I_FLUID_NUM (fluid)) >= SCM_SIMPLE_VECTOR_LENGTH (fluids)))
  1842. {
  1843. /* Punt dynstate expansion and error handling to the C proc. */
  1844. SYNC_IP ();
  1845. SP_SET (dst, scm_fluid_ref (fluid));
  1846. }
  1847. else
  1848. {
  1849. SCM val = SCM_SIMPLE_VECTOR_REF (fluids, num);
  1850. if (scm_is_eq (val, SCM_UNDEFINED))
  1851. val = SCM_I_FLUID_DEFAULT (fluid);
  1852. VM_ASSERT (!scm_is_eq (val, SCM_UNDEFINED),
  1853. vm_error_unbound_fluid (fluid));
  1854. SP_SET (dst, val);
  1855. }
  1856. NEXT (1);
  1857. }
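/* For illustration:

       (define f (make-fluid 42))
       (fluid-ref f)

   takes the fast path above, indexing the dynamic state's fluid vector
   directly; the scm_fluid_ref fallback is only used when the operand is
   not a fluid or when the fluid's number lies beyond the current vector
   length.  */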
  1858. /* fluid-set fluid:12 val:12
  1859. *
1860. * Set the value of the fluid in FLUID to the value in VAL.
  1861. */
  1862. VM_DEFINE_OP (75, fluid_set, "fluid-set", OP1 (X8_S12_S12))
  1863. {
  1864. scm_t_uint16 a, b;
  1865. size_t num;
  1866. SCM fluid, fluids;
  1867. UNPACK_12_12 (op, a, b);
  1868. fluid = SP_REF (a);
  1869. fluids = SCM_I_DYNAMIC_STATE_FLUIDS (thread->dynamic_state);
  1870. if (SCM_UNLIKELY (!SCM_FLUID_P (fluid))
  1871. || ((num = SCM_I_FLUID_NUM (fluid)) >= SCM_SIMPLE_VECTOR_LENGTH (fluids)))
  1872. {
  1873. /* Punt dynstate expansion and error handling to the C proc. */
  1874. SYNC_IP ();
  1875. scm_fluid_set_x (fluid, SP_REF (b));
  1876. }
  1877. else
  1878. SCM_SIMPLE_VECTOR_SET (fluids, num, SP_REF (b));
  1879. NEXT (1);
  1880. }
  1881. /*
  1882. * Strings, symbols, and keywords
  1883. */
  1884. /* string-length dst:12 src:12
  1885. *
  1886. * Store the length of the string in SRC in DST.
  1887. */
  1888. VM_DEFINE_OP (76, string_length, "string-length", OP1 (X8_S12_S12) | OP_DST)
  1889. {
  1890. ARGS1 (str);
  1891. VM_ASSERT (scm_is_string (str),
  1892. vm_error_not_a_string ("string-length", str));
  1893. SP_SET_U64 (dst, scm_i_string_length (str));
  1894. NEXT (1);
  1895. }
  1896. /* string-ref dst:8 src:8 idx:8
  1897. *
  1898. * Fetch the character at position IDX in the string in SRC, and store
  1899. * it in DST.
  1900. */
  1901. VM_DEFINE_OP (77, string_ref, "string-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  1902. {
  1903. scm_t_uint8 dst, src, idx;
  1904. SCM str;
  1905. scm_t_uint32 c_idx;
  1906. UNPACK_8_8_8 (op, dst, src, idx);
  1907. str = SP_REF (src);
  1908. c_idx = SP_REF_U64 (idx);
  1909. VM_ASSERT (scm_is_string (str),
  1910. vm_error_not_a_string ("string-ref", str));
  1911. VM_ASSERT (c_idx < scm_i_string_length (str),
  1912. vm_error_out_of_range_uint64 ("string-ref", c_idx));
  1913. RETURN (SCM_MAKE_CHAR (scm_i_string_ref (str, c_idx)));
  1914. }
  1915. /* No string-set! instruction, as there is no good fast path there. */
  1916. /* string->number dst:12 src:12
  1917. *
  1918. * Parse a string in SRC to a number, and store in DST.
  1919. */
  1920. VM_DEFINE_OP (78, string_to_number, "string->number", OP1 (X8_S12_S12) | OP_DST)
  1921. {
  1922. scm_t_uint16 dst, src;
  1923. UNPACK_12_12 (op, dst, src);
  1924. SYNC_IP ();
  1925. SP_SET (dst,
  1926. scm_string_to_number (SP_REF (src),
  1927. SCM_UNDEFINED /* radix = 10 */));
  1928. NEXT (1);
  1929. }
  1930. /* string->symbol dst:12 src:12
  1931. *
  1932. * Parse a string in SRC to a symbol, and store in DST.
  1933. */
  1934. VM_DEFINE_OP (79, string_to_symbol, "string->symbol", OP1 (X8_S12_S12) | OP_DST)
  1935. {
  1936. scm_t_uint16 dst, src;
  1937. UNPACK_12_12 (op, dst, src);
  1938. SYNC_IP ();
  1939. SP_SET (dst, scm_string_to_symbol (SP_REF (src)));
  1940. NEXT (1);
  1941. }
  1942. /* symbol->keyword dst:12 src:12
  1943. *
  1944. * Make a keyword from the symbol in SRC, and store it in DST.
  1945. */
  1946. VM_DEFINE_OP (80, symbol_to_keyword, "symbol->keyword", OP1 (X8_S12_S12) | OP_DST)
  1947. {
  1948. scm_t_uint16 dst, src;
  1949. UNPACK_12_12 (op, dst, src);
  1950. SYNC_IP ();
  1951. SP_SET (dst, scm_symbol_to_keyword (SP_REF (src)));
  1952. NEXT (1);
  1953. }
  1954. /*
  1955. * Pairs
  1956. */
  1957. /* cons dst:8 car:8 cdr:8
  1958. *
  1959. * Cons CAR and CDR, and store the result in DST.
  1960. */
  1961. VM_DEFINE_OP (81, cons, "cons", OP1 (X8_S8_S8_S8) | OP_DST)
  1962. {
  1963. ARGS2 (x, y);
  1964. RETURN (scm_inline_cons (thread, x, y));
  1965. }
  1966. /* car dst:12 src:12
  1967. *
  1968. * Place the car of SRC in DST.
  1969. */
  1970. VM_DEFINE_OP (82, car, "car", OP1 (X8_S12_S12) | OP_DST)
  1971. {
  1972. ARGS1 (x);
  1973. VM_VALIDATE_PAIR (x, "car");
  1974. RETURN (SCM_CAR (x));
  1975. }
  1976. /* cdr dst:12 src:12
  1977. *
  1978. * Place the cdr of SRC in DST.
  1979. */
  1980. VM_DEFINE_OP (83, cdr, "cdr", OP1 (X8_S12_S12) | OP_DST)
  1981. {
  1982. ARGS1 (x);
  1983. VM_VALIDATE_PAIR (x, "cdr");
  1984. RETURN (SCM_CDR (x));
  1985. }
  1986. /* set-car! pair:12 car:12
  1987. *
1988. * Set the car of PAIR to CAR.
  1989. */
  1990. VM_DEFINE_OP (84, set_car, "set-car!", OP1 (X8_S12_S12))
  1991. {
  1992. scm_t_uint16 a, b;
  1993. SCM x, y;
  1994. UNPACK_12_12 (op, a, b);
  1995. x = SP_REF (a);
  1996. y = SP_REF (b);
  1997. VM_VALIDATE_PAIR (x, "set-car!");
  1998. SCM_SETCAR (x, y);
  1999. NEXT (1);
  2000. }
  2001. /* set-cdr! pair:12 cdr:12
  2002. *
2003. * Set the cdr of PAIR to CDR.
  2004. */
  2005. VM_DEFINE_OP (85, set_cdr, "set-cdr!", OP1 (X8_S12_S12))
  2006. {
  2007. scm_t_uint16 a, b;
  2008. SCM x, y;
  2009. UNPACK_12_12 (op, a, b);
  2010. x = SP_REF (a);
  2011. y = SP_REF (b);
  2012. VM_VALIDATE_PAIR (x, "set-car!");
  2013. SCM_SETCDR (x, y);
  2014. NEXT (1);
  2015. }
  2016. /*
  2017. * Numeric operations
  2018. */
  2019. /* add dst:8 a:8 b:8
  2020. *
  2021. * Add A to B, and place the result in DST.
  2022. */
  2023. VM_DEFINE_OP (86, add, "add", OP1 (X8_S8_S8_S8) | OP_DST)
  2024. {
  2025. BINARY_INTEGER_OP (+, scm_sum);
  2026. }
  2027. /* add/immediate dst:8 src:8 imm:8
  2028. *
  2029. * Add the unsigned 8-bit value IMM to the value from SRC, and place
  2030. * the result in DST.
  2031. */
  2032. VM_DEFINE_OP (87, add_immediate, "add/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
  2033. {
  2034. scm_t_uint8 dst, src, imm;
  2035. SCM x;
  2036. UNPACK_8_8_8 (op, dst, src, imm);
  2037. x = SP_REF (src);
  2038. if (SCM_LIKELY (SCM_I_INUMP (x)))
  2039. {
  2040. scm_t_signed_bits sum = SCM_I_INUM (x) + (scm_t_signed_bits) imm;
  2041. if (SCM_LIKELY (SCM_POSFIXABLE (sum)))
  2042. RETURN (SCM_I_MAKINUM (sum));
  2043. }
  2044. RETURN_EXP (scm_sum (x, SCM_I_MAKINUM (imm)));
  2045. }
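/* For illustration: an expression like (+ x 1) with X holding
   most-positive-fixnum fails the SCM_POSFIXABLE check above and falls
   through to scm_sum, which allocates the bignum result; small counters
   never leave the inline fixnum path.  */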
  2046. /* sub dst:8 a:8 b:8
  2047. *
  2048. * Subtract B from A, and place the result in DST.
  2049. */
  2050. VM_DEFINE_OP (88, sub, "sub", OP1 (X8_S8_S8_S8) | OP_DST)
  2051. {
  2052. BINARY_INTEGER_OP (-, scm_difference);
  2053. }
  2054. /* sub/immediate dst:8 src:8 imm:8
  2055. *
  2056. * Subtract the unsigned 8-bit value IMM from the value in SRC, and
  2057. * place the result in DST.
  2058. */
  2059. VM_DEFINE_OP (89, sub_immediate, "sub/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
  2060. {
  2061. scm_t_uint8 dst, src, imm;
  2062. SCM x;
  2063. UNPACK_8_8_8 (op, dst, src, imm);
  2064. x = SP_REF (src);
  2065. if (SCM_LIKELY (SCM_I_INUMP (x)))
  2066. {
  2067. scm_t_signed_bits diff = SCM_I_INUM (x) - (scm_t_signed_bits) imm;
  2068. if (SCM_LIKELY (SCM_NEGFIXABLE (diff)))
  2069. RETURN (SCM_I_MAKINUM (diff));
  2070. }
  2071. RETURN_EXP (scm_difference (x, SCM_I_MAKINUM (imm)));
  2072. }
  2073. /* mul dst:8 a:8 b:8
  2074. *
  2075. * Multiply A and B, and place the result in DST.
  2076. */
  2077. VM_DEFINE_OP (90, mul, "mul", OP1 (X8_S8_S8_S8) | OP_DST)
  2078. {
  2079. ARGS2 (x, y);
  2080. RETURN_EXP (scm_product (x, y));
  2081. }
  2082. /* div dst:8 a:8 b:8
  2083. *
  2084. * Divide A by B, and place the result in DST.
  2085. */
  2086. VM_DEFINE_OP (91, div, "div", OP1 (X8_S8_S8_S8) | OP_DST)
  2087. {
  2088. ARGS2 (x, y);
  2089. RETURN_EXP (scm_divide (x, y));
  2090. }
  2091. /* quo dst:8 a:8 b:8
  2092. *
  2093. * Divide A by B, and place the quotient in DST.
  2094. */
  2095. VM_DEFINE_OP (92, quo, "quo", OP1 (X8_S8_S8_S8) | OP_DST)
  2096. {
  2097. ARGS2 (x, y);
  2098. RETURN_EXP (scm_quotient (x, y));
  2099. }
  2100. /* rem dst:8 a:8 b:8
  2101. *
  2102. * Divide A by B, and place the remainder in DST.
  2103. */
  2104. VM_DEFINE_OP (93, rem, "rem", OP1 (X8_S8_S8_S8) | OP_DST)
  2105. {
  2106. ARGS2 (x, y);
  2107. RETURN_EXP (scm_remainder (x, y));
  2108. }
  2109. /* mod dst:8 a:8 b:8
  2110. *
  2111. * Place the modulo of A by B in DST.
  2112. */
  2113. VM_DEFINE_OP (94, mod, "mod", OP1 (X8_S8_S8_S8) | OP_DST)
  2114. {
  2115. ARGS2 (x, y);
  2116. RETURN_EXP (scm_modulo (x, y));
  2117. }
  2118. /* ash dst:8 a:8 b:8
  2119. *
  2120. * Shift A arithmetically by B bits, and place the result in DST.
  2121. */
  2122. VM_DEFINE_OP (95, ash, "ash", OP1 (X8_S8_S8_S8) | OP_DST)
  2123. {
  2124. ARGS2 (x, y);
  2125. if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
  2126. {
  2127. if (SCM_I_INUM (y) < 0)
  2128. /* Right shift, will be a fixnum. */
  2129. RETURN (SCM_I_MAKINUM
  2130. (SCM_SRS (SCM_I_INUM (x),
  2131. (-SCM_I_INUM (y) <= SCM_I_FIXNUM_BIT-1)
  2132. ? -SCM_I_INUM (y) : SCM_I_FIXNUM_BIT-1)));
  2133. else
  2134. /* Left shift. See comments in scm_ash. */
  2135. {
  2136. scm_t_signed_bits nn, bits_to_shift;
  2137. nn = SCM_I_INUM (x);
  2138. bits_to_shift = SCM_I_INUM (y);
  2139. if (bits_to_shift < SCM_I_FIXNUM_BIT-1
  2140. && ((scm_t_bits)
  2141. (SCM_SRS (nn, (SCM_I_FIXNUM_BIT-1 - bits_to_shift)) + 1)
  2142. <= 1))
  2143. RETURN (SCM_I_MAKINUM (nn < 0
  2144. ? -(-nn << bits_to_shift)
  2145. : (nn << bits_to_shift)));
  2146. /* fall through */
  2147. }
  2148. /* fall through */
  2149. }
  2150. RETURN_EXP (scm_ash (x, y));
  2151. }
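/* The left-shift guard above takes the inline path only when the bits
   that would be shifted out are all copies of the sign bit, i.e. when
   the result still fits in a fixnum.  For example, on a 64-bit build
   (ash 1 10) stays inline, while (ash 1 62) falls through to scm_ash
   and yields a bignum.  */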
  2152. /* logand dst:8 a:8 b:8
  2153. *
  2154. * Place the bitwise AND of A and B into DST.
  2155. */
  2156. VM_DEFINE_OP (96, logand, "logand", OP1 (X8_S8_S8_S8) | OP_DST)
  2157. {
  2158. ARGS2 (x, y);
  2159. if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
  2160. /* Compute bitwise AND without untagging */
  2161. RETURN (SCM_PACK (SCM_UNPACK (x) & SCM_UNPACK (y)));
  2162. RETURN_EXP (scm_logand (x, y));
  2163. }
  2164. /* logior dst:8 a:8 b:8
  2165. *
  2166. * Place the bitwise inclusive OR of A with B in DST.
  2167. */
  2168. VM_DEFINE_OP (97, logior, "logior", OP1 (X8_S8_S8_S8) | OP_DST)
  2169. {
  2170. ARGS2 (x, y);
  2171. if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
  2172. /* Compute bitwise OR without untagging */
  2173. RETURN (SCM_PACK (SCM_UNPACK (x) | SCM_UNPACK (y)));
  2174. RETURN_EXP (scm_logior (x, y));
  2175. }
  2176. /* logxor dst:8 a:8 b:8
  2177. *
  2178. * Place the bitwise exclusive OR of A with B in DST.
  2179. */
  2180. VM_DEFINE_OP (98, logxor, "logxor", OP1 (X8_S8_S8_S8) | OP_DST)
  2181. {
  2182. ARGS2 (x, y);
  2183. if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
  2184. RETURN (SCM_I_MAKINUM (SCM_I_INUM (x) ^ SCM_I_INUM (y)));
  2185. RETURN_EXP (scm_logxor (x, y));
  2186. }
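/* Why the three fast paths above differ: a fixnum N is encoded as
   (N << 2) | scm_tc2_int, with scm_tc2_int being 2, so for two fixnums

       (A<<2 | 2) & (B<<2 | 2) == ((A & B) << 2) | 2
       (A<<2 | 2) | (B<<2 | 2) == ((A | B) << 2) | 2
       (A<<2 | 2) ^ (B<<2 | 2) ==  (A ^ B) << 2

   AND and OR of the tagged words are therefore already valid fixnums,
   while XOR clears the tag, which is why logxor untags with SCM_I_INUM
   and retags with SCM_I_MAKINUM.  */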
  2187. /* make-vector dst:8 length:8 init:8
  2188. *
  2189. * Make a vector and write it to DST. The vector will have space for
  2190. * LENGTH slots. They will be filled with the value in slot INIT.
  2191. */
  2192. VM_DEFINE_OP (99, make_vector, "make-vector", OP1 (X8_S8_S8_S8) | OP_DST)
  2193. {
  2194. scm_t_uint8 dst, length, init;
  2195. scm_t_uint64 length_val;
  2196. UNPACK_8_8_8 (op, dst, length, init);
  2197. length_val = SP_REF_U64 (length);
  2198. VM_ASSERT (length_val < (size_t) -1,
  2199. vm_error_out_of_range_uint64 ("make-vector", length_val));
  2200. /* TODO: Inline this allocation. */
  2201. SYNC_IP ();
  2202. SP_SET (dst, scm_c_make_vector (length_val, SP_REF (init)));
  2203. NEXT (1);
  2204. }
  2205. /* make-vector/immediate dst:8 length:8 init:8
  2206. *
  2207. * Make a short vector of known size and write it to DST. The vector
  2208. * will have space for LENGTH slots, an immediate value. They will be
  2209. * filled with the value in slot INIT.
  2210. */
  2211. VM_DEFINE_OP (100, make_vector_immediate, "make-vector/immediate", OP1 (X8_S8_C8_S8) | OP_DST)
  2212. {
  2213. scm_t_uint8 dst, init;
  2214. scm_t_int32 length, n;
  2215. SCM val, vector;
  2216. UNPACK_8_8_8 (op, dst, length, init);
  2217. val = SP_REF (init);
  2218. vector = scm_inline_words (thread, scm_tc7_vector | (length << 8),
  2219. length + 1);
  2220. for (n = 0; n < length; n++)
  2221. SCM_SIMPLE_VECTOR_SET (vector, n, val);
  2222. SP_SET (dst, vector);
  2223. NEXT (1);
  2224. }
  2225. /* vector-length dst:12 src:12
  2226. *
  2227. * Store the length of the vector in SRC in DST.
  2228. */
  2229. VM_DEFINE_OP (101, vector_length, "vector-length", OP1 (X8_S12_S12) | OP_DST)
  2230. {
  2231. ARGS1 (vect);
  2232. VM_ASSERT (SCM_I_IS_VECTOR (vect),
  2233. vm_error_not_a_vector ("vector-ref", vect));
  2234. SP_SET_U64 (dst, SCM_I_VECTOR_LENGTH (vect));
  2235. NEXT (1);
  2236. }
  2237. /* vector-ref dst:8 src:8 idx:8
  2238. *
  2239. * Fetch the item at position IDX in the vector in SRC, and store it
  2240. * in DST.
  2241. */
  2242. VM_DEFINE_OP (102, vector_ref, "vector-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2243. {
  2244. scm_t_uint8 dst, src, idx;
  2245. SCM vect;
  2246. scm_t_uint64 c_idx;
  2247. UNPACK_8_8_8 (op, dst, src, idx);
  2248. vect = SP_REF (src);
  2249. c_idx = SP_REF_U64 (idx);
  2250. VM_ASSERT (SCM_I_IS_VECTOR (vect),
  2251. vm_error_not_a_vector ("vector-ref", vect));
  2252. VM_ASSERT (c_idx < SCM_I_VECTOR_LENGTH (vect),
  2253. vm_error_out_of_range_uint64 ("vector-ref", c_idx));
  2254. RETURN (SCM_I_VECTOR_ELTS (vect)[c_idx]);
  2255. }
  2256. /* vector-ref/immediate dst:8 src:8 idx:8
  2257. *
2258. * Fetch the item at immediate position IDX in the vector in SRC, and
2259. * store it in DST. Useful for building data types using vectors.
  2260. */
  2261. VM_DEFINE_OP (103, vector_ref_immediate, "vector-ref/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
  2262. {
  2263. scm_t_uint8 dst, src, idx;
  2264. SCM vect;
  2265. UNPACK_8_8_8 (op, dst, src, idx);
  2266. vect = SP_REF (src);
  2267. VM_ASSERT (SCM_I_IS_VECTOR (vect),
  2268. vm_error_not_a_vector ("vector-ref", vect));
  2269. VM_ASSERT (idx < SCM_I_VECTOR_LENGTH (vect),
  2270. vm_error_out_of_range_uint64 ("vector-ref", idx));
  2271. SP_SET (dst, SCM_I_VECTOR_ELTS (vect)[idx]);
  2272. NEXT (1);
  2273. }
  2274. /* vector-set! dst:8 idx:8 src:8
  2275. *
  2276. * Store SRC into the vector DST at index IDX.
  2277. */
  2278. VM_DEFINE_OP (104, vector_set, "vector-set!", OP1 (X8_S8_S8_S8))
  2279. {
  2280. scm_t_uint8 dst, idx, src;
  2281. SCM vect, val;
  2282. scm_t_uint64 c_idx;
  2283. UNPACK_8_8_8 (op, dst, idx, src);
  2284. vect = SP_REF (dst);
  2285. c_idx = SP_REF_U64 (idx);
  2286. val = SP_REF (src);
  2287. VM_ASSERT (SCM_I_IS_VECTOR (vect),
  2288. vm_error_not_a_vector ("vector-set!", vect));
  2289. VM_ASSERT (c_idx < SCM_I_VECTOR_LENGTH (vect),
  2290. vm_error_out_of_range_uint64 ("vector-set!", c_idx));
  2291. SCM_I_VECTOR_WELTS (vect)[c_idx] = val;
  2292. NEXT (1);
  2293. }
  2294. /* vector-set!/immediate dst:8 idx:8 src:8
  2295. *
  2296. * Store SRC into the vector DST at index IDX. Here IDX is an
  2297. * immediate value.
  2298. */
  2299. VM_DEFINE_OP (105, vector_set_immediate, "vector-set!/immediate", OP1 (X8_S8_C8_S8))
  2300. {
  2301. scm_t_uint8 dst, idx, src;
  2302. SCM vect, val;
  2303. UNPACK_8_8_8 (op, dst, idx, src);
  2304. vect = SP_REF (dst);
  2305. val = SP_REF (src);
2306. VM_ASSERT (SCM_I_IS_VECTOR (vect),
2307. vm_error_not_a_vector ("vector-set!", vect));
2308. VM_ASSERT (idx < SCM_I_VECTOR_LENGTH (vect),
2309. vm_error_out_of_range_uint64 ("vector-set!", idx));
  2310. SCM_I_VECTOR_WELTS (vect)[idx] = val;
  2311. NEXT (1);
  2312. }
  2313. /*
  2314. * Structs and GOOPS
  2315. */
  2316. /* struct-vtable dst:12 src:12
  2317. *
  2318. * Store the vtable of SRC into DST.
  2319. */
  2320. VM_DEFINE_OP (106, struct_vtable, "struct-vtable", OP1 (X8_S12_S12) | OP_DST)
  2321. {
  2322. ARGS1 (obj);
2323. VM_VALIDATE_STRUCT (obj, "struct-vtable");
  2324. RETURN (SCM_STRUCT_VTABLE (obj));
  2325. }
  2326. /* allocate-struct dst:8 vtable:8 nfields:8
  2327. *
  2328. * Allocate a new struct with VTABLE, and place it in DST. The struct
  2329. * will be constructed with space for NFIELDS fields, which should
  2330. * correspond to the field count of the VTABLE.
  2331. */
  2332. VM_DEFINE_OP (107, allocate_struct, "allocate-struct", OP1 (X8_S8_S8_S8) | OP_DST)
  2333. {
  2334. scm_t_uint8 dst, vtable, nfields;
  2335. SCM ret;
  2336. UNPACK_8_8_8 (op, dst, vtable, nfields);
  2337. /* TODO: Specify nfields as untagged value when calling
  2338. allocate-struct. */
  2339. SYNC_IP ();
  2340. ret = scm_allocate_struct (SP_REF (vtable),
  2341. scm_from_uint64 (SP_REF_U64 (nfields)));
  2342. SP_SET (dst, ret);
  2343. NEXT (1);
  2344. }
  2345. /* struct-ref dst:8 src:8 idx:8
  2346. *
  2347. * Fetch the item at slot IDX in the struct in SRC, and store it
  2348. * in DST.
  2349. */
  2350. VM_DEFINE_OP (108, struct_ref, "struct-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2351. {
  2352. scm_t_uint8 dst, src, idx;
  2353. SCM obj;
  2354. scm_t_uint64 index;
  2355. UNPACK_8_8_8 (op, dst, src, idx);
  2356. obj = SP_REF (src);
  2357. index = SP_REF_U64 (idx);
  2358. if (SCM_LIKELY (SCM_STRUCTP (obj)
  2359. && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
  2360. SCM_VTABLE_FLAG_SIMPLE)
  2361. && index < (SCM_STRUCT_DATA_REF (SCM_STRUCT_VTABLE (obj),
  2362. scm_vtable_index_size))))
  2363. RETURN (SCM_STRUCT_SLOT_REF (obj, index));
  2364. SYNC_IP ();
  2365. RETURN (scm_struct_ref (obj, scm_from_uint64 (index)));
  2366. }
  2367. /* struct-set! dst:8 idx:8 src:8
  2368. *
  2369. * Store SRC into the struct DST at slot IDX.
  2370. */
  2371. VM_DEFINE_OP (109, struct_set, "struct-set!", OP1 (X8_S8_S8_S8))
  2372. {
  2373. scm_t_uint8 dst, idx, src;
  2374. SCM obj, val;
  2375. scm_t_uint64 index;
  2376. UNPACK_8_8_8 (op, dst, idx, src);
  2377. obj = SP_REF (dst);
  2378. val = SP_REF (src);
  2379. index = SP_REF_U64 (idx);
  2380. if (SCM_LIKELY (SCM_STRUCTP (obj)
  2381. && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
  2382. SCM_VTABLE_FLAG_SIMPLE)
  2383. && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
  2384. SCM_VTABLE_FLAG_SIMPLE_RW)
  2385. && index < (SCM_STRUCT_DATA_REF (SCM_STRUCT_VTABLE (obj),
  2386. scm_vtable_index_size))))
  2387. {
  2388. SCM_STRUCT_SLOT_SET (obj, index, val);
  2389. NEXT (1);
  2390. }
  2391. SYNC_IP ();
  2392. scm_struct_set_x (obj, scm_from_uint64 (index), val);
  2393. NEXT (1);
  2394. }
  2395. /* allocate-struct/immediate dst:8 vtable:8 nfields:8
  2396. *
  2397. * Allocate a new struct with VTABLE, and place it in DST. The struct
  2398. * will be constructed with space for NFIELDS fields, which should
  2399. * correspond to the field count of the VTABLE.
  2400. */
  2401. VM_DEFINE_OP (110, allocate_struct_immediate, "allocate-struct/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
  2402. {
  2403. scm_t_uint8 dst, vtable, nfields;
  2404. SCM ret;
  2405. UNPACK_8_8_8 (op, dst, vtable, nfields);
  2406. SYNC_IP ();
  2407. ret = scm_allocate_struct (SP_REF (vtable), SCM_I_MAKINUM (nfields));
  2408. SP_SET (dst, ret);
  2409. NEXT (1);
  2410. }
  2411. /* struct-ref/immediate dst:8 src:8 idx:8
  2412. *
  2413. * Fetch the item at slot IDX in the struct in SRC, and store it
  2414. * in DST. IDX is an immediate unsigned 8-bit value.
  2415. */
  2416. VM_DEFINE_OP (111, struct_ref_immediate, "struct-ref/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
  2417. {
  2418. scm_t_uint8 dst, src, idx;
  2419. SCM obj;
  2420. UNPACK_8_8_8 (op, dst, src, idx);
  2421. obj = SP_REF (src);
  2422. if (SCM_LIKELY (SCM_STRUCTP (obj)
  2423. && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
  2424. SCM_VTABLE_FLAG_SIMPLE)
  2425. && idx < SCM_STRUCT_DATA_REF (SCM_STRUCT_VTABLE (obj),
  2426. scm_vtable_index_size)))
  2427. RETURN (SCM_STRUCT_SLOT_REF (obj, idx));
  2428. SYNC_IP ();
  2429. RETURN (scm_struct_ref (obj, SCM_I_MAKINUM (idx)));
  2430. }
  2431. /* struct-set!/immediate dst:8 idx:8 src:8
  2432. *
  2433. * Store SRC into the struct DST at slot IDX. IDX is an immediate
  2434. * unsigned 8-bit value.
  2435. */
  2436. VM_DEFINE_OP (112, struct_set_immediate, "struct-set!/immediate", OP1 (X8_S8_C8_S8))
  2437. {
  2438. scm_t_uint8 dst, idx, src;
  2439. SCM obj, val;
  2440. UNPACK_8_8_8 (op, dst, idx, src);
  2441. obj = SP_REF (dst);
  2442. val = SP_REF (src);
  2443. if (SCM_LIKELY (SCM_STRUCTP (obj)
  2444. && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
  2445. SCM_VTABLE_FLAG_SIMPLE)
  2446. && SCM_STRUCT_VTABLE_FLAG_IS_SET (obj,
  2447. SCM_VTABLE_FLAG_SIMPLE_RW)
  2448. && idx < SCM_STRUCT_DATA_REF (SCM_STRUCT_VTABLE (obj),
  2449. scm_vtable_index_size)))
  2450. {
  2451. SCM_STRUCT_SLOT_SET (obj, idx, val);
  2452. NEXT (1);
  2453. }
  2454. SYNC_IP ();
  2455. scm_struct_set_x (obj, SCM_I_MAKINUM (idx), val);
  2456. NEXT (1);
  2457. }
  2458. /* class-of dst:12 type:12
  2459. *
2460. * Store the class of the value in TYPE into DST.
  2461. */
  2462. VM_DEFINE_OP (113, class_of, "class-of", OP1 (X8_S12_S12) | OP_DST)
  2463. {
  2464. ARGS1 (obj);
  2465. if (SCM_INSTANCEP (obj))
  2466. RETURN (SCM_CLASS_OF (obj));
  2467. SYNC_IP ();
  2468. RETURN (scm_class_of (obj));
  2469. }
  2470. /*
  2471. * Arrays, packed uniform arrays, and bytevectors.
  2472. */
  2473. /* load-typed-array dst:24 _:8 type:24 _:8 shape:24 offset:32 len:32
  2474. *
  2475. * Load the contiguous typed array located at OFFSET 32-bit words away
  2476. * from the instruction pointer, and store into DST. LEN is a byte
  2477. * length. OFFSET is signed.
  2478. */
  2479. VM_DEFINE_OP (114, load_typed_array, "load-typed-array", OP5 (X8_S24, X8_S24, X8_S24, N32, C32) | OP_DST)
  2480. {
  2481. scm_t_uint32 dst, type, shape;
  2482. scm_t_int32 offset;
  2483. scm_t_uint32 len;
  2484. UNPACK_24 (op, dst);
  2485. UNPACK_24 (ip[1], type);
  2486. UNPACK_24 (ip[2], shape);
  2487. offset = ip[3];
  2488. len = ip[4];
  2489. SYNC_IP ();
  2490. SP_SET (dst, scm_from_contiguous_typed_array (SP_REF (type),
  2491. SP_REF (shape),
  2492. ip + offset, len));
  2493. NEXT (5);
  2494. }
  2495. /* make-array dst:24 _:8 type:24 _:8 fill:24 _:8 bounds:24
  2496. *
  2497. * Make a new array with TYPE, FILL, and BOUNDS, storing it in DST.
  2498. */
  2499. VM_DEFINE_OP (115, make_array, "make-array", OP4 (X8_S24, X8_S24, X8_S24, X8_S24) | OP_DST)
  2500. {
  2501. scm_t_uint32 dst, type, fill, bounds;
  2502. UNPACK_24 (op, dst);
  2503. UNPACK_24 (ip[1], type);
  2504. UNPACK_24 (ip[2], fill);
  2505. UNPACK_24 (ip[3], bounds);
  2506. SYNC_IP ();
  2507. SP_SET (dst, scm_make_typed_array (SP_REF (type), SP_REF (fill),
  2508. SP_REF (bounds)));
  2509. NEXT (4);
  2510. }
  2511. /* bv-u8-ref dst:8 src:8 idx:8
  2512. * bv-s8-ref dst:8 src:8 idx:8
  2513. * bv-u16-ref dst:8 src:8 idx:8
  2514. * bv-s16-ref dst:8 src:8 idx:8
  2515. * bv-u32-ref dst:8 src:8 idx:8
  2516. * bv-s32-ref dst:8 src:8 idx:8
  2517. * bv-u64-ref dst:8 src:8 idx:8
  2518. * bv-s64-ref dst:8 src:8 idx:8
  2519. * bv-f32-ref dst:8 src:8 idx:8
  2520. * bv-f64-ref dst:8 src:8 idx:8
  2521. *
  2522. * Fetch the item at byte offset IDX in the bytevector SRC, and store
  2523. * it in DST. All accesses use native endianness.
  2524. */
  2525. #define BV_REF(stem, type, size, slot) \
  2526. do { \
  2527. type result; \
  2528. scm_t_uint8 dst, src, idx; \
  2529. SCM bv; \
  2530. scm_t_uint64 c_idx; \
  2531. UNPACK_8_8_8 (op, dst, src, idx); \
  2532. bv = SP_REF (src); \
  2533. c_idx = SP_REF_U64 (idx); \
  2534. \
  2535. VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-ref"); \
  2536. \
  2537. if (SCM_UNLIKELY (SCM_BYTEVECTOR_LENGTH (bv) < size) \
  2538. || SCM_UNLIKELY (SCM_BYTEVECTOR_LENGTH (bv) - size < c_idx)) \
  2539. vm_error_out_of_range_uint64 ("bv-" #stem "-ref", c_idx); \
  2540. \
  2541. memcpy (&result, SCM_BYTEVECTOR_CONTENTS (bv) + c_idx, size); \
  2542. SP_SET_ ## slot (dst, result); \
  2543. NEXT (1); \
  2544. } while (0)
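/* The bounds check above is split into two comparisons -- LENGTH < SIZE
   first, then LENGTH - SIZE < IDX -- because both quantities are
   unsigned: for, say, a 3-byte bytevector and an 8-byte access, the
   subtraction alone would wrap around to a huge value and accept any
   index, so the first comparison rules that case out before the
   subtraction is evaluated.  */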
  2545. VM_DEFINE_OP (116, bv_u8_ref, "bv-u8-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2546. BV_REF (u8, scm_t_uint8, 1, U64);
  2547. VM_DEFINE_OP (117, bv_s8_ref, "bv-s8-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2548. BV_REF (s8, scm_t_int8, 1, S64);
  2549. VM_DEFINE_OP (118, bv_u16_ref, "bv-u16-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2550. BV_REF (u16, scm_t_uint16, 2, U64);
  2551. VM_DEFINE_OP (119, bv_s16_ref, "bv-s16-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2552. BV_REF (s16, scm_t_int16, 2, S64);
  2553. VM_DEFINE_OP (120, bv_u32_ref, "bv-u32-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2554. BV_REF (u32, scm_t_uint32, 4, U64);
  2555. VM_DEFINE_OP (121, bv_s32_ref, "bv-s32-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2556. BV_REF (s32, scm_t_int32, 4, S64);
  2557. VM_DEFINE_OP (122, bv_u64_ref, "bv-u64-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2558. BV_REF (u64, scm_t_uint64, 8, U64);
  2559. VM_DEFINE_OP (123, bv_s64_ref, "bv-s64-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2560. BV_REF (s64, scm_t_int64, 8, S64);
  2561. VM_DEFINE_OP (124, bv_f32_ref, "bv-f32-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2562. BV_REF (f32, float, 4, F64);
  2563. VM_DEFINE_OP (125, bv_f64_ref, "bv-f64-ref", OP1 (X8_S8_S8_S8) | OP_DST)
  2564. BV_REF (f64, double, 8, F64);
  2565. /* bv-u8-set! dst:8 idx:8 src:8
  2566. * bv-s8-set! dst:8 idx:8 src:8
  2567. * bv-u16-set! dst:8 idx:8 src:8
  2568. * bv-s16-set! dst:8 idx:8 src:8
  2569. * bv-u32-set! dst:8 idx:8 src:8
  2570. * bv-s32-set! dst:8 idx:8 src:8
  2571. * bv-u64-set! dst:8 idx:8 src:8
  2572. * bv-s64-set! dst:8 idx:8 src:8
  2573. * bv-f32-set! dst:8 idx:8 src:8
  2574. * bv-f64-set! dst:8 idx:8 src:8
  2575. *
  2576. * Store SRC into the bytevector DST at byte offset IDX. Multibyte
  2577. * values are written using native endianness.
  2578. */
  2579. #define BV_BOUNDED_SET(stem, type, min, max, size, slot_type, slot) \
  2580. do { \
  2581. scm_t_ ## slot_type slot_val; \
  2582. type val; \
  2583. scm_t_uint8 dst, idx, src; \
  2584. SCM bv; \
  2585. scm_t_uint64 c_idx; \
  2586. UNPACK_8_8_8 (op, dst, idx, src); \
  2587. bv = SP_REF (dst); \
  2588. c_idx = SP_REF_U64 (idx); \
  2589. slot_val = SP_REF_ ## slot (src); \
  2590. \
  2591. VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set!"); \
  2592. \
  2593. if (SCM_UNLIKELY (SCM_BYTEVECTOR_LENGTH (bv) < size) \
  2594. || SCM_UNLIKELY (SCM_BYTEVECTOR_LENGTH (bv) - size < c_idx)) \
  2595. vm_error_out_of_range_uint64 ("bv-" #stem "-set!", c_idx); \
  2596. \
  2597. if (SCM_UNLIKELY (slot_val < min) || SCM_UNLIKELY (slot_val > max)) \
  2598. vm_error_out_of_range_ ## slot_type ("bv-" #stem "-set!", \
  2599. slot_val); \
  2600. \
  2601. val = slot_val; \
  2602. memcpy (SCM_BYTEVECTOR_CONTENTS (bv) + c_idx, &val, size); \
  2603. NEXT (1); \
  2604. } while (0)
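/* For illustration: with the MIN/MAX check above, a store such as
   (bytevector-u8-set! bv 0 300), when compiled down to bv-u8-set!,
   reports an out-of-range error for the value 300 rather than silently
   truncating it to 8 bits.  */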
  2605. #define BV_SET(stem, type, size, slot) \
  2606. do { \
  2607. type val; \
  2608. scm_t_uint8 dst, idx, src; \
  2609. SCM bv; \
  2610. scm_t_uint64 c_idx; \
  2611. UNPACK_8_8_8 (op, dst, idx, src); \
  2612. bv = SP_REF (dst); \
  2613. c_idx = SP_REF_U64 (idx); \
  2614. val = SP_REF_ ## slot (src); \
  2615. \
  2616. VM_VALIDATE_BYTEVECTOR (bv, "bv-" #stem "-set!"); \
  2617. \
  2618. if (SCM_UNLIKELY (SCM_BYTEVECTOR_LENGTH (bv) < size) \
  2619. || SCM_UNLIKELY (SCM_BYTEVECTOR_LENGTH (bv) - size < c_idx)) \
  2620. vm_error_out_of_range_uint64 ("bv-" #stem "-set!", c_idx); \
  2621. \
  2622. memcpy (SCM_BYTEVECTOR_CONTENTS (bv) + c_idx, &val, size); \
  2623. NEXT (1); \
  2624. } while (0)
  2625. VM_DEFINE_OP (126, bv_u8_set, "bv-u8-set!", OP1 (X8_S8_S8_S8))
  2626. BV_BOUNDED_SET (u8, scm_t_uint8,
  2627. 0, SCM_T_UINT8_MAX, 1, uint64, U64);
  2628. VM_DEFINE_OP (127, bv_s8_set, "bv-s8-set!", OP1 (X8_S8_S8_S8))
  2629. BV_BOUNDED_SET (s8, scm_t_int8,
  2630. SCM_T_INT8_MIN, SCM_T_INT8_MAX, 1, int64, S64);
  2631. VM_DEFINE_OP (128, bv_u16_set, "bv-u16-set!", OP1 (X8_S8_S8_S8))
  2632. BV_BOUNDED_SET (u16, scm_t_uint16,
  2633. 0, SCM_T_UINT16_MAX, 2, uint64, U64);
  2634. VM_DEFINE_OP (129, bv_s16_set, "bv-s16-set!", OP1 (X8_S8_S8_S8))
  2635. BV_BOUNDED_SET (s16, scm_t_int16,
  2636. SCM_T_INT16_MIN, SCM_T_INT16_MAX, 2, int64, S64);
  2637. VM_DEFINE_OP (130, bv_u32_set, "bv-u32-set!", OP1 (X8_S8_S8_S8))
  2638. BV_BOUNDED_SET (u32, scm_t_uint32,
  2639. 0, SCM_T_UINT32_MAX, 4, uint64, U64);
  2640. VM_DEFINE_OP (131, bv_s32_set, "bv-s32-set!", OP1 (X8_S8_S8_S8))
  2641. BV_BOUNDED_SET (s32, scm_t_int32,
  2642. SCM_T_INT32_MIN, SCM_T_INT32_MAX, 4, int64, S64);
  2643. VM_DEFINE_OP (132, bv_u64_set, "bv-u64-set!", OP1 (X8_S8_S8_S8))
  2644. BV_SET (u64, scm_t_uint64, 8, U64);
  2645. VM_DEFINE_OP (133, bv_s64_set, "bv-s64-set!", OP1 (X8_S8_S8_S8))
  2646. BV_SET (s64, scm_t_int64, 8, S64);
  2647. VM_DEFINE_OP (134, bv_f32_set, "bv-f32-set!", OP1 (X8_S8_S8_S8))
  2648. BV_SET (f32, float, 4, F64);
  2649. VM_DEFINE_OP (135, bv_f64_set, "bv-f64-set!", OP1 (X8_S8_S8_S8))
2650. BV_SET (f64, double, 8, F64);
  2651. /* scm->f64 dst:12 src:12
  2652. *
  2653. * Unpack a raw double-precision floating-point value from SRC and
  2654. * place it in DST. Note that SRC can be any value on which
  2655. * scm_to_double can operate.
  2656. */
  2657. VM_DEFINE_OP (136, scm_to_f64, "scm->f64", OP1 (X8_S12_S12) | OP_DST)
  2658. {
  2659. scm_t_uint16 dst, src;
  2660. UNPACK_12_12 (op, dst, src);
  2661. SYNC_IP ();
  2662. SP_SET_F64 (dst, scm_to_double (SP_REF (src)));
  2663. NEXT (1);
  2664. }
  2665. /* f64->scm dst:12 src:12
  2666. *
  2667. * Pack a raw double-precision floating point value into an inexact
  2668. * number allocated on the heap.
  2669. */
  2670. VM_DEFINE_OP (137, f64_to_scm, "f64->scm", OP1 (X8_S12_S12) | OP_DST)
  2671. {
  2672. scm_t_uint16 dst, src;
  2673. UNPACK_12_12 (op, dst, src);
  2674. SYNC_IP ();
  2675. SP_SET (dst, scm_from_double (SP_REF_F64 (src)));
  2676. NEXT (1);
  2677. }
  2678. /* fadd dst:8 a:8 b:8
  2679. *
  2680. * Add A to B, and place the result in DST. The operands and the
  2681. * result are unboxed double-precision floating-point numbers.
  2682. */
  2683. VM_DEFINE_OP (138, fadd, "fadd", OP1 (X8_S8_S8_S8) | OP_DST)
  2684. {
  2685. scm_t_uint8 dst, a, b;
  2686. UNPACK_8_8_8 (op, dst, a, b);
  2687. SP_SET_F64 (dst, SP_REF_F64 (a) + SP_REF_F64 (b));
  2688. NEXT (1);
  2689. }
  2690. /* fsub dst:8 a:8 b:8
  2691. *
  2692. * Subtract B from A, and place the result in DST. The operands and
  2693. * the result are unboxed double-precision floating-point numbers.
  2694. */
  2695. VM_DEFINE_OP (139, fsub, "fsub", OP1 (X8_S8_S8_S8) | OP_DST)
  2696. {
  2697. scm_t_uint8 dst, a, b;
  2698. UNPACK_8_8_8 (op, dst, a, b);
  2699. SP_SET_F64 (dst, SP_REF_F64 (a) - SP_REF_F64 (b));
  2700. NEXT (1);
  2701. }
  2702. /* fmul dst:8 a:8 b:8
  2703. *
  2704. * Multiply A and B, and place the result in DST. The operands and
  2705. * the result are unboxed double-precision floating-point numbers.
  2706. */
  2707. VM_DEFINE_OP (140, fmul, "fmul", OP1 (X8_S8_S8_S8) | OP_DST)
  2708. {
  2709. scm_t_uint8 dst, a, b;
  2710. UNPACK_8_8_8 (op, dst, a, b);
  2711. SP_SET_F64 (dst, SP_REF_F64 (a) * SP_REF_F64 (b));
  2712. NEXT (1);
  2713. }
  2714. /* fdiv dst:8 a:8 b:8
  2715. *
  2716. * Divide A by B, and place the result in DST. The operands and the
  2717. * result are unboxed double-precision floating-point numbers.
  2718. */
  2719. VM_DEFINE_OP (141, fdiv, "fdiv", OP1 (X8_S8_S8_S8) | OP_DST)
  2720. {
  2721. scm_t_uint8 dst, a, b;
  2722. UNPACK_8_8_8 (op, dst, a, b);
  2723. SP_SET_F64 (dst, SP_REF_F64 (a) / SP_REF_F64 (b));
  2724. NEXT (1);
  2725. }
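/* A rough sketch of where these unboxed ops pay off: in a loop such as

       (let lp ((x 0.0) (i 0))
         (if (< i 1000)
             (lp (+ x 0.5) (+ i 1))
             x))

   the compiler can keep X in an unboxed f64 slot, so each iteration's
   addition becomes a single fadd, with scm->f64 / f64->scm needed only
   at the loop boundaries.  */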
  2726. /* apply-non-program _:24
  2727. *
  2728. * Used by the VM as a trampoline to apply non-programs.
  2729. */
  2730. VM_DEFINE_OP (142, apply_non_program, "apply-non-program", OP1 (X32))
  2731. {
  2732. SCM proc = FP_REF (0);
  2733. while (!SCM_PROGRAM_P (proc))
  2734. {
  2735. if (SCM_STRUCTP (proc) && SCM_STRUCT_APPLICABLE_P (proc))
  2736. {
  2737. proc = SCM_STRUCT_PROCEDURE (proc);
  2738. FP_SET (0, proc);
  2739. continue;
  2740. }
  2741. if (SCM_HAS_TYP7 (proc, scm_tc7_smob) && SCM_SMOB_APPLICABLE_P (proc))
  2742. {
  2743. scm_t_uint32 n = FRAME_LOCALS_COUNT();
  2744. /* Shuffle args up. (FIXME: no real need to shuffle; just set
  2745. IP and go. ) */
  2746. ALLOC_FRAME (n + 1);
  2747. while (n--)
  2748. FP_SET (n + 1, FP_REF (n));
  2749. proc = SCM_SMOB_DESCRIPTOR (proc).apply_trampoline;
  2750. FP_SET (0, proc);
  2751. continue;
  2752. }
  2753. SYNC_IP();
  2754. vm_error_wrong_type_apply (proc);
  2755. }
  2756. ip = SCM_PROGRAM_CODE (proc);
  2757. NEXT (0);
  2758. }

  /* scm->u64 dst:12 src:12
   *
   * Unpack an unsigned 64-bit integer from SRC and place it in DST.
   */
  VM_DEFINE_OP (143, scm_to_u64, "scm->u64", OP1 (X8_S12_S12) | OP_DST)
    {
      scm_t_uint16 dst, src;
      UNPACK_12_12 (op, dst, src);
      SYNC_IP ();
      SP_SET_U64 (dst, scm_to_uint64 (SP_REF (src)));
      NEXT (1);
    }

  /* u64->scm dst:12 src:12
   *
   * Pack an unsigned 64-bit integer into a SCM value.
   */
  VM_DEFINE_OP (144, u64_to_scm, "u64->scm", OP1 (X8_S12_S12) | OP_DST)
    {
      scm_t_uint16 dst, src;
      UNPACK_12_12 (op, dst, src);
      SYNC_IP ();
      SP_SET (dst, scm_from_uint64 (SP_REF_U64 (src)));
      NEXT (1);
    }

  /* bv-length dst:12 src:12
   *
   * Store the length of the bytevector in SRC in DST, as an untagged
   * 64-bit integer.
   */
  VM_DEFINE_OP (145, bv_length, "bv-length", OP1 (X8_S12_S12) | OP_DST)
    {
      ARGS1 (bv);
      VM_VALIDATE_BYTEVECTOR (bv, "bytevector-length");
      SP_SET_U64 (dst, SCM_BYTEVECTOR_LENGTH (bv));
      NEXT (1);
    }

  /* br-if-u64-= a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the u64 value in A is = to the u64 value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (146, br_if_u64_ee, "br-if-u64-=", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_U64_ARITHMETIC (==);
    }

  /* br-if-u64-< a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the u64 value in A is < the u64 value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (147, br_if_u64_lt, "br-if-u64-<", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_U64_ARITHMETIC (<);
    }

  /* br-if-u64-<= a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the u64 value in A is <= the u64 value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (148, br_if_u64_le, "br-if-u64-<=", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_U64_ARITHMETIC (<=);
    }

  /* uadd dst:8 a:8 b:8
   *
   * Add A to B, and place the result in DST. The operands and the
   * result are unboxed unsigned 64-bit integers. Overflow will wrap
   * around.
   */
  VM_DEFINE_OP (149, uadd, "uadd", OP1 (X8_S8_S8_S8) | OP_DST)
    {
      scm_t_uint8 dst, a, b;
      UNPACK_8_8_8 (op, dst, a, b);
      SP_SET_U64 (dst, SP_REF_U64 (a) + SP_REF_U64 (b));
      NEXT (1);
    }

  /* usub dst:8 a:8 b:8
   *
   * Subtract B from A, and place the result in DST. The operands and
   * the result are unboxed unsigned 64-bit integers. Overflow will
   * wrap around.
   */
  VM_DEFINE_OP (150, usub, "usub", OP1 (X8_S8_S8_S8) | OP_DST)
    {
      scm_t_uint8 dst, a, b;
      UNPACK_8_8_8 (op, dst, a, b);
      SP_SET_U64 (dst, SP_REF_U64 (a) - SP_REF_U64 (b));
      NEXT (1);
    }

  /* umul dst:8 a:8 b:8
   *
   * Multiply A and B, and place the result in DST. The operands and
   * the result are unboxed unsigned 64-bit integers. Overflow will
   * wrap around.
   */
  VM_DEFINE_OP (151, umul, "umul", OP1 (X8_S8_S8_S8) | OP_DST)
    {
      scm_t_uint8 dst, a, b;
      UNPACK_8_8_8 (op, dst, a, b);
      SP_SET_U64 (dst, SP_REF_U64 (a) * SP_REF_U64 (b));
      NEXT (1);
    }
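
  /* uadd, usub and umul above rely on C's guarantee that unsigned
     arithmetic wraps modulo 2^64, so no overflow check is needed. A
     small self-contained illustration of that wrap-around behavior
     (standard C only, nothing Guile-specific):

         #include <assert.h>
         #include <stdint.h>

         static void
         wraparound_examples (void)
         {
           uint64_t max = UINT64_MAX;
           assert (max + 1 == 0);             // uadd wraps to zero
           assert ((uint64_t) 0 - 1 == max);  // usub wraps to 2^64 - 1
           assert (max * 2 == max - 1);       // umul: 2^65 - 2 mod 2^64
         }
  */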

  /* uadd/immediate dst:8 src:8 imm:8
   *
   * Add the unsigned 8-bit value IMM to the unsigned 64-bit value in
   * SRC, and place the raw unsigned 64-bit result in DST. Overflow
   * will wrap around.
   */
  VM_DEFINE_OP (152, uadd_immediate, "uadd/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
    {
      scm_t_uint8 dst, src, imm;
      scm_t_uint64 x;
      UNPACK_8_8_8 (op, dst, src, imm);
      x = SP_REF_U64 (src);
      SP_SET_U64 (dst, x + (scm_t_uint64) imm);
      NEXT (1);
    }

  /* usub/immediate dst:8 src:8 imm:8
   *
   * Subtract the unsigned 8-bit value IMM from the unsigned 64-bit
   * value in SRC and place the raw unsigned 64-bit result in DST.
   * Overflow will wrap around.
   */
  VM_DEFINE_OP (153, usub_immediate, "usub/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
    {
      scm_t_uint8 dst, src, imm;
      scm_t_uint64 x;
      UNPACK_8_8_8 (op, dst, src, imm);
      x = SP_REF_U64 (src);
      SP_SET_U64 (dst, x - (scm_t_uint64) imm);
      NEXT (1);
    }

  /* umul/immediate dst:8 src:8 imm:8
   *
   * Multiply the unsigned 64-bit value from SRC by the unsigned 8-bit
   * value IMM and place the raw unsigned 64-bit result in DST.
   * Overflow will wrap around.
   */
  VM_DEFINE_OP (154, umul_immediate, "umul/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
    {
      scm_t_uint8 dst, src, imm;
      scm_t_uint64 x;
      UNPACK_8_8_8 (op, dst, src, imm);
      x = SP_REF_U64 (src);
      SP_SET_U64 (dst, x * (scm_t_uint64) imm);
      NEXT (1);
    }

  /* load-f64 dst:24 high-bits:32 low-bits:32
   *
   * Make a double-precision floating-point value with HIGH-BITS and
   * LOW-BITS.
   */
  VM_DEFINE_OP (155, load_f64, "load-f64", OP3 (X8_S24, AF32, BF32) | OP_DST)
    {
      scm_t_uint32 dst;
      scm_t_uint64 val;
      UNPACK_24 (op, dst);
      val = ip[1];
      val <<= 32;
      val |= ip[2];
      SP_SET_U64 (dst, val);
      NEXT (3);
    }
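
  /* load-f64 rebuilds the 64-bit IEEE-754 bit pattern of the constant
     from two 32-bit instruction words and stores it raw; reading the
     slot back as an f64 yields the intended double. A self-contained
     sketch of the split done when the constant is emitted and of the
     reassembly performed here (standard C, illustrative only):

         #include <assert.h>
         #include <stdint.h>
         #include <string.h>

         static void
         f64_roundtrip (double d)
         {
           uint64_t bits, back;
           uint32_t high, low;
           double d2;

           memcpy (&bits, &d, sizeof bits);       // raw IEEE-754 bits
           high = bits >> 32;                     // high-bits operand
           low = (uint32_t) bits;                 // low-bits operand

           back = ((uint64_t) high << 32) | low;  // what load-f64 computes
           memcpy (&d2, &back, sizeof d2);
           assert (memcmp (&d, &d2, sizeof d) == 0);
         }
  */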

  /* load-u64 dst:24 high-bits:32 low-bits:32
   *
   * Make an unsigned 64-bit integer with HIGH-BITS and LOW-BITS.
   */
  VM_DEFINE_OP (156, load_u64, "load-u64", OP3 (X8_S24, AU32, BU32) | OP_DST)
    {
      scm_t_uint32 dst;
      scm_t_uint64 val;
      UNPACK_24 (op, dst);
      val = ip[1];
      val <<= 32;
      val |= ip[2];
      SP_SET_U64 (dst, val);
      NEXT (3);
    }

  /* scm->s64 dst:12 src:12
   *
   * Unpack a signed 64-bit integer from SRC and place it in DST.
   */
  VM_DEFINE_OP (157, scm_to_s64, "scm->s64", OP1 (X8_S12_S12) | OP_DST)
    {
      scm_t_uint16 dst, src;
      UNPACK_12_12 (op, dst, src);
      SYNC_IP ();
      SP_SET_S64 (dst, scm_to_int64 (SP_REF (src)));
      NEXT (1);
    }

  /* s64->scm dst:12 src:12
   *
   * Pack a signed 64-bit integer into a SCM value.
   */
  VM_DEFINE_OP (158, s64_to_scm, "s64->scm", OP1 (X8_S12_S12) | OP_DST)
    {
      scm_t_uint16 dst, src;
      UNPACK_12_12 (op, dst, src);
      SYNC_IP ();
      SP_SET (dst, scm_from_int64 (SP_REF_S64 (src)));
      NEXT (1);
    }

  /* load-s64 dst:24 high-bits:32 low-bits:32
   *
   * Make a signed 64-bit integer with HIGH-BITS and LOW-BITS.
   */
  VM_DEFINE_OP (159, load_s64, "load-s64", OP3 (X8_S24, AS32, BS32) | OP_DST)
    {
      scm_t_uint32 dst;
      scm_t_uint64 val;
      UNPACK_24 (op, dst);
      val = ip[1];
      val <<= 32;
      val |= ip[2];
      SP_SET_U64 (dst, val);
      NEXT (3);
    }

  /* current-thread dst:24
   *
   * Write the current thread into DST.
   */
  VM_DEFINE_OP (160, current_thread, "current-thread", OP1 (X8_S24) | OP_DST)
    {
      scm_t_uint32 dst;
      UNPACK_24 (op, dst);
      SP_SET (dst, thread->handle);
      NEXT (1);
    }

  /* logsub dst:8 a:8 b:8
   *
   * Place the bitwise AND of A and the bitwise NOT of B into DST.
   */
  VM_DEFINE_OP (161, logsub, "logsub", OP1 (X8_S8_S8_S8) | OP_DST)
    {
      ARGS2 (x, y);

      if (SCM_I_INUMP (x) && SCM_I_INUMP (y))
        {
          scm_t_signed_bits a, b;
          a = SCM_I_INUM (x);
          b = SCM_I_INUM (y);
          RETURN (SCM_I_MAKINUM (a & ~b));
        }

      RETURN_EXP (scm_logand (x, scm_lognot (y)));
    }
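
  /* logsub computes A & ~B, i.e. it clears in A every bit that is set
     in B. For example, with A = #b1101 and B = #b1001 the result is
     #b0100. The same identity in plain C, on untagged integers
     (illustrative only):

         #include <assert.h>

         static void
         logsub_example (void)
         {
           assert ((0xDu & ~0x9u) == 0x4u);   // 1101 & ~1001 == 0100
         }
  */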

  /* ulogand dst:8 a:8 b:8
   *
   * Place the bitwise AND of the u64 values in A and B into DST.
   */
  VM_DEFINE_OP (162, ulogand, "ulogand", OP1 (X8_S8_S8_S8) | OP_DST)
    {
      scm_t_uint8 dst, a, b;
      UNPACK_8_8_8 (op, dst, a, b);
      SP_SET_U64 (dst, SP_REF_U64 (a) & SP_REF_U64 (b));
      NEXT (1);
    }

  /* ulogior dst:8 a:8 b:8
   *
   * Place the bitwise inclusive OR of the u64 values in A and B into
   * DST.
   */
  VM_DEFINE_OP (163, ulogior, "ulogior", OP1 (X8_S8_S8_S8) | OP_DST)
    {
      scm_t_uint8 dst, a, b;
      UNPACK_8_8_8 (op, dst, a, b);
      SP_SET_U64 (dst, SP_REF_U64 (a) | SP_REF_U64 (b));
      NEXT (1);
    }

  /* ulogsub dst:8 a:8 b:8
   *
   * Place the bitwise AND of A and the bitwise NOT of B, both u64
   * values, into DST.
   */
  VM_DEFINE_OP (164, ulogsub, "ulogsub", OP1 (X8_S8_S8_S8) | OP_DST)
    {
      scm_t_uint8 dst, a, b;
      UNPACK_8_8_8 (op, dst, a, b);
      SP_SET_U64 (dst, SP_REF_U64 (a) & ~SP_REF_U64 (b));
      NEXT (1);
    }

  /* ursh dst:8 a:8 b:8
   *
   * Shift the u64 value in A right by B bits, and place the result in
   * DST. Only the lower 6 bits of B are used.
   */
  VM_DEFINE_OP (165, ursh, "ursh", OP1 (X8_S8_S8_S8) | OP_DST)
    {
      scm_t_uint8 dst, a, b;
      UNPACK_8_8_8 (op, dst, a, b);
      SP_SET_U64 (dst, SP_REF_U64 (a) >> (SP_REF_U64 (b) & 63));
      NEXT (1);
    }

  /* ulsh dst:8 a:8 b:8
   *
   * Shift the u64 value in A left by B bits, and place the result in
   * DST. Only the lower 6 bits of B are used.
   */
  VM_DEFINE_OP (166, ulsh, "ulsh", OP1 (X8_S8_S8_S8) | OP_DST)
    {
      scm_t_uint8 dst, a, b;
      UNPACK_8_8_8 (op, dst, a, b);
      SP_SET_U64 (dst, SP_REF_U64 (a) << (SP_REF_U64 (b) & 63));
      NEXT (1);
    }
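
  /* ursh and ulsh use only the low 6 bits of the shift count. That
     both gives the mod-64 semantics documented above and avoids the
     undefined behavior of shifting a 64-bit value by 64 or more bits
     in C. A self-contained illustration of the masking (standard C
     only):

         #include <assert.h>
         #include <stdint.h>

         static uint64_t
         shift_right_mod64 (uint64_t x, uint64_t count)
         {
           return x >> (count & 63);   // same masking as ursh above
         }

         static void
         shift_examples (void)
         {
           assert (shift_right_mod64 (16, 2) == 4);
           assert (shift_right_mod64 (16, 66) == 4);   // 66 & 63 == 2
         }
  */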

  /* scm->u64/truncate dst:12 src:12
   *
   * Unpack an exact integer from SRC and place it in the unsigned
   * 64-bit register DST, truncating any high bits. If the number in
   * SRC is negative, all the high bits will be set.
   */
  VM_DEFINE_OP (167, scm_to_u64_truncate, "scm->u64/truncate", OP1 (X8_S12_S12) | OP_DST)
    {
      scm_t_uint16 dst, src;
      SCM x;
      UNPACK_12_12 (op, dst, src);
      x = SP_REF (src);
      if (SCM_I_INUMP (x))
        SP_SET_U64 (dst, (scm_t_uint64) SCM_I_INUM (x));
      else
        {
          SYNC_IP ();
          SP_SET_U64 (dst,
                      scm_to_uint64
                      (scm_logand (x, scm_from_uint64 ((scm_t_uint64) -1))));
        }
      NEXT (1);
    }
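
  /* scm->u64/truncate keeps only the low 64 bits of the source
     integer. On the fixnum fast path the cast alone already has that
     effect: converting a negative value to an unsigned type is defined
     to wrap modulo 2^64, so all the high bits come out set, as the
     comment above promises. The slow path gets the same result by
     masking with (scm_t_uint64) -1, i.e. 2^64 - 1, before converting.
     A quick self-contained check of the fast-path claim (standard C
     only):

         #include <assert.h>
         #include <stdint.h>

         static void
         truncate_examples (void)
         {
           assert ((uint64_t) (int64_t) -1 == UINT64_MAX);      // all bits set
           assert ((uint64_t) (int64_t) -2 == UINT64_MAX - 1);
         }
  */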

  /* ursh/immediate dst:8 a:8 b:8
   *
   * Shift the u64 value in A right by the immediate B bits, and place
   * the result in DST. Only the lower 6 bits of B are used.
   */
  VM_DEFINE_OP (168, ursh_immediate, "ursh/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
    {
      scm_t_uint8 dst, a, b;
      UNPACK_8_8_8 (op, dst, a, b);
      SP_SET_U64 (dst, SP_REF_U64 (a) >> (b & 63));
      NEXT (1);
    }

  /* ulsh/immediate dst:8 a:8 b:8
   *
   * Shift the u64 value in A left by the immediate B bits, and place
   * the result in DST. Only the lower 6 bits of B are used.
   */
  VM_DEFINE_OP (169, ulsh_immediate, "ulsh/immediate", OP1 (X8_S8_S8_C8) | OP_DST)
    {
      scm_t_uint8 dst, a, b;
      UNPACK_8_8_8 (op, dst, a, b);
      SP_SET_U64 (dst, SP_REF_U64 (a) << (b & 63));
      NEXT (1);
    }

#define BR_U64_SCM_COMPARISON(x, y, unboxed, boxed) \
  do { \
    scm_t_uint32 a, b; \
    scm_t_uint64 x; \
    SCM y_scm; \
    \
    UNPACK_24 (op, a); \
    UNPACK_24 (ip[1], b); \
    x = SP_REF_U64 (a); \
    y_scm = SP_REF (b); \
    \
    if (SCM_I_INUMP (y_scm)) \
      { \
        scm_t_signed_bits y = SCM_I_INUM (y_scm); \
        \
        if ((ip[2] & 0x1) ? !(unboxed) : (unboxed)) \
          { \
            scm_t_int32 offset = ip[2]; \
            offset >>= 8; /* Sign-extending shift. */ \
            if (offset <= 0) \
              VM_HANDLE_INTERRUPTS; \
            NEXT (offset); \
          } \
        NEXT (3); \
      } \
    else \
      { \
        SCM res; \
        SYNC_IP (); \
        res = boxed (scm_from_uint64 (x), y_scm); \
        CACHE_SP (); \
        if ((ip[2] & 0x1) ? scm_is_false (res) : scm_is_true (res)) \
          { \
            scm_t_int32 offset = ip[2]; \
            offset >>= 8; /* Sign-extending shift. */ \
            if (offset <= 0) \
              VM_HANDLE_INTERRUPTS; \
            NEXT (offset); \
          } \
        NEXT (3); \
      } \
  } while (0)
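
  /* BR_U64_SCM_COMPARISON compares an unboxed u64 against a boxed SCM
     number. When the SCM operand is a fixnum, the comparison is done
     inline: the fixnum's sign is checked first, because a negative
     fixnum can never be equal to or greater than an unsigned value,
     and only then is it cast to u64 for the actual comparison.
     Otherwise the u64 is boxed and the generic predicate (scm_num_eq_p,
     scm_less_p, ...) is used. A self-contained sketch of the
     sign-aware fast path for the "<" case, mirroring the unboxed
     expression passed in below (standard C only, not the macro
     itself):

         #include <assert.h>
         #include <stdbool.h>
         #include <stdint.h>

         static bool
         u64_less_than_signed (uint64_t x, int64_t y)
         {
           // x < y is only possible when y is non-negative; then the
           // cast to uint64_t preserves y's value and the unsigned
           // comparison matches the mathematical one.
           return y >= 0 && x < (uint64_t) y;
         }

         static void
         compare_examples (void)
         {
           assert (!u64_less_than_signed (5, -1));
           assert (u64_less_than_signed (5, 6));
           assert (!u64_less_than_signed (UINT64_MAX, 6));
         }
  */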

  /* br-if-u64-=-scm a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the U64 value in A is = to the SCM value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (170, br_if_u64_ee_scm, "br-if-u64-=-scm", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_U64_SCM_COMPARISON (x, y, y >= 0 && (scm_t_uint64) y == x, scm_num_eq_p);
    }

  /* br-if-u64-<-scm a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the U64 value in A is < the SCM value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (171, br_if_u64_lt_scm, "br-if-u64-<-scm", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_U64_SCM_COMPARISON (x, y, y >= 0 && (scm_t_uint64) y > x, scm_less_p);
    }

  /* br-if-u64-<=-scm a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the U64 value in A is <= the SCM value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (172, br_if_u64_le_scm, "br-if-u64-<=-scm", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_U64_SCM_COMPARISON (x, y, y >= 0 && (scm_t_uint64) y >= x, scm_leq_p);
    }

  /* br-if-u64->-scm a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the U64 value in A is > the SCM value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (173, br_if_u64_gt_scm, "br-if-u64->-scm", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_U64_SCM_COMPARISON (x, y, y < 0 || (scm_t_uint64) y < x, scm_gr_p);
    }

  /* br-if-u64->=-scm a:24 _:8 b:24 invert:1 _:7 offset:24
   *
   * If the U64 value in A is >= the SCM value in B, add OFFSET, a
   * signed 24-bit number, to the current instruction pointer.
   */
  VM_DEFINE_OP (174, br_if_u64_ge_scm, "br-if-u64->=-scm", OP3 (X8_S24, X8_S24, B1_X7_L24))
    {
      BR_U64_SCM_COMPARISON (x, y, y <= 0 || (scm_t_uint64) y <= x, scm_geq_p);
    }

  VM_DEFINE_OP (175, unused_175, NULL, NOP)
  VM_DEFINE_OP (176, unused_176, NULL, NOP)
  VM_DEFINE_OP (177, unused_177, NULL, NOP)
  VM_DEFINE_OP (178, unused_178, NULL, NOP)
  VM_DEFINE_OP (179, unused_179, NULL, NOP)
  VM_DEFINE_OP (180, unused_180, NULL, NOP)
  VM_DEFINE_OP (181, unused_181, NULL, NOP)
  VM_DEFINE_OP (182, unused_182, NULL, NOP)
  VM_DEFINE_OP (183, unused_183, NULL, NOP)
  VM_DEFINE_OP (184, unused_184, NULL, NOP)
  VM_DEFINE_OP (185, unused_185, NULL, NOP)
  VM_DEFINE_OP (186, unused_186, NULL, NOP)
  VM_DEFINE_OP (187, unused_187, NULL, NOP)
  VM_DEFINE_OP (188, unused_188, NULL, NOP)
  VM_DEFINE_OP (189, unused_189, NULL, NOP)
  VM_DEFINE_OP (190, unused_190, NULL, NOP)
  VM_DEFINE_OP (191, unused_191, NULL, NOP)
  VM_DEFINE_OP (192, unused_192, NULL, NOP)
  VM_DEFINE_OP (193, unused_193, NULL, NOP)
  VM_DEFINE_OP (194, unused_194, NULL, NOP)
  VM_DEFINE_OP (195, unused_195, NULL, NOP)
  VM_DEFINE_OP (196, unused_196, NULL, NOP)
  VM_DEFINE_OP (197, unused_197, NULL, NOP)
  VM_DEFINE_OP (198, unused_198, NULL, NOP)
  VM_DEFINE_OP (199, unused_199, NULL, NOP)
  VM_DEFINE_OP (200, unused_200, NULL, NOP)
  VM_DEFINE_OP (201, unused_201, NULL, NOP)
  VM_DEFINE_OP (202, unused_202, NULL, NOP)
  VM_DEFINE_OP (203, unused_203, NULL, NOP)
  VM_DEFINE_OP (204, unused_204, NULL, NOP)
  VM_DEFINE_OP (205, unused_205, NULL, NOP)
  VM_DEFINE_OP (206, unused_206, NULL, NOP)
  VM_DEFINE_OP (207, unused_207, NULL, NOP)
  VM_DEFINE_OP (208, unused_208, NULL, NOP)
  VM_DEFINE_OP (209, unused_209, NULL, NOP)
  VM_DEFINE_OP (210, unused_210, NULL, NOP)
  VM_DEFINE_OP (211, unused_211, NULL, NOP)
  VM_DEFINE_OP (212, unused_212, NULL, NOP)
  VM_DEFINE_OP (213, unused_213, NULL, NOP)
  VM_DEFINE_OP (214, unused_214, NULL, NOP)
  VM_DEFINE_OP (215, unused_215, NULL, NOP)
  VM_DEFINE_OP (216, unused_216, NULL, NOP)
  VM_DEFINE_OP (217, unused_217, NULL, NOP)
  VM_DEFINE_OP (218, unused_218, NULL, NOP)
  VM_DEFINE_OP (219, unused_219, NULL, NOP)
  VM_DEFINE_OP (220, unused_220, NULL, NOP)
  VM_DEFINE_OP (221, unused_221, NULL, NOP)
  VM_DEFINE_OP (222, unused_222, NULL, NOP)
  VM_DEFINE_OP (223, unused_223, NULL, NOP)
  VM_DEFINE_OP (224, unused_224, NULL, NOP)
  VM_DEFINE_OP (225, unused_225, NULL, NOP)
  VM_DEFINE_OP (226, unused_226, NULL, NOP)
  VM_DEFINE_OP (227, unused_227, NULL, NOP)
  VM_DEFINE_OP (228, unused_228, NULL, NOP)
  VM_DEFINE_OP (229, unused_229, NULL, NOP)
  VM_DEFINE_OP (230, unused_230, NULL, NOP)
  VM_DEFINE_OP (231, unused_231, NULL, NOP)
  VM_DEFINE_OP (232, unused_232, NULL, NOP)
  VM_DEFINE_OP (233, unused_233, NULL, NOP)
  VM_DEFINE_OP (234, unused_234, NULL, NOP)
  VM_DEFINE_OP (235, unused_235, NULL, NOP)
  VM_DEFINE_OP (236, unused_236, NULL, NOP)
  VM_DEFINE_OP (237, unused_237, NULL, NOP)
  VM_DEFINE_OP (238, unused_238, NULL, NOP)
  VM_DEFINE_OP (239, unused_239, NULL, NOP)
  VM_DEFINE_OP (240, unused_240, NULL, NOP)
  VM_DEFINE_OP (241, unused_241, NULL, NOP)
  VM_DEFINE_OP (242, unused_242, NULL, NOP)
  VM_DEFINE_OP (243, unused_243, NULL, NOP)
  VM_DEFINE_OP (244, unused_244, NULL, NOP)
  VM_DEFINE_OP (245, unused_245, NULL, NOP)
  VM_DEFINE_OP (246, unused_246, NULL, NOP)
  VM_DEFINE_OP (247, unused_247, NULL, NOP)
  VM_DEFINE_OP (248, unused_248, NULL, NOP)
  VM_DEFINE_OP (249, unused_249, NULL, NOP)
  VM_DEFINE_OP (250, unused_250, NULL, NOP)
  VM_DEFINE_OP (251, unused_251, NULL, NOP)
  VM_DEFINE_OP (252, unused_252, NULL, NOP)
  VM_DEFINE_OP (253, unused_253, NULL, NOP)
  VM_DEFINE_OP (254, unused_254, NULL, NOP)
  VM_DEFINE_OP (255, unused_255, NULL, NOP)
    {
      vm_error_bad_instruction (op);
      abort (); /* never reached */
    }

  END_DISPATCH_SWITCH;
}

#undef ABORT_CONTINUATION_HOOK
#undef ALIGNED_P
#undef APPLY_HOOK
#undef ARGS1
#undef ARGS2
#undef BEGIN_DISPATCH_SWITCH
#undef BINARY_INTEGER_OP
#undef BR_ARITHMETIC
#undef BR_BINARY
#undef BR_NARGS
#undef BR_UNARY
#undef BV_FIXABLE_INT_REF
#undef BV_FIXABLE_INT_SET
#undef BV_FLOAT_REF
#undef BV_FLOAT_SET
#undef BV_INT_REF
#undef BV_INT_SET
#undef CACHE_REGISTER
#undef END_DISPATCH_SWITCH
#undef FREE_VARIABLE_REF
#undef INIT
#undef INUM_MAX
#undef INUM_MIN
#undef FP_REF
#undef FP_SET
#undef FP_SLOT
#undef SP_REF
#undef SP_SET
#undef NEXT
#undef NEXT_HOOK
#undef NEXT_JUMP
#undef POP_CONTINUATION_HOOK
#undef PUSH_CONTINUATION_HOOK
#undef RETURN
#undef RETURN_ONE_VALUE
#undef RETURN_VALUE_LIST
#undef RUN_HOOK
#undef RUN_HOOK0
#undef RUN_HOOK1
#undef SYNC_IP
#undef UNPACK_8_8_8
#undef UNPACK_8_16
#undef UNPACK_16_8
#undef UNPACK_12_12
#undef UNPACK_24
#undef VARIABLE_BOUNDP
#undef VARIABLE_REF
#undef VARIABLE_SET
#undef VM_CHECK_FREE_VARIABLE
#undef VM_CHECK_OBJECT
#undef VM_CHECK_UNDERFLOW
#undef VM_DEFINE_OP
#undef VM_INSTRUCTION_TO_LABEL
#undef VM_USE_HOOKS
#undef VM_VALIDATE_BYTEVECTOR
#undef VM_VALIDATE_PAIR
#undef VM_VALIDATE_STRUCT

/*
(defun renumber-ops ()
  "start from top of buffer and renumber 'VM_DEFINE_FOO (\n' sequences"
  (interactive "")
  (save-excursion
    (let ((counter -1)) (goto-char (point-min))
      (while (re-search-forward "^ *VM_DEFINE_[^ ]+ (\\([^,]+\\)," (point-max) t)
        (replace-match
         (number-to-string (setq counter (1+ counter)))
         t t nil 1)))))
(renumber-ops)
*/

/*
  Local Variables:
  c-file-style: "gnu"
  End:
*/