cc_core.c 113 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511
661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272127312741275127612771278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542
055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762277227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430243124322433243424352436243724382439244024412442244324442445244624472448244924502451245224532454245524562457245824592460246124622463246424652466246724682469247024712472247324742475247624772478247924802481248224832484248524862487248824892490249124922493249424952496249724982499250025012502250325042505250625072508250925102511251225132514251525162517251825192520252125222523252425252526252725282529253025312532253325342535253625372538253925402541254225432544254525462547254825492550255125522553255425552556255725582559256025612562256325642565256625672568256925702571257225732574257525762577257825792580258125822583258425852586258725882589259025912592259325942595259625972598259926002601260226032604260526062607260826092610261126122613261426152616261726182619262026212622262326242625262626272628262926302631263226332634263526362637263826392640264126422643264426452646264726482649265026512652265326542655265626572658265926602661266226632664266526662667266826692670267126722673267426752676267726782679268026812682268326842685268626872688268926902691269226932694269526962697269826992700270127022703270427052706270727082709271027112712271327142715271627172718271927202721272227232724272527262727272827292730273127322733273427352736273727382739274027412742274327442745274627472748274927502751275227532754275527562757275827592760276127622763276427652766276727682769277027712772277327742775277627772778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943
2944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040304130423043304430453046304730483049305030513052305330543055305630573058305930603061306230633064306530663067306830693070307130723073307430753076307730783079308030813082308330843085308630873088308930903091309230933094309530963097309830993100310131023103310431053106310731083109311031113112311331143115311631173118311931203121312231233124312531263127312831293130313131323133313431353136313731383139314031413142
  1. /* Copyright (C) 2016 Jeremiah Orians
  2. * Copyright (C) 2018 Jan (janneke) Nieuwenhuizen <janneke@gnu.org>
  3. * Copyright (C) 2020 deesix <deesix@tuta.io>
  4. * Copyright (C) 2021 Andrius Štikonas <andrius@stikonas.eu>
  5. * This file is part of M2-Planet.
  6. *
  7. * M2-Planet is free software: you can redistribute it and/or modify
  8. * it under the terms of the GNU General Public License as published by
  9. * the Free Software Foundation, either version 3 of the License, or
  10. * (at your option) any later version.
  11. *
  12. * M2-Planet is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. * GNU General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU General Public License
  18. * along with M2-Planet. If not, see <http://www.gnu.org/licenses/>.
  19. */
  20. #include "cc.h"
  21. #include "gcc_req.h"
  22. #include <stdint.h>
  23. /* Global lists */
  24. struct token_list* global_symbol_list;
  25. struct token_list* global_function_list;
  26. /* Core lists for this file */
  27. struct token_list* function;
  28. /* What we are currently working on */
  29. struct type* current_target;
  30. char* break_target_head;
  31. char* break_target_func;
  32. char* break_target_num;
  33. char* continue_target_head;
  34. struct token_list* break_frame;
  35. int current_count;
  36. int Address_of;
  37. /* Imported functions */
  38. char* int2str(int x, int base, int signed_p);
  39. int strtoint(char *a);
  40. char* parse_string(char* string);
  41. int escape_lookup(char* c);
  42. void require(int bool, char* error);
  43. struct token_list* reverse_list(struct token_list* head);
  44. struct type* mirror_type(struct type* source, char* name);
  45. struct type* add_primitive(struct type* a);
  46. struct token_list* emit(char *s, struct token_list* head)
  47. {
  48. struct token_list* t = calloc(1, sizeof(struct token_list));
  49. require(NULL != t, "Exhausted memory while generating token to emit\n");
  50. t->next = head;
  51. t->s = s;
  52. return t;
  53. }
  54. void emit_out(char* s)
  55. {
  56. output_list = emit(s, output_list);
  57. }
  58. struct token_list* uniqueID(char* s, struct token_list* l, char* num)
  59. {
  60. l = emit("\n", emit(num, emit("_", emit(s, l))));
  61. return l;
  62. }
  63. void uniqueID_out(char* s, char* num)
  64. {
  65. output_list = uniqueID(s, output_list, num);
  66. }
  67. struct token_list* sym_declare(char *s, struct type* t, struct token_list* list)
  68. {
  69. struct token_list* a = calloc(1, sizeof(struct token_list));
  70. require(NULL != a, "Exhausted memory while attempting to declare a symbol\n");
  71. a->next = list;
  72. a->s = s;
  73. a->type = t;
  74. return a;
  75. }
  76. struct token_list* sym_lookup(char *s, struct token_list* symbol_list)
  77. {
  78. struct token_list* i;
  79. for(i = symbol_list; NULL != i; i = i->next)
  80. {
  81. if(match(i->s, s)) return i;
  82. }
  83. return NULL;
  84. }
  85. void line_error_token(struct token_list *token)
  86. {
  87. if(NULL == token)
  88. {
  89. fputs("EOF reached inside of line_error\n", stderr);
  90. fputs("problem at end of file\n", stderr);
  91. return;
  92. }
  93. fputs(token->filename, stderr);
  94. fputs(":", stderr);
  95. fputs(int2str(token->linenumber, 10, TRUE), stderr);
  96. fputs(":", stderr);
  97. }
  98. void line_error(void)
  99. {
  100. line_error_token(global_token);
  101. }
  102. void require_match(char* message, char* required)
  103. {
  104. if(NULL == global_token)
  105. {
  106. line_error();
  107. fputs("EOF reached inside of require match\n", stderr);
  108. fputs("problem at end of file\n", stderr);
  109. fputs(message, stderr);
  110. exit(EXIT_FAILURE);
  111. }
  112. if(!match(global_token->s, required))
  113. {
  114. line_error();
  115. fputs(message, stderr);
  116. exit(EXIT_FAILURE);
  117. }
  118. global_token = global_token->next;
  119. }
  120. void maybe_bootstrap_error(char* feature)
  121. {
  122. if (BOOTSTRAP_MODE)
  123. {
  124. line_error();
  125. fputs(feature, stderr);
  126. fputs(" is not supported in --bootstrap-mode\n", stderr);
  127. exit(EXIT_FAILURE);
  128. }
  129. }
  130. void expression(void);
  131. void function_call(char* s, int bool)
  132. {
  133. require_match("ERROR in process_expression_list\nNo ( was found\n", "(");
  134. require(NULL != global_token, "Improper function call\n");
  135. int passed = 0;
  136. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  137. {
  138. emit_out("PUSHR R13 R15\t# Prevent overwriting in recursion\n");
  139. emit_out("PUSHR R14 R15\t# Protect the old base pointer\n");
  140. emit_out("COPY R13 R15\t# Copy new base pointer\n");
  141. }
  142. else if(X86 == Architecture)
  143. {
  144. emit_out("push_edi\t# Prevent overwriting in recursion\n");
  145. emit_out("push_ebp\t# Protect the old base pointer\n");
  146. emit_out("mov_edi,esp\t# Copy new base pointer\n");
  147. }
  148. else if(AMD64 == Architecture)
  149. {
  150. emit_out("push_rdi\t# Prevent overwriting in recursion\n");
  151. emit_out("push_rbp\t# Protect the old base pointer\n");
  152. emit_out("mov_rdi,rsp\t# Copy new base pointer\n");
  153. }
  154. else if(ARMV7L == Architecture)
  155. {
  156. emit_out("{R11} PUSH_ALWAYS\t# Prevent overwriting in recursion\n");
  157. emit_out("{BP} PUSH_ALWAYS\t# Protect the old base pointer\n");
  158. emit_out("'0' SP R11 NO_SHIFT MOVE_ALWAYS\t# Copy new base pointer\n");
  159. }
  160. else if(AARCH64 == Architecture)
  161. {
  162. emit_out("PUSH_X16\t# Protect a tmp register we're going to use\n");
  163. emit_out("PUSH_LR\t# Protect the old return pointer (link)\n");
  164. emit_out("PUSH_BP\t# Protect the old base pointer\n");
  165. emit_out("SET_X16_FROM_SP\t# The base pointer to-be\n");
  166. }
  167. else if(RISCV32 == Architecture)
  168. {
  169. emit_out("rd_sp rs1_sp !-12 addi\t# Allocate stack\n");
  170. emit_out("rs1_sp rs2_ra @4 sw\t# Protect the old return pointer\n");
  171. emit_out("rs1_sp rs2_fp sw\t# Protect the old frame pointer\n");
  172. emit_out("rs1_sp rs2_tp @8 sw\t# Protect temp register we are going to use\n");
  173. emit_out("rd_tp rs1_sp mv\t# The base pointer to-be\n");
  174. }
  175. else if(RISCV64 == Architecture)
  176. {
  177. emit_out("rd_sp rs1_sp !-24 addi\t# Allocate stack\n");
  178. emit_out("rs1_sp rs2_ra @8 sd\t# Protect the old return pointer\n");
  179. emit_out("rs1_sp rs2_fp sd\t# Protect the old frame pointer\n");
  180. emit_out("rs1_sp rs2_tp @16 sd\t# Protect temp register we are going to use\n");
  181. emit_out("rd_tp rs1_sp mv\t# The base pointer to-be\n");
  182. }
  183. if(global_token->s[0] != ')')
  184. {
  185. expression();
  186. require(NULL != global_token, "incomplete function call, received EOF instead of )\n");
  187. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("PUSHR R0 R15\t#_process_expression1\n");
  188. else if(X86 == Architecture) emit_out("push_eax\t#_process_expression1\n");
  189. else if(AMD64 == Architecture) emit_out("push_rax\t#_process_expression1\n");
  190. else if(ARMV7L == Architecture) emit_out("{R0} PUSH_ALWAYS\t#_process_expression1\n");
  191. else if(AARCH64 == Architecture) emit_out("PUSH_X0\t#_process_expression1\n");
  192. else if(RISCV32 == Architecture) emit_out("rd_sp rs1_sp !-4 addi\nrs1_sp rs2_a0 sw\t#_process_expression1\n");
  193. else if(RISCV64 == Architecture) emit_out("rd_sp rs1_sp !-8 addi\nrs1_sp rs2_a0 sd\t#_process_expression1\n");
  194. passed = 1;
  195. while(global_token->s[0] == ',')
  196. {
  197. global_token = global_token->next;
  198. require(NULL != global_token, "incomplete function call, received EOF instead of argument\n");
  199. expression();
  200. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("PUSHR R0 R15\t#_process_expression2\n");
  201. else if(X86 == Architecture) emit_out("push_eax\t#_process_expression2\n");
  202. else if(AMD64 == Architecture) emit_out("push_rax\t#_process_expression2\n");
  203. else if(ARMV7L == Architecture) emit_out("{R0} PUSH_ALWAYS\t#_process_expression2\n");
  204. else if(AARCH64 == Architecture) emit_out("PUSH_X0\t#_process_expression2\n");
  205. else if(RISCV32 == Architecture) emit_out("rd_sp rs1_sp !-4 addi\nrs1_sp rs2_a0 sw\t#_process_expression2\n");
  206. else if(RISCV64 == Architecture) emit_out("rd_sp rs1_sp !-8 addi\nrs1_sp rs2_a0 sd\t#_process_expression2\n");
  207. passed = passed + 1;
  208. }
  209. }
  210. require_match("ERROR in process_expression_list\nNo ) was found\n", ")");
  211. if(TRUE == bool)
  212. {
  213. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  214. {
  215. emit_out("LOAD R0 R14 ");
  216. emit_out(s);
  217. emit_out("\nMOVE R14 R13\n");
  218. emit_out("CALL R0 R15\n");
  219. }
  220. else if(X86 == Architecture)
  221. {
  222. emit_out("lea_eax,[ebp+DWORD] %");
  223. emit_out(s);
  224. emit_out("\nmov_eax,[eax]\n");
  225. emit_out("mov_ebp,edi\n");
  226. emit_out("call_eax\n");
  227. }
  228. else if(AMD64 == Architecture)
  229. {
  230. emit_out("lea_rax,[rbp+DWORD] %");
  231. emit_out(s);
  232. emit_out("\nmov_rax,[rax]\n");
  233. emit_out("mov_rbp,rdi\n");
  234. emit_out("call_rax\n");
  235. }
  236. else if(ARMV7L == Architecture)
  237. {
  238. emit_out("!");
  239. emit_out(s);
  240. emit_out(" R0 SUB BP ARITH_ALWAYS\n");
  241. emit_out("!0 R0 LOAD32 R0 MEMORY\n");
  242. emit_out("{LR} PUSH_ALWAYS\t# Protect the old link register\n");
  243. emit_out("'0' R11 BP NO_SHIFT MOVE_ALWAYS\n");
  244. emit_out("'3' R0 CALL_REG_ALWAYS\n");
  245. emit_out("{LR} POP_ALWAYS\t# Prevent overwrite\n");
  246. }
  247. else if(AARCH64 == Architecture)
  248. {
  249. emit_out("SET_X0_FROM_BP\n");
  250. emit_out("LOAD_W1_AHEAD\nSKIP_32_DATA\n%");
  251. emit_out(s);
  252. emit_out("\nSUB_X0_X0_X1\n");
  253. emit_out("DEREF_X0\n");
  254. emit_out("SET_BP_FROM_X16\n");
  255. emit_out("SET_X16_FROM_X0\n");
  256. emit_out("BLR_X16\n");
  257. }
  258. else if(RISCV32 == Architecture)
  259. {
  260. emit_out("rd_a0 rs1_fp !");
  261. emit_out(s);
  262. emit_out(" addi\n");
  263. emit_out("rd_a0 rs1_a0 lw\n");
  264. emit_out("rd_fp rs1_tp mv\n");
  265. emit_out("rd_ra rs1_a0 jalr\n");
  266. }
  267. else if(RISCV64 == Architecture)
  268. {
  269. emit_out("rd_a0 rs1_fp !");
  270. emit_out(s);
  271. emit_out(" addi\n");
  272. emit_out("rd_a0 rs1_a0 ld\n");
  273. emit_out("rd_fp rs1_tp mv\n");
  274. emit_out("rd_ra rs1_a0 jalr\n");
  275. }
  276. }
  277. else
  278. {
  279. if((KNIGHT_NATIVE == Architecture) || (KNIGHT_POSIX == Architecture))
  280. {
  281. emit_out("MOVE R14 R13\n");
  282. emit_out("LOADR R0 4\nJUMP 4\n&FUNCTION_");
  283. emit_out(s);
  284. emit_out("\nCALL R0 R15\n");
  285. }
  286. else if(X86 == Architecture)
  287. {
  288. emit_out("mov_ebp,edi\n");
  289. emit_out("call %FUNCTION_");
  290. emit_out(s);
  291. emit_out("\n");
  292. }
  293. else if(AMD64 == Architecture)
  294. {
  295. emit_out("mov_rbp,rdi\n");
  296. emit_out("call %FUNCTION_");
  297. emit_out(s);
  298. emit_out("\n");
  299. }
  300. else if(ARMV7L == Architecture)
  301. {
  302. emit_out("{LR} PUSH_ALWAYS\t# Protect the old link register\n");
  303. emit_out("'0' R11 BP NO_SHIFT MOVE_ALWAYS\n");
  304. emit_out("^~FUNCTION_");
  305. emit_out(s);
  306. emit_out(" CALL_ALWAYS\n");
  307. emit_out("{LR} POP_ALWAYS\t# Restore the old link register\n");
  308. }
  309. else if(AARCH64 == Architecture)
  310. {
  311. emit_out("SET_BP_FROM_X16\n");
  312. emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&FUNCTION_");
  313. emit_out(s);
  314. emit_out("\n");
  315. emit_out("BLR_X16\n");
  316. }
  317. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  318. {
  319. emit_out("rd_fp rs1_tp mv\n");
  320. emit_out("rd_ra $FUNCTION_");
  321. emit_out(s);
  322. emit_out(" jal\n");
  323. }
  324. }
  325. for(; passed > 0; passed = passed - 1)
  326. {
  327. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("POPR R1 R15\t# _process_expression_locals\n");
  328. else if(X86 == Architecture) emit_out("pop_ebx\t# _process_expression_locals\n");
  329. else if(AMD64 == Architecture) emit_out("pop_rbx\t# _process_expression_locals\n");
  330. else if(ARMV7L == Architecture) emit_out("{R1} POP_ALWAYS\t# _process_expression_locals\n");
  331. else if(AARCH64 == Architecture) emit_out("POP_X1\t# _process_expression_locals\n");
  332. else if(RISCV32 == Architecture) emit_out("rd_a1 rs1_sp lw\t# _process_expression_locals\nrd_sp rs1_sp !4 addi\n");
  333. else if(RISCV64 == Architecture) emit_out("rd_a1 rs1_sp ld\t# _process_expression_locals\nrd_sp rs1_sp !8 addi\n");
  334. }
  335. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  336. {
  337. emit_out("POPR R14 R15\t# Restore old base pointer\n");
  338. emit_out("POPR R13 R15\t# Prevent overwrite\n");
  339. }
  340. else if(X86 == Architecture)
  341. {
  342. emit_out("pop_ebp\t# Restore old base pointer\n");
  343. emit_out("pop_edi\t# Prevent overwrite\n");
  344. }
  345. else if(AMD64 == Architecture)
  346. {
  347. emit_out("pop_rbp\t# Restore old base pointer\n");
  348. emit_out("pop_rdi\t# Prevent overwrite\n");
  349. }
  350. else if(ARMV7L == Architecture)
  351. {
  352. emit_out("{BP} POP_ALWAYS\t# Restore old base pointer\n");
  353. emit_out("{R11} POP_ALWAYS\t# Prevent overwrite\n");
  354. }
  355. else if(AARCH64 == Architecture)
  356. {
  357. emit_out("POP_BP\t# Restore the old base pointer\n");
  358. emit_out("POP_LR\t# Restore the old return pointer (link)\n");
  359. emit_out("POP_X16\t# Restore a register we used as tmp\n");
  360. }
  361. else if(RISCV32 == Architecture)
  362. {
  363. emit_out("rd_fp rs1_sp lw\t# Restore old frame pointer\n");
  364. emit_out("rd_tp rs1_sp !8 lw\t# Restore temp register\n");
  365. emit_out("rd_ra rs1_sp !4 lw\t# Restore return address\n");
  366. emit_out("rd_sp rs1_sp !12 addi\t# Deallocate stack\n");
  367. }
  368. else if(RISCV64 == Architecture)
  369. {
  370. emit_out("rd_fp rs1_sp ld\t# Restore old frame pointer\n");
  371. emit_out("rd_tp rs1_sp !16 ld\t# Restore temp register\n");
  372. emit_out("rd_ra rs1_sp !8 ld\t# Restore return address\n");
  373. emit_out("rd_sp rs1_sp !24 addi\t# Deallocate stack\n");
  374. }
  375. }
  376. void constant_load(char* s)
  377. {
  378. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("LOADI R0 ");
  379. else if(X86 == Architecture) emit_out("mov_eax, %");
  380. else if(AMD64 == Architecture) emit_out("mov_rax, %");
  381. else if(ARMV7L == Architecture) emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
  382. else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n%");
  383. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  384. {
  385. emit_out("rd_a0 ~");
  386. emit_out(s);
  387. emit_out(" lui\nrd_a0 rs1_a0 !");
  388. }
  389. emit_out(s);
  390. if(RISCV32 == Architecture) emit_out(" addi\n");
  391. else if(RISCV64 == Architecture) emit_out(" addiw\n");
  392. emit_out("\n");
  393. }
  394. char* load_value_signed(unsigned size)
  395. {
  396. if(size == 1)
  397. {
  398. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOAD8 R0 R0 0\n";
  399. else if(X86 == Architecture) return "movsx_eax,BYTE_PTR_[eax]\n";
  400. else if(AMD64 == Architecture) return "movsx_rax,BYTE_PTR_[rax]\n";
  401. else if(ARMV7L == Architecture) return "LOADS8 R0 LOAD R0 HALF_MEMORY\n";
  402. else if(AARCH64 == Architecture) return "LDRSB_X0_[X0]\n";
  403. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) return "rd_a0 rs1_a0 lb\n";
  404. }
  405. else if(size == 2)
  406. {
  407. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOAD16 R0 R0 0\n";
  408. else if(X86 == Architecture) return "movsx_eax,WORD_PTR_[eax]\n";
  409. else if(AMD64 == Architecture) return "movsx_rax,WORD_PTR_[rax]\n";
  410. else if(ARMV7L == Architecture) return "LOADS16 R0 LOAD R0 HALF_MEMORY\n";
  411. else if(AARCH64 == Architecture) return "LDRSH_X0_[X0]\n";
  412. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) return "rd_a0 rs1_a0 lh\n";
  413. }
  414. else if(size == 4)
  415. {
  416. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOAD R0 R0 0\n";
  417. else if(X86 == Architecture) return "mov_eax,[eax]\n";
  418. else if(AMD64 == Architecture) return "movsx_rax,DWORD_PTR_[rax]\n";
  419. else if(ARMV7L == Architecture) return "!0 R0 LOAD32 R0 MEMORY\n";
  420. else if(AARCH64 == Architecture) return "LDR_W0_[X0]\n";
  421. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) return "rd_a0 rs1_a0 lw\n";
  422. }
  423. else if(size == 8)
  424. {
  425. if(AMD64 == Architecture) return "mov_rax,[rax]\n";
  426. else if(AARCH64 == Architecture) return "DEREF_X0\n";
  427. else if(RISCV64 == Architecture) return "rd_a0 rs1_a0 ld\n";
  428. }
  429. line_error();
  430. fputs(" Got unsupported size ", stderr);
  431. fputs(int2str(size, 10, TRUE), stderr);
  432. fputs(" when trying to load value.\n", stderr);
  433. exit(EXIT_FAILURE);
  434. }
  435. char* load_value_unsigned(unsigned size)
  436. {
  437. if(size == 1)
  438. {
  439. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOADU8 R0 R0 0\n";
  440. else if(X86 == Architecture) return "movzx_eax,BYTE_PTR_[eax]\n";
  441. else if(AMD64 == Architecture) return "movzx_rax,BYTE_PTR_[rax]\n";
  442. else if(ARMV7L == Architecture) return "!0 R0 LOAD R0 MEMORY\n";
  443. else if(AARCH64 == Architecture) return "DEREF_X0_BYTE\n";
  444. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) return "rd_a0 rs1_a0 lbu\n";
  445. }
  446. else if(size == 2)
  447. {
  448. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOADU16 R0 R0 0\n";
  449. else if(X86 == Architecture) return "movzx_eax,WORD_PTR_[eax]\n";
  450. else if(AMD64 == Architecture) return "movzx_rax,WORD_PTR_[rax]\n";
  451. else if(ARMV7L == Architecture) return "NO_OFFSET R0 LOAD R0 HALF_MEMORY\n";
  452. else if(AARCH64 == Architecture) return "LDRH_W0_[X0]\n";
  453. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) return "rd_a0 rs1_a0 lhu\n";
  454. }
  455. else if(size == 4)
  456. {
  457. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "LOAD R0 R0 0\n";
  458. else if(X86 == Architecture) return "mov_eax,[eax]\n";
  459. else if(AMD64 == Architecture) return "mov_eax,[rax]\n";
  460. else if(ARMV7L == Architecture) return "!0 R0 LOAD32 R0 MEMORY\n";
  461. else if(AARCH64 == Architecture) return "LDR_W0_[X0]\n";
  462. else if(RISCV32 == Architecture) return "rd_a0 rs1_a0 lw\n";
  463. else if(RISCV64 == Architecture) return "rd_a0 rs1_a0 lwu\n";
  464. }
  465. else if(size == 8)
  466. {
  467. if(AMD64 == Architecture) return "mov_rax,[rax]\n";
  468. else if(AARCH64 == Architecture) return "DEREF_X0\n";
  469. else if(RISCV64 == Architecture) return "rd_a0 rs1_a0 ld\n";
  470. }
  471. line_error();
  472. fputs(" Got unsupported size ", stderr);
  473. fputs(int2str(size, 10, TRUE), stderr);
  474. fputs(" when trying to load value.\n", stderr);
  475. exit(EXIT_FAILURE);
  476. }
  477. char* load_value(unsigned size, int is_signed)
  478. {
  479. if(is_signed) return load_value_signed(size);
  480. return load_value_unsigned(size);
  481. }
  482. char* store_value(unsigned size)
  483. {
  484. if(size == 1)
  485. {
  486. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "STORE8 R0 R1 0\n";
  487. else if(X86 == Architecture) return "mov_[ebx],al\n";
  488. else if(AMD64 == Architecture) return "mov_[rbx],al\n";
  489. else if(ARMV7L == Architecture) return "!0 R0 STORE8 R1 MEMORY\n";
  490. else if(AARCH64 == Architecture) return "STR_BYTE_W0_[X1]\n";
  491. else if(RISCV32 == Architecture) return "rs1_a1 rs2_a0 sb\n";
  492. else if(RISCV64 == Architecture) return "rs1_a1 rs2_a0 sb\n";
  493. }
  494. else if(size == 2)
  495. {
  496. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "STORE16 R0 R1 0\n";
  497. else if(X86 == Architecture) return "mov_[ebx],ax\n";
  498. else if(AMD64 == Architecture) return "mov_[rbx],ax\n";
  499. else if(ARMV7L == Architecture) return "NO_OFFSET R0 STORE16 R1 HALF_MEMORY\n";
  500. else if(AARCH64 == Architecture) return "STRH_W0_[X1]\n";
  501. else if(RISCV32 == Architecture || RISCV64 == Architecture) return "rs1_a1 rs2_a0 sh\n";
  502. }
  503. else if(size == 4)
  504. {
  505. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) return "STORE R0 R1 0\n";
  506. else if(X86 == Architecture) return "mov_[ebx],eax\n";
  507. else if(AMD64 == Architecture) return "mov_[rbx],eax\n";
  508. else if(ARMV7L == Architecture) return "!0 R0 STORE32 R1 MEMORY\n";
  509. else if(AARCH64 == Architecture) return "STR_W0_[X1]\n";
  510. else if(RISCV32 == Architecture || RISCV64 == Architecture) return "rs1_a1 rs2_a0 sw\n";
  511. }
  512. else if(size == 8)
  513. {
  514. if(AMD64 == Architecture) return "mov_[rbx],rax\n";
  515. else if(AARCH64 == Architecture) return "STR_X0_[X1]\n";
  516. else if(RISCV64 == Architecture) return "rs1_a1 rs2_a0 sd\n";
  517. }
  518. /* Should not happen but print error message. */
  519. fputs("Got unsupported size ", stderr);
  520. fputs(int2str(size, 10, TRUE), stderr);
  521. fputs(" when storing number in register.\n", stderr);
  522. line_error();
  523. exit(EXIT_FAILURE);
  524. }
  525. int is_compound_assignment(char* token)
  526. {
  527. if(match("+=", token)) return TRUE;
  528. else if(match("-=", token)) return TRUE;
  529. else if(match("*=", token)) return TRUE;
  530. else if(match("/=", token)) return TRUE;
  531. else if(match("%=", token)) return TRUE;
  532. else if(match("<<=", token)) return TRUE;
  533. else if(match(">>=", token)) return TRUE;
  534. else if(match("&=", token)) return TRUE;
  535. else if(match("^=", token)) return TRUE;
  536. else if(match("|=", token)) return TRUE;
  537. return FALSE;
  538. }
  539. void postfix_expr_stub(void);
  540. void variable_load(struct token_list* a, int num_dereference)
  541. {
  542. require(NULL != global_token, "incomplete variable load received\n");
  543. if((match("FUNCTION", a->type->name) || match("FUNCTION*", a->type->name)) && match("(", global_token->s))
  544. {
  545. function_call(int2str(a->depth, 10, TRUE), TRUE);
  546. return;
  547. }
  548. current_target = a->type;
  549. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("ADDI R0 R14 ");
  550. else if(X86 == Architecture) emit_out("lea_eax,[ebp+DWORD] %");
  551. else if(AMD64 == Architecture) emit_out("lea_rax,[rbp+DWORD] %");
  552. else if(ARMV7L == Architecture) emit_out("!");
  553. else if(AARCH64 == Architecture) emit_out("SET_X0_FROM_BP\nLOAD_W1_AHEAD\nSKIP_32_DATA\n%");
  554. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_fp !");
  555. emit_out(int2str(a->depth, 10, TRUE));
  556. if(ARMV7L == Architecture) emit_out(" R0 SUB BP ARITH_ALWAYS");
  557. else if(AARCH64 == Architecture) emit_out("\nSUB_X0_X0_X1\n");
  558. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" addi");
  559. emit_out("\n");
  560. if(TRUE == Address_of) return;
  561. if(match(".", global_token->s))
  562. {
  563. postfix_expr_stub();
  564. return;
  565. }
  566. if(!match("=", global_token->s) && !is_compound_assignment(global_token->s))
  567. {
  568. emit_out(load_value(current_target->size, current_target->is_signed));
  569. }
  570. while (num_dereference > 0)
  571. {
  572. current_target = current_target->type;
  573. emit_out(load_value(current_target->size, current_target->is_signed));
  574. num_dereference = num_dereference - 1;
  575. }
  576. }
  577. void function_load(struct token_list* a)
  578. {
  579. require(NULL != global_token, "incomplete function load\n");
  580. if(match("(", global_token->s))
  581. {
  582. function_call(a->s, FALSE);
  583. return;
  584. }
  585. if((KNIGHT_NATIVE == Architecture) || (KNIGHT_POSIX == Architecture)) emit_out("LOADR R0 4\nJUMP 4\n&FUNCTION_");
  586. else if(X86 == Architecture) emit_out("mov_eax, &FUNCTION_");
  587. else if(AMD64 == Architecture) emit_out("lea_rax,[rip+DWORD] %FUNCTION_");
  588. else if(ARMV7L == Architecture) emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n&FUNCTION_");
  589. else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n&FUNCTION_");
  590. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 ~FUNCTION_");
  591. emit_out(a->s);
  592. if(RISCV32 == Architecture)
  593. {
  594. emit_out(" auipc\n");
  595. emit_out("rd_a0 rs1_a0 !FUNCTION_");
  596. emit_out(a->s);
  597. emit_out(" addi");
  598. }
  599. else if(RISCV64 == Architecture)
  600. {
  601. emit_out(" auipc\n");
  602. emit_out("rd_a0 rs1_a0 !FUNCTION_");
  603. emit_out(a->s);
  604. emit_out(" addiw");
  605. }
  606. emit_out("\n");
  607. }
  608. void global_load(struct token_list* a)
  609. {
  610. current_target = a->type;
  611. if((KNIGHT_NATIVE == Architecture) || (KNIGHT_POSIX == Architecture)) emit_out("LOADR R0 4\nJUMP 4\n&GLOBAL_");
  612. else if(X86 == Architecture) emit_out("mov_eax, &GLOBAL_");
  613. else if(AMD64 == Architecture) emit_out("lea_rax,[rip+DWORD] %GLOBAL_");
  614. else if(ARMV7L == Architecture) emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n&GLOBAL_");
  615. else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n&GLOBAL_");
  616. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 ~GLOBAL_");
  617. emit_out(a->s);
  618. if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  619. {
  620. emit_out(" auipc\n");
  621. emit_out("rd_a0 rs1_a0 !GLOBAL_");
  622. emit_out(a->s);
  623. emit_out(" addi");
  624. }
  625. emit_out("\n");
  626. require(NULL != global_token, "unterminated global load\n");
  627. if(TRUE == Address_of) return;
  628. if(match(".", global_token->s))
  629. {
  630. postfix_expr_stub();
  631. return;
  632. }
  633. if(match("=", global_token->s) || is_compound_assignment(global_token->s)) return;
  634. emit_out(load_value(register_size, current_target->is_signed));
  635. }
  636. /*
  637. * primary-expr:
  638. * FAILURE
  639. * "String"
  640. * 'Char'
  641. * [0-9]*
  642. * [a-z,A-Z]*
  643. * ( expression )
  644. */
  645. void primary_expr_failure(void)
  646. {
  647. require(NULL != global_token, "hit EOF when expecting primary expression\n");
  648. line_error();
  649. fputs("Received ", stderr);
  650. fputs(global_token->s, stderr);
  651. fputs(" in primary_expr\n", stderr);
  652. exit(EXIT_FAILURE);
  653. }
  654. void primary_expr_string(void)
  655. {
  656. char* number_string = int2str(current_count, 10, TRUE);
  657. current_count = current_count + 1;
  658. if((KNIGHT_NATIVE == Architecture) || (KNIGHT_POSIX == Architecture)) emit_out("LOADR R0 4\nJUMP 4\n&STRING_");
  659. else if(X86 == Architecture) emit_out("mov_eax, &STRING_");
  660. else if(AMD64 == Architecture) emit_out("lea_rax,[rip+DWORD] %STRING_");
  661. else if(ARMV7L == Architecture) emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n&STRING_");
  662. else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n&STRING_");
  663. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 ~STRING_");
  664. uniqueID_out(function->s, number_string);
  665. if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  666. {
  667. emit_out("auipc\n");
  668. emit_out("rd_a0 rs1_a0 !STRING_");
  669. uniqueID_out(function->s, number_string);
  670. emit_out("addi\n");
  671. }
  672. /* The target */
  673. strings_list = emit(":STRING_", strings_list);
  674. strings_list = uniqueID(function->s, strings_list, number_string);
  675. /* catch case of just "foo" from segfaulting */
  676. require(NULL != global_token->next, "a string by itself is not valid C\n");
  677. /* Parse the string */
  678. if('"' != global_token->next->s[0])
  679. {
  680. strings_list = emit(parse_string(global_token->s), strings_list);
  681. global_token = global_token->next;
  682. }
  683. else
  684. {
  685. char* s = calloc(MAX_STRING, sizeof(char));
  686. /* prefix leading string */
  687. s[0] = '"';
  688. int i = 1;
  689. int j;
  690. while('"' == global_token->s[0])
  691. {
  692. /* Step past the leading '"' */
  693. j = 1;
  694. /* Copy the rest of the string as is */
  695. while(0 != global_token->s[j])
  696. {
  697. require(i < MAX_STRING, "concat string exceeded max string length\n");
  698. s[i] = global_token->s[j];
  699. i = i + 1;
  700. j = j + 1;
  701. }
  702. /* Move on to the next token */
  703. global_token = global_token->next;
  704. require(NULL != global_token, "multi-string null is not valid C\n");
  705. }
  706. /* Now use it */
  707. strings_list = emit(parse_string(s), strings_list);
  708. }
  709. }
  710. void primary_expr_char(void)
  711. {
  712. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("LOADI R0 ");
  713. else if(X86 == Architecture) emit_out("mov_eax, %");
  714. else if(AMD64 == Architecture) emit_out("mov_rax, %");
  715. else if(ARMV7L == Architecture) emit_out("!");
  716. else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n%");
  717. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 !");
  718. emit_out(int2str(escape_lookup(global_token->s + 1), 10, TRUE));
  719. if(ARMV7L == Architecture) emit_out(" R0 LOADI8_ALWAYS");
  720. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" addi");
  721. emit_out("\n");
  722. global_token = global_token->next;
  723. }
  724. int hex2char(int c)
  725. {
  726. if((c >= 0) && (c <= 9)) return (c + 48);
  727. else if((c >= 10) && (c <= 15)) return (c + 55);
  728. else return -1;
  729. }
  730. char* number_to_hex(int a, int bytes)
  731. {
  732. require(bytes > 0, "number to hex must have a positive number of bytes greater than zero\n");
  733. char* result = calloc(1 + (bytes << 1), sizeof(char));
  734. if(NULL == result)
  735. {
  736. fputs("calloc failed in number_to_hex\n", stderr);
  737. exit(EXIT_FAILURE);
  738. }
  739. int i = 0;
  740. int divisor = (bytes << 3);
  741. require(divisor > 0, "unexpected wrap around in number_to_hex\n");
  742. /* Simply collect numbers until divisor is gone */
  743. while(0 != divisor)
  744. {
  745. divisor = divisor - 4;
  746. result[i] = hex2char((a >> divisor) & 0xF);
  747. i = i + 1;
  748. }
  749. return result;
  750. }
  751. void primary_expr_number(char* s)
  752. {
  753. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  754. {
  755. int size = strtoint(s);
  756. if((32767 > size) && (size > -32768))
  757. {
  758. emit_out("LOADI R0 ");
  759. emit_out(s);
  760. }
  761. else
  762. {
  763. emit_out("LOADR R0 4\nJUMP 4\n'");
  764. emit_out(number_to_hex(size, register_size));
  765. emit_out("'");
  766. }
  767. }
  768. else if(X86 == Architecture)
  769. {
  770. emit_out("mov_eax, %");
  771. emit_out(s);
  772. }
  773. else if(AMD64 == Architecture)
  774. {
  775. emit_out("mov_rax, %");
  776. emit_out(s);
  777. }
  778. else if(ARMV7L == Architecture)
  779. {
  780. emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
  781. emit_out(s);
  782. }
  783. else if(AARCH64 == Architecture)
  784. {
  785. emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n%");
  786. emit_out(s);
  787. }
  788. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  789. {
  790. int size = strtoint(s);
  791. if((2047 > size) && (size > -2048))
  792. {
  793. emit_out("rd_a0 !");
  794. emit_out(s);
  795. emit_out(" addi");
  796. }
  797. else if (0 == (size >> 30))
  798. {
  799. emit_out("rd_a0 ~");
  800. emit_out(s);
  801. emit_out(" lui\n");
  802. emit_out("rd_a0 rs1_a0 !");
  803. emit_out(s);
  804. emit_out(" addi");
  805. }
  806. else
  807. {
  808. int high = size >> 30;
  809. int low = ((size >> 30) << 30) ^ size;
  810. emit_out("rd_a0 ~");
  811. emit_out(int2str(high, 10, TRUE));
  812. emit_out(" lui\n");
  813. emit_out("rd_a0 rs1_a0 !");
  814. emit_out(int2str(high, 10, TRUE));
  815. emit_out(" addi\n");
  816. emit_out("rd_a0 rs1_a0 rs2_x30 slli\n");
  817. emit_out("rd_t1 ~");
  818. emit_out(int2str(low, 10, TRUE));
  819. emit_out(" lui\n");
  820. emit_out("rd_t1 rs1_t1 !");
  821. emit_out(int2str(low, 10, TRUE));
  822. emit_out(" addi\n");
  823. emit_out("rd_a0 rs1_a0 rs2_t1 or\n");
  824. }
  825. }
  826. emit_out("\n");
  827. }
  828. void primary_expr_variable(void)
  829. {
  830. int num_dereference = 0;
  831. while(global_token->s[0] == '*') {
  832. global_token = global_token->next;
  833. require(NULL != global_token, "Walked off the end of a variable dereference\n");
  834. num_dereference = num_dereference + 1;
  835. }
  836. char* s = global_token->s;
  837. global_token = global_token->next;
  838. struct token_list* a = sym_lookup(s, global_constant_list);
  839. if(NULL != a)
  840. {
  841. constant_load(a->arguments->s);
  842. return;
  843. }
  844. a = sym_lookup(s, function->locals);
  845. if(NULL != a)
  846. {
  847. variable_load(a, num_dereference);
  848. return;
  849. }
  850. a = sym_lookup(s, function->arguments);
  851. if(NULL != a)
  852. {
  853. variable_load(a, num_dereference);
  854. return;
  855. }
  856. a = sym_lookup(s, global_function_list);
  857. if(NULL != a)
  858. {
  859. function_load(a);
  860. return;
  861. }
  862. a = sym_lookup(s, global_symbol_list);
  863. if(NULL != a)
  864. {
  865. global_load(a);
  866. return;
  867. }
  868. line_error();
  869. fputs(s ,stderr);
  870. fputs(" is not a defined symbol\n", stderr);
  871. exit(EXIT_FAILURE);
  872. }
  873. void primary_expr(void);
  874. struct type* promote_type(struct type* a, struct type* b)
  875. {
  876. require(NULL != b, "impossible case 1 in promote_type\n");
  877. require(NULL != a, "impossible case 2 in promote_type\n");
  878. if(a == b) return a;
  879. struct type* i;
  880. for(i = global_types; NULL != i; i = i->next)
  881. {
  882. if(a->name == i->name) break;
  883. if(b->name == i->name) break;
  884. if(a->name == i->indirect->name) break;
  885. if(b->name == i->indirect->name) break;
  886. if(a->name == i->indirect->indirect->name) break;
  887. if(b->name == i->indirect->indirect->name) break;
  888. }
  889. require(NULL != i, "impossible case 3 in promote_type\n");
  890. return i;
  891. }
  892. void common_recursion(FUNCTION f)
  893. {
  894. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("PUSHR R0 R15\t#_common_recursion\n");
  895. else if(X86 == Architecture) emit_out("push_eax\t#_common_recursion\n");
  896. else if(AMD64 == Architecture) emit_out("push_rax\t#_common_recursion\n");
  897. else if(ARMV7L == Architecture) emit_out("{R0} PUSH_ALWAYS\t#_common_recursion\n");
  898. else if(AARCH64 == Architecture) emit_out("PUSH_X0\t#_common_recursion\n");
  899. else if(RISCV32 == Architecture) emit_out("rd_sp rs1_sp !-4 addi\t# _common_recursion\nrs1_sp rs2_a0 sw\n");
  900. else if(RISCV64 == Architecture) emit_out("rd_sp rs1_sp !-8 addi\t# _common_recursion\nrs1_sp rs2_a0 sd\n");
  901. struct type* last_type = current_target;
  902. global_token = global_token->next;
  903. require(NULL != global_token, "Received EOF in common_recursion\n");
  904. f();
  905. current_target = promote_type(current_target, last_type);
  906. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("POPR R1 R15\t# _common_recursion\n");
  907. else if(X86 == Architecture) emit_out("pop_ebx\t# _common_recursion\n");
  908. else if(AMD64 == Architecture) emit_out("pop_rbx\t# _common_recursion\n");
  909. else if(ARMV7L == Architecture) emit_out("{R1} POP_ALWAYS\t# _common_recursion\n");
  910. else if(AARCH64 == Architecture) emit_out("POP_X1\t# _common_recursion\n");
  911. else if(RISCV32 == Architecture) emit_out("rd_a1 rs1_sp lw\nrd_sp rs1_sp !4 addi\t# _common_recursion\n");
  912. else if(RISCV64 == Architecture) emit_out("rd_a1 rs1_sp ld\nrd_sp rs1_sp !8 addi\t# _common_recursion\n");
  913. }
  914. void general_recursion(FUNCTION f, char* s, char* name, FUNCTION iterate)
  915. {
  916. require(NULL != global_token, "Received EOF in general_recursion\n");
  917. if(match(name, global_token->s))
  918. {
  919. common_recursion(f);
  920. emit_out(s);
  921. iterate();
  922. }
  923. }
  924. void arithmetic_recursion(FUNCTION f, char* s1, char* s2, char* name, FUNCTION iterate)
  925. {
  926. require(NULL != global_token, "Received EOF in arithmetic_recursion\n");
  927. if(match(name, global_token->s))
  928. {
  929. common_recursion(f);
  930. if(NULL == current_target)
  931. {
  932. emit_out(s1);
  933. }
  934. else if(current_target->is_signed)
  935. {
  936. emit_out(s1);
  937. }
  938. else
  939. {
  940. emit_out(s2);
  941. }
  942. iterate();
  943. }
  944. }
  945. /*
  946. * postfix-expr:
  947. * primary-expr
  948. * postfix-expr [ expression ]
  949. * postfix-expr ( expression-list-opt )
  950. * postfix-expr -> member
  951. * postfix-expr . member
  952. */
  953. struct type* lookup_member(struct type* parent, char* name);
  954. void postfix_expr_arrow(void)
  955. {
  956. emit_out("# looking up offset\n");
  957. global_token = global_token->next;
  958. require(NULL != global_token, "naked -> not allowed\n");
  959. struct type* i = lookup_member(current_target, global_token->s);
  960. current_target = i->type;
  961. global_token = global_token->next;
  962. require(NULL != global_token, "Unterminated -> expression not allowed\n");
  963. if(0 != i->offset)
  964. {
  965. emit_out("# -> offset calculation\n");
  966. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  967. {
  968. emit_out("ADDUI R0 R0 ");
  969. emit_out(int2str(i->offset, 10, TRUE));
  970. emit_out("\n");
  971. }
  972. else if(X86 == Architecture)
  973. {
  974. emit_out("mov_ebx, %");
  975. emit_out(int2str(i->offset, 10, TRUE));
  976. emit_out("\nadd_eax,ebx\n");
  977. }
  978. else if(AMD64 == Architecture)
  979. {
  980. emit_out("mov_rbx, %");
  981. emit_out(int2str(i->offset, 10, TRUE));
  982. emit_out("\nadd_rax,rbx\n");
  983. }
  984. else if(ARMV7L == Architecture)
  985. {
  986. emit_out("!0 R1 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
  987. emit_out(int2str(i->offset, 10, TRUE));
  988. emit_out("\n'0' R0 R0 ADD R1 ARITH2_ALWAYS\n");
  989. }
  990. else if(AARCH64 == Architecture)
  991. {
  992. emit_out("LOAD_W1_AHEAD\nSKIP_32_DATA\n%");
  993. emit_out(int2str(i->offset, 10, TRUE));
  994. emit_out("\nADD_X0_X1_X0\n");
  995. }
  996. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  997. {
  998. emit_out("rd_a1 !");
  999. emit_out(int2str(i->offset, 10, TRUE));
  1000. emit_out(" addi\n");
  1001. emit_out("rd_a0 rs1_a1 rs2_a0 add\n");
  1002. }
  1003. }
  1004. /* We don't yet support assigning structs to structs */
  1005. if((!match("=", global_token->s) && !is_compound_assignment(global_token->s) && (register_size >= i->size)))
  1006. {
  1007. emit_out(load_value(i->size, i->is_signed));
  1008. }
  1009. }
  1010. void postfix_expr_dot(void)
  1011. {
  1012. maybe_bootstrap_error("Member access using .");
  1013. emit_out("# looking up offset\n");
  1014. global_token = global_token->next;
  1015. require(NULL != global_token, "naked . not allowed\n");
  1016. struct type* i = lookup_member(current_target, global_token->s);
  1017. current_target = i->type;
  1018. global_token = global_token->next;
  1019. require(NULL != global_token, "Unterminated . expression not allowed\n");
  1020. if(0 != i->offset)
  1021. {
  1022. emit_out("# . offset calculation\n");
  1023. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1024. {
  1025. emit_out("ADDUI R0 R0 ");
  1026. emit_out(int2str(i->offset, 10, TRUE));
  1027. emit_out("\n");
  1028. }
  1029. else if(X86 == Architecture)
  1030. {
  1031. emit_out("mov_ebx, %");
  1032. emit_out(int2str(i->offset, 10, TRUE));
  1033. emit_out("\nadd_eax,ebx\n");
  1034. }
  1035. else if(AMD64 == Architecture)
  1036. {
  1037. emit_out("mov_rbx, %");
  1038. emit_out(int2str(i->offset, 10, TRUE));
  1039. emit_out("\nadd_rax,rbx\n");
  1040. }
  1041. else if(ARMV7L == Architecture)
  1042. {
  1043. emit_out("!0 R1 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
  1044. emit_out(int2str(i->offset, 10, TRUE));
  1045. emit_out("\n'0' R0 R0 ADD R1 ARITH2_ALWAYS\n");
  1046. }
  1047. else if(AARCH64 == Architecture)
  1048. {
  1049. emit_out("LOAD_W1_AHEAD\nSKIP_32_DATA\n%");
  1050. emit_out(int2str(i->offset, 10, TRUE));
  1051. emit_out("\nADD_X0_X1_X0\n");
  1052. }
  1053. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1054. {
  1055. emit_out("rd_a1 !");
  1056. emit_out(int2str(i->offset, 10, TRUE));
  1057. emit_out(" addi\n");
  1058. emit_out("rd_a0 rs1_a1 rs2_a0 add\n");
  1059. }
  1060. }
  1061. if(match("=", global_token->s) || is_compound_assignment(global_token->s)) return;
  1062. if(match("[", global_token->s)) return;
  1063. emit_out(load_value(current_target->size, current_target->is_signed));
  1064. }
  1065. void postfix_expr_array(void)
  1066. {
  1067. struct type* array = current_target;
  1068. common_recursion(expression);
  1069. current_target = array;
  1070. require(NULL != current_target, "Arrays only apply to variables\n");
  1071. char* assign = load_value(register_size, current_target->is_signed);
  1072. /* Add support for Ints */
  1073. if(match("char*", current_target->name))
  1074. {
  1075. assign = load_value(1, TRUE);
  1076. }
  1077. else
  1078. {
  1079. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("PUSHR R1 R15\nLOADI R1 ");
  1080. else if(X86 == Architecture) emit_out("push_ebx\nmov_ebx, %");
  1081. else if(AMD64 == Architecture) emit_out("push_rbx\nmov_rbx, %");
  1082. else if(ARMV7L == Architecture) emit_out("{R1} PUSH_ALWAYS\n!0 R1 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
  1083. else if(AARCH64 == Architecture) emit_out("PUSH_X1\nLOAD_W1_AHEAD\nSKIP_32_DATA\n%");
  1084. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a2 rs1_a1 addi\nrd_a1 !");
  1085. emit_out(int2str(current_target->type->size, 10, TRUE));
  1086. if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" addi");
  1087. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("\nMULU R0 R1 R0\nPOPR R1 R15\n");
  1088. else if(X86 == Architecture) emit_out("\nmul_ebx\npop_ebx\n");
  1089. else if(AMD64 == Architecture) emit_out("\nmul_rbx\npop_rbx\n");
  1090. else if(ARMV7L == Architecture) emit_out("\n'9' R0 '0' R1 MUL R0 ARITH2_ALWAYS\n{R1} POP_ALWAYS\n");
  1091. else if(AARCH64 == Architecture) emit_out("\nMUL_X0_X1_X0\nPOP_X1\n");
  1092. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("\nrd_a0 rs1_a1 rs2_a0 mul\nrd_a1 rs1_a2 addi\n");
  1093. }
  1094. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("ADD R0 R0 R1\n");
  1095. else if(X86 == Architecture) emit_out("add_eax,ebx\n");
  1096. else if(AMD64 == Architecture) emit_out("add_rax,rbx\n");
  1097. else if(ARMV7L == Architecture) emit_out("'0' R0 R0 ADD R1 ARITH2_ALWAYS\n");
  1098. else if(AARCH64 == Architecture) emit_out("ADD_X0_X1_X0\n");
  1099. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_a1 rs2_a0 add\n");
  1100. require_match("ERROR in postfix_expr\nMissing ]\n", "]");
  1101. require(NULL != global_token, "truncated array expression\n");
  1102. if(match("=", global_token->s) || is_compound_assignment(global_token->s) || match(".", global_token->s))
  1103. {
  1104. assign = "";
  1105. }
  1106. if(match("[", global_token->s))
  1107. {
  1108. current_target = current_target->type;
  1109. }
  1110. emit_out(assign);
  1111. }
  1112. /*
  1113. * unary-expr:
  1114. * &postfix-expr
  1115. * - postfix-expr
  1116. * !postfix-expr
  1117. * sizeof ( type )
  1118. */
  1119. struct type* type_name(void);
  1120. void unary_expr_sizeof(void)
  1121. {
  1122. global_token = global_token->next;
  1123. require(NULL != global_token, "Received EOF when starting sizeof\n");
  1124. require_match("ERROR in unary_expr\nMissing (\n", "(");
  1125. struct type* a = type_name();
  1126. require_match("ERROR in unary_expr\nMissing )\n", ")");
  1127. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("LOADUI R0 ");
  1128. else if(X86 == Architecture) emit_out("mov_eax, %");
  1129. else if(AMD64 == Architecture) emit_out("mov_rax, %");
  1130. else if(ARMV7L == Architecture) emit_out("!0 R0 LOAD32 R15 MEMORY\n~0 JUMP_ALWAYS\n%");
  1131. else if(AARCH64 == Architecture) emit_out("LOAD_W0_AHEAD\nSKIP_32_DATA\n%");
  1132. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 !");
  1133. emit_out(int2str(a->size, 10, TRUE));
  1134. if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" addi");
  1135. emit_out("\n");
  1136. }
  1137. void postfix_expr_stub(void)
  1138. {
  1139. require(NULL != global_token, "Unexpected EOF, improperly terminated primary expression\n");
  1140. if(match("[", global_token->s))
  1141. {
  1142. postfix_expr_array();
  1143. postfix_expr_stub();
  1144. }
  1145. if(match("->", global_token->s))
  1146. {
  1147. postfix_expr_arrow();
  1148. postfix_expr_stub();
  1149. }
  1150. if(match(".", global_token->s))
  1151. {
  1152. postfix_expr_dot();
  1153. postfix_expr_stub();
  1154. }
  1155. }
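/* postfix_expr_stub recurses on itself, so chained suffixes such as
 * a[i]->next.value are consumed left to right, one operator at a time. */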
  1156. void postfix_expr(void)
  1157. {
  1158. primary_expr();
  1159. postfix_expr_stub();
  1160. }
  1161. /*
  1162. * additive-expr:
  1163. * postfix-expr
  1164. * additive-expr * postfix-expr
  1165. * additive-expr / postfix-expr
  1166. * additive-expr % postfix-expr
  1167. * additive-expr + postfix-expr
  1168. * additive-expr - postfix-expr
  1169. * additive-expr << postfix-expr
  1170. * additive-expr >> postfix-expr
  1171. */
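/* Despite the single "additive-expr" label above, these operators are split
 * into three precedence tiers: additive_expr_stub_a handles * / %,
 * additive_expr_stub_b handles + and -, and additive_expr_stub_c handles
 * << and >>, so multiplication binds tighter than addition, which binds
 * tighter than shifts. */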
  1172. void additive_expr_stub_a(void)
  1173. {
  1174. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1175. {
  1176. arithmetic_recursion(postfix_expr, "MUL R0 R1 R0\n", "MULU R0 R1 R0\n", "*", additive_expr_stub_a);
  1177. arithmetic_recursion(postfix_expr, "DIV R0 R1 R0\n", "DIVU R0 R1 R0\n", "/", additive_expr_stub_a);
  1178. arithmetic_recursion(postfix_expr, "MOD R0 R1 R0\n", "MODU R0 R1 R0\n", "%", additive_expr_stub_a);
  1179. }
  1180. else if(X86 == Architecture)
  1181. {
  1182. arithmetic_recursion(postfix_expr, "imul_ebx\n", "mul_ebx\n", "*", additive_expr_stub_a);
  1183. arithmetic_recursion(postfix_expr, "xchg_ebx,eax\ncdq\nidiv_ebx\n", "xchg_ebx,eax\nmov_edx, %0\ndiv_ebx\n", "/", additive_expr_stub_a);
  1184. arithmetic_recursion(postfix_expr, "xchg_ebx,eax\ncdq\nidiv_ebx\nmov_eax,edx\n", "xchg_ebx,eax\nmov_edx, %0\ndiv_ebx\nmov_eax,edx\n", "%", additive_expr_stub_a);
  1185. }
  1186. else if(AMD64 == Architecture)
  1187. {
  1188. arithmetic_recursion(postfix_expr, "imul_rbx\n", "mul_rbx\n", "*", additive_expr_stub_a);
  1189. arithmetic_recursion(postfix_expr, "xchg_rbx,rax\ncqo\nidiv_rbx\n", "xchg_rbx,rax\nmov_rdx, %0\ndiv_rbx\n", "/", additive_expr_stub_a);
  1190. arithmetic_recursion(postfix_expr, "xchg_rbx,rax\ncqo\nidiv_rbx\nmov_rax,rdx\n", "xchg_rbx,rax\nmov_rdx, %0\ndiv_rbx\nmov_rax,rdx\n", "%", additive_expr_stub_a);
  1191. }
  1192. else if(ARMV7L == Architecture)
  1193. {
  1194. arithmetic_recursion(postfix_expr, "'9' R0 '0' R1 MULS R0 ARITH2_ALWAYS\n", "'9' R0 '0' R1 MUL R0 ARITH2_ALWAYS\n", "*", additive_expr_stub_a);
  1195. arithmetic_recursion(postfix_expr, "{LR} PUSH_ALWAYS\n^~divides CALL_ALWAYS\n{LR} POP_ALWAYS\n", "{LR} PUSH_ALWAYS\n^~divide CALL_ALWAYS\n{LR} POP_ALWAYS\n", "/", additive_expr_stub_a);
  1196. arithmetic_recursion(postfix_expr, "{LR} PUSH_ALWAYS\n^~moduluss CALL_ALWAYS\n{LR} POP_ALWAYS\n", "{LR} PUSH_ALWAYS\n^~modulus CALL_ALWAYS\n{LR} POP_ALWAYS\n", "%", additive_expr_stub_a);
  1197. }
  1198. else if(AARCH64 == Architecture)
  1199. {
  1200. general_recursion(postfix_expr, "MUL_X0_X1_X0\n", "*", additive_expr_stub_a);
  1201. arithmetic_recursion(postfix_expr, "SDIV_X0_X1_X0\n", "UDIV_X0_X1_X0\n", "/", additive_expr_stub_a);
  1202. arithmetic_recursion(postfix_expr, "SDIV_X2_X1_X0\nMSUB_X0_X0_X2_X1\n", "UDIV_X2_X1_X0\nMSUB_X0_X0_X2_X1\n", "%", additive_expr_stub_a);
  1203. }
  1204. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1205. {
  1206. general_recursion(postfix_expr, "rd_a0 rs1_a1 rs2_a0 mul\n", "*", additive_expr_stub_a);
  1207. arithmetic_recursion(postfix_expr, "rd_a0 rs1_a1 rs2_a0 div\n", "rd_a0 rs1_a1 rs2_a0 divu\n", "/", additive_expr_stub_a);
  1208. arithmetic_recursion(postfix_expr, "rd_a0 rs1_a1 rs2_a0 rem\n", "rd_a0 rs1_a1 rs2_a0 remu\n", "%", additive_expr_stub_a);
  1209. }
  1210. }
  1211. void additive_expr_a(void)
  1212. {
  1213. postfix_expr();
  1214. additive_expr_stub_a();
  1215. }
  1216. void additive_expr_stub_b(void)
  1217. {
  1218. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1219. {
  1220. arithmetic_recursion(additive_expr_a, "ADD R0 R1 R0\n", "ADDU R0 R1 R0\n", "+", additive_expr_stub_b);
  1221. arithmetic_recursion(additive_expr_a, "SUB R0 R1 R0\n", "SUBU R0 R1 R0\n", "-", additive_expr_stub_b);
  1222. }
  1223. else if(X86 == Architecture)
  1224. {
  1225. arithmetic_recursion(additive_expr_a, "add_eax,ebx\n", "add_eax,ebx\n", "+", additive_expr_stub_b);
  1226. arithmetic_recursion(additive_expr_a, "sub_ebx,eax\nmov_eax,ebx\n", "sub_ebx,eax\nmov_eax,ebx\n", "-", additive_expr_stub_b);
  1227. }
  1228. else if(AMD64 == Architecture)
  1229. {
  1230. arithmetic_recursion(additive_expr_a, "add_rax,rbx\n", "add_rax,rbx\n", "+", additive_expr_stub_b);
  1231. arithmetic_recursion(additive_expr_a, "sub_rbx,rax\nmov_rax,rbx\n", "sub_rbx,rax\nmov_rax,rbx\n", "-", additive_expr_stub_b);
  1232. }
  1233. else if(ARMV7L == Architecture)
  1234. {
  1235. arithmetic_recursion(additive_expr_a, "'0' R0 R0 ADD R1 ARITH2_ALWAYS\n", "'0' R0 R0 ADD R1 ARITH2_ALWAYS\n", "+", additive_expr_stub_b);
  1236. arithmetic_recursion(additive_expr_a, "'0' R0 R0 SUB R1 ARITH2_ALWAYS\n", "'0' R0 R0 SUB R1 ARITH2_ALWAYS\n", "-", additive_expr_stub_b);
  1237. }
  1238. else if(AARCH64 == Architecture)
  1239. {
  1240. general_recursion(additive_expr_a, "ADD_X0_X1_X0\n", "+", additive_expr_stub_b);
  1241. general_recursion(additive_expr_a, "SUB_X0_X1_X0\n", "-", additive_expr_stub_b);
  1242. }
  1243. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1244. {
  1245. general_recursion(additive_expr_a, "rd_a0 rs1_a1 rs2_a0 add\n", "+", additive_expr_stub_b);
  1246. general_recursion(additive_expr_a, "rd_a0 rs1_a1 rs2_a0 sub\n", "-", additive_expr_stub_b);
  1247. }
  1248. }
  1249. void additive_expr_b(void)
  1250. {
  1251. additive_expr_a();
  1252. additive_expr_stub_b();
  1253. }
  1254. void additive_expr_stub_c(void)
  1255. {
  1256. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1257. {
  1258. arithmetic_recursion(additive_expr_b, "SAL R0 R1 R0\n", "SL0 R0 R1 R0\n", "<<", additive_expr_stub_c);
  1259. arithmetic_recursion(additive_expr_b, "SAR R0 R1 R0\n", "SR0 R0 R1 R0\n", ">>", additive_expr_stub_c);
  1260. }
  1261. else if(X86 == Architecture)
  1262. {
  1263. arithmetic_recursion(additive_expr_b, "mov_ecx,eax\nmov_eax,ebx\nsal_eax,cl\n", "mov_ecx,eax\nmov_eax,ebx\nshl_eax,cl\n", "<<", additive_expr_stub_c);
  1264. arithmetic_recursion(additive_expr_b, "mov_ecx,eax\nmov_eax,ebx\nsar_eax,cl\n", "mov_ecx,eax\nmov_eax,ebx\nshr_eax,cl\n", ">>", additive_expr_stub_c);
  1265. }
  1266. else if(AMD64 == Architecture)
  1267. {
  1268. arithmetic_recursion(additive_expr_b, "mov_rcx,rax\nmov_rax,rbx\nsal_rax,cl\n", "mov_rcx,rax\nmov_rax,rbx\nshl_rax,cl\n", "<<", additive_expr_stub_c);
  1269. arithmetic_recursion(additive_expr_b, "mov_rcx,rax\nmov_rax,rbx\nsar_rax,cl\n", "mov_rcx,rax\nmov_rax,rbx\nshr_rax,cl\n", ">>", additive_expr_stub_c);
  1270. }
  1271. else if(ARMV7L == Architecture)
  1272. {
  1273. arithmetic_recursion(additive_expr_b, "LEFT R1 R0 R0 SHIFT AUX_ALWAYS\n", "LEFT R1 R0 R0 SHIFT AUX_ALWAYS\n", "<<", additive_expr_stub_c);
  1274. arithmetic_recursion(additive_expr_b, "ARITH_RIGHT R1 R0 R0 SHIFT AUX_ALWAYS\n", "RIGHT R1 R0 R0 SHIFT AUX_ALWAYS\n", ">>", additive_expr_stub_c);
  1275. }
  1276. else if(AARCH64 == Architecture)
  1277. {
  1278. general_recursion(additive_expr_b, "LSHIFT_X0_X1_X0\n", "<<", additive_expr_stub_c);
  1279. arithmetic_recursion(additive_expr_b, "ARITH_RSHIFT_X0_X1_X0\n", "LOGICAL_RSHIFT_X0_X1_X0\n", ">>", additive_expr_stub_c);
  1280. }
  1281. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1282. {
  1283. general_recursion(additive_expr_b, "rd_a0 rs1_a1 rs2_a0 sll\n", "<<", additive_expr_stub_c);
  1284. arithmetic_recursion(additive_expr_b, "rd_a0 rs1_a1 rs2_a0 sra\n", "rd_a0 rs1_a1 rs2_a0 srl\n", ">>", additive_expr_stub_c);
  1285. }
  1286. }
  1287. void additive_expr_c(void)
  1288. {
  1289. additive_expr_b();
  1290. additive_expr_stub_c();
  1291. }
/*
 * relational-expr:
 * additive_expr
 * relational-expr < additive_expr
 * relational-expr <= additive_expr
 * relational-expr >= additive_expr
 * relational-expr > additive_expr
 * relational-expr == additive_expr
 * relational-expr != additive_expr
 */
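/* Every comparison leaves a canonical 0 or 1 in the accumulator (for example
 * on x86: cmp ; setl_al ; movzx_eax,al). arithmetic_recursion is given both a
 * signed and an unsigned sequence; presumably the signedness of the operands
 * decides which of the two is emitted. */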
  1300. void relational_expr_stub(void)
  1301. {
  1302. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1303. {
  1304. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.L R0 R0 1\n", "CMPU R0 R1 R0\nSET.L R0 R0 1\n", "<", relational_expr_stub);
  1305. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.LE R0 R0 1\n", "CMPU R0 R1 R0\nSET.LE R0 R0 1\n", "<=", relational_expr_stub);
  1306. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.GE R0 R0 1\n", "CMPU R0 R1 R0\nSET.GE R0 R0 1\n", ">=", relational_expr_stub);
  1307. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.G R0 R0 1\n", "CMPU R0 R1 R0\nSET.G R0 R0 1\n", ">", relational_expr_stub);
  1308. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.E R0 R0 1\n", "CMPU R0 R1 R0\nSET.E R0 R0 1\n", "==", relational_expr_stub);
  1309. arithmetic_recursion(additive_expr_c, "CMP R0 R1 R0\nSET.NE R0 R0 1\n", "CMPU R0 R1 R0\nSET.NE R0 R0 1\n", "!=", relational_expr_stub);
  1310. }
  1311. else if(X86 == Architecture)
  1312. {
  1313. arithmetic_recursion(additive_expr_c, "cmp\nsetl_al\nmovzx_eax,al\n", "cmp\nsetb_al\nmovzx_eax,al\n", "<", relational_expr_stub);
  1314. arithmetic_recursion(additive_expr_c, "cmp\nsetle_al\nmovzx_eax,al\n", "cmp\nsetbe_al\nmovzx_eax,al\n", "<=", relational_expr_stub);
  1315. arithmetic_recursion(additive_expr_c, "cmp\nsetge_al\nmovzx_eax,al\n", "cmp\nsetae_al\nmovzx_eax,al\n", ">=", relational_expr_stub);
  1316. arithmetic_recursion(additive_expr_c, "cmp\nsetg_al\nmovzx_eax,al\n", "cmp\nseta_al\nmovzx_eax,al\n", ">", relational_expr_stub);
  1317. general_recursion(additive_expr_c, "cmp\nsete_al\nmovzx_eax,al\n", "==", relational_expr_stub);
  1318. general_recursion(additive_expr_c, "cmp\nsetne_al\nmovzx_eax,al\n", "!=", relational_expr_stub);
  1319. }
  1320. else if(AMD64 == Architecture)
  1321. {
  1322. arithmetic_recursion(additive_expr_c, "cmp_rbx,rax\nsetl_al\nmovzx_rax,al\n", "cmp_rbx,rax\nsetb_al\nmovzx_rax,al\n", "<", relational_expr_stub);
  1323. arithmetic_recursion(additive_expr_c, "cmp_rbx,rax\nsetle_al\nmovzx_rax,al\n", "cmp_rbx,rax\nsetbe_al\nmovzx_rax,al\n", "<=", relational_expr_stub);
  1324. arithmetic_recursion(additive_expr_c, "cmp_rbx,rax\nsetge_al\nmovzx_rax,al\n", "cmp_rbx,rax\nsetae_al\nmovzx_rax,al\n", ">=", relational_expr_stub);
  1325. arithmetic_recursion(additive_expr_c, "cmp_rbx,rax\nsetg_al\nmovzx_rax,al\n", "cmp_rbx,rax\nseta_al\nmovzx_rax,al\n", ">", relational_expr_stub);
  1326. general_recursion(additive_expr_c, "cmp_rbx,rax\nsete_al\nmovzx_rax,al\n", "==", relational_expr_stub);
  1327. general_recursion(additive_expr_c, "cmp_rbx,rax\nsetne_al\nmovzx_rax,al\n", "!=", relational_expr_stub);
  1328. }
  1329. else if(ARMV7L == Architecture)
  1330. {
  1331. arithmetic_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_L\n", "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_LO\n", "<", relational_expr_stub);
  1332. arithmetic_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_LE\n", "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_LS\n", "<=", relational_expr_stub);
  1333. arithmetic_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_GE\n", "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_HS\n", ">=", relational_expr_stub);
  1334. arithmetic_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_G\n", "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_HI\n", ">", relational_expr_stub);
  1335. general_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_EQUAL\n", "==", relational_expr_stub);
  1336. general_recursion(additive_expr_c, "'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_NE\n", "!=", relational_expr_stub);
  1337. }
  1338. else if(AARCH64 == Architecture)
  1339. {
  1340. arithmetic_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_LT\nSET_X0_TO_0\n", "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_LO\nSET_X0_TO_0\n", "<", relational_expr_stub);
  1341. arithmetic_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_LE\nSET_X0_TO_0\n", "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_LS\nSET_X0_TO_0\n", "<=", relational_expr_stub);
  1342. arithmetic_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_GE\nSET_X0_TO_0\n", "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_HS\nSET_X0_TO_0\n", ">=", relational_expr_stub);
  1343. arithmetic_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_GT\nSET_X0_TO_0\n", "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_HI\nSET_X0_TO_0\n", ">", relational_expr_stub);
  1344. general_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_EQ\nSET_X0_TO_0\n", "==", relational_expr_stub);
  1345. general_recursion(additive_expr_c, "CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_NE\nSET_X0_TO_0\n", "!=", relational_expr_stub);
  1346. }
  1347. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1348. {
  1349. arithmetic_recursion(additive_expr_c, "rd_a0 rs1_a1 rs2_a0 slt\n", "rd_a0 rs1_a1 rs2_a0 sltu\n", "<", relational_expr_stub);
  1350. arithmetic_recursion(additive_expr_c, "rd_a0 rs1_a0 rs2_a1 slt\nrd_a0 rs1_a0 !1 xori\n", "rd_a0 rs1_a0 rs2_a1 sltu\nrd_a0 rs1_a0 !1 xori\n", "<=", relational_expr_stub);
  1351. arithmetic_recursion(additive_expr_c, "rd_a0 rs1_a1 rs2_a0 slt\nrd_a0 rs1_a0 !1 xori\n", "rd_a0 rs1_a1 rs2_a0 sltu\nrd_a0 rs1_a0 !1 xori\n", ">=", relational_expr_stub);
  1352. arithmetic_recursion(additive_expr_c, "rd_a0 rs1_a0 rs2_a1 slt\n", "rd_a0 rs1_a0 rs2_a1 sltu\n", ">", relational_expr_stub);
  1353. general_recursion(additive_expr_c, "rd_a0 rs1_a0 rs2_a1 sub\nrd_a0 rs1_a0 !1 sltiu\n", "==", relational_expr_stub);
  1354. general_recursion(additive_expr_c, "rd_a0 rs1_a0 rs2_a1 sub\nrd_a0 rs2_a0 sltu\n", "!=", relational_expr_stub);
  1355. }
  1356. }
  1357. void relational_expr(void)
  1358. {
  1359. additive_expr_c();
  1360. relational_expr_stub();
  1361. }
  1362. /*
  1363. * bitwise-expr:
  1364. * relational-expr
  1365. * bitwise-expr & bitwise-expr
  1366. * bitwise-expr && bitwise-expr
  1367. * bitwise-expr | bitwise-expr
  1368. * bitwise-expr || bitwise-expr
  1369. * bitwise-expr ^ bitwise-expr
  1370. */
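/* Note that && and || emit exactly the same instructions as & and |: there is
 * no short-circuit evaluation, and operands that are not already 0 or 1
 * (for example 2 && 4) will not behave like standard C. */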
  1371. void bitwise_expr_stub(void)
  1372. {
  1373. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1374. {
  1375. general_recursion(relational_expr, "AND R0 R0 R1\n", "&", bitwise_expr_stub);
  1376. general_recursion(relational_expr, "AND R0 R0 R1\n", "&&", bitwise_expr_stub);
  1377. general_recursion(relational_expr, "OR R0 R0 R1\n", "|", bitwise_expr_stub);
  1378. general_recursion(relational_expr, "OR R0 R0 R1\n", "||", bitwise_expr_stub);
  1379. general_recursion(relational_expr, "XOR R0 R0 R1\n", "^", bitwise_expr_stub);
  1380. }
  1381. else if(X86 == Architecture)
  1382. {
  1383. general_recursion(relational_expr, "and_eax,ebx\n", "&", bitwise_expr_stub);
  1384. general_recursion(relational_expr, "and_eax,ebx\n", "&&", bitwise_expr_stub);
  1385. general_recursion(relational_expr, "or_eax,ebx\n", "|", bitwise_expr_stub);
  1386. general_recursion(relational_expr, "or_eax,ebx\n", "||", bitwise_expr_stub);
  1387. general_recursion(relational_expr, "xor_eax,ebx\n", "^", bitwise_expr_stub);
  1388. }
  1389. else if(AMD64 == Architecture)
  1390. {
  1391. general_recursion(relational_expr, "and_rax,rbx\n", "&", bitwise_expr_stub);
  1392. general_recursion(relational_expr, "and_rax,rbx\n", "&&", bitwise_expr_stub);
  1393. general_recursion(relational_expr, "or_rax,rbx\n", "|", bitwise_expr_stub);
  1394. general_recursion(relational_expr, "or_rax,rbx\n", "||", bitwise_expr_stub);
  1395. general_recursion(relational_expr, "xor_rax,rbx\n", "^", bitwise_expr_stub);
  1396. }
  1397. else if(ARMV7L == Architecture)
  1398. {
  1399. general_recursion(relational_expr, "NO_SHIFT R0 R0 AND R1 ARITH2_ALWAYS\n", "&", bitwise_expr_stub);
  1400. general_recursion(relational_expr, "NO_SHIFT R0 R0 AND R1 ARITH2_ALWAYS\n", "&&", bitwise_expr_stub);
  1401. general_recursion(relational_expr, "NO_SHIFT R0 R0 OR R1 AUX_ALWAYS\n", "|", bitwise_expr_stub);
  1402. general_recursion(relational_expr, "NO_SHIFT R0 R0 OR R1 AUX_ALWAYS\n", "||", bitwise_expr_stub);
  1403. general_recursion(relational_expr, "'0' R0 R0 XOR R1 ARITH2_ALWAYS\n", "^", bitwise_expr_stub);
  1404. }
  1405. else if(AARCH64 == Architecture)
  1406. {
  1407. general_recursion(relational_expr, "AND_X0_X1_X0\n", "&", bitwise_expr_stub);
  1408. general_recursion(relational_expr, "AND_X0_X1_X0\n", "&&", bitwise_expr_stub);
  1409. general_recursion(relational_expr, "OR_X0_X1_X0\n", "|", bitwise_expr_stub);
  1410. general_recursion(relational_expr, "OR_X0_X1_X0\n", "||", bitwise_expr_stub);
  1411. general_recursion(relational_expr, "XOR_X0_X1_X0\n", "^", bitwise_expr_stub);
  1412. }
  1413. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1414. {
  1415. general_recursion(relational_expr, "rd_a0 rs1_a1 rs2_a0 and\n", "&", bitwise_expr_stub);
  1416. general_recursion(relational_expr, "rd_a0 rs1_a1 rs2_a0 and\n", "&&", bitwise_expr_stub);
  1417. general_recursion(relational_expr, "rd_a0 rs1_a1 rs2_a0 or\n", "|", bitwise_expr_stub);
  1418. general_recursion(relational_expr, "rd_a0 rs1_a1 rs2_a0 or\n", "||", bitwise_expr_stub);
  1419. general_recursion(relational_expr, "rd_a0 rs1_a1 rs2_a0 xor\n", "^", bitwise_expr_stub);
  1420. }
  1421. }
  1422. void bitwise_expr(void)
  1423. {
  1424. relational_expr();
  1425. bitwise_expr_stub();
  1426. }
  1427. /*
  1428. * expression:
  1429. * bitwise-or-expr
  1430. * bitwise-or-expr = expression
  1431. */
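/* primary_expr dispatches on the first token: a leading & only sets the
 * Address_of flag; then sizeof, unary -, ! and ~, parenthesised expressions,
 * character and string literals, identifiers (and * dereferences), and
 * numeric constants are each handed to their own helper. */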
  1432. void primary_expr(void)
  1433. {
  1434. require(NULL != global_token, "Received EOF where primary expression expected\n");
  1435. if(match("&", global_token->s))
  1436. {
  1437. Address_of = TRUE;
  1438. global_token = global_token->next;
  1439. require(NULL != global_token, "Received EOF after & where primary expression expected\n");
  1440. }
  1441. else
  1442. {
  1443. Address_of = FALSE;
  1444. }
  1445. if(match("sizeof", global_token->s)) unary_expr_sizeof();
  1446. else if('-' == global_token->s[0])
  1447. {
  1448. if(X86 == Architecture) emit_out("mov_eax, %0\n");
  1449. else if(AMD64 == Architecture) emit_out("mov_rax, %0\n");
  1450. else if(ARMV7L == Architecture) emit_out("!0 R0 LOADI8_ALWAYS\n");
  1451. else if(AARCH64 == Architecture) emit_out("SET_X0_TO_0\n");
  1452. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 mv\n");
  1453. common_recursion(primary_expr);
  1454. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("NEG R0 R0\n");
  1455. else if(X86 == Architecture) emit_out("sub_ebx,eax\nmov_eax,ebx\n");
  1456. else if(AMD64 == Architecture) emit_out("sub_rbx,rax\nmov_rax,rbx\n");
  1457. else if(ARMV7L == Architecture) emit_out("'0' R0 R0 SUB R1 ARITH2_ALWAYS\n");
  1458. else if(AARCH64 == Architecture) emit_out("SUB_X0_X1_X0\n");
  1459. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_a1 rs2_a0 sub\n");
  1460. }
  1461. else if('!' == global_token->s[0])
  1462. {
  1463. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("LOADI R0 1\n");
  1464. else if(X86 == Architecture) emit_out("mov_eax, %1\n");
  1465. else if(AMD64 == Architecture) emit_out("mov_rax, %1\n");
  1466. else if(ARMV7L == Architecture) emit_out("!1 R0 LOADI8_ALWAYS\n");
  1467. else if(AARCH64 == Architecture) emit_out("SET_X0_TO_1\n");
  1468. common_recursion(postfix_expr);
  1469. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("CMPU R0 R1 R0\nSET.G R0 R0 1\n");
  1470. else if(X86 == Architecture) emit_out("cmp\nseta_al\nmovzx_eax,al\n");
  1471. else if(AMD64 == Architecture) emit_out("cmp_rbx,rax\nseta_al\nmovzx_rax,al\n");
  1472. else if(ARMV7L == Architecture) emit_out("'0' R0 CMP R1 AUX_ALWAYS\n!0 R0 LOADI8_ALWAYS\n!1 R0 LOADI8_HI\n");
  1473. else if(AARCH64 == Architecture) emit_out("CMP_X1_X0\nSET_X0_TO_1\nSKIP_INST_HI\nSET_X0_TO_0\n");
  1474. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_a0 !1 sltiu\n");
  1475. }
  1476. else if('~' == global_token->s[0])
  1477. {
  1478. common_recursion(postfix_expr);
  1479. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("NOT R0 R0\n");
  1480. else if(X86 == Architecture) emit_out("not_eax\n");
  1481. else if(AMD64 == Architecture) emit_out("not_rax\n");
  1482. else if(ARMV7L == Architecture) emit_out("'0' R0 R0 MVN_ALWAYS\n");
  1483. else if(AARCH64 == Architecture) emit_out("MVN_X0\n");
  1484. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_a0 not\n");
  1485. }
  1486. else if(global_token->s[0] == '(')
  1487. {
  1488. global_token = global_token->next;
  1489. expression();
  1490. require_match("Error in Primary expression\nDidn't get )\n", ")");
  1491. }
  1492. else if(global_token->s[0] == '\'') primary_expr_char();
  1493. else if(global_token->s[0] == '"') primary_expr_string();
  1494. else if(in_set(global_token->s[0], "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_")) primary_expr_variable();
  1495. else if(global_token->s[0] == '*') primary_expr_variable();
  1496. else if(in_set(global_token->s[0], "0123456789"))
  1497. {
  1498. primary_expr_number(global_token->s);
  1499. global_token = global_token->next;
  1500. }
  1501. else primary_expr_failure();
  1502. }
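/* compound_operation maps a compound assignment operator to the architecture
 * specific instruction string that combines R1 (the current left-hand value)
 * with R0 (the right-hand side), choosing the signed or unsigned form where
 * the two differ; for example "+=" on AMD64 yields "add_rax,rbx\n". */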
  1503. char* compound_operation(char* operator, int is_signed)
  1504. {
  1505. char* operation = "";
  1506. if(match("+=", operator))
  1507. {
  1508. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1509. {
  1510. if(is_signed) operation = "ADD R0 R1 R0\n";
  1511. else operation = "ADDU R0 R1 R0\n";
  1512. }
  1513. else if(X86 == Architecture) operation = "add_eax,ebx\n";
  1514. else if(AMD64 == Architecture) operation = "add_rax,rbx\n";
  1515. else if(ARMV7L == Architecture) operation = "'0' R0 R0 ADD R1 ARITH2_ALWAYS\n";
  1516. else if(AARCH64 == Architecture) operation = "ADD_X0_X1_X0\n";
  1517. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 add\n";
  1518. }
  1519. else if(match("-=", operator))
  1520. {
  1521. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1522. {
  1523. if(is_signed) operation = "SUB R0 R1 R0\n";
  1524. else operation = "SUBU R0 R1 R0\n";
  1525. }
  1526. else if(X86 == Architecture) operation = "sub_ebx,eax\nmov_eax,ebx\n";
  1527. else if(AMD64 == Architecture) operation = "sub_rbx,rax\nmov_rax,rbx\n";
  1528. else if(ARMV7L == Architecture) operation = "'0' R0 R0 SUB R1 ARITH2_ALWAYS\n";
  1529. else if(AARCH64 == Architecture) operation = "SUB_X0_X1_X0\n";
  1530. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 sub\n";
  1531. }
  1532. else if(match("*=", operator))
  1533. {
  1534. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1535. {
  1536. if(is_signed) operation = "MUL R0 R1 R0\n";
  1537. else operation = "MULU R0 R1 R0\n";
  1538. }
  1539. else if(X86 == Architecture)
  1540. {
  1541. if(is_signed) operation = "imul_ebx\n";
  1542. else operation = "mul_ebx\n";
  1543. }
  1544. else if(AMD64 == Architecture)
  1545. {
  1546. if(is_signed) operation = "imul_rbx\n";
  1547. else operation = "mul_rbx\n";
  1548. }
  1549. else if(ARMV7L == Architecture) operation = "'9' R0 '0' R1 MULS R0 ARITH2_ALWAYS\n";
  1550. else if(AARCH64 == Architecture) operation = "MUL_X0_X1_X0\n";
  1551. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 mul\n";
  1552. }
  1553. else if(match("/=", operator))
  1554. {
  1555. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1556. {
  1557. if(is_signed) operation = "DIV R0 R1 R0\n";
  1558. else operation = "DIVU R0 R1 R0\n";
  1559. }
  1560. else if(X86 == Architecture)
  1561. {
  1562. if (is_signed) operation = "xchg_ebx,eax\ncdq\nidiv_ebx\n";
  1563. else operation = "xchg_ebx,eax\nmov_edx, %0\ndiv_ebx\n";
  1564. }
  1565. else if(AMD64 == Architecture)
  1566. {
  1567. if(is_signed) operation = "xchg_rbx,rax\ncqo\nidiv_rbx\n";
  1568. else operation = "xchg_rbx,rax\nmov_rdx, %0\ndiv_rbx\n";
  1569. }
  1570. else if(ARMV7L == Architecture)
  1571. {
  1572. if(is_signed) operation = "{LR} PUSH_ALWAYS\n^~divides CALL_ALWAYS\n{LR} POP_ALWAYS\n";
  1573. else operation = "{LR} PUSH_ALWAYS\n^~divide CALL_ALWAYS\n{LR} POP_ALWAYS\n";
  1574. }
  1575. else if(AARCH64 == Architecture)
  1576. {
  1577. if(is_signed) operation = "SDIV_X0_X1_X0\n";
  1578. else operation = "UDIV_X0_X1_X0\n";
  1579. }
  1580. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1581. {
  1582. if(is_signed) operation = "rd_a0 rs1_a1 rs2_a0 div\n";
  1583. else operation = "rd_a0 rs1_a1 rs2_a0 divu\n";
  1584. }
  1585. }
  1586. else if(match("%=", operator))
  1587. {
  1588. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1589. {
  1590. if(is_signed) operation = "MOD R0 R1 R0\n";
  1591. else operation = "MODU R0 R1 R0\n";
  1592. }
  1593. else if(X86 == Architecture)
  1594. {
  1595. if(is_signed) operation = "xchg_ebx,eax\ncdq\nidiv_ebx\nmov_eax,edx\n";
  1596. else operation = "xchg_ebx,eax\nmov_edx, %0\ndiv_ebx\nmov_eax,edx\n";
  1597. }
  1598. else if(AMD64 == Architecture)
  1599. {
  1600. if(is_signed) operation = "xchg_rbx,rax\ncqo\nidiv_rbx\nmov_rax,rdx\n";
  1601. else operation = "xchg_rbx,rax\nmov_rdx, %0\ndiv_rbx\nmov_rax,rdx\n";
  1602. }
  1603. else if(ARMV7L == Architecture)
  1604. {
  1605. if(is_signed) operation = "{LR} PUSH_ALWAYS\n^~moduluss CALL_ALWAYS\n{LR} POP_ALWAYS\n";
  1606. else operation = "{LR} PUSH_ALWAYS\n^~modulus CALL_ALWAYS\n{LR} POP_ALWAYS\n";
  1607. }
  1608. else if(AARCH64 == Architecture)
  1609. {
  1610. if(is_signed) operation = "SDIV_X2_X1_X0\nMSUB_X0_X0_X2_X1\n";
  1611. else operation = "UDIV_X2_X1_X0\nMSUB_X0_X0_X2_X1\n";
  1612. }
  1613. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1614. {
  1615. if(is_signed) operation = "rd_a0 rs1_a1 rs2_a0 rem\n";
  1616. else operation = "rd_a0 rs1_a1 rs2_a0 remu\n";
  1617. }
  1618. }
  1619. else if(match("<<=", operator))
  1620. {
  1621. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1622. {
  1623. if(is_signed) operation = "SAL R0 R1 R0\n";
  1624. else operation = "SL0 R0 R1 R0\n";
  1625. }
  1626. else if(X86 == Architecture)
  1627. {
  1628. if(is_signed) operation = "mov_ecx,eax\nmov_eax,ebx\nsal_eax,cl\n";
  1629. else operation = "mov_ecx,eax\nmov_eax,ebx\nshl_eax,cl\n";
  1630. }
  1631. else if(AMD64 == Architecture)
  1632. {
  1633. if(is_signed) operation = "mov_rcx,rax\nmov_rax,rbx\nsal_rax,cl\n";
  1634. else operation = "mov_rcx,rax\nmov_rax,rbx\nshl_rax,cl\n";
  1635. }
  1636. else if(ARMV7L == Architecture) operation = "LEFT R1 R0 R0 SHIFT AUX_ALWAYS\n";
  1637. else if(AARCH64 == Architecture) operation = "LSHIFT_X0_X1_X0\n";
  1638. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 sll\n";
  1639. }
  1640. else if(match(">>=", operator))
  1641. {
  1642. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture))
  1643. {
  1644. if(is_signed) operation = "SAR R0 R1 R0\n";
  1645. else operation = "SR0 R0 R1 R0\n";
  1646. }
  1647. else if(X86 == Architecture)
  1648. {
  1649. if(is_signed) operation = "mov_ecx,eax\nmov_eax,ebx\nsar_eax,cl\n";
  1650. else operation = "mov_ecx,eax\nmov_eax,ebx\nshr_eax,cl\n";
  1651. }
  1652. else if(AMD64 == Architecture)
  1653. {
  1654. if(is_signed) operation = "mov_rcx,rax\nmov_rax,rbx\nsar_rax,cl\n";
  1655. else operation = "mov_rcx,rax\nmov_rax,rbx\nshr_rax,cl\n";
  1656. }
  1657. else if(ARMV7L == Architecture)
  1658. {
  1659. if(is_signed) operation = "ARITH_RIGHT R1 R0 R0 SHIFT AUX_ALWAYS\n";
  1660. else operation = "RIGHT R1 R0 R0 SHIFT AUX_ALWAYS\n";
  1661. }
  1662. else if(AARCH64 == Architecture)
  1663. {
  1664. if(is_signed) operation = "ARITH_RSHIFT_X0_X1_X0\n";
  1665. else operation = "LOGICAL_RSHIFT_X0_X1_X0\n";
  1666. }
  1667. else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
  1668. {
  1669. if(is_signed) operation = "rd_a0 rs1_a1 rs2_a0 sra\n";
  1670. else operation = "rd_a0 rs1_a1 rs2_a0 srl\n";
  1671. }
  1672. }
  1673. else if(match("&=", operator))
  1674. {
  1675. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) operation = "AND R0 R0 R1\n";
  1676. else if(X86 == Architecture) operation = "and_eax,ebx\n";
  1677. else if(AMD64 == Architecture) operation = "and_rax,rbx\n";
  1678. else if(ARMV7L == Architecture) operation = "NO_SHIFT R0 R0 AND R1 ARITH2_ALWAYS\n";
  1679. else if(AARCH64 == Architecture) operation = "AND_X0_X1_X0\n";
  1680. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 and\n";
  1681. }
  1682. else if(match("^=", operator))
  1683. {
  1684. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) operation = "XOR R0 R0 R1\n";
  1685. else if(X86 == Architecture) operation = "xor_eax,ebx\n";
  1686. else if(AMD64 == Architecture) operation = "xor_rax,rbx\n";
  1687. else if(ARMV7L == Architecture) operation = "'0' R0 R0 XOR R1 ARITH2_ALWAYS\n";
  1688. else if(AARCH64 == Architecture) operation = "XOR_X0_X1_X0\n";
  1689. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 xor\n";
  1690. }
  1691. else if(match("|=", operator))
  1692. {
  1693. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) operation = "OR R0 R0 R1\n";
  1694. else if(X86 == Architecture) operation = "or_eax,ebx\n";
  1695. else if(AMD64 == Architecture) operation = "or_rax,rbx\n";
  1696. else if(ARMV7L == Architecture) operation = "NO_SHIFT R0 R0 OR R1 AUX_ALWAYS\n";
  1697. else if(AARCH64 == Architecture) operation = "OR_X0_X1_X0\n";
  1698. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) operation = "rd_a0 rs1_a1 rs2_a0 or\n";
  1699. }
  1700. else
  1701. {
  1702. fputs("Found illegal compound assignment operator: ", stderr);
  1703. fputs(operator, stderr);
  1704. fputc('\n', stderr);
  1705. exit(EXIT_FAILURE);
  1706. }
  1707. return operation;
  1708. }
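/* expression handles assignment last: for lhs = rhs, the address produced by
 * the lhs is saved while rhs is evaluated, then a store of the right width is
 * emitted. A compound assignment such as x += y is lowered as: save the lhs
 * address, evaluate y, reload the current value of x, apply the sequence from
 * compound_operation, restore the address and store the result back. */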
  1709. void expression(void)
  1710. {
  1711. bitwise_expr();
  1712. if(match("=", global_token->s))
  1713. {
  1714. char* store = "";
  1715. if(match("]", global_token->prev->s))
  1716. {
  1717. store = store_value(current_target->type->size);
  1718. }
  1719. else
  1720. {
  1721. store = store_value(current_target->size);
  1722. }
  1723. common_recursion(expression);
  1724. emit_out(store);
  1725. current_target = integer;
  1726. }
  1727. else if(is_compound_assignment(global_token->s))
  1728. {
  1729. maybe_bootstrap_error("compound operator");
  1730. char* push = "";
  1731. char* load = "";
  1732. char* operation = "";
  1733. char* pop = "";
  1734. char* store = "";
  1735. struct type* last_type = current_target;
  1736. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) push = "PUSHR R1 R15\n";
  1737. else if(X86 == Architecture) push = "push_ebx\n";
  1738. else if(AMD64 == Architecture) push = "push_rbx\n";
  1739. else if(ARMV7L == Architecture) push = "{R1} PUSH_ALWAYS\n";
  1740. else if(AARCH64 == Architecture) push = "PUSH_X1\n";
  1741. else if(RISCV32 == Architecture) push = "rs1_sp rs2_a1 @-4 sw\n";
  1742. else if(RISCV64 == Architecture) push = "rs1_sp rs2_a1 @-8 sd\n";
  1743. if(!match("]", global_token->prev->s) || !match("char*", current_target->name))
  1744. {
  1745. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) load = "LOAD R1 R1 0\n";
  1746. else if(X86 == Architecture) load = "mov_ebx,[ebx]\n";
  1747. else if(AMD64 == Architecture) load = "mov_rbx,[rbx]\n";
  1748. else if(ARMV7L == Architecture) load = "!0 R1 LOAD32 R1 MEMORY\n";
  1749. else if(AARCH64 == Architecture) load = "DEREF_X1\n";
  1750. else if(RISCV32 == Architecture) load = "rd_a1 rs1_a1 lw\n";
  1751. else if(RISCV64 == Architecture) load = "rd_a1 rs1_a1 ld\n";
  1752. }
  1753. else
  1754. {
  1755. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) load = "LOAD8 R1 R1 0\n";
  1756. else if(X86 == Architecture) load = "movsx_ebx,BYTE_PTR_[ebx]\n";
  1757. else if(AMD64 == Architecture) load = "movsx_rbx,BYTE_PTR_[rbx]\n";
  1758. else if(ARMV7L == Architecture) load = "LOADU8 R1 LOAD R1 MEMORY\n";
  1759. else if(AARCH64 == Architecture) load = "DEREF_X1_BYTE\n";
  1760. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) load = "rd_a1 rs1_a1 lbu\n";
  1761. }
  1762. char *operator = global_token->s;
  1763. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) pop = "POPR R1 R15\n";
  1764. else if(X86 == Architecture) pop = "pop_ebx\n";
  1765. else if(AMD64 == Architecture) pop = "pop_rbx\n";
  1766. else if(ARMV7L == Architecture) pop = "{R1} POP_ALWAYS\n";
  1767. else if(AARCH64 == Architecture) pop = "POP_X1\n";
  1768. else if(RISCV32 == Architecture) pop = "rd_a1 rs1_sp !-4 lw\n";
  1769. else if(RISCV64 == Architecture) pop = "rd_a1 rs1_sp !-8 ld\n";
  1770. if(match("]", global_token->prev->s))
  1771. {
  1772. store = store_value(current_target->type->size);
  1773. }
  1774. else
  1775. {
  1776. store = store_value(current_target->size);
  1777. }
  1778. common_recursion(expression);
  1779. current_target = promote_type(current_target, last_type);
  1780. emit_out(push);
  1781. emit_out(load);
  1782. operation = compound_operation(operator, current_target->is_signed);
  1783. emit_out(operation);
  1784. emit_out(pop);
  1785. emit_out(store);
  1786. current_target = integer;
  1787. }
  1788. }
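/* iskeywordp reports whether s is a C keyword; collect_local uses it to
 * refuse keywords as local variable names. */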
  1789. int iskeywordp(char* s)
  1790. {
  1791. if(match("auto", s)) return TRUE;
  1792. if(match("break", s)) return TRUE;
  1793. if(match("case", s)) return TRUE;
  1794. if(match("char", s)) return TRUE;
  1795. if(match("const", s)) return TRUE;
  1796. if(match("continue", s)) return TRUE;
  1797. if(match("default", s)) return TRUE;
  1798. if(match("do", s)) return TRUE;
  1799. if(match("double", s)) return TRUE;
  1800. if(match("else", s)) return TRUE;
  1801. if(match("enum", s)) return TRUE;
  1802. if(match("extern", s)) return TRUE;
  1803. if(match("float", s)) return TRUE;
  1804. if(match("for", s)) return TRUE;
  1805. if(match("goto", s)) return TRUE;
  1806. if(match("if", s)) return TRUE;
  1807. if(match("int", s)) return TRUE;
  1808. if(match("long", s)) return TRUE;
  1809. if(match("register", s)) return TRUE;
  1810. if(match("return", s)) return TRUE;
  1811. if(match("short", s)) return TRUE;
  1812. if(match("signed", s)) return TRUE;
  1813. if(match("sizeof", s)) return TRUE;
  1814. if(match("static", s)) return TRUE;
  1815. if(match("struct", s)) return TRUE;
  1816. if(match("switch", s)) return TRUE;
  1817. if(match("typedef", s)) return TRUE;
  1818. if(match("union", s)) return TRUE;
  1819. if(match("unsigned", s)) return TRUE;
  1820. if(match("void", s)) return TRUE;
  1821. if(match("volatile", s)) return TRUE;
  1822. if(match("while", s)) return TRUE;
  1823. return FALSE;
  1824. }
/* Integer division a / b, but rounding up: for example ceil_div(9, 4) == 3 */
  1826. unsigned ceil_div(unsigned a, unsigned b)
  1827. {
  1828. return (a + b - 1) / b;
  1829. }
  1830. /* Process local variable */
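/* A local declaration reserves stack space: the variable's depth is chosen
 * relative to what the function has already allocated (arguments, earlier
 * locals, or main's implicit argc/argv/envp slots), moving in whichever
 * direction the target's stack grows. After the optional = initializer the
 * accumulator is pushed ceil_div(size, register_size) times to claim the space. */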
  1831. void collect_local(void)
  1832. {
  1833. if(NULL != break_target_func)
  1834. {
  1835. fputs("Local variable initialized inside of loop in file: ", stderr);
  1836. line_error();
  1837. fputs("\nMove the variable outside of the loop to resolve\n", stderr);
  1838. fputs("Otherwise the binary will segfault while running\n", stderr);
  1839. exit(EXIT_FAILURE);
  1840. }
  1841. struct type* type_size = type_name();
  1842. require(NULL != global_token, "Received EOF while collecting locals\n");
  1843. require(!in_set(global_token->s[0], "[{(<=>)}]|&!^%;:'\""), "forbidden character in local variable name\n");
  1844. require(!iskeywordp(global_token->s), "You are not allowed to use a keyword as a local variable name\n");
  1845. require(NULL != type_size, "Must have non-null type\n");
  1846. struct token_list* a = sym_declare(global_token->s, type_size, function->locals);
  1847. if(match("main", function->s) && (NULL == function->locals))
  1848. {
  1849. if(KNIGHT_NATIVE == Architecture) a->depth = register_size;
  1850. else if(KNIGHT_POSIX == Architecture) a->depth = 20;
  1851. else if(X86 == Architecture) a->depth = -20;
  1852. else if(AMD64 == Architecture) a->depth = -40;
  1853. else if(ARMV7L == Architecture) a->depth = 16;
  1854. else if(AARCH64 == Architecture) a->depth = 32; /* argc, argv, envp and the local (8 bytes each) */
  1855. else if(RISCV32 == Architecture) a->depth = -16;
  1856. else if(RISCV64 == Architecture) a->depth = -32;
  1857. }
  1858. else if((NULL == function->arguments) && (NULL == function->locals))
  1859. {
  1860. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) a->depth = register_size;
  1861. else if(X86 == Architecture) a->depth = -8;
  1862. else if(AMD64 == Architecture) a->depth = -16;
  1863. else if(ARMV7L == Architecture) a->depth = 8;
  1864. else if(AARCH64 == Architecture) a->depth = register_size;
  1865. else if(RISCV32 == Architecture) a->depth = -4;
  1866. else if(RISCV64 == Architecture) a->depth = -8;
  1867. }
  1868. else if(NULL == function->locals)
  1869. {
  1870. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) a->depth = function->arguments->depth + 8;
  1871. else if(X86 == Architecture) a->depth = function->arguments->depth - 8;
  1872. else if(AMD64 == Architecture) a->depth = function->arguments->depth - 16;
  1873. else if(ARMV7L == Architecture) a->depth = function->arguments->depth + 8;
  1874. else if(AARCH64 == Architecture) a->depth = function->arguments->depth + register_size;
  1875. else if(RISCV32 == Architecture) a->depth = function->arguments->depth - 4;
  1876. else if(RISCV64 == Architecture) a->depth = function->arguments->depth - 8;
  1877. }
  1878. else
  1879. {
  1880. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) a->depth = function->locals->depth + register_size;
  1881. else if(X86 == Architecture) a->depth = function->locals->depth - register_size;
  1882. else if(AMD64 == Architecture) a->depth = function->locals->depth - register_size;
  1883. else if(ARMV7L == Architecture) a->depth = function->locals->depth + register_size;
  1884. else if(AARCH64 == Architecture) a->depth = function->locals->depth + register_size;
  1885. else if(RISCV32 == Architecture) a->depth = function->locals->depth - register_size;
  1886. else if(RISCV64 == Architecture) a->depth = function->locals->depth - register_size;
  1887. }
/* Adjust the depth of local structs. When the stack grows downwards, we want them to
start at the bottom of the allocated space. */
  1890. unsigned struct_depth_adjustment = (ceil_div(a->type->size, register_size) - 1) * register_size;
  1891. if(KNIGHT_POSIX == Architecture) a->depth = a->depth + struct_depth_adjustment;
  1892. else if(KNIGHT_NATIVE == Architecture) a->depth = a->depth + struct_depth_adjustment;
  1893. else if(X86 == Architecture) a->depth = a->depth - struct_depth_adjustment;
  1894. else if(AMD64 == Architecture) a->depth = a->depth - struct_depth_adjustment;
  1895. else if(ARMV7L == Architecture) a->depth = a->depth + struct_depth_adjustment;
  1896. else if(AARCH64 == Architecture) a->depth = a->depth + struct_depth_adjustment;
  1897. else if(RISCV32 == Architecture) a->depth = a->depth - struct_depth_adjustment;
  1898. else if(RISCV64 == Architecture) a->depth = a->depth - struct_depth_adjustment;
  1899. function->locals = a;
  1900. emit_out("# Defining local ");
  1901. emit_out(global_token->s);
  1902. emit_out("\n");
  1903. global_token = global_token->next;
  1904. require(NULL != global_token, "incomplete local missing name\n");
  1905. if(match("=", global_token->s))
  1906. {
  1907. global_token = global_token->next;
  1908. require(NULL != global_token, "incomplete local assignment\n");
  1909. expression();
  1910. }
  1911. require_match("ERROR in collect_local\nMissing ;\n", ";");
  1912. unsigned i = ceil_div(a->type->size, register_size);
  1913. while(i != 0)
  1914. {
  1915. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("PUSHR R0 R15\t#");
  1916. else if(X86 == Architecture) emit_out("push_eax\t#");
  1917. else if(AMD64 == Architecture) emit_out("push_rax\t#");
  1918. else if(ARMV7L == Architecture) emit_out("{R0} PUSH_ALWAYS\t#");
  1919. else if(AARCH64 == Architecture) emit_out("PUSH_X0\t#");
  1920. else if(RISCV32 == Architecture) emit_out("rd_sp rs1_sp !-4 addi\nrs1_sp rs2_a0 sw\t#");
  1921. else if(RISCV64 == Architecture) emit_out("rd_sp rs1_sp !-8 addi\nrs1_sp rs2_a0 sd\t#");
  1922. emit_out(a->s);
  1923. emit_out("\n");
  1924. i = i - 1;
  1925. }
  1926. }
  1927. void statement(void);
  1928. /* Evaluate if statements */
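/* if/else is lowered with labels made unique per function and counter by
 * uniqueID_out: a conditional jump to ELSE_ when the test is zero, the then
 * branch, an unconditional jump to _END_IF_, the ELSE_ label with the optional
 * else branch, then the _END_IF_ label. Roughly, if(x) a(); else b(); on x86
 * becomes (illustrative only):
 *   test_eax,eax ; je %ELSE_id ; ...a()... ; jmp %_END_IF_id
 *   :ELSE_id ; ...b()... ; :_END_IF_id */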
  1929. void process_if(void)
  1930. {
  1931. char* number_string = int2str(current_count, 10, TRUE);
  1932. current_count = current_count + 1;
  1933. emit_out("# IF_");
  1934. uniqueID_out(function->s, number_string);
  1935. global_token = global_token->next;
  1936. require_match("ERROR in process_if\nMISSING (\n", "(");
  1937. expression();
  1938. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP.Z R0 @ELSE_");
  1939. else if(X86 == Architecture) emit_out("test_eax,eax\nje %ELSE_");
  1940. else if(AMD64 == Architecture) emit_out("test_rax,rax\nje %ELSE_");
  1941. else if(ARMV7L == Architecture) emit_out("!0 CMPI8 R0 IMM_ALWAYS\n^~ELSE_");
  1942. else if(AARCH64 == Architecture) emit_out("CBNZ_X0_PAST_BR\nLOAD_W16_AHEAD\nSKIP_32_DATA\n&ELSE_");
  1943. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rs1_a0 @8 bnez\n$ELSE_");
  1944. uniqueID_out(function->s, number_string);
  1945. if(ARMV7L == Architecture) emit_out(" JUMP_EQUAL\n");
  1946. else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
  1947. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  1948. require_match("ERROR in process_if\nMISSING )\n", ")");
  1949. statement();
  1950. require(NULL != global_token, "Reached EOF inside of function\n");
  1951. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @_END_IF_");
  1952. else if(X86 == Architecture) emit_out("jmp %_END_IF_");
  1953. else if(AMD64 == Architecture) emit_out("jmp %_END_IF_");
  1954. else if(ARMV7L == Architecture) emit_out("^~_END_IF_");
  1955. else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&_END_IF_");
  1956. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$_END_IF_");
  1957. uniqueID_out(function->s, number_string);
  1958. if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
  1959. else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
  1960. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  1961. emit_out(":ELSE_");
  1962. uniqueID_out(function->s, number_string);
  1963. if(match("else", global_token->s))
  1964. {
  1965. global_token = global_token->next;
  1966. require(NULL != global_token, "Received EOF where an else statement expected\n");
  1967. statement();
  1968. require(NULL != global_token, "Reached EOF inside of function\n");
  1969. }
  1970. emit_out(":_END_IF_");
  1971. uniqueID_out(function->s, number_string);
  1972. }
  1973. void process_case(void)
  1974. {
  1975. process_case_iter:
  1976. if(match("case", global_token->s)) return;
  1977. if(match(":default", global_token->s)) return;
  1978. if(match("break", global_token->s))
  1979. {
  1980. statement();
  1981. }
  1982. else
  1983. {
  1984. statement();
  1985. goto process_case_iter;
  1986. }
  1987. }
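/* switch is lowered as a compare-and-branch table placed after the body: the
 * controlling value is copied into R1 and control jumps to _SWITCH_TABLE_;
 * each case n: in the body gets a _SWITCH_CASE_n_ label; the table compares
 * R1 against every recorded case value and branches to the matching label,
 * falling back to _SWITCH_DEFAULT_; break leaves through _SWITCH_END_. */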
  1988. void process_switch(void)
  1989. {
  1990. maybe_bootstrap_error("switch/case statements");
  1991. struct token_list* nested_locals = break_frame;
  1992. char* nested_break_head = break_target_head;
  1993. char* nested_break_func = break_target_func;
  1994. char* nested_break_num = break_target_num;
  1995. char* nested_continue_head = continue_target_head;
  1996. char* number_string = int2str(current_count, 10, TRUE);
  1997. current_count = current_count + 1;
  1998. break_target_head = "_SWITCH_END_";
  1999. continue_target_head = NULL; /* don't allow continue in switch statements */
  2000. break_target_num = number_string;
  2001. break_frame = function->locals;
  2002. break_target_func = function->s;
  2003. emit_out("# switch_");
  2004. uniqueID_out(function->s, number_string);
  2005. /* get what we are casing on */
  2006. global_token = global_token->next;
  2007. require_match("ERROR in process_switch\nMISSING (\n", "(");
  2008. expression();
  2009. require_match("ERROR in process_switch\nMISSING )\n", ")");
  2010. /* Put the value in R1 as it is currently in R0 */
  2011. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("MOVE R1 R0\n");
  2012. else if(X86 == Architecture) emit_out("mov_ebx,eax\n");
  2013. else if(AMD64 == Architecture) emit_out("push_rax\npop_rbx\n");
  2014. else if(ARMV7L == Architecture) emit_out("'0' R1 R0 NO_SHIFT MOVE_ALWAYS\n");
  2015. else if(AARCH64 == Architecture) emit_out("SET_X1_FROM_X0\n");
  2016. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a1 rs1_a0 mv\n");
  2017. /* Jump to the switch table */
  2018. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @_SWITCH_TABLE_");
  2019. else if(X86 == Architecture) emit_out("jmp %_SWITCH_TABLE_");
  2020. else if(AMD64 == Architecture) emit_out("jmp %_SWITCH_TABLE_");
  2021. else if(ARMV7L == Architecture) emit_out("^~_SWITCH_TABLE_");
  2022. else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&_SWITCH_TABLE_");
  2023. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$_SWITCH_TABLE_");
  2024. uniqueID_out(function->s, number_string);
  2025. if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
  2026. else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
  2027. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
/* must be of the form: switch (expression) { statements }; */
  2029. require_match("ERROR in process_switch\nMISSING {\n", "{");
  2030. struct case_list* backtrack = NULL;
  2031. process_switch_iter:
  2032. if(match("case", global_token->s))
  2033. {
  2034. global_token = global_token->next;
  2035. if(':' == global_token->s[0])
  2036. {
  2037. struct case_list* c = calloc(1, sizeof(struct case_list));
  2038. c->next = backtrack;
  2039. c->value = global_token->s + 1;
  2040. backtrack = c;
  2041. emit_out(":_SWITCH_CASE_");
  2042. emit_out(c->value);
  2043. emit_out("_");
  2044. uniqueID_out(function->s, number_string);
  2045. global_token = global_token->next;
  2046. process_case();
  2047. }
  2048. else line_error();
  2049. goto process_switch_iter;
  2050. }
  2051. else if(match(":default", global_token->s))
  2052. { /* because of how M2-Planet treats labels */
  2053. global_token = global_token->next;
  2054. emit_out(":_SWITCH_DEFAULT_");
  2055. uniqueID_out(function->s, number_string);
  2056. /* collect statements until } */
  2057. while(!match("}", global_token->s))
  2058. {
  2059. statement();
  2060. }
  2061. /* jump over the switch table */
  2062. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @_SWITCH_END_");
  2063. else if(X86 == Architecture) emit_out("jmp %_SWITCH_END_");
  2064. else if(AMD64 == Architecture) emit_out("jmp %_SWITCH_END_");
  2065. else if(ARMV7L == Architecture) emit_out("^~_SWITCH_END_");
  2066. else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&_SWITCH_END_");
  2067. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$_SWITCH_END_");
  2068. uniqueID_out(function->s, number_string);
  2069. if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
  2070. else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
  2071. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  2072. }
  2073. /* Switch statements must end with } */
  2074. require_match("ERROR in process_switch\nMISSING }\n", "}");
  2075. /* create the table */
  2076. emit_out(":_SWITCH_TABLE_");
  2077. uniqueID_out(function->s, number_string);
  2078. struct case_list* hold;
  2079. while(NULL != backtrack)
  2080. {
  2081. /* put case value in R0 as the switch (value) is in R1 */
  2082. primary_expr_number(backtrack->value);
  2083. hold = backtrack->next;
  2084. /* compare R0 and R1 and jump to case if equal */
  2085. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("CMPU R0 R0 R1\nJUMP.E R0 @_SWITCH_CASE_");
  2086. else if(X86 == Architecture) emit_out("cmp\nje %_SWITCH_CASE_");
  2087. else if(AMD64 == Architecture) emit_out("cmp_rbx,rax\nje %_SWITCH_CASE_");
  2088. else if(ARMV7L == Architecture) emit_out("'0' R0 CMP R1 AUX_ALWAYS\n^~_SWITCH_CASE_");
  2089. else if(AARCH64 == Architecture) emit_out("CMP_X1_X0\nSKIP_32_DATA\n&_SWITCH_CASE_");
  2090. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rd_a0 rs1_a0 rs2_a1 sub\nrs1_a0 @8 bnez\n$_SWITCH_CASE_");
  2091. emit_out(backtrack->value);
  2092. emit_out("_");
  2093. uniqueID_out(function->s, number_string);
  2094. if(ARMV7L == Architecture) emit_out(" JUMP_EQUAL\n");
  2095. else if(AARCH64 == Architecture) emit_out("\nSKIP_INST_NE\nBR_X16\n");
  2096. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  2097. free(backtrack);
  2098. backtrack = hold;
  2099. }
  2100. /* Default to :default */
  2101. if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @_SWITCH_DEFAULT_");
  2102. else if(X86 == Architecture) emit_out("jmp %_SWITCH_DEFAULT_");
  2103. else if(AMD64 == Architecture) emit_out("jmp %_SWITCH_DEFAULT_");
  2104. else if(ARMV7L == Architecture) emit_out("^~_SWITCH_DEFAULT_");
  2105. else if(AARCH64 == Architecture) emit_out("SKIP_32_DATA\n&_SWITCH_DEFAULT_");
  2106. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$_SWITCH_DEFAULT_");
  2107. uniqueID_out(function->s, number_string);
  2108. if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
  2109. else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
  2110. else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");
  2111. /* put the exit of the switch */
  2112. emit_out(":_SWITCH_END_");
  2113. uniqueID_out(function->s, number_string);
  2114. break_target_head = nested_break_head;
  2115. break_target_func = nested_break_func;
  2116. break_target_num = nested_break_num;
  2117. continue_target_head = nested_continue_head;
  2118. break_frame = nested_locals;
  2119. }
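/* for(init; test; step) body is laid out as: init, :FOR_ (test, exit to
 * FOR_END_ when false, otherwise jump to FOR_THEN_), :FOR_ITER_ (step, jump
 * back to FOR_), :FOR_THEN_ (body, jump to FOR_ITER_), :FOR_END_.
 * break targets FOR_END_ and continue targets FOR_ITER_. */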
  2120. void process_for(void)
  2121. {
  2122. struct token_list* nested_locals = break_frame;
  2123. char* nested_break_head = break_target_head;
  2124. char* nested_break_func = break_target_func;
  2125. char* nested_break_num = break_target_num;
  2126. char* nested_continue_head = continue_target_head;
  2127. char* number_string = int2str(current_count, 10, TRUE);
  2128. current_count = current_count + 1;
  2129. break_target_head = "FOR_END_";
  2130. continue_target_head = "FOR_ITER_";
  2131. break_target_num = number_string;
  2132. break_frame = function->locals;
  2133. break_target_func = function->s;
  2134. emit_out("# FOR_initialization_");
  2135. uniqueID_out(function->s, number_string);
  2136. global_token = global_token->next;
  2137. require_match("ERROR in process_for\nMISSING (\n", "(");
  2138. if(!match(";",global_token->s))
  2139. {
  2140. expression();
  2141. }
  2142. emit_out(":FOR_");
  2143. uniqueID_out(function->s, number_string);
  2144. require_match("ERROR in process_for\nMISSING ;1\n", ";");
	expression();
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP.Z R0 @FOR_END_");
	else if(X86 == Architecture) emit_out("test_eax,eax\nje %FOR_END_");
	else if(AMD64 == Architecture) emit_out("test_rax,rax\nje %FOR_END_");
	else if(ARMV7L == Architecture) emit_out("!0 CMPI8 R0 IMM_ALWAYS\n^~FOR_END_");
	else if(AARCH64 == Architecture) emit_out("CBNZ_X0_PAST_BR\nLOAD_W16_AHEAD\nSKIP_32_DATA\n&FOR_END_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rs1_a0 @8 bnez\n$FOR_END_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_EQUAL\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");

	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @FOR_THEN_");
	else if(X86 == Architecture) emit_out("jmp %FOR_THEN_");
	else if(AMD64 == Architecture) emit_out("jmp %FOR_THEN_");
	else if(ARMV7L == Architecture) emit_out("^~FOR_THEN_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&FOR_THEN_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$FOR_THEN_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");

	emit_out(":FOR_ITER_");
	uniqueID_out(function->s, number_string);
	require_match("ERROR in process_for\nMISSING ;2\n", ";");
	expression();
	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @FOR_");
	else if(X86 == Architecture) emit_out("jmp %FOR_");
	else if(AMD64 == Architecture) emit_out("jmp %FOR_");
	else if(ARMV7L == Architecture) emit_out("^~FOR_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&FOR_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$FOR_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");

	emit_out(":FOR_THEN_");
	uniqueID_out(function->s, number_string);
	require_match("ERROR in process_for\nMISSING )\n", ")");
	statement();
	require(NULL != global_token, "Reached EOF inside of function\n");

	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @FOR_ITER_");
	else if(X86 == Architecture) emit_out("jmp %FOR_ITER_");
	else if(AMD64 == Architecture) emit_out("jmp %FOR_ITER_");
	else if(ARMV7L == Architecture) emit_out("^~FOR_ITER_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&FOR_ITER_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$FOR_ITER_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");

	emit_out(":FOR_END_");
	uniqueID_out(function->s, number_string);

	break_target_head = nested_break_head;
	break_target_func = nested_break_func;
	break_target_num = nested_break_num;
	continue_target_head = nested_continue_head;
	break_frame = nested_locals;
}
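/* Illustrative sketch (not emitted verbatim): using x86 mnemonics and <id>
 * as shorthand for the function-name-plus-counter suffix written by
 * uniqueID_out, the for loop above produces a label skeleton of this shape,
 * where :FOR_<id> marks the start of the condition check:
 *   :FOR_<id>        condition; test_eax,eax; je %FOR_END_<id>
 *   jmp %FOR_THEN_<id>
 *   :FOR_ITER_<id>   iteration expression; jmp %FOR_<id>
 *   :FOR_THEN_<id>   loop body; jmp %FOR_ITER_<id>
 *   :FOR_END_<id>
 */
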
/* Process Assembly statements */
void process_asm(void)
{
	global_token = global_token->next;
	require_match("ERROR in process_asm\nMISSING (\n", "(");
	while('"' == global_token->s[0])
	{
		emit_out((global_token->s + 1));
		emit_out("\n");
		global_token = global_token->next;
		require(NULL != global_token, "Received EOF inside asm statement\n");
	}
	require_match("ERROR in process_asm\nMISSING )\n", ")");
	require_match("ERROR in process_asm\nMISSING ;\n", ";");
}
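/* Illustrative note (assumed example, not from this file): a statement such as
 *   asm("lea_ebx,[esp+DWORD] %4" "mov_eax,[ebx]");
 * is copied straight into the output, one string literal per line, with the
 * token's leading '"' marker skipped; the assembly text itself is passed
 * through without any checking.
 */
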
/* Process do while loops */
void process_do(void)
{
	struct token_list* nested_locals = break_frame;
	char* nested_break_head = break_target_head;
	char* nested_break_func = break_target_func;
	char* nested_break_num = break_target_num;
	char* nested_continue_head = continue_target_head;

	char* number_string = int2str(current_count, 10, TRUE);
	current_count = current_count + 1;

	break_target_head = "DO_END_";
	continue_target_head = "DO_TEST_";
	break_target_num = number_string;
	break_frame = function->locals;
	break_target_func = function->s;

	emit_out(":DO_");
	uniqueID_out(function->s, number_string);

	global_token = global_token->next;
	require(NULL != global_token, "Received EOF where do statement is expected\n");
	statement();
	require(NULL != global_token, "Reached EOF inside of function\n");

	emit_out(":DO_TEST_");
	uniqueID_out(function->s, number_string);

	require_match("ERROR in process_do\nMISSING while\n", "while");
	require_match("ERROR in process_do\nMISSING (\n", "(");
	expression();
	require_match("ERROR in process_do\nMISSING )\n", ")");
	require_match("ERROR in process_do\nMISSING ;\n", ";");

	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP.NZ R0 @DO_");
	else if(X86 == Architecture) emit_out("test_eax,eax\njne %DO_");
	else if(AMD64 == Architecture) emit_out("test_rax,rax\njne %DO_");
	else if(ARMV7L == Architecture) emit_out("!0 CMPI8 R0 IMM_ALWAYS\n^~DO_");
	else if(AARCH64 == Architecture) emit_out("CBZ_X0_PAST_BR\nLOAD_W16_AHEAD\nSKIP_32_DATA\n&DO_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rs1_a0 @DO_END_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_NE\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture))
	{
		emit_out("beqz\n$DO_");
		uniqueID_out(function->s, number_string);
		emit_out("jal\n");
	}

	emit_out(":DO_END_");
	uniqueID_out(function->s, number_string);

	break_frame = nested_locals;
	break_target_head = nested_break_head;
	break_target_func = nested_break_func;
	break_target_num = nested_break_num;
	continue_target_head = nested_continue_head;
}
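/* Illustrative sketch (not emitted verbatim): with x86 mnemonics and <id> for
 * the uniqueID_out suffix, the do/while above lays out as:
 *   :DO_<id>        loop body
 *   :DO_TEST_<id>   condition; test_eax,eax; jne %DO_<id>
 *   :DO_END_<id>
 * break targets DO_END_<id> and continue targets DO_TEST_<id>.
 */
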
/* Process while loops */
void process_while(void)
{
	struct token_list* nested_locals = break_frame;
	char* nested_break_head = break_target_head;
	char* nested_break_func = break_target_func;
	char* nested_break_num = break_target_num;
	char* nested_continue_head = continue_target_head;

	char* number_string = int2str(current_count, 10, TRUE);
	current_count = current_count + 1;

	break_target_head = "END_WHILE_";
	continue_target_head = "WHILE_";
	break_target_num = number_string;
	break_frame = function->locals;
	break_target_func = function->s;

	emit_out(":WHILE_");
	uniqueID_out(function->s, number_string);

	global_token = global_token->next;
	require_match("ERROR in process_while\nMISSING (\n", "(");
	expression();

	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP.Z R0 @END_WHILE_");
	else if(X86 == Architecture) emit_out("test_eax,eax\nje %END_WHILE_");
	else if(AMD64 == Architecture) emit_out("test_rax,rax\nje %END_WHILE_");
	else if(ARMV7L == Architecture) emit_out("!0 CMPI8 R0 IMM_ALWAYS\n^~END_WHILE_");
	else if(AARCH64 == Architecture) emit_out("CBNZ_X0_PAST_BR\nLOAD_W16_AHEAD\nSKIP_32_DATA\n&END_WHILE_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("rs1_a0 @8 bnez\n$END_WHILE_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_EQUAL\t");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");

	emit_out("# THEN_while_");
	uniqueID_out(function->s, number_string);

	require_match("ERROR in process_while\nMISSING )\n", ")");
	statement();
	require(NULL != global_token, "Reached EOF inside of function\n");

	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @WHILE_");
	else if(X86 == Architecture) emit_out("jmp %WHILE_");
	else if(AMD64 == Architecture) emit_out("jmp %WHILE_");
	else if(ARMV7L == Architecture) emit_out("^~WHILE_");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&WHILE_");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$WHILE_");
	uniqueID_out(function->s, number_string);
	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS\n");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("jal\n");

	emit_out(":END_WHILE_");
	uniqueID_out(function->s, number_string);

	break_target_head = nested_break_head;
	break_target_func = nested_break_func;
	break_target_num = nested_break_num;
	continue_target_head = nested_continue_head;
	break_frame = nested_locals;
}
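/* Illustrative sketch (not emitted verbatim): with x86 mnemonics and <id> for
 * the uniqueID_out suffix, the while loop above lays out as:
 *   :WHILE_<id>        condition; test_eax,eax; je %END_WHILE_<id>
 *   # THEN_while_<id>  loop body; jmp %WHILE_<id>
 *   :END_WHILE_<id>
 * break targets END_WHILE_<id> and continue jumps back to WHILE_<id>.
 */
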
/* Ensure that functions return */
void return_result(void)
{
	global_token = global_token->next;
	require(NULL != global_token, "Incomplete return statement received\n");
	if(global_token->s[0] != ';') expression();
	require_match("ERROR in return_result\nMISSING ;\n", ";");

	struct token_list* i;
	unsigned size_local_var;
	for(i = function->locals; NULL != i; i = i->next)
	{
		size_local_var = ceil_div(i->type->size, register_size);
		while(size_local_var != 0)
		{
			if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("POPR R1 R15\t# _return_result_locals\n");
			else if(X86 == Architecture) emit_out("pop_ebx\t# _return_result_locals\n");
			else if(AMD64 == Architecture) emit_out("pop_rbx\t# _return_result_locals\n");
			else if(ARMV7L == Architecture) emit_out("{R1} POP_ALWAYS\t# _return_result_locals\n");
			else if(AARCH64 == Architecture) emit_out("POP_X1\t# _return_result_locals\n");
			else if(RISCV32 == Architecture) emit_out("rd_a1 rs1_sp lw # _return_result_locals\nrd_sp rs1_sp !4 addi\n");
			else if(RISCV64 == Architecture) emit_out("rd_a1 rs1_sp ld # _return_result_locals\nrd_sp rs1_sp !8 addi\n");
			size_local_var = size_local_var - 1;
		}
	}

	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("RET R15\n");
	else if(X86 == Architecture) emit_out("ret\n");
	else if(AMD64 == Architecture) emit_out("ret\n");
	else if(ARMV7L == Architecture) emit_out("'1' LR RETURN\n");
	else if(AARCH64 == Architecture) emit_out("RETURN\n");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("ret\n");
}
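/* Illustrative note: each local occupies ceil_div(size, register_size) stack
 * slots, so a return inside a function holding "int a; char b[12];" on a
 * 32-bit target would pop 1 + 3 = 4 slots into the scratch register before
 * the ret is emitted (example declarations assumed, not from this file).
 */
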
void process_break(void)
{
	if(NULL == break_target_head)
	{
		line_error();
		fputs("Not inside of a loop or case statement\n", stderr);
		exit(EXIT_FAILURE);
	}

	struct token_list* i = function->locals;
	while(i != break_frame)
	{
		if(NULL == i) break;
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("POPR R1 R15\t# break_cleanup_locals\n");
		else if(X86 == Architecture) emit_out("pop_ebx\t# break_cleanup_locals\n");
		else if(AMD64 == Architecture) emit_out("pop_rbx\t# break_cleanup_locals\n");
		else if(ARMV7L == Architecture) emit_out("{R1} POP_ALWAYS\t# break_cleanup_locals\n");
		else if(AARCH64 == Architecture) emit_out("POP_X1\t# break_cleanup_locals\n");
		else if(RISCV32 == Architecture) emit_out("rd_a1 rs1_sp lw\t# break_cleanup_locals\nrd_sp rs1_sp !4 addi\n");
		else if(RISCV64 == Architecture) emit_out("rd_a1 rs1_sp ld\t# break_cleanup_locals\nrd_sp rs1_sp !8 addi\n");
		i = i->next;
	}

	global_token = global_token->next;

	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @");
	else if(X86 == Architecture) emit_out("jmp %");
	else if(AMD64 == Architecture) emit_out("jmp %");
	else if(ARMV7L == Architecture) emit_out("^~");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$");

	emit_out(break_target_head);
	emit_out(break_target_func);
	emit_out("_");
	emit_out(break_target_num);

	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" jal");
	emit_out("\n");

	require_match("ERROR in break statement\nMissing ;\n", ";");
}
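/* Illustrative note: a break inside a while loop in a function named foo with
 * loop counter 3 would, on x86, first pop one slot per local declared since
 * the loop was entered and then emit "jmp %END_WHILE_foo_3"; the target is
 * assembled from break_target_head, break_target_func and break_target_num
 * ("foo" and "3" are assumed example values).
 */
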
void process_continue(void)
{
	if(NULL == continue_target_head)
	{
		line_error();
		fputs("Not inside of a loop\n", stderr);
		exit(EXIT_FAILURE);
	}

	global_token = global_token->next;

	if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @");
	else if(X86 == Architecture) emit_out("jmp %");
	else if(AMD64 == Architecture) emit_out("jmp %");
	else if(ARMV7L == Architecture) emit_out("^~");
	else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$");

	emit_out(continue_target_head);
	emit_out(break_target_func);
	emit_out("_");
	emit_out(break_target_num);

	if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS");
	else if(AARCH64 == Architecture) emit_out("\nBR_X16");
	else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" jal");
	emit_out("\n");

	require_match("ERROR in continue statement\nMissing ;\n", ";");
}
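/* Illustrative note: continue reuses break_target_func and break_target_num
 * for the label suffix but substitutes continue_target_head (e.g. "WHILE_" or
 * "DO_TEST_" as set by the loop handlers above), and, as written here, it
 * emits no local-cleanup pops before the jump.
 */
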
void recursive_statement(void)
{
	global_token = global_token->next;
	require(NULL != global_token, "Received EOF in recursive statement\n");
	struct token_list* frame = function->locals;

	while(!match("}", global_token->s))
	{
		statement();
		require(NULL != global_token, "Received EOF in recursive statement prior to }\n");
	}
	global_token = global_token->next;

	/* Clean up any locals added */
	if(((X86 == Architecture) && !match("ret\n", output_list->s)) ||
	   ((AMD64 == Architecture) && !match("ret\n", output_list->s)) ||
	   (((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) && !match("RET R15\n", output_list->s)) ||
	   ((ARMV7L == Architecture) && !match("'1' LR RETURN\n", output_list->s)) ||
	   ((AARCH64 == Architecture) && !match("RETURN\n", output_list->s)) ||
	   (((RISCV32 == Architecture) || (RISCV64 == Architecture)) && !match("ret\n", output_list->s)))
	{
		struct token_list* i;
		for(i = function->locals; frame != i; i = i->next)
		{
			if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("POPR R1 R15\t# _recursive_statement_locals\n");
			else if(X86 == Architecture) emit_out("pop_ebx\t# _recursive_statement_locals\n");
			else if(AMD64 == Architecture) emit_out("pop_rbx\t# _recursive_statement_locals\n");
			else if(ARMV7L == Architecture) emit_out("{R1} POP_ALWAYS\t# _recursive_statement_locals\n");
			else if(AARCH64 == Architecture) emit_out("POP_X1\t# _recursive_statement_locals\n");
			else if(RISCV32 == Architecture) emit_out("rd_a1 rs1_sp lw\t# _recursive_statement_locals\nrd_sp rs1_sp !4 addi\n");
			else if(RISCV64 == Architecture) emit_out("rd_a1 rs1_sp ld\t# _recursive_statement_locals\nrd_sp rs1_sp !8 addi\n");
		}
	}

	function->locals = frame;
}
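/* Illustrative note: a { ... } block restores function->locals to the frame
 * captured on entry; the cleanup pops are skipped only when the last thing
 * emitted for the block was already the architecture's return sequence,
 * because return_result has unwound those locals itself in that case.
 */
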
/*
 * statement:
 *     { statement-list-opt }
 *     type-name identifier ;
 *     type-name identifier = expression ;
 *     if ( expression ) statement
 *     if ( expression ) statement else statement
 *     do statement while ( expression ) ;
 *     while ( expression ) statement
 *     for ( expression ; expression ; expression ) statement
 *     asm ( "assembly" ... "assembly" ) ;
 *     goto label ;
 *     label:
 *     return ;
 *     break ;
 *     expr ;
 */
struct type* lookup_type(char* s, struct type* start);
void statement(void)
{
	require(NULL != global_token, "expected a C statement but received EOF\n");
	/* Always an integer until told otherwise */
	current_target = integer;

	if(global_token->s[0] == '{')
	{
		recursive_statement();
	}
	else if(':' == global_token->s[0])
	{
		emit_out(global_token->s);
		emit_out("\t#C goto label\n");
		global_token = global_token->next;
	}
	else if((NULL != lookup_type(global_token->s, prim_types)) ||
	        match("enum", global_token->s) ||
	        match("struct", global_token->s) ||
	        match("const", global_token->s))
	{
		collect_local();
	}
	else if(match("if", global_token->s))
	{
		process_if();
	}
	else if(match("switch", global_token->s))
	{
		process_switch();
	}
	else if(match("do", global_token->s))
	{
		process_do();
	}
	else if(match("while", global_token->s))
	{
		process_while();
	}
	else if(match("for", global_token->s))
	{
		process_for();
	}
	else if(match("asm", global_token->s))
	{
		process_asm();
	}
	else if(match("goto", global_token->s))
	{
		global_token = global_token->next;
		require(NULL != global_token, "naked goto is not supported\n");
		if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) emit_out("JUMP @");
		else if(X86 == Architecture) emit_out("jmp %");
		else if(AMD64 == Architecture) emit_out("jmp %");
		else if(ARMV7L == Architecture) emit_out("^~");
		else if(AARCH64 == Architecture) emit_out("LOAD_W16_AHEAD\nSKIP_32_DATA\n&");
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out("$");
		emit_out(global_token->s);
		if(ARMV7L == Architecture) emit_out(" JUMP_ALWAYS");
		else if(AARCH64 == Architecture) emit_out("\nBR_X16");
		else if((RISCV32 == Architecture) || (RISCV64 == Architecture)) emit_out(" jal");
		emit_out("\n");
		global_token = global_token->next;
		require_match("ERROR in statement\nMissing ;\n", ";");
	}
	else if(match("return", global_token->s))
	{
		return_result();
	}
	else if(match("break", global_token->s))
	{
		process_break();
	}
	else if(match("continue", global_token->s))
	{
		process_continue();
	}
	else
	{
		expression();
		require_match("ERROR in statement\nMISSING ;\n", ";");
	}
}
/* Collect function arguments */
void collect_arguments(void)
{
	global_token = global_token->next;
	require(NULL != global_token, "Received EOF when attempting to collect arguments\n");
	struct type* type_size;
	struct token_list* a;

	while(!match(")", global_token->s))
	{
		type_size = type_name();
		require(NULL != global_token, "Received EOF when attempting to collect arguments\n");
		require(NULL != type_size, "Must have non-null type\n");
		if(global_token->s[0] == ')')
		{
			/* foo(int,char,void) doesn't need anything done */
			continue;
		}
		else if(global_token->s[0] != ',')
		{
			/* deal with foo(int a, char b) */
			require(!in_set(global_token->s[0], "[{(<=>)}]|&!^%;:'\""), "forbidden character in argument variable name\n");
			require(!iskeywordp(global_token->s), "You are not allowed to use a keyword as an argument variable name\n");
			a = sym_declare(global_token->s, type_size, function->arguments);

			if(NULL == function->arguments)
			{
				if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) a->depth = 0;
				else if(X86 == Architecture) a->depth = -4;
				else if(AMD64 == Architecture) a->depth = -8;
				else if(ARMV7L == Architecture) a->depth = 4;
				else if(AARCH64 == Architecture) a->depth = register_size;
				else if(RISCV32 == Architecture) a->depth = -4;
				else if(RISCV64 == Architecture) a->depth = -8;
			}
			else
			{
				if((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) a->depth = function->arguments->depth + register_size;
				else if(X86 == Architecture) a->depth = function->arguments->depth - register_size;
				else if(AMD64 == Architecture) a->depth = function->arguments->depth - register_size;
				else if(ARMV7L == Architecture) a->depth = function->arguments->depth + register_size;
				else if(AARCH64 == Architecture) a->depth = function->arguments->depth + register_size;
				else if(RISCV32 == Architecture) a->depth = function->arguments->depth - register_size;
				else if(RISCV64 == Architecture) a->depth = function->arguments->depth - register_size;
			}

			global_token = global_token->next;
			require(NULL != global_token, "Incomplete argument list\n");
			function->arguments = a;
		}

		/* ignore trailing comma (needed for foo(bar(), 1); expressions) */
		if(global_token->s[0] == ',')
		{
			global_token = global_token->next;
			require(NULL != global_token, "naked comma in collect arguments\n");
		}

		require(NULL != global_token, "Argument list never completed\n");
	}
	global_token = global_token->next;
}
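/* Illustrative note: each argument is given a per-architecture depth that the
 * code generator later uses to address it; for foo(int a, int b, int c) on
 * x86 the depths would be -4, -8, -12, since every additional argument moves
 * by register_size in the negative direction there, while KNIGHT, ARMV7L and
 * AARCH64 count upward instead ("foo" and its arguments are assumed example
 * values).
 */
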
void declare_function(void)
{
	current_count = 0;
	function = sym_declare(global_token->prev->s, NULL, global_function_list);

	/* allow previously defined functions to be looked up */
	global_function_list = function;

	if((KNIGHT_NATIVE == Architecture) && match("main", function->s))
	{
		require_match("Impossible error ( vanished\n", "(");
		require_match("Reality ERROR (USING KNIGHT-NATIVE)\nHardware does not support arguments\nthus neither can main on this architecture\ntry tape_01 and tape_02 instead\n", ")");
	}
	else collect_arguments();

	require(NULL != global_token, "Function definitions either need to be prototypes or full\n");

	/* If just a prototype don't waste time */
	if(global_token->s[0] == ';') global_token = global_token->next;
	else
	{
		emit_out("# Defining function ");
		emit_out(function->s);
		emit_out("\n");
		emit_out(":FUNCTION_");
		emit_out(function->s);
		emit_out("\n");
		statement();

		/* Prevent duplicate RETURNS */
		if(((KNIGHT_POSIX == Architecture) || (KNIGHT_NATIVE == Architecture)) && !match("RET R15\n", output_list->s)) emit_out("RET R15\n");
		else if((X86 == Architecture) && !match("ret\n", output_list->s)) emit_out("ret\n");
		else if((AMD64 == Architecture) && !match("ret\n", output_list->s)) emit_out("ret\n");
		else if((ARMV7L == Architecture) && !match("'1' LR RETURN\n", output_list->s)) emit_out("'1' LR RETURN\n");
		else if((AARCH64 == Architecture) && !match("RETURN\n", output_list->s)) emit_out("RETURN\n");
		else if((RISCV32 == Architecture) && !match("ret\n", output_list->s)) emit_out("ret\n");
		else if((RISCV64 == Architecture) && !match("ret\n", output_list->s)) emit_out("ret\n");
	}
}
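/* Illustrative note: a definition such as "int square(int x) { return x * x; }"
 * therefore starts in the output as
 *   # Defining function square
 *   :FUNCTION_square
 * followed by the body, and a final return sequence is appended only when the
 * body did not already end with one ("square" is an assumed example name).
 */
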
void global_constant(void)
{
	global_token = global_token->next;
	require(NULL != global_token, "CONSTANT lacks a name\n");
	global_constant_list = sym_declare(global_token->s, NULL, global_constant_list);

	require(NULL != global_token->next, "CONSTANT lacks a value\n");
	if(match("sizeof", global_token->next->s))
	{
		global_token = global_token->next->next;
		require_match("ERROR in CONSTANT with sizeof\nMissing (\n", "(");
		struct type* a = type_name();
		require_match("ERROR in CONSTANT with sizeof\nMissing )\n", ")");
		global_token->prev->s = int2str(a->size, 10, TRUE);
		global_constant_list->arguments = global_token->prev;
	}
	else
	{
		global_constant_list->arguments = global_token->next;
		global_token = global_token->next->next;
	}
}
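/* Illustrative note: "CONSTANT TRUE 1" binds the token "1" as the constant's
 * value, while "CONSTANT ptr_size sizeof(int)" resolves the size during
 * parsing and stores its decimal string instead (TRUE and ptr_size are
 * assumed example names).
 */
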
struct type* global_typedef(void)
{
	struct type* type_size;
	/* typedef $TYPE $NAME; */
	global_token = global_token->next;
	type_size = type_name();
	require(NULL != global_token, "Received EOF while reading typedef\n");
	type_size = mirror_type(type_size, global_token->s);
	add_primitive(type_size);
	global_token = global_token->next;
	require_match("ERROR in typedef statement\nMissing ;\n", ";");
	return type_size;
}
void global_static_array(struct type* type_size, struct token_list* name)
{
	int size;
	maybe_bootstrap_error("global array definitions");
	globals_list = emit(":GLOBAL_", globals_list);
	globals_list = emit(name->s, globals_list);
	globals_list = emit("\n&GLOBAL_STORAGE_", globals_list);
	globals_list = emit(name->s, globals_list);
	if(AARCH64 == Architecture || AMD64 == Architecture || RISCV64 == Architecture)
	{
		globals_list = emit(" %0", globals_list);
	}
	globals_list = emit("\n:GLOBAL_STORAGE_", globals_list);
	globals_list = emit(name->s, globals_list);
	require(NULL != global_token->next, "Unterminated global\n");
	global_token = global_token->next;

	/* Make sure not negative */
	if(match("-", global_token->s))
	{
		line_error();
		fputs("Negative values are not supported for allocated arrays\n", stderr);
		exit(EXIT_FAILURE);
	}

	/* length */
	size = strtoint(global_token->s) * type_size->size;

	/* Stop bad states */
	if((size < 0) || (size > 0x100000))
	{
		line_error();
		fputs("M2-Planet is very inefficient so you probably don't want to allocate over 1MB into your binary for NULLs\n", stderr);
		exit(EXIT_FAILURE);
	}

	/* Ensure properly closed */
	global_token = global_token->next;
	require_match("missing close bracket\n", "]");
	require_match("missing ;\n", ";");

	globals_list = emit("\n'", globals_list);
	while(0 != size)
	{
		globals_list = emit(" 00", globals_list);
		size = size - 1;
	}
	globals_list = emit("'\n", globals_list);
}
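/* Illustrative sketch (assumed example declaration): a global "char buffer[8];"
 * emits roughly
 *   :GLOBAL_buffer
 *   &GLOBAL_STORAGE_buffer        (plus " %0" padding on 64-bit targets)
 *   :GLOBAL_STORAGE_buffer
 *   ' 00 00 00 00 00 00 00 00'
 * i.e. a pointer labeled with the variable's name followed by zero-filled
 * storage of element-count times element-size bytes.
 */
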
void global_assignment(void)
{
	/* Store the global's value */
	globals_list = emit(":GLOBAL_", globals_list);
	globals_list = emit(global_token->prev->s, globals_list);
	globals_list = emit("\n", globals_list);
	global_token = global_token->next;
	require(NULL != global_token, "Global lacks a value in assignment\n");

	unsigned padding_zeroes;
	if(in_set(global_token->s[0], "0123456789"))
	{
		/* Assume Int */
		globals_list = emit("%", globals_list);
		globals_list = emit(global_token->s, globals_list);

		/* broken for big endian architectures */
		padding_zeroes = (register_size / 4) - 1;
		while(padding_zeroes > 0)
		{
			/* Assume positive Int */
			globals_list = emit(" %0", globals_list);
			padding_zeroes = padding_zeroes - 1;
		}
		globals_list = emit("\n", globals_list);
	}
	else if('"' == global_token->s[0])
	{
		/* Assume a string */
		globals_list = emit("&GLOBAL_", globals_list);
		globals_list = emit(global_token->prev->prev->s, globals_list);
		globals_list = emit("_contents\n", globals_list);
		globals_list = emit(":GLOBAL_", globals_list);
		globals_list = emit(global_token->prev->prev->s, globals_list);
		globals_list = emit("_contents\n", globals_list);
		globals_list = emit(parse_string(global_token->s), globals_list);
	}
	else
	{
		line_error();
		fputs("Received ", stderr);
		fputs(global_token->s, stderr);
		fputs(" in program\n", stderr);
		exit(EXIT_FAILURE);
	}
	global_token = global_token->next;
	require_match("ERROR in Program\nMissing ;\n", ";");
}
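/* Illustrative note (assumed example declarations): int answer = 42; emits
 * ":GLOBAL_answer" followed by "%42" (with an extra " %0" of padding on
 * 64-bit targets), while char* msg = "hi"; emits a pointer to
 * GLOBAL_msg_contents and then the string bytes produced by parse_string.
 */
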
/*
 * program:
 *     declaration
 *     declaration program
 *
 * declaration:
 *     CONSTANT identifier value
 *     typedef identifier type ;
 *     type-name identifier ;
 *     type-name identifier = value ;
 *     type-name identifier [ value ] ;
 *     type-name identifier ( parameter-list ) ;
 *     type-name identifier ( parameter-list ) statement
 *
 * parameter-list:
 *     parameter-declaration
 *     parameter-list , parameter-declaration
 *
 * parameter-declaration:
 *     type-name identifier-opt
 */
void program(void)
{
	unsigned i;
	function = NULL;
	Address_of = FALSE;
	struct type* type_size;

new_type:
	/* Deal with garbage input */
	if(NULL == global_token) return;
	require('#' != global_token->s[0], "unhandled macro directive\n");
	require(!match("\n", global_token->s), "unexpected newline token\n");

	/* Handle cc_* CONSTANT statements */
	if(match("CONSTANT", global_token->s))
	{
		global_constant();
		goto new_type;
	}

	/* Handle c typedef statements */
	if(match("typedef", global_token->s))
	{
		type_size = global_typedef();
		goto new_type;
	}

	type_size = type_name();
	/* Deal with case of struct definitions */
	if(NULL == type_size) goto new_type;

	require(NULL != global_token->next, "Unterminated global\n");

	/* Add to global symbol table */
	global_symbol_list = sym_declare(global_token->s, type_size, global_symbol_list);
	global_token = global_token->next;

	/* Deal with global variables */
	if(match(";", global_token->s))
	{
		/* Ensure enough bytes are allocated to store global variable.
		   In some cases it allocates too much but that is harmless. */
		globals_list = emit(":GLOBAL_", globals_list);
		globals_list = emit(global_token->prev->s, globals_list);
		/* round up division */
		i = ceil_div(type_size->size, register_size);
		globals_list = emit("\n", globals_list);
		while(i != 0)
		{
			globals_list = emit("NULL\n", globals_list);
			i = i - 1;
		}
		global_token = global_token->next;
		goto new_type;
	}

	/* Deal with global functions */
	if(match("(", global_token->s))
	{
		declare_function();
		goto new_type;
	}

	/* Deal with assignment to a global variable */
	if(match("=", global_token->s))
	{
		global_assignment();
		goto new_type;
	}

	/* Deal with global static arrays */
	if(match("[", global_token->s))
	{
		global_static_array(type_size, global_token->prev);
		goto new_type;
	}

	/* Everything else is just an error */
	line_error();
	fputs("Received ", stderr);
	fputs(global_token->s, stderr);
	fputs(" in program\n", stderr);
	exit(EXIT_FAILURE);
}
void recursive_output(struct token_list* head, FILE* out)
{
	struct token_list* i = reverse_list(head);
	while(NULL != i)
	{
		fputs(i->s, out);
		i = i->next;
	}
}

void output_tokens(struct token_list* i, FILE* out)
{
	while(NULL != i)
	{
		fputs(i->s, out);
		fputs(" ", out);
		i = i->next;
	}
}