/*
 *  i386 specific functions for TCC assembler
 *
 *  Copyright (c) 2001, 2002 Fabrice Bellard
 *  Copyright (c) 2009 Frédéric Feret (x86_64 support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

#include "tcc.h"

#define MAX_OPERANDS 3

#define TOK_ASM_first TOK_ASM_clc
#define TOK_ASM_last TOK_ASM_emms
#define TOK_ASM_alllast TOK_ASM_subps

#define OPC_B        0x01  /* only used with OPC_WL */
#define OPC_WL       0x02  /* accepts w, l or no suffix */
#define OPC_BWL      (OPC_B | OPC_WL) /* accepts b, w, l or no suffix */
#define OPC_REG      0x04  /* register is added to opcode */
#define OPC_MODRM    0x08  /* modrm encoding */

#define OPCT_MASK    0x70
#define OPC_FWAIT    0x10  /* add fwait opcode */
#define OPC_SHIFT    0x20  /* shift opcodes */
#define OPC_ARITH    0x30  /* arithmetic opcodes */
#define OPC_FARITH   0x40  /* FPU arithmetic opcodes */
#define OPC_TEST     0x50  /* test opcodes */
#define OPCT_IS(v,i) (((v) & OPCT_MASK) == (i))

#define OPC_0F       0x100 /* Is secondary map (0x0f prefix) */
#define OPC_48       0x200 /* Always has REX prefix */
#ifdef TCC_TARGET_X86_64
# define OPC_WLQ     0x1000  /* accepts w, l, q or no suffix */
# define OPC_BWLQ    (OPC_B | OPC_WLQ) /* accepts b, w, l, q or no suffix */
# define OPC_WLX     OPC_WLQ
# define OPC_BWLX    OPC_BWLQ
#else
# define OPC_WLX     OPC_WL
# define OPC_BWLX    OPC_BWL
#endif

#define OPC_GROUP_SHIFT 13
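/* Layout of instr_type, as assembled by the T() macro further down:
   the low bits carry the OPC_xxx flags above, bits 13..15 (starting
   at OPC_GROUP_SHIFT) carry the ModRM "group" digit (the "/digit" of
   Intel opcode notation), and OPC_0F marks opcodes from the two-byte
   0x0f map whose escape byte was stripped by the O() macro. */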
/* in order to compress the operand type, we use specific operands and
   we or only with EA */
enum {
    OPT_REG8=0, /* warning: value is hardcoded from TOK_ASM_xxx */
    OPT_REG16,  /* warning: value is hardcoded from TOK_ASM_xxx */
    OPT_REG32,  /* warning: value is hardcoded from TOK_ASM_xxx */
#ifdef TCC_TARGET_X86_64
    OPT_REG64,  /* warning: value is hardcoded from TOK_ASM_xxx */
#endif
    OPT_MMX,    /* warning: value is hardcoded from TOK_ASM_xxx */
    OPT_SSE,    /* warning: value is hardcoded from TOK_ASM_xxx */
    OPT_CR,     /* warning: value is hardcoded from TOK_ASM_xxx */
    OPT_TR,     /* warning: value is hardcoded from TOK_ASM_xxx */
    OPT_DB,     /* warning: value is hardcoded from TOK_ASM_xxx */
    OPT_SEG,
    OPT_ST,
#ifdef TCC_TARGET_X86_64
    OPT_REG8_LOW, /* %spl,%bpl,%sil,%dil, encoded like ah,ch,dh,bh, but
                     with REX prefix, not used in insn templates */
#endif
    OPT_IM8,
    OPT_IM8S,
    OPT_IM16,
    OPT_IM32,
#ifdef TCC_TARGET_X86_64
    OPT_IM64,
#endif
    OPT_EAX,    /* %al, %ax, %eax or %rax register */
    OPT_ST0,    /* %st(0) register */
    OPT_CL,     /* %cl register */
    OPT_DX,     /* %dx register */
    OPT_ADDR,   /* OP_EA with only offset */
    OPT_INDIR,  /* *(expr) */
    /* composite types */
    OPT_COMPOSITE_FIRST,
    OPT_IM,     /* IM8 | IM16 | IM32 */
    OPT_REG,    /* REG8 | REG16 | REG32 | REG64 */
    OPT_REGW,   /* REG16 | REG32 | REG64 */
    OPT_IMW,    /* IM16 | IM32 */
    OPT_MMXSSE, /* MMX | SSE */
    OPT_DISP,   /* Like OPT_ADDR, but emitted as displacement (for jumps) */
    OPT_DISP8,  /* Like OPT_ADDR, but only 8bit (short jumps) */
    /* can be ored with any OPT_xxx */
    OPT_EA = 0x80
};
#define OP_REG8   (1 << OPT_REG8)
#define OP_REG16  (1 << OPT_REG16)
#define OP_REG32  (1 << OPT_REG32)
#define OP_MMX    (1 << OPT_MMX)
#define OP_SSE    (1 << OPT_SSE)
#define OP_CR     (1 << OPT_CR)
#define OP_TR     (1 << OPT_TR)
#define OP_DB     (1 << OPT_DB)
#define OP_SEG    (1 << OPT_SEG)
#define OP_ST     (1 << OPT_ST)
#define OP_IM8    (1 << OPT_IM8)
#define OP_IM8S   (1 << OPT_IM8S)
#define OP_IM16   (1 << OPT_IM16)
#define OP_IM32   (1 << OPT_IM32)
#define OP_EAX    (1 << OPT_EAX)
#define OP_ST0    (1 << OPT_ST0)
#define OP_CL     (1 << OPT_CL)
#define OP_DX     (1 << OPT_DX)
#define OP_ADDR   (1 << OPT_ADDR)
#define OP_INDIR  (1 << OPT_INDIR)
#ifdef TCC_TARGET_X86_64
# define OP_REG64 (1 << OPT_REG64)
# define OP_REG8_LOW (1 << OPT_REG8_LOW)
# define OP_IM64  (1 << OPT_IM64)
# define OP_EA32  (OP_EA << 1)
#else
# define OP_REG64 0
# define OP_REG8_LOW 0
# define OP_IM64  0
# define OP_EA32  0
#endif

#define OP_EA     0x40000000
#define OP_REG    (OP_REG8 | OP_REG16 | OP_REG32 | OP_REG64)

#ifdef TCC_TARGET_X86_64
# define TREG_XAX   TREG_RAX
# define TREG_XCX   TREG_RCX
# define TREG_XDX   TREG_RDX
#else
# define TREG_XAX   TREG_EAX
# define TREG_XCX   TREG_ECX
# define TREG_XDX   TREG_EDX
#endif
typedef struct ASMInstr {
    uint16_t sym;
    uint16_t opcode;
    uint16_t instr_type;
    uint8_t nb_ops;
    uint8_t op_type[MAX_OPERANDS]; /* see OP_xxx */
} ASMInstr;

typedef struct Operand {
    uint32_t type;
    int8_t  reg;  /* register, -1 if none */
    int8_t  reg2; /* second register, -1 if none */
    uint8_t shift;
    ExprValue e;
} Operand;

static const uint8_t reg_to_size[9] = {
/*
    [OP_REG8] = 0,
    [OP_REG16] = 1,
    [OP_REG32] = 2,
#ifdef TCC_TARGET_X86_64
    [OP_REG64] = 3,
#endif
*/
    0, 0, 1, 0, 2, 0, 0, 0, 3
};
#define NB_TEST_OPCODES 30

static const uint8_t test_bits[NB_TEST_OPCODES] = {
    0x00, /* o */
    0x01, /* no */
    0x02, /* b */
    0x02, /* c */
    0x02, /* nae */
    0x03, /* nb */
    0x03, /* nc */
    0x03, /* ae */
    0x04, /* e */
    0x04, /* z */
    0x05, /* ne */
    0x05, /* nz */
    0x06, /* be */
    0x06, /* na */
    0x07, /* nbe */
    0x07, /* a */
    0x08, /* s */
    0x09, /* ns */
    0x0a, /* p */
    0x0a, /* pe */
    0x0b, /* np */
    0x0b, /* po */
    0x0c, /* l */
    0x0c, /* nge */
    0x0d, /* nl */
    0x0d, /* ge */
    0x0e, /* le */
    0x0e, /* ng */
    0x0f, /* nle */
    0x0f, /* g */
};
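/* These are the 4-bit condition codes of the x86 "tttn" field: the
   matching opcode template supplies the base byte and the condition
   is added to it later in asm_opcode().  For example, sete assembles
   to 0x0f 0x90 + 4 = 0x0f 0x94, and the short jne form to
   0x70 + 5 = 0x75. */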
static const uint8_t segment_prefixes[] = {
    0x26, /* es */
    0x2e, /* cs */
    0x36, /* ss */
    0x3e, /* ds */
    0x64, /* fs */
    0x65  /* gs */
};
static const ASMInstr asm_instrs[] = {
#define ALT(x) x
/* This removes a 0x0f in the second byte */
#define O(o) ((uint64_t) ((((o) & 0xff00) == 0x0f00) ? ((((o) >> 8) & ~0xff) | ((o) & 0xff)) : (o)))
/* This constructs instr_type from opcode, type and group. */
#define T(o,i,g) ((i) | ((g) << OPC_GROUP_SHIFT) | ((((o) & 0xff00) == 0x0f00) ? OPC_0F : 0))
#define DEF_ASM_OP0(name, opcode)
#define DEF_ASM_OP0L(name, opcode, group, instr_type) { TOK_ASM_ ## name, O(opcode), T(opcode, instr_type, group), 0, { 0 } },
#define DEF_ASM_OP1(name, opcode, group, instr_type, op0) { TOK_ASM_ ## name, O(opcode), T(opcode, instr_type, group), 1, { op0 }},
#define DEF_ASM_OP2(name, opcode, group, instr_type, op0, op1) { TOK_ASM_ ## name, O(opcode), T(opcode, instr_type, group), 2, { op0, op1 }},
#define DEF_ASM_OP3(name, opcode, group, instr_type, op0, op1, op2) { TOK_ASM_ ## name, O(opcode), T(opcode, instr_type, group), 3, { op0, op1, op2 }},
#ifdef TCC_TARGET_X86_64
# include "x86_64-asm.h"
#else
# include "i386-asm.h"
#endif
    /* last operation */
    { 0, },
};
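/* Worked example of the compression above: a two-byte-map opcode
   value 0x0fXY is stored by O() as plain 0xXY (the 0x0f escape byte
   is dropped so the opcode fits the uint16_t field), and T()
   compensates by setting OPC_0F in instr_type; asm_opcode()
   re-inserts the 0x0f byte at emission time. */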
static const uint16_t op0_codes[] = {
#define ALT(x)
#define DEF_ASM_OP0(x, opcode) opcode,
#define DEF_ASM_OP0L(name, opcode, group, instr_type)
#define DEF_ASM_OP1(name, opcode, group, instr_type, op0)
#define DEF_ASM_OP2(name, opcode, group, instr_type, op0, op1)
#define DEF_ASM_OP3(name, opcode, group, instr_type, op0, op1, op2)
#ifdef TCC_TARGET_X86_64
# include "x86_64-asm.h"
#else
# include "i386-asm.h"
#endif
};
static inline int get_reg_shift(TCCState *s1)
{
    int shift, v;
    v = asm_int_expr(s1);
    switch(v) {
    case 1:
        shift = 0;
        break;
    case 2:
        shift = 1;
        break;
    case 4:
        shift = 2;
        break;
    case 8:
        shift = 3;
        break;
    default:
        expect("1, 2, 4 or 8 constant");
        shift = 0;
        break;
    }
    return shift;
}
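/* The value returned is the log2 of the scale factor, which is what
   the two high bits of a SIB byte hold: e.g. "(,%eax,4)" yields
   shift = 2, emitted later by asm_modrm() as op->shift << 6. */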
#ifdef TCC_TARGET_X86_64
static int asm_parse_numeric_reg(int t, unsigned int *type)
{
    int reg = -1;
    if (t >= TOK_IDENT && t < tok_ident) {
        const char *s = table_ident[t - TOK_IDENT]->str;
        char c;
        *type = OP_REG64;
        if (*s == 'c') {
            s++;
            *type = OP_CR;
        }
        if (*s++ != 'r')
            return -1;
        /* Don't allow leading '0'. */
        if ((c = *s++) >= '1' && c <= '9')
            reg = c - '0';
        else
            return -1;
        if ((c = *s) >= '0' && c <= '5')
            s++, reg = reg * 10 + c - '0';
        if (reg > 15)
            return -1;
        if ((c = *s) == 0)
            ;
        else if (*type != OP_REG64)
            return -1;
        else if (c == 'b' && !s[1])
            *type = OP_REG8;
        else if (c == 'w' && !s[1])
            *type = OP_REG16;
        else if (c == 'd' && !s[1])
            *type = OP_REG32;
        else
            return -1;
    }
    return reg;
}
#endif
static int asm_parse_reg(unsigned int *type)
{
    int reg = 0;
    *type = 0;
    if (tok != '%')
        goto error_32;
    next();
    if (tok >= TOK_ASM_eax && tok <= TOK_ASM_edi) {
        reg = tok - TOK_ASM_eax;
        *type = OP_REG32;
#ifdef TCC_TARGET_X86_64
    } else if (tok >= TOK_ASM_rax && tok <= TOK_ASM_rdi) {
        reg = tok - TOK_ASM_rax;
        *type = OP_REG64;
    } else if (tok == TOK_ASM_rip) {
        reg = -2; /* Probably should use different escape code. */
        *type = OP_REG64;
    } else if ((reg = asm_parse_numeric_reg(tok, type)) >= 0
               && (*type == OP_REG32 || *type == OP_REG64)) {
        ;
#endif
    } else {
    error_32:
        expect("register");
    }
    next();
    return reg;
}
static void parse_operand(TCCState *s1, Operand *op)
{
    ExprValue e;
    int reg, indir;
    const char *p;

    indir = 0;
    if (tok == '*') {
        next();
        indir = OP_INDIR;
    }

    if (tok == '%') {
        next();
        if (tok >= TOK_ASM_al && tok <= TOK_ASM_db7) {
            reg = tok - TOK_ASM_al;
            op->type = 1 << (reg >> 3); /* WARNING: do not change constant order */
            op->reg = reg & 7;
            if ((op->type & OP_REG) && op->reg == TREG_XAX)
                op->type |= OP_EAX;
            else if (op->type == OP_REG8 && op->reg == TREG_XCX)
                op->type |= OP_CL;
            else if (op->type == OP_REG16 && op->reg == TREG_XDX)
                op->type |= OP_DX;
        } else if (tok >= TOK_ASM_dr0 && tok <= TOK_ASM_dr7) {
            op->type = OP_DB;
            op->reg = tok - TOK_ASM_dr0;
        } else if (tok >= TOK_ASM_es && tok <= TOK_ASM_gs) {
            op->type = OP_SEG;
            op->reg = tok - TOK_ASM_es;
        } else if (tok == TOK_ASM_st) {
            op->type = OP_ST;
            op->reg = 0;
            next();
            if (tok == '(') {
                next();
                if (tok != TOK_PPNUM)
                    goto reg_error;
                p = tokc.str.data;
                reg = p[0] - '0';
                if ((unsigned)reg >= 8 || p[1] != '\0')
                    goto reg_error;
                op->reg = reg;
                next();
                skip(')');
            }
            if (op->reg == 0)
                op->type |= OP_ST0;
            goto no_skip;
#ifdef TCC_TARGET_X86_64
        } else if (tok >= TOK_ASM_spl && tok <= TOK_ASM_dil) {
            op->type = OP_REG8 | OP_REG8_LOW;
            op->reg = 4 + tok - TOK_ASM_spl;
        } else if ((op->reg = asm_parse_numeric_reg(tok, &op->type)) >= 0) {
            ;
#endif
        } else {
        reg_error:
            tcc_error("unknown register %%%s", get_tok_str(tok, &tokc));
        }
        next();
    no_skip: ;
    } else if (tok == '$') {
        /* constant value */
        next();
        asm_expr(s1, &e);
        op->type = OP_IM32;
        op->e = e;
        if (!op->e.sym) {
            if (op->e.v == (uint8_t)op->e.v)
                op->type |= OP_IM8;
            if (op->e.v == (int8_t)op->e.v)
                op->type |= OP_IM8S;
            if (op->e.v == (uint16_t)op->e.v)
                op->type |= OP_IM16;
#ifdef TCC_TARGET_X86_64
            if (op->e.v != (int32_t)op->e.v && op->e.v != (uint32_t)op->e.v)
                op->type = OP_IM64;
#endif
        }
    } else {
        /* address(reg,reg2,shift) with all variants */
        op->type = OP_EA;
        op->reg = -1;
        op->reg2 = -1;
        op->shift = 0;
        if (tok != '(') {
            asm_expr(s1, &e);
            op->e = e;
        } else {
            next();
            if (tok == '%') {
                unget_tok('(');
                op->e.v = 0;
                op->e.sym = NULL;
            } else {
                /* bracketed offset expression */
                asm_expr(s1, &e);
                if (tok != ')')
                    expect(")");
                next();
                op->e.v = e.v;
                op->e.sym = e.sym;
            }
            op->e.pcrel = 0;
        }
        if (tok == '(') {
            unsigned int type = 0;
            next();
            if (tok != ',') {
                op->reg = asm_parse_reg(&type);
            }
            if (tok == ',') {
                next();
                if (tok != ',') {
                    op->reg2 = asm_parse_reg(&type);
                }
                if (tok == ',') {
                    next();
                    op->shift = get_reg_shift(s1);
                }
            }
            if (type & OP_REG32)
                op->type |= OP_EA32;
            skip(')');
        }
        if (op->reg == -1 && op->reg2 == -1)
            op->type |= OP_ADDR;
    }
    op->type |= indir;
}
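/* Summary of the AT&T operand forms accepted above: "$expr"
   (immediate), "%reg" (register), "*operand" (indirect jump/call
   target) and the memory reference "expr(%base,%index,scale)", where
   offset, base, index and scale may each be omitted independently. */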
/* XXX: unify with C code output ? */
ST_FUNC void gen_expr32(ExprValue *pe)
{
    if (pe->pcrel)
        /* If PC-relative, always set VT_SYM, even without symbol,
           so as to force a relocation to be emitted. */
        gen_addrpc32(VT_SYM, pe->sym, pe->v);
    else
        gen_addr32(pe->sym ? VT_SYM : 0, pe->sym, pe->v);
}

#ifdef TCC_TARGET_X86_64
ST_FUNC void gen_expr64(ExprValue *pe)
{
    gen_addr64(pe->sym ? VT_SYM : 0, pe->sym, pe->v);
}
#endif
/* XXX: unify with C code output ? */
static void gen_disp32(ExprValue *pe)
{
    Sym *sym = pe->sym;
    if (sym && sym->r == cur_text_section->sh_num) {
        /* same section: we can output an absolute value. Note
           that the TCC compiler behaves differently here because
           it always outputs a relocation to ease (future) code
           elimination in the linker */
        gen_le32(pe->v + sym->jnext - ind - 4);
    } else {
        if (sym && sym->type.t == VT_VOID) {
            sym->type.t = VT_FUNC;
            sym->type.ref = NULL;
        }
        gen_addrpc32(VT_SYM, sym, pe->v);
    }
}
/* generate the modrm operand */
static inline int asm_modrm(int reg, Operand *op)
{
    int mod, reg1, reg2, sib_reg1;

    if (op->type & (OP_REG | OP_MMX | OP_SSE)) {
        g(0xc0 + (reg << 3) + op->reg);
    } else if (op->reg == -1 && op->reg2 == -1) {
        /* displacement only */
#ifdef TCC_TARGET_X86_64
        g(0x04 + (reg << 3));
        g(0x25);
#else
        g(0x05 + (reg << 3));
#endif
        gen_expr32(&op->e);
#ifdef TCC_TARGET_X86_64
    } else if (op->reg == -2) {
        ExprValue *pe = &op->e;
        g(0x05 + (reg << 3));
        gen_addrpc32(pe->sym ? VT_SYM : 0, pe->sym, pe->v);
        return ind;
#endif
    } else {
        sib_reg1 = op->reg;
        /* first compute displacement encoding */
        if (sib_reg1 == -1) {
            sib_reg1 = 5;
            mod = 0x00;
        } else if (op->e.v == 0 && !op->e.sym && op->reg != 5) {
            mod = 0x00;
        } else if (op->e.v == (int8_t)op->e.v && !op->e.sym) {
            mod = 0x40;
        } else {
            mod = 0x80;
        }
        /* compute if sib byte needed */
        reg1 = op->reg;
        if (op->reg2 != -1)
            reg1 = 4;
        g(mod + (reg << 3) + reg1);
        if (reg1 == 4) {
            /* add sib byte */
            reg2 = op->reg2;
            if (reg2 == -1)
                reg2 = 4; /* indicate no index */
            g((op->shift << 6) + (reg2 << 3) + sib_reg1);
        }
        /* add offset */
        if (mod == 0x40) {
            g(op->e.v);
        } else if (mod == 0x80 || op->reg == -1) {
            gen_expr32(&op->e);
        }
    }
    return 0;
}
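/* ModRM layout is mod(7:6) reg(5:3) rm(2:0); the SIB byte, present
   when rm == 4, is scale(7:6) index(5:3) base(2:0).  For example
   "movl %eax, 8(%ebx,%ecx,4)" (opcode 0x89, reg = %eax = 0) encodes
   as 89 44 8b 08: ModRM 0x44 (mod=01 disp8, rm=100 -> SIB), SIB 0x8b
   (scale=4, index=%ecx, base=%ebx), then the 8-bit displacement. */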
#ifdef TCC_TARGET_X86_64
#define REX_W 0x48
#define REX_R 0x44
#define REX_X 0x42
#define REX_B 0x41
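/* A REX prefix has the form 0100WRXB: W (0x48) selects 64-bit operand
   size, R (0x44) extends the ModRM reg field, X (0x42) the SIB index
   and B (0x41) the ModRM rm / SIB base / opcode register field.  The
   bare 0x40 form emitted below is what makes %spl..%dil addressable. */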
static void asm_rex(int width64, Operand *ops, int nb_ops, int *op_type,
                    int regi, int rmi)
{
    unsigned char rex = width64 ? 0x48 : 0;
    int saw_high_8bit = 0;
    int i;
    if (rmi == -1) {
        /* No mod/rm byte, but we might have a register op nevertheless
           (we will add it to the opcode later). */
        for(i = 0; i < nb_ops; i++) {
            if (op_type[i] & (OP_REG | OP_ST)) {
                if (ops[i].reg >= 8) {
                    rex |= REX_B;
                    ops[i].reg -= 8;
                } else if (ops[i].type & OP_REG8_LOW)
                    rex |= 0x40;
                else if (ops[i].type & OP_REG8 && ops[i].reg >= 4)
                    /* An 8 bit reg >= 4 without REG8_LOW is ah/ch/dh/bh */
                    saw_high_8bit = ops[i].reg;
                break;
            }
        }
    } else {
        if (regi != -1) {
            if (ops[regi].reg >= 8) {
                rex |= REX_R;
                ops[regi].reg -= 8;
            } else if (ops[regi].type & OP_REG8_LOW)
                rex |= 0x40;
            else if (ops[regi].type & OP_REG8 && ops[regi].reg >= 4)
                /* An 8 bit reg >= 4 without REG8_LOW is ah/ch/dh/bh */
                saw_high_8bit = ops[regi].reg;
        }
        if (ops[rmi].type & (OP_REG | OP_MMX | OP_SSE | OP_CR | OP_EA)) {
            if (ops[rmi].reg >= 8) {
                rex |= REX_B;
                ops[rmi].reg -= 8;
            } else if (ops[rmi].type & OP_REG8_LOW)
                rex |= 0x40;
            else if (ops[rmi].type & OP_REG8 && ops[rmi].reg >= 4)
                /* An 8 bit reg >= 4 without REG8_LOW is ah/ch/dh/bh */
                saw_high_8bit = ops[rmi].reg;
        }
        if (ops[rmi].type & OP_EA && ops[rmi].reg2 >= 8) {
            rex |= REX_X;
            ops[rmi].reg2 -= 8;
        }
    }
    if (rex) {
        if (saw_high_8bit)
            tcc_error("can't encode register %%%ch when REX prefix is required",
                      "acdb"[saw_high_8bit-4]);
        g(rex);
    }
}
#endif
static void maybe_print_stats (void)
{
    static int already = 1; /* set to 0 to enable the one-shot stats dump below */
    if (!already)
    /* print stats about opcodes */
    {
        const struct ASMInstr *pa;
        int freq[4];
        int op_vals[500];
        int nb_op_vals, i, j;

        already = 1;
        nb_op_vals = 0;
        memset(freq, 0, sizeof(freq));
        for(pa = asm_instrs; pa->sym != 0; pa++) {
            freq[pa->nb_ops]++;
            //for(i=0;i<pa->nb_ops;i++) {
            for(j=0;j<nb_op_vals;j++) {
                //if (pa->op_type[i] == op_vals[j])
                if (pa->instr_type == op_vals[j])
                    goto found;
            }
            //op_vals[nb_op_vals++] = pa->op_type[i];
            op_vals[nb_op_vals++] = pa->instr_type;
        found: ;
            //}
        }
        for(i=0;i<nb_op_vals;i++) {
            int v = op_vals[i];
            //if ((v & (v - 1)) != 0)
            printf("%3d: %08x\n", i, v);
        }
        printf("size=%d nb=%d f0=%d f1=%d f2=%d f3=%d\n",
               (int)sizeof(asm_instrs),
               (int)sizeof(asm_instrs) / (int)sizeof(ASMInstr),
               freq[0], freq[1], freq[2], freq[3]);
    }
}
ST_FUNC void asm_opcode(TCCState *s1, int opcode)
{
    const ASMInstr *pa;
    int i, modrm_index, modreg_index, reg, v, op1, seg_prefix, pc;
    int nb_ops, s;
    Operand ops[MAX_OPERANDS], *pop;
    int op_type[3]; /* decoded op type */
    int alltypes;   /* OR of all operand types */
    int autosize;
    int p66;
#ifdef TCC_TARGET_X86_64
    int rex64;
#endif

    maybe_print_stats();
    /* force synthetic ';' after prefix instruction, so we can handle */
    /* one-line things like "rep stosb" instead of only "rep\nstosb" */
    if (opcode >= TOK_ASM_wait && opcode <= TOK_ASM_repnz)
        unget_tok(';');

    /* get operands */
    pop = ops;
    nb_ops = 0;
    seg_prefix = 0;
    alltypes = 0;
    for(;;) {
        if (tok == ';' || tok == TOK_LINEFEED)
            break;
        if (nb_ops >= MAX_OPERANDS) {
            tcc_error("incorrect number of operands");
        }
        parse_operand(s1, pop);
        if (tok == ':') {
            if (pop->type != OP_SEG || seg_prefix)
                tcc_error("incorrect prefix");
            seg_prefix = segment_prefixes[pop->reg];
            next();
            parse_operand(s1, pop);
            if (!(pop->type & OP_EA)) {
                tcc_error("segment prefix must be followed by memory reference");
            }
        }
        pop++;
        nb_ops++;
        if (tok != ',')
            break;
        next();
    }
    s = 0; /* avoid warning */

    /* optimize matching by using a lookup table (no hashing is needed
       !) */
    for(pa = asm_instrs; pa->sym != 0; pa++) {
        int it = pa->instr_type & OPCT_MASK;
        s = 0;
        if (it == OPC_FARITH) {
            v = opcode - pa->sym;
            if (!((unsigned)v < 8 * 6 && (v % 6) == 0))
                continue;
        } else if (it == OPC_ARITH) {
            if (!(opcode >= pa->sym && opcode < pa->sym + 8*NBWLX))
                continue;
            s = (opcode - pa->sym) % NBWLX;
            if ((pa->instr_type & OPC_BWLX) == OPC_WLX) {
                /* We need to reject the xxxb opcodes that we accepted above.
                   Note that pa->sym for WLX opcodes is the 'w' token,
                   to get the 'b' token subtract one. */
                if (((opcode - pa->sym + 1) % NBWLX) == 0)
                    continue;
                s++;
            }
        } else if (it == OPC_SHIFT) {
            if (!(opcode >= pa->sym && opcode < pa->sym + 7*NBWLX))
                continue;
            s = (opcode - pa->sym) % NBWLX;
        } else if (it == OPC_TEST) {
            if (!(opcode >= pa->sym && opcode < pa->sym + NB_TEST_OPCODES))
                continue;
            /* cmovxx is a test opcode but accepts multiple sizes.
               TCC doesn't accept the suffixed mnemonic, instead we
               simply force size autodetection always. */
            if (pa->instr_type & OPC_WLX)
                s = NBWLX - 1;
        } else if (pa->instr_type & OPC_B) {
#ifdef TCC_TARGET_X86_64
            /* Some instructions don't have the full size but only
               bwl form.  insb e.g. */
            if ((pa->instr_type & OPC_WLQ) != OPC_WLQ
                && !(opcode >= pa->sym && opcode < pa->sym + NBWLX-1))
                continue;
#endif
            if (!(opcode >= pa->sym && opcode < pa->sym + NBWLX))
                continue;
            s = opcode - pa->sym;
        } else if (pa->instr_type & OPC_WLX) {
            if (!(opcode >= pa->sym && opcode < pa->sym + NBWLX-1))
                continue;
            s = opcode - pa->sym + 1;
        } else {
            if (pa->sym != opcode)
                continue;
        }
        if (pa->nb_ops != nb_ops)
            continue;
#ifdef TCC_TARGET_X86_64
        /* Special case for moves.  Selecting the IM64->REG64 form
           should only be done if we really have an >32bit imm64, and that
           is hardcoded.  Ignore it here. */
        if (pa->opcode == 0xb0 && ops[0].type != OP_IM64
            && (ops[1].type & OP_REG) == OP_REG64
            && !(pa->instr_type & OPC_0F))
            continue;
#endif
        /* now decode and check each operand */
        alltypes = 0;
        for(i = 0; i < nb_ops; i++) {
            int op1, op2;
            op1 = pa->op_type[i];
            op2 = op1 & 0x1f;
            switch(op2) {
            case OPT_IM:
                v = OP_IM8 | OP_IM16 | OP_IM32;
                break;
            case OPT_REG:
                v = OP_REG8 | OP_REG16 | OP_REG32 | OP_REG64;
                break;
            case OPT_REGW:
                v = OP_REG16 | OP_REG32 | OP_REG64;
                break;
            case OPT_IMW:
                v = OP_IM16 | OP_IM32;
                break;
            case OPT_MMXSSE:
                v = OP_MMX | OP_SSE;
                break;
            case OPT_DISP:
            case OPT_DISP8:
                v = OP_ADDR;
                break;
            default:
                v = 1 << op2;
                break;
            }
            if (op1 & OPT_EA)
                v |= OP_EA;
            op_type[i] = v;
            if ((ops[i].type & v) == 0)
                goto next;
            alltypes |= ops[i].type;
        }
        /* all is matching ! */
        break;
    next: ;
    }
    if (pa->sym == 0) {
        if (opcode >= TOK_ASM_first && opcode <= TOK_ASM_last) {
            int b;
            b = op0_codes[opcode - TOK_ASM_first];
            if (b & 0xff00)
                g(b >> 8);
            g(b);
            return;
        } else if (opcode <= TOK_ASM_alllast) {
            tcc_error("bad operand with opcode '%s'",
                      get_tok_str(opcode, NULL));
        } else {
            tcc_error("unknown opcode '%s'",
                      get_tok_str(opcode, NULL));
        }
    }
    /* if the size is unknown, then evaluate it (OPC_B or OPC_WL case) */
    autosize = NBWLX-1;
#ifdef TCC_TARGET_X86_64
    /* XXX the autosize should rather be zero, to not have to adjust this
       all the time. */
    if ((pa->instr_type & OPC_BWLQ) == OPC_B)
        autosize = NBWLX-2;
#endif
    if (s == autosize) {
        /* Check for register operands providing hints about the size.
           Start from the end, i.e. destination operands.  This matters
           only for opcodes accepting different sized registers, lar and lsl
           are such opcodes. */
        for(i = nb_ops - 1; s == autosize && i >= 0; i--) {
            if ((ops[i].type & OP_REG) && !(op_type[i] & (OP_CL | OP_DX)))
                s = reg_to_size[ops[i].type & OP_REG];
        }
        if (s == autosize) {
            if ((opcode == TOK_ASM_push || opcode == TOK_ASM_pop) &&
                (ops[0].type & (OP_SEG | OP_IM8S | OP_IM32)))
                s = 2;
            else if ((opcode == TOK_ASM_push || opcode == TOK_ASM_pop) &&
                     (ops[0].type & OP_EA))
                s = NBWLX - 2;
            else
                tcc_error("cannot infer opcode suffix");
        }
    }
#ifdef TCC_TARGET_X86_64
    /* Generate addr32 prefix if needed */
    for(i = 0; i < nb_ops; i++) {
        if (ops[i].type & OP_EA32) {
            g(0x67);
            break;
        }
    }
#endif

    /* generate data16 prefix if needed */
    p66 = 0;
    if (s == 1)
        p66 = 1;
    else {
        /* accepting mmx+sse in all operands --> needs 0x66 to
           switch to sse mode.  Accepting only sse in an operand --> is
           already SSE insn and needs 0x66/f2/f3 handling. */
        for (i = 0; i < nb_ops; i++)
            if ((op_type[i] & (OP_MMX | OP_SSE)) == (OP_MMX | OP_SSE)
                && ops[i].type & OP_SSE)
                p66 = 1;
    }
    if (p66)
        g(0x66);
#ifdef TCC_TARGET_X86_64
    rex64 = 0;
    if (pa->instr_type & OPC_48)
        rex64 = 1;
    else if (s == 3 || (alltypes & OP_REG64)) {
        /* generate REX prefix */
        int default64 = 0;
        for(i = 0; i < nb_ops; i++) {
            if (op_type[i] == OP_REG64 && pa->opcode != 0xb8) {
                /* If only 64bit regs are accepted in one operand
                   this is a default64 instruction without need for
                   REX prefixes, except for movabs(0xb8). */
                default64 = 1;
                break;
            }
        }
        /* XXX find better encoding for the default64 instructions. */
        if (((opcode != TOK_ASM_push && opcode != TOK_ASM_pop
              && opcode != TOK_ASM_pushw && opcode != TOK_ASM_pushl
              && opcode != TOK_ASM_pushq && opcode != TOK_ASM_popw
              && opcode != TOK_ASM_popl && opcode != TOK_ASM_popq
              && opcode != TOK_ASM_call && opcode != TOK_ASM_jmp))
            && !default64)
            rex64 = 1;
    }
#endif
    /* now generates the operation */
    if (OPCT_IS(pa->instr_type, OPC_FWAIT))
        g(0x9b);
    if (seg_prefix)
        g(seg_prefix);

    v = pa->opcode;
    if (pa->instr_type & OPC_0F)
        v = ((v & ~0xff) << 8) | 0x0f00 | (v & 0xff);
    if ((v == 0x69 || v == 0x6b) && nb_ops == 2) {
        /* kludge for imul $im, %reg */
        nb_ops = 3;
        ops[2] = ops[1];
        op_type[2] = op_type[1];
    } else if (v == 0xcd && ops[0].e.v == 3 && !ops[0].e.sym) {
        v--; /* int $3 case */
        nb_ops = 0;
    } else if ((v == 0x06 || v == 0x07)) {
        if (ops[0].reg >= 4) {
            /* push/pop %fs or %gs */
            v = 0x0fa0 + (v - 0x06) + ((ops[0].reg - 4) << 3);
        } else {
            v += ops[0].reg << 3;
        }
        nb_ops = 0;
    } else if (v <= 0x05) {
        /* arith case */
        v += ((opcode - TOK_ASM_addb) / NBWLX) << 3;
    } else if ((pa->instr_type & (OPCT_MASK | OPC_MODRM)) == OPC_FARITH) {
        /* fpu arith case */
        v += ((opcode - pa->sym) / 6) << 3;
    }
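    /* The arith adjustment exploits the regular layout of the 8086
       ALU opcodes: add/or/adc/sbb/and/sub/xor/cmp occupy bases
       0x00, 0x08, ..., 0x38, so the operation index times 8 is added
       to the base form.  E.g. "subl %eax, %ebx" starts from 0x01
       (add r/m32, r32) and becomes 0x29, the sub form. */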
    /* search which operand will be used for modrm */
    modrm_index = -1;
    modreg_index = -1;
    if (pa->instr_type & OPC_MODRM) {
        if (!nb_ops) {
            /* A modrm opcode without operands is a special case (e.g. mfence).
               It has a group and acts as if there's a register operand 0
               (ax). */
            i = 0;
            ops[i].type = OP_REG;
            ops[i].reg = 0;
            goto modrm_found;
        }
        /* first look for an ea operand */
        for(i = 0;i < nb_ops; i++) {
            if (op_type[i] & OP_EA)
                goto modrm_found;
        }
        /* then if not found, a register or indirection (shift instructions) */
        for(i = 0;i < nb_ops; i++) {
            if (op_type[i] & (OP_REG | OP_MMX | OP_SSE | OP_INDIR))
                goto modrm_found;
        }
#ifdef ASM_DEBUG
        tcc_error("bad op table");
#endif
    modrm_found:
        modrm_index = i;
        /* if a register is used in another operand then it is
           used instead of group */
        for(i = 0;i < nb_ops; i++) {
            int t = op_type[i];
            if (i != modrm_index &&
                (t & (OP_REG | OP_MMX | OP_SSE | OP_CR | OP_TR | OP_DB | OP_SEG))) {
                modreg_index = i;
                break;
            }
        }
    }
#ifdef TCC_TARGET_X86_64
    asm_rex (rex64, ops, nb_ops, op_type, modreg_index, modrm_index);
#endif
    if (pa->instr_type & OPC_REG) {
        /* mov $im, %reg case */
        if (v == 0xb0 && s >= 1)
            v += 7;
        for(i = 0; i < nb_ops; i++) {
            if (op_type[i] & (OP_REG | OP_ST)) {
                v += ops[i].reg;
                break;
            }
        }
    }
    if (pa->instr_type & OPC_B)
        v += s >= 1;

    if (nb_ops == 1 && pa->op_type[0] == OPT_DISP8) {
        Sym *sym;
        int jmp_disp;

        /* see if we can really generate the jump with a byte offset */
        sym = ops[0].e.sym;
        if (!sym)
            goto no_short_jump;
        if (sym->r != cur_text_section->sh_num)
            goto no_short_jump;
        jmp_disp = ops[0].e.v + sym->jnext - ind - 2 - (v >= 0xff);
        if (jmp_disp == (int8_t)jmp_disp) {
            /* OK to generate jump */
            ops[0].e.sym = 0;
            ops[0].e.v = jmp_disp;
            op_type[0] = OP_IM8S;
        } else {
        no_short_jump:
            /* long jump will be allowed. need to modify the
               opcode slightly */
            if (v == 0xeb) /* jmp */
                v = 0xe9;
            else if (v == 0x70) /* jcc */
                v += 0x0f10;
            else
                tcc_error("invalid displacement");
        }
    }
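    /* The long-form rewriting above: jmp rel8 (0xeb) becomes jmp
       rel32 (0xe9), and the short jcc base (0x70) gains 0x0f10 to
       become 0x0f80, the two-byte near jcc form; the condition bits
       from test_bits are added to it just below. */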
    if (OPCT_IS(pa->instr_type, OPC_TEST))
        v += test_bits[opcode - pa->sym];
    op1 = v >> 16;
    if (op1)
        g(op1);
    op1 = (v >> 8) & 0xff;
    if (op1)
        g(op1);
    g(v);

    if (OPCT_IS(pa->instr_type, OPC_SHIFT)) {
        reg = (opcode - pa->sym) / NBWLX;
        if (reg == 6)
            reg = 7;
    } else if (OPCT_IS(pa->instr_type, OPC_ARITH)) {
        reg = (opcode - pa->sym) / NBWLX;
    } else if (OPCT_IS(pa->instr_type, OPC_FARITH)) {
        reg = (opcode - pa->sym) / 6;
    } else {
        reg = (pa->instr_type >> OPC_GROUP_SHIFT) & 7;
    }

    pc = 0;
    if (pa->instr_type & OPC_MODRM) {
        /* if a register is used in another operand then it is
           used instead of group */
        if (modreg_index >= 0)
            reg = ops[modreg_index].reg;
        pc = asm_modrm(reg, &ops[modrm_index]);
    }
    /* emit constants */
#ifndef TCC_TARGET_X86_64
    if (!(pa->instr_type & OPC_0F)
        && (pa->opcode == 0x9a || pa->opcode == 0xea)) {
        /* ljmp or lcall kludge */
        gen_expr32(&ops[1].e);
        if (ops[0].e.sym)
            tcc_error("cannot relocate");
        gen_le16(ops[0].e.v);
        return;
    }
#endif
    for(i = 0;i < nb_ops; i++) {
        v = op_type[i];
        if (v & (OP_IM8 | OP_IM16 | OP_IM32 | OP_IM64 | OP_IM8S | OP_ADDR)) {
            /* if multiple sizes are given it means we must look
               at the op size */
            if ((v | OP_IM8 | OP_IM64) == (OP_IM8 | OP_IM16 | OP_IM32 | OP_IM64)) {
                if (s == 0)
                    v = OP_IM8;
                else if (s == 1)
                    v = OP_IM16;
                else if (s == 2 || (v & OP_IM64) == 0)
                    v = OP_IM32;
                else
                    v = OP_IM64;
            }

            if ((v & (OP_IM8 | OP_IM8S | OP_IM16)) && ops[i].e.sym)
                tcc_error("cannot relocate");

            if (v & (OP_IM8 | OP_IM8S)) {
                g(ops[i].e.v);
            } else if (v & OP_IM16) {
                gen_le16(ops[i].e.v);
#ifdef TCC_TARGET_X86_64
            } else if (v & OP_IM64) {
                gen_expr64(&ops[i].e);
#endif
            } else if (pa->op_type[i] == OPT_DISP || pa->op_type[i] == OPT_DISP8) {
                gen_disp32(&ops[i].e);
            } else {
                gen_expr32(&ops[i].e);
            }
        }
    }

    /* after immediate operands, adjust pc-relative address */
    if (pc)
        add32le(cur_text_section->data + pc - 4, pc - ind);
}
/* return the constraint priority (we allocate first the lowest
   numbered constraints) */
static inline int constraint_priority(const char *str)
{
    int priority, c, pr;

    /* we take the lowest priority */
    priority = 0;
    for(;;) {
        c = *str;
        if (c == '\0')
            break;
        str++;
        switch(c) {
        case 'A':
            pr = 0;
            break;
        case 'a':
        case 'b':
        case 'c':
        case 'd':
        case 'S':
        case 'D':
            pr = 1;
            break;
        case 'q':
            pr = 2;
            break;
        case 'r':
        case 'R':
        case 'p':
            pr = 3;
            break;
        case 'N':
        case 'M':
        case 'I':
        case 'e':
        case 'i':
        case 'm':
        case 'g':
            pr = 4;
            break;
        default:
            tcc_error("unknown constraint '%c'", c);
            pr = 0;
        }
        if (pr > priority)
            priority = pr;
    }
    return priority;
}
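/* A lower priority means a more specific constraint: 0 is 'A' (the
   eax:edx pair), 1 the single-register letters, 2 'q' (any of
   a/b/c/d), 3 any general register, 4 immediates and memory.  The
   allocator below processes operands most-specific first, so the
   tightly constrained operands get their registers before the
   flexible ones. */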
static const char *skip_constraint_modifiers(const char *p)
{
    while (*p == '=' || *p == '&' || *p == '+' || *p == '%')
        p++;
    return p;
}
/* If T (a token) is of the form "%reg" returns the register
   number and type, otherwise return -1. */
ST_FUNC int asm_parse_regvar (int t)
{
    const char *s;
    Operand op;
    if (t < TOK_IDENT)
        return -1;
    s = table_ident[t - TOK_IDENT]->str;
    if (s[0] != '%')
        return -1;
    t = tok_alloc(s+1, strlen(s)-1)->tok;
    unget_tok(t);
    unget_tok('%');
    parse_operand(tcc_state, &op);
    /* Accept only integer regs for now. */
    if (op.type & OP_REG)
        return op.reg;
    else
        return -1;
}
#define REG_OUT_MASK 0x01
#define REG_IN_MASK  0x02

#define is_reg_allocated(reg) (regs_allocated[reg] & reg_mask)

ST_FUNC void asm_compute_constraints(ASMOperand *operands,
                                     int nb_operands, int nb_outputs,
                                     const uint8_t *clobber_regs,
                                     int *pout_reg)
{
    ASMOperand *op;
    int sorted_op[MAX_ASM_OPERANDS];
    int i, j, k, p1, p2, tmp, reg, c, reg_mask;
    const char *str;
    uint8_t regs_allocated[NB_ASM_REGS];

    /* init fields */
    for(i=0;i<nb_operands;i++) {
        op = &operands[i];
        op->input_index = -1;
        op->ref_index = -1;
        op->reg = -1;
        op->is_memory = 0;
        op->is_rw = 0;
    }
    /* compute constraint priority and evaluate references to output
       constraints if input constraints */
    for(i=0;i<nb_operands;i++) {
        op = &operands[i];
        str = op->constraint;
        str = skip_constraint_modifiers(str);
        if (isnum(*str) || *str == '[') {
            /* this is a reference to another constraint */
            k = find_constraint(operands, nb_operands, str, NULL);
            if ((unsigned)k >= i || i < nb_outputs)
                tcc_error("invalid reference in constraint %d ('%s')",
                          i, str);
            op->ref_index = k;
            if (operands[k].input_index >= 0)
                tcc_error("cannot reference twice the same operand");
            operands[k].input_index = i;
            op->priority = 5;
        } else if ((op->vt->r & VT_VALMASK) == VT_LOCAL
                   && op->vt->sym
                   && (reg = op->vt->sym->r & VT_VALMASK) < VT_CONST) {
            op->priority = 1;
            op->reg = reg;
        } else {
            op->priority = constraint_priority(str);
        }
    }
    /* sort operands according to their priority */
    for(i=0;i<nb_operands;i++)
        sorted_op[i] = i;
    for(i=0;i<nb_operands - 1;i++) {
        for(j=i+1;j<nb_operands;j++) {
            p1 = operands[sorted_op[i]].priority;
            p2 = operands[sorted_op[j]].priority;
            if (p2 < p1) {
                tmp = sorted_op[i];
                sorted_op[i] = sorted_op[j];
                sorted_op[j] = tmp;
            }
        }
    }

    for(i = 0;i < NB_ASM_REGS; i++) {
        if (clobber_regs[i])
            regs_allocated[i] = REG_IN_MASK | REG_OUT_MASK;
        else
            regs_allocated[i] = 0;
    }
    /* esp cannot be used */
    regs_allocated[4] = REG_IN_MASK | REG_OUT_MASK;
    /* ebp cannot be used yet */
    regs_allocated[5] = REG_IN_MASK | REG_OUT_MASK;
    /* allocate registers and generate corresponding asm moves */
    for(i=0;i<nb_operands;i++) {
        j = sorted_op[i];
        op = &operands[j];
        str = op->constraint;
        /* no need to allocate references */
        if (op->ref_index >= 0)
            continue;
        /* select if register is used for output, input or both */
        if (op->input_index >= 0) {
            reg_mask = REG_IN_MASK | REG_OUT_MASK;
        } else if (j < nb_outputs) {
            reg_mask = REG_OUT_MASK;
        } else {
            reg_mask = REG_IN_MASK;
        }
        if (op->reg >= 0) {
            if (is_reg_allocated(op->reg))
                tcc_error("asm regvar requests register that's taken already");
            reg = op->reg;
            goto reg_found;
        }
    try_next:
        c = *str++;
        switch(c) {
        case '=':
            goto try_next;
        case '+':
            op->is_rw = 1;
            /* FALL THRU */
        case '&':
            if (j >= nb_outputs)
                tcc_error("'%c' modifier can only be applied to outputs", c);
            reg_mask = REG_IN_MASK | REG_OUT_MASK;
            goto try_next;
        case 'A':
            /* allocate both eax and edx */
            if (is_reg_allocated(TREG_XAX) ||
                is_reg_allocated(TREG_XDX))
                goto try_next;
            op->is_llong = 1;
            op->reg = TREG_XAX;
            regs_allocated[TREG_XAX] |= reg_mask;
            regs_allocated[TREG_XDX] |= reg_mask;
            break;
        case 'a':
            reg = TREG_XAX;
            goto alloc_reg;
        case 'b':
            reg = 3;
            goto alloc_reg;
        case 'c':
            reg = TREG_XCX;
            goto alloc_reg;
        case 'd':
            reg = TREG_XDX;
            goto alloc_reg;
        case 'S':
            reg = 6;
            goto alloc_reg;
        case 'D':
            reg = 7;
        alloc_reg:
            if (is_reg_allocated(reg))
                goto try_next;
            goto reg_found;
        case 'q':
            /* eax, ebx, ecx or edx */
            for(reg = 0; reg < 4; reg++) {
                if (!is_reg_allocated(reg))
                    goto reg_found;
            }
            goto try_next;
        case 'r':
        case 'R':
        case 'p': /* A general address, for x86(64) any register is acceptable*/
            /* any general register */
            for(reg = 0; reg < 8; reg++) {
                if (!is_reg_allocated(reg))
                    goto reg_found;
            }
            goto try_next;
        reg_found:
            /* now we can reload in the register */
            op->is_llong = 0;
            op->reg = reg;
            regs_allocated[reg] |= reg_mask;
            break;
        case 'e':
        case 'i':
            if (!((op->vt->r & (VT_VALMASK | VT_LVAL)) == VT_CONST))
                goto try_next;
            break;
        case 'I':
        case 'N':
        case 'M':
            if (!((op->vt->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST))
                goto try_next;
            break;
        case 'm':
        case 'g':
            /* nothing special to do because the operand is already in
               memory, except if the pointer itself is stored in a
               memory variable (VT_LLOCAL case) */
            /* XXX: fix constant case */
            /* if it is a reference to a memory zone, it must lie
               in a register, so we reserve the register in the
               input registers and a load will be generated
               later */
            if (j < nb_outputs || c == 'm') {
                if ((op->vt->r & VT_VALMASK) == VT_LLOCAL) {
                    /* any general register */
                    for(reg = 0; reg < 8; reg++) {
                        if (!(regs_allocated[reg] & REG_IN_MASK))
                            goto reg_found1;
                    }
                    goto try_next;
                reg_found1:
                    /* now we can reload in the register */
                    regs_allocated[reg] |= REG_IN_MASK;
                    op->reg = reg;
                    op->is_memory = 1;
                }
            }
            break;
        default:
            tcc_error("asm constraint %d ('%s') could not be satisfied",
                      j, op->constraint);
            break;
        }
        /* if a reference is present for that operand, we assign it too */
        if (op->input_index >= 0) {
            operands[op->input_index].reg = op->reg;
            operands[op->input_index].is_llong = op->is_llong;
        }
    }
    /* compute out_reg. It is used to store outputs registers to memory
       locations referenced by pointers (VT_LLOCAL case) */
    *pout_reg = -1;
    for(i=0;i<nb_operands;i++) {
        op = &operands[i];
        if (op->reg >= 0 &&
            (op->vt->r & VT_VALMASK) == VT_LLOCAL &&
            !op->is_memory) {
            for(reg = 0; reg < 8; reg++) {
                if (!(regs_allocated[reg] & REG_OUT_MASK))
                    goto reg_found2;
            }
            tcc_error("could not find free output register for reloading");
        reg_found2:
            *pout_reg = reg;
            break;
        }
    }

    /* print sorted constraints */
#ifdef ASM_DEBUG
    for(i=0;i<nb_operands;i++) {
        j = sorted_op[i];
        op = &operands[j];
        printf("%%%d [%s]: \"%s\" r=0x%04x reg=%d\n",
               j,
               op->id ? get_tok_str(op->id, NULL) : "",
               op->constraint,
               op->vt->r,
               op->reg);
    }
    if (*pout_reg >= 0)
        printf("out_reg=%d\n", *pout_reg);
#endif
}
ST_FUNC void subst_asm_operand(CString *add_str,
                               SValue *sv, int modifier)
{
    int r, reg, size, val;
    char buf[64];

    r = sv->r;
    if ((r & VT_VALMASK) == VT_CONST) {
        if (!(r & VT_LVAL) && modifier != 'c' && modifier != 'n' &&
            modifier != 'P')
            cstr_ccat(add_str, '$');
        if (r & VT_SYM) {
            const char *name = get_tok_str(sv->sym->v, NULL);
            if (sv->sym->v >= SYM_FIRST_ANOM) {
                /* In case of anonymous symbols ("L.42", used
                   for static data labels) we can't find them
                   in the C symbol table when later looking up
                   this name.  So enter them now into the asm label
                   list when we still know the symbol. */
                get_asm_sym(tok_alloc(name, strlen(name))->tok, sv->sym);
            }
            cstr_cat(add_str, name, -1);
            if ((uint32_t)sv->c.i == 0)
                goto no_offset;
            cstr_ccat(add_str, '+');
        }
        val = sv->c.i;
        if (modifier == 'n')
            val = -val;
        snprintf(buf, sizeof(buf), "%d", val);
        cstr_cat(add_str, buf, -1);
    no_offset:;
#ifdef TCC_TARGET_X86_64
        if (r & VT_LVAL)
            cstr_cat(add_str, "(%rip)", -1);
#endif
    } else if ((r & VT_VALMASK) == VT_LOCAL) {
#ifdef TCC_TARGET_X86_64
        snprintf(buf, sizeof(buf), "%d(%%rbp)", (int)sv->c.i);
#else
        snprintf(buf, sizeof(buf), "%d(%%ebp)", (int)sv->c.i);
#endif
        cstr_cat(add_str, buf, -1);
    } else if (r & VT_LVAL) {
        reg = r & VT_VALMASK;
        if (reg >= VT_CONST)
            tcc_error("internal compiler error");
        snprintf(buf, sizeof(buf), "(%%%s)",
#ifdef TCC_TARGET_X86_64
                 get_tok_str(TOK_ASM_rax + reg, NULL)
#else
                 get_tok_str(TOK_ASM_eax + reg, NULL)
#endif
                 );
        cstr_cat(add_str, buf, -1);
    } else {
        /* register case */
        reg = r & VT_VALMASK;
        if (reg >= VT_CONST)
            tcc_error("internal compiler error");

        /* choose register operand size */
        if ((sv->type.t & VT_BTYPE) == VT_BYTE ||
            (sv->type.t & VT_BTYPE) == VT_BOOL)
            size = 1;
        else if ((sv->type.t & VT_BTYPE) == VT_SHORT)
            size = 2;
#ifdef TCC_TARGET_X86_64
        else if ((sv->type.t & VT_BTYPE) == VT_LLONG ||
                 (sv->type.t & VT_BTYPE) == VT_PTR)
            size = 8;
#endif
        else
            size = 4;
        if (size == 1 && reg >= 4)
            size = 4;

        if (modifier == 'b') {
            if (reg >= 4)
                tcc_error("cannot use byte register");
            size = 1;
        } else if (modifier == 'h') {
            if (reg >= 4)
                tcc_error("cannot use byte register");
            size = -1;
        } else if (modifier == 'w') {
            size = 2;
        } else if (modifier == 'k') {
            size = 4;
#ifdef TCC_TARGET_X86_64
        } else if (modifier == 'q') {
            size = 8;
#endif
        }

        switch(size) {
        case -1:
            reg = TOK_ASM_ah + reg;
            break;
        case 1:
            reg = TOK_ASM_al + reg;
            break;
        case 2:
            reg = TOK_ASM_ax + reg;
            break;
        default:
            reg = TOK_ASM_eax + reg;
            break;
#ifdef TCC_TARGET_X86_64
        case 8:
            reg = TOK_ASM_rax + reg;
            break;
#endif
        }
        snprintf(buf, sizeof(buf), "%%%s", get_tok_str(reg, NULL));
        cstr_cat(add_str, buf, -1);
    }
}
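/* The modifiers handled above mirror GCC's x86 operand print codes:
   %b0/%h0/%w0/%k0/%q0 force the 8-bit low / 8-bit high / 16 / 32 /
   64-bit register name (e.g. %al, %ah, %ax, %eax, %rax for register
   0), while 'c', 'n' and 'P' suppress the '$' on an immediate, with
   'n' additionally negating the value. */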
/* generate prolog and epilog code for asm statement */
ST_FUNC void asm_gen_code(ASMOperand *operands, int nb_operands,
                          int nb_outputs, int is_output,
                          uint8_t *clobber_regs,
                          int out_reg)
{
    uint8_t regs_allocated[NB_ASM_REGS];
    ASMOperand *op;
    int i, reg;

    /* Strictly speaking %Xbp and %Xsp should be included in the
       call-preserved registers, but currently it doesn't matter. */
#ifdef TCC_TARGET_X86_64
#ifdef TCC_TARGET_PE
    static uint8_t reg_saved[] = { 3, 6, 7, 12, 13, 14, 15 };
#else
    static uint8_t reg_saved[] = { 3, 12, 13, 14, 15 };
#endif
#else
    static uint8_t reg_saved[] = { 3, 6, 7 };
#endif
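    /* The numbers are register indices: 3/6/7 are ebx/esi/edi and
       12..15 are r12..r15, i.e. the callee-saved registers of the
       respective ABI (Win64 additionally preserves rsi and rdi,
       hence the longer TCC_TARGET_PE list). */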
    /* mark all used registers */
    memcpy(regs_allocated, clobber_regs, sizeof(regs_allocated));
    for(i = 0; i < nb_operands;i++) {
        op = &operands[i];
        if (op->reg >= 0)
            regs_allocated[op->reg] = 1;
    }
    if (!is_output) {
        /* generate reg save code */
        for(i = 0; i < sizeof(reg_saved)/sizeof(reg_saved[0]); i++) {
            reg = reg_saved[i];
            if (regs_allocated[reg]) {
                if (reg >= 8)
                    g(0x41), reg-=8;
                g(0x50 + reg);
            }
        }

        /* generate load code */
        for(i = 0; i < nb_operands; i++) {
            op = &operands[i];
            if (op->reg >= 0) {
                if ((op->vt->r & VT_VALMASK) == VT_LLOCAL &&
                    op->is_memory) {
                    /* memory reference case (for both input and
                       output cases) */
                    SValue sv;
                    sv = *op->vt;
                    sv.r = (sv.r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
                    sv.type.t = VT_PTR;
                    load(op->reg, &sv);
                } else if (i >= nb_outputs || op->is_rw) {
                    /* load value in register */
                    load(op->reg, op->vt);
                    if (op->is_llong) {
                        SValue sv;
                        sv = *op->vt;
                        sv.c.i += 4;
                        load(TREG_XDX, &sv);
                    }
                }
            }
        }
    } else {
        /* generate save code */
        for(i = 0 ; i < nb_outputs; i++) {
            op = &operands[i];
            if (op->reg >= 0) {
                if ((op->vt->r & VT_VALMASK) == VT_LLOCAL) {
                    if (!op->is_memory) {
                        SValue sv;
                        sv = *op->vt;
                        sv.r = (sv.r & ~VT_VALMASK) | VT_LOCAL;
                        sv.type.t = VT_PTR;
                        load(out_reg, &sv);

                        sv = *op->vt;
                        sv.r = (sv.r & ~VT_VALMASK) | out_reg;
                        store(op->reg, &sv);
                    }
                } else {
                    store(op->reg, op->vt);
                    if (op->is_llong) {
                        SValue sv;
                        sv = *op->vt;
                        sv.c.i += 4;
                        store(TREG_XDX, &sv);
                    }
                }
            }
        }
        /* generate reg restore code */
        for(i = sizeof(reg_saved)/sizeof(reg_saved[0]) - 1; i >= 0; i--) {
            reg = reg_saved[i];
            if (regs_allocated[reg]) {
                if (reg >= 8)
                    g(0x41), reg-=8;
                g(0x58 + reg);
            }
        }
    }
}
ST_FUNC void asm_clobber(uint8_t *clobber_regs, const char *str)
{
    int reg;
    TokenSym *ts;
#ifdef TCC_TARGET_X86_64
    unsigned int type;
#endif

    if (!strcmp(str, "memory") ||
        !strcmp(str, "cc") ||
        !strcmp(str, "flags"))
        return;
    ts = tok_alloc(str, strlen(str));
    reg = ts->tok;
    if (reg >= TOK_ASM_eax && reg <= TOK_ASM_edi) {
        reg -= TOK_ASM_eax;
    } else if (reg >= TOK_ASM_ax && reg <= TOK_ASM_di) {
        reg -= TOK_ASM_ax;
#ifdef TCC_TARGET_X86_64
    } else if (reg >= TOK_ASM_rax && reg <= TOK_ASM_rdi) {
        reg -= TOK_ASM_rax;
    } else if ((reg = asm_parse_numeric_reg(reg, &type)) >= 0) {
        ;
#endif
    } else {
        tcc_error("invalid clobber register '%s'", str);
    }
    clobber_regs[reg] = 1;
}