/*
 * Copyright (C) 2008, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef MacroAssemblerX86_64_h
#define MacroAssemblerX86_64_h

#if ENABLE(ASSEMBLER) && CPU(X86_64)

#include "MacroAssemblerX86Common.h"
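
// Byte distance between the DataLabelPtr produced by moveWithPatch() (the end of the
// movq that loads a 64-bit immediate into the scratch register) and the label of the
// indirect call/jump through that register; it equals the length of the `call r11` /
// `jmp r11` instruction. linkCall(), repatchCall() and readCallTarget() step back by
// this amount from a call's label to reach the patchable target pointer.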
#define REPTACH_OFFSET_CALL_R11 3

namespace JSC {
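
// 64-bit specialization of the x86 macro assembler. It extends MacroAssemblerX86Common
// with quadword (64-bit) arithmetic, logical, load/store, comparison and patching
// operations, and implements the AbsoluteAddress forms of the 32-bit operations by
// first materializing the address in the scratch register (r11), since x86-64 memory
// operands cannot encode a 64-bit absolute address directly.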
class MacroAssemblerX86_64 : public MacroAssemblerX86Common {
public:
    static const Scale ScalePtr = TimesEight;

    using MacroAssemblerX86Common::add32;
    using MacroAssemblerX86Common::and32;
    using MacroAssemblerX86Common::branchAdd32;
    using MacroAssemblerX86Common::or32;
    using MacroAssemblerX86Common::sub32;
    using MacroAssemblerX86Common::load32;
    using MacroAssemblerX86Common::store32;
    using MacroAssemblerX86Common::store8;
    using MacroAssemblerX86Common::call;
    using MacroAssemblerX86Common::jump;
    using MacroAssemblerX86Common::addDouble;
    using MacroAssemblerX86Common::loadDouble;
    using MacroAssemblerX86Common::convertInt32ToDouble;
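
    // 32-bit operations on absolute addresses. The address is loaded into the scratch
    // register first, because only the mov/moffs encoding (used by the eax/rax special
    // cases in load32, load64 and store64 below) accepts a 64-bit absolute address.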
    void add32(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        add32(imm, Address(scratchRegister));
    }

    void and32(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        and32(imm, Address(scratchRegister));
    }

    void add32(AbsoluteAddress address, RegisterID dest)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        add32(Address(scratchRegister), dest);
    }

    void or32(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        or32(imm, Address(scratchRegister));
    }

    void or32(RegisterID reg, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        or32(reg, Address(scratchRegister));
    }

    void sub32(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        sub32(imm, Address(scratchRegister));
    }

    void load32(const void* address, RegisterID dest)
    {
        if (dest == X86Registers::eax)
            m_assembler.movl_mEAX(address);
        else {
            move(TrustedImmPtr(address), dest);
            load32(dest, dest);
        }
    }

    void addDouble(AbsoluteAddress address, FPRegisterID dest)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        m_assembler.addsd_mr(0, scratchRegister, dest);
    }

    void convertInt32ToDouble(TrustedImm32 imm, FPRegisterID dest)
    {
        move(imm, scratchRegister);
        m_assembler.cvtsi2sd_rr(scratchRegister, dest);
    }

    void store32(TrustedImm32 imm, void* address)
    {
        move(TrustedImmPtr(address), scratchRegister);
        store32(imm, scratchRegister);
    }

    void store8(TrustedImm32 imm, void* address)
    {
        move(TrustedImmPtr(address), scratchRegister);
        store8(imm, Address(scratchRegister));
    }
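
    // call() plants a patchable movq of the target address into the scratch register
    // followed by an indirect call through it, so the call target can be linked and
    // later repatched (see linkCall()/repatchCall() below).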
    Call call()
    {
        DataLabelPtr label = moveWithPatch(TrustedImmPtr(0), scratchRegister);
        Call result = Call(m_assembler.call(scratchRegister), Call::Linkable);
        ASSERT_UNUSED(label, differenceBetween(label, result) == REPTACH_OFFSET_CALL_R11);
        return result;
    }

    // Address is a memory location containing the address to jump to
    void jump(AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        jump(Address(scratchRegister));
    }

    Call tailRecursiveCall()
    {
        DataLabelPtr label = moveWithPatch(TrustedImmPtr(0), scratchRegister);
        Jump newJump = Jump(m_assembler.jmp_r(scratchRegister));
        ASSERT_UNUSED(label, differenceBetween(label, newJump) == REPTACH_OFFSET_CALL_R11);
        return Call::fromTailJump(newJump);
    }

    Call makeTailRecursiveCall(Jump oldJump)
    {
        oldJump.link(this);
        DataLabelPtr label = moveWithPatch(TrustedImmPtr(0), scratchRegister);
        Jump newJump = Jump(m_assembler.jmp_r(scratchRegister));
        ASSERT_UNUSED(label, differenceBetween(label, newJump) == REPTACH_OFFSET_CALL_R11);
        return Call::fromTailJump(newJump);
    }

    Jump branchAdd32(ResultCondition cond, TrustedImm32 src, AbsoluteAddress dest)
    {
        move(TrustedImmPtr(dest.m_ptr), scratchRegister);
        add32(src, Address(scratchRegister));
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }
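
    // Quadword (64-bit) arithmetic and logical operations; these mirror the 32-bit
    // versions provided by MacroAssemblerX86Common.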
    void add64(RegisterID src, RegisterID dest)
    {
        m_assembler.addq_rr(src, dest);
    }

    void add64(Address src, RegisterID dest)
    {
        m_assembler.addq_mr(src.offset, src.base, dest);
    }

    void add64(AbsoluteAddress src, RegisterID dest)
    {
        move(TrustedImmPtr(src.m_ptr), scratchRegister);
        add64(Address(scratchRegister), dest);
    }

    void add64(TrustedImm32 imm, RegisterID srcDest)
    {
        m_assembler.addq_ir(imm.m_value, srcDest);
    }

    void add64(TrustedImm64 imm, RegisterID dest)
    {
        move(imm, scratchRegister);
        add64(scratchRegister, dest);
    }

    void add64(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        m_assembler.leaq_mr(imm.m_value, src, dest);
    }

    void add64(TrustedImm32 imm, Address address)
    {
        m_assembler.addq_im(imm.m_value, address.offset, address.base);
    }

    void add64(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        add64(imm, Address(scratchRegister));
    }

    void and64(RegisterID src, RegisterID dest)
    {
        m_assembler.andq_rr(src, dest);
    }

    void and64(TrustedImm32 imm, RegisterID srcDest)
    {
        m_assembler.andq_ir(imm.m_value, srcDest);
    }

    void neg64(RegisterID dest)
    {
        m_assembler.negq_r(dest);
    }

    void or64(RegisterID src, RegisterID dest)
    {
        m_assembler.orq_rr(src, dest);
    }

    void or64(TrustedImm64 imm, RegisterID dest)
    {
        move(imm, scratchRegister);
        or64(scratchRegister, dest);
    }

    void or64(TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.orq_ir(imm.m_value, dest);
    }

    void or64(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        if (op1 == op2)
            move(op1, dest);
        else if (op1 == dest)
            or64(op2, dest);
        else {
            move(op2, dest);
            or64(op1, dest);
        }
    }

    void or64(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        move(src, dest);
        or64(imm, dest);
    }

    void rotateRight64(TrustedImm32 imm, RegisterID srcDst)
    {
        m_assembler.rorq_i8r(imm.m_value, srcDst);
    }

    void sub64(RegisterID src, RegisterID dest)
    {
        m_assembler.subq_rr(src, dest);
    }

    void sub64(TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.subq_ir(imm.m_value, dest);
    }

    void sub64(TrustedImm64 imm, RegisterID dest)
    {
        move(imm, scratchRegister);
        sub64(scratchRegister, dest);
    }

    void xor64(RegisterID src, RegisterID dest)
    {
        m_assembler.xorq_rr(src, dest);
    }

    void xor64(RegisterID src, Address dest)
    {
        m_assembler.xorq_rm(src, dest.offset, dest.base);
    }

    void xor64(TrustedImm32 imm, RegisterID srcDest)
    {
        m_assembler.xorq_ir(imm.m_value, srcDest);
    }

    void load64(ImplicitAddress address, RegisterID dest)
    {
        m_assembler.movq_mr(address.offset, address.base, dest);
    }

    void load64(BaseIndex address, RegisterID dest)
    {
        m_assembler.movq_mr(address.offset, address.base, address.index, address.scale, dest);
    }

    void load64(const void* address, RegisterID dest)
    {
        if (dest == X86Registers::eax)
            m_assembler.movq_mEAX(address);
        else {
            move(TrustedImmPtr(address), dest);
            load64(dest, dest);
        }
    }

    DataLabel32 load64WithAddressOffsetPatch(Address address, RegisterID dest)
    {
        padBeforePatch();
        m_assembler.movq_mr_disp32(address.offset, address.base, dest);
        return DataLabel32(this);
    }

    DataLabelCompact load64WithCompactAddressOffsetPatch(Address address, RegisterID dest)
    {
        padBeforePatch();
        m_assembler.movq_mr_disp8(address.offset, address.base, dest);
        return DataLabelCompact(this);
    }

    void store64(RegisterID src, ImplicitAddress address)
    {
        m_assembler.movq_rm(src, address.offset, address.base);
    }

    void store64(RegisterID src, BaseIndex address)
    {
        m_assembler.movq_rm(src, address.offset, address.base, address.index, address.scale);
    }

    void store64(RegisterID src, void* address)
    {
        if (src == X86Registers::eax)
            m_assembler.movq_EAXm(address);
        else {
            move(TrustedImmPtr(address), scratchRegister);
            store64(src, scratchRegister);
        }
    }

    void store64(TrustedImm64 imm, ImplicitAddress address)
    {
        move(imm, scratchRegister);
        store64(scratchRegister, address);
    }

    void store64(TrustedImm64 imm, BaseIndex address)
    {
        move(imm, scratchRegister);
        m_assembler.movq_rm(scratchRegister, address.offset, address.base, address.index, address.scale);
    }

    DataLabel32 store64WithAddressOffsetPatch(RegisterID src, Address address)
    {
        padBeforePatch();
        m_assembler.movq_rm_disp32(src, address.offset, address.base);
        return DataLabel32(this);
    }
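
    // Bitwise moves between a general-purpose register and an XMM register via movq.
    // These reinterpret the 64-bit pattern; no numeric conversion is performed
    // (contrast with convertInt32ToDouble above).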
    void move64ToDouble(RegisterID src, FPRegisterID dest)
    {
        m_assembler.movq_rr(src, dest);
    }

    void moveDoubleTo64(FPRegisterID src, RegisterID dest)
    {
        m_assembler.movq_rr(src, dest);
    }

    void compare64(RelationalCondition cond, RegisterID left, TrustedImm32 right, RegisterID dest)
    {
        if (((cond == Equal) || (cond == NotEqual)) && !right.m_value)
            m_assembler.testq_rr(left, left);
        else
            m_assembler.cmpq_ir(right.m_value, left);
        m_assembler.setCC_r(x86Condition(cond), dest);
        m_assembler.movzbl_rr(dest, dest);
    }

    void compare64(RelationalCondition cond, RegisterID left, RegisterID right, RegisterID dest)
    {
        m_assembler.cmpq_rr(right, left);
        m_assembler.setCC_r(x86Condition(cond), dest);
        m_assembler.movzbl_rr(dest, dest);
    }

    Jump branch64(RelationalCondition cond, RegisterID left, RegisterID right)
    {
        m_assembler.cmpq_rr(right, left);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branch64(RelationalCondition cond, RegisterID left, TrustedImm64 right)
    {
        if (((cond == Equal) || (cond == NotEqual)) && !right.m_value) {
            m_assembler.testq_rr(left, left);
            return Jump(m_assembler.jCC(x86Condition(cond)));
        }
        move(right, scratchRegister);
        return branch64(cond, left, scratchRegister);
    }

    Jump branch64(RelationalCondition cond, RegisterID left, Address right)
    {
        m_assembler.cmpq_mr(right.offset, right.base, left);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branch64(RelationalCondition cond, AbsoluteAddress left, RegisterID right)
    {
        move(TrustedImmPtr(left.m_ptr), scratchRegister);
        return branch64(cond, Address(scratchRegister), right);
    }

    Jump branch64(RelationalCondition cond, Address left, RegisterID right)
    {
        m_assembler.cmpq_rm(right, left.offset, left.base);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branch64(RelationalCondition cond, Address left, TrustedImm64 right)
    {
        move(right, scratchRegister);
        return branch64(cond, left, scratchRegister);
    }

    Jump branchTest64(ResultCondition cond, RegisterID reg, RegisterID mask)
    {
        m_assembler.testq_rr(reg, mask);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchTest64(ResultCondition cond, RegisterID reg, TrustedImm32 mask = TrustedImm32(-1))
    {
        // If we are only interested in the low seven bits, this can be tested with a testb.
        if (mask.m_value == -1)
            m_assembler.testq_rr(reg, reg);
        else if ((mask.m_value & ~0x7f) == 0)
            m_assembler.testb_i8r(mask.m_value, reg);
        else
            m_assembler.testq_i32r(mask.m_value, reg);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    void test64(ResultCondition cond, RegisterID reg, TrustedImm32 mask, RegisterID dest)
    {
        if (mask.m_value == -1)
            m_assembler.testq_rr(reg, reg);
        else if ((mask.m_value & ~0x7f) == 0)
            m_assembler.testb_i8r(mask.m_value, reg);
        else
            m_assembler.testq_i32r(mask.m_value, reg);
        set32(x86Condition(cond), dest);
    }

    void test64(ResultCondition cond, RegisterID reg, RegisterID mask, RegisterID dest)
    {
        m_assembler.testq_rr(reg, mask);
        set32(x86Condition(cond), dest);
    }

    Jump branchTest64(ResultCondition cond, AbsoluteAddress address, TrustedImm32 mask = TrustedImm32(-1))
    {
        load64(address.m_ptr, scratchRegister);
        return branchTest64(cond, scratchRegister, mask);
    }

    Jump branchTest64(ResultCondition cond, Address address, TrustedImm32 mask = TrustedImm32(-1))
    {
        if (mask.m_value == -1)
            m_assembler.cmpq_im(0, address.offset, address.base);
        else
            m_assembler.testq_i32m(mask.m_value, address.offset, address.base);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchTest64(ResultCondition cond, Address address, RegisterID reg)
    {
        m_assembler.testq_rm(reg, address.offset, address.base);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchTest64(ResultCondition cond, BaseIndex address, TrustedImm32 mask = TrustedImm32(-1))
    {
        if (mask.m_value == -1)
            m_assembler.cmpq_im(0, address.offset, address.base, address.index, address.scale);
        else
            m_assembler.testq_i32m(mask.m_value, address.offset, address.base, address.index, address.scale);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchAdd64(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
    {
        add64(imm, dest);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchAdd64(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        add64(src, dest);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchSub64(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
    {
        sub64(imm, dest);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchSub64(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        sub64(src, dest);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchSub64(ResultCondition cond, RegisterID src1, TrustedImm32 src2, RegisterID dest)
    {
        move(src1, dest);
        return branchSub64(cond, src2, dest);
    }

    ConvertibleLoadLabel convertibleLoadPtr(Address address, RegisterID dest)
    {
        ConvertibleLoadLabel result = ConvertibleLoadLabel(this);
        m_assembler.movq_mr(address.offset, address.base, dest);
        return result;
    }
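
    // Patchable pointer materialization: moveWithPatch() emits a movq with a full
    // 64-bit immediate that can be rewritten after code generation. The
    // branchPtrWithPatch()/storePtrWithPatch() helpers build on it so the compared
    // or stored pointer can be repatched in place.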
    DataLabelPtr moveWithPatch(TrustedImmPtr initialValue, RegisterID dest)
    {
        padBeforePatch();
        m_assembler.movq_i64r(initialValue.asIntptr(), dest);
        return DataLabelPtr(this);
    }

    Jump branchPtrWithPatch(RelationalCondition cond, RegisterID left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
    {
        dataLabel = moveWithPatch(initialRightValue, scratchRegister);
        return branch64(cond, left, scratchRegister);
    }

    Jump branchPtrWithPatch(RelationalCondition cond, Address left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
    {
        dataLabel = moveWithPatch(initialRightValue, scratchRegister);
        return branch64(cond, left, scratchRegister);
    }

    DataLabelPtr storePtrWithPatch(TrustedImmPtr initialValue, ImplicitAddress address)
    {
        DataLabelPtr label = moveWithPatch(initialValue, scratchRegister);
        store64(scratchRegister, address);
        return label;
    }
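
    // Byte tests against absolute addresses and against a base register plus a
    // pointer-sized constant offset (ExtendedAddress), again routed through the
    // scratch register.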
    using MacroAssemblerX86Common::branchTest8;

    Jump branchTest8(ResultCondition cond, ExtendedAddress address, TrustedImm32 mask = TrustedImm32(-1))
    {
        TrustedImmPtr addr(reinterpret_cast<void*>(address.offset));
        MacroAssemblerX86Common::move(addr, scratchRegister);
        return MacroAssemblerX86Common::branchTest8(cond, BaseIndex(scratchRegister, address.base, TimesOne), mask);
    }

    Jump branchTest8(ResultCondition cond, AbsoluteAddress address, TrustedImm32 mask = TrustedImm32(-1))
    {
        MacroAssemblerX86Common::move(TrustedImmPtr(address.m_ptr), scratchRegister);
        return MacroAssemblerX86Common::branchTest8(cond, Address(scratchRegister), mask);
    }

    static bool supportsFloatingPoint() { return true; }
    // See comment on MacroAssemblerARMv7::supportsFloatingPointTruncate()
    static bool supportsFloatingPointTruncate() { return true; }
    static bool supportsFloatingPointSqrt() { return true; }
    static bool supportsFloatingPointAbs() { return true; }

    static FunctionPtr readCallTarget(CodeLocationCall call)
    {
        return FunctionPtr(X86Assembler::readPointer(call.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11).dataLocation()));
    }

    static RegisterID scratchRegisterForBlinding() { return scratchRegister; }

    static bool canJumpReplacePatchableBranchPtrWithPatch() { return true; }

    static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
    {
        const int rexBytes = 1;
        const int opcodeBytes = 1;
        const int immediateBytes = 8;
        const int totalBytes = rexBytes + opcodeBytes + immediateBytes;
        ASSERT(totalBytes >= maxJumpReplacementSize());
        return label.labelAtOffset(-totalBytes);
    }

    static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr label)
    {
        return startOfBranchPtrWithPatchOnRegister(label);
    }

#if ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT
    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel instructionStart, Address, void* initialValue);
    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID, void* initialValue);
#else
    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel instructionStart, Address, void* initialValue)
    {
        X86Assembler::revertJumpTo_movq_i64r(instructionStart.executableAddress(), reinterpret_cast<intptr_t>(initialValue), scratchRegister);
    }

    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID, void* initialValue)
    {
        X86Assembler::revertJumpTo_movq_i64r(instructionStart.executableAddress(), reinterpret_cast<intptr_t>(initialValue), scratchRegister);
    }
#endif

private:
    friend class LinkBuffer;
    friend class RepatchBuffer;
#if ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT
    friend void JSCBridge::performCompilerTask();
#endif
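
    // Calls planted by call() above are "far" calls through the scratch register and
    // are linked by rewriting the 64-bit pointer immediate just before the call
    // instruction; near calls (rel32) are linked directly.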
    static void linkCall(void* code, Call call, FunctionPtr function)
    {
        if (!call.isFlagSet(Call::Near))
            X86Assembler::linkPointer(code, call.m_label.labelAtOffset(-REPTACH_OFFSET_CALL_R11), function.value());
        else
            X86Assembler::linkCall(code, call.m_label, function.value());
    }

#if ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT
    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination);
    static void repatchCall(CodeLocationCall call, FunctionPtr destination);
#else
    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
    {
        X86Assembler::repatchPointer(call.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11).dataLocation(), destination.executableAddress());
    }

    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
    {
        X86Assembler::repatchPointer(call.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11).dataLocation(), destination.executableAddress());
    }
#endif
};

} // namespace JSC

#endif // ENABLE(ASSEMBLER) && CPU(X86_64)

#endif // MacroAssemblerX86_64_h