AbstractMacroAssembler.h
/*
 * Copyright (C) 2008, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
  25. #ifndef AbstractMacroAssembler_h
  26. #define AbstractMacroAssembler_h
  27. #include "AssemblerBuffer.h"
  28. #include "CodeLocation.h"
  29. #include "MacroAssemblerCodeRef.h"
  30. #include <wtf/CryptographicallyRandomNumber.h>
  31. #include <wtf/Noncopyable.h>
  32. #if ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT
  33. #include <JITBridge.h>
  34. #endif
  35. #if ENABLE(ASSEMBLER)
  36. #if PLATFORM(QT)
  37. #define ENABLE_JIT_CONSTANT_BLINDING 0
  38. #endif
  39. #ifndef ENABLE_JIT_CONSTANT_BLINDING
  40. #define ENABLE_JIT_CONSTANT_BLINDING 1
  41. #endif
  42. namespace JSC {
  43. inline bool isARMv7s()
  44. {
  45. #if CPU(APPLE_ARMV7S)
  46. return true;
  47. #else
  48. return false;
  49. #endif
  50. }
  51. inline bool isX86()
  52. {
  53. #if CPU(X86_64) || CPU(X86)
  54. return true;
  55. #else
  56. return false;
  57. #endif
  58. }
  59. class JumpReplacementWatchpoint;
  60. class LinkBuffer;
  61. class RepatchBuffer;
  62. class Watchpoint;
  63. namespace DFG {
  64. struct OSRExit;
  65. }
  66. template <class AssemblerType>
  67. class AbstractMacroAssembler {
  68. public:
  69. friend class JITWriteBarrierBase;
  70. typedef AssemblerType AssemblerType_T;
  71. typedef MacroAssemblerCodePtr CodePtr;
  72. typedef MacroAssemblerCodeRef CodeRef;
  73. class Jump;
  74. typedef typename AssemblerType::RegisterID RegisterID;
  75. // Section 1: MacroAssembler operand types
  76. //
  77. // The following types are used as operands to MacroAssembler operations,
  78. // describing immediate and memory operands to the instructions to be planted.
  79. enum Scale {
  80. TimesOne,
  81. TimesTwo,
  82. TimesFour,
  83. TimesEight,
  84. };
  85. // Address:
  86. //
  87. // Describes a simple base-offset address.
  88. struct Address {
  89. explicit Address(RegisterID base, int32_t offset = 0)
  90. : base(base)
  91. , offset(offset)
  92. {
  93. }
  94. RegisterID base;
  95. int32_t offset;
  96. };
  97. struct ExtendedAddress {
  98. explicit ExtendedAddress(RegisterID base, intptr_t offset = 0)
  99. : base(base)
  100. , offset(offset)
  101. {
  102. }
  103. RegisterID base;
  104. intptr_t offset;
  105. };
  106. // ImplicitAddress:
  107. //
  108. // This class is used for explicit 'load' and 'store' operations
  109. // (as opposed to situations in which a memory operand is provided
  110. // to a generic operation, such as an integer arithmetic instruction).
  111. //
  112. // In the case of a load (or store) operation we want to permit
  113. // addresses to be implicitly constructed, e.g. the two calls:
  114. //
  115. // load32(Address(addrReg), destReg);
  116. // load32(addrReg, destReg);
  117. //
  118. // Are equivalent, and the explicit wrapping of the Address in the former
  119. // is unnecessary.
  120. struct ImplicitAddress {
  121. ImplicitAddress(RegisterID base)
  122. : base(base)
  123. , offset(0)
  124. {
  125. }
  126. ImplicitAddress(Address address)
  127. : base(address.base)
  128. , offset(address.offset)
  129. {
  130. }
  131. RegisterID base;
  132. int32_t offset;
  133. };
  134. // BaseIndex:
  135. //
  136. // Describes a complex addressing mode.
  137. struct BaseIndex {
  138. BaseIndex(RegisterID base, RegisterID index, Scale scale, int32_t offset = 0)
  139. : base(base)
  140. , index(index)
  141. , scale(scale)
  142. , offset(offset)
  143. {
  144. }
  145. RegisterID base;
  146. RegisterID index;
  147. Scale scale;
  148. int32_t offset;
  149. };
  150. // AbsoluteAddress:
  151. //
  152. // Describes an memory operand given by a pointer. For regular load & store
  153. // operations an unwrapped void* will be used, rather than using this.
  154. struct AbsoluteAddress {
  155. explicit AbsoluteAddress(const void* ptr)
  156. : m_ptr(ptr)
  157. {
  158. }
  159. const void* m_ptr;
  160. };
  161. // TrustedImmPtr:
  162. //
  163. // A pointer sized immediate operand to an instruction - this is wrapped
  164. // in a class requiring explicit construction in order to differentiate
  165. // from pointers used as absolute addresses to memory operations
  166. struct TrustedImmPtr {
  167. TrustedImmPtr() { }
  168. explicit TrustedImmPtr(const void* value)
  169. : m_value(value)
  170. {
  171. }
  172. // This is only here so that TrustedImmPtr(0) does not confuse the C++
  173. // overload handling rules.
  174. explicit TrustedImmPtr(int value)
  175. : m_value(0)
  176. {
  177. ASSERT_UNUSED(value, !value);
  178. }
  179. explicit TrustedImmPtr(size_t value)
  180. : m_value(reinterpret_cast<void*>(value))
  181. {
  182. }
  183. intptr_t asIntptr()
  184. {
  185. return reinterpret_cast<intptr_t>(m_value);
  186. }
  187. const void* m_value;
  188. };
  189. struct ImmPtr :
  190. #if ENABLE(JIT_CONSTANT_BLINDING)
  191. private TrustedImmPtr
  192. #else
  193. public TrustedImmPtr
  194. #endif
  195. {
  196. explicit ImmPtr(const void* value)
  197. : TrustedImmPtr(value)
  198. {
  199. }
  200. TrustedImmPtr asTrustedImmPtr() { return *this; }
  201. };
  202. // TrustedImm32:
  203. //
  204. // A 32bit immediate operand to an instruction - this is wrapped in a
  205. // class requiring explicit construction in order to prevent RegisterIDs
  206. // (which are implemented as an enum) from accidentally being passed as
  207. // immediate values.
  208. struct TrustedImm32 {
  209. TrustedImm32() { }
  210. explicit TrustedImm32(int32_t value)
  211. : m_value(value)
  212. {
  213. }
  214. #if !CPU(X86_64)
  215. explicit TrustedImm32(TrustedImmPtr ptr)
  216. : m_value(ptr.asIntptr())
  217. {
  218. }
  219. #endif
  220. int32_t m_value;
  221. };
  222. struct Imm32 :
  223. #if ENABLE(JIT_CONSTANT_BLINDING)
  224. private TrustedImm32
  225. #else
  226. public TrustedImm32
  227. #endif
  228. {
  229. explicit Imm32(int32_t value)
  230. : TrustedImm32(value)
  231. {
  232. }
  233. #if !CPU(X86_64)
  234. explicit Imm32(TrustedImmPtr ptr)
  235. : TrustedImm32(ptr)
  236. {
  237. }
  238. #endif
  239. const TrustedImm32& asTrustedImm32() const { return *this; }
  240. };
  241. // TrustedImm64:
  242. //
  243. // A 64bit immediate operand to an instruction - this is wrapped in a
  244. // class requiring explicit construction in order to prevent RegisterIDs
  245. // (which are implemented as an enum) from accidentally being passed as
  246. // immediate values.
  247. struct TrustedImm64 {
  248. TrustedImm64() { }
  249. explicit TrustedImm64(int64_t value)
  250. : m_value(value)
  251. {
  252. }
  253. #if CPU(X86_64)
  254. explicit TrustedImm64(TrustedImmPtr ptr)
  255. : m_value(ptr.asIntptr())
  256. {
  257. }
  258. #endif
  259. int64_t m_value;
  260. };
  261. struct Imm64 :
  262. #if ENABLE(JIT_CONSTANT_BLINDING)
  263. private TrustedImm64
  264. #else
  265. public TrustedImm64
  266. #endif
  267. {
  268. explicit Imm64(int64_t value)
  269. : TrustedImm64(value)
  270. {
  271. }
  272. #if CPU(X86_64)
  273. explicit Imm64(TrustedImmPtr ptr)
  274. : TrustedImm64(ptr)
  275. {
  276. }
  277. #endif
  278. const TrustedImm64& asTrustedImm64() const { return *this; }
  279. };
  280. // Section 2: MacroAssembler code buffer handles
  281. //
  282. // The following types are used to reference items in the code buffer
  283. // during JIT code generation. For example, the type Jump is used to
  284. // track the location of a jump instruction so that it may later be
  285. // linked to a label marking its destination.
  286. // Label:
  287. //
  288. // A Label records a point in the generated instruction stream, typically such that
  289. // it may be used as a destination for a jump.
  290. class Label {
  291. template<class TemplateAssemblerType>
  292. friend class AbstractMacroAssembler;
  293. friend struct DFG::OSRExit;
  294. friend class Jump;
  295. friend class JumpReplacementWatchpoint;
  296. friend class MacroAssemblerCodeRef;
  297. friend class LinkBuffer;
  298. friend class Watchpoint;
  299. public:
  300. Label()
  301. {
  302. }
  303. Label(AbstractMacroAssembler<AssemblerType>* masm)
  304. : m_label(masm->m_assembler.label())
  305. {
  306. }
  307. bool isSet() const { return m_label.isSet(); }
  308. private:
  309. AssemblerLabel m_label;
  310. };
  311. // ConvertibleLoadLabel:
  312. //
  313. // A ConvertibleLoadLabel records a loadPtr instruction that can be patched to an addPtr
  314. // so that:
  315. //
  316. // loadPtr(Address(a, i), b)
  317. //
  318. // becomes:
  319. //
  320. // addPtr(TrustedImmPtr(i), a, b)
  321. class ConvertibleLoadLabel {
  322. template<class TemplateAssemblerType>
  323. friend class AbstractMacroAssembler;
  324. friend class LinkBuffer;
  325. public:
  326. ConvertibleLoadLabel()
  327. {
  328. }
  329. ConvertibleLoadLabel(AbstractMacroAssembler<AssemblerType>* masm)
  330. : m_label(masm->m_assembler.labelIgnoringWatchpoints())
  331. {
  332. }
  333. bool isSet() const { return m_label.isSet(); }
  334. private:
  335. AssemblerLabel m_label;
  336. };
  337. // DataLabelPtr:
  338. //
  339. // A DataLabelPtr is used to refer to a location in the code containing a pointer to be
  340. // patched after the code has been generated.
  341. class DataLabelPtr {
  342. template<class TemplateAssemblerType>
  343. friend class AbstractMacroAssembler;
  344. friend class LinkBuffer;
  345. public:
  346. DataLabelPtr()
  347. {
  348. }
  349. DataLabelPtr(AbstractMacroAssembler<AssemblerType>* masm)
  350. : m_label(masm->m_assembler.label())
  351. {
  352. }
  353. bool isSet() const { return m_label.isSet(); }
  354. private:
  355. AssemblerLabel m_label;
  356. };
  357. // DataLabel32:
  358. //
  359. // A DataLabelPtr is used to refer to a location in the code containing a pointer to be
  360. // patched after the code has been generated.
  361. class DataLabel32 {
  362. template<class TemplateAssemblerType>
  363. friend class AbstractMacroAssembler;
  364. friend class LinkBuffer;
  365. public:
  366. DataLabel32()
  367. {
  368. }
  369. DataLabel32(AbstractMacroAssembler<AssemblerType>* masm)
  370. : m_label(masm->m_assembler.label())
  371. {
  372. }
  373. AssemblerLabel label() const { return m_label; }
  374. private:
  375. AssemblerLabel m_label;
  376. };
  377. // DataLabelCompact:
  378. //
  379. // A DataLabelCompact is used to refer to a location in the code containing a
  380. // compact immediate to be patched after the code has been generated.
  381. class DataLabelCompact {
  382. template<class TemplateAssemblerType>
  383. friend class AbstractMacroAssembler;
  384. friend class LinkBuffer;
  385. public:
  386. DataLabelCompact()
  387. {
  388. }
  389. DataLabelCompact(AbstractMacroAssembler<AssemblerType>* masm)
  390. : m_label(masm->m_assembler.label())
  391. {
  392. }
  393. DataLabelCompact(AssemblerLabel label)
  394. : m_label(label)
  395. {
  396. }
  397. private:
  398. AssemblerLabel m_label;
  399. };
  400. // Call:
  401. //
  402. // A Call object is a reference to a call instruction that has been planted
  403. // into the code buffer - it is typically used to link the call, setting the
  404. // relative offset such that when executed it will call to the desired
  405. // destination.
  406. class Call {
  407. template<class TemplateAssemblerType>
  408. friend class AbstractMacroAssembler;
  409. public:
  410. enum Flags {
  411. None = 0x0,
  412. Linkable = 0x1,
  413. Near = 0x2,
  414. LinkableNear = 0x3,
  415. };
  416. Call()
  417. : m_flags(None)
  418. {
  419. }
  420. Call(AssemblerLabel jmp, Flags flags)
  421. : m_label(jmp)
  422. , m_flags(flags)
  423. {
  424. }
  425. bool isFlagSet(Flags flag)
  426. {
  427. return m_flags & flag;
  428. }
  429. static Call fromTailJump(Jump jump)
  430. {
  431. return Call(jump.m_label, Linkable);
  432. }
  433. AssemblerLabel m_label;
  434. private:
  435. Flags m_flags;
  436. };
  437. // Jump:
  438. //
  439. // A jump object is a reference to a jump instruction that has been planted
  440. // into the code buffer - it is typically used to link the jump, setting the
  441. // relative offset such that when executed it will jump to the desired
  442. // destination.
  443. class Jump {
  444. template<class TemplateAssemblerType>
  445. friend class AbstractMacroAssembler;
  446. friend class Call;
  447. friend struct DFG::OSRExit;
  448. friend class LinkBuffer;
  449. public:
  450. Jump()
  451. {
  452. }
  453. #if CPU(ARM_THUMB2)
  454. // Fixme: this information should be stored in the instruction stream, not in the Jump object.
  455. Jump(AssemblerLabel jmp, ARMv7Assembler::JumpType type = ARMv7Assembler::JumpNoCondition, ARMv7Assembler::Condition condition = ARMv7Assembler::ConditionInvalid)
  456. : m_label(jmp)
  457. , m_type(type)
  458. , m_condition(condition)
  459. {
  460. }
  461. #elif CPU(SH4)
  462. Jump(AssemblerLabel jmp, SH4Assembler::JumpType type = SH4Assembler::JumpFar)
  463. : m_label(jmp)
  464. , m_type(type)
  465. {
  466. }
  467. #else
  468. Jump(AssemblerLabel jmp)
  469. : m_label(jmp)
  470. {
  471. }
  472. #endif
  473. Label label() const
  474. {
  475. Label result;
  476. result.m_label = m_label;
  477. return result;
  478. }
  479. void link(AbstractMacroAssembler<AssemblerType>* masm) const
  480. {
  481. #if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
  482. masm->checkRegisterAllocationAgainstBranchRange(m_label.m_offset, masm->debugOffset());
  483. #endif
  484. #if CPU(ARM_THUMB2)
  485. masm->m_assembler.linkJump(m_label, masm->m_assembler.label(), m_type, m_condition);
  486. #elif CPU(SH4)
  487. masm->m_assembler.linkJump(m_label, masm->m_assembler.label(), m_type);
  488. #else
  489. masm->m_assembler.linkJump(m_label, masm->m_assembler.label());
  490. #endif
  491. }
  492. void linkTo(Label label, AbstractMacroAssembler<AssemblerType>* masm) const
  493. {
  494. #if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
  495. masm->checkRegisterAllocationAgainstBranchRange(label.m_label.m_offset, m_label.m_offset);
  496. #endif
  497. #if CPU(ARM_THUMB2)
  498. masm->m_assembler.linkJump(m_label, label.m_label, m_type, m_condition);
  499. #else
  500. masm->m_assembler.linkJump(m_label, label.m_label);
  501. #endif
  502. }
  503. bool isSet() const { return m_label.isSet(); }
  504. private:
  505. AssemblerLabel m_label;
  506. #if CPU(ARM_THUMB2)
  507. ARMv7Assembler::JumpType m_type;
  508. ARMv7Assembler::Condition m_condition;
  509. #endif
  510. #if CPU(SH4)
  511. SH4Assembler::JumpType m_type;
  512. #endif
  513. };
  514. struct PatchableJump {
  515. PatchableJump()
  516. {
  517. }
  518. explicit PatchableJump(Jump jump)
  519. : m_jump(jump)
  520. {
  521. }
  522. operator Jump&() { return m_jump; }
  523. Jump m_jump;
  524. };
  525. // JumpList:
  526. //
  527. // A JumpList is a set of Jump objects.
  528. // All jumps in the set will be linked to the same destination.
  529. class JumpList {
  530. friend class LinkBuffer;
  531. public:
  532. typedef Vector<Jump, 2> JumpVector;
  533. JumpList() { }
  534. JumpList(Jump jump)
  535. {
  536. append(jump);
  537. }
  538. void link(AbstractMacroAssembler<AssemblerType>* masm)
  539. {
  540. size_t size = m_jumps.size();
  541. for (size_t i = 0; i < size; ++i)
  542. m_jumps[i].link(masm);
  543. m_jumps.clear();
  544. }
  545. void linkTo(Label label, AbstractMacroAssembler<AssemblerType>* masm)
  546. {
  547. size_t size = m_jumps.size();
  548. for (size_t i = 0; i < size; ++i)
  549. m_jumps[i].linkTo(label, masm);
  550. m_jumps.clear();
  551. }
  552. void append(Jump jump)
  553. {
  554. m_jumps.append(jump);
  555. }
  556. void append(const JumpList& other)
  557. {
  558. m_jumps.append(other.m_jumps.begin(), other.m_jumps.size());
  559. }
  560. bool empty()
  561. {
  562. return !m_jumps.size();
  563. }
  564. void clear()
  565. {
  566. m_jumps.clear();
  567. }
  568. const JumpVector& jumps() const { return m_jumps; }
  569. private:
  570. JumpVector m_jumps;
  571. };
  572. // Section 3: Misc admin methods
  573. #if ENABLE(DFG_JIT)
  574. Label labelIgnoringWatchpoints()
  575. {
  576. Label result;
  577. result.m_label = m_assembler.labelIgnoringWatchpoints();
  578. return result;
  579. }
  580. #else
  581. Label labelIgnoringWatchpoints()
  582. {
  583. return label();
  584. }
  585. #endif
  586. Label label()
  587. {
  588. return Label(this);
  589. }
  590. void padBeforePatch()
  591. {
  592. // Rely on the fact that asking for a label already does the padding.
  593. (void)label();
  594. }
  595. Label watchpointLabel()
  596. {
  597. Label result;
  598. result.m_label = m_assembler.labelForWatchpoint();
  599. return result;
  600. }
  601. Label align()
  602. {
  603. m_assembler.align(16);
  604. return Label(this);
  605. }
  606. #if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
  607. class RegisterAllocationOffset {
  608. public:
  609. RegisterAllocationOffset(unsigned offset)
  610. : m_offset(offset)
  611. {
  612. }
  613. void check(unsigned low, unsigned high)
  614. {
  615. RELEASE_ASSERT_WITH_MESSAGE(!(low <= m_offset && m_offset <= high), "Unsafe branch over register allocation at instruction offset %u in jump offset range %u..%u", m_offset, low, high);
  616. }
  617. private:
  618. unsigned m_offset;
  619. };
  620. void addRegisterAllocationAtOffset(unsigned offset)
  621. {
  622. m_registerAllocationForOffsets.append(RegisterAllocationOffset(offset));
  623. }
  624. void clearRegisterAllocationOffsets()
  625. {
  626. m_registerAllocationForOffsets.clear();
  627. }
  628. void checkRegisterAllocationAgainstBranchRange(unsigned offset1, unsigned offset2)
  629. {
  630. if (offset1 > offset2)
  631. std::swap(offset1, offset2);
  632. size_t size = m_registerAllocationForOffsets.size();
  633. for (size_t i = 0; i < size; ++i)
  634. m_registerAllocationForOffsets[i].check(offset1, offset2);
  635. }
  636. #endif
  637. template<typename T, typename U>
  638. static ptrdiff_t differenceBetween(T from, U to)
  639. {
  640. return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label);
  641. }
  642. static ptrdiff_t differenceBetweenCodePtr(const MacroAssemblerCodePtr& a, const MacroAssemblerCodePtr& b)
  643. {
  644. return reinterpret_cast<ptrdiff_t>(b.executableAddress()) - reinterpret_cast<ptrdiff_t>(a.executableAddress());
  645. }
  646. unsigned debugOffset() { return m_assembler.debugOffset(); }
  647. ALWAYS_INLINE static void cacheFlush(void* code, size_t size)
  648. {
  649. AssemblerType::cacheFlush(code, size);
  650. }
  651. protected:
  652. AbstractMacroAssembler()
  653. : m_randomSource(cryptographicallyRandomNumber())
  654. {
  655. }
  656. AssemblerType m_assembler;
  657. uint32_t random()
  658. {
  659. return m_randomSource.getUint32();
  660. }
  661. WeakRandom m_randomSource;
  662. #if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
  663. Vector<RegisterAllocationOffset, 10> m_registerAllocationForOffsets;
  664. #endif
  665. #if ENABLE(JIT_CONSTANT_BLINDING)
  666. static bool scratchRegisterForBlinding() { return false; }
  667. static bool shouldBlindForSpecificArch(uint32_t) { return true; }
  668. static bool shouldBlindForSpecificArch(uint64_t) { return true; }
  669. #endif
  670. friend class LinkBuffer;
  671. friend class RepatchBuffer;
  672. #if ENABLE(DETACHED_JIT) && !BUILDING_DETACHED_JIT
  673. static void linkJump(void* code, Jump jump, CodeLocationLabel target);
  674. static void linkPointer(void* code, AssemblerLabel label, void* value);
  675. static void repatchJump(CodeLocationJump jump, CodeLocationLabel destination);
  676. static void repatchNearCall(CodeLocationNearCall nearCall, CodeLocationLabel destination);
  677. static void repatchCompact(CodeLocationDataLabelCompact dataLabelCompact, int32_t value);
  678. static void repatchInt32(CodeLocationDataLabel32 dataLabel32, int32_t value);
  679. static void repatchPointer(CodeLocationDataLabelPtr dataLabelPtr, void* value);
  680. static void replaceWithLoad(CodeLocationConvertibleLoad label);
  681. static void replaceWithAddressComputation(CodeLocationConvertibleLoad label);
  682. #else
  683. static void linkJump(void* code, Jump jump, CodeLocationLabel target)
  684. {
  685. AssemblerType::linkJump(code, jump.m_label, target.dataLocation());
  686. }
  687. static void linkPointer(void* code, AssemblerLabel label, void* value)
  688. {
  689. AssemblerType::linkPointer(code, label, value);
  690. }
  691. static void repatchJump(CodeLocationJump jump, CodeLocationLabel destination)
  692. {
  693. AssemblerType::relinkJump(jump.dataLocation(), destination.dataLocation());
  694. }
  695. static void repatchNearCall(CodeLocationNearCall nearCall, CodeLocationLabel destination)
  696. {
  697. AssemblerType::relinkCall(nearCall.dataLocation(), destination.executableAddress());
  698. }
  699. static void repatchCompact(CodeLocationDataLabelCompact dataLabelCompact, int32_t value)
  700. {
  701. AssemblerType::repatchCompact(dataLabelCompact.dataLocation(), value);
  702. }
  703. static void repatchInt32(CodeLocationDataLabel32 dataLabel32, int32_t value)
  704. {
  705. AssemblerType::repatchInt32(dataLabel32.dataLocation(), value);
  706. }
  707. static void repatchPointer(CodeLocationDataLabelPtr dataLabelPtr, void* value)
  708. {
  709. AssemblerType::repatchPointer(dataLabelPtr.dataLocation(), value);
  710. }
  711. static void replaceWithLoad(CodeLocationConvertibleLoad label)
  712. {
  713. AssemblerType::replaceWithLoad(label.dataLocation());
  714. }
  715. static void replaceWithAddressComputation(CodeLocationConvertibleLoad label)
  716. {
  717. AssemblerType::replaceWithAddressComputation(label.dataLocation());
  718. }
  719. #endif
  720. static void* getLinkerAddress(void* code, AssemblerLabel label)
  721. {
  722. return AssemblerType::getRelocatedAddress(code, label);
  723. }
  724. static unsigned getLinkerCallReturnOffset(Call call)
  725. {
  726. return AssemblerType::getCallReturnOffset(call.m_label);
  727. }
  728. static void* readPointer(CodeLocationDataLabelPtr dataLabelPtr)
  729. {
  730. return AssemblerType::readPointer(dataLabelPtr.dataLocation());
  731. }
  732. };
  733. } // namespace JSC
  734. #endif // ENABLE(ASSEMBLER)
  735. #endif // AbstractMacroAssembler_h