// GetByIdStatus.cpp
  1. /*
  2. * Copyright (C) 2012 Apple Inc. All rights reserved.
  3. *
  4. * Redistribution and use in source and binary forms, with or without
  5. * modification, are permitted provided that the following conditions
  6. * are met:
  7. * 1. Redistributions of source code must retain the above copyright
  8. * notice, this list of conditions and the following disclaimer.
  9. * 2. Redistributions in binary form must reproduce the above copyright
  10. * notice, this list of conditions and the following disclaimer in the
  11. * documentation and/or other materials provided with the distribution.
  12. *
  13. * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
  14. * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  15. * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
  16. * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
  17. * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
  18. * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
  19. * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  20. * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
  21. * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  22. * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  23. * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  24. */
  25. #include "config.h"
  26. #include "GetByIdStatus.h"
  27. #include "CodeBlock.h"
  28. #include "JSScope.h"
  29. #include "LLIntData.h"
  30. #include "LowLevelInterpreter.h"
  31. #include "Operations.h"
  32. namespace JSC {
  33. #if !(ENABLE(DETACHED_JIT) && BUILDING_DETACHED_JIT)
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, Identifier& ident)
{
    // Derive a get_by_id status from the LLInt's inline cache for the given
    // bytecode index. Returns NoInformation when LLInt is disabled, the cache
    // never recorded a structure, or the property is not at a valid offset —
    // callers then treat the access as not profitably inlinable.
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(bytecodeIndex);
    UNUSED_PARAM(ident);
#if ENABLE(LLINT)
    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;

    // An op_get_array_length specialization carries no structure cache we can use.
    if (instruction[0].u.opcode == LLInt::getOpcode(llint_op_get_array_length))
        return GetByIdStatus(NoInformation, false);

    // Operand slot 4 holds the structure the LLInt cached; a null structure
    // means the cache never warmed up.
    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);

    unsigned attributesIgnored;
    JSCell* specificValue;
    PropertyOffset offset = structure->get(
        *profiledBlock->vm(), ident, attributesIgnored, specificValue);
    // Dictionary structures can be mutated in place, so any cached specific
    // value cannot be trusted as a constant.
    if (structure->isDictionary())
        specificValue = 0;

    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);

    return GetByIdStatus(Simple, false, StructureSet(structure), offset, specificValue);
#else
    return GetByIdStatus(NoInformation, false);
#endif
}
// Fill in `result` for a prototype-chain access rooted at `structure`, whose
// cached chain structures have already been appended to result.m_chain.
// On failure (chain mismatch or invalid offset) `result` is left unfilled, so
// the caller's invalid-offset fallback demotes the access to TakesSlowPath.
void GetByIdStatus::computeForChain(GetByIdStatus& result, CodeBlock* profiledBlock, Identifier& ident, Structure* structure)
{
#if ENABLE(JIT) && ENABLE(VALUE_PROFILER)
    // Validate the chain. If the chain is invalid, then currently the best thing
    // we can do is to assume that TakesSlow is true. In the future, it might be
    // worth exploring reifying the structure chain from the structure we've got
    // instead of using the one from the cache, since that will do the right things
    // if the structure chain has changed. But that may be harder, because we may
    // then end up having a different type of access altogether. And it currently
    // does not appear to be worth it to do so -- effectively, the heuristic we
    // have now is that if the structure chain has changed between when it was
    // cached on in the baseline JIT and when the DFG tried to inline the access,
    // then we fall back on a polymorphic access.
    Structure* currentStructure = structure;
    JSObject* currentObject = 0;
    for (unsigned i = 0; i < result.m_chain.size(); ++i) {
        ASSERT(!currentStructure->isDictionary());
        // NOTE: order matters — step to the prototype of the *previous*
        // structure first, then check it still has the structure the baseline
        // JIT cached at this position of the chain.
        currentObject = asObject(currentStructure->prototypeForLookup(profiledBlock));
        currentStructure = result.m_chain[i];
        if (currentObject->structure() != currentStructure)
            return; // Chain changed since it was cached; bail without filling result.
    }
    ASSERT(currentObject);

    unsigned attributesIgnored;
    JSCell* specificValue;
    // Look the property up on the last structure in the chain (the holder).
    result.m_offset = currentStructure->get(
        *profiledBlock->vm(), ident, attributesIgnored, specificValue);
    // Dictionary holders cannot promise a stable specific value.
    if (currentStructure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(result.m_offset))
        return;

    // Record the *base* structure (not the holder's): the structure set
    // describes what the base object must look like for this access to apply.
    result.m_structureSet.add(structure);
    result.m_specificValue = JSValue(specificValue);
#else
    UNUSED_PARAM(result);
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(ident);
    UNUSED_PARAM(structure);
    UNREACHABLE_FOR_PLATFORM();
#endif
}
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, unsigned bytecodeIndex, Identifier& ident)
{
    // Summarize what the baseline JIT's structure stub (inline cache) learned
    // about this get_by_id, so the DFG can decide whether and how to inline
    // the access. Falls back to the LLInt cache when no JIT stub exists yet.
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(bytecodeIndex);
    UNUSED_PARAM(ident);
#if ENABLE(JIT) && ENABLE(VALUE_PROFILER)
    if (!profiledBlock->numberOfStructureStubInfos())
        return computeFromLLInt(profiledBlock, bytecodeIndex, ident);

    // First check if it makes either calls, in which case we want to be super careful, or
    // if it's not set at all, in which case we punt.
    StructureStubInfo& stubInfo = profiledBlock->getStubInfo(bytecodeIndex);
    if (!stubInfo.seen)
        return computeFromLLInt(profiledBlock, bytecodeIndex, ident);

    // A GC reset means the stub's cached structures are stale; be conservative.
    if (stubInfo.resetByGC)
        return GetByIdStatus(TakesSlowPath, true);

    // Extract the polymorphic access list, if this stub has one, so we can
    // scan it for non-direct (getter/custom accessor) entries up front.
    PolymorphicAccessStructureList* list;
    int listSize;
    switch (stubInfo.accessType) {
    case access_get_by_id_self_list:
        list = stubInfo.u.getByIdSelfList.structureList;
        listSize = stubInfo.u.getByIdSelfList.listSize;
        break;
    case access_get_by_id_proto_list:
        list = stubInfo.u.getByIdProtoList.structureList;
        listSize = stubInfo.u.getByIdProtoList.listSize;
        break;
    default:
        list = 0;
        listSize = 0;
        break;
    }
    for (int i = 0; i < listSize; ++i) {
        if (!list->list[i].isDirect)
            return GetByIdStatus(MakesCalls, true);
    }

    // Next check if it takes slow case, in which case we want to be kind of careful.
    if (profiledBlock->likelyToTakeSlowCase(bytecodeIndex))
        return GetByIdStatus(TakesSlowPath, true);

    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo.accessType) {
    case access_unset:
        return computeFromLLInt(profiledBlock, bytecodeIndex, ident);

    case access_get_by_id_self: {
        // Monomorphic self access: the property lives on the base object.
        Structure* structure = stubInfo.u.getByIdSelf.baseObjectStructure.get();
        unsigned attributesIgnored;
        JSCell* specificValue;
        result.m_offset = structure->get(
            *profiledBlock->vm(), ident, attributesIgnored, specificValue);
        if (structure->isDictionary())
            specificValue = 0;

        if (isValidOffset(result.m_offset)) {
            result.m_structureSet.add(structure);
            result.m_specificValue = JSValue(specificValue);
        }

        if (isValidOffset(result.m_offset))
            ASSERT(result.m_structureSet.size());
        break;
    }

    case access_get_by_id_self_list: {
        // Polymorphic self access: every structure in the list must agree on
        // the property offset, or we give up on deriving a simple access.
        for (int i = 0; i < listSize; ++i) {
            ASSERT(list->list[i].isDirect);

            Structure* structure = list->list[i].base.get();
            if (result.m_structureSet.contains(structure))
                continue;

            unsigned attributesIgnored;
            JSCell* specificValue;
            PropertyOffset myOffset = structure->get(
                *profiledBlock->vm(), ident, attributesIgnored, specificValue);
            if (structure->isDictionary())
                specificValue = 0;

            if (!isValidOffset(myOffset)) {
                result.m_offset = invalidOffset;
                break;
            }

            if (!i) {
                // First structure establishes the candidate offset/value.
                result.m_offset = myOffset;
                result.m_specificValue = JSValue(specificValue);
            } else if (result.m_offset != myOffset) {
                // Offsets disagree across structures; cannot inline simply.
                result.m_offset = invalidOffset;
                break;
            } else if (result.m_specificValue != JSValue(specificValue))
                result.m_specificValue = JSValue(); // Offsets agree, values differ: drop the constant.

            result.m_structureSet.add(structure);
        }

        if (isValidOffset(result.m_offset))
            ASSERT(result.m_structureSet.size());
        break;
    }

    case access_get_by_id_proto: {
        // One-hop prototype access; non-direct means a getter is involved.
        if (!stubInfo.u.getByIdProto.isDirect)
            return GetByIdStatus(MakesCalls, true);
        result.m_chain.append(stubInfo.u.getByIdProto.prototypeStructure.get());
        computeForChain(
            result, profiledBlock, ident,
            stubInfo.u.getByIdProto.baseObjectStructure.get());
        break;
    }

    case access_get_by_id_chain: {
        // Multi-hop prototype chain; computeForChain validates that the chain
        // is still the one the baseline JIT cached.
        if (!stubInfo.u.getByIdChain.isDirect)
            return GetByIdStatus(MakesCalls, true);
        for (unsigned i = 0; i < stubInfo.u.getByIdChain.count; ++i)
            result.m_chain.append(stubInfo.u.getByIdChain.chain->head()[i].get());
        computeForChain(
            result, profiledBlock, ident,
            stubInfo.u.getByIdChain.baseObjectStructure.get());
        break;
    }

    default:
        ASSERT(!isValidOffset(result.m_offset));
        break;
    }

    if (!isValidOffset(result.m_offset)) {
        // Nothing usable was derived; collapse to a conservative slow-path
        // status and clear any partially-filled state.
        result.m_state = TakesSlowPath;
        result.m_structureSet.clear();
        result.m_chain.clear();
        result.m_specificValue = JSValue();
    } else
        result.m_state = Simple;

    return result;
#else // ENABLE(JIT)
    return GetByIdStatus(NoInformation, false);
#endif // ENABLE(JIT)
}
  225. #endif
  226. GetByIdStatus GetByIdStatus::computeFor(VM& vm, Structure* structure, Identifier& ident)
  227. {
  228. // For now we only handle the super simple self access case. We could handle the
  229. // prototype case in the future.
  230. if (PropertyName(ident).asIndex() != PropertyName::NotAnIndex)
  231. return GetByIdStatus(TakesSlowPath);
  232. if (structure->typeInfo().overridesGetOwnPropertySlot())
  233. return GetByIdStatus(TakesSlowPath);
  234. if (!structure->propertyAccessesAreCacheable())
  235. return GetByIdStatus(TakesSlowPath);
  236. GetByIdStatus result;
  237. result.m_wasSeenInJIT = false; // To my knowledge nobody that uses computeFor(VM&, Structure*, Identifier&) reads this field, but I might as well be honest: no, it wasn't seen in the JIT, since I computed it statically.
  238. unsigned attributes;
  239. JSCell* specificValue;
  240. result.m_offset = structure->get(vm, ident, attributes, specificValue);
  241. if (!isValidOffset(result.m_offset))
  242. return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
  243. if (attributes & Accessor)
  244. return GetByIdStatus(MakesCalls);
  245. if (structure->isDictionary())
  246. specificValue = 0;
  247. result.m_structureSet.add(structure);
  248. result.m_specificValue = JSValue(specificValue);
  249. return result;
  250. }
  251. } // namespace JSC