/*
 * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include "CodeBlock.h"
#include "CodeOrigin.h"
#include "Instruction.h"
#include "JITStubRoutine.h"
#include "MacroAssembler.h"
#include "Options.h"
#include "RegisterSet.h"
#include "Structure.h"
#include "StructureSet.h"
#include "StructureStubClearingWatchpoint.h"
#include "StubInfoSummary.h"
#include <wtf/Box.h>

namespace JSC {

#if ENABLE(JIT)

class AccessCase;
class AccessGenerationResult;
class PolymorphicAccess;

enum class AccessType : int8_t {
    GetById,
    GetByIdWithThis,
    GetByIdDirect,
    TryGetById,
    GetByVal,
    Put,
    In,
    InstanceOf
};

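// How each CacheType is realized: the GetByIdSelf, PutByIdReplace, and InByIdSelf
// cases are monomorphic and keep their expected structure and property offset inline
// in the stub info (see the byIdSelf union member below); Stub means a heap-allocated
// PolymorphicAccess owns the access cases; ArrayLength and StringLength are
// special-cased fast paths for the "length" property.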
enum class CacheType : int8_t {
    Unset,
    GetByIdSelf,
    PutByIdReplace,
    InByIdSelf,
    Stub,
    ArrayLength,
    StringLength
};

class StructureStubInfo {
    WTF_MAKE_NONCOPYABLE(StructureStubInfo);
    WTF_MAKE_FAST_ALLOCATED;
public:
    StructureStubInfo(AccessType);
    ~StructureStubInfo();

    void initGetByIdSelf(CodeBlock*, Structure* baseObjectStructure, PropertyOffset, const Identifier&);
    void initArrayLength();
    void initStringLength();
    void initPutByIdReplace(CodeBlock*, Structure* baseObjectStructure, PropertyOffset);
    void initInByIdSelf(CodeBlock*, Structure* baseObjectStructure, PropertyOffset);

    AccessGenerationResult addAccessCase(const GCSafeConcurrentJSLocker&, CodeBlock*, const Identifier&, std::unique_ptr<AccessCase>);

    void reset(CodeBlock*);

    void deref();
    void aboutToDie();

    // Check if the stub has weak references that are dead. If it does, then it resets itself,
    // either entirely or just enough to ensure that those dead pointers don't get used anymore.
    void visitWeakReferences(CodeBlock*);

    // This returns true if it has marked everything that it will ever mark.
    bool propagateTransitions(SlotVisitor&);

    ALWAYS_INLINE bool considerCaching(VM& vm, CodeBlock* codeBlock, Structure* structure, UniquedStringImpl* impl = nullptr)
    {
        DisallowGC disallowGC;

        // We never cache non-cells.
        if (!structure) {
            sawNonCell = true;
            return false;
        }

        // This method is called from the Optimize variants of IC slow paths. The first part of this
        // method tries to determine whether the Optimize variant should really behave like the
        // non-Optimize variant and leave the IC untouched.
        //
        // If we determine that we should do something to the IC, then the next order of business is
        // to determine whether this Structure would impact the IC at all. We know that it won't if
        // we have already buffered something on its behalf. That's what the bufferedStructures set
        // is for.

        everConsidered = true;
        if (!countdown) {
            // Check if we have been repatching too frequently. If so, then we should cool off
            // for a while.
            WTF::incrementWithSaturation(repatchCount);
            if (repatchCount > Options::repatchCountForCoolDown()) {
                // We've been repatching too much, so don't do it now.
                repatchCount = 0;
                // The amount of time we require for cool-down depends on the number of times we've
                // had to cool down in the past. The relationship is exponential. The max value we
                // allow here is 2^8 - 2 (i.e. 254), since the slow paths may increment the count to
                // indicate that they'd like to temporarily skip patching just this once.
                countdown = WTF::leftShiftWithSaturation(
                    static_cast<uint8_t>(Options::initialCoolDownCount()),
                    numberOfCoolDowns,
                    static_cast<uint8_t>(std::numeric_limits<uint8_t>::max() - 1));
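                // For example, if Options::initialCoolDownCount() were 20 (an illustrative
                // value, not necessarily the default), successive cool-downs would wait for
                // 20, 40, 80, ... repatch attempts before trying again, saturating at 254.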
                WTF::incrementWithSaturation(numberOfCoolDowns);

                // We may still have had something buffered. Trigger generation now.
                bufferingCountdown = 0;
                return true;
            }

            // We don't want to return false due to buffering indefinitely.
            if (!bufferingCountdown) {
                // Note that when this returns true, it's possible that we will not even get an
                // AccessCase because this may cause Repatch.cpp to simply do in-place repatching.
                return true;
            }

            bufferingCountdown--;

            // Now protect the IC buffering. We want to proceed only if this is a structure that
            // we don't already have a case buffered for. Note that if this returns true but the
            // bufferingCountdown is not zero, then we will buffer the access case for later without
            // immediately generating code for it.
            //
            // NOTE: This will behave oddly for InstanceOf if the user varies the prototype but not
            // the base's structure. That seems unlikely for the canonical use of instanceof, where
            // the prototype is fixed.
            bool isNewlyAdded = bufferedStructures.add({ structure, impl }).isNewEntry;
            if (isNewlyAdded)
                vm.heap.writeBarrier(codeBlock);
            return isNewlyAdded;
        }
        countdown--;
        return false;
    }
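
    // A sketch of the expected calling pattern from an Optimize slow path, approximating
    // the gating logic in JITOperations.cpp/Repatch.cpp (names are illustrative, not
    // verbatim):
    //
    //     if (stubInfo->considerCaching(vm, codeBlock, baseValue.structureOrNull()))
    //         repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);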

    StubInfoSummary summary() const;

    static StubInfoSummary summary(const StructureStubInfo*);

    bool containsPC(void* pc) const;

    CodeOrigin codeOrigin;
private:
    Box<Identifier> m_getByIdSelfIdentifier;
public:

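    // Inline cache state for the monomorphic cases. While m_cacheType is GetByIdSelf,
    // PutByIdReplace, or InByIdSelf, byIdSelf holds the expected structure and the
    // cached property's offset; once the IC goes polymorphic (CacheType::Stub), stub
    // points at the PolymorphicAccess that owns the buffered and generated AccessCases.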
    union {
        struct {
            WriteBarrierBase<Structure> baseObjectStructure;
            PropertyOffset offset;
        } byIdSelf;
        PolymorphicAccess* stub;
    } u;

    Box<Identifier> getByIdSelfIdentifier()
    {
        RELEASE_ASSERT(m_cacheType == CacheType::GetByIdSelf);
        return m_getByIdSelfIdentifier;
    }

private:
    // Represents those structures that already have buffered AccessCases in the PolymorphicAccess.
    // Note that it's always safe to clear this. If we clear it prematurely, then if we see the same
    // structure again during this buffering countdown, we will create an AccessCase object for it.
    // That's not so bad - we'll get rid of the redundant ones once we regenerate.
    HashSet<std::pair<Structure*, RefPtr<UniquedStringImpl>>> bufferedStructures;
public:

    struct {
        CodeLocationLabel<JITStubRoutinePtrTag> start; // This is either the start of the inline IC for *byId caches, or the location of the patchable jump for 'instanceof' caches.
        CodeLocationLabel<JSInternalPtrTag> doneLocation;
        CodeLocationCall<JSInternalPtrTag> slowPathCallLocation;
        CodeLocationLabel<JITStubRoutinePtrTag> slowPathStartLocation;

        RegisterSet usedRegisters;

        uint32_t inlineSize() const
        {
            int32_t inlineSize = MacroAssembler::differenceBetweenCodePtr(start, doneLocation);
            ASSERT(inlineSize >= 0);
            return inlineSize;
        }

        GPRReg baseGPR;
        GPRReg valueGPR;
        union {
            GPRReg thisGPR;
            GPRReg prototypeGPR;
            GPRReg propertyGPR;
        } u;
#if USE(JSVALUE32_64)
        GPRReg valueTagGPR;
        GPRReg baseTagGPR;
        GPRReg thisTagGPR;
#endif
    } patch;

    GPRReg baseGPR() const
    {
        return patch.baseGPR;
    }

    CodeLocationCall<JSInternalPtrTag> slowPathCallLocation() { return patch.slowPathCallLocation; }
    CodeLocationLabel<JSInternalPtrTag> doneLocation() { return patch.doneLocation; }
    CodeLocationLabel<JITStubRoutinePtrTag> slowPathStartLocation() { return patch.slowPathStartLocation; }

    CodeLocationJump<JSInternalPtrTag> patchableJump()
    {
        ASSERT(accessType == AccessType::InstanceOf);
        return patch.start.jumpAtOffset<JSInternalPtrTag>(0);
    }

    JSValueRegs valueRegs() const
    {
        return JSValueRegs(
#if USE(JSVALUE32_64)
            patch.valueTagGPR,
#endif
            patch.valueGPR);
    }
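
    // On JSVALUE32_64 targets a JSValue spans a tag/payload register pair, so valueRegs()
    // folds in patch.valueTagGPR there; on 64-bit targets the value fits in valueGPR alone.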

    bool thisValueIsInThisGPR() const { return accessType == AccessType::GetByIdWithThis; }

#if !ASSERT_DISABLED
    void checkConsistency();
#else
    ALWAYS_INLINE void checkConsistency() { }
#endif

    AccessType accessType;
private:
    CacheType m_cacheType;
    void setCacheType(CacheType);
public:
    CacheType cacheType() const { return m_cacheType; }
    uint8_t countdown; // We repatch only when this is zero. If not zero, we decrement.
    uint8_t repatchCount;
    uint8_t numberOfCoolDowns;

    CallSiteIndex callSiteIndex;

    uint8_t bufferingCountdown;
    bool resetByGC : 1;
    bool tookSlowPath : 1;
    bool everConsidered : 1;
    bool prototypeIsKnownObject : 1; // Only relevant for InstanceOf.
    bool sawNonCell : 1;
    bool hasConstantIdentifier : 1;
    bool propertyIsString : 1;
    bool propertyIsInt32 : 1;
    bool propertyIsSymbol : 1;
};

inline CodeOrigin getStructureStubInfoCodeOrigin(StructureStubInfo& structureStubInfo)
{
    return structureStubInfo.codeOrigin;
}

inline auto appropriateOptimizingGetByIdFunction(AccessType type) -> decltype(&operationGetByIdOptimize)
{
    switch (type) {
    case AccessType::GetById:
        return operationGetByIdOptimize;
    case AccessType::TryGetById:
        return operationTryGetByIdOptimize;
    case AccessType::GetByIdDirect:
        return operationGetByIdDirectOptimize;
    case AccessType::GetByIdWithThis:
    default:
        ASSERT_NOT_REACHED();
        return nullptr;
    }
}

inline auto appropriateGenericGetByIdFunction(AccessType type) -> decltype(&operationGetByIdGeneric)
{
    switch (type) {
    case AccessType::GetById:
        return operationGetByIdGeneric;
    case AccessType::TryGetById:
        return operationTryGetByIdGeneric;
    case AccessType::GetByIdDirect:
        return operationGetByIdDirectGeneric;
    case AccessType::GetByIdWithThis:
    default:
        ASSERT_NOT_REACHED();
        return nullptr;
    }
}
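
// Example of how a JIT might select the slow-path operation matching the flavor of
// get_by_id it is compiling (a minimal sketch; real call sites thread the result
// through their slow-path generators):
//
//     auto optimizingOperation = appropriateOptimizingGetByIdFunction(AccessType::TryGetById);
//     auto genericOperation = appropriateGenericGetByIdFunction(AccessType::TryGetById);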

#else

class StructureStubInfo;

#endif // ENABLE(JIT)

typedef HashMap<CodeOrigin, StructureStubInfo*, CodeOriginApproximateHash> StubInfoMap;
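
// Example (hypothetical call site): an optimizing compiler can use this map to find
// the stub info recorded for a given bytecode location:
//
//     StructureStubInfo* stubInfo = stubInfoMap.get(codeOrigin);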

} // namespace JSC