/*
 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "StructureStubInfo.h"

#include "JSCInlines.h"
#include "JSObject.h"
#include "PolymorphicAccess.h"
#include "Repatch.h"

namespace JSC {

#if ENABLE(JIT)

namespace StructureStubInfoInternal {
static constexpr bool verbose = false;
}

StructureStubInfo::StructureStubInfo(AccessType accessType)
    : accessType(accessType)
    , m_cacheType(CacheType::Unset)
    , countdown(1) // For a totally clear stub, we'll patch it after the first execution.
    , repatchCount(0)
    , numberOfCoolDowns(0)
    , bufferingCountdown(Options::repatchBufferingCountdown())
    , resetByGC(false)
    , tookSlowPath(false)
    , everConsidered(false)
    , prototypeIsKnownObject(false)
    , sawNonCell(false)
    , hasConstantIdentifier(true)
    , propertyIsString(false)
    , propertyIsInt32(false)
    , propertyIsSymbol(false)
{
}

StructureStubInfo::~StructureStubInfo() = default;

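// Caches a monomorphic self-access get_by_id: record the base object's structure and the
// property's offset so the fast path can load the value directly off the base object.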
void StructureStubInfo::initGetByIdSelf(CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset, const Identifier& identifier)
{
    ASSERT(hasConstantIdentifier);
    setCacheType(CacheType::GetByIdSelf);
    m_getByIdSelfIdentifier = Box<Identifier>::create(identifier);

    u.byIdSelf.baseObjectStructure.set(
        codeBlock->vm(), codeBlock, baseObjectStructure);
    u.byIdSelf.offset = offset;
}

void StructureStubInfo::initArrayLength()
{
    setCacheType(CacheType::ArrayLength);
}

void StructureStubInfo::initStringLength()
{
    setCacheType(CacheType::StringLength);
}

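// Caches a monomorphic put_by_id that replaces an existing property, so no structure
// transition is involved.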
void StructureStubInfo::initPutByIdReplace(CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset)
{
    setCacheType(CacheType::PutByIdReplace);

    u.byIdSelf.baseObjectStructure.set(
        codeBlock->vm(), codeBlock, baseObjectStructure);
    u.byIdSelf.offset = offset;
}

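// Caches a monomorphic in_by_id against a single base object structure.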
void StructureStubInfo::initInByIdSelf(CodeBlock* codeBlock, Structure* baseObjectStructure, PropertyOffset offset)
{
    setCacheType(CacheType::InByIdSelf);

    u.byIdSelf.baseObjectStructure.set(
        codeBlock->vm(), codeBlock, baseObjectStructure);
    u.byIdSelf.offset = offset;
}

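// Only CacheType::Stub owns an out-of-line allocation (the PolymorphicAccess); every other
// cache type stores its state inline in the union, so there is nothing to free.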
void StructureStubInfo::deref()
{
    switch (m_cacheType) {
    case CacheType::Stub:
        delete u.stub;
        return;
    case CacheType::Unset:
    case CacheType::GetByIdSelf:
    case CacheType::PutByIdReplace:
    case CacheType::InByIdSelf:
    case CacheType::ArrayLength:
    case CacheType::StringLength:
        return;
    }

    RELEASE_ASSERT_NOT_REACHED();
}

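// Called when the owning CodeBlock is about to be destroyed; gives the PolymorphicAccess a
// chance to detach its generated stub routine before the code goes away.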
void StructureStubInfo::aboutToDie()
{
    switch (m_cacheType) {
    case CacheType::Stub:
        u.stub->aboutToDie();
        return;
    case CacheType::Unset:
    case CacheType::GetByIdSelf:
    case CacheType::PutByIdReplace:
    case CacheType::InByIdSelf:
    case CacheType::ArrayLength:
    case CacheType::StringLength:
        return;
    }

    RELEASE_ASSERT_NOT_REACHED();
}

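// Adds a new access case to this stub. New cases are buffered first; only when the
// buffering countdown hits zero do we regenerate the polymorphic stub code from everything
// accumulated so far. The result tells the caller whether we buffered, generated code, or
// gave up.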
AccessGenerationResult StructureStubInfo::addAccessCase(
    const GCSafeConcurrentJSLocker& locker, CodeBlock* codeBlock, const Identifier& ident, std::unique_ptr<AccessCase> accessCase)
{
    checkConsistency();

    VM& vm = codeBlock->vm();
    ASSERT(vm.heap.isDeferred());
    AccessGenerationResult result = ([&] () -> AccessGenerationResult {
        if (StructureStubInfoInternal::verbose)
            dataLog("Adding access case: ", accessCase, "\n");

        if (!accessCase)
            return AccessGenerationResult::GaveUp;

        AccessGenerationResult result;

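        // If we already have a polymorphic stub, just try to add the new case to it.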
        if (m_cacheType == CacheType::Stub) {
            result = u.stub->addCase(locker, vm, codeBlock, *this, WTFMove(accessCase));

            if (StructureStubInfoInternal::verbose)
                dataLog("Had stub, result: ", result, "\n");

            if (result.shouldResetStubAndFireWatchpoints())
                return result;

            if (!result.buffered()) {
                bufferedStructures.clear();
                return result;
            }
        } else {
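            // We are upgrading from a simple cache (or no cache) to a polymorphic stub.
            // Convert whatever this stub caches now into an AccessCase and add it alongside
            // the new case.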
            std::unique_ptr<PolymorphicAccess> access = makeUnique<PolymorphicAccess>();

            Vector<std::unique_ptr<AccessCase>, 2> accessCases;

            std::unique_ptr<AccessCase> previousCase = AccessCase::fromStructureStubInfo(vm, codeBlock, ident, *this);
            if (previousCase)
                accessCases.append(WTFMove(previousCase));

            accessCases.append(WTFMove(accessCase));

            result = access->addCases(locker, vm, codeBlock, *this, WTFMove(accessCases));

            if (StructureStubInfoInternal::verbose)
                dataLog("Created stub, result: ", result, "\n");

            if (result.shouldResetStubAndFireWatchpoints())
                return result;

            if (!result.buffered()) {
                bufferedStructures.clear();
                return result;
            }

            setCacheType(CacheType::Stub);
            u.stub = access.release();
        }

        RELEASE_ASSERT(!result.generatedSomeCode());

        // If we didn't buffer any cases then bail. Nothing changed, so we'll just try
        // again later, subject to cool-down.
        if (!result.buffered()) {
            if (StructureStubInfoInternal::verbose)
                dataLog("Didn't buffer anything, bailing.\n");
            bufferedStructures.clear();
            return result;
        }

        // A nonzero buffering countdown means we should keep buffering rather than repatch now.
        if (bufferingCountdown) {
            if (StructureStubInfoInternal::verbose)
                dataLog("Countdown is too high: ", bufferingCountdown, ".\n");
            return result;
        }


        // Forget the buffered structures so that all future attempts to cache get fully handled by the
        // PolymorphicAccess.
        bufferedStructures.clear();

        result = u.stub->regenerate(locker, vm, codeBlock, *this);

        if (StructureStubInfoInternal::verbose)
            dataLog("Regeneration result: ", result, "\n");

        RELEASE_ASSERT(!result.buffered());

        if (!result.generatedSomeCode())
            return result;

        // If we generated some code then we don't want to attempt to repatch in the future until we
        // gather enough cases.
        bufferingCountdown = Options::repatchBufferingCountdown();
        return result;
    })();
    vm.heap.writeBarrier(codeBlock);
    return result;
}

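// Restores this stub to its unlinked state: repatches the inline cache back to the
// appropriate slow path and releases any generated stub code.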
void StructureStubInfo::reset(CodeBlock* codeBlock)
{
    bufferedStructures.clear();

    if (m_cacheType == CacheType::Unset)
        return;

    if (Options::verboseOSR()) {
        // This can be called from GC destructor calls, so we don't try to do a full dump
        // of the CodeBlock.
        dataLog("Clearing structure cache (kind ", static_cast<int>(accessType), ") in ", RawPointer(codeBlock), ".\n");
    }

    switch (accessType) {
    case AccessType::TryGetById:
        resetGetBy(codeBlock, *this, GetByKind::Try);
        break;
    case AccessType::GetById:
        resetGetBy(codeBlock, *this, GetByKind::Normal);
        break;
    case AccessType::GetByIdWithThis:
        resetGetBy(codeBlock, *this, GetByKind::WithThis);
        break;
    case AccessType::GetByIdDirect:
        resetGetBy(codeBlock, *this, GetByKind::Direct);
        break;
    case AccessType::GetByVal:
        resetGetBy(codeBlock, *this, GetByKind::NormalByVal);
        break;
    case AccessType::Put:
        resetPutByID(codeBlock, *this);
        break;
    case AccessType::In:
        resetInByID(codeBlock, *this);
        break;
    case AccessType::InstanceOf:
        resetInstanceOf(*this);
        break;
    }

    deref();
    setCacheType(CacheType::Unset);
}

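// Called during garbage collection. Drops buffered structures that are no longer marked,
// and resets the whole stub if any structure or access case it depends on has died.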
void StructureStubInfo::visitWeakReferences(CodeBlock* codeBlock)
{
    VM& vm = codeBlock->vm();

    bufferedStructures.removeIf(
        [&] (auto& pair) -> bool {
            Structure* structure = pair.first;
            return !vm.heap.isMarked(structure);
        });

    switch (m_cacheType) {
    case CacheType::GetByIdSelf:
    case CacheType::PutByIdReplace:
    case CacheType::InByIdSelf:
        if (vm.heap.isMarked(u.byIdSelf.baseObjectStructure.get()))
            return;
        break;
    case CacheType::Stub:
        if (u.stub->visitWeak(vm))
            return;
        break;
    default:
        return;
    }

    reset(codeBlock);
    resetByGC = true;
}

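// Marks structures that are cheap to mark (e.g. to keep property transitions alive across
// GC). Returns true if everything relevant was already marked or could be marked cheaply;
// false means the caller should revisit this stub on a later marking pass.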
bool StructureStubInfo::propagateTransitions(SlotVisitor& visitor)
{
    switch (m_cacheType) {
    case CacheType::Unset:
    case CacheType::ArrayLength:
    case CacheType::StringLength:
        return true;
    case CacheType::GetByIdSelf:
    case CacheType::PutByIdReplace:
    case CacheType::InByIdSelf:
        return u.byIdSelf.baseObjectStructure->markIfCheap(visitor);
    case CacheType::Stub:
        return u.stub->propagateTransitions(visitor);
    }

    RELEASE_ASSERT_NOT_REACHED();
    return true;
}

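// Summarizes this access site for the optimizing compilers: can it be treated as a simple
// inline cache, does it make calls (e.g. to getters or setters), or must it take the slow
// path?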
StubInfoSummary StructureStubInfo::summary() const
{
    StubInfoSummary takesSlowPath = StubInfoSummary::TakesSlowPath;
    StubInfoSummary simple = StubInfoSummary::Simple;
    if (m_cacheType == CacheType::Stub) {
        PolymorphicAccess* list = u.stub;
        for (unsigned i = 0; i < list->size(); ++i) {
            const AccessCase& access = list->at(i);
            if (access.doesCalls()) {
                takesSlowPath = StubInfoSummary::TakesSlowPathAndMakesCalls;
                simple = StubInfoSummary::MakesCalls;
                break;
            }
        }
    }

    if (tookSlowPath || sawNonCell)
        return takesSlowPath;

    if (!everConsidered)
        return StubInfoSummary::NoInformation;

    return simple;
}

StubInfoSummary StructureStubInfo::summary(const StructureStubInfo* stubInfo)
{
    if (!stubInfo)
        return StubInfoSummary::NoInformation;

    return stubInfo->summary();
}

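// Returns whether the given machine PC lies within code generated for this stub.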
bool StructureStubInfo::containsPC(void* pc) const
{
    if (m_cacheType != CacheType::Stub)
        return false;
    return u.stub->containsPC(pc);
}

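// The boxed identifier is only meaningful for CacheType::GetByIdSelf, so drop it whenever
// we transition away from that cache type.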
void StructureStubInfo::setCacheType(CacheType newCacheType)
{
    if (m_cacheType == CacheType::GetByIdSelf)
        m_getByIdSelfIdentifier = nullptr;
    m_cacheType = newCacheType;
}

#if !ASSERT_DISABLED
void StructureStubInfo::checkConsistency()
{
    if (thisValueIsInThisGPR()) {
        // We currently use a union for both "thisGPR" and "propertyGPR". If this were
        // not the case, we'd need to take one of them out of the union.
        RELEASE_ASSERT(hasConstantIdentifier);
    }
}
#endif

#endif // ENABLE(JIT)

} // namespace JSC