/*
 * Copyright (C) 2012-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "GetByStatus.h"

#include "BytecodeStructs.h"
#include "CodeBlock.h"
#include "ComplexGetStatus.h"
#include "GetterSetterAccessCase.h"
#include "ICStatusUtils.h"
#include "InterpreterInlines.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCInlines.h"
#include "JSScope.h"
#include "LLIntData.h"
#include "LowLevelInterpreter.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "StructureStubInfo.h"
#include <wtf/ListDump.h>

namespace JSC {
namespace DOMJIT {
class GetterSetter;
}

bool GetByStatus::appendVariant(const GetByIdVariant& variant)
{
    return appendICStatusVariant(m_variants, variant);
}

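// Builds a status from LLInt metadata when no JIT stub is available. Only default-mode
// op_get_by_id and op_get_by_id_direct self accesses produce a Simple variant; proto-load
// modes, op_try_get_by_id, and op_get_by_val conservatively report NoInformation.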
GetByStatus GetByStatus::computeFromLLInt(CodeBlock* profiledBlock, BytecodeIndex bytecodeIndex, TrackIdentifiers trackIdentifiers)
{
    VM& vm = profiledBlock->vm();

    auto instruction = profiledBlock->instructions().at(bytecodeIndex.offset());

    StructureID structureID;
    const Identifier* identifier = nullptr;
    switch (instruction->opcodeID()) {
    case op_get_by_id: {
        auto& metadata = instruction->as<OpGetById>().metadata(profiledBlock);
        // FIXME: We should not just bail if we see a get_by_id_proto_load.
        // https://bugs.webkit.org/show_bug.cgi?id=158039
        if (metadata.m_modeMetadata.mode != GetByIdMode::Default)
            return GetByStatus(NoInformation, false);
        structureID = metadata.m_modeMetadata.defaultMode.structureID;

        identifier = &(profiledBlock->identifier(instruction->as<OpGetById>().m_property));
        break;
    }
    case op_get_by_id_direct:
        structureID = instruction->as<OpGetByIdDirect>().metadata(profiledBlock).m_structureID;
        identifier = &(profiledBlock->identifier(instruction->as<OpGetByIdDirect>().m_property));
        break;
    case op_try_get_by_id: {
        // FIXME: We should not just bail if we see a try_get_by_id.
        // https://bugs.webkit.org/show_bug.cgi?id=158039
        return GetByStatus(NoInformation, false);
    }

    case op_get_by_val:
        return GetByStatus(NoInformation, false);

    default: {
        ASSERT_NOT_REACHED();
        return GetByStatus(NoInformation, false);
    }
    }

    ASSERT_UNUSED(trackIdentifiers, trackIdentifiers == TrackIdentifiers::No); // We could make this work in the future, but nobody needs it right now.

    if (!structureID)
        return GetByStatus(NoInformation, false);

    Structure* structure = vm.heap.structureIDTable().get(structureID);

    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByStatus(NoInformation, false);

    unsigned attributes;
    PropertyOffset offset = structure->getConcurrently(identifier->impl(), attributes);
    if (!isValidOffset(offset))
        return GetByStatus(NoInformation, false);
    if (attributes & PropertyAttribute::CustomAccessorOrValue)
        return GetByStatus(NoInformation, false);

    GetByStatus result(Simple, false);
    result.appendVariant(GetByIdVariant(nullptr, StructureSet(structure), offset));
    return result;
}

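// Computes the status for a profiled (baseline) code block: when DFG_JIT is enabled the baseline
// stub is consulted first and any bad-cache OSR exit at this site downgrades the result to its
// slow version; if the stub has nothing to say, we fall back to LLInt metadata.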
GetByStatus GetByStatus::computeFor(CodeBlock* profiledBlock, ICStatusMap& map, BytecodeIndex bytecodeIndex, ExitFlag didExit, CallLinkStatus::ExitSiteData callExitSiteData, TrackIdentifiers trackIdentifiers)
{
    ConcurrentJSLocker locker(profiledBlock->m_lock);

    GetByStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfoWithoutExitSiteFeedback(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)).stubInfo, callExitSiteData, trackIdentifiers);

    if (didExit)
        return result.slowVersion();
#else
    UNUSED_PARAM(map);
    UNUSED_PARAM(didExit);
    UNUSED_PARAM(callExitSiteData);
#endif

    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, trackIdentifiers);

    return result;
}

#if ENABLE(JIT)
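// Constructs a status for a stub whose summary is not inlineable, refining TakesSlowPath and
// MakesCalls with whether the stub's slow path was actually observed at runtime.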
GetByStatus::GetByStatus(StubInfoSummary summary, StructureStubInfo& stubInfo)
    : m_wasSeenInJIT(true)
{
    switch (summary) {
    case StubInfoSummary::NoInformation:
        m_state = NoInformation;
        return;
    case StubInfoSummary::Simple:
    case StubInfoSummary::MakesCalls:
        RELEASE_ASSERT_NOT_REACHED();
        return;
    case StubInfoSummary::TakesSlowPath:
        m_state = stubInfo.tookSlowPath ? ObservedTakesSlowPath : LikelyTakesSlowPath;
        return;
    case StubInfoSummary::TakesSlowPathAndMakesCalls:
        m_state = stubInfo.tookSlowPath ? ObservedSlowPathAndMakesCalls : MakesCalls;
        return;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

GetByStatus::GetByStatus(const ModuleNamespaceAccessCase& accessCase)
    : m_moduleNamespaceData(Box<ModuleNamespaceData>::create(ModuleNamespaceData { accessCase.moduleNamespaceObject(), accessCase.moduleEnvironment(), accessCase.scopeOffset(), accessCase.identifier() }))
    , m_state(ModuleNamespace)
    , m_wasSeenInJIT(true)
{
}

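// Derives variants from a stub without consulting OSR exit feedback (callers apply that
// separately). Self caches and polymorphic stubs are translated into GetByIdVariants; any access
// case we cannot model precisely degrades the result to the summary's slow version.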
GetByStatus GetByStatus::computeForStubInfoWithoutExitSiteFeedback(
    const ConcurrentJSLocker& locker, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, CallLinkStatus::ExitSiteData callExitSiteData, TrackIdentifiers trackIdentifiers)
{
    StubInfoSummary summary = StructureStubInfo::summary(stubInfo);
    if (!isInlineable(summary))
        return GetByStatus(summary, *stubInfo);

    // Finally figure out if we can derive an access strategy.
    GetByStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->cacheType()) {
    case CacheType::Unset:
        return GetByStatus(NoInformation);

    case CacheType::GetByIdSelf: {
        Structure* structure = stubInfo->u.byIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByStatus(JSC::slowVersion(summary), *stubInfo);
        Box<Identifier> identifier = stubInfo->getByIdSelfIdentifier();
        UniquedStringImpl* uid = identifier->impl();
        RELEASE_ASSERT(uid);
        if (trackIdentifiers == TrackIdentifiers::No)
            identifier = nullptr;
        GetByIdVariant variant(WTFMove(identifier));
        unsigned attributes;
        variant.m_offset = structure->getConcurrently(uid, attributes);
        if (!isValidOffset(variant.m_offset))
            return GetByStatus(JSC::slowVersion(summary), *stubInfo);
        if (attributes & PropertyAttribute::CustomAccessorOrValue)
            return GetByStatus(JSC::slowVersion(summary), *stubInfo);

        variant.m_structureSet.add(structure);
        bool didAppend = result.appendVariant(variant);
        ASSERT_UNUSED(didAppend, didAppend);
        return result;
    }

    case CacheType::Stub: {
        PolymorphicAccess* list = stubInfo->u.stub;
        if (list->size() == 1) {
            const AccessCase& access = list->at(0);
            switch (access.type()) {
            case AccessCase::ModuleNamespaceLoad:
                return GetByStatus(access.as<ModuleNamespaceAccessCase>());
            default:
                break;
            }
        }

        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            const AccessCase& access = list->at(listIndex);
            if (access.viaProxy())
                return GetByStatus(JSC::slowVersion(summary), *stubInfo);

            if (access.usesPolyProto())
                return GetByStatus(JSC::slowVersion(summary), *stubInfo);

            if (!access.requiresIdentifierNameMatch()) {
                // FIXME: We could use this for indexed loads in the future. This is pretty solid profiling
                // information, and probably better than ArrayProfile when it's available.
                // https://bugs.webkit.org/show_bug.cgi?id=204215
                return GetByStatus(JSC::slowVersion(summary), *stubInfo);
            }

            Structure* structure = access.structure();
            if (!structure) {
                // The null structure cases arise due to array.length and string.length. We have no way
                // of creating a GetByIdVariant for those, and we don't really have to since the DFG
                // handles those cases in FixupPhase using value profiling. That's a bit awkward - we
                // shouldn't have to use value profiling to discover something that the AccessCase
                // could have told us. But, it works well enough. So, our only concern here is to not
                // crash on null structure.
                return GetByStatus(JSC::slowVersion(summary), *stubInfo);
            }

            ComplexGetStatus complexGetStatus = ComplexGetStatus::computeFor(
                structure, access.conditionSet(), access.uid());

            switch (complexGetStatus.kind()) {
            case ComplexGetStatus::ShouldSkip:
                continue;

            case ComplexGetStatus::TakesSlowPath:
                return GetByStatus(JSC::slowVersion(summary), *stubInfo);

            case ComplexGetStatus::Inlineable: {
                std::unique_ptr<CallLinkStatus> callLinkStatus;
                JSFunction* intrinsicFunction = nullptr;
                FunctionPtr<OperationPtrTag> customAccessorGetter;
                std::unique_ptr<DOMAttributeAnnotation> domAttribute;
                bool haveDOMAttribute = false;

                switch (access.type()) {
                case AccessCase::Load:
                case AccessCase::GetGetter:
                case AccessCase::Miss: {
                    break;
                }
                case AccessCase::IntrinsicGetter: {
                    intrinsicFunction = access.as<IntrinsicGetterAccessCase>().intrinsicFunction();
                    break;
                }
                case AccessCase::Getter: {
                    callLinkStatus = makeUnique<CallLinkStatus>();
                    if (CallLinkInfo* callLinkInfo = access.as<GetterSetterAccessCase>().callLinkInfo()) {
                        *callLinkStatus = CallLinkStatus::computeFor(
                            locker, profiledBlock, *callLinkInfo, callExitSiteData);
                    }
                    break;
                }
                case AccessCase::CustomAccessorGetter: {
                    customAccessorGetter = access.as<GetterSetterAccessCase>().customAccessor();
                    if (!access.as<GetterSetterAccessCase>().domAttribute())
                        return GetByStatus(JSC::slowVersion(summary), *stubInfo);
                    domAttribute = WTF::makeUnique<DOMAttributeAnnotation>(*access.as<GetterSetterAccessCase>().domAttribute());
                    haveDOMAttribute = true;
                    result.m_state = Custom;
                    break;
                }
                default: {
                    // FIXME: It would be totally sweet to support more of these at some point in the
                    // future. https://bugs.webkit.org/show_bug.cgi?id=133052
                    return GetByStatus(JSC::slowVersion(summary), *stubInfo);
                } }

                ASSERT((AccessCase::Miss == access.type()) == (access.offset() == invalidOffset));
                GetByIdVariant variant(
                    trackIdentifiers == TrackIdentifiers::Yes ? access.identifier() : Box<Identifier>(nullptr), StructureSet(structure), complexGetStatus.offset(),
                    complexGetStatus.conditionSet(), WTFMove(callLinkStatus),
                    intrinsicFunction,
                    customAccessorGetter,
                    WTFMove(domAttribute));

                if (!result.appendVariant(variant))
                    return GetByStatus(JSC::slowVersion(summary), *stubInfo);

                if (haveDOMAttribute) {
                    // Give up when custom accesses are not merged into one.
                    if (result.numVariants() != 1)
                        return GetByStatus(JSC::slowVersion(summary), *stubInfo);
                } else {
                    // Give up when custom access and simple access are mixed.
                    if (result.m_state == Custom)
                        return GetByStatus(JSC::slowVersion(summary), *stubInfo);
                }
                break;
            } }
        }

        return result;
    }

    default:
        return GetByStatus(JSC::slowVersion(summary), *stubInfo);
    }

    RELEASE_ASSERT_NOT_REACHED();
    return GetByStatus();
}

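// Computes the status at DFG/FTL compile time by walking the inlining context stack: optimized
// stubs and previously computed statuses take precedence, and anything found at a non-inlined
// site is merged with (and can be downgraded by) the baseline profile for the same bytecode.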
GetByStatus GetByStatus::computeFor(
    CodeBlock* profiledBlock, ICStatusMap& baselineMap,
    ICStatusContextStack& icContextStack, CodeOrigin codeOrigin, TrackIdentifiers trackIdentifiers)
{
    BytecodeIndex bytecodeIndex = codeOrigin.bytecodeIndex();
    CallLinkStatus::ExitSiteData callExitSiteData = CallLinkStatus::computeExitSiteData(profiledBlock, bytecodeIndex);
    ExitFlag didExit = hasBadCacheExitSite(profiledBlock, bytecodeIndex);

    for (ICStatusContext* context : icContextStack) {
        ICStatus status = context->get(codeOrigin);

        auto bless = [&] (const GetByStatus& result) -> GetByStatus {
            if (!context->isInlined(codeOrigin)) {
                // Merge with baseline result, which also happens to contain exit data for both
                // inlined and not-inlined.
                GetByStatus baselineResult = computeFor(
                    profiledBlock, baselineMap, bytecodeIndex, didExit,
                    callExitSiteData, trackIdentifiers);
                baselineResult.merge(result);
                return baselineResult;
            }
            if (didExit.isSet(ExitFromInlined))
                return result.slowVersion();
            return result;
        };

        if (status.stubInfo) {
            GetByStatus result;
            {
                ConcurrentJSLocker locker(context->optimizedCodeBlock->m_lock);
                result = computeForStubInfoWithoutExitSiteFeedback(
                    locker, context->optimizedCodeBlock, status.stubInfo, callExitSiteData, trackIdentifiers);
            }
            if (result.isSet())
                return bless(result);
        }

        if (status.getStatus)
            return bless(*status.getStatus);
    }

    return computeFor(profiledBlock, baselineMap, bytecodeIndex, didExit, callExitSiteData, trackIdentifiers);
}

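// Computes a status purely from an abstract structure set and property name, with no profiling
// input. Only cacheable self accesses are modeled; accessors, custom properties, and index-like
// names fall back to slow-path or call states.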
GetByStatus GetByStatus::computeFor(const StructureSet& set, UniquedStringImpl* uid)
{
    // For now we only handle the super simple self access case. We could handle the
    // prototype case in the future.
    //
    // Note that this code is also used for GetByIdDirect since this function only looks
    // into direct properties. When supporting prototype chains, we should split this for
    // GetById and GetByIdDirect.

    if (set.isEmpty())
        return GetByStatus();

    if (parseIndex(*uid))
        return GetByStatus(LikelyTakesSlowPath);

    GetByStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = false;
    for (unsigned i = 0; i < set.size(); ++i) {
        Structure* structure = set[i];
        if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
            return GetByStatus(LikelyTakesSlowPath);

        if (!structure->propertyAccessesAreCacheable())
            return GetByStatus(LikelyTakesSlowPath);

        unsigned attributes;
        PropertyOffset offset = structure->getConcurrently(uid, attributes);
        if (!isValidOffset(offset))
            return GetByStatus(LikelyTakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
        if (attributes & PropertyAttribute::Accessor)
            return GetByStatus(MakesCalls); // We could be smarter here, like strength-reducing this to a Call.
        if (attributes & PropertyAttribute::CustomAccessorOrValue)
            return GetByStatus(LikelyTakesSlowPath);

        if (!result.appendVariant(GetByIdVariant(nullptr, structure, offset)))
            return GetByStatus(LikelyTakesSlowPath);
    }

    return result;
}
#endif // ENABLE(JIT)

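// Returns true when the modeled access may invoke a call: Simple variants make calls only if they
// carry a CallLinkStatus (i.e. a JS getter); MakesCalls and ObservedSlowPathAndMakesCalls always do.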
bool GetByStatus::makesCalls() const
{
    switch (m_state) {
    case NoInformation:
    case LikelyTakesSlowPath:
    case ObservedTakesSlowPath:
    case Custom:
    case ModuleNamespace:
        return false;
    case Simple:
        for (unsigned i = m_variants.size(); i--;) {
            if (m_variants[i].callLinkStatus())
                return true;
        }
        return false;
    case MakesCalls:
    case ObservedSlowPathAndMakesCalls:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();

    return false;
}

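// Produces the pessimistic version of this status, preserving whether calls were possible and
// whether the slow path was actually observed rather than merely predicted.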
GetByStatus GetByStatus::slowVersion() const
{
    if (observedStructureStubInfoSlowPath())
        return GetByStatus(makesCalls() ? ObservedSlowPathAndMakesCalls : ObservedTakesSlowPath, wasSeenInJIT());
    return GetByStatus(makesCalls() ? MakesCalls : LikelyTakesSlowPath, wasSeenInJIT());
}

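// Merges another status into this one, as when combining profiles from different inlining paths.
// Compatible variants are appended; any conflict collapses the result to a slow-path state.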
void GetByStatus::merge(const GetByStatus& other)
{
    if (other.m_state == NoInformation)
        return;

    auto mergeSlow = [&] () {
        if (observedStructureStubInfoSlowPath() || other.observedStructureStubInfoSlowPath())
            *this = GetByStatus((makesCalls() || other.makesCalls()) ? ObservedSlowPathAndMakesCalls : ObservedTakesSlowPath);
        else
            *this = GetByStatus((makesCalls() || other.makesCalls()) ? MakesCalls : LikelyTakesSlowPath);
    };

    switch (m_state) {
    case NoInformation:
        *this = other;
        return;

    case Simple:
    case Custom:
        if (m_state != other.m_state)
            return mergeSlow();

        for (const GetByIdVariant& otherVariant : other.m_variants) {
            if (!appendVariant(otherVariant))
                return mergeSlow();
        }
        return;

    case ModuleNamespace:
        if (other.m_state != ModuleNamespace)
            return mergeSlow();

        if (m_moduleNamespaceData->m_moduleNamespaceObject != other.m_moduleNamespaceData->m_moduleNamespaceObject)
            return mergeSlow();

        if (m_moduleNamespaceData->m_moduleEnvironment != other.m_moduleNamespaceData->m_moduleEnvironment)
            return mergeSlow();

        if (m_moduleNamespaceData->m_scopeOffset != other.m_moduleNamespaceData->m_scopeOffset)
            return mergeSlow();

        return;

    case LikelyTakesSlowPath:
    case ObservedTakesSlowPath:
    case MakesCalls:
    case ObservedSlowPathAndMakesCalls:
        return mergeSlow();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

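// Restricts the Simple variants to structures in the given set; if nothing survives, the status
// reverts to NoInformation.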
void GetByStatus::filter(const StructureSet& set)
{
    if (m_state != Simple)
        return;
    filterICStatusVariants(m_variants, set);
    if (m_variants.isEmpty())
        m_state = NoInformation;
}

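// Gives the GC a chance to cheaply keep cells referenced by the variants alive.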
void GetByStatus::markIfCheap(SlotVisitor& visitor)
{
    for (GetByIdVariant& variant : m_variants)
        variant.markIfCheap(visitor);
}

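// Returns false if any cell this status depends on (a variant's structures, or the module
// namespace object and environment) has died, in which case the status must be discarded.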
bool GetByStatus::finalize(VM& vm)
{
    for (GetByIdVariant& variant : m_variants) {
        if (!variant.finalize(vm))
            return false;
    }
    if (isModuleNamespace()) {
        if (m_moduleNamespaceData->m_moduleNamespaceObject && !vm.heap.isMarked(m_moduleNamespaceData->m_moduleNamespaceObject))
            return false;
        if (m_moduleNamespaceData->m_moduleEnvironment && !vm.heap.isMarked(m_moduleNamespaceData->m_moduleEnvironment))
            return false;
    }
    return true;
}

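// Returns the one identifier that every variant (or the module namespace access) agrees on, or
// nullptr if identifiers are untracked or inconsistent.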
Box<Identifier> GetByStatus::singleIdentifier() const
{
    if (isModuleNamespace()) {
        Box<Identifier> result = m_moduleNamespaceData->m_identifier;
        if (!result || result->isNull())
            return nullptr;
        return result;
    }

    if (m_variants.isEmpty())
        return nullptr;

    Box<Identifier> result = m_variants.first().identifier();
    if (!result)
        return nullptr;
    if (result->isNull())
        return nullptr;
    for (size_t i = 1; i < m_variants.size(); ++i) {
        Box<Identifier> uid = m_variants[i].identifier();
        if (!uid)
            return nullptr;
        if (*uid != *result)
            return nullptr;
    }
    return result;
}

void GetByStatus::dump(PrintStream& out) const
{
    out.print("(");
    switch (m_state) {
    case NoInformation:
        out.print("NoInformation");
        break;
    case Simple:
        out.print("Simple");
        break;
    case Custom:
        out.print("Custom");
        break;
    case ModuleNamespace:
        out.print("ModuleNamespace");
        break;
    case LikelyTakesSlowPath:
        out.print("LikelyTakesSlowPath");
        break;
    case ObservedTakesSlowPath:
        out.print("ObservedTakesSlowPath");
        break;
    case MakesCalls:
        out.print("MakesCalls");
        break;
    case ObservedSlowPathAndMakesCalls:
        out.print("ObservedSlowPathAndMakesCalls");
        break;
    }
    out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
}

} // namespace JSC
