/*
 * Copyright (C) 2017-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AccessCase.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "CallLinkInfo.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "GetterSetter.h"
#include "GetterSetterAccessCase.h"
#include "InstanceOfAccessCase.h"
#include "IntrinsicGetterAccessCase.h"
#include "JSCInlines.h"
#include "JSModuleEnvironment.h"
#include "JSModuleNamespaceObject.h"
#include "LinkBuffer.h"
#include "ModuleNamespaceAccessCase.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StructureStubInfo.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"

namespace JSC {

namespace AccessCaseInternal {
static constexpr bool verbose = false;
}

AccessCase::AccessCase(VM& vm, JSCell* owner, AccessType type, const Identifier& identifier, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
    : m_type(type)
    , m_offset(offset)
    , m_polyProtoAccessChain(WTFMove(prototypeAccessChain))
    , m_identifier(Box<Identifier>::create(identifier))
{
    m_structure.setMayBeNull(vm, owner, structure);
    m_conditionSet = conditionSet;
    RELEASE_ASSERT(m_conditionSet.isValid());
}

std::unique_ptr<AccessCase> AccessCase::create(VM& vm, JSCell* owner, AccessType type, const Identifier& identifier, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    switch (type) {
    case InHit:
    case InMiss:
        break;
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case Replace:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        RELEASE_ASSERT(!prototypeAccessChain);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, type, identifier, offset, structure, conditionSet, WTFMove(prototypeAccessChain)));
}

std::unique_ptr<AccessCase> AccessCase::create(
    VM& vm, JSCell* owner, const Identifier& identifier, PropertyOffset offset, Structure* oldStructure, Structure* newStructure,
    const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
{
    RELEASE_ASSERT(oldStructure == newStructure->previousID());

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    return std::unique_ptr<AccessCase>(new AccessCase(vm, owner, Transition, identifier, offset, newStructure, conditionSet, WTFMove(prototypeAccessChain)));
}

AccessCase::~AccessCase()
{
}

std::unique_ptr<AccessCase> AccessCase::fromStructureStubInfo(
    VM& vm, JSCell* owner, const Identifier& identifier, StructureStubInfo& stubInfo)
{
    switch (stubInfo.cacheType()) {
    case CacheType::GetByIdSelf:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return ProxyableAccessCase::create(vm, owner, Load, identifier, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::PutByIdReplace:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return AccessCase::create(vm, owner, Replace, identifier, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::InByIdSelf:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return AccessCase::create(vm, owner, InHit, identifier, stubInfo.u.byIdSelf.offset, stubInfo.u.byIdSelf.baseObjectStructure.get());

    case CacheType::ArrayLength:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return AccessCase::create(vm, owner, AccessCase::ArrayLength, identifier);

    case CacheType::StringLength:
        RELEASE_ASSERT(stubInfo.hasConstantIdentifier);
        return AccessCase::create(vm, owner, AccessCase::StringLength, identifier);

    default:
        return nullptr;
    }
}

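// A non-empty condition set means the property is not found on the base object itself
// but on some other object guarded by the conditions (typically somewhere on the
// prototype chain). That object, the slot base, is the "alternate base" the generated
// access actually reads from.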
bool AccessCase::hasAlternateBase() const
{
    return !conditionSet().isEmpty();
}

JSObject* AccessCase::alternateBase() const
{
    return conditionSet().slotBaseCondition().object();
}

std::unique_ptr<AccessCase> AccessCase::clone() const
{
    std::unique_ptr<AccessCase> result(new AccessCase(*this));
    result->resetState();
    return result;
}

Vector<WatchpointSet*, 2> AccessCase::commit(VM& vm)
{
    // It's fine to commit something that is already committed. That arises when we switch to using
    // newly allocated watchpoints. When it happens, it's not efficient - but we think that's OK
    // because most AccessCases have no extra watchpoints anyway.
    RELEASE_ASSERT(m_state == Primordial || m_state == Committed);

    Vector<WatchpointSet*, 2> result;
    Structure* structure = this->structure();

    if (!m_identifier->isNull()) {
        if ((structure && structure->needImpurePropertyWatchpoint())
            || m_conditionSet.needImpurePropertyWatchpoint()
            || (m_polyProtoAccessChain && m_polyProtoAccessChain->needImpurePropertyWatchpoint()))
            result.append(vm.ensureWatchpointSetForImpureProperty(*m_identifier));
    }

    if (additionalSet())
        result.append(additionalSet());

    if (structure
        && structure->hasRareData()
        && structure->rareData()->hasSharedPolyProtoWatchpoint()
        && structure->rareData()->sharedPolyProtoWatchpoint()->isStillValid()) {
        WatchpointSet* set = structure->rareData()->sharedPolyProtoWatchpoint()->inflate();
        result.append(set);
    }

    m_state = Committed;

    return result;
}

bool AccessCase::guardedByStructureCheck(const StructureStubInfo& stubInfo) const
{
    if (!stubInfo.hasConstantIdentifier)
        return false;
    return guardedByStructureCheckSkippingConstantIdentifierCheck();
}

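// The cases listed below dispatch on something other than the base cell's structure:
// a cell-type check, an indexing-shape check, or (for proxies and poly-proto chains)
// a sequence of checks emitted in generateWithGuard(). Everything else is guarded by
// a plain structure comparison.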
bool AccessCase::guardedByStructureCheckSkippingConstantIdentifierCheck() const
{
    if (viaProxy())
        return false;

    if (m_polyProtoAccessChain)
        return false;

    switch (m_type) {
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return false;
    default:
        return true;
    }
}

bool AccessCase::requiresIdentifierNameMatch() const
{
    switch (m_type) {
    case Load:
    // We don't currently have a by_val for these puts, but we do care about the identifier.
    case Transition:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
        return true;
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return false;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

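// The indexed cases take their property as an int32 index in a GPR rather than as a
// constant identifier, so the IC must first verify that the incoming property really
// is an int32 before any of these cases can match.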
bool AccessCase::requiresInt32PropertyCheck() const
{
    switch (m_type) {
    case Load:
    case Transition:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
        return false;
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

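// Cases that may materialize a double need a scratch FPR to box the result: double and
// float typed-array loads, contiguous-double loads, and Uint32 loads (whose value can
// exceed the int32 range and must then be boxed as a double).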
bool AccessCase::needsScratchFPR() const
{
    switch (m_type) {
    case Load:
    case Transition:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedStringLoad:
        return false;
    case IndexedDoubleLoad:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedTypedArrayUint32Load:
        return true;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

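// Visits every heap cell the generated stub depends on: the guarded structure(s), the
// cells referenced by the condition set, and any case-specific cells (accessor call
// targets, custom slot bases, intrinsic functions, module objects, prototypes).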
template<typename Functor>
void AccessCase::forEachDependentCell(const Functor& functor) const
{
    m_conditionSet.forEachDependentCell(functor);
    if (m_structure)
        functor(m_structure.get());
    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain())
            functor(structure);
    }

    switch (type()) {
    case Getter:
    case Setter: {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->forEachDependentCell(functor);
        break;
    }
    case CustomValueGetter:
    case CustomValueSetter: {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.customSlotBase())
            functor(accessor.customSlotBase());
        break;
    }
    case IntrinsicGetter: {
        auto& intrinsic = this->as<IntrinsicGetterAccessCase>();
        if (intrinsic.intrinsicFunction())
            functor(intrinsic.intrinsicFunction());
        break;
    }
    case ModuleNamespaceLoad: {
        auto& accessCase = this->as<ModuleNamespaceAccessCase>();
        if (accessCase.moduleNamespaceObject())
            functor(accessCase.moduleNamespaceObject());
        if (accessCase.moduleEnvironment())
            functor(accessCase.moduleEnvironment());
        break;
    }
    case InstanceOfHit:
    case InstanceOfMiss:
        if (as<InstanceOfAccessCase>().prototype())
            functor(as<InstanceOfAccessCase>().prototype());
        break;
    case CustomAccessorGetter:
    case CustomAccessorSetter:
    case Load:
    case Transition:
    case Replace:
    case Miss:
    case GetGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        break;
    }
}

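// Note that a Transition only "does calls" when it must reallocate out-of-line property
// storage while the object could have an indexing header; in that situation the
// reallocation presumably has to happen via an operation call out of the stub, so the
// cells the stub references must be kept alive by the caller.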
bool AccessCase::doesCalls(Vector<JSCell*>* cellsToMarkIfDoesCalls) const
{
    bool doesCalls = false;
    switch (type()) {
    case Transition:
        doesCalls = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity() && structure()->couldHaveIndexingHeader();
        break;
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
        doesCalls = true;
        break;
    case Load:
    case Replace:
    case Miss:
    case GetGetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfHit:
    case InstanceOfMiss:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        doesCalls = false;
        break;
    }

    if (doesCalls && cellsToMarkIfDoesCalls) {
        forEachDependentCell([&](JSCell* cell) {
            cellsToMarkIfDoesCalls->append(cell);
        });
    }
    return doesCalls;
}

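// A case can still succeed if every condition in its set remains valid: equivalence
// conditions must be watchable (we re-establish watchability here), while the rest
// only need their structures to still guarantee validity.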
bool AccessCase::couldStillSucceed() const
{
    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        if (condition.condition().kind() == PropertyCondition::Equivalence) {
            if (!condition.isWatchableAssumingImpurePropertyWatchpoint(PropertyCondition::WatchabilityEffort::EnsureWatchability))
                return false;
        } else {
            if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint())
                return false;
        }
    }
    return true;
}

bool AccessCase::canReplace(const AccessCase& other) const
{
    // This puts in a good effort to try to figure out if 'other' is made superfluous by '*this'.
    // It's fine for this to return false if it's in doubt.
    //
    // Note that if A->guardedByStructureCheck() && B->guardedByStructureCheck() then
    // A->canReplace(B) == B->canReplace(A).

    if (*m_identifier != *other.m_identifier)
        return false;

    switch (type()) {
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case ArrayLength:
    case StringLength:
    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        return other.type() == type();

    case ModuleNamespaceLoad: {
        if (other.type() != type())
            return false;
        auto& thisCase = this->as<ModuleNamespaceAccessCase>();
        auto& otherCase = other.as<ModuleNamespaceAccessCase>();
        return thisCase.moduleNamespaceObject() == otherCase.moduleNamespaceObject();
    }

    case InstanceOfHit:
    case InstanceOfMiss: {
        if (other.type() != type())
            return false;

        if (this->as<InstanceOfAccessCase>().prototype() != other.as<InstanceOfAccessCase>().prototype())
            return false;

        return structure() == other.structure();
    }

    case InstanceOfGeneric:
        switch (other.type()) {
        case InstanceOfGeneric:
        case InstanceOfHit:
        case InstanceOfMiss:
            return true;
        default:
            return false;
        }

    case Load:
    case Transition:
    case Replace:
    case Miss:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter:
    case IntrinsicGetter:
    case InHit:
    case InMiss:
        if (other.type() != type())
            return false;

        if (m_polyProtoAccessChain) {
            if (!other.m_polyProtoAccessChain)
                return false;
            // This is the only check we need since PolyProtoAccessChain contains the base structure.
            // If we ever change it to contain only the prototype chain, we'll also need to change
            // this to check the base structure.
            return structure() == other.structure()
                && *m_polyProtoAccessChain == *other.m_polyProtoAccessChain;
        }

        if (!guardedByStructureCheckSkippingConstantIdentifierCheck() || !other.guardedByStructureCheckSkippingConstantIdentifierCheck())
            return false;

        return structure() == other.structure();
    }
    RELEASE_ASSERT_NOT_REACHED();
}

void AccessCase::dump(PrintStream& out) const
{
    out.print("\n", m_type, ":(");

    CommaPrinter comma;

    out.print(comma, m_state);

    out.print(comma, "ident = '", *m_identifier, "'");
    if (isValidOffset(m_offset))
        out.print(comma, "offset = ", m_offset);

    if (m_polyProtoAccessChain) {
        out.print(comma, "prototype access chain = ");
        m_polyProtoAccessChain->dump(structure(), out);
    } else {
        if (m_type == Transition)
            out.print(comma, "structure = ", pointerDump(structure()), " -> ", pointerDump(newStructure()));
        else if (m_structure)
            out.print(comma, "structure = ", pointerDump(m_structure.get()));
    }

    if (!m_conditionSet.isEmpty())
        out.print(comma, "conditions = ", m_conditionSet);

    dumpImpl(out, comma);
    out.print(")");
}

bool AccessCase::visitWeak(VM& vm) const
{
    if (isAccessor()) {
        auto& accessor = this->as<GetterSetterAccessCase>();
        if (accessor.callLinkInfo())
            accessor.callLinkInfo()->visitWeak(vm);
    }

    bool isValid = true;
    forEachDependentCell([&](JSCell* cell) {
        isValid &= vm.heap.isMarked(cell);
    });
    return isValid;
}

bool AccessCase::propagateTransitions(SlotVisitor& visitor) const
{
    bool result = true;

    if (m_structure)
        result &= m_structure->markIfCheap(visitor);

    if (m_polyProtoAccessChain) {
        for (Structure* structure : m_polyProtoAccessChain->chain())
            result &= structure->markIfCheap(visitor);
    }

    switch (m_type) {
    case Transition:
        if (visitor.vm().heap.isMarked(m_structure->previousID()))
            visitor.appendUnbarriered(m_structure.get());
        else
            result = false;
        break;
    default:
        break;
    }

    return result;
}

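// Emits the guard that decides whether this case applies to the incoming access, then
// falls through to generateImpl() for the access itself. On a failed guard we jump to
// fallThrough so PolymorphicAccess can try the next case. Self-contained cases (the
// length cases and the indexed loads) emit their whole body here and return early.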
void AccessCase::generateWithGuard(
    AccessGenerationState& state, CCallHelpers::JumpList& fallThrough)
{
    SuperSamplerScope superSamplerScope(false);

    checkConsistency(*state.stubInfo);

    RELEASE_ASSERT(m_state == Committed);
    m_state = Generated;

    CCallHelpers& jit = *state.jit;
    StructureStubInfo& stubInfo = *state.stubInfo;
    VM& vm = state.m_vm;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    if (requiresIdentifierNameMatch() && !stubInfo.hasConstantIdentifier) {
        RELEASE_ASSERT(!m_identifier->isNull());
        GPRReg propertyGPR = state.u.propertyGPR;
        // Non-rope string check done inside polymorphic access.

        if (uid()->isSymbol())
            jit.loadPtr(MacroAssembler::Address(propertyGPR, Symbol::offsetOfSymbolImpl()), scratchGPR);
        else
            jit.loadPtr(MacroAssembler::Address(propertyGPR, JSString::offsetOfValue()), scratchGPR);
        fallThrough.append(jit.branchPtr(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImmPtr(uid())));
    }

    auto emitDefaultGuard = [&] () {
        if (m_polyProtoAccessChain) {
            GPRReg baseForAccessGPR = state.scratchGPR;
            jit.move(state.baseGPR, baseForAccessGPR);
            m_polyProtoAccessChain->forEach(structure(), [&] (Structure* structure, bool atEnd) {
                fallThrough.append(
                    jit.branchStructure(
                        CCallHelpers::NotEqual,
                        CCallHelpers::Address(baseForAccessGPR, JSCell::structureIDOffset()),
                        structure));
                if (atEnd) {
                    if ((m_type == Miss || m_type == InMiss || m_type == Transition) && structure->hasPolyProto()) {
                        // For a Miss/InMiss/Transition, we must ensure we're at the end when the last item is poly proto.
                        // Transitions must do this because they need to verify there isn't a setter in the chain.
                        // Miss/InMiss need to do this to ensure there isn't a new item at the end of the chain that
                        // has the property.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::NotEqual, baseForAccessGPR, CCallHelpers::TrustedImm64(JSValue::ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::NonZero, baseForAccessGPR));
#endif
                    }
                } else {
                    if (structure->hasMonoProto()) {
                        JSValue prototype = structure->prototypeForLookup(state.m_globalObject);
                        RELEASE_ASSERT(prototype.isObject());
                        jit.move(CCallHelpers::TrustedImmPtr(asObject(prototype)), baseForAccessGPR);
                    } else {
                        RELEASE_ASSERT(structure->isObject()); // Primitives must have a stored prototype. We use prototypeForLookup for them.
#if USE(JSVALUE64)
                        jit.load64(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset)), baseForAccessGPR);
                        fallThrough.append(jit.branch64(CCallHelpers::Equal, baseForAccessGPR, CCallHelpers::TrustedImm64(JSValue::ValueNull)));
#else
                        jit.load32(MacroAssembler::Address(baseForAccessGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset), baseForAccessGPR);
                        fallThrough.append(jit.branchTestPtr(CCallHelpers::Zero, baseForAccessGPR));
#endif
                    }
                }
            });
            return;
        }

        if (viaProxy()) {
            fallThrough.append(
                jit.branchIfNotType(baseGPR, PureForwardingProxyType));

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

            fallThrough.append(
                jit.branchStructure(
                    CCallHelpers::NotEqual,
                    CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                    structure()));
            return;
        }

        fallThrough.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()),
                structure()));
    };

    switch (m_type) {
    case ArrayLength: {
        ASSERT(!viaProxy());
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IsArray)));
        fallThrough.append(
            jit.branchTest32(
                CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(IndexingShapeMask)));
        break;
    }

    case StringLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotString(baseGPR));
        break;
    }

    case DirectArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, DirectArgumentsType));

        fallThrough.append(
            jit.branchTestPtr(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.load32(
            CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ScopedArgumentsLength: {
        ASSERT(!viaProxy());
        fallThrough.append(
            jit.branchIfNotType(baseGPR, ScopedArgumentsType));

        fallThrough.append(
            jit.branchTest8(
                CCallHelpers::NonZero,
                CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfOverrodeThings())));
        jit.load32(
            CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength()),
            valueRegs.payloadGPR());
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case ModuleNamespaceLoad: {
        this->as<ModuleNamespaceAccessCase>().emit(state, fallThrough);
        return;
    }

    case IndexedScopedArgumentsLoad: {
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;

        jit.load8(CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()), scratchGPR);
        fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(ScopedArgumentsType)));

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
        allocator.lock(valueRegs.payloadGPR());
        allocator.lock(propertyGPR);
        allocator.lock(scratchGPR);

        GPRReg scratch2GPR = allocator.allocateScratchGPR();
        GPRReg scratch3GPR = allocator.allocateScratchGPR();

        ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
            jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);

        CCallHelpers::JumpList failAndIgnore;

        failAndIgnore.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTotalLength())));

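        // Two storage regions: indices below the table length map through the
        // ScopedArgumentsTable to a ScopeOffset in the lexical environment; indices at
        // or above it read from the overflow storage, where an empty value marks a hole.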
        jit.loadPtr(CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfTable()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ScopedArgumentsTable::offsetOfLength()), scratch2GPR);
        auto overflowCase = jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratch2GPR);

        jit.loadPtr(CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfScope()), scratch2GPR);
        jit.loadPtr(CCallHelpers::Address(scratchGPR, ScopedArgumentsTable::offsetOfArguments()), scratchGPR);
        jit.zeroExtend32ToPtr(propertyGPR, scratch3GPR);
        jit.load32(CCallHelpers::BaseIndex(scratchGPR, scratch3GPR, CCallHelpers::TimesFour), scratchGPR);
        failAndIgnore.append(jit.branch32(CCallHelpers::Equal, scratchGPR, CCallHelpers::TrustedImm32(ScopeOffset::invalidOffset)));
        jit.loadValue(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesEight, JSLexicalEnvironment::offsetOfVariables()), valueRegs);
        auto done = jit.jump();

        overflowCase.link(&jit);
        jit.sub32(propertyGPR, scratch2GPR);
        jit.neg32(scratch2GPR);
        jit.loadPtr(CCallHelpers::Address(baseGPR, ScopedArguments::offsetOfStorage()), scratch3GPR);
        jit.loadValue(CCallHelpers::BaseIndex(scratch3GPR, scratch2GPR, CCallHelpers::TimesEight), JSValueRegs::payloadOnly(scratchGPR));
        failAndIgnore.append(jit.branchIfEmpty(scratchGPR));
        jit.move(scratchGPR, valueRegs.payloadGPR());

        done.link(&jit);

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);

        return;
    }

    case IndexedDirectArgumentsLoad: {
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()), scratchGPR);
        fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(DirectArgumentsType)));

        jit.load32(CCallHelpers::Address(baseGPR, DirectArguments::offsetOfLength()), scratchGPR);
        state.failAndRepatch.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));
        state.failAndRepatch.append(jit.branchTestPtr(CCallHelpers::NonZero, CCallHelpers::Address(baseGPR, DirectArguments::offsetOfMappedArguments())));
        jit.zeroExtend32ToPtr(propertyGPR, scratchGPR);
        jit.loadValue(CCallHelpers::BaseIndex(baseGPR, scratchGPR, CCallHelpers::TimesEight, DirectArguments::storageOffset()), valueRegs);
        state.succeed();
        return;
    }

    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load: {
        // This code is written such that the result could alias with the base or the property.

        TypedArrayType type = toTypedArrayType(m_type);

        GPRReg propertyGPR = state.u.propertyGPR;

        jit.load8(CCallHelpers::Address(baseGPR, JSCell::typeInfoTypeOffset()), scratchGPR);
        fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(typeForTypedArrayType(type))));

        jit.load32(CCallHelpers::Address(baseGPR, JSArrayBufferView::offsetOfLength()), scratchGPR);
        state.failAndRepatch.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
        allocator.lock(valueRegs.payloadGPR());
        allocator.lock(propertyGPR);
        allocator.lock(scratchGPR);
        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
            jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);

        jit.loadPtr(CCallHelpers::Address(baseGPR, JSArrayBufferView::offsetOfVector()), scratch2GPR);
        jit.cageConditionally(Gigacage::Primitive, scratch2GPR, scratchGPR, scratchGPR);

        jit.signExtend32ToPtr(propertyGPR, scratchGPR);
        if (isInt(type)) {
            switch (elementSize(type)) {
            case 1:
                if (JSC::isSigned(type))
                    jit.load8SignedExtendTo32(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesOne), valueRegs.payloadGPR());
                else
                    jit.load8(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesOne), valueRegs.payloadGPR());
                break;
            case 2:
                if (JSC::isSigned(type))
                    jit.load16SignedExtendTo32(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesTwo), valueRegs.payloadGPR());
                else
                    jit.load16(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesTwo), valueRegs.payloadGPR());
                break;
            case 4:
                jit.load32(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesFour), valueRegs.payloadGPR());
                break;
            default:
                CRASH();
            }

            CCallHelpers::Jump done;
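            // A Uint32 element with its sign bit set was loaded as a negative int32 and
            // cannot be represented as an int32 JSValue. Converting to double and adding
            // 2^32 recovers the intended unsigned value before boxing.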
            if (type == TypeUint32) {
                RELEASE_ASSERT(state.scratchFPR != InvalidFPRReg);
                auto canBeInt = jit.branch32(CCallHelpers::GreaterThanOrEqual, valueRegs.payloadGPR(), CCallHelpers::TrustedImm32(0));

                jit.convertInt32ToDouble(valueRegs.payloadGPR(), state.scratchFPR);
                jit.addDouble(CCallHelpers::AbsoluteAddress(&CCallHelpers::twoToThe32), state.scratchFPR);
                jit.boxDouble(state.scratchFPR, valueRegs);
                done = jit.jump();
                canBeInt.link(&jit);
            }

            jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
            if (done.isSet())
                done.link(&jit);
        } else {
            ASSERT(isFloat(type));
            RELEASE_ASSERT(state.scratchFPR != InvalidFPRReg);
            switch (elementSize(type)) {
            case 4:
                jit.loadFloat(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesFour), state.scratchFPR);
                jit.convertFloatToDouble(state.scratchFPR, state.scratchFPR);
                break;
            case 8: {
                jit.loadDouble(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesEight), state.scratchFPR);
                break;
            }
            default:
                CRASH();
            }

            jit.purifyNaN(state.scratchFPR);
            jit.boxDouble(state.scratchFPR, valueRegs);
        }

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        return;
    }

    case IndexedStringLoad: {
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;

        fallThrough.append(jit.branchIfNotString(baseGPR));

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
        allocator.lock(valueRegs.payloadGPR());
        allocator.lock(propertyGPR);
        allocator.lock(scratchGPR);
        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        CCallHelpers::JumpList failAndIgnore;

        ScratchRegisterAllocator::PreservedState preservedState = allocator.preserveReusedRegistersByPushing(
            jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);

        jit.loadPtr(CCallHelpers::Address(baseGPR, JSString::offsetOfValue()), scratch2GPR);
        failAndIgnore.append(jit.branchIfRopeStringImpl(scratch2GPR));
        jit.load32(CCallHelpers::Address(scratch2GPR, StringImpl::lengthMemoryOffset()), scratchGPR);

        failAndIgnore.append(jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, scratchGPR));

        jit.load32(CCallHelpers::Address(scratch2GPR, StringImpl::flagsOffset()), scratchGPR);
        jit.loadPtr(CCallHelpers::Address(scratch2GPR, StringImpl::dataOffset()), scratch2GPR);
        auto is16Bit = jit.branchTest32(CCallHelpers::Zero, scratchGPR, CCallHelpers::TrustedImm32(StringImpl::flagIs8Bit()));
        jit.zeroExtend32ToPtr(propertyGPR, scratchGPR);
        jit.load8(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesOne, 0), scratch2GPR);
        auto is8BitLoadDone = jit.jump();
        is16Bit.link(&jit);
        jit.zeroExtend32ToPtr(propertyGPR, scratchGPR);
        jit.load16(CCallHelpers::BaseIndex(scratch2GPR, scratchGPR, CCallHelpers::TimesTwo, 0), scratch2GPR);
        is8BitLoadDone.link(&jit);

        failAndIgnore.append(jit.branch32(CCallHelpers::Above, scratch2GPR, CCallHelpers::TrustedImm32(maxSingleCharacterString)));
        jit.move(CCallHelpers::TrustedImmPtr(vm.smallStrings.singleCharacterStrings()), scratchGPR);
        jit.loadPtr(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::ScalePtr, 0), valueRegs.payloadGPR());
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);

        return;
    }

    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad: {
        // This code is written such that the result could alias with the base or the property.
        GPRReg propertyGPR = state.u.propertyGPR;

        // Int32 check done in polymorphic access.
        jit.load8(CCallHelpers::Address(baseGPR, JSCell::indexingTypeAndMiscOffset()), scratchGPR);
        jit.and32(CCallHelpers::TrustedImm32(IndexingShapeMask), scratchGPR);

        CCallHelpers::Jump isOutOfBounds;
        CCallHelpers::Jump isEmpty;

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
        allocator.lock(valueRegs.payloadGPR());
        allocator.lock(propertyGPR);
        allocator.lock(scratchGPR);
        GPRReg scratch2GPR = allocator.allocateScratchGPR();
        ScratchRegisterAllocator::PreservedState preservedState;

        CCallHelpers::JumpList failAndIgnore;
        auto preserveReusedRegisters = [&] {
            preservedState = allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
        };

        if (m_type == IndexedArrayStorageLoad) {
            jit.add32(CCallHelpers::TrustedImm32(-ArrayStorageShape), scratchGPR, scratchGPR);
            fallThrough.append(jit.branch32(CCallHelpers::Above, scratchGPR, CCallHelpers::TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape)));

            preserveReusedRegisters();

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            isOutOfBounds = jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, CCallHelpers::Address(scratchGPR, ArrayStorage::vectorLengthOffset()));

            jit.zeroExtend32ToPtr(propertyGPR, scratch2GPR);
            jit.loadValue(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight, ArrayStorage::vectorOffset()), JSValueRegs::payloadOnly(scratchGPR));
            isEmpty = jit.branchIfEmpty(scratchGPR);
            jit.move(scratchGPR, valueRegs.payloadGPR());
        } else {
            IndexingType expectedShape;
            switch (m_type) {
            case IndexedInt32Load:
                expectedShape = Int32Shape;
                break;
            case IndexedDoubleLoad:
                expectedShape = DoubleShape;
                break;
            case IndexedContiguousLoad:
                expectedShape = ContiguousShape;
                break;
            default:
                RELEASE_ASSERT_NOT_REACHED();
                break;
            }

            fallThrough.append(jit.branch32(CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(expectedShape)));

            preserveReusedRegisters();

            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            isOutOfBounds = jit.branch32(CCallHelpers::AboveOrEqual, propertyGPR, CCallHelpers::Address(scratchGPR, Butterfly::offsetOfPublicLength()));
            jit.zeroExtend32ToPtr(propertyGPR, scratch2GPR);
            if (m_type == IndexedDoubleLoad) {
                RELEASE_ASSERT(state.scratchFPR != InvalidFPRReg);
                jit.loadDouble(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight), state.scratchFPR);
                isEmpty = jit.branchIfNaN(state.scratchFPR);
                jit.boxDouble(state.scratchFPR, valueRegs);
            } else {
                jit.loadValue(CCallHelpers::BaseIndex(scratchGPR, scratch2GPR, CCallHelpers::TimesEight), JSValueRegs::payloadOnly(scratchGPR));
                isEmpty = jit.branchIfEmpty(scratchGPR);
                jit.move(scratchGPR, valueRegs.payloadGPR());
            }
        }

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            isOutOfBounds.link(&jit);
            isEmpty.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else {
            state.failAndIgnore.append(isOutOfBounds);
            state.failAndIgnore.append(isEmpty);
        }

        return;
    }

    case InstanceOfHit:
    case InstanceOfMiss:
        emitDefaultGuard();

        fallThrough.append(
            jit.branchPtr(
                CCallHelpers::NotEqual, state.u.prototypeGPR,
                CCallHelpers::TrustedImmPtr(as<InstanceOfAccessCase>().prototype())));
        break;

    case InstanceOfGeneric: {
        GPRReg prototypeGPR = state.u.prototypeGPR;
        // Legend: value = `base instanceof prototypeGPR`.

        GPRReg valueGPR = valueRegs.payloadGPR();

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
        allocator.lock(valueGPR);
        allocator.lock(prototypeGPR);
        allocator.lock(scratchGPR);

        GPRReg scratch2GPR = allocator.allocateScratchGPR();

        if (!state.stubInfo->prototypeIsKnownObject)
            state.failAndIgnore.append(jit.branchIfNotObject(prototypeGPR));

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(
                jit,
                ScratchRegisterAllocator::ExtraStackSpace::NoExtraSpace);
        CCallHelpers::Jump failAndIgnore;

        jit.move(baseGPR, valueGPR);

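        // Walk the prototype chain inline. Each iteration loads the current cell's
        // structure and its prototype; a poly-proto object stores its prototype in the
        // object itself at knownPolyProtoOffset, so an empty structure prototype means
        // "load it from the object". Proxies bail out to the slow path.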
        CCallHelpers::Label loop(&jit);
        failAndIgnore = jit.branchIfType(valueGPR, ProxyObjectType);

        jit.emitLoadStructure(vm, valueGPR, scratch2GPR, scratchGPR);
#if USE(JSVALUE64)
        jit.load64(CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset()), scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branchTest64(CCallHelpers::NonZero, scratch2GPR);
        jit.load64(
            CCallHelpers::Address(valueGPR, offsetRelativeToBase(knownPolyProtoOffset)),
            scratch2GPR);
        hasMonoProto.link(&jit);
#else
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + TagOffset),
            scratchGPR);
        jit.load32(
            CCallHelpers::Address(scratch2GPR, Structure::prototypeOffset() + PayloadOffset),
            scratch2GPR);
        CCallHelpers::Jump hasMonoProto = jit.branch32(
            CCallHelpers::NotEqual, scratchGPR, CCallHelpers::TrustedImm32(JSValue::EmptyValueTag));
        jit.load32(
            CCallHelpers::Address(
                valueGPR, offsetRelativeToBase(knownPolyProtoOffset) + PayloadOffset),
            scratch2GPR);
        hasMonoProto.link(&jit);
#endif
        jit.move(scratch2GPR, valueGPR);

        CCallHelpers::Jump isInstance = jit.branchPtr(CCallHelpers::Equal, valueGPR, prototypeGPR);

#if USE(JSVALUE64)
        jit.branchIfCell(JSValueRegs(valueGPR)).linkTo(loop, &jit);
#else
        jit.branchTestPtr(CCallHelpers::NonZero, valueGPR).linkTo(loop, &jit);
#endif

        jit.boxBooleanPayload(false, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        isInstance.link(&jit);
        jit.boxBooleanPayload(true, valueGPR);
        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        if (allocator.didReuseRegisters()) {
            failAndIgnore.link(&jit);
            allocator.restoreReusedRegistersByPopping(jit, preservedState);
            state.failAndIgnore.append(jit.jump());
        } else
            state.failAndIgnore.append(failAndIgnore);
        return;
    }

    default:
        emitDefaultGuard();
        break;
    }

    generateImpl(state);
}

void AccessCase::generate(AccessGenerationState& state)
{
    RELEASE_ASSERT(m_state == Committed);
    RELEASE_ASSERT(state.stubInfo->hasConstantIdentifier);
    m_state = Generated;

    checkConsistency(*state.stubInfo);

    generateImpl(state);
}

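// Emits the body of the access, assuming any type/structure guard was already emitted
// by generateWithGuard(). Conditions from the condition set are enforced here, either
// by installing watchpoints or by emitting structure re-checks.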
void AccessCase::generateImpl(AccessGenerationState& state)
{
    SuperSamplerScope superSamplerScope(false);
    if (AccessCaseInternal::verbose)
        dataLog("\n\nGenerating code for: ", *this, "\n");

    ASSERT(m_state == Generated); // We rely on the callers setting this for us.

    CCallHelpers& jit = *state.jit;
    VM& vm = state.m_vm;
    CodeBlock* codeBlock = jit.codeBlock();
    StructureStubInfo& stubInfo = *state.stubInfo;
    JSValueRegs valueRegs = state.valueRegs;
    GPRReg baseGPR = state.baseGPR;
    GPRReg thisGPR = stubInfo.thisValueIsInThisGPR() ? state.u.thisGPR : baseGPR;
    GPRReg scratchGPR = state.scratchGPR;

    for (const ObjectPropertyCondition& condition : m_conditionSet) {
        RELEASE_ASSERT(!m_polyProtoAccessChain);

        if (condition.isWatchableAssumingImpurePropertyWatchpoint(PropertyCondition::WatchabilityEffort::EnsureWatchability)) {
            state.installWatchpoint(condition);
            continue;
        }

        // For now, we only allow equivalence when it's watchable.
        RELEASE_ASSERT(condition.condition().kind() != PropertyCondition::Equivalence);

        if (!condition.structureEnsuresValidityAssumingImpurePropertyWatchpoint()) {
            // The reason why this cannot happen is that we require that PolymorphicAccess calls
            // AccessCase::generate() only after it has verified that
            // AccessCase::couldStillSucceed() returned true.

            dataLog("This condition is no longer met: ", condition, "\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        // We will emit code that has a weak reference that isn't otherwise listed anywhere.
        Structure* structure = condition.object()->structure(vm);
        state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));

        jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
        state.failAndRepatch.append(
            jit.branchStructure(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(scratchGPR, JSCell::structureIDOffset()),
                structure));
    }

    switch (m_type) {
    case InHit:
    case InMiss:
        jit.boxBoolean(m_type == InHit, valueRegs);
        state.succeed();
        return;

    case Miss:
        jit.moveTrustedValue(jsUndefined(), valueRegs);
        state.succeed();
        return;

    case InstanceOfHit:
    case InstanceOfMiss:
        jit.boxBooleanPayload(m_type == InstanceOfHit, valueRegs.payloadGPR());
        state.succeed();
        return;

    case Load:
    case GetGetter:
    case Getter:
    case Setter:
    case CustomValueGetter:
    case CustomAccessorGetter:
    case CustomValueSetter:
    case CustomAccessorSetter: {
        GPRReg valueRegsPayloadGPR = valueRegs.payloadGPR();

        if (isValidOffset(m_offset)) {
            Structure* currStructure;
            if (!hasAlternateBase())
                currStructure = structure();
            else
                currStructure = alternateBase()->structure(vm);
            currStructure->startWatchingPropertyForReplacements(vm, offset());
        }

        GPRReg baseForGetGPR;
        if (viaProxy()) {
            ASSERT(m_type != CustomValueSetter && m_type != CustomAccessorSetter); // Because setters need to not trash valueRegsPayloadGPR.
            if (m_type == Getter || m_type == Setter)
                baseForGetGPR = scratchGPR;
            else
                baseForGetGPR = valueRegsPayloadGPR;

            ASSERT((m_type != Getter && m_type != Setter) || baseForGetGPR != baseGPR);
            ASSERT(m_type != Setter || baseForGetGPR != valueRegsPayloadGPR);

            jit.loadPtr(
                CCallHelpers::Address(baseGPR, JSProxy::targetOffset()),
                baseForGetGPR);
        } else
            baseForGetGPR = baseGPR;

        GPRReg baseForAccessGPR;
        if (m_polyProtoAccessChain) {
            // This isn't pretty, but we know we got here via generateWithGuard,
            // and it left the baseForAccess inside scratchGPR. We could re-derive the base,
            // but it'd require emitting the same code to load the base twice.
            baseForAccessGPR = scratchGPR;
        } else {
            if (hasAlternateBase()) {
                jit.move(
                    CCallHelpers::TrustedImmPtr(alternateBase()), scratchGPR);
                baseForAccessGPR = scratchGPR;
            } else
                baseForAccessGPR = baseForGetGPR;
        }

        GPRReg loadedValueGPR = InvalidGPRReg;
        if (m_type != CustomValueGetter && m_type != CustomAccessorGetter && m_type != CustomValueSetter && m_type != CustomAccessorSetter) {
            if (m_type == Load || m_type == GetGetter)
                loadedValueGPR = valueRegsPayloadGPR;
            else
                loadedValueGPR = scratchGPR;

            ASSERT((m_type != Getter && m_type != Setter) || loadedValueGPR != baseGPR);
            ASSERT(m_type != Setter || loadedValueGPR != valueRegsPayloadGPR);

            GPRReg storageGPR;
            if (isInlineOffset(m_offset))
                storageGPR = baseForAccessGPR;
            else {
                jit.loadPtr(
                    CCallHelpers::Address(baseForAccessGPR, JSObject::butterflyOffset()),
                    loadedValueGPR);
                storageGPR = loadedValueGPR;
            }

#if USE(JSVALUE64)
            jit.load64(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset)), loadedValueGPR);
#else
            if (m_type == Load || m_type == GetGetter) {
                jit.load32(
                    CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + TagOffset),
                    valueRegs.tagGPR());
            }
            jit.load32(
                CCallHelpers::Address(storageGPR, offsetRelativeToBase(m_offset) + PayloadOffset),
                loadedValueGPR);
#endif
        }

        if (m_type == Load || m_type == GetGetter) {
            state.succeed();
            return;
        }

        if (m_type == CustomAccessorGetter && this->as<GetterSetterAccessCase>().domAttribute()) {
            auto& access = this->as<GetterSetterAccessCase>();
            // We do not need to emit a CheckDOM operation since the structure check ensures
            // that the structure of the given base value is structure()! So all we should
            // do is perform the CheckDOM check at IC compile time here.
            if (!structure()->classInfo()->isSubClassOf(access.domAttribute()->classInfo)) {
                state.failAndIgnore.append(jit.jump());
                return;
            }

            if (Options::useDOMJIT() && access.domAttribute()->domJIT) {
                access.emitDOMJITGetter(state, access.domAttribute()->domJIT, baseForGetGPR);
                return;
            }
        }

        // Stuff for custom getters/setters.
        CCallHelpers::Call operationCall;

        // Stuff for JS getters/setters.
        CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
        CCallHelpers::Call fastPathCall;
        CCallHelpers::Call slowPathCall;

        // This also does the necessary calculations of whether or not we're an
        // exception handling call site.
        AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall();

        auto restoreLiveRegistersFromStackForCall = [&](AccessGenerationState::SpillState& spillState, bool callHasReturnValue) {
            RegisterSet dontRestore;
            if (callHasReturnValue) {
                // This is the result value. We don't want to overwrite the result with what we stored to the stack.
                // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
                dontRestore.set(valueRegs);
            }
            state.restoreLiveRegistersFromStackForCall(spillState, dontRestore);
        };

        jit.store32(
            CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

        if (m_type == Getter || m_type == Setter) {
            auto& access = this->as<GetterSetterAccessCase>();
            ASSERT(baseGPR != loadedValueGPR);
            ASSERT(m_type != Setter || valueRegsPayloadGPR != loadedValueGPR);

            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            state.setSpillStateForJSGetterSetter(spillState);

            RELEASE_ASSERT(!access.callLinkInfo());
            access.m_callLinkInfo = makeUnique<CallLinkInfo>();

            // FIXME: If we generated a polymorphic call stub that jumped back to the getter
            // stub, which then jumped back to the main code, then we'd have a reachability
            // situation that the GC doesn't know about. The GC would ensure that the polymorphic
            // call stub stayed alive, and it would ensure that the main code stayed alive, but
            // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
            // be GC objects, and then we'd be able to say that the polymorphic call stub has a
            // reference to the getter stub.
            // https://bugs.webkit.org/show_bug.cgi?id=148914
            access.callLinkInfo()->disallowStubs();

            access.callLinkInfo()->setUpCall(
                CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);

            CCallHelpers::JumpList done;

            // There is a "this" argument.
            unsigned numberOfParameters = 1;
            // ... and a value argument if we're calling a setter.
            if (m_type == Setter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (m_type == Setter) {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                jit.loadPtr(
                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }

            CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
                CCallHelpers::Zero, loadedValueGPR);

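            // Build the callee frame just below the current stack pointer: the argument
            // count, the callee, "this", and (for a setter) the value to store, laid out
            // as CallFrame expects. The stack pointer is first moved down by an aligned
            // amount so the callee sees a properly aligned frame.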
1547 unsigned numberOfRegsForCall = CallFrame::headerSizeInRegisters + numberOfParameters;
1548 unsigned numberOfBytesForCall = numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);
1549
1550 unsigned alignedNumberOfBytesForCall =
1551 WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
1552
1553 jit.subPtr(
1554 CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
1555 CCallHelpers::stackPointerRegister);
1556
1557 CCallHelpers::Address calleeFrame = CCallHelpers::Address(
1558 CCallHelpers::stackPointerRegister,
1559 -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
1560
1561 jit.store32(
1562 CCallHelpers::TrustedImm32(numberOfParameters),
1563 calleeFrame.withOffset(CallFrameSlot::argumentCount * sizeof(Register) + PayloadOffset));
1564
1565 jit.storeCell(
1566 loadedValueGPR, calleeFrame.withOffset(CallFrameSlot::callee * sizeof(Register)));
1567
1568 jit.storeCell(
1569 thisGPR,
1570 calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));
1571
1572 if (m_type == Setter) {
1573 jit.storeValue(
1574 valueRegs,
1575 calleeFrame.withOffset(
1576 virtualRegisterForArgument(1).offset() * sizeof(Register)));
1577 }
1578
1579 CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
1580 CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
1581 CCallHelpers::TrustedImmPtr(nullptr));

            fastPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            // FIXME: Revisit JSGlobalObject.
            // https://bugs.webkit.org/show_bug.cgi?id=203204
            slowCase.link(&jit);
            jit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            // We *always* know that the getter/setter, if non-null, is a cell.
            jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            jit.move(CCallHelpers::TrustedImmPtr(access.callLinkInfo()), GPRInfo::regT2);
            jit.move(CCallHelpers::TrustedImmPtr(state.m_globalObject), GPRInfo::regT3);
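            // The moves above set up the convention the link-call thunk expects: the callee
            // in regT0 (tag in regT1 on 32-bit), the CallLinkInfo in regT2, and the global
            // object in regT3.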
            slowPathCall = jit.nearCall();
            if (m_type == Getter)
                jit.setupResults(valueRegs);
            done.append(jit.jump());

            returnUndefined.link(&jit);
            if (m_type == Getter)
                jit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&jit);

            jit.addPtr(CCallHelpers::TrustedImm32((codeBlock->stackPointerOffset() * sizeof(Register)) - state.preservedReusedRegisterState.numberOfBytesPreserved - spillState.numberOfStackBytesUsedForRegisterPreservation),
                GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);

            jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
                this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOfNearCall<JSInternalPtrTag>(slowPathCall)),
                    CodeLocationLabel<JSInternalPtrTag>(linkBuffer.locationOf<JSInternalPtrTag>(addressOfLinkFunctionCheck)),
                    linkBuffer.locationOfNearCall<JSInternalPtrTag>(fastPathCall));

                linkBuffer.link(
                    slowPathCall,
                    CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkCallThunkGenerator).code()));
            });
        } else {
            ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);

            // Need to make room for the C call so that any stack spillage of ours isn't
            // overwritten. It's hard to track whether anything was actually spilled, so we
            // conservatively always make space here.
            jit.makeSpaceOnStackForCCall();

            // Check if this is a super access, in which case the receiver differs from the base.
            GPRReg baseForCustomGetGPR = baseGPR != thisGPR ? thisGPR : baseForGetGPR;

            // getter: EncodedJSValue (*GetValueFunc)(JSGlobalObject*, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(JSGlobalObject*, EncodedJSValue thisObject, EncodedJSValue value);
            // Custom values are passed the slotBase (the property holder); custom accessors are passed the thisValue (receiver).
            // FIXME: Remove this difference between custom values and custom accessors.
            // https://bugs.webkit.org/show_bug.cgi?id=158014
            GPRReg baseForCustom = m_type == CustomValueGetter || m_type == CustomValueSetter ? baseForAccessGPR : baseForCustomGetGPR;
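            // For example, if a custom getter is found on a prototype, a CustomAccessorGetter
            // is passed the original receiver, while a CustomValueGetter is passed the
            // prototype that actually holds the slot.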
            // FIXME: Revisit JSGlobalObject.
            // https://bugs.webkit.org/show_bug.cgi?id=203204
            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter) {
                RELEASE_ASSERT(!m_identifier->isNull());
                jit.setupArguments<PropertySlot::GetValueFunc>(
                    CCallHelpers::TrustedImmPtr(codeBlock->globalObject()),
                    CCallHelpers::CellValue(baseForCustom),
                    CCallHelpers::TrustedImmPtr(uid()));
            } else {
                jit.setupArguments<PutPropertySlot::PutValueFunc>(
                    CCallHelpers::TrustedImmPtr(codeBlock->globalObject()),
                    CCallHelpers::CellValue(baseForCustom),
                    valueRegs);
            }
            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);

            operationCall = jit.call(OperationPtrTag);
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                linkBuffer.link(operationCall, this->as<GetterSetterAccessCase>().m_customAccessor);
            });

            if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
                jit.setupResults(valueRegs);
            jit.reclaimSpaceOnStackForCCall();

            CCallHelpers::Jump noException =
                jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

            state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
            state.emitExplicitExceptionHandler();

            noException.link(&jit);
            bool callHasReturnValue = isGetter();
            restoreLiveRegistersFromStackForCall(spillState, callHasReturnValue);
        }
        state.succeed();
        return;
    }

    case Replace: {
        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }
        state.succeed();
        return;
    }

    case Transition: {
        // AccessCase::transition() should have returned null if this wasn't true.
        RELEASE_ASSERT(GPRInfo::numberOfRegisters >= 6 || !structure()->outOfLineCapacity() || structure()->outOfLineCapacity() == newStructure()->outOfLineCapacity());

        // NOTE: This logic is duplicated in AccessCase::doesCalls(). It's important that doesCalls() knows
        // exactly when this would make calls.
        bool allocating = newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity();
        bool reallocating = allocating && structure()->outOfLineCapacity();
        bool allocatingInline = allocating && !structure()->couldHaveIndexingHeader();
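        // For example, adding a property to an object whose out-of-line capacity is
        // exhausted is "allocating"; if the object already had a butterfly it is also
        // "reallocating"; and if the structure cannot have an indexing header, we can
        // bump-allocate the new butterfly inline instead of calling an operation.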

        ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
        allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
        allocator.lock(stubInfo.patch.baseTagGPR);
#endif
        allocator.lock(valueRegs);
        allocator.lock(scratchGPR);

        GPRReg scratchGPR2 = InvalidGPRReg;
        GPRReg scratchGPR3 = InvalidGPRReg;
        if (allocatingInline) {
            scratchGPR2 = allocator.allocateScratchGPR();
            scratchGPR3 = allocator.allocateScratchGPR();
        }

        ScratchRegisterAllocator::PreservedState preservedState =
            allocator.preserveReusedRegistersByPushing(jit, ScratchRegisterAllocator::ExtraStackSpace::SpaceForCCall);

        CCallHelpers::JumpList slowPath;

        ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());

        if (allocating) {
            size_t newSize = newStructure()->outOfLineCapacity() * sizeof(JSValue);

            if (allocatingInline) {
                Allocator allocator = vm.jsValueGigacageAuxiliarySpace.allocatorFor(newSize, AllocatorForMode::AllocatorIfExists);

                jit.emitAllocate(scratchGPR, JITAllocator::constant(allocator), scratchGPR2, scratchGPR3, slowPath);
                jit.addPtr(CCallHelpers::TrustedImm32(newSize + sizeof(IndexingHeader)), scratchGPR);
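                // Out-of-line properties are stored at negative offsets from the butterfly
                // pointer, so advancing past the payload and the indexing header turns the
                // allocation's start address into the butterfly pointer.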

                size_t oldSize = structure()->outOfLineCapacity() * sizeof(JSValue);
                ASSERT(newSize > oldSize);

                if (reallocating) {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).

                    jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);

                    // We have scratchGPR = new storage, scratchGPR3 = old storage,
                    // scratchGPR2 = available
                    for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                        jit.loadPtr(
                            CCallHelpers::Address(
                                scratchGPR3,
                                -static_cast<ptrdiff_t>(
                                    offset + sizeof(JSValue) + sizeof(void*))),
                            scratchGPR2);
                        jit.storePtr(
                            scratchGPR2,
                            CCallHelpers::Address(
                                scratchGPR,
                                -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
                    }
                }

                for (size_t offset = oldSize; offset < newSize; offset += sizeof(void*))
                    jit.storePtr(CCallHelpers::TrustedImmPtr(nullptr), CCallHelpers::Address(scratchGPR, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
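                // Null out the slots added beyond the old capacity so the GC never scans
                // uninitialized memory in the new butterfly.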
            } else {
                // Handle the case where we are allocating out-of-line using an operation.
                RegisterSet extraRegistersToPreserve;
                extraRegistersToPreserve.set(baseGPR);
                extraRegistersToPreserve.set(valueRegs);
                AccessGenerationState::SpillState spillState = state.preserveLiveRegistersToStackForCall(extraRegistersToPreserve);

                jit.store32(
                    CCallHelpers::TrustedImm32(
                        state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
                    CCallHelpers::tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));

                jit.makeSpaceOnStackForCCall();

                if (!reallocating) {
                    jit.setupArguments<decltype(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity)>(CCallHelpers::TrustedImmPtr(&vm), baseGPR);
                    jit.prepareCallOperation(vm);

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
                    });
                } else {
                    // Handle the case where we are reallocating (i.e. the old structure/butterfly
                    // already had out-of-line property storage).
                    jit.setupArguments<decltype(operationReallocateButterflyToGrowPropertyStorage)>(CCallHelpers::TrustedImmPtr(&vm), baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));
                    jit.prepareCallOperation(vm);

                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                    jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                        linkBuffer.link(
                            operationCall,
                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToGrowPropertyStorage));
                    });
                }

                jit.reclaimSpaceOnStackForCCall();
                jit.move(GPRInfo::returnValueGPR, scratchGPR);

                CCallHelpers::Jump noException = jit.emitExceptionCheck(vm, CCallHelpers::InvertedExceptionCheck);

                state.restoreLiveRegistersFromStackForCallWithThrownException(spillState);
                state.emitExplicitExceptionHandler();

                noException.link(&jit);
                RegisterSet resultRegisterToExclude;
                resultRegisterToExclude.set(scratchGPR);
                state.restoreLiveRegistersFromStackForCall(spillState, resultRegisterToExclude);
            }
        }

        if (isInlineOffset(m_offset)) {
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(
                    baseGPR,
                    JSObject::offsetOfInlineStorage() +
                    offsetInInlineStorage(m_offset) * sizeof(JSValue)));
        } else {
            if (!allocating)
                jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            jit.storeValue(
                valueRegs,
                CCallHelpers::Address(scratchGPR, offsetInButterfly(m_offset) * sizeof(JSValue)));
        }

        if (allocatingInline) {
            // If we were to have any indexed properties, then we would need to update the indexing mask on the base object.
            RELEASE_ASSERT(!newStructure()->couldHaveIndexingHeader());
            // We set the new butterfly and the structure last. Doing it this way ensures that
            // whatever we have done up to this point is forgotten if we choose to branch to the
            // slow path.
            jit.nukeStructureAndStoreButterfly(vm, scratchGPR, baseGPR);
        }

        uint32_t structureBits = bitwise_cast<uint32_t>(newStructure()->id());
        jit.store32(
            CCallHelpers::TrustedImm32(structureBits),
            CCallHelpers::Address(baseGPR, JSCell::structureIDOffset()));
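        // The new structure ID is stored last, so the object is never observable with the
        // new structure before its property storage is actually in place.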

        allocator.restoreReusedRegistersByPopping(jit, preservedState);
        state.succeed();

        // We will have a slow path if we were allocating without the help of an operation.
        if (allocatingInline) {
            if (allocator.didReuseRegisters()) {
                slowPath.link(&jit);
                allocator.restoreReusedRegistersByPopping(jit, preservedState);
                state.failAndIgnore.append(jit.jump());
            } else
                state.failAndIgnore.append(slowPath);
        } else
            RELEASE_ASSERT(slowPath.empty());
        return;
    }

    case ArrayLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
        state.failAndIgnore.append(
            jit.branch32(CCallHelpers::LessThan, scratchGPR, CCallHelpers::TrustedImm32(0)));
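        // Array lengths are unsigned; a length with the sign bit set (>= 2^31) cannot be
        // boxed as an Int32, so such cases bail to the slow path.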
        jit.boxInt32(scratchGPR, valueRegs);
        state.succeed();
        return;
    }

    case StringLength: {
        jit.loadPtr(CCallHelpers::Address(baseGPR, JSString::offsetOfValue()), scratchGPR);
        auto isRope = jit.branchIfRopeStringImpl(scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, StringImpl::lengthMemoryOffset()), valueRegs.payloadGPR());
        auto done = jit.jump();

        isRope.link(&jit);
        jit.load32(CCallHelpers::Address(baseGPR, JSRopeString::offsetOfLength()), valueRegs.payloadGPR());
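        // A rope has no resolved StringImpl yet, so its length lives on the JSRopeString
        // itself rather than in a StringImpl.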

        done.link(&jit);
        jit.boxInt32(valueRegs.payloadGPR(), valueRegs);
        state.succeed();
        return;
    }

    case IntrinsicGetter: {
        RELEASE_ASSERT(isValidOffset(offset()));

        // We need to ensure the getter value does not move from under us. Note that GetterSetters
        // are immutable, so we just need to watch the property, not any value inside it.
        Structure* currStructure;
        if (!hasAlternateBase())
            currStructure = structure();
        else
            currStructure = alternateBase()->structure(vm);
        currStructure->startWatchingPropertyForReplacements(vm, offset());

        this->as<IntrinsicGetterAccessCase>().emitIntrinsicGetter(state);
        return;
    }

    case DirectArgumentsLength:
    case ScopedArgumentsLength:
    case ModuleNamespaceLoad:
    case InstanceOfGeneric:
    case IndexedInt32Load:
    case IndexedDoubleLoad:
    case IndexedContiguousLoad:
    case IndexedArrayStorageLoad:
    case IndexedScopedArgumentsLoad:
    case IndexedDirectArgumentsLoad:
    case IndexedTypedArrayInt8Load:
    case IndexedTypedArrayUint8Load:
    case IndexedTypedArrayUint8ClampedLoad:
    case IndexedTypedArrayInt16Load:
    case IndexedTypedArrayUint16Load:
    case IndexedTypedArrayInt32Load:
    case IndexedTypedArrayUint32Load:
    case IndexedTypedArrayFloat32Load:
    case IndexedTypedArrayFloat64Load:
    case IndexedStringLoad:
        // These need to be handled by generateWithGuard(), since the guard is part of the
        // algorithm. We can be sure that nobody will call generate() directly for these since they
        // are not guarded by structure checks.
        RELEASE_ASSERT_NOT_REACHED();
    }

    RELEASE_ASSERT_NOT_REACHED();
}

TypedArrayType AccessCase::toTypedArrayType(AccessType accessType)
{
    switch (accessType) {
    case IndexedTypedArrayInt8Load:
        return TypeInt8;
    case IndexedTypedArrayUint8Load:
        return TypeUint8;
    case IndexedTypedArrayUint8ClampedLoad:
        return TypeUint8Clamped;
    case IndexedTypedArrayInt16Load:
        return TypeInt16;
    case IndexedTypedArrayUint16Load:
        return TypeUint16;
    case IndexedTypedArrayInt32Load:
        return TypeInt32;
    case IndexedTypedArrayUint32Load:
        return TypeUint32;
    case IndexedTypedArrayFloat32Load:
        return TypeFloat32;
    case IndexedTypedArrayFloat64Load:
        return TypeFloat64;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }
}

#if !ASSERT_DISABLED
void AccessCase::checkConsistency(StructureStubInfo& stubInfo)
{
    RELEASE_ASSERT(!(requiresInt32PropertyCheck() && requiresIdentifierNameMatch()));

    if (stubInfo.hasConstantIdentifier) {
        RELEASE_ASSERT(!requiresInt32PropertyCheck());
        RELEASE_ASSERT(requiresIdentifierNameMatch());
    }
}
#endif

} // namespace JSC

#endif