/*
 * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGGraph.h"

#if ENABLE(DFG_JIT)

#include "BytecodeKills.h"
#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGBackwardsCFG.h"
#include "DFGBackwardsDominators.h"
#include "DFGBlockWorklist.h"
#include "DFGCFG.h"
#include "DFGClobberSet.h"
#include "DFGClobbersExitState.h"
#include "DFGControlEquivalenceAnalysis.h"
#include "DFGDominators.h"
#include "DFGFlowIndexing.h"
#include "DFGFlowMap.h"
#include "DFGJITCode.h"
#include "DFGMayExit.h"
#include "DFGNaturalLoops.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JSLexicalEnvironment.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "OperandsInlines.h"
#include "JSCInlines.h"
#include "StackAlignment.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>

namespace JSC { namespace DFG {

static constexpr bool dumpOSRAvailabilityData = false;

// Creates an array of stringized names.
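// FOR_EACH_DFG_OP expands each (opcode, flags) pair to the string literal "opcode", so the array is indexed by NodeType.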
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};

Graph::Graph(VM& vm, Plan& plan)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock())
    , m_profiledBlock(m_codeBlock->alternative())
    , m_ssaCFG(makeUnique<SSACFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);

    m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();

    m_indexingCache = makeUnique<FlowIndexing>(*this);
    m_abstractValuesCache = makeUnique<FlowMap<AbstractValue>>(*this);

    registerStructure(vm.structureStructure.get());
    this->stringStructure = registerStructure(vm.stringStructure.get());
    this->symbolStructure = registerStructure(vm.symbolStructure.get());
}

Graph::~Graph()
{
}

const char* Graph::opName(NodeType op)
{
    return dfgOpNames[op];
}

static void printWhiteSpace(PrintStream& out, unsigned amount)
{
    while (amount-- > 0)
        out.print(" ");
}

bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefixStr, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    Prefix myPrefix(prefixStr);
    Prefix& prefix = prefixStr ? myPrefix : m_prefix;

    if (!currentNode->origin.semantic)
        return false;

    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;

    if (previousNode->origin.semantic.inlineCallFrame() == currentNode->origin.semantic.inlineCallFrame())
        return false;

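    // The two origins come from different inline frames. Find where their inline stacks diverge,
    // then print the frames we leave (pops) followed by the frames we enter (pushes).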
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame() != currentInlineStack[i].inlineCallFrame()) {
            indexOfDivergence = i;
            break;
        }
    }

    bool hasPrinted = false;

    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame(), context), "\n");
        hasPrinted = true;
    }

    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame(), context), "\n");
        hasPrinted = true;
    }

    return hasPrinted;
}

int Graph::amountOfNodeWhiteSpace(Node* node)
{
    return (node->origin.semantic.inlineDepth() - 1) * 2;
}

void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}

void Graph::dump(PrintStream& out, const char* prefixStr, Node* node, DumpContext* context)
{
    Prefix myPrefix(prefixStr);
    Prefix& prefix = prefixStr ? myPrefix : m_prefix;

    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. They may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasNumberOfArgumentsToSkip())
        out.print(comma, "numberOfArgumentsToSkip = ", node->numberOfArgumentsToSkip());
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithUnaryType())
        out.print(comma, "Type:", node->arithUnaryType());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasArgumentIndex())
        out.print(comma, node->argumentIndex());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasClassInfo())
        out.print(comma, *node->classInfo());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet().toStructureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure().get(), context));
    if (node->op() == CPUIntrinsic)
        out.print(comma, intrinsicName(node->intrinsic()));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next.get()));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(m_vm, executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasQueriedType())
        out.print(comma, node->queriedType());
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    if (node->hasMatchStructureData()) {
        for (MatchStructureVariant& variant : node->matchStructureData().variants)
            out.print(comma, inContext(*variant.structure.get(), context), "=>", variant.result);
    }
    ASSERT(node->hasVariableAccessData(*this) == node->accessesStack(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasVectorLengthHint())
        out.print(comma, "vectorLengthHint = ", node->vectorLengthHint());
    if (node->hasLazyJSValue())
        out.print(comma, node->lazyJSValue());
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingMode()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->hasIsInternalPromise())
        out.print(comma, "isInternalPromise = ", node->isInternalPromise());
    if (node->hasInternalFieldIndex())
        out.print(comma, "internalFieldIndex = ", node->internalFieldIndex());
    if (node->hasCallDOMGetterData()) {
        CallDOMGetterData* data = node->callDOMGetterData();
        out.print(comma, "id", data->identifierNumber, "{", identifiers()[data->identifierNumber], "}");
        out.print(", domJIT = ", RawPointer(data->domJIT));
    }
    if (node->hasIgnoreLastIndexIsWritable())
        out.print(comma, "ignoreLastIndexIsWritable = ", node->ignoreLastIndexIsWritable());
    if (node->hasIntrinsic())
        out.print(comma, "intrinsic = ", node->intrinsic());
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->hasCallLinkStatus())
        out.print(comma, *node->callLinkStatus());
    if (node->hasGetByStatus())
        out.print(comma, *node->getByStatus());
    if (node->hasInByIdStatus())
        out.print(comma, *node->inByIdStatus());
    if (node->hasPutByIdStatus())
        out.print(comma, *node->putByIdStatus());
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    if (node->isEntrySwitch()) {
        EntrySwitchData* data = node->entrySwitchData();
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, BranchTarget(data->cases[i]));
    }
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, node->origin.semantic.bytecodeIndex());
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    out.print(comma, node->origin.exitOK ? "ExitValid" : "ExitInvalid");
    if (node->origin.wasHoisted)
        out.print(comma, "WasHoisted");
    out.print(")");

    if (node->accessesStack(*this) && node->tryGetVariableAccessData())
        out.print(" predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print(" predicting ", SpeculationDump(node->getHeapPrediction()));

    out.print("\n");
}

bool Graph::terminalsAreValid()
{
    for (BasicBlock* block : blocksInNaturalOrder()) {
        if (!block->terminal())
            return false;
    }
    return true;
}

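// Overloads that let printNaturalLoops in dumpBlockHeader work uniformly over both loop
// representations: SSA loops hold BasicBlock* directly, CPS loops hold CPSCFG::Node.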
static BasicBlock* unboxLoopNode(const CPSCFG::Node& node) { return node.node(); }
static BasicBlock* unboxLoopNode(BasicBlock* block) { return block; }

void Graph::dumpBlockHeader(PrintStream& out, const char* prefixStr, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    Prefix myPrefix(prefixStr);
    Prefix& prefix = prefixStr ? myPrefix : m_prefix;

    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):",
        block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", block->isCatchEntrypoint ? " (Catch Entrypoint)" : "", "\n");
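    // Note: the self-comparison below is a NaN check; the execution count is printed only when it is a real number.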
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
        }
    } else
        out.print(" <invalid>");
    out.print("\n");

    auto printDominators = [&] (auto& dominators) {
        out.print(prefix, "  Dominated by: ", dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ",
            dominators.iteratedDominanceFrontierOf(typename std::remove_reference<decltype(dominators)>::type::List { block }), "\n");
    };

    if (terminalsAreValid()) {
        if (m_ssaDominators)
            printDominators(*m_ssaDominators);
        else if (m_cpsDominators)
            printDominators(*m_cpsDominators);
    }

    if (m_backwardsDominators && terminalsAreValid()) {
        out.print(prefix, "  Backwards dominated by: ", m_backwardsDominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Backwards dominates: ", m_backwardsDominators->blocksDominatedBy(block), "\n");
    }
    if (m_controlEquivalenceAnalysis && terminalsAreValid()) {
        out.print(prefix, "  Control equivalent to:");
        for (BasicBlock* otherBlock : blocksInNaturalOrder()) {
            if (m_controlEquivalenceAnalysis->areEquivalent(block, otherBlock))
                out.print(" ", *otherBlock);
        }
        out.print("\n");
    }

    auto printNaturalLoops = [&] (auto& naturalLoops) {
        if (const auto* loop = naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(unboxLoopNode(loop->at(i))->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }

        auto containingLoops = naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *unboxLoopNode(containingLoops[i]->header()));
            out.print("\n");
        }
    };

    if (m_ssaNaturalLoops)
        printNaturalLoops(m_ssaNaturalLoops);
    else if (m_cpsNaturalLoops)
        printNaturalLoops(m_cpsNaturalLoops);

    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}

void Graph::dump(PrintStream& out, DumpContext* context)
{
    Prefix& prefix = m_prefix;
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;

    out.print("\n");
    out.print(prefix, "DFG for ", CodeBlockWithJITType(m_codeBlock, JITType::DFGJIT), ":\n");
    out.print(prefix, "  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA) {
        for (unsigned entrypointIndex = 0; entrypointIndex < m_argumentFormats.size(); ++entrypointIndex)
            out.print(prefix, "  Argument formats for entrypoint index: ", entrypointIndex, " : ", listDump(m_argumentFormats[entrypointIndex]), "\n");
    } else {
        for (const auto& pair : m_rootToArguments)
            out.print(prefix, "  Arguments for block#", pair.key->index, ": ", listDump(pair.value), "\n");
    }
    out.print("\n");

    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        prefix.blockIndex = block->index;
        dumpBlockHeader(out, Prefix::noString, block, DumpAllPhis, context);
        out.print(prefix, "  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print(prefix, "  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print(prefix, "  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print(prefix, "  Var Links: ", block->variablesAtHead, "\n");
            break;
        }

        case SSA: {
            RELEASE_ASSERT(block->ssa);
            if (dumpOSRAvailabilityData)
                out.print(prefix, "  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print(prefix, "  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print(prefix, "  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            prefix.clearNodeIndex();
            dumpCodeOrigin(out, Prefix::noString, lastNode, block->at(i), context);
            prefix.nodeIndex = i;
            dump(out, Prefix::noString, block->at(i), context);
        }
        prefix.clearNodeIndex();
        out.print(prefix, "  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print(prefix, "  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print(prefix, "  Var Links: ", block->variablesAtTail, "\n");
            break;
        }

        case SSA: {
            RELEASE_ASSERT(block->ssa);
            if (dumpOSRAvailabilityData)
                out.print(prefix, "  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print(prefix, "  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print(prefix, "  Values: ", nodeValuePairListDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    prefix.clearBlockIndex();

    out.print(prefix, "GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print(prefix, "    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));

    if (!myContext.isEmpty()) {
        StringPrintStream prefixStr;
        prefixStr.print(prefix);
        myContext.dump(out, prefixStr.toCString().data());
        out.print("\n");
    }
}

void Graph::deleteNode(Node* node)
{
    if (validationEnabled() && m_form == SSA) {
        for (BasicBlock* block : blocksInNaturalOrder()) {
            DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
            DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
        }
    }

    m_nodes.remove(node);
}

void Graph::packNodeIndices()
{
    m_nodes.packIndices();
}

void Graph::dethread()
{
    if (m_form == LoadStore || m_form == SSA)
        return;

    if (logCompilationChanges())
        dataLog("Dethreading DFG graph.\n");

    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
            Node* phi = block->phis[phiIndex];
            phi->children.reset();
        }
    }

    m_form = LoadStore;
}

void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
{
    if (!successor->isReachable) {
        successor->isReachable = true;
        worklist.append(successor);
    }

    if (!successor->predecessors.contains(block))
        successor->predecessors.append(block);
}

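// Standard worklist traversal: marks every block transitively reachable from the roots,
// recording predecessor edges along the way via handleSuccessor.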
void Graph::determineReachability()
{
    Vector<BasicBlock*, 16> worklist;
    for (BasicBlock* entrypoint : m_roots) {
        entrypoint->isReachable = true;
        worklist.append(entrypoint);
    }
    while (!worklist.isEmpty()) {
        BasicBlock* block = worklist.takeLast();
        for (unsigned i = block->numSuccessors(); i--;)
            handleSuccessor(worklist, block, block->successor(i));
    }
}

void Graph::resetReachability()
{
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        block->isReachable = false;
        block->predecessors.clear();
    }

    determineReachability();
}

namespace {

class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }

    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }

        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }

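        // Fixpoint: draining the worklist may mark more Phis live, and in SSA form a live Phi
        // makes its Upsilons live, which can in turn enqueue more nodes. Repeat until stable.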
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }

            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }

private:
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }

    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }

    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }

    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace

void Graph::computeRefCounts()
{
    RefCountCalculator calculator(*this);
    calculator.calculate();
}

void Graph::killBlockAndItsContents(BasicBlock* block)
{
    if (auto& ssaData = block->ssa)
        ssaData->invalidate();
    for (unsigned phiIndex = block->phis.size(); phiIndex--;)
        deleteNode(block->phis[phiIndex]);
    for (Node* node : *block)
        deleteNode(node);

    killBlock(block);
}

void Graph::killUnreachableBlocks()
{
    invalidateNodeLiveness();

    for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        if (block->isReachable)
            continue;

        dataLogIf(Options::verboseDFGBytecodeParsing(), "Basic block #", blockIndex, " was killed because it was unreachable\n");
        killBlockAndItsContents(block);
    }
}

void Graph::invalidateCFG()
{
    m_cpsDominators = nullptr;
    m_ssaDominators = nullptr;
    m_cpsNaturalLoops = nullptr;
    m_ssaNaturalLoops = nullptr;
    m_controlEquivalenceAnalysis = nullptr;
    m_backwardsDominators = nullptr;
    m_backwardsCFG = nullptr;
    m_cpsCFG = nullptr;
}

void Graph::invalidateNodeLiveness()
{
    if (m_form != SSA)
        return;

    for (BasicBlock* block : blocksInNaturalOrder())
        block->ssa->invalidate();
}

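// Replaces uses of the variable's GetLocal from startIndexInBlock onward with newGetLocal,
// stopping once the variable is redefined by a SetLocal or a matching GetLocal is substituted.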
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }

        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }

        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}

BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    result.reserveInitialCapacity(m_blocks.size());
    BlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }

    if (validationEnabled()) {
        // When iterating over pre order, we should see dominators
        // before things they dominate.
        auto validateResults = [&] (auto& dominators) {
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i < j);
                }
            }
        };

        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }
    return result;
}

BlockList Graph::blocksInPostOrder(bool isSafeToValidate)
{
    BlockList result;
    result.reserveInitialCapacity(m_blocks.size());
    PostOrderBlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case VisitOrder::Pre:
            worklist.pushPost(item.node);
            for (unsigned i = item.node->numSuccessors(); i--;)
                worklist.push(item.node->successor(i));
            break;
        case VisitOrder::Post:
            result.append(item.node);
            break;
        }
    }

    if (isSafeToValidate && validationEnabled()) { // There are users of this where we haven't yet built up the CFG enough to be able to run dominators.
        auto validateResults = [&] (auto& dominators) {
            // When iterating over reverse post order, we should see dominators
            // before things they dominate.
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i > j);
                }
            }
        };

        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }

    return result;
}

void Graph::clearReplacements()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setReplacement(nullptr);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setReplacement(nullptr);
    }
}

void Graph::clearEpochs()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setEpoch(Epoch());
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setEpoch(Epoch());
    }
}

void Graph::initializeNodeOwners()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->owner = block;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->owner = block;
    }
}

void Graph::clearFlagsOnAllNodes(NodeFlags flags)
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->clearFlags(flags);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->clearFlags(flags);
    }
}

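// Records the condition's objects as desired weak references and adds a watchpoint for the
// condition itself; Presence conditions additionally mark the (object, offset) pair as safe to load.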
bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;

    DesiredWeakReferences& weakReferences = m_plan.weakReferences();
    weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        weakReferences.addLazily(key.requiredValue());

    m_plan.watchpoints().addLazily(key);

    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));

    return true;
}

bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
{
    if (!keys.isValid())
        return false;

    for (const ObjectPropertyCondition& key : keys) {
        if (!watchCondition(key))
            return false;
    }
    return true;
}

bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
{
    return m_safeToLoad.contains(std::make_pair(base, offset));
}

bool Graph::watchGlobalProperty(JSGlobalObject* globalObject, unsigned identifierNumber)
{
    UniquedStringImpl* uid = identifiers()[identifierNumber];
    // If we already have a WatchpointSet, and it is already invalidated, then this scope operation
    // must be changed from GlobalProperty to GlobalLexicalVar. We still have stale metadata here
    // because this bytecode operation has not executed since the invalidation, so we just emit
    // ForceOSRExit, which updates the metadata when execution reaches this code.
    if (auto* watchpoint = globalObject->getReferencedPropertyWatchpointSet(uid)) {
        if (!watchpoint->isStillValid())
            return false;
    }
    globalProperties().addLazily(DesiredGlobalProperty(globalObject, identifierNumber));
    return true;
}

FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
    if (iter != m_bytecodeLiveness.end())
        return *iter->value;

    std::unique_ptr<FullBytecodeLiveness> liveness = makeUnique<FullBytecodeLiveness>();
    codeBlock->livenessAnalysis().computeFullLiveness(codeBlock, *liveness);
    FullBytecodeLiveness& result = *liveness;
    m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
    return result;
}

FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}

BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
    if (iter != m_bytecodeKills.end())
        return *iter->value;

    std::unique_ptr<BytecodeKills> kills = makeUnique<BytecodeKills>();
    codeBlock->livenessAnalysis().computeKills(codeBlock, *kills);
    BytecodeKills& result = *kills;
    m_bytecodeKills.add(codeBlock, WTFMove(kills));
    return result;
}

BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
{
    return killsFor(baselineCodeBlockFor(inlineCallFrame));
}

bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    static constexpr bool verbose = false;

    if (verbose)
        dataLog("Checking if operand is live: ", operand, "\n");
    CodeOrigin* codeOriginPtr = &codeOrigin;
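    // Walk up the inline stack, rebasing the operand into each frame's coordinate space, until
    // some frame can answer the liveness question.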
    for (;;) {
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());

        if (verbose)
            dataLog("reg = ", reg, "\n");

        auto* inlineCallFrame = codeOriginPtr->inlineCallFrame();
        if (operand.offset() < codeOriginPtr->stackOffset() + CallFrame::headerSizeInRegisters) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < CallFrame::headerSizeInRegisters);

                if (inlineCallFrame->isClosureCall
                    && reg.offset() == CallFrameSlot::callee) {
                    if (verbose)
                        dataLog("Looks like a callee.\n");
                    return true;
                }

                if (inlineCallFrame->isVarargs()
                    && reg.offset() == CallFrameSlot::argumentCount) {
                    if (verbose)
                        dataLog("Looks like the argument count.\n");
                    return true;
                }

                return false;
            }

            if (verbose)
                dataLog("Asking the bytecode liveness.\n");
            return livenessFor(inlineCallFrame).operandIsLive(reg.offset(), codeOriginPtr->bytecodeIndex());
        }

        if (!inlineCallFrame) {
            if (verbose)
                dataLog("Ran out of stack, returning true.\n");
            return true;
        }

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->argumentsWithFixup.size()) {
            if (verbose)
                dataLog("Argument is live.\n");
            return true;
        }

        // We need to handle tail callers because we may decide to exit to the
        // return bytecode following the tail call.
        codeOriginPtr = &inlineCallFrame->directCaller;
    }

    RELEASE_ASSERT_NOT_REACHED();
}

BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
{
    BitVector result;
    result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
    forAllLocalsLiveInBytecode(
        codeOrigin,
        [&] (VirtualRegister reg) {
            ASSERT(reg.isLocal());
            result.quickSet(reg.toLocal());
        });
    return result;
}

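// Computes the stack slots needed for an outgoing call: frame header plus arguments, rounded up
// to stack alignment, minus the CallerFrameAndPC slots (which the call sequence itself accounts for).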
1214unsigned Graph::parameterSlotsForArgCount(unsigned argCount)
1215{
1216 size_t frameSize = CallFrame::headerSizeInRegisters + argCount;
1217 size_t alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
1218 return alignedFrameSize - CallerFrameAndPC::sizeInRegisters;
1219}
1220
1221unsigned Graph::frameRegisterCount()
1222{
1223 unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
1224 return roundLocalRegisterCountForFramePointerOffset(result);
1225}
1226
1227unsigned Graph::stackPointerOffset()
1228{
1229 return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
1230}
1231
1232unsigned Graph::requiredRegisterCountForExit()
1233{
1234 unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
1235 for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames()->begin(); !!iter; ++iter) {
1236 InlineCallFrame* inlineCallFrame = *iter;
1237 CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
1238 unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
1239 count = std::max(count, requiredCount);
1240 }
1241 return count;
1242}
1243
1244unsigned Graph::requiredRegisterCountForExecutionAndExit()
1245{
1246 // FIXME: We should make sure that frameRegisterCount() and requiredRegisterCountForExit()
1247 // never overflows. https://bugs.webkit.org/show_bug.cgi?id=173852
1248 return std::max(frameRegisterCount(), requiredRegisterCountForExit());
1249}
1250
1251JSValue Graph::tryGetConstantProperty(
1252 JSValue base, const RegisteredStructureSet& structureSet, PropertyOffset offset)
1253{
1254 if (!base || !base.isObject())
1255 return JSValue();
1256
1257 JSObject* object = asObject(base);
1258
1259 for (unsigned i = structureSet.size(); i--;) {
1260 RegisteredStructure structure = structureSet[i];
1261
1262 WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
1263 if (!set || !set->isStillValid())
1264 return JSValue();
1265
1266 ASSERT(structure->isValidOffset(offset));
1267 ASSERT(!structure->isUncacheableDictionary());
1268
1269 watchpoints().addLazily(set);
1270 }
1271
1272 // What follows may require some extra thought. We need this load to load a valid JSValue. If
1273 // our profiling makes sense and we're still on track to generate code that won't be
1274 // invalidated, then we have nothing to worry about. We do, however, have to worry about
1275 // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
1276 // is doomed.
1277 //
1278 // One argument in favor of this code is that it should definitely work because the butterfly
1279 // is always set before the structure. However, we don't currently have a fence between those
1280 // stores. It's not clear if this matters, however. We only shrink the propertyStorage while
1281 // holding the Structure's lock. So, for this to fail, you'd need an access on a constant
1282 // object pointer such that the inline caches told us that the object had a structure that it
1283 // did not *yet* have, and then later,the object transitioned to that structure that the inline
1284 // caches had already seen. And then the processor reordered the stores. Seems unlikely and
1285 // difficult to test. I believe that this is worth revisiting but it isn't worth losing sleep
1286 // over. Filed:
1287 // https://bugs.webkit.org/show_bug.cgi?id=134641
1288 //
1289 // For now, we just do the minimal thing: defend against the structure right now being
1290 // incompatible with the getDirect we're trying to do. The easiest way to do that is to
1291 // determine if the structure belongs to the proven set.
1292
1293 Structure* structure = object->structure(m_vm);
1294 if (!structureSet.toStructureSet().contains(structure))
1295 return JSValue();
1296
1297 return object->getDirectConcurrently(structure, offset);
1298}
1299
1300JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
1301{
1302 return tryGetConstantProperty(base, RegisteredStructureSet(registerStructure(structure)), offset);
1303}
1304
1305JSValue Graph::tryGetConstantProperty(
1306 JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1307{
1308 if (structure.isInfinite()) {
1309 // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1310 // watching to constant-fold the property.
1311 // https://bugs.webkit.org/show_bug.cgi?id=147271
1312 return JSValue();
1313 }
1314
1315 return tryGetConstantProperty(base, structure.set(), offset);
1316}
1317
1318JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
1319{
1320 return tryGetConstantProperty(base.m_value, base.m_structure, offset);
1321}
1322
1323AbstractValue Graph::inferredValueForProperty(
1324 const AbstractValue& base, PropertyOffset offset,
1325 StructureClobberState clobberState)
1326{
1327 if (JSValue value = tryGetConstantProperty(base, offset)) {
1328 AbstractValue result;
1329 result.set(*this, *freeze(value), clobberState);
1330 return result;
1331 }
1332
1333 return AbstractValue::heapTop();
1334}
1335
1336JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
1337{
1338 // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
1339
1340 if (!base)
1341 return JSValue();
1342
1343 JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(m_vm, base);
1344 if (!activation)
1345 return JSValue();
1346
1347 SymbolTable* symbolTable = activation->symbolTable();
1348 JSValue value;
1349 WatchpointSet* set;
1350 {
1351 ConcurrentJSLocker locker(symbolTable->m_lock);
1352
1353 SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
1354 if (!entry)
1355 return JSValue();
1356
1357 set = entry->watchpointSet();
1358 if (!set)
1359 return JSValue();
1360
1361 if (set->state() != IsWatched)
1362 return JSValue();
1363
1364 ASSERT(entry->scopeOffset() == offset);
1365 value = activation->variableAt(offset).get();
1366 if (!value)
1367 return JSValue();
1368 }
1369
1370 watchpoints().addLazily(set);
1371
1372 return value;
1373}
1374
1375JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
1376{
1377 return tryGetConstantClosureVar(value.m_value, offset);
1378}
1379
1380JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1381{
1382 if (!node->hasConstant())
1383 return JSValue();
1384 return tryGetConstantClosureVar(node->asJSValue(), offset);
1385}
1386
1387JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1388{
1389 if (!value)
1390 return nullptr;
1391 JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(m_vm, value);
1392 if (!view)
1393 return nullptr;
1394 if (!view->length())
1395 return nullptr;
1396 WTF::loadLoadFence();
1397 watchpoints().addLazily(view);
1398 return view;
1399}
1400
1401JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1402{
1403 if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1404 return nullptr;
1405 return tryGetFoldableView(value);
1406}
1407
1408void Graph::registerFrozenValues()
1409{
1410 ConcurrentJSLocker locker(m_codeBlock->m_lock);
1411 m_codeBlock->constants().shrink(0);
1412 m_codeBlock->constantsSourceCodeRepresentation().resize(0);
1413 for (FrozenValue* value : m_frozenValues) {
1414 if (!value->pointsToHeap())
1415 continue;
1416
1417 ASSERT(value->structure());
1418 ASSERT(m_plan.weakReferences().contains(value->structure()));
1419
1420 switch (value->strength()) {
1421 case WeakValue: {
1422 m_plan.weakReferences().addLazily(value->value().asCell());
1423 break;
1424 }
1425 case StrongValue: {
1426 unsigned constantIndex = m_codeBlock->addConstantLazily(locker);
1427 // We already have a barrier on the code block.
1428 m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
1429 break;
1430 } }
1431 }
1432 m_codeBlock->constants().shrinkToFit();
1433 m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
1434}
1435
1436void Graph::visitChildren(SlotVisitor& visitor)
1437{
1438 for (FrozenValue* value : m_frozenValues) {
1439 visitor.appendUnbarriered(value->value());
1440 visitor.appendUnbarriered(value->structure());
1441 }
1442}
1443
1444FrozenValue* Graph::freeze(JSValue value)
1445{
1446 if (UNLIKELY(!value))
1447 return FrozenValue::emptySingleton();
1448
1449 // There are weird relationships in how optimized CodeBlocks
1450 // point to other CodeBlocks. We don't want to have them be
1451 // part of the weak pointer set. For example, an optimized CodeBlock
1452 // having a weak pointer to itself will cause it to get collected.
1453 RELEASE_ASSERT(!jsDynamicCast<CodeBlock*>(m_vm, value));
1454
1455 auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
1456 if (LIKELY(!result.isNewEntry))
1457 return result.iterator->value;
1458
1459 if (value.isUInt32())
1460 m_uint32ValuesInUse.append(value.asUInt32());
1461
1462 FrozenValue frozenValue = FrozenValue::freeze(value);
1463 if (Structure* structure = frozenValue.structure())
1464 registerStructure(structure);
1465
1466 return result.iterator->value = m_frozenValues.add(frozenValue);
1467}
1468
1469FrozenValue* Graph::freezeStrong(JSValue value)
1470{
1471 FrozenValue* result = freeze(value);
1472 result->strengthenTo(StrongValue);
1473 return result;
1474}
1475
1476void Graph::convertToConstant(Node* node, FrozenValue* value)
1477{
1478 if (value->structure())
1479 assertIsRegistered(value->structure());
1480 node->convertToConstant(value);
1481}
1482
1483void Graph::convertToConstant(Node* node, JSValue value)
1484{
1485 convertToConstant(node, freeze(value));
1486}
1487
1488void Graph::convertToStrongConstant(Node* node, JSValue value)
1489{
1490 convertToConstant(node, freezeStrong(value));
1491}
1492
1493RegisteredStructure Graph::registerStructure(Structure* structure, StructureRegistrationResult& result)
1494{
1495 m_plan.weakReferences().addLazily(structure);
1496 if (m_plan.watchpoints().consider(structure))
1497 result = StructureRegisteredAndWatched;
1498 else
1499 result = StructureRegisteredNormally;
1500 return RegisteredStructure::createPrivate(structure);
1501}
1502
1503void Graph::registerAndWatchStructureTransition(Structure* structure)
1504{
1505 m_plan.weakReferences().addLazily(structure);
1506 m_plan.watchpoints().addLazily(structure->transitionWatchpointSet());
1507}
1508
1509void Graph::assertIsRegistered(Structure* structure)
1510{
1511 // It's convenient to be able to call this with a maybe-null structure.
1512 if (!structure)
1513 return;
1514
1515 DFG_ASSERT(*this, nullptr, m_plan.weakReferences().contains(structure));
1516
1517 if (!structure->dfgShouldWatch())
1518 return;
1519 if (watchpoints().isWatched(structure->transitionWatchpointSet()))
1520 return;
1521
1522 DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
1523}
1524
1525static void logDFGAssertionFailure(
1526 Graph& graph, const CString& whileText, const char* file, int line, const char* function,
1527 const char* assertion)
1528{
1529 startCrashing();
1530 dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1531 dataLog(file, "(", line, ") : ", function, "\n");
1532 dataLog("\n");
1533 dataLog(whileText);
1534 dataLog("Graph at time of failure:\n");
1535 graph.dump();
1536 dataLog("\n");
1537 dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1538 dataLog(file, "(", line, ") : ", function, "\n");
1539}
1540
1541void Graph::logAssertionFailure(
1542 std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
1543{
1544 logDFGAssertionFailure(*this, "", file, line, function, assertion);
1545}
1546
1547void Graph::logAssertionFailure(
1548 Node* node, const char* file, int line, const char* function, const char* assertion)
1549{
1550 logDFGAssertionFailure(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1551}
1552
1553void Graph::logAssertionFailure(
1554 BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1555{
1556 logDFGAssertionFailure(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1557}
1558
CPSCFG& Graph::ensureCPSCFG()
{
    RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
    if (!m_cpsCFG)
        m_cpsCFG = makeUnique<CPSCFG>(*this);
    return *m_cpsCFG;
}

CPSDominators& Graph::ensureCPSDominators()
{
    RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
    if (!m_cpsDominators)
        m_cpsDominators = makeUnique<CPSDominators>(*this);
    return *m_cpsDominators;
}

SSADominators& Graph::ensureSSADominators()
{
    RELEASE_ASSERT(m_form == SSA || m_isInSSAConversion);
    if (!m_ssaDominators)
        m_ssaDominators = makeUnique<SSADominators>(*this);
    return *m_ssaDominators;
}

CPSNaturalLoops& Graph::ensureCPSNaturalLoops()
{
    RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
    ensureCPSDominators();
    if (!m_cpsNaturalLoops)
        m_cpsNaturalLoops = makeUnique<CPSNaturalLoops>(*this);
    return *m_cpsNaturalLoops;
}

SSANaturalLoops& Graph::ensureSSANaturalLoops()
{
    RELEASE_ASSERT(m_form == SSA);
    ensureSSADominators();
    if (!m_ssaNaturalLoops)
        m_ssaNaturalLoops = makeUnique<SSANaturalLoops>(*this);
    return *m_ssaNaturalLoops;
}

BackwardsCFG& Graph::ensureBackwardsCFG()
{
    // We could easily relax this in the future to work over CPS, but today, it's only used in SSA.
    RELEASE_ASSERT(m_form == SSA);
    if (!m_backwardsCFG)
        m_backwardsCFG = makeUnique<BackwardsCFG>(*this);
    return *m_backwardsCFG;
}

BackwardsDominators& Graph::ensureBackwardsDominators()
{
    RELEASE_ASSERT(m_form == SSA);
    if (!m_backwardsDominators)
        m_backwardsDominators = makeUnique<BackwardsDominators>(*this);
    return *m_backwardsDominators;
}

ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
{
    RELEASE_ASSERT(m_form == SSA);
    if (!m_controlEquivalenceAnalysis)
        m_controlEquivalenceAnalysis = makeUnique<ControlEquivalenceAnalysis>(*this);
    return *m_controlEquivalenceAnalysis;
}

MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* currentNode, Node* operandNode)
{
    // This represents IR like `CurrentNode(@operandNode)`. For example: `GetByVal(..., Int32:@GetLocal)`.

    for (Node* node = operandNode; node;) {
        // currentNode is null when we're doing speculation checks for checkArgumentTypes().
        if (!currentNode || node->origin.semantic != currentNode->origin.semantic || !currentNode->hasResult()) {
            CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

            if (node->accessesStack(*this)) {
                if (m_form != SSA && node->local().isArgument()) {
                    int argument = node->local().toArgument();
                    Node* argumentNode = m_rootToArguments.find(block(0))->value[argument];
                    // FIXME: We should match SetArgumentDefinitely nodes at other entrypoints as well:
                    // https://bugs.webkit.org/show_bug.cgi?id=175841
                    if (argumentNode && node->variableAccessData() == argumentNode->variableAccessData())
                        return &profiledBlock->valueProfileForArgument(argument);
                }

                if (node->op() == GetLocal) {
                    return MethodOfGettingAValueProfile::fromLazyOperand(
                        profiledBlock,
                        LazyOperandValueProfileKey(
                            node->origin.semantic.bytecodeIndex(), node->local()));
                }
            }

            if (node->hasHeapPrediction())
                return &profiledBlock->valueProfileForBytecodeIndex(node->origin.semantic.bytecodeIndex());

            if (profiledBlock->hasBaselineJITProfiling()) {
                if (BinaryArithProfile* result = profiledBlock->binaryArithProfileForBytecodeIndex(node->origin.semantic.bytecodeIndex()))
                    return result;
                if (UnaryArithProfile* result = profiledBlock->unaryArithProfileForBytecodeIndex(node->origin.semantic.bytecodeIndex()))
                    return result;
            }
        }

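        // The ops below simply forward their operand (possibly changing its
        // representation), so we can look through them to child1 and keep searching for
        // a node that carries profiling. Any other op ends the search.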
        switch (node->op()) {
        case BooleanToNumber:
        case Identity:
        case ValueRep:
        case DoubleRep:
        case Int52Rep:
            node = node->child1().node();
            break;
        default:
            node = nullptr;
        }
    }

    return MethodOfGettingAValueProfile();
}

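// Concurrently (i.e. from the compiler thread) tries to read the named property of
// RegExp.prototype as a compile-time constant, returning false if the value cannot be
// proven constant.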
bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
{
    unsigned attributesUnused;
    PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
    if (!isValidOffset(offset))
        return false;

    JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
    if (!value)
        return false;

    // We only care about functions and getters at this point. If you want to access other properties
    // you'll have to add code for those types.
    JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, value);
    if (!function) {
        GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(m_vm, value);

        if (!getterSetter)
            return false;

        returnJSValue = JSValue(getterSetter);
        return true;
    }

    returnJSValue = value;
    return true;
}

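// A String.prototype method is considered sane if looking it up through StringObject's
// prototype chain provably yields a JSFunction with the StringPrototypeValueOfIntrinsic,
// and the generated equivalence conditions can be watched, so that the compiled code is
// jettisoned if the method is ever replaced.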
bool Graph::isStringPrototypeMethodSane(JSGlobalObject* globalObject, UniquedStringImpl* uid)
{
    ObjectPropertyConditionSet conditions = generateConditionsForPrototypeEquivalenceConcurrently(m_vm, globalObject, globalObject->stringObjectStructure(), globalObject->stringPrototype(), uid);

    if (!conditions.isValid())
        return false;

    ObjectPropertyCondition equivalenceCondition = conditions.slotBaseCondition();
    RELEASE_ASSERT(equivalenceCondition.hasRequiredValue());
    JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, equivalenceCondition.condition().requiredValue());
    if (!function)
        return false;

    if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
        return false;

    return watchConditions(conditions);
}

bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
{
    if (hasExitSite(codeOrigin, BadCache) || hasExitSite(codeOrigin, BadConstantCache))
        return false;

    JSGlobalObject* globalObject = globalObjectFor(codeOrigin);
    Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
    registerStructure(stringObjectStructure);
    ASSERT(stringObjectStructure->storedPrototype().isObject());
    ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo(stringObjectStructure->storedPrototype().asCell()->vm()) == StringPrototype::info());

    if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObject, stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
        return false;

    // We're being conservative here. We want DFG's ToString on StringObject to be
    // used in both numeric contexts (that would call valueOf()) and string contexts
    // (that would call toString()). We don't want the DFG to have to distinguish
    // between the two, just because that seems like it would get confusing. So we
    // just require both methods to be sane.
    if (!isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->valueOf.impl()))
        return false;
    return isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->toString.impl());
}

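// Determines whether an exception thrown at codeOrigin would be caught within this
// machine frame. We walk outward from the throwing origin through the inline call
// frames, asking each baseline CodeBlock for a handler; if we run out of inline frames
// without finding one, the exception will unwind out of this compilation unit.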
bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
{
    if (!m_hasExceptionHandlers)
        return false;

    BytecodeIndex bytecodeIndexToCheck = codeOrigin.bytecodeIndex();
    while (1) {
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame();
        CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
        if (HandlerInfo* handler = codeBlock->handlerForBytecodeIndex(bytecodeIndexToCheck)) {
            opCatchOriginOut = CodeOrigin(BytecodeIndex(handler->target), inlineCallFrame);
            catchHandlerOut = handler;
            return true;
        }

        if (!inlineCallFrame)
            return false;

        bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex();
        codeOrigin = inlineCallFrame->directCaller;
    }

    RELEASE_ASSERT_NOT_REACHED();
}

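// Spread(@array) has a fast path that avoids running the iterator protocol. It is only
// sound when user code cannot observe the difference, which the checks below (together
// with the ArrayUse speculation's watchpoints) are meant to establish.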
bool Graph::canDoFastSpread(Node* node, const AbstractValue& value)
{
    // The parameter 'value' is the AbstractValue for child1 (the thing being spread).
    ASSERT(node->op() == Spread);

    if (node->child1().useKind() != ArrayUse) {
        // Note: we only speculate on ArrayUse when we've set up the necessary watchpoints
        // to prove that the iteration protocol is non-observable starting from ArrayPrototype.
        return false;
    }

    // FIXME: We should add profiling of the incoming operand to Spread
    // so we can speculate in such a way that we guarantee that this
    // function would return true:
    // https://bugs.webkit.org/show_bug.cgi?id=171198

    if (!value.m_structure.isFinite())
        return false;

    ArrayPrototype* arrayPrototype = globalObjectFor(node->child1()->origin.semantic)->arrayPrototype();
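    // Fast spread is safe only if every structure the operand may have provably has this
    // global object's Array.prototype as its one and only prototype, is not a
    // dictionary, does not shadow Symbol.iterator with an own property (so the lookup
    // falls through to the watched ArrayPrototype), and cannot intercept indexed
    // accesses.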
    bool allGood = true;
    value.m_structure.forEach([&] (RegisteredStructure structure) {
        allGood &= structure->hasMonoProto()
            && structure->storedPrototype() == arrayPrototype
            && !structure->isDictionary()
            && structure->getConcurrently(m_vm.propertyNames->iteratorSymbol.impl()) == invalidOffset
            && !structure->mayInterceptIndexedAccesses();
    });

    return allGood;
}

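// Drops the lazily built CPS CFG and the analyses derived from it. The ensureCPSXXX()
// accessors above will rebuild them on demand.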
void Graph::clearCPSCFGData()
{
    m_cpsNaturalLoops = nullptr;
    m_cpsDominators = nullptr;
    m_cpsCFG = nullptr;
}

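// Prints the per-line prefix used in graph dumps: up to three columns (node index, block
// index, phase number), each left blank when its value is negative (i.e. unset),
// followed by the optional prefix string.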
void Prefix::dump(PrintStream& out) const
{
    if (!m_enabled)
        return;

    if (!noHeader) {
        if (nodeIndex >= 0)
            out.printf("%3d ", nodeIndex);
        else
            out.printf("    ");

        if (blockIndex >= 0)
            out.printf("%2d ", blockIndex);
        else
            out.printf("   ");

        if (phaseNumber >= 0)
            out.printf("%2d: ", phaseNumber);
        else
            out.printf("  : ");
    }
    if (prefixStr)
        out.printf("%s", prefixStr);
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)