/*
 * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGStackLayoutPhase.h"

#if ENABLE(DFG_JIT)

#include "DFGGraph.h"
#include "DFGPhase.h"
#include "DFGValueSource.h"
#include "JSCInlines.h"

namespace JSC { namespace DFG {

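// Lays out the stack for the machine frame: packs the bytecode locals that are actually
// used into a contiguous run of machine locals and records that mapping on variable
// access data, stack access data, and inline call frames.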
class StackLayoutPhase : public Phase {
    static constexpr bool verbose = false;

public:
    StackLayoutPhase(Graph& graph)
        : Phase(graph, "stack layout")
    {
    }

    bool run()
    {
        // This enumerates the locals that we actually care about and packs them. So for example
        // if we use local 1, 3, 4, 5, 7, then we remap them: 1->0, 3->1, 4->2, 5->3, 7->4. We
        // treat a variable as being "used" if there exists an access to it (SetLocal, GetLocal,
        // Flush, PhantomLocal).

        BitVector usedLocals;

        // Collect those variables that are used from IR.
        bool hasNodesThatNeedFixup = false;
        for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                Node* node = block->at(nodeIndex);
                switch (node->op()) {
                case GetLocal:
                case SetLocal:
                case Flush:
                case PhantomLocal: {
                    VariableAccessData* variable = node->variableAccessData();
                    if (variable->local().isArgument())
                        break;
                    usedLocals.set(variable->local().toLocal());
                    break;
                }

                case LoadVarargs:
                case ForwardVarargs: {
                    LoadVarargsData* data = node->loadVarargsData();
                    if (data->count.isLocal())
                        usedLocals.set(data->count.toLocal());
                    if (data->start.isLocal()) {
                        // This part really relies on the contiguity of stack layout
                        // assignments.
                        ASSERT(VirtualRegister(data->start.offset() + data->limit - 1).isLocal());
                        for (unsigned i = data->limit; i--;)
                            usedLocals.set(VirtualRegister(data->start.offset() + i).toLocal());
                    } // the else case shouldn't happen.
                    hasNodesThatNeedFixup = true;
                    break;
                }

                case PutStack:
                case GetStack: {
                    StackAccessData* stack = node->stackAccessData();
                    if (stack->local.isArgument())
                        break;
                    usedLocals.set(stack->local.toLocal());
                    break;
                }

                default:
                    break;
                }
            }
        }

        for (InlineCallFrameSet::iterator iter = m_graph.m_plan.inlineCallFrames()->begin(); !!iter; ++iter) {
            InlineCallFrame* inlineCallFrame = *iter;

            if (inlineCallFrame->isVarargs()) {
                usedLocals.set(VirtualRegister(
                    CallFrameSlot::argumentCount + inlineCallFrame->stackOffset).toLocal());
            }

            for (unsigned argument = inlineCallFrame->argumentsWithFixup.size(); argument--;) {
                usedLocals.set(VirtualRegister(
                    virtualRegisterForArgument(argument).offset() +
                    inlineCallFrame->stackOffset).toLocal());
            }
        }

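        // Build the packed allocation: allocation[local] is the machine local assigned to that
        // bytecode local, or UINT_MAX if the local is never used. Machine locals are numbered
        // starting after the virtual registers reserved for callee saves.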
        Vector<unsigned> allocation(usedLocals.size());
        m_graph.m_nextMachineLocal = codeBlock()->calleeSaveSpaceAsVirtualRegisters();
        for (unsigned i = 0; i < usedLocals.size(); ++i) {
            if (!usedLocals.get(i)) {
                allocation[i] = UINT_MAX;
                continue;
            }

            allocation[i] = m_graph.m_nextMachineLocal++;
        }

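        // Propagate the allocation to each root VariableAccessData. Arguments keep their
        // original registers; used locals get the machine local chosen above.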
        for (unsigned i = m_graph.m_variableAccessData.size(); i--;) {
            VariableAccessData* variable = &m_graph.m_variableAccessData[i];
            if (!variable->isRoot())
                continue;

            if (variable->local().isArgument()) {
                variable->machineLocal() = variable->local();
                continue;
            }

            size_t local = variable->local().toLocal();
            if (local >= allocation.size())
                continue;

            if (allocation[local] == UINT_MAX)
                continue;

            variable->machineLocal() = assign(allocation, variable->local());
        }

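        // Do the same remapping for the operands of PutStack and GetStack.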
        for (StackAccessData* data : m_graph.m_stackAccessData) {
            if (!data->local.isLocal()) {
                data->machineLocal = data->local;
                continue;
            }

            if (static_cast<size_t>(data->local.toLocal()) >= allocation.size())
                continue;
            if (allocation[data->local.toLocal()] == UINT_MAX)
                continue;

            data->machineLocal = assign(allocation, data->local);
        }

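        // The scope register, if the graph needs one, also lives in a local, so remap it too.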
        if (!m_graph.needsScopeRegister())
            codeBlock()->setScopeRegister(VirtualRegister());
        else
            codeBlock()->setScopeRegister(assign(allocation, codeBlock()->scopeRegister()));

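        // For each inlined call frame, record where its argument count, arguments, and callee
        // live in the machine frame, as value recoveries that OSR exit can use to reconstruct
        // the frame.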
        for (unsigned i = m_graph.m_inlineVariableData.size(); i--;) {
            InlineVariableData data = m_graph.m_inlineVariableData[i];
            InlineCallFrame* inlineCallFrame = data.inlineCallFrame;

            if (inlineCallFrame->isVarargs()) {
                inlineCallFrame->argumentCountRegister = assign(
                    allocation, VirtualRegister(inlineCallFrame->stackOffset + CallFrameSlot::argumentCount));
            }

            for (unsigned argument = inlineCallFrame->argumentsWithFixup.size(); argument--;) {
                ArgumentPosition& position = m_graph.m_argumentPositions[
                    data.argumentPositionStart + argument];
                VariableAccessData* variable = position.someVariable();
                ValueSource source;
                if (!variable)
                    source = ValueSource(SourceIsDead);
                else {
                    source = ValueSource::forFlushFormat(
                        variable->machineLocal(), variable->flushFormat());
                }
                inlineCallFrame->argumentsWithFixup[argument] = source.valueRecovery();
            }

            RELEASE_ASSERT(inlineCallFrame->isClosureCall == !!data.calleeVariable);
            if (inlineCallFrame->isClosureCall) {
                VariableAccessData* variable = data.calleeVariable->find();
                ValueSource source = ValueSource::forFlushFormat(
                    variable->machineLocal(),
                    variable->flushFormat());
                inlineCallFrame->calleeRecovery = source.valueRecovery();
            } else
                RELEASE_ASSERT(inlineCallFrame->calleeRecovery.isConstant());
        }

        // Fix varargs' variable references: LoadVarargs and ForwardVarargs name virtual
        // registers directly in their LoadVarargsData, so remap those to machine registers too.
        if (hasNodesThatNeedFixup) {
            for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                BasicBlock* block = m_graph.block(blockIndex);
                if (!block)
                    continue;
                for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                    Node* node = block->at(nodeIndex);
                    switch (node->op()) {
                    case LoadVarargs:
                    case ForwardVarargs: {
                        LoadVarargsData* data = node->loadVarargsData();
                        data->machineCount = assign(allocation, data->count);
                        data->machineStart = assign(allocation, data->start);
                        break;
                    }

                    default:
                        break;
                    }
                }
            }
        }

        return true;
    }

private:
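    // Maps a virtual register to its machine register under the packed allocation. Unused
    // locals map to an invalid VirtualRegister; non-local registers pass through unchanged.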
    VirtualRegister assign(const Vector<unsigned>& allocation, VirtualRegister src)
    {
        VirtualRegister result = src;
        if (result.isLocal()) {
            unsigned myAllocation = allocation[result.toLocal()];
            if (myAllocation == UINT_MAX)
                result = VirtualRegister();
            else
                result = virtualRegisterForLocal(myAllocation);
        }
        return result;
    }
};

bool performStackLayout(Graph& graph)
{
    return runPhase<StackLayoutPhase>(graph);
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)