/*
 * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#if ENABLE(B3_JIT)

#include "B3ConstrainedValue.h"
#include "B3Value.h"
#include "B3ValueRep.h"
#include "CCallHelpers.h"
#include "RegisterSet.h"
#include <wtf/SharedTask.h>

namespace JSC { namespace B3 {

class StackmapGenerationParams;

typedef void StackmapGeneratorFunction(CCallHelpers&, const StackmapGenerationParams&);
typedef SharedTask<StackmapGeneratorFunction> StackmapGenerator;

class JS_EXPORT_PRIVATE StackmapValue : public Value {
public:
    static bool accepts(Kind kind)
    {
        // This needs to include opcodes of all subclasses.
        switch (kind.opcode()) {
        case CheckAdd:
        case CheckSub:
        case CheckMul:
        case Check:
        case Patchpoint:
            return true;
        default:
            return false;
        }
    }
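
    // For example, accepts() is what Value::as<>() consults, so clients can do checked
    // downcasts. A minimal sketch (the Value* `value` is hypothetical):
    //
    //     if (StackmapValue* stackmap = value->as<StackmapValue>())
    //         stackmap->clobber(RegisterSet::macroScratchRegisters());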

    ~StackmapValue();

    // Use this to add children.
    void append(const ConstrainedValue& value)
    {
        ASSERT(value.value()->type().isNumeric());
        append(value.value(), value.rep());
    }

    void append(Value*, const ValueRep&);

    template<typename VectorType>
    void appendVector(const VectorType& vector)
    {
        for (const auto& value : vector)
            append(value);
    }

    // Helper for appending a bunch of values with some ValueRep.
    template<typename VectorType>
    void appendVectorWithRep(const VectorType& vector, const ValueRep& rep)
    {
        for (Value* value : vector)
            append(value, rep);
    }
    // Helpers for appending cold any's. These are often used by clients to implement OSR.
    template<typename VectorType>
    void appendColdAnys(const VectorType& vector)
    {
        appendVectorWithRep(vector, ValueRep::ColdAny);
    }
    template<typename VectorType>
    void appendLateColdAnys(const VectorType& vector)
    {
        appendVectorWithRep(vector, ValueRep::LateColdAny);
    }
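
    // For example, an OSR exit sketch (assuming a Procedure `proc`, a BasicBlock* `block`, an
    // Int32 `predicate`, and a hypothetical Vector<Value*> `osrValues` holding the live state
    // needed to reconstruct the frame on exit):
    //
    //     CheckValue* check = block->appendNew<CheckValue>(proc, Check, Origin(), predicate);
    //     check->appendColdAnys(osrValues);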

    // This is a helper for something you might do a lot of: append a value that should be
    // constrained to SomeRegister.
    void appendSomeRegister(Value*);
    void appendSomeRegisterWithClobber(Value*);
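
    // For example, a minimal sketch of building a patchpoint with register-constrained children
    // (assuming a Procedure `proc`, a BasicBlock* `block`, and two Int32 values `left` and
    // `right`; the names are illustrative):
    //
    //     PatchpointValue* patchpoint = block->appendNew<PatchpointValue>(proc, Int32, Origin());
    //     patchpoint->appendSomeRegister(left);
    //     patchpoint->appendSomeRegister(right);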

    const Vector<ValueRep>& reps() const { return m_reps; }

    // Stackmaps allow you to specify that the operation may clobber some registers. Clobbering a
    // register means that the operation appears to store a value into the register, but the
    // compiler makes no assumptions about what value might have been stored. In B3's model of
    // execution, registers are read or written at instruction boundaries rather than inside the
    // instructions themselves. A register could be read or written immediately before the
    // instruction executes, or immediately after. Note that at a boundary between instruction A
    // and instruction B we simultaneously look at what A does after it executes and what B does
    // before it executes. This is because when the compiler considers what happens to registers,
    // it views the boundary between two instructions as a kind of atomic point where the late
    // effects of A happen at the same time as the early effects of B.
    //
    // The compiler views a stackmap as a single instruction, even though of course the stackmap
    // may be composed of any number of instructions (if it's a Patchpoint). You can claim that a
    // stackmap value clobbers a set of registers before the stackmap's instruction or after.
    // Clobbering before is called early clobber, while clobbering after is called late clobber.
    //
    // This is quite flexible, but it has its limitations. Any register listed as an early clobber
    // will interfere with all uses of the stackmap. Any register listed as a late clobber will
    // interfere with all defs of the stackmap (i.e. the result). This means that it's currently
    // not possible to claim to clobber a register while still allowing that register to be used
    // for both an input and an output of the instruction. It just so happens that B3's sole
    // client (the FTL) currently never wants to convey such a constraint, but it will want it
    // eventually (FIXME: https://bugs.webkit.org/show_bug.cgi?id=151823).
    //
    // Note that a common use case of early clobber sets is to indicate the set of registers that
    // shall not be used for inputs to the value. But B3 supports two different ways of specifying
    // this, the other being LateUse in combination with late clobber (not yet available to
    // stackmaps directly, FIXME: https://bugs.webkit.org/show_bug.cgi?id=151335). A late use
    // makes the use of that value appear to happen after the instruction. This means that a late
    // use cannot use the same register as the result, and it cannot use the same register as
    // either early or late clobbered registers. Late uses are usually a better way of saying that
    // a clobbered register cannot be used for an input. Early clobber means that some register(s)
    // interfere with *all* inputs, while LateUse means that some value interferes with whatever
    // is live after the instruction. Below is a list of examples of how the FTL can handle its
    // various kinds of scenarios using a combination of early clobber, late clobber, and late
    // use. These examples are for X86_64, w.l.o.g.
    //
    // Basic ById patchpoint: Early and late clobber of r11. Early clobber prevents any inputs
    // from using r11 since that would mess with the MacroAssembler's assumptions when we
    // AllowMacroScratchRegisterUsage. Late clobber tells B3 that the patchpoint may overwrite
    // r11.
    //
    // ById patchpoint in a try block with some live state: This might throw an exception after
    // already assigning to the result. So, this should LateUse all stackmap values to ensure that
    // the stackmap values don't interfere with the result. Note that we do not LateUse the
    // non-OSR inputs of the ById since LateUse implies that the use is cold: the register
    // allocator will assume that the use is not important for the critical path. Also, early and
    // late clobber of r11.
    //
    // Basic ByIdFlush patchpoint: We could do Flush the same way we did it with LLVM: ignore it
    // and let PolymorphicAccess figure it out. Or, we could add internal clobber support (FIXME:
    // https://bugs.webkit.org/show_bug.cgi?id=151823). Or, we could do it by early clobbering
    // r11, late clobbering all volatile registers, and constraining the result to some register.
    // Or, we could do that but leave the result constrained to SomeRegister, which will cause it
    // to use a callee-save register. Internal clobber support would allow us to use SomeRegister
    // while getting the result into a volatile register.
    //
    // ByIdFlush patchpoint in a try block with some live state: LateUse all for-OSR stackmap
    // values, early clobber of r11 to prevent the other inputs from using r11, and late clobber
    // of all volatile registers to make way for the call. To handle the result, we could do any
    // of what is listed in the previous paragraph.
    //
    // Basic JS call: Force all non-OSR inputs into specific locations (register, stack,
    // whatever). All volatile registers are late-clobbered. The output is constrained to a
    // register as well.
    //
    // JS call in a try block with some live state: LateUse all for-OSR stackmap values, fully
    // constrain all non-OSR inputs and the result, and late clobber all volatile registers.
    //
    // JS tail call: Pass all inputs as a warm variant of Any (FIXME:
    // https://bugs.webkit.org/show_bug.cgi?id=151811).
    //
    // Note that we cannot yet do all of these things because although Air already supports all of
    // these various forms of uses (LateUse and warm unconstrained use), B3 doesn't yet expose all
    // of it. The bugs are:
    // https://bugs.webkit.org/show_bug.cgi?id=151335 (LateUse)
    // https://bugs.webkit.org/show_bug.cgi?id=151811 (warm Any)
    void clobberEarly(const RegisterSet& set)
    {
        m_earlyClobbered.merge(set);
    }

    void clobberLate(const RegisterSet& set)
    {
        m_lateClobbered.merge(set);
    }

    void clobber(const RegisterSet& set)
    {
        clobberEarly(set);
        clobberLate(set);
    }
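
    // For example, a patchpoint that uses the macro assembler's scratch register must tell B3
    // about it. A minimal sketch (assuming a PatchpointValue* `patchpoint`):
    //
    //     patchpoint->clobber(RegisterSet::macroScratchRegisters());
    //
    // Per the rules above, clobberLate() alone would still let the inputs (but not the result)
    // use those registers, while clobberEarly() alone would let the result (but not the inputs)
    // use them.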

    RegisterSet& earlyClobbered() { return m_earlyClobbered; }
    RegisterSet& lateClobbered() { return m_lateClobbered; }
    const RegisterSet& earlyClobbered() const { return m_earlyClobbered; }
    const RegisterSet& lateClobbered() const { return m_lateClobbered; }

    void setGenerator(RefPtr<StackmapGenerator> generator)
    {
        m_generator = generator;
    }

    template<typename Functor>
    void setGenerator(const Functor& functor)
    {
        m_generator = createSharedTask<StackmapGeneratorFunction>(functor);
    }
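
    // Continuing the patchpoint sketch above, a generator for that two-child Int32 patchpoint
    // might look like this. For a non-Void patchpoint, params[0] is the rep of the result;
    // params[1] and params[2] are the reps of the two appended children:
    //
    //     patchpoint->setGenerator(
    //         [] (CCallHelpers& jit, const StackmapGenerationParams& params) {
    //             jit.add32(params[1].gpr(), params[2].gpr(), params[0].gpr());
    //         });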

    RefPtr<StackmapGenerator> generator() const { return m_generator; }

    ConstrainedValue constrainedChild(unsigned index) const
    {
        return ConstrainedValue(child(index), index < m_reps.size() ? m_reps[index] : ValueRep::ColdAny);
    }

    void setConstrainedChild(unsigned index, const ConstrainedValue&);

    void setConstraint(unsigned index, const ValueRep&);

    class ConstrainedValueCollection {
    public:
        ConstrainedValueCollection(const StackmapValue& value)
            : m_value(value)
        {
        }

        unsigned size() const { return m_value.numChildren(); }

        ConstrainedValue at(unsigned index) const { return m_value.constrainedChild(index); }

        ConstrainedValue operator[](unsigned index) const { return at(index); }

        class iterator {
        public:
            using iterator_category = std::forward_iterator_tag;
            using value_type = ConstrainedValue;
            using difference_type = int;
            using pointer = void;
            using reference = ConstrainedValue;

            iterator()
                : m_collection(nullptr)
                , m_index(0)
            {
            }

            iterator(const ConstrainedValueCollection& collection, unsigned index)
                : m_collection(&collection)
                , m_index(index)
            {
            }

            ConstrainedValue operator*() const
            {
                return m_collection->at(m_index);
            }

            iterator& operator++()
            {
                m_index++;
                return *this;
            }

            bool operator==(const iterator& other) const
            {
                ASSERT(m_collection == other.m_collection);
                return m_index == other.m_index;
            }

            bool operator!=(const iterator& other) const
            {
                return !(*this == other);
            }

        private:
            const ConstrainedValueCollection* m_collection;
            unsigned m_index;
        };

        iterator begin() const { return iterator(*this, 0); }
        iterator end() const { return iterator(*this, size()); }

    private:
        const StackmapValue& m_value;
    };

    ConstrainedValueCollection constrainedChildren() const
    {
        return ConstrainedValueCollection(*this);
    }
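
    // A minimal sketch of walking the children together with their constraints (assuming a
    // StackmapValue* `stackmap`):
    //
    //     for (ConstrainedValue constrainedValue : stackmap->constrainedChildren())
    //         dataLogLn(pointerDump(constrainedValue.value()), " is constrained to ", constrainedValue.rep());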

    B3_SPECIALIZE_VALUE_FOR_VARARGS_CHILDREN

protected:
    void dumpChildren(CommaPrinter&, PrintStream&) const override;
    void dumpMeta(CommaPrinter&, PrintStream&) const override;

    StackmapValue(CheckedOpcodeTag, Kind, Type, Origin);

private:
    friend class CheckSpecial;
    friend class PatchpointSpecial;
    friend class StackmapGenerationParams;
    friend class StackmapSpecial;

    Vector<ValueRep> m_reps;
    RefPtr<StackmapGenerator> m_generator;
    RegisterSet m_earlyClobbered;
    RegisterSet m_lateClobbered;
    // Stackmaps could be further duplicated by Air, but that's unlikely, so we just merge the
    // used-register sets if that happens.
    RegisterSet m_usedRegisters;
};

} } // namespace JSC::B3

#endif // ENABLE(B3_JIT)