/*
 * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include "BytecodeIndex.h"

#include <limits.h>
#include <wtf/HashMap.h>
#include <wtf/PrintStream.h>
#include <wtf/StdLibExtras.h>
#include <wtf/Vector.h>

namespace JSC {

class CodeBlock;
struct DumpContext;
struct InlineCallFrame;

class CodeOrigin {
public:
    CodeOrigin()
#if CPU(ADDRESS64)
        : m_compositeValue(buildCompositeValue(nullptr, BytecodeIndex()))
#else
        : m_inlineCallFrame(nullptr)
#endif
    {
    }

    CodeOrigin(WTF::HashTableDeletedValueType)
#if CPU(ADDRESS64)
        : m_compositeValue(buildCompositeValue(deletedMarker(), BytecodeIndex()))
#else
        : m_bytecodeIndex(WTF::HashTableDeletedValue)
        , m_inlineCallFrame(deletedMarker())
#endif
    {
    }

    explicit CodeOrigin(BytecodeIndex bytecodeIndex, InlineCallFrame* inlineCallFrame = nullptr)
#if CPU(ADDRESS64)
        : m_compositeValue(buildCompositeValue(inlineCallFrame, bytecodeIndex))
#else
        : m_bytecodeIndex(bytecodeIndex)
        , m_inlineCallFrame(inlineCallFrame)
#endif
    {
        ASSERT(!!bytecodeIndex);
#if CPU(ADDRESS64)
        ASSERT(!(bitwise_cast<uintptr_t>(inlineCallFrame) & ~s_maskCompositeValueForPointer));
#endif
    }

#if CPU(ADDRESS64)
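    // On 64-bit targets, m_compositeValue may own an out-of-line allocation (used when the bytecode
    // index does not fit inline), so copy, move, and destruction have to manage that storage explicitly.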
    CodeOrigin& operator=(const CodeOrigin& other)
    {
        if (this != &other) {
            if (UNLIKELY(isOutOfLine()))
                delete outOfLineCodeOrigin();

            if (UNLIKELY(other.isOutOfLine()))
                m_compositeValue = buildCompositeValue(other.inlineCallFrame(), other.bytecodeIndex());
            else
                m_compositeValue = other.m_compositeValue;
        }
        return *this;
    }
    CodeOrigin& operator=(CodeOrigin&& other)
    {
        if (this != &other) {
            if (UNLIKELY(isOutOfLine()))
                delete outOfLineCodeOrigin();

            m_compositeValue = std::exchange(other.m_compositeValue, 0);
        }
        return *this;
    }

    CodeOrigin(const CodeOrigin& other)
    {
        // We don't use the member initializer list because it would prevent optimizing the common case
        // where there is no out-of-line storage: there we can copy the composite value directly instead
        // of extracting its components just to reassemble them.
        if (UNLIKELY(other.isOutOfLine()))
            m_compositeValue = buildCompositeValue(other.inlineCallFrame(), other.bytecodeIndex());
        else
            m_compositeValue = other.m_compositeValue;
    }
    CodeOrigin(CodeOrigin&& other)
        : m_compositeValue(std::exchange(other.m_compositeValue, 0))
    {
    }

    ~CodeOrigin()
    {
        if (UNLIKELY(isOutOfLine()))
            delete outOfLineCodeOrigin();
    }
#endif

    bool isSet() const
    {
#if CPU(ADDRESS64)
        return !(m_compositeValue & s_maskIsBytecodeIndexInvalid);
#else
        return !!m_bytecodeIndex;
#endif
    }
    explicit operator bool() const { return isSet(); }

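    // A hash-table deleted value is encoded as an unset bytecode index paired with a non-null
    // InlineCallFrame pointer (deletedMarker()), a combination the main constructor never produces.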
    bool isHashTableDeletedValue() const
    {
#if CPU(ADDRESS64)
        return !isSet() && (m_compositeValue & s_maskCompositeValueForPointer);
#else
        return m_bytecodeIndex.isHashTableDeletedValue() && !!m_inlineCallFrame;
#endif
    }

    // The inline depth is the depth of the inline stack, so 1 = not inlined,
    // 2 = inlined one deep, etc.
    unsigned inlineDepth() const;

    // If the code origin corresponds to inlined code, this gives you the heap object that
    // would have owned the code if it had not been inlined. Otherwise returns nullptr.
    CodeBlock* codeOriginOwner() const;

    int stackOffset() const;

    unsigned hash() const;
    bool operator==(const CodeOrigin& other) const;
    bool operator!=(const CodeOrigin& other) const { return !(*this == other); }

    // This checks whether the two code origins correspond to the same stack trace snippets,
    // but ignores whether the InlineCallFrames are identical.
    bool isApproximatelyEqualTo(const CodeOrigin& other, InlineCallFrame* terminal = nullptr) const;

    unsigned approximateHash(InlineCallFrame* terminal = nullptr) const;

    template <typename Function>
    void walkUpInlineStack(const Function&);

    // Get the inline stack. This is slow, and is intended for debugging only.
    Vector<CodeOrigin> inlineStack() const;

    JS_EXPORT_PRIVATE void dump(PrintStream&) const;
    void dumpInContext(PrintStream&, DumpContext*) const;

    BytecodeIndex bytecodeIndex() const
    {
#if CPU(ADDRESS64)
        if (!isSet())
            return BytecodeIndex();
        if (UNLIKELY(isOutOfLine()))
            return outOfLineCodeOrigin()->bytecodeIndex;
        return BytecodeIndex::fromBits(m_compositeValue >> (64 - s_freeBitsAtTop));
#else
        return m_bytecodeIndex;
#endif
    }

    InlineCallFrame* inlineCallFrame() const
    {
#if CPU(ADDRESS64)
        if (UNLIKELY(isOutOfLine()))
            return outOfLineCodeOrigin()->inlineCallFrame;
        return bitwise_cast<InlineCallFrame*>(m_compositeValue & s_maskCompositeValueForPointer);
#else
        return m_inlineCallFrame;
#endif
    }

private:
#if CPU(ADDRESS64)
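    // Tag bits kept in the low (alignment) bits of the composite value; see the layout comment
    // above m_compositeValue below.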
    static constexpr uintptr_t s_maskIsOutOfLine = 1;
    static constexpr uintptr_t s_maskIsBytecodeIndexInvalid = 2;

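    // Fallback storage, heap-allocated by buildCompositeValue() when the bytecode index is too wide
    // to pack into the free high bits of the composite value.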
    struct OutOfLineCodeOrigin {
        WTF_MAKE_FAST_ALLOCATED;
    public:
        InlineCallFrame* inlineCallFrame;
        BytecodeIndex bytecodeIndex;

        OutOfLineCodeOrigin(InlineCallFrame* inlineCallFrame, BytecodeIndex bytecodeIndex)
            : inlineCallFrame(inlineCallFrame)
            , bytecodeIndex(bytecodeIndex)
        {
        }
    };

    bool isOutOfLine() const
    {
        return m_compositeValue & s_maskIsOutOfLine;
    }
    OutOfLineCodeOrigin* outOfLineCodeOrigin() const
    {
        ASSERT(isOutOfLine());
        return bitwise_cast<OutOfLineCodeOrigin*>(m_compositeValue & s_maskCompositeValueForPointer);
    }
#endif

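    // A small, 8-byte-aligned, non-null sentinel used to mark hash-table deleted values; it satisfies
    // the pointer mask (see the assertions below) without being a real InlineCallFrame.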
    static InlineCallFrame* deletedMarker()
    {
        auto value = static_cast<uintptr_t>(1 << 3);
#if CPU(ADDRESS64)
        ASSERT(value & s_maskCompositeValueForPointer);
        ASSERT(!(value & ~s_maskCompositeValueForPointer));
#endif
        return bitwise_cast<InlineCallFrame*>(value);
    }

#if CPU(ADDRESS64)
    static constexpr unsigned s_freeBitsAtTop = 64 - WTF_CPU_EFFECTIVE_ADDRESS_WIDTH;
    static constexpr uintptr_t s_maskCompositeValueForPointer = ((1ULL << WTF_CPU_EFFECTIVE_ADDRESS_WIDTH) - 1) & ~(8ULL - 1);
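    // Packs an InlineCallFrame* and a BytecodeIndex into one word. An unset index is flagged with
    // s_maskIsBytecodeIndexInvalid; an index too wide for the top s_freeBitsAtTop bits falls back to a
    // heap-allocated OutOfLineCodeOrigin tagged with s_maskIsOutOfLine.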
    static uintptr_t buildCompositeValue(InlineCallFrame* inlineCallFrame, BytecodeIndex bytecodeIndex)
    {
        if (!bytecodeIndex)
            return bitwise_cast<uintptr_t>(inlineCallFrame) | s_maskIsBytecodeIndexInvalid;

        if (UNLIKELY(bytecodeIndex.asBits() >= 1 << s_freeBitsAtTop)) {
            auto* outOfLine = new OutOfLineCodeOrigin(inlineCallFrame, bytecodeIndex);
            return bitwise_cast<uintptr_t>(outOfLine) | s_maskIsOutOfLine;
        }

        uintptr_t encodedBytecodeIndex = static_cast<uintptr_t>(bytecodeIndex.asBits()) << (64 - s_freeBitsAtTop);
        ASSERT(!(encodedBytecodeIndex & bitwise_cast<uintptr_t>(inlineCallFrame)));
        return encodedBytecodeIndex | bitwise_cast<uintptr_t>(inlineCallFrame);
    }

    // The bottom bit indicates whether to look at an out-of-line implementation (used when the bytecode index is too big for us to store inline).
    // The next bit indicates whether the bytecode index is invalid (which, depending on the InlineCallFrame*, indicates either an unset CodeOrigin
    // or a deletion marker for a hash table).
    // The next bit is free.
    // The next 64 - s_freeBitsAtTop - 3 bits are the InlineCallFrame* or the OutOfLineCodeOrigin*.
    // Finally, the top s_freeBitsAtTop bits are the bytecodeIndex, if it is stored inline.
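    // For illustration only (assuming WTF_CPU_EFFECTIVE_ADDRESS_WIDTH == 48, a common 64-bit
    // configuration): s_freeBitsAtTop is 16, so bits 0-2 hold the tags, bits 3-47 hold the pointer,
    // and bits 48-63 hold a BytecodeIndex whose bits fit in 16 bits.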
    uintptr_t m_compositeValue;
#else
    BytecodeIndex m_bytecodeIndex;
    InlineCallFrame* m_inlineCallFrame;
#endif
};

inline unsigned CodeOrigin::hash() const
{
    return WTF::IntHash<unsigned>::hash(bytecodeIndex().asBits()) +
        WTF::PtrHash<InlineCallFrame*>::hash(inlineCallFrame());
}

inline bool CodeOrigin::operator==(const CodeOrigin& other) const
{
#if CPU(ADDRESS64)
    if (m_compositeValue == other.m_compositeValue)
        return true;
#endif
    return bytecodeIndex() == other.bytecodeIndex()
        && inlineCallFrame() == other.inlineCallFrame();
}

struct CodeOriginHash {
    static unsigned hash(const CodeOrigin& key) { return key.hash(); }
    static bool equal(const CodeOrigin& a, const CodeOrigin& b) { return a == b; }
    static constexpr bool safeToCompareToEmptyOrDeleted = true;
};

struct CodeOriginApproximateHash {
    static unsigned hash(const CodeOrigin& key) { return key.approximateHash(); }
    static bool equal(const CodeOrigin& a, const CodeOrigin& b) { return a.isApproximatelyEqualTo(b); }
    static constexpr bool safeToCompareToEmptyOrDeleted = true;
};

} // namespace JSC

namespace WTF {

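// These specializations let JSC::CodeOrigin be used directly as a key in WTF hash tables
// (e.g., illustratively, HashMap<JSC::CodeOrigin, unsigned>), using CodeOriginHash by default.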
template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::CodeOrigin> {
    typedef JSC::CodeOriginHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::CodeOrigin> : SimpleClassHashTraits<JSC::CodeOrigin> {
    static constexpr bool emptyValueIsZero = false;
};

} // namespace WTF