/*
 * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGCommonData.h"

#if ENABLE(DFG_JIT)

#include "CodeBlock.h"
#include "DFGNode.h"
#include "DFGPlan.h"
#include "InlineCallFrame.h"
#include "JSCInlines.h"
#include "TrackedReferences.h"
#include "VM.h"

#include <wtf/NeverDestroyed.h>

namespace JSC { namespace DFG {

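// Registers the structure transition performed by this node with the plan's
// desired transitions, so that the transition's old and new structures are
// tracked until the compilation is finalized.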
void CommonData::notifyCompilingStructureTransition(Plan& plan, CodeBlock* codeBlock, Node* node)
{
    plan.transitions().addLazily(
        codeBlock,
        node->origin.semantic.codeOriginOwner(),
        node->transition()->previous.get(),
        node->transition()->next.get());
}

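// Returns a call site index for this code origin, reusing the last slot when it
// already holds an identical origin.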
CallSiteIndex CommonData::addCodeOrigin(CodeOrigin codeOrigin)
{
    if (codeOrigins.isEmpty()
        || codeOrigins.last() != codeOrigin)
        codeOrigins.append(codeOrigin);
    unsigned index = codeOrigins.size() - 1;
    ASSERT(codeOrigins[index] == codeOrigin);
    return CallSiteIndex(BytecodeIndex(index));
}

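// Unlike addCodeOrigin(), always appends a fresh slot, so each call yields a
// call site index unique to one call site.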
CallSiteIndex CommonData::addUniqueCallSiteIndex(CodeOrigin codeOrigin)
{
    codeOrigins.append(codeOrigin);
    unsigned index = codeOrigins.size() - 1;
    ASSERT(codeOrigins[index] == codeOrigin);
    return CallSiteIndex(BytecodeIndex(index));
}

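// Returns the call site index of the most recently added code origin.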
CallSiteIndex CommonData::lastCallSite() const
{
    RELEASE_ASSERT(codeOrigins.size());
    return CallSiteIndex(BytecodeIndex(codeOrigins.size() - 1));
}

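// Like addUniqueCallSiteIndex(), but the resulting index can later be recycled:
// freed slots are taken from the free list before the codeOrigins vector grows.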
DisposableCallSiteIndex CommonData::addDisposableCallSiteIndex(CodeOrigin codeOrigin)
{
    if (callSiteIndexFreeList.size()) {
        unsigned index = callSiteIndexFreeList.takeAny();
        codeOrigins[index] = codeOrigin;
        return DisposableCallSiteIndex(index);
    }

    codeOrigins.append(codeOrigin);
    unsigned index = codeOrigins.size() - 1;
    ASSERT(codeOrigins[index] == codeOrigin);
    return DisposableCallSiteIndex(index);
}

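// Recycles a disposable call site index: its slot is cleared and pushed onto
// the free list for reuse by a later addDisposableCallSiteIndex().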
void CommonData::removeDisposableCallSiteIndex(DisposableCallSiteIndex callSite)
{
    RELEASE_ASSERT(callSite.bits() < codeOrigins.size());
    callSiteIndexFreeList.add(callSite.bits());
    codeOrigins[callSite.bits()] = CodeOrigin();
}

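// Releases excess vector capacity once the compilation's metadata is final.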
void CommonData::shrinkToFit()
{
    codeOrigins.shrinkToFit();
    dfgIdentifiers.shrinkToFit();
    weakReferences.shrinkToFit();
    weakStructureReferences.shrinkToFit();
    transitions.shrinkToFit();
    catchEntrypoints.shrinkToFit();
    jumpReplacements.shrinkToFit();
}

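// Global map from the PC of each installed VM trap breakpoint to the CodeBlock
// that owns it. The AbstractLocker parameter documents that callers must hold
// pcCodeBlockMapLock.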
static Lock pcCodeBlockMapLock;
inline HashMap<void*, CodeBlock*>& pcCodeBlockMap(AbstractLocker&)
{
    static NeverDestroyed<HashMap<void*, CodeBlock*>> pcCodeBlockMap;
    return pcCodeBlockMap;
}

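// Invalidates this compiled code: removes any installed VM trap breakpoints
// from the global PC map, then fires every jump replacement so execution is
// diverted at the invalidation points. Returns false if already invalidated.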
bool CommonData::invalidate()
{
    if (!isStillValid)
        return false;

    if (UNLIKELY(hasVMTrapsBreakpointsInstalled)) {
        LockHolder locker(pcCodeBlockMapLock);
        auto& map = pcCodeBlockMap(locker);
        for (auto& jumpReplacement : jumpReplacements)
            map.remove(jumpReplacement.dataLocation());
        hasVMTrapsBreakpointsInstalled = false;
    }

    for (unsigned i = jumpReplacements.size(); i--;)
        jumpReplacements[i].fire();
    isStillValid = false;
    return true;
}

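// If VM trap breakpoints were installed and never uninstalled via invalidate(),
// their PCs must still be removed from the global map before this data dies.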
CommonData::~CommonData()
{
    if (UNLIKELY(hasVMTrapsBreakpointsInstalled)) {
        LockHolder locker(pcCodeBlockMapLock);
        auto& map = pcCodeBlockMap(locker);
        for (auto& jumpReplacement : jumpReplacements)
            map.remove(jumpReplacement.dataLocation());
    }
}

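// Installs a breakpoint at every invalidation point and records each patched PC
// in the global map so the owning CodeBlock can be found when a trap fires.
// Does nothing if the code is invalidated or breakpoints are already installed.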
void CommonData::installVMTrapBreakpoints(CodeBlock* owner)
{
    LockHolder locker(pcCodeBlockMapLock);
    if (!isStillValid || hasVMTrapsBreakpointsInstalled)
        return;
    hasVMTrapsBreakpointsInstalled = true;

    auto& map = pcCodeBlockMap(locker);
#if !defined(NDEBUG)
    // We need to be able to handle more than one invalidation point at the same pc
    // but we want to make sure we don't forget to remove a pc from the map.
    HashSet<void*> newReplacements;
#endif
    for (auto& jumpReplacement : jumpReplacements) {
        jumpReplacement.installVMTrapBreakpoint();
        void* source = jumpReplacement.dataLocation();
        auto result = map.add(source, owner);
        UNUSED_PARAM(result);
#if !defined(NDEBUG)
        ASSERT(result.isNewEntry || newReplacements.contains(source));
        newReplacements.add(source);
#endif
    }
}

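// Maps a faulting PC back to the CodeBlock that installed a VM trap breakpoint
// there, or returns nullptr if the PC is not a known breakpoint location.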
CodeBlock* codeBlockForVMTrapPC(void* pc)
{
    ASSERT(isJITPC(pc));
    LockHolder locker(pcCodeBlockMapLock);
    auto& map = pcCodeBlockMap(locker);
    auto result = map.find(pc);
    if (result == map.end())
        return nullptr;
    return result->value;
}

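// Returns true if the address is the data location of one of this code's jump
// replacements, i.e. a spot where a VM trap breakpoint may be installed.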
bool CommonData::isVMTrapBreakpoint(void* address)
{
    if (!isStillValid)
        return false;
    for (unsigned i = jumpReplacements.size(); i--;) {
        if (address == jumpReplacements[i].dataLocation())
            return true;
    }
    return false;
}

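// Debug validation: checks that every constant, baseline CodeBlock, and callee
// reachable from the inline call frames, as well as every adaptive structure
// watchpoint key, is present in the tracked references.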
void CommonData::validateReferences(const TrackedReferences& trackedReferences)
{
    if (InlineCallFrameSet* set = inlineCallFrames.get()) {
        for (InlineCallFrame* inlineCallFrame : *set) {
            for (ValueRecovery& recovery : inlineCallFrame->argumentsWithFixup) {
                if (recovery.isConstant())
                    trackedReferences.check(recovery.constant());
            }

            if (CodeBlock* baselineCodeBlock = inlineCallFrame->baselineCodeBlock.get())
                trackedReferences.check(baselineCodeBlock);

            if (inlineCallFrame->calleeRecovery.isConstant())
                trackedReferences.check(inlineCallFrame->calleeRecovery.constant());
        }
    }

    for (AdaptiveStructureWatchpoint* watchpoint : adaptiveStructureWatchpoints)
        watchpoint->key().validateReferences(trackedReferences);
}

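// Sorts catch entrypoints by bytecode index so callers can search them by
// bytecode index; the sorted order is re-checked in debug builds.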
void CommonData::finalizeCatchEntrypoints()
{
    std::sort(catchEntrypoints.begin(), catchEntrypoints.end(),
        [] (const CatchEntrypointData& a, const CatchEntrypointData& b) { return a.bytecodeIndex < b.bytecodeIndex; });

#if !ASSERT_DISABLED
    for (unsigned i = 0; i + 1 < catchEntrypoints.size(); ++i)
        ASSERT(catchEntrypoints[i].bytecodeIndex <= catchEntrypoints[i + 1].bytecodeIndex);
#endif
}

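// Drops every watchpoint this compilation registered.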
void CommonData::clearWatchpoints()
{
    watchpoints.clear();
    adaptiveStructureWatchpoints.clear();
    adaptiveInferredPropertyValueWatchpoints.clear();
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)