1 | /* |
2 | * Copyright (C) 2012-2019 Apple Inc. All rights reserved. |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without |
5 | * modification, are permitted provided that the following conditions |
6 | * are met: |
7 | * |
8 | * 1. Redistributions of source code must retain the above copyright |
9 | * notice, this list of conditions and the following disclaimer. |
10 | * 2. Redistributions in binary form must reproduce the above copyright |
11 | * notice, this list of conditions and the following disclaimer in the |
12 | * documentation and/or other materials provided with the distribution. |
13 | * 3. Neither the name of Apple Inc. ("Apple") nor the names of |
14 | * its contributors may be used to endorse or promote products derived |
15 | * from this software without specific prior written permission. |
16 | * |
17 | * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY |
18 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
19 | * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE |
20 | * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY |
21 | * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES |
22 | * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; |
23 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND |
24 | * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
26 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | */ |
28 | |
29 | #include "config.h" |
30 | #include "SymbolTable.h" |
31 | |
32 | #include "CodeBlock.h" |
33 | #include "JSDestructibleObject.h" |
34 | #include "JSCInlines.h" |
35 | #include "SlotVisitorInlines.h" |
36 | #include "TypeProfiler.h" |
37 | |
38 | namespace JSC { |
39 | |
// Runtime class metadata for SymbolTable cells: the class name plus the
// method table (destroy/visitChildren/etc.) the GC and runtime dispatch on.
const ClassInfo SymbolTable::s_info = { "SymbolTable" , nullptr, nullptr, nullptr, CREATE_METHOD_TABLE(SymbolTable) };
41 | |
// Slow path of SymbolTableEntry's copy-assignment: taken when the source
// entry is "fat", i.e. its bits point at an out-of-line FatEntry rather
// than holding the packed representation inline.
SymbolTableEntry& SymbolTableEntry::copySlow(const SymbolTableEntry& other)
{
    ASSERT(other.isFat());
    // Clone first, free our own fat state second: this ordering keeps
    // self-assignment between fat entries safe.
    FatEntry* newFatEntry = new FatEntry(*other.fatEntry());
    freeFatEntry();
    m_bits = bitwise_cast<intptr_t>(newFatEntry);
    return *this;
}
50 | |
51 | void SymbolTable::destroy(JSCell* cell) |
52 | { |
53 | SymbolTable* thisObject = static_cast<SymbolTable*>(cell); |
54 | thisObject->SymbolTable::~SymbolTable(); |
55 | } |
56 | |
// Slow path of freeFatEntry(): only reached when this entry actually owns
// an out-of-line FatEntry, which is deallocated here.
void SymbolTableEntry::freeFatEntrySlow()
{
    ASSERT(isFat());
    delete fatEntry();
}
62 | |
// Ensures this entry carries a WatchpointSet so writes to the variable can
// be watched. No-op when the entry is not watchable or already has a set.
void SymbolTableEntry::prepareToWatch()
{
    if (!isWatchable())
        return;
    // Watchpoints live only in the out-of-line FatEntry, so inflate first.
    FatEntry* entry = inflate();
    if (entry->m_watchpoints)
        return;
    entry->m_watchpoints = adoptRef(new WatchpointSet(ClearWatchpoint));
}
72 | |
// Slow path of inflate(): converts a slim (inline-bits) entry into a fat
// one by allocating a FatEntry that preserves the current packed bits and
// repointing m_bits at it.
SymbolTableEntry::FatEntry* SymbolTableEntry::inflateSlow()
{
    FatEntry* entry = new FatEntry(m_bits);
    m_bits = bitwise_cast<intptr_t>(entry);
    return entry;
}
79 | |
// Constructs an empty symbol table; defaults to a plain var scope with no
// non-strict eval and no nested lexical scope. Callers adjust these flags
// after creation as needed.
SymbolTable::SymbolTable(VM& vm)
    : JSCell(vm, vm.symbolTableStructure.get())
    , m_usesNonStrictEval(false)
    , m_nestedLexicalScope(false)
    , m_scopeType(VarScope)
{
}
87 | |
88 | SymbolTable::~SymbolTable() { } |
89 | |
// Completes cell initialization after allocation; currently just defers to
// the base class.
void SymbolTable::finishCreation(VM& vm)
{
    Base::finishCreation(vm);
}
94 | |
// GC marking: visits the GC-managed references this table owns (the
// scoped-arguments table and, when rare data exists, its code block), then
// drops the localToEntry cache to save memory.
void SymbolTable::visitChildren(JSCell* thisCell, SlotVisitor& visitor)
{
    SymbolTable* thisSymbolTable = jsCast<SymbolTable*>(thisCell);
    ASSERT_GC_OBJECT_INHERITS(thisSymbolTable, info());
    Base::visitChildren(thisSymbolTable, visitor);

    visitor.append(thisSymbolTable->m_arguments);

    if (thisSymbolTable->m_rareData)
        visitor.append(thisSymbolTable->m_rareData->m_codeBlock);

    // Save some memory. This is O(n) to rebuild and we do so on the fly.
    // The lock keeps this reset from racing with concurrent callers of
    // localToEntry().
    ConcurrentJSLocker locker(thisSymbolTable->m_lock);
    thisSymbolTable->m_localToEntry = nullptr;
}
110 | |
// Returns the lazily-built reverse map from scope offset to table entry.
// The locker parameter enforces that the caller holds this table's lock.
// The cache is discarded during GC (see visitChildren), so it may be
// rebuilt here at any time.
const SymbolTable::LocalToEntryVec& SymbolTable::localToEntry(const ConcurrentJSLocker&)
{
    if (UNLIKELY(!m_localToEntry)) {
        // First pass: find the highest scope offset in use so the vector
        // can be sized exactly once.
        unsigned size = 0;
        for (auto& entry : m_map) {
            VarOffset offset = entry.value.varOffset();
            if (offset.isScope())
                size = std::max(size, offset.scopeOffset().offset() + 1);
        }

        // Second pass: populate; slots with no scoped entry stay nullptr.
        m_localToEntry = makeUnique<LocalToEntryVec>(size, nullptr);
        for (auto& entry : m_map) {
            VarOffset offset = entry.value.varOffset();
            if (offset.isScope())
                m_localToEntry->at(offset.scopeOffset().offset()) = &entry.value;
        }
    }

    return *m_localToEntry;
}
131 | |
132 | SymbolTableEntry* SymbolTable::entryFor(const ConcurrentJSLocker& locker, ScopeOffset offset) |
133 | { |
134 | auto& toEntryVector = localToEntry(locker); |
135 | if (offset.offset() >= toEntryVector.size()) |
136 | return nullptr; |
137 | return toEntryVector[offset.offset()]; |
138 | } |
139 | |
140 | SymbolTable* SymbolTable::cloneScopePart(VM& vm) |
141 | { |
142 | SymbolTable* result = SymbolTable::create(vm); |
143 | |
144 | result->m_usesNonStrictEval = m_usesNonStrictEval; |
145 | result->m_nestedLexicalScope = m_nestedLexicalScope; |
146 | result->m_scopeType = m_scopeType; |
147 | |
148 | for (auto iter = m_map.begin(), end = m_map.end(); iter != end; ++iter) { |
149 | if (!iter->value.varOffset().isScope()) |
150 | continue; |
151 | result->m_map.add( |
152 | iter->key, |
153 | SymbolTableEntry(iter->value.varOffset(), iter->value.getAttributes())); |
154 | } |
155 | |
156 | result->m_maxScopeOffset = m_maxScopeOffset; |
157 | |
158 | if (ScopedArgumentsTable* arguments = this->arguments()) |
159 | result->m_arguments.set(vm, result, arguments); |
160 | |
161 | if (m_rareData) { |
162 | result->m_rareData = makeUnique<SymbolTableRareData>(); |
163 | |
164 | { |
165 | auto iter = m_rareData->m_uniqueIDMap.begin(); |
166 | auto end = m_rareData->m_uniqueIDMap.end(); |
167 | for (; iter != end; ++iter) |
168 | result->m_rareData->m_uniqueIDMap.set(iter->key, iter->value); |
169 | } |
170 | |
171 | { |
172 | auto iter = m_rareData->m_offsetToVariableMap.begin(); |
173 | auto end = m_rareData->m_offsetToVariableMap.end(); |
174 | for (; iter != end; ++iter) |
175 | result->m_rareData->m_offsetToVariableMap.set(iter->key, iter->value); |
176 | } |
177 | |
178 | { |
179 | auto iter = m_rareData->m_uniqueTypeSetMap.begin(); |
180 | auto end = m_rareData->m_uniqueTypeSetMap.end(); |
181 | for (; iter != end; ++iter) |
182 | result->m_rareData->m_uniqueTypeSetMap.set(iter->key, iter->value); |
183 | } |
184 | } |
185 | |
186 | return result; |
187 | } |
188 | |
189 | void SymbolTable::prepareForTypeProfiling(const ConcurrentJSLocker&) |
190 | { |
191 | if (m_rareData) |
192 | return; |
193 | |
194 | m_rareData = makeUnique<SymbolTableRareData>(); |
195 | |
196 | for (auto iter = m_map.begin(), end = m_map.end(); iter != end; ++iter) { |
197 | m_rareData->m_uniqueIDMap.set(iter->key, TypeProfilerNeedsUniqueIDGeneration); |
198 | m_rareData->m_offsetToVariableMap.set(iter->value.varOffset(), iter->key); |
199 | } |
200 | } |
201 | |
202 | CodeBlock* SymbolTable::rareDataCodeBlock() |
203 | { |
204 | if (!m_rareData) |
205 | return nullptr; |
206 | |
207 | return m_rareData->m_codeBlock.get(); |
208 | } |
209 | |
210 | void SymbolTable::setRareDataCodeBlock(CodeBlock* codeBlock) |
211 | { |
212 | if (!m_rareData) |
213 | m_rareData = makeUnique<SymbolTableRareData>(); |
214 | |
215 | ASSERT(!m_rareData->m_codeBlock); |
216 | m_rareData->m_codeBlock.set(codeBlock->vm(), this, codeBlock); |
217 | } |
218 | |
219 | GlobalVariableID SymbolTable::uniqueIDForVariable(const ConcurrentJSLocker&, UniquedStringImpl* key, VM& vm) |
220 | { |
221 | RELEASE_ASSERT(m_rareData); |
222 | |
223 | auto iter = m_rareData->m_uniqueIDMap.find(key); |
224 | auto end = m_rareData->m_uniqueIDMap.end(); |
225 | if (iter == end) |
226 | return TypeProfilerNoGlobalIDExists; |
227 | |
228 | GlobalVariableID id = iter->value; |
229 | if (id == TypeProfilerNeedsUniqueIDGeneration) { |
230 | id = vm.typeProfiler()->getNextUniqueVariableID(); |
231 | m_rareData->m_uniqueIDMap.set(key, id); |
232 | m_rareData->m_uniqueTypeSetMap.set(key, TypeSet::create()); // Make a new global typeset for this corresponding ID. |
233 | } |
234 | |
235 | return id; |
236 | } |
237 | |
238 | GlobalVariableID SymbolTable::uniqueIDForOffset(const ConcurrentJSLocker& locker, VarOffset offset, VM& vm) |
239 | { |
240 | RELEASE_ASSERT(m_rareData); |
241 | |
242 | auto iter = m_rareData->m_offsetToVariableMap.find(offset); |
243 | auto end = m_rareData->m_offsetToVariableMap.end(); |
244 | if (iter == end) |
245 | return TypeProfilerNoGlobalIDExists; |
246 | |
247 | return uniqueIDForVariable(locker, iter->value.get(), vm); |
248 | } |
249 | |
250 | RefPtr<TypeSet> SymbolTable::globalTypeSetForOffset(const ConcurrentJSLocker& locker, VarOffset offset, VM& vm) |
251 | { |
252 | RELEASE_ASSERT(m_rareData); |
253 | |
254 | uniqueIDForOffset(locker, offset, vm); // Lazily create the TypeSet if necessary. |
255 | |
256 | auto iter = m_rareData->m_offsetToVariableMap.find(offset); |
257 | auto end = m_rareData->m_offsetToVariableMap.end(); |
258 | if (iter == end) |
259 | return nullptr; |
260 | |
261 | return globalTypeSetForVariable(locker, iter->value.get(), vm); |
262 | } |
263 | |
264 | RefPtr<TypeSet> SymbolTable::globalTypeSetForVariable(const ConcurrentJSLocker& locker, UniquedStringImpl* key, VM& vm) |
265 | { |
266 | RELEASE_ASSERT(m_rareData); |
267 | |
268 | uniqueIDForVariable(locker, key, vm); // Lazily create the TypeSet if necessary. |
269 | |
270 | auto iter = m_rareData->m_uniqueTypeSetMap.find(key); |
271 | auto end = m_rareData->m_uniqueTypeSetMap.end(); |
272 | if (iter == end) |
273 | return nullptr; |
274 | |
275 | return iter->value; |
276 | } |
277 | |
278 | } // namespace JSC |
279 | |
280 | |