1 | /* |
2 | * Copyright (C) 2012, 2014, 2015 Apple Inc. All rights reserved. |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without |
5 | * modification, are permitted provided that the following conditions |
6 | * are met: |
7 | * |
8 | * 1. Redistributions of source code must retain the above copyright |
9 | * notice, this list of conditions and the following disclaimer. |
10 | * 2. Redistributions in binary form must reproduce the above copyright |
11 | * notice, this list of conditions and the following disclaimer in the |
12 | * documentation and/or other materials provided with the distribution. |
13 | * 3. Neither the name of Apple Inc. ("Apple") nor the names of |
14 | * its contributors may be used to endorse or promote products derived |
15 | * from this software without specific prior written permission. |
16 | * |
17 | * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY |
18 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED |
19 | * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE |
20 | * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY |
21 | * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES |
22 | * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; |
23 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND |
24 | * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF |
26 | * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | */ |
28 | |
29 | #include "config.h" |
30 | #include "SymbolTable.h" |
31 | |
32 | #include "CodeBlock.h" |
33 | #include "JSDestructibleObject.h" |
34 | #include "JSCInlines.h" |
35 | #include "SlotVisitorInlines.h" |
36 | #include "TypeProfiler.h" |
37 | |
38 | namespace JSC { |
39 | |
// RTTI/class metadata for SymbolTable heap cells; the method table routes GC
// callbacks (destroy, visitChildren) to the static members defined below.
const ClassInfo SymbolTable::s_info = { "SymbolTable" , nullptr, nullptr, nullptr, CREATE_METHOD_TABLE(SymbolTable) };
41 | |
42 | SymbolTableEntry& SymbolTableEntry::copySlow(const SymbolTableEntry& other) |
43 | { |
44 | ASSERT(other.isFat()); |
45 | FatEntry* newFatEntry = new FatEntry(*other.fatEntry()); |
46 | freeFatEntry(); |
47 | m_bits = bitwise_cast<intptr_t>(newFatEntry); |
48 | return *this; |
49 | } |
50 | |
51 | void SymbolTable::destroy(JSCell* cell) |
52 | { |
53 | SymbolTable* thisObject = static_cast<SymbolTable*>(cell); |
54 | thisObject->SymbolTable::~SymbolTable(); |
55 | } |
56 | |
// Slow path of freeFatEntry(): only reached when this entry actually owns an
// out-of-line FatEntry, which is deallocated here.
void SymbolTableEntry::freeFatEntrySlow()
{
    ASSERT(isFat());
    delete fatEntry();
}
62 | |
63 | void SymbolTableEntry::prepareToWatch() |
64 | { |
65 | if (!isWatchable()) |
66 | return; |
67 | FatEntry* entry = inflate(); |
68 | if (entry->m_watchpoints) |
69 | return; |
70 | entry->m_watchpoints = adoptRef(new WatchpointSet(ClearWatchpoint)); |
71 | } |
72 | |
73 | SymbolTableEntry::FatEntry* SymbolTableEntry::inflateSlow() |
74 | { |
75 | FatEntry* entry = new FatEntry(m_bits); |
76 | m_bits = bitwise_cast<intptr_t>(entry); |
77 | return entry; |
78 | } |
79 | |
// Constructs an empty symbol table with default flags: does not use
// non-strict eval, is not a nested lexical scope, and is a plain var scope.
SymbolTable::SymbolTable(VM& vm)
    : JSCell(vm, vm.symbolTableStructure.get())
    , m_usesNonStrictEval(false)
    , m_nestedLexicalScope(false)
    , m_scopeType(VarScope)
{
}
87 | |
// Out-of-line destructor; invoked via SymbolTable::destroy() by the GC.
SymbolTable::~SymbolTable() { }
89 | |
// Completes cell initialization after allocation; defers entirely to the base class.
void SymbolTable::finishCreation(VM& vm)
{
    Base::finishCreation(vm);
}
94 | |
95 | void SymbolTable::visitChildren(JSCell* thisCell, SlotVisitor& visitor) |
96 | { |
97 | SymbolTable* thisSymbolTable = jsCast<SymbolTable*>(thisCell); |
98 | Base::visitChildren(thisSymbolTable, visitor); |
99 | |
100 | visitor.append(thisSymbolTable->m_arguments); |
101 | |
102 | if (thisSymbolTable->m_rareData) |
103 | visitor.append(thisSymbolTable->m_rareData->m_codeBlock); |
104 | |
105 | // Save some memory. This is O(n) to rebuild and we do so on the fly. |
106 | ConcurrentJSLocker locker(thisSymbolTable->m_lock); |
107 | thisSymbolTable->m_localToEntry = nullptr; |
108 | } |
109 | |
110 | const SymbolTable::LocalToEntryVec& SymbolTable::localToEntry(const ConcurrentJSLocker&) |
111 | { |
112 | if (UNLIKELY(!m_localToEntry)) { |
113 | unsigned size = 0; |
114 | for (auto& entry : m_map) { |
115 | VarOffset offset = entry.value.varOffset(); |
116 | if (offset.isScope()) |
117 | size = std::max(size, offset.scopeOffset().offset() + 1); |
118 | } |
119 | |
120 | m_localToEntry = std::make_unique<LocalToEntryVec>(size, nullptr); |
121 | for (auto& entry : m_map) { |
122 | VarOffset offset = entry.value.varOffset(); |
123 | if (offset.isScope()) |
124 | m_localToEntry->at(offset.scopeOffset().offset()) = &entry.value; |
125 | } |
126 | } |
127 | |
128 | return *m_localToEntry; |
129 | } |
130 | |
131 | SymbolTableEntry* SymbolTable::entryFor(const ConcurrentJSLocker& locker, ScopeOffset offset) |
132 | { |
133 | auto& toEntryVector = localToEntry(locker); |
134 | if (offset.offset() >= toEntryVector.size()) |
135 | return nullptr; |
136 | return toEntryVector[offset.offset()]; |
137 | } |
138 | |
139 | SymbolTable* SymbolTable::cloneScopePart(VM& vm) |
140 | { |
141 | SymbolTable* result = SymbolTable::create(vm); |
142 | |
143 | result->m_usesNonStrictEval = m_usesNonStrictEval; |
144 | result->m_nestedLexicalScope = m_nestedLexicalScope; |
145 | result->m_scopeType = m_scopeType; |
146 | |
147 | for (auto iter = m_map.begin(), end = m_map.end(); iter != end; ++iter) { |
148 | if (!iter->value.varOffset().isScope()) |
149 | continue; |
150 | result->m_map.add( |
151 | iter->key, |
152 | SymbolTableEntry(iter->value.varOffset(), iter->value.getAttributes())); |
153 | } |
154 | |
155 | result->m_maxScopeOffset = m_maxScopeOffset; |
156 | |
157 | if (ScopedArgumentsTable* arguments = this->arguments()) |
158 | result->m_arguments.set(vm, result, arguments); |
159 | |
160 | if (m_rareData) { |
161 | result->m_rareData = std::make_unique<SymbolTableRareData>(); |
162 | |
163 | { |
164 | auto iter = m_rareData->m_uniqueIDMap.begin(); |
165 | auto end = m_rareData->m_uniqueIDMap.end(); |
166 | for (; iter != end; ++iter) |
167 | result->m_rareData->m_uniqueIDMap.set(iter->key, iter->value); |
168 | } |
169 | |
170 | { |
171 | auto iter = m_rareData->m_offsetToVariableMap.begin(); |
172 | auto end = m_rareData->m_offsetToVariableMap.end(); |
173 | for (; iter != end; ++iter) |
174 | result->m_rareData->m_offsetToVariableMap.set(iter->key, iter->value); |
175 | } |
176 | |
177 | { |
178 | auto iter = m_rareData->m_uniqueTypeSetMap.begin(); |
179 | auto end = m_rareData->m_uniqueTypeSetMap.end(); |
180 | for (; iter != end; ++iter) |
181 | result->m_rareData->m_uniqueTypeSetMap.set(iter->key, iter->value); |
182 | } |
183 | } |
184 | |
185 | return result; |
186 | } |
187 | |
188 | void SymbolTable::prepareForTypeProfiling(const ConcurrentJSLocker&) |
189 | { |
190 | if (m_rareData) |
191 | return; |
192 | |
193 | m_rareData = std::make_unique<SymbolTableRareData>(); |
194 | |
195 | for (auto iter = m_map.begin(), end = m_map.end(); iter != end; ++iter) { |
196 | m_rareData->m_uniqueIDMap.set(iter->key, TypeProfilerNeedsUniqueIDGeneration); |
197 | m_rareData->m_offsetToVariableMap.set(iter->value.varOffset(), iter->key); |
198 | } |
199 | } |
200 | |
201 | CodeBlock* SymbolTable::rareDataCodeBlock() |
202 | { |
203 | if (!m_rareData) |
204 | return nullptr; |
205 | |
206 | return m_rareData->m_codeBlock.get(); |
207 | } |
208 | |
// Registers the CodeBlock associated with this symbol table, creating the
// rare data on demand. May be set at most once (asserted below); the write
// barrier records this table as the owner.
void SymbolTable::setRareDataCodeBlock(CodeBlock* codeBlock)
{
    if (!m_rareData)
        m_rareData = std::make_unique<SymbolTableRareData>();

    ASSERT(!m_rareData->m_codeBlock);
    m_rareData->m_codeBlock.set(*codeBlock->vm(), this, codeBlock);
}
217 | |
218 | GlobalVariableID SymbolTable::uniqueIDForVariable(const ConcurrentJSLocker&, UniquedStringImpl* key, VM& vm) |
219 | { |
220 | RELEASE_ASSERT(m_rareData); |
221 | |
222 | auto iter = m_rareData->m_uniqueIDMap.find(key); |
223 | auto end = m_rareData->m_uniqueIDMap.end(); |
224 | if (iter == end) |
225 | return TypeProfilerNoGlobalIDExists; |
226 | |
227 | GlobalVariableID id = iter->value; |
228 | if (id == TypeProfilerNeedsUniqueIDGeneration) { |
229 | id = vm.typeProfiler()->getNextUniqueVariableID(); |
230 | m_rareData->m_uniqueIDMap.set(key, id); |
231 | m_rareData->m_uniqueTypeSetMap.set(key, TypeSet::create()); // Make a new global typeset for this corresponding ID. |
232 | } |
233 | |
234 | return id; |
235 | } |
236 | |
237 | GlobalVariableID SymbolTable::uniqueIDForOffset(const ConcurrentJSLocker& locker, VarOffset offset, VM& vm) |
238 | { |
239 | RELEASE_ASSERT(m_rareData); |
240 | |
241 | auto iter = m_rareData->m_offsetToVariableMap.find(offset); |
242 | auto end = m_rareData->m_offsetToVariableMap.end(); |
243 | if (iter == end) |
244 | return TypeProfilerNoGlobalIDExists; |
245 | |
246 | return uniqueIDForVariable(locker, iter->value.get(), vm); |
247 | } |
248 | |
249 | RefPtr<TypeSet> SymbolTable::globalTypeSetForOffset(const ConcurrentJSLocker& locker, VarOffset offset, VM& vm) |
250 | { |
251 | RELEASE_ASSERT(m_rareData); |
252 | |
253 | uniqueIDForOffset(locker, offset, vm); // Lazily create the TypeSet if necessary. |
254 | |
255 | auto iter = m_rareData->m_offsetToVariableMap.find(offset); |
256 | auto end = m_rareData->m_offsetToVariableMap.end(); |
257 | if (iter == end) |
258 | return nullptr; |
259 | |
260 | return globalTypeSetForVariable(locker, iter->value.get(), vm); |
261 | } |
262 | |
263 | RefPtr<TypeSet> SymbolTable::globalTypeSetForVariable(const ConcurrentJSLocker& locker, UniquedStringImpl* key, VM& vm) |
264 | { |
265 | RELEASE_ASSERT(m_rareData); |
266 | |
267 | uniqueIDForVariable(locker, key, vm); // Lazily create the TypeSet if necessary. |
268 | |
269 | auto iter = m_rareData->m_uniqueTypeSetMap.find(key); |
270 | auto end = m_rareData->m_uniqueTypeSetMap.end(); |
271 | if (iter == end) |
272 | return nullptr; |
273 | |
274 | return iter->value; |
275 | } |
276 | |
277 | } // namespace JSC |
278 | |
279 | |