/*
 * Copyright (C) 2012-2019 Apple Inc. All Rights Reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#include "UnlinkedCodeBlock.h"

#include "BytecodeGenerator.h"
#include "BytecodeLivenessAnalysis.h"
#include "BytecodeRewriter.h"
#include "ClassInfo.h"
#include "CodeCache.h"
#include "ExecutableInfo.h"
#include "FunctionOverrides.h"
#include "InstructionStream.h"
#include "JSCInlines.h"
#include "JSString.h"
#include "Opcode.h"
#include "Parser.h"
#include "PreciseJumpTargetsInlines.h"
#include "SourceProvider.h"
#include "Structure.h"
#include "SymbolTable.h"
#include "UnlinkedEvalCodeBlock.h"
#include "UnlinkedFunctionCodeBlock.h"
#include "UnlinkedMetadataTableInlines.h"
#include "UnlinkedModuleProgramCodeBlock.h"
#include "UnlinkedProgramCodeBlock.h"
#include <wtf/DataLog.h>

namespace JSC {

const ClassInfo UnlinkedCodeBlock::s_info = { "UnlinkedCodeBlock", nullptr, nullptr, nullptr, CREATE_METHOD_TABLE(UnlinkedCodeBlock) };

UnlinkedCodeBlock::UnlinkedCodeBlock(VM* vm, Structure* structure, CodeType codeType, const ExecutableInfo& info, OptionSet<CodeGenerationMode> codeGenerationMode)
    : Base(*vm, structure)
    , m_usesEval(info.usesEval())
    , m_isStrictMode(info.isStrictMode())
    , m_isConstructor(info.isConstructor())
    , m_hasCapturedVariables(false)
    , m_isBuiltinFunction(info.isBuiltinFunction())
    , m_superBinding(static_cast<unsigned>(info.superBinding()))
    , m_scriptMode(static_cast<unsigned>(info.scriptMode()))
    , m_isArrowFunctionContext(info.isArrowFunctionContext())
    , m_isClassContext(info.isClassContext())
    , m_hasTailCalls(false)
    , m_constructorKind(static_cast<unsigned>(info.constructorKind()))
    , m_derivedContextType(static_cast<unsigned>(info.derivedContextType()))
    , m_evalContextType(static_cast<unsigned>(info.evalContextType()))
    , m_codeType(static_cast<unsigned>(codeType))
    , m_didOptimize(static_cast<unsigned>(MixedTriState))
    , m_age(0)
    , m_parseMode(info.parseMode())
    , m_codeGenerationMode(codeGenerationMode)
    , m_metadata(UnlinkedMetadataTable::create())
{
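    // Zero every link-time constant register index up front; these slots are
    // presumably filled in later, as the bytecode generator registers the
    // link-time constants it actually needs.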
    for (auto& constantRegisterIndex : m_linkTimeConstants)
        constantRegisterIndex = 0;
    ASSERT(m_constructorKind == static_cast<unsigned>(info.constructorKind()));
    ASSERT(m_codeType == static_cast<unsigned>(codeType));
    ASSERT(m_didOptimize == static_cast<unsigned>(MixedTriState));
}

void UnlinkedCodeBlock::visitChildren(JSCell* cell, SlotVisitor& visitor)
{
    UnlinkedCodeBlock* thisObject = jsCast<UnlinkedCodeBlock*>(cell);
    ASSERT_GC_OBJECT_INHERITS(thisObject, info());
    Base::visitChildren(thisObject, visitor);
    auto locker = holdLock(thisObject->cellLock());
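    // Age this code block by one on every GC visit, saturating at maxAge; the
    // update happens under the cell lock taken above. The age is read
    // elsewhere, for example when deciding whether cached unlinked code is
    // still worth keeping.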
    thisObject->m_age = std::min<unsigned>(static_cast<unsigned>(thisObject->m_age) + 1, maxAge);
    for (FunctionExpressionVector::iterator ptr = thisObject->m_functionDecls.begin(), end = thisObject->m_functionDecls.end(); ptr != end; ++ptr)
        visitor.append(*ptr);
    for (FunctionExpressionVector::iterator ptr = thisObject->m_functionExprs.begin(), end = thisObject->m_functionExprs.end(); ptr != end; ++ptr)
        visitor.append(*ptr);
    visitor.appendValues(thisObject->m_constantRegisters.data(), thisObject->m_constantRegisters.size());
    size_t extraMemory = thisObject->m_metadata->sizeInBytes();
    if (thisObject->m_instructions)
        extraMemory += thisObject->m_instructions->sizeInBytes();
    visitor.reportExtraMemoryVisited(extraMemory);
}

size_t UnlinkedCodeBlock::estimatedSize(JSCell* cell, VM& vm)
{
    UnlinkedCodeBlock* thisObject = jsCast<UnlinkedCodeBlock*>(cell);
    size_t extraSize = thisObject->m_metadata->sizeInBytes();
    if (thisObject->m_instructions)
        extraSize += thisObject->m_instructions->sizeInBytes();
    return Base::estimatedSize(cell, vm) + extraSize;
}

int UnlinkedCodeBlock::lineNumberForBytecodeOffset(unsigned bytecodeOffset)
{
    ASSERT(bytecodeOffset < instructions().size());
    int divot { 0 };
    int startOffset { 0 };
    int endOffset { 0 };
    unsigned line { 0 };
    unsigned column { 0 };
    expressionRangeForBytecodeOffset(bytecodeOffset, divot, startOffset, endOffset, line, column);
    return line;
}

inline void UnlinkedCodeBlock::getLineAndColumn(const ExpressionRangeInfo& info,
    unsigned& line, unsigned& column) const
{
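    // Decode line and column according to whichever encoding addExpressionInfo()
    // chose; fully fat line-and-column positions live out of line in the rare data.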
    switch (info.mode) {
    case ExpressionRangeInfo::FatLineMode:
        info.decodeFatLineMode(line, column);
        break;
    case ExpressionRangeInfo::FatColumnMode:
        info.decodeFatColumnMode(line, column);
        break;
    case ExpressionRangeInfo::FatLineAndColumnMode: {
        unsigned fatIndex = info.position;
        ExpressionRangeInfo::FatPosition& fatPos = m_rareData->m_expressionInfoFatPositions[fatIndex];
        line = fatPos.line;
        column = fatPos.column;
        break;
    }
    } // switch
}

#ifndef NDEBUG
static void dumpLineColumnEntry(size_t index, const InstructionStream& instructionStream, unsigned instructionOffset, unsigned line, unsigned column)
{
    const auto instruction = instructionStream.at(instructionOffset);
    const char* event = "";
    if (instruction->is<OpDebug>()) {
        switch (instruction->as<OpDebug>().m_debugHookType) {
        case WillExecuteProgram: event = " WillExecuteProgram"; break;
        case DidExecuteProgram: event = " DidExecuteProgram"; break;
        case DidEnterCallFrame: event = " DidEnterCallFrame"; break;
        case DidReachBreakpoint: event = " DidReachBreakpoint"; break;
        case WillLeaveCallFrame: event = " WillLeaveCallFrame"; break;
        case WillExecuteStatement: event = " WillExecuteStatement"; break;
        case WillExecuteExpression: event = " WillExecuteExpression"; break;
        }
    }
    dataLogF(" [%zu] pc %u @ line %u col %u : %s%s\n", index, instructionOffset, line, column, instruction->name(), event);
}

void UnlinkedCodeBlock::dumpExpressionRangeInfo()
{
    Vector<ExpressionRangeInfo>& expressionInfo = m_expressionInfo;

    size_t size = m_expressionInfo.size();
    dataLogF("UnlinkedCodeBlock %p expressionRangeInfo[%zu] {\n", this, size);
    for (size_t i = 0; i < size; i++) {
        ExpressionRangeInfo& info = expressionInfo[i];
        unsigned line;
        unsigned column;
        getLineAndColumn(info, line, column);
        dumpLineColumnEntry(i, instructions(), info.instructionOffset, line, column);
    }
    dataLog("}\n");
}
#endif

void UnlinkedCodeBlock::expressionRangeForBytecodeOffset(unsigned bytecodeOffset,
    int& divot, int& startOffset, int& endOffset, unsigned& line, unsigned& column) const
{
    ASSERT(bytecodeOffset < instructions().size());

    if (!m_expressionInfo.size()) {
        startOffset = 0;
        endOffset = 0;
        divot = 0;
        line = 0;
        column = 0;
        return;
    }

    const Vector<ExpressionRangeInfo>& expressionInfo = m_expressionInfo;

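    // Binary search for the first entry whose instructionOffset is greater than
    // bytecodeOffset; the entry just before it covers the requested offset. For
    // example, with entries at offsets [0, 4, 9] and bytecodeOffset 6, the loop
    // ends with low == 2, so the entry at offset 4 is used. If bytecodeOffset
    // precedes the first entry, the first entry is used.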
    int low = 0;
    int high = expressionInfo.size();
    while (low < high) {
        int mid = low + (high - low) / 2;
        if (expressionInfo[mid].instructionOffset <= bytecodeOffset)
            low = mid + 1;
        else
            high = mid;
    }

    if (!low)
        low = 1;

    const ExpressionRangeInfo& info = expressionInfo[low - 1];
    startOffset = info.startOffset;
    endOffset = info.endOffset;
    divot = info.divotPoint;
    getLineAndColumn(info, line, column);
}

void UnlinkedCodeBlock::addExpressionInfo(unsigned instructionOffset,
    int divot, int startOffset, int endOffset, unsigned line, unsigned column)
{
    if (divot > ExpressionRangeInfo::MaxDivot) {
        // Overflow has occurred; we can only give line number info for errors in this region.
        divot = 0;
        startOffset = 0;
        endOffset = 0;
    } else if (startOffset > ExpressionRangeInfo::MaxOffset) {
        // If the start offset is out of bounds, we clear both offsets so we only
        // get the divot marker. The error message will have to be reduced to a
        // line and charPosition number.
        startOffset = 0;
        endOffset = 0;
    } else if (endOffset > ExpressionRangeInfo::MaxOffset) {
        // The end offset is only used for additional context, and is much more likely
        // to overflow (e.g. function call arguments), so we are willing to drop it
        // without dropping the rest of the range.
        endOffset = 0;
    }

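    // Pick the most compact encoding that can hold this position: the fat "line"
    // encoding when both line and column are small, the fat "column" encoding when
    // they still fit its wider limits, and otherwise a full fat position stored
    // out of line in the rare data.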
    unsigned positionMode =
        (line <= ExpressionRangeInfo::MaxFatLineModeLine && column <= ExpressionRangeInfo::MaxFatLineModeColumn)
        ? ExpressionRangeInfo::FatLineMode
        : (line <= ExpressionRangeInfo::MaxFatColumnModeLine && column <= ExpressionRangeInfo::MaxFatColumnModeColumn)
        ? ExpressionRangeInfo::FatColumnMode
        : ExpressionRangeInfo::FatLineAndColumnMode;

    ExpressionRangeInfo info;
    info.instructionOffset = instructionOffset;
    info.divotPoint = divot;
    info.startOffset = startOffset;
    info.endOffset = endOffset;

    info.mode = positionMode;
    switch (positionMode) {
    case ExpressionRangeInfo::FatLineMode:
        info.encodeFatLineMode(line, column);
        break;
    case ExpressionRangeInfo::FatColumnMode:
        info.encodeFatColumnMode(line, column);
        break;
    case ExpressionRangeInfo::FatLineAndColumnMode: {
        createRareDataIfNecessary();
        unsigned fatIndex = m_rareData->m_expressionInfoFatPositions.size();
        ExpressionRangeInfo::FatPosition fatPos = { line, column };
        m_rareData->m_expressionInfoFatPositions.append(fatPos);
        info.position = fatIndex;
        break;
    }
    } // switch

    m_expressionInfo.append(info);
}

bool UnlinkedCodeBlock::typeProfilerExpressionInfoForBytecodeOffset(unsigned bytecodeOffset, unsigned& startDivot, unsigned& endDivot)
{
    static const bool verbose = false;
    if (!m_rareData) {
        if (verbose)
            dataLogF("Don't have assignment info for offset:%u\n", bytecodeOffset);
        startDivot = UINT_MAX;
        endDivot = UINT_MAX;
        return false;
    }

    auto iter = m_rareData->m_typeProfilerInfoMap.find(bytecodeOffset);
    if (iter == m_rareData->m_typeProfilerInfoMap.end()) {
        if (verbose)
            dataLogF("Don't have assignment info for offset:%u\n", bytecodeOffset);
        startDivot = UINT_MAX;
        endDivot = UINT_MAX;
        return false;
    }

    RareData::TypeProfilerExpressionRange& range = iter->value;
    startDivot = range.m_startDivot;
    endDivot = range.m_endDivot;
    return true;
}

void UnlinkedCodeBlock::addTypeProfilerExpressionInfo(unsigned instructionOffset, unsigned startDivot, unsigned endDivot)
{
    createRareDataIfNecessary();
    RareData::TypeProfilerExpressionRange range;
    range.m_startDivot = startDivot;
    range.m_endDivot = endDivot;
    m_rareData->m_typeProfilerInfoMap.set(instructionOffset, range);
}

UnlinkedCodeBlock::~UnlinkedCodeBlock()
{
}

void UnlinkedCodeBlock::setInstructions(std::unique_ptr<InstructionStream> instructions)
{
    ASSERT(instructions);
    {
        auto locker = holdLock(cellLock());
        m_instructions = WTFMove(instructions);
        m_metadata->finalize();
    }
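    // The instruction stream and the finalized metadata table are allocated
    // outside the GC cell, so report them as extra memory to keep the
    // collector's accounting accurate.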
    Heap::heap(this)->reportExtraMemoryAllocated(m_instructions->sizeInBytes() + m_metadata->sizeInBytes());
}

const InstructionStream& UnlinkedCodeBlock::instructions() const
{
    ASSERT(m_instructions.get());
    return *m_instructions;
}

UnlinkedHandlerInfo* UnlinkedCodeBlock::handlerForBytecodeOffset(unsigned bytecodeOffset, RequiredHandler requiredHandler)
{
    return handlerForIndex(bytecodeOffset, requiredHandler);
}

UnlinkedHandlerInfo* UnlinkedCodeBlock::handlerForIndex(unsigned index, RequiredHandler requiredHandler)
{
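    // Exception handler tables live in the rare data; a block without rare data
    // has no handlers to search.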
    if (!m_rareData)
        return nullptr;
    return UnlinkedHandlerInfo::handlerForIndex(m_rareData->m_exceptionHandlers, index, requiredHandler);
}

void UnlinkedCodeBlock::applyModification(BytecodeRewriter& rewriter, InstructionStreamWriter& instructions)
{
    // Before applying the changes, we adjust the jumps based on the original bytecode offset, the offset to the jump target, and
    // the insertion information.

    rewriter.adjustJumpTargets();

    // Then, exception handlers should be adjusted.
    if (m_rareData) {
        for (UnlinkedHandlerInfo& handler : m_rareData->m_exceptionHandlers) {
            handler.target = rewriter.adjustAbsoluteOffset(handler.target);
            handler.start = rewriter.adjustAbsoluteOffset(handler.start);
            handler.end = rewriter.adjustAbsoluteOffset(handler.end);
        }

        for (size_t i = 0; i < m_rareData->m_opProfileControlFlowBytecodeOffsets.size(); ++i)
            m_rareData->m_opProfileControlFlowBytecodeOffsets[i] = rewriter.adjustAbsoluteOffset(m_rareData->m_opProfileControlFlowBytecodeOffsets[i]);

        if (!m_rareData->m_typeProfilerInfoMap.isEmpty()) {
            HashMap<unsigned, RareData::TypeProfilerExpressionRange> adjustedTypeProfilerInfoMap;
            for (auto& entry : m_rareData->m_typeProfilerInfoMap)
                adjustedTypeProfilerInfoMap.set(rewriter.adjustAbsoluteOffset(entry.key), entry.value);
            m_rareData->m_typeProfilerInfoMap.swap(adjustedTypeProfilerInfoMap);
        }
    }

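    // Bytecode offsets recorded outside the instruction stream (property access
    // sites and expression-range info) are absolute as well, so they need the
    // same remapping.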
    for (size_t i = 0; i < m_propertyAccessInstructions.size(); ++i)
        m_propertyAccessInstructions[i] = rewriter.adjustAbsoluteOffset(m_propertyAccessInstructions[i]);

    for (size_t i = 0; i < m_expressionInfo.size(); ++i)
        m_expressionInfo[i].instructionOffset = rewriter.adjustAbsoluteOffset(m_expressionInfo[i].instructionOffset);

    // Then, modify the unlinked instructions.
    rewriter.applyModification();

    // And recompute the jump target based on the modified unlinked instructions.
    m_jumpTargets.clear();
    recomputePreciseJumpTargets(this, instructions, m_jumpTargets);
}

void UnlinkedCodeBlock::shrinkToFit()
{
    auto locker = holdLock(cellLock());

    m_jumpTargets.shrinkToFit();
    m_propertyAccessInstructions.shrinkToFit();
    m_identifiers.shrinkToFit();
    m_constantRegisters.shrinkToFit();
    m_constantsSourceCodeRepresentation.shrinkToFit();
    m_functionDecls.shrinkToFit();
    m_functionExprs.shrinkToFit();
    m_expressionInfo.shrinkToFit();

    if (m_rareData) {
        m_rareData->m_exceptionHandlers.shrinkToFit();
        m_rareData->m_switchJumpTables.shrinkToFit();
        m_rareData->m_stringSwitchJumpTables.shrinkToFit();
        m_rareData->m_expressionInfoFatPositions.shrinkToFit();
        m_rareData->m_opProfileControlFlowBytecodeOffsets.shrinkToFit();
        m_rareData->m_bitVectors.shrinkToFit();
        m_rareData->m_constantIdentifierSets.shrinkToFit();
    }
}

void UnlinkedCodeBlock::dump(PrintStream&) const
{
}

BytecodeLivenessAnalysis& UnlinkedCodeBlock::livenessAnalysisSlow(CodeBlock* codeBlock)
{
    RELEASE_ASSERT(codeBlock->unlinkedCodeBlock() == this);

    {
        ConcurrentJSLocker locker(m_lock);
        if (!m_liveness) {
            // There is a chance two compiler threads raced to the slow path.
            // Grabbing the lock above defends against computing liveness twice.
            m_liveness = std::make_unique<BytecodeLivenessAnalysis>(codeBlock);
        }
    }

    return *m_liveness;
}

void UnlinkedCodeBlock::addOutOfLineJumpTarget(InstructionStream::Offset bytecodeOffset, int target)
{
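    // A target of 0 is effectively reserved: in the instruction stream it marks
    // a jump whose real target lives in this out-of-line table, so a genuine
    // target of 0 could never be looked up again.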
    RELEASE_ASSERT(target);
    m_outOfLineJumpTargets.set(bytecodeOffset, target);
}

int UnlinkedCodeBlock::outOfLineJumpOffset(InstructionStream::Offset bytecodeOffset)
{
    ASSERT(m_outOfLineJumpTargets.contains(bytecodeOffset));
    return m_outOfLineJumpTargets.get(bytecodeOffset);
}

} // namespace JSC
