/*
 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpointSet.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGWorklist.h"
#include "DirectEvalExecutable.h"
#include "Disassembler.h"
#include "DoublePredictionFuzzerAgent.h"
#include "Error.h"
#include "ErrorConstructor.h"
#include "ErrorInstance.h"
#include "EvalCodeBlock.h"
#include "Exception.h"
#include "ExecutableToCodeBlockEdge.h"
#include "FTLThunks.h"
#include "FastMallocAlignedMemoryAllocator.h"
#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "FunctionExecutable.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "GigacageAlignedMemoryAllocator.h"
#include "HasOwnPropertyCache.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "IndirectEvalExecutable.h"
#include "Interpreter.h"
#include "IntlCollatorConstructor.h"
#include "IntlDateTimeFormatConstructor.h"
#include "IntlNumberFormatConstructor.h"
#include "IntlPluralRulesConstructor.h"
#include "JITCode.h"
#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSArrayBufferConstructor.h"
#include "JSAsyncFunction.h"
#include "JSBigInt.h"
#include "JSBoundFunction.h"
#include "JSCInlines.h"
#include "JSCallbackFunction.h"
#include "JSCustomGetterSetterFunction.h"
#include "JSDestructibleObjectHeapCellType.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSImmutableButterfly.h"
#include "JSInternalPromiseDeferred.h"
#include "JSLock.h"
#include "JSMap.h"
#include "JSMapIterator.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSScriptFetchParameters.h"
#include "JSScriptFetcher.h"
#include "JSSet.h"
#include "JSSetIterator.h"
#include "JSSourceCode.h"
#include "JSStringHeapCellType.h"
#include "JSTemplateObjectDescriptor.h"
#include "JSWeakMap.h"
#include "JSWeakObjectRef.h"
#include "JSWeakSet.h"
#include "JSWebAssembly.h"
#include "JSWebAssemblyCodeBlock.h"
#include "JSWebAssemblyCodeBlockHeapCellType.h"
#include "JSWithScope.h"
#include "LLIntData.h"
#include "Lexer.h"
#include "Lookup.h"
#include "MinimumReservedZoneSize.h"
#include "ModuleProgramCodeBlock.h"
#include "ModuleProgramExecutable.h"
#include "NativeErrorConstructor.h"
#include "NativeExecutable.h"
#include "NativeStdFunctionCell.h"
#include "Nodes.h"
#include "ObjCCallbackFunction.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "ProgramExecutable.h"
#include "PromiseDeferredTimer.h"
#include "PropertyMapHashTable.h"
#include "ProxyRevoke.h"
#include "RandomizingFuzzerAgent.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TestRunnerUtils.h"
#include "ThunkGenerators.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "VMInlines.h"
#include "VMInspector.h"
#include "VariableEnvironment.h"
#include "WasmWorklist.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WebAssemblyFunction.h"
#include "WebAssemblyFunctionHeapCellType.h"
#include "WebAssemblyWrapperFunction.h"
#include <wtf/ProcessID.h>
#include <wtf/ReadWriteLock.h>
#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/text/AtomStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if ENABLE(C_LOOP)
#include "CLoopStack.h"
#include "CLoopStackInlines.h"
#endif

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

namespace JSC {

#if ENABLE(JIT)
#if !ASSERT_DISABLED
bool VM::s_canUseJITIsSet = false;
#endif
bool VM::s_canUseJIT = false;
#endif

Atomic<unsigned> VM::s_numberOfIDs;

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
static bool enableAssembler()
{
    if (!Options::useJIT())
        return false;

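    // Usage sketch (illustrative, not part of the original code): with the jsc
    // shell, running `JavaScriptCoreUseJIT=0 jsc script.js` disables the
    // assembler at run time; leaving the variable unset, or setting it to a
    // value that atoi() parses as non-zero, keeps it enabled.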
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    if (canUseJITString && !atoi(canUseJITString))
        return false;

    ExecutableAllocator::initializeUnderlyingAllocator();
    if (!ExecutableAllocator::singleton().isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

    return true;
}
#endif // ENABLE(ASSEMBLER)

bool VM::canUseAssembler()
{
#if ENABLE(ASSEMBLER)
    static std::once_flag onceKey;
    static bool enabled = false;
    std::call_once(onceKey, [] {
        enabled = enableAssembler();
    });
    return enabled;
#else
    return false; // interpreter only
#endif
}

void VM::computeCanUseJIT()
{
#if ENABLE(JIT)
#if !ASSERT_DISABLED
    RELEASE_ASSERT(!s_canUseJITIsSet);
    s_canUseJITIsSet = true;
#endif
    s_canUseJIT = VM::canUseAssembler() && Options::useJIT();
#endif
}

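// nextID() hands out VM IDs via a compare-and-swap retry loop, so concurrent
// VM creation on multiple threads never produces duplicate IDs. The counter
// starts at zero and each VM receives the incremented value, so 0 is never a
// valid VM ID.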
inline unsigned VM::nextID()
{
    for (;;) {
        unsigned currentNumberOfIDs = s_numberOfIDs.load();
        unsigned newID = currentNumberOfIDs + 1;
        if (s_numberOfIDs.compareExchangeWeak(currentNumberOfIDs, newID))
            return newID;
    }
}

static bool vmCreationShouldCrash = false;

VM::VM(VMType vmType, HeapType heapType)
    : m_id(nextID())
    , m_apiLock(adoptRef(new JSLock(this)))
#if USE(CF)
    , m_runLoop(CFRunLoopGetCurrent())
#endif // USE(CF)
    , heap(this, heapType)
    , fastMallocAllocator(std::make_unique<FastMallocAlignedMemoryAllocator>())
    , primitiveGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
    , jsValueGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
    , auxiliaryHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
    , immutableButterflyHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCellWithInteriorPointers)))
    , cellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
    , destructibleCellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
    , stringHeapCellType(std::make_unique<JSStringHeapCellType>())
    , destructibleObjectHeapCellType(std::make_unique<JSDestructibleObjectHeapCellType>())
#if ENABLE(WEBASSEMBLY)
    , webAssemblyCodeBlockHeapCellType(std::make_unique<JSWebAssemblyCodeBlockHeapCellType>())
    , webAssemblyFunctionHeapCellType(std::make_unique<WebAssemblyFunctionHeapCellType>())
#endif
    , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get())
    , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get())
    , immutableButterflyJSValueGigacageAuxiliarySpace("ImmutableButterfly Gigacage JSCellWithInteriorPointers", heap, immutableButterflyHeapCellType.get(), jsValueGigacageAllocator.get())
    , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get())
    , jsValueGigacageCellSpace("JSValue Gigacage JSCell", heap, cellHeapCellType.get(), jsValueGigacageAllocator.get())
    , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get())
    , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get())
    , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
    , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
    , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge)
    , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction)
    , internalFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), InternalFunction)
    , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable)
    , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable)
    , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData)
    , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure)
    , symbolTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), SymbolTable)
    , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
    , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
    , codeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), CodeBlock)
    , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable)
    , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable)
    , unlinkedFunctionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), UnlinkedFunctionExecutable)
    , vmType(vmType)
    , clientData(0)
    , topEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , promiseDeferredTimer(std::make_unique<PromiseDeferredTimer>(*this))
    , m_atomStringTable(vmType == Default ? Thread::current().atomStringTable() : new AtomStringTable)
    , propertyNames(nullptr)
    , emptyList(new ArgList)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , structureCache(*this)
    , interpreter(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
    , m_compactVariableMap(adoptRef(*(new CompactVariableMap)))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_primitiveGigacageEnabled(IsWatched)
    , m_controlFlowProfilerEnabledCount(0)
{
    if (UNLIKELY(vmCreationShouldCrash))
        CRASH_WITH_INFO(0x4242424220202020, 0xbadbeef0badbeef, 0x1234123412341234, 0x1337133713371337);

    interpreter = new Interpreter(*this);
    StackBounds stack = Thread::current().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    JSRunLoopTimer::Manager::shared().registerVM(*this);

    // Be careful to keep everything consistent here.
    JSLockHolder lock(this);
    AtomStringTable* existingEntryAtomStringTable = Thread::current().setCurrentAtomStringTable(m_atomStringTable);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));

    smallStrings.initializeCommonStrings(*this);

    propertyNames = new CommonIdentifiers(this);
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));

    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithInt32));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithDouble));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithContiguous));

    sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
    scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
    scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpointSet::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
    executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));

    // Eagerly initialize constant cells since the concurrent compiler can access them.
    if (canUseJIT()) {
        sentinelMapBucket();
        sentinelSetBucket();
    }

    Thread::current().setCurrentAtomStringTable(existingEntryAtomStringTable);

#if !ENABLE(C_LOOP)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (UNLIKELY(Options::useProfiler())) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::useRandomizingFuzzerAgent())
        setFuzzerAgent(std::make_unique<RandomizingFuzzerAgent>(*this));
    else if (Options::useDoublePredictionFuzzerAgent())
        setFuzzerAgent(std::make_unique<DoublePredictionFuzzerAgent>(*this));

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
    }

#if ENABLE(JIT)
    // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
    if (canUseJIT()) {
        jitStubs = std::make_unique<JITThunks>();
#if ENABLE(FTL_JIT)
        ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)
        getCTIInternalFunctionTrampolineFor(CodeForCall);
        getCTIInternalFunctionTrampolineFor(CodeForConstruct);
    }
#endif

    if (Options::forceDebuggerBytecodeGeneration() || Options::alwaysUseShadowChicken())
        ensureShadowChicken();

    VMInspector::instance().add(this);
}

static ReadWriteLock s_destructionLock;

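// Note: each ~VM() below holds s_destructionLock for reading for its entire
// run, so taking the write lock here blocks until every in-flight VM
// destruction has finished.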
void waitForVMDestruction()
{
    auto locker = holdLock(s_destructionLock.write());
}

VM::~VM()
{
    auto destructionLocker = holdLock(s_destructionLock.read());

    Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
    promiseDeferredTimer->stopRunningTasks();
#if ENABLE(WEBASSEMBLY)
    if (Wasm::Worklist* worklist = Wasm::existingWorklistOrNull())
        worklist->stopAllPlansForContext(wasmContext);
#endif
    if (UNLIKELY(m_watchdog))
        m_watchdog->willDestroyVM(this);
    m_traps.willDestroyVM();
    VMInspector::instance().remove(this);

    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
    if (JITWorklist* worklist = JITWorklist::existingGlobalWorklistOrNull())
        worklist->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(currentThreadIsHoldingAPILock());
    m_apiLock->willDestroyVM(this);
    smallStrings.setIsInitialized(false);
    heap.lastChanceToFinalize();

    JSRunLoopTimer::Manager::shared().unregisterVM(*this);

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomStringTable;

    delete clientData;
    delete m_regExpCache;

#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
        fastFree(m_scratchBuffers[i]);
#endif
}

void VM::primitiveGigacageDisabledCallback(void* argument)
{
    static_cast<VM*>(argument)->primitiveGigacageDisabled();
}

void VM::primitiveGigacageDisabled()
{
    if (m_apiLock->currentThreadIsHoldingLock()) {
        m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
        return;
    }

    // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
    // uncaged buffer in a nicely synchronized manner.
    m_needToFirePrimitiveGigacageEnabled = true;
}

void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

Watchdog& VM::ensureWatchdog()
{
    if (!m_watchdog)
        m_watchdog = adoptRef(new Watchdog(this));
    return *m_watchdog;
}

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = std::make_unique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}

static Ref<NativeJITCode> jitCodeForCallTrampoline()
{
    static NativeJITCode* result;
    static std::once_flag onceKey;
    std::call_once(onceKey, [&] {
        result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITType::HostCallThunk, NoIntrinsic);
    });
    return makeRef(*result);
}

static Ref<NativeJITCode> jitCodeForConstructTrampoline()
{
    static NativeJITCode* result;
    static std::once_flag onceKey;
    std::call_once(onceKey, [&] {
        result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITType::HostCallThunk, NoIntrinsic);
    });
    return makeRef(*result);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#endif // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
    UNUSED_PARAM(signature);
    return NativeExecutable::create(*this, jitCodeForCallTrampoline(), function, jitCodeForConstructTrampoline(), constructor, name);
}

MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        if (kind == CodeForCall)
            return jitStubs->ctiInternalFunctionCall(this).retagged<JSEntryPtrTag>();
        return jitStubs->ctiInternalFunctionConstruct(this).retagged<JSEntryPtrTag>();
    }
#endif
    if (kind == CodeForCall)
        return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
    return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
}

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

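// whenIdle() runs the callback immediately if nothing is currently entered on
// this VM; otherwise it defers the callback until the active VMEntryScope
// pops. Hypothetical usage sketch (the lambda body is illustrative only):
//
//     vm.whenIdle([&vm] { vm.heap.reportAbandonedObjectGraph(); });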
void VM::whenIdle(Function<void()>&& callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(WTFMove(callback));
}

void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        heap.deleteAllCodeBlocks(effort);
    });
}

void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks(effort);
        heap.deleteAllUnlinkedCodeBlocks(effort);
        heap.reportAbandonedObjectGraph();
    });
}

void VM::shrinkFootprintWhenIdle()
{
    whenIdle([=] () {
        sanitizeStackForVM(this);
        deleteAllCode(DeleteAllCodeIfNotCollecting);
        heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
        // FIXME: Consider stopping various automatic threads here.
        // https://bugs.webkit.org/show_bug.cgi?id=185447
        WTF::releaseFastMallocFreeMemory();
    });
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

Exception* VM::throwException(ExecState* exec, Exception* exception)
{
    ASSERT(exec == topCallFrame || exec->isGlobalExec() || exec == exec->lexicalGlobalObject()->callFrameAtDebuggerEntry());
    CallFrame* throwOriginFrame = exec->isGlobalExec() ? exec : topJSCallFrame();

    if (Options::breakOnThrow()) {
        CodeBlock* codeBlock = throwOriginFrame ? throwOriginFrame->codeBlock() : nullptr;
        dataLog("Throwing exception in call frame ", RawPointer(throwOriginFrame), " for code block ", codeBlock, "\n");
        CRASH();
    }

    interpreter->notifyDebuggerOfExceptionToBeThrown(*this, throwOriginFrame, exception);

    setException(exception);

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
    m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
    m_throwingThread = &Thread::current();
#endif
    return exception;
}

Exception* VM::throwException(ExecState* exec, JSValue thrownValue)
{
    VM& vm = *this;
    Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    return throwException(exec, exception);
}

Exception* VM::throwException(ExecState* exec, JSObject* error)
{
    return throwException(exec, JSValue(error));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}

size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if ENABLE(C_LOOP)
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}

#if OS(WINDOWS)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

inline void VM::updateStackLimits()
{
#if OS(WINDOWS)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    const StackBounds& stack = Thread::current().stack();
    size_t reservedZoneSize = Options::reservedZoneSize();
    // We should have already ensured that Options::reservedZoneSize() >= minimumReservedZoneSize at
    // options initialization time, and the option value should not have been changed thereafter.
    // We don't have the ability to assert here that it hasn't changed, but we can at least assert
    // that the value is sane.
    RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);

    if (m_stackPointerAtVMEntry) {
        ASSERT(stack.isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(reservedZoneSize);
    }

#if OS(WINDOWS)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}

#if ENABLE(DFG_JIT)
void VM::gatherScratchBufferRoots(ConservativeRoots& conservativeRoots)
{
    auto lock = holdLock(m_scratchBufferLock);
    for (auto* scratchBuffer : m_scratchBuffers) {
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        auto& stackBounds = Thread::current().stack();
        dataLog(
            "Sanitizing stack for VM = ", RawPointer(vm), " with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), ", last code origin = ",
            vm->topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm->lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
    }
}

#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
char* VM::acquireRegExpPatternContexBuffer()
{
    m_regExpPatternContextLock.lock();
    ASSERT(m_regExpPatternContextLock.isLocked());
    if (!m_regExpPatternContexBuffer)
        m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
    return m_regExpPatternContexBuffer.get();
}

void VM::releaseRegExpPatternContexBuffer()
{
    ASSERT(m_regExpPatternContextLock.isLocked());

    m_regExpPatternContextLock.unlock();
}
#endif

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype constructor and is not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr       JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll(*this, "Impure property added");
}

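// The following helpers treat the profiler-enabled counts as nesting counters.
// They return true only on the 0 -> 1 (enable) and 1 -> 0 (disable)
// transitions, which is when callers need to trigger recompilation (hence the
// needsToRecompile result).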
template<typename Func>
static bool enableProfilerWithRespectToCount(unsigned& counter, const Func& doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

template<typename Func>
static bool disableProfilerWithRespectToCount(unsigned& counter, const Func& doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>(*this);
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(*this, "VM Dump Types"_s);
    typeProfiler()->dumpTypeProfilerData(*this);
}

void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
{
    m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, &globalObject, WTFMove(task)));
}

void VM::drainMicrotasks()
{
    while (!m_microtaskQueue.isEmpty()) {
        m_microtaskQueue.takeFirst()->run();
        if (m_onEachMicrotaskTick)
            m_onEachMicrotaskTick(*this);
    }
    finalizeSynchronousJSExecution();
}

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
    if (vm->topCallFrame) {
        auto& stackBounds = Thread::current().stack();
        ASSERT(vm->currentThreadIsHoldingAPILock());
        ASSERT_UNUSED(stackBounds, stackBounds.contains(vm->lastStackTop()));
    }
#if ENABLE(C_LOOP)
    vm->interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

size_t VM::committedStackByteCount()
{
#if !ENABLE(C_LOOP)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(Thread::current().stack().isGrowingDownward());
    uint8_t* current = bitwise_cast<uint8_t*>(currentStackPointer());
    uint8_t* high = bitwise_cast<uint8_t*>(Thread::current().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}

#if ENABLE(C_LOOP)
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}

bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}

void* VM::currentCLoopStackPointer() const
{
    return interpreter->cloopStack().currentStackPointer();
}
#endif // ENABLE(C_LOOP)

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        StringPrintStream out;
        std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());

        if (Options::dumpSimulatedThrows()) {
            out.println("The simulated exception was thrown at:");
            m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
            out.println();
        }
        out.println("Unchecked exception detected at:");
        currentTrace->dump(out, "    ");
        out.println();

        dataLog(out.toCString());
        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
#endif

#if USE(CF)
void VM::setRunLoop(CFRunLoopRef runLoop)
{
    ASSERT(runLoop);
    m_runLoop = runLoop;
    JSRunLoopTimer::Manager::shared().didChangeRunLoop(*this, runLoop);
}
#endif // USE(CF)

ScratchBuffer* VM::scratchBufferForSize(size_t size)
{
    if (!size)
        return nullptr;

    auto locker = holdLock(m_scratchBufferLock);

    if (size > m_sizeOfLastScratchBuffer) {
        // Protect against an N^2 memory usage pathology by ensuring
        // that at worst, we get a geometric series, meaning that the
        // total memory usage is somewhere around
        // max(scratch buffer size) * 4.
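        // Worked example (hypothetical sizes): requests of 100, then 150, then
        // 300 bytes allocate buffers of 200 and 600 bytes; the 150-byte request
        // is served by the existing 200-byte buffer.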
        m_sizeOfLastScratchBuffer = size * 2;

        ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
        RELEASE_ASSERT(newBuffer);
        m_scratchBuffers.append(newBuffer);
    }

    ScratchBuffer* result = m_scratchBuffers.last();
    return result;
}

void VM::clearScratchBuffers()
{
    auto lock = holdLock(m_scratchBufferLock);
    for (auto* scratchBuffer : m_scratchBuffers)
        scratchBuffer->setActiveLength(0);
}

void VM::ensureShadowChicken()
{
    if (m_shadowChicken)
        return;
    m_shadowChicken = std::make_unique<ShadowChicken>();
}

#define DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = std::make_unique<IsoSubspace> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return m_##name.get(); \
    }


DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(boundFunctionSpace, cellHeapCellType.get(), JSBoundFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackFunctionSpace, destructibleObjectHeapCellType.get(), JSCallbackFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(customGetterSetterFunctionSpace, cellHeapCellType.get(), JSCustomGetterSetterFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(errorInstanceSpace, destructibleObjectHeapCellType.get(), ErrorInstance)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(nativeStdFunctionSpace, cellHeapCellType.get(), JSNativeStdFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyRevokeSpace, destructibleObjectHeapCellType.get(), ProxyRevoke)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakMapSpace, destructibleObjectHeapCellType.get(), JSWeakMap)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakSetSpace, destructibleObjectHeapCellType.get(), JSWeakSet)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakObjectRefSpace, cellHeapCellType.get(), JSWeakObjectRef)
#if JSC_OBJC_API_ENABLED
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(objCCallbackFunctionSpace, destructibleObjectHeapCellType.get(), ObjCCallbackFunction)
#endif
#if ENABLE(WEBASSEMBLY)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyCodeBlockSpace, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyFunctionSpace, webAssemblyFunctionHeapCellType.get(), WebAssemblyFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyWrapperFunctionSpace, cellHeapCellType.get(), WebAssemblyWrapperFunction)
#endif

#undef DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW

#define DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = std::make_unique<SpaceAndSet> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return &m_##name->space; \
    }

DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(evalExecutableSpace, destructibleCellHeapCellType.get(), EvalExecutable)
DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(moduleProgramExecutableSpace, destructibleCellHeapCellType.get(), ModuleProgramExecutable)

#undef DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW

Structure* VM::setIteratorStructureSlow()
{
    ASSERT(!m_setIteratorStructure);
    m_setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
    return m_setIteratorStructure.get();
}

Structure* VM::mapIteratorStructureSlow()
{
    ASSERT(!m_mapIteratorStructure);
    m_mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
    return m_mapIteratorStructure.get();
}

JSCell* VM::sentinelSetBucketSlow()
{
    ASSERT(!m_sentinelSetBucket);
    auto* sentinel = JSSet::BucketType::createSentinel(*this);
    m_sentinelSetBucket.set(*this, sentinel);
    return sentinel;
}

JSCell* VM::sentinelMapBucketSlow()
{
    ASSERT(!m_sentinelMapBucket);
    auto* sentinel = JSMap::BucketType::createSentinel(*this);
    m_sentinelMapBucket.set(*this, sentinel);
    return sentinel;
}

JSGlobalObject* VM::vmEntryGlobalObject(const CallFrame* callFrame) const
{
    if (callFrame && callFrame->isGlobalExec()) {
        ASSERT(callFrame->callee().isCell() && callFrame->callee().asCell()->isObject());
        ASSERT(callFrame == callFrame->lexicalGlobalObject()->globalExec());
        return callFrame->lexicalGlobalObject();
    }
    ASSERT(entryScope);
    return entryScope->globalObject();
}

void VM::setCrashOnVMCreation(bool shouldCrash)
{
    vmCreationShouldCrash = shouldCrash;
}

} // namespace JSC