/*
 * Copyright (C) 2014-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include "GCDeferralContext.h"
#include "Heap.h"
#include "HeapCellInlines.h"
#include "IndexingHeader.h"
#include "JSCast.h"
#include "Structure.h"
#include <type_traits>
#include <wtf/Assertions.h>
#include <wtf/MainThread.h>
#include <wtf/RandomNumber.h>

namespace JSC {

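// Heap is a data member of VM, so the owning VM can be recovered from the
// Heap's own address by subtracting the member's offset; no back-pointer is
// stored.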
ALWAYS_INLINE VM* Heap::vm() const
{
    return bitwise_cast<VM*>(bitwise_cast<uintptr_t>(this) - OBJECT_OFFSETOF(VM, heap));
}

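// Map a cell, or a JSValue holding a cell, back to the Heap that owns it.
// Non-cell values (numbers, booleans, undefined) belong to no heap.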
ALWAYS_INLINE Heap* Heap::heap(const HeapCell* cell)
{
    if (!cell)
        return nullptr;
    return cell->heap();
}

inline Heap* Heap::heap(const JSValue v)
{
    if (!v.isCell())
        return nullptr;
    return heap(v.asCell());
}

inline bool Heap::hasHeapAccess() const
{
    return m_worldState.load() & hasAccessBit;
}

inline bool Heap::worldIsStopped() const
{
    return m_worldIsStopped;
}

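// Mark-bit queries take one of two paths: a large allocation tracks its mark
// bit directly, while a block-allocated cell consults its MarkedBlock. The
// marking version lets a whole block's mark bits be treated as stale (i.e.
// logically cleared) at the start of a collection cycle without eagerly
// rewriting them.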
ALWAYS_INLINE bool Heap::isMarked(const void* rawCell)
{
    HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
    if (cell->isLargeAllocation())
        return cell->largeAllocation().isMarked();
    MarkedBlock& block = cell->markedBlock();
    return block.isMarked(m_objectSpace.markingVersion(), cell);
}

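// Atomically set the mark bit and report whether it was already set; this is
// how racing SlotVisitors agree that exactly one of them visits a given cell.
// The Dependency returned by aboutToMark() orders the version check before
// the mark-bit access on weakly ordered CPUs.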
ALWAYS_INLINE bool Heap::testAndSetMarked(HeapVersion markingVersion, const void* rawCell)
{
    HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
    if (cell->isLargeAllocation())
        return cell->largeAllocation().testAndSetMarked();
    MarkedBlock& block = cell->markedBlock();
    Dependency dependency = block.aboutToMark(markingVersion);
    return block.testAndSetMarked(cell, dependency);
}

ALWAYS_INLINE size_t Heap::cellSize(const void* rawCell)
{
    return bitwise_cast<HeapCell*>(rawCell)->cellSize();
}

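// Write barriers for the generational and concurrent collector. After storing
// a reference into |from|, the mutator runs a barrier so that an already
// scanned (black) |from| gets revisited. The fast path filters on |from|'s
// cellState against the current barrier threshold; only possibly-black cells
// reach writeBarrierSlowPath(). A hand-written call site might look like the
// following sketch (in practice WriteBarrier<>::set() performs both steps):
//
//     cell->m_field.setWithoutWriteBarrier(newValue);
//     vm.heap.writeBarrier(cell, newValue);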
inline void Heap::writeBarrier(const JSCell* from, JSValue to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
    if (!to.isCell())
        return;
    writeBarrier(from, to.asCell());
}

inline void Heap::writeBarrier(const JSCell* from, JSCell* to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
    if (!from)
        return;
    if (!isWithinThreshold(from->cellState(), barrierThreshold()))
        return;
    if (LIKELY(!to))
        return;
    writeBarrierSlowPath(from);
}

inline void Heap::writeBarrier(const JSCell* from)
{
    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
    if (!from)
        return;
    if (UNLIKELY(isWithinThreshold(from->cellState(), barrierThreshold())))
        writeBarrierSlowPath(from);
}

inline void Heap::writeBarrierWithoutFence(const JSCell* from)
{
    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
    if (!from)
        return;
    if (UNLIKELY(isWithinThreshold(from->cellState(), blackThreshold)))
        addToRememberedSet(from);
}

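// On x86, storeStoreFence() is only a compiler barrier (TSO already orders
// stores), so it is always emitted there; on weakly ordered CPUs the real
// hardware fence is emitted only while the concurrent collector requires the
// mutator to be fenced.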
inline void Heap::mutatorFence()
{
    if (isX86() || UNLIKELY(mutatorShouldBeFenced()))
        WTF::storeStoreFence();
}

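// These wrappers funnel arbitrary lambdas through a scopedLambdaRef so the
// out-of-line *Impl functions take a single non-template argument instead of
// being instantiated once per functor type.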
template<typename Functor> inline void Heap::forEachCodeBlock(const Functor& func)
{
    forEachCodeBlockImpl(scopedLambdaRef<void(CodeBlock*)>(func));
}

template<typename Functor> inline void Heap::forEachCodeBlockIgnoringJITPlans(const AbstractLocker& codeBlockSetLocker, const Functor& func)
{
    forEachCodeBlockIgnoringJITPlansImpl(codeBlockSetLocker, scopedLambdaRef<void(CodeBlock*)>(func));
}

template<typename Functor> inline void Heap::forEachProtectedCell(const Functor& functor)
{
    for (auto& pair : m_protectedValues)
        functor(pair.key);
    m_handleSet.forEachStrongHandle(functor, m_protectedValues);
}

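// Queue external wrapper objects (Objective-C via RetainPtr, or GLib wrappers)
// for delayed release rather than running their arbitrary release logic
// synchronously at whatever point the heap happens to drop the last reference.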
#if USE(FOUNDATION)
template <typename T>
inline void Heap::releaseSoon(RetainPtr<T>&& object)
{
    m_delayedReleaseObjects.append(WTFMove(object));
}
#endif

#ifdef JSC_GLIB_API_ENABLED
inline void Heap::releaseSoon(std::unique_ptr<JSCGLibWrapperObject>&& object)
{
    m_delayedReleaseObjects.append(WTFMove(object));
}
#endif

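// Deferral depth counts nested scopes in which a collection must not start,
// typically entered through the DeferGC RAII helper. A hypothetical use
// (sketch):
//
//     {
//         DeferGC deferGC(vm.heap); // increments the deferral depth
//         // ... allocate several cells without risking a collection ...
//     } // destructor decrements the depth and runs any deferred GC work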
inline void Heap::incrementDeferralDepth()
{
    ASSERT(!Thread::mayBeGCThread() || m_worldIsStopped);
    m_deferralDepth++;
}

inline void Heap::decrementDeferralDepth()
{
    ASSERT(!Thread::mayBeGCThread() || m_worldIsStopped);
    m_deferralDepth--;
}

inline void Heap::decrementDeferralDepthAndGCIfNeeded()
{
    ASSERT(!Thread::mayBeGCThread() || m_worldIsStopped);
    m_deferralDepth--;

    if (UNLIKELY(m_didDeferGCWork)) {
        decrementDeferralDepthAndGCIfNeededSlow();

        // Here are the possible relationships between m_deferralDepth and m_didDeferGCWork.
        // Note that prior to the call to decrementDeferralDepthAndGCIfNeededSlow,
        // m_didDeferGCWork had to have been true. Now it can be either false or true. There is
        // nothing we can reliably assert.
        //
        // Possible arrangements of m_didDeferGCWork and !!m_deferralDepth:
        //
        // Both false: We popped out of all DeferGCs and we did whatever work was deferred.
        //
        // Only m_didDeferGCWork is true: We stopped for GC and the GC did DeferGC. This is
        // possible because of how we handle the baseline JIT's worklist. It's also perfectly
        // safe because it only protects reportExtraMemory. We can just ignore this.
        //
        // Only !!m_deferralDepth is true: m_didDeferGCWork had been set spuriously. It is only
        // cleared by decrementDeferralDepthAndGCIfNeededSlow(). So, if we had deferred work but
        // then decrementDeferralDepth()'d, then we might have the bit set even if we GC'd since
        // then.
        //
        // Both true: We're in a recursive ~DeferGC. We wanted to do something about the
        // deferred work, but were unable to.
    }
}

inline HashSet<MarkedArgumentBuffer*>& Heap::markListSet()
{
    if (!m_markListSet)
        m_markListSet = std::make_unique<HashSet<MarkedArgumentBuffer*>>();
    return *m_markListSet;
}

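// Extra-memory reports at or below minExtraMemory are dropped on the fast
// path as noise; larger reports take the slow case, which updates the heap's
// accounting and may schedule a collection.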
inline void Heap::reportExtraMemoryAllocated(size_t size)
{
    if (size > minExtraMemory)
        reportExtraMemoryAllocatedSlowCase(size);
}

inline void Heap::deprecatedReportExtraMemory(size_t size)
{
    if (size > minExtraMemory)
        deprecatedReportExtraMemorySlowCase(size);
}

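// Heap access protocol: a thread must hold "heap access" before touching heap
// objects, so the collector knows which threads can be running mutator code.
// The fast paths are a single CAS on m_worldState; contended or stopped
// states fall through to the slow paths.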
inline void Heap::acquireAccess()
{
    if (validateDFGDoesGC)
        RELEASE_ASSERT(expectDoesGC());

    if (m_worldState.compareExchangeWeak(0, hasAccessBit))
        return;
    acquireAccessSlow();
}

inline bool Heap::hasAccess() const
{
    return m_worldState.loadRelaxed() & hasAccessBit;
}

inline void Heap::releaseAccess()
{
    if (m_worldState.compareExchangeWeak(hasAccessBit, 0))
        return;
    releaseAccessSlow();
}

inline bool Heap::mayNeedToStop()
{
    return m_worldState.loadRelaxed() != hasAccessBit;
}

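// Safepoint poll: if the world state is anything other than "we hold access
// and nothing else is going on", the collector may be requesting a stop, so
// take the slow path and park this thread until the world resumes.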
inline void Heap::stopIfNecessary()
{
    if (validateDFGDoesGC)
        RELEASE_ASSERT(expectDoesGC());

    if (mayNeedToStop())
        stopIfNecessarySlow();
}

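// Visit every SlotVisitor the collector can be using: the dedicated collector
// visitor, the mutator's visitor, and any parallel marker visitors.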
template<typename Func>
void Heap::forEachSlotVisitor(const Func& func)
{
    func(*m_collectorSlotVisitor);
    func(*m_mutatorSlotVisitor);
    for (auto& slotVisitor : m_parallelSlotVisitors)
        func(*slotVisitor);
}

} // namespace JSC