/*
 * Copyright (C) 2018-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "LocalAllocator.h"

#include "AllocatingScope.h"
#include "FreeListInlines.h"
#include "GCDeferralContext.h"
#include "JSCInlines.h"
#include "LocalAllocatorInlines.h"
#include "Options.h"
#include "SuperSampler.h"

namespace JSC {

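// A LocalAllocator registers itself with its BlockDirectory on construction; the directory
// keeps a list of all of its local allocators, guarded by m_localAllocatorsLock.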
LocalAllocator::LocalAllocator(BlockDirectory* directory)
    : m_directory(directory)
    , m_freeList(directory->m_cellSize)
{
    auto locker = holdLock(directory->m_localAllocatorsLock);
    directory->m_localAllocators.append(this);
}

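// Drops all allocation state without notifying the current block. Callers that may hold a
// live free-list should go through stopAllocating() first (see stopAllocatingForGood()).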
void LocalAllocator::reset()
{
    m_freeList.clear();
    m_currentBlock = nullptr;
    m_lastActiveBlock = nullptr;
    m_allocationCursor = 0;
}

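// The allocator must have been fully stopped and reset before destruction; any surviving
// free-list or block pointer indicates a teardown bug, so we crash loudly.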
LocalAllocator::~LocalAllocator()
{
    if (isOnList()) {
        auto locker = holdLock(m_directory->m_localAllocatorsLock);
        remove();
    }

    bool ok = true;
    if (!m_freeList.allocationWillFail()) {
        dataLog("FATAL: ", RawPointer(this), "->~LocalAllocator has non-empty free-list.\n");
        ok = false;
    }
    if (m_currentBlock) {
        dataLog("FATAL: ", RawPointer(this), "->~LocalAllocator has non-null current block.\n");
        ok = false;
    }
    if (m_lastActiveBlock) {
        dataLog("FATAL: ", RawPointer(this), "->~LocalAllocator has non-null last active block.\n");
        ok = false;
    }
    RELEASE_ASSERT(ok);
}

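// Hands the in-progress free-list back to the current block so the heap can safely scan or
// sweep it, and remembers the block so resumeAllocating() can pick up where we left off.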
void LocalAllocator::stopAllocating()
{
    ASSERT(!m_lastActiveBlock);
    if (!m_currentBlock) {
        ASSERT(m_freeList.allocationWillFail());
        return;
    }

    m_currentBlock->stopAllocating(m_freeList);
    m_lastActiveBlock = m_currentBlock;
    m_currentBlock = nullptr;
    m_freeList.clear();
}

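// Undoes stopAllocating(): rebuilds the free-list from the block we last allocated in.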
void LocalAllocator::resumeAllocating()
{
    if (!m_lastActiveBlock)
        return;

    m_lastActiveBlock->resumeAllocating(m_freeList);
    m_currentBlock = m_lastActiveBlock;
    m_lastActiveBlock = nullptr;
}

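// Discards all allocation state so the next allocation starts a fresh block search.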
void LocalAllocator::prepareForAllocation()
{
    reset();
}

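// Permanently retires this allocator: returns any live free-list to its block, then clears
// all state.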
void LocalAllocator::stopAllocatingForGood()
{
    stopAllocating();
    reset();
}

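// Slow path, taken when the inline fast path's free-list is exhausted: credit the heap with
// what we allocated, give the GC a chance to run, then find or allocate a block to use.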
void* LocalAllocator::allocateSlowCase(GCDeferralContext* deferralContext, AllocationFailureMode failureMode)
{
    SuperSamplerScope superSamplerScope(false);
    Heap& heap = *m_directory->m_heap;
    ASSERT(heap.vm()->currentThreadIsHoldingAPILock());
    doTestCollectionsIfNeeded(deferralContext);

    ASSERT(!m_directory->markedSpace().isIterating());
    heap.didAllocate(m_freeList.originalSize());

    didConsumeFreeList();

    AllocatingScope helpingHeap(heap);

    heap.collectIfNecessaryOrDefer(deferralContext);

    // Goofy corner case: the GC called a callback and now this allocator has a currentBlock. This
    // only happens when running WebKit tests, which inject a callback into the GC's finalization.
    if (UNLIKELY(m_currentBlock))
        return allocate(deferralContext, failureMode);

    void* result = tryAllocateWithoutCollecting();

    if (LIKELY(result))
        return result;

    MarkedBlock::Handle* block = m_directory->tryAllocateBlock();
    if (!block) {
        if (failureMode == AllocationFailureMode::Assert)
            RELEASE_ASSERT_NOT_REACHED();
        else
            return nullptr;
    }
    m_directory->addBlock(block);
    result = allocateIn(block);
    ASSERT(result);
    return result;
}

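// Notifies the current block that its free-list has been fully consumed, then forgets both
// the free-list and the block.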
void LocalAllocator::didConsumeFreeList()
{
    if (m_currentBlock)
        m_currentBlock->didConsumeFreeList();

    m_freeList.clear();
    m_currentBlock = nullptr;
}

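// Tries to satisfy an allocation without triggering a collection: first by searching this
// directory's existing blocks, then (optionally) by stealing an empty block from another
// directory in the same subspace.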
void* LocalAllocator::tryAllocateWithoutCollecting()
{
    // FIXME: If we wanted this to be used for real multi-threaded allocations then we would have to
    // come up with some concurrency protocol here. That protocol would need to be able to handle:
    //
    // - The basic case of multiple LocalAllocators trying to do an allocationCursor search on the
    //   same bitvector. That probably needs the bitvector lock at least.
    //
    // - The harder case of some LocalAllocator triggering a steal from a different BlockDirectory
    //   via a search in the AlignedMemoryAllocator's list. Who knows what locks that needs.
    //
    // One way to make this work is to have a single per-Heap lock that protects all mutator lock
    // allocation slow paths. That would probably be scalable enough for years. It would certainly be
    // for using TLC allocation from JIT threads.
    // https://bugs.webkit.org/show_bug.cgi?id=181635

    SuperSamplerScope superSamplerScope(false);

    ASSERT(!m_currentBlock);
    ASSERT(m_freeList.allocationWillFail());

    for (;;) {
        MarkedBlock::Handle* block = m_directory->findBlockForAllocation(*this);
        if (!block)
            break;

        if (void* result = tryAllocateIn(block))
            return result;
    }

    if (Options::stealEmptyBlocksFromOtherAllocators()) {
        if (MarkedBlock::Handle* block = m_directory->m_subspace->findEmptyBlockToSteal()) {
            RELEASE_ASSERT(block->alignedMemoryAllocator() == m_directory->m_subspace->alignedMemoryAllocator());

            block->sweep(nullptr);

            // It's good that this clears canAllocateButNotEmpty as well as all other bits,
            // because there is a remote chance that a block may have both canAllocateButNotEmpty
            // and empty set at the same time.
            block->removeFromDirectory();
            m_directory->addBlock(block);
            return allocateIn(block);
        }
    }

    return nullptr;
}

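// Infallible variant of tryAllocateIn(); the caller must pass a block known to have room.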
void* LocalAllocator::allocateIn(MarkedBlock::Handle* block)
{
    void* result = tryAllocateIn(block);
    RELEASE_ASSERT(result);
    return result;
}

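// Sweeps the block to build a free-list. If the block turns out to be completely full, this
// retires it and fails; otherwise it installs the block as the current one and allocates.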
void* LocalAllocator::tryAllocateIn(MarkedBlock::Handle* block)
{
    ASSERT(block);
    ASSERT(!block->isFreeListed());

    block->sweep(&m_freeList);

    // It's possible to stumble on a completely full block. Marking tries to retire these, but
    // that algorithm is racy and may forget to do it sometimes.
    if (m_freeList.allocationWillFail()) {
        ASSERT(block->isFreeListed());
        block->unsweepWithNoNewlyAllocated();
        ASSERT(!block->isFreeListed());
        ASSERT(!m_directory->isEmpty(NoLockingNecessary, block));
        ASSERT(!m_directory->isCanAllocateButNotEmpty(NoLockingNecessary, block));
        return nullptr;
    }

    m_currentBlock = block;

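    // The sweep above just built a non-empty free-list, so this allocation cannot fail; the
    // failure callback is unreachable.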
    void* result = m_freeList.allocate(
        [] () -> HeapCell* {
            RELEASE_ASSERT_NOT_REACHED();
            return nullptr;
        });
    m_directory->setIsEden(NoLockingNecessary, m_currentBlock, true);
    m_directory->markedSpace().didAllocateInBlock(m_currentBlock);
    return result;
}

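// Testing aid: when Options::slowPathAllocsBetweenGCs() is set, force a synchronous full
// collection once every that-many slow-path allocations.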
void LocalAllocator::doTestCollectionsIfNeeded(GCDeferralContext* deferralContext)
{
    if (!Options::slowPathAllocsBetweenGCs())
        return;

    static unsigned allocationCount = 0;
    if (!allocationCount) {
        if (!m_directory->m_heap->isDeferred()) {
            if (deferralContext)
                deferralContext->m_shouldGC = true;
            else
                m_directory->m_heap->collectNow(Sync, CollectionScope::Full);
        }
    }
    if (++allocationCount >= Options::slowPathAllocsBetweenGCs())
        allocationCount = 0;
}

bool LocalAllocator::isFreeListedCell(const void* target) const
{
    // This abomination exists to detect when an object is in the dead-but-not-destructed state.
    // Therefore, it's not even clear that this needs to do anything beyond returning "false", since
    // if we know that the block owning the object is free-listed, then it's impossible for any
    // objects to be in the dead-but-not-destructed state.
    // FIXME: Get rid of this abomination. https://bugs.webkit.org/show_bug.cgi?id=181655
    return m_freeList.contains(bitwise_cast<HeapCell*>(target));
}

} // namespace JSC