1/*
2 * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
14 * its contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 */
28
29#include "config.h"
30#include <stdarg.h>
31#include <wtf/MetaAllocator.h>
32#include <wtf/Vector.h>
33
34#if OS(WINDOWS)
35#undef small
36#endif
37
38namespace TestWebKitAPI {
39
40using namespace WTF;
41
class MetaAllocatorTest: public testing::Test {
public:
    // Whether a helper should run sanityCheck() after each allocate/free step.
    // Skipping it keeps the large stress tests (e.g. testFillHeap) fast.
    enum SanityCheckMode { RunSanityCheck, DontRunSanityCheck };

    // How allocateNewSpace() responds when the allocator runs out of memory:
    // refuse (DontGrowHeap), grant space adjacent to the heap so it can
    // coalesce, or grant space one page away so it cannot coalesce.
    enum HeapGrowthMode { DontGrowHeap, ForTestDemandAllocCoalesce, ForTestDemandAllocDontCoalesce };

    HeapGrowthMode currentHeapGrowthMode;
    size_t allowAllocatePages; // Pages allocateNewSpace() may grant on its next call (0 = none).
    size_t requestedNumPages; // Page count the allocator actually requested; checked by tests.

    // MetaAllocator subclass over a fake address range starting at basePage.
    // No memory is ever really mapped; the parent fixture's shadow maps
    // (memoryMap/pageMap) track what the allocator believes is allocated.
    class SimpleTestAllocator: public MetaAllocator {
    public:
        SimpleTestAllocator(MetaAllocatorTest* parent)
            : MetaAllocator(32) // 32-byte allocation granularity.
            , m_parent(parent)
        {
            addFreshFreeSpace(reinterpret_cast<void*>(basePage * pageSize()), defaultPagesInHeap * pageSize());
        }

        virtual ~SimpleTestAllocator()
        {
            // Each fixture constructs exactly one allocator; record the
            // destruction so TearDown() can assert it happened.
            EXPECT_TRUE(!m_parent->allocatorDestroyed);
            m_parent->allocatorDestroyed = true;
        }

        // Invoked by MetaAllocator when the free list cannot satisfy a
        // request. numPages is in/out: on return it holds the page count
        // actually granted.
        virtual FreeSpacePtr allocateNewSpace(size_t& numPages)
        {
            switch (m_parent->currentHeapGrowthMode) {
            case DontGrowHeap:
                return nullptr;

            case ForTestDemandAllocCoalesce:
            case ForTestDemandAllocDontCoalesce: {
                EXPECT_TRUE(m_parent->allowAllocatePages);
                EXPECT_TRUE(m_parent->allowAllocatePages >= numPages);
                m_parent->requestedNumPages = numPages;
                numPages = m_parent->allowAllocatePages;

                // A one-page gap (offset = 1) keeps the new space from
                // coalescing with the end of the existing heap.
                unsigned offset;
                if (m_parent->currentHeapGrowthMode == ForTestDemandAllocCoalesce)
                    offset = 0;
                else
                    offset = 1;

                void* result = reinterpret_cast<void*>((basePage + defaultPagesInHeap + offset) * pageSize());

                // Growth is single-shot: reset the mode so an unexpected
                // second request trips the EXPECTs above.
                m_parent->allowAllocatePages = 0;
                m_parent->currentHeapGrowthMode = DontGrowHeap;

                // Extend the shadow maps to cover the newly granted pages
                // (including the gap page, if any).
                for (size_t counter = 0; counter < numPages + offset; ++counter) {
                    m_parent->pageMap->append(false);
                    for (unsigned byteCounter = 0; byteCounter < pageSize(); ++byteCounter)
                        m_parent->memoryMap->append(false);
                }

                m_parent->additionalPagesInHeap += numPages;

                return FreeSpacePtr(result);
            }

            default:
                CRASH();
                return nullptr;
            }
        }

        virtual void notifyNeedPage(void* page)
        {
            // the page should be both free and unmapped.
            EXPECT_TRUE(!m_parent->pageState(reinterpret_cast<uintptr_t>(page) / pageSize()));
            for (uintptr_t address = reinterpret_cast<uintptr_t>(page); address < reinterpret_cast<uintptr_t>(page) + pageSize(); ++address)
                EXPECT_TRUE(!m_parent->byteState(reinterpret_cast<void*>(address)));
            m_parent->pageState(reinterpret_cast<uintptr_t>(page) / pageSize()) = true;
        }

        virtual void notifyPageIsFree(void* page)
        {
            // the page should be free of objects at this point, but it should still
            // be mapped.
            EXPECT_TRUE(m_parent->pageState(reinterpret_cast<uintptr_t>(page) / pageSize()));
            for (uintptr_t address = reinterpret_cast<uintptr_t>(page); address < reinterpret_cast<uintptr_t>(page) + pageSize(); ++address)
                EXPECT_TRUE(!m_parent->byteState(reinterpret_cast<void*>(address)));
            m_parent->pageState(reinterpret_cast<uintptr_t>(page) / pageSize()) = false;
        }

    private:
        MetaAllocatorTest* m_parent;
    };

    // The fake heap occupies pages [basePage, basePage + defaultPagesInHeap).
    // Starting at page 1 keeps address 0 out of the heap.
    static const unsigned basePage = 1;
    static const unsigned defaultPagesInHeap = 100;

    unsigned additionalPagesInHeap; // Pages added by demand allocation during a test.

    Vector<bool, 0>* memoryMap; // Per-byte shadow: true if the byte is allocated.
    Vector<bool, 0>* pageMap; // Per-page shadow: true if the page is committed.
    bool allocatorDestroyed;

    SimpleTestAllocator* allocator;

    virtual void SetUp()
    {
        memoryMap = new Vector<bool, 0>();
        pageMap = new Vector<bool, 0>();

        // Pre-populate the shadow maps for the initial heap, all free.
        for (unsigned page = basePage; page < basePage + defaultPagesInHeap; ++page) {
            pageMap->append(false);
            for (unsigned byteInPage = 0; byteInPage < pageSize(); ++byteInPage)
                memoryMap->append(false);
        }

        allocatorDestroyed = false;

        currentHeapGrowthMode = DontGrowHeap;
        allowAllocatePages = 0;
        additionalPagesInHeap = 0;
        requestedNumPages = 0;

        allocator = new SimpleTestAllocator(this);
    }

    virtual void TearDown()
    {
        // Demand-allocation state must have been consumed (or never armed).
        EXPECT_TRUE(currentHeapGrowthMode == DontGrowHeap);
        EXPECT_EQ(allowAllocatePages, static_cast<size_t>(0));
        EXPECT_EQ(requestedNumPages, static_cast<size_t>(0));

        // memory should be free.
        for (unsigned page = basePage; page < basePage + defaultPagesInHeap; ++page) {
            EXPECT_TRUE(!pageState(page));
            for (unsigned byteInPage = 0; byteInPage < pageSize(); ++byteInPage)
                EXPECT_TRUE(!byteState(page * pageSize() + byteInPage));
        }

        // NOTE: this automatically tests that the allocator did not leak
        // memory, so long as these tests are running with !defined(NDEBUG).
        // See MetaAllocator::m_mallocBalance.
        delete allocator;

        EXPECT_TRUE(allocatorDestroyed);

        delete memoryMap;
        delete pageMap;
    }

    // Allocates sizeInBytes from the allocator, marks the returned range as
    // allocated in the shadow maps, and returns the leaked handle (which
    // free() later derefs).
    MetaAllocatorHandle* allocate(size_t sizeInBytes, SanityCheckMode sanityCheckMode = RunSanityCheck)
    {
        MetaAllocatorHandle* handle = allocator->allocate(sizeInBytes, 0).leakRef();
        EXPECT_TRUE(handle);
        EXPECT_EQ(handle->sizeInBytes(), sizeInBytes);

        uintptr_t startByte = handle->start().untaggedPtr<uintptr_t>();
        uintptr_t endByte = handle->end().untaggedPtr<uintptr_t>();
        for (uintptr_t currentByte = startByte; currentByte < endByte; ++currentByte) {
            EXPECT_TRUE(!byteState(currentByte));
            byteState(currentByte) = true;
            EXPECT_TRUE(pageState(currentByte / pageSize()));
        }

        if (sanityCheckMode == RunSanityCheck)
            sanityCheck();

        return handle;
    }

    // Releases a handle obtained from allocate(), clearing the shadow state
    // for its range first.
    void free(MetaAllocatorHandle* handle, SanityCheckMode sanityCheckMode = RunSanityCheck)
    {
        EXPECT_TRUE(handle);

        notifyFree(handle->start().untaggedPtr(), handle->sizeInBytes());
        handle->deref();

        if (sanityCheckMode == RunSanityCheck)
            sanityCheck();
    }

    // Marks [start, start + sizeInBytes) free in the byte shadow map,
    // verifying each byte was previously allocated.
    void notifyFree(void* start, size_t sizeInBytes)
    {
        uintptr_t startByte = reinterpret_cast<uintptr_t>(start);
        uintptr_t endByte = startByte + sizeInBytes;
        for (uintptr_t currentByte = startByte; currentByte < endByte; ++currentByte) {
            EXPECT_TRUE(byteState(currentByte));
            byteState(currentByte) = false;
        }
    }

    // Cross-checks the allocator's own accounting against the shadow maps.
    void sanityCheck()
    {
#ifndef NDEBUG
        EXPECT_EQ(allocator->bytesReserved() - allocator->bytesAllocated(), allocator->debugFreeSpaceSize());
#endif
        EXPECT_EQ(allocator->bytesReserved(), (defaultPagesInHeap + additionalPagesInHeap) * pageSize());
        EXPECT_EQ(allocator->bytesAllocated(), bytesAllocated());
        EXPECT_EQ(allocator->bytesCommitted(), bytesCommitted());
    }

    // Asserts that every byte covered by the handle is marked allocated.
    void confirm(MetaAllocatorHandle* handle)
    {
        uintptr_t startByte = handle->start().untaggedPtr<uintptr_t>();
        confirm(startByte, startByte + handle->sizeInBytes(), true);
    }

    // Asserts that everything above the handle, up to the top of the original
    // heap, is free — i.e. the handle is the high watermark.
    void confirmHighWatermark(MetaAllocatorHandle* handle)
    {
        confirm(handle->end().untaggedPtr<uintptr_t>(), (basePage + defaultPagesInHeap) * pageSize(), false);
    }

    // Asserts that all bytes in [startByte, endByte) have the given allocated
    // state. For a free range, additionally asserts that every page fully
    // contained in the range is decommitted.
    void confirm(uintptr_t startByte, uintptr_t endByte, bool value)
    {
        for (uintptr_t currentByte = startByte; currentByte < endByte; ++currentByte) {
            EXPECT_EQ(byteState(currentByte), value);
            if (value)
                EXPECT_TRUE(pageState(currentByte / pageSize()));
        }
        if (!value) {
            uintptr_t firstFreePage = (startByte + pageSize() - 1) / pageSize();
            uintptr_t lastFreePage = (endByte - pageSize()) / pageSize();
            for (uintptr_t currentPage = firstFreePage; currentPage <= lastFreePage; ++currentPage)
                EXPECT_TRUE(!pageState(currentPage));
        }
    }

    // Number of bytes currently marked allocated in the shadow map.
    size_t bytesAllocated()
    {
        size_t result = 0;
        for (unsigned index = 0; index < memoryMap->size(); ++index) {
            if (memoryMap->at(index))
                result++;
        }
        return result;
    }

    // Number of bytes on pages currently marked committed in the shadow map.
    size_t bytesCommitted()
    {
        size_t result = 0;
        for (unsigned index = 0; index < pageMap->size(); ++index) {
            if (pageMap->at(index))
                result++;
        }
        return result * pageSize();
    }

    bool& byteState(void* address)
    {
        return byteState(reinterpret_cast<uintptr_t>(address));
    }

    // Shadow-map cell for a byte address; indices are relative to the heap's
    // base address.
    bool& byteState(uintptr_t address)
    {
        uintptr_t byteIndex = address - basePage * pageSize();
        return memoryMap->at(byteIndex);
    }

    // Shadow-map cell for a page number; indices are relative to basePage.
    bool& pageState(uintptr_t page)
    {
        uintptr_t pageIndex = page - basePage;
        return pageMap->at(pageIndex);
    }

    // Test helpers

    void testOneAlloc(size_t size)
    {
        // Tests the most basic behavior: allocate one thing and free it. Also
        // verifies that the state of pages is correct.

        MetaAllocatorHandle* handle = allocate(size);
        EXPECT_EQ(handle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
        EXPECT_EQ(handle->sizeInBytes(), size);
        EXPECT_TRUE(pageState(basePage));

        confirm(handle);
        confirmHighWatermark(handle);

        free(handle);
    }

    void testRepeatAllocFree(size_t firstSize, ...)
    {
        // Tests right-coalescing by repeatedly allocating and freeing. The idea
        // is that if you allocate something and then free it, then the heap should
        // look identical to what it was before the allocation due to a right-coalesce
        // of the freed chunk and the already-free memory, and so subsequent
        // allocations should behave the same as the first one.
        //
        // The variadic size list must be 0-terminated; sizes are read as int
        // because of default argument promotion in varargs.

        MetaAllocatorHandle* handle = allocate(firstSize);
        EXPECT_EQ(handle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
        EXPECT_EQ(handle->sizeInBytes(), firstSize);

        confirm(handle);
        confirmHighWatermark(handle);

        free(handle);

        va_list argList;
        va_start(argList, firstSize);
        while (size_t sizeInBytes = va_arg(argList, int)) {
            handle = allocate(sizeInBytes);
            EXPECT_EQ(handle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
            EXPECT_EQ(handle->sizeInBytes(), sizeInBytes);

            confirm(handle);
            confirmHighWatermark(handle);

            free(handle);
        }
        va_end(argList);
    }

    void testSimpleFullCoalesce(size_t firstSize, size_t secondSize, size_t thirdSize)
    {
        // Allocates something of size firstSize, then something of size secondSize, and then
        // frees the first allocation, and then the second, and then attempts to allocate the
        // third, asserting that it allocated at the base address of the heap.

        // Note that this test may cause right-allocation, which will cause the test to fail.
        // Thus the correct way of running this test is to ensure that secondSize is
        // picked in such a way that it never straddles a page.

        MetaAllocatorHandle* firstHandle = allocate(firstSize);
        EXPECT_EQ(firstHandle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
        EXPECT_EQ(firstHandle->sizeInBytes(), firstSize);

        confirm(firstHandle);
        confirmHighWatermark(firstHandle);

        MetaAllocatorHandle* secondHandle = allocate(secondSize);
        EXPECT_EQ(secondHandle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize() + firstSize));
        EXPECT_EQ(secondHandle->sizeInBytes(), secondSize);

        confirm(firstHandle);
        confirm(secondHandle);
        confirmHighWatermark(secondHandle);

        free(firstHandle);

        confirm(secondHandle);
        confirmHighWatermark(secondHandle);

        free(secondHandle);

        // After both frees the whole heap should be one free run again.
        confirm(basePage * pageSize(), (basePage + defaultPagesInHeap) * pageSize(), false);

        MetaAllocatorHandle* thirdHandle = allocate(thirdSize);
        EXPECT_EQ(thirdHandle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
        EXPECT_EQ(thirdHandle->sizeInBytes(), thirdSize);

        confirm(thirdHandle);
        confirmHighWatermark(thirdHandle);

        free(thirdHandle);
    }

    enum class TestFIFOAllocMode { FillAtEnd, EagerFill };
    void testFIFOAlloc(TestFIFOAllocMode mode, ...)
    {
        // This will test the simple case of no-coalesce (freeing the left-most
        // chunk in memory when the chunk to the right of it is allocated) and
        // fully exercise left-coalescing and full-coalescing. In EagerFill
        // mode, this also tests perfect-fit allocation and no-coalescing free.
        //
        // The variadic size list must be 0-terminated (sizes read as int).

        size_t totalSize = 0;

        Vector<MetaAllocatorHandle*, 0> handles;

        va_list argList;
        va_start(argList, mode);
        while (size_t sizeInBytes = va_arg(argList, int)) {
            MetaAllocatorHandle* handle = allocate(sizeInBytes);
            EXPECT_EQ(handle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize() + totalSize));
            EXPECT_EQ(handle->sizeInBytes(), sizeInBytes);

            confirm(handle);
            confirmHighWatermark(handle);

            handles.append(handle);
            totalSize += sizeInBytes;
        }
        va_end(argList);

        for (unsigned index = 0; index < handles.size(); ++index)
            confirm(handles.at(index));

        // Free in FIFO order; in EagerFill mode immediately re-fill the freed
        // prefix with a single perfectly-fitting allocation each round.
        size_t sizeSoFar = 0;
        for (unsigned index = 0; index < handles.size(); ++index) {
            sizeSoFar += handles.at(index)->sizeInBytes();
            free(handles.at(index));
            if (mode == TestFIFOAllocMode::EagerFill) {
                MetaAllocatorHandle* handle = allocate(sizeSoFar);
                EXPECT_EQ(handle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
                EXPECT_EQ(handle->sizeInBytes(), sizeSoFar);

                confirm(basePage * pageSize(), basePage * pageSize() + totalSize, true);
                if (index < handles.size() - 1)
                    confirmHighWatermark(handles.last());
                else
                    confirmHighWatermark(handle);

                free(handle);

                confirm(basePage * pageSize(), basePage * pageSize() + sizeSoFar, false);
            }
        }

        ASSERT(sizeSoFar == totalSize);

        confirm(basePage * pageSize(), (basePage + defaultPagesInHeap) * pageSize(), false);

        if (mode == TestFIFOAllocMode::FillAtEnd) {
            MetaAllocatorHandle* finalHandle = allocate(totalSize);
            EXPECT_EQ(finalHandle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
            EXPECT_EQ(finalHandle->sizeInBytes(), totalSize);

            confirm(finalHandle);
            confirmHighWatermark(finalHandle);

            free(finalHandle);
        }
    }

    // Fills the heap with numAllocations chunks of sizeInBytes, verifies that
    // one more allocation fails, then frees everything.
    void testFillHeap(size_t sizeInBytes, size_t numAllocations)
    {
        Vector<MetaAllocatorHandle*, 0> handles;

        for (size_t index = 0; index < numAllocations; ++index)
            handles.append(allocate(sizeInBytes, DontRunSanityCheck));

        sanityCheck();

        EXPECT_TRUE(!allocator->allocate(sizeInBytes, 0));

        for (size_t index = 0; index < numAllocations; ++index)
            free(handles.at(index), DontRunSanityCheck);

        sanityCheck();
    }

    // Alternates allocations expected to land at the low (left) end of the
    // heap with ones expected to land at the high (right) end, then frees
    // everything and checks the heap fully coalesces.
    void testRightAllocation(size_t firstLeftSize, size_t firstRightSize, size_t secondLeftSize, size_t secondRightSize)
    {
        MetaAllocatorHandle* firstLeft = allocate(firstLeftSize);
        EXPECT_EQ(firstLeft->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));

        MetaAllocatorHandle* firstRight = allocate(firstRightSize);
        EXPECT_EQ(firstRight->end().untaggedPtr(), reinterpret_cast<void*>((basePage + defaultPagesInHeap) * pageSize()));

        MetaAllocatorHandle* secondLeft = allocate(secondLeftSize);
        EXPECT_EQ(secondLeft->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize() + firstLeft->sizeInBytes()));

        MetaAllocatorHandle* secondRight = allocate(secondRightSize);
        EXPECT_EQ(secondRight->end().untaggedPtr(), reinterpret_cast<void*>((basePage + defaultPagesInHeap) * pageSize() - firstRight->sizeInBytes()));

        free(firstLeft);
        free(firstRight);
        free(secondLeft);
        free(secondRight);

        MetaAllocatorHandle* final = allocate(defaultPagesInHeap * pageSize());
        EXPECT_EQ(final->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));

        free(final);
    }

    // Builds a ladder of free slots of increasing size (each fenced off by a
    // 32-byte "fragger" allocation), then checks that slightly smaller
    // requests are placed into those slots, i.e. best-fit behavior.
    void testBestFit(size_t firstSize, size_t step, unsigned numSlots, SanityCheckMode sanityCheckMode)
    {
        Vector<MetaAllocatorHandle*, 0> handlesToFree;
        Vector<MetaAllocatorHandle*, 0> handles;
        Vector<void*, 0> locations;

        size_t size = firstSize;
        for (unsigned index = 0; index < numSlots; ++index) {
            MetaAllocatorHandle* toFree = allocate(size, sanityCheckMode);
            if (!handles.isEmpty()) {
                // Keep allocating until we get a chunk contiguous with the
                // previous fragger, so the slot layout is predictable.
                while (toFree->start().untaggedPtr() != handles.last()->end().untaggedPtr()) {
                    handlesToFree.append(toFree);
                    toFree = allocate(size, sanityCheckMode);
                }
            }

            MetaAllocatorHandle* fragger = allocate(32, sanityCheckMode);
            EXPECT_EQ(fragger->start().untaggedPtr(), toFree->end().untaggedPtr());

            locations.append(toFree->start().untaggedPtr());

            handlesToFree.append(toFree);
            handles.append(fragger);

            size += step;
        }

        ASSERT(locations.size() == numSlots);

        for (unsigned index = 0; index < handlesToFree.size(); ++index)
            free(handlesToFree.at(index), sanityCheckMode);

        size = firstSize;
        for (unsigned index = 0; index < numSlots; ++index) {
            MetaAllocatorHandle* bestFit = allocate(size - 32, sanityCheckMode);

            EXPECT_TRUE(bestFit->start().untaggedPtr() == locations.at(index)
                || bestFit->end().untaggedPtr() == reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(locations.at(index)) + size));

            MetaAllocatorHandle* small = allocate(32, sanityCheckMode);
            if (bestFit->start().untaggedPtr() == locations.at(index))
                EXPECT_EQ(small->start().untaggedPtr(), bestFit->end().untaggedPtr());
            else
                EXPECT_EQ(small->end().untaggedPtr(), bestFit->start().untaggedPtr());

            free(bestFit, sanityCheckMode);
            free(small, sanityCheckMode);

            size += step;
        }

        for (unsigned index = 0; index < numSlots; ++index)
            free(handles.at(index), sanityCheckMode);

        MetaAllocatorHandle* final = allocate(defaultPagesInHeap * pageSize(), sanityCheckMode);
        EXPECT_EQ(final->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));

        free(final, sanityCheckMode);
    }

    // Verifies MetaAllocatorHandle::shrink(): an allocation of firstSize is
    // shrunk to secondSize and the freed tail becomes allocatable again.
    void testShrink(size_t firstSize, size_t secondSize)
    {
        // Allocate the thing that will be shrunk
        MetaAllocatorHandle* handle = allocate(firstSize);

        // Shrink it, and make sure that our state reflects the shrinkage.
        notifyFree(reinterpret_cast<void*>(handle->start().untaggedPtr<uintptr_t>() + secondSize), firstSize - secondSize);

        handle->shrink(secondSize);
        EXPECT_EQ(handle->sizeInBytes(), secondSize);

        sanityCheck();

        // Assert that the heap is not empty.
        EXPECT_TRUE(!allocator->allocate(defaultPagesInHeap * pageSize(), 0));

        // Allocate the remainder of the heap.
        MetaAllocatorHandle* remainder = allocate(defaultPagesInHeap * pageSize() - secondSize);
        EXPECT_EQ(remainder->start().untaggedPtr(), handle->end().untaggedPtr());

        free(remainder);
        free(handle);

        // Assert that the heap is empty and finish up.
        MetaAllocatorHandle* final = allocate(defaultPagesInHeap * pageSize());
        EXPECT_EQ(final->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));

        free(final);
    }

    // Exhausts the heap, arms demand allocation in coalescing mode, and
    // verifies the allocator requests new pages that merge with the heap.
    void testDemandAllocCoalesce(size_t firstSize, size_t numPages, size_t secondSize)
    {
        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        MetaAllocatorHandle* firstHandle = allocate(firstSize);

        EXPECT_TRUE(!allocator->allocate(secondSize, 0));
        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        currentHeapGrowthMode = ForTestDemandAllocCoalesce;
        allowAllocatePages = numPages;

        MetaAllocatorHandle* secondHandle = allocate(secondSize);

        EXPECT_TRUE(currentHeapGrowthMode == DontGrowHeap);
        EXPECT_EQ(allowAllocatePages, static_cast<size_t>(0));
        EXPECT_EQ(requestedNumPages, (secondSize + pageSize() - 1) / pageSize());
        EXPECT_EQ(secondHandle->start().untaggedPtr(), reinterpret_cast<void*>((basePage + defaultPagesInHeap) * pageSize()));

        requestedNumPages = 0;

        free(firstHandle);
        free(secondHandle);

        // The grown heap must now be allocatable as one contiguous chunk.
        free(allocate((defaultPagesInHeap + numPages) * pageSize()));
    }

    // Same as above, but the demand-allocated pages are separated from the
    // heap by a one-page gap, so the two regions must never coalesce.
    void testDemandAllocDontCoalesce(size_t firstSize, size_t numPages, size_t secondSize)
    {
        free(allocate(firstSize));
        free(allocate(defaultPagesInHeap * pageSize()));
        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        MetaAllocatorHandle* firstHandle = allocate(firstSize);

        EXPECT_TRUE(!allocator->allocate(secondSize, 0));
        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        currentHeapGrowthMode = ForTestDemandAllocDontCoalesce;
        allowAllocatePages = numPages;

        MetaAllocatorHandle* secondHandle = allocate(secondSize);

        EXPECT_TRUE(currentHeapGrowthMode == DontGrowHeap);
        EXPECT_EQ(allowAllocatePages, static_cast<size_t>(0));
        EXPECT_EQ(requestedNumPages, (secondSize + pageSize() - 1) / pageSize());
        EXPECT_EQ(secondHandle->start().untaggedPtr(), reinterpret_cast<void*>((basePage + defaultPagesInHeap + 1) * pageSize()));

        requestedNumPages = 0;

        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        free(firstHandle);
        free(secondHandle);

        // Even with everything free, the gap prevents one giant allocation.
        EXPECT_TRUE(!allocator->allocate((defaultPagesInHeap + numPages) * pageSize(), 0));

        firstHandle = allocate(firstSize);
        secondHandle = allocate(secondSize);
        EXPECT_EQ(firstHandle->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
        EXPECT_EQ(secondHandle->start().untaggedPtr(), reinterpret_cast<void*>((basePage + defaultPagesInHeap + 1) * pageSize()));
        free(firstHandle);
        free(secondHandle);
    }
};
659
TEST_F(MetaAllocatorTest, Empty)
{
    // Tests that creating and destroying an allocator works; SetUp/TearDown
    // do all the checking.
}
664
TEST_F(MetaAllocatorTest, AllocZero)
{
    // Tests that allocating a zero-length block returns 0 and
    // does not change anything in memory.

    ASSERT(!allocator->allocate(0, 0));

    // The whole heap must still be allocatable in one piece.
    MetaAllocatorHandle* final = allocate(defaultPagesInHeap * pageSize());
    EXPECT_EQ(final->start().untaggedPtr(), reinterpret_cast<void*>(basePage * pageSize()));
    free(final);
}
676
// Single allocation at the 32-byte granularity minimum.
TEST_F(MetaAllocatorTest, OneAlloc32)
{
    testOneAlloc(32);
}
681
// Single 64-byte allocation.
TEST_F(MetaAllocatorTest, OneAlloc64)
{
    testOneAlloc(64);
}
686
// Single allocation spanning two whole pages.
TEST_F(MetaAllocatorTest, OneAllocTwoPages)
{
    testOneAlloc(pageSize() * 2);
}
691
// Right-coalescing: 32-byte alloc/free, twice.
TEST_F(MetaAllocatorTest, RepeatAllocFree32Twice)
{
    testRepeatAllocFree(32, 32, 0);
}
696
// Right-coalescing: 32 then 64 bytes.
TEST_F(MetaAllocatorTest, RepeatAllocFree32Then64)
{
    testRepeatAllocFree(32, 64, 0);
}
701
// Right-coalescing: 64 then 32 bytes.
TEST_F(MetaAllocatorTest, RepeatAllocFree64Then32)
{
    testRepeatAllocFree(64, 32, 0);
}
706
// Right-coalescing: 32, 32, then 64 bytes.
TEST_F(MetaAllocatorTest, RepeatAllocFree32TwiceThen64)
{
    testRepeatAllocFree(32, 32, 64, 0);
}
711
// Right-coalescing: 32, 64, then 64 bytes.
TEST_F(MetaAllocatorTest, RepeatAllocFree32Then64Twice)
{
    testRepeatAllocFree(32, 64, 64, 0);
}
716
// Right-coalescing: 64, 32, then 64 bytes.
TEST_F(MetaAllocatorTest, RepeatAllocFree64Then32Then64)
{
    testRepeatAllocFree(64, 32, 64, 0);
}
721
// Right-coalescing: 32 bytes, three times.
TEST_F(MetaAllocatorTest, RepeatAllocFree32Thrice)
{
    testRepeatAllocFree(32, 32, 32, 0);
}
726
727TEST_F(MetaAllocatorTest, RepeatAllocFree32Then64Then32)
728{
729 testRepeatAllocFree(32, 32, 32, 0);
730}
731
// Right-coalescing: 64, 32, then 32 bytes.
TEST_F(MetaAllocatorTest, RepeatAllocFree64Then32Twice)
{
    testRepeatAllocFree(64, 32, 32, 0);
}
736
// Right-coalescing with page-sized chunks: two pages, then 32 bytes.
// Sizes are cast to int because testRepeatAllocFree reads varargs as int.
TEST_F(MetaAllocatorTest, RepeatAllocFreeTwoPagesThen32)
{
    testRepeatAllocFree(static_cast<int>(pageSize() * 2), 32, 0);
}
741
// Right-coalescing: 32 bytes, then two pages.
TEST_F(MetaAllocatorTest, RepeatAllocFree32ThenTwoPages)
{
    testRepeatAllocFree(32, static_cast<int>(pageSize() * 2), 0);
}
746
// Right-coalescing: one page, then two pages.
TEST_F(MetaAllocatorTest, RepeatAllocFreePageThenTwoPages)
{
    testRepeatAllocFree(static_cast<int>(pageSize()), static_cast<int>(pageSize() * 2), 0);
}
751
// Right-coalescing: two pages, then one page.
TEST_F(MetaAllocatorTest, RepeatAllocFreeTwoPagesThenPage)
{
    testRepeatAllocFree(static_cast<int>(pageSize() * 2), static_cast<int>(pageSize()), 0);
}
756
// Full coalescing: 32 + 32 freed, then 128 must fit at the heap base.
TEST_F(MetaAllocatorTest, SimpleFullCoalesce32Plus32Then128)
{
    testSimpleFullCoalesce(32, 32, 128);
}
761
// Full coalescing: 32 + 64 freed, then 128 must fit at the heap base.
TEST_F(MetaAllocatorTest, SimpleFullCoalesce32Plus64Then128)
{
    testSimpleFullCoalesce(32, 64, 128);
}
766
// Full coalescing: 64 + 32 freed, then 128 must fit at the heap base.
TEST_F(MetaAllocatorTest, SimpleFullCoalesce64Plus32Then128)
{
    testSimpleFullCoalesce(64, 32, 128);
}
771
// Full coalescing where the two chunks together fill exactly one page.
TEST_F(MetaAllocatorTest, SimpleFullCoalesce32PlusPageLess32ThenPage)
{
    testSimpleFullCoalesce(32, pageSize() - 32, pageSize());
}
776
// Full coalescing: one page's worth freed, then a two-page allocation.
TEST_F(MetaAllocatorTest, SimpleFullCoalesce32PlusPageLess32ThenTwoPages)
{
    testSimpleFullCoalesce(32, pageSize() - 32, pageSize() * 2);
}
781
// Full coalescing: a page plus 32 bytes freed, then a two-page allocation.
TEST_F(MetaAllocatorTest, SimpleFullCoalescePagePlus32ThenTwoPages)
{
    testSimpleFullCoalesce(pageSize(), 32, pageSize() * 2);
}
786
// Full coalescing: two single-page chunks freed, then a two-page allocation.
TEST_F(MetaAllocatorTest, SimpleFullCoalescePagePlusPageThenTwoPages)
{
    testSimpleFullCoalesce(pageSize(), pageSize(), pageSize() * 2);
}
791
// FIFO alloc/free (FillAtEnd): two 32-byte chunks.
TEST_F(MetaAllocatorTest, FIFOAllocFillAtEnd32Twice)
{
    testFIFOAlloc(TestFIFOAllocMode::FillAtEnd, 32, 32, 0);
}
796
// FIFO alloc/free (FillAtEnd): three 32-byte chunks.
TEST_F(MetaAllocatorTest, FIFOAllocFillAtEnd32Thrice)
{
    testFIFOAlloc(TestFIFOAllocMode::FillAtEnd, 32, 32, 32, 0);
}
801
// FIFO alloc/free (FillAtEnd): four 32-byte chunks.
TEST_F(MetaAllocatorTest, FIFOAllocFillAtEnd32FourTimes)
{
    testFIFOAlloc(TestFIFOAllocMode::FillAtEnd, 32, 32, 32, 32, 0);
}
806
// FIFO alloc/free (FillAtEnd) with page-straddling chunk sizes.
TEST_F(MetaAllocatorTest, FIFOAllocFillAtEndPageLess32Then32ThenPageLess64Then64)
{
    testFIFOAlloc(TestFIFOAllocMode::FillAtEnd, static_cast<int>(pageSize() - 32), 32, static_cast<int>(pageSize() - 64), 64, 0);
}
811
// FIFO alloc/free (EagerFill, exercises perfect-fit): two 32-byte chunks.
TEST_F(MetaAllocatorTest, FIFOAllocEagerFill32Twice)
{
    testFIFOAlloc(TestFIFOAllocMode::EagerFill, 32, 32, 0);
}
816
// FIFO alloc/free (EagerFill): three 32-byte chunks.
TEST_F(MetaAllocatorTest, FIFOAllocEagerFill32Thrice)
{
    testFIFOAlloc(TestFIFOAllocMode::EagerFill, 32, 32, 32, 0);
}
821
// FIFO alloc/free (EagerFill): four 32-byte chunks.
TEST_F(MetaAllocatorTest, FIFOAllocEagerFill32FourTimes)
{
    testFIFOAlloc(TestFIFOAllocMode::EagerFill, 32, 32, 32, 32, 0);
}
826
// FIFO alloc/free (EagerFill) with page-straddling chunk sizes.
TEST_F(MetaAllocatorTest, FIFOAllocEagerFillPageLess32Then32ThenPageLess64Then64)
{
    testFIFOAlloc(TestFIFOAllocMode::EagerFill, static_cast<int>(pageSize() - 32), 32, static_cast<int>(pageSize() - 64), 64, 0);
}
831
// Fill the entire heap with 32-byte allocations.
TEST_F(MetaAllocatorTest, FillHeap32)
{
    testFillHeap(32, defaultPagesInHeap * pageSize() / 32);
}
836
// Fill the entire heap with page-sized allocations.
TEST_F(MetaAllocatorTest, FillHeapPage)
{
    testFillHeap(pageSize(), defaultPagesInHeap);
}
841
// Fill the entire heap with two-page allocations.
TEST_F(MetaAllocatorTest, FillHeapTwoPages)
{
    testFillHeap(pageSize() * 2, defaultPagesInHeap / 2);
}
846
// Right-end allocation: small left chunks interleaved with page-sized right chunks.
TEST_F(MetaAllocatorTest, RightAllocation32ThenPageThen32ThenPage)
{
    testRightAllocation(32, pageSize(), 32, pageSize());
}
851
// Right-end allocation with quarter-page left chunks.
TEST_F(MetaAllocatorTest, RightAllocationQuarterPageThenPageThenQuarterPageThenPage)
{
    testRightAllocation(pageSize() / 4, pageSize(), pageSize() / 4, pageSize());
}
856
// Best-fit: three slots, sizes 64, 128, 192 (start 64, step 64).
TEST_F(MetaAllocatorTest, BestFit64Plus64Thrice)
{
    testBestFit(64, 64, 3, RunSanityCheck);
}
861
// Best-fit stress: ten slots; sanity checks skipped for speed.
TEST_F(MetaAllocatorTest, BestFit64Plus64TenTimes)
{
    testBestFit(64, 64, 10, DontRunSanityCheck);
}
866
// Best-fit stress: a hundred slots; sanity checks skipped for speed.
TEST_F(MetaAllocatorTest, BestFit64Plus64HundredTimes)
{
    testBestFit(64, 64, 100, DontRunSanityCheck);
}
871
// Best-fit: three slots starting at 96 bytes, step 64.
TEST_F(MetaAllocatorTest, BestFit96Plus64Thrice)
{
    testBestFit(96, 64, 3, RunSanityCheck);
}
876
// Best-fit stress: ten slots starting at 96 bytes, step 64.
TEST_F(MetaAllocatorTest, BestFit96Plus64TenTimes)
{
    testBestFit(96, 64, 10, DontRunSanityCheck);
}
881
// Best-fit stress: a hundred slots starting at 96 bytes, step 64.
TEST_F(MetaAllocatorTest, BestFit96Plus64HundredTimes)
{
    testBestFit(96, 64, 100, DontRunSanityCheck);
}
886
// Best-fit: three slots starting at 96 bytes, step 96.
TEST_F(MetaAllocatorTest, BestFit96Plus96Thrice)
{
    testBestFit(96, 96, 3, RunSanityCheck);
}
891
// Best-fit stress: ten slots starting at 96 bytes, step 96.
TEST_F(MetaAllocatorTest, BestFit96Plus96TenTimes)
{
    testBestFit(96, 96, 10, DontRunSanityCheck);
}
896
// Best-fit stress: eighty slots starting at 96 bytes, step 96.
TEST_F(MetaAllocatorTest, BestFit96Plus96EightyTimes)
{
    testBestFit(96, 96, 80, DontRunSanityCheck);
}
901
// Shrink a 64-byte allocation to 32 bytes.
TEST_F(MetaAllocatorTest, Shrink64To32)
{
    testShrink(64, 32);
}
906
// Shrink a one-page allocation to 32 bytes.
TEST_F(MetaAllocatorTest, ShrinkPageTo32)
{
    testShrink(pageSize(), 32);
}
911
// Shrink a one-page allocation by just 32 bytes.
TEST_F(MetaAllocatorTest, ShrinkPageToPageLess32)
{
    testShrink(pageSize(), pageSize() - 32);
}
916
// Shrink a two-page allocation to 32 bytes.
TEST_F(MetaAllocatorTest, ShrinkTwoPagesTo32)
{
    testShrink(pageSize() * 2, 32);
}
921
// Shrink a two-page allocation to a page plus 32 bytes.
TEST_F(MetaAllocatorTest, ShrinkTwoPagesToPagePlus32)
{
    testShrink(pageSize() * 2, pageSize() + 32);
}
926
// Shrink a two-page allocation to exactly one page.
TEST_F(MetaAllocatorTest, ShrinkTwoPagesToPage)
{
    testShrink(pageSize() * 2, pageSize());
}
931
// Shrink a two-page allocation to just under one page.
TEST_F(MetaAllocatorTest, ShrinkTwoPagesToPageLess32)
{
    testShrink(pageSize() * 2, pageSize() - 32);
}
936
// Shrink a two-page allocation by just 32 bytes.
TEST_F(MetaAllocatorTest, ShrinkTwoPagesToTwoPagesLess32)
{
    testShrink(pageSize() * 2, pageSize() * 2 - 32);
}
941
// Demand allocation (coalescing): grow the heap by its own size.
TEST_F(MetaAllocatorTest, DemandAllocCoalescePageThenDoubleHeap)
{
    testDemandAllocCoalesce(pageSize(), defaultPagesInHeap, defaultPagesInHeap * pageSize());
}
946
// Demand allocation (coalescing): grow the heap by twice its size.
TEST_F(MetaAllocatorTest, DemandAllocCoalescePageThenTripleHeap)
{
    testDemandAllocCoalesce(pageSize(), defaultPagesInHeap * 2, defaultPagesInHeap * pageSize());
}
951
// Demand allocation (non-coalescing): new space is separated by a gap page.
TEST_F(MetaAllocatorTest, DemandAllocDontCoalescePageThenDoubleHeap)
{
    testDemandAllocDontCoalesce(pageSize(), defaultPagesInHeap, defaultPagesInHeap * pageSize());
}
956
957} // namespace TestWebKitAPI
958
959#if USE(POINTER_PROFILING)
960
namespace WTF {

// Stubs for pointer-profiling builds: these tests don't care about pointer
// tags, so every pointer and tag maps to "<unknown>".
const char* tagForPtr(const void*) { return "<unknown>"; }
const char* ptrTagName(PtrTag) { return "<unknown>"; }

} // namespace WTF
967
968#endif // USE(POINTER_PROFILING)
969