/*
 * Copyright (C) 2014-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef Cache_h
#define Cache_h

#include "Allocator.h"
#include "BExport.h"
#include "Deallocator.h"
#include "HeapKind.h"
#include "PerThread.h"

namespace bmalloc {

// Per-thread allocation / deallocation cache, backed by a per-process Heap.
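//
// Each thread lazily materializes its own PerHeapKind<Cache>, so the common
// allocation path touches only thread-local state; interaction with the
// shared Heap happens off the fast path, through the Allocator and
// Deallocator members below.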

class Cache {
public:
    static void* tryAllocate(HeapKind, size_t);
    static void* allocate(HeapKind, size_t);
    static void* tryAllocate(HeapKind, size_t alignment, size_t size);
    static void* allocate(HeapKind, size_t alignment, size_t size);
    static void deallocate(HeapKind, void*);
    static void* tryReallocate(HeapKind, void*, size_t);
    static void* reallocate(HeapKind, void*, size_t);

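    // Flushes this thread's cached memory back to the shared Heap. (Defined
    // out of line; this summary is inferred from how the caches are used.)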
    static void scavenge(HeapKind);

    Cache(HeapKind);

    Allocator& allocator() { return m_allocator; }
    Deallocator& deallocator() { return m_deallocator; }

private:
    BEXPORT static void* tryAllocateSlowCaseNullCache(HeapKind, size_t);
    BEXPORT static void* allocateSlowCaseNullCache(HeapKind, size_t);
    BEXPORT static void* tryAllocateSlowCaseNullCache(HeapKind, size_t alignment, size_t size);
    BEXPORT static void* allocateSlowCaseNullCache(HeapKind, size_t alignment, size_t size);
    BEXPORT static void deallocateSlowCaseNullCache(HeapKind, void*);
    BEXPORT static void* tryReallocateSlowCaseNullCache(HeapKind, void*, size_t);
    BEXPORT static void* reallocateSlowCaseNullCache(HeapKind, void*, size_t);

    Deallocator m_deallocator;
    Allocator m_allocator;
};

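// Every inline function below follows the same fast / slow split: fetch this
// thread's caches via PerThread<>::getFastCase(), which returns null until the
// thread-local storage has been populated, and on null defer to the matching
// out-of-line *SlowCaseNullCache function, which takes care of constructing
// the per-thread caches before performing the operation.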
inline void* Cache::tryAllocate(HeapKind heapKind, size_t size)
{
    PerHeapKind<Cache>* caches = PerThread<PerHeapKind<Cache>>::getFastCase();
    if (!caches)
        return tryAllocateSlowCaseNullCache(heapKind, size);
    return caches->at(mapToActiveHeapKindAfterEnsuringGigacage(heapKind)).allocator().tryAllocate(size);
}

inline void* Cache::allocate(HeapKind heapKind, size_t size)
{
    PerHeapKind<Cache>* caches = PerThread<PerHeapKind<Cache>>::getFastCase();
    if (!caches)
        return allocateSlowCaseNullCache(heapKind, size);
    return caches->at(mapToActiveHeapKindAfterEnsuringGigacage(heapKind)).allocator().allocate(size);
}

inline void* Cache::tryAllocate(HeapKind heapKind, size_t alignment, size_t size)
{
    PerHeapKind<Cache>* caches = PerThread<PerHeapKind<Cache>>::getFastCase();
    if (!caches)
        return tryAllocateSlowCaseNullCache(heapKind, alignment, size);
    return caches->at(mapToActiveHeapKindAfterEnsuringGigacage(heapKind)).allocator().tryAllocate(alignment, size);
}

inline void* Cache::allocate(HeapKind heapKind, size_t alignment, size_t size)
{
    PerHeapKind<Cache>* caches = PerThread<PerHeapKind<Cache>>::getFastCase();
    if (!caches)
        return allocateSlowCaseNullCache(heapKind, alignment, size);
    return caches->at(mapToActiveHeapKindAfterEnsuringGigacage(heapKind)).allocator().allocate(alignment, size);
}

inline void Cache::deallocate(HeapKind heapKind, void* object)
{
    PerHeapKind<Cache>* caches = PerThread<PerHeapKind<Cache>>::getFastCase();
    if (!caches)
        return deallocateSlowCaseNullCache(heapKind, object);
    return caches->at(mapToActiveHeapKindAfterEnsuringGigacage(heapKind)).deallocator().deallocate(object);
}

inline void* Cache::tryReallocate(HeapKind heapKind, void* object, size_t newSize)
{
    PerHeapKind<Cache>* caches = PerThread<PerHeapKind<Cache>>::getFastCase();
    if (!caches)
        return tryReallocateSlowCaseNullCache(heapKind, object, newSize);
    return caches->at(mapToActiveHeapKindAfterEnsuringGigacage(heapKind)).allocator().tryReallocate(object, newSize);
}

inline void* Cache::reallocate(HeapKind heapKind, void* object, size_t newSize)
{
    PerHeapKind<Cache>* caches = PerThread<PerHeapKind<Cache>>::getFastCase();
    if (!caches)
        return reallocateSlowCaseNullCache(heapKind, object, newSize);
    return caches->at(mapToActiveHeapKindAfterEnsuringGigacage(heapKind)).allocator().reallocate(object, newSize);
}
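
// Usage sketch (illustrative only; HeapKind::Primary is assumed to be a valid
// heap kind in this build). A caller would typically drive the cache like so:
//
//     void* p = Cache::allocate(HeapKind::Primary, 64);
//     p = Cache::reallocate(HeapKind::Primary, p, 128);
//     Cache::deallocate(HeapKind::Primary, p);
//
// The try* variants return null on failure rather than treating failure as
// fatal.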

} // namespace bmalloc

#endif // Cache_h