1 | /* |
2 | * Copyright (C) 2016-2018 Apple Inc. All rights reserved. |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without |
5 | * modification, are permitted provided that the following conditions |
6 | * are met: |
7 | * 1. Redistributions of source code must retain the above copyright |
8 | * notice, this list of conditions and the following disclaimer. |
9 | * 2. Redistributions in binary form must reproduce the above copyright |
10 | * notice, this list of conditions and the following disclaimer in the |
11 | * documentation and/or other materials provided with the distribution. |
12 | * |
13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY |
14 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
15 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR |
17 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, |
18 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
19 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR |
20 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY |
21 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
22 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
23 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
24 | */ |
25 | |
26 | #include "config.h" |
27 | #include "LargeAllocation.h" |
28 | |
29 | #include "AlignedMemoryAllocator.h" |
30 | #include "Heap.h" |
31 | #include "JSCInlines.h" |
32 | #include "Operations.h" |
33 | #include "SubspaceInlines.h" |
34 | |
35 | namespace JSC { |
36 | |
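// LargeAllocation::alignment is a power of two, so a pointer is aligned iff its low
// bits are all zero. For example, with alignment == 16, 0x103e70 & 0xf == 0 (aligned)
// while 0x103e78 & 0xf == 8 (misaligned).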
37 | static inline bool isAlignedForLargeAllocation(void* memory) |
38 | { |
39 | uintptr_t allocatedPointer = bitwise_cast<uintptr_t>(memory); |
40 | return !(allocatedPointer & (LargeAllocation::alignment - 1)); |
41 | } |
42 | |
43 | LargeAllocation* LargeAllocation::tryCreate(Heap& heap, size_t size, Subspace* subspace, unsigned indexInSpace) |
44 | { |
45 | if (validateDFGDoesGC) |
46 | RELEASE_ASSERT(heap.expectDoesGC()); |
47 | |
48 | size_t adjustedAlignmentAllocationSize = headerSize() + size + halfAlignment; |
    static_assert(halfAlignment == 8, "We assume that memory returned by malloc has alignment >= 8.");
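    // Over-allocating by halfAlignment guarantees that an alignment-aligned header
    // position exists inside the block, whichever halfAlignment-aligned address the
    // allocator returns:
    //     base aligned:     [ header ][ cell ][ 8 spare ]
    //     base misaligned:  [ 8 shim ][ header ][ cell ]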
50 | |
    // We must use tryAllocateMemory instead of tryAllocateAlignedMemory because we want
    // to be able to realloc this block later (see tryReallocate), and aligned
    // allocations do not support realloc.
52 | void* space = subspace->alignedMemoryAllocator()->tryAllocateMemory(adjustedAlignmentAllocationSize); |
53 | if (!space) |
54 | return nullptr; |
55 | |
56 | bool adjustedAlignment = false; |
57 | if (!isAlignedForLargeAllocation(space)) { |
58 | space = bitwise_cast<void*>(bitwise_cast<uintptr_t>(space) + halfAlignment); |
59 | adjustedAlignment = true; |
60 | ASSERT(isAlignedForLargeAllocation(space)); |
61 | } |
62 | |
63 | if (scribbleFreeCells()) |
64 | scribble(space, size); |
65 | return new (NotNull, space) LargeAllocation(heap, size, subspace, indexInSpace, adjustedAlignment); |
66 | } |
67 | |
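// Note: tryReallocate may move the allocation. On success, callers must use the
// returned LargeAllocation* (and its cell()) instead of the old pointer.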
68 | LargeAllocation* LargeAllocation::tryReallocate(size_t size, Subspace* subspace) |
69 | { |
70 | size_t adjustedAlignmentAllocationSize = headerSize() + size + halfAlignment; |
    static_assert(halfAlignment == 8, "We assume that memory returned by malloc has alignment >= 8.");
72 | |
73 | ASSERT(subspace == m_subspace); |
74 | |
75 | unsigned oldCellSize = m_cellSize; |
76 | bool oldAdjustedAlignment = m_adjustedAlignment; |
77 | void* oldBasePointer = basePointer(); |
78 | |
79 | void* newBasePointer = subspace->alignedMemoryAllocator()->tryReallocateMemory(oldBasePointer, adjustedAlignmentAllocationSize); |
80 | if (!newBasePointer) |
81 | return nullptr; |
82 | |
83 | LargeAllocation* newAllocation = bitwise_cast<LargeAllocation*>(newBasePointer); |
84 | bool newAdjustedAlignment = false; |
85 | if (!isAlignedForLargeAllocation(newBasePointer)) { |
86 | newAdjustedAlignment = true; |
87 | newAllocation = bitwise_cast<LargeAllocation*>(bitwise_cast<uintptr_t>(newBasePointer) + halfAlignment); |
88 | ASSERT(isAlignedForLargeAllocation(static_cast<void*>(newAllocation))); |
89 | } |
90 | |
    // There are four combinations of old and new alignment adjustment:
    // oldAdjustedAlignment = true  newAdjustedAlignment = true  => do nothing.
    // oldAdjustedAlignment = true  newAdjustedAlignment = false => shift content down (toward the base) by halfAlignment.
    // oldAdjustedAlignment = false newAdjustedAlignment = true  => shift content up (away from the base) by halfAlignment.
    // oldAdjustedAlignment = false newAdjustedAlignment = false => do nothing.
96 | |
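    // Worked example, assuming halfAlignment == 8: if tryReallocateMemory returns
    // newBasePointer == 0x1008 (misaligned, so newAdjustedAlignment == true) while the
    // old block was aligned (oldAdjustedAlignment == false), the header and cell bytes
    // currently start at 0x1008 and must be shifted up by 8 to start at 0x1010.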
97 | if (oldAdjustedAlignment != newAdjustedAlignment) { |
98 | if (oldAdjustedAlignment) { |
99 | ASSERT(!newAdjustedAlignment); |
100 | ASSERT(newAllocation == newBasePointer); |
101 | // Old [ 8 ][ content ] |
102 | // Now [ ][ content ] |
103 | // New [ content ]... |
104 | memmove(newBasePointer, bitwise_cast<char*>(newBasePointer) + halfAlignment, oldCellSize + LargeAllocation::headerSize()); |
105 | } else { |
106 | ASSERT(newAdjustedAlignment); |
107 | ASSERT(newAllocation != newBasePointer); |
108 | ASSERT(newAllocation == bitwise_cast<void*>(bitwise_cast<char*>(newBasePointer) + halfAlignment)); |
109 | // Old [ content ] |
110 | // Now [ content ][ ] |
111 | // New [ 8 ][ content ] |
112 | memmove(bitwise_cast<char*>(newBasePointer) + halfAlignment, newBasePointer, oldCellSize + LargeAllocation::headerSize()); |
113 | } |
114 | } |
115 | |
116 | newAllocation->m_cellSize = size; |
117 | newAllocation->m_adjustedAlignment = newAdjustedAlignment; |
118 | return newAllocation; |
119 | } |
120 | |
121 | LargeAllocation::LargeAllocation(Heap& heap, size_t size, Subspace* subspace, unsigned indexInSpace, bool adjustedAlignment) |
122 | : m_cellSize(size) |
123 | , m_indexInSpace(indexInSpace) |
124 | , m_isNewlyAllocated(true) |
125 | , m_hasValidCell(true) |
126 | , m_adjustedAlignment(adjustedAlignment) |
127 | , m_attributes(subspace->attributes()) |
128 | , m_subspace(subspace) |
129 | , m_weakSet(heap.vm(), *this) |
130 | { |
131 | m_isMarked.store(0); |
132 | } |
133 | |
134 | LargeAllocation::~LargeAllocation() |
135 | { |
136 | if (isOnList()) |
137 | remove(); |
138 | } |
139 | |
140 | void LargeAllocation::lastChanceToFinalize() |
141 | { |
142 | m_weakSet.lastChanceToFinalize(); |
143 | clearMarked(); |
144 | clearNewlyAllocated(); |
145 | sweep(); |
146 | } |
147 | |
148 | void LargeAllocation::shrink() |
149 | { |
150 | m_weakSet.shrink(); |
151 | } |
152 | |
153 | void LargeAllocation::visitWeakSet(SlotVisitor& visitor) |
154 | { |
155 | m_weakSet.visit(visitor); |
156 | } |
157 | |
158 | void LargeAllocation::reapWeakSet() |
159 | { |
    m_weakSet.reap();
161 | } |
162 | |
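// Called for full collections (hence the assertion below): the mark bit from the
// previous cycle is cleared, so this allocation must be re-marked to survive.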
163 | void LargeAllocation::flip() |
164 | { |
165 | ASSERT(heap()->collectionScope() == CollectionScope::Full); |
166 | clearMarked(); |
167 | } |
168 | |
169 | bool LargeAllocation::isEmpty() |
170 | { |
171 | return !isMarked() && m_weakSet.isEmpty() && !isNewlyAllocated(); |
172 | } |
173 | |
174 | void LargeAllocation::sweep() |
175 | { |
176 | m_weakSet.sweep(); |
177 | |
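    // The cell is live if it is marked or still newly allocated. Once it has died,
    // run the destructor (if this subspace needs destruction) exactly once; the
    // underlying memory is only returned to the allocator in destroy().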
178 | if (m_hasValidCell && !isLive()) { |
179 | if (m_attributes.destruction == NeedsDestruction) |
180 | m_subspace->destroy(*vm(), static_cast<JSCell*>(cell())); |
181 | m_hasValidCell = false; |
182 | } |
183 | } |
184 | |
185 | void LargeAllocation::destroy() |
186 | { |
187 | AlignedMemoryAllocator* allocator = m_subspace->alignedMemoryAllocator(); |
188 | void* basePointer = this->basePointer(); |
189 | this->~LargeAllocation(); |
190 | allocator->freeMemory(basePointer); |
191 | } |
192 | |
193 | void LargeAllocation::dump(PrintStream& out) const |
194 | { |
    out.print(RawPointer(this), ":(cell at ", RawPointer(cell()), " with size ", m_cellSize, " and attributes ", m_attributes, ")");
196 | } |
197 | |
198 | #if !ASSERT_DISABLED |
199 | void LargeAllocation::assertValidCell(VM& vm, HeapCell* cell) const |
200 | { |
201 | ASSERT(&vm == this->vm()); |
202 | ASSERT(cell == this->cell()); |
203 | ASSERT(m_hasValidCell); |
204 | } |
205 | #endif |
206 | |
207 | } // namespace JSC |