/*
 * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
 * Copyright (C) 2017 Yusuke Suzuki <[email protected]>.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#pragma once

#include "ExceptionHelpers.h"
#include "HashMapImpl.h"
#include "JSObject.h"
#include <wtf/JSValueMalloc.h>
#include <wtf/MallocPtr.h>

namespace JSC {

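// Two bucket payload shapes share one implementation: WeakSet buckets hold only a weakly
// held key, while WeakMap buckets additionally hold a value that stays alive as long as
// the key does.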
struct WeakMapBucketDataKey {
    static const HashTableType Type = HashTableType::Key;
    WriteBarrier<JSObject> key;
};
static_assert(sizeof(WeakMapBucketDataKey) == sizeof(void*), "WeakMapBucketDataKey should be pointer-sized.");

struct WeakMapBucketDataKeyValue {
    static const HashTableType Type = HashTableType::KeyValue;
    WriteBarrier<JSObject> key;
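    // On 32-bit platforms, pad so that the 64-bit value below stays 8-byte aligned and the
    // bucket remains 16 bytes.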
#if USE(JSVALUE32_64)
    uint32_t padding;
#endif
    WriteBarrier<Unknown> value;
};
static_assert(sizeof(WeakMapBucketDataKeyValue) == 16, "WeakMapBucketDataKeyValue should be 16 bytes.");

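// Keys are always JSObject*, so we hash by pointer identity.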
ALWAYS_INLINE uint32_t jsWeakMapHash(JSObject* key)
{
    return wangsInt64Hash(JSValue::encode(key));
}

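// After a batch of removals, keep shrinking until the capacity suits the remaining key count.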
ALWAYS_INLINE uint32_t nextCapacityAfterBatchRemoval(uint32_t capacity, uint32_t keyCount)
{
    while (shouldShrink(capacity, keyCount))
        capacity = nextCapacity(capacity, keyCount);
    return capacity;
}

template <typename Data>
class WeakMapBucket {
public:
    ALWAYS_INLINE void setKey(VM& vm, JSCell* owner, JSObject* key)
    {
        m_data.key.set(vm, owner, key);
    }

    template <typename T = Data>
    ALWAYS_INLINE typename std::enable_if<std::is_same<T, WeakMapBucketDataKeyValue>::value>::type setValue(VM& vm, JSCell* owner, JSValue value)
    {
        m_data.value.set(vm, owner, value);
    }
    template <typename T = Data>
    ALWAYS_INLINE typename std::enable_if<std::is_same<T, WeakMapBucketDataKey>::value>::type setValue(VM&, JSCell*, JSValue) { }

    ALWAYS_INLINE JSObject* key() const { return m_data.key.get(); }

    template <typename T = Data>
    ALWAYS_INLINE typename std::enable_if<std::is_same<T, WeakMapBucketDataKeyValue>::value, JSValue>::type value() const
    {
        return m_data.value.get();
    }
    template <typename T = Data>
    ALWAYS_INLINE typename std::enable_if<std::is_same<T, WeakMapBucketDataKey>::value, JSValue>::type value() const { return JSValue(); }

    template <typename T = Data>
    ALWAYS_INLINE typename std::enable_if<std::is_same<T, WeakMapBucketDataKeyValue>::value>::type copyFrom(const WeakMapBucket& from)
    {
        m_data.key.copyFrom(from.m_data.key);
        m_data.value.setWithoutWriteBarrier(from.m_data.value.get());
    }
    template <typename T = Data>
    ALWAYS_INLINE typename std::enable_if<std::is_same<T, WeakMapBucketDataKey>::value>::type copyFrom(const WeakMapBucket& from)
    {
        m_data.key.copyFrom(from.m_data.key);
    }

    static ptrdiff_t offsetOfKey()
    {
        return OBJECT_OFFSETOF(WeakMapBucket, m_data) + OBJECT_OFFSETOF(Data, key);
    }

    template <typename T = Data>
    static typename std::enable_if<std::is_same<T, WeakMapBucketDataKeyValue>::value, ptrdiff_t>::type offsetOfValue()
    {
        return OBJECT_OFFSETOF(WeakMapBucket, m_data) + OBJECT_OFFSETOF(Data, value);
    }

    template <typename T = Data>
    ALWAYS_INLINE static typename std::enable_if<std::is_same<T, WeakMapBucketDataKeyValue>::value, JSValue>::type extractValue(const WeakMapBucket& bucket)
    {
        return bucket.value();
    }

    template <typename T = Data>
    ALWAYS_INLINE static typename std::enable_if<std::is_same<T, WeakMapBucketDataKey>::value, JSValue>::type extractValue(const WeakMapBucket&)
    {
        return JSValue();
    }

    bool isEmpty()
    {
        return !m_data.key.unvalidatedGet();
    }

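    // An empty bucket has a null key. A deleted bucket is tagged with this sentinel pointer,
    // which can never be a real JSObject*, so probe chains stay intact across removals.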
    static JSObject* deletedKey()
    {
        return bitwise_cast<JSObject*>(static_cast<uintptr_t>(-3));
    }

    bool isDeleted()
    {
        return m_data.key.unvalidatedGet() == deletedKey();
    }

    void makeDeleted()
    {
        m_data.key.setWithoutWriteBarrier(deletedKey());
        clearValue();
    }

    template <typename T = Data>
    ALWAYS_INLINE typename std::enable_if<std::is_same<T, WeakMapBucketDataKeyValue>::value>::type visitAggregate(SlotVisitor& visitor)
    {
        visitor.append(m_data.value);
    }

private:
    template <typename T = Data>
    ALWAYS_INLINE typename std::enable_if<std::is_same<T, WeakMapBucketDataKeyValue>::value>::type clearValue()
    {
        m_data.value.clear();
    }
    template <typename T = Data>
    ALWAYS_INLINE typename std::enable_if<std::is_same<T, WeakMapBucketDataKey>::value>::type clearValue() { }

    Data m_data;
};

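// A WeakMapBuffer is a headerless, malloc-allocated flat array of buckets: the buffer object's
// own address is the address of bucket 0. It is deliberately not a GC-managed allocation; see
// the comment in WeakMapImpl::rehash().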
template <typename BucketType>
class WeakMapBuffer {
public:
    WeakMapBuffer() = delete;

    static size_t allocationSize(Checked<size_t> capacity)
    {
        return (capacity * sizeof(BucketType)).unsafeGet();
    }

    ALWAYS_INLINE BucketType* buffer() const
    {
        return bitwise_cast<BucketType*>(this);
    }

    static MallocPtr<WeakMapBuffer, JSValueMalloc> create(uint32_t capacity)
    {
        size_t allocationSize = WeakMapBuffer::allocationSize(capacity);
        auto buffer = MallocPtr<WeakMapBuffer, JSValueMalloc>::malloc(allocationSize);
        buffer->reset(capacity);
        return buffer;
    }

    ALWAYS_INLINE void reset(uint32_t capacity)
    {
        memset(this, 0, allocationSize(capacity));
    }
};

template <typename WeakMapBucketType>
class WeakMapImpl : public JSDestructibleObject {
    using Base = JSDestructibleObject;
    using WeakMapBufferType = WeakMapBuffer<WeakMapBucketType>;

public:
    using BucketType = WeakMapBucketType;

    static void destroy(JSCell*);

    static void visitChildren(JSCell*, SlotVisitor&);

    static size_t estimatedSize(JSCell*, VM&);

    WeakMapImpl(VM& vm, Structure* structure)
        : Base(vm, structure)
    {
    }

    static constexpr uint32_t initialCapacity = 4;

    void finishCreation(VM& vm)
    {
        ASSERT_WITH_MESSAGE(WeakMapBucket<WeakMapBucketDataKey>::offsetOfKey() == WeakMapBucket<WeakMapBucketDataKeyValue>::offsetOfKey(), "We assume this to be true in the DFG and FTL JIT.");

        Base::finishCreation(vm);

        auto locker = holdLock(cellLock());
        makeAndSetNewBuffer(locker, initialCapacity);
    }

    // WeakMap operations must not cause GC: the DFG models these operations under that
    // guarantee, and DisallowGC enforces it at runtime.

    template <typename T = WeakMapBucketType>
    ALWAYS_INLINE typename std::enable_if<std::is_same<T, WeakMapBucket<WeakMapBucketDataKeyValue>>::value, JSValue>::type get(JSObject* key)
    {
        DisallowGC disallowGC;
        if (WeakMapBucketType* bucket = findBucket(key))
            return bucket->value();
        return jsUndefined();
    }

    ALWAYS_INLINE bool has(JSObject* key)
    {
        DisallowGC disallowGC;
        return !!findBucket(key);
    }

    ALWAYS_INLINE void add(VM& vm, JSObject* key, JSValue value = JSValue())
    {
        DisallowGC disallowGC;
        add(vm, key, value, jsWeakMapHash(key));
    }

    ALWAYS_INLINE void add(VM& vm, JSObject* key, JSValue value, uint32_t hash)
    {
        DisallowGC disallowGC;
        ASSERT_WITH_MESSAGE(jsWeakMapHash(key) == hash, "The caller-provided hash must match jsWeakMapHash(key).");

        addInternal(vm, key, value, hash);
        if (shouldRehashAfterAdd())
            rehash();
    }

    ALWAYS_INLINE bool remove(JSObject* key)
    {
        DisallowGC disallowGC;
        WeakMapBucketType* bucket = findBucket(key);
        if (!bucket)
            return false;

        bucket->makeDeleted();

        ++m_deleteCount;
        RELEASE_ASSERT(m_keyCount > 0);
        --m_keyCount;

        if (shouldShrink())
            rehash();

        return true;
    }

    ALWAYS_INLINE uint32_t size() const
    {
        return m_keyCount;
    }

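    // Collects live entries into the MarkedArgumentBuffer (implemented out of line); a limit
    // of 0 appears to mean "no limit".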
    void takeSnapshot(MarkedArgumentBuffer&, unsigned limit = 0);

    static ptrdiff_t offsetOfBuffer()
    {
        return OBJECT_OFFSETOF(WeakMapImpl<WeakMapBucketType>, m_buffer);
    }

    static ptrdiff_t offsetOfCapacity()
    {
        return OBJECT_OFFSETOF(WeakMapImpl<WeakMapBucketType>, m_capacity);
    }

    static constexpr bool isWeakMap()
    {
        return std::is_same<WeakMapBucketType, JSC::WeakMapBucket<WeakMapBucketDataKeyValue>>::value;
    }

    static constexpr bool isWeakSet()
    {
        return std::is_same<WeakMapBucketType, JSC::WeakMapBucket<WeakMapBucketDataKey>>::value;
    }
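
    // For reference: elsewhere in JSC, JSWeakMap instantiates this template with
    // WeakMapBucket<WeakMapBucketDataKeyValue>, and JSWeakSet with WeakMapBucket<WeakMapBucketDataKey>.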

    template<typename CellType, SubspaceAccess mode>
    static IsoSubspace* subspaceFor(VM& vm)
    {
        if (isWeakMap())
            return vm.weakMapSpace<mode>();
        return vm.weakSetSpace<mode>();
    }

    static void visitOutputConstraints(JSCell*, SlotVisitor&);
    void finalizeUnconditionally(VM&);

private:
    ALWAYS_INLINE WeakMapBucketType* findBucket(JSObject* key)
    {
        return findBucket(key, jsWeakMapHash(key));
    }

    ALWAYS_INLINE WeakMapBucketType* findBucket(JSObject* key, uint32_t hash)
    {
        return findBucketAlreadyHashed(key, hash);
    }

    ALWAYS_INLINE WeakMapBucketType* buffer() const
    {
        return m_buffer->buffer();
    }

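    // Iterates live buckets only, passing (key, value) to the functor; returning
    // IterationState::Stop ends the walk early.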
    enum class IterationState { Continue, Stop };
    template<typename Functor>
    void forEach(Functor functor)
    {
        auto* buffer = this->buffer();
        for (uint32_t index = 0; index < m_capacity; ++index) {
            auto* bucket = buffer + index;
            if (bucket->isEmpty() || bucket->isDeleted())
                continue;
            if (functor(bucket->key(), bucket->value()) == IterationState::Stop)
                return;
        }
    }

    ALWAYS_INLINE bool shouldRehashAfterAdd() const
    {
        return JSC::shouldRehashAfterAdd(m_capacity, m_keyCount, m_deleteCount);
    }

    ALWAYS_INLINE bool shouldShrink() const
    {
        return JSC::shouldShrink(m_capacity, m_keyCount);
    }

    ALWAYS_INLINE static bool canUseBucket(WeakMapBucketType* bucket, JSObject* key)
    {
        return !bucket->isDeleted() && key == bucket->key();
    }

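    // Open addressing with linear probing. m_capacity is always a power of two, so
    // (m_capacity - 1) works as a wrap-around mask for the probe index.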
    ALWAYS_INLINE void addInternal(VM& vm, JSObject* key, JSValue value, uint32_t hash)
    {
        const uint32_t mask = m_capacity - 1;
        uint32_t index = hash & mask;
        WeakMapBucketType* buffer = this->buffer();
        WeakMapBucketType* bucket = buffer + index;
        while (!bucket->isEmpty()) {
            if (canUseBucket(bucket, key)) {
                ASSERT(!bucket->isDeleted());
                bucket->setValue(vm, this, value);
                return;
            }
            index = (index + 1) & mask;
            bucket = buffer + index;
        }

        auto* newEntry = buffer + index;
        newEntry->setKey(vm, this, key);
        newEntry->setValue(vm, this, value);
        ++m_keyCount;
    }

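    // Mirrors the probe sequence of addInternal(). Deleted buckets do not stop the probe
    // (their key is the non-null sentinel); only a truly empty bucket terminates the search.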
    ALWAYS_INLINE WeakMapBucketType* findBucketAlreadyHashed(JSObject* key, uint32_t hash)
    {
        const uint32_t mask = m_capacity - 1;
        uint32_t index = hash & mask;
        WeakMapBucketType* buffer = this->buffer();
        WeakMapBucketType* bucket = buffer + index;

        while (!bucket->isEmpty()) {
            if (canUseBucket(bucket, key)) {
                ASSERT(!bucket->isDeleted());
                return buffer + index;
            }
            index = (index + 1) & mask;
            bucket = buffer + index;
        }
        return nullptr;
    }

    enum class RehashMode { Normal, RemoveBatching };
    void rehash(RehashMode mode = RehashMode::Normal)
    {
        // Since shrinking is done just after GC runs (in finalizeUnconditionally), WeakMapImpl::rehash()
        // must not touch any GC-related features. This is why the WeakMapBuffer is not allocated
        // in GC-managed auxiliary memory.

        // This rehash modifies m_buffer, which is not a GC-managed buffer, but m_buffer can also be
        // accessed concurrently from visitOutputConstraints. Thus, we must guard it with the cellLock.
        auto locker = holdLock(cellLock());

        uint32_t oldCapacity = m_capacity;
        MallocPtr<WeakMapBufferType, JSValueMalloc> oldBuffer = WTFMove(m_buffer);

        uint32_t capacity = m_capacity;
        if (mode == RehashMode::RemoveBatching) {
            ASSERT(shouldShrink());
            capacity = nextCapacityAfterBatchRemoval(capacity, m_keyCount);
        } else
            capacity = nextCapacity(capacity, m_keyCount);
        makeAndSetNewBuffer(locker, capacity);

        auto* buffer = this->buffer();
        const uint32_t mask = m_capacity - 1;
        for (uint32_t oldIndex = 0; oldIndex < oldCapacity; ++oldIndex) {
            auto* entry = oldBuffer->buffer() + oldIndex;
            if (entry->isEmpty() || entry->isDeleted())
                continue;

            uint32_t index = jsWeakMapHash(entry->key()) & mask;
            WeakMapBucketType* bucket = buffer + index;
            while (!bucket->isEmpty()) {
                index = (index + 1) & mask;
                bucket = buffer + index;
            }
            bucket->copyFrom(*entry);
        }

        m_deleteCount = 0;

        checkConsistency();
    }

    ALWAYS_INLINE void checkConsistency() const
    {
        if (!ASSERT_DISABLED) {
            uint32_t size = 0;
            auto* buffer = this->buffer();
            for (uint32_t index = 0; index < m_capacity; ++index) {
                auto* bucket = buffer + index;
                if (bucket->isEmpty() || bucket->isDeleted())
                    continue;
                ++size;
            }
            ASSERT(size == m_keyCount);
        }
    }

    void makeAndSetNewBuffer(const AbstractLocker&, uint32_t capacity)
    {
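        // The capacity must be a power of two so probing can use (capacity - 1) as a mask.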
        ASSERT(!(capacity & (capacity - 1)));

        m_buffer = WeakMapBufferType::create(capacity);
        m_capacity = capacity;
        ASSERT(m_buffer);
        assertBufferIsEmpty();
    }

    ALWAYS_INLINE void assertBufferIsEmpty() const
    {
        if (!ASSERT_DISABLED) {
            for (unsigned i = 0; i < m_capacity; i++)
                ASSERT((buffer() + i)->isEmpty());
        }
    }

    template<typename Appender>
    void takeSnapshotInternal(unsigned limit, Appender);

    MallocPtr<WeakMapBufferType, JSValueMalloc> m_buffer;
    uint32_t m_capacity { 0 };
    uint32_t m_keyCount { 0 };
    uint32_t m_deleteCount { 0 };
};

} // namespace JSC