/*
 * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AtomicsObject.h"

#include "FrameTracers.h"
#include "JSCInlines.h"
#include "JSTypedArrays.h"
#include "ObjectPrototype.h"
#include "ReleaseHeapAccessScope.h"
#include "TypedArrayController.h"

namespace JSC {

STATIC_ASSERT_IS_TRIVIALLY_DESTRUCTIBLE(AtomicsObject);

#define FOR_EACH_ATOMICS_FUNC(macro) \
    macro(add, Add, 3) \
    macro(and, And, 3) \
    macro(compareExchange, CompareExchange, 4) \
    macro(exchange, Exchange, 3) \
    macro(isLockFree, IsLockFree, 1) \
    macro(load, Load, 2) \
    macro(or, Or, 3) \
    macro(store, Store, 3) \
    macro(sub, Sub, 3) \
    macro(wait, Wait, 4) \
    macro(wake, Wake, 3) \
    macro(xor, Xor, 3)

#define DECLARE_FUNC_PROTO(lowerName, upperName, count) \
    EncodedJSValue JSC_HOST_CALL atomicsFunc ## upperName(ExecState*);
FOR_EACH_ATOMICS_FUNC(DECLARE_FUNC_PROTO)
#undef DECLARE_FUNC_PROTO

const ClassInfo AtomicsObject::s_info = { "Atomics", &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(AtomicsObject) };

AtomicsObject::AtomicsObject(VM& vm, Structure* structure)
    : JSNonFinalObject(vm, structure)
{
}

AtomicsObject* AtomicsObject::create(VM& vm, JSGlobalObject* globalObject, Structure* structure)
{
    AtomicsObject* object = new (NotNull, allocateCell<AtomicsObject>(vm.heap)) AtomicsObject(vm, structure);
    object->finishCreation(vm, globalObject);
    return object;
}

Structure* AtomicsObject::createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
{
    return Structure::create(vm, globalObject, prototype, TypeInfo(ObjectType, StructureFlags), info());
}

void AtomicsObject::finishCreation(VM& vm, JSGlobalObject* globalObject)
{
    Base::finishCreation(vm);
    ASSERT(inherits(vm, info()));

#define PUT_DIRECT_NATIVE_FUNC(lowerName, upperName, count) \
    putDirectNativeFunctionWithoutTransition(vm, globalObject, Identifier::fromString(&vm, #lowerName), count, atomicsFunc ## upperName, Atomics ## upperName ## Intrinsic, static_cast<unsigned>(PropertyAttribute::DontEnum));
    FOR_EACH_ATOMICS_FUNC(PUT_DIRECT_NATIVE_FUNC)
#undef PUT_DIRECT_NATIVE_FUNC
}

namespace {

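// Casts the view to its concrete typed array type, coerces the operation's trailing JS arguments
// to numbers (which may throw), and applies the functor to the addressed element.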
template<typename Adaptor, typename Func>
EncodedJSValue atomicOperationWithArgsCase(ExecState* exec, const JSValue* args, ThrowScope& scope, JSArrayBufferView* typedArrayView, unsigned accessIndex, const Func& func)
{
    JSGenericTypedArrayView<Adaptor>* typedArray = jsCast<JSGenericTypedArrayView<Adaptor>*>(typedArrayView);

    double extraArgs[Func::numExtraArgs + 1]; // Add 1 to avoid 0 size array error in VS.
    for (unsigned i = 0; i < Func::numExtraArgs; ++i) {
        double value = args[2 + i].toInteger(exec);
        RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));
        extraArgs[i] = value;
    }

    return JSValue::encode(func(typedArray->typedVector() + accessIndex, extraArgs));
}

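// Coerces the access index to an integer and bounds-checks it against the view's length, throwing
// a RangeError (and returning 0) if it is not a valid in-bounds index.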
unsigned validatedAccessIndex(VM& vm, ExecState* exec, JSValue accessIndexValue, JSArrayBufferView* typedArrayView)
{
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (UNLIKELY(!accessIndexValue.isInt32())) {
        double accessIndexDouble = accessIndexValue.toNumber(exec);
        RETURN_IF_EXCEPTION(scope, 0);
        if (accessIndexDouble == 0)
            accessIndexValue = jsNumber(0);
        else {
            accessIndexValue = jsNumber(accessIndexDouble);
            if (!accessIndexValue.isInt32()) {
                throwRangeError(exec, scope, "Access index is not an integer."_s);
                return 0;
            }
        }
    }
    int32_t accessIndex = accessIndexValue.asInt32();

    ASSERT(typedArrayView->length() <= static_cast<unsigned>(INT_MAX));
    if (static_cast<unsigned>(accessIndex) >= typedArrayView->length()) {
        throwRangeError(exec, scope, "Access index out of bounds for atomic access."_s);
        return 0;
    }

    return accessIndex;
}

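// Shared entry point for the integer Atomics operations: checks that the first argument is an
// integer typed array backed by a SharedArrayBuffer, validates the access index, and then
// dispatches on the element type.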
template<typename Func>
EncodedJSValue atomicOperationWithArgs(VM& vm, ExecState* exec, const JSValue* args, const Func& func)
{
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSValue typedArrayValue = args[0];
    if (!typedArrayValue.isCell()) {
        throwTypeError(exec, scope, "Typed array argument must be a cell."_s);
        return JSValue::encode(jsUndefined());
    }

    JSCell* typedArrayCell = typedArrayValue.asCell();

    JSType type = typedArrayCell->type();
    switch (type) {
    case Int8ArrayType:
    case Int16ArrayType:
    case Int32ArrayType:
    case Uint8ArrayType:
    case Uint16ArrayType:
    case Uint32ArrayType:
        break;
    default:
        throwTypeError(exec, scope, "Typed array argument must be an Int8Array, Int16Array, Int32Array, Uint8Array, Uint16Array, or Uint32Array."_s);
        return JSValue::encode(jsUndefined());
    }

    JSArrayBufferView* typedArrayView = jsCast<JSArrayBufferView*>(typedArrayCell);
    if (!typedArrayView->isShared()) {
        throwTypeError(exec, scope, "Typed array argument must wrap a SharedArrayBuffer."_s);
        return JSValue::encode(jsUndefined());
    }

    unsigned accessIndex = validatedAccessIndex(vm, exec, args[1], typedArrayView);
    RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));

    switch (type) {
    case Int8ArrayType:
        return atomicOperationWithArgsCase<Int8Adaptor>(exec, args, scope, typedArrayView, accessIndex, func);
    case Int16ArrayType:
        return atomicOperationWithArgsCase<Int16Adaptor>(exec, args, scope, typedArrayView, accessIndex, func);
    case Int32ArrayType:
        return atomicOperationWithArgsCase<Int32Adaptor>(exec, args, scope, typedArrayView, accessIndex, func);
    case Uint8ArrayType:
        return atomicOperationWithArgsCase<Uint8Adaptor>(exec, args, scope, typedArrayView, accessIndex, func);
    case Uint16ArrayType:
        return atomicOperationWithArgsCase<Uint16Adaptor>(exec, args, scope, typedArrayView, accessIndex, func);
    case Uint32ArrayType:
        return atomicOperationWithArgsCase<Uint32Adaptor>(exec, args, scope, typedArrayView, accessIndex, func);
    default:
        RELEASE_ASSERT_NOT_REACHED();
        return JSValue::encode(jsUndefined());
    }
}

template<typename Func>
EncodedJSValue atomicOperationWithArgs(ExecState* exec, const Func& func)
{
    JSValue args[2 + Func::numExtraArgs];
    for (unsigned i = 2 + Func::numExtraArgs; i--;)
        args[i] = exec->argument(i);
    return atomicOperationWithArgs(exec->vm(), exec, args, func);
}

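// Each functor below states how many trailing JS arguments it consumes (numExtraArgs) and performs
// the corresponding atomic operation on the addressed typed-array element, returning the result as
// a JSValue.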
struct AddFunc {
    static const unsigned numExtraArgs = 1;

    template<typename T>
    JSValue operator()(T* ptr, const double* args) const
    {
        return jsNumber(WTF::atomicExchangeAdd(ptr, toInt32(args[0])));
    }
};

struct AndFunc {
    static const unsigned numExtraArgs = 1;

    template<typename T>
    JSValue operator()(T* ptr, const double* args) const
    {
        return jsNumber(WTF::atomicExchangeAnd(ptr, toInt32(args[0])));
    }
};

struct CompareExchangeFunc {
    static const unsigned numExtraArgs = 2;

    template<typename T>
    JSValue operator()(T* ptr, const double* args) const
    {
        T expected = static_cast<T>(toInt32(args[0]));
        T newValue = static_cast<T>(toInt32(args[1]));
        return jsNumber(WTF::atomicCompareExchangeStrong(ptr, expected, newValue));
    }
};

struct ExchangeFunc {
    static const unsigned numExtraArgs = 1;

    template<typename T>
    JSValue operator()(T* ptr, const double* args) const
    {
        return jsNumber(WTF::atomicExchange(ptr, static_cast<T>(toInt32(args[0]))));
    }
};

struct LoadFunc {
    static const unsigned numExtraArgs = 0;

    template<typename T>
    JSValue operator()(T* ptr, const double*) const
    {
        return jsNumber(WTF::atomicLoadFullyFenced(ptr));
    }
};

struct OrFunc {
    static const unsigned numExtraArgs = 1;

    template<typename T>
    JSValue operator()(T* ptr, const double* args) const
    {
        return jsNumber(WTF::atomicExchangeOr(ptr, toInt32(args[0])));
    }
};

struct StoreFunc {
    static const unsigned numExtraArgs = 1;

    template<typename T>
    JSValue operator()(T* ptr, const double* args) const
    {
        double valueAsInt = args[0];
        T valueAsT = static_cast<T>(toInt32(valueAsInt));
        WTF::atomicStoreFullyFenced(ptr, valueAsT);
        return jsNumber(valueAsInt);
    }
};

struct SubFunc {
    static const unsigned numExtraArgs = 1;

    template<typename T>
    JSValue operator()(T* ptr, const double* args) const
    {
        return jsNumber(WTF::atomicExchangeSub(ptr, toInt32(args[0])));
    }
};

struct XorFunc {
    static const unsigned numExtraArgs = 1;

    template<typename T>
    JSValue operator()(T* ptr, const double* args) const
    {
        return jsNumber(WTF::atomicExchangeXor(ptr, toInt32(args[0])));
    }
};

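// Atomics.isLockFree(): 1-, 2-, and 4-byte accesses are lock-free in this implementation.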
EncodedJSValue isLockFree(ExecState* exec, JSValue arg)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    int32_t size = arg.toInt32(exec);
    RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));

    bool result;
    switch (size) {
    case 1:
    case 2:
    case 4:
        result = true;
        break;
    default:
        result = false;
        break;
    }
    return JSValue::encode(jsBoolean(result));
}

} // anonymous namespace

EncodedJSValue JSC_HOST_CALL atomicsFuncAdd(ExecState* exec)
{
    return atomicOperationWithArgs(exec, AddFunc());
}

EncodedJSValue JSC_HOST_CALL atomicsFuncAnd(ExecState* exec)
{
    return atomicOperationWithArgs(exec, AndFunc());
}

EncodedJSValue JSC_HOST_CALL atomicsFuncCompareExchange(ExecState* exec)
{
    return atomicOperationWithArgs(exec, CompareExchangeFunc());
}

EncodedJSValue JSC_HOST_CALL atomicsFuncExchange(ExecState* exec)
{
    return atomicOperationWithArgs(exec, ExchangeFunc());
}

EncodedJSValue JSC_HOST_CALL atomicsFuncIsLockFree(ExecState* exec)
{
    return isLockFree(exec, exec->argument(0));
}

EncodedJSValue JSC_HOST_CALL atomicsFuncLoad(ExecState* exec)
{
    return atomicOperationWithArgs(exec, LoadFunc());
}

EncodedJSValue JSC_HOST_CALL atomicsFuncOr(ExecState* exec)
{
    return atomicOperationWithArgs(exec, OrFunc());
}

EncodedJSValue JSC_HOST_CALL atomicsFuncStore(ExecState* exec)
{
    return atomicOperationWithArgs(exec, StoreFunc());
}

EncodedJSValue JSC_HOST_CALL atomicsFuncSub(ExecState* exec)
{
    return atomicOperationWithArgs(exec, SubFunc());
}

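// Atomics.wait(): after validating the shared Int32Array, index, expected value, and timeout,
// parks the calling thread on the element's address via ParkingLot. Returns "not-equal" if the
// element did not hold the expected value, "timed-out" if the wait expired, and "ok" otherwise.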
EncodedJSValue JSC_HOST_CALL atomicsFuncWait(ExecState* exec)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSInt32Array* typedArray = jsDynamicCast<JSInt32Array*>(vm, exec->argument(0));
    if (!typedArray) {
        throwTypeError(exec, scope, "Typed array for wait/wake must be an Int32Array."_s);
        return JSValue::encode(jsUndefined());
    }

    if (!typedArray->isShared()) {
        throwTypeError(exec, scope, "Typed array for wait/wake must wrap a SharedArrayBuffer."_s);
        return JSValue::encode(jsUndefined());
    }

    unsigned accessIndex = validatedAccessIndex(vm, exec, exec->argument(1), typedArray);
    RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));

    int32_t* ptr = typedArray->typedVector() + accessIndex;

    int32_t expectedValue = exec->argument(2).toInt32(exec);
    RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));

    double timeoutInMilliseconds = exec->argument(3).toNumber(exec);
    RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));

    if (!vm.m_typedArrayController->isAtomicsWaitAllowedOnCurrentThread()) {
        throwTypeError(exec, scope, "Atomics.wait cannot be called from the current thread."_s);
        return JSValue::encode(jsUndefined());
    }

    Seconds timeout = Seconds::fromMilliseconds(timeoutInMilliseconds);

    // This covers the proposed rule:
    //
    // 4. If timeout is not provided or is undefined then let t be +inf. Otherwise:
    //     a. Let q be ? ToNumber(timeout).
    //     b. If q is NaN then let t be +inf, otherwise let t be max(0, q).
    //
    // exec->argument(3) returns undefined if it's not provided and ToNumber(undefined) returns NaN,
    // so NaN is the only special case.
    if (!std::isnan(timeoutInMilliseconds))
        timeout = std::max(0_s, timeout);
    else
        timeout = Seconds::infinity();

    bool didPassValidation = false;
    ParkingLot::ParkResult result;
    {
        ReleaseHeapAccessScope releaseHeapAccessScope(vm.heap);
        result = ParkingLot::parkConditionally(
            ptr,
            [&] () -> bool {
                didPassValidation = WTF::atomicLoad(ptr) == expectedValue;
                return didPassValidation;
            },
            [] () { },
            MonotonicTime::now() + timeout);
    }
    const char* resultString;
    if (!didPassValidation)
        resultString = "not-equal";
    else if (!result.wasUnparked)
        resultString = "timed-out";
    else
        resultString = "ok";
    return JSValue::encode(jsString(exec, resultString));
}

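// Atomics.wake(): unparks up to `count` threads waiting on the element's address (all waiters if
// the count argument is undefined) and returns the number of threads woken.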
EncodedJSValue JSC_HOST_CALL atomicsFuncWake(ExecState* exec)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSInt32Array* typedArray = jsDynamicCast<JSInt32Array*>(vm, exec->argument(0));
    if (!typedArray) {
        throwTypeError(exec, scope, "Typed array for wait/wake must be an Int32Array."_s);
        return JSValue::encode(jsUndefined());
    }

    if (!typedArray->isShared()) {
        throwTypeError(exec, scope, "Typed array for wait/wake must wrap a SharedArrayBuffer."_s);
        return JSValue::encode(jsUndefined());
    }

    unsigned accessIndex = validatedAccessIndex(vm, exec, exec->argument(1), typedArray);
    RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));

    int32_t* ptr = typedArray->typedVector() + accessIndex;

    JSValue countValue = exec->argument(2);
    unsigned count = UINT_MAX;
    if (!countValue.isUndefined()) {
        int32_t countInt = countValue.toInt32(exec);
        RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));
        count = std::max(0, countInt);
    }

    return JSValue::encode(jsNumber(ParkingLot::unparkCount(ptr, count)));
}

EncodedJSValue JSC_HOST_CALL atomicsFuncXor(ExecState* exec)
{
    return atomicOperationWithArgs(exec, XorFunc());
}

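// Slow-path entry points used by JIT-generated code for the Atomics intrinsics. They decode the
// boxed arguments and reuse the helpers above.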
EncodedJSValue JIT_OPERATION operationAtomicsAdd(ExecState* exec, EncodedJSValue base, EncodedJSValue index, EncodedJSValue operand)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue args[] = {JSValue::decode(base), JSValue::decode(index), JSValue::decode(operand)};
    return atomicOperationWithArgs(vm, exec, args, AddFunc());
}

EncodedJSValue JIT_OPERATION operationAtomicsAnd(ExecState* exec, EncodedJSValue base, EncodedJSValue index, EncodedJSValue operand)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue args[] = {JSValue::decode(base), JSValue::decode(index), JSValue::decode(operand)};
    return atomicOperationWithArgs(vm, exec, args, AndFunc());
}

EncodedJSValue JIT_OPERATION operationAtomicsCompareExchange(ExecState* exec, EncodedJSValue base, EncodedJSValue index, EncodedJSValue expected, EncodedJSValue newValue)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue args[] = {JSValue::decode(base), JSValue::decode(index), JSValue::decode(expected), JSValue::decode(newValue)};
    return atomicOperationWithArgs(vm, exec, args, CompareExchangeFunc());
}

EncodedJSValue JIT_OPERATION operationAtomicsExchange(ExecState* exec, EncodedJSValue base, EncodedJSValue index, EncodedJSValue operand)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue args[] = {JSValue::decode(base), JSValue::decode(index), JSValue::decode(operand)};
    return atomicOperationWithArgs(vm, exec, args, ExchangeFunc());
}

EncodedJSValue JIT_OPERATION operationAtomicsIsLockFree(ExecState* exec, EncodedJSValue size)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    return isLockFree(exec, JSValue::decode(size));
}

EncodedJSValue JIT_OPERATION operationAtomicsLoad(ExecState* exec, EncodedJSValue base, EncodedJSValue index)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue args[] = {JSValue::decode(base), JSValue::decode(index)};
    return atomicOperationWithArgs(vm, exec, args, LoadFunc());
}

EncodedJSValue JIT_OPERATION operationAtomicsOr(ExecState* exec, EncodedJSValue base, EncodedJSValue index, EncodedJSValue operand)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue args[] = {JSValue::decode(base), JSValue::decode(index), JSValue::decode(operand)};
    return atomicOperationWithArgs(vm, exec, args, OrFunc());
}

EncodedJSValue JIT_OPERATION operationAtomicsStore(ExecState* exec, EncodedJSValue base, EncodedJSValue index, EncodedJSValue operand)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue args[] = {JSValue::decode(base), JSValue::decode(index), JSValue::decode(operand)};
    return atomicOperationWithArgs(vm, exec, args, StoreFunc());
}

EncodedJSValue JIT_OPERATION operationAtomicsSub(ExecState* exec, EncodedJSValue base, EncodedJSValue index, EncodedJSValue operand)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue args[] = {JSValue::decode(base), JSValue::decode(index), JSValue::decode(operand)};
    return atomicOperationWithArgs(vm, exec, args, SubFunc());
}

EncodedJSValue JIT_OPERATION operationAtomicsXor(ExecState* exec, EncodedJSValue base, EncodedJSValue index, EncodedJSValue operand)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue args[] = {JSValue::decode(base), JSValue::decode(index), JSValue::decode(operand)};
    return atomicOperationWithArgs(vm, exec, args, XorFunc());
}

} // namespace JSC