1/*
2 * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "JITOperations.h"
28
29#if ENABLE(JIT)
30
31#include "ArithProfile.h"
32#include "ArrayConstructor.h"
33#include "CommonSlowPaths.h"
34#include "DFGCompilationMode.h"
35#include "DFGDriver.h"
36#include "DFGOSREntry.h"
37#include "DFGThunks.h"
38#include "DFGWorklist.h"
39#include "Debugger.h"
40#include "DirectArguments.h"
41#include "Error.h"
42#include "ErrorHandlingScope.h"
43#include "EvalCodeBlock.h"
44#include "ExceptionFuzz.h"
45#include "ExecutableBaseInlines.h"
46#include "FTLOSREntry.h"
47#include "FrameTracers.h"
48#include "FunctionCodeBlock.h"
49#include "GetterSetter.h"
50#include "HostCallReturnValue.h"
51#include "ICStats.h"
52#include "Interpreter.h"
53#include "JIT.h"
54#include "JITExceptions.h"
55#include "JITToDFGDeferredCompilationCallback.h"
56#include "JSAsyncFunction.h"
57#include "JSAsyncGeneratorFunction.h"
58#include "JSCInlines.h"
59#include "JSCPtrTag.h"
60#include "JSGeneratorFunction.h"
61#include "JSGlobalObjectFunctions.h"
62#include "JSLexicalEnvironment.h"
63#include "JSWithScope.h"
64#include "ModuleProgramCodeBlock.h"
65#include "ObjectConstructor.h"
66#include "PolymorphicAccess.h"
67#include "ProgramCodeBlock.h"
68#include "PropertyName.h"
69#include "RegExpObject.h"
70#include "Repatch.h"
71#include "ScopedArguments.h"
72#include "ShadowChicken.h"
73#include "StructureStubInfo.h"
74#include "SuperSampler.h"
75#include "TestRunnerUtils.h"
76#include "ThunkGenerators.h"
77#include "TypeProfilerLog.h"
78#include "VMInlines.h"
79#include "WebAssemblyFunction.h"
80#include <wtf/InlineASM.h>
81
82namespace JSC {
83
84extern "C" {
85
86#if COMPILER(MSVC)
87void * _ReturnAddress(void);
88#pragma intrinsic(_ReturnAddress)
89
90#define OUR_RETURN_ADDRESS _ReturnAddress()
91#else
92#define OUR_RETURN_ADDRESS __builtin_return_address(0)
93#endif
94
95#if ENABLE(OPCODE_SAMPLING)
96#define CTI_SAMPLER vm->interpreter->sampler()
97#else
98#define CTI_SAMPLER 0
99#endif
100
101
// Throws a stack-overflow error on behalf of JIT code whose frame was never
// fully set up. The caller passes its own CodeBlock explicitly because the
// call frame cannot be trusted to contain one yet.
void JIT_OPERATION operationThrowStackOverflowError(ExecState* exec, CodeBlock* codeBlock)
{
    // We pass in our own code block, because the callframe hasn't been populated.
    VM* vm = codeBlock->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);
    // Convert the partially-constructed frame into a well-formed
    // stack-overflow frame *before* installing the tracer, so that stack
    // walking during the throw sees a sane top frame.
    exec->convertToStackOverflowFrame(*vm, codeBlock);
    NativeCallFrameTracer tracer(vm, exec);
    throwStackOverflowError(exec, scope);
}
111
// Arity-check slow path for calls. Returns the number of missing arguments
// the arity-fixup thunk must pad. A negative result means padding would
// overflow the stack; in that case a stack-overflow error has been thrown.
int32_t JIT_OPERATION operationCallArityCheck(ExecState* exec)
{
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForCall);
    if (UNLIKELY(missingArgCount < 0)) {
        // Rebuild the frame as a stack-overflow frame before installing the
        // tracer, then throw from the VM's notion of the top call frame.
        CodeBlock* codeBlock = CommonSlowPaths::codeBlockFromCallFrameCallee(exec, CodeForCall);
        exec->convertToStackOverflowFrame(*vm, codeBlock);
        NativeCallFrameTracer tracer(vm, exec);
        throwStackOverflowError(vm->topCallFrame, scope);
    }

    return missingArgCount;
}
127
128int32_t JIT_OPERATION operationConstructArityCheck(ExecState* exec)
129{
130 VM* vm = &exec->vm();
131 auto scope = DECLARE_THROW_SCOPE(*vm);
132
133 int32_t missingArgCount = CommonSlowPaths::arityCheckFor(exec, *vm, CodeForConstruct);
134 if (UNLIKELY(missingArgCount < 0)) {
135 CodeBlock* codeBlock = CommonSlowPaths::codeBlockFromCallFrameCallee(exec, CodeForConstruct);
136 exec->convertToStackOverflowFrame(*vm, codeBlock);
137 NativeCallFrameTracer tracer(vm, exec);
138 throwStackOverflowError(vm->topCallFrame, scope);
139 }
140
141 return missingArgCount;
142}
143
// Generic slow path for try_get_by_id: performs a VMInquiry lookup (the
// Optimize variant below only caches non-opaque slots, so this path is not
// expected to run arbitrary JS) and returns the slot's "pure" result.
EncodedJSValue JIT_OPERATION operationTryGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);
    // Record that this IC took the slow path so it stops trying to repatch.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
    baseValue.getPropertySlot(exec, ident, slot);

    return JSValue::encode(slot.getPureResult());
}
157
158
159EncodedJSValue JIT_OPERATION operationTryGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
160{
161 VM* vm = &exec->vm();
162 NativeCallFrameTracer tracer(vm, exec);
163 Identifier ident = Identifier::fromUid(vm, uid);
164
165 JSValue baseValue = JSValue::decode(base);
166 PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);
167 baseValue.getPropertySlot(exec, ident, slot);
168
169 return JSValue::encode(slot.getPureResult());
170}
171
// IC slow path for try_get_by_id that also attempts to (re)patch the stub.
EncodedJSValue JIT_OPERATION operationTryGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::VMInquiry);

    baseValue.getPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // Only cache lookups that stayed introspectable: the slot must not be
    // tainted by an opaque object, and must be a plain value, a getter, or a
    // miss (unset).
    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()) && !slot.isTaintedByOpaqueObject() && (slot.isCacheableValue() || slot.isCacheableGetter() || slot.isUnset()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Try);

    return JSValue::encode(slot.getPureResult());
}
190
// Generic slow path for get_by_id_direct: an own-property-only lookup
// (GetOwnProperty); a miss yields jsUndefined() rather than walking the
// prototype chain.
EncodedJSValue JIT_OPERATION operationGetByIdDirect(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);
    // Record that this IC took the slow path so it stops trying to repatch.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // getValue may invoke a getter and throw; RELEASE_AND_RETURN leaves the
    // exception check to the caller.
    RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
}
207
208EncodedJSValue JIT_OPERATION operationGetByIdDirectGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
209{
210 VM& vm = exec->vm();
211 NativeCallFrameTracer tracer(&vm, exec);
212 auto scope = DECLARE_THROW_SCOPE(vm);
213 Identifier ident = Identifier::fromUid(&vm, uid);
214
215 JSValue baseValue = JSValue::decode(base);
216 PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);
217
218 bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
219 RETURN_IF_EXCEPTION(scope, encodedJSValue());
220
221 RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
222}
223
// IC slow path for get_by_id_direct that also attempts to (re)patch the stub.
EncodedJSValue JIT_OPERATION operationGetByIdDirectOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::GetOwnProperty);

    bool found = baseValue.getOwnPropertySlot(exec, ident, slot);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    // Attempt to specialize the IC for the structure we just saw.
    if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
        repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Direct);

    RELEASE_AND_RETURN(scope, JSValue::encode(found ? slot.getValue(exec, ident) : jsUndefined()));
}
242
243EncodedJSValue JIT_OPERATION operationGetById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
244{
245 SuperSamplerScope superSamplerScope(false);
246
247 VM* vm = &exec->vm();
248 NativeCallFrameTracer tracer(vm, exec);
249
250 stubInfo->tookSlowPath = true;
251
252 JSValue baseValue = JSValue::decode(base);
253 PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
254 Identifier ident = Identifier::fromUid(vm, uid);
255 JSValue result = baseValue.get(exec, ident, slot);
256
257 LOG_IC((ICEvent::OperationGetById, baseValue.classInfoOrNull(*vm), ident, baseValue == slot.slotBase()));
258
259 return JSValue::encode(result);
260}
261
// Fully generic get_by_id slow path: no StructureStubInfo, so no IC
// bookkeeping; just an ordinary Get lookup.
EncodedJSValue JIT_OPERATION operationGetByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue baseValue = JSValue::decode(base);
    PropertySlot slot(baseValue, PropertySlot::InternalMethodType::Get);
    Identifier ident = Identifier::fromUid(vm, uid);
    JSValue result = baseValue.get(exec, ident, slot);

    LOG_IC((ICEvent::OperationGetByIdGeneric, baseValue.classInfoOrNull(*vm), ident, baseValue == slot.slotBase()));

    return JSValue::encode(result);
}
278
// IC slow path for get_by_id that also attempts to (re)patch the stub.
EncodedJSValue JIT_OPERATION operationGetByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);

    // The callback form of getPropertySlot hands us the live PropertySlot, so
    // we can repatch the IC before materializing the result value.
    return JSValue::encode(baseValue.getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {

        LOG_IC((ICEvent::OperationGetByIdOptimize, baseValue.classInfoOrNull(*vm), ident, baseValue == slot.slotBase()));

        // Attempt to specialize the IC for the structure we just saw.
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::Normal);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
298
299EncodedJSValue JIT_OPERATION operationGetByIdWithThis(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
300{
301 SuperSamplerScope superSamplerScope(false);
302
303 VM* vm = &exec->vm();
304 NativeCallFrameTracer tracer(vm, exec);
305 Identifier ident = Identifier::fromUid(vm, uid);
306
307 stubInfo->tookSlowPath = true;
308
309 JSValue baseValue = JSValue::decode(base);
310 JSValue thisValue = JSValue::decode(thisEncoded);
311 PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
312
313 return JSValue::encode(baseValue.get(exec, ident, slot));
314}
315
316EncodedJSValue JIT_OPERATION operationGetByIdWithThisGeneric(ExecState* exec, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
317{
318 SuperSamplerScope superSamplerScope(false);
319
320 VM* vm = &exec->vm();
321 NativeCallFrameTracer tracer(vm, exec);
322 Identifier ident = Identifier::fromUid(vm, uid);
323
324 JSValue baseValue = JSValue::decode(base);
325 JSValue thisValue = JSValue::decode(thisEncoded);
326 PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
327
328 return JSValue::encode(baseValue.get(exec, ident, slot));
329}
330
// IC slow path for get_by_id_with_this that also attempts to (re)patch the
// stub. The lookup starts at the base with |this| as the receiver.
EncodedJSValue JIT_OPERATION operationGetByIdWithThisOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, EncodedJSValue thisEncoded, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    Identifier ident = Identifier::fromUid(vm, uid);

    JSValue baseValue = JSValue::decode(base);
    JSValue thisValue = JSValue::decode(thisEncoded);

    // The PropertySlot's receiver is |this|, not the base.
    PropertySlot slot(thisValue, PropertySlot::InternalMethodType::Get);
    return JSValue::encode(baseValue.getPropertySlot(exec, ident, slot, [&] (bool found, PropertySlot& slot) -> JSValue {
        LOG_IC((ICEvent::OperationGetByIdWithThisOptimize, baseValue.classInfoOrNull(*vm), ident, baseValue == slot.slotBase()));

        // Attempt to specialize the IC for the structure we just saw.
        if (stubInfo->considerCaching(exec->codeBlock(), baseValue.structureOrNull()))
            repatchGetByID(exec, baseValue, ident, slot, *stubInfo, GetByIDKind::WithThis);
        return found ? slot.getValue(exec, ident) : jsUndefined();
    }));
}
351
// Generic slow path for in_by_id once the IC has given up. Throws if the
// base is not an object (per the semantics of the "in" operator).
EncodedJSValue JIT_OPERATION operationInById(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    // Record that this IC took the slow path so it stops trying to repatch.
    stubInfo->tookSlowPath = true;

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    if (!baseValue.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInById, baseObject->classInfo(vm), ident));

    // The HasProperty lookup below may throw; release the scope and let the
    // JIT caller perform the exception check.
    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    return JSValue::encode(jsBoolean(baseObject->getPropertySlot(exec, ident, slot)));
}
377
// Fully generic in_by_id slow path: identical to operationInById but with no
// StructureStubInfo, so there is no IC bookkeeping.
EncodedJSValue JIT_OPERATION operationInByIdGeneric(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    // "in" requires an object on the right-hand side.
    if (!baseValue.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInByIdGeneric, baseObject->classInfo(vm), ident));

    // The lookup may throw; the JIT caller performs the exception check.
    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    return JSValue::encode(jsBoolean(baseObject->getPropertySlot(exec, ident, slot)));
}
401
// IC slow path for in_by_id that also attempts to (re)patch the stub.
EncodedJSValue JIT_OPERATION operationInByIdOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue base, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);

    JSValue baseValue = JSValue::decode(base);
    // "in" requires an object on the right-hand side.
    if (!baseValue.isObject()) {
        throwException(exec, scope, createInvalidInParameterError(exec, baseValue));
        return JSValue::encode(jsUndefined());
    }
    JSObject* baseObject = asObject(baseValue);

    LOG_IC((ICEvent::OperationInByIdOptimize, baseObject->classInfo(vm), ident));

    // The lookup may throw; the JIT caller performs the exception check.
    scope.release();
    PropertySlot slot(baseObject, PropertySlot::InternalMethodType::HasProperty);
    bool found = baseObject->getPropertySlot(exec, ident, slot);
    // Attempt to specialize the "in" IC for the structure we just saw.
    if (stubInfo->considerCaching(exec->codeBlock(), baseObject->structure(vm)))
        repatchInByID(exec, baseObject, ident, found, slot, *stubInfo);
    return JSValue::encode(jsBoolean(found));
}
428
429EncodedJSValue JIT_OPERATION operationInByVal(ExecState* exec, JSCell* base, EncodedJSValue key)
430{
431 SuperSamplerScope superSamplerScope(false);
432
433 VM* vm = &exec->vm();
434 NativeCallFrameTracer tracer(vm, exec);
435
436 return JSValue::encode(jsBoolean(CommonSlowPaths::opInByVal(exec, base, JSValue::decode(key))));
437}
438
439void JIT_OPERATION operationPutByIdStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
440{
441 SuperSamplerScope superSamplerScope(false);
442
443 VM* vm = &exec->vm();
444 NativeCallFrameTracer tracer(vm, exec);
445
446 stubInfo->tookSlowPath = true;
447
448 JSValue baseValue = JSValue::decode(encodedBase);
449 Identifier ident = Identifier::fromUid(vm, uid);
450 PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
451 baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);
452
453 LOG_IC((ICEvent::OperationPutByIdStrict, baseValue.classInfoOrNull(*vm), ident, slot.base() == baseValue));
454}
455
// Generic slow path for a sloppy-mode put_by_id once the IC has given up.
void JIT_OPERATION operationPutByIdNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    // Record that this IC took the slow path so it stops trying to repatch.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(vm, uid);
    // Second constructor argument is false: non-strict (sloppy) put.
    PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
    baseValue.putInline(exec, ident, JSValue::decode(encodedValue), slot);

    LOG_IC((ICEvent::OperationPutByIdNonStrict, baseValue.classInfoOrNull(*vm), ident, slot.base() == baseValue));
}
472
// Generic slow path for a strict-mode direct put_by_id (defines on the base
// object itself via CommonSlowPaths::putDirectWithReify). asObject implies
// the JIT guarantees the base is an object here.
void JIT_OPERATION operationPutByIdDirectStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    // Record that this IC took the slow path so it stops trying to repatch.
    stubInfo->tookSlowPath = true;

    JSValue baseValue = JSValue::decode(encodedBase);
    Identifier ident = Identifier::fromUid(&vm, uid);
    // Second constructor argument is true: strict-mode put.
    PutPropertySlot slot(baseValue, true, exec->codeBlock()->putByIdContext());
    CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, JSValue::decode(encodedValue), slot);

    LOG_IC((ICEvent::OperationPutByIdDirectStrict, baseValue.classInfoOrNull(vm), ident, slot.base() == baseValue));
}
489
490void JIT_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
491{
492 SuperSamplerScope superSamplerScope(false);
493
494 VM& vm = exec->vm();
495 NativeCallFrameTracer tracer(&vm, exec);
496
497 stubInfo->tookSlowPath = true;
498
499 JSValue baseValue = JSValue::decode(encodedBase);
500 Identifier ident = Identifier::fromUid(&vm, uid);
501 PutPropertySlot slot(baseValue, false, exec->codeBlock()->putByIdContext());
502 CommonSlowPaths::putDirectWithReify(vm, exec, asObject(baseValue), ident, JSValue::decode(encodedValue), slot);
503
504 LOG_IC((ICEvent::OperationPutByIdDirectNonStrict, baseValue.classInfoOrNull(vm), ident, slot.base() == baseValue));
505}
506
// IC slow path for a strict-mode put_by_id that also attempts to (re)patch
// the stub.
void JIT_OPERATION operationPutByIdStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Remember the stub's access type so we can detect it being changed
    // underneath us by the put below (e.g. by reentrant code).
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, true, codeBlock->putByIdContext());

    // Capture the structure *before* the put: a transition rewrites it, and
    // the IC wants the pre-put structure.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    LOG_IC((ICEvent::OperationPutByIdStrictOptimize, baseValue.classInfoOrNull(*vm), ident, slot.base() == baseValue));

    RETURN_IF_EXCEPTION(scope, void());

    // If the stub was changed while we were putting, leave it alone.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
536
// IC slow path for a sloppy-mode put_by_id that also attempts to (re)patch
// the stub. Mirrors operationPutByIdStrictOptimize with strict=false.
void JIT_OPERATION operationPutByIdNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    Identifier ident = Identifier::fromUid(vm, uid);
    // Remember the stub's access type to detect concurrent stub changes.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSValue baseValue = JSValue::decode(encodedBase);
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseValue, false, codeBlock->putByIdContext());

    // Capture the pre-put structure; a transition would rewrite it.
    Structure* structure = baseValue.isCell() ? baseValue.asCell()->structure(*vm) : nullptr;
    baseValue.putInline(exec, ident, value, slot);

    LOG_IC((ICEvent::OperationPutByIdNonStrictOptimize, baseValue.classInfoOrNull(*vm), ident, slot.base() == baseValue));

    RETURN_IF_EXCEPTION(scope, void());

    // If the stub was changed while we were putting, leave it alone.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseValue, structure, ident, slot, *stubInfo, NotDirect);
}
566
// IC slow path for a strict-mode *direct* put_by_id that also attempts to
// (re)patch the stub. The base is guaranteed to be an object (asObject).
void JIT_OPERATION operationPutByIdDirectStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);
    // Remember the stub's access type to detect concurrent stub changes.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, true, codeBlock->putByIdContext());
    // putDirectWithReify reports the pre-put structure through this out-param.
    Structure* structure = nullptr;
    CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, ident, value, slot, &structure);

    LOG_IC((ICEvent::OperationPutByIdDirectStrictOptimize, baseObject->classInfo(vm), ident, slot.base() == baseObject));

    RETURN_IF_EXCEPTION(scope, void());

    // If the stub was changed while we were putting, leave it alone.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
595
// IC slow path for a sloppy-mode *direct* put_by_id that also attempts to
// (re)patch the stub. Mirrors the strict variant with strict=false.
void JIT_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedBase, UniquedStringImpl* uid)
{
    SuperSamplerScope superSamplerScope(false);

    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    Identifier ident = Identifier::fromUid(&vm, uid);
    // Remember the stub's access type to detect concurrent stub changes.
    AccessType accessType = static_cast<AccessType>(stubInfo->accessType);

    JSValue value = JSValue::decode(encodedValue);
    JSObject* baseObject = asObject(JSValue::decode(encodedBase));
    CodeBlock* codeBlock = exec->codeBlock();
    PutPropertySlot slot(baseObject, false, codeBlock->putByIdContext());
    // putDirectWithReify reports the pre-put structure through this out-param.
    Structure* structure = nullptr;
    CommonSlowPaths::putDirectWithReify(vm, exec, baseObject, ident, value, slot, &structure);

    LOG_IC((ICEvent::OperationPutByIdDirectNonStrictOptimize, baseObject->classInfo(vm), ident, slot.base() == baseObject));

    RETURN_IF_EXCEPTION(scope, void());

    // If the stub was changed while we were putting, leave it alone.
    if (accessType != static_cast<AccessType>(stubInfo->accessType))
        return;

    if (stubInfo->considerCaching(codeBlock, structure))
        repatchPutByID(exec, baseObject, structure, ident, slot, *stubInfo, Direct);
}
624
625ALWAYS_INLINE static bool isStringOrSymbol(JSValue value)
626{
627 return value.isString() || value.isSymbol();
628}
629
// Shared slow-path implementation of a (non-direct) put-by-val: fast path
// for int32 array indices, falling back to a generic property-key put.
static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    if (LIKELY(subscript.isUInt32())) {
        // isUInt32 means a boxed non-negative int32 — always a valid index.
        byValInfo->tookSlowPath = true;
        uint32_t i = subscript.asUInt32();
        if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canSetIndexQuickly(i)) {
                object->setIndexQuickly(vm, i, value);
                return;
            }

            // FIXME: This will make us think that in-bounds typed array accesses are actually
            // out-of-bounds.
            // https://bugs.webkit.org/show_bug.cgi?id=149886
            byValInfo->arrayProfile->setOutOfBounds();
            scope.release();
            object->methodTable(vm)->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
            return;
        }

        // Non-object base with an index subscript: generic indexed put.
        scope.release();
        baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
        return;
    } else if (subscript.isInt32()) {
        // Negative int32 subscript: not a valid array index; record it in the
        // array profile as out-of-bounds.
        byValInfo->tookSlowPath = true;
        if (baseValue.isObject())
            byValInfo->arrayProfile->setOutOfBounds();
    }

    auto property = subscript.toPropertyKey(callFrame);
    // Don't put to an object if toString threw an exception.
    RETURN_IF_EXCEPTION(scope, void());

    // If a stub exists but was compiled for a different cached identifier (or
    // for a non-identifier subscript), mark the slow path so the IC gives up.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseValue, callFrame->codeBlock()->isStrictMode());
    baseValue.putInline(callFrame, property, value, slot);
}
673
// Shared slow-path implementation of a *direct* put-by-val: always defines
// on baseObject itself (putDirectIndex / putDirectWithReify), never the
// prototype chain.
static void directPutByVal(CallFrame* callFrame, JSObject* baseObject, JSValue subscript, JSValue value, ByValInfo* byValInfo)
{
    VM& vm = callFrame->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    bool isStrictMode = callFrame->codeBlock()->isStrictMode();

    if (LIKELY(subscript.isUInt32())) {
        // Despite its name, JSValue::isUInt32 will return true only for positive boxed int32_t; all those values are valid array indices.
        byValInfo->tookSlowPath = true;
        uint32_t index = subscript.asUInt32();
        ASSERT(isIndex(index));

        switch (baseObject->indexingType()) {
        case ALL_INT32_INDEXING_TYPES:
        case ALL_DOUBLE_INDEXING_TYPES:
        case ALL_CONTIGUOUS_INDEXING_TYPES:
        case ALL_ARRAY_STORAGE_INDEXING_TYPES:
            // In-bounds for the current butterfly: no profiling needed.
            if (index < baseObject->butterfly()->vectorLength())
                break;
            FALLTHROUGH;
        default:
            byValInfo->arrayProfile->setOutOfBounds();
            break;
        }

        scope.release();
        baseObject->putDirectIndex(callFrame, index, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    if (subscript.isDouble()) {
        // A double that is exactly representable as a valid uint32 index can
        // still take the indexed path.
        double subscriptAsDouble = subscript.asDouble();
        uint32_t subscriptAsUInt32 = static_cast<uint32_t>(subscriptAsDouble);
        if (subscriptAsDouble == subscriptAsUInt32 && isIndex(subscriptAsUInt32)) {
            byValInfo->tookSlowPath = true;
            scope.release();
            baseObject->putDirectIndex(callFrame, subscriptAsUInt32, value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
            return;
        }
    }

    // Don't put to an object if toString threw an exception.
    auto property = subscript.toPropertyKey(callFrame);
    RETURN_IF_EXCEPTION(scope, void());

    // A string key that parses as an index (e.g. "3") must go through the
    // indexed put as well.
    if (Optional<uint32_t> index = parseIndex(property)) {
        byValInfo->tookSlowPath = true;
        scope.release();
        baseObject->putDirectIndex(callFrame, index.value(), value, 0, isStrictMode ? PutDirectIndexShouldThrow : PutDirectIndexShouldNotThrow);
        return;
    }

    // If a stub exists but was compiled for a different cached identifier (or
    // for a non-identifier subscript), mark the slow path so the IC gives up.
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    scope.release();
    PutPropertySlot slot(baseObject, isStrictMode);
    CommonSlowPaths::putDirectWithReify(vm, callFrame, baseObject, property, value, slot);
}
733
// Outcome of a by-val IC optimization attempt.
enum class OptimizationResult {
    NotOptimized, // Nothing was patched this time; keep trying on later hits.
    SeenOnce,     // First sighting of an identifier subscript; cached for next time.
    Optimized,    // A specialized stub was compiled and patched in.
    GiveUp,       // Site looks unprofitable; fall back to the generic path for good.
};
740
// Decides whether to specialize a put_by_val IC site after a slow-path hit:
// compiles an array-shape-specific stub for int32 subscripts, or a
// cached-identifier stub for a string/symbol subscript seen a second time.
static OptimizationResult tryPutByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    // Copy-on-write arrays must not be written through a specialized stub.
    if (baseValue.isObject() && isCopyOnWrite(baseValue.getObject()->indexingMode()))
        return OptimizationResult::GiveUp;

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPut(arrayMode) && arrayMode != byValInfo->arrayMode) {
                // Update the profile and compile the stub under the code
                // block's lock.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);
                JIT::compilePutByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, OptimizationResult::GiveUp);
        // Index-like string keys are handled by the array paths, not by-id.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: compile a by-id stub.
                    JIT::compilePutByValWithCachedId<OpPutByVal>(&vm, exec->codeBlock(), byValInfo, returnAddress, NotDirect, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First sighting: remember the identifier (keeping symbols
                // alive via cachedSymbol) and wait for a repeat.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
814
// Slow path for put_by_val that attempts to patch the calling byval stub via
// tryPutByValOptimize. If the optimizer gives up permanently, the call site is
// repatched to operationPutByValGeneric so we never come back here.
void JIT_OPERATION operationPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    // tryPutByValOptimize can throw (it may convert the subscript to a
    // property key), so check for exceptions before doing the actual put.
    OptimizationResult result = tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
    RETURN_IF_EXCEPTION(scope, void());
    if (result == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationPutByValGeneric);
    }
    RELEASE_AND_RETURN(scope, putByVal(exec, baseValue, subscript, value, byValInfo));
}
833
// Attempts to patch the calling put_by_val_direct stub so subsequent puts take
// a fast path. Results:
// - Optimized: a specialized stub was compiled and the caller patched.
// - SeenOnce: recorded a cached property id; a stub may be compiled next visit.
// - GiveUp: the site looks polymorphic/unprofitable; caller should repatch to generic.
// - NotOptimized: nothing changed this time.
static OptimizationResult tryDirectPutByValOptimize(ExecState* exec, JSObject* object, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        Structure* structure = object->structure(vm);
        if (hasOptimizableIndexing(structure)) {
            // Attempt to optimize. Only recompile when the observed array mode
            // differs from the mode the info was previously compiled for.
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (jitArrayModePermitsPutDirect(arrayMode) && arrayMode != byValInfo->arrayMode) {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileDirectPutByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    } else if (isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, OptimizationResult::GiveUp);
        // Only cache named (non-index) properties; index-like strings fall
        // through unoptimized.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    JIT::compilePutByValWithCachedId<OpPutByValDirect>(&vm, exec->codeBlock(), byValInfo, returnAddress, Direct, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the put_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
901
// Direct (own-property, put_by_val_direct) counterpart of
// operationPutByValOptimize. The base must be an object (RELEASE_ASSERT below).
void JIT_OPERATION operationDirectPutByValOptimize(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSValue baseValue = JSValue::decode(encodedBaseValue);
    JSValue subscript = JSValue::decode(encodedSubscript);
    JSValue value = JSValue::decode(encodedValue);
    RELEASE_ASSERT(baseValue.isObject());
    JSObject* object = asObject(baseValue);
    // tryDirectPutByValOptimize can throw while converting the subscript to a
    // property key.
    OptimizationResult result = tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
    RETURN_IF_EXCEPTION(scope, void());
    if (result == OptimizationResult::GiveUp) {
        // Don't ever try to optimize.
        byValInfo->tookSlowPath = true;
        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationDirectPutByValGeneric);
    }

    RELEASE_AND_RETURN(scope, directPutByVal(exec, object, subscript, value, byValInfo));
}
923
924void JIT_OPERATION operationPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
925{
926 VM& vm = exec->vm();
927 NativeCallFrameTracer tracer(&vm, exec);
928
929 JSValue baseValue = JSValue::decode(encodedBaseValue);
930 JSValue subscript = JSValue::decode(encodedSubscript);
931 JSValue value = JSValue::decode(encodedValue);
932
933 putByVal(exec, baseValue, subscript, value, byValInfo);
934}
935
936
937void JIT_OPERATION operationDirectPutByValGeneric(ExecState* exec, EncodedJSValue encodedBaseValue, EncodedJSValue encodedSubscript, EncodedJSValue encodedValue, ByValInfo* byValInfo)
938{
939 VM& vm = exec->vm();
940 NativeCallFrameTracer tracer(&vm, exec);
941
942 JSValue baseValue = JSValue::decode(encodedBaseValue);
943 JSValue subscript = JSValue::decode(encodedSubscript);
944 JSValue value = JSValue::decode(encodedValue);
945 RELEASE_ASSERT(baseValue.isObject());
946 directPutByVal(exec, asObject(baseValue), subscript, value, byValInfo);
947}
948
949EncodedJSValue JIT_OPERATION operationCallEval(ExecState* exec, ExecState* execCallee)
950{
951 VM* vm = &exec->vm();
952 auto scope = DECLARE_THROW_SCOPE(*vm);
953
954 execCallee->setCodeBlock(0);
955
956 if (!isHostFunction(execCallee->guaranteedJSValueCallee(), globalFuncEval))
957 return JSValue::encode(JSValue());
958
959 JSValue result = eval(execCallee);
960 RETURN_IF_EXCEPTION(scope, encodedJSValue());
961
962 return JSValue::encode(result);
963}
964
// Slow path for a call/construct whose target is not a JS function: runs a
// native (host) callee, or throws for a non-callable/non-constructible target.
// Returns an (entrypoint, frame-policy) pair for the JIT: the tagged
// getHostCallReturnValue trampoline on success, or the throw-exception stub.
static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto scope = DECLARE_THROW_SCOPE(*vm);

    execCallee->setCodeBlock(0);

    if (callLinkInfo->specializationKind() == CodeForCall) {
        CallData callData;
        CallType callType = getCallData(*vm, callee, callData);

        // A JS callee would have been handled before reaching this helper.
        ASSERT(callType != CallType::JS);

        if (callType == CallType::Host) {
            NativeCallFrameTracer tracer(vm, execCallee);
            execCallee->setCallee(asObject(callee));
            vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
            if (UNLIKELY(scope.exception())) {
                return encodeResult(
                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                    reinterpret_cast<void*>(KeepTheFrame));
            }

            return encodeResult(
                tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue),
                reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }

        ASSERT(callType == CallType::None);
        throwException(exec, scope, createNotAFunctionError(exec, callee));
        return encodeResult(
            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
            reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(callLinkInfo->specializationKind() == CodeForConstruct);

    ConstructData constructData;
    ConstructType constructType = getConstructData(*vm, callee, constructData);

    ASSERT(constructType != ConstructType::JS);

    if (constructType == ConstructType::Host) {
        NativeCallFrameTracer tracer(vm, execCallee);
        execCallee->setCallee(asObject(callee));
        vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
        if (UNLIKELY(scope.exception())) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        // Note: the construct path always keeps the frame; there is no
        // tail-call mode check here, unlike the call path above.
        return encodeResult(tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
    }

    ASSERT(constructType == ConstructType::None);
    throwException(exec, scope, createNotAConstructorError(exec, callee));
    return encodeResult(
        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
        reinterpret_cast<void*>(KeepTheFrame));
}
1027
// Resolves and (on the second visit — see the seenOnce checks) links an
// indirect call site: finds the callee's entrypoint, compiling the callee's
// code block if needed, and returns (entrypoint, frame-policy) for the JIT.
SlowPathReturnType JIT_OPERATION operationLinkCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    RELEASE_ASSERT(!callLinkInfo->isDirect());

    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (!calleeAsFunctionCell) {
        if (auto* internalFunction = jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            RELEASE_ASSERT(!!codePtr);

            // The first visit only marks the site as seen; linking happens on
            // the next visit.
            if (!callLinkInfo->seenOnce())
                callLinkInfo->setSeen();
            else
                linkFor(execCallee, *callLinkInfo, nullptr, internalFunction, codePtr);

            void* linkedTarget = codePtr.executableAddress();
            return encodeResult(linkedTarget, reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        // Neither a JSFunction nor an InternalFunction: native callable or a
        // type error; handleHostCall sorts it out.
        RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, callLinkInfo));
    }

    JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = callee->scopeUnchecked();
    ExecutableBase* executable = callee->executable();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction()) {
        codePtr = jsToWasmICCodePtr(*vm, kind, callee);
        if (!codePtr)
            codePtr = executable->entrypointFor(kind, MustCheckArity);
    } else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        auto handleThrowException = [&] () {
            void* throwTarget = vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress();
            return encodeResult(throwTarget, reinterpret_cast<void*>(KeepTheFrame));
        };

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, callee));
            return handleThrowException();
        }

        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        Exception* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == error);
        if (UNLIKELY(error))
            return handleThrowException();
        codeBlock = *codeBlockSlot;
        ArityCheckMode arity;
        // Varargs sites and under-supplied argument counts go through the
        // arity-checking entrypoint.
        if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo->isVarargs())
            arity = MustCheckArity;
        else
            arity = ArityCheckNotRequired;
        codePtr = functionExecutable->entrypointFor(kind, arity);
    }

    if (!callLinkInfo->seenOnce())
        callLinkInfo->setSeen();
    else
        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);

    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1101
// Links a direct call site: the callee's executable is already known to the
// CallLinkInfo, so we only need to (possibly compile and) select the right
// entrypoint and call linkDirectFor. Returns without linking if preparing the
// callee for execution threw (the exception is left pending on the VM).
void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
{
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    RELEASE_ASSERT(callLinkInfo->isDirect());

    // This would happen if the executable died during GC but the CodeBlock did not die. That should
    // not happen because the CodeBlock should have a weak reference to any executable it uses for
    // this purpose.
    RELEASE_ASSERT(callLinkInfo->executable());

    // Having a CodeBlock indicates that this is linked. We shouldn't be taking this path if it's
    // linked.
    RELEASE_ASSERT(!callLinkInfo->codeBlock());

    // We just don't support this yet.
    RELEASE_ASSERT(!callLinkInfo->isVarargs());

    ExecutableBase* executable = callLinkInfo->executable();
    RELEASE_ASSERT(callee->executable() == callLinkInfo->executable());

    JSScope* scope = callee->scopeUnchecked();

    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
    CodeBlock* codeBlock = nullptr;
    if (executable->isHostFunction())
        codePtr = executable->entrypointFor(kind, MustCheckArity);
    else {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        RELEASE_ASSERT(isCall(kind) || functionExecutable->constructAbility() != ConstructAbility::CannotConstruct);

        Exception* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, callee, scope, kind, codeBlock);
        EXCEPTION_ASSERT_UNUSED(throwScope, throwScope.exception() == error);
        if (UNLIKELY(error))
            return;
        // Direct call sites know their maximum argument count statically, so
        // the arity check can be skipped when enough slots are provided.
        unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
        if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
            codePtr = functionExecutable->entrypointFor(kind, MustCheckArity);
        else
            codePtr = functionExecutable->entrypointFor(kind, ArityCheckNotRequired);
    }

    linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
}
1151
// Shared slow path for virtual (unlinked) calls. Resolves the callee,
// compiling it first if it has no JIT code for this specialization kind, and
// reports the resolved callee cell back through calleeAsFunctionCell so
// operationLinkPolymorphicCall can link against it. Always returns the
// MustCheckArity entrypoint.
inline SlowPathReturnType virtualForWithFunction(
    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
{
    ExecState* exec = execCallee->callerFrame();
    VM* vm = &exec->vm();
    auto throwScope = DECLARE_THROW_SCOPE(*vm);

    CodeSpecializationKind kind = callLinkInfo->specializationKind();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue calleeAsValue = execCallee->guaranteedJSValueCallee();
    calleeAsFunctionCell = getJSFunction(calleeAsValue);
    if (UNLIKELY(!calleeAsFunctionCell)) {
        if (jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
            ASSERT(!!codePtr);
            return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
        }
        // Native callable or a type error; handleHostCall sorts it out.
        RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, callLinkInfo));
    }

    JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
    JSScope* scope = function->scopeUnchecked();
    ExecutableBase* executable = function->executable();
    if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
        FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);

        if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
            throwException(exec, throwScope, createNotAConstructorError(exec, function));
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }

        CodeBlock** codeBlockSlot = execCallee->addressOfCodeBlock();
        Exception* error = functionExecutable->prepareForExecution<FunctionExecutable>(*vm, function, scope, kind, *codeBlockSlot);
        EXCEPTION_ASSERT(throwScope.exception() == error);
        if (UNLIKELY(error)) {
            return encodeResult(
                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                reinterpret_cast<void*>(KeepTheFrame));
        }
    }
    return encodeResult(executable->entrypointFor(
        kind, MustCheckArity).executableAddress(),
        reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
}
1199
1200SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1201{
1202 ASSERT(callLinkInfo->specializationKind() == CodeForCall);
1203 JSCell* calleeAsFunctionCell;
1204 SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
1205
1206 linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
1207
1208 return result;
1209}
1210
1211SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
1212{
1213 JSCell* calleeAsFunctionCellIgnored;
1214 return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
1215}
1216
1217size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1218{
1219 VM* vm = &exec->vm();
1220 NativeCallFrameTracer tracer(vm, exec);
1221
1222 return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1223}
1224
1225size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1226{
1227 VM* vm = &exec->vm();
1228 NativeCallFrameTracer tracer(vm, exec);
1229
1230 return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1231}
1232
1233size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1234{
1235 VM* vm = &exec->vm();
1236 NativeCallFrameTracer tracer(vm, exec);
1237
1238 return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1239}
1240
1241size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1242{
1243 VM* vm = &exec->vm();
1244 NativeCallFrameTracer tracer(vm, exec);
1245
1246 return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
1247}
1248
1249size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1250{
1251 VM* vm = &exec->vm();
1252 NativeCallFrameTracer tracer(vm, exec);
1253
1254 return JSValue::equalSlowCaseInline(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
1255}
1256
1257#if USE(JSVALUE64)
1258EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
1259#else
1260size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
1261#endif
1262{
1263 VM* vm = &exec->vm();
1264 NativeCallFrameTracer tracer(vm, exec);
1265
1266 bool result = asString(left)->equal(exec, asString(right));
1267#if USE(JSVALUE64)
1268 return JSValue::encode(jsBoolean(result));
1269#else
1270 return result;
1271#endif
1272}
1273
1274size_t JIT_OPERATION operationCompareStrictEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
1275{
1276 VM* vm = &exec->vm();
1277 NativeCallFrameTracer tracer(vm, exec);
1278
1279 JSValue src1 = JSValue::decode(encodedOp1);
1280 JSValue src2 = JSValue::decode(encodedOp2);
1281
1282 return JSValue::strictEqual(exec, src1, src2);
1283}
1284
1285EncodedJSValue JIT_OPERATION operationNewArrayWithProfile(ExecState* exec, ArrayAllocationProfile* profile, const JSValue* values, int size)
1286{
1287 VM* vm = &exec->vm();
1288 NativeCallFrameTracer tracer(vm, exec);
1289 return JSValue::encode(constructArrayNegativeIndexed(exec, profile, values, size));
1290}
1291
1292EncodedJSValue JIT_OPERATION operationNewArrayWithSizeAndProfile(ExecState* exec, ArrayAllocationProfile* profile, EncodedJSValue size)
1293{
1294 VM* vm = &exec->vm();
1295 NativeCallFrameTracer tracer(vm, exec);
1296 JSValue sizeValue = JSValue::decode(size);
1297 return JSValue::encode(constructArrayWithSizeQuirk(exec, profile, exec->lexicalGlobalObject(), sizeValue));
1298}
1299
1300}
1301
1302template<typename FunctionType>
1303static EncodedJSValue operationNewFunctionCommon(ExecState* exec, JSScope* scope, JSCell* functionExecutable, bool isInvalidated)
1304{
1305 VM& vm = exec->vm();
1306 ASSERT(functionExecutable->inherits<FunctionExecutable>(vm));
1307 NativeCallFrameTracer tracer(&vm, exec);
1308 if (isInvalidated)
1309 return JSValue::encode(FunctionType::createWithInvalidatedReallocationWatchpoint(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1310 return JSValue::encode(FunctionType::create(vm, static_cast<FunctionExecutable*>(functionExecutable), scope));
1311}
1312
1313extern "C" {
1314
// JIT entry points for creating each flavor of function object. Each pair
// forwards to operationNewFunctionCommon with the concrete function type; the
// "...WithInvalidatedReallocationWatchpoint" variants pass isInvalidated =
// true through to it.
EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, false);
}

EncodedJSValue JIT_OPERATION operationNewFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSFunction>(exec, scope, functionExecutable, true);
}

EncodedJSValue JIT_OPERATION operationNewGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, false);
}

EncodedJSValue JIT_OPERATION operationNewGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSGeneratorFunction>(exec, scope, functionExecutable, true);
}

EncodedJSValue JIT_OPERATION operationNewAsyncFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, false);
}

EncodedJSValue JIT_OPERATION operationNewAsyncFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncFunction>(exec, scope, functionExecutable, true);
}

EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunction(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, false);
}

EncodedJSValue JIT_OPERATION operationNewAsyncGeneratorFunctionWithInvalidatedReallocationWatchpoint(ExecState* exec, JSScope* scope, JSCell* functionExecutable)
{
    return operationNewFunctionCommon<JSAsyncGeneratorFunction>(exec, scope, functionExecutable, true);
}
1354
1355void JIT_OPERATION operationSetFunctionName(ExecState* exec, JSCell* funcCell, EncodedJSValue encodedName)
1356{
1357 VM* vm = &exec->vm();
1358 NativeCallFrameTracer tracer(vm, exec);
1359
1360 JSFunction* func = jsCast<JSFunction*>(funcCell);
1361 JSValue name = JSValue::decode(encodedName);
1362 func->setFunctionName(exec, name);
1363}
1364
1365JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
1366{
1367 VM* vm = &exec->vm();
1368 NativeCallFrameTracer tracer(vm, exec);
1369
1370 return constructEmptyObject(exec, structure);
1371}
1372
1373JSCell* JIT_OPERATION operationNewRegexp(ExecState* exec, JSCell* regexpPtr)
1374{
1375 SuperSamplerScope superSamplerScope(false);
1376 VM& vm = exec->vm();
1377 NativeCallFrameTracer tracer(&vm, exec);
1378
1379 RegExp* regexp = static_cast<RegExp*>(regexpPtr);
1380 ASSERT(regexp->isValid());
1381 return RegExpObject::create(vm, exec->lexicalGlobalObject()->regExpStructure(), regexp);
1382}
1383
1384// The only reason for returning an UnusedPtr (instead of void) is so that we can reuse the
1385// existing DFG slow path generator machinery when creating the slow path for CheckTraps
1386// in the DFG. If a DFG slow path generator that supports a void return type is added in the
1387// future, we can switch to using that then.
// Services pending VM traps; see the comment above for why this returns an
// UnusedPtr instead of void.
UnusedPtr JIT_OPERATION operationHandleTraps(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    // Callers guarantee a trap actually needs handling before taking this path.
    ASSERT(vm.needTrapHandling());
    vm.handleTraps(exec);
    return nullptr;
}
1396
// Invokes the interpreter's debugger hook. debugHookType arrives as an int32
// from JIT code and is cast back to the DebugHookType enum.
void JIT_OPERATION operationDebug(ExecState* exec, int32_t debugHookType)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    vm.interpreter->debug(exec, static_cast<DebugHookType>(debugHookType));
}
1404
1405#if ENABLE(DFG_JIT)
// Refreshes the code block's value-profile predictions and defers the next
// optimization attempt until the block warms up again.
static void updateAllPredictionsAndOptimizeAfterWarmUp(CodeBlock* codeBlock)
{
    codeBlock->updateAllPredictions();
    codeBlock->optimizeAfterWarmUp();
}
1411
1412SlowPathReturnType JIT_OPERATION operationOptimize(ExecState* exec, uint32_t bytecodeIndex)
1413{
1414 VM& vm = exec->vm();
1415 NativeCallFrameTracer tracer(&vm, exec);
1416
1417 // Defer GC for a while so that it doesn't run between when we enter into this
1418 // slow path and when we figure out the state of our code block. This prevents
1419 // a number of awkward reentrancy scenarios, including:
1420 //
1421 // - The optimized version of our code block being jettisoned by GC right after
1422 // we concluded that we wanted to use it, but have not planted it into the JS
1423 // stack yet.
1424 //
1425 // - An optimized version of our code block being installed just as we decided
1426 // that it wasn't ready yet.
1427 //
1428 // Note that jettisoning won't happen if we already initiated OSR, because in
1429 // that case we would have already planted the optimized code block into the JS
1430 // stack.
1431 DeferGCForAWhile deferGC(vm.heap);
1432
1433 CodeBlock* codeBlock = exec->codeBlock();
1434 if (UNLIKELY(codeBlock->jitType() != JITType::BaselineJIT)) {
1435 dataLog("Unexpected code block in Baseline->DFG tier-up: ", *codeBlock, "\n");
1436 RELEASE_ASSERT_NOT_REACHED();
1437 }
1438
1439 if (bytecodeIndex) {
1440 // If we're attempting to OSR from a loop, assume that this should be
1441 // separately optimized.
1442 codeBlock->m_shouldAlwaysBeInlined = false;
1443 }
1444
1445 if (UNLIKELY(Options::verboseOSR())) {
1446 dataLog(
1447 *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
1448 ", executeCounter = ", codeBlock->jitExecuteCounter(),
1449 ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
1450 ", exitCounter = ");
1451 if (codeBlock->hasOptimizedReplacement())
1452 dataLog(codeBlock->replacement()->osrExitCounter());
1453 else
1454 dataLog("N/A");
1455 dataLog("\n");
1456 }
1457
1458 if (!codeBlock->checkIfOptimizationThresholdReached()) {
1459 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("counter = ", codeBlock->jitExecuteCounter()));
1460 codeBlock->updateAllPredictions();
1461 if (UNLIKELY(Options::verboseOSR()))
1462 dataLog("Choosing not to optimize ", *codeBlock, " yet, because the threshold hasn't been reached.\n");
1463 return encodeResult(0, 0);
1464 }
1465
1466 Debugger* debugger = codeBlock->globalObject()->debugger();
1467 if (UNLIKELY(debugger && (debugger->isStepping() || codeBlock->baselineAlternative()->hasDebuggerRequests()))) {
1468 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("debugger is stepping or has requests"));
1469 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1470 return encodeResult(0, 0);
1471 }
1472
1473 if (codeBlock->m_shouldAlwaysBeInlined) {
1474 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should always be inlined"));
1475 updateAllPredictionsAndOptimizeAfterWarmUp(codeBlock);
1476 if (UNLIKELY(Options::verboseOSR()))
1477 dataLog("Choosing not to optimize ", *codeBlock, " yet, because m_shouldAlwaysBeInlined == true.\n");
1478 return encodeResult(0, 0);
1479 }
1480
1481 // We cannot be in the process of asynchronous compilation and also have an optimized
1482 // replacement.
1483 DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull();
1484 ASSERT(
1485 !worklist
1486 || !(worklist->compilationState(DFG::CompilationKey(codeBlock, DFG::DFGMode)) != DFG::Worklist::NotKnown
1487 && codeBlock->hasOptimizedReplacement()));
1488
1489 DFG::Worklist::State worklistState;
1490 if (worklist) {
1491 // The call to DFG::Worklist::completeAllReadyPlansForVM() will complete all ready
1492 // (i.e. compiled) code blocks. But if it completes ours, we also need to know
1493 // what the result was so that we don't plow ahead and attempt OSR or immediate
1494 // reoptimization. This will have already also set the appropriate JIT execution
1495 // count threshold depending on what happened, so if the compilation was anything
1496 // but successful we just want to return early. See the case for worklistState ==
1497 // DFG::Worklist::Compiled, below.
1498
1499 // Note that we could have alternatively just called Worklist::compilationState()
1500 // here, and if it returned Compiled, we could have then called
1501 // completeAndScheduleOSR() below. But that would have meant that it could take
1502 // longer for code blocks to be completed: they would only complete when *their*
1503 // execution count trigger fired; but that could take a while since the firing is
1504 // racy. It could also mean that code blocks that never run again after being
1505 // compiled would sit on the worklist until next GC. That's fine, but it's
1506 // probably a waste of memory. Our goal here is to complete code blocks as soon as
1507 // possible in order to minimize the chances of us executing baseline code after
1508 // optimized code is already available.
1509 worklistState = worklist->completeAllReadyPlansForVM(
1510 vm, DFG::CompilationKey(codeBlock, DFG::DFGMode));
1511 } else
1512 worklistState = DFG::Worklist::NotKnown;
1513
1514 if (worklistState == DFG::Worklist::Compiling) {
1515 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiling"));
1516 // We cannot be in the process of asynchronous compilation and also have an optimized
1517 // replacement.
1518 RELEASE_ASSERT(!codeBlock->hasOptimizedReplacement());
1519 codeBlock->setOptimizationThresholdBasedOnCompilationResult(CompilationDeferred);
1520 return encodeResult(0, 0);
1521 }
1522
1523 if (worklistState == DFG::Worklist::Compiled) {
1524 // If we don't have an optimized replacement but we did just get compiled, then
1525 // the compilation failed or was invalidated, in which case the execution count
1526 // thresholds have already been set appropriately by
1527 // CodeBlock::setOptimizationThresholdBasedOnCompilationResult() and we have
1528 // nothing left to do.
1529 if (!codeBlock->hasOptimizedReplacement()) {
1530 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compiled and failed"));
1531 codeBlock->updateAllPredictions();
1532 if (UNLIKELY(Options::verboseOSR()))
1533 dataLog("Code block ", *codeBlock, " was compiled but it doesn't have an optimized replacement.\n");
1534 return encodeResult(0, 0);
1535 }
1536 } else if (codeBlock->hasOptimizedReplacement()) {
1537 CodeBlock* replacement = codeBlock->replacement();
1538 if (UNLIKELY(Options::verboseOSR()))
1539 dataLog("Considering OSR ", codeBlock, " -> ", replacement, ".\n");
1540 // If we have an optimized replacement, then it must be the case that we entered
1541 // cti_optimize from a loop. That's because if there's an optimized replacement,
1542 // then all calls to this function will be relinked to the replacement and so
1543 // the prologue OSR will never fire.
1544
1545 // This is an interesting threshold check. Consider that a function OSR exits
1546 // in the middle of a loop, while having a relatively low exit count. The exit
1547 // will reset the execution counter to some target threshold, meaning that this
1548 // code won't be reached until that loop heats up for >=1000 executions. But then
1549 // we do a second check here, to see if we should either reoptimize, or just
1550 // attempt OSR entry. Hence it might even be correct for
1551 // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
1552 // additional checking anyway, to reduce the amount of recompilation thrashing.
1553 if (replacement->shouldReoptimizeFromLoopNow()) {
1554 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize from loop now"));
1555 if (UNLIKELY(Options::verboseOSR())) {
1556 dataLog(
1557 "Triggering reoptimization of ", codeBlock,
1558 "(", replacement, ") (in loop).\n");
1559 }
1560 replacement->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTrigger, CountReoptimization);
1561 return encodeResult(0, 0);
1562 }
1563 } else {
1564 if (!codeBlock->shouldOptimizeNow()) {
1565 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("insufficient profiling"));
1566 if (UNLIKELY(Options::verboseOSR())) {
1567 dataLog(
1568 "Delaying optimization for ", *codeBlock,
1569 " because of insufficient profiling.\n");
1570 }
1571 return encodeResult(0, 0);
1572 }
1573
1574 if (UNLIKELY(Options::verboseOSR()))
1575 dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
1576
1577 unsigned numVarsWithValues;
1578 if (bytecodeIndex)
1579 numVarsWithValues = codeBlock->numCalleeLocals();
1580 else
1581 numVarsWithValues = 0;
1582 Operands<Optional<JSValue>> mustHandleValues(codeBlock->numParameters(), numVarsWithValues);
1583 int localsUsedForCalleeSaves = static_cast<int>(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters());
1584 for (size_t i = 0; i < mustHandleValues.size(); ++i) {
1585 int operand = mustHandleValues.operandForIndex(i);
1586 if (operandIsLocal(operand) && VirtualRegister(operand).toLocal() < localsUsedForCalleeSaves)
1587 continue;
1588 mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
1589 }
1590
1591 CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
1592 CompilationResult result = DFG::compile(
1593 vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
1594 mustHandleValues, JITToDFGDeferredCompilationCallback::create());
1595
1596 if (result != CompilationSuccessful) {
1597 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("compilation failed"));
1598 return encodeResult(0, 0);
1599 }
1600 }
1601
1602 CodeBlock* optimizedCodeBlock = codeBlock->replacement();
1603 ASSERT(optimizedCodeBlock && JITCode::isOptimizingJIT(optimizedCodeBlock->jitType()));
1604
1605 if (void* dataBuffer = DFG::prepareOSREntry(exec, optimizedCodeBlock, bytecodeIndex)) {
1606 CODEBLOCK_LOG_EVENT(optimizedCodeBlock, "osrEntry", ("at bc#", bytecodeIndex));
1607 if (UNLIKELY(Options::verboseOSR())) {
1608 dataLog(
1609 "Performing OSR ", codeBlock, " -> ", optimizedCodeBlock, ".\n");
1610 }
1611
1612 codeBlock->optimizeSoon();
1613 codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
1614 void* targetPC = vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress();
1615 targetPC = retagCodePtr(targetPC, JITThunkPtrTag, bitwise_cast<PtrTag>(exec));
1616 return encodeResult(targetPC, dataBuffer);
1617 }
1618
1619 if (UNLIKELY(Options::verboseOSR())) {
1620 dataLog(
1621 "Optimizing ", codeBlock, " -> ", codeBlock->replacement(),
1622 " succeeded, OSR failed, after a delay of ",
1623 codeBlock->optimizationDelayCounter(), ".\n");
1624 }
1625
1626 // Count the OSR failure as a speculation failure. If this happens a lot, then
1627 // reoptimize.
1628 optimizedCodeBlock->countOSRExit();
1629
1630 // We are a lot more conservative about triggering reoptimization after OSR failure than
1631 // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
1632 // already, then we really would like to reoptimize immediately. But this case covers
1633 // something else: there weren't many (or any) speculation failures before, but we just
1634 // failed to enter the speculative code because some variable had the wrong value or
1635 // because the OSR code decided for any spurious reason that it did not want to OSR
1636 // right now. So, we only trigger reoptimization only upon the more conservative (non-loop)
1637 // reoptimization trigger.
1638 if (optimizedCodeBlock->shouldReoptimizeNow()) {
1639 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("should reoptimize now"));
1640 if (UNLIKELY(Options::verboseOSR())) {
1641 dataLog(
1642 "Triggering reoptimization of ", codeBlock, " -> ",
1643 codeBlock->replacement(), " (after OSR fail).\n");
1644 }
1645 optimizedCodeBlock->jettison(Profiler::JettisonDueToBaselineLoopReoptimizationTriggerOnOSREntryFail, CountReoptimization);
1646 return encodeResult(0, 0);
1647 }
1648
1649 // OSR failed this time, but it might succeed next time! Let the code run a bit
1650 // longer and then try again.
1651 codeBlock->optimizeAfterWarmUp();
1652
1653 CODEBLOCK_LOG_EVENT(codeBlock, "delayOptimizeToDFG", ("OSR failed"));
1654 return encodeResult(0, 0);
1655}
1656
1657char* JIT_OPERATION operationTryOSREnterAtCatch(ExecState* exec, uint32_t bytecodeIndex)
1658{
1659 VM& vm = exec->vm();
1660 NativeCallFrameTracer tracer(&vm, exec);
1661
1662 CodeBlock* optimizedReplacement = exec->codeBlock()->replacement();
1663 if (UNLIKELY(!optimizedReplacement))
1664 return nullptr;
1665
1666 switch (optimizedReplacement->jitType()) {
1667 case JITType::DFGJIT:
1668 case JITType::FTLJIT: {
1669 MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
1670 return entry.executableAddress<char*>();
1671 }
1672 default:
1673 break;
1674 }
1675 return nullptr;
1676}
1677
// Like operationTryOSREnterAtCatch, but when OSR entry is not possible this
// also refreshes the op_catch value profiles with the live values currently in
// the baseline frame, so a later compile of this catch block sees up-to-date
// predictions.
char* JIT_OPERATION operationTryOSREnterAtCatchAndValueProfile(ExecState* exec, uint32_t bytecodeIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);

    CodeBlock* codeBlock = exec->codeBlock();
    CodeBlock* optimizedReplacement = codeBlock->replacement();
    if (UNLIKELY(!optimizedReplacement))
        return nullptr;

    switch (optimizedReplacement->jitType()) {
    case JITType::DFGJIT:
    case JITType::FTLJIT: {
        // Entry succeeded (or at least was prepared): hand the address back to
        // the JIT trampoline and skip the profiling below.
        MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
        return entry.executableAddress<char*>();
    }
    default:
        break;
    }

    // No OSR entry happened. Snapshot every live operand into the catch's
    // value-profile buffer; liveness must be computed first so the buffer exists.
    codeBlock->ensureCatchLivenessIsComputedForBytecodeOffset(bytecodeIndex);
    auto bytecode = codeBlock->instructions().at(bytecodeIndex)->as<OpCatch>();
    auto& metadata = bytecode.metadata(codeBlock);
    metadata.m_buffer->forEach([&] (ValueProfileAndOperand& profile) {
        profile.m_buckets[0] = JSValue::encode(exec->uncheckedR(profile.m_operand).jsValue());
    });

    return nullptr;
}
1707
1708#endif
1709
1710void JIT_OPERATION operationPutByIndex(ExecState* exec, EncodedJSValue encodedArrayValue, int32_t index, EncodedJSValue encodedValue)
1711{
1712 VM& vm = exec->vm();
1713 NativeCallFrameTracer tracer(&vm, exec);
1714
1715 JSValue arrayValue = JSValue::decode(encodedArrayValue);
1716 ASSERT(isJSArray(arrayValue));
1717 asArray(arrayValue)->putDirectIndex(exec, index, JSValue::decode(encodedValue));
1718}
1719
// Selects which accessor slot putAccessorByVal installs.
enum class AccessorType {
    Getter,
    Setter
};
1724
1725static void putAccessorByVal(ExecState* exec, JSObject* base, JSValue subscript, int32_t attribute, JSObject* accessor, AccessorType accessorType)
1726{
1727 VM& vm = exec->vm();
1728 auto scope = DECLARE_THROW_SCOPE(vm);
1729 auto propertyKey = subscript.toPropertyKey(exec);
1730 RETURN_IF_EXCEPTION(scope, void());
1731
1732 scope.release();
1733 if (accessorType == AccessorType::Getter)
1734 base->putGetter(exec, propertyKey, accessor, attribute);
1735 else
1736 base->putSetter(exec, propertyKey, accessor, attribute);
1737}
1738
1739void JIT_OPERATION operationPutGetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* getter)
1740{
1741 VM& vm = exec->vm();
1742 NativeCallFrameTracer tracer(&vm, exec);
1743
1744 ASSERT(object && object->isObject());
1745 JSObject* baseObj = object->getObject();
1746
1747 ASSERT(getter->isObject());
1748 baseObj->putGetter(exec, uid, getter, options);
1749}
1750
1751void JIT_OPERATION operationPutSetterById(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t options, JSCell* setter)
1752{
1753 VM& vm = exec->vm();
1754 NativeCallFrameTracer tracer(&vm, exec);
1755
1756 ASSERT(object && object->isObject());
1757 JSObject* baseObj = object->getObject();
1758
1759 ASSERT(setter->isObject());
1760 baseObj->putSetter(exec, uid, setter, options);
1761}
1762
1763void JIT_OPERATION operationPutGetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* getter)
1764{
1765 VM& vm = exec->vm();
1766 NativeCallFrameTracer tracer(&vm, exec);
1767
1768 putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(getter), AccessorType::Getter);
1769}
1770
1771void JIT_OPERATION operationPutSetterByVal(ExecState* exec, JSCell* base, EncodedJSValue encodedSubscript, int32_t attribute, JSCell* setter)
1772{
1773 VM& vm = exec->vm();
1774 NativeCallFrameTracer tracer(&vm, exec);
1775
1776 putAccessorByVal(exec, asObject(base), JSValue::decode(encodedSubscript), attribute, asObject(setter), AccessorType::Setter);
1777}
1778
1779#if USE(JSVALUE64)
1780void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, EncodedJSValue encodedGetterValue, EncodedJSValue encodedSetterValue)
1781{
1782 VM& vm = exec->vm();
1783 NativeCallFrameTracer tracer(&vm, exec);
1784
1785 ASSERT(object && object->isObject());
1786 JSObject* baseObject = asObject(object);
1787
1788 JSValue getter = JSValue::decode(encodedGetterValue);
1789 JSValue setter = JSValue::decode(encodedSetterValue);
1790 ASSERT(getter.isObject() || setter.isObject());
1791 GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1792 CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, uid, accessor, attribute);
1793}
1794
1795#else
1796void JIT_OPERATION operationPutGetterSetter(ExecState* exec, JSCell* object, UniquedStringImpl* uid, int32_t attribute, JSCell* getterCell, JSCell* setterCell)
1797{
1798 VM& vm = exec->vm();
1799 NativeCallFrameTracer tracer(&vm, exec);
1800
1801 ASSERT(object && object->isObject());
1802 JSObject* baseObject = asObject(object);
1803
1804 ASSERT(getterCell || setterCell);
1805 JSObject* getter = getterCell ? getterCell->getObject() : nullptr;
1806 JSObject* setter = setterCell ? setterCell->getObject() : nullptr;
1807 GetterSetter* accessor = GetterSetter::create(vm, exec->lexicalGlobalObject(), getter, setter);
1808 CommonSlowPaths::putDirectAccessorWithReify(vm, exec, baseObject, uid, accessor, attribute);
1809}
1810#endif
1811
1812void JIT_OPERATION operationPopScope(ExecState* exec, int32_t scopeReg)
1813{
1814 VM& vm = exec->vm();
1815 NativeCallFrameTracer tracer(&vm, exec);
1816
1817 JSScope* scope = exec->uncheckedR(scopeReg).Register::scope();
1818 exec->uncheckedR(scopeReg) = scope->next();
1819}
1820
1821int32_t JIT_OPERATION operationInstanceOfCustom(ExecState* exec, EncodedJSValue encodedValue, JSObject* constructor, EncodedJSValue encodedHasInstance)
1822{
1823 VM& vm = exec->vm();
1824 NativeCallFrameTracer tracer(&vm, exec);
1825
1826 JSValue value = JSValue::decode(encodedValue);
1827 JSValue hasInstanceValue = JSValue::decode(encodedHasInstance);
1828
1829 if (constructor->hasInstance(exec, value, hasInstanceValue))
1830 return 1;
1831 return 0;
1832}
1833
1834}
1835
// Baseline get_by_val slow path. Tries progressively cheaper strategies:
// fast own-property lookup for string subscripts, direct indexed access for
// int32 subscripts (repatching to the string-specialized thunk when the base
// is a string), and finally the fully generic JSValue::get() path.
static JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (LIKELY(baseValue.isCell() && subscript.isString())) {
        Structure& structure = *baseValue.asCell()->structure(vm);
        if (JSCell::canUseFastGetOwnProperty(structure)) {
            // Only an already-atomized subscript can hit the fast own-property
            // cache; we deliberately do not atomize just for this lookup.
            RefPtr<AtomStringImpl> existingAtomString = asString(subscript)->toExistingAtomString(exec);
            RETURN_IF_EXCEPTION(scope, JSValue());
            if (existingAtomString) {
                if (JSValue result = baseValue.asCell()->fastGetOwnProperty(vm, structure, existingAtomString.get())) {
                    ASSERT(exec->bytecodeOffset());
                    // A stub specialized on a different identifier cannot serve
                    // this access; record the slow path so the IC gets rebuilt.
                    if (byValInfo->stubInfo && byValInfo->cachedId.impl() != existingAtomString)
                        byValInfo->tookSlowPath = true;
                    return result;
                }
            }
        }
    }

    if (subscript.isInt32()) {
        ASSERT(exec->bytecodeOffset());
        byValInfo->tookSlowPath = true;

        int32_t i = subscript.asInt32();
        if (isJSString(baseValue)) {
            if (i >= 0 && asString(baseValue)->canGetIndex(i)) {
                // Future calls from this site go straight to the string-indexed
                // variant; answer this one directly.
                ctiPatchCallByReturnAddress(returnAddress, operationGetByValString);
                RELEASE_AND_RETURN(scope, asString(baseValue)->getIndex(exec, i));
            }
            byValInfo->arrayProfile->setOutOfBounds();
        } else if (baseValue.isObject()) {
            JSObject* object = asObject(baseValue);
            if (object->canGetIndexQuickly(i))
                return object->getIndexQuickly(i);

            bool skipMarkingOutOfBounds = false;

            if (object->indexingType() == ArrayWithContiguous && i >= 0 && static_cast<uint32_t>(i) < object->butterfly()->publicLength()) {
                // FIXME: expand this to ArrayStorage, Int32, and maybe Double:
                // https://bugs.webkit.org/show_bug.cgi?id=182940
                auto* globalObject = object->globalObject(vm);
                skipMarkingOutOfBounds = globalObject->isOriginalArrayStructure(object->structure(vm)) && globalObject->arrayPrototypeChainIsSane();
            }

            if (!skipMarkingOutOfBounds && !CommonSlowPaths::canAccessArgumentIndexQuickly(*object, i)) {
                // FIXME: This will make us think that in-bounds typed array accesses are actually
                // out-of-bounds.
                // https://bugs.webkit.org/show_bug.cgi?id=149886
                byValInfo->arrayProfile->setOutOfBounds();
            }
        }

        // Negative int32 subscripts fall through to the named-property path
        // below (e.g. "-1" as a property key).
        if (i >= 0)
            RELEASE_AND_RETURN(scope, baseValue.get(exec, static_cast<uint32_t>(i)));
    }

    // Generic path: ToObject-coercibility check, then property-key conversion
    // (both can throw), then an ordinary [[Get]].
    baseValue.requireObjectCoercible(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());
    auto property = subscript.toPropertyKey(exec);
    RETURN_IF_EXCEPTION(scope, JSValue());

    ASSERT(exec->bytecodeOffset());
    if (byValInfo->stubInfo && (!isStringOrSymbol(subscript) || byValInfo->cachedId != property))
        byValInfo->tookSlowPath = true;

    RELEASE_AND_RETURN(scope, baseValue.get(exec, property));
}
1905
// Decides whether (and how) to build an inline cache for this get_by_val site.
// Returns Optimized when a stub was compiled, SeenOnce when an identifier was
// recorded for a future compile, GiveUp when the site should be permanently
// routed to the generic operation, and NotOptimized otherwise.
static OptimizationResult tryGetByValOptimize(ExecState* exec, JSValue baseValue, JSValue subscript, ByValInfo* byValInfo, ReturnAddressPtr returnAddress)
{
    // See if it's worth optimizing this at all.
    OptimizationResult optimizationResult = OptimizationResult::NotOptimized;

    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (baseValue.isObject() && subscript.isInt32()) {
        JSObject* object = asObject(baseValue);

        ASSERT(exec->bytecodeOffset());
        ASSERT(!byValInfo->stubRoutine);

        if (hasOptimizableIndexing(object->structure(vm))) {
            // Attempt to optimize.
            Structure* structure = object->structure(vm);
            JITArrayMode arrayMode = jitArrayModeForStructure(structure);
            if (arrayMode != byValInfo->arrayMode) {
                // If we reached this case, we got an interesting array mode we did not expect when we compiled.
                // Let's update the profile to do better next time.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->arrayProfile->computeUpdatedPrediction(locker, codeBlock, structure);

                JIT::compileGetByVal(locker, &vm, codeBlock, byValInfo, returnAddress, arrayMode);
                optimizationResult = OptimizationResult::Optimized;
            }
        }

        // If we failed to patch and we have some object that intercepts indexed get, then don't even wait until 10 times.
        if (optimizationResult != OptimizationResult::Optimized && object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero())
            optimizationResult = OptimizationResult::GiveUp;
    }

    if (baseValue.isObject() && isStringOrSymbol(subscript)) {
        const Identifier propertyName = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, OptimizationResult::GiveUp);
        // Index-like string keys go through the indexed path, not the
        // identifier cache; symbols are never index-like.
        if (subscript.isSymbol() || !parseIndex(propertyName)) {
            ASSERT(exec->bytecodeOffset());
            ASSERT(!byValInfo->stubRoutine);
            if (byValInfo->seen) {
                if (byValInfo->cachedId == propertyName) {
                    // Same identifier twice in a row: worth a specialized stub.
                    JIT::compileGetByValWithCachedId(&vm, exec->codeBlock(), byValInfo, returnAddress, propertyName);
                    optimizationResult = OptimizationResult::Optimized;
                } else {
                    // Seem like a generic property access site.
                    optimizationResult = OptimizationResult::GiveUp;
                }
            } else {
                // First identifier observed: remember it and wait for a repeat.
                CodeBlock* codeBlock = exec->codeBlock();
                ConcurrentJSLocker locker(codeBlock->m_lock);
                byValInfo->seen = true;
                byValInfo->cachedId = propertyName;
                if (subscript.isSymbol())
                    byValInfo->cachedSymbol.set(vm, codeBlock, asSymbol(subscript));
                optimizationResult = OptimizationResult::SeenOnce;
            }
        }
    }

    if (optimizationResult != OptimizationResult::Optimized && optimizationResult != OptimizationResult::SeenOnce) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. For cases where we see non-index-intercepting
        // objects, this gives 10 iterations worth of opportunity for us to observe
        // that the get_by_val may be polymorphic. We count up slowPathCount even if
        // the result is GiveUp.
        if (++byValInfo->slowPathCount >= 10)
            optimizationResult = OptimizationResult::GiveUp;
    }

    return optimizationResult;
}
1979
1980extern "C" {
1981
1982EncodedJSValue JIT_OPERATION operationGetByValGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1983{
1984 VM& vm = exec->vm();
1985 NativeCallFrameTracer tracer(&vm, exec);
1986 JSValue baseValue = JSValue::decode(encodedBase);
1987 JSValue subscript = JSValue::decode(encodedSubscript);
1988
1989 JSValue result = getByVal(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS));
1990 return JSValue::encode(result);
1991}
1992
1993EncodedJSValue JIT_OPERATION operationGetByValOptimize(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
1994{
1995 VM& vm = exec->vm();
1996 NativeCallFrameTracer tracer(&vm, exec);
1997 auto scope = DECLARE_THROW_SCOPE(vm);
1998
1999 JSValue baseValue = JSValue::decode(encodedBase);
2000 JSValue subscript = JSValue::decode(encodedSubscript);
2001 ReturnAddressPtr returnAddress = ReturnAddressPtr(OUR_RETURN_ADDRESS);
2002 OptimizationResult result = tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress);
2003 RETURN_IF_EXCEPTION(scope, { });
2004 if (result == OptimizationResult::GiveUp) {
2005 // Don't ever try to optimize.
2006 byValInfo->tookSlowPath = true;
2007 ctiPatchCallByReturnAddress(returnAddress, operationGetByValGeneric);
2008 }
2009
2010 RELEASE_AND_RETURN(scope, JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress)));
2011}
2012
// has_indexed_property slow path that may compile a specialized stub for the
// observed array mode before answering. Gives up (repatching to the generic
// operation) after repeated failures or for index-intercepting objects.
EncodedJSValue JIT_OPERATION operationHasIndexedPropertyDefault(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    // Callers guarantee an object base and an integer-like subscript.
    ASSERT(baseValue.isObject());
    ASSERT(subscript.isUInt32AsAnyInt());

    JSObject* object = asObject(baseValue);
    bool didOptimize = false;

    ASSERT(exec->bytecodeOffset());
    ASSERT(!byValInfo->stubRoutine);

    if (hasOptimizableIndexing(object->structure(vm))) {
        // Attempt to optimize.
        JITArrayMode arrayMode = jitArrayModeForStructure(object->structure(vm));
        if (arrayMode != byValInfo->arrayMode) {
            JIT::compileHasIndexedProperty(&vm, exec->codeBlock(), byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS), arrayMode);
            didOptimize = true;
        }
    }

    if (!didOptimize) {
        // If we take slow path more than 10 times without patching then make sure we
        // never make that mistake again. Or, if we failed to patch and we have some object
        // that intercepts indexed get, then don't even wait until 10 times. For cases
        // where we see non-index-intercepting objects, this gives 10 iterations worth of
        // opportunity for us to observe that the get_by_val may be polymorphic.
        if (++byValInfo->slowPathCount >= 10
            || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
            // Don't ever try to optimize.
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationHasIndexedPropertyGeneric);
        }
    }

    // Answer the query regardless of what the optimizer decided above.
    uint32_t index = subscript.asUInt32AsAnyInt();
    if (object->canGetIndexQuickly(index))
        return JSValue::encode(JSValue(JSValue::JSTrue));

    if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
        // FIXME: This will make us think that in-bounds typed array accesses are actually
        // out-of-bounds.
        // https://bugs.webkit.org/show_bug.cgi?id=149886
        byValInfo->arrayProfile->setOutOfBounds();
    }
    return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
}
2063
2064EncodedJSValue JIT_OPERATION operationHasIndexedPropertyGeneric(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
2065{
2066 VM& vm = exec->vm();
2067 NativeCallFrameTracer tracer(&vm, exec);
2068 JSValue baseValue = JSValue::decode(encodedBase);
2069 JSValue subscript = JSValue::decode(encodedSubscript);
2070
2071 ASSERT(baseValue.isObject());
2072 ASSERT(subscript.isUInt32AsAnyInt());
2073
2074 JSObject* object = asObject(baseValue);
2075 uint32_t index = subscript.asUInt32AsAnyInt();
2076 if (object->canGetIndexQuickly(index))
2077 return JSValue::encode(JSValue(JSValue::JSTrue));
2078
2079 if (!CommonSlowPaths::canAccessArgumentIndexQuickly(*object, index)) {
2080 // FIXME: This will make us think that in-bounds typed array accesses are actually
2081 // out-of-bounds.
2082 // https://bugs.webkit.org/show_bug.cgi?id=149886
2083 byValInfo->arrayProfile->setOutOfBounds();
2084 }
2085 return JSValue::encode(jsBoolean(object->hasPropertyGeneric(exec, index, PropertySlot::InternalMethodType::GetOwnProperty)));
2086}
2087
// get_by_val variant specialized for string bases with uint32 subscripts
// (installed by getByVal when that pattern is observed). If the base stops
// being a string, it repatches the call site back to the general operation.
EncodedJSValue JIT_OPERATION operationGetByValString(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedSubscript, ByValInfo* byValInfo)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue baseValue = JSValue::decode(encodedBase);
    JSValue subscript = JSValue::decode(encodedSubscript);

    JSValue result;
    if (LIKELY(subscript.isUInt32())) {
        uint32_t i = subscript.asUInt32();
        // The fast case this thunk was installed for.
        if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
            RELEASE_AND_RETURN(scope, JSValue::encode(asString(baseValue)->getIndex(exec, i)));

        result = baseValue.get(exec, i);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        if (!isJSString(baseValue)) {
            ASSERT(exec->bytecodeOffset());
            // The specialization no longer applies; send future calls back to
            // the generic path (or the optimize path if no stub exists yet).
            auto getByValFunction = byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize;
            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), getByValFunction);
        }
    } else {
        // Non-uint32 subscript: fall back to the spec-mandated generic [[Get]]
        // (coercibility check and key conversion may throw).
        baseValue.requireObjectCoercible(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        auto property = subscript.toPropertyKey(exec);
        RETURN_IF_EXCEPTION(scope, encodedJSValue());
        scope.release();
        result = baseValue.get(exec, property);
    }

    return JSValue::encode(result);
}
2120
2121EncodedJSValue JIT_OPERATION operationDeleteByIdJSResult(ExecState* exec, EncodedJSValue base, UniquedStringImpl* uid)
2122{
2123 return JSValue::encode(jsBoolean(operationDeleteById(exec, base, uid)));
2124}
2125
2126size_t JIT_OPERATION operationDeleteById(ExecState* exec, EncodedJSValue encodedBase, UniquedStringImpl* uid)
2127{
2128 VM& vm = exec->vm();
2129 NativeCallFrameTracer tracer(&vm, exec);
2130 auto scope = DECLARE_THROW_SCOPE(vm);
2131
2132 JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
2133 RETURN_IF_EXCEPTION(scope, false);
2134 if (!baseObj)
2135 return false;
2136 bool couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, Identifier::fromUid(&vm, uid));
2137 RETURN_IF_EXCEPTION(scope, false);
2138 if (!couldDelete && exec->codeBlock()->isStrictMode())
2139 throwTypeError(exec, scope, UnableToDeletePropertyError);
2140 return couldDelete;
2141}
2142
2143EncodedJSValue JIT_OPERATION operationDeleteByValJSResult(ExecState* exec, EncodedJSValue base, EncodedJSValue key)
2144{
2145 return JSValue::encode(jsBoolean(operationDeleteByVal(exec, base, key)));
2146}
2147
2148size_t JIT_OPERATION operationDeleteByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedKey)
2149{
2150 VM& vm = exec->vm();
2151 NativeCallFrameTracer tracer(&vm, exec);
2152 auto scope = DECLARE_THROW_SCOPE(vm);
2153
2154 JSObject* baseObj = JSValue::decode(encodedBase).toObject(exec);
2155 RETURN_IF_EXCEPTION(scope, false);
2156 JSValue key = JSValue::decode(encodedKey);
2157 if (!baseObj)
2158 return false;
2159
2160 bool couldDelete;
2161 uint32_t index;
2162 if (key.getUInt32(index))
2163 couldDelete = baseObj->methodTable(vm)->deletePropertyByIndex(baseObj, exec, index);
2164 else {
2165 Identifier property = key.toPropertyKey(exec);
2166 RETURN_IF_EXCEPTION(scope, false);
2167 couldDelete = baseObj->methodTable(vm)->deleteProperty(baseObj, exec, property);
2168 }
2169 RETURN_IF_EXCEPTION(scope, false);
2170 if (!couldDelete && exec->codeBlock()->isStrictMode())
2171 throwTypeError(exec, scope, UnableToDeletePropertyError);
2172 return couldDelete;
2173}
2174
2175JSCell* JIT_OPERATION operationPushWithScope(ExecState* exec, JSCell* currentScopeCell, EncodedJSValue objectValue)
2176{
2177 VM& vm = exec->vm();
2178 NativeCallFrameTracer tracer(&vm, exec);
2179 auto scope = DECLARE_THROW_SCOPE(vm);
2180
2181 JSObject* object = JSValue::decode(objectValue).toObject(exec);
2182 RETURN_IF_EXCEPTION(scope, nullptr);
2183
2184 JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2185
2186 return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2187}
2188
2189JSCell* JIT_OPERATION operationPushWithScopeObject(ExecState* exec, JSCell* currentScopeCell, JSObject* object)
2190{
2191 VM& vm = exec->vm();
2192 NativeCallFrameTracer tracer(&vm, exec);
2193 JSScope* currentScope = jsCast<JSScope*>(currentScopeCell);
2194 return JSWithScope::create(vm, exec->lexicalGlobalObject(), currentScope, object);
2195}
2196
2197EncodedJSValue JIT_OPERATION operationInstanceOf(ExecState* exec, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2198{
2199 VM& vm = exec->vm();
2200 NativeCallFrameTracer tracer(&vm, exec);
2201 JSValue value = JSValue::decode(encodedValue);
2202 JSValue proto = JSValue::decode(encodedProto);
2203
2204 bool result = JSObject::defaultHasInstance(exec, value, proto);
2205 return JSValue::encode(jsBoolean(result));
2206}
2207
2208EncodedJSValue JIT_OPERATION operationInstanceOfGeneric(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2209{
2210 VM& vm = exec->vm();
2211 NativeCallFrameTracer tracer(&vm, exec);
2212 JSValue value = JSValue::decode(encodedValue);
2213 JSValue proto = JSValue::decode(encodedProto);
2214
2215 stubInfo->tookSlowPath = true;
2216
2217 bool result = JSObject::defaultHasInstance(exec, value, proto);
2218 return JSValue::encode(jsBoolean(result));
2219}
2220
2221EncodedJSValue JIT_OPERATION operationInstanceOfOptimize(ExecState* exec, StructureStubInfo* stubInfo, EncodedJSValue encodedValue, EncodedJSValue encodedProto)
2222{
2223 VM& vm = exec->vm();
2224 NativeCallFrameTracer tracer(&vm, exec);
2225 auto scope = DECLARE_THROW_SCOPE(vm);
2226 JSValue value = JSValue::decode(encodedValue);
2227 JSValue proto = JSValue::decode(encodedProto);
2228
2229 bool result = JSObject::defaultHasInstance(exec, value, proto);
2230 RETURN_IF_EXCEPTION(scope, JSValue::encode(jsUndefined()));
2231
2232 if (stubInfo->considerCaching(exec->codeBlock(), value.structureOrNull()))
2233 repatchInstanceOf(exec, value, proto, *stubInfo, result);
2234
2235 return JSValue::encode(jsBoolean(result));
2236}
2237
2238int32_t JIT_OPERATION operationSizeFrameForForwardArguments(ExecState* exec, EncodedJSValue, int32_t numUsedStackSlots, int32_t)
2239{
2240 VM& vm = exec->vm();
2241 NativeCallFrameTracer tracer(&vm, exec);
2242 return sizeFrameForForwardArguments(exec, vm, numUsedStackSlots);
2243}
2244
2245int32_t JIT_OPERATION operationSizeFrameForVarargs(ExecState* exec, EncodedJSValue encodedArguments, int32_t numUsedStackSlots, int32_t firstVarArgOffset)
2246{
2247 VM& vm = exec->vm();
2248 NativeCallFrameTracer tracer(&vm, exec);
2249 JSValue arguments = JSValue::decode(encodedArguments);
2250 return sizeFrameForVarargs(exec, vm, arguments, numUsedStackSlots, firstVarArgOffset);
2251}
2252
2253CallFrame* JIT_OPERATION operationSetupForwardArgumentsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue, int32_t, int32_t length)
2254{
2255 VM& vm = exec->vm();
2256 NativeCallFrameTracer tracer(&vm, exec);
2257 setupForwardArgumentsFrame(exec, newCallFrame, length);
2258 return newCallFrame;
2259}
2260
2261CallFrame* JIT_OPERATION operationSetupVarargsFrame(ExecState* exec, CallFrame* newCallFrame, EncodedJSValue encodedArguments, int32_t firstVarArgOffset, int32_t length)
2262{
2263 VM& vm = exec->vm();
2264 NativeCallFrameTracer tracer(&vm, exec);
2265 JSValue arguments = JSValue::decode(encodedArguments);
2266 setupVarargsFrame(exec, newCallFrame, arguments, firstVarArgOffset, length);
2267 return newCallFrame;
2268}
2269
// Slow path for switch_char when the key's type is not statically known.
// Resolves the machine-code jump target for |encodedKey| from the code
// block's simple jump table: only single-character string keys index the
// table; all other values take the default target.
char* JIT_OPERATION operationSwitchCharWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
    void* result = jumpTable.ctiDefault.executableAddress();

    if (key.isString()) {
        // NOTE(review): value(exec) may need to resolve a rope; presumably a
        // resulting exception is picked up by the caller's exception check —
        // confirm.
        StringImpl* value = asString(key)->value(exec).impl();
        if (value->length() == 1)
            result = jumpTable.ctiForValue((*value)[0]).executableAddress();
    }

    // Every returned pointer must carry the switch-target pointer tag.
    assertIsTaggedWith(result, JSSwitchPtrTag);
    return reinterpret_cast<char*>(result);
}
2289
2290char* JIT_OPERATION operationSwitchImmWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
2291{
2292 VM& vm = exec->vm();
2293 NativeCallFrameTracer tracer(&vm, exec);
2294 JSValue key = JSValue::decode(encodedKey);
2295 CodeBlock* codeBlock = exec->codeBlock();
2296
2297 SimpleJumpTable& jumpTable = codeBlock->switchJumpTable(tableIndex);
2298 void* result;
2299 if (key.isInt32())
2300 result = jumpTable.ctiForValue(key.asInt32()).executableAddress();
2301 else if (key.isDouble() && key.asDouble() == static_cast<int32_t>(key.asDouble()))
2302 result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
2303 else
2304 result = jumpTable.ctiDefault.executableAddress();
2305 assertIsTaggedWith(result, JSSwitchPtrTag);
2306 return reinterpret_cast<char*>(result);
2307}
2308
// Slow path for switch_string when the key's type is not statically known.
// Resolves the machine-code jump target for |encodedKey| from the code
// block's string jump table; non-string keys take the default target.
char* JIT_OPERATION operationSwitchStringWithUnknownKeyType(ExecState* exec, EncodedJSValue encodedKey, size_t tableIndex)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    JSValue key = JSValue::decode(encodedKey);
    CodeBlock* codeBlock = exec->codeBlock();

    void* result;
    StringJumpTable& jumpTable = codeBlock->stringSwitchJumpTable(tableIndex);

    if (key.isString()) {
        // NOTE(review): value(exec) may need to resolve a rope; presumably a
        // resulting exception is picked up by the caller's exception check —
        // confirm.
        StringImpl* value = asString(key)->value(exec).impl();
        result = jumpTable.ctiForValue(value).executableAddress();
    } else
        result = jumpTable.ctiDefault.executableAddress();

    // Every returned pointer must carry the switch-target pointer tag.
    assertIsTaggedWith(result, JSSwitchPtrTag);
    return reinterpret_cast<char*>(result);
}
2328
// Slow path for get_from_scope: looks up the bytecode's identifier on the
// resolved scope object, honoring the resolve mode (throwing on a missing
// binding under ThrowIfNotFound) and performing the TDZ check for global
// lexical bindings. Also tries to install a faster global-lookup cache.
EncodedJSValue JIT_OPERATION operationGetFromScope(ExecState* exec, const Instruction* pc)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();

    auto bytecode = pc->as<OpGetFromScope>();
    const Identifier& ident = codeBlock->identifier(bytecode.m_var);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(bytecode.m_scope.offset()).jsValue());
    GetPutInfo& getPutInfo = bytecode.metadata(codeBlock).m_getPutInfo;

    // ModuleVar is always converted to ClosureVar for get_from_scope.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    RELEASE_AND_RETURN(throwScope, JSValue::encode(scope->getPropertySlot(exec, ident, [&] (bool found, PropertySlot& slot) -> JSValue {
        if (!found) {
            // A missing binding is only an error under ThrowIfNotFound;
            // otherwise the result is undefined.
            if (getPutInfo.resolveMode() == ThrowIfNotFound)
                throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
            return jsUndefined();
        }

        JSValue result = JSValue();
        if (scope->isGlobalLexicalEnvironment()) {
            // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
            result = slot.getValue(exec, ident);
            if (result == jsTDZValue()) {
                throwException(exec, throwScope, createTDZError(exec));
                return jsUndefined();
            }
        }

        CommonSlowPaths::tryCacheGetFromScopeGlobal(exec, vm, bytecode, scope, slot, ident);

        // If the TDZ branch above didn't already read the value, read it now.
        if (!result)
            return slot.getValue(exec, ident);
        return result;
    })));
}
2369
// Slow path for put_to_scope: stores a value into the resolved scope,
// handling (in order) the fast LocalClosureVar case, the TDZ check for
// global lexical bindings, the ThrowIfNotFound check for missing bindings,
// and finally a generic put. Also tries to install a global-store cache.
void JIT_OPERATION operationPutToScope(ExecState* exec, const Instruction* pc)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto throwScope = DECLARE_THROW_SCOPE(vm);

    CodeBlock* codeBlock = exec->codeBlock();
    auto bytecode = pc->as<OpPutToScope>();
    auto& metadata = bytecode.metadata(codeBlock);

    const Identifier& ident = codeBlock->identifier(bytecode.m_var);
    JSObject* scope = jsCast<JSObject*>(exec->uncheckedR(bytecode.m_scope.offset()).jsValue());
    JSValue value = exec->r(bytecode.m_value.offset()).jsValue();
    GetPutInfo& getPutInfo = metadata.m_getPutInfo;

    // ModuleVar does not keep the scope register value alive in DFG.
    ASSERT(getPutInfo.resolveType() != ModuleVar);

    if (getPutInfo.resolveType() == LocalClosureVar) {
        // Direct store into the lexical environment's slot; fire the
        // associated watchpoint set (if any) so dependent code deoptimizes.
        JSLexicalEnvironment* environment = jsCast<JSLexicalEnvironment*>(scope);
        environment->variableAt(ScopeOffset(metadata.m_operand)).set(vm, environment, value);
        if (WatchpointSet* set = metadata.m_watchpointSet)
            set->touch(vm, "Executed op_put_scope<LocalClosureVar>");
        return;
    }

    bool hasProperty = scope->hasProperty(exec, ident);
    RETURN_IF_EXCEPTION(throwScope, void());
    if (hasProperty
        && scope->isGlobalLexicalEnvironment()
        && !isInitialization(getPutInfo.initializationMode())) {
        // When we can't statically prove we need a TDZ check, we must perform the check on the slow path.
        PropertySlot slot(scope, PropertySlot::InternalMethodType::Get);
        JSGlobalLexicalEnvironment::getOwnPropertySlot(scope, exec, ident, slot);
        if (slot.getValue(exec, ident) == jsTDZValue()) {
            throwException(exec, throwScope, createTDZError(exec));
            return;
        }
    }

    // Assigning to a missing binding is an error under ThrowIfNotFound.
    if (getPutInfo.resolveMode() == ThrowIfNotFound && !hasProperty) {
        throwException(exec, throwScope, createUndefinedVariableError(exec, ident));
        return;
    }

    PutPropertySlot slot(scope, codeBlock->isStrictMode(), PutPropertySlot::UnknownContext, isInitialization(getPutInfo.initializationMode()));
    scope->methodTable(vm)->put(scope, exec, ident, value, slot);

    RETURN_IF_EXCEPTION(throwScope, void());

    CommonSlowPaths::tryCachePutToScopeGlobal(exec, codeBlock, bytecode, scope, slot, ident);
}
2422
// Throws |encodedExceptionValue| from JIT code and unwinds to the handler.
// Nothing is returned directly: genericUnwind stashes the unwinding targets
// on the VM (see comment below) for the JIT thunk to pick up.
void JIT_OPERATION operationThrow(ExecState* exec, EncodedJSValue encodedExceptionValue)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);

    JSValue exceptionValue = JSValue::decode(encodedExceptionValue);
    throwException(exec, scope, exceptionValue);

    // Results stored out-of-band in vm.targetMachinePCForThrow & vm.callFrameForCatch
    genericUnwind(vm, exec);
}
2435
2436char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
2437{
2438 VM& vm = exec->vm();
2439 NativeCallFrameTracer tracer(&vm, exec);
2440
2441 ASSERT(!object->structure(vm)->outOfLineCapacity());
2442 Butterfly* result = object->allocateMoreOutOfLineStorage(vm, 0, initialOutOfLineCapacity);
2443 object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
2444 return reinterpret_cast<char*>(result);
2445}
2446
2447char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
2448{
2449 VM& vm = exec->vm();
2450 NativeCallFrameTracer tracer(&vm, exec);
2451
2452 Butterfly* result = object->allocateMoreOutOfLineStorage(vm, object->structure(vm)->outOfLineCapacity(), newSize);
2453 object->nukeStructureAndSetButterfly(vm, object->structureID(), result);
2454 return reinterpret_cast<char*>(result);
2455}
2456
2457void JIT_OPERATION operationOSRWriteBarrier(ExecState* exec, JSCell* cell)
2458{
2459 VM* vm = &exec->vm();
2460 NativeCallFrameTracer tracer(vm, exec);
2461 vm->heap.writeBarrier(cell);
2462}
2463
2464void JIT_OPERATION operationWriteBarrierSlowPath(ExecState* exec, JSCell* cell)
2465{
2466 VM* vm = &exec->vm();
2467 NativeCallFrameTracer tracer(vm, exec);
2468 vm->heap.writeBarrierSlowPath(cell);
2469}
2470
// Unwinds to the handler for the VM's pending exception. Unlike most
// operations here, the VM is passed explicitly by the caller. genericUnwind
// stores the handler location in vm->targetMachinePCForThrow (asserted).
void JIT_OPERATION lookupExceptionHandler(VM* vm, ExecState* exec)
{
    NativeCallFrameTracer tracer(vm, exec);
    genericUnwind(vm, exec);
    ASSERT(vm->targetMachinePCForThrow);
}
2477
// Variant of lookupExceptionHandler used when the current frame is a stack
// overflow frame; the pending exception must be a StackOverflowError.
void JIT_OPERATION lookupExceptionHandlerFromCallerFrame(VM* vm, ExecState* exec)
{
    ASSERT(exec->isStackOverflowFrame());
    ASSERT(jsCast<ErrorInstance*>(vm->exceptionForInspection()->value().asCell())->isStackOverflowError());
    lookupExceptionHandler(vm, exec);
}
2484
2485void JIT_OPERATION operationVMHandleException(ExecState* exec)
2486{
2487 VM* vm = &exec->vm();
2488 NativeCallFrameTracer tracer(vm, exec);
2489 genericUnwind(vm, exec);
2490}
2491
2492// This function "should" just take the ExecState*, but doing so would make it more difficult
2493// to call from exception check sites. So, unlike all of our other functions, we allow
2494// ourselves to play some gnarly ABI tricks just to simplify the calling convention. This is
2495// particularly safe here since this is never called on the critical path - it's only for
2496// testing.
// Testing-only hook (see comment above): simulates exception throws at
// exception-check sites to exercise unwinding paths.
void JIT_OPERATION operationExceptionFuzz(ExecState* exec)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);
    auto scope = DECLARE_THROW_SCOPE(*vm);
    UNUSED_PARAM(scope);
#if COMPILER(GCC_COMPATIBLE)
    // The return PC identifies which exception-check site is being fuzzed.
    void* returnPC = __builtin_return_address(0);
    doExceptionFuzzing(exec, scope, "JITOperations", returnPC);
#endif // COMPILER(GCC_COMPATIBLE)
}
2508
2509ALWAYS_INLINE static EncodedJSValue unprofiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2510{
2511 VM* vm = &exec->vm();
2512 NativeCallFrameTracer tracer(vm, exec);
2513
2514 JSValue op1 = JSValue::decode(encodedOp1);
2515 JSValue op2 = JSValue::decode(encodedOp2);
2516
2517 return JSValue::encode(jsAdd(exec, op1, op2));
2518}
2519
// Shared implementation for the profiled value-add operations: records the
// operand types and the result type in |arithProfile| around the generic add.
// NOTE(review): unlike profiledMul/profiledSub there is no exception check
// between jsAdd and observeResult, so a thrown add observes an empty value —
// confirm observeResult tolerates that.
ALWAYS_INLINE static EncodedJSValue profiledAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    arithProfile.observeLHSAndRHS(op1, op2);
    JSValue result = jsAdd(exec, op1, op2);
    arithProfile.observeResult(result);

    return JSValue::encode(result);
}
2534
// Thin ABI wrapper over the shared unprofiled add helper.
EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
    return unprofiledAdd(exec, encodedOp1, encodedOp2);
}
2539
// Thin ABI wrapper over the shared profiled add helper; |arithProfile| must
// be non-null (the JIT passes the profile it baked into the call site).
EncodedJSValue JIT_OPERATION operationValueAddProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
{
    ASSERT(arithProfile);
    return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
}
2545
// First slow-path hit for a profiled add IC: observes the operand types,
// regenerates the IC's out-of-line code (with the non-regenerating profiled
// variant as its future slow path), then performs the add and profiles the
// result.
EncodedJSValue JIT_OPERATION operationValueAddProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    ArithProfile* arithProfile = addIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(op1, op2);
    // Future slow-path calls go to the NoOptimize variant so we only
    // regenerate the IC once per slow-path entry.
    auto nonOptimizeVariant = operationValueAddProfiledNoOptimize;
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    JSValue result = jsAdd(exec, op1, op2);
    arithProfile->observeResult(result);

    return JSValue::encode(result);
}
2569
2570EncodedJSValue JIT_OPERATION operationValueAddProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
2571{
2572 VM* vm = &exec->vm();
2573 NativeCallFrameTracer tracer(vm, exec);
2574
2575 ArithProfile* arithProfile = addIC->arithProfile();
2576 ASSERT(arithProfile);
2577 return profiledAdd(exec, encodedOp1, encodedOp2, *arithProfile);
2578}
2579
// First slow-path hit for an unprofiled add IC: observes operand types into
// the IC's profile (if it has one), regenerates the IC's out-of-line code
// (with the NoOptimize variant as its future slow path), then adds.
EncodedJSValue JIT_OPERATION operationValueAddOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC* addIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    auto nonOptimizeVariant = operationValueAddNoOptimize;
    if (ArithProfile* arithProfile = addIC->arithProfile())
        arithProfile->observeLHSAndRHS(op1, op2);
    addIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return JSValue::encode(jsAdd(exec, op1, op2));
}
2599
2600EncodedJSValue JIT_OPERATION operationValueAddNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITAddIC*)
2601{
2602 VM* vm = &exec->vm();
2603 NativeCallFrameTracer tracer(vm, exec);
2604
2605 JSValue op1 = JSValue::decode(encodedOp1);
2606 JSValue op2 = JSValue::decode(encodedOp2);
2607
2608 JSValue result = jsAdd(exec, op1, op2);
2609
2610 return JSValue::encode(result);
2611}
2612
2613ALWAYS_INLINE static EncodedJSValue unprofiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2614{
2615 JSValue op1 = JSValue::decode(encodedOp1);
2616 JSValue op2 = JSValue::decode(encodedOp2);
2617
2618 return JSValue::encode(jsMul(exec, op1, op2));
2619}
2620
// Shared implementation for the profiled value-mul operations: observes the
// operand types (unless the caller already did, as in the Optimize variant),
// performs the generic multiply, then records the result type.
ALWAYS_INLINE static EncodedJSValue profiledMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    JSValue result = jsMul(exec, op1, op2);
    // Don't profile a result that doesn't exist because the multiply threw.
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2636
2637EncodedJSValue JIT_OPERATION operationValueMul(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2638{
2639 VM* vm = &exec->vm();
2640 NativeCallFrameTracer tracer(vm, exec);
2641
2642 return unprofiledMul(exec, encodedOp1, encodedOp2);
2643}
2644
2645EncodedJSValue JIT_OPERATION operationValueMulNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC*)
2646{
2647 VM* vm = &exec->vm();
2648 NativeCallFrameTracer tracer(vm, exec);
2649
2650 return unprofiledMul(exec, encodedOp1, encodedOp2);
2651}
2652
// First slow-path hit for an unprofiled mul IC: observes operand types into
// the IC's profile (if any), regenerates the IC's out-of-line code (with the
// NoOptimize variant as its future slow path), then multiplies.
EncodedJSValue JIT_OPERATION operationValueMulOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueMulNoOptimize;
    if (ArithProfile* arithProfile = mulIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledMul(exec, encodedOp1, encodedOp2);
}
2669
2670EncodedJSValue JIT_OPERATION operationValueMulProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2671{
2672 VM* vm = &exec->vm();
2673 NativeCallFrameTracer tracer(vm, exec);
2674
2675 ASSERT(arithProfile);
2676 return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile);
2677}
2678
// First slow-path hit for a profiled mul IC: observes the operand types,
// regenerates the IC's out-of-line code (with the profiled NoOptimize
// variant as its future slow path), then multiplies. The helper is told not
// to observe the operands again (false) since we already did above.
EncodedJSValue JIT_OPERATION operationValueMulProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = mulIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueMulProfiledNoOptimize;
    mulIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2696
2697EncodedJSValue JIT_OPERATION operationValueMulProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITMulIC* mulIC)
2698{
2699 VM* vm = &exec->vm();
2700 NativeCallFrameTracer tracer(vm, exec);
2701
2702 ArithProfile* arithProfile = mulIC->arithProfile();
2703 ASSERT(arithProfile);
2704 return profiledMul(exec, encodedOp1, encodedOp2, *arithProfile);
2705}
2706
// Generic negation slow path: ToPrimitive with a number hint, then either
// BigInt unary minus or numeric negation.
// NOTE(review): this passes PreferNumber while profiledNegate uses the
// hint-less toPrimitive overload — confirm whether that difference is
// intentional.
ALWAYS_INLINE static EncodedJSValue unprofiledNegate(ExecState* exec, EncodedJSValue encodedOperand)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);

    JSValue primValue = operand.toPrimitive(exec, PreferNumber);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    if (primValue.isBigInt())
        return JSValue::encode(JSBigInt::unaryMinus(vm, asBigInt(primValue)));

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2725
// Profiled negation slow path: records the operand and result types in
// |arithProfile| around the generic negate.
// NOTE(review): this uses the hint-less toPrimitive overload while
// unprofiledNegate passes PreferNumber — confirm whether that difference is
// intentional.
ALWAYS_INLINE static EncodedJSValue profiledNegate(ExecState* exec, EncodedJSValue encodedOperand, ArithProfile& arithProfile)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);
    arithProfile.observeLHS(operand);

    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    if (primValue.isBigInt()) {
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        arithProfile.observeResult(result);

        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2751
// Thin ABI wrapper over the shared unprofiled negate helper.
EncodedJSValue JIT_OPERATION operationArithNegate(ExecState* exec, EncodedJSValue operand)
{
    return unprofiledNegate(exec, operand);
}
2756
// Thin ABI wrapper over the shared profiled negate helper; |arithProfile|
// must be non-null.
EncodedJSValue JIT_OPERATION operationArithNegateProfiled(ExecState* exec, EncodedJSValue operand, ArithProfile* arithProfile)
{
    ASSERT(arithProfile);
    return profiledNegate(exec, operand, *arithProfile);
}
2762
// First slow-path hit for a profiled negate IC: observes the operand type,
// regenerates the IC's out-of-line code (with operationArithNegateProfiled
// as its future slow path), then performs the negate inline and profiles
// the result.
EncodedJSValue JIT_OPERATION operationArithNegateProfiledOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);

    ArithProfile* arithProfile = negIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegateProfiled);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    if (primValue.isBigInt()) {
        JSBigInt* result = JSBigInt::unaryMinus(vm, asBigInt(primValue));
        arithProfile->observeResult(result);
        return JSValue::encode(result);
    }

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    JSValue result = jsNumber(-number);
    arithProfile->observeResult(result);
    return JSValue::encode(result);
}
2795
// First slow-path hit for an unprofiled negate IC: observes the operand type
// into the IC's profile (if any), regenerates the IC's out-of-line code
// (with operationArithNegate as its future slow path), then negates inline.
EncodedJSValue JIT_OPERATION operationArithNegateOptimize(ExecState* exec, EncodedJSValue encodedOperand, JITNegIC* negIC)
{
    VM& vm = exec->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    NativeCallFrameTracer tracer(&vm, exec);

    JSValue operand = JSValue::decode(encodedOperand);

    if (ArithProfile* arithProfile = negIC->arithProfile())
        arithProfile->observeLHS(operand);
    negIC->generateOutOfLine(exec->codeBlock(), operationArithNegate);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    JSValue primValue = operand.toPrimitive(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());

    if (primValue.isBigInt())
        return JSValue::encode(JSBigInt::unaryMinus(vm, asBigInt(primValue)));

    double number = primValue.toNumber(exec);
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    return JSValue::encode(jsNumber(-number));
}
2822
2823ALWAYS_INLINE static EncodedJSValue unprofiledSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2824{
2825 JSValue op1 = JSValue::decode(encodedOp1);
2826 JSValue op2 = JSValue::decode(encodedOp2);
2827
2828 return JSValue::encode(jsSub(exec, op1, op2));
2829}
2830
// Shared implementation for the profiled value-sub operations: observes the
// operand types (unless the caller already did, as in the Optimize variant),
// performs the generic subtract, then records the result type. The VM is
// passed in by callers that already hold a reference.
ALWAYS_INLINE static EncodedJSValue profiledSub(VM& vm, ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile& arithProfile, bool shouldObserveLHSAndRHSTypes = true)
{
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSValue op1 = JSValue::decode(encodedOp1);
    JSValue op2 = JSValue::decode(encodedOp2);

    if (shouldObserveLHSAndRHSTypes)
        arithProfile.observeLHSAndRHS(op1, op2);

    JSValue result = jsSub(exec, op1, op2);
    // Don't profile a result that doesn't exist because the subtract threw.
    RETURN_IF_EXCEPTION(scope, encodedJSValue());
    arithProfile.observeResult(result);
    return JSValue::encode(result);
}
2846
2847EncodedJSValue JIT_OPERATION operationValueSub(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
2848{
2849 VM* vm = &exec->vm();
2850 NativeCallFrameTracer tracer(vm, exec);
2851 return unprofiledSub(exec, encodedOp1, encodedOp2);
2852}
2853
2854EncodedJSValue JIT_OPERATION operationValueSubProfiled(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, ArithProfile* arithProfile)
2855{
2856 ASSERT(arithProfile);
2857
2858 VM* vm = &exec->vm();
2859 NativeCallFrameTracer tracer(vm, exec);
2860
2861 return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2862}
2863
// First slow-path hit for an unprofiled sub IC: observes operand types into
// the IC's profile (if any), regenerates the IC's out-of-line code (with the
// NoOptimize variant as its future slow path), then subtracts.
EncodedJSValue JIT_OPERATION operationValueSubOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    auto nonOptimizeVariant = operationValueSubNoOptimize;
    if (ArithProfile* arithProfile = subIC->arithProfile())
        arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return unprofiledSub(exec, encodedOp1, encodedOp2);
}
2880
2881EncodedJSValue JIT_OPERATION operationValueSubNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC*)
2882{
2883 VM* vm = &exec->vm();
2884 NativeCallFrameTracer tracer(vm, exec);
2885
2886 return unprofiledSub(exec, encodedOp1, encodedOp2);
2887}
2888
// First slow-path hit for a profiled sub IC: observes the operand types,
// regenerates the IC's out-of-line code (with the profiled NoOptimize
// variant as its future slow path), then subtracts. The helper is told not
// to observe the operands again (false) since we already did above.
EncodedJSValue JIT_OPERATION operationValueSubProfiledOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
{
    VM* vm = &exec->vm();
    NativeCallFrameTracer tracer(vm, exec);

    ArithProfile* arithProfile = subIC->arithProfile();
    ASSERT(arithProfile);
    arithProfile->observeLHSAndRHS(JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
    auto nonOptimizeVariant = operationValueSubProfiledNoOptimize;
    subIC->generateOutOfLine(exec->codeBlock(), nonOptimizeVariant);

#if ENABLE(MATH_IC_STATS)
    exec->codeBlock()->dumpMathICStats();
#endif

    return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile, false);
}
2906
2907EncodedJSValue JIT_OPERATION operationValueSubProfiledNoOptimize(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2, JITSubIC* subIC)
2908{
2909 VM* vm = &exec->vm();
2910 NativeCallFrameTracer tracer(vm, exec);
2911
2912 ArithProfile* arithProfile = subIC->arithProfile();
2913 ASSERT(arithProfile);
2914 return profiledSub(*vm, exec, encodedOp1, encodedOp2, *arithProfile);
2915}
2916
2917void JIT_OPERATION operationProcessTypeProfilerLog(ExecState* exec)
2918{
2919 VM& vm = exec->vm();
2920 NativeCallFrameTracer tracer(&vm, exec);
2921 vm.typeProfilerLog()->processLogEntries(vm, "Log Full, called from inside baseline JIT"_s);
2922}
2923
2924void JIT_OPERATION operationProcessShadowChickenLog(ExecState* exec)
2925{
2926 VM& vm = exec->vm();
2927 NativeCallFrameTracer tracer(&vm, exec);
2928 RELEASE_ASSERT(vm.shadowChicken());
2929 vm.shadowChicken()->update(vm, exec);
2930}
2931
// Called with a pending exception (RELEASE_ASSERT). Returns 1 — after
// unwinding — when the exception is a terminated-execution exception (which
// the name indicates must not be caught by JS code); returns 0 for ordinary
// catchable exceptions so the caller's handler can proceed.
int32_t JIT_OPERATION operationCheckIfExceptionIsUncatchableAndNotifyProfiler(ExecState* exec)
{
    VM& vm = exec->vm();
    NativeCallFrameTracer tracer(&vm, exec);
    auto scope = DECLARE_THROW_SCOPE(vm);
    RELEASE_ASSERT(!!scope.exception());

    if (isTerminatedExecutionException(vm, scope.exception())) {
        genericUnwind(&vm, exec);
        return 1;
    }
    return 0;
}
2945
2946} // extern "C"
2947
2948} // namespace JSC
2949
2950#endif // ENABLE(JIT)
2951