1/*
2 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27
28#if ENABLE(JIT)
29#if USE(JSVALUE32_64)
30#include "JIT.h"
31
32#include "CodeBlock.h"
33#include "DirectArguments.h"
34#include "GCAwareJITStubRoutine.h"
35#include "InterpreterInlines.h"
36#include "JITInlines.h"
37#include "JSArray.h"
38#include "JSFunction.h"
39#include "JSLexicalEnvironment.h"
40#include "LinkBuffer.h"
41#include "OpcodeInlines.h"
42#include "ResultType.h"
43#include "SlowPathCall.h"
44#include "StructureStubInfo.h"
45#include <wtf/StringPrintStream.h>
46
47
48namespace JSC {
49
50void JIT::emit_op_put_getter_by_id(const Instruction* currentInstruction)
51{
52 auto bytecode = currentInstruction->as<OpPutGetterById>();
53 int base = bytecode.m_base.offset();
54 int property = bytecode.m_property;
55 int options = bytecode.m_attributes;
56 int getter = bytecode.m_accessor.offset();
57
58 emitLoadPayload(base, regT1);
59 emitLoadPayload(getter, regT3);
60 callOperation(operationPutGetterById, m_codeBlock->globalObject(), regT1, m_codeBlock->identifier(property).impl(), options, regT3);
61}
62
63void JIT::emit_op_put_setter_by_id(const Instruction* currentInstruction)
64{
65 auto bytecode = currentInstruction->as<OpPutSetterById>();
66 int base = bytecode.m_base.offset();
67 int property = bytecode.m_property;
68 int options = bytecode.m_attributes;
69 int setter = bytecode.m_accessor.offset();
70
71 emitLoadPayload(base, regT1);
72 emitLoadPayload(setter, regT3);
73 callOperation(operationPutSetterById, m_codeBlock->globalObject(), regT1, m_codeBlock->identifier(property).impl(), options, regT3);
74}
75
76void JIT::emit_op_put_getter_setter_by_id(const Instruction* currentInstruction)
77{
78 auto bytecode = currentInstruction->as<OpPutGetterSetterById>();
79 int base = bytecode.m_base.offset();
80 int property = bytecode.m_property;
81 int attributes = bytecode.m_attributes;
82 int getter = bytecode.m_getter.offset();
83 int setter = bytecode.m_setter.offset();
84
85 emitLoadPayload(base, regT1);
86 emitLoadPayload(getter, regT3);
87 emitLoadPayload(setter, regT4);
88 callOperation(operationPutGetterSetter, m_codeBlock->globalObject(), regT1, m_codeBlock->identifier(property).impl(), attributes, regT3, regT4);
89}
90
91void JIT::emit_op_put_getter_by_val(const Instruction* currentInstruction)
92{
93 auto bytecode = currentInstruction->as<OpPutGetterByVal>();
94 int base = bytecode.m_base.offset();
95 int property = bytecode.m_property.offset();
96 int32_t attributes = bytecode.m_attributes;
97 int getter = bytecode.m_accessor.offset();
98
99 emitLoadPayload(base, regT2);
100 emitLoad(property, regT1, regT0);
101 emitLoadPayload(getter, regT3);
102 callOperation(operationPutGetterByVal, m_codeBlock->globalObject(), regT2, JSValueRegs(regT1, regT0), attributes, regT3);
103}
104
105void JIT::emit_op_put_setter_by_val(const Instruction* currentInstruction)
106{
107 auto bytecode = currentInstruction->as<OpPutSetterByVal>();
108 int base = bytecode.m_base.offset();
109 int property = bytecode.m_property.offset();
110 int32_t attributes = bytecode.m_attributes;
111 int setter = bytecode.m_accessor.offset();
112
113 emitLoadPayload(base, regT2);
114 emitLoad(property, regT1, regT0);
115 emitLoadPayload(setter, regT3);
116 callOperation(operationPutSetterByVal, m_codeBlock->globalObject(), regT2, JSValueRegs(regT1, regT0), attributes, regT3);
117}
118
119void JIT::emit_op_del_by_id(const Instruction* currentInstruction)
120{
121 auto bytecode = currentInstruction->as<OpDelById>();
122 int dst = bytecode.m_dst.offset();
123 int base = bytecode.m_base.offset();
124 int property = bytecode.m_property;
125 emitLoad(base, regT1, regT0);
126 callOperation(operationDeleteByIdJSResult, dst, m_codeBlock->globalObject(), JSValueRegs(regT1, regT0), m_codeBlock->identifier(property).impl());
127}
128
129void JIT::emit_op_del_by_val(const Instruction* currentInstruction)
130{
131 auto bytecode = currentInstruction->as<OpDelByVal>();
132 int dst = bytecode.m_dst.offset();
133 int base = bytecode.m_base.offset();
134 int property = bytecode.m_property.offset();
135 emitLoad2(base, regT1, regT0, property, regT3, regT2);
136 callOperation(operationDeleteByValJSResult, dst, m_codeBlock->globalObject(), JSValueRegs(regT1, regT0), JSValueRegs(regT3, regT2));
137}
138
// get_by_val: dst = base[property]. Profiles the base's indexing type, then
// emits a JITGetByValGenerator fast path; non-cell bases and generator
// bail-outs go to emitSlow_op_get_by_val.
void JIT::emit_op_get_by_val(const Instruction* currentInstruction)
{
    // FIXME: Implement IC here:
    // https://bugs.webkit.org/show_bug.cgi?id=204082
    auto bytecode = currentInstruction->as<OpGetByVal>();
    auto& metadata = bytecode.metadata(m_codeBlock);
    int dst = bytecode.m_dst.offset();
    int base = bytecode.m_base.offset();
    int property = bytecode.m_property.offset();
    ArrayProfile* profile = &metadata.m_arrayProfile;

    // base -> (tag regT1, payload regT0), property -> (tag regT3, payload regT2).
    emitLoad2(base, regT1, regT0, property, regT3, regT2);

    emitJumpSlowCaseIfNotJSCell(base, regT1);
    // regT4 is a scratch register for the profiling site.
    emitArrayProfilingSiteWithCell(regT0, regT4, profile);

    JITGetByValGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(m_bytecodeIndex), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2), JSValueRegs(regT1, regT0));
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByVals.append(gen);

    emitValueProfilingSite(bytecode.metadata(m_codeBlock));
    // The generator leaves the result in (tag regT1, payload regT0).
    emitStore(dst, regT1, regT0);
}
165
// Slow path for get_by_val: links every fast-path bail-out, then calls the
// generic operation with the IC's stub info and array profile, profiling the
// returned value. The generator records the cold-path entry for repatching.
void JIT::emitSlow_op_get_by_val(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    auto bytecode = currentInstruction->as<OpGetByVal>();
    int dst = bytecode.m_dst.offset();
    auto& metadata = bytecode.metadata(m_codeBlock);
    ArrayProfile* profile = &metadata.m_arrayProfile;

    // Generators are consumed in the order emit_op_get_by_val appended them.
    JITGetByValGenerator& gen = m_getByVals[m_getByValIndex];
    ++m_getByValIndex;

    linkAllSlowCases(iter);

    Label coldPathBegin = label();
    // Fast path left base in (regT1, regT0) and property in (regT3, regT2).
    Call call = callOperationWithProfile(bytecode.metadata(m_codeBlock), operationGetByValGeneric, dst, TrustedImmPtr(m_codeBlock->globalObject()), gen.stubInfo(), profile, JSValueRegs(regT1, regT0), JSValueRegs(regT3, regT2));
    gen.reportSlowPathCall(coldPathBegin, call);
}
182
// put_by_val_direct shares the templated put_by_val fast path; only the
// bytecode struct type differs (the slow path later selects the direct
// operation based on the opcode).
void JIT::emit_op_put_by_val_direct(const Instruction* currentInstruction)
{
    emit_op_put_by_val<OpPutByValDirect>(currentInstruction);
}
187
// put_by_val fast path, shared by op_put_by_val and op_put_by_val_direct.
// Guards: base must be a cell, property an int32, array not copy-on-write;
// then dispatches on the profiled indexing shape to one of the shape-specific
// emitters. All guard failures and emitter slow cases funnel into the
// patchable by-val IC machinery via ByValCompilationInfo.
template<typename Op>
void JIT::emit_op_put_by_val(const Instruction* currentInstruction)
{
    auto bytecode = currentInstruction->as<Op>();
    auto& metadata = bytecode.metadata(m_codeBlock);
    int base = bytecode.m_base.offset();
    int property = bytecode.m_property.offset();
    ArrayProfile* profile = &metadata.m_arrayProfile;
    ByValInfo* byValInfo = m_codeBlock->addByValInfo();

    // base -> (tag regT1, payload regT0), property -> (tag regT3, payload regT2).
    emitLoad2(base, regT1, regT0, property, regT3, regT2);

    emitJumpSlowCaseIfNotJSCell(base, regT1);
    // Non-int32 property keys are patchable (the IC may later specialize on a
    // cached identifier), so this is a patchable jump, not a plain slow case.
    PatchableJump notIndex = patchableBranch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag));
    addSlowCase(notIndex);
    // Leaves the base's indexing type byte in regT1 (base tag is dead now).
    emitArrayProfilingSiteWithCell(regT0, regT1, profile);

    PatchableJump badType;
    JumpList slowCases;

    // FIXME: Maybe we should do this inline?
    // Copy-on-write butterflies must be materialized before any store.
    addSlowCase(branchTest32(NonZero, regT1, TrustedImm32(CopyOnWrite)));
    and32(TrustedImm32(IndexingShapeMask), regT1);

    // Specialize the fast path on the shape the profile has seen so far.
    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(bytecode, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(bytecode, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(bytecode, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(bytecode, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    // Record everything the by-val IC needs to repatch this site later.
    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeIndex, notIndex, badType, mode, profile, done, done));
}
238
239template <typename Op>
240JIT::JumpList JIT::emitGenericContiguousPutByVal(Op bytecode, PatchableJump& badType, IndexingType indexingShape)
241{
242 auto& metadata = bytecode.metadata(m_codeBlock);
243 int base = bytecode.m_base.offset();
244 int value = bytecode.m_value.offset();
245 ArrayProfile* profile = &metadata.m_arrayProfile;
246
247 JumpList slowCases;
248
249 badType = patchableBranch32(NotEqual, regT1, TrustedImm32(ContiguousShape));
250
251 loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
252 Jump outOfBounds = branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength()));
253
254 Label storeResult = label();
255 emitLoad(value, regT1, regT0);
256 switch (indexingShape) {
257 case Int32Shape:
258 slowCases.append(branchIfNotInt32(regT1));
259 store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
260 store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
261 break;
262 case ContiguousShape:
263 store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
264 store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
265 emitLoad(base, regT2, regT3);
266 emitWriteBarrier(base, value, ShouldFilterValue);
267 break;
268 case DoubleShape: {
269 Jump notInt = branchIfNotInt32(regT1);
270 convertInt32ToDouble(regT0, fpRegT0);
271 Jump ready = jump();
272 notInt.link(this);
273 moveIntsToDouble(regT0, regT1, fpRegT0, fpRegT1);
274 slowCases.append(branchIfNaN(fpRegT0));
275 ready.link(this);
276 storeDouble(fpRegT0, BaseIndex(regT3, regT2, TimesEight));
277 break;
278 }
279 default:
280 CRASH();
281 break;
282 }
283
284 Jump done = jump();
285
286 outOfBounds.link(this);
287 slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfVectorLength())));
288
289 emitArrayProfileStoreToHoleSpecialCase(profile);
290
291 add32(TrustedImm32(1), regT2, regT1);
292 store32(regT1, Address(regT3, Butterfly::offsetOfPublicLength()));
293 jump().linkTo(storeResult, this);
294
295 done.link(this);
296
297 return slowCases;
298}
299
// Fast path for put_by_val into ArrayStorageShape arrays. On entry:
// regT0 = base payload, regT1 = indexing shape bits, regT2 = int32 index.
// Handles in-bounds stores and stores into holes below length; out-of-vector
// stores are slow cases. badType fires when the shape is not ArrayStorage.
template <typename Op>
JIT::JumpList JIT::emitArrayStoragePutByVal(Op bytecode, PatchableJump& badType)
{
    auto& metadata = bytecode.metadata(m_codeBlock);
    int base = bytecode.m_base.offset();
    int value = bytecode.m_value.offset();
    ArrayProfile* profile = &metadata.m_arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    // Index must be inside the allocated vector.
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));

    // A hole is marked by the EmptyValueTag in the slot's tag word.
    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();

    // Store to a hole: bump m_numValuesInVector, and grow length if the index
    // is at or past it, then retry the store.
    empty.link(this);
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, ArrayStorage::lengthOffset())).linkTo(storeResult, this);

    // length = index + 1.
    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, ArrayStorage::lengthOffset()));
    jump().linkTo(storeResult, this);

    end.link(this);

    // The stored value may be a cell.
    emitWriteBarrier(base, value, ShouldFilterValue);

    return slowCases;
}
338
// Repatch target for a put_by_val site whose key turned out to be a cached
// identifier: emits a full put_by_id-style IC (fast path + cold path) for
// that identifier. doneCases collects exits back to the main code; slowCases
// collects identifier-check failures (wrong key -> generic path).
template <typename Op>
JITPutByIdGenerator JIT::emitPutByValWithCachedId(ByValInfo* byValInfo, Op bytecode, PutKind putKind, const Identifier& propertyName, JumpList& doneCases, JumpList& slowCases)
{
    // base: tag(regT1), payload(regT0)
    // property: tag(regT3), payload(regT2)

    int base = bytecode.m_base.offset();
    int value = bytecode.m_value.offset();

    // The key must be the exact cached identifier cell.
    slowCases.append(branchIfNotCell(regT3));
    emitByValIdentifierCheck(byValInfo, regT2, regT2, propertyName, slowCases);

    // Write barrier breaks the registers. So after issuing the write barrier,
    // reload the registers.
    emitWriteBarrier(base, value, ShouldFilterBase);
    emitLoadPayload(base, regT0);
    emitLoad(value, regT3, regT2);

    const Instruction* currentInstruction = m_codeBlock->instructions().at(byValInfo->bytecodeIndex).ptr();
    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2), regT1, m_codeBlock->ecmaMode(), putKind);
    gen.generateFastPath(*this);
    doneCases.append(jump());

    Label coldPathBegin = label();
    gen.slowPathJump().link(this);

    // JITPutByIdGenerator only preserve the value and the base's payload, we have to reload the tag.
    emitLoadTag(base, regT1);

    Call call = callOperation(gen.slowPathFunction(), m_codeBlock->globalObject(), gen.stubInfo(), JSValueRegs(regT3, regT2), JSValueRegs(regT1, regT0), propertyName.impl());
    gen.reportSlowPathCall(coldPathBegin, call);
    doneCases.append(jump());

    return gen;
}
376
// Slow path shared by op_put_by_val and op_put_by_val_direct: reloads all
// three operands as full JSValues and calls the (direct or generic) optimize
// operation, which may repatch the site via the recorded ByValInfo.
void JIT::emitSlow_op_put_by_val(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    bool isDirect = currentInstruction->opcodeID() == op_put_by_val_direct;
    int base;
    int property;
    int value;

    // Both bytecode structs expose the same three operands; extract them
    // through a generic lambda to avoid duplicating the body per opcode.
    auto load = [&](auto bytecode) {
        base = bytecode.m_base.offset();
        property = bytecode.m_property.offset();
        value = bytecode.m_value.offset();
    };

    if (isDirect)
        load(currentInstruction->as<OpPutByValDirect>());
    else
        load(currentInstruction->as<OpPutByVal>());

    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;

    linkAllSlowCases(iter);
    Label slowPath = label();

    // The register selection below is chosen to reduce register swapping on ARM.
    // Swapping shouldn't happen on other platforms.
    emitLoad(base, regT2, regT1);
    emitLoad(property, regT3, regT0);
    emitLoad(value, regT5, regT4);
    Call call = callOperation(isDirect ? operationDirectPutByValOptimize : operationPutByValOptimize, m_codeBlock->globalObject(), JSValueRegs(regT2, regT1), JSValueRegs(regT3, regT0), JSValueRegs(regT5, regT4), byValInfo);

    // Record the slow-path entry and return address so the IC can repatch.
    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}
411
// try_get_by_id: like get_by_id but with TryGetById access semantics
// (used by bytecode that must not trigger arbitrary getters). Emits a
// JITGetByIdGenerator IC fast path.
void JIT::emit_op_try_get_by_id(const Instruction* currentInstruction)
{
    auto bytecode = currentInstruction->as<OpTryGetById>();
    int dst = bytecode.m_dst.offset();
    int base = bytecode.m_base.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    // base -> (tag regT1, payload regT0); non-cells take the slow path.
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), AccessType::TryGetById);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite(bytecode.metadata(m_codeBlock));
    // Result is produced in (tag regT1, payload regT0).
    emitStore(dst, regT1, regT0);
}
432
// Slow path for try_get_by_id: calls the optimizing operation, which may
// build a stub and repatch the IC recorded by the matching fast path.
void JIT::emitSlow_op_try_get_by_id(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto bytecode = currentInstruction->as<OpTryGetById>();
    int resultVReg = bytecode.m_dst.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    // Generators are consumed in emission order.
    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    // Fast path left the base in (regT1, regT0).
    Call call = callOperation(operationTryGetByIdOptimize, resultVReg, m_codeBlock->globalObject(), gen.stubInfo(), JSValueRegs(regT1, regT0), ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}
449
450
// get_by_id_direct: own-property-only variant of get_by_id (no prototype
// chain lookup). Emits a JITGetByIdGenerator IC with GetByIdDirect access.
void JIT::emit_op_get_by_id_direct(const Instruction* currentInstruction)
{
    auto bytecode = currentInstruction->as<OpGetByIdDirect>();
    int dst = bytecode.m_dst.offset();
    int base = bytecode.m_base.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    // base -> (tag regT1, payload regT0); non-cells take the slow path.
    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), AccessType::GetByIdDirect);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite(bytecode.metadata(m_codeBlock));
    emitStore(dst, regT1, regT0);
}
471
// Slow path for get_by_id_direct: calls the optimizing operation (with value
// profiling) so the IC can be repatched.
void JIT::emitSlow_op_get_by_id_direct(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto bytecode = currentInstruction->as<OpGetByIdDirect>();
    int resultVReg = bytecode.m_dst.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    // Fast path left the base in (regT1, regT0).
    Call call = callOperationWithProfile(bytecode.metadata(m_codeBlock), operationGetByIdDirectOptimize, resultVReg, m_codeBlock->globalObject(), gen.stubInfo(), JSValueRegs(regT1, regT0), ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}
488
489
// get_by_id: dst = base.property via a JITGetByIdGenerator IC. Accesses to
// "length" additionally feed the array profile so the site can specialize
// into array-length mode.
void JIT::emit_op_get_by_id(const Instruction* currentInstruction)
{
    auto bytecode = currentInstruction->as<OpGetById>();
    auto& metadata = bytecode.metadata(m_codeBlock);
    int dst = bytecode.m_dst.offset();
    int base = bytecode.m_base.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);

    // Only profile the indexing type when the mode metadata says this site is
    // in ArrayLength mode; otherwise skip the profiling code.
    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling()) {
        Jump notArrayLengthMode = branch8(NotEqual, AbsoluteAddress(&metadata.m_modeMetadata.mode), TrustedImm32(static_cast<uint8_t>(GetByIdMode::ArrayLength)));
        emitArrayProfilingSiteWithCell(regT0, regT2, &metadata.m_modeMetadata.arrayLengthMode.arrayProfile);
        notArrayLengthMode.link(this);
    }

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), AccessType::GetById);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite(bytecode.metadata(m_codeBlock));
    emitStore(dst, regT1, regT0);
}
517
// Slow path for get_by_id: calls the optimizing operation (with value
// profiling) so the IC can be repatched.
void JIT::emitSlow_op_get_by_id(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto bytecode = currentInstruction->as<OpGetById>();
    int resultVReg = bytecode.m_dst.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    // Fast path left the base in (regT1, regT0).
    Call call = callOperationWithProfile(bytecode.metadata(m_codeBlock), operationGetByIdOptimize, resultVReg, m_codeBlock->globalObject(), gen.stubInfo(), JSValueRegs(regT1, regT0), ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}
534
// get_by_id_with_this: dst = base.property with an explicit |this| receiver
// (super property access). Both base and thisValue must be cells; either
// failing sends the site to the slow path.
void JIT::emit_op_get_by_id_with_this(const Instruction* currentInstruction)
{
    auto bytecode = currentInstruction->as<OpGetByIdWithThis>();
    int dst = bytecode.m_dst.offset();
    int base = bytecode.m_base.offset();
    int thisVReg = bytecode.m_thisValue.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    // base -> (regT1, regT0), this -> (regT4, regT3).
    emitLoad(base, regT1, regT0);
    emitLoad(thisVReg, regT4, regT3);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    emitJumpSlowCaseIfNotJSCell(thisVReg, regT4);

    // Generator arguments: result regs, base (payload only; cell-checked
    // above), and the this-value regs.
    JITGetByIdWithThisGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs(regT1, regT0), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT4, regT3));
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIdsWithThis.append(gen);

    emitValueProfilingSite(bytecode.metadata(m_codeBlock));
    emitStore(dst, regT1, regT0);
}
558
// Slow path for get_by_id_with_this: calls the optimizing operation with
// both the base and the this-value, profiling the result.
void JIT::emitSlow_op_get_by_id_with_this(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto bytecode = currentInstruction->as<OpGetByIdWithThis>();
    int resultVReg = bytecode.m_dst.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    JITGetByIdWithThisGenerator& gen = m_getByIdsWithThis[m_getByIdWithThisIndex++];

    Label coldPathBegin = label();

    // Fast path left base in (regT1, regT0) and this in (regT4, regT3).
    Call call = callOperationWithProfile(bytecode.metadata(m_codeBlock), operationGetByIdWithThisOptimize, resultVReg, m_codeBlock->globalObject(), gen.stubInfo(), JSValueRegs(regT1, regT0), JSValueRegs(regT4, regT3), ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}
575
// put_by_id: base.property = value via a JITPutByIdGenerator IC; supports
// both ordinary and direct (own-property) puts based on the bytecode flags.
void JIT::emit_op_put_by_id(const Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure, and the object offset, we store one pointer,
    // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
    // such that the Structure & offset are always at the same distance from this.
    // NOTE(review): the comment above describes the old manual-patching
    // scheme; patching now goes through JITPutByIdGenerator — confirm and
    // consider removing.

    auto bytecode = currentInstruction->as<OpPutById>();
    int base = bytecode.m_base.offset();
    int value = bytecode.m_value.offset();
    bool direct = !!(bytecode.m_flags & PutByIdIsDirect);

    // base -> (tag regT1, payload regT0), value -> (tag regT3, payload regT2).
    emitLoad2(base, regT1, regT0, value, regT3, regT2);

    emitJumpSlowCaseIfNotJSCell(base, regT1);

    // regT1 serves as the generator's scratch register.
    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2),
        regT1, m_codeBlock->ecmaMode(), direct ? Direct : NotDirect);

    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());

    // The store may have written a cell into the base.
    emitWriteBarrier(base, value, ShouldFilterBase);

    m_putByIds.append(gen);
}
603
// Slow path for put_by_id: reloads the base's tag (clobbered by the fast
// path) and calls the generator's slow-path operation for repatching.
void JIT::emitSlow_op_put_by_id(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto bytecode = currentInstruction->as<OpPutById>();
    int base = bytecode.m_base.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    Label coldPathBegin(this);

    // JITPutByIdGenerator only preserve the value and the base's payload, we have to reload the tag.
    emitLoadTag(base, regT1);

    JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];

    // Value is still in (regT3, regT2); base is now whole in (regT1, regT0).
    Call call = callOperation(
        gen.slowPathFunction(), m_codeBlock->globalObject(), gen.stubInfo(), JSValueRegs(regT3, regT2), JSValueRegs(regT1, regT0), ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}
624
// in_by_id: dst = ("property" in base), via a JITInByIdGenerator IC.
// The base must be a cell; otherwise the slow path throws the type error.
void JIT::emit_op_in_by_id(const Instruction* currentInstruction)
{
    auto bytecode = currentInstruction->as<OpInById>();
    int dst = bytecode.m_dst.offset();
    int base = bytecode.m_base.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);

    JITInByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(bitwise_cast<uint32_t>(currentInstruction))), RegisterSet::stubUnavailableRegisters(),
        ident->impl(), JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0));
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_inByIds.append(gen);

    // Note: no value profiling for `in`; the result is always a boolean.
    emitStore(dst, regT1, regT0);
}
644
// Slow path for in_by_id: calls the optimizing operation so the IC can be
// repatched.
void JIT::emitSlow_op_in_by_id(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto bytecode = currentInstruction->as<OpInById>();
    int resultVReg = bytecode.m_dst.offset();
    const Identifier* ident = &(m_codeBlock->identifier(bytecode.m_property));

    JITInByIdGenerator& gen = m_inByIds[m_inByIdIndex++];

    Label coldPathBegin = label();

    // Fast path left the base in (regT1, regT0).
    Call call = callOperation(operationInByIdOptimize, resultVReg, m_codeBlock->globalObject(), gen.stubInfo(), JSValueRegs(regT1, regT0), ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}
661
662void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
663{
664 if (!needsVarInjectionChecks)
665 return;
666 addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
667}
668
669void JIT::emitResolveClosure(int dst, int scope, bool needsVarInjectionChecks, unsigned depth)
670{
671 emitVarInjectionCheck(needsVarInjectionChecks);
672 move(TrustedImm32(JSValue::CellTag), regT1);
673 emitLoadPayload(scope, regT0);
674 for (unsigned i = 0; i < depth; ++i)
675 loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
676 emitStore(dst, regT1, regT0);
677}
678
// resolve_scope: materializes the scope object in which a variable lives.
// For statically-known resolve types the scope is emitted directly; for the
// polymorphic cases (GlobalProperty*, Unresolved*) the code dispatches at
// runtime on the resolve type stored in the metadata, since the link-time
// decision can change (e.g. when a global lexical binding appears later).
void JIT::emit_op_resolve_scope(const Instruction* currentInstruction)
{
    auto bytecode = currentInstruction->as<OpResolveScope>();
    auto& metadata = bytecode.metadata(m_codeBlock);
    int dst = bytecode.m_dst.offset();
    int scope = bytecode.m_scope.offset();
    ResolveType resolveType = metadata.m_resolveType;
    unsigned depth = metadata.m_localScopeDepth;

    // Emits the code for one concrete resolve type.
    auto emitCode = [&] (ResolveType resolveType) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalPropertyWithVarInjectionChecks: {
            JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
            RELEASE_ASSERT(constantScope);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            // The cached answer is only valid while no global lexical binding
            // has been added since compilation; otherwise take the slow path.
            load32(&metadata.m_globalLexicalBindingEpoch, regT1);
            addSlowCase(branch32(NotEqual, AbsoluteAddress(m_codeBlock->globalObject()->addressOfGlobalLexicalBindingEpoch()), regT1));
            move(TrustedImm32(JSValue::CellTag), regT1);
            move(TrustedImmPtr(constantScope), regT0);
            emitStore(dst, regT1, regT0);
            break;
        }

        case GlobalVar:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks: {
            // Scope is a compile-time constant (global object or global
            // lexical environment).
            JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
            RELEASE_ASSERT(constantScope);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            move(TrustedImm32(JSValue::CellTag), regT1);
            move(TrustedImmPtr(constantScope), regT0);
            emitStore(dst, regT1, regT0);
            break;
        }
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitResolveClosure(dst, scope, needsVarInjectionChecks(resolveType), depth);
            break;
        case ModuleVar:
            // Module environment is known at compile time.
            move(TrustedImm32(JSValue::CellTag), regT1);
            move(TrustedImmPtr(metadata.m_lexicalEnvironment.get()), regT0);
            emitStore(dst, regT1, regT0);
            break;
        case Dynamic:
            addSlowCase(jump());
            break;
        case LocalClosureVar:
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
        }
    };
    switch (resolveType) {
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks: {
        // The metadata's resolve type may flip between GlobalProperty and
        // GlobalLexicalVar at runtime, so branch on the current value.
        JumpList skipToEnd;
        load32(&metadata.m_resolveType, regT0);

        Jump notGlobalProperty = branch32(NotEqual, regT0, TrustedImm32(resolveType));
        emitCode(resolveType);
        skipToEnd.append(jump());

        notGlobalProperty.link(this);
        emitCode(needsVarInjectionChecks(resolveType) ? GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar);

        skipToEnd.link(this);
        break;
    }
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        // Not resolved at compile time: test each possible runtime resolve
        // type in turn, falling back to the slow path if none match.
        JumpList skipToEnd;
        load32(&metadata.m_resolveType, regT0);

        Jump notGlobalProperty = branch32(NotEqual, regT0, TrustedImm32(GlobalProperty));
        emitCode(GlobalProperty);
        skipToEnd.append(jump());
        notGlobalProperty.link(this);

        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        emitCode(GlobalPropertyWithVarInjectionChecks);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());
        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType);
        break;
    }
}
784
// Loads `scope` into (tag regT1, payload regT0) and slow-cases unless its
// structure matches the cached structure in *structureSlot. The structure
// check subsumes the var-injection check for GlobalProperty accesses.
void JIT::emitLoadWithStructureCheck(int scope, Structure** structureSlot)
{
    emitLoad(scope, regT1, regT0);
    loadPtr(structureSlot, regT2);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), regT2));
}
791
792void JIT::emitGetVarFromPointer(JSValue* operand, GPRReg tag, GPRReg payload)
793{
794 uintptr_t rawAddress = bitwise_cast<uintptr_t>(operand);
795 load32(bitwise_cast<void*>(rawAddress + TagOffset), tag);
796 load32(bitwise_cast<void*>(rawAddress + PayloadOffset), payload);
797}
// Loads the JSValue behind a double indirection (*operand points at the
// value). The payload register doubles as the address register, so the tag
// half must be loaded before the payload half overwrites it.
void JIT::emitGetVarFromIndirectPointer(JSValue** operand, GPRReg tag, GPRReg payload)
{
    loadPtr(operand, payload);
    load32(Address(payload, TagOffset), tag);
    load32(Address(payload, PayloadOffset), payload);
}
804
805void JIT::emitGetClosureVar(int scope, uintptr_t operand)
806{
807 emitLoad(scope, regT1, regT0);
808 load32(Address(regT0, JSLexicalEnvironment::offsetOfVariables() + operand * sizeof(Register) + TagOffset), regT1);
809 load32(Address(regT0, JSLexicalEnvironment::offsetOfVariables() + operand * sizeof(Register) + PayloadOffset), regT0);
810}
811
812void JIT::emit_op_get_from_scope(const Instruction* currentInstruction)
813{
814 auto bytecode = currentInstruction->as<OpGetFromScope>();
815 auto& metadata = bytecode.metadata(m_codeBlock);
816 int dst = bytecode.m_dst.offset();
817 int scope = bytecode.m_scope.offset();
818 ResolveType resolveType = metadata.m_getPutInfo.resolveType();
819 Structure** structureSlot = metadata.m_structure.slot();
820 uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&metadata.m_operand);
821
822 auto emitCode = [&] (ResolveType resolveType, bool indirectLoadForOperand) {
823 switch (resolveType) {
824 case GlobalProperty:
825 case GlobalPropertyWithVarInjectionChecks: {
826 emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
827 GPRReg base = regT2;
828 GPRReg resultTag = regT1;
829 GPRReg resultPayload = regT0;
830 GPRReg offset = regT3;
831
832 move(regT0, base);
833 load32(operandSlot, offset);
834 if (!ASSERT_DISABLED) {
835 Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
836 abortWithReason(JITOffsetIsNotOutOfLine);
837 isOutOfLine.link(this);
838 }
839 loadPtr(Address(base, JSObject::butterflyOffset()), base);
840 neg32(offset);
841 load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultPayload);
842 load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultTag);
843 break;
844 }
845 case GlobalVar:
846 case GlobalVarWithVarInjectionChecks:
847 case GlobalLexicalVar:
848 case GlobalLexicalVarWithVarInjectionChecks:
849 emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
850 if (indirectLoadForOperand)
851 emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT1, regT0);
852 else
853 emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT1, regT0);
854 if (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks) // TDZ check.
855 addSlowCase(branchIfEmpty(regT1));
856 break;
857 case ClosureVar:
858 case ClosureVarWithVarInjectionChecks:
859 emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
860 emitGetClosureVar(scope, *operandSlot);
861 break;
862 case Dynamic:
863 addSlowCase(jump());
864 break;
865 case ModuleVar:
866 case LocalClosureVar:
867 case UnresolvedProperty:
868 case UnresolvedPropertyWithVarInjectionChecks:
869 RELEASE_ASSERT_NOT_REACHED();
870 }
871 };
872
873 switch (resolveType) {
874 case GlobalProperty:
875 case GlobalPropertyWithVarInjectionChecks: {
876 JumpList skipToEnd;
877 load32(&metadata.m_getPutInfo, regT0);
878 and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0
879
880 Jump isNotGlobalProperty = branch32(NotEqual, regT0, TrustedImm32(resolveType));
881 emitCode(resolveType, false);
882 skipToEnd.append(jump());
883
884 isNotGlobalProperty.link(this);
885 emitCode(needsVarInjectionChecks(resolveType) ? GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar, true);
886 skipToEnd.link(this);
887 break;
888 }
889 case UnresolvedProperty:
890 case UnresolvedPropertyWithVarInjectionChecks: {
891 JumpList skipToEnd;
892 load32(&metadata.m_getPutInfo, regT0);
893 and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0
894
895 Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty));
896 Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
897 isGlobalProperty.link(this);
898 emitCode(GlobalProperty, false);
899 skipToEnd.append(jump());
900 notGlobalPropertyWithVarInjections.link(this);
901
902 Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
903 emitCode(GlobalLexicalVar, true);
904 skipToEnd.append(jump());
905 notGlobalLexicalVar.link(this);
906
907 Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
908 emitCode(GlobalLexicalVarWithVarInjectionChecks, true);
909 skipToEnd.append(jump());
910 notGlobalLexicalVarWithVarInjections.link(this);
911
912 addSlowCase(jump());
913
914 skipToEnd.link(this);
915 break;
916 }
917
918 default:
919 emitCode(resolveType, false);
920 break;
921 }
922 emitValueProfilingSite(bytecode.metadata(m_codeBlock));
923 emitStore(dst, regT1, regT0);
924}
925
926void JIT::emitSlow_op_get_from_scope(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
927{
928 linkAllSlowCases(iter);
929
930 auto bytecode = currentInstruction->as<OpGetFromScope>();
931 int dst = bytecode.m_dst.offset();
932 callOperationWithProfile(bytecode.metadata(m_codeBlock), operationGetFromScope, dst, m_codeBlock->globalObject(), currentInstruction);
933}
934
935void JIT::emitPutGlobalVariable(JSValue* operand, int value, WatchpointSet* set)
936{
937 emitLoad(value, regT1, regT0);
938 emitNotifyWrite(set);
939 uintptr_t rawAddress = bitwise_cast<uintptr_t>(operand);
940 store32(regT1, bitwise_cast<void*>(rawAddress + TagOffset));
941 store32(regT0, bitwise_cast<void*>(rawAddress + PayloadOffset));
942}
943
// Stores the boxed `value` operand into a global variable whose address and
// watchpoint set are both read through indirection at runtime rather than
// baked into the generated code. regT2 is reused as scratch for both
// pointers; the stores happen only after the watchpoint notification.
void JIT::emitPutGlobalVariableIndirect(JSValue** addressOfOperand, int value, WatchpointSet** indirectWatchpointSet)
{
    emitLoad(value, regT1, regT0);
    loadPtr(indirectWatchpointSet, regT2);
    emitNotifyWrite(regT2);
    loadPtr(addressOfOperand, regT2);
    store32(regT1, Address(regT2, TagOffset));
    store32(regT0, Address(regT2, PayloadOffset));
}
953
954void JIT::emitPutClosureVar(int scope, uintptr_t operand, int value, WatchpointSet* set)
955{
956 emitLoad(value, regT3, regT2);
957 emitLoad(scope, regT1, regT0);
958 emitNotifyWrite(set);
959 store32(regT3, Address(regT0, JSLexicalEnvironment::offsetOfVariables() + operand * sizeof(Register) + TagOffset));
960 store32(regT2, Address(regT0, JSLexicalEnvironment::offsetOfVariables() + operand * sizeof(Register) + PayloadOffset));
961}
962
// Stores a value into a scope chain slot. Statically-known resolve types are
// emitted directly; the GlobalProperty and UnresolvedProperty forms also
// dispatch at runtime on the ResolveType cached in the metadata, because it
// may differ from the value seen at compile time.
void JIT::emit_op_put_to_scope(const Instruction* currentInstruction)
{
    auto bytecode = currentInstruction->as<OpPutToScope>();
    auto& metadata = bytecode.metadata(m_codeBlock);
    int scope = bytecode.m_scope.offset();
    int value = bytecode.m_value.offset();
    GetPutInfo getPutInfo = copiedGetPutInfo(bytecode);
    ResolveType resolveType = getPutInfo.resolveType();
    Structure** structureSlot = metadata.m_structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&metadata.m_operand);

    // Emits the fast path for one concrete ResolveType. indirectLoadForOperand
    // selects reading the variable's address out of the metadata slot at
    // runtime rather than baking in its current value.
    auto emitCode = [&] (ResolveType resolveType, bool indirectLoadForOperand) {
        switch (resolveType) {
        case GlobalProperty:
        case GlobalPropertyWithVarInjectionChecks: {
            emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
            emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
            emitLoad(value, regT3, regT2);

            // Out-of-line properties are addressed at negative indices off the butterfly.
            loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
            loadPtr(operandSlot, regT1);
            negPtr(regT1);
            store32(regT3, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
            store32(regT2, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
            break;
        }
        case GlobalVar:
        case GlobalVarWithVarInjectionChecks:
        case GlobalLexicalVar:
        case GlobalLexicalVarWithVarInjectionChecks: {
            JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_codeBlock);
            RELEASE_ASSERT(constantScope);
            emitWriteBarrier(constantScope, value, ShouldFilterValue);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            if (!isInitialization(getPutInfo.initializationMode()) && (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks)) {
                // We need to do a TDZ check here because we can't always prove we need to emit TDZ checks statically.
                if (indirectLoadForOperand)
                    emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT1, regT0);
                else
                    emitGetVarFromPointer(bitwise_cast<JSValue*>(*operandSlot), regT1, regT0);
                addSlowCase(branchIfEmpty(regT1));
            }
            if (indirectLoadForOperand)
                emitPutGlobalVariableIndirect(bitwise_cast<JSValue**>(operandSlot), value, &metadata.m_watchpointSet);
            else
                emitPutGlobalVariable(bitwise_cast<JSValue*>(*operandSlot), value, metadata.m_watchpointSet);
            break;
        }
        case LocalClosureVar:
        case ClosureVar:
        case ClosureVarWithVarInjectionChecks:
            emitWriteBarrier(scope, value, ShouldFilterValue);
            emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
            emitPutClosureVar(scope, *operandSlot, value, metadata.m_watchpointSet);
            break;
        case ModuleVar:
        case Dynamic:
            // Always handled by the C++ slow path.
            addSlowCase(jump());
            break;
        case UnresolvedProperty:
        case UnresolvedPropertyWithVarInjectionChecks:
            RELEASE_ASSERT_NOT_REACHED();
        }
    };

    switch (resolveType) {
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks: {
        // The resolve type cached in the metadata may have changed to a
        // lexical-var form since compile time; dispatch on it at runtime.
        JumpList skipToEnd;
        load32(&metadata.m_getPutInfo, regT0);
        and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0

        Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(resolveType));
        Jump isGlobalLexicalVar = branch32(Equal, regT0, TrustedImm32(needsVarInjectionChecks(resolveType) ? GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar));
        addSlowCase(jump()); // Dynamic, it can happen if we attempt to put a value to already-initialized const binding.

        isGlobalLexicalVar.link(this);
        emitCode(needsVarInjectionChecks(resolveType) ? GlobalLexicalVarWithVarInjectionChecks : GlobalLexicalVar, true);
        skipToEnd.append(jump());

        isGlobalProperty.link(this);
        emitCode(resolveType, false);
        skipToEnd.link(this);
        break;
    }
    case UnresolvedProperty:
    case UnresolvedPropertyWithVarInjectionChecks: {
        // The resolve type was unknown at compile time; branch on the value
        // cached in the metadata, falling back to the slow path for anything
        // other than the four cases handled inline.
        JumpList skipToEnd;
        load32(&metadata.m_getPutInfo, regT0);
        and32(TrustedImm32(GetPutInfo::typeBits), regT0); // Load ResolveType into T0

        Jump isGlobalProperty = branch32(Equal, regT0, TrustedImm32(GlobalProperty));
        Jump notGlobalPropertyWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalPropertyWithVarInjectionChecks));
        isGlobalProperty.link(this);
        emitCode(GlobalProperty, false);
        skipToEnd.append(jump());
        notGlobalPropertyWithVarInjections.link(this);

        Jump notGlobalLexicalVar = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVar));
        emitCode(GlobalLexicalVar, true);
        skipToEnd.append(jump());
        notGlobalLexicalVar.link(this);

        Jump notGlobalLexicalVarWithVarInjections = branch32(NotEqual, regT0, TrustedImm32(GlobalLexicalVarWithVarInjectionChecks));
        emitCode(GlobalLexicalVarWithVarInjectionChecks, true);
        skipToEnd.append(jump());
        notGlobalLexicalVarWithVarInjections.link(this);

        addSlowCase(jump());

        skipToEnd.link(this);
        break;
    }

    default:
        emitCode(resolveType, false);
        break;
    }
}
1082
1083void JIT::emitSlow_op_put_to_scope(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1084{
1085 linkAllSlowCases(iter);
1086
1087 auto bytecode = currentInstruction->as<OpPutToScope>();
1088 ResolveType resolveType = copiedGetPutInfo(bytecode).resolveType();
1089 if (resolveType == ModuleVar) {
1090 JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_strict_mode_readonly_property_write_error);
1091 slowPathCall.call();
1092 } else
1093 callOperation(operationPutToScope, m_codeBlock->globalObject(), currentInstruction);
1094}
1095
1096void JIT::emit_op_get_from_arguments(const Instruction* currentInstruction)
1097{
1098 auto bytecode = currentInstruction->as<OpGetFromArguments>();
1099 int dst = bytecode.m_dst.offset();
1100 int arguments = bytecode.m_arguments.offset();
1101 int index = bytecode.m_index;
1102
1103 emitLoadPayload(arguments, regT0);
1104 load32(Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>) + TagOffset), regT1);
1105 load32(Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>) + PayloadOffset), regT0);
1106 emitValueProfilingSite(bytecode.metadata(m_codeBlock));
1107 emitStore(dst, regT1, regT0);
1108}
1109
1110void JIT::emit_op_put_to_arguments(const Instruction* currentInstruction)
1111{
1112 auto bytecode = currentInstruction->as<OpPutToArguments>();
1113 int arguments = bytecode.m_arguments.offset();
1114 int index = bytecode.m_index;
1115 int value = bytecode.m_value.offset();
1116
1117 emitWriteBarrier(arguments, value, ShouldFilterValue);
1118
1119 emitLoadPayload(arguments, regT0);
1120 emitLoad(value, regT1, regT2);
1121 store32(regT1, Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>) + TagOffset));
1122 store32(regT2, Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>) + PayloadOffset));
1123}
1124
1125void JIT::emit_op_get_internal_field(const Instruction* currentInstruction)
1126{
1127 auto bytecode = currentInstruction->as<OpGetInternalField>();
1128 auto& metadata = bytecode.metadata(m_codeBlock);
1129 int dst = bytecode.m_dst.offset();
1130 int base = bytecode.m_base.offset();
1131 unsigned index = bytecode.m_index;
1132 ASSERT(index < JSPromise::numberOfInternalFields);
1133
1134 emitLoadPayload(base, regT2);
1135 load32(Address(regT2, JSInternalFieldObjectImpl<>::offsetOfInternalField(index) + TagOffset), regT1);
1136 load32(Address(regT2, JSInternalFieldObjectImpl<>::offsetOfInternalField(index) + PayloadOffset), regT0);
1137 emitValueProfilingSite(metadata);
1138 emitStore(dst, regT1, regT0);
1139}
1140
1141void JIT::emit_op_put_internal_field(const Instruction* currentInstruction)
1142{
1143 auto bytecode = currentInstruction->as<OpPutInternalField>();
1144 int base = bytecode.m_base.offset();
1145 int value = bytecode.m_value.offset();
1146 unsigned index = bytecode.m_index;
1147 ASSERT(index < JSPromise::numberOfInternalFields);
1148
1149 emitLoadPayload(base, regT0);
1150 emitLoad(value, regT1, regT2);
1151 store32(regT1, Address(regT0, JSInternalFieldObjectImpl<>::offsetOfInternalField(index) + TagOffset));
1152 store32(regT2, Address(regT0, JSInternalFieldObjectImpl<>::offsetOfInternalField(index) + PayloadOffset));
1153 emitWriteBarrier(base, value, ShouldFilterValue);
1154}
1155
1156} // namespace JSC
1157
1158#endif // USE(JSVALUE32_64)
1159#endif // ENABLE(JIT)
1160