1 | /* |
2 | * Copyright (C) 2011-2019 Apple Inc. All rights reserved. |
3 | * Copyright (C) 2011 Intel Corporation. All rights reserved. |
4 | * |
5 | * Redistribution and use in source and binary forms, with or without |
6 | * modification, are permitted provided that the following conditions |
7 | * are met: |
8 | * 1. Redistributions of source code must retain the above copyright |
9 | * notice, this list of conditions and the following disclaimer. |
10 | * 2. Redistributions in binary form must reproduce the above copyright |
11 | * notice, this list of conditions and the following disclaimer in the |
12 | * documentation and/or other materials provided with the distribution. |
13 | * |
14 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY |
15 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
16 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
17 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR |
18 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, |
19 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
20 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR |
21 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY |
22 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
23 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
24 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
25 | */ |
26 | |
27 | #include "config.h" |
28 | #include "DFGSpeculativeJIT.h" |
29 | |
30 | #if ENABLE(DFG_JIT) |
31 | |
32 | #include "ArrayPrototype.h" |
33 | #include "CallFrameShuffler.h" |
34 | #include "DFGAbstractInterpreterInlines.h" |
35 | #include "DFGCallArrayAllocatorSlowPathGenerator.h" |
36 | #include "DFGOperations.h" |
37 | #include "DFGSlowPathGenerator.h" |
38 | #include "DirectArguments.h" |
39 | #include "GetterSetter.h" |
40 | #include "HasOwnPropertyCache.h" |
41 | #include "HashMapImpl.h" |
42 | #include "JSCInlines.h" |
43 | #include "JSLexicalEnvironment.h" |
44 | #include "JSPropertyNameEnumerator.h" |
45 | #include "ObjectPrototype.h" |
46 | #include "SetupVarargsFrame.h" |
47 | #include "SuperSampler.h" |
48 | #include "Watchdog.h" |
49 | |
50 | namespace JSC { namespace DFG { |
51 | |
52 | #if USE(JSVALUE32_64) |
53 | |
54 | static_assert(SpecCellCheck == SpecCell, "This is strongly assumed in the 32-bit DFG backend."); |
55 | |
56 | bool SpeculativeJIT::fillJSValue(Edge edge, GPRReg& tagGPR, GPRReg& payloadGPR, FPRReg& fpr) |
57 | { |
58 | // FIXME: For double we could fill with a FPR. |
59 | UNUSED_PARAM(fpr); |
60 | |
61 | VirtualRegister virtualRegister = edge->virtualRegister(); |
62 | GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister); |
63 | |
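| // Dispatch on how this value is currently represented: not yet in a register |
| // (a constant or a spill slot), held as an unboxed payload-only value, or |
| // already held as a full tag/payload register pair. |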
64 | switch (info.registerFormat()) { |
65 | case DataFormatNone: { |
66 | |
67 | if (edge->hasConstant()) { |
68 | tagGPR = allocate(); |
69 | payloadGPR = allocate(); |
70 | JSValue value = edge->asJSValue(); |
71 | m_jit.move(Imm32(value.tag()), tagGPR); |
72 | m_jit.move(Imm32(value.payload()), payloadGPR); |
73 | m_gprs.retain(tagGPR, virtualRegister, SpillOrderConstant); |
74 | m_gprs.retain(payloadGPR, virtualRegister, SpillOrderConstant); |
75 | info.fillJSValue(*m_stream, tagGPR, payloadGPR, DataFormatJS); |
76 | } else { |
77 | DataFormat spillFormat = info.spillFormat(); |
78 | ASSERT(spillFormat != DataFormatNone && spillFormat != DataFormatStorage); |
79 | tagGPR = allocate(); |
80 | payloadGPR = allocate(); |
81 | switch (spillFormat) { |
82 | case DataFormatInt32: |
83 | m_jit.move(TrustedImm32(JSValue::Int32Tag), tagGPR); |
84 | spillFormat = DataFormatJSInt32; // This will be used as the new register format. |
85 | break; |
86 | case DataFormatCell: |
87 | m_jit.move(TrustedImm32(JSValue::CellTag), tagGPR); |
88 | spillFormat = DataFormatJSCell; // This will be used as the new register format. |
89 | break; |
90 | case DataFormatBoolean: |
91 | m_jit.move(TrustedImm32(JSValue::BooleanTag), tagGPR); |
92 | spillFormat = DataFormatJSBoolean; // This will be used as the new register format. |
93 | break; |
94 | default: |
95 | m_jit.load32(JITCompiler::tagFor(virtualRegister), tagGPR); |
96 | break; |
97 | } |
98 | m_jit.load32(JITCompiler::payloadFor(virtualRegister), payloadGPR); |
99 | m_gprs.retain(tagGPR, virtualRegister, SpillOrderSpilled); |
100 | m_gprs.retain(payloadGPR, virtualRegister, SpillOrderSpilled); |
101 | info.fillJSValue(*m_stream, tagGPR, payloadGPR, spillFormat == DataFormatJSDouble ? DataFormatJS : spillFormat); |
102 | } |
103 | |
104 | return true; |
105 | } |
106 | |
107 | case DataFormatInt32: |
108 | case DataFormatCell: |
109 | case DataFormatBoolean: { |
110 | GPRReg gpr = info.gpr(); |
111 | // If the register has already been locked we need to take a copy. |
112 | if (m_gprs.isLocked(gpr)) { |
113 | payloadGPR = allocate(); |
114 | m_jit.move(gpr, payloadGPR); |
115 | } else { |
116 | payloadGPR = gpr; |
117 | m_gprs.lock(gpr); |
118 | } |
119 | tagGPR = allocate(); |
120 | int32_t tag = JSValue::EmptyValueTag; |
121 | DataFormat fillFormat = DataFormatJS; |
122 | switch (info.registerFormat()) { |
123 | case DataFormatInt32: |
124 | tag = JSValue::Int32Tag; |
125 | fillFormat = DataFormatJSInt32; |
126 | break; |
127 | case DataFormatCell: |
128 | tag = JSValue::CellTag; |
129 | fillFormat = DataFormatJSCell; |
130 | break; |
131 | case DataFormatBoolean: |
132 | tag = JSValue::BooleanTag; |
133 | fillFormat = DataFormatJSBoolean; |
134 | break; |
135 | default: |
136 | RELEASE_ASSERT_NOT_REACHED(); |
137 | break; |
138 | } |
139 | m_jit.move(TrustedImm32(tag), tagGPR); |
140 | m_gprs.release(gpr); |
141 | m_gprs.retain(tagGPR, virtualRegister, SpillOrderJS); |
142 | m_gprs.retain(payloadGPR, virtualRegister, SpillOrderJS); |
143 | info.fillJSValue(*m_stream, tagGPR, payloadGPR, fillFormat); |
144 | return true; |
145 | } |
146 | |
147 | case DataFormatJSDouble: |
148 | case DataFormatJS: |
149 | case DataFormatJSInt32: |
150 | case DataFormatJSCell: |
151 | case DataFormatJSBoolean: { |
152 | tagGPR = info.tagGPR(); |
153 | payloadGPR = info.payloadGPR(); |
154 | m_gprs.lock(tagGPR); |
155 | m_gprs.lock(payloadGPR); |
156 | return true; |
157 | } |
158 | |
159 | case DataFormatStorage: |
160 | case DataFormatDouble: |
161 | // these formats currently never occur here |
162 | RELEASE_ASSERT_NOT_REACHED(); |
163 | |
164 | default: |
165 | RELEASE_ASSERT_NOT_REACHED(); |
166 | return true; |
167 | } |
168 | } |
169 | |
170 | void SpeculativeJIT::cachedGetById(CodeOrigin origin, JSValueRegs base, JSValueRegs result, unsigned identifierNumber, JITCompiler::Jump slowPathTarget, SpillRegistersMode mode, AccessType type) |
171 | { |
172 | cachedGetById(origin, base.tagGPR(), base.payloadGPR(), result.tagGPR(), result.payloadGPR(), identifierNumber, slowPathTarget, mode, type); |
173 | } |
174 | |
175 | void SpeculativeJIT::cachedGetById( |
176 | CodeOrigin codeOrigin, GPRReg baseTagGPROrNone, GPRReg basePayloadGPR, GPRReg resultTagGPR, GPRReg resultPayloadGPR, |
177 | unsigned identifierNumber, JITCompiler::Jump slowPathTarget, SpillRegistersMode spillMode, AccessType type) |
178 | { |
179 | // This is a hacky fix for when the register allocator decides to alias the base payload with the result tag. This only happens |
180 | // for GetByIdFlush/GetByIdDirectFlush, which already have a relatively expensive register allocation story, so one extra |
181 | // move instruction here is not a concern. |
182 | if (basePayloadGPR == resultTagGPR) { |
183 | RELEASE_ASSERT(basePayloadGPR != resultPayloadGPR); |
184 | |
185 | if (baseTagGPROrNone == resultPayloadGPR) { |
186 | m_jit.swap(basePayloadGPR, baseTagGPROrNone); |
187 | baseTagGPROrNone = resultTagGPR; |
188 | } else |
189 | m_jit.move(basePayloadGPR, resultPayloadGPR); |
190 | basePayloadGPR = resultPayloadGPR; |
191 | } |
192 | |
193 | RegisterSet usedRegisters = this->usedRegisters(); |
194 | if (spillMode == DontSpill) { |
195 | // We've already flushed registers to the stack, we don't need to spill these. |
196 | usedRegisters.set(JSValueRegs(baseTagGPROrNone, basePayloadGPR), false); |
197 | usedRegisters.set(JSValueRegs(resultTagGPR, resultPayloadGPR), false); |
198 | } |
199 | |
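| // Emit a GetById inline cache: the generator lays down the patchable fast path, and any |
| // slow cases (plus the caller-supplied slowPathTarget) fall through to a call to the |
| // optimizing GetById operation, which can later repatch the stub. |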
200 | CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(codeOrigin, m_stream->size()); |
201 | JITGetByIdGenerator gen( |
202 | m_jit.codeBlock(), codeOrigin, callSite, usedRegisters, identifierUID(identifierNumber), |
203 | JSValueRegs(baseTagGPROrNone, basePayloadGPR), JSValueRegs(resultTagGPR, resultPayloadGPR), type); |
204 | |
205 | gen.generateFastPath(m_jit); |
206 | |
207 | JITCompiler::JumpList slowCases; |
208 | if (slowPathTarget.isSet()) |
209 | slowCases.append(slowPathTarget); |
210 | slowCases.append(gen.slowPathJump()); |
211 | |
212 | std::unique_ptr<SlowPathGenerator> slowPath; |
213 | if (baseTagGPROrNone == InvalidGPRReg) { |
214 | slowPath = slowPathCall( |
215 | slowCases, this, appropriateOptimizingGetByIdFunction(type), |
216 | JSValueRegs(resultTagGPR, resultPayloadGPR), TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(codeOrigin)), gen.stubInfo(), |
217 | CCallHelpers::CellValue(basePayloadGPR), |
218 | identifierUID(identifierNumber)); |
219 | } else { |
220 | slowPath = slowPathCall( |
221 | slowCases, this, appropriateOptimizingGetByIdFunction(type), |
222 | JSValueRegs(resultTagGPR, resultPayloadGPR), TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(codeOrigin)), gen.stubInfo(), JSValueRegs(baseTagGPROrNone, basePayloadGPR), identifierUID(identifierNumber)); |
223 | } |
224 | |
225 | m_jit.addGetById(gen, slowPath.get()); |
226 | addSlowPathGenerator(WTFMove(slowPath)); |
227 | } |
228 | |
229 | void SpeculativeJIT::cachedGetByIdWithThis( |
230 | CodeOrigin codeOrigin, GPRReg baseTagGPROrNone, GPRReg basePayloadGPR, GPRReg thisTagGPR, GPRReg thisPayloadGPR, GPRReg resultTagGPR, GPRReg resultPayloadGPR, |
231 | unsigned identifierNumber, const JITCompiler::JumpList& slowPathTarget) |
232 | { |
233 | RegisterSet usedRegisters = this->usedRegisters(); |
234 | |
235 | CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(codeOrigin, m_stream->size()); |
236 | JITGetByIdWithThisGenerator gen( |
237 | m_jit.codeBlock(), codeOrigin, callSite, usedRegisters, identifierUID(identifierNumber), |
238 | JSValueRegs(resultTagGPR, resultPayloadGPR), JSValueRegs(baseTagGPROrNone, basePayloadGPR), JSValueRegs(thisTagGPR, thisPayloadGPR)); |
239 | |
240 | gen.generateFastPath(m_jit); |
241 | |
242 | JITCompiler::JumpList slowCases; |
243 | if (!slowPathTarget.empty()) |
244 | slowCases.append(slowPathTarget); |
245 | slowCases.append(gen.slowPathJump()); |
246 | |
247 | std::unique_ptr<SlowPathGenerator> slowPath; |
248 | if (baseTagGPROrNone == InvalidGPRReg && thisTagGPR == InvalidGPRReg) { |
249 | slowPath = slowPathCall( |
250 | slowCases, this, operationGetByIdWithThisOptimize, |
251 | JSValueRegs(resultTagGPR, resultPayloadGPR), TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(codeOrigin)), gen.stubInfo(), |
252 | CCallHelpers::CellValue(basePayloadGPR), |
253 | CCallHelpers::CellValue(thisPayloadGPR), |
254 | identifierUID(identifierNumber)); |
255 | } else { |
256 | ASSERT(baseTagGPROrNone != InvalidGPRReg); |
257 | ASSERT(thisTagGPR != InvalidGPRReg); |
258 | |
259 | slowPath = slowPathCall( |
260 | slowCases, this, operationGetByIdWithThisOptimize, |
261 | JSValueRegs(resultTagGPR, resultPayloadGPR), TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(codeOrigin)), gen.stubInfo(), JSValueRegs(baseTagGPROrNone, basePayloadGPR), JSValueRegs(thisTagGPR, thisPayloadGPR), identifierUID(identifierNumber)); |
262 | } |
263 | |
264 | m_jit.addGetByIdWithThis(gen, slowPath.get()); |
265 | addSlowPathGenerator(WTFMove(slowPath)); |
266 | } |
267 | |
268 | void SpeculativeJIT::nonSpeculativeNonPeepholeCompareNullOrUndefined(Edge operand) |
269 | { |
270 | JSValueOperand arg(this, operand, ManualOperandSpeculation); |
271 | GPRReg argTagGPR = arg.tagGPR(); |
272 | GPRReg argPayloadGPR = arg.payloadGPR(); |
273 | |
274 | GPRTemporary resultPayload(this, Reuse, arg, PayloadWord); |
275 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
276 | |
277 | JITCompiler::Jump notCell; |
278 | JITCompiler::Jump notMasqueradesAsUndefined; |
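| // If the masquerades-as-undefined watchpoint is still valid, no cell can compare equal to |
| // null/undefined, so the cell case produces a constant false. Otherwise we must test the |
| // MasqueradesAsUndefined type-info flag and, if it is set, compare the cell's global object |
| // against the global object for the current code origin. |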
279 | if (masqueradesAsUndefinedWatchpointIsStillValid()) { |
280 | if (!isKnownCell(operand.node())) |
281 | notCell = m_jit.branchIfNotCell(arg.jsValueRegs()); |
282 | |
283 | m_jit.move(TrustedImm32(0), resultPayloadGPR); |
284 | notMasqueradesAsUndefined = m_jit.jump(); |
285 | } else { |
286 | GPRTemporary localGlobalObject(this); |
287 | GPRTemporary remoteGlobalObject(this); |
288 | |
289 | if (!isKnownCell(operand.node())) |
290 | notCell = m_jit.branchIfNotCell(arg.jsValueRegs()); |
291 | |
292 | JITCompiler::Jump isMasqueradesAsUndefined = m_jit.branchTest8( |
293 | JITCompiler::NonZero, |
294 | JITCompiler::Address(argPayloadGPR, JSCell::typeInfoFlagsOffset()), |
295 | JITCompiler::TrustedImm32(MasqueradesAsUndefined)); |
296 | |
297 | m_jit.move(TrustedImm32(0), resultPayloadGPR); |
298 | notMasqueradesAsUndefined = m_jit.jump(); |
299 | |
300 | isMasqueradesAsUndefined.link(&m_jit); |
301 | GPRReg localGlobalObjectGPR = localGlobalObject.gpr(); |
302 | GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr(); |
303 | m_jit.move(TrustedImmPtr::weakPointer(m_jit.graph(), m_jit.graph().globalObjectFor(m_currentNode->origin.semantic)), localGlobalObjectGPR); |
304 | m_jit.loadPtr(JITCompiler::Address(argPayloadGPR, JSCell::structureIDOffset()), resultPayloadGPR); |
305 | m_jit.loadPtr(JITCompiler::Address(resultPayloadGPR, Structure::globalObjectOffset()), remoteGlobalObjectGPR); |
306 | m_jit.compare32(JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, resultPayloadGPR); |
307 | } |
308 | |
309 | if (!isKnownCell(operand.node())) { |
310 | JITCompiler::Jump done = m_jit.jump(); |
311 | |
312 | notCell.link(&m_jit); |
313 | // null or undefined? |
314 | COMPILE_ASSERT((JSValue::UndefinedTag | 1) == JSValue::NullTag, UndefinedTag_OR_1_EQUALS_NullTag); |
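| // Because (UndefinedTag | 1) == NullTag (asserted above), ORing the tag with 1 lets a |
| // single compare against NullTag detect both null and undefined. |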
315 | m_jit.or32(TrustedImm32(1), argTagGPR, resultPayloadGPR); |
316 | m_jit.compare32(JITCompiler::Equal, resultPayloadGPR, TrustedImm32(JSValue::NullTag), resultPayloadGPR); |
317 | |
318 | done.link(&m_jit); |
319 | } |
320 | |
321 | notMasqueradesAsUndefined.link(&m_jit); |
322 | |
323 | booleanResult(resultPayloadGPR, m_currentNode); |
324 | } |
325 | |
326 | void SpeculativeJIT::nonSpeculativePeepholeBranchNullOrUndefined(Edge operand, Node* branchNode) |
327 | { |
328 | BasicBlock* taken = branchNode->branchData()->taken.block; |
329 | BasicBlock* notTaken = branchNode->branchData()->notTaken.block; |
330 | |
331 | bool invert = false; |
332 | if (taken == nextBlock()) { |
333 | invert = !invert; |
334 | BasicBlock* tmp = taken; |
335 | taken = notTaken; |
336 | notTaken = tmp; |
337 | } |
338 | |
339 | JSValueOperand arg(this, operand, ManualOperandSpeculation); |
340 | GPRReg argTagGPR = arg.tagGPR(); |
341 | GPRReg argPayloadGPR = arg.payloadGPR(); |
342 | |
343 | GPRTemporary result(this, Reuse, arg, TagWord); |
344 | GPRReg resultGPR = result.gpr(); |
345 | |
346 | JITCompiler::Jump notCell; |
347 | |
348 | if (masqueradesAsUndefinedWatchpointIsStillValid()) { |
349 | if (!isKnownCell(operand.node())) |
350 | notCell = m_jit.branchIfNotCell(arg.jsValueRegs()); |
351 | |
352 | jump(invert ? taken : notTaken, ForceJump); |
353 | } else { |
354 | GPRTemporary localGlobalObject(this); |
355 | GPRTemporary remoteGlobalObject(this); |
356 | |
357 | if (!isKnownCell(operand.node())) |
358 | notCell = m_jit.branchIfNotCell(arg.jsValueRegs()); |
359 | |
360 | branchTest8(JITCompiler::Zero, |
361 | JITCompiler::Address(argPayloadGPR, JSCell::typeInfoFlagsOffset()), |
362 | JITCompiler::TrustedImm32(MasqueradesAsUndefined), |
363 | invert ? taken : notTaken); |
364 | |
365 | GPRReg localGlobalObjectGPR = localGlobalObject.gpr(); |
366 | GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr(); |
367 | m_jit.move(TrustedImmPtr::weakPointer(m_jit.graph(), m_jit.graph().globalObjectFor(m_currentNode->origin.semantic)), localGlobalObjectGPR); |
368 | m_jit.loadPtr(JITCompiler::Address(argPayloadGPR, JSCell::structureIDOffset()), resultGPR); |
369 | m_jit.loadPtr(JITCompiler::Address(resultGPR, Structure::globalObjectOffset()), remoteGlobalObjectGPR); |
370 | branchPtr(JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, invert ? notTaken : taken); |
371 | } |
372 | |
373 | if (!isKnownCell(operand.node())) { |
374 | jump(notTaken, ForceJump); |
375 | |
376 | notCell.link(&m_jit); |
377 | // null or undefined? |
378 | COMPILE_ASSERT((JSValue::UndefinedTag | 1) == JSValue::NullTag, UndefinedTag_OR_1_EQUALS_NullTag); |
379 | m_jit.or32(TrustedImm32(1), argTagGPR, resultGPR); |
380 | branch32(invert ? JITCompiler::NotEqual : JITCompiler::Equal, resultGPR, JITCompiler::TrustedImm32(JSValue::NullTag), taken); |
381 | } |
382 | |
383 | jump(notTaken); |
384 | } |
385 | |
386 | void SpeculativeJIT::nonSpeculativePeepholeStrictEq(Node* node, Node* branchNode, bool invert) |
387 | { |
388 | BasicBlock* taken = branchNode->branchData()->taken.block; |
389 | BasicBlock* notTaken = branchNode->branchData()->notTaken.block; |
390 | |
391 | // The branch instruction will branch to the taken block. |
392 | // If taken is next, switch taken with notTaken & invert the branch condition so we can fall through. |
393 | if (taken == nextBlock()) { |
394 | invert = !invert; |
395 | BasicBlock* tmp = taken; |
396 | taken = notTaken; |
397 | notTaken = tmp; |
398 | } |
399 | |
400 | JSValueOperand arg1(this, node->child1()); |
401 | JSValueOperand arg2(this, node->child2()); |
402 | GPRReg arg1PayloadGPR = arg1.payloadGPR(); |
403 | GPRReg arg2PayloadGPR = arg2.payloadGPR(); |
404 | JSValueRegs arg1Regs = arg1.jsValueRegs(); |
405 | JSValueRegs arg2Regs = arg2.jsValueRegs(); |
406 | |
407 | GPRTemporary resultPayload(this, Reuse, arg1, PayloadWord); |
408 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
409 | |
410 | arg1.use(); |
411 | arg2.use(); |
412 | |
413 | if (isKnownCell(node->child1().node()) && isKnownCell(node->child2().node())) { |
414 | // see if we get lucky: if the arguments are cells and they reference the same |
415 | // cell, then they must be strictly equal. |
416 | branchPtr(JITCompiler::Equal, arg1PayloadGPR, arg2PayloadGPR, invert ? notTaken : taken); |
417 | |
418 | silentSpillAllRegisters(resultPayloadGPR); |
419 | callOperation(operationCompareStrictEqCell, resultPayloadGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), arg1PayloadGPR, arg2PayloadGPR); |
420 | m_jit.exceptionCheck(); |
421 | silentFillAllRegisters(); |
422 | |
423 | branchTest32(invert ? JITCompiler::Zero : JITCompiler::NonZero, resultPayloadGPR, taken); |
424 | } else { |
425 | // FIXME: Add fast paths for twoCells, number etc. |
426 | |
427 | silentSpillAllRegisters(resultPayloadGPR); |
428 | callOperation(operationCompareStrictEq, resultPayloadGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), arg1Regs, arg2Regs); |
429 | m_jit.exceptionCheck(); |
430 | silentFillAllRegisters(); |
431 | |
432 | branchTest32(invert ? JITCompiler::Zero : JITCompiler::NonZero, resultPayloadGPR, taken); |
433 | } |
434 | |
435 | jump(notTaken); |
436 | } |
437 | |
438 | void SpeculativeJIT::nonSpeculativeNonPeepholeStrictEq(Node* node, bool invert) |
439 | { |
440 | JSValueOperand arg1(this, node->child1()); |
441 | JSValueOperand arg2(this, node->child2()); |
442 | GPRReg arg1PayloadGPR = arg1.payloadGPR(); |
443 | GPRReg arg2PayloadGPR = arg2.payloadGPR(); |
444 | JSValueRegs arg1Regs = arg1.jsValueRegs(); |
445 | JSValueRegs arg2Regs = arg2.jsValueRegs(); |
446 | |
447 | GPRTemporary resultPayload(this, Reuse, arg1, PayloadWord); |
448 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
449 | |
450 | arg1.use(); |
451 | arg2.use(); |
452 | |
453 | if (isKnownCell(node->child1().node()) && isKnownCell(node->child2().node())) { |
454 | // see if we get lucky: if the arguments are cells and they reference the same |
455 | // cell, then they must be strictly equal. |
456 | // FIXME: this should flush registers instead of silent spill/fill. |
457 | JITCompiler::Jump notEqualCase = m_jit.branchPtr(JITCompiler::NotEqual, arg1PayloadGPR, arg2PayloadGPR); |
458 | |
459 | m_jit.move(JITCompiler::TrustedImm32(!invert), resultPayloadGPR); |
460 | JITCompiler::Jump done = m_jit.jump(); |
461 | |
462 | notEqualCase.link(&m_jit); |
463 | |
464 | silentSpillAllRegisters(resultPayloadGPR); |
465 | callOperation(operationCompareStrictEqCell, resultPayloadGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), arg1PayloadGPR, arg2PayloadGPR); |
466 | m_jit.exceptionCheck(); |
467 | silentFillAllRegisters(); |
468 | |
469 | m_jit.andPtr(JITCompiler::TrustedImm32(1), resultPayloadGPR); |
470 | |
471 | done.link(&m_jit); |
472 | } else { |
473 | // FIXME: Add fast paths. |
474 | |
475 | silentSpillAllRegisters(resultPayloadGPR); |
476 | callOperation(operationCompareStrictEq, resultPayloadGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), arg1Regs, arg2Regs); |
477 | silentFillAllRegisters(); |
478 | m_jit.exceptionCheck(); |
479 | |
480 | m_jit.andPtr(JITCompiler::TrustedImm32(1), resultPayloadGPR); |
481 | } |
482 | |
483 | booleanResult(resultPayloadGPR, node, UseChildrenCalledExplicitly); |
484 | } |
485 | |
486 | void SpeculativeJIT::compileCompareEqPtr(Node* node) |
487 | { |
488 | JSValueOperand operand(this, node->child1()); |
489 | GPRTemporary result(this); |
490 | JSValueRegs regs = operand.jsValueRegs(); |
491 | GPRReg resultGPR = result.gpr(); |
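| // Start with false and overwrite with true only if the operand's tag/payload pair matches |
| // the node's cell constant. |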
492 | m_jit.boxBooleanPayload(false, resultGPR); |
493 | JITCompiler::JumpList notEqual = m_jit.branchIfNotEqual(regs, node->cellOperand()->value()); |
494 | m_jit.boxBooleanPayload(true, resultGPR); |
495 | notEqual.link(&m_jit); |
496 | blessedBooleanResult(resultGPR, node); |
497 | } |
498 | |
499 | void SpeculativeJIT::emitCall(Node* node) |
500 | { |
501 | CallLinkInfo::CallType callType; |
502 | bool isVarargs = false; |
503 | bool isForwardVarargs = false; |
504 | bool isTail = false; |
505 | bool isDirect = false; |
506 | bool isEmulatedTail = false; |
507 | switch (node->op()) { |
508 | case Call: |
509 | case CallEval: |
510 | callType = CallLinkInfo::Call; |
511 | break; |
512 | case TailCall: |
513 | callType = CallLinkInfo::TailCall; |
514 | isTail = true; |
515 | break; |
516 | case TailCallInlinedCaller: |
517 | callType = CallLinkInfo::Call; |
518 | isEmulatedTail = true; |
519 | break; |
520 | case Construct: |
521 | callType = CallLinkInfo::Construct; |
522 | break; |
523 | case CallVarargs: |
524 | callType = CallLinkInfo::CallVarargs; |
525 | isVarargs = true; |
526 | break; |
527 | case TailCallVarargs: |
528 | callType = CallLinkInfo::TailCallVarargs; |
529 | isVarargs = true; |
530 | isTail = true; |
531 | break; |
532 | case TailCallVarargsInlinedCaller: |
533 | callType = CallLinkInfo::CallVarargs; |
534 | isVarargs = true; |
535 | isEmulatedTail = true; |
536 | break; |
537 | case ConstructVarargs: |
538 | callType = CallLinkInfo::ConstructVarargs; |
539 | isVarargs = true; |
540 | break; |
541 | case CallForwardVarargs: |
542 | callType = CallLinkInfo::CallVarargs; |
543 | isForwardVarargs = true; |
544 | break; |
545 | case TailCallForwardVarargs: |
546 | callType = CallLinkInfo::TailCallVarargs; |
547 | isTail = true; |
548 | isForwardVarargs = true; |
549 | break; |
550 | case TailCallForwardVarargsInlinedCaller: |
551 | callType = CallLinkInfo::CallVarargs; |
552 | isEmulatedTail = true; |
553 | isForwardVarargs = true; |
554 | break; |
555 | case ConstructForwardVarargs: |
556 | callType = CallLinkInfo::ConstructVarargs; |
557 | isForwardVarargs = true; |
558 | break; |
559 | case DirectCall: |
560 | callType = CallLinkInfo::DirectCall; |
561 | isDirect = true; |
562 | break; |
563 | case DirectConstruct: |
564 | callType = CallLinkInfo::DirectConstruct; |
565 | isDirect = true; |
566 | break; |
567 | case DirectTailCall: |
568 | callType = CallLinkInfo::DirectTailCall; |
569 | isTail = true; |
570 | isDirect = true; |
571 | break; |
572 | case DirectTailCallInlinedCaller: |
573 | callType = CallLinkInfo::DirectCall; |
574 | isEmulatedTail = true; |
575 | isDirect = true; |
576 | break; |
577 | default: |
578 | DFG_CRASH(m_jit.graph(), node, "bad node type"); |
579 | break; |
580 | } |
581 | |
582 | Edge calleeEdge = m_jit.graph().child(node, 0); |
583 | GPRReg calleeTagGPR = InvalidGPRReg; |
584 | GPRReg calleePayloadGPR = InvalidGPRReg; |
585 | CallFrameShuffleData shuffleData; |
586 | |
587 | JSGlobalObject* globalObject = m_graph.globalObjectFor(node->origin.semantic); |
588 | ExecutableBase* executable = nullptr; |
589 | FunctionExecutable* functionExecutable = nullptr; |
590 | if (isDirect) { |
591 | executable = node->castOperand<ExecutableBase*>(); |
592 | functionExecutable = jsDynamicCast<FunctionExecutable*>(vm(), executable); |
593 | } |
594 | |
595 | unsigned numPassedArgs = 0; |
596 | unsigned numAllocatedArgs = 0; |
597 | |
598 | // Gotta load the arguments somehow. Varargs is trickier. |
599 | if (isVarargs || isForwardVarargs) { |
600 | RELEASE_ASSERT(!isDirect); |
601 | CallVarargsData* data = node->callVarargsData(); |
602 | |
603 | int numUsedStackSlots = m_jit.graph().m_nextMachineLocal; |
604 | |
605 | if (isForwardVarargs) { |
606 | flushRegisters(); |
607 | if (node->child3()) |
608 | use(node->child3()); |
609 | |
610 | GPRReg scratchGPR1; |
611 | GPRReg scratchGPR2; |
612 | GPRReg scratchGPR3; |
613 | |
614 | scratchGPR1 = JITCompiler::selectScratchGPR(); |
615 | scratchGPR2 = JITCompiler::selectScratchGPR(scratchGPR1); |
616 | scratchGPR3 = JITCompiler::selectScratchGPR(scratchGPR1, scratchGPR2); |
617 | |
618 | m_jit.move(TrustedImm32(numUsedStackSlots), scratchGPR2); |
619 | JITCompiler::JumpList slowCase; |
620 | InlineCallFrame* inlineCallFrame; |
621 | if (node->child3()) |
622 | inlineCallFrame = node->child3()->origin.semantic.inlineCallFrame(); |
623 | else |
624 | inlineCallFrame = node->origin.semantic.inlineCallFrame(); |
625 | // emitSetupVarargsFrameFastCase modifies the stack pointer if it succeeds. |
626 | emitSetupVarargsFrameFastCase(vm(), m_jit, scratchGPR2, scratchGPR1, scratchGPR2, scratchGPR3, inlineCallFrame, data->firstVarArgOffset, slowCase); |
627 | JITCompiler::Jump done = m_jit.jump(); |
628 | slowCase.link(&m_jit); |
629 | callOperation(operationThrowStackOverflowForVarargs, TrustedImmPtr::weakPointer(m_graph, globalObject)); |
630 | m_jit.exceptionCheck(); |
631 | m_jit.abortWithReason(DFGVarargsThrowingPathDidNotThrow); |
632 | done.link(&m_jit); |
633 | } else { |
634 | GPRReg argumentsPayloadGPR; |
635 | GPRReg argumentsTagGPR; |
636 | GPRReg scratchGPR1; |
637 | GPRReg scratchGPR2; |
638 | GPRReg scratchGPR3; |
639 | |
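| // Helper that (re)materializes the arguments JSValue into tag/payload registers. |
| // reservedGPR, when valid, is locked around the fill so the register allocator cannot |
| // hand it out; the helper then flushes and selects three scratch registers distinct |
| // from the arguments registers and reservedGPR. |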
640 | auto loadArgumentsGPR = [&] (GPRReg reservedGPR) { |
641 | if (reservedGPR != InvalidGPRReg) |
642 | lock(reservedGPR); |
643 | JSValueOperand arguments(this, node->child3()); |
644 | argumentsTagGPR = arguments.tagGPR(); |
645 | argumentsPayloadGPR = arguments.payloadGPR(); |
646 | if (reservedGPR != InvalidGPRReg) |
647 | unlock(reservedGPR); |
648 | flushRegisters(); |
649 | |
650 | scratchGPR1 = JITCompiler::selectScratchGPR(argumentsPayloadGPR, argumentsTagGPR, reservedGPR); |
651 | scratchGPR2 = JITCompiler::selectScratchGPR(argumentsPayloadGPR, argumentsTagGPR, scratchGPR1, reservedGPR); |
652 | scratchGPR3 = JITCompiler::selectScratchGPR(argumentsPayloadGPR, argumentsTagGPR, scratchGPR1, scratchGPR2, reservedGPR); |
653 | }; |
654 | |
655 | loadArgumentsGPR(InvalidGPRReg); |
656 | |
657 | DFG_ASSERT(m_jit.graph(), node, isFlushed()); |
658 | |
659 | // Right now, arguments is in argumentsTagGPR/argumentsPayloadGPR and the register file is |
660 | // flushed. |
661 | callOperation(operationSizeFrameForVarargs, GPRInfo::returnValueGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), JSValueRegs(argumentsTagGPR, argumentsPayloadGPR), numUsedStackSlots, data->firstVarArgOffset); |
662 | m_jit.exceptionCheck(); |
663 | |
664 | // Now we have the argument count of the callee frame, but we've lost the arguments operand. |
665 | // Reconstruct the arguments operand while preserving the callee frame. |
666 | loadArgumentsGPR(GPRInfo::returnValueGPR); |
667 | m_jit.move(TrustedImm32(numUsedStackSlots), scratchGPR1); |
668 | emitSetVarargsFrame(m_jit, GPRInfo::returnValueGPR, false, scratchGPR1, scratchGPR1); |
669 | m_jit.addPtr(TrustedImm32(-(sizeof(CallerFrameAndPC) + WTF::roundUpToMultipleOf(stackAlignmentBytes(), 6 * sizeof(void*)))), scratchGPR1, JITCompiler::stackPointerRegister); |
670 | |
671 | callOperation(operationSetupVarargsFrame, GPRInfo::returnValueGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), scratchGPR1, JSValueRegs(argumentsTagGPR, argumentsPayloadGPR), data->firstVarArgOffset, GPRInfo::returnValueGPR); |
672 | m_jit.exceptionCheck(); |
673 | m_jit.addPtr(TrustedImm32(sizeof(CallerFrameAndPC)), GPRInfo::returnValueGPR, JITCompiler::stackPointerRegister); |
674 | } |
675 | |
676 | DFG_ASSERT(m_jit.graph(), node, isFlushed()); |
677 | |
678 | // We don't need the arguments array anymore. |
679 | if (isVarargs) |
680 | use(node->child3()); |
681 | |
682 | // Now set up the "this" argument. |
683 | JSValueOperand thisArgument(this, node->child2()); |
684 | GPRReg thisArgumentTagGPR = thisArgument.tagGPR(); |
685 | GPRReg thisArgumentPayloadGPR = thisArgument.payloadGPR(); |
686 | thisArgument.use(); |
687 | |
688 | m_jit.store32(thisArgumentTagGPR, JITCompiler::calleeArgumentTagSlot(0)); |
689 | m_jit.store32(thisArgumentPayloadGPR, JITCompiler::calleeArgumentPayloadSlot(0)); |
690 | } else { |
691 | // The call instruction's first child is either the function (normal call) or the |
692 | // receiver (method call). Subsequent children are the arguments. |
693 | numPassedArgs = node->numChildren() - 1; |
694 | numAllocatedArgs = numPassedArgs; |
695 | |
696 | if (functionExecutable) { |
697 | // Allocate more args if this would let us avoid arity checks. This is throttled by |
698 | // CallLinkInfo's limit. It's probably good to throttle it - if the callee wants a |
699 | // ginormous amount of argument space then it's better for them to do it so that when we |
700 | // make calls to other things, we don't waste space. |
701 | unsigned desiredNumAllocatedArgs = static_cast<unsigned>(functionExecutable->parameterCount()) + 1; |
702 | if (desiredNumAllocatedArgs <= Options::maximumDirectCallStackSize()) { |
703 | numAllocatedArgs = std::max(numAllocatedArgs, desiredNumAllocatedArgs); |
704 | |
705 | // Whoever converts to DirectCall should do this adjustment. It's too late for us to |
706 | // do this adjustment now since we will have already emitted code that relied on the |
707 | // value of m_parameterSlots. |
708 | DFG_ASSERT( |
709 | m_jit.graph(), node, |
710 | Graph::parameterSlotsForArgCount(numAllocatedArgs) |
711 | <= m_jit.graph().m_parameterSlots); |
712 | } |
713 | } |
714 | |
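| // For tail calls we do not store the arguments into the callee frame here; instead we |
| // record a ValueRecovery for the callee and for each argument in shuffleData, and the |
| // CallFrameShuffler builds the replacement frame later. For ordinary calls we store the |
| // argument count and each argument's tag/payload directly into the callee frame slots. |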
715 | if (isTail) { |
716 | JSValueOperand callee(this, calleeEdge); |
717 | calleeTagGPR = callee.tagGPR(); |
718 | calleePayloadGPR = callee.payloadGPR(); |
719 | if (!isDirect) |
720 | use(calleeEdge); |
721 | |
722 | shuffleData.numLocals = m_jit.graph().frameRegisterCount(); |
723 | shuffleData.callee = ValueRecovery::inPair(calleeTagGPR, calleePayloadGPR); |
724 | shuffleData.args.resize(numAllocatedArgs); |
725 | shuffleData.numPassedArgs = numPassedArgs; |
726 | |
727 | for (unsigned i = 0; i < numPassedArgs; ++i) { |
728 | Edge argEdge = m_jit.graph().varArgChild(node, i + 1); |
729 | GenerationInfo& info = generationInfo(argEdge.node()); |
730 | if (!isDirect) |
731 | use(argEdge); |
732 | shuffleData.args[i] = info.recovery(argEdge->virtualRegister()); |
733 | } |
734 | |
735 | for (unsigned i = numPassedArgs; i < numAllocatedArgs; ++i) |
736 | shuffleData.args[i] = ValueRecovery::constant(jsUndefined()); |
737 | } else { |
738 | m_jit.store32(MacroAssembler::TrustedImm32(numPassedArgs), m_jit.calleeFramePayloadSlot(CallFrameSlot::argumentCount)); |
739 | |
740 | for (unsigned i = 0; i < numPassedArgs; i++) { |
741 | Edge argEdge = m_jit.graph().m_varArgChildren[node->firstChild() + 1 + i]; |
742 | JSValueOperand arg(this, argEdge); |
743 | GPRReg argTagGPR = arg.tagGPR(); |
744 | GPRReg argPayloadGPR = arg.payloadGPR(); |
745 | use(argEdge); |
746 | |
747 | m_jit.store32(argTagGPR, m_jit.calleeArgumentTagSlot(i)); |
748 | m_jit.store32(argPayloadGPR, m_jit.calleeArgumentPayloadSlot(i)); |
749 | } |
750 | |
751 | for (unsigned i = numPassedArgs; i < numAllocatedArgs; ++i) |
752 | m_jit.storeTrustedValue(jsUndefined(), JITCompiler::calleeArgumentSlot(i)); |
753 | } |
754 | } |
755 | |
756 | if (!isTail || isVarargs || isForwardVarargs) { |
757 | JSValueOperand callee(this, calleeEdge); |
758 | calleeTagGPR = callee.tagGPR(); |
759 | calleePayloadGPR = callee.payloadGPR(); |
760 | use(calleeEdge); |
761 | m_jit.store32(calleePayloadGPR, m_jit.calleeFramePayloadSlot(CallFrameSlot::callee)); |
762 | m_jit.store32(calleeTagGPR, m_jit.calleeFrameTagSlot(CallFrameSlot::callee)); |
763 | |
764 | if (!isTail) |
765 | flushRegisters(); |
766 | } |
767 | |
768 | JITCompiler::DataLabelPtr targetToCheck; |
769 | JITCompiler::JumpList slowPath; |
770 | |
771 | CodeOrigin staticOrigin = node->origin.semantic; |
772 | InlineCallFrame* staticInlineCallFrame = staticOrigin.inlineCallFrame(); |
773 | ASSERT(!isTail || !staticInlineCallFrame || !staticInlineCallFrame->getCallerSkippingTailCalls()); |
774 | ASSERT(!isEmulatedTail || (staticInlineCallFrame && staticInlineCallFrame->getCallerSkippingTailCalls())); |
775 | CodeOrigin dynamicOrigin = |
776 | isEmulatedTail ? *staticInlineCallFrame->getCallerSkippingTailCalls() : staticOrigin; |
777 | CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(dynamicOrigin, m_stream->size()); |
778 | |
779 | CallLinkInfo* info = m_jit.codeBlock()->addCallLinkInfo(); |
780 | info->setUpCall(callType, node->origin.semantic, calleePayloadGPR); |
781 | |
782 | auto setResultAndResetStack = [&] () { |
783 | JSValueRegsFlushedCallResult result(this); |
784 | JSValueRegs resultRegs = result.regs(); |
785 | |
786 | m_jit.setupResults(resultRegs); |
787 | |
788 | jsValueResult(resultRegs, node, DataFormatJS, UseChildrenCalledExplicitly); |
789 | // After the calls are done, we need to reestablish our stack |
790 | // pointer. We rely on this for varargs calls, calls with arity |
791 | // mismatch (the call frame is slid) and tail calls. |
792 | m_jit.addPtr(TrustedImm32(m_jit.graph().stackPointerOffset() * sizeof(Register)), GPRInfo::callFrameRegister, JITCompiler::stackPointerRegister); |
793 | }; |
794 | |
795 | if (node->op() == CallEval) { |
796 | // We want to call operationCallEval but we don't want to overwrite the parameter area in |
797 | // which we have created a prototypical eval call frame. This means that we have to |
798 | // subtract stack to make room for the call. Lucky for us, at this point we have the whole |
799 | // register file to ourselves. |
800 | |
801 | m_jit.emitStoreCallSiteIndex(callSite); |
802 | m_jit.addPtr(TrustedImm32(-static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC))), JITCompiler::stackPointerRegister, GPRInfo::regT0); |
803 | m_jit.storePtr(GPRInfo::callFrameRegister, JITCompiler::Address(GPRInfo::regT0, CallFrame::callerFrameOffset())); |
804 | |
805 | // Now we need to make room for: |
806 | // - The caller frame and PC of a call to operationCallEval. |
807 | // - Potentially two arguments on the stack. |
808 | unsigned requiredBytes = sizeof(CallerFrameAndPC) + sizeof(CallFrame*) * 2; |
809 | requiredBytes = WTF::roundUpToMultipleOf(stackAlignmentBytes(), requiredBytes); |
810 | m_jit.subPtr(TrustedImm32(requiredBytes), JITCompiler::stackPointerRegister); |
811 | m_jit.setupArguments<decltype(operationCallEval)>(TrustedImmPtr::weakPointer(m_graph, globalObject), GPRInfo::regT0); |
812 | prepareForExternalCall(); |
813 | m_jit.appendCall(operationCallEval); |
814 | m_jit.exceptionCheck(); |
815 | JITCompiler::Jump done = m_jit.branchIfNotEmpty(GPRInfo::returnValueGPR2); |
816 | |
817 | // This is the part where we meant to make a normal call. Oops. |
818 | m_jit.addPtr(TrustedImm32(requiredBytes), JITCompiler::stackPointerRegister); |
819 | m_jit.load32(JITCompiler::calleeFrameSlot(CallFrameSlot::callee).withOffset(PayloadOffset), GPRInfo::regT0); |
820 | m_jit.load32(JITCompiler::calleeFrameSlot(CallFrameSlot::callee).withOffset(TagOffset), GPRInfo::regT1); |
821 | m_jit.emitDumbVirtualCall(vm(), globalObject, info); |
822 | |
823 | done.link(&m_jit); |
824 | setResultAndResetStack(); |
825 | return; |
826 | } |
827 | |
828 | if (isDirect) { |
829 | info->setExecutableDuringCompilation(executable); |
830 | info->setMaxArgumentCountIncludingThis(numAllocatedArgs); |
831 | |
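| // Direct calls know the callee executable at compile time, so there is no cell/pointer |
| // check. The near call starts out unlinked and is routed to the slow path, which calls |
| // operationLinkDirectCall to link it and then jumps back to the main path. |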
832 | if (isTail) { |
833 | RELEASE_ASSERT(node->op() == DirectTailCall); |
834 | |
835 | JITCompiler::PatchableJump patchableJump = m_jit.patchableJump(); |
836 | JITCompiler::Label mainPath = m_jit.label(); |
837 | |
838 | m_jit.emitStoreCallSiteIndex(callSite); |
839 | |
840 | info->setFrameShuffleData(shuffleData); |
841 | CallFrameShuffler(m_jit, shuffleData).prepareForTailCall(); |
842 | |
843 | JITCompiler::Call call = m_jit.nearTailCall(); |
844 | |
845 | JITCompiler::Label slowPath = m_jit.label(); |
846 | patchableJump.m_jump.linkTo(slowPath, &m_jit); |
847 | |
848 | silentSpillAllRegisters(InvalidGPRReg); |
849 | callOperation(operationLinkDirectCall, info, calleePayloadGPR); |
850 | silentFillAllRegisters(); |
851 | m_jit.exceptionCheck(); |
852 | m_jit.jump().linkTo(mainPath, &m_jit); |
853 | |
854 | useChildren(node); |
855 | |
856 | m_jit.addJSDirectTailCall(patchableJump, call, slowPath, info); |
857 | return; |
858 | } |
859 | |
860 | JITCompiler::Label mainPath = m_jit.label(); |
861 | |
862 | m_jit.emitStoreCallSiteIndex(callSite); |
863 | |
864 | JITCompiler::Call call = m_jit.nearCall(); |
865 | JITCompiler::Jump done = m_jit.jump(); |
866 | |
867 | JITCompiler::Label slowPath = m_jit.label(); |
868 | callOperation(operationLinkDirectCall, info, calleePayloadGPR); |
869 | m_jit.exceptionCheck(); |
870 | m_jit.jump().linkTo(mainPath, &m_jit); |
871 | |
872 | done.link(&m_jit); |
873 | |
874 | setResultAndResetStack(); |
875 | |
876 | m_jit.addJSDirectCall(call, slowPath, info); |
877 | return; |
878 | } |
879 | |
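| // Generic (non-direct) calls go through a call inline cache: the fast path checks that the |
| // callee is a cell and that its payload matches a patchable pointer, then performs a near |
| // call. The slow path shuffles the callee into regT0/regT1, puts the CallLinkInfo in regT2 |
| // and the global object in regT3, and makes a near call to the slow-path call thunk. |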
880 | m_jit.emitStoreCallSiteIndex(callSite); |
881 | |
882 | slowPath.append(m_jit.branchIfNotCell(JSValueRegs(calleeTagGPR, calleePayloadGPR))); |
883 | slowPath.append(m_jit.branchPtrWithPatch(MacroAssembler::NotEqual, calleePayloadGPR, targetToCheck)); |
884 | |
885 | if (isTail) { |
886 | if (node->op() == TailCall) { |
887 | info->setFrameShuffleData(shuffleData); |
888 | CallFrameShuffler(m_jit, shuffleData).prepareForTailCall(); |
889 | } else { |
890 | m_jit.emitRestoreCalleeSaves(); |
891 | m_jit.prepareForTailCallSlow(); |
892 | } |
893 | } |
894 | |
895 | JITCompiler::Call fastCall = isTail ? m_jit.nearTailCall() : m_jit.nearCall(); |
896 | |
897 | JITCompiler::Jump done = m_jit.jump(); |
898 | |
899 | slowPath.link(&m_jit); |
900 | |
901 | if (node->op() == TailCall) { |
902 | CallFrameShuffler callFrameShuffler(m_jit, shuffleData); |
903 | callFrameShuffler.setCalleeJSValueRegs(JSValueRegs( |
904 | GPRInfo::regT1, GPRInfo::regT0)); |
905 | callFrameShuffler.prepareForSlowPath(); |
906 | } else { |
907 | // Callee payload needs to be in regT0, tag in regT1 |
908 | if (calleeTagGPR == GPRInfo::regT0) { |
909 | if (calleePayloadGPR == GPRInfo::regT1) |
910 | m_jit.swap(GPRInfo::regT1, GPRInfo::regT0); |
911 | else { |
912 | m_jit.move(calleeTagGPR, GPRInfo::regT1); |
913 | m_jit.move(calleePayloadGPR, GPRInfo::regT0); |
914 | } |
915 | } else { |
916 | m_jit.move(calleePayloadGPR, GPRInfo::regT0); |
917 | m_jit.move(calleeTagGPR, GPRInfo::regT1); |
918 | } |
919 | |
920 | if (isTail) |
921 | m_jit.emitRestoreCalleeSaves(); |
922 | } |
923 | |
924 | m_jit.move(TrustedImmPtr(info), GPRInfo::regT2); |
925 | m_jit.move(TrustedImmPtr::weakPointer(m_graph, globalObject), GPRInfo::regT3); |
926 | JITCompiler::Call slowCall = m_jit.nearCall(); |
927 | |
928 | done.link(&m_jit); |
929 | |
930 | if (isTail) |
931 | m_jit.abortWithReason(JITDidReturnFromTailCall); |
932 | else |
933 | setResultAndResetStack(); |
934 | |
935 | m_jit.addJSCall(fastCall, slowCall, targetToCheck, info); |
936 | } |
937 | |
938 | template<bool strict> |
939 | GPRReg SpeculativeJIT::fillSpeculateInt32Internal(Edge edge, DataFormat& returnFormat) |
940 | { |
941 | AbstractValue& value = m_state.forNode(edge); |
942 | SpeculatedType type = value.m_type; |
943 | ASSERT(edge.useKind() != KnownInt32Use || !(value.m_type & ~SpecInt32Only)); |
944 | |
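| // Filter the abstract value down to SpecInt32Only. If the filtered value is clear, the |
| // speculation can never succeed, so emit an unconditional OSR exit and hand back a freshly |
| // allocated (dummy) register. |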
945 | m_interpreter.filter(value, SpecInt32Only); |
946 | if (value.isClear()) { |
947 | terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0); |
948 | returnFormat = DataFormatInt32; |
949 | return allocate(); |
950 | } |
951 | |
952 | VirtualRegister virtualRegister = edge->virtualRegister(); |
953 | GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister); |
954 | |
955 | switch (info.registerFormat()) { |
956 | case DataFormatNone: { |
957 | if (edge->hasConstant()) { |
958 | ASSERT(edge->isInt32Constant()); |
959 | GPRReg gpr = allocate(); |
960 | m_jit.move(MacroAssembler::Imm32(edge->asInt32()), gpr); |
961 | m_gprs.retain(gpr, virtualRegister, SpillOrderConstant); |
962 | info.fillInt32(*m_stream, gpr); |
963 | returnFormat = DataFormatInt32; |
964 | return gpr; |
965 | } |
966 | |
967 | DataFormat spillFormat = info.spillFormat(); |
968 | |
969 | ASSERT_UNUSED(spillFormat, (spillFormat & DataFormatJS) || spillFormat == DataFormatInt32); |
970 | |
971 | // If we know this was spilled as an integer we can fill without checking. |
972 | if (type & ~SpecInt32Only) |
973 | speculationCheck(BadType, JSValueSource(JITCompiler::addressFor(virtualRegister)), edge, m_jit.branch32(MacroAssembler::NotEqual, JITCompiler::tagFor(virtualRegister), TrustedImm32(JSValue::Int32Tag))); |
974 | |
975 | GPRReg gpr = allocate(); |
976 | m_jit.load32(JITCompiler::payloadFor(virtualRegister), gpr); |
977 | m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled); |
978 | info.fillInt32(*m_stream, gpr); |
979 | returnFormat = DataFormatInt32; |
980 | return gpr; |
981 | } |
982 | |
983 | case DataFormatJSInt32: |
984 | case DataFormatJS: { |
985 | // Check the value is an integer. |
986 | GPRReg tagGPR = info.tagGPR(); |
987 | GPRReg payloadGPR = info.payloadGPR(); |
988 | m_gprs.lock(tagGPR); |
989 | m_gprs.lock(payloadGPR); |
990 | if (type & ~SpecInt32Only) |
991 | speculationCheck(BadType, JSValueRegs(tagGPR, payloadGPR), edge, m_jit.branchIfNotInt32(tagGPR)); |
992 | m_gprs.unlock(tagGPR); |
993 | m_gprs.release(tagGPR); |
994 | m_gprs.release(payloadGPR); |
995 | m_gprs.retain(payloadGPR, virtualRegister, SpillOrderInteger); |
996 | info.fillInt32(*m_stream, payloadGPR); |
997 | // Strict and non-strict fills are identical on 32-bit: the payload register already holds the raw int32. |
998 | returnFormat = DataFormatInt32; |
999 | return payloadGPR; |
1000 | } |
1001 | |
1002 | case DataFormatInt32: { |
1003 | GPRReg gpr = info.gpr(); |
1004 | m_gprs.lock(gpr); |
1005 | returnFormat = DataFormatInt32; |
1006 | return gpr; |
1007 | } |
1008 | |
1009 | case DataFormatCell: |
1010 | case DataFormatBoolean: |
1011 | case DataFormatJSDouble: |
1012 | case DataFormatJSCell: |
1013 | case DataFormatJSBoolean: |
1014 | case DataFormatDouble: |
1015 | case DataFormatStorage: |
1016 | default: |
1017 | RELEASE_ASSERT_NOT_REACHED(); |
1018 | return InvalidGPRReg; |
1019 | } |
1020 | } |
1021 | |
1022 | GPRReg SpeculativeJIT::fillSpeculateInt32(Edge edge, DataFormat& returnFormat) |
1023 | { |
1024 | return fillSpeculateInt32Internal<false>(edge, returnFormat); |
1025 | } |
1026 | |
1027 | GPRReg SpeculativeJIT::fillSpeculateInt32Strict(Edge edge) |
1028 | { |
1029 | DataFormat mustBeDataFormatInt32; |
1030 | GPRReg result = fillSpeculateInt32Internal<true>(edge, mustBeDataFormatInt32); |
1031 | ASSERT(mustBeDataFormatInt32 == DataFormatInt32); |
1032 | return result; |
1033 | } |
1034 | |
1035 | FPRReg SpeculativeJIT::fillSpeculateDouble(Edge edge) |
1036 | { |
1037 | ASSERT(isDouble(edge.useKind())); |
1038 | ASSERT(edge->hasDoubleResult()); |
1039 | VirtualRegister virtualRegister = edge->virtualRegister(); |
1040 | GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister); |
1041 | |
1042 | if (info.registerFormat() == DataFormatNone) { |
1043 | |
1044 | if (edge->hasConstant()) { |
1045 | RELEASE_ASSERT(edge->isNumberConstant()); |
1046 | FPRReg fpr = fprAllocate(); |
1047 | m_jit.loadDouble(TrustedImmPtr(m_jit.addressOfDoubleConstant(edge.node())), fpr); |
1048 | m_fprs.retain(fpr, virtualRegister, SpillOrderConstant); |
1049 | info.fillDouble(*m_stream, fpr); |
1050 | return fpr; |
1051 | } |
1052 | |
1053 | RELEASE_ASSERT(info.spillFormat() == DataFormatDouble); |
1054 | FPRReg fpr = fprAllocate(); |
1055 | m_jit.loadDouble(JITCompiler::addressFor(virtualRegister), fpr); |
1056 | m_fprs.retain(fpr, virtualRegister, SpillOrderSpilled); |
1057 | info.fillDouble(*m_stream, fpr); |
1058 | return fpr; |
1059 | } |
1060 | |
1061 | RELEASE_ASSERT(info.registerFormat() == DataFormatDouble); |
1062 | FPRReg fpr = info.fpr(); |
1063 | m_fprs.lock(fpr); |
1064 | return fpr; |
1065 | } |
1066 | |
1067 | GPRReg SpeculativeJIT::fillSpeculateCell(Edge edge) |
1068 | { |
1069 | AbstractValue& value = m_state.forNode(edge); |
1070 | SpeculatedType type = value.m_type; |
1071 | ASSERT((edge.useKind() != KnownCellUse && edge.useKind() != KnownStringUse) || !(value.m_type & ~SpecCell)); |
1072 | |
1073 | m_interpreter.filter(value, SpecCell); |
1074 | if (value.isClear()) { |
1075 | terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0); |
1076 | return allocate(); |
1077 | } |
1078 | |
1079 | VirtualRegister virtualRegister = edge->virtualRegister(); |
1080 | GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister); |
1081 | |
1082 | switch (info.registerFormat()) { |
1083 | case DataFormatNone: { |
1084 | if (edge->hasConstant()) { |
1085 | GPRReg gpr = allocate(); |
1086 | m_gprs.retain(gpr, virtualRegister, SpillOrderConstant); |
1087 | m_jit.move(TrustedImmPtr(edge->constant()), gpr); |
1088 | info.fillCell(*m_stream, gpr); |
1089 | return gpr; |
1090 | } |
1091 | |
1092 | ASSERT((info.spillFormat() & DataFormatJS) || info.spillFormat() == DataFormatCell); |
1093 | if (type & ~SpecCell) { |
1094 | speculationCheck( |
1095 | BadType, |
1096 | JSValueSource(JITCompiler::addressFor(virtualRegister)), |
1097 | edge, |
1098 | m_jit.branch32( |
1099 | MacroAssembler::NotEqual, |
1100 | JITCompiler::tagFor(virtualRegister), |
1101 | TrustedImm32(JSValue::CellTag))); |
1102 | } |
1103 | GPRReg gpr = allocate(); |
1104 | m_jit.load32(JITCompiler::payloadFor(virtualRegister), gpr); |
1105 | m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled); |
1106 | info.fillCell(*m_stream, gpr); |
1107 | return gpr; |
1108 | } |
1109 | |
1110 | case DataFormatCell: { |
1111 | GPRReg gpr = info.gpr(); |
1112 | m_gprs.lock(gpr); |
1113 | return gpr; |
1114 | } |
1115 | |
1116 | case DataFormatJSCell: |
1117 | case DataFormatJS: { |
1118 | GPRReg tagGPR = info.tagGPR(); |
1119 | GPRReg payloadGPR = info.payloadGPR(); |
1120 | m_gprs.lock(tagGPR); |
1121 | m_gprs.lock(payloadGPR); |
1122 | if (type & ~SpecCell) { |
1123 | speculationCheck( |
1124 | BadType, JSValueRegs(tagGPR, payloadGPR), edge, |
1125 | m_jit.branchIfNotCell(info.jsValueRegs())); |
1126 | } |
1127 | m_gprs.unlock(tagGPR); |
1128 | m_gprs.release(tagGPR); |
1129 | m_gprs.release(payloadGPR); |
1130 | m_gprs.retain(payloadGPR, virtualRegister, SpillOrderCell); |
1131 | info.fillCell(*m_stream, payloadGPR); |
1132 | return payloadGPR; |
1133 | } |
1134 | |
1135 | case DataFormatJSInt32: |
1136 | case DataFormatInt32: |
1137 | case DataFormatJSDouble: |
1138 | case DataFormatJSBoolean: |
1139 | case DataFormatBoolean: |
1140 | case DataFormatDouble: |
1141 | case DataFormatStorage: |
1142 | RELEASE_ASSERT_NOT_REACHED(); |
1143 | |
1144 | default: |
1145 | RELEASE_ASSERT_NOT_REACHED(); |
1146 | return InvalidGPRReg; |
1147 | } |
1148 | } |
1149 | |
1150 | GPRReg SpeculativeJIT::fillSpeculateBoolean(Edge edge) |
1151 | { |
1152 | AbstractValue& value = m_state.forNode(edge); |
1153 | SpeculatedType type = value.m_type; |
1154 | ASSERT(edge.useKind() != KnownBooleanUse || !(value.m_type & ~SpecBoolean)); |
1155 | |
1156 | m_interpreter.filter(value, SpecBoolean); |
1157 | if (value.isClear()) { |
1158 | terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0); |
1159 | return allocate(); |
1160 | } |
1161 | |
1162 | VirtualRegister virtualRegister = edge->virtualRegister(); |
1163 | GenerationInfo& info = generationInfoFromVirtualRegister(virtualRegister); |
1164 | |
1165 | switch (info.registerFormat()) { |
1166 | case DataFormatNone: { |
1167 | if (edge->hasConstant()) { |
1168 | JSValue jsValue = edge->asJSValue(); |
1169 | GPRReg gpr = allocate(); |
1170 | m_gprs.retain(gpr, virtualRegister, SpillOrderConstant); |
1171 | m_jit.move(MacroAssembler::TrustedImm32(jsValue.asBoolean()), gpr); |
1172 | info.fillBoolean(*m_stream, gpr); |
1173 | return gpr; |
1174 | } |
1175 | |
1176 | ASSERT((info.spillFormat() & DataFormatJS) || info.spillFormat() == DataFormatBoolean); |
1177 | |
1178 | if (type & ~SpecBoolean) |
1179 | speculationCheck(BadType, JSValueSource(JITCompiler::addressFor(virtualRegister)), edge, m_jit.branch32(MacroAssembler::NotEqual, JITCompiler::tagFor(virtualRegister), TrustedImm32(JSValue::BooleanTag))); |
1180 | |
1181 | GPRReg gpr = allocate(); |
1182 | m_jit.load32(JITCompiler::payloadFor(virtualRegister), gpr); |
1183 | m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled); |
1184 | info.fillBoolean(*m_stream, gpr); |
1185 | return gpr; |
1186 | } |
1187 | |
1188 | case DataFormatBoolean: { |
1189 | GPRReg gpr = info.gpr(); |
1190 | m_gprs.lock(gpr); |
1191 | return gpr; |
1192 | } |
1193 | |
1194 | case DataFormatJSBoolean: |
1195 | case DataFormatJS: { |
1196 | GPRReg tagGPR = info.tagGPR(); |
1197 | GPRReg payloadGPR = info.payloadGPR(); |
1198 | m_gprs.lock(tagGPR); |
1199 | m_gprs.lock(payloadGPR); |
1200 | if (type & ~SpecBoolean) |
1201 | speculationCheck(BadType, JSValueRegs(tagGPR, payloadGPR), edge, m_jit.branchIfNotBoolean(tagGPR, InvalidGPRReg)); |
1202 | |
1203 | m_gprs.unlock(tagGPR); |
1204 | m_gprs.release(tagGPR); |
1205 | m_gprs.release(payloadGPR); |
1206 | m_gprs.retain(payloadGPR, virtualRegister, SpillOrderBoolean); |
1207 | info.fillBoolean(*m_stream, payloadGPR); |
1208 | return payloadGPR; |
1209 | } |
1210 | |
1211 | case DataFormatJSInt32: |
1212 | case DataFormatInt32: |
1213 | case DataFormatJSDouble: |
1214 | case DataFormatJSCell: |
1215 | case DataFormatCell: |
1216 | case DataFormatDouble: |
1217 | case DataFormatStorage: |
1218 | RELEASE_ASSERT_NOT_REACHED(); |
1219 | |
1220 | default: |
1221 | RELEASE_ASSERT_NOT_REACHED(); |
1222 | return InvalidGPRReg; |
1223 | } |
1224 | } |
1225 | |
1226 | void SpeculativeJIT::compileObjectStrictEquality(Edge objectChild, Edge otherChild) |
1227 | { |
1228 | SpeculateCellOperand op1(this, objectChild); |
1229 | JSValueOperand op2(this, otherChild); |
1230 | |
1231 | GPRReg op1GPR = op1.gpr(); |
1232 | GPRReg op2GPR = op2.payloadGPR(); |
1233 | |
1234 | DFG_TYPE_CHECK(JSValueSource::unboxedCell(op1GPR), objectChild, SpecObject, m_jit.branchIfNotObject(op1GPR)); |
1235 | |
1236 | GPRTemporary resultPayload(this, Reuse, op1); |
1237 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
1238 | |
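| // objectChild has already been checked to be an object. If otherChild is not a cell it |
| // cannot be equal, so the result is false; if it is a cell, strict equality reduces to |
| // comparing the two cell pointers (the payloads). |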
1239 | MacroAssembler::Jump op2CellJump = m_jit.branchIfCell(op2.jsValueRegs()); |
1240 | |
1241 | m_jit.move(TrustedImm32(0), resultPayloadGPR); |
1242 | MacroAssembler::Jump op2NotCellJump = m_jit.jump(); |
1243 | |
1244 | // At this point we know that we can perform a straight-forward equality comparison on pointer |
1245 | // values because we are doing strict equality. |
1246 | op2CellJump.link(&m_jit); |
1247 | m_jit.compare32(MacroAssembler::Equal, op1GPR, op2GPR, resultPayloadGPR); |
1248 | |
1249 | op2NotCellJump.link(&m_jit); |
1250 | booleanResult(resultPayloadGPR, m_currentNode); |
1251 | } |
1252 | |
1253 | void SpeculativeJIT::compilePeepHoleObjectStrictEquality(Edge objectChild, Edge otherChild, Node* branchNode) |
1254 | { |
1255 | BasicBlock* taken = branchNode->branchData()->taken.block; |
1256 | BasicBlock* notTaken = branchNode->branchData()->notTaken.block; |
1257 | |
1258 | SpeculateCellOperand op1(this, objectChild); |
1259 | JSValueOperand op2(this, otherChild); |
1260 | |
1261 | GPRReg op1GPR = op1.gpr(); |
1262 | GPRReg op2GPR = op2.payloadGPR(); |
1263 | |
1264 | DFG_TYPE_CHECK(JSValueSource::unboxedCell(op1GPR), objectChild, SpecObject, m_jit.branchIfNotObject(op1GPR)); |
1265 | |
1266 | branch32(MacroAssembler::NotEqual, op2.tagGPR(), TrustedImm32(JSValue::CellTag), notTaken); |
1267 | |
1268 | if (taken == nextBlock()) { |
1269 | branch32(MacroAssembler::NotEqual, op1GPR, op2GPR, notTaken); |
1270 | jump(taken); |
1271 | } else { |
1272 | branch32(MacroAssembler::Equal, op1GPR, op2GPR, taken); |
1273 | jump(notTaken); |
1274 | } |
1275 | } |
1276 | |
1277 | void SpeculativeJIT::compileObjectToObjectOrOtherEquality(Edge leftChild, Edge rightChild) |
1278 | { |
1279 | SpeculateCellOperand op1(this, leftChild); |
1280 | JSValueOperand op2(this, rightChild, ManualOperandSpeculation); |
1281 | GPRTemporary result(this); |
1282 | |
1283 | GPRReg op1GPR = op1.gpr(); |
1284 | GPRReg op2TagGPR = op2.tagGPR(); |
1285 | GPRReg op2PayloadGPR = op2.payloadGPR(); |
1286 | GPRReg resultGPR = result.gpr(); |
1287 | |
1288 | bool masqueradesAsUndefinedWatchpointValid = |
1289 | masqueradesAsUndefinedWatchpointIsStillValid(); |
1290 | |
1291 | if (masqueradesAsUndefinedWatchpointValid) { |
1292 | DFG_TYPE_CHECK( |
1293 | JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchIfNotObject(op1GPR)); |
1294 | } else { |
1295 | DFG_TYPE_CHECK( |
1296 | JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchIfNotObject(op1GPR)); |
1297 | speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), leftChild, |
1298 | m_jit.branchTest8( |
1299 | MacroAssembler::NonZero, |
1300 | MacroAssembler::Address(op1GPR, JSCell::typeInfoFlagsOffset()), |
1301 | MacroAssembler::TrustedImm32(MasqueradesAsUndefined))); |
1302 | } |
1303 | |
1304 | |
1305 | // It seems that most of the time when programs do a == b where b may be either null/undefined |
1306 | // or an object, b is usually an object. Balance the branches to make that case fast. |
1307 | MacroAssembler::Jump rightNotCell = m_jit.branchIfNotCell(op2.jsValueRegs()); |
1308 | |
1309 | // We know that within this branch, rightChild must be a cell. |
1310 | if (masqueradesAsUndefinedWatchpointValid) { |
1311 | DFG_TYPE_CHECK( |
1312 | JSValueRegs(op2TagGPR, op2PayloadGPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchIfNotObject(op2PayloadGPR)); |
1313 | } else { |
1314 | DFG_TYPE_CHECK( |
1315 | JSValueRegs(op2TagGPR, op2PayloadGPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchIfNotObject(op2PayloadGPR)); |
1316 | speculationCheck(BadType, JSValueRegs(op2TagGPR, op2PayloadGPR), rightChild, |
1317 | m_jit.branchTest8( |
1318 | MacroAssembler::NonZero, |
1319 | MacroAssembler::Address(op2PayloadGPR, JSCell::typeInfoFlagsOffset()), |
1320 | MacroAssembler::TrustedImm32(MasqueradesAsUndefined))); |
1321 | } |
1322 | |
1323 | // At this point we know that we can perform a straight-forward equality comparison on pointer |
1324 | // values because both left and right are pointers to objects that have no special equality |
1325 | // protocols. |
1326 | MacroAssembler::Jump falseCase = m_jit.branchPtr(MacroAssembler::NotEqual, op1GPR, op2PayloadGPR); |
1327 | MacroAssembler::Jump trueCase = m_jit.jump(); |
1328 | |
1329 | rightNotCell.link(&m_jit); |
1330 | |
1331 | // We know that within this branch, rightChild must not be a cell. Check if that is enough to |
1332 | // prove that it is either null or undefined. |
1333 | if (needsTypeCheck(rightChild, SpecCell | SpecOther)) { |
1334 | m_jit.or32(TrustedImm32(1), op2TagGPR, resultGPR); |
1335 | |
1336 | typeCheck( |
1337 | JSValueRegs(op2TagGPR, op2PayloadGPR), rightChild, SpecCell | SpecOther, |
1338 | m_jit.branch32( |
1339 | MacroAssembler::NotEqual, resultGPR, |
1340 | MacroAssembler::TrustedImm32(JSValue::NullTag))); |
1341 | } |
1342 | |
1343 | falseCase.link(&m_jit); |
1344 | m_jit.move(TrustedImm32(0), resultGPR); |
1345 | MacroAssembler::Jump done = m_jit.jump(); |
1346 | trueCase.link(&m_jit); |
1347 | m_jit.move(TrustedImm32(1), resultGPR); |
1348 | done.link(&m_jit); |
1349 | |
1350 | booleanResult(resultGPR, m_currentNode); |
1351 | } |
1352 | |
1353 | void SpeculativeJIT::compilePeepHoleObjectToObjectOrOtherEquality(Edge leftChild, Edge rightChild, Node* branchNode) |
1354 | { |
1355 | BasicBlock* taken = branchNode->branchData()->taken.block; |
1356 | BasicBlock* notTaken = branchNode->branchData()->notTaken.block; |
1357 | |
1358 | SpeculateCellOperand op1(this, leftChild); |
1359 | JSValueOperand op2(this, rightChild, ManualOperandSpeculation); |
1360 | GPRTemporary result(this); |
1361 | |
1362 | GPRReg op1GPR = op1.gpr(); |
1363 | GPRReg op2TagGPR = op2.tagGPR(); |
1364 | GPRReg op2PayloadGPR = op2.payloadGPR(); |
1365 | GPRReg resultGPR = result.gpr(); |
1366 | |
1367 | bool masqueradesAsUndefinedWatchpointValid = |
1368 | masqueradesAsUndefinedWatchpointIsStillValid(); |
1369 | |
1370 | if (masqueradesAsUndefinedWatchpointValid) { |
1371 | DFG_TYPE_CHECK( |
1372 | JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchIfNotObject(op1GPR)); |
1373 | } else { |
1374 | DFG_TYPE_CHECK( |
1375 | JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchIfNotObject(op1GPR)); |
1376 | speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), leftChild, |
1377 | m_jit.branchTest8( |
1378 | MacroAssembler::NonZero, |
1379 | MacroAssembler::Address(op1GPR, JSCell::typeInfoFlagsOffset()), |
1380 | MacroAssembler::TrustedImm32(MasqueradesAsUndefined))); |
1381 | } |
1382 | |
    // In practice, when programs do a == b where b may be either null/undefined or an object,
    // b is usually an object. Balance the branches to make that case fast.
1385 | MacroAssembler::Jump rightNotCell = m_jit.branchIfNotCell(op2.jsValueRegs()); |
1386 | |
1387 | // We know that within this branch, rightChild must be a cell. |
1388 | if (masqueradesAsUndefinedWatchpointValid) { |
1389 | DFG_TYPE_CHECK( |
1390 | JSValueRegs(op2TagGPR, op2PayloadGPR), rightChild, (~SpecCell) | SpecObject, |
1391 | m_jit.branchIfNotObject(op2PayloadGPR)); |
1392 | } else { |
1393 | DFG_TYPE_CHECK( |
1394 | JSValueRegs(op2TagGPR, op2PayloadGPR), rightChild, (~SpecCell) | SpecObject, |
1395 | m_jit.branchIfNotObject(op2PayloadGPR)); |
1396 | speculationCheck(BadType, JSValueRegs(op2TagGPR, op2PayloadGPR), rightChild, |
1397 | m_jit.branchTest8( |
1398 | MacroAssembler::NonZero, |
1399 | MacroAssembler::Address(op2PayloadGPR, JSCell::typeInfoFlagsOffset()), |
1400 | MacroAssembler::TrustedImm32(MasqueradesAsUndefined))); |
1401 | } |
1402 | |
1403 | // At this point we know that we can perform a straight-forward equality comparison on pointer |
1404 | // values because both left and right are pointers to objects that have no special equality |
1405 | // protocols. |
1406 | branch32(MacroAssembler::Equal, op1GPR, op2PayloadGPR, taken); |
1407 | |
1408 | // We know that within this branch, rightChild must not be a cell. Check if that is enough to |
1409 | // prove that it is either null or undefined. |
1410 | if (!needsTypeCheck(rightChild, SpecCell | SpecOther)) |
1411 | rightNotCell.link(&m_jit); |
1412 | else { |
1413 | jump(notTaken, ForceJump); |
1414 | |
1415 | rightNotCell.link(&m_jit); |
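        // As in the non-branching version: UndefinedTag | 1 == NullTag, so one compare against
        // NullTag covers both null and undefined.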
1416 | m_jit.or32(TrustedImm32(1), op2TagGPR, resultGPR); |
1417 | |
1418 | typeCheck( |
1419 | JSValueRegs(op2TagGPR, op2PayloadGPR), rightChild, SpecCell | SpecOther, |
1420 | m_jit.branch32( |
1421 | MacroAssembler::NotEqual, resultGPR, |
1422 | MacroAssembler::TrustedImm32(JSValue::NullTag))); |
1423 | } |
1424 | |
1425 | jump(notTaken); |
1426 | } |
1427 | |
1428 | void SpeculativeJIT::compileSymbolUntypedEquality(Node* node, Edge symbolEdge, Edge untypedEdge) |
1429 | { |
1430 | SpeculateCellOperand symbol(this, symbolEdge); |
1431 | JSValueOperand untyped(this, untypedEdge); |
1432 | |
1433 | GPRReg symbolGPR = symbol.gpr(); |
1434 | GPRReg untypedGPR = untyped.payloadGPR(); |
1435 | |
1436 | speculateSymbol(symbolEdge, symbolGPR); |
1437 | |
1438 | GPRTemporary resultPayload(this, Reuse, symbol); |
1439 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
1440 | |
1441 | MacroAssembler::Jump untypedCellJump = m_jit.branchIfCell(untyped.jsValueRegs()); |
1442 | |
1443 | m_jit.move(TrustedImm32(0), resultPayloadGPR); |
1444 | MacroAssembler::Jump untypedNotCellJump = m_jit.jump(); |
1445 | |
1446 | // At this point we know that we can perform a straight-forward equality comparison on pointer |
1447 | // values because we are doing strict equality. |
1448 | untypedCellJump.link(&m_jit); |
1449 | m_jit.compare32(MacroAssembler::Equal, symbolGPR, untypedGPR, resultPayloadGPR); |
1450 | |
1451 | untypedNotCellJump.link(&m_jit); |
1452 | booleanResult(resultPayloadGPR, node); |
1453 | } |
1454 | |
1455 | void SpeculativeJIT::compileObjectOrOtherLogicalNot(Edge nodeUse) |
1456 | { |
1457 | JSValueOperand value(this, nodeUse, ManualOperandSpeculation); |
1458 | GPRTemporary resultPayload(this); |
1459 | GPRReg valueTagGPR = value.tagGPR(); |
1460 | GPRReg valuePayloadGPR = value.payloadGPR(); |
1461 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
1462 | GPRTemporary structure; |
1463 | GPRReg structureGPR = InvalidGPRReg; |
1464 | |
1465 | bool masqueradesAsUndefinedWatchpointValid = |
1466 | masqueradesAsUndefinedWatchpointIsStillValid(); |
1467 | |
1468 | if (!masqueradesAsUndefinedWatchpointValid) { |
1469 | // The masquerades as undefined case will use the structure register, so allocate it here. |
1470 | // Do this at the top of the function to avoid branching around a register allocation. |
1471 | GPRTemporary realStructure(this); |
1472 | structure.adopt(realStructure); |
1473 | structureGPR = structure.gpr(); |
1474 | } |
1475 | |
1476 | MacroAssembler::Jump notCell = m_jit.branchIfNotCell(value.jsValueRegs()); |
1477 | if (masqueradesAsUndefinedWatchpointValid) { |
1478 | DFG_TYPE_CHECK( |
1479 | JSValueRegs(valueTagGPR, valuePayloadGPR), nodeUse, (~SpecCell) | SpecObject, |
1480 | m_jit.branchIfNotObject(valuePayloadGPR)); |
1481 | } else { |
1482 | DFG_TYPE_CHECK( |
1483 | JSValueRegs(valueTagGPR, valuePayloadGPR), nodeUse, (~SpecCell) | SpecObject, |
1484 | m_jit.branchIfNotObject(valuePayloadGPR)); |
1485 | |
1486 | MacroAssembler::Jump isNotMasqueradesAsUndefined = |
1487 | m_jit.branchTest8( |
1488 | MacroAssembler::Zero, |
1489 | MacroAssembler::Address(valuePayloadGPR, JSCell::typeInfoFlagsOffset()), |
1490 | MacroAssembler::TrustedImm32(MasqueradesAsUndefined)); |
1491 | |
1492 | m_jit.loadPtr(MacroAssembler::Address(valuePayloadGPR, JSCell::structureIDOffset()), structureGPR); |
1493 | speculationCheck(BadType, JSValueRegs(valueTagGPR, valuePayloadGPR), nodeUse, |
1494 | m_jit.branchPtr( |
1495 | MacroAssembler::Equal, |
1496 | MacroAssembler::Address(structureGPR, Structure::globalObjectOffset()), |
1497 | TrustedImmPtr::weakPointer(m_jit.graph(), m_jit.graph().globalObjectFor(m_currentNode->origin.semantic)))); |
1498 | |
1499 | isNotMasqueradesAsUndefined.link(&m_jit); |
1500 | } |
1501 | m_jit.move(TrustedImm32(0), resultPayloadGPR); |
1502 | MacroAssembler::Jump done = m_jit.jump(); |
1503 | |
1504 | notCell.link(&m_jit); |
1505 | |
1506 | COMPILE_ASSERT((JSValue::UndefinedTag | 1) == JSValue::NullTag, UndefinedTag_OR_1_EQUALS_NullTag); |
1507 | if (needsTypeCheck(nodeUse, SpecCell | SpecOther)) { |
1508 | m_jit.or32(TrustedImm32(1), valueTagGPR, resultPayloadGPR); |
1509 | typeCheck( |
1510 | JSValueRegs(valueTagGPR, valuePayloadGPR), nodeUse, SpecCell | SpecOther, |
1511 | m_jit.branch32( |
1512 | MacroAssembler::NotEqual, |
1513 | resultPayloadGPR, |
1514 | TrustedImm32(JSValue::NullTag))); |
1515 | } |
1516 | m_jit.move(TrustedImm32(1), resultPayloadGPR); |
1517 | |
1518 | done.link(&m_jit); |
1519 | |
1520 | booleanResult(resultPayloadGPR, m_currentNode); |
1521 | } |
1522 | |
1523 | void SpeculativeJIT::compileLogicalNot(Node* node) |
1524 | { |
1525 | switch (node->child1().useKind()) { |
1526 | case BooleanUse: |
1527 | case KnownBooleanUse: { |
1528 | SpeculateBooleanOperand value(this, node->child1()); |
1529 | GPRTemporary result(this, Reuse, value); |
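        // A speculated boolean payload is 0 or 1, so XORing with 1 computes the logical not.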
1530 | m_jit.xor32(TrustedImm32(1), value.gpr(), result.gpr()); |
1531 | booleanResult(result.gpr(), node); |
1532 | return; |
1533 | } |
1534 | |
1535 | case ObjectOrOtherUse: { |
1536 | compileObjectOrOtherLogicalNot(node->child1()); |
1537 | return; |
1538 | } |
1539 | |
1540 | case Int32Use: { |
1541 | SpeculateInt32Operand value(this, node->child1()); |
1542 | GPRTemporary resultPayload(this, Reuse, value); |
1543 | m_jit.compare32(MacroAssembler::Equal, value.gpr(), MacroAssembler::TrustedImm32(0), resultPayload.gpr()); |
1544 | booleanResult(resultPayload.gpr(), node); |
1545 | return; |
1546 | } |
1547 | |
1548 | case DoubleRepUse: { |
1549 | SpeculateDoubleOperand value(this, node->child1()); |
1550 | FPRTemporary scratch(this); |
1551 | GPRTemporary resultPayload(this); |
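        // The only falsy doubles are zero and NaN; branchDoubleNonZero is not taken for either,
        // so the result is overwritten with 1 (true) exactly when the input is falsy.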
1552 | m_jit.move(TrustedImm32(0), resultPayload.gpr()); |
1553 | MacroAssembler::Jump nonZero = m_jit.branchDoubleNonZero(value.fpr(), scratch.fpr()); |
1554 | m_jit.move(TrustedImm32(1), resultPayload.gpr()); |
1555 | nonZero.link(&m_jit); |
1556 | booleanResult(resultPayload.gpr(), node); |
1557 | return; |
1558 | } |
1559 | |
1560 | case UntypedUse: { |
1561 | JSValueOperand arg1(this, node->child1()); |
1562 | GPRTemporary result(this); |
1563 | GPRTemporary temp(this); |
1564 | FPRTemporary valueFPR(this); |
1565 | FPRTemporary tempFPR(this); |
1566 | |
1567 | GPRReg resultGPR = result.gpr(); |
1568 | |
1569 | bool shouldCheckMasqueradesAsUndefined = !masqueradesAsUndefinedWatchpointIsStillValid(); |
1570 | JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->origin.semantic); |
1571 | bool negateResult = true; |
1572 | m_jit.emitConvertValueToBoolean(vm(), arg1.jsValueRegs(), resultGPR, temp.gpr(), valueFPR.fpr(), tempFPR.fpr(), shouldCheckMasqueradesAsUndefined, globalObject, negateResult); |
1573 | booleanResult(resultGPR, node); |
1574 | return; |
1575 | } |
1576 | case StringUse: |
1577 | return compileStringZeroLength(node); |
1578 | |
1579 | case StringOrOtherUse: |
1580 | return compileLogicalNotStringOrOther(node); |
1581 | |
1582 | default: |
1583 | RELEASE_ASSERT_NOT_REACHED(); |
1584 | break; |
1585 | } |
1586 | } |
1587 | |
1588 | void SpeculativeJIT::emitObjectOrOtherBranch(Edge nodeUse, BasicBlock* taken, BasicBlock* notTaken) |
1589 | { |
1590 | JSValueOperand value(this, nodeUse, ManualOperandSpeculation); |
1591 | GPRTemporary scratch(this); |
1592 | GPRReg valueTagGPR = value.tagGPR(); |
1593 | GPRReg valuePayloadGPR = value.payloadGPR(); |
1594 | GPRReg scratchGPR = scratch.gpr(); |
1595 | |
1596 | MacroAssembler::Jump notCell = m_jit.branchIfNotCell(value.jsValueRegs()); |
1597 | if (masqueradesAsUndefinedWatchpointIsStillValid()) { |
1598 | DFG_TYPE_CHECK( |
1599 | JSValueRegs(valueTagGPR, valuePayloadGPR), nodeUse, (~SpecCell) | SpecObject, |
1600 | m_jit.branchIfNotObject(valuePayloadGPR)); |
1601 | } else { |
1602 | DFG_TYPE_CHECK( |
1603 | JSValueRegs(valueTagGPR, valuePayloadGPR), nodeUse, (~SpecCell) | SpecObject, |
1604 | m_jit.branchIfNotObject(valuePayloadGPR)); |
1605 | |
1606 | JITCompiler::Jump isNotMasqueradesAsUndefined = m_jit.branchTest8( |
1607 | JITCompiler::Zero, |
1608 | MacroAssembler::Address(valuePayloadGPR, JSCell::typeInfoFlagsOffset()), |
1609 | TrustedImm32(MasqueradesAsUndefined)); |
1610 | |
1611 | m_jit.loadPtr(MacroAssembler::Address(valuePayloadGPR, JSCell::structureIDOffset()), scratchGPR); |
1612 | speculationCheck(BadType, JSValueRegs(valueTagGPR, valuePayloadGPR), nodeUse, |
1613 | m_jit.branchPtr( |
1614 | MacroAssembler::Equal, |
1615 | MacroAssembler::Address(scratchGPR, Structure::globalObjectOffset()), |
1616 | TrustedImmPtr::weakPointer(m_jit.graph(), m_jit.graph().globalObjectFor(m_currentNode->origin.semantic)))); |
1617 | |
1618 | isNotMasqueradesAsUndefined.link(&m_jit); |
1619 | } |
1620 | jump(taken, ForceJump); |
1621 | |
1622 | notCell.link(&m_jit); |
1623 | |
1624 | COMPILE_ASSERT((JSValue::UndefinedTag | 1) == JSValue::NullTag, UndefinedTag_OR_1_EQUALS_NullTag); |
1625 | if (needsTypeCheck(nodeUse, SpecCell | SpecOther)) { |
1626 | m_jit.or32(TrustedImm32(1), valueTagGPR, scratchGPR); |
1627 | typeCheck( |
1628 | JSValueRegs(valueTagGPR, valuePayloadGPR), nodeUse, SpecCell | SpecOther, |
1629 | m_jit.branch32(MacroAssembler::NotEqual, scratchGPR, TrustedImm32(JSValue::NullTag))); |
1630 | } |
1631 | |
1632 | jump(notTaken); |
1633 | |
1634 | noResult(m_currentNode); |
1635 | } |
1636 | |
1637 | void SpeculativeJIT::emitBranch(Node* node) |
1638 | { |
1639 | BasicBlock* taken = node->branchData()->taken.block; |
1640 | BasicBlock* notTaken = node->branchData()->notTaken.block; |
1641 | |
1642 | switch (node->child1().useKind()) { |
1643 | case BooleanUse: |
1644 | case KnownBooleanUse: { |
1645 | SpeculateBooleanOperand value(this, node->child1()); |
1646 | MacroAssembler::ResultCondition condition = MacroAssembler::NonZero; |
1647 | |
1648 | if (taken == nextBlock()) { |
1649 | condition = MacroAssembler::Zero; |
1650 | BasicBlock* tmp = taken; |
1651 | taken = notTaken; |
1652 | notTaken = tmp; |
1653 | } |
1654 | |
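        // The speculated boolean payload is 0 or 1, so testing the low bit decides the branch.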
1655 | branchTest32(condition, value.gpr(), TrustedImm32(1), taken); |
1656 | jump(notTaken); |
1657 | |
1658 | noResult(node); |
1659 | return; |
1660 | } |
1661 | |
1662 | case ObjectOrOtherUse: { |
1663 | emitObjectOrOtherBranch(node->child1(), taken, notTaken); |
1664 | return; |
1665 | } |
1666 | |
1667 | case StringUse: { |
1668 | emitStringBranch(node->child1(), taken, notTaken); |
1669 | return; |
1670 | } |
1671 | |
1672 | case StringOrOtherUse: { |
1673 | emitStringOrOtherBranch(node->child1(), taken, notTaken); |
1674 | return; |
1675 | } |
1676 | |
1677 | case DoubleRepUse: |
1678 | case Int32Use: { |
1679 | if (node->child1().useKind() == Int32Use) { |
1680 | bool invert = false; |
1681 | |
1682 | if (taken == nextBlock()) { |
1683 | invert = true; |
1684 | BasicBlock* tmp = taken; |
1685 | taken = notTaken; |
1686 | notTaken = tmp; |
1687 | } |
1688 | |
1689 | SpeculateInt32Operand value(this, node->child1()); |
1690 | branchTest32(invert ? MacroAssembler::Zero : MacroAssembler::NonZero, value.gpr(), taken); |
1691 | } else { |
1692 | SpeculateDoubleOperand value(this, node->child1()); |
1693 | FPRTemporary scratch(this); |
1694 | branchDoubleNonZero(value.fpr(), scratch.fpr(), taken); |
1695 | } |
1696 | |
1697 | jump(notTaken); |
1698 | |
1699 | noResult(node); |
1700 | return; |
1701 | } |
1702 | |
1703 | case UntypedUse: { |
1704 | JSValueOperand value(this, node->child1()); |
1705 | FPRTemporary valueFPR(this); |
1706 | FPRTemporary tempFPR(this); |
1707 | GPRTemporary result(this); |
1708 | GPRTemporary temp(this); |
1709 | |
1710 | JSValueRegs valueRegs = value.jsValueRegs(); |
1711 | GPRReg resultGPR = result.gpr(); |
1712 | |
1713 | use(node->child1()); |
1714 | |
1715 | bool shouldCheckMasqueradesAsUndefined = !masqueradesAsUndefinedWatchpointIsStillValid(); |
1716 | JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->origin.semantic); |
1717 | auto falsey = m_jit.branchIfFalsey(vm(), valueRegs, resultGPR, temp.gpr(), valueFPR.fpr(), tempFPR.fpr(), shouldCheckMasqueradesAsUndefined, globalObject); |
1718 | addBranch(falsey, notTaken); |
1719 | jump(taken, ForceJump); |
1720 | |
1721 | noResult(node, UseChildrenCalledExplicitly); |
1722 | return; |
1723 | } |
1724 | |
1725 | default: |
1726 | RELEASE_ASSERT_NOT_REACHED(); |
1727 | break; |
1728 | } |
1729 | } |
1730 | |
1731 | template<typename BaseOperandType, typename PropertyOperandType, typename ValueOperandType, typename TagType> |
1732 | void SpeculativeJIT::compileContiguousPutByVal(Node* node, BaseOperandType& base, PropertyOperandType& property, ValueOperandType& value, GPRReg valuePayloadReg, TagType valueTag) |
1733 | { |
1734 | Edge child4 = m_jit.graph().varArgChild(node, 3); |
1735 | |
1736 | ArrayMode arrayMode = node->arrayMode(); |
1737 | |
1738 | GPRReg baseReg = base.gpr(); |
1739 | GPRReg propertyReg = property.gpr(); |
1740 | |
1741 | StorageOperand storage(this, child4); |
1742 | GPRReg storageReg = storage.gpr(); |
1743 | |
1744 | if (node->op() == PutByValAlias) { |
1745 | // Store the value to the array. |
1746 | GPRReg propertyReg = property.gpr(); |
1747 | m_jit.store32(valueTag, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag))); |
1748 | m_jit.store32(valuePayloadReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload))); |
1749 | |
1750 | noResult(node); |
1751 | return; |
1752 | } |
1753 | |
1754 | MacroAssembler::Jump slowCase; |
1755 | |
1756 | if (arrayMode.isInBounds()) { |
1757 | speculationCheck( |
1758 | OutOfBounds, JSValueRegs(), 0, |
1759 | m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()))); |
1760 | } else { |
1761 | MacroAssembler::Jump inBounds = m_jit.branch32(MacroAssembler::Below, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())); |
1762 | |
1763 | slowCase = m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfVectorLength())); |
1764 | |
1765 | if (!arrayMode.isOutOfBounds()) |
1766 | speculationCheck(OutOfBounds, JSValueRegs(), 0, slowCase); |
1767 | |
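        // The index is at or past the current public length (but within the vector), so grow the
        // length to index + 1, then restore propertyReg for use as the store index below.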
1768 | m_jit.add32(TrustedImm32(1), propertyReg); |
1769 | m_jit.store32(propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())); |
1770 | m_jit.sub32(TrustedImm32(1), propertyReg); |
1771 | |
1772 | inBounds.link(&m_jit); |
1773 | } |
1774 | |
1775 | m_jit.store32(valueTag, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag))); |
1776 | m_jit.store32(valuePayloadReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload))); |
1777 | |
1778 | base.use(); |
1779 | property.use(); |
1780 | value.use(); |
1781 | storage.use(); |
1782 | |
1783 | if (arrayMode.isOutOfBounds()) { |
1784 | if (node->op() == PutByValDirect) { |
1785 | addSlowPathGenerator(slowPathCall( |
1786 | slowCase, this, |
1787 | m_jit.isStrictModeFor(node->origin.semantic) ? operationPutByValDirectBeyondArrayBoundsStrict : operationPutByValDirectBeyondArrayBoundsNonStrict, |
1788 | NoResult, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseReg, propertyReg, JSValueRegs(valueTag, valuePayloadReg))); |
1789 | } else { |
1790 | addSlowPathGenerator(slowPathCall( |
1791 | slowCase, this, |
1792 | m_jit.isStrictModeFor(node->origin.semantic) ? operationPutByValBeyondArrayBoundsStrict : operationPutByValBeyondArrayBoundsNonStrict, |
1793 | NoResult, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseReg, propertyReg, JSValueRegs(valueTag, valuePayloadReg))); |
1794 | } |
1795 | } |
1796 | |
1797 | noResult(node, UseChildrenCalledExplicitly); |
1798 | } |
1799 | |
1800 | void SpeculativeJIT::compile(Node* node) |
1801 | { |
1802 | NodeType op = node->op(); |
1803 | |
1804 | #if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION) |
1805 | m_jit.clearRegisterAllocationOffsets(); |
1806 | #endif |
1807 | |
1808 | switch (op) { |
1809 | case JSConstant: |
1810 | case DoubleConstant: |
1811 | case PhantomDirectArguments: |
1812 | case PhantomClonedArguments: |
1813 | initConstantInfo(node); |
1814 | break; |
1815 | |
1816 | case LazyJSConstant: |
1817 | compileLazyJSConstant(node); |
1818 | break; |
1819 | |
1820 | case Identity: { |
1821 | compileIdentity(node); |
1822 | break; |
1823 | } |
1824 | |
1825 | case Inc: |
1826 | case Dec: |
1827 | compileIncOrDec(node); |
1828 | break; |
1829 | |
1830 | case GetLocal: { |
1831 | AbstractValue& value = m_state.operand(node->local()); |
1832 | |
1833 | // If the CFA is tracking this variable and it found that the variable |
1834 | // cannot have been assigned, then don't attempt to proceed. |
1835 | if (value.isClear()) { |
1836 | m_compileOkay = false; |
1837 | break; |
1838 | } |
1839 | |
1840 | switch (node->variableAccessData()->flushFormat()) { |
1841 | case FlushedDouble: { |
1842 | FPRTemporary result(this); |
1843 | m_jit.loadDouble(JITCompiler::addressFor(node->machineLocal()), result.fpr()); |
1844 | VirtualRegister virtualRegister = node->virtualRegister(); |
1845 | m_fprs.retain(result.fpr(), virtualRegister, SpillOrderDouble); |
1846 | generationInfoFromVirtualRegister(virtualRegister).initDouble(node, node->refCount(), result.fpr()); |
1847 | break; |
1848 | } |
1849 | |
1850 | case FlushedInt32: { |
1851 | GPRTemporary result(this); |
1852 | m_jit.load32(JITCompiler::payloadFor(node->machineLocal()), result.gpr()); |
1853 | |
1854 | // Like int32Result, but don't useChildren - our children are phi nodes, |
1855 | // and don't represent values within this dataflow with virtual registers. |
1856 | VirtualRegister virtualRegister = node->virtualRegister(); |
1857 | m_gprs.retain(result.gpr(), virtualRegister, SpillOrderInteger); |
1858 | generationInfoFromVirtualRegister(virtualRegister).initInt32(node, node->refCount(), result.gpr()); |
1859 | break; |
1860 | } |
1861 | |
1862 | case FlushedCell: { |
1863 | GPRTemporary result(this); |
1864 | m_jit.load32(JITCompiler::payloadFor(node->machineLocal()), result.gpr()); |
1865 | |
1866 | // Like cellResult, but don't useChildren - our children are phi nodes, |
1867 | // and don't represent values within this dataflow with virtual registers. |
1868 | VirtualRegister virtualRegister = node->virtualRegister(); |
1869 | m_gprs.retain(result.gpr(), virtualRegister, SpillOrderCell); |
1870 | generationInfoFromVirtualRegister(virtualRegister).initCell(node, node->refCount(), result.gpr()); |
1871 | break; |
1872 | } |
1873 | |
1874 | case FlushedBoolean: { |
1875 | GPRTemporary result(this); |
1876 | m_jit.load32(JITCompiler::payloadFor(node->machineLocal()), result.gpr()); |
1877 | |
1878 | // Like booleanResult, but don't useChildren - our children are phi nodes, |
1879 | // and don't represent values within this dataflow with virtual registers. |
1880 | VirtualRegister virtualRegister = node->virtualRegister(); |
1881 | m_gprs.retain(result.gpr(), virtualRegister, SpillOrderBoolean); |
1882 | generationInfoFromVirtualRegister(virtualRegister).initBoolean(node, node->refCount(), result.gpr()); |
1883 | break; |
1884 | } |
1885 | |
1886 | case FlushedJSValue: { |
1887 | GPRTemporary result(this); |
1888 | GPRTemporary tag(this); |
1889 | m_jit.load32(JITCompiler::payloadFor(node->machineLocal()), result.gpr()); |
1890 | m_jit.load32(JITCompiler::tagFor(node->machineLocal()), tag.gpr()); |
1891 | |
1892 | // Like jsValueResult, but don't useChildren - our children are phi nodes, |
1893 | // and don't represent values within this dataflow with virtual registers. |
1894 | VirtualRegister virtualRegister = node->virtualRegister(); |
1895 | m_gprs.retain(result.gpr(), virtualRegister, SpillOrderJS); |
1896 | m_gprs.retain(tag.gpr(), virtualRegister, SpillOrderJS); |
1897 | |
1898 | generationInfoFromVirtualRegister(virtualRegister).initJSValue(node, node->refCount(), tag.gpr(), result.gpr(), DataFormatJS); |
1899 | break; |
1900 | } |
1901 | |
1902 | default: |
1903 | RELEASE_ASSERT_NOT_REACHED(); |
1904 | } |
1905 | break; |
1906 | } |
1907 | |
1908 | case MovHint: { |
1909 | compileMovHint(m_currentNode); |
1910 | noResult(node); |
1911 | break; |
1912 | } |
1913 | |
1914 | case ZombieHint: { |
1915 | recordSetLocal(m_currentNode->unlinkedLocal(), VirtualRegister(), DataFormatDead); |
1916 | noResult(node); |
1917 | break; |
1918 | } |
1919 | |
1920 | case ExitOK: { |
1921 | noResult(node); |
1922 | break; |
1923 | } |
1924 | |
1925 | case SetLocal: { |
1926 | switch (node->variableAccessData()->flushFormat()) { |
1927 | case FlushedDouble: { |
1928 | SpeculateDoubleOperand value(this, node->child1()); |
1929 | m_jit.storeDouble(value.fpr(), JITCompiler::addressFor(node->machineLocal())); |
1930 | noResult(node); |
1931 | // Indicate that it's no longer necessary to retrieve the value of |
1932 | // this bytecode variable from registers or other locations in the stack, |
1933 | // but that it is stored as a double. |
1934 | recordSetLocal(DataFormatDouble); |
1935 | break; |
1936 | } |
1937 | |
1938 | case FlushedInt32: { |
1939 | SpeculateInt32Operand value(this, node->child1()); |
1940 | m_jit.store32(value.gpr(), JITCompiler::payloadFor(node->machineLocal())); |
1941 | noResult(node); |
1942 | recordSetLocal(DataFormatInt32); |
1943 | break; |
1944 | } |
1945 | |
1946 | case FlushedCell: { |
1947 | SpeculateCellOperand cell(this, node->child1()); |
1948 | GPRReg cellGPR = cell.gpr(); |
1949 | m_jit.storePtr(cellGPR, JITCompiler::payloadFor(node->machineLocal())); |
1950 | noResult(node); |
1951 | recordSetLocal(DataFormatCell); |
1952 | break; |
1953 | } |
1954 | |
1955 | case FlushedBoolean: { |
1956 | SpeculateBooleanOperand value(this, node->child1()); |
1957 | m_jit.store32(value.gpr(), JITCompiler::payloadFor(node->machineLocal())); |
1958 | noResult(node); |
1959 | recordSetLocal(DataFormatBoolean); |
1960 | break; |
1961 | } |
1962 | |
1963 | case FlushedJSValue: { |
1964 | JSValueOperand value(this, node->child1()); |
1965 | m_jit.store32(value.payloadGPR(), JITCompiler::payloadFor(node->machineLocal())); |
1966 | m_jit.store32(value.tagGPR(), JITCompiler::tagFor(node->machineLocal())); |
1967 | noResult(node); |
1968 | recordSetLocal(dataFormatFor(node->variableAccessData()->flushFormat())); |
1969 | break; |
1970 | } |
1971 | |
1972 | default: |
1973 | RELEASE_ASSERT_NOT_REACHED(); |
1974 | break; |
1975 | } |
1976 | break; |
1977 | } |
1978 | |
1979 | case SetArgumentDefinitely: |
1980 | case SetArgumentMaybe: |
1981 | // This is a no-op; it just marks the fact that the argument is being used. |
1982 | // But it may be profitable to use this as a hook to run speculation checks |
1983 | // on arguments, thereby allowing us to trivially eliminate such checks if |
1984 | // the argument is not used. |
1985 | recordSetLocal(dataFormatFor(node->variableAccessData()->flushFormat())); |
1986 | break; |
1987 | |
1988 | case ValueBitOr: |
1989 | case ValueBitAnd: |
1990 | case ValueBitXor: |
1991 | compileValueBitwiseOp(node); |
1992 | break; |
1993 | |
1994 | case ArithBitAnd: |
1995 | case ArithBitOr: |
1996 | case ArithBitXor: |
1997 | compileBitwiseOp(node); |
1998 | break; |
1999 | |
2000 | case ValueBitNot: |
2001 | compileValueBitNot(node); |
2002 | break; |
2003 | |
2004 | case ArithBitNot: |
2005 | compileBitwiseNot(node); |
2006 | break; |
2007 | |
2008 | case ValueBitLShift: |
2009 | compileValueLShiftOp(node); |
2010 | break; |
2011 | |
2012 | case ValueBitRShift: |
2013 | compileValueBitRShift(node); |
2014 | break; |
2015 | |
2016 | case ArithBitRShift: |
2017 | case ArithBitLShift: |
2018 | case BitURShift: |
2019 | compileShiftOp(node); |
2020 | break; |
2021 | |
2022 | case UInt32ToNumber: { |
2023 | compileUInt32ToNumber(node); |
2024 | break; |
2025 | } |
2026 | |
2027 | case DoubleAsInt32: { |
2028 | compileDoubleAsInt32(node); |
2029 | break; |
2030 | } |
2031 | |
2032 | case ValueToInt32: { |
2033 | compileValueToInt32(node); |
2034 | break; |
2035 | } |
2036 | |
2037 | case DoubleRep: { |
2038 | compileDoubleRep(node); |
2039 | break; |
2040 | } |
2041 | |
2042 | case ValueRep: { |
2043 | compileValueRep(node); |
2044 | break; |
2045 | } |
2046 | |
2047 | case ValueNegate: |
2048 | compileValueNegate(node); |
2049 | break; |
2050 | |
2051 | case ValueAdd: |
2052 | compileValueAdd(node); |
2053 | break; |
2054 | |
2055 | case ValueSub: |
2056 | compileValueSub(node); |
2057 | break; |
2058 | |
2059 | case StrCat: { |
2060 | compileStrCat(node); |
2061 | break; |
2062 | } |
2063 | |
2064 | case ArithAdd: |
2065 | compileArithAdd(node); |
2066 | break; |
2067 | |
2068 | case ArithClz32: |
2069 | compileArithClz32(node); |
2070 | break; |
2071 | |
2072 | case MakeRope: |
2073 | compileMakeRope(node); |
2074 | break; |
2075 | |
2076 | case ArithSub: |
2077 | compileArithSub(node); |
2078 | break; |
2079 | |
2080 | case ArithNegate: |
2081 | compileArithNegate(node); |
2082 | break; |
2083 | |
2084 | case ArithMul: |
2085 | compileArithMul(node); |
2086 | break; |
2087 | |
2088 | case ValueMul: |
2089 | compileValueMul(node); |
2090 | break; |
2091 | |
2092 | case ValueDiv: { |
2093 | compileValueDiv(node); |
2094 | break; |
2095 | } |
2096 | |
2097 | case ArithDiv: { |
2098 | compileArithDiv(node); |
2099 | break; |
2100 | } |
2101 | |
2102 | case ValueMod: { |
2103 | compileValueMod(node); |
2104 | break; |
2105 | } |
2106 | |
2107 | case ArithMod: { |
2108 | compileArithMod(node); |
2109 | break; |
2110 | } |
2111 | |
2112 | case ValuePow: { |
2113 | compileValuePow(node); |
2114 | break; |
2115 | } |
2116 | |
2117 | case ArithPow: { |
2118 | compileArithPow(node); |
2119 | break; |
2120 | } |
2121 | |
2122 | case ArithAbs: |
2123 | compileArithAbs(node); |
2124 | break; |
2125 | |
2126 | case ArithMin: |
2127 | case ArithMax: { |
2128 | compileArithMinMax(node); |
2129 | break; |
2130 | } |
2131 | |
2132 | case ArithSqrt: |
2133 | compileArithSqrt(node); |
2134 | break; |
2135 | |
2136 | case ArithFRound: |
2137 | compileArithFRound(node); |
2138 | break; |
2139 | |
2140 | case ArithRandom: |
2141 | compileArithRandom(node); |
2142 | break; |
2143 | |
2144 | case ArithRound: |
2145 | case ArithFloor: |
2146 | case ArithCeil: |
2147 | case ArithTrunc: |
2148 | compileArithRounding(node); |
2149 | break; |
2150 | |
2151 | case ArithUnary: |
2152 | compileArithUnary(node); |
2153 | break; |
2154 | |
2155 | case LogicalNot: |
2156 | compileLogicalNot(node); |
2157 | break; |
2158 | |
2159 | case CompareLess: |
2160 | if (compare(node, JITCompiler::LessThan, JITCompiler::DoubleLessThan, operationCompareLess)) |
2161 | return; |
2162 | break; |
2163 | |
2164 | case CompareLessEq: |
2165 | if (compare(node, JITCompiler::LessThanOrEqual, JITCompiler::DoubleLessThanOrEqual, operationCompareLessEq)) |
2166 | return; |
2167 | break; |
2168 | |
2169 | case CompareGreater: |
2170 | if (compare(node, JITCompiler::GreaterThan, JITCompiler::DoubleGreaterThan, operationCompareGreater)) |
2171 | return; |
2172 | break; |
2173 | |
2174 | case CompareGreaterEq: |
2175 | if (compare(node, JITCompiler::GreaterThanOrEqual, JITCompiler::DoubleGreaterThanOrEqual, operationCompareGreaterEq)) |
2176 | return; |
2177 | break; |
2178 | |
2179 | case CompareBelow: |
2180 | compileCompareUnsigned(node, JITCompiler::Below); |
2181 | break; |
2182 | |
2183 | case CompareBelowEq: |
2184 | compileCompareUnsigned(node, JITCompiler::BelowOrEqual); |
2185 | break; |
2186 | |
2187 | case CompareEq: |
2188 | if (compare(node, JITCompiler::Equal, JITCompiler::DoubleEqual, operationCompareEq)) |
2189 | return; |
2190 | break; |
2191 | |
2192 | case CompareStrictEq: |
2193 | if (compileStrictEq(node)) |
2194 | return; |
2195 | break; |
2196 | |
2197 | case CompareEqPtr: |
2198 | compileCompareEqPtr(node); |
2199 | break; |
2200 | |
2201 | case SameValue: |
2202 | compileSameValue(node); |
2203 | break; |
2204 | |
2205 | case StringCharCodeAt: { |
2206 | compileGetCharCodeAt(node); |
2207 | break; |
2208 | } |
2209 | |
2210 | case StringCharAt: { |
        // Relies on the StringCharAt node having the same basic layout as GetByVal.
2212 | compileGetByValOnString(node); |
2213 | break; |
2214 | } |
2215 | |
2216 | case StringFromCharCode: { |
2217 | compileFromCharCode(node); |
2218 | break; |
2219 | } |
2220 | |
2221 | case CheckArray: { |
2222 | checkArray(node); |
2223 | break; |
2224 | } |
2225 | |
2226 | case Arrayify: |
2227 | case ArrayifyToStructure: { |
2228 | arrayify(node); |
2229 | break; |
2230 | } |
2231 | |
2232 | case GetByVal: { |
2233 | switch (node->arrayMode().type()) { |
2234 | case Array::SelectUsingPredictions: |
2235 | case Array::ForceExit: |
2236 | RELEASE_ASSERT_NOT_REACHED(); |
2237 | #if COMPILER_QUIRK(CONSIDERS_UNREACHABLE_CODE) |
2238 | terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0); |
2239 | #endif |
2240 | break; |
2241 | case Array::Undecided: { |
2242 | SpeculateStrictInt32Operand index(this, m_graph.varArgChild(node, 1)); |
2243 | GPRTemporary resultTag(this, Reuse, index); |
2244 | GPRTemporary resultPayload(this); |
2245 | |
2246 | GPRReg indexGPR = index.gpr(); |
2247 | GPRReg resultTagGPR = resultTag.gpr(); |
2248 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
2249 | |
2250 | speculationCheck(OutOfBounds, JSValueRegs(), node, |
2251 | m_jit.branch32(MacroAssembler::LessThan, indexGPR, MacroAssembler::TrustedImm32(0))); |
2252 | |
2253 | use(m_graph.varArgChild(node, 0)); |
2254 | index.use(); |
2255 | |
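            // An Undecided array has no storage yet, so any non-negative index reads as undefined.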
2256 | m_jit.move(MacroAssembler::TrustedImm32(JSValue::UndefinedTag), resultTagGPR); |
2257 | m_jit.move(MacroAssembler::TrustedImm32(0), resultPayloadGPR); |
2258 | jsValueResult(resultTagGPR, resultPayloadGPR, node, UseChildrenCalledExplicitly); |
2259 | break; |
2260 | } |
2261 | case Array::Generic: { |
2262 | // FIXME: Implement IC here: |
2263 | // https://bugs.webkit.org/show_bug.cgi?id=204082 |
2264 | if (m_graph.varArgChild(node, 0).useKind() == ObjectUse) { |
2265 | if (m_graph.varArgChild(node, 1).useKind() == StringUse) { |
2266 | compileGetByValForObjectWithString(node); |
2267 | break; |
2268 | } |
2269 | |
2270 | if (m_graph.varArgChild(node, 1).useKind() == SymbolUse) { |
2271 | compileGetByValForObjectWithSymbol(node); |
2272 | break; |
2273 | } |
2274 | } |
2275 | |
2276 | SpeculateCellOperand base(this, m_graph.varArgChild(node, 0)); // Save a register, speculate cell. We'll probably be right. |
2277 | JSValueOperand property(this, m_graph.varArgChild(node, 1)); |
2278 | GPRReg baseGPR = base.gpr(); |
2279 | JSValueRegs propertyRegs = property.jsValueRegs(); |
2280 | |
2281 | flushRegisters(); |
2282 | JSValueRegsFlushedCallResult result(this); |
2283 | JSValueRegs resultRegs = result.regs(); |
2284 | callOperation(operationGetByValCell, resultRegs, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseGPR, propertyRegs); |
2285 | m_jit.exceptionCheck(); |
2286 | |
2287 | jsValueResult(resultRegs, node); |
2288 | break; |
2289 | } |
2290 | case Array::Int32: |
2291 | case Array::Contiguous: { |
2292 | if (node->arrayMode().isInBounds()) { |
2293 | SpeculateStrictInt32Operand property(this, m_graph.varArgChild(node, 1)); |
2294 | StorageOperand storage(this, m_graph.varArgChild(node, 2)); |
2295 | |
2296 | GPRReg propertyReg = property.gpr(); |
2297 | GPRReg storageReg = storage.gpr(); |
2298 | |
2299 | if (!m_compileOkay) |
2300 | return; |
2301 | |
2302 | speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()))); |
2303 | |
2304 | GPRTemporary resultPayload(this); |
2305 | if (node->arrayMode().type() == Array::Int32) { |
2306 | ASSERT(!node->arrayMode().isSaneChain()); |
2307 | |
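                    // A hole in an Int32 array is marked by the empty value tag; loading one fails
                    // the speculation rather than producing a value.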
2308 | speculationCheck( |
2309 | OutOfBounds, JSValueRegs(), 0, |
2310 | m_jit.branch32( |
2311 | MacroAssembler::Equal, |
2312 | MacroAssembler::BaseIndex( |
2313 | storageReg, propertyReg, MacroAssembler::TimesEight, TagOffset), |
2314 | TrustedImm32(JSValue::EmptyValueTag))); |
2315 | m_jit.load32( |
2316 | MacroAssembler::BaseIndex( |
2317 | storageReg, propertyReg, MacroAssembler::TimesEight, PayloadOffset), |
2318 | resultPayload.gpr()); |
2319 | int32Result(resultPayload.gpr(), node); |
2320 | break; |
2321 | } |
2322 | |
2323 | GPRTemporary resultTag(this); |
2324 | m_jit.load32( |
2325 | MacroAssembler::BaseIndex( |
2326 | storageReg, propertyReg, MacroAssembler::TimesEight, TagOffset), |
2327 | resultTag.gpr()); |
2328 | m_jit.load32( |
2329 | MacroAssembler::BaseIndex( |
2330 | storageReg, propertyReg, MacroAssembler::TimesEight, PayloadOffset), |
2331 | resultPayload.gpr()); |
2332 | if (node->arrayMode().isSaneChain()) { |
2333 | JITCompiler::Jump notHole = m_jit.branchIfNotEmpty(resultTag.gpr()); |
2334 | m_jit.move(TrustedImm32(JSValue::UndefinedTag), resultTag.gpr()); |
2335 | m_jit.move(TrustedImm32(0), resultPayload.gpr()); |
2336 | notHole.link(&m_jit); |
2337 | } else { |
2338 | speculationCheck( |
2339 | LoadFromHole, JSValueRegs(), 0, |
2340 | m_jit.branchIfEmpty(resultTag.gpr())); |
2341 | } |
2342 | jsValueResult(resultTag.gpr(), resultPayload.gpr(), node); |
2343 | break; |
2344 | } |
2345 | |
2346 | SpeculateCellOperand base(this, m_graph.varArgChild(node, 0)); |
2347 | SpeculateStrictInt32Operand property(this, m_graph.varArgChild(node, 1)); |
2348 | StorageOperand storage(this, m_graph.varArgChild(node, 2)); |
2349 | |
2350 | GPRReg baseReg = base.gpr(); |
2351 | GPRReg propertyReg = property.gpr(); |
2352 | GPRReg storageReg = storage.gpr(); |
2353 | |
2354 | if (!m_compileOkay) |
2355 | return; |
2356 | |
2357 | GPRTemporary resultTag(this); |
2358 | GPRTemporary resultPayload(this); |
2359 | GPRReg resultTagReg = resultTag.gpr(); |
2360 | GPRReg resultPayloadReg = resultPayload.gpr(); |
2361 | |
2362 | MacroAssembler::JumpList slowCases; |
2363 | |
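            // Out-of-bounds indices and holes fall back to the generic lookup, which also consults
            // the prototype chain.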
2364 | slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()))); |
2365 | |
2366 | m_jit.load32(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTagReg); |
2367 | m_jit.load32(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayloadReg); |
2368 | slowCases.append(m_jit.branchIfEmpty(resultTagReg)); |
2369 | |
2370 | addSlowPathGenerator( |
2371 | slowPathCall( |
2372 | slowCases, this, operationGetByValObjectInt, |
2373 | JSValueRegs(resultTagReg, resultPayloadReg), TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseReg, propertyReg)); |
2374 | |
2375 | jsValueResult(resultTagReg, resultPayloadReg, node); |
2376 | break; |
2377 | } |
2378 | case Array::Double: { |
2379 | if (node->arrayMode().isInBounds()) { |
2380 | SpeculateStrictInt32Operand property(this, m_graph.varArgChild(node, 1)); |
2381 | StorageOperand storage(this, m_graph.varArgChild(node, 2)); |
2382 | |
2383 | GPRReg propertyReg = property.gpr(); |
2384 | GPRReg storageReg = storage.gpr(); |
2385 | |
2386 | if (!m_compileOkay) |
2387 | return; |
2388 | |
2389 | speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()))); |
2390 | |
2391 | FPRTemporary result(this); |
2392 | m_jit.loadDouble(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), result.fpr()); |
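                // Double arrays never store NaN as a value (stores here speculate non-NaN doubles),
                // so a NaN load means we read from a hole.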
2393 | if (!node->arrayMode().isSaneChain()) |
2394 | speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchIfNaN(result.fpr())); |
2395 | doubleResult(result.fpr(), node); |
2396 | break; |
2397 | } |
2398 | |
2399 | SpeculateCellOperand base(this, m_graph.varArgChild(node, 0)); |
2400 | SpeculateStrictInt32Operand property(this, m_graph.varArgChild(node, 1)); |
2401 | StorageOperand storage(this, m_graph.varArgChild(node, 2)); |
2402 | |
2403 | GPRReg baseReg = base.gpr(); |
2404 | GPRReg propertyReg = property.gpr(); |
2405 | GPRReg storageReg = storage.gpr(); |
2406 | |
2407 | if (!m_compileOkay) |
2408 | return; |
2409 | |
2410 | GPRTemporary resultTag(this); |
2411 | GPRTemporary resultPayload(this); |
2412 | FPRTemporary temp(this); |
2413 | GPRReg resultTagReg = resultTag.gpr(); |
2414 | GPRReg resultPayloadReg = resultPayload.gpr(); |
2415 | FPRReg tempReg = temp.fpr(); |
2416 | |
2417 | MacroAssembler::JumpList slowCases; |
2418 | |
2419 | slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()))); |
2420 | |
2421 | m_jit.loadDouble(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), tempReg); |
2422 | slowCases.append(m_jit.branchIfNaN(tempReg)); |
2423 | boxDouble(tempReg, resultTagReg, resultPayloadReg); |
2424 | |
2425 | addSlowPathGenerator( |
2426 | slowPathCall( |
2427 | slowCases, this, operationGetByValObjectInt, |
2428 | JSValueRegs(resultTagReg, resultPayloadReg), TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseReg, propertyReg)); |
2429 | |
2430 | jsValueResult(resultTagReg, resultPayloadReg, node); |
2431 | break; |
2432 | } |
2433 | case Array::ArrayStorage: |
2434 | case Array::SlowPutArrayStorage: { |
2435 | if (node->arrayMode().isInBounds()) { |
2436 | SpeculateStrictInt32Operand property(this, m_graph.varArgChild(node, 1)); |
2437 | StorageOperand storage(this, m_graph.varArgChild(node, 2)); |
2438 | GPRReg propertyReg = property.gpr(); |
2439 | GPRReg storageReg = storage.gpr(); |
2440 | |
2441 | if (!m_compileOkay) |
2442 | return; |
2443 | |
2444 | speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset()))); |
2445 | |
2446 | GPRTemporary resultTag(this); |
2447 | GPRTemporary resultPayload(this); |
2448 | |
2449 | m_jit.load32(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag.gpr()); |
2450 | speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchIfEmpty(resultTag.gpr())); |
2451 | m_jit.load32(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload.gpr()); |
2452 | |
2453 | jsValueResult(resultTag.gpr(), resultPayload.gpr(), node); |
2454 | break; |
2455 | } |
2456 | |
2457 | SpeculateCellOperand base(this, m_graph.varArgChild(node, 0)); |
2458 | SpeculateStrictInt32Operand property(this, m_graph.varArgChild(node, 1)); |
2459 | StorageOperand storage(this, m_graph.varArgChild(node, 2)); |
2460 | GPRReg propertyReg = property.gpr(); |
2461 | GPRReg storageReg = storage.gpr(); |
2462 | GPRReg baseReg = base.gpr(); |
2463 | |
2464 | if (!m_compileOkay) |
2465 | return; |
2466 | |
2467 | GPRTemporary resultTag(this); |
2468 | GPRTemporary resultPayload(this); |
2469 | GPRReg resultTagReg = resultTag.gpr(); |
2470 | GPRReg resultPayloadReg = resultPayload.gpr(); |
2471 | |
2472 | JITCompiler::Jump outOfBounds = m_jit.branch32( |
2473 | MacroAssembler::AboveOrEqual, propertyReg, |
2474 | MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset())); |
2475 | |
2476 | m_jit.load32(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTagReg); |
            JITCompiler::Jump hole = m_jit.branchIfEmpty(resultTagReg);
2478 | m_jit.load32(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayloadReg); |
2479 | |
2480 | JITCompiler::JumpList slowCases; |
2481 | slowCases.append(outOfBounds); |
2482 | slowCases.append(hole); |
2483 | addSlowPathGenerator( |
2484 | slowPathCall( |
2485 | slowCases, this, operationGetByValObjectInt, |
2486 | JSValueRegs(resultTagReg, resultPayloadReg), TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), |
2487 | baseReg, propertyReg)); |
2488 | |
2489 | jsValueResult(resultTagReg, resultPayloadReg, node); |
2490 | break; |
2491 | } |
2492 | case Array::String: |
2493 | compileGetByValOnString(node); |
2494 | break; |
2495 | case Array::DirectArguments: |
2496 | compileGetByValOnDirectArguments(node); |
2497 | break; |
2498 | case Array::ScopedArguments: |
2499 | compileGetByValOnScopedArguments(node); |
2500 | break; |
2501 | default: { |
2502 | TypedArrayType type = node->arrayMode().typedArrayType(); |
2503 | if (isInt(type)) |
2504 | compileGetByValOnIntTypedArray(node, type); |
2505 | else |
2506 | compileGetByValOnFloatTypedArray(node, type); |
2507 | } } |
2508 | break; |
2509 | } |
2510 | |
2511 | case StringSlice: { |
2512 | compileStringSlice(node); |
2513 | break; |
2514 | } |
2515 | |
2516 | case ToLowerCase: { |
2517 | compileToLowerCase(node); |
2518 | break; |
2519 | } |
2520 | |
2521 | case NumberToStringWithRadix: { |
2522 | compileNumberToStringWithRadix(node); |
2523 | break; |
2524 | } |
2525 | |
2526 | case NumberToStringWithValidRadixConstant: { |
2527 | compileNumberToStringWithValidRadixConstant(node); |
2528 | break; |
2529 | } |
2530 | |
2531 | case GetByValWithThis: { |
2532 | compileGetByValWithThis(node); |
2533 | break; |
2534 | } |
2535 | |
2536 | case PutByValDirect: |
2537 | case PutByVal: |
2538 | case PutByValAlias: { |
2539 | Edge child1 = m_jit.graph().varArgChild(node, 0); |
2540 | Edge child2 = m_jit.graph().varArgChild(node, 1); |
2541 | Edge child3 = m_jit.graph().varArgChild(node, 2); |
2542 | Edge child4 = m_jit.graph().varArgChild(node, 3); |
2543 | |
2544 | ArrayMode arrayMode = node->arrayMode().modeForPut(); |
2545 | bool alreadyHandled = false; |
2546 | |
2547 | switch (arrayMode.type()) { |
2548 | case Array::SelectUsingPredictions: |
2549 | case Array::ForceExit: |
2550 | RELEASE_ASSERT_NOT_REACHED(); |
2551 | #if COMPILER_QUIRK(CONSIDERS_UNREACHABLE_CODE) |
2552 | terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0); |
2553 | alreadyHandled = true; |
2554 | #endif |
2555 | break; |
2556 | case Array::Generic: { |
2557 | ASSERT(node->op() == PutByVal || node->op() == PutByValDirect); |
2558 | |
2559 | if (child1.useKind() == CellUse) { |
2560 | if (child2.useKind() == StringUse) { |
2561 | compilePutByValForCellWithString(node, child1, child2, child3); |
2562 | alreadyHandled = true; |
2563 | break; |
2564 | } |
2565 | |
2566 | if (child2.useKind() == SymbolUse) { |
2567 | compilePutByValForCellWithSymbol(node, child1, child2, child3); |
2568 | alreadyHandled = true; |
2569 | break; |
2570 | } |
2571 | } |
2572 | |
2573 | SpeculateCellOperand base(this, child1); // Save a register, speculate cell. We'll probably be right. |
2574 | JSValueOperand property(this, child2); |
2575 | JSValueOperand value(this, child3); |
2576 | GPRReg baseGPR = base.gpr(); |
2577 | JSValueRegs propertyRegs = property.jsValueRegs(); |
2578 | JSValueRegs valueRegs = value.jsValueRegs(); |
2579 | |
2580 | flushRegisters(); |
2581 | if (node->op() == PutByValDirect) |
2582 | callOperation(m_jit.isStrictModeFor(node->origin.semantic) ? operationPutByValDirectCellStrict : operationPutByValDirectCellNonStrict, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseGPR, propertyRegs, valueRegs); |
2583 | else |
2584 | callOperation(m_jit.isStrictModeFor(node->origin.semantic) ? operationPutByValCellStrict : operationPutByValCellNonStrict, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseGPR, propertyRegs, valueRegs); |
2585 | m_jit.exceptionCheck(); |
2586 | |
2587 | noResult(node); |
2588 | alreadyHandled = true; |
2589 | break; |
2590 | } |
2591 | default: |
2592 | break; |
2593 | } |
2594 | |
2595 | if (alreadyHandled) |
2596 | break; |
2597 | |
2598 | SpeculateCellOperand base(this, child1); |
2599 | SpeculateStrictInt32Operand property(this, child2); |
2600 | |
2601 | GPRReg baseReg = base.gpr(); |
2602 | GPRReg propertyReg = property.gpr(); |
2603 | |
2604 | switch (arrayMode.type()) { |
2605 | case Array::Int32: { |
2606 | speculateInt32(child3); |
2607 | FALLTHROUGH; |
2608 | } |
2609 | case Array::Contiguous: { |
2610 | JSValueOperand value(this, child3, ManualOperandSpeculation); |
2611 | |
2612 | GPRReg valueTagReg = value.tagGPR(); |
2613 | GPRReg valuePayloadReg = value.payloadGPR(); |
2614 | |
2615 | if (!m_compileOkay) |
2616 | return; |
2617 | |
2618 | compileContiguousPutByVal(node, base, property, value, valuePayloadReg, valueTagReg); |
2619 | break; |
2620 | } |
2621 | case Array::Double: { |
2622 | compileDoublePutByVal(node, base, property); |
2623 | break; |
2624 | } |
2625 | case Array::ArrayStorage: |
2626 | case Array::SlowPutArrayStorage: { |
2627 | JSValueOperand value(this, child3); |
2628 | |
2629 | GPRReg valueTagReg = value.tagGPR(); |
2630 | GPRReg valuePayloadReg = value.payloadGPR(); |
2631 | |
2632 | if (!m_compileOkay) |
2633 | return; |
2634 | |
2635 | StorageOperand storage(this, child4); |
2636 | GPRReg storageReg = storage.gpr(); |
2637 | |
2638 | if (node->op() == PutByValAlias) { |
2639 | // Store the value to the array. |
2640 | GPRReg propertyReg = property.gpr(); |
2641 | m_jit.store32(value.tagGPR(), MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); |
2642 | m_jit.store32(value.payloadGPR(), MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); |
2643 | |
2644 | noResult(node); |
2645 | break; |
2646 | } |
2647 | |
2648 | MacroAssembler::JumpList slowCases; |
2649 | |
2650 | MacroAssembler::Jump beyondArrayBounds = m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset())); |
2651 | if (!arrayMode.isOutOfBounds()) |
2652 | speculationCheck(OutOfBounds, JSValueRegs(), 0, beyondArrayBounds); |
2653 | else |
2654 | slowCases.append(beyondArrayBounds); |
2655 | |
2656 | // Check if we're writing to a hole; if so increment m_numValuesInVector. |
2657 | if (arrayMode.isInBounds()) { |
2658 | speculationCheck( |
2659 | StoreToHole, JSValueRegs(), 0, |
2660 | m_jit.branch32(MacroAssembler::Equal, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag))); |
2661 | } else { |
2662 | MacroAssembler::Jump notHoleValue = m_jit.branch32(MacroAssembler::NotEqual, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag)); |
2663 | if (arrayMode.isSlowPut()) { |
2664 | // This is sort of strange. If we wanted to optimize this code path, we would invert |
2665 | // the above branch. But it's simply not worth it since this only happens if we're |
2666 | // already having a bad time. |
2667 | slowCases.append(m_jit.jump()); |
2668 | } else { |
2669 | m_jit.add32(TrustedImm32(1), MacroAssembler::Address(storageReg, ArrayStorage::numValuesInVectorOffset())); |
2670 | |
                    // If we're writing to a hole, we might be growing the array; if the index is at
                    // or past the current length, bump the length to index + 1.
2672 | MacroAssembler::Jump lengthDoesNotNeedUpdate = m_jit.branch32(MacroAssembler::Below, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::lengthOffset())); |
2673 | m_jit.add32(TrustedImm32(1), propertyReg); |
2674 | m_jit.store32(propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::lengthOffset())); |
2675 | m_jit.sub32(TrustedImm32(1), propertyReg); |
2676 | |
2677 | lengthDoesNotNeedUpdate.link(&m_jit); |
2678 | } |
2679 | notHoleValue.link(&m_jit); |
2680 | } |
2681 | |
2682 | // Store the value to the array. |
2683 | m_jit.store32(valueTagReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); |
2684 | m_jit.store32(valuePayloadReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); |
2685 | |
2686 | base.use(); |
2687 | property.use(); |
2688 | value.use(); |
2689 | storage.use(); |
2690 | |
2691 | if (!slowCases.empty()) { |
2692 | if (node->op() == PutByValDirect) { |
2693 | addSlowPathGenerator(slowPathCall( |
2694 | slowCases, this, |
2695 | m_jit.isStrictModeFor(node->origin.semantic) ? operationPutByValDirectBeyondArrayBoundsStrict : operationPutByValDirectBeyondArrayBoundsNonStrict, |
2696 | NoResult, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseReg, propertyReg, JSValueRegs(valueTagReg, valuePayloadReg))); |
2697 | } else { |
2698 | addSlowPathGenerator(slowPathCall( |
2699 | slowCases, this, |
2700 | m_jit.isStrictModeFor(node->origin.semantic) ? operationPutByValBeyondArrayBoundsStrict : operationPutByValBeyondArrayBoundsNonStrict, |
2701 | NoResult, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseReg, propertyReg, JSValueRegs(valueTagReg, valuePayloadReg))); |
2702 | } |
2703 | } |
2704 | |
2705 | noResult(node, UseChildrenCalledExplicitly); |
2706 | break; |
2707 | } |
2708 | |
2709 | default: { |
2710 | TypedArrayType type = arrayMode.typedArrayType(); |
2711 | if (isInt(type)) |
2712 | compilePutByValForIntTypedArray(base.gpr(), property.gpr(), node, type); |
2713 | else |
2714 | compilePutByValForFloatTypedArray(base.gpr(), property.gpr(), node, type); |
2715 | } } |
2716 | break; |
2717 | } |
2718 | |
2719 | case PutByValWithThis: { |
2720 | static_assert(GPRInfo::numberOfRegisters >= 8, "We are assuming we have enough registers to make this call without incrementally setting up the arguments." ); |
2721 | |
2722 | JSValueOperand base(this, m_jit.graph().varArgChild(node, 0)); |
2723 | JSValueRegs baseRegs = base.jsValueRegs(); |
2724 | |
2725 | JSValueOperand thisValue(this, m_jit.graph().varArgChild(node, 1)); |
2726 | JSValueRegs thisRegs = thisValue.jsValueRegs(); |
2727 | |
2728 | JSValueOperand property(this, m_jit.graph().varArgChild(node, 2)); |
2729 | JSValueRegs propertyRegs = property.jsValueRegs(); |
2730 | |
2731 | JSValueOperand value(this, m_jit.graph().varArgChild(node, 3)); |
2732 | JSValueRegs valueRegs = value.jsValueRegs(); |
2733 | |
2734 | flushRegisters(); |
2735 | callOperation(m_jit.isStrictModeFor(node->origin.semantic) ? operationPutByValWithThisStrict : operationPutByValWithThis, |
2736 | NoResult, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseRegs, thisRegs, propertyRegs, valueRegs); |
2737 | m_jit.exceptionCheck(); |
2738 | |
2739 | noResult(node); |
2740 | break; |
2741 | } |
2742 | |
2743 | case RegExpExec: { |
2744 | compileRegExpExec(node); |
2745 | break; |
2746 | } |
2747 | |
2748 | case RegExpExecNonGlobalOrSticky: { |
2749 | compileRegExpExecNonGlobalOrSticky(node); |
2750 | break; |
2751 | } |
2752 | |
2753 | case RegExpMatchFastGlobal: { |
2754 | compileRegExpMatchFastGlobal(node); |
2755 | break; |
2756 | } |
2757 | |
2758 | case RegExpTest: { |
2759 | compileRegExpTest(node); |
2760 | break; |
2761 | } |
2762 | |
2763 | case RegExpMatchFast: { |
2764 | compileRegExpMatchFast(node); |
2765 | break; |
2766 | } |
2767 | |
2768 | case StringReplace: |
2769 | case StringReplaceRegExp: { |
2770 | compileStringReplace(node); |
2771 | break; |
2772 | } |
2773 | |
2774 | case GetRegExpObjectLastIndex: { |
2775 | compileGetRegExpObjectLastIndex(node); |
2776 | break; |
2777 | } |
2778 | |
2779 | case SetRegExpObjectLastIndex: { |
2780 | compileSetRegExpObjectLastIndex(node); |
2781 | break; |
2782 | } |
2783 | |
2784 | case RecordRegExpCachedResult: { |
2785 | compileRecordRegExpCachedResult(node); |
2786 | break; |
2787 | } |
2788 | |
2789 | case ArrayPush: { |
2790 | compileArrayPush(node); |
2791 | break; |
2792 | } |
2793 | |
2794 | case ArrayPop: { |
2795 | ASSERT(node->arrayMode().isJSArray()); |
2796 | |
2797 | SpeculateCellOperand base(this, node->child1()); |
2798 | StorageOperand storage(this, node->child2()); |
2799 | GPRTemporary valueTag(this); |
2800 | GPRTemporary valuePayload(this); |
2801 | |
2802 | GPRReg baseGPR = base.gpr(); |
2803 | GPRReg valueTagGPR = valueTag.gpr(); |
2804 | GPRReg valuePayloadGPR = valuePayload.gpr(); |
2805 | GPRReg storageGPR = storage.gpr(); |
2806 | |
2807 | switch (node->arrayMode().type()) { |
2808 | case Array::Int32: |
2809 | case Array::Contiguous: { |
2810 | m_jit.load32( |
2811 | MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), valuePayloadGPR); |
2812 | MacroAssembler::Jump undefinedCase = |
2813 | m_jit.branchTest32(MacroAssembler::Zero, valuePayloadGPR); |
2814 | m_jit.sub32(TrustedImm32(1), valuePayloadGPR); |
2815 | m_jit.store32( |
2816 | valuePayloadGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength())); |
2817 | m_jit.load32( |
2818 | MacroAssembler::BaseIndex(storageGPR, valuePayloadGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), |
2819 | valueTagGPR); |
2820 | MacroAssembler::Jump slowCase = m_jit.branchIfEmpty(valueTagGPR); |
2821 | m_jit.store32( |
2822 | MacroAssembler::TrustedImm32(JSValue::EmptyValueTag), |
2823 | MacroAssembler::BaseIndex(storageGPR, valuePayloadGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag))); |
2824 | m_jit.load32( |
2825 | MacroAssembler::BaseIndex(storageGPR, valuePayloadGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), |
2826 | valuePayloadGPR); |
2827 | |
2828 | addSlowPathGenerator( |
2829 | slowPathMove( |
2830 | undefinedCase, this, |
2831 | MacroAssembler::TrustedImm32(jsUndefined().tag()), valueTagGPR, |
2832 | MacroAssembler::TrustedImm32(jsUndefined().payload()), valuePayloadGPR)); |
2833 | addSlowPathGenerator( |
2834 | slowPathCall( |
2835 | slowCase, this, operationArrayPopAndRecoverLength, |
2836 | JSValueRegs(valueTagGPR, valuePayloadGPR), TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseGPR)); |
2837 | |
2838 | jsValueResult(valueTagGPR, valuePayloadGPR, node); |
2839 | break; |
2840 | } |
2841 | |
2842 | case Array::Double: { |
2843 | FPRTemporary temp(this); |
2844 | FPRReg tempFPR = temp.fpr(); |
2845 | |
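| // Double arrays use PNaN as the hole marker, so a popped NaN means a hole and takes the slow |
| // path; on the fast path the vacated slot is refilled with the hole value before boxing the result. |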
2846 | m_jit.load32( |
2847 | MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), valuePayloadGPR); |
2848 | MacroAssembler::Jump undefinedCase = |
2849 | m_jit.branchTest32(MacroAssembler::Zero, valuePayloadGPR); |
2850 | m_jit.sub32(TrustedImm32(1), valuePayloadGPR); |
2851 | m_jit.store32( |
2852 | valuePayloadGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength())); |
2853 | m_jit.loadDouble( |
2854 | MacroAssembler::BaseIndex(storageGPR, valuePayloadGPR, MacroAssembler::TimesEight), |
2855 | tempFPR); |
2856 | MacroAssembler::Jump slowCase = m_jit.branchIfNaN(tempFPR); |
2857 | JSValue nan = JSValue(JSValue::EncodeAsDouble, PNaN); |
2858 | m_jit.store32( |
2859 | MacroAssembler::TrustedImm32(nan.u.asBits.tag), |
2860 | MacroAssembler::BaseIndex(storageGPR, valuePayloadGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag))); |
2861 | m_jit.store32( |
2862 | MacroAssembler::TrustedImm32(nan.u.asBits.payload), |
2863 | MacroAssembler::BaseIndex(storageGPR, valuePayloadGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload))); |
2864 | boxDouble(tempFPR, valueTagGPR, valuePayloadGPR); |
2865 | |
2866 | addSlowPathGenerator( |
2867 | slowPathMove( |
2868 | undefinedCase, this, |
2869 | MacroAssembler::TrustedImm32(jsUndefined().tag()), valueTagGPR, |
2870 | MacroAssembler::TrustedImm32(jsUndefined().payload()), valuePayloadGPR)); |
2871 | addSlowPathGenerator( |
2872 | slowPathCall( |
2873 | slowCase, this, operationArrayPopAndRecoverLength, |
2874 | JSValueRegs(valueTagGPR, valuePayloadGPR), TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseGPR)); |
2875 | |
2876 | jsValueResult(valueTagGPR, valuePayloadGPR, node); |
2877 | break; |
2878 | } |
2879 | |
2880 | case Array::ArrayStorage: { |
2881 | GPRTemporary storageLength(this); |
2882 | GPRReg storageLengthGPR = storageLength.gpr(); |
2883 | |
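| // ArrayStorage also tracks a vector length and m_numValuesInVector: an index at or beyond the |
| // vector length is handled by operationArrayPop, while a hole in the vector simply yields undefined. |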
2884 | m_jit.load32(MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()), storageLengthGPR); |
2885 | |
2886 | JITCompiler::JumpList setUndefinedCases; |
2887 | setUndefinedCases.append(m_jit.branchTest32(MacroAssembler::Zero, storageLengthGPR)); |
2888 | |
2889 | m_jit.sub32(TrustedImm32(1), storageLengthGPR); |
2890 | |
2891 | MacroAssembler::Jump slowCase = m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::vectorLengthOffset())); |
2892 | |
2893 | m_jit.load32(MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), valueTagGPR); |
2894 | m_jit.load32(MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), valuePayloadGPR); |
2895 | |
2896 | m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset())); |
2897 | |
2898 | setUndefinedCases.append(m_jit.branchIfEmpty(valueTagGPR)); |
2899 | |
2900 | m_jit.store32(TrustedImm32(JSValue::EmptyValueTag), MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, ArrayStorage::vectorOffset() + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); |
2901 | |
2902 | m_jit.sub32(TrustedImm32(1), MacroAssembler::Address(storageGPR, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector))); |
2903 | |
2904 | addSlowPathGenerator( |
2905 | slowPathMove( |
2906 | setUndefinedCases, this, |
2907 | MacroAssembler::TrustedImm32(jsUndefined().tag()), valueTagGPR, |
2908 | MacroAssembler::TrustedImm32(jsUndefined().payload()), valuePayloadGPR)); |
2909 | |
2910 | addSlowPathGenerator( |
2911 | slowPathCall( |
2912 | slowCase, this, operationArrayPop, |
2913 | JSValueRegs(valueTagGPR, valuePayloadGPR), TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), baseGPR)); |
2914 | |
2915 | jsValueResult(valueTagGPR, valuePayloadGPR, node); |
2916 | break; |
2917 | } |
2918 | |
2919 | default: |
2920 | CRASH(); |
2921 | break; |
2922 | } |
2923 | break; |
2924 | } |
2925 | |
2926 | case ArraySlice: { |
2927 | compileArraySlice(node); |
2928 | break; |
2929 | } |
2930 | |
2931 | case ArrayIndexOf: { |
2932 | compileArrayIndexOf(node); |
2933 | break; |
2934 | } |
2935 | |
2936 | case DFG::Jump: { |
2937 | jump(node->targetBlock()); |
2938 | noResult(node); |
2939 | break; |
2940 | } |
2941 | |
2942 | case Branch: |
2943 | emitBranch(node); |
2944 | break; |
2945 | |
2946 | case Switch: |
2947 | emitSwitch(node); |
2948 | break; |
2949 | |
2950 | case Return: { |
2951 | ASSERT(GPRInfo::callFrameRegister != GPRInfo::regT2); |
2952 | ASSERT(GPRInfo::regT1 != GPRInfo::returnValueGPR); |
2953 | ASSERT(GPRInfo::returnValueGPR != GPRInfo::callFrameRegister); |
2954 | |
2955 | // Return the result in returnValueGPR. |
2956 | JSValueOperand op1(this, node->child1()); |
2957 | op1.fill(); |
2958 | if (op1.isDouble()) |
2959 | boxDouble(op1.fpr(), GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR); |
2960 | else { |
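| // Move the tag/payload pair into returnValueGPR2/returnValueGPR without clobbering either |
| // register when the operand already occupies one or both of them. |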
2961 | if (op1.payloadGPR() == GPRInfo::returnValueGPR2 && op1.tagGPR() == GPRInfo::returnValueGPR) |
2962 | m_jit.swap(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR2); |
2963 | else if (op1.payloadGPR() == GPRInfo::returnValueGPR2) { |
2964 | m_jit.move(op1.payloadGPR(), GPRInfo::returnValueGPR); |
2965 | m_jit.move(op1.tagGPR(), GPRInfo::returnValueGPR2); |
2966 | } else { |
2967 | m_jit.move(op1.tagGPR(), GPRInfo::returnValueGPR2); |
2968 | m_jit.move(op1.payloadGPR(), GPRInfo::returnValueGPR); |
2969 | } |
2970 | } |
2971 | |
2972 | m_jit.emitRestoreCalleeSaves(); |
2973 | m_jit.emitFunctionEpilogue(); |
2974 | m_jit.ret(); |
2975 | |
2976 | noResult(node); |
2977 | break; |
2978 | } |
2979 | |
2980 | case Throw: { |
2981 | compileThrow(node); |
2982 | break; |
2983 | } |
2984 | |
2985 | case ThrowStaticError: { |
2986 | compileThrowStaticError(node); |
2987 | break; |
2988 | } |
2989 | |
2990 | case BooleanToNumber: { |
2991 | switch (node->child1().useKind()) { |
2992 | case BooleanUse: { |
2993 | SpeculateBooleanOperand value(this, node->child1()); |
2994 | GPRTemporary result(this); // FIXME: We could reuse the operand's register, but on speculation failure we would need recovery to restore the tag (as in the arithmetic add case). |
2995 | |
2996 | m_jit.move(value.gpr(), result.gpr()); |
2997 | |
2998 | int32Result(result.gpr(), node); |
2999 | break; |
3000 | } |
3001 | |
3002 | case UntypedUse: { |
3003 | JSValueOperand value(this, node->child1()); |
3004 | |
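| // If the input is already proven to be a boolean (or a boolean-like int32), its payload is |
| // already the 0/1 result, so only the payload needs to be copied. |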
3005 | if (!m_interpreter.needsTypeCheck(node->child1(), SpecBoolInt32 | SpecBoolean)) { |
3006 | GPRTemporary result(this); |
3007 | |
3008 | GPRReg valueGPR = value.payloadGPR(); |
3009 | GPRReg resultGPR = result.gpr(); |
3010 | |
3011 | m_jit.move(valueGPR, resultGPR); |
3012 | int32Result(result.gpr(), node); |
3013 | break; |
3014 | } |
3015 | |
3016 | GPRTemporary resultTag(this); |
3017 | GPRTemporary resultPayload(this); |
3018 | |
3019 | GPRReg valueTagGPR = value.tagGPR(); |
3020 | GPRReg valuePayloadGPR = value.payloadGPR(); |
3021 | GPRReg resultTagGPR = resultTag.gpr(); |
3022 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
3023 | |
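| // General case: copy the payload and, if the tag says boolean, rewrite it to Int32Tag (a |
| // boolean's payload is already 0 or 1); any other value passes through unchanged. |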
3024 | m_jit.move(valuePayloadGPR, resultPayloadGPR); |
3025 | JITCompiler::Jump isBoolean = m_jit.branchIfBoolean(valueTagGPR, InvalidGPRReg); |
3026 | m_jit.move(valueTagGPR, resultTagGPR); |
3027 | JITCompiler::Jump done = m_jit.jump(); |
3028 | isBoolean.link(&m_jit); |
3029 | m_jit.move(TrustedImm32(JSValue::Int32Tag), resultTagGPR); |
3030 | done.link(&m_jit); |
3031 | |
3032 | jsValueResult(resultTagGPR, resultPayloadGPR, node); |
3033 | break; |
3034 | } |
3035 | |
3036 | default: |
3037 | RELEASE_ASSERT_NOT_REACHED(); |
3038 | break; |
3039 | } |
3040 | break; |
3041 | } |
3042 | |
3043 | case ToPrimitive: { |
3044 | compileToPrimitive(node); |
3045 | break; |
3046 | } |
3047 | |
3048 | case ToNumber: { |
3049 | JSValueOperand argument(this, node->child1()); |
3050 | GPRTemporary resultTag(this, Reuse, argument, TagWord); |
3051 | GPRTemporary resultPayload(this, Reuse, argument, PayloadWord); |
3052 | |
3053 | GPRReg argumentPayloadGPR = argument.payloadGPR(); |
3054 | GPRReg argumentTagGPR = argument.tagGPR(); |
3055 | JSValueRegs argumentRegs = argument.jsValueRegs(); |
3056 | JSValueRegs resultRegs(resultTag.gpr(), resultPayload.gpr()); |
3057 | |
3058 | argument.use(); |
3059 | |
3060 | // We have made several attempts to remove ToNumber, but it still exists, |
3061 | // which means that converting non-numbers to numbers here is not rare. |
3062 | // So instead of a slow path generator, we emit the callOperation inline. |
3063 | if (!(m_state.forNode(node->child1()).m_type & SpecBytecodeNumber)) { |
3064 | flushRegisters(); |
3065 | callOperation(operationToNumber, resultRegs, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), argumentRegs); |
3066 | m_jit.exceptionCheck(); |
3067 | } else { |
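| // The input may already be a number; handle that case inline and only call |
| // operationToNumber for the non-number case. |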
3068 | MacroAssembler::Jump notNumber; |
3069 | { |
3070 | GPRTemporary scratch(this); |
3071 | notNumber = m_jit.branchIfNotNumber(argument.jsValueRegs(), scratch.gpr()); |
3072 | } |
3073 | m_jit.move(argumentTagGPR, resultRegs.tagGPR()); |
3074 | m_jit.move(argumentPayloadGPR, resultRegs.payloadGPR()); |
3075 | MacroAssembler::Jump done = m_jit.jump(); |
3076 | |
3077 | notNumber.link(&m_jit); |
3078 | silentSpillAllRegisters(resultRegs); |
3079 | callOperation(operationToNumber, resultRegs, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), argumentRegs); |
3080 | silentFillAllRegisters(); |
3081 | m_jit.exceptionCheck(); |
3082 | |
3083 | done.link(&m_jit); |
3084 | } |
3085 | |
3086 | jsValueResult(resultRegs.tagGPR(), resultRegs.payloadGPR(), node, UseChildrenCalledExplicitly); |
3087 | break; |
3088 | } |
3089 | |
3090 | case ToNumeric: { |
3091 | compileToNumeric(node); |
3092 | break; |
3093 | } |
3094 | |
3095 | case ToString: |
3096 | case CallStringConstructor: |
3097 | case StringValueOf: { |
3098 | compileToStringOrCallStringConstructorOrStringValueOf(node); |
3099 | break; |
3100 | } |
3101 | |
3102 | case NewStringObject: { |
3103 | compileNewStringObject(node); |
3104 | break; |
3105 | } |
3106 | |
3107 | case NewSymbol: { |
3108 | compileNewSymbol(node); |
3109 | break; |
3110 | } |
3111 | |
3112 | case NewArray: { |
3113 | compileNewArray(node); |
3114 | break; |
3115 | } |
3116 | |
3117 | case NewArrayWithSpread: { |
3118 | compileNewArrayWithSpread(node); |
3119 | break; |
3120 | } |
3121 | |
3122 | case Spread: { |
3123 | compileSpread(node); |
3124 | break; |
3125 | } |
3126 | |
3127 | case NewArrayWithSize: { |
3128 | compileNewArrayWithSize(node); |
3129 | break; |
3130 | } |
3131 | |
3132 | case NewArrayBuffer: { |
3133 | compileNewArrayBuffer(node); |
3134 | break; |
3135 | } |
3136 | |
3137 | case NewTypedArray: { |
3138 | compileNewTypedArray(node); |
3139 | break; |
3140 | } |
3141 | |
3142 | case NewRegexp: { |
3143 | compileNewRegexp(node); |
3144 | break; |
3145 | } |
3146 | |
3147 | case ToObject: |
3148 | case CallObjectConstructor: { |
3149 | compileToObjectOrCallObjectConstructor(node); |
3150 | break; |
3151 | } |
3152 | |
3153 | case ToThis: { |
3154 | compileToThis(node); |
3155 | break; |
3156 | } |
3157 | |
3158 | case ObjectCreate: { |
3159 | compileObjectCreate(node); |
3160 | break; |
3161 | } |
3162 | |
3163 | case ObjectKeys: { |
3164 | compileObjectKeys(node); |
3165 | break; |
3166 | } |
3167 | |
3168 | case CreateThis: { |
3169 | compileCreateThis(node); |
3170 | break; |
3171 | } |
3172 | |
3173 | case CreatePromise: { |
3174 | compileCreatePromise(node); |
3175 | break; |
3176 | } |
3177 | |
3178 | case CreateGenerator: { |
3179 | compileCreateGenerator(node); |
3180 | break; |
3181 | } |
3182 | |
3183 | case CreateAsyncGenerator: { |
3184 | compileCreateAsyncGenerator(node); |
3185 | break; |
3186 | } |
3187 | |
3188 | case NewObject: { |
3189 | compileNewObject(node); |
3190 | break; |
3191 | } |
3192 | |
3193 | case NewPromise: { |
3194 | compileNewPromise(node); |
3195 | break; |
3196 | } |
3197 | |
3198 | case NewGenerator: { |
3199 | compileNewGenerator(node); |
3200 | break; |
3201 | } |
3202 | |
3203 | case NewAsyncGenerator: { |
3204 | compileNewAsyncGenerator(node); |
3205 | break; |
3206 | } |
3207 | |
3208 | case GetCallee: { |
3209 | compileGetCallee(node); |
3210 | break; |
3211 | } |
3212 | |
3213 | case SetCallee: { |
3214 | compileSetCallee(node); |
3215 | break; |
3216 | } |
3217 | |
3218 | case GetArgumentCountIncludingThis: { |
3219 | compileGetArgumentCountIncludingThis(node); |
3220 | break; |
3221 | } |
3222 | |
3223 | case SetArgumentCountIncludingThis: |
3224 | compileSetArgumentCountIncludingThis(node); |
3225 | break; |
3226 | |
3227 | case GetScope: |
3228 | compileGetScope(node); |
3229 | break; |
3230 | |
3231 | case SkipScope: |
3232 | compileSkipScope(node); |
3233 | break; |
3234 | |
3235 | case GetGlobalObject: |
3236 | compileGetGlobalObject(node); |
3237 | break; |
3238 | |
3239 | case GetGlobalThis: |
3240 | compileGetGlobalThis(node); |
3241 | break; |
3242 | |
3243 | case GetClosureVar: { |
3244 | compileGetClosureVar(node); |
3245 | break; |
3246 | } |
3247 | |
3248 | case PutClosureVar: { |
3249 | compilePutClosureVar(node); |
3250 | break; |
3251 | } |
3252 | |
3253 | case GetInternalField: { |
3254 | compileGetInternalField(node); |
3255 | break; |
3256 | } |
3257 | |
3258 | case PutInternalField: { |
3259 | compilePutInternalField(node); |
3260 | break; |
3261 | } |
3262 | |
3263 | case TryGetById: { |
3264 | compileGetById(node, AccessType::TryGetById); |
3265 | break; |
3266 | } |
3267 | |
3268 | case GetByIdDirect: { |
3269 | compileGetById(node, AccessType::GetByIdDirect); |
3270 | break; |
3271 | } |
3272 | |
3273 | case GetByIdDirectFlush: { |
3274 | compileGetByIdFlush(node, AccessType::GetByIdDirect); |
3275 | break; |
3276 | } |
3277 | |
3278 | case GetById: { |
3279 | compileGetById(node, AccessType::GetById); |
3280 | break; |
3281 | } |
3282 | |
3283 | case GetByIdFlush: { |
3284 | compileGetByIdFlush(node, AccessType::GetById); |
3285 | break; |
3286 | } |
3287 | |
3288 | case GetByIdWithThis: { |
3289 | if (node->child1().useKind() == CellUse && node->child2().useKind() == CellUse) { |
3290 | SpeculateCellOperand base(this, node->child1()); |
3291 | SpeculateCellOperand thisValue(this, node->child2()); |
3292 | GPRTemporary resultTag(this); |
3293 | GPRTemporary resultPayload(this); |
3294 | |
3295 | GPRReg baseGPR = base.gpr(); |
3296 | GPRReg thisGPR = thisValue.gpr(); |
3297 | GPRReg resultTagGPR = resultTag.gpr(); |
3298 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
3299 | |
3300 | cachedGetByIdWithThis(node->origin.semantic, InvalidGPRReg, baseGPR, InvalidGPRReg, thisGPR, resultTagGPR, resultPayloadGPR, node->identifierNumber()); |
3301 | |
3302 | jsValueResult(resultTagGPR, resultPayloadGPR, node); |
3303 | } else { |
3304 | JSValueOperand base(this, node->child1()); |
3305 | JSValueOperand thisValue(this, node->child2()); |
3306 | GPRTemporary resultTag(this); |
3307 | GPRTemporary resultPayload(this); |
3308 | |
3309 | GPRReg baseTagGPR = base.tagGPR(); |
3310 | GPRReg basePayloadGPR = base.payloadGPR(); |
3311 | GPRReg thisTagGPR = thisValue.tagGPR(); |
3312 | GPRReg thisPayloadGPR = thisValue.payloadGPR(); |
3313 | GPRReg resultTagGPR = resultTag.gpr(); |
3314 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
3315 | |
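| // Collect the not-cell cases up front; they are handed to the IC so it can route them to its |
| // generic slow path. |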
3316 | JITCompiler::JumpList notCellList; |
3317 | notCellList.append(m_jit.branchIfNotCell(base.jsValueRegs())); |
3318 | notCellList.append(m_jit.branchIfNotCell(thisValue.jsValueRegs())); |
3319 | |
3320 | cachedGetByIdWithThis(node->origin.semantic, baseTagGPR, basePayloadGPR, thisTagGPR, thisPayloadGPR, resultTagGPR, resultPayloadGPR, node->identifierNumber(), notCellList); |
3321 | |
3322 | jsValueResult(resultTagGPR, resultPayloadGPR, node); |
3323 | } |
3324 | |
3325 | break; |
3326 | } |
3327 | |
3328 | case GetArrayLength: |
3329 | compileGetArrayLength(node); |
3330 | break; |
3331 | |
3332 | case DeleteById: { |
3333 | compileDeleteById(node); |
3334 | break; |
3335 | } |
3336 | |
3337 | case DeleteByVal: { |
3338 | compileDeleteByVal(node); |
3339 | break; |
3340 | } |
3341 | |
3342 | case CheckCell: { |
3343 | compileCheckCell(node); |
3344 | break; |
3345 | } |
3346 | |
3347 | case CheckNotEmpty: { |
3348 | compileCheckNotEmpty(node); |
3349 | break; |
3350 | } |
3351 | |
3352 | case CheckIdent: |
3353 | compileCheckIdent(node); |
3354 | break; |
3355 | |
3356 | case GetExecutable: { |
3357 | compileGetExecutable(node); |
3358 | break; |
3359 | } |
3360 | |
3361 | case CheckStructure: { |
3362 | compileCheckStructure(node); |
3363 | break; |
3364 | } |
3365 | |
3366 | case PutStructure: { |
3367 | RegisteredStructure oldStructure = node->transition()->previous; |
3368 | RegisteredStructure newStructure = node->transition()->next; |
3369 | |
3370 | m_jit.jitCode()->common.notifyCompilingStructureTransition(m_jit.graph().m_plan, m_jit.codeBlock(), node); |
3371 | |
3372 | SpeculateCellOperand base(this, node->child1()); |
3373 | GPRReg baseGPR = base.gpr(); |
3374 | |
3375 | ASSERT_UNUSED(oldStructure, oldStructure->indexingMode() == newStructure->indexingMode()); |
3376 | ASSERT(oldStructure->typeInfo().type() == newStructure->typeInfo().type()); |
3377 | ASSERT(oldStructure->typeInfo().inlineTypeFlags() == newStructure->typeInfo().inlineTypeFlags()); |
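| // On 32-bit the structure ID field holds the Structure pointer itself, so the transition is |
| // committed with a single pointer store. |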
3378 | m_jit.storePtr(TrustedImmPtr(newStructure), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset())); |
3379 | |
3380 | noResult(node); |
3381 | break; |
3382 | } |
3383 | |
3384 | case AllocatePropertyStorage: |
3385 | compileAllocatePropertyStorage(node); |
3386 | break; |
3387 | |
3388 | case ReallocatePropertyStorage: |
3389 | compileReallocatePropertyStorage(node); |
3390 | break; |
3391 | |
3392 | case NukeStructureAndSetButterfly: |
3393 | compileNukeStructureAndSetButterfly(node); |
3394 | break; |
3395 | |
3396 | case GetButterfly: |
3397 | compileGetButterfly(node); |
3398 | break; |
3399 | |
3400 | case GetIndexedPropertyStorage: { |
3401 | compileGetIndexedPropertyStorage(node); |
3402 | break; |
3403 | } |
3404 | |
3405 | case ConstantStoragePointer: { |
3406 | compileConstantStoragePointer(node); |
3407 | break; |
3408 | } |
3409 | |
3410 | case GetTypedArrayByteOffset: { |
3411 | compileGetTypedArrayByteOffset(node); |
3412 | break; |
3413 | } |
3414 | |
3415 | case GetPrototypeOf: { |
3416 | compileGetPrototypeOf(node); |
3417 | break; |
3418 | } |
3419 | |
3420 | case GetByOffset: { |
3421 | compileGetByOffset(node); |
3422 | break; |
3423 | } |
3424 | |
3425 | case GetGetterSetterByOffset: { |
3426 | StorageOperand storage(this, node->child1()); |
3427 | GPRTemporary resultPayload(this); |
3428 | |
3429 | GPRReg storageGPR = storage.gpr(); |
3430 | GPRReg resultPayloadGPR = resultPayload.gpr(); |
3431 | |
3432 | StorageAccessData& storageAccessData = node->storageAccessData(); |
3433 | |
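| // A GetterSetter is always a cell, so only the payload word of the slot needs to be loaded. |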
3434 | m_jit.load32(JITCompiler::Address(storageGPR, offsetRelativeToBase(storageAccessData.offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultPayloadGPR); |
3435 | |
3436 | cellResult(resultPayloadGPR, node); |
3437 | break; |
3438 | } |
3439 | |
3440 | case MatchStructure: { |
3441 | compileMatchStructure(node); |
3442 | break; |
3443 | } |
3444 | |
3445 | case GetGetter: { |
3446 | compileGetGetter(node); |
3447 | break; |
3448 | } |
3449 | |
3450 | case GetSetter: { |
3451 | compileGetSetter(node); |
3452 | break; |
3453 | } |
3454 | |
3455 | case PutByOffset: { |
3456 | compilePutByOffset(node); |
3457 | break; |
3458 | } |
3459 | |
3460 | case PutByIdFlush: { |
3461 | compilePutByIdFlush(node); |
3462 | break; |
3463 | } |
3464 | |
3465 | case PutById: { |
3466 | compilePutById(node); |
3467 | break; |
3468 | } |
3469 | |
3470 | case PutByIdDirect: { |
3471 | compilePutByIdDirect(node); |
3472 | break; |
3473 | } |
3474 | |
3475 | case PutByIdWithThis: { |
3476 | compilePutByIdWithThis(node); |
3477 | break; |
3478 | } |
3479 | |
3480 | case PutGetterById: |
3481 | case PutSetterById: { |
3482 | compilePutAccessorById(node); |
3483 | break; |
3484 | } |
3485 | |
3486 | case PutGetterSetterById: { |
3487 | compilePutGetterSetterById(node); |
3488 | break; |
3489 | } |
3490 | |
3491 | case PutGetterByVal: |
3492 | case PutSetterByVal: { |
3493 | compilePutAccessorByVal(node); |
3494 | break; |
3495 | } |
3496 | |
3497 | case DefineDataProperty: { |
3498 | compileDefineDataProperty(node); |
3499 | break; |
3500 | } |
3501 | |
3502 | case DefineAccessorProperty: { |
3503 | compileDefineAccessorProperty(node); |
3504 | break; |
3505 | } |
3506 | |
3507 | case GetGlobalLexicalVariable: |
3508 | case GetGlobalVar: { |
3509 | compileGetGlobalVariable(node); |
3510 | break; |
3511 | } |
3512 | |
3513 | case PutGlobalVariable: { |
3514 | compilePutGlobalVariable(node); |
3515 | break; |
3516 | } |
3517 | |
3518 | case NotifyWrite: { |
3519 | compileNotifyWrite(node); |
3520 | break; |
3521 | } |
3522 | |
3523 | case ParseInt: { |
3524 | compileParseInt(node); |
3525 | break; |
3526 | } |
3527 | |
3528 | case CheckTypeInfoFlags: { |
3529 | compileCheckTypeInfoFlags(node); |
3530 | break; |
3531 | } |
3532 | |
3533 | case OverridesHasInstance: { |
3534 | compileOverridesHasInstance(node); |
3535 | break; |
3536 | } |
3537 | |
3538 | case InstanceOf: { |
3539 | compileInstanceOf(node); |
3540 | break; |
3541 | } |
3542 | |
3543 | case InstanceOfCustom: { |
3544 | compileInstanceOfCustom(node); |
3545 | break; |
3546 | } |
3547 | |
3548 | case IsEmpty: { |
3549 | JSValueOperand value(this, node->child1()); |
3550 | GPRTemporary result(this, Reuse, value, TagWord); |
3551 | m_jit.comparePtr(JITCompiler::Equal, value.tagGPR(), TrustedImm32(JSValue::EmptyValueTag), result.gpr()); |
3552 | booleanResult(result.gpr(), node); |
3553 | break; |
3554 | } |
3555 | |
3556 | case IsUndefined: { |
3557 | JSValueOperand value(this, node->child1()); |
3558 | GPRTemporary result(this); |
3559 | GPRTemporary localGlobalObject(this); |
3560 | GPRTemporary remoteGlobalObject(this); |
3561 | |
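| // Non-cells are undefined exactly when the tag is UndefinedTag. Cells only count as undefined |
| // if they masquerade as undefined and belong to the same global object as this code. |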
3562 | JITCompiler::Jump isCell = m_jit.branchIfCell(value.jsValueRegs()); |
3563 | |
3564 | m_jit.compare32(JITCompiler::Equal, value.tagGPR(), TrustedImm32(JSValue::UndefinedTag), result.gpr()); |
3565 | JITCompiler::Jump done = m_jit.jump(); |
3566 | |
3567 | isCell.link(&m_jit); |
3568 | JITCompiler::Jump notMasqueradesAsUndefined; |
3569 | if (masqueradesAsUndefinedWatchpointIsStillValid()) { |
3570 | m_jit.move(TrustedImm32(0), result.gpr()); |
3571 | notMasqueradesAsUndefined = m_jit.jump(); |
3572 | } else { |
3573 | JITCompiler::Jump isMasqueradesAsUndefined = m_jit.branchTest8( |
3574 | JITCompiler::NonZero, |
3575 | JITCompiler::Address(value.payloadGPR(), JSCell::typeInfoFlagsOffset()), |
3576 | TrustedImm32(MasqueradesAsUndefined)); |
3577 | m_jit.move(TrustedImm32(0), result.gpr()); |
3578 | notMasqueradesAsUndefined = m_jit.jump(); |
3579 | |
3580 | isMasqueradesAsUndefined.link(&m_jit); |
3581 | GPRReg localGlobalObjectGPR = localGlobalObject.gpr(); |
3582 | GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr(); |
3583 | m_jit.move(TrustedImmPtr::weakPointer(m_jit.graph(), m_jit.globalObjectFor(node->origin.semantic)), localGlobalObjectGPR); |
3584 | m_jit.loadPtr(JITCompiler::Address(value.payloadGPR(), JSCell::structureIDOffset()), result.gpr()); |
3585 | m_jit.loadPtr(JITCompiler::Address(result.gpr(), Structure::globalObjectOffset()), remoteGlobalObjectGPR); |
3586 | m_jit.compare32(JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, result.gpr()); |
3587 | } |
3588 | |
3589 | notMasqueradesAsUndefined.link(&m_jit); |
3590 | done.link(&m_jit); |
3591 | booleanResult(result.gpr(), node); |
3592 | break; |
3593 | } |
3594 | |
3595 | case IsUndefinedOrNull: { |
3596 | JSValueOperand value(this, node->child1()); |
3597 | GPRTemporary result(this, Reuse, value, TagWord); |
3598 | |
3599 | GPRReg valueTagGPR = value.tagGPR(); |
3600 | GPRReg resultGPR = result.gpr(); |
3601 | |
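| // UndefinedTag and NullTag differ only in the low bit (the static_assert below checks this), so |
| // OR-ing 1 into the tag maps both to NullTag and a single compare suffices. |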
3602 | m_jit.move(valueTagGPR, resultGPR); |
3603 | static_assert((JSValue::UndefinedTag + 1 == JSValue::NullTag) && (JSValue::NullTag & 0x1), "IsUndefinedOrNull relies on NullTag being UndefinedTag with the low bit set"); |
3604 | m_jit.or32(CCallHelpers::TrustedImm32(1), resultGPR); |
3605 | m_jit.compare32(CCallHelpers::Equal, resultGPR, CCallHelpers::TrustedImm32(JSValue::NullTag), resultGPR); |
3606 | |
3607 | booleanResult(resultGPR, node); |
3608 | break; |
3609 | } |
3610 | |
3611 | |
3612 | case IsBoolean: { |
3613 | JSValueOperand value(this, node->child1()); |
3614 | GPRTemporary result(this, Reuse, value, TagWord); |
3615 | |
3616 | m_jit.compare32(JITCompiler::Equal, value.tagGPR(), JITCompiler::TrustedImm32(JSValue::BooleanTag), result.gpr()); |
3617 | booleanResult(result.gpr(), node); |
3618 | break; |
3619 | } |
3620 | |
3621 | case IsNumber: { |
3622 | JSValueOperand value(this, node->child1()); |
3623 | GPRTemporary result(this, Reuse, value, TagWord); |
3624 | |
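| // A value is a number if its tag is Int32Tag or below LowestTag (a double). Adding 1 wraps |
| // Int32Tag to zero, so a single unsigned compare against LowestTag + 1 covers both cases. |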
3625 | m_jit.add32(TrustedImm32(1), value.tagGPR(), result.gpr()); |
3626 | m_jit.compare32(JITCompiler::Below, result.gpr(), JITCompiler::TrustedImm32(JSValue::LowestTag + 1), result.gpr()); |
3627 | booleanResult(result.gpr(), node); |
3628 | break; |
3629 | } |
3630 | |
3631 | case NumberIsInteger: { |
3632 | JSValueOperand input(this, node->child1()); |
3633 | JSValueRegs inputRegs = input.jsValueRegs(); |
3634 | flushRegisters(); |
3635 | GPRFlushedCallResult result(this); |
3636 | GPRReg resultGPR = result.gpr(); |
3637 | callOperation(operationNumberIsInteger, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), inputRegs); |
3638 | booleanResult(resultGPR, node); |
3639 | break; |
3640 | } |
3641 | |
3642 | case IsObject: { |
3643 | compileIsObject(node); |
3644 | break; |
3645 | } |
3646 | |
3647 | case IsObjectOrNull: { |
3648 | compileIsObjectOrNull(node); |
3649 | break; |
3650 | } |
3651 | |
3652 | case IsFunction: { |
3653 | compileIsFunction(node); |
3654 | break; |
3655 | } |
3656 | |
3657 | case IsCellWithType: { |
3658 | compileIsCellWithType(node); |
3659 | break; |
3660 | } |
3661 | |
3662 | case IsTypedArrayView: { |
3663 | compileIsTypedArrayView(node); |
3664 | break; |
3665 | } |
3666 | |
3667 | case TypeOf: { |
3668 | compileTypeOf(node); |
3669 | break; |
3670 | } |
3671 | |
3672 | case MapHash: { |
3673 | JSValueOperand input(this, node->child1()); |
3674 | |
3675 | JSValueRegs inputRegs = input.jsValueRegs(); |
3676 | |
3677 | flushRegisters(); |
3678 | GPRFlushedCallResult result(this); |
3679 | GPRReg resultGPR = result.gpr(); |
3680 | callOperation(operationMapHash, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), inputRegs); |
3681 | m_jit.exceptionCheck(); |
3682 | int32Result(resultGPR, node); |
3683 | break; |
3684 | } |
3685 | |
3686 | case NormalizeMapKey: { |
3687 | compileNormalizeMapKey(node); |
3688 | break; |
3689 | } |
3690 | |
3691 | case GetMapBucket: { |
3692 | SpeculateCellOperand map(this, node->child1()); |
3693 | JSValueOperand key(this, node->child2()); |
3694 | SpeculateInt32Operand hash(this, node->child3()); |
3695 | |
3696 | GPRReg mapGPR = map.gpr(); |
3697 | JSValueRegs keyRegs = key.jsValueRegs(); |
3698 | GPRReg hashGPR = hash.gpr(); |
3699 | |
3700 | if (node->child1().useKind() == MapObjectUse) |
3701 | speculateMapObject(node->child1(), mapGPR); |
3702 | else if (node->child1().useKind() == SetObjectUse) |
3703 | speculateSetObject(node->child1(), mapGPR); |
3704 | else |
3705 | RELEASE_ASSERT_NOT_REACHED(); |
3706 | |
3707 | flushRegisters(); |
3708 | GPRFlushedCallResult result(this); |
3709 | GPRReg resultGPR = result.gpr(); |
3710 | if (node->child1().useKind() == MapObjectUse) |
3711 | callOperation(operationJSMapFindBucket, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), mapGPR, keyRegs, hashGPR); |
3712 | else |
3713 | callOperation(operationJSSetFindBucket, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), mapGPR, keyRegs, hashGPR); |
3714 | m_jit.exceptionCheck(); |
3715 | cellResult(resultGPR, node); |
3716 | break; |
3717 | } |
3718 | |
3719 | case GetMapBucketHead: |
3720 | compileGetMapBucketHead(node); |
3721 | break; |
3722 | |
3723 | case GetMapBucketNext: |
3724 | compileGetMapBucketNext(node); |
3725 | break; |
3726 | |
3727 | case LoadKeyFromMapBucket: |
3728 | compileLoadKeyFromMapBucket(node); |
3729 | break; |
3730 | |
3731 | case LoadValueFromMapBucket: |
3732 | compileLoadValueFromMapBucket(node); |
3733 | break; |
3734 | |
3735 | case ExtractValueFromWeakMapGet: |
3736 | compileExtractValueFromWeakMapGet(node); |
3737 | break; |
3738 | |
3739 | case SetAdd: |
3740 | compileSetAdd(node); |
3741 | break; |
3742 | |
3743 | case MapSet: |
3744 | compileMapSet(node); |
3745 | break; |
3746 | |
3747 | case WeakMapGet: |
3748 | compileWeakMapGet(node); |
3749 | break; |
3750 | |
3751 | case WeakSetAdd: |
3752 | compileWeakSetAdd(node); |
3753 | break; |
3754 | |
3755 | case WeakMapSet: |
3756 | compileWeakMapSet(node); |
3757 | break; |
3758 | |
3759 | case Flush: |
3760 | break; |
3761 | |
3762 | case Call: |
3763 | case TailCall: |
3764 | case TailCallInlinedCaller: |
3765 | case Construct: |
3766 | case CallVarargs: |
3767 | case TailCallVarargs: |
3768 | case TailCallVarargsInlinedCaller: |
3769 | case ConstructVarargs: |
3770 | case CallForwardVarargs: |
3771 | case TailCallForwardVarargs: |
3772 | case TailCallForwardVarargsInlinedCaller: |
3773 | case ConstructForwardVarargs: |
3774 | case CallEval: |
3775 | case DirectCall: |
3776 | case DirectConstruct: |
3777 | case DirectTailCall: |
3778 | case DirectTailCallInlinedCaller: |
3779 | emitCall(node); |
3780 | break; |
3781 | |
3782 | case LoadVarargs: { |
3783 | compileLoadVarargs(node); |
3784 | break; |
3785 | } |
3786 | |
3787 | case ForwardVarargs: { |
3788 | compileForwardVarargs(node); |
3789 | break; |
3790 | } |
3791 | |
3792 | case CreateActivation: { |
3793 | compileCreateActivation(node); |
3794 | break; |
3795 | } |
3796 | |
3797 | case PushWithScope: { |
3798 | compilePushWithScope(node); |
3799 | break; |
3800 | } |
3801 | |
3802 | case CreateDirectArguments: { |
3803 | compileCreateDirectArguments(node); |
3804 | break; |
3805 | } |
3806 | |
3807 | case GetFromArguments: { |
3808 | compileGetFromArguments(node); |
3809 | break; |
3810 | } |
3811 | |
3812 | case PutToArguments: { |
3813 | compilePutToArguments(node); |
3814 | break; |
3815 | } |
3816 | |
3817 | case GetArgument: { |
3818 | compileGetArgument(node); |
3819 | break; |
3820 | } |
3821 | |
3822 | case CreateScopedArguments: { |
3823 | compileCreateScopedArguments(node); |
3824 | break; |
3825 | } |
3826 | |
3827 | case CreateClonedArguments: { |
3828 | compileCreateClonedArguments(node); |
3829 | break; |
3830 | } |
3831 | |
3832 | case CreateRest: { |
3833 | compileCreateRest(node); |
3834 | break; |
3835 | } |
3836 | |
3837 | case GetRestLength: { |
3838 | compileGetRestLength(node); |
3839 | break; |
3840 | } |
3841 | |
3842 | case NewFunction: |
3843 | case NewGeneratorFunction: |
3844 | case NewAsyncFunction: |
3845 | case NewAsyncGeneratorFunction: |
3846 | compileNewFunction(node); |
3847 | break; |
3848 | |
3849 | case SetFunctionName: |
3850 | compileSetFunctionName(node); |
3851 | break; |
3852 | |
3853 | case InById: |
3854 | compileInById(node); |
3855 | break; |
3856 | |
3857 | case InByVal: |
3858 | compileInByVal(node); |
3859 | break; |
3860 | |
3861 | case HasOwnProperty: { |
3862 | SpeculateCellOperand object(this, node->child1()); |
3863 | GPRTemporary uniquedStringImpl(this); |
3864 | GPRTemporary temp(this); |
3865 | GPRTemporary hash(this); |
3866 | GPRTemporary structureID(this); |
3867 | GPRTemporary result(this); |
3868 | |
3869 | Optional<SpeculateCellOperand> keyAsCell; |
3870 | Optional<JSValueOperand> keyAsValue; |
3871 | JSValueRegs keyRegs; |
3872 | if (node->child2().useKind() == UntypedUse) { |
3873 | keyAsValue.emplace(this, node->child2()); |
3874 | keyRegs = keyAsValue->jsValueRegs(); |
3875 | } else { |
3876 | ASSERT(node->child2().useKind() == StringUse || node->child2().useKind() == SymbolUse); |
3877 | keyAsCell.emplace(this, node->child2()); |
3878 | keyRegs = JSValueRegs::payloadOnly(keyAsCell->gpr()); |
3879 | } |
3880 | |
3881 | GPRReg objectGPR = object.gpr(); |
3882 | GPRReg implGPR = uniquedStringImpl.gpr(); |
3883 | GPRReg tempGPR = temp.gpr(); |
3884 | GPRReg hashGPR = hash.gpr(); |
3885 | GPRReg structureIDGPR = structureID.gpr(); |
3886 | GPRReg resultGPR = result.gpr(); |
3887 | |
3888 | speculateObject(node->child1()); |
3889 | |
3890 | MacroAssembler::JumpList slowPath; |
3891 | switch (node->child2().useKind()) { |
3892 | case SymbolUse: { |
3893 | speculateSymbol(node->child2(), keyRegs.payloadGPR()); |
3894 | m_jit.loadPtr(MacroAssembler::Address(keyRegs.payloadGPR(), Symbol::offsetOfSymbolImpl()), implGPR); |
3895 | break; |
3896 | } |
3897 | case StringUse: { |
3898 | speculateString(node->child2(), keyRegs.payloadGPR()); |
3899 | m_jit.loadPtr(MacroAssembler::Address(keyRegs.payloadGPR(), JSString::offsetOfValue()), implGPR); |
3900 | slowPath.append(m_jit.branchIfRopeStringImpl(implGPR)); |
3901 | slowPath.append(m_jit.branchTest32( |
3902 | MacroAssembler::Zero, MacroAssembler::Address(implGPR, StringImpl::flagsOffset()), |
3903 | MacroAssembler::TrustedImm32(StringImpl::flagIsAtom()))); |
3904 | break; |
3905 | } |
3906 | case UntypedUse: { |
3907 | slowPath.append(m_jit.branchIfNotCell(keyRegs)); |
3908 | auto isNotString = m_jit.branchIfNotString(keyRegs.payloadGPR()); |
3909 | m_jit.loadPtr(MacroAssembler::Address(keyRegs.payloadGPR(), JSString::offsetOfValue()), implGPR); |
3910 | slowPath.append(m_jit.branchIfRopeStringImpl(implGPR)); |
3911 | slowPath.append(m_jit.branchTest32( |
3912 | MacroAssembler::Zero, MacroAssembler::Address(implGPR, StringImpl::flagsOffset()), |
3913 | MacroAssembler::TrustedImm32(StringImpl::flagIsAtom()))); |
3914 | auto hasUniquedImpl = m_jit.jump(); |
3915 | |
3916 | isNotString.link(&m_jit); |
3917 | slowPath.append(m_jit.branchIfNotSymbol(keyRegs.payloadGPR())); |
3918 | m_jit.loadPtr(MacroAssembler::Address(keyRegs.payloadGPR(), Symbol::offsetOfSymbolImpl()), implGPR); |
3919 | |
3920 | hasUniquedImpl.link(&m_jit); |
3921 | break; |
3922 | } |
3923 | default: |
3924 | RELEASE_ASSERT_NOT_REACHED(); |
3925 | } |
3926 | |
3927 | // Note that we don't test whether the hash is zero here. An AtomStringImpl can't have a zero |
3928 | // hash, but a SymbolImpl may. Because this is a cache, that doesn't matter: we only ever |
3929 | // load the result from the cache if the cache entry matches what we are querying for. |
3930 | // So either we get extremely lucky and the zero hash still collides with the exact entity |
3931 | // we're looking for, or we see that we're comparing against a different entity and take the |
3932 | // slow path anyway. |
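| // The cache index mixes the key's hash (held in the upper bits of the StringImpl flags word) |
| // with the object's structure ID, masked to the cache size and scaled by the entry size. |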
3933 | m_jit.load32(MacroAssembler::Address(implGPR, UniquedStringImpl::flagsOffset()), hashGPR); |
3934 | m_jit.urshift32(MacroAssembler::TrustedImm32(StringImpl::s_flagCount), hashGPR); |
3935 | m_jit.load32(MacroAssembler::Address(objectGPR, JSCell::structureIDOffset()), structureIDGPR); |
3936 | m_jit.add32(structureIDGPR, hashGPR); |
3937 | m_jit.and32(TrustedImm32(HasOwnPropertyCache::mask), hashGPR); |
3938 | m_jit.mul32(TrustedImm32(sizeof(HasOwnPropertyCache::Entry)), hashGPR, hashGPR); |
3939 | ASSERT(vm().hasOwnPropertyCache()); |
3940 | m_jit.move(TrustedImmPtr(vm().hasOwnPropertyCache()), tempGPR); |
3941 | slowPath.append(m_jit.branchPtr(MacroAssembler::NotEqual, |
3942 | MacroAssembler::BaseIndex(tempGPR, hashGPR, MacroAssembler::TimesOne, HasOwnPropertyCache::Entry::offsetOfImpl()), implGPR)); |
3943 | m_jit.load8(MacroAssembler::BaseIndex(tempGPR, hashGPR, MacroAssembler::TimesOne, HasOwnPropertyCache::Entry::offsetOfResult()), resultGPR); |
3944 | m_jit.load32(MacroAssembler::BaseIndex(tempGPR, hashGPR, MacroAssembler::TimesOne, HasOwnPropertyCache::Entry::offsetOfStructureID()), tempGPR); |
3945 | slowPath.append(m_jit.branch32(MacroAssembler::NotEqual, tempGPR, structureIDGPR)); |
3946 | auto done = m_jit.jump(); |
3947 | |
3948 | slowPath.link(&m_jit); |
3949 | silentSpillAllRegisters(resultGPR); |
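| // If the key came in as a bare String/Symbol cell, rebox it as a full JSValue (CellTag + |
| // payload) before calling the generic operation. |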
3950 | if (node->child2().useKind() != UntypedUse) { |
3951 | m_jit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), tempGPR); |
3952 | keyRegs = JSValueRegs(tempGPR, keyRegs.payloadGPR()); |
3953 | } |
3954 | callOperation(operationHasOwnProperty, resultGPR, TrustedImmPtr::weakPointer(m_graph, m_graph.globalObjectFor(node->origin.semantic)), objectGPR, keyRegs); |
3955 | silentFillAllRegisters(); |
3956 | m_jit.exceptionCheck(); |
3957 | |
3958 | done.link(&m_jit); |
3959 | booleanResult(resultGPR, node); |
3960 | break; |
3961 | } |
3962 | |
3963 | case StoreBarrier: |
3964 | case FencedStoreBarrier: { |
3965 | compileStoreBarrier(node); |
3966 | break; |
3967 | } |
3968 | |
3969 | case GetEnumerableLength: { |
3970 | compileGetEnumerableLength(node); |
3971 | break; |
3972 | } |
3973 | case HasGenericProperty: { |
3974 | compileHasGenericProperty(node); |
3975 | break; |
3976 | } |
3977 | case HasStructureProperty: { |
3978 | compileHasStructureProperty(node); |
3979 | break; |
3980 | } |
3981 | case HasIndexedProperty: { |
3982 | compileHasIndexedProperty(node); |
3983 | break; |
3984 | } |
3985 | case GetDirectPname: { |
3986 | compileGetDirectPname(node); |
3987 | break; |
3988 | } |
3989 | case GetPropertyEnumerator: { |
3990 | compileGetPropertyEnumerator(node); |
3991 | break; |
3992 | } |
3993 | case GetEnumeratorStructurePname: |
3994 | case GetEnumeratorGenericPname: { |
3995 | compileGetEnumeratorPname(node); |
3996 | break; |
3997 | } |
3998 | case ToIndexString: { |
3999 | compileToIndexString(node); |
4000 | break; |
4001 | } |
4002 | case ProfileType: { |
4003 | compileProfileType(node); |
4004 | break; |
4005 | } |
4006 | case ProfileControlFlow: { |
4007 | GPRTemporary scratch1(this); |
4008 | BasicBlockLocation* basicBlockLocation = node->basicBlockLocation(); |
4009 | basicBlockLocation->emitExecuteCode(m_jit, scratch1.gpr()); |
4010 | noResult(node); |
4011 | break; |
4012 | } |
4013 | |
4014 | case LogShadowChickenPrologue: { |
4015 | compileLogShadowChickenPrologue(node); |
4016 | break; |
4017 | } |
4018 | |
4019 | case LogShadowChickenTail: { |
4020 | compileLogShadowChickenTail(node); |
4021 | break; |
4022 | } |
4023 | |
4024 | case ForceOSRExit: { |
4025 | terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0); |
4026 | break; |
4027 | } |
4028 | |
4029 | case InvalidationPoint: |
4030 | emitInvalidationPoint(node); |
4031 | break; |
4032 | |
4033 | case CheckTraps: |
4034 | compileCheckTraps(node); |
4035 | break; |
4036 | |
4037 | case CountExecution: |
4038 | m_jit.add64(TrustedImm32(1), MacroAssembler::AbsoluteAddress(node->executionCounter()->address())); |
4039 | break; |
4040 | |
4041 | case SuperSamplerBegin: |
4042 | m_jit.add32(TrustedImm32(1), MacroAssembler::AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount))); |
4043 | break; |
4044 | |
4045 | case SuperSamplerEnd: |
4046 | m_jit.sub32(TrustedImm32(1), MacroAssembler::AbsoluteAddress(bitwise_cast<void*>(&g_superSamplerCount))); |
4047 | break; |
4048 | |
4049 | case Phantom: |
4050 | case Check: |
4051 | case CheckVarargs: |
4052 | DFG_NODE_DO_TO_CHILDREN(m_jit.graph(), node, speculate); |
4053 | noResult(node); |
4054 | break; |
4055 | |
4056 | case PhantomLocal: |
4057 | case LoopHint: |
4058 | // This is a no-op. |
4059 | noResult(node); |
4060 | break; |
4061 | |
4062 | case MaterializeNewObject: |
4063 | compileMaterializeNewObject(node); |
4064 | break; |
4065 | |
4066 | case PutDynamicVar: { |
4067 | compilePutDynamicVar(node); |
4068 | break; |
4069 | } |
4070 | |
4071 | case GetDynamicVar: { |
4072 | compileGetDynamicVar(node); |
4073 | break; |
4074 | } |
4075 | |
4076 | case ResolveScopeForHoistingFuncDeclInEval: { |
4077 | compileResolveScopeForHoistingFuncDeclInEval(node); |
4078 | break; |
4079 | } |
4080 | |
4081 | case ResolveScope: { |
4082 | compileResolveScope(node); |
4083 | break; |
4084 | } |
4085 | |
4086 | case CallDOM: |
4087 | compileCallDOM(node); |
4088 | break; |
4089 | |
4090 | case CallDOMGetter: |
4091 | compileCallDOMGetter(node); |
4092 | break; |
4093 | |
4094 | case CheckSubClass: |
4095 | compileCheckSubClass(node); |
4096 | break; |
4097 | |
4098 | case Unreachable: |
4099 | unreachable(node); |
4100 | break; |
4101 | |
4102 | case ExtractCatchLocal: { |
4103 | compileExtractCatchLocal(node); |
4104 | break; |
4105 | } |
4106 | |
4107 | case ClearCatchLocals: |
4108 | compileClearCatchLocals(node); |
4109 | break; |
4110 | |
4111 | case CheckStructureOrEmpty: |
4112 | DFG_CRASH(m_jit.graph(), node, "CheckStructureOrEmpty only used in 64-bit DFG" ); |
4113 | break; |
4114 | |
4115 | case FilterCallLinkStatus: |
4116 | case FilterGetByStatus: |
4117 | case FilterPutByIdStatus: |
4118 | case FilterInByIdStatus: |
4119 | m_interpreter.filterICStatus(node); |
4120 | noResult(node); |
4121 | break; |
4122 | |
4123 | case LastNodeType: |
4124 | case Phi: |
4125 | case Upsilon: |
4126 | case ExtractOSREntryLocal: |
4127 | case CheckTierUpInLoop: |
4128 | case CheckTierUpAtReturn: |
4129 | case CheckTierUpAndOSREnter: |
4130 | case Int52Rep: |
4131 | case FiatInt52: |
4132 | case Int52Constant: |
4133 | case CheckInBounds: |
4134 | case ArithIMul: |
4135 | case MultiGetByOffset: |
4136 | case MultiPutByOffset: |
4137 | case CheckBadCell: |
4138 | case BottomValue: |
4139 | case PhantomNewObject: |
4140 | case PhantomNewFunction: |
4141 | case PhantomNewGeneratorFunction: |
4142 | case PhantomNewAsyncFunction: |
4143 | case PhantomNewAsyncGeneratorFunction: |
4144 | case PhantomCreateActivation: |
4145 | case PhantomNewRegexp: |
4146 | case PutHint: |
4147 | case CheckStructureImmediate: |
4148 | case MaterializeCreateActivation: |
4149 | case PutStack: |
4150 | case KillStack: |
4151 | case GetStack: |
4152 | case GetMyArgumentByVal: |
4153 | case GetMyArgumentByValOutOfBounds: |
4154 | case GetVectorLength: |
4155 | case PhantomCreateRest: |
4156 | case PhantomSpread: |
4157 | case PhantomNewArrayWithSpread: |
4158 | case PhantomNewArrayBuffer: |
4159 | case AtomicsIsLockFree: |
4160 | case AtomicsAdd: |
4161 | case AtomicsAnd: |
4162 | case AtomicsCompareExchange: |
4163 | case AtomicsExchange: |
4164 | case AtomicsLoad: |
4165 | case AtomicsOr: |
4166 | case AtomicsStore: |
4167 | case AtomicsSub: |
4168 | case AtomicsXor: |
4169 | case IdentityWithProfile: |
4170 | case InitializeEntrypointArguments: |
4171 | case EntrySwitch: |
4172 | case CPUIntrinsic: |
4173 | case AssertNotEmpty: |
4174 | case DataViewGetInt: |
4175 | case DataViewGetFloat: |
4176 | case DataViewSet: |
4177 | case DateGetInt32OrNaN: |
4178 | case DateGetTime: |
4179 | case StringCodePointAt: |
4180 | DFG_CRASH(m_jit.graph(), node, "unexpected node in DFG backend" ); |
4181 | break; |
4182 | } |
4183 | |
4184 | if (!m_compileOkay) |
4185 | return; |
4186 | |
4187 | if (node->hasResult() && node->mustGenerate()) |
4188 | use(node); |
4189 | } |
4190 | |
4191 | void SpeculativeJIT::moveTrueTo(GPRReg gpr) |
4192 | { |
4193 | m_jit.move(TrustedImm32(1), gpr); |
4194 | } |
4195 | |
4196 | void SpeculativeJIT::moveFalseTo(GPRReg gpr) |
4197 | { |
4198 | m_jit.move(TrustedImm32(0), gpr); |
4199 | } |
4200 | |
4201 | void SpeculativeJIT::blessBoolean(GPRReg) |
4202 | { |
4203 | } |
4204 | |
4205 | void SpeculativeJIT::compileArithRandom(Node* node) |
4206 | { |
4207 | JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->origin.semantic); |
4208 | |
4209 | flushRegisters(); |
4210 | |
4211 | FPRResult result(this); |
4212 | callOperation(operationRandom, result.fpr(), globalObject); |
4213 | // operationRandom does not raise any exception. |
4214 | doubleResult(result.fpr(), node); |
4215 | } |
4216 | |
4217 | #endif |
4218 | |
4219 | } } // namespace JSC::DFG |
4220 | |
4221 | #endif |
4222 | |