/*
 * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "SetupVarargsFrame.h"

#if ENABLE(JIT)

#include "Interpreter.h"
#include "JSCInlines.h"
#include "StackAlignment.h"

namespace JSC {

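// Computes, into resultGPR, the base address of a new call frame placed just below
// the slots already in use in the current frame. lengthGPR holds the argument count
// (with or without |this|, per lengthIncludesThis) and numUsedSlotsGPR holds the
// number of Register-sized slots currently in use. Both the new frame's position and
// its size end up aligned to stackAlignmentRegisters().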
void emitSetVarargsFrame(CCallHelpers& jit, GPRReg lengthGPR, bool lengthIncludesThis, GPRReg numUsedSlotsGPR, GPRReg resultGPR)
{
    jit.move(numUsedSlotsGPR, resultGPR);
    // We really want to make sure the size of the new call frame is a multiple of
    // stackAlignmentRegisters(); however, it is easier to accomplish this by
    // rounding numUsedSlotsGPR up to the next multiple of stackAlignmentRegisters().
    // Together with the rounding below, this ensures that the new call frame is
    // located on a stackAlignmentRegisters() boundary and is a multiple of
    // stackAlignmentRegisters() in size.
    jit.addPtr(CCallHelpers::TrustedImm32(stackAlignmentRegisters() - 1), resultGPR);
    jit.andPtr(CCallHelpers::TrustedImm32(~(stackAlignmentRegisters() - 1)), resultGPR);

    jit.addPtr(lengthGPR, resultGPR);
    jit.addPtr(CCallHelpers::TrustedImm32(CallFrame::headerSizeInRegisters + (lengthIncludesThis ? 0 : 1)), resultGPR);

    // resultGPR now has the required frame size in Register units.
    // Round resultGPR up to the next multiple of stackAlignmentRegisters().
    jit.addPtr(CCallHelpers::TrustedImm32(stackAlignmentRegisters() - 1), resultGPR);
    jit.andPtr(CCallHelpers::TrustedImm32(~(stackAlignmentRegisters() - 1)), resultGPR);

    // Now resultGPR has the right stack frame offset in Register units.
    jit.negPtr(resultGPR);
    jit.getEffectiveAddress(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, resultGPR, CCallHelpers::TimesEight), resultGPR);
}

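// Emits the fast path for building a varargs call frame: recovers the caller's
// argument count (a constant or a stack slot, per argCountRecovery), skips the first
// firstVarArgOffset arguments if requested, allocates an aligned frame below the used
// portion of the stack, and copies the arguments into it. Branches to slowCase when
// the argument count is too large or the new frame would not fit within the stack limit.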
static void emitSetupVarargsFrameFastCase(VM& vm, CCallHelpers& jit, GPRReg numUsedSlotsGPR, GPRReg scratchGPR1, GPRReg scratchGPR2, GPRReg scratchGPR3, ValueRecovery argCountRecovery, VirtualRegister firstArgumentReg, unsigned firstVarArgOffset, CCallHelpers::JumpList& slowCase)
{
    CCallHelpers::JumpList end;

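    // Materialize the caller's argument count (which includes |this|) into scratchGPR1,
    // either as a known constant or by loading it from the stack slot named by
    // argCountRecovery.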
    if (argCountRecovery.isConstant()) {
        // FIXME: We could constant-fold a lot of the computation below in this case.
        // https://bugs.webkit.org/show_bug.cgi?id=141486
        jit.move(CCallHelpers::TrustedImm32(argCountRecovery.constant().asInt32()), scratchGPR1);
    } else
        jit.load32(CCallHelpers::payloadFor(argCountRecovery.virtualRegister()), scratchGPR1);
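    // When forwarding only a suffix of the arguments (firstVarArgOffset > 0), subtract
    // the skipped prefix from the count; if there are not enough actual arguments to
    // skip, clamp the count to 1 so only |this| is forwarded.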
    if (firstVarArgOffset) {
        CCallHelpers::Jump sufficientArguments = jit.branch32(CCallHelpers::GreaterThan, scratchGPR1, CCallHelpers::TrustedImm32(firstVarArgOffset + 1));
        jit.move(CCallHelpers::TrustedImm32(1), scratchGPR1);
        CCallHelpers::Jump endVarArgs = jit.jump();
        sufficientArguments.link(&jit);
        jit.sub32(CCallHelpers::TrustedImm32(firstVarArgOffset), scratchGPR1);
        endVarArgs.link(&jit);
    }
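    // Take the slow path if the count exceeds maxArguments (the + 1 accounts for
    // |this| being included in scratchGPR1).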
    slowCase.append(jit.branch32(CCallHelpers::Above, scratchGPR1, CCallHelpers::TrustedImm32(JSC::maxArguments + 1)));

    emitSetVarargsFrame(jit, scratchGPR1, true, numUsedSlotsGPR, scratchGPR2);

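    // Take the slow path if the computed frame address is above the current call frame
    // or below the VM's soft stack limit.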
    slowCase.append(jit.branchPtr(CCallHelpers::Above, scratchGPR2, GPRInfo::callFrameRegister));
    slowCase.append(jit.branchPtr(CCallHelpers::Above, CCallHelpers::AbsoluteAddress(vm.addressOfSoftStackLimit()), scratchGPR2));

    // Before writing any values into the new frame, move the stack pointer down so that
    // those slots lie above it and cannot be clobbered by a signal handler running on
    // this stack.
    jit.addPtr(CCallHelpers::TrustedImm32(sizeof(CallerFrameAndPC)), scratchGPR2, CCallHelpers::stackPointerRegister);

    // Initialize ArgumentCount.
    jit.store32(scratchGPR1, CCallHelpers::Address(scratchGPR2, CallFrameSlot::argumentCount * static_cast<int>(sizeof(Register)) + PayloadOffset));

    // Copy arguments.
    jit.signExtend32ToPtr(scratchGPR1, scratchGPR1);
    CCallHelpers::Jump done = jit.branchSubPtr(CCallHelpers::Zero, CCallHelpers::TrustedImm32(1), scratchGPR1);
    // scratchGPR1 now holds argumentCount - 1 and is used as the loop counter.

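    // Copy loop: scratchGPR1 counts down from argumentCount - 1 to 1, copying one
    // argument per iteration from the caller's frame into the corresponding slot of the
    // new frame; the |this| slot itself is not written here. On 64-bit each JSValue is
    // copied with a single load/store; on 32-bit the tag and payload are copied
    // separately.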
    CCallHelpers::Label copyLoop = jit.label();
    int argOffset = (firstArgumentReg.offset() - 1 + firstVarArgOffset) * static_cast<int>(sizeof(Register));
#if USE(JSVALUE64)
    jit.load64(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, scratchGPR1, CCallHelpers::TimesEight, argOffset), scratchGPR3);
    jit.store64(scratchGPR3, CCallHelpers::BaseIndex(scratchGPR2, scratchGPR1, CCallHelpers::TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));
#else // !USE(JSVALUE64): 32-bit JSValue representation, copy tag and payload separately.
    jit.load32(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, scratchGPR1, CCallHelpers::TimesEight, argOffset + TagOffset), scratchGPR3);
    jit.store32(scratchGPR3, CCallHelpers::BaseIndex(scratchGPR2, scratchGPR1, CCallHelpers::TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)) + TagOffset));
    jit.load32(CCallHelpers::BaseIndex(GPRInfo::callFrameRegister, scratchGPR1, CCallHelpers::TimesEight, argOffset + PayloadOffset), scratchGPR3);
    jit.store32(scratchGPR3, CCallHelpers::BaseIndex(scratchGPR2, scratchGPR1, CCallHelpers::TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)) + PayloadOffset));
#endif // USE(JSVALUE64)
    jit.branchSubPtr(CCallHelpers::NonZero, CCallHelpers::TrustedImm32(1), scratchGPR1).linkTo(copyLoop, &jit);

    done.link(&jit);
}

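// Public entry point: determines where the caller's argument count and arguments live
// before delegating to the helper above. For inlined frames the count is either a known
// constant or spilled to the inline frame's argumentCountRegister, and the arguments sit
// at the virtual registers recorded in argumentsWithFixup; otherwise the machine call
// frame's own ArgumentCount slot and argument offsets are used.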
void emitSetupVarargsFrameFastCase(VM& vm, CCallHelpers& jit, GPRReg numUsedSlotsGPR, GPRReg scratchGPR1, GPRReg scratchGPR2, GPRReg scratchGPR3, InlineCallFrame* inlineCallFrame, unsigned firstVarArgOffset, CCallHelpers::JumpList& slowCase)
{
    ValueRecovery argumentCountRecovery;
    VirtualRegister firstArgumentReg;
    if (inlineCallFrame) {
        if (inlineCallFrame->isVarargs()) {
            argumentCountRecovery = ValueRecovery::displacedInJSStack(
                inlineCallFrame->argumentCountRegister, DataFormatInt32);
        } else {
            argumentCountRecovery = ValueRecovery::constant(
                jsNumber(inlineCallFrame->argumentCountIncludingThis));
        }
        if (inlineCallFrame->argumentsWithFixup.size() > 1)
            firstArgumentReg = inlineCallFrame->argumentsWithFixup[1].virtualRegister();
        else
            firstArgumentReg = VirtualRegister(0);
    } else {
        argumentCountRecovery = ValueRecovery::displacedInJSStack(
            VirtualRegister(CallFrameSlot::argumentCount), DataFormatInt32);
        firstArgumentReg = VirtualRegister(CallFrame::argumentOffset(0));
    }
    emitSetupVarargsFrameFastCase(vm, jit, numUsedSlotsGPR, scratchGPR1, scratchGPR2, scratchGPR3, argumentCountRecovery, firstArgumentReg, firstVarArgOffset, slowCase);
}

} // namespace JSC

#endif // ENABLE(JIT)