/*
 * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "FTLSlowPathCall.h"

#if ENABLE(FTL_JIT)

#include "CCallHelpers.h"
#include "FTLState.h"
#include "FTLThunks.h"
#include "GPRInfo.h"
#include "JSCInlines.h"

namespace JSC { namespace FTL {

// This code relies on us being 64-bit. FTL is currently always 64-bit.
static constexpr size_t wordSize = 8;

SlowPathCallContext::SlowPathCallContext(
    RegisterSet usedRegisters, CCallHelpers& jit, unsigned numArgs, GPRReg returnRegister)
    : m_jit(jit)
    , m_numArgs(numArgs)
    , m_returnRegister(returnRegister)
{
    // We don't care that you're using callee-save, stack, or hardware registers.
    usedRegisters.exclude(RegisterSet::stackRegisters());
    usedRegisters.exclude(RegisterSet::reservedHardwareRegisters());
    usedRegisters.exclude(RegisterSet::calleeSaveRegisters());

    // The return register doesn't need to be saved.
    if (m_returnRegister != InvalidGPRReg)
        usedRegisters.clear(m_returnRegister);

    size_t stackBytesNeededForReturnAddress = wordSize;

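    // Arguments beyond what fits in the argument registers are passed on the stack,
    // so the register saving area has to sit above that outgoing-argument space.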
    m_offsetToSavingArea =
        (std::max(m_numArgs, NUMBER_OF_ARGUMENT_REGISTERS) - NUMBER_OF_ARGUMENT_REGISTERS) * wordSize;

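    // Collect the calling-convention registers (argument registers plus the return
    // value register) that are also in usedRegisters; those are the ones we save here.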
    for (unsigned i = std::min(NUMBER_OF_ARGUMENT_REGISTERS, numArgs); i--;)
        m_argumentRegisters.set(GPRInfo::toArgumentRegister(i));
    m_callingConventionRegisters.merge(m_argumentRegisters);
    if (returnRegister != InvalidGPRReg)
        m_callingConventionRegisters.set(GPRInfo::returnValueGPR);
    m_callingConventionRegisters.filter(usedRegisters);

    unsigned numberOfCallingConventionRegisters =
        m_callingConventionRegisters.numberOfSetRegisters();

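    // The thunk saves the rest of the used registers just above the slots where we
    // save the calling-convention registers.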
    size_t offsetToThunkSavingArea =
        m_offsetToSavingArea +
        numberOfCallingConventionRegisters * wordSize;

    m_stackBytesNeeded =
        offsetToThunkSavingArea +
        stackBytesNeededForReturnAddress +
        (usedRegisters.numberOfSetRegisters() - numberOfCallingConventionRegisters) * wordSize;

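    // Round the frame size up to the required stack alignment.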
    m_stackBytesNeeded = (m_stackBytesNeeded + stackAlignmentBytes() - 1) & ~(stackAlignmentBytes() - 1);

    m_jit.subPtr(CCallHelpers::TrustedImm32(m_stackBytesNeeded), CCallHelpers::stackPointerRegister);

    m_thunkSaveSet = usedRegisters;

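    // Save the calling-convention registers here and remove them from the set that
    // the thunk has to save.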
    // This relies on all calling convention registers also being temp registers.
    unsigned stackIndex = 0;
    for (unsigned i = GPRInfo::numberOfRegisters; i--;) {
        GPRReg reg = GPRInfo::toRegister(i);
        if (!m_callingConventionRegisters.get(reg))
            continue;
        m_jit.storePtr(reg, CCallHelpers::Address(CCallHelpers::stackPointerRegister, m_offsetToSavingArea + (stackIndex++) * wordSize));
        m_thunkSaveSet.clear(reg);
    }

    m_offset = offsetToThunkSavingArea;
}

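// Undoes the setup above: moves the return value into the requested register,
// reloads the registers saved in the constructor, and pops the stack frame.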
SlowPathCallContext::~SlowPathCallContext()
{
    if (m_returnRegister != InvalidGPRReg)
        m_jit.move(GPRInfo::returnValueGPR, m_returnRegister);

    unsigned stackIndex = 0;
    for (unsigned i = GPRInfo::numberOfRegisters; i--;) {
        GPRReg reg = GPRInfo::toRegister(i);
        if (!m_callingConventionRegisters.get(reg))
            continue;
        m_jit.loadPtr(CCallHelpers::Address(CCallHelpers::stackPointerRegister, m_offsetToSavingArea + (stackIndex++) * wordSize), reg);
    }

    m_jit.addPtr(CCallHelpers::TrustedImm32(m_stackBytesNeeded), CCallHelpers::stackPointerRegister);
}

SlowPathCallKey SlowPathCallContext::keyWithTarget(FunctionPtr<CFunctionPtrTag> callTarget) const
{
    return SlowPathCallKey(m_thunkSaveSet, callTarget, m_argumentRegisters, m_offset);
}

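// Emits the call and registers a link task that points it at the slow path call
// thunk generated for this key; the thunk is expected to save and restore the
// registers recorded in the key around the call to the target.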
SlowPathCall SlowPathCallContext::makeCall(VM& vm, FunctionPtr<CFunctionPtrTag> callTarget)
{
    SlowPathCallKey key = keyWithTarget(callTarget);
    SlowPathCall result = SlowPathCall(m_jit.call(OperationPtrTag), key);

    m_jit.addLinkTask(
        [result, &vm] (LinkBuffer& linkBuffer) {
            MacroAssemblerCodeRef<JITThunkPtrTag> thunk =
                vm.ftlThunks->getSlowPathCallThunk(vm, result.key());

            linkBuffer.link(result.call(), CodeLocationLabel<OperationPtrTag>(thunk.retaggedCode<OperationPtrTag>()));
        });

    return result;
}

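// Returns a default-constructed (invalid) CallSiteIndex when there is no code origin.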
CallSiteIndex callSiteIndexForCodeOrigin(State& state, CodeOrigin codeOrigin)
{
    if (codeOrigin)
        return state.jitCode->common.addCodeOrigin(codeOrigin);
    return CallSiteIndex();
}

} } // namespace JSC::FTL

#endif // ENABLE(FTL_JIT)