/*
 * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AirCCallSpecial.h"

#if ENABLE(B3_JIT)

namespace JSC { namespace B3 { namespace Air {

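// The clobber set is everything a C call can trash: all registers, minus the stack and
// reserved hardware registers (which the allocator never hands out anyway), minus the
// callee-saves (which the C calling convention preserves), and minus the return value
// registers, which forEachArg() reports as Defs rather than clobbers.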
CCallSpecial::CCallSpecial()
{
    m_clobberedRegs = RegisterSet::allRegisters();
    m_clobberedRegs.exclude(RegisterSet::stackRegisters());
    m_clobberedRegs.exclude(RegisterSet::reservedHardwareRegisters());
    m_clobberedRegs.exclude(RegisterSet::calleeSaveRegisters());
    m_clobberedRegs.clear(GPRInfo::returnValueGPR);
    m_clobberedRegs.clear(GPRInfo::returnValueGPR2);
    m_clobberedRegs.clear(FPRInfo::returnValueFPR);
}

CCallSpecial::~CCallSpecial()
{
}

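// The argument list of this special is: the callee first (a Use), then the return value
// registers (Defs: two GPs followed by one FP), then whatever arguments are passed in
// registers (Uses). The offsets and counts (calleeArgOffset, numReturnGPArgs, argArgOffset,
// and so on) are constants declared in AirCCallSpecial.h.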
void CCallSpecial::forEachArg(Inst& inst, const ScopedLambda<Inst::EachArgCallback>& callback)
{
    for (unsigned i = 0; i < numCalleeArgs; ++i)
        callback(inst.args[calleeArgOffset + i], Arg::Use, GP, pointerWidth());
    for (unsigned i = 0; i < numReturnGPArgs; ++i)
        callback(inst.args[returnGPArgOffset + i], Arg::Def, GP, pointerWidth());
    for (unsigned i = 0; i < numReturnFPArgs; ++i)
        callback(inst.args[returnFPArgOffset + i], Arg::Def, FP, Width64);

    for (unsigned i = argArgOffset; i < inst.args.size(); ++i) {
        // For the type, we can just query the arg's bank. The arg will have a bank, because we
        // require these args to be argument registers.
        Bank bank = inst.args[i].bank();
        callback(inst.args[i], Arg::Use, bank, conservativeWidth(bank));
    }
}

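// Checks the shape that instruction selection must have produced: a callee that is an
// immediate, a Tmp, or an address; return args pinned to exactly the ABI's return value
// registers; and argument args that are all registers, none of them the scratch register
// that generate() needs for immediate callees.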
bool CCallSpecial::isValid(Inst& inst)
{
    if (inst.args.size() < argArgOffset)
        return false;

    for (unsigned i = 0; i < numCalleeArgs; ++i) {
        Arg& arg = inst.args[i + calleeArgOffset];
        if (!arg.isGP())
            return false;
        switch (arg.kind()) {
        case Arg::Imm:
            if (is32Bit())
                break;
            return false;
        case Arg::BigImm:
            if (is64Bit())
                break;
            return false;
        case Arg::Tmp:
        case Arg::Addr:
        case Arg::ExtendedOffsetAddr:
        case Arg::Stack:
        case Arg::CallArg:
            break;
        default:
            return false;
        }
    }

    // Return args need to be exact.
    if (inst.args[returnGPArgOffset + 0] != Tmp(GPRInfo::returnValueGPR))
        return false;
    if (inst.args[returnGPArgOffset + 1] != Tmp(GPRInfo::returnValueGPR2))
        return false;
    if (inst.args[returnFPArgOffset + 0] != Tmp(FPRInfo::returnValueFPR))
        return false;

    for (unsigned i = argArgOffset; i < inst.args.size(); ++i) {
        if (!inst.args[i].isReg())
            return false;

        if (inst.args[i] == Tmp(scratchRegister))
            return false;
    }
    return true;
}

bool CCallSpecial::admitsStack(Inst&, unsigned argIndex)
{
    // The callee can be on the stack unless targeting ARM64, where we can't later properly
    // handle an Addr callee argument in generate() due to disallowed scratch register usage.
    if (argIndex == calleeArgOffset)
        return !isARM64();

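    // Everything else has to be in a register, as isValid() requires.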
    return false;
}

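// An ExtendedOffsetAddr is acceptable exactly where a plain stack address is, so this just
// defers to admitsStack().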
bool CCallSpecial::admitsExtendedOffsetAddr(Inst& inst, unsigned argIndex)
{
    return admitsStack(inst, argIndex);
}

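// The scratch register used here is fixed rather than chosen from the registers that
// happen to be free, so the used-register report is of no interest and this is a no-op.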
void CCallSpecial::reportUsedRegisters(Inst&, const RegisterSet&)
{
}

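// Emits the call itself. An immediate callee is first materialized into the fixed scratch
// register (which is why isValid() refuses to let that register appear among the
// arguments); a Tmp or address callee is called through directly. Every call is tagged
// with B3CCallPtrTag.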
CCallHelpers::Jump CCallSpecial::generate(Inst& inst, CCallHelpers& jit, GenerationContext&)
{
    switch (inst.args[calleeArgOffset].kind()) {
    case Arg::Imm:
    case Arg::BigImm:
        jit.move(inst.args[calleeArgOffset].asTrustedImmPtr(), scratchRegister);
        jit.call(scratchRegister, B3CCallPtrTag);
        break;
    case Arg::Tmp:
        jit.call(inst.args[calleeArgOffset].gpr(), B3CCallPtrTag);
        break;
    case Arg::Addr:
    case Arg::ExtendedOffsetAddr:
        jit.call(inst.args[calleeArgOffset].asAddress(), B3CCallPtrTag);
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }
    return CCallHelpers::Jump();
}

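// There are no early clobbers: nothing gets trashed before the argument registers are
// consumed by the call. The late clobbers are the caller-save set computed in the
// constructor.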
RegisterSet CCallSpecial::extraEarlyClobberedRegs(Inst&)
{
    return m_emptyRegs;
}

RegisterSet CCallSpecial::extraClobberedRegs(Inst&)
{
    return m_clobberedRegs;
}

void CCallSpecial::dumpImpl(PrintStream& out) const
{
    out.print("CCall");
}

void CCallSpecial::deepDumpImpl(PrintStream& out) const
{
    out.print("function call that uses the C calling convention.");
}

} } } // namespace JSC::B3::Air

#endif // ENABLE(B3_JIT)