/*
 * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "B3PatchpointSpecial.h"

#if ENABLE(B3_JIT)

#include "AirGenerationContext.h"
#include "B3StackmapGenerationParams.h"
#include "B3ValueInlines.h"

namespace JSC { namespace B3 {

using Arg = Air::Arg;
using Inst = Air::Inst;

PatchpointSpecial::PatchpointSpecial()
{
}

PatchpointSpecial::~PatchpointSpecial()
{
}

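// A patchpoint's Air arg list has a fixed shape: args[0] is this Special; if the patchpoint
// returns a value, args[1] is the result; the stackmap children follow; and the GP and FP
// scratch registers form the tail of the list.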
void PatchpointSpecial::forEachArg(Inst& inst, const ScopedLambda<Inst::EachArgCallback>& callback)
{
    PatchpointValue* patchpoint = inst.origin->as<PatchpointValue>();
    unsigned argIndex = 1;

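    // A SomeEarlyRegister result must not share a register with any input, so it is an early
    // def; otherwise an ordinary (late) def suffices.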
    if (patchpoint->type() != Void) {
        Arg::Role role;
        if (patchpoint->resultConstraint.kind() == ValueRep::SomeEarlyRegister)
            role = Arg::EarlyDef;
        else
            role = Arg::Def;

        callback(inst.args[argIndex++], role, inst.origin->resultBank(), inst.origin->resultWidth());
    }

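    // Visit the stackmap children; the Air args before argIndex (the Special and the optional
    // result) are skipped.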
    forEachArgImpl(0, argIndex, inst, SameAsRep, WTF::nullopt, callback, WTF::nullopt);
    argIndex += inst.origin->numChildren();

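    // The scratch registers were appended last, so they come after the children.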
    for (unsigned i = patchpoint->numGPScratchRegisters; i--;)
        callback(inst.args[argIndex++], Arg::Scratch, GP, conservativeWidth(GP));
    for (unsigned i = patchpoint->numFPScratchRegisters; i--;)
        callback(inst.args[argIndex++], Arg::Scratch, FP, conservativeWidth(FP));
}

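// Checks that the arg list has the shape forEachArg() expects: an optional result arg that
// satisfies the result constraint, then the stackmap children, then exactly the requested
// number of GP and FP scratch tmps.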
bool PatchpointSpecial::isValid(Inst& inst)
{
    PatchpointValue* patchpoint = inst.origin->as<PatchpointValue>();
    unsigned argIndex = 1;

    if (inst.origin->type() != Void) {
        if (argIndex >= inst.args.size())
            return false;

        if (!isArgValidForValue(inst.args[argIndex], patchpoint))
            return false;
        if (!isArgValidForRep(code(), inst.args[argIndex], patchpoint->resultConstraint))
            return false;
        argIndex++;
    }

    if (!isValidImpl(0, argIndex, inst))
        return false;
    argIndex += patchpoint->numChildren();

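    // Whatever remains after the result and the children must be exactly the scratch registers.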
    if (argIndex + patchpoint->numGPScratchRegisters + patchpoint->numFPScratchRegisters
        != inst.args.size())
        return false;

    for (unsigned i = patchpoint->numGPScratchRegisters; i--;) {
        Arg arg = inst.args[argIndex++];
        if (!arg.isGPTmp())
            return false;
    }
    for (unsigned i = patchpoint->numFPScratchRegisters; i--;) {
        Arg arg = inst.args[argIndex++];
        if (!arg.isFPTmp())
            return false;
    }

    return true;
}

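// The result arg (args[1], when the patchpoint returns a value) may live on the stack only if
// its constraint permits it; all other args defer to the generic stackmap logic.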
bool PatchpointSpecial::admitsStack(Inst& inst, unsigned argIndex)
{
    if (inst.origin->type() == Void)
        return admitsStackImpl(0, 1, inst, argIndex);

    if (argIndex == 1) {
        switch (inst.origin->as<PatchpointValue>()->resultConstraint.kind()) {
        case ValueRep::WarmAny:
        case ValueRep::StackArgument:
            return true;
        case ValueRep::SomeRegister:
        case ValueRep::SomeRegisterWithClobber:
        case ValueRep::SomeEarlyRegister:
        case ValueRep::SomeLateRegister:
        case ValueRep::Register:
        case ValueRep::LateRegister:
            return false;
        default:
            RELEASE_ASSERT_NOT_REACHED();
            return false;
        }
    }

    return admitsStackImpl(0, 2, inst, argIndex);
}

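// Extended-offset addresses are admitted exactly where plain stack slots are.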
bool PatchpointSpecial::admitsExtendedOffsetAddr(Inst& inst, unsigned argIndex)
{
    return admitsStack(inst, argIndex);
}

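// Emits the patchpoint: computes the ValueRep for the result (if any) and for each child,
// collects the scratch registers that the register allocator assigned, and then runs the
// client's generator.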
CCallHelpers::Jump PatchpointSpecial::generate(Inst& inst, CCallHelpers& jit, Air::GenerationContext& context)
{
    PatchpointValue* value = inst.origin->as<PatchpointValue>();
    ASSERT(value);

    Vector<ValueRep> reps;
    unsigned offset = 1;
    if (inst.origin->type() != Void)
        reps.append(repForArg(*context.code, inst.args[offset++]));
    reps.appendVector(repsImpl(context, 0, offset, inst));
    offset += value->numChildren();

    StackmapGenerationParams params(value, reps, context);

    for (unsigned i = value->numGPScratchRegisters; i--;)
        params.m_gpScratch.append(inst.args[offset++].gpr());
    for (unsigned i = value->numFPScratchRegisters; i--;)
        params.m_fpScratch.append(inst.args[offset++].fpr());

    value->m_generator->run(jit, params);

    return CCallHelpers::Jump();
}

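// A patchpoint is a block terminator exactly when the client declared terminal effects on it.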
bool PatchpointSpecial::isTerminal(Inst& inst)
{
    return inst.origin->as<PatchpointValue>()->effects.terminal;
}

void PatchpointSpecial::dumpImpl(PrintStream& out) const
{
    out.print("Patchpoint");
}

void PatchpointSpecial::deepDumpImpl(PrintStream& out) const
{
    out.print("Lowered B3::PatchpointValue.");
}

} } // namespace JSC::B3

#endif // ENABLE(B3_JIT)