/*
 * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "B3PatchpointSpecial.h"

#if ENABLE(B3_JIT)

#include "AirCode.h"
#include "AirGenerationContext.h"
#include "B3ProcedureInlines.h"
#include "B3StackmapGenerationParams.h"
#include "B3ValueInlines.h"

namespace JSC { namespace B3 {

using Arg = Air::Arg;
using Inst = Air::Inst;

PatchpointSpecial::PatchpointSpecial()
{
}

PatchpointSpecial::~PatchpointSpecial()
{
}

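// A patchpoint's Air instruction lays out its arguments as follows: the result args come first
// (starting at index 1), then the lowered stackmap children, and finally the requested GP and FP
// scratch register temps. This walks that layout and reports each arg with its role, bank, and
// width.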
void PatchpointSpecial::forEachArg(Inst& inst, const ScopedLambda<Inst::EachArgCallback>& callback)
{
    const Procedure& procedure = code().proc();
    PatchpointValue* patchpoint = inst.origin->as<PatchpointValue>();
    unsigned argIndex = 1;

    Type type = patchpoint->type();
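    // Results that asked for SomeEarlyRegister are reported as EarlyDef so they interfere with
    // the instruction's uses; all other results are ordinary Defs.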
    for (; argIndex <= procedure.resultCount(type); ++argIndex) {
        Arg::Role role;
        if (patchpoint->resultConstraints[argIndex - 1].kind() == ValueRep::SomeEarlyRegister)
            role = Arg::EarlyDef;
        else
            role = Arg::Def;

        Type argType = type.isTuple() ? procedure.extractFromTuple(type, argIndex - 1) : type;
        callback(inst.args[argIndex], role, bankForType(argType), widthForType(argType));
    }

    forEachArgImpl(0, argIndex, inst, SameAsRep, WTF::nullopt, callback, WTF::nullopt);
    argIndex += inst.origin->numChildren();

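    // The scratch temps are reported at conservative width since the client's generator may use
    // them at any width.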
    for (unsigned i = patchpoint->numGPScratchRegisters; i--;)
        callback(inst.args[argIndex++], Arg::Scratch, GP, conservativeWidth(GP));
    for (unsigned i = patchpoint->numFPScratchRegisters; i--;)
        callback(inst.args[argIndex++], Arg::Scratch, FP, conservativeWidth(FP));
}

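// Checks that the instruction has the shape described above: each result arg must be compatible
// with the patchpoint's type and its result constraint, the stackmap children must satisfy
// isValidImpl(), and the trailing args must be exactly the requested GP and FP scratch temps.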
bool PatchpointSpecial::isValid(Inst& inst)
{
    const Procedure& procedure = code().proc();
    PatchpointValue* patchpoint = inst.origin->as<PatchpointValue>();
    unsigned argIndex = 1;

    Type type = patchpoint->type();
    for (; argIndex <= procedure.resultCount(type); ++argIndex) {
        if (argIndex >= inst.args.size())
            return false;

        if (!isArgValidForType(inst.args[argIndex], type.isTuple() ? procedure.extractFromTuple(type, argIndex - 1) : type))
            return false;
        if (!isArgValidForRep(code(), inst.args[argIndex], patchpoint->resultConstraints[argIndex - 1]))
            return false;
    }

    if (!isValidImpl(0, argIndex, inst))
        return false;
    argIndex += patchpoint->numChildren();

    if (argIndex + patchpoint->numGPScratchRegisters + patchpoint->numFPScratchRegisters
        != inst.args.size())
        return false;

    for (unsigned i = patchpoint->numGPScratchRegisters; i--;) {
        Arg arg = inst.args[argIndex++];
        if (!arg.isGPTmp())
            return false;
    }
    for (unsigned i = patchpoint->numFPScratchRegisters; i--;) {
        Arg arg = inst.args[argIndex++];
        if (!arg.isFPTmp())
            return false;
    }

    return true;
}

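// A result may be spilled to the stack only if its constraint admits any location (WarmAny) or
// explicitly asks for a stack argument; the register-flavored constraints force a register.
// Arguments past the results defer to the generic stackmap logic.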
bool PatchpointSpecial::admitsStack(Inst& inst, unsigned argIndex)
{
    ASSERT(argIndex);

    Type type = inst.origin->type();
    unsigned returnCount = code().proc().resultCount(type);

    if (argIndex <= returnCount) {
        switch (inst.origin->as<PatchpointValue>()->resultConstraints[argIndex - 1].kind()) {
        case ValueRep::WarmAny:
        case ValueRep::StackArgument:
            return true;
        case ValueRep::SomeRegister:
        case ValueRep::SomeRegisterWithClobber:
        case ValueRep::SomeEarlyRegister:
        case ValueRep::SomeLateRegister:
        case ValueRep::Register:
        case ValueRep::LateRegister:
            return false;
        default:
            RELEASE_ASSERT_NOT_REACHED();
            return false;
        }
    }

    return admitsStackImpl(0, returnCount + 1, inst, argIndex);
}

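// An extended-offset stack address is acceptable wherever a plain stack slot is.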
bool PatchpointSpecial::admitsExtendedOffsetAddr(Inst& inst, unsigned argIndex)
{
    return admitsStack(inst, argIndex);
}

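// Emits the patchpoint: collects the final ValueRep for each result and stackmap child, packages
// them together with the scratch registers into StackmapGenerationParams, and runs the client's
// generator. A patchpoint never produces a jump that needs linking, so this returns an empty Jump.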
CCallHelpers::Jump PatchpointSpecial::generate(Inst& inst, CCallHelpers& jit, Air::GenerationContext& context)
{
    const Procedure& procedure = code().proc();
    PatchpointValue* value = inst.origin->as<PatchpointValue>();
    ASSERT(value);

    Vector<ValueRep> reps;
    unsigned offset = 1;

    Type type = value->type();
    while (offset <= procedure.resultCount(type))
        reps.append(repForArg(*context.code, inst.args[offset++]));
    reps.appendVector(repsImpl(context, 0, offset, inst));
    offset += value->numChildren();

    StackmapGenerationParams params(value, reps, context);

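    // Hand the chosen scratch registers to the params so the generator can use them.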
    for (unsigned i = value->numGPScratchRegisters; i--;)
        params.m_gpScratch.append(inst.args[offset++].gpr());
    for (unsigned i = value->numFPScratchRegisters; i--;)
        params.m_fpScratch.append(inst.args[offset++].fpr());

    value->m_generator->run(jit, params);

    return CCallHelpers::Jump();
}

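// A patchpoint terminates its basic block exactly when its B3 value declares terminal effects.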
bool PatchpointSpecial::isTerminal(Inst& inst)
{
    return inst.origin->as<PatchpointValue>()->effects.terminal;
}

void PatchpointSpecial::dumpImpl(PrintStream& out) const
{
    out.print("Patchpoint");
}

void PatchpointSpecial::deepDumpImpl(PrintStream& out) const
{
    out.print("Lowered B3::PatchpointValue.");
}

} } // namespace JSC::B3

#endif // ENABLE(B3_JIT)