/*
 * Copyright (C) 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "CallFrameShuffler.h"

#if ENABLE(JIT) && USE(JSVALUE64)

#include "CCallHelpers.h"
#include "DataFormat.h"
#include "JSCInlines.h"

namespace JSC {

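// Emits code that stores the given cached recovery, which must currently
// live in a register or be a constant, into the given stack slot, and
// returns the DataFormat that was actually written to memory so the caller
// can record how the slot must later be reloaded.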
DataFormat CallFrameShuffler::emitStore(
    CachedRecovery& cachedRecovery, MacroAssembler::Address address)
{
    ASSERT(!cachedRecovery.recovery().isInJSStack());

    switch (cachedRecovery.recovery().technique()) {
    case InGPR:
        m_jit.storePtr(cachedRecovery.recovery().gpr(), address);
        return DataFormatJS;
    case UnboxedInt32InGPR:
        m_jit.store32(cachedRecovery.recovery().gpr(), address.withOffset(PayloadOffset));
        return DataFormatInt32;
    case UnboxedInt52InGPR:
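        // An Int52 is kept left-shifted by JSValue::int52ShiftAmount; shift
        // it back down so we can fall through and store it as a StrictInt52.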
        m_jit.rshift64(MacroAssembler::TrustedImm32(JSValue::int52ShiftAmount),
            cachedRecovery.recovery().gpr());
        FALLTHROUGH;
    case UnboxedStrictInt52InGPR:
        m_jit.storePtr(cachedRecovery.recovery().gpr(), address);
        return DataFormatStrictInt52;
    case UnboxedBooleanInGPR:
        m_jit.storePtr(cachedRecovery.recovery().gpr(), address);
        return DataFormatBoolean;
    case UnboxedCellInGPR:
        m_jit.storePtr(cachedRecovery.recovery().gpr(), address);
        return DataFormatCell;
    case UnboxedDoubleInFPR:
        m_jit.storeDouble(cachedRecovery.recovery().fpr(), address);
        return DataFormatDouble;
    case InFPR:
        m_jit.storeDouble(cachedRecovery.recovery().fpr(), address);
        return DataFormatJS;
    case Constant:
        m_jit.storeTrustedValue(cachedRecovery.recovery().constant(), address);
        return DataFormatJS;
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }
}

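// Emits code that turns an unboxed value held in a register into a boxed
// JSValue, following the JSVALUE64 encoding: int32s are zero-extended and
// tagged with TagTypeNumber, booleans are offset by ValueFalse, doubles
// have TagTypeNumber subtracted from their raw bits, and int52s are first
// converted to doubles and then boxed like any other double. The cached
// recovery is updated to describe the new location and format.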
void CallFrameShuffler::emitBox(CachedRecovery& cachedRecovery)
{
    ASSERT(canBox(cachedRecovery));
    if (cachedRecovery.recovery().isConstant())
        return;

    if (cachedRecovery.recovery().isInGPR()) {
        switch (cachedRecovery.recovery().dataFormat()) {
        case DataFormatInt32:
            if (verbose)
                dataLog(" * Boxing ", cachedRecovery.recovery());
            m_jit.zeroExtend32ToPtr(
                cachedRecovery.recovery().gpr(),
                cachedRecovery.recovery().gpr());
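            // A boxed int32 is the zero-extended value with TagTypeNumber
            // OR'd in. Lock the value's register so that acquiring a
            // register for the tag constant cannot clobber it.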
            m_lockedRegisters.set(cachedRecovery.recovery().gpr());
            if (tryAcquireTagTypeNumber())
                m_jit.or64(m_tagTypeNumber, cachedRecovery.recovery().gpr());
            else {
                // TagTypeNumber is not cached in a register, so we have to
                // materialize the tag as an immediate.
                m_jit.or64(MacroAssembler::TrustedImm64(TagTypeNumber),
                    cachedRecovery.recovery().gpr());
            }
            m_lockedRegisters.clear(cachedRecovery.recovery().gpr());
            cachedRecovery.setRecovery(
                ValueRecovery::inGPR(cachedRecovery.recovery().gpr(), DataFormatJS));
            if (verbose)
                dataLog(" into ", cachedRecovery.recovery(), "\n");
            return;
        case DataFormatInt52:
            if (verbose)
                dataLog(" * Boxing ", cachedRecovery.recovery());
            m_jit.rshift64(MacroAssembler::TrustedImm32(JSValue::int52ShiftAmount),
                cachedRecovery.recovery().gpr());
            cachedRecovery.setRecovery(
                ValueRecovery::inGPR(cachedRecovery.recovery().gpr(), DataFormatStrictInt52));
            if (verbose)
                dataLog(" into ", cachedRecovery.recovery(), "\n");
            FALLTHROUGH;
        case DataFormatStrictInt52: {
            if (verbose)
                dataLog(" * Boxing ", cachedRecovery.recovery());
            FPRReg resultFPR = getFreeFPR();
            ASSERT(resultFPR != InvalidFPRReg);
            m_jit.convertInt64ToDouble(cachedRecovery.recovery().gpr(), resultFPR);
            updateRecovery(cachedRecovery, ValueRecovery::inFPR(resultFPR, DataFormatDouble));
            if (verbose)
                dataLog(" into ", cachedRecovery.recovery(), "\n");
            // Break instead of returning: the value is now a double in an
            // FPR, and the isInFPR() path below will finish boxing it.
            break;
        }
        case DataFormatBoolean:
            if (verbose)
                dataLog(" * Boxing ", cachedRecovery.recovery());
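            // An unboxed boolean is 0 or 1; adding ValueFalse produces the
            // boxed encoding (ValueFalse or ValueTrue).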
            m_jit.add32(MacroAssembler::TrustedImm32(ValueFalse),
                cachedRecovery.recovery().gpr());
            cachedRecovery.setRecovery(
                ValueRecovery::inGPR(cachedRecovery.recovery().gpr(), DataFormatJS));
            if (verbose)
                dataLog(" into ", cachedRecovery.recovery(), "\n");
            return;
        default:
            return;
        }
    }

    if (cachedRecovery.recovery().isInFPR()) {
        if (cachedRecovery.recovery().dataFormat() == DataFormatDouble) {
            if (verbose)
                dataLog(" * Boxing ", cachedRecovery.recovery());
            GPRReg resultGPR = cachedRecovery.wantedJSValueRegs().gpr();
            if (resultGPR == InvalidGPRReg || m_registers[resultGPR])
                resultGPR = getFreeGPR();
            ASSERT(resultGPR != InvalidGPRReg);
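            // A boxed double is its raw bits minus TagTypeNumber. Purify
            // NaNs first so that an impure NaN cannot alias a tagged value.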
            m_jit.purifyNaN(cachedRecovery.recovery().fpr());
            m_jit.moveDoubleTo64(cachedRecovery.recovery().fpr(), resultGPR);
            m_lockedRegisters.set(resultGPR);
            if (tryAcquireTagTypeNumber())
                m_jit.sub64(m_tagTypeNumber, resultGPR);
            else
                m_jit.sub64(MacroAssembler::TrustedImm64(TagTypeNumber), resultGPR);
            m_lockedRegisters.clear(resultGPR);
            updateRecovery(cachedRecovery, ValueRecovery::inGPR(resultGPR, DataFormatJS));
            if (verbose)
                dataLog(" into ", cachedRecovery.recovery(), "\n");
            return;
        }
        ASSERT(cachedRecovery.recovery().dataFormat() == DataFormatJS);
        return;
    }

    RELEASE_ASSERT_NOT_REACHED();
}

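// Emits code that loads a recovery currently spilled in the old frame into
// a register: its wanted GPR if that register is free, otherwise a free
// FPR, otherwise any free GPR. The cached recovery is updated to point at
// the chosen register.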
void CallFrameShuffler::emitLoad(CachedRecovery& cachedRecovery)
{
    if (!cachedRecovery.recovery().isInJSStack())
        return;

    if (verbose)
        dataLog(" * Loading ", cachedRecovery.recovery(), " into ");

    VirtualRegister reg = cachedRecovery.recovery().virtualRegister();
    MacroAssembler::Address address { addressForOld(reg) };
    bool tryFPR { true };
    GPRReg resultGPR { cachedRecovery.wantedJSValueRegs().gpr() };

    // If we want a GPR and it's available, that's better than loading
    // into an FPR.
    if (resultGPR != InvalidGPRReg && !m_registers[resultGPR]
        && !m_lockedRegisters.get(resultGPR) && cachedRecovery.loadsIntoGPR())
        tryFPR = false;

    // Otherwise, we prefer loading into FPRs if possible
    if (tryFPR && cachedRecovery.loadsIntoFPR()) {
        FPRReg resultFPR { cachedRecovery.wantedFPR() };
        if (resultFPR == InvalidFPRReg || m_registers[resultFPR] || m_lockedRegisters.get(resultFPR))
            resultFPR = getFreeFPR();
        if (resultFPR != InvalidFPRReg) {
            m_jit.loadDouble(address, resultFPR);
            DataFormat dataFormat = DataFormatJS;
            // We could be transforming a DataFormatCell into a
            // DataFormatJS here - but that's OK.
            if (cachedRecovery.recovery().dataFormat() == DataFormatDouble)
                dataFormat = DataFormatDouble;
            updateRecovery(cachedRecovery,
                ValueRecovery::inFPR(resultFPR, dataFormat));
            if (verbose)
                dataLog(cachedRecovery.recovery(), "\n");
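            // If this was the slot at the danger frontier, the frontier
            // may have moved; recompute it.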
            if (reg == newAsOld(dangerFrontier()))
                updateDangerFrontier();
            return;
        }
    }

    ASSERT(cachedRecovery.loadsIntoGPR());
    if (resultGPR == InvalidGPRReg || m_registers[resultGPR] || m_lockedRegisters.get(resultGPR))
        resultGPR = getFreeGPR();
    ASSERT(resultGPR != InvalidGPRReg);
    m_jit.loadPtr(address, resultGPR);
    updateRecovery(cachedRecovery,
        ValueRecovery::inGPR(resultGPR, cachedRecovery.recovery().dataFormat()));
    if (verbose)
        dataLog(cachedRecovery.recovery(), "\n");
    if (reg == newAsOld(dangerFrontier()))
        updateDangerFrontier();
}

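// Returns true if emitLoad() would be able to find a register for the
// given cached recovery: either it is not on the stack at all, or there is
// a free register of a kind it can be loaded into.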
bool CallFrameShuffler::canLoad(CachedRecovery& cachedRecovery)
{
    if (!cachedRecovery.recovery().isInJSStack())
        return true;

    ASSERT(cachedRecovery.loadsIntoFPR() || cachedRecovery.loadsIntoGPR());

    if (cachedRecovery.loadsIntoFPR() && getFreeFPR() != InvalidFPRReg)
        return true;

    if (cachedRecovery.loadsIntoGPR() && getFreeGPR() != InvalidGPRReg)
        return true;

    return false;
}

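// Emits code that moves a cached recovery into its wanted register,
// first evicting whatever currently occupies that register into a free
// one, then boxing or moving the value as its format requires.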
void CallFrameShuffler::emitDisplace(CachedRecovery& cachedRecovery)
{
    Reg wantedReg;
    if (!(wantedReg = Reg { cachedRecovery.wantedJSValueRegs().gpr() }))
        wantedReg = Reg { cachedRecovery.wantedFPR() };
    ASSERT(wantedReg);
    ASSERT(!m_lockedRegisters.get(wantedReg));

    if (CachedRecovery* current = m_registers[wantedReg]) {
        if (current == &cachedRecovery) {
            if (verbose)
                dataLog(" + ", wantedReg, " is OK\n");
            return;
        }
        // We could handle this case more cleverly by finding and
        // resolving cycles. However, ending up in this situation is
        // extremely rare, and should actually be outright impossible
        // for non-FTL tiers, since:
        //  (a) All doubles have been converted into JSValues with
        //      ValueRep nodes, so FPRs are initially free
        //
        //  (b) The only recoveries with wanted registers are the
        //      callee (which always starts out in a register) and
        //      the callee-save registers
        //
        //  (c) The callee-save registers are the first things we
        //      load (after the return PC), and they are loaded as
        //      JSValues
        //
        //  (d) We prefer loading JSValues into FPRs if their
        //      wanted GPR is not available
        //
        //  (e) If we end up spilling some registers with a
        //      target, we won't load them again before the very
        //      end of the algorithm
        //
        // Combined, this means that we will never load a recovery
        // with a wanted GPR into any GPR other than its wanted
        // GPR. The callee could however have been initially in
        // one of the callee-save registers - but since the wanted
        // GPR for the callee is always regT0, it will be the
        // first one to be displaced, and we won't see it when
        // handling any of the callee-save registers.
        //
        // Thus, the only way we could ever reach this path is in
        // the FTL, when there is so much register pressure that we
        // absolutely need to load the callee-save registers into
        // different GPRs initially, but not enough pressure to
        // then have to spill all of them. And even in that case,
        // depending on the order in which B3 saves the
        // callee-saves, we will probably still be safe. Anyway,
        // the couple of extra move instructions compared to an
        // efficient cycle-based algorithm are not going to hurt
        // us.
        if (wantedReg.isFPR()) {
            FPRReg tempFPR = getFreeFPR();
            if (verbose)
                dataLog(" * Moving ", wantedReg, " into ", tempFPR, "\n");
            m_jit.moveDouble(wantedReg.fpr(), tempFPR);
            updateRecovery(*current,
                ValueRecovery::inFPR(tempFPR, current->recovery().dataFormat()));
        } else {
            GPRReg tempGPR = getFreeGPR();
            if (verbose)
                dataLog(" * Moving ", wantedReg.gpr(), " into ", tempGPR, "\n");
            m_jit.move(wantedReg.gpr(), tempGPR);
            updateRecovery(*current,
                ValueRecovery::inGPR(tempGPR, current->recovery().dataFormat()));
        }
    }
    ASSERT(!m_registers[wantedReg]);

    if (cachedRecovery.recovery().isConstant()) {
        // We only care about callee saves for wanted FPRs, and those are never constants
        ASSERT(wantedReg.isGPR());
        if (verbose)
            dataLog(" * Loading ", cachedRecovery.recovery().constant(), " into ", wantedReg, "\n");
        m_jit.moveTrustedValue(cachedRecovery.recovery().constant(), JSValueRegs { wantedReg.gpr() });
        updateRecovery(
            cachedRecovery,
            ValueRecovery::inRegister(wantedReg, DataFormatJS));
    } else if (cachedRecovery.recovery().isInGPR()) {
        if (verbose)
            dataLog(" * Moving ", cachedRecovery.recovery(), " into ", wantedReg, "\n");
        if (wantedReg.isGPR())
            m_jit.move(cachedRecovery.recovery().gpr(), wantedReg.gpr());
        else
            m_jit.move64ToDouble(cachedRecovery.recovery().gpr(), wantedReg.fpr());
        RELEASE_ASSERT(cachedRecovery.recovery().dataFormat() == DataFormatJS);
        updateRecovery(cachedRecovery,
            ValueRecovery::inRegister(wantedReg, DataFormatJS));
    } else {
        ASSERT(cachedRecovery.recovery().isInFPR());
        if (cachedRecovery.recovery().dataFormat() == DataFormatDouble) {
            // We only care about callee saves for wanted FPRs, and those are always DataFormatJS
            ASSERT(wantedReg.isGPR());
            // This will automatically pick the wanted GPR
            emitBox(cachedRecovery);
        } else {
            if (verbose)
                dataLog(" * Moving ", cachedRecovery.recovery().fpr(), " into ", wantedReg, "\n");
            if (wantedReg.isGPR())
                m_jit.moveDoubleTo64(cachedRecovery.recovery().fpr(), wantedReg.gpr());
            else
                m_jit.moveDouble(cachedRecovery.recovery().fpr(), wantedReg.fpr());
            RELEASE_ASSERT(cachedRecovery.recovery().dataFormat() == DataFormatJS);
            updateRecovery(cachedRecovery,
                ValueRecovery::inRegister(wantedReg, DataFormatJS));
        }
    }

    ASSERT(m_registers[wantedReg] == &cachedRecovery);
}

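// Attempts to cache the TagTypeNumber constant in a free GPR so that
// repeated boxing operations can use a register operand instead of
// rematerializing the immediate. Returns false if no GPR is free, in which
// case callers fall back to the immediate form.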
bool CallFrameShuffler::tryAcquireTagTypeNumber()
{
    if (m_tagTypeNumber != InvalidGPRReg)
        return true;

    m_tagTypeNumber = getFreeGPR();

    if (m_tagTypeNumber == InvalidGPRReg)
        return false;

    m_lockedRegisters.set(m_tagTypeNumber);
    m_jit.move(MacroAssembler::TrustedImm64(TagTypeNumber), m_tagTypeNumber);
    return true;
}

} // namespace JSC

#endif // ENABLE(JIT) && USE(JSVALUE64)
