1 | /* |
2 | * Copyright (C) 2015-2019 Apple Inc. All rights reserved. |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without |
5 | * modification, are permitted provided that the following conditions |
6 | * are met: |
7 | * 1. Redistributions of source code must retain the above copyright |
8 | * notice, this list of conditions and the following disclaimer. |
9 | * 2. Redistributions in binary form must reproduce the above copyright |
10 | * notice, this list of conditions and the following disclaimer in the |
11 | * documentation and/or other materials provided with the distribution. |
12 | * |
13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY |
14 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
15 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR |
17 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, |
18 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
19 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR |
20 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY |
21 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
22 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
23 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
24 | */ |
25 | |
26 | #include "config.h" |
27 | #include "JITMulGenerator.h" |
28 | |
29 | #if ENABLE(JIT) |
30 | |
31 | #include "ArithProfile.h" |
32 | #include "JITMathIC.h" |
33 | |
34 | namespace JSC { |
35 | |
// Emits the inline fast path for op_mul inside a math IC, picking a strategy
// from the operand types the BinaryArithProfile has observed so far.
// Returns:
//   - DontGenerate: no inline path is worth emitting (slow path only).
//   - GeneratedFastPath: a specialized int32 or double path was emitted;
//     failed type guards and overflow are appended to state.slowPathJumps.
//   - GenerateFullSnippet: caller should emit the generic snippet
//     (generateFastPath below) instead.
JITMathICInlineResult JITMulGenerator::generateInline(CCallHelpers& jit, MathICGenerationState& state, const BinaryArithProfile* arithProfile)
{
    // We default to speculating int32.
    ObservedType lhs = ObservedType().withInt32();
    ObservedType rhs = ObservedType().withInt32();
    if (arithProfile) {
        lhs = arithProfile->lhsObservedType();
        rhs = arithProfile->rhsObservedType();
    }

    // Both operands have only ever been non-numbers: any inline path would
    // always bail to the slow path, so don't emit one at all.
    if (lhs.isOnlyNonNumber() && rhs.isOnlyNonNumber())
        return JITMathICInlineResult::DontGenerate;

    // Double fast path: both operands have only ever been numbers and neither
    // is a compile-time constant.
    if (lhs.isOnlyNumber() && rhs.isOnlyNumber() && !m_leftOperand.isConst() && !m_rightOperand.isConst()) {
        if (!jit.supportsFloatingPoint())
            return JITMathICInlineResult::DontGenerate;

        ASSERT(m_left);
        ASSERT(m_right);
        // Guard that each operand really is a number, unless the operand info
        // already proves it.
        if (!m_leftOperand.definitelyIsNumber())
            state.slowPathJumps.append(jit.branchIfNotNumber(m_left, m_scratchGPR));
        if (!m_rightOperand.definitelyIsNumber())
            state.slowPathJumps.append(jit.branchIfNotNumber(m_right, m_scratchGPR));
        // This path only multiplies double * double; an int32 on either side
        // takes the slow path rather than being converted here.
        state.slowPathJumps.append(jit.branchIfInt32(m_left));
        state.slowPathJumps.append(jit.branchIfInt32(m_right));
        jit.unboxDoubleNonDestructive(m_left, m_leftFPR, m_scratchGPR, m_scratchFPR);
        jit.unboxDoubleNonDestructive(m_right, m_rightFPR, m_scratchGPR, m_scratchFPR);
        jit.mulDouble(m_rightFPR, m_leftFPR);
        jit.boxDouble(m_leftFPR, m_result);

        return JITMathICInlineResult::GeneratedFastPath;
    }

    // Int32 fast path: each side is either observed-int32 or a positive
    // constant int32. A positive constant cannot produce a -0 result, which
    // is why the zero check only appears in the var * var branch below.
    if ((lhs.isOnlyInt32() || m_leftOperand.isPositiveConstInt32()) && (rhs.isOnlyInt32() || m_rightOperand.isPositiveConstInt32())) {
        // At most one side may be constant (const * const is expected to be
        // handled before reaching this generator).
        ASSERT(!m_leftOperand.isPositiveConstInt32() || !m_rightOperand.isPositiveConstInt32());
        if (!m_leftOperand.isPositiveConstInt32())
            state.slowPathJumps.append(jit.branchIfNotInt32(m_left));
        if (!m_rightOperand.isPositiveConstInt32())
            state.slowPathJumps.append(jit.branchIfNotInt32(m_right));

        if (m_leftOperand.isPositiveConstInt32() || m_rightOperand.isPositiveConstInt32()) {
            // var * positive-const: only overflow can fail; result lands in
            // m_scratchGPR.
            JSValueRegs var = m_leftOperand.isPositiveConstInt32() ? m_right : m_left;
            int32_t constValue = m_leftOperand.isPositiveConstInt32() ? m_leftOperand.asConstInt32() : m_rightOperand.asConstInt32();
            state.slowPathJumps.append(jit.branchMul32(CCallHelpers::Overflow, var.payloadGPR(), CCallHelpers::Imm32(constValue), m_scratchGPR));
        } else {
            state.slowPathJumps.append(jit.branchMul32(CCallHelpers::Overflow, m_right.payloadGPR(), m_left.payloadGPR(), m_scratchGPR));
            state.slowPathJumps.append(jit.branchTest32(CCallHelpers::Zero, m_scratchGPR)); // Go slow if potential negative zero.
        }
        jit.boxInt32(m_scratchGPR, m_result);

        return JITMathICInlineResult::GeneratedFastPath;
    }

    // Mixed or insufficiently-observed types: fall back to the full snippet,
    // which carries its own int32/double type dispatch.
    return JITMathICInlineResult::GenerateFullSnippet;
}
91 | |
// Emits the full fast-path snippet for op_mul: an int32 * int32 path that
// falls through to a double * double path, with optional result profiling.
// Jumps to the normal exit are appended to endJumpList; type-check and
// overflow failures go to slowPathJumpList. Returns false when no fast path
// can be generated (an operand can never be a number), true otherwise.
bool JITMulGenerator::generateFastPath(CCallHelpers& jit, CCallHelpers::JumpList& endJumpList, CCallHelpers::JumpList& slowPathJumpList, const BinaryArithProfile* arithProfile, bool shouldEmitProfiling)
{
    // The scratch GPR must not alias the operand registers (it holds the
    // int32 product and is clobbered by unboxing/profiling below).
    ASSERT(m_scratchGPR != InvalidGPRReg);
    ASSERT(m_scratchGPR != m_left.payloadGPR());
    ASSERT(m_scratchGPR != m_right.payloadGPR());
#if USE(JSVALUE64)
    ASSERT(m_scratchGPR != m_result.payloadGPR());
#else
    ASSERT(m_scratchGPR != m_left.tagGPR());
    ASSERT(m_scratchGPR != m_right.tagGPR());
    ASSERT(m_scratchFPR != InvalidFPRReg);
#endif

    // At most one operand may be a positive constant int32.
    ASSERT(!m_leftOperand.isPositiveConstInt32() || !m_rightOperand.isPositiveConstInt32());

    // If either side can never be a number, the fast path is useless.
    if (!m_leftOperand.mightBeNumber() || !m_rightOperand.mightBeNumber())
        return false;

    if (m_leftOperand.isPositiveConstInt32() || m_rightOperand.isPositiveConstInt32()) {
        // var * positive-const case. Identify which side is the variable.
        JSValueRegs var = m_leftOperand.isPositiveConstInt32() ? m_right : m_left;
        SnippetOperand& varOpr = m_leftOperand.isPositiveConstInt32() ? m_rightOperand : m_leftOperand;
        SnippetOperand& constOpr = m_leftOperand.isPositiveConstInt32() ? m_leftOperand : m_rightOperand;

        // Try to do intVar * intConstant.
        CCallHelpers::Jump notInt32 = jit.branchIfNotInt32(var);

        // Multiply into the result payload register unless it aliases the
        // variable operand (branchMul32 would clobber it on the slow path),
        // in which case use the scratch register instead.
        GPRReg multiplyResultGPR = m_result.payloadGPR();
        if (multiplyResultGPR == var.payloadGPR())
            multiplyResultGPR = m_scratchGPR;

        slowPathJumpList.append(jit.branchMul32(CCallHelpers::Overflow, var.payloadGPR(), CCallHelpers::Imm32(constOpr.asConstInt32()), multiplyResultGPR));

        jit.boxInt32(multiplyResultGPR, m_result);
        endJumpList.append(jit.jump());

        // Without FP support the non-int32 case must go to the slow path.
        if (!jit.supportsFloatingPoint()) {
            slowPathJumpList.append(notInt32);
            return true;
        }

        // Try to do doubleVar * double(intConstant).
        notInt32.link(&jit);
        if (!varOpr.definitelyIsNumber())
            slowPathJumpList.append(jit.branchIfNotNumber(var, m_scratchGPR));

        jit.unboxDoubleNonDestructive(var, m_leftFPR, m_scratchGPR, m_scratchFPR);

        // Materialize the constant as a double in the right-hand FPR.
        jit.move(CCallHelpers::Imm32(constOpr.asConstInt32()), m_scratchGPR);
        jit.convertInt32ToDouble(m_scratchGPR, m_rightFPR);

        // Fall thru to doubleVar * doubleVar.

    } else {
        ASSERT(!m_leftOperand.isPositiveConstInt32() && !m_rightOperand.isPositiveConstInt32());

        CCallHelpers::Jump leftNotInt;
        CCallHelpers::Jump rightNotInt;

        // Try to do intVar * intVar.
        leftNotInt = jit.branchIfNotInt32(m_left);
        rightNotInt = jit.branchIfNotInt32(m_right);

        slowPathJumpList.append(jit.branchMul32(CCallHelpers::Overflow, m_right.payloadGPR(), m_left.payloadGPR(), m_scratchGPR));
        slowPathJumpList.append(jit.branchTest32(CCallHelpers::Zero, m_scratchGPR)); // Go slow if potential negative zero.

        jit.boxInt32(m_scratchGPR, m_result);
        endJumpList.append(jit.jump());

        // Without FP support any non-int32 operand must go to the slow path.
        if (!jit.supportsFloatingPoint()) {
            slowPathJumpList.append(leftNotInt);
            slowPathJumpList.append(rightNotInt);
            return true;
        }

        // Left operand is not int32: it must be a double (after the number
        // checks); the right operand may still be either.
        leftNotInt.link(&jit);
        if (!m_leftOperand.definitelyIsNumber())
            slowPathJumpList.append(jit.branchIfNotNumber(m_left, m_scratchGPR));
        if (!m_rightOperand.definitelyIsNumber())
            slowPathJumpList.append(jit.branchIfNotNumber(m_right, m_scratchGPR));

        jit.unboxDoubleNonDestructive(m_left, m_leftFPR, m_scratchGPR, m_scratchFPR);
        CCallHelpers::Jump rightIsDouble = jit.branchIfNotInt32(m_right);

        // Left is double, right is int32: convert right to double.
        jit.convertInt32ToDouble(m_right.payloadGPR(), m_rightFPR);
        CCallHelpers::Jump rightWasInteger = jit.jump();

        // Left was int32 but right was not: check right is a number, then
        // convert the int32 left operand to double.
        rightNotInt.link(&jit);
        if (!m_rightOperand.definitelyIsNumber())
            slowPathJumpList.append(jit.branchIfNotNumber(m_right, m_scratchGPR));

        jit.convertInt32ToDouble(m_left.payloadGPR(), m_leftFPR);

        // Either way, right is a double at this point: unbox it.
        rightIsDouble.link(&jit);
        jit.unboxDoubleNonDestructive(m_right, m_rightFPR, m_scratchGPR, m_scratchFPR);

        rightWasInteger.link(&jit);

        // Fall thru to doubleVar * doubleVar.
    }

    // Do doubleVar * doubleVar.
    jit.mulDouble(m_rightFPR, m_leftFPR);

    if (!arithProfile || !shouldEmitProfiling)
        jit.boxDouble(m_leftFPR, m_result);
    else {
        // The Int52 overflow check below intentionally omits 1ll << 51 as a valid negative Int52 value.
        // Therefore, we will get a false positive if the result is that value. This is intentionally
        // done to simplify the checking algorithm.

        // Bit pattern of IEEE 754 -0.0 (sign bit only), used to detect a
        // negative-zero double result for profiling.
        const int64_t negativeZeroBits = 1ll << 63;
#if USE(JSVALUE64)
        jit.moveDoubleTo64(m_leftFPR, m_result.payloadGPR());

        CCallHelpers::Jump notNegativeZero = jit.branch64(CCallHelpers::NotEqual, m_result.payloadGPR(), CCallHelpers::TrustedImm64(negativeZeroBits));

        arithProfile->emitUnconditionalSet(jit, ObservedResults::NegZeroDouble);
        CCallHelpers::Jump done = jit.jump();

        notNegativeZero.link(&jit);
        arithProfile->emitUnconditionalSet(jit, ObservedResults::NonNegZeroDouble);

        // Extract the biased exponent field (bits 52-62) of the double; an
        // exponent above 0x431 means the magnitude is too large for Int52,
        // so record the overflow in the profile.
        jit.move(m_result.payloadGPR(), m_scratchGPR);
        jit.urshiftPtr(CCallHelpers::Imm32(52), m_scratchGPR);
        jit.and32(CCallHelpers::Imm32(0x7ff), m_scratchGPR);
        CCallHelpers::Jump noInt52Overflow = jit.branch32(CCallHelpers::LessThanOrEqual, m_scratchGPR, CCallHelpers::TrustedImm32(0x431));

        arithProfile->emitUnconditionalSet(jit, ObservedResults::Int52Overflow);
        noInt52Overflow.link(&jit);

        done.link(&jit);
        jit.sub64(GPRInfo::numberTagRegister, m_result.payloadGPR()); // Box the double.
#else
        jit.boxDouble(m_leftFPR, m_result);
        // On 32-bit, -0.0 is payload == 0 and tag == high word of the -0.0
        // bit pattern.
        CCallHelpers::JumpList notNegativeZero;
        notNegativeZero.append(jit.branch32(CCallHelpers::NotEqual, m_result.payloadGPR(), CCallHelpers::TrustedImm32(0)));
        notNegativeZero.append(jit.branch32(CCallHelpers::NotEqual, m_result.tagGPR(), CCallHelpers::TrustedImm32(negativeZeroBits >> 32)));

        arithProfile->emitUnconditionalSet(jit, ObservedResults::NegZeroDouble);
        CCallHelpers::Jump done = jit.jump();

        notNegativeZero.link(&jit);
        arithProfile->emitUnconditionalSet(jit, ObservedResults::NonNegZeroDouble);

        // The exponent field lives in the tag (high) word: shift by 52 - 32
        // and mask to 11 bits, then compare against the same 0x431 threshold
        // as the 64-bit path.
        jit.move(m_result.tagGPR(), m_scratchGPR);
        jit.urshiftPtr(CCallHelpers::Imm32(52 - 32), m_scratchGPR);
        jit.and32(CCallHelpers::Imm32(0x7ff), m_scratchGPR);
        CCallHelpers::Jump noInt52Overflow = jit.branch32(CCallHelpers::LessThanOrEqual, m_scratchGPR, CCallHelpers::TrustedImm32(0x431));

        arithProfile->emitUnconditionalSet(jit, ObservedResults::Int52Overflow);

        endJumpList.append(noInt52Overflow);
        // If the scratch register aliases a result register, the exponent
        // extraction above clobbered the boxed result, so re-box it here.
        // NOTE(review): only the fall-through (Int52Overflow) path re-boxes;
        // noInt52Overflow jumps straight to the end above — presumably
        // callers never pass a scratch register aliasing the result on this
        // path; verify against call sites.
        if (m_scratchGPR == m_result.tagGPR() || m_scratchGPR == m_result.payloadGPR())
            jit.boxDouble(m_leftFPR, m_result);

        endJumpList.append(done);
#endif
    }

    return true;
}
253 | |
254 | } // namespace JSC |
255 | |
256 | #endif // ENABLE(JIT) |
257 | |