/*
 * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "InlineAccess.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "JSArray.h"
#include "JSCellInlines.h"
#include "LinkBuffer.h"
#include "ScratchRegisterAllocator.h"
#include "Structure.h"
#include "StructureStubInfo.h"

namespace JSC {

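// Emits each kind of inline access with dummy structure IDs and offsets, logs
// the size of the code generated for each, and then crashes. This is a
// developer aid for choosing the per-platform inline IC size budgets that the
// inline region reserved for each access type must accommodate.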
void InlineAccess::dumpCacheSizesAndCrash()
{
    GPRReg base = GPRInfo::regT0;
    GPRReg value = GPRInfo::regT1;
#if USE(JSVALUE32_64)
    JSValueRegs regs(base, value);
#else
    JSValueRegs regs(base);
#endif
    {
        CCallHelpers jit;

        GPRReg scratchGPR = value;
        jit.patchableBranch8(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(base, JSCell::typeInfoTypeOffset()),
            CCallHelpers::TrustedImm32(StringType));

        jit.loadPtr(CCallHelpers::Address(base, JSString::offsetOfValue()), scratchGPR);
        auto isRope = jit.branchIfRopeStringImpl(scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, StringImpl::lengthMemoryOffset()), regs.payloadGPR());
        auto done = jit.jump();

        isRope.link(&jit);
        jit.load32(CCallHelpers::Address(base, JSRopeString::offsetOfLength()), regs.payloadGPR());

        done.link(&jit);
        jit.boxInt32(regs.payloadGPR(), regs);

        dataLog("string length size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

    {
        CCallHelpers jit;

        GPRReg scratchGPR = value;
        jit.load8(CCallHelpers::Address(base, JSCell::indexingTypeAndMiscOffset()), value);
        jit.and32(CCallHelpers::TrustedImm32(IsArray | IndexingShapeMask), value);
        jit.patchableBranch32(
            CCallHelpers::NotEqual, value, CCallHelpers::TrustedImm32(IsArray | ContiguousShape));
        jit.loadPtr(CCallHelpers::Address(base, JSObject::butterflyOffset()), value);
        jit.load32(CCallHelpers::Address(value, ArrayStorage::lengthOffset()), value);
        jit.boxInt32(scratchGPR, regs);

        dataLog("array length size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

    {
        CCallHelpers jit;

        jit.patchableBranch32(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(base, JSCell::structureIDOffset()),
            MacroAssembler::TrustedImm32(0x000ab21ca));
        jit.loadPtr(
            CCallHelpers::Address(base, JSObject::butterflyOffset()),
            value);
        GPRReg storageGPR = value;
        jit.loadValue(
            CCallHelpers::Address(storageGPR, 0x000ab21ca), regs);

        dataLog("out of line offset cache size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

    {
        CCallHelpers jit;

        jit.patchableBranch32(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(base, JSCell::structureIDOffset()),
            MacroAssembler::TrustedImm32(0x000ab21ca));
        jit.loadValue(
            MacroAssembler::Address(base, 0x000ab21ca), regs);

        dataLog("inline offset cache size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

    {
        CCallHelpers jit;

        jit.patchableBranch32(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(base, JSCell::structureIDOffset()),
            MacroAssembler::TrustedImm32(0x000ab21ca));

        jit.storeValue(
            regs, MacroAssembler::Address(base, 0x000ab21ca));

        dataLog("replace cache size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

    {
        CCallHelpers jit;

        jit.patchableBranch32(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(base, JSCell::structureIDOffset()),
            MacroAssembler::TrustedImm32(0x000ab21ca));

        jit.loadPtr(MacroAssembler::Address(base, JSObject::butterflyOffset()), value);
        // Store through the loaded butterfly, as a real out-of-line replace would.
        jit.storeValue(
            regs,
            MacroAssembler::Address(value, 120342));

        dataLog("replace out of line cache size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

    CRASH();
}

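// Attempts to copy the code in |jit| into the inline region reserved at
// stubInfo.patch.start. If the code fits, invokes |function| to link any
// outstanding branches and returns true; otherwise returns false so the caller
// can fall back to an out-of-line stub.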
template <typename Function>
ALWAYS_INLINE static bool linkCodeInline(const char* name, CCallHelpers& jit, StructureStubInfo& stubInfo, const Function& function)
{
    if (jit.m_assembler.buffer().codeSize() <= stubInfo.patch.inlineSize()) {
        bool needsBranchCompaction = true;
        LinkBuffer linkBuffer(jit, stubInfo.patch.start, stubInfo.patch.inlineSize(), JITCompilationMustSucceed, needsBranchCompaction);
        ASSERT(linkBuffer.isValid());
        function(linkBuffer);
        FINALIZE_CODE(linkBuffer, NoPtrTag, "InlineAccessType: '%s'", name);
        return true;
    }

    // This is helpful when determining the size for inline ICs on various
    // platforms. You want to choose a size that usually succeeds, but the length
    // of the code we generate can vary from run to run just because of randomness.
    // It's helpful to flip this on when running tests or browsing the web to see
    // how often inlining fails; you don't want an IC size that always fails.
    constexpr bool failIfCantInline = false;
    if (failIfCantInline) {
        dataLog("Failure for: ", name, "\n");
        dataLog("real size: ", jit.m_assembler.buffer().codeSize(), " inline size: ", stubInfo.patch.inlineSize(), "\n");
        CRASH();
    }

    return false;
}

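// Inline fast path for a self property load: check the structure ID, load the
// butterfly if the offset is out-of-line, then load the value. The value
// register doubles as the storage register, so no extra scratch is required.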
bool InlineAccess::generateSelfPropertyAccess(StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset)
{
    if (!stubInfo.hasConstantIdentifier)
        return false;

    CCallHelpers jit;

    GPRReg base = stubInfo.baseGPR();
    JSValueRegs value = stubInfo.valueRegs();

    auto branchToSlowPath = jit.patchableBranch32(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(base, JSCell::structureIDOffset()),
        MacroAssembler::TrustedImm32(bitwise_cast<uint32_t>(structure->id())));
    GPRReg storage;
    if (isInlineOffset(offset))
        storage = base;
    else {
        jit.loadPtr(CCallHelpers::Address(base, JSObject::butterflyOffset()), value.payloadGPR());
        storage = value.payloadGPR();
    }

    jit.loadValue(
        MacroAssembler::Address(storage, offsetRelativeToBase(offset)), value);

    bool linkedCodeInline = linkCodeInline("property access", jit, stubInfo, [&] (LinkBuffer& linkBuffer) {
        linkBuffer.link(branchToSlowPath, stubInfo.slowPathStartLocation());
    });
    return linkedCodeInline;
}

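// Picks a register the IC does not already use for its base or value operands.
// Inline code cannot spill, so if the allocator would have to reuse (and hence
// spill) a register, we report failure with InvalidGPRReg instead.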
ALWAYS_INLINE static GPRReg getScratchRegister(StructureStubInfo& stubInfo)
{
    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(stubInfo.baseGPR());
    allocator.lock(stubInfo.patch.valueGPR);
#if USE(JSVALUE32_64)
    allocator.lock(stubInfo.patch.baseTagGPR);
    allocator.lock(stubInfo.patch.valueTagGPR);
#endif
    GPRReg scratch = allocator.allocateScratchGPR();
    if (allocator.didReuseRegisters())
        return InvalidGPRReg;
    return scratch;
}

ALWAYS_INLINE static bool hasFreeRegister(StructureStubInfo& stubInfo)
{
    return getScratchRegister(stubInfo) != InvalidGPRReg;
}

bool InlineAccess::canGenerateSelfPropertyReplace(StructureStubInfo& stubInfo, PropertyOffset offset)
{
    if (!stubInfo.hasConstantIdentifier)
        return false;

    if (isInlineOffset(offset))
        return true;

    return hasFreeRegister(stubInfo);
}

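// Inline fast path for a self property store. Unlike a load, the value
// registers must stay live across the store, so an out-of-line offset needs a
// genuine scratch register to hold the butterfly; canGenerateSelfPropertyReplace()
// guarantees one is available.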
bool InlineAccess::generateSelfPropertyReplace(StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset)
{
    if (!stubInfo.hasConstantIdentifier)
        return false;

    ASSERT(canGenerateSelfPropertyReplace(stubInfo, offset));

    CCallHelpers jit;

    GPRReg base = stubInfo.baseGPR();
    JSValueRegs value = stubInfo.valueRegs();

    auto branchToSlowPath = jit.patchableBranch32(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(base, JSCell::structureIDOffset()),
        MacroAssembler::TrustedImm32(bitwise_cast<uint32_t>(structure->id())));

    GPRReg storage;
    if (isInlineOffset(offset))
        storage = base;
    else {
        storage = getScratchRegister(stubInfo);
        ASSERT(storage != InvalidGPRReg);
        jit.loadPtr(CCallHelpers::Address(base, JSObject::butterflyOffset()), storage);
    }

    jit.storeValue(
        value, MacroAssembler::Address(storage, offsetRelativeToBase(offset)));

    bool linkedCodeInline = linkCodeInline("property replace", jit, stubInfo, [&] (LinkBuffer& linkBuffer) {
        linkBuffer.link(branchToSlowPath, stubInfo.slowPathStartLocation());
    });
    return linkedCodeInline;
}

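// Array length can be inlined only when a scratch register is available and the
// indexing type can be checked with a single mask-and-compare; ArrayStorage
// shapes and ArrayClass are handled out of line.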
bool InlineAccess::isCacheableArrayLength(StructureStubInfo& stubInfo, JSArray* array)
{
    ASSERT(array->indexingType() & IsArray);

    if (!stubInfo.hasConstantIdentifier)
        return false;

    if (!hasFreeRegister(stubInfo))
        return false;

    return !hasAnyArrayStorage(array->indexingType()) && array->indexingType() != ArrayClass;
}

bool InlineAccess::generateArrayLength(StructureStubInfo& stubInfo, JSArray* array)
{
    ASSERT(isCacheableArrayLength(stubInfo, array));

    if (!stubInfo.hasConstantIdentifier)
        return false;

    CCallHelpers jit;

    GPRReg base = stubInfo.baseGPR();
    JSValueRegs value = stubInfo.valueRegs();
    GPRReg scratch = getScratchRegister(stubInfo);

    jit.load8(CCallHelpers::Address(base, JSCell::indexingTypeAndMiscOffset()), scratch);
    jit.and32(CCallHelpers::TrustedImm32(IndexingTypeMask), scratch);
    auto branchToSlowPath = jit.patchableBranch32(
        CCallHelpers::NotEqual, scratch, CCallHelpers::TrustedImm32(array->indexingType()));
    jit.loadPtr(CCallHelpers::Address(base, JSObject::butterflyOffset()), value.payloadGPR());
    jit.load32(CCallHelpers::Address(value.payloadGPR(), ArrayStorage::lengthOffset()), value.payloadGPR());
    jit.boxInt32(value.payloadGPR(), value);

    bool linkedCodeInline = linkCodeInline("array length", jit, stubInfo, [&] (LinkBuffer& linkBuffer) {
        linkBuffer.link(branchToSlowPath, stubInfo.slowPathStartLocation());
    });
    return linkedCodeInline;
}

bool InlineAccess::isCacheableStringLength(StructureStubInfo& stubInfo)
{
    if (!stubInfo.hasConstantIdentifier)
        return false;

    return hasFreeRegister(stubInfo);
}

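// Inline fast path for string length. A resolved string keeps its length on
// its StringImpl; a rope has no impl yet, so the length is read from the
// JSRopeString itself.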
bool InlineAccess::generateStringLength(StructureStubInfo& stubInfo)
{
    ASSERT(isCacheableStringLength(stubInfo));

    if (!stubInfo.hasConstantIdentifier)
        return false;

    CCallHelpers jit;

    GPRReg base = stubInfo.baseGPR();
    JSValueRegs value = stubInfo.valueRegs();
    GPRReg scratch = getScratchRegister(stubInfo);

    auto branchToSlowPath = jit.patchableBranch8(
        CCallHelpers::NotEqual,
        CCallHelpers::Address(base, JSCell::typeInfoTypeOffset()),
        CCallHelpers::TrustedImm32(StringType));

    jit.loadPtr(CCallHelpers::Address(base, JSString::offsetOfValue()), scratch);
    auto isRope = jit.branchIfRopeStringImpl(scratch);
    jit.load32(CCallHelpers::Address(scratch, StringImpl::lengthMemoryOffset()), value.payloadGPR());
    auto done = jit.jump();

    isRope.link(&jit);
    jit.load32(CCallHelpers::Address(base, JSRopeString::offsetOfLength()), value.payloadGPR());

    done.link(&jit);
    jit.boxInt32(value.payloadGPR(), value);

    bool linkedCodeInline = linkCodeInline("string length", jit, stubInfo, [&] (LinkBuffer& linkBuffer) {
        linkBuffer.link(branchToSlowPath, stubInfo.slowPathStartLocation());
    });
    return linkedCodeInline;
}

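// Inline fast path for "in" against a known structure: if the structure check
// passes, the property is known to exist, so the result is the constant true.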
bool InlineAccess::generateSelfInAccess(StructureStubInfo& stubInfo, Structure* structure)
{
    if (!stubInfo.hasConstantIdentifier)
        return false;

    CCallHelpers jit;

    GPRReg base = stubInfo.baseGPR();
    JSValueRegs value = stubInfo.valueRegs();

    auto branchToSlowPath = jit.patchableBranch32(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(base, JSCell::structureIDOffset()),
        MacroAssembler::TrustedImm32(bitwise_cast<uint32_t>(structure->id())));
    jit.boxBoolean(true, value);

    bool linkedCodeInline = linkCodeInline("in access", jit, stubInfo, [&] (LinkBuffer& linkBuffer) {
        linkBuffer.link(branchToSlowPath, stubInfo.slowPathStartLocation());
    });
    return linkedCodeInline;
}

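// Replaces whatever is currently in the inline region with an unconditional
// jump to |target|, typically an out-of-line stub generated once the inline
// cache no longer suffices.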
void InlineAccess::rewireStubAsJump(StructureStubInfo& stubInfo, CodeLocationLabel<JITStubRoutinePtrTag> target)
{
    CCallHelpers jit;

    auto jump = jit.jump();

    // We don't need a nop sled here because nobody should be jumping into the middle of an IC.
    bool needsBranchCompaction = false;
    LinkBuffer linkBuffer(jit, stubInfo.patch.start, jit.m_assembler.buffer().codeSize(), JITCompilationMustSucceed, needsBranchCompaction);
    RELEASE_ASSERT(linkBuffer.isValid());
    linkBuffer.link(jump, target);

    FINALIZE_CODE(linkBuffer, NoPtrTag, "InlineAccess: linking constant jump");
}

} // namespace JSC

#endif // ENABLE(JIT)