/*
 * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "InlineAccess.h"

#if ENABLE(JIT)

#include "CCallHelpers.h"
#include "JSArray.h"
#include "JSCellInlines.h"
#include "LinkBuffer.h"
#include "ScratchRegisterAllocator.h"
#include "Structure.h"
#include "StructureStubInfo.h"

namespace JSC {

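// Emits each flavor of inline cache into a scratch assembler, logs the resulting
// code size, and then crashes. This is a debugging aid for choosing how much
// code space to reserve for inline ICs on a given platform.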
void InlineAccess::dumpCacheSizesAndCrash()
{
    GPRReg base = GPRInfo::regT0;
    GPRReg value = GPRInfo::regT1;
#if USE(JSVALUE32_64)
    JSValueRegs regs(base, value);
#else
    JSValueRegs regs(base);
#endif
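    // String length: check for StringType, then load the length either from the
    // resolved StringImpl or, for ropes, from the JSRopeString itself.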
    {
        CCallHelpers jit;

        GPRReg scratchGPR = value;
        jit.patchableBranch8(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(base, JSCell::typeInfoTypeOffset()),
            CCallHelpers::TrustedImm32(StringType));

        jit.loadPtr(CCallHelpers::Address(base, JSString::offsetOfValue()), scratchGPR);
        auto isRope = jit.branchIfRopeStringImpl(scratchGPR);
        jit.load32(CCallHelpers::Address(scratchGPR, StringImpl::lengthMemoryOffset()), regs.payloadGPR());
        auto done = jit.jump();

        isRope.link(&jit);
        jit.load32(CCallHelpers::Address(base, JSRopeString::offsetOfLength()), regs.payloadGPR());

        done.link(&jit);
        jit.boxInt32(regs.payloadGPR(), regs);

        dataLog("string length size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

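    // Array length: check the indexing type, then read the public length from
    // the butterfly.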
    {
        CCallHelpers jit;

        GPRReg scratchGPR = value;
        jit.load8(CCallHelpers::Address(base, JSCell::indexingTypeAndMiscOffset()), value);
        jit.and32(CCallHelpers::TrustedImm32(IsArray | IndexingShapeMask), value);
        jit.patchableBranch32(
            CCallHelpers::NotEqual, value, CCallHelpers::TrustedImm32(IsArray | ContiguousShape));
        jit.loadPtr(CCallHelpers::Address(base, JSObject::butterflyOffset()), value);
        jit.load32(CCallHelpers::Address(value, ArrayStorage::lengthOffset()), value);
        jit.boxInt32(scratchGPR, regs);

        dataLog("array length size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

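    // Out-of-line property load: check the structure ID, load the butterfly,
    // then load the value at a dummy out-of-line offset.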
    {
        CCallHelpers jit;

        jit.patchableBranch32(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(base, JSCell::structureIDOffset()),
            MacroAssembler::TrustedImm32(0x000ab21ca));
        jit.loadPtr(
            CCallHelpers::Address(base, JSObject::butterflyOffset()),
            value);
        GPRReg storageGPR = value;
        jit.loadValue(
            CCallHelpers::Address(storageGPR, 0x000ab21ca), regs);

        dataLog("out of line offset cache size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

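    // Inline property load: check the structure ID, then load the value
    // directly from the object's inline storage.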
    {
        CCallHelpers jit;

        jit.patchableBranch32(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(base, JSCell::structureIDOffset()),
            MacroAssembler::TrustedImm32(0x000ab21ca));
        jit.loadValue(
            MacroAssembler::Address(base, 0x000ab21ca), regs);

        dataLog("inline offset cache size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

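    // Inline property replace: check the structure ID, then store the value
    // into the object's inline storage.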
    {
        CCallHelpers jit;

        jit.patchableBranch32(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(base, JSCell::structureIDOffset()),
            MacroAssembler::TrustedImm32(0x000ab21ca));

        jit.storeValue(
            regs, MacroAssembler::Address(base, 0x000ab21ca));

        dataLog("replace cache size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

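    // Out-of-line property replace: check the structure ID, load the butterfly,
    // then store the value at a dummy out-of-line offset.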
    {
        CCallHelpers jit;

        jit.patchableBranch32(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(base, JSCell::structureIDOffset()),
            MacroAssembler::TrustedImm32(0x000ab21ca));

        jit.loadPtr(MacroAssembler::Address(base, JSObject::butterflyOffset()), value);
        jit.storeValue(
            regs,
            MacroAssembler::Address(value, 120342));

        dataLog("replace out of line cache size: ", jit.m_assembler.buffer().codeSize(), "\n");
    }

    CRASH();
}


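// Attempts to copy the code generated in jit into the inline slot reserved at
// the IC site. Returns false, without linking anything, if the generated code
// does not fit in stubInfo.patch.inlineSize() bytes.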
template <typename Function>
ALWAYS_INLINE static bool linkCodeInline(const char* name, CCallHelpers& jit, StructureStubInfo& stubInfo, const Function& function)
{
    if (jit.m_assembler.buffer().codeSize() <= stubInfo.patch.inlineSize()) {
        bool needsBranchCompaction = true;
        LinkBuffer linkBuffer(jit, stubInfo.patch.start, stubInfo.patch.inlineSize(), JITCompilationMustSucceed, needsBranchCompaction);
        ASSERT(linkBuffer.isValid());
        function(linkBuffer);
        FINALIZE_CODE(linkBuffer, NoPtrTag, "InlineAccessType: '%s'", name);
        return true;
    }

    // This is helpful when determining the size for inline ICs on various
    // platforms. You want to choose a size that usually succeeds, but sometimes
    // there may be variability in the length of the code we generate just because
    // of randomness. It's helpful to flip this on when running tests or browsing
    // the web just to see how often it fails. You don't want an IC size that always fails.
    constexpr bool failIfCantInline = false;
    if (failIfCantInline) {
        dataLog("Failure for: ", name, "\n");
        dataLog("real size: ", jit.m_assembler.buffer().codeSize(), " inline size: ", stubInfo.patch.inlineSize(), "\n");
        CRASH();
    }

    return false;
}

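// Emits the fast path for a self property load: a patchable structure check,
// then a value load from either inline storage or the butterfly.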
bool InlineAccess::generateSelfPropertyAccess(StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset)
{
    CCallHelpers jit;

    GPRReg base = stubInfo.baseGPR();
    JSValueRegs value = stubInfo.valueRegs();

    auto branchToSlowPath = jit.patchableBranch32(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(base, JSCell::structureIDOffset()),
        MacroAssembler::TrustedImm32(bitwise_cast<uint32_t>(structure->id())));
    GPRReg storage;
    if (isInlineOffset(offset))
        storage = base;
    else {
        jit.loadPtr(CCallHelpers::Address(base, JSObject::butterflyOffset()), value.payloadGPR());
        storage = value.payloadGPR();
    }

    jit.loadValue(
        MacroAssembler::Address(storage, offsetRelativeToBase(offset)), value);

    bool linkedCodeInline = linkCodeInline("property access", jit, stubInfo, [&] (LinkBuffer& linkBuffer) {
        linkBuffer.link(branchToSlowPath, stubInfo.slowPathStartLocation());
    });
    return linkedCodeInline;
}

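// Returns a GPR that is unused at the IC site, or InvalidGPRReg if no register
// is free (i.e. the allocator would have to reuse one and spill it).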
ALWAYS_INLINE static GPRReg getScratchRegister(StructureStubInfo& stubInfo)
{
    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(stubInfo.baseGPR());
    allocator.lock(stubInfo.patch.valueGPR);
#if USE(JSVALUE32_64)
    allocator.lock(stubInfo.patch.baseTagGPR);
    allocator.lock(stubInfo.patch.valueTagGPR);
#endif
    GPRReg scratch = allocator.allocateScratchGPR();
    if (allocator.didReuseRegisters())
        return InvalidGPRReg;
    return scratch;
}

ALWAYS_INLINE static bool hasFreeRegister(StructureStubInfo& stubInfo)
{
    return getScratchRegister(stubInfo) != InvalidGPRReg;
}

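// Replacing an out-of-line property needs a scratch register to hold the
// butterfly pointer; an inline replace stores relative to the base object.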
bool InlineAccess::canGenerateSelfPropertyReplace(StructureStubInfo& stubInfo, PropertyOffset offset)
{
    if (isInlineOffset(offset))
        return true;

    return hasFreeRegister(stubInfo);
}

bool InlineAccess::generateSelfPropertyReplace(StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset)
{
    ASSERT(canGenerateSelfPropertyReplace(stubInfo, offset));

    CCallHelpers jit;

    GPRReg base = stubInfo.baseGPR();
    JSValueRegs value = stubInfo.valueRegs();

    auto branchToSlowPath = jit.patchableBranch32(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(base, JSCell::structureIDOffset()),
        MacroAssembler::TrustedImm32(bitwise_cast<uint32_t>(structure->id())));

    GPRReg storage;
    if (isInlineOffset(offset))
        storage = base;
    else {
        storage = getScratchRegister(stubInfo);
        ASSERT(storage != InvalidGPRReg);
        jit.loadPtr(CCallHelpers::Address(base, JSObject::butterflyOffset()), storage);
    }

    jit.storeValue(
        value, MacroAssembler::Address(storage, offsetRelativeToBase(offset)));

    bool linkedCodeInline = linkCodeInline("property replace", jit, stubInfo, [&] (LinkBuffer& linkBuffer) {
        linkBuffer.link(branchToSlowPath, stubInfo.slowPathStartLocation());
    });
    return linkedCodeInline;
}

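// The array length fast path only handles non-ArrayStorage indexing shapes,
// where the length can be read directly from the butterfly's public length.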
bool InlineAccess::isCacheableArrayLength(StructureStubInfo& stubInfo, JSArray* array)
{
    ASSERT(array->indexingType() & IsArray);

    if (!hasFreeRegister(stubInfo))
        return false;

    return !hasAnyArrayStorage(array->indexingType()) && array->indexingType() != ArrayClass;
}

bool InlineAccess::generateArrayLength(StructureStubInfo& stubInfo, JSArray* array)
{
    ASSERT(isCacheableArrayLength(stubInfo, array));

    CCallHelpers jit;

    GPRReg base = stubInfo.baseGPR();
    JSValueRegs value = stubInfo.valueRegs();
    GPRReg scratch = getScratchRegister(stubInfo);

    jit.load8(CCallHelpers::Address(base, JSCell::indexingTypeAndMiscOffset()), scratch);
    jit.and32(CCallHelpers::TrustedImm32(IndexingTypeMask), scratch);
    auto branchToSlowPath = jit.patchableBranch32(
        CCallHelpers::NotEqual, scratch, CCallHelpers::TrustedImm32(array->indexingType()));
    jit.loadPtr(CCallHelpers::Address(base, JSObject::butterflyOffset()), value.payloadGPR());
    jit.load32(CCallHelpers::Address(value.payloadGPR(), ArrayStorage::lengthOffset()), value.payloadGPR());
    jit.boxInt32(value.payloadGPR(), value);

    bool linkedCodeInline = linkCodeInline("array length", jit, stubInfo, [&] (LinkBuffer& linkBuffer) {
        linkBuffer.link(branchToSlowPath, stubInfo.slowPathStartLocation());
    });
    return linkedCodeInline;
}

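// The string length fast path needs a scratch register to hold the StringImpl
// pointer while distinguishing resolved strings from ropes.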
bool InlineAccess::isCacheableStringLength(StructureStubInfo& stubInfo)
{
    return hasFreeRegister(stubInfo);
}

bool InlineAccess::generateStringLength(StructureStubInfo& stubInfo)
{
    ASSERT(isCacheableStringLength(stubInfo));

    CCallHelpers jit;

    GPRReg base = stubInfo.baseGPR();
    JSValueRegs value = stubInfo.valueRegs();
    GPRReg scratch = getScratchRegister(stubInfo);

    auto branchToSlowPath = jit.patchableBranch8(
        CCallHelpers::NotEqual,
        CCallHelpers::Address(base, JSCell::typeInfoTypeOffset()),
        CCallHelpers::TrustedImm32(StringType));

    jit.loadPtr(CCallHelpers::Address(base, JSString::offsetOfValue()), scratch);
    auto isRope = jit.branchIfRopeStringImpl(scratch);
    jit.load32(CCallHelpers::Address(scratch, StringImpl::lengthMemoryOffset()), value.payloadGPR());
    auto done = jit.jump();

    isRope.link(&jit);
    jit.load32(CCallHelpers::Address(base, JSRopeString::offsetOfLength()), value.payloadGPR());

    done.link(&jit);
    jit.boxInt32(value.payloadGPR(), value);

    bool linkedCodeInline = linkCodeInline("string length", jit, stubInfo, [&] (LinkBuffer& linkBuffer) {
        linkBuffer.link(branchToSlowPath, stubInfo.slowPathStartLocation());
    });
    return linkedCodeInline;
}


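// For a self "in" access, passing the structure check alone proves the property
// is present, so the fast path just boxes true.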
bool InlineAccess::generateSelfInAccess(StructureStubInfo& stubInfo, Structure* structure)
{
    CCallHelpers jit;

    GPRReg base = stubInfo.baseGPR();
    JSValueRegs value = stubInfo.valueRegs();

    auto branchToSlowPath = jit.patchableBranch32(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(base, JSCell::structureIDOffset()),
        MacroAssembler::TrustedImm32(bitwise_cast<uint32_t>(structure->id())));
    jit.boxBoolean(true, value);

    bool linkedCodeInline = linkCodeInline("in access", jit, stubInfo, [&] (LinkBuffer& linkBuffer) {
        linkBuffer.link(branchToSlowPath, stubInfo.slowPathStartLocation());
    });
    return linkedCodeInline;
}

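// Overwrites the start of the inline cache with an unconditional jump to
// target, which (per the JITStubRoutinePtrTag) points at a generated stub routine.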
void InlineAccess::rewireStubAsJump(StructureStubInfo& stubInfo, CodeLocationLabel<JITStubRoutinePtrTag> target)
{
    CCallHelpers jit;

    auto jump = jit.jump();

    // We don't need a nop sled here because nobody should be jumping into the middle of an IC.
    bool needsBranchCompaction = false;
    LinkBuffer linkBuffer(jit, stubInfo.patch.start, jit.m_assembler.buffer().codeSize(), JITCompilationMustSucceed, needsBranchCompaction);
    RELEASE_ASSERT(linkBuffer.isValid());
    linkBuffer.link(jump, target);

    FINALIZE_CODE(linkBuffer, NoPtrTag, "InlineAccess: linking constant jump");
}

} // namespace JSC

#endif // ENABLE(JIT)