/*
 * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "AssemblyHelpers.h"

#if ENABLE(JIT)

#include "JITOperations.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "SuperSampler.h"
#include "ThunkGenerators.h"

#if ENABLE(WEBASSEMBLY)
#include "WasmContextInlines.h"
#include "WasmMemoryInformation.h"
#endif

namespace JSC {

ExecutableBase* AssemblyHelpers::executableFor(const CodeOrigin& codeOrigin)
{
    auto* inlineCallFrame = codeOrigin.inlineCallFrame();
    if (!inlineCallFrame)
        return m_codeBlock->ownerExecutable();
    return inlineCallFrame->baselineCodeBlock->ownerExecutable();
}

AssemblyHelpers::Jump AssemblyHelpers::branchIfFastTypedArray(GPRReg baseGPR)
{
    return branch32(
        Equal,
        Address(baseGPR, JSArrayBufferView::offsetOfMode()),
        TrustedImm32(FastTypedArray));
}

AssemblyHelpers::Jump AssemblyHelpers::branchIfNotFastTypedArray(GPRReg baseGPR)
{
    return branch32(
        NotEqual,
        Address(baseGPR, JSArrayBufferView::offsetOfMode()),
        TrustedImm32(FastTypedArray));
}

void AssemblyHelpers::incrementSuperSamplerCount()
{
    add32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<const void*>(&g_superSamplerCount)));
}

void AssemblyHelpers::decrementSuperSamplerCount()
{
    sub32(TrustedImm32(1), AbsoluteAddress(bitwise_cast<const void*>(&g_superSamplerCount)));
}

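// Rewrites any NaN into the canonical pure NaN (PNaN). Under NaN boxing, an
// arbitrary NaN payload could alias a boxed JSValue, so doubles coming from
// untrusted sources must be canonicalized before being stored as JSValues.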
void AssemblyHelpers::purifyNaN(FPRReg fpr)
{
    MacroAssembler::Jump notNaN = branchIfNotNaN(fpr);
    static const double NaN = PNaN;
    loadDouble(TrustedImmPtr(&NaN), fpr);
    notNaN.link(this);
}

#if ENABLE(SAMPLING_FLAGS)
void AssemblyHelpers::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

void AssemblyHelpers::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif

#if !ASSERT_DISABLED
#if USE(JSVALUE64)
void AssemblyHelpers::jitAssertIsInt32(GPRReg gpr)
{
#if CPU(X86_64) || CPU(ARM64)
    Jump checkInt32 = branch64(BelowOrEqual, gpr, TrustedImm64(static_cast<uintptr_t>(0xFFFFFFFFu)));
    abortWithReason(AHIsNotInt32);
    checkInt32.link(this);
#else
    UNUSED_PARAM(gpr);
#endif
}

void AssemblyHelpers::jitAssertIsJSInt32(GPRReg gpr)
{
    Jump checkJSInt32 = branch64(AboveOrEqual, gpr, GPRInfo::numberTagRegister);
    abortWithReason(AHIsNotJSInt32);
    checkJSInt32.link(this);
}

void AssemblyHelpers::jitAssertIsJSNumber(GPRReg gpr)
{
    Jump checkJSNumber = branchTest64(MacroAssembler::NonZero, gpr, GPRInfo::numberTagRegister);
    abortWithReason(AHIsNotJSNumber);
    checkJSNumber.link(this);
}

void AssemblyHelpers::jitAssertIsJSDouble(GPRReg gpr)
{
    Jump checkJSInt32 = branch64(AboveOrEqual, gpr, GPRInfo::numberTagRegister);
    Jump checkJSNumber = branchTest64(MacroAssembler::NonZero, gpr, GPRInfo::numberTagRegister);
    checkJSInt32.link(this);
    abortWithReason(AHIsNotJSDouble);
    checkJSNumber.link(this);
}

void AssemblyHelpers::jitAssertIsCell(GPRReg gpr)
{
    Jump checkCell = branchTest64(MacroAssembler::Zero, gpr, GPRInfo::notCellMaskRegister);
    abortWithReason(AHIsNotCell);
    checkCell.link(this);
}

void AssemblyHelpers::jitAssertTagsInPlace()
{
    Jump ok = branch64(Equal, GPRInfo::numberTagRegister, TrustedImm64(JSValue::NumberTag));
    abortWithReason(AHNumberTagNotInPlace);
    breakpoint();
    ok.link(this);

    ok = branch64(Equal, GPRInfo::notCellMaskRegister, TrustedImm64(JSValue::NotCellMask));
    abortWithReason(AHNotCellMaskNotInPlace);
    ok.link(this);
}
#elif USE(JSVALUE32_64)
void AssemblyHelpers::jitAssertIsInt32(GPRReg gpr)
{
    UNUSED_PARAM(gpr);
}

void AssemblyHelpers::jitAssertIsJSInt32(GPRReg gpr)
{
    Jump checkJSInt32 = branch32(Equal, gpr, TrustedImm32(JSValue::Int32Tag));
    abortWithReason(AHIsNotJSInt32);
    checkJSInt32.link(this);
}

void AssemblyHelpers::jitAssertIsJSNumber(GPRReg gpr)
{
    Jump checkJSInt32 = branch32(Equal, gpr, TrustedImm32(JSValue::Int32Tag));
    Jump checkJSDouble = branch32(Below, gpr, TrustedImm32(JSValue::LowestTag));
    abortWithReason(AHIsNotJSNumber);
    checkJSInt32.link(this);
    checkJSDouble.link(this);
}

void AssemblyHelpers::jitAssertIsJSDouble(GPRReg gpr)
{
    Jump checkJSDouble = branch32(Below, gpr, TrustedImm32(JSValue::LowestTag));
    abortWithReason(AHIsNotJSDouble);
    checkJSDouble.link(this);
}

void AssemblyHelpers::jitAssertIsCell(GPRReg gpr)
{
    Jump checkCell = branch32(Equal, gpr, TrustedImm32(JSValue::CellTag));
    abortWithReason(AHIsNotCell);
    checkCell.link(this);
}

void AssemblyHelpers::jitAssertTagsInPlace()
{
}
#endif // USE(JSVALUE32_64)

void AssemblyHelpers::jitAssertHasValidCallFrame()
{
    Jump checkCFR = branchTestPtr(Zero, GPRInfo::callFrameRegister, TrustedImm32(7));
    abortWithReason(AHCallFrameMisaligned);
    checkCFR.link(this);
}

void AssemblyHelpers::jitAssertIsNull(GPRReg gpr)
{
    Jump checkNull = branchTestPtr(Zero, gpr);
    abortWithReason(AHIsNotNull);
    checkNull.link(this);
}

void AssemblyHelpers::jitAssertArgumentCountSane()
{
    Jump ok = branch32(Below, payloadFor(CallFrameSlot::argumentCount), TrustedImm32(10000000));
    abortWithReason(AHInsaneArgumentCount);
    ok.link(this);
}

#endif // !ASSERT_DISABLED

void AssemblyHelpers::jitReleaseAssertNoException(VM& vm)
{
    Jump noException;
#if USE(JSVALUE64)
    noException = branchTest64(Zero, AbsoluteAddress(vm.addressOfException()));
#elif USE(JSVALUE32_64)
    noException = branch32(Equal, AbsoluteAddress(vm.addressOfException()), TrustedImm32(0));
#endif
    abortWithReason(JITUncoughtExceptionAfterCall);
    noException.link(this);
}

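// Exception fuzzing support: when Options::useExceptionFuzz() is on, every
// exception check first spills all registers to a dedicated buffer, calls
// operationExceptionFuzz() (which may force an exception at this site to
// exercise the unwinding paths), and then restores the registers.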
void AssemblyHelpers::callExceptionFuzz(VM& vm)
{
    if (!Options::useExceptionFuzz())
        return;

    EncodedJSValue* buffer = vm.exceptionFuzzingBuffer(sizeof(EncodedJSValue) * (GPRInfo::numberOfRegisters + FPRInfo::numberOfRegisters));

    for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
#if USE(JSVALUE64)
        store64(GPRInfo::toRegister(i), buffer + i);
#else
        store32(GPRInfo::toRegister(i), buffer + i);
#endif
    }
    for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
        move(TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
        storeDouble(FPRInfo::toRegister(i), Address(GPRInfo::regT0));
    }

    // Set up one argument.
    move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
    move(TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(operationExceptionFuzz)), GPRInfo::nonPreservedNonReturnGPR);
    prepareCallOperation(vm);
    call(GPRInfo::nonPreservedNonReturnGPR, OperationPtrTag);

    for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
        move(TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
        loadDouble(Address(GPRInfo::regT0), FPRInfo::toRegister(i));
    }
    for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
#if USE(JSVALUE64)
        load64(buffer + i, GPRInfo::toRegister(i));
#else
        load32(buffer + i, GPRInfo::toRegister(i));
#endif
    }
}

AssemblyHelpers::Jump AssemblyHelpers::emitJumpIfException(VM& vm)
{
    return emitExceptionCheck(vm, NormalExceptionCheck);
}

AssemblyHelpers::Jump AssemblyHelpers::emitExceptionCheck(VM& vm, ExceptionCheckKind kind, ExceptionJumpWidth width)
{
    callExceptionFuzz(vm);

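    // For a far-jump-width check we invert the sense of the test: the short
    // branch emitted below then skips over a patchable far jump, and that far
    // jump is what gets returned to the caller.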
    if (width == FarJumpWidth)
        kind = (kind == NormalExceptionCheck ? InvertedExceptionCheck : NormalExceptionCheck);

    Jump result;
#if USE(JSVALUE64)
    result = branchTest64(kind == NormalExceptionCheck ? NonZero : Zero, AbsoluteAddress(vm.addressOfException()));
#elif USE(JSVALUE32_64)
    result = branch32(kind == NormalExceptionCheck ? NotEqual : Equal, AbsoluteAddress(vm.addressOfException()), TrustedImm32(0));
#endif

    if (width == NormalJumpWidth)
        return result;

    PatchableJump realJump = patchableJump();
    result.link(this);

    return realJump.m_jump;
}

AssemblyHelpers::Jump AssemblyHelpers::emitNonPatchableExceptionCheck(VM& vm)
{
    callExceptionFuzz(vm);

    Jump result;
#if USE(JSVALUE64)
    result = branchTest64(NonZero, AbsoluteAddress(vm.addressOfException()));
#elif USE(JSVALUE32_64)
    result = branch32(NotEqual, AbsoluteAddress(vm.addressOfException()), TrustedImm32(0));
#endif

    return result;
}

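// On 64-bit targets, Structure::idBlob() packs the 32-bit structure ID with
// the indexing mode, cell type, and type-info flags, so a single 64-bit store
// initializes all of these cell header fields at once; the assertions below
// cross-check each packed field against the Structure.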
void AssemblyHelpers::emitStoreStructureWithTypeInfo(AssemblyHelpers& jit, TrustedImmPtr structure, RegisterID dest)
{
    const Structure* structurePtr = reinterpret_cast<const Structure*>(structure.m_value);
#if USE(JSVALUE64)
    jit.store64(TrustedImm64(structurePtr->idBlob()), MacroAssembler::Address(dest, JSCell::structureIDOffset()));
    if (!ASSERT_DISABLED) {
        Jump correctStructure = jit.branch32(Equal, MacroAssembler::Address(dest, JSCell::structureIDOffset()), TrustedImm32(structurePtr->id()));
        jit.abortWithReason(AHStructureIDIsValid);
        correctStructure.link(&jit);

        Jump correctIndexingType = jit.branch8(Equal, MacroAssembler::Address(dest, JSCell::indexingTypeAndMiscOffset()), TrustedImm32(structurePtr->indexingModeIncludingHistory()));
        jit.abortWithReason(AHIndexingTypeIsValid);
        correctIndexingType.link(&jit);

        Jump correctType = jit.branch8(Equal, MacroAssembler::Address(dest, JSCell::typeInfoTypeOffset()), TrustedImm32(structurePtr->typeInfo().type()));
        jit.abortWithReason(AHTypeInfoIsValid);
        correctType.link(&jit);

        Jump correctFlags = jit.branch8(Equal, MacroAssembler::Address(dest, JSCell::typeInfoFlagsOffset()), TrustedImm32(structurePtr->typeInfo().inlineTypeFlags()));
        jit.abortWithReason(AHTypeInfoInlineTypeFlagsAreValid);
        correctFlags.link(&jit);
    }
#else
    // Do a 32-bit wide store to initialize the cell's fields.
    jit.store32(TrustedImm32(structurePtr->objectInitializationBlob()), MacroAssembler::Address(dest, JSCell::indexingTypeAndMiscOffset()));
    jit.storePtr(structure, MacroAssembler::Address(dest, JSCell::structureIDOffset()));
#endif
}

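// Loads the value at the given property offset. Inline properties live inside
// the object itself; out-of-line properties live in the butterfly at negative
// indices. The bias arithmetic below mirrors offsetInInlineStorage() and
// offsetInButterfly() from PropertyOffset.h, so both cases funnel into a
// single BaseIndex load.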
void AssemblyHelpers::loadProperty(GPRReg object, GPRReg offset, JSValueRegs result)
{
    Jump isInline = branch32(LessThan, offset, TrustedImm32(firstOutOfLineOffset));

    loadPtr(Address(object, JSObject::butterflyOffset()), result.payloadGPR());
    neg32(offset);
    signExtend32ToPtr(offset, offset);
    Jump ready = jump();

    isInline.link(this);
    addPtr(
        TrustedImm32(
            static_cast<int32_t>(sizeof(JSObject)) -
            (static_cast<int32_t>(firstOutOfLineOffset) - 2) * static_cast<int32_t>(sizeof(EncodedJSValue))),
        object, result.payloadGPR());

    ready.link(this);

    loadValue(
        BaseIndex(
            result.payloadGPR(), offset, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)),
        result);
}

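// In C-like pseudocode, the JSVALUE64 path below computes:
//     uint32_t id = source->structureID();
//     dest = table[id >> s_numberOfEntropyBits]
//         ^ (static_cast<uintptr_t>(id) << s_entropyBitsShiftForStructurePointer);
// where `table` is vm.heap.structureIDTable().base(). See StructureIDTable.h
// for the entropy scheme.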
void AssemblyHelpers::emitLoadStructure(VM& vm, RegisterID source, RegisterID dest, RegisterID scratch)
{
#if USE(JSVALUE64)
#if CPU(ARM64)
    RegisterID scratch2 = dataTempRegister;
#elif CPU(X86_64)
    RegisterID scratch2 = scratchRegister();
#else
#error "Unsupported cpu"
#endif

    ASSERT(dest != scratch);
    ASSERT(dest != scratch2);
    ASSERT(scratch != scratch2);

    load32(MacroAssembler::Address(source, JSCell::structureIDOffset()), scratch2);
    loadPtr(vm.heap.structureIDTable().base(), scratch);
    rshift32(scratch2, TrustedImm32(StructureIDTable::s_numberOfEntropyBits), dest);
    loadPtr(MacroAssembler::BaseIndex(scratch, dest, MacroAssembler::TimesEight), dest);
    lshiftPtr(TrustedImm32(StructureIDTable::s_entropyBitsShiftForStructurePointer), scratch2);
    xorPtr(scratch2, dest);
#else // not USE(JSVALUE64)
    UNUSED_PARAM(scratch);
    UNUSED_PARAM(vm);
    loadPtr(MacroAssembler::Address(source, JSCell::structureIDOffset()), dest);
#endif // not USE(JSVALUE64)
}

void AssemblyHelpers::makeSpaceOnStackForCCall()
{
    unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), maxFrameExtentForSlowPathCall);
    if (stackOffset)
        subPtr(TrustedImm32(stackOffset), stackPointerRegister);
}

void AssemblyHelpers::reclaimSpaceOnStackForCCall()
{
    unsigned stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), maxFrameExtentForSlowPathCall);
    if (stackOffset)
        addPtr(TrustedImm32(stackOffset), stackPointerRegister);
}

#if USE(JSVALUE64)
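// Inlines WeakRandom::advance(), an xorshift128+ step, and then converts the
// low 53 bits of the result into a uniform double in [0, 1).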
template<typename LoadFromHigh, typename StoreToHigh, typename LoadFromLow, typename StoreToLow>
void emitRandomThunkImpl(AssemblyHelpers& jit, GPRReg scratch0, GPRReg scratch1, GPRReg scratch2, FPRReg result, const LoadFromHigh& loadFromHigh, const StoreToHigh& storeToHigh, const LoadFromLow& loadFromLow, const StoreToLow& storeToLow)
{
    // Inlined WeakRandom::advance().
    // uint64_t x = m_low;
    loadFromLow(scratch0);
    // uint64_t y = m_high;
    loadFromHigh(scratch1);
    // m_low = y;
    storeToLow(scratch1);

    // x ^= x << 23;
    jit.move(scratch0, scratch2);
    jit.lshift64(AssemblyHelpers::TrustedImm32(23), scratch2);
    jit.xor64(scratch2, scratch0);

    // x ^= x >> 17;
    jit.move(scratch0, scratch2);
    jit.rshift64(AssemblyHelpers::TrustedImm32(17), scratch2);
    jit.xor64(scratch2, scratch0);

    // x ^= y ^ (y >> 26);
    jit.move(scratch1, scratch2);
    jit.rshift64(AssemblyHelpers::TrustedImm32(26), scratch2);
    jit.xor64(scratch1, scratch2);
    jit.xor64(scratch2, scratch0);

    // m_high = x;
    storeToHigh(scratch0);

    // return x + y;
    jit.add64(scratch1, scratch0);

    // Extract a random 53-bit value: integers of up to 53 bits are exactly representable as doubles.
    jit.move(AssemblyHelpers::TrustedImm64((1ULL << 53) - 1), scratch1);
    jit.and64(scratch1, scratch0);
    // Now scratch0 is a non-negative value that fits in an int64_t, so it is safe to convert it to double with cvtsi2sdq.
    jit.convertInt64ToDouble(scratch0, result);

    // Convert `(53-bit integer value) / (1 << 53)` into `(53-bit integer value) * (1.0 / (1 << 53))`.
    // In the latter form, `1.0 / (1 << 53)` is the double 2^-53 (mantissa = 0, biased exponent = 970).
    static constexpr double scale = 1.0 / (1ULL << 53);

    // Multiplying the 53-bit integer by 2^-53 leaves its mantissa unchanged and only reduces its
    // exponent. (Except for 0.0, which simply stays 0.0.)
    // The result is a random double with 53 bits of precision in [0, 1).
    jit.move(AssemblyHelpers::TrustedImmPtr(&scale), scratch1);
    jit.mulDouble(AssemblyHelpers::Address(scratch1), result);
}

void AssemblyHelpers::emitRandomThunk(JSGlobalObject* globalObject, GPRReg scratch0, GPRReg scratch1, GPRReg scratch2, FPRReg result)
{
    void* lowAddress = reinterpret_cast<uint8_t*>(globalObject) + JSGlobalObject::weakRandomOffset() + WeakRandom::lowOffset();
    void* highAddress = reinterpret_cast<uint8_t*>(globalObject) + JSGlobalObject::weakRandomOffset() + WeakRandom::highOffset();

    auto loadFromHigh = [&](GPRReg high) {
        load64(highAddress, high);
    };
    auto storeToHigh = [&](GPRReg high) {
        store64(high, highAddress);
    };
    auto loadFromLow = [&](GPRReg low) {
        load64(lowAddress, low);
    };
    auto storeToLow = [&](GPRReg low) {
        store64(low, lowAddress);
    };

    emitRandomThunkImpl(*this, scratch0, scratch1, scratch2, result, loadFromHigh, storeToHigh, loadFromLow, storeToLow);
}

void AssemblyHelpers::emitRandomThunk(VM& vm, GPRReg scratch0, GPRReg scratch1, GPRReg scratch2, GPRReg scratch3, FPRReg result)
{
    emitGetFromCallFrameHeaderPtr(CallFrameSlot::callee, scratch3);
    emitLoadStructure(vm, scratch3, scratch3, scratch0);
    loadPtr(Address(scratch3, Structure::globalObjectOffset()), scratch3);
    // Now, scratch3 holds JSGlobalObject*.

    auto loadFromHigh = [&](GPRReg high) {
        load64(Address(scratch3, JSGlobalObject::weakRandomOffset() + WeakRandom::highOffset()), high);
    };
    auto storeToHigh = [&](GPRReg high) {
        store64(high, Address(scratch3, JSGlobalObject::weakRandomOffset() + WeakRandom::highOffset()));
    };
    auto loadFromLow = [&](GPRReg low) {
        load64(Address(scratch3, JSGlobalObject::weakRandomOffset() + WeakRandom::lowOffset()), low);
    };
    auto storeToLow = [&](GPRReg low) {
        store64(low, Address(scratch3, JSGlobalObject::weakRandomOffset() + WeakRandom::lowOffset()));
    };

    emitRandomThunkImpl(*this, scratch0, scratch1, scratch2, result, loadFromHigh, storeToHigh, loadFromLow, storeToLow);
}
#endif

void AssemblyHelpers::emitAllocateWithNonNullAllocator(GPRReg resultGPR, const JITAllocator& allocator, GPRReg allocatorGPR, GPRReg scratchGPR, JumpList& slowPath)
{
    if (Options::forceGCSlowPaths()) {
        slowPath.append(jump());
        return;
    }

    // NOTE, some invariants of this function:
    // - When going to the slow path, we must leave resultGPR with zero in it.
    // - We *cannot* use RegisterSet::macroScratchRegisters on x86.
    // - We *can* use RegisterSet::macroScratchRegisters on ARM.

    Jump popPath;
    Jump done;

    if (allocator.isConstant())
        move(TrustedImmPtr(allocator.allocator().localAllocator()), allocatorGPR);

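    // Fast path: bump allocation. FreeList::remaining counts bytes down from
    // the end of the payload, so the new cell lives at payloadEnd - remaining,
    // and allocating means storing back remaining - cellSize.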
    load32(Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfRemaining()), resultGPR);
    popPath = branchTest32(Zero, resultGPR);
    if (allocator.isConstant())
        add32(TrustedImm32(-allocator.allocator().cellSize()), resultGPR, scratchGPR);
    else {
        move(resultGPR, scratchGPR);
        sub32(Address(allocatorGPR, LocalAllocator::offsetOfCellSize()), scratchGPR);
    }
    negPtr(resultGPR);
    store32(scratchGPR, Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfRemaining()));
    Address payloadEndAddr = Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfPayloadEnd());
    addPtr(payloadEndAddr, resultGPR);

    done = jump();

    popPath.link(this);

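    // Pop path: free-list head pointers are stored scrambled (XORed with a
    // per-free-list secret), so that a corrupted heap value cannot easily be
    // made to look like a usable free cell.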
    loadPtr(Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfScrambledHead()), resultGPR);
    xorPtr(Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfSecret()), resultGPR);
    slowPath.append(branchTestPtr(Zero, resultGPR));

    // The object is half-allocated: we have what we know is a fresh object, but
    // it's still on the GC's free list.
    loadPtr(Address(resultGPR, FreeCell::offsetOfScrambledNext()), scratchGPR);
    storePtr(scratchGPR, Address(allocatorGPR, LocalAllocator::offsetOfFreeList() + FreeList::offsetOfScrambledHead()));

    done.link(this);
}

void AssemblyHelpers::emitAllocate(GPRReg resultGPR, const JITAllocator& allocator, GPRReg allocatorGPR, GPRReg scratchGPR, JumpList& slowPath)
{
    if (allocator.isConstant()) {
        if (!allocator.allocator()) {
            slowPath.append(jump());
            return;
        }
    } else
        slowPath.append(branchTestPtr(Zero, allocatorGPR));
    emitAllocateWithNonNullAllocator(resultGPR, allocator, allocatorGPR, scratchGPR, slowPath);
}

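// Rounds a variable allocation size up to a MarkedSpace size class and uses
// the result to index the subspace's per-size-step allocator table; anything
// above largeCutoff takes the slow path. Roughly:
//     index = (size + sizeStep - 1) >> log2(sizeStep);
//     allocator = subspace.allocatorForSizeStep()[index];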
void AssemblyHelpers::emitAllocateVariableSized(GPRReg resultGPR, CompleteSubspace& subspace, GPRReg allocationSize, GPRReg scratchGPR1, GPRReg scratchGPR2, JumpList& slowPath)
{
    static_assert(!(MarkedSpace::sizeStep & (MarkedSpace::sizeStep - 1)), "MarkedSpace::sizeStep must be a power of two.");

    unsigned stepShift = getLSBSet(MarkedSpace::sizeStep);

    add32(TrustedImm32(MarkedSpace::sizeStep - 1), allocationSize, scratchGPR1);
    urshift32(TrustedImm32(stepShift), scratchGPR1);
    slowPath.append(branch32(Above, scratchGPR1, TrustedImm32(MarkedSpace::largeCutoff >> stepShift)));
    move(TrustedImmPtr(subspace.allocatorForSizeStep()), scratchGPR2);
    loadPtr(BaseIndex(scratchGPR2, scratchGPR1, timesPtr()), scratchGPR1);

    emitAllocate(resultGPR, JITAllocator::variable(), scratchGPR1, scratchGPR2, slowPath);
}

void AssemblyHelpers::restoreCalleeSavesFromEntryFrameCalleeSavesBuffer(EntryFrame*& topEntryFrame)
{
#if NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
    RegisterAtOffsetList* allCalleeSaves = RegisterSet::vmCalleeSaveRegisterOffsets();
    RegisterSet dontRestoreRegisters = RegisterSet::stackRegisters();
    unsigned registerCount = allCalleeSaves->size();

    GPRReg scratch = InvalidGPRReg;
    unsigned scratchGPREntryIndex = 0;

    // Use the first GPR entry's register as our scratch.
    for (unsigned i = 0; i < registerCount; i++) {
        RegisterAtOffset entry = allCalleeSaves->at(i);
        if (dontRestoreRegisters.get(entry.reg()))
            continue;
        if (entry.reg().isGPR()) {
            scratchGPREntryIndex = i;
            scratch = entry.reg().gpr();
            break;
        }
    }
    ASSERT(scratch != InvalidGPRReg);

    loadPtr(&topEntryFrame, scratch);
    addPtr(TrustedImm32(EntryFrame::calleeSaveRegistersBufferOffset()), scratch);

    // Restore all callee saves except for the scratch.
    for (unsigned i = 0; i < registerCount; i++) {
        RegisterAtOffset entry = allCalleeSaves->at(i);
        if (dontRestoreRegisters.get(entry.reg()))
            continue;
        if (entry.reg().isGPR()) {
            if (i != scratchGPREntryIndex)
                loadPtr(Address(scratch, entry.offset()), entry.reg().gpr());
        } else
            loadDouble(Address(scratch, entry.offset()), entry.reg().fpr());
    }

    // Restore the callee save value of the scratch.
    RegisterAtOffset entry = allCalleeSaves->at(scratchGPREntryIndex);
    ASSERT(!dontRestoreRegisters.get(entry.reg()));
    ASSERT(entry.reg().isGPR());
    ASSERT(scratch == entry.reg().gpr());
    loadPtr(Address(scratch, entry.offset()), scratch);
#else
    UNUSED_PARAM(topEntryFrame);
#endif
}

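// Emits an always-slow virtual call: materializes the CallLinkInfo and the
// global object in the registers the virtual-call thunk reads, then links a
// near call to a thunk generated for this call site at link time.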
void AssemblyHelpers::emitDumbVirtualCall(VM& vm, JSGlobalObject* globalObject, CallLinkInfo* info)
{
    move(TrustedImmPtr(info), GPRInfo::regT2);
    move(TrustedImmPtr(globalObject), GPRInfo::regT3);
    Call call = nearCall();
    addLinkTask(
        [=, &vm] (LinkBuffer& linkBuffer) {
            MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(vm, *info);
            info->setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
            linkBuffer.link(call, CodeLocationLabel<JITStubRoutinePtrTag>(virtualThunk.code()));
        });
}

#if USE(JSVALUE64)
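// Thomas Wang's 64-bit integer hash, truncated to an unsigned 32-bit result.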
void AssemblyHelpers::wangsInt64Hash(GPRReg inputAndResult, GPRReg scratch)
{
    GPRReg input = inputAndResult;
    // key += ~(key << 32);
    move(input, scratch);
    lshift64(TrustedImm32(32), scratch);
    not64(scratch);
    add64(scratch, input);
    // key ^= (key >> 22);
    move(input, scratch);
    urshift64(TrustedImm32(22), scratch);
    xor64(scratch, input);
    // key += ~(key << 13);
    move(input, scratch);
    lshift64(TrustedImm32(13), scratch);
    not64(scratch);
    add64(scratch, input);
    // key ^= (key >> 8);
    move(input, scratch);
    urshift64(TrustedImm32(8), scratch);
    xor64(scratch, input);
    // key += (key << 3);
    move(input, scratch);
    lshift64(TrustedImm32(3), scratch);
    add64(scratch, input);
    // key ^= (key >> 15);
    move(input, scratch);
    urshift64(TrustedImm32(15), scratch);
    xor64(scratch, input);
    // key += ~(key << 27);
    move(input, scratch);
    lshift64(TrustedImm32(27), scratch);
    not64(scratch);
    add64(scratch, input);
    // key ^= (key >> 31);
    move(input, scratch);
    urshift64(TrustedImm32(31), scratch);
    xor64(scratch, input);

    // return static_cast<unsigned>(result)
    void* mask = bitwise_cast<void*>(static_cast<uintptr_t>(UINT_MAX));
    and64(TrustedImmPtr(mask), inputAndResult);
}
#endif // USE(JSVALUE64)

void AssemblyHelpers::emitConvertValueToBoolean(VM& vm, JSValueRegs value, GPRReg result, GPRReg scratchIfShouldCheckMasqueradesAsUndefined, FPRReg valueAsFPR, FPRReg tempFPR, bool shouldCheckMasqueradesAsUndefined, JSGlobalObject* globalObject, bool invert)
{
    // Implements the following control flow structure:
    // if (value is cell) {
    //     if (value is string or value is BigInt)
    //         result = !!value->length
    //     else {
    //         do evil things for masquerades-as-undefined
    //         result = true
    //     }
    // } else if (value is int32) {
    //     result = !!unboxInt32(value)
    // } else if (value is number) {
    //     result = !!unboxDouble(value)
    // } else {
    //     result = value == jsTrue
    // }

    JumpList done;

    auto notCell = branchIfNotCell(value);
    auto isString = branchIfString(value.payloadGPR());
    auto isBigInt = branchIfBigInt(value.payloadGPR());

    if (shouldCheckMasqueradesAsUndefined) {
        ASSERT(scratchIfShouldCheckMasqueradesAsUndefined != InvalidGPRReg);
        JumpList isNotMasqueradesAsUndefined;
        isNotMasqueradesAsUndefined.append(branchTest8(Zero, Address(value.payloadGPR(), JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)));
        emitLoadStructure(vm, value.payloadGPR(), result, scratchIfShouldCheckMasqueradesAsUndefined);
        move(TrustedImmPtr(globalObject), scratchIfShouldCheckMasqueradesAsUndefined);
        isNotMasqueradesAsUndefined.append(branchPtr(NotEqual, Address(result, Structure::globalObjectOffset()), scratchIfShouldCheckMasqueradesAsUndefined));

        // We act like we are "undefined" here.
        move(invert ? TrustedImm32(1) : TrustedImm32(0), result);
        done.append(jump());
        isNotMasqueradesAsUndefined.link(this);
    }
    move(invert ? TrustedImm32(0) : TrustedImm32(1), result);
    done.append(jump());

    isString.link(this);
    move(TrustedImmPtr(jsEmptyString(vm)), result);
    comparePtr(invert ? Equal : NotEqual, value.payloadGPR(), result, result);
    done.append(jump());

    isBigInt.link(this);
    load32(Address(value.payloadGPR(), JSBigInt::offsetOfLength()), result);
    compare32(invert ? Equal : NotEqual, result, TrustedImm32(0), result);
    done.append(jump());

    notCell.link(this);
    auto notInt32 = branchIfNotInt32(value);
    compare32(invert ? Equal : NotEqual, value.payloadGPR(), TrustedImm32(0), result);
    done.append(jump());

    notInt32.link(this);
    auto notDouble = branchIfNotDoubleKnownNotInt32(value);
#if USE(JSVALUE64)
    unboxDouble(value.gpr(), result, valueAsFPR);
#else
    unboxDouble(value, valueAsFPR, tempFPR);
#endif
    move(invert ? TrustedImm32(1) : TrustedImm32(0), result);
    done.append(branchDoubleZeroOrNaN(valueAsFPR, tempFPR));
    move(invert ? TrustedImm32(0) : TrustedImm32(1), result);
    done.append(jump());

    notDouble.link(this);
#if USE(JSVALUE64)
    compare64(invert ? NotEqual : Equal, value.gpr(), TrustedImm32(JSValue::ValueTrue), result);
#else
    move(invert ? TrustedImm32(1) : TrustedImm32(0), result);
    done.append(branchIfNotBoolean(value, InvalidGPRReg));
    compare32(invert ? Equal : NotEqual, value.payloadGPR(), TrustedImm32(0), result);
#endif

    done.link(this);
}

AssemblyHelpers::JumpList AssemblyHelpers::branchIfValue(VM& vm, JSValueRegs value, GPRReg scratch, GPRReg scratchIfShouldCheckMasqueradesAsUndefined, FPRReg valueAsFPR, FPRReg tempFPR, bool shouldCheckMasqueradesAsUndefined, JSGlobalObject* globalObject, bool invert)
{
    // Implements the following control flow structure:
    // if (value is cell) {
    //     if (value is string or value is BigInt)
    //         result = !!value->length
    //     else {
    //         do evil things for masquerades-as-undefined
    //         result = true
    //     }
    // } else if (value is int32) {
    //     result = !!unboxInt32(value)
    // } else if (value is number) {
    //     result = !!unboxDouble(value)
    // } else {
    //     result = value == jsTrue
    // }

    JumpList done;
    JumpList truthy;

    auto notCell = branchIfNotCell(value);
    auto isString = branchIfString(value.payloadGPR());
    auto isBigInt = branchIfBigInt(value.payloadGPR());

    if (shouldCheckMasqueradesAsUndefined) {
        ASSERT(scratchIfShouldCheckMasqueradesAsUndefined != InvalidGPRReg);
        JumpList isNotMasqueradesAsUndefined;
        isNotMasqueradesAsUndefined.append(branchTest8(Zero, Address(value.payloadGPR(), JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)));
        emitLoadStructure(vm, value.payloadGPR(), scratch, scratchIfShouldCheckMasqueradesAsUndefined);
        move(TrustedImmPtr(globalObject), scratchIfShouldCheckMasqueradesAsUndefined);
        isNotMasqueradesAsUndefined.append(branchPtr(NotEqual, Address(scratch, Structure::globalObjectOffset()), scratchIfShouldCheckMasqueradesAsUndefined));

        // We act like we are "undefined" here.
        if (invert)
            truthy.append(jump());
        else
            done.append(jump());

        if (invert)
            done.append(isNotMasqueradesAsUndefined);
        else
            truthy.append(isNotMasqueradesAsUndefined);
    } else {
        if (invert)
            done.append(jump());
        else
            truthy.append(jump());
    }

    isString.link(this);
    truthy.append(branchPtr(invert ? Equal : NotEqual, value.payloadGPR(), TrustedImmPtr(jsEmptyString(vm))));
    done.append(jump());

    isBigInt.link(this);
    truthy.append(branchTest32(invert ? Zero : NonZero, Address(value.payloadGPR(), JSBigInt::offsetOfLength())));
    done.append(jump());

    notCell.link(this);
    auto notInt32 = branchIfNotInt32(value);
    truthy.append(branchTest32(invert ? Zero : NonZero, value.payloadGPR()));
    done.append(jump());

    notInt32.link(this);
    auto notDouble = branchIfNotDoubleKnownNotInt32(value);
#if USE(JSVALUE64)
    unboxDouble(value.gpr(), scratch, valueAsFPR);
#else
    unboxDouble(value, valueAsFPR, tempFPR);
#endif
    if (invert) {
        truthy.append(branchDoubleZeroOrNaN(valueAsFPR, tempFPR));
        done.append(jump());
    } else {
        done.append(branchDoubleZeroOrNaN(valueAsFPR, tempFPR));
        truthy.append(jump());
    }

    notDouble.link(this);
#if USE(JSVALUE64)
    truthy.append(branch64(invert ? NotEqual : Equal, value.gpr(), TrustedImm64(JSValue::encode(jsBoolean(true)))));
#else
    auto notBoolean = branchIfNotBoolean(value, InvalidGPRReg);
    if (invert)
        truthy.append(notBoolean);
    else
        done.append(notBoolean);
    truthy.append(branch32(invert ? Equal : NotEqual, value.payloadGPR(), TrustedImm32(0)));
#endif

    done.link(this);

    return truthy;
}

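// The current Wasm::Context instance lives either in a fast TLS slot (when
// Wasm::Context::useFastTLS() is true) or in a pinned register; these helpers
// abstract over the two placements.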
#if ENABLE(WEBASSEMBLY)
void AssemblyHelpers::loadWasmContextInstance(GPRReg dst)
{
#if ENABLE(FAST_TLS_JIT)
    if (Wasm::Context::useFastTLS()) {
        loadFromTLSPtr(fastTLSOffsetForKey(WTF_WASM_CONTEXT_KEY), dst);
        return;
    }
#endif
    move(Wasm::PinnedRegisterInfo::get().wasmContextInstancePointer, dst);
}

void AssemblyHelpers::storeWasmContextInstance(GPRReg src)
{
#if ENABLE(FAST_TLS_JIT)
    if (Wasm::Context::useFastTLS()) {
        storeToTLSPtr(src, fastTLSOffsetForKey(WTF_WASM_CONTEXT_KEY));
        return;
    }
#endif
    move(src, Wasm::PinnedRegisterInfo::get().wasmContextInstancePointer);
}

bool AssemblyHelpers::loadWasmContextInstanceNeedsMacroScratchRegister()
{
#if ENABLE(FAST_TLS_JIT)
    if (Wasm::Context::useFastTLS())
        return loadFromTLSPtrNeedsMacroScratchRegister();
#endif
    return false;
}

bool AssemblyHelpers::storeWasmContextInstanceNeedsMacroScratchRegister()
{
#if ENABLE(FAST_TLS_JIT)
    if (Wasm::Context::useFastTLS())
        return storeToTLSPtrNeedsMacroScratchRegister();
#endif
    return false;
}

#endif // ENABLE(WEBASSEMBLY)

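// Spills every GPR and FPR to a VM scratch buffer, publishes the buffer's
// active length so the GC keeps its contents alive across the call, invokes
// the given operation, and then restores all registers.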
void AssemblyHelpers::debugCall(VM& vm, V_DebugOperation_EPP function, void* argument)
{
    size_t scratchSize = sizeof(EncodedJSValue) * (GPRInfo::numberOfRegisters + FPRInfo::numberOfRegisters);
    ScratchBuffer* scratchBuffer = vm.scratchBufferForSize(scratchSize);
    EncodedJSValue* buffer = static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer());

    for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
#if USE(JSVALUE64)
        store64(GPRInfo::toRegister(i), buffer + i);
#else
        store32(GPRInfo::toRegister(i), buffer + i);
#endif
    }

    for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
        move(TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
        storeDouble(FPRInfo::toRegister(i), Address(GPRInfo::regT0));
    }

    // Tell GC mark phase how much of the scratch buffer is active during call.
    move(TrustedImmPtr(scratchBuffer->addressOfActiveLength()), GPRInfo::regT0);
    storePtr(TrustedImmPtr(scratchSize), GPRInfo::regT0);

#if CPU(X86_64) || CPU(ARM_THUMB2) || CPU(ARM64) || CPU(MIPS)
    move(TrustedImmPtr(buffer), GPRInfo::argumentGPR2);
    move(TrustedImmPtr(argument), GPRInfo::argumentGPR1);
    move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
    GPRReg scratch = selectScratchGPR(GPRInfo::argumentGPR0, GPRInfo::argumentGPR1, GPRInfo::argumentGPR2);
#else
#error "JIT not supported on this platform."
#endif
    prepareCallOperation(vm);
    move(TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(function)), scratch);
    call(scratch, OperationPtrTag);

    move(TrustedImmPtr(scratchBuffer->addressOfActiveLength()), GPRInfo::regT0);
    storePtr(TrustedImmPtr(nullptr), GPRInfo::regT0);

    for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
        move(TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
        loadDouble(Address(GPRInfo::regT0), FPRInfo::toRegister(i));
    }
    for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
#if USE(JSVALUE64)
        load64(buffer + i, GPRInfo::toRegister(i));
#else
        load32(buffer + i, GPRInfo::toRegister(i));
#endif
    }
}

void AssemblyHelpers::copyCalleeSavesToEntryFrameCalleeSavesBufferImpl(GPRReg calleeSavesBuffer)
{
#if NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
    addPtr(TrustedImm32(EntryFrame::calleeSaveRegistersBufferOffset()), calleeSavesBuffer);

    RegisterAtOffsetList* allCalleeSaves = RegisterSet::vmCalleeSaveRegisterOffsets();
    RegisterSet dontCopyRegisters = RegisterSet::stackRegisters();
    unsigned registerCount = allCalleeSaves->size();

    for (unsigned i = 0; i < registerCount; i++) {
        RegisterAtOffset entry = allCalleeSaves->at(i);
        if (dontCopyRegisters.get(entry.reg()))
            continue;
        if (entry.reg().isGPR())
            storePtr(entry.reg().gpr(), Address(calleeSavesBuffer, entry.offset()));
        else
            storeDouble(entry.reg().fpr(), Address(calleeSavesBuffer, entry.offset()));
    }
#else
    UNUSED_PARAM(calleeSavesBuffer);
#endif
}

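// Zeroes the stack between VM::lastStackTop and the current stack pointer,
// i.e. memory that a deeper call used earlier, so stale values there cannot
// later be mistaken for live ones; afterwards lastStackTop is reset to the
// current stack pointer.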
void AssemblyHelpers::sanitizeStackInline(VM& vm, GPRReg scratch)
{
    loadPtr(vm.addressOfLastStackTop(), scratch);
    Jump done = branchPtr(BelowOrEqual, stackPointerRegister, scratch);
    Label loop = label();
    storePtr(TrustedImmPtr(nullptr), scratch);
    addPtr(TrustedImm32(sizeof(void*)), scratch);
    branchPtr(Above, stackPointerRegister, scratch).linkTo(loop, this);
    done.link(this);
    move(stackPointerRegister, scratch);
    storePtr(scratch, vm.addressOfLastStackTop());
}

} // namespace JSC

#endif // ENABLE(JIT)