1/*
2 * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "LLIntData.h"
28
29#include "ArithProfile.h"
30#include "BytecodeConventions.h"
31#include "CodeBlock.h"
32#include "CodeType.h"
33#include "Instruction.h"
34#include "JSScope.h"
35#include "LLIntCLoop.h"
36#include "MaxFrameExtentForSlowPathCall.h"
37#include "Opcode.h"
38#include "PropertyOffset.h"
39#include "ShadowChicken.h"
40#include "WriteBarrier.h"
41
42#define STATIC_ASSERT(cond) static_assert(cond, "LLInt assumes " #cond)
43
44
45namespace JSC {
46
47namespace LLInt {
48
49
// Exception-handler "instruction streams": maxOpcodeLength + 1 bytes each, and
// every byte is filled (in initialize()) with the throw trampoline opcode, so a
// PC landing anywhere inside a patched-over instruction hits the trampoline.
uint8_t Data::s_exceptionInstructions[maxOpcodeLength + 1] = { };
uint8_t Data::s_wasmExceptionInstructions[maxOpcodeLength + 1] = { };
// Opcode-ID -> entry-point dispatch tables, populated by llint_entry() /
// wasm_entry() in initialize(). JS opcodes occupy indices [0, numOpcodeIDs);
// Wasm opcodes follow at [numOpcodeIDs, numOpcodeIDs + numWasmOpcodeIDs).
// The Wide16/Wide32 tables hold the 16-bit / 32-bit wide-operand variants.
Opcode g_opcodeMap[numOpcodeIDs + numWasmOpcodeIDs] = { };
Opcode g_opcodeMapWide16[numOpcodeIDs + numWasmOpcodeIDs] = { };
Opcode g_opcodeMapWide32[numOpcodeIDs + numWasmOpcodeIDs] = { };
55
#if !ENABLE(C_LOOP)
// Entry point defined by the offline-assembled LowLevelInterpreter. Given the
// three dispatch tables (narrow, wide16, wide32), it fills them with the JS
// opcode handler addresses. NOTE(review): the exact population behavior lives
// in the generated .asm, not visible in this file — confirm there.
extern "C" void llint_entry(void*, void*, void*);

#if ENABLE(WEBASSEMBLY)
// Wasm counterpart of llint_entry; initialize() points it at the tails of the
// same three tables (starting at index numOpcodeIDs).
extern "C" void wasm_entry(void*, void*, void*);
#endif // ENABLE(WEBASSEMBLY)

#endif // !ENABLE(C_LOOP)
64
65void initialize()
66{
67#if ENABLE(C_LOOP)
68 CLoop::initialize();
69
70#else // !ENABLE(C_LOOP)
71 llint_entry(&g_opcodeMap, &g_opcodeMapWide16, &g_opcodeMapWide32);
72
73#if ENABLE(WEBASSEMBLY)
74 wasm_entry(&g_opcodeMap[numOpcodeIDs], &g_opcodeMapWide16[numOpcodeIDs], &g_opcodeMapWide32[numOpcodeIDs]);
75#endif // ENABLE(WEBASSEMBLY)
76
77 for (int i = 0; i < numOpcodeIDs + numWasmOpcodeIDs; ++i) {
78 g_opcodeMap[i] = tagCodePtr(g_opcodeMap[i], BytecodePtrTag);
79 g_opcodeMapWide16[i] = tagCodePtr(g_opcodeMapWide16[i], BytecodePtrTag);
80 g_opcodeMapWide32[i] = tagCodePtr(g_opcodeMapWide32[i], BytecodePtrTag);
81 }
82
83 ASSERT(llint_throw_from_slow_path_trampoline < UINT8_MAX);
84 for (int i = 0; i < maxOpcodeLength + 1; ++i) {
85 Data::s_exceptionInstructions[i] = llint_throw_from_slow_path_trampoline;
86 Data::s_wasmExceptionInstructions[i] = wasm_throw_from_slow_path_trampoline;
87 }
88#endif // ENABLE(C_LOOP)
89}
90
IGNORE_WARNINGS_BEGIN("missing-noreturn")
// Startup-time sanity checks: verifies the frame-layout and type-layout
// constants that LowLevelInterpreter.asm hard-codes. Pure assertions — no
// observable effect in release builds.
void Data::performAssertions(VM& vm)
{
    UNUSED_PARAM(vm);

    // Assertions to match LowLevelInterpreter.asm. If you change any of this code, be
    // prepared to change LowLevelInterpreter.asm as well!!

#if USE(JSVALUE64)
    const ptrdiff_t CallFrameHeaderSlots = 5;
#else // USE(JSVALUE64) // i.e. 32-bit version
    const ptrdiff_t CallFrameHeaderSlots = 4;
#endif
    const ptrdiff_t MachineRegisterSize = sizeof(CPURegister);
    const ptrdiff_t SlotSize = 8; // One Register (stack slot) is 8 bytes on all platforms.

    STATIC_ASSERT(sizeof(Register) == SlotSize);
    STATIC_ASSERT(CallFrame::headerSizeInRegisters == CallFrameHeaderSlots);

    // Header layout check: callerFrame at offset 0, then returnPC, codeBlock,
    // callee, argumentCount — each assertion chains off the previous offset.
    ASSERT(!CallFrame::callerFrameOffset());
    STATIC_ASSERT(CallerFrameAndPC::sizeInRegisters == (MachineRegisterSize * 2) / SlotSize);
    ASSERT(CallFrame::returnPCOffset() == CallFrame::callerFrameOffset() + MachineRegisterSize);
    ASSERT(CallFrameSlot::codeBlock * sizeof(Register) == CallFrame::returnPCOffset() + MachineRegisterSize);
    STATIC_ASSERT(CallFrameSlot::callee * sizeof(Register) == CallFrameSlot::codeBlock * sizeof(Register) + SlotSize);
    STATIC_ASSERT(CallFrameSlot::argumentCount * sizeof(Register) == CallFrameSlot::callee * sizeof(Register) + SlotSize);
    STATIC_ASSERT(CallFrameSlot::thisArgument * sizeof(Register) == CallFrameSlot::argumentCount * sizeof(Register) + SlotSize);
    STATIC_ASSERT(CallFrame::headerSizeInRegisters == CallFrameSlot::thisArgument);

    ASSERT(CallFrame::argumentOffsetIncludingThis(0) == CallFrameSlot::thisArgument);

    // Placement of the JSValue tag/payload halves within an 8-byte slot
    // depends on endianness; the asm uses these fixed offsets directly.
#if CPU(BIG_ENDIAN)
    STATIC_ASSERT(TagOffset == 0);
    STATIC_ASSERT(PayloadOffset == 4);
#else
    STATIC_ASSERT(TagOffset == 4);
    STATIC_ASSERT(PayloadOffset == 0);
#endif

    // Callee-save virtual-register budget must match what the asm reserves
    // for each platform configuration.
#if ENABLE(C_LOOP)
    ASSERT(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters() == 1);
#elif USE(JSVALUE32_64)
    ASSERT(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters() == 1);
#elif (CPU(X86_64) && !OS(WINDOWS)) || CPU(ARM64)
    ASSERT(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters() == 4);
#elif (CPU(X86_64) && OS(WINDOWS))
    ASSERT(CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters() == 4);
#endif

    // WriteBarrier<JSCell>::slot() must be at offset 0 of the barrier object,
    // so the asm can treat a WriteBarrier as a plain cell pointer.
    ASSERT(!(reinterpret_cast<ptrdiff_t>((reinterpret_cast<WriteBarrier<JSCell>*>(0x4000)->slot())) - 0x4000));

    // FIXME: make these assertions less horrible.
#if !ASSERT_DISABLED
    // Probe WTF::Vector's internal layout: the buffer pointer is expected at
    // offset 0 and the size field at byte offset sizeof(void*) + 4 —
    // presumably what asm-side Vector reads rely on; confirm against Vector.h.
    Vector<int> testVector;
    testVector.resize(42);
    ASSERT(bitwise_cast<uint32_t*>(&testVector)[sizeof(void*)/sizeof(uint32_t) + 1] == 42);
    ASSERT(bitwise_cast<int**>(&testVector)[0] == testVector.begin());
#endif

    // ArithProfile bit patterns: each observed-type transition must yield
    // exactly the canned bit mask the asm fast paths write and compare.
    {
        UnaryArithProfile arithProfile;
        arithProfile.argSawInt32();
        ASSERT(arithProfile.bits() == UnaryArithProfile::observedIntBits());
        ASSERT(arithProfile.argObservedType().isOnlyInt32());
    }
    {
        UnaryArithProfile arithProfile;
        arithProfile.argSawNumber();
        ASSERT(arithProfile.bits() == UnaryArithProfile::observedNumberBits());
        ASSERT(arithProfile.argObservedType().isOnlyNumber());
    }

    // Same for the binary profiles: all four lhs/rhs int/number combinations.
    {
        BinaryArithProfile arithProfile;
        arithProfile.lhsSawInt32();
        arithProfile.rhsSawInt32();
        ASSERT(arithProfile.bits() == BinaryArithProfile::observedIntIntBits());
        ASSERT(arithProfile.lhsObservedType().isOnlyInt32());
        ASSERT(arithProfile.rhsObservedType().isOnlyInt32());
    }
    {
        BinaryArithProfile arithProfile;
        arithProfile.lhsSawNumber();
        arithProfile.rhsSawInt32();
        ASSERT(arithProfile.bits() == BinaryArithProfile::observedNumberIntBits());
        ASSERT(arithProfile.lhsObservedType().isOnlyNumber());
        ASSERT(arithProfile.rhsObservedType().isOnlyInt32());
    }
    {
        BinaryArithProfile arithProfile;
        arithProfile.lhsSawNumber();
        arithProfile.rhsSawNumber();
        ASSERT(arithProfile.bits() == BinaryArithProfile::observedNumberNumberBits());
        ASSERT(arithProfile.lhsObservedType().isOnlyNumber());
        ASSERT(arithProfile.rhsObservedType().isOnlyNumber());
    }
    {
        BinaryArithProfile arithProfile;
        arithProfile.lhsSawInt32();
        arithProfile.rhsSawNumber();
        ASSERT(arithProfile.bits() == BinaryArithProfile::observedIntNumberBits());
        ASSERT(arithProfile.lhsObservedType().isOnlyInt32());
        ASSERT(arithProfile.rhsObservedType().isOnlyNumber());
    }
}
IGNORE_WARNINGS_END
196
197} } // namespace JSC::LLInt
198