1 | /* |
2 | * Copyright (C) 2011-2018 Apple Inc. All rights reserved. |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without |
5 | * modification, are permitted provided that the following conditions |
6 | * are met: |
7 | * 1. Redistributions of source code must retain the above copyright |
8 | * notice, this list of conditions and the following disclaimer. |
9 | * 2. Redistributions in binary form must reproduce the above copyright |
10 | * notice, this list of conditions and the following disclaimer in the |
11 | * documentation and/or other materials provided with the distribution. |
12 | * |
13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY |
14 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
15 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR |
17 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, |
18 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
19 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR |
20 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY |
21 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
22 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
23 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
24 | */ |
25 | |
26 | #pragma once |
27 | |
28 | #include "GPRInfo.h" |
29 | #include "Instruction.h" |
30 | #include "JSCJSValue.h" |
31 | #include "MacroAssemblerCodeRef.h" |
32 | #include "Opcode.h" |
33 | |
34 | namespace JSC { |
35 | |
36 | class VM; |
37 | |
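// Under the C loop interpreter an LLInt "code pointer" is just an OpcodeID that the loop
// dispatches on; with the assembly LLInt it is a real function pointer into the generated
// interpreter code.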
38 | #if ENABLE(C_LOOP) |
39 | typedef OpcodeID LLIntCode; |
40 | #else |
41 | typedef void (*LLIntCode)(); |
42 | #endif |
43 | |
44 | namespace LLInt { |
45 | |
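// Dispatch tables filled in by initialize(). Each maps an OpcodeID (JS bytecodes followed
// by Wasm opcodes) to its handler's entry point; the Wide16/Wide32 tables hold the
// handlers for the 16-bit and 32-bit wide operand encodings.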
46 | extern "C" JS_EXPORT_PRIVATE Opcode g_opcodeMap[numOpcodeIDs + numWasmOpcodeIDs]; |
47 | extern "C" JS_EXPORT_PRIVATE Opcode g_opcodeMapWide16[numOpcodeIDs + numWasmOpcodeIDs]; |
48 | extern "C" JS_EXPORT_PRIVATE Opcode g_opcodeMapWide32[numOpcodeIDs + numWasmOpcodeIDs]; |
49 | |
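// Data holds the LLInt's static per-process state: the exception instruction buffers
// below, which initialize() fills in, and performAssertions(), which checks that the
// constants and offsets assumed by the offline assembler match their C++ definitions.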
class Data {
52 | public: |
53 | static void performAssertions(VM&); |
54 | |
55 | private: |
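    // Instruction buffers used as a stand-in PC when an exception is thrown; initialize()
    // fills them so that any offset into a buffer dispatches to the exception-handling
    // trampoline.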
56 | static uint8_t s_exceptionInstructions[maxOpcodeLength + 1]; |
57 | static uint8_t s_wasmExceptionInstructions[maxOpcodeLength + 1]; |
58 | |
59 | friend void initialize(); |
60 | |
61 | friend Instruction* exceptionInstructions(); |
62 | friend Instruction* wasmExceptionInstructions(); |
63 | friend Opcode* opcodeMap(); |
64 | friend Opcode* opcodeMapWide16(); |
65 | friend Opcode* opcodeMapWide32(); |
66 | friend Opcode getOpcode(OpcodeID); |
67 | friend Opcode getOpcodeWide16(OpcodeID); |
68 | friend Opcode getOpcodeWide32(OpcodeID); |
69 | template<PtrTag tag> friend MacroAssemblerCodePtr<tag> getCodePtr(OpcodeID); |
70 | template<PtrTag tag> friend MacroAssemblerCodePtr<tag> getWide16CodePtr(OpcodeID); |
71 | template<PtrTag tag> friend MacroAssemblerCodePtr<tag> getWide32CodePtr(OpcodeID); |
72 | template<PtrTag tag> friend MacroAssemblerCodeRef<tag> getCodeRef(OpcodeID); |
73 | }; |
74 | |
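// Called once during JSC initialization; populates the dispatch tables and the exception
// instruction buffers.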
75 | void initialize(); |
76 | |
77 | inline Instruction* exceptionInstructions() |
78 | { |
79 | return reinterpret_cast<Instruction*>(Data::s_exceptionInstructions); |
80 | } |
81 | |
82 | inline Instruction* wasmExceptionInstructions() |
83 | { |
84 | return bitwise_cast<Instruction*>(Data::s_wasmExceptionInstructions); |
85 | } |
86 | |
87 | inline Opcode* opcodeMap() |
88 | { |
89 | return g_opcodeMap; |
90 | } |
91 | |
92 | inline Opcode* opcodeMapWide16() |
93 | { |
94 | return g_opcodeMapWide16; |
95 | } |
96 | |
97 | inline Opcode* opcodeMapWide32() |
98 | { |
99 | return g_opcodeMapWide32; |
100 | } |
101 | |
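// With computed-goto opcodes an Opcode is the address of the handler's label and must be
// looked up in the dispatch table; otherwise Opcode and OpcodeID share the same numbering
// and a cast suffices.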
102 | inline Opcode getOpcode(OpcodeID id) |
103 | { |
104 | #if ENABLE(COMPUTED_GOTO_OPCODES) |
105 | return g_opcodeMap[id]; |
106 | #else |
107 | return static_cast<Opcode>(id); |
108 | #endif |
109 | } |
110 | |
111 | inline Opcode getOpcodeWide16(OpcodeID id) |
112 | { |
113 | #if ENABLE(COMPUTED_GOTO_OPCODES) |
114 | return g_opcodeMapWide16[id]; |
115 | #else |
116 | UNUSED_PARAM(id); |
117 | RELEASE_ASSERT_NOT_REACHED(); |
118 | #endif |
119 | } |
120 | |
121 | inline Opcode getOpcodeWide32(OpcodeID id) |
122 | { |
123 | #if ENABLE(COMPUTED_GOTO_OPCODES) |
124 | return g_opcodeMapWide32[id]; |
125 | #else |
126 | UNUSED_PARAM(id); |
127 | RELEASE_ASSERT_NOT_REACHED(); |
128 | #endif |
129 | } |
130 | |
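// The dispatch-table entries are tagged as BytecodePtrTag; retagCodePtr re-tags them
// (re-signing under pointer authentication where that applies) for the PtrTag the caller
// requested before wrapping them in a MacroAssemblerCodePtr.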
131 | template<PtrTag tag> |
132 | ALWAYS_INLINE MacroAssemblerCodePtr<tag> getCodePtr(OpcodeID opcodeID) |
133 | { |
134 | void* address = reinterpret_cast<void*>(getOpcode(opcodeID)); |
135 | address = retagCodePtr<BytecodePtrTag, tag>(address); |
136 | return MacroAssemblerCodePtr<tag>::createFromExecutableAddress(address); |
137 | } |
138 | |
139 | template<PtrTag tag> |
140 | ALWAYS_INLINE MacroAssemblerCodePtr<tag> getWide16CodePtr(OpcodeID opcodeID) |
141 | { |
142 | void* address = reinterpret_cast<void*>(getOpcodeWide16(opcodeID)); |
143 | address = retagCodePtr<BytecodePtrTag, tag>(address); |
144 | return MacroAssemblerCodePtr<tag>::createFromExecutableAddress(address); |
145 | } |
146 | |
147 | template<PtrTag tag> |
148 | ALWAYS_INLINE MacroAssemblerCodePtr<tag> getWide32CodePtr(OpcodeID opcodeID) |
149 | { |
150 | void* address = reinterpret_cast<void*>(getOpcodeWide32(opcodeID)); |
151 | address = retagCodePtr<BytecodePtrTag, tag>(address); |
152 | return MacroAssemblerCodePtr<tag>::createFromExecutableAddress(address); |
153 | } |
154 | |
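// Picks the handler matching the instruction's encoding width: narrow, wide16, or wide32.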
155 | template<PtrTag tag> |
156 | ALWAYS_INLINE MacroAssemblerCodePtr<tag> getCodePtr(const Instruction& instruction) |
157 | { |
158 | if (instruction.isWide16()) |
159 | return getWide16CodePtr<tag>(instruction.opcodeID()); |
160 | if (instruction.isWide32()) |
161 | return getWide32CodePtr<tag>(instruction.opcodeID()); |
162 | return getCodePtr<tag>(instruction.opcodeID()); |
163 | } |
164 | |
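// LLInt handlers are not backed by an ExecutableMemoryHandle, so the ref is created as
// self-managed: it wraps the code pointer without owning any memory.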
165 | template<PtrTag tag> |
166 | ALWAYS_INLINE MacroAssemblerCodeRef<tag> getCodeRef(OpcodeID opcodeID) |
167 | { |
168 | return MacroAssemblerCodeRef<tag>::createSelfManagedCodeRef(getCodePtr<tag>(opcodeID)); |
169 | } |
170 | |
171 | #if ENABLE(JIT) |
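// Returns the handler as a plain C function pointer. The assertion documents that only
// opcode IDs beyond the JS bytecode range (e.g. Wasm opcodes) are expected here.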
172 | template<PtrTag tag> |
173 | ALWAYS_INLINE LLIntCode getCodeFunctionPtr(OpcodeID opcodeID) |
174 | { |
175 | ASSERT(opcodeID >= NUMBER_OF_BYTECODE_IDS); |
176 | #if COMPILER(MSVC) |
177 | return reinterpret_cast<LLIntCode>(getCodePtr<tag>(opcodeID).executableAddress()); |
178 | #else |
179 | return reinterpret_cast<LLIntCode>(getCodePtr<tag>(opcodeID).template executableAddress()); |
180 | #endif |
181 | } |
182 | |
183 | #else |
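// In non-JIT builds these overloads hand back the raw dispatch-table entry (a handler
// label address, or the OpcodeID itself under the C loop) as a void*.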
184 | ALWAYS_INLINE void* getCodePtr(OpcodeID id) |
185 | { |
186 | return reinterpret_cast<void*>(getOpcode(id)); |
187 | } |
188 | |
189 | ALWAYS_INLINE void* getWide16CodePtr(OpcodeID id) |
190 | { |
191 | return reinterpret_cast<void*>(getOpcodeWide16(id)); |
192 | } |
193 | |
194 | ALWAYS_INLINE void* getWide32CodePtr(OpcodeID id) |
195 | { |
196 | return reinterpret_cast<void*>(getOpcodeWide32(id)); |
197 | } |
198 | #endif |
199 | |
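// Overload for native glue helpers declared as ordinary C functions returning
// EncodedJSValue; it just returns the helper's address.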
200 | ALWAYS_INLINE void* getCodePtr(JSC::EncodedJSValue glueHelper()) |
201 | { |
202 | return bitwise_cast<void*>(glueHelper); |
203 | } |
204 | |
205 | #if ENABLE(JIT) |
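// The GPRs that the LLInt dedicates to its interpreter state (bytecode PC, metadata
// table, and bytecode base pointer), chosen per architecture. Code that interoperates
// with the LLInt must agree with the offline assembler on these assignments.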
206 | struct Registers { |
207 | static constexpr GPRReg pcGPR = GPRInfo::regT4; |
208 | |
209 | #if CPU(X86_64) && !OS(WINDOWS) |
210 | static constexpr GPRReg metadataTableGPR = GPRInfo::regCS1; |
211 | static constexpr GPRReg pbGPR = GPRInfo::regCS2; |
212 | #elif CPU(X86_64) && OS(WINDOWS) |
213 | static constexpr GPRReg metadataTableGPR = GPRInfo::regCS3; |
214 | static constexpr GPRReg pbGPR = GPRInfo::regCS4; |
215 | #elif CPU(ARM64) |
216 | static constexpr GPRReg metadataTableGPR = GPRInfo::regCS6; |
217 | static constexpr GPRReg pbGPR = GPRInfo::regCS7; |
218 | #elif CPU(MIPS) || CPU(ARM_THUMB2) |
219 | static constexpr GPRReg metadataTableGPR = GPRInfo::regCS0; |
220 | #endif |
221 | }; |
222 | #endif |
223 | |
224 | } } // namespace JSC::LLInt |
225 | |