1/*
2 * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#pragma once
27
28#if ENABLE(DFG_JIT)
29
30#include "CCallHelpers.h"
31#include "CodeBlock.h"
32#include "DFGDisassembler.h"
33#include "DFGGraph.h"
34#include "DFGInlineCacheWrapper.h"
35#include "DFGJITCode.h"
36#include "DFGOSRExitCompilationInfo.h"
37#include "GPRInfo.h"
38#include "HandlerInfo.h"
39#include "JITCode.h"
40#include "JITInlineCacheGenerator.h"
41#include "LinkBuffer.h"
42#include "MacroAssembler.h"
43#include "PCToCodeOriginMap.h"
44
45namespace JSC {
46
47class AbstractSamplingCounter;
48class CodeBlock;
49class VM;
50
51namespace DFG {
52
53class JITCodeGenerator;
54class NodeToRegisterMap;
55class OSRExitJumpPlaceholder;
56class SlowPathGenerator;
57class SpeculativeJIT;
58class SpeculationRecovery;
59
60struct EntryLocation;
61struct OSRExit;
62
63// === CallLinkRecord ===
64//
65// A record of a call out from JIT code that needs linking to a helper function.
66// Every CallLinkRecord contains a reference to the call instruction & the function
67// that it needs to be linked to.
68struct CallLinkRecord {
69 CallLinkRecord(MacroAssembler::Call call, FunctionPtr<OperationPtrTag> function)
70 : m_call(call)
71 , m_function(function)
72 {
73 }
74
75 MacroAssembler::Call m_call;
76 FunctionPtr<OperationPtrTag> m_function;
77};
78
// === JITCompiler ===
//
// DFG::JITCompiler is responsible for generating JIT code from the dataflow graph.
// It does so by delegating to the speculative & non-speculative JITs, which
// generate to a MacroAssembler (which the JITCompiler owns through an inheritance
// relationship). The JITCompiler holds references to information required during
// compilation, and also records information used in linking (e.g. a list of all
// calls to be linked).
class JITCompiler : public CCallHelpers {
public:
    JITCompiler(Graph& dfg);
    ~JITCompiler();

    // Entry points for code generation over the whole graph.
    void compile();
    void compileFunction();

    // Accessors for properties.
    Graph& graph() { return m_graph; }

    // Methods to set labels for the disassembler.
    void setStartOfCode()
    {
        // Seed the PC->CodeOrigin map at the first instruction; the
        // disassembler label is only recorded when disassembly is enabled
        // (m_disassembler is expected to be null in the common case).
        m_pcToCodeOriginMapBuilder.appendItem(labelIgnoringWatchpoints(), CodeOrigin(0, nullptr));
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setStartOfCode(labelIgnoringWatchpoints());
    }

    void setForBlockIndex(BlockIndex blockIndex)
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setForBlockIndex(blockIndex, labelIgnoringWatchpoints());
    }

    void setForNode(Node* node)
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setForNode(node, labelIgnoringWatchpoints());
    }

    void setEndOfMainPath();
    void setEndOfCode();

    // Registers codeOrigin with the JITCode's common data and returns the
    // index identifying this call site.
    CallSiteIndex addCallSite(CodeOrigin codeOrigin)
    {
        return m_jitCode->common.addCodeOrigin(codeOrigin);
    }

    // Convenience: allocate a call-site index for codeOrigin and emit the
    // store that publishes it in the current call frame.
    CallSiteIndex emitStoreCodeOrigin(CodeOrigin codeOrigin)
    {
        CallSiteIndex callSite = addCallSite(codeOrigin);
        emitStoreCallSiteIndex(callSite);
        return callSite;
    }

    void emitStoreCallSiteIndex(CallSiteIndex callSite)
    {
        // The tag word of the argument-count frame slot doubles as the
        // call-site-index slot.
        store32(TrustedImm32(callSite.bits()), tagFor(static_cast<VirtualRegister>(CallFrameSlot::argumentCount)));
    }

    // Add a call out from JIT code, without an exception check.
    // The call is emitted with OperationPtrTag and recorded in m_calls so it
    // can be linked to `function` later.
    Call appendCall(const FunctionPtr<CFunctionPtrTag> function)
    {
        Call functionCall = call(OperationPtrTag);
        m_calls.append(CallLinkRecord(functionCall, function.retagged<OperationPtrTag>()));
        return functionCall;
    }

    void exceptionCheck();

    // Like exceptionCheck(), but the recorded jump targets the handler path
    // that unwinds the call frame before rethrowing.
    void exceptionCheckWithCallFrameRollback()
    {
        m_exceptionChecksWithCallFrameRollback.append(emitExceptionCheck(*vm()));
    }

    // Add a call out from JIT code, with a fast exception check that tests if the return value is zero.
    void fastExceptionCheck()
    {
        callExceptionFuzz(*vm());
        m_exceptionChecks.append(branchTestPtr(Zero, GPRInfo::returnValueGPR));
    }

    // Allocates a fresh OSRExitCompilationInfo seeded with jumpsToFail and
    // returns a reference to it. m_exitCompilationInfo is a SegmentedVector,
    // so the reference stays valid as further exits are appended.
    OSRExitCompilationInfo& appendExitInfo(MacroAssembler::JumpList jumpsToFail = MacroAssembler::JumpList())
    {
        OSRExitCompilationInfo info;
        info.m_failureJumps = jumpsToFail;
        m_exitCompilationInfo.append(info);
        return m_exitCompilationInfo.last();
    }

#if USE(JSVALUE32_64)
    void* addressOfDoubleConstant(Node*);
#endif

    // The add* methods below record inline-cache generators (paired with
    // their slow paths) and call sites produced during code generation; they
    // are consumed at link time.
    void addGetById(const JITGetByIdGenerator& gen, SlowPathGenerator* slowPath)
    {
        m_getByIds.append(InlineCacheWrapper<JITGetByIdGenerator>(gen, slowPath));
    }

    void addGetByIdWithThis(const JITGetByIdWithThisGenerator& gen, SlowPathGenerator* slowPath)
    {
        m_getByIdsWithThis.append(InlineCacheWrapper<JITGetByIdWithThisGenerator>(gen, slowPath));
    }

    void addPutById(const JITPutByIdGenerator& gen, SlowPathGenerator* slowPath)
    {
        m_putByIds.append(InlineCacheWrapper<JITPutByIdGenerator>(gen, slowPath));
    }

    void addInstanceOf(const JITInstanceOfGenerator& gen, SlowPathGenerator* slowPath)
    {
        m_instanceOfs.append(InlineCacheWrapper<JITInstanceOfGenerator>(gen, slowPath));
    }

    void addInById(const JITInByIdGenerator& gen, SlowPathGenerator* slowPath)
    {
        m_inByIds.append(InlineCacheWrapper<JITInByIdGenerator>(gen, slowPath));
    }

    void addJSCall(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo* info)
    {
        m_jsCalls.append(JSCallRecord(fastCall, slowCall, targetToCheck, info));
    }

    void addJSDirectCall(Call call, Label slowPath, CallLinkInfo* info)
    {
        m_jsDirectCalls.append(JSDirectCallRecord(call, slowPath, info));
    }

    void addJSDirectTailCall(PatchableJump patchableJump, Call call, Label slowPath, CallLinkInfo* info)
    {
        m_jsDirectTailCalls.append(JSDirectTailCallRecord(patchableJump, call, slowPath, info));
    }

    // Registers `target` with the compilation plan's weak-reference set so the
    // compiled code's validity can be tied to the cell's liveness.
    void addWeakReference(JSCell* target)
    {
        m_graph.m_plan.weakReferences().addLazily(target);
    }

    void addWeakReferences(const StructureSet& structureSet)
    {
        for (unsigned i = structureSet.size(); i--;)
            addWeakReference(structureSet[i]);
    }

    // Emits a pointer comparison against weakPtr and registers the weak
    // reference in the same step.
    template<typename T>
    Jump branchWeakPtr(RelationalCondition cond, T left, JSCell* weakPtr)
    {
        Jump result = branchPtr(cond, left, TrustedImmPtr(weakPtr));
        addWeakReference(weakPtr);
        return result;
    }

    // Compares against a structure. Unlike branchWeakPtr, no addWeakReference
    // is made here — presumably a RegisteredStructure is already tracked by
    // the plan (TODO confirm against RegisteredStructure's contract).
    // On 64-bit the comparison uses the 32-bit structure ID; on 32-bit it
    // compares the Structure pointer itself.
    template<typename T>
    Jump branchWeakStructure(RelationalCondition cond, T left, RegisteredStructure weakStructure)
    {
        Structure* structure = weakStructure.get();
#if USE(JSVALUE64)
        Jump result = branch32(cond, left, TrustedImm32(structure->id()));
        return result;
#else
        return branchPtr(cond, left, TrustedImmPtr(structure));
#endif
    }

    void noticeOSREntry(BasicBlock&, JITCompiler::Label blockHead, LinkBuffer&);
    void noticeCatchEntrypoint(BasicBlock&, JITCompiler::Label blockHead, LinkBuffer&, Vector<FlushFormat>&& argumentFormats);

    RefPtr<JITCode> jitCode() { return m_jitCode; }

    Vector<Label>& blockHeads() { return m_blockHeads; }

    CallSiteIndex recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(const CodeOrigin&, unsigned eventStreamIndex);

    PCToCodeOriginMapBuilder& pcToCodeOriginMapBuilder() { return m_pcToCodeOriginMapBuilder; }

    VM* vm() { return &m_graph.m_vm; }

private:
    friend class OSRExitJumpPlaceholder;

    // Internal implementation to compile.
    void compileEntry();
    void compileSetupRegistersForEntry();
    void compileEntryExecutionFlag();
    void compileBody();
    void link(LinkBuffer&);

    void exitSpeculativeWithOSR(const OSRExit&, SpeculationRecovery*);
    void compileExceptionHandlers();
    void linkOSRExits();
    void disassemble(LinkBuffer&);

    void appendExceptionHandlingOSRExit(ExitKind, unsigned eventStreamIndex, CodeOrigin, HandlerInfo* exceptionHandler, CallSiteIndex, MacroAssembler::JumpList jumpsToFail = MacroAssembler::JumpList());

    void makeCatchOSREntryBuffer();

    // The dataflow graph currently being generated.
    Graph& m_graph;

    // Non-null only when disassembly is enabled; see setStartOfCode() et al.
    std::unique_ptr<Disassembler> m_disassembler;

    RefPtr<JITCode> m_jitCode;

    // Vector of calls out from JIT code that still need linking to their
    // target functions, plus the jump lists that feed the exception handlers.
    Vector<CallLinkRecord> m_calls;
    JumpList m_exceptionChecks;
    JumpList m_exceptionChecksWithCallFrameRollback;

    // Labels marking the heads of basic blocks (see blockHeads()).
    Vector<Label> m_blockHeads;


    // Record of a JS call IC: fast and slow call instructions plus the
    // data label for the callee check. Both calls must be near calls.
    struct JSCallRecord {
        JSCallRecord(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo* info)
            : fastCall(fastCall)
            , slowCall(slowCall)
            , targetToCheck(targetToCheck)
            , info(info)
        {
            ASSERT(fastCall.isFlagSet(Call::Near));
            ASSERT(slowCall.isFlagSet(Call::Near));
        }

        Call fastCall;
        Call slowCall;
        DataLabelPtr targetToCheck;
        CallLinkInfo* info;
    };

    // Record of a direct (known-callee) call and the label of its slow path.
    struct JSDirectCallRecord {
        JSDirectCallRecord(Call call, Label slowPath, CallLinkInfo* info)
            : call(call)
            , slowPath(slowPath)
            , info(info)
        {
            ASSERT(call.isFlagSet(Call::Near));
        }

        Call call;
        Label slowPath;
        CallLinkInfo* info;
    };

    // Like JSDirectCallRecord, but for tail calls: also carries the patchable
    // jump, and the call must additionally be flagged as a tail call.
    struct JSDirectTailCallRecord {
        JSDirectTailCallRecord(PatchableJump patchableJump, Call call, Label slowPath, CallLinkInfo* info)
            : patchableJump(patchableJump)
            , call(call)
            , slowPath(slowPath)
            , info(info)
        {
            ASSERT(call.isFlagSet(Call::Near) && call.isFlagSet(Call::Tail));
        }

        PatchableJump patchableJump;
        Call call;
        Label slowPath;
        CallLinkInfo* info;
    };


    // Inline caches and call sites accumulated during code generation,
    // consumed when linking.
    Vector<InlineCacheWrapper<JITGetByIdGenerator>, 4> m_getByIds;
    Vector<InlineCacheWrapper<JITGetByIdWithThisGenerator>, 4> m_getByIdsWithThis;
    Vector<InlineCacheWrapper<JITPutByIdGenerator>, 4> m_putByIds;
    Vector<InlineCacheWrapper<JITInByIdGenerator>, 4> m_inByIds;
    Vector<InlineCacheWrapper<JITInstanceOfGenerator>, 4> m_instanceOfs;
    Vector<JSCallRecord, 4> m_jsCalls;
    Vector<JSDirectCallRecord, 4> m_jsDirectCalls;
    Vector<JSDirectTailCallRecord, 4> m_jsDirectTailCalls;
    // SegmentedVector: appendExitInfo() hands out references into this.
    SegmentedVector<OSRExitCompilationInfo, 4> m_exitCompilationInfo;
    Vector<Vector<Label>> m_exitSiteLabels;

    // Links an OSR exit to the baseline exception handler that covers it.
    struct ExceptionHandlingOSRExitInfo {
        OSRExitCompilationInfo& exitInfo;
        HandlerInfo baselineExceptionHandler;
        CallSiteIndex callSiteIndex;
    };
    Vector<ExceptionHandlingOSRExitInfo> m_exceptionHandlerOSRExitCallSites;

    std::unique_ptr<SpeculativeJIT> m_speculative;
    PCToCodeOriginMapBuilder m_pcToCodeOriginMapBuilder;
};
363
364} } // namespace JSC::DFG
365
366#endif
367