/*
 * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "PolymorphicCallStubRoutine.h"

#if ENABLE(JIT)

#include "CallLinkInfo.h"
#include "CodeBlock.h"
#include "FullCodeOrigin.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"

namespace JSC {

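// A PolymorphicCallNode sits on a callee CodeBlock's list of incoming polymorphic
// calls (see linkIncomingPolymorphicCall below). Removing ourselves on destruction
// keeps that list from holding a dangling node.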
PolymorphicCallNode::~PolymorphicCallNode()
{
    if (isOnList())
        remove();
}

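// Unlink the associated CallLinkInfo, if it is still alive, and detach this node
// from its incoming-call list.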
void PolymorphicCallNode::unlink(VM& vm)
{
    if (m_callLinkInfo) {
        if (Options::dumpDisassembly())
            dataLog("Unlinking polymorphic call at ", m_callLinkInfo->callReturnLocation(), ", ", m_callLinkInfo->codeOrigin(), "\n");

        m_callLinkInfo->unlink(vm);
    }

    if (isOnList())
        remove();
}

void PolymorphicCallNode::clearCallLinkInfo()
{
    m_callLinkInfo = nullptr;
}

void PolymorphicCallCase::dump(PrintStream& out) const
{
    out.print("<variant = ", m_variant, ", codeBlock = ", pointerDump(m_codeBlock), ">");
}

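// Record each case's callee cell behind a write barrier so the GC sees the
// reference, and register this stub with every case's CodeBlock so we get
// unlinked when a callee goes away. fastCounts, when non-null, is the per-case
// profiling counter array that edges() reads; per the comment in hasEdges(),
// the FTL omits it.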
PolymorphicCallStubRoutine::PolymorphicCallStubRoutine(
    const MacroAssemblerCodeRef<JITStubRoutinePtrTag>& codeRef, VM& vm, const JSCell* owner, CallFrame* callerFrame,
    CallLinkInfo& info, const Vector<PolymorphicCallCase>& cases,
    UniqueArray<uint32_t>&& fastCounts)
    : GCAwareJITStubRoutine(codeRef, vm)
    , m_fastCounts(WTFMove(fastCounts))
{
    for (PolymorphicCallCase callCase : cases) {
        m_variants.append(WriteBarrier<JSCell>(vm, owner, callCase.variant().rawCalleeCell()));
        if (shouldDumpDisassemblyFor(callerFrame->codeBlock()))
            dataLog("Linking polymorphic call in ", FullCodeOrigin(callerFrame->codeBlock(), callerFrame->codeOrigin()), " to ", callCase.variant(), ", codeBlock = ", pointerDump(callCase.codeBlock()), "\n");
        if (CodeBlock* codeBlock = callCase.codeBlock())
            codeBlock->linkIncomingPolymorphicCall(callerFrame, m_callNodes.add(&info));
    }
    m_variants.shrinkToFit();
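    // Publish the fully-initialized stub state before the new stub can be
    // observed; presumably this pairs with dependent loads on concurrent
    // readers (e.g. the GC or a compiler thread inspecting m_variants).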
    WTF::storeStoreFence();
}

PolymorphicCallStubRoutine::~PolymorphicCallStubRoutine() { }

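// Rebuild the CallVariant list from the recorded callee cells.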
CallVariantList PolymorphicCallStubRoutine::variants() const
{
    CallVariantList result;
    for (size_t i = 0; i < m_variants.size(); ++i)
        result.append(CallVariant(m_variants[i].get()));
    return result;
}

bool PolymorphicCallStubRoutine::hasEdges() const
{
    // The FTL does not count edges in its poly call stub routines. If the FTL went poly call, then
    // it's not meaningful to keep profiling - we can just leave it at that. Remember, the FTL would
    // have had full edge profiling from the DFG, and based on this information, it would have
    // decided to go poly.
    //
    // There probably are very-difficult-to-imagine corner cases where the FTL not doing edge
    // profiling is bad for polyvariant inlining. But polyvariant inlining is profitable sometimes
    // while not having to increment counts is profitable always. So, we let the FTL run faster and
    // not keep counts.
    return !!m_fastCounts;
}

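// m_fastCounts is indexed in parallel with m_variants, so each variant gets paired
// with the count of fast-path calls observed for it.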
CallEdgeList PolymorphicCallStubRoutine::edges() const
{
    RELEASE_ASSERT(m_fastCounts);

    CallEdgeList result;
    for (size_t i = 0; i < m_variants.size(); ++i)
        result.append(CallEdge(CallVariant(m_variants[i].get()), m_fastCounts[i]));
    return result;
}

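// Sever the back-pointers from our nodes to info, presumably because info is about
// to die before this stub routine does; unlink() then skips the dead CallLinkInfo.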
void PolymorphicCallStubRoutine::clearCallNodesFor(CallLinkInfo* info)
{
    for (Bag<PolymorphicCallNode>::iterator iter = m_callNodes.begin(); !!iter; ++iter) {
        PolymorphicCallNode& node = **iter;
        // All nodes should point to info, but okay to be a little paranoid.
        if (node.hasCallLinkInfo(info))
            node.clearCallLinkInfo();
    }
}

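// The routine stays live only if every callee cell it depends on is still marked;
// one dead callee is enough to report the whole stub as dead.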
bool PolymorphicCallStubRoutine::visitWeak(VM& vm)
{
    bool isStillLive = true;
    forEachDependentCell([&](JSCell* cell) {
        isStillLive &= vm.heap.isMarked(cell);
    });
    return isStillLive;
}

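// Visit the recorded callee cells so they are kept alive while this stub routine
// itself is reachable.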
void PolymorphicCallStubRoutine::markRequiredObjectsInternal(SlotVisitor& visitor)
{
    for (auto& variant : m_variants)
        visitor.append(variant);
}

} // namespace JSC

#endif // ENABLE(JIT)