/*
 * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGStrengthReductionPhase.h"

#if ENABLE(DFG_JIT)

#include "DFGAbstractHeap.h"
#include "DFGClobberize.h"
#include "DFGGraph.h"
#include "DFGInsertionSet.h"
#include "DFGPhase.h"
#include "DFGPredictionPropagationPhase.h"
#include "DFGVariableAccessDataDump.h"
#include "JSCInlines.h"
#include "MathCommon.h"
#include "RegExpObject.h"
#include "StringPrototype.h"
#include <cstdlib>
#include <wtf/text/StringBuilder.h>

namespace JSC { namespace DFG {

class StrengthReductionPhase : public Phase {
    static const bool verbose = false;

public:
    StrengthReductionPhase(Graph& graph)
        : Phase(graph, "strength reduction")
        , m_insertionSet(graph)
    {
    }

    bool run()
    {
        ASSERT(m_graph.m_fixpointState == FixpointNotConverged);

        m_changed = false;

        for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
            m_block = m_graph.block(blockIndex);
            if (!m_block)
                continue;
            for (m_nodeIndex = 0; m_nodeIndex < m_block->size(); ++m_nodeIndex) {
                m_node = m_block->at(m_nodeIndex);
                handleNode();
            }
            m_insertionSet.execute(m_block);
        }

        return m_changed;
    }

private:
    void handleNode()
    {
        switch (m_node->op()) {
        case ArithBitOr:
            handleCommutativity();

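            // For example, ArithBitOr(@x, 0) over an already-typed (non-Untyped) @x leaves the
            // value unchanged, so it can be replaced with Identity(@x).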
            if (m_node->child1().useKind() != UntypedUse && m_node->child2()->isInt32Constant() && !m_node->child2()->asInt32()) {
                convertToIdentityOverChild1();
                break;
            }
            break;

        case ArithBitXor:
        case ArithBitAnd:
            handleCommutativity();
            break;

        case BitLShift:
        case BitRShift:
        case BitURShift:
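            // Shift amounts are taken modulo 32, so shifting by a constant that is a multiple of
            // 32 (e.g. @x << 32) leaves the value unchanged.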
            if (m_node->child1().useKind() != UntypedUse && m_node->child2()->isInt32Constant() && !(m_node->child2()->asInt32() & 0x1f)) {
                convertToIdentityOverChild1();
                break;
            }
            break;

        case UInt32ToNumber:
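            // If the input is @x >>> k for a constant k that is nonzero mod 32, the result is
            // already in [0, 2^31), so it fits in an int32 and the conversion is an identity.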
            if (m_node->child1()->op() == BitURShift
                && m_node->child1()->child2()->isInt32Constant()
                && (m_node->child1()->child2()->asInt32() & 0x1f)
                && m_node->arithMode() != Arith::DoOverflow) {
                m_node->convertToIdentity();
                m_changed = true;
                break;
            }
            break;

        case ArithAdd:
            handleCommutativity();

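            // For example, ArithAdd(@x, 0) folds to Identity(@x).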
            if (m_node->child2()->isInt32Constant() && !m_node->child2()->asInt32()) {
                convertToIdentityOverChild1();
                break;
            }
            break;

        case ValueMul:
        case ValueBitOr:
        case ValueBitAnd:
        case ValueBitXor: {
            if (m_node->binaryUseKind() == BigIntUse)
                handleCommutativity();
            break;
        }

        case ArithMul: {
            handleCommutativity();
            Edge& child2 = m_node->child2();
            if (child2->isNumberConstant() && child2->asNumber() == 2) {
                switch (m_node->binaryUseKind()) {
                case DoubleRepUse:
                    // It is always valuable to get rid of a double multiplication by 2.
                    // We won't have half-register dependency issues on x86 and we won't have to load the constant.
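                    // e.g. ArithMul(@x, 2) with DoubleRepUse becomes ArithAdd(@x, @x).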
                    m_node->setOp(ArithAdd);
                    child2.setNode(m_node->child1().node());
                    m_changed = true;
                    break;
#if USE(JSVALUE64)
                case Int52RepUse:
#endif
                case Int32Use:
                    // For integers, we can only convert when the arith modes are compatible.
                    // For example, ArithAdd does not perform the negative zero check that ArithMul may need.
                    if (m_node->arithMode() == Arith::CheckOverflow || m_node->arithMode() == Arith::Unchecked) {
                        m_node->setOp(ArithAdd);
                        child2.setNode(m_node->child1().node());
                        m_changed = true;
                    }
                    break;
                default:
                    break;
                }
            }
            break;
        }
        case ArithSub:
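            // e.g. ArithSub(@x, 5) becomes ArithAdd(@x, -5). INT32_MIN is excluded because it has
            // no representable int32 negation.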
            if (m_node->child2()->isInt32Constant()
                && m_node->isBinaryUseKind(Int32Use)) {
                int32_t value = m_node->child2()->asInt32();
                if (value != INT32_MIN) {
                    m_node->setOp(ArithAdd);
                    m_node->child2().setNode(
                        m_insertionSet.insertConstant(
                            m_nodeIndex, m_node->origin, jsNumber(-value)));
                    m_changed = true;
                    break;
                }
            }
            break;

        case ArithPow:
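            // e.g. ArithPow(@x, 1) folds to Identity(@x), and ArithPow(@x, 2) becomes ArithMul(@x, @x).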
            if (m_node->child2()->isNumberConstant()) {
                double yOperandValue = m_node->child2()->asNumber();
                if (yOperandValue == 1) {
                    convertToIdentityOverChild1();
                    m_changed = true;
                } else if (yOperandValue == 2) {
                    m_node->setOp(ArithMul);
                    m_node->child2() = m_node->child1();
                    m_changed = true;
                }
            }
            break;

        case ArithMod:
            // On integers:
            //     In:  ArithMod(ArithMod(x, const1), const2)
            //     Out: Identity(ArithMod(x, const1))
            // if |const1| <= |const2|.
            if (m_node->binaryUseKind() == Int32Use
                && m_node->child2()->isInt32Constant()
                && m_node->child1()->op() == ArithMod
                && m_node->child1()->binaryUseKind() == Int32Use
                && m_node->child1()->child2()->isInt32Constant()
                && std::abs(m_node->child1()->child2()->asInt32()) <= std::abs(m_node->child2()->asInt32())) {
                convertToIdentityOverChild1();
            }
            break;

        case ArithDiv:
            // Transform
            //     ArithDiv(x, constant)
            // into
            //     ArithMul(x, 1 / constant)
            // if the operation has the same result.
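            // Illustrative note (based on safeReciprocalForDivByConst()): the reciprocal is only
            // used when it is exact, e.g. ArithDiv(@x, 4) becomes ArithMul(@x, 0.25), while
            // ArithDiv(@x, 3) is left alone because 1/3 is not exactly representable.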
            if (m_node->isBinaryUseKind(DoubleRepUse)
                && m_node->child2()->isNumberConstant()) {

                if (Optional<double> reciprocal = safeReciprocalForDivByConst(m_node->child2()->asNumber())) {
                    Node* reciprocalNode = m_insertionSet.insertConstant(m_nodeIndex, m_node->origin, jsDoubleNumber(*reciprocal), DoubleConstant);
                    m_node->setOp(ArithMul);
                    m_node->child2() = Edge(reciprocalNode, DoubleRepUse);
                    m_changed = true;
                    break;
                }
            }
            break;

        case ValueRep:
        case Int52Rep: {
            // This short-circuits circuitous conversions, like ValueRep(Int52Rep(value)).

            // The only speculation that we would do beyond validating that we have a type that
            // can be represented in a certain way is an Int32 check that would appear on Int52Rep
            // nodes. For now, if we see this and the final type we want is an Int52, we use it
            // as an excuse not to fold. The only thing we would need is an Int52RepInt32Use kind.
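            // For example, ValueRep(Int52Rep(@x)) where @x already produces a JSValue folds to
            // Identity(@x), with the Int52Rep's Int32 speculation re-inserted as an explicit Check on @x.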
            bool hadInt32Check = false;
            if (m_node->op() == Int52Rep) {
                if (m_node->child1().useKind() != Int32Use)
                    break;
                hadInt32Check = true;
            }
            for (Node* node = m_node->child1().node(); ; node = node->child1().node()) {
                if (canonicalResultRepresentation(node->result()) ==
                    canonicalResultRepresentation(m_node->result())) {
                    m_insertionSet.insertCheck(m_graph, m_nodeIndex, m_node);
                    if (hadInt32Check) {
                        // FIXME: Consider adding Int52RepInt32Use or even DoubleRepInt32Use,
                        // which would be super weird. The latter would only arise in some
                        // seriously circuitous conversions.
                        if (canonicalResultRepresentation(node->result()) != NodeResultJS)
                            break;

                        m_insertionSet.insertCheck(
                            m_nodeIndex, m_node->origin, Edge(node, Int32Use));
                    }
                    m_node->child1() = node->defaultEdge();
                    m_node->convertToIdentity();
                    m_changed = true;
                    break;
                }

                switch (node->op()) {
                case Int52Rep:
                    if (node->child1().useKind() != Int32Use)
                        break;
                    hadInt32Check = true;
                    continue;

                case ValueRep:
                    continue;

                default:
                    break;
                }
                break;
            }
            break;
        }

        case Flush: {
            ASSERT(m_graph.m_form != SSA);

            if (m_graph.willCatchExceptionInMachineFrame(m_node->origin.semantic)) {
                // FIXME: We should be able to relax this:
                // https://bugs.webkit.org/show_bug.cgi?id=150824
                break;
            }

            Node* setLocal = nullptr;
            VirtualRegister local = m_node->local();

            for (unsigned i = m_nodeIndex; i--;) {
                Node* node = m_block->at(i);

                if (node->op() == SetLocal && node->local() == local) {
                    setLocal = node;
                    break;
                }

                if (accessesOverlap(m_graph, node, AbstractHeap(Stack, local)))
                    break;
            }

            if (!setLocal)
                break;

            // The Flush should become a PhantomLocal at this point. This means that we want the
            // local's value during OSR, but we don't care if the value is stored to the stack. CPS
            // rethreading can canonicalize PhantomLocals for us.
            m_node->convertFlushToPhantomLocal();
            m_graph.dethread();
            m_changed = true;
            break;
        }

        // FIXME: We should probably do this in constant folding, but this currently relies on OSR exit history:
        // https://bugs.webkit.org/show_bug.cgi?id=154832
        case OverridesHasInstance: {
            if (!m_node->child2().node()->isCellConstant())
                break;

            if (m_node->child2().node()->asCell() != m_graph.globalObjectFor(m_node->origin.semantic)->functionProtoHasInstanceSymbolFunction()) {
                m_graph.convertToConstant(m_node, jsBoolean(true));
                m_changed = true;
            } else if (!m_graph.hasExitSite(m_node->origin.semantic, BadTypeInfoFlags)) {
                // We optimistically assume that we will not see a function that has a custom instanceof operation, as they should be rare.
                m_insertionSet.insertNode(m_nodeIndex, SpecNone, CheckTypeInfoFlags, m_node->origin, OpInfo(ImplementsDefaultHasInstance), Edge(m_node->child1().node(), CellUse));
                m_graph.convertToConstant(m_node, jsBoolean(false));
                m_changed = true;
            }

            break;
        }

        // FIXME: We have a lot of string constant-folding rules here. It would be great to
        // move these to the abstract interpreter once AbstractValue can support LazyJSValue.
        // https://bugs.webkit.org/show_bug.cgi?id=155204

        case ValueAdd: {
            if (m_node->child1()->isConstant()
                && m_node->child2()->isConstant()
                && (!!m_node->child1()->tryGetString(m_graph) || !!m_node->child2()->tryGetString(m_graph))) {
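                // e.g. ValueAdd("foo", 42) with both operands constant folds to the string "foo42".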
                auto tryGetConstantString = [&] (Node* node) -> String {
                    String string = node->tryGetString(m_graph);
                    if (!string.isEmpty())
                        return string;
                    JSValue value = node->constant()->value();
                    if (!value)
                        return String();
                    if (value.isInt32())
                        return String::number(value.asInt32());
                    if (value.isNumber())
                        return String::number(value.asNumber());
                    if (value.isBoolean())
                        return value.asBoolean() ? "true"_s : "false"_s;
                    if (value.isNull())
                        return "null"_s;
                    if (value.isUndefined())
                        return "undefined"_s;
                    return String();
                };

                String leftString = tryGetConstantString(m_node->child1().node());
                if (!leftString)
                    break;
                String rightString = tryGetConstantString(m_node->child2().node());
                if (!rightString)
                    break;

                StringBuilder builder;
                builder.append(leftString);
                builder.append(rightString);
                convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, builder.toString()));
                m_changed = true;
                break;
            }

            if (m_node->binaryUseKind() == BigIntUse)
                handleCommutativity();

            break;
        }

        case MakeRope:
        case StrCat: {
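            // e.g. MakeRope("foo", "bar") with constant string children folds to the constant string "foobar".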
            String leftString = m_node->child1()->tryGetString(m_graph);
            if (!leftString)
                break;
            String rightString = m_node->child2()->tryGetString(m_graph);
            if (!rightString)
                break;
            String extraString;
            if (m_node->child3()) {
                extraString = m_node->child3()->tryGetString(m_graph);
                if (!extraString)
                    break;
            }

            StringBuilder builder;
            builder.append(leftString);
            builder.append(rightString);
            if (!!extraString)
                builder.append(extraString);

            convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, builder.toString()));
            m_changed = true;
            break;
        }

        case ToString:
        case CallStringConstructor: {
            Edge& child1 = m_node->child1();
            switch (child1.useKind()) {
            case Int32Use:
            case Int52RepUse:
            case DoubleRepUse: {
                if (child1->hasConstant()) {
                    JSValue value = child1->constant()->value();
                    if (value) {
                        String result;
                        if (value.isInt32())
                            result = String::number(value.asInt32());
                        else if (value.isNumber())
                            result = String::number(value.asNumber());

                        if (!result.isNull()) {
                            convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, result));
                            m_changed = true;
                        }
                    }
                }
                break;
            }

            default:
                break;
            }
            break;
        }

        case NumberToStringWithValidRadixConstant: {
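            // e.g. a constant operand of 255 with a valid radix constant of 16 folds to the string "ff".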
            Edge& child1 = m_node->child1();
            if (child1->hasConstant()) {
                JSValue value = child1->constant()->value();
                if (value && value.isNumber()) {
                    String result = toStringWithRadix(value.asNumber(), m_node->validRadixConstant());
                    convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, result));
                    m_changed = true;
                }
            }
            break;
        }

        case GetArrayLength: {
            if (m_node->arrayMode().type() == Array::Generic
                || m_node->arrayMode().type() == Array::String) {
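                // e.g. GetArrayLength over the constant string "hello" folds to jsNumber(5).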
                String string = m_node->child1()->tryGetString(m_graph);
                if (!!string) {
                    m_graph.convertToConstant(m_node, jsNumber(string.length()));
                    m_changed = true;
                    break;
                }
            }
            break;
        }

        case GetGlobalObject: {
            if (JSObject* object = m_node->child1()->dynamicCastConstant<JSObject*>(vm())) {
                m_graph.convertToConstant(m_node, object->globalObject(vm()));
                m_changed = true;
                break;
            }
            break;
        }

        case RegExpExec:
        case RegExpTest:
        case RegExpMatchFast:
        case RegExpExecNonGlobalOrSticky: {
            JSGlobalObject* globalObject = m_node->child1()->dynamicCastConstant<JSGlobalObject*>(vm());
            if (!globalObject) {
                if (verbose)
                    dataLog("Giving up because no global object.\n");
                break;
            }

            if (globalObject->isHavingABadTime()) {
                if (verbose)
                    dataLog("Giving up because bad time.\n");
                break;
            }

            Node* regExpObjectNode = nullptr;
            RegExp* regExp = nullptr;
            if (m_node->op() == RegExpExec || m_node->op() == RegExpTest || m_node->op() == RegExpMatchFast) {
                regExpObjectNode = m_node->child2().node();
                if (RegExpObject* regExpObject = regExpObjectNode->dynamicCastConstant<RegExpObject*>(vm()))
                    regExp = regExpObject->regExp();
                else if (regExpObjectNode->op() == NewRegexp)
                    regExp = regExpObjectNode->castOperand<RegExp*>();
                else {
                    if (verbose)
                        dataLog("Giving up because the regexp is unknown.\n");
                    break;
                }
            } else
                regExp = m_node->castOperand<RegExp*>();

            if (m_node->op() == RegExpMatchFast) {
                if (regExp->global()) {
                    if (regExp->sticky())
                        break;
                    if (m_node->child3().useKind() != StringUse)
                        break;
                    NodeOrigin origin = m_node->origin;
                    m_insertionSet.insertNode(
                        m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());
                    m_insertionSet.insertNode(
                        m_nodeIndex, SpecNone, SetRegExpObjectLastIndex, origin,
                        OpInfo(false),
                        Edge(regExpObjectNode, RegExpObjectUse),
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, jsNumber(0), UntypedUse));
                    origin = origin.withInvalidExit();
                    m_node->convertToRegExpMatchFastGlobalWithoutChecks(m_graph.freeze(regExp));
                    m_node->origin = origin;
                    m_changed = true;
                    break;
                }

                m_node->setOp(RegExpExec);
                m_changed = true;
                // Continue performing strength reduction on the RegExpExec node.
            }

            ASSERT(m_node->op() != RegExpMatchFast);

            auto foldToConstant = [&] {
                Node* stringNode = nullptr;
                if (m_node->op() == RegExpExecNonGlobalOrSticky)
                    stringNode = m_node->child2().node();
                else
                    stringNode = m_node->child3().node();

                // NOTE: This mostly already protects us from having the compiler execute a regexp
                // operation on a ginormous string by preventing us from getting our hands on ginormous
                // strings in the first place.
                String string = stringNode->tryGetString(m_graph);
                if (!string) {
                    if (verbose)
                        dataLog("Giving up because the string is unknown.\n");
                    return false;
                }

                FrozenValue* regExpFrozenValue = m_graph.freeze(regExp);

                // Refuse to do things with regular expressions that have a ginormous number of
                // subpatterns.
                unsigned ginormousNumberOfSubPatterns = 1000;
                if (regExp->numSubpatterns() > ginormousNumberOfSubPatterns) {
                    if (verbose)
                        dataLog("Giving up because of pattern limit.\n");
                    return false;
                }

                if ((m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) && regExp->hasNamedCaptures()) {
                    // FIXME: https://bugs.webkit.org/show_bug.cgi?id=176464
                    // Implement strength reduction optimization for named capture groups.
                    if (verbose)
                        dataLog("Giving up because of named capture groups.\n");
                    return false;
                }

                unsigned lastIndex;
                if (regExp->globalOrSticky()) {
                    // This will only work if we can prove what the value of lastIndex is. To do this
                    // safely, we need to execute the insertion set so that we see any previous strength
                    // reductions. This is needed for soundness since otherwise the effectfulness of any
                    // previous strength reductions would be invisible to us.
                    ASSERT(regExpObjectNode);
                    executeInsertionSet();
                    lastIndex = UINT_MAX;
                    for (unsigned otherNodeIndex = m_nodeIndex; otherNodeIndex--;) {
                        Node* otherNode = m_block->at(otherNodeIndex);
                        if (otherNode == regExpObjectNode) {
                            lastIndex = 0;
                            break;
                        }
                        if (otherNode->op() == SetRegExpObjectLastIndex
                            && otherNode->child1() == regExpObjectNode
                            && otherNode->child2()->isInt32Constant()
                            && otherNode->child2()->asInt32() >= 0) {
                            lastIndex = static_cast<unsigned>(otherNode->child2()->asInt32());
                            break;
                        }
                        if (writesOverlap(m_graph, otherNode, RegExpObject_lastIndex))
                            break;
                    }
                    if (lastIndex == UINT_MAX) {
                        if (verbose)
                            dataLog("Giving up because the last index is not known.\n");
                        return false;
                    }
                } else
                    lastIndex = 0;

                m_graph.watchpoints().addLazily(globalObject->havingABadTimeWatchpoint());

                Structure* structure;
                if ((m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) && regExp->hasNamedCaptures())
                    structure = globalObject->regExpMatchesArrayWithGroupsStructure();
                else
                    structure = globalObject->regExpMatchesArrayStructure();

                if (structure->indexingType() != ArrayWithContiguous) {
                    // This is further protection against a race with haveABadTime.
                    if (verbose)
                        dataLog("Giving up because the structure has the wrong indexing type.\n");
                    return false;
                }
                m_graph.registerStructure(structure);

                FrozenValue* globalObjectFrozenValue = m_graph.freeze(globalObject);

                MatchResult result;
                Vector<int> ovector;
                // We have to call the kind of match function that the main thread would have called.
                // Otherwise, we might not have the desired Yarr code compiled, and the match will fail.
                if (m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) {
                    int position;
                    if (!regExp->matchConcurrently(vm(), string, lastIndex, position, ovector)) {
                        if (verbose)
                            dataLog("Giving up because match failed.\n");
                        return false;
                    }
                    result.start = position;
                    result.end = ovector[1];
                } else {
                    if (!regExp->matchConcurrently(vm(), string, lastIndex, result)) {
                        if (verbose)
                            dataLog("Giving up because match failed.\n");
                        return false;
                    }
                }

                // We've constant-folded the regexp. Now we're committed to replacing RegExpExec/Test.

                m_changed = true;

                NodeOrigin origin = m_node->origin;

                m_insertionSet.insertNode(
                    m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());

                if (m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) {
                    if (result) {
                        RegisteredStructureSet* structureSet = m_graph.addStructureSet(structure);

                        // Create an array modeling the JS array that we will try to allocate. This is
                        // basically createRegExpMatchesArray but over C++ strings instead of JSStrings.
                        Vector<String> resultArray;
                        resultArray.append(string.substring(result.start, result.end - result.start));
                        for (unsigned i = 1; i <= regExp->numSubpatterns(); ++i) {
                            int start = ovector[2 * i];
                            if (start >= 0)
                                resultArray.append(string.substring(start, ovector[2 * i + 1] - start));
                            else
                                resultArray.append(String());
                        }

                        unsigned publicLength = resultArray.size();
                        unsigned vectorLength =
                            Butterfly::optimalContiguousVectorLength(structure, publicLength);

                        UniquedStringImpl* indexUID = vm().propertyNames->index.impl();
                        UniquedStringImpl* inputUID = vm().propertyNames->input.impl();
                        unsigned indexIndex = m_graph.identifiers().ensure(indexUID);
                        unsigned inputIndex = m_graph.identifiers().ensure(inputUID);

                        unsigned firstChild = m_graph.m_varArgChildren.size();
                        m_graph.m_varArgChildren.append(
                            m_insertionSet.insertConstantForUse(
                                m_nodeIndex, origin, structure, KnownCellUse));
                        ObjectMaterializationData* data = m_graph.m_objectMaterializationData.add();

                        m_graph.m_varArgChildren.append(
                            m_insertionSet.insertConstantForUse(
                                m_nodeIndex, origin, jsNumber(publicLength), KnownInt32Use));
                        data->m_properties.append(PublicLengthPLoc);

                        m_graph.m_varArgChildren.append(
                            m_insertionSet.insertConstantForUse(
                                m_nodeIndex, origin, jsNumber(vectorLength), KnownInt32Use));
                        data->m_properties.append(VectorLengthPLoc);

                        m_graph.m_varArgChildren.append(
                            m_insertionSet.insertConstantForUse(
                                m_nodeIndex, origin, jsNumber(result.start), UntypedUse));
                        data->m_properties.append(
                            PromotedLocationDescriptor(NamedPropertyPLoc, indexIndex));

                        m_graph.m_varArgChildren.append(Edge(stringNode, UntypedUse));
                        data->m_properties.append(
                            PromotedLocationDescriptor(NamedPropertyPLoc, inputIndex));

                        auto materializeString = [&] (const String& string) -> Node* {
                            if (string.isNull())
                                return nullptr;
                            if (string.isEmpty()) {
                                return m_insertionSet.insertConstant(
                                    m_nodeIndex, origin, vm().smallStrings.emptyString());
                            }
                            LazyJSValue value = LazyJSValue::newString(m_graph, string);
                            return m_insertionSet.insertNode(
                                m_nodeIndex, SpecNone, LazyJSConstant, origin,
                                OpInfo(m_graph.m_lazyJSValues.add(value)));
                        };

                        for (unsigned i = 0; i < resultArray.size(); ++i) {
                            if (Node* node = materializeString(resultArray[i])) {
                                m_graph.m_varArgChildren.append(Edge(node, UntypedUse));
                                data->m_properties.append(
                                    PromotedLocationDescriptor(IndexedPropertyPLoc, i));
                            }
                        }

                        Node* resultNode = m_insertionSet.insertNode(
                            m_nodeIndex, SpecArray, Node::VarArg, MaterializeNewObject, origin,
                            OpInfo(structureSet), OpInfo(data), firstChild,
                            m_graph.m_varArgChildren.size() - firstChild);

                        m_node->convertToIdentityOn(resultNode);
                    } else
                        m_graph.convertToConstant(m_node, jsNull());
                } else
                    m_graph.convertToConstant(m_node, jsBoolean(!!result));

                // Whether it's Exec or Test, we need to tell the globalObject and RegExpObject what's up.
                // Because SetRegExpObjectLastIndex may exit and it clobbers exit state, we do that
                // first.

                if (regExp->globalOrSticky()) {
                    ASSERT(regExpObjectNode);
                    m_insertionSet.insertNode(
                        m_nodeIndex, SpecNone, SetRegExpObjectLastIndex, origin,
                        OpInfo(false),
                        Edge(regExpObjectNode, RegExpObjectUse),
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, jsNumber(result ? result.end : 0), UntypedUse));

                    origin = origin.withInvalidExit();
                }

                if (result) {
                    unsigned firstChild = m_graph.m_varArgChildren.size();
                    m_graph.m_varArgChildren.append(
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, globalObjectFrozenValue, KnownCellUse));
                    m_graph.m_varArgChildren.append(
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, regExpFrozenValue, KnownCellUse));
                    m_graph.m_varArgChildren.append(Edge(stringNode, KnownCellUse));
                    m_graph.m_varArgChildren.append(
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, jsNumber(result.start), KnownInt32Use));
                    m_graph.m_varArgChildren.append(
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, jsNumber(result.end), KnownInt32Use));
                    m_insertionSet.insertNode(
                        m_nodeIndex, SpecNone, Node::VarArg, RecordRegExpCachedResult, origin,
                        OpInfo(), OpInfo(), firstChild, m_graph.m_varArgChildren.size() - firstChild);

                    origin = origin.withInvalidExit();
                }

                m_node->origin = origin;
                return true;
            };

            auto convertToStatic = [&] {
                if (m_node->op() != RegExpExec)
                    return false;
                if (regExp->globalOrSticky())
                    return false;
                if (m_node->child3().useKind() != StringUse)
                    return false;
                NodeOrigin origin = m_node->origin;
                m_insertionSet.insertNode(
                    m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());
                m_node->convertToRegExpExecNonGlobalOrStickyWithoutChecks(m_graph.freeze(regExp));
                m_changed = true;
                return true;
            };

            if (foldToConstant())
                break;

            if (convertToStatic())
                break;

            break;
        }

        case StringReplace:
        case StringReplaceRegExp: {
            Node* stringNode = m_node->child1().node();
            String string = stringNode->tryGetString(m_graph);
            if (!string)
                break;

            Node* regExpObjectNode = m_node->child2().node();
            RegExp* regExp;
            if (RegExpObject* regExpObject = regExpObjectNode->dynamicCastConstant<RegExpObject*>(vm()))
                regExp = regExpObject->regExp();
            else if (regExpObjectNode->op() == NewRegexp)
                regExp = regExpObjectNode->castOperand<RegExp*>();
            else {
                if (verbose)
                    dataLog("Giving up because the regexp is unknown.\n");
                break;
            }

            String replace = m_node->child3()->tryGetString(m_graph);
            if (!replace)
                break;

            StringBuilder builder;

            unsigned lastIndex = 0;
            unsigned startPosition = 0;
            bool ok = true;
            do {
                MatchResult result;
                Vector<int> ovector;
                // Model which version of match() is called by the main thread.
                if (replace.isEmpty() && regExp->global()) {
                    if (!regExp->matchConcurrently(vm(), string, startPosition, result)) {
                        ok = false;
                        break;
                    }
                } else {
                    int position;
                    if (!regExp->matchConcurrently(vm(), string, startPosition, position, ovector)) {
                        ok = false;
                        break;
                    }

                    result.start = position;
                    result.end = ovector[1];
                }

                if (!result)
                    break;

                unsigned replLen = replace.length();
                if (lastIndex < result.start || replLen) {
                    builder.append(string, lastIndex, result.start - lastIndex);
                    if (replLen) {
                        StringBuilder replacement;
                        substituteBackreferences(replacement, replace, string, ovector.data(), regExp);
                        builder.append(replacement);
                    }
                }

                lastIndex = result.end;
                startPosition = lastIndex;

                // Special case of an empty match.
                if (result.empty()) {
                    startPosition++;
                    if (startPosition > string.length())
                        break;
                }
            } while (regExp->global());
            if (!ok)
                break;

            // We are committed at this point.
            m_changed = true;

            NodeOrigin origin = m_node->origin;

            // Preserve any checks we have.
            m_insertionSet.insertNode(
                m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());

            if (regExp->global()) {
                m_insertionSet.insertNode(
                    m_nodeIndex, SpecNone, SetRegExpObjectLastIndex, origin,
                    OpInfo(false),
                    Edge(regExpObjectNode, RegExpObjectUse),
                    m_insertionSet.insertConstantForUse(
                        m_nodeIndex, origin, jsNumber(0), UntypedUse));

                origin = origin.withInvalidExit();
            }

            if (!lastIndex && builder.isEmpty())
                m_node->convertToIdentityOn(stringNode);
            else {
                if (lastIndex < string.length())
                    builder.append(string, lastIndex, string.length() - lastIndex);

                m_node->convertToLazyJSConstant(m_graph, LazyJSValue::newString(m_graph, builder.toString()));
            }

            m_node->origin = origin;
            break;
        }

        case Call:
        case Construct:
        case TailCallInlinedCaller:
        case TailCall: {
            ExecutableBase* executable = nullptr;
            Edge callee = m_graph.varArgChild(m_node, 0);
            CallVariant callVariant;
            if (JSFunction* function = callee->dynamicCastConstant<JSFunction*>(vm())) {
                executable = function->executable();
                callVariant = CallVariant(function);
            } else if (callee->isFunctionAllocation()) {
                executable = callee->castOperand<FunctionExecutable*>();
                callVariant = CallVariant(executable);
            }

            if (!executable)
                break;

            if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(vm(), executable)) {
                if (m_node->op() == Construct && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct)
                    break;

                // We need to update m_parameterSlots before we get to the backend, but we don't
                // want to do too much of this.
                unsigned numAllocatedArgs =
                    static_cast<unsigned>(functionExecutable->parameterCount()) + 1;

                if (numAllocatedArgs <= Options::maximumDirectCallStackSize()) {
                    m_graph.m_parameterSlots = std::max(
                        m_graph.m_parameterSlots,
                        Graph::parameterSlotsForArgCount(numAllocatedArgs));
                }
            }

            m_graph.m_plan.recordedStatuses().addCallLinkStatus(m_node->origin.semantic, CallLinkStatus(callVariant));

            m_node->convertToDirectCall(m_graph.freeze(executable));
            m_changed = true;
            break;
        }

        default:
            break;
        }
    }

    void convertToIdentityOverChild(unsigned childIndex)
    {
        ASSERT(!(m_node->flags() & NodeHasVarArgs));
        m_insertionSet.insertCheck(m_graph, m_nodeIndex, m_node);
        m_node->children.removeEdge(childIndex ^ 1);
        m_node->convertToIdentity();
        m_changed = true;
    }

    void convertToIdentityOverChild1()
    {
        convertToIdentityOverChild(0);
    }

    void convertToIdentityOverChild2()
    {
        convertToIdentityOverChild(1);
    }

    void convertToLazyJSValue(Node* node, LazyJSValue value)
    {
        m_insertionSet.insertCheck(m_graph, m_nodeIndex, node);
        node->convertToLazyJSConstant(m_graph, value);
    }

    void handleCommutativity()
    {
        // It's definitely not sound to swap the lhs and rhs when we may be performing effectful
        // calls on the lhs/rhs for valueOf.
        if (m_node->child1().useKind() == UntypedUse || m_node->child2().useKind() == UntypedUse)
            return;

        // If the right side is a constant then there is nothing left to do.
        if (m_node->child2()->hasConstant())
            return;

        // This case ensures that optimizations that look for x + const don't also have
        // to look for const + x.
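        // e.g. ArithAdd(10, @x) is rewritten as ArithAdd(@x, 10).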
        if (m_node->child1()->hasConstant() && !m_node->child1()->asJSValue().isCell()) {
            std::swap(m_node->child1(), m_node->child2());
            m_changed = true;
            return;
        }

        // This case ensures that CSE is commutativity-aware.
        if (m_node->child1().node() > m_node->child2().node()) {
            std::swap(m_node->child1(), m_node->child2());
            m_changed = true;
            return;
        }
    }

    void executeInsertionSet()
    {
        m_nodeIndex += m_insertionSet.execute(m_block);
    }

    InsertionSet m_insertionSet;
    BasicBlock* m_block;
    unsigned m_nodeIndex;
    Node* m_node;
    bool m_changed;
};

bool performStrengthReduction(Graph& graph)
{
    return runPhase<StrengthReductionPhase>(graph);
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)