/*
 * Copyright (C) 2013-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGStrengthReductionPhase.h"

#if ENABLE(DFG_JIT)

#include "DFGAbstractHeap.h"
#include "DFGClobberize.h"
#include "DFGGraph.h"
#include "DFGInsertionSet.h"
#include "DFGPhase.h"
#include "DFGPredictionPropagationPhase.h"
#include "DFGVariableAccessDataDump.h"
#include "JSCInlines.h"
#include "MathCommon.h"
#include "RegExpObject.h"
#include "StringPrototype.h"
#include <cstdlib>
#include <wtf/text/StringBuilder.h>

namespace JSC { namespace DFG {

class StrengthReductionPhase : public Phase {
    static constexpr bool verbose = false;

public:
    StrengthReductionPhase(Graph& graph)
        : Phase(graph, "strength reduction")
        , m_insertionSet(graph)
    {
    }

    bool run()
    {
        ASSERT(m_graph.m_fixpointState == FixpointNotConverged);

        m_changed = false;

        for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
            m_block = m_graph.block(blockIndex);
            if (!m_block)
                continue;
            for (m_nodeIndex = 0; m_nodeIndex < m_block->size(); ++m_nodeIndex) {
                m_node = m_block->at(m_nodeIndex);
                handleNode();
            }
            m_insertionSet.execute(m_block);
        }

        return m_changed;
    }

private:
    void handleNode()
    {
        switch (m_node->op()) {
        case ArithBitOr:
            handleCommutativity();

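            // x | 0 reduces to Identity over x when the left operand is already known to be an
            // int32. With UntypedUse the bitwise op also performs a ToInt32 conversion (which
            // can run user code and change the value), so it is not a pure identity there.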
            if (m_node->child1().useKind() != UntypedUse && m_node->child2()->isInt32Constant() && !m_node->child2()->asInt32()) {
                convertToIdentityOverChild1();
                break;
            }
            break;

        case ArithBitXor:
        case ArithBitAnd:
            handleCommutativity();
            break;

        case ArithBitLShift:
        case ArithBitRShift:
        case BitURShift:
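            // Shift amounts are taken mod 32, so shifting by a multiple of 32 leaves the
            // 32-bit value unchanged and the node reduces to Identity over its left operand.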
            if (m_node->child1().useKind() != UntypedUse && m_node->child2()->isInt32Constant() && !(m_node->child2()->asInt32() & 0x1f)) {
                convertToIdentityOverChild1();
                break;
            }
            break;

        case UInt32ToNumber:
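            // BitURShift by a constant that is not a multiple of 32 always yields a value in
            // [0, 2^31 - 1], which already fits in an int32, so the conversion is a no-op.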
            if (m_node->child1()->op() == BitURShift
                && m_node->child1()->child2()->isInt32Constant()
                && (m_node->child1()->child2()->asInt32() & 0x1f)
                && m_node->arithMode() != Arith::DoOverflow) {
                m_node->convertToIdentity();
                m_changed = true;
                break;
            }
            break;

        case ArithAdd:
            handleCommutativity();

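            // Fold x + 0 to x when the right operand is the int32 constant 0.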
            if (m_node->child2()->isInt32Constant() && !m_node->child2()->asInt32()) {
                convertToIdentityOverChild1();
                break;
            }
            break;

        case ValueMul:
        case ValueBitOr:
        case ValueBitAnd:
        case ValueBitXor: {
            if (m_node->binaryUseKind() == BigIntUse)
                handleCommutativity();
            break;
        }

        case ArithMul: {
            handleCommutativity();
            Edge& child2 = m_node->child2();
            if (child2->isNumberConstant() && child2->asNumber() == 2) {
                switch (m_node->binaryUseKind()) {
                case DoubleRepUse:
                    // It is always valuable to get rid of a double multiplication by 2.
                    // We won't have half-register dependency issues on x86, and we won't have to load the constant.
                    m_node->setOp(ArithAdd);
                    child2.setNode(m_node->child1().node());
                    m_changed = true;
                    break;
#if USE(JSVALUE64)
                case Int52RepUse:
#endif
                case Int32Use:
                    // For integers, we can only convert compatible arith modes.
                    // ArithAdd does handle the negative zero check, for example.
                    if (m_node->arithMode() == Arith::CheckOverflow || m_node->arithMode() == Arith::Unchecked) {
                        m_node->setOp(ArithAdd);
                        child2.setNode(m_node->child1().node());
                        m_changed = true;
                    }
                    break;
                default:
                    break;
                }
            }
            break;
        }
        case ArithSub:
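            // Rewrite x - c as x + (-c). INT32_MIN is excluded because its negation cannot be
            // represented as an int32 constant.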
            if (m_node->child2()->isInt32Constant()
                && m_node->isBinaryUseKind(Int32Use)) {
                int32_t value = m_node->child2()->asInt32();
                if (value != INT32_MIN) {
                    m_node->setOp(ArithAdd);
                    m_node->child2().setNode(
                        m_insertionSet.insertConstant(
                            m_nodeIndex, m_node->origin, jsNumber(-value)));
                    m_changed = true;
                    break;
                }
            }
            break;

        case ArithPow:
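            // x ** 1 folds to x, and x ** 2 becomes x * x; both avoid a pow() call.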
            if (m_node->child2()->isNumberConstant()) {
                double yOperandValue = m_node->child2()->asNumber();
                if (yOperandValue == 1) {
                    convertToIdentityOverChild1();
                    m_changed = true;
                } else if (yOperandValue == 2) {
                    m_node->setOp(ArithMul);
                    m_node->child2() = m_node->child1();
                    m_changed = true;
                }
            }
            break;

        case ArithMod:
            // On integers:
            // In:  ArithMod(ArithMod(x, const1), const2)
            // Out: Identity(ArithMod(x, const1))
            // if |const1| <= |const2|.
            if (m_node->binaryUseKind() == Int32Use
                && m_node->child2()->isInt32Constant()
                && m_node->child1()->op() == ArithMod
                && m_node->child1()->binaryUseKind() == Int32Use
                && m_node->child1()->child2()->isInt32Constant()
                && std::abs(m_node->child1()->child2()->asInt32()) <= std::abs(m_node->child2()->asInt32())) {
                convertToIdentityOverChild1();
            }
            break;

        case ArithDiv:
            // Transform
            //    ArithDiv(x, constant)
            // Into
            //    ArithMul(x, 1 / constant)
            // if the operation has the same result.
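            // For example, x / 4 becomes x * 0.25. safeReciprocalForDivByConst() only produces
            // a reciprocal when it is exact (for example, when the divisor is a power of two),
            // so x / 3 is left alone.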
            if (m_node->isBinaryUseKind(DoubleRepUse)
                && m_node->child2()->isNumberConstant()) {

                if (Optional<double> reciprocal = safeReciprocalForDivByConst(m_node->child2()->asNumber())) {
                    Node* reciprocalNode = m_insertionSet.insertConstant(m_nodeIndex, m_node->origin, jsDoubleNumber(*reciprocal), DoubleConstant);
                    m_node->setOp(ArithMul);
                    m_node->child2() = Edge(reciprocalNode, DoubleRepUse);
                    m_changed = true;
                    break;
                }
            }
            break;

        case ValueRep:
        case Int52Rep: {
            // This short-circuits circuitous conversions, like ValueRep(Int52Rep(value)).

            // The only speculation that we would do beyond validating that we have a type that
            // can be represented in a certain way is an Int32 check that would appear on
            // Int52Rep nodes. For now, if we see this and the final type we want is an Int52,
            // we use it as an excuse not to fold. The only thing we would need is an
            // Int52RepInt32Use kind.
            bool hadInt32Check = false;
            if (m_node->op() == Int52Rep) {
                if (m_node->child1().useKind() != Int32Use)
                    break;
                hadInt32Check = true;
            }
            for (Node* node = m_node->child1().node(); ; node = node->child1().node()) {
                if (canonicalResultRepresentation(node->result()) ==
                    canonicalResultRepresentation(m_node->result())) {
                    m_insertionSet.insertCheck(m_graph, m_nodeIndex, m_node);
                    if (hadInt32Check) {
                        // FIXME: Consider adding Int52RepInt32Use or even DoubleRepInt32Use,
                        // which would be super weird. The latter would only arise in some
                        // seriously circuitous conversions.
                        if (canonicalResultRepresentation(node->result()) != NodeResultJS)
                            break;

                        m_insertionSet.insertCheck(
                            m_nodeIndex, m_node->origin, Edge(node, Int32Use));
                    }
                    m_node->child1() = node->defaultEdge();
                    m_node->convertToIdentity();
                    m_changed = true;
                    break;
                }

                switch (node->op()) {
                case Int52Rep:
                    if (node->child1().useKind() != Int32Use)
                        break;
                    hadInt32Check = true;
                    continue;

                case ValueRep:
                    continue;

                default:
                    break;
                }
                break;
            }
            break;
        }

        case Flush: {
            ASSERT(m_graph.m_form != SSA);

            if (m_graph.willCatchExceptionInMachineFrame(m_node->origin.semantic)) {
                // FIXME: We should be able to relax this:
                // https://bugs.webkit.org/show_bug.cgi?id=150824
                break;
            }

            Node* setLocal = nullptr;
            VirtualRegister local = m_node->local();

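            // Walk backwards looking for a SetLocal to the same local. Give up if anything in
            // between may read or write that stack slot.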
            for (unsigned i = m_nodeIndex; i--;) {
                Node* node = m_block->at(i);

                if (node->op() == SetLocal && node->local() == local) {
                    setLocal = node;
                    break;
                }

                if (accessesOverlap(m_graph, node, AbstractHeap(Stack, local)))
                    break;

            }

            if (!setLocal)
                break;

            // The Flush should become a PhantomLocal at this point. This means that we want the
            // local's value during OSR, but we don't care if the value is stored to the stack. CPS
            // rethreading can canonicalize PhantomLocals for us.
            m_node->convertFlushToPhantomLocal();
            m_graph.dethread();
            m_changed = true;
            break;
        }

        // FIXME: we should probably do this in constant folding but this currently relies on OSR exit history:
        // https://bugs.webkit.org/show_bug.cgi?id=154832
        case OverridesHasInstance: {
            if (!m_node->child2().node()->isCellConstant())
                break;

            if (m_node->child2().node()->asCell() != m_graph.globalObjectFor(m_node->origin.semantic)->functionProtoHasInstanceSymbolFunction()) {
                m_graph.convertToConstant(m_node, jsBoolean(true));
                m_changed = true;

            } else if (!m_graph.hasExitSite(m_node->origin.semantic, BadTypeInfoFlags)) {
                // We optimistically assume that we will not see a function that has a custom instanceof operation as they should be rare.
                m_insertionSet.insertNode(m_nodeIndex, SpecNone, CheckTypeInfoFlags, m_node->origin, OpInfo(ImplementsDefaultHasInstance), Edge(m_node->child1().node(), CellUse));
                m_graph.convertToConstant(m_node, jsBoolean(false));
                m_changed = true;
            }

            break;
        }

        // FIXME: We have a lot of string constant-folding rules here. It would be great to
        // move these to the abstract interpreter once AbstractValue can support LazyJSValue.
        // https://bugs.webkit.org/show_bug.cgi?id=155204

        case ValueAdd: {
            if (m_node->child1()->isConstant()
                && m_node->child2()->isConstant()
                && (!!m_node->child1()->tryGetString(m_graph) || !!m_node->child2()->tryGetString(m_graph))) {
                auto tryGetConstantString = [&] (Node* node) -> String {
                    String string = node->tryGetString(m_graph);
                    if (!string.isEmpty())
                        return string;
                    JSValue value = node->constant()->value();
                    if (!value)
                        return String();
                    if (value.isInt32())
                        return String::number(value.asInt32());
                    if (value.isNumber())
                        return String::number(value.asNumber());
                    if (value.isBoolean())
                        return value.asBoolean() ? "true"_s : "false"_s;
                    if (value.isNull())
                        return "null"_s;
                    if (value.isUndefined())
                        return "undefined"_s;
                    return String();
                };

                String leftString = tryGetConstantString(m_node->child1().node());
                if (!leftString)
                    break;
                String rightString = tryGetConstantString(m_node->child2().node());
                if (!rightString)
                    break;

                StringBuilder builder;
                builder.append(leftString);
                builder.append(rightString);
                convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, builder.toString()));
                m_changed = true;
                break;
            }

            if (m_node->binaryUseKind() == BigIntUse)
                handleCommutativity();

            break;
        }

        case MakeRope:
        case StrCat: {
            String leftString = m_node->child1()->tryGetString(m_graph);
            if (!leftString)
                break;
            String rightString = m_node->child2()->tryGetString(m_graph);
            if (!rightString)
                break;
            String extraString;
            if (m_node->child3()) {
                extraString = m_node->child3()->tryGetString(m_graph);
                if (!extraString)
                    break;
            }

            StringBuilder builder;
            builder.append(leftString);
            builder.append(rightString);
            if (!!extraString)
                builder.append(extraString);

            convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, builder.toString()));
            m_changed = true;
            break;
        }

        case ToString:
        case CallStringConstructor: {
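            // A constant number can be converted to its string form at compile time.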
            Edge& child1 = m_node->child1();
            switch (child1.useKind()) {
            case Int32Use:
            case Int52RepUse:
            case DoubleRepUse: {
                if (child1->hasConstant()) {
                    JSValue value = child1->constant()->value();
                    if (value) {
                        String result;
                        if (value.isInt32())
                            result = String::number(value.asInt32());
                        else if (value.isNumber())
                            result = String::number(value.asNumber());

                        if (!result.isNull()) {
                            convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, result));
                            m_changed = true;
                        }
                    }
                }
                break;
            }

            default:
                break;
            }
            break;
        }

        case NumberToStringWithValidRadixConstant: {
            Edge& child1 = m_node->child1();
            if (child1->hasConstant()) {
                JSValue value = child1->constant()->value();
                if (value && value.isNumber()) {
                    String result = toStringWithRadix(value.asNumber(), m_node->validRadixConstant());
                    convertToLazyJSValue(m_node, LazyJSValue::newString(m_graph, result));
                    m_changed = true;
                }
            }
            break;
        }

        case GetArrayLength: {
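            // The length of a compile-time-known string is itself a compile-time constant.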
            if (m_node->arrayMode().type() == Array::Generic
                || m_node->arrayMode().type() == Array::String) {
                String string = m_node->child1()->tryGetString(m_graph);
                if (!!string) {
                    m_graph.convertToConstant(m_node, jsNumber(string.length()));
                    m_changed = true;
                    break;
                }
            }
            break;
        }

        case GetGlobalObject: {
            if (JSObject* object = m_node->child1()->dynamicCastConstant<JSObject*>(vm())) {
                m_graph.convertToConstant(m_node, object->globalObject(vm()));
                m_changed = true;
                break;
            }
            break;
        }

        case RegExpExec:
        case RegExpTest:
        case RegExpMatchFast:
        case RegExpExecNonGlobalOrSticky: {
            JSGlobalObject* globalObject = m_node->child1()->dynamicCastConstant<JSGlobalObject*>(vm());
            if (!globalObject) {
                if (verbose)
                    dataLog("Giving up because no global object.\n");
                break;
            }

            if (globalObject->isHavingABadTime()) {
                if (verbose)
                    dataLog("Giving up because bad time.\n");
                break;
            }

            Node* regExpObjectNode = nullptr;
            RegExp* regExp = nullptr;
            if (m_node->op() == RegExpExec || m_node->op() == RegExpTest || m_node->op() == RegExpMatchFast) {
                regExpObjectNode = m_node->child2().node();
                if (RegExpObject* regExpObject = regExpObjectNode->dynamicCastConstant<RegExpObject*>(vm()))
                    regExp = regExpObject->regExp();
                else if (regExpObjectNode->op() == NewRegexp)
                    regExp = regExpObjectNode->castOperand<RegExp*>();
                else {
                    if (verbose)
                        dataLog("Giving up because the regexp is unknown.\n");
                    break;
                }
            } else
                regExp = m_node->castOperand<RegExp*>();

            if (m_node->op() == RegExpMatchFast) {
                if (regExp->global()) {
                    if (regExp->sticky())
                        break;
                    if (m_node->child3().useKind() != StringUse)
                        break;
                    NodeOrigin origin = m_node->origin;
                    m_insertionSet.insertNode(
                        m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());
                    m_insertionSet.insertNode(
                        m_nodeIndex, SpecNone, SetRegExpObjectLastIndex, origin,
                        OpInfo(false),
                        Edge(regExpObjectNode, RegExpObjectUse),
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, jsNumber(0), UntypedUse));
                    origin = origin.withInvalidExit();
                    m_node->convertToRegExpMatchFastGlobalWithoutChecks(m_graph.freeze(regExp));
                    m_node->origin = origin;
                    m_changed = true;
                    break;
                }

                m_node->setOp(RegExpExec);
                m_changed = true;
                // Continue performing strength reduction onto RegExpExec node.
            }

            ASSERT(m_node->op() != RegExpMatchFast);

            auto foldToConstant = [&] {
                Node* stringNode = nullptr;
                if (m_node->op() == RegExpExecNonGlobalOrSticky)
                    stringNode = m_node->child2().node();
                else
                    stringNode = m_node->child3().node();

                // NOTE: This mostly already protects us from having the compiler execute a regexp
                // operation on a ginormous string by preventing us from getting our hands on ginormous
                // strings in the first place.
                String string = stringNode->tryGetString(m_graph);
                if (!string) {
                    if (verbose)
                        dataLog("Giving up because the string is unknown.\n");
                    return false;
                }

                FrozenValue* regExpFrozenValue = m_graph.freeze(regExp);

                // Refuse to do things with regular expressions that have a ginormous number of
                // subpatterns.
                unsigned ginormousNumberOfSubPatterns = 1000;
                if (regExp->numSubpatterns() > ginormousNumberOfSubPatterns) {
                    if (verbose)
                        dataLog("Giving up because of pattern limit.\n");
                    return false;
                }

                if ((m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) && regExp->hasNamedCaptures()) {
                    // FIXME: https://bugs.webkit.org/show_bug.cgi?id=176464
                    // Implement strength reduction optimization for named capture groups.
                    if (verbose)
                        dataLog("Giving up because of named capture groups.\n");
                    return false;
                }

                unsigned lastIndex;
                if (regExp->globalOrSticky()) {
                    // This will only work if we can prove what the value of lastIndex is. To do this
                    // safely, we need to execute the insertion set so that we see any previous strength
                    // reductions. This is needed for soundness since otherwise the effectfulness of any
                    // previous strength reductions would be invisible to us.
                    ASSERT(regExpObjectNode);
                    executeInsertionSet();
                    lastIndex = UINT_MAX;
                    for (unsigned otherNodeIndex = m_nodeIndex; otherNodeIndex--;) {
                        Node* otherNode = m_block->at(otherNodeIndex);
                        if (otherNode == regExpObjectNode) {
                            lastIndex = 0;
                            break;
                        }
                        if (otherNode->op() == SetRegExpObjectLastIndex
                            && otherNode->child1() == regExpObjectNode
                            && otherNode->child2()->isInt32Constant()
                            && otherNode->child2()->asInt32() >= 0) {
                            lastIndex = static_cast<unsigned>(otherNode->child2()->asInt32());
                            break;
                        }
                        if (writesOverlap(m_graph, otherNode, RegExpObject_lastIndex))
                            break;
                    }
                    if (lastIndex == UINT_MAX) {
                        if (verbose)
                            dataLog("Giving up because the last index is not known.\n");
                        return false;
                    }
                } else
                    lastIndex = 0;

                m_graph.watchpoints().addLazily(globalObject->havingABadTimeWatchpoint());

                Structure* structure = globalObject->regExpMatchesArrayStructure();
                if (structure->indexingType() != ArrayWithContiguous) {
                    // This is further protection against a race with haveABadTime.
                    if (verbose)
                        dataLog("Giving up because the structure has the wrong indexing type.\n");
                    return false;
                }
                m_graph.registerStructure(structure);

                FrozenValue* globalObjectFrozenValue = m_graph.freeze(globalObject);

                MatchResult result;
                Vector<int> ovector;
                // We have to call the kind of match function that the main thread would have called.
                // Otherwise, we might not have the desired Yarr code compiled, and the match will fail.
                if (m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) {
                    int position;
                    if (!regExp->matchConcurrently(vm(), string, lastIndex, position, ovector)) {
                        if (verbose)
                            dataLog("Giving up because match failed.\n");
                        return false;
                    }
                    result.start = position;
                    result.end = ovector[1];
                } else {
                    if (!regExp->matchConcurrently(vm(), string, lastIndex, result)) {
                        if (verbose)
                            dataLog("Giving up because match failed.\n");
                        return false;
                    }
                }

                // We've constant-folded the regexp. Now we're committed to replacing RegExpExec/Test.

                m_changed = true;

                NodeOrigin origin = m_node->origin;

                m_insertionSet.insertNode(
                    m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());

                if (m_node->op() == RegExpExec || m_node->op() == RegExpExecNonGlobalOrSticky) {
                    if (result) {
                        RegisteredStructureSet* structureSet = m_graph.addStructureSet(structure);

                        // Create an array modeling the JS array that we will try to allocate. This is
                        // basically createRegExpMatchesArray but over C++ strings instead of JSStrings.
                        Vector<String> resultArray;
                        resultArray.append(string.substring(result.start, result.end - result.start));
                        for (unsigned i = 1; i <= regExp->numSubpatterns(); ++i) {
                            int start = ovector[2 * i];
                            if (start >= 0)
                                resultArray.append(string.substring(start, ovector[2 * i + 1] - start));
                            else
                                resultArray.append(String());
                        }

                        unsigned publicLength = resultArray.size();
                        unsigned vectorLength =
                            Butterfly::optimalContiguousVectorLength(structure, publicLength);

                        UniquedStringImpl* indexUID = vm().propertyNames->index.impl();
                        UniquedStringImpl* inputUID = vm().propertyNames->input.impl();
                        UniquedStringImpl* groupsUID = vm().propertyNames->groups.impl();
                        unsigned indexIndex = m_graph.identifiers().ensure(indexUID);
                        unsigned inputIndex = m_graph.identifiers().ensure(inputUID);
                        unsigned groupsIndex = m_graph.identifiers().ensure(groupsUID);

                        unsigned firstChild = m_graph.m_varArgChildren.size();
                        m_graph.m_varArgChildren.append(
                            m_insertionSet.insertConstantForUse(
                                m_nodeIndex, origin, structure, KnownCellUse));
                        ObjectMaterializationData* data = m_graph.m_objectMaterializationData.add();

                        m_graph.m_varArgChildren.append(
                            m_insertionSet.insertConstantForUse(
                                m_nodeIndex, origin, jsNumber(publicLength), KnownInt32Use));
                        data->m_properties.append(PublicLengthPLoc);

                        m_graph.m_varArgChildren.append(
                            m_insertionSet.insertConstantForUse(
                                m_nodeIndex, origin, jsNumber(vectorLength), KnownInt32Use));
                        data->m_properties.append(VectorLengthPLoc);

                        m_graph.m_varArgChildren.append(
                            m_insertionSet.insertConstantForUse(
                                m_nodeIndex, origin, jsNumber(result.start), UntypedUse));
                        data->m_properties.append(
                            PromotedLocationDescriptor(NamedPropertyPLoc, indexIndex));

                        m_graph.m_varArgChildren.append(Edge(stringNode, UntypedUse));
                        data->m_properties.append(
                            PromotedLocationDescriptor(NamedPropertyPLoc, inputIndex));

                        // FIXME: https://bugs.webkit.org/show_bug.cgi?id=176464
                        // Implement strength reduction optimization for named capture groups.
                        m_graph.m_varArgChildren.append(
                            m_insertionSet.insertConstantForUse(
                                m_nodeIndex, origin, jsUndefined(), UntypedUse));
                        data->m_properties.append(
                            PromotedLocationDescriptor(NamedPropertyPLoc, groupsIndex));

                        auto materializeString = [&] (const String& string) -> Node* {
                            if (string.isNull())
                                return nullptr;
                            if (string.isEmpty()) {
                                return m_insertionSet.insertConstant(
                                    m_nodeIndex, origin, vm().smallStrings.emptyString());
                            }
                            LazyJSValue value = LazyJSValue::newString(m_graph, string);
                            return m_insertionSet.insertNode(
                                m_nodeIndex, SpecNone, LazyJSConstant, origin,
                                OpInfo(m_graph.m_lazyJSValues.add(value)));
                        };

                        for (unsigned i = 0; i < resultArray.size(); ++i) {
                            if (Node* node = materializeString(resultArray[i])) {
                                m_graph.m_varArgChildren.append(Edge(node, UntypedUse));
                                data->m_properties.append(
                                    PromotedLocationDescriptor(IndexedPropertyPLoc, i));
                            }
                        }

                        Node* resultNode = m_insertionSet.insertNode(
                            m_nodeIndex, SpecArray, Node::VarArg, MaterializeNewObject, origin,
                            OpInfo(structureSet), OpInfo(data), firstChild,
                            m_graph.m_varArgChildren.size() - firstChild);

                        m_node->convertToIdentityOn(resultNode);
                    } else
                        m_graph.convertToConstant(m_node, jsNull());
                } else
                    m_graph.convertToConstant(m_node, jsBoolean(!!result));

                // Whether it's Exec or Test, we need to tell the globalObject and RegExpObject what's up.
                // Because SetRegExpObjectLastIndex may exit and it clobbers exit state, we do that
                // first.

                if (regExp->globalOrSticky()) {
                    ASSERT(regExpObjectNode);
                    m_insertionSet.insertNode(
                        m_nodeIndex, SpecNone, SetRegExpObjectLastIndex, origin,
                        OpInfo(false),
                        Edge(regExpObjectNode, RegExpObjectUse),
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, jsNumber(result ? result.end : 0), UntypedUse));

                    origin = origin.withInvalidExit();
                }

                if (result) {
                    unsigned firstChild = m_graph.m_varArgChildren.size();
                    m_graph.m_varArgChildren.append(
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, globalObjectFrozenValue, KnownCellUse));
                    m_graph.m_varArgChildren.append(
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, regExpFrozenValue, KnownCellUse));
                    m_graph.m_varArgChildren.append(Edge(stringNode, KnownCellUse));
                    m_graph.m_varArgChildren.append(
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, jsNumber(result.start), KnownInt32Use));
                    m_graph.m_varArgChildren.append(
                        m_insertionSet.insertConstantForUse(
                            m_nodeIndex, origin, jsNumber(result.end), KnownInt32Use));
                    m_insertionSet.insertNode(
                        m_nodeIndex, SpecNone, Node::VarArg, RecordRegExpCachedResult, origin,
                        OpInfo(), OpInfo(), firstChild, m_graph.m_varArgChildren.size() - firstChild);

                    origin = origin.withInvalidExit();
                }

                m_node->origin = origin;
                return true;
            };

            auto convertToStatic = [&] {
                if (m_node->op() != RegExpExec)
                    return false;
                if (regExp->globalOrSticky())
                    return false;
                if (m_node->child3().useKind() != StringUse)
                    return false;
                NodeOrigin origin = m_node->origin;
                m_insertionSet.insertNode(
                    m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());
                m_node->convertToRegExpExecNonGlobalOrStickyWithoutChecks(m_graph.freeze(regExp));
                m_changed = true;
                return true;
            };

            if (foldToConstant())
                break;

            if (convertToStatic())
                break;

            break;
        }

        case StringReplace:
        case StringReplaceRegExp: {
            Node* stringNode = m_node->child1().node();
            String string = stringNode->tryGetString(m_graph);
            if (!string)
                break;

            Node* regExpObjectNode = m_node->child2().node();
            RegExp* regExp;
            if (RegExpObject* regExpObject = regExpObjectNode->dynamicCastConstant<RegExpObject*>(vm()))
                regExp = regExpObject->regExp();
            else if (regExpObjectNode->op() == NewRegexp)
                regExp = regExpObjectNode->castOperand<RegExp*>();
            else {
                if (verbose)
                    dataLog("Giving up because the regexp is unknown.\n");
                break;
            }

            String replace = m_node->child3()->tryGetString(m_graph);
            if (!replace)
                break;

            StringBuilder builder;

            unsigned lastIndex = 0;
            unsigned startPosition = 0;
            bool ok = true;
            do {
                MatchResult result;
                Vector<int> ovector;
                // Model which version of match() is called by the main thread.
                if (replace.isEmpty() && regExp->global()) {
                    if (!regExp->matchConcurrently(vm(), string, startPosition, result)) {
                        ok = false;
                        break;
                    }
                } else {
                    int position;
                    if (!regExp->matchConcurrently(vm(), string, startPosition, position, ovector)) {
                        ok = false;
                        break;
                    }

                    result.start = position;
                    result.end = ovector[1];
                }

                if (!result)
                    break;

                unsigned replLen = replace.length();
                if (lastIndex < result.start || replLen) {
                    builder.appendSubstring(string, lastIndex, result.start - lastIndex);
                    if (replLen) {
                        StringBuilder replacement;
                        substituteBackreferences(replacement, replace, string, ovector.data(), regExp);
                        builder.append(replacement);
                    }
                }

                lastIndex = result.end;
                startPosition = lastIndex;

                // special case of empty match
                if (result.empty()) {
                    startPosition++;
                    if (startPosition > string.length())
                        break;
                }
            } while (regExp->global());
            if (!ok)
                break;

            // We are committed at this point.
            m_changed = true;

            NodeOrigin origin = m_node->origin;

            // Preserve any checks we have.
            m_insertionSet.insertNode(
                m_nodeIndex, SpecNone, Check, origin, m_node->children.justChecks());

            if (regExp->global()) {
                m_insertionSet.insertNode(
                    m_nodeIndex, SpecNone, SetRegExpObjectLastIndex, origin,
                    OpInfo(false),
                    Edge(regExpObjectNode, RegExpObjectUse),
                    m_insertionSet.insertConstantForUse(
                        m_nodeIndex, origin, jsNumber(0), UntypedUse));

                origin = origin.withInvalidExit();
            }

            if (!lastIndex && builder.isEmpty())
                m_node->convertToIdentityOn(stringNode);
            else {
                if (lastIndex < string.length())
                    builder.appendSubstring(string, lastIndex, string.length() - lastIndex);

                m_node->convertToLazyJSConstant(m_graph, LazyJSValue::newString(m_graph, builder.toString()));
            }

            m_node->origin = origin;
            break;
        }

        case Call:
        case Construct:
        case TailCallInlinedCaller:
        case TailCall: {
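            // If the callee is a compile-time-known function (or a function allocation whose
            // executable we can see), turn this into a direct call on the known executable.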
            ExecutableBase* executable = nullptr;
            Edge callee = m_graph.varArgChild(m_node, 0);
            CallVariant callVariant;
            if (JSFunction* function = callee->dynamicCastConstant<JSFunction*>(vm())) {
                executable = function->executable();
                callVariant = CallVariant(function);
            } else if (callee->isFunctionAllocation()) {
                executable = callee->castOperand<FunctionExecutable*>();
                callVariant = CallVariant(executable);
            }

            if (!executable)
                break;

            if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(vm(), executable)) {
                if (m_node->op() == Construct && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct)
                    break;

                // We need to update m_parameterSlots before we get to the backend, but we don't
                // want to do too much of this.
                unsigned numAllocatedArgs =
                    static_cast<unsigned>(functionExecutable->parameterCount()) + 1;

                if (numAllocatedArgs <= Options::maximumDirectCallStackSize()) {
                    m_graph.m_parameterSlots = std::max(
                        m_graph.m_parameterSlots,
                        Graph::parameterSlotsForArgCount(numAllocatedArgs));
                }
            }

            m_graph.m_plan.recordedStatuses().addCallLinkStatus(m_node->origin.semantic, CallLinkStatus(callVariant));

            m_node->convertToDirectCall(m_graph.freeze(executable));
            m_changed = true;
            break;
        }

        default:
            break;
        }
    }

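    // Keeps the checks implied by this node's edges, drops the other child, and turns the node
    // into an Identity over the surviving child.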
    void convertToIdentityOverChild(unsigned childIndex)
    {
        ASSERT(!(m_node->flags() & NodeHasVarArgs));
        m_insertionSet.insertCheck(m_graph, m_nodeIndex, m_node);
        m_node->children.removeEdge(childIndex ^ 1);
        m_node->convertToIdentity();
        m_changed = true;
    }

    void convertToIdentityOverChild1()
    {
        convertToIdentityOverChild(0);
    }

    void convertToIdentityOverChild2()
    {
        convertToIdentityOverChild(1);
    }

    void convertToLazyJSValue(Node* node, LazyJSValue value)
    {
        m_insertionSet.insertCheck(m_graph, m_nodeIndex, node);
        node->convertToLazyJSConstant(m_graph, value);
    }

    void handleCommutativity()
    {
        // It's definitely not sound to swap the lhs and rhs when we may be performing effectful
        // calls on the lhs/rhs for valueOf.
        if (m_node->child1().useKind() == UntypedUse || m_node->child2().useKind() == UntypedUse)
            return;

        // If the right side is a constant then there is nothing left to do.
        if (m_node->child2()->hasConstant())
            return;

        // This case ensures that optimizations that look for x + const don't also have
        // to look for const + x.
        if (m_node->child1()->hasConstant() && !m_node->child1()->asJSValue().isCell()) {
            std::swap(m_node->child1(), m_node->child2());
            m_changed = true;
            return;
        }

        // This case ensures that CSE is commutativity-aware.
        if (m_node->child1().node() > m_node->child2().node()) {
            std::swap(m_node->child1(), m_node->child2());
            m_changed = true;
            return;
        }
    }

    void executeInsertionSet()
    {
        m_nodeIndex += m_insertionSet.execute(m_block);
    }

    InsertionSet m_insertionSet;
    BasicBlock* m_block;
    unsigned m_nodeIndex;
    Node* m_node;
    bool m_changed;
};

bool performStrengthReduction(Graph& graph)
{
    return runPhase<StrengthReductionPhase>(graph);
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)