1/*
2 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <[email protected]>
4 * Copyright (C) 2012 Igalia, S.L.
5 *
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
8 * are met:
9 *
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
16 * its contributors may be used to endorse or promote products derived
17 * from this software without specific prior written permission.
18 *
19 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 */
30
31#include "config.h"
32#include "BytecodeGenerator.h"
33
34#include "ArithProfile.h"
35#include "BuiltinExecutables.h"
36#include "BuiltinNames.h"
37#include "BytecodeGeneratorification.h"
38#include "BytecodeLivenessAnalysis.h"
39#include "BytecodeStructs.h"
40#include "BytecodeUseDef.h"
41#include "CatchScope.h"
42#include "DefinePropertyAttributes.h"
43#include "Interpreter.h"
44#include "JSAsyncGeneratorFunction.h"
45#include "JSBigInt.h"
46#include "JSCInlines.h"
47#include "JSFixedArray.h"
48#include "JSFunction.h"
49#include "JSGeneratorFunction.h"
50#include "JSImmutableButterfly.h"
51#include "JSLexicalEnvironment.h"
52#include "JSTemplateObjectDescriptor.h"
53#include "LowLevelInterpreter.h"
54#include "Options.h"
55#include "PreciseJumpTargetsInlines.h"
56#include "StackAlignment.h"
57#include "StrongInlines.h"
58#include "SuperSamplerBytecodeScope.h"
59#include "UnlinkedCodeBlock.h"
60#include "UnlinkedEvalCodeBlock.h"
61#include "UnlinkedFunctionCodeBlock.h"
62#include "UnlinkedMetadataTableInlines.h"
63#include "UnlinkedModuleProgramCodeBlock.h"
64#include "UnlinkedProgramCodeBlock.h"
65#include <wtf/BitVector.h>
66#include <wtf/CommaPrinter.h>
67#include <wtf/Optional.h>
68#include <wtf/SmallPtrSet.h>
69#include <wtf/StdLibExtras.h>
70#include <wtf/text/WTFString.h>
71
72namespace JSC {
73
74template<typename CallOp, typename = std::true_type>
75struct VarArgsOp;
76
77template<typename CallOp>
78struct VarArgsOp<CallOp, std::enable_if_t<std::is_same<CallOp, OpTailCall>::value, std::true_type>> {
79 using type = OpTailCallVarargs;
80};
81
82
83template<typename CallOp>
84struct VarArgsOp<CallOp, std::enable_if_t<!std::is_same<CallOp, OpTailCall>::value, std::true_type>> {
85 using type = OpCallVarargs;
86};
87
88
// Trims unreferenced registers from the tail of a segmented vector: pops
// trailing entries whose refCount() has dropped to zero, stopping at the
// first still-referenced entry (or when the vector becomes empty).
template<typename T>
static inline void shrinkToFit(T& segmentedVector)
{
    for (;;) {
        if (!segmentedVector.size())
            break;
        if (segmentedVector.last().refCount())
            break;
        segmentedVector.removeLast();
    }
}
95
// Binds this label to a concrete bytecode offset and back-patches every jump
// instruction that was emitted before the label's location was known.
void Label::setLocation(BytecodeGenerator& generator, unsigned location)
{
    m_location = location;

    // Each entry in m_unresolvedJumps is the bytecode offset of a jump whose
    // target operand still needs to be filled in.
    for (auto offset : m_unresolvedJumps) {
        auto instruction = generator.m_writer.ref(offset);
        // Jump targets are encoded relative to the jump instruction itself.
        int target = m_location - offset;

// setTargetLabel() receives a fallback lambda; it records the target in the
// code block's out-of-line jump-target table and returns BoundLabel() (0) as
// the in-instruction operand — presumably invoked when the relative target
// does not fit the instruction's operand width (TODO confirm).
#define CASE(__op) \
    case __op::opcodeID: \
        instruction->cast<__op>()->setTargetLabel(BoundLabel(target), [&]() { \
            generator.m_codeBlock->addOutOfLineJumpTarget(instruction.offset(), target); \
            return BoundLabel(); \
        }); \
        break;

        // Every jump-family opcode must be listed here; a label can only be
        // the target of one of these.
        switch (instruction->opcodeID()) {
        CASE(OpJmp)
        CASE(OpJtrue)
        CASE(OpJfalse)
        CASE(OpJeqNull)
        CASE(OpJneqNull)
        CASE(OpJeq)
        CASE(OpJstricteq)
        CASE(OpJneq)
        CASE(OpJneqPtr)
        CASE(OpJnstricteq)
        CASE(OpJless)
        CASE(OpJlesseq)
        CASE(OpJgreater)
        CASE(OpJgreatereq)
        CASE(OpJnless)
        CASE(OpJnlesseq)
        CASE(OpJngreater)
        CASE(OpJngreatereq)
        CASE(OpJbelow)
        CASE(OpJbeloweq)
        default:
            ASSERT_NOT_REACHED();
        }
#undef CASE
    }
}
139
140int BoundLabel::target()
141{
142 switch (m_type) {
143 case Offset:
144 return m_target;
145 case GeneratorBackward:
146 return m_target - m_generator->m_writer.position();
147 case GeneratorForward:
148 return 0;
149 default:
150 RELEASE_ASSERT_NOT_REACHED();
151 }
152}
153
154int BoundLabel::saveTarget()
155{
156 if (m_type == GeneratorForward) {
157 m_savedTarget = m_generator->m_writer.position();
158 return 0;
159 }
160
161 m_savedTarget = target();
162 return m_savedTarget;
163}
164
165int BoundLabel::commitTarget()
166{
167 if (m_type == GeneratorForward) {
168 m_label->m_unresolvedJumps.append(m_savedTarget);
169 return 0;
170 }
171
172 return m_savedTarget;
173}
174
175void Variable::dump(PrintStream& out) const
176{
177 out.print(
178 "{ident = ", m_ident,
179 ", offset = ", m_offset,
180 ", local = ", RawPointer(m_local),
181 ", attributes = ", m_attributes,
182 ", kind = ", m_kind,
183 ", symbolTableConstantIndex = ", m_symbolTableConstantIndex,
184 ", isLexicallyScoped = ", m_isLexicallyScoped, "}");
185}
186
// Opens a new finally context, linked to the enclosing one so contexts form a
// stack rooted at the generator's m_currentFinallyContext.
FinallyContext::FinallyContext(BytecodeGenerator& generator, Label& finallyLabel)
    : m_outerContext(generator.m_currentFinallyContext)
    , m_finallyLabel(&finallyLabel)
{
    ASSERT(m_jumps.isEmpty());
    // The completion record tracks how control reached the finally block
    // (e.g. CompletionType::Normal vs. Throw, as used in generate()) plus any
    // associated value. Allocate its two registers up front.
    m_completionRecord.typeRegister = generator.newTemporary();
    m_completionRecord.valueRegister = generator.newTemporary();
    // Initialize to the Normal completion state with an empty value.
    generator.emitLoad(completionTypeRegister(), CompletionType::Normal);
    generator.moveEmptyValue(completionValueRegister());
}
197
198ParserError BytecodeGenerator::generate()
199{
200 m_codeBlock->setThisRegister(m_thisRegister.virtualRegister());
201
202 emitLogShadowChickenPrologueIfNecessary();
203
204 // If we have declared a variable named "arguments" and we are using arguments then we should
205 // perform that assignment now.
206 if (m_needToInitializeArguments)
207 initializeVariable(variable(propertyNames().arguments), m_argumentsRegister);
208
209 if (m_restParameter)
210 m_restParameter->emit(*this);
211
212 {
213 RefPtr<RegisterID> temp = newTemporary();
214 RefPtr<RegisterID> tolLevelScope;
215 for (auto functionPair : m_functionsToInitialize) {
216 FunctionMetadataNode* metadata = functionPair.first;
217 FunctionVariableType functionType = functionPair.second;
218 emitNewFunction(temp.get(), metadata);
219 if (functionType == NormalFunctionVariable)
220 initializeVariable(variable(metadata->ident()), temp.get());
221 else if (functionType == TopLevelFunctionVariable) {
222 if (!tolLevelScope) {
223 // We know this will resolve to the top level scope or global object because our parser/global initialization code
224 // doesn't allow let/const/class variables to have the same names as functions.
225 // This is a top level function, and it's an error to ever create a top level function
226 // name that would resolve to a lexical variable. E.g:
227 // ```
228 // function f() {
229 // {
230 // let x;
231 // {
232 // //// error thrown here
233 // eval("function x(){}");
234 // }
235 // }
236 // }
237 // ```
238 // Therefore, we're guaranteed to have this resolve to a top level variable.
239 RefPtr<RegisterID> tolLevelObjectScope = emitResolveScope(nullptr, Variable(metadata->ident()));
240 tolLevelScope = newBlockScopeVariable();
241 move(tolLevelScope.get(), tolLevelObjectScope.get());
242 }
243 emitPutToScope(tolLevelScope.get(), Variable(metadata->ident()), temp.get(), ThrowIfNotFound, InitializationMode::NotInitialization);
244 } else
245 RELEASE_ASSERT_NOT_REACHED();
246 }
247 }
248
249 bool callingClassConstructor = constructorKind() != ConstructorKind::None && !isConstructor();
250 if (!callingClassConstructor)
251 m_scopeNode->emitBytecode(*this);
252 else {
253 // At this point we would have emitted an unconditional throw followed by some nonsense that's
254 // just an artifact of how this generator is structured. That code never runs, but it confuses
255 // bytecode analyses because it constitutes an unterminated basic block. So, we terminate the
256 // basic block the strongest way possible.
257 emitUnreachable();
258 }
259
260 for (auto& handler : m_exceptionHandlersToEmit) {
261 Ref<Label> realCatchTarget = newLabel();
262 TryData* tryData = handler.tryData;
263
264 OpCatch::emit(this, handler.exceptionRegister, handler.thrownValueRegister);
265 realCatchTarget->setLocation(*this, m_lastInstruction.offset());
266 if (handler.completionTypeRegister.isValid()) {
267 RegisterID completionTypeRegister { handler.completionTypeRegister };
268 CompletionType completionType =
269 tryData->handlerType == HandlerType::Finally || tryData->handlerType == HandlerType::SynthesizedFinally
270 ? CompletionType::Throw
271 : CompletionType::Normal;
272 emitLoad(&completionTypeRegister, completionType);
273 }
274 m_codeBlock->addJumpTarget(m_lastInstruction.offset());
275
276
277 emitJump(tryData->target.get());
278 tryData->target = WTFMove(realCatchTarget);
279 }
280
281 m_staticPropertyAnalyzer.kill();
282
283 for (auto& range : m_tryRanges) {
284 int start = range.start->bind();
285 int end = range.end->bind();
286
287 // This will happen for empty try blocks and for some cases of finally blocks:
288 //
289 // try {
290 // try {
291 // } finally {
292 // return 42;
293 // // *HERE*
294 // }
295 // } finally {
296 // print("things");
297 // }
298 //
299 // The return will pop scopes to execute the outer finally block. But this includes
300 // popping the try context for the inner try. The try context is live in the fall-through
301 // part of the finally block not because we will emit a handler that overlaps the finally,
302 // but because we haven't yet had a chance to plant the catch target. Then when we finish
303 // emitting code for the outer finally block, we repush the try contex, this time with a
304 // new start index. But that means that the start index for the try range corresponding
305 // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
306 // than the end index of the try block. This is harmless since end < start handlers will
307 // never get matched in our logic, but we do the runtime a favor and choose to not emit
308 // such handlers at all.
309 if (end <= start)
310 continue;
311
312 UnlinkedHandlerInfo info(static_cast<uint32_t>(start), static_cast<uint32_t>(end),
313 static_cast<uint32_t>(range.tryData->target->bind()), range.tryData->handlerType);
314 m_codeBlock->addExceptionHandler(info);
315 }
316
317
318 if (isGeneratorOrAsyncFunctionBodyParseMode(m_codeBlock->parseMode()))
319 performGeneratorification(*this, m_codeBlock.get(), m_writer, m_generatorFrameSymbolTable.get(), m_generatorFrameSymbolTableIndex);
320
321 RELEASE_ASSERT(static_cast<unsigned>(m_codeBlock->numCalleeLocals()) < static_cast<unsigned>(FirstConstantRegisterIndex));
322 m_codeBlock->setInstructions(m_writer.finalize());
323
324 m_codeBlock->shrinkToFit();
325
326 if (m_expressionTooDeep)
327 return ParserError(ParserError::OutOfMemory);
328 return ParserError(ParserError::ErrorNone);
329}
330
// Constructor for global (program) code. Emits the standard prologue and
// records the program's hoisted function declarations for generate().
BytecodeGenerator::BytecodeGenerator(VM& vm, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, OptionSet<CodeGenerationMode> codeGenerationMode, const VariableEnvironment* parentScopeTDZVariables)
    : m_codeGenerationMode(codeGenerationMode)
    , m_scopeNode(programNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_codeType(GlobalCode)
    , m_vm(&vm)
    , m_needsToUpdateArrowFunctionContext(programNode->usesArrowFunction() || programNode->usesEval())
{
    // Program code is the outermost scope, so there must be no TDZ variables
    // inherited from a parent scope.
    ASSERT_UNUSED(parentScopeTDZVariables, !parentScopeTDZVariables->size());

    for (auto& constantRegister : m_linkTimeConstantRegisters)
        constantRegister = nullptr;

    allocateCalleeSaveSpace();

    m_codeBlock->setNumParameters(1); // Allocate space for "this"

    emitEnter();

    allocateAndEmitScope();

    emitCheckTraps();

    // Global function declarations are hoisted: queue them so generate() can
    // create and bind them before the program body executes.
    const FunctionStack& functionStack = programNode->functionStack();

    for (auto* function : functionStack)
        m_functionsToInitialize.append(std::make_pair(function, TopLevelFunctionVariable));

    // At global scope, every var declaration should really be a var (not a
    // let/const/parameter); check that when bytecode validation is enabled.
    if (Options::validateBytecode()) {
        for (auto& entry : programNode->varDeclarations())
            RELEASE_ASSERT(entry.value.isVar());
    }
    codeBlock->setVariableDeclarations(programNode->varDeclarations());
    codeBlock->setLexicalDeclarations(programNode->lexicalVariables());
    // Even though this program may have lexical variables that go under TDZ, when linking the get_from_scope/put_to_scope
    // operations we emit we will have ResolveTypes that implicitly do TDZ checks. Therefore, we don't need
    // additional TDZ checks on top of those. This is why we can omit pushing programNode->lexicalVariables()
    // to the TDZ stack.

    if (needsToUpdateArrowFunctionContext()) {
        initializeArrowFunctionContextScopeIfNeeded();
        emitPutThisToArrowFunctionContextScope();
    }
}
376
377BytecodeGenerator::BytecodeGenerator(VM& vm, FunctionNode* functionNode, UnlinkedFunctionCodeBlock* codeBlock, OptionSet<CodeGenerationMode> codeGenerationMode, const VariableEnvironment* parentScopeTDZVariables)
378 : m_codeGenerationMode(codeGenerationMode)
379 , m_scopeNode(functionNode)
380 , m_codeBlock(vm, codeBlock)
381 , m_codeType(FunctionCode)
382 , m_vm(&vm)
383 , m_isBuiltinFunction(codeBlock->isBuiltinFunction())
384 , m_usesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode())
385 // FIXME: We should be able to have tail call elimination with the profiler
386 // enabled. This is currently not possible because the profiler expects
387 // op_will_call / op_did_call pairs before and after a call, which are not
388 // compatible with tail calls (we have no way of emitting op_did_call).
389 // https://bugs.webkit.org/show_bug.cgi?id=148819
390 , m_inTailPosition(Options::useTailCalls() && !isConstructor() && constructorKind() == ConstructorKind::None && isStrictMode())
391 , m_needsToUpdateArrowFunctionContext(functionNode->usesArrowFunction() || functionNode->usesEval())
392 , m_derivedContextType(codeBlock->derivedContextType())
393{
394 for (auto& constantRegister : m_linkTimeConstantRegisters)
395 constantRegister = nullptr;
396
397 allocateCalleeSaveSpace();
398
399 SymbolTable* functionSymbolTable = SymbolTable::create(*m_vm);
400 functionSymbolTable->setUsesNonStrictEval(m_usesNonStrictEval);
401 int symbolTableConstantIndex = 0;
402
403 FunctionParameters& parameters = *functionNode->parameters();
404 // http://www.ecma-international.org/ecma-262/6.0/index.html#sec-functiondeclarationinstantiation
405 // This implements IsSimpleParameterList in the Ecma 2015 spec.
406 // If IsSimpleParameterList is false, we will create a strict-mode like arguments object.
407 // IsSimpleParameterList is false if the argument list contains any default parameter values,
408 // a rest parameter, or any destructuring patterns.
409 // If we do have default parameters, destructuring parameters, or a rest parameter, our parameters will be allocated in a different scope.
410 bool isSimpleParameterList = parameters.isSimpleParameterList();
411
412 SourceParseMode parseMode = codeBlock->parseMode();
413
414 bool containsArrowOrEvalButNotInArrowBlock = ((functionNode->usesArrowFunction() && functionNode->doAnyInnerArrowFunctionsUseAnyFeature()) || functionNode->usesEval()) && !m_codeBlock->isArrowFunction();
415 bool shouldCaptureSomeOfTheThings = shouldEmitDebugHooks() || functionNode->needsActivation() || containsArrowOrEvalButNotInArrowBlock;
416
417 bool shouldCaptureAllOfTheThings = shouldEmitDebugHooks() || codeBlock->usesEval();
418 bool needsArguments = ((functionNode->usesArguments() && !codeBlock->isArrowFunction()) || codeBlock->usesEval() || (functionNode->usesArrowFunction() && !codeBlock->isArrowFunction() && isArgumentsUsedInInnerArrowFunction()));
419
420 if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode)) {
421 // Generator and AsyncFunction never provides "arguments". "arguments" reference will be resolved in an upper generator function scope.
422 needsArguments = false;
423 }
424
425 if (isGeneratorOrAsyncFunctionWrapperParseMode(parseMode) && needsArguments) {
426 // Generator does not provide "arguments". Instead, wrapping GeneratorFunction provides "arguments".
427 // This is because arguments of a generator should be evaluated before starting it.
428 // To workaround it, we evaluate these arguments as arguments of a wrapping generator function, and reference it from a generator.
429 //
430 // function *gen(a, b = hello())
431 // {
432 // return {
433 // @generatorNext: function (@generator, @generatorState, @generatorValue, @generatorResumeMode, @generatorFrame)
434 // {
435 // arguments; // This `arguments` should reference to the gen's arguments.
436 // ...
437 // }
438 // }
439 // }
440 shouldCaptureSomeOfTheThings = true;
441 }
442
443 if (shouldCaptureAllOfTheThings)
444 functionNode->varDeclarations().markAllVariablesAsCaptured();
445
446 auto captures = scopedLambda<bool (UniquedStringImpl*)>([&] (UniquedStringImpl* uid) -> bool {
447 if (!shouldCaptureSomeOfTheThings)
448 return false;
449 if (needsArguments && uid == propertyNames().arguments.impl()) {
450 // Actually, we only need to capture the arguments object when we "need full activation"
451 // because of name scopes. But historically we did it this way, so for now we just preserve
452 // the old behavior.
453 // FIXME: https://bugs.webkit.org/show_bug.cgi?id=143072
454 return true;
455 }
456 return functionNode->captures(uid);
457 });
458 auto varKind = [&] (UniquedStringImpl* uid) -> VarKind {
459 return captures(uid) ? VarKind::Scope : VarKind::Stack;
460 };
461
462 m_calleeRegister.setIndex(CallFrameSlot::callee);
463
464 initializeParameters(parameters);
465 ASSERT(!(isSimpleParameterList && m_restParameter));
466
467 emitEnter();
468
469 if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode))
470 m_generatorRegister = &m_parameters[1];
471
472 allocateAndEmitScope();
473
474 emitCheckTraps();
475
476 if (functionNameIsInScope(functionNode->ident(), functionNode->functionMode())) {
477 ASSERT(parseMode != SourceParseMode::GeneratorBodyMode);
478 ASSERT(!isAsyncFunctionBodyParseMode(parseMode));
479 bool isDynamicScope = functionNameScopeIsDynamic(codeBlock->usesEval(), codeBlock->isStrictMode());
480 bool isFunctionNameCaptured = captures(functionNode->ident().impl());
481 bool markAsCaptured = isDynamicScope || isFunctionNameCaptured;
482 emitPushFunctionNameScope(functionNode->ident(), &m_calleeRegister, markAsCaptured);
483 }
484
485 if (shouldCaptureSomeOfTheThings)
486 m_lexicalEnvironmentRegister = addVar();
487
488 if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode) || shouldCaptureSomeOfTheThings || shouldEmitTypeProfilerHooks())
489 symbolTableConstantIndex = addConstantValue(functionSymbolTable)->index();
490
491 // We can allocate the "var" environment if we don't have default parameter expressions. If we have
492 // default parameter expressions, we have to hold off on allocating the "var" environment because
493 // the parent scope of the "var" environment is the parameter environment.
494 if (isSimpleParameterList)
495 initializeVarLexicalEnvironment(symbolTableConstantIndex, functionSymbolTable, shouldCaptureSomeOfTheThings);
496
497 // Figure out some interesting facts about our arguments.
498 bool capturesAnyArgumentByName = false;
499 if (functionNode->hasCapturedVariables()) {
500 FunctionParameters& parameters = *functionNode->parameters();
501 for (size_t i = 0; i < parameters.size(); ++i) {
502 auto pattern = parameters.at(i).first;
503 if (!pattern->isBindingNode())
504 continue;
505 const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
506 capturesAnyArgumentByName |= captures(ident.impl());
507 }
508 }
509
510 if (capturesAnyArgumentByName)
511 ASSERT(m_lexicalEnvironmentRegister);
512
513 // Need to know what our functions are called. Parameters have some goofy behaviors when it
514 // comes to functions of the same name.
515 for (FunctionMetadataNode* function : functionNode->functionStack())
516 m_functions.add(function->ident().impl());
517
518 if (needsArguments) {
519 // Create the arguments object now. We may put the arguments object into the activation if
520 // it is captured. Either way, we create two arguments object variables: one is our
521 // private variable that is immutable, and another that is the user-visible variable. The
522 // immutable one is only used here, or during formal parameter resolutions if we opt for
523 // DirectArguments.
524
525 m_argumentsRegister = addVar();
526 m_argumentsRegister->ref();
527 }
528
529 if (needsArguments && !codeBlock->isStrictMode() && isSimpleParameterList) {
530 // If we captured any formal parameter by name, then we use ScopedArguments. Otherwise we
531 // use DirectArguments. With ScopedArguments, we lift all of our arguments into the
532 // activation.
533
534 if (capturesAnyArgumentByName) {
535 functionSymbolTable->setArgumentsLength(vm, parameters.size());
536
537 // For each parameter, we have two possibilities:
538 // Either it's a binding node with no function overlap, in which case it gets a name
539 // in the symbol table - or it just gets space reserved in the symbol table. Either
540 // way we lift the value into the scope.
541 for (unsigned i = 0; i < parameters.size(); ++i) {
542 ScopeOffset offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
543 functionSymbolTable->setArgumentOffset(vm, i, offset);
544 if (UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first)) {
545 VarOffset varOffset(offset);
546 SymbolTableEntry entry(varOffset);
547 // Stores to these variables via the ScopedArguments object will not do
548 // notifyWrite(), since that would be cumbersome. Also, watching formal
549 // parameters when "arguments" is in play is unlikely to be super profitable.
550 // So, we just disable it.
551 entry.disableWatching(*m_vm);
552 functionSymbolTable->set(NoLockingNecessary, name, entry);
553 }
554 OpPutToScope::emit(this, m_lexicalEnvironmentRegister, UINT_MAX, virtualRegisterForArgument(1 + i), GetPutInfo(ThrowIfNotFound, LocalClosureVar, InitializationMode::NotInitialization), SymbolTableOrScopeDepth::symbolTable(VirtualRegister { symbolTableConstantIndex }), offset.offset());
555 }
556
557 // This creates a scoped arguments object and copies the overflow arguments into the
558 // scope. It's the equivalent of calling ScopedArguments::createByCopying().
559 OpCreateScopedArguments::emit(this, m_argumentsRegister, m_lexicalEnvironmentRegister);
560 } else {
561 // We're going to put all parameters into the DirectArguments object. First ensure
562 // that the symbol table knows that this is happening.
563 for (unsigned i = 0; i < parameters.size(); ++i) {
564 if (UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first))
565 functionSymbolTable->set(NoLockingNecessary, name, SymbolTableEntry(VarOffset(DirectArgumentsOffset(i))));
566 }
567
568 OpCreateDirectArguments::emit(this, m_argumentsRegister);
569 }
570 } else if (isSimpleParameterList) {
571 // Create the formal parameters the normal way. Any of them could be captured, or not. If
572 // captured, lift them into the scope. We cannot do this if we have default parameter expressions
573 // because when default parameter expressions exist, they belong in their own lexical environment
574 // separate from the "var" lexical environment.
575 for (unsigned i = 0; i < parameters.size(); ++i) {
576 UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first);
577 if (!name)
578 continue;
579
580 if (!captures(name)) {
581 // This is the easy case - just tell the symbol table about the argument. It will
582 // be accessed directly.
583 functionSymbolTable->set(NoLockingNecessary, name, SymbolTableEntry(VarOffset(virtualRegisterForArgument(1 + i))));
584 continue;
585 }
586
587 ScopeOffset offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
588 const Identifier& ident =
589 static_cast<const BindingNode*>(parameters.at(i).first)->boundProperty();
590 functionSymbolTable->set(NoLockingNecessary, name, SymbolTableEntry(VarOffset(offset)));
591
592 OpPutToScope::emit(this, m_lexicalEnvironmentRegister, addConstant(ident), virtualRegisterForArgument(1 + i), GetPutInfo(ThrowIfNotFound, LocalClosureVar, InitializationMode::NotInitialization), SymbolTableOrScopeDepth::symbolTable(VirtualRegister { symbolTableConstantIndex }), offset.offset());
593 }
594 }
595
596 if (needsArguments && (codeBlock->isStrictMode() || !isSimpleParameterList)) {
597 // Allocate a cloned arguments object.
598 OpCreateClonedArguments::emit(this, m_argumentsRegister);
599 }
600
601 // There are some variables that need to be preinitialized to something other than Undefined:
602 //
603 // - "arguments": unless it's used as a function or parameter, this should refer to the
604 // arguments object.
605 //
606 // - functions: these always override everything else.
607 //
608 // The most logical way to do all of this is to initialize none of the variables until now,
609 // and then initialize them in BytecodeGenerator::generate() in such an order that the rules
610 // for how these things override each other end up holding. We would initialize "arguments" first,
611 // then all arguments, then the functions.
612 //
613 // But some arguments are already initialized by default, since if they aren't captured and we
614 // don't have "arguments" then we just point the symbol table at the stack slot of those
615 // arguments. We end up initializing the rest of the arguments that have an uncomplicated
616 // binding (i.e. don't involve destructuring) above when figuring out how to lay them out,
617 // because that's just the simplest thing. This means that when we initialize them, we have to
618 // watch out for the things that override arguments (namely, functions).
619
620 // This is our final act of weirdness. "arguments" is overridden by everything except the
621 // callee. We add it to the symbol table if it's not already there and it's not an argument.
622 bool shouldCreateArgumentsVariableInParameterScope = false;
623 if (needsArguments) {
624 // If "arguments" is overridden by a function or destructuring parameter name, then it's
625 // OK for us to call createVariable() because it won't change anything. It's also OK for
626 // us to them tell BytecodeGenerator::generate() to write to it because it will do so
627 // before it initializes functions and destructuring parameters. But if "arguments" is
628 // overridden by a "simple" function parameter, then we have to bail: createVariable()
629 // would assert and BytecodeGenerator::generate() would write the "arguments" after the
630 // argument value had already been properly initialized.
631
632 bool haveParameterNamedArguments = false;
633 for (unsigned i = 0; i < parameters.size(); ++i) {
634 UniquedStringImpl* name = visibleNameForParameter(parameters.at(i).first);
635 if (name == propertyNames().arguments.impl()) {
636 haveParameterNamedArguments = true;
637 break;
638 }
639 }
640
641 bool shouldCreateArgumensVariable = !haveParameterNamedArguments
642 && !SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(m_codeBlock->parseMode());
643 shouldCreateArgumentsVariableInParameterScope = shouldCreateArgumensVariable && !isSimpleParameterList;
644 // Do not create arguments variable in case of Arrow function. Value will be loaded from parent scope
645 if (shouldCreateArgumensVariable && !shouldCreateArgumentsVariableInParameterScope) {
646 createVariable(
647 propertyNames().arguments, varKind(propertyNames().arguments.impl()), functionSymbolTable);
648
649 m_needToInitializeArguments = true;
650 }
651 }
652
653 for (FunctionMetadataNode* function : functionNode->functionStack()) {
654 const Identifier& ident = function->ident();
655 createVariable(ident, varKind(ident.impl()), functionSymbolTable);
656 m_functionsToInitialize.append(std::make_pair(function, NormalFunctionVariable));
657 }
658 for (auto& entry : functionNode->varDeclarations()) {
659 ASSERT(!entry.value.isLet() && !entry.value.isConst());
660 if (!entry.value.isVar()) // This is either a parameter or callee.
661 continue;
662 if (shouldCreateArgumentsVariableInParameterScope && entry.key.get() == propertyNames().arguments.impl())
663 continue;
664 createVariable(Identifier::fromUid(m_vm, entry.key.get()), varKind(entry.key.get()), functionSymbolTable, IgnoreExisting);
665 }
666
667
668 m_newTargetRegister = addVar();
669 switch (parseMode) {
670 case SourceParseMode::GeneratorWrapperFunctionMode:
671 case SourceParseMode::GeneratorWrapperMethodMode:
672 case SourceParseMode::AsyncGeneratorWrapperMethodMode:
673 case SourceParseMode::AsyncGeneratorWrapperFunctionMode: {
674 m_generatorRegister = addVar();
675
676 // FIXME: Emit to_this only when Generator uses it.
677 // https://bugs.webkit.org/show_bug.cgi?id=151586
678 emitToThis();
679
680 move(m_generatorRegister, &m_calleeRegister);
681 emitCreateThis(m_generatorRegister);
682 break;
683 }
684
685 case SourceParseMode::AsyncArrowFunctionMode:
686 case SourceParseMode::AsyncMethodMode:
687 case SourceParseMode::AsyncFunctionMode: {
688 ASSERT(!isConstructor());
689 ASSERT(constructorKind() == ConstructorKind::None);
690 m_generatorRegister = addVar();
691 m_promiseCapabilityRegister = addVar();
692
693 if (parseMode != SourceParseMode::AsyncArrowFunctionMode) {
694 // FIXME: Emit to_this only when AsyncFunctionBody uses it.
695 // https://bugs.webkit.org/show_bug.cgi?id=151586
696 emitToThis();
697 }
698
699 emitNewObject(m_generatorRegister);
700
701 // let promiseCapability be @newPromiseCapability(@Promise)
702 auto varNewPromiseCapability = variable(propertyNames().builtinNames().newPromiseCapabilityPrivateName());
703 RefPtr<RegisterID> scope = newTemporary();
704 move(scope.get(), emitResolveScope(scope.get(), varNewPromiseCapability));
705 RefPtr<RegisterID> newPromiseCapability = emitGetFromScope(newTemporary(), scope.get(), varNewPromiseCapability, ThrowIfNotFound);
706
707 CallArguments args(*this, nullptr, 1);
708 emitLoad(args.thisRegister(), jsUndefined());
709
710 auto& builtinNames = propertyNames().builtinNames();
711 auto varPromiseConstructor = variable(m_isBuiltinFunction ? builtinNames.InternalPromisePrivateName() : builtinNames.PromisePrivateName());
712 move(scope.get(), emitResolveScope(scope.get(), varPromiseConstructor));
713 emitGetFromScope(args.argumentRegister(0), scope.get(), varPromiseConstructor, ThrowIfNotFound);
714
715 // JSTextPosition(int _line, int _offset, int _lineStartOffset)
716 JSTextPosition divot(m_scopeNode->firstLine(), m_scopeNode->startOffset(), m_scopeNode->lineStartOffset());
717 emitCall(promiseCapabilityRegister(), newPromiseCapability.get(), NoExpectedFunction, args, divot, divot, divot, DebuggableCall::No);
718 break;
719 }
720
721 case SourceParseMode::AsyncGeneratorBodyMode:
722 case SourceParseMode::AsyncFunctionBodyMode:
723 case SourceParseMode::AsyncArrowFunctionBodyMode:
724 case SourceParseMode::GeneratorBodyMode: {
725 // |this| is already filled correctly before here.
726 emitLoad(m_newTargetRegister, jsUndefined());
727 break;
728 }
729
730 default: {
731 if (SourceParseMode::ArrowFunctionMode != parseMode) {
732 if (isConstructor()) {
733 move(m_newTargetRegister, &m_thisRegister);
734 if (constructorKind() == ConstructorKind::Extends) {
735 moveEmptyValue(&m_thisRegister);
736 } else
737 emitCreateThis(&m_thisRegister);
738 } else if (constructorKind() != ConstructorKind::None)
739 emitThrowTypeError("Cannot call a class constructor without |new|");
740 else {
741 bool shouldEmitToThis = false;
742 if (functionNode->usesThis() || codeBlock->usesEval() || m_scopeNode->doAnyInnerArrowFunctionsUseThis() || m_scopeNode->doAnyInnerArrowFunctionsUseEval())
743 shouldEmitToThis = true;
744 else if ((functionNode->usesSuperProperty() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperProperty()) && !codeBlock->isStrictMode()) {
745 // We must emit to_this when we're not in strict mode because we
746 // will convert |this| to an object, and that object may be passed
747 // to a strict function as |this|. This is observable because that
748 // strict function's to_this will just return the object.
749 //
750 // We don't need to emit this for strict-mode code because
751 // strict-mode code may call another strict function, which will
752 // to_this if it directly uses this; this is OK, because we defer
753 // to_this until |this| is used directly. Strict-mode code might
754 // also call a sloppy mode function, and that will to_this, which
755 // will defer the conversion, again, until necessary.
756 shouldEmitToThis = true;
757 }
758
759 if (shouldEmitToThis)
760 emitToThis();
761 }
762 }
763 break;
764 }
765 }
766
767 // We need load |super| & |this| for arrow function before initializeDefaultParameterValuesAndSetupFunctionScopeStack
768 // if we have default parameter expression. Because |super| & |this| values can be used there
769 if ((SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(parseMode) && !isSimpleParameterList) || parseMode == SourceParseMode::AsyncArrowFunctionBodyMode) {
770 if (functionNode->usesThis() || functionNode->usesSuperProperty())
771 emitLoadThisFromArrowFunctionLexicalEnvironment();
772
773 if (m_scopeNode->usesNewTarget() || m_scopeNode->usesSuperCall())
774 emitLoadNewTargetFromArrowFunctionLexicalEnvironment();
775 }
776
777 if (needsToUpdateArrowFunctionContext() && !codeBlock->isArrowFunction()) {
778 bool canReuseLexicalEnvironment = isSimpleParameterList;
779 initializeArrowFunctionContextScopeIfNeeded(functionSymbolTable, canReuseLexicalEnvironment);
780 emitPutThisToArrowFunctionContextScope();
781 emitPutNewTargetToArrowFunctionContextScope();
782 emitPutDerivedConstructorToArrowFunctionContextScope();
783 }
784
785 // All "addVar()"s needs to happen before "initializeDefaultParameterValuesAndSetupFunctionScopeStack()" is called
786 // because a function's default parameter ExpressionNodes will use temporary registers.
787 pushTDZVariables(*parentScopeTDZVariables, TDZCheckOptimization::DoNotOptimize, TDZRequirement::UnderTDZ);
788
789 Ref<Label> catchLabel = newLabel();
790 TryData* tryFormalParametersData = nullptr;
791 bool needTryCatch = isAsyncFunctionWrapperParseMode(parseMode) && !isSimpleParameterList;
792 if (needTryCatch) {
793 Ref<Label> tryFormalParametersStart = newEmittedLabel();
794 tryFormalParametersData = pushTry(tryFormalParametersStart.get(), catchLabel.get(), HandlerType::SynthesizedCatch);
795 }
796
797 initializeDefaultParameterValuesAndSetupFunctionScopeStack(parameters, isSimpleParameterList, functionNode, functionSymbolTable, symbolTableConstantIndex, captures, shouldCreateArgumentsVariableInParameterScope);
798
799 if (needTryCatch) {
800 Ref<Label> didNotThrow = newLabel();
801 emitJump(didNotThrow.get());
802 emitLabel(catchLabel.get());
803 popTry(tryFormalParametersData, catchLabel.get());
804
805 RefPtr<RegisterID> thrownValue = newTemporary();
806 emitOutOfLineCatchHandler(thrownValue.get(), nullptr, tryFormalParametersData);
807
808 // return promiseCapability.@reject(thrownValue)
809 RefPtr<RegisterID> reject = emitGetById(newTemporary(), promiseCapabilityRegister(), m_vm->propertyNames->builtinNames().rejectPrivateName());
810
811 CallArguments args(*this, nullptr, 1);
812 emitLoad(args.thisRegister(), jsUndefined());
813 move(args.argumentRegister(0), thrownValue.get());
814
815 JSTextPosition divot(functionNode->firstLine(), functionNode->startOffset(), functionNode->lineStartOffset());
816
817 RefPtr<RegisterID> result = emitCall(newTemporary(), reject.get(), NoExpectedFunction, args, divot, divot, divot, DebuggableCall::No);
818 emitReturn(emitGetById(newTemporary(), promiseCapabilityRegister(), m_vm->propertyNames->builtinNames().promisePrivateName()));
819
820 emitLabel(didNotThrow.get());
821 }
822
823 // If we don't have default parameter expression, then loading |this| inside an arrow function must be done
824 // after initializeDefaultParameterValuesAndSetupFunctionScopeStack() because that function sets up the
825 // SymbolTable stack and emitLoadThisFromArrowFunctionLexicalEnvironment() consults the SymbolTable stack
826 if (SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(parseMode) && isSimpleParameterList) {
827 if (functionNode->usesThis() || functionNode->usesSuperProperty())
828 emitLoadThisFromArrowFunctionLexicalEnvironment();
829
830 if (m_scopeNode->usesNewTarget() || m_scopeNode->usesSuperCall())
831 emitLoadNewTargetFromArrowFunctionLexicalEnvironment();
832 }
833
834 // Set up the lexical environment scope as the generator frame. We store the saved and resumed generator registers into this scope with the symbol keys.
835 // Since they are symbol keyed, these variables cannot be reached from the usual code.
836 if (isGeneratorOrAsyncFunctionBodyParseMode(parseMode)) {
837 m_generatorFrameSymbolTable.set(*m_vm, functionSymbolTable);
838 m_generatorFrameSymbolTableIndex = symbolTableConstantIndex;
839 if (m_lexicalEnvironmentRegister)
840 move(generatorFrameRegister(), m_lexicalEnvironmentRegister);
841 else {
842 // It would be possible that generator does not need to suspend and resume any registers.
843 // In this case, we would like to avoid creating a lexical environment as much as possible.
844 // op_create_generator_frame_environment is a marker, which is similar to op_yield.
845 // Generatorification inserts lexical environment creation if necessary. Otherwise, we convert it to op_mov frame, `undefined`.
846 OpCreateGeneratorFrameEnvironment::emit(this, generatorFrameRegister(), scopeRegister(), VirtualRegister { symbolTableConstantIndex }, addConstantValue(jsUndefined()));
847 }
848 emitPutById(generatorRegister(), propertyNames().builtinNames().generatorFramePrivateName(), generatorFrameRegister());
849 }
850
851 bool shouldInitializeBlockScopedFunctions = false; // We generate top-level function declarations in ::generate().
852 pushLexicalScope(m_scopeNode, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, shouldInitializeBlockScopedFunctions);
853}
854
// Generator for eval code. Eval has no parameters beyond |this|; its "var"
// declarations and sloppy-mode function-hoisting candidates are handed to the
// unlinked code block (adoptVariables / adoptFunctionHoistingCandidates) so
// they can be hoisted into the variable scope later rather than declared here.
BytecodeGenerator::BytecodeGenerator(VM& vm, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, OptionSet<CodeGenerationMode> codeGenerationMode, const VariableEnvironment* parentScopeTDZVariables)
    : m_codeGenerationMode(codeGenerationMode)
    , m_scopeNode(evalNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_codeType(EvalCode)
    , m_vm(&vm)
    , m_usesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode())
    , m_needsToUpdateArrowFunctionContext(evalNode->usesArrowFunction() || evalNode->usesEval())
    , m_derivedContextType(codeBlock->derivedContextType())
{
    for (auto& constantRegister : m_linkTimeConstantRegisters)
        constantRegister = nullptr;

    allocateCalleeSaveSpace();

    m_codeBlock->setNumParameters(1); // Allocate space for "this".

    // Eval code performs TDZ checks against the surrounding scope's variables.
    pushTDZVariables(*parentScopeTDZVariables, TDZCheckOptimization::DoNotOptimize, TDZRequirement::UnderTDZ);

    emitEnter();

    allocateAndEmitScope();

    emitCheckTraps();

    // Top-level function declarations are compiled into the code block now;
    // their variable bindings are initialized later, in ::generate().
    for (FunctionMetadataNode* function : evalNode->functionStack()) {
        m_codeBlock->addFunctionDecl(makeFunction(function));
        m_functionsToInitialize.append(std::make_pair(function, TopLevelFunctionVariable));
    }

    // Split "var" declarations into plain variables and sloppy-mode function
    // hoisting candidates; both sets are adopted by the unlinked code block.
    const VariableEnvironment& varDeclarations = evalNode->varDeclarations();
    Vector<Identifier, 0, UnsafeVectorOverflow> variables;
    Vector<Identifier, 0, UnsafeVectorOverflow> hoistedFunctions;
    for (auto& entry : varDeclarations) {
        ASSERT(entry.value.isVar());
        ASSERT(entry.key->isAtom() || entry.key->isSymbol());
        if (entry.value.isSloppyModeHoistingCandidate())
            hoistedFunctions.append(Identifier::fromUid(m_vm, entry.key.get()));
        else
            variables.append(Identifier::fromUid(m_vm, entry.key.get()));
    }
    codeBlock->adoptVariables(variables);
    codeBlock->adoptFunctionHoistingCandidates(WTFMove(hoistedFunctions));

    if (evalNode->usesSuperCall() || evalNode->usesNewTarget())
        m_newTargetRegister = addVar();

    // Inside an arrow-function context, |this| and new.target are loaded from
    // the enclosing function's arrow-function lexical environment.
    if (codeBlock->isArrowFunctionContext() && (evalNode->usesThis() || evalNode->usesSuperProperty()))
        emitLoadThisFromArrowFunctionLexicalEnvironment();

    if (evalNode->usesSuperCall() || evalNode->usesNewTarget())
        emitLoadNewTargetFromArrowFunctionLexicalEnvironment();

    if (needsToUpdateArrowFunctionContext() && !codeBlock->isArrowFunctionContext() && !isDerivedConstructorContext()) {
        initializeArrowFunctionContextScopeIfNeeded();
        emitPutThisToArrowFunctionContextScope();
    }

    bool shouldInitializeBlockScopedFunctions = false; // We generate top-level function declarations in ::generate().
    pushLexicalScope(m_scopeNode, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, shouldInitializeBlockScopedFunctions);
}
917
// Generator for module code. Builds the module environment's symbol table up
// front (exported, namespace-imported, and captured variables are allocated in
// the module environment; everything else lives on the stack), declares all
// variables, and eagerly instantiates heap-allocated function declarations so
// that cyclically-imported modules can call them before this module's body runs.
BytecodeGenerator::BytecodeGenerator(VM& vm, ModuleProgramNode* moduleProgramNode, UnlinkedModuleProgramCodeBlock* codeBlock, OptionSet<CodeGenerationMode> codeGenerationMode, const VariableEnvironment* parentScopeTDZVariables)
    : m_codeGenerationMode(codeGenerationMode)
    , m_scopeNode(moduleProgramNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_codeType(ModuleCode)
    , m_vm(&vm)
    , m_usesNonStrictEval(false)
    , m_needsToUpdateArrowFunctionContext(moduleProgramNode->usesArrowFunction() || moduleProgramNode->usesEval())
{
    // Module code is top-level: there must be no enclosing TDZ variables.
    ASSERT_UNUSED(parentScopeTDZVariables, !parentScopeTDZVariables->size());

    for (auto& constantRegister : m_linkTimeConstantRegisters)
        constantRegister = nullptr;

    allocateCalleeSaveSpace();

    SymbolTable* moduleEnvironmentSymbolTable = SymbolTable::create(*m_vm);
    moduleEnvironmentSymbolTable->setUsesNonStrictEval(m_usesNonStrictEval);
    moduleEnvironmentSymbolTable->setScopeType(SymbolTable::ScopeType::LexicalScope);

    // Debug hooks and direct eval can observe any local, so force every
    // variable into the (heap) environment in those cases.
    bool shouldCaptureAllOfTheThings = shouldEmitDebugHooks() || codeBlock->usesEval();
    if (shouldCaptureAllOfTheThings)
        moduleProgramNode->varDeclarations().markAllVariablesAsCaptured();

    auto captures = [&] (UniquedStringImpl* uid) -> bool {
        return moduleProgramNode->captures(uid);
    };
    auto lookUpVarKind = [&] (UniquedStringImpl* uid, const VariableEnvironmentEntry& entry) -> VarKind {
        // Allocate the exported variables in the module environment.
        if (entry.isExported())
            return VarKind::Scope;

        // Allocate the namespace variables in the module environment to instantiate
        // it from the outside of the module code.
        if (entry.isImportedNamespace())
            return VarKind::Scope;

        if (entry.isCaptured())
            return VarKind::Scope;
        return captures(uid) ? VarKind::Scope : VarKind::Stack;
    };

    emitEnter();

    allocateAndEmitScope();

    emitCheckTraps();

    m_calleeRegister.setIndex(CallFrameSlot::callee);

    m_codeBlock->setNumParameters(1); // Allocate space for "this"

    // Now declare all variables.

    // The @meta private variable always lives in the module environment
    // (presumably backing import.meta — confirm against the linker).
    createVariable(m_vm->propertyNames->builtinNames().metaPrivateName(), VarKind::Scope, moduleEnvironmentSymbolTable, VerifyExisting);

    for (auto& entry : moduleProgramNode->varDeclarations()) {
        ASSERT(!entry.value.isLet() && !entry.value.isConst());
        if (!entry.value.isVar()) // This is either a parameter or callee.
            continue;
        // Imported bindings are not allocated in the module environment as usual variables' way.
        // These references remain the "Dynamic" in the unlinked code block. Later, when linking
        // the code block, we resolve the reference to the "ModuleVar".
        if (entry.value.isImported() && !entry.value.isImportedNamespace())
            continue;
        createVariable(Identifier::fromUid(m_vm, entry.key.get()), lookUpVarKind(entry.key.get(), entry.value), moduleEnvironmentSymbolTable, IgnoreExisting);
    }

    VariableEnvironment& lexicalVariables = moduleProgramNode->lexicalVariables();
    instantiateLexicalVariables(lexicalVariables, moduleEnvironmentSymbolTable, ScopeRegisterType::Block, lookUpVarKind);

    // We keep the symbol table in the constant pool.
    RegisterID* constantSymbolTable = nullptr;
    if (shouldEmitTypeProfilerHooks())
        constantSymbolTable = addConstantValue(moduleEnvironmentSymbolTable);
    else
        constantSymbolTable = addConstantValue(moduleEnvironmentSymbolTable->cloneScopePart(*m_vm));

    pushTDZVariables(lexicalVariables, TDZCheckOptimization::Optimize, TDZRequirement::UnderTDZ);
    bool isWithScope = false;
    m_lexicalScopeStack.append({ moduleEnvironmentSymbolTable, m_topMostScope, isWithScope, constantSymbolTable->index() });
    emitPrefillStackTDZVariables(lexicalVariables, moduleEnvironmentSymbolTable);

    // makeFunction assumes that there's correct TDZ stack entries.
    // So it should be called after putting our lexical environment to the TDZ stack correctly.

    for (FunctionMetadataNode* function : moduleProgramNode->functionStack()) {
        const auto& iterator = moduleProgramNode->varDeclarations().find(function->ident().impl());
        RELEASE_ASSERT(iterator != moduleProgramNode->varDeclarations().end());
        RELEASE_ASSERT(!iterator->value.isImported());

        VarKind varKind = lookUpVarKind(iterator->key.get(), iterator->value);
        if (varKind == VarKind::Scope) {
            // http://www.ecma-international.org/ecma-262/6.0/#sec-moduledeclarationinstantiation
            // Section 15.2.1.16.4, step 16-a-iv-1.
            // All heap allocated function declarations should be instantiated when the module environment
            // is created. They include the exported function declarations and not-exported-but-heap-allocated
            // function declarations. This is required because exported function should be instantiated before
            // executing the any module in the dependency graph. This enables the modules to link the imported
            // bindings before executing the any module code.
            //
            // And since function declarations are instantiated before executing the module body code, the spec
            // allows the functions inside the module to be executed before its module body is executed under
            // the circular dependencies. The following is the example.
            //
            // Module A (executed first):
            //     import { b } from "B";
            //     // Here, the module "B" is not executed yet, but the function declaration is already instantiated.
            //     // So we can call the function exported from "B".
            //     b();
            //
            //     export function a() {
            //     }
            //
            // Module B (executed second):
            //     import { a } from "A";
            //
            //     export function b() {
            //         c();
            //     }
            //
            //     // c is not exported, but since it is referenced from the b, we should instantiate it before
            //     // executing the "B" module code.
            //     function c() {
            //         a();
            //     }
            //
            // Module EntryPoint (executed last):
            //     import "B";
            //     import "A";
            //
            m_codeBlock->addFunctionDecl(makeFunction(function));
        } else {
            // Stack allocated functions can be allocated when executing the module's body.
            m_functionsToInitialize.append(std::make_pair(function, NormalFunctionVariable));
        }
    }

    // Remember the constant register offset to the top-most symbol table. This symbol table will be
    // cloned in the code block linking. After that, to create the module environment, we retrieve
    // the cloned symbol table from the linked code block by using this offset.
    codeBlock->setModuleEnvironmentSymbolTableConstantRegisterOffset(constantSymbolTable->index());
}
1062
1063BytecodeGenerator::~BytecodeGenerator()
1064{
1065}
1066
// Implements the parameter-handling portion of ES6 FunctionDeclarationInstantiation
// (spec section 9.2.12) for functions with a non-simple parameter list
// (destructuring patterns, default values, or a rest parameter): creates a
// separate "let"-like scope holding the parameters, evaluates default-value
// expressions, binds each parameter, then creates the distinct "var" scope and
// copies parameter values into any same-named "var" bindings.
void BytecodeGenerator::initializeDefaultParameterValuesAndSetupFunctionScopeStack(
    FunctionParameters& parameters, bool isSimpleParameterList, FunctionNode* functionNode, SymbolTable* functionSymbolTable,
    int symbolTableConstantIndex, const ScopedLambda<bool (UniquedStringImpl*)>& captures, bool shouldCreateArgumentsVariableInParameterScope)
{
    Vector<std::pair<Identifier, RefPtr<RegisterID>>> valuesToMoveIntoVars;
    ASSERT(!(isSimpleParameterList && shouldCreateArgumentsVariableInParameterScope));
    if (!isSimpleParameterList) {
        // Refer to the ES6 spec section 9.2.12: http://www.ecma-international.org/ecma-262/6.0/index.html#sec-functiondeclarationinstantiation
        // This implements step 21.
        VariableEnvironment environment;
        Vector<Identifier> allParameterNames;
        for (unsigned i = 0; i < parameters.size(); i++)
            parameters.at(i).first->collectBoundIdentifiers(allParameterNames);
        if (shouldCreateArgumentsVariableInParameterScope)
            allParameterNames.append(propertyNames().arguments);
        IdentifierSet parameterSet;
        for (auto& ident : allParameterNames) {
            parameterSet.add(ident.impl());
            auto addResult = environment.add(ident);
            addResult.iterator->value.setIsLet(); // When we have default parameter expressions, parameters act like "let" variables.
            if (captures(ident.impl()))
                addResult.iterator->value.setIsCaptured();
        }
        // This implements step 25 of section 9.2.12.
        pushLexicalScopeInternal(environment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::UnderTDZ, ScopeType::LetConstScope, ScopeRegisterType::Block);

        if (shouldCreateArgumentsVariableInParameterScope) {
            Variable argumentsVariable = variable(propertyNames().arguments);
            initializeVariable(argumentsVariable, m_argumentsRegister);
            liftTDZCheckIfPossible(argumentsVariable);
        }

        // One temporary is reused across all parameters: each iteration fills it
        // with the incoming argument (or the default value) and then binds it.
        RefPtr<RegisterID> temp = newTemporary();
        for (unsigned i = 0; i < parameters.size(); i++) {
            std::pair<DestructuringPatternNode*, ExpressionNode*> parameter = parameters.at(i);
            if (parameter.first->isRestParameter())
                continue;
            // m_parameters[0] is |this|, so argument i lives at slot i + 1 when it
            // was materialized; otherwise it must be fetched with op_get_argument.
            if ((i + 1) < m_parameters.size())
                move(temp.get(), &m_parameters[i + 1]);
            else
                emitGetArgument(temp.get(), i);
            if (parameter.second) {
                // Default value applies only when the argument is undefined.
                RefPtr<RegisterID> condition = emitIsUndefined(newTemporary(), temp.get());
                Ref<Label> skipDefaultParameterBecauseNotUndefined = newLabel();
                emitJumpIfFalse(condition.get(), skipDefaultParameterBecauseNotUndefined.get());
                emitNode(temp.get(), parameter.second);
                emitLabel(skipDefaultParameterBecauseNotUndefined.get());
            }

            parameter.first->bindValue(*this, temp.get());
        }

        // Final act of weirdness for default parameters. If a "var" also
        // has the same name as a parameter, it should start out as the
        // value of that parameter. Note, though, that they will be distinct
        // bindings.
        // This is step 28 of section 9.2.12.
        for (auto& entry : functionNode->varDeclarations()) {
            if (!entry.value.isVar()) // This is either a parameter or callee.
                continue;

            if (parameterSet.contains(entry.key)) {
                Identifier ident = Identifier::fromUid(m_vm, entry.key.get());
                Variable var = variable(ident);
                RegisterID* scope = emitResolveScope(nullptr, var);
                RefPtr<RegisterID> value = emitGetFromScope(newTemporary(), scope, var, DoNotThrowIfNotFound);
                valuesToMoveIntoVars.append(std::make_pair(ident, value));
            }
        }

        // Functions with default parameter expressions must have a separate environment
        // record for parameters and "var"s. The "var" environment record must have the
        // parameter environment record as its parent.
        // See step 28 of section 9.2.12.
        bool hasCapturedVariables = !!m_lexicalEnvironmentRegister;
        initializeVarLexicalEnvironment(symbolTableConstantIndex, functionSymbolTable, hasCapturedVariables);
    }

    // This completes step 28 of section 9.2.12.
    for (unsigned i = 0; i < valuesToMoveIntoVars.size(); i++) {
        ASSERT(!isSimpleParameterList);
        Variable var = variable(valuesToMoveIntoVars[i].first);
        RegisterID* scope = emitResolveScope(nullptr, var);
        emitPutToScope(scope, var, valuesToMoveIntoVars[i].second.get(), DoNotThrowIfNotFound, InitializationMode::NotInitialization);
    }
}
1153
1154bool BytecodeGenerator::needsDerivedConstructorInArrowFunctionLexicalEnvironment()
1155{
1156 ASSERT(m_codeBlock->isClassContext() || !(isConstructor() && constructorKind() == ConstructorKind::Extends));
1157 return m_codeBlock->isClassContext() && isSuperUsedInInnerArrowFunction();
1158}
1159
1160void BytecodeGenerator::initializeArrowFunctionContextScopeIfNeeded(SymbolTable* functionSymbolTable, bool canReuseLexicalEnvironment)
1161{
1162 ASSERT(!m_arrowFunctionContextLexicalEnvironmentRegister);
1163
1164 if (canReuseLexicalEnvironment && m_lexicalEnvironmentRegister) {
1165 RELEASE_ASSERT(!m_codeBlock->isArrowFunction());
1166 RELEASE_ASSERT(functionSymbolTable);
1167
1168 m_arrowFunctionContextLexicalEnvironmentRegister = m_lexicalEnvironmentRegister;
1169
1170 ScopeOffset offset;
1171
1172 if (isThisUsedInInnerArrowFunction()) {
1173 offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
1174 functionSymbolTable->set(NoLockingNecessary, propertyNames().thisIdentifier.impl(), SymbolTableEntry(VarOffset(offset)));
1175 }
1176
1177 if (m_codeType == FunctionCode && isNewTargetUsedInInnerArrowFunction()) {
1178 offset = functionSymbolTable->takeNextScopeOffset();
1179 functionSymbolTable->set(NoLockingNecessary, propertyNames().builtinNames().newTargetLocalPrivateName().impl(), SymbolTableEntry(VarOffset(offset)));
1180 }
1181
1182 if (needsDerivedConstructorInArrowFunctionLexicalEnvironment()) {
1183 offset = functionSymbolTable->takeNextScopeOffset(NoLockingNecessary);
1184 functionSymbolTable->set(NoLockingNecessary, propertyNames().builtinNames().derivedConstructorPrivateName().impl(), SymbolTableEntry(VarOffset(offset)));
1185 }
1186
1187 return;
1188 }
1189
1190 VariableEnvironment environment;
1191
1192 if (isThisUsedInInnerArrowFunction()) {
1193 auto addResult = environment.add(propertyNames().thisIdentifier);
1194 addResult.iterator->value.setIsCaptured();
1195 addResult.iterator->value.setIsLet();
1196 }
1197
1198 if (m_codeType == FunctionCode && isNewTargetUsedInInnerArrowFunction()) {
1199 auto addTarget = environment.add(propertyNames().builtinNames().newTargetLocalPrivateName());
1200 addTarget.iterator->value.setIsCaptured();
1201 addTarget.iterator->value.setIsLet();
1202 }
1203
1204 if (needsDerivedConstructorInArrowFunctionLexicalEnvironment()) {
1205 auto derivedConstructor = environment.add(propertyNames().builtinNames().derivedConstructorPrivateName());
1206 derivedConstructor.iterator->value.setIsCaptured();
1207 derivedConstructor.iterator->value.setIsLet();
1208 }
1209
1210 if (environment.size() > 0) {
1211 size_t size = m_lexicalScopeStack.size();
1212 pushLexicalScopeInternal(environment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::UnderTDZ, ScopeType::LetConstScope, ScopeRegisterType::Block);
1213
1214 ASSERT_UNUSED(size, m_lexicalScopeStack.size() == size + 1);
1215
1216 m_arrowFunctionContextLexicalEnvironmentRegister = m_lexicalScopeStack.last().m_scope;
1217 }
1218}
1219
1220RegisterID* BytecodeGenerator::initializeNextParameter()
1221{
1222 VirtualRegister reg = virtualRegisterForArgument(m_codeBlock->numParameters());
1223 m_parameters.grow(m_parameters.size() + 1);
1224 auto& parameter = registerFor(reg);
1225 parameter.setIndex(reg.offset());
1226 m_codeBlock->addParameter();
1227 return &parameter;
1228}
1229
1230void BytecodeGenerator::initializeParameters(FunctionParameters& parameters)
1231{
1232 // Make sure the code block knows about all of our parameters, and make sure that parameters
1233 // needing destructuring are noted.
1234 m_thisRegister.setIndex(initializeNextParameter()->index()); // this
1235
1236 bool nonSimpleArguments = false;
1237 for (unsigned i = 0; i < parameters.size(); ++i) {
1238 auto parameter = parameters.at(i);
1239 auto pattern = parameter.first;
1240 if (pattern->isRestParameter()) {
1241 RELEASE_ASSERT(!m_restParameter);
1242 m_restParameter = static_cast<RestParameterNode*>(pattern);
1243 nonSimpleArguments = true;
1244 continue;
1245 }
1246 if (parameter.second) {
1247 nonSimpleArguments = true;
1248 continue;
1249 }
1250 if (!nonSimpleArguments)
1251 initializeNextParameter();
1252 }
1253}
1254
// Sets up the "var" scope for functions that needed a separate parameter scope
// (non-simple parameter lists). When any "var" is captured we emit a lexical
// environment object and make it the active scope; either way the symbol table
// is pushed onto m_lexicalScopeStack and remembered as the var scope via
// m_varScopeLexicalScopeStackIndex.
void BytecodeGenerator::initializeVarLexicalEnvironment(int symbolTableConstantIndex, SymbolTable* functionSymbolTable, bool hasCapturedVariables)
{
    if (hasCapturedVariables) {
        RELEASE_ASSERT(m_lexicalEnvironmentRegister);
        OpCreateLexicalEnvironment::emit(this, m_lexicalEnvironmentRegister, scopeRegister(), VirtualRegister { symbolTableConstantIndex }, addConstantValue(jsUndefined()));

        // The freshly created environment becomes the current scope.
        OpMov::emit(this, scopeRegister(), m_lexicalEnvironmentRegister);

        pushLocalControlFlowScope();
    }
    bool isWithScope = false;
    m_lexicalScopeStack.append({ functionSymbolTable, m_lexicalEnvironmentRegister, isWithScope, symbolTableConstantIndex });
    m_varScopeLexicalScopeStackIndex = m_lexicalScopeStack.size() - 1;
}
1269
1270UniquedStringImpl* BytecodeGenerator::visibleNameForParameter(DestructuringPatternNode* pattern)
1271{
1272 if (pattern->isBindingNode()) {
1273 const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
1274 if (!m_functions.contains(ident.impl()))
1275 return ident.impl();
1276 }
1277 return nullptr;
1278}
1279
1280RegisterID* BytecodeGenerator::newRegister()
1281{
1282 m_calleeLocals.append(virtualRegisterForLocal(m_calleeLocals.size()));
1283 int numCalleeLocals = std::max<int>(m_codeBlock->m_numCalleeLocals, m_calleeLocals.size());
1284 numCalleeLocals = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), numCalleeLocals);
1285 m_codeBlock->m_numCalleeLocals = numCalleeLocals;
1286 return &m_calleeLocals.last();
1287}
1288
// Compacts m_calleeLocals via shrinkToFit() so trailing registers can be
// reused by the next allocation (presumably entries whose reference count has
// dropped to zero — see shrinkToFit()).
void BytecodeGenerator::reclaimFreeRegisters()
{
    shrinkToFit(m_calleeLocals);
}
1293
// Allocates a register for a block-scoped variable. Unlike newTemporary(),
// the result is not flagged as a temporary, so the peephole fusers (which
// require isTemporary()) will never treat it as dead after a single use.
RegisterID* BytecodeGenerator::newBlockScopeVariable()
{
    reclaimFreeRegisters();

    return newRegister();
}
1300
1301RegisterID* BytecodeGenerator::newTemporary()
1302{
1303 reclaimFreeRegisters();
1304
1305 RegisterID* result = newRegister();
1306 result->setTemporary();
1307 return result;
1308}
1309
// Allocates a LabelScope (break/continue target bookkeeping). The list is
// compacted first via shrinkToFit() (presumably dropping unreferenced trailing
// entries).
Ref<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    shrinkToFit(m_labelScopes);

    // Allocate new label scope.
    m_labelScopes.append(type, name, labelScopeDepth(), newLabel(), type == LabelScope::Loop ? RefPtr<Label>(newLabel()) : RefPtr<Label>()); // Only loops have continue targets.
    return m_labelScopes.last();
}
1318
// Allocates a fresh, as-yet-unbound Label; bind it later with emitLabel().
Ref<Label> BytecodeGenerator::newLabel()
{
    shrinkToFit(m_labels);

    // Allocate new label ID.
    m_labels.append();
    return m_labels.last();
}
1327
1328Ref<Label> BytecodeGenerator::newEmittedLabel()
1329{
1330 Ref<Label> label = newLabel();
1331 emitLabel(label.get());
1332 return label;
1333}
1334
// Bookkeeping run for each emitted opcode: remembers the last instruction so
// the peephole optimizations (fuseCompareAndJump / fuseTestAndJmp / rewind)
// can inspect and undo it. The ASSERT checks that m_lastInstruction is still
// the writer's final, contiguous instruction — or that tracking was reset
// (m_lastOpcodeID == op_end, as done when a jump target is emitted).
void BytecodeGenerator::recordOpcode(OpcodeID opcodeID)
{
    ASSERT(m_lastOpcodeID == op_end || (m_lastOpcodeID == m_lastInstruction->opcodeID() && m_writer.position() == m_lastInstruction.offset() + m_lastInstruction->size()));
    m_lastInstruction = m_writer.ref();
    m_lastOpcodeID = opcodeID;
}
1341
// On CPUs requiring aligned accesses, pads with narrow op_nop until
// (position + 1) is a multiple of the wide-16 size. NOTE(review): the "+ 1"
// presumably accounts for a one-byte wide-prefix opcode preceding the aligned
// payload — confirm against the wide opcode encoding.
void BytecodeGenerator::alignWideOpcode16()
{
#if CPU(NEEDS_ALIGNED_ACCESS)
    while ((m_writer.position() + 1) % OpcodeSize::Wide16)
        OpNop::emit<OpcodeSize::Narrow>(this);
#endif
}
1349
// Same as alignWideOpcode16(), but pads to the wide-32 size.
void BytecodeGenerator::alignWideOpcode32()
{
#if CPU(NEEDS_ALIGNED_ACCESS)
    while ((m_writer.position() + 1) % OpcodeSize::Wide32)
        OpNop::emit<OpcodeSize::Narrow>(this);
#endif
}
1357
// Binds |l0| to the current end of the instruction stream and records it as a
// jump target, which in turn disables peephole optimization across it.
void BytecodeGenerator::emitLabel(Label& l0)
{
    unsigned newLabelIndex = instructions().size();
    l0.setLocation(*this, newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
}
1377
// Emits the function prologue opcode, registering the point just after it as
// a jump target when recursive tail-call optimization is enabled.
void BytecodeGenerator::emitEnter()
{
    OpEnter::emit(this);

    if (LIKELY(Options::optimizeRecursiveTailCalls())) {
        // We must add the end of op_enter as a potential jump target, because the bytecode parser may decide to split its basic block
        // to have somewhere to jump to if there is a recursive tail-call that points to this function.
        m_codeBlock->addJumpTarget(instructions().size());
        // This disables peephole optimizations when an instruction is a jump target
        m_lastOpcodeID = op_end;
    }
}
1390
// Emits op_loop_hint at the top of a loop, followed by a trap check so the VM
// gets a safepoint on every iteration.
void BytecodeGenerator::emitLoopHint()
{
    OpLoopHint::emit(this);
    emitCheckTraps();
}
1396
// Emits an unconditional jump to |target|.
void BytecodeGenerator::emitJump(Label& target)
{
    OpJmp::emit(this, target.bind(this));
}
1401
// Emits op_check_traps: a point where the VM can service pending traps.
void BytecodeGenerator::emitCheckTraps()
{
    OpCheckTraps::emit(this);
}
1406
// Removes the most recently emitted instruction (used by the peephole fusers
// below) and resets opcode tracking so no further peepholing is attempted
// against the removed instruction.
void ALWAYS_INLINE BytecodeGenerator::rewind()
{
    ASSERT(m_lastInstruction.isValid());
    m_lastOpcodeID = op_end;
    m_writer.rewind(m_lastInstruction);
}
1413
// Peephole: if the previous instruction is the comparison that produced |cond|
// (its dst matches, and cond is an otherwise-unreferenced temporary), remove
// it and emit a single fused compare-and-jump. |swapOperands| lets a caller
// reuse one comparison opcode with its mirrored jump. Returns whether the
// fusion happened.
template<typename BinOp, typename JmpOp>
bool BytecodeGenerator::fuseCompareAndJump(RegisterID* cond, Label& target, bool swapOperands)
{
    ASSERT(canDoPeepholeOptimization());
    auto binop = m_lastInstruction->as<BinOp>();
    if (cond->index() == binop.m_dst.offset() && cond->isTemporary() && !cond->refCount()) {
        rewind();

        if (swapOperands)
            std::swap(binop.m_lhs, binop.m_rhs);

        JmpOp::emit(this, binop.m_lhs, binop.m_rhs, target.bind(this));
        return true;
    }
    return false;
}
1430
// Peephole: like fuseCompareAndJump, but for unary tests (e.g. eq_null):
// removes the test that produced |cond| and emits a fused test-and-jump.
// Returns whether the fusion happened.
template<typename UnaryOp, typename JmpOp>
bool BytecodeGenerator::fuseTestAndJmp(RegisterID* cond, Label& target)
{
    ASSERT(canDoPeepholeOptimization());
    auto unop = m_lastInstruction->as<UnaryOp>();
    if (cond->index() == unop.m_dst.offset() && cond->isTemporary() && !cond->refCount()) {
        rewind();

        JmpOp::emit(this, unop.m_operand, target.bind(this));
        return true;
    }
    return false;
}
1444
1445void BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label& target)
1446{
1447 if (canDoPeepholeOptimization()) {
1448 if (m_lastOpcodeID == op_less) {
1449 if (fuseCompareAndJump<OpLess, OpJless>(cond, target))
1450 return;
1451 } else if (m_lastOpcodeID == op_lesseq) {
1452 if (fuseCompareAndJump<OpLesseq, OpJlesseq>(cond, target))
1453 return;
1454 } else if (m_lastOpcodeID == op_greater) {
1455 if (fuseCompareAndJump<OpGreater, OpJgreater>(cond, target))
1456 return;
1457 } else if (m_lastOpcodeID == op_greatereq) {
1458 if (fuseCompareAndJump<OpGreatereq, OpJgreatereq>(cond, target))
1459 return;
1460 } else if (m_lastOpcodeID == op_eq) {
1461 if (fuseCompareAndJump<OpEq, OpJeq>(cond, target))
1462 return;
1463 } else if (m_lastOpcodeID == op_stricteq) {
1464 if (fuseCompareAndJump<OpStricteq, OpJstricteq>(cond, target))
1465 return;
1466 } else if (m_lastOpcodeID == op_neq) {
1467 if (fuseCompareAndJump<OpNeq, OpJneq>(cond, target))
1468 return;
1469 } else if (m_lastOpcodeID == op_nstricteq) {
1470 if (fuseCompareAndJump<OpNstricteq, OpJnstricteq>(cond, target))
1471 return;
1472 } else if (m_lastOpcodeID == op_below) {
1473 if (fuseCompareAndJump<OpBelow, OpJbelow>(cond, target))
1474 return;
1475 } else if (m_lastOpcodeID == op_beloweq) {
1476 if (fuseCompareAndJump<OpBeloweq, OpJbeloweq>(cond, target))
1477 return;
1478 } else if (m_lastOpcodeID == op_eq_null && target.isForward()) {
1479 if (fuseTestAndJmp<OpEqNull, OpJeqNull>(cond, target))
1480 return;
1481 } else if (m_lastOpcodeID == op_neq_null && target.isForward()) {
1482 if (fuseTestAndJmp<OpNeqNull, OpJneqNull>(cond, target))
1483 return;
1484 }
1485 }
1486
1487 OpJtrue::emit(this, cond, target.bind(this));
1488}
1489
1490void BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label& target)
1491{
1492 if (canDoPeepholeOptimization()) {
1493 if (m_lastOpcodeID == op_less && target.isForward()) {
1494 if (fuseCompareAndJump<OpLess, OpJnless>(cond, target))
1495 return;
1496 } else if (m_lastOpcodeID == op_lesseq && target.isForward()) {
1497 if (fuseCompareAndJump<OpLesseq, OpJnlesseq>(cond, target))
1498 return;
1499 } else if (m_lastOpcodeID == op_greater && target.isForward()) {
1500 if (fuseCompareAndJump<OpGreater, OpJngreater>(cond, target))
1501 return;
1502 } else if (m_lastOpcodeID == op_greatereq && target.isForward()) {
1503 if (fuseCompareAndJump<OpGreatereq, OpJngreatereq>(cond, target))
1504 return;
1505 } else if (m_lastOpcodeID == op_eq && target.isForward()) {
1506 if (fuseCompareAndJump<OpEq, OpJneq>(cond, target))
1507 return;
1508 } else if (m_lastOpcodeID == op_stricteq && target.isForward()) {
1509 if (fuseCompareAndJump<OpStricteq, OpJnstricteq>(cond, target))
1510 return;
1511 } else if (m_lastOpcodeID == op_neq && target.isForward()) {
1512 if (fuseCompareAndJump<OpNeq, OpJeq>(cond, target))
1513 return;
1514 } else if (m_lastOpcodeID == op_nstricteq && target.isForward()) {
1515 if (fuseCompareAndJump<OpNstricteq, OpJstricteq>(cond, target))
1516 return;
1517 } else if (m_lastOpcodeID == op_below && target.isForward()) {
1518 if (fuseCompareAndJump<OpBelow, OpJbeloweq>(cond, target, true))
1519 return;
1520 } else if (m_lastOpcodeID == op_beloweq && target.isForward()) {
1521 if (fuseCompareAndJump<OpBeloweq, OpJbelow>(cond, target, true))
1522 return;
1523 } else if (m_lastOpcodeID == op_not) {
1524 if (fuseTestAndJmp<OpNot, OpJtrue>(cond, target))
1525 return;
1526 } else if (m_lastOpcodeID == op_eq_null && target.isForward()) {
1527 if (fuseTestAndJmp<OpEqNull, OpJneqNull>(cond, target))
1528 return;
1529 } else if (m_lastOpcodeID == op_neq_null && target.isForward()) {
1530 if (fuseTestAndJmp<OpNeqNull, OpJeqNull>(cond, target))
1531 return;
1532 }
1533 }
1534
1535 OpJfalse::emit(this, cond, target.bind(this));
1536}
1537
void BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label& target)
{
    // Branches unless cond holds the well-known CallFunction special pointer
    // (presumably Function.prototype.call — confirm against Special's definition).
    OpJneqPtr::emit(this, cond, Special::CallFunction, target.bind(this));
}
1542
void BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label& target)
{
    // Branches unless cond holds the well-known ApplyFunction special pointer
    // (presumably Function.prototype.apply — confirm against Special's definition).
    OpJneqPtr::emit(this, cond, Special::ApplyFunction, target.bind(this));
}
1547
1548bool BytecodeGenerator::hasConstant(const Identifier& ident) const
1549{
1550 UniquedStringImpl* rep = ident.impl();
1551 return m_identifierMap.contains(rep);
1552}
1553
1554unsigned BytecodeGenerator::addConstant(const Identifier& ident)
1555{
1556 UniquedStringImpl* rep = ident.impl();
1557 IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
1558 if (result.isNewEntry)
1559 m_codeBlock->addIdentifier(ident);
1560
1561 return result.iterator->value;
1562}
1563
// We can't hash JSValue(), so we use a dedicated data member to cache it.
RegisterID* BytecodeGenerator::addConstantEmptyValue()
{
    // Lazily allocates a single constant-pool slot holding the empty value
    // and returns the cached register thereafter.
    if (!m_emptyValueRegister) {
        int index = addConstantIndex();
        m_codeBlock->addConstant(JSValue());
        m_emptyValueRegister = &m_constantPoolRegisters[index];
    }

    return m_emptyValueRegister;
}
1575
RegisterID* BytecodeGenerator::addConstantValue(JSValue v, SourceCodeRepresentation sourceCodeRepresentation)
{
    // Returns the constant-pool register holding v, adding an entry on first
    // use. Constants are deduplicated by (encoded value, representation).
    if (!v)
        return addConstantEmptyValue(); // JSValue() can't be hashed; it has its own cache.

    int index = m_nextConstantOffset;

    // A double-represented int32 is stored as a double so it gets a pool
    // entry distinct from the equivalent int32 constant.
    if (sourceCodeRepresentation == SourceCodeRepresentation::Double && v.isInt32())
        v = jsDoubleNumber(v.asNumber());
    EncodedJSValueWithRepresentation valueMapKey { JSValue::encode(v), sourceCodeRepresentation };
    JSValueMap::AddResult result = m_jsValueMap.add(valueMapKey, m_nextConstantOffset);
    if (result.isNewEntry) {
        addConstantIndex();
        m_codeBlock->addConstant(v, sourceCodeRepresentation);
    } else
        index = result.iterator->value; // Reuse the existing entry's slot.
    return &m_constantPoolRegisters[index];
}
1594
RegisterID* BytecodeGenerator::moveLinkTimeConstant(RegisterID* dst, LinkTimeConstant type)
{
    // Lazily allocates a constant-pool register for the given link-time
    // constant, then either returns it directly (dst == nullptr) or emits a
    // mov into dst.
    unsigned constantIndex = static_cast<unsigned>(type);
    if (!m_linkTimeConstantRegisters[constantIndex]) {
        int index = addConstantIndex();
        m_codeBlock->addConstant(type);
        m_linkTimeConstantRegisters[constantIndex] = &m_constantPoolRegisters[index];
    }

    if (!dst)
        return m_linkTimeConstantRegisters[constantIndex];

    OpMov::emit(this, dst, m_linkTimeConstantRegisters[constantIndex]);

    return dst;
}
1611
RegisterID* BytecodeGenerator::moveEmptyValue(RegisterID* dst)
{
    // Moves the empty value into dst. Unlike emitMove(), this deliberately
    // bypasses m_staticPropertyAnalyzer, which must never observe the empty
    // value (emitMove() asserts exactly that).
    RefPtr<RegisterID> emptyValue = addConstantEmptyValue();

    OpMov::emit(this, dst, emptyValue.get());

    return dst;
}
1620
RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    // The empty value must go through moveEmptyValue(), which skips the
    // static property analysis below.
    ASSERT(src != m_emptyValueRegister);

    m_staticPropertyAnalyzer.mov(dst, src);
    OpMov::emit(this, dst, src);

    return dst;
}
1630
RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src, OperandTypes types)
{
    // Dynamic-opcode front end for the templated emitUnaryOp<Op>. Only
    // op_negate consumes the operand-type hint; the other opcodes ignore it.
    switch (opcodeID) {
    case op_not:
        emitUnaryOp<OpNot>(dst, src);
        break;
    case op_negate:
        OpNegate::emit(this, dst, src, types);
        break;
    case op_bitnot:
        emitUnaryOp<OpBitnot>(dst, src);
        break;
    case op_to_number:
        emitUnaryOp<OpToNumber>(dst, src);
        break;
    default:
        ASSERT_NOT_REACHED();
    }
    return dst;
}
1651
RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    // Dynamic-opcode front end for the templated emitBinaryOp<Op>: maps each
    // supported binary OpcodeID to its statically-typed emitter. Returns dst,
    // or nullptr for an unsupported opcode (debug builds assert first).
    switch (opcodeID) {
    case op_eq:
        return emitBinaryOp<OpEq>(dst, src1, src2, types);
    case op_neq:
        return emitBinaryOp<OpNeq>(dst, src1, src2, types);
    case op_stricteq:
        return emitBinaryOp<OpStricteq>(dst, src1, src2, types);
    case op_nstricteq:
        return emitBinaryOp<OpNstricteq>(dst, src1, src2, types);
    case op_less:
        return emitBinaryOp<OpLess>(dst, src1, src2, types);
    case op_lesseq:
        return emitBinaryOp<OpLesseq>(dst, src1, src2, types);
    case op_greater:
        return emitBinaryOp<OpGreater>(dst, src1, src2, types);
    case op_greatereq:
        return emitBinaryOp<OpGreatereq>(dst, src1, src2, types);
    case op_below:
        return emitBinaryOp<OpBelow>(dst, src1, src2, types);
    case op_beloweq:
        return emitBinaryOp<OpBeloweq>(dst, src1, src2, types);
    case op_mod:
        return emitBinaryOp<OpMod>(dst, src1, src2, types);
    case op_pow:
        return emitBinaryOp<OpPow>(dst, src1, src2, types);
    case op_lshift:
        return emitBinaryOp<OpLshift>(dst, src1, src2, types);
    case op_rshift:
        return emitBinaryOp<OpRshift>(dst, src1, src2, types);
    case op_urshift:
        return emitBinaryOp<OpUrshift>(dst, src1, src2, types);
    case op_add:
        return emitBinaryOp<OpAdd>(dst, src1, src2, types);
    case op_mul:
        return emitBinaryOp<OpMul>(dst, src1, src2, types);
    case op_div:
        return emitBinaryOp<OpDiv>(dst, src1, src2, types);
    case op_sub:
        return emitBinaryOp<OpSub>(dst, src1, src2, types);
    case op_bitand:
        return emitBinaryOp<OpBitand>(dst, src1, src2, types);
    case op_bitxor:
        return emitBinaryOp<OpBitxor>(dst, src1, src2, types);
    case op_bitor:
        return emitBinaryOp<OpBitor>(dst, src1, src2, types);
    default:
        ASSERT_NOT_REACHED();
        return nullptr;
    }
}
1704
RegisterID* BytecodeGenerator::emitToObject(RegisterID* dst, RegisterID* src, const Identifier& message)
{
    // ToObject conversion; `message` names the error text used on failure
    // (registered in the identifier constant table).
    OpToObject::emit(this, dst, src, addConstant(message));
    return dst;
}
1710
RegisterID* BytecodeGenerator::emitToNumber(RegisterID* dst, RegisterID* src)
{
    // ToNumber conversion of src into dst.
    return emitUnaryOp<OpToNumber>(dst, src);
}
1715
RegisterID* BytecodeGenerator::emitToString(RegisterID* dst, RegisterID* src)
{
    // ToString conversion of src into dst.
    return emitUnaryOp<OpToString>(dst, src);
}
1720
RegisterID* BytecodeGenerator::emitTypeOf(RegisterID* dst, RegisterID* src)
{
    // typeof src into dst. Note: emitEqualityOpImpl() may later peephole this
    // away when compared against a type-name string literal.
    return emitUnaryOp<OpTypeof>(dst, src);
}
1725
RegisterID* BytecodeGenerator::emitInc(RegisterID* srcDst)
{
    // In-place increment: srcDst is both operand and result.
    OpInc::emit(this, srcDst);
    return srcDst;
}
1731
RegisterID* BytecodeGenerator::emitDec(RegisterID* srcDst)
{
    // In-place decrement: srcDst is both operand and result.
    OpDec::emit(this, srcDst);
    return srcDst;
}
1737
bool BytecodeGenerator::emitEqualityOpImpl(RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    // Peephole: rewrites (typeof v) == "<type name literal>" into a dedicated
    // type-check opcode, dropping the typeof. Returns true when the fused
    // opcode was emitted; false means the caller must emit the generic
    // equality opcode itself.
    if (!canDoPeepholeOptimization())
        return false;

    if (m_lastInstruction->is<OpTypeof>()) {
        auto op = m_lastInstruction->as<OpTypeof>();
        // Only fires when src1 is the temporary produced by the typeof and
        // src2 is a string constant inspectable at compile time.
        if (src1->index() == op.m_dst.offset()
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).get().isString()) {
            const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
            if (value == "undefined") {
                rewind();
                OpIsUndefined::emit(this, dst, op.m_value);
                return true;
            }
            if (value == "boolean") {
                rewind();
                OpIsBoolean::emit(this, dst, op.m_value);
                return true;
            }
            if (value == "number") {
                rewind();
                OpIsNumber::emit(this, dst, op.m_value);
                return true;
            }
            if (value == "string") {
                rewind();
                OpIsCellWithType::emit(this, dst, op.m_value, StringType);
                return true;
            }
            if (value == "symbol") {
                rewind();
                OpIsCellWithType::emit(this, dst, op.m_value, SymbolType);
                return true;
            }
            // "bigint" is only fused when the runtime option enables BigInt.
            if (Options::useBigInt() && value == "bigint") {
                rewind();
                OpIsCellWithType::emit(this, dst, op.m_value, BigIntType);
                return true;
            }
            // typeof null is "object", hence the OrNull variant.
            if (value == "object") {
                rewind();
                OpIsObjectOrNull::emit(this, dst, op.m_value);
                return true;
            }
            if (value == "function") {
                rewind();
                OpIsFunction::emit(this, dst, op.m_value);
                return true;
            }
        }
    }

    return false;
}
1795
void BytecodeGenerator::emitTypeProfilerExpressionInfo(const JSTextPosition& startDivot, const JSTextPosition& endDivot)
{
    // Associates the most recently emitted instruction with a source-text
    // range for the type profiler.
    ASSERT(shouldEmitTypeProfilerHooks());

    unsigned start = startDivot.offset; // Ranges are inclusive of their endpoints, AND 0 indexed.
    unsigned end = endDivot.offset - 1; // End Ranges already go one past the inclusive range, so subtract 1.
    unsigned instructionOffset = instructions().size() - 1;
    m_codeBlock->addTypeProfilerExpressionInfo(instructionOffset, start, end);
}
1805
void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, ProfileTypeBytecodeFlag flag)
{
    // Profiles the type of registerToProfile without attaching source-range
    // info. No-op unless type profiling is enabled or there is no register.
    if (!shouldEmitTypeProfilerHooks())
        return;

    if (!registerToProfile)
        return;

    OpProfileType::emit(this, registerToProfile, { }, flag, { }, resolveType());

    // Don't emit expression info for this version of profile type. This generally means
    // we're profiling information for something that isn't in the actual text of a JavaScript
    // program. For example, implicit return undefined from a function call.
}
1820
void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, const JSTextPosition& startDivot, const JSTextPosition& endDivot)
{
    // Convenience overload: profile with source-range info but no global
    // variable ID.
    emitProfileType(registerToProfile, ProfileTypeBytecodeDoesNotHaveGlobalID, startDivot, endDivot);
}
1825
1826void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, ProfileTypeBytecodeFlag flag, const JSTextPosition& startDivot, const JSTextPosition& endDivot)
1827{
1828 if (!shouldEmitTypeProfilerHooks())
1829 return;
1830
1831 if (!registerToProfile)
1832 return;
1833
1834 OpProfileType::emit(this, registerToProfile, { }, flag, { }, resolveType());
1835 emitTypeProfilerExpressionInfo(startDivot, endDivot);
1836}
1837
void BytecodeGenerator::emitProfileType(RegisterID* registerToProfile, const Variable& var, const JSTextPosition& startDivot, const JSTextPosition& endDivot)
{
    // Profiles a named variable's value, tagging the profile with how the
    // variable resolves (statically via a symbol table, or dynamically as a
    // closure variable at the current scope depth).
    if (!shouldEmitTypeProfilerHooks())
        return;

    if (!registerToProfile)
        return;

    ProfileTypeBytecodeFlag flag;
    SymbolTableOrScopeDepth symbolTableOrScopeDepth;
    if (var.local() || var.offset().isScope()) {
        // Statically resolvable: point the profile at the symbol table entry.
        flag = ProfileTypeBytecodeLocallyResolved;
        ASSERT(var.symbolTableConstantIndex());
        symbolTableOrScopeDepth = SymbolTableOrScopeDepth::symbolTable(VirtualRegister { var.symbolTableConstantIndex() });
    } else {
        // Must be resolved through the scope chain at runtime.
        flag = ProfileTypeBytecodeClosureVar;
        symbolTableOrScopeDepth = SymbolTableOrScopeDepth::scopeDepth(localScopeDepth());
    }

    OpProfileType::emit(this, registerToProfile, symbolTableOrScopeDepth, flag, addConstant(var.ident()), resolveType());
    emitTypeProfilerExpressionInfo(startDivot, endDivot);
}
1860
1861void BytecodeGenerator::emitProfileControlFlow(int textOffset)
1862{
1863 if (shouldEmitControlFlowProfilerHooks()) {
1864 RELEASE_ASSERT(textOffset >= 0);
1865
1866 OpProfileControlFlow::emit(this, textOffset);
1867 m_codeBlock->addOpProfileControlFlowBytecodeOffset(m_lastInstruction.offset());
1868 }
1869}
1870
1871unsigned BytecodeGenerator::addConstantIndex()
1872{
1873 unsigned index = m_nextConstantOffset;
1874 m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1875 ++m_nextConstantOffset;
1876 return index;
1877}
1878
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    // Convenience overload: load a boolean constant.
    return emitLoad(dst, jsBoolean(b));
}
1883
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    // Loads a string constant, caching one JSString per identifier so
    // repeated loads of the same literal share a single string cell.
    ASSERT(!identifier.isSymbol());
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
    if (!stringInMap)
        stringInMap = jsOwnedString(vm(), identifier.string());

    return emitLoad(dst, JSValue(stringInMap));
}
1893
1894RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v, SourceCodeRepresentation sourceCodeRepresentation)
1895{
1896 RegisterID* constantID = addConstantValue(v, sourceCodeRepresentation);
1897 if (dst)
1898 return move(dst, constantID);
1899 return constantID;
1900}
1901
1902RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, IdentifierSet& set)
1903{
1904 if (m_codeBlock->numberOfConstantIdentifierSets()) {
1905 for (const auto& entry : m_codeBlock->constantIdentifierSets()) {
1906 if (entry.first != set)
1907 continue;
1908
1909 return &m_constantPoolRegisters[entry.second];
1910 }
1911 }
1912
1913 unsigned index = addConstantIndex();
1914 m_codeBlock->addSetConstant(set);
1915 RegisterID* m_setRegister = &m_constantPoolRegisters[index];
1916
1917 if (dst)
1918 return move(dst, m_setRegister);
1919
1920 return m_setRegister;
1921}
1922
// Allocates storage (a scope slot or a stack register, chosen by
// lookUpVarKind) for every lexically declared variable and records it in
// symbolTable. Returns true if any variable is captured and therefore needs a
// JSLexicalEnvironment.
template<typename LookUpVarKindFunctor>
bool BytecodeGenerator::instantiateLexicalVariables(const VariableEnvironment& lexicalVariables, SymbolTable* symbolTable, ScopeRegisterType scopeRegisterType, LookUpVarKindFunctor lookUpVarKind)
{
    bool hasCapturedVariables = false;
    {
        for (auto& entry : lexicalVariables) {
            ASSERT(entry.value.isLet() || entry.value.isConst() || entry.value.isFunction());
            ASSERT(!entry.value.isVar());
            SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, entry.key.get());
            ASSERT(symbolTableEntry.isNull());

            // Imported bindings which are not the namespace bindings are not allocated
            // in the module environment as usual variables' way.
            // And since these types of the variables only seen in the module environment,
            // other lexical environment need not to take care this.
            if (entry.value.isImported() && !entry.value.isImportedNamespace())
                continue;

            VarKind varKind = lookUpVarKind(entry.key.get(), entry.value);
            VarOffset varOffset;
            if (varKind == VarKind::Scope) {
                varOffset = VarOffset(symbolTable->takeNextScopeOffset(NoLockingNecessary));
                hasCapturedVariables = true;
            } else {
                ASSERT(varKind == VarKind::Stack);
                RegisterID* local;
                if (scopeRegisterType == ScopeRegisterType::Block) {
                    // Block-scope registers are ref'd here and deref'd when the
                    // lexical scope is popped.
                    local = newBlockScopeVariable();
                    local->ref();
                } else
                    local = addVar();
                varOffset = VarOffset(local->virtualRegister());
            }

            // const bindings are flagged ReadOnly so stores can be rejected.
            SymbolTableEntry newEntry(varOffset, static_cast<unsigned>(entry.value.isConst() ? PropertyAttribute::ReadOnly : PropertyAttribute::None));
            symbolTable->add(NoLockingNecessary, entry.key.get(), newEntry);
        }
    }
    return hasCapturedVariables;
}
1963
void BytecodeGenerator::emitPrefillStackTDZVariables(const VariableEnvironment& lexicalVariables, SymbolTable* symbolTable)
{
    // Prefill stack variables with the TDZ empty value.
    // Scope variables will be initialized to the TDZ empty value when JSLexicalEnvironment is allocated.
    for (auto& entry : lexicalVariables) {
        // Imported bindings which are not the namespace bindings are not allocated
        // in the module environment as usual variables' way.
        // And since these types of the variables only seen in the module environment,
        // other lexical environment need not to take care this.
        if (entry.value.isImported() && !entry.value.isImportedNamespace())
            continue;

        // Block-scoped function declarations are hoisted-initialized, so they
        // are never in the TDZ.
        if (entry.value.isFunction())
            continue;

        SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, entry.key.get());
        ASSERT(!symbolTableEntry.isNull());
        VarOffset offset = symbolTableEntry.varOffset();
        if (offset.isScope())
            continue;

        ASSERT(offset.isStack());
        moveEmptyValue(&registerFor(offset.stackOffset()));
    }
}
1989
void BytecodeGenerator::pushLexicalScope(VariableEnvironmentNode* node, TDZCheckOptimization tdzCheckOptimization, NestedScopeType nestedScopeType, RegisterID** constantSymbolTableResult, bool shouldInitializeBlockScopedFunctions)
{
    // Pushes a let/const block scope for node's lexical variables, optionally
    // initializing its block-scoped function declarations, and hands back the
    // scope's symbol-table constant register when the caller wants it.
    VariableEnvironment& environment = node->lexicalVariables();
    RegisterID* constantSymbolTableResultTemp = nullptr;
    pushLexicalScopeInternal(environment, tdzCheckOptimization, nestedScopeType, &constantSymbolTableResultTemp, TDZRequirement::UnderTDZ, ScopeType::LetConstScope, ScopeRegisterType::Block);

    if (shouldInitializeBlockScopedFunctions)
        initializeBlockScopedFunctions(environment, node->functionStack(), constantSymbolTableResultTemp);

    // Only propagate when a symbol table was actually created (i.e. the
    // environment was non-empty and needed one).
    if (constantSymbolTableResult && constantSymbolTableResultTemp)
        *constantSymbolTableResult = constantSymbolTableResultTemp;
}
2002
// Builds the SymbolTable for a new lexical scope, allocates storage for its
// variables, and — only when something is captured — emits the
// create_lexical_environment that materializes a scope object and makes it the
// current scope. Also pushes the scope's TDZ set and prefills stack TDZ slots.
void BytecodeGenerator::pushLexicalScopeInternal(VariableEnvironment& environment, TDZCheckOptimization tdzCheckOptimization, NestedScopeType nestedScopeType,
    RegisterID** constantSymbolTableResult, TDZRequirement tdzRequirement, ScopeType scopeType, ScopeRegisterType scopeRegisterType)
{
    if (!environment.size())
        return;

    // The debugger needs to see every lexical variable, so force them all
    // into the scope object.
    if (shouldEmitDebugHooks())
        environment.markAllVariablesAsCaptured();

    SymbolTable* symbolTable = SymbolTable::create(*m_vm);
    switch (scopeType) {
    case ScopeType::CatchScope:
        symbolTable->setScopeType(SymbolTable::ScopeType::CatchScope);
        break;
    case ScopeType::LetConstScope:
        symbolTable->setScopeType(SymbolTable::ScopeType::LexicalScope);
        break;
    case ScopeType::FunctionNameScope:
        symbolTable->setScopeType(SymbolTable::ScopeType::FunctionNameScope);
        break;
    }

    if (nestedScopeType == NestedScopeType::IsNested)
        symbolTable->markIsNestedLexicalScope();

    auto lookUpVarKind = [] (UniquedStringImpl*, const VariableEnvironmentEntry& entry) -> VarKind {
        return entry.isCaptured() ? VarKind::Scope : VarKind::Stack;
    };

    bool hasCapturedVariables = instantiateLexicalVariables(environment, symbolTable, scopeRegisterType, lookUpVarKind);

    RegisterID* newScope = nullptr;
    RegisterID* constantSymbolTable = nullptr;
    int symbolTableConstantIndex = 0;
    // The type profiler needs the symbol table constant even when no
    // environment object is created.
    if (shouldEmitTypeProfilerHooks()) {
        constantSymbolTable = addConstantValue(symbolTable);
        symbolTableConstantIndex = constantSymbolTable->index();
    }
    if (hasCapturedVariables) {
        if (scopeRegisterType == ScopeRegisterType::Block) {
            newScope = newBlockScopeVariable();
            newScope->ref();
        } else
            newScope = addVar();
        if (!constantSymbolTable) {
            ASSERT(!shouldEmitTypeProfilerHooks());
            // Without the profiler, only the scope part of the table needs to
            // ride along as a constant.
            constantSymbolTable = addConstantValue(symbolTable->cloneScopePart(*m_vm));
            symbolTableConstantIndex = constantSymbolTable->index();
        }
        if (constantSymbolTableResult)
            *constantSymbolTableResult = constantSymbolTable;

        // Scope slots start out as TDZ (or undefined for non-TDZ scopes).
        OpCreateLexicalEnvironment::emit(this, newScope, scopeRegister(), VirtualRegister { symbolTableConstantIndex }, addConstantValue(tdzRequirement == TDZRequirement::UnderTDZ ? jsTDZValue() : jsUndefined()));

        move(scopeRegister(), newScope);

        pushLocalControlFlowScope();
    }

    bool isWithScope = false;
    m_lexicalScopeStack.append({ symbolTable, newScope, isWithScope, symbolTableConstantIndex });
    pushTDZVariables(environment, tdzCheckOptimization, tdzRequirement);

    if (tdzRequirement == TDZRequirement::UnderTDZ)
        emitPrefillStackTDZVariables(environment, symbolTable);
}
2069
void BytecodeGenerator::initializeBlockScopedFunctions(VariableEnvironment& environment, FunctionStack& functionStack, RegisterID* constantSymbolTable)
{
    /*
     * We must transform block scoped function declarations in strict mode like so:
     *
     * function foo() {
     *     if (c) {
     *         function foo() { ... }
     *         if (bar) { ... }
     *         else { ... }
     *         function baz() { ... }
     *     }
     * }
     *
     * to:
     *
     * function foo() {
     *     if (c) {
     *         let foo = function foo() { ... }
     *         let baz = function baz() { ... }
     *         if (bar) { ... }
     *         else { ... }
     *     }
     * }
     *
     * But without the TDZ checks.
     */

    if (!environment.size()) {
        RELEASE_ASSERT(!functionStack.size());
        return;
    }

    if (!functionStack.size())
        return;

    SymbolTable* symbolTable = m_lexicalScopeStack.last().m_symbolTable;
    RegisterID* scope = m_lexicalScopeStack.last().m_scope;
    // One temporary is reused for each function expression in turn.
    RefPtr<RegisterID> temp = newTemporary();
    int symbolTableIndex = constantSymbolTable ? constantSymbolTable->index() : 0;
    for (FunctionMetadataNode* function : functionStack) {
        const Identifier& name = function->ident();
        auto iter = environment.find(name.impl());
        RELEASE_ASSERT(iter != environment.end());
        RELEASE_ASSERT(iter->value.isFunction());
        // We purposefully don't hold the symbol table lock around this loop because emitNewFunctionExpressionCommon may GC.
        SymbolTableEntry entry = symbolTable->get(NoLockingNecessary, name.impl());
        RELEASE_ASSERT(!entry.isNull());
        emitNewFunctionExpressionCommon(temp.get(), function);
        bool isLexicallyScoped = true;
        // Initialization mode: these bindings are born initialized, so no TDZ
        // check applies to the store.
        emitPutToScope(scope, variableForLocalEntry(name, entry, symbolTableIndex, isLexicallyScoped), temp.get(), DoNotThrowIfNotFound, InitializationMode::Initialization);
    }
}
2123
void BytecodeGenerator::hoistSloppyModeFunctionIfNecessary(const Identifier& functionName)
{
    // Sloppy-mode block-scoped function hoisting (NOTE(review): this appears
    // to implement the Annex B semantics the parser flags via
    // hasSloppyModeHoistedFunction — confirm): copies the function's current
    // value from its block scope out to the enclosing var scope.
    if (m_scopeNode->hasSloppyModeHoistedFunction(functionName.impl())) {
        if (codeType() != EvalCode) {
            // Non-eval: the var scope is statically known, so write straight
            // into its symbol table entry.
            Variable currentFunctionVariable = variable(functionName);
            RefPtr<RegisterID> currentValue;
            if (RegisterID* local = currentFunctionVariable.local())
                currentValue = local;
            else {
                RefPtr<RegisterID> scope = emitResolveScope(nullptr, currentFunctionVariable);
                currentValue = emitGetFromScope(newTemporary(), scope.get(), currentFunctionVariable, DoNotThrowIfNotFound);
            }

            ASSERT(m_varScopeLexicalScopeStackIndex);
            ASSERT(*m_varScopeLexicalScopeStackIndex < m_lexicalScopeStack.size());
            LexicalScopeStackEntry varScope = m_lexicalScopeStack[*m_varScopeLexicalScopeStackIndex];
            SymbolTable* varSymbolTable = varScope.m_symbolTable;
            ASSERT(varSymbolTable->scopeType() == SymbolTable::ScopeType::VarScope);
            SymbolTableEntry entry = varSymbolTable->get(NoLockingNecessary, functionName.impl());
            if (functionName == propertyNames().arguments && entry.isNull()) {
                // "arguments" might be put in the parameter scope when we have a non-simple
                // parameter list since "arguments" is visible to expressions inside the
                // parameter evaluation list.
                // e.g:
                // function foo(x = arguments) { { function arguments() { } } }
                RELEASE_ASSERT(*m_varScopeLexicalScopeStackIndex > 0);
                varScope = m_lexicalScopeStack[*m_varScopeLexicalScopeStackIndex - 1];
                SymbolTable* parameterSymbolTable = varScope.m_symbolTable;
                entry = parameterSymbolTable->get(NoLockingNecessary, functionName.impl());
            }
            RELEASE_ASSERT(!entry.isNull());
            bool isLexicallyScoped = false;
            emitPutToScope(varScope.m_scope, variableForLocalEntry(functionName, entry, varScope.m_symbolTableConstantIndex, isLexicallyScoped), currentValue.get(), DoNotThrowIfNotFound, InitializationMode::NotInitialization);
        } else {
            // Eval: the var scope must be found dynamically at runtime; skip
            // the store when resolution yields undefined.
            Variable currentFunctionVariable = variable(functionName);
            RefPtr<RegisterID> currentValue;
            if (RegisterID* local = currentFunctionVariable.local())
                currentValue = local;
            else {
                RefPtr<RegisterID> scope = emitResolveScope(nullptr, currentFunctionVariable);
                currentValue = emitGetFromScope(newTemporary(), scope.get(), currentFunctionVariable, DoNotThrowIfNotFound);
            }

            RefPtr<RegisterID> scopeId = emitResolveScopeForHoistingFuncDeclInEval(nullptr, functionName);
            RefPtr<RegisterID> checkResult = emitIsUndefined(newTemporary(), scopeId.get());

            Ref<Label> isNotVarScopeLabel = newLabel();
            emitJumpIfTrue(checkResult.get(), isNotVarScopeLabel.get());

            // Put to outer scope
            emitPutToScope(scopeId.get(), functionName, currentValue.get(), DoNotThrowIfNotFound, InitializationMode::NotInitialization);
            emitLabel(isNotVarScopeLabel.get());

        }
    }
}
2180
RegisterID* BytecodeGenerator::emitResolveScopeForHoistingFuncDeclInEval(RegisterID* dst, const Identifier& property)
{
    // Eval-only: resolves, at runtime, the scope a sloppy-mode hoisted
    // function named `property` should be written into, starting from the
    // top-most scope.
    ASSERT(m_codeType == EvalCode);

    dst = finalDestination(dst);
    OpResolveScopeForHoistingFuncDeclInEval::emit(this, kill(dst), m_topMostScope, addConstant(property));
    return dst;
}
2189
void BytecodeGenerator::popLexicalScope(VariableEnvironmentNode* node)
{
    // Pops the lexical scope previously pushed for node's lexical variables.
    VariableEnvironment& environment = node->lexicalVariables();
    popLexicalScopeInternal(environment);
}
2195
void BytecodeGenerator::popLexicalScopeInternal(VariableEnvironment& environment)
{
    // NOTE: This function only makes sense for scopes that aren't ScopeRegisterType::Var (only function name scope right now is ScopeRegisterType::Var).
    // This doesn't make sense for ScopeRegisterType::Var because we deref RegisterIDs here.
    if (!environment.size())
        return;

    // Must mirror the capture decision made when the scope was pushed, so the
    // stack/scope split below matches the allocation.
    if (shouldEmitDebugHooks())
        environment.markAllVariablesAsCaptured();

    auto stackEntry = m_lexicalScopeStack.takeLast();
    SymbolTable* symbolTable = stackEntry.m_symbolTable;
    bool hasCapturedVariables = false;
    for (auto& entry : environment) {
        if (entry.value.isCaptured()) {
            hasCapturedVariables = true;
            continue;
        }
        // Stack-allocated variable: release the register ref taken at push.
        SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, entry.key.get());
        ASSERT(!symbolTableEntry.isNull());
        VarOffset offset = symbolTableEntry.varOffset();
        ASSERT(offset.isStack());
        RegisterID* local = &registerFor(offset.stackOffset());
        local->deref();
    }

    if (hasCapturedVariables) {
        // A scope object was created at push time; pop it and release its
        // register.
        RELEASE_ASSERT(stackEntry.m_scope);
        emitPopScope(scopeRegister(), stackEntry.m_scope);
        popLocalControlFlowScope();
        stackEntry.m_scope->deref();
    }

    // Drop this scope's TDZ set and invalidate the aggregated cache.
    m_TDZStack.removeLast();
    m_cachedVariablesUnderTDZ = { };
}
2232
void BytecodeGenerator::prepareLexicalScopeForNextForLoopIteration(VariableEnvironmentNode* node, RegisterID* loopSymbolTable)
{
    // Gives each for-loop iteration a fresh activation when the loop header
    // declares captured variables, copying the previous iteration's values
    // into the new activation.
    VariableEnvironment& environment = node->lexicalVariables();
    if (!environment.size())
        return;
    if (shouldEmitDebugHooks())
        environment.markAllVariablesAsCaptured();
    if (!environment.hasCapturedVariables())
        return;

    RELEASE_ASSERT(loopSymbolTable);

    // This function needs to do setup for a for loop's activation if any of
    // the for loop's lexically declared variables are captured (that is, variables
    // declared in the loop header, not the loop body). This function needs to
    // make a copy of the current activation and copy the values from the previous
    // activation into the new activation because each iteration of a for loop
    // gets a new activation.

    auto stackEntry = m_lexicalScopeStack.last();
    SymbolTable* symbolTable = stackEntry.m_symbolTable;
    RegisterID* loopScope = stackEntry.m_scope;
    ASSERT(symbolTable->scopeSize());
    ASSERT(loopScope);
    Vector<std::pair<RegisterID*, Identifier>> activationValuesToCopyOver;

    {
        activationValuesToCopyOver.reserveInitialCapacity(symbolTable->scopeSize());

        // Snapshot every scope-allocated value from the old activation into a
        // temporary (ref'd here, deref'd after the copy-back below).
        for (auto end = symbolTable->end(NoLockingNecessary), ptr = symbolTable->begin(NoLockingNecessary); ptr != end; ++ptr) {
            if (!ptr->value.varOffset().isScope())
                continue;

            RefPtr<UniquedStringImpl> ident = ptr->key;
            Identifier identifier = Identifier::fromUid(m_vm, ident.get());

            RegisterID* transitionValue = newBlockScopeVariable();
            transitionValue->ref();
            emitGetFromScope(transitionValue, loopScope, variableForLocalEntry(identifier, ptr->value, loopSymbolTable->index(), true), DoNotThrowIfNotFound);
            activationValuesToCopyOver.uncheckedAppend(std::make_pair(transitionValue, identifier));
        }
    }

    // We need this dynamic behavior of the executing code to ensure
    // each loop iteration has a new activation object. (It's pretty ugly).
    // Also, this new activation needs to be assigned to the same register
    // as the previous scope because the loop body is compiled under
    // the assumption that the scope's register index is constant even
    // though the value in that register will change on each loop iteration.
    RefPtr<RegisterID> parentScope = emitGetParentScope(newTemporary(), loopScope);
    move(scopeRegister(), parentScope.get());

    OpCreateLexicalEnvironment::emit(this, loopScope, scopeRegister(), loopSymbolTable, addConstantValue(jsTDZValue()));

    move(scopeRegister(), loopScope);

    {
        // Copy the snapshotted values into the fresh activation.
        for (auto pair : activationValuesToCopyOver) {
            const Identifier& identifier = pair.second;
            SymbolTableEntry entry = symbolTable->get(NoLockingNecessary, identifier.impl());
            RELEASE_ASSERT(!entry.isNull());
            RegisterID* transitionValue = pair.first;
            emitPutToScope(loopScope, variableForLocalEntry(identifier, entry, loopSymbolTable->index(), true), transitionValue, DoNotThrowIfNotFound, InitializationMode::NotInitialization);
            transitionValue->deref();
        }
    }
}
2300
// Resolves |property| against the compile-time lexical scope stack. Returns a
// statically resolved Variable when possible; otherwise returns a Variable
// carrying only the identifier, which forces dynamic resolution at run time.
Variable BytecodeGenerator::variable(const Identifier& property, ThisResolutionType thisResolutionType)
{
    // Locally-resolvable "this" lives in its dedicated register and is read-only.
    if (property == propertyNames().thisIdentifier && thisResolutionType == ThisResolutionType::Local)
        return Variable(property, VarOffset(thisRegister()->virtualRegister()), thisRegister(), static_cast<unsigned>(PropertyAttribute::ReadOnly), Variable::SpecialVariable, 0, false);

    // We can optimize lookups if the lexical variable is found before a "with" or "catch"
    // scope because we're guaranteed static resolution. If we have to pass through
    // a "with" or "catch" scope we lose this guarantee.
    // We can't optimize cases like this:
    // {
    //     let x = ...;
    //     with (o) {
    //         doSomethingWith(x);
    //     }
    // }
    // Because we can't guarantee static resolution on x.
    // But, in this case, we are guaranteed static resolution:
    // {
    //     let x = ...;
    //     with (o) {
    //         let x = ...;
    //         doSomethingWith(x);
    //     }
    // }
    // Walk the scopes from innermost to outermost.
    for (unsigned i = m_lexicalScopeStack.size(); i--; ) {
        auto& stackEntry = m_lexicalScopeStack[i];
        if (stackEntry.m_isWithScope)
            return Variable(property);
        SymbolTable* symbolTable = stackEntry.m_symbolTable;
        SymbolTableEntry symbolTableEntry = symbolTable->get(NoLockingNecessary, property.impl());
        if (symbolTableEntry.isNull())
            continue;
        bool resultIsCallee = false;
        if (symbolTable->scopeType() == SymbolTable::ScopeType::FunctionNameScope) {
            if (m_usesNonStrictEval) {
                // We don't know if an eval has introduced a "var" named the same thing as the function name scope variable name.
                // We resort to dynamic lookup to answer this question.
                Variable result = Variable(property);
                return result;
            }
            // The function name binding is immutable in strict contexts.
            resultIsCallee = true;
        }
        Variable result = variableForLocalEntry(property, symbolTableEntry, stackEntry.m_symbolTableConstantIndex, symbolTable->scopeType() == SymbolTable::ScopeType::LexicalScope);
        if (resultIsCallee)
            result.setIsReadOnly();
        return result;
    }

    return Variable(property);
}
2351
2352Variable BytecodeGenerator::variableForLocalEntry(
2353 const Identifier& property, const SymbolTableEntry& entry, int symbolTableConstantIndex, bool isLexicallyScoped)
2354{
2355 VarOffset offset = entry.varOffset();
2356
2357 RegisterID* local;
2358 if (offset.isStack())
2359 local = &registerFor(offset.stackOffset());
2360 else
2361 local = nullptr;
2362
2363 return Variable(property, offset, local, entry.getAttributes(), Variable::NormalVariable, symbolTableConstantIndex, isLexicallyScoped);
2364}
2365
// Adds |property| to |symbolTable| with storage of kind |varKind| (scope slot
// or stack register). If an entry already exists, either silently keeps it
// (IgnoreExisting) or validates that its storage kind matches the request.
void BytecodeGenerator::createVariable(
    const Identifier& property, VarKind varKind, SymbolTable* symbolTable, ExistingVariableMode existingVariableMode)
{
    ASSERT(property != propertyNames().thisIdentifier);
    SymbolTableEntry entry = symbolTable->get(NoLockingNecessary, property.impl());

    if (!entry.isNull()) {
        if (existingVariableMode == IgnoreExisting)
            return;

        // Do some checks to ensure that the variable we're being asked to create is sufficiently
        // compatible with the one we have already created.

        VarOffset offset = entry.varOffset();

        // We can't change our minds about whether it's captured.
        if (offset.kind() != varKind) {
            dataLog(
                "Trying to add variable called ", property, " as ", varKind,
                " but it was already added as ", offset, ".\n");
            RELEASE_ASSERT_NOT_REACHED();
        }

        return;
    }

    // Allocate storage for the new entry in the requested location.
    VarOffset varOffset;
    if (varKind == VarKind::Scope)
        varOffset = VarOffset(symbolTable->takeNextScopeOffset(NoLockingNecessary));
    else {
        ASSERT(varKind == VarKind::Stack);
        varOffset = VarOffset(virtualRegisterForLocal(m_calleeLocals.size()));
    }
    SymbolTableEntry newEntry(varOffset, 0);
    symbolTable->add(NoLockingNecessary, property.impl(), newEntry);

    if (varKind == VarKind::Stack) {
        // Reserve the local register now; it must line up with the offset
        // just recorded in the symbol table.
        RegisterID* local = addVar();
        RELEASE_ASSERT(local->index() == varOffset.stackOffset().offset());
    }
}
2407
// Emits op_overrides_has_instance for |constructor|/|hasInstanceValue|,
// storing the result into |dst|. Returns |dst|.
RegisterID* BytecodeGenerator::emitOverridesHasInstance(RegisterID* dst, RegisterID* constructor, RegisterID* hasInstanceValue)
{
    OpOverridesHasInstance::emit(this, dst, constructor, hasInstanceValue);
    return dst;
}
2413
2414// Indicates the least upper bound of resolve type based on local scope. The bytecode linker
2415// will start with this ResolveType and compute the least upper bound including intercepting scopes.
2416ResolveType BytecodeGenerator::resolveType()
2417{
2418 for (unsigned i = m_lexicalScopeStack.size(); i--; ) {
2419 if (m_lexicalScopeStack[i].m_isWithScope)
2420 return Dynamic;
2421 if (m_usesNonStrictEval && m_lexicalScopeStack[i].m_symbolTable->scopeType() == SymbolTable::ScopeType::FunctionNameScope) {
2422 // We never want to assign to a FunctionNameScope. Returning Dynamic here achieves this goal.
2423 // If we aren't in non-strict eval mode, then NodesCodeGen needs to take care not to emit
2424 // a put_to_scope with the destination being the function name scope variable.
2425 return Dynamic;
2426 }
2427 }
2428
2429 if (m_usesNonStrictEval)
2430 return GlobalPropertyWithVarInjectionChecks;
2431 return GlobalProperty;
2432}
2433
// Produces the register holding the scope in which |variable| lives:
// nullptr for stack variables (no scope object needed), the arguments
// register for captured arguments, the creating lexical scope's register for
// statically resolved scope variables, or the result of an op_resolve_scope
// for non-local (dynamic) resolution.
RegisterID* BytecodeGenerator::emitResolveScope(RegisterID* dst, const Variable& variable)
{
    switch (variable.offset().kind()) {
    case VarKind::Stack:
        return nullptr;

    case VarKind::DirectArgument:
        return argumentsRegister();

    case VarKind::Scope: {
        // This always refers to the activation that *we* allocated, and not the current scope that code
        // lives in. Note that this will change once we have proper support for block scoping. Once that
        // changes, it will be correct for this code to return scopeRegister(). The only reason why we
        // don't do that already is that m_lexicalEnvironment is required by ConstDeclNode. ConstDeclNode
        // requires weird things because it is a shameful pile of nonsense, but block scoping would make
        // that code sensible and obviate the need for us to do bad things.
        for (unsigned i = m_lexicalScopeStack.size(); i--; ) {
            auto& stackEntry = m_lexicalScopeStack[i];
            // We should not resolve a variable to VarKind::Scope if a "with" scope lies in between the current
            // scope and the resolved scope.
            RELEASE_ASSERT(!stackEntry.m_isWithScope);

            if (stackEntry.m_symbolTable->get(NoLockingNecessary, variable.ident().impl()).isNull())
                continue;

            RegisterID* scope = stackEntry.m_scope;
            RELEASE_ASSERT(scope);
            return scope;
        }

        // A VarKind::Scope variable must have been declared in some visible lexical scope.
        RELEASE_ASSERT_NOT_REACHED();
        return nullptr;

    }
    case VarKind::Invalid:
        // Indicates non-local resolution.

        dst = tempDestination(dst);
        OpResolveScope::emit(this, kill(dst), scopeRegister(), addConstant(variable.ident()), resolveType(), localScopeDepth());
        m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
        return dst;
    }

    RELEASE_ASSERT_NOT_REACHED();
    return nullptr;
}
2480
// Loads |variable| out of |scope| into |dst|. Stack variables become a plain
// move; captured arguments use op_get_from_arguments; everything else goes
// through op_get_from_scope with a GetPutInfo describing how to resolve.
RegisterID* BytecodeGenerator::emitGetFromScope(RegisterID* dst, RegisterID* scope, const Variable& variable, ResolveMode resolveMode)
{
    switch (variable.offset().kind()) {
    case VarKind::Stack:
        return move(dst, variable.local());

    case VarKind::DirectArgument: {
        OpGetFromArguments::emit(this, kill(dst), scope, variable.offset().capturedArgumentsOffset().offset());
        return dst;
    }

    case VarKind::Scope:
    case VarKind::Invalid: {
        // Statically resolved scope variables use LocalClosureVar and carry
        // their scope offset; dynamic lookups rely on resolveType() instead.
        OpGetFromScope::emit(
            this,
            kill(dst),
            scope,
            addConstant(variable.ident()),
            GetPutInfo(resolveMode, variable.offset().isScope() ? LocalClosureVar : resolveType(), InitializationMode::NotInitialization),
            localScopeDepth(),
            variable.offset().isScope() ? variable.offset().scopeOffset().offset() : 0);
        m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
        return dst;
    } }

    RELEASE_ASSERT_NOT_REACHED();
}
2508
// Stores |value| into |variable| within |scope|. Stack variables become a
// plain move; captured arguments use op_put_to_arguments; everything else
// goes through op_put_to_scope. Returns |value|.
RegisterID* BytecodeGenerator::emitPutToScope(RegisterID* scope, const Variable& variable, RegisterID* value, ResolveMode resolveMode, InitializationMode initializationMode)
{
    switch (variable.offset().kind()) {
    case VarKind::Stack:
        move(variable.local(), value);
        return value;

    case VarKind::DirectArgument:
        OpPutToArguments::emit(this, scope, variable.offset().capturedArgumentsOffset().offset(), value);
        return value;

    case VarKind::Scope:
    case VarKind::Invalid: {
        GetPutInfo getPutInfo(0);
        SymbolTableOrScopeDepth symbolTableOrScopeDepth;
        ScopeOffset offset;
        if (variable.offset().isScope()) {
            // Statically resolved: record the scope offset and the symbol
            // table's constant-pool register.
            offset = variable.offset().scopeOffset();
            getPutInfo = GetPutInfo(resolveMode, LocalClosureVar, initializationMode);
            symbolTableOrScopeDepth = SymbolTableOrScopeDepth::symbolTable(VirtualRegister { variable.symbolTableConstantIndex() });
        } else {
            // Dynamic resolution: the linker needs the local scope depth.
            ASSERT(resolveType() != LocalClosureVar);
            getPutInfo = GetPutInfo(resolveMode, resolveType(), initializationMode);
            symbolTableOrScopeDepth = SymbolTableOrScopeDepth::scopeDepth(localScopeDepth());
        }
        // |offset| is unset (falsy) in the dynamic case, so emit 0 there.
        OpPutToScope::emit(this, scope, addConstant(variable.ident()), value, getPutInfo, symbolTableOrScopeDepth, !!offset ? offset.offset() : 0);
        m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
        return value;
    } }

    RELEASE_ASSERT_NOT_REACHED();
}
2541
// Resolves |variable|'s scope and writes |value| into it. The variable must
// already have a statically known location (not VarKind::Invalid).
RegisterID* BytecodeGenerator::initializeVariable(const Variable& variable, RegisterID* value)
{
    RELEASE_ASSERT(variable.offset().kind() != VarKind::Invalid);
    RegisterID* scope = emitResolveScope(nullptr, variable);
    return emitPutToScope(scope, variable, value, ThrowIfNotFound, InitializationMode::NotInitialization);
}
2548
// Emits op_instanceof checking |value| against |basePrototype|, storing the
// boolean result into |dst|. Returns |dst|.
RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
{
    OpInstanceof::emit(this, dst, value, basePrototype);
    return dst;
}
2554
// Emits op_instanceof_custom for constructors with a custom hasInstance
// value. Returns |dst|.
RegisterID* BytecodeGenerator::emitInstanceOfCustom(RegisterID* dst, RegisterID* value, RegisterID* constructor, RegisterID* hasInstanceValue)
{
    OpInstanceofCustom::emit(this, dst, value, constructor, hasInstanceValue);
    return dst;
}
2560
// Emits op_in_by_val: dst = property in base. Note the operand order swap
// (base before property) relative to the parameters.
RegisterID* BytecodeGenerator::emitInByVal(RegisterID* dst, RegisterID* property, RegisterID* base)
{
    OpInByVal::emit(this, dst, base, property);
    return dst;
}
2566
// Emits op_in_by_id for a named property. Returns |dst|.
RegisterID* BytecodeGenerator::emitInById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    OpInById::emit(this, dst, base, addConstant(property));
    return dst;
}
2572
// Emits op_try_get_by_id (the @tryGetById intrinsic's lookup). Returns |dst|.
RegisterID* BytecodeGenerator::emitTryGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties are not supported with tryGetById.");

    OpTryGetById::emit(this, kill(dst), base, addConstant(property));
    return dst;
}
2580
// Emits op_get_by_id for a named property load and records the instruction
// for property-access bookkeeping. Returns |dst|.
RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with get_by_val.");

    OpGetById::emit(this, kill(dst), base, addConstant(property));
    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
    return dst;
}
2589
// Emits op_get_by_id_with_this for named loads with an explicit |this| (e.g.
// super.property access). Returns |dst|.
RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, RegisterID* thisVal, const Identifier& property)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with get_by_val.");

    OpGetByIdWithThis::emit(this, kill(dst), base, thisVal, addConstant(property));
    return dst;
}
2597
// Emits op_get_by_id_direct (own-property load) and records the instruction
// for property-access bookkeeping. Returns |dst|.
RegisterID* BytecodeGenerator::emitDirectGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with get_by_val_direct.");

    OpGetByIdDirect::emit(this, kill(dst), base, addConstant(property));
    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
    return dst;
}
2606
// Emits a non-direct op_put_by_id (base.property = value), feeding the static
// property analyzer and recording the instruction for property-access
// bookkeeping. Returns |value|.
RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with put_by_val.");

    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base, propertyIndex);

    OpPutById::emit(this, base, propertyIndex, value, PutByIdNone); // is not direct
    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());

    return value;
}
2620
// Emits op_put_by_id_with_this for stores with an explicit |this| (e.g.
// super.property = value). Returns |value|.
RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, RegisterID* thisValue, const Identifier& property, RegisterID* value)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with put_by_val.");

    unsigned propertyIndex = addConstant(property);

    OpPutByIdWithThis::emit(this, base, thisValue, propertyIndex, value);

    return value;
}
2631
// Emits an own-property op_put_by_id. The direct flag is used unless the
// property is "__proto__" without KnownDirect, which must take the
// non-direct path. Returns |value|.
RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value, PropertyNode::PutType putType)
{
    ASSERT_WITH_MESSAGE(!parseIndex(property), "Indexed properties should be handled with put_by_val(direct).");

    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base, propertyIndex);

    PutByIdFlags type = (putType == PropertyNode::KnownDirect || property != m_vm->propertyNames->underscoreProto) ? PutByIdIsDirect : PutByIdNone;
    OpPutById::emit(this, base, propertyIndex, value, type);
    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
    return value;
}
2645
// Emits op_put_getter_by_id, defining a named getter on |base| with the given
// property attributes.
void BytecodeGenerator::emitPutGetterById(RegisterID* base, const Identifier& property, unsigned attributes, RegisterID* getter)
{
    unsigned propertyIndex = addConstant(property);
    m_staticPropertyAnalyzer.putById(base, propertyIndex);

    OpPutGetterById::emit(this, base, propertyIndex, attributes, getter);
}
2653
// Emits op_put_setter_by_id, defining a named setter on |base| with the given
// property attributes.
void BytecodeGenerator::emitPutSetterById(RegisterID* base, const Identifier& property, unsigned attributes, RegisterID* setter)
{
    unsigned propertyIndex = addConstant(property);
    m_staticPropertyAnalyzer.putById(base, propertyIndex);

    OpPutSetterById::emit(this, base, propertyIndex, attributes, setter);
}
2661
// Emits op_put_getter_setter_by_id, installing a getter/setter pair for a
// named property in a single instruction.
void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, unsigned attributes, RegisterID* getter, RegisterID* setter)
{
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base, propertyIndex);

    OpPutGetterSetterById::emit(this, base, propertyIndex, attributes, getter, setter);
}
2670
// Emits op_put_getter_by_val for a computed-property getter definition.
void BytecodeGenerator::emitPutGetterByVal(RegisterID* base, RegisterID* property, unsigned attributes, RegisterID* getter)
{
    OpPutGetterByVal::emit(this, base, property, attributes, getter);
}
2675
// Emits op_put_setter_by_val for a computed-property setter definition.
void BytecodeGenerator::emitPutSetterByVal(RegisterID* base, RegisterID* property, unsigned attributes, RegisterID* setter)
{
    OpPutSetterByVal::emit(this, base, property, attributes, setter);
}
2680
// Initializes the generator object's bookkeeping (@generatorNext, @generatorThis,
// @generatorState, @generatorFrame) as direct private-named properties on
// m_generatorRegister.
void BytecodeGenerator::emitPutGeneratorFields(RegisterID* nextFunction)
{
    // FIXME: Currently, we just create an object and store generator related fields as its properties for ease.
    // But to make it efficient, we will introduce JSGenerator class, add opcode new_generator and use its C++ fields instead of these private properties.
    // https://bugs.webkit.org/show_bug.cgi?id=151545

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorNextPrivateName(), nextFunction, PropertyNode::KnownDirect);

    // We do not store 'this' in arrow function within constructor,
    // because it might be not initialized, if super is called later.
    if (!(isDerivedConstructorContext() && m_codeBlock->parseMode() == SourceParseMode::AsyncArrowFunctionMode))
        emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorThisPrivateName(), &m_thisRegister, PropertyNode::KnownDirect);

    // Initial state is 0; the frame starts out null until the generator first suspends.
    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorStatePrivateName(), emitLoad(nullptr, jsNumber(0)), PropertyNode::KnownDirect);

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorFramePrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);
}
2698
// Initializes the async generator object's bookkeeping fields as direct
// private-named properties on m_generatorRegister: next/this/state/frame plus
// the async-specific suspend reason and the (initially empty) request queue.
void BytecodeGenerator::emitPutAsyncGeneratorFields(RegisterID* nextFunction)
{
    ASSERT(isAsyncGeneratorWrapperParseMode(parseMode()));

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorNextPrivateName(), nextFunction, PropertyNode::KnownDirect);

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorThisPrivateName(), &m_thisRegister, PropertyNode::KnownDirect);

    // State starts at SuspendedStart; the frame is null until the first suspension.
    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorStatePrivateName(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSAsyncGeneratorFunction::AsyncGeneratorState::SuspendedStart))), PropertyNode::KnownDirect);

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().generatorFramePrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);

    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().asyncGeneratorSuspendReasonPrivateName(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason::None))), PropertyNode::KnownDirect);

    // Empty queue: both head and tail pointers are null.
    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().asyncGeneratorQueueFirstPrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);
    emitDirectPutById(m_generatorRegister, propertyNames().builtinNames().asyncGeneratorQueueLastPrivateName(), emitLoad(nullptr, jsNull()), PropertyNode::KnownDirect);
}
2716
// Emits op_del_by_id: dst = delete base.property. Returns |dst|.
RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    OpDelById::emit(this, dst, base, addConstant(property));
    return dst;
}
2722
// Emits a subscripted load (base[property]). When |property| is the loop
// variable of an active for-in context, emits a specialized form instead:
// indexed contexts load via the raw index register, structure contexts use
// op_get_direct_pname with the enumerator. Either way the instruction offset
// is registered with the context (addGetInst) so it can be revisited later.
RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    // Search active for-in contexts, innermost first.
    for (size_t i = m_forInContextStack.size(); i--; ) {
        ForInContext& context = m_forInContextStack[i].get();
        if (context.local() != property)
            continue;

        if (context.isIndexedForInContext()) {
            auto& indexedContext = context.asIndexedForInContext();
            kill(dst);
            // Pick the narrowest encoding that fits so the recorded
            // instruction's size is known up front.
            if (OpGetByVal::checkWithoutMetadataID<OpcodeSize::Narrow>(this, dst, base, property))
                OpGetByVal::emitWithSmallestSizeRequirement<OpcodeSize::Narrow>(this, dst, base, indexedContext.index());
            else if (OpGetByVal::checkWithoutMetadataID<OpcodeSize::Wide16>(this, dst, base, property))
                OpGetByVal::emitWithSmallestSizeRequirement<OpcodeSize::Wide16>(this, dst, base, indexedContext.index());
            else
                OpGetByVal::emit<OpcodeSize::Wide32>(this, dst, base, indexedContext.index());
            indexedContext.addGetInst(m_lastInstruction.offset(), property->index());
            return dst;
        }

        // We cannot do the above optimization here since OpGetDirectPname => OpGetByVal conversion involves different metadata ID allocation.
        StructureForInContext& structureContext = context.asStructureForInContext();
        OpGetDirectPname::emit<OpcodeSize::Wide32>(this, kill(dst), base, property, structureContext.index(), structureContext.enumerator());

        structureContext.addGetInst(m_lastInstruction.offset(), property->index());
        return dst;
    }

    // Generic path: plain op_get_by_val.
    OpGetByVal::emit(this, kill(dst), base, property);
    return dst;
}
2754
// Emits op_get_by_val_with_this for subscripted loads with an explicit
// |this| (e.g. super[expr]). Returns |dst|.
RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* thisValue, RegisterID* property)
{
    OpGetByValWithThis::emit(this, kill(dst), base, thisValue, property);
    return dst;
}
2760
// Emits op_put_by_val: base[property] = value. Returns |value|.
RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    OpPutByVal::emit(this, base, property, value);
    return value;
}
2766
// Emits op_put_by_val_with_this for subscripted stores with an explicit
// |this|. Returns |value|.
RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* thisValue, RegisterID* property, RegisterID* value)
{
    OpPutByValWithThis::emit(this, base, thisValue, property, value);
    return value;
}
2772
// Emits op_put_by_val_direct (own-property subscripted store). Returns |value|.
RegisterID* BytecodeGenerator::emitDirectPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    OpPutByValDirect::emit(this, base, property, value);
    return value;
}
2778
// Emits op_del_by_val: dst = delete base[property]. Returns |dst|.
RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    OpDelByVal::emit(this, dst, base, property);
    return dst;
}
2784
// Emits op_super_sampler_begin (profiling instrumentation).
void BytecodeGenerator::emitSuperSamplerBegin()
{
    OpSuperSamplerBegin::emit(this);
}
2789
// Emits op_super_sampler_end, closing a region opened by emitSuperSamplerBegin.
void BytecodeGenerator::emitSuperSamplerEnd()
{
    OpSuperSamplerEnd::emit(this);
}
2794
// Emits op_identity_with_profile, attaching a SpeculatedType hint to |src|.
// The 64-bit profile is split into high/low 32-bit operands.
RegisterID* BytecodeGenerator::emitIdWithProfile(RegisterID* src, SpeculatedType profile)
{
    OpIdentityWithProfile::emit(this, src, static_cast<uint32_t>(profile >> 32), static_cast<uint32_t>(profile));
    return src;
}
2800
// Emits op_unreachable, marking a point control flow must never reach.
void BytecodeGenerator::emitUnreachable()
{
    OpUnreachable::emit(this);
}
2805
// Emits op_get_argument for the zero-based argument |index|. The +1 skips the
// implicit |this| slot. Returns |dst|.
RegisterID* BytecodeGenerator::emitGetArgument(RegisterID* dst, int32_t index)
{
    OpGetArgument::emit(this, dst, index + 1 /* Including |this| */);
    return dst;
}
2811
// Emits op_create_this. |dst| is passed as both operands: it supplies the
// input and receives the created object. Also notifies the static property
// analyzer and records the instruction for property-access bookkeeping.
RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
{
    OpCreateThis::emit(this, dst, dst, 0);
    m_staticPropertyAnalyzer.createThis(dst, m_lastInstruction);

    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
    return dst;
}
2820
// Emits op_check_tdz, which traps if |target| still holds the TDZ sentinel.
void BytecodeGenerator::emitTDZCheck(RegisterID* target)
{
    OpCheckTdz::emit(this, target);
}
2825
2826bool BytecodeGenerator::needsTDZCheck(const Variable& variable)
2827{
2828 for (unsigned i = m_TDZStack.size(); i--;) {
2829 auto iter = m_TDZStack[i].find(variable.ident().impl());
2830 if (iter == m_TDZStack[i].end())
2831 continue;
2832 return iter->value != TDZNecessityLevel::NotNeeded;
2833 }
2834
2835 return false;
2836}
2837
2838void BytecodeGenerator::emitTDZCheckIfNecessary(const Variable& variable, RegisterID* target, RegisterID* scope)
2839{
2840 if (needsTDZCheck(variable)) {
2841 if (target)
2842 emitTDZCheck(target);
2843 else {
2844 RELEASE_ASSERT(!variable.isLocal() && scope);
2845 RefPtr<RegisterID> result = emitGetFromScope(newTemporary(), scope, variable, DoNotThrowIfNotFound);
2846 emitTDZCheck(result.get());
2847 }
2848 }
2849}
2850
// Called when |variable| is known to be initialized: if the innermost scope
// tracking it was pushed with the Optimize level, downgrade it to NotNeeded
// so later reads skip the TDZ check, and invalidate the cached TDZ set.
void BytecodeGenerator::liftTDZCheckIfPossible(const Variable& variable)
{
    RefPtr<UniquedStringImpl> identifier(variable.ident().impl());
    for (unsigned i = m_TDZStack.size(); i--;) {
        auto iter = m_TDZStack[i].find(identifier);
        if (iter != m_TDZStack[i].end()) {
            if (iter->value == TDZNecessityLevel::Optimize) {
                m_cachedVariablesUnderTDZ = { };
                iter->value = TDZNecessityLevel::NotNeeded;
            }
            // Only the innermost scope that knows the identifier matters.
            break;
        }
    }
}
2865
2866void BytecodeGenerator::pushTDZVariables(const VariableEnvironment& environment, TDZCheckOptimization optimization, TDZRequirement requirement)
2867{
2868 if (!environment.size())
2869 return;
2870
2871 TDZNecessityLevel level;
2872 if (requirement == TDZRequirement::UnderTDZ) {
2873 if (optimization == TDZCheckOptimization::Optimize)
2874 level = TDZNecessityLevel::Optimize;
2875 else
2876 level = TDZNecessityLevel::DoNotOptimize;
2877 } else
2878 level = TDZNecessityLevel::NotNeeded;
2879
2880 TDZMap map;
2881 for (const auto& entry : environment)
2882 map.add(entry.key, entry.value.isFunction() ? TDZNecessityLevel::NotNeeded : level);
2883
2884 m_TDZStack.append(WTFMove(map));
2885 m_cachedVariablesUnderTDZ = { };
2886}
2887
// Computes the set of variables currently under TDZ across the whole TDZ
// stack, returning WTF::nullopt when it is empty. The result is cached in
// m_cachedVariablesUnderTDZ (with m_hasCachedVariablesUnderTDZ distinguishing
// a cached-empty result); mutations of the stack clear the cache.
Optional<CompactVariableMap::Handle> BytecodeGenerator::getVariablesUnderTDZ()
{
    if (m_cachedVariablesUnderTDZ) {
        if (!m_hasCachedVariablesUnderTDZ) {
            ASSERT(m_cachedVariablesUnderTDZ.environment().toVariableEnvironment().isEmpty());
            return WTF::nullopt;
        }
        return m_cachedVariablesUnderTDZ;
    }

    // We keep track of variablesThatDontNeedTDZ in this algorithm to prevent
    // reporting that "x" is under TDZ if this function is called at "...".
    //
    // {
    //     {
    //         let x;
    //         ...
    //     }
    //     let x;
    // }
    SmallPtrSet<UniquedStringImpl*, 16> variablesThatDontNeedTDZ;
    VariableEnvironment environment;
    // Walk innermost to outermost so an inner initialized binding shadows an
    // outer one still under TDZ.
    for (unsigned i = m_TDZStack.size(); i--; ) {
        auto& map = m_TDZStack[i];
        for (auto& entry : map)  {
            if (entry.value != TDZNecessityLevel::NotNeeded) {
                if (!variablesThatDontNeedTDZ.contains(entry.key.get()))
                    environment.add(entry.key.get());
            } else
                variablesThatDontNeedTDZ.add(entry.key.get());
        }
    }

    m_cachedVariablesUnderTDZ = m_vm->m_compactVariableMap->get(environment);
    m_hasCachedVariablesUnderTDZ = !environment.isEmpty();
    if (!m_hasCachedVariablesUnderTDZ)
        return WTF::nullopt;

    return m_cachedVariablesUnderTDZ;
}
2928
// Snapshots the current TDZ stack into |preservedStack| (see restoreTDZStack).
void BytecodeGenerator::preserveTDZStack(BytecodeGenerator::PreservedTDZStack& preservedStack)
{
    preservedStack.m_preservedTDZStack = m_TDZStack;
}
2933
// Restores a TDZ stack snapshot taken by preserveTDZStack, invalidating the
// cached TDZ set since the stack contents changed.
void BytecodeGenerator::restoreTDZStack(const BytecodeGenerator::PreservedTDZStack& preservedStack)
{
    m_TDZStack = preservedStack.m_preservedTDZStack;
    m_cachedVariablesUnderTDZ = { };
}
2939
// Emits op_new_object into |dst| and notifies the static property analyzer so
// it can later refine the object's inline capacity. Returns |dst|.
RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    OpNewObject::emit(this, dst, 0);
    m_staticPropertyAnalyzer.newObject(dst, m_lastInstruction);

    return dst;
}
2947
// Returns the JSBigInt constant for (identifier, radix, sign), parsing and
// registering it in the constant pool on first use; later calls with the
// same key return the cached cell.
JSValue BytecodeGenerator::addBigIntConstant(const Identifier& identifier, uint8_t radix, bool sign)
{
    return m_bigIntMap.ensure(BigIntMapEntry(identifier.impl(), radix, sign), [&] {
        auto scope = DECLARE_CATCH_SCOPE(*vm());
        auto parseIntSign = sign ? JSBigInt::ParseIntSign::Signed : JSBigInt::ParseIntSign::Unsigned;
        JSBigInt* bigIntInMap = JSBigInt::parseInt(nullptr, *vm(), identifier.string(), radix, JSBigInt::ErrorParseMode::ThrowExceptions, parseIntSign);
        // FIXME: [ESNext] Enables a way to throw an error on ByteCodeGenerator step
        // https://bugs.webkit.org/show_bug.cgi?id=180139
        scope.assertNoException();
        RELEASE_ASSERT(bigIntInMap);
        addConstantValue(bigIntInMap);

        return bigIntInMap;
    }).iterator->value;
}
2963
// Returns the JSString constant for |identifier|, creating and registering it
// in the constant pool on first use; later calls return the cached cell.
JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
    if (!stringInMap) {
        stringInMap = jsString(vm(), identifier.string());
        addConstantValue(stringInMap);
    }
    return stringInMap;
}
2973
// Registers a template-object descriptor constant, deduplicated both by the
// descriptor set and by |endOffset|, and returns the constant-pool register
// that holds it.
RegisterID* BytecodeGenerator::addTemplateObjectConstant(Ref<TemplateObjectDescriptor>&& descriptor, int endOffset)
{
    auto result = m_templateObjectDescriptorSet.add(WTFMove(descriptor));
    JSTemplateObjectDescriptor* descriptorValue = m_templateDescriptorMap.ensure(endOffset, [&] {
        return JSTemplateObjectDescriptor::create(*vm(), result.iterator->copyRef(), endOffset);
    }).iterator->value;
    int index = addConstantIndex();
    m_codeBlock->addConstant(descriptorValue);
    return &m_constantPoolRegisters[index];
}
2984
// Emits op_new_array_buffer, creating an array from a precomputed immutable
// butterfly constant with the given indexing-type hint. Returns |dst|.
RegisterID* BytecodeGenerator::emitNewArrayBuffer(RegisterID* dst, JSImmutableButterfly* array, IndexingType recommendedIndexingType)
{
    OpNewArrayBuffer::emit(this, dst, addConstantValue(array), recommendedIndexingType);
    return dst;
}
2990
// Emits op_new_array from the first |length| element expressions. The element
// values are first evaluated into consecutive temporaries, since op_new_array
// requires a sequential register range. Returns |dst|.
RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length, IndexingType recommendedIndexingType)
{
    Vector<RefPtr<RegisterID>, 16, UnsafeVectorOverflow> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        if (!length)
            break;
        length--;
        ASSERT(!n->value()->isSpreadExpression());
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
        emitNode(argv.last().get(), n->value());
    }
    // The caller's |length| must not exceed the number of element nodes.
    ASSERT(!length);
    OpNewArray::emit(this, dst, argv.size() ? argv[0].get() : VirtualRegister { 0 }, argv.size(), recommendedIndexingType);
    return dst;
}
3008
// Emits op_new_array_with_spread. Works in two passes: first allocate one
// temporary per element (a sequential register range, as the opcode requires)
// while recording which positions are spreads in a BitVector; then evaluate
// each element, emitting op_spread for spread expressions. Returns |dst|.
RegisterID* BytecodeGenerator::emitNewArrayWithSpread(RegisterID* dst, ElementNode* elements)
{
    BitVector bitVector;
    Vector<RefPtr<RegisterID>, 16> argv;
    for (ElementNode* node = elements; node; node = node->next()) {
        bitVector.set(argv.size(), node->value()->isSpreadExpression());

        argv.append(newTemporary());
        // op_new_array_with_spread requires the initial values to be a sequential range of registers.
        RELEASE_ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
    }

    RELEASE_ASSERT(argv.size());

    {
        // Second pass: fill in the temporaries in order.
        unsigned i = 0;
        for (ElementNode* node = elements; node; node = node->next()) {
            if (node->value()->isSpreadExpression()) {
                ExpressionNode* expression = static_cast<SpreadExpressionNode*>(node->value())->expression();
                RefPtr<RegisterID> tmp = newTemporary();
                emitNode(tmp.get(), expression);

                OpSpread::emit(this, argv[i].get(), tmp.get());
            } else {
                ExpressionNode* expression = node->value();
                emitNode(argv[i].get(), expression);
            }
            i++;
        }
    }

    unsigned bitVectorIndex = m_codeBlock->addBitVector(WTFMove(bitVector));
    OpNewArrayWithSpread::emit(this, dst, argv[0].get(), argv.size(), bitVectorIndex);
    return dst;
}
3044
// Emits op_new_array_with_size: dst = new Array(length). Returns |dst|.
RegisterID* BytecodeGenerator::emitNewArrayWithSize(RegisterID* dst, RegisterID* length)
{
    OpNewArrayWithSize::emit(this, dst, length);
    return dst;
}
3050
// Emits op_new_regexp from a RegExp constant. Returns |dst|.
RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
{
    OpNewRegexp::emit(this, dst, addConstantValue(regExp));
    return dst;
}
3056
// Registers |function| as a function expression in the code block and emits
// the function-creation opcode matching its parse mode (generator, async,
// async generator, or plain).
void BytecodeGenerator::emitNewFunctionExpressionCommon(RegisterID* dst, FunctionMetadataNode* function)
{
    unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));

    switch (function->parseMode()) {
    case SourceParseMode::GeneratorWrapperFunctionMode:
    case SourceParseMode::GeneratorWrapperMethodMode:
        OpNewGeneratorFuncExp::emit(this, dst, scopeRegister(), index);
        break;
    case SourceParseMode::AsyncFunctionMode:
    case SourceParseMode::AsyncMethodMode:
    case SourceParseMode::AsyncArrowFunctionMode:
        OpNewAsyncFuncExp::emit(this, dst, scopeRegister(), index);
        break;
    case SourceParseMode::AsyncGeneratorWrapperFunctionMode:
    case SourceParseMode::AsyncGeneratorWrapperMethodMode:
        OpNewAsyncGeneratorFuncExp::emit(this, dst, scopeRegister(), index);
        break;
    default:
        // Ordinary functions, arrow functions, methods, etc.
        OpNewFuncExp::emit(this, dst, scopeRegister(), index);
        break;
    }
}
3080
3081RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* dst, FuncExprNode* func)
3082{
3083 emitNewFunctionExpressionCommon(dst, func->metadata());
3084 return dst;
3085}
3086
3087RegisterID* BytecodeGenerator::emitNewArrowFunctionExpression(RegisterID* dst, ArrowFuncExprNode* func)
3088{
3089 ASSERT(SourceParseModeSet(SourceParseMode::ArrowFunctionMode, SourceParseMode::AsyncArrowFunctionMode).contains(func->metadata()->parseMode()));
3090 emitNewFunctionExpressionCommon(dst, func->metadata());
3091 return dst;
3092}
3093
3094RegisterID* BytecodeGenerator::emitNewMethodDefinition(RegisterID* dst, MethodDefinitionNode* func)
3095{
3096 ASSERT(isMethodParseMode(func->metadata()->parseMode()));
3097 emitNewFunctionExpressionCommon(dst, func->metadata());
3098 return dst;
3099}
3100
3101RegisterID* BytecodeGenerator::emitNewDefaultConstructor(RegisterID* dst, ConstructorKind constructorKind, const Identifier& name,
3102 const Identifier& ecmaName, const SourceCode& classSource)
3103{
3104 UnlinkedFunctionExecutable* executable = m_vm->builtinExecutables()->createDefaultConstructor(constructorKind, name);
3105 executable->setInvalidTypeProfilingOffsets();
3106 executable->setEcmaName(ecmaName);
3107 executable->setClassSource(classSource);
3108
3109 unsigned index = m_codeBlock->addFunctionExpr(executable);
3110
3111 OpNewFuncExp::emit(this, dst, scopeRegister(), index);
3112 return dst;
3113}
3114
3115RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionMetadataNode* function)
3116{
3117 unsigned index = m_codeBlock->addFunctionDecl(makeFunction(function));
3118 if (isGeneratorWrapperParseMode(function->parseMode()))
3119 OpNewGeneratorFunc::emit(this, dst, scopeRegister(), index);
3120 else if (function->parseMode() == SourceParseMode::AsyncFunctionMode)
3121 OpNewAsyncFunc::emit(this, dst, scopeRegister(), index);
3122 else if (isAsyncGeneratorWrapperParseMode(function->parseMode()))
3123 OpNewAsyncGeneratorFunc::emit(this, dst, scopeRegister(), index);
3124 else
3125 OpNewFunc::emit(this, dst, scopeRegister(), index);
3126 return dst;
3127}
3128
3129void BytecodeGenerator::emitSetFunctionNameIfNeeded(ExpressionNode* valueNode, RegisterID* value, RegisterID* name)
3130{
3131 if (valueNode->isBaseFuncExprNode()) {
3132 FunctionMetadataNode* metadata = static_cast<BaseFuncExprNode*>(valueNode)->metadata();
3133 if (!metadata->ecmaName().isNull())
3134 return;
3135 } else if (valueNode->isClassExprNode()) {
3136 ClassExprNode* classExprNode = static_cast<ClassExprNode*>(valueNode);
3137 if (!classExprNode->ecmaName().isNull())
3138 return;
3139 if (classExprNode->hasStaticProperty(m_vm->propertyNames->name))
3140 return;
3141 } else
3142 return;
3143
3144 // FIXME: We should use an op_call to an internal function here instead.
3145 // https://bugs.webkit.org/show_bug.cgi?id=155547
3146 OpSetFunctionName::emit(this, value, name);
3147}
3148
3149RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3150{
3151 return emitCall<OpCall>(dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
3152}
3153
3154RegisterID* BytecodeGenerator::emitCallInTailPosition(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3155{
3156 if (m_inTailPosition) {
3157 m_codeBlock->setHasTailCalls();
3158 return emitCall<OpTailCall>(dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
3159 }
3160 return emitCall<OpCall>(dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
3161}
3162
3163RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3164{
3165 return emitCall<OpCallEval>(dst, func, NoExpectedFunction, callArguments, divot, divotStart, divotEnd, debuggableCall);
3166}
3167
3168ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
3169{
3170 if (identifier == propertyNames().Object || identifier == propertyNames().builtinNames().ObjectPrivateName())
3171 return ExpectObjectConstructor;
3172 if (identifier == propertyNames().Array || identifier == propertyNames().builtinNames().ArrayPrivateName())
3173 return ExpectArrayConstructor;
3174 return NoExpectedFunction;
3175}
3176
// Emits a fast-path snippet for calls that syntactically look like
// Object(...) or Array(...): a pointer check against the well-known
// constructor guards an inlined allocation. On mismatch, control falls
// through to |realCall|, where the caller emits the ordinary call; on the
// fast path we jump to |done|. Returns NoExpectedFunction when no snippet
// was emitted (caller then skips the |done| label).
ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label& done)
{
    Ref<Label> realCall = newLabel();
    switch (expectedFunction) {
    case ExpectObjectConstructor: {
        // If the number of arguments is non-zero, then we can't do anything interesting.
        // (The count includes |this|, hence the comparison against 2.)
        if (callArguments.argumentCountIncludingThis() >= 2)
            return NoExpectedFunction;

        OpJneqPtr::emit(this, func, Special::ObjectConstructor, realCall->bind(this));

        if (dst != ignoredResult())
            emitNewObject(dst);
        break;
    }

    case ExpectArrayConstructor: {
        // If you're doing anything other than "new Array()" or "new Array(foo)" then we
        // don't inline it, for now. The only reason is that call arguments are in
        // the opposite order of what op_new_array expects, so we'd either need to change
        // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
        // things sounds like it's worth it.
        if (callArguments.argumentCountIncludingThis() > 2)
            return NoExpectedFunction;

        OpJneqPtr::emit(this, func, Special::ArrayConstructor, realCall->bind(this));

        if (dst != ignoredResult()) {
            if (callArguments.argumentCountIncludingThis() == 2)
                emitNewArrayWithSize(dst, callArguments.argumentRegister(0));
            else {
                ASSERT(callArguments.argumentCountIncludingThis() == 1);
                OpNewArray::emit(this, dst, VirtualRegister { 0 }, 0, ArrayWithUndecided);
            }
        }
        break;
    }

    default:
        ASSERT(expectedFunction == NoExpectedFunction);
        return NoExpectedFunction;
    }

    // Fast path taken: skip the real call.
    OpJmp::emit(this, done.bind(this));
    emitLabel(realCall.get());

    return expectedFunction;
}
3225
// Core emitter shared by op_call, op_call_eval and op_tail_call. Handles
// the spread-argument special cases by rerouting to the varargs path,
// otherwise lays out arguments, reserves the callee frame header, emits
// the optional constructor fast-path snippet, and finally the call itself.
template<typename CallOp>
RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
{
    constexpr auto opcodeID = CallOp::opcodeID;
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval || opcodeID == op_tail_call);
    ASSERT(func->refCount());

    // Generate code for arguments.
    unsigned argument = 0;
    if (callArguments.argumentsNode()) {
        ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
        if (n && n->m_expr->isSpreadExpression()) {
            // A lone spread argument, e.g. f(...args). The parser guarantees
            // a spread is never mixed with other arguments here.
            RELEASE_ASSERT(!n->m_next);
            auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
            if (expression->isArrayLiteral()) {
                auto* elements = static_cast<ArrayNode*>(expression)->elements();
                if (elements && !elements->next() && elements->value()->isSpreadExpression()) {
                    // f(...[...x]) — spread of a single-spread array literal:
                    // collapse to a direct spread of x.
                    ExpressionNode* expression = static_cast<SpreadExpressionNode*>(elements->value())->expression();
                    RefPtr<RegisterID> argumentRegister = emitNode(callArguments.argumentRegister(0), expression);
                    OpSpread::emit(this, argumentRegister.get(), argumentRegister.get());

                    return emitCallVarargs<typename VarArgsOp<CallOp>::type>(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, debuggableCall);
                }
            }
            RefPtr<RegisterID> argumentRegister;
            argumentRegister = expression->emitBytecode(*this, callArguments.argumentRegister(0));
            // Keep |this| alive in a protected temporary across the varargs emission.
            RefPtr<RegisterID> thisRegister = move(newTemporary(), callArguments.thisRegister());
            return emitCallVarargs<typename VarArgsOp<CallOp>::type>(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, debuggableCall);
        }
        // Ordinary argument list: evaluate each into its call-frame slot.
        for (; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argument++), n);
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, CallFrame::headerSizeInRegisters, UnsafeVectorOverflow> callFrame;
    for (int i = 0; i < CallFrame::headerSizeInRegisters; ++i)
        callFrame.append(newTemporary());

    if (shouldEmitDebugHooks() && debuggableCall == DebuggableCall::Yes)
        emitDebugHook(WillExecuteExpression, divotStart);

    emitExpressionInfo(divot, divotStart, divotEnd);

    Ref<Label> done = newLabel();
    expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());

    // Tail calls must log to ShadowChicken before the frame is replaced.
    if (opcodeID == op_tail_call)
        emitLogShadowChickenTailIfNecessary();

    // Emit call.
    ASSERT(dst);
    ASSERT(dst != ignoredResult());
    CallOp::emit(this, dst, func, callArguments.argumentCountIncludingThis(), callArguments.stackOffset());

    if (expectedFunction != NoExpectedFunction)
        emitLabel(done.get());

    return dst;
}
3285
3286RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3287{
3288 return emitCallVarargs<OpCallVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3289}
3290
3291RegisterID* BytecodeGenerator::emitCallVarargsInTailPosition(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3292{
3293 if (m_inTailPosition)
3294 return emitCallVarargs<OpTailCallVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3295 return emitCallVarargs<OpCallVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3296}
3297
3298RegisterID* BytecodeGenerator::emitConstructVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3299{
3300 return emitCallVarargs<OpConstructVarargs>(dst, func, thisRegister, arguments, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3301}
3302
3303RegisterID* BytecodeGenerator::emitCallForwardArgumentsInTailPosition(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
3304{
3305 // We must emit a tail call here because we did not allocate an arguments object thus we would otherwise have no way to correctly make this call.
3306 ASSERT(m_inTailPosition || !Options::useTailCalls());
3307 return emitCallVarargs<OpTailCallForwardArguments>(dst, func, thisRegister, nullptr, firstFreeRegister, firstVarArgOffset, divot, divotStart, divotEnd, debuggableCall);
3308}
3309
// Shared emitter for every varargs-style opcode (call, tail call,
// construct, forward-arguments). |arguments| may be null (the
// forward-arguments case); it is then encoded as VirtualRegister(0).
template<typename VarargsOp>
RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, int32_t firstVarArgOffset, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd, DebuggableCall debuggableCall)
{
    if (shouldEmitDebugHooks() && debuggableCall == DebuggableCall::Yes)
        emitDebugHook(WillExecuteExpression, divotStart);

    emitExpressionInfo(divot, divotStart, divotEnd);

    // Tail calls must log to ShadowChicken before the frame is replaced.
    if (VarargsOp::opcodeID == op_tail_call_varargs)
        emitLogShadowChickenTailIfNecessary();

    // Emit call.
    ASSERT(dst != ignoredResult());
    VarargsOp::emit(this, dst, func, thisRegister, arguments ? arguments : VirtualRegister(0), firstFreeRegister, firstVarArgOffset);
    return dst;
}
3326
3327void BytecodeGenerator::emitLogShadowChickenPrologueIfNecessary()
3328{
3329 if (!shouldEmitDebugHooks() && !Options::alwaysUseShadowChicken())
3330 return;
3331 OpLogShadowChickenPrologue::emit(this, scopeRegister());
3332}
3333
3334void BytecodeGenerator::emitLogShadowChickenTailIfNecessary()
3335{
3336 if (!shouldEmitDebugHooks() && !Options::alwaysUseShadowChicken())
3337 return;
3338 OpLogShadowChickenTail::emit(this, thisRegister(), scopeRegister());
3339}
3340
// Emits bytecode equivalent to Object.defineProperty(newObj, propertyName,
// descriptor). |options| is a bitmask of Property* flags; exactly one of
// the value register or the getter/setter registers may be provided.
void BytecodeGenerator::emitCallDefineProperty(RegisterID* newObj, RegisterID* propertyNameRegister,
    RegisterID* valueRegister, RegisterID* getterRegister, RegisterID* setterRegister, unsigned options, const JSTextPosition& position)
{
    DefinePropertyAttributes attributes;
    if (options & PropertyConfigurable)
        attributes.setConfigurable(true);

    // A data descriptor defaults to non-writable unless explicitly writable.
    if (options & PropertyWritable)
        attributes.setWritable(true);
    else if (valueRegister)
        attributes.setWritable(false);

    if (options & PropertyEnumerable)
        attributes.setEnumerable(true);

    if (valueRegister)
        attributes.setValue();
    if (getterRegister)
        attributes.setGet();
    if (setterRegister)
        attributes.setSet();

    // Data and accessor descriptors are mutually exclusive.
    ASSERT(!valueRegister || (!getterRegister && !setterRegister));

    emitExpressionInfo(position, position, position);

    if (attributes.hasGet() || attributes.hasSet()) {
        // An accessor property with only one of get/set uses the intrinsic
        // %ThrowTypeError% function for the missing half.
        RefPtr<RegisterID> throwTypeErrorFunction;
        if (!attributes.hasGet() || !attributes.hasSet())
            throwTypeErrorFunction = moveLinkTimeConstant(nullptr, LinkTimeConstant::ThrowTypeErrorFunction);

        RefPtr<RegisterID> getter;
        if (attributes.hasGet())
            getter = getterRegister;
        else
            getter = throwTypeErrorFunction;

        RefPtr<RegisterID> setter;
        if (attributes.hasSet())
            setter = setterRegister;
        else
            setter = throwTypeErrorFunction;

        OpDefineAccessorProperty::emit(this, newObj, propertyNameRegister, getter.get(), setter.get(), emitLoad(nullptr, jsNumber(attributes.rawRepresentation())));
    } else {
        OpDefineDataProperty::emit(this, newObj, propertyNameRegister, valueRegister, emitLoad(nullptr, jsNumber(attributes.rawRepresentation())));
    }
}
3389
// Emits a return of |src|, implementing the constructor return semantics:
// a constructor returning a non-object returns |this| instead, and a
// derived-class constructor additionally enforces the this-TDZ and rejects
// non-object, non-undefined return values.
RegisterID* BytecodeGenerator::emitReturn(RegisterID* src, ReturnFrom from)
{
    if (isConstructor()) {
        bool isDerived = constructorKind() == ConstructorKind::Extends;
        bool srcIsThis = src->index() == m_thisRegister.index();

        // Returning |this| from a derived constructor (or via finally, where
        // the value is not statically known) requires |this| to be initialized.
        if (isDerived && (srcIsThis || from == ReturnFrom::Finally))
            emitTDZCheck(src);

        if (!srcIsThis || from == ReturnFrom::Finally) {
            // If |src| is an object it is returned as-is (jump past the
            // fallback path below).
            Ref<Label> isObjectLabel = newLabel();
            emitJumpIfTrue(emitIsObject(newTemporary(), src), isObjectLabel.get());

            if (isDerived) {
                // Derived constructors may only return an object or undefined.
                Ref<Label> isUndefinedLabel = newLabel();
                emitJumpIfTrue(emitIsUndefined(newTemporary(), src), isUndefinedLabel.get());
                emitThrowTypeError("Cannot return a non-object type in the constructor of a derived class.");
                emitLabel(isUndefinedLabel.get());
                emitTDZCheck(&m_thisRegister);
            }
            // Non-object return value: return |this| instead.
            OpRet::emit(this, &m_thisRegister);
            emitLabel(isObjectLabel.get());
        }
    }

    OpRet::emit(this, src);
    return src;
}
3418
3419RegisterID* BytecodeGenerator::emitEnd(RegisterID* src)
3420{
3421 OpEnd::emit(this, src);
3422 return src;
3423}
3424
3425
// Emits bytecode for a `new` expression. Mirrors emitCall: spread arguments
// reroute to op_construct_varargs; otherwise arguments are laid out, the
// callee frame header is reserved, the optional Object/Array fast-path
// snippet is emitted, and finally op_construct. |lazyThis| seeds the
// this-register before the call.
RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, RegisterID* lazyThis, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    ASSERT(func->refCount());

    // Generate code for arguments.
    unsigned argument = 0;
    if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {

        ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
        if (n && n->m_expr->isSpreadExpression()) {
            // A lone spread argument; the parser never mixes it with others here.
            RELEASE_ASSERT(!n->m_next);
            auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
            if (expression->isArrayLiteral()) {
                auto* elements = static_cast<ArrayNode*>(expression)->elements();
                if (elements && !elements->next() && elements->value()->isSpreadExpression()) {
                    // new F(...[...x]) — collapse to a direct spread of x.
                    ExpressionNode* expression = static_cast<SpreadExpressionNode*>(elements->value())->expression();
                    RefPtr<RegisterID> argumentRegister = emitNode(callArguments.argumentRegister(0), expression);
                    OpSpread::emit(this, argumentRegister.get(), argumentRegister.get());

                    move(callArguments.thisRegister(), lazyThis);
                    // Keep |this| alive in a protected temporary across emission.
                    RefPtr<RegisterID> thisRegister = move(newTemporary(), callArguments.thisRegister());
                    return emitConstructVarargs(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, DebuggableCall::No);
                }
            }
            RefPtr<RegisterID> argumentRegister;
            argumentRegister = expression->emitBytecode(*this, callArguments.argumentRegister(0));
            move(callArguments.thisRegister(), lazyThis);
            return emitConstructVarargs(dst, func, callArguments.thisRegister(), argumentRegister.get(), newTemporary(), 0, divot, divotStart, divotEnd, DebuggableCall::No);
        }

        // Ordinary argument list: evaluate each into its call-frame slot.
        for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argument++), n);
    }

    move(callArguments.thisRegister(), lazyThis);

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, CallFrame::headerSizeInRegisters, UnsafeVectorOverflow> callFrame;
    for (int i = 0; i < CallFrame::headerSizeInRegisters; ++i)
        callFrame.append(newTemporary());

    emitExpressionInfo(divot, divotStart, divotEnd);

    Ref<Label> done = newLabel();
    expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());

    OpConstruct::emit(this, dst, func, callArguments.argumentCountIncludingThis(), callArguments.stackOffset());

    if (expectedFunction != NoExpectedFunction)
        emitLabel(done.get());

    return dst;
}
3479
3480RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
3481{
3482 OpStrcat::emit(this, dst, src, count);
3483 return dst;
3484}
3485
3486void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
3487{
3488 OpToPrimitive::emit(this, dst, src);
3489}
3490
3491void BytecodeGenerator::emitGetScope()
3492{
3493 OpGetScope::emit(this, scopeRegister());
3494}
3495
// Pushes a `with (objectScope)` scope: allocates a block-scope register to
// hold the new scope object, makes it the active scope, and records it on
// the lexical-scope stack (the manual ref() is paired with the deref() in
// emitPopWithScope).
RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* objectScope)
{
    pushLocalControlFlowScope();
    RegisterID* newScope = newBlockScopeVariable();
    newScope->ref();

    OpPushWithScope::emit(this, newScope, scopeRegister(), objectScope);

    move(scopeRegister(), newScope);
    // No symbol table; the last flag marks this entry as a with-scope.
    m_lexicalScopeStack.append({ nullptr, newScope, true, 0 });

    return newScope;
}
3509
3510RegisterID* BytecodeGenerator::emitGetParentScope(RegisterID* dst, RegisterID* scope)
3511{
3512 OpGetParentScope::emit(this, dst, scope);
3513 return dst;
3514}
3515
3516void BytecodeGenerator::emitPopScope(RegisterID* dst, RegisterID* scope)
3517{
3518 RefPtr<RegisterID> parentScope = emitGetParentScope(newTemporary(), scope);
3519 move(dst, parentScope.get());
3520}
3521
// Undoes emitPushWithScope: restores the parent scope, unwinds the control
// flow scope, and releases the scope register that emitPushWithScope ref'd.
void BytecodeGenerator::emitPopWithScope()
{
    emitPopScope(scopeRegister(), scopeRegister());
    popLocalControlFlowScope();
    auto stackEntry = m_lexicalScopeStack.takeLast();
    stackEntry.m_scope->deref();
    // The popped entry must be the with-scope we pushed.
    RELEASE_ASSERT(stackEntry.m_isWithScope);
}
3530
3531void BytecodeGenerator::emitDebugHook(DebugHookType debugHookType, const JSTextPosition& divot)
3532{
3533 if (!shouldEmitDebugHooks())
3534 return;
3535
3536 emitExpressionInfo(divot, divot, divot);
3537 OpDebug::emit(this, debugHookType, false);
3538}
3539
3540void BytecodeGenerator::emitDebugHook(DebugHookType debugHookType, unsigned line, unsigned charOffset, unsigned lineStart)
3541{
3542 emitDebugHook(debugHookType, JSTextPosition(line, charOffset, lineStart));
3543}
3544
3545void BytecodeGenerator::emitDebugHook(StatementNode* statement)
3546{
3547 // DebuggerStatementNode will output its own special debug hook.
3548 if (statement->isDebuggerStatement())
3549 return;
3550
3551 emitDebugHook(WillExecuteStatement, statement->position());
3552}
3553
3554void BytecodeGenerator::emitDebugHook(ExpressionNode* expr)
3555{
3556 emitDebugHook(WillExecuteStatement, expr->position());
3557}
3558
3559void BytecodeGenerator::emitWillLeaveCallFrameDebugHook()
3560{
3561 RELEASE_ASSERT(m_scopeNode->isFunctionNode());
3562 emitDebugHook(WillLeaveCallFrame, m_scopeNode->lastLine(), m_scopeNode->startOffset(), m_scopeNode->lineStartOffset());
3563}
3564
3565void BytecodeGenerator::pushFinallyControlFlowScope(FinallyContext& finallyContext)
3566{
3567 ControlFlowScope scope(ControlFlowScope::Finally, currentLexicalScopeIndex(), &finallyContext);
3568 m_controlFlowScopeStack.append(WTFMove(scope));
3569
3570 m_finallyDepth++;
3571 m_currentFinallyContext = &finallyContext;
3572}
3573
3574void BytecodeGenerator::popFinallyControlFlowScope()
3575{
3576 ASSERT(m_controlFlowScopeStack.size());
3577 ASSERT(m_controlFlowScopeStack.last().isFinallyScope());
3578 ASSERT(m_finallyDepth > 0);
3579 ASSERT(m_currentFinallyContext);
3580 m_currentFinallyContext = m_currentFinallyContext->outerContext();
3581 m_finallyDepth--;
3582 m_controlFlowScopeStack.removeLast();
3583}
3584
3585LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
3586{
3587 shrinkToFit(m_labelScopes);
3588
3589 if (!m_labelScopes.size())
3590 return nullptr;
3591
3592 // We special-case the following, which is a syntax error in Firefox:
3593 // label:
3594 // break;
3595 if (name.isEmpty()) {
3596 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3597 LabelScope& scope = m_labelScopes[i];
3598 if (scope.type() != LabelScope::NamedLabel)
3599 return &scope;
3600 }
3601 return nullptr;
3602 }
3603
3604 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3605 LabelScope& scope = m_labelScopes[i];
3606 if (scope.name() && *scope.name() == name)
3607 return &scope;
3608 }
3609 return nullptr;
3610}
3611
3612LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
3613{
3614 shrinkToFit(m_labelScopes);
3615
3616 if (!m_labelScopes.size())
3617 return nullptr;
3618
3619 if (name.isEmpty()) {
3620 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3621 LabelScope& scope = m_labelScopes[i];
3622 if (scope.type() == LabelScope::Loop) {
3623 ASSERT(scope.continueTarget());
3624 return &scope;
3625 }
3626 }
3627 return nullptr;
3628 }
3629
3630 // Continue to the loop nested nearest to the label scope that matches
3631 // 'name'.
3632 LabelScope* result = nullptr;
3633 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
3634 LabelScope& scope = m_labelScopes[i];
3635 if (scope.type() == LabelScope::Loop) {
3636 ASSERT(scope.continueTarget());
3637 result = &scope;
3638 }
3639 if (scope.name() && *scope.name() == name)
3640 return result; // may be null.
3641 }
3642 return nullptr;
3643}
3644
3645void BytecodeGenerator::allocateCalleeSaveSpace()
3646{
3647 size_t virtualRegisterCountForCalleeSaves = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters();
3648
3649 for (size_t i = 0; i < virtualRegisterCountForCalleeSaves; i++) {
3650 RegisterID* localRegister = addVar();
3651 localRegister->ref();
3652 m_localRegistersForCalleeSaveRegisters.append(localRegister);
3653 }
3654}
3655
// Allocates the scope register, loads the current scope into it, and
// snapshots it in m_topMostScope so restoreScopeRegister() can always
// recover the function's outermost scope.
void BytecodeGenerator::allocateAndEmitScope()
{
    m_scopeRegister = addVar();
    m_scopeRegister->ref(); // Pin for the lifetime of the code block.
    m_codeBlock->setScopeRegister(scopeRegister()->virtualRegister());
    emitGetScope();
    m_topMostScope = addVar();
    move(m_topMostScope, scopeRegister());
}
3665
// Opens a try region starting at |start| with the given handler. Returns a
// pointer into m_tryData (a SegmentedVector, so the pointer stays stable as
// more entries are appended); the region is closed by popTry().
TryData* BytecodeGenerator::pushTry(Label& start, Label& handlerLabel, HandlerType handlerType)
{
    m_tryData.append(TryData { handlerLabel, handlerType });
    TryData* result = &m_tryData.last();

    m_tryContextStack.append(TryContext {
        start,
        result
    });

    return result;
}
3678
// Closes the innermost try region (which must match |tryData|) at |end|,
// recording the finished range for exception-table construction.
void BytecodeGenerator::popTry(TryData* tryData, Label& end)
{
    m_usesExceptions = true;

    ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);

    m_tryRanges.append(TryRange {
        m_tryContextStack.last().start.copyRef(),
        end,
        m_tryContextStack.last().tryData
    });
    m_tryContextStack.removeLast();
}
3692
3693void BytecodeGenerator::emitOutOfLineCatchHandler(RegisterID* thrownValueRegister, RegisterID* completionTypeRegister, TryData* data)
3694{
3695 RegisterID* unused = newTemporary();
3696 emitOutOfLineExceptionHandler(unused, thrownValueRegister, completionTypeRegister, data);
3697}
3698
3699void BytecodeGenerator::emitOutOfLineFinallyHandler(RegisterID* exceptionRegister, RegisterID* completionTypeRegister, TryData* data)
3700{
3701 RegisterID* unused = newTemporary();
3702 ASSERT(completionTypeRegister);
3703 emitOutOfLineExceptionHandler(exceptionRegister, unused, completionTypeRegister, data);
3704}
3705
3706void BytecodeGenerator::emitOutOfLineExceptionHandler(RegisterID* exceptionRegister, RegisterID* thrownValueRegister, RegisterID* completionTypeRegister, TryData* data)
3707{
3708 VirtualRegister completionTypeVirtualRegister = completionTypeRegister ? completionTypeRegister : VirtualRegister();
3709 m_exceptionHandlersToEmit.append({ data, exceptionRegister, thrownValueRegister, completionTypeVirtualRegister });
3710}
3711
// Restores the scope register to the scope active at |lexicalScopeIndex|,
// scanning outward for the nearest enclosing entry that actually allocated
// a scope object (entries without one, e.g. pure TDZ scopes, are skipped).
void BytecodeGenerator::restoreScopeRegister(int lexicalScopeIndex)
{
    if (lexicalScopeIndex == CurrentLexicalScopeIndex)
        return; // No change needed.

    if (lexicalScopeIndex != OutermostLexicalScopeIndex) {
        ASSERT(lexicalScopeIndex < static_cast<int>(m_lexicalScopeStack.size()));
        int endIndex = lexicalScopeIndex + 1;
        // Scan from the target index outward (toward index 0).
        for (size_t i = endIndex; i--; ) {
            if (m_lexicalScopeStack[i].m_scope) {
                move(scopeRegister(), m_lexicalScopeStack[i].m_scope);
                return;
            }
        }
    }
    // Note that if we don't find a local scope in the current function/program,
    // we must grab the outer-most scope of this bytecode generation.
    move(scopeRegister(), m_topMostScope);
}
3731
3732void BytecodeGenerator::restoreScopeRegister()
3733{
3734 restoreScopeRegister(currentLexicalScopeIndex());
3735}
3736
// Maps a label-scope depth (a position on m_controlFlowScopeStack) to the
// lexical-scope index that was current when that control-flow scope was
// pushed. A zero delta means no unwinding is needed.
int BytecodeGenerator::labelScopeDepthToLexicalScopeIndex(int targetLabelScopeDepth)
{
    ASSERT(labelScopeDepth() - targetLabelScopeDepth >= 0);
    size_t scopeDelta = labelScopeDepth() - targetLabelScopeDepth;
    ASSERT(scopeDelta <= m_controlFlowScopeStack.size());
    if (!scopeDelta)
        return CurrentLexicalScopeIndex;

    // The control-flow scope stack records, per entry, the lexical scope
    // index in effect at push time.
    ControlFlowScope& targetScope = m_controlFlowScopeStack[targetLabelScopeDepth];
    return targetScope.lexicalScopeIndex;
}
3748
3749void BytecodeGenerator::emitThrow(RegisterID* exc)
3750{
3751 m_usesExceptions = true;
3752 OpThrow::emit(this, exc);
3753}
3754
3755RegisterID* BytecodeGenerator::emitArgumentCount(RegisterID* dst)
3756{
3757 OpArgumentCount::emit(this, dst);
3758 return dst;
3759}
3760
3761unsigned BytecodeGenerator::localScopeDepth() const
3762{
3763 return m_localScopeDepth;
3764}
3765
3766int BytecodeGenerator::labelScopeDepth() const
3767{
3768 unsigned depth = localScopeDepth() + m_finallyDepth;
3769 ASSERT(depth == m_controlFlowScopeStack.size());
3770 return depth;
3771}
3772
3773void BytecodeGenerator::emitThrowStaticError(ErrorType errorType, RegisterID* raw)
3774{
3775 RefPtr<RegisterID> message = newTemporary();
3776 emitToString(message.get(), raw);
3777 OpThrowStaticError::emit(this, message.get(), errorType);
3778}
3779
3780void BytecodeGenerator::emitThrowStaticError(ErrorType errorType, const Identifier& message)
3781{
3782 OpThrowStaticError::emit(this, addConstantValue(addStringConstant(message)), errorType);
3783}
3784
3785void BytecodeGenerator::emitThrowReferenceError(const String& message)
3786{
3787 emitThrowStaticError(ErrorType::ReferenceError, Identifier::fromString(m_vm, message));
3788}
3789
3790void BytecodeGenerator::emitThrowTypeError(const String& message)
3791{
3792 emitThrowStaticError(ErrorType::TypeError, Identifier::fromString(m_vm, message));
3793}
3794
3795void BytecodeGenerator::emitThrowTypeError(const Identifier& message)
3796{
3797 emitThrowStaticError(ErrorType::TypeError, message);
3798}
3799
3800void BytecodeGenerator::emitThrowRangeError(const Identifier& message)
3801{
3802 emitThrowStaticError(ErrorType::RangeError, message);
3803}
3804
3805void BytecodeGenerator::emitThrowOutOfMemoryError()
3806{
3807 emitThrowStaticError(ErrorType::Error, Identifier::fromString(m_vm, "Out of memory"));
3808}
3809
// Pushes the scope that binds a named function expression's own name to
// its callee, and stores |callee| into it.
void BytecodeGenerator::emitPushFunctionNameScope(const Identifier& property, RegisterID* callee, bool isCaptured)
{
    // There is some nuance here:
    // If we're in strict mode code, the function name scope variable acts exactly like a "const" variable.
    // If we're not in strict mode code, we want to allow bogus assignments to the name scoped variable.
    // This means any assignment to the variable won't throw, but it won't actually assign a new value to it.
    // To accomplish this, we don't report that this scope is a lexical scope. This will prevent
    // any throws when trying to assign to the variable (while still ensuring it keeps its original
    // value). There is some ugliness and exploitation of a leaky abstraction here, but it's better than
    // having a completely new op code and a class to handle name scopes which are so close in functionality
    // to lexical environments.
    VariableEnvironment nameScopeEnvironment;
    auto addResult = nameScopeEnvironment.add(property);
    if (isCaptured)
        addResult.iterator->value.setIsCaptured();
    addResult.iterator->value.setIsConst(); // The function name scope name acts like a const variable.
    unsigned numVars = m_codeBlock->m_numVars;
    pushLexicalScopeInternal(nameScopeEnvironment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::NotUnderTDZ, ScopeType::FunctionNameScope, ScopeRegisterType::Var);
    ASSERT_UNUSED(numVars, m_codeBlock->m_numVars == static_cast<int>(numVars + 1)); // Should have only created one new "var" for the function name scope.
    bool shouldTreatAsLexicalVariable = isStrictMode();
    Variable functionVar = variableForLocalEntry(property, m_lexicalScopeStack.last().m_symbolTable->get(NoLockingNecessary, property.impl()), m_lexicalScopeStack.last().m_symbolTableConstantIndex, shouldTreatAsLexicalVariable);
    emitPutToScope(m_lexicalScopeStack.last().m_scope, functionVar, callee, ThrowIfNotFound, InitializationMode::NotInitialization);
}
3833
3834void BytecodeGenerator::pushLocalControlFlowScope()
3835{
3836 ControlFlowScope scope(ControlFlowScope::Label, currentLexicalScopeIndex());
3837 m_controlFlowScopeStack.append(WTFMove(scope));
3838 m_localScopeDepth++;
3839}
3840
3841void BytecodeGenerator::popLocalControlFlowScope()
3842{
3843 ASSERT(m_controlFlowScopeStack.size());
3844 ASSERT(!m_controlFlowScopeStack.last().isFinallyScope());
3845 m_controlFlowScopeStack.removeLast();
3846 m_localScopeDepth--;
3847}
3848
void BytecodeGenerator::emitPushCatchScope(VariableEnvironment& environment)
{
    // A catch scope is a block-typed lexical scope whose bindings are under TDZ.
    pushLexicalScopeInternal(environment, TDZCheckOptimization::Optimize, NestedScopeType::IsNotNested, nullptr, TDZRequirement::UnderTDZ, ScopeType::CatchScope, ScopeRegisterType::Block);
}
3853
void BytecodeGenerator::emitPopCatchScope(VariableEnvironment& environment)
{
    // Symmetric with emitPushCatchScope(): tears down the catch's lexical scope.
    popLexicalScopeInternal(environment);
}
3858
3859void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
3860{
3861 switch (type) {
3862 case SwitchInfo::SwitchImmediate: {
3863 size_t tableIndex = m_codeBlock->numberOfSwitchJumpTables();
3864 m_codeBlock->addSwitchJumpTable();
3865 OpSwitchImm::emit(this, tableIndex, BoundLabel(), scrutineeRegister);
3866 break;
3867 }
3868 case SwitchInfo::SwitchCharacter: {
3869 size_t tableIndex = m_codeBlock->numberOfSwitchJumpTables();
3870 m_codeBlock->addSwitchJumpTable();
3871 OpSwitchChar::emit(this, tableIndex, BoundLabel(), scrutineeRegister);
3872 break;
3873 }
3874 case SwitchInfo::SwitchString: {
3875 size_t tableIndex = m_codeBlock->numberOfStringSwitchJumpTables();
3876 m_codeBlock->addStringSwitchJumpTable();
3877 OpSwitchString::emit(this, tableIndex, BoundLabel(), scrutineeRegister);
3878 break;
3879 }
3880 default:
3881 RELEASE_ASSERT_NOT_REACHED();
3882 }
3883
3884 SwitchInfo info = { m_lastInstruction.offset(), type };
3885 m_switchContextStack.append(info);
3886}
3887
3888static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
3889{
3890 UNUSED_PARAM(max);
3891 ASSERT(node->isNumber());
3892 double value = static_cast<NumberNode*>(node)->value();
3893 int32_t key = static_cast<int32_t>(value);
3894 ASSERT(key == value);
3895 ASSERT(key >= min);
3896 ASSERT(key <= max);
3897 return key - min;
3898}
3899
3900static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
3901{
3902 UNUSED_PARAM(max);
3903 ASSERT(node->isString());
3904 StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
3905 ASSERT(clause->length() == 1);
3906
3907 int32_t key = (*clause)[0];
3908 ASSERT(key >= min);
3909 ASSERT(key <= max);
3910 return key - min;
3911}
3912
3913static void prepareJumpTableForSwitch(
3914 UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount,
3915 const Vector<Ref<Label>, 8>& labels, ExpressionNode** nodes, int32_t min, int32_t max,
3916 int32_t (*keyGetter)(ExpressionNode*, int32_t min, int32_t max))
3917{
3918 jumpTable.min = min;
3919 jumpTable.branchOffsets.resize(max - min + 1);
3920 jumpTable.branchOffsets.fill(0);
3921 for (uint32_t i = 0; i < clauseCount; ++i) {
3922 // We're emitting this after the clause labels should have been fixed, so
3923 // the labels should not be "forward" references
3924 ASSERT(!labels[i]->isForward());
3925 jumpTable.add(keyGetter(nodes[i], min, max), labels[i]->bind(switchAddress));
3926 }
3927}
3928
3929static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, const Vector<Ref<Label>, 8>& labels, ExpressionNode** nodes)
3930{
3931 for (uint32_t i = 0; i < clauseCount; ++i) {
3932 // We're emitting this after the clause labels should have been fixed, so
3933 // the labels should not be "forward" references
3934 ASSERT(!labels[i]->isForward());
3935
3936 ASSERT(nodes[i]->isString());
3937 StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
3938 jumpTable.offsetTable.add(clause, UnlinkedStringJumpTable::OffsetLocation { labels[i]->bind(switchAddress) });
3939 }
3940}
3941
void BytecodeGenerator::endSwitch(uint32_t clauseCount, const Vector<Ref<Label>, 8>& labels, ExpressionNode** nodes, Label& defaultLabel, int32_t min, int32_t max)
{
    // Patches the switch opcode emitted by beginSwitch(): sets its default jump
    // target (spilling to the out-of-line jump target table if it does not fit)
    // and fills in the associated jump table from the now-bound clause labels.
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();

    BoundLabel defaultTarget = defaultLabel.bind(switchInfo.bytecodeOffset);
    // Shared patch logic for the two jump-table-based switch kinds (imm/char).
    auto handleSwitch = [&](auto* op, auto bytecode) {
        op->setDefaultOffset(defaultTarget, [&]() {
            // Fallback when the offset does not fit in the instruction itself.
            m_codeBlock->addOutOfLineJumpTarget(switchInfo.bytecodeOffset, defaultTarget);
            return BoundLabel();
        });

        UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->switchJumpTable(bytecode.m_tableIndex);
        prepareJumpTableForSwitch(
            jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max,
            switchInfo.switchType == SwitchInfo::SwitchImmediate
                ? keyForImmediateSwitch
                : keyForCharacterSwitch);
    };

    auto ref = m_writer.ref(switchInfo.bytecodeOffset);
    switch (switchInfo.switchType) {
    case SwitchInfo::SwitchImmediate: {
        handleSwitch(ref->cast<OpSwitchImm>(), ref->as<OpSwitchImm>());
        break;
    }
    case SwitchInfo::SwitchCharacter: {
        handleSwitch(ref->cast<OpSwitchChar>(), ref->as<OpSwitchChar>());
        break;
    }

    case SwitchInfo::SwitchString: {
        // String switches use a hash-table-backed jump table instead of a dense one.
        ref->cast<OpSwitchString>()->setDefaultOffset(defaultTarget, [&]() {
            m_codeBlock->addOutOfLineJumpTarget(switchInfo.bytecodeOffset, defaultTarget);
            return BoundLabel();
        });

        UnlinkedStringJumpTable& jumpTable = m_codeBlock->stringSwitchJumpTable(ref->as<OpSwitchString>().m_tableIndex);
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
        break;
    }

    default:
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }
}
3989
RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // Flags the code block as too deeply nested; the actual throw is handled
    // later based on m_expressionTooDeep. Returns a fresh temporary so callers
    // expecting a result register can continue.
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    m_expressionTooDeep = true;
    return newTemporary();
}
3999
4000bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
4001{
4002 RegisterID* registerID = variable(ident).local();
4003 if (!registerID)
4004 return false;
4005 return registerID->index() == CallFrame::argumentOffset(argumentNumber);
4006}
4007
4008bool BytecodeGenerator::emitReadOnlyExceptionIfNeeded(const Variable& variable)
4009{
4010 // If we're in strict mode, we always throw.
4011 // If we're not in strict mode, we throw for "const" variables but not the function callee.
4012 if (isStrictMode() || variable.isConst()) {
4013 emitThrowTypeError(Identifier::fromString(m_vm, ReadonlyPropertyWriteError));
4014 return true;
4015 }
4016 return false;
4017}
4018
void BytecodeGenerator::emitEnumeration(ThrowableExpressionData* node, ExpressionNode* subjectNode, const ScopedLambda<void(BytecodeGenerator&, RegisterID*)>& callBack, ForOfNode* forLoopNode, RegisterID* forLoopSymbolTable)
{
    // Emits a full iteration loop over `subjectNode`: obtains the (possibly
    // async) iterator, runs `callBack` once per produced value, and emits the
    // IteratorClose sequences required when the loop is exited abruptly
    // (a thrown exception or a break).
    bool isForAwait = forLoopNode ? forLoopNode->isForAwait() : false;
    ASSERT(!isForAwait || (isForAwait && isAsyncFunctionParseMode(parseMode())));

    RefPtr<RegisterID> subject = newTemporary();
    emitNode(subject.get(), subjectNode);
    RefPtr<RegisterID> iterator = isForAwait ? emitGetAsyncIterator(subject.get(), node) : emitGetIterator(subject.get(), node);
    // Cache iterator.next up front so each iteration reuses the same function.
    RefPtr<RegisterID> nextMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().next);

    Ref<Label> loopDone = newLabel();
    Ref<Label> tryStartLabel = newLabel();
    Ref<Label> finallyViaThrowLabel = newLabel();
    Ref<Label> finallyLabel = newLabel();
    Ref<Label> catchLabel = newLabel();
    Ref<Label> endCatchLabel = newLabel();

    // RefPtr<Register> iterator's lifetime must be longer than IteratorCloseContext.
    FinallyContext finallyContext(*this, finallyLabel.get());
    pushFinallyControlFlowScope(finallyContext);

    {
        Ref<LabelScope> scope = newLabelScope(LabelScope::Loop);
        RefPtr<RegisterID> value = newTemporary();
        emitLoad(value.get(), jsUndefined());

        // Enter the loop at the "advance the iterator" step below.
        emitJump(*scope->continueTarget());

        Ref<Label> loopStart = newLabel();
        emitLabel(loopStart.get());
        emitLoopHint();

        // The loop body runs under a synthesized finally so that an exception
        // in `callBack` still closes the iterator.
        emitLabel(tryStartLabel.get());
        TryData* tryData = pushTry(tryStartLabel.get(), finallyViaThrowLabel.get(), HandlerType::SynthesizedFinally);
        callBack(*this, value.get());
        emitJump(*scope->continueTarget());

        // IteratorClose sequence for abrupt completions.
        {
            // Finally block for the enumeration.
            emitLabel(finallyViaThrowLabel.get());
            popTry(tryData, finallyViaThrowLabel.get());

            Ref<Label> finallyBodyLabel = newLabel();
            RefPtr<RegisterID> finallyExceptionRegister = newTemporary();

            // Entered via a throw: stash the exception so it can be re-thrown
            // after the iterator has been closed.
            emitOutOfLineFinallyHandler(finallyContext.completionValueRegister(), finallyContext.completionTypeRegister(), tryData);
            move(finallyExceptionRegister.get(), finallyContext.completionValueRegister());
            emitJump(finallyBodyLabel.get());

            // Entered via normal finally control flow: no pending exception.
            emitLabel(finallyLabel.get());
            moveEmptyValue(finallyExceptionRegister.get());

            // Finally fall through case.
            emitLabel(finallyBodyLabel.get());
            restoreScopeRegister();

            Ref<Label> finallyDone = newLabel();

            // IteratorClose: skip the close entirely if iterator.return is undefined.
            RefPtr<RegisterID> returnMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().returnKeyword);
            emitJumpIfTrue(emitIsUndefined(newTemporary(), returnMethod.get()), finallyDone.get());

            // The iterator.return() call itself runs under a synthesized catch;
            // see the comment on the catch block below.
            Ref<Label> returnCallTryStart = newLabel();
            emitLabel(returnCallTryStart.get());
            TryData* returnCallTryData = pushTry(returnCallTryStart.get(), catchLabel.get(), HandlerType::SynthesizedCatch);

            CallArguments returnArguments(*this, nullptr);
            move(returnArguments.thisRegister(), iterator.get());
            emitCall(value.get(), returnMethod.get(), NoExpectedFunction, returnArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

            if (isForAwait)
                emitAwait(value.get());

            // iterator.return() must produce an object, else TypeError.
            emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), finallyDone.get());
            emitThrowTypeError("Iterator result interface is not an object."_s);

            emitLabel(finallyDone.get());
            emitFinallyCompletion(finallyContext, endCatchLabel.get());

            popTry(returnCallTryData, finallyDone.get());

            // Catch block for exceptions that may be thrown while calling the return
            // handler in the enumeration finally block. The only reason we need this
            // catch block is because if entered the above finally block due to a thrown
            // exception, then we want to re-throw the original exception on exiting
            // the finally block. Otherwise, we'll let any new exception pass through.
            {
                emitLabel(catchLabel.get());

                RefPtr<RegisterID> exceptionRegister = newTemporary();
                emitOutOfLineFinallyHandler(exceptionRegister.get(), finallyContext.completionTypeRegister(), returnCallTryData);
                // Since this is a synthesized catch block and we're guaranteed to never need
                // to resolve any symbols from the scope, we can skip restoring the scope
                // register here.

                // Prefer the original exception (if any) over the one raised by
                // iterator.return().
                Ref<Label> throwLabel = newLabel();
                emitJumpIfTrue(emitIsEmpty(newTemporary(), finallyExceptionRegister.get()), throwLabel.get());
                move(exceptionRegister.get(), finallyExceptionRegister.get());

                emitLabel(throwLabel.get());
                emitThrow(exceptionRegister.get());

                emitLabel(endCatchLabel.get());
            }
        }

        // "continue" target: refresh per-iteration bindings, then advance.
        emitLabel(*scope->continueTarget());
        if (forLoopNode) {
            RELEASE_ASSERT(forLoopNode->isForOfNode());
            prepareLexicalScopeForNextForLoopIteration(forLoopNode, forLoopSymbolTable);
            emitDebugHook(forLoopNode->lexpr());
        }

        {
            // Advance: call next(), exit when result.done is truthy, otherwise
            // loop with result.value.
            emitIteratorNext(value.get(), nextMethod.get(), iterator.get(), node, isForAwait ? EmitAwait::Yes : EmitAwait::No);

            emitJumpIfTrue(emitGetById(newTemporary(), value.get(), propertyNames().done), loopDone.get());
            emitGetById(value.get(), value.get(), propertyNames().value);
            emitJump(loopStart.get());
        }

        bool breakLabelIsBound = scope->breakTargetMayBeBound();
        if (breakLabelIsBound)
            emitLabel(scope->breakTarget());
        popFinallyControlFlowScope();
        if (breakLabelIsBound) {
            // IteratorClose sequence for control flow that exits the loop via break.
            emitIteratorClose(iterator.get(), node, isForAwait ? EmitAwait::Yes : EmitAwait::No);
        }
    }
    emitLabel(loopDone.get());
}
4151
4152RegisterID* BytecodeGenerator::emitGetTemplateObject(RegisterID* dst, TaggedTemplateNode* taggedTemplate)
4153{
4154 TemplateObjectDescriptor::StringVector rawStrings;
4155 TemplateObjectDescriptor::OptionalStringVector cookedStrings;
4156
4157 TemplateStringListNode* templateString = taggedTemplate->templateLiteral()->templateStrings();
4158 for (; templateString; templateString = templateString->next()) {
4159 auto* string = templateString->value();
4160 ASSERT(string->raw());
4161 rawStrings.append(string->raw()->impl());
4162 if (!string->cooked())
4163 cookedStrings.append(WTF::nullopt);
4164 else
4165 cookedStrings.append(string->cooked()->impl());
4166 }
4167 RefPtr<RegisterID> constant = addTemplateObjectConstant(TemplateObjectDescriptor::create(WTFMove(rawStrings), WTFMove(cookedStrings)), taggedTemplate->endOffset());
4168 if (!dst)
4169 return constant.get();
4170 return move(dst, constant.get());
4171}
4172
4173RegisterID* BytecodeGenerator::emitGetGlobalPrivate(RegisterID* dst, const Identifier& property)
4174{
4175 dst = tempDestination(dst);
4176 Variable var = variable(property);
4177 if (RegisterID* local = var.local())
4178 return move(dst, local);
4179
4180 RefPtr<RegisterID> scope = newTemporary();
4181 move(scope.get(), emitResolveScope(scope.get(), var));
4182 return emitGetFromScope(dst, scope.get(), var, ThrowIfNotFound);
4183}
4184
RegisterID* BytecodeGenerator::emitGetEnumerableLength(RegisterID* dst, RegisterID* base)
{
    // Thin wrapper: emits op_get_enumerable_length into dst and returns dst.
    OpGetEnumerableLength::emit(this, dst, base);
    return dst;
}
4190
RegisterID* BytecodeGenerator::emitHasGenericProperty(RegisterID* dst, RegisterID* base, RegisterID* propertyName)
{
    // Thin wrapper: emits op_has_generic_property into dst and returns dst.
    OpHasGenericProperty::emit(this, dst, base, propertyName);
    return dst;
}
4196
RegisterID* BytecodeGenerator::emitHasIndexedProperty(RegisterID* dst, RegisterID* base, RegisterID* propertyName)
{
    // Thin wrapper: emits op_has_indexed_property into dst and returns dst.
    OpHasIndexedProperty::emit(this, dst, base, propertyName);
    return dst;
}
4202
RegisterID* BytecodeGenerator::emitHasStructureProperty(RegisterID* dst, RegisterID* base, RegisterID* propertyName, RegisterID* enumerator)
{
    // Thin wrapper: emits op_has_structure_property into dst and returns dst.
    OpHasStructureProperty::emit(this, dst, base, propertyName, enumerator);
    return dst;
}
4208
RegisterID* BytecodeGenerator::emitGetPropertyEnumerator(RegisterID* dst, RegisterID* base)
{
    // Thin wrapper: emits op_get_property_enumerator into dst and returns dst.
    OpGetPropertyEnumerator::emit(this, dst, base);
    return dst;
}
4214
RegisterID* BytecodeGenerator::emitEnumeratorStructurePropertyName(RegisterID* dst, RegisterID* enumerator, RegisterID* index)
{
    // Thin wrapper: emits op_enumerator_structure_pname into dst and returns dst.
    OpEnumeratorStructurePname::emit(this, dst, enumerator, index);
    return dst;
}
4220
RegisterID* BytecodeGenerator::emitEnumeratorGenericPropertyName(RegisterID* dst, RegisterID* enumerator, RegisterID* index)
{
    // Thin wrapper: emits op_enumerator_generic_pname into dst and returns dst.
    OpEnumeratorGenericPname::emit(this, dst, enumerator, index);
    return dst;
}
4226
RegisterID* BytecodeGenerator::emitToIndexString(RegisterID* dst, RegisterID* index)
{
    // Thin wrapper: emits op_to_index_string into dst and returns dst.
    OpToIndexString::emit(this, dst, index);
    return dst;
}
4232
RegisterID* BytecodeGenerator::emitIsCellWithType(RegisterID* dst, RegisterID* src, JSType type)
{
    // Thin wrapper: emits op_is_cell_with_type (checking src against `type`)
    // into dst and returns dst.
    OpIsCellWithType::emit(this, dst, src, type);
    return dst;
}
4238
RegisterID* BytecodeGenerator::emitIsObject(RegisterID* dst, RegisterID* src)
{
    // Thin wrapper: emits op_is_object into dst and returns dst.
    OpIsObject::emit(this, dst, src);
    return dst;
}
4244
RegisterID* BytecodeGenerator::emitIsNumber(RegisterID* dst, RegisterID* src)
{
    // Thin wrapper: emits op_is_number into dst and returns dst.
    OpIsNumber::emit(this, dst, src);
    return dst;
}
4250
RegisterID* BytecodeGenerator::emitIsUndefined(RegisterID* dst, RegisterID* src)
{
    // Thin wrapper: emits op_is_undefined into dst and returns dst.
    OpIsUndefined::emit(this, dst, src);
    return dst;
}
4256
RegisterID* BytecodeGenerator::emitIsUndefinedOrNull(RegisterID* dst, RegisterID* src)
{
    // Thin wrapper: emits op_is_undefined_or_null into dst and returns dst.
    OpIsUndefinedOrNull::emit(this, dst, src);
    return dst;
}
4262
RegisterID* BytecodeGenerator::emitIsEmpty(RegisterID* dst, RegisterID* src)
{
    // Thin wrapper: emits op_is_empty (checks for the empty JSValue) into dst
    // and returns dst.
    OpIsEmpty::emit(this, dst, src);
    return dst;
}
4268
RegisterID* BytecodeGenerator::emitIteratorNext(RegisterID* dst, RegisterID* nextMethod, RegisterID* iterator, const ThrowableExpressionData* node, EmitAwait doEmitAwait)
{
    // Calls nextMethod with the iterator as |this|, placing the result in dst,
    // awaiting it when requested, then throws a TypeError unless the result is
    // an object.
    {
        CallArguments nextArguments(*this, nullptr);
        move(nextArguments.thisRegister(), iterator);
        emitCall(dst, nextMethod, NoExpectedFunction, nextArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

        // For async iteration, suspend until the result settles.
        if (doEmitAwait == EmitAwait::Yes)
            emitAwait(dst);
    }
    {
        // Iterator results must be objects; anything else is a TypeError.
        Ref<Label> typeIsObject = newLabel();
        emitJumpIfTrue(emitIsObject(newTemporary(), dst), typeIsObject.get());
        emitThrowTypeError("Iterator result interface is not an object."_s);
        emitLabel(typeIsObject.get());
    }
    return dst;
}
4287
RegisterID* BytecodeGenerator::emitIteratorNextWithValue(RegisterID* dst, RegisterID* nextMethod, RegisterID* iterator, RegisterID* value, const ThrowableExpressionData* node)
{
    // Calls nextMethod with the iterator as |this| and `value` as the single
    // argument, placing the result in dst. Unlike emitIteratorNext(), the
    // result is not checked to be an object here.
    {
        CallArguments nextArguments(*this, nullptr, 1);
        move(nextArguments.thisRegister(), iterator);
        move(nextArguments.argumentRegister(0), value);
        emitCall(dst, nextMethod, NoExpectedFunction, nextArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);
    }

    return dst;
}
4299
void BytecodeGenerator::emitIteratorClose(RegisterID* iterator, const ThrowableExpressionData* node, EmitAwait doEmitAwait)
{
    // Emits the IteratorClose sequence: if iterator.return is not undefined,
    // call it (awaiting the result when requested) and throw a TypeError unless
    // the call produced an object.
    Ref<Label> done = newLabel();
    RefPtr<RegisterID> returnMethod = emitGetById(newTemporary(), iterator, propertyNames().returnKeyword);
    // No return method: nothing to close.
    emitJumpIfTrue(emitIsUndefined(newTemporary(), returnMethod.get()), done.get());

    RefPtr<RegisterID> value = newTemporary();
    CallArguments returnArguments(*this, nullptr);
    move(returnArguments.thisRegister(), iterator);
    emitCall(value.get(), returnMethod.get(), NoExpectedFunction, returnArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

    if (doEmitAwait == EmitAwait::Yes)
        emitAwait(value.get());

    emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), done.get());
    emitThrowTypeError("Iterator result interface is not an object."_s);
    emitLabel(done.get());
}
4318
4319void BytecodeGenerator::pushIndexedForInScope(RegisterID* localRegister, RegisterID* indexRegister)
4320{
4321 if (!localRegister)
4322 return;
4323 unsigned bodyBytecodeStartOffset = instructions().size();
4324 m_forInContextStack.append(adoptRef(*new IndexedForInContext(localRegister, indexRegister, bodyBytecodeStartOffset)));
4325}
4326
4327void BytecodeGenerator::popIndexedForInScope(RegisterID* localRegister)
4328{
4329 if (!localRegister)
4330 return;
4331 unsigned bodyBytecodeEndOffset = instructions().size();
4332 m_forInContextStack.last()->asIndexedForInContext().finalize(*this, m_codeBlock.get(), bodyBytecodeEndOffset);
4333 m_forInContextStack.removeLast();
4334}
4335
RegisterID* BytecodeGenerator::emitLoadArrowFunctionLexicalEnvironment(const Identifier& identifier)
{
    // Resolves the scope where `identifier` (e.g. |this|, new.target) is stored
    // for arrow-function-related contexts.
    ASSERT(m_codeBlock->isArrowFunction() || m_codeBlock->isArrowFunctionContext() || constructorKind() == ConstructorKind::Extends || m_codeType == EvalCode);

    return emitResolveScope(nullptr, variable(identifier, ThisResolutionType::Scoped));
}
4342
void BytecodeGenerator::emitLoadThisFromArrowFunctionLexicalEnvironment()
{
    // Loads |this| out of the enclosing function's lexical environment into the
    // |this| register. A missing entry does not throw (DoNotThrowIfNotFound).
    emitGetFromScope(thisRegister(), emitLoadArrowFunctionLexicalEnvironment(propertyNames().thisIdentifier), variable(propertyNames().thisIdentifier, ThisResolutionType::Scoped), DoNotThrowIfNotFound);
}
4347
4348RegisterID* BytecodeGenerator::emitLoadNewTargetFromArrowFunctionLexicalEnvironment()
4349{
4350 Variable newTargetVar = variable(propertyNames().builtinNames().newTargetLocalPrivateName());
4351
4352 return emitGetFromScope(m_newTargetRegister, emitLoadArrowFunctionLexicalEnvironment(propertyNames().builtinNames().newTargetLocalPrivateName()), newTargetVar, ThrowIfNotFound);
4353
4354}
4355
4356RegisterID* BytecodeGenerator::emitLoadDerivedConstructorFromArrowFunctionLexicalEnvironment()
4357{
4358 Variable protoScopeVar = variable(propertyNames().builtinNames().derivedConstructorPrivateName());
4359 return emitGetFromScope(newTemporary(), emitLoadArrowFunctionLexicalEnvironment(propertyNames().builtinNames().derivedConstructorPrivateName()), protoScopeVar, ThrowIfNotFound);
4360}
4361
RegisterID* BytecodeGenerator::ensureThis()
{
    // In derived-constructor-related contexts, |this| may live in the arrow
    // function lexical environment and may still be empty (TDZ) until super()
    // has run.
    if (constructorKind() == ConstructorKind::Extends || isDerivedConstructorContext()) {
        if ((needsToUpdateArrowFunctionContext() && isSuperCallUsedInInnerArrowFunction()) || m_codeBlock->parseMode() == SourceParseMode::AsyncArrowFunctionBodyMode)
            emitLoadThisFromArrowFunctionLexicalEnvironment();

        // Trap uses of |this| while it is still in its temporal dead zone.
        emitTDZCheck(thisRegister());
    }

    return thisRegister();
}
4373
4374bool BytecodeGenerator::isThisUsedInInnerArrowFunction()
4375{
4376 return m_scopeNode->doAnyInnerArrowFunctionsUseThis() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperProperty() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4377}
4378
4379bool BytecodeGenerator::isArgumentsUsedInInnerArrowFunction()
4380{
4381 return m_scopeNode->doAnyInnerArrowFunctionsUseArguments() || m_scopeNode->doAnyInnerArrowFunctionsUseEval();
4382}
4383
4384bool BytecodeGenerator::isNewTargetUsedInInnerArrowFunction()
4385{
4386 return m_scopeNode->doAnyInnerArrowFunctionsUseNewTarget() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4387}
4388
4389bool BytecodeGenerator::isSuperUsedInInnerArrowFunction()
4390{
4391 return m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseSuperProperty() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4392}
4393
4394bool BytecodeGenerator::isSuperCallUsedInInnerArrowFunction()
4395{
4396 return m_scopeNode->doAnyInnerArrowFunctionsUseSuperCall() || m_scopeNode->doAnyInnerArrowFunctionsUseEval() || m_codeBlock->usesEval();
4397}
4398
4399void BytecodeGenerator::emitPutNewTargetToArrowFunctionContextScope()
4400{
4401 if (isNewTargetUsedInInnerArrowFunction()) {
4402 ASSERT(m_arrowFunctionContextLexicalEnvironmentRegister);
4403
4404 Variable newTargetVar = variable(propertyNames().builtinNames().newTargetLocalPrivateName());
4405 emitPutToScope(m_arrowFunctionContextLexicalEnvironmentRegister, newTargetVar, newTarget(), DoNotThrowIfNotFound, InitializationMode::Initialization);
4406 }
4407}
4408
4409void BytecodeGenerator::emitPutDerivedConstructorToArrowFunctionContextScope()
4410{
4411 if (needsDerivedConstructorInArrowFunctionLexicalEnvironment()) {
4412 ASSERT(m_arrowFunctionContextLexicalEnvironmentRegister);
4413
4414 Variable protoScope = variable(propertyNames().builtinNames().derivedConstructorPrivateName());
4415 emitPutToScope(m_arrowFunctionContextLexicalEnvironmentRegister, protoScope, &m_calleeRegister, DoNotThrowIfNotFound, InitializationMode::Initialization);
4416 }
4417}
4418
4419void BytecodeGenerator::emitPutThisToArrowFunctionContextScope()
4420{
4421 if (isThisUsedInInnerArrowFunction() || (m_scopeNode->usesSuperCall() && m_codeType == EvalCode)) {
4422 ASSERT(isDerivedConstructorContext() || m_arrowFunctionContextLexicalEnvironmentRegister != nullptr);
4423
4424 Variable thisVar = variable(propertyNames().thisIdentifier, ThisResolutionType::Scoped);
4425 RegisterID* scope = isDerivedConstructorContext() ? emitLoadArrowFunctionLexicalEnvironment(propertyNames().thisIdentifier) : m_arrowFunctionContextLexicalEnvironmentRegister;
4426
4427 emitPutToScope(scope, thisVar, thisRegister(), ThrowIfNotFound, InitializationMode::NotInitialization);
4428 }
4429}
4430
4431void BytecodeGenerator::pushStructureForInScope(RegisterID* localRegister, RegisterID* indexRegister, RegisterID* propertyRegister, RegisterID* enumeratorRegister)
4432{
4433 if (!localRegister)
4434 return;
4435 unsigned bodyBytecodeStartOffset = instructions().size();
4436 m_forInContextStack.append(adoptRef(*new StructureForInContext(localRegister, indexRegister, propertyRegister, enumeratorRegister, bodyBytecodeStartOffset)));
4437}
4438
4439void BytecodeGenerator::popStructureForInScope(RegisterID* localRegister)
4440{
4441 if (!localRegister)
4442 return;
4443 unsigned bodyBytecodeEndOffset = instructions().size();
4444 m_forInContextStack.last()->asStructureForInContext().finalize(*this, m_codeBlock.get(), bodyBytecodeEndOffset);
4445 m_forInContextStack.removeLast();
4446}
4447
RegisterID* BytecodeGenerator::emitRestParameter(RegisterID* result, unsigned numParametersToSkip)
{
    // Materializes a rest parameter: op_get_rest_length computes how many
    // arguments remain after skipping the named parameters, and op_create_rest
    // builds the rest array from them into `result`.
    RefPtr<RegisterID> restArrayLength = newTemporary();
    OpGetRestLength::emit(this, restArrayLength.get(), numParametersToSkip);

    OpCreateRest::emit(this, result, restArrayLength.get(), numParametersToSkip);

    return result;
}
4457
void BytecodeGenerator::emitRequireObjectCoercible(RegisterID* value, const String& error)
{
    // Throws a TypeError with the given message when `value` is null or
    // undefined; otherwise falls through to `target`.
    // FIXME: op_jneq_null treats "undetectable" objects as null/undefined. RequireObjectCoercible
    // thus incorrectly throws a TypeError for interfaces like HTMLAllCollection.
    Ref<Label> target = newLabel();
    OpJneqNull::emit(this, value, target->bind(this));
    emitThrowTypeError(error);
    emitLabel(target.get());
}
4467
void BytecodeGenerator::emitYieldPoint(RegisterID* argument, JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason result)
{
    // Emits a single suspension point: bump the generator state, record the
    // suspend reason (async generators only), split every open try range at the
    // yield, emit op_yield, and resume at the merge point.
    Ref<Label> mergePoint = newLabel();
    unsigned yieldPointIndex = m_yieldPoints++;
    emitGeneratorStateChange(yieldPointIndex + 1);

    if (parseMode() == SourceParseMode::AsyncGeneratorBodyMode) {
        int suspendReason = static_cast<int32_t>(result);
        emitPutById(generatorRegister(), propertyNames().builtinNames().asyncGeneratorSuspendReasonPrivateName(), emitLoad(nullptr, jsNumber(suspendReason)));
    }

    // Split the try range here: close out each open try range at the save
    // point so the yield itself is not covered by any handler.
    Ref<Label> savePoint = newEmittedLabel();
    for (unsigned i = m_tryContextStack.size(); i--;) {
        TryContext& context = m_tryContextStack[i];
        m_tryRanges.append(TryRange {
            context.start.copyRef(),
            savePoint.copyRef(),
            context.tryData
        });
        // Try range will be restarted at the merge point.
        context.start = mergePoint.get();
    }
    Vector<TryContext> savedTryContextStack;
    m_tryContextStack.swap(savedTryContextStack);


#if CPU(NEEDS_ALIGNED_ACCESS)
    // conservatively align for the bytecode rewriter: it will delete this yield and
    // append a fragment, so we make sure that the start of the fragments is aligned
    while (m_writer.position() % OpcodeSize::Wide32)
        OpNop::emit<OpcodeSize::Narrow>(this);
#endif
    OpYield::emit(this, generatorFrameRegister(), yieldPointIndex, argument);

    // Restore the try contexts, whose start offsets were updated to the merge point.
    m_tryContextStack.swap(savedTryContextStack);
    emitLabel(mergePoint.get());
}
4507
RegisterID* BytecodeGenerator::emitYield(RegisterID* argument, JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason result)
{
    // Emits a yield and dispatches on the mode the generator was resumed with:
    // NormalMode continues after the yield, ThrowMode rethrows the resumption
    // value, and anything else (ReturnMode) returns it, honoring finally blocks.
    emitYieldPoint(argument, result);

    Ref<Label> normalLabel = newLabel();
    RefPtr<RegisterID> condition = newTemporary();
    emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::NormalMode))));
    emitJumpIfTrue(condition.get(), normalLabel.get());

    Ref<Label> throwLabel = newLabel();
    emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::ThrowMode))));
    emitJumpIfTrue(condition.get(), throwLabel.get());
    // Return.
    {
        RefPtr<RegisterID> returnRegister = generatorValueRegister();
        bool hasFinally = emitReturnViaFinallyIfNeeded(returnRegister.get());
        if (!hasFinally)
            emitReturn(returnRegister.get());
    }

    // Throw.
    emitLabel(throwLabel.get());
    emitThrow(generatorValueRegister());

    // Normal.
    emitLabel(normalLabel.get());
    return generatorValueRegister();
}
4536
RegisterID* BytecodeGenerator::emitCallIterator(RegisterID* iterator, RegisterID* argument, ThrowableExpressionData* node)
{
    // Invokes the iterator method in `iterator` with `argument` as |this|,
    // writing the resulting iterator object back into the same register.
    CallArguments args(*this, nullptr);
    move(args.thisRegister(), argument);
    emitCall(iterator, iterator, NoExpectedFunction, args, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

    return iterator;
}
4545
void BytecodeGenerator::emitAwait(RegisterID* value)
{
    // An await is emitted as a yield with the Await suspend reason; after
    // resumption the settled value is copied back from the generator's value
    // register into `value`.
    emitYield(value, JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason::Await);
    move(value, generatorValueRegister());
}
4551
RegisterID* BytecodeGenerator::emitGetIterator(RegisterID* argument, ThrowableExpressionData* node)
{
    // Loads argument[Symbol.iterator] and calls it to obtain the iterator object.
    RefPtr<RegisterID> iterator = emitGetById(newTemporary(), argument, propertyNames().iteratorSymbol);
    emitCallIterator(iterator.get(), argument, node);

    return iterator.get();
}
4559
RegisterID* BytecodeGenerator::emitGetAsyncIterator(RegisterID* argument, ThrowableExpressionData* node)
{
    // Obtains an async iterator for `argument`: use argument[Symbol.asyncIterator]
    // if present; otherwise fall back to the sync iterator wrapped via the
    // @createAsyncFromSyncIterator builtin.
    RefPtr<RegisterID> iterator = emitGetById(newTemporary(), argument, propertyNames().asyncIteratorSymbol);
    Ref<Label> asyncIteratorNotFound = newLabel();
    Ref<Label> asyncIteratorFound = newLabel();
    Ref<Label> iteratorReceived = newLabel();

    // No async iterator (null or undefined): take the sync fallback path.
    emitJumpIfTrue(emitUnaryOp<OpEqNull>(newTemporary(), iterator.get()), asyncIteratorNotFound.get());

    emitJump(asyncIteratorFound.get());
    emitLabel(asyncIteratorNotFound.get());

    // Fallback: get the sync iterator and its next method, then wrap both with
    // @createAsyncFromSyncIterator.
    RefPtr<RegisterID> commonIterator = emitGetIterator(argument, node);
    move(iterator.get(), commonIterator.get());

    RefPtr<RegisterID> nextMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().next);

    auto varCreateAsyncFromSyncIterator = variable(propertyNames().builtinNames().createAsyncFromSyncIteratorPrivateName());
    RefPtr<RegisterID> scope = newTemporary();
    move(scope.get(), emitResolveScope(scope.get(), varCreateAsyncFromSyncIterator));
    RefPtr<RegisterID> createAsyncFromSyncIterator = emitGetFromScope(newTemporary(), scope.get(), varCreateAsyncFromSyncIterator, ThrowIfNotFound);

    CallArguments args(*this, nullptr, 2);
    emitLoad(args.thisRegister(), jsUndefined());

    move(args.argumentRegister(0), iterator.get());
    move(args.argumentRegister(1), nextMethod.get());

    // The builtin call has no user-visible source position; point at the start
    // of the enclosing scope node.
    JSTextPosition divot(m_scopeNode->firstLine(), m_scopeNode->startOffset(), m_scopeNode->lineStartOffset());
    emitCall(iterator.get(), createAsyncFromSyncIterator.get(), NoExpectedFunction, args, divot, divot, divot, DebuggableCall::No);

    emitJump(iteratorReceived.get());

    // Direct path: call the async iterator method to get the iterator object.
    emitLabel(asyncIteratorFound.get());
    emitCallIterator(iterator.get(), argument, node);
    emitLabel(iteratorReceived.get());

    return iterator.get();
}
4599
RegisterID* BytecodeGenerator::emitDelegateYield(RegisterID* argument, ThrowableExpressionData* node)
{
    // Emits the bytecode for yield* (delegated yield): repeatedly advance the
    // delegated iterator, yield each produced value out of this generator, and
    // forward the resume mode (normal / return / throw) back into the delegate.
    // Returns the register holding the delegate's final result value.
    RefPtr<RegisterID> value = newTemporary();
    {
        // Async generator bodies delegate through the async iteration protocol;
        // everything else uses the plain (sync) protocol.
        RefPtr<RegisterID> iterator = parseMode() == SourceParseMode::AsyncGeneratorBodyMode ? emitGetAsyncIterator(argument, node) : emitGetIterator(argument, node);
        RefPtr<RegisterID> nextMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().next);

        Ref<Label> loopDone = newLabel();
        {
            Ref<Label> nextElement = newLabel();
            // First iteration calls next() with undefined as the sent value.
            emitLoad(value.get(), jsUndefined());

            emitJump(nextElement.get());

            Ref<Label> loopStart = newLabel();
            emitLabel(loopStart.get());
            emitLoopHint();

            Ref<Label> branchOnResult = newLabel();
            {
                // Suspend here; on resume, dispatch on the generator's resume mode.
                emitYieldPoint(value.get(), JSAsyncGeneratorFunction::AsyncGeneratorSuspendReason::Yield);

                Ref<Label> normalLabel = newLabel();
                Ref<Label> returnLabel = newLabel();
                {
                    RefPtr<RegisterID> condition = newTemporary();
                    emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::NormalMode))));
                    emitJumpIfTrue(condition.get(), normalLabel.get());

                    emitEqualityOp<OpStricteq>(condition.get(), generatorResumeModeRegister(), emitLoad(nullptr, jsNumber(static_cast<int32_t>(JSGeneratorFunction::GeneratorResumeMode::ReturnMode))));
                    emitJumpIfTrue(condition.get(), returnLabel.get());

                    // Fallthrough to ThrowMode.
                }

                // Throw.
                {
                    Ref<Label> throwMethodFound = newLabel();
                    RefPtr<RegisterID> throwMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().throwKeyword);
                    // Jump if throwMethod is NOT undefined (emitIsUndefined was false).
                    emitJumpIfFalse(emitIsUndefined(newTemporary(), throwMethod.get()), throwMethodFound.get());

                    // No 'throw' method: close the delegated iterator (awaiting the
                    // close result in async generators), then raise a TypeError.
                    EmitAwait emitAwaitInIteratorClose = parseMode() == SourceParseMode::AsyncGeneratorBodyMode ? EmitAwait::Yes : EmitAwait::No;
                    emitIteratorClose(iterator.get(), node, emitAwaitInIteratorClose);

                    emitThrowTypeError("Delegated generator does not have a 'throw' method."_s);

                    // Forward the thrown value into the delegate via iterator.throw(value).
                    emitLabel(throwMethodFound.get());
                    CallArguments throwArguments(*this, nullptr, 1);
                    move(throwArguments.thisRegister(), iterator.get());
                    move(throwArguments.argumentRegister(0), generatorValueRegister());
                    emitCall(value.get(), throwMethod.get(), NoExpectedFunction, throwArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

                    emitJump(branchOnResult.get());
                }

                // Return.
                emitLabel(returnLabel.get());
                {
                    Ref<Label> returnMethodFound = newLabel();
                    RefPtr<RegisterID> returnMethod = emitGetById(newTemporary(), iterator.get(), propertyNames().returnKeyword);
                    // Jump if returnMethod is NOT undefined.
                    emitJumpIfFalse(emitIsUndefined(newTemporary(), returnMethod.get()), returnMethodFound.get());

                    // No 'return' method: just return the sent value from this generator.
                    move(value.get(), generatorValueRegister());

                    Ref<Label> returnSequence = newLabel();
                    emitJump(returnSequence.get());

                    // Forward the return into the delegate via iterator.return(value).
                    emitLabel(returnMethodFound.get());
                    CallArguments returnArguments(*this, nullptr, 1);
                    move(returnArguments.thisRegister(), iterator.get());
                    move(returnArguments.argumentRegister(0), generatorValueRegister());
                    emitCall(value.get(), returnMethod.get(), NoExpectedFunction, returnArguments, node->divot(), node->divotStart(), node->divotEnd(), DebuggableCall::No);

                    if (parseMode() == SourceParseMode::AsyncGeneratorBodyMode)
                        emitAwait(value.get());

                    // The iterator result must be an object.
                    Ref<Label> returnIteratorResultIsObject = newLabel();
                    emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), returnIteratorResultIsObject.get());
                    emitThrowTypeError("Iterator result interface is not an object."_s);

                    emitLabel(returnIteratorResultIsObject.get());

                    // result.done === true means the delegate finished; return its value.
                    // Otherwise re-enter the loop with result.value.
                    Ref<Label> returnFromGenerator = newLabel();
                    emitJumpIfTrue(emitGetById(newTemporary(), value.get(), propertyNames().done), returnFromGenerator.get());

                    emitGetById(value.get(), value.get(), propertyNames().value);
                    emitJump(loopStart.get());

                    emitLabel(returnFromGenerator.get());
                    emitGetById(value.get(), value.get(), propertyNames().value);

                    // Perform the actual return, routing through finally blocks if any.
                    emitLabel(returnSequence.get());
                    bool hasFinally = emitReturnViaFinallyIfNeeded(value.get());
                    if (!hasFinally)
                        emitReturn(value.get());
                }

                // Normal.
                emitLabel(normalLabel.get());
                move(value.get(), generatorValueRegister());
            }

            // Advance the delegate: value = iterator.next(value).
            emitLabel(nextElement.get());
            emitIteratorNextWithValue(value.get(), nextMethod.get(), iterator.get(), value.get(), node);

            emitLabel(branchOnResult.get());

            if (parseMode() == SourceParseMode::AsyncGeneratorBodyMode)
                emitAwait(value.get());

            // The iterator result must be an object.
            Ref<Label> iteratorValueIsObject = newLabel();
            emitJumpIfTrue(emitIsObject(newTemporary(), value.get()), iteratorValueIsObject.get());
            emitThrowTypeError("Iterator result interface is not an object."_s);
            emitLabel(iteratorValueIsObject.get());

            // Exit the loop when result.done is truthy; otherwise yield result.value.
            emitJumpIfTrue(emitGetById(newTemporary(), value.get(), propertyNames().done), loopDone.get());
            emitGetById(value.get(), value.get(), propertyNames().value);

            emitJump(loopStart.get());
        }
        emitLabel(loopDone.get());
    }

    // The delegate completed normally: the expression's value is result.value.
    emitGetById(value.get(), value.get(), propertyNames().value);
    return value.get();
}
4726
4727
4728void BytecodeGenerator::emitGeneratorStateChange(int32_t state)
4729{
4730 RegisterID* completedState = emitLoad(nullptr, jsNumber(state));
4731 emitPutById(generatorRegister(), propertyNames().builtinNames().generatorStatePrivateName(), completedState);
4732}
4733
bool BytecodeGenerator::emitJumpViaFinallyIfNeeded(int targetLabelScopeDepth, Label& jumpTarget)
{
    // If a break/continue jump to |jumpTarget| would cross one or more finally
    // scopes, reroute it: register the jump (as a jumpID) with the outermost
    // crossed finally, store that jumpID as the completion type, and jump into
    // the innermost finally instead. Returns false if no finally intervenes and
    // the caller should emit a direct jump.
    ASSERT(labelScopeDepth() - targetLabelScopeDepth >= 0);
    size_t numberOfScopesToCheckForFinally = labelScopeDepth() - targetLabelScopeDepth;
    ASSERT(numberOfScopesToCheckForFinally <= m_controlFlowScopeStack.size());
    if (!numberOfScopesToCheckForFinally)
        return false;

    // Walk the crossed scopes from innermost (top of stack) outward, collecting
    // the first (innermost) and last (outermost) finally contexts encountered.
    FinallyContext* innermostFinallyContext = nullptr;
    FinallyContext* outermostFinallyContext = nullptr;
    size_t scopeIndex = m_controlFlowScopeStack.size() - 1;
    while (numberOfScopesToCheckForFinally--) {
        ControlFlowScope* scope = &m_controlFlowScopeStack[scopeIndex--];
        if (scope->isFinallyScope()) {
            FinallyContext* finallyContext = scope->finallyContext;
            if (!innermostFinallyContext)
                innermostFinallyContext = finallyContext;
            outermostFinallyContext = finallyContext;
            // Each crossed finally must know it may dispatch break/continue completions.
            finallyContext->incNumberOfBreaksOrContinues();
        }
    }
    if (!outermostFinallyContext)
        return false; // No finallys to thread through.

    // The current bytecode offset uniquely identifies this jump site.
    auto jumpID = bytecodeOffsetToJumpID(instructions().size());
    int lexicalScopeIndex = labelScopeDepthToLexicalScopeIndex(targetLabelScopeDepth);
    outermostFinallyContext->registerJump(jumpID, lexicalScopeIndex, jumpTarget);

    // Record the jumpID as the completion type and enter the innermost finally;
    // emitFinallyCompletion() will thread the jump outward from there.
    emitLoad(innermostFinallyContext->completionTypeRegister(), jumpID);
    emitJump(*innermostFinallyContext->finallyLabel());
    return true; // We'll be jumping to a finally block.
}
4766
4767bool BytecodeGenerator::emitReturnViaFinallyIfNeeded(RegisterID* returnRegister)
4768{
4769 size_t numberOfScopesToCheckForFinally = m_controlFlowScopeStack.size();
4770 if (!numberOfScopesToCheckForFinally)
4771 return false;
4772
4773 FinallyContext* innermostFinallyContext = nullptr;
4774 while (numberOfScopesToCheckForFinally) {
4775 size_t scopeIndex = --numberOfScopesToCheckForFinally;
4776 ControlFlowScope* scope = &m_controlFlowScopeStack[scopeIndex];
4777 if (scope->isFinallyScope()) {
4778 FinallyContext* finallyContext = scope->finallyContext;
4779 if (!innermostFinallyContext)
4780 innermostFinallyContext = finallyContext;
4781 finallyContext->setHandlesReturns();
4782 }
4783 }
4784 if (!innermostFinallyContext)
4785 return false; // No finallys to thread through.
4786
4787 emitLoad(innermostFinallyContext->completionTypeRegister(), CompletionType::Return);
4788 move(innermostFinallyContext->completionValueRegister(), returnRegister);
4789 emitJump(*innermostFinallyContext->finallyLabel());
4790 return true; // We'll be jumping to a finally block.
4791}
4792
4793void BytecodeGenerator::emitFinallyCompletion(FinallyContext& context, Label& normalCompletionLabel)
4794{
4795 if (context.numberOfBreaksOrContinues() || context.handlesReturns()) {
4796 emitJumpIf<OpStricteq>(context.completionTypeRegister(), CompletionType::Normal, normalCompletionLabel);
4797
4798 FinallyContext* outerContext = context.outerContext();
4799
4800 size_t numberOfJumps = context.numberOfJumps();
4801 ASSERT(outerContext || numberOfJumps == context.numberOfBreaksOrContinues());
4802
4803 // Handle Break or Continue completions that jumps into this FinallyContext.
4804 for (size_t i = 0; i < numberOfJumps; i++) {
4805 Ref<Label> nextLabel = newLabel();
4806 auto& jump = context.jumps(i);
4807 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), jump.jumpID, nextLabel.get());
4808
4809 // This case is for Break / Continue completions from an inner finally context
4810 // with a jump target that is not beyond the next outer finally context:
4811 //
4812 // try {
4813 // for (... stuff ...) {
4814 // try {
4815 // continue; // Sets completionType to jumpID of top of the for loop.
4816 // } finally {
4817 // } // Jump to top of the for loop on completion.
4818 // }
4819 // } finally {
4820 // }
4821 //
4822 // Since the jumpID is targetting a label that is inside the outer finally context,
4823 // we can jump to it directly on completion of this finally context: there is no intermediate
4824 // finally blocks to run. After the Break / Continue, we will contnue execution as normal.
4825 // So, we'll set the completionType to Normal (on behalf of the target) before we jump.
4826 // We can also set the completion value to undefined, but it will never be used for normal
4827 // completion anyway. So, we'll skip setting it.
4828
4829 restoreScopeRegister(jump.targetLexicalScopeIndex);
4830 emitLoad(context.completionTypeRegister(), CompletionType::Normal);
4831 emitJump(jump.targetLabel.get());
4832
4833 emitLabel(nextLabel.get());
4834 }
4835
4836 // Handle completions that take us out of this FinallyContext.
4837 if (outerContext) {
4838 if (context.handlesReturns()) {
4839 Ref<Label> isNotReturnLabel = newLabel();
4840 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), CompletionType::Return, isNotReturnLabel.get());
4841
4842 // This case is for Return completion from an inner finally context:
4843 //
4844 // try {
4845 // try {
4846 // return result; // Sets completionType to Return, and completionValue to result.
4847 // } finally {
4848 // } // Jump to outer finally on completion.
4849 // } finally {
4850 // }
4851 //
4852 // Since we know there's at least one outer finally context (beyond the current context),
4853 // we cannot actually return from here. Instead, we pass the completionType and completionValue
4854 // on to the next outer finally, and let it decide what to do next on its completion. The
4855 // outer finally may or may not actual return depending on whether it encounters an abrupt
4856 // completion in its body that overrrides this Return completion.
4857
4858 move(outerContext->completionTypeRegister(), context.completionTypeRegister());
4859 move(outerContext->completionValueRegister(), context.completionValueRegister());
4860 emitJump(*outerContext->finallyLabel());
4861
4862 emitLabel(isNotReturnLabel.get());
4863 }
4864
4865 bool hasBreaksOrContinuesThatEscapeCurrentFinally = context.numberOfBreaksOrContinues() > numberOfJumps;
4866 if (hasBreaksOrContinuesThatEscapeCurrentFinally) {
4867 Ref<Label> isThrowOrNormalLabel = newLabel();
4868 emitJumpIf<OpBeloweq>(context.completionTypeRegister(), CompletionType::Throw, isThrowOrNormalLabel.get());
4869
4870 // A completionType above Throw means we have a Break or Continue encoded as a jumpID.
4871 // We already ruled out Return above.
4872 static_assert(CompletionType::Throw < CompletionType::Return && CompletionType::Throw < CompletionType::Return, "jumpIDs are above CompletionType::Return");
4873
4874 // This case is for Break / Continue completions in an inner finally context:
4875 //
4876 // 10: label:
4877 // 11: try {
4878 // 12: try {
4879 // 13: for (... stuff ...)
4880 // 14: break label; // Sets completionType to jumpID of label.
4881 // 15: } finally {
4882 // 16: } // Jumps to outer finally on completion.
4883 // 17: } finally {
4884 // 18: }
4885 //
4886 // The break (line 14) says to continue execution at the label at line 10. Before we can
4887 // goto line 10, the inner context's finally (line 15) needs to be run, followed by the
4888 // outer context's finally (line 17). 'outerContext' being non-null above tells us that
4889 // there is at least one outer finally context that we need to run after we complete the
4890 // current finally. Note that unless the body of the outer finally abruptly completes in a
4891 // different way, that outer finally also needs to complete with a Break / Continue to
4892 // the same target label. Hence, we need to pass the jumpID in this finally's completionTypeRegister
4893 // to the outer finally. The completion value for Break and Continue according to the spec
4894 // is undefined, but it won't ever be used. So, we'll skip setting it.
4895 //
4896 // Note that all we're doing here is passing the Break / Continue completion to the next
4897 // outer finally context. We don't worry about finally contexts beyond that. It is the
4898 // responsibility of the next outer finally to determine what to do next at its completion,
4899 // and pass on to the next outer context if present and needed.
4900
4901 move(outerContext->completionTypeRegister(), context.completionTypeRegister());
4902 emitJump(*outerContext->finallyLabel());
4903
4904 emitLabel(isThrowOrNormalLabel.get());
4905 }
4906
4907 } else {
4908 // We are the outermost finally.
4909 if (context.handlesReturns()) {
4910 Ref<Label> notReturnLabel = newLabel();
4911 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), CompletionType::Return, notReturnLabel.get());
4912
4913 // This case is for Return completion from the outermost finally context:
4914 //
4915 // try {
4916 // return result; // Sets completionType to Return, and completionValue to result.
4917 // } finally {
4918 // } // Executes the return of the completionValue.
4919 //
4920 // Since we know there's no outer finally context (beyond the current context) to run,
4921 // we can actually execute a return for this Return completion. The value to return
4922 // is whatever is in the completionValueRegister.
4923
4924 emitWillLeaveCallFrameDebugHook();
4925 emitReturn(context.completionValueRegister(), ReturnFrom::Finally);
4926
4927 emitLabel(notReturnLabel.get());
4928 }
4929 }
4930 }
4931
4932 // By now, we've rule out all Break / Continue / Return completions above. The only remaining
4933 // possibilities are Normal or Throw.
4934
4935 emitJumpIf<OpNstricteq>(context.completionTypeRegister(), CompletionType::Throw, normalCompletionLabel);
4936
4937 // We get here because we entered this finally context with Throw completionType (i.e. we have
4938 // an exception that we need to rethrow), and we didn't encounter a different abrupt completion
4939 // that overrides that incoming completionType. All we have to do here is re-throw the exception
4940 // captured in the completionValue.
4941 //
4942 // Note that unlike for Break / Continue / Return, we don't need to worry about outer finally
4943 // contexts. This is because any outer finally context (if present) will have its own exception
4944 // handler, which will take care of receiving the Throw completion, and re-capturing the exception
4945 // in its completionValue.
4946
4947 emitThrow(context.completionValueRegister());
4948}
4949
4950template<typename CompareOp>
4951void BytecodeGenerator::emitJumpIf(RegisterID* completionTypeRegister, CompletionType type, Label& jumpTarget)
4952{
4953 RefPtr<RegisterID> tempRegister = newTemporary();
4954 RegisterID* valueConstant = addConstantValue(jsNumber(static_cast<int>(type)));
4955 OperandTypes operandTypes = OperandTypes(ResultType::numberTypeIsInt32(), ResultType::unknownType());
4956
4957 auto equivalenceResult = emitBinaryOp<CompareOp>(tempRegister.get(), completionTypeRegister, valueConstant, operandTypes);
4958 emitJumpIfTrue(equivalenceResult, jumpTarget);
4959}
4960
void ForInContext::finalize(BytecodeGenerator& generator, UnlinkedCodeBlock* codeBlock, unsigned bodyBytecodeEndOffset)
{
    // Scans the loop body's bytecode and invalidates this context if any
    // instruction writes to the loop iteration variable, disabling the fast-path
    // rewrites performed by the subclass finalizers.
    //
    // Lexically invalidating ForInContexts is kind of weak sauce, but it only occurs if
    // either of the following conditions is true:
    //
    // (1) The loop iteration variable is re-assigned within the body of the loop.
    // (2) The loop iteration variable is captured in the lexical scope of the function.
    //
    // These two situations occur sufficiently rarely that it's okay to use this style of
    // "analysis" to make iteration faster. If we didn't want to do this, we would either have
    // to perform some flow-sensitive analysis to see if/when the loop iteration variable was
    // reassigned, or we'd have to resort to runtime checks to see if the variable had been
    // reassigned from its original value.

    // Stop early once invalidated; there's nothing more to learn.
    for (unsigned offset = bodyBytecodeStartOffset(); isValid() && offset < bodyBytecodeEndOffset;) {
        auto instruction = generator.instructions().at(offset);
        OpcodeID opcodeID = instruction->opcodeID();

        ASSERT(opcodeID != op_enter);
        // If any def (write) of this instruction targets the loop variable's
        // register, the fast path is unsound.
        computeDefsForBytecodeOffset(codeBlock, opcodeID, instruction.ptr(), [&] (VirtualRegister operand) {
            if (local()->virtualRegister() == operand)
                invalidate();
        });
        offset += instruction->size();
    }
}
4987
void StructureForInContext::finalize(BytecodeGenerator& generator, UnlinkedCodeBlock* codeBlock, unsigned bodyBytecodeEndOffset)
{
    // First run the base-class scan; if the context is still valid, the recorded
    // op_get_direct_pname instructions are safe and nothing needs rewriting.
    Base::finalize(generator, codeBlock, bodyBytecodeEndOffset);
    if (isValid())
        return;

    // The fast path was invalidated: rewrite each recorded op_get_direct_pname
    // into an op_get_by_val on the original property register. Save the
    // last-emitted-instruction state so peephole bookkeeping can be restored after
    // we're done patching in the middle of the stream.
    OpcodeID lastOpcodeID = generator.m_lastOpcodeID;
    InstructionStream::MutableRef lastInstruction = generator.m_lastInstruction;
    for (const auto& instTuple : m_getInsts) {
        unsigned instIndex = std::get<0>(instTuple);
        int propertyRegIndex = std::get<1>(instTuple);
        auto instruction = generator.m_writer.ref(instIndex);
        auto end = instIndex + instruction->size();
        // These were recorded as Wide32 so the replacement is guaranteed to fit.
        ASSERT(instruction->isWide32());

        // Position the writer at the instruction being replaced.
        generator.m_writer.seek(instIndex);

        auto bytecode = instruction->as<OpGetDirectPname>();

        // disable peephole optimizations
        generator.m_lastOpcodeID = op_end;

        // Change the opcode to get_by_val.
        // 1. dst stays the same.
        // 2. base stays the same.
        // 3. property gets switched to the original property.
        OpGetByVal::emit<OpcodeSize::Wide32>(&generator, bytecode.m_dst, bytecode.m_base, VirtualRegister(propertyRegIndex));

        // 4. nop out the remaining bytes
        while (generator.m_writer.position() < end)
            OpNop::emit<OpcodeSize::Narrow>(&generator);
    }
    // Return the writer to the end of the stream, then restore the saved peephole
    // state if the current last-instruction record doesn't line up with the end
    // of the stream (i.e. it was clobbered by the patching above).
    generator.m_writer.seek(generator.m_writer.size());
    if (generator.m_lastInstruction.offset() + generator.m_lastInstruction->size() != generator.m_writer.size()) {
        generator.m_lastOpcodeID = lastOpcodeID;
        generator.m_lastInstruction = lastInstruction;
    }
}
5026
5027void IndexedForInContext::finalize(BytecodeGenerator& generator, UnlinkedCodeBlock* codeBlock, unsigned bodyBytecodeEndOffset)
5028{
5029 Base::finalize(generator, codeBlock, bodyBytecodeEndOffset);
5030 if (isValid())
5031 return;
5032
5033 for (const auto& instPair : m_getInsts) {
5034 unsigned instIndex = instPair.first;
5035 int propertyRegIndex = instPair.second;
5036 generator.m_writer.ref(instIndex)->cast<OpGetByVal>()->setProperty(VirtualRegister(propertyRegIndex), []() {
5037 ASSERT_NOT_REACHED();
5038 return VirtualRegister();
5039 });
5040 }
5041}
5042
5043void StaticPropertyAnalysis::record()
5044{
5045 auto* instruction = m_instructionRef.ptr();
5046 auto size = m_propertyIndexes.size();
5047 switch (instruction->opcodeID()) {
5048 case OpNewObject::opcodeID:
5049 instruction->cast<OpNewObject>()->setInlineCapacity(size, []() {
5050 return 255;
5051 });
5052 return;
5053 case OpCreateThis::opcodeID:
5054 instruction->cast<OpCreateThis>()->setInlineCapacity(size, []() {
5055 return 255;
5056 });
5057 return;
5058 default:
5059 ASSERT_NOT_REACHED();
5060 }
5061}
5062
void BytecodeGenerator::emitToThis()
{
    // Emit op_to_this in place on the |this| register, killing its previous value.
    OpToThis::emit(this, kill(&m_thisRegister));
    // Register this instruction's offset with the code block as a property-access site.
    m_codeBlock->addPropertyAccessInstruction(m_lastInstruction.offset());
}
5068
5069} // namespace JSC
5070
5071namespace WTF {
5072
5073void printInternal(PrintStream& out, JSC::Variable::VariableKind kind)
5074{
5075 switch (kind) {
5076 case JSC::Variable::NormalVariable:
5077 out.print("Normal");
5078 return;
5079 case JSC::Variable::SpecialVariable:
5080 out.print("Special");
5081 return;
5082 }
5083 RELEASE_ASSERT_NOT_REACHED();
5084}
5085
5086} // namespace WTF
5087
5088