Diffstat (limited to 'js/src')
-rw-r--r--  js/src/ds/Nestable.h                           |   66
-rw-r--r--  js/src/frontend/BytecodeControlStructures.cpp  |   84
-rw-r--r--  js/src/frontend/BytecodeControlStructures.h    |  175
-rw-r--r--  js/src/frontend/BytecodeEmitter.cpp            | 2263
-rw-r--r--  js/src/frontend/BytecodeEmitter.h              |   10
-rw-r--r--  js/src/frontend/EmitterScope.cpp               | 1092
-rw-r--r--  js/src/frontend/EmitterScope.h                 |  155
-rw-r--r--  js/src/frontend/ForOfLoopControl.cpp           |  167
-rw-r--r--  js/src/frontend/ForOfLoopControl.h             |   99
-rw-r--r--  js/src/frontend/ParseNode.h                    |   13
-rw-r--r--  js/src/frontend/Parser.h                       |    1
-rw-r--r--  js/src/frontend/SharedContext.h                |   50
-rw-r--r--  js/src/frontend/SwitchEmitter.cpp              |  424
-rw-r--r--  js/src/frontend/SwitchEmitter.h                |  470
-rw-r--r--  js/src/frontend/TDZCheckCache.h                |    2
-rw-r--r--  js/src/frontend/TryEmitter.cpp                 |  286
-rw-r--r--  js/src/frontend/TryEmitter.h                   |  117
-rw-r--r--  js/src/moz.build                               |    5
18 files changed, 3238 insertions, 2241 deletions
diff --git a/js/src/ds/Nestable.h b/js/src/ds/Nestable.h
new file mode 100644
index 0000000000..50712cb7f0
--- /dev/null
+++ b/js/src/ds/Nestable.h
@@ -0,0 +1,66 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef ds_Nestable_h
+#define ds_Nestable_h
+
+#include "mozilla/Assertions.h"
+#include "mozilla/Attributes.h"
+
+namespace js {
+
+// A base class for nestable structures.
+template <typename Concrete>
+class MOZ_STACK_CLASS Nestable
+{
+ Concrete** stack_;
+ Concrete* enclosing_;
+
+ protected:
+ explicit Nestable(Concrete** stack)
+ : stack_(stack),
+ enclosing_(*stack)
+ {
+ *stack_ = static_cast<Concrete*>(this);
+ }
+
+ // These methods are protected. Some derived classes, such as ParseContext,
+ // do not expose the ability to walk the stack.
+ Concrete* enclosing() const {
+ return enclosing_;
+ }
+
+ template <typename Predicate /* (Concrete*) -> bool */>
+ static Concrete* findNearest(Concrete* it, Predicate predicate) {
+ while (it && !predicate(it))
+ it = it->enclosing();
+ return it;
+ }
+
+ template <typename T>
+ static T* findNearest(Concrete* it) {
+ while (it && !it->template is<T>())
+ it = it->enclosing();
+ return it ? &it->template as<T>() : nullptr;
+ }
+
+ template <typename T, typename Predicate /* (T*) -> bool */>
+ static T* findNearest(Concrete* it, Predicate predicate) {
+ while (it && (!it->template is<T>() || !predicate(&it->template as<T>())))
+ it = it->enclosing();
+ return it ? &it->template as<T>() : nullptr;
+ }
+
+ public:
+ ~Nestable() {
+ MOZ_ASSERT(*stack_ == static_cast<Concrete*>(this));
+ *stack_ = enclosing_;
+ }
+};
+
+} // namespace js
+
+#endif /* ds_Nestable_h */
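
Nestable<Concrete> above is a CRTP base that maintains an intrusive, RAII-managed stack: constructing a subclass instance pushes it onto a caller-owned Concrete** head and the destructor pops it, which is why the class is MOZ_STACK_CLASS and why nesting must mirror C++ block scope. A minimal standalone sketch of the same pattern follows; the Frame type and the driver are hypothetical and not part of this patch.

#include <cassert>
#include <cstdio>

// Standalone re-creation of the push/pop pattern; not SpiderMonkey code.
template <typename Concrete>
class Nestable {
    Concrete** stack_;
    Concrete* enclosing_;

  protected:
    explicit Nestable(Concrete** stack)
      : stack_(stack), enclosing_(*stack)
    {
        *stack_ = static_cast<Concrete*>(this);   // push on construction
    }

    Concrete* enclosing() const { return enclosing_; }

  public:
    ~Nestable() {
        assert(*stack_ == static_cast<Concrete*>(this));
        *stack_ = enclosing_;                     // pop on destruction
    }
};

// A hypothetical nestable: each instance records how deeply it is nested.
struct Frame : Nestable<Frame> {
    int depth;
    explicit Frame(Frame** stack)
      : Nestable<Frame>(stack),
        depth(enclosing() ? enclosing()->depth + 1 : 1)
    {}
};

int main() {
    Frame* innermost = nullptr;          // caller-owned stack head
    {
        Frame outer(&innermost);         // innermost == &outer
        {
            Frame inner(&innermost);     // innermost == &inner
            printf("depth = %d\n", inner.depth);   // prints "depth = 2"
        }                                // ~Frame pops inner
        assert(innermost == &outer);
    }
    assert(innermost == nullptr);
    return 0;
}
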
diff --git a/js/src/frontend/BytecodeControlStructures.cpp b/js/src/frontend/BytecodeControlStructures.cpp
new file mode 100644
index 0000000000..281f1e9cfd
--- /dev/null
+++ b/js/src/frontend/BytecodeControlStructures.cpp
@@ -0,0 +1,84 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "frontend/BytecodeControlStructures.h"
+
+#include "frontend/BytecodeEmitter.h"
+#include "frontend/EmitterScope.h"
+
+using namespace js;
+using namespace js::frontend;
+
+NestableControl::NestableControl(BytecodeEmitter* bce, StatementKind kind)
+ : Nestable<NestableControl>(&bce->innermostNestableControl),
+ kind_(kind),
+ emitterScope_(bce->innermostEmitterScopeNoCheck())
+{}
+
+BreakableControl::BreakableControl(BytecodeEmitter* bce, StatementKind kind)
+ : NestableControl(bce, kind)
+{
+ MOZ_ASSERT(is<BreakableControl>());
+}
+
+bool
+BreakableControl::patchBreaks(BytecodeEmitter* bce)
+{
+ return bce->emitJumpTargetAndPatch(breaks);
+}
+
+LabelControl::LabelControl(BytecodeEmitter* bce, JSAtom* label, ptrdiff_t startOffset)
+ : BreakableControl(bce, StatementKind::Label),
+ label_(bce->cx, label),
+ startOffset_(startOffset)
+{}
+
+LoopControl::LoopControl(BytecodeEmitter* bce, StatementKind loopKind)
+ : BreakableControl(bce, loopKind),
+ tdzCache_(bce),
+ continueTarget({ -1 })
+{
+ MOZ_ASSERT(is<LoopControl>());
+
+ LoopControl* enclosingLoop = findNearest<LoopControl>(enclosing());
+
+ stackDepth_ = bce->stackDepth;
+ loopDepth_ = enclosingLoop ? enclosingLoop->loopDepth_ + 1 : 1;
+
+ int loopSlots;
+ if (loopKind == StatementKind::Spread || loopKind == StatementKind::ForOfLoop)
+ loopSlots = 3;
+ else if (loopKind == StatementKind::ForInLoop)
+ loopSlots = 2;
+ else
+ loopSlots = 0;
+
+ MOZ_ASSERT(loopSlots <= stackDepth_);
+
+ if (enclosingLoop) {
+ canIonOsr_ = (enclosingLoop->canIonOsr_ &&
+ stackDepth_ == enclosingLoop->stackDepth_ + loopSlots);
+ } else {
+ canIonOsr_ = stackDepth_ == loopSlots;
+ }
+}
+
+bool
+LoopControl::patchBreaksAndContinues(BytecodeEmitter* bce)
+{
+ MOZ_ASSERT(continueTarget.offset != -1);
+ if (!patchBreaks(bce))
+ return false;
+ bce->patchJumpsToTarget(continues, continueTarget);
+ return true;
+}
+
+TryFinallyControl::TryFinallyControl(BytecodeEmitter* bce, StatementKind kind)
+ : NestableControl(bce, kind),
+ emittingSubroutine_(false)
+{
+ MOZ_ASSERT(is<TryFinallyControl>());
+}
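
BreakableControl::patchBreaks and LoopControl::patchBreaksAndContinues above backpatch jumps that were emitted before their target was known: each break or continue appends a forward jump to the control's JumpList, and once the end of the loop (or continueTarget) has been emitted the whole list is patched at once. Below is a simplified, standalone sketch of that backpatching idea under a toy bytecode format; SpiderMonkey actually threads the pending list through the jump operands themselves, and the explicit vector of patch sites here is purely for illustration.

#include <cstdint>
#include <cstdio>
#include <vector>

struct MiniEmitter {
    std::vector<int32_t> code;            // one "word" per opcode/operand
    std::vector<size_t> breakPatches;     // operand offsets of unpatched break jumps

    // Emit a forward jump whose target is not known yet.
    void emitBreakJump() {
        code.push_back(/* JUMP */ 1);
        breakPatches.push_back(code.size());
        code.push_back(-1);               // placeholder operand
    }

    // Called once the jump target (the end of the loop) is known.
    void patchBreaks(size_t target) {
        for (size_t site : breakPatches)
            code[site] = int32_t(target);
        breakPatches.clear();
    }
};

int main() {
    MiniEmitter bce;
    bce.emitBreakJump();                  // a 'break' somewhere in the loop body
    bce.code.push_back(/* NOP */ 0);      // more loop body
    bce.emitBreakJump();                  // another 'break'
    size_t loopEnd = bce.code.size();
    bce.patchBreaks(loopEnd);             // both jumps now point past the loop
    printf("%d %d\n", bce.code[1], bce.code[3]);   // prints "5 5"
    return 0;
}
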
diff --git a/js/src/frontend/BytecodeControlStructures.h b/js/src/frontend/BytecodeControlStructures.h
new file mode 100644
index 0000000000..37681c8837
--- /dev/null
+++ b/js/src/frontend/BytecodeControlStructures.h
@@ -0,0 +1,175 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef frontend_BytecodeControlStructures_h
+#define frontend_BytecodeControlStructures_h
+
+#include "mozilla/Attributes.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "ds/Nestable.h"
+#include "frontend/JumpList.h"
+#include "frontend/SharedContext.h"
+#include "frontend/TDZCheckCache.h"
+#include "gc/Rooting.h"
+#include "vm/String.h"
+
+namespace js {
+namespace frontend {
+
+struct BytecodeEmitter;
+class EmitterScope;
+
+class NestableControl : public Nestable<NestableControl>
+{
+ StatementKind kind_;
+
+ // The innermost scope when this was pushed.
+ EmitterScope* emitterScope_;
+
+ protected:
+ NestableControl(BytecodeEmitter* bce, StatementKind kind);
+
+ public:
+ using Nestable<NestableControl>::enclosing;
+ using Nestable<NestableControl>::findNearest;
+
+ StatementKind kind() const {
+ return kind_;
+ }
+
+ EmitterScope* emitterScope() const {
+ return emitterScope_;
+ }
+
+ template <typename T>
+ bool is() const;
+
+ template <typename T>
+ T& as() {
+ MOZ_ASSERT(this->is<T>());
+ return static_cast<T&>(*this);
+ }
+};
+
+class BreakableControl : public NestableControl
+{
+ public:
+ // Offset of the last break.
+ JumpList breaks;
+
+ BreakableControl(BytecodeEmitter* bce, StatementKind kind);
+
+ MOZ_MUST_USE bool patchBreaks(BytecodeEmitter* bce);
+};
+template <>
+inline bool
+NestableControl::is<BreakableControl>() const
+{
+ return StatementKindIsUnlabeledBreakTarget(kind_) || kind_ == StatementKind::Label;
+}
+
+class LabelControl : public BreakableControl
+{
+ RootedAtom label_;
+
+ // The code offset when this was pushed. Used for effectfulness checking.
+ ptrdiff_t startOffset_;
+
+ public:
+ LabelControl(BytecodeEmitter* bce, JSAtom* label, ptrdiff_t startOffset);
+
+ HandleAtom label() const {
+ return label_;
+ }
+
+ ptrdiff_t startOffset() const {
+ return startOffset_;
+ }
+};
+template <>
+inline bool
+NestableControl::is<LabelControl>() const
+{
+ return kind_ == StatementKind::Label;
+}
+
+class LoopControl : public BreakableControl
+{
+ // Loops' children are emitted in dominance order, so they can always
+ // have a TDZCheckCache.
+ TDZCheckCache tdzCache_;
+
+ // Stack depth when this loop was pushed on the control stack.
+ int32_t stackDepth_;
+
+ // The loop nesting depth. Used as a hint to Ion.
+ uint32_t loopDepth_;
+
+ // Can we OSR into Ion from here? True unless there is non-loop state on the stack.
+ bool canIonOsr_;
+
+ public:
+ // The target of continue statement jumps, e.g., the update portion of a
+ // for(;;) loop.
+ JumpTarget continueTarget;
+
+ // Offset of the last continue in the loop.
+ JumpList continues;
+
+ LoopControl(BytecodeEmitter* bce, StatementKind loopKind);
+
+ uint32_t loopDepth() const {
+ return loopDepth_;
+ }
+
+ bool canIonOsr() const {
+ return canIonOsr_;
+ }
+
+ MOZ_MUST_USE bool patchBreaksAndContinues(BytecodeEmitter* bce);
+};
+template <>
+inline bool
+NestableControl::is<LoopControl>() const
+{
+ return StatementKindIsLoop(kind_);
+}
+
+class TryFinallyControl : public NestableControl
+{
+ bool emittingSubroutine_;
+
+ public:
+ // The subroutine when emitting a finally block.
+ JumpList gosubs;
+
+ // Offset of the last catch guard, if any.
+ JumpList guardJump;
+
+ TryFinallyControl(BytecodeEmitter* bce, StatementKind kind);
+
+ void setEmittingSubroutine() {
+ emittingSubroutine_ = true;
+ }
+
+ bool emittingSubroutine() const {
+ return emittingSubroutine_;
+ }
+};
+template <>
+inline bool
+NestableControl::is<TryFinallyControl>() const
+{
+ return kind_ == StatementKind::Try || kind_ == StatementKind::Finally;
+}
+
+} /* namespace frontend */
+} /* namespace js */
+
+#endif /* frontend_BytecodeControlStructures_h */
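
The per-subtype is<T>() specializations above let findNearest walk the control stack and stop at the first control of a given kind. The following is a hypothetical usage sketch, not code from this patch, showing how emitter code could locate the loop that an unlabeled continue targets; the function name is invented, it presupposes the surrounding emitter sources, and source notes and error reporting are omitted.

#include "frontend/BytecodeControlStructures.h"
#include "frontend/BytecodeEmitter.h"

using namespace js::frontend;

static bool
EmitUnlabeledContinueSketch(BytecodeEmitter* bce)
{
    // findNearest stops at the first control for which is<LoopControl>()
    // holds, i.e. the innermost enclosing loop.
    LoopControl* loop =
        NestableControl::findNearest<LoopControl>(bce->innermostNestableControl);
    if (!loop)
        return false;   // `continue` outside any loop; real code reports an error

    // Record the jump now; LoopControl::patchBreaksAndContinues later points
    // every entry in `continues` at continueTarget.
    return bce->emitJump(JSOP_GOTO, &loop->continues);
}
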
diff --git a/js/src/frontend/BytecodeEmitter.cpp b/js/src/frontend/BytecodeEmitter.cpp
index 4b80a99669..d1c1082ad7 100644
--- a/js/src/frontend/BytecodeEmitter.cpp
+++ b/js/src/frontend/BytecodeEmitter.cpp
@@ -27,14 +27,20 @@
#include "jstypes.h"
#include "jsutil.h"
+#include "ds/Nestable.h"
+#include "frontend/BytecodeControlStructures.h"
#include "frontend/CallOrNewEmitter.h"
#include "frontend/ElemOpEmitter.h"
+#include "frontend/EmitterScope.h"
+#include "frontend/ForOfLoopControl.h"
#include "frontend/IfEmitter.h"
#include "frontend/NameOpEmitter.h"
#include "frontend/Parser.h"
#include "frontend/PropOpEmitter.h"
+#include "frontend/SwitchEmitter.h"
#include "frontend/TDZCheckCache.h"
#include "frontend/TokenStream.h"
+#include "frontend/TryEmitter.h"
#include "vm/Debugger.h"
#include "vm/GeneratorObject.h"
#include "vm/Stack.h"
@@ -60,11 +66,6 @@ using mozilla::NumberIsInt32;
using mozilla::PodCopy;
using mozilla::Some;
-class BreakableControl;
-class LabelControl;
-class LoopControl;
-class ForOfLoopControl;
-class TryFinallyControl;
class OptionalEmitter;
static bool
@@ -87,1709 +88,6 @@ GetCallArgsAndCount(ParseNode* callNode, ParseNode** argumentNode)
return callNode->pn_count - 1;
}
-class BytecodeEmitter::NestableControl : public Nestable<BytecodeEmitter::NestableControl>
-{
- StatementKind kind_;
-
- // The innermost scope when this was pushed.
- EmitterScope* emitterScope_;
-
- protected:
- NestableControl(BytecodeEmitter* bce, StatementKind kind)
- : Nestable<NestableControl>(&bce->innermostNestableControl),
- kind_(kind),
- emitterScope_(bce->innermostEmitterScopeNoCheck())
- { }
-
- public:
- using Nestable<NestableControl>::enclosing;
- using Nestable<NestableControl>::findNearest;
-
- StatementKind kind() const {
- return kind_;
- }
-
- EmitterScope* emitterScope() const {
- return emitterScope_;
- }
-
- template <typename T>
- bool is() const;
-
- template <typename T>
- T& as() {
- MOZ_ASSERT(this->is<T>());
- return static_cast<T&>(*this);
- }
-};
-
-// Template specializations are disallowed in different namespaces; specialize
-// all the NestableControl subtypes up front.
-namespace js {
-namespace frontend {
-
-template <>
-bool
-BytecodeEmitter::NestableControl::is<BreakableControl>() const
-{
- return StatementKindIsUnlabeledBreakTarget(kind_) || kind_ == StatementKind::Label;
-}
-
-template <>
-bool
-BytecodeEmitter::NestableControl::is<LabelControl>() const
-{
- return kind_ == StatementKind::Label;
-}
-
-template <>
-bool
-BytecodeEmitter::NestableControl::is<LoopControl>() const
-{
- return StatementKindIsLoop(kind_);
-}
-
-template <>
-bool
-BytecodeEmitter::NestableControl::is<ForOfLoopControl>() const
-{
- return kind_ == StatementKind::ForOfLoop;
-}
-
-template <>
-bool
-BytecodeEmitter::NestableControl::is<TryFinallyControl>() const
-{
- return kind_ == StatementKind::Try || kind_ == StatementKind::Finally;
-}
-
-} // namespace frontend
-} // namespace js
-
-class BreakableControl : public BytecodeEmitter::NestableControl
-{
- public:
- // Offset of the last break.
- JumpList breaks;
-
- BreakableControl(BytecodeEmitter* bce, StatementKind kind)
- : NestableControl(bce, kind)
- {
- MOZ_ASSERT(is<BreakableControl>());
- }
-
- MOZ_MUST_USE bool patchBreaks(BytecodeEmitter* bce) {
- return bce->emitJumpTargetAndPatch(breaks);
- }
-};
-
-class LabelControl : public BreakableControl
-{
- RootedAtom label_;
-
- // The code offset when this was pushed. Used for effectfulness checking.
- ptrdiff_t startOffset_;
-
- public:
- LabelControl(BytecodeEmitter* bce, JSAtom* label, ptrdiff_t startOffset)
- : BreakableControl(bce, StatementKind::Label),
- label_(bce->cx, label),
- startOffset_(startOffset)
- { }
-
- HandleAtom label() const {
- return label_;
- }
-
- ptrdiff_t startOffset() const {
- return startOffset_;
- }
-};
-
-class LoopControl : public BreakableControl
-{
- // Loops' children are emitted in dominance order, so they can always
- // have a TDZCheckCache.
- TDZCheckCache tdzCache_;
-
- // Stack depth when this loop was pushed on the control stack.
- int32_t stackDepth_;
-
- // The loop nesting depth. Used as a hint to Ion.
- uint32_t loopDepth_;
-
- // Can we OSR into Ion from here? True unless there is non-loop state on the stack.
- bool canIonOsr_;
-
- public:
- // The target of continue statement jumps, e.g., the update portion of a
- // for(;;) loop.
- JumpTarget continueTarget;
-
- // Offset of the last continue in the loop.
- JumpList continues;
-
- LoopControl(BytecodeEmitter* bce, StatementKind loopKind)
- : BreakableControl(bce, loopKind),
- tdzCache_(bce),
- continueTarget({ -1 })
- {
- MOZ_ASSERT(is<LoopControl>());
-
- LoopControl* enclosingLoop = findNearest<LoopControl>(enclosing());
-
- stackDepth_ = bce->stackDepth;
- loopDepth_ = enclosingLoop ? enclosingLoop->loopDepth_ + 1 : 1;
-
- int loopSlots;
- if (loopKind == StatementKind::Spread || loopKind == StatementKind::ForOfLoop)
- loopSlots = 3;
- else if (loopKind == StatementKind::ForInLoop)
- loopSlots = 2;
- else
- loopSlots = 0;
-
- MOZ_ASSERT(loopSlots <= stackDepth_);
-
- if (enclosingLoop) {
- canIonOsr_ = (enclosingLoop->canIonOsr_ &&
- stackDepth_ == enclosingLoop->stackDepth_ + loopSlots);
- } else {
- canIonOsr_ = stackDepth_ == loopSlots;
- }
- }
-
- uint32_t loopDepth() const {
- return loopDepth_;
- }
-
- bool canIonOsr() const {
- return canIonOsr_;
- }
-
- MOZ_MUST_USE bool patchBreaksAndContinues(BytecodeEmitter* bce) {
- MOZ_ASSERT(continueTarget.offset != -1);
- if (!patchBreaks(bce))
- return false;
- bce->patchJumpsToTarget(continues, continueTarget);
- return true;
- }
-};
-
-class TryFinallyControl : public BytecodeEmitter::NestableControl
-{
- bool emittingSubroutine_;
-
- public:
- // The subroutine when emitting a finally block.
- JumpList gosubs;
-
- // Offset of the last catch guard, if any.
- JumpList guardJump;
-
- TryFinallyControl(BytecodeEmitter* bce, StatementKind kind)
- : NestableControl(bce, kind),
- emittingSubroutine_(false)
- {
- MOZ_ASSERT(is<TryFinallyControl>());
- }
-
- void setEmittingSubroutine() {
- emittingSubroutine_ = true;
- }
-
- bool emittingSubroutine() const {
- return emittingSubroutine_;
- }
-};
-
-static bool
-ScopeKindIsInBody(ScopeKind kind)
-{
- return kind == ScopeKind::Lexical ||
- kind == ScopeKind::SimpleCatch ||
- kind == ScopeKind::Catch ||
- kind == ScopeKind::With ||
- kind == ScopeKind::FunctionBodyVar ||
- kind == ScopeKind::ParameterExpressionVar;
-}
-
-static inline void
-MarkAllBindingsClosedOver(LexicalScope::Data& data)
-{
- TrailingNamesArray& names = data.trailingNames;
- for (uint32_t i = 0; i < data.length; i++)
- names[i] = BindingName(names[i].name(), true);
-}
-
-// A scope that introduces bindings.
-class BytecodeEmitter::EmitterScope : public Nestable<BytecodeEmitter::EmitterScope>
-{
- // The cache of bound names that may be looked up in the
- // scope. Initially populated as the set of names this scope binds. As
- // names are looked up in enclosing scopes, they are cached on the
- // current scope.
- PooledMapPtr<NameLocationMap> nameCache_;
-
- // If this scope's cache does not include free names, such as the
- // global scope, the NameLocation to return.
- Maybe<NameLocation> fallbackFreeNameLocation_;
-
- // True if there is a corresponding EnvironmentObject on the environment
- // chain, false if all bindings are stored in frame slots on the stack.
- bool hasEnvironment_;
-
- // The number of enclosing environments. Used for error checking.
- uint8_t environmentChainLength_;
-
- // The next usable slot on the frame for not-closed over bindings.
- //
- // The initial frame slot when assigning slots to bindings is the
- // enclosing scope's nextFrameSlot. For the first scope in a frame,
- // the initial frame slot is 0.
- uint32_t nextFrameSlot_;
-
- // The index in the BytecodeEmitter's interned scope vector, otherwise
- // ScopeNote::NoScopeIndex.
- uint32_t scopeIndex_;
-
- // If kind is Lexical, Catch, or With, the index in the BytecodeEmitter's
- // block scope note list. Otherwise ScopeNote::NoScopeNote.
- uint32_t noteIndex_;
-
- MOZ_MUST_USE bool ensureCache(BytecodeEmitter* bce) {
- return nameCache_.acquire(bce->cx);
- }
-
- template <typename BindingIter>
- MOZ_MUST_USE bool checkSlotLimits(BytecodeEmitter* bce, const BindingIter& bi) {
- if (bi.nextFrameSlot() >= LOCALNO_LIMIT ||
- bi.nextEnvironmentSlot() >= ENVCOORD_SLOT_LIMIT)
- {
- return bce->reportError(nullptr, JSMSG_TOO_MANY_LOCALS);
- }
- return true;
- }
-
- MOZ_MUST_USE bool checkEnvironmentChainLength(BytecodeEmitter* bce) {
- uint32_t hops;
- if (EmitterScope* emitterScope = enclosing(&bce))
- hops = emitterScope->environmentChainLength_;
- else
- hops = bce->sc->compilationEnclosingScope()->environmentChainLength();
- if (hops >= ENVCOORD_HOPS_LIMIT - 1)
- return bce->reportError(nullptr, JSMSG_TOO_DEEP, js_function_str);
- environmentChainLength_ = mozilla::AssertedCast<uint8_t>(hops + 1);
- return true;
- }
-
- void updateFrameFixedSlots(BytecodeEmitter* bce, const BindingIter& bi) {
- nextFrameSlot_ = bi.nextFrameSlot();
- if (nextFrameSlot_ > bce->maxFixedSlots)
- bce->maxFixedSlots = nextFrameSlot_;
- MOZ_ASSERT_IF(bce->sc->isFunctionBox() &&
- (bce->sc->asFunctionBox()->isStarGenerator() ||
- bce->sc->asFunctionBox()->isLegacyGenerator() ||
- bce->sc->asFunctionBox()->isAsync()),
- bce->maxFixedSlots == 0);
- }
-
- MOZ_MUST_USE bool putNameInCache(BytecodeEmitter* bce, JSAtom* name, NameLocation loc) {
- NameLocationMap& cache = *nameCache_;
- NameLocationMap::AddPtr p = cache.lookupForAdd(name);
- MOZ_ASSERT(!p);
- if (!cache.add(p, name, loc)) {
- ReportOutOfMemory(bce->cx);
- return false;
- }
- return true;
- }
-
- Maybe<NameLocation> lookupInCache(BytecodeEmitter* bce, JSAtom* name) {
- if (NameLocationMap::Ptr p = nameCache_->lookup(name))
- return Some(p->value().wrapped);
- if (fallbackFreeNameLocation_ && nameCanBeFree(bce, name))
- return fallbackFreeNameLocation_;
- return Nothing();
- }
-
- friend bool BytecodeEmitter::needsImplicitThis();
-
- EmitterScope* enclosing(BytecodeEmitter** bce) const {
- // There is an enclosing scope with access to the same frame.
- if (EmitterScope* inFrame = enclosingInFrame())
- return inFrame;
-
- // We are currently compiling the enclosing script, look in the
- // enclosing BCE.
- if ((*bce)->parent) {
- *bce = (*bce)->parent;
- return (*bce)->innermostEmitterScopeNoCheck();
- }
-
- return nullptr;
- }
-
- Scope* enclosingScope(BytecodeEmitter* bce) const {
- if (EmitterScope* es = enclosing(&bce))
- return es->scope(bce);
-
- // The enclosing script is already compiled or the current script is the
- // global script.
- return bce->sc->compilationEnclosingScope();
- }
-
- static bool nameCanBeFree(BytecodeEmitter* bce, JSAtom* name) {
- // '.generator' cannot be accessed by name.
- return name != bce->cx->names().dotGenerator;
- }
-
- static NameLocation searchInEnclosingScope(JSAtom* name, Scope* scope, uint8_t hops);
- NameLocation searchAndCache(BytecodeEmitter* bce, JSAtom* name);
-
- template <typename ScopeCreator>
- MOZ_MUST_USE bool internScope(BytecodeEmitter* bce, ScopeCreator createScope);
- template <typename ScopeCreator>
- MOZ_MUST_USE bool internBodyScope(BytecodeEmitter* bce, ScopeCreator createScope);
- MOZ_MUST_USE bool appendScopeNote(BytecodeEmitter* bce);
-
- MOZ_MUST_USE bool deadZoneFrameSlotRange(BytecodeEmitter* bce, uint32_t slotStart,
- uint32_t slotEnd);
-
- public:
- explicit EmitterScope(BytecodeEmitter* bce)
- : Nestable<EmitterScope>(&bce->innermostEmitterScope_),
- nameCache_(bce->cx->frontendCollectionPool()),
- hasEnvironment_(false),
- environmentChainLength_(0),
- nextFrameSlot_(0),
- scopeIndex_(ScopeNote::NoScopeIndex),
- noteIndex_(ScopeNote::NoScopeNoteIndex)
- { }
-
- void dump(BytecodeEmitter* bce);
-
- MOZ_MUST_USE bool enterLexical(BytecodeEmitter* bce, ScopeKind kind,
- Handle<LexicalScope::Data*> bindings);
- MOZ_MUST_USE bool enterNamedLambda(BytecodeEmitter* bce, FunctionBox* funbox);
- MOZ_MUST_USE bool enterComprehensionFor(BytecodeEmitter* bce,
- Handle<LexicalScope::Data*> bindings);
- MOZ_MUST_USE bool enterFunction(BytecodeEmitter* bce, FunctionBox* funbox);
- MOZ_MUST_USE bool enterFunctionExtraBodyVar(BytecodeEmitter* bce, FunctionBox* funbox);
- MOZ_MUST_USE bool enterParameterExpressionVar(BytecodeEmitter* bce);
- MOZ_MUST_USE bool enterGlobal(BytecodeEmitter* bce, GlobalSharedContext* globalsc);
- MOZ_MUST_USE bool enterEval(BytecodeEmitter* bce, EvalSharedContext* evalsc);
- MOZ_MUST_USE bool enterModule(BytecodeEmitter* module, ModuleSharedContext* modulesc);
- MOZ_MUST_USE bool enterWith(BytecodeEmitter* bce);
- MOZ_MUST_USE bool deadZoneFrameSlots(BytecodeEmitter* bce);
-
- MOZ_MUST_USE bool leave(BytecodeEmitter* bce, bool nonLocal = false);
-
- uint32_t index() const {
- MOZ_ASSERT(scopeIndex_ != ScopeNote::NoScopeIndex, "Did you forget to intern a Scope?");
- return scopeIndex_;
- }
-
- uint32_t noteIndex() const {
- return noteIndex_;
- }
-
- Scope* scope(const BytecodeEmitter* bce) const {
- return bce->scopeList.vector[index()];
- }
-
- bool hasEnvironment() const {
- return hasEnvironment_;
- }
-
- // The first frame slot used.
- uint32_t frameSlotStart() const {
- if (EmitterScope* inFrame = enclosingInFrame())
- return inFrame->nextFrameSlot_;
- return 0;
- }
-
- // The last frame slot used + 1.
- uint32_t frameSlotEnd() const {
- return nextFrameSlot_;
- }
-
- uint32_t numFrameSlots() const {
- return frameSlotEnd() - frameSlotStart();
- }
-
- EmitterScope* enclosingInFrame() const {
- return Nestable<EmitterScope>::enclosing();
- }
-
- NameLocation lookup(BytecodeEmitter* bce, JSAtom* name) {
- if (Maybe<NameLocation> loc = lookupInCache(bce, name))
- return *loc;
- return searchAndCache(bce, name);
- }
-
- Maybe<NameLocation> locationBoundInScope(BytecodeEmitter* bce, JSAtom* name,
- EmitterScope* target);
-};
-
-void
-BytecodeEmitter::EmitterScope::dump(BytecodeEmitter* bce)
-{
- fprintf(stdout, "EmitterScope [%s] %p\n", ScopeKindString(scope(bce)->kind()), this);
-
- for (NameLocationMap::Range r = nameCache_->all(); !r.empty(); r.popFront()) {
- const NameLocation& l = r.front().value();
-
- JSAutoByteString bytes;
- if (!AtomToPrintableString(bce->cx, r.front().key(), &bytes))
- return;
- if (l.kind() != NameLocation::Kind::Dynamic)
- fprintf(stdout, " %s %s ", BindingKindString(l.bindingKind()), bytes.ptr());
- else
- fprintf(stdout, " %s ", bytes.ptr());
-
- switch (l.kind()) {
- case NameLocation::Kind::Dynamic:
- fprintf(stdout, "dynamic\n");
- break;
- case NameLocation::Kind::Global:
- fprintf(stdout, "global\n");
- break;
- case NameLocation::Kind::Intrinsic:
- fprintf(stdout, "intrinsic\n");
- break;
- case NameLocation::Kind::NamedLambdaCallee:
- fprintf(stdout, "named lambda callee\n");
- break;
- case NameLocation::Kind::Import:
- fprintf(stdout, "import\n");
- break;
- case NameLocation::Kind::ArgumentSlot:
- fprintf(stdout, "arg slot=%u\n", l.argumentSlot());
- break;
- case NameLocation::Kind::FrameSlot:
- fprintf(stdout, "frame slot=%u\n", l.frameSlot());
- break;
- case NameLocation::Kind::EnvironmentCoordinate:
- fprintf(stdout, "environment hops=%u slot=%u\n",
- l.environmentCoordinate().hops(), l.environmentCoordinate().slot());
- break;
- case NameLocation::Kind::DynamicAnnexBVar:
- fprintf(stdout, "dynamic annex b var\n");
- break;
- }
- }
-
- fprintf(stdout, "\n");
-}
-
-template <typename ScopeCreator>
-bool
-BytecodeEmitter::EmitterScope::internScope(BytecodeEmitter* bce, ScopeCreator createScope)
-{
- RootedScope enclosing(bce->cx, enclosingScope(bce));
- Scope* scope = createScope(bce->cx, enclosing);
- if (!scope)
- return false;
- hasEnvironment_ = scope->hasEnvironment();
- scopeIndex_ = bce->scopeList.length();
- return bce->scopeList.append(scope);
-}
-
-template <typename ScopeCreator>
-bool
-BytecodeEmitter::EmitterScope::internBodyScope(BytecodeEmitter* bce, ScopeCreator createScope)
-{
- MOZ_ASSERT(bce->bodyScopeIndex == UINT32_MAX, "There can be only one body scope");
- bce->bodyScopeIndex = bce->scopeList.length();
- return internScope(bce, createScope);
-}
-
-bool
-BytecodeEmitter::EmitterScope::appendScopeNote(BytecodeEmitter* bce)
-{
- MOZ_ASSERT(ScopeKindIsInBody(scope(bce)->kind()) && enclosingInFrame(),
- "Scope notes are not needed for body-level scopes.");
- noteIndex_ = bce->scopeNoteList.length();
- return bce->scopeNoteList.append(index(), bce->offset(), bce->inPrologue(),
- enclosingInFrame() ? enclosingInFrame()->noteIndex()
- : ScopeNote::NoScopeNoteIndex);
-}
-
-#ifdef DEBUG
-static bool
-NameIsOnEnvironment(Scope* scope, JSAtom* name)
-{
- for (BindingIter bi(scope); bi; bi++) {
- // If found, the name must already be on the environment or an import,
- // or else there is a bug in the closed-over name analysis in the
- // Parser.
- if (bi.name() == name) {
- BindingLocation::Kind kind = bi.location().kind();
-
- if (bi.hasArgumentSlot()) {
- JSScript* script = scope->as<FunctionScope>().script();
- if (!script->strict() && !script->functionHasParameterExprs()) {
- // Check for duplicate positional formal parameters.
- for (BindingIter bi2(bi); bi2 && bi2.hasArgumentSlot(); bi2++) {
- if (bi2.name() == name)
- kind = bi2.location().kind();
- }
- }
- }
-
- return kind == BindingLocation::Kind::Global ||
- kind == BindingLocation::Kind::Environment ||
- kind == BindingLocation::Kind::Import;
- }
- }
-
- // If not found, assume it's on the global or dynamically accessed.
- return true;
-}
-#endif
-
-/* static */ NameLocation
-BytecodeEmitter::EmitterScope::searchInEnclosingScope(JSAtom* name, Scope* scope, uint8_t hops)
-{
- for (ScopeIter si(scope); si; si++) {
- MOZ_ASSERT(NameIsOnEnvironment(si.scope(), name));
-
- bool hasEnv = si.hasSyntacticEnvironment();
-
- switch (si.kind()) {
- case ScopeKind::Function:
- if (hasEnv) {
- JSScript* script = si.scope()->as<FunctionScope>().script();
- if (script->funHasExtensibleScope())
- return NameLocation::Dynamic();
-
- for (BindingIter bi(si.scope()); bi; bi++) {
- if (bi.name() != name)
- continue;
-
- BindingLocation bindLoc = bi.location();
- if (bi.hasArgumentSlot() &&
- !script->strict() &&
- !script->functionHasParameterExprs())
- {
- // Check for duplicate positional formal parameters.
- for (BindingIter bi2(bi); bi2 && bi2.hasArgumentSlot(); bi2++) {
- if (bi2.name() == name)
- bindLoc = bi2.location();
- }
- }
-
- MOZ_ASSERT(bindLoc.kind() == BindingLocation::Kind::Environment);
- return NameLocation::EnvironmentCoordinate(bi.kind(), hops, bindLoc.slot());
- }
- }
- break;
-
- case ScopeKind::FunctionBodyVar:
- case ScopeKind::ParameterExpressionVar:
- case ScopeKind::Lexical:
- case ScopeKind::NamedLambda:
- case ScopeKind::StrictNamedLambda:
- case ScopeKind::SimpleCatch:
- case ScopeKind::Catch:
- if (hasEnv) {
- for (BindingIter bi(si.scope()); bi; bi++) {
- if (bi.name() != name)
- continue;
-
- // The name must already have been marked as closed
- // over. If this assertion is hit, there is a bug in the
- // name analysis.
- BindingLocation bindLoc = bi.location();
- MOZ_ASSERT(bindLoc.kind() == BindingLocation::Kind::Environment);
- return NameLocation::EnvironmentCoordinate(bi.kind(), hops, bindLoc.slot());
- }
- }
- break;
-
- case ScopeKind::Module:
- if (hasEnv) {
- for (BindingIter bi(si.scope()); bi; bi++) {
- if (bi.name() != name)
- continue;
-
- BindingLocation bindLoc = bi.location();
-
- // Imports are on the environment but are indirect
- // bindings and must be accessed dynamically instead of
- // using an EnvironmentCoordinate.
- if (bindLoc.kind() == BindingLocation::Kind::Import) {
- MOZ_ASSERT(si.kind() == ScopeKind::Module);
- return NameLocation::Import();
- }
-
- MOZ_ASSERT(bindLoc.kind() == BindingLocation::Kind::Environment);
- return NameLocation::EnvironmentCoordinate(bi.kind(), hops, bindLoc.slot());
- }
- }
- break;
-
- case ScopeKind::Eval:
- case ScopeKind::StrictEval:
- // As an optimization, if the eval doesn't have its own var
- // environment and its immediate enclosing scope is a global
- // scope, all accesses are global.
- if (!hasEnv && si.scope()->enclosing()->is<GlobalScope>())
- return NameLocation::Global(BindingKind::Var);
- return NameLocation::Dynamic();
-
- case ScopeKind::Global:
- return NameLocation::Global(BindingKind::Var);
-
- case ScopeKind::With:
- case ScopeKind::NonSyntactic:
- return NameLocation::Dynamic();
- }
-
- if (hasEnv) {
- MOZ_ASSERT(hops < ENVCOORD_HOPS_LIMIT - 1);
- hops++;
- }
- }
-
- MOZ_CRASH("Malformed scope chain");
-}
-
-NameLocation
-BytecodeEmitter::EmitterScope::searchAndCache(BytecodeEmitter* bce, JSAtom* name)
-{
- Maybe<NameLocation> loc;
- uint8_t hops = hasEnvironment() ? 1 : 0;
- DebugOnly<bool> inCurrentScript = enclosingInFrame();
-
- // Start searching in the current compilation.
- for (EmitterScope* es = enclosing(&bce); es; es = es->enclosing(&bce)) {
- loc = es->lookupInCache(bce, name);
- if (loc) {
- if (loc->kind() == NameLocation::Kind::EnvironmentCoordinate)
- *loc = loc->addHops(hops);
- break;
- }
-
- if (es->hasEnvironment())
- hops++;
-
-#ifdef DEBUG
- if (!es->enclosingInFrame())
- inCurrentScript = false;
-#endif
- }
-
- // If the name is not found in the current compilation, walk the Scope
- // chain encompassing the compilation.
- if (!loc) {
- inCurrentScript = false;
- loc = Some(searchInEnclosingScope(name, bce->sc->compilationEnclosingScope(), hops));
- }
-
- // Each script has its own frame. A free name that is accessed
- // from an inner script must not be a frame slot access. If this
- // assertion is hit, it is a bug in the free name analysis in the
- // parser.
- MOZ_ASSERT_IF(!inCurrentScript, loc->kind() != NameLocation::Kind::FrameSlot);
-
- // It is always correct to not cache the location. Ignore OOMs to make
- // lookups infallible.
- if (!putNameInCache(bce, name, *loc))
- bce->cx->recoverFromOutOfMemory();
-
- return *loc;
-}
-
-Maybe<NameLocation>
-BytecodeEmitter::EmitterScope::locationBoundInScope(BytecodeEmitter* bce, JSAtom* name,
- EmitterScope* target)
-{
- // The target scope must be an intra-frame enclosing scope of this
- // one. Count the number of extra hops to reach it.
- uint8_t extraHops = 0;
- for (EmitterScope* es = this; es != target; es = es->enclosingInFrame()) {
- if (es->hasEnvironment())
- extraHops++;
- }
-
- // Caches are prepopulated with bound names. So if the name is bound in a
- // particular scope, it must already be in the cache. Furthermore, don't
- // consult the fallback location as we only care about binding names.
- Maybe<NameLocation> loc;
- if (NameLocationMap::Ptr p = target->nameCache_->lookup(name)) {
- NameLocation l = p->value().wrapped;
- if (l.kind() == NameLocation::Kind::EnvironmentCoordinate)
- loc = Some(l.addHops(extraHops));
- else
- loc = Some(l);
- }
- return loc;
-}
-
-bool
-BytecodeEmitter::EmitterScope::deadZoneFrameSlotRange(BytecodeEmitter* bce, uint32_t slotStart,
- uint32_t slotEnd)
-{
- // Lexical bindings throw ReferenceErrors if they are used before
- // initialization. See ES6 8.1.1.1.6.
- //
- // For completeness, lexical bindings are initialized in ES6 by calling
- // InitializeBinding, after which touching the binding will no longer
- // throw reference errors. See 13.1.11, 9.2.13, 13.6.3.4, 13.6.4.6,
- // 13.6.4.8, 13.14.5, 15.1.8, and 15.2.0.15.
- if (slotStart != slotEnd) {
- if (!bce->emit1(JSOP_UNINITIALIZED))
- return false;
- for (uint32_t slot = slotStart; slot < slotEnd; slot++) {
- if (!bce->emitLocalOp(JSOP_INITLEXICAL, slot))
- return false;
- }
- if (!bce->emit1(JSOP_POP))
- return false;
- }
-
- return true;
-}
-
-bool
-BytecodeEmitter::EmitterScope::deadZoneFrameSlots(BytecodeEmitter* bce)
-{
- return deadZoneFrameSlotRange(bce, frameSlotStart(), frameSlotEnd());
-}
-
-bool
-BytecodeEmitter::EmitterScope::enterLexical(BytecodeEmitter* bce, ScopeKind kind,
- Handle<LexicalScope::Data*> bindings)
-{
- MOZ_ASSERT(kind != ScopeKind::NamedLambda && kind != ScopeKind::StrictNamedLambda);
- MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
-
- if (!ensureCache(bce))
- return false;
-
- // Marks all names as closed over if the the context requires it. This
- // cannot be done in the Parser as we may not know if the context requires
- // all bindings to be closed over until after parsing is finished. For
- // example, legacy generators require all bindings to be closed over but
- // it is unknown if a function is a legacy generator until the first
- // 'yield' expression is parsed.
- //
- // This is not a problem with other scopes, as all other scopes with
- // bindings are body-level. At the time of their creation, whether or not
- // the context requires all bindings to be closed over is already known.
- if (bce->sc->allBindingsClosedOver())
- MarkAllBindingsClosedOver(*bindings);
-
- // Resolve bindings.
- TDZCheckCache* tdzCache = bce->innermostTDZCheckCache;
- uint32_t firstFrameSlot = frameSlotStart();
- BindingIter bi(*bindings, firstFrameSlot, /* isNamedLambda = */ false);
- for (; bi; bi++) {
- if (!checkSlotLimits(bce, bi))
- return false;
-
- NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
- if (!putNameInCache(bce, bi.name(), loc))
- return false;
-
- if (!tdzCache->noteTDZCheck(bce, bi.name(), CheckTDZ))
- return false;
- }
-
- updateFrameFixedSlots(bce, bi);
-
- // Create and intern the VM scope.
- auto createScope = [kind, bindings, firstFrameSlot](ExclusiveContext* cx,
- HandleScope enclosing)
- {
- return LexicalScope::create(cx, kind, bindings, firstFrameSlot, enclosing);
- };
- if (!internScope(bce, createScope))
- return false;
-
- if (ScopeKindIsInBody(kind) && hasEnvironment()) {
- // After interning the VM scope we can get the scope index.
- if (!bce->emitInternedScopeOp(index(), JSOP_PUSHLEXICALENV))
- return false;
- }
-
- // Lexical scopes need notes to be mapped from a pc.
- if (!appendScopeNote(bce))
- return false;
-
- // Put frame slots in TDZ. Environment slots are poisoned during
- // environment creation.
- //
- // This must be done after appendScopeNote to be considered in the extent
- // of the scope.
- if (!deadZoneFrameSlotRange(bce, firstFrameSlot, frameSlotEnd()))
- return false;
-
- return checkEnvironmentChainLength(bce);
-}
-
-bool
-BytecodeEmitter::EmitterScope::enterNamedLambda(BytecodeEmitter* bce, FunctionBox* funbox)
-{
- MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
- MOZ_ASSERT(funbox->namedLambdaBindings());
-
- if (!ensureCache(bce))
- return false;
-
- // See comment in enterLexical about allBindingsClosedOver.
- if (funbox->allBindingsClosedOver())
- MarkAllBindingsClosedOver(*funbox->namedLambdaBindings());
-
- BindingIter bi(*funbox->namedLambdaBindings(), LOCALNO_LIMIT, /* isNamedLambda = */ true);
- MOZ_ASSERT(bi.kind() == BindingKind::NamedLambdaCallee);
-
- // The lambda name, if not closed over, is accessed via JSOP_CALLEE and
- // not a frame slot. Do not update frame slot information.
- NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
- if (!putNameInCache(bce, bi.name(), loc))
- return false;
-
- bi++;
- MOZ_ASSERT(!bi, "There should be exactly one binding in a NamedLambda scope");
-
- auto createScope = [funbox](ExclusiveContext* cx, HandleScope enclosing) {
- ScopeKind scopeKind =
- funbox->strict() ? ScopeKind::StrictNamedLambda : ScopeKind::NamedLambda;
- return LexicalScope::create(cx, scopeKind, funbox->namedLambdaBindings(),
- LOCALNO_LIMIT, enclosing);
- };
- if (!internScope(bce, createScope))
- return false;
-
- return checkEnvironmentChainLength(bce);
-}
-
-bool
-BytecodeEmitter::EmitterScope::enterComprehensionFor(BytecodeEmitter* bce,
- Handle<LexicalScope::Data*> bindings)
-{
- if (!enterLexical(bce, ScopeKind::Lexical, bindings))
- return false;
-
- // For comprehensions, initialize all lexical names up front to undefined
- // because they're now a dead feature and don't interact properly with
- // TDZ.
- auto nop = [](BytecodeEmitter*, const NameLocation&, bool) {
- return true;
- };
-
- if (!bce->emit1(JSOP_UNDEFINED))
- return false;
-
- RootedAtom name(bce->cx);
- for (BindingIter bi(*bindings, frameSlotStart(), /* isNamedLambda = */ false); bi; bi++) {
- name = bi.name();
- NameOpEmitter noe(bce, name, NameOpEmitter::Kind::Initialize);
- if (!noe.prepareForRhs())
- return false;
- if (!noe.emitAssignment())
- return false;
- }
-
- if (!bce->emit1(JSOP_POP))
- return false;
-
- return true;
-}
-
-bool
-BytecodeEmitter::EmitterScope::enterParameterExpressionVar(BytecodeEmitter* bce)
-{
- MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
-
- if (!ensureCache(bce))
- return false;
-
- // Parameter expressions var scopes have no pre-set bindings and are
- // always extensible, as they are needed for eval.
- fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
-
- // Create and intern the VM scope.
- uint32_t firstFrameSlot = frameSlotStart();
- auto createScope = [firstFrameSlot](ExclusiveContext* cx, HandleScope enclosing) {
- return VarScope::create(cx, ScopeKind::ParameterExpressionVar,
- /* data = */ nullptr, firstFrameSlot,
- /* needsEnvironment = */ true, enclosing);
- };
- if (!internScope(bce, createScope))
- return false;
-
- MOZ_ASSERT(hasEnvironment());
- if (!bce->emitInternedScopeOp(index(), JSOP_PUSHVARENV))
- return false;
-
- // The extra var scope needs a note to be mapped from a pc.
- if (!appendScopeNote(bce))
- return false;
-
- return checkEnvironmentChainLength(bce);
-}
-
-bool
-BytecodeEmitter::EmitterScope::enterFunction(BytecodeEmitter* bce, FunctionBox* funbox)
-{
- MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
-
- // If there are parameter expressions, there is an extra var scope.
- if (!funbox->hasExtraBodyVarScope())
- bce->setVarEmitterScope(this);
-
- if (!ensureCache(bce))
- return false;
-
- // Resolve body-level bindings, if there are any.
- auto bindings = funbox->functionScopeBindings();
- Maybe<uint32_t> lastLexicalSlot;
- if (bindings) {
- NameLocationMap& cache = *nameCache_;
-
- BindingIter bi(*bindings, funbox->hasParameterExprs);
- for (; bi; bi++) {
- if (!checkSlotLimits(bce, bi))
- return false;
-
- NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
- NameLocationMap::AddPtr p = cache.lookupForAdd(bi.name());
-
- // The only duplicate bindings that occur are simple formal
- // parameters, in which case the last position counts, so update the
- // location.
- if (p) {
- MOZ_ASSERT(bi.kind() == BindingKind::FormalParameter);
- MOZ_ASSERT(!funbox->hasDestructuringArgs);
- MOZ_ASSERT(!funbox->hasRest());
- p->value() = loc;
- continue;
- }
-
- if (!cache.add(p, bi.name(), loc)) {
- ReportOutOfMemory(bce->cx);
- return false;
- }
- }
-
- updateFrameFixedSlots(bce, bi);
- } else {
- nextFrameSlot_ = 0;
- }
-
- // If the function's scope may be extended at runtime due to sloppy direct
- // eval and there is no extra var scope, any names beyond the function
- // scope must be accessed dynamically as we don't know if the name will
- // become a 'var' binding due to direct eval.
- if (!funbox->hasParameterExprs && funbox->hasExtensibleScope())
- fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
-
- // In case of parameter expressions, the parameters are lexical
- // bindings and have TDZ.
- if (funbox->hasParameterExprs && nextFrameSlot_) {
- uint32_t paramFrameSlotEnd = 0;
- for (BindingIter bi(*bindings, true); bi; bi++) {
- if (!BindingKindIsLexical(bi.kind()))
- break;
-
- NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
- if (loc.kind() == NameLocation::Kind::FrameSlot) {
- MOZ_ASSERT(paramFrameSlotEnd <= loc.frameSlot());
- paramFrameSlotEnd = loc.frameSlot() + 1;
- }
- }
-
- if (!deadZoneFrameSlotRange(bce, 0, paramFrameSlotEnd))
- return false;
- }
-
- // Create and intern the VM scope.
- auto createScope = [funbox](ExclusiveContext* cx, HandleScope enclosing) {
- RootedFunction fun(cx, funbox->function());
- return FunctionScope::create(cx, funbox->functionScopeBindings(),
- funbox->hasParameterExprs,
- funbox->needsCallObjectRegardlessOfBindings(),
- fun, enclosing);
- };
- if (!internBodyScope(bce, createScope))
- return false;
-
- return checkEnvironmentChainLength(bce);
-}
-
-bool
-BytecodeEmitter::EmitterScope::enterFunctionExtraBodyVar(BytecodeEmitter* bce, FunctionBox* funbox)
-{
- MOZ_ASSERT(funbox->hasParameterExprs);
- MOZ_ASSERT(funbox->extraVarScopeBindings() ||
- funbox->needsExtraBodyVarEnvironmentRegardlessOfBindings());
- MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
-
- // The extra var scope is never popped once it's entered. It replaces the
- // function scope as the var emitter scope.
- bce->setVarEmitterScope(this);
-
- if (!ensureCache(bce))
- return false;
-
- // Resolve body-level bindings, if there are any.
- uint32_t firstFrameSlot = frameSlotStart();
- if (auto bindings = funbox->extraVarScopeBindings()) {
- BindingIter bi(*bindings, firstFrameSlot);
- for (; bi; bi++) {
- if (!checkSlotLimits(bce, bi))
- return false;
-
- NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
- if (!putNameInCache(bce, bi.name(), loc))
- return false;
- }
-
- updateFrameFixedSlots(bce, bi);
- } else {
- nextFrameSlot_ = firstFrameSlot;
- }
-
- // If the extra var scope may be extended at runtime due to sloppy
- // direct eval, any names beyond the var scope must be accessed
- // dynamically as we don't know if the name will become a 'var' binding
- // due to direct eval.
- if (funbox->hasExtensibleScope())
- fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
-
- // Create and intern the VM scope.
- auto createScope = [funbox, firstFrameSlot](ExclusiveContext* cx, HandleScope enclosing) {
- return VarScope::create(cx, ScopeKind::FunctionBodyVar,
- funbox->extraVarScopeBindings(), firstFrameSlot,
- funbox->needsExtraBodyVarEnvironmentRegardlessOfBindings(),
- enclosing);
- };
- if (!internScope(bce, createScope))
- return false;
-
- if (hasEnvironment()) {
- if (!bce->emitInternedScopeOp(index(), JSOP_PUSHVARENV))
- return false;
- }
-
- // The extra var scope needs a note to be mapped from a pc.
- if (!appendScopeNote(bce))
- return false;
-
- return checkEnvironmentChainLength(bce);
-}
-
-class DynamicBindingIter : public BindingIter
-{
- public:
- explicit DynamicBindingIter(GlobalSharedContext* sc)
- : BindingIter(*sc->bindings)
- { }
-
- explicit DynamicBindingIter(EvalSharedContext* sc)
- : BindingIter(*sc->bindings, /* strict = */ false)
- {
- MOZ_ASSERT(!sc->strict());
- }
-
- JSOp bindingOp() const {
- switch (kind()) {
- case BindingKind::Var:
- return JSOP_DEFVAR;
- case BindingKind::Let:
- return JSOP_DEFLET;
- case BindingKind::Const:
- return JSOP_DEFCONST;
- default:
- MOZ_CRASH("Bad BindingKind");
- }
- }
-};
-
-bool
-BytecodeEmitter::EmitterScope::enterGlobal(BytecodeEmitter* bce, GlobalSharedContext* globalsc)
-{
- MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
-
- bce->setVarEmitterScope(this);
-
- if (!ensureCache(bce))
- return false;
-
- if (bce->emitterMode == BytecodeEmitter::SelfHosting) {
- // In self-hosting, it is incorrect to consult the global scope because
- // self-hosted scripts are cloned into their target compartments before
- // they are run. Instead of Global, Intrinsic is used for all names.
- //
- // Intrinsic lookups are redirected to the special intrinsics holder
- // in the global object, into which any missing values are cloned
- // lazily upon first access.
- fallbackFreeNameLocation_ = Some(NameLocation::Intrinsic());
-
- auto createScope = [](ExclusiveContext* cx, HandleScope enclosing) {
- MOZ_ASSERT(!enclosing);
- return &cx->global()->emptyGlobalScope();
- };
- return internBodyScope(bce, createScope);
- }
-
- // Resolve binding names and emit DEF{VAR,LET,CONST} prologue ops.
- if (globalsc->bindings) {
- for (DynamicBindingIter bi(globalsc); bi; bi++) {
- NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
- JSAtom* name = bi.name();
- if (!putNameInCache(bce, name, loc))
- return false;
-
- // Define the name in the prologue. Do not emit DEFVAR for
- // functions that we'll emit DEFFUN for.
- if (bi.isTopLevelFunction())
- continue;
-
- if (!bce->emitAtomOp(name, bi.bindingOp()))
- return false;
- }
- }
-
- // Note that to save space, we don't add free names to the cache for
- // global scopes. They are assumed to be global vars in the syntactic
- // global scope, dynamic accesses under non-syntactic global scope.
- if (globalsc->scopeKind() == ScopeKind::Global)
- fallbackFreeNameLocation_ = Some(NameLocation::Global(BindingKind::Var));
- else
- fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
-
- auto createScope = [globalsc](ExclusiveContext* cx, HandleScope enclosing) {
- MOZ_ASSERT(!enclosing);
- return GlobalScope::create(cx, globalsc->scopeKind(), globalsc->bindings);
- };
- return internBodyScope(bce, createScope);
-}
-
-bool
-BytecodeEmitter::EmitterScope::enterEval(BytecodeEmitter* bce, EvalSharedContext* evalsc)
-{
- MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
-
- bce->setVarEmitterScope(this);
-
- if (!ensureCache(bce))
- return false;
-
- // For simplicity, treat all free name lookups in eval scripts as dynamic.
- fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
-
- // Create the `var` scope. Note that there is also a lexical scope, created
- // separately in emitScript().
- auto createScope = [evalsc](ExclusiveContext* cx, HandleScope enclosing) {
- ScopeKind scopeKind = evalsc->strict() ? ScopeKind::StrictEval : ScopeKind::Eval;
- return EvalScope::create(cx, scopeKind, evalsc->bindings, enclosing);
- };
- if (!internBodyScope(bce, createScope))
- return false;
-
- if (hasEnvironment()) {
- if (!bce->emitInternedScopeOp(index(), JSOP_PUSHVARENV))
- return false;
- } else {
- // Resolve binding names and emit DEFVAR prologue ops if we don't have
- // an environment (i.e., a sloppy eval not in a parameter expression).
- // Eval scripts always have their own lexical scope, but non-strict
- // scopes may introduce 'var' bindings to the nearest var scope.
- //
- // TODO: We may optimize strict eval bindings in the future to be on
- // the frame. For now, handle everything dynamically.
- if (!hasEnvironment() && evalsc->bindings) {
- for (DynamicBindingIter bi(evalsc); bi; bi++) {
- MOZ_ASSERT(bi.bindingOp() == JSOP_DEFVAR);
-
- if (bi.isTopLevelFunction())
- continue;
-
- if (!bce->emitAtomOp(bi.name(), JSOP_DEFVAR))
- return false;
- }
- }
-
- // As an optimization, if the eval does not have its own var
- // environment and is directly enclosed in a global scope, then all
- // free name lookups are global.
- if (scope(bce)->enclosing()->is<GlobalScope>())
- fallbackFreeNameLocation_ = Some(NameLocation::Global(BindingKind::Var));
- }
-
- return true;
-}
-
-bool
-BytecodeEmitter::EmitterScope::enterModule(BytecodeEmitter* bce, ModuleSharedContext* modulesc)
-{
- MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
-
- bce->setVarEmitterScope(this);
-
- if (!ensureCache(bce))
- return false;
-
- // Resolve body-level bindings, if there are any.
- TDZCheckCache* tdzCache = bce->innermostTDZCheckCache;
- Maybe<uint32_t> firstLexicalFrameSlot;
- if (ModuleScope::Data* bindings = modulesc->bindings) {
- BindingIter bi(*bindings);
- for (; bi; bi++) {
- if (!checkSlotLimits(bce, bi))
- return false;
-
- NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
- if (!putNameInCache(bce, bi.name(), loc))
- return false;
-
- if (BindingKindIsLexical(bi.kind())) {
- if (loc.kind() == NameLocation::Kind::FrameSlot && !firstLexicalFrameSlot)
- firstLexicalFrameSlot = Some(loc.frameSlot());
-
- if (!tdzCache->noteTDZCheck(bce, bi.name(), CheckTDZ))
- return false;
- }
- }
-
- updateFrameFixedSlots(bce, bi);
- } else {
- nextFrameSlot_ = 0;
- }
-
- // Modules are toplevel, so any free names are global.
- fallbackFreeNameLocation_ = Some(NameLocation::Global(BindingKind::Var));
-
- // Put lexical frame slots in TDZ. Environment slots are poisoned during
- // environment creation.
- if (firstLexicalFrameSlot) {
- if (!deadZoneFrameSlotRange(bce, *firstLexicalFrameSlot, frameSlotEnd()))
- return false;
- }
-
- // Create and intern the VM scope.
- auto createScope = [modulesc](ExclusiveContext* cx, HandleScope enclosing) {
- return ModuleScope::create(cx, modulesc->bindings, modulesc->module(), enclosing);
- };
- if (!internBodyScope(bce, createScope))
- return false;
-
- return checkEnvironmentChainLength(bce);
-}
-
-bool
-BytecodeEmitter::EmitterScope::enterWith(BytecodeEmitter* bce)
-{
- MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
-
- if (!ensureCache(bce))
- return false;
-
- // 'with' make all accesses dynamic and unanalyzable.
- fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
-
- auto createScope = [](ExclusiveContext* cx, HandleScope enclosing) {
- return WithScope::create(cx, enclosing);
- };
- if (!internScope(bce, createScope))
- return false;
-
- if (!bce->emitInternedScopeOp(index(), JSOP_ENTERWITH))
- return false;
-
- if (!appendScopeNote(bce))
- return false;
-
- return checkEnvironmentChainLength(bce);
-}
-
-bool
-BytecodeEmitter::EmitterScope::leave(BytecodeEmitter* bce, bool nonLocal)
-{
- // If we aren't leaving the scope due to a non-local jump (e.g., break),
- // we must be the innermost scope.
- MOZ_ASSERT_IF(!nonLocal, this == bce->innermostEmitterScopeNoCheck());
-
- ScopeKind kind = scope(bce)->kind();
- switch (kind) {
- case ScopeKind::Lexical:
- case ScopeKind::SimpleCatch:
- case ScopeKind::Catch:
- if (!bce->emit1(hasEnvironment() ? JSOP_POPLEXICALENV : JSOP_DEBUGLEAVELEXICALENV))
- return false;
- break;
-
- case ScopeKind::With:
- if (!bce->emit1(JSOP_LEAVEWITH))
- return false;
- break;
-
- case ScopeKind::ParameterExpressionVar:
- MOZ_ASSERT(hasEnvironment());
- if (!bce->emit1(JSOP_POPVARENV))
- return false;
- break;
-
- case ScopeKind::Function:
- case ScopeKind::FunctionBodyVar:
- case ScopeKind::NamedLambda:
- case ScopeKind::StrictNamedLambda:
- case ScopeKind::Eval:
- case ScopeKind::StrictEval:
- case ScopeKind::Global:
- case ScopeKind::NonSyntactic:
- case ScopeKind::Module:
- break;
- }
-
- // Finish up the scope if we are leaving it in LIFO fashion.
- if (!nonLocal) {
- // Popping scopes due to non-local jumps generate additional scope
- // notes. See NonLocalExitControl::prepareForNonLocalJump.
- if (ScopeKindIsInBody(kind)) {
- // The extra function var scope is never popped once it's pushed,
- // so its scope note extends until the end of any possible code.
- uint32_t offset = kind == ScopeKind::FunctionBodyVar ? UINT32_MAX : bce->offset();
- bce->scopeNoteList.recordEnd(noteIndex_, offset, bce->inPrologue());
- }
- }
-
- return true;
-}
-
-class MOZ_STACK_CLASS TryEmitter
-{
- public:
- enum Kind {
- TryCatch,
- TryCatchFinally,
- TryFinally
- };
- enum ShouldUseRetVal {
- UseRetVal,
- DontUseRetVal
- };
- enum ShouldUseControl {
- UseControl,
- DontUseControl,
- };
-
- private:
- BytecodeEmitter* bce_;
- Kind kind_;
- ShouldUseRetVal retValKind_;
-
- // Track jumps-over-catches and gosubs-to-finally for later fixup.
- //
- // When a finally block is active, non-local jumps (including
- // jumps-over-catches) result in a GOSUB being written into the bytecode
- // stream and fixed-up later.
- //
- // If ShouldUseControl is DontUseControl, all that handling is skipped.
- // DontUseControl is used by yield* and the internal try-catch around
- // IteratorClose. These internal uses must:
- // * have only one catch block
- // * have no catch guard
- // * have JSOP_GOTO at the end of catch-block
- // * have no non-local-jump
- // * don't use finally block for normal completion of try-block and
- // catch-block
- //
- // Additionally, a finally block may be emitted when ShouldUseControl is
- // DontUseControl, even if the kind is not TryCatchFinally or TryFinally,
- // because GOSUBs are not emitted. This internal use shares the
- // requirements as above.
- Maybe<TryFinallyControl> controlInfo_;
-
- int depth_;
- unsigned noteIndex_;
- ptrdiff_t tryStart_;
- JumpList catchAndFinallyJump_;
- JumpTarget tryEnd_;
- JumpTarget finallyStart_;
-
- enum State {
- Start,
- Try,
- TryEnd,
- Catch,
- CatchEnd,
- Finally,
- FinallyEnd,
- End
- };
- State state_;
-
- bool hasCatch() const {
- return kind_ == TryCatch || kind_ == TryCatchFinally;
- }
- bool hasFinally() const {
- return kind_ == TryCatchFinally || kind_ == TryFinally;
- }
-
- public:
- TryEmitter(BytecodeEmitter* bce, Kind kind, ShouldUseRetVal retValKind = UseRetVal,
- ShouldUseControl controlKind = UseControl)
- : bce_(bce),
- kind_(kind),
- retValKind_(retValKind),
- depth_(0),
- noteIndex_(0),
- tryStart_(0),
- state_(Start)
- {
- if (controlKind == UseControl)
- controlInfo_.emplace(bce_, hasFinally() ? StatementKind::Finally : StatementKind::Try);
- finallyStart_.offset = 0;
- }
-
- bool emitJumpOverCatchAndFinally() {
- if (!bce_->emitJump(JSOP_GOTO, &catchAndFinallyJump_))
- return false;
- return true;
- }
-
- bool emitTry() {
- MOZ_ASSERT(state_ == Start);
-
- // Since an exception can be thrown at any place inside the try block,
- // we need to restore the stack and the scope chain before we transfer
- // the control to the exception handler.
- //
- // For that we store in a try note associated with the catch or
- // finally block the stack depth upon the try entry. The interpreter
- // uses this depth to properly unwind the stack and the scope chain.
- depth_ = bce_->stackDepth;
-
- // Record the try location, then emit the try block.
- if (!bce_->newSrcNote(SRC_TRY, &noteIndex_))
- return false;
- if (!bce_->emit1(JSOP_TRY))
- return false;
- tryStart_ = bce_->offset();
-
- state_ = Try;
- return true;
- }
-
- private:
- bool emitTryEnd() {
- MOZ_ASSERT(state_ == Try);
- MOZ_ASSERT(depth_ == bce_->stackDepth);
-
- // GOSUB to finally, if present.
- if (hasFinally() && controlInfo_) {
- if (!bce_->emitJump(JSOP_GOSUB, &controlInfo_->gosubs))
- return false;
- }
-
- // Source note points to the jump at the end of the try block.
- if (!bce_->setSrcNoteOffset(noteIndex_, 0, bce_->offset() - tryStart_ + JSOP_TRY_LENGTH))
- return false;
-
- // Emit jump over catch and/or finally.
- if (!bce_->emitJump(JSOP_GOTO, &catchAndFinallyJump_))
- return false;
-
- if (!bce_->emitJumpTarget(&tryEnd_))
- return false;
-
- return true;
- }
-
- public:
- bool emitCatch() {
- if (state_ == Try) {
- if (!emitTryEnd())
- return false;
- } else {
- MOZ_ASSERT(state_ == Catch);
- if (!emitCatchEnd(true))
- return false;
- }
-
- MOZ_ASSERT(bce_->stackDepth == depth_);
-
- if (retValKind_ == UseRetVal) {
- // Clear the frame's return value that might have been set by the
- // try block:
- //
- // eval("try { 1; throw 2 } catch(e) {}"); // undefined, not 1
- if (!bce_->emit1(JSOP_UNDEFINED))
- return false;
- if (!bce_->emit1(JSOP_SETRVAL))
- return false;
- }
-
- state_ = Catch;
- return true;
- }
-
- private:
- bool emitCatchEnd(bool hasNext) {
- MOZ_ASSERT(state_ == Catch);
-
- if (!controlInfo_)
- return true;
-
- // gosub <finally>, if required.
- if (hasFinally()) {
- if (!bce_->emitJump(JSOP_GOSUB, &controlInfo_->gosubs))
- return false;
- MOZ_ASSERT(bce_->stackDepth == depth_);
- }
-
- // Jump over the remaining catch blocks. This will get fixed
- // up to jump to after catch/finally.
- if (!bce_->emitJump(JSOP_GOTO, &catchAndFinallyJump_))
- return false;
-
- // If this catch block had a guard clause, patch the guard jump to
- // come here.
- if (controlInfo_->guardJump.offset != -1) {
- if (!bce_->emitJumpTargetAndPatch(controlInfo_->guardJump))
- return false;
- controlInfo_->guardJump.offset = -1;
-
- // If this catch block is the last one, rethrow, delegating
- // execution of any finally block to the exception handler.
- if (!hasNext) {
- if (!bce_->emit1(JSOP_EXCEPTION))
- return false;
- if (!bce_->emit1(JSOP_THROW))
- return false;
- }
- }
-
- return true;
- }
-
- public:
- bool emitFinally(Maybe<uint32_t> finallyPos = Nothing()) {
- // If we are using controlInfo_ (i.e., emitting a syntactic try
- // block), we must have specified up front whether there will be a
- // finally clause. For internal try blocks, like those emitted for
- // yield* and IteratorClose inside for-of loops, we can emitFinally
- // even without specifying up front, since the internal try blocks
- // emit no GOSUBs.
- if (!controlInfo_) {
- if (kind_ == TryCatch)
- kind_ = TryCatchFinally;
- } else {
- MOZ_ASSERT(hasFinally());
- }
-
- if (state_ == Try) {
- if (!emitTryEnd())
- return false;
- } else {
- MOZ_ASSERT(state_ == Catch);
- if (!emitCatchEnd(false))
- return false;
- }
-
- MOZ_ASSERT(bce_->stackDepth == depth_);
-
- if (!bce_->emitJumpTarget(&finallyStart_))
- return false;
-
- if (controlInfo_) {
- // Fix up the gosubs that might have been emitted before non-local
- // jumps to the finally code.
- bce_->patchJumpsToTarget(controlInfo_->gosubs, finallyStart_);
-
- // Indicate that we're emitting a subroutine body.
- controlInfo_->setEmittingSubroutine();
- }
- if (finallyPos) {
- if (!bce_->updateSourceCoordNotes(finallyPos.value()))
- return false;
- }
- if (!bce_->emit1(JSOP_FINALLY))
- return false;
-
- if (retValKind_ == UseRetVal) {
- if (!bce_->emit1(JSOP_GETRVAL))
- return false;
-
- // Clear the frame's return value to make break/continue return
- // correct value even if there's no other statement before them:
- //
- // eval("x: try { 1 } finally { break x; }"); // undefined, not 1
- if (!bce_->emit1(JSOP_UNDEFINED))
- return false;
- if (!bce_->emit1(JSOP_SETRVAL))
- return false;
- }
-
- state_ = Finally;
- return true;
- }
-
- private:
- bool emitFinallyEnd() {
- MOZ_ASSERT(state_ == Finally);
-
- if (retValKind_ == UseRetVal) {
- if (!bce_->emit1(JSOP_SETRVAL))
- return false;
- }
-
- if (!bce_->emit1(JSOP_RETSUB))
- return false;
-
- bce_->hasTryFinally = true;
- return true;
- }
-
- public:
- bool emitEnd() {
- if (state_ == Catch) {
- MOZ_ASSERT(!hasFinally());
- if (!emitCatchEnd(false))
- return false;
- } else {
- MOZ_ASSERT(state_ == Finally);
- MOZ_ASSERT(hasFinally());
- if (!emitFinallyEnd())
- return false;
- }
-
- MOZ_ASSERT(bce_->stackDepth == depth_);
-
- // ReconstructPCStack needs a NOP here to mark the end of the last
- // catch block.
- if (!bce_->emit1(JSOP_NOP))
- return false;
-
- // Fix up the end-of-try/catch jumps to come here.
- if (!bce_->emitJumpTargetAndPatch(catchAndFinallyJump_))
- return false;
-
- // Add the try note last, to let post-order give us the right ordering
- // (first to last for a given nesting level, inner to outer by level).
- if (hasCatch()) {
- if (!bce_->tryNoteList.append(JSTRY_CATCH, depth_, tryStart_, tryEnd_.offset))
- return false;
- }
-
- // If we've got a finally, mark the try+catch region with an additional
- // trynote to catch exceptions (re)thrown from a catch block or
- // for the try{}finally{} case.
- if (hasFinally()) {
- if (!bce_->tryNoteList.append(JSTRY_FINALLY, depth_, tryStart_, finallyStart_.offset))
- return false;
- }
-
- state_ = End;
- return true;
- }
-};
-
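For orientation, the calling protocol this class expects looks roughly like the sketch below. The emitTree() calls and the tryBody/catchBody/finallyBody/finallyPos names are illustrative placeholders, not part of this patch; each emit*() call advances the Start/Try/Catch/Finally/End state machine asserted on above.

    TryEmitter tryCatch(this, TryEmitter::TryCatchFinally);
    if (!tryCatch.emitTry())
        return false;
    if (!emitTree(tryBody))                      // bytecode for the try block
        return false;
    if (!tryCatch.emitCatch())
        return false;
    if (!emitTree(catchBody))                    // bytecode for the catch block
        return false;
    if (!tryCatch.emitFinally(Some(finallyPos)))
        return false;
    if (!emitTree(finallyBody))                  // bytecode for the finally block
        return false;
    if (!tryCatch.emitEnd())
        return false;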
// Class for emitting bytecode for optional expressions.
class MOZ_RAII OptionalEmitter
{
@@ -1865,198 +163,6 @@ class MOZ_RAII OptionalEmitter
MOZ_MUST_USE bool emitOptionalJumpTarget(JSOp op, Kind kind = Kind::Other);
};
-class ForOfLoopControl : public LoopControl
-{
- using EmitterScope = BytecodeEmitter::EmitterScope;
-
- // The stack depth of the iterator.
- int32_t iterDepth_;
-
- // for-of loops, when throwing from non-iterator code (i.e. from the body
- // or from evaluating the LHS of the loop condition), need to call
- // IteratorClose. This is done by enclosing the non-iterator code in a
- // try-catch and calling IteratorClose in the `catch` block.
- // If IteratorClose itself throws, we must not re-call IteratorClose. Since
- // non-local jumps like break and return call IteratorClose, whenever a
- // non-local jump is emitted, we must tell the catch block not to perform
- // IteratorClose.
- //
- // for (x of y) {
- // // Operations for iterator (IteratorNext etc) are outside of
- // // try-block.
- // try {
- // ...
- // if (...) {
- // // Before non-local jump, clear iterator on the stack to tell
- // // catch block not to perform IteratorClose.
- // tmpIterator = iterator;
- // iterator = undefined;
- // IteratorClose(tmpIterator, { break });
- // break;
- // }
- // ...
- // } catch (e) {
- // // Just throw again when iterator is cleared by non-local jump.
- // if (iterator === undefined)
- // throw e;
- // IteratorClose(iterator, { throw, e });
- // }
- // }
- Maybe<TryEmitter> tryCatch_;
-
- // Used to track if any yields were emitted between calls to
- // emitBeginCodeNeedingIteratorClose and emitEndCodeNeedingIteratorClose.
- uint32_t numYieldsAtBeginCodeNeedingIterClose_;
-
- bool allowSelfHosted_;
-
- IteratorKind iterKind_;
-
- public:
- ForOfLoopControl(BytecodeEmitter* bce, int32_t iterDepth, bool allowSelfHosted,
- IteratorKind iterKind)
- : LoopControl(bce, StatementKind::ForOfLoop),
- iterDepth_(iterDepth),
- numYieldsAtBeginCodeNeedingIterClose_(UINT32_MAX),
- allowSelfHosted_(allowSelfHosted),
- iterKind_(iterKind)
- {
- }
-
- bool emitBeginCodeNeedingIteratorClose(BytecodeEmitter* bce) {
- tryCatch_.emplace(bce, TryEmitter::TryCatch, TryEmitter::DontUseRetVal,
- TryEmitter::DontUseControl);
-
- if (!tryCatch_->emitTry())
- return false;
-
- MOZ_ASSERT(numYieldsAtBeginCodeNeedingIterClose_ == UINT32_MAX);
- numYieldsAtBeginCodeNeedingIterClose_ = bce->yieldAndAwaitOffsetList.numYields;
-
- return true;
- }
-
- bool emitEndCodeNeedingIteratorClose(BytecodeEmitter* bce) {
- if (!tryCatch_->emitCatch()) // ITER ...
- return false;
-
- if (!bce->emit1(JSOP_EXCEPTION)) // ITER ... EXCEPTION
- return false;
- unsigned slotFromTop = bce->stackDepth - iterDepth_;
- if (!bce->emitDupAt(slotFromTop)) // ITER ... EXCEPTION ITER
- return false;
-
- // If ITER is undefined, it means the exception was thrown by
- // IteratorClose for a non-local jump, and we shouldn't perform
- // IteratorClose again here.
- if (!bce->emit1(JSOP_UNDEFINED)) // ITER ... EXCEPTION ITER UNDEF
- return false;
- if (!bce->emit1(JSOP_STRICTNE)) // ITER ... EXCEPTION NE
- return false;
-
- InternalIfEmitter ifIteratorIsNotClosed(bce);
- if (!ifIteratorIsNotClosed.emitThen()) // ITER ... EXCEPTION
- return false;
-
- MOZ_ASSERT(slotFromTop == unsigned(bce->stackDepth - iterDepth_));
- if (!bce->emitDupAt(slotFromTop)) // ITER ... EXCEPTION ITER
- return false;
- if (!emitIteratorCloseInInnermostScope(bce, CompletionKind::Throw))
- return false; // ITER ... EXCEPTION
-
- if (!ifIteratorIsNotClosed.emitEnd()) // ITER ... EXCEPTION
- return false;
-
- if (!bce->emit1(JSOP_THROW)) // ITER ...
- return false;
-
- // If any yields were emitted, then this for-of loop is inside a star
- // generator and must handle the case of Generator.return. Like in
- // yield*, it is handled with a finally block.
- uint32_t numYieldsEmitted = bce->yieldAndAwaitOffsetList.numYields;
- if (numYieldsEmitted > numYieldsAtBeginCodeNeedingIterClose_) {
- if (!tryCatch_->emitFinally())
- return false;
-
- InternalIfEmitter ifGeneratorClosing(bce);
- if (!bce->emit1(JSOP_ISGENCLOSING)) // ITER ... FTYPE FVALUE CLOSING
- return false;
- if (!ifGeneratorClosing.emitThen()) // ITER ... FTYPE FVALUE
- return false;
- if (!bce->emitDupAt(slotFromTop + 1)) // ITER ... FTYPE FVALUE ITER
- return false;
- if (!emitIteratorCloseInInnermostScope(bce, CompletionKind::Normal))
- return false; // ITER ... FTYPE FVALUE
- if (!ifGeneratorClosing.emitEnd()) // ITER ... FTYPE FVALUE
- return false;
- }
-
- if (!tryCatch_->emitEnd())
- return false;
-
- tryCatch_.reset();
- numYieldsAtBeginCodeNeedingIterClose_ = UINT32_MAX;
-
- return true;
- }
-
- bool emitIteratorCloseInInnermostScope(BytecodeEmitter* bce,
- CompletionKind completionKind = CompletionKind::Normal) {
- return emitIteratorCloseInScope(bce, *bce->innermostEmitterScope(), completionKind);
- }
-
- bool emitIteratorCloseInScope(BytecodeEmitter* bce,
- EmitterScope& currentScope,
- CompletionKind completionKind = CompletionKind::Normal) {
- ptrdiff_t start = bce->offset();
- if (!bce->emitIteratorCloseInScope(currentScope, iterKind_, completionKind,
- allowSelfHosted_))
- {
- return false;
- }
- ptrdiff_t end = bce->offset();
- return bce->tryNoteList.append(JSTRY_FOR_OF_ITERCLOSE, 0, start, end);
- }
-
- bool emitPrepareForNonLocalJumpFromScope(BytecodeEmitter* bce,
- EmitterScope& currentScope,
- bool isTarget) {
- // Pop unnecessary values from the stack. Effectively this means
- // leaving the try-catch block. However, performing IteratorClose can
- // reach the depth of the try-catch and effectively re-enter the
- // try-catch block.
- if (!bce->emit1(JSOP_POP)) // ITER RESULT
- return false;
- if (!bce->emit1(JSOP_POP)) // ITER
- return false;
-
- // Clear the ITER slot on the stack to tell the catch block to avoid
- // performing IteratorClose again.
- if (!bce->emit1(JSOP_UNDEFINED)) // ITER UNDEF
- return false;
- if (!bce->emit1(JSOP_SWAP)) // UNDEF ITER
- return false;
-
- if (!emitIteratorCloseInScope(bce, currentScope, CompletionKind::Normal)) // UNDEF
- return false;
-
- if (isTarget) {
- // At the level of the target block, there's bytecode after the
- // loop that will pop the iterator and the value, so push
- // undefineds to balance the stack.
- if (!bce->emit1(JSOP_UNDEFINED)) // UNDEF UNDEF
- return false;
- if (!bce->emit1(JSOP_UNDEFINED)) // UNDEF UNDEF UNDEF
- return false;
- } else {
- if (!bce->emit1(JSOP_POP)) //
- return false;
- }
-
- return true;
- }
-};
-
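A sketch of the intended calling pattern for this class (variable names are placeholders and the surrounding for-of emission is omitted): the loop body is bracketed by the begin/end calls so that a throw from the body lands in the catch block that closes the iterator, while the iterator operations themselves stay outside the protected region.

    ForOfLoopControl loopInfo(this, iterDepth, allowSelfHostedIter, iterKind);
    // ... IteratorNext and the done checks are emitted outside the try block ...
    if (!loopInfo.emitBeginCodeNeedingIteratorClose(this))
        return false;
    if (!emitTree(forBody))                      // exceptions here must trigger IteratorClose
        return false;
    if (!loopInfo.emitEndCodeNeedingIteratorClose(this))
        return false;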
BytecodeEmitter::BytecodeEmitter(BytecodeEmitter* parent,
Parser<FullParseHandler>* parser, SharedContext* sc,
HandleScript script, Handle<LazyScript*> lazyScript,
@@ -2117,13 +223,6 @@ BytecodeEmitter::init()
return atomIndices.acquire(cx);
}
-template <typename Predicate /* (NestableControl*) -> bool */>
-BytecodeEmitter::NestableControl*
-BytecodeEmitter::findInnermostNestableControl(Predicate predicate) const
-{
- return NestableControl::findNearest(innermostNestableControl, predicate);
-}
-
template <typename T>
T*
BytecodeEmitter::findInnermostNestableControl() const
@@ -2147,7 +246,7 @@ BytecodeEmitter::lookupName(JSAtom* name)
Maybe<NameLocation>
BytecodeEmitter::locationOfNameBoundInScope(JSAtom* name, EmitterScope* target)
{
- return innermostEmitterScope()->locationBoundInScope(this, name, target);
+ return innermostEmitterScope()->locationBoundInScope(name, target);
}
Maybe<NameLocation>
@@ -2156,7 +255,7 @@ BytecodeEmitter::locationOfNameBoundInFunctionScope(JSAtom* name, EmitterScope*
EmitterScope* funScope = source;
while (!funScope->scope(this)->is<FunctionScope>())
funScope = funScope->enclosingInFrame();
- return source->locationBoundInScope(this, name, funScope);
+ return source->locationBoundInScope(name, funScope);
}
bool
@@ -2605,7 +704,7 @@ class NonLocalExitControl
NonLocalExitControl(const NonLocalExitControl&) = delete;
- MOZ_MUST_USE bool leaveScope(BytecodeEmitter::EmitterScope* scope);
+ MOZ_MUST_USE bool leaveScope(EmitterScope* scope);
public:
NonLocalExitControl(BytecodeEmitter* bce, Kind kind)
@@ -2622,7 +721,7 @@ class NonLocalExitControl
bce_->stackDepth = savedDepth_;
}
- MOZ_MUST_USE bool prepareForNonLocalJump(BytecodeEmitter::NestableControl* target);
+ MOZ_MUST_USE bool prepareForNonLocalJump(NestableControl* target);
MOZ_MUST_USE bool prepareForNonLocalJumpToOutermost() {
return prepareForNonLocalJump(nullptr);
@@ -2630,7 +729,7 @@ class NonLocalExitControl
};
bool
-NonLocalExitControl::leaveScope(BytecodeEmitter::EmitterScope* es)
+NonLocalExitControl::leaveScope(EmitterScope* es)
{
if (!es->leave(bce_, /* nonLocal = */ true))
return false;
@@ -2653,11 +752,8 @@ NonLocalExitControl::leaveScope(BytecodeEmitter::EmitterScope* es)
* Emit additional bytecode(s) for non-local jumps.
*/
bool
-NonLocalExitControl::prepareForNonLocalJump(BytecodeEmitter::NestableControl* target)
+NonLocalExitControl::prepareForNonLocalJump(NestableControl* target)
{
- using NestableControl = BytecodeEmitter::NestableControl;
- using EmitterScope = BytecodeEmitter::EmitterScope;
-
EmitterScope* es = bce_->innermostEmitterScope();
int npops = 0;
@@ -3534,6 +1630,19 @@ BytecodeEmitter::reportError(ParseNode* pn, unsigned errorNumber, ...)
}
bool
+BytecodeEmitter::reportError(const mozilla::Maybe<uint32_t>& maybeOffset, unsigned errorNumber, ...)
+{
+ uint32_t offset = maybeOffset ? *maybeOffset : tokenStream().currentToken().pos.begin;
+
+ va_list args;
+ va_start(args, errorNumber);
+ bool result = tokenStream().reportCompileErrorNumberVA(nullptr, offset, JSREPORT_ERROR,
+ errorNumber, args);
+ va_end(args);
+ return result;
+}
+
+bool
BytecodeEmitter::reportExtraWarning(ParseNode* pn, unsigned errorNumber, ...)
{
TokenPos pos = pn ? pn->pn_pos : tokenStream().currentToken().pos;
@@ -3547,6 +1656,19 @@ BytecodeEmitter::reportExtraWarning(ParseNode* pn, unsigned errorNumber, ...)
}
bool
+BytecodeEmitter::reportExtraWarning(const mozilla::Maybe<uint32_t>& maybeOffset, unsigned errorNumber, ...)
+{
+ uint32_t offset = maybeOffset ? *maybeOffset : tokenStream().currentToken().pos.begin;
+
+ va_list args;
+ va_start(args, errorNumber);
+ bool result = tokenStream().reportExtraWarningErrorNumberVA(nullptr, offset,
+ errorNumber, args);
+ va_end(args);
+ return result;
+}
+
+bool
BytecodeEmitter::reportStrictModeError(ParseNode* pn, unsigned errorNumber, ...)
{
TokenPos pos = pn ? pn->pn_pos : tokenStream().currentToken().pos;
@@ -3935,33 +2057,27 @@ BytecodeEmitter::emitNumberOp(double dval)
MOZ_NEVER_INLINE bool
BytecodeEmitter::emitSwitch(ParseNode* pn)
{
- ParseNode* cases = pn->pn_right;
- MOZ_ASSERT(cases->isKind(PNK_LEXICALSCOPE) || cases->isKind(PNK_STATEMENTLIST));
+ ParseNode* lexical = pn->pn_right;
+ MOZ_ASSERT(lexical->isKind(PNK_LEXICALSCOPE));
+ ParseNode* cases = lexical->scopeBody();
+ MOZ_ASSERT(cases->isKind(PNK_STATEMENTLIST));
- // Emit code for the discriminant.
+ SwitchEmitter se(this);
+ if (!se.emitDiscriminant(Some(pn->pn_pos.begin)))
+ return false;
if (!emitTree(pn->pn_left))
return false;
// Enter the scope before pushing the switch BreakableControl since all
// breaks are under this scope.
- Maybe<TDZCheckCache> tdzCache;
- Maybe<EmitterScope> emitterScope;
- if (cases->isKind(PNK_LEXICALSCOPE)) {
- if (!cases->isEmptyScope()) {
- tdzCache.emplace(this);
- emitterScope.emplace(this);
- if (!emitterScope->enterLexical(this, ScopeKind::Lexical, cases->scopeBindings()))
- return false;
- }
-
- // Advance |cases| to refer to the switch case list.
- cases = cases->scopeBody();
+ if (!lexical->isEmptyScope()) {
+ if (!se.emitLexical(lexical->scopeBindings()))
+ return false;
// A switch statement may contain hoisted functions inside its
// cases. The PNX_FUNCDEFS flag is propagated from the STATEMENTLIST
// bodies of the cases to the case list.
if (cases->pn_xflags & PNX_FUNCDEFS) {
- MOZ_ASSERT(emitterScope);
for (ParseNode* caseNode = cases->pn_head; caseNode; caseNode = caseNode->pn_next) {
if (caseNode->pn_right->pn_xflags & PNX_FUNCDEFS) {
if (!emitHoistedFunctionsInList(caseNode->pn_right))
@@ -3969,302 +2085,103 @@ BytecodeEmitter::emitSwitch(ParseNode* pn)
}
}
}
+ } else {
+ MOZ_ASSERT(!(cases->pn_xflags & PNX_FUNCDEFS));
}
- // After entering the scope, push the switch control.
- BreakableControl controlInfo(this, StatementKind::Switch);
-
- ptrdiff_t top = offset();
-
- // Switch bytecodes run from here till end of final case.
+ SwitchEmitter::TableGenerator tableGen(this);
uint32_t caseCount = cases->pn_count;
- if (caseCount > JS_BIT(16)) {
- parser->tokenStream.reportError(JSMSG_TOO_MANY_CASES);
- return false;
- }
-
- // Try for most optimal, fall back if not dense ints.
- JSOp switchOp = JSOP_TABLESWITCH;
- uint32_t tableLength = 0;
- int32_t low, high;
- bool hasDefault = false;
CaseClause* firstCase = cases->pn_head ? &cases->pn_head->as<CaseClause>() : nullptr;
- if (caseCount == 0 ||
- (caseCount == 1 && (hasDefault = firstCase->isDefault())))
- {
+ if (caseCount == 0) {
+ tableGen.finish(0);
+ } else if (caseCount == 1 && firstCase->isDefault()) {
caseCount = 0;
- low = 0;
- high = -1;
+ tableGen.finish(0);
} else {
- Vector<jsbitmap, 128, SystemAllocPolicy> intmap;
- int32_t intmapBitLength = 0;
-
- low = JSVAL_INT_MAX;
- high = JSVAL_INT_MIN;
-
for (CaseClause* caseNode = firstCase; caseNode; caseNode = caseNode->next()) {
if (caseNode->isDefault()) {
- hasDefault = true;
caseCount--; // one of the "cases" was the default
continue;
}
- if (switchOp == JSOP_CONDSWITCH)
+ if (tableGen.isInvalid())
continue;
- MOZ_ASSERT(switchOp == JSOP_TABLESWITCH);
-
ParseNode* caseValue = caseNode->caseExpression();
if (caseValue->getKind() != PNK_NUMBER) {
- switchOp = JSOP_CONDSWITCH;
+ tableGen.setInvalid();
continue;
}
int32_t i;
if (!NumberIsInt32(caseValue->pn_dval, &i)) {
- switchOp = JSOP_CONDSWITCH;
+ tableGen.setInvalid();
continue;
}
- if (unsigned(i + int(JS_BIT(15))) >= unsigned(JS_BIT(16))) {
- switchOp = JSOP_CONDSWITCH;
- continue;
- }
- if (i < low)
- low = i;
- if (i > high)
- high = i;
-
- // Check for duplicates, which require a JSOP_CONDSWITCH.
- // We bias i by 65536 if it's negative, and hope that's a rare
- // case (because it requires a malloc'd bitmap).
- if (i < 0)
- i += JS_BIT(16);
- if (i >= intmapBitLength) {
- size_t newLength = (i / JS_BITMAP_NBITS) + 1;
- if (!intmap.resize(newLength))
- return false;
- intmapBitLength = newLength * JS_BITMAP_NBITS;
- }
- if (JS_TEST_BIT(intmap, i)) {
- switchOp = JSOP_CONDSWITCH;
- continue;
- }
- JS_SET_BIT(intmap, i);
+ if (!tableGen.addNumber(i))
+ return false;
}
- // Compute table length and select condswitch instead if overlarge or
- // more than half-sparse.
- if (switchOp == JSOP_TABLESWITCH) {
- tableLength = uint32_t(high - low + 1);
- if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
- switchOp = JSOP_CONDSWITCH;
- }
+ tableGen.finish(caseCount);
}
+ if (!se.validateCaseCount(caseCount))
+ return false;
- // The note has one or two offsets: first tells total switch code length;
- // second (if condswitch) tells offset to first JSOP_CASE.
- unsigned noteIndex;
- size_t switchSize;
- if (switchOp == JSOP_CONDSWITCH) {
- // 0 bytes of immediate for unoptimized switch.
- switchSize = 0;
- if (!newSrcNote3(SRC_CONDSWITCH, 0, 0, &noteIndex))
+ bool isTableSwitch = tableGen.isValid();
+ if (isTableSwitch) {
+ if (!se.emitTable(tableGen))
return false;
} else {
- MOZ_ASSERT(switchOp == JSOP_TABLESWITCH);
-
- // 3 offsets (len, low, high) before the table, 1 per entry.
- switchSize = size_t(JUMP_OFFSET_LEN * (3 + tableLength));
- if (!newSrcNote2(SRC_TABLESWITCH, 0, &noteIndex))
+ if (!se.emitCond())
return false;
- }
-
- // Emit switchOp followed by switchSize bytes of jump or lookup table.
- MOZ_ASSERT(top == offset());
- if (!emitN(switchOp, switchSize))
- return false;
-
- Vector<CaseClause*, 32, SystemAllocPolicy> table;
-
- JumpList condSwitchDefaultOff;
- if (switchOp == JSOP_CONDSWITCH) {
- unsigned caseNoteIndex;
- bool beforeCases = true;
- ptrdiff_t lastCaseOffset = -1;
-
- // The case conditions need their own TDZ cache since they might not
- // all execute.
- TDZCheckCache tdzCache(this);
// Emit code for evaluating cases and jumping to case statements.
for (CaseClause* caseNode = firstCase; caseNode; caseNode = caseNode->next()) {
+ if (caseNode->isDefault())
+ continue;
+
ParseNode* caseValue = caseNode->caseExpression();
// If the expression is a literal, suppress line number emission so
// that debugging works more naturally.
- if (caseValue) {
- if (!emitTree(caseValue, ValueUsage::WantValue,
- caseValue->isLiteral() ? SUPPRESS_LINENOTE : EMIT_LINENOTE))
- {
- return false;
- }
- }
-
- if (!beforeCases) {
- // prevCase is the previous JSOP_CASE's bytecode offset.
- if (!setSrcNoteOffset(caseNoteIndex, 0, offset() - lastCaseOffset))
- return false;
- }
- if (!caseValue) {
- // This is the default clause.
- continue;
- }
-
- if (!newSrcNote2(SRC_NEXTCASE, 0, &caseNoteIndex))
- return false;
-
- // The case clauses are produced before any of the case body. The
- // JumpList is saved on the parsed tree, then later restored and
- // patched when generating the cases body.
- JumpList caseJump;
- if (!emitJump(JSOP_CASE, &caseJump))
+ if (!emitTree(caseValue, ValueUsage::WantValue,
+ caseValue->isLiteral() ? SUPPRESS_LINENOTE : EMIT_LINENOTE))
+ {
return false;
- caseNode->setOffset(caseJump.offset);
- lastCaseOffset = caseJump.offset;
-
- if (beforeCases) {
- // Switch note's second offset is to first JSOP_CASE.
- unsigned noteCount = notes().length();
- if (!setSrcNoteOffset(noteIndex, 1, lastCaseOffset - top))
- return false;
- unsigned noteCountDelta = notes().length() - noteCount;
- if (noteCountDelta != 0)
- caseNoteIndex += noteCountDelta;
- beforeCases = false;
}
- }
-
- // If we didn't have an explicit default (which could fall in between
- // cases, preventing us from fusing this setSrcNoteOffset with the call
- // in the loop above), link the last case to the implicit default for
- // the benefit of IonBuilder.
- if (!hasDefault &&
- !beforeCases &&
- !setSrcNoteOffset(caseNoteIndex, 0, offset() - lastCaseOffset))
- {
- return false;
- }
-
- // Emit default even if no explicit default statement.
- if (!emitJump(JSOP_DEFAULT, &condSwitchDefaultOff))
- return false;
- } else {
- MOZ_ASSERT(switchOp == JSOP_TABLESWITCH);
-
- // skip default offset.
- jsbytecode* pc = code(top + JUMP_OFFSET_LEN);
-
- // Fill in switch bounds, which we know fit in 16-bit offsets.
- SET_JUMP_OFFSET(pc, low);
- pc += JUMP_OFFSET_LEN;
- SET_JUMP_OFFSET(pc, high);
- pc += JUMP_OFFSET_LEN;
-
- if (tableLength != 0) {
- if (!table.growBy(tableLength))
+ if (!se.emitCaseJump())
return false;
-
- for (CaseClause* caseNode = firstCase; caseNode; caseNode = caseNode->next()) {
- if (ParseNode* caseValue = caseNode->caseExpression()) {
- MOZ_ASSERT(caseValue->isKind(PNK_NUMBER));
-
- int32_t i = int32_t(caseValue->pn_dval);
- MOZ_ASSERT(double(i) == caseValue->pn_dval);
-
- i -= low;
- MOZ_ASSERT(uint32_t(i) < tableLength);
- MOZ_ASSERT(!table[i]);
- table[i] = caseNode;
- }
- }
}
}
- JumpTarget defaultOffset{ -1 };
-
// Emit code for each case's statements.
for (CaseClause* caseNode = firstCase; caseNode; caseNode = caseNode->next()) {
- if (switchOp == JSOP_CONDSWITCH && !caseNode->isDefault()) {
- // The case offset got saved in the caseNode structure after
- // emitting the JSOP_CASE jump instruction above.
- JumpList caseCond;
- caseCond.offset = caseNode->offset();
- if (!emitJumpTargetAndPatch(caseCond))
+ if (caseNode->isDefault()) {
+ if (!se.emitDefaultBody())
return false;
- }
-
- JumpTarget here;
- if (!emitJumpTarget(&here))
- return false;
- if (caseNode->isDefault())
- defaultOffset = here;
+ } else {
+ if (isTableSwitch) {
+ ParseNode* caseValue = caseNode->caseExpression();
+ MOZ_ASSERT(caseValue->isKind(PNK_NUMBER));
- // If this is emitted as a TABLESWITCH, we'll need to know this case's
- // offset later when emitting the table. Store it in the node's
- // pn_offset (giving the field a different meaning vs. how we used it
- // on the immediately preceding line of code).
- caseNode->setOffset(here.offset);
+ int32_t i = int32_t(caseValue->pn_dval);
+ MOZ_ASSERT(double(i) == caseValue->pn_dval);
- TDZCheckCache tdzCache(this);
+ if (!se.emitCaseBody(i, tableGen))
+ return false;
+ } else {
+ if (!se.emitCaseBody())
+ return false;
+ }
+ }
if (!emitTree(caseNode->statementList()))
return false;
}
- if (!hasDefault) {
- // If no default case, offset for default is to end of switch.
- if (!emitJumpTarget(&defaultOffset))
- return false;
- }
- MOZ_ASSERT(defaultOffset.offset != -1);
-
- // Set the default offset (to end of switch if no default).
- jsbytecode* pc;
- if (switchOp == JSOP_CONDSWITCH) {
- pc = nullptr;
- patchJumpsToTarget(condSwitchDefaultOff, defaultOffset);
- } else {
- MOZ_ASSERT(switchOp == JSOP_TABLESWITCH);
- pc = code(top);
- SET_JUMP_OFFSET(pc, defaultOffset.offset - top);
- pc += JUMP_OFFSET_LEN;
- }
-
- // Set the SRC_SWITCH note's offset operand to tell end of switch.
- if (!setSrcNoteOffset(noteIndex, 0, lastNonJumpTargetOffset() - top))
- return false;
-
- if (switchOp == JSOP_TABLESWITCH) {
- // Skip over the already-initialized switch bounds.
- pc += 2 * JUMP_OFFSET_LEN;
-
- // Fill in the jump table, if there is one.
- for (uint32_t i = 0; i < tableLength; i++) {
- CaseClause* caseNode = table[i];
- ptrdiff_t off = caseNode ? caseNode->offset() - top : 0;
- SET_JUMP_OFFSET(pc, off);
- pc += JUMP_OFFSET_LEN;
- }
- }
-
- // Patch breaks before leaving the scope, as all breaks are under the
- // lexical scope if it exists.
- if (!controlInfo.patchBreaks(this))
- return false;
-
- if (emitterScope && !emitterScope->leave(this))
+ if (!se.emitEnd())
return false;
return true;
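Condensed from the rewritten emitSwitch above, the SwitchEmitter protocol is roughly the following sketch; caseCount, discriminant, and the per-case loops are placeholders, and the lexical-scope and TableGenerator set-up shown in the real code is elided.

    SwitchEmitter se(this);
    if (!se.emitDiscriminant(Some(pn->pn_pos.begin)))
        return false;
    if (!emitTree(discriminant))                 // value being switched on
        return false;
    if (!se.validateCaseCount(caseCount))
        return false;
    if (tableGen.isValid()) {
        if (!se.emitTable(tableGen))             // dense int32 cases
            return false;
    } else {
        if (!se.emitCond())                      // anything else
            return false;
        // for each non-default case: emit the case expression, then se.emitCaseJump()
    }
    // for each case: se.emitCaseBody(...) or se.emitDefaultBody(), then its statements
    if (!se.emitEnd())
        return false;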
diff --git a/js/src/frontend/BytecodeEmitter.h b/js/src/frontend/BytecodeEmitter.h
index 47400658dc..79c36c5004 100644
--- a/js/src/frontend/BytecodeEmitter.h
+++ b/js/src/frontend/BytecodeEmitter.h
@@ -122,14 +122,13 @@ typedef Vector<jssrcnote, 64> SrcNotesVector;
class CallOrNewEmitter;
class ElemOpEmitter;
+class EmitterScope;
+class NestableControl;
class PropOpEmitter;
class TDZCheckCache;
struct MOZ_STACK_CLASS BytecodeEmitter
{
- class NestableControl;
- class EmitterScope;
-
SharedContext* const sc; /* context shared between parsing and bytecode generation */
ExclusiveContext* const cx;
@@ -257,9 +256,6 @@ struct MOZ_STACK_CLASS BytecodeEmitter
MOZ_MUST_USE bool init();
- template <typename Predicate /* (NestableControl*) -> bool */>
- NestableControl* findInnermostNestableControl(Predicate predicate) const;
-
template <typename T>
T* findInnermostNestableControl() const;
@@ -357,7 +353,9 @@ struct MOZ_STACK_CLASS BytecodeEmitter
}
bool reportError(ParseNode* pn, unsigned errorNumber, ...);
+ bool reportError(const mozilla::Maybe<uint32_t>& maybeOffset, unsigned errorNumber, ...);
bool reportExtraWarning(ParseNode* pn, unsigned errorNumber, ...);
+ bool reportExtraWarning(const mozilla::Maybe<uint32_t>& maybeOffset, unsigned errorNumber, ...);
bool reportStrictModeError(ParseNode* pn, unsigned errorNumber, ...);
// If pn contains a useful expression, return true with *answer set to true.
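A hypothetical call site for the new overloads (the guard and the node are invented for illustration): Some(offset) reports at a known source position, while Nothing() falls back to the tokenizer's current token, per the definitions added in BytecodeEmitter.cpp above.

    if (caseCount > JS_BIT(16))                  // hypothetical check
        return reportError(Some(pn->pn_pos.begin), JSMSG_TOO_MANY_CASES);
    return reportError(Nothing(), JSMSG_TOO_MANY_CASES);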
diff --git a/js/src/frontend/EmitterScope.cpp b/js/src/frontend/EmitterScope.cpp
new file mode 100644
index 0000000000..d6ebfe265f
--- /dev/null
+++ b/js/src/frontend/EmitterScope.cpp
@@ -0,0 +1,1092 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "frontend/EmitterScope.h"
+
+#include "frontend/BytecodeEmitter.h"
+#include "frontend/NameOpEmitter.h"
+#include "frontend/TDZCheckCache.h"
+
+using namespace js;
+using namespace js::frontend;
+
+using mozilla::DebugOnly;
+using mozilla::Maybe;
+using mozilla::Nothing;
+using mozilla::Some;
+
+EmitterScope::EmitterScope(BytecodeEmitter* bce)
+ : Nestable<EmitterScope>(&bce->innermostEmitterScope_),
+ nameCache_(bce->cx->frontendCollectionPool()),
+ hasEnvironment_(false),
+ environmentChainLength_(0),
+ nextFrameSlot_(0),
+ scopeIndex_(ScopeNote::NoScopeIndex),
+ noteIndex_(ScopeNote::NoScopeNoteIndex)
+{}
+
+static inline void
+MarkAllBindingsClosedOver(LexicalScope::Data& data)
+{
+ TrailingNamesArray& names = data.trailingNames;
+ for (uint32_t i = 0; i < data.length; i++)
+ names[i] = BindingName(names[i].name(), true);
+}
+
+static bool
+ScopeKindIsInBody(ScopeKind kind)
+{
+ return kind == ScopeKind::Lexical ||
+ kind == ScopeKind::SimpleCatch ||
+ kind == ScopeKind::Catch ||
+ kind == ScopeKind::With ||
+ kind == ScopeKind::FunctionBodyVar ||
+ kind == ScopeKind::ParameterExpressionVar;
+}
+
+bool
+EmitterScope::ensureCache(BytecodeEmitter* bce)
+{
+ return nameCache_.acquire(bce->cx);
+}
+
+template <typename BindingIter>
+bool
+EmitterScope::checkSlotLimits(BytecodeEmitter* bce, const BindingIter& bi)
+{
+ if (bi.nextFrameSlot() >= LOCALNO_LIMIT ||
+ bi.nextEnvironmentSlot() >= ENVCOORD_SLOT_LIMIT)
+ {
+ return bce->reportError(nullptr, JSMSG_TOO_MANY_LOCALS);
+ }
+ return true;
+}
+
+bool
+EmitterScope::checkEnvironmentChainLength(BytecodeEmitter* bce)
+{
+ uint32_t hops;
+ if (EmitterScope* emitterScope = enclosing(&bce))
+ hops = emitterScope->environmentChainLength_;
+ else
+ hops = bce->sc->compilationEnclosingScope()->environmentChainLength();
+ if (hops >= ENVCOORD_HOPS_LIMIT - 1)
+ return bce->reportError(nullptr, JSMSG_TOO_DEEP, js_function_str);
+ environmentChainLength_ = mozilla::AssertedCast<uint8_t>(hops + 1);
+ return true;
+}
+
+void
+EmitterScope::updateFrameFixedSlots(BytecodeEmitter* bce, const BindingIter& bi)
+{
+ nextFrameSlot_ = bi.nextFrameSlot();
+ if (nextFrameSlot_ > bce->maxFixedSlots)
+ bce->maxFixedSlots = nextFrameSlot_;
+ MOZ_ASSERT_IF(bce->sc->isFunctionBox() &&
+ (bce->sc->asFunctionBox()->isStarGenerator() ||
+ bce->sc->asFunctionBox()->isLegacyGenerator() ||
+ bce->sc->asFunctionBox()->isAsync()),
+ bce->maxFixedSlots == 0);
+}
+
+bool
+EmitterScope::putNameInCache(BytecodeEmitter* bce, JSAtom* name, NameLocation loc)
+{
+ NameLocationMap& cache = *nameCache_;
+ NameLocationMap::AddPtr p = cache.lookupForAdd(name);
+ MOZ_ASSERT(!p);
+ if (!cache.add(p, name, loc)) {
+ ReportOutOfMemory(bce->cx);
+ return false;
+ }
+ return true;
+}
+
+Maybe<NameLocation>
+EmitterScope::lookupInCache(BytecodeEmitter* bce, JSAtom* name)
+{
+ if (NameLocationMap::Ptr p = nameCache_->lookup(name))
+ return Some(p->value().wrapped);
+ if (fallbackFreeNameLocation_ && nameCanBeFree(bce, name))
+ return fallbackFreeNameLocation_;
+ return Nothing();
+}
+
+EmitterScope*
+EmitterScope::enclosing(BytecodeEmitter** bce) const
+{
+ // There is an enclosing scope with access to the same frame.
+ if (EmitterScope* inFrame = enclosingInFrame())
+ return inFrame;
+
+ // We are currently compiling the enclosing script; look in the
+ // enclosing BCE.
+ if ((*bce)->parent) {
+ *bce = (*bce)->parent;
+ return (*bce)->innermostEmitterScopeNoCheck();
+ }
+
+ return nullptr;
+}
+
+Scope*
+EmitterScope::enclosingScope(BytecodeEmitter* bce) const
+{
+ if (EmitterScope* es = enclosing(&bce))
+ return es->scope(bce);
+
+ // The enclosing script is already compiled or the current script is the
+ // global script.
+ return bce->sc->compilationEnclosingScope();
+}
+
+/* static */ bool
+EmitterScope::nameCanBeFree(BytecodeEmitter* bce, JSAtom* name)
+{
+ // '.generator' cannot be accessed by name.
+ return name != bce->cx->names().dotGenerator;
+}
+
+#ifdef DEBUG
+static bool
+NameIsOnEnvironment(Scope* scope, JSAtom* name)
+{
+ for (BindingIter bi(scope); bi; bi++) {
+ // If found, the name must already be on the environment or an import,
+ // or else there is a bug in the closed-over name analysis in the
+ // Parser.
+ if (bi.name() == name) {
+ BindingLocation::Kind kind = bi.location().kind();
+
+ if (bi.hasArgumentSlot()) {
+ JSScript* script = scope->as<FunctionScope>().script();
+ if (!script->strict() && !script->functionHasParameterExprs()) {
+ // Check for duplicate positional formal parameters.
+ for (BindingIter bi2(bi); bi2 && bi2.hasArgumentSlot(); bi2++) {
+ if (bi2.name() == name)
+ kind = bi2.location().kind();
+ }
+ }
+ }
+
+ return kind == BindingLocation::Kind::Global ||
+ kind == BindingLocation::Kind::Environment ||
+ kind == BindingLocation::Kind::Import;
+ }
+ }
+
+ // If not found, assume it's on the global or dynamically accessed.
+ return true;
+}
+#endif
+
+/* static */ NameLocation
+EmitterScope::searchInEnclosingScope(JSAtom* name, Scope* scope, uint8_t hops)
+{
+ for (ScopeIter si(scope); si; si++) {
+ MOZ_ASSERT(NameIsOnEnvironment(si.scope(), name));
+
+ bool hasEnv = si.hasSyntacticEnvironment();
+
+ switch (si.kind()) {
+ case ScopeKind::Function:
+ if (hasEnv) {
+ JSScript* script = si.scope()->as<FunctionScope>().script();
+ if (script->funHasExtensibleScope())
+ return NameLocation::Dynamic();
+
+ for (BindingIter bi(si.scope()); bi; bi++) {
+ if (bi.name() != name)
+ continue;
+
+ BindingLocation bindLoc = bi.location();
+ if (bi.hasArgumentSlot() &&
+ !script->strict() &&
+ !script->functionHasParameterExprs())
+ {
+ // Check for duplicate positional formal parameters.
+ for (BindingIter bi2(bi); bi2 && bi2.hasArgumentSlot(); bi2++) {
+ if (bi2.name() == name)
+ bindLoc = bi2.location();
+ }
+ }
+
+ MOZ_ASSERT(bindLoc.kind() == BindingLocation::Kind::Environment);
+ return NameLocation::EnvironmentCoordinate(bi.kind(), hops, bindLoc.slot());
+ }
+ }
+ break;
+
+ case ScopeKind::FunctionBodyVar:
+ case ScopeKind::ParameterExpressionVar:
+ case ScopeKind::Lexical:
+ case ScopeKind::NamedLambda:
+ case ScopeKind::StrictNamedLambda:
+ case ScopeKind::SimpleCatch:
+ case ScopeKind::Catch:
+ if (hasEnv) {
+ for (BindingIter bi(si.scope()); bi; bi++) {
+ if (bi.name() != name)
+ continue;
+
+ // The name must already have been marked as closed
+ // over. If this assertion is hit, there is a bug in the
+ // name analysis.
+ BindingLocation bindLoc = bi.location();
+ MOZ_ASSERT(bindLoc.kind() == BindingLocation::Kind::Environment);
+ return NameLocation::EnvironmentCoordinate(bi.kind(), hops, bindLoc.slot());
+ }
+ }
+ break;
+
+ case ScopeKind::Module:
+ if (hasEnv) {
+ for (BindingIter bi(si.scope()); bi; bi++) {
+ if (bi.name() != name)
+ continue;
+
+ BindingLocation bindLoc = bi.location();
+
+ // Imports are on the environment but are indirect
+ // bindings and must be accessed dynamically instead of
+ // using an EnvironmentCoordinate.
+ if (bindLoc.kind() == BindingLocation::Kind::Import) {
+ MOZ_ASSERT(si.kind() == ScopeKind::Module);
+ return NameLocation::Import();
+ }
+
+ MOZ_ASSERT(bindLoc.kind() == BindingLocation::Kind::Environment);
+ return NameLocation::EnvironmentCoordinate(bi.kind(), hops, bindLoc.slot());
+ }
+ }
+ break;
+
+ case ScopeKind::Eval:
+ case ScopeKind::StrictEval:
+ // As an optimization, if the eval doesn't have its own var
+ // environment and its immediate enclosing scope is a global
+ // scope, all accesses are global.
+ if (!hasEnv && si.scope()->enclosing()->is<GlobalScope>())
+ return NameLocation::Global(BindingKind::Var);
+ return NameLocation::Dynamic();
+
+ case ScopeKind::Global:
+ return NameLocation::Global(BindingKind::Var);
+
+ case ScopeKind::With:
+ case ScopeKind::NonSyntactic:
+ return NameLocation::Dynamic();
+ }
+
+ if (hasEnv) {
+ MOZ_ASSERT(hops < ENVCOORD_HOPS_LIMIT - 1);
+ hops++;
+ }
+ }
+
+ MOZ_CRASH("Malformed scope chain");
+}
+
+NameLocation
+EmitterScope::searchAndCache(BytecodeEmitter* bce, JSAtom* name)
+{
+ Maybe<NameLocation> loc;
+ uint8_t hops = hasEnvironment() ? 1 : 0;
+ DebugOnly<bool> inCurrentScript = enclosingInFrame();
+
+ // Start searching in the current compilation.
+ for (EmitterScope* es = enclosing(&bce); es; es = es->enclosing(&bce)) {
+ loc = es->lookupInCache(bce, name);
+ if (loc) {
+ if (loc->kind() == NameLocation::Kind::EnvironmentCoordinate)
+ *loc = loc->addHops(hops);
+ break;
+ }
+
+ if (es->hasEnvironment())
+ hops++;
+
+#ifdef DEBUG
+ if (!es->enclosingInFrame())
+ inCurrentScript = false;
+#endif
+ }
+
+ // If the name is not found in the current compilation, walk the Scope
+ // chain encompassing the compilation.
+ if (!loc) {
+ inCurrentScript = false;
+ loc = Some(searchInEnclosingScope(name, bce->sc->compilationEnclosingScope(), hops));
+ }
+
+ // Each script has its own frame. A free name that is accessed
+ // from an inner script must not be a frame slot access. If this
+ // assertion is hit, it is a bug in the free name analysis in the
+ // parser.
+ MOZ_ASSERT_IF(!inCurrentScript, loc->kind() != NameLocation::Kind::FrameSlot);
+
+ // It is always correct to not cache the location. Ignore OOMs to make
+ // lookups infallible.
+ if (!putNameInCache(bce, name, *loc))
+ bce->cx->recoverFromOutOfMemory();
+
+ return *loc;
+}
+
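For emitter code that needs a name's storage location, the entry point is EmitterScope::lookup below, which tries the per-scope cache first and falls back to searchAndCache above; a minimal sketch, assuming bce and name are in scope:

    // Resolve a (possibly free) name starting from the innermost scope; the
    // result is cached, so later lookups of the same name are cache hits.
    NameLocation loc = bce->innermostEmitterScope()->lookup(bce, name);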
+template <typename ScopeCreator>
+bool
+EmitterScope::internScope(BytecodeEmitter* bce, ScopeCreator createScope)
+{
+ RootedScope enclosing(bce->cx, enclosingScope(bce));
+ Scope* scope = createScope(bce->cx, enclosing);
+ if (!scope)
+ return false;
+ hasEnvironment_ = scope->hasEnvironment();
+ scopeIndex_ = bce->scopeList.length();
+ return bce->scopeList.append(scope);
+}
+
+template <typename ScopeCreator>
+bool
+EmitterScope::internBodyScope(BytecodeEmitter* bce, ScopeCreator createScope)
+{
+ MOZ_ASSERT(bce->bodyScopeIndex == UINT32_MAX, "There can be only one body scope");
+ bce->bodyScopeIndex = bce->scopeList.length();
+ return internScope(bce, createScope);
+}
+
+bool
+EmitterScope::appendScopeNote(BytecodeEmitter* bce)
+{
+ MOZ_ASSERT(ScopeKindIsInBody(scope(bce)->kind()) && enclosingInFrame(),
+ "Scope notes are not needed for body-level scopes.");
+ noteIndex_ = bce->scopeNoteList.length();
+ return bce->scopeNoteList.append(index(), bce->offset(), bce->inPrologue(),
+ enclosingInFrame() ? enclosingInFrame()->noteIndex()
+ : ScopeNote::NoScopeNoteIndex);
+}
+
+bool
+EmitterScope::deadZoneFrameSlotRange(BytecodeEmitter* bce, uint32_t slotStart, uint32_t slotEnd)
+{
+ // Lexical bindings throw ReferenceErrors if they are used before
+ // initialization. See ES6 8.1.1.1.6.
+ //
+ // For completeness, lexical bindings are initialized in ES6 by calling
+ // InitializeBinding, after which touching the binding will no longer
+ // throw reference errors. See 13.1.11, 9.2.13, 13.6.3.4, 13.6.4.6,
+ // 13.6.4.8, 13.14.5, 15.1.8, and 15.2.0.15.
+ if (slotStart != slotEnd) {
+ if (!bce->emit1(JSOP_UNINITIALIZED))
+ return false;
+ for (uint32_t slot = slotStart; slot < slotEnd; slot++) {
+ if (!bce->emitLocalOp(JSOP_INITLEXICAL, slot))
+ return false;
+ }
+ if (!bce->emit1(JSOP_POP))
+ return false;
+ }
+
+ return true;
+}
+
+void
+EmitterScope::dump(BytecodeEmitter* bce)
+{
+ fprintf(stdout, "EmitterScope [%s] %p\n", ScopeKindString(scope(bce)->kind()), this);
+
+ for (NameLocationMap::Range r = nameCache_->all(); !r.empty(); r.popFront()) {
+ const NameLocation& l = r.front().value();
+
+ JSAutoByteString bytes;
+ if (!AtomToPrintableString(bce->cx, r.front().key(), &bytes))
+ return;
+ if (l.kind() != NameLocation::Kind::Dynamic)
+ fprintf(stdout, " %s %s ", BindingKindString(l.bindingKind()), bytes.ptr());
+ else
+ fprintf(stdout, " %s ", bytes.ptr());
+
+ switch (l.kind()) {
+ case NameLocation::Kind::Dynamic:
+ fprintf(stdout, "dynamic\n");
+ break;
+ case NameLocation::Kind::Global:
+ fprintf(stdout, "global\n");
+ break;
+ case NameLocation::Kind::Intrinsic:
+ fprintf(stdout, "intrinsic\n");
+ break;
+ case NameLocation::Kind::NamedLambdaCallee:
+ fprintf(stdout, "named lambda callee\n");
+ break;
+ case NameLocation::Kind::Import:
+ fprintf(stdout, "import\n");
+ break;
+ case NameLocation::Kind::ArgumentSlot:
+ fprintf(stdout, "arg slot=%u\n", l.argumentSlot());
+ break;
+ case NameLocation::Kind::FrameSlot:
+ fprintf(stdout, "frame slot=%u\n", l.frameSlot());
+ break;
+ case NameLocation::Kind::EnvironmentCoordinate:
+ fprintf(stdout, "environment hops=%u slot=%u\n",
+ l.environmentCoordinate().hops(), l.environmentCoordinate().slot());
+ break;
+ case NameLocation::Kind::DynamicAnnexBVar:
+ fprintf(stdout, "dynamic annex b var\n");
+ break;
+ }
+ }
+
+ fprintf(stdout, "\n");
+}
+
+bool
+EmitterScope::enterLexical(BytecodeEmitter* bce, ScopeKind kind,
+ Handle<LexicalScope::Data*> bindings)
+{
+ MOZ_ASSERT(kind != ScopeKind::NamedLambda && kind != ScopeKind::StrictNamedLambda);
+ MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
+
+ if (!ensureCache(bce))
+ return false;
+
+ // Marks all names as closed over if the context requires it. This
+ // cannot be done in the Parser as we may not know if the context requires
+ // all bindings to be closed over until after parsing is finished. For
+ // example, legacy generators require all bindings to be closed over but
+ // it is unknown if a function is a legacy generator until the first
+ // 'yield' expression is parsed.
+ //
+ // This is not a problem with other scopes, as all other scopes with
+ // bindings are body-level. At the time of their creation, whether or not
+ // the context requires all bindings to be closed over is already known.
+ if (bce->sc->allBindingsClosedOver())
+ MarkAllBindingsClosedOver(*bindings);
+
+ // Resolve bindings.
+ TDZCheckCache* tdzCache = bce->innermostTDZCheckCache;
+ uint32_t firstFrameSlot = frameSlotStart();
+ BindingIter bi(*bindings, firstFrameSlot, /* isNamedLambda = */ false);
+ for (; bi; bi++) {
+ if (!checkSlotLimits(bce, bi))
+ return false;
+
+ NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
+ if (!putNameInCache(bce, bi.name(), loc))
+ return false;
+
+ if (!tdzCache->noteTDZCheck(bce, bi.name(), CheckTDZ))
+ return false;
+ }
+
+ updateFrameFixedSlots(bce, bi);
+
+ // Create and intern the VM scope.
+ auto createScope = [kind, bindings, firstFrameSlot](ExclusiveContext* cx,
+ HandleScope enclosing)
+ {
+ return LexicalScope::create(cx, kind, bindings, firstFrameSlot, enclosing);
+ };
+ if (!internScope(bce, createScope))
+ return false;
+
+ if (ScopeKindIsInBody(kind) && hasEnvironment()) {
+ // After interning the VM scope we can get the scope index.
+ if (!bce->emitInternedScopeOp(index(), JSOP_PUSHLEXICALENV))
+ return false;
+ }
+
+ // Lexical scopes need notes to be mapped from a pc.
+ if (!appendScopeNote(bce))
+ return false;
+
+ // Put frame slots in TDZ. Environment slots are poisoned during
+ // environment creation.
+ //
+ // This must be done after appendScopeNote to be considered in the extent
+ // of the scope.
+ if (!deadZoneFrameSlotRange(bce, firstFrameSlot, frameSlotEnd()))
+ return false;
+
+ return checkEnvironmentChainLength(bce);
+}
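Reconstructed from the emitSwitch changes earlier in this patch, the usual driving pattern for a lexical scope is a TDZCheckCache plus an EmitterScope entered before the body and left afterwards; scopeNode and the body emission below are placeholders:

    TDZCheckCache tdzCache(this);
    EmitterScope emitterScope(this);
    if (!emitterScope.enterLexical(this, ScopeKind::Lexical, scopeNode->scopeBindings()))
        return false;
    if (!emitTree(scopeNode->scopeBody()))       // code that runs under the lexical scope
        return false;
    if (!emitterScope.leave(this))
        return false;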
+
+bool
+EmitterScope::enterNamedLambda(BytecodeEmitter* bce, FunctionBox* funbox)
+{
+ MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
+ MOZ_ASSERT(funbox->namedLambdaBindings());
+
+ if (!ensureCache(bce))
+ return false;
+
+ // See comment in enterLexical about allBindingsClosedOver.
+ if (funbox->allBindingsClosedOver())
+ MarkAllBindingsClosedOver(*funbox->namedLambdaBindings());
+
+ BindingIter bi(*funbox->namedLambdaBindings(), LOCALNO_LIMIT, /* isNamedLambda = */ true);
+ MOZ_ASSERT(bi.kind() == BindingKind::NamedLambdaCallee);
+
+ // The lambda name, if not closed over, is accessed via JSOP_CALLEE and
+ // not a frame slot. Do not update frame slot information.
+ NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
+ if (!putNameInCache(bce, bi.name(), loc))
+ return false;
+
+ bi++;
+ MOZ_ASSERT(!bi, "There should be exactly one binding in a NamedLambda scope");
+
+ auto createScope = [funbox](ExclusiveContext* cx, HandleScope enclosing) {
+ ScopeKind scopeKind =
+ funbox->strict() ? ScopeKind::StrictNamedLambda : ScopeKind::NamedLambda;
+ return LexicalScope::create(cx, scopeKind, funbox->namedLambdaBindings(),
+ LOCALNO_LIMIT, enclosing);
+ };
+ if (!internScope(bce, createScope))
+ return false;
+
+ return checkEnvironmentChainLength(bce);
+}
+
+bool
+EmitterScope::enterComprehensionFor(BytecodeEmitter* bce,
+ Handle<LexicalScope::Data*> bindings)
+{
+ if (!enterLexical(bce, ScopeKind::Lexical, bindings))
+ return false;
+
+ // For comprehensions, initialize all lexical names up front to undefined
+ // because they're now a dead feature and don't interact properly with
+ // TDZ.
+ auto nop = [](BytecodeEmitter*, const NameLocation&, bool) {
+ return true;
+ };
+
+ if (!bce->emit1(JSOP_UNDEFINED))
+ return false;
+
+ RootedAtom name(bce->cx);
+ for (BindingIter bi(*bindings, frameSlotStart(), /* isNamedLambda = */ false); bi; bi++) {
+ name = bi.name();
+ NameOpEmitter noe(bce, name, NameOpEmitter::Kind::Initialize);
+ if (!noe.prepareForRhs())
+ return false;
+ if (!noe.emitAssignment())
+ return false;
+ }
+
+ if (!bce->emit1(JSOP_POP))
+ return false;
+
+ return true;
+}
+
+bool
+EmitterScope::enterFunction(BytecodeEmitter* bce, FunctionBox* funbox)
+{
+ MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
+
+ // If there are parameter expressions, there is an extra var scope.
+ if (!funbox->hasExtraBodyVarScope())
+ bce->setVarEmitterScope(this);
+
+ if (!ensureCache(bce))
+ return false;
+
+ // Resolve body-level bindings, if there are any.
+ auto bindings = funbox->functionScopeBindings();
+ Maybe<uint32_t> lastLexicalSlot;
+ if (bindings) {
+ NameLocationMap& cache = *nameCache_;
+
+ BindingIter bi(*bindings, funbox->hasParameterExprs);
+ for (; bi; bi++) {
+ if (!checkSlotLimits(bce, bi))
+ return false;
+
+ NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
+ NameLocationMap::AddPtr p = cache.lookupForAdd(bi.name());
+
+ // The only duplicate bindings that occur are simple formal
+ // parameters, in which case the last position counts, so update the
+ // location.
+ if (p) {
+ MOZ_ASSERT(bi.kind() == BindingKind::FormalParameter);
+ MOZ_ASSERT(!funbox->hasDestructuringArgs);
+ MOZ_ASSERT(!funbox->hasRest());
+ p->value() = loc;
+ continue;
+ }
+
+ if (!cache.add(p, bi.name(), loc)) {
+ ReportOutOfMemory(bce->cx);
+ return false;
+ }
+ }
+
+ updateFrameFixedSlots(bce, bi);
+ } else {
+ nextFrameSlot_ = 0;
+ }
+
+ // If the function's scope may be extended at runtime due to sloppy direct
+ // eval and there is no extra var scope, any names beyond the function
+ // scope must be accessed dynamically as we don't know if the name will
+ // become a 'var' binding due to direct eval.
+ if (!funbox->hasParameterExprs && funbox->hasExtensibleScope())
+ fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
+
+ // In case of parameter expressions, the parameters are lexical
+ // bindings and have TDZ.
+ if (funbox->hasParameterExprs && nextFrameSlot_) {
+ uint32_t paramFrameSlotEnd = 0;
+ for (BindingIter bi(*bindings, true); bi; bi++) {
+ if (!BindingKindIsLexical(bi.kind()))
+ break;
+
+ NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
+ if (loc.kind() == NameLocation::Kind::FrameSlot) {
+ MOZ_ASSERT(paramFrameSlotEnd <= loc.frameSlot());
+ paramFrameSlotEnd = loc.frameSlot() + 1;
+ }
+ }
+
+ if (!deadZoneFrameSlotRange(bce, 0, paramFrameSlotEnd))
+ return false;
+ }
+
+ // Create and intern the VM scope.
+ auto createScope = [funbox](ExclusiveContext* cx, HandleScope enclosing) {
+ RootedFunction fun(cx, funbox->function());
+ return FunctionScope::create(cx, funbox->functionScopeBindings(),
+ funbox->hasParameterExprs,
+ funbox->needsCallObjectRegardlessOfBindings(),
+ fun, enclosing);
+ };
+ if (!internBodyScope(bce, createScope))
+ return false;
+
+ return checkEnvironmentChainLength(bce);
+}
+
+bool
+EmitterScope::enterFunctionExtraBodyVar(BytecodeEmitter* bce, FunctionBox* funbox)
+{
+ MOZ_ASSERT(funbox->hasParameterExprs);
+ MOZ_ASSERT(funbox->extraVarScopeBindings() ||
+ funbox->needsExtraBodyVarEnvironmentRegardlessOfBindings());
+ MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
+
+ // The extra var scope is never popped once it's entered. It replaces the
+ // function scope as the var emitter scope.
+ bce->setVarEmitterScope(this);
+
+ if (!ensureCache(bce))
+ return false;
+
+ // Resolve body-level bindings, if there are any.
+ uint32_t firstFrameSlot = frameSlotStart();
+ if (auto bindings = funbox->extraVarScopeBindings()) {
+ BindingIter bi(*bindings, firstFrameSlot);
+ for (; bi; bi++) {
+ if (!checkSlotLimits(bce, bi))
+ return false;
+
+ NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
+ if (!putNameInCache(bce, bi.name(), loc))
+ return false;
+ }
+
+ updateFrameFixedSlots(bce, bi);
+ } else {
+ nextFrameSlot_ = firstFrameSlot;
+ }
+
+ // If the extra var scope may be extended at runtime due to sloppy
+ // direct eval, any names beyond the var scope must be accessed
+ // dynamically as we don't know if the name will become a 'var' binding
+ // due to direct eval.
+ if (funbox->hasExtensibleScope())
+ fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
+
+ // Create and intern the VM scope.
+ auto createScope = [funbox, firstFrameSlot](ExclusiveContext* cx, HandleScope enclosing) {
+ return VarScope::create(cx, ScopeKind::FunctionBodyVar,
+ funbox->extraVarScopeBindings(), firstFrameSlot,
+ funbox->needsExtraBodyVarEnvironmentRegardlessOfBindings(),
+ enclosing);
+ };
+ if (!internScope(bce, createScope))
+ return false;
+
+ if (hasEnvironment()) {
+ if (!bce->emitInternedScopeOp(index(), JSOP_PUSHVARENV))
+ return false;
+ }
+
+ // The extra var scope needs a note to be mapped from a pc.
+ if (!appendScopeNote(bce))
+ return false;
+
+ return checkEnvironmentChainLength(bce);
+}
+
+bool
+EmitterScope::enterParameterExpressionVar(BytecodeEmitter* bce)
+{
+ MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
+
+ if (!ensureCache(bce))
+ return false;
+
+ // Parameter expressions var scopes have no pre-set bindings and are
+ // always extensible, as they are needed for eval.
+ fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
+
+ // Create and intern the VM scope.
+ uint32_t firstFrameSlot = frameSlotStart();
+ auto createScope = [firstFrameSlot](ExclusiveContext* cx, HandleScope enclosing) {
+ return VarScope::create(cx, ScopeKind::ParameterExpressionVar,
+ /* data = */ nullptr, firstFrameSlot,
+ /* needsEnvironment = */ true, enclosing);
+ };
+ if (!internScope(bce, createScope))
+ return false;
+
+ MOZ_ASSERT(hasEnvironment());
+ if (!bce->emitInternedScopeOp(index(), JSOP_PUSHVARENV))
+ return false;
+
+ // The extra var scope needs a note to be mapped from a pc.
+ if (!appendScopeNote(bce))
+ return false;
+
+ return checkEnvironmentChainLength(bce);
+}
+
+class DynamicBindingIter : public BindingIter
+{
+ public:
+ explicit DynamicBindingIter(GlobalSharedContext* sc)
+ : BindingIter(*sc->bindings)
+ { }
+
+ explicit DynamicBindingIter(EvalSharedContext* sc)
+ : BindingIter(*sc->bindings, /* strict = */ false)
+ {
+ MOZ_ASSERT(!sc->strict());
+ }
+
+ JSOp bindingOp() const {
+ switch (kind()) {
+ case BindingKind::Var:
+ return JSOP_DEFVAR;
+ case BindingKind::Let:
+ return JSOP_DEFLET;
+ case BindingKind::Const:
+ return JSOP_DEFCONST;
+ default:
+ MOZ_CRASH("Bad BindingKind");
+ }
+ }
+};
+
+bool
+EmitterScope::enterGlobal(BytecodeEmitter* bce, GlobalSharedContext* globalsc)
+{
+ MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
+
+ bce->setVarEmitterScope(this);
+
+ if (!ensureCache(bce))
+ return false;
+
+ if (bce->emitterMode == BytecodeEmitter::SelfHosting) {
+ // In self-hosting, it is incorrect to consult the global scope because
+ // self-hosted scripts are cloned into their target compartments before
+ // they are run. Instead of Global, Intrinsic is used for all names.
+ //
+ // Intrinsic lookups are redirected to the special intrinsics holder
+ // in the global object, into which any missing values are cloned
+ // lazily upon first access.
+ fallbackFreeNameLocation_ = Some(NameLocation::Intrinsic());
+
+ auto createScope = [](ExclusiveContext* cx, HandleScope enclosing) {
+ MOZ_ASSERT(!enclosing);
+ return &cx->global()->emptyGlobalScope();
+ };
+ return internBodyScope(bce, createScope);
+ }
+
+ // Resolve binding names and emit DEF{VAR,LET,CONST} prologue ops.
+ if (globalsc->bindings) {
+ for (DynamicBindingIter bi(globalsc); bi; bi++) {
+ NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
+ JSAtom* name = bi.name();
+ if (!putNameInCache(bce, name, loc))
+ return false;
+
+ // Define the name in the prologue. Do not emit DEFVAR for
+ // functions that we'll emit DEFFUN for.
+ if (bi.isTopLevelFunction())
+ continue;
+
+ if (!bce->emitAtomOp(name, bi.bindingOp()))
+ return false;
+ }
+ }
+
+ // Note that to save space, we don't add free names to the cache for
+ // global scopes. They are assumed to be global vars in the syntactic
+ // global scope, dynamic accesses under non-syntactic global scope.
+ if (globalsc->scopeKind() == ScopeKind::Global)
+ fallbackFreeNameLocation_ = Some(NameLocation::Global(BindingKind::Var));
+ else
+ fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
+
+ auto createScope = [globalsc](ExclusiveContext* cx, HandleScope enclosing) {
+ MOZ_ASSERT(!enclosing);
+ return GlobalScope::create(cx, globalsc->scopeKind(), globalsc->bindings);
+ };
+ return internBodyScope(bce, createScope);
+}
+
+bool
+EmitterScope::enterEval(BytecodeEmitter* bce, EvalSharedContext* evalsc)
+{
+ MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
+
+ bce->setVarEmitterScope(this);
+
+ if (!ensureCache(bce))
+ return false;
+
+ // For simplicity, treat all free name lookups in eval scripts as dynamic.
+ fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
+
+ // Create the `var` scope. Note that there is also a lexical scope, created
+ // separately in emitScript().
+ auto createScope = [evalsc](ExclusiveContext* cx, HandleScope enclosing) {
+ ScopeKind scopeKind = evalsc->strict() ? ScopeKind::StrictEval : ScopeKind::Eval;
+ return EvalScope::create(cx, scopeKind, evalsc->bindings, enclosing);
+ };
+ if (!internBodyScope(bce, createScope))
+ return false;
+
+ if (hasEnvironment()) {
+ if (!bce->emitInternedScopeOp(index(), JSOP_PUSHVARENV))
+ return false;
+ } else {
+ // Resolve binding names and emit DEFVAR prologue ops if we don't have
+ // an environment (i.e., a sloppy eval not in a parameter expression).
+ // Eval scripts always have their own lexical scope, but non-strict
+ // scopes may introduce 'var' bindings to the nearest var scope.
+ //
+ // TODO: We may optimize strict eval bindings in the future to be on
+ // the frame. For now, handle everything dynamically.
+ if (!hasEnvironment() && evalsc->bindings) {
+ for (DynamicBindingIter bi(evalsc); bi; bi++) {
+ MOZ_ASSERT(bi.bindingOp() == JSOP_DEFVAR);
+
+ if (bi.isTopLevelFunction())
+ continue;
+
+ if (!bce->emitAtomOp(bi.name(), JSOP_DEFVAR))
+ return false;
+ }
+ }
+
+ // As an optimization, if the eval does not have its own var
+ // environment and is directly enclosed in a global scope, then all
+ // free name lookups are global.
+ if (scope(bce)->enclosing()->is<GlobalScope>())
+ fallbackFreeNameLocation_ = Some(NameLocation::Global(BindingKind::Var));
+ }
+
+ return true;
+}
+
+bool
+EmitterScope::enterModule(BytecodeEmitter* bce, ModuleSharedContext* modulesc)
+{
+ MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
+
+ bce->setVarEmitterScope(this);
+
+ if (!ensureCache(bce))
+ return false;
+
+ // Resolve body-level bindings, if there are any.
+ TDZCheckCache* tdzCache = bce->innermostTDZCheckCache;
+ Maybe<uint32_t> firstLexicalFrameSlot;
+ if (ModuleScope::Data* bindings = modulesc->bindings) {
+ BindingIter bi(*bindings);
+ for (; bi; bi++) {
+ if (!checkSlotLimits(bce, bi))
+ return false;
+
+ NameLocation loc = NameLocation::fromBinding(bi.kind(), bi.location());
+ if (!putNameInCache(bce, bi.name(), loc))
+ return false;
+
+ if (BindingKindIsLexical(bi.kind())) {
+ if (loc.kind() == NameLocation::Kind::FrameSlot && !firstLexicalFrameSlot)
+ firstLexicalFrameSlot = Some(loc.frameSlot());
+
+ if (!tdzCache->noteTDZCheck(bce, bi.name(), CheckTDZ))
+ return false;
+ }
+ }
+
+ updateFrameFixedSlots(bce, bi);
+ } else {
+ nextFrameSlot_ = 0;
+ }
+
+ // Modules are toplevel, so any free names are global.
+ fallbackFreeNameLocation_ = Some(NameLocation::Global(BindingKind::Var));
+
+ // Put lexical frame slots in TDZ. Environment slots are poisoned during
+ // environment creation.
+ if (firstLexicalFrameSlot) {
+ if (!deadZoneFrameSlotRange(bce, *firstLexicalFrameSlot, frameSlotEnd()))
+ return false;
+ }
+
+ // Create and intern the VM scope.
+ auto createScope = [modulesc](ExclusiveContext* cx, HandleScope enclosing) {
+ return ModuleScope::create(cx, modulesc->bindings, modulesc->module(), enclosing);
+ };
+ if (!internBodyScope(bce, createScope))
+ return false;
+
+ return checkEnvironmentChainLength(bce);
+}
+
+bool
+EmitterScope::enterWith(BytecodeEmitter* bce)
+{
+ MOZ_ASSERT(this == bce->innermostEmitterScopeNoCheck());
+
+ if (!ensureCache(bce))
+ return false;
+
+ // 'with' makes all accesses dynamic and unanalyzable.
+ fallbackFreeNameLocation_ = Some(NameLocation::Dynamic());
+
+ auto createScope = [](ExclusiveContext* cx, HandleScope enclosing) {
+ return WithScope::create(cx, enclosing);
+ };
+ if (!internScope(bce, createScope))
+ return false;
+
+ if (!bce->emitInternedScopeOp(index(), JSOP_ENTERWITH))
+ return false;
+
+ if (!appendScopeNote(bce))
+ return false;
+
+ return checkEnvironmentChainLength(bce);
+}
+
+bool
+EmitterScope::deadZoneFrameSlots(BytecodeEmitter* bce)
+{
+ return deadZoneFrameSlotRange(bce, frameSlotStart(), frameSlotEnd());
+}
+
+bool
+EmitterScope::leave(BytecodeEmitter* bce, bool nonLocal)
+{
+ // If we aren't leaving the scope due to a non-local jump (e.g., break),
+ // we must be the innermost scope.
+ MOZ_ASSERT_IF(!nonLocal, this == bce->innermostEmitterScopeNoCheck());
+
+ ScopeKind kind = scope(bce)->kind();
+ switch (kind) {
+ case ScopeKind::Lexical:
+ case ScopeKind::SimpleCatch:
+ case ScopeKind::Catch:
+ if (!bce->emit1(hasEnvironment() ? JSOP_POPLEXICALENV : JSOP_DEBUGLEAVELEXICALENV))
+ return false;
+ break;
+
+ case ScopeKind::With:
+ if (!bce->emit1(JSOP_LEAVEWITH))
+ return false;
+ break;
+
+ case ScopeKind::ParameterExpressionVar:
+ MOZ_ASSERT(hasEnvironment());
+ if (!bce->emit1(JSOP_POPVARENV))
+ return false;
+ break;
+
+ case ScopeKind::Function:
+ case ScopeKind::FunctionBodyVar:
+ case ScopeKind::NamedLambda:
+ case ScopeKind::StrictNamedLambda:
+ case ScopeKind::Eval:
+ case ScopeKind::StrictEval:
+ case ScopeKind::Global:
+ case ScopeKind::NonSyntactic:
+ case ScopeKind::Module:
+ break;
+ }
+
+ // Finish up the scope if we are leaving it in LIFO fashion.
+ if (!nonLocal) {
+        // Popping scopes due to non-local jumps generates additional scope
+ // notes. See NonLocalExitControl::prepareForNonLocalJump.
+ if (ScopeKindIsInBody(kind)) {
+ // The extra function var scope is never popped once it's pushed,
+ // so its scope note extends until the end of any possible code.
+ uint32_t offset = kind == ScopeKind::FunctionBodyVar ? UINT32_MAX : bce->offset();
+ bce->scopeNoteList.recordEnd(noteIndex_, offset, bce->inPrologue());
+ }
+ }
+
+ return true;
+}
+
+Scope*
+EmitterScope::scope(const BytecodeEmitter* bce) const
+{
+ return bce->scopeList.vector[index()];
+}
+
+NameLocation
+EmitterScope::lookup(BytecodeEmitter* bce, JSAtom* name)
+{
+ if (Maybe<NameLocation> loc = lookupInCache(bce, name))
+ return *loc;
+ return searchAndCache(bce, name);
+}
+
+Maybe<NameLocation>
+EmitterScope::locationBoundInScope(JSAtom* name, EmitterScope* target)
+{
+ // The target scope must be an intra-frame enclosing scope of this
+ // one. Count the number of extra hops to reach it.
+ uint8_t extraHops = 0;
+ for (EmitterScope* es = this; es != target; es = es->enclosingInFrame()) {
+ if (es->hasEnvironment())
+ extraHops++;
+ }
+
+ // Caches are prepopulated with bound names. So if the name is bound in a
+ // particular scope, it must already be in the cache. Furthermore, don't
+ // consult the fallback location as we only care about binding names.
+ Maybe<NameLocation> loc;
+ if (NameLocationMap::Ptr p = target->nameCache_->lookup(name)) {
+ NameLocation l = p->value().wrapped;
+ if (l.kind() == NameLocation::Kind::EnvironmentCoordinate)
+ loc = Some(l.addHops(extraHops));
+ else
+ loc = Some(l);
+ }
+ return loc;
+}
diff --git a/js/src/frontend/EmitterScope.h b/js/src/frontend/EmitterScope.h
new file mode 100644
index 0000000000..cfee86a214
--- /dev/null
+++ b/js/src/frontend/EmitterScope.h
@@ -0,0 +1,155 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef frontend_EmitterScope_h
+#define frontend_EmitterScope_h
+
+#include "mozilla/Attributes.h"
+#include "mozilla/Maybe.h"
+
+#include <stdint.h>
+
+#include "ds/Nestable.h"
+#include "frontend/NameAnalysisTypes.h"
+#include "frontend/NameCollections.h"
+#include "frontend/SharedContext.h"
+#include "jstypes.h"
+#include "vm/Scope.h"
+
+namespace js {
+namespace frontend {
+
+// A scope that introduces bindings.
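+//
+// Typical usage, shown here only as an illustrative sketch (the real call
+// sites live in BytecodeEmitter.cpp, `bindings` stands in for whatever
+// LexicalScope::Data the caller has at hand, and error handling is elided):
+//
+//     // Emitting `{ let x; ... }`:
+//     EmitterScope lexicalScope(bce);
+//     if (!lexicalScope.enterLexical(bce, ScopeKind::Lexical, bindings))
+//         return false;
+//     // ... emit the block body; free and bound names are resolved via
+//     // bce->innermostEmitterScope()->lookup(bce, name) ...
+//     if (!lexicalScope.leave(bce))
+//         return false;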
+class EmitterScope : public Nestable<EmitterScope>
+{
+ // The cache of bound names that may be looked up in the
+ // scope. Initially populated as the set of names this scope binds. As
+ // names are looked up in enclosing scopes, they are cached on the
+ // current scope.
+ PooledMapPtr<NameLocationMap> nameCache_;
+
+    // If this scope's cache does not include free names (e.g., in the
+    // global scope), the NameLocation to return for them.
+ mozilla::Maybe<NameLocation> fallbackFreeNameLocation_;
+
+ // True if there is a corresponding EnvironmentObject on the environment
+ // chain, false if all bindings are stored in frame slots on the stack.
+ bool hasEnvironment_;
+
+ // The number of enclosing environments. Used for error checking.
+ uint8_t environmentChainLength_;
+
+    // The next usable slot on the frame for not-closed-over bindings.
+ //
+ // The initial frame slot when assigning slots to bindings is the
+ // enclosing scope's nextFrameSlot. For the first scope in a frame,
+ // the initial frame slot is 0.
+ uint32_t nextFrameSlot_;
+
+ // The index in the BytecodeEmitter's interned scope vector, otherwise
+ // ScopeNote::NoScopeIndex.
+ uint32_t scopeIndex_;
+
+ // If kind is Lexical, Catch, or With, the index in the BytecodeEmitter's
+ // block scope note list. Otherwise ScopeNote::NoScopeNote.
+ uint32_t noteIndex_;
+
+ MOZ_MUST_USE bool ensureCache(BytecodeEmitter* bce);
+
+ template <typename BindingIter>
+ MOZ_MUST_USE bool checkSlotLimits(BytecodeEmitter* bce, const BindingIter& bi);
+
+ MOZ_MUST_USE bool checkEnvironmentChainLength(BytecodeEmitter* bce);
+
+ void updateFrameFixedSlots(BytecodeEmitter* bce, const BindingIter& bi);
+
+ MOZ_MUST_USE bool putNameInCache(BytecodeEmitter* bce, JSAtom* name, NameLocation loc);
+
+ mozilla::Maybe<NameLocation> lookupInCache(BytecodeEmitter* bce, JSAtom* name);
+
+ EmitterScope* enclosing(BytecodeEmitter** bce) const;
+
+ Scope* enclosingScope(BytecodeEmitter* bce) const;
+
+ static bool nameCanBeFree(BytecodeEmitter* bce, JSAtom* name);
+
+ static NameLocation searchInEnclosingScope(JSAtom* name, Scope* scope, uint8_t hops);
+ NameLocation searchAndCache(BytecodeEmitter* bce, JSAtom* name);
+
+ template <typename ScopeCreator>
+ MOZ_MUST_USE bool internScope(BytecodeEmitter* bce, ScopeCreator createScope);
+ template <typename ScopeCreator>
+ MOZ_MUST_USE bool internBodyScope(BytecodeEmitter* bce, ScopeCreator createScope);
+ MOZ_MUST_USE bool appendScopeNote(BytecodeEmitter* bce);
+
+ MOZ_MUST_USE bool deadZoneFrameSlotRange(BytecodeEmitter* bce, uint32_t slotStart,
+ uint32_t slotEnd);
+
+ public:
+ explicit EmitterScope(BytecodeEmitter* bce);
+
+ void dump(BytecodeEmitter* bce);
+
+ MOZ_MUST_USE bool enterLexical(BytecodeEmitter* bce, ScopeKind kind,
+ Handle<LexicalScope::Data*> bindings);
+ MOZ_MUST_USE bool enterNamedLambda(BytecodeEmitter* bce, FunctionBox* funbox);
+ MOZ_MUST_USE bool enterComprehensionFor(BytecodeEmitter* bce,
+ Handle<LexicalScope::Data*> bindings);
+ MOZ_MUST_USE bool enterFunction(BytecodeEmitter* bce, FunctionBox* funbox);
+ MOZ_MUST_USE bool enterFunctionExtraBodyVar(BytecodeEmitter* bce, FunctionBox* funbox);
+ MOZ_MUST_USE bool enterParameterExpressionVar(BytecodeEmitter* bce);
+ MOZ_MUST_USE bool enterGlobal(BytecodeEmitter* bce, GlobalSharedContext* globalsc);
+ MOZ_MUST_USE bool enterEval(BytecodeEmitter* bce, EvalSharedContext* evalsc);
+    MOZ_MUST_USE bool enterModule(BytecodeEmitter* bce, ModuleSharedContext* modulesc);
+ MOZ_MUST_USE bool enterWith(BytecodeEmitter* bce);
+ MOZ_MUST_USE bool deadZoneFrameSlots(BytecodeEmitter* bce);
+
+ MOZ_MUST_USE bool leave(BytecodeEmitter* bce, bool nonLocal = false);
+
+ uint32_t index() const {
+ MOZ_ASSERT(scopeIndex_ != ScopeNote::NoScopeIndex, "Did you forget to intern a Scope?");
+ return scopeIndex_;
+ }
+
+ uint32_t noteIndex() const {
+ return noteIndex_;
+ }
+
+ Scope* scope(const BytecodeEmitter* bce) const;
+
+ bool hasEnvironment() const {
+ return hasEnvironment_;
+ }
+
+ // The first frame slot used.
+ uint32_t frameSlotStart() const {
+ if (EmitterScope* inFrame = enclosingInFrame())
+ return inFrame->nextFrameSlot_;
+ return 0;
+ }
+
+ // The last frame slot used + 1.
+ uint32_t frameSlotEnd() const {
+ return nextFrameSlot_;
+ }
+
+ uint32_t numFrameSlots() const {
+ return frameSlotEnd() - frameSlotStart();
+ }
+
+ EmitterScope* enclosingInFrame() const {
+ return Nestable<EmitterScope>::enclosing();
+ }
+
+ NameLocation lookup(BytecodeEmitter* bce, JSAtom* name);
+
+ mozilla::Maybe<NameLocation> locationBoundInScope(JSAtom* name, EmitterScope* target);
+};
+
+} /* namespace frontend */
+} /* namespace js */
+
+#endif /* frontend_EmitterScope_h */
diff --git a/js/src/frontend/ForOfLoopControl.cpp b/js/src/frontend/ForOfLoopControl.cpp
new file mode 100644
index 0000000000..9556a03bf2
--- /dev/null
+++ b/js/src/frontend/ForOfLoopControl.cpp
@@ -0,0 +1,167 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "frontend/ForOfLoopControl.h"
+
+#include "frontend/BytecodeEmitter.h"
+#include "frontend/EmitterScope.h"
+#include "frontend/IfEmitter.h"
+
+using namespace js;
+using namespace js::frontend;
+
+ForOfLoopControl::ForOfLoopControl(BytecodeEmitter* bce, int32_t iterDepth, bool allowSelfHosted,
+ IteratorKind iterKind)
+ : LoopControl(bce, StatementKind::ForOfLoop),
+ iterDepth_(iterDepth),
+ numYieldsAtBeginCodeNeedingIterClose_(UINT32_MAX),
+ allowSelfHosted_(allowSelfHosted),
+ iterKind_(iterKind)
+{
+}
+
+bool
+ForOfLoopControl::emitBeginCodeNeedingIteratorClose(BytecodeEmitter* bce)
+{
+ tryCatch_.emplace(bce, TryEmitter::TryCatch, TryEmitter::DontUseRetVal,
+ TryEmitter::DontUseControl);
+
+ if (!tryCatch_->emitTry())
+ return false;
+
+ MOZ_ASSERT(numYieldsAtBeginCodeNeedingIterClose_ == UINT32_MAX);
+ numYieldsAtBeginCodeNeedingIterClose_ = bce->yieldAndAwaitOffsetList.numYields;
+
+ return true;
+}
+
+bool
+ForOfLoopControl::emitEndCodeNeedingIteratorClose(BytecodeEmitter* bce)
+{
+ if (!tryCatch_->emitCatch()) // ITER ...
+ return false;
+
+ if (!bce->emit1(JSOP_EXCEPTION)) // ITER ... EXCEPTION
+ return false;
+ unsigned slotFromTop = bce->stackDepth - iterDepth_;
+ if (!bce->emitDupAt(slotFromTop)) // ITER ... EXCEPTION ITER
+ return false;
+
+    // If ITER is undefined, it means the exception was thrown by
+    // IteratorClose during a non-local jump, and we shouldn't perform
+    // IteratorClose again here.
+ if (!bce->emit1(JSOP_UNDEFINED)) // ITER ... EXCEPTION ITER UNDEF
+ return false;
+ if (!bce->emit1(JSOP_STRICTNE)) // ITER ... EXCEPTION NE
+ return false;
+
+ InternalIfEmitter ifIteratorIsNotClosed(bce);
+ if (!ifIteratorIsNotClosed.emitThen()) // ITER ... EXCEPTION
+ return false;
+
+ MOZ_ASSERT(slotFromTop == unsigned(bce->stackDepth - iterDepth_));
+ if (!bce->emitDupAt(slotFromTop)) // ITER ... EXCEPTION ITER
+ return false;
+ if (!emitIteratorCloseInInnermostScope(bce, CompletionKind::Throw))
+ return false; // ITER ... EXCEPTION
+
+ if (!ifIteratorIsNotClosed.emitEnd()) // ITER ... EXCEPTION
+ return false;
+
+ if (!bce->emit1(JSOP_THROW)) // ITER ...
+ return false;
+
+ // If any yields were emitted, then this for-of loop is inside a star
+ // generator and must handle the case of Generator.return. Like in
+ // yield*, it is handled with a finally block.
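+    //
+    // An illustrative example of the situation being handled (not the only
+    // shape that triggers it; `iterable` is a placeholder):
+    //
+    //     function* g() {
+    //         for (const x of iterable)
+    //             yield x;
+    //     }
+    //     const it = g();
+    //     it.next();    // suspends at `yield x` inside the loop
+    //     it.return();  // must still close iterable's iterator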
+ uint32_t numYieldsEmitted = bce->yieldAndAwaitOffsetList.numYields;
+ if (numYieldsEmitted > numYieldsAtBeginCodeNeedingIterClose_) {
+ if (!tryCatch_->emitFinally())
+ return false;
+
+ InternalIfEmitter ifGeneratorClosing(bce);
+ if (!bce->emit1(JSOP_ISGENCLOSING)) // ITER ... FTYPE FVALUE CLOSING
+ return false;
+ if (!ifGeneratorClosing.emitThen()) // ITER ... FTYPE FVALUE
+ return false;
+ if (!bce->emitDupAt(slotFromTop + 1)) // ITER ... FTYPE FVALUE ITER
+ return false;
+ if (!emitIteratorCloseInInnermostScope(bce, CompletionKind::Normal))
+ return false; // ITER ... FTYPE FVALUE
+ if (!ifGeneratorClosing.emitEnd()) // ITER ... FTYPE FVALUE
+ return false;
+ }
+
+ if (!tryCatch_->emitEnd())
+ return false;
+
+ tryCatch_.reset();
+ numYieldsAtBeginCodeNeedingIterClose_ = UINT32_MAX;
+
+ return true;
+}
+
+bool
+ForOfLoopControl::emitIteratorCloseInInnermostScope(BytecodeEmitter* bce,
+ CompletionKind completionKind /* = CompletionKind::Normal */)
+{
+ return emitIteratorCloseInScope(bce, *bce->innermostEmitterScope(), completionKind);
+}
+
+bool
+ForOfLoopControl::emitIteratorCloseInScope(BytecodeEmitter* bce,
+ EmitterScope& currentScope,
+ CompletionKind completionKind /* = CompletionKind::Normal */)
+{
+ ptrdiff_t start = bce->offset();
+ if (!bce->emitIteratorCloseInScope(currentScope, iterKind_, completionKind,
+ allowSelfHosted_))
+ {
+ return false;
+ }
+ ptrdiff_t end = bce->offset();
+ return bce->tryNoteList.append(JSTRY_FOR_OF_ITERCLOSE, 0, start, end);
+}
+
+bool
+ForOfLoopControl::emitPrepareForNonLocalJumpFromScope(BytecodeEmitter* bce,
+ EmitterScope& currentScope,
+ bool isTarget)
+{
+    // Pop unnecessary values from the stack. Effectively this means
+    // leaving the try-catch block. However, performing IteratorClose can
+    // reach the depth of the try-catch and effectively re-enter the
+    // try-catch block.
+ if (!bce->emit1(JSOP_POP)) // ITER RESULT
+ return false;
+ if (!bce->emit1(JSOP_POP)) // ITER
+ return false;
+
+    // Clear the ITER slot on the stack to tell the catch block to avoid
+    // performing IteratorClose again.
+ if (!bce->emit1(JSOP_UNDEFINED)) // ITER UNDEF
+ return false;
+ if (!bce->emit1(JSOP_SWAP)) // UNDEF ITER
+ return false;
+
+ if (!emitIteratorCloseInScope(bce, currentScope, CompletionKind::Normal)) // UNDEF
+ return false;
+
+ if (isTarget) {
+ // At the level of the target block, there's bytecode after the
+ // loop that will pop the iterator and the value, so push
+ // undefineds to balance the stack.
+ if (!bce->emit1(JSOP_UNDEFINED)) // UNDEF UNDEF
+ return false;
+ if (!bce->emit1(JSOP_UNDEFINED)) // UNDEF UNDEF UNDEF
+ return false;
+ } else {
+ if (!bce->emit1(JSOP_POP)) //
+ return false;
+ }
+
+ return true;
+}
diff --git a/js/src/frontend/ForOfLoopControl.h b/js/src/frontend/ForOfLoopControl.h
new file mode 100644
index 0000000000..4ac9d4b2f8
--- /dev/null
+++ b/js/src/frontend/ForOfLoopControl.h
@@ -0,0 +1,99 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef frontend_ForOfLoopControl_h
+#define frontend_ForOfLoopControl_h
+
+#include "mozilla/Attributes.h"
+#include "mozilla/Maybe.h"
+
+#include <stdint.h>
+
+#include "jsapi.h"
+
+#include "frontend/BytecodeControlStructures.h"
+#include "frontend/TryEmitter.h"
+#include "jsiter.h"
+
+namespace js {
+namespace frontend {
+
+struct BytecodeEmitter;
+class EmitterScope;
+
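+// Control-flow bookkeeping for for-of loops.  The expected call sequence,
+// sketched from the methods below (BytecodeEmitter drives this while
+// emitting the loop; return-value checks are omitted):
+//
+//     ForOfLoopControl loopInfo(bce, iterDepth, allowSelfHosted, iterKind);
+//     loopInfo.emitBeginCodeNeedingIteratorClose(bce);
+//     // ... emit the loop body ...
+//     loopInfo.emitEndCodeNeedingIteratorClose(bce);
+//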
+class ForOfLoopControl : public LoopControl
+{
+ // The stack depth of the iterator.
+ int32_t iterDepth_;
+
+ // for-of loops, when throwing from non-iterator code (i.e. from the body
+ // or from evaluating the LHS of the loop condition), need to call
+    // IteratorClose. This is done by enclosing the non-iterator code in a
+    // try-catch and calling IteratorClose in the `catch` block.
+    // If IteratorClose itself throws, we must not call IteratorClose again.
+    // Since non-local jumps like break and return call IteratorClose, whenever
+    // a non-local jump is emitted, we must tell the catch block not to perform
+    // IteratorClose.
+ //
+ // for (x of y) {
+ // // Operations for iterator (IteratorNext etc) are outside of
+ // // try-block.
+ // try {
+ // ...
+ // if (...) {
+ // // Before non-local jump, clear iterator on the stack to tell
+ // // catch block not to perform IteratorClose.
+ // tmpIterator = iterator;
+ // iterator = undefined;
+ // IteratorClose(tmpIterator, { break });
+ // break;
+ // }
+ // ...
+ // } catch (e) {
+ // // Just throw again when iterator is cleared by non-local jump.
+ // if (iterator === undefined)
+ // throw e;
+ // IteratorClose(iterator, { throw, e });
+ // }
+ // }
+ mozilla::Maybe<TryEmitter> tryCatch_;
+
+    // Used to track whether any yields were emitted between calls to
+ // emitBeginCodeNeedingIteratorClose and emitEndCodeNeedingIteratorClose.
+ uint32_t numYieldsAtBeginCodeNeedingIterClose_;
+
+ bool allowSelfHosted_;
+
+ IteratorKind iterKind_;
+
+ public:
+ ForOfLoopControl(BytecodeEmitter* bce, int32_t iterDepth, bool allowSelfHosted,
+ IteratorKind iterKind);
+
+ MOZ_MUST_USE bool emitBeginCodeNeedingIteratorClose(BytecodeEmitter* bce);
+ MOZ_MUST_USE bool emitEndCodeNeedingIteratorClose(BytecodeEmitter* bce);
+
+ MOZ_MUST_USE bool emitIteratorCloseInInnermostScope(BytecodeEmitter* bce,
+ CompletionKind completionKind = CompletionKind::Normal);
+ MOZ_MUST_USE bool emitIteratorCloseInScope(BytecodeEmitter* bce,
+ EmitterScope& currentScope,
+ CompletionKind completionKind = CompletionKind::Normal);
+
+ MOZ_MUST_USE bool emitPrepareForNonLocalJumpFromScope(BytecodeEmitter* bce,
+ EmitterScope& currentScope,
+ bool isTarget);
+};
+
+template <>
+inline bool
+NestableControl::is<ForOfLoopControl>() const
+{
+ return kind_ == StatementKind::ForOfLoop;
+}
+
+} /* namespace frontend */
+} /* namespace js */
+
+#endif /* frontend_ForOfLoopControl_h */
diff --git a/js/src/frontend/ParseNode.h b/js/src/frontend/ParseNode.h
index bdba7a1ff9..2e5e29675e 100644
--- a/js/src/frontend/ParseNode.h
+++ b/js/src/frontend/ParseNode.h
@@ -254,16 +254,12 @@ IsTypeofKind(ParseNodeKind kind)
* or (if the push was optimized away) empty
* PNK_STATEMENTLIST.
* PNK_SWITCH binary pn_left: discriminant
- * pn_right: list of PNK_CASE nodes, with at most one
- * default node, or if there are let bindings
- * in the top level of the switch body's cases, a
- * PNK_LEXICALSCOPE node that contains the list of
- * PNK_CASE nodes.
+ * pn_right: PNK_LEXICALSCOPE node that contains the list
+ * of PNK_CASE nodes, with at most one default node.
* PNK_CASE binary pn_left: case-expression if CaseClause, or
* null if DefaultClause
* pn_right: PNK_STATEMENTLIST node for this case's
* statements
- * pn_u.binary.offset: scratch space for the emitter
* PNK_WHILE binary pn_left: cond, pn_right: body
* PNK_DOWHILE binary pn_left: body, pn_right: cond
* PNK_FOR binary pn_left: either PNK_FORIN (for-in statement),
@@ -587,7 +583,6 @@ class ParseNode
union {
unsigned iflags; /* JSITER_* flags for PNK_{COMPREHENSION,}FOR node */
bool isStatic; /* only for PNK_CLASSMETHOD */
- uint32_t offset; /* for the emitter's use on PNK_CASE nodes */
};
} binary;
struct { /* one kid if unary */
@@ -1069,10 +1064,6 @@ class CaseClause : public BinaryNode
// The next CaseClause in the same switch statement.
CaseClause* next() const { return pn_next ? &pn_next->as<CaseClause>() : nullptr; }
- // Scratch space used by the emitter.
- uint32_t offset() const { return pn_u.binary.offset; }
- void setOffset(uint32_t u) { pn_u.binary.offset = u; }
-
static bool test(const ParseNode& node) {
bool match = node.isKind(PNK_CASE);
MOZ_ASSERT_IF(match, node.isArity(PN_BINARY));
diff --git a/js/src/frontend/Parser.h b/js/src/frontend/Parser.h
index 1e9028cbee..54eb7f4168 100644
--- a/js/src/frontend/Parser.h
+++ b/js/src/frontend/Parser.h
@@ -14,6 +14,7 @@
#include "jsiter.h"
#include "jspubtd.h"
+#include "ds/Nestable.h"
#include "frontend/BytecodeCompiler.h"
#include "frontend/FullParseHandler.h"
#include "frontend/NameAnalysisTypes.h"
diff --git a/js/src/frontend/SharedContext.h b/js/src/frontend/SharedContext.h
index d9cdbcaf95..ab90953194 100644
--- a/js/src/frontend/SharedContext.h
+++ b/js/src/frontend/SharedContext.h
@@ -60,56 +60,6 @@ StatementKindIsUnlabeledBreakTarget(StatementKind kind)
return StatementKindIsLoop(kind) || kind == StatementKind::Switch;
}
-// A base class for nestable structures in the frontend, such as statements
-// and scopes.
-template <typename Concrete>
-class MOZ_STACK_CLASS Nestable
-{
- Concrete** stack_;
- Concrete* enclosing_;
-
- protected:
- explicit Nestable(Concrete** stack)
- : stack_(stack),
- enclosing_(*stack)
- {
- *stack_ = static_cast<Concrete*>(this);
- }
-
- // These method are protected. Some derived classes, such as ParseContext,
- // do not expose the ability to walk the stack.
- Concrete* enclosing() const {
- return enclosing_;
- }
-
- template <typename Predicate /* (Concrete*) -> bool */>
- static Concrete* findNearest(Concrete* it, Predicate predicate) {
- while (it && !predicate(it))
- it = it->enclosing();
- return it;
- }
-
- template <typename T>
- static T* findNearest(Concrete* it) {
- while (it && !it->template is<T>())
- it = it->enclosing();
- return it ? &it->template as<T>() : nullptr;
- }
-
- template <typename T, typename Predicate /* (T*) -> bool */>
- static T* findNearest(Concrete* it, Predicate predicate) {
- while (it && (!it->template is<T>() || !predicate(&it->template as<T>())))
- it = it->enclosing();
- return it ? &it->template as<T>() : nullptr;
- }
-
- public:
- ~Nestable() {
- MOZ_ASSERT(*stack_ == static_cast<Concrete*>(this));
- *stack_ = enclosing_;
- }
-};
-
// These flags apply to both global and function contexts.
class AnyContextFlags
{
diff --git a/js/src/frontend/SwitchEmitter.cpp b/js/src/frontend/SwitchEmitter.cpp
new file mode 100644
index 0000000000..c72120e2d3
--- /dev/null
+++ b/js/src/frontend/SwitchEmitter.cpp
@@ -0,0 +1,424 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "frontend/SwitchEmitter.h"
+
+#include "jsutil.h"
+
+#include "frontend/BytecodeEmitter.h"
+#include "frontend/SharedContext.h"
+#include "frontend/SourceNotes.h"
+#include "vm/Opcodes.h"
+#include "vm/Runtime.h"
+
+using namespace js;
+using namespace js::frontend;
+
+using mozilla::Maybe;
+
+bool
+SwitchEmitter::TableGenerator::addNumber(int32_t caseValue)
+{
+ if (isInvalid())
+ return true;
+
+ if (unsigned(caseValue + int(JS_BIT(15))) >= unsigned(JS_BIT(16))) {
+ setInvalid();
+ return true;
+ }
+
+ if (intmap_.isNothing())
+ intmap_.emplace();
+
+ low_ = std::min(low_, caseValue);
+ high_ = std::max(high_, caseValue);
+
+ // Check for duplicates, which require a JSOP_CONDSWITCH.
+ // We bias caseValue by 65536 if it's negative, and hope that's a rare case
+ // (because it requires a malloc'd bitmap).
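+    // For example (illustrative values): `case 5:` is recorded as bit 5 and
+    // `case -1:` as bit 65535, so the accepted range [-32768, 32767] maps
+    // onto bits 0..65535 of the bitmap.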
+ if (caseValue < 0)
+ caseValue += JS_BIT(16);
+ if (caseValue >= intmapBitLength_) {
+ size_t newLength = NumWordsForBitArrayOfLength(caseValue + 1);
+ if (!intmap_->resize(newLength)) {
+ ReportOutOfMemory(bce_->cx);
+ return false;
+ }
+ intmapBitLength_ = newLength * BitArrayElementBits;
+ }
+ if (IsBitArrayElementSet(intmap_->begin(), intmap_->length(), caseValue)) {
+ // Duplicate entry is not supported in table switch.
+ setInvalid();
+ return true;
+ }
+ SetBitArrayElement(intmap_->begin(), intmap_->length(), caseValue);
+ return true;
+}
+
+void
+SwitchEmitter::TableGenerator::finish(uint32_t caseCount)
+{
+ intmap_.reset();
+
+#ifdef DEBUG
+ finished_ = true;
+#endif
+
+ if (isInvalid())
+ return;
+
+ if (caseCount == 0) {
+ low_ = 0;
+ high_ = -1;
+ return;
+ }
+
+ // Compute table length and select condswitch instead if overlarge
+ // or more than half-sparse.
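+    // For example (illustrative values): `case 1:` and `case 1000:` alone
+    // would need a 1000-entry table for 2 cases (1000 > 2 * 2), so the
+    // switch falls back to JSOP_CONDSWITCH.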
+ tableLength_ = uint32_t(high_ - low_ + 1);
+ if (tableLength_ >= JS_BIT(16) || tableLength_ > 2 * caseCount)
+ setInvalid();
+}
+
+uint32_t
+SwitchEmitter::TableGenerator::toCaseIndex(int32_t caseValue) const
+{
+ MOZ_ASSERT(finished_);
+ MOZ_ASSERT(isValid());
+ uint32_t caseIndex = uint32_t(caseValue - low_);
+ MOZ_ASSERT(caseIndex < tableLength_);
+ return caseIndex;
+}
+
+uint32_t
+SwitchEmitter::TableGenerator::tableLength() const
+{
+ MOZ_ASSERT(finished_);
+ MOZ_ASSERT(isValid());
+ return tableLength_;
+}
+
+SwitchEmitter::SwitchEmitter(BytecodeEmitter* bce)
+ : bce_(bce)
+{}
+
+bool
+SwitchEmitter::emitDiscriminant(const Maybe<uint32_t>& switchPos)
+{
+ MOZ_ASSERT(state_ == State::Start);
+ switchPos_ = switchPos;
+
+ if (switchPos_) {
+ // Ensure that the column of the switch statement is set properly.
+ if (!bce_->updateSourceCoordNotes(*switchPos_))
+ return false;
+ }
+
+ state_ = State::Discriminant;
+ return true;
+}
+
+bool
+SwitchEmitter::emitLexical(Handle<LexicalScope::Data*> bindings)
+{
+ MOZ_ASSERT(state_ == State::Discriminant);
+ MOZ_ASSERT(bindings);
+
+ tdzCacheLexical_.emplace(bce_);
+ emitterScope_.emplace(bce_);
+ if (!emitterScope_->enterLexical(bce_, ScopeKind::Lexical, bindings))
+ return false;
+
+ state_ = State::Lexical;
+ return true;
+}
+
+bool
+SwitchEmitter::validateCaseCount(uint32_t caseCount)
+{
+ MOZ_ASSERT(state_ == State::Discriminant || state_ == State::Lexical);
+ if (caseCount > JS_BIT(16)) {
+ bce_->reportError(switchPos_, JSMSG_TOO_MANY_CASES);
+ return false;
+ }
+ caseCount_ = caseCount;
+
+ state_ = State::CaseCount;
+ return true;
+}
+
+bool
+SwitchEmitter::emitCond()
+{
+ MOZ_ASSERT(state_ == State::CaseCount);
+
+ kind_ = Kind::Cond;
+
+ // After entering the scope if necessary, push the switch control.
+ controlInfo_.emplace(bce_, StatementKind::Switch);
+ top_ = bce_->offset();
+
+ if (!caseOffsets_.resize(caseCount_)) {
+ ReportOutOfMemory(bce_->cx);
+ return false;
+ }
+
+ // The note has two offsets: first tells total switch code length;
+ // second tells offset to first JSOP_CASE.
+ if (!bce_->newSrcNote3(SRC_CONDSWITCH, 0, 0, &noteIndex_))
+ return false;
+
+ MOZ_ASSERT(top_ == bce_->offset());
+ if (!bce_->emitN(JSOP_CONDSWITCH, 0))
+ return false;
+
+ tdzCacheCaseAndBody_.emplace(bce_);
+
+ state_ = State::Cond;
+ return true;
+}
+
+bool
+SwitchEmitter::emitTable(const TableGenerator& tableGen)
+{
+ MOZ_ASSERT(state_ == State::CaseCount);
+ kind_ = Kind::Table;
+
+ // After entering the scope if necessary, push the switch control.
+ controlInfo_.emplace(bce_, StatementKind::Switch);
+ top_ = bce_->offset();
+
+ // The note has one offset that tells total switch code length.
+
+ // 3 offsets (len, low, high) before the table, 1 per entry.
+ size_t switchSize = size_t(JUMP_OFFSET_LEN * (3 + tableGen.tableLength()));
+ if (!bce_->newSrcNote2(SRC_TABLESWITCH, 0, &noteIndex_))
+ return false;
+
+ if (!caseOffsets_.resize(tableGen.tableLength())) {
+ ReportOutOfMemory(bce_->cx);
+ return false;
+ }
+
+ MOZ_ASSERT(top_ == bce_->offset());
+ if (!bce_->emitN(JSOP_TABLESWITCH, switchSize))
+ return false;
+
+ // Skip default offset.
+ jsbytecode* pc = bce_->code(top_ + JUMP_OFFSET_LEN);
+
+ // Fill in switch bounds, which we know fit in 16-bit offsets.
+ SET_JUMP_OFFSET(pc, tableGen.low());
+ SET_JUMP_OFFSET(pc + JUMP_OFFSET_LEN, tableGen.high());
+
+ state_ = State::Table;
+ return true;
+}
+
+bool
+SwitchEmitter::emitCaseOrDefaultJump(uint32_t caseIndex, bool isDefault)
+{
+ MOZ_ASSERT(kind_ == Kind::Cond);
+
+ if (state_ == State::Case) {
+ // Link the last JSOP_CASE's SRC_NEXTCASE to current JSOP_CASE or
+ // JSOP_DEFAULT for the benefit of IonBuilder.
+ if (!bce_->setSrcNoteOffset(caseNoteIndex_, 0, bce_->offset() - lastCaseOffset_))
+ return false;
+ }
+
+ if (isDefault) {
+ if (!bce_->emitJump(JSOP_DEFAULT, &condSwitchDefaultOffset_))
+ return false;
+ return true;
+ }
+
+ if (!bce_->newSrcNote2(SRC_NEXTCASE, 0, &caseNoteIndex_))
+ return false;
+
+ JumpList caseJump;
+ if (!bce_->emitJump(JSOP_CASE, &caseJump))
+ return false;
+ caseOffsets_[caseIndex] = caseJump.offset;
+ lastCaseOffset_ = caseJump.offset;
+
+ if (state_ == State::Cond) {
+ // Switch note's second offset is to first JSOP_CASE.
+ unsigned noteCount = bce_->notes().length();
+ if (!bce_->setSrcNoteOffset(noteIndex_, 1, lastCaseOffset_ - top_))
+ return false;
+ unsigned noteCountDelta = bce_->notes().length() - noteCount;
+ if (noteCountDelta != 0)
+ caseNoteIndex_ += noteCountDelta;
+ }
+
+ return true;
+}
+
+bool
+SwitchEmitter::emitCaseJump()
+{
+ MOZ_ASSERT(kind_ == Kind::Cond);
+ MOZ_ASSERT(state_ == State::Cond || state_ == State::Case);
+ if (!emitCaseOrDefaultJump(caseIndex_, false))
+ return false;
+ caseIndex_++;
+
+ state_ = State::Case;
+ return true;
+}
+
+bool
+SwitchEmitter::emitImplicitDefault()
+{
+ MOZ_ASSERT(kind_ == Kind::Cond);
+ MOZ_ASSERT(state_ == State::Cond || state_ == State::Case);
+ if (!emitCaseOrDefaultJump(0, true))
+ return false;
+
+ caseIndex_ = 0;
+
+    // No state transition is made after emitting the default jump.
+ return true;
+}
+
+bool
+SwitchEmitter::emitCaseBody()
+{
+ MOZ_ASSERT(kind_ == Kind::Cond);
+ MOZ_ASSERT(state_ == State::Cond || state_ == State::Case ||
+ state_ == State::CaseBody || state_ == State::DefaultBody);
+
+ tdzCacheCaseAndBody_.reset();
+
+ if (state_ == State::Cond || state_ == State::Case) {
+ // For cond switch, JSOP_DEFAULT is always emitted.
+ if (!emitImplicitDefault())
+ return false;
+ }
+
+ JumpList caseJump;
+ caseJump.offset = caseOffsets_[caseIndex_];
+ if (!bce_->emitJumpTargetAndPatch(caseJump))
+ return false;
+
+ JumpTarget here;
+ if (!bce_->emitJumpTarget(&here))
+ return false;
+ caseIndex_++;
+
+ tdzCacheCaseAndBody_.emplace(bce_);
+
+ state_ = State::CaseBody;
+ return true;
+}
+
+bool
+SwitchEmitter::emitCaseBody(int32_t caseValue, const TableGenerator& tableGen)
+{
+ MOZ_ASSERT(kind_ == Kind::Table);
+ MOZ_ASSERT(state_ == State::Table ||
+ state_ == State::CaseBody || state_ == State::DefaultBody);
+
+ tdzCacheCaseAndBody_.reset();
+
+ JumpTarget here;
+ if (!bce_->emitJumpTarget(&here))
+ return false;
+ caseOffsets_[tableGen.toCaseIndex(caseValue)] = here.offset;
+
+ tdzCacheCaseAndBody_.emplace(bce_);
+
+ state_ = State::CaseBody;
+ return true;
+}
+
+bool
+SwitchEmitter::emitDefaultBody()
+{
+ MOZ_ASSERT(state_ == State::Cond || state_ == State::Table ||
+ state_ == State::Case ||
+ state_ == State::CaseBody);
+ MOZ_ASSERT(!hasDefault_);
+
+ tdzCacheCaseAndBody_.reset();
+
+ if (state_ == State::Cond || state_ == State::Case) {
+ // For cond switch, JSOP_DEFAULT is always emitted.
+ if (!emitImplicitDefault())
+ return false;
+ }
+ JumpTarget here;
+ if (!bce_->emitJumpTarget(&here))
+ return false;
+ defaultJumpTargetOffset_ = here;
+
+ tdzCacheCaseAndBody_.emplace(bce_);
+
+ hasDefault_ = true;
+ state_ = State::DefaultBody;
+ return true;
+}
+
+bool
+SwitchEmitter::emitEnd()
+{
+ MOZ_ASSERT(state_ == State::Cond || state_ == State::Table ||
+ state_ == State::CaseBody || state_ == State::DefaultBody);
+
+ tdzCacheCaseAndBody_.reset();
+
+ if (!hasDefault_) {
+ // If no default case, offset for default is to end of switch.
+ if (!bce_->emitJumpTarget(&defaultJumpTargetOffset_))
+ return false;
+ }
+ MOZ_ASSERT(defaultJumpTargetOffset_.offset != -1);
+
+ // Set the default offset (to end of switch if no default).
+ jsbytecode* pc;
+ if (kind_ == Kind::Cond) {
+ pc = nullptr;
+ bce_->patchJumpsToTarget(condSwitchDefaultOffset_, defaultJumpTargetOffset_);
+ } else {
+ // Fill in the default jump target.
+ pc = bce_->code(top_);
+ SET_JUMP_OFFSET(pc, defaultJumpTargetOffset_.offset - top_);
+ pc += JUMP_OFFSET_LEN;
+ }
+
+ // Set the SRC_SWITCH note's offset operand to tell end of switch.
+ if (!bce_->setSrcNoteOffset(noteIndex_, 0, bce_->lastNonJumpTargetOffset() - top_))
+ return false;
+
+ if (kind_ == Kind::Table) {
+ // Skip over the already-initialized switch bounds.
+ pc += 2 * JUMP_OFFSET_LEN;
+
+ // Fill in the jump table, if there is one.
+ for (uint32_t i = 0, length = caseOffsets_.length(); i < length; i++) {
+ ptrdiff_t off = caseOffsets_[i];
+ SET_JUMP_OFFSET(pc, off == 0 ? 0 : off - top_);
+ pc += JUMP_OFFSET_LEN;
+ }
+ }
+
+ // Patch breaks before leaving the scope, as all breaks are under the
+ // lexical scope if it exists.
+ if (!controlInfo_->patchBreaks(bce_))
+ return false;
+
+ if (emitterScope_ && !emitterScope_->leave(bce_))
+ return false;
+
+ emitterScope_.reset();
+ tdzCacheLexical_.reset();
+
+ controlInfo_.reset();
+
+ state_ = State::End;
+ return true;
+}
diff --git a/js/src/frontend/SwitchEmitter.h b/js/src/frontend/SwitchEmitter.h
new file mode 100644
index 0000000000..f712059e67
--- /dev/null
+++ b/js/src/frontend/SwitchEmitter.h
@@ -0,0 +1,470 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef frontend_SwitchEmitter_h
+#define frontend_SwitchEmitter_h
+
+#include "mozilla/Attributes.h"
+#include "mozilla/Maybe.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "jsalloc.h"
+
+#include "frontend/BytecodeControlStructures.h"
+#include "frontend/EmitterScope.h"
+#include "frontend/JumpList.h"
+#include "frontend/TDZCheckCache.h"
+#include "gc/Rooting.h"
+#include "js/Value.h"
+#include "js/Vector.h"
+#include "vm/Scope.h"
+
+namespace js {
+namespace frontend {
+
+struct BytecodeEmitter;
+
+// Class for emitting bytecode for a switch-case-default block.
+//
+// Usage: (checks of the return values are omitted for simplicity)
+//
+// `switch (discriminant) { case c1_expr: c1_body; }`
+// SwitchEmitter se(this);
+// se.emitDiscriminant(Some(offset_of_switch));
+// emit(discriminant);
+//
+// se.validateCaseCount(1);
+// se.emitCond();
+//
+// emit(c1_expr);
+// se.emitCaseJump();
+//
+// se.emitCaseBody();
+// emit(c1_body);
+//
+// se.emitEnd();
+//
+// `switch (discriminant) { case c1_expr: c1_body; case c2_expr: c2_body;
+// default: def_body; }`
+// SwitchEmitter se(this);
+// se.emitDiscriminant(Some(offset_of_switch));
+// emit(discriminant);
+//
+// se.validateCaseCount(2);
+// se.emitCond();
+//
+// emit(c1_expr);
+// se.emitCaseJump();
+//
+// emit(c2_expr);
+// se.emitCaseJump();
+//
+// se.emitCaseBody();
+// emit(c1_body);
+//
+// se.emitCaseBody();
+// emit(c2_body);
+//
+// se.emitDefaultBody();
+// emit(def_body);
+//
+// se.emitEnd();
+//
+// `switch (discriminant) { case c1_expr: c1_body; case c2_expr: c2_body; }`
+// with Table Switch
+// SwitchEmitter::TableGenerator tableGen(this);
+// tableGen.addNumber(c1_expr_value);
+// tableGen.addNumber(c2_expr_value);
+// tableGen.finish(2);
+//
+// // If `!tableGen.isValid()` here, `emitCond` should be used instead.
+//
+// SwitchEmitter se(this);
+// se.emitDiscriminant(Some(offset_of_switch));
+// emit(discriminant);
+// se.validateCaseCount(2);
+// se.emitTable(tableGen);
+//
+// se.emitCaseBody(c1_expr_value, tableGen);
+// emit(c1_body);
+//
+// se.emitCaseBody(c2_expr_value, tableGen);
+// emit(c2_body);
+//
+// se.emitEnd();
+//
+// `switch (discriminant) { case c1_expr: c1_body; case c2_expr: c2_body;
+// default: def_body; }`
+// with Table Switch
+// SwitchEmitter::TableGenerator tableGen(bce);
+// tableGen.addNumber(c1_expr_value);
+// tableGen.addNumber(c2_expr_value);
+// tableGen.finish(2);
+//
+// // If `!tableGen.isValid()` here, `emitCond` should be used instead.
+//
+// SwitchEmitter se(this);
+// se.emitDiscriminant(Some(offset_of_switch));
+// emit(discriminant);
+// se.validateCaseCount(2);
+// se.emitTable(tableGen);
+//
+// se.emitCaseBody(c1_expr_value, tableGen);
+// emit(c1_body);
+//
+// se.emitCaseBody(c2_expr_value, tableGen);
+// emit(c2_body);
+//
+// se.emitDefaultBody();
+// emit(def_body);
+//
+// se.emitEnd();
+//
+// `switch (discriminant) { case c1_expr: c1_body; }`
+// in case c1_body contains lexical bindings
+// SwitchEmitter se(this);
+// se.emitDiscriminant(Some(offset_of_switch));
+// emit(discriminant);
+//
+// se.validateCaseCount(1);
+//
+// se.emitLexical(bindings);
+//
+// se.emitCond();
+//
+// emit(c1_expr);
+// se.emitCaseJump();
+//
+// se.emitCaseBody();
+// emit(c1_body);
+//
+// se.emitEnd();
+//
+// `switch (discriminant) { case c1_expr: c1_body; }`
+//   in case c1_body contains hoisted functions
+// SwitchEmitter se(this);
+// se.emitDiscriminant(Some(offset_of_switch));
+// emit(discriminant);
+//
+// se.validateCaseCount(1);
+//
+// se.emitLexical(bindings);
+//     emit(hoisted functions);
+//
+// se.emitCond();
+//
+// emit(c1_expr);
+// se.emitCaseJump();
+//
+// se.emitCaseBody();
+// emit(c1_body);
+//
+// se.emitEnd();
+//
+class MOZ_STACK_CLASS SwitchEmitter
+{
+ // Bytecode for each case.
+ //
+ // Cond Switch
+ // {discriminant}
+ // JSOP_CONDSWITCH
+ //
+ // {c1_expr}
+ // JSOP_CASE c1
+ //
+ // JSOP_JUMPTARGET
+ // {c2_expr}
+ // JSOP_CASE c2
+ //
+ // ...
+ //
+ // JSOP_JUMPTARGET
+ // JSOP_DEFAULT default
+ //
+ // c1:
+ // JSOP_JUMPTARGET
+ // {c1_body}
+ // JSOP_GOTO end
+ //
+ // c2:
+ // JSOP_JUMPTARGET
+ // {c2_body}
+ // JSOP_GOTO end
+ //
+ // default:
+ // end:
+ // JSOP_JUMPTARGET
+ //
+ // Table Switch
+ // {discriminant}
+ // JSOP_TABLESWITCH c1, c2, ...
+ //
+ // c1:
+ // JSOP_JUMPTARGET
+ // {c1_body}
+ // JSOP_GOTO end
+ //
+ // c2:
+ // JSOP_JUMPTARGET
+ // {c2_body}
+ // JSOP_GOTO end
+ //
+ // ...
+ //
+ // end:
+ // JSOP_JUMPTARGET
+
+ public:
+ enum class Kind {
+ Table,
+ Cond
+ };
+
+ // Class for generating optimized table switch data.
+ class MOZ_STACK_CLASS TableGenerator
+ {
+ BytecodeEmitter* bce_;
+
+ // Bit array for given numbers.
+ mozilla::Maybe<js::Vector<size_t, 128, SystemAllocPolicy>> intmap_;
+
+        // The length of intmap_, in bits.
+ int32_t intmapBitLength_ = 0;
+
+ // The length of the table.
+ uint32_t tableLength_ = 0;
+
+        // The lower and upper bounds of the table.
+ int32_t low_ = JSVAL_INT_MAX, high_ = JSVAL_INT_MIN;
+
+ // Whether the table is still valid.
+        bool valid_ = true;
+
+#ifdef DEBUG
+ bool finished_ = false;
+#endif
+
+ public:
+ explicit TableGenerator(BytecodeEmitter* bce)
+ : bce_(bce)
+ {}
+
+ void setInvalid() {
+ valid_ = false;
+ }
+ MOZ_MUST_USE bool isValid() const {
+ return valid_;
+ }
+ MOZ_MUST_USE bool isInvalid() const {
+ return !valid_;
+ }
+
+ // Add the given number to the table. The number is the value of
+ // `expr` for `case expr:` syntax.
+ MOZ_MUST_USE bool addNumber(int32_t caseValue);
+
+ // Finish generating the table.
+ // `caseCount` should be the number of cases in the switch statement,
+ // excluding the default case.
+ void finish(uint32_t caseCount);
+
+ private:
+ friend SwitchEmitter;
+
+ // The following methods can be used only after calling `finish`.
+
+ // Returns the lower bound of the added numbers.
+ int32_t low() const {
+ MOZ_ASSERT(finished_);
+ return low_;
+ }
+
+        // Returns the upper bound of the added numbers.
+ int32_t high() const {
+ MOZ_ASSERT(finished_);
+ return high_;
+ }
+
+ // Returns the index in SwitchEmitter.caseOffsets_ for table switch.
+ uint32_t toCaseIndex(int32_t caseValue) const;
+
+ // Returns the length of the table.
+ // This method can be called only if `isValid()` is true.
+ uint32_t tableLength() const;
+ };
+
+ private:
+ BytecodeEmitter* bce_;
+
+ // `kind_` should be set to the correct value in emitCond/emitTable.
+ Kind kind_ = Kind::Cond;
+
+    // True if there's an explicit default case.
+ bool hasDefault_ = false;
+
+ // The source note index for SRC_CONDSWITCH.
+ unsigned noteIndex_ = 0;
+
+ // Source note index of the previous SRC_NEXTCASE.
+ unsigned caseNoteIndex_ = 0;
+
+ // The number of cases in the switch statement, excluding the default case.
+ uint32_t caseCount_ = 0;
+
+ // Internal index for case jump and case body, used by cond switch.
+ uint32_t caseIndex_ = 0;
+
+ // Bytecode offset after emitting `discriminant`.
+ ptrdiff_t top_ = 0;
+
+ // Bytecode offset of the previous JSOP_CASE.
+ ptrdiff_t lastCaseOffset_ = 0;
+
+ // Bytecode offset of the JSOP_JUMPTARGET for default body.
+ JumpTarget defaultJumpTargetOffset_ = { -1 };
+
+ // Bytecode offset of the JSOP_DEFAULT.
+ JumpList condSwitchDefaultOffset_;
+
+    // Instantiated when there's a lexical scope for the entire switch.
+ mozilla::Maybe<TDZCheckCache> tdzCacheLexical_;
+ mozilla::Maybe<EmitterScope> emitterScope_;
+
+ // Instantiated while emitting case expression and case/default body.
+ mozilla::Maybe<TDZCheckCache> tdzCacheCaseAndBody_;
+
+ // Control for switch.
+ mozilla::Maybe<BreakableControl> controlInfo_;
+
+ mozilla::Maybe<uint32_t> switchPos_;
+
+ // Cond Switch:
+ // Offset of each JSOP_CASE.
+ // Table Switch:
+ // Offset of each JSOP_JUMPTARGET for case.
+ js::Vector<ptrdiff_t, 32, SystemAllocPolicy> caseOffsets_;
+
+ // The state of this emitter.
+ //
+ // +-------+ emitDiscriminant +--------------+
+ // | Start |----------------->| Discriminant |-+
+ // +-------+ +--------------+ |
+ // |
+ // +-------------------------------------------+
+ // |
+ // | validateCaseCount +-----------+
+ // +->+------------------------>+------------------>| CaseCount |-+
+ // | ^ +-----------+ |
+ // | emitLexical +---------+ | |
+ // +------------>| Lexical |-+ |
+ // +---------+ |
+ // |
+ // +--------------------------------------------------------------+
+ // |
+ // | emitTable +-------+
+ // +---------->| Table |---------------------------->+-+
+ // | +-------+ ^ |
+ // | | |
+ // | emitCond +------+ | |
+ // +---------->| Cond |-+------------------------>+->+ |
+ // +------+ | ^ |
+ // | | |
+ // | emitCase +------+ | |
+ // +->+--------->| Case |->+-+ |
+ // ^ +------+ | |
+ // | | |
+ // +--------------------+ |
+ // |
+ // +---------------------------------------------------+
+ // |
+ // | emitEnd +-----+
+ // +-+----------------------------------------->+-------->| End |
+ // | ^ +-----+
+ // | emitCaseBody +----------+ |
+ // +->+-+---------------->| CaseBody |--->+-+-+
+ // ^ | +----------+ ^ |
+ // | | | |
+ // | | emitDefaultBody +-------------+ | |
+ // | +---------------->| DefaultBody |-+ |
+ // | +-------------+ |
+ // | |
+ // +-------------------------------------+
+ //
+ enum class State {
+ // The initial state.
+ Start,
+
+ // After calling emitDiscriminant.
+ Discriminant,
+
+ // After calling validateCaseCount.
+ CaseCount,
+
+ // After calling emitLexical.
+ Lexical,
+
+ // After calling emitCond.
+ Cond,
+
+ // After calling emitTable.
+ Table,
+
+        // After calling emitCaseJump.
+ Case,
+
+ // After calling emitCaseBody.
+ CaseBody,
+
+ // After calling emitDefaultBody.
+ DefaultBody,
+
+ // After calling emitEnd.
+ End
+ };
+ State state_ = State::Start;
+
+ public:
+ explicit SwitchEmitter(BytecodeEmitter* bce);
+
+ // `switchPos` is the offset in the source code for the character below:
+ //
+ // switch ( cond ) { ... }
+ // ^
+ // |
+ // switchPos
+ //
+ // Can be Nothing() if not available.
+ MOZ_MUST_USE bool emitDiscriminant(const mozilla::Maybe<uint32_t>& switchPos);
+
+ // `caseCount` should be the number of cases in the switch statement,
+ // excluding the default case.
+ MOZ_MUST_USE bool validateCaseCount(uint32_t caseCount);
+
+ // `bindings` is a lexical scope for the entire switch, in case there's
+ // let/const effectively directly under case or default blocks.
+ MOZ_MUST_USE bool emitLexical(Handle<LexicalScope::Data*> bindings);
+
+ MOZ_MUST_USE bool emitCond();
+ MOZ_MUST_USE bool emitTable(const TableGenerator& tableGen);
+
+ MOZ_MUST_USE bool emitCaseJump();
+
+ MOZ_MUST_USE bool emitCaseBody();
+ MOZ_MUST_USE bool emitCaseBody(int32_t caseValue, const TableGenerator& tableGen);
+ MOZ_MUST_USE bool emitDefaultBody();
+ MOZ_MUST_USE bool emitEnd();
+
+ private:
+ MOZ_MUST_USE bool emitCaseOrDefaultJump(uint32_t caseIndex, bool isDefault);
+ MOZ_MUST_USE bool emitImplicitDefault();
+};
+
+} /* namespace frontend */
+} /* namespace js */
+
+#endif /* frontend_SwitchEmitter_h */
diff --git a/js/src/frontend/TDZCheckCache.h b/js/src/frontend/TDZCheckCache.h
index 0789cbeeaf..29ccefda14 100644
--- a/js/src/frontend/TDZCheckCache.h
+++ b/js/src/frontend/TDZCheckCache.h
@@ -10,7 +10,7 @@
#include "mozilla/Attributes.h"
#include "mozilla/Maybe.h"
-#include "frontend/SharedContext.h" // for Nestable
+#include "ds/Nestable.h"
#include "frontend/NameCollections.h"
#include "js/TypeDecls.h"
#include "vm/Stack.h"
diff --git a/js/src/frontend/TryEmitter.cpp b/js/src/frontend/TryEmitter.cpp
new file mode 100644
index 0000000000..2d6e774456
--- /dev/null
+++ b/js/src/frontend/TryEmitter.cpp
@@ -0,0 +1,286 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "frontend/TryEmitter.h"
+
+#include "frontend/BytecodeEmitter.h"
+#include "frontend/SourceNotes.h"
+#include "vm/Opcodes.h"
+
+using namespace js;
+using namespace js::frontend;
+
+using mozilla::Maybe;
+
+TryEmitter::TryEmitter(BytecodeEmitter* bce, Kind kind,
+ ShouldUseRetVal retValKind,
+ ShouldUseControl controlKind)
+ : bce_(bce),
+ kind_(kind),
+ retValKind_(retValKind),
+ depth_(0),
+ noteIndex_(0),
+ tryStart_(0),
+ state_(Start)
+{
+ if (controlKind == UseControl)
+ controlInfo_.emplace(bce_, hasFinally() ? StatementKind::Finally : StatementKind::Try);
+ finallyStart_.offset = 0;
+}
+
+// Emits JSOP_GOTO to the end of try-catch-finally.
+// Used in `yield*`.
+bool
+TryEmitter::emitJumpOverCatchAndFinally()
+{
+ if (!bce_->emitJump(JSOP_GOTO, &catchAndFinallyJump_))
+ return false;
+ return true;
+}
+
+bool
+TryEmitter::emitTry()
+{
+ MOZ_ASSERT(state_ == Start);
+
+ // Since an exception can be thrown at any place inside the try block,
+ // we need to restore the stack and the scope chain before we transfer
+ // the control to the exception handler.
+ //
+ // For that we store in a try note associated with the catch or
+ // finally block the stack depth upon the try entry. The interpreter
+ // uses this depth to properly unwind the stack and the scope chain.
+ depth_ = bce_->stackDepth;
+
+ // Record the try location, then emit the try block.
+ if (!bce_->newSrcNote(SRC_TRY, &noteIndex_))
+ return false;
+ if (!bce_->emit1(JSOP_TRY))
+ return false;
+ tryStart_ = bce_->offset();
+
+ state_ = Try;
+ return true;
+}
+
+bool
+TryEmitter::emitTryEnd()
+{
+ MOZ_ASSERT(state_ == Try);
+ MOZ_ASSERT(depth_ == bce_->stackDepth);
+
+ // GOSUB to finally, if present.
+ if (hasFinally() && controlInfo_) {
+ if (!bce_->emitJump(JSOP_GOSUB, &controlInfo_->gosubs))
+ return false;
+ }
+
+ // Source note points to the jump at the end of the try block.
+ if (!bce_->setSrcNoteOffset(noteIndex_, 0, bce_->offset() - tryStart_ + JSOP_TRY_LENGTH))
+ return false;
+
+ // Emit jump over catch and/or finally.
+ if (!bce_->emitJump(JSOP_GOTO, &catchAndFinallyJump_))
+ return false;
+
+ if (!bce_->emitJumpTarget(&tryEnd_))
+ return false;
+
+ return true;
+}
+
+bool
+TryEmitter::emitCatch()
+{
+ if (state_ == Try) {
+ if (!emitTryEnd())
+ return false;
+ } else {
+ MOZ_ASSERT(state_ == Catch);
+ if (!emitCatchEnd(true))
+ return false;
+ }
+
+ MOZ_ASSERT(bce_->stackDepth == depth_);
+
+ if (retValKind_ == UseRetVal) {
+ // Clear the frame's return value that might have been set by the
+ // try block:
+ //
+ // eval("try { 1; throw 2 } catch(e) {}"); // undefined, not 1
+ if (!bce_->emit1(JSOP_UNDEFINED))
+ return false;
+ if (!bce_->emit1(JSOP_SETRVAL))
+ return false;
+ }
+
+ state_ = Catch;
+ return true;
+}
+
+
+bool
+TryEmitter::emitCatchEnd(bool hasNext)
+{
+ MOZ_ASSERT(state_ == Catch);
+
+ if (!controlInfo_)
+ return true;
+
+ // gosub <finally>, if required.
+ if (hasFinally()) {
+ if (!bce_->emitJump(JSOP_GOSUB, &controlInfo_->gosubs))
+ return false;
+ MOZ_ASSERT(bce_->stackDepth == depth_);
+ }
+
+ // Jump over the remaining catch blocks. This will get fixed
+ // up to jump to after catch/finally.
+ if (!bce_->emitJump(JSOP_GOTO, &catchAndFinallyJump_))
+ return false;
+
+ // If this catch block had a guard clause, patch the guard jump to
+ // come here.
+ if (controlInfo_->guardJump.offset != -1) {
+ if (!bce_->emitJumpTargetAndPatch(controlInfo_->guardJump))
+ return false;
+ controlInfo_->guardJump.offset = -1;
+
+ // If this catch block is the last one, rethrow, delegating
+ // execution of any finally block to the exception handler.
+ if (!hasNext) {
+ if (!bce_->emit1(JSOP_EXCEPTION))
+ return false;
+ if (!bce_->emit1(JSOP_THROW))
+ return false;
+ }
+ }
+
+ return true;
+}
+
+bool
+TryEmitter::emitFinally(const Maybe<uint32_t>& finallyPos /* = Nothing() */)
+{
+    // If we are using controlInfo_ (i.e., emitting syntactic try blocks),
+    // we must have specified up front whether there will be a finally
+    // clause. For internal try blocks, like those emitted for yield* and
+ // IteratorClose inside for-of loops, we can emitFinally even without
+ // specifying up front, since the internal try blocks emit no GOSUBs.
+ if (!controlInfo_) {
+ if (kind_ == TryCatch)
+ kind_ = TryCatchFinally;
+ } else {
+ MOZ_ASSERT(hasFinally());
+ }
+
+ if (state_ == Try) {
+ if (!emitTryEnd())
+ return false;
+ } else {
+ MOZ_ASSERT(state_ == Catch);
+ if (!emitCatchEnd(false))
+ return false;
+ }
+
+ MOZ_ASSERT(bce_->stackDepth == depth_);
+
+ if (!bce_->emitJumpTarget(&finallyStart_))
+ return false;
+
+ if (controlInfo_) {
+ // Fix up the gosubs that might have been emitted before non-local
+ // jumps to the finally code.
+ bce_->patchJumpsToTarget(controlInfo_->gosubs, finallyStart_);
+
+ // Indicate that we're emitting a subroutine body.
+ controlInfo_->setEmittingSubroutine();
+ }
+ if (finallyPos) {
+ if (!bce_->updateSourceCoordNotes(finallyPos.value()))
+ return false;
+ }
+ if (!bce_->emit1(JSOP_FINALLY))
+ return false;
+
+ if (retValKind_ == UseRetVal) {
+ if (!bce_->emit1(JSOP_GETRVAL))
+ return false;
+
+ // Clear the frame's return value to make break/continue return
+        // the correct value even if there's no other statement before them:
+ //
+ // eval("x: try { 1 } finally { break x; }"); // undefined, not 1
+ if (!bce_->emit1(JSOP_UNDEFINED))
+ return false;
+ if (!bce_->emit1(JSOP_SETRVAL))
+ return false;
+ }
+
+ state_ = Finally;
+ return true;
+}
+
+bool
+TryEmitter::emitFinallyEnd()
+{
+ MOZ_ASSERT(state_ == Finally);
+
+ if (retValKind_ == UseRetVal) {
+ if (!bce_->emit1(JSOP_SETRVAL))
+ return false;
+ }
+
+ if (!bce_->emit1(JSOP_RETSUB))
+ return false;
+
+ bce_->hasTryFinally = true;
+ return true;
+}
+
+bool
+TryEmitter::emitEnd()
+{
+ if (state_ == Catch) {
+ MOZ_ASSERT(!hasFinally());
+ if (!emitCatchEnd(false))
+ return false;
+ } else {
+ MOZ_ASSERT(state_ == Finally);
+ MOZ_ASSERT(hasFinally());
+ if (!emitFinallyEnd())
+ return false;
+ }
+
+ MOZ_ASSERT(bce_->stackDepth == depth_);
+
+ // ReconstructPCStack needs a NOP here to mark the end of the last
+ // catch block.
+ if (!bce_->emit1(JSOP_NOP))
+ return false;
+
+ // Fix up the end-of-try/catch jumps to come here.
+ if (!bce_->emitJumpTargetAndPatch(catchAndFinallyJump_))
+ return false;
+
+ // Add the try note last, to let post-order give us the right ordering
+ // (first to last for a given nesting level, inner to outer by level).
+ if (hasCatch()) {
+ if (!bce_->tryNoteList.append(JSTRY_CATCH, depth_, tryStart_, tryEnd_.offset))
+ return false;
+ }
+
+ // If we've got a finally, mark try+catch region with additional
+ // trynote to catch exceptions (re)thrown from a catch block or
+ // for the try{}finally{} case.
+ if (hasFinally()) {
+ if (!bce_->tryNoteList.append(JSTRY_FINALLY, depth_, tryStart_, finallyStart_.offset))
+ return false;
+ }
+
+ state_ = End;
+ return true;
+}
diff --git a/js/src/frontend/TryEmitter.h b/js/src/frontend/TryEmitter.h
new file mode 100644
index 0000000000..ff42cda0d1
--- /dev/null
+++ b/js/src/frontend/TryEmitter.h
@@ -0,0 +1,117 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef frontend_TryEmitter_h
+#define frontend_TryEmitter_h
+
+#include "mozilla/Attributes.h"
+#include "mozilla/Maybe.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "frontend/BytecodeControlStructures.h"
+#include "frontend/JumpList.h"
+#include "frontend/TDZCheckCache.h"
+
+namespace js {
+namespace frontend {
+
+struct BytecodeEmitter;
+
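+// Class for emitting bytecode for try-catch-finally blocks.
+//
+// Usage sketch, inferred from the state assertions in TryEmitter.cpp
+// (return-value checks are omitted for simplicity):
+//
+//   `try { try_block } catch (e) { catch_block }`
+//     TryEmitter tryCatch(this, TryEmitter::TryCatch);
+//     tryCatch.emitTry();
+//     emit(try_block);
+//     tryCatch.emitCatch();
+//     emit(catch_block);
+//     tryCatch.emitEnd();
+//
+//   `try { try_block } finally { finally_block }`
+//     TryEmitter tryFinally(this, TryEmitter::TryFinally);
+//     tryFinally.emitTry();
+//     emit(try_block);
+//     tryFinally.emitFinally(Some(offset_of_finally));
+//     emit(finally_block);
+//     tryFinally.emitEnd();
+//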
+class MOZ_STACK_CLASS TryEmitter
+{
+ public:
+ enum Kind {
+ TryCatch,
+ TryCatchFinally,
+ TryFinally
+ };
+ enum ShouldUseRetVal {
+ UseRetVal,
+ DontUseRetVal
+ };
+ enum ShouldUseControl {
+ UseControl,
+ DontUseControl,
+ };
+
+ private:
+ BytecodeEmitter* bce_;
+ Kind kind_;
+ ShouldUseRetVal retValKind_;
+
+ // Track jumps-over-catches and gosubs-to-finally for later fixup.
+ //
+ // When a finally block is active, non-local jumps (including
+ // jumps-over-catches) result in a GOSUB being written into the bytecode
+    // stream and fixed up later.
+ //
+ // If ShouldUseControl is DontUseControl, all that handling is skipped.
+ // DontUseControl is used by yield* and the internal try-catch around
+ // IteratorClose. These internal uses must:
+ // * have only one catch block
+ // * have no catch guard
+ // * have JSOP_GOTO at the end of catch-block
+ // * have no non-local-jump
+    // * not use the finally block for normal completion of the try block
+    //   and catch block
+ //
+ // Additionally, a finally block may be emitted when ShouldUseControl is
+ // DontUseControl, even if the kind is not TryCatchFinally or TryFinally,
+    // because GOSUBs are not emitted. This internal use shares the same
+    // requirements as above.
+ Maybe<TryFinallyControl> controlInfo_;
+
+ int depth_;
+ unsigned noteIndex_;
+ ptrdiff_t tryStart_;
+ JumpList catchAndFinallyJump_;
+ JumpTarget tryEnd_;
+ JumpTarget finallyStart_;
+
+ enum State {
+ Start,
+ Try,
+ TryEnd,
+ Catch,
+ CatchEnd,
+ Finally,
+ FinallyEnd,
+ End
+ };
+ State state_;
+
+ bool hasCatch() const {
+ return kind_ == TryCatch || kind_ == TryCatchFinally;
+ }
+ bool hasFinally() const {
+ return kind_ == TryCatchFinally || kind_ == TryFinally;
+ }
+
+ public:
+ TryEmitter(BytecodeEmitter* bce, Kind kind,
+ ShouldUseRetVal retValKind = UseRetVal, ShouldUseControl controlKind = UseControl);
+
+ MOZ_MUST_USE bool emitJumpOverCatchAndFinally();
+
+ MOZ_MUST_USE bool emitTry();
+ MOZ_MUST_USE bool emitCatch();
+
+ MOZ_MUST_USE bool emitFinally(const mozilla::Maybe<uint32_t>& finallyPos = mozilla::Nothing());
+
+ MOZ_MUST_USE bool emitEnd();
+
+ private:
+ MOZ_MUST_USE bool emitTryEnd();
+ MOZ_MUST_USE bool emitCatchEnd(bool hasNext);
+ MOZ_MUST_USE bool emitFinallyEnd();
+};
+
+} /* namespace frontend */
+} /* namespace js */
+
+#endif /* frontend_TryEmitter_h */
diff --git a/js/src/moz.build b/js/src/moz.build
index 86f1289e40..115747daaf 100644
--- a/js/src/moz.build
+++ b/js/src/moz.build
@@ -139,18 +139,23 @@ UNIFIED_SOURCES += [
'ds/LifoAlloc.cpp',
'ds/MemoryProtectionExceptionHandler.cpp',
'frontend/BytecodeCompiler.cpp',
+ 'frontend/BytecodeControlStructures.cpp',
'frontend/BytecodeEmitter.cpp',
'frontend/CallOrNewEmitter.cpp',
'frontend/ElemOpEmitter.cpp',
+ 'frontend/EmitterScope.cpp',
'frontend/FoldConstants.cpp',
+ 'frontend/ForOfLoopControl.cpp',
'frontend/IfEmitter.cpp',
'frontend/JumpList.cpp',
'frontend/NameFunctions.cpp',
'frontend/NameOpEmitter.cpp',
'frontend/ParseNode.cpp',
'frontend/PropOpEmitter.cpp',
+ 'frontend/SwitchEmitter.cpp',
'frontend/TDZCheckCache.cpp',
'frontend/TokenStream.cpp',
+ 'frontend/TryEmitter.cpp',
'gc/Allocator.cpp',
'gc/Barrier.cpp',
'gc/GCTrace.cpp',