Diffstat (limited to 'js/src/frontend/BytecodeEmitter.h')
-rw-r--r--  js/src/frontend/BytecodeEmitter.h  50
1 file changed, 38 insertions(+), 12 deletions(-)
diff --git a/js/src/frontend/BytecodeEmitter.h b/js/src/frontend/BytecodeEmitter.h
index 04307c8c1..595ee6405 100644
--- a/js/src/frontend/BytecodeEmitter.h
+++ b/js/src/frontend/BytecodeEmitter.h
@@ -109,10 +109,12 @@ struct CGYieldOffsetList {
void finish(YieldOffsetArray& array, uint32_t prologueLength);
};
-// Use zero inline elements because these go on the stack and affect how many
-// nested functions are possible.
-typedef Vector<jsbytecode, 0> BytecodeVector;
-typedef Vector<jssrcnote, 0> SrcNotesVector;
+static size_t MaxBytecodeLength = INT32_MAX;
+static size_t MaxSrcNotesLength = INT32_MAX;
+
+// Have a few inline elements to avoid heap allocation for tiny sequences.
+typedef Vector<jsbytecode, 256> BytecodeVector;
+typedef Vector<jssrcnote, 64> SrcNotesVector;
// Linked list of jump instructions that need to be patched. The linked list is
// stored in the bytes of the incomplete bytecode that will be patched, so no
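
The MaxBytecodeLength and MaxSrcNotesLength constants added above appear to cap how much bytecode and how many source notes a single script may accumulate (an int32 offset must be able to address any position). A minimal, self-contained sketch of that kind of length check follows; it is not SpiderMonkey code, and appendOp, the std::vector storage, and the bool error signalling are hypothetical stand-ins.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Mirrors the constant introduced in the patch; the check below is the sketch.
static const std::size_t MaxBytecodeLength = INT32_MAX;

// Hypothetical append helper: the real emitter uses js::Vector and its own
// error reporting rather than std::vector and a bare bool.
static bool appendOp(std::vector<std::uint8_t>& code, std::uint8_t op)
{
    if (code.size() + 1 > MaxBytecodeLength)
        return false;  // a real emitter would report "script too large" here
    code.push_back(op);
    return true;
}

int main()
{
    std::vector<std::uint8_t> code;
    if (appendOp(code, 0x01))
        std::printf("bytecode length: %zu\n", code.size());
    return 0;
}
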
@@ -169,6 +171,11 @@ struct JumpList {
void patchAll(jsbytecode* code, JumpTarget target);
};
+enum class ValueUsage {
+ WantValue,
+ IgnoreValue
+};
+
struct MOZ_STACK_CLASS BytecodeEmitter
{
class TDZCheckCache;
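
The ValueUsage enum added above lets a caller state whether it actually needs the value an expression leaves behind. Below is a minimal, self-contained sketch of that calling pattern; emitCall and emitExpressionStatement are hypothetical stand-ins, not BytecodeEmitter methods, and only the enum itself is taken from the patch.

#include <cstdio>

enum class ValueUsage {
    WantValue,
    IgnoreValue
};

// Hypothetical emitter entry point: defaults to wanting the value, like the
// emitTree/emitCallOrNew signatures in the patch.
static bool emitCall(ValueUsage valueUsage = ValueUsage::WantValue)
{
    // A real emitter could choose a different opcode when the result is unused.
    std::puts(valueUsage == ValueUsage::IgnoreValue ? "call (result ignored)"
                                                    : "call (result wanted)");
    return true;
}

// Statement position discards the expression's value, so it says so explicitly.
static bool emitExpressionStatement()
{
    return emitCall(ValueUsage::IgnoreValue);
}

int main()
{
    emitCall();                  // defaults to WantValue
    emitExpressionStatement();   // threads IgnoreValue downward
    return 0;
}
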
@@ -354,7 +361,7 @@ struct MOZ_STACK_CLASS BytecodeEmitter
MOZ_MUST_USE bool maybeSetSourceMap();
void tellDebuggerAboutCompiledScript(ExclusiveContext* cx);
- inline TokenStream* tokenStream();
+ inline TokenStream& tokenStream();
BytecodeVector& code() const { return current->code; }
jsbytecode* code(ptrdiff_t offset) const { return current->code.begin() + offset; }
@@ -388,7 +395,7 @@ struct MOZ_STACK_CLASS BytecodeEmitter
}
bool reportError(ParseNode* pn, unsigned errorNumber, ...);
- bool reportStrictWarning(ParseNode* pn, unsigned errorNumber, ...);
+ bool reportExtraWarning(ParseNode* pn, unsigned errorNumber, ...);
bool reportStrictModeError(ParseNode* pn, unsigned errorNumber, ...);
// If pn contains a useful expression, return true with *answer set to true.
@@ -432,10 +439,12 @@ struct MOZ_STACK_CLASS BytecodeEmitter
};
// Emit code for the tree rooted at pn.
- MOZ_MUST_USE bool emitTree(ParseNode* pn, EmitLineNumberNote emitLineNote = EMIT_LINENOTE);
+ MOZ_MUST_USE bool emitTree(ParseNode* pn, ValueUsage valueUsage = ValueUsage::WantValue,
+ EmitLineNumberNote emitLineNote = EMIT_LINENOTE);
// Emit code for the tree rooted at pn with its own TDZ cache.
- MOZ_MUST_USE bool emitTreeInBranch(ParseNode* pn);
+ MOZ_MUST_USE bool emitTreeInBranch(ParseNode* pn,
+ ValueUsage valueUsage = ValueUsage::WantValue);
// Emit global, eval, or module code for tree rooted at body. Always
// encompasses the entire source.
@@ -519,7 +528,7 @@ struct MOZ_STACK_CLASS BytecodeEmitter
MOZ_MUST_USE bool emitAtomOp(ParseNode* pn, JSOp op);
MOZ_MUST_USE bool emitArrayLiteral(ParseNode* pn);
- MOZ_MUST_USE bool emitArray(ParseNode* pn, uint32_t count, JSOp op);
+ MOZ_MUST_USE bool emitArray(ParseNode* pn, uint32_t count);
MOZ_MUST_USE bool emitArrayComp(ParseNode* pn);
MOZ_MUST_USE bool emitInternedScopeOp(uint32_t index, JSOp op);
@@ -531,6 +540,8 @@ struct MOZ_STACK_CLASS BytecodeEmitter
MOZ_NEVER_INLINE MOZ_MUST_USE bool emitFunction(ParseNode* pn, bool needsProto = false);
MOZ_NEVER_INLINE MOZ_MUST_USE bool emitObject(ParseNode* pn);
+ MOZ_MUST_USE bool replaceNewInitWithNewObject(JSObject* obj, ptrdiff_t offset);
+
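
replaceNewInitWithNewObject takes a bytecode offset, which suggests it rewrites an opcode the emitter has already written once more is known about the object literal. The sketch below shows only that generic patch-in-place idea; the opcode values, the container, and the helper name are hypothetical, and this is not the engine's implementation.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

const std::uint8_t OP_NEWINIT   = 0x10;  // hypothetical placeholder opcode
const std::uint8_t OP_NEWOBJECT = 0x11;  // hypothetical replacement opcode

// Rewrite the placeholder previously emitted at |offset| in place.
static void replacePlaceholder(std::vector<std::uint8_t>& code, std::ptrdiff_t offset)
{
    std::size_t index = static_cast<std::size_t>(offset);
    assert(code[index] == OP_NEWINIT);
    code[index] = OP_NEWOBJECT;
    // A real replacement would also fill in the operand for the new opcode.
}

int main()
{
    std::vector<std::uint8_t> code = { OP_NEWINIT, 0x00 };
    replacePlaceholder(code, 0);
    std::printf("opcode at offset 0 is now 0x%02x\n", static_cast<unsigned>(code[0]));
    return 0;
}
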
MOZ_MUST_USE bool emitHoistedFunctionsInList(ParseNode* pn);
MOZ_MUST_USE bool emitPropertyList(ParseNode* pn, MutableHandlePlainObject objp,
@@ -658,6 +669,10 @@ struct MOZ_STACK_CLASS BytecodeEmitter
// []/{} expression).
MOZ_MUST_USE bool emitSetOrInitializeDestructuring(ParseNode* target, DestructuringFlavor flav);
+ // emitDestructuringObjRestExclusionSet emits the property exclusion set
+ // for the rest-property in an object pattern.
+ MOZ_MUST_USE bool emitDestructuringObjRestExclusionSet(ParseNode* pattern);
+
// emitDestructuringOps assumes the to-be-destructured value has been
// pushed on the stack and emits code to destructure each part of a [] or
// {} lhs expression.
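
The exclusion set named above holds the property keys an object rest pattern has already bound, so those keys are skipped when the remaining properties are copied into the rest object. A small, self-contained illustration of that behaviour using plain C++ containers; the names and types are hypothetical and none of this is engine code.

#include <cstdio>
#include <map>
#include <set>
#include <string>

using Object = std::map<std::string, int>;

// Copy every property whose key is not in the exclusion set.
static Object copyRest(const Object& source, const std::set<std::string>& excluded)
{
    Object rest;
    for (const auto& prop : source) {
        if (!excluded.count(prop.first))
            rest.insert(prop);
    }
    return rest;
}

int main()
{
    // Roughly what `let { a, b, ...rest } = obj;` needs: exclude "a" and "b".
    Object obj = { { "a", 1 }, { "b", 2 }, { "c", 3 } };
    Object rest = copyRest(obj, { "a", "b" });
    for (const auto& prop : rest)
        std::printf("%s = %d\n", prop.first.c_str(), prop.second);  // prints: c = 3
    return 0;
}
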
@@ -675,6 +690,15 @@ struct MOZ_STACK_CLASS BytecodeEmitter
// object, with no overall effect on the stack.
MOZ_MUST_USE bool emitRequireObjectCoercible();
+ enum class CopyOption {
+ Filtered, Unfiltered
+ };
+
+ // Calls either the |CopyDataProperties| or the
+ // |CopyDataPropertiesUnfiltered| intrinsic function, consumes three (or
+ // two in the latter case) elements from the stack.
+ MOZ_MUST_USE bool emitCopyDataProperties(CopyOption option);
+
// emitIterator expects the iterable to already be on the stack.
// It will replace that stack value with the corresponding iterator
MOZ_MUST_USE bool emitIterator();
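
Per the comment above, emitCopyDataProperties forwards to one of two self-hosted intrinsics, and the two differ in how many stack elements they consume: three for the filtered form (presumably including the exclusion set built elsewhere in this patch), two for the unfiltered one. A minimal sketch of that selection; the stack-effect numbers come from the comment, everything else here is hypothetical.

#include <cstdio>

enum class CopyOption {
    Filtered, Unfiltered
};

struct IntrinsicCall {
    const char* name;   // self-hosted intrinsic to invoke
    int stackArgs;      // elements consumed from the operand stack
};

// Map the option to the intrinsic named in the comment and its stack effect.
static IntrinsicCall selectCopyIntrinsic(CopyOption option)
{
    if (option == CopyOption::Filtered)
        return { "CopyDataProperties", 3 };        // 3 elements per the comment
    return { "CopyDataPropertiesUnfiltered", 2 };  // 2 elements per the comment
}

int main()
{
    IntrinsicCall call = selectCopyIntrinsic(CopyOption::Filtered);
    std::printf("%s consumes %d stack elements\n", call.name, call.stackArgs);
    return 0;
}
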
@@ -722,16 +746,18 @@ struct MOZ_STACK_CLASS BytecodeEmitter
MOZ_MUST_USE bool emitRightAssociative(ParseNode* pn);
MOZ_MUST_USE bool emitLeftAssociative(ParseNode* pn);
MOZ_MUST_USE bool emitLogical(ParseNode* pn);
- MOZ_MUST_USE bool emitSequenceExpr(ParseNode* pn);
+ MOZ_MUST_USE bool emitSequenceExpr(ParseNode* pn,
+ ValueUsage valueUsage = ValueUsage::WantValue);
MOZ_NEVER_INLINE MOZ_MUST_USE bool emitIncOrDec(ParseNode* pn);
- MOZ_MUST_USE bool emitConditionalExpression(ConditionalExpression& conditional);
+ MOZ_MUST_USE bool emitConditionalExpression(ConditionalExpression& conditional,
+ ValueUsage valueUsage = ValueUsage::WantValue);
MOZ_MUST_USE bool isRestParameter(ParseNode* pn, bool* result);
MOZ_MUST_USE bool emitOptimizeSpread(ParseNode* arg0, JumpList* jmp, bool* emitted);
- MOZ_MUST_USE bool emitCallOrNew(ParseNode* pn);
+ MOZ_MUST_USE bool emitCallOrNew(ParseNode* pn, ValueUsage valueUsage = ValueUsage::WantValue);
MOZ_MUST_USE bool emitSelfHostedCallFunction(ParseNode* pn);
MOZ_MUST_USE bool emitSelfHostedResumeGenerator(ParseNode* pn);
MOZ_MUST_USE bool emitSelfHostedForceInterpreter(ParseNode* pn);