Diffstat (limited to 'js/src/jit')
-rw-r--r--  js/src/jit/BaselineBailouts.cpp              |  35
-rw-r--r--  js/src/jit/BaselineCompiler.cpp              |  95
-rw-r--r--  js/src/jit/BaselineCompiler.h                |  13
-rw-r--r--  js/src/jit/CodeGenerator.cpp                 | 127
-rw-r--r--  js/src/jit/CodeGenerator.h                   |   8
-rw-r--r--  js/src/jit/IonBuilder.cpp                    |  81
-rw-r--r--  js/src/jit/IonBuilder.h                      |   4
-rw-r--r--  js/src/jit/JitFrames.cpp                     | 101
-rw-r--r--  js/src/jit/Lowering.cpp                      |  27
-rw-r--r--  js/src/jit/Lowering.h                        |   2
-rw-r--r--  js/src/jit/MIR.h                             |  60
-rw-r--r--  js/src/jit/MIRGraph.cpp                      |  13
-rw-r--r--  js/src/jit/MIRGraph.h                        |   3
-rw-r--r--  js/src/jit/MOpcodes.h                        |   2
-rw-r--r--  js/src/jit/VMFunctions.cpp                   |   9
-rw-r--r--  js/src/jit/VMFunctions.h                     |   4
-rw-r--r--  js/src/jit/arm/MacroAssembler-arm.cpp        |  12
-rw-r--r--  js/src/jit/arm/MacroAssembler-arm.h          |  16
-rw-r--r--  js/src/jit/arm64/MacroAssembler-arm64.h      |   8
-rw-r--r--  js/src/jit/mips32/MacroAssembler-mips32.cpp  |   4
-rw-r--r--  js/src/jit/mips32/MacroAssembler-mips32.h    |   4
-rw-r--r--  js/src/jit/mips64/MacroAssembler-mips64.cpp  |   2
-rw-r--r--  js/src/jit/mips64/MacroAssembler-mips64.h    |   6
-rw-r--r--  js/src/jit/shared/LIR-shared.h               |  40
-rw-r--r--  js/src/jit/shared/LOpcodes-shared.h          |   2
-rw-r--r--  js/src/jit/x64/MacroAssembler-x64.h          |   8
-rw-r--r--  js/src/jit/x86/MacroAssembler-x86.cpp        |   4
-rw-r--r--  js/src/jit/x86/MacroAssembler-x86.h          |  12
28 files changed, 568 insertions(+), 134 deletions(-)
diff --git a/js/src/jit/BaselineBailouts.cpp b/js/src/jit/BaselineBailouts.cpp
index 8fc8a522d..3ab722b3d 100644
--- a/js/src/jit/BaselineBailouts.cpp
+++ b/js/src/jit/BaselineBailouts.cpp
@@ -487,7 +487,7 @@ GetNextNonLoopEntryPc(jsbytecode* pc)
}
static bool
-HasLiveIteratorAtStackDepth(JSScript* script, jsbytecode* pc, uint32_t stackDepth)
+HasLiveStackValueAtDepth(JSScript* script, jsbytecode* pc, uint32_t stackDepth)
{
if (!script->hasTrynotes())
return false;
@@ -501,14 +501,31 @@ HasLiveIteratorAtStackDepth(JSScript* script, jsbytecode* pc, uint32_t stackDept
if (pcOffset >= tn->start + tn->length)
continue;
- // For-in loops have only the iterator on stack.
- if (tn->kind == JSTRY_FOR_IN && stackDepth == tn->stackDepth)
- return true;
+ switch (tn->kind) {
+ case JSTRY_FOR_IN:
+ // For-in loops have only the iterator on the stack.
+ if (stackDepth == tn->stackDepth)
+ return true;
+ break;
+
+ case JSTRY_FOR_OF:
+ // For-of loops have the iterator, the result object, and the value
+ // of the result object on the stack. The iterator is below the result
+ // object and the value.
+ if (stackDepth == tn->stackDepth - 2)
+ return true;
+ break;
+
+ case JSTRY_DESTRUCTURING_ITERCLOSE:
+ // Destructuring code that needs to call IteratorClose has both
+ // the iterator and the "done" value on the stack.
+ if (stackDepth == tn->stackDepth || stackDepth == tn->stackDepth - 1)
+ return true;
+ break;
- // For-of loops have both the iterator and the result object on
- // stack. The iterator is below the result object.
- if (tn->kind == JSTRY_FOR_OF && stackDepth == tn->stackDepth - 1)
- return true;
+ default:
+ break;
+ }
}
return false;
@@ -945,7 +962,7 @@ InitFromBailout(JSContext* cx, HandleScript caller, jsbytecode* callerPC,
// iterators, however, so read them out. They will be closed by
// HandleExceptionBaseline.
MOZ_ASSERT(cx->compartment()->isDebuggee());
- if (iter.moreFrames() || HasLiveIteratorAtStackDepth(script, pc, i + 1)) {
+ if (iter.moreFrames() || HasLiveStackValueAtDepth(script, pc, i + 1)) {
v = iter.read();
} else {
iter.skip();
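
Note: the switch above encodes a per-kind stack layout. As a rough standalone sketch of the same depth arithmetic (the enum and helper below are illustrative stand-ins, not the real JSTryNote or HasLiveStackValueAtDepth types, and tnStackDepth is assumed large enough for the subtractions):

#include <cstdint>

// Stand-in for the JSTRY_* try-note kinds referenced above.
enum class TryKind { ForIn, ForOf, DestructuringIterClose, Other };

// Returns whether the value at slotDepth must be kept live for a try note
// recorded at depth tnStackDepth, mirroring the switch in the patch.
static bool slotHoldsLiveValue(TryKind kind, uint32_t tnStackDepth, uint32_t slotDepth)
{
    switch (kind) {
      case TryKind::ForIn:
        // Only the iterator is on the stack.
        return slotDepth == tnStackDepth;
      case TryKind::ForOf:
        // Iterator, result object, result value: the iterator sits two below.
        return slotDepth == tnStackDepth - 2;
      case TryKind::DestructuringIterClose:
        // Both the iterator and the "done" flag are live.
        return slotDepth == tnStackDepth || slotDepth == tnStackDepth - 1;
      default:
        return false;
    }
}
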
diff --git a/js/src/jit/BaselineCompiler.cpp b/js/src/jit/BaselineCompiler.cpp
index c58367aa3..3fa5a80ed 100644
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -9,6 +9,8 @@
#include "mozilla/Casting.h"
#include "mozilla/SizePrintfMacros.h"
+#include "jsfun.h"
+
#include "jit/BaselineIC.h"
#include "jit/BaselineJIT.h"
#include "jit/FixedList.h"
@@ -1061,6 +1063,12 @@ BaselineCompiler::emit_JSOP_NOP_DESTRUCTURING()
}
bool
+BaselineCompiler::emit_JSOP_TRY_DESTRUCTURING_ITERCLOSE()
+{
+ return true;
+}
+
+bool
BaselineCompiler::emit_JSOP_LABEL()
{
return true;
@@ -1145,7 +1153,7 @@ BaselineCompiler::emit_JSOP_PICK()
// after : A B D E C
// First, move value at -(amount + 1) into R0.
- int depth = -(GET_INT8(pc) + 1);
+ int32_t depth = -(GET_INT8(pc) + 1);
masm.loadValue(frame.addressOfStackValue(frame.peek(depth)), R0);
// Move the other values down.
@@ -1164,6 +1172,34 @@ BaselineCompiler::emit_JSOP_PICK()
}
bool
+BaselineCompiler::emit_JSOP_UNPICK()
+{
+ frame.syncStack(0);
+
+ // Unpick takes the top stack value and moves it under the nth value.
+ // For instance, unpick 2:
+ // before: A B C D E
+ // after : A B E C D
+
+ // First, move value at -1 into R0.
+ masm.loadValue(frame.addressOfStackValue(frame.peek(-1)), R0);
+
+ // Move the other values up.
+ int32_t depth = -(GET_INT8(pc) + 1);
+ for (int32_t i = -1; i > depth; i--) {
+ Address source = frame.addressOfStackValue(frame.peek(i - 1));
+ Address dest = frame.addressOfStackValue(frame.peek(i));
+ masm.loadValue(source, R1);
+ masm.storeValue(R1, dest);
+ }
+
+ // Store R0 under the nth value.
+ Address dest = frame.addressOfStackValue(frame.peek(depth));
+ masm.storeValue(R0, dest);
+ return true;
+}
+
+bool
BaselineCompiler::emit_JSOP_GOTO()
{
frame.syncStack(0);
@@ -1351,6 +1387,26 @@ BaselineCompiler::emit_JSOP_CHECKISOBJ()
return true;
}
+typedef bool (*CheckIsCallableFn)(JSContext*, HandleValue, CheckIsCallableKind);
+static const VMFunction CheckIsCallableInfo =
+ FunctionInfo<CheckIsCallableFn>(CheckIsCallable, "CheckIsCallable");
+
+bool
+BaselineCompiler::emit_JSOP_CHECKISCALLABLE()
+{
+ frame.syncStack(0);
+ masm.loadValue(frame.addressOfStackValue(frame.peek(-1)), R0);
+
+ prepareVMCall();
+
+ pushArg(Imm32(GET_UINT8(pc)));
+ pushArg(R0);
+ if (!callVM(CheckIsCallableInfo))
+ return false;
+
+ return true;
+}
+
typedef bool (*ThrowUninitializedThisFn)(JSContext*, BaselineFrame* frame);
static const VMFunction ThrowUninitializedThisInfo =
FunctionInfo<ThrowUninitializedThisFn>(BaselineThrowUninitializedThis,
@@ -1681,6 +1737,29 @@ BaselineCompiler::emit_JSOP_LAMBDA_ARROW()
return true;
}
+typedef bool (*SetFunNameFn)(JSContext*, HandleFunction, HandleValue, FunctionPrefixKind);
+static const VMFunction SetFunNameInfo =
+ FunctionInfo<SetFunNameFn>(js::SetFunctionNameIfNoOwnName, "SetFunName");
+
+bool
+BaselineCompiler::emit_JSOP_SETFUNNAME()
+{
+ frame.popRegsAndSync(2);
+
+ frame.push(R0);
+ frame.syncStack(0);
+
+ FunctionPrefixKind prefixKind = FunctionPrefixKind(GET_UINT8(pc));
+ masm.unboxObject(R0, R0.scratchReg());
+
+ prepareVMCall();
+
+ pushArg(Imm32(int32_t(prefixKind)));
+ pushArg(R1);
+ pushArg(R0.scratchReg());
+ return callVM(SetFunNameInfo);
+}
+
void
BaselineCompiler::storeValue(const StackValue* source, const Address& dest,
const ValueOperand& scratch)
@@ -3922,7 +4001,7 @@ BaselineCompiler::emit_JSOP_MOREITER()
}
bool
-BaselineCompiler::emit_JSOP_ISNOITER()
+BaselineCompiler::emitIsMagicValue()
{
frame.syncStack(0);
@@ -3941,6 +4020,12 @@ BaselineCompiler::emit_JSOP_ISNOITER()
}
bool
+BaselineCompiler::emit_JSOP_ISNOITER()
+{
+ return emitIsMagicValue();
+}
+
+bool
BaselineCompiler::emit_JSOP_ENDITER()
{
if (!emit_JSOP_JUMPTARGET())
@@ -3952,6 +4037,12 @@ BaselineCompiler::emit_JSOP_ENDITER()
}
bool
+BaselineCompiler::emit_JSOP_ISGENCLOSING()
+{
+ return emitIsMagicValue();
+}
+
+bool
BaselineCompiler::emit_JSOP_GETRVAL()
{
frame.syncStack(0);
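
Note: to illustrate the stack shuffle emit_JSOP_UNPICK performs, the same rotation can be modeled on a plain std::vector with the top of the stack at the back; the helper below is a sketch for illustration only, not SpiderMonkey code:

#include <cassert>
#include <string>
#include <vector>

// Move the top element below the n values above the target slot, shifting
// those n values up by one -- the same effect as the loadValue/storeValue
// loop in emit_JSOP_UNPICK.
static void unpick(std::vector<std::string>& stack, size_t n)
{
    std::string top = stack.back();
    for (size_t i = 0; i < n; i++)
        stack[stack.size() - 1 - i] = stack[stack.size() - 2 - i];
    stack[stack.size() - 1 - n] = top;
}

int main()
{
    std::vector<std::string> stack = {"A", "B", "C", "D", "E"};
    unpick(stack, 2); // before: A B C D E
    assert((stack == std::vector<std::string>{"A", "B", "E", "C", "D"}));
    return 0;
}
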
diff --git a/js/src/jit/BaselineCompiler.h b/js/src/jit/BaselineCompiler.h
index 9adf65c27..6b5bf009e 100644
--- a/js/src/jit/BaselineCompiler.h
+++ b/js/src/jit/BaselineCompiler.h
@@ -42,6 +42,7 @@ namespace jit {
_(JSOP_DUP2) \
_(JSOP_SWAP) \
_(JSOP_PICK) \
+ _(JSOP_UNPICK) \
_(JSOP_GOTO) \
_(JSOP_IFEQ) \
_(JSOP_IFNE) \
@@ -71,6 +72,7 @@ namespace jit {
_(JSOP_REGEXP) \
_(JSOP_LAMBDA) \
_(JSOP_LAMBDA_ARROW) \
+ _(JSOP_SETFUNNAME) \
_(JSOP_BITOR) \
_(JSOP_BITXOR) \
_(JSOP_BITAND) \
@@ -201,6 +203,7 @@ namespace jit {
_(JSOP_MOREITER) \
_(JSOP_ISNOITER) \
_(JSOP_ENDITER) \
+ _(JSOP_ISGENCLOSING) \
_(JSOP_GENERATOR) \
_(JSOP_INITIALYIELD) \
_(JSOP_YIELD) \
@@ -215,6 +218,7 @@ namespace jit {
_(JSOP_FUNCTIONTHIS) \
_(JSOP_GLOBALTHIS) \
_(JSOP_CHECKISOBJ) \
+ _(JSOP_CHECKISCALLABLE) \
_(JSOP_CHECKTHIS) \
_(JSOP_CHECKRETURN) \
_(JSOP_NEWTARGET) \
@@ -222,7 +226,7 @@ namespace jit {
_(JSOP_SPREADSUPERCALL) \
_(JSOP_THROWSETCONST) \
_(JSOP_THROWSETALIASEDCONST) \
- _(JSOP_THROWSETCALLEE) \
+ _(JSOP_THROWSETCALLEE) \
_(JSOP_INITHIDDENPROP_GETTER) \
_(JSOP_INITHIDDENPROP_SETTER) \
_(JSOP_INITHIDDENELEM) \
@@ -230,8 +234,9 @@ namespace jit {
_(JSOP_INITHIDDENELEM_SETTER) \
_(JSOP_CHECKOBJCOERCIBLE) \
_(JSOP_DEBUGCHECKSELFHOSTED) \
- _(JSOP_JUMPTARGET) \
- _(JSOP_IS_CONSTRUCTING)
+ _(JSOP_JUMPTARGET) \
+ _(JSOP_IS_CONSTRUCTING) \
+ _(JSOP_TRY_DESTRUCTURING_ITERCLOSE)
class BaselineCompiler : public BaselineCompilerSpecific
{
@@ -340,6 +345,8 @@ class BaselineCompiler : public BaselineCompilerSpecific
MOZ_MUST_USE bool emitThrowConstAssignment();
MOZ_MUST_USE bool emitUninitializedLexicalCheck(const ValueOperand& val);
+ MOZ_MUST_USE bool emitIsMagicValue();
+
MOZ_MUST_USE bool addPCMappingEntry(bool addIndexEntry);
MOZ_MUST_USE bool addYieldOffset();
diff --git a/js/src/jit/CodeGenerator.cpp b/js/src/jit/CodeGenerator.cpp
index ce97363be..7b2f8214b 100644
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -2548,6 +2548,20 @@ CodeGenerator::emitLambdaInit(Register output, Register envChain,
masm.storePtr(ImmGCPtr(info.fun->displayAtom()), Address(output, JSFunction::offsetOfAtom()));
}
+typedef bool (*SetFunNameFn)(JSContext*, HandleFunction, HandleValue, FunctionPrefixKind);
+static const VMFunction SetFunNameInfo =
+ FunctionInfo<SetFunNameFn>(js::SetFunctionNameIfNoOwnName, "SetFunName");
+
+void
+CodeGenerator::visitSetFunName(LSetFunName* lir)
+{
+ pushArg(Imm32(lir->mir()->prefixKind()));
+ pushArg(ToValue(lir, LSetFunName::NameValue));
+ pushArg(ToRegister(lir->fun()));
+
+ callVM(SetFunNameInfo, lir);
+}
+
void
CodeGenerator::visitOsiPoint(LOsiPoint* lir)
{
@@ -8512,8 +8526,8 @@ StoreUnboxedPointer(MacroAssembler& masm, T address, MIRType type, const LAlloca
masm.patchableCallPreBarrier(address, type);
if (value->isConstant()) {
Value v = value->toConstant()->toJSValue();
- if (v.isMarkable()) {
- masm.storePtr(ImmGCPtr(v.toMarkablePointer()), address);
+ if (v.isGCThing()) {
+ masm.storePtr(ImmGCPtr(v.toGCThing()), address);
} else {
MOZ_ASSERT(v.isNull());
masm.storePtr(ImmWord(0), address);
@@ -11312,25 +11326,35 @@ class OutOfLineIsCallable : public OutOfLineCodeBase<CodeGenerator>
}
};
+template <CodeGenerator::CallableOrConstructor mode>
void
-CodeGenerator::visitIsCallable(LIsCallable* ins)
+CodeGenerator::emitIsCallableOrConstructor(Register object, Register output, Label* failure)
{
- Register object = ToRegister(ins->object());
- Register output = ToRegister(ins->output());
-
- OutOfLineIsCallable* ool = new(alloc()) OutOfLineIsCallable(ins);
- addOutOfLineCode(ool, ins->mir());
-
Label notFunction, hasCOps, done;
masm.loadObjClass(object, output);
- // Just skim proxies off. Their notion of isCallable() is more complicated.
- masm.branchTestClassIsProxy(true, output, ool->entry());
+ // Just skim proxies off. Their notion of isCallable()/isConstructor() is
+ // more complicated.
+ masm.branchTestClassIsProxy(true, output, failure);
// An object is callable iff:
// is<JSFunction>() || (getClass()->cOps && getClass()->cOps->call).
+ // An object is a constructor iff:
+ // ((is<JSFunction>() && as<JSFunction>().isConstructor) ||
+ // (getClass()->cOps && getClass()->cOps->construct)).
masm.branchPtr(Assembler::NotEqual, output, ImmPtr(&JSFunction::class_), &notFunction);
- masm.move32(Imm32(1), output);
+ if (mode == Callable) {
+ masm.move32(Imm32(1), output);
+ } else {
+ Label notConstructor;
+ masm.load16ZeroExtend(Address(object, JSFunction::offsetOfFlags()), output);
+ masm.and32(Imm32(JSFunction::CONSTRUCTOR), output);
+ masm.branchTest32(Assembler::Zero, output, output, &notConstructor);
+ masm.move32(Imm32(1), output);
+ masm.jump(&done);
+ masm.bind(&notConstructor);
+ masm.move32(Imm32(0), output);
+ }
masm.jump(&done);
masm.bind(&notFunction);
@@ -11341,10 +11365,26 @@ CodeGenerator::visitIsCallable(LIsCallable* ins)
masm.bind(&hasCOps);
masm.loadPtr(Address(output, offsetof(js::Class, cOps)), output);
- masm.cmpPtrSet(Assembler::NonZero, Address(output, offsetof(js::ClassOps, call)),
+ size_t opsOffset = mode == Callable
+ ? offsetof(js::ClassOps, call)
+ : offsetof(js::ClassOps, construct);
+ masm.cmpPtrSet(Assembler::NonZero, Address(output, opsOffset),
ImmPtr(nullptr), output);
masm.bind(&done);
+}
+
+void
+CodeGenerator::visitIsCallable(LIsCallable* ins)
+{
+ Register object = ToRegister(ins->object());
+ Register output = ToRegister(ins->output());
+
+ OutOfLineIsCallable* ool = new(alloc()) OutOfLineIsCallable(ins);
+ addOutOfLineCode(ool, ins->mir());
+
+ emitIsCallableOrConstructor<Callable>(object, output, ool->entry());
+
masm.bind(ool->rejoin());
}
@@ -11364,6 +11404,36 @@ CodeGenerator::visitOutOfLineIsCallable(OutOfLineIsCallable* ool)
masm.jump(ool->rejoin());
}
+typedef bool (*CheckIsCallableFn)(JSContext*, HandleValue, CheckIsCallableKind);
+static const VMFunction CheckIsCallableInfo =
+ FunctionInfo<CheckIsCallableFn>(CheckIsCallable, "CheckIsCallable");
+
+void
+CodeGenerator::visitCheckIsCallable(LCheckIsCallable* ins)
+{
+ ValueOperand checkValue = ToValue(ins, LCheckIsCallable::CheckValue);
+ Register temp = ToRegister(ins->temp());
+
+ // The OOL path is used in the following two cases:
+ // * checkValue is not callable
+ // * checkValue is a proxy and it's unknown whether it's callable or not
+ // CheckIsCallable checks whether the passed value is callable, regardless
+ // of which case above applies. The IsCallable operation is not observable,
+ // so checking it again does no harm.
+ OutOfLineCode* ool = oolCallVM(CheckIsCallableInfo, ins,
+ ArgList(checkValue, Imm32(ins->mir()->checkKind())),
+ StoreNothing());
+
+ masm.branchTestObject(Assembler::NotEqual, checkValue, ool->entry());
+
+ Register object = masm.extractObject(checkValue, temp);
+ emitIsCallableOrConstructor<Callable>(object, temp, ool->entry());
+
+ masm.branchTest32(Assembler::Zero, temp, temp, ool->entry());
+
+ masm.bind(ool->rejoin());
+}
+
class OutOfLineIsConstructor : public OutOfLineCodeBase<CodeGenerator>
{
LIsConstructor* ins_;
@@ -11390,37 +11460,8 @@ CodeGenerator::visitIsConstructor(LIsConstructor* ins)
OutOfLineIsConstructor* ool = new(alloc()) OutOfLineIsConstructor(ins);
addOutOfLineCode(ool, ins->mir());
- Label notFunction, notConstructor, hasCOps, done;
- masm.loadObjClass(object, output);
-
- // Just skim proxies off. Their notion of isConstructor() is more complicated.
- masm.branchTestClassIsProxy(true, output, ool->entry());
-
- // An object is constructor iff
- // ((is<JSFunction>() && as<JSFunction>().isConstructor) ||
- // (getClass()->cOps && getClass()->cOps->construct)).
- masm.branchPtr(Assembler::NotEqual, output, ImmPtr(&JSFunction::class_), &notFunction);
- masm.load16ZeroExtend(Address(object, JSFunction::offsetOfFlags()), output);
- masm.and32(Imm32(JSFunction::CONSTRUCTOR), output);
- masm.branchTest32(Assembler::Zero, output, output, &notConstructor);
- masm.move32(Imm32(1), output);
- masm.jump(&done);
- masm.bind(&notConstructor);
- masm.move32(Imm32(0), output);
- masm.jump(&done);
-
- masm.bind(&notFunction);
- masm.branchPtr(Assembler::NonZero, Address(output, offsetof(js::Class, cOps)),
- ImmPtr(nullptr), &hasCOps);
- masm.move32(Imm32(0), output);
- masm.jump(&done);
-
- masm.bind(&hasCOps);
- masm.loadPtr(Address(output, offsetof(js::Class, cOps)), output);
- masm.cmpPtrSet(Assembler::NonZero, Address(output, offsetof(js::ClassOps, construct)),
- ImmPtr(nullptr), output);
+ emitIsCallableOrConstructor<Constructor>(object, output, ool->entry());
- masm.bind(&done);
masm.bind(ool->rejoin());
}
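
Note: emitIsCallableOrConstructor folds the callable and constructor checks into one decision tree. A rough plain-C++ sketch of that tree, over simplified stand-in types (not the real js::Class/JSFunction layouts), to compare the two modes:

#include <cstddef>

struct ClassOps { void* call; void* construct; };
struct SimpleClass { bool isProxy; bool isFunction; const ClassOps* cOps; };
struct SimpleObject { const SimpleClass* clasp; bool functionIsConstructor; };

enum Mode { Callable, Constructor };

// needSlowPath is set where the jitted code branches to the OOL/VM path
// (proxies), since their answer cannot be read off the class alone.
template <Mode mode>
static bool isCallableOrConstructor(const SimpleObject& obj, bool* needSlowPath)
{
    *needSlowPath = false;
    if (obj.clasp->isProxy) {
        *needSlowPath = true;
        return false;
    }
    if (obj.clasp->isFunction)
        return mode == Callable ? true : obj.functionIsConstructor;
    if (!obj.clasp->cOps)
        return false;
    return (mode == Callable ? obj.clasp->cOps->call : obj.clasp->cOps->construct) != nullptr;
}
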
diff --git a/js/src/jit/CodeGenerator.h b/js/src/jit/CodeGenerator.h
index 8f4bcc813..d3126651b 100644
--- a/js/src/jit/CodeGenerator.h
+++ b/js/src/jit/CodeGenerator.h
@@ -134,6 +134,7 @@ class CodeGenerator final : public CodeGeneratorSpecific
void visitOutOfLineLambdaArrow(OutOfLineLambdaArrow* ool);
void visitLambdaArrow(LLambdaArrow* lir);
void visitLambdaForSingleton(LLambdaForSingleton* lir);
+ void visitSetFunName(LSetFunName* lir);
void visitPointer(LPointer* lir);
void visitKeepAliveObject(LKeepAliveObject* lir);
void visitSlots(LSlots* lir);
@@ -363,6 +364,12 @@ class CodeGenerator final : public CodeGeneratorSpecific
void visitCallDOMNative(LCallDOMNative* lir);
void visitCallGetIntrinsicValue(LCallGetIntrinsicValue* lir);
void visitCallBindVar(LCallBindVar* lir);
+ enum CallableOrConstructor {
+ Callable,
+ Constructor
+ };
+ template <CallableOrConstructor mode>
+ void emitIsCallableOrConstructor(Register object, Register output, Label* failure);
void visitIsCallable(LIsCallable* lir);
void visitOutOfLineIsCallable(OutOfLineIsCallable* ool);
void visitIsConstructor(LIsConstructor* lir);
@@ -383,6 +390,7 @@ class CodeGenerator final : public CodeGeneratorSpecific
void visitArrowNewTarget(LArrowNewTarget* ins);
void visitCheckReturn(LCheckReturn* ins);
void visitCheckIsObj(LCheckIsObj* ins);
+ void visitCheckIsCallable(LCheckIsCallable* ins);
void visitCheckObjCoercible(LCheckObjCoercible* ins);
void visitDebugCheckSelfHosted(LDebugCheckSelfHosted* ins);
void visitNaNToZero(LNaNToZero* ins);
diff --git a/js/src/jit/IonBuilder.cpp b/js/src/jit/IonBuilder.cpp
index 1488d7d34..54d05cac4 100644
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -958,30 +958,35 @@ IonBuilder::build()
bool
IonBuilder::processIterators()
{
- // Find phis that must directly hold an iterator live.
- Vector<MPhi*, 0, SystemAllocPolicy> worklist;
+ // Find and mark phis that must transitively hold an iterator live.
+
+ Vector<MDefinition*, 8, SystemAllocPolicy> worklist;
+
for (size_t i = 0; i < iterators_.length(); i++) {
- MInstruction* ins = iterators_[i];
- for (MUseDefIterator iter(ins); iter; iter++) {
- if (iter.def()->isPhi()) {
- if (!worklist.append(iter.def()->toPhi()))
- return false;
- }
+ MDefinition* iter = iterators_[i];
+ if (!iter->isInWorklist()) {
+ if (!worklist.append(iter))
+ return false;
+ iter->setInWorklist();
}
}
- // Propagate the iterator and live status of phis to all other connected
- // phis.
while (!worklist.empty()) {
- MPhi* phi = worklist.popCopy();
- phi->setIterator();
- phi->setImplicitlyUsedUnchecked();
-
- for (MUseDefIterator iter(phi); iter; iter++) {
- if (iter.def()->isPhi()) {
- MPhi* other = iter.def()->toPhi();
- if (!other->isIterator() && !worklist.append(other))
+ MDefinition* def = worklist.popCopy();
+ def->setNotInWorklist();
+
+ if (def->isPhi()) {
+ MPhi* phi = def->toPhi();
+ phi->setIterator();
+ phi->setImplicitlyUsedUnchecked();
+ }
+
+ for (MUseDefIterator iter(def); iter; iter++) {
+ MDefinition* use = iter.def();
+ if (!use->isInWorklist() && (!use->isPhi() || !use->toPhi()->isIterator())) {
+ if (!worklist.append(use))
return false;
+ use->setInWorklist();
}
}
}
@@ -1563,6 +1568,7 @@ IonBuilder::traverseBytecode()
case JSOP_DUP:
case JSOP_DUP2:
case JSOP_PICK:
+ case JSOP_UNPICK:
case JSOP_SWAP:
case JSOP_SETARG:
case JSOP_SETLOCAL:
@@ -1672,6 +1678,7 @@ IonBuilder::inspectOpcode(JSOp op)
switch (op) {
case JSOP_NOP:
case JSOP_NOP_DESTRUCTURING:
+ case JSOP_TRY_DESTRUCTURING_ITERCLOSE:
case JSOP_LINENO:
case JSOP_LOOPENTRY:
case JSOP_JUMPTARGET:
@@ -1935,6 +1942,10 @@ IonBuilder::inspectOpcode(JSOp op)
case JSOP_CALLITER:
case JSOP_NEW:
case JSOP_SUPERCALL:
+ if (op == JSOP_CALLITER) {
+ if (!outermostBuilder()->iterators_.append(current->peek(-1)))
+ return false;
+ }
return jsop_call(GET_ARGC(pc), (JSOp)*pc == JSOP_NEW || (JSOp)*pc == JSOP_SUPERCALL);
case JSOP_EVAL:
@@ -2017,6 +2028,10 @@ IonBuilder::inspectOpcode(JSOp op)
current->pick(-GET_INT8(pc));
return true;
+ case JSOP_UNPICK:
+ current->unpick(-GET_INT8(pc));
+ return true;
+
case JSOP_GETALIASEDVAR:
return jsop_getaliasedvar(EnvironmentCoordinate(pc));
@@ -2122,6 +2137,9 @@ IonBuilder::inspectOpcode(JSOp op)
case JSOP_LAMBDA_ARROW:
return jsop_lambda_arrow(info().getFunction(pc));
+ case JSOP_SETFUNNAME:
+ return jsop_setfunname(GET_UINT8(pc));
+
case JSOP_ITER:
return jsop_iter(GET_INT8(pc));
@@ -2166,6 +2184,9 @@ IonBuilder::inspectOpcode(JSOp op)
case JSOP_CHECKISOBJ:
return jsop_checkisobj(GET_UINT8(pc));
+ case JSOP_CHECKISCALLABLE:
+ return jsop_checkiscallable(GET_UINT8(pc));
+
case JSOP_CHECKOBJCOERCIBLE:
return jsop_checkobjcoercible();
@@ -10883,6 +10904,15 @@ IonBuilder::jsop_checkisobj(uint8_t kind)
}
bool
+IonBuilder::jsop_checkiscallable(uint8_t kind)
+{
+ MCheckIsCallable* check = MCheckIsCallable::New(alloc(), current->pop(), kind);
+ current->add(check);
+ current->push(check);
+ return true;
+}
+
+bool
IonBuilder::jsop_checkobjcoercible()
{
MDefinition* toCheck = current->peek(-1);
@@ -13340,6 +13370,21 @@ IonBuilder::jsop_lambda_arrow(JSFunction* fun)
}
bool
+IonBuilder::jsop_setfunname(uint8_t prefixKind)
+{
+ MDefinition* name = current->pop();
+ MDefinition* fun = current->pop();
+ MOZ_ASSERT(fun->type() == MIRType::Object);
+
+ MSetFunName* ins = MSetFunName::New(alloc(), fun, name, prefixKind);
+
+ current->add(ins);
+ current->push(fun);
+
+ return resumeAfter(ins);
+}
+
+bool
IonBuilder::jsop_setarg(uint32_t arg)
{
// To handle this case, we should spill the arguments to the space where
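
Note: the rewritten processIterators is a worklist propagation over definition-to-use edges, using an in-worklist flag to avoid re-queuing. A toy sketch of the same pattern over a made-up Node type (simplified: the real code marks only phis as iterators and also flags them as implicitly used):

#include <vector>

struct Node {
    std::vector<Node*> uses;     // definitions that use this one
    bool inWorklist = false;
    bool markedIterator = false;
};

// Mark every definition transitively reachable through use edges from the
// iterator roots; in this simplified version each node is processed once.
static void markTransitiveUses(const std::vector<Node*>& roots)
{
    std::vector<Node*> worklist;
    for (Node* root : roots) {
        if (!root->inWorklist) {
            worklist.push_back(root);
            root->inWorklist = true;
        }
    }
    while (!worklist.empty()) {
        Node* def = worklist.back();
        worklist.pop_back();
        def->inWorklist = false;
        def->markedIterator = true;
        for (Node* use : def->uses) {
            if (!use->inWorklist && !use->markedIterator) {
                worklist.push_back(use);
                use->inWorklist = true;
            }
        }
    }
}
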
diff --git a/js/src/jit/IonBuilder.h b/js/src/jit/IonBuilder.h
index 38647a88f..35ad120f7 100644
--- a/js/src/jit/IonBuilder.h
+++ b/js/src/jit/IonBuilder.h
@@ -766,6 +766,7 @@ class IonBuilder
MOZ_MUST_USE bool jsop_object(JSObject* obj);
MOZ_MUST_USE bool jsop_lambda(JSFunction* fun);
MOZ_MUST_USE bool jsop_lambda_arrow(JSFunction* fun);
+ MOZ_MUST_USE bool jsop_setfunname(uint8_t prefixKind);
MOZ_MUST_USE bool jsop_functionthis();
MOZ_MUST_USE bool jsop_globalthis();
MOZ_MUST_USE bool jsop_typeof();
@@ -782,6 +783,7 @@ class IonBuilder
MOZ_MUST_USE bool jsop_debugger();
MOZ_MUST_USE bool jsop_newtarget();
MOZ_MUST_USE bool jsop_checkisobj(uint8_t kind);
+ MOZ_MUST_USE bool jsop_checkiscallable(uint8_t kind);
MOZ_MUST_USE bool jsop_checkobjcoercible();
MOZ_MUST_USE bool jsop_pushcallobj();
@@ -1241,7 +1243,7 @@ class IonBuilder
Vector<ControlFlowInfo, 4, JitAllocPolicy> loops_;
Vector<ControlFlowInfo, 0, JitAllocPolicy> switches_;
Vector<ControlFlowInfo, 2, JitAllocPolicy> labels_;
- Vector<MInstruction*, 2, JitAllocPolicy> iterators_;
+ Vector<MDefinition*, 2, JitAllocPolicy> iterators_;
Vector<LoopHeader, 0, JitAllocPolicy> loopHeaders_;
BaselineInspector* inspector;
diff --git a/js/src/jit/JitFrames.cpp b/js/src/jit/JitFrames.cpp
index 646442b4c..f11f17225 100644
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -328,23 +328,46 @@ NumArgAndLocalSlots(const InlineFrameIterator& frame)
}
static void
-CloseLiveIteratorIon(JSContext* cx, const InlineFrameIterator& frame, uint32_t stackSlot)
+CloseLiveIteratorIon(JSContext* cx, const InlineFrameIterator& frame, JSTryNote* tn)
{
+ MOZ_ASSERT(tn->kind == JSTRY_FOR_IN ||
+ tn->kind == JSTRY_DESTRUCTURING_ITERCLOSE);
+
+ bool isDestructuring = tn->kind == JSTRY_DESTRUCTURING_ITERCLOSE;
+ MOZ_ASSERT_IF(!isDestructuring, tn->stackDepth > 0);
+ MOZ_ASSERT_IF(isDestructuring, tn->stackDepth > 1);
+
SnapshotIterator si = frame.snapshotIterator();
- // Skip stack slots until we reach the iterator object.
- uint32_t skipSlots = NumArgAndLocalSlots(frame) + stackSlot - 1;
+ // Skip stack slots until we reach the iterator object on the stack. For
+ // the destructuring case, we also need to get the "done" value.
+ uint32_t stackSlot = tn->stackDepth;
+ uint32_t adjust = isDestructuring ? 2 : 1;
+ uint32_t skipSlots = NumArgAndLocalSlots(frame) + stackSlot - adjust;
for (unsigned i = 0; i < skipSlots; i++)
si.skip();
Value v = si.read();
- RootedObject obj(cx, &v.toObject());
+ RootedObject iterObject(cx, &v.toObject());
+
+ if (isDestructuring) {
+ RootedValue doneValue(cx, si.read());
+ bool done = ToBoolean(doneValue);
+ // Do not call IteratorClose if the destructuring iterator is already
+ // done.
+ if (done)
+ return;
+ }
- if (cx->isExceptionPending())
- UnwindIteratorForException(cx, obj);
- else
- UnwindIteratorForUncatchableException(cx, obj);
+ if (cx->isExceptionPending()) {
+ if (tn->kind == JSTRY_FOR_IN)
+ UnwindIteratorForException(cx, iterObject);
+ else
+ IteratorCloseForException(cx, iterObject);
+ } else {
+ UnwindIteratorForUncatchableException(cx, iterObject);
+ }
}
class IonFrameStackDepthOp
@@ -413,25 +436,36 @@ HandleExceptionIon(JSContext* cx, const InlineFrameIterator& frame, ResumeFromEx
if (!script->hasTrynotes())
return;
+ bool inForOfIterClose = false;
+
for (TryNoteIterIon tni(cx, frame); !tni.done(); ++tni) {
JSTryNote* tn = *tni;
switch (tn->kind) {
- case JSTRY_FOR_IN: {
- MOZ_ASSERT(JSOp(*(script->main() + tn->start + tn->length)) == JSOP_ENDITER);
- MOZ_ASSERT(tn->stackDepth > 0);
+ case JSTRY_FOR_IN:
+ case JSTRY_DESTRUCTURING_ITERCLOSE:
+ MOZ_ASSERT_IF(tn->kind == JSTRY_FOR_IN,
+ JSOp(*(script->main() + tn->start + tn->length)) == JSOP_ENDITER);
+ CloseLiveIteratorIon(cx, frame, tn);
+ break;
- uint32_t localSlot = tn->stackDepth;
- CloseLiveIteratorIon(cx, frame, localSlot);
+ case JSTRY_FOR_OF_ITERCLOSE:
+ inForOfIterClose = true;
break;
- }
case JSTRY_FOR_OF:
+ inForOfIterClose = false;
+ break;
+
case JSTRY_LOOP:
break;
case JSTRY_CATCH:
if (cx->isExceptionPending()) {
+ // See corresponding comment in ProcessTryNotes.
+ if (inForOfIterClose)
+ break;
+
// Ion can compile try-catch, but bailing out to catch
// exceptions is slow. Reset the warm-up counter so that if we
// catch many exceptions we won't Ion-compile the script.
@@ -562,6 +596,7 @@ ProcessTryNotesBaseline(JSContext* cx, const JitFrameIterator& frame, Environmen
ResumeFromException* rfe, jsbytecode** pc)
{
RootedScript script(cx, frame.baselineFrame()->script());
+ bool inForOfIterClose = false;
for (TryNoteIterBaseline tni(cx, frame.baselineFrame(), *pc); !tni.done(); ++tni) {
JSTryNote* tn = *tni;
@@ -572,7 +607,11 @@ ProcessTryNotesBaseline(JSContext* cx, const JitFrameIterator& frame, Environmen
// If we're closing a legacy generator, we have to skip catch
// blocks.
if (cx->isClosingGenerator())
- continue;
+ break;
+
+ // See corresponding comment in ProcessTryNotes.
+ if (inForOfIterClose)
+ break;
SettleOnTryNote(cx, tn, frame, ei, rfe, pc);
@@ -588,6 +627,10 @@ ProcessTryNotesBaseline(JSContext* cx, const JitFrameIterator& frame, Environmen
}
case JSTRY_FINALLY: {
+ // See corresponding comment in ProcessTryNotes.
+ if (inForOfIterClose)
+ break;
+
SettleOnTryNote(cx, tn, frame, ei, rfe, pc);
rfe->kind = ResumeFromException::RESUME_FINALLY;
rfe->target = script->baselineScript()->nativeCodeForPC(script, *pc);
@@ -602,7 +645,7 @@ ProcessTryNotesBaseline(JSContext* cx, const JitFrameIterator& frame, Environmen
uint8_t* framePointer;
uint8_t* stackPointer;
BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer, &stackPointer);
- Value iterValue(*(Value*) stackPointer);
+ Value iterValue(*reinterpret_cast<Value*>(stackPointer));
RootedObject iterObject(cx, &iterValue.toObject());
if (!UnwindIteratorForException(cx, iterObject)) {
// See comment in the JSTRY_FOR_IN case in Interpreter.cpp's
@@ -614,7 +657,31 @@ ProcessTryNotesBaseline(JSContext* cx, const JitFrameIterator& frame, Environmen
break;
}
+ case JSTRY_DESTRUCTURING_ITERCLOSE: {
+ uint8_t* framePointer;
+ uint8_t* stackPointer;
+ BaselineFrameAndStackPointersFromTryNote(tn, frame, &framePointer, &stackPointer);
+ RootedValue doneValue(cx, *(reinterpret_cast<Value*>(stackPointer)));
+ bool done = ToBoolean(doneValue);
+ if (!done) {
+ Value iterValue(*(reinterpret_cast<Value*>(stackPointer) + 1));
+ RootedObject iterObject(cx, &iterValue.toObject());
+ if (!IteratorCloseForException(cx, iterObject)) {
+ SettleOnTryNote(cx, tn, frame, ei, rfe, pc);
+ return false;
+ }
+ }
+ break;
+ }
+
+ case JSTRY_FOR_OF_ITERCLOSE:
+ inForOfIterClose = true;
+ break;
+
case JSTRY_FOR_OF:
+ inForOfIterClose = false;
+ break;
+
case JSTRY_LOOP:
break;
@@ -1995,7 +2062,7 @@ SnapshotIterator::traceAllocation(JSTracer* trc)
return;
Value v = allocationValue(alloc, RM_AlwaysDefault);
- if (!v.isMarkable())
+ if (!v.isGCThing())
return;
Value copy = v;
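
Note: a worked example of the snapshot-slot arithmetic used by CloseLiveIteratorIon; the helper name and the numbers below are invented for illustration:

#include <cassert>
#include <cstdint>

// Number of snapshot slots to skip before the first value of interest: the
// iterator for JSTRY_FOR_IN, or the iterator followed by the "done" flag for
// JSTRY_DESTRUCTURING_ITERCLOSE.
static uint32_t slotsToSkip(uint32_t numArgAndLocalSlots, uint32_t tnStackDepth,
                            bool isDestructuring)
{
    uint32_t adjust = isDestructuring ? 2 : 1;
    return numArgAndLocalSlots + tnStackDepth - adjust;
}

int main()
{
    // With 3 argument/local slots and a try note recorded at stack depth 4:
    assert(slotsToSkip(3, 4, /*isDestructuring=*/false) == 6); // next read: iterator
    assert(slotsToSkip(3, 4, /*isDestructuring=*/true) == 5);  // next reads: iterator, done
    return 0;
}
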
diff --git a/js/src/jit/Lowering.cpp b/js/src/jit/Lowering.cpp
index 13e50820e..730697163 100644
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -2460,6 +2460,18 @@ LIRGenerator::visitLambdaArrow(MLambdaArrow* ins)
}
void
+LIRGenerator::visitSetFunName(MSetFunName* ins)
+{
+ MOZ_ASSERT(ins->fun()->type() == MIRType::Object);
+ MOZ_ASSERT(ins->name()->type() == MIRType::Value);
+
+ LSetFunName* lir = new(alloc()) LSetFunName(useRegisterAtStart(ins->fun()),
+ useBoxAtStart(ins->name()));
+ add(lir, ins);
+ assignSafepoint(lir, ins);
+}
+
+void
LIRGenerator::visitKeepAliveObject(MKeepAliveObject* ins)
{
MDefinition* obj = ins->object();
@@ -2675,7 +2687,7 @@ IsNonNurseryConstant(MDefinition* def)
if (!def->isConstant())
return false;
Value v = def->toConstant()->toJSValue();
- return !v.isMarkable() || !IsInsideNursery(v.toMarkablePointer());
+ return !v.isGCThing() || !IsInsideNursery(v.toGCThing());
}
void
@@ -4677,6 +4689,19 @@ LIRGenerator::visitCheckIsObj(MCheckIsObj* ins)
}
void
+LIRGenerator::visitCheckIsCallable(MCheckIsCallable* ins)
+{
+ MDefinition* checkVal = ins->checkValue();
+ MOZ_ASSERT(checkVal->type() == MIRType::Value);
+
+ LCheckIsCallable* lir = new(alloc()) LCheckIsCallable(useBox(checkVal),
+ temp());
+ redefine(ins, checkVal);
+ add(lir, ins);
+ assignSafepoint(lir, ins);
+}
+
+void
LIRGenerator::visitCheckObjCoercible(MCheckObjCoercible* ins)
{
MDefinition* checkVal = ins->checkValue();
diff --git a/js/src/jit/Lowering.h b/js/src/jit/Lowering.h
index 0f66a3c24..b2805cb7a 100644
--- a/js/src/jit/Lowering.h
+++ b/js/src/jit/Lowering.h
@@ -185,6 +185,7 @@ class LIRGenerator : public LIRGeneratorSpecific
void visitNullarySharedStub(MNullarySharedStub* ins);
void visitLambda(MLambda* ins);
void visitLambdaArrow(MLambdaArrow* ins);
+ void visitSetFunName(MSetFunName* ins);
void visitKeepAliveObject(MKeepAliveObject* ins);
void visitSlots(MSlots* ins);
void visitElements(MElements* ins);
@@ -328,6 +329,7 @@ class LIRGenerator : public LIRGeneratorSpecific
void visitGuardSharedTypedArray(MGuardSharedTypedArray* ins);
void visitCheckReturn(MCheckReturn* ins);
void visitCheckIsObj(MCheckIsObj* ins);
+ void visitCheckIsCallable(MCheckIsCallable* ins);
void visitCheckObjCoercible(MCheckObjCoercible* ins);
void visitDebugCheckSelfHosted(MDebugCheckSelfHosted* ins);
};
diff --git a/js/src/jit/MIR.h b/js/src/jit/MIR.h
index dcb08c317..2de91e2df 100644
--- a/js/src/jit/MIR.h
+++ b/js/src/jit/MIR.h
@@ -8464,6 +8464,34 @@ class MLambdaArrow
}
};
+class MSetFunName
+ : public MAryInstruction<2>,
+ public MixPolicy<ObjectPolicy<0>, BoxPolicy<1> >::Data
+{
+ uint8_t prefixKind_;
+
+ explicit MSetFunName(MDefinition* fun, MDefinition* name, uint8_t prefixKind)
+ : prefixKind_(prefixKind)
+ {
+ initOperand(0, fun);
+ initOperand(1, name);
+ setResultType(MIRType::None);
+ }
+
+ public:
+ INSTRUCTION_HEADER(SetFunName)
+ TRIVIAL_NEW_WRAPPERS
+ NAMED_OPERANDS((0, fun), (1, name))
+
+ uint8_t prefixKind() const {
+ return prefixKind_;
+ }
+
+ bool possiblyCalls() const override {
+ return true;
+ }
+};
+
// Returns obj->slots.
class MSlots
: public MUnaryInstruction,
@@ -13427,8 +13455,9 @@ class MCheckIsObj
{
uint8_t checkKind_;
- explicit MCheckIsObj(MDefinition* toCheck, uint8_t checkKind)
- : MUnaryInstruction(toCheck), checkKind_(checkKind)
+ MCheckIsObj(MDefinition* toCheck, uint8_t checkKind)
+ : MUnaryInstruction(toCheck),
+ checkKind_(checkKind)
{
setResultType(MIRType::Value);
setResultTypeSet(toCheck->resultTypeSet());
@@ -13447,6 +13476,33 @@ class MCheckIsObj
}
};
+class MCheckIsCallable
+ : public MUnaryInstruction,
+ public BoxInputsPolicy::Data
+{
+ uint8_t checkKind_;
+
+ MCheckIsCallable(MDefinition* toCheck, uint8_t checkKind)
+ : MUnaryInstruction(toCheck),
+ checkKind_(checkKind)
+ {
+ setResultType(MIRType::Value);
+ setResultTypeSet(toCheck->resultTypeSet());
+ setGuard();
+ }
+
+ public:
+ INSTRUCTION_HEADER(CheckIsCallable)
+ TRIVIAL_NEW_WRAPPERS
+ NAMED_OPERANDS((0, checkValue))
+
+ uint8_t checkKind() const { return checkKind_; }
+
+ AliasSet getAliasSet() const override {
+ return AliasSet::None();
+ }
+};
+
class MCheckObjCoercible
: public MUnaryInstruction,
public BoxInputsPolicy::Data
diff --git a/js/src/jit/MIRGraph.cpp b/js/src/jit/MIRGraph.cpp
index 3a363a5bf..d6e0fa8ff 100644
--- a/js/src/jit/MIRGraph.cpp
+++ b/js/src/jit/MIRGraph.cpp
@@ -790,6 +790,19 @@ MBasicBlock::pick(int32_t depth)
}
void
+MBasicBlock::unpick(int32_t depth)
+{
+ // unpick takes the top stack element and moves it under the depth-th
+ // element. For example, unpick(-2):
+ // A B C D E
+ // A B C E D [ swapAt(-1) ]
+ // A B E C D [ swapAt(-2) ]
+ for (int32_t n = -1; n >= depth; n--)
+ swapAt(n);
+}
+
+void
MBasicBlock::swapAt(int32_t depth)
{
uint32_t lhsDepth = stackPosition_ + depth - 1;
diff --git a/js/src/jit/MIRGraph.h b/js/src/jit/MIRGraph.h
index b986218f4..705d70fa1 100644
--- a/js/src/jit/MIRGraph.h
+++ b/js/src/jit/MIRGraph.h
@@ -142,6 +142,9 @@ class MBasicBlock : public TempObject, public InlineListNode<MBasicBlock>
// Move the definition to the top of the stack.
void pick(int32_t depth);
+ // Move the top of the stack definition under the depth-th stack value.
+ void unpick(int32_t depth);
+
// Exchange 2 stack slots at the defined depth
void swapAt(int32_t depth);
diff --git a/js/src/jit/MOpcodes.h b/js/src/jit/MOpcodes.h
index 74594cb35..bb2ab8190 100644
--- a/js/src/jit/MOpcodes.h
+++ b/js/src/jit/MOpcodes.h
@@ -161,6 +161,7 @@ namespace jit {
_(StringReplace) \
_(Lambda) \
_(LambdaArrow) \
+ _(SetFunName) \
_(KeepAliveObject) \
_(Slots) \
_(Elements) \
@@ -284,6 +285,7 @@ namespace jit {
_(ArrowNewTarget) \
_(CheckReturn) \
_(CheckIsObj) \
+ _(CheckIsCallable) \
_(CheckObjCoercible) \
_(DebugCheckSelfHosted) \
_(AsmJSNeg) \
diff --git a/js/src/jit/VMFunctions.cpp b/js/src/jit/VMFunctions.cpp
index 4edbc3c83..77b9e3647 100644
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -1349,5 +1349,14 @@ BaselineGetFunctionThis(JSContext* cx, BaselineFrame* frame, MutableHandleValue
return GetFunctionThis(cx, frame, res);
}
+bool
+CheckIsCallable(JSContext* cx, HandleValue v, CheckIsCallableKind kind)
+{
+ if (!IsCallable(v))
+ return ThrowCheckIsCallable(cx, kind);
+
+ return true;
+}
+
} // namespace jit
} // namespace js
diff --git a/js/src/jit/VMFunctions.h b/js/src/jit/VMFunctions.h
index f754d58c7..572f05373 100644
--- a/js/src/jit/VMFunctions.h
+++ b/js/src/jit/VMFunctions.h
@@ -13,6 +13,7 @@
#include "jit/CompileInfo.h"
#include "jit/JitFrames.h"
+#include "vm/Interpreter.h"
namespace js {
@@ -802,6 +803,9 @@ ThrowObjectCoercible(JSContext* cx, HandleValue v);
MOZ_MUST_USE bool
BaselineGetFunctionThis(JSContext* cx, BaselineFrame* frame, MutableHandleValue res);
+MOZ_MUST_USE bool
+CheckIsCallable(JSContext* cx, HandleValue v, CheckIsCallableKind kind);
+
} // namespace jit
} // namespace js
diff --git a/js/src/jit/arm/MacroAssembler-arm.cpp b/js/src/jit/arm/MacroAssembler-arm.cpp
index c6e627db6..d40578514 100644
--- a/js/src/jit/arm/MacroAssembler-arm.cpp
+++ b/js/src/jit/arm/MacroAssembler-arm.cpp
@@ -3286,8 +3286,8 @@ void
MacroAssemblerARMCompat::moveValue(const Value& val, Register type, Register data)
{
ma_mov(Imm32(val.toNunboxTag()), type);
- if (val.isMarkable())
- ma_mov(ImmGCPtr(val.toMarkablePointer()), data);
+ if (val.isGCThing())
+ ma_mov(ImmGCPtr(val.toGCThing()), data);
else
ma_mov(Imm32(val.toNunboxPayload()), data);
}
@@ -3484,8 +3484,8 @@ MacroAssemblerARMCompat::storePayload(const Value& val, const BaseIndex& dest)
ScratchRegisterScope scratch(asMasm());
SecondScratchRegisterScope scratch2(asMasm());
- if (val.isMarkable())
- ma_mov(ImmGCPtr(val.toMarkablePointer()), scratch);
+ if (val.isGCThing())
+ ma_mov(ImmGCPtr(val.toGCThing()), scratch);
else
ma_mov(Imm32(val.toNunboxPayload()), scratch);
@@ -5314,8 +5314,8 @@ MacroAssembler::branchTestValue(Condition cond, const ValueOperand& lhs,
// equal, short circuit false (NotEqual).
ScratchRegisterScope scratch(*this);
- if (rhs.isMarkable())
- ma_cmp(lhs.payloadReg(), ImmGCPtr(rhs.toMarkablePointer()), scratch);
+ if (rhs.isGCThing())
+ ma_cmp(lhs.payloadReg(), ImmGCPtr(rhs.toGCThing()), scratch);
else
ma_cmp(lhs.payloadReg(), Imm32(rhs.toNunboxPayload()), scratch);
ma_cmp(lhs.typeReg(), Imm32(rhs.toNunboxTag()), scratch, Equal);
diff --git a/js/src/jit/arm/MacroAssembler-arm.h b/js/src/jit/arm/MacroAssembler-arm.h
index c011af3c3..c20a6c3e5 100644
--- a/js/src/jit/arm/MacroAssembler-arm.h
+++ b/js/src/jit/arm/MacroAssembler-arm.h
@@ -915,8 +915,8 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
ma_mov(Imm32(val.toNunboxTag()), scratch);
ma_str(scratch, ToType(dest), scratch2);
- if (val.isMarkable())
- ma_mov(ImmGCPtr(val.toMarkablePointer()), scratch);
+ if (val.isGCThing())
+ ma_mov(ImmGCPtr(val.toGCThing()), scratch);
else
ma_mov(Imm32(val.toNunboxPayload()), scratch);
ma_str(scratch, ToPayload(dest), scratch2);
@@ -944,15 +944,15 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
// Store the payload, marking if necessary.
if (payloadoffset < 4096 && payloadoffset > -4096) {
- if (val.isMarkable())
- ma_mov(ImmGCPtr(val.toMarkablePointer()), scratch2);
+ if (val.isGCThing())
+ ma_mov(ImmGCPtr(val.toGCThing()), scratch2);
else
ma_mov(Imm32(val.toNunboxPayload()), scratch2);
ma_str(scratch2, DTRAddr(scratch, DtrOffImm(payloadoffset)));
} else {
ma_add(Imm32(payloadoffset), scratch, scratch2);
- if (val.isMarkable())
- ma_mov(ImmGCPtr(val.toMarkablePointer()), scratch2);
+ if (val.isGCThing())
+ ma_mov(ImmGCPtr(val.toGCThing()), scratch2);
else
ma_mov(Imm32(val.toNunboxPayload()), scratch2);
ma_str(scratch2, DTRAddr(scratch, DtrOffImm(0)));
@@ -977,8 +977,8 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
void popValue(ValueOperand val);
void pushValue(const Value& val) {
push(Imm32(val.toNunboxTag()));
- if (val.isMarkable())
- push(ImmGCPtr(val.toMarkablePointer()));
+ if (val.isGCThing())
+ push(ImmGCPtr(val.toGCThing()));
else
push(Imm32(val.toNunboxPayload()));
}
diff --git a/js/src/jit/arm64/MacroAssembler-arm64.h b/js/src/jit/arm64/MacroAssembler-arm64.h
index b95831443..c21e2fd66 100644
--- a/js/src/jit/arm64/MacroAssembler-arm64.h
+++ b/js/src/jit/arm64/MacroAssembler-arm64.h
@@ -306,7 +306,7 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
void pushValue(const Value& val) {
vixl::UseScratchRegisterScope temps(this);
const Register scratch = temps.AcquireX().asUnsized();
- if (val.isMarkable()) {
+ if (val.isGCThing()) {
BufferOffset load = movePatchablePtr(ImmPtr(val.bitsAsPunboxPointer()), scratch);
writeDataRelocation(val, load);
push(scratch);
@@ -349,7 +349,7 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
}
}
void moveValue(const Value& val, Register dest) {
- if (val.isMarkable()) {
+ if (val.isGCThing()) {
BufferOffset load = movePatchablePtr(ImmPtr(val.bitsAsPunboxPointer()), dest);
writeDataRelocation(val, load);
} else {
@@ -1835,8 +1835,8 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
dataRelocations_.writeUnsigned(load.getOffset());
}
void writeDataRelocation(const Value& val, BufferOffset load) {
- if (val.isMarkable()) {
- gc::Cell* cell = val.toMarkablePointer();
+ if (val.isGCThing()) {
+ gc::Cell* cell = val.toGCThing();
if (cell && gc::IsInsideNursery(cell))
embedsNurseryPointers_ = true;
dataRelocations_.writeUnsigned(load.getOffset());
diff --git a/js/src/jit/mips32/MacroAssembler-mips32.cpp b/js/src/jit/mips32/MacroAssembler-mips32.cpp
index 0d3e55e21..2b2fab92d 100644
--- a/js/src/jit/mips32/MacroAssembler-mips32.cpp
+++ b/js/src/jit/mips32/MacroAssembler-mips32.cpp
@@ -1527,8 +1527,8 @@ MacroAssemblerMIPSCompat::getType(const Value& val)
void
MacroAssemblerMIPSCompat::moveData(const Value& val, Register data)
{
- if (val.isMarkable())
- ma_li(data, ImmGCPtr(val.toMarkablePointer()));
+ if (val.isGCThing())
+ ma_li(data, ImmGCPtr(val.toGCThing()));
else
ma_li(data, Imm32(val.toNunboxPayload()));
}
diff --git a/js/src/jit/mips32/MacroAssembler-mips32.h b/js/src/jit/mips32/MacroAssembler-mips32.h
index 4c7618d08..adb626bb0 100644
--- a/js/src/jit/mips32/MacroAssembler-mips32.h
+++ b/js/src/jit/mips32/MacroAssembler-mips32.h
@@ -480,8 +480,8 @@ class MacroAssemblerMIPSCompat : public MacroAssemblerMIPS
void popValue(ValueOperand val);
void pushValue(const Value& val) {
push(Imm32(val.toNunboxTag()));
- if (val.isMarkable())
- push(ImmGCPtr(val.toMarkablePointer()));
+ if (val.isGCThing())
+ push(ImmGCPtr(val.toGCThing()));
else
push(Imm32(val.toNunboxPayload()));
}
diff --git a/js/src/jit/mips64/MacroAssembler-mips64.cpp b/js/src/jit/mips64/MacroAssembler-mips64.cpp
index 329fa83f8..f58184bca 100644
--- a/js/src/jit/mips64/MacroAssembler-mips64.cpp
+++ b/js/src/jit/mips64/MacroAssembler-mips64.cpp
@@ -1885,7 +1885,7 @@ MacroAssemblerMIPS64Compat::storeValue(JSValueType type, Register reg, Address d
void
MacroAssemblerMIPS64Compat::storeValue(const Value& val, Address dest)
{
- if (val.isMarkable()) {
+ if (val.isGCThing()) {
writeDataRelocation(val);
movWithPatch(ImmWord(val.asRawBits()), SecondScratchReg);
} else {
diff --git a/js/src/jit/mips64/MacroAssembler-mips64.h b/js/src/jit/mips64/MacroAssembler-mips64.h
index 4cff87236..bfe452974 100644
--- a/js/src/jit/mips64/MacroAssembler-mips64.h
+++ b/js/src/jit/mips64/MacroAssembler-mips64.h
@@ -221,8 +221,8 @@ class MacroAssemblerMIPS64Compat : public MacroAssemblerMIPS64
}
void writeDataRelocation(const Value& val) {
- if (val.isMarkable()) {
- gc::Cell* cell = val.toMarkablePointer();
+ if (val.isGCThing()) {
+ gc::Cell* cell = val.toGCThing();
if (cell && gc::IsInsideNursery(cell))
embedsNurseryPointers_ = true;
dataRelocations_.writeUnsigned(currentOffset());
@@ -498,7 +498,7 @@ class MacroAssemblerMIPS64Compat : public MacroAssemblerMIPS64
void pushValue(ValueOperand val);
void popValue(ValueOperand val);
void pushValue(const Value& val) {
- if (val.isMarkable()) {
+ if (val.isGCThing()) {
writeDataRelocation(val);
movWithPatch(ImmWord(val.asRawBits()), ScratchRegister);
push(ScratchRegister);
diff --git a/js/src/jit/shared/LIR-shared.h b/js/src/jit/shared/LIR-shared.h
index a352f5d8a..9dcb527c5 100644
--- a/js/src/jit/shared/LIR-shared.h
+++ b/js/src/jit/shared/LIR-shared.h
@@ -4995,6 +4995,25 @@ class LLambdaArrow : public LInstructionHelper<1, 1 + BOX_PIECES, 0>
}
};
+class LSetFunName : public LCallInstructionHelper<1, 1 + BOX_PIECES, 0>
+{
+ public:
+ LIR_HEADER(SetFunName)
+
+ static const size_t NameValue = 1;
+
+ LSetFunName(const LAllocation& fun, const LBoxAllocation& name) {
+ setOperand(0, fun);
+ setBoxOperand(NameValue, name);
+ }
+ const LAllocation* fun() {
+ return getOperand(0);
+ }
+ const MSetFunName* mir() const {
+ return mir_->toSetFunName();
+ }
+};
+
class LKeepAliveObject : public LInstructionHelper<0, 1, 0>
{
public:
@@ -8874,6 +8893,27 @@ class LCheckIsObj : public LInstructionHelper<BOX_PIECES, BOX_PIECES, 0>
}
};
+class LCheckIsCallable : public LInstructionHelper<BOX_PIECES, BOX_PIECES, 1>
+{
+ public:
+ LIR_HEADER(CheckIsCallable)
+
+ static const size_t CheckValue = 0;
+
+ LCheckIsCallable(const LBoxAllocation& value, const LDefinition& temp) {
+ setBoxOperand(CheckValue, value);
+ setTemp(0, temp);
+ }
+
+ const LDefinition* temp() {
+ return getTemp(0);
+ }
+
+ MCheckIsCallable* mir() const {
+ return mir_->toCheckIsCallable();
+ }
+};
+
class LCheckObjCoercible : public LCallInstructionHelper<BOX_PIECES, BOX_PIECES, 0>
{
public:
diff --git a/js/src/jit/shared/LOpcodes-shared.h b/js/src/jit/shared/LOpcodes-shared.h
index bb04553a6..3eea1b449 100644
--- a/js/src/jit/shared/LOpcodes-shared.h
+++ b/js/src/jit/shared/LOpcodes-shared.h
@@ -242,6 +242,7 @@
_(Lambda) \
_(LambdaArrow) \
_(LambdaForSingleton) \
+ _(SetFunName) \
_(KeepAliveObject) \
_(Slots) \
_(Elements) \
@@ -401,6 +402,7 @@
_(ArrowNewTarget) \
_(CheckReturn) \
_(CheckIsObj) \
+ _(CheckIsCallable) \
_(CheckObjCoercible) \
_(DebugCheckSelfHosted) \
_(AsmJSLoadHeap) \
diff --git a/js/src/jit/x64/MacroAssembler-x64.h b/js/src/jit/x64/MacroAssembler-x64.h
index cb81bd7c1..be450767b 100644
--- a/js/src/jit/x64/MacroAssembler-x64.h
+++ b/js/src/jit/x64/MacroAssembler-x64.h
@@ -58,8 +58,8 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
// X64 helpers.
/////////////////////////////////////////////////////////////////
void writeDataRelocation(const Value& val) {
- if (val.isMarkable()) {
- gc::Cell* cell = val.toMarkablePointer();
+ if (val.isGCThing()) {
+ gc::Cell* cell = val.toGCThing();
if (cell && gc::IsInsideNursery(cell))
embedsNurseryPointers_ = true;
dataRelocations_.writeUnsigned(masm.currentOffset());
@@ -132,7 +132,7 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
template <typename T>
void storeValue(const Value& val, const T& dest) {
ScratchRegisterScope scratch(asMasm());
- if (val.isMarkable()) {
+ if (val.isGCThing()) {
movWithPatch(ImmWord(val.asRawBits()), scratch);
writeDataRelocation(val);
} else {
@@ -171,7 +171,7 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
pop(val.valueReg());
}
void pushValue(const Value& val) {
- if (val.isMarkable()) {
+ if (val.isGCThing()) {
ScratchRegisterScope scratch(asMasm());
movWithPatch(ImmWord(val.asRawBits()), scratch);
writeDataRelocation(val);
diff --git a/js/src/jit/x86/MacroAssembler-x86.cpp b/js/src/jit/x86/MacroAssembler-x86.cpp
index 754b29c2d..dc97b5b5b 100644
--- a/js/src/jit/x86/MacroAssembler-x86.cpp
+++ b/js/src/jit/x86/MacroAssembler-x86.cpp
@@ -499,8 +499,8 @@ MacroAssembler::branchTestValue(Condition cond, const ValueOperand& lhs,
const Value& rhs, Label* label)
{
MOZ_ASSERT(cond == Equal || cond == NotEqual);
- if (rhs.isMarkable())
- cmpPtr(lhs.payloadReg(), ImmGCPtr(rhs.toMarkablePointer()));
+ if (rhs.isGCThing())
+ cmpPtr(lhs.payloadReg(), ImmGCPtr(rhs.toGCThing()));
else
cmpPtr(lhs.payloadReg(), ImmWord(rhs.toNunboxPayload()));
diff --git a/js/src/jit/x86/MacroAssembler-x86.h b/js/src/jit/x86/MacroAssembler-x86.h
index 21cd63a0c..2b2507c77 100644
--- a/js/src/jit/x86/MacroAssembler-x86.h
+++ b/js/src/jit/x86/MacroAssembler-x86.h
@@ -94,8 +94,8 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
}
void moveValue(const Value& val, Register type, Register data) {
movl(Imm32(val.toNunboxTag()), type);
- if (val.isMarkable())
- movl(ImmGCPtr(val.toMarkablePointer()), data);
+ if (val.isGCThing())
+ movl(ImmGCPtr(val.toGCThing()), data);
else
movl(Imm32(val.toNunboxPayload()), data);
}
@@ -213,8 +213,8 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
}
void pushValue(const Value& val) {
push(Imm32(val.toNunboxTag()));
- if (val.isMarkable())
- push(ImmGCPtr(val.toMarkablePointer()));
+ if (val.isGCThing())
+ push(ImmGCPtr(val.toGCThing()));
else
push(Imm32(val.toNunboxPayload()));
}
@@ -235,8 +235,8 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
pop(dest.high);
}
void storePayload(const Value& val, Operand dest) {
- if (val.isMarkable())
- movl(ImmGCPtr(val.toMarkablePointer()), ToPayload(dest));
+ if (val.isGCThing())
+ movl(ImmGCPtr(val.toGCThing()), ToPayload(dest));
else
movl(Imm32(val.toNunboxPayload()), ToPayload(dest));
}