author    wolfbeast <mcwerewolf@wolfbeast.com>   2020-02-23 14:41:40 +0100
committer wolfbeast <mcwerewolf@wolfbeast.com>   2020-02-23 14:41:40 +0100
commit    dd57b9273c7c95a7cdabc94854c8dc63b0653f02 (patch)
tree      545f5066b231b616f92b3702982d8276fed13f01 /js/src/jit/BaselineIC.cpp
parent    be16123ddcaf061afced444a6db7d5b55020b744 (diff)
Revert #1137 - Remove unboxed arrays
- accounting for removal of watch()/unwatch() - updated for intermediate code changes.
Diffstat (limited to 'js/src/jit/BaselineIC.cpp')
-rw-r--r--  js/src/jit/BaselineIC.cpp  213
1 file changed, 181 insertions, 32 deletions
diff --git a/js/src/jit/BaselineIC.cpp b/js/src/jit/BaselineIC.cpp
index 1b98325b7..9c8cd9835 100644
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -1375,7 +1375,7 @@ IsNativeDenseElementAccess(HandleObject obj, HandleValue key)
static bool
IsNativeOrUnboxedDenseElementAccess(HandleObject obj, HandleValue key)
{
- if (!obj->isNative())
+ if (!obj->isNative() && !obj->is<UnboxedArrayObject>())
return false;
if (key.isInt32() && key.toInt32() >= 0 && !obj->is<TypedArrayObject>())
return true;
@@ -1479,6 +1479,20 @@ TryAttachGetElemStub(JSContext* cx, JSScript* script, jsbytecode* pc, ICGetElem_
script = rootedScript;
}
+ // Check for UnboxedArray[int] accesses.
+ if (obj->is<UnboxedArrayObject>() && rhs.isInt32() && rhs.toInt32() >= 0) {
+ JitSpew(JitSpew_BaselineIC, " Generating GetElem(UnboxedArray[Int32]) stub");
+ ICGetElem_UnboxedArray::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
+ obj->group());
+ ICStub* unboxedStub = compiler.getStub(compiler.getStubSpace(script));
+ if (!unboxedStub)
+ return false;
+
+ stub->addNewStub(unboxedStub);
+ *attached = true;
+ return true;
+ }
+
// Check for TypedArray[int] => Number and TypedObject[int] => Number accesses.
if ((obj->is<TypedArrayObject>() || IsPrimitiveArrayTypedObject(obj)) &&
rhs.isNumber() &&
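
The unboxed-array branch added above follows the same pattern as the other GetElem fast paths: attach a specialized stub only when the receiver and key match a narrow shape, and key the stub on the object's group so that a later group change falls through to the fallback. A standalone sketch of that guard decision, using hypothetical stand-in types (FakeObject, FakeValue) instead of the real JSObject/Value and ICStub machinery:

#include <cstdint>
#include <optional>

// Hypothetical stand-ins for the receiver object and the key value; the real
// code operates on JSObject* and JS::Value.
struct FakeValue { bool isInt32; int32_t i32; };
struct FakeObject { bool isUnboxedArray; uint32_t group; };

// Decide whether a GetElem(UnboxedArray[Int32]) stub may be attached: the
// receiver must be an unboxed array and the key a non-negative int32.
// Returns the group to guard on, or nothing when the fast path does not apply.
std::optional<uint32_t>
unboxedGetElemGuard(const FakeObject& obj, const FakeValue& key)
{
    if (!obj.isUnboxedArray)
        return std::nullopt;
    if (!key.isInt32 || key.i32 < 0)
        return std::nullopt;
    return obj.group;   // the stub later bails out if the group ever differs
}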
@@ -2085,6 +2099,56 @@ ICGetElem_Dense::Compiler::generateStubCode(MacroAssembler& masm)
}
//
+// GetElem_UnboxedArray
+//
+
+bool
+ICGetElem_UnboxedArray::Compiler::generateStubCode(MacroAssembler& masm)
+{
+ MOZ_ASSERT(engine_ == Engine::Baseline);
+
+ Label failure;
+ masm.branchTestObject(Assembler::NotEqual, R0, &failure);
+ masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
+
+ AllocatableGeneralRegisterSet regs(availableGeneralRegs(2));
+ Register scratchReg = regs.takeAny();
+
+ // Unbox R0 and group guard.
+ Register obj = masm.extractObject(R0, ExtractTemp0);
+ masm.loadPtr(Address(ICStubReg, ICGetElem_UnboxedArray::offsetOfGroup()), scratchReg);
+ masm.branchTestObjGroup(Assembler::NotEqual, obj, scratchReg, &failure);
+
+ // Unbox key.
+ Register key = masm.extractInt32(R1, ExtractTemp1);
+
+ // Bounds check.
+ masm.load32(Address(obj, UnboxedArrayObject::offsetOfCapacityIndexAndInitializedLength()),
+ scratchReg);
+ masm.and32(Imm32(UnboxedArrayObject::InitializedLengthMask), scratchReg);
+ masm.branch32(Assembler::BelowOrEqual, scratchReg, key, &failure);
+
+ // Load obj->elements.
+ masm.loadPtr(Address(obj, UnboxedArrayObject::offsetOfElements()), scratchReg);
+
+ // Load value.
+ size_t width = UnboxedTypeSize(elementType_);
+ BaseIndex addr(scratchReg, key, ScaleFromElemWidth(width));
+ masm.loadUnboxedProperty(addr, elementType_, R0);
+
+ // Only monitor the result if its type might change.
+ if (elementType_ == JSVAL_TYPE_OBJECT)
+ EmitEnterTypeMonitorIC(masm);
+ else
+ EmitReturnFromIC(masm);
+
+ // Failure case - jump to next stub
+ masm.bind(&failure);
+ EmitStubGuardFailure(masm);
+ return true;
+}
+
+//
// GetElem_TypedArray
//
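
The ICGetElem_UnboxedArray stub compiled in the previous hunk performs, in effect, the following element read. The struct below is a simplified stand-in for UnboxedArrayObject, and kInitializedLengthMask is an illustrative value, not the real InitializedLengthMask constant; note that only JSVAL_TYPE_OBJECT elements need a type monitor after the load, since every other unboxed element type maps to a single value type.

#include <cstdint>
#include <cstddef>
#include <cstring>

// Simplified model of an unboxed array: as in the masm code above, the
// initialized length shares one 32-bit word with the capacity index.
struct SketchUnboxedArray {
    uint32_t capacityIndexAndInitializedLength;
    uint8_t* elements;              // flat, unboxed element storage
};

// Illustrative mask only; the real constant is
// UnboxedArrayObject::InitializedLengthMask.
static const uint32_t kInitializedLengthMask = (1u << 24) - 1;

// Read element `key` of width `elemSize` bytes into `out`, or return false
// the same way the stub jumps to its failure label (negative or
// out-of-bounds key).
bool sketchGetUnboxedElem(const SketchUnboxedArray& arr, int32_t key,
                          size_t elemSize, void* out)
{
    if (key < 0)
        return false;                                   // int32 guard
    uint32_t initLength =
        arr.capacityIndexAndInitializedLength & kInitializedLengthMask;
    if (uint32_t(key) >= initLength)
        return false;                                   // bounds check
    std::memcpy(out, arr.elements + size_t(key) * elemSize, elemSize);
    return true;                                        // scaled, unboxed load
}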
@@ -2388,8 +2452,8 @@ CanOptimizeDenseOrUnboxedArraySetElem(JSObject* obj, uint32_t index,
Shape* oldShape, uint32_t oldCapacity, uint32_t oldInitLength,
bool* isAddingCaseOut, size_t* protoDepthOut)
{
- uint32_t initLength = obj->as<NativeObject>().getDenseInitializedLength();
- uint32_t capacity = obj->as<NativeObject>().getDenseCapacity();
+ uint32_t initLength = GetAnyBoxedOrUnboxedInitializedLength(obj);
+ uint32_t capacity = GetAnyBoxedOrUnboxedCapacity(obj);
*isAddingCaseOut = false;
*protoDepthOut = 0;
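
Switching from NativeObject-specific accessors to GetAnyBoxedOrUnboxedInitializedLength/Capacity lets the SetElem path treat native and unboxed arrays uniformly. A minimal sketch of that kind of dispatch, with a stand-in SketchObject instead of the real object hierarchy and an illustrative mask value:

#include <cstdint>

// Stand-in for an object that is either a native (boxed) array or an unboxed
// array; the real helpers dispatch on the object's class, not a flag.
struct SketchObject {
    bool isUnboxed;
    uint32_t nativeInitLength;      // NativeObject dense initialized length
    uint32_t unboxedLengthWord;     // packed capacity index + initialized length
};

static const uint32_t kInitLenMask = (1u << 24) - 1;    // illustrative value

// Uniform accessor in the spirit of GetAnyBoxedOrUnboxedInitializedLength():
// callers such as CanOptimizeDenseOrUnboxedArraySetElem no longer need to
// care which layout the array uses.
uint32_t sketchAnyInitializedLength(const SketchObject& obj)
{
    return obj.isUnboxed ? (obj.unboxedLengthWord & kInitLenMask)
                         : obj.nativeInitLength;
}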
@@ -2398,6 +2462,10 @@ CanOptimizeDenseOrUnboxedArraySetElem(JSObject* obj, uint32_t index,
if (initLength < oldInitLength || capacity < oldCapacity)
return false;
+ // Unboxed arrays need to be able to emit floating point code.
+ if (obj->is<UnboxedArrayObject>() && !obj->runtimeFromMainThread()->jitSupportsFloatingPoint)
+ return false;
+
Shape* shape = obj->maybeShape();
// Cannot optimize if the shape changed.
@@ -2479,8 +2547,8 @@ DoSetElemFallback(JSContext* cx, BaselineFrame* frame, ICSetElem_Fallback* stub_
uint32_t oldCapacity = 0;
uint32_t oldInitLength = 0;
if (index.isInt32() && index.toInt32() >= 0) {
- oldCapacity = obj->as<NativeObject>().getDenseCapacity();
- oldInitLength = obj->as<NativeObject>().getDenseInitializedLength();
+ oldCapacity = GetAnyBoxedOrUnboxedCapacity(obj);
+ oldInitLength = GetAnyBoxedOrUnboxedInitializedLength(obj);
}
if (op == JSOP_INITELEM || op == JSOP_INITHIDDENELEM) {
@@ -2818,6 +2886,29 @@ ICSetElem_DenseOrUnboxedArray::Compiler::generateStubCode(MacroAssembler& masm)
masm.loadValue(valueAddr, tmpVal);
EmitPreBarrier(masm, element, MIRType::Value);
masm.storeValue(tmpVal, element);
+ } else {
+ // Set element on an unboxed array.
+
+ // Bounds check.
+ Address initLength(obj, UnboxedArrayObject::offsetOfCapacityIndexAndInitializedLength());
+ masm.load32(initLength, scratchReg);
+ masm.and32(Imm32(UnboxedArrayObject::InitializedLengthMask), scratchReg);
+ masm.branch32(Assembler::BelowOrEqual, scratchReg, key, &failure);
+
+ // Load obj->elements.
+ masm.loadPtr(Address(obj, UnboxedArrayObject::offsetOfElements()), scratchReg);
+
+ // Compute the address being written to.
+ BaseIndex address(scratchReg, key, ScaleFromElemWidth(UnboxedTypeSize(unboxedType_)));
+
+ EmitUnboxedPreBarrierForBaseline(masm, address, unboxedType_);
+
+ Address valueAddr(masm.getStackPointer(), ICStackValueOffset + sizeof(Value));
+ masm.Push(R0);
+ masm.loadValue(valueAddr, R0);
+ masm.storeUnboxedProperty(address, unboxedType_,
+ ConstantOrRegister(TypedOrValueRegister(R0)), &failurePopR0);
+ masm.Pop(R0);
}
EmitReturnFromIC(masm);
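
The non-dense branch above overwrites an unboxed element in place. The store itself can still fail when the incoming value cannot be represented as the element type, which is why R0 is pushed around the call and storeUnboxedProperty takes the failurePopR0 label. A standalone sketch under those assumptions, with sketchTryStoreUnboxed standing in for masm.storeUnboxedProperty and the pre-barrier elided:

#include <cstdint>
#include <cstddef>
#include <cstring>

struct SketchUnboxedArray {
    uint32_t capacityIndexAndInitializedLength;
    uint8_t* elements;
};

static const uint32_t kInitializedLengthMask = (1u << 24) - 1;  // illustrative

// Hypothetical stand-in for masm.storeUnboxedProperty(): the write can be
// refused when the value does not fit the unboxed element type.
bool sketchTryStoreUnboxed(uint8_t* addr, size_t elemSize, const void* value,
                           bool valueMatchesElementType)
{
    if (!valueMatchesElementType)
        return false;                       // corresponds to failurePopR0
    std::memcpy(addr, value, elemSize);
    return true;
}

// Overwrite element `key` of an unboxed array; the pre-barrier emitted by
// EmitUnboxedPreBarrierForBaseline is not modelled here.
bool sketchSetUnboxedElem(SketchUnboxedArray& arr, int32_t key, size_t elemSize,
                          const void* value, bool valueMatchesElementType)
{
    uint32_t initLength =
        arr.capacityIndexAndInitializedLength & kInitializedLengthMask;
    if (key < 0 || uint32_t(key) >= initLength)
        return false;                                   // bounds check
    uint8_t* addr = arr.elements + size_t(key) * elemSize;
    return sketchTryStoreUnboxed(addr, elemSize, value, valueMatchesElementType);
}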
@@ -3011,6 +3102,40 @@ ICSetElemDenseOrUnboxedArrayAddCompiler::generateStubCode(MacroAssembler& masm)
BaseIndex element(scratchReg, key, TimesEight);
masm.loadValue(valueAddr, tmpVal);
masm.storeValue(tmpVal, element);
+ } else {
+ // Adding element to an unboxed array.
+
+ // Bounds check (key == initLength)
+ Address initLengthAddr(obj, UnboxedArrayObject::offsetOfCapacityIndexAndInitializedLength());
+ masm.load32(initLengthAddr, scratchReg);
+ masm.and32(Imm32(UnboxedArrayObject::InitializedLengthMask), scratchReg);
+ masm.branch32(Assembler::NotEqual, scratchReg, key, &failure);
+
+ // Capacity check.
+ masm.checkUnboxedArrayCapacity(obj, RegisterOrInt32Constant(key), scratchReg, &failure);
+
+ // Load obj->elements.
+ masm.loadPtr(Address(obj, UnboxedArrayObject::offsetOfElements()), scratchReg);
+
+ // Write the value first, since this can fail. No need for pre-barrier
+ // since we're not overwriting an old value.
+ masm.Push(R0);
+ Address valueAddr(masm.getStackPointer(), ICStackValueOffset + sizeof(Value));
+ masm.loadValue(valueAddr, R0);
+ BaseIndex address(scratchReg, key, ScaleFromElemWidth(UnboxedTypeSize(unboxedType_)));
+ masm.storeUnboxedProperty(address, unboxedType_,
+ ConstantOrRegister(TypedOrValueRegister(R0)), &failurePopR0);
+ masm.Pop(R0);
+
+ // Increment initialized length.
+ masm.add32(Imm32(1), initLengthAddr);
+
+ // If length is now <= key, increment length.
+ Address lengthAddr(obj, UnboxedArrayObject::offsetOfLength());
+ Label skipIncrementLength;
+ masm.branch32(Assembler::Above, lengthAddr, key, &skipIncrementLength);
+ masm.add32(Imm32(1), lengthAddr);
+ masm.bind(&skipIncrementLength);
}
EmitReturnFromIC(masm);
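
The add case above only fires when the key equals the current initialized length, there is spare capacity, and the value fits the element type; it then bumps the initialized length and, if the array's length did not already exceed the key, the length as well. A hedged sketch of the same sequence (the real object derives its capacity from a capacity index, which is elided here):

#include <cstdint>
#include <cstddef>
#include <cstring>

// Simplified unboxed array carrying just the fields the add path touches.
struct SketchUnboxedArray {
    uint32_t initializedLength;
    uint32_t capacity;
    uint32_t length;
    uint8_t* elements;
};

// Append at `key`, mirroring the masm above; no pre-barrier is needed because
// a fresh slot holds no old value.
bool sketchAddUnboxedElem(SketchUnboxedArray& arr, uint32_t key, size_t elemSize,
                          const void* value, bool valueMatchesElementType)
{
    if (key != arr.initializedLength)
        return false;                       // branch32(NotEqual, ..., &failure)
    if (key >= arr.capacity)
        return false;                       // checkUnboxedArrayCapacity
    if (!valueMatchesElementType)
        return false;                       // storeUnboxedProperty failure path
    std::memcpy(arr.elements + size_t(key) * elemSize, value, elemSize);
    arr.initializedLength++;                // add32(Imm32(1), initLengthAddr)
    if (arr.length <= key)                  // Assembler::Above skips the bump
        arr.length++;                       // add32(Imm32(1), lengthAddr)
    return true;
}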
@@ -5374,6 +5499,13 @@ GetTemplateObjectForSimd(JSContext* cx, JSFunction* target, MutableHandleObject
return true;
}
+static void
+EnsureArrayGroupAnalyzed(JSContext* cx, JSObject* obj)
+{
+ if (PreliminaryObjectArrayWithTemplate* objects = obj->group()->maybePreliminaryObjects())
+ objects->maybeAnalyze(cx, obj->group(), /* forceAnalyze = */ true);
+}
+
static bool
GetTemplateObjectForNative(JSContext* cx, HandleFunction target, const CallArgs& args,
MutableHandleObject res, bool* skipAttach)
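
EnsureArrayGroupAnalyzed forces the preliminary-object analysis to run before a freshly allocated array is cached as a call template, so the group's layout is settled up front instead of changing underneath the stub later. A small sketch of the updated `return !!res;` tails in GetTemplateObjectForNative, with hypothetical SketchGroup/SketchArray types standing in for ObjectGroup and the template object:

// Hypothetical stand-ins: a group that may still be collecting preliminary
// objects, and an allocation result that can be null.
struct SketchGroup {
    bool hasPreliminaryObjects = true;
    void analyze() { hasPreliminaryObjects = false; }   // maybeAnalyze(..., true)
};
struct SketchArray { SketchGroup* group; };

// Instead of `return !!res;`, a successful allocation now has its group
// analyzed before the object is handed out as a template.
bool sketchFinalizeTemplate(SketchArray* res)
{
    if (!res)
        return false;                       // allocation failed
    if (res->group->hasPreliminaryObjects)  // EnsureArrayGroupAnalyzed(cx, res)
        res->group->analyze();
    return true;
}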
@@ -5405,7 +5537,10 @@ GetTemplateObjectForNative(JSContext* cx, HandleFunction target, const CallArgs&
// With this and other array templates, analyze the group so that
// we don't end up with a template whose structure might change later.
res.set(NewFullyAllocatedArrayForCallingAllocationSite(cx, count, TenuredObject));
- return !!res;
+ if (!res)
+ return false;
+ EnsureArrayGroupAnalyzed(cx, res);
+ return true;
}
}
@@ -5430,7 +5565,10 @@ GetTemplateObjectForNative(JSContext* cx, HandleFunction target, const CallArgs&
return true;
}
res.set(NewFullyAllocatedArrayTryReuseGroup(cx, obj, 0, TenuredObject));
- return !!res;
+ if (!res)
+ return false;
+ EnsureArrayGroupAnalyzed(cx, res);
+ return true;
}
}
}
@@ -5447,7 +5585,10 @@ GetTemplateObjectForNative(JSContext* cx, HandleFunction target, const CallArgs&
}
res.set(NewFullyAllocatedArrayForCallingAllocationSite(cx, 0, TenuredObject));
- return !!res;
+ if (!res)
+ return false;
+ EnsureArrayGroupAnalyzed(cx, res);
+ return true;
}
if (native == StringConstructor) {
@@ -5760,24 +5901,15 @@ TryAttachCallStub(JSContext* cx, ICCall_Fallback* stub, HandleScript script, jsb
}
static bool
-CopyArray(JSContext* cx, HandleArrayObject arr, MutableHandleValue result)
+CopyArray(JSContext* cx, HandleObject obj, MutableHandleValue result)
{
- uint32_t length = arr->length();
- ArrayObject* nobj = NewFullyAllocatedArrayTryReuseGroup(cx, arr, length, TenuredObject);
+ uint32_t length = GetAnyBoxedOrUnboxedArrayLength(obj);
+ JSObject* nobj = NewFullyAllocatedArrayTryReuseGroup(cx, obj, length, TenuredObject);
if (!nobj)
return false;
-
- MOZ_ASSERT(arr->isNative());
- MOZ_ASSERT(nobj->isNative());
- MOZ_ASSERT(nobj->as<NativeObject>().getDenseInitializedLength() == 0);
- MOZ_ASSERT(arr->as<NativeObject>().getDenseInitializedLength() >= length);
- MOZ_ASSERT(nobj->as<NativeObject>().getDenseCapacity() >= length);
-
- nobj->as<NativeObject>().setDenseInitializedLength(length);
-
- const Value* vp = arr->as<NativeObject>().getDenseElements();
- nobj->as<NativeObject>().initDenseElements(0, vp, length);
-
+ EnsureArrayGroupAnalyzed(cx, nobj);
+ CopyAnyBoxedOrUnboxedDenseElements(cx, nobj, obj, 0, 0, length);
+
result.setObject(*nobj);
return true;
}
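
CopyArray now accepts any boxed or unboxed array: it reads the length through GetAnyBoxedOrUnboxedArrayLength, analyzes the new array's group, and copies the dense elements with CopyAnyBoxedOrUnboxedDenseElements instead of poking NativeObject internals directly. A rough sketch of the element copy under simplified assumptions; the real helper can also convert between layouts, while this sketch only copies like-for-like:

#include <cstdint>
#include <vector>

// Stand-ins for the two element layouts.
struct SketchBoxedValue { uint64_t bits; };

struct SketchArray {
    bool unboxed;
    std::vector<SketchBoxedValue> boxedElems;   // NativeObject dense elements
    std::vector<double> unboxedElems;           // one possible unboxed layout
};

// Copy the first `length` elements of src into dst, in the spirit of
// CopyAnyBoxedOrUnboxedDenseElements(cx, nobj, obj, 0, 0, length).
bool sketchCopyDenseElements(SketchArray& dst, const SketchArray& src,
                             uint32_t length)
{
    if (src.unboxed != dst.unboxed)
        return false;               // this sketch does not convert layouts
    if (src.unboxed) {
        if (src.unboxedElems.size() < length)
            return false;
        dst.unboxedElems.assign(src.unboxedElems.begin(),
                                src.unboxedElems.begin() + length);
    } else {
        if (src.boxedElems.size() < length)
            return false;
        dst.boxedElems.assign(src.boxedElems.begin(),
                              src.boxedElems.begin() + length);
    }
    return true;
}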
@@ -5808,22 +5940,26 @@ TryAttachStringSplit(JSContext* cx, ICCall_Fallback* stub, HandleScript script,
RootedValue arr(cx);
// Copy the array before storing in stub.
- if (!CopyArray(cx, obj.as<ArrayObject>(), &arr))
+ if (!CopyArray(cx, obj, &arr))
return false;
// Atomize all elements of the array.
- RootedArrayObject arrObj(cx, &arr.toObject().as<ArrayObject>());
- uint32_t initLength = arrObj->length();
+ RootedObject arrObj(cx, &arr.toObject());
+ uint32_t initLength = GetAnyBoxedOrUnboxedArrayLength(arrObj);
for (uint32_t i = 0; i < initLength; i++) {
- JSAtom* str = js::AtomizeString(cx, arrObj->getDenseElement(i).toString());
+ JSAtom* str = js::AtomizeString(cx, GetAnyBoxedOrUnboxedDenseElement(arrObj, i).toString());
if (!str)
return false;
- arrObj->setDenseElementWithType(cx, i, StringValue(str));
+ if (!SetAnyBoxedOrUnboxedDenseElement(cx, arrObj, i, StringValue(str))) {
+ // The value could not be stored to an unboxed dense element.
+ return true;
+ }
}
ICCall_StringSplit::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
- script->pcToOffset(pc), str, sep, arrObj);
+ script->pcToOffset(pc), str, sep,
+ arr);
ICStub* newStub = compiler.getStub(compiler.getStubSpace(script));
if (!newStub)
return false;
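
In the atomize loop above, setDenseElementWithType is replaced by SetAnyBoxedOrUnboxedDenseElement, and a refused store (possible for an unboxed array) makes the fallback return true without attaching a StringSplit stub; the compiler is then handed the boxed value `arr` rather than a RootedArrayObject. A sketch of that control flow with hypothetical helpers (SketchAtomTable, sketchStoreElement):

#include <string>
#include <vector>

// Hypothetical interned-string table standing in for js::AtomizeString.
struct SketchAtomTable {
    std::vector<std::string> atoms;
    const std::string* atomize(const std::string& s) {
        for (const std::string& a : atoms)
            if (a == s)
                return &a;
        atoms.push_back(s);
        return &atoms.back();
    }
};

// Hypothetical element store that can refuse a value, the way an unboxed
// dense write (SetAnyBoxedOrUnboxedDenseElement) can.
bool sketchStoreElement(std::vector<std::string>& arr, size_t i,
                        const std::string& v)
{
    arr[i] = v;
    return true;    // a real unboxed store may return false on type mismatch
}

// Atomize every element before the array is baked into a stub. Returning
// true without storing mirrors the new behaviour: if a store is refused,
// the fallback simply does not attach a StringSplit stub.
bool sketchAtomizeElements(SketchAtomTable& table, std::vector<std::string>& arr)
{
    for (size_t i = 0; i < arr.size(); i++) {
        const std::string* atom = table.atomize(arr[i]);
        if (!atom)
            return false;                           // real failure: propagate
        if (!sketchStoreElement(arr, i, *atom))
            return true;                            // refused: attach no stub
    }
    return true;
}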
@@ -6711,7 +6847,7 @@ ICCallScriptedCompiler::generateStubCode(MacroAssembler& masm)
return true;
}
-typedef bool (*CopyArrayFn)(JSContext*, HandleArrayObject, MutableHandleValue);
+typedef bool (*CopyArrayFn)(JSContext*, HandleObject, MutableHandleValue);
static const VMFunction CopyArrayInfo = FunctionInfo<CopyArrayFn>(CopyArray, "CopyArray");
bool
@@ -8188,6 +8324,19 @@ ICGetElem_Dense::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorSt
return New<ICGetElem_Dense>(cx, space, other.jitCode(), firstMonitorStub, other.shape_);
}
+ICGetElem_UnboxedArray::ICGetElem_UnboxedArray(JitCode* stubCode, ICStub* firstMonitorStub,
+ ObjectGroup *group)
+ : ICMonitoredStub(GetElem_UnboxedArray, stubCode, firstMonitorStub),
+ group_(group)
+{ }
+
+/* static */ ICGetElem_UnboxedArray*
+ICGetElem_UnboxedArray::Clone(JSContext* cx, ICStubSpace* space, ICStub* firstMonitorStub,
+ ICGetElem_UnboxedArray& other)
+{
+ return New<ICGetElem_UnboxedArray>(cx, space, other.jitCode(), firstMonitorStub, other.group_);
+}
+
ICGetElem_TypedArray::ICGetElem_TypedArray(JitCode* stubCode, Shape* shape, Scalar::Type type)
: ICStub(GetElem_TypedArray, stubCode),
shape_(shape)
@@ -8563,8 +8712,8 @@ static bool DoRestFallback(JSContext* cx, BaselineFrame* frame, ICRest_Fallback*
unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
Value* rest = frame->argv() + numFormals;
- ArrayObject* obj = ObjectGroup::newArrayObject(cx, rest, numRest, GenericObject,
- ObjectGroup::NewArrayKind::UnknownIndex);
+ JSObject* obj = ObjectGroup::newArrayObject(cx, rest, numRest, GenericObject,
+ ObjectGroup::NewArrayKind::UnknownIndex);
if (!obj)
return false;
res.setObject(*obj);