Diffstat (limited to 'js/src/jit/mips-shared/Assembler-mips-shared.cpp')
 js/src/jit/mips-shared/Assembler-mips-shared.cpp | 81 ++++++++++++++++++++
 1 file changed, 81 insertions, 0 deletions
diff --git a/js/src/jit/mips-shared/Assembler-mips-shared.cpp b/js/src/jit/mips-shared/Assembler-mips-shared.cpp
index 8519bab4d..1b14ef0eb 100644
--- a/js/src/jit/mips-shared/Assembler-mips-shared.cpp
+++ b/js/src/jit/mips-shared/Assembler-mips-shared.cpp
@@ -1597,6 +1597,87 @@ AssemblerMIPSShared::bindLater(Label* label, wasm::TrapDesc target)
 }
 
 void
+AssemblerMIPSShared::bind(InstImm* inst, uintptr_t branch, uintptr_t target)
+{
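+    // 'offset' is the signed byte distance from the branch instruction to its target.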
+    intptr_t offset = target - branch;
+
+    // Generate the patchable mixed jump for call.
+    if (inst->extractOpcode() == ((uint32_t)op_jal >> OpcodeShift)) {
+        addMixedJump(BufferOffset(branch), ImmPtr((void*)target));
+        return;
+    }
+
+    // If the encoded offset is 4, then the jump must be short.
+    if (BOffImm16(inst[0]).decode() == 4) {
+        MOZ_ASSERT(BOffImm16::IsInRange(offset));
+        inst[0].setBOffImm16(BOffImm16(offset));
+        inst[1].makeNop();
+        return;
+    }
+
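+    // The target is reachable with a 16-bit branch offset: patch the branch in
+    // place and turn the following instruction into a nop.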
+    if (BOffImm16::IsInRange(offset)) {
+        inst[0].setBOffImm16(BOffImm16(offset));
+        inst[1].makeNop();
+        return;
+    }
+
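+    // Out of short-branch range: record this branch in the mixed jumps array instead.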
+    addMixedJump(BufferOffset(branch), ImmPtr((void*)target));
+}
+
+void
+AssemblerMIPSShared::bind(RepatchLabel* label)
+{
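+    // The label is bound at the current end of the instruction buffer.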
+    BufferOffset dest = nextOffset();
+    if (label->used() && !oom()) {
+        // If the label has a use, then change this use to refer to
+        // the bound label.
+        BufferOffset b(label->offset());
+        InstImm* inst = (InstImm*)editSrc(b);
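+        // Unconditional "beq $zero, $zero, 0" pattern, used below to recognize open jumps emitted by ma_b.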
+        InstImm inst_beq = InstImm(op_beq, zero, zero, BOffImm16(0));
+        intptr_t offset = dest.getOffset() - label->offset();
+
+        // If the first instruction is a j, then this is a mixed jump.
+        // If the second instruction is a lui, then this is a loop backedge.
+        if (inst[0].extractOpcode() == (uint32_t(op_j) >> OpcodeShift)) {
+            // For unconditional mixed branches generated by jumpWithPatch.
+            addMixedJump(b, ImmPtr((void*)dest.getOffset()), MixedJumpPatch::PATCHABLE);
+        } else if (inst[1].extractOpcode() == (uint32_t(op_lui) >> OpcodeShift) ||
+                   BOffImm16::IsInRange(offset))
+        {
+            // Handle code produced by backedgeJump.
+            MOZ_ASSERT(BOffImm16::IsInRange(offset));
+            MOZ_ASSERT(inst[0].extractOpcode() == (uint32_t(op_beq) >> OpcodeShift) ||
+                       inst[0].extractOpcode() == (uint32_t(op_bne) >> OpcodeShift) ||
+                       inst[0].extractOpcode() == (uint32_t(op_blez) >> OpcodeShift) ||
+                       inst[0].extractOpcode() == (uint32_t(op_bgtz) >> OpcodeShift));
+            inst[0].setBOffImm16(BOffImm16(offset));
+        } else if (inst[0].encode() == inst_beq.encode()) {
+            // Handle open mixed unconditional jumps created by
+            // MacroAssemblerMIPSShared::ma_b(..., wasm::Trap, ...).
+            // We need to add it to the mixed jumps array here.
+            // See MacroAssemblerMIPS::branchWithCode().
+            MOZ_ASSERT(inst[1].encode() == NopInst);
+            addMixedJump(b, ImmPtr((void*)dest.getOffset()), MixedJumpPatch::PATCHABLE);
+            inst[0] = InstJump(op_j, JOffImm26(0)).encode();
+        } else {
+            // Handle open mixed conditional jumps created by
+            // MacroAssemblerMIPSShared::ma_b(..., wasm::Trap, ...).
+            inst[0] = invertBranch(inst[0], BOffImm16(4 * sizeof(uint32_t)));
+            // No need for a "nop" here because we can clobber scratch.
+            // We need to add it to the mixed jumps array here.
+            // See MacroAssemblerMIPS::branchWithCode().
+            MOZ_ASSERT(inst[1].encode() == NopInst);
+            MOZ_ASSERT(inst[2].encode() == NopInst);
+            MOZ_ASSERT(inst[3].encode() == NopInst);
+            addMixedJump(b, ImmPtr((void*)dest.getOffset()), MixedJumpPatch::PATCHABLE);
+            inst[2] = InstJump(op_j, JOffImm26(0)).encode();
+        }
+    }
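+    // Whether or not the label had a use, record the offset it is bound to.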
+    label->bind(dest.getOffset());
+}
+
+void
 AssemblerMIPSShared::retarget(Label* label, Label* target)
 {
     if (label->used() && !oom()) {