Diffstat (limited to 'js/src/jit/mips64/Assembler-mips64.cpp')
-rw-r--r--  js/src/jit/mips64/Assembler-mips64.cpp  82
1 file changed, 0 insertions, 82 deletions
diff --git a/js/src/jit/mips64/Assembler-mips64.cpp b/js/src/jit/mips64/Assembler-mips64.cpp
index 6d76363090..a7254b8257 100644
--- a/js/src/jit/mips64/Assembler-mips64.cpp
+++ b/js/src/jit/mips64/Assembler-mips64.cpp
@@ -228,88 +228,6 @@ Assembler::Bind(uint8_t* rawCode, CodeOffset* label, const void* address)
}
}
-void
-Assembler::bind(InstImm* inst, uintptr_t branch, uintptr_t target)
-{
-    int64_t offset = target - branch;
-
-    // Generate the patchable mixed jump for call.
-    if (inst->extractOpcode() == ((uint32_t)op_jal >> OpcodeShift)) {
-        addMixedJump(BufferOffset(branch), ImmPtr((void*)target));
-        return;
-    }
-
-    // If encoded offset is 4, then the jump must be short
-    if (BOffImm16(inst[0]).decode() == 4) {
-        MOZ_ASSERT(BOffImm16::IsInRange(offset));
-        inst[0].setBOffImm16(BOffImm16(offset));
-        inst[1].makeNop();
-        return;
-    }
-
-    if (BOffImm16::IsInRange(offset)) {
-        inst[0].setBOffImm16(BOffImm16(offset));
-        inst[1].makeNop();
-
-        return;
-    }
-
-    addMixedJump(BufferOffset(branch), ImmPtr((void*)target));
-}
-
-void
-Assembler::bind(RepatchLabel* label)
-{
-    BufferOffset dest = nextOffset();
-    if (label->used() && !oom()) {
-        // If the label has a use, then change this use to refer to
-        // the bound label.
-        BufferOffset b(label->offset());
-        InstImm* inst = (InstImm*)editSrc(b);
-        InstImm inst_beq = InstImm(op_beq, zero, zero, BOffImm16(0));
-        uint64_t offset = dest.getOffset() - label->offset();
-
-        // If first instruction is j, then this is a mixed jump.
-        // If second instruction is lui, then this is a loop backedge.
-        if (inst[0].extractOpcode() == (uint32_t(op_j) >> OpcodeShift)) {
-            // For unconditional mixed branches generated by jumpWithPatch
-            addMixedJump(b, ImmPtr((void*)dest.getOffset()), MixedJumpPatch::PATCHABLE);
-        } else if (inst[1].extractOpcode() == (uint32_t(op_lui) >> OpcodeShift) ||
-                   BOffImm16::IsInRange(offset))
-        {
-            // Handle code produced by:
-            // backedgeJump
-            MOZ_ASSERT(BOffImm16::IsInRange(offset));
-            MOZ_ASSERT(inst[0].extractOpcode() == (uint32_t(op_beq) >> OpcodeShift) ||
-                       inst[0].extractOpcode() == (uint32_t(op_bne) >> OpcodeShift) ||
-                       inst[0].extractOpcode() == (uint32_t(op_blez) >> OpcodeShift) ||
-                       inst[0].extractOpcode() == (uint32_t(op_bgtz) >> OpcodeShift));
-            inst[0].setBOffImm16(BOffImm16(offset));
-        } else if (inst[0].encode() == inst_beq.encode()) {
-            // Handle open mixed unconditional jumps created by
-            // MacroAssemblerMIPSShared::ma_b(..., wasm::Trap, ...).
-            // We need to add it to mixed jumps array here.
-            // See MacroAssemblerMIPS64::branchWithCode().
-            MOZ_ASSERT(inst[1].encode() == NopInst);
-            addMixedJump(b, ImmPtr((void*)dest.getOffset()), MixedJumpPatch::PATCHABLE);
-            inst[0] = InstJump(op_j, JOffImm26(0)).encode();
-        } else {
-            // Handle open mixed conditional jumps created by
-            // MacroAssemblerMIPSShared::ma_b(..., wasm::Trap, ...).
-            inst[0] = invertBranch(inst[0], BOffImm16(4 * sizeof(uint32_t)));
-            // No need for a "nop" here because we can clobber scratch.
-            // We need to add it to mixed jumps array here.
-            // See MacroAssemblerMIPS64::branchWithCode().
-            MOZ_ASSERT(inst[1].encode() == NopInst);
-            MOZ_ASSERT(inst[2].encode() == NopInst);
-            MOZ_ASSERT(inst[3].encode() == NopInst);
-            addMixedJump(b, ImmPtr((void*)dest.getOffset()), MixedJumpPatch::PATCHABLE);
-            inst[2] = InstJump(op_j, JOffImm26(0)).encode();
-        }
-    }
-    label->bind(dest.getOffset());
-}
-
uint32_t
Assembler::PatchWrite_NearCallSize()
{