diff --git a/src/backend/A64/emitter/a64_emitter.cpp b/src/backend/A64/emitter/a64_emitter.cpp
index 99242356..15287e5e 100644
--- a/src/backend/A64/emitter/a64_emitter.cpp
+++ b/src/backend/A64/emitter/a64_emitter.cpp
@@ -324,7 +324,7 @@ void ARM64XEmitter::FlushIcache() {
     m_lastCacheFlushEnd = m_code;
 }
 
-void ARM64XEmitter::FlushIcacheSection(u8* start, u8* end) {
+void ARM64XEmitter::FlushIcacheSection(const u8* start, const u8* end) {
     if (start == end)
         return;
 
@@ -748,6 +748,8 @@ void ARM64XEmitter::EncodeBitfieldMOVInst(u32 op, ARM64Reg Rd, ARM64Reg Rn, u32
 
 void ARM64XEmitter::EncodeLoadStoreRegisterOffset(u32 size, u32 opc, ARM64Reg Rt, ARM64Reg Rn,
                                                  ArithOption Rm) {
+    ASSERT_MSG(Rm.GetType() == ArithOption::TYPE_EXTENDEDREG, "Shifted registers are not supported used Indexed registers");
+
     Rt = DecodeReg(Rt);
     Rn = DecodeReg(Rn);
     ARM64Reg decoded_Rm = DecodeReg(Rm.GetReg());
@@ -995,7 +997,7 @@ void ARM64XEmitter::BL(const void* ptr) {
     EncodeUnconditionalBranchInst(1, ptr);
 }
 
-void ARM64XEmitter::QuickCallFunction(ARM64Reg scratchreg, const void* func) {
+void ARM64XEmitter::QuickCallFunction(const void* func, ARM64Reg scratchreg) {
     s64 distance = reinterpret_cast<s64>(func) - reinterpret_cast<s64>(m_code);
     distance >>= 2; // Can only branch to opcode-aligned (4) addresses
     if (!IsInRangeImm26(distance)) {
diff --git a/src/backend/A64/emitter/a64_emitter.h b/src/backend/A64/emitter/a64_emitter.h
index 7d7f6fb8..0fd2668e 100644
--- a/src/backend/A64/emitter/a64_emitter.h
+++ b/src/backend/A64/emitter/a64_emitter.h
@@ -444,7 +444,7 @@ public:
             return (m_shifttype << 22) | (m_shift << 10);
             break;
         default:
-            DEBUG_ASSERT_MSG(false, "Invalid type in GetData");
+            ASSERT_MSG(false, "Invalid type in GetData");
             break;
         }
         return 0;
@@ -512,7 +512,7 @@ public:
     const u8* AlignCodePage();
     const u8* GetCodePtr() const;
     void FlushIcache();
-    void FlushIcacheSection(u8* start, u8* end);
+    void FlushIcacheSection(const u8* start, const u8* end);
     u8* GetWritableCodePtr();
 
     // FixupBranch branching
@@ -879,10 +879,10 @@ public:
    }
 
    // Plain function call
-    void QuickCallFunction(ARM64Reg scratchreg, const void* func);
+    void QuickCallFunction(const void* func, ARM64Reg scratchreg = X16);
    template <class T>
-    void QuickCallFunction(ARM64Reg scratchreg, T func) {
-        QuickCallFunction(scratchreg, (const void*)func);
+    void QuickCallFunction(T func, ARM64Reg scratchreg = X16) {
+        QuickCallFunction((const void*)func, scratchreg);
    }
};