diff --git a/src/backend/x64/a32_emit_x64.cpp b/src/backend/x64/a32_emit_x64.cpp
index 40de5207..d8649caf 100644
--- a/src/backend/x64/a32_emit_x64.cpp
+++ b/src/backend/x64/a32_emit_x64.cpp
@@ -791,9 +791,11 @@ void A32EmitX64::EmitA32CallSupervisor(A32EmitContext& ctx, IR::Inst* inst) {
     code.sub(code.ABI_PARAM2, qword[rsp + ABI_SHADOW_SPACE + offsetof(StackLayout, cycles_remaining)]);
     Devirtualize<&A32::UserCallbacks::AddTicks>(conf.callbacks).EmitCall(code);
     ctx.reg_alloc.EndOfAllocScope();
+
     auto args = ctx.reg_alloc.GetArgumentInfo(inst);
     ctx.reg_alloc.HostCall(nullptr, {}, args[0]);
     Devirtualize<&A32::UserCallbacks::CallSVC>(conf.callbacks).EmitCall(code);
+
     Devirtualize<&A32::UserCallbacks::GetTicksRemaining>(conf.callbacks).EmitCall(code);
     code.mov(qword[rsp + ABI_SHADOW_SPACE + offsetof(StackLayout, cycles_to_run)], code.ABI_RETURN);
     code.mov(qword[rsp + ABI_SHADOW_SPACE + offsetof(StackLayout, cycles_remaining)], code.ABI_RETURN);
@@ -802,6 +804,13 @@ void A32EmitX64::EmitA32CallSupervisor(A32EmitContext& ctx, IR::Inst* inst) {
 
 void A32EmitX64::EmitA32ExceptionRaised(A32EmitContext& ctx, IR::Inst* inst) {
     ctx.reg_alloc.HostCall(nullptr);
+
+    code.SwitchMxcsrOnExit();
+    code.mov(code.ABI_PARAM2, qword[rsp + ABI_SHADOW_SPACE + offsetof(StackLayout, cycles_to_run)]);
+    code.sub(code.ABI_PARAM2, qword[rsp + ABI_SHADOW_SPACE + offsetof(StackLayout, cycles_remaining)]);
+    Devirtualize<&A32::UserCallbacks::AddTicks>(conf.callbacks).EmitCall(code);
+    ctx.reg_alloc.EndOfAllocScope();
+
     auto args = ctx.reg_alloc.GetArgumentInfo(inst);
     ASSERT(args[0].IsImmediate() && args[1].IsImmediate());
     const u32 pc = args[0].GetImmediateU32();
@@ -810,6 +819,11 @@ void A32EmitX64::EmitA32ExceptionRaised(A32EmitContext& ctx, IR::Inst* inst) {
         code.mov(param[0], pc);
         code.mov(param[1], exception);
     });
+
+    Devirtualize<&A32::UserCallbacks::GetTicksRemaining>(conf.callbacks).EmitCall(code);
+    code.mov(qword[rsp + ABI_SHADOW_SPACE + offsetof(StackLayout, cycles_to_run)], code.ABI_RETURN);
+    code.mov(qword[rsp + ABI_SHADOW_SPACE + offsetof(StackLayout, cycles_remaining)], code.ABI_RETURN);
+    code.SwitchMxcsrOnEntry();
 }
 
 static u32 GetFpscrImpl(A32JitState* jit_state) {
diff --git a/src/frontend/A32/translate/conditional_state.cpp b/src/frontend/A32/translate/conditional_state.cpp
index 34b801bf..884a31be 100644
--- a/src/frontend/A32/translate/conditional_state.cpp
+++ b/src/frontend/A32/translate/conditional_state.cpp
@@ -11,6 +11,7 @@
 #include "common/common_types.h"
 #include "frontend/A32/ir_emitter.h"
 #include "frontend/A32/translate/conditional_state.h"
+#include "frontend/A32/translate/impl/translate.h"
 #include "ir/cond.h"
 
 namespace Dynarmic::A32 {
@@ -25,29 +26,29 @@ bool CondCanContinue(ConditionalState cond_state, const A32::IREmitter& ir) {
     return std::all_of(ir.block.begin(), ir.block.end(), [](const IR::Inst& inst) { return !inst.WritesToCPSR(); });
 }
 
-bool IsConditionPassed(IR::Cond cond, ConditionalState& cond_state, A32::IREmitter& ir, int instruction_size) {
-    ASSERT_MSG(cond_state != ConditionalState::Break,
+bool IsConditionPassed(TranslatorVisitor& v, IR::Cond cond) {
+    ASSERT_MSG(v.cond_state != ConditionalState::Break,
We requested a break but that wasn't honored."); if (cond == IR::Cond::NV) { // NV conditional is obsolete - ir.ExceptionRaised(Exception::UnpredictableInstruction); + v.RaiseException(Exception::UnpredictableInstruction); return false; } - if (cond_state == ConditionalState::Translating) { - if (ir.block.ConditionFailedLocation() != ir.current_location || cond == IR::Cond::AL) { - cond_state = ConditionalState::Trailing; + if (v.cond_state == ConditionalState::Translating) { + if (v.ir.block.ConditionFailedLocation() != v.ir.current_location || cond == IR::Cond::AL) { + v.cond_state = ConditionalState::Trailing; } else { - if (cond == ir.block.GetCondition()) { - ir.block.SetConditionFailedLocation(ir.current_location.AdvancePC(instruction_size).AdvanceIT()); - ir.block.ConditionFailedCycleCount()++; + if (cond == v.ir.block.GetCondition()) { + v.ir.block.SetConditionFailedLocation(v.ir.current_location.AdvancePC(v.current_instruction_size).AdvanceIT()); + v.ir.block.ConditionFailedCycleCount()++; return true; } // cond has changed, abort - cond_state = ConditionalState::Break; - ir.SetTerm(IR::Term::LinkBlockFast{ir.current_location}); + v.cond_state = ConditionalState::Break; + v.ir.SetTerm(IR::Term::LinkBlockFast{v.ir.current_location}); return false; } } @@ -59,20 +60,20 @@ bool IsConditionPassed(IR::Cond cond, ConditionalState& cond_state, A32::IREmitt // non-AL cond - if (!ir.block.empty()) { + if (!v.ir.block.empty()) { // We've already emitted instructions. Quit for now, we'll make a new block here later. - cond_state = ConditionalState::Break; - ir.SetTerm(IR::Term::LinkBlockFast{ir.current_location}); + v.cond_state = ConditionalState::Break; + v.ir.SetTerm(IR::Term::LinkBlockFast{v.ir.current_location}); return false; } // We've not emitted instructions yet. // We'll emit one instruction, and set the block-entry conditional appropriately. - cond_state = ConditionalState::Translating; - ir.block.SetCondition(cond); - ir.block.SetConditionFailedLocation(ir.current_location.AdvancePC(instruction_size).AdvanceIT()); - ir.block.ConditionFailedCycleCount() = ir.block.CycleCount() + 1; + v.cond_state = ConditionalState::Translating; + v.ir.block.SetCondition(cond); + v.ir.block.SetConditionFailedLocation(v.ir.current_location.AdvancePC(v.current_instruction_size).AdvanceIT()); + v.ir.block.ConditionFailedCycleCount() = v.ir.block.CycleCount() + 1; return true; } diff --git a/src/frontend/A32/translate/conditional_state.h b/src/frontend/A32/translate/conditional_state.h index 0c3b1e19..bba7f05b 100644 --- a/src/frontend/A32/translate/conditional_state.h +++ b/src/frontend/A32/translate/conditional_state.h @@ -14,6 +14,7 @@ enum class Cond; namespace Dynarmic::A32 { class IREmitter; +struct TranslatorVisitor; enum class ConditionalState { /// We haven't met any conditional instructions yet. 
@@ -27,6 +28,6 @@ enum class ConditionalState {
 };
 
 bool CondCanContinue(ConditionalState cond_state, const A32::IREmitter& ir);
-bool IsConditionPassed(IR::Cond cond, ConditionalState& cond_state, A32::IREmitter& ir, int instruction_size);
+bool IsConditionPassed(TranslatorVisitor& v, IR::Cond cond);
 
 } // namespace Dynarmic::A32
diff --git a/src/frontend/A32/translate/impl/exception_generating.cpp b/src/frontend/A32/translate/impl/exception_generating.cpp
index 02f212f3..34152fe3 100644
--- a/src/frontend/A32/translate/impl/exception_generating.cpp
+++ b/src/frontend/A32/translate/impl/exception_generating.cpp
@@ -20,9 +20,7 @@ bool TranslatorVisitor::arm_BKPT(Cond cond, Imm<12> /*imm12*/, Imm<4> /*imm4*/)
         return true;
     }
 
-    ir.ExceptionRaised(Exception::Breakpoint);
-    ir.SetTerm(IR::Term::CheckHalt{IR::Term::ReturnToDispatch{}});
-    return false;
+    return RaiseException(Exception::Breakpoint);
 }
 
 // SVC<c> #<imm24>
diff --git a/src/frontend/A32/translate/impl/thumb16.cpp b/src/frontend/A32/translate/impl/thumb16.cpp
index 61c1f77b..d05a7255 100644
--- a/src/frontend/A32/translate/impl/thumb16.cpp
+++ b/src/frontend/A32/translate/impl/thumb16.cpp
@@ -903,11 +903,7 @@ bool TranslatorVisitor::thumb16_REVSH(Reg m, Reg d) {
 
 // BKPT #<imm8>
 bool TranslatorVisitor::thumb16_BKPT(Imm<8> /*imm8*/) {
-    ir.ExceptionRaised(Exception::Breakpoint);
-    ir.UpdateUpperLocationDescriptor();
-    ir.LoadWritePC(ir.Imm32(ir.current_location.PC()));
-    ir.SetTerm(IR::Term::CheckHalt{IR::Term::ReturnToDispatch{}});
-    return false;
+    return RaiseException(Exception::Breakpoint);
 }
 
 // STM <Rn>!, <reg_list>
diff --git a/src/frontend/A32/translate/impl/translate.cpp b/src/frontend/A32/translate/impl/translate.cpp
index 0f43a239..5f9dc9b5 100644
--- a/src/frontend/A32/translate/impl/translate.cpp
+++ b/src/frontend/A32/translate/impl/translate.cpp
@@ -12,12 +12,12 @@
 namespace Dynarmic::A32 {
 
 bool TranslatorVisitor::ArmConditionPassed(Cond cond) {
-    return IsConditionPassed(cond, cond_state, ir, 4);
+    return IsConditionPassed(*this, cond);
 }
 
 bool TranslatorVisitor::ThumbConditionPassed() {
     const Cond cond = ir.current_location.IT().Cond();
-    return IsConditionPassed(cond, cond_state, ir, static_cast<int>(current_instruction_size));
+    return IsConditionPassed(*this, cond);
 }
 
 bool TranslatorVisitor::VFPConditionPassed(Cond cond) {
@@ -46,6 +46,7 @@ bool TranslatorVisitor::DecodeError() {
 }
 
 bool TranslatorVisitor::RaiseException(Exception exception) {
+    ir.UpdateUpperLocationDescriptor();
     ir.BranchWritePC(ir.Imm32(ir.current_location.PC() + static_cast<u32>(current_instruction_size)));
     ir.ExceptionRaised(exception);
     ir.SetTerm(IR::Term::CheckHalt{IR::Term::ReturnToDispatch{}});
diff --git a/src/frontend/A64/translate/impl/exception_generating.cpp b/src/frontend/A64/translate/impl/exception_generating.cpp
index cb39ea7b..8ea235d9 100644
--- a/src/frontend/A64/translate/impl/exception_generating.cpp
+++ b/src/frontend/A64/translate/impl/exception_generating.cpp
@@ -7,10 +7,8 @@
 
 namespace Dynarmic::A64 {
 
-bool TranslatorVisitor::BRK([[maybe_unused]] Imm<16> imm16) {
-    ir.ExceptionRaised(Exception::Breakpoint);
-    ir.SetTerm(IR::Term::CheckHalt{IR::Term::ReturnToDispatch{}});
-    return false;
+bool TranslatorVisitor::BRK(Imm<16> /*imm16*/) {
+    return RaiseException(Exception::Breakpoint);
 }
 
 bool TranslatorVisitor::SVC(Imm<16> imm16) {
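
For reference, the host-call ordering that the patched EmitA32ExceptionRaised generates can be written out as ordinary C++. This is an illustrative sketch of the sequence in the first hunk, not dynarmic source: elapsed ticks are flushed through the user's AddTicks callback before ExceptionRaised fires, and the cycle budget is re-read from GetTicksRemaining afterwards, mirroring what EmitA32CallSupervisor already does for SVCs. The free function name and the reference parameters standing in for the StackLayout slots are invented for the example; the callback signatures and include path are taken from dynarmic's public A32 interface as assumed here.

#include <cstdint>

#include <dynarmic/A32/config.h>  // Dynarmic::A32::UserCallbacks, Dynarmic::A32::Exception

// Sketch of the call order emitted by EmitA32ExceptionRaised after this patch.
// cycles_to_run / cycles_remaining stand in for the StackLayout stack slots.
void ExceptionRaisedSequence(Dynarmic::A32::UserCallbacks& cb,
                             std::uint64_t& cycles_to_run,
                             std::uint64_t& cycles_remaining,
                             std::uint32_t pc,
                             Dynarmic::A32::Exception exception) {
    // code.SwitchMxcsrOnExit(): the host MXCSR is restored before calling out.
    cb.AddTicks(cycles_to_run - cycles_remaining);  // flush cycles executed so far
    cb.ExceptionRaised(pc, exception);
    // Both stack slots are refreshed from the same return value.
    cycles_to_run = cycles_remaining = cb.GetTicksRemaining();
    // code.SwitchMxcsrOnEntry(): the guest MXCSR is re-installed before resuming JIT code.
}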