diff --git a/src/backend/A64/block_of_code.cpp b/src/backend/A64/block_of_code.cpp
index 7997b9c0..4587c005 100644
--- a/src/backend/A64/block_of_code.cpp
+++ b/src/backend/A64/block_of_code.cpp
@@ -45,7 +45,6 @@ namespace {
 
 constexpr size_t TOTAL_CODE_SIZE = 128 * 1024 * 1024;
 constexpr size_t FAR_CODE_OFFSET = 100 * 1024 * 1024;
-constexpr size_t CONSTANT_POOL_SIZE = 512 * 1024;
 
 #ifdef DYNARMIC_ENABLE_NO_EXECUTE_SUPPORT
 void ProtectMemory(const void* base, size_t size, bool is_executable) {
@@ -68,9 +67,8 @@ BlockOfCode::BlockOfCode(RunCodeCallbacks cb, JitStateInfo jsi)
     : fp_emitter(this)
     , cb(std::move(cb))
     , jsi(jsi)
-    , constant_pool(*this, CONSTANT_POOL_SIZE) {
+    , constant_pool(*this) {
     AllocCodeSpace(TOTAL_CODE_SIZE);
-    constant_pool.AllocatePool();
     EnableWriting();
     GenRunCode();
     exception_handler.Register(*this);
@@ -260,12 +258,8 @@ void BlockOfCode::LookupBlock() {
     cb.LookupBlock->EmitCall(*this);
 }
 
-u64 BlockOfCode::MConst(u64 lower, u64 upper) {
-    return constant_pool.GetConstant(lower, upper);
-}
-
 void BlockOfCode::EmitPatchLDR(Arm64Gen::ARM64Reg Rt, u64 lower, u64 upper) {
-    ASSERT_MSG(!in_far_code, "Can't patch when in far code");
+    ASSERT_MSG(!in_far_code, "Can't patch when in far code, yet!");
     constant_pool.EmitPatchLDR(Rt, lower, upper);
 }
 
diff --git a/src/backend/A64/constant_pool.cpp b/src/backend/A64/constant_pool.cpp
index 784124fc..dfec2c53 100644
--- a/src/backend/A64/constant_pool.cpp
+++ b/src/backend/A64/constant_pool.cpp
@@ -12,28 +12,7 @@
 
 namespace Dynarmic::BackendA64 {
 
-ConstantPool::ConstantPool(BlockOfCode& code, size_t size) : code(code), pool_size(size) {}
-
-void ConstantPool::AllocatePool() {
-    code.BRK(0);
-    pool_begin = const_cast<u8*>(code.AlignCode16());
-    code.AllocateFromCodeSpace(pool_size);
-    current_pool_ptr = pool_begin;
-    ASSERT(code.GetCodePtr() - pool_begin == static_cast<ptrdiff_t>(pool_size));
-}
-
-u64 ConstantPool::GetConstant(u64 lower, u64 upper) {
-    const auto constant = std::make_tuple(lower, upper);
-    auto iter = constant_info.find(constant);
-    if (iter == constant_info.end()) {
-        ASSERT(static_cast<size_t>(current_pool_ptr - pool_begin) < pool_size);
-        std::memcpy(current_pool_ptr, &lower, sizeof(u64));
-        std::memcpy(current_pool_ptr + sizeof(u64), &upper, sizeof(u64));
-        iter = constant_info.emplace(constant, current_pool_ptr).first;
-        current_pool_ptr += align_size;
-    }
-    return reinterpret_cast<u64>(iter->second) - reinterpret_cast<u64>(code.GetCodePtr());
-}
+ConstantPool::ConstantPool(BlockOfCode& code) : code(code) {}
 
 void ConstantPool::EmitPatchLDR(Arm64Gen::ARM64Reg Rt, u64 lower, u64 upper) {
     const auto constant = std::make_tuple(lower, upper);
@@ -59,8 +38,7 @@ void ConstantPool::EmitPatchLDR(Arm64Gen::ARM64Reg Rt, u64 lower, u64 upper) {
 }
 
 void ConstantPool::PatchPool() {
-
-    u8* pool_ptr = const_cast<u8*>(code.GetCodePtr());
+    u8* pool_ptr = code.GetWritableCodePtr();
     for (PatchInfo patch : patch_info) {
         std::memcpy(pool_ptr, &std::get<0>(patch.constant), sizeof(u64));
         std::memcpy(pool_ptr + sizeof(u64), &std::get<1>(patch.constant), sizeof(u64));
@@ -81,4 +59,4 @@ void ConstantPool::Clear() {
     patch_info.clear();
 }
 
-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::BackendA64
diff --git a/src/backend/A64/constant_pool.h b/src/backend/A64/constant_pool.h
index 43756055..ede34e5b 100644
--- a/src/backend/A64/constant_pool.h
+++ b/src/backend/A64/constant_pool.h
@@ -20,11 +20,7 @@ class BlockOfCode;
 /// already exists, its memory location is reused.
 class ConstantPool final {
 public:
-    ConstantPool(BlockOfCode& code, size_t size);
-
-    void AllocatePool();
-
-    u64 GetConstant(u64 lower, u64 upper = 0);
+    ConstantPool(BlockOfCode& code);
 
     void EmitPatchLDR(Arm64Gen::ARM64Reg Rt, u64 lower, u64 upper = 0);
 
@@ -32,17 +28,12 @@ public:
 
     void Clear();
 
-
-
 private:
     static constexpr size_t align_size = 16; // bytes
 
     std::map<std::tuple<u64, u64>, void*> constant_info;
 
    BlockOfCode& code;
-    size_t pool_size;
-    u8* pool_begin;
-    u8* current_pool_ptr;
 
    struct PatchInfo {
        const void* ptr;
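
Note: the sketch below is illustrative only; it is not part of the change and not dynarmic API. Every name in it (MiniConstantPool, Fixup, RecordLdr, Patch) is invented. It models the scheme the diff moves to: instead of reserving a fixed 512 KiB pool up front (the removed AllocatePool/GetConstant path), EmitPatchLDR records where each placeholder load was emitted, and PatchPool later appends the 128-bit constants after the generated code and back-patches each recorded instruction, presumably so the constants stay within the roughly +/-1 MiB reach of an AArch64 LDR (literal).

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <utility>
#include <vector>

// Standalone model of the record-then-patch constant pool (hypothetical names).
class MiniConstantPool {
public:
    explicit MiniConstantPool(std::vector<std::uint8_t>& buffer) : buffer(buffer) {}

    // Analogue of EmitPatchLDR: emit a 4-byte placeholder instruction and
    // remember its position together with the (lower, upper) constant halves.
    void RecordLdr(std::uint64_t lower, std::uint64_t upper = 0) {
        fixups.push_back({buffer.size(), {lower, upper}});
        buffer.insert(buffer.end(), 4, 0x00); // placeholder for the load
    }

    // Analogue of PatchPool: append each constant after the emitted code and
    // patch its placeholder with the distance to it. A real backend would
    // encode that distance into the LDR (literal) word-offset field rather
    // than storing a raw byte offset as done here.
    void Patch() {
        for (const Fixup& fixup : fixups) {
            const std::size_t constant_pos = buffer.size();
            buffer.resize(buffer.size() + 2 * sizeof(std::uint64_t));
            std::memcpy(buffer.data() + constant_pos, &fixup.constant.first, sizeof(std::uint64_t));
            std::memcpy(buffer.data() + constant_pos + sizeof(std::uint64_t), &fixup.constant.second, sizeof(std::uint64_t));

            const auto distance = static_cast<std::int32_t>(constant_pos - fixup.ldr_pos);
            std::memcpy(buffer.data() + fixup.ldr_pos, &distance, sizeof(distance));
        }
        fixups.clear(); // forget the fixups once they are applied
    }

private:
    struct Fixup {
        std::size_t ldr_pos;                              // where the placeholder lives
        std::pair<std::uint64_t, std::uint64_t> constant; // lower/upper 64-bit halves
    };

    std::vector<std::uint8_t>& buffer;
    std::vector<Fixup> fixups;
};

In the diff above, BlockOfCode::EmitPatchLDR plays the RecordLdr role, ConstantPool::PatchPool plays the Patch role (writing through GetWritableCodePtr), and ConstantPool::Clear discards the recorded patch_info.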