backend/A64/constant_pool: Clean up unused stuff
commit 481af9f823
parent f837fab9dd

@@ -45,7 +45,6 @@ namespace {
 constexpr size_t TOTAL_CODE_SIZE = 128 * 1024 * 1024;
 constexpr size_t FAR_CODE_OFFSET = 100 * 1024 * 1024;
-constexpr size_t CONSTANT_POOL_SIZE = 512 * 1024;
 
 #ifdef DYNARMIC_ENABLE_NO_EXECUTE_SUPPORT
 void ProtectMemory(const void* base, size_t size, bool is_executable) {
@@ -68,9 +67,8 @@ BlockOfCode::BlockOfCode(RunCodeCallbacks cb, JitStateInfo jsi)
         : fp_emitter(this)
         , cb(std::move(cb))
         , jsi(jsi)
-        , constant_pool(*this, CONSTANT_POOL_SIZE) {
+        , constant_pool(*this) {
     AllocCodeSpace(TOTAL_CODE_SIZE);
-    constant_pool.AllocatePool();
     EnableWriting();
     GenRunCode();
     exception_handler.Register(*this);
@@ -260,12 +258,8 @@ void BlockOfCode::LookupBlock() {
     cb.LookupBlock->EmitCall(*this);
 }
 
-u64 BlockOfCode::MConst(u64 lower, u64 upper) {
-    return constant_pool.GetConstant(lower, upper);
-}
-
 void BlockOfCode::EmitPatchLDR(Arm64Gen::ARM64Reg Rt, u64 lower, u64 upper) {
-    ASSERT_MSG(!in_far_code, "Can't patch when in far code");
+    ASSERT_MSG(!in_far_code, "Can't patch when in far code, yet!");
     constant_pool.EmitPatchLDR(Rt, lower, upper);
 }
 
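A note on the BlockOfCode hunks above: the removed MConst path asked ConstantPool::GetConstant for a 128-bit constant that had been copied into a fixed 512 KiB pool carved out of the code space at startup (AllocatePool placed the pool behind a BRK and reserved pool_size bytes), and got back the byte distance from the current code pointer to that slot — the offset a PC-relative load would use. This is exactly the machinery the commit deletes. As a standalone sketch of that removed mechanism only: a plain byte buffer stands in for the JIT code space, and PreallocatedPool, fake_code and the offsets are illustrative names, not dynarmic API.

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <iostream>
#include <map>
#include <stdexcept>
#include <tuple>
#include <vector>

// Standalone model of the removed pre-allocated pool: constants are
// deduplicated into a fixed-size buffer and addressed by their byte offset
// from a (simulated) current code pointer, as a PC-relative load would need.
class PreallocatedPool {
public:
    explicit PreallocatedPool(std::size_t size) : pool(size) {}

    // Mirrors the removed ConstantPool::GetConstant: returns the signed byte
    // offset from code_ptr to the 16-byte slot holding (lower, upper).
    std::ptrdiff_t GetConstant(const std::uint8_t* code_ptr,
                               std::uint64_t lower, std::uint64_t upper = 0) {
        const auto constant = std::make_tuple(lower, upper);
        auto iter = constant_info.find(constant);
        if (iter == constant_info.end()) {
            if (current_offset + align_size > pool.size())
                throw std::length_error("constant pool exhausted");
            std::memcpy(pool.data() + current_offset, &lower, sizeof lower);
            std::memcpy(pool.data() + current_offset + sizeof lower, &upper, sizeof upper);
            iter = constant_info.emplace(constant, current_offset).first;
            current_offset += align_size;
        }
        return static_cast<std::ptrdiff_t>(
            reinterpret_cast<std::intptr_t>(pool.data() + iter->second) -
            reinterpret_cast<std::intptr_t>(code_ptr));
    }

private:
    static constexpr std::size_t align_size = 16;  // one 128-bit slot, as in the header
    std::vector<std::uint8_t> pool;
    std::size_t current_offset = 0;
    std::map<std::tuple<std::uint64_t, std::uint64_t>, std::size_t> constant_info;
};

int main() {
    PreallocatedPool pool(512 * 1024);
    std::vector<std::uint8_t> fake_code(64);  // stand-in for already-emitted code

    // Requesting the same constant twice reuses the same slot, so the two
    // offsets differ only by how far the "code pointer" has advanced.
    const auto off1 = pool.GetConstant(fake_code.data(), 0xDEADBEEF);
    const auto off2 = pool.GetConstant(fake_code.data() + 4, 0xDEADBEEF);
    std::cout << off1 << ' ' << off2 << '\n';  // off2 == off1 - 4
}
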
@@ -12,28 +12,7 @@
 
 namespace Dynarmic::BackendA64 {
 
-ConstantPool::ConstantPool(BlockOfCode& code, size_t size) : code(code), pool_size(size) {}
-
-void ConstantPool::AllocatePool() {
-    code.BRK(0);
-    pool_begin = const_cast<u8*>(code.AlignCode16());
-    code.AllocateFromCodeSpace(pool_size);
-    current_pool_ptr = pool_begin;
-    ASSERT(code.GetCodePtr() - pool_begin == static_cast<u32>(pool_size));
-}
-
-u64 ConstantPool::GetConstant(u64 lower, u64 upper) {
-    const auto constant = std::make_tuple(lower, upper);
-    auto iter = constant_info.find(constant);
-    if (iter == constant_info.end()) {
-        ASSERT(static_cast<size_t>(current_pool_ptr - pool_begin) < pool_size);
-        std::memcpy(current_pool_ptr, &lower, sizeof(u64));
-        std::memcpy(current_pool_ptr + sizeof(u64), &upper, sizeof(u64));
-        iter = constant_info.emplace(constant, current_pool_ptr).first;
-        current_pool_ptr += align_size;
-    }
-    return reinterpret_cast<u64>(iter->second) - reinterpret_cast<u64>(code.GetCodePtr());
-}
-
+ConstantPool::ConstantPool(BlockOfCode& code) : code(code) {}
+
 void ConstantPool::EmitPatchLDR(Arm64Gen::ARM64Reg Rt, u64 lower, u64 upper) {
     const auto constant = std::make_tuple(lower, upper);
@@ -59,8 +38,7 @@ void ConstantPool::EmitPatchLDR(Arm64Gen::ARM64Reg Rt, u64 lower, u64 upper) {
 }
 
 void ConstantPool::PatchPool() {
-    u8* pool_ptr = const_cast<u8*>(code.GetCodePtr());
+    u8* pool_ptr = code.GetWritableCodePtr();
     for (PatchInfo patch : patch_info) {
         std::memcpy(pool_ptr, &std::get<0>(patch.constant), sizeof(u64));
         std::memcpy(pool_ptr + sizeof(u64), &std::get<1>(patch.constant), sizeof(u64));
@@ -81,4 +59,4 @@ void ConstantPool::Clear() {
     patch_info.clear();
 }
 
-} // namespace Dynarmic::BackendX64
+} // namespace Dynarmic::BackendA64

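What survives the cleanup in the ConstantPool hunks above is the deferred-patch path: EmitPatchLDR records where a load was emitted together with the constant it needs, and PatchPool later copies those constants to the writable code pointer so the recorded loads can be resolved. Only the copy into the pool is visible in the hunk, so the sketch below fills in the rest under stated assumptions: DeferredPool, the site indices and the printed offsets are illustrative, and the real backend rewrites the recorded load instruction instead of printing a number.

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <iostream>
#include <tuple>
#include <vector>

// Standalone model of the retained path: record patch sites while emitting,
// then append the constants after the code and resolve each site's offset.
class DeferredPool {
public:
    // Mirrors ConstantPool::PatchInfo in spirit: where the load was emitted
    // and which (lower, upper) pair it wants.
    struct PatchInfo {
        std::size_t site;
        std::tuple<std::uint64_t, std::uint64_t> constant;
    };

    // Mirrors EmitPatchLDR: remember the request; the real backend emits a
    // placeholder load at this point.
    void EmitPatchLDR(std::size_t site, std::uint64_t lower, std::uint64_t upper = 0) {
        patch_info.push_back({site, std::make_tuple(lower, upper)});
    }

    // Mirrors PatchPool: copy each recorded constant to the end of the code
    // buffer and report the literal offset each patch site would use.
    void PatchPool(std::vector<std::uint8_t>& code) {
        for (const PatchInfo& patch : patch_info) {
            const std::size_t slot = code.size();
            code.resize(slot + 16);  // one 16-byte slot per constant
            std::memcpy(code.data() + slot, &std::get<0>(patch.constant), sizeof(std::uint64_t));
            std::memcpy(code.data() + slot + sizeof(std::uint64_t),
                        &std::get<1>(patch.constant), sizeof(std::uint64_t));
            // The real backend writes this distance back into the recorded
            // load instruction instead of printing it.
            std::cout << "patch site " << patch.site << " -> literal at +"
                      << (slot - patch.site) << " bytes\n";
        }
        patch_info.clear();  // like ConstantPool::Clear()
    }

private:
    std::vector<PatchInfo> patch_info;
};

int main() {
    std::vector<std::uint8_t> code(32);  // pretend 32 bytes of code were emitted
    DeferredPool pool;
    pool.EmitPatchLDR(8, 0x1234);
    pool.EmitPatchLDR(16, 0x5678, 0x9ABC);
    pool.PatchPool(code);
}
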
@@ -20,11 +20,7 @@ class BlockOfCode;
 /// already exists, its memory location is reused.
 class ConstantPool final {
 public:
-    ConstantPool(BlockOfCode& code, size_t size);
+    ConstantPool(BlockOfCode& code);
 
-    void AllocatePool();
-
-    u64 GetConstant(u64 lower, u64 upper = 0);
-
     void EmitPatchLDR(Arm64Gen::ARM64Reg Rt, u64 lower, u64 upper = 0);
 
@@ -32,17 +28,12 @@ public:
 
     void Clear();
 
-
-
 private:
     static constexpr size_t align_size = 16; // bytes
 
     std::map<std::tuple<u64, u64>, void*> constant_info;
 
     BlockOfCode& code;
-    size_t pool_size;
-    u8* pool_begin;
-    u8* current_pool_ptr;
 
     struct PatchInfo {
         const void* ptr;

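Finally, the one-line switch in PatchPool from const_cast<u8*>(code.GetCodePtr()) to code.GetWritableCodePtr() drops the const_cast in favour of the emitter's writable view of the code buffer, which matters once DYNARMIC_ENABLE_NO_EXECUTE_SUPPORT and the ProtectMemory helper visible in the first hunk keep the region from being writable and executable at the same time. As a rough illustration of what such a helper typically does on a POSIX system — an assumption for illustration, not the project's actual implementation:

#include <sys/mman.h>
#include <cstddef>

// Illustrative W^X toggle: a code region is either writable (while emitting
// and patching constants) or executable (while running), never both.
// base must be page-aligned; the mprotect return value check is elided here.
static void ProtectMemory(const void* base, std::size_t size, bool is_executable) {
    const int prot = is_executable ? (PROT_READ | PROT_EXEC)
                                   : (PROT_READ | PROT_WRITE);
    mprotect(const_cast<void*>(base), size, prot);
}

int main() {
    const std::size_t page = 4096;
    void* region = mmap(nullptr, page, PROT_READ | PROT_WRITE,
                        MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (region == MAP_FAILED)
        return 1;
    // ... emit and patch code while the region is writable ...
    ProtectMemory(region, page, true);   // flip to read+execute before running
    ProtectMemory(region, page, false);  // flip back to read+write to patch again
    munmap(region, page);
}
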