Arm64Emitter: Allow non-optimizing LoadConstant

This is pulled from the code cache PR. It will be necessary for supporting
relocations.

It is not currently being used, but it will be once we have code caching in place.
Ryan Houdek 2022-02-10 19:58:55 -08:00
parent 43fada7555
commit cbce06d012
2 changed files with 14 additions and 2 deletions


@@ -28,20 +28,32 @@ Arm64Emitter::Arm64Emitter(FEXCore::Context::Context *ctx, size_t size) : vixl::
   SetCPUFeatures(Features);
 }

-void Arm64Emitter::LoadConstant(vixl::aarch64::Register Reg, uint64_t Constant) {
+void Arm64Emitter::LoadConstant(vixl::aarch64::Register Reg, uint64_t Constant, bool NOPPad) {
   bool Is64Bit = Reg.IsX();
   int Segments = Is64Bit ? 4 : 2;

   if (Is64Bit && ((~Constant) >> 16) == 0) {
     movn(Reg, (~Constant) & 0xFFFF);
+
+    if (NOPPad) {
+      nop(); nop(); nop();
+    }
     return;
   }

+  int NumMoves = 1;
   movz(Reg, (Constant) & 0xFFFF, 0);
   for (int i = 1; i < Segments; ++i) {
     uint16_t Part = (Constant >> (i * 16)) & 0xFFFF;
     if (Part) {
       movk(Reg, Part, i * 16);
+      ++NumMoves;
     }
   }
+
+  if (NOPPad) {
+    for (int i = NumMoves; i < Segments; ++i) {
+      nop();
+    }
+  }
 }
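
For context (not part of the diff): with NOPPad set, LoadConstant pads the movz/movk slots it skips with nops, so the sequence always occupies Segments instructions, four for an X register and two for a W register. A minimal sketch of the resulting sequences, assuming the Arm64Emitter API shown above and vixl's x0 register alias; the comments describe the instructions each call would emit:

    LoadConstant(x0, 0x1234);                  // movz x0, #0x1234            (length depends on the constant)
    LoadConstant(x0, 0x1234, true);            // movz x0, #0x1234
                                               // nop; nop; nop               (always 4 instructions)
    LoadConstant(x0, 0xDEADBEEFCAFEULL, true); // movz x0, #0xCAFE
                                               // movk x0, #0xBEEF, lsl #16
                                               // movk x0, #0xDEAD, lsl #32
                                               // nop                         (padded back up to 4)

Because the padded form has a fixed length, a later relocation pass can rewrite the constant in place without shifting any of the code that follows it.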


@@ -61,7 +61,7 @@ protected:
   Arm64Emitter(FEXCore::Context::Context *ctx, size_t size);
   vixl::aarch64::CPU CPU;
-  void LoadConstant(vixl::aarch64::Register Reg, uint64_t Constant);
+  void LoadConstant(vixl::aarch64::Register Reg, uint64_t Constant, bool NOPPad = false);
   void SpillStaticRegs(bool FPRs = true, uint32_t SpillMask = ~0U);
   void FillStaticRegs(bool FPRs = true, uint32_t FillMask = ~0U);
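
Note that the header gives NOPPad a default of false, so existing callers keep the size-optimized emission and only relocation-aware call sites need to opt in. A rough, hypothetical sketch of how such a call site might rely on the fixed length, assuming vixl's GetCursorAddress<>() accessor and the placeholder names TMP1 and GuestRIP (neither is from this commit):

    // Inside a subclass of Arm64Emitter: remember where the constant load
    // starts so a relocation pass could patch it later.
    auto *Start = GetCursorAddress<uint8_t *>();
    LoadConstant(TMP1, GuestRIP, true);   // TMP1 and GuestRIP are placeholders
    auto *End = GetCursorAddress<uint8_t *>();
    // With NOPPad, an X-register load always spans exactly 4 instructions:
    // End - Start == 4 * sizeof(uint32_t), regardless of GuestRIP's value.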