Mirror of https://github.com/ryujinx-mirror/ryujinx.git (synced 2024-12-23 08:05:48 +00:00)
Validate CPU virtual addresses on access (#1987)
* Enable PTE null checks again
* Do address validation on EmitPtPointerLoad, and make it branchless
* PTC version increment
* Mask of pointer tag for exclusive access
* Move mask to the correct place

Co-authored-by: LDj3SNuD <35856442+LDj3SNuD@users.noreply.github.com>
This commit is contained in:
parent 6f1d964801
commit 715b605e95

4 changed files with 52 additions and 122 deletions
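The headline change is the branchless validation: instead of emitting an explicit range/alignment test plus a throwing call on every access, EmitPtPointerLoad now rotates the address so that any misalignment bits land in the high bits, then turns a single signed comparison into an all-ones/all-zeros mask that forces the PTE to zero for invalid addresses (the existing null-PTE check then raises the exception). Below is a minimal scalar sketch of that arithmetic in plain C#; the constants and names are illustrative, not the emitter code, which builds the same operations on IR Operands:

    using System;

    static class BranchlessCheckDemo
    {
        // Hypothetical configuration: 36-bit address space, 4 KiB pages.
        const int AddressSpaceBits = 36;
        const int PageBits = 12;

        // Scalar model of the mask the emitter builds: returns the PTE
        // unchanged when `address` is in range and aligned to the
        // (1 << size)-byte access, and 0 otherwise.
        static long MaskPte(long pte, ulong address, int size)
        {
            int ptLevelBits = AddressSpaceBits - PageBits;
            long ptLevelSize = 1L << ptLevelBits;

            // Rotate right by `size`: any misalignment bits move to the top
            // of the word, so one range test also catches unaligned accesses.
            ulong addrRotated = size != 0 ? (address >> size) | (address << (64 - size)) : address;

            // Page index of the access (with the rotated alignment bits on top).
            ulong addrShifted = addrRotated >> (PageBits - size);

            // Negative only when the page index is valid; the arithmetic shift
            // then yields all-ones (keep PTE) or all-zeros (force PTE to 0).
            long mask = ((long)addrShifted - ptLevelSize) >> 63;

            return pte & mask;
        }

        static void Main()
        {
            const long pte = 0x12345000;

            Console.WriteLine(MaskPte(pte, 0x8000, size: 2) != 0);     // True: in range, aligned
            Console.WriteLine(MaskPte(pte, 0x8002, size: 2) != 0);     // False: misaligned
            Console.WriteLine(MaskPte(pte, 1UL << 40, size: 2) != 0);  // False: out of range
        }
    }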
@@ -19,19 +19,8 @@ namespace ARMeilleure.Instructions

             if (size == 4)
             {
-                Operand isUnalignedAddr = InstEmitMemoryHelper.EmitAddressCheck(context, address, size);
-
-                Operand lblFastPath = Label();
-
-                context.BranchIfFalse(lblFastPath, isUnalignedAddr);
-
-                // The call is not expected to return (it should throw).
-                context.Call(typeof(NativeInterface).GetMethod(nameof(NativeInterface.ThrowInvalidMemoryAccess)), address);
-
-                context.MarkLabel(lblFastPath);
-
                 // Only 128-bit CAS is guaranteed to have a atomic load.
-                Operand physAddr = InstEmitMemoryHelper.EmitPtPointerLoad(context, address, null, write: false);
+                Operand physAddr = InstEmitMemoryHelper.EmitPtPointerLoad(context, address, null, write: false, 4);

                 Operand zero = context.VectorZero();

@@ -119,20 +108,8 @@ namespace ARMeilleure.Instructions

             context.BranchIfTrue(lblExit, exFailed);

-            // STEP 2: We have exclusive access, make sure that the address is valid.
-            Operand isUnalignedAddr = InstEmitMemoryHelper.EmitAddressCheck(context, address, size);
-
-            Operand lblFastPath = Label();
-
-            context.BranchIfFalse(lblFastPath, isUnalignedAddr);
-
-            // The call is not expected to return (it should throw).
-            context.Call(typeof(NativeInterface).GetMethod(nameof(NativeInterface.ThrowInvalidMemoryAccess)), address);
-
-            // STEP 3: We have exclusive access and the address is valid, attempt the store using CAS.
-            context.MarkLabel(lblFastPath);
-
-            Operand physAddr = InstEmitMemoryHelper.EmitPtPointerLoad(context, address, null, write: true);
+            // STEP 2: We have exclusive access and the address is valid, attempt the store using CAS.
+            Operand physAddr = InstEmitMemoryHelper.EmitPtPointerLoad(context, address, null, write: true, size);

             Operand exValuePtr = context.Add(arg0, Const((long)NativeContext.GetExclusiveValueOffset()));
             Operand exValue = size switch

@@ -151,7 +128,7 @@ namespace ARMeilleure.Instructions
                 _ => context.CompareAndSwap(physAddr, exValue, value)
             };

-            // STEP 4: Check if we succeeded by comparing expected and in-memory values.
+            // STEP 3: Check if we succeeded by comparing expected and in-memory values.
             Operand storeFailed;

             if (size == 4)
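The renumbered STEP comments track the tightened store-exclusive sequence: check the exclusive monitor, attempt the store with a compare-and-swap against the remembered value, then test whether it took effect. A conceptual C# analogue using Interlocked follows; the names are hypothetical and the JIT emits this logic inline against NativeContext state rather than calling a method:

    using System;
    using System.Threading;

    static class StoreExclusiveModel
    {
        // `exclusiveAddress`/`exclusiveValue` stand in for the monitor state
        // kept in NativeContext; all names here are illustrative.
        static bool TryStoreExclusive(ref long memory, long newValue, ulong address,
                                      ulong exclusiveAddress, long exclusiveValue)
        {
            // STEP 1: fail if we no longer hold exclusive access to this address.
            if (exclusiveAddress != address)
            {
                return false;
            }

            // STEP 2: attempt the store using CAS against the value observed by
            // the earlier load-exclusive (address validity has already been
            // established by EmitPtPointerLoad at this point).
            long inMemory = Interlocked.CompareExchange(ref memory, newValue, exclusiveValue);

            // STEP 3: the store succeeded only if memory still held that value.
            return inMemory == exclusiveValue;
        }

        static void Main()
        {
            long memory = 10;

            // Monitor claims address 0x1000 with observed value 10: CAS succeeds.
            Console.WriteLine(TryStoreExclusive(ref memory, 42, 0x1000, 0x1000, 10)); // True
            Console.WriteLine(memory); // 42
        }
    }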
@@ -127,11 +127,7 @@ namespace ARMeilleure.Instructions
             Operand lblSlowPath = Label();
             Operand lblEnd = Label();

-            Operand isUnalignedAddr = EmitAddressCheck(context, address, size);
-
-            context.BranchIfTrue(lblSlowPath, isUnalignedAddr);
-
-            Operand physAddr = EmitPtPointerLoad(context, address, lblSlowPath, write: false);
+            Operand physAddr = EmitPtPointerLoad(context, address, lblSlowPath, write: false, size);

             Operand value = null;

@@ -161,18 +157,7 @@ namespace ARMeilleure.Instructions
                     throw new ArgumentOutOfRangeException(nameof(size));
             }

-            Operand isUnalignedAddr = EmitAddressCheck(context, address, size);
-
-            Operand lblFastPath = Label();
-
-            context.BranchIfFalse(lblFastPath, isUnalignedAddr, BasicBlockFrequency.Cold);
-
-            // The call is not expected to return (it should throw).
-            context.Call(typeof(NativeInterface).GetMethod(nameof(NativeInterface.ThrowInvalidMemoryAccess)), address);
-
-            context.MarkLabel(lblFastPath);
-
-            Operand physAddr = EmitPtPointerLoad(context, address, null, write: false);
+            Operand physAddr = EmitPtPointerLoad(context, address, null, write: false, size);

             return size switch
             {
@@ -195,11 +180,7 @@ namespace ARMeilleure.Instructions
             Operand lblSlowPath = Label();
             Operand lblEnd = Label();

-            Operand isUnalignedAddr = EmitAddressCheck(context, address, size);
-
-            context.BranchIfTrue(lblSlowPath, isUnalignedAddr);
-
-            Operand physAddr = EmitPtPointerLoad(context, address, lblSlowPath, write: false);
+            Operand physAddr = EmitPtPointerLoad(context, address, lblSlowPath, write: false, size);

             Operand value = null;

@@ -233,11 +214,7 @@ namespace ARMeilleure.Instructions
             Operand lblSlowPath = Label();
             Operand lblEnd = Label();

-            Operand isUnalignedAddr = EmitAddressCheck(context, address, size);
-
-            context.BranchIfTrue(lblSlowPath, isUnalignedAddr);
-
-            Operand physAddr = EmitPtPointerLoad(context, address, lblSlowPath, write: true);
+            Operand physAddr = EmitPtPointerLoad(context, address, lblSlowPath, write: true, size);

             Operand value = GetInt(context, rt);

@@ -270,18 +247,7 @@ namespace ARMeilleure.Instructions
                     throw new ArgumentOutOfRangeException(nameof(size));
             }

-            Operand isUnalignedAddr = EmitAddressCheck(context, address, size);
-
-            Operand lblFastPath = Label();
-
-            context.BranchIfFalse(lblFastPath, isUnalignedAddr, BasicBlockFrequency.Cold);
-
-            // The call is not expected to return (it should throw).
-            context.Call(typeof(NativeInterface).GetMethod(nameof(NativeInterface.ThrowInvalidMemoryAccess)), address);
-
-            context.MarkLabel(lblFastPath);
-
-            Operand physAddr = EmitPtPointerLoad(context, address, null, write: true);
+            Operand physAddr = EmitPtPointerLoad(context, address, null, write: true, size);

             if (size < 3 && value.Type == OperandType.I64)
             {
@@ -312,11 +278,7 @@ namespace ARMeilleure.Instructions
             Operand lblSlowPath = Label();
             Operand lblEnd = Label();

-            Operand isUnalignedAddr = EmitAddressCheck(context, address, size);
-
-            context.BranchIfTrue(lblSlowPath, isUnalignedAddr);
-
-            Operand physAddr = EmitPtPointerLoad(context, address, lblSlowPath, write: true);
+            Operand physAddr = EmitPtPointerLoad(context, address, lblSlowPath, write: true, size);

             Operand value = GetVec(rt);

@@ -338,61 +300,49 @@ namespace ARMeilleure.Instructions
             context.MarkLabel(lblEnd);
         }

-        public static Operand EmitAddressCheck(ArmEmitterContext context, Operand address, int size)
+        public static Operand EmitPtPointerLoad(ArmEmitterContext context, Operand address, Operand lblSlowPath, bool write, int size)
         {
-            ulong addressCheckMask = ~((1UL << context.Memory.AddressSpaceBits) - 1);
-
-            addressCheckMask |= (1u << size) - 1;
-
-            return context.BitwiseAnd(address, Const(address.Type, (long)addressCheckMask));
-        }
-
-        public static Operand EmitPtPointerLoad(ArmEmitterContext context, Operand address, Operand lblSlowPath, bool write)
-        {
-            int ptLevelBits = context.Memory.AddressSpaceBits - 12; // 12 = Number of page bits.
+            int ptLevelBits = context.Memory.AddressSpaceBits - PageBits;
             int ptLevelSize = 1 << ptLevelBits;
             int ptLevelMask = ptLevelSize - 1;

+            Operand addrRotated = size != 0 ? context.RotateRight(address, Const(size)) : address;
+            Operand addrShifted = context.ShiftRightUI(addrRotated, Const(PageBits - size));
+
             Operand pte = Ptc.State == PtcState.Disabled
                 ? Const(context.Memory.PageTablePointer.ToInt64())
                 : Const(context.Memory.PageTablePointer.ToInt64(), true, Ptc.PageTablePointerIndex);

-            int bit = PageBits;
+            Operand pteOffset = context.BitwiseAnd(addrShifted, Const(addrShifted.Type, ptLevelMask));

-            // Load page table entry from the page table.
-            // This was designed to support multi-level page tables of any size, however right
-            // now we only use flat page tables (so there's only one level).
-            // The page table entry contains the host address where the page is located.
-            // Additionally, the higher 16-bits of the host address may contain extra information
-            // used for write tracking, so this must be handled here aswell.
-            do
+            if (pteOffset.Type == OperandType.I32)
             {
-                Operand addrPart = context.ShiftRightUI(address, Const(bit));
-
-                bit += ptLevelBits;
-
-                if (bit < context.Memory.AddressSpaceBits)
-                {
-                    addrPart = context.BitwiseAnd(addrPart, Const(addrPart.Type, ptLevelMask));
-                }
-
-                Operand pteOffset = context.ShiftLeft(addrPart, Const(3));
-
-                if (pteOffset.Type == OperandType.I32)
-                {
-                    pteOffset = context.ZeroExtend32(OperandType.I64, pteOffset);
-                }
-
-                Operand pteAddress = context.Add(pte, pteOffset);
-
-                pte = context.Load(OperandType.I64, pteAddress);
+                pteOffset = context.ZeroExtend32(OperandType.I64, pteOffset);
             }
-            while (bit < context.Memory.AddressSpaceBits);
+
+            pte = context.Load(OperandType.I64, context.Add(pte, context.ShiftLeft(pteOffset, Const(3))));
+
+            if (addrShifted.Type == OperandType.I32)
+            {
+                addrShifted = context.ZeroExtend32(OperandType.I64, addrShifted);
+            }
+
+            // If the VA is out of range, or not aligned to the access size, force PTE to 0 by masking it.
+            pte = context.BitwiseAnd(pte, context.ShiftRightSI(context.Add(addrShifted, Const(-(long)ptLevelSize)), Const(63)));

             if (lblSlowPath != null)
             {
-                ulong protection = (write ? 3UL : 1UL) << 48;
-                context.BranchIfTrue(lblSlowPath, context.BitwiseAnd(pte, Const(protection)));
+                if (write)
+                {
+                    pte = context.ShiftLeft(pte, Const(1));
+                    context.BranchIf(lblSlowPath, pte, Const(0L), Comparison.LessOrEqual);
+                    pte = context.ShiftRightUI(pte, Const(1));
+                }
+                else
+                {
+                    context.BranchIf(lblSlowPath, pte, Const(0L), Comparison.LessOrEqual);
+                    pte = context.BitwiseAnd(pte, Const(0xffffffffffffUL)); // Ignore any software protection bits. (they are still used by C# memory access)
+                }
             }
             else
             {
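The rewritten slow-path test relies on where the software protection tags now live (bits 62-63, per the Ryujinx.Cpu change below). Shifting the PTE left by one moves the write tag into the sign bit, so a single signed comparison covers both a tracked page and a PTE the validity mask forced to zero. A small standalone illustration with an assumed PTE value:

    using System;

    static class PteTagCheckDemo
    {
        static void Main()
        {
            // Assumed PTE: host address in the low bits, software tag in
            // bits 62-63 (here both set, i.e. a write-tracked page).
            long pte = unchecked((long)0xC000001234567000);

            // Write access: shifting left by one moves the write tag (bit 62)
            // into the sign bit, so one signed test catches both a tagged page
            // and a PTE that the validity mask zeroed out.
            bool writeSlowPath = (pte << 1) <= 0;

            // Read access: any tag sets bit 63, so the plain sign test suffices.
            bool readSlowPath = pte <= 0;

            Console.WriteLine($"write slow path: {writeSlowPath}, read slow path: {readSlowPath}");
        }
    }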
@@ -401,13 +351,15 @@ namespace ARMeilleure.Instructions

                 Operand lblNotWatched = Label();

-                // Is the page currently being tracked for read/write? If so we need to call MarkRegionAsModified.
+                // Is the page currently being tracked for read/write? If so we need to call SignalMemoryTracking.
                 context.BranchIf(lblNotWatched, pte, Const(0L), Comparison.GreaterOrEqual, BasicBlockFrequency.Cold);

-                // Mark the region as modified. Size here doesn't matter as address is assumed to be size aligned here.
+                // Signal memory tracking. Size here doesn't matter as address is assumed to be size aligned here.
                 context.Call(typeof(NativeInterface).GetMethod(nameof(NativeInterface.SignalMemoryTracking)), address, Const(1UL), Const(write ? 1 : 0));
                 context.MarkLabel(lblNotWatched);

+                pte = context.BitwiseAnd(pte, Const(0xffffffffffffUL)); // Ignore any software protection bits. (they are still used by C# memory access)
+
                 Operand lblNonNull = Label();

                 // Skip exception if the PTE address is non-null (not zero).
@@ -418,8 +370,6 @@ namespace ARMeilleure.Instructions
                 context.MarkLabel(lblNonNull);
             }

-            pte = context.BitwiseAnd(pte, Const(0xffffffffffffUL)); // Ignore any software protection bits. (they are still used by c# memory access)
-
             Operand pageOffset = context.BitwiseAnd(address, Const(address.Type, PageMask));

             if (pageOffset.Type == OperandType.I32)
@@ -22,7 +22,7 @@ namespace ARMeilleure.Translation.PTC
     {
         private const string HeaderMagic = "PTChd";

-        private const int InternalVersion = 1971; //! To be incremented manually for each change to the ARMeilleure project.
+        private const int InternalVersion = 1987; //! To be incremented manually for each change to the ARMeilleure project.

         private const string ActualDir = "0";
         private const string BackupDir = "1";
@@ -21,6 +21,8 @@ namespace Ryujinx.Cpu

         private const int PteSize = 8;

+        private const int PointerTagBit = 62;
+
         private readonly InvalidAccessHandler _invalidAccessHandler;

         /// <summary>
@@ -556,11 +558,12 @@ namespace Ryujinx.Cpu
             // Protection is inverted on software pages, since the default value is 0.
             protection = (~protection) & MemoryPermission.ReadAndWrite;

-            long tag = (long)protection << 48;
-            if (tag > 0)
+            long tag = protection switch
             {
-                tag |= long.MinValue; // If any protection is present, the whole pte is negative.
-            }
+                MemoryPermission.None => 0L,
+                MemoryPermission.Read => 2L << PointerTagBit,
+                _ => 3L << PointerTagBit
+            };

             ulong endVa = (va + size + PageMask) & ~(ulong)PageMask;
             long invTagMask = ~(0xffffL << 48);
@@ -628,7 +631,7 @@ namespace Ryujinx.Cpu
             // tracking using host guard pages in future, but also supporting platforms where this is not possible.

             // Write tag includes read protection, since we don't have any read actions that aren't performed before write too.
-            long tag = (write ? 3L : 1L) << 48;
+            long tag = (write ? 3L : 2L) << PointerTagBit;

             ulong endVa = (va + size + PageMask) & ~(ulong)PageMask;

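Taken together with PointerTagBit = 62, the switch above yields the tag encodings sketched below. Because protection was inverted earlier, a tag names the accesses that must take the slow path, and any nonzero tag makes the whole PTE negative, which is what the sign tests in EmitPtPointerLoad exploit. A quick sketch printing the assumed encodings:

    using System;

    static class PointerTagDemo
    {
        private const int PointerTagBit = 62;

        static void Main()
        {
            long none  = 0L;                    // MemoryPermission.None: no tracking
            long read  = 2L << PointerTagBit;   // sets bit 63: sign test trips on any access
            long write = 3L << PointerTagBit;   // sets bits 62+63: also fails the shifted write test

            Console.WriteLine($"none:  0x{none:X16}");   // 0x0000000000000000
            Console.WriteLine($"read:  0x{read:X16}");   // 0x8000000000000000
            Console.WriteLine($"write: 0x{write:X16}");  // 0xC000000000000000
        }
    }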