
Add support to IVirtualMemoryManager for zero-copy reads (#6251)

* - WritableRegion: enable wrapping IMemoryOwner<byte>
- IVirtualMemoryManager impls of GetWritableRegion() use pooled memory when region is non-contiguous.
- IVirtualMemoryManager: add GetReadOnlySequence() and impls
- ByteMemoryPool: add new method RentCopy()
- ByteMemoryPool: make class static, remove ctor and singleton field from earlier impl

* - BytesReadOnlySequenceSegment: move from Ryujinx.Common.Memory to Ryujinx.Memory
- BytesReadOnlySequenceSegment: add IsContiguousWith() and Replace() methods
- VirtualMemoryManagerBase:
  - remove generic type parameters, instead use ulong for virtual addresses and nuint for host/physical addresses
  - implement IWritableBlock
  - add virtual GetReadOnlySequence() with coalescing of contiguous segments
  - add virtual GetSpan()
  - add virtual GetWritableRegion()
  - add abstract IsMapped()
  - add virtual MapForeign(ulong, nuint, ulong)
  - add virtual Read<T>()
  - add virtual Read(ulong, Span<byte>)
  - add virtual ReadTracked<T>()
  - add virtual SignalMemoryTracking()
  - add virtual Write()
  - add virtual Write<T>()
  - add virtual WriteUntracked()
  - add virtual WriteWithRedundancyCheck()
- VirtualMemoryManagerRefCountedBase: remove generic type parameters
- AddressSpaceManager: remove redundant methods, add required overrides
- HvMemoryManager: remove redundant methods, add required overrides, add overrides for _invalidAccessHandler handling
- MemoryManager: remove redundant methods, add required overrides, add overrides for _invalidAccessHandler handling
- MemoryManagerHostMapped: remove redundant methods, add required overrides, add overrides for _invalidAccessHandler handling
- NativeMemoryManager: add get properties for Pointer and Length
- throughout: removed invalid <inheritdoc/> comments

* make HvMemoryManager class sealed

* remove unused method

* adjust MemoryManagerHostTracked

* let MemoryManagerHostTracked override WriteImpl()
jhorv 2024-04-04 21:23:03 -04:00 committed by GitHub
parent 8e74fa3456
commit 5def0429f8
14 changed files with 635 additions and 819 deletions
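
As a usage note (not part of the commit itself), here is a minimal sketch of how a caller might use the new zero-copy read path alongside the pooled write path. The checksum/fill scenario, method names, and addresses are hypothetical; only GetReadOnlySequence() and GetWritableRegion() come from IVirtualMemoryManager.

    using System;
    using System.Buffers;
    using Ryujinx.Memory;

    static class ZeroCopyExamples
    {
        // Hypothetical consumer: checksums guest memory without copying it into a temporary
        // byte[]. Each segment of the returned ReadOnlySequence wraps host memory directly
        // whenever the requested virtual range is mapped.
        public static int Checksum(IVirtualMemoryManager memory, ulong va, int size)
        {
            ReadOnlySequence<byte> sequence = memory.GetReadOnlySequence(va, size);

            int checksum = 0;

            foreach (ReadOnlyMemory<byte> segment in sequence)
            {
                foreach (byte value in segment.Span)
                {
                    checksum = unchecked(checksum * 31 + value);
                }
            }

            return checksum;
        }

        // Hypothetical writer: for contiguous regions the WritableRegion wraps host memory
        // directly; for non-contiguous regions it now wraps pooled memory (an IMemoryOwner<byte>)
        // that is written back and returned to the pool when disposed.
        public static void Fill(IVirtualMemoryManager memory, ulong va, int size, byte value)
        {
            using WritableRegion region = memory.GetWritableRegion(va, size);

            region.Memory.Span.Fill(value);
        }
    }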

View file

@@ -4,7 +4,7 @@ using System.Threading;
 namespace Ryujinx.Common.Memory
 {
-    public sealed partial class ByteMemoryPool
+    public partial class ByteMemoryPool
     {
         /// <summary>
         /// Represents a <see cref="IMemoryOwner{Byte}"/> that wraps an array rented from

View file

@@ -6,24 +6,8 @@ namespace Ryujinx.Common.Memory
     /// <summary>
     /// Provides a pool of re-usable byte array instances.
    /// </summary>
-    public sealed partial class ByteMemoryPool
+    public static partial class ByteMemoryPool
     {
-        private static readonly ByteMemoryPool _shared = new();
-
-        /// <summary>
-        /// Constructs a <see cref="ByteMemoryPool"/> instance. Private to force access through
-        /// the <see cref="ByteMemoryPool.Shared"/> instance.
-        /// </summary>
-        private ByteMemoryPool()
-        {
-            // No implementation
-        }
-
-        /// <summary>
-        /// Retrieves a shared <see cref="ByteMemoryPool"/> instance.
-        /// </summary>
-        public static ByteMemoryPool Shared => _shared;
-
         /// <summary>
         /// Returns the maximum buffer size supported by this pool.
         /// </summary>
@@ -95,6 +79,20 @@ namespace Ryujinx.Common.Memory
             return buffer;
         }

+        /// <summary>
+        /// Copies <paramref name="buffer"/> into a newly rented byte memory buffer.
+        /// </summary>
+        /// <param name="buffer">The byte buffer to copy</param>
+        /// <returns>A <see cref="IMemoryOwner{Byte}"/> wrapping the rented memory with <paramref name="buffer"/> copied to it</returns>
+        public static IMemoryOwner<byte> RentCopy(ReadOnlySpan<byte> buffer)
+        {
+            var copy = RentImpl(buffer.Length);
+
+            buffer.CopyTo(copy.Memory.Span);
+
+            return copy;
+        }
+
         private static ByteMemoryPoolBuffer RentImpl(int length)
         {
             if ((uint)length > Array.MaxLength)
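
A brief usage note (not part of the diff): a sketch of how the new RentCopy() helper might be called; the snapshot scenario is hypothetical.

    using System;
    using System.Buffers;
    using Ryujinx.Common.Memory;

    static class RentCopyExample
    {
        // Hypothetical scenario: copy transient span data into pooled memory so it can
        // outlive the caller's stack frame. Disposing the owner returns the buffer to the pool.
        public static void Run()
        {
            ReadOnlySpan<byte> transient = stackalloc byte[] { 1, 2, 3, 4 };

            using IMemoryOwner<byte> snapshot = ByteMemoryPool.RentCopy(transient);

            // snapshot.Memory now holds an independent, pooled copy of the four bytes.
        }
    }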

View file

@ -3,10 +3,10 @@ using Ryujinx.Memory;
using Ryujinx.Memory.Range; using Ryujinx.Memory.Range;
using Ryujinx.Memory.Tracking; using Ryujinx.Memory.Tracking;
using System; using System;
using System.Buffers;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Runtime.CompilerServices; using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning; using System.Runtime.Versioning;
namespace Ryujinx.Cpu.AppleHv namespace Ryujinx.Cpu.AppleHv
@ -15,7 +15,7 @@ namespace Ryujinx.Cpu.AppleHv
/// Represents a CPU memory manager which maps guest virtual memory directly onto the Hypervisor page table. /// Represents a CPU memory manager which maps guest virtual memory directly onto the Hypervisor page table.
/// </summary> /// </summary>
[SupportedOSPlatform("macos")] [SupportedOSPlatform("macos")]
public class HvMemoryManager : VirtualMemoryManagerRefCountedBase<ulong, ulong>, IMemoryManager, IVirtualMemoryManagerTracked, IWritableBlock public sealed class HvMemoryManager : VirtualMemoryManagerRefCountedBase, IMemoryManager, IVirtualMemoryManagerTracked
{ {
private readonly InvalidAccessHandler _invalidAccessHandler; private readonly InvalidAccessHandler _invalidAccessHandler;
@ -96,12 +96,6 @@ namespace Ryujinx.Cpu.AppleHv
} }
} }
/// <inheritdoc/>
public void MapForeign(ulong va, nuint hostPointer, ulong size)
{
throw new NotSupportedException();
}
/// <inheritdoc/> /// <inheritdoc/>
public void Unmap(ulong va, ulong size) public void Unmap(ulong va, ulong size)
{ {
@ -126,20 +120,11 @@ namespace Ryujinx.Cpu.AppleHv
} }
} }
/// <inheritdoc/> public override T ReadTracked<T>(ulong va)
public T Read<T>(ulong va) where T : unmanaged
{
return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
}
/// <inheritdoc/>
public T ReadTracked<T>(ulong va) where T : unmanaged
{ {
try try
{ {
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false); return base.ReadTracked<T>(va);
return Read<T>(va);
} }
catch (InvalidMemoryRegionException) catch (InvalidMemoryRegionException)
{ {
@ -152,7 +137,6 @@ namespace Ryujinx.Cpu.AppleHv
} }
} }
/// <inheritdoc/>
public override void Read(ulong va, Span<byte> data) public override void Read(ulong va, Span<byte> data)
{ {
try try
@ -168,101 +152,11 @@ namespace Ryujinx.Cpu.AppleHv
} }
} }
/// <inheritdoc/> public override void Write(ulong va, ReadOnlySpan<byte> data)
public void Write<T>(ulong va, T value) where T : unmanaged
{
Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
}
/// <inheritdoc/>
public void Write(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
SignalMemoryTracking(va, (ulong)data.Length, true);
WriteImpl(va, data);
}
/// <inheritdoc/>
public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
WriteImpl(va, data);
}
/// <inheritdoc/>
public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return false;
}
SignalMemoryTracking(va, (ulong)data.Length, false);
if (IsContiguousAndMapped(va, data.Length))
{
var target = _backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length);
bool changed = !data.SequenceEqual(target);
if (changed)
{
data.CopyTo(target);
}
return changed;
}
else
{
WriteImpl(va, data);
return true;
}
}
private void WriteImpl(ulong va, ReadOnlySpan<byte> data)
{ {
try try
{ {
AssertValidAddressAndSize(va, (ulong)data.Length); base.Write(va, data);
if (IsContiguousAndMapped(va, data.Length))
{
data.CopyTo(_backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length));
}
else
{
int offset = 0, size;
if ((va & PageMask) != 0)
{
ulong pa = GetPhysicalAddressChecked(va);
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
data[..size].CopyTo(_backingMemory.GetSpan(pa, size));
offset += size;
}
for (; offset < data.Length; offset += size)
{
ulong pa = GetPhysicalAddressChecked(va + (ulong)offset);
size = Math.Min(data.Length - offset, PageSize);
data.Slice(offset, size).CopyTo(_backingMemory.GetSpan(pa, size));
}
}
} }
catch (InvalidMemoryRegionException) catch (InvalidMemoryRegionException)
{ {
@ -273,61 +167,38 @@ namespace Ryujinx.Cpu.AppleHv
} }
} }
/// <inheritdoc/> public override void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{ {
if (size == 0) try
{ {
return ReadOnlySpan<byte>.Empty; base.WriteUntracked(va, data);
} }
catch (InvalidMemoryRegionException)
if (tracked)
{ {
SignalMemoryTracking(va, (ulong)size, false); if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
} }
if (IsContiguousAndMapped(va, size))
{
return _backingMemory.GetSpan(GetPhysicalAddressInternal(va), size);
}
else
{
Span<byte> data = new byte[size];
base.Read(va, data);
return data;
} }
} }
/// <inheritdoc/> public override ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{ {
if (size == 0) try
{ {
return new WritableRegion(null, va, Memory<byte>.Empty); return base.GetReadOnlySequence(va, size, tracked);
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
} }
if (tracked) return ReadOnlySequence<byte>.Empty;
{
SignalMemoryTracking(va, (ulong)size, true);
}
if (IsContiguousAndMapped(va, size))
{
return new WritableRegion(null, va, _backingMemory.GetMemory(GetPhysicalAddressInternal(va), size));
}
else
{
Memory<byte> memory = new byte[size];
base.Read(va, memory.Span);
return new WritableRegion(this, va, memory);
} }
} }
/// <inheritdoc/>
public ref T GetRef<T>(ulong va) where T : unmanaged public ref T GetRef<T>(ulong va) where T : unmanaged
{ {
if (!IsContiguous(va, Unsafe.SizeOf<T>())) if (!IsContiguous(va, Unsafe.SizeOf<T>()))
@ -340,9 +211,8 @@ namespace Ryujinx.Cpu.AppleHv
return ref _backingMemory.GetRef<T>(GetPhysicalAddressChecked(va)); return ref _backingMemory.GetRef<T>(GetPhysicalAddressChecked(va));
} }
/// <inheritdoc/>
[MethodImpl(MethodImplOptions.AggressiveInlining)] [MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsMapped(ulong va) public override bool IsMapped(ulong va)
{ {
return ValidateAddress(va) && _pages.IsMapped(va); return ValidateAddress(va) && _pages.IsMapped(va);
} }
@ -355,39 +225,6 @@ namespace Ryujinx.Cpu.AppleHv
return _pages.IsRangeMapped(va, size); return _pages.IsRangeMapped(va, size);
} }
private static void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguousAndMapped(ulong va, int size) => IsContiguous(va, size) && IsMapped(va);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguous(ulong va, int size)
{
if (!ValidateAddress(va) || !ValidateAddressAndSize(va, (ulong)size))
{
return false;
}
int pages = GetPagesCount(va, (uint)size, out va);
for (int page = 0; page < pages - 1; page++)
{
if (!ValidateAddress(va + PageSize))
{
return false;
}
if (GetPhysicalAddressInternal(va) + PageSize != GetPhysicalAddressInternal(va + PageSize))
{
return false;
}
va += PageSize;
}
return true;
}
/// <inheritdoc/> /// <inheritdoc/>
public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size) public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size)
{ {
@ -464,11 +301,10 @@ namespace Ryujinx.Cpu.AppleHv
return regions; return regions;
} }
/// <inheritdoc/>
/// <remarks> /// <remarks>
/// This function also validates that the given range is both valid and mapped, and will throw if it is not. /// This function also validates that the given range is both valid and mapped, and will throw if it is not.
/// </remarks> /// </remarks>
public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null) public override void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{ {
AssertValidAddressAndSize(va, size); AssertValidAddressAndSize(va, size);
@ -481,24 +317,6 @@ namespace Ryujinx.Cpu.AppleHv
_pages.SignalMemoryTracking(Tracking, va, size, write, exemptId); _pages.SignalMemoryTracking(Tracking, va, size, write, exemptId);
} }
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetPagesCount(ulong va, ulong size, out ulong startVa)
{
// WARNING: Always check if ulong does not overflow during the operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
/// <inheritdoc/>
public void Reprotect(ulong va, ulong size, MemoryPermission protection) public void Reprotect(ulong va, ulong size, MemoryPermission protection)
{ {
// TODO // TODO
@ -535,7 +353,7 @@ namespace Ryujinx.Cpu.AppleHv
return Tracking.BeginSmartGranularTracking(address, size, granularity, id); return Tracking.BeginSmartGranularTracking(address, size, granularity, id);
} }
private ulong GetPhysicalAddressChecked(ulong va) private nuint GetPhysicalAddressChecked(ulong va)
{ {
if (!IsMapped(va)) if (!IsMapped(va))
{ {
@ -545,9 +363,9 @@ namespace Ryujinx.Cpu.AppleHv
return GetPhysicalAddressInternal(va); return GetPhysicalAddressInternal(va);
} }
private ulong GetPhysicalAddressInternal(ulong va) private nuint GetPhysicalAddressInternal(ulong va)
{ {
return _pageTable.Read(va) + (va & PageMask); return (nuint)(_pageTable.Read(va) + (va & PageMask));
} }
/// <summary> /// <summary>
@ -558,10 +376,17 @@ namespace Ryujinx.Cpu.AppleHv
_addressSpace.Dispose(); _addressSpace.Dispose();
} }
protected override Span<byte> GetPhysicalAddressSpan(ulong pa, int size) protected override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
=> _backingMemory.GetMemory(pa, size);
protected override Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
=> _backingMemory.GetSpan(pa, size); => _backingMemory.GetSpan(pa, size);
protected override ulong TranslateVirtualAddressForRead(ulong va) protected override nuint TranslateVirtualAddressChecked(ulong va)
=> GetPhysicalAddressChecked(va); => GetPhysicalAddressChecked(va);
protected override nuint TranslateVirtualAddressUnchecked(ulong va)
=> GetPhysicalAddressInternal(va);
} }
} }

View file

@ -3,6 +3,7 @@ using Ryujinx.Memory;
using Ryujinx.Memory.Range; using Ryujinx.Memory.Range;
using Ryujinx.Memory.Tracking; using Ryujinx.Memory.Tracking;
using System; using System;
using System.Buffers;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Runtime.CompilerServices; using System.Runtime.CompilerServices;
@ -14,7 +15,7 @@ namespace Ryujinx.Cpu.Jit
/// <summary> /// <summary>
/// Represents a CPU memory manager. /// Represents a CPU memory manager.
/// </summary> /// </summary>
public sealed class MemoryManager : VirtualMemoryManagerRefCountedBase<ulong, ulong>, IMemoryManager, IVirtualMemoryManagerTracked, IWritableBlock public sealed class MemoryManager : VirtualMemoryManagerRefCountedBase, IMemoryManager, IVirtualMemoryManagerTracked
{ {
private const int PteSize = 8; private const int PteSize = 8;
@ -97,12 +98,6 @@ namespace Ryujinx.Cpu.Jit
Tracking.Map(oVa, size); Tracking.Map(oVa, size);
} }
/// <inheritdoc/>
public void MapForeign(ulong va, nuint hostPointer, ulong size)
{
throw new NotSupportedException();
}
/// <inheritdoc/> /// <inheritdoc/>
public void Unmap(ulong va, ulong size) public void Unmap(ulong va, ulong size)
{ {
@ -128,20 +123,11 @@ namespace Ryujinx.Cpu.Jit
} }
} }
/// <inheritdoc/> public override T ReadTracked<T>(ulong va)
public T Read<T>(ulong va) where T : unmanaged
{
return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
}
/// <inheritdoc/>
public T ReadTracked<T>(ulong va) where T : unmanaged
{ {
try try
{ {
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false); return base.ReadTracked<T>(va);
return Read<T>(va);
} }
catch (InvalidMemoryRegionException) catch (InvalidMemoryRegionException)
{ {
@ -190,117 +176,11 @@ namespace Ryujinx.Cpu.Jit
} }
} }
/// <inheritdoc/> public override void Write(ulong va, ReadOnlySpan<byte> data)
public void Write<T>(ulong va, T value) where T : unmanaged
{
Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
}
/// <inheritdoc/>
public void Write(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
SignalMemoryTracking(va, (ulong)data.Length, true);
WriteImpl(va, data);
}
/// <inheritdoc/>
public void WriteGuest<T>(ulong va, T value) where T : unmanaged
{
Span<byte> data = MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1));
SignalMemoryTrackingImpl(va, (ulong)data.Length, true, true);
WriteImpl(va, data);
}
/// <inheritdoc/>
public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
WriteImpl(va, data);
}
/// <inheritdoc/>
public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return false;
}
SignalMemoryTracking(va, (ulong)data.Length, false);
if (IsContiguousAndMapped(va, data.Length))
{
var target = _backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length);
bool changed = !data.SequenceEqual(target);
if (changed)
{
data.CopyTo(target);
}
return changed;
}
else
{
WriteImpl(va, data);
return true;
}
}
/// <summary>
/// Writes data to CPU mapped memory.
/// </summary>
/// <param name="va">Virtual address to write the data into</param>
/// <param name="data">Data to be written</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void WriteImpl(ulong va, ReadOnlySpan<byte> data)
{ {
try try
{ {
AssertValidAddressAndSize(va, (ulong)data.Length); base.Write(va, data);
if (IsContiguousAndMapped(va, data.Length))
{
data.CopyTo(_backingMemory.GetSpan(GetPhysicalAddressInternal(va), data.Length));
}
else
{
int offset = 0, size;
if ((va & PageMask) != 0)
{
ulong pa = GetPhysicalAddressInternal(va);
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
data[..size].CopyTo(_backingMemory.GetSpan(pa, size));
offset += size;
}
for (; offset < data.Length; offset += size)
{
ulong pa = GetPhysicalAddressInternal(va + (ulong)offset);
size = Math.Min(data.Length - offset, PageSize);
data.Slice(offset, size).CopyTo(_backingMemory.GetSpan(pa, size));
}
}
} }
catch (InvalidMemoryRegionException) catch (InvalidMemoryRegionException)
{ {
@ -312,60 +192,47 @@ namespace Ryujinx.Cpu.Jit
} }
/// <inheritdoc/> /// <inheritdoc/>
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false) public void WriteGuest<T>(ulong va, T value) where T : unmanaged
{ {
if (size == 0) Span<byte> data = MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1));
{
return ReadOnlySpan<byte>.Empty; SignalMemoryTrackingImpl(va, (ulong)data.Length, true, true);
Write(va, data);
} }
if (tracked) public override void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{ {
SignalMemoryTracking(va, (ulong)size, false); try
{
base.WriteUntracked(va, data);
} }
catch (InvalidMemoryRegionException)
if (IsContiguousAndMapped(va, size))
{ {
return _backingMemory.GetSpan(GetPhysicalAddressInternal(va), size); if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
} }
else
{
Span<byte> data = new byte[size];
base.Read(va, data);
return data;
} }
} }
/// <inheritdoc/> public override ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{ {
if (size == 0) try
{ {
return new WritableRegion(null, va, Memory<byte>.Empty); return base.GetReadOnlySequence(va, size, tracked);
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
} }
if (IsContiguousAndMapped(va, size)) return ReadOnlySequence<byte>.Empty;
{
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, true);
}
return new WritableRegion(null, va, _backingMemory.GetMemory(GetPhysicalAddressInternal(va), size));
}
else
{
Memory<byte> memory = new byte[size];
GetSpan(va, size).CopyTo(memory.Span);
return new WritableRegion(this, va, memory, tracked);
} }
} }
/// <inheritdoc/>
public ref T GetRef<T>(ulong va) where T : unmanaged public ref T GetRef<T>(ulong va) where T : unmanaged
{ {
if (!IsContiguous(va, Unsafe.SizeOf<T>())) if (!IsContiguous(va, Unsafe.SizeOf<T>()))
@ -378,56 +245,6 @@ namespace Ryujinx.Cpu.Jit
return ref _backingMemory.GetRef<T>(GetPhysicalAddressInternal(va)); return ref _backingMemory.GetRef<T>(GetPhysicalAddressInternal(va));
} }
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetPagesCount(ulong va, uint size, out ulong startVa)
{
// WARNING: Always check if ulong does not overflow during the operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
private static void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguousAndMapped(ulong va, int size) => IsContiguous(va, size) && IsMapped(va);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguous(ulong va, int size)
{
if (!ValidateAddress(va) || !ValidateAddressAndSize(va, (ulong)size))
{
return false;
}
int pages = GetPagesCount(va, (uint)size, out va);
for (int page = 0; page < pages - 1; page++)
{
if (!ValidateAddress(va + PageSize))
{
return false;
}
if (GetPhysicalAddressInternal(va) + PageSize != GetPhysicalAddressInternal(va + PageSize))
{
return false;
}
va += PageSize;
}
return true;
}
/// <inheritdoc/> /// <inheritdoc/>
public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size) public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size)
{ {
@ -532,9 +349,8 @@ namespace Ryujinx.Cpu.Jit
return true; return true;
} }
/// <inheritdoc/>
[MethodImpl(MethodImplOptions.AggressiveInlining)] [MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsMapped(ulong va) public override bool IsMapped(ulong va)
{ {
if (!ValidateAddress(va)) if (!ValidateAddress(va))
{ {
@ -544,9 +360,9 @@ namespace Ryujinx.Cpu.Jit
return _pageTable.Read<ulong>((va / PageSize) * PteSize) != 0; return _pageTable.Read<ulong>((va / PageSize) * PteSize) != 0;
} }
private ulong GetPhysicalAddressInternal(ulong va) private nuint GetPhysicalAddressInternal(ulong va)
{ {
return PteToPa(_pageTable.Read<ulong>((va / PageSize) * PteSize) & ~(0xffffUL << 48)) + (va & PageMask); return (nuint)(PteToPa(_pageTable.Read<ulong>((va / PageSize) * PteSize) & ~(0xffffUL << 48)) + (va & PageMask));
} }
/// <inheritdoc/> /// <inheritdoc/>
@ -643,9 +459,7 @@ namespace Ryujinx.Cpu.Jit
{ {
ref long pageRef = ref _pageTable.GetRef<long>(pageStart * PteSize); ref long pageRef = ref _pageTable.GetRef<long>(pageStart * PteSize);
long pte; long pte = Volatile.Read(ref pageRef);
pte = Volatile.Read(ref pageRef);
if ((pte & tag) != 0) if ((pte & tag) != 0)
{ {
@ -663,7 +477,7 @@ namespace Ryujinx.Cpu.Jit
} }
/// <inheritdoc/> /// <inheritdoc/>
public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null) public override void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{ {
SignalMemoryTrackingImpl(va, size, write, false, precise, exemptId); SignalMemoryTrackingImpl(va, size, write, false, precise, exemptId);
} }
@ -683,10 +497,16 @@ namespace Ryujinx.Cpu.Jit
/// </summary> /// </summary>
protected override void Destroy() => _pageTable.Dispose(); protected override void Destroy() => _pageTable.Dispose();
protected override Span<byte> GetPhysicalAddressSpan(ulong pa, int size) protected override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
=> _backingMemory.GetMemory(pa, size);
protected override Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
=> _backingMemory.GetSpan(pa, size); => _backingMemory.GetSpan(pa, size);
protected override ulong TranslateVirtualAddressForRead(ulong va) protected override nuint TranslateVirtualAddressChecked(ulong va)
=> GetPhysicalAddressInternal(va);
protected override nuint TranslateVirtualAddressUnchecked(ulong va)
=> GetPhysicalAddressInternal(va); => GetPhysicalAddressInternal(va);
} }
} }

View file

@ -3,6 +3,7 @@ using Ryujinx.Memory;
using Ryujinx.Memory.Range; using Ryujinx.Memory.Range;
using Ryujinx.Memory.Tracking; using Ryujinx.Memory.Tracking;
using System; using System;
using System.Buffers;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Runtime.CompilerServices; using System.Runtime.CompilerServices;
@ -12,7 +13,7 @@ namespace Ryujinx.Cpu.Jit
/// <summary> /// <summary>
/// Represents a CPU memory manager which maps guest virtual memory directly onto a host virtual region. /// Represents a CPU memory manager which maps guest virtual memory directly onto a host virtual region.
/// </summary> /// </summary>
public sealed class MemoryManagerHostMapped : VirtualMemoryManagerRefCountedBase<ulong, ulong>, IMemoryManager, IVirtualMemoryManagerTracked, IWritableBlock public sealed class MemoryManagerHostMapped : VirtualMemoryManagerRefCountedBase, IMemoryManager, IVirtualMemoryManagerTracked
{ {
private readonly InvalidAccessHandler _invalidAccessHandler; private readonly InvalidAccessHandler _invalidAccessHandler;
private readonly bool _unsafeMode; private readonly bool _unsafeMode;
@ -96,12 +97,6 @@ namespace Ryujinx.Cpu.Jit
Tracking.Map(va, size); Tracking.Map(va, size);
} }
/// <inheritdoc/>
public void MapForeign(ulong va, nuint hostPointer, ulong size)
{
throw new NotSupportedException();
}
/// <inheritdoc/> /// <inheritdoc/>
public void Unmap(ulong va, ulong size) public void Unmap(ulong va, ulong size)
{ {
@ -138,8 +133,7 @@ namespace Ryujinx.Cpu.Jit
} }
} }
/// <inheritdoc/> public override T Read<T>(ulong va)
public T Read<T>(ulong va) where T : unmanaged
{ {
try try
{ {
@ -158,14 +152,11 @@ namespace Ryujinx.Cpu.Jit
} }
} }
/// <inheritdoc/> public override T ReadTracked<T>(ulong va)
public T ReadTracked<T>(ulong va) where T : unmanaged
{ {
try try
{ {
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false); return base.ReadTracked<T>(va);
return Read<T>(va);
} }
catch (InvalidMemoryRegionException) catch (InvalidMemoryRegionException)
{ {
@ -178,7 +169,6 @@ namespace Ryujinx.Cpu.Jit
} }
} }
/// <inheritdoc/>
public override void Read(ulong va, Span<byte> data) public override void Read(ulong va, Span<byte> data)
{ {
try try
@ -196,9 +186,7 @@ namespace Ryujinx.Cpu.Jit
} }
} }
public override void Write<T>(ulong va, T value)
/// <inheritdoc/>
public void Write<T>(ulong va, T value) where T : unmanaged
{ {
try try
{ {
@ -215,8 +203,7 @@ namespace Ryujinx.Cpu.Jit
} }
} }
/// <inheritdoc/> public override void Write(ulong va, ReadOnlySpan<byte> data)
public void Write(ulong va, ReadOnlySpan<byte> data)
{ {
try try
{ {
@ -233,8 +220,7 @@ namespace Ryujinx.Cpu.Jit
} }
} }
/// <inheritdoc/> public override void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{ {
try try
{ {
@ -251,8 +237,7 @@ namespace Ryujinx.Cpu.Jit
} }
} }
/// <inheritdoc/> public override bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{ {
try try
{ {
@ -279,8 +264,21 @@ namespace Ryujinx.Cpu.Jit
} }
} }
/// <inheritdoc/> public override ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false) {
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, write: false);
}
else
{
AssertMapped(va, (ulong)size);
}
return new ReadOnlySequence<byte>(_addressSpace.Mirror.GetMemory(va, size));
}
public override ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{ {
if (tracked) if (tracked)
{ {
@ -294,8 +292,7 @@ namespace Ryujinx.Cpu.Jit
return _addressSpace.Mirror.GetSpan(va, size); return _addressSpace.Mirror.GetSpan(va, size);
} }
/// <inheritdoc/> public override WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{ {
if (tracked) if (tracked)
{ {
@ -309,7 +306,6 @@ namespace Ryujinx.Cpu.Jit
return _addressSpace.Mirror.GetWritableRegion(va, size); return _addressSpace.Mirror.GetWritableRegion(va, size);
} }
/// <inheritdoc/>
public ref T GetRef<T>(ulong va) where T : unmanaged public ref T GetRef<T>(ulong va) where T : unmanaged
{ {
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), true); SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), true);
@ -317,9 +313,8 @@ namespace Ryujinx.Cpu.Jit
return ref _addressSpace.Mirror.GetRef<T>(va); return ref _addressSpace.Mirror.GetRef<T>(va);
} }
/// <inheritdoc/>
[MethodImpl(MethodImplOptions.AggressiveInlining)] [MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsMapped(ulong va) public override bool IsMapped(ulong va)
{ {
return ValidateAddress(va) && _pages.IsMapped(va); return ValidateAddress(va) && _pages.IsMapped(va);
} }
@ -390,11 +385,10 @@ namespace Ryujinx.Cpu.Jit
return _pageTable.Read(va) + (va & PageMask); return _pageTable.Read(va) + (va & PageMask);
} }
/// <inheritdoc/>
/// <remarks> /// <remarks>
/// This function also validates that the given range is both valid and mapped, and will throw if it is not. /// This function also validates that the given range is both valid and mapped, and will throw if it is not.
/// </remarks> /// </remarks>
public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null) public override void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{ {
AssertValidAddressAndSize(va, size); AssertValidAddressAndSize(va, size);
@ -407,23 +401,6 @@ namespace Ryujinx.Cpu.Jit
_pages.SignalMemoryTracking(Tracking, va, size, write, exemptId); _pages.SignalMemoryTracking(Tracking, va, size, write, exemptId);
} }
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetPagesCount(ulong va, ulong size, out ulong startVa)
{
// WARNING: Always check if ulong does not overflow during the operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
/// <inheritdoc/> /// <inheritdoc/>
public void Reprotect(ulong va, ulong size, MemoryPermission protection) public void Reprotect(ulong va, ulong size, MemoryPermission protection)
{ {
@ -470,10 +447,16 @@ namespace Ryujinx.Cpu.Jit
_memoryEh.Dispose(); _memoryEh.Dispose();
} }
protected override Span<byte> GetPhysicalAddressSpan(ulong pa, int size) protected override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
=> _addressSpace.Mirror.GetMemory(pa, size);
protected override Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
=> _addressSpace.Mirror.GetSpan(pa, size); => _addressSpace.Mirror.GetSpan(pa, size);
protected override ulong TranslateVirtualAddressForRead(ulong va) protected override nuint TranslateVirtualAddressChecked(ulong va)
=> va; => (nuint)GetPhysicalAddressChecked(va);
protected override nuint TranslateVirtualAddressUnchecked(ulong va)
=> (nuint)GetPhysicalAddressInternal(va);
} }
} }

View file

@ -8,14 +8,13 @@ using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Runtime.CompilerServices; using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace Ryujinx.Cpu.Jit namespace Ryujinx.Cpu.Jit
{ {
/// <summary> /// <summary>
/// Represents a CPU memory manager which maps guest virtual memory directly onto a host virtual region. /// Represents a CPU memory manager which maps guest virtual memory directly onto a host virtual region.
/// </summary> /// </summary>
public sealed class MemoryManagerHostTracked : VirtualMemoryManagerRefCountedBase<ulong, ulong>, IWritableBlock, IMemoryManager, IVirtualMemoryManagerTracked public sealed class MemoryManagerHostTracked : VirtualMemoryManagerRefCountedBase, IMemoryManager, IVirtualMemoryManagerTracked
{ {
private readonly InvalidAccessHandler _invalidAccessHandler; private readonly InvalidAccessHandler _invalidAccessHandler;
private readonly bool _unsafeMode; private readonly bool _unsafeMode;
@ -100,12 +99,6 @@ namespace Ryujinx.Cpu.Jit
Tracking.Map(va, size); Tracking.Map(va, size);
} }
/// <inheritdoc/>
public void MapForeign(ulong va, nuint hostPointer, ulong size)
{
throw new NotSupportedException();
}
/// <inheritdoc/> /// <inheritdoc/>
public void Unmap(ulong va, ulong size) public void Unmap(ulong va, ulong size)
{ {
@ -120,18 +113,11 @@ namespace Ryujinx.Cpu.Jit
_nativePageTable.Unmap(va, size); _nativePageTable.Unmap(va, size);
} }
public T Read<T>(ulong va) where T : unmanaged public override T ReadTracked<T>(ulong va)
{
return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
}
public T ReadTracked<T>(ulong va) where T : unmanaged
{ {
try try
{ {
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false); return base.ReadTracked<T>(va);
return Read<T>(va);
} }
catch (InvalidMemoryRegionException) catch (InvalidMemoryRegionException)
{ {
@ -145,38 +131,39 @@ namespace Ryujinx.Cpu.Jit
} }
public override void Read(ulong va, Span<byte> data) public override void Read(ulong va, Span<byte> data)
{
ReadImpl(va, data);
}
public void Write<T>(ulong va, T value) where T : unmanaged
{
Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
}
public void Write(ulong va, ReadOnlySpan<byte> data)
{ {
if (data.Length == 0) if (data.Length == 0)
{ {
return; return;
} }
SignalMemoryTracking(va, (ulong)data.Length, true); try
WriteImpl(va, data);
}
public void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{ {
if (data.Length == 0) AssertValidAddressAndSize(va, (ulong)data.Length);
ulong endVa = va + (ulong)data.Length;
int offset = 0;
while (va < endVa)
{ {
return; (MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(data.Length - offset));
memory.GetSpan(rangeOffset, (int)copySize).CopyTo(data.Slice(offset, (int)copySize));
va += copySize;
offset += (int)copySize;
}
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
} }
WriteImpl(va, data); public override bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
}
public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{ {
if (data.Length == 0) if (data.Length == 0)
{ {
@ -206,35 +193,7 @@ namespace Ryujinx.Cpu.Jit
} }
} }
private void WriteImpl(ulong va, ReadOnlySpan<byte> data) public override ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{
try
{
AssertValidAddressAndSize(va, (ulong)data.Length);
ulong endVa = va + (ulong)data.Length;
int offset = 0;
while (va < endVa)
{
(MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(data.Length - offset));
data.Slice(offset, (int)copySize).CopyTo(memory.GetSpan(rangeOffset, (int)copySize));
va += copySize;
offset += (int)copySize;
}
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{ {
if (size == 0) if (size == 0)
{ {
@ -254,13 +213,13 @@ namespace Ryujinx.Cpu.Jit
{ {
Span<byte> data = new byte[size]; Span<byte> data = new byte[size];
ReadImpl(va, data); Read(va, data);
return data; return data;
} }
} }
public WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false) public override WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{ {
if (size == 0) if (size == 0)
{ {
@ -280,7 +239,7 @@ namespace Ryujinx.Cpu.Jit
{ {
Memory<byte> memory = new byte[size]; Memory<byte> memory = new byte[size];
ReadImpl(va, memory.Span); Read(va, memory.Span);
return new WritableRegion(this, va, memory); return new WritableRegion(this, va, memory);
} }
@ -299,7 +258,7 @@ namespace Ryujinx.Cpu.Jit
} }
[MethodImpl(MethodImplOptions.AggressiveInlining)] [MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsMapped(ulong va) public override bool IsMapped(ulong va)
{ {
return ValidateAddress(va) && _pages.IsMapped(va); return ValidateAddress(va) && _pages.IsMapped(va);
} }
@ -311,8 +270,6 @@ namespace Ryujinx.Cpu.Jit
return _pages.IsRangeMapped(va, size); return _pages.IsRangeMapped(va, size);
} }
private static void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();
private bool TryGetVirtualContiguous(ulong va, int size, out MemoryBlock memory, out ulong offset) private bool TryGetVirtualContiguous(ulong va, int size, out MemoryBlock memory, out ulong offset)
{ {
if (_addressSpace.HasAnyPrivateAllocation(va, (ulong)size, out PrivateRange range)) if (_addressSpace.HasAnyPrivateAllocation(va, (ulong)size, out PrivateRange range))
@ -491,44 +448,11 @@ namespace Ryujinx.Cpu.Jit
return regions; return regions;
} }
private void ReadImpl(ulong va, Span<byte> data)
{
if (data.Length == 0)
{
return;
}
try
{
AssertValidAddressAndSize(va, (ulong)data.Length);
ulong endVa = va + (ulong)data.Length;
int offset = 0;
while (va < endVa)
{
(MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(data.Length - offset));
memory.GetSpan(rangeOffset, (int)copySize).CopyTo(data.Slice(offset, (int)copySize));
va += copySize;
offset += (int)copySize;
}
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
/// <inheritdoc/> /// <inheritdoc/>
/// <remarks> /// <remarks>
/// This function also validates that the given range is both valid and mapped, and will throw if it is not. /// This function also validates that the given range is both valid and mapped, and will throw if it is not.
/// </remarks> /// </remarks>
public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null) public override void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{ {
AssertValidAddressAndSize(va, size); AssertValidAddressAndSize(va, size);
@ -543,23 +467,6 @@ namespace Ryujinx.Cpu.Jit
_pages.SignalMemoryTracking(Tracking, va, size, write, exemptId); _pages.SignalMemoryTracking(Tracking, va, size, write, exemptId);
} }
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private int GetPagesCount(ulong va, ulong size, out ulong startVa)
{
// WARNING: Always check if ulong does not overflow during the operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
public RegionHandle BeginTracking(ulong address, ulong size, int id, RegionFlags flags = RegionFlags.None) public RegionHandle BeginTracking(ulong address, ulong size, int id, RegionFlags flags = RegionFlags.None)
{ {
return Tracking.BeginTracking(address, size, id, flags); return Tracking.BeginTracking(address, size, id, flags);
@ -618,10 +525,44 @@ namespace Ryujinx.Cpu.Jit
_nativePageTable.Dispose(); _nativePageTable.Dispose();
} }
protected override Span<byte> GetPhysicalAddressSpan(ulong pa, int size) protected override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
=> _backingMemory.GetMemory(pa, size);
protected override Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
=> _backingMemory.GetSpan(pa, size); => _backingMemory.GetSpan(pa, size);
protected override ulong TranslateVirtualAddressForRead(ulong va) protected override void WriteImpl(ulong va, ReadOnlySpan<byte> data)
=> GetPhysicalAddressInternal(va); {
try
{
AssertValidAddressAndSize(va, (ulong)data.Length);
ulong endVa = va + (ulong)data.Length;
int offset = 0;
while (va < endVa)
{
(MemoryBlock memory, ulong rangeOffset, ulong copySize) = GetMemoryOffsetAndSize(va, (ulong)(data.Length - offset));
data.Slice(offset, (int)copySize).CopyTo(memory.GetSpan(rangeOffset, (int)copySize));
va += copySize;
offset += (int)copySize;
}
}
catch (InvalidMemoryRegionException)
{
if (_invalidAccessHandler == null || !_invalidAccessHandler(va))
{
throw;
}
}
}
protected override nuint TranslateVirtualAddressChecked(ulong va)
=> (nuint)GetPhysicalAddressChecked(va);
protected override nuint TranslateVirtualAddressUnchecked(ulong va)
=> (nuint)GetPhysicalAddressInternal(va);
} }
} }

View file

@@ -1,13 +1,10 @@
 using Ryujinx.Memory;
 using System.Diagnostics;
-using System.Numerics;
 using System.Threading;

 namespace Ryujinx.Cpu
 {
-    public abstract class VirtualMemoryManagerRefCountedBase<TVirtual, TPhysical> : VirtualMemoryManagerBase<TVirtual, TPhysical>, IRefCounted
-        where TVirtual : IBinaryInteger<TVirtual>
-        where TPhysical : IBinaryInteger<TPhysical>
+    public abstract class VirtualMemoryManagerRefCountedBase : VirtualMemoryManagerBase, IRefCounted
     {
         private int _referenceCount;

View file

@ -3,7 +3,6 @@ using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Runtime.CompilerServices; using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace Ryujinx.Memory namespace Ryujinx.Memory
{ {
@ -11,7 +10,7 @@ namespace Ryujinx.Memory
/// Represents a address space manager. /// Represents a address space manager.
/// Supports virtual memory region mapping, address translation and read/write access to mapped regions. /// Supports virtual memory region mapping, address translation and read/write access to mapped regions.
/// </summary> /// </summary>
public sealed class AddressSpaceManager : VirtualMemoryManagerBase<ulong, nuint>, IVirtualMemoryManager, IWritableBlock public sealed class AddressSpaceManager : VirtualMemoryManagerBase, IVirtualMemoryManager
{ {
/// <inheritdoc/> /// <inheritdoc/>
public bool Supports4KBPages => true; public bool Supports4KBPages => true;
@ -63,8 +62,7 @@ namespace Ryujinx.Memory
} }
} }
/// <inheritdoc/> public override void MapForeign(ulong va, nuint hostPointer, ulong size)
public void MapForeign(ulong va, nuint hostPointer, ulong size)
{ {
AssertValidAddressAndSize(va, size); AssertValidAddressAndSize(va, size);
@ -92,106 +90,6 @@ namespace Ryujinx.Memory
} }
} }
/// <inheritdoc/>
public T Read<T>(ulong va) where T : unmanaged
{
return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
}
/// <inheritdoc/>
public void Write<T>(ulong va, T value) where T : unmanaged
{
Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
}
/// <inheritdoc/>
public void Write(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
AssertValidAddressAndSize(va, (ulong)data.Length);
if (IsContiguousAndMapped(va, data.Length))
{
data.CopyTo(GetHostSpanContiguous(va, data.Length));
}
else
{
int offset = 0, size;
if ((va & PageMask) != 0)
{
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
data[..size].CopyTo(GetHostSpanContiguous(va, size));
offset += size;
}
for (; offset < data.Length; offset += size)
{
size = Math.Min(data.Length - offset, PageSize);
data.Slice(offset, size).CopyTo(GetHostSpanContiguous(va + (ulong)offset, size));
}
}
}
/// <inheritdoc/>
public bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{
Write(va, data);
return true;
}
/// <inheritdoc/>
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return ReadOnlySpan<byte>.Empty;
}
if (IsContiguousAndMapped(va, size))
{
return GetHostSpanContiguous(va, size);
}
else
{
Span<byte> data = new byte[size];
Read(va, data);
return data;
}
}
/// <inheritdoc/>
public unsafe WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return new WritableRegion(null, va, Memory<byte>.Empty);
}
if (IsContiguousAndMapped(va, size))
{
return new WritableRegion(null, va, new NativeMemoryManager<byte>((byte*)GetHostAddress(va), size).Memory);
}
else
{
Memory<byte> memory = new byte[size];
GetSpan(va, size).CopyTo(memory.Span);
return new WritableRegion(this, va, memory);
}
}
/// <inheritdoc/> /// <inheritdoc/>
public unsafe ref T GetRef<T>(ulong va) where T : unmanaged public unsafe ref T GetRef<T>(ulong va) where T : unmanaged
{ {
@ -203,50 +101,6 @@ namespace Ryujinx.Memory
return ref *(T*)GetHostAddress(va); return ref *(T*)GetHostAddress(va);
} }
/// <inheritdoc/>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetPagesCount(ulong va, uint size, out ulong startVa)
{
// WARNING: Always check if ulong does not overflow during the operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
private static void ThrowMemoryNotContiguous() => throw new MemoryNotContiguousException();
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguousAndMapped(ulong va, int size) => IsContiguous(va, size) && IsMapped(va);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private bool IsContiguous(ulong va, int size)
{
if (!ValidateAddress(va) || !ValidateAddressAndSize(va, (ulong)size))
{
return false;
}
int pages = GetPagesCount(va, (uint)size, out va);
for (int page = 0; page < pages - 1; page++)
{
if (!ValidateAddress(va + PageSize))
{
return false;
}
if (GetHostAddress(va) + PageSize != GetHostAddress(va + PageSize))
{
return false;
}
va += PageSize;
}
return true;
}
/// <inheritdoc/> /// <inheritdoc/>
public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size) public IEnumerable<HostMemoryRange> GetHostRegions(ulong va, ulong size)
{ {
@ -304,7 +158,7 @@ namespace Ryujinx.Memory
return null; return null;
} }
int pages = GetPagesCount(va, (uint)size, out va); int pages = GetPagesCount(va, size, out va);
var regions = new List<HostMemoryRange>(); var regions = new List<HostMemoryRange>();
@ -336,9 +190,8 @@ namespace Ryujinx.Memory
return regions; return regions;
} }
/// <inheritdoc/>
[MethodImpl(MethodImplOptions.AggressiveInlining)] [MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsMapped(ulong va) public override bool IsMapped(ulong va)
{ {
if (!ValidateAddress(va)) if (!ValidateAddress(va))
{ {
@ -351,7 +204,7 @@ namespace Ryujinx.Memory
/// <inheritdoc/> /// <inheritdoc/>
public bool IsRangeMapped(ulong va, ulong size) public bool IsRangeMapped(ulong va, ulong size)
{ {
if (size == 0UL) if (size == 0)
{ {
return true; return true;
} }
@ -376,11 +229,6 @@ namespace Ryujinx.Memory
return true; return true;
} }
private unsafe Span<byte> GetHostSpanContiguous(ulong va, int size)
{
return new Span<byte>((void*)GetHostAddress(va), size);
}
private nuint GetHostAddress(ulong va) private nuint GetHostAddress(ulong va)
{ {
return _pageTable.Read(va) + (nuint)(va & PageMask); return _pageTable.Read(va) + (nuint)(va & PageMask);
@ -397,16 +245,16 @@ namespace Ryujinx.Memory
throw new NotImplementedException(); throw new NotImplementedException();
} }
/// <inheritdoc/> protected unsafe override Memory<byte> GetPhysicalAddressMemory(nuint pa, int size)
public void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null) => new NativeMemoryManager<byte>((byte*)pa, size).Memory;
{
// Only the ARM Memory Manager has tracking for now.
}
protected override unsafe Span<byte> GetPhysicalAddressSpan(nuint pa, int size) protected override unsafe Span<byte> GetPhysicalAddressSpan(nuint pa, int size)
=> new((void*)pa, size); => new Span<byte>((void*)pa, size);
protected override nuint TranslateVirtualAddressForRead(ulong va) protected override nuint TranslateVirtualAddressChecked(ulong va)
=> GetHostAddress(va);
protected override nuint TranslateVirtualAddressUnchecked(ulong va)
=> GetHostAddress(va); => GetHostAddress(va);
} }
} }

View file

@ -0,0 +1,60 @@
using System;
using System.Buffers;
using System.Runtime.InteropServices;
namespace Ryujinx.Memory
{
/// <summary>
/// A concrete implementation of <seealso cref="ReadOnlySequence{Byte}"/>,
/// with methods to help build a full sequence.
/// </summary>
public sealed class BytesReadOnlySequenceSegment : ReadOnlySequenceSegment<byte>
{
public BytesReadOnlySequenceSegment(Memory<byte> memory) => Memory = memory;
public BytesReadOnlySequenceSegment Append(Memory<byte> memory)
{
var nextSegment = new BytesReadOnlySequenceSegment(memory)
{
RunningIndex = RunningIndex + Memory.Length
};
Next = nextSegment;
return nextSegment;
}
/// <summary>
/// Attempts to determine if the current <seealso cref="Memory{Byte}"/> and <paramref name="other"/> are contiguous.
/// Only works if both were created by a <seealso cref="NativeMemoryManager{Byte}"/>.
/// </summary>
/// <param name="other">The segment to check if continuous with the current one</param>
/// <param name="contiguousStart">The starting address of the contiguous segment</param>
/// <param name="contiguousSize">The size of the contiguous segment</param>
/// <returns>True if the segments are contiguous, otherwise false</returns>
public unsafe bool IsContiguousWith(Memory<byte> other, out nuint contiguousStart, out int contiguousSize)
{
if (MemoryMarshal.TryGetMemoryManager<byte, NativeMemoryManager<byte>>(Memory, out var thisMemoryManager) &&
MemoryMarshal.TryGetMemoryManager<byte, NativeMemoryManager<byte>>(other, out var otherMemoryManager) &&
thisMemoryManager.Pointer + thisMemoryManager.Length == otherMemoryManager.Pointer)
{
contiguousStart = (nuint)thisMemoryManager.Pointer;
contiguousSize = thisMemoryManager.Length + otherMemoryManager.Length;
return true;
}
else
{
contiguousStart = 0;
contiguousSize = 0;
return false;
}
}
/// <summary>
/// Replaces the current <seealso cref="Memory{Byte}"/> value with the one provided.
/// </summary>
/// <param name="memory">The new segment to hold in this <seealso cref="BytesReadOnlySequenceSegment"/></param>
public void Replace(Memory<byte> memory)
=> Memory = memory;
}
}
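
To illustrate the new type (not part of the diff), a minimal sketch of chaining two segments into a ReadOnlySequence<byte>; the buffers here are hypothetical, while the memory managers in this commit wrap per-page host memory instead and use IsContiguousWith()/Replace() to coalesce adjacent pages.

    using System;
    using System.Buffers;
    using Ryujinx.Memory;

    static class SegmentExample
    {
        // Builds a two-segment, non-contiguous sequence out of two independent buffers.
        public static ReadOnlySequence<byte> Build(Memory<byte> firstBuffer, Memory<byte> secondBuffer)
        {
            var first = new BytesReadOnlySequenceSegment(firstBuffer);
            BytesReadOnlySequenceSegment last = first.Append(secondBuffer);

            return new ReadOnlySequence<byte>(first, 0, last, last.Memory.Length);
        }
    }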

View file

@@ -124,6 +124,16 @@ namespace Ryujinx.Memory
             }
         }

+        /// <summary>
+        /// Gets a read-only sequence of read-only memory blocks from CPU mapped memory.
+        /// </summary>
+        /// <param name="va">Virtual address of the data</param>
+        /// <param name="size">Size of the data</param>
+        /// <param name="tracked">True if read tracking is triggered on the memory</param>
+        /// <returns>A read-only sequence of read-only memory of the data</returns>
+        /// <exception cref="InvalidMemoryRegionException">Throw for unhandled invalid or unmapped memory accesses</exception>
+        ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false);
+
         /// <summary>
         /// Gets a read-only span of data from CPU mapped memory.
         /// </summary>

View file

@@ -14,6 +14,10 @@ namespace Ryujinx.Memory
             _length = length;
         }

+        public unsafe T* Pointer => _pointer;
+
+        public int Length => _length;
+
         public override Span<T> GetSpan()
         {
             return new Span<T>((void*)_pointer, _length);

View file

@ -1,34 +1,171 @@
using Ryujinx.Common.Memory;
using System; using System;
using System.Numerics; using System.Buffers;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace Ryujinx.Memory namespace Ryujinx.Memory
{ {
public abstract class VirtualMemoryManagerBase<TVirtual, TPhysical> public abstract class VirtualMemoryManagerBase : IWritableBlock
where TVirtual : IBinaryInteger<TVirtual>
where TPhysical : IBinaryInteger<TPhysical>
{ {
public const int PageBits = 12; public const int PageBits = 12;
public const int PageSize = 1 << PageBits; public const int PageSize = 1 << PageBits;
public const int PageMask = PageSize - 1; public const int PageMask = PageSize - 1;
protected abstract TVirtual AddressSpaceSize { get; } protected abstract ulong AddressSpaceSize { get; }
public virtual void Read(TVirtual va, Span<byte> data) public virtual ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return ReadOnlySequence<byte>.Empty;
}
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, false);
}
if (IsContiguousAndMapped(va, size))
{
nuint pa = TranslateVirtualAddressUnchecked(va);
return new ReadOnlySequence<byte>(GetPhysicalAddressMemory(pa, size));
}
else
{
AssertValidAddressAndSize(va, size);
int offset = 0, segmentSize;
BytesReadOnlySequenceSegment first = null, last = null;
if ((va & PageMask) != 0)
{
nuint pa = TranslateVirtualAddressChecked(va);
segmentSize = Math.Min(size, PageSize - (int)(va & PageMask));
Memory<byte> memory = GetPhysicalAddressMemory(pa, segmentSize);
first = last = new BytesReadOnlySequenceSegment(memory);
offset += segmentSize;
}
for (; offset < size; offset += segmentSize)
{
nuint pa = TranslateVirtualAddressChecked(va + (ulong)offset);
segmentSize = Math.Min(size - offset, PageSize);
Memory<byte> memory = GetPhysicalAddressMemory(pa, segmentSize);
if (first is null)
{
first = last = new BytesReadOnlySequenceSegment(memory);
}
else
{
if (last.IsContiguousWith(memory, out nuint contiguousStart, out int contiguousSize))
{
last.Replace(GetPhysicalAddressMemory(contiguousStart, contiguousSize));
}
else
{
last = last.Append(memory);
}
}
}
return new ReadOnlySequence<byte>(first, 0, last, (int)(size - last.RunningIndex));
}
}
public virtual ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return ReadOnlySpan<byte>.Empty;
}
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, false);
}
if (IsContiguousAndMapped(va, size))
{
nuint pa = TranslateVirtualAddressUnchecked(va);
return GetPhysicalAddressSpan(pa, size);
}
else
{
Span<byte> data = new byte[size];
Read(va, data);
return data;
}
}
public virtual WritableRegion GetWritableRegion(ulong va, int size, bool tracked = false)
{
if (size == 0)
{
return new WritableRegion(null, va, Memory<byte>.Empty);
}
if (tracked)
{
SignalMemoryTracking(va, (ulong)size, true);
}
if (IsContiguousAndMapped(va, size))
{
nuint pa = TranslateVirtualAddressUnchecked(va);
return new WritableRegion(null, va, GetPhysicalAddressMemory(pa, size));
}
else
{
IMemoryOwner<byte> memoryOwner = ByteMemoryPool.Rent(size);
Read(va, memoryOwner.Memory.Span);
return new WritableRegion(this, va, memoryOwner);
}
}
public abstract bool IsMapped(ulong va);
public virtual void MapForeign(ulong va, nuint hostPointer, ulong size)
{
throw new NotSupportedException();
}
public virtual T Read<T>(ulong va) where T : unmanaged
{
return MemoryMarshal.Cast<byte, T>(GetSpan(va, Unsafe.SizeOf<T>()))[0];
}
public virtual void Read(ulong va, Span<byte> data)
{
if (data.Length == 0)
{
return;
}
AssertValidAddressAndSize(va, data.Length);
int offset = 0, size;
if ((va & PageMask) != 0)
{
nuint pa = TranslateVirtualAddressChecked(va);
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
GetPhysicalAddressSpan(pa, size).CopyTo(data[..size]);
@@ -37,7 +174,7 @@ namespace Ryujinx.Memory
for (; offset < data.Length; offset += size)
{
nuint pa = TranslateVirtualAddressChecked(va + (ulong)offset);
size = Math.Min(data.Length - offset, PageSize);
@@ -45,13 +182,84 @@
}
}
public virtual T ReadTracked<T>(ulong va) where T : unmanaged
{
SignalMemoryTracking(va, (ulong)Unsafe.SizeOf<T>(), false);
return Read<T>(va);
}
public virtual void SignalMemoryTracking(ulong va, ulong size, bool write, bool precise = false, int? exemptId = null)
{
// No default implementation
}
public virtual void Write(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
SignalMemoryTracking(va, (ulong)data.Length, true);
WriteImpl(va, data);
}
public virtual void Write<T>(ulong va, T value) where T : unmanaged
{
Write(va, MemoryMarshal.Cast<T, byte>(MemoryMarshal.CreateSpan(ref value, 1)));
}
public virtual void WriteUntracked(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return;
}
WriteImpl(va, data);
}
public virtual bool WriteWithRedundancyCheck(ulong va, ReadOnlySpan<byte> data)
{
if (data.Length == 0)
{
return false;
}
if (IsContiguousAndMapped(va, data.Length))
{
SignalMemoryTracking(va, (ulong)data.Length, false);
nuint pa = TranslateVirtualAddressChecked(va);
var target = GetPhysicalAddressSpan(pa, data.Length);
bool changed = !data.SequenceEqual(target);
if (changed)
{
data.CopyTo(target);
}
return changed;
}
else
{
Write(va, data);
return true;
}
}
/// <summary>
/// Ensures the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <exception cref="InvalidMemoryRegionException">Thrown when the memory region specified is outside the addressable space</exception>
protected void AssertValidAddressAndSize(ulong va, ulong size)
{
if (!ValidateAddressAndSize(va, size))
{
@@ -59,16 +267,82 @@
}
}
/// <summary>
/// Ensures the combination of virtual address and size is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <exception cref="InvalidMemoryRegionException">Thrown when the memory region specified is outside the addressable space</exception>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected void AssertValidAddressAndSize(ulong va, int size)
=> AssertValidAddressAndSize(va, (ulong)size);
/// <summary>
/// Computes the number of pages in a virtual address range.
/// </summary>
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range</param>
/// <param name="startVa">The virtual address of the beginning of the first page</param>
/// <remarks>This function does not differentiate between allocated and unallocated pages.</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected static int GetPagesCount(ulong va, ulong size, out ulong startVa)
{
// WARNING: Always check if ulong does not overflow during the operations.
startVa = va & ~(ulong)PageMask;
ulong vaSpan = (va - startVa + size + PageMask) & ~(ulong)PageMask;
return (int)(vaSpan / PageSize);
}
protected abstract Memory<byte> GetPhysicalAddressMemory(nuint pa, int size);
protected abstract Span<byte> GetPhysicalAddressSpan(nuint pa, int size);
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool IsContiguous(ulong va, int size) => IsContiguous(va, (ulong)size);
protected virtual bool IsContiguous(ulong va, ulong size)
{
if (!ValidateAddress(va) || !ValidateAddressAndSize(va, size))
{
return false;
}
int pages = GetPagesCount(va, size, out va);
for (int page = 0; page < pages - 1; page++)
{
if (!ValidateAddress(va + PageSize))
{
return false;
}
if (TranslateVirtualAddressUnchecked(va) + PageSize != TranslateVirtualAddressUnchecked(va + PageSize))
{
return false;
}
va += PageSize;
}
return true;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool IsContiguousAndMapped(ulong va, int size)
=> IsContiguous(va, size) && IsMapped(va);
protected abstract nuint TranslateVirtualAddressChecked(ulong va);
protected abstract nuint TranslateVirtualAddressUnchecked(ulong va);
/// <summary>
/// Checks if the virtual address is part of the addressable space.
/// </summary>
/// <param name="va">Virtual address</param>
/// <returns>True if the virtual address is part of the addressable space</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool ValidateAddress(ulong va)
{
return va < AddressSpaceSize;
}
@@ -79,13 +353,53 @@ namespace Ryujinx.Memory
/// <param name="va">Virtual address of the range</param>
/// <param name="size">Size of the range in bytes</param>
/// <returns>True if the combination of virtual address and size is part of the addressable space</returns>
protected bool ValidateAddressAndSize(ulong va, ulong size)
{
ulong endVa = va + size;
return endVa >= va && endVa >= size && endVa <= AddressSpaceSize;
}
protected static void ThrowInvalidMemoryRegionException(string message)
=> throw new InvalidMemoryRegionException(message);
protected static void ThrowMemoryNotContiguous()
=> throw new MemoryNotContiguousException();
protected virtual void WriteImpl(ulong va, ReadOnlySpan<byte> data)
{
AssertValidAddressAndSize(va, data.Length);
if (IsContiguousAndMapped(va, data.Length))
{
nuint pa = TranslateVirtualAddressUnchecked(va);
data.CopyTo(GetPhysicalAddressSpan(pa, data.Length));
}
else
{
int offset = 0, size;
if ((va & PageMask) != 0)
{
nuint pa = TranslateVirtualAddressChecked(va);
size = Math.Min(data.Length, PageSize - (int)(va & PageMask));
data[..size].CopyTo(GetPhysicalAddressSpan(pa, size));
offset += size;
}
for (; offset < data.Length; offset += size)
{
nuint pa = TranslateVirtualAddressChecked(va + (ulong)offset);
size = Math.Min(data.Length - offset, PageSize);
data.Slice(offset, size).CopyTo(GetPhysicalAddressSpan(pa, size));
}
}
}
}
}
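As a usage sketch (not part of the commit): the point of GetReadOnlySequence() is that callers can walk guest memory without copying it, whether or not the range is physically contiguous. The snippet below is an illustrative assumption only; the class and method names (ZeroCopyExample, Checksum) and the memory variable are made up, while IVirtualMemoryManager.GetReadOnlySequence(ulong, int, bool) is the API added by this change.

using Ryujinx.Memory;
using System.Buffers;

internal static class ZeroCopyExample
{
    // Sums every byte in [va, va + size) without copying guest memory.
    public static ulong Checksum(IVirtualMemoryManager memory, ulong va, int size)
    {
        // Contiguous ranges come back as a single segment over host memory;
        // non-contiguous ranges yield one segment per run of mapped pages.
        ReadOnlySequence<byte> data = memory.GetReadOnlySequence(va, size);

        ulong sum = 0;

        foreach (ReadOnlyMemory<byte> segment in data)
        {
            foreach (byte b in segment.Span)
            {
                sum += b;
            }
        }

        return sum;
    }
}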
View file
@@ -1,4 +1,5 @@
using System;
using System.Buffers;
namespace Ryujinx.Memory
{
@@ -6,6 +7,7 @@ namespace Ryujinx.Memory
{
private readonly IWritableBlock _block;
private readonly ulong _va;
private readonly IMemoryOwner<byte> _memoryOwner;
private readonly bool _tracked;
private bool NeedsWriteback => _block != null;
@@ -20,6 +22,12 @@
Memory = memory;
}
public WritableRegion(IWritableBlock block, ulong va, IMemoryOwner<byte> memoryOwner, bool tracked = false)
: this(block, va, memoryOwner.Memory, tracked)
{
_memoryOwner = memoryOwner;
}
public void Dispose()
{
if (NeedsWriteback)
@@ -33,6 +41,8 @@
_block.WriteUntracked(_va, Memory.Span);
}
}
_memoryOwner?.Dispose();
}
}
}
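A similar sketch (again not part of the commit) for the write path: callers take a WritableRegion over an arbitrary guest range and rely on Dispose() to flush it. The names WritableRegionExample, PatchBytes, and memory below are illustrative assumptions; IVirtualMemoryManager.GetWritableRegion(ulong, int, bool) and the WritableRegion.Memory property are from the existing API, and the comments restate the Dispose() behavior shown above.

using Ryujinx.Memory;
using System;

internal static class WritableRegionExample
{
    // Copies patch into guest memory at va, letting WritableRegion decide
    // whether it is a direct view of host memory or a pooled buffer.
    public static void PatchBytes(IVirtualMemoryManager memory, ulong va, ReadOnlySpan<byte> patch)
    {
        // Contiguous mapping: Memory wraps host memory, so no write-back is needed.
        // Non-contiguous mapping: Memory wraps a rented buffer pre-filled by Read();
        // Dispose() writes it back via IWritableBlock and returns the buffer to the pool.
        using WritableRegion region = memory.GetWritableRegion(va, patch.Length);

        patch.CopyTo(region.Memory.Span);
    }
}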
View file
@@ -1,6 +1,7 @@
using Ryujinx.Memory;
using Ryujinx.Memory.Range;
using System;
using System.Buffers;
using System.Collections.Generic;
namespace Ryujinx.Tests.Memory
@@ -57,6 +58,11 @@ namespace Ryujinx.Tests.Memory
throw new NotImplementedException();
}
public ReadOnlySequence<byte> GetReadOnlySequence(ulong va, int size, bool tracked = false)
{
throw new NotImplementedException();
}
public ReadOnlySpan<byte> GetSpan(ulong va, int size, bool tracked = false)
{
throw new NotImplementedException();