
GPU: Access non-prefetch command buffers directly (#3882)

* GPU: Access non-prefetch command buffers directly

Saves constantly allocating new arrays for them - they can be quite small, so those allocations are very wasteful. About 0.4% of GPU thread time in SMO, but it was a bit higher in S/V when I checked.

Assumes that non-prefetch command buffers won't be randomly clobbered before they finish executing, though that's probably a safe bet. (A simplified sketch of the new pattern appears just before the diff.)

* Small change while I'm here

* Address feedback
riperiperi committed on 2022-11-24 01:56:55 +00:00 (via GitHub)
parent 5a39d3c4a1
commit f3cc2e5703

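Before the diff, a minimal sketch of the pattern the commit moves to: prefetched buffers still cache a copied array, while everything else is read through a span over guest memory with no allocation. The names below (IGuestMemory, CommandBufferSketch) are hypothetical stand-ins rather than the real Ryujinx types; the actual code works on the GPU MemoryManager and casts a byte span with MemoryMarshal, as the diff shows.

using System;

// Hypothetical stand-in for the GPU MemoryManager used by the real code.
interface IGuestMemory
{
    // Returns the command words backed by guest memory; may flush GPU-written data first.
    ReadOnlySpan<int> GetWords(ulong address, int count, bool flush);
}

// Simplified model of the command buffer entry after this change.
struct CommandBufferSketch
{
    public ulong EntryAddress;
    public uint EntryCount;

    private int[] _words; // only populated for prefetched buffers

    // Prefetch: copy the words now, because the buffer is read ahead of
    // execution and the guest may reuse the memory in the meantime.
    public void Prefetch(IGuestMemory memory)
    {
        _words = memory.GetWords(EntryAddress, (int)EntryCount, flush: true).ToArray();
    }

    // Fetch: prefer the cached copy, otherwise read straight out of guest
    // memory with no allocation. The returned span must be consumed before
    // the guest can overwrite the buffer.
    public ReadOnlySpan<int> Fetch(IGuestMemory memory, bool flush)
    {
        return _words ?? memory.GetWords(EntryAddress, (int)EntryCount, flush);
    }
}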

@@ -51,16 +51,35 @@ namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
             /// </summary>
             public uint EntryCount;
 
+            /// <summary>
+            /// Get the entries for the command buffer from memory.
+            /// </summary>
+            /// <param name="memoryManager">The memory manager used to fetch the data</param>
+            /// <param name="flush">If true, flushes potential GPU written data before reading the command buffer</param>
+            /// <returns>The fetched data</returns>
+            private ReadOnlySpan<int> GetWords(MemoryManager memoryManager, bool flush)
+            {
+                return MemoryMarshal.Cast<byte, int>(memoryManager.GetSpan(EntryAddress, (int)EntryCount * 4, flush));
+            }
+
+            /// <summary>
+            /// Prefetch the command buffer.
+            /// </summary>
+            /// <param name="memoryManager">The memory manager used to fetch the data</param>
+            public void Prefetch(MemoryManager memoryManager)
+            {
+                Words = GetWords(memoryManager, true).ToArray();
+            }
+
             /// <summary>
             /// Fetch the command buffer.
             /// </summary>
             /// <param name="memoryManager">The memory manager used to fetch the data</param>
             /// <param name="flush">If true, flushes potential GPU written data before reading the command buffer</param>
-            public void Fetch(MemoryManager memoryManager, bool flush = true)
+            /// <returns>The command buffer words</returns>
+            public ReadOnlySpan<int> Fetch(MemoryManager memoryManager, bool flush)
             {
-                if (Words == null)
-                {
-                    Words = MemoryMarshal.Cast<byte, int>(memoryManager.GetSpan(EntryAddress, (int)EntryCount * 4, flush)).ToArray();
-                }
+                return Words ?? GetWords(memoryManager, flush);
             }
         }

@@ -158,7 +177,7 @@ namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
 
                 if (beforeBarrier && commandBuffer.Type == CommandBufferType.Prefetch)
                 {
-                    commandBuffer.Fetch(processor.MemoryManager);
+                    commandBuffer.Prefetch(processor.MemoryManager);
                 }
 
                 if (commandBuffer.Type == CommandBufferType.NoPrefetch)

@@ -199,7 +218,7 @@ namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
                 }
 
                 _currentCommandBuffer = entry;
-                _currentCommandBuffer.Fetch(entry.Processor.MemoryManager, flushCommandBuffer);
+                ReadOnlySpan<int> words = entry.Fetch(entry.Processor.MemoryManager, flushCommandBuffer);
 
                 // If we are changing the current channel,
                 // we need to force all the host state to be updated.

@@ -209,7 +228,7 @@ namespace Ryujinx.Graphics.Gpu.Engine.GPFifo
                     entry.Processor.ForceAllDirty();
                 }
 
-                entry.Processor.Process(entry.EntryAddress, _currentCommandBuffer.Words);
+                entry.Processor.Process(entry.EntryAddress, words);
             }
 
             _interrupt = false;
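A note on the call sites above: prefetch command buffers are still copied up front via Prefetch, since they are read ahead of execution (beforeBarrier) and the memory may change before they run. Non-prefetch buffers are now fetched as a span at dispatch time and handed straight to Processor.Process, which is exactly where the commit message's assumption applies: the guest must not overwrite the buffer while it is still being executed.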