ryujinx-fork/ARMeilleure/Instructions/InstEmitSimdCrypto32.cs
riperiperi f695a215ad
Add Fast Paths for Crypto instructions (A32/A64) (#1026)
* Add Fast Paths for Crypto instructions (A32/A64)

* Replace additional XOR with passing in const zero.
2020-03-25 17:20:29 +11:00

using ARMeilleure.Decoders;
using ARMeilleure.IntermediateRepresentation;
using ARMeilleure.Translation;

using static ARMeilleure.Instructions.InstEmitHelper;

namespace ARMeilleure.Instructions
{
    partial class InstEmit32
    {
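        // A32 AES instruction emitters. Each handler uses the x86 AES-NI intrinsics when
        // Optimizations.UseAesni is enabled, and otherwise calls the managed SoftFallback
        // implementation of the same transform.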
        public static void Aesd_V(ArmEmitterContext context)
        {
            OpCode32Simd op = (OpCode32Simd)context.CurrOp;

            Operand d = GetVecA32(op.Qd);
            Operand n = GetVecA32(op.Qm);

            Operand res;

            if (Optimizations.UseAesni)
            {
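                // AESD: AddRoundKey (d ^ n), then InvShiftRows and InvSubBytes. AESDECLAST applies
                // its round key last, so XOR the state with the key up front and pass a zero key.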
                res = context.AddIntrinsic(Intrinsic.X86Aesdeclast, context.AddIntrinsic(Intrinsic.X86Xorpd, d, n), context.VectorZero());
            }
            else
            {
                res = context.Call(new _V128_V128_V128(SoftFallback.Decrypt), d, n);
            }

            context.Copy(d, res);
        }

        public static void Aese_V(ArmEmitterContext context)
        {
            OpCode32Simd op = (OpCode32Simd)context.CurrOp;

            Operand d = GetVecA32(op.Qd);
            Operand n = GetVecA32(op.Qm);

            Operand res;

            if (Optimizations.UseAesni)
            {
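                // AESE: AddRoundKey (d ^ n), then ShiftRows and SubBytes. AESENCLAST applies its
                // round key last, so XOR the state with the key up front and pass a zero key.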
                res = context.AddIntrinsic(Intrinsic.X86Aesenclast, context.AddIntrinsic(Intrinsic.X86Xorpd, d, n), context.VectorZero());
            }
            else
            {
                res = context.Call(new _V128_V128_V128(SoftFallback.Encrypt), d, n);
            }

            context.Copy(d, res);
        }

        public static void Aesimc_V(ArmEmitterContext context)
        {
            OpCode32Simd op = (OpCode32Simd)context.CurrOp;

            Operand n = GetVecA32(op.Qm);

            Operand res;

            if (Optimizations.UseAesni)
            {
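                // AESIMC is InvMixColumns only, which maps directly onto the x86 AESIMC instruction.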
                res = context.AddIntrinsic(Intrinsic.X86Aesimc, n);
            }
            else
            {
                res = context.Call(new _V128_V128(SoftFallback.InverseMixColumns), n);
            }

            context.Copy(GetVecA32(op.Qd), res);
        }
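
        // AESMC is MixColumns only. x86 has no standalone MixColumns instruction, so the fast
        // path runs AESDECLAST followed by AESENC with zero round keys: the (inverse) ShiftRows
        // and SubBytes steps cancel out, leaving just MixColumns.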
        public static void Aesmc_V(ArmEmitterContext context)
        {
            OpCode32Simd op = (OpCode32Simd)context.CurrOp;

            Operand n = GetVecA32(op.Qm);

            Operand res;

            if (Optimizations.UseAesni)
            {
                Operand roundKey = context.VectorZero();

                // Inverse Shift Rows, Inverse Sub Bytes, xor 0 so nothing happens.
                res = context.AddIntrinsic(Intrinsic.X86Aesdeclast, n, roundKey);

                // Shift Rows, Sub Bytes, Mix Columns (!), xor 0 so nothing happens.
                res = context.AddIntrinsic(Intrinsic.X86Aesenc, res, roundKey);
            }
            else
            {
                res = context.Call(new _V128_V128(SoftFallback.MixColumns), n);
            }

            context.Copy(GetVecA32(op.Qd), res);
        }
    }
}