@@ -6380,7 +6380,6 @@ define pcodeop pavgw;
  mmxreg1[32,32] = zext(mmxreg1[32,32] == mmxreg2[32,32]) * 0xFFFFFFFF;
}
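# The zext(a == b) * 0xFFFFFFFF idiom implements the packed-compare result without a
# conditional: the equality test zero-extends to 1 when the 32-bit lanes match and to 0
# otherwise, so the product is either all ones (0xFFFFFFFF) or all zeros for that lane.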

define pcodeop pcmpeqb;
:PCMPEQB XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x74; m128 & XmmReg ...
{
  local m:16 = m128;
@@ -6690,11 +6689,73 @@ define pcodeop pmaxsw;
:PMAXSW XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xEE; XmmReg ... & m128 { XmmReg = pmaxsw(XmmReg, m128); }
:PMAXSW XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xEE; xmmmod = 3 & XmmReg1 & XmmReg2 { XmmReg1 = pmaxsw(XmmReg1, XmmReg2); }

define pcodeop pmaxub;
:PMAXUB mmxreg, m64 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xDE; mmxreg ... & m64 { mmxreg = pmaxub(mmxreg, m64); }
:PMAXUB mmxreg1, mmxreg2 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xDE; mmxmod = 3 & mmxreg1 & mmxreg2 { mmxreg1 = pmaxub(mmxreg1, mmxreg2); }
:PMAXUB XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xDE; XmmReg ... & m128 { XmmReg = pmaxub(XmmReg, m128); }
:PMAXUB XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xDE; xmmmod = 3 & XmmReg1 & XmmReg2 { XmmReg1 = pmaxub(XmmReg1, XmmReg2); }
macro assignUnsignedGreater(dest, x, y){
  dest = (zext(x >= y) * x) + (zext(x < y) * y);
}
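# assignUnsignedGreater is a branch-free unsigned maximum: exactly one of (x >= y) and
# (x < y) zero-extends to 1, so dest receives whichever operand is larger (x on a tie).
# For example, x = 0x02 and y = 0xF0 gives dest = 0*0x02 + 1*0xF0 = 0xF0.  The expanded
# PMAXUB constructors below apply it to each byte lane in place of the opaque pmaxub
# pcodeop, presumably so the lanes carry real dataflow.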

:PMAXUB mmxreg, m64 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xDE; mmxreg ... & m64
{
  assignUnsignedGreater(mmxreg[0,8],mmxreg[0,8],m64[0,8]);
  assignUnsignedGreater(mmxreg[8,8],mmxreg[8,8],m64[8,8]);
  assignUnsignedGreater(mmxreg[16,8],mmxreg[16,8],m64[16,8]);
  assignUnsignedGreater(mmxreg[24,8],mmxreg[24,8],m64[24,8]);
  assignUnsignedGreater(mmxreg[32,8],mmxreg[32,8],m64[32,8]);
  assignUnsignedGreater(mmxreg[40,8],mmxreg[40,8],m64[40,8]);
  assignUnsignedGreater(mmxreg[48,8],mmxreg[48,8],m64[48,8]);
  assignUnsignedGreater(mmxreg[56,8],mmxreg[56,8],m64[56,8]);
}

:PMAXUB mmxreg1, mmxreg2 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xDE; mmxmod = 3 & mmxreg1 & mmxreg2
{
  assignUnsignedGreater(mmxreg1[0,8],mmxreg1[0,8],mmxreg2[0,8]);
  assignUnsignedGreater(mmxreg1[8,8],mmxreg1[8,8],mmxreg2[8,8]);
  assignUnsignedGreater(mmxreg1[16,8],mmxreg1[16,8],mmxreg2[16,8]);
  assignUnsignedGreater(mmxreg1[24,8],mmxreg1[24,8],mmxreg2[24,8]);
  assignUnsignedGreater(mmxreg1[32,8],mmxreg1[32,8],mmxreg2[32,8]);
  assignUnsignedGreater(mmxreg1[40,8],mmxreg1[40,8],mmxreg2[40,8]);
  assignUnsignedGreater(mmxreg1[48,8],mmxreg1[48,8],mmxreg2[48,8]);
  assignUnsignedGreater(mmxreg1[56,8],mmxreg1[56,8],mmxreg2[56,8]);
}

:PMAXUB XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xDE; XmmReg ... & m128
{
  assignUnsignedGreater(XmmReg[0,8],XmmReg[0,8],m128[0,8]);
  assignUnsignedGreater(XmmReg[8,8],XmmReg[8,8],m128[8,8]);
  assignUnsignedGreater(XmmReg[16,8],XmmReg[16,8],m128[16,8]);
  assignUnsignedGreater(XmmReg[24,8],XmmReg[24,8],m128[24,8]);
  assignUnsignedGreater(XmmReg[32,8],XmmReg[32,8],m128[32,8]);
  assignUnsignedGreater(XmmReg[40,8],XmmReg[40,8],m128[40,8]);
  assignUnsignedGreater(XmmReg[48,8],XmmReg[48,8],m128[48,8]);
  assignUnsignedGreater(XmmReg[56,8],XmmReg[56,8],m128[56,8]);
  assignUnsignedGreater(XmmReg[64,8],XmmReg[64,8],m128[64,8]);
  assignUnsignedGreater(XmmReg[72,8],XmmReg[72,8],m128[72,8]);
  assignUnsignedGreater(XmmReg[80,8],XmmReg[80,8],m128[80,8]);
  assignUnsignedGreater(XmmReg[88,8],XmmReg[88,8],m128[88,8]);
  assignUnsignedGreater(XmmReg[96,8],XmmReg[96,8],m128[96,8]);
  assignUnsignedGreater(XmmReg[104,8],XmmReg[104,8],m128[104,8]);
  assignUnsignedGreater(XmmReg[112,8],XmmReg[112,8],m128[112,8]);
  assignUnsignedGreater(XmmReg[120,8],XmmReg[120,8],m128[120,8]);
}

:PMAXUB XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xDE; xmmmod = 3 & XmmReg1 & XmmReg2
{
  assignUnsignedGreater(XmmReg1[0,8],XmmReg1[0,8],XmmReg2[0,8]);
  assignUnsignedGreater(XmmReg1[8,8],XmmReg1[8,8],XmmReg2[8,8]);
  assignUnsignedGreater(XmmReg1[16,8],XmmReg1[16,8],XmmReg2[16,8]);
  assignUnsignedGreater(XmmReg1[24,8],XmmReg1[24,8],XmmReg2[24,8]);
  assignUnsignedGreater(XmmReg1[32,8],XmmReg1[32,8],XmmReg2[32,8]);
  assignUnsignedGreater(XmmReg1[40,8],XmmReg1[40,8],XmmReg2[40,8]);
  assignUnsignedGreater(XmmReg1[48,8],XmmReg1[48,8],XmmReg2[48,8]);
  assignUnsignedGreater(XmmReg1[56,8],XmmReg1[56,8],XmmReg2[56,8]);
  assignUnsignedGreater(XmmReg1[64,8],XmmReg1[64,8],XmmReg2[64,8]);
  assignUnsignedGreater(XmmReg1[72,8],XmmReg1[72,8],XmmReg2[72,8]);
  assignUnsignedGreater(XmmReg1[80,8],XmmReg1[80,8],XmmReg2[80,8]);
  assignUnsignedGreater(XmmReg1[88,8],XmmReg1[88,8],XmmReg2[88,8]);
  assignUnsignedGreater(XmmReg1[96,8],XmmReg1[96,8],XmmReg2[96,8]);
  assignUnsignedGreater(XmmReg1[104,8],XmmReg1[104,8],XmmReg2[104,8]);
  assignUnsignedGreater(XmmReg1[112,8],XmmReg1[112,8],XmmReg2[112,8]);
  assignUnsignedGreater(XmmReg1[120,8],XmmReg1[120,8],XmmReg2[120,8]);
}

define pcodeop pminsw;
:PMINSW mmxreg, m64 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xEA; mmxreg ... & m64 { mmxreg = pminsw(mmxreg, m64); }
@@ -6702,19 +6763,116 @@ define pcodeop pminsw;
:PMINSW XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xEA; XmmReg ... & m128 { XmmReg = pminsw(XmmReg, m128); }
:PMINSW XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xEA; xmmmod = 3 & XmmReg1 & XmmReg2 { XmmReg1 = pminsw(XmmReg1, XmmReg2); }

define pcodeop pminub;
:PMINUB mmxreg, m64 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xDA; mmxreg ... & m64 { mmxreg = pminub(mmxreg, m64); }
:PMINUB mmxreg1, mmxreg2 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xDA; mmxmod = 3 & mmxreg1 & mmxreg2 { mmxreg1 = pminub(mmxreg1, mmxreg2); }
:PMINUB XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xDA; XmmReg ... & m128 { XmmReg = pminub(XmmReg, m128); }
:PMINUB XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xDA; xmmmod = 3 & XmmReg1 & XmmReg2 { XmmReg1 = pminub(XmmReg1, XmmReg2); }
macro assignUnsignedLesser(dest, x, y){
  dest = (zext(x <= y) * x) + (zext(y < x) * y);
}
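# assignUnsignedLesser is the unsigned-minimum counterpart: (x <= y) and (y < x) cannot
# both be 1, so dest receives the smaller operand (x on a tie).  For example, x = 0x02
# and y = 0xF0 gives dest = 1*0x02 + 0*0xF0 = 0x02.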

define pcodeop pmovmskb;
:PMOVMSKB Reg32, mmxreg2 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xD7; Reg32 & mmxreg2 { Reg32 = pmovmskb(Reg32, mmxreg2); }
:PMOVMSKB Reg32, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xD7; Reg32 & XmmReg2 { Reg32 = pmovmskb(Reg32, XmmReg2); }
@ifdef IA64
:PMOVMSKB Reg64, mmxreg2 is vexMode=0 & opsize=2 & mandover=0 & byte=0x0F; byte=0xD7; Reg64 & mmxreg2 { Reg64 = pmovmskb(Reg64, mmxreg2); }
@endif
:PMINUB mmxreg, m64 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xDA; mmxreg ... & m64
{
  assignUnsignedLesser(mmxreg[0,8],mmxreg[0,8],m64[0,8]);
  assignUnsignedLesser(mmxreg[8,8],mmxreg[8,8],m64[8,8]);
  assignUnsignedLesser(mmxreg[16,8],mmxreg[16,8],m64[16,8]);
  assignUnsignedLesser(mmxreg[24,8],mmxreg[24,8],m64[24,8]);
  assignUnsignedLesser(mmxreg[32,8],mmxreg[32,8],m64[32,8]);
  assignUnsignedLesser(mmxreg[40,8],mmxreg[40,8],m64[40,8]);
  assignUnsignedLesser(mmxreg[48,8],mmxreg[48,8],m64[48,8]);
  assignUnsignedLesser(mmxreg[56,8],mmxreg[56,8],m64[56,8]);
}

:PMINUB mmxreg1, mmxreg2 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xDA; mmxmod = 3 & mmxreg1 & mmxreg2
{
  assignUnsignedLesser(mmxreg1[0,8],mmxreg1[0,8],mmxreg2[0,8]);
  assignUnsignedLesser(mmxreg1[8,8],mmxreg1[8,8],mmxreg2[8,8]);
  assignUnsignedLesser(mmxreg1[16,8],mmxreg1[16,8],mmxreg2[16,8]);
  assignUnsignedLesser(mmxreg1[24,8],mmxreg1[24,8],mmxreg2[24,8]);
  assignUnsignedLesser(mmxreg1[32,8],mmxreg1[32,8],mmxreg2[32,8]);
  assignUnsignedLesser(mmxreg1[40,8],mmxreg1[40,8],mmxreg2[40,8]);
  assignUnsignedLesser(mmxreg1[48,8],mmxreg1[48,8],mmxreg2[48,8]);
  assignUnsignedLesser(mmxreg1[56,8],mmxreg1[56,8],mmxreg2[56,8]);
}

:PMINUB XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xDA; XmmReg ... & m128
{
  assignUnsignedLesser(XmmReg[0,8],XmmReg[0,8],m128[0,8]);
  assignUnsignedLesser(XmmReg[8,8],XmmReg[8,8],m128[8,8]);
  assignUnsignedLesser(XmmReg[16,8],XmmReg[16,8],m128[16,8]);
  assignUnsignedLesser(XmmReg[24,8],XmmReg[24,8],m128[24,8]);
  assignUnsignedLesser(XmmReg[32,8],XmmReg[32,8],m128[32,8]);
  assignUnsignedLesser(XmmReg[40,8],XmmReg[40,8],m128[40,8]);
  assignUnsignedLesser(XmmReg[48,8],XmmReg[48,8],m128[48,8]);
  assignUnsignedLesser(XmmReg[56,8],XmmReg[56,8],m128[56,8]);
  assignUnsignedLesser(XmmReg[64,8],XmmReg[64,8],m128[64,8]);
  assignUnsignedLesser(XmmReg[72,8],XmmReg[72,8],m128[72,8]);
  assignUnsignedLesser(XmmReg[80,8],XmmReg[80,8],m128[80,8]);
  assignUnsignedLesser(XmmReg[88,8],XmmReg[88,8],m128[88,8]);
  assignUnsignedLesser(XmmReg[96,8],XmmReg[96,8],m128[96,8]);
  assignUnsignedLesser(XmmReg[104,8],XmmReg[104,8],m128[104,8]);
  assignUnsignedLesser(XmmReg[112,8],XmmReg[112,8],m128[112,8]);
  assignUnsignedLesser(XmmReg[120,8],XmmReg[120,8],m128[120,8]);
}

:PMINUB XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xDA; xmmmod = 3 & XmmReg1 & XmmReg2
{
  assignUnsignedLesser(XmmReg1[0,8],XmmReg1[0,8],XmmReg2[0,8]);
  assignUnsignedLesser(XmmReg1[8,8],XmmReg1[8,8],XmmReg2[8,8]);
  assignUnsignedLesser(XmmReg1[16,8],XmmReg1[16,8],XmmReg2[16,8]);
  assignUnsignedLesser(XmmReg1[24,8],XmmReg1[24,8],XmmReg2[24,8]);
  assignUnsignedLesser(XmmReg1[32,8],XmmReg1[32,8],XmmReg2[32,8]);
  assignUnsignedLesser(XmmReg1[40,8],XmmReg1[40,8],XmmReg2[40,8]);
  assignUnsignedLesser(XmmReg1[48,8],XmmReg1[48,8],XmmReg2[48,8]);
  assignUnsignedLesser(XmmReg1[56,8],XmmReg1[56,8],XmmReg2[56,8]);
  assignUnsignedLesser(XmmReg1[64,8],XmmReg1[64,8],XmmReg2[64,8]);
  assignUnsignedLesser(XmmReg1[72,8],XmmReg1[72,8],XmmReg2[72,8]);
  assignUnsignedLesser(XmmReg1[80,8],XmmReg1[80,8],XmmReg2[80,8]);
  assignUnsignedLesser(XmmReg1[88,8],XmmReg1[88,8],XmmReg2[88,8]);
  assignUnsignedLesser(XmmReg1[96,8],XmmReg1[96,8],XmmReg2[96,8]);
  assignUnsignedLesser(XmmReg1[104,8],XmmReg1[104,8],XmmReg2[104,8]);
  assignUnsignedLesser(XmmReg1[112,8],XmmReg1[112,8],XmmReg2[112,8]);
  assignUnsignedLesser(XmmReg1[120,8],XmmReg1[120,8],XmmReg2[120,8]);
}

#in 64-bit mode the default operand size is 64 bits
#note that gcc assembles pmovmskb eax, mm0 and pmovmskb rax, mm0 to 0f d7 c0
:PMOVMSKB Reg32, mmxreg2 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xD7; mod = 3 & Reg32 & mmxreg2 & check_Reg32_dest
{
  local byte_mask:1 = 0:1;
  byte_mask[0,1] = mmxreg2[7,1];
  byte_mask[1,1] = mmxreg2[15,1];
  byte_mask[2,1] = mmxreg2[23,1];
  byte_mask[3,1] = mmxreg2[31,1];
  byte_mask[4,1] = mmxreg2[39,1];
  byte_mask[5,1] = mmxreg2[47,1];
  byte_mask[6,1] = mmxreg2[55,1];
  byte_mask[7,1] = mmxreg2[63,1];
  Reg32 = zext(byte_mask);
  build check_Reg32_dest;
}
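# byte_mask collects the most significant bit (bit 7) of each of the eight bytes in the
# MMX source, so e.g. mmxreg2 = 0xFF00FF00FF00FF00 produces byte_mask = 0xAA and
# Reg32 = 0x000000AA.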

#in 64-bit mode the default operand size is 64 bits
#note that gcc assembles pmovmskb eax, xmm0 and pmovmskb rax, xmm0 to 66 0f d7 c0
:PMOVMSKB Reg32, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0xD7; mod = 3 & Reg32 & XmmReg2 & check_Reg32_dest
{
  local byte_mask:2 = 0:2;
  byte_mask[0,1] = XmmReg2[7,1];
  byte_mask[1,1] = XmmReg2[15,1];
  byte_mask[2,1] = XmmReg2[23,1];
  byte_mask[3,1] = XmmReg2[31,1];
  byte_mask[4,1] = XmmReg2[39,1];
  byte_mask[5,1] = XmmReg2[47,1];
  byte_mask[6,1] = XmmReg2[55,1];
  byte_mask[7,1] = XmmReg2[63,1];
  byte_mask[8,1] = XmmReg2[71,1];
  byte_mask[9,1] = XmmReg2[79,1];
  byte_mask[10,1] = XmmReg2[87,1];
  byte_mask[11,1] = XmmReg2[95,1];
  byte_mask[12,1] = XmmReg2[103,1];
  byte_mask[13,1] = XmmReg2[111,1];
  byte_mask[14,1] = XmmReg2[119,1];
  byte_mask[15,1] = XmmReg2[127,1];
  Reg32 = zext(byte_mask);
  build check_Reg32_dest;
}

define pcodeop pmulhrsw;
:PMULHRSW mmxreg, m64 is vexMode=0 & mandover=0 & byte=0x0F; byte=0x38; byte=0x0B; mmxreg ... & m64 { mmxreg=pmulhrsw(mmxreg,m64); }
:PMULHRSW mmxreg1, mmxreg2 is vexMode=0 & mandover=0 & byte=0x0F; byte=0x38; byte=0x0B; mmxmod = 3 & mmxreg1 & mmxreg2 { mmxreg1=pmulhrsw(mmxreg1,mmxreg2); }
@@ -6892,8 +7050,23 @@ define pcodeop psignd;
:PSIGND XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x0a; XmmReg ... & m128 { XmmReg=psignd(XmmReg,m128); }
:PSIGND XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x0a; xmmmod = 3 & XmmReg1 & XmmReg2 { XmmReg1=psignd(XmmReg1,XmmReg2); }

define pcodeop pslldq;
:PSLLDQ XmmReg2, imm8 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x73; xmmmod = 3 & reg_opcode=7 & XmmReg2; imm8 { XmmReg2 = pslldq(XmmReg2, imm8:8); }
#break into two 64-bit chunks so decompiler can follow constants
:PSLLDQ XmmReg2, imm8 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x73; xmmmod = 3 & reg_opcode=7 & XmmReg2; imm8
{
  if (imm8:1 > 15:1) goto <zero>;
  local low64copy:8 = XmmReg2[0,64];
  XmmReg2[0,64] = XmmReg2[0,64] << (8:1 * imm8:1);
  if (imm8:1 > 8:1) goto <greater>;
  XmmReg2[64,64] = (XmmReg2[64,64] << (8:1 * imm8:1)) | (low64copy >> (8:1 * (8 - imm8:1)));
  goto <end>;
  <greater>
  XmmReg2[64,64] = low64copy << (8:1 * (imm8 - 8));
  goto <end>;
  <zero>
  XmmReg2[0,64] = 0:8;
  XmmReg2[64,64] = 0:8;
  <end>
}
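# The byte shift is modeled on the two 64-bit halves: for imm8 <= 8 the low half shifts
# left by 8*imm8 bits and the high half also takes in the bits carried out of the low
# half; for 8 < imm8 <= 15 the high half is simply the old low half shifted left by
# 8*(imm8-8) bits; any larger count clears the whole register.  For example, imm8 = 3
# gives low' = low << 24 and high' = (high << 24) | (low >> 40).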

define pcodeop psllw;
:PSLLW mmxreg, m64 is vexMode=0 & mandover=0 & byte=0x0F; byte=0xF1; mmxreg ... & m64 ... { mmxreg = psllw(mmxreg, m64); }
@@ -8003,13 +8176,43 @@ define pcodeop pminsb;
:PMINSB XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x38; XmmReg ... & m128 { XmmReg = pminsb(XmmReg, m128); }
:PMINSB XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x38; xmmmod=3 & XmmReg1 & XmmReg2 { XmmReg1 = pminsb(XmmReg1, XmmReg2); }

define pcodeop pminuw;
:PMINUW XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3A; XmmReg ... & m128 { XmmReg = pminuw(XmmReg, m128); }
:PMINUW XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3A; xmmmod=3 & XmmReg1 & XmmReg2 { XmmReg1 = pminuw(XmmReg1, XmmReg2); }
:PMINUW XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3A; XmmReg ... & m128
{
  assignUnsignedLesser(XmmReg[0,16],XmmReg[0,16],m128[0,16]);
  assignUnsignedLesser(XmmReg[16,16],XmmReg[16,16],m128[16,16]);
  assignUnsignedLesser(XmmReg[32,16],XmmReg[32,16],m128[32,16]);
  assignUnsignedLesser(XmmReg[48,16],XmmReg[48,16],m128[48,16]);
  assignUnsignedLesser(XmmReg[64,16],XmmReg[64,16],m128[64,16]);
  assignUnsignedLesser(XmmReg[80,16],XmmReg[80,16],m128[80,16]);
  assignUnsignedLesser(XmmReg[96,16],XmmReg[96,16],m128[96,16]);
  assignUnsignedLesser(XmmReg[112,16],XmmReg[112,16],m128[112,16]);
}
:PMINUW XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3A; xmmmod=3 & XmmReg1 & XmmReg2
{
  assignUnsignedLesser(XmmReg1[0,16],XmmReg1[0,16],XmmReg2[0,16]);
  assignUnsignedLesser(XmmReg1[16,16],XmmReg1[16,16],XmmReg2[16,16]);
  assignUnsignedLesser(XmmReg1[32,16],XmmReg1[32,16],XmmReg2[32,16]);
  assignUnsignedLesser(XmmReg1[48,16],XmmReg1[48,16],XmmReg2[48,16]);
  assignUnsignedLesser(XmmReg1[64,16],XmmReg1[64,16],XmmReg2[64,16]);
  assignUnsignedLesser(XmmReg1[80,16],XmmReg1[80,16],XmmReg2[80,16]);
  assignUnsignedLesser(XmmReg1[96,16],XmmReg1[96,16],XmmReg2[96,16]);
  assignUnsignedLesser(XmmReg1[112,16],XmmReg1[112,16],XmmReg2[112,16]);
}

define pcodeop pminud;
:PMINUD XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3B; XmmReg ... & m128 { XmmReg = pminud(XmmReg, m128); }
:PMINUD XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3B; xmmmod=3 & XmmReg1 & XmmReg2 { XmmReg1 = pminud(XmmReg1, XmmReg2); }
:PMINUD XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3B; XmmReg ... & m128
{
  assignUnsignedLesser(XmmReg[0,32],XmmReg[0,32],m128[0,32]);
  assignUnsignedLesser(XmmReg[32,32],XmmReg[32,32],m128[32,32]);
  assignUnsignedLesser(XmmReg[64,32],XmmReg[64,32],m128[64,32]);
  assignUnsignedLesser(XmmReg[96,32],XmmReg[96,32],m128[96,32]);
}
:PMINUD XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3B; xmmmod=3 & XmmReg1 & XmmReg2
{
  assignUnsignedLesser(XmmReg1[0,32],XmmReg1[0,32],XmmReg2[0,32]);
  assignUnsignedLesser(XmmReg1[32,32],XmmReg1[32,32],XmmReg2[32,32]);
  assignUnsignedLesser(XmmReg1[64,32],XmmReg1[64,32],XmmReg2[64,32]);
  assignUnsignedLesser(XmmReg1[96,32],XmmReg1[96,32],XmmReg2[96,32]);
}

define pcodeop pminsd;
:PMINSD XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x39; XmmReg ... & m128 { XmmReg = pminsd(XmmReg, m128); }
@@ -8019,13 +8222,46 @@ define pcodeop pmaxsb;
:PMAXSB XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3C; XmmReg ... & m128 { XmmReg = pmaxsb(XmmReg, m128); }
:PMAXSB XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3C; xmmmod=3 & XmmReg1 & XmmReg2 { XmmReg1 = pmaxsb(XmmReg1, XmmReg2); }

define pcodeop pmaxuw;
:PMAXUW XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3E; XmmReg ... & m128 { XmmReg = pmaxuw(XmmReg, m128); }
:PMAXUW XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3E; xmmmod=3 & XmmReg1 & XmmReg2 { XmmReg1 = pmaxuw(XmmReg1, XmmReg2); }

define pcodeop pmaxud;
:PMAXUD XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3F; XmmReg ... & m128 { XmmReg = pmaxud(XmmReg, m128); }
:PMAXUD XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3F; xmmmod=3 & XmmReg1 & XmmReg2 { XmmReg1 = pmaxud(XmmReg1, XmmReg2); }
:PMAXUW XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3E; XmmReg ... & m128
{
  assignUnsignedGreater(XmmReg[0,16],XmmReg[0,16],m128[0,16]);
  assignUnsignedGreater(XmmReg[16,16],XmmReg[16,16],m128[16,16]);
  assignUnsignedGreater(XmmReg[32,16],XmmReg[32,16],m128[32,16]);
  assignUnsignedGreater(XmmReg[48,16],XmmReg[48,16],m128[48,16]);
  assignUnsignedGreater(XmmReg[64,16],XmmReg[64,16],m128[64,16]);
  assignUnsignedGreater(XmmReg[80,16],XmmReg[80,16],m128[80,16]);
  assignUnsignedGreater(XmmReg[96,16],XmmReg[96,16],m128[96,16]);
  assignUnsignedGreater(XmmReg[112,16],XmmReg[112,16],m128[112,16]);
}

:PMAXUW XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3E; xmmmod=3 & XmmReg1 & XmmReg2
{
  assignUnsignedGreater(XmmReg1[0,16],XmmReg1[0,16],XmmReg2[0,16]);
  assignUnsignedGreater(XmmReg1[16,16],XmmReg1[16,16],XmmReg2[16,16]);
  assignUnsignedGreater(XmmReg1[32,16],XmmReg1[32,16],XmmReg2[32,16]);
  assignUnsignedGreater(XmmReg1[48,16],XmmReg1[48,16],XmmReg2[48,16]);
  assignUnsignedGreater(XmmReg1[64,16],XmmReg1[64,16],XmmReg2[64,16]);
  assignUnsignedGreater(XmmReg1[80,16],XmmReg1[80,16],XmmReg2[80,16]);
  assignUnsignedGreater(XmmReg1[96,16],XmmReg1[96,16],XmmReg2[96,16]);
  assignUnsignedGreater(XmmReg1[112,16],XmmReg1[112,16],XmmReg2[112,16]);
}

:PMAXUD XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3F; XmmReg ... & m128
{
  assignUnsignedGreater(XmmReg[0,32],XmmReg[0,32],m128[0,32]);
  assignUnsignedGreater(XmmReg[32,32],XmmReg[32,32],m128[32,32]);
  assignUnsignedGreater(XmmReg[64,32],XmmReg[64,32],m128[64,32]);
  assignUnsignedGreater(XmmReg[96,32],XmmReg[96,32],m128[96,32]);
}

:PMAXUD XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3F; xmmmod=3 & XmmReg1 & XmmReg2
{
  assignUnsignedGreater(XmmReg1[0,32],XmmReg1[0,32],XmmReg2[0,32]);
  assignUnsignedGreater(XmmReg1[32,32],XmmReg1[32,32],XmmReg2[32,32]);
  assignUnsignedGreater(XmmReg1[64,32],XmmReg1[64,32],XmmReg2[64,32]);
  assignUnsignedGreater(XmmReg1[96,32],XmmReg1[96,32],XmmReg2[96,32]);
}

define pcodeop pmaxsd;
:PMAXSD XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x3D; XmmReg ... & m128 { XmmReg = pmaxsd(XmmReg, m128); }
@@ -8149,9 +8385,16 @@ define pcodeop pmovzxdq;
  SF = 0;
}

define pcodeop pcmpeqq;
:PCMPEQQ XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x29; XmmReg ... & m128 { XmmReg = pcmpeqq(XmmReg, m128); }
:PCMPEQQ XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x29; xmmmod=3 & XmmReg1 & XmmReg2 { XmmReg1 = pcmpeqq(XmmReg1, XmmReg2); }
:PCMPEQQ XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x29; XmmReg ... & m128
{
  XmmReg[0,64] = zext(XmmReg[0,64] == m128[0,64]) * 0xffffffffffffffff:8;
  XmmReg[64,64] = zext(XmmReg[64,64] == m128[64,64]) * 0xffffffffffffffff:8;
}
:PCMPEQQ XmmReg1, XmmReg2 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x29; xmmmod=3 & XmmReg1 & XmmReg2
{
  XmmReg1[0,64] = zext(XmmReg1[0,64] == XmmReg2[0,64]) * 0xffffffffffffffff:8;
  XmmReg1[64,64] = zext(XmmReg1[64,64] == XmmReg2[64,64]) * 0xffffffffffffffff:8;
}
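# Each 64-bit lane of the destination becomes all ones (0xFFFFFFFFFFFFFFFF) when the
# corresponding source lanes are equal and zero otherwise: zext of the equality test
# yields 0 or 1, and the multiply spreads that across the lane.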

define pcodeop packusdw;
:PACKUSDW XmmReg, m128 is vexMode=0 & $(PRE_66) & byte=0x0F; byte=0x38; byte=0x2B; XmmReg ... & m128 { XmmReg = packusdw(XmmReg, m128); }