;; --------------------------------------------------------------------------
;;
;;   Copyright 1996-2024 The NASM Authors - All Rights Reserved
;;   See the file AUTHORS included with the NASM distribution for
;;   the specific copyright holders.
;;
;;   Redistribution and use in source and binary forms, with or without
;;   modification, are permitted provided that the following
;;   conditions are met:
;;
;;   * Redistributions of source code must retain the above copyright
;;     notice, this list of conditions and the following disclaimer.
;;   * Redistributions in binary form must reproduce the above
;;     copyright notice, this list of conditions and the following
;;     disclaimer in the documentation and/or other materials provided
;;     with the distribution.
;;
;;     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
;;     CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
;;     INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
;;     MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
;;     DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
;;     CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
;;     SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
;;     NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
;;     LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
;;     HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
;;     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
;;     OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
;;     EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;
;; --------------------------------------------------------------------------

;
; insns.dat table of instructions for the Netwide Assembler
;
; Format of file: All four fields must be present on every functional
; line. Hence `void' for no-operand instructions, and `\0' for such
; as EQU. If the last three fields are all `ignore', no action is
; taken except to register the opcode as being present.
;
; For a detailed description of the code string (third field), please
; see insns.pl and the comment at the top of assemble.c. For a detailed
; description of the flags (fourth field), please see insns-iflags.ph.
;
; Comments with a pound sign after the semicolon generate section
; subheaders in the NASM documentation.
;
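; As a rough illustration (not an actual entry from the table below, most
; of which now uses the $-prefixed size macros), a plain functional line
; looks like:
;
;       ADD     rm16,reg16      [mr:    o16 01 /r]      8086,SM,LOCK
;
; i.e. mnemonic, comma-separated operand pattern, bracketed code string,
; and the comma-separated flag list.
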
;# Special instructions (pseudo-ops)
; These MUST be first in this file and must maintain the pattern of
; Dx by size, RESx by size, INCBIN, and EQU in that order.
DB ignore ignore PSEUDO
|
|
DW ignore ignore PSEUDO
|
|
DD ignore ignore PSEUDO
|
|
DQ ignore ignore PSEUDO
|
|
DT ignore ignore PSEUDO
|
|
DO ignore ignore PSEUDO
|
|
DY ignore ignore PSEUDO
|
|
DZ ignore ignore PSEUDO
|
|
RESB imm [ resb] PSEUDO
|
|
RESW imm [ resb] PSEUDO
|
|
RESD imm [ resb] PSEUDO
|
|
RESQ imm [ resb] PSEUDO
|
|
REST imm [ resb] PSEUDO
|
|
RESO imm [ resb] PSEUDO
|
|
RESY imm [ resb] PSEUDO
|
|
RESZ imm [ resb] PSEUDO
|
|
INCBIN ignore ignore PSEUDO
|
|
EQU imm ignore PSEUDO
|
|
EQU imm:imm ignore PSEUDO
|
|
|
|
;# No operation
|
|
; In 64-bit mode NOP (90) is technically always 64 bits, but allow "o64 nop" to generate 48 90
|
|
NOP void [ osz norexb nof3 90] 8086
|
|
NOP2 void [ norexb nof3 66 90] 386,ND
|
|
$wdq NOP rm# [m: o# 0f 1f /0] P6
|
|
|
|
;# Integer data move instructions
|
|
$bwdq MOV ax#,mem_offs [-i: o# a0# iwdq] 8086,SM,NOAPX
|
|
$bwdq MOV mem_offs,ax# [i-: o# a2# iwdq] 8086,SM,NOHLE,NOAPX
|
|
$bwdq MOVABS ax#,mem_offs [-i: o# a0# iwdq] 8086,SM,NOAPX,ND
|
|
$bwdq MOVABS mem_offs,ax# [i-: o# a2# iwdq] 8086,SM,NOHLE,NOAPX,ND
|
|
$bwdq MOV rm#,reg# [mr: hlexr o# 88# /r] 8086,SM
|
|
$bwdq MOV reg#,rm# [rm: o# 8a# /r] 8086,SM
|
|
MOV reg64,udword64 [ri: o32 b8+r id,u] X86_64,LONG,SM,OPT,ND
|
|
MOV reg64,sdword64 [mi: o64 c7 /0 id,s] X86_64,LONG,SM,OPT,SDWORD,ND
|
|
$bwdq MOV reg#,imm## [ri: o# b0+r# i##] 8086,SM
|
|
$bwdq MOV reg#,imm##|abs [ri: o# b0+r# i##] 8086,SM,ND
|
|
$bwdq MOVABS reg#,imm## [ri: o# b0+r# i##] 8086,SM,ND
|
|
$bwdq MOV rm#,imm# [mi: hlexr o# c6# /0 i#] 8086,SM
|
|
|
|
;# Load effective address
|
|
$wdq LEA reg#,mem [rm: o# 8d /r] 8086
|
|
$wdq LEA reg#,imm# [rm: o# 8d /r] 8086,ND
|
|
|
|
;# The basic 7 arithmetic operations
|
|
$arith ADD OR ADC SBB AND SUB XOR
|
|
|
|
;# The basic shift and rotate operations
|
|
$shift !x,xs="0f38 f7" ROL x=f3,xs="0f3a f0",ROR RCL RCR x=66,SHL,SAL x=f2,SHR - x=f3,SAR
|
|
|
|
;# Other basic integer arithmetic
|
|
$wd INC reg# [r: o# 40+r] 8086,NOLONG
|
|
$bwdq INC rm# [m: hle o# fe# /0] 8086,LOCK
|
|
$wd DEC reg# [r: o# 48+r] 8086,NOLONG
|
|
$bwdq DEC rm# [m: hle o# fe# /1] 8086,LOCK
|
|
|
|
$bwdq IMUL rm# [m: o# f6# /5] 8086
|
|
$bwdq IMUL rm# [m: evex.nf.nd0.l0.m4.o# f6# /5] APX,SM
|
|
$wdq IMUL reg#,rm# [rm: o# 0f af /r] 386,SM
|
|
$wdq IMUL reg#,rm#*,sbyte# [rmi: o# 6b /r ib,s] 186,SM
|
|
$wdq IMUL reg#,rm#*,imm# [rmi: o# 69 /r i#] 186,SM
|
|
$wdq IMUL reg#?,reg#,rm# [vrm: evex.nf.ndx.l0.m4.o# af /r] APX,SM
|
|
$wdq IMUL reg#,rm#,sbyte# [rmi: evex.nf.zu.l0.m4.o# 6b /r i#] APX,SM
|
|
$wdq IMUL reg#,rm#,imm# [rmi: evex.nf.zu.l0.m4.o# 69 /r i#] APX,SM
|
|
|
|
$bwdq MUL rm# [m: o# f6# /4] 8086
|
|
$dq MULX reg#,reg#*,rm# [rvm: vex.ndd.lz.f2.0f38.w# f6 /r] BMI2
|
|
$dq MUL reg#,reg#*,rm# [rvm: vex.ndd.lz.f2.0f38.w# f6 /r] BMI2,NF!,OPT,ND
|
|
$bwdq MUL rm# [m: evex.nf.nd0.l0.m4.o# f6# /4] APX,SM
|
|
; MUL and IMUL are the same thing when producing single-width output only, so accept MUL to generate IMUL
; thereby creating symmetry.
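; (e.g. "mul ecx,edx" is accepted via the ND aliases below and assembles to
; the same 0f af /r encoding as "imul ecx,edx")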
$wdq MUL reg#,rm# [rm: o# 0f af /r] 386,SM,ND
|
|
$wdq MUL reg#,rm#*,sbyte# [rmi: o# 6b /r ib,s] 186,SM,ND
|
|
$wdq MUL reg#,rm#*,imm# [rmi: o# 69 /r i#] 186,SM,ND
|
|
$wdq MUL reg#?,reg#,rm# [vrm: evex.nf.ndx.l0.m4.o# af /r] APX,SM,ND
|
|
$wdq MUL reg#,rm#,sbyte# [rmi: evex.nf.zu.l0.m4.o# 6b /r i#] APX,SM,ND
|
|
$wdq MUL reg#,rm#,imm# [rmi: evex.nf.zu.l0.m4.o# 69 /r i#] APX,SM,ND
|
|
|
|
$bwdq IDIV rm# [m: o# f6# /7] 8086
|
|
$bwdq DIV rm# [m: o# f6# /6] 8086
|
|
|
|
$bwdq NEG rm# [m: hle o# f6# /3] 8086,LOCK
|
|
$bwdq NOT rm# [m: hle o# f6# /2] 8086,LOCK
|
|
|
|
;# Interleaved flags arithmetic
|
|
$dq ADCX reg#,rm# [rm: 66 o# 0f38 f6 /r ] ADX,ZU,FL
|
|
$dq ADCX reg#?,reg#,rm# [vrm: evex.ndx.l0.66.m4.w# 66 /r ] ADX,APX,ZU,FL
|
|
$dq ADOX reg#,rm# [rm: f3 !osp o# 0f38 f6 /r ] ADX,ZU,FL
|
|
$dq ADOX reg#?,reg#,rm# [vrm: evex.ndx.l0.f3.m4.w# 66 /r ] ADX,APX,ZU,FL
|
|
|
|
;# Double width shift
|
|
$wdq SHLD rm#,reg#,imm8 [mri: o# 0f a4 /r ib,u] 386,SM0-1
|
|
$wdq SHLD rm#,reg#,reg_cl [mr-: o# 0f a5 /r] 386,SM0-1
|
|
$wdq SHRD rm#,reg#,imm8 [mri: o# 0f ac /r ib,u] 386,SM0-1
|
|
$wdq SHRD rm#,reg#,reg_cl [mr-: o# 0f ad /r] 386,SM0-1
|
|
|
|
;# Bit operations
|
|
$wdq BT rm#,reg# [mr: o# 0f a3 /r] 386,SM
|
|
$wdq BT rm#,imm8 [mi: o# 0f ba /4 ib,u] 386
|
|
$wdq BTC rm#,reg# [mr: o# 0f bb /r] 386,SM,LOCK
|
|
$wdq BTC rm#,imm8 [mi: o# 0f ba /7 ib,u] 386,LOCK
|
|
$wdq BTR rm#,reg# [mr: o# 0f b3 /r] 386,SM,LOCK
|
|
$wdq BTR rm#,imm8 [mi: o# 0f ba /6 ib,u] 386,LOCK
|
|
$wdq BTS rm#,reg# [mr: o# 0f ab /r] 386,SM,LOCK
|
|
$wdq BTS rm#,imm8 [mi: o# 0f ba /5 ib,u] 386,LOCK
|
|
|
|
$wdq BSF reg#,rm# [rm: o# nof3 0f bc /r] 386,SM
|
|
$wdq BSR reg#,rm# [rm: o# nof3 0f bd /r] 386,SM
|
|
|
|
; Early 386 chips only
|
|
$wd IBTS rm#,reg# [mr: o# 0f a7 /r] 386,UNDOC,ND,OBSOLETE,NOLONG
|
|
$wd XBTS rm#,reg# [mr: o# 0f a6 /r] 386,UNDOC,ND,OBSOLETE,NOLONG
|
|
|
|
;# BMI1 and BMI2 bit operations
|
|
$wdq LZCNT reg#,rm# [rm: o# f3i 0f bd /r] LZCNT,SM
|
|
$wdq TZCNT reg#,rm# [rm: o# f3i 0f bc /r] BMI1,SM
|
|
|
|
$dq ANDN reg#,reg#*,rm# [rvm: vex.nds.lz.0f38.w# f2 /r] BMI1,SM
|
|
$dq BEXTR reg#,rm#*,reg# [rmv: vex.nds.lz.0f38.w# f7 /r] BMI1,SM
|
|
$dq BLSMSK reg#,rm# [vm: vex.ndd.lz.0f38.w# f3 /2] BMI1,SM
|
|
$dq BLSR reg#,rm# [vm: vex.ndd.lz.0f38.w# f3 /1] BMI1,SM
|
|
$dq BLSI reg#,rm# [vm: vex.ndd.lz.0f38.w# f3 /3] BMI1,SM
|
|
$dq BZHI reg#,rm#*,reg# [rmv: vex.nds.lz.0f38.w# f5 /r] BMI2,SM
|
|
$dq PDEP reg#,reg#*,rm# [rvm: vex.nds.lz.f2.0f38.w# f5 /r] BMI2,SM
|
|
$dq PEXT reg#,reg#*,rm# [rvm: vex.nds.lz.f3.0f38.w# f5 /r] BMI2,SM
|
|
|
|
;# AMD XOP bit operations
|
|
$dq BEXTR reg#,rm#*,imm32 [rmi: xop.m10.lz.w# 10 /r id] AMD,OBSOLETE,TBM,SM0-1
|
|
$dq BLCI reg#,rm# [vm: xop.ndd.lz.m9.w# 02 /6] AMD,OBSOLETE,TBM,SM
|
|
$dq BLCIC reg#,rm# [vm: xop.ndd.lz.m9.w# 01 /5] AMD,OBSOLETE,TBM,SM
|
|
$dq BLSIC reg#,rm# [vm: xop.ndd.lz.m9.w# 01 /6] AMD,OBSOLETE,TBM,SM
|
|
$dq BLCFILL reg#,rm# [vm: xop.ndd.lz.m9.w# 01 /1] AMD,OBSOLETE,TBM,SM
|
|
$dq BLSFILL reg#,rm# [vm: xop.ndd.lz.m9.w# 01 /2] AMD,OBSOLETE,TBM
|
|
$dq BLCMSK reg#,rm# [vm: xop.ndd.lz.m9.w# 02 /1] AMD,OBSOLETE,TBM
|
|
$dq BLCS reg#,rm# [vm: xop.ndd.lz.m9.w# 01 /3] AMD,OBSOLETE,TBM
|
|
$dq TZMSK reg#,rm# [vm: xop.ndd.lz.m9.w# 01 /4] AMD,OBSOLETE,TBM
|
|
$dq T1MSKC reg#,rm# [vm: xop.ndd.lz.m9.w# 01 /7] AMD,OBSOLETE,TBM
|
|
|
|
;# Decimal arithmetic
|
|
AAA void [ 37] 8086,NOLONG
|
|
AAD void [ d5 0a] 8086,NOLONG
|
|
AAD imm8 [i: d5 ib,u] 8086,SB,NOLONG
|
|
AAM void [ d4 0a] 8086,NOLONG
|
|
AAM imm8 [i: d4 ib,u] 8086,SB,NOLONG
|
|
AAS void [ 3f] 8086,NOLONG
|
|
DAA void [ 27] 8086,NOLONG
|
|
DAS void [ 2f] 8086,NOLONG
|
|
|
|
;# Endianness handling
|
|
$dq BSWAP reg# [r: o# 0f c8+r] 486
|
|
BSWAP reg_ax [-: 86 c4] 8086,OPT,NOREX,ND
|
|
BSWAP reg_cx [-: 86 cd] 8086,OPT,NOREX,ND
|
|
BSWAP reg_dx [-: 86 d6] 8086,OPT,NOREX,ND
|
|
BSWAP reg_bx [-: 86 df] 8086,OPT,NOREX,ND
|
|
$wdq MOVBE reg#,mem# [rm: o# norep 0f38 f0 /r] NEHALEM,MOVBE,SM
|
|
$wdq MOVBE mem#,reg# [mr: o# norep 0f38 f1 /r] NEHALEM,MOVBE,SM
|
|
|
|
;# Sign and zero extension
|
|
CBW void [ o16 98] 8086
|
|
CDQ void [ o32 99] 386
|
|
CDQE void [ o64 98] X86_64,LONG
|
|
CQO void [ o64 99] X86_64,LONG
|
|
CWD void [ o16 99] 8086
|
|
CWDE void [ o32 98] 386
|
|
MOVSX reg_ax,reg_al [--: o16 98] 8086,OPT,ND
|
|
MOVSXB reg_ax,reg_al [--: o16 98] 8086,OPT,ND
|
|
MOVSX reg_eax,reg_ax [--: o32 98] 386,OPT,ND
|
|
MOVSXW reg_eax,reg_ax [--: o32 98] 386,OPT,ND
|
|
MOVSX reg_rax,reg_eax [--: o64 98] X86_64,LONG,OPT,ND
|
|
MOVSXD reg_rax,reg_eax [--: o64 98] X86_64,LONG,OPT,ND
|
|
$wdq MOVSX reg#,rm8 [rm: o# 0f be /r] 386,SX
|
|
$wdq MOVSXB reg#,rm8 [rm: o# 0f be /r] 386,ND
|
|
$wdq MOVSX reg#,rm16 [rm: o# 0f bf /r] 386,SX
|
|
$wdq MOVSXW reg#,rm16 [rm: o# 0f bf /r] 386,ND
|
|
$wdq MOVSX reg#,rm32 [rm: o# 63 /r] X86_64,SX,LONG,ND
|
|
$wdq MOVSXD reg#,rm32 [rm: o# 63 /r] X86_64,LONG
|
|
|
|
$wdq MOVZX reg#,rm8 [rm: o# 0f b6 /r] 386,SX
|
|
$wdq MOVZXB reg#,rm8 [rm: o# 0f b6 /r] 386,ND
|
|
$wdq MOVZX reg#,rm16 [rm: o# 0f b7 /r] 386,SX
|
|
$wdq MOVZXW reg#,rm16 [rm: o# 0f b7 /r] 386,ND
|
|
$wdq MOVZX reg#,rm32 [rm: nw o# 8b /r] 8086,OPT,SX,ND
|
|
$wdq MOVZXD reg#,rm32 [rm: nw o# 8b /r] 8086,OPT,ND
|
|
|
|
;# Atomic operations
|
|
$bwdq CMPXCHG rm#,reg# [mr: hle 0f b0# /r] PENT,SM,LOCK
|
|
CMPXCHG8B mem64 [m: hle norexw 0f c7 /1] PENT,LOCK
|
|
CMPXCHG16B mem128 [m: o64 0f c7 /1] X86_64,LONG,LOCK
|
|
|
|
$bwdq XADD rm#,reg# [mr: hle o# 0f c0# /r] 486,SM,LOCK
|
|
|
|
$wq XCHG ax#,reg# [-r: o# 90+r] 8086
|
|
$wq XCHG reg#,ax# [r-: o# 90+r] 8086,ND
|
|
; 32-bit operand size needs special handling, because opcode 90 is special in 64-bit mode:
; there a bare 90 is NOP, so "xchg eax,eax" has to fall through to the 87 /r form below.
|
|
XCHG reg_eax,reg32na [-r: o32 90+r] 386
|
|
XCHG reg32na,reg_eax [r-: o32 90+r] 386,ND
|
|
XCHG reg_eax,reg_eax [--: o32 90] 386,NOLONG,ND
|
|
$bwdq XCHG rm#,reg# [mr: hlenl o# 86# /r] 8086,SM,LOCK
|
|
$bwdq XCHG reg#,rm# [rm: hlenl o# 86# /r] 8086,SM,LOCK,ND
|
|
|
|
; 486-only opcode present in some but not all 486 chips
|
|
$bwd CMPXCHG486 rm#,reg# [mr: 0f a6# /r] 486,SM,UNDOC,NOLONG,ND,LOCK,OBSOLETE
|
|
|
|
;# Jumps
|
|
; Call/jmp near imm/reg/mem are always 64-bit in long mode.
|
|
JMP imm8|short [i: nw eb rel8] 8086,NOAPX
|
|
JMP imm [i: jmp8 nw eb rel8] 8086,SX,ND,NOAPX
|
|
$wdq JMP imm##|near [i: nw o# e9 rel] 8086,OSIZE,ND,BND,NOLONG16
|
|
$wdq JMP rm#|near [m: nw o# ff /4] 8086,OSIZE,BND,NOLONG16
|
|
$wd JMP imm#|far [i: o# ea iwd seg] 8086,OSIZE,ND,NOLONG
|
|
$wd JMP imm16:imm# [ji: o# ea iwd iw] 8086,OSIZE,AR1,NOLONG
|
|
$wd JMP imm16:imm#|far [ji: o# ea iwd iw] 8086,OSIZE,AR1,NOLONG,ND
|
|
; OSIZE,NWSIZE makes us match 64 bits by default in 64-bit mode, even though a REX.W is required.
; This is an intentional "programmer friendliness" quirk.
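; (e.g. in 64-bit mode, "jmp far [rax]" with no explicit size keyword picks
; the 64-bit m16:64 form and is emitted with a REX.W prefix on ff /5)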
$wdq JMP mem#|far [m: o# ff /5] 8086,OSIZE,NWSIZE
|
|
|
|
; APX absolute 64-bit jmp
|
|
JMP imm64|abs [i: a64 np rex2 a1 iq ] APX
|
|
JMPABS imm64 [i: a64 np rex2 a1 iq ] APX,ND
|
|
JMPABS imm64|abs [i: a64 np rex2 a1 iq ] APX,ND
|
|
|
|
Jcc imm8|short [i: nw 70+c rel8] 8086,ND,BND,SX,NOAPX
|
|
Jcc imm [i: jcc8 nw 70+c rel8] 8086,BND,SX,NOAPX
|
|
$zwdq Jcc imm##|near [i: o# 0f 80+c rel] 386,BND,NOAPX,NOLONGwd
|
|
; Jump-over emulation of Jcc on < 386
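; (e.g. in 16-bit code targeting a pre-386 CPU, "jz near label" is emitted
; as "jnz short $+5" followed by "jmp near label")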
Jcc imm16|near [i: nw 71+c jlen e9 rel] 8086,ND,NOAPX,NOLONG
|
|
|
|
; The following only have short forms, hence imm8|near|short
|
|
JCXZ imm8|near|short [i: a16 e3 rel8] 8086,NOLONG
|
|
JECXZ imm8|near|short [i: a32 e3 rel8] 386,NOAPX
|
|
JRCXZ imm8|near|short [i: a64 e3 rel8] X86_64,LONG,NOAPX
|
|
$wdq JCXZ imm8|near|short,cx# [i-: a# e3 rel8] 8086,ND
|
|
|
|
$zwdq LOOP% imm8|near|short [i: a# nw e2 rel8] 8086,NOAPX,(wdq:ND)
|
|
$zwdq LOOPE% imm8|near|short [i: a# nw e1 rel8] 8086,NOAPX,(wdq:ND)
|
|
$zwdq LOOPNE% imm8|near|short [i: a# nw e0 rel8] 8086,NOAPX,(wdq:ND)
|
|
$zwdq LOOPZ% imm8|near|short [i: a# nw e1 rel8] 8086,NOAPX,ND
|
|
$zwdq LOOPNZ% imm8|near|short [i: a# nw e0 rel8] 8086,NOAPX,ND
|
|
$wdq LOOP imm8|near|short,cx# [i-: a# nw e2 rel8] 8086,NOAPX
|
|
$wdq LOOPE imm8|near|short,cx# [i-: a# nw e1 rel8] 8086,NOAPX
|
|
$wdq LOOPNE imm8|near|short,cx# [i-: a# nw e0 rel8] 8086,NOAPX
|
|
$wdq LOOPZ imm8|near|short,cx# [i-: a# nw e1 rel8] 8086,NOAPX,ND
|
|
$wdq LOOPNZ imm8|near|short,cx# [i-: a# nw e0 rel8] 8086,NOAPX,ND
|
|
|
|
; JMPE is obsolete, but seems to be used by a fair number of virtual environments?
|
|
$zwdq JMPE imm##|near [i: nw o# 0f b8 rel] IA64
|
|
; 0f 00 /6 with a prefix has been repurposed in long mode
|
|
$wdq JMPE rm#|near [m: nw o# np 0f 00 /6] IA64
|
|
$wd JMPE rm#|near [m: o# 0f 00 /6] IA64,NOLONG
|
|
|
|
;# Call and return
|
|
$wdq CALL imm##|near [i: nw o# e8 rel] 8086,BND,NOAPX,OSIZE
|
|
$wd CALL imm#|far [i: o# 9a iwd seg] 8086,ND,NOLONG,OSIZE
|
|
$wd CALL imm16:imm# [ji: o# 9a iwd iw] 8086,NOLONG,OSIZE,AR1
|
|
$wd CALL imm16:imm#|far [ji: o# 9a iwd iw] 8086,NOLONG,OSIZE,AR1
|
|
$wdq CALL rm#|near [m: nw o# ff /2] 8086,BND,OSIZE
|
|
; OSIZE,NWSIZE makes us match 64 bits by default in 64-bit mode, even though a REX.W is required.
|
|
; This is an intentional "programmer friendliness" quirk.
|
|
$wdq CALL mem#|far [m: o# ff /3] 8086,OSIZE,NWSIZE
|
|
|
|
$zwdq RET% void [ nw o# c3] 8086,BND
|
|
$zwdq RET% imm16 [i: nw o# c2 iw] 8086,BND
|
|
$zwdq RETF% void [ o# cb] 8086
|
|
$zwdq RETF% imm16 [i: o# ca iw] 8086
|
|
$zwdq RETN% void [ nw o# c3] 8086,BND,ND
|
|
$zwdq RETN% imm16 [i: nw o# c2 iw] 8086,BND,ND
|
|
|
|
;# Interrupts, system calls, and returns
|
|
INT imm8 [i: cd ib,u] 8086
|
|
INT1 void [ f1] 386
|
|
INT01 void [ f1] 386,ND
|
|
ICEBP void [ f1] 386,ND
|
|
INT3 void [ cc] 8086
|
|
INT03 void [ cc] 8086,ND
|
|
BRKPT void [ cc] 8086,ND
|
|
INTO void [ ce] 8086,NOLONG
|
|
|
|
SYSCALL void [ 0f 05] P6,AMD
|
|
SYSENTER void [ 0f 34] P6,NOAPX
|
|
|
|
SYSEXIT void [ 0f 35] P6,PRIV,NOAPX
|
|
SYSRET void [ 0f 07] P6,PRIV,AMD
|
|
|
|
$zwdq IRET% void [ o# cf] 8086
|
|
|
|
ERETS void [ f2 0f 01 ca ] FRED,PRIV,LONG
|
|
ERETU void [ f3 0f 01 ca ] FRED,PRIV,LONG
|
|
|
|
;# Flag register instructions
|
|
CLC void [ f8] 8086
|
|
CLD void [ fc] 8086
|
|
CLI void [ fa] 8086
|
|
CLAC void [ np 0f 01 ca] SMAP,PRIV
|
|
|
|
STC void [ f9] 8086
|
|
STD void [ fd] 8086
|
|
STI void [ fb] 8086
|
|
STAC void [ np 0f 01 cb] SMAP,PRIV
|
|
|
|
CMC void [ f5] 8086
|
|
|
|
LAHF void [ 9f] 8086
|
|
SAHF void [ 9e] 8086
|
|
SALC void [ d6] 8086,UNDOC
|
|
|
|
$zwdq PUSHF% void [ nw o# 9c] 8086
|
|
$zwdq POPF% void [ nw o# 9d] 8086
|
|
|
|
;# String instructions
|
|
$bwdq CMPS% void [ repe o# a6#] 8086,NOAPX
|
|
$bwdq LODS% void [ o# ac#] 8086,NOAPX
|
|
$bwdq MOVS% void [ o# a4#] 8086,NOAPX
|
|
$bwdq STOS% void [ o# aa#] 8086,NOAPX
|
|
$bwdq SCAS% void [ repe o# ae#] 8086,NOAPX
|
|
$bwd INS% void [ o# 6c#] 186
|
|
$bwd OUTS% void [ o# 6e#] 186
|
|
|
|
;# Synchronization and fencing
|
|
LFENCE void [ np 0f ae e8] X86_64,LONG,AMD
|
|
MFENCE void [ np 0f ae f0] X86_64,LONG,AMD
|
|
SFENCE void [ np 0f ae f8] X86_64,LONG,AMD
|
|
SERIALIZE void [ np 0f 01 e8] SERIALIZE
|
|
|
|
;# Memory management and control
|
|
CLFLUSH mem [m: np 0f ae /7] WILLAMETTE,SSE2
|
|
CLFLUSHOPT mem [m: 66 0f ae /7] CLFLUSHOPT
|
|
CLWB mem [m: 66 0f ae /6] CLWB
|
|
|
|
; This one was killed before it saw the light of day
|
|
PCOMMIT void [ 66 0f ae f8] NEVER,NOP
|
|
|
|
; AMD Zen v1
|
|
$wdq CLZERO ax# [-: a# 0f 01 fc] AMD,CLZERO
|
|
CLZERO void [ 0f 01 fc] AMD,CLZERO,ND
|
|
|
|
INVD void [ 0f 08] 486,PRIV
|
|
WBINVD void [ np 0f 09] 486,PRIV
|
|
WBNOINVD void [ f3 0f 09] WBNOINVD,PRIV
|
|
|
|
INVPCID reg32,mem128 [rm: 66 0f38 82 /r] INVPCID,PRIV,NOLONG
|
|
INVPCID reg64,mem128 [rm: 66 0f38 82 /r] INVPCID,PRIV,LONG
|
|
INVLPG mem [m: 0f 01 /7] 486,PRIV
|
|
$wdq INVLPGA ax#,reg_ecx [--: a# 0f 01 df] X86_64,AMD
|
|
INVLPGA void [ adf 0f 01 df] X86_64,AMD,ND
|
|
|
|
;# Special reads: timestamp, CPU number, performance counters, randomness
|
|
RDPMC void [ 0f 33] P6,NOAPX
|
|
RDTSC void [ 0f 31] PENT,NOAPX
|
|
RDTSCP void [ 0f 01 f9] X86_64
|
|
$wdq RDRAND reg# [m: o# 0f c7 /6] RDRAND
|
|
$wdq RDSEED reg# [m: o# 0f c7 /7] RDSEED
|
|
RDPID reg64 [m: f3 0f c7 /7] RDPID,LONG,OPT,ND
|
|
$dq RDPID reg# [m: f3 !osp o# 0f c7 /7] RDPID
|
|
|
|
;# Machine control and management instructions
|
|
CLTS void [ 0f 06] 286,PRIV
|
|
CPUID void [ 0f a2] PENT
|
|
LMSW rm16 [m: 0f 01 /6] 286,PRIV
|
|
SMSW rm16 [m: 0f 01 /4] 286
|
|
SMSW reg64 [m: o64nw 0f 01 /4] X86_64,LONG,ND
|
|
$wdq SMSW reg# [m: o# 0f 01 /4] 286
|
|
|
|
$dq MOV reg#,reg_creg [mr: nw w# rex.l 0f 20 /r] 386,PRIV
|
|
$dq MOV reg_creg,reg# [rm: nw w# rex.l 0f 22 /r] 386,PRIV
|
|
$dq MOV reg#,reg_dreg [mr: nw w# 0f 21 /r] 386,PRIV
|
|
$dq MOV reg_dreg,reg# [rm: nw w# 0f 23 /r] 386,PRIV,NOLONG
|
|
MOV reg32,reg_treg [mr: 0f 24 /r] 386,NOLONG,ND,OBSOLETE
|
|
MOV reg_treg,reg32 [rm: 0f 26 /r] 386,NOLONG,ND,OBSOLETE
|
|
|
|
WRMSR void [ 0f 30] PENT,PRIV,NOAPX
|
|
RDMSR void [ 0f 32] PENT,PRIV,NOAPX
|
|
WRMSRNS void [ np 0f 01 c6 ] WRMSRNS,PRIV,LONG
|
|
RDMSRLIST void [ f2 0f 01 c6 ] MSRLIST,PRIV,LONG
|
|
WRMSRLIST void [ f3 0f 01 c6 ] MSRLIST,PRIV,LONG
|
|
|
|
$bwd UMOV rm#,reg# [mr: np o# 0f 10# /r] 386,UNDOC,SM,ND,NOLONG,OBSOLETE
|
|
$bwd UMOV reg#,rm# [rm: np o# 0f 12# /r] 386,UNDOC,SM,ND,NOLONG,OBSOLETE
|
|
|
|
; Machine control instructions from old Cyrix machines, probably obsolete
|
|
BB0_RESET void [ m1 3a] PENT,CYRIX,NOLONG,OBSOLETE,ND
|
|
BB1_RESET void [ m1 3b] PENT,CYRIX,NOLONG,OBSOLETE,ND
|
|
CPU_READ void [ m1 3d] PENT,NOLONG,CYRIX,OBSOLETE,ND
|
|
CPU_WRITE void [ m1 3c] PENT,NOLONG,CYRIX,OBSOLETE,ND
|
|
DMINT void [ m1 39] P6,NOLONG,CYRIX,NOAPX,OBSOLETE,ND
|
|
RDM void [ 0f 3a] P6,CYRIX,ND,NOLONG,OBSOLETE
|
|
SMINT void [ m1 38] P6,CYRIX,ND,NOLONG,OBSOLETE
|
|
SMINTOLD void [ m1 7e] 486,CYRIX,ND,NOLONG,OBSOLETE
|
|
|
|
;# System management mode
|
|
RSM void [ 0f aa] PENT,SMM
|
|
|
|
; From old Cyrix machines, probably obsolete
|
|
WRSHR rm32 [m: o32 0f 37 /0] P6,CYRIX,SMM,NOLONG,OBSOLETE,ND
|
|
RDSHR rm32 [m: o32 0f 36 /0] P6,CYRIX,SMM,ND,NOLONG,OBSOLETE
|
|
RSDC reg_sreg,mem80 [rm: 0f 79 /r] 486,CYRIX,SMM,NOLONG,ND,OBSOLETE
|
|
RSLDT mem80 [m: 0f 7b /0] 486,CYRIX,SMM,NOLONG,ND,OBSOLETE
|
|
RSTS mem80 [m: 0f 7d /0] 486,CYRIX,SMM,NOLONG,OBSOLETE,ND
|
|
SVDC mem80,reg_sreg [mr: m1 78 /r] 486,CYRIX,SMM,ND,NOLONG,OBSOLETE
|
|
SVLDT mem80 [m: m1 7a /0] 486,CYRIX,SMM,ND,NOLONG,OBSOLETE
|
|
SVTS mem80 [m: m1 7c /0] 486,CYRIX,SMM,ND,NOLONG,OBSOLETE
|
|
|
|
;# Power management
|
|
HLT void [ f4] 8086,PRIV
|
|
; Allow o64 pause to generate REX.W + pause even if redundant
|
|
PAUSE void [ osz f3i 90] 8086
|
|
|
|
$zwdq MONITOR% void [ a# 0f 01 c8] PRESCOTT,MONITOR
|
|
$wdq MONITOR ax#,reg_ecx,reg_edx [---: a# 0f 01 c8] PRESCOTT,MONITOR,ND
|
|
MWAIT void [ 0f 01 c9] PRESCOTT,MONITOR
|
|
MWAIT reg_eax,reg_ecx [--: 0f 01 c9] PRESCOTT,MONITOR,ND
|
|
|
|
$zwdq MONITORX void [ a# 0f 01 fa] MONITORX,AMD
|
|
$wdq MONITORX ax#,reg_ecx,reg_edx [---: a# 0f 01 fa] MONITORX,AMD,ND
|
|
MWAITX void [ 0f 01 fb] MONITORX,AMD
|
|
MWAITX reg_eax,reg_ecx [--: 0f 01 fb] MONITORX,AMD,ND
|
|
|
|
TPAUSE reg32 [m: 66 0f ae /6] WAITPKG
|
|
TPAUSE reg32,reg_edx,reg_eax [m--: 66 0f ae /6] WAITPKG,ND
|
|
$wdq UMONITOR reg# [m: a# f3 0f ae /6] WAITPKG
|
|
UMWAIT reg32 [m: f2 0f ae /6] WAITPKG
|
|
UMWAIT reg32,reg_edx,reg_eax [m--: f2 0f ae /6] WAITPKG,ND
|
|
|
|
;# I/O instructions
|
|
$bwd IN ax#,imm8 [-i: o# e4# ib,u] 8086,NOAPX
|
|
$bwd IN ax#,reg_dx [--: o# ec#] 8086,NOAPX
|
|
$bwd OUT imm8,ax# [i-: o# e6# ib,u] 8086,NOAPX
|
|
$bwd OUT reg_dx,ax# [--: o# ee#] 8086,NOAPX
|
|
|
|
;# Segment handling instructions
|
|
$zwdq MOV sel#,reg_sreg [mr: optd# o# 8c /r] 8086
|
|
$zwdq MOV reg_sreg,sel# [rm: optw# o# 8e /r] 8086
|
|
|
|
$wd LDS reg#,mem# [rm: o# c5 /r] 8086,NOLONG
|
|
$wd LES reg#,mem# [rm: o# c4 /r] 8086,NOLONG
|
|
$wdq LFS reg#,mem# [rm: o# 0f b4 /r] 386,SM
|
|
$wdq LGS reg#,mem# [rm: o# 0f b5 /r] 386,SM
|
|
$wdq LSS reg#,mem# [rm: o# 0f b2 /r] 386,SM
|
|
|
|
PUSH reg_es [-: 06] 8086,NOLONG
|
|
PUSH reg_cs [-: 0e] 8086,NOLONG
|
|
PUSH reg_ss [-: 16] 8086,NOLONG
|
|
PUSH reg_ds [-: 1e] 8086,NOLONG
|
|
PUSH reg_fs [-: 0f a0] 386
|
|
PUSH reg_gs [-: 0f a8] 386
|
|
|
|
POP reg_es [-: 07] 8086,NOLONG
|
|
POP reg_cs [-: m0 0f] 8086,NOLONG,UNDOC,ND,OBSOLETE
|
|
POP reg_ss [-: 17] 8086,NOLONG
|
|
POP reg_ds [-: 1f] 8086,NOLONG
|
|
POP reg_fs [-: 0f a1] 386
|
|
POP reg_gs [-: 0f a9] 386
|
|
|
|
$dq RDFSBASE reg# [m: w# f3 0f ae /0] LONG
|
|
$dq RDGSBASE reg# [m: w# f3 0f ae /1] LONG
|
|
$dq WRFSBASE reg# [m: w# f3 0f ae /2] LONG
|
|
$dq WRGSBASE reg# [m: w# f3 0f ae /3] LONG
|
|
|
|
$zwd ARPL rm16,sel# [mr: optw# 63 /r] 286,PROT,SM,NOLONG
|
|
$wdq LAR reg#,rm_sel [rm: optd# 0f 02 /r] 286,PROT
|
|
$wdq LSL reg#,rm_sel [rm: optd# 0f 03 /r] 286,PROT
|
|
|
|
VERR rm_sel [m: 0f 00 /4] 286,PROT
|
|
VERW rm_sel [m: 0f 00 /5] 286,PROT
|
|
|
|
; The privileged ones...
|
|
SWAPGS void [ 0f 01 f8] X86_64,LONG
|
|
LKGS rm_sel [m: f2 0f 00 /6 ] LKGS,PRIV,LONG
|
|
|
|
$dq LGDT mem# [m: 0f 01 /2] 286,PRIV,NOLONG32
|
|
$dq LIDT mem# [m: 0f 01 /3] 286,PRIV,NOLONG32
|
|
$zwdq LLDT sel# [m: optw# 0f 00 /2] 286,PROT,PRIV
|
|
$zwdq LTR sel# [m: optw# 0f 00 /3] 286,PROT,PRIV
|
|
|
|
$dq SGDT mem# [m: nw o# 0f 01 /0] 286
|
|
$dq SIDT mem# [m: nw o# 0f 01 /1] 286
|
|
$zwdq SLDT sel# [m: optd# 0f 00 /0] 286,PROT
|
|
$zwdq STR sel# [m: optd# 0f 00 /1] 286,PROT
|
|
|
|
LOADALL void [ 0f 07] 386,UNDOC,ND,OBSOLETE
|
|
LOADALL286 void [ 0f 05] 286,UNDOC,ND,OBSOLETE
|
|
|
|
;# x87 floating point
|
|
F2XM1 void [ d9 f0] 8086,FPU
|
|
FABS void [ d9 e1] 8086,FPU
|
|
FADD mem32 [m: d8 /0] 8086,FPU
|
|
FADD mem64 [m: dc /0] 8086,FPU
|
|
FADD fpureg|to [r: dc c0+r] 8086,FPU
|
|
FADD fpureg [r: d8 c0+r] 8086,FPU
|
|
FADD fpureg,fpu0 [r-: dc c0+r] 8086,FPU
|
|
FADD fpu0,fpureg [-r: d8 c0+r] 8086,FPU
|
|
FADD void [ de c1] 8086,FPU,ND
|
|
FADDP fpureg [r: de c0+r] 8086,FPU
|
|
FADDP fpureg,fpu0 [r-: de c0+r] 8086,FPU
|
|
FADDP void [ de c1] 8086,FPU,ND
|
|
FBLD mem80 [m: df /4] 8086,FPU
|
|
FBLD mem [m: df /4] 8086,FPU
|
|
FBSTP mem80 [m: df /6] 8086,FPU
|
|
FBSTP mem [m: df /6] 8086,FPU
|
|
FCHS void [ d9 e0] 8086,FPU
|
|
FCLEX void [ wait db e2] 8086,FPU
|
|
FCMOVB fpureg [r: da c0+r] P6,FPU
|
|
FCMOVB fpu0,fpureg [-r: da c0+r] P6,FPU
|
|
FCMOVB void [ da c1] P6,FPU,ND
|
|
FCMOVBE fpureg [r: da d0+r] P6,FPU
|
|
FCMOVBE fpu0,fpureg [-r: da d0+r] P6,FPU
|
|
FCMOVBE void [ da d1] P6,FPU,ND
|
|
FCMOVE fpureg [r: da c8+r] P6,FPU
|
|
FCMOVE fpu0,fpureg [-r: da c8+r] P6,FPU
|
|
FCMOVE void [ da c9] P6,FPU,ND
|
|
FCMOVNB fpureg [r: db c0+r] P6,FPU
|
|
FCMOVNB fpu0,fpureg [-r: db c0+r] P6,FPU
|
|
FCMOVNB void [ db c1] P6,FPU,ND
|
|
FCMOVNBE fpureg [r: db d0+r] P6,FPU
|
|
FCMOVNBE fpu0,fpureg [-r: db d0+r] P6,FPU
|
|
FCMOVNBE void [ db d1] P6,FPU,ND
|
|
FCMOVNE fpureg [r: db c8+r] P6,FPU
|
|
FCMOVNE fpu0,fpureg [-r: db c8+r] P6,FPU
|
|
FCMOVNE void [ db c9] P6,FPU,ND
|
|
FCMOVNU fpureg [r: db d8+r] P6,FPU
|
|
FCMOVNU fpu0,fpureg [-r: db d8+r] P6,FPU
|
|
FCMOVNU void [ db d9] P6,FPU,ND
|
|
FCMOVU fpureg [r: da d8+r] P6,FPU
|
|
FCMOVU fpu0,fpureg [-r: da d8+r] P6,FPU
|
|
FCMOVU void [ da d9] P6,FPU,ND
|
|
FCOM mem32 [m: d8 /2] 8086,FPU
|
|
FCOM mem64 [m: dc /2] 8086,FPU
|
|
FCOM fpureg [r: d8 d0+r] 8086,FPU
|
|
FCOM fpu0,fpureg [-r: d8 d0+r] 8086,FPU
|
|
FCOM void [ d8 d1] 8086,FPU,ND
|
|
FCOMI fpureg [r: db f0+r] P6,FPU
|
|
FCOMI fpu0,fpureg [-r: db f0+r] P6,FPU
|
|
FCOMI void [ db f1] P6,FPU,ND
|
|
FCOMIP fpureg [r: df f0+r] P6,FPU
|
|
FCOMIP fpu0,fpureg [-r: df f0+r] P6,FPU
|
|
FCOMIP void [ df f1] P6,FPU,ND
|
|
FCOMP mem32 [m: d8 /3] 8086,FPU
|
|
FCOMP mem64 [m: dc /3] 8086,FPU
|
|
FCOMP fpureg [r: d8 d8+r] 8086,FPU
|
|
FCOMP fpu0,fpureg [-r: d8 d8+r] 8086,FPU
|
|
FCOMP void [ d8 d9] 8086,FPU,ND
|
|
FCOMPP void [ de d9] 8086,FPU
|
|
FCOS void [ d9 ff] 386,FPU
|
|
FDECSTP void [ d9 f6] 8086,FPU
|
|
FDISI void [ wait db e1] 8086,FPU
|
|
FDIV mem32 [m: d8 /6] 8086,FPU
|
|
FDIV mem64 [m: dc /6] 8086,FPU
|
|
FDIV fpureg|to [r: dc f8+r] 8086,FPU
|
|
FDIV fpureg [r: d8 f0+r] 8086,FPU
|
|
FDIV fpureg,fpu0 [r-: dc f8+r] 8086,FPU
|
|
FDIV fpu0,fpureg [-r: d8 f0+r] 8086,FPU
|
|
FDIV void [ de f9] 8086,FPU,ND
|
|
FDIVP fpureg [r: de f8+r] 8086,FPU
|
|
FDIVP fpureg,fpu0 [r-: de f8+r] 8086,FPU
|
|
FDIVP void [ de f9] 8086,FPU,ND
|
|
FDIVR mem32 [m: d8 /7] 8086,FPU
|
|
FDIVR mem64 [m: dc /7] 8086,FPU
|
|
FDIVR fpureg|to [r: dc f0+r] 8086,FPU
|
|
FDIVR fpureg,fpu0 [r-: dc f0+r] 8086,FPU
|
|
FDIVR fpureg [r: d8 f8+r] 8086,FPU
|
|
FDIVR fpu0,fpureg [-r: d8 f8+r] 8086,FPU
|
|
FDIVR void [ de f1] 8086,FPU,ND
|
|
FDIVRP fpureg [r: de f0+r] 8086,FPU
|
|
FDIVRP fpureg,fpu0 [r-: de f0+r] 8086,FPU
|
|
FDIVRP void [ de f1] 8086,FPU,ND
|
|
FEMMS void [ 0f 0e] PENT,3DNOW
|
|
FENI void [ wait db e0] 8086,FPU
|
|
FFREE fpureg [r: dd c0+r] 8086,FPU
|
|
FFREE void [ dd c1] 8086,FPU
|
|
FFREEP fpureg [r: df c0+r] 286,FPU,UNDOC
|
|
FFREEP void [ df c1] 286,FPU,UNDOC
|
|
FIADD mem32 [m: da /0] 8086,FPU
|
|
FIADD mem16 [m: de /0] 8086,FPU
|
|
FICOM mem32 [m: da /2] 8086,FPU
|
|
FICOM mem16 [m: de /2] 8086,FPU
|
|
FICOMP mem32 [m: da /3] 8086,FPU
|
|
FICOMP mem16 [m: de /3] 8086,FPU
|
|
FIDIV mem32 [m: da /6] 8086,FPU
|
|
FIDIV mem16 [m: de /6] 8086,FPU
|
|
FIDIVR mem32 [m: da /7] 8086,FPU
|
|
FIDIVR mem16 [m: de /7] 8086,FPU
|
|
FILD mem32 [m: db /0] 8086,FPU
|
|
FILD mem16 [m: df /0] 8086,FPU
|
|
FILD mem64 [m: df /5] 8086,FPU
|
|
FIMUL mem32 [m: da /1] 8086,FPU
|
|
FIMUL mem16 [m: de /1] 8086,FPU
|
|
FINCSTP void [ d9 f7] 8086,FPU
|
|
FINIT void [ wait db e3] 8086,FPU
|
|
FIST mem32 [m: db /2] 8086,FPU
|
|
FIST mem16 [m: df /2] 8086,FPU
|
|
FISTP mem32 [m: db /3] 8086,FPU
|
|
FISTP mem16 [m: df /3] 8086,FPU
|
|
FISTP mem64 [m: df /7] 8086,FPU
|
|
FISTTP mem16 [m: df /1] PRESCOTT,FPU
|
|
FISTTP mem32 [m: db /1] PRESCOTT,FPU
|
|
FISTTP mem64 [m: dd /1] PRESCOTT,FPU
|
|
FISUB mem32 [m: da /4] 8086,FPU
|
|
FISUB mem16 [m: de /4] 8086,FPU
|
|
FISUBR mem32 [m: da /5] 8086,FPU
|
|
FISUBR mem16 [m: de /5] 8086,FPU
|
|
FLD mem32 [m: d9 /0] 8086,FPU
|
|
FLD mem64 [m: dd /0] 8086,FPU
|
|
FLD mem80 [m: db /5] 8086,FPU
|
|
FLD fpureg [r: d9 c0+r] 8086,FPU
|
|
FLD void [ d9 c1] 8086,FPU,ND
|
|
FLD1 void [ d9 e8] 8086,FPU
|
|
FLDCW mem [m: d9 /5] 8086,FPU,SW
|
|
FLDENV mem [m: d9 /4] 8086,FPU
|
|
FLDL2E void [ d9 ea] 8086,FPU
|
|
FLDL2T void [ d9 e9] 8086,FPU
|
|
FLDLG2 void [ d9 ec] 8086,FPU
|
|
FLDLN2 void [ d9 ed] 8086,FPU
|
|
FLDPI void [ d9 eb] 8086,FPU
|
|
FLDZ void [ d9 ee] 8086,FPU
|
|
FMUL mem32 [m: d8 /1] 8086,FPU
|
|
FMUL mem64 [m: dc /1] 8086,FPU
|
|
FMUL fpureg|to [r: dc c8+r] 8086,FPU
|
|
FMUL fpureg,fpu0 [r-: dc c8+r] 8086,FPU
|
|
FMUL fpureg [r: d8 c8+r] 8086,FPU
|
|
FMUL fpu0,fpureg [-r: d8 c8+r] 8086,FPU
|
|
FMUL void [ de c9] 8086,FPU,ND
|
|
FMULP fpureg [r: de c8+r] 8086,FPU
|
|
FMULP fpureg,fpu0 [r-: de c8+r] 8086,FPU
|
|
FMULP void [ de c9] 8086,FPU,ND
|
|
FNCLEX void [ db e2] 8086,FPU
|
|
FNDISI void [ db e1] 8086,FPU
|
|
FNENI void [ db e0] 8086,FPU
|
|
FNINIT void [ db e3] 8086,FPU
|
|
FNOP void [ d9 d0] 8086,FPU
|
|
FNSAVE mem [m: dd /6] 8086,FPU
|
|
FNSTCW mem [m: d9 /7] 8086,FPU,SW
|
|
FNSTENV mem [m: d9 /6] 8086,FPU
|
|
FNSTSW mem [m: dd /7] 8086,FPU,SW
|
|
FNSTSW reg_ax [-: df e0] 286,FPU
|
|
FPATAN void [ d9 f3] 8086,FPU
|
|
FPREM void [ d9 f8] 8086,FPU
|
|
FPREM1 void [ d9 f5] 386,FPU
|
|
FPTAN void [ d9 f2] 8086,FPU
|
|
FRNDINT void [ d9 fc] 8086,FPU
|
|
FRSTOR mem [m: dd /4] 8086,FPU
|
|
FSAVE mem [m: wait dd /6] 8086,FPU
|
|
FSCALE void [ d9 fd] 8086,FPU
|
|
FSETPM void [ db e4] 286,FPU
|
|
FSIN void [ d9 fe] 386,FPU
|
|
FSINCOS void [ d9 fb] 386,FPU
|
|
FSQRT void [ d9 fa] 8086,FPU
|
|
FST mem32 [m: d9 /2] 8086,FPU
|
|
FST mem64 [m: dd /2] 8086,FPU
|
|
FST fpureg [r: dd d0+r] 8086,FPU
|
|
FST void [ dd d1] 8086,FPU,ND
|
|
FSTCW mem [m: wait d9 /7] 8086,FPU,SW
|
|
FSTENV mem [m: wait d9 /6] 8086,FPU
|
|
FSTP mem32 [m: d9 /3] 8086,FPU
|
|
FSTP mem64 [m: dd /3] 8086,FPU
|
|
FSTP mem80 [m: db /7] 8086,FPU
|
|
FSTP fpureg [r: dd d8+r] 8086,FPU
|
|
FSTP void [ dd d9] 8086,FPU,ND
|
|
FSTSW mem [m: wait dd /7] 8086,FPU,SW
|
|
FSTSW reg_ax [-: wait df e0] 286,FPU
|
|
FSUB mem32 [m: d8 /4] 8086,FPU
|
|
FSUB mem64 [m: dc /4] 8086,FPU
|
|
FSUB fpureg|to [r: dc e8+r] 8086,FPU
|
|
FSUB fpureg,fpu0 [r-: dc e8+r] 8086,FPU
|
|
FSUB fpureg [r: d8 e0+r] 8086,FPU
|
|
FSUB fpu0,fpureg [-r: d8 e0+r] 8086,FPU
|
|
FSUB void [ de e9] 8086,FPU,ND
|
|
FSUBP fpureg [r: de e8+r] 8086,FPU
|
|
FSUBP fpureg,fpu0 [r-: de e8+r] 8086,FPU
|
|
FSUBP void [ de e9] 8086,FPU,ND
|
|
FSUBR mem32 [m: d8 /5] 8086,FPU
|
|
FSUBR mem64 [m: dc /5] 8086,FPU
|
|
FSUBR fpureg|to [r: dc e0+r] 8086,FPU
|
|
FSUBR fpureg,fpu0 [r-: dc e0+r] 8086,FPU
|
|
FSUBR fpureg [r: d8 e8+r] 8086,FPU
|
|
FSUBR fpu0,fpureg [-r: d8 e8+r] 8086,FPU
|
|
FSUBR void [ de e1] 8086,FPU,ND
|
|
FSUBRP fpureg [r: de e0+r] 8086,FPU
|
|
FSUBRP fpureg,fpu0 [r-: de e0+r] 8086,FPU
|
|
FSUBRP void [ de e1] 8086,FPU,ND
|
|
FTST void [ d9 e4] 8086,FPU
|
|
FUCOM fpureg [r: dd e0+r] 386,FPU
|
|
FUCOM fpu0,fpureg [-r: dd e0+r] 386,FPU
|
|
FUCOM void [ dd e1] 386,FPU,ND
|
|
FUCOMI fpureg [r: db e8+r] P6,FPU
|
|
FUCOMI fpu0,fpureg [-r: db e8+r] P6,FPU
|
|
FUCOMI void [ db e9] P6,FPU,ND
|
|
FUCOMIP fpureg [r: df e8+r] P6,FPU
|
|
FUCOMIP fpu0,fpureg [-r: df e8+r] P6,FPU
|
|
FUCOMIP void [ df e9] P6,FPU,ND
|
|
FUCOMP fpureg [r: dd e8+r] 386,FPU
|
|
FUCOMP fpu0,fpureg [-r: dd e8+r] 386,FPU
|
|
FUCOMP void [ dd e9] 386,FPU,ND
|
|
FUCOMPP void [ da e9] 386,FPU
|
|
FXAM void [ d9 e5] 8086,FPU
|
|
FXCH fpureg [r: d9 c8+r] 8086,FPU
|
|
FXCH fpureg,fpu0 [r-: d9 c8+r] 8086,FPU
|
|
FXCH fpu0,fpureg [-r: d9 c8+r] 8086,FPU
|
|
FXCH void [ d9 c9] 8086,FPU,ND
|
|
FXTRACT void [ d9 f4] 8086,FPU
|
|
FYL2X void [ d9 f1] 8086,FPU
|
|
FYL2XP1 void [ d9 f9] 8086,FPU
|
|
|
|
;# MMX (SIMD using the x87 register file)
|
|
EMMS void [ 0f 77] PENT,MMX
|
|
MOVD mmxreg,rm32 [rm: np 0f 6e /r] PENT,MMX
|
|
MOVD rm32,mmxreg [mr: np 0f 7e /r] PENT,MMX
|
|
MOVD mmxreg,rm64 [rm: np o64 0f 6e /r] X86_64,LONG,MMX,ND
|
|
MOVD rm64,mmxreg [mr: np o64 0f 7e /r] X86_64,LONG,MMX,ND
|
|
MOVQ mmxreg,mmxrm64 [rm: np 0f 6f /r] PENT,MMX
|
|
MOVQ mmxrm64,mmxreg [mr: np 0f 7f /r] PENT,MMX
|
|
MOVQ mmxreg,rm64 [rm: np o64 0f 6e /r] X86_64,LONG,MMX
|
|
MOVQ rm64,mmxreg [mr: np o64 0f 7e /r] X86_64,LONG,MMX
|
|
PACKSSDW mmxreg,mmxrm [rm: np 0f 6b /r] PENT,MMX,SQ
|
|
PACKSSWB mmxreg,mmxrm [rm: np 0f 63 /r] PENT,MMX,SQ
|
|
PACKUSWB mmxreg,mmxrm [rm: np 0f 67 /r] PENT,MMX,SQ
|
|
PADDB mmxreg,mmxrm [rm: np 0f fc /r] PENT,MMX,SQ
|
|
PADDD mmxreg,mmxrm [rm: np 0f fe /r] PENT,MMX,SQ
|
|
PADDSB mmxreg,mmxrm [rm: np 0f ec /r] PENT,MMX,SQ
|
|
PADDSIW mmxreg,mmxrm [rm: 0f 51 /r] PENT,MMX,SQ,CYRIX
|
|
PADDSW mmxreg,mmxrm [rm: np 0f ed /r] PENT,MMX,SQ
|
|
PADDUSB mmxreg,mmxrm [rm: np 0f dc /r] PENT,MMX,SQ
|
|
PADDUSW mmxreg,mmxrm [rm: np 0f dd /r] PENT,MMX,SQ
|
|
PADDW mmxreg,mmxrm [rm: np 0f fd /r] PENT,MMX,SQ
|
|
PAND mmxreg,mmxrm [rm: np 0f db /r] PENT,MMX,SQ
|
|
PANDN mmxreg,mmxrm [rm: np 0f df /r] PENT,MMX,SQ
|
|
PAVEB mmxreg,mmxrm [rm: 0f 50 /r] PENT,MMX,SQ,CYRIX
|
|
PAVGUSB mmxreg,mmxrm [rm: 0f 0f /r bf] PENT,3DNOW,SQ
|
|
PCMPEQB mmxreg,mmxrm [rm: np 0f 74 /r] PENT,MMX,SQ
|
|
PCMPEQD mmxreg,mmxrm [rm: np 0f 76 /r] PENT,MMX,SQ
|
|
PCMPEQW mmxreg,mmxrm [rm: np 0f 75 /r] PENT,MMX,SQ
|
|
PCMPGTB mmxreg,mmxrm [rm: np 0f 64 /r] PENT,MMX,SQ
|
|
PCMPGTD mmxreg,mmxrm [rm: np 0f 66 /r] PENT,MMX,SQ
|
|
PCMPGTW mmxreg,mmxrm [rm: np 0f 65 /r] PENT,MMX,SQ
|
|
PDISTIB mmxreg,mem [rm: 0f 54 /r] PENT,MMX,SM,CYRIX
|
|
PF2ID mmxreg,mmxrm [rm: 0f 0f /r 1d] PENT,3DNOW,SQ
|
|
PFACC mmxreg,mmxrm [rm: 0f 0f /r ae] PENT,3DNOW,SQ
|
|
PFADD mmxreg,mmxrm [rm: 0f 0f /r 9e] PENT,3DNOW,SQ
|
|
PFCMPEQ mmxreg,mmxrm [rm: 0f 0f /r b0] PENT,3DNOW,SQ
|
|
PFCMPGE mmxreg,mmxrm [rm: 0f 0f /r 90] PENT,3DNOW,SQ
|
|
PFCMPGT mmxreg,mmxrm [rm: 0f 0f /r a0] PENT,3DNOW,SQ
|
|
PFMAX mmxreg,mmxrm [rm: 0f 0f /r a4] PENT,3DNOW,SQ
|
|
PFMIN mmxreg,mmxrm [rm: 0f 0f /r 94] PENT,3DNOW,SQ
|
|
PFMUL mmxreg,mmxrm [rm: 0f 0f /r b4] PENT,3DNOW,SQ
|
|
PFRCP mmxreg,mmxrm [rm: 0f 0f /r 96] PENT,3DNOW,SQ
|
|
PFRCPIT1 mmxreg,mmxrm [rm: 0f 0f /r a6] PENT,3DNOW,SQ
|
|
PFRCPIT2 mmxreg,mmxrm [rm: 0f 0f /r b6] PENT,3DNOW,SQ
|
|
PFRSQIT1 mmxreg,mmxrm [rm: 0f 0f /r a7] PENT,3DNOW,SQ
|
|
PFRSQRT mmxreg,mmxrm [rm: 0f 0f /r 97] PENT,3DNOW,SQ
|
|
PFSUB mmxreg,mmxrm [rm: 0f 0f /r 9a] PENT,3DNOW,SQ
|
|
PFSUBR mmxreg,mmxrm [rm: 0f 0f /r aa] PENT,3DNOW,SQ
|
|
PI2FD mmxreg,mmxrm [rm: 0f 0f /r 0d] PENT,3DNOW,SQ
|
|
PMACHRIW mmxreg,mem [rm: 0f 5e /r] PENT,MMX,SM,CYRIX
|
|
PMADDWD mmxreg,mmxrm [rm: np 0f f5 /r] PENT,MMX,SQ
|
|
PMAGW mmxreg,mmxrm [rm: 0f 52 /r] PENT,MMX,SQ,CYRIX
|
|
PMULHRIW mmxreg,mmxrm [rm: 0f 5d /r] PENT,MMX,SQ,CYRIX
|
|
PMULHRWA mmxreg,mmxrm [rm: 0f 0f /r b7] PENT,3DNOW,SQ
|
|
PMULHRWC mmxreg,mmxrm [rm: 0f 59 /r] PENT,MMX,SQ,CYRIX
|
|
PMULHW mmxreg,mmxrm [rm: np 0f e5 /r] PENT,MMX,SQ
|
|
PMULLW mmxreg,mmxrm [rm: np 0f d5 /r] PENT,MMX,SQ
|
|
PMVGEZB mmxreg,mem [rm: 0f 5c /r] PENT,MMX,SQ,CYRIX
|
|
PMVLZB mmxreg,mem [rm: 0f 5b /r] PENT,MMX,SQ,CYRIX
|
|
PMVNZB mmxreg,mem [rm: 0f 5a /r] PENT,MMX,SQ,CYRIX
|
|
PMVZB mmxreg,mem [rm: 0f 58 /r] PENT,MMX,SQ,CYRIX
|
|
POR mmxreg,mmxrm [rm: np 0f eb /r] PENT,MMX,SQ
|
|
PREFETCH mem [m: 0f 0d /0] PENT,3DNOW,SQ
|
|
PREFETCHW mem [m: 0f 0d /1] PENT,3DNOW,SQ
|
|
PSLLD mmxreg,mmxrm [rm: np 0f f2 /r] PENT,MMX,SQ
|
|
PSLLD mmxreg,imm [mi: np 0f 72 /6 ib,u] PENT,MMX
|
|
PSLLQ mmxreg,mmxrm [rm: np 0f f3 /r] PENT,MMX,SQ
|
|
PSLLQ mmxreg,imm [mi: np 0f 73 /6 ib,u] PENT,MMX
|
|
PSLLW mmxreg,mmxrm [rm: np 0f f1 /r] PENT,MMX,SQ
|
|
PSLLW mmxreg,imm [mi: np 0f 71 /6 ib,u] PENT,MMX
|
|
PSRAD mmxreg,mmxrm [rm: np 0f e2 /r] PENT,MMX,SQ
|
|
PSRAD mmxreg,imm [mi: np 0f 72 /4 ib,u] PENT,MMX
|
|
PSRAW mmxreg,mmxrm [rm: np 0f e1 /r] PENT,MMX,SQ
|
|
PSRAW mmxreg,imm [mi: np 0f 71 /4 ib,u] PENT,MMX
|
|
PSRLD mmxreg,mmxrm [rm: np 0f d2 /r] PENT,MMX,SQ
|
|
PSRLD mmxreg,imm [mi: np 0f 72 /2 ib,u] PENT,MMX
|
|
PSRLQ mmxreg,mmxrm [rm: np 0f d3 /r] PENT,MMX,SQ
|
|
PSRLQ mmxreg,imm [mi: np 0f 73 /2 ib,u] PENT,MMX
|
|
PSRLW mmxreg,mmxrm [rm: np 0f d1 /r] PENT,MMX,SQ
|
|
PSRLW mmxreg,imm [mi: np 0f 71 /2 ib,u] PENT,MMX
|
|
PSUBB mmxreg,mmxrm [rm: np 0f f8 /r] PENT,MMX,SQ
|
|
PSUBD mmxreg,mmxrm [rm: np 0f fa /r] PENT,MMX,SQ
|
|
PSUBSB mmxreg,mmxrm [rm: np 0f e8 /r] PENT,MMX,SQ
|
|
PSUBSIW mmxreg,mmxrm [rm: 0f 55 /r] PENT,MMX,SQ,CYRIX
|
|
PSUBSW mmxreg,mmxrm [rm: np 0f e9 /r] PENT,MMX,SQ
|
|
PSUBUSB mmxreg,mmxrm [rm: np 0f d8 /r] PENT,MMX,SQ
|
|
PSUBUSW mmxreg,mmxrm [rm: np 0f d9 /r] PENT,MMX,SQ
|
|
PSUBW mmxreg,mmxrm [rm: np 0f f9 /r] PENT,MMX,SQ
|
|
PUNPCKHBW mmxreg,mmxrm [rm: np 0f 68 /r] PENT,MMX,SQ
|
|
PUNPCKHDQ mmxreg,mmxrm [rm: np 0f 6a /r] PENT,MMX,SQ
|
|
PUNPCKHWD mmxreg,mmxrm [rm: np 0f 69 /r] PENT,MMX,SQ
|
|
PUNPCKLBW mmxreg,mmxrm [rm: np 0f 60 /r] PENT,MMX,SQ
|
|
PUNPCKLDQ mmxreg,mmxrm [rm: np 0f 62 /r] PENT,MMX,SQ
|
|
PUNPCKLWD mmxreg,mmxrm [rm: np 0f 61 /r] PENT,MMX,SQ
|
|
|
|
;# Stack operations
|
|
$wdq PUSH reg# [r: nw o# 50+r] 8086
|
|
$wdq PUSH rm# [m: nw o# ff /6] 8086,OSIZE
|
|
PUSH imm8 [i: nw osz 6a ib,s] 186,SX,ND
|
|
$wdq PUSH sbyte# [i: nw o# 6a ib,s] 186,OSIZE
|
|
$wdq PUSH imm# [i: nw o# 68 i#] 186,OSIZE
|
|
|
|
$wdq POP reg# [r: nw o# 58+r] 8086
|
|
$wdq POP rm# [m: nw o# 8f /0] 8086
|
|
|
|
$zwd PUSHA% void [ o# 60] 186,NOLONG
|
|
$zwd POPA% void [ o# 61] 186,NOLONG
|
|
|
|
$zwdq ENTER% imm16,imm8 [ij: nw o# c8 iw ib,u] 186
|
|
$zwdq ENTER% imm16 [i: nw o# c8 iw 00] 186,ND
|
|
$zwdq LEAVE% void [ nw o# c9] 186
|
|
|
|
$wd BOUND reg#,mem [rm: o# 62 /r] 186,NOLONG
|
|
|
|
; APX PUSH/POP extensions
|
|
PUSHP reg64 [r: o64nw rex.w rex2 50+r ] APX
|
|
POPP reg64 [r: o64nw rex.w rex2 58+r ] APX
|
|
|
|
PUSH reg64,reg64 [vm: o64nw evex.nd1.l0.np.m4.w0 ff /6 ] APX,ND
|
|
PUSH reg64:reg64 [vm: o64nw evex.nd1.l0.np.m4.w0 ff /6 ] APX,ND
|
|
PUSHP reg64,reg64 [vm: o64nw evex.nd1.l0.np.m4.w1 ff /6 ] APX,ND
|
|
PUSHP reg64:reg64 [vm: o64nw evex.nd1.l0.np.m4.w1 ff /6 ] APX,ND
|
|
PUSH2 reg64,reg64 [vm: o64nw evex.nd1.l0.np.m4.w0 ff /6 ] APX
|
|
PUSH2 reg64:reg64 [vm: o64nw evex.nd1.l0.np.m4.w0 ff /6 ] APX,ND
|
|
PUSH2P reg64,reg64 [vm: o64nw evex.nd1.l0.np.m4.w1 ff /6 ] APX
|
|
PUSH2P reg64:reg64 [vm: o64nw evex.nd1.l0.np.m4.w1 ff /6 ] APX,ND
|
|
|
|
POP reg64,reg64 [vm: o64nw evex.nd1.l0.np.m4.w0 8f /0 ] APX,ND
|
|
POP reg64:reg64 [mv: o64nw evex.nd1.l0.np.m4.w0 8f /0 ] APX,ND
|
|
POPP reg64,reg64 [vm: o64nw evex.nd1.l0.np.m4.w1 8f /0 ] APX,ND
|
|
POPP reg64:reg64 [mv: o64nw evex.nd1.l0.np.m4.w1 8f /0 ] APX,ND
|
|
POP2 reg64,reg64 [vm: o64nw evex.nd1.l0.np.m4.w0 8f /0 ] APX
|
|
POP2 reg64:reg64 [mv: o64nw evex.nd1.l0.np.m4.w0 8f /0 ] APX,ND
|
|
POP2P reg64,reg64 [vm: o64nw evex.nd1.l0.np.m4.w1 8f /0 ] APX
|
|
POP2P reg64:reg64 [mv: o64nw evex.nd1.l0.np.m4.w1 8f /0 ] APX,ND
|
|
|
|
;# MMX instructions
|
|
|
|
PXOR mmxreg,mmxrm [rm: np 0f ef /r] PENT,MMX,SQ
|
|
SKINIT void [ 0f 01 de] X86_64,LONG
|
|
|
|
;# Permanently undefined instructions
|
|
; UD0, UD1: officially DO take a modrm
|
|
$wdq UD0 reg#,rm# [rm: o# 0f ff /r] 186,SM
|
|
UD0 void [ 0f ff] 186
|
|
$wdq UD1 reg#,rm# [rm: o# 0f b9 /r] 186,SM
|
|
UD1 void [ 0f b9] 186
|
|
$wdq UD2B reg#,rm# [rm: o# 0f b9 /r] 186,ND,SM
|
|
UD2B void [ 0f b9] 186,ND
|
|
; UD2: officially DOES NOT take a modrm
|
|
UD2 void [ 0f 0b] 186
|
|
$wdq UD2 reg#,rm# [rm: o# 0f 0b /r] 186,ND,SM
|
|
UD2A void [ 0f 0b] 186,ND
|
|
$wdq UD2A reg#,rm# [rm: o# 0f 0b /r] 186,ND
|
|
|
|
FWAIT void [ wait] 8086
|
|
|
|
XLATB void [ d7] 8086
|
|
XLAT void [ d7] 8086,ND
|
|
|
|
;# Comparing and testing
|
|
$bwdq CMP rm#,reg# [mr: o# 38# /r ] 8086,SM
|
|
$bwdq CMP reg#,rm# [rm: o# 3a# /r ] 8086,SM
|
|
$wdq CMP rm#,sbyte# [mi: o# 83 /7 ib,s ] 8086,SM
|
|
$bwdq CMP ax#,imm# [mi: o# 3c# i# ] 8086,SM
|
|
$bwdq CMP rm#,imm# [mi: o# 80# /7 i# ] 8086,SM
|
|
|
|
$bwdq TEST rm#,reg# [mr: o# 84# /r ] 8086,SM
|
|
$bwdq TEST ax#,imm# [-i: o# a8# i# ] 8086,SM,NOAPX
|
|
$bwdq TEST rm#,imm# [mi: o# f6# /0 i# ] 8086,SM
|
|
|
|
$bwdq CCMPscc spec4,rm#,reg# [vmr: evex.scc.dfv.l0.np.m4.o# 38# /r ] APX,SM1-2
|
|
$bwdq CCMPscc spec4,reg#,rm# [vrm: evex.scc.dfv.l0.np.m4.o# 3a# /r ] APX,SM1-2
|
|
$wdq CCMPscc spec4,rm#,sbyte# [vmi: evex.scc.dfv.l0.66.m4.o# 83 /7 ib,s ] APX,SM1-2
|
|
$bwdq CCMPscc spec4,rm#,imm# [vmi: evex.scc.dfv.l0.np.m4.o# 80# /7 ib ] APX,SM1-2
|
|
|
|
;# Conditional instructions
|
|
$wdq CMOVcc reg#,rm# [rm: o# 0f 40+c /r] P6,SM
|
|
|
|
SETcc rm8 [m: 0f 90+c /0] 386,SB
|
|
SETcc reg64 [m: evex.nd1.l0.f2.m4.wig 40+c /0 ] APX,ZU
|
|
SETcc reg32 [m: evex.nd1.l0.f2.m4.wig 40+c /0 ] APX,ZU,ND
|
|
SETccZU reg64 [m: evex.nd1.l0.f2.m4.wig 40+c /0 ] APX,ZU,ND
|
|
SETccZU reg32 [m: evex.nd1.l0.f2.m4.wig 40+c /0 ] APX,ZU,ND
|
|
SETcc rm8 [m: evex.zu.l0.f2.m4.wig 40+c /0 ] APX
|
|
SETccZU rm8 [m: evex.nd1.l0.f2.m4.wig 40+c /0 ] APX,ZU,ND
|
|
|
|
$dq CMPccXADD mem#,reg#,reg# [mrv: vex.128.66.0f38.w# e0+c /r] CMPCCXADD,LONG,SM
|
|
|
|
;# Katmai Streaming SIMD instructions (SSE -- a.k.a. KNI, XMM, MMX2)
|
|
ADDPS xmmreg,xmmrm128 [rm: np 0f 58 /r] KATMAI,SSE
|
|
ADDSS xmmreg,xmmrm32 [rm: f3 0f 58 /r] KATMAI,SSE
|
|
ANDNPS xmmreg,xmmrm128 [rm: np 0f 55 /r] KATMAI,SSE
|
|
ANDPS xmmreg,xmmrm128 [rm: np 0f 54 /r] KATMAI,SSE
|
|
CMPEQPS xmmreg,xmmrm128 [rm: np 0f c2 /r 00] KATMAI,SSE
|
|
CMPEQSS xmmreg,xmmrm32 [rm: f3 0f c2 /r 00] KATMAI,SSE
|
|
CMPLEPS xmmreg,xmmrm128 [rm: np 0f c2 /r 02] KATMAI,SSE
|
|
CMPLESS xmmreg,xmmrm32 [rm: f3 0f c2 /r 02] KATMAI,SSE
|
|
CMPLTPS xmmreg,xmmrm128 [rm: np 0f c2 /r 01] KATMAI,SSE
|
|
CMPLTSS xmmreg,xmmrm32 [rm: f3 0f c2 /r 01] KATMAI,SSE
|
|
CMPNEQPS xmmreg,xmmrm128 [rm: np 0f c2 /r 04] KATMAI,SSE
|
|
CMPNEQSS xmmreg,xmmrm32 [rm: f3 0f c2 /r 04] KATMAI,SSE
|
|
CMPNLEPS xmmreg,xmmrm128 [rm: np 0f c2 /r 06] KATMAI,SSE
|
|
CMPNLESS xmmreg,xmmrm32 [rm: f3 0f c2 /r 06] KATMAI,SSE
|
|
CMPNLTPS xmmreg,xmmrm128 [rm: np 0f c2 /r 05] KATMAI,SSE
|
|
CMPNLTSS xmmreg,xmmrm32 [rm: f3 0f c2 /r 05] KATMAI,SSE
|
|
CMPORDPS xmmreg,xmmrm128 [rm: np 0f c2 /r 07] KATMAI,SSE
|
|
CMPORDSS xmmreg,xmmrm32 [rm: f3 0f c2 /r 07] KATMAI,SSE
|
|
CMPUNORDPS xmmreg,xmmrm128 [rm: np 0f c2 /r 03] KATMAI,SSE
|
|
CMPUNORDSS xmmreg,xmmrm32 [rm: f3 0f c2 /r 03] KATMAI,SSE
|
|
; CMPPS/CMPSS must come after the specific ops; that way the disassembler will find the
|
|
; specific ops first and only disassemble illegal ones as cmpps/cmpss.
|
|
CMPPS xmmreg,xmmrm128,imm8 [rmi: np 0f c2 /r ib,u] KATMAI,SSE
|
|
CMPSS xmmreg,xmmrm32,imm8 [rmi: f3 0f c2 /r ib,u] KATMAI,SSE
|
|
COMISS xmmreg,xmmrm32 [rm: np 0f 2f /r] KATMAI,SSE
|
|
CVTPI2PS xmmreg,mmxrm64 [rm: np 0f 2a /r] KATMAI,SSE,MMX
|
|
CVTPS2PI mmxreg,xmmrm64 [rm: np 0f 2d /r] KATMAI,SSE,MMX
|
|
CVTSI2SS xmmreg,rm32 [rm: f3 0f 2a /r] KATMAI,SSE
|
|
CVTSI2SS xmmreg,rm64 [rm: o64 f3 0f 2a /r] X86_64,LONG,SSE,SX,AR1
|
|
CVTSS2SI reg32,xmmrm32 [rm: f3 0f 2d /r] KATMAI,SSE
|
|
CVTSS2SI reg64,xmmrm32 [rm: o64 f3 0f 2d /r] X86_64,LONG,SSE
|
|
CVTTPS2PI mmxreg,xmmrm64 [rm: np 0f 2c /r] KATMAI,SSE,MMX
|
|
CVTTSS2SI reg32,xmmrm32 [rm: f3 0f 2c /r] KATMAI,SSE
|
|
CVTTSS2SI reg64,xmmrm32 [rm: o64 f3 0f 2c /r] X86_64,LONG,SSE
|
|
DIVPS xmmreg,xmmrm128 [rm: np 0f 5e /r] KATMAI,SSE
|
|
DIVSS xmmreg,xmmrm32 [rm: f3 0f 5e /r] KATMAI,SSE
|
|
LDMXCSR mem32 [m: np 0f ae /2] KATMAI,SSE
|
|
MAXPS xmmreg,xmmrm128 [rm: np 0f 5f /r] KATMAI,SSE
|
|
MAXSS xmmreg,xmmrm32 [rm: f3 0f 5f /r] KATMAI,SSE
|
|
MINPS xmmreg,xmmrm128 [rm: np 0f 5d /r] KATMAI,SSE
|
|
MINSS xmmreg,xmmrm32 [rm: f3 0f 5d /r] KATMAI,SSE
|
|
MOVAPS xmmreg,xmmrm128 [rm: np 0f 28 /r] KATMAI,SSE
|
|
MOVAPS xmmrm128,xmmreg [mr: np 0f 29 /r] KATMAI,SSE
|
|
MOVHPS xmmreg,mem64 [rm: np 0f 16 /r] KATMAI,SSE
|
|
MOVHPS mem64,xmmreg [mr: np 0f 17 /r] KATMAI,SSE
|
|
MOVLHPS xmmreg,xmmreg [rm: np 0f 16 /r] KATMAI,SSE
|
|
MOVLPS xmmreg,mem64 [rm: np 0f 12 /r] KATMAI,SSE
|
|
MOVLPS mem64,xmmreg [mr: np 0f 13 /r] KATMAI,SSE
|
|
MOVHLPS xmmreg,xmmreg [rm: np 0f 12 /r] KATMAI,SSE
|
|
MOVMSKPS reg32,xmmreg [rm: np 0f 50 /r] KATMAI,SSE
|
|
MOVMSKPS reg64,xmmreg [rm: np o64 0f 50 /r] X86_64,LONG,SSE
|
|
MOVNTPS mem128,xmmreg [mr: np 0f 2b /r] KATMAI,SSE
|
|
MOVSS xmmreg,xmmrm32 [rm: f3 0f 10 /r] KATMAI,SSE
|
|
MOVSS xmmrm32,xmmreg [mr: f3 0f 11 /r] KATMAI,SSE
|
|
MOVUPS xmmreg,xmmrm128 [rm: np 0f 10 /r] KATMAI,SSE
|
|
MOVUPS xmmrm128,xmmreg [mr: np 0f 11 /r] KATMAI,SSE
|
|
MULPS xmmreg,xmmrm128 [rm: np 0f 59 /r] KATMAI,SSE
|
|
MULSS xmmreg,xmmrm32 [rm: f3 0f 59 /r] KATMAI,SSE
|
|
ORPS xmmreg,xmmrm128 [rm: np 0f 56 /r] KATMAI,SSE
|
|
RCPPS xmmreg,xmmrm128 [rm: np 0f 53 /r] KATMAI,SSE
|
|
RCPSS xmmreg,xmmrm32 [rm: f3 0f 53 /r] KATMAI,SSE
|
|
RSQRTPS xmmreg,xmmrm128 [rm: np 0f 52 /r] KATMAI,SSE
|
|
RSQRTSS xmmreg,xmmrm32 [rm: f3 0f 52 /r] KATMAI,SSE
|
|
SHUFPS xmmreg,xmmrm128,imm8 [rmi: np 0f c6 /r ib,u] KATMAI,SSE
|
|
SQRTPS xmmreg,xmmrm128 [rm: np 0f 51 /r] KATMAI,SSE
|
|
SQRTSS xmmreg,xmmrm32 [rm: f3 0f 51 /r] KATMAI,SSE
|
|
STMXCSR mem32 [m: np 0f ae /3] KATMAI,SSE
|
|
SUBPS xmmreg,xmmrm128 [rm: np 0f 5c /r] KATMAI,SSE
|
|
SUBSS xmmreg,xmmrm32 [rm: f3 0f 5c /r] KATMAI,SSE
|
|
UCOMISS xmmreg,xmmrm32 [rm: np 0f 2e /r] KATMAI,SSE
|
|
UNPCKHPS xmmreg,xmmrm128 [rm: np 0f 15 /r] KATMAI,SSE
|
|
UNPCKLPS xmmreg,xmmrm128 [rm: np 0f 14 /r] KATMAI,SSE
|
|
XORPS xmmreg,xmmrm128 [rm: np 0f 57 /r] KATMAI,SSE
|
|
|
|
;# Introduced in Deschutes but necessary for SSE support
|
|
FXRSTOR mem [m: np 0f ae /1] P6,SSE,FPU
|
|
FXRSTOR64 mem [m: o64 np 0f ae /1] X86_64,LONG,SSE,FPU
|
|
FXSAVE mem [m: np 0f ae /0] P6,SSE,FPU
|
|
FXSAVE64 mem [m: o64 np 0f ae /0] X86_64,LONG,SSE,FPU
|
|
|
|
;# XSAVE group (AVX and extended state)
|
|
; Introduced in late Penryn ... we really need to clean up the handling
|
|
; of CPU feature bits.
|
|
XGETBV void [ 0f 01 d0] NEHALEM
|
|
XSETBV void [ 0f 01 d1] NEHALEM,PRIV
|
|
XSAVE mem [m: np 0f ae /4] NEHALEM,NOAPX
|
|
XSAVE64 mem [m: o64 np 0f ae /4] LONG,NEHALEM,NOAPX
|
|
XSAVEC mem [m: np 0f c7 /4] NOAPX
|
|
XSAVEC64 mem [m: o64 np 0f c7 /4] LONG,NOAPX
|
|
XSAVEOPT mem [m: np 0f ae /6] NOAPX
|
|
XSAVEOPT64 mem [m: o64 np 0f ae /6] LONG,NOAPX
|
|
XSAVES mem [m: np 0f c7 /5] NOAPX
|
|
XSAVES64 mem [m: o64 np 0f c7 /5] LONG,NOAPX
|
|
XRSTOR mem [m: np 0f ae /5] NEHALEM,NOAPX
|
|
XRSTOR64 mem [m: o64 np 0f ae /5] LONG,NEHALEM,NOAPX
|
|
XRSTORS mem [m: np 0f c7 /3] NOAPX
|
|
XRSTORS64 mem [m: o64 np 0f c7 /3] LONG,NOAPX
|
|
|
|
; These instructions are not SSE-specific; they are
|
|
;# Generic memory operations
|
|
; and work even if CR4.OSFXSR == 0
|
|
PREFETCHNTA mem8 [m: 0f 18 /0] KATMAI,SB
|
|
PREFETCHT0 mem8 [m: 0f 18 /1] KATMAI,SB
|
|
PREFETCHT1 mem8 [m: 0f 18 /2] KATMAI,SB
|
|
PREFETCHT2 mem8 [m: 0f 18 /3] KATMAI,SB
|
|
PREFETCHIT0 mem8 [m: 0f 18 /7] PREFETCHI,SB
|
|
PREFETCHIT1 mem8 [m: 0f 18 /6] PREFETCHI,SB
|
|
SFENCE void [ np 0f ae f8] KATMAI
|
|
|
|
;# New MMX instructions introduced in Katmai
|
|
MASKMOVQ mmxreg,mmxreg [rm: np 0f f7 /r] KATMAI,MMX
|
|
MOVNTQ mem,mmxreg [mr: np 0f e7 /r] KATMAI,MMX,SQ
|
|
PAVGB mmxreg,mmxrm [rm: np 0f e0 /r] KATMAI,MMX,SQ
|
|
PAVGW mmxreg,mmxrm [rm: np 0f e3 /r] KATMAI,MMX,SQ
|
|
PEXTRW reg32,mmxreg,imm [rmi: np 0f c5 /r ib,u] KATMAI,MMX,SB,AR2
|
|
; PINSRW is documented as using a reg32, but it's really using only 16 bits
; -- accept either, but be truthful in disassembly
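; (e.g. "pinsrw mm0,ax,1" and "pinsrw mm0,eax,1" assemble to the same bytes)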
PINSRW mmxreg,mem,imm [rmi: np 0f c4 /r ib,u] KATMAI,MMX,SB,AR2
|
|
PINSRW mmxreg,rm16,imm [rmi: np 0f c4 /r ib,u] KATMAI,MMX,SB,AR2
|
|
PINSRW mmxreg,reg32,imm [rmi: np 0f c4 /r ib,u] KATMAI,MMX,SB,AR2
|
|
PMAXSW mmxreg,mmxrm [rm: np 0f ee /r] KATMAI,MMX,SQ
|
|
PMAXUB mmxreg,mmxrm [rm: np 0f de /r] KATMAI,MMX,SQ
|
|
PMINSW mmxreg,mmxrm [rm: np 0f ea /r] KATMAI,MMX,SQ
|
|
PMINUB mmxreg,mmxrm [rm: np 0f da /r] KATMAI,MMX,SQ
|
|
PMOVMSKB reg32,mmxreg [rm: np 0f d7 /r] KATMAI,MMX
|
|
PMULHUW mmxreg,mmxrm [rm: np 0f e4 /r] KATMAI,MMX,SQ
|
|
PSADBW mmxreg,mmxrm [rm: np 0f f6 /r] KATMAI,MMX,SQ
|
|
PSHUFW mmxreg,mmxrm,imm [rmi: np 0f 70 /r ib] KATMAI,MMX,SM0-1,SB,AR2
|
|
|
|
;# AMD Enhanced 3DNow! (Athlon) instructions
|
|
PF2IW mmxreg,mmxrm [rm: 0f 0f /r 1c] PENT,3DNOW,SQ
|
|
PFNACC mmxreg,mmxrm [rm: 0f 0f /r 8a] PENT,3DNOW,SQ
|
|
PFPNACC mmxreg,mmxrm [rm: 0f 0f /r 8e] PENT,3DNOW,SQ
|
|
PI2FW mmxreg,mmxrm [rm: 0f 0f /r 0c] PENT,3DNOW,SQ
|
|
PSWAPD mmxreg,mmxrm [rm: 0f 0f /r bb] PENT,3DNOW,SQ
|
|
|
|
;# Willamette SSE2 Cacheability Instructions
|
|
MASKMOVDQU xmmreg,xmmreg [rm: 66 0f f7 /r] WILLAMETTE,SSE2
|
|
MOVNTDQ mem,xmmreg [mr: 66 0f e7 /r] WILLAMETTE,SSE2,SO
|
|
MOVNTI mem,reg32 [mr: np 0f c3 /r] WILLAMETTE,SD
|
|
MOVNTI mem,reg64 [mr: o64 np 0f c3 /r] X86_64,LONG,SQ
|
|
MOVNTPD mem,xmmreg [mr: 66 0f 2b /r] WILLAMETTE,SSE2,SO
|
|
LFENCE void [ np 0f ae e8] WILLAMETTE,SSE2
|
|
MFENCE void [ np 0f ae f0] WILLAMETTE,SSE2
|
|
|
|
;# Willamette MMX instructions (SSE2 SIMD Integer Instructions)
|
|
MOVD mem,xmmreg [mr: 66 norexw 0f 7e /r] WILLAMETTE,SSE2,SD
|
|
MOVD xmmreg,mem [rm: 66 norexw 0f 6e /r] WILLAMETTE,SSE2,SD
|
|
MOVD xmmreg,rm32 [rm: 66 norexw 0f 6e /r] WILLAMETTE,SSE2
|
|
MOVD rm32,xmmreg [mr: 66 norexw 0f 7e /r] WILLAMETTE,SSE2
|
|
MOVDQA xmmreg,xmmrm128 [rm: 66 0f 6f /r] WILLAMETTE,SSE2,SO
|
|
MOVDQA xmmrm128,xmmreg [mr: 66 0f 7f /r] WILLAMETTE,SSE2,SO
|
|
MOVDQU xmmreg,xmmrm128 [rm: f3 0f 6f /r] WILLAMETTE,SSE2,SO
|
|
MOVDQU xmmrm128,xmmreg [mr: f3 0f 7f /r] WILLAMETTE,SSE2,SO
|
|
MOVDQ2Q mmxreg,xmmreg [rm: f2 0f d6 /r] WILLAMETTE,SSE2
|
|
MOVQ xmmreg,xmmreg [rm: f3 0f 7e /r] WILLAMETTE,SSE2
|
|
MOVQ xmmreg,xmmreg [mr: 66 0f d6 /r] WILLAMETTE,SSE2
|
|
MOVQ mem,xmmreg [mr: 66 0f d6 /r] WILLAMETTE,SSE2,SQ
|
|
MOVQ xmmreg,mem [rm: f3 0f 7e /r] WILLAMETTE,SSE2,SQ
|
|
MOVQ xmmreg,rm64 [rm: 66 o64 0f 6e /r] X86_64,LONG,SSE2
|
|
MOVQ rm64,xmmreg [mr: 66 o64 0f 7e /r] X86_64,LONG,SSE2
|
|
MOVQ2DQ xmmreg,mmxreg [rm: f3 0f d6 /r] WILLAMETTE,SSE2
|
|
PACKSSWB xmmreg,xmmrm [rm: 66 0f 63 /r] WILLAMETTE,SSE2,SO
|
|
PACKSSDW xmmreg,xmmrm [rm: 66 0f 6b /r] WILLAMETTE,SSE2,SO
|
|
PACKUSWB xmmreg,xmmrm [rm: 66 0f 67 /r] WILLAMETTE,SSE2,SO
|
|
PADDB xmmreg,xmmrm [rm: 66 0f fc /r] WILLAMETTE,SSE2,SO
|
|
PADDW xmmreg,xmmrm [rm: 66 0f fd /r] WILLAMETTE,SSE2,SO
|
|
PADDD xmmreg,xmmrm [rm: 66 0f fe /r] WILLAMETTE,SSE2,SO
|
|
PADDQ mmxreg,mmxrm [rm: np 0f d4 /r] WILLAMETTE,MMX,SQ
|
|
PADDQ xmmreg,xmmrm [rm: 66 0f d4 /r] WILLAMETTE,SSE2,SO
|
|
PADDSB xmmreg,xmmrm [rm: 66 0f ec /r] WILLAMETTE,SSE2,SO
|
|
PADDSW xmmreg,xmmrm [rm: 66 0f ed /r] WILLAMETTE,SSE2,SO
|
|
PADDUSB xmmreg,xmmrm [rm: 66 0f dc /r] WILLAMETTE,SSE2,SO
|
|
PADDUSW xmmreg,xmmrm [rm: 66 0f dd /r] WILLAMETTE,SSE2,SO
|
|
PAND xmmreg,xmmrm [rm: 66 0f db /r] WILLAMETTE,SSE2,SO
|
|
PANDN xmmreg,xmmrm [rm: 66 0f df /r] WILLAMETTE,SSE2,SO
|
|
PAVGB xmmreg,xmmrm [rm: 66 0f e0 /r] WILLAMETTE,SSE2,SO
|
|
PAVGW xmmreg,xmmrm [rm: 66 0f e3 /r] WILLAMETTE,SSE2,SO
|
|
PCMPEQB xmmreg,xmmrm [rm: 66 0f 74 /r] WILLAMETTE,SSE2,SO
|
|
PCMPEQW xmmreg,xmmrm [rm: 66 0f 75 /r] WILLAMETTE,SSE2,SO
|
|
PCMPEQD xmmreg,xmmrm [rm: 66 0f 76 /r] WILLAMETTE,SSE2,SO
|
|
PCMPGTB xmmreg,xmmrm [rm: 66 0f 64 /r] WILLAMETTE,SSE2,SO
|
|
PCMPGTW xmmreg,xmmrm [rm: 66 0f 65 /r] WILLAMETTE,SSE2,SO
|
|
PCMPGTD xmmreg,xmmrm [rm: 66 0f 66 /r] WILLAMETTE,SSE2,SO
|
|
PEXTRW reg32,xmmreg,imm [rmi: 66 0f c5 /r ib,u] WILLAMETTE,SSE2,SB,AR2
|
|
PEXTRW reg64,xmmreg,imm [rmi: 66 0f c5 /r ib,u] X86_64,LONG,SSE2,SB,AR2,ND
|
|
PINSRW xmmreg,reg16,imm [rmi: 66 0f c4 /r ib,u] WILLAMETTE,SSE2,SB,AR2
|
|
PINSRW xmmreg,reg32,imm [rmi: 66 0f c4 /r ib,u] WILLAMETTE,SSE2,SB,AR2,ND
|
|
PINSRW xmmreg,reg64,imm [rmi: 66 0f c4 /r ib,u] X86_64,LONG,SSE2,SB,AR2,ND
|
|
PINSRW xmmreg,mem,imm [rmi: 66 0f c4 /r ib,u] WILLAMETTE,SSE2,SB,AR2
|
|
PINSRW xmmreg,mem16,imm [rmi: 66 0f c4 /r ib,u] WILLAMETTE,SSE2,SB,AR2
|
|
PMADDWD xmmreg,xmmrm [rm: 66 0f f5 /r] WILLAMETTE,SSE2,SO
|
|
PMAXSW xmmreg,xmmrm [rm: 66 0f ee /r] WILLAMETTE,SSE2,SO
|
|
PMAXUB xmmreg,xmmrm [rm: 66 0f de /r] WILLAMETTE,SSE2,SO
|
|
PMINSW xmmreg,xmmrm [rm: 66 0f ea /r] WILLAMETTE,SSE2,SO
|
|
PMINUB xmmreg,xmmrm [rm: 66 0f da /r] WILLAMETTE,SSE2,SO
|
|
PMOVMSKB reg32,xmmreg [rm: 66 0f d7 /r] WILLAMETTE,SSE2
|
|
PMULHUW xmmreg,xmmrm [rm: 66 0f e4 /r] WILLAMETTE,SSE2,SO
|
|
PMULHW xmmreg,xmmrm [rm: 66 0f e5 /r] WILLAMETTE,SSE2,SO
|
|
PMULLW xmmreg,xmmrm [rm: 66 0f d5 /r] WILLAMETTE,SSE2,SO
|
|
PMULUDQ mmxreg,mmxrm [rm: np 0f f4 /r] WILLAMETTE,SSE2,SO
|
|
PMULUDQ xmmreg,xmmrm [rm: 66 0f f4 /r] WILLAMETTE,SSE2,SO
|
|
POR xmmreg,xmmrm [rm: 66 0f eb /r] WILLAMETTE,SSE2,SO
|
|
PSADBW xmmreg,xmmrm [rm: 66 0f f6 /r] WILLAMETTE,SSE2,SO
|
|
PSHUFD xmmreg,xmmreg,imm [rmi: 66 0f 70 /r ib] WILLAMETTE,SSE2,SB,AR2
|
|
PSHUFD xmmreg,mem,imm [rmi: 66 0f 70 /r ib] WILLAMETTE,SSE2,SM0-1,SB,AR2
|
|
PSHUFHW xmmreg,xmmreg,imm [rmi: f3 0f 70 /r ib] WILLAMETTE,SSE2,SB,AR2
|
|
PSHUFHW xmmreg,mem,imm [rmi: f3 0f 70 /r ib] WILLAMETTE,SSE2,SM0-1,SB,AR2
|
|
PSHUFLW xmmreg,xmmreg,imm [rmi: f2 0f 70 /r ib] WILLAMETTE,SSE2,SB,AR2
|
|
PSHUFLW xmmreg,mem,imm [rmi: f2 0f 70 /r ib] WILLAMETTE,SSE2,SM0-1,SB,AR2
|
|
PSLLDQ xmmreg,imm [mi: 66 0f 73 /7 ib,u] WILLAMETTE,SSE2,SB,AR1
|
|
PSLLW xmmreg,xmmrm [rm: 66 0f f1 /r] WILLAMETTE,SSE2,SO
|
|
PSLLW xmmreg,imm [mi: 66 0f 71 /6 ib,u] WILLAMETTE,SSE2,SB,AR1
|
|
PSLLD xmmreg,xmmrm [rm: 66 0f f2 /r] WILLAMETTE,SSE2,SO
|
|
PSLLD xmmreg,imm [mi: 66 0f 72 /6 ib,u] WILLAMETTE,SSE2,SB,AR1
|
|
PSLLQ xmmreg,xmmrm [rm: 66 0f f3 /r] WILLAMETTE,SSE2,SO
|
|
PSLLQ xmmreg,imm [mi: 66 0f 73 /6 ib,u] WILLAMETTE,SSE2,SB,AR1
|
|
PSRAW xmmreg,xmmrm [rm: 66 0f e1 /r] WILLAMETTE,SSE2,SO
|
|
PSRAW xmmreg,imm [mi: 66 0f 71 /4 ib,u] WILLAMETTE,SSE2,SB,AR1
|
|
PSRAD xmmreg,xmmrm [rm: 66 0f e2 /r] WILLAMETTE,SSE2,SO
|
|
PSRAD xmmreg,imm [mi: 66 0f 72 /4 ib,u] WILLAMETTE,SSE2,SB,AR1
|
|
PSRLDQ xmmreg,imm [mi: 66 0f 73 /3 ib,u] WILLAMETTE,SSE2,SB,AR1
|
|
PSRLW xmmreg,xmmrm [rm: 66 0f d1 /r] WILLAMETTE,SSE2,SO
|
|
PSRLW xmmreg,imm [mi: 66 0f 71 /2 ib,u] WILLAMETTE,SSE2,SB,AR1
|
|
PSRLD xmmreg,xmmrm [rm: 66 0f d2 /r] WILLAMETTE,SSE2,SO
|
|
PSRLD xmmreg,imm [mi: 66 0f 72 /2 ib,u] WILLAMETTE,SSE2,SB,AR1
|
|
PSRLQ xmmreg,xmmrm [rm: 66 0f d3 /r] WILLAMETTE,SSE2,SO
|
|
PSRLQ xmmreg,imm [mi: 66 0f 73 /2 ib,u] WILLAMETTE,SSE2,SB,AR1
|
|
PSUBB xmmreg,xmmrm [rm: 66 0f f8 /r] WILLAMETTE,SSE2,SO
|
|
PSUBW xmmreg,xmmrm [rm: 66 0f f9 /r] WILLAMETTE,SSE2,SO
|
|
PSUBD xmmreg,xmmrm [rm: 66 0f fa /r] WILLAMETTE,SSE2,SO
|
|
PSUBQ mmxreg,mmxrm [rm: np 0f fb /r] WILLAMETTE,SSE2,SO
|
|
PSUBQ xmmreg,xmmrm [rm: 66 0f fb /r] WILLAMETTE,SSE2,SO
|
|
PSUBSB xmmreg,xmmrm [rm: 66 0f e8 /r] WILLAMETTE,SSE2,SO
|
|
PSUBSW xmmreg,xmmrm [rm: 66 0f e9 /r] WILLAMETTE,SSE2,SO
|
|
PSUBUSB xmmreg,xmmrm [rm: 66 0f d8 /r] WILLAMETTE,SSE2,SO
|
|
PSUBUSW xmmreg,xmmrm [rm: 66 0f d9 /r] WILLAMETTE,SSE2,SO
|
|
PUNPCKHBW xmmreg,xmmrm [rm: 66 0f 68 /r] WILLAMETTE,SSE2,SO
|
|
PUNPCKHWD xmmreg,xmmrm [rm: 66 0f 69 /r] WILLAMETTE,SSE2,SO
|
|
PUNPCKHDQ xmmreg,xmmrm [rm: 66 0f 6a /r] WILLAMETTE,SSE2,SO
|
|
PUNPCKHQDQ xmmreg,xmmrm [rm: 66 0f 6d /r] WILLAMETTE,SSE2,SO
|
|
PUNPCKLBW xmmreg,xmmrm [rm: 66 0f 60 /r] WILLAMETTE,SSE2,SO
|
|
PUNPCKLWD xmmreg,xmmrm [rm: 66 0f 61 /r] WILLAMETTE,SSE2,SO
|
|
PUNPCKLDQ xmmreg,xmmrm [rm: 66 0f 62 /r] WILLAMETTE,SSE2,SO
|
|
PUNPCKLQDQ xmmreg,xmmrm [rm: 66 0f 6c /r] WILLAMETTE,SSE2,SO
|
|
PXOR xmmreg,xmmrm [rm: 66 0f ef /r] WILLAMETTE,SSE2,SO
|
|
|
|
;# Willamette Streaming SIMD instructions (SSE2)
|
|
ADDPD xmmreg,xmmrm [rm: 66 0f 58 /r] WILLAMETTE,SSE2,SO
|
|
ADDSD xmmreg,xmmrm [rm: f2 0f 58 /r] WILLAMETTE,SSE2,SQ
|
|
ANDNPD xmmreg,xmmrm [rm: 66 0f 55 /r] WILLAMETTE,SSE2,SO
|
|
ANDPD xmmreg,xmmrm [rm: 66 0f 54 /r] WILLAMETTE,SSE2,SO
|
|
CMPEQPD xmmreg,xmmrm [rm: 66 0f c2 /r 00] WILLAMETTE,SSE2,SO
|
|
CMPEQSD xmmreg,xmmrm [rm: f2 0f c2 /r 00] WILLAMETTE,SSE2,SQ
|
|
CMPLEPD xmmreg,xmmrm [rm: 66 0f c2 /r 02] WILLAMETTE,SSE2,SO
|
|
CMPLESD xmmreg,xmmrm [rm: f2 0f c2 /r 02] WILLAMETTE,SSE2,SQ
|
|
CMPLTPD xmmreg,xmmrm [rm: 66 0f c2 /r 01] WILLAMETTE,SSE2,SO
|
|
CMPLTSD xmmreg,xmmrm [rm: f2 0f c2 /r 01] WILLAMETTE,SSE2,SQ
|
|
CMPNEQPD xmmreg,xmmrm [rm: 66 0f c2 /r 04] WILLAMETTE,SSE2,SO
|
|
CMPNEQSD xmmreg,xmmrm [rm: f2 0f c2 /r 04] WILLAMETTE,SSE2,SQ
|
|
CMPNLEPD xmmreg,xmmrm [rm: 66 0f c2 /r 06] WILLAMETTE,SSE2,SO
|
|
CMPNLESD xmmreg,xmmrm [rm: f2 0f c2 /r 06] WILLAMETTE,SSE2,SQ
|
|
CMPNLTPD xmmreg,xmmrm [rm: 66 0f c2 /r 05] WILLAMETTE,SSE2,SO
|
|
CMPNLTSD xmmreg,xmmrm [rm: f2 0f c2 /r 05] WILLAMETTE,SSE2,SQ
|
|
CMPORDPD xmmreg,xmmrm [rm: 66 0f c2 /r 07] WILLAMETTE,SSE2,SO
|
|
CMPORDSD xmmreg,xmmrm [rm: f2 0f c2 /r 07] WILLAMETTE,SSE2,SQ
|
|
CMPUNORDPD xmmreg,xmmrm [rm: 66 0f c2 /r 03] WILLAMETTE,SSE2,SO
|
|
CMPUNORDSD xmmreg,xmmrm [rm: f2 0f c2 /r 03] WILLAMETTE,SSE2,SQ
|
|
; CMPPD/CMPSD must come after the specific ops; that way the disassembler will find the
|
|
; specific ops first and only disassemble illegal ones as cmppd/cmpsd.
|
|
CMPPD xmmreg,xmmrm128,imm8 [rmi: 66 0f c2 /r ib,u] WILLAMETTE,SSE2
|
|
CMPSD xmmreg,xmmrm128,imm8 [rmi: f2 0f c2 /r ib,u] WILLAMETTE,SSE2
|
|
COMISD xmmreg,xmmrm64 [rm: 66 0f 2f /r] WILLAMETTE,SSE2
|
|
CVTDQ2PD xmmreg,xmmrm [rm: f3 0f e6 /r] WILLAMETTE,SSE2,SQ
|
|
CVTDQ2PS xmmreg,xmmrm [rm: np 0f 5b /r] WILLAMETTE,SSE2,SO
|
|
CVTPD2DQ xmmreg,xmmrm [rm: f2 0f e6 /r] WILLAMETTE,SSE2,SO
|
|
CVTPD2PI mmxreg,xmmrm [rm: 66 0f 2d /r] WILLAMETTE,SSE2,SO
|
|
CVTPD2PS xmmreg,xmmrm [rm: 66 0f 5a /r] WILLAMETTE,SSE2,SO
|
|
CVTPI2PD xmmreg,mmxrm [rm: 66 0f 2a /r] WILLAMETTE,SSE2,SQ
|
|
CVTPS2DQ xmmreg,xmmrm [rm: 66 0f 5b /r] WILLAMETTE,SSE2,SO
|
|
CVTPS2PD xmmreg,xmmrm [rm: np 0f 5a /r] WILLAMETTE,SSE2,SQ
|
|
CVTSD2SI reg32,xmmrm64 [rm: norexw f2 0f 2d /r] WILLAMETTE,SSE2
|
|
CVTSD2SI reg64,xmmrm64 [rm: o64 f2 0f 2d /r] X86_64,LONG,SSE2
|
|
CVTSD2SS xmmreg,xmmrm64 [rm: f2 0f 5a /r] WILLAMETTE,SSE2,SQ
|
|
CVTSI2SD xmmreg,rm32 [rm: norexw f2 0f 2a /r] WILLAMETTE,SSE2
|
|
CVTSI2SD xmmreg,rm64 [rm: o64 f2 0f 2a /r] X86_64,LONG,SSE2,SX,AR1
|
|
CVTSS2SD xmmreg,xmmrm [rm: f3 0f 5a /r] WILLAMETTE,SSE2,SD
|
|
CVTTPD2PI mmxreg,xmmrm [rm: 66 0f 2c /r] WILLAMETTE,SSE2,SO
|
|
CVTTPD2DQ xmmreg,xmmrm [rm: 66 0f e6 /r] WILLAMETTE,SSE2,SO
|
|
CVTTPS2DQ xmmreg,xmmrm [rm: f3 0f 5b /r] WILLAMETTE,SSE2,SO
|
|
CVTTSD2SI reg32,xmmrm64 [rm: norexw f2 0f 2c /r] WILLAMETTE,SSE2
|
|
CVTTSD2SI reg64,xmmrm64 [rm: o64 f2 0f 2c /r] X86_64,LONG,SSE2
|
|
DIVPD xmmreg,xmmrm [rm: 66 0f 5e /r] WILLAMETTE,SSE2,SO
|
|
DIVSD xmmreg,xmmrm [rm: f2 0f 5e /r] WILLAMETTE,SSE2,SQ
|
|
MAXPD xmmreg,xmmrm [rm: 66 0f 5f /r] WILLAMETTE,SSE2,SO
|
|
MAXSD xmmreg,xmmrm [rm: f2 0f 5f /r] WILLAMETTE,SSE2,SQ
|
|
MINPD xmmreg,xmmrm [rm: 66 0f 5d /r] WILLAMETTE,SSE2,SO
|
|
MINSD xmmreg,xmmrm [rm: f2 0f 5d /r] WILLAMETTE,SSE2,SQ
|
|
MOVAPD xmmreg,xmmrm128 [rm: 66 0f 28 /r] WILLAMETTE,SSE2
|
|
MOVAPD xmmrm128,xmmreg [mr: 66 0f 29 /r] WILLAMETTE,SSE2
|
|
MOVHPD mem64,xmmreg [mr: 66 0f 17 /r] WILLAMETTE,SSE2
|
|
MOVHPD xmmreg,mem64 [rm: 66 0f 16 /r] WILLAMETTE,SSE2
|
|
MOVLPD mem64,xmmreg [mr: 66 0f 13 /r] WILLAMETTE,SSE2
|
|
MOVLPD xmmreg,mem64 [rm: 66 0f 12 /r] WILLAMETTE,SSE2
|
|
MOVMSKPD reg32,xmmreg [rm: 66 0f 50 /r] WILLAMETTE,SSE2
|
|
MOVMSKPD reg64,xmmreg [rm: 66 o64 0f 50 /r] X86_64,LONG,SSE2
|
|
MOVSD xmmreg,xmmrm64 [rm: f2 0f 10 /r] WILLAMETTE,SSE2
|
|
MOVSD xmmrm64,xmmreg [mr: f2 0f 11 /r] WILLAMETTE,SSE2
|
|
MOVUPD xmmreg,xmmrm128 [rm: 66 0f 10 /r] WILLAMETTE,SSE2
|
|
MOVUPD xmmrm128,xmmreg [mr: 66 0f 11 /r] WILLAMETTE,SSE2
|
|
MULPD xmmreg,xmmrm128 [rm: 66 0f 59 /r] WILLAMETTE,SSE2,SO
|
|
MULSD xmmreg,xmmrm64 [rm: f2 0f 59 /r] WILLAMETTE,SSE2,SQ
|
|
ORPD xmmreg,xmmrm128 [rm: 66 0f 56 /r] WILLAMETTE,SSE2,SO
|
|
SHUFPD xmmreg,xmmrm128,imm8 [rmi: 66 0f c6 /r ib,u] WILLAMETTE,SSE2
|
|
SQRTPD xmmreg,xmmrm128 [rm: 66 0f 51 /r] WILLAMETTE,SSE2,SO
|
|
SQRTSD xmmreg,xmmrm64 [rm: f2 0f 51 /r] WILLAMETTE,SSE2
|
|
SUBPD xmmreg,xmmrm128 [rm: 66 0f 5c /r] WILLAMETTE,SSE2,SO
|
|
SUBSD xmmreg,xmmrm64 [rm: f2 0f 5c /r] WILLAMETTE,SSE2
|
|
UCOMISD xmmreg,xmmrm64 [rm: 66 0f 2e /r] WILLAMETTE,SSE2
|
|
UNPCKHPD xmmreg,xmmrm128 [rm: 66 0f 15 /r] WILLAMETTE,SSE2
|
|
UNPCKLPD xmmreg,xmmrm128 [rm: 66 0f 14 /r] WILLAMETTE,SSE2
|
|
XORPD xmmreg,xmmrm128 [rm: 66 0f 57 /r] WILLAMETTE,SSE2
|
|
|
|
;# Prescott New Instructions (SSE3)
|
|
ADDSUBPD xmmreg,xmmrm128 [rm: 66 0f d0 /r] PRESCOTT,SSE3,SO
|
|
ADDSUBPS xmmreg,xmmrm128 [rm: f2 0f d0 /r] PRESCOTT,SSE3,SO
|
|
HADDPD xmmreg,xmmrm128 [rm: 66 0f 7c /r] PRESCOTT,SSE3,SO
|
|
HADDPS xmmreg,xmmrm128 [rm: f2 0f 7c /r] PRESCOTT,SSE3,SO
|
|
HSUBPD xmmreg,xmmrm128 [rm: 66 0f 7d /r] PRESCOTT,SSE3,SO
|
|
HSUBPS xmmreg,xmmrm128 [rm: f2 0f 7d /r] PRESCOTT,SSE3,SO
|
|
LDDQU xmmreg,mem128 [rm: f2 0f f0 /r] PRESCOTT,SSE3,SO
|
|
MOVDDUP xmmreg,xmmrm64 [rm: f2 0f 12 /r] PRESCOTT,SSE3,SQ
|
|
MOVSHDUP xmmreg,xmmrm128 [rm: f3 0f 16 /r] PRESCOTT,SSE3
|
|
MOVSLDUP xmmreg,xmmrm128 [rm: f3 0f 12 /r] PRESCOTT,SSE3
|
|
|
|
;# VMX/SVM Instructions
|
|
CLGI void [ 0f 01 dd] VMX,AMD
|
|
STGI void [ 0f 01 dc] VMX,AMD
|
|
VMCALL void [ 0f 01 c1] VMX
|
|
VMCLEAR mem [m: 66 0f c7 /6] VMX
|
|
VMFUNC void [ 0f 01 d4] VMX
|
|
VMLAUNCH void [ 0f 01 c2] VMX
|
|
VMLOAD void [ 0f 01 da] VMX,AMD
|
|
VMMCALL void [ 0f 01 d9] VMX,AMD
|
|
VMPTRLD mem [m: np 0f c7 /6] VMX
|
|
VMPTRST mem [m: np 0f c7 /7] VMX
|
|
VMREAD rm32,reg32 [mr: np 0f 78 /r] VMX,NOLONG,SD
|
|
VMREAD rm64,reg64 [mr: o64nw np 0f 78 /r] X86_64,LONG,VMX,SQ
|
|
VMRESUME void [ 0f 01 c3] VMX
|
|
VMRUN void [ 0f 01 d8] VMX,AMD
|
|
VMSAVE void [ 0f 01 db] VMX,AMD
|
|
VMWRITE reg32,rm32 [rm: np 0f 79 /r] VMX,NOLONG,SD
|
|
VMWRITE reg64,rm64 [rm: o64nw np 0f 79 /r] X86_64,LONG,VMX,SQ
|
|
VMXOFF void [ 0f 01 c4] VMX
|
|
VMXON mem [m: f3 0f c7 /6] VMX
|
|
;# Extended Page Tables VMX instructions
|
|
INVEPT reg32,mem [rm: 66 0f38 80 /r] VMX,SO,NOLONG
|
|
INVEPT reg64,mem [rm: o64nw 66 0f38 80 /r] VMX,SO,LONG
|
|
INVVPID reg32,mem [rm: 66 0f38 81 /r] VMX,SO,NOLONG
|
|
INVVPID reg64,mem [rm: o64nw 66 0f38 81 /r] VMX,SO,LONG
|
|
;# SEV-SNP AMD instructions
|
|
PVALIDATE void [ f2 0f 01 ff] VMX,AMD
|
|
RMPADJUST void [ f3 0f 01 fe] VMX,AMD
|
|
VMGEXIT void [ f2 0f 01 c1] VMX,AMD
|
|
VMGEXIT void [ f3 0f 01 c1] VMX,AMD
|
|
|
|
;# Tejas New Instructions (SSSE3)
|
|
PABSB mmxreg,mmxrm [rm: np 0f38 1c /r] SSSE3,MMX,SQ
|
|
PABSB xmmreg,xmmrm128 [rm: 66 0f38 1c /r] SSSE3
|
|
PABSW mmxreg,mmxrm [rm: np 0f38 1d /r] SSSE3,MMX,SQ
|
|
PABSW xmmreg,xmmrm128 [rm: 66 0f38 1d /r] SSSE3
|
|
PABSD mmxreg,mmxrm [rm: np 0f38 1e /r] SSSE3,MMX,SQ
|
|
PABSD xmmreg,xmmrm128 [rm: 66 0f38 1e /r] SSSE3
|
|
PALIGNR mmxreg,mmxrm,imm [rmi: np 0f3a 0f /r ib,u] SSSE3,MMX,SQ
|
|
PALIGNR xmmreg,xmmrm,imm [rmi: 66 0f3a 0f /r ib,u] SSSE3
|
|
PHADDW mmxreg,mmxrm [rm: np 0f38 01 /r] SSSE3,MMX,SQ
|
|
PHADDW xmmreg,xmmrm128 [rm: 66 0f38 01 /r] SSSE3
|
|
PHADDD mmxreg,mmxrm [rm: np 0f38 02 /r] SSSE3,MMX,SQ
|
|
PHADDD xmmreg,xmmrm128 [rm: 66 0f38 02 /r] SSSE3
|
|
PHADDSW mmxreg,mmxrm [rm: np 0f38 03 /r] SSSE3,MMX,SQ
|
|
PHADDSW xmmreg,xmmrm128 [rm: 66 0f38 03 /r] SSSE3
|
|
PHSUBW mmxreg,mmxrm [rm: np 0f38 05 /r] SSSE3,MMX,SQ
|
|
PHSUBW xmmreg,xmmrm128 [rm: 66 0f38 05 /r] SSSE3
|
|
PHSUBD mmxreg,mmxrm [rm: np 0f38 06 /r] SSSE3,MMX,SQ
|
|
PHSUBD xmmreg,xmmrm128 [rm: 66 0f38 06 /r] SSSE3
|
|
PHSUBSW mmxreg,mmxrm [rm: np 0f38 07 /r] SSSE3,MMX,SQ
|
|
PHSUBSW xmmreg,xmmrm128 [rm: 66 0f38 07 /r] SSSE3
|
|
PMADDUBSW mmxreg,mmxrm [rm: np 0f38 04 /r] SSSE3,MMX,SQ
|
|
PMADDUBSW xmmreg,xmmrm128 [rm: 66 0f38 04 /r] SSSE3
|
|
PMULHRSW mmxreg,mmxrm [rm: np 0f38 0b /r] SSSE3,MMX,SQ
|
|
PMULHRSW xmmreg,xmmrm128 [rm: 66 0f38 0b /r] SSSE3
|
|
PSHUFB mmxreg,mmxrm [rm: np 0f38 00 /r] SSSE3,MMX,SQ
|
|
PSHUFB xmmreg,xmmrm128 [rm: 66 0f38 00 /r] SSSE3
|
|
PSIGNB mmxreg,mmxrm [rm: np 0f38 08 /r] SSSE3,MMX,SQ
|
|
PSIGNB xmmreg,xmmrm128 [rm: 66 0f38 08 /r] SSSE3
|
|
PSIGNW mmxreg,mmxrm [rm: np 0f38 09 /r] SSSE3,MMX,SQ
|
|
PSIGNW xmmreg,xmmrm128 [rm: 66 0f38 09 /r] SSSE3
|
|
PSIGND mmxreg,mmxrm [rm: np 0f38 0a /r] SSSE3,MMX,SQ
|
|
PSIGND xmmreg,xmmrm128 [rm: 66 0f38 0a /r] SSSE3
|
|
|
|
;# AMD SSE4A
|
|
EXTRQ xmmreg,imm,imm [mij: 66 0f 78 /0 ib,u ib,u] SSE4A,AMD
|
|
EXTRQ xmmreg,xmmreg [rm: 66 0f 79 /r] SSE4A,AMD
|
|
INSERTQ xmmreg,xmmreg,imm,imm [rmij: f2 0f 78 /r ib,u ib,u] SSE4A,AMD
|
|
INSERTQ xmmreg,xmmreg [rm: f2 0f 79 /r] SSE4A,AMD
|
|
MOVNTSD mem64,xmmreg [mr: f2 0f 2b /r] SSE4A,AMD,SQ
|
|
MOVNTSS mem32,xmmreg [mr: f3 0f 2b /r] SSE4A,AMD,SD
|
|
|
|
;# New instructions in Barcelona
|
|
|
|
;# Penryn New Instructions (SSE4.1)
|
|
BLENDPD xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 0d /r ib,u] SSE41
|
|
BLENDPS xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 0c /r ib,u] SSE41
|
|
BLENDVPD xmmreg,xmmrm128,xmm0 [rm-: 66 0f38 15 /r] SSE41
|
|
BLENDVPD xmmreg,xmmrm128 [rm: 66 0f38 15 /r] SSE41
|
|
BLENDVPS xmmreg,xmmrm128,xmm0 [rm-: 66 0f38 14 /r] SSE41
|
|
BLENDVPS xmmreg,xmmrm128 [rm: 66 0f38 14 /r] SSE41
|
|
DPPD xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 41 /r ib,u] SSE41
|
|
DPPS xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 40 /r ib,u] SSE41
|
|
EXTRACTPS rm32,xmmreg,imm8 [mri: 66 0f3a 17 /r ib,u] SSE41
|
|
EXTRACTPS reg64,xmmreg,imm8 [mri: o64 66 0f3a 17 /r ib,u] SSE41,X86_64,LONG
|
|
INSERTPS xmmreg,xmmrm32,imm8 [rmi: 66 0f3a 21 /r ib,u] SSE41
|
|
MOVNTDQA xmmreg,mem128 [rm: 66 0f38 2a /r] SSE41
|
|
MPSADBW xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 42 /r ib,u] SSE41
|
|
PACKUSDW xmmreg,xmmrm128 [rm: 66 0f38 2b /r] SSE41
|
|
PBLENDVB xmmreg,xmmrm,xmm0 [rm-: 66 0f38 10 /r] SSE41
|
|
PBLENDVB xmmreg,xmmrm128 [rm: 66 0f38 10 /r] SSE41
|
|
PBLENDW xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 0e /r ib,u] SSE41
|
|
PCMPEQQ xmmreg,xmmrm128 [rm: 66 0f38 29 /r] SSE41
|
|
PEXTRB reg32,xmmreg,imm8 [mri: 66 0f3a 14 /r ib,u] SSE41
|
|
PEXTRB mem8,xmmreg,imm8 [mri: 66 0f3a 14 /r ib,u] SSE41
|
|
PEXTRB reg64,xmmreg,imm8 [mri: o64nw 66 0f3a 14 /r ib,u] SSE41,X86_64,LONG
|
|
PEXTRD rm32,xmmreg,imm8 [mri: norexw 66 0f3a 16 /r ib,u] SSE41
|
|
PEXTRQ rm64,xmmreg,imm8 [mri: o64 66 0f3a 16 /r ib,u] SSE41,X86_64,LONG
|
|
PEXTRW reg32,xmmreg,imm8 [mri: 66 0f3a 15 /r ib,u] SSE41
|
|
PEXTRW mem16,xmmreg,imm8 [mri: 66 0f3a 15 /r ib,u] SSE41
|
|
PEXTRW reg64,xmmreg,imm8 [mri: o64 66 0f3a 15 /r ib,u] SSE41,X86_64,LONG
|
|
PHMINPOSUW xmmreg,xmmrm128 [rm: 66 0f38 41 /r] SSE41
|
|
PINSRB xmmreg,mem,imm8 [rmi: 66 0f3a 20 /r ib,u] SSE41,SB,AR2
|
|
PINSRB xmmreg,rm8,imm8 [rmi: nohi 66 0f3a 20 /r ib,u] SSE41,SB,AR2
|
|
PINSRB xmmreg,reg32,imm8 [rmi: 66 0f3a 20 /r ib,u] SSE41,SB,AR2
|
|
PINSRD xmmreg,rm32,imm8 [rmi: norexw 66 0f3a 22 /r ib,u] SSE41,SB,AR2
|
|
PINSRQ xmmreg,rm64,imm8 [rmi: o64 66 0f3a 22 /r ib,u] SSE41,X86_64,LONG,SB,AR2
|
|
PMAXSB xmmreg,xmmrm128 [rm: 66 0f38 3c /r] SSE41
|
|
PMAXSD xmmreg,xmmrm128 [rm: 66 0f38 3d /r] SSE41
|
|
PMAXUD xmmreg,xmmrm128 [rm: 66 0f38 3f /r] SSE41
|
|
PMAXUW xmmreg,xmmrm128 [rm: 66 0f38 3e /r] SSE41
|
|
PMINSB xmmreg,xmmrm128 [rm: 66 0f38 38 /r] SSE41
|
|
PMINSD xmmreg,xmmrm128 [rm: 66 0f38 39 /r] SSE41
|
|
PMINUD xmmreg,xmmrm128 [rm: 66 0f38 3b /r] SSE41
|
|
PMINUW xmmreg,xmmrm128 [rm: 66 0f38 3a /r] SSE41
|
|
PMOVSXBW xmmreg,xmmrm64 [rm: 66 0f38 20 /r] SSE41,SQ
|
|
PMOVSXBD xmmreg,xmmrm32 [rm: 66 0f38 21 /r] SSE41,SD
|
|
PMOVSXBQ xmmreg,xmmrm16 [rm: 66 0f38 22 /r] SSE41,SW
|
|
PMOVSXWD xmmreg,xmmrm64 [rm: 66 0f38 23 /r] SSE41,SQ
|
|
PMOVSXWQ xmmreg,xmmrm32 [rm: 66 0f38 24 /r] SSE41,SD
|
|
PMOVSXDQ xmmreg,xmmrm64 [rm: 66 0f38 25 /r] SSE41,SQ
|
|
PMOVZXBW xmmreg,xmmrm64 [rm: 66 0f38 30 /r] SSE41,SQ
|
|
PMOVZXBD xmmreg,xmmrm32 [rm: 66 0f38 31 /r] SSE41,SD
|
|
PMOVZXBQ xmmreg,xmmrm16 [rm: 66 0f38 32 /r] SSE41,SW
|
|
PMOVZXWD xmmreg,xmmrm64 [rm: 66 0f38 33 /r] SSE41,SQ
|
|
PMOVZXWQ xmmreg,xmmrm32 [rm: 66 0f38 34 /r] SSE41,SD
|
|
PMOVZXDQ xmmreg,xmmrm64 [rm: 66 0f38 35 /r] SSE41,SQ
|
|
PMULDQ xmmreg,xmmrm128 [rm: 66 0f38 28 /r] SSE41
|
|
PMULLD xmmreg,xmmrm128 [rm: 66 0f38 40 /r] SSE41
|
|
PTEST xmmreg,xmmrm128 [rm: 66 0f38 17 /r] SSE41
|
|
ROUNDPD xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 09 /r ib,u] SSE41
|
|
ROUNDPS xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 08 /r ib,u] SSE41
|
|
ROUNDSD xmmreg,xmmrm64,imm8 [rmi: 66 0f3a 0b /r ib,u] SSE41
|
|
ROUNDSS xmmreg,xmmrm32,imm8 [rmi: 66 0f3a 0a /r ib,u] SSE41
|
|
|
|
;# Nehalem New Instructions (SSE4.2)
|
|
CRC32 reg32,rm8 [rm: f2i 0f38 f0 /r] SSE42
|
|
CRC32 reg32,rm16 [rm: o16 f2i 0f38 f1 /r] SSE42
|
|
CRC32 reg32,rm32 [rm: o32 f2i 0f38 f1 /r] SSE42
|
|
CRC32 reg64,rm8 [rm: o64 f2i 0f38 f0 /r] SSE42,X86_64,LONG
|
|
CRC32 reg64,rm64 [rm: o64 f2i 0f38 f1 /r] SSE42,X86_64,LONG
|
|
PCMPESTRI xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 61 /r ib,u] SSE42
|
|
PCMPESTRM xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 60 /r ib,u] SSE42
|
|
PCMPISTRI xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 63 /r ib,u] SSE42
|
|
PCMPISTRM xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 62 /r ib,u] SSE42
|
|
PCMPGTQ xmmreg,xmmrm128 [rm: 66 0f38 37 /r] SSE42
|
|
POPCNT reg16,rm16 [rm: o16 f3i 0f b8 /r] NEHALEM,SW
|
|
POPCNT reg32,rm32 [rm: o32 f3i 0f b8 /r] NEHALEM,SD
|
|
POPCNT reg64,rm64 [rm: o64 f3i 0f b8 /r] NEHALEM,SQ,LONG
|
|
|
|
;# Intel SMX
|
|
GETSEC void [ 0f 37] KATMAI,NOAPX
|
|
|
|
;# Geode (Cyrix) 3DNow! additions
|
|
PFRCPV mmxreg,mmxrm [rm: 0f 0f /r 86] PENT,3DNOW,SQ,CYRIX
|
|
PFRSQRTV mmxreg,mmxrm [rm: 0f 0f /r 87] PENT,3DNOW,SQ,CYRIX
|
|
|
|
;# Intel new instructions in ???
|
|
; Is NEHALEM right here?
|
|
|
|
;# Intel AES instructions
|
|
AESENC xmmreg,xmmrm128 [rm: 66 0f38 dc /r] SSE,WESTMERE
|
|
AESENCLAST xmmreg,xmmrm128 [rm: 66 0f38 dd /r] SSE,WESTMERE
|
|
AESDEC xmmreg,xmmrm128 [rm: 66 0f38 de /r] SSE,WESTMERE
|
|
AESDECLAST xmmreg,xmmrm128 [rm: 66 0f38 df /r] SSE,WESTMERE
|
|
AESIMC xmmreg,xmmrm128 [rm: 66 0f38 db /r] SSE,WESTMERE
|
|
AESKEYGENASSIST xmmreg,xmmrm128,imm8 [rmi: 66 0f3a df /r ib] SSE,WESTMERE
|
|
|
|
;# Intel AVX AES instructions
|
|
VAESENC xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 dc /r] AVX,SANDYBRIDGE
|
|
VAESENCLAST xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 dd /r] AVX,SANDYBRIDGE
|
|
VAESDEC xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 de /r] AVX,SANDYBRIDGE
|
|
VAESDECLAST xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 df /r] AVX,SANDYBRIDGE
|
|
VAESIMC xmmreg,xmmrm128 [rm: vex.128.66.0f38 db /r] AVX,SANDYBRIDGE
|
|
VAESKEYGENASSIST xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a df /r ib] AVX,SANDYBRIDGE
|
|
|
|
;# Intel instruction extension based on pub number 319433-030 dated October 2017
|
|
|
|
; Intel VAES instructions
|
|
VAESENC ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.wig dc /r] VAES
|
|
VAESENCLAST ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.wig dd /r] VAES
|
|
VAESDEC ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.wig de /r] VAES
|
|
VAESDECLAST ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.wig df /r] VAES
|
|
|
|
; Intel VAES + AVX512VL instructions
|
|
VAESENC xmmreg,xmmreg*,xmmrm128 [rvm:fv: evex.nds.128.66.0f38.wig dc /r] AVX512VL,AVX512,VAES
|
|
VAESENC ymmreg,ymmreg*,ymmrm256 [rvm:fv: evex.nds.256.66.0f38.wig dc /r] AVX512VL,AVX512,VAES
|
|
VAESENCLAST xmmreg,xmmreg*,xmmrm128 [rvm:fv: evex.nds.128.66.0f38.wig dd /r] AVX512VL,AVX512,VAES
|
|
VAESENCLAST ymmreg,ymmreg*,ymmrm256 [rvm:fv: evex.nds.256.66.0f38.wig dd /r] AVX512VL,AVX512,VAES
|
|
VAESDEC xmmreg,xmmreg*,xmmrm128 [rvm:fv: evex.nds.128.66.0f38.wig de /r] AVX512VL,AVX512,VAES
|
|
VAESDEC ymmreg,ymmreg*,ymmrm256 [rvm:fv: evex.nds.256.66.0f38.wig de /r] AVX512VL,AVX512,VAES
|
|
VAESDECLAST xmmreg,xmmreg*,xmmrm128 [rvm:fv: evex.nds.128.66.0f38.wig df /r] AVX512VL,AVX512,VAES
|
|
VAESDECLAST ymmreg,ymmreg*,ymmrm256 [rvm:fv: evex.nds.256.66.0f38.wig df /r] AVX512VL,AVX512,VAES
|
|
|
|
; Intel VAES + AVX512F instructions
|
|
VAESENC zmmreg,zmmreg*,zmmrm512 [rvm:fv: evex.nds.512.66.0f38.wig dc /r] AVX512,VAES
|
|
VAESENCLAST zmmreg,zmmreg*,zmmrm512 [rvm:fv: evex.nds.512.66.0f38.wig dd /r] AVX512,VAES
|
|
VAESDEC zmmreg,zmmreg*,zmmrm512 [rvm:fv: evex.nds.512.66.0f38.wig de /r] AVX512,VAES
|
|
VAESDECLAST zmmreg,zmmreg*,zmmrm512 [rvm:fv: evex.nds.512.66.0f38.wig df /r] AVX512,VAES
|
|
|
|
;# Intel AVX instructions
|
|
VADDPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 58 /r] AVX,SANDYBRIDGE
|
|
VADDPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 58 /r] AVX,SANDYBRIDGE
|
|
VADDPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 58 /r] AVX,SANDYBRIDGE
|
|
VADDPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 58 /r] AVX,SANDYBRIDGE
|
|
VADDSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 58 /r] AVX,SANDYBRIDGE
|
|
VADDSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 58 /r] AVX,SANDYBRIDGE
|
|
VADDSUBPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d0 /r] AVX,SANDYBRIDGE
|
|
VADDSUBPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f d0 /r] AVX,SANDYBRIDGE
|
|
VADDSUBPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.f2.0f d0 /r] AVX,SANDYBRIDGE
|
|
VADDSUBPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.f2.0f d0 /r] AVX,SANDYBRIDGE
|
|
VANDPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 54 /r] AVX,SANDYBRIDGE
|
|
VANDPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 54 /r] AVX,SANDYBRIDGE
|
|
VANDPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 54 /r] AVX,SANDYBRIDGE
|
|
VANDPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 54 /r] AVX,SANDYBRIDGE
|
|
VANDNPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 55 /r] AVX,SANDYBRIDGE
|
|
VANDNPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 55 /r] AVX,SANDYBRIDGE
|
|
VANDNPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 55 /r] AVX,SANDYBRIDGE
|
|
VANDNPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 55 /r] AVX,SANDYBRIDGE
|
|
VBLENDPD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 0d /r ib] AVX,SANDYBRIDGE
|
|
VBLENDPD ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a 0d /r ib] AVX,SANDYBRIDGE
|
|
VBLENDPS xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 0c /r ib] AVX,SANDYBRIDGE
|
|
VBLENDPS ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a 0c /r ib] AVX,SANDYBRIDGE
|
|
VBLENDVPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.nds.128.66.0f3a.w0 4b /r /is4] AVX,SANDYBRIDGE
|
|
VBLENDVPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.nds.256.66.0f3a.w0 4b /r /is4] AVX,SANDYBRIDGE
|
|
VBLENDVPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.nds.128.66.0f3a.w0 4a /r /is4] AVX,SANDYBRIDGE
|
|
VBLENDVPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.nds.256.66.0f3a.w0 4a /r /is4] AVX,SANDYBRIDGE
|
|
VBROADCASTSS xmmreg,mem32 [rm: vex.128.66.0f38.w0 18 /r] AVX,SANDYBRIDGE
|
|
VBROADCASTSS ymmreg,mem32 [rm: vex.256.66.0f38.w0 18 /r] AVX,SANDYBRIDGE
|
|
VBROADCASTSD ymmreg,mem64 [rm: vex.256.66.0f38.w0 19 /r] AVX,SANDYBRIDGE
|
|
VBROADCASTF128 ymmreg,mem128 [rm: vex.256.66.0f38.w0 1a /r] AVX,SANDYBRIDGE
|
|
; Specific aliases first, then the generic version, to keep the disassembler happy...
|
|
VCMPEQ_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPEQ_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPEQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 00] AVX,SANDYBRIDGE
|
|
VCMPEQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 00] AVX,SANDYBRIDGE
|
|
VCMPLT_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLT_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLTPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLTPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLE_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPLE_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPLEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPLEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPUNORD_QPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPUNORD_QPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPUNORDPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPUNORDPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPNEQ_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNEQ_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNEQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNEQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNLT_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLT_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLTPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLTPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLE_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPNLE_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPNLEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPNLEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPORD_QPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPORD_QPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPORDPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPORDPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPEQ_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 08] AVX,SANDYBRIDGE
|
|
VCMPEQ_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 08] AVX,SANDYBRIDGE
|
|
VCMPNGE_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGE_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGT_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPNGT_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPNGTPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPNGTPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPFALSEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPFALSEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0c] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0c] AVX,SANDYBRIDGE
|
|
VCMPGE_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGE_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGT_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPGT_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPGTPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPGTPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPTRUE_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPTRUE_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPTRUEPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPTRUEPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPEQ_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPEQ_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPLT_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 11] AVX,SANDYBRIDGE
|
|
VCMPLT_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 11] AVX,SANDYBRIDGE
|
|
VCMPLE_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 12] AVX,SANDYBRIDGE
|
|
VCMPLE_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 12] AVX,SANDYBRIDGE
|
|
VCMPUNORD_SPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 13] AVX,SANDYBRIDGE
|
|
VCMPUNORD_SPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 13] AVX,SANDYBRIDGE
|
|
VCMPNEQ_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 14] AVX,SANDYBRIDGE
|
|
VCMPNEQ_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 14] AVX,SANDYBRIDGE
|
|
VCMPNLT_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 15] AVX,SANDYBRIDGE
|
|
VCMPNLT_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 15] AVX,SANDYBRIDGE
|
|
VCMPNLE_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 16] AVX,SANDYBRIDGE
|
|
VCMPNLE_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 16] AVX,SANDYBRIDGE
|
|
VCMPORD_SPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 17] AVX,SANDYBRIDGE
|
|
VCMPORD_SPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 17] AVX,SANDYBRIDGE
|
|
VCMPEQ_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 18] AVX,SANDYBRIDGE
|
|
VCMPEQ_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 18] AVX,SANDYBRIDGE
|
|
VCMPNGE_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 19] AVX,SANDYBRIDGE
|
|
VCMPNGE_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 19] AVX,SANDYBRIDGE
|
|
VCMPNGT_UQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1a] AVX,SANDYBRIDGE
|
|
VCMPNGT_UQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1a] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1b] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1b] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OSPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1c] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OSPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1c] AVX,SANDYBRIDGE
|
|
VCMPGE_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1d] AVX,SANDYBRIDGE
|
|
VCMPGE_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1d] AVX,SANDYBRIDGE
|
|
VCMPGT_OQPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1e] AVX,SANDYBRIDGE
|
|
VCMPGT_OQPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1e] AVX,SANDYBRIDGE
|
|
VCMPTRUE_USPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f c2 /r 1f] AVX,SANDYBRIDGE
|
|
VCMPTRUE_USPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f c2 /r 1f] AVX,SANDYBRIDGE
|
|
VCMPPD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f c2 /r ib] AVX,SANDYBRIDGE
|
|
VCMPPD ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f c2 /r ib] AVX,SANDYBRIDGE
|
|
; Specific aliases first, then the generic version, to keep the disassembler happy...
|
|
VCMPEQ_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPEQ_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPEQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 00] AVX,SANDYBRIDGE
|
|
VCMPEQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 00] AVX,SANDYBRIDGE
|
|
VCMPLT_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLT_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLTPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLTPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLE_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPLE_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPLEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPLEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPUNORD_QPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPUNORD_QPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPUNORDPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPUNORDPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPNEQ_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNEQ_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNEQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNEQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNLT_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLT_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLTPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLTPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLE_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPNLE_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPNLEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPNLEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPORD_QPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPORD_QPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPORDPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPORDPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPEQ_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 08] AVX,SANDYBRIDGE
|
|
VCMPEQ_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 08] AVX,SANDYBRIDGE
|
|
VCMPNGE_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGE_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGT_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPNGT_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPNGTPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPNGTPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPFALSEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPFALSEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0c] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0c] AVX,SANDYBRIDGE
|
|
VCMPGE_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGE_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGT_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPGT_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPGTPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPGTPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPTRUE_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPTRUE_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPTRUEPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPTRUEPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPEQ_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPEQ_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPLT_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 11] AVX,SANDYBRIDGE
|
|
VCMPLT_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 11] AVX,SANDYBRIDGE
|
|
VCMPLE_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 12] AVX,SANDYBRIDGE
|
|
VCMPLE_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 12] AVX,SANDYBRIDGE
|
|
VCMPUNORD_SPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 13] AVX,SANDYBRIDGE
|
|
VCMPUNORD_SPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 13] AVX,SANDYBRIDGE
|
|
VCMPNEQ_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 14] AVX,SANDYBRIDGE
|
|
VCMPNEQ_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 14] AVX,SANDYBRIDGE
|
|
VCMPNLT_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 15] AVX,SANDYBRIDGE
|
|
VCMPNLT_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 15] AVX,SANDYBRIDGE
|
|
VCMPNLE_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 16] AVX,SANDYBRIDGE
|
|
VCMPNLE_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 16] AVX,SANDYBRIDGE
|
|
VCMPORD_SPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 17] AVX,SANDYBRIDGE
|
|
VCMPORD_SPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 17] AVX,SANDYBRIDGE
|
|
VCMPEQ_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 18] AVX,SANDYBRIDGE
|
|
VCMPEQ_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 18] AVX,SANDYBRIDGE
|
|
VCMPNGE_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 19] AVX,SANDYBRIDGE
|
|
VCMPNGE_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 19] AVX,SANDYBRIDGE
|
|
VCMPNGT_UQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1a] AVX,SANDYBRIDGE
|
|
VCMPNGT_UQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1a] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1b] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1b] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OSPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1c] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OSPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1c] AVX,SANDYBRIDGE
|
|
VCMPGE_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1d] AVX,SANDYBRIDGE
|
|
VCMPGE_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1d] AVX,SANDYBRIDGE
|
|
VCMPGT_OQPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1e] AVX,SANDYBRIDGE
|
|
VCMPGT_OQPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1e] AVX,SANDYBRIDGE
|
|
VCMPTRUE_USPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f c2 /r 1f] AVX,SANDYBRIDGE
|
|
VCMPTRUE_USPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f c2 /r 1f] AVX,SANDYBRIDGE
|
|
VCMPPS xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.0f c2 /r ib] AVX,SANDYBRIDGE
|
|
VCMPPS ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.0f c2 /r ib] AVX,SANDYBRIDGE
|
|
; Specific aliases first, then the generic version, to keep the disassembler happy...
|
|
VCMPEQ_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPEQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 00] AVX,SANDYBRIDGE
|
|
VCMPLT_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLTSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLE_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPLESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPUNORD_QSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPUNORDSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPNEQ_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNEQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNLT_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLTSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLE_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPNLESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPORD_QSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPORDSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPEQ_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 08] AVX,SANDYBRIDGE
|
|
VCMPNGE_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGT_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPNGTSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPFALSESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0c] AVX,SANDYBRIDGE
|
|
VCMPGE_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGT_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPGTSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPTRUE_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPTRUESD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPEQ_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPLT_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 11] AVX,SANDYBRIDGE
|
|
VCMPLE_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 12] AVX,SANDYBRIDGE
|
|
VCMPUNORD_SSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 13] AVX,SANDYBRIDGE
|
|
VCMPNEQ_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 14] AVX,SANDYBRIDGE
|
|
VCMPNLT_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 15] AVX,SANDYBRIDGE
|
|
VCMPNLE_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 16] AVX,SANDYBRIDGE
|
|
VCMPORD_SSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 17] AVX,SANDYBRIDGE
|
|
VCMPEQ_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 18] AVX,SANDYBRIDGE
|
|
VCMPNGE_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 19] AVX,SANDYBRIDGE
|
|
VCMPNGT_UQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1a] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1b] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OSSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1c] AVX,SANDYBRIDGE
|
|
VCMPGE_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1d] AVX,SANDYBRIDGE
|
|
VCMPGT_OQSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1e] AVX,SANDYBRIDGE
|
|
VCMPTRUE_USSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f c2 /r 1f] AVX,SANDYBRIDGE
|
|
VCMPSD xmmreg,xmmreg*,xmmrm64,imm8 [rvmi: vex.nds.lig.f2.0f c2 /r ib] AVX,SANDYBRIDGE
|
|
; Specific aliases first, then the generic version, to keep the disassembler happy...
|
|
VCMPEQ_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPEQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 00] AVX,SANDYBRIDGE
|
|
VCMPLT_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLTSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 01] AVX,SANDYBRIDGE
|
|
VCMPLE_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPLESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 02] AVX,SANDYBRIDGE
|
|
VCMPUNORD_QSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPUNORDSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 03] AVX,SANDYBRIDGE
|
|
VCMPNEQ_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNEQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 04] AVX,SANDYBRIDGE
|
|
VCMPNLT_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLTSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 05] AVX,SANDYBRIDGE
|
|
VCMPNLE_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPNLESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 06] AVX,SANDYBRIDGE
|
|
VCMPORD_QSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPORDSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 07] AVX,SANDYBRIDGE
|
|
VCMPEQ_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 08] AVX,SANDYBRIDGE
|
|
VCMPNGE_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 09] AVX,SANDYBRIDGE
|
|
VCMPNGT_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPNGTSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0a] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPFALSESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0b] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0c] AVX,SANDYBRIDGE
|
|
VCMPGE_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0d] AVX,SANDYBRIDGE
|
|
VCMPGT_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPGTSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0e] AVX,SANDYBRIDGE
|
|
VCMPTRUE_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPTRUESS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 0f] AVX,SANDYBRIDGE
|
|
VCMPEQ_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 10] AVX,SANDYBRIDGE
|
|
VCMPLT_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 11] AVX,SANDYBRIDGE
|
|
VCMPLE_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 12] AVX,SANDYBRIDGE
|
|
VCMPUNORD_SSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 13] AVX,SANDYBRIDGE
|
|
VCMPNEQ_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 14] AVX,SANDYBRIDGE
|
|
VCMPNLT_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 15] AVX,SANDYBRIDGE
|
|
VCMPNLE_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 16] AVX,SANDYBRIDGE
|
|
VCMPORD_SSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 17] AVX,SANDYBRIDGE
|
|
VCMPEQ_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 18] AVX,SANDYBRIDGE
|
|
VCMPNGE_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 19] AVX,SANDYBRIDGE
|
|
VCMPNGT_UQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1a] AVX,SANDYBRIDGE
|
|
VCMPFALSE_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1b] AVX,SANDYBRIDGE
|
|
VCMPNEQ_OSSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1c] AVX,SANDYBRIDGE
|
|
VCMPGE_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1d] AVX,SANDYBRIDGE
|
|
VCMPGT_OQSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1e] AVX,SANDYBRIDGE
|
|
VCMPTRUE_USSS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f3.0f c2 /r 1f] AVX,SANDYBRIDGE
|
|
VCMPSS xmmreg,xmmreg*,xmmrm64,imm8 [rvmi: vex.nds.lig.f3.0f c2 /r ib] AVX,SANDYBRIDGE
|
|
VCOMISD xmmreg,xmmrm64 [rm: vex.lig.66.0f 2f /r] AVX,SANDYBRIDGE
|
|
VCOMISS xmmreg,xmmrm32 [rm: vex.lig.0f 2f /r] AVX,SANDYBRIDGE
|
|
VCVTDQ2PD xmmreg,xmmrm64 [rm: vex.128.f3.0f e6 /r] AVX,SANDYBRIDGE
|
|
VCVTDQ2PD ymmreg,xmmrm128 [rm: vex.256.f3.0f e6 /r] AVX,SANDYBRIDGE
|
|
VCVTDQ2PS xmmreg,xmmrm128 [rm: vex.128.0f 5b /r] AVX,SANDYBRIDGE
|
|
VCVTDQ2PS ymmreg,ymmrm256 [rm: vex.256.0f 5b /r] AVX,SANDYBRIDGE
|
|
VCVTPD2DQ xmmreg,xmmreg [rm: vex.128.f2.0f e6 /r] AVX,SANDYBRIDGE
|
|
VCVTPD2DQ xmmreg,mem128 [rm: vex.128.f2.0f e6 /r] AVX,SANDYBRIDGE,SO
|
|
VCVTPD2DQ xmmreg,ymmreg [rm: vex.256.f2.0f e6 /r] AVX,SANDYBRIDGE
|
|
VCVTPD2DQ xmmreg,mem256 [rm: vex.256.f2.0f e6 /r] AVX,SANDYBRIDGE,SY
|
|
VCVTPD2PS xmmreg,xmmreg [rm: vex.128.66.0f 5a /r] AVX,SANDYBRIDGE
|
|
VCVTPD2PS xmmreg,mem128 [rm: vex.128.66.0f 5a /r] AVX,SANDYBRIDGE,SO
|
|
VCVTPD2PS xmmreg,ymmreg [rm: vex.256.66.0f 5a /r] AVX,SANDYBRIDGE
|
|
VCVTPD2PS xmmreg,mem256 [rm: vex.256.66.0f 5a /r] AVX,SANDYBRIDGE,SY
|
|
VCVTPS2DQ xmmreg,xmmrm128 [rm: vex.128.66.0f 5b /r] AVX,SANDYBRIDGE
|
|
VCVTPS2DQ ymmreg,ymmrm256 [rm: vex.256.66.0f 5b /r] AVX,SANDYBRIDGE
|
|
VCVTPS2PD xmmreg,xmmrm64 [rm: vex.128.0f 5a /r] AVX,SANDYBRIDGE
|
|
VCVTPS2PD ymmreg,xmmrm128 [rm: vex.256.0f 5a /r] AVX,SANDYBRIDGE
|
|
VCVTSD2SI reg32,xmmrm64 [rm: vex.lig.f2.0f.w0 2d /r] AVX,SANDYBRIDGE
|
|
VCVTSD2SI reg64,xmmrm64 [rm: vex.lig.f2.0f.w1 2d /r] AVX,SANDYBRIDGE,LONG
|
|
VCVTSD2SS xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 5a /r] AVX,SANDYBRIDGE
|
|
VCVTSI2SD xmmreg,xmmreg*,rm32 [rvm: vex.nds.lig.f2.0f.w0 2a /r] AVX,SANDYBRIDGE,SD
|
|
VCVTSI2SD xmmreg,xmmreg*,mem32 [rvm: vex.nds.lig.f2.0f.w0 2a /r] AVX,SANDYBRIDGE,ND,SD
|
|
VCVTSI2SD xmmreg,xmmreg*,rm64 [rvm: vex.nds.lig.f2.0f.w1 2a /r] AVX,SANDYBRIDGE,LONG,SQ
|
|
VCVTSI2SS xmmreg,xmmreg*,rm32 [rvm: vex.nds.lig.f3.0f.w0 2a /r] AVX,SANDYBRIDGE,SD
|
|
VCVTSI2SS xmmreg,xmmreg*,mem32 [rvm: vex.nds.lig.f3.0f.w0 2a /r] AVX,SANDYBRIDGE,ND,SD
|
|
VCVTSI2SS xmmreg,xmmreg*,rm64 [rvm: vex.nds.lig.f3.0f.w1 2a /r] AVX,SANDYBRIDGE,LONG,SQ
|
|
VCVTSS2SD xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 5a /r] AVX,SANDYBRIDGE
|
|
VCVTSS2SI reg32,xmmrm32 [rm: vex.lig.f3.0f.w0 2d /r] AVX,SANDYBRIDGE
|
|
VCVTSS2SI reg64,xmmrm32 [rm: vex.lig.f3.0f.w1 2d /r] AVX,SANDYBRIDGE,LONG
|
|
VCVTTPD2DQ xmmreg,xmmreg [rm: vex.128.66.0f e6 /r] AVX,SANDYBRIDGE
|
|
VCVTTPD2DQ xmmreg,mem128 [rm: vex.128.66.0f e6 /r] AVX,SANDYBRIDGE,SO
|
|
VCVTTPD2DQ xmmreg,ymmreg [rm: vex.256.66.0f e6 /r] AVX,SANDYBRIDGE
|
|
VCVTTPD2DQ xmmreg,mem256 [rm: vex.256.66.0f e6 /r] AVX,SANDYBRIDGE,SY
|
|
VCVTTPS2DQ xmmreg,xmmrm128 [rm: vex.128.f3.0f 5b /r] AVX,SANDYBRIDGE
|
|
VCVTTPS2DQ ymmreg,ymmrm256 [rm: vex.256.f3.0f 5b /r] AVX,SANDYBRIDGE
|
|
VCVTTSD2SI reg32,xmmrm64 [rm: vex.lig.f2.0f.w0 2c /r] AVX,SANDYBRIDGE
|
|
VCVTTSD2SI reg64,xmmrm64 [rm: vex.lig.f2.0f.w1 2c /r] AVX,SANDYBRIDGE,LONG
|
|
VCVTTSS2SI reg32,xmmrm32 [rm: vex.lig.f3.0f.w0 2c /r] AVX,SANDYBRIDGE
|
|
VCVTTSS2SI reg64,xmmrm32 [rm: vex.lig.f3.0f.w1 2c /r] AVX,SANDYBRIDGE,LONG
|
|
VDIVPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 5e /r] AVX,SANDYBRIDGE
|
|
VDIVPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 5e /r] AVX,SANDYBRIDGE
|
|
VDIVPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 5e /r] AVX,SANDYBRIDGE
|
|
VDIVPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 5e /r] AVX,SANDYBRIDGE
|
|
VDIVSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 5e /r] AVX,SANDYBRIDGE
|
|
VDIVSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 5e /r] AVX,SANDYBRIDGE
|
|
VDPPD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 41 /r ib] AVX,SANDYBRIDGE
|
|
VDPPS xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 40 /r ib] AVX,SANDYBRIDGE
|
|
VDPPS ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a 40 /r ib] AVX,SANDYBRIDGE
|
|
VEXTRACTF128 xmmrm128,ymmreg,imm8 [mri: vex.256.66.0f3a.w0 19 /r ib] AVX,SANDYBRIDGE
|
|
VEXTRACTPS rm32,xmmreg,imm8 [mri: vex.128.66.0f3a 17 /r ib] AVX,SANDYBRIDGE
|
|
VHADDPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 7c /r] AVX,SANDYBRIDGE
|
|
VHADDPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 7c /r] AVX,SANDYBRIDGE
|
|
VHADDPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.f2.0f 7c /r] AVX,SANDYBRIDGE
|
|
VHADDPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.f2.0f 7c /r] AVX,SANDYBRIDGE
|
|
VHSUBPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 7d /r] AVX,SANDYBRIDGE
|
|
VHSUBPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 7d /r] AVX,SANDYBRIDGE
|
|
VHSUBPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.f2.0f 7d /r] AVX,SANDYBRIDGE
|
|
VHSUBPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.f2.0f 7d /r] AVX,SANDYBRIDGE
|
|
VINSERTF128 ymmreg,ymmreg*,xmmrm128,imm8 [rvmi: vex.nds.256.66.0f3a.w0 18 /r ib] AVX,SANDYBRIDGE
|
|
VINSERTPS xmmreg,xmmreg*,xmmrm32,imm8 [rvmi: vex.nds.128.66.0f3a 21 /r ib] AVX,SANDYBRIDGE
|
|
VLDDQU xmmreg,mem128 [rm: vex.128.f2.0f f0 /r] AVX,SANDYBRIDGE
|
|
VLDQQU ymmreg,mem256 [rm: vex.256.f2.0f f0 /r] AVX,SANDYBRIDGE
|
|
VLDDQU ymmreg,mem256 [rm: vex.256.f2.0f f0 /r] AVX,SANDYBRIDGE
|
|
VLDMXCSR mem32 [m: vex.lz.0f ae /2] AVX,SANDYBRIDGE
|
|
VMASKMOVDQU xmmreg,xmmreg [rm: vex.128.66.0f f7 /r] AVX,SANDYBRIDGE
|
|
VMASKMOVPS xmmreg,xmmreg,mem128 [rvm: vex.nds.128.66.0f38.w0 2c /r] AVX,SANDYBRIDGE
|
|
VMASKMOVPS ymmreg,ymmreg,mem256 [rvm: vex.nds.256.66.0f38.w0 2c /r] AVX,SANDYBRIDGE
|
|
VMASKMOVPS mem128,xmmreg,xmmreg [mvr: vex.nds.128.66.0f38.w0 2e /r] AVX,SANDYBRIDGE,SO
|
|
VMASKMOVPS mem256,ymmreg,ymmreg [mvr: vex.nds.256.66.0f38.w0 2e /r] AVX,SANDYBRIDGE,SY
|
|
VMASKMOVPD xmmreg,xmmreg,mem128 [rvm: vex.nds.128.66.0f38.w0 2d /r] AVX,SANDYBRIDGE
|
|
VMASKMOVPD ymmreg,ymmreg,mem256 [rvm: vex.nds.256.66.0f38.w0 2d /r] AVX,SANDYBRIDGE
|
|
VMASKMOVPD mem128,xmmreg,xmmreg [mvr: vex.nds.128.66.0f38.w0 2f /r] AVX,SANDYBRIDGE
|
|
VMASKMOVPD mem256,ymmreg,ymmreg [mvr: vex.nds.256.66.0f38.w0 2f /r] AVX,SANDYBRIDGE
|
|
VMAXPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 5f /r] AVX,SANDYBRIDGE
|
|
VMAXPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 5f /r] AVX,SANDYBRIDGE
|
|
VMAXPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 5f /r] AVX,SANDYBRIDGE
|
|
VMAXPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 5f /r] AVX,SANDYBRIDGE
|
|
VMAXSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 5f /r] AVX,SANDYBRIDGE
|
|
VMAXSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 5f /r] AVX,SANDYBRIDGE
|
|
VMINPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 5d /r] AVX,SANDYBRIDGE
|
|
VMINPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 5d /r] AVX,SANDYBRIDGE
|
|
VMINPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 5d /r] AVX,SANDYBRIDGE
|
|
VMINPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 5d /r] AVX,SANDYBRIDGE
|
|
VMINSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 5d /r] AVX,SANDYBRIDGE
|
|
VMINSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 5d /r] AVX,SANDYBRIDGE
|
|
VMOVAPD xmmreg,xmmrm128 [rm: vex.128.66.0f 28 /r] AVX,SANDYBRIDGE
|
|
VMOVAPD xmmrm128,xmmreg [mr: vex.128.66.0f 29 /r] AVX,SANDYBRIDGE
|
|
VMOVAPD ymmreg,ymmrm256 [rm: vex.256.66.0f 28 /r] AVX,SANDYBRIDGE
|
|
VMOVAPD ymmrm256,ymmreg [mr: vex.256.66.0f 29 /r] AVX,SANDYBRIDGE
|
|
VMOVAPS xmmreg,xmmrm128 [rm: vex.128.0f 28 /r] AVX,SANDYBRIDGE
|
|
VMOVAPS xmmrm128,xmmreg [mr: vex.128.0f 29 /r] AVX,SANDYBRIDGE
|
|
VMOVAPS ymmreg,ymmrm256 [rm: vex.256.0f 28 /r] AVX,SANDYBRIDGE
|
|
VMOVAPS ymmrm256,ymmreg [mr: vex.256.0f 29 /r] AVX,SANDYBRIDGE
|
|
VMOVD xmmreg,rm32 [rm: vex.128.66.0f.w0 6e /r] AVX,SANDYBRIDGE
|
|
VMOVD rm32,xmmreg [mr: vex.128.66.0f.w0 7e /r] AVX,SANDYBRIDGE
|
|
VMOVQ xmmreg,xmmrm64 [rm: vex.128.f3.0f 7e /r] AVX,SANDYBRIDGE,SQ
|
|
VMOVQ xmmrm64,xmmreg [mr: vex.128.66.0f d6 /r] AVX,SANDYBRIDGE,SQ
|
|
VMOVQ xmmreg,rm64 [rm: vex.128.66.0f.w1 6e /r] AVX,SANDYBRIDGE,LONG,SQ
|
|
VMOVQ rm64,xmmreg [mr: vex.128.66.0f.w1 7e /r] AVX,SANDYBRIDGE,LONG,SQ
|
|
VMOVDDUP xmmreg,xmmrm64 [rm: vex.128.f2.0f 12 /r] AVX,SANDYBRIDGE
|
|
VMOVDDUP ymmreg,ymmrm256 [rm: vex.256.f2.0f 12 /r] AVX,SANDYBRIDGE
|
|
VMOVDQA xmmreg,xmmrm128 [rm: vex.128.66.0f 6f /r] AVX,SANDYBRIDGE
|
|
VMOVDQA xmmrm128,xmmreg [mr: vex.128.66.0f 7f /r] AVX,SANDYBRIDGE
|
|
; These are officially documented as VMOVDQA, but VMOVQQA seems more logical to me...
|
|
VMOVQQA ymmreg,ymmrm256 [rm: vex.256.66.0f 6f /r] AVX,SANDYBRIDGE
|
|
VMOVQQA ymmrm256,ymmreg [mr: vex.256.66.0f 7f /r] AVX,SANDYBRIDGE
|
|
VMOVDQA ymmreg,ymmrm256 [rm: vex.256.66.0f 6f /r] AVX,SANDYBRIDGE
|
|
VMOVDQA ymmrm256,ymmreg [mr: vex.256.66.0f 7f /r] AVX,SANDYBRIDGE
|
|
VMOVDQU xmmreg,xmmrm128 [rm: vex.128.f3.0f 6f /r] AVX,SANDYBRIDGE
|
|
VMOVDQU xmmrm128,xmmreg [mr: vex.128.f3.0f 7f /r] AVX,SANDYBRIDGE
|
|
; These are officially documented as VMOVDQU, but VMOVQQU seems more logical to me...
|
|
VMOVQQU ymmreg,ymmrm256 [rm: vex.256.f3.0f 6f /r] AVX,SANDYBRIDGE
|
|
VMOVQQU ymmrm256,ymmreg [mr: vex.256.f3.0f 7f /r] AVX,SANDYBRIDGE
|
|
VMOVDQU ymmreg,ymmrm256 [rm: vex.256.f3.0f 6f /r] AVX,SANDYBRIDGE
|
|
VMOVDQU ymmrm256,ymmreg [mr: vex.256.f3.0f 7f /r] AVX,SANDYBRIDGE
|
|
VMOVHLPS xmmreg,xmmreg*,xmmreg [rvm: vex.nds.128.0f 12 /r] AVX,SANDYBRIDGE
|
|
VMOVHPD xmmreg,xmmreg*,mem64 [rvm: vex.nds.128.66.0f 16 /r] AVX,SANDYBRIDGE
|
|
VMOVHPD mem64,xmmreg [mr: vex.128.66.0f 17 /r] AVX,SANDYBRIDGE
|
|
VMOVHPS xmmreg,xmmreg*,mem64 [rvm: vex.nds.128.0f 16 /r] AVX,SANDYBRIDGE
|
|
VMOVHPS mem64,xmmreg [mr: vex.128.0f 17 /r] AVX,SANDYBRIDGE
|
|
VMOVLHPS xmmreg,xmmreg*,xmmreg [rvm: vex.nds.128.0f 16 /r] AVX,SANDYBRIDGE
|
|
VMOVLPD xmmreg,xmmreg*,mem64 [rvm: vex.nds.128.66.0f 12 /r] AVX,SANDYBRIDGE
|
|
VMOVLPD mem64,xmmreg [mr: vex.128.66.0f 13 /r] AVX,SANDYBRIDGE
|
|
VMOVLPS xmmreg,xmmreg*,mem64 [rvm: vex.nds.128.0f 12 /r] AVX,SANDYBRIDGE
|
|
VMOVLPS mem64,xmmreg [mr: vex.128.0f 13 /r] AVX,SANDYBRIDGE
|
|
VMOVMSKPD reg64,xmmreg [rm: vex.128.66.0f 50 /r] AVX,SANDYBRIDGE,LONG
|
|
VMOVMSKPD reg32,xmmreg [rm: vex.128.66.0f 50 /r] AVX,SANDYBRIDGE
|
|
VMOVMSKPD reg64,ymmreg [rm: vex.256.66.0f 50 /r] AVX,SANDYBRIDGE,LONG
|
|
VMOVMSKPD reg32,ymmreg [rm: vex.256.66.0f 50 /r] AVX,SANDYBRIDGE
|
|
VMOVMSKPS reg64,xmmreg [rm: vex.128.0f 50 /r] AVX,SANDYBRIDGE,LONG
|
|
VMOVMSKPS reg32,xmmreg [rm: vex.128.0f 50 /r] AVX,SANDYBRIDGE
|
|
VMOVMSKPS reg64,ymmreg [rm: vex.256.0f 50 /r] AVX,SANDYBRIDGE,LONG
|
|
VMOVMSKPS reg32,ymmreg [rm: vex.256.0f 50 /r] AVX,SANDYBRIDGE
|
|
VMOVNTDQ mem128,xmmreg [mr: vex.128.66.0f e7 /r] AVX,SANDYBRIDGE
|
|
; Officially VMOVNTDQ, but VMOVNTQQ seems more logical to me...
|
|
VMOVNTQQ mem256,ymmreg [mr: vex.256.66.0f e7 /r] AVX,SANDYBRIDGE
|
|
VMOVNTDQ mem256,ymmreg [mr: vex.256.66.0f e7 /r] AVX,SANDYBRIDGE
|
|
VMOVNTDQA xmmreg,mem128 [rm: vex.128.66.0f38 2a /r] AVX,SANDYBRIDGE
|
|
VMOVNTPD mem128,xmmreg [mr: vex.128.66.0f 2b /r] AVX,SANDYBRIDGE
|
|
VMOVNTPD mem256,ymmreg [mr: vex.256.66.0f 2b /r] AVX,SANDYBRIDGE
|
|
VMOVNTPS mem128,xmmreg [mr: vex.128.0f 2b /r] AVX,SANDYBRIDGE
|
|
VMOVNTPS mem256,ymmreg [mr: vex.256.0f 2b /r] AVX,SANDYBRIDGE
|
|
VMOVSD xmmreg,xmmreg*,xmmreg [rvm: vex.nds.lig.f2.0f 10 /r] AVX,SANDYBRIDGE
|
|
VMOVSD xmmreg,mem64 [rm: vex.lig.f2.0f 10 /r] AVX,SANDYBRIDGE
|
|
VMOVSD xmmreg,xmmreg*,xmmreg [mvr: vex.nds.lig.f2.0f 11 /r] AVX,SANDYBRIDGE
|
|
VMOVSD mem64,xmmreg [mr: vex.lig.f2.0f 11 /r] AVX,SANDYBRIDGE
|
|
VMOVSHDUP xmmreg,xmmrm128 [rm: vex.128.f3.0f 16 /r] AVX,SANDYBRIDGE
|
|
VMOVSHDUP ymmreg,ymmrm256 [rm: vex.256.f3.0f 16 /r] AVX,SANDYBRIDGE
|
|
VMOVSLDUP xmmreg,xmmrm128 [rm: vex.128.f3.0f 12 /r] AVX,SANDYBRIDGE
|
|
VMOVSLDUP ymmreg,ymmrm256 [rm: vex.256.f3.0f 12 /r] AVX,SANDYBRIDGE
|
|
VMOVSS xmmreg,xmmreg*,xmmreg [rvm: vex.nds.lig.f3.0f 10 /r] AVX,SANDYBRIDGE
|
|
VMOVSS xmmreg,mem32 [rm: vex.lig.f3.0f 10 /r] AVX,SANDYBRIDGE
|
|
VMOVSS xmmreg,xmmreg*,xmmreg [mvr: vex.nds.lig.f3.0f 11 /r] AVX,SANDYBRIDGE
|
|
VMOVSS mem32,xmmreg [mr: vex.lig.f3.0f 11 /r] AVX,SANDYBRIDGE
|
|
VMOVUPD xmmreg,xmmrm128 [rm: vex.128.66.0f 10 /r] AVX,SANDYBRIDGE
|
|
VMOVUPD xmmrm128,xmmreg [mr: vex.128.66.0f 11 /r] AVX,SANDYBRIDGE
|
|
VMOVUPD ymmreg,ymmrm256 [rm: vex.256.66.0f 10 /r] AVX,SANDYBRIDGE
|
|
VMOVUPD ymmrm256,ymmreg [mr: vex.256.66.0f 11 /r] AVX,SANDYBRIDGE
|
|
VMOVUPS xmmreg,xmmrm128 [rm: vex.128.0f 10 /r] AVX,SANDYBRIDGE
|
|
VMOVUPS xmmrm128,xmmreg [mr: vex.128.0f 11 /r] AVX,SANDYBRIDGE
|
|
VMOVUPS ymmreg,ymmrm256 [rm: vex.256.0f 10 /r] AVX,SANDYBRIDGE
|
|
VMOVUPS ymmrm256,ymmreg [mr: vex.256.0f 11 /r] AVX,SANDYBRIDGE
|
|
VMPSADBW xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 42 /r ib] AVX,SANDYBRIDGE
|
|
VMULPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 59 /r] AVX,SANDYBRIDGE
|
|
VMULPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 59 /r] AVX,SANDYBRIDGE
|
|
VMULPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 59 /r] AVX,SANDYBRIDGE
|
|
VMULPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 59 /r] AVX,SANDYBRIDGE
|
|
VMULSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 59 /r] AVX,SANDYBRIDGE
|
|
VMULSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 59 /r] AVX,SANDYBRIDGE
|
|
VORPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 56 /r] AVX,SANDYBRIDGE
|
|
VORPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 56 /r] AVX,SANDYBRIDGE
|
|
VORPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 56 /r] AVX,SANDYBRIDGE
|
|
VORPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 56 /r] AVX,SANDYBRIDGE
|
|
VPABSB xmmreg,xmmrm128 [rm: vex.128.66.0f38 1c /r] AVX,SANDYBRIDGE
|
|
VPABSW xmmreg,xmmrm128 [rm: vex.128.66.0f38 1d /r] AVX,SANDYBRIDGE
|
|
VPABSD xmmreg,xmmrm128 [rm: vex.128.66.0f38 1e /r] AVX,SANDYBRIDGE
|
|
VPACKSSWB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 63 /r] AVX,SANDYBRIDGE
|
|
VPACKSSDW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 6b /r] AVX,SANDYBRIDGE
|
|
VPACKUSWB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 67 /r] AVX,SANDYBRIDGE
|
|
VPACKUSDW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 2b /r] AVX,SANDYBRIDGE
|
|
VPADDB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f fc /r] AVX,SANDYBRIDGE
|
|
VPADDW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f fd /r] AVX,SANDYBRIDGE
|
|
VPADDD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f fe /r] AVX,SANDYBRIDGE
|
|
VPADDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d4 /r] AVX,SANDYBRIDGE
|
|
VPADDSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f ec /r] AVX,SANDYBRIDGE
|
|
VPADDSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f ed /r] AVX,SANDYBRIDGE
|
|
VPADDUSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f dc /r] AVX,SANDYBRIDGE
|
|
VPADDUSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f dd /r] AVX,SANDYBRIDGE
|
|
VPALIGNR xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 0f /r ib] AVX,SANDYBRIDGE
|
|
VPAND xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f db /r] AVX,SANDYBRIDGE
|
|
VPANDN xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f df /r] AVX,SANDYBRIDGE
|
|
VPAVGB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e0 /r] AVX,SANDYBRIDGE
|
|
VPAVGW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e3 /r] AVX,SANDYBRIDGE
|
|
VPBLENDVB xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.nds.128.66.0f3a.w0 4c /r /is4] AVX,SANDYBRIDGE
|
|
VPBLENDW xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 0e /r ib] AVX,SANDYBRIDGE
|
|
VPCMPESTRI xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 61 /r ib] AVX,SANDYBRIDGE
|
|
VPCMPESTRM xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 60 /r ib] AVX,SANDYBRIDGE
|
|
VPCMPISTRI xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 63 /r ib] AVX,SANDYBRIDGE
|
|
VPCMPISTRM xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 62 /r ib] AVX,SANDYBRIDGE
|
|
VPCMPEQB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 74 /r] AVX,SANDYBRIDGE
|
|
VPCMPEQW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 75 /r] AVX,SANDYBRIDGE
|
|
VPCMPEQD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 76 /r] AVX,SANDYBRIDGE
|
|
VPCMPEQQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 29 /r] AVX,SANDYBRIDGE
|
|
VPCMPGTB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 64 /r] AVX,SANDYBRIDGE
|
|
VPCMPGTW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 65 /r] AVX,SANDYBRIDGE
|
|
VPCMPGTD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 66 /r] AVX,SANDYBRIDGE
|
|
VPCMPGTQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 37 /r] AVX,SANDYBRIDGE
|
|
VPERMILPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38.w0 0d /r] AVX,SANDYBRIDGE
|
|
VPERMILPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.w0 0d /r] AVX,SANDYBRIDGE
|
|
VPERMILPD xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a.w0 05 /r ib] AVX,SANDYBRIDGE
|
|
VPERMILPD ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f3a.w0 05 /r ib] AVX,SANDYBRIDGE
|
|
VPERMILPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38.w0 0c /r] AVX,SANDYBRIDGE
|
|
VPERMILPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.w0 0c /r] AVX,SANDYBRIDGE
|
|
VPERMILPS xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a.w0 04 /r ib] AVX,SANDYBRIDGE
|
|
VPERMILPS ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f3a.w0 04 /r ib] AVX,SANDYBRIDGE
|
|
VPERM2F128 ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a.w0 06 /r ib] AVX,SANDYBRIDGE
|
|
VPEXTRB reg64,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 14 /r ib] AVX,SANDYBRIDGE,LONG
|
|
VPEXTRB reg32,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 14 /r ib] AVX,SANDYBRIDGE
|
|
VPEXTRB mem8,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 14 /r ib] AVX,SANDYBRIDGE
|
|
VPEXTRW reg64,xmmreg,imm8 [rmi: vex.128.66.0f.w0 c5 /r ib] AVX,SANDYBRIDGE,LONG
|
|
VPEXTRW reg32,xmmreg,imm8 [rmi: vex.128.66.0f.w0 c5 /r ib] AVX,SANDYBRIDGE
|
|
VPEXTRW reg64,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 15 /r ib] AVX,SANDYBRIDGE,LONG
|
|
VPEXTRW reg32,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 15 /r ib] AVX,SANDYBRIDGE
|
|
VPEXTRW mem16,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 15 /r ib] AVX,SANDYBRIDGE
|
|
VPEXTRD reg64,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 16 /r ib] AVX,SANDYBRIDGE,LONG
|
|
VPEXTRD rm32,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 16 /r ib] AVX,SANDYBRIDGE
|
|
VPEXTRQ rm64,xmmreg,imm8 [mri: vex.128.66.0f3a.w1 16 /r ib] AVX,SANDYBRIDGE,LONG
|
|
VPHADDW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 01 /r] AVX,SANDYBRIDGE
|
|
VPHADDD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 02 /r] AVX,SANDYBRIDGE
|
|
VPHADDSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 03 /r] AVX,SANDYBRIDGE
|
|
VPHMINPOSUW xmmreg,xmmrm128 [rm: vex.128.66.0f38 41 /r] AVX,SANDYBRIDGE
|
|
VPHSUBW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 05 /r] AVX,SANDYBRIDGE
|
|
VPHSUBD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 06 /r] AVX,SANDYBRIDGE
|
|
VPHSUBSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 07 /r] AVX,SANDYBRIDGE
|
|
VPINSRB xmmreg,xmmreg*,mem8,imm8 [rvmi: vex.nds.128.66.0f3a 20 /r ib] AVX,SANDYBRIDGE
|
|
VPINSRB xmmreg,xmmreg*,rm8,imm8 [rvmi: vex.nds.128.66.0f3a 20 /r ib] AVX,SANDYBRIDGE
|
|
VPINSRB xmmreg,xmmreg*,reg32,imm8 [rvmi: vex.nds.128.66.0f3a 20 /r ib] AVX,SANDYBRIDGE
|
|
VPINSRW xmmreg,xmmreg*,mem16,imm8 [rvmi: vex.nds.128.66.0f c4 /r ib] AVX,SANDYBRIDGE
|
|
VPINSRW xmmreg,xmmreg*,rm16,imm8 [rvmi: vex.nds.128.66.0f c4 /r ib] AVX,SANDYBRIDGE
|
|
VPINSRW xmmreg,xmmreg*,reg32,imm8 [rvmi: vex.nds.128.66.0f c4 /r ib] AVX,SANDYBRIDGE
|
|
VPINSRD xmmreg,xmmreg*,mem32,imm8 [rvmi: vex.nds.128.66.0f3a.w0 22 /r ib] AVX,SANDYBRIDGE
|
|
VPINSRD xmmreg,xmmreg*,rm32,imm8 [rvmi: vex.nds.128.66.0f3a.w0 22 /r ib] AVX,SANDYBRIDGE
|
|
VPINSRQ xmmreg,xmmreg*,mem64,imm8 [rvmi: vex.nds.128.66.0f3a.w1 22 /r ib] AVX,SANDYBRIDGE,LONG
|
|
VPINSRQ xmmreg,xmmreg*,rm64,imm8 [rvmi: vex.nds.128.66.0f3a.w1 22 /r ib] AVX,SANDYBRIDGE,LONG
|
|
VPMADDWD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f5 /r] AVX,SANDYBRIDGE
|
|
VPMADDUBSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 04 /r] AVX,SANDYBRIDGE
|
|
VPMAXSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3c /r] AVX,SANDYBRIDGE
|
|
VPMAXSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f ee /r] AVX,SANDYBRIDGE
|
|
VPMAXSD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3d /r] AVX,SANDYBRIDGE
|
|
VPMAXUB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f de /r] AVX,SANDYBRIDGE
|
|
VPMAXUW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3e /r] AVX,SANDYBRIDGE
|
|
VPMAXUD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3f /r] AVX,SANDYBRIDGE
|
|
VPMINSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 38 /r] AVX,SANDYBRIDGE
|
|
VPMINSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f ea /r] AVX,SANDYBRIDGE
|
|
VPMINSD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 39 /r] AVX,SANDYBRIDGE
|
|
VPMINUB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f da /r] AVX,SANDYBRIDGE
|
|
VPMINUW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3a /r] AVX,SANDYBRIDGE
|
|
VPMINUD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 3b /r] AVX,SANDYBRIDGE
|
|
VPMOVMSKB reg64,xmmreg [rm: vex.128.66.0f d7 /r] AVX,SANDYBRIDGE,LONG
|
|
VPMOVMSKB reg32,xmmreg [rm: vex.128.66.0f d7 /r] AVX,SANDYBRIDGE
|
|
VPMOVSXBW xmmreg,xmmrm64 [rm: vex.128.66.0f38 20 /r] AVX,SANDYBRIDGE
|
|
VPMOVSXBD xmmreg,xmmrm32 [rm: vex.128.66.0f38 21 /r] AVX,SANDYBRIDGE
|
|
VPMOVSXBQ xmmreg,xmmrm16 [rm: vex.128.66.0f38 22 /r] AVX,SANDYBRIDGE
|
|
VPMOVSXWD xmmreg,xmmrm64 [rm: vex.128.66.0f38 23 /r] AVX,SANDYBRIDGE
|
|
VPMOVSXWQ xmmreg,xmmrm32 [rm: vex.128.66.0f38 24 /r] AVX,SANDYBRIDGE
|
|
VPMOVSXDQ xmmreg,xmmrm64 [rm: vex.128.66.0f38 25 /r] AVX,SANDYBRIDGE
|
|
VPMOVZXBW xmmreg,xmmrm64 [rm: vex.128.66.0f38 30 /r] AVX,SANDYBRIDGE
|
|
VPMOVZXBD xmmreg,xmmrm32 [rm: vex.128.66.0f38 31 /r] AVX,SANDYBRIDGE
|
|
VPMOVZXBQ xmmreg,xmmrm16 [rm: vex.128.66.0f38 32 /r] AVX,SANDYBRIDGE
|
|
VPMOVZXWD xmmreg,xmmrm64 [rm: vex.128.66.0f38 33 /r] AVX,SANDYBRIDGE
|
|
VPMOVZXWQ xmmreg,xmmrm32 [rm: vex.128.66.0f38 34 /r] AVX,SANDYBRIDGE
|
|
VPMOVZXDQ xmmreg,xmmrm64 [rm: vex.128.66.0f38 35 /r] AVX,SANDYBRIDGE
|
|
VPMULHUW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e4 /r] AVX,SANDYBRIDGE
|
|
VPMULHRSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 0b /r] AVX,SANDYBRIDGE
|
|
VPMULHW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e5 /r] AVX,SANDYBRIDGE
|
|
VPMULLW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d5 /r] AVX,SANDYBRIDGE
|
|
VPMULLD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 40 /r] AVX,SANDYBRIDGE
|
|
VPMULUDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f4 /r] AVX,SANDYBRIDGE
|
|
VPMULDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 28 /r] AVX,SANDYBRIDGE
|
|
VPOR xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f eb /r] AVX,SANDYBRIDGE
|
|
VPSADBW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f6 /r] AVX,SANDYBRIDGE
|
|
VPSHUFB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 00 /r] AVX,SANDYBRIDGE
|
|
VPSHUFD xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f 70 /r ib] AVX,SANDYBRIDGE
|
|
VPSHUFHW xmmreg,xmmrm128,imm8 [rmi: vex.128.f3.0f 70 /r ib] AVX,SANDYBRIDGE
|
|
VPSHUFLW xmmreg,xmmrm128,imm8 [rmi: vex.128.f2.0f 70 /r ib] AVX,SANDYBRIDGE
|
|
VPSIGNB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 08 /r] AVX,SANDYBRIDGE
|
|
VPSIGNW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 09 /r] AVX,SANDYBRIDGE
|
|
VPSIGND xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38 0a /r] AVX,SANDYBRIDGE
|
|
VPSLLDQ xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 73 /7 ib] AVX,SANDYBRIDGE
|
|
VPSRLDQ xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 73 /3 ib] AVX,SANDYBRIDGE
|
|
VPSLLW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f1 /r] AVX,SANDYBRIDGE
|
|
VPSLLW xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 71 /6 ib] AVX,SANDYBRIDGE
|
|
VPSLLD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f2 /r] AVX,SANDYBRIDGE
|
|
VPSLLD xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 72 /6 ib] AVX,SANDYBRIDGE
|
|
VPSLLQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f3 /r] AVX,SANDYBRIDGE
|
|
VPSLLQ xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 73 /6 ib] AVX,SANDYBRIDGE
|
|
VPSRAW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e1 /r] AVX,SANDYBRIDGE
|
|
VPSRAW xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 71 /4 ib] AVX,SANDYBRIDGE
|
|
VPSRAD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e2 /r] AVX,SANDYBRIDGE
|
|
VPSRAD xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 72 /4 ib] AVX,SANDYBRIDGE
|
|
VPSRLW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d1 /r] AVX,SANDYBRIDGE
|
|
VPSRLW xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 71 /2 ib] AVX,SANDYBRIDGE
|
|
VPSRLD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d2 /r] AVX,SANDYBRIDGE
|
|
VPSRLD xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 72 /2 ib] AVX,SANDYBRIDGE
|
|
VPSRLQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d3 /r] AVX,SANDYBRIDGE
|
|
VPSRLQ xmmreg,xmmreg*,imm8 [vmi: vex.ndd.128.66.0f 73 /2 ib] AVX,SANDYBRIDGE
|
|
VPTEST xmmreg,xmmrm128 [rm: vex.128.66.0f38 17 /r] AVX,SANDYBRIDGE
|
|
VPTEST ymmreg,ymmrm256 [rm: vex.256.66.0f38 17 /r] AVX,SANDYBRIDGE
|
|
VPSUBB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f8 /r] AVX,SANDYBRIDGE
|
|
VPSUBW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f f9 /r] AVX,SANDYBRIDGE
|
|
VPSUBD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f fa /r] AVX,SANDYBRIDGE
|
|
VPSUBQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f fb /r] AVX,SANDYBRIDGE
|
|
VPSUBSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e8 /r] AVX,SANDYBRIDGE
|
|
VPSUBSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f e9 /r] AVX,SANDYBRIDGE
|
|
VPSUBUSB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d8 /r] AVX,SANDYBRIDGE
|
|
VPSUBUSW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f d9 /r] AVX,SANDYBRIDGE
|
|
VPUNPCKHBW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 68 /r] AVX,SANDYBRIDGE
|
|
VPUNPCKHWD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 69 /r] AVX,SANDYBRIDGE
|
|
VPUNPCKHDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 6a /r] AVX,SANDYBRIDGE
|
|
VPUNPCKHQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 6d /r] AVX,SANDYBRIDGE
|
|
VPUNPCKLBW xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 60 /r] AVX,SANDYBRIDGE
|
|
VPUNPCKLWD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 61 /r] AVX,SANDYBRIDGE
|
|
VPUNPCKLDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 62 /r] AVX,SANDYBRIDGE
|
|
VPUNPCKLQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 6c /r] AVX,SANDYBRIDGE
|
|
VPXOR xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f ef /r] AVX,SANDYBRIDGE
|
|
VRCPPS xmmreg,xmmrm128 [rm: vex.128.0f 53 /r] AVX,SANDYBRIDGE
|
|
VRCPPS ymmreg,ymmrm256 [rm: vex.256.0f 53 /r] AVX,SANDYBRIDGE
|
|
VRCPSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 53 /r] AVX,SANDYBRIDGE
|
|
VRSQRTPS xmmreg,xmmrm128 [rm: vex.128.0f 52 /r] AVX,SANDYBRIDGE
|
|
VRSQRTPS ymmreg,ymmrm256 [rm: vex.256.0f 52 /r] AVX,SANDYBRIDGE
|
|
VRSQRTSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 52 /r] AVX,SANDYBRIDGE
|
|
VROUNDPD xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 09 /r ib] AVX,SANDYBRIDGE
|
|
VROUNDPD ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f3a 09 /r ib] AVX,SANDYBRIDGE
|
|
VROUNDPS xmmreg,xmmrm128,imm8 [rmi: vex.128.66.0f3a 08 /r ib] AVX,SANDYBRIDGE
|
|
VROUNDPS ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f3a 08 /r ib] AVX,SANDYBRIDGE
|
|
VROUNDSD xmmreg,xmmreg*,xmmrm64,imm8 [rvmi: vex.nds.128.66.0f3a 0b /r ib] AVX,SANDYBRIDGE
|
|
VROUNDSS xmmreg,xmmreg*,xmmrm32,imm8 [rvmi: vex.nds.128.66.0f3a 0a /r ib] AVX,SANDYBRIDGE
|
|
VSHUFPD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f c6 /r ib] AVX,SANDYBRIDGE
|
|
VSHUFPD ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f c6 /r ib] AVX,SANDYBRIDGE
|
|
VSHUFPS xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.0f c6 /r ib] AVX,SANDYBRIDGE
|
|
VSHUFPS ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.0f c6 /r ib] AVX,SANDYBRIDGE
|
|
VSQRTPD xmmreg,xmmrm128 [rm: vex.128.66.0f 51 /r] AVX,SANDYBRIDGE
|
|
VSQRTPD ymmreg,ymmrm256 [rm: vex.256.66.0f 51 /r] AVX,SANDYBRIDGE
|
|
VSQRTPS xmmreg,xmmrm128 [rm: vex.128.0f 51 /r] AVX,SANDYBRIDGE
|
|
VSQRTPS ymmreg,ymmrm256 [rm: vex.256.0f 51 /r] AVX,SANDYBRIDGE
|
|
VSQRTSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 51 /r] AVX,SANDYBRIDGE
|
|
VSQRTSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 51 /r] AVX,SANDYBRIDGE
|
|
VSTMXCSR mem32 [m: vex.128.0f ae /3] AVX,SANDYBRIDGE
|
|
VSUBPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 5c /r] AVX,SANDYBRIDGE
|
|
VSUBPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 5c /r] AVX,SANDYBRIDGE
|
|
VSUBPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 5c /r] AVX,SANDYBRIDGE
|
|
VSUBPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 5c /r] AVX,SANDYBRIDGE
|
|
VSUBSD xmmreg,xmmreg*,xmmrm64 [rvm: vex.nds.lig.f2.0f 5c /r] AVX,SANDYBRIDGE
|
|
VSUBSS xmmreg,xmmreg*,xmmrm32 [rvm: vex.nds.lig.f3.0f 5c /r] AVX,SANDYBRIDGE
|
|
VTESTPS xmmreg,xmmrm128 [rm: vex.128.66.0f38.w0 0e /r] AVX,SANDYBRIDGE
|
|
VTESTPS ymmreg,ymmrm256 [rm: vex.256.66.0f38.w0 0e /r] AVX,SANDYBRIDGE
|
|
VTESTPD xmmreg,xmmrm128 [rm: vex.128.66.0f38.w0 0f /r] AVX,SANDYBRIDGE
|
|
VTESTPD ymmreg,ymmrm256 [rm: vex.256.66.0f38.w0 0f /r] AVX,SANDYBRIDGE
|
|
VUCOMISD xmmreg,xmmrm64 [rm: vex.lig.66.0f 2e /r] AVX,SANDYBRIDGE
|
|
VUCOMISS xmmreg,xmmrm32 [rm: vex.lig.0f 2e /r] AVX,SANDYBRIDGE
|
|
VUNPCKHPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 15 /r] AVX,SANDYBRIDGE
|
|
VUNPCKHPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 15 /r] AVX,SANDYBRIDGE
|
|
VUNPCKHPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 15 /r] AVX,SANDYBRIDGE
|
|
VUNPCKHPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 15 /r] AVX,SANDYBRIDGE
|
|
VUNPCKLPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 14 /r] AVX,SANDYBRIDGE
|
|
VUNPCKLPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 14 /r] AVX,SANDYBRIDGE
|
|
VUNPCKLPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 14 /r] AVX,SANDYBRIDGE
|
|
VUNPCKLPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 14 /r] AVX,SANDYBRIDGE
|
|
VXORPD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f 57 /r] AVX,SANDYBRIDGE
|
|
VXORPD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 57 /r] AVX,SANDYBRIDGE
|
|
VXORPS xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.0f 57 /r] AVX,SANDYBRIDGE
|
|
VXORPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.0f 57 /r] AVX,SANDYBRIDGE
|
|
VZEROALL void [ vex.256.0f.w0 77] AVX,SANDYBRIDGE
|
|
VZEROUPPER void [ vex.128.0f.w0 77] AVX,SANDYBRIDGE
;# Intel Carry-Less Multiplication instructions (CLMUL)
PCLMULLQLQDQ xmmreg,xmmrm128 [rm: 66 0f3a 44 /r 00] SSE,WESTMERE
PCLMULHQLQDQ xmmreg,xmmrm128 [rm: 66 0f3a 44 /r 01] SSE,WESTMERE
PCLMULLQHQDQ xmmreg,xmmrm128 [rm: 66 0f3a 44 /r 10] SSE,WESTMERE
PCLMULHQHQDQ xmmreg,xmmrm128 [rm: 66 0f3a 44 /r 11] SSE,WESTMERE
PCLMULQDQ xmmreg,xmmrm128,imm8 [rmi: 66 0f3a 44 /r ib] SSE,WESTMERE
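; Note: the PCLMUL{L,H}Q{L,H}QDQ forms above are PCLMULQDQ with the trailing
; byte supplying an implicit imm8 (00h, 01h, 10h or 11h) that selects the low
; or high quadword of each source operand.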
;# Intel AVX Carry-Less Multiplication instructions (CLMUL)
VPCLMULLQLQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f3a 44 /r 00] AVX,SANDYBRIDGE
VPCLMULHQLQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f3a 44 /r 01] AVX,SANDYBRIDGE
VPCLMULLQHQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f3a 44 /r 10] AVX,SANDYBRIDGE
VPCLMULHQHQDQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f3a 44 /r 11] AVX,SANDYBRIDGE
VPCLMULQDQ xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a 44 /r ib] AVX,SANDYBRIDGE
; Intel VPCLMULQDQ instructions
VPCLMULLQLQDQ ymmreg,ymmreg*,ymmrm256 [rvm:fv: vex.nds.256.66.0f3a.wig 44 /r 00] VPCLMULQDQ
VPCLMULHQLQDQ ymmreg,ymmreg*,ymmrm256 [rvm:fv: vex.nds.256.66.0f3a.wig 44 /r 01] VPCLMULQDQ
VPCLMULLQHQDQ ymmreg,ymmreg*,ymmrm256 [rvm:fv: vex.nds.256.66.0f3a.wig 44 /r 10] VPCLMULQDQ
VPCLMULHQHQDQ ymmreg,ymmreg*,ymmrm256 [rvm:fv: vex.nds.256.66.0f3a.wig 44 /r 11] VPCLMULQDQ
VPCLMULQDQ ymmreg,ymmreg*,ymmrm256,imm8 [rvmi:fv: vex.nds.256.66.0f3a.wig 44 /r ib] VPCLMULQDQ
; Intel VPCLMULQDQ + AVX512VL instructions
VPCLMULLQLQDQ xmmreg,xmmreg*,xmmrm128 [rvm:fv: evex.nds.128.66.0f3a.wig 44 /r 00] AVX512VL,VPCLMULQDQ
VPCLMULHQLQDQ xmmreg,xmmreg*,xmmrm128 [rvm:fv: evex.nds.128.66.0f3a.wig 44 /r 01] AVX512VL,VPCLMULQDQ
VPCLMULLQHQDQ xmmreg,xmmreg*,xmmrm128 [rvm:fv: evex.nds.128.66.0f3a.wig 44 /r 10] AVX512VL,VPCLMULQDQ
VPCLMULHQHQDQ xmmreg,xmmreg*,xmmrm128 [rvm:fv: evex.nds.128.66.0f3a.wig 44 /r 11] AVX512VL,VPCLMULQDQ
VPCLMULQDQ xmmreg,xmmreg*,xmmrm128,imm8 [rvmi:fv: evex.nds.128.66.0f3a.wig 44 /r ib] AVX512VL,VPCLMULQDQ
VPCLMULLQLQDQ ymmreg,ymmreg*,ymmrm256 [rvm:fv: evex.nds.256.66.0f3a.wig 44 /r 00] AVX512VL,VPCLMULQDQ
VPCLMULHQLQDQ ymmreg,ymmreg*,ymmrm256 [rvm:fv: evex.nds.256.66.0f3a.wig 44 /r 01] AVX512VL,VPCLMULQDQ
VPCLMULLQHQDQ ymmreg,ymmreg*,ymmrm256 [rvm:fv: evex.nds.256.66.0f3a.wig 44 /r 10] AVX512VL,VPCLMULQDQ
VPCLMULHQHQDQ ymmreg,ymmreg*,ymmrm256 [rvm:fv: evex.nds.256.66.0f3a.wig 44 /r 11] AVX512VL,VPCLMULQDQ
VPCLMULQDQ ymmreg,ymmreg*,ymmrm256,imm8 [rvmi:fv: evex.nds.256.66.0f3a.wig 44 /r ib] AVX512VL,VPCLMULQDQ
; Intel VPCLMULQDQ + AVX512F instructions
VPCLMULLQLQDQ zmmreg,zmmreg*,zmmrm512 [rvm:fv: evex.nds.512.66.0f3a.wig 44 /r 00] AVX512,VPCLMULQDQ
VPCLMULHQLQDQ zmmreg,zmmreg*,zmmrm512 [rvm:fv: evex.nds.512.66.0f3a.wig 44 /r 01] AVX512,VPCLMULQDQ
VPCLMULLQHQDQ zmmreg,zmmreg*,zmmrm512 [rvm:fv: evex.nds.512.66.0f3a.wig 44 /r 10] AVX512,VPCLMULQDQ
VPCLMULHQHQDQ zmmreg,zmmreg*,zmmrm512 [rvm:fv: evex.nds.512.66.0f3a.wig 44 /r 11] AVX512,VPCLMULQDQ
VPCLMULQDQ zmmreg,zmmreg*,zmmrm512,imm8 [rvmi:fv: evex.nds.512.66.0f3a.wig 44 /r ib] AVX512,VPCLMULQDQ
;# Intel Fused Multiply-Add instructions (FMA)
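;
; Each operation appears in its 132, 213 and 231 operand orderings; the
; 312, 123 and 321 mnemonics listed alongside them are aliases that assemble
; to identical encodings, as the code strings below show.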
VFMADD132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 98 /r] FMA
|
|
VFMADD132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 98 /r] FMA
|
|
VFMADD132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 98 /r] FMA
|
|
VFMADD132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 98 /r] FMA
|
|
VFMADD312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 98 /r] FMA
|
|
VFMADD312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 98 /r] FMA
|
|
VFMADD312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 98 /r] FMA
|
|
VFMADD312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 98 /r] FMA
|
|
VFMADD213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a8 /r] FMA
|
|
VFMADD213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a8 /r] FMA
|
|
VFMADD213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a8 /r] FMA
|
|
VFMADD213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a8 /r] FMA
|
|
VFMADD123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a8 /r] FMA
|
|
VFMADD123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a8 /r] FMA
|
|
VFMADD123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a8 /r] FMA
|
|
VFMADD123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a8 /r] FMA
|
|
VFMADD231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b8 /r] FMA
|
|
VFMADD231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b8 /r] FMA
|
|
VFMADD231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b8 /r] FMA
|
|
VFMADD231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b8 /r] FMA
|
|
VFMADD321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b8 /r] FMA
|
|
VFMADD321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b8 /r] FMA
|
|
VFMADD321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b8 /r] FMA
|
|
VFMADD321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b8 /r] FMA
|
|
VFMADDSUB132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 96 /r] FMA
|
|
VFMADDSUB132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 96 /r] FMA
|
|
VFMADDSUB132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 96 /r] FMA
|
|
VFMADDSUB132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 96 /r] FMA
|
|
VFMADDSUB312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 96 /r] FMA
|
|
VFMADDSUB312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 96 /r] FMA
|
|
VFMADDSUB312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 96 /r] FMA
|
|
VFMADDSUB312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 96 /r] FMA
|
|
VFMADDSUB213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a6 /r] FMA
|
|
VFMADDSUB213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a6 /r] FMA
|
|
VFMADDSUB213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a6 /r] FMA
|
|
VFMADDSUB213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a6 /r] FMA
|
|
VFMADDSUB123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a6 /r] FMA
|
|
VFMADDSUB123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a6 /r] FMA
|
|
VFMADDSUB123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a6 /r] FMA
|
|
VFMADDSUB123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a6 /r] FMA
|
|
VFMADDSUB231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b6 /r] FMA
|
|
VFMADDSUB231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b6 /r] FMA
|
|
VFMADDSUB231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b6 /r] FMA
|
|
VFMADDSUB231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b6 /r] FMA
|
|
VFMADDSUB321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b6 /r] FMA
|
|
VFMADDSUB321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b6 /r] FMA
|
|
VFMADDSUB321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b6 /r] FMA
|
|
VFMADDSUB321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b6 /r] FMA
|
|
VFMSUB132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9a /r] FMA
|
|
VFMSUB132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9a /r] FMA
|
|
VFMSUB132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9a /r] FMA
|
|
VFMSUB132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9a /r] FMA
|
|
VFMSUB312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9a /r] FMA
|
|
VFMSUB312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9a /r] FMA
|
|
VFMSUB312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9a /r] FMA
|
|
VFMSUB312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9a /r] FMA
|
|
VFMSUB213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 aa /r] FMA
|
|
VFMSUB213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 aa /r] FMA
|
|
VFMSUB213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 aa /r] FMA
|
|
VFMSUB213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 aa /r] FMA
|
|
VFMSUB123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 aa /r] FMA
|
|
VFMSUB123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 aa /r] FMA
|
|
VFMSUB123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 aa /r] FMA
|
|
VFMSUB123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 aa /r] FMA
|
|
VFMSUB231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ba /r] FMA
|
|
VFMSUB231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ba /r] FMA
|
|
VFMSUB231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ba /r] FMA
|
|
VFMSUB231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ba /r] FMA
|
|
VFMSUB321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ba /r] FMA
|
|
VFMSUB321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ba /r] FMA
|
|
VFMSUB321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ba /r] FMA
|
|
VFMSUB321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ba /r] FMA
|
|
VFMSUBADD132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 97 /r] FMA
|
|
VFMSUBADD132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 97 /r] FMA
|
|
VFMSUBADD132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 97 /r] FMA
|
|
VFMSUBADD132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 97 /r] FMA
|
|
VFMSUBADD312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 97 /r] FMA
|
|
VFMSUBADD312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 97 /r] FMA
|
|
VFMSUBADD312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 97 /r] FMA
|
|
VFMSUBADD312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 97 /r] FMA
|
|
VFMSUBADD213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a7 /r] FMA
|
|
VFMSUBADD213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a7 /r] FMA
|
|
VFMSUBADD213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a7 /r] FMA
|
|
VFMSUBADD213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a7 /r] FMA
|
|
VFMSUBADD123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 a7 /r] FMA
|
|
VFMSUBADD123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 a7 /r] FMA
|
|
VFMSUBADD123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 a7 /r] FMA
|
|
VFMSUBADD123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 a7 /r] FMA
|
|
VFMSUBADD231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b7 /r] FMA
|
|
VFMSUBADD231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b7 /r] FMA
|
|
VFMSUBADD231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b7 /r] FMA
|
|
VFMSUBADD231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b7 /r] FMA
|
|
VFMSUBADD321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 b7 /r] FMA
|
|
VFMSUBADD321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 b7 /r] FMA
|
|
VFMSUBADD321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 b7 /r] FMA
|
|
VFMSUBADD321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 b7 /r] FMA
|
|
VFNMADD132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9c /r] FMA
|
|
VFNMADD132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9c /r] FMA
|
|
VFNMADD132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9c /r] FMA
|
|
VFNMADD132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9c /r] FMA
|
|
VFNMADD312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9c /r] FMA
|
|
VFNMADD312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9c /r] FMA
|
|
VFNMADD312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9c /r] FMA
|
|
VFNMADD312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9c /r] FMA
|
|
VFNMADD213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ac /r] FMA
|
|
VFNMADD213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ac /r] FMA
|
|
VFNMADD213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ac /r] FMA
|
|
VFNMADD213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ac /r] FMA
|
|
VFNMADD123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ac /r] FMA
|
|
VFNMADD123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ac /r] FMA
|
|
VFNMADD123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ac /r] FMA
|
|
VFNMADD123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ac /r] FMA
|
|
VFNMADD231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 bc /r] FMA
|
|
VFNMADD231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 bc /r] FMA
|
|
VFNMADD231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 bc /r] FMA
|
|
VFNMADD231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 bc /r] FMA
|
|
VFNMADD321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 bc /r] FMA
|
|
VFNMADD321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 bc /r] FMA
|
|
VFNMADD321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 bc /r] FMA
|
|
VFNMADD321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 bc /r] FMA
|
|
VFNMSUB132PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9e /r] FMA
|
|
VFNMSUB132PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9e /r] FMA
|
|
VFNMSUB132PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9e /r] FMA
|
|
VFNMSUB132PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9e /r] FMA
|
|
VFNMSUB312PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 9e /r] FMA
|
|
VFNMSUB312PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 9e /r] FMA
|
|
VFNMSUB312PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 9e /r] FMA
|
|
VFNMSUB312PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 9e /r] FMA
|
|
VFNMSUB213PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ae /r] FMA
|
|
VFNMSUB213PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ae /r] FMA
|
|
VFNMSUB213PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ae /r] FMA
|
|
VFNMSUB213PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ae /r] FMA
|
|
VFNMSUB123PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 ae /r] FMA
|
|
VFNMSUB123PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 ae /r] FMA
|
|
VFNMSUB123PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 ae /r] FMA
|
|
VFNMSUB123PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 ae /r] FMA
|
|
VFNMSUB231PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 be /r] FMA
|
|
VFNMSUB231PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 be /r] FMA
|
|
VFNMSUB231PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 be /r] FMA
|
|
VFNMSUB231PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 be /r] FMA
|
|
VFNMSUB321PS xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w0 be /r] FMA
|
|
VFNMSUB321PS ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w0 be /r] FMA
|
|
VFNMSUB321PD xmmreg,xmmreg,xmmrm128 [rvm: vex.dds.128.66.0f38.w1 be /r] FMA
|
|
VFNMSUB321PD ymmreg,ymmreg,ymmrm256 [rvm: vex.dds.256.66.0f38.w1 be /r] FMA
|
|
VFMADD132SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 99 /r] FMA
|
|
VFMADD132SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 99 /r] FMA
|
|
VFMADD312SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 99 /r] FMA
|
|
VFMADD312SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 99 /r] FMA
|
|
VFMADD213SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 a9 /r] FMA
|
|
VFMADD213SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 a9 /r] FMA
|
|
VFMADD123SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 a9 /r] FMA
|
|
VFMADD123SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 a9 /r] FMA
|
|
VFMADD231SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 b9 /r] FMA
|
|
VFMADD231SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 b9 /r] FMA
|
|
VFMADD321SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 b9 /r] FMA
|
|
VFMADD321SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 b9 /r] FMA
|
|
VFMSUB132SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9b /r] FMA
|
|
VFMSUB132SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9b /r] FMA
|
|
VFMSUB312SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9b /r] FMA
|
|
VFMSUB312SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9b /r] FMA
|
|
VFMSUB213SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 ab /r] FMA
|
|
VFMSUB213SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 ab /r] FMA
|
|
VFMSUB123SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 ab /r] FMA
|
|
VFMSUB123SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 ab /r] FMA
|
|
VFMSUB231SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bb /r] FMA
|
|
VFMSUB231SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bb /r] FMA
|
|
VFMSUB321SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bb /r] FMA
|
|
VFMSUB321SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bb /r] FMA
|
|
VFNMADD132SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9d /r] FMA
|
|
VFNMADD132SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9d /r] FMA
|
|
VFNMADD312SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9d /r] FMA
|
|
VFNMADD312SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9d /r] FMA
|
|
VFNMADD213SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 ad /r] FMA
|
|
VFNMADD213SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 ad /r] FMA
|
|
VFNMADD123SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 ad /r] FMA
|
|
VFNMADD123SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 ad /r] FMA
|
|
VFNMADD231SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bd /r] FMA
|
|
VFNMADD231SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bd /r] FMA
|
|
VFNMADD321SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bd /r] FMA
|
|
VFNMADD321SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bd /r] FMA
|
|
VFNMSUB132SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9f /r] FMA
|
|
VFNMSUB132SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9f /r] FMA
|
|
VFNMSUB312SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 9f /r] FMA
|
|
VFNMSUB312SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 9f /r] FMA
|
|
VFNMSUB213SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 af /r] FMA
|
|
VFNMSUB213SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 af /r] FMA
|
|
VFNMSUB123SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 af /r] FMA
|
|
VFNMSUB123SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 af /r] FMA
|
|
VFNMSUB231SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bf /r] FMA
|
|
VFNMSUB231SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bf /r] FMA
|
|
VFNMSUB321SS xmmreg,xmmreg,xmmrm32 [rvm: vex.dds.128.66.0f38.w0 bf /r] FMA
|
|
VFNMSUB321SD xmmreg,xmmreg,xmmrm64 [rvm: vex.dds.128.66.0f38.w1 bf /r] FMA
;# Intel post-32 nm processor instructions
;
; Per AVX spec revision 7, document 319433-007
VCVTPH2PS ymmreg,xmmrm128 [rm: vex.256.66.0f38.w0 13 /r] AVX
VCVTPH2PS xmmreg,xmmrm64 [rm: vex.128.66.0f38.w0 13 /r] AVX
VCVTPS2PH xmmrm128,ymmreg,imm8 [mri: vex.256.66.0f3a.w0 1d /r ib] AVX
VCVTPS2PH xmmrm64,xmmreg,imm8 [mri: vex.128.66.0f3a.w0 1d /r ib] AVX
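; (VCVTPH2PS/VCVTPS2PH convert packed values between 16-bit half precision
; and 32-bit single precision; the imm8 to VCVTPS2PH selects the rounding
; control.)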
;# Supervisor Mode Access Prevention (SMAP)
;# VIA (Centaur) security instructions
XSTORE void [ 0f a7 c0] PENT,CYRIX
XCRYPTECB void [ f3 0f a7 c8] PENT,CYRIX
XCRYPTCBC void [ f3 0f a7 d0] PENT,CYRIX
XCRYPTCTR void [ f3 0f a7 d8] PENT,CYRIX
XCRYPTCFB void [ f3 0f a7 e0] PENT,CYRIX
XCRYPTOFB void [ f3 0f a7 e8] PENT,CYRIX
MONTMUL void [ f3 0f a6 c0] PENT,CYRIX
XSHA1 void [ f3 0f a6 c8] PENT,CYRIX
XSHA256 void [ f3 0f a6 d0] PENT,CYRIX
;# AMD Lightweight Profiling (LWP) instructions
;
; based on pub number 43724 revision 3.04 dated August 2009
;
; updated to match draft from AMD developer (patch has been
; sent to binutils)
; 2010-03-22 Quentin Neill <quentin.neill@amd.com>
; Sebastian Pop <sebastian.pop@amd.com>
;
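; (LLWPCB/SLWPCB load and store the LWP control-block address; LWPVAL and
;  LWPINS insert a value sample or an event record into the LWP ring buffer.)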
LLWPCB reg32 [m: xop.m9.w0.l0.p0 12 /0] AMD,386
LLWPCB reg64 [m: xop.m9.w1.l0.p0 12 /0] AMD,X86_64,LONG
SLWPCB reg32 [m: xop.m9.w0.l0.p0 12 /1] AMD,386
SLWPCB reg64 [m: xop.m9.w1.l0.p0 12 /1] AMD,X86_64,LONG
LWPVAL reg32,rm32,imm32 [vmi: xop.m10.w0.ndd.l0.p0 12 /1 id] AMD,386
LWPVAL reg64,rm32,imm32 [vmi: xop.m10.w1.ndd.l0.p0 12 /1 id] AMD,X86_64,LONG
LWPINS reg32,rm32,imm32 [vmi: xop.m10.w0.ndd.l0.p0 12 /0 id] AMD,386
LWPINS reg64,rm32,imm32 [vmi: xop.m10.w1.ndd.l0.p0 12 /0 id] AMD,X86_64,LONG
;# AMD XOP and FMA4 instructions (SSE5)
;
; based on pub number 43479 revision 3.04 dated November 2009
;
VFMADDPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 69 /r /is4] AMD,SSE5
|
|
VFMADDPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 69 /r /is4] AMD,SSE5
|
|
VFMADDPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 69 /r /is4] AMD,SSE5
|
|
VFMADDPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 69 /r /is4] AMD,SSE5
|
|
|
|
VFMADDPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 68 /r /is4] AMD,SSE5
|
|
VFMADDPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 68 /r /is4] AMD,SSE5
|
|
VFMADDPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 68 /r /is4] AMD,SSE5
|
|
VFMADDPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 68 /r /is4] AMD,SSE5
|
|
|
|
VFMADDSD xmmreg,xmmreg*,xmmrm64,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6b /r /is4] AMD,SSE5
|
|
VFMADDSD xmmreg,xmmreg*,xmmreg,xmmrm64 [rvsm: vex.m3.w1.nds.l0.p1 6b /r /is4] AMD,SSE5
|
|
|
|
VFMADDSS xmmreg,xmmreg*,xmmrm32,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6a /r /is4] AMD,SSE5
|
|
VFMADDSS xmmreg,xmmreg*,xmmreg,xmmrm32 [rvsm: vex.m3.w1.nds.l0.p1 6a /r /is4] AMD,SSE5
|
|
|
|
VFMADDSUBPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 5d /r /is4] AMD,SSE5
|
|
VFMADDSUBPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 5d /r /is4] AMD,SSE5
|
|
VFMADDSUBPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 5d /r /is4] AMD,SSE5
|
|
VFMADDSUBPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 5d /r /is4] AMD,SSE5
|
|
|
|
VFMADDSUBPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 5c /r /is4] AMD,SSE5
|
|
VFMADDSUBPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 5c /r /is4] AMD,SSE5
|
|
VFMADDSUBPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 5c /r /is4] AMD,SSE5
|
|
VFMADDSUBPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 5c /r /is4] AMD,SSE5
|
|
|
|
VFMSUBADDPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 5f /r /is4] AMD,SSE5
|
|
VFMSUBADDPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 5f /r /is4] AMD,SSE5
|
|
VFMSUBADDPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 5f /r /is4] AMD,SSE5
|
|
VFMSUBADDPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 5f /r /is4] AMD,SSE5
|
|
|
|
VFMSUBADDPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 5e /r /is4] AMD,SSE5
|
|
VFMSUBADDPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 5e /r /is4] AMD,SSE5
|
|
VFMSUBADDPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 5e /r /is4] AMD,SSE5
|
|
VFMSUBADDPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 5e /r /is4] AMD,SSE5
|
|
|
|
VFMSUBPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6d /r /is4] AMD,SSE5
|
|
VFMSUBPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 6d /r /is4] AMD,SSE5
|
|
VFMSUBPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 6d /r /is4] AMD,SSE5
|
|
VFMSUBPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 6d /r /is4] AMD,SSE5
|
|
|
|
VFMSUBPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6c /r /is4] AMD,SSE5
|
|
VFMSUBPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 6c /r /is4] AMD,SSE5
|
|
VFMSUBPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 6c /r /is4] AMD,SSE5
|
|
VFMSUBPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 6c /r /is4] AMD,SSE5
|
|
|
|
VFMSUBSD xmmreg,xmmreg*,xmmrm64,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6f /r /is4] AMD,SSE5
|
|
VFMSUBSD xmmreg,xmmreg*,xmmreg,xmmrm64 [rvsm: vex.m3.w1.nds.l0.p1 6f /r /is4] AMD,SSE5
|
|
|
|
VFMSUBSS xmmreg,xmmreg*,xmmrm32,xmmreg [rvms: vex.m3.w0.nds.l0.p1 6e /r /is4] AMD,SSE5
|
|
VFMSUBSS xmmreg,xmmreg*,xmmreg,xmmrm32 [rvsm: vex.m3.w1.nds.l0.p1 6e /r /is4] AMD,SSE5
|
|
|
|
VFNMADDPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 79 /r /is4] AMD,SSE5
|
|
VFNMADDPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 79 /r /is4] AMD,SSE5
|
|
VFNMADDPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 79 /r /is4] AMD,SSE5
|
|
VFNMADDPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 79 /r /is4] AMD,SSE5
|
|
|
|
VFNMADDPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 78 /r /is4] AMD,SSE5
|
|
VFNMADDPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 78 /r /is4] AMD,SSE5
|
|
VFNMADDPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 78 /r /is4] AMD,SSE5
|
|
VFNMADDPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 78 /r /is4] AMD,SSE5
|
|
|
|
VFNMADDSD xmmreg,xmmreg*,xmmrm64,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7b /r /is4] AMD,SSE5
|
|
VFNMADDSD xmmreg,xmmreg*,xmmreg,xmmrm64 [rvsm: vex.m3.w1.nds.l0.p1 7b /r /is4] AMD,SSE5
|
|
|
|
VFNMADDSS xmmreg,xmmreg*,xmmrm32,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7a /r /is4] AMD,SSE5
|
|
VFNMADDSS xmmreg,xmmreg*,xmmreg,xmmrm32 [rvsm: vex.m3.w1.nds.l0.p1 7a /r /is4] AMD,SSE5
|
|
|
|
VFNMSUBPD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7d /r /is4] AMD,SSE5
|
|
VFNMSUBPD ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 7d /r /is4] AMD,SSE5
|
|
VFNMSUBPD xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 7d /r /is4] AMD,SSE5
|
|
VFNMSUBPD ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 7d /r /is4] AMD,SSE5
|
|
|
|
VFNMSUBPS xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7c /r /is4] AMD,SSE5
|
|
VFNMSUBPS ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.m3.w0.nds.l1.p1 7c /r /is4] AMD,SSE5
|
|
VFNMSUBPS xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: vex.m3.w1.nds.l0.p1 7c /r /is4] AMD,SSE5
|
|
VFNMSUBPS ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: vex.m3.w1.nds.l1.p1 7c /r /is4] AMD,SSE5
|
|
|
|
VFNMSUBSD xmmreg,xmmreg*,xmmrm64,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7f /r /is4] AMD,SSE5
|
|
VFNMSUBSD xmmreg,xmmreg*,xmmreg,xmmrm64 [rvsm: vex.m3.w1.nds.l0.p1 7f /r /is4] AMD,SSE5
|
|
|
|
VFNMSUBSS xmmreg,xmmreg*,xmmrm32,xmmreg [rvms: vex.m3.w0.nds.l0.p1 7e /r /is4] AMD,SSE5
|
|
VFNMSUBSS xmmreg,xmmreg*,xmmreg,xmmrm32 [rvsm: vex.m3.w1.nds.l0.p1 7e /r /is4] AMD,SSE5
|
|
|
|
VFRCZPD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 81 /r] AMD,SSE5
|
|
VFRCZPD ymmreg,ymmrm256* [rm: xop.m9.w0.l1.p0 81 /r] AMD,SSE5
|
|
|
|
VFRCZPS xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 80 /r] AMD,SSE5
|
|
VFRCZPS ymmreg,ymmrm256* [rm: xop.m9.w0.l1.p0 80 /r] AMD,SSE5
|
|
|
|
VFRCZSD xmmreg,xmmrm64* [rm: xop.m9.w0.l0.p0 83 /r] AMD,SSE5
|
|
|
|
VFRCZSS xmmreg,xmmrm32* [rm: xop.m9.w0.l0.p0 82 /r] AMD,SSE5
|
|
;
; fixed: the spec mentions imm[7:4], though it should be /is4 even in the spec
VPCMOV xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 a2 /r /is4] AMD,SSE5
|
|
VPCMOV ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: xop.m8.w0.nds.l1.p0 a2 /r /is4] AMD,SSE5
|
|
VPCMOV xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: xop.m8.w1.nds.l0.p0 a2 /r /is4] AMD,SSE5
|
|
VPCMOV ymmreg,ymmreg*,ymmreg,ymmrm256 [rvsm: xop.m8.w1.nds.l1.p0 a2 /r /is4] AMD,SSE5
|
|
|
|
VPCOMB xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 cc /r ib] AMD,SSE5
|
|
VPCOMD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 ce /r ib] AMD,SSE5
|
|
VPCOMQ xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 cf /r ib] AMD,SSE5
|
|
;
; fixed: the spec mentions only 3 operands in the mnemonics
VPCOMUB xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 ec /r ib] AMD,SSE5
|
|
VPCOMUD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 ee /r ib] AMD,SSE5
|
|
VPCOMUQ xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 ef /r ib] AMD,SSE5
|
|
;
; fixed: the spec wrongly shows VPCOMB as the mnemonic here
VPCOMUW xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 ed /r ib] AMD,SSE5
|
|
VPCOMW xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: xop.m8.w0.nds.l0.p0 cd /r ib] AMD,SSE5
|
|
|
|
VPHADDBD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 c2 /r] AMD,SSE5
|
|
VPHADDBQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 c3 /r] AMD,SSE5
|
|
VPHADDBW xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 c1 /r] AMD,SSE5
|
|
VPHADDDQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 cb /r] AMD,SSE5
|
|
;
|
|
; fixed: spec has ymmreg for l0
|
|
VPHADDUBD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 d2 /r] AMD,SSE5
|
|
VPHADDUBQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 d3 /r] AMD,SSE5
|
|
;
|
|
; fixed: spec has VPHADDUBWD
|
|
VPHADDUBW xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 d1 /r] AMD,SSE5
|
|
;
|
|
; fixed: opcode db
|
|
VPHADDUDQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 db /r] AMD,SSE5
|
|
VPHADDUWD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 d6 /r] AMD,SSE5
|
|
VPHADDUWQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 d7 /r] AMD,SSE5
|
|
;
|
|
; fixed: spec has ymmreg for l0
|
|
VPHADDWD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 c6 /r] AMD,SSE5
|
|
;
|
|
; fixed: spec has d7 opcode
|
|
VPHADDWQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 c7 /r] AMD,SSE5
|
|
|
|
VPHSUBBW xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 e1 /r] AMD,SSE5
|
|
VPHSUBDQ xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 e3 /r] AMD,SSE5
|
|
VPHSUBWD xmmreg,xmmrm128* [rm: xop.m9.w0.l0.p0 e2 /r] AMD,SSE5
|
|
|
|
VPMACSDD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 9e /r /is4] AMD,SSE5
|
|
;
|
|
; fixed: spec has 97,9f opcodes here
|
|
VPMACSDQH xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 9f /r /is4] AMD,SSE5
|
|
VPMACSDQL xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 97 /r /is4] AMD,SSE5
|
|
VPMACSSDD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 8e /r /is4] AMD,SSE5
|
|
VPMACSSDQH xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 8f /r /is4] AMD,SSE5
|
|
VPMACSSDQL xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 87 /r /is4] AMD,SSE5
|
|
VPMACSSWD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 86 /r /is4] AMD,SSE5
|
|
VPMACSSWW xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 85 /r /is4] AMD,SSE5
|
|
VPMACSWD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 96 /r /is4] AMD,SSE5
|
|
VPMACSWW xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 95 /r /is4] AMD,SSE5
|
|
VPMADCSSWD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 a6 /r /is4] AMD,SSE5
|
|
VPMADCSWD xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 b6 /r /is4] AMD,SSE5
|
|
|
|
VPPERM xmmreg,xmmreg*,xmmreg,xmmrm128 [rvsm: xop.m8.w1.nds.l0.p0 a3 /r /is4] AMD,SSE5
|
|
VPPERM xmmreg,xmmreg*,xmmrm128,xmmreg [rvms: xop.m8.w0.nds.l0.p0 a3 /r /is4] AMD,SSE5
|
|
|
|
VPROTB xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 90 /r] AMD,SSE5
|
|
VPROTB xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 90 /r] AMD,SSE5
|
|
;
; fixed: the spec shows xmmreg instead of reg/mem
VPROTB xmmreg,xmmrm128*,imm8 [rmi: xop.m8.w0.l0.p0 c0 /r ib] AMD,SSE5
|
|
|
|
VPROTD xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 92 /r] AMD,SSE5
|
|
VPROTD xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 92 /r] AMD,SSE5
|
|
;
; fixed: spec error, the /r is needed
VPROTD xmmreg,xmmrm128*,imm8 [rmi: xop.m8.w0.l0.p0 c2 /r ib] AMD,SSE5
|
|
VPROTQ xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 93 /r] AMD,SSE5
|
|
VPROTQ xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 93 /r] AMD,SSE5
|
|
;
; fixed: spec error, the /r is needed
VPROTQ xmmreg,xmmrm128*,imm8 [rmi: xop.m8.w0.l0.p0 c3 /r ib] AMD,SSE5
|
|
VPROTW xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 91 /r] AMD,SSE5
|
|
VPROTW xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 91 /r] AMD,SSE5
|
|
VPROTW xmmreg,xmmrm128*,imm8 [rmi: xop.m8.w0.l0.p0 c1 /r ib] AMD,SSE5
|
|
|
|
VPSHAB xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 98 /r] AMD,SSE5
|
|
VPSHAB xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 98 /r] AMD,SSE5
|
|
|
|
VPSHAD xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 9a /r] AMD,SSE5
|
|
VPSHAD xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 9a /r] AMD,SSE5
|
|
|
|
VPSHAQ xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 9b /r] AMD,SSE5
|
|
VPSHAQ xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 9b /r] AMD,SSE5
|
|
|
|
VPSHAW xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 99 /r] AMD,SSE5
|
|
VPSHAW xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 99 /r] AMD,SSE5
|
|
|
|
VPSHLB xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 94 /r] AMD,SSE5
|
|
VPSHLB xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 94 /r] AMD,SSE5
|
|
|
|
;
|
|
; fixed: spec has ymmreg for l0
|
|
VPSHLD xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 96 /r] AMD,SSE5
|
|
VPSHLD xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 96 /r] AMD,SSE5
|
|
|
|
VPSHLQ xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 97 /r] AMD,SSE5
|
|
VPSHLQ xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 97 /r] AMD,SSE5
|
|
|
|
VPSHLW xmmreg,xmmrm128*,xmmreg [rmv: xop.m9.w0.nds.l0.p0 95 /r] AMD,SSE5
|
|
VPSHLW xmmreg,xmmreg*,xmmrm128 [rvm: xop.m9.w1.nds.l0.p0 95 /r] AMD,SSE5
|
|
|
|
;# Intel AVX2 instructions
;
; based on pub number 319433-011 dated July 2011
;
VMPSADBW ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a 42 /r ib] AVX2
|
|
VPABSB ymmreg,ymmrm256 [rm: vex.256.66.0f38 1c /r] AVX2
|
|
VPABSW ymmreg,ymmrm256 [rm: vex.256.66.0f38 1d /r] AVX2
|
|
VPABSD ymmreg,ymmrm256 [rm: vex.256.66.0f38 1e /r] AVX2
|
|
VPACKSSWB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 63 /r] AVX2
|
|
VPACKSSDW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 6b /r] AVX2
|
|
VPACKUSDW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 2b /r] AVX2
|
|
VPACKUSWB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 67 /r] AVX2
|
|
VPADDB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f fc /r] AVX2
|
|
VPADDW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f fd /r] AVX2
|
|
VPADDD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f fe /r] AVX2
|
|
VPADDQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f d4 /r] AVX2
|
|
VPADDSB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f ec /r] AVX2
|
|
VPADDSW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f ed /r] AVX2
|
|
VPADDUSB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f dc /r] AVX2
|
|
VPADDUSW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f dd /r] AVX2
|
|
VPALIGNR ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a 0f /r ib] AVX2
|
|
VPAND ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f db /r] AVX2
|
|
VPANDN ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f df /r] AVX2
|
|
VPAVGB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f e0 /r] AVX2
|
|
VPAVGW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f e3 /r] AVX2
|
|
VPBLENDVB ymmreg,ymmreg*,ymmrm256,ymmreg [rvms: vex.nds.256.66.0f3a 4c /r /is4] AVX2
|
|
VPBLENDW ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a 0e /r ib] AVX2
|
|
VPCMPEQB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 74 /r] AVX2
|
|
VPCMPEQW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 75 /r] AVX2
|
|
VPCMPEQD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 76 /r] AVX2
|
|
VPCMPEQQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 29 /r] AVX2
|
|
VPCMPGTB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 64 /r] AVX2
|
|
VPCMPGTW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 65 /r] AVX2
|
|
VPCMPGTD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 66 /r] AVX2
|
|
VPCMPGTQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 37 /r] AVX2
|
|
VPHADDW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 01 /r] AVX2
|
|
VPHADDD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 02 /r] AVX2
|
|
VPHADDSW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 03 /r] AVX2
|
|
VPHSUBW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 05 /r] AVX2
|
|
VPHSUBD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 06 /r] AVX2
|
|
VPHSUBSW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 07 /r] AVX2
|
|
VPMADDUBSW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 04 /r] AVX2
|
|
VPMADDWD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f f5 /r] AVX2
|
|
VPMAXSB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 3c /r] AVX2
|
|
VPMAXSW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f ee /r] AVX2
|
|
VPMAXSD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 3d /r] AVX2
|
|
VPMAXUB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f de /r] AVX2
|
|
VPMAXUW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 3e /r] AVX2
|
|
VPMAXUD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 3f /r] AVX2
|
|
VPMINSB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 38 /r] AVX2
|
|
VPMINSW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f ea /r] AVX2
|
|
VPMINSD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 39 /r] AVX2
|
|
VPMINUB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f da /r] AVX2
|
|
VPMINUW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 3a /r] AVX2
|
|
VPMINUD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 3b /r] AVX2
|
|
VPMOVMSKB reg32,ymmreg [rm: vex.256.66.0f d7 /r] AVX2
|
|
VPMOVMSKB reg64,ymmreg [rm: vex.256.66.0f d7 /r] AVX2
|
|
VPMOVSXBW ymmreg,xmmrm128 [rm: vex.256.66.0f38 20 /r] AVX2
|
|
VPMOVSXBD ymmreg,mem64 [rm: vex.256.66.0f38 21 /r] AVX2
|
|
VPMOVSXBD ymmreg,xmmreg [rm: vex.256.66.0f38 21 /r] AVX2
|
|
VPMOVSXBQ ymmreg,mem32 [rm: vex.256.66.0f38 22 /r] AVX2
|
|
VPMOVSXBQ ymmreg,xmmreg [rm: vex.256.66.0f38 22 /r] AVX2
|
|
VPMOVSXWD ymmreg,xmmrm128 [rm: vex.256.66.0f38 23 /r] AVX2
|
|
VPMOVSXWQ ymmreg,mem64 [rm: vex.256.66.0f38 24 /r] AVX2
|
|
VPMOVSXWQ ymmreg,xmmreg [rm: vex.256.66.0f38 24 /r] AVX2
|
|
VPMOVSXDQ ymmreg,xmmrm128 [rm: vex.256.66.0f38 25 /r] AVX2
|
|
VPMOVZXBW ymmreg,xmmrm128 [rm: vex.256.66.0f38 30 /r] AVX2
|
|
VPMOVZXBD ymmreg,mem64 [rm: vex.256.66.0f38 31 /r] AVX2
|
|
VPMOVZXBD ymmreg,xmmreg [rm: vex.256.66.0f38 31 /r] AVX2
|
|
VPMOVZXBQ ymmreg,mem32 [rm: vex.256.66.0f38 32 /r] AVX2
|
|
VPMOVZXBQ ymmreg,xmmreg [rm: vex.256.66.0f38 32 /r] AVX2
|
|
VPMOVZXWD ymmreg,xmmrm128 [rm: vex.256.66.0f38 33 /r] AVX2
|
|
VPMOVZXWQ ymmreg,mem64 [rm: vex.256.66.0f38 34 /r] AVX2
|
|
VPMOVZXWQ ymmreg,xmmreg [rm: vex.256.66.0f38 34 /r] AVX2
|
|
VPMOVZXDQ ymmreg,xmmrm128 [rm: vex.256.66.0f38 35 /r] AVX2
|
|
VPMULDQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 28 /r] AVX2
|
|
VPMULHRSW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 0b /r] AVX2
|
|
VPMULHUW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f e4 /r] AVX2
|
|
VPMULHW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f e5 /r] AVX2
|
|
VPMULLW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f d5 /r] AVX2
|
|
VPMULLD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 40 /r] AVX2
|
|
VPMULUDQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f f4 /r] AVX2
|
|
VPOR ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f eb /r] AVX2
|
|
VPSADBW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f f6 /r] AVX2
|
|
VPSHUFB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 00 /r] AVX2
|
|
VPSHUFD ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f 70 /r ib] AVX2
|
|
VPSHUFHW ymmreg,ymmrm256,imm8 [rmi: vex.256.f3.0f 70 /r ib] AVX2
|
|
VPSHUFLW ymmreg,ymmrm256,imm8 [rmi: vex.256.f2.0f 70 /r ib] AVX2
|
|
VPSIGNB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 08 /r] AVX2
|
|
VPSIGNW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 09 /r] AVX2
|
|
VPSIGND ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38 0a /r] AVX2
|
|
VPSLLDQ ymmreg,ymmreg*,imm8 [vmi: vex.ndd.256.66.0f 73 /7 ib] AVX2
|
|
VPSLLW ymmreg,ymmreg*,xmmrm128 [rvm: vex.nds.256.66.0f f1 /r] AVX2
|
|
VPSLLW ymmreg,ymmreg*,imm8 [vmi: vex.ndd.256.66.0f 71 /6 ib] AVX2
|
|
VPSLLD ymmreg,ymmreg*,xmmrm128 [rvm: vex.nds.256.66.0f f2 /r] AVX2
|
|
VPSLLD ymmreg,ymmreg*,imm8 [vmi: vex.ndd.256.66.0f 72 /6 ib] AVX2
|
|
VPSLLQ ymmreg,ymmreg*,xmmrm128 [rvm: vex.nds.256.66.0f f3 /r] AVX2
|
|
VPSLLQ ymmreg,ymmreg*,imm8 [vmi: vex.ndd.256.66.0f 73 /6 ib] AVX2
|
|
VPSRAW ymmreg,ymmreg*,xmmrm128 [rvm: vex.nds.256.66.0f e1 /r] AVX2
|
|
VPSRAW ymmreg,ymmreg*,imm8 [vmi: vex.ndd.256.66.0f 71 /4 ib] AVX2
|
|
VPSRAD ymmreg,ymmreg*,xmmrm128 [rvm: vex.nds.256.66.0f e2 /r] AVX2
|
|
VPSRAD ymmreg,ymmreg*,imm8 [vmi: vex.ndd.256.66.0f 72 /4 ib] AVX2
|
|
VPSRLDQ ymmreg,ymmreg*,imm8 [vmi: vex.ndd.256.66.0f 73 /3 ib] AVX2
|
|
VPSRLW ymmreg,ymmreg*,xmmrm128 [rvm: vex.nds.256.66.0f d1 /r] AVX2
|
|
VPSRLW ymmreg,ymmreg*,imm8 [vmi: vex.ndd.256.66.0f 71 /2 ib] AVX2
|
|
VPSRLD ymmreg,ymmreg*,xmmrm128 [rvm: vex.nds.256.66.0f d2 /r] AVX2
|
|
VPSRLD ymmreg,ymmreg*,imm8 [vmi: vex.ndd.256.66.0f 72 /2 ib] AVX2
|
|
VPSRLQ ymmreg,ymmreg*,xmmrm128 [rvm: vex.nds.256.66.0f d3 /r] AVX2
|
|
VPSRLQ ymmreg,ymmreg*,imm8 [vmi: vex.ndd.256.66.0f.wig 73 /2 ib] AVX2
|
|
VPSUBB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f f8 /r] AVX2
|
|
VPSUBW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f f9 /r] AVX2
|
|
VPSUBD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f fa /r] AVX2
|
|
VPSUBQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f fb /r] AVX2
|
|
VPSUBSB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f e8 /r] AVX2
|
|
VPSUBSW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f e9 /r] AVX2
|
|
VPSUBUSB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f d8 /r] AVX2
|
|
VPSUBUSW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f d9 /r] AVX2
|
|
VPUNPCKHBW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 68 /r] AVX2
|
|
VPUNPCKHWD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 69 /r] AVX2
|
|
VPUNPCKHDQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 6a /r] AVX2
|
|
VPUNPCKHQDQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 6d /r] AVX2
|
|
VPUNPCKLBW ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 60 /r] AVX2
|
|
VPUNPCKLWD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 61 /r] AVX2
|
|
VPUNPCKLDQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 62 /r] AVX2
|
|
VPUNPCKLQDQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f 6c /r] AVX2
|
|
VPXOR ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f ef /r] AVX2
|
|
VMOVNTDQA ymmreg,mem256 [rm: vex.256.66.0f38 2a /r] AVX2
|
|
VBROADCASTSS xmmreg,xmmreg [rm: vex.128.66.0f38.w0 18 /r] AVX2
|
|
VBROADCASTSS ymmreg,xmmreg [rm: vex.256.66.0f38.w0 18 /r] AVX2
|
|
VBROADCASTSD ymmreg,xmmreg [rm: vex.256.66.0f38.w0 19 /r] AVX2
|
|
VBROADCASTI128 ymmreg,mem128 [rm: vex.256.66.0f38.w0 5a /r] AVX2
|
|
VPBLENDD xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a.w0 02 /r ib] AVX2
|
|
VPBLENDD ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a.w0 02 /r ib] AVX2
|
|
VPBROADCASTB xmmreg,mem8 [rm: vex.128.66.0f38.w0 78 /r] AVX2
|
|
VPBROADCASTB xmmreg,xmmreg [rm: vex.128.66.0f38.w0 78 /r] AVX2
|
|
VPBROADCASTB ymmreg,mem8 [rm: vex.256.66.0f38.w0 78 /r] AVX2
|
|
VPBROADCASTB ymmreg,xmmreg [rm: vex.256.66.0f38.w0 78 /r] AVX2
|
|
VPBROADCASTW xmmreg,mem16 [rm: vex.128.66.0f38.w0 79 /r] AVX2
|
|
VPBROADCASTW xmmreg,xmmreg [rm: vex.128.66.0f38.w0 79 /r] AVX2
|
|
VPBROADCASTW ymmreg,mem16 [rm: vex.256.66.0f38.w0 79 /r] AVX2
|
|
VPBROADCASTW ymmreg,xmmreg [rm: vex.256.66.0f38.w0 79 /r] AVX2
|
|
VPBROADCASTD xmmreg,mem32 [rm: vex.128.66.0f38.w0 58 /r] AVX2
|
|
VPBROADCASTD xmmreg,xmmreg [rm: vex.128.66.0f38.w0 58 /r] AVX2
|
|
VPBROADCASTD ymmreg,mem32 [rm: vex.256.66.0f38.w0 58 /r] AVX2
|
|
VPBROADCASTD ymmreg,xmmreg [rm: vex.256.66.0f38.w0 58 /r] AVX2
|
|
VPBROADCASTQ xmmreg,mem64 [rm: vex.128.66.0f38.w0 59 /r] AVX2
|
|
VPBROADCASTQ xmmreg,xmmreg [rm: vex.128.66.0f38.w0 59 /r] AVX2
|
|
VPBROADCASTQ ymmreg,mem64 [rm: vex.256.66.0f38.w0 59 /r] AVX2
|
|
VPBROADCASTQ ymmreg,xmmreg [rm: vex.256.66.0f38.w0 59 /r] AVX2
|
|
|
|
VPERMD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.w0 36 /r] AVX2
|
|
VPERMPD ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f3a.w1 01 /r ib] AVX2
|
|
VPERMPS ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.w0 16 /r] AVX2
|
|
VPERMQ ymmreg,ymmrm256,imm8 [rmi: vex.256.66.0f3a.w1 00 /r ib] AVX2
|
|
VPERM2I128 ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a.w0 46 /r ib] AVX2
|
|
VEXTRACTI128 xmmrm128,ymmreg,imm8 [mri: vex.256.66.0f3a.w0 39 /r ib] AVX2
|
|
|
|
VINSERTI128 ymmreg,ymmreg*,xmmrm128,imm8 [rvmi: vex.nds.256.66.0f3a.w0 38 /r ib] AVX2
|
|
VPMASKMOVD xmmreg,xmmreg*,mem128 [rvm: vex.nds.128.66.0f38.w0 8c /r] AVX2
|
|
VPMASKMOVD ymmreg,ymmreg*,mem256 [rvm: vex.nds.256.66.0f38.w0 8c /r] AVX2
|
|
VPMASKMOVQ xmmreg,xmmreg*,mem128 [rvm: vex.nds.128.66.0f38.w1 8c /r] AVX2
|
|
VPMASKMOVQ ymmreg,ymmreg*,mem256 [rvm: vex.nds.256.66.0f38.w1 8c /r] AVX2
|
|
|
|
VPMASKMOVD mem128,xmmreg*,xmmreg [mvr: vex.nds.128.66.0f38.w0 8e /r] AVX2
|
|
VPMASKMOVD mem256,ymmreg*,ymmreg [mvr: vex.nds.256.66.0f38.w0 8e /r] AVX2
|
|
VPMASKMOVQ mem128,xmmreg*,xmmreg [mvr: vex.nds.128.66.0f38.w1 8e /r] AVX2
|
|
VPMASKMOVQ mem256,ymmreg*,ymmreg [mvr: vex.nds.256.66.0f38.w1 8e /r] AVX2
|
|
|
|
VPSLLVD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38.w0 47 /r] AVX2
|
|
VPSLLVQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38.w1 47 /r] AVX2
|
|
VPSLLVD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.w0 47 /r] AVX2
|
|
VPSLLVQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.w1 47 /r] AVX2
|
|
|
|
VPSRAVD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38.w0 46 /r] AVX2
|
|
VPSRAVD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.w0 46 /r] AVX2
|
|
|
|
VPSRLVD xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38.w0 45 /r] AVX2
|
|
VPSRLVQ xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38.w1 45 /r] AVX2
|
|
VPSRLVD ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.w0 45 /r] AVX2
|
|
VPSRLVQ ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.w1 45 /r] AVX2
|
|
|
|
VGATHERDPD xmmreg,xmem64,xmmreg [rmv: vm32x vex.dds.128.66.0f38.w1 92 /r] AVX2
|
|
VGATHERQPD xmmreg,xmem64,xmmreg [rmv: vm64x vex.dds.128.66.0f38.w1 93 /r] AVX2
|
|
VGATHERDPD ymmreg,xmem64,ymmreg [rmv: vm32x vex.dds.256.66.0f38.w1 92 /r] AVX2
|
|
VGATHERQPD ymmreg,ymem64,ymmreg [rmv: vm64y vex.dds.256.66.0f38.w1 93 /r] AVX2
|
|
|
|
VGATHERDPS xmmreg,xmem32,xmmreg [rmv: vm32x vex.dds.128.66.0f38.w0 92 /r] AVX2
|
|
VGATHERQPS xmmreg,xmem32,xmmreg [rmv: vm64x vex.dds.128.66.0f38.w0 93 /r] AVX2
|
|
VGATHERDPS ymmreg,ymem32,ymmreg [rmv: vm32y vex.dds.256.66.0f38.w0 92 /r] AVX2
|
|
VGATHERQPS xmmreg,ymem32,xmmreg [rmv: vm64y vex.dds.256.66.0f38.w0 93 /r] AVX2
|
|
|
|
VPGATHERDD xmmreg,xmem32,xmmreg [rmv: vm32x vex.dds.128.66.0f38.w0 90 /r] AVX2
|
|
VPGATHERQD xmmreg,xmem32,xmmreg [rmv: vm64x vex.dds.128.66.0f38.w0 91 /r] AVX2
|
|
VPGATHERDD ymmreg,ymem32,ymmreg [rmv: vm32y vex.dds.256.66.0f38.w0 90 /r] AVX2
|
|
VPGATHERQD xmmreg,ymem32,xmmreg [rmv: vm64y vex.dds.256.66.0f38.w0 91 /r] AVX2
|
|
|
|
VPGATHERDQ xmmreg,xmem64,xmmreg [rmv: vm32x vex.dds.128.66.0f38.w1 90 /r] AVX2
|
|
VPGATHERQQ xmmreg,xmem64,xmmreg [rmv: vm64x vex.dds.128.66.0f38.w1 91 /r] AVX2
|
|
VPGATHERDQ ymmreg,xmem64,ymmreg [rmv: vm32x vex.dds.256.66.0f38.w1 90 /r] AVX2
|
|
VPGATHERQQ ymmreg,ymem64,ymmreg [rmv: vm64y vex.dds.256.66.0f38.w1 91 /r] AVX2
|
|
|
|
;# Intel Transactional Synchronization Extensions (TSX)
|
|
XABORT imm8 [i: c6 f8 ib] RTM
|
|
XBEGIN imm [i: nw odf c7 f8 rel] RTM
|
|
XBEGIN imm|near [i: nw odf c7 f8 rel] RTM,SX,ND
|
|
XBEGIN imm16 [i: o16 c7 f8 rel] RTM,NOLONG,SX
|
|
XBEGIN imm16|near [i: o16 c7 f8 rel] RTM,NOLONG,SX,ND
|
|
XBEGIN imm32 [i: o32 c7 f8 rel] RTM,NOLONG,SX
|
|
XBEGIN imm32|near [i: o32 c7 f8 rel] RTM,NOLONG,SX,ND
|
|
XBEGIN imm64 [i: o64nw c7 f8 rel] RTM,LONG,SX
|
|
XBEGIN imm64|near [i: o64nw c7 f8 rel] RTM,LONG,SX,ND
|
|
XEND void [ 0f 01 d5] RTM
|
|
XTEST void [ 0f 01 d6] HLE,RTM
|
|
|
|
|
|
PREFETCHWT1 mem8 [m: 0f 0d /2 ] PREFETCHWT1
|
|
|
|
;# Intel Memory Protection Extensions (MPX)
|
|
BNDMK bndreg,mem [rm: f3 0f 1b /r ] MPX,MIB
|
|
BNDCL bndreg,mem [rm: f3 0f 1a /r ] MPX
|
|
BNDCL bndreg,reg32 [rm: f3 0f 1a /r ] MPX,NOLONG
|
|
BNDCL bndreg,reg64 [rm: o64nw f3 0f 1a /r ] MPX,LONG
|
|
BNDCU bndreg,mem [rm: f2 0f 1a /r ] MPX
|
|
BNDCU bndreg,reg32 [rm: f2 0f 1a /r ] MPX,NOLONG
|
|
BNDCU bndreg,reg64 [rm: o64nw f2 0f 1a /r ] MPX,LONG
|
|
BNDCN bndreg,mem [rm: f2 0f 1b /r ] MPX
|
|
BNDCN bndreg,reg32 [rm: f2 0f 1b /r ] MPX,NOLONG
|
|
BNDCN bndreg,reg64 [rm: o64nw f2 0f 1b /r ] MPX,LONG
|
|
BNDMOV bndreg,bndreg [rm: 66 0f 1a /r ] MPX
|
|
BNDMOV bndreg,mem [rm: 66 0f 1a /r ] MPX
|
|
BNDMOV bndreg,bndreg [mr: 66 0f 1b /r ] MPX
|
|
BNDMOV mem,bndreg [mr: 66 0f 1b /r ] MPX
|
|
BNDLDX bndreg,mem [rm: 0f 1a /r ] MPX,MIB
|
|
BNDLDX bndreg,mem,reg32 [rmx: 0f 1a /r ] MPX,MIB,NOLONG
|
|
BNDLDX bndreg,mem,reg64 [rmx: 0f 1a /r ] MPX,MIB,LONG
|
|
BNDSTX mem,bndreg [mr: 0f 1b /r ] MPX,MIB
|
|
BNDSTX mem,reg32,bndreg [mxr: 0f 1b /r ] MPX,MIB,NOLONG
|
|
BNDSTX mem,reg64,bndreg [mxr: 0f 1b /r ] MPX,MIB,LONG
|
|
BNDSTX mem,bndreg,reg32 [mrx: 0f 1b /r ] MPX,MIB,NOLONG
|
|
BNDSTX mem,bndreg,reg64 [mrx: 0f 1b /r ] MPX,MIB,LONG
|
|
|
|
;# Intel SHA acceleration instructions
|
|
SHA1MSG1 xmmreg,xmmrm128 [rm: 0f38 c9 /r ] SHA
|
|
SHA1MSG2 xmmreg,xmmrm128 [rm: 0f38 ca /r ] SHA
|
|
SHA1NEXTE xmmreg,xmmrm128 [rm: 0f38 c8 /r ] SHA
|
|
SHA1RNDS4 xmmreg,xmmrm128,imm8 [rmi: 0f3a cc /r ib ] SHA
|
|
SHA256MSG1 xmmreg,xmmrm128 [rm: 0f38 cc /r ] SHA
|
|
SHA256MSG2 xmmreg,xmmrm128 [rm: 0f38 cd /r ] SHA
|
|
SHA256RNDS2 xmmreg,xmmrm128,xmm0 [rm-: 0f38 cb /r ] SHA
|
|
SHA256RNDS2 xmmreg,xmmrm128 [rm: 0f38 cb /r ] SHA
|
|
VSHA512MSG1 ymmreg,xmmreg [rm: vex.256.f2.0f38.w0 cc /r] SHA512,AVX
|
|
VSHA512MSG2 ymmreg,ymmreg [rm: vex.256.f2.0f38.w0 cd /r] SHA512,AVX
|
|
VSHA512RNDS2 ymmreg,ymmreg,xmmreg [rvm: vex.nds.256.f2.0f38.w0 cb /r] SHA512,AVX
|
|
|
|
;# S3M hash instructions
|
|
VSM3MSG1 xmmreg,xmmreg,xmmreg [rvm: vex.nds.128.p0.0f38.w0 da /r] HSM3,AVX
|
|
VSM3MSG2 xmmreg,xmmreg,xmmreg [rvm: vex.nds.128.66.0f38.w0 da /r] HSM3,AVX
|
|
VSM3RNDS2 xmmreg,xmmreg,xmmreg,imm8 [rvmi: vex.nds.128.66.0f3a.w0 de /r ib] HSM3,AVX
|
|
|
|
;# SM4 hash instructions
|
|
VSM4KEY4 xmmreg,xmmreg,xmmrm128 [rvm: vex.nds.128.f3.0f38.w0 da /r] HSM4,AVX
|
|
VSM4KEY4 ymmreg,ymmreg,ymmrm128 [rvm: vex.nds.256.f3.0f38.w0 da /r] HSM4,AVX
|
|
VSM4RNDS4 xmmreg,xmmreg,xmmrm128 [rvm: vex.nds.128.f2.0f38.w0 da /r] HSM4,AVX
|
|
VSM4RNDS4 ymmreg,ymmreg,ymmrm128 [rvm: vex.nds.256.f2.0f38.w0 da /r] HSM4,AVX
|
|
|
|
;# AVX no exception conversions
|
|
; Must precede AVX-512 versions
|
|
VBCSTNEBF16PS xmmreg,mem16 [rm: vex.128.f3.0f38.w0 b1 /r] AVXNECONVERT,LATEVEX,SW
|
|
VBCSTNEBF16PS ymmreg,mem16 [rm: vex.256.f3.0f38.w0 b1 /r] AVXNECONVERT,LATEVEX,SW
|
|
VBCSTNESH2PS xmmreg,mem16 [rm: vex.128.66.0f38.w0 b1 /r] AVXNECONVERT,LATEVEX,SW
|
|
VBCSTNESH2PS ymmreg,mem16 [rm: vex.256.66.0f38.w0 b1 /r] AVXNECONVERT,LATEVEX,SW
|
|
VCVTNEEBF162PS xmmreg,mem128 [rm: vex.128.f3.0f38.w0 b0 /r] AVXNECONVERT,LATEVEX,SO
|
|
VCVTNEEBF162PS ymmreg,mem256 [rm: vex.256.f3.0f38.w0 b0 /r] AVXNECONVERT,LATEVEX,SY
|
|
VCVTNEEPH2PS xmmreg,mem128 [rm: vex.128.66.0f38.w0 b0 /r] AVXNECONVERT,LATEVEX,SO
|
|
VCVTNEEPH2PS ymmreg,mem256 [rm: vex.256.66.0f38.w0 b0 /r] AVXNECONVERT,LATEVEX,SY
|
|
VCVTNEOBF162PS xmmreg,mem128 [rm: vex.128.f2.0f38.w0 b0 /r] AVXNECONVERT,LATEVEX,SO
|
|
VCVTNEOBF162PS ymmreg,mem256 [rm: vex.256.f2.0f38.w0 b0 /r] AVXNECONVERT,LATEVEX,SY
|
|
VCVTNEOPH2PS xmmreg,mem128 [rm: vex.128.np.0f38.w0 b0 /r] AVXNECONVERT,LATEVEX,SO
|
|
VCVTNEOPH2PS ymmreg,mem256 [rm: vex.256.np.0f38.w0 b0 /r] AVXNECONVERT,LATEVEX,SY
|
|
VCVTNEPS2BF16 xmmreg,xmmrm128 [rm: vex.128.f3.0f38.w0 72 /r] AVXNECONVERT,LATEVEX,SO
|
|
VCVTNEPS2BF16 ymmreg,ymmrm256 [rm: vex.256.f3.0f38.w0 72 /r] AVXNECONVERT,LATEVEX,SY
|
|
|
|
;# AVX Vector Neural Network Instructions
|
|
; Must precede AVX-512 versions
|
|
VPDPBSSD xmmreg,xmmreg,xmmrm128 [rvm: vex.128.f2.0f38.w0 50 /r] AVXVNNIINT8,LATEVEX,SO
|
|
VPDPBSSD ymmreg,ymmreg,ymmrm256 [rvm: vex.256.f2.0f38.w0 50 /r] AVXVNNIINT8,LATEVEX,SY
|
|
VPDPBSSDS xmmreg,xmmreg,xmmrm128 [rvm: vex.128.f2.0f38.w0 51 /r] AVXVNNIINT8,LATEVEX,SO
|
|
VPDPBSSDS ymmreg,ymmreg,ymmrm256 [rvm: vex.256.f2.0f38.w0 51 /r] AVXVNNIINT8,LATEVEX,SY
|
|
VPDPBSUD xmmreg,xmmreg,xmmrm128 [rvm: vex.128.f3.0f38.w0 50 /r] AVXVNNIINT8,LATEVEX,SO
|
|
VPDPBSUD ymmreg,ymmreg,ymmrm256 [rvm: vex.256.f3.0f38.w0 50 /r] AVXVNNIINT8,LATEVEX,SY
|
|
VPDPBSUDS xmmreg,xmmreg,xmmrm128 [rvm: vex.128.f3.0f38.w0 51 /r] AVXVNNIINT8,LATEVEX,SO
|
|
VPDPBSUDS ymmreg,ymmreg,ymmrm256 [rvm: vex.256.f3.0f38.w0 51 /r] AVXVNNIINT8,LATEVEX,SY
|
|
VPDPBUUD xmmreg,xmmreg,xmmrm128 [rvm: vex.128.np.0f38.w0 50 /r] AVXVNNIINT8,LATEVEX,SO
|
|
VPDPBUUD ymmreg,ymmreg,ymmrm256 [rvm: vex.256.np.0f38.w0 50 /r] AVXVNNIINT8,LATEVEX,SY
|
|
VPDPBUUDS xmmreg,xmmreg,xmmrm128 [rvm: vex.128.np.0f38.w0 51 /r] AVXVNNIINT8,LATEVEX,SO
|
|
VPDPBUUDS ymmreg,ymmreg,ymmrm256 [rvm: vex.256.np.0f38.w0 51 /r] AVXVNNIINT8,LATEVEX,SY
|
|
|
|
;# AVX Integer Fused Multiply-Add
|
|
; Must precede AVX-512 versions
|
|
VPMADD52HUQ xmmreg,xmmreg,xmmrm128 [rvm: vex.128.66.0f38.w1 b5 /r] AVXIFMA,LATEVEX,SO
|
|
VPMADD52HUQ ymmreg,ymmreg,ymmrm256 [rvm: vex.256.66.0f38.w1 b5 /r] AVXIFMA,LATEVEX,SY
|
|
VPMADD52LUQ xmmreg,xmmreg,xmmrm128 [rvm: vex.128.66.0f38.w1 b4 /r] AVXIFMA,LATEVEX,SO
|
|
VPMADD52LUQ ymmreg,ymmreg,ymmrm256 [rvm: vex.256.66.0f38.w1 b4 /r] AVXIFMA,LATEVEX,SY
|
|
|
|
;# AVX-512 mask register instructions
|
|
$k $bwdq KMOV% kreg#,krm# [rm: vex.l0.0f.ko# 90 /r ] AVX512(b:DQ/w:F/BW),SM
|
|
$k $bwdq KMOV% krm#,kreg# [rm: vex.l0.0f.ko# 91 /r ] AVX512(bw:DQ/BW),SM
|
|
$k $dq KMOV% kreg#,reg# [rm: vex.l0.f2.0f.ko# 92 /r ] AVX512BW,SM
|
|
$k $dq KMOV% reg#,kreg# [rm: vex.l0.f2.0f.ko# 93 /r ] AVX512BW,SM
|
|
$k $bw KMOV% kreg#,reg32 [rm: vex.l0.0f.ko# 92 /r ] AVX512(b:DQ/F)
|
|
$k $bw KMOV% kreg#,reg# [rm: vex.l0.0f.ko# 92 /r ] AVX512(b:DQ/F),ND,SM
|
|
$k $bw KMOV% reg32,kreg# [rm: vex.l0.0f.ko# 93 /r ] AVX512(b:DQ/F)
|
|
$k $bw KMOV% reg#,kreg# [rm: vex.l0.0f.ko# 93 /r ] AVX512(b:DQ/F),ND,SM
|
|
|
|
$k $bwdq KADD% kreg#,kreg#*,kreg# [rvm: vex.l1.0f.ko# 4a /r ] AVX512(bw:DQ/BW),SM
|
|
$k $bwdq KAND% kreg#,kreg#*,kreg# [rvm: vex.l1.0f.ko# 41 /r ] AVX512(b:DQ/w:F/BW),SM
|
|
$k $bwdq KANDN% kreg#,kreg#*,kreg# [rvm: vex.l1.0f.ko# 42 /r ] AVX512(b:DQ/w:F/BW),SM
|
|
$k $bwdq KNOT% kreg#,kreg#* [rm: vex.l0.0f.ko# 44 /r ] AVX512(b:DQ/w:F/BW),SM
|
|
$k $bwdq KOR% kreg#,kreg#*,kreg# [rvm: vex.nds.l1.0f.ko# 45 /r ] AVX512(b:DQ/w:F/BW),SM
|
|
$k $bwdq KORTEST% kreg#,kreg# [rm: vex.l0.0f.ko# 98 /r ] AVX512(b:DQ/w:F/BW),SM,FL
|
|
$k $bwdq KSHIFTL% kreg#,kreg#,imm8 [rmi: vex.l0.66.0f3a.w## 32## /r ib ] AVX512(b:DQ/w:F/BW),SM0-1
|
|
$k $bwdq KSHIFTR% kreg#,kreg#,imm8 [rmi: vex.l0.66.0f3a.w## 32## /r ib ] AVX512(b:DQ/w:F/BW),SM0-1
|
|
$k $bwdq KTEST% kreg#,kreg# [rm: vex.l0.0f.ko# 99 /r ] AVX512(bw:DQ/BW),SM,FL
|
|
$k $wdq KUNPCK%% kreg#,kreg#*,kreg# [rvm: vex.nds.l1.0f.ko# 4b /r ] AVX512(w:F/BW),SM0-1
|
|
$k $bwdq KXNOR% kreg#,kreg#*,kreg# [rvm: vex.nds.l1.0f.ko# 46 /r ] AVX512(b:DQ/w:F/BW),SM
|
|
$k $bwdq KXOR% kreg#,kreg#*,kreg# [rvm: vex.nds.l1.0f.ko# 47 /r ] AVX512(b:DQ/w:F/BW),SM
|
|
|
|
;# AVX-512 instructions
|
|
VADDPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 58 /r ] AVX512VL,AVX512
|
|
VADDPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 58 /r ] AVX512VL,AVX512
|
|
VADDPD zmmreg|mask|z,zmmreg*,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f.w1 58 /r ] AVX512
|
|
VADDPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 58 /r ] AVX512VL,AVX512
|
|
VADDPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 58 /r ] AVX512VL,AVX512
|
|
VADDPS zmmreg|mask|z,zmmreg*,zmmrm512|b32|er [rvm:fv: evex.nds.512.0f.w0 58 /r ] AVX512
|
|
VADDSD xmmreg|mask|z,xmmreg*,xmmrm64|er [rvm:t1s: evex.nds.lig.f2.0f.w1 58 /r ] AVX512
|
|
VADDSS xmmreg|mask|z,xmmreg*,xmmrm32|er [rvm:t1s: evex.nds.lig.f3.0f.w0 58 /r ] AVX512
|
|
VALIGND xmmreg|mask|z,xmmreg*,xmmrm128|b32,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w0 03 /r ib ] AVX512VL,AVX512
|
|
VALIGND ymmreg|mask|z,ymmreg*,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w0 03 /r ib ] AVX512VL,AVX512
|
|
VALIGND zmmreg|mask|z,zmmreg*,zmmrm512|b32,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w0 03 /r ib ] AVX512
|
|
VALIGNQ xmmreg|mask|z,xmmreg*,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w1 03 /r ib ] AVX512VL,AVX512
|
|
VALIGNQ ymmreg|mask|z,ymmreg*,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 03 /r ib ] AVX512VL,AVX512
|
|
VALIGNQ zmmreg|mask|z,zmmreg*,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 03 /r ib ] AVX512
|
|
VANDNPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 55 /r ] AVX512VL,AVX512DQ
|
|
VANDNPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 55 /r ] AVX512VL,AVX512DQ
|
|
VANDNPD zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 55 /r ] AVX512DQ
|
|
VANDNPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 55 /r ] AVX512VL,AVX512DQ
|
|
VANDNPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 55 /r ] AVX512VL,AVX512DQ
|
|
VANDNPS zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.0f.w0 55 /r ] AVX512DQ
|
|
VANDPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 54 /r ] AVX512VL,AVX512DQ
|
|
VANDPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 54 /r ] AVX512VL,AVX512DQ
|
|
VANDPD zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 54 /r ] AVX512DQ
|
|
VANDPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 54 /r ] AVX512VL,AVX512DQ
|
|
VANDPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 54 /r ] AVX512VL,AVX512DQ
|
|
VANDPS zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.0f.w0 54 /r ] AVX512DQ
|
|
VBLENDMPD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 65 /r ] AVX512VL,AVX512
|
|
VBLENDMPD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 65 /r ] AVX512VL,AVX512
|
|
VBLENDMPD zmmreg|mask|z,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 65 /r ] AVX512
|
|
VBLENDMPS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 65 /r ] AVX512VL,AVX512
|
|
VBLENDMPS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 65 /r ] AVX512VL,AVX512
|
|
VBLENDMPS zmmreg|mask|z,zmmreg,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 65 /r ] AVX512
|
|
VBROADCASTF32X2 ymmreg|mask|z,xmmrm64 [rm:t2: evex.256.66.0f38.w0 19 /r ] AVX512VL,AVX512DQ
|
|
VBROADCASTF32X2 zmmreg|mask|z,xmmrm64 [rm:t2: evex.512.66.0f38.w0 19 /r ] AVX512DQ
|
|
VBROADCASTF32X4 ymmreg|mask|z,mem128 [rm:t4: evex.256.66.0f38.w0 1a /r ] AVX512VL,AVX512
|
|
VBROADCASTF32X4 zmmreg|mask|z,mem128 [rm:t4: evex.512.66.0f38.w0 1a /r ] AVX512
|
|
VBROADCASTF32X8 zmmreg|mask|z,mem256 [rm:t8: evex.512.66.0f38.w0 1b /r ] AVX512DQ
|
|
VBROADCASTF64X2 ymmreg|mask|z,mem128 [rm:t2: evex.256.66.0f38.w1 1a /r ] AVX512VL,AVX512DQ
|
|
VBROADCASTF64X2 zmmreg|mask|z,mem128 [rm:t2: evex.512.66.0f38.w1 1a /r ] AVX512DQ
|
|
VBROADCASTF64X4 zmmreg|mask|z,mem256 [rm:t4: evex.512.66.0f38.w1 1b /r ] AVX512
|
|
VBROADCASTI32X2 xmmreg|mask|z,xmmrm64 [rm:t2: evex.128.66.0f38.w0 59 /r ] AVX512VL,AVX512DQ
|
|
VBROADCASTI32X2 ymmreg|mask|z,xmmrm64 [rm:t2: evex.256.66.0f38.w0 59 /r ] AVX512VL,AVX512DQ
|
|
VBROADCASTI32X2 zmmreg|mask|z,xmmrm64 [rm:t2: evex.512.66.0f38.w0 59 /r ] AVX512DQ
|
|
VBROADCASTI32X4 ymmreg|mask|z,mem128 [rm:t4: evex.256.66.0f38.w0 5a /r ] AVX512VL,AVX512
|
|
VBROADCASTI32X4 zmmreg|mask|z,mem128 [rm:t4: evex.512.66.0f38.w0 5a /r ] AVX512
|
|
VBROADCASTI32X8 zmmreg|mask|z,mem256 [rm:t8: evex.512.66.0f38.w0 5b /r ] AVX512DQ
|
|
VBROADCASTI64X2 ymmreg|mask|z,mem128 [rm:t2: evex.256.66.0f38.w1 5a /r ] AVX512VL,AVX512DQ
|
|
VBROADCASTI64X2 zmmreg|mask|z,mem128 [rm:t2: evex.512.66.0f38.w1 5a /r ] AVX512DQ
|
|
VBROADCASTI64X4 zmmreg|mask|z,mem256 [rm:t4: evex.512.66.0f38.w1 5b /r ] AVX512
|
|
VBROADCASTSD ymmreg|mask|z,mem64 [rm:t1s: evex.256.66.0f38.w1 19 /r ] AVX512VL,AVX512
|
|
VBROADCASTSD zmmreg|mask|z,mem64 [rm:t1s: evex.512.66.0f38.w1 19 /r ] AVX512
|
|
VBROADCASTSD ymmreg|mask|z,xmmreg [rm: evex.256.66.0f38.w1 19 /r ] AVX512VL,AVX512
|
|
VBROADCASTSD zmmreg|mask|z,xmmreg [rm: evex.512.66.0f38.w1 19 /r ] AVX512
|
|
VBROADCASTSS xmmreg|mask|z,mem32 [rm:t1s: evex.128.66.0f38.w0 18 /r ] AVX512VL,AVX512
|
|
VBROADCASTSS ymmreg|mask|z,mem32 [rm:t1s: evex.256.66.0f38.w0 18 /r ] AVX512VL,AVX512
|
|
VBROADCASTSS zmmreg|mask|z,mem32 [rm:t1s: evex.512.66.0f38.w0 18 /r ] AVX512
|
|
VBROADCASTSS xmmreg|mask|z,xmmreg [rm: evex.128.66.0f38.w0 18 /r ] AVX512VL,AVX512
|
|
VBROADCASTSS ymmreg|mask|z,xmmreg [rm: evex.256.66.0f38.w0 18 /r ] AVX512VL,AVX512
|
|
VBROADCASTSS zmmreg|mask|z,xmmreg [rm: evex.512.66.0f38.w0 18 /r ] AVX512
|
|
VCMPEQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 00 ] AVX512VL,AVX512
|
|
VCMPEQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 00 ] AVX512VL,AVX512
|
|
VCMPEQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 00 ] AVX512
|
|
VCMPEQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 00 ] AVX512VL,AVX512
|
|
VCMPEQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 00 ] AVX512VL,AVX512
|
|
VCMPEQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 00 ] AVX512
|
|
VCMPEQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 00 ] AVX512
|
|
VCMPEQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 00 ] AVX512
|
|
VCMPEQ_OQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 00 ] AVX512VL,AVX512
|
|
VCMPEQ_OQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 00 ] AVX512VL,AVX512
|
|
VCMPEQ_OQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 00 ] AVX512
|
|
VCMPEQ_OQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 00 ] AVX512VL,AVX512
|
|
VCMPEQ_OQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 00 ] AVX512VL,AVX512
|
|
VCMPEQ_OQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 00 ] AVX512
|
|
VCMPEQ_OQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 00 ] AVX512
|
|
VCMPEQ_OQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 00 ] AVX512
|
|
VCMPLTPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 01 ] AVX512VL,AVX512
|
|
VCMPLTPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 01 ] AVX512VL,AVX512
|
|
VCMPLTPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 01 ] AVX512
|
|
VCMPLTPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 01 ] AVX512VL,AVX512
|
|
VCMPLTPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 01 ] AVX512VL,AVX512
|
|
VCMPLTPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 01 ] AVX512
|
|
VCMPLTSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 01 ] AVX512
|
|
VCMPLTSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 01 ] AVX512
|
|
VCMPLT_OSPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 01 ] AVX512VL,AVX512
|
|
VCMPLT_OSPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 01 ] AVX512VL,AVX512
|
|
VCMPLT_OSPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 01 ] AVX512
|
|
VCMPLT_OSPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 01 ] AVX512VL,AVX512
|
|
VCMPLT_OSPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 01 ] AVX512VL,AVX512
|
|
VCMPLT_OSPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 01 ] AVX512
|
|
VCMPLT_OSSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 01 ] AVX512
|
|
VCMPLT_OSSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 01 ] AVX512
|
|
VCMPLEPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 02 ] AVX512VL,AVX512
|
|
VCMPLEPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 02 ] AVX512VL,AVX512
|
|
VCMPLEPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 02 ] AVX512
|
|
VCMPLEPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 02 ] AVX512VL,AVX512
|
|
VCMPLEPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 02 ] AVX512VL,AVX512
|
|
VCMPLEPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 02 ] AVX512
|
|
VCMPLESD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 02 ] AVX512
|
|
VCMPLESS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 02 ] AVX512
|
|
VCMPLE_OSPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 02 ] AVX512VL,AVX512
|
|
VCMPLE_OSPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 02 ] AVX512VL,AVX512
|
|
VCMPLE_OSPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 02 ] AVX512
|
|
VCMPLE_OSPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 02 ] AVX512VL,AVX512
|
|
VCMPLE_OSPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 02 ] AVX512VL,AVX512
|
|
VCMPLE_OSPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 02 ] AVX512
|
|
VCMPLE_OSSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 02 ] AVX512
|
|
VCMPLE_OSSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 02 ] AVX512
|
|
VCMPUNORDPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 03 ] AVX512VL,AVX512
|
|
VCMPUNORDPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 03 ] AVX512VL,AVX512
|
|
VCMPUNORDPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 03 ] AVX512
|
|
VCMPUNORDPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 03 ] AVX512VL,AVX512
|
|
VCMPUNORDPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 03 ] AVX512VL,AVX512
|
|
VCMPUNORDPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 03 ] AVX512
|
|
VCMPUNORDSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 03 ] AVX512
|
|
VCMPUNORDSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 03 ] AVX512
|
|
VCMPUNORD_QPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 03 ] AVX512VL,AVX512
|
|
VCMPUNORD_QPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 03 ] AVX512VL,AVX512
|
|
VCMPUNORD_QPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 03 ] AVX512
|
|
VCMPUNORD_QPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 03 ] AVX512VL,AVX512
|
|
VCMPUNORD_QPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 03 ] AVX512VL,AVX512
|
|
VCMPUNORD_QPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 03 ] AVX512
|
|
VCMPUNORD_QSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 03 ] AVX512
|
|
VCMPUNORD_QSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 03 ] AVX512
|
|
VCMPNEQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 04 ] AVX512VL,AVX512
|
|
VCMPNEQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 04 ] AVX512VL,AVX512
|
|
VCMPNEQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 04 ] AVX512
|
|
VCMPNEQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 04 ] AVX512VL,AVX512
|
|
VCMPNEQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 04 ] AVX512VL,AVX512
|
|
VCMPNEQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 04 ] AVX512
|
|
VCMPNEQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 04 ] AVX512
|
|
VCMPNEQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 04 ] AVX512
|
|
VCMPNEQ_UQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 04 ] AVX512VL,AVX512
|
|
VCMPNEQ_UQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 04 ] AVX512VL,AVX512
|
|
VCMPNEQ_UQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 04 ] AVX512
|
|
VCMPNEQ_UQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 04 ] AVX512VL,AVX512
|
|
VCMPNEQ_UQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 04 ] AVX512VL,AVX512
|
|
VCMPNEQ_UQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 04 ] AVX512
|
|
VCMPNEQ_UQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 04 ] AVX512
|
|
VCMPNEQ_UQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 04 ] AVX512
|
|
VCMPNLTPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 05 ] AVX512VL,AVX512
|
|
VCMPNLTPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 05 ] AVX512VL,AVX512
|
|
VCMPNLTPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 05 ] AVX512
|
|
VCMPNLTPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 05 ] AVX512VL,AVX512
|
|
VCMPNLTPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 05 ] AVX512VL,AVX512
|
|
VCMPNLTPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 05 ] AVX512
|
|
VCMPNLTSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 05 ] AVX512
|
|
VCMPNLTSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 05 ] AVX512
|
|
VCMPNLT_USPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 05 ] AVX512VL,AVX512
|
|
VCMPNLT_USPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 05 ] AVX512VL,AVX512
|
|
VCMPNLT_USPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 05 ] AVX512
|
|
VCMPNLT_USPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 05 ] AVX512VL,AVX512
|
|
VCMPNLT_USPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 05 ] AVX512VL,AVX512
|
|
VCMPNLT_USPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 05 ] AVX512
|
|
VCMPNLT_USSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 05 ] AVX512
|
|
VCMPNLT_USSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 05 ] AVX512
|
|
VCMPNLEPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 06 ] AVX512VL,AVX512
|
|
VCMPNLEPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 06 ] AVX512VL,AVX512
|
|
VCMPNLEPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 06 ] AVX512
|
|
VCMPNLEPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 06 ] AVX512VL,AVX512
|
|
VCMPNLEPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 06 ] AVX512VL,AVX512
|
|
VCMPNLEPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 06 ] AVX512
|
|
VCMPNLESD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 06 ] AVX512
|
|
VCMPNLESS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 06 ] AVX512
|
|
VCMPNLE_USPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 06 ] AVX512VL,AVX512
|
|
VCMPNLE_USPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 06 ] AVX512VL,AVX512
|
|
VCMPNLE_USPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 06 ] AVX512
|
|
VCMPNLE_USPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 06 ] AVX512VL,AVX512
|
|
VCMPNLE_USPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 06 ] AVX512VL,AVX512
|
|
VCMPNLE_USPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 06 ] AVX512
|
|
VCMPNLE_USSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 06 ] AVX512
|
|
VCMPNLE_USSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 06 ] AVX512
|
|
VCMPORDPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 07 ] AVX512VL,AVX512
|
|
VCMPORDPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 07 ] AVX512VL,AVX512
|
|
VCMPORDPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 07 ] AVX512
|
|
VCMPORDPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 07 ] AVX512VL,AVX512
|
|
VCMPORDPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 07 ] AVX512VL,AVX512
|
|
VCMPORDPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 07 ] AVX512
|
|
VCMPORDSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 07 ] AVX512
|
|
VCMPORDSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 07 ] AVX512
|
|
VCMPORD_QPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 07 ] AVX512VL,AVX512
|
|
VCMPORD_QPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 07 ] AVX512VL,AVX512
|
|
VCMPORD_QPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 07 ] AVX512
|
|
VCMPORD_QPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 07 ] AVX512VL,AVX512
|
|
VCMPORD_QPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 07 ] AVX512VL,AVX512
|
|
VCMPORD_QPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 07 ] AVX512
|
|
VCMPORD_QSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 07 ] AVX512
|
|
VCMPORD_QSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 07 ] AVX512
|
|
VCMPEQ_UQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 08 ] AVX512VL,AVX512
|
|
VCMPEQ_UQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 08 ] AVX512VL,AVX512
|
|
VCMPEQ_UQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 08 ] AVX512
|
|
VCMPEQ_UQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 08 ] AVX512VL,AVX512
|
|
VCMPEQ_UQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 08 ] AVX512VL,AVX512
|
|
VCMPEQ_UQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 08 ] AVX512
|
|
VCMPEQ_UQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 08 ] AVX512
|
|
VCMPEQ_UQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 08 ] AVX512
|
|
VCMPNGEPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 09 ] AVX512VL,AVX512
|
|
VCMPNGEPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 09 ] AVX512VL,AVX512
|
|
VCMPNGEPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 09 ] AVX512
|
|
VCMPNGEPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 09 ] AVX512VL,AVX512
|
|
VCMPNGEPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 09 ] AVX512VL,AVX512
|
|
VCMPNGEPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 09 ] AVX512
|
|
VCMPNGESD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 09 ] AVX512
|
|
VCMPNGESS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 09 ] AVX512
|
|
VCMPNGE_USPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 09 ] AVX512VL,AVX512
|
|
VCMPNGE_USPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 09 ] AVX512VL,AVX512
|
|
VCMPNGE_USPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 09 ] AVX512
|
|
VCMPNGE_USPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 09 ] AVX512VL,AVX512
|
|
VCMPNGE_USPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 09 ] AVX512VL,AVX512
|
|
VCMPNGE_USPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 09 ] AVX512
|
|
VCMPNGE_USSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 09 ] AVX512
|
|
VCMPNGE_USSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 09 ] AVX512
|
|
VCMPNGTPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0a ] AVX512VL,AVX512
|
|
VCMPNGTPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0a ] AVX512VL,AVX512
|
|
VCMPNGTPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0a ] AVX512
|
|
VCMPNGTPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0a ] AVX512VL,AVX512
|
|
VCMPNGTPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0a ] AVX512VL,AVX512
|
|
VCMPNGTPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0a ] AVX512
|
|
VCMPNGTSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0a ] AVX512
|
|
VCMPNGTSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0a ] AVX512
|
|
VCMPNGT_USPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0a ] AVX512VL,AVX512
|
|
VCMPNGT_USPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0a ] AVX512VL,AVX512
|
|
VCMPNGT_USPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0a ] AVX512
|
|
VCMPNGT_USPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0a ] AVX512VL,AVX512
|
|
VCMPNGT_USPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0a ] AVX512VL,AVX512
|
|
VCMPNGT_USPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0a ] AVX512
|
|
VCMPNGT_USSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0a ] AVX512
|
|
VCMPNGT_USSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0a ] AVX512
|
|
VCMPFALSEPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0b ] AVX512VL,AVX512
|
|
VCMPFALSEPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0b ] AVX512VL,AVX512
|
|
VCMPFALSEPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0b ] AVX512
|
|
VCMPFALSEPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0b ] AVX512VL,AVX512
|
|
VCMPFALSEPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0b ] AVX512VL,AVX512
|
|
VCMPFALSEPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0b ] AVX512
|
|
VCMPFALSESD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0b ] AVX512
|
|
VCMPFALSESS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0b ] AVX512
|
|
VCMPFALSE_OQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0b ] AVX512VL,AVX512
|
|
VCMPFALSE_OQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0b ] AVX512VL,AVX512
|
|
VCMPFALSE_OQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0b ] AVX512
|
|
VCMPFALSE_OQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0b ] AVX512VL,AVX512
|
|
VCMPFALSE_OQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0b ] AVX512VL,AVX512
|
|
VCMPFALSE_OQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0b ] AVX512
|
|
VCMPFALSE_OQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0b ] AVX512
|
|
VCMPFALSE_OQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0b ] AVX512
|
|
VCMPNEQ_OQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0c ] AVX512VL,AVX512
|
|
VCMPNEQ_OQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0c ] AVX512VL,AVX512
|
|
VCMPNEQ_OQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0c ] AVX512
|
|
VCMPNEQ_OQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0c ] AVX512VL,AVX512
|
|
VCMPNEQ_OQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0c ] AVX512VL,AVX512
|
|
VCMPNEQ_OQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0c ] AVX512
|
|
VCMPNEQ_OQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0c ] AVX512
|
|
VCMPNEQ_OQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0c ] AVX512
|
|
VCMPGEPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0d ] AVX512VL,AVX512
|
|
VCMPGEPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0d ] AVX512VL,AVX512
|
|
VCMPGEPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0d ] AVX512
|
|
VCMPGEPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0d ] AVX512VL,AVX512
|
|
VCMPGEPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0d ] AVX512VL,AVX512
|
|
VCMPGEPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0d ] AVX512
|
|
VCMPGESD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0d ] AVX512
|
|
VCMPGESS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0d ] AVX512
|
|
VCMPGE_OSPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0d ] AVX512VL,AVX512
|
|
VCMPGE_OSPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0d ] AVX512VL,AVX512
|
|
VCMPGE_OSPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0d ] AVX512
|
|
VCMPGE_OSPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0d ] AVX512VL,AVX512
|
|
VCMPGE_OSPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0d ] AVX512VL,AVX512
|
|
VCMPGE_OSPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0d ] AVX512
|
|
VCMPGE_OSSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0d ] AVX512
|
|
VCMPGE_OSSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0d ] AVX512
|
|
VCMPGTPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0e ] AVX512VL,AVX512
|
|
VCMPGTPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0e ] AVX512VL,AVX512
|
|
VCMPGTPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0e ] AVX512
|
|
VCMPGTPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0e ] AVX512VL,AVX512
|
|
VCMPGTPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0e ] AVX512VL,AVX512
|
|
VCMPGTPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0e ] AVX512
|
|
VCMPGTSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0e ] AVX512
|
|
VCMPGTSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0e ] AVX512
|
|
VCMPGT_OSPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0e ] AVX512VL,AVX512
|
|
VCMPGT_OSPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0e ] AVX512VL,AVX512
|
|
VCMPGT_OSPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0e ] AVX512
|
|
VCMPGT_OSPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0e ] AVX512VL,AVX512
|
|
VCMPGT_OSPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0e ] AVX512VL,AVX512
|
|
VCMPGT_OSPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0e ] AVX512
|
|
VCMPGT_OSSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0e ] AVX512
|
|
VCMPGT_OSSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0e ] AVX512
|
|
VCMPTRUEPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0f ] AVX512VL,AVX512
|
|
VCMPTRUEPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0f ] AVX512VL,AVX512
|
|
VCMPTRUEPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0f ] AVX512
|
|
VCMPTRUEPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0f ] AVX512VL,AVX512
|
|
VCMPTRUEPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0f ] AVX512VL,AVX512
|
|
VCMPTRUEPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0f ] AVX512
|
|
VCMPTRUESD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0f ] AVX512
|
|
VCMPTRUESS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0f ] AVX512
|
|
VCMPTRUE_UQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 0f ] AVX512VL,AVX512
|
|
VCMPTRUE_UQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 0f ] AVX512VL,AVX512
|
|
VCMPTRUE_UQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 0f ] AVX512
|
|
VCMPTRUE_UQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 0f ] AVX512VL,AVX512
|
|
VCMPTRUE_UQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 0f ] AVX512VL,AVX512
|
|
VCMPTRUE_UQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 0f ] AVX512
|
|
VCMPTRUE_UQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 0f ] AVX512
|
|
VCMPTRUE_UQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 0f ] AVX512
|
|
VCMPEQ_OSPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 10 ] AVX512VL,AVX512
|
|
VCMPEQ_OSPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 10 ] AVX512VL,AVX512
|
|
VCMPEQ_OSPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 10 ] AVX512
|
|
VCMPEQ_OSPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 10 ] AVX512VL,AVX512
|
|
VCMPEQ_OSPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 10 ] AVX512VL,AVX512
|
|
VCMPEQ_OSPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 10 ] AVX512
|
|
VCMPEQ_OSSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 10 ] AVX512
|
|
VCMPEQ_OSSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 10 ] AVX512
|
|
VCMPLT_OQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 11 ] AVX512VL,AVX512
|
|
VCMPLT_OQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 11 ] AVX512VL,AVX512
|
|
VCMPLT_OQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 11 ] AVX512
|
|
VCMPLT_OQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 11 ] AVX512VL,AVX512
|
|
VCMPLT_OQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 11 ] AVX512VL,AVX512
|
|
VCMPLT_OQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 11 ] AVX512
|
|
VCMPLT_OQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 11 ] AVX512
|
|
VCMPLT_OQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 11 ] AVX512
|
|
VCMPLE_OQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 12 ] AVX512VL,AVX512
|
|
VCMPLE_OQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 12 ] AVX512VL,AVX512
|
|
VCMPLE_OQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 12 ] AVX512
|
|
VCMPLE_OQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 12 ] AVX512VL,AVX512
|
|
VCMPLE_OQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 12 ] AVX512VL,AVX512
|
|
VCMPLE_OQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 12 ] AVX512
|
|
VCMPLE_OQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 12 ] AVX512
|
|
VCMPLE_OQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 12 ] AVX512
|
|
VCMPUNORD_SPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 13 ] AVX512VL,AVX512
|
|
VCMPUNORD_SPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 13 ] AVX512VL,AVX512
|
|
VCMPUNORD_SPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 13 ] AVX512
|
|
VCMPUNORD_SPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 13 ] AVX512VL,AVX512
|
|
VCMPUNORD_SPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 13 ] AVX512VL,AVX512
|
|
VCMPUNORD_SPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 13 ] AVX512
|
|
VCMPUNORD_SSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 13 ] AVX512
|
|
VCMPUNORD_SSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 13 ] AVX512
|
|
VCMPNEQ_USPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 14 ] AVX512VL,AVX512
|
|
VCMPNEQ_USPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 14 ] AVX512VL,AVX512
|
|
VCMPNEQ_USPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 14 ] AVX512
|
|
VCMPNEQ_USPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 14 ] AVX512VL,AVX512
|
|
VCMPNEQ_USPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 14 ] AVX512VL,AVX512
|
|
VCMPNEQ_USPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 14 ] AVX512
|
|
VCMPNEQ_USSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 14 ] AVX512
|
|
VCMPNEQ_USSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 14 ] AVX512
|
|
VCMPNLT_UQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 15 ] AVX512VL,AVX512
|
|
VCMPNLT_UQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 15 ] AVX512VL,AVX512
|
|
VCMPNLT_UQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 15 ] AVX512
|
|
VCMPNLT_UQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 15 ] AVX512VL,AVX512
|
|
VCMPNLT_UQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 15 ] AVX512VL,AVX512
|
|
VCMPNLT_UQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 15 ] AVX512
|
|
VCMPNLT_UQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 15 ] AVX512
|
|
VCMPNLT_UQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 15 ] AVX512
|
|
VCMPNLE_UQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 16 ] AVX512VL,AVX512
|
|
VCMPNLE_UQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 16 ] AVX512VL,AVX512
|
|
VCMPNLE_UQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 16 ] AVX512
|
|
VCMPNLE_UQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 16 ] AVX512VL,AVX512
|
|
VCMPNLE_UQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 16 ] AVX512VL,AVX512
|
|
VCMPNLE_UQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 16 ] AVX512
|
|
VCMPNLE_UQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 16 ] AVX512
|
|
VCMPNLE_UQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 16 ] AVX512
|
|
VCMPORD_SPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 17 ] AVX512VL,AVX512
|
|
VCMPORD_SPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 17 ] AVX512VL,AVX512
|
|
VCMPORD_SPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 17 ] AVX512
|
|
VCMPORD_SPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 17 ] AVX512VL,AVX512
|
|
VCMPORD_SPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 17 ] AVX512VL,AVX512
|
|
VCMPORD_SPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 17 ] AVX512
|
|
VCMPORD_SSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 17 ] AVX512
|
|
VCMPORD_SSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 17 ] AVX512
|
|
VCMPEQ_USPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 18 ] AVX512VL,AVX512
|
|
VCMPEQ_USPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 18 ] AVX512VL,AVX512
|
|
VCMPEQ_USPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 18 ] AVX512
|
|
VCMPEQ_USPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 18 ] AVX512VL,AVX512
|
|
VCMPEQ_USPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 18 ] AVX512VL,AVX512
|
|
VCMPEQ_USPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 18 ] AVX512
|
|
VCMPEQ_USSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 18 ] AVX512
|
|
VCMPEQ_USSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 18 ] AVX512
|
|
VCMPNGE_UQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 19 ] AVX512VL,AVX512
|
|
VCMPNGE_UQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 19 ] AVX512VL,AVX512
|
|
VCMPNGE_UQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 19 ] AVX512
|
|
VCMPNGE_UQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 19 ] AVX512VL,AVX512
|
|
VCMPNGE_UQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 19 ] AVX512VL,AVX512
|
|
VCMPNGE_UQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 19 ] AVX512
|
|
VCMPNGE_UQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 19 ] AVX512
|
|
VCMPNGE_UQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 19 ] AVX512
|
|
VCMPNGT_UQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 1a ] AVX512VL,AVX512
|
|
VCMPNGT_UQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 1a ] AVX512VL,AVX512
|
|
VCMPNGT_UQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 1a ] AVX512
|
|
VCMPNGT_UQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 1a ] AVX512VL,AVX512
|
|
VCMPNGT_UQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 1a ] AVX512VL,AVX512
|
|
VCMPNGT_UQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 1a ] AVX512
|
|
VCMPNGT_UQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 1a ] AVX512
|
|
VCMPNGT_UQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 1a ] AVX512
|
|
VCMPFALSE_OSPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 1b ] AVX512VL,AVX512
|
|
VCMPFALSE_OSPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 1b ] AVX512VL,AVX512
|
|
VCMPFALSE_OSPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 1b ] AVX512
|
|
VCMPFALSE_OSPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 1b ] AVX512VL,AVX512
|
|
VCMPFALSE_OSPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 1b ] AVX512VL,AVX512
|
|
VCMPFALSE_OSPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 1b ] AVX512
|
|
VCMPFALSE_OSSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 1b ] AVX512
|
|
VCMPFALSE_OSSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 1b ] AVX512
|
|
VCMPNEQ_OSPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 1c ] AVX512VL,AVX512
|
|
VCMPNEQ_OSPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 1c ] AVX512VL,AVX512
|
|
VCMPNEQ_OSPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 1c ] AVX512
|
|
VCMPNEQ_OSPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 1c ] AVX512VL,AVX512
|
|
VCMPNEQ_OSPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 1c ] AVX512VL,AVX512
|
|
VCMPNEQ_OSPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 1c ] AVX512
|
|
VCMPNEQ_OSSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 1c ] AVX512
|
|
VCMPNEQ_OSSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 1c ] AVX512
|
|
VCMPGE_OQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 1d ] AVX512VL,AVX512
|
|
VCMPGE_OQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 1d ] AVX512VL,AVX512
|
|
VCMPGE_OQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 1d ] AVX512
|
|
VCMPGE_OQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 1d ] AVX512VL,AVX512
|
|
VCMPGE_OQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 1d ] AVX512VL,AVX512
|
|
VCMPGE_OQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 1d ] AVX512
|
|
VCMPGE_OQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 1d ] AVX512
|
|
VCMPGE_OQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 1d ] AVX512
|
|
VCMPGT_OQPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 1e ] AVX512VL,AVX512
|
|
VCMPGT_OQPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 1e ] AVX512VL,AVX512
|
|
VCMPGT_OQPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 1e ] AVX512
|
|
VCMPGT_OQPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 1e ] AVX512VL,AVX512
|
|
VCMPGT_OQPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 1e ] AVX512VL,AVX512
|
|
VCMPGT_OQPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 1e ] AVX512
|
|
VCMPGT_OQSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 1e ] AVX512
|
|
VCMPGT_OQSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 1e ] AVX512
|
|
VCMPTRUE_USPD kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r 1f ] AVX512VL,AVX512
|
|
VCMPTRUE_USPD kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r 1f ] AVX512VL,AVX512
|
|
VCMPTRUE_USPD kreg|mask,zmmreg,zmmrm512|b64|sae [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r 1f ] AVX512
|
|
VCMPTRUE_USPS kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.0f.w0 c2 /r 1f ] AVX512VL,AVX512
|
|
VCMPTRUE_USPS kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.0f.w0 c2 /r 1f ] AVX512VL,AVX512
|
|
VCMPTRUE_USPS kreg|mask,zmmreg,zmmrm512|b32|sae [rvmi:fv: evex.nds.512.0f.w0 c2 /r 1f ] AVX512
|
|
VCMPTRUE_USSD kreg|mask,xmmreg,xmmrm64|sae [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r 1f ] AVX512
|
|
VCMPTRUE_USSS kreg|mask,xmmreg,xmmrm32|sae [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r 1f ] AVX512
|
|
VCMPPD kreg|mask,xmmreg,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f.w1 c2 /r ib ] AVX512VL,AVX512
|
|
VCMPPD kreg|mask,ymmreg,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f.w1 c2 /r ib ] AVX512VL,AVX512
|
|
VCMPPD kreg|mask,zmmreg,zmmrm512|b64|sae,imm8 [rvmi:fv: evex.nds.512.66.0f.w1 c2 /r ib ] AVX512
|
|
VCMPPS kreg|mask,xmmreg,xmmrm128|b32,imm8 [rvmi:fv: evex.nds.128.0f.w0 c2 /r ib ] AVX512VL,AVX512
|
|
VCMPPS kreg|mask,ymmreg,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.0f.w0 c2 /r ib ] AVX512VL,AVX512
|
|
VCMPPS kreg|mask,zmmreg,zmmrm512|b32|sae,imm8 [rvmi:fv: evex.nds.512.0f.w0 c2 /r ib ] AVX512
|
|
VCMPSD kreg|mask,xmmreg,xmmrm64|sae,imm8 [rvmi:t1s: evex.nds.lig.f2.0f.w1 c2 /r ib ] AVX512
|
|
VCMPSS kreg|mask,xmmreg,xmmrm32|sae,imm8 [rvmi:t1s: evex.nds.lig.f3.0f.w0 c2 /r ib ] AVX512
|
|
VCOMISD xmmreg,xmmrm64|sae [rm:t1s: evex.lig.66.0f.w1 2f /r ] AVX512
|
|
VCOMISS xmmreg,xmmrm32|sae [rm:t1s: evex.lig.0f.w0 2f /r ] AVX512
|
|
VCOMPRESSPD mem128|mask,xmmreg [mr:t1s: evex.128.66.0f38.w1 8a /r ] AVX512VL,AVX512
|
|
VCOMPRESSPD mem256|mask,ymmreg [mr:t1s: evex.256.66.0f38.w1 8a /r ] AVX512VL,AVX512
|
|
VCOMPRESSPD mem512|mask,zmmreg [mr:t1s: evex.512.66.0f38.w1 8a /r ] AVX512
|
|
VCOMPRESSPD xmmreg|mask|z,xmmreg [mr: evex.128.66.0f38.w1 8a /r ] AVX512VL,AVX512
|
|
VCOMPRESSPD ymmreg|mask|z,ymmreg [mr: evex.256.66.0f38.w1 8a /r ] AVX512VL,AVX512
|
|
VCOMPRESSPD zmmreg|mask|z,zmmreg [mr: evex.512.66.0f38.w1 8a /r ] AVX512
|
|
VCOMPRESSPS mem128|mask,xmmreg [mr:t1s: evex.128.66.0f38.w0 8a /r ] AVX512VL,AVX512
|
|
VCOMPRESSPS mem256|mask,ymmreg [mr:t1s: evex.256.66.0f38.w0 8a /r ] AVX512VL,AVX512
|
|
VCOMPRESSPS mem512|mask,zmmreg [mr:t1s: evex.512.66.0f38.w0 8a /r ] AVX512
|
|
VCOMPRESSPS xmmreg|mask|z,xmmreg [mr: evex.128.66.0f38.w0 8a /r ] AVX512VL,AVX512
|
|
VCOMPRESSPS ymmreg|mask|z,ymmreg [mr: evex.256.66.0f38.w0 8a /r ] AVX512VL,AVX512
|
|
VCOMPRESSPS zmmreg|mask|z,zmmreg [mr: evex.512.66.0f38.w0 8a /r ] AVX512
|
|
VCVTDQ2PD xmmreg|mask|z,xmmrm64|b32 [rm:hv: evex.128.f3.0f.w0 e6 /r ] AVX512VL,AVX512
|
|
VCVTDQ2PD ymmreg|mask|z,xmmrm128|b32 [rm:hv: evex.256.f3.0f.w0 e6 /r ] AVX512VL,AVX512
|
|
VCVTDQ2PD zmmreg|mask|z,ymmrm256|b32|er [rm:hv: evex.512.f3.0f.w0 e6 /r ] AVX512
|
|
VCVTDQ2PS xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.0f.w0 5b /r ] AVX512VL,AVX512
|
|
VCVTDQ2PS ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.0f.w0 5b /r ] AVX512VL,AVX512
|
|
VCVTDQ2PS zmmreg|mask|z,zmmrm512|b32|er [rm:fv: evex.512.0f.w0 5b /r ] AVX512
|
|
VCVTPD2DQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.f2.0f.w1 e6 /r ] AVX512VL,AVX512
|
|
VCVTPD2DQ xmmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.f2.0f.w1 e6 /r ] AVX512VL,AVX512
|
|
VCVTPD2DQ ymmreg|mask|z,zmmrm512|b64|er [rm:fv: evex.512.f2.0f.w1 e6 /r ] AVX512
|
|
VCVTPD2PS xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f.w1 5a /r ] AVX512VL,AVX512
|
|
VCVTPD2PS xmmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f.w1 5a /r ] AVX512VL,AVX512
|
|
VCVTPD2PS ymmreg|mask|z,zmmrm512|b64|er [rm:fv: evex.512.66.0f.w1 5a /r ] AVX512
|
|
VCVTPD2QQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f.w1 7b /r ] AVX512VL,AVX512DQ
|
|
VCVTPD2QQ ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f.w1 7b /r ] AVX512VL,AVX512DQ
|
|
VCVTPD2QQ zmmreg|mask|z,zmmrm512|b64|er [rm:fv: evex.512.66.0f.w1 7b /r ] AVX512DQ
|
|
VCVTPD2UDQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.0f.w1 79 /r ] AVX512VL,AVX512
|
|
VCVTPD2UDQ xmmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.0f.w1 79 /r ] AVX512VL,AVX512
|
|
VCVTPD2UDQ ymmreg|mask|z,zmmrm512|b64|er [rm:fv: evex.512.0f.w1 79 /r ] AVX512
|
|
VCVTPD2UQQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f.w1 79 /r ] AVX512VL,AVX512DQ
|
|
VCVTPD2UQQ ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f.w1 79 /r ] AVX512VL,AVX512DQ
|
|
VCVTPD2UQQ zmmreg|mask|z,zmmrm512|b64|er [rm:fv: evex.512.66.0f.w1 79 /r ] AVX512DQ
|
|
VCVTPH2PS xmmreg|mask|z,xmmrm64 [rm:hvm: evex.128.66.0f38.w0 13 /r ] AVX512VL,AVX512
|
|
VCVTPH2PS ymmreg|mask|z,xmmrm128 [rm:hvm: evex.256.66.0f38.w0 13 /r ] AVX512VL,AVX512
|
|
VCVTPH2PS zmmreg|mask|z,ymmrm256|sae [rm:hvm: evex.512.66.0f38.w0 13 /r ] AVX512
|
|
VCVTPS2DQ xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.66.0f.w0 5b /r ] AVX512VL,AVX512
|
|
VCVTPS2DQ ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.66.0f.w0 5b /r ] AVX512VL,AVX512
|
|
VCVTPS2DQ zmmreg|mask|z,zmmrm512|b32|er [rm:fv: evex.512.66.0f.w0 5b /r ] AVX512
|
|
VCVTPS2PD xmmreg|mask|z,xmmrm64|b32 [rm:hv: evex.128.0f.w0 5a /r ] AVX512VL,AVX512
|
|
VCVTPS2PD ymmreg|mask|z,xmmrm128|b32 [rm:hv: evex.256.0f.w0 5a /r ] AVX512VL,AVX512
|
|
VCVTPS2PD zmmreg|mask|z,ymmrm256|b32|sae [rm:hv: evex.512.0f.w0 5a /r ] AVX512
|
|
VCVTPS2PH xmmreg|mask|z,xmmreg,imm8 [mri:hvm: evex.128.66.0f3a.w0 1d /r ib ] AVX512VL,AVX512
|
|
VCVTPS2PH xmmreg|mask|z,ymmreg,imm8 [mri:hvm: evex.256.66.0f3a.w0 1d /r ib ] AVX512VL,AVX512
|
|
VCVTPS2PH ymmreg|mask|z,zmmreg|sae,imm8 [mri:hvm: evex.512.66.0f3a.w0 1d /r ib ] AVX512
|
|
VCVTPS2PH mem64|mask,xmmreg,imm8 [mri:hvm: evex.128.66.0f3a.w0 1d /r ib ] AVX512VL,AVX512
|
|
VCVTPS2PH mem128|mask,ymmreg,imm8 [mri:hvm: evex.256.66.0f3a.w0 1d /r ib ] AVX512VL,AVX512
|
|
VCVTPS2PH mem256|mask,zmmreg|sae,imm8 [mri:hvm: evex.512.66.0f3a.w0 1d /r ib ] AVX512
|
|
VCVTPS2QQ xmmreg|mask|z,xmmrm64|b32 [rm:hv: evex.128.66.0f.w0 7b /r ] AVX512VL,AVX512DQ
|
|
VCVTPS2QQ ymmreg|mask|z,xmmrm128|b32 [rm:hv: evex.256.66.0f.w0 7b /r ] AVX512VL,AVX512DQ
|
|
VCVTPS2QQ zmmreg|mask|z,ymmrm256|b32|er [rm:hv: evex.512.66.0f.w0 7b /r ] AVX512DQ
|
|
VCVTPS2UDQ xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.0f.w0 79 /r ] AVX512VL,AVX512
|
|
VCVTPS2UDQ ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.0f.w0 79 /r ] AVX512VL,AVX512
|
|
VCVTPS2UDQ zmmreg|mask|z,zmmrm512|b32|er [rm:fv: evex.512.0f.w0 79 /r ] AVX512
|
|
VCVTPS2UQQ xmmreg|mask|z,xmmrm64|b32 [rm:hv: evex.128.66.0f.w0 79 /r ] AVX512VL,AVX512DQ
|
|
VCVTPS2UQQ ymmreg|mask|z,xmmrm128|b32 [rm:hv: evex.256.66.0f.w0 79 /r ] AVX512VL,AVX512DQ
|
|
VCVTPS2UQQ zmmreg|mask|z,ymmrm256|b32|er [rm:hv: evex.512.66.0f.w0 79 /r ] AVX512DQ
|
|
VCVTQQ2PD xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.f3.0f.w1 e6 /r ] AVX512VL,AVX512DQ
|
|
VCVTQQ2PD ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.f3.0f.w1 e6 /r ] AVX512VL,AVX512DQ
|
|
VCVTQQ2PD zmmreg|mask|z,zmmrm512|b64|er [rm:fv: evex.512.f3.0f.w1 e6 /r ] AVX512DQ
|
|
VCVTQQ2PS xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.0f.w1 5b /r ] AVX512VL,AVX512DQ
|
|
VCVTQQ2PS xmmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.0f.w1 5b /r ] AVX512VL,AVX512DQ
|
|
VCVTQQ2PS ymmreg|mask|z,zmmrm512|b64|er [rm:fv: evex.512.0f.w1 5b /r ] AVX512DQ
|
|
VCVTSD2SI reg32,xmmrm64|er [rm:t1f64: evex.128.f2.0f.w0 2d /r ] AVX512
|
|
VCVTSD2SI reg64,xmmrm64|er [rm:t1f64: evex.128.f2.0f.w1 2d /r ] AVX512
|
|
VCVTSD2SS xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.f2.0f.w1 5a /r ] AVX512
|
|
VCVTSD2USI reg32,xmmrm64|er [rm:t1f64: evex.128.f2.0f.w0 79 /r ] AVX512
|
|
VCVTSD2USI reg64,xmmrm64|er [rm:t1f64: evex.128.f2.0f.w1 79 /r ] AVX512
|
|
VCVTSI2SD xmmreg,xmmreg|er,rm32 [rvm:t1s: evex.nds.lig.f2.0f.w0 2a /r ] AVX512
|
|
VCVTSI2SD xmmreg,xmmreg|er,rm64 [rvm:t1s: evex.nds.lig.f2.0f.w1 2a /r ] AVX512
|
|
VCVTSI2SS xmmreg,xmmreg|er,rm32 [rvm:t1s: evex.nds.lig.f3.0f.w0 2a /r ] AVX512
|
|
VCVTSI2SS xmmreg,xmmreg|er,rm64 [rvm:t1s: evex.nds.lig.f3.0f.w1 2a /r ] AVX512
|
|
VCVTSS2SD xmmreg|mask|z,xmmreg,xmmrm32|sae [rvm:t1s: evex.nds.lig.f3.0f.w0 5a /r ] AVX512
|
|
VCVTSS2SI reg32,xmmrm32|er [rm:t1f32: evex.128.f3.0f.w0 2d /r ] AVX512
|
|
VCVTSS2SI reg64,xmmrm32|er [rm:t1f32: evex.128.f3.0f.w1 2d /r ] AVX512
|
|
VCVTSS2USI reg32,xmmrm32|er [rm:t1f32: evex.128.f3.0f.w0 79 /r ] AVX512
|
|
VCVTSS2USI reg64,xmmrm32|er [rm:t1f32: evex.128.f3.0f.w1 79 /r ] AVX512
|
|
VCVTTPD2DQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f.w1 e6 /r ] AVX512VL,AVX512
|
|
VCVTTPD2DQ xmmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f.w1 e6 /r ] AVX512VL,AVX512
|
|
VCVTTPD2DQ ymmreg|mask|z,zmmrm512|b64|sae [rm:fv: evex.512.66.0f.w1 e6 /r ] AVX512
|
|
VCVTTPD2QQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f.w1 7a /r ] AVX512VL,AVX512DQ
|
|
VCVTTPD2QQ ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f.w1 7a /r ] AVX512VL,AVX512DQ
|
|
VCVTTPD2QQ zmmreg|mask|z,zmmrm512|b64|sae [rm:fv: evex.512.66.0f.w1 7a /r ] AVX512DQ
|
|
VCVTTPD2UDQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.0f.w1 78 /r ] AVX512VL,AVX512
|
|
VCVTTPD2UDQ xmmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.0f.w1 78 /r ] AVX512VL,AVX512
|
|
VCVTTPD2UDQ ymmreg|mask|z,zmmrm512|b64|sae [rm:fv: evex.512.0f.w1 78 /r ] AVX512
|
|
VCVTTPD2UQQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f.w1 78 /r ] AVX512VL,AVX512DQ
|
|
VCVTTPD2UQQ ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f.w1 78 /r ] AVX512VL,AVX512DQ
|
|
VCVTTPD2UQQ zmmreg|mask|z,zmmrm512|b64|sae [rm:fv: evex.512.66.0f.w1 78 /r ] AVX512DQ
|
|
VCVTTPS2DQ xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.f3.0f.w0 5b /r ] AVX512VL,AVX512
|
|
VCVTTPS2DQ ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.f3.0f.w0 5b /r ] AVX512VL,AVX512
|
|
VCVTTPS2DQ zmmreg|mask|z,zmmrm512|b32|sae [rm:fv: evex.512.f3.0f.w0 5b /r ] AVX512
|
|
VCVTTPS2QQ xmmreg|mask|z,xmmrm64|b32 [rm:hv: evex.128.66.0f.w0 7a /r ] AVX512VL,AVX512DQ
|
|
VCVTTPS2QQ ymmreg|mask|z,xmmrm128|b32 [rm:hv: evex.256.66.0f.w0 7a /r ] AVX512VL,AVX512DQ
|
|
VCVTTPS2QQ zmmreg|mask|z,ymmrm256|b32|sae [rm:hv: evex.512.66.0f.w0 7a /r ] AVX512DQ
|
|
VCVTTPS2UDQ xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.0f.w0 78 /r ] AVX512VL,AVX512
|
|
VCVTTPS2UDQ ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.0f.w0 78 /r ] AVX512VL,AVX512
|
|
VCVTTPS2UDQ zmmreg|mask|z,zmmrm512|b32|sae [rm:fv: evex.512.0f.w0 78 /r ] AVX512
|
|
VCVTTPS2UQQ xmmreg|mask|z,xmmrm64|b32 [rm:hv: evex.128.66.0f.w0 78 /r ] AVX512VL,AVX512DQ
|
|
VCVTTPS2UQQ ymmreg|mask|z,xmmrm128|b32 [rm:hv: evex.256.66.0f.w0 78 /r ] AVX512VL,AVX512DQ
|
|
VCVTTPS2UQQ zmmreg|mask|z,ymmrm256|b32|sae [rm:hv: evex.512.66.0f.w0 78 /r ] AVX512DQ
|
|
VCVTTSD2SI reg32,xmmrm64|sae [rm:t1f64: evex.128.f2.0f.w0 2c /r ] AVX512
|
|
VCVTTSD2SI reg64,xmmrm64|sae [rm:t1f64: evex.128.f2.0f.w1 2c /r ] AVX512
|
|
VCVTTSD2USI reg32,xmmrm64|sae [rm:t1f64: evex.128.f2.0f.w0 78 /r ] AVX512
|
|
VCVTTSD2USI reg64,xmmrm64|sae [rm:t1f64: evex.128.f2.0f.w1 78 /r ] AVX512
|
|
VCVTTSS2SI reg32,xmmrm32|sae [rm:t1f32: evex.128.f3.0f.w0 2c /r ] AVX512
|
|
VCVTTSS2SI reg64,xmmrm32|sae [rm:t1f32: evex.128.f3.0f.w1 2c /r ] AVX512
|
|
VCVTTSS2USI reg32,xmmrm32|sae [rm:t1f32: evex.128.f3.0f.w0 78 /r ] AVX512
|
|
VCVTTSS2USI reg64,xmmrm32|sae [rm:t1f32: evex.128.f3.0f.w1 78 /r ] AVX512
|
|
VCVTUDQ2PD xmmreg|mask|z,xmmrm64|b32 [rm:hv: evex.128.f3.0f.w0 7a /r ] AVX512VL,AVX512
|
|
VCVTUDQ2PD ymmreg|mask|z,xmmrm128|b32 [rm:hv: evex.256.f3.0f.w0 7a /r ] AVX512VL,AVX512
|
|
VCVTUDQ2PD zmmreg|mask|z,ymmrm256|b32|er [rm:hv: evex.512.f3.0f.w0 7a /r ] AVX512
|
|
VCVTUDQ2PS xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.f2.0f.w0 7a /r ] AVX512VL,AVX512
|
|
VCVTUDQ2PS ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.f2.0f.w0 7a /r ] AVX512VL,AVX512
|
|
VCVTUDQ2PS zmmreg|mask|z,zmmrm512|b32|er [rm:fv: evex.512.f2.0f.w0 7a /r ] AVX512
|
|
VCVTUQQ2PD xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.f3.0f.w1 7a /r ] AVX512VL,AVX512DQ
|
|
VCVTUQQ2PD ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.f3.0f.w1 7a /r ] AVX512VL,AVX512DQ
|
|
VCVTUQQ2PD zmmreg|mask|z,zmmrm512|b64|er [rm:fv: evex.512.f3.0f.w1 7a /r ] AVX512DQ
|
|
VCVTUQQ2PS xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.f2.0f.w1 7a /r ] AVX512VL,AVX512DQ
|
|
VCVTUQQ2PS xmmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.f2.0f.w1 7a /r ] AVX512VL,AVX512DQ
|
|
VCVTUQQ2PS ymmreg|mask|z,zmmrm512|b64|er [rm:fv: evex.512.f2.0f.w1 7a /r ] AVX512DQ
|
|
VCVTUSI2SD xmmreg,xmmreg|er,rm32 [rvm:t1s: evex.nds.lig.f2.0f.w0 7b /r ] AVX512
|
|
VCVTUSI2SD xmmreg,xmmreg|er,rm64 [rvm:t1s: evex.nds.lig.f2.0f.w1 7b /r ] AVX512
|
|
VCVTUSI2SS xmmreg,xmmreg|er,rm32 [rvm:t1s: evex.nds.lig.f3.0f.w0 7b /r ] AVX512
|
|
VCVTUSI2SS xmmreg,xmmreg|er,rm64 [rvm:t1s: evex.nds.lig.f3.0f.w1 7b /r ] AVX512
|
|
VDBPSADBW xmmreg|mask|z,xmmreg*,xmmrm128,imm8 [rvmi:fvm: evex.nds.128.66.0f3a.w0 42 /r ib ] AVX512VL,AVX512BW
|
|
VDBPSADBW ymmreg|mask|z,ymmreg*,ymmrm256,imm8 [rvmi:fvm: evex.nds.256.66.0f3a.w0 42 /r ib ] AVX512VL,AVX512BW
|
|
VDBPSADBW zmmreg|mask|z,zmmreg*,zmmrm512,imm8 [rvmi:fvm: evex.nds.512.66.0f3a.w0 42 /r ib ] AVX512BW
|
|
VDIVPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 5e /r ] AVX512VL,AVX512
|
|
VDIVPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 5e /r ] AVX512VL,AVX512
|
|
VDIVPD zmmreg|mask|z,zmmreg*,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f.w1 5e /r ] AVX512
|
|
VDIVPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 5e /r ] AVX512VL,AVX512
|
|
VDIVPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 5e /r ] AVX512VL,AVX512
|
|
VDIVPS zmmreg|mask|z,zmmreg*,zmmrm512|b32|er [rvm:fv: evex.nds.512.0f.w0 5e /r ] AVX512
|
|
VDIVSD xmmreg|mask|z,xmmreg*,xmmrm64|er [rvm:t1s: evex.nds.lig.f2.0f.w1 5e /r ] AVX512
|
|
VDIVSS xmmreg|mask|z,xmmreg*,xmmrm32|er [rvm:t1s: evex.nds.lig.f3.0f.w0 5e /r ] AVX512
|
|
VEXP2PD zmmreg|mask|z,zmmrm512|b64|sae [rm:fv: evex.512.66.0f38.w1 c8 /r ] AVX512ER
|
|
VEXP2PS zmmreg|mask|z,zmmrm512|b32|sae [rm:fv: evex.512.66.0f38.w0 c8 /r ] AVX512ER
|
|
VEXPANDPD xmmreg|mask|z,mem128 [rm:t1s: evex.128.66.0f38.w1 88 /r ] AVX512VL,AVX512
|
|
VEXPANDPD ymmreg|mask|z,mem256 [rm:t1s: evex.256.66.0f38.w1 88 /r ] AVX512VL,AVX512
|
|
VEXPANDPD zmmreg|mask|z,mem512 [rm:t1s: evex.512.66.0f38.w1 88 /r ] AVX512
|
|
VEXPANDPD xmmreg|mask|z,xmmreg [rm:t1s: evex.128.66.0f38.w1 88 /r ] AVX512VL,AVX512
|
|
VEXPANDPD ymmreg|mask|z,ymmreg [rm:t1s: evex.256.66.0f38.w1 88 /r ] AVX512VL,AVX512
|
|
VEXPANDPD zmmreg|mask|z,zmmreg [rm:t1s: evex.512.66.0f38.w1 88 /r ] AVX512
|
|
VEXPANDPS xmmreg|mask|z,mem128 [rm:t1s: evex.128.66.0f38.w0 88 /r ] AVX512VL,AVX512
|
|
VEXPANDPS ymmreg|mask|z,mem256 [rm:t1s: evex.256.66.0f38.w0 88 /r ] AVX512VL,AVX512
|
|
VEXPANDPS zmmreg|mask|z,mem512 [rm:t1s: evex.512.66.0f38.w0 88 /r ] AVX512
|
|
VEXPANDPS xmmreg|mask|z,xmmreg [rm:t1s: evex.128.66.0f38.w0 88 /r ] AVX512VL,AVX512
|
|
VEXPANDPS ymmreg|mask|z,ymmreg [rm:t1s: evex.256.66.0f38.w0 88 /r ] AVX512VL,AVX512
|
|
VEXPANDPS zmmreg|mask|z,zmmreg [rm:t1s: evex.512.66.0f38.w0 88 /r ] AVX512
|
|
VEXTRACTF32X4 xmmreg|mask|z,ymmreg,imm8 [mri: evex.256.66.0f3a.w0 19 /r ib ] AVX512VL,AVX512
|
|
VEXTRACTF32X4 xmmreg|mask|z,zmmreg,imm8 [mri: evex.512.66.0f3a.w0 19 /r ib ] AVX512
|
|
VEXTRACTF32X4 mem128|mask,ymmreg,imm8 [mri:t4: evex.256.66.0f3a.w0 19 /r ib ] AVX512VL,AVX512
|
|
VEXTRACTF32X4 mem128|mask,zmmreg,imm8 [mri:t4: evex.512.66.0f3a.w0 19 /r ib ] AVX512
|
|
VEXTRACTF32X8 ymmreg|mask|z,zmmreg,imm8 [mri: evex.512.66.0f3a.w0 1b /r ib ] AVX512DQ
|
|
VEXTRACTF32X8 mem256|mask,zmmreg,imm8 [mri:t8: evex.512.66.0f3a.w0 1b /r ib ] AVX512DQ
|
|
VEXTRACTF64X2 xmmreg|mask|z,ymmreg,imm8 [mri: evex.256.66.0f3a.w1 19 /r ib ] AVX512VL,AVX512DQ
|
|
VEXTRACTF64X2 xmmreg|mask|z,zmmreg,imm8 [mri: evex.512.66.0f3a.w1 19 /r ib ] AVX512DQ
|
|
VEXTRACTF64X2 mem128|mask,ymmreg,imm8 [mri:t2: evex.256.66.0f3a.w1 19 /r ib ] AVX512VL,AVX512DQ
|
|
VEXTRACTF64X2 mem128|mask,zmmreg,imm8 [mri:t2: evex.512.66.0f3a.w1 19 /r ib ] AVX512DQ
|
|
VEXTRACTF64X4 ymmreg|mask|z,zmmreg,imm8 [mri: evex.512.66.0f3a.w1 1b /r ib ] AVX512
|
|
VEXTRACTF64X4 mem256|mask,zmmreg,imm8 [mri:t4: evex.512.66.0f3a.w1 1b /r ib ] AVX512
|
|
VEXTRACTI32X4 xmmreg|mask|z,ymmreg,imm8 [mri: evex.256.66.0f3a.w0 39 /r ib ] AVX512VL,AVX512
|
|
VEXTRACTI32X4 xmmreg|mask|z,zmmreg,imm8 [mri: evex.512.66.0f3a.w0 39 /r ib ] AVX512
|
|
VEXTRACTI32X4 mem128|mask,ymmreg,imm8 [mri:t4: evex.256.66.0f3a.w0 39 /r ib ] AVX512VL,AVX512
|
|
VEXTRACTI32X4 mem128|mask,zmmreg,imm8 [mri:t4: evex.512.66.0f3a.w0 39 /r ib ] AVX512
|
|
VEXTRACTI32X8 ymmreg|mask|z,zmmreg,imm8 [mri: evex.512.66.0f3a.w0 3b /r ib ] AVX512DQ
|
|
VEXTRACTI32X8 mem256|mask,zmmreg,imm8 [mri:t8: evex.512.66.0f3a.w0 3b /r ib ] AVX512DQ
|
|
VEXTRACTI64X2 xmmreg|mask|z,ymmreg,imm8 [mri: evex.256.66.0f3a.w1 39 /r ib ] AVX512VL,AVX512DQ
|
|
VEXTRACTI64X2 xmmreg|mask|z,zmmreg,imm8 [mri: evex.512.66.0f3a.w1 39 /r ib ] AVX512DQ
|
|
VEXTRACTI64X2 mem128|mask,ymmreg,imm8 [mri:t2: evex.256.66.0f3a.w1 39 /r ib ] AVX512VL,AVX512DQ
|
|
VEXTRACTI64X2 mem128|mask,zmmreg,imm8 [mri:t2: evex.512.66.0f3a.w1 39 /r ib ] AVX512DQ
|
|
VEXTRACTI64X4 ymmreg|mask|z,zmmreg,imm8 [mri: evex.512.66.0f3a.w1 3b /r ib ] AVX512
|
|
VEXTRACTI64X4 mem256|mask,zmmreg,imm8 [mri:t4: evex.512.66.0f3a.w1 3b /r ib ] AVX512
|
|
VEXTRACTPS reg32,xmmreg,imm8 [mri:t1s: evex.128.66.0f3a.wig 17 /r ib ] AVX512
|
|
VEXTRACTPS reg64,xmmreg,imm8 [mri:t1s: evex.128.66.0f3a.wig 17 /r ib ] AVX512
|
|
VEXTRACTPS mem32,xmmreg,imm8 [mri:t1s: evex.128.66.0f3a.wig 17 /r ib ] AVX512
|
|
VFIXUPIMMPD xmmreg|mask|z,xmmreg*,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w1 54 /r ib ] AVX512VL,AVX512
|
|
VFIXUPIMMPD ymmreg|mask|z,ymmreg*,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 54 /r ib ] AVX512VL,AVX512
|
|
VFIXUPIMMPD zmmreg|mask|z,zmmreg*,zmmrm512|b64|sae,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 54 /r ib ] AVX512
|
|
VFIXUPIMMPS xmmreg|mask|z,xmmreg*,xmmrm128|b32,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w0 54 /r ib ] AVX512VL,AVX512
|
|
VFIXUPIMMPS ymmreg|mask|z,ymmreg*,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w0 54 /r ib ] AVX512VL,AVX512
|
|
VFIXUPIMMPS zmmreg|mask|z,zmmreg*,zmmrm512|b32|sae,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w0 54 /r ib ] AVX512
|
|
VFIXUPIMMSD xmmreg|mask|z,xmmreg*,xmmrm64|sae,imm8 [rvmi:t1s: evex.nds.lig.66.0f3a.w1 55 /r ib ] AVX512
|
|
VFIXUPIMMSS xmmreg|mask|z,xmmreg*,xmmrm32|sae,imm8 [rvmi:t1s: evex.nds.lig.66.0f3a.w0 55 /r ib ] AVX512
|
|
VFMADD132PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 98 /r ] AVX512VL,AVX512
|
|
VFMADD132PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 98 /r ] AVX512VL,AVX512
|
|
VFMADD132PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 98 /r ] AVX512
|
|
VFMADD132PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 98 /r ] AVX512VL,AVX512
|
|
VFMADD132PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 98 /r ] AVX512VL,AVX512
|
|
VFMADD132PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 98 /r ] AVX512
|
|
VFMADD132SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 99 /r ] AVX512
|
|
VFMADD132SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 99 /r ] AVX512
|
|
VFMADD213PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 a8 /r ] AVX512VL,AVX512
|
|
VFMADD213PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 a8 /r ] AVX512VL,AVX512
|
|
VFMADD213PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 a8 /r ] AVX512
|
|
VFMADD213PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 a8 /r ] AVX512VL,AVX512
|
|
VFMADD213PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 a8 /r ] AVX512VL,AVX512
|
|
VFMADD213PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 a8 /r ] AVX512
|
|
VFMADD213SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 a9 /r ] AVX512
|
|
VFMADD213SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 a9 /r ] AVX512
|
|
VFMADD231PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 b8 /r ] AVX512VL,AVX512
|
|
VFMADD231PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 b8 /r ] AVX512VL,AVX512
|
|
VFMADD231PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 b8 /r ] AVX512
|
|
VFMADD231PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 b8 /r ] AVX512VL,AVX512
|
|
VFMADD231PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 b8 /r ] AVX512VL,AVX512
|
|
VFMADD231PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 b8 /r ] AVX512
|
|
VFMADD231SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 b9 /r ] AVX512
|
|
VFMADD231SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 b9 /r ] AVX512
|
|
VFMADDSUB132PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 96 /r ] AVX512VL,AVX512
|
|
VFMADDSUB132PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 96 /r ] AVX512VL,AVX512
|
|
VFMADDSUB132PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 96 /r ] AVX512
|
|
VFMADDSUB132PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 96 /r ] AVX512VL,AVX512
|
|
VFMADDSUB132PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 96 /r ] AVX512VL,AVX512
|
|
VFMADDSUB132PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 96 /r ] AVX512
|
|
VFMADDSUB213PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 a6 /r ] AVX512VL,AVX512
|
|
VFMADDSUB213PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 a6 /r ] AVX512VL,AVX512
|
|
VFMADDSUB213PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 a6 /r ] AVX512
|
|
VFMADDSUB213PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 a6 /r ] AVX512VL,AVX512
|
|
VFMADDSUB213PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 a6 /r ] AVX512VL,AVX512
|
|
VFMADDSUB213PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 a6 /r ] AVX512
|
|
VFMADDSUB231PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 b6 /r ] AVX512VL,AVX512
|
|
VFMADDSUB231PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 b6 /r ] AVX512VL,AVX512
|
|
VFMADDSUB231PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 b6 /r ] AVX512
|
|
VFMADDSUB231PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 b6 /r ] AVX512VL,AVX512
|
|
VFMADDSUB231PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 b6 /r ] AVX512VL,AVX512
|
|
VFMADDSUB231PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 b6 /r ] AVX512
|
|
VFMSUB132PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 9a /r ] AVX512VL,AVX512
|
|
VFMSUB132PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 9a /r ] AVX512VL,AVX512
|
|
VFMSUB132PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 9a /r ] AVX512
|
|
VFMSUB132PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 9a /r ] AVX512VL,AVX512
|
|
VFMSUB132PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 9a /r ] AVX512VL,AVX512
|
|
VFMSUB132PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 9a /r ] AVX512
|
|
VFMSUB132SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 9b /r ] AVX512
|
|
VFMSUB132SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 9b /r ] AVX512
|
|
VFMSUB213PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 aa /r ] AVX512VL,AVX512
|
|
VFMSUB213PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 aa /r ] AVX512VL,AVX512
|
|
VFMSUB213PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 aa /r ] AVX512
|
|
VFMSUB213PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 aa /r ] AVX512VL,AVX512
|
|
VFMSUB213PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 aa /r ] AVX512VL,AVX512
|
|
VFMSUB213PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 aa /r ] AVX512
|
|
VFMSUB213SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 ab /r ] AVX512
|
|
VFMSUB213SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 ab /r ] AVX512
|
|
VFMSUB231PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 ba /r ] AVX512VL,AVX512
|
|
VFMSUB231PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 ba /r ] AVX512VL,AVX512
|
|
VFMSUB231PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 ba /r ] AVX512
|
|
VFMSUB231PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 ba /r ] AVX512VL,AVX512
|
|
VFMSUB231PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 ba /r ] AVX512VL,AVX512
|
|
VFMSUB231PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 ba /r ] AVX512
|
|
VFMSUB231SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 bb /r ] AVX512
|
|
VFMSUB231SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 bb /r ] AVX512
|
|
VFMSUBADD132PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 97 /r ] AVX512VL,AVX512
|
|
VFMSUBADD132PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 97 /r ] AVX512VL,AVX512
|
|
VFMSUBADD132PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 97 /r ] AVX512
|
|
VFMSUBADD132PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 97 /r ] AVX512VL,AVX512
|
|
VFMSUBADD132PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 97 /r ] AVX512VL,AVX512
|
|
VFMSUBADD132PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 97 /r ] AVX512
|
|
VFMSUBADD213PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 a7 /r ] AVX512VL,AVX512
|
|
VFMSUBADD213PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 a7 /r ] AVX512VL,AVX512
|
|
VFMSUBADD213PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 a7 /r ] AVX512
|
|
VFMSUBADD213PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 a7 /r ] AVX512VL,AVX512
|
|
VFMSUBADD213PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 a7 /r ] AVX512VL,AVX512
|
|
VFMSUBADD213PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 a7 /r ] AVX512
|
|
VFMSUBADD231PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 b7 /r ] AVX512VL,AVX512
|
|
VFMSUBADD231PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 b7 /r ] AVX512VL,AVX512
|
|
VFMSUBADD231PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 b7 /r ] AVX512
|
|
VFMSUBADD231PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 b7 /r ] AVX512VL,AVX512
|
|
VFMSUBADD231PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 b7 /r ] AVX512VL,AVX512
|
|
VFMSUBADD231PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 b7 /r ] AVX512
|
|
VFNMADD132PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 9c /r ] AVX512VL,AVX512
|
|
VFNMADD132PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 9c /r ] AVX512VL,AVX512
|
|
VFNMADD132PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 9c /r ] AVX512
|
|
VFNMADD132PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 9c /r ] AVX512VL,AVX512
|
|
VFNMADD132PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 9c /r ] AVX512VL,AVX512
|
|
VFNMADD132PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 9c /r ] AVX512
|
|
VFNMADD132SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 9d /r ] AVX512
|
|
VFNMADD132SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 9d /r ] AVX512
|
|
VFNMADD213PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 ac /r ] AVX512VL,AVX512
|
|
VFNMADD213PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 ac /r ] AVX512VL,AVX512
|
|
VFNMADD213PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 ac /r ] AVX512
|
|
VFNMADD213PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 ac /r ] AVX512VL,AVX512
|
|
VFNMADD213PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 ac /r ] AVX512VL,AVX512
|
|
VFNMADD213PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 ac /r ] AVX512
|
|
VFNMADD213SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 ad /r ] AVX512
|
|
VFNMADD213SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 ad /r ] AVX512
|
|
VFNMADD231PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 bc /r ] AVX512VL,AVX512
|
|
VFNMADD231PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 bc /r ] AVX512VL,AVX512
|
|
VFNMADD231PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 bc /r ] AVX512
|
|
VFNMADD231PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 bc /r ] AVX512VL,AVX512
|
|
VFNMADD231PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 bc /r ] AVX512VL,AVX512
|
|
VFNMADD231PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 bc /r ] AVX512
|
|
VFNMADD231SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 bd /r ] AVX512
|
|
VFNMADD231SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 bd /r ] AVX512
|
|
VFNMSUB132PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 9e /r ] AVX512VL,AVX512
|
|
VFNMSUB132PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 9e /r ] AVX512VL,AVX512
|
|
VFNMSUB132PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 9e /r ] AVX512
|
|
VFNMSUB132PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 9e /r ] AVX512VL,AVX512
|
|
VFNMSUB132PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 9e /r ] AVX512VL,AVX512
|
|
VFNMSUB132PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 9e /r ] AVX512
|
|
VFNMSUB132SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 9f /r ] AVX512
|
|
VFNMSUB132SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 9f /r ] AVX512
|
|
VFNMSUB213PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 ae /r ] AVX512VL,AVX512
|
|
VFNMSUB213PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 ae /r ] AVX512VL,AVX512
|
|
VFNMSUB213PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 ae /r ] AVX512
|
|
VFNMSUB213PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 ae /r ] AVX512VL,AVX512
|
|
VFNMSUB213PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 ae /r ] AVX512VL,AVX512
|
|
VFNMSUB213PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 ae /r ] AVX512
|
|
VFNMSUB213SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 af /r ] AVX512
|
|
VFNMSUB213SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 af /r ] AVX512
|
|
VFNMSUB231PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 be /r ] AVX512VL,AVX512
|
|
VFNMSUB231PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 be /r ] AVX512VL,AVX512
|
|
VFNMSUB231PD zmmreg|mask|z,zmmreg,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 be /r ] AVX512
|
|
VFNMSUB231PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 be /r ] AVX512VL,AVX512
|
|
VFNMSUB231PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 be /r ] AVX512VL,AVX512
|
|
VFNMSUB231PS zmmreg|mask|z,zmmreg,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 be /r ] AVX512
|
|
VFNMSUB231SD xmmreg|mask|z,xmmreg,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 bf /r ] AVX512
|
|
VFNMSUB231SS xmmreg|mask|z,xmmreg,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 bf /r ] AVX512
|
|
VFPCLASSPD kreg|mask,xmmrm128|b64,imm8 [rmi:fv: evex.128.66.0f3a.w1 66 /r ib ] AVX512VL,AVX512DQ
|
|
VFPCLASSPD kreg|mask,ymmrm256|b64,imm8 [rmi:fv: evex.256.66.0f3a.w1 66 /r ib ] AVX512VL,AVX512DQ
|
|
VFPCLASSPD kreg|mask,zmmrm512|b64,imm8 [rmi:fv: evex.512.66.0f3a.w1 66 /r ib ] AVX512DQ
|
|
VFPCLASSPS kreg|mask,xmmrm128|b32,imm8 [rmi:fv: evex.128.66.0f3a.w0 66 /r ib ] AVX512VL,AVX512DQ
|
|
VFPCLASSPS kreg|mask,ymmrm256|b32,imm8 [rmi:fv: evex.256.66.0f3a.w0 66 /r ib ] AVX512VL,AVX512DQ
|
|
VFPCLASSPS kreg|mask,zmmrm512|b32,imm8 [rmi:fv: evex.512.66.0f3a.w0 66 /r ib ] AVX512DQ
|
|
VFPCLASSSD kreg|mask,xmmrm64,imm8 [rmi:t1s: evex.lig.66.0f3a.w1 67 /r ib ] AVX512DQ
|
|
VFPCLASSSS kreg|mask,xmmrm32,imm8 [rmi:t1s: evex.lig.66.0f3a.w0 67 /r ib ] AVX512DQ
|
|
VGATHERDPD xmmreg|mask,xmem64 [rm:t1s: vsibx evex.128.66.0f38.w1 92 /r ] AVX512VL,AVX512
|
|
VGATHERDPD ymmreg|mask,xmem64 [rm:t1s: vsibx evex.256.66.0f38.w1 92 /r ] AVX512VL,AVX512
|
|
VGATHERDPD zmmreg|mask,ymem64 [rm:t1s: vsiby evex.512.66.0f38.w1 92 /r ] AVX512
|
|
VGATHERDPS xmmreg|mask,xmem32 [rm:t1s: vsibx evex.128.66.0f38.w0 92 /r ] AVX512VL,AVX512
|
|
VGATHERDPS ymmreg|mask,ymem32 [rm:t1s: vsiby evex.256.66.0f38.w0 92 /r ] AVX512VL,AVX512
|
|
VGATHERDPS zmmreg|mask,zmem32 [rm:t1s: vsibz evex.512.66.0f38.w0 92 /r ] AVX512
|
|
VGATHERPF0DPD ymem64|mask [m:t1s: vsiby evex.512.66.0f38.w1 c6 /1 ] AVX512PF
|
|
VGATHERPF0DPS zmem32|mask [m:t1s: vsibz evex.512.66.0f38.w0 c6 /1 ] AVX512PF
|
|
VGATHERPF0QPD zmem64|mask [m:t1s: vsibz evex.512.66.0f38.w1 c7 /1 ] AVX512PF
|
|
VGATHERPF0QPS zmem32|mask [m:t1s: vsibz evex.512.66.0f38.w0 c7 /1 ] AVX512PF
|
|
VGATHERPF1DPD ymem64|mask [m:t1s: vsiby evex.512.66.0f38.w1 c6 /2 ] AVX512PF
|
|
VGATHERPF1DPS zmem32|mask [m:t1s: vsibz evex.512.66.0f38.w0 c6 /2 ] AVX512PF
|
|
VGATHERPF1QPD zmem64|mask [m:t1s: vsibz evex.512.66.0f38.w1 c7 /2 ] AVX512PF
|
|
VGATHERPF1QPS zmem32|mask [m:t1s: vsibz evex.512.66.0f38.w0 c7 /2 ] AVX512PF
|
|
VGATHERQPD xmmreg|mask,xmem64 [rm:t1s: vsibx evex.128.66.0f38.w1 93 /r ] AVX512VL,AVX512
|
|
VGATHERQPD ymmreg|mask,ymem64 [rm:t1s: vsiby evex.256.66.0f38.w1 93 /r ] AVX512VL,AVX512
|
|
VGATHERQPD zmmreg|mask,zmem64 [rm:t1s: vsibz evex.512.66.0f38.w1 93 /r ] AVX512
|
|
VGATHERQPS xmmreg|mask,xmem32 [rm:t1s: vsibx evex.128.66.0f38.w0 93 /r ] AVX512VL,AVX512
|
|
VGATHERQPS xmmreg|mask,ymem32 [rm:t1s: vsiby evex.256.66.0f38.w0 93 /r ] AVX512VL,AVX512
|
|
VGATHERQPS ymmreg|mask,zmem32 [rm:t1s: vsibz evex.512.66.0f38.w0 93 /r ] AVX512
|
|
VGETEXPPD xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f38.w1 42 /r ] AVX512VL,AVX512
|
|
VGETEXPPD ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f38.w1 42 /r ] AVX512VL,AVX512
|
|
VGETEXPPD zmmreg|mask|z,zmmrm512|b64|sae [rm:fv: evex.512.66.0f38.w1 42 /r ] AVX512
|
|
VGETEXPPS xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.66.0f38.w0 42 /r ] AVX512VL,AVX512
|
|
VGETEXPPS ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.66.0f38.w0 42 /r ] AVX512VL,AVX512
|
|
VGETEXPPS zmmreg|mask|z,zmmrm512|b32|sae [rm:fv: evex.512.66.0f38.w0 42 /r ] AVX512
|
|
VGETEXPSD xmmreg|mask|z,xmmreg,xmmrm64|sae [rvm:t1s: evex.nds.lig.66.0f38.w1 43 /r ] AVX512
|
|
VGETEXPSS xmmreg|mask|z,xmmreg,xmmrm32|sae [rvm:t1s: evex.nds.lig.66.0f38.w0 43 /r ] AVX512
|
|
VGETMANTPD xmmreg|mask|z,xmmrm128|b64,imm8 [rmi:fv: evex.128.66.0f3a.w1 26 /r ib ] AVX512VL,AVX512
|
|
VGETMANTPD ymmreg|mask|z,ymmrm256|b64,imm8 [rmi:fv: evex.256.66.0f3a.w1 26 /r ib ] AVX512VL,AVX512
|
|
VGETMANTPD zmmreg|mask|z,zmmrm512|b64|sae,imm8 [rmi:fv: evex.512.66.0f3a.w1 26 /r ib ] AVX512
|
|
VGETMANTPS xmmreg|mask|z,xmmrm128|b32,imm8 [rmi:fv: evex.128.66.0f3a.w0 26 /r ib ] AVX512VL,AVX512
|
|
VGETMANTPS ymmreg|mask|z,ymmrm256|b32,imm8 [rmi:fv: evex.256.66.0f3a.w0 26 /r ib ] AVX512VL,AVX512
|
|
VGETMANTPS zmmreg|mask|z,zmmrm512|b32|sae,imm8 [rmi:fv: evex.512.66.0f3a.w0 26 /r ib ] AVX512
|
|
VGETMANTSD xmmreg|mask|z,xmmreg,xmmrm64|sae,imm8 [rvmi:t1s: evex.nds.lig.66.0f3a.w1 27 /r ib ] AVX512
|
|
VGETMANTSS xmmreg|mask|z,xmmreg,xmmrm32|sae,imm8 [rvmi:t1s: evex.nds.lig.66.0f3a.w0 27 /r ib ] AVX512
|
|
VINSERTF32X4 ymmreg|mask|z,ymmreg*,xmmrm128,imm8 [rvmi:t4: evex.nds.256.66.0f3a.w0 18 /r ib ] AVX512VL,AVX512
|
|
VINSERTF32X4 zmmreg|mask|z,zmmreg*,xmmrm128,imm8 [rvmi:t4: evex.nds.512.66.0f3a.w0 18 /r ib ] AVX512
|
|
VINSERTF32X8 zmmreg|mask|z,zmmreg*,ymmrm256,imm8 [rvmi:t8: evex.nds.512.66.0f3a.w0 1a /r ib ] AVX512DQ
|
|
VINSERTF64X2 ymmreg|mask|z,ymmreg*,xmmrm128,imm8 [rvmi:t2: evex.nds.256.66.0f3a.w1 18 /r ib ] AVX512VL,AVX512DQ
|
|
VINSERTF64X2 zmmreg|mask|z,zmmreg*,xmmrm128,imm8 [rvmi:t2: evex.nds.512.66.0f3a.w1 18 /r ib ] AVX512DQ
|
|
VINSERTF64X4 zmmreg|mask|z,zmmreg*,ymmrm256,imm8 [rvmi:t4: evex.nds.512.66.0f3a.w1 1a /r ib ] AVX512
|
|
VINSERTI32X4 ymmreg|mask|z,ymmreg*,xmmrm128,imm8 [rvmi:t4: evex.nds.256.66.0f3a.w0 38 /r ib ] AVX512VL,AVX512
|
|
VINSERTI32X4 zmmreg|mask|z,zmmreg*,xmmrm128,imm8 [rvmi:t4: evex.nds.512.66.0f3a.w0 38 /r ib ] AVX512
|
|
VINSERTI32X8 zmmreg|mask|z,zmmreg*,ymmrm256,imm8 [rvmi:t8: evex.nds.512.66.0f3a.w0 3a /r ib ] AVX512DQ
|
|
VINSERTI64X2 ymmreg|mask|z,ymmreg*,xmmrm128,imm8 [rvmi:t2: evex.nds.256.66.0f3a.w1 38 /r ib ] AVX512VL,AVX512DQ
|
|
VINSERTI64X2 zmmreg|mask|z,zmmreg*,xmmrm128,imm8 [rvmi:t2: evex.nds.512.66.0f3a.w1 38 /r ib ] AVX512DQ
|
|
VINSERTI64X4 zmmreg|mask|z,zmmreg*,ymmrm256,imm8 [rvmi:t4: evex.nds.512.66.0f3a.w1 3a /r ib ] AVX512
|
|
VINSERTPS xmmreg,xmmreg*,xmmrm32,imm8 [rvmi:t1s: evex.nds.128.66.0f3a.w0 21 /r ib ] AVX512
|
|
VMAXPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 5f /r ] AVX512VL,AVX512
|
|
VMAXPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 5f /r ] AVX512VL,AVX512
|
|
VMAXPD zmmreg|mask|z,zmmreg*,zmmrm512|b64|sae [rvm:fv: evex.nds.512.66.0f.w1 5f /r ] AVX512
|
|
VMAXPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 5f /r ] AVX512VL,AVX512
|
|
VMAXPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 5f /r ] AVX512VL,AVX512
|
|
VMAXPS zmmreg|mask|z,zmmreg*,zmmrm512|b32|sae [rvm:fv: evex.nds.512.0f.w0 5f /r ] AVX512
|
|
VMAXSD xmmreg|mask|z,xmmreg*,xmmrm64|sae [rvm:t1s: evex.nds.lig.f2.0f.w1 5f /r ] AVX512
|
|
VMAXSS xmmreg|mask|z,xmmreg*,xmmrm32|sae [rvm:t1s: evex.nds.lig.f3.0f.w0 5f /r ] AVX512
|
|
VMINPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 5d /r ] AVX512VL,AVX512
|
|
VMINPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 5d /r ] AVX512VL,AVX512
|
|
VMINPD zmmreg|mask|z,zmmreg*,zmmrm512|b64|sae [rvm:fv: evex.nds.512.66.0f.w1 5d /r ] AVX512
|
|
VMINPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 5d /r ] AVX512VL,AVX512
|
|
VMINPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 5d /r ] AVX512VL,AVX512
|
|
VMINPS zmmreg|mask|z,zmmreg*,zmmrm512|b32|sae [rvm:fv: evex.nds.512.0f.w0 5d /r ] AVX512
|
|
VMINSD xmmreg|mask|z,xmmreg*,xmmrm64|sae [rvm:t1s: evex.nds.lig.f2.0f.w1 5d /r ] AVX512
|
|
VMINSS xmmreg|mask|z,xmmreg*,xmmrm32|sae [rvm:t1s: evex.nds.lig.f3.0f.w0 5d /r ] AVX512
|
|
VMOVAPD xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.66.0f.w1 28 /r ] AVX512VL,AVX512
|
|
VMOVAPD ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.66.0f.w1 28 /r ] AVX512VL,AVX512
|
|
VMOVAPD zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.66.0f.w1 28 /r ] AVX512
|
|
VMOVAPD xmmreg|mask|z,xmmreg [mr: evex.128.66.0f.w1 29 /r ] AVX512VL,AVX512
|
|
VMOVAPD ymmreg|mask|z,ymmreg [mr: evex.256.66.0f.w1 29 /r ] AVX512VL,AVX512
|
|
VMOVAPD zmmreg|mask|z,zmmreg [mr: evex.512.66.0f.w1 29 /r ] AVX512
|
|
VMOVAPD mem128|mask,xmmreg [mr:fvm: evex.128.66.0f.w1 29 /r ] AVX512VL,AVX512
|
|
VMOVAPD mem256|mask,ymmreg [mr:fvm: evex.256.66.0f.w1 29 /r ] AVX512VL,AVX512
|
|
VMOVAPD mem512|mask,zmmreg [mr:fvm: evex.512.66.0f.w1 29 /r ] AVX512
|
|
VMOVAPS xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.0f.w0 28 /r ] AVX512VL,AVX512
|
|
VMOVAPS ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.0f.w0 28 /r ] AVX512VL,AVX512
|
|
VMOVAPS zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.0f.w0 28 /r ] AVX512
|
|
VMOVAPS xmmreg|mask|z,xmmreg [mr: evex.128.0f.w0 29 /r ] AVX512VL,AVX512
|
|
VMOVAPS ymmreg|mask|z,ymmreg [mr: evex.256.0f.w0 29 /r ] AVX512VL,AVX512
|
|
VMOVAPS zmmreg|mask|z,zmmreg [mr: evex.512.0f.w0 29 /r ] AVX512
|
|
VMOVAPS mem128|mask,xmmreg [mr:fvm: evex.128.0f.w0 29 /r ] AVX512VL,AVX512
|
|
VMOVAPS mem256|mask,ymmreg [mr:fvm: evex.256.0f.w0 29 /r ] AVX512VL,AVX512
|
|
VMOVAPS mem512|mask,zmmreg [mr:fvm: evex.512.0f.w0 29 /r ] AVX512
|
|
VMOVD xmmreg,rm32 [rm:t1s: evex.128.66.0f.w0 6e /r ] AVX512
|
|
VMOVD rm32,xmmreg [mr:t1s: evex.128.66.0f.w0 7e /r ] AVX512
|
|
VMOVDDUP xmmreg|mask|z,xmmrm64 [rm:dup: evex.128.f2.0f.w1 12 /r ] AVX512VL,AVX512
|
|
VMOVDDUP ymmreg|mask|z,ymmrm256 [rm:dup: evex.256.f2.0f.w1 12 /r ] AVX512VL,AVX512
|
|
VMOVDDUP zmmreg|mask|z,zmmrm512 [rm:dup: evex.512.f2.0f.w1 12 /r ] AVX512
|
|
VMOVDQA32 xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.66.0f.w0 6f /r ] AVX512VL,AVX512
|
|
VMOVDQA32 ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.66.0f.w0 6f /r ] AVX512VL,AVX512
|
|
VMOVDQA32 zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.66.0f.w0 6f /r ] AVX512
|
|
VMOVDQA32 xmmrm128|mask|z,xmmreg [mr:fvm: evex.128.66.0f.w0 7f /r ] AVX512VL,AVX512
|
|
VMOVDQA32 ymmrm256|mask|z,ymmreg [mr:fvm: evex.256.66.0f.w0 7f /r ] AVX512VL,AVX512
|
|
VMOVDQA32 zmmrm512|mask|z,zmmreg [mr:fvm: evex.512.66.0f.w0 7f /r ] AVX512
|
|
VMOVDQA64 xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.66.0f.w1 6f /r ] AVX512VL,AVX512
|
|
VMOVDQA64 ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.66.0f.w1 6f /r ] AVX512VL,AVX512
|
|
VMOVDQA64 zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.66.0f.w1 6f /r ] AVX512
|
|
VMOVDQA64 xmmrm128|mask|z,xmmreg [mr:fvm: evex.128.66.0f.w1 7f /r ] AVX512VL,AVX512
|
|
VMOVDQA64 ymmrm256|mask|z,ymmreg [mr:fvm: evex.256.66.0f.w1 7f /r ] AVX512VL,AVX512
|
|
VMOVDQA64 zmmrm512|mask|z,zmmreg [mr:fvm: evex.512.66.0f.w1 7f /r ] AVX512
|
|
VMOVDQU16 xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.f2.0f.w1 6f /r ] AVX512VL,AVX512BW
|
|
VMOVDQU16 ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.f2.0f.w1 6f /r ] AVX512VL,AVX512BW
|
|
VMOVDQU16 zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.f2.0f.w1 6f /r ] AVX512BW
|
|
VMOVDQU16 xmmrm128|mask|z,xmmreg [mr:fvm: evex.128.f2.0f.w1 7f /r ] AVX512VL,AVX512BW
|
|
VMOVDQU16 ymmrm256|mask|z,ymmreg [mr:fvm: evex.256.f2.0f.w1 7f /r ] AVX512VL,AVX512BW
|
|
VMOVDQU16 zmmrm512|mask|z,zmmreg [mr:fvm: evex.512.f2.0f.w1 7f /r ] AVX512BW
|
|
VMOVDQU32 xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.f3.0f.w0 6f /r ] AVX512VL,AVX512
|
|
VMOVDQU32 ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.f3.0f.w0 6f /r ] AVX512VL,AVX512
|
|
VMOVDQU32 zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.f3.0f.w0 6f /r ] AVX512
|
|
VMOVDQU32 xmmrm128|mask|z,xmmreg [mr:fvm: evex.128.f3.0f.w0 7f /r ] AVX512VL,AVX512
|
|
VMOVDQU32 ymmrm256|mask|z,ymmreg [mr:fvm: evex.256.f3.0f.w0 7f /r ] AVX512VL,AVX512
|
|
VMOVDQU32 zmmrm512|mask|z,zmmreg [mr:fvm: evex.512.f3.0f.w0 7f /r ] AVX512
|
|
VMOVDQU64 xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.f3.0f.w1 6f /r ] AVX512VL,AVX512
|
|
VMOVDQU64 ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.f3.0f.w1 6f /r ] AVX512VL,AVX512
|
|
VMOVDQU64 zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.f3.0f.w1 6f /r ] AVX512
|
|
VMOVDQU64 xmmrm128|mask|z,xmmreg [mr:fvm: evex.128.f3.0f.w1 7f /r ] AVX512VL,AVX512
|
|
VMOVDQU64 ymmrm256|mask|z,ymmreg [mr:fvm: evex.256.f3.0f.w1 7f /r ] AVX512VL,AVX512
|
|
VMOVDQU64 zmmrm512|mask|z,zmmreg [mr:fvm: evex.512.f3.0f.w1 7f /r ] AVX512
|
|
VMOVDQU8 xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.f2.0f.w0 6f /r ] AVX512VL,AVX512BW
|
|
VMOVDQU8 ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.f2.0f.w0 6f /r ] AVX512VL,AVX512BW
|
|
VMOVDQU8 zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.f2.0f.w0 6f /r ] AVX512BW
|
|
VMOVDQU8 xmmrm128|mask|z,xmmreg [mr:fvm: evex.128.f2.0f.w0 7f /r ] AVX512VL,AVX512BW
|
|
VMOVDQU8 ymmrm256|mask|z,ymmreg [mr:fvm: evex.256.f2.0f.w0 7f /r ] AVX512VL,AVX512BW
|
|
VMOVDQU8 zmmrm512|mask|z,zmmreg [mr:fvm: evex.512.f2.0f.w0 7f /r ] AVX512BW
|
|
VMOVHLPS xmmreg,xmmreg*,xmmreg [rvm: evex.nds.128.0f.w0 12 /r ] AVX512
|
|
VMOVHPD xmmreg,xmmreg*,mem64 [rvm:t1s: evex.nds.128.66.0f.w1 16 /r ] AVX512
|
|
VMOVHPD mem64,xmmreg [mr:t1s: evex.128.66.0f.w1 17 /r ] AVX512
|
|
VMOVHPS xmmreg,xmmreg*,mem64 [rvm:t2: evex.nds.128.0f.w0 16 /r ] AVX512
|
|
VMOVHPS mem64,xmmreg [mr:t2: evex.128.0f.w0 17 /r ] AVX512
|
|
VMOVLHPS xmmreg,xmmreg*,xmmreg [rvm: evex.nds.128.0f.w0 16 /r ] AVX512
|
|
VMOVLPD xmmreg,xmmreg*,mem64 [rvm:t1s: evex.nds.128.66.0f.w1 12 /r ] AVX512
|
|
VMOVLPD mem64,xmmreg [mr:t1s: evex.128.66.0f.w1 13 /r ] AVX512
|
|
VMOVLPS xmmreg,xmmreg*,mem64 [rvm:t2: evex.nds.128.0f.w0 12 /r ] AVX512
|
|
VMOVLPS mem64,xmmreg [mr:t2: evex.128.0f.w0 13 /r ] AVX512
|
|
VMOVNTDQ mem128,xmmreg [mr:fvm: evex.128.66.0f.w0 e7 /r ] AVX512VL,AVX512
|
|
VMOVNTDQ mem256,ymmreg [mr:fvm: evex.256.66.0f.w0 e7 /r ] AVX512VL,AVX512
|
|
VMOVNTDQ mem512,zmmreg [mr:fvm: evex.512.66.0f.w0 e7 /r ] AVX512
|
|
VMOVNTDQA xmmreg,mem128 [rm:fvm: evex.128.66.0f38.w0 2a /r ] AVX512VL,AVX512
|
|
VMOVNTDQA ymmreg,mem256 [rm:fvm: evex.256.66.0f38.w0 2a /r ] AVX512VL,AVX512
|
|
VMOVNTDQA zmmreg,mem512 [rm:fvm: evex.512.66.0f38.w0 2a /r ] AVX512
|
|
VMOVNTPD mem128,xmmreg [mr:fvm: evex.128.66.0f.w1 2b /r ] AVX512VL,AVX512
|
|
VMOVNTPD mem256,ymmreg [mr:fvm: evex.256.66.0f.w1 2b /r ] AVX512VL,AVX512
|
|
VMOVNTPD mem512,zmmreg [mr:fvm: evex.512.66.0f.w1 2b /r ] AVX512
|
|
VMOVNTPS mem128,xmmreg [mr:fvm: evex.128.0f.w0 2b /r ] AVX512VL,AVX512
|
|
VMOVNTPS mem256,ymmreg [mr:fvm: evex.256.0f.w0 2b /r ] AVX512VL,AVX512
|
|
VMOVNTPS mem512,zmmreg [mr:fvm: evex.512.0f.w0 2b /r ] AVX512
|
|
VMOVQ xmmreg,rm64 [rm:t1s: evex.128.66.0f.w1 6e /r ] AVX512
|
|
VMOVQ rm64,xmmreg [mr:t1s: evex.128.66.0f.w1 7e /r ] AVX512
|
|
VMOVQ xmmreg,xmmrm64 [rm:t1s: evex.128.f3.0f.w1 7e /r ] AVX512
|
|
VMOVQ xmmrm64,xmmreg [mr:t1s: evex.128.66.0f.w1 d6 /r ] AVX512
|
|
VMOVSD xmmreg|mask|z,mem64 [rm:t1s: evex.lig.f2.0f.w1 10 /r ] AVX512
|
|
VMOVSD mem64|mask,xmmreg [mr:t1s: evex.lig.f2.0f.w1 11 /r ] AVX512
|
|
VMOVSD xmmreg|mask|z,xmmreg*,xmmreg [rvm: evex.nds.lig.f2.0f.w1 10 /r ] AVX512
|
|
VMOVSD xmmreg|mask|z,xmmreg*,xmmreg [mvr: evex.nds.lig.f2.0f.w1 11 /r ] AVX512
|
|
VMOVSHDUP xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.f3.0f.w0 16 /r ] AVX512VL,AVX512
|
|
VMOVSHDUP ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.f3.0f.w0 16 /r ] AVX512VL,AVX512
|
|
VMOVSHDUP zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.f3.0f.w0 16 /r ] AVX512
|
|
VMOVSLDUP xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.f3.0f.w0 12 /r ] AVX512VL,AVX512
|
|
VMOVSLDUP ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.f3.0f.w0 12 /r ] AVX512VL,AVX512
|
|
VMOVSLDUP zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.f3.0f.w0 12 /r ] AVX512
|
|
VMOVSS xmmreg|mask|z,mem32 [rm:t1s: evex.lig.f3.0f.w0 10 /r ] AVX512
|
|
VMOVSS mem32|mask,xmmreg [mr:t1s: evex.lig.f3.0f.w0 11 /r ] AVX512
|
|
VMOVSS xmmreg|mask|z,xmmreg*,xmmreg [rvm: evex.nds.lig.f3.0f.w0 10 /r ] AVX512
|
|
VMOVSS xmmreg|mask|z,xmmreg*,xmmreg [mvr: evex.nds.lig.f3.0f.w0 11 /r ] AVX512
|
|
VMOVUPD xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.66.0f.w1 10 /r ] AVX512VL,AVX512
|
|
VMOVUPD ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.66.0f.w1 10 /r ] AVX512VL,AVX512
|
|
VMOVUPD zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.66.0f.w1 10 /r ] AVX512
|
|
VMOVUPD xmmreg|mask|z,xmmreg [mr: evex.128.66.0f.w1 11 /r ] AVX512VL,AVX512
|
|
VMOVUPD ymmreg|mask|z,ymmreg [mr: evex.256.66.0f.w1 11 /r ] AVX512VL,AVX512
|
|
VMOVUPD zmmreg|mask|z,zmmreg [mr: evex.512.66.0f.w1 11 /r ] AVX512
|
|
VMOVUPD mem128|mask,xmmreg [mr:fvm: evex.128.66.0f.w1 11 /r ] AVX512VL,AVX512
|
|
VMOVUPD mem256|mask,ymmreg [mr:fvm: evex.256.66.0f.w1 11 /r ] AVX512VL,AVX512
|
|
VMOVUPD mem512|mask,zmmreg [mr:fvm: evex.512.66.0f.w1 11 /r ] AVX512
|
|
VMOVUPS xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.0f.w0 10 /r ] AVX512VL,AVX512
|
|
VMOVUPS ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.0f.w0 10 /r ] AVX512VL,AVX512
|
|
VMOVUPS zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.0f.w0 10 /r ] AVX512
|
|
VMOVUPS xmmreg|mask|z,xmmreg [mr: evex.128.0f.w0 11 /r ] AVX512VL,AVX512
|
|
VMOVUPS ymmreg|mask|z,ymmreg [mr: evex.256.0f.w0 11 /r ] AVX512VL,AVX512
|
|
VMOVUPS zmmreg|mask|z,zmmreg [mr: evex.512.0f.w0 11 /r ] AVX512
|
|
VMOVUPS mem128|mask,xmmreg [mr:fvm: evex.128.0f.w0 11 /r ] AVX512VL,AVX512
|
|
VMOVUPS mem256|mask,ymmreg [mr:fvm: evex.256.0f.w0 11 /r ] AVX512VL,AVX512
|
|
VMOVUPS mem512|mask,zmmreg [mr:fvm: evex.512.0f.w0 11 /r ] AVX512
|
|
VMULPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 59 /r ] AVX512VL,AVX512
|
|
VMULPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 59 /r ] AVX512VL,AVX512
|
|
VMULPD zmmreg|mask|z,zmmreg*,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f.w1 59 /r ] AVX512
|
|
VMULPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 59 /r ] AVX512VL,AVX512
|
|
VMULPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 59 /r ] AVX512VL,AVX512
|
|
VMULPS zmmreg|mask|z,zmmreg*,zmmrm512|b32|er [rvm:fv: evex.nds.512.0f.w0 59 /r ] AVX512
|
|
VMULSD xmmreg|mask|z,xmmreg*,xmmrm64|er [rvm:t1s: evex.nds.lig.f2.0f.w1 59 /r ] AVX512
|
|
VMULSS xmmreg|mask|z,xmmreg*,xmmrm32|er [rvm:t1s: evex.nds.lig.f3.0f.w0 59 /r ] AVX512
|
|
VORPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 56 /r ] AVX512VL,AVX512DQ
|
|
VORPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 56 /r ] AVX512VL,AVX512DQ
|
|
VORPD zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 56 /r ] AVX512DQ
|
|
VORPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 56 /r ] AVX512VL,AVX512DQ
|
|
VORPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 56 /r ] AVX512VL,AVX512DQ
|
|
VORPS zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.0f.w0 56 /r ] AVX512DQ
|
|
VPABSB xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.66.0f38.wig 1c /r ] AVX512VL,AVX512BW
|
|
VPABSB ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.66.0f38.wig 1c /r ] AVX512VL,AVX512BW
|
|
VPABSB zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.66.0f38.wig 1c /r ] AVX512BW
|
|
VPABSD xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.66.0f38.w0 1e /r ] AVX512VL,AVX512
|
|
VPABSD ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.66.0f38.w0 1e /r ] AVX512VL,AVX512
|
|
VPABSD zmmreg|mask|z,zmmrm512|b32 [rm:fv: evex.512.66.0f38.w0 1e /r ] AVX512
|
|
VPABSQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f38.w1 1f /r ] AVX512VL,AVX512
|
|
VPABSQ ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f38.w1 1f /r ] AVX512VL,AVX512
|
|
VPABSQ zmmreg|mask|z,zmmrm512|b64 [rm:fv: evex.512.66.0f38.w1 1f /r ] AVX512
|
|
VPABSW xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.66.0f38.wig 1d /r ] AVX512VL,AVX512BW
|
|
VPABSW ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.66.0f38.wig 1d /r ] AVX512VL,AVX512BW
|
|
VPABSW zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.66.0f38.wig 1d /r ] AVX512BW
|
|
VPACKSSDW xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 6b /r ] AVX512VL,AVX512BW
|
|
VPACKSSDW ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 6b /r ] AVX512VL,AVX512BW
|
|
VPACKSSDW zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 6b /r ] AVX512BW
|
|
VPACKSSWB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig 63 /r ] AVX512VL,AVX512BW
|
|
VPACKSSWB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig 63 /r ] AVX512VL,AVX512BW
|
|
VPACKSSWB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig 63 /r ] AVX512BW
|
|
VPACKUSDW xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 2b /r ] AVX512VL,AVX512BW
|
|
VPACKUSDW ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 2b /r ] AVX512VL,AVX512BW
|
|
VPACKUSDW zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 2b /r ] AVX512BW
|
|
VPACKUSWB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig 67 /r ] AVX512VL,AVX512BW
|
|
VPACKUSWB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig 67 /r ] AVX512VL,AVX512BW
|
|
VPACKUSWB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig 67 /r ] AVX512BW
|
|
VPADDB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig fc /r ] AVX512VL,AVX512BW
|
|
VPADDB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig fc /r ] AVX512VL,AVX512BW
|
|
VPADDB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig fc /r ] AVX512BW
|
|
VPADDD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 fe /r ] AVX512VL,AVX512
|
|
VPADDD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 fe /r ] AVX512VL,AVX512
|
|
VPADDD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 fe /r ] AVX512
|
|
VPADDQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 d4 /r ] AVX512VL,AVX512
|
|
VPADDQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 d4 /r ] AVX512VL,AVX512
|
|
VPADDQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 d4 /r ] AVX512
|
|
VPADDSB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig ec /r ] AVX512VL,AVX512BW
|
|
VPADDSB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig ec /r ] AVX512VL,AVX512BW
|
|
VPADDSB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig ec /r ] AVX512BW
|
|
VPADDSW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig ed /r ] AVX512VL,AVX512BW
|
|
VPADDSW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig ed /r ] AVX512VL,AVX512BW
|
|
VPADDSW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig ed /r ] AVX512BW
|
|
VPADDUSB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig dc /r ] AVX512VL,AVX512BW
|
|
VPADDUSB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig dc /r ] AVX512VL,AVX512BW
|
|
VPADDUSB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig dc /r ] AVX512BW
|
|
VPADDUSW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig dd /r ] AVX512VL,AVX512BW
|
|
VPADDUSW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig dd /r ] AVX512VL,AVX512BW
|
|
VPADDUSW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig dd /r ] AVX512BW
|
|
VPADDW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig fd /r ] AVX512VL,AVX512BW
|
|
VPADDW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig fd /r ] AVX512VL,AVX512BW
|
|
VPADDW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig fd /r ] AVX512BW
|
|
VPALIGNR xmmreg|mask|z,xmmreg*,xmmrm128,imm8 [rvmi:fvm: evex.nds.128.66.0f3a.wig 0f /r ib ] AVX512VL,AVX512BW
|
|
VPALIGNR ymmreg|mask|z,ymmreg*,ymmrm256,imm8 [rvmi:fvm: evex.nds.256.66.0f3a.wig 0f /r ib ] AVX512VL,AVX512BW
|
|
VPALIGNR zmmreg|mask|z,zmmreg*,zmmrm512,imm8 [rvmi:fvm: evex.nds.512.66.0f3a.wig 0f /r ib ] AVX512BW
|
|
VPANDD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 db /r ] AVX512VL,AVX512
|
|
VPANDD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 db /r ] AVX512VL,AVX512
|
|
VPANDD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 db /r ] AVX512
|
|
VPANDND xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 df /r ] AVX512VL,AVX512
|
|
VPANDND ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 df /r ] AVX512VL,AVX512
|
|
VPANDND zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 df /r ] AVX512
|
|
VPANDNQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 df /r ] AVX512VL,AVX512
|
|
VPANDNQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 df /r ] AVX512VL,AVX512
|
|
VPANDNQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 df /r ] AVX512
|
|
VPANDQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 db /r ] AVX512VL,AVX512
|
|
VPANDQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 db /r ] AVX512VL,AVX512
|
|
VPANDQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 db /r ] AVX512
|
|
VPAVGB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig e0 /r ] AVX512VL,AVX512BW
|
|
VPAVGB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig e0 /r ] AVX512VL,AVX512BW
|
|
VPAVGB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig e0 /r ] AVX512BW
|
|
VPAVGW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig e3 /r ] AVX512VL,AVX512BW
|
|
VPAVGW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig e3 /r ] AVX512VL,AVX512BW
|
|
VPAVGW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig e3 /r ] AVX512BW
|
|
VPBLENDMB xmmreg|mask|z,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w0 66 /r ] AVX512VL,AVX512BW
|
|
VPBLENDMB ymmreg|mask|z,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w0 66 /r ] AVX512VL,AVX512BW
|
|
VPBLENDMB zmmreg|mask|z,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w0 66 /r ] AVX512BW
|
|
VPBLENDMD xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 64 /r ] AVX512VL,AVX512
|
|
VPBLENDMD ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 64 /r ] AVX512VL,AVX512
|
|
VPBLENDMD zmmreg|mask|z,zmmreg,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 64 /r ] AVX512
|
|
VPBLENDMQ xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 64 /r ] AVX512VL,AVX512
|
|
VPBLENDMQ ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 64 /r ] AVX512VL,AVX512
|
|
VPBLENDMQ zmmreg|mask|z,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 64 /r ] AVX512
|
|
VPBLENDMW xmmreg|mask|z,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w1 66 /r ] AVX512VL,AVX512BW
|
|
VPBLENDMW ymmreg|mask|z,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w1 66 /r ] AVX512VL,AVX512BW
|
|
VPBLENDMW zmmreg|mask|z,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w1 66 /r ] AVX512BW
|
|
VPBROADCASTB xmmreg|mask|z,xmmrm8 [rm:t1s8: evex.128.66.0f38.w0 78 /r ] AVX512VL,AVX512BW
|
|
VPBROADCASTB ymmreg|mask|z,xmmrm8 [rm:t1s8: evex.256.66.0f38.w0 78 /r ] AVX512VL,AVX512BW
|
|
VPBROADCASTB zmmreg|mask|z,xmmrm8 [rm:t1s8: evex.512.66.0f38.w0 78 /r ] AVX512BW
|
|
VPBROADCASTB xmmreg|mask|z,reg8 [rm: evex.128.66.0f38.w0 7a /r ] AVX512VL,AVX512BW
|
|
VPBROADCASTB xmmreg|mask|z,reg16 [rm: evex.128.66.0f38.w0 7a /r ] AVX512VL,AVX512BW
|
|
VPBROADCASTB xmmreg|mask|z,reg32 [rm: evex.128.66.0f38.w0 7a /r ] AVX512VL,AVX512BW
|
|
VPBROADCASTB xmmreg|mask|z,reg64 [rm: evex.128.66.0f38.w0 7a /r ] AVX512VL,AVX512BW
|
|
VPBROADCASTB ymmreg|mask|z,reg8 [rm: evex.256.66.0f38.w0 7a /r ] AVX512VL,AVX512BW
|
|
VPBROADCASTB ymmreg|mask|z,reg16 [rm: evex.256.66.0f38.w0 7a /r ] AVX512VL,AVX512BW
|
|
VPBROADCASTB ymmreg|mask|z,reg32 [rm: evex.256.66.0f38.w0 7a /r ] AVX512VL,AVX512BW
|
|
VPBROADCASTB ymmreg|mask|z,reg64 [rm: evex.256.66.0f38.w0 7a /r ] AVX512VL,AVX512BW
|
|
VPBROADCASTB zmmreg|mask|z,reg8 [rm: evex.512.66.0f38.w0 7a /r ] AVX512BW
|
|
VPBROADCASTB zmmreg|mask|z,reg16 [rm: evex.512.66.0f38.w0 7a /r ] AVX512BW
|
|
VPBROADCASTB zmmreg|mask|z,reg32 [rm: evex.512.66.0f38.w0 7a /r ] AVX512BW
|
|
VPBROADCASTB zmmreg|mask|z,reg64 [rm: evex.512.66.0f38.w0 7a /r ] AVX512BW
|
|
VPBROADCASTD xmmreg|mask|z,mem32 [rm:t1s: evex.128.66.0f38.w0 58 /r ] AVX512VL,AVX512
|
|
VPBROADCASTD ymmreg|mask|z,mem32 [rm:t1s: evex.256.66.0f38.w0 58 /r ] AVX512VL,AVX512
|
|
VPBROADCASTD zmmreg|mask|z,mem32 [rm:t1s: evex.512.66.0f38.w0 58 /r ] AVX512
|
|
VPBROADCASTD xmmreg|mask|z,xmmreg [rm: evex.128.66.0f38.w0 58 /r ] AVX512VL,AVX512
|
|
VPBROADCASTD ymmreg|mask|z,xmmreg [rm: evex.256.66.0f38.w0 58 /r ] AVX512VL,AVX512
VPBROADCASTD zmmreg|mask|z,xmmreg [rm: evex.512.66.0f38.w0 58 /r ] AVX512
VPBROADCASTD xmmreg|mask|z,reg32 [rm: evex.128.66.0f38.w0 7c /r ] AVX512VL,AVX512
VPBROADCASTD ymmreg|mask|z,reg32 [rm: evex.256.66.0f38.w0 7c /r ] AVX512VL,AVX512
VPBROADCASTD zmmreg|mask|z,reg32 [rm: evex.512.66.0f38.w0 7c /r ] AVX512
VPBROADCASTMB2Q xmmreg,kreg [rm: evex.128.f3.0f38.w1 2a /r ] AVX512VL,AVX512CD
VPBROADCASTMB2Q ymmreg,kreg [rm: evex.256.f3.0f38.w1 2a /r ] AVX512VL,AVX512CD
VPBROADCASTMB2Q zmmreg,kreg [rm: evex.512.f3.0f38.w1 2a /r ] AVX512CD
VPBROADCASTMW2D xmmreg,kreg [rm: evex.128.f3.0f38.w0 3a /r ] AVX512VL,AVX512CD
VPBROADCASTMW2D ymmreg,kreg [rm: evex.256.f3.0f38.w0 3a /r ] AVX512VL,AVX512CD
VPBROADCASTMW2D zmmreg,kreg [rm: evex.512.f3.0f38.w0 3a /r ] AVX512CD
VPBROADCASTQ xmmreg|mask|z,mem64 [rm:t1s: evex.128.66.0f38.w1 59 /r ] AVX512VL,AVX512
VPBROADCASTQ ymmreg|mask|z,mem64 [rm:t1s: evex.256.66.0f38.w1 59 /r ] AVX512VL,AVX512
VPBROADCASTQ zmmreg|mask|z,mem64 [rm:t1s: evex.512.66.0f38.w1 59 /r ] AVX512
VPBROADCASTQ xmmreg|mask|z,xmmreg [rm: evex.128.66.0f38.w1 59 /r ] AVX512VL,AVX512
VPBROADCASTQ ymmreg|mask|z,xmmreg [rm: evex.256.66.0f38.w1 59 /r ] AVX512VL,AVX512
VPBROADCASTQ zmmreg|mask|z,xmmreg [rm: evex.512.66.0f38.w1 59 /r ] AVX512
VPBROADCASTQ xmmreg|mask|z,reg64 [rm: evex.128.66.0f38.w1 7c /r ] AVX512VL,AVX512
VPBROADCASTQ ymmreg|mask|z,reg64 [rm: evex.256.66.0f38.w1 7c /r ] AVX512VL,AVX512
VPBROADCASTQ zmmreg|mask|z,reg64 [rm: evex.512.66.0f38.w1 7c /r ] AVX512
VPBROADCASTW xmmreg|mask|z,xmmrm16 [rm:t1s16: evex.128.66.0f38.w0 79 /r ] AVX512VL,AVX512BW
VPBROADCASTW ymmreg|mask|z,xmmrm16 [rm:t1s16: evex.256.66.0f38.w0 79 /r ] AVX512VL,AVX512BW
VPBROADCASTW zmmreg|mask|z,xmmrm16 [rm:t1s16: evex.512.66.0f38.w0 79 /r ] AVX512BW
VPBROADCASTW xmmreg|mask|z,reg16 [rm: evex.128.66.0f38.w0 7b /r ] AVX512VL,AVX512BW
VPBROADCASTW xmmreg|mask|z,reg32 [rm: evex.128.66.0f38.w0 7b /r ] AVX512VL,AVX512BW
VPBROADCASTW xmmreg|mask|z,reg64 [rm: evex.128.66.0f38.w0 7b /r ] AVX512VL,AVX512BW
VPBROADCASTW ymmreg|mask|z,reg16 [rm: evex.256.66.0f38.w0 7b /r ] AVX512VL,AVX512BW
VPBROADCASTW ymmreg|mask|z,reg32 [rm: evex.256.66.0f38.w0 7b /r ] AVX512VL,AVX512BW
VPBROADCASTW ymmreg|mask|z,reg64 [rm: evex.256.66.0f38.w0 7b /r ] AVX512VL,AVX512BW
VPBROADCASTW zmmreg|mask|z,reg16 [rm: evex.512.66.0f38.w0 7b /r ] AVX512BW
VPBROADCASTW zmmreg|mask|z,reg32 [rm: evex.512.66.0f38.w0 7b /r ] AVX512BW
VPBROADCASTW zmmreg|mask|z,reg64 [rm: evex.512.66.0f38.w0 7b /r ] AVX512BW
; VPCMPEQx and VPCMPGTx come in two flavors: SSE-like, and VPCMP with immediate. They are both
; valid, but prefer the SSE version as it is one byte shorter.
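; Illustration (not a table entry; register names are arbitrary): an instruction
; such as "vpcmpeqd k1,xmm2,xmm3" matches both the SSE-like pattern below
; (evex ... 0f ... 76 /r, no immediate) and the systematic pattern in the next
; group (evex ... 0f3a ... 1f /r 00, which carries an immediate byte), so the
; SSE-like patterns are listed first to make the shorter encoding the preferred one.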
VPCMPEQB kreg|mask,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig 74 /r ] AVX512VL,AVX512BW
VPCMPEQB kreg|mask,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig 74 /r ] AVX512VL,AVX512BW
VPCMPEQB kreg|mask,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig 74 /r ] AVX512BW
VPCMPEQD kreg|mask,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 76 /r ] AVX512VL,AVX512
VPCMPEQD kreg|mask,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 76 /r ] AVX512VL,AVX512
VPCMPEQD kreg|mask,zmmreg,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 76 /r ] AVX512
VPCMPEQQ kreg|mask,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 29 /r ] AVX512VL,AVX512
VPCMPEQQ kreg|mask,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 29 /r ] AVX512VL,AVX512
VPCMPEQQ kreg|mask,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 29 /r ] AVX512
VPCMPEQW kreg|mask,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig 75 /r ] AVX512VL,AVX512BW
VPCMPEQW kreg|mask,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig 75 /r ] AVX512VL,AVX512BW
VPCMPEQW kreg|mask,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig 75 /r ] AVX512BW
VPCMPGTB kreg|mask,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig 64 /r ] AVX512VL,AVX512BW
VPCMPGTB kreg|mask,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig 64 /r ] AVX512VL,AVX512BW
VPCMPGTB kreg|mask,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig 64 /r ] AVX512BW
VPCMPGTD kreg|mask,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 66 /r ] AVX512VL,AVX512
VPCMPGTD kreg|mask,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 66 /r ] AVX512VL,AVX512
VPCMPGTD kreg|mask,zmmreg,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 66 /r ] AVX512
VPCMPGTQ kreg|mask,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 37 /r ] AVX512VL,AVX512
VPCMPGTQ kreg|mask,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 37 /r ] AVX512VL,AVX512
VPCMPGTQ kreg|mask,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 37 /r ] AVX512
VPCMPGTW kreg|mask,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig 65 /r ] AVX512VL,AVX512BW
VPCMPGTW kreg|mask,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig 65 /r ] AVX512VL,AVX512BW
VPCMPGTW kreg|mask,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig 65 /r ] AVX512BW
; The systematic VPCMP with immediate instructions
VPCMPEQB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3f /r 00 ] AVX512VL,AVX512BW
VPCMPEQB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3f /r 00 ] AVX512VL,AVX512BW
VPCMPEQB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3f /r 00 ] AVX512BW
VPCMPEQD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1f /r 00 ] AVX512VL,AVX512
VPCMPEQD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1f /r 00 ] AVX512VL,AVX512
VPCMPEQD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1f /r 00 ] AVX512
VPCMPEQQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1f /r 00 ] AVX512VL,AVX512
VPCMPEQQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1f /r 00 ] AVX512VL,AVX512
VPCMPEQQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1f /r 00 ] AVX512
VPCMPEQUB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3e /r 00 ] AVX512VL,AVX512BW
VPCMPEQUB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3e /r 00 ] AVX512VL,AVX512BW
VPCMPEQUB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3e /r 00 ] AVX512BW
VPCMPEQUD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1e /r 00 ] AVX512VL,AVX512
VPCMPEQUD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1e /r 00 ] AVX512VL,AVX512
VPCMPEQUD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1e /r 00 ] AVX512
VPCMPEQUQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1e /r 00 ] AVX512VL,AVX512
VPCMPEQUQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1e /r 00 ] AVX512VL,AVX512
VPCMPEQUQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1e /r 00 ] AVX512
VPCMPEQUW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3e /r 00 ] AVX512VL,AVX512BW
VPCMPEQUW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3e /r 00 ] AVX512VL,AVX512BW
VPCMPEQUW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3e /r 00 ] AVX512BW
VPCMPEQW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3f /r 00 ] AVX512VL,AVX512BW
VPCMPEQW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3f /r 00 ] AVX512VL,AVX512BW
VPCMPEQW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3f /r 00 ] AVX512BW
VPCMPGEB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3f /r 05 ] AVX512VL,AVX512BW
VPCMPGEB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3f /r 05 ] AVX512VL,AVX512BW
VPCMPGEB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3f /r 05 ] AVX512BW
VPCMPGED kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1f /r 05 ] AVX512VL,AVX512
VPCMPGED kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1f /r 05 ] AVX512VL,AVX512
VPCMPGED kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1f /r 05 ] AVX512
VPCMPGEQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1f /r 05 ] AVX512VL,AVX512
VPCMPGEQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1f /r 05 ] AVX512VL,AVX512
VPCMPGEQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1f /r 05 ] AVX512
VPCMPGEUB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3e /r 05 ] AVX512VL,AVX512BW
VPCMPGEUB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3e /r 05 ] AVX512VL,AVX512BW
VPCMPGEUB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3e /r 05 ] AVX512BW
VPCMPGEUD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1e /r 05 ] AVX512VL,AVX512
VPCMPGEUD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1e /r 05 ] AVX512VL,AVX512
VPCMPGEUD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1e /r 05 ] AVX512
VPCMPGEUQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1e /r 05 ] AVX512VL,AVX512
VPCMPGEUQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1e /r 05 ] AVX512VL,AVX512
VPCMPGEUQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1e /r 05 ] AVX512
VPCMPGEUW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3e /r 05 ] AVX512VL,AVX512BW
VPCMPGEUW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3e /r 05 ] AVX512VL,AVX512BW
VPCMPGEUW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3e /r 05 ] AVX512BW
VPCMPGEW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3f /r 05 ] AVX512VL,AVX512BW
VPCMPGEW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3f /r 05 ] AVX512VL,AVX512BW
VPCMPGEW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3f /r 05 ] AVX512BW
VPCMPGTB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3f /r 06 ] AVX512VL,AVX512BW
VPCMPGTB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3f /r 06 ] AVX512VL,AVX512BW
VPCMPGTB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3f /r 06 ] AVX512BW
VPCMPGTD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1f /r 06 ] AVX512VL,AVX512
VPCMPGTD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1f /r 06 ] AVX512VL,AVX512
VPCMPGTD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1f /r 06 ] AVX512
VPCMPGTQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1f /r 06 ] AVX512VL,AVX512
VPCMPGTQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1f /r 06 ] AVX512VL,AVX512
VPCMPGTQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1f /r 06 ] AVX512
VPCMPGTUB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3e /r 06 ] AVX512VL,AVX512BW
VPCMPGTUB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3e /r 06 ] AVX512VL,AVX512BW
VPCMPGTUB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3e /r 06 ] AVX512BW
VPCMPGTUD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1e /r 06 ] AVX512VL,AVX512
VPCMPGTUD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1e /r 06 ] AVX512VL,AVX512
VPCMPGTUD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1e /r 06 ] AVX512
VPCMPGTUQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1e /r 06 ] AVX512VL,AVX512
VPCMPGTUQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1e /r 06 ] AVX512VL,AVX512
VPCMPGTUQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1e /r 06 ] AVX512
VPCMPGTUW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3e /r 06 ] AVX512VL,AVX512BW
VPCMPGTUW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3e /r 06 ] AVX512VL,AVX512BW
VPCMPGTUW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3e /r 06 ] AVX512BW
VPCMPGTW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3f /r 06 ] AVX512VL,AVX512BW
VPCMPGTW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3f /r 06 ] AVX512VL,AVX512BW
VPCMPGTW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3f /r 06 ] AVX512BW
VPCMPLEB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3f /r 02 ] AVX512VL,AVX512BW
VPCMPLEB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3f /r 02 ] AVX512VL,AVX512BW
VPCMPLEB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3f /r 02 ] AVX512BW
VPCMPLED kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1f /r 02 ] AVX512VL,AVX512
VPCMPLED kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1f /r 02 ] AVX512VL,AVX512
VPCMPLED kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1f /r 02 ] AVX512
VPCMPLEQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1f /r 02 ] AVX512VL,AVX512
VPCMPLEQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1f /r 02 ] AVX512VL,AVX512
VPCMPLEQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1f /r 02 ] AVX512
VPCMPLEUB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3e /r 02 ] AVX512VL,AVX512BW
VPCMPLEUB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3e /r 02 ] AVX512VL,AVX512BW
VPCMPLEUB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3e /r 02 ] AVX512BW
VPCMPLEUD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1e /r 02 ] AVX512VL,AVX512
VPCMPLEUD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1e /r 02 ] AVX512VL,AVX512
VPCMPLEUD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1e /r 02 ] AVX512
VPCMPLEUQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1e /r 02 ] AVX512VL,AVX512
VPCMPLEUQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1e /r 02 ] AVX512VL,AVX512
VPCMPLEUQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1e /r 02 ] AVX512
VPCMPLEUW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3e /r 02 ] AVX512VL,AVX512BW
VPCMPLEUW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3e /r 02 ] AVX512VL,AVX512BW
VPCMPLEUW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3e /r 02 ] AVX512BW
VPCMPLEW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3f /r 02 ] AVX512VL,AVX512BW
VPCMPLEW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3f /r 02 ] AVX512VL,AVX512BW
VPCMPLEW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3f /r 02 ] AVX512BW
VPCMPLTB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3f /r 01 ] AVX512VL,AVX512BW
VPCMPLTB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3f /r 01 ] AVX512VL,AVX512BW
VPCMPLTB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3f /r 01 ] AVX512BW
VPCMPLTD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1f /r 01 ] AVX512VL,AVX512
VPCMPLTD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1f /r 01 ] AVX512VL,AVX512
VPCMPLTD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1f /r 01 ] AVX512
VPCMPLTQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1f /r 01 ] AVX512VL,AVX512
VPCMPLTQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1f /r 01 ] AVX512VL,AVX512
VPCMPLTQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1f /r 01 ] AVX512
VPCMPLTUB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3e /r 01 ] AVX512VL,AVX512BW
VPCMPLTUB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3e /r 01 ] AVX512VL,AVX512BW
VPCMPLTUB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3e /r 01 ] AVX512BW
VPCMPLTUD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1e /r 01 ] AVX512VL,AVX512
VPCMPLTUD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1e /r 01 ] AVX512VL,AVX512
VPCMPLTUD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1e /r 01 ] AVX512
VPCMPLTUQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1e /r 01 ] AVX512VL,AVX512
VPCMPLTUQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1e /r 01 ] AVX512VL,AVX512
VPCMPLTUQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1e /r 01 ] AVX512
VPCMPLTUW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3e /r 01 ] AVX512VL,AVX512BW
VPCMPLTUW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3e /r 01 ] AVX512VL,AVX512BW
VPCMPLTUW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3e /r 01 ] AVX512BW
VPCMPLTW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3f /r 01 ] AVX512VL,AVX512BW
VPCMPLTW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3f /r 01 ] AVX512VL,AVX512BW
VPCMPLTW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3f /r 01 ] AVX512BW
VPCMPNEQB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3f /r 04 ] AVX512VL,AVX512BW
VPCMPNEQB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3f /r 04 ] AVX512VL,AVX512BW
VPCMPNEQB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3f /r 04 ] AVX512BW
VPCMPNEQD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1f /r 04 ] AVX512VL,AVX512
VPCMPNEQD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1f /r 04 ] AVX512VL,AVX512
VPCMPNEQD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1f /r 04 ] AVX512
VPCMPNEQQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1f /r 04 ] AVX512VL,AVX512
VPCMPNEQQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1f /r 04 ] AVX512VL,AVX512
VPCMPNEQQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1f /r 04 ] AVX512
VPCMPNEQUB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3e /r 04 ] AVX512VL,AVX512BW
VPCMPNEQUB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3e /r 04 ] AVX512VL,AVX512BW
VPCMPNEQUB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3e /r 04 ] AVX512BW
VPCMPNEQUD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1e /r 04 ] AVX512VL,AVX512
VPCMPNEQUD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1e /r 04 ] AVX512VL,AVX512
VPCMPNEQUD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1e /r 04 ] AVX512
VPCMPNEQUQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1e /r 04 ] AVX512VL,AVX512
VPCMPNEQUQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1e /r 04 ] AVX512VL,AVX512
VPCMPNEQUQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1e /r 04 ] AVX512
VPCMPNEQUW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3e /r 04 ] AVX512VL,AVX512BW
VPCMPNEQUW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3e /r 04 ] AVX512VL,AVX512BW
VPCMPNEQUW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3e /r 04 ] AVX512BW
VPCMPNEQW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3f /r 04 ] AVX512VL,AVX512BW
VPCMPNEQW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3f /r 04 ] AVX512VL,AVX512BW
VPCMPNEQW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3f /r 04 ] AVX512BW
VPCMPNGTB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3f /r 02 ] AVX512VL,AVX512BW
VPCMPNGTB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3f /r 02 ] AVX512VL,AVX512BW
VPCMPNGTB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3f /r 02 ] AVX512BW
VPCMPNGTD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1f /r 02 ] AVX512VL,AVX512
VPCMPNGTD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1f /r 02 ] AVX512VL,AVX512
VPCMPNGTD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1f /r 02 ] AVX512
VPCMPNGTQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1f /r 02 ] AVX512VL,AVX512
VPCMPNGTQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1f /r 02 ] AVX512VL,AVX512
VPCMPNGTQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1f /r 02 ] AVX512
VPCMPNGTUB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3e /r 02 ] AVX512VL,AVX512BW
VPCMPNGTUB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3e /r 02 ] AVX512VL,AVX512BW
VPCMPNGTUB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3e /r 02 ] AVX512BW
VPCMPNGTUD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1e /r 02 ] AVX512VL,AVX512
VPCMPNGTUD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1e /r 02 ] AVX512VL,AVX512
VPCMPNGTUD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1e /r 02 ] AVX512
VPCMPNGTUQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1e /r 02 ] AVX512VL,AVX512
VPCMPNGTUQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1e /r 02 ] AVX512VL,AVX512
VPCMPNGTUQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1e /r 02 ] AVX512
VPCMPNGTUW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3e /r 02 ] AVX512VL,AVX512BW
VPCMPNGTUW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3e /r 02 ] AVX512VL,AVX512BW
VPCMPNGTUW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3e /r 02 ] AVX512BW
VPCMPNGTW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3f /r 02 ] AVX512VL,AVX512BW
VPCMPNGTW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3f /r 02 ] AVX512VL,AVX512BW
VPCMPNGTW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3f /r 02 ] AVX512BW
VPCMPNLEB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3f /r 06 ] AVX512VL,AVX512BW
VPCMPNLEB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3f /r 06 ] AVX512VL,AVX512BW
VPCMPNLEB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3f /r 06 ] AVX512BW
VPCMPNLED kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1f /r 06 ] AVX512VL,AVX512
VPCMPNLED kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1f /r 06 ] AVX512VL,AVX512
VPCMPNLED kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1f /r 06 ] AVX512
VPCMPNLEQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1f /r 06 ] AVX512VL,AVX512
VPCMPNLEQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1f /r 06 ] AVX512VL,AVX512
VPCMPNLEQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1f /r 06 ] AVX512
VPCMPNLEUB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3e /r 06 ] AVX512VL,AVX512BW
VPCMPNLEUB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3e /r 06 ] AVX512VL,AVX512BW
VPCMPNLEUB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3e /r 06 ] AVX512BW
VPCMPNLEUD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1e /r 06 ] AVX512VL,AVX512
VPCMPNLEUD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1e /r 06 ] AVX512VL,AVX512
VPCMPNLEUD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1e /r 06 ] AVX512
VPCMPNLEUQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1e /r 06 ] AVX512VL,AVX512
VPCMPNLEUQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1e /r 06 ] AVX512VL,AVX512
VPCMPNLEUQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1e /r 06 ] AVX512
VPCMPNLEUW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3e /r 06 ] AVX512VL,AVX512BW
VPCMPNLEUW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3e /r 06 ] AVX512VL,AVX512BW
VPCMPNLEUW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3e /r 06 ] AVX512BW
VPCMPNLEW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3f /r 06 ] AVX512VL,AVX512BW
VPCMPNLEW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3f /r 06 ] AVX512VL,AVX512BW
VPCMPNLEW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3f /r 06 ] AVX512BW
VPCMPNLTB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3f /r 05 ] AVX512VL,AVX512BW
VPCMPNLTB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3f /r 05 ] AVX512VL,AVX512BW
VPCMPNLTB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3f /r 05 ] AVX512BW
VPCMPNLTD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1f /r 05 ] AVX512VL,AVX512
VPCMPNLTD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1f /r 05 ] AVX512VL,AVX512
VPCMPNLTD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1f /r 05 ] AVX512
VPCMPNLTQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1f /r 05 ] AVX512VL,AVX512
VPCMPNLTQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1f /r 05 ] AVX512VL,AVX512
VPCMPNLTQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1f /r 05 ] AVX512
VPCMPNLTUB kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3e /r 05 ] AVX512VL,AVX512BW
VPCMPNLTUB kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3e /r 05 ] AVX512VL,AVX512BW
VPCMPNLTUB kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3e /r 05 ] AVX512BW
VPCMPNLTUD kreg|mask,xmmreg,xmmrm128|b32 [rvmi:fv: evex.nds.128.66.0f3a.w0 1e /r 05 ] AVX512VL,AVX512
VPCMPNLTUD kreg|mask,ymmreg,ymmrm256|b32 [rvmi:fv: evex.nds.256.66.0f3a.w0 1e /r 05 ] AVX512VL,AVX512
VPCMPNLTUD kreg|mask,zmmreg,zmmrm512|b32 [rvmi:fv: evex.nds.512.66.0f3a.w0 1e /r 05 ] AVX512
VPCMPNLTUQ kreg|mask,xmmreg,xmmrm128|b64 [rvmi:fv: evex.nds.128.66.0f3a.w1 1e /r 05 ] AVX512VL,AVX512
VPCMPNLTUQ kreg|mask,ymmreg,ymmrm256|b64 [rvmi:fv: evex.nds.256.66.0f3a.w1 1e /r 05 ] AVX512VL,AVX512
VPCMPNLTUQ kreg|mask,zmmreg,zmmrm512|b64 [rvmi:fv: evex.nds.512.66.0f3a.w1 1e /r 05 ] AVX512
VPCMPNLTUW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3e /r 05 ] AVX512VL,AVX512BW
VPCMPNLTUW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3e /r 05 ] AVX512VL,AVX512BW
VPCMPNLTUW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3e /r 05 ] AVX512BW
VPCMPNLTW kreg|mask,xmmreg,xmmrm128 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3f /r 05 ] AVX512VL,AVX512BW
VPCMPNLTW kreg|mask,ymmreg,ymmrm256 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3f /r 05 ] AVX512VL,AVX512BW
VPCMPNLTW kreg|mask,zmmreg,zmmrm512 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3f /r 05 ] AVX512BW
VPCMPB kreg|mask,xmmreg,xmmrm128,imm8 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3f /r ib ] AVX512VL,AVX512BW
VPCMPB kreg|mask,ymmreg,ymmrm256,imm8 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3f /r ib ] AVX512VL,AVX512BW
VPCMPB kreg|mask,zmmreg,zmmrm512,imm8 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3f /r ib ] AVX512BW
VPCMPD kreg|mask,xmmreg,xmmrm128|b32,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w0 1f /r ib ] AVX512VL,AVX512
VPCMPD kreg|mask,ymmreg,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w0 1f /r ib ] AVX512VL,AVX512
VPCMPD kreg|mask,zmmreg,zmmrm512|b32,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w0 1f /r ib ] AVX512
VPCMPQ kreg|mask,xmmreg,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w1 1f /r ib ] AVX512VL,AVX512
VPCMPQ kreg|mask,ymmreg,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 1f /r ib ] AVX512VL,AVX512
VPCMPQ kreg|mask,zmmreg,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 1f /r ib ] AVX512
VPCMPUB kreg|mask,xmmreg,xmmrm128,imm8 [rvmi:fvm: evex.nds.128.66.0f3a.w0 3e /r ib ] AVX512VL,AVX512BW
VPCMPUB kreg|mask,ymmreg,ymmrm256,imm8 [rvmi:fvm: evex.nds.256.66.0f3a.w0 3e /r ib ] AVX512VL,AVX512BW
VPCMPUB kreg|mask,zmmreg,zmmrm512,imm8 [rvmi:fvm: evex.nds.512.66.0f3a.w0 3e /r ib ] AVX512BW
VPCMPUD kreg|mask,xmmreg,xmmrm128|b32,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w0 1e /r ib ] AVX512VL,AVX512
VPCMPUD kreg|mask,ymmreg,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w0 1e /r ib ] AVX512VL,AVX512
VPCMPUD kreg|mask,zmmreg,zmmrm512|b32,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w0 1e /r ib ] AVX512
VPCMPUQ kreg|mask,xmmreg,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w1 1e /r ib ] AVX512VL,AVX512
VPCMPUQ kreg|mask,ymmreg,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 1e /r ib ] AVX512VL,AVX512
VPCMPUQ kreg|mask,zmmreg,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 1e /r ib ] AVX512
VPCMPUW kreg|mask,xmmreg,xmmrm128,imm8 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3e /r ib ] AVX512VL,AVX512BW
VPCMPUW kreg|mask,ymmreg,ymmrm256,imm8 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3e /r ib ] AVX512VL,AVX512BW
VPCMPUW kreg|mask,zmmreg,zmmrm512,imm8 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3e /r ib ] AVX512BW
VPCMPW kreg|mask,xmmreg,xmmrm128,imm8 [rvmi:fvm: evex.nds.128.66.0f3a.w1 3f /r ib ] AVX512VL,AVX512BW
VPCMPW kreg|mask,ymmreg,ymmrm256,imm8 [rvmi:fvm: evex.nds.256.66.0f3a.w1 3f /r ib ] AVX512VL,AVX512BW
VPCMPW kreg|mask,zmmreg,zmmrm512,imm8 [rvmi:fvm: evex.nds.512.66.0f3a.w1 3f /r ib ] AVX512BW
VPCOMPRESSD mem128|mask,xmmreg [mr:t1s: evex.128.66.0f38.w0 8b /r ] AVX512VL,AVX512
VPCOMPRESSD mem256|mask,ymmreg [mr:t1s: evex.256.66.0f38.w0 8b /r ] AVX512VL,AVX512
VPCOMPRESSD mem512|mask,zmmreg [mr:t1s: evex.512.66.0f38.w0 8b /r ] AVX512
VPCOMPRESSD xmmreg|mask|z,xmmreg [mr: evex.128.66.0f38.w0 8b /r ] AVX512VL,AVX512
VPCOMPRESSD ymmreg|mask|z,ymmreg [mr: evex.256.66.0f38.w0 8b /r ] AVX512VL,AVX512
VPCOMPRESSD zmmreg|mask|z,zmmreg [mr: evex.512.66.0f38.w0 8b /r ] AVX512
VPCOMPRESSQ mem128|mask,xmmreg [mr:t1s: evex.128.66.0f38.w1 8b /r ] AVX512VL,AVX512
VPCOMPRESSQ mem256|mask,ymmreg [mr:t1s: evex.256.66.0f38.w1 8b /r ] AVX512VL,AVX512
VPCOMPRESSQ mem512|mask,zmmreg [mr:t1s: evex.512.66.0f38.w1 8b /r ] AVX512
VPCOMPRESSQ xmmreg|mask|z,xmmreg [mr: evex.128.66.0f38.w1 8b /r ] AVX512VL,AVX512
VPCOMPRESSQ ymmreg|mask|z,ymmreg [mr: evex.256.66.0f38.w1 8b /r ] AVX512VL,AVX512
VPCOMPRESSQ zmmreg|mask|z,zmmreg [mr: evex.512.66.0f38.w1 8b /r ] AVX512
VPCONFLICTD xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.66.0f38.w0 c4 /r ] AVX512VL,AVX512CD
VPCONFLICTD ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.66.0f38.w0 c4 /r ] AVX512VL,AVX512CD
VPCONFLICTD zmmreg|mask|z,zmmrm512|b32 [rm:fv: evex.512.66.0f38.w0 c4 /r ] AVX512CD
VPCONFLICTQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f38.w1 c4 /r ] AVX512VL,AVX512CD
VPCONFLICTQ ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f38.w1 c4 /r ] AVX512VL,AVX512CD
VPCONFLICTQ zmmreg|mask|z,zmmrm512|b64 [rm:fv: evex.512.66.0f38.w1 c4 /r ] AVX512CD
VPERMB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w0 8d /r ] AVX512VL,AVX512VBMI
VPERMB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w0 8d /r ] AVX512VL,AVX512VBMI
VPERMB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w0 8d /r ] AVX512VBMI
VPERMD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 36 /r ] AVX512VL,AVX512
VPERMD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 36 /r ] AVX512
VPERMI2B xmmreg|mask|z,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w0 75 /r ] AVX512VL,AVX512VBMI
VPERMI2B ymmreg|mask|z,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w0 75 /r ] AVX512VL,AVX512VBMI
VPERMI2B zmmreg|mask|z,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w0 75 /r ] AVX512VBMI
VPERMI2D xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 76 /r ] AVX512VL,AVX512
VPERMI2D ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 76 /r ] AVX512VL,AVX512
VPERMI2D zmmreg|mask|z,zmmreg,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 76 /r ] AVX512
VPERMI2PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 77 /r ] AVX512VL,AVX512
VPERMI2PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 77 /r ] AVX512VL,AVX512
VPERMI2PD zmmreg|mask|z,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 77 /r ] AVX512
VPERMI2PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 77 /r ] AVX512VL,AVX512
VPERMI2PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 77 /r ] AVX512VL,AVX512
VPERMI2PS zmmreg|mask|z,zmmreg,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 77 /r ] AVX512
VPERMI2Q xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 76 /r ] AVX512VL,AVX512
VPERMI2Q ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 76 /r ] AVX512VL,AVX512
VPERMI2Q zmmreg|mask|z,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 76 /r ] AVX512
VPERMI2W xmmreg|mask|z,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w1 75 /r ] AVX512VL,AVX512BW
VPERMI2W ymmreg|mask|z,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w1 75 /r ] AVX512VL,AVX512BW
VPERMI2W zmmreg|mask|z,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w1 75 /r ] AVX512BW
VPERMILPD xmmreg|mask|z,xmmrm128|b64,imm8 [rmi:fv: evex.128.66.0f3a.w1 05 /r ib ] AVX512VL,AVX512
VPERMILPD ymmreg|mask|z,ymmrm256|b64,imm8 [rmi:fv: evex.256.66.0f3a.w1 05 /r ib ] AVX512VL,AVX512
VPERMILPD zmmreg|mask|z,zmmrm512|b64,imm8 [rmi:fv: evex.512.66.0f3a.w1 05 /r ib ] AVX512
VPERMILPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 0d /r ] AVX512VL,AVX512
VPERMILPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 0d /r ] AVX512VL,AVX512
VPERMILPD zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 0d /r ] AVX512
VPERMILPS xmmreg|mask|z,xmmrm128|b32,imm8 [rmi:fv: evex.128.66.0f3a.w0 04 /r ib ] AVX512VL,AVX512
VPERMILPS ymmreg|mask|z,ymmrm256|b32,imm8 [rmi:fv: evex.256.66.0f3a.w0 04 /r ib ] AVX512VL,AVX512
VPERMILPS zmmreg|mask|z,zmmrm512|b32,imm8 [rmi:fv: evex.512.66.0f3a.w0 04 /r ib ] AVX512
VPERMILPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 0c /r ] AVX512VL,AVX512
VPERMILPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 0c /r ] AVX512VL,AVX512
VPERMILPS zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 0c /r ] AVX512
VPERMPD ymmreg|mask|z,ymmrm256|b64,imm8 [rmi:fv: evex.256.66.0f3a.w1 01 /r ib ] AVX512VL,AVX512
VPERMPD zmmreg|mask|z,zmmrm512|b64,imm8 [rmi:fv: evex.512.66.0f3a.w1 01 /r ib ] AVX512
VPERMPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 16 /r ] AVX512VL,AVX512
VPERMPD zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 16 /r ] AVX512
VPERMPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 16 /r ] AVX512VL,AVX512
VPERMPS zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 16 /r ] AVX512
VPERMQ ymmreg|mask|z,ymmrm256|b64,imm8 [rmi:fv: evex.256.66.0f3a.w1 00 /r ib ] AVX512VL,AVX512
VPERMQ zmmreg|mask|z,zmmrm512|b64,imm8 [rmi:fv: evex.512.66.0f3a.w1 00 /r ib ] AVX512
VPERMQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 36 /r ] AVX512VL,AVX512
VPERMQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 36 /r ] AVX512
VPERMT2B xmmreg|mask|z,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w0 7d /r ] AVX512VL,AVX512VBMI
VPERMT2B ymmreg|mask|z,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w0 7d /r ] AVX512VL,AVX512VBMI
VPERMT2B zmmreg|mask|z,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w0 7d /r ] AVX512VBMI
VPERMT2D xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 7e /r ] AVX512VL,AVX512
VPERMT2D ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 7e /r ] AVX512VL,AVX512
VPERMT2D zmmreg|mask|z,zmmreg,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 7e /r ] AVX512
VPERMT2PD xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 7f /r ] AVX512VL,AVX512
VPERMT2PD ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 7f /r ] AVX512VL,AVX512
VPERMT2PD zmmreg|mask|z,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 7f /r ] AVX512
VPERMT2PS xmmreg|mask|z,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 7f /r ] AVX512VL,AVX512
VPERMT2PS ymmreg|mask|z,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 7f /r ] AVX512VL,AVX512
VPERMT2PS zmmreg|mask|z,zmmreg,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 7f /r ] AVX512
VPERMT2Q xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 7e /r ] AVX512VL,AVX512
VPERMT2Q ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 7e /r ] AVX512VL,AVX512
VPERMT2Q zmmreg|mask|z,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 7e /r ] AVX512
VPERMT2W xmmreg|mask|z,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w1 7d /r ] AVX512VL,AVX512BW
VPERMT2W ymmreg|mask|z,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w1 7d /r ] AVX512VL,AVX512BW
VPERMT2W zmmreg|mask|z,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w1 7d /r ] AVX512BW
VPERMW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w1 8d /r ] AVX512VL,AVX512BW
VPERMW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w1 8d /r ] AVX512VL,AVX512BW
VPERMW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w1 8d /r ] AVX512BW
VPEXPANDD xmmreg|mask|z,mem128 [rm:t1s: evex.128.66.0f38.w0 89 /r ] AVX512VL,AVX512
VPEXPANDD ymmreg|mask|z,mem256 [rm:t1s: evex.256.66.0f38.w0 89 /r ] AVX512VL,AVX512
VPEXPANDD zmmreg|mask|z,mem512 [rm:t1s: evex.512.66.0f38.w0 89 /r ] AVX512
VPEXPANDD xmmreg|mask|z,xmmreg [rm:t1s: evex.128.66.0f38.w0 89 /r ] AVX512VL,AVX512
VPEXPANDD ymmreg|mask|z,ymmreg [rm:t1s: evex.256.66.0f38.w0 89 /r ] AVX512VL,AVX512
VPEXPANDD zmmreg|mask|z,zmmreg [rm:t1s: evex.512.66.0f38.w0 89 /r ] AVX512
VPEXPANDQ xmmreg|mask|z,mem128 [rm:t1s: evex.128.66.0f38.w1 89 /r ] AVX512VL,AVX512
VPEXPANDQ ymmreg|mask|z,mem256 [rm:t1s: evex.256.66.0f38.w1 89 /r ] AVX512VL,AVX512
VPEXPANDQ zmmreg|mask|z,mem512 [rm:t1s: evex.512.66.0f38.w1 89 /r ] AVX512
VPEXPANDQ xmmreg|mask|z,xmmreg [rm:t1s: evex.128.66.0f38.w1 89 /r ] AVX512VL,AVX512
VPEXPANDQ ymmreg|mask|z,ymmreg [rm:t1s: evex.256.66.0f38.w1 89 /r ] AVX512VL,AVX512
VPEXPANDQ zmmreg|mask|z,zmmreg [rm:t1s: evex.512.66.0f38.w1 89 /r ] AVX512
VPEXTRB reg8,xmmreg,imm8 [mri:t1s8: evex.128.66.0f3a.wig 14 /r ib ] AVX512BW
VPEXTRB reg16,xmmreg,imm8 [mri:t1s8: evex.128.66.0f3a.wig 14 /r ib ] AVX512BW
VPEXTRB reg32,xmmreg,imm8 [mri:t1s8: evex.128.66.0f3a.wig 14 /r ib ] AVX512BW
VPEXTRB reg64,xmmreg,imm8 [mri:t1s8: evex.128.66.0f3a.wig 14 /r ib ] AVX512BW
VPEXTRB mem8,xmmreg,imm8 [mri:t1s8: evex.128.66.0f3a.wig 14 /r ib ] AVX512BW
VPEXTRD rm32,xmmreg,imm8 [mri:t1s: evex.128.66.0f3a.w0 16 /r ib ] AVX512DQ
VPEXTRQ rm64,xmmreg,imm8 [mri:t1s: evex.128.66.0f3a.w1 16 /r ib ] AVX512DQ
VPEXTRW reg16,xmmreg,imm8 [mri:t1s16: evex.128.66.0f3a.wig 15 /r ib ] AVX512BW
VPEXTRW reg32,xmmreg,imm8 [mri:t1s16: evex.128.66.0f3a.wig 15 /r ib ] AVX512BW
VPEXTRW reg64,xmmreg,imm8 [mri:t1s16: evex.128.66.0f3a.wig 15 /r ib ] AVX512BW
VPEXTRW mem16,xmmreg,imm8 [mri:t1s16: evex.128.66.0f3a.wig 15 /r ib ] AVX512BW
VPEXTRW reg16,xmmreg,imm8 [rmi: evex.128.66.0f.wig c5 /r ib ] AVX512BW
VPEXTRW reg32,xmmreg,imm8 [rmi: evex.128.66.0f.wig c5 /r ib ] AVX512BW
VPEXTRW reg64,xmmreg,imm8 [rmi: evex.128.66.0f.wig c5 /r ib ] AVX512BW
VPGATHERDD xmmreg|mask,xmem32 [rm:t1s: vsibx evex.128.66.0f38.w0 90 /r ] AVX512VL,AVX512
VPGATHERDD ymmreg|mask,ymem32 [rm:t1s: vsiby evex.256.66.0f38.w0 90 /r ] AVX512VL,AVX512
VPGATHERDD zmmreg|mask,zmem32 [rm:t1s: vsibz evex.512.66.0f38.w0 90 /r ] AVX512
VPGATHERDQ xmmreg|mask,xmem64 [rm:t1s: vsibx evex.128.66.0f38.w1 90 /r ] AVX512VL,AVX512
VPGATHERDQ ymmreg|mask,xmem64 [rm:t1s: vsibx evex.256.66.0f38.w1 90 /r ] AVX512VL,AVX512
VPGATHERDQ zmmreg|mask,ymem64 [rm:t1s: vsiby evex.512.66.0f38.w1 90 /r ] AVX512
VPGATHERQD xmmreg|mask,xmem32 [rm:t1s: vsibx evex.128.66.0f38.w0 91 /r ] AVX512VL,AVX512
VPGATHERQD xmmreg|mask,ymem32 [rm:t1s: vsiby evex.256.66.0f38.w0 91 /r ] AVX512VL,AVX512
VPGATHERQD ymmreg|mask,zmem32 [rm:t1s: vsibz evex.512.66.0f38.w0 91 /r ] AVX512
VPGATHERQQ xmmreg|mask,xmem64 [rm:t1s: vsibx evex.128.66.0f38.w1 91 /r ] AVX512VL,AVX512
VPGATHERQQ ymmreg|mask,ymem64 [rm:t1s: vsiby evex.256.66.0f38.w1 91 /r ] AVX512VL,AVX512
VPGATHERQQ zmmreg|mask,zmem64 [rm:t1s: vsibz evex.512.66.0f38.w1 91 /r ] AVX512
VPINSRB xmmreg,xmmreg*,reg32,imm8 [rvmi:t1s8: evex.nds.128.66.0f3a.wig 20 /r ib ] AVX512BW
VPINSRB xmmreg,xmmreg*,mem8,imm8 [rvmi:t1s8: evex.nds.128.66.0f3a.wig 20 /r ib ] AVX512BW
VPINSRD xmmreg,xmmreg*,rm32,imm8 [rvmi:t1s: evex.nds.128.66.0f3a.w0 22 /r ib ] AVX512DQ
VPINSRQ xmmreg,xmmreg*,rm64,imm8 [rvmi:t1s: evex.nds.128.66.0f3a.w1 22 /r ib ] AVX512DQ
VPINSRW xmmreg,xmmreg*,reg32,imm8 [rvmi:t1s16: evex.nds.128.66.0f.wig c4 /r ib ] AVX512BW
VPINSRW xmmreg,xmmreg*,mem16,imm8 [rvmi:t1s16: evex.nds.128.66.0f.wig c4 /r ib ] AVX512BW
VPLZCNTD xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.66.0f38.w0 44 /r ] AVX512VL,AVX512CD
VPLZCNTD ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.66.0f38.w0 44 /r ] AVX512VL,AVX512CD
VPLZCNTD zmmreg|mask|z,zmmrm512|b32 [rm:fv: evex.512.66.0f38.w0 44 /r ] AVX512CD
VPLZCNTQ xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f38.w1 44 /r ] AVX512VL,AVX512CD
VPLZCNTQ ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f38.w1 44 /r ] AVX512VL,AVX512CD
VPLZCNTQ zmmreg|mask|z,zmmrm512|b64 [rm:fv: evex.512.66.0f38.w1 44 /r ] AVX512CD
VPMADD52HUQ xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 b5 /r ] AVX512VL,AVX512IFMA
VPMADD52HUQ ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 b5 /r ] AVX512VL,AVX512IFMA
VPMADD52HUQ zmmreg|mask|z,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 b5 /r ] AVX512IFMA
VPMADD52LUQ xmmreg|mask|z,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 b4 /r ] AVX512VL,AVX512IFMA
VPMADD52LUQ ymmreg|mask|z,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 b4 /r ] AVX512VL,AVX512IFMA
VPMADD52LUQ zmmreg|mask|z,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 b4 /r ] AVX512IFMA
VPMADDUBSW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.wig 04 /r ] AVX512VL,AVX512BW
VPMADDUBSW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.wig 04 /r ] AVX512VL,AVX512BW
VPMADDUBSW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.wig 04 /r ] AVX512BW
VPMADDWD xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig f5 /r ] AVX512VL,AVX512BW
VPMADDWD ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig f5 /r ] AVX512VL,AVX512BW
VPMADDWD zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig f5 /r ] AVX512BW
VPMAXSB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.wig 3c /r ] AVX512VL,AVX512BW
VPMAXSB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.wig 3c /r ] AVX512VL,AVX512BW
VPMAXSB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.wig 3c /r ] AVX512BW
VPMAXSD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 3d /r ] AVX512VL,AVX512
VPMAXSD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 3d /r ] AVX512VL,AVX512
VPMAXSD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 3d /r ] AVX512
VPMAXSQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 3d /r ] AVX512VL,AVX512
VPMAXSQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 3d /r ] AVX512VL,AVX512
VPMAXSQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 3d /r ] AVX512
VPMAXSW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig ee /r ] AVX512VL,AVX512BW
VPMAXSW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig ee /r ] AVX512VL,AVX512BW
VPMAXSW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig ee /r ] AVX512BW
VPMAXUB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig de /r ] AVX512VL,AVX512BW
VPMAXUB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig de /r ] AVX512VL,AVX512BW
VPMAXUB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig de /r ] AVX512BW
VPMAXUD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 3f /r ] AVX512VL,AVX512
VPMAXUD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 3f /r ] AVX512VL,AVX512
VPMAXUD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 3f /r ] AVX512
VPMAXUQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 3f /r ] AVX512VL,AVX512
VPMAXUQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 3f /r ] AVX512VL,AVX512
VPMAXUQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 3f /r ] AVX512
VPMAXUW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.wig 3e /r ] AVX512VL,AVX512BW
VPMAXUW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.wig 3e /r ] AVX512VL,AVX512BW
VPMAXUW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.wig 3e /r ] AVX512BW
VPMINSB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.wig 38 /r ] AVX512VL,AVX512BW
VPMINSB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.wig 38 /r ] AVX512VL,AVX512BW
VPMINSB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.wig 38 /r ] AVX512BW
VPMINSD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 39 /r ] AVX512VL,AVX512
VPMINSD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 39 /r ] AVX512VL,AVX512
VPMINSD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 39 /r ] AVX512
VPMINSQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 39 /r ] AVX512VL,AVX512
VPMINSQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 39 /r ] AVX512VL,AVX512
VPMINSQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 39 /r ] AVX512
VPMINSW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig ea /r ] AVX512VL,AVX512BW
VPMINSW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig ea /r ] AVX512VL,AVX512BW
VPMINSW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig ea /r ] AVX512BW
VPMINUB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig da /r ] AVX512VL,AVX512BW
VPMINUB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig da /r ] AVX512VL,AVX512BW
VPMINUB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig da /r ] AVX512BW
VPMINUD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 3b /r ] AVX512VL,AVX512
VPMINUD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 3b /r ] AVX512VL,AVX512
VPMINUD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 3b /r ] AVX512
VPMINUQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 3b /r ] AVX512VL,AVX512
VPMINUQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 3b /r ] AVX512VL,AVX512
VPMINUQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 3b /r ] AVX512
VPMINUW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.wig 3a /r ] AVX512VL,AVX512BW
VPMINUW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.wig 3a /r ] AVX512VL,AVX512BW
VPMINUW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.wig 3a /r ] AVX512BW
VPMOVB2M kreg,xmmreg [rm: evex.128.f3.0f38.w0 29 /r ] AVX512VL,AVX512BW
VPMOVB2M kreg,ymmreg [rm: evex.256.f3.0f38.w0 29 /r ] AVX512VL,AVX512BW
VPMOVB2M kreg,zmmreg [rm: evex.512.f3.0f38.w0 29 /r ] AVX512BW
VPMOVD2M kreg,xmmreg [rm: evex.128.f3.0f38.w0 39 /r ] AVX512VL,AVX512DQ
VPMOVD2M kreg,ymmreg [rm: evex.256.f3.0f38.w0 39 /r ] AVX512VL,AVX512DQ
VPMOVD2M kreg,zmmreg [rm: evex.512.f3.0f38.w0 39 /r ] AVX512DQ
VPMOVDB xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 31 /r ] AVX512VL,AVX512
VPMOVDB xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 31 /r ] AVX512VL,AVX512
VPMOVDB xmmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 31 /r ] AVX512
VPMOVDB mem32|mask,xmmreg [mr:qvm: evex.128.f3.0f38.w0 31 /r ] AVX512VL,AVX512
VPMOVDB mem64|mask,ymmreg [mr:qvm: evex.256.f3.0f38.w0 31 /r ] AVX512VL,AVX512
VPMOVDB mem128|mask,zmmreg [mr:qvm: evex.512.f3.0f38.w0 31 /r ] AVX512
VPMOVDW xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 33 /r ] AVX512VL,AVX512
VPMOVDW xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 33 /r ] AVX512VL,AVX512
VPMOVDW ymmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 33 /r ] AVX512
VPMOVDW mem64|mask,xmmreg [mr:hvm: evex.128.f3.0f38.w0 33 /r ] AVX512VL,AVX512
VPMOVDW mem128|mask,ymmreg [mr:hvm: evex.256.f3.0f38.w0 33 /r ] AVX512VL,AVX512
VPMOVDW mem256|mask,zmmreg [mr:hvm: evex.512.f3.0f38.w0 33 /r ] AVX512
VPMOVM2B xmmreg,kreg [rm: evex.128.f3.0f38.w0 28 /r ] AVX512VL,AVX512BW
VPMOVM2B ymmreg,kreg [rm: evex.256.f3.0f38.w0 28 /r ] AVX512VL,AVX512BW
VPMOVM2B zmmreg,kreg [rm: evex.512.f3.0f38.w0 28 /r ] AVX512BW
VPMOVM2D xmmreg,kreg [rm: evex.128.f3.0f38.w0 38 /r ] AVX512VL,AVX512DQ
VPMOVM2D ymmreg,kreg [rm: evex.256.f3.0f38.w0 38 /r ] AVX512VL,AVX512DQ
VPMOVM2D zmmreg,kreg [rm: evex.512.f3.0f38.w0 38 /r ] AVX512DQ
VPMOVM2Q xmmreg,kreg [rm: evex.128.f3.0f38.w1 38 /r ] AVX512VL,AVX512DQ
VPMOVM2Q ymmreg,kreg [rm: evex.256.f3.0f38.w1 38 /r ] AVX512VL,AVX512DQ
VPMOVM2Q zmmreg,kreg [rm: evex.512.f3.0f38.w1 38 /r ] AVX512DQ
VPMOVM2W xmmreg,kreg [rm: evex.128.f3.0f38.w1 28 /r ] AVX512VL,AVX512BW
VPMOVM2W ymmreg,kreg [rm: evex.256.f3.0f38.w1 28 /r ] AVX512VL,AVX512BW
VPMOVM2W zmmreg,kreg [rm: evex.512.f3.0f38.w1 28 /r ] AVX512BW
VPMOVQ2M kreg,xmmreg [rm: evex.128.f3.0f38.w1 39 /r ] AVX512VL,AVX512DQ
VPMOVQ2M kreg,ymmreg [rm: evex.256.f3.0f38.w1 39 /r ] AVX512VL,AVX512DQ
VPMOVQ2M kreg,zmmreg [rm: evex.512.f3.0f38.w1 39 /r ] AVX512DQ
VPMOVQB xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 32 /r ] AVX512VL,AVX512
VPMOVQB xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 32 /r ] AVX512VL,AVX512
VPMOVQB xmmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 32 /r ] AVX512
VPMOVQB mem16|mask,xmmreg [mr:ovm: evex.128.f3.0f38.w0 32 /r ] AVX512VL,AVX512
VPMOVQB mem32|mask,ymmreg [mr:ovm: evex.256.f3.0f38.w0 32 /r ] AVX512VL,AVX512
VPMOVQB mem64|mask,zmmreg [mr:ovm: evex.512.f3.0f38.w0 32 /r ] AVX512
VPMOVQD xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 35 /r ] AVX512VL,AVX512
VPMOVQD xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 35 /r ] AVX512VL,AVX512
VPMOVQD ymmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 35 /r ] AVX512
VPMOVQD mem64|mask,xmmreg [mr:hvm: evex.128.f3.0f38.w0 35 /r ] AVX512VL,AVX512
VPMOVQD mem128|mask,ymmreg [mr:hvm: evex.256.f3.0f38.w0 35 /r ] AVX512VL,AVX512
VPMOVQD mem256|mask,zmmreg [mr:hvm: evex.512.f3.0f38.w0 35 /r ] AVX512
VPMOVQW xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 34 /r ] AVX512VL,AVX512
VPMOVQW xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 34 /r ] AVX512VL,AVX512
VPMOVQW xmmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 34 /r ] AVX512
VPMOVQW mem32|mask,xmmreg [mr:qvm: evex.128.f3.0f38.w0 34 /r ] AVX512VL,AVX512
VPMOVQW mem64|mask,ymmreg [mr:qvm: evex.256.f3.0f38.w0 34 /r ] AVX512VL,AVX512
VPMOVQW mem128|mask,zmmreg [mr:qvm: evex.512.f3.0f38.w0 34 /r ] AVX512
VPMOVSDB xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 21 /r ] AVX512VL,AVX512
VPMOVSDB xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 21 /r ] AVX512VL,AVX512
VPMOVSDB xmmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 21 /r ] AVX512
VPMOVSDB mem32|mask,xmmreg [mr:qvm: evex.128.f3.0f38.w0 21 /r ] AVX512VL,AVX512
VPMOVSDB mem64|mask,ymmreg [mr:qvm: evex.256.f3.0f38.w0 21 /r ] AVX512VL,AVX512
VPMOVSDB mem128|mask,zmmreg [mr:qvm: evex.512.f3.0f38.w0 21 /r ] AVX512
VPMOVSDW xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 23 /r ] AVX512VL,AVX512
VPMOVSDW xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 23 /r ] AVX512VL,AVX512
VPMOVSDW ymmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 23 /r ] AVX512
VPMOVSDW mem64|mask,xmmreg [mr:hvm: evex.128.f3.0f38.w0 23 /r ] AVX512VL,AVX512
VPMOVSDW mem128|mask,ymmreg [mr:hvm: evex.256.f3.0f38.w0 23 /r ] AVX512VL,AVX512
VPMOVSDW mem256|mask,zmmreg [mr:hvm: evex.512.f3.0f38.w0 23 /r ] AVX512
VPMOVSQB xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 22 /r ] AVX512VL,AVX512
VPMOVSQB xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 22 /r ] AVX512VL,AVX512
VPMOVSQB xmmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 22 /r ] AVX512
VPMOVSQB mem16|mask,xmmreg [mr:ovm: evex.128.f3.0f38.w0 22 /r ] AVX512VL,AVX512
VPMOVSQB mem32|mask,ymmreg [mr:ovm: evex.256.f3.0f38.w0 22 /r ] AVX512VL,AVX512
VPMOVSQB mem64|mask,zmmreg [mr:ovm: evex.512.f3.0f38.w0 22 /r ] AVX512
VPMOVSQD xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 25 /r ] AVX512VL,AVX512
VPMOVSQD xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 25 /r ] AVX512VL,AVX512
VPMOVSQD ymmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 25 /r ] AVX512
VPMOVSQD mem64|mask,xmmreg [mr:hvm: evex.128.f3.0f38.w0 25 /r ] AVX512VL,AVX512
VPMOVSQD mem128|mask,ymmreg [mr:hvm: evex.256.f3.0f38.w0 25 /r ] AVX512VL,AVX512
VPMOVSQD mem256|mask,zmmreg [mr:hvm: evex.512.f3.0f38.w0 25 /r ] AVX512
VPMOVSQW xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 24 /r ] AVX512VL,AVX512
VPMOVSQW xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 24 /r ] AVX512VL,AVX512
VPMOVSQW xmmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 24 /r ] AVX512
VPMOVSQW mem32|mask,xmmreg [mr:qvm: evex.128.f3.0f38.w0 24 /r ] AVX512VL,AVX512
VPMOVSQW mem64|mask,ymmreg [mr:qvm: evex.256.f3.0f38.w0 24 /r ] AVX512VL,AVX512
VPMOVSQW mem128|mask,zmmreg [mr:qvm: evex.512.f3.0f38.w0 24 /r ] AVX512
VPMOVSWB xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 20 /r ] AVX512VL,AVX512BW
VPMOVSWB xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 20 /r ] AVX512VL,AVX512BW
VPMOVSWB ymmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 20 /r ] AVX512BW
VPMOVSWB mem64|mask,xmmreg [mr:hvm: evex.128.f3.0f38.w0 20 /r ] AVX512VL,AVX512BW
VPMOVSWB mem128|mask,ymmreg [mr:hvm: evex.256.f3.0f38.w0 20 /r ] AVX512VL,AVX512BW
VPMOVSWB mem256|mask,zmmreg [mr:hvm: evex.512.f3.0f38.w0 20 /r ] AVX512BW
VPMOVSXBD xmmreg|mask|z,xmmrm32 [rm:qvm: evex.128.66.0f38.wig 21 /r ] AVX512VL,AVX512
VPMOVSXBD ymmreg|mask|z,xmmrm64 [rm:qvm: evex.256.66.0f38.wig 21 /r ] AVX512VL,AVX512
VPMOVSXBD zmmreg|mask|z,xmmrm128 [rm:qvm: evex.512.66.0f38.wig 21 /r ] AVX512
VPMOVSXBQ xmmreg|mask|z,xmmrm16 [rm:ovm: evex.128.66.0f38.wig 22 /r ] AVX512VL,AVX512
VPMOVSXBQ ymmreg|mask|z,xmmrm32 [rm:ovm: evex.256.66.0f38.wig 22 /r ] AVX512VL,AVX512
VPMOVSXBQ zmmreg|mask|z,xmmrm64 [rm:ovm: evex.512.66.0f38.wig 22 /r ] AVX512
VPMOVSXBW xmmreg|mask|z,xmmrm64 [rm:hvm: evex.128.66.0f38.wig 20 /r ] AVX512VL,AVX512BW
VPMOVSXBW ymmreg|mask|z,xmmrm128 [rm:hvm: evex.256.66.0f38.wig 20 /r ] AVX512VL,AVX512BW
VPMOVSXBW zmmreg|mask|z,ymmrm256 [rm:hvm: evex.512.66.0f38.wig 20 /r ] AVX512BW
VPMOVSXDQ xmmreg|mask|z,xmmrm64 [rm:hvm: evex.128.66.0f38.w0 25 /r ] AVX512VL,AVX512
VPMOVSXDQ ymmreg|mask|z,xmmrm128 [rm:hvm: evex.256.66.0f38.w0 25 /r ] AVX512VL,AVX512
VPMOVSXDQ zmmreg|mask|z,ymmrm256 [rm:hvm: evex.512.66.0f38.w0 25 /r ] AVX512
VPMOVSXWD xmmreg|mask|z,xmmrm64 [rm:hvm: evex.128.66.0f38.wig 23 /r ] AVX512VL,AVX512
VPMOVSXWD ymmreg|mask|z,xmmrm128 [rm:hvm: evex.256.66.0f38.wig 23 /r ] AVX512VL,AVX512
VPMOVSXWD zmmreg|mask|z,ymmrm256 [rm:hvm: evex.512.66.0f38.wig 23 /r ] AVX512
VPMOVSXWQ xmmreg|mask|z,xmmrm32 [rm:qvm: evex.128.66.0f38.wig 24 /r ] AVX512VL,AVX512
VPMOVSXWQ ymmreg|mask|z,xmmrm64 [rm:qvm: evex.256.66.0f38.wig 24 /r ] AVX512VL,AVX512
VPMOVSXWQ zmmreg|mask|z,xmmrm128 [rm:qvm: evex.512.66.0f38.wig 24 /r ] AVX512
VPMOVUSDB xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 11 /r ] AVX512VL,AVX512
VPMOVUSDB xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 11 /r ] AVX512VL,AVX512
VPMOVUSDB xmmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 11 /r ] AVX512
VPMOVUSDB mem32|mask,xmmreg [mr:qvm: evex.128.f3.0f38.w0 11 /r ] AVX512VL,AVX512
|
|
VPMOVUSDB mem64|mask,ymmreg [mr:qvm: evex.256.f3.0f38.w0 11 /r ] AVX512VL,AVX512
|
|
VPMOVUSDB mem128|mask,zmmreg [mr:qvm: evex.512.f3.0f38.w0 11 /r ] AVX512
|
|
VPMOVUSDW xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 13 /r ] AVX512VL,AVX512
|
|
VPMOVUSDW xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 13 /r ] AVX512VL,AVX512
|
|
VPMOVUSDW ymmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 13 /r ] AVX512
|
|
VPMOVUSDW mem64|mask,xmmreg [mr:hvm: evex.128.f3.0f38.w0 13 /r ] AVX512VL,AVX512
|
|
VPMOVUSDW mem128|mask,ymmreg [mr:hvm: evex.256.f3.0f38.w0 13 /r ] AVX512VL,AVX512
|
|
VPMOVUSDW mem256|mask,zmmreg [mr:hvm: evex.512.f3.0f38.w0 13 /r ] AVX512
|
|
VPMOVUSQB xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 12 /r ] AVX512VL,AVX512
|
|
VPMOVUSQB xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 12 /r ] AVX512VL,AVX512
|
|
VPMOVUSQB xmmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 12 /r ] AVX512
|
|
VPMOVUSQB mem16|mask,xmmreg [mr:ovm: evex.128.f3.0f38.w0 12 /r ] AVX512VL,AVX512
|
|
VPMOVUSQB mem32|mask,ymmreg [mr:ovm: evex.256.f3.0f38.w0 12 /r ] AVX512VL,AVX512
|
|
VPMOVUSQB mem64|mask,zmmreg [mr:ovm: evex.512.f3.0f38.w0 12 /r ] AVX512
|
|
VPMOVUSQD xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 15 /r ] AVX512VL,AVX512
|
|
VPMOVUSQD xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 15 /r ] AVX512VL,AVX512
|
|
VPMOVUSQD ymmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 15 /r ] AVX512
|
|
VPMOVUSQD mem64|mask,xmmreg [mr:hvm: evex.128.f3.0f38.w0 15 /r ] AVX512VL,AVX512
|
|
VPMOVUSQD mem128|mask,ymmreg [mr:hvm: evex.256.f3.0f38.w0 15 /r ] AVX512VL,AVX512
|
|
VPMOVUSQD mem256|mask,zmmreg [mr:hvm: evex.512.f3.0f38.w0 15 /r ] AVX512
|
|
VPMOVUSQW xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 14 /r ] AVX512VL,AVX512
|
|
VPMOVUSQW xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 14 /r ] AVX512VL,AVX512
|
|
VPMOVUSQW xmmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 14 /r ] AVX512
|
|
VPMOVUSQW mem32|mask,xmmreg [mr:qvm: evex.128.f3.0f38.w0 14 /r ] AVX512VL,AVX512
|
|
VPMOVUSQW mem64|mask,ymmreg [mr:qvm: evex.256.f3.0f38.w0 14 /r ] AVX512VL,AVX512
|
|
VPMOVUSQW mem128|mask,zmmreg [mr:qvm: evex.512.f3.0f38.w0 14 /r ] AVX512
|
|
VPMOVUSWB xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 10 /r ] AVX512VL,AVX512BW
|
|
VPMOVUSWB xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 10 /r ] AVX512VL,AVX512BW
|
|
VPMOVUSWB ymmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 10 /r ] AVX512BW
|
|
VPMOVUSWB mem64|mask,xmmreg [mr:hvm: evex.128.f3.0f38.w0 10 /r ] AVX512VL,AVX512BW
|
|
VPMOVUSWB mem128|mask,ymmreg [mr:hvm: evex.256.f3.0f38.w0 10 /r ] AVX512VL,AVX512BW
|
|
VPMOVUSWB mem256|mask,zmmreg [mr:hvm: evex.512.f3.0f38.w0 10 /r ] AVX512BW
|
|
VPMOVW2M kreg,xmmreg [rm: evex.128.f3.0f38.w1 29 /r ] AVX512VL,AVX512BW
|
|
VPMOVW2M kreg,ymmreg [rm: evex.256.f3.0f38.w1 29 /r ] AVX512VL,AVX512BW
|
|
VPMOVW2M kreg,zmmreg [rm: evex.512.f3.0f38.w1 29 /r ] AVX512BW
|
|
VPMOVWB xmmreg|mask|z,xmmreg [mr: evex.128.f3.0f38.w0 30 /r ] AVX512VL,AVX512BW
|
|
VPMOVWB xmmreg|mask|z,ymmreg [mr: evex.256.f3.0f38.w0 30 /r ] AVX512VL,AVX512BW
|
|
VPMOVWB ymmreg|mask|z,zmmreg [mr: evex.512.f3.0f38.w0 30 /r ] AVX512BW
|
|
VPMOVWB mem64|mask,xmmreg [mr:hvm: evex.128.f3.0f38.w0 30 /r ] AVX512VL,AVX512BW
|
|
VPMOVWB mem128|mask,ymmreg [mr:hvm: evex.256.f3.0f38.w0 30 /r ] AVX512VL,AVX512BW
|
|
VPMOVWB mem256|mask,zmmreg [mr:hvm: evex.512.f3.0f38.w0 30 /r ] AVX512BW
|
|
VPMOVZXBD xmmreg|mask|z,xmmrm32 [rm:qvm: evex.128.66.0f38.wig 31 /r ] AVX512VL,AVX512
|
|
VPMOVZXBD ymmreg|mask|z,xmmrm64 [rm:qvm: evex.256.66.0f38.wig 31 /r ] AVX512VL,AVX512
|
|
VPMOVZXBD zmmreg|mask|z,xmmrm128 [rm:qvm: evex.512.66.0f38.wig 31 /r ] AVX512
|
|
VPMOVZXBQ xmmreg|mask|z,xmmrm16 [rm:ovm: evex.128.66.0f38.wig 32 /r ] AVX512VL,AVX512
|
|
VPMOVZXBQ ymmreg|mask|z,xmmrm32 [rm:ovm: evex.256.66.0f38.wig 32 /r ] AVX512VL,AVX512
|
|
VPMOVZXBQ zmmreg|mask|z,xmmrm64 [rm:ovm: evex.512.66.0f38.wig 32 /r ] AVX512
|
|
VPMOVZXBW xmmreg|mask|z,xmmrm64 [rm:hvm: evex.128.66.0f38.wig 30 /r ] AVX512VL,AVX512BW
|
|
VPMOVZXBW ymmreg|mask|z,xmmrm128 [rm:hvm: evex.256.66.0f38.wig 30 /r ] AVX512VL,AVX512BW
|
|
VPMOVZXBW zmmreg|mask|z,ymmrm256 [rm:hvm: evex.512.66.0f38.wig 30 /r ] AVX512BW
|
|
VPMOVZXDQ xmmreg|mask|z,xmmrm64 [rm:hvm: evex.128.66.0f38.w0 35 /r ] AVX512VL,AVX512
|
|
VPMOVZXDQ ymmreg|mask|z,xmmrm128 [rm:hvm: evex.256.66.0f38.w0 35 /r ] AVX512VL,AVX512
|
|
VPMOVZXDQ zmmreg|mask|z,ymmrm256 [rm:hvm: evex.512.66.0f38.w0 35 /r ] AVX512
|
|
VPMOVZXWD xmmreg|mask|z,xmmrm64 [rm:hvm: evex.128.66.0f38.wig 33 /r ] AVX512VL,AVX512
|
|
VPMOVZXWD ymmreg|mask|z,xmmrm128 [rm:hvm: evex.256.66.0f38.wig 33 /r ] AVX512VL,AVX512
|
|
VPMOVZXWD zmmreg|mask|z,ymmrm256 [rm:hvm: evex.512.66.0f38.wig 33 /r ] AVX512
|
|
VPMOVZXWQ xmmreg|mask|z,xmmrm32 [rm:qvm: evex.128.66.0f38.wig 34 /r ] AVX512VL,AVX512
|
|
VPMOVZXWQ ymmreg|mask|z,xmmrm64 [rm:qvm: evex.256.66.0f38.wig 34 /r ] AVX512VL,AVX512
|
|
VPMOVZXWQ zmmreg|mask|z,xmmrm128 [rm:qvm: evex.512.66.0f38.wig 34 /r ] AVX512
|
|
VPMULDQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 28 /r ] AVX512VL,AVX512
|
|
VPMULDQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 28 /r ] AVX512VL,AVX512
|
|
VPMULDQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 28 /r ] AVX512
|
|
VPMULHRSW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.wig 0b /r ] AVX512VL,AVX512BW
|
|
VPMULHRSW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.wig 0b /r ] AVX512VL,AVX512BW
|
|
VPMULHRSW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.wig 0b /r ] AVX512BW
|
|
VPMULHUW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig e4 /r ] AVX512VL,AVX512BW
|
|
VPMULHUW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig e4 /r ] AVX512VL,AVX512BW
|
|
VPMULHUW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig e4 /r ] AVX512BW
|
|
VPMULHW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig e5 /r ] AVX512VL,AVX512BW
|
|
VPMULHW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig e5 /r ] AVX512VL,AVX512BW
|
|
VPMULHW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig e5 /r ] AVX512BW
|
|
VPMULLD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 40 /r ] AVX512VL,AVX512
|
|
VPMULLD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 40 /r ] AVX512VL,AVX512
|
|
VPMULLD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 40 /r ] AVX512
|
|
VPMULLQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 40 /r ] AVX512VL,AVX512DQ
|
|
VPMULLQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 40 /r ] AVX512VL,AVX512DQ
|
|
VPMULLQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 40 /r ] AVX512DQ
|
|
VPMULLW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig d5 /r ] AVX512VL,AVX512BW
|
|
VPMULLW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig d5 /r ] AVX512VL,AVX512BW
|
|
VPMULLW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig d5 /r ] AVX512BW
|
|
VPMULTISHIFTQB xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 83 /r ] AVX512VL,AVX512VBMI
|
|
VPMULTISHIFTQB ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 83 /r ] AVX512VL,AVX512VBMI
|
|
VPMULTISHIFTQB zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 83 /r ] AVX512VBMI
|
|
VPMULUDQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 f4 /r ] AVX512VL,AVX512
|
|
VPMULUDQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 f4 /r ] AVX512VL,AVX512
|
|
VPMULUDQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 f4 /r ] AVX512
|
|
VPORD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 eb /r ] AVX512VL,AVX512
|
|
VPORD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 eb /r ] AVX512VL,AVX512
|
|
VPORD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 eb /r ] AVX512
|
|
VPORQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 eb /r ] AVX512VL,AVX512
|
|
VPORQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 eb /r ] AVX512VL,AVX512
|
|
VPORQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 eb /r ] AVX512
|
|
VPROLD xmmreg|mask|z,xmmrm128|b32*,imm8 [vmi:fv: evex.nds.128.66.0f.w0 72 /1 ib ] AVX512VL,AVX512
|
|
VPROLD ymmreg|mask|z,ymmrm256|b32*,imm8 [vmi:fv: evex.nds.256.66.0f.w0 72 /1 ib ] AVX512VL,AVX512
|
|
VPROLD zmmreg|mask|z,zmmrm512|b32*,imm8 [vmi:fv: evex.nds.512.66.0f.w0 72 /1 ib ] AVX512
|
|
VPROLQ xmmreg|mask|z,xmmrm128|b64*,imm8 [vmi:fv: evex.nds.128.66.0f.w1 72 /1 ib ] AVX512VL,AVX512
|
|
VPROLQ ymmreg|mask|z,ymmrm256|b64*,imm8 [vmi:fv: evex.nds.256.66.0f.w1 72 /1 ib ] AVX512VL,AVX512
|
|
VPROLQ zmmreg|mask|z,zmmrm512|b64*,imm8 [vmi:fv: evex.nds.512.66.0f.w1 72 /1 ib ] AVX512
|
|
VPROLVD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 15 /r ] AVX512VL,AVX512
|
|
VPROLVD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 15 /r ] AVX512VL,AVX512
|
|
VPROLVD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 15 /r ] AVX512
|
|
VPROLVQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 15 /r ] AVX512VL,AVX512
|
|
VPROLVQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 15 /r ] AVX512VL,AVX512
|
|
VPROLVQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 15 /r ] AVX512
|
|
VPRORD xmmreg|mask|z,xmmrm128|b32*,imm8 [vmi:fv: evex.nds.128.66.0f.w0 72 /0 ib ] AVX512VL,AVX512
|
|
VPRORD ymmreg|mask|z,ymmrm256|b32*,imm8 [vmi:fv: evex.nds.256.66.0f.w0 72 /0 ib ] AVX512VL,AVX512
|
|
VPRORD zmmreg|mask|z,zmmrm512|b32*,imm8 [vmi:fv: evex.nds.512.66.0f.w0 72 /0 ib ] AVX512
|
|
VPRORQ xmmreg|mask|z,xmmrm128|b64*,imm8 [vmi:fv: evex.nds.128.66.0f.w1 72 /0 ib ] AVX512VL,AVX512
|
|
VPRORQ ymmreg|mask|z,ymmrm256|b64*,imm8 [vmi:fv: evex.nds.256.66.0f.w1 72 /0 ib ] AVX512VL,AVX512
|
|
VPRORQ zmmreg|mask|z,zmmrm512|b64*,imm8 [vmi:fv: evex.nds.512.66.0f.w1 72 /0 ib ] AVX512
|
|
VPRORVD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 14 /r ] AVX512VL,AVX512
|
|
VPRORVD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 14 /r ] AVX512VL,AVX512
|
|
VPRORVD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 14 /r ] AVX512
|
|
VPRORVQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 14 /r ] AVX512VL,AVX512
|
|
VPRORVQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 14 /r ] AVX512VL,AVX512
|
|
VPRORVQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 14 /r ] AVX512
|
|
VPSADBW xmmreg,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig f6 /r ] AVX512VL,AVX512BW
|
|
VPSADBW ymmreg,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig f6 /r ] AVX512VL,AVX512BW
|
|
VPSADBW zmmreg,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig f6 /r ] AVX512BW
|
|
VPSCATTERDD xmem32|mask,xmmreg [mr:t1s: vsibx evex.128.66.0f38.w0 a0 /r ] AVX512VL,AVX512
|
|
VPSCATTERDD ymem32|mask,ymmreg [mr:t1s: vsiby evex.256.66.0f38.w0 a0 /r ] AVX512VL,AVX512
|
|
VPSCATTERDD zmem32|mask,zmmreg [mr:t1s: vsibz evex.512.66.0f38.w0 a0 /r ] AVX512
|
|
VPSCATTERDQ xmem64|mask,xmmreg [mr:t1s: vsibx evex.128.66.0f38.w1 a0 /r ] AVX512VL,AVX512
|
|
VPSCATTERDQ xmem64|mask,ymmreg [mr:t1s: vsibx evex.256.66.0f38.w1 a0 /r ] AVX512VL,AVX512
|
|
VPSCATTERDQ ymem64|mask,zmmreg [mr:t1s: vsiby evex.512.66.0f38.w1 a0 /r ] AVX512
|
|
VPSCATTERQD xmem32|mask,xmmreg [mr:t1s: vsibx evex.128.66.0f38.w0 a1 /r ] AVX512VL,AVX512
|
|
VPSCATTERQD ymem32|mask,xmmreg [mr:t1s: vsiby evex.256.66.0f38.w0 a1 /r ] AVX512VL,AVX512
|
|
VPSCATTERQD zmem32|mask,ymmreg [mr:t1s: vsibz evex.512.66.0f38.w0 a1 /r ] AVX512
|
|
VPSCATTERQQ xmem64|mask,xmmreg [mr:t1s: vsibx evex.128.66.0f38.w1 a1 /r ] AVX512VL,AVX512
|
|
VPSCATTERQQ ymem64|mask,ymmreg [mr:t1s: vsiby evex.256.66.0f38.w1 a1 /r ] AVX512VL,AVX512
|
|
VPSCATTERQQ zmem64|mask,zmmreg [mr:t1s: vsibz evex.512.66.0f38.w1 a1 /r ] AVX512
|
|
VPSHUFB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.wig 00 /r ] AVX512VL,AVX512BW
|
|
VPSHUFB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.wig 00 /r ] AVX512VL,AVX512BW
|
|
VPSHUFB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.wig 00 /r ] AVX512BW
|
|
VPSHUFD xmmreg|mask|z,xmmrm128|b32,imm8 [rmi:fv: evex.128.66.0f.w0 70 /r ib ] AVX512VL,AVX512
|
|
VPSHUFD ymmreg|mask|z,ymmrm256|b32,imm8 [rmi:fv: evex.256.66.0f.w0 70 /r ib ] AVX512VL,AVX512
|
|
VPSHUFD zmmreg|mask|z,zmmrm512|b32,imm8 [rmi:fv: evex.512.66.0f.w0 70 /r ib ] AVX512
|
|
VPSHUFHW xmmreg|mask|z,xmmrm128,imm8 [rmi:fvm: evex.128.f3.0f.wig 70 /r ib ] AVX512VL,AVX512BW
|
|
VPSHUFHW ymmreg|mask|z,ymmrm256,imm8 [rmi:fvm: evex.256.f3.0f.wig 70 /r ib ] AVX512VL,AVX512BW
|
|
VPSHUFHW zmmreg|mask|z,zmmrm512,imm8 [rmi:fvm: evex.512.f3.0f.wig 70 /r ib ] AVX512BW
|
|
VPSHUFLW xmmreg|mask|z,xmmrm128,imm8 [rmi:fvm: evex.128.f2.0f.wig 70 /r ib ] AVX512VL,AVX512BW
|
|
VPSHUFLW ymmreg|mask|z,ymmrm256,imm8 [rmi:fvm: evex.256.f2.0f.wig 70 /r ib ] AVX512VL,AVX512BW
|
|
VPSHUFLW zmmreg|mask|z,zmmrm512,imm8 [rmi:fvm: evex.512.f2.0f.wig 70 /r ib ] AVX512BW
|
|
VPSLLD xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:m128: evex.nds.128.66.0f.w0 f2 /r ] AVX512VL,AVX512
|
|
VPSLLD ymmreg|mask|z,ymmreg*,xmmrm128 [rvm:m128: evex.nds.256.66.0f.w0 f2 /r ] AVX512VL,AVX512
|
|
VPSLLD zmmreg|mask|z,zmmreg*,xmmrm128 [rvm:m128: evex.nds.512.66.0f.w0 f2 /r ] AVX512
|
|
VPSLLD xmmreg|mask|z,xmmrm128|b32*,imm8 [vmi:fv: evex.nds.128.66.0f.w0 72 /6 ib ] AVX512VL,AVX512
|
|
VPSLLD ymmreg|mask|z,ymmrm256|b32*,imm8 [vmi:fv: evex.nds.256.66.0f.w0 72 /6 ib ] AVX512VL,AVX512
|
|
VPSLLD zmmreg|mask|z,zmmrm512|b32*,imm8 [vmi:fv: evex.nds.512.66.0f.w0 72 /6 ib ] AVX512
|
|
VPSLLDQ xmmreg,xmmrm128*,imm8 [vmi:fvm: evex.nds.128.66.0f.wig 73 /7 ib ] AVX512VL,AVX512BW
|
|
VPSLLDQ ymmreg,ymmrm256*,imm8 [vmi:fvm: evex.nds.256.66.0f.wig 73 /7 ib ] AVX512VL,AVX512BW
|
|
VPSLLDQ zmmreg,zmmrm512*,imm8 [vmi:fvm: evex.nds.512.66.0f.wig 73 /7 ib ] AVX512BW
|
|
VPSLLQ xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:m128: evex.nds.128.66.0f.w1 f3 /r ] AVX512VL,AVX512
|
|
VPSLLQ ymmreg|mask|z,ymmreg*,xmmrm128 [rvm:m128: evex.nds.256.66.0f.w1 f3 /r ] AVX512VL,AVX512
|
|
VPSLLQ zmmreg|mask|z,zmmreg*,xmmrm128 [rvm:m128: evex.nds.512.66.0f.w1 f3 /r ] AVX512
|
|
VPSLLQ xmmreg|mask|z,xmmrm128|b64*,imm8 [vmi:fv: evex.nds.128.66.0f.w1 73 /6 ib ] AVX512VL,AVX512
|
|
VPSLLQ ymmreg|mask|z,ymmrm256|b64*,imm8 [vmi:fv: evex.nds.256.66.0f.w1 73 /6 ib ] AVX512VL,AVX512
|
|
VPSLLQ zmmreg|mask|z,zmmrm512|b64*,imm8 [vmi:fv: evex.nds.512.66.0f.w1 73 /6 ib ] AVX512
|
|
VPSLLVD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 47 /r ] AVX512VL,AVX512
|
|
VPSLLVD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 47 /r ] AVX512VL,AVX512
|
|
VPSLLVD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 47 /r ] AVX512
|
|
VPSLLVQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 47 /r ] AVX512VL,AVX512
|
|
VPSLLVQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 47 /r ] AVX512VL,AVX512
|
|
VPSLLVQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 47 /r ] AVX512
|
|
VPSLLVW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w1 12 /r ] AVX512VL,AVX512BW
|
|
VPSLLVW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w1 12 /r ] AVX512VL,AVX512BW
|
|
VPSLLVW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w1 12 /r ] AVX512BW
|
|
VPSLLW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:m128: evex.nds.128.66.0f.wig f1 /r ] AVX512VL,AVX512BW
|
|
VPSLLW ymmreg|mask|z,ymmreg*,xmmrm128 [rvm:m128: evex.nds.256.66.0f.wig f1 /r ] AVX512VL,AVX512BW
|
|
VPSLLW zmmreg|mask|z,zmmreg*,xmmrm128 [rvm:m128: evex.nds.512.66.0f.wig f1 /r ] AVX512BW
|
|
VPSLLW xmmreg|mask|z,xmmrm128*,imm8 [vmi:fvm: evex.nds.128.66.0f.wig 71 /6 ib ] AVX512VL,AVX512BW
|
|
VPSLLW ymmreg|mask|z,ymmrm256*,imm8 [vmi:fvm: evex.nds.256.66.0f.wig 71 /6 ib ] AVX512VL,AVX512BW
|
|
VPSLLW zmmreg|mask|z,zmmrm512*,imm8 [vmi:fvm: evex.nds.512.66.0f.wig 71 /6 ib ] AVX512BW
|
|
VPSRAD xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:m128: evex.nds.128.66.0f.w0 e2 /r ] AVX512VL,AVX512
|
|
VPSRAD ymmreg|mask|z,ymmreg*,xmmrm128 [rvm:m128: evex.nds.256.66.0f.w0 e2 /r ] AVX512VL,AVX512
|
|
VPSRAD zmmreg|mask|z,zmmreg*,xmmrm128 [rvm:m128: evex.nds.512.66.0f.w0 e2 /r ] AVX512
|
|
VPSRAD xmmreg|mask|z,xmmrm128|b32*,imm8 [vmi:fv: evex.nds.128.66.0f.w0 72 /4 ib ] AVX512VL,AVX512
|
|
VPSRAD ymmreg|mask|z,ymmrm256|b32*,imm8 [vmi:fv: evex.nds.256.66.0f.w0 72 /4 ib ] AVX512VL,AVX512
|
|
VPSRAD zmmreg|mask|z,zmmrm512|b32*,imm8 [vmi:fv: evex.nds.512.66.0f.w0 72 /4 ib ] AVX512
|
|
VPSRAQ xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:m128: evex.nds.128.66.0f.w1 e2 /r ] AVX512VL,AVX512
|
|
VPSRAQ ymmreg|mask|z,ymmreg*,xmmrm128 [rvm:m128: evex.nds.256.66.0f.w1 e2 /r ] AVX512VL,AVX512
|
|
VPSRAQ zmmreg|mask|z,zmmreg*,xmmrm128 [rvm:m128: evex.nds.512.66.0f.w1 e2 /r ] AVX512
|
|
VPSRAQ xmmreg|mask|z,xmmrm128|b64*,imm8 [vmi:fv: evex.nds.128.66.0f.w1 72 /4 ib ] AVX512VL,AVX512
|
|
VPSRAQ ymmreg|mask|z,ymmrm256|b64*,imm8 [vmi:fv: evex.nds.256.66.0f.w1 72 /4 ib ] AVX512VL,AVX512
|
|
VPSRAQ zmmreg|mask|z,zmmrm512|b64*,imm8 [vmi:fv: evex.nds.512.66.0f.w1 72 /4 ib ] AVX512
|
|
VPSRAVD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 46 /r ] AVX512VL,AVX512
|
|
VPSRAVD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 46 /r ] AVX512VL,AVX512
|
|
VPSRAVD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 46 /r ] AVX512
|
|
VPSRAVQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 46 /r ] AVX512VL,AVX512
|
|
VPSRAVQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 46 /r ] AVX512VL,AVX512
|
|
VPSRAVQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 46 /r ] AVX512
|
|
VPSRAVW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w1 11 /r ] AVX512VL,AVX512BW
|
|
VPSRAVW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w1 11 /r ] AVX512VL,AVX512BW
|
|
VPSRAVW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w1 11 /r ] AVX512BW
|
|
VPSRAW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:m128: evex.nds.128.66.0f.wig e1 /r ] AVX512VL,AVX512BW
|
|
VPSRAW ymmreg|mask|z,ymmreg*,xmmrm128 [rvm:m128: evex.nds.256.66.0f.wig e1 /r ] AVX512VL,AVX512BW
|
|
VPSRAW zmmreg|mask|z,zmmreg*,xmmrm128 [rvm:m128: evex.nds.512.66.0f.wig e1 /r ] AVX512BW
|
|
VPSRAW xmmreg|mask|z,xmmrm128*,imm8 [vmi:fvm: evex.nds.128.66.0f.wig 71 /4 ib ] AVX512VL,AVX512BW
|
|
VPSRAW ymmreg|mask|z,ymmrm256*,imm8 [vmi:fvm: evex.nds.256.66.0f.wig 71 /4 ib ] AVX512VL,AVX512BW
|
|
VPSRAW zmmreg|mask|z,zmmrm512*,imm8 [vmi:fvm: evex.nds.512.66.0f.wig 71 /4 ib ] AVX512BW
|
|
VPSRLD xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:m128: evex.nds.128.66.0f.w0 d2 /r ] AVX512VL,AVX512
|
|
VPSRLD ymmreg|mask|z,ymmreg*,xmmrm128 [rvm:m128: evex.nds.256.66.0f.w0 d2 /r ] AVX512VL,AVX512
|
|
VPSRLD zmmreg|mask|z,zmmreg*,xmmrm128 [rvm:m128: evex.nds.512.66.0f.w0 d2 /r ] AVX512
|
|
VPSRLD xmmreg|mask|z,xmmrm128|b32*,imm8 [vmi:fv: evex.nds.128.66.0f.w0 72 /2 ib ] AVX512VL,AVX512
|
|
VPSRLD ymmreg|mask|z,ymmrm256|b32*,imm8 [vmi:fv: evex.nds.256.66.0f.w0 72 /2 ib ] AVX512VL,AVX512
|
|
VPSRLD zmmreg|mask|z,zmmrm512|b32*,imm8 [vmi:fv: evex.nds.512.66.0f.w0 72 /2 ib ] AVX512
|
|
VPSRLDQ xmmreg,xmmrm128*,imm8 [vmi:fvm: evex.nds.128.66.0f.wig 73 /3 ib ] AVX512VL,AVX512BW
|
|
VPSRLDQ ymmreg,ymmrm256*,imm8 [vmi:fvm: evex.nds.256.66.0f.wig 73 /3 ib ] AVX512VL,AVX512BW
|
|
VPSRLDQ zmmreg,zmmrm512*,imm8 [vmi:fvm: evex.nds.512.66.0f.wig 73 /3 ib ] AVX512BW
|
|
VPSRLQ xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:m128: evex.nds.128.66.0f.w1 d3 /r ] AVX512VL,AVX512
|
|
VPSRLQ ymmreg|mask|z,ymmreg*,xmmrm128 [rvm:m128: evex.nds.256.66.0f.w1 d3 /r ] AVX512VL,AVX512
|
|
VPSRLQ zmmreg|mask|z,zmmreg*,xmmrm128 [rvm:m128: evex.nds.512.66.0f.w1 d3 /r ] AVX512
|
|
VPSRLQ xmmreg|mask|z,xmmrm128|b64*,imm8 [vmi:fv: evex.nds.128.66.0f.w1 73 /2 ib ] AVX512VL,AVX512
|
|
VPSRLQ ymmreg|mask|z,ymmrm256|b64*,imm8 [vmi:fv: evex.nds.256.66.0f.w1 73 /2 ib ] AVX512VL,AVX512
|
|
VPSRLQ zmmreg|mask|z,zmmrm512|b64*,imm8 [vmi:fv: evex.nds.512.66.0f.w1 73 /2 ib ] AVX512
|
|
VPSRLVD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 45 /r ] AVX512VL,AVX512
|
|
VPSRLVD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 45 /r ] AVX512VL,AVX512
|
|
VPSRLVD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 45 /r ] AVX512
|
|
VPSRLVQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 45 /r ] AVX512VL,AVX512
|
|
VPSRLVQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 45 /r ] AVX512VL,AVX512
|
|
VPSRLVQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 45 /r ] AVX512
|
|
VPSRLVW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w1 10 /r ] AVX512VL,AVX512BW
|
|
VPSRLVW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w1 10 /r ] AVX512VL,AVX512BW
|
|
VPSRLVW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w1 10 /r ] AVX512BW
|
|
VPSRLW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:m128: evex.nds.128.66.0f.wig d1 /r ] AVX512VL,AVX512BW
|
|
VPSRLW ymmreg|mask|z,ymmreg*,xmmrm128 [rvm:m128: evex.nds.256.66.0f.wig d1 /r ] AVX512VL,AVX512BW
|
|
VPSRLW zmmreg|mask|z,zmmreg*,xmmrm128 [rvm:m128: evex.nds.512.66.0f.wig d1 /r ] AVX512BW
|
|
VPSRLW xmmreg|mask|z,xmmrm128*,imm8 [vmi:fvm: evex.nds.128.66.0f.wig 71 /2 ib ] AVX512VL,AVX512BW
|
|
VPSRLW ymmreg|mask|z,ymmrm256*,imm8 [vmi:fvm: evex.nds.256.66.0f.wig 71 /2 ib ] AVX512VL,AVX512BW
|
|
VPSRLW zmmreg|mask|z,zmmrm512*,imm8 [vmi:fvm: evex.nds.512.66.0f.wig 71 /2 ib ] AVX512BW
|
|
VPSUBB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig f8 /r ] AVX512VL,AVX512BW
|
|
VPSUBB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig f8 /r ] AVX512VL,AVX512BW
|
|
VPSUBB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig f8 /r ] AVX512BW
|
|
VPSUBD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 fa /r ] AVX512VL,AVX512
|
|
VPSUBD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 fa /r ] AVX512VL,AVX512
|
|
VPSUBD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 fa /r ] AVX512
|
|
VPSUBQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 fb /r ] AVX512VL,AVX512
|
|
VPSUBQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 fb /r ] AVX512VL,AVX512
|
|
VPSUBQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 fb /r ] AVX512
|
|
VPSUBSB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig e8 /r ] AVX512VL,AVX512BW
|
|
VPSUBSB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig e8 /r ] AVX512VL,AVX512BW
|
|
VPSUBSB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig e8 /r ] AVX512BW
|
|
VPSUBSW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig e9 /r ] AVX512VL,AVX512BW
|
|
VPSUBSW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig e9 /r ] AVX512VL,AVX512BW
|
|
VPSUBSW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig e9 /r ] AVX512BW
|
|
VPSUBUSB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig d8 /r ] AVX512VL,AVX512BW
|
|
VPSUBUSB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig d8 /r ] AVX512VL,AVX512BW
|
|
VPSUBUSB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig d8 /r ] AVX512BW
|
|
VPSUBUSW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig d9 /r ] AVX512VL,AVX512BW
|
|
VPSUBUSW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig d9 /r ] AVX512VL,AVX512BW
|
|
VPSUBUSW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig d9 /r ] AVX512BW
|
|
VPSUBW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig f9 /r ] AVX512VL,AVX512BW
|
|
VPSUBW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig f9 /r ] AVX512VL,AVX512BW
|
|
VPSUBW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig f9 /r ] AVX512BW
|
|
VPTERNLOGD xmmreg|mask|z,xmmreg,xmmrm128|b32,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w0 25 /r ib ] AVX512VL,AVX512
|
|
VPTERNLOGD ymmreg|mask|z,ymmreg,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w0 25 /r ib ] AVX512VL,AVX512
|
|
VPTERNLOGD zmmreg|mask|z,zmmreg,zmmrm512|b32,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w0 25 /r ib ] AVX512
|
|
VPTERNLOGQ xmmreg|mask|z,xmmreg,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w1 25 /r ib ] AVX512VL,AVX512
|
|
VPTERNLOGQ ymmreg|mask|z,ymmreg,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 25 /r ib ] AVX512VL,AVX512
|
|
VPTERNLOGQ zmmreg|mask|z,zmmreg,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 25 /r ib ] AVX512
|
|
VPTESTMB kreg|mask,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w0 26 /r ] AVX512VL,AVX512BW
|
|
VPTESTMB kreg|mask,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w0 26 /r ] AVX512VL,AVX512BW
|
|
VPTESTMB kreg|mask,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w0 26 /r ] AVX512BW
|
|
VPTESTMD kreg|mask,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 27 /r ] AVX512VL,AVX512
|
|
VPTESTMD kreg|mask,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 27 /r ] AVX512VL,AVX512
|
|
VPTESTMD kreg|mask,zmmreg,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f38.w0 27 /r ] AVX512
|
|
VPTESTMQ kreg|mask,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 27 /r ] AVX512VL,AVX512
|
|
VPTESTMQ kreg|mask,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 27 /r ] AVX512VL,AVX512
|
|
VPTESTMQ kreg|mask,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f38.w1 27 /r ] AVX512
|
|
VPTESTMW kreg|mask,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w1 26 /r ] AVX512VL,AVX512BW
|
|
VPTESTMW kreg|mask,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w1 26 /r ] AVX512VL,AVX512BW
|
|
VPTESTMW kreg|mask,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w1 26 /r ] AVX512BW
|
|
VPTESTNMB kreg|mask,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.f3.0f38.w0 26 /r ] AVX512VL,AVX512BW
|
|
VPTESTNMB kreg|mask,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.f3.0f38.w0 26 /r ] AVX512VL,AVX512BW
|
|
VPTESTNMB kreg|mask,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.f3.0f38.w0 26 /r ] AVX512BW
|
|
VPTESTNMD kreg|mask,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.f3.0f38.w0 27 /r ] AVX512VL,AVX512
|
|
VPTESTNMD kreg|mask,ymmreg,ymmrm256|b32 [rvm:fv: evex.nds.256.f3.0f38.w0 27 /r ] AVX512VL,AVX512
|
|
VPTESTNMD kreg|mask,zmmreg,zmmrm512|b32 [rvm:fv: evex.nds.512.f3.0f38.w0 27 /r ] AVX512
|
|
VPTESTNMQ kreg|mask,xmmreg,xmmrm128|b64 [rvm:fv: evex.nds.128.f3.0f38.w1 27 /r ] AVX512VL,AVX512
|
|
VPTESTNMQ kreg|mask,ymmreg,ymmrm256|b64 [rvm:fv: evex.nds.256.f3.0f38.w1 27 /r ] AVX512VL,AVX512
|
|
VPTESTNMQ kreg|mask,zmmreg,zmmrm512|b64 [rvm:fv: evex.nds.512.f3.0f38.w1 27 /r ] AVX512
|
|
VPTESTNMW kreg|mask,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.f3.0f38.w1 26 /r ] AVX512VL,AVX512BW
|
|
VPTESTNMW kreg|mask,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.f3.0f38.w1 26 /r ] AVX512VL,AVX512BW
|
|
VPTESTNMW kreg|mask,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.f3.0f38.w1 26 /r ] AVX512BW
|
|
VPUNPCKHBW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig 68 /r ] AVX512VL,AVX512BW
|
|
VPUNPCKHBW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig 68 /r ] AVX512VL,AVX512BW
|
|
VPUNPCKHBW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig 68 /r ] AVX512BW
|
|
VPUNPCKHDQ xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 6a /r ] AVX512VL,AVX512
|
|
VPUNPCKHDQ ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 6a /r ] AVX512VL,AVX512
|
|
VPUNPCKHDQ zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 6a /r ] AVX512
|
|
VPUNPCKHQDQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 6d /r ] AVX512VL,AVX512
|
|
VPUNPCKHQDQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 6d /r ] AVX512VL,AVX512
|
|
VPUNPCKHQDQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 6d /r ] AVX512
|
|
VPUNPCKHWD xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig 69 /r ] AVX512VL,AVX512BW
|
|
VPUNPCKHWD ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig 69 /r ] AVX512VL,AVX512BW
|
|
VPUNPCKHWD zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig 69 /r ] AVX512BW
|
|
VPUNPCKLBW xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig 60 /r ] AVX512VL,AVX512BW
|
|
VPUNPCKLBW ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig 60 /r ] AVX512VL,AVX512BW
|
|
VPUNPCKLBW zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig 60 /r ] AVX512BW
|
|
VPUNPCKLDQ xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 62 /r ] AVX512VL,AVX512
|
|
VPUNPCKLDQ ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 62 /r ] AVX512VL,AVX512
|
|
VPUNPCKLDQ zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 62 /r ] AVX512
|
|
VPUNPCKLQDQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 6c /r ] AVX512VL,AVX512
|
|
VPUNPCKLQDQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 6c /r ] AVX512VL,AVX512
|
|
VPUNPCKLQDQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 6c /r ] AVX512
|
|
VPUNPCKLWD xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f.wig 61 /r ] AVX512VL,AVX512BW
|
|
VPUNPCKLWD ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f.wig 61 /r ] AVX512VL,AVX512BW
|
|
VPUNPCKLWD zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f.wig 61 /r ] AVX512BW
|
|
VPXORD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f.w0 ef /r ] AVX512VL,AVX512
|
|
VPXORD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f.w0 ef /r ] AVX512VL,AVX512
|
|
VPXORD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.66.0f.w0 ef /r ] AVX512
|
|
VPXORQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 ef /r ] AVX512VL,AVX512
|
|
VPXORQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 ef /r ] AVX512VL,AVX512
|
|
VPXORQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 ef /r ] AVX512
|
|
VRANGEPD xmmreg|mask|z,xmmreg*,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w1 50 /r ib ] AVX512VL,AVX512DQ
|
|
VRANGEPD ymmreg|mask|z,ymmreg*,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 50 /r ib ] AVX512VL,AVX512DQ
|
|
VRANGEPD zmmreg|mask|z,zmmreg*,zmmrm512|b64|sae,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 50 /r ib ] AVX512DQ
|
|
VRANGEPS xmmreg|mask|z,xmmreg*,xmmrm128|b32,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w0 50 /r ib ] AVX512VL,AVX512DQ
|
|
VRANGEPS ymmreg|mask|z,ymmreg*,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w0 50 /r ib ] AVX512VL,AVX512DQ
|
|
VRANGEPS zmmreg|mask|z,zmmreg*,zmmrm512|b32|sae,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w0 50 /r ib ] AVX512DQ
|
|
VRANGESD xmmreg|mask|z,xmmreg*,xmmrm64|sae,imm8 [rvmi:t1s: evex.nds.lig.66.0f3a.w1 51 /r ib ] AVX512DQ
|
|
VRANGESS xmmreg|mask|z,xmmreg*,xmmrm32|sae,imm8 [rvmi:t1s: evex.nds.lig.66.0f3a.w0 51 /r ib ] AVX512DQ
|
|
VRCP14PD xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f38.w1 4c /r ] AVX512VL,AVX512
|
|
VRCP14PD ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f38.w1 4c /r ] AVX512VL,AVX512
|
|
VRCP14PD zmmreg|mask|z,zmmrm512|b64 [rm:fv: evex.512.66.0f38.w1 4c /r ] AVX512
|
|
VRCP14PS xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.66.0f38.w0 4c /r ] AVX512VL,AVX512
|
|
VRCP14PS ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.66.0f38.w0 4c /r ] AVX512VL,AVX512
|
|
VRCP14PS zmmreg|mask|z,zmmrm512|b32 [rm:fv: evex.512.66.0f38.w0 4c /r ] AVX512
|
|
VRCP14SD xmmreg|mask|z,xmmreg*,xmmrm64 [rvm:t1s: evex.nds.lig.66.0f38.w1 4d /r ] AVX512
|
|
VRCP14SS xmmreg|mask|z,xmmreg*,xmmrm32 [rvm:t1s: evex.nds.lig.66.0f38.w0 4d /r ] AVX512
|
|
VRCP28PD zmmreg|mask|z,zmmrm512|b64|sae [rm:fv: evex.512.66.0f38.w1 ca /r ] AVX512ER
|
|
VRCP28PS zmmreg|mask|z,zmmrm512|b32|sae [rm:fv: evex.512.66.0f38.w0 ca /r ] AVX512ER
|
|
VRCP28SD xmmreg|mask|z,xmmreg*,xmmrm64|sae [rvm:t1s: evex.nds.lig.66.0f38.w1 cb /r ] AVX512ER
|
|
VRCP28SS xmmreg|mask|z,xmmreg*,xmmrm32|sae [rvm:t1s: evex.nds.lig.66.0f38.w0 cb /r ] AVX512ER
|
|
VREDUCEPD xmmreg|mask|z,xmmrm128|b64,imm8 [rmi:fv: evex.128.66.0f3a.w1 56 /r ib ] AVX512VL,AVX512DQ
|
|
VREDUCEPD ymmreg|mask|z,ymmrm256|b64,imm8 [rmi:fv: evex.256.66.0f3a.w1 56 /r ib ] AVX512VL,AVX512DQ
|
|
VREDUCEPD zmmreg|mask|z,zmmrm512|b64|sae,imm8 [rmi:fv: evex.512.66.0f3a.w1 56 /r ib ] AVX512DQ
|
|
VREDUCEPS xmmreg|mask|z,xmmrm128|b32,imm8 [rmi:fv: evex.128.66.0f3a.w0 56 /r ib ] AVX512VL,AVX512DQ
|
|
VREDUCEPS ymmreg|mask|z,ymmrm256|b32,imm8 [rmi:fv: evex.256.66.0f3a.w0 56 /r ib ] AVX512VL,AVX512DQ
|
|
VREDUCEPS zmmreg|mask|z,zmmrm512|b32|sae,imm8 [rmi:fv: evex.512.66.0f3a.w0 56 /r ib ] AVX512DQ
|
|
VREDUCESD xmmreg|mask|z,xmmreg*,xmmrm64|sae,imm8 [rvmi:t1s: evex.nds.lig.66.0f3a.w1 57 /r ib ] AVX512DQ
|
|
VREDUCESS xmmreg|mask|z,xmmreg*,xmmrm32|sae,imm8 [rvmi:t1s: evex.nds.lig.66.0f3a.w0 57 /r ib ] AVX512DQ
|
|
VRNDSCALEPD xmmreg|mask|z,xmmrm128|b64,imm8 [rmi:fv: evex.128.66.0f3a.w1 09 /r ib ] AVX512VL,AVX512
|
|
VRNDSCALEPD ymmreg|mask|z,ymmrm256|b64,imm8 [rmi:fv: evex.256.66.0f3a.w1 09 /r ib ] AVX512VL,AVX512
|
|
VRNDSCALEPD zmmreg|mask|z,zmmrm512|b64|sae,imm8 [rmi:fv: evex.512.66.0f3a.w1 09 /r ib ] AVX512
|
|
VRNDSCALEPS xmmreg|mask|z,xmmrm128|b32,imm8 [rmi:fv: evex.128.66.0f3a.w0 08 /r ib ] AVX512VL,AVX512
|
|
VRNDSCALEPS ymmreg|mask|z,ymmrm256|b32,imm8 [rmi:fv: evex.256.66.0f3a.w0 08 /r ib ] AVX512VL,AVX512
|
|
VRNDSCALEPS zmmreg|mask|z,zmmrm512|b32|sae,imm8 [rmi:fv: evex.512.66.0f3a.w0 08 /r ib ] AVX512
|
|
VRNDSCALESD xmmreg|mask|z,xmmreg*,xmmrm64|sae,imm8 [rvmi:t1s: evex.nds.lig.66.0f3a.w1 0b /r ib ] AVX512
|
|
VRNDSCALESS xmmreg|mask|z,xmmreg*,xmmrm32|sae,imm8 [rvmi:t1s: evex.nds.lig.66.0f3a.w0 0a /r ib ] AVX512
|
|
VRSQRT14PD xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f38.w1 4e /r ] AVX512VL,AVX512
|
|
VRSQRT14PD ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f38.w1 4e /r ] AVX512VL,AVX512
|
|
VRSQRT14PD zmmreg|mask|z,zmmrm512|b64 [rm:fv: evex.512.66.0f38.w1 4e /r ] AVX512
|
|
VRSQRT14PS xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.66.0f38.w0 4e /r ] AVX512VL,AVX512
|
|
VRSQRT14PS ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.66.0f38.w0 4e /r ] AVX512VL,AVX512
|
|
VRSQRT14PS zmmreg|mask|z,zmmrm512|b32 [rm:fv: evex.512.66.0f38.w0 4e /r ] AVX512
|
|
VRSQRT14SD xmmreg|mask|z,xmmreg*,xmmrm64 [rvm:t1s: evex.nds.lig.66.0f38.w1 4f /r ] AVX512
|
|
VRSQRT14SS xmmreg|mask|z,xmmreg*,xmmrm32 [rvm:t1s: evex.nds.lig.66.0f38.w0 4f /r ] AVX512
|
|
VRSQRT28PD zmmreg|mask|z,zmmrm512|b64|sae [rm:fv: evex.512.66.0f38.w1 cc /r ] AVX512ER
|
|
VRSQRT28PS zmmreg|mask|z,zmmrm512|b32|sae [rm:fv: evex.512.66.0f38.w0 cc /r ] AVX512ER
|
|
VRSQRT28SD xmmreg|mask|z,xmmreg*,xmmrm64|sae [rvm:t1s: evex.nds.lig.66.0f38.w1 cd /r ] AVX512ER
|
|
VRSQRT28SS xmmreg|mask|z,xmmreg*,xmmrm32|sae [rvm:t1s: evex.nds.lig.66.0f38.w0 cd /r ] AVX512ER
|
|
VSCALEFPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f38.w1 2c /r ] AVX512VL,AVX512
|
|
VSCALEFPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f38.w1 2c /r ] AVX512VL,AVX512
|
|
VSCALEFPD zmmreg|mask|z,zmmreg*,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f38.w1 2c /r ] AVX512
|
|
VSCALEFPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.66.0f38.w0 2c /r ] AVX512VL,AVX512
|
|
VSCALEFPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.66.0f38.w0 2c /r ] AVX512VL,AVX512
|
|
VSCALEFPS zmmreg|mask|z,zmmreg*,zmmrm512|b32|er [rvm:fv: evex.nds.512.66.0f38.w0 2c /r ] AVX512
|
|
VSCALEFSD xmmreg|mask|z,xmmreg*,xmmrm64|er [rvm:t1s: evex.nds.lig.66.0f38.w1 2d /r ] AVX512
|
|
VSCALEFSS xmmreg|mask|z,xmmreg*,xmmrm32|er [rvm:t1s: evex.nds.lig.66.0f38.w0 2d /r ] AVX512
|
|
VSCATTERDPD xmem64|mask,xmmreg [mr:t1s: vsibx evex.128.66.0f38.w1 a2 /r ] AVX512VL,AVX512
|
|
VSCATTERDPD xmem64|mask,ymmreg [mr:t1s: vsibx evex.256.66.0f38.w1 a2 /r ] AVX512VL,AVX512
|
|
VSCATTERDPD ymem64|mask,zmmreg [mr:t1s: vsiby evex.512.66.0f38.w1 a2 /r ] AVX512
|
|
VSCATTERDPS xmem32|mask,xmmreg [mr:t1s: vsibx evex.128.66.0f38.w0 a2 /r ] AVX512VL,AVX512
|
|
VSCATTERDPS ymem32|mask,ymmreg [mr:t1s: vsiby evex.256.66.0f38.w0 a2 /r ] AVX512VL,AVX512
|
|
VSCATTERDPS zmem32|mask,zmmreg [mr:t1s: vsibz evex.512.66.0f38.w0 a2 /r ] AVX512
|
|
VSCATTERPF0DPD ymem64|mask [m:t1s: vsiby evex.512.66.0f38.w1 c6 /5 ] AVX512PF
|
|
VSCATTERPF0DPS zmem32|mask [m:t1s: vsibz evex.512.66.0f38.w0 c6 /5 ] AVX512PF
|
|
VSCATTERPF0QPD zmem64|mask [m:t1s: vsibz evex.512.66.0f38.w1 c7 /5 ] AVX512PF
|
|
VSCATTERPF0QPS zmem32|mask [m:t1s: vsibz evex.512.66.0f38.w0 c7 /5 ] AVX512PF
|
|
VSCATTERPF1DPD ymem64|mask [m:t1s: vsiby evex.512.66.0f38.w1 c6 /6 ] AVX512PF
|
|
VSCATTERPF1DPS zmem32|mask [m:t1s: vsibz evex.512.66.0f38.w0 c6 /6 ] AVX512PF
|
|
VSCATTERPF1QPD zmem64|mask [m:t1s: vsibz evex.512.66.0f38.w1 c7 /6 ] AVX512PF
|
|
VSCATTERPF1QPS zmem32|mask [m:t1s: vsibz evex.512.66.0f38.w0 c7 /6 ] AVX512PF
|
|
VSCATTERQPD xmem64|mask,xmmreg [mr:t1s: vsibx evex.128.66.0f38.w1 a3 /r ] AVX512VL,AVX512
|
|
VSCATTERQPD ymem64|mask,ymmreg [mr:t1s: vsiby evex.256.66.0f38.w1 a3 /r ] AVX512VL,AVX512
|
|
VSCATTERQPD zmem64|mask,zmmreg [mr:t1s: vsibz evex.512.66.0f38.w1 a3 /r ] AVX512
|
|
VSCATTERQPS xmem32|mask,xmmreg [mr:t1s: vsibx evex.128.66.0f38.w0 a3 /r ] AVX512VL,AVX512
|
|
VSCATTERQPS ymem32|mask,xmmreg [mr:t1s: vsiby evex.256.66.0f38.w0 a3 /r ] AVX512VL,AVX512
|
|
VSCATTERQPS zmem32|mask,ymmreg [mr:t1s: vsibz evex.512.66.0f38.w0 a3 /r ] AVX512
|
|
VSHUFF32X4 ymmreg|mask|z,ymmreg*,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w0 23 /r ib ] AVX512VL,AVX512
|
|
VSHUFF32X4 zmmreg|mask|z,zmmreg*,zmmrm512|b32,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w0 23 /r ib ] AVX512
|
|
VSHUFF64X2 ymmreg|mask|z,ymmreg*,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 23 /r ib ] AVX512VL,AVX512
|
|
VSHUFF64X2 zmmreg|mask|z,zmmreg*,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 23 /r ib ] AVX512
|
|
VSHUFI32X4 ymmreg|mask|z,ymmreg*,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w0 43 /r ib ] AVX512VL,AVX512
|
|
VSHUFI32X4 zmmreg|mask|z,zmmreg*,zmmrm512|b32,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w0 43 /r ib ] AVX512
|
|
VSHUFI64X2 ymmreg|mask|z,ymmreg*,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 43 /r ib ] AVX512VL,AVX512
|
|
VSHUFI64X2 zmmreg|mask|z,zmmreg*,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 43 /r ib ] AVX512
|
|
VSHUFPD xmmreg|mask|z,xmmreg*,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f.w1 c6 /r ib ] AVX512VL,AVX512
|
|
VSHUFPD ymmreg|mask|z,ymmreg*,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f.w1 c6 /r ib ] AVX512VL,AVX512
|
|
VSHUFPD zmmreg|mask|z,zmmreg*,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f.w1 c6 /r ib ] AVX512
|
|
VSHUFPS xmmreg|mask|z,xmmreg*,xmmrm128|b32,imm8 [rvmi:fv: evex.nds.128.0f.w0 c6 /r ib ] AVX512VL,AVX512
|
|
VSHUFPS ymmreg|mask|z,ymmreg*,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.0f.w0 c6 /r ib ] AVX512VL,AVX512
|
|
VSHUFPS zmmreg|mask|z,zmmreg*,zmmrm512|b32,imm8 [rvmi:fv: evex.nds.512.0f.w0 c6 /r ib ] AVX512
|
|
VSQRTPD xmmreg|mask|z,xmmrm128|b64 [rm:fv: evex.128.66.0f.w1 51 /r ] AVX512VL,AVX512
|
|
VSQRTPD ymmreg|mask|z,ymmrm256|b64 [rm:fv: evex.256.66.0f.w1 51 /r ] AVX512VL,AVX512
|
|
VSQRTPD zmmreg|mask|z,zmmrm512|b64|er [rm:fv: evex.512.66.0f.w1 51 /r ] AVX512
|
|
VSQRTPS xmmreg|mask|z,xmmrm128|b32 [rm:fv: evex.128.0f.w0 51 /r ] AVX512VL,AVX512
|
|
VSQRTPS ymmreg|mask|z,ymmrm256|b32 [rm:fv: evex.256.0f.w0 51 /r ] AVX512VL,AVX512
|
|
VSQRTPS zmmreg|mask|z,zmmrm512|b32|er [rm:fv: evex.512.0f.w0 51 /r ] AVX512
|
|
VSQRTSD xmmreg|mask|z,xmmreg*,xmmrm64|er [rvm:t1s: evex.nds.lig.f2.0f.w1 51 /r ] AVX512
|
|
VSQRTSS xmmreg|mask|z,xmmreg*,xmmrm32|er [rvm:t1s: evex.nds.lig.f3.0f.w0 51 /r ] AVX512
|
|
VSUBPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 5c /r ] AVX512VL,AVX512
|
|
VSUBPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 5c /r ] AVX512VL,AVX512
|
|
VSUBPD zmmreg|mask|z,zmmreg*,zmmrm512|b64|er [rvm:fv: evex.nds.512.66.0f.w1 5c /r ] AVX512
|
|
VSUBPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 5c /r ] AVX512VL,AVX512
|
|
VSUBPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 5c /r ] AVX512VL,AVX512
|
|
VSUBPS zmmreg|mask|z,zmmreg*,zmmrm512|b32|er [rvm:fv: evex.nds.512.0f.w0 5c /r ] AVX512
|
|
VSUBSD xmmreg|mask|z,xmmreg*,xmmrm64|er [rvm:t1s: evex.nds.lig.f2.0f.w1 5c /r ] AVX512
|
|
VSUBSS xmmreg|mask|z,xmmreg*,xmmrm32|er [rvm:t1s: evex.nds.lig.f3.0f.w0 5c /r ] AVX512
|
|
VUCOMISD xmmreg,xmmrm64|sae [rm:t1s: evex.lig.66.0f.w1 2e /r ] AVX512
|
|
VUCOMISS xmmreg,xmmrm32|sae [rm:t1s: evex.lig.0f.w0 2e /r ] AVX512
|
|
VUNPCKHPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 15 /r ] AVX512VL,AVX512
|
|
VUNPCKHPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 15 /r ] AVX512VL,AVX512
|
|
VUNPCKHPD zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 15 /r ] AVX512
|
|
VUNPCKHPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 15 /r ] AVX512VL,AVX512
|
|
VUNPCKHPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 15 /r ] AVX512VL,AVX512
|
|
VUNPCKHPS zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.0f.w0 15 /r ] AVX512
|
|
VUNPCKLPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 14 /r ] AVX512VL,AVX512
|
|
VUNPCKLPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 14 /r ] AVX512VL,AVX512
|
|
VUNPCKLPD zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 14 /r ] AVX512
|
|
VUNPCKLPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 14 /r ] AVX512VL,AVX512
|
|
VUNPCKLPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 14 /r ] AVX512VL,AVX512
|
|
VUNPCKLPS zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.0f.w0 14 /r ] AVX512
|
|
VXORPD xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvm:fv: evex.nds.128.66.0f.w1 57 /r ] AVX512VL,AVX512DQ
|
|
VXORPD ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvm:fv: evex.nds.256.66.0f.w1 57 /r ] AVX512VL,AVX512DQ
|
|
VXORPD zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvm:fv: evex.nds.512.66.0f.w1 57 /r ] AVX512DQ
|
|
VXORPS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.nds.128.0f.w0 57 /r ] AVX512VL,AVX512DQ
|
|
VXORPS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.nds.256.0f.w0 57 /r ] AVX512VL,AVX512DQ
|
|
VXORPS zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.nds.512.0f.w0 57 /r ] AVX512DQ
|
|
; MJC PUBLIC END
|
|
|
|
;# Intel memory protection keys for userspace (PKU aka PKEYs)
|
|
RDPKRU void [ 0f 01 ee] PKU,LONG
|
|
WRPKRU void [ 0f 01 ef] PKU,LONG
|
|
|
|
;# Read Processor ID
|
|
|
|
|
|
;# Processor trace write
|
|
$dq PTWRITE rm# [m: f3 o# 0f ae /4] PTWRITE,(q:SX)
|
|
|
|
;# Instructions from the Intel Instruction Set Extensions,
|
|
;# doc 319433-034 May 2018
|
|
CLDEMOTE mem [m: np 0f 1c /0] CLDEMOTE
|
|
$dq MOVDIRI mem#,reg# [mr: np o# 0f38 f9 /r] MOVDIRI
|
|
$wdq MOVDIR64B reg#,mem512 [rm: a# 66 0f38 f8 /r] MOVDIR64B
|
|
PCONFIG void [ np 0f 01 c5] PCONFIG
|
|
|
|
;# Galois field operations (GFNI)
|
|
GF2P8AFFINEINVQB xmmreg,xmmrm128,imm8 [rmi: 66 0f3a cf /r ib] GFNI,SSE
|
|
VGF2P8AFFINEINVQB xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a.w1 cf /r ib] GFNI,AVX
|
|
VGF2P8AFFINEINVQB ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a.w1 cf /r ib] GFNI,AVX
|
|
VGF2P8AFFINEINVQB xmmreg|mask|z,xmmreg*,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w1 cf /r ib] GFNI,AVX512VL
|
|
VGF2P8AFFINEINVQB ymmreg|mask|z,ymmreg*,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 cf /r ib] GFNI,AVX512VL
|
|
VGF2P8AFFINEINVQB zmmreg|mask|z,zmmreg*,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 cf /r ib] GFNI,AVX512
|
|
GF2P8AFFINEQB xmmreg,xmmrm128,imm8 [rmi: 66 0f3a ce /r ib] GFNI,SSE
|
|
VGF2P8AFFINEQB xmmreg,xmmreg*,xmmrm128,imm8 [rvmi: vex.nds.128.66.0f3a.w1 ce /r ib] GFNI,AVX
|
|
VGF2P8AFFINEQB ymmreg,ymmreg*,ymmrm256,imm8 [rvmi: vex.nds.256.66.0f3a.w1 ce /r ib] GFNI,AVX
|
|
VGF2P8AFFINEQB xmmreg|mask|z,xmmreg*,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w1 ce /r ib] GFNI,AVX512VL
|
|
VGF2P8AFFINEQB ymmreg|mask|z,ymmreg*,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 ce /r ib] GFNI,AVX512VL
|
|
VGF2P8AFFINEQB zmmreg|mask|z,zmmreg*,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 ce /r ib] GFNI,AVX512
|
|
GF2P8MULB xmmreg,xmmrm128 [rm: 66 0f38 cf /r] GFNI,SSE
|
|
VGF2P8MULB xmmreg,xmmreg*,xmmrm128 [rvm: vex.nds.128.66.0f38.w0 cf /r] GFNI,AVX
|
|
VGF2P8MULB ymmreg,ymmreg*,ymmrm256 [rvm: vex.nds.256.66.0f38.w0 cf /r] GFNI,AVX
|
|
VGF2P8MULB xmmreg|mask|z,xmmreg*,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w0 cf /r] GFNI,AVX512VL
|
|
VGF2P8MULB ymmreg|mask|z,ymmreg*,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w0 cf /r] GFNI,AVX512VL
|
|
VGF2P8MULB zmmreg|mask|z,zmmreg*,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w0 cf /r] GFNI,AVX512
|
|
|
|
;# AVX512 Vector Bit Manipulation Instructions 2
|
|
VPCOMPRESSB mem128|mask,xmmreg [mr:t1s8: evex.128.66.0f38.w0 63 /r] AVX512VBMI2,AVX512VL
|
|
VPCOMPRESSB mem256|mask,ymmreg [mr:t1s8: evex.256.66.0f38.w0 63 /r] AVX512VBMI2,AVX512VL
|
|
VPCOMPRESSB mem512|mask,zmmreg [mr:t1s8: evex.512.66.0f38.w0 63 /r] AVX512VBMI2
|
|
VPCOMPRESSB xmmreg|mask|z,xmmreg [mr: evex.128.66.0f38.w0 63 /r] AVX512VBMI2,AVX512VL
|
|
VPCOMPRESSB ymmreg|mask|z,ymmreg [mr: evex.256.66.0f38.w0 63 /r] AVX512VBMI2,AVX512VL
|
|
VPCOMPRESSB zmmreg|mask|z,zmmreg [mr: evex.512.66.0f38.w0 63 /r] AVX512VBMI2
|
|
VPCOMPRESSW mem128|mask,xmmreg [mr:t1s16: evex.128.66.0f38.w1 63 /r] AVX512VBMI2,AVX512VL
|
|
VPCOMPRESSW mem256|mask,ymmreg [mr:t1s16: evex.256.66.0f38.w1 63 /r] AVX512VBMI2,AVX512VL
|
|
VPCOMPRESSW mem512|mask,zmmreg [mr:t1s16: evex.512.66.0f38.w1 63 /r] AVX512VBMI2
|
|
VPCOMPRESSW xmmreg|mask|z,xmmreg [mr: evex.128.66.0f38.w1 63 /r] AVX512VBMI2,AVX512VL
|
|
VPCOMPRESSW ymmreg|mask|z,ymmreg [mr: evex.256.66.0f38.w1 63 /r] AVX512VBMI2,AVX512VL
|
|
VPCOMPRESSW zmmreg|mask|z,zmmreg [mr: evex.512.66.0f38.w1 63 /r] AVX512VBMI2
|
|
VPEXPANDB xmmreg|mask|z,xmmrm128 [rm:t1s8: evex.128.66.0f38.w0 62 /r] AVX512VBMI2,AVX512VL
|
|
VPEXPANDB ymmreg|mask|z,ymmrm256 [rm:t1s8: evex.256.66.0f38.w0 62 /r] AVX512VBMI2,AVX512VL
|
|
VPEXPANDB zmmreg|mask|z,zmmrm512 [rm:t1s8: evex.512.66.0f38.w0 62 /r] AVX512VBMI2
|
|
VPEXPANDW xmmreg|mask|z,xmmrm128 [rm:t1s16: evex.128.66.0f38.w1 62 /r] AVX512VBMI2,AVX512VL
|
|
VPEXPANDW ymmreg|mask|z,ymmrm256 [rm:t1s16: evex.256.66.0f38.w1 62 /r] AVX512VBMI2,AVX512VL
|
|
VPEXPANDW zmmreg|mask|z,zmmrm512 [rm:t1s16: evex.512.66.0f38.w1 62 /r] AVX512VBMI2
|
|
VPSHLDW xmmreg|mask|z,xmmreg*,xmmrm128,imm8 [rvmi:fvm: evex.nds.128.66.0f3a.w1 70 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHLDW ymmreg|mask|z,ymmreg*,ymmrm256,imm8 [rvmi:fvm: evex.nds.256.66.0f3a.w1 70 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHLDW zmmreg|mask|z,zmmreg*,zmmrm512,imm8 [rvmi:fvm: evex.nds.512.66.0f3a.w1 70 /r ib] AVX512VBMI2
|
|
VPSHLDD xmmreg|mask|z,xmmreg*,xmmrm128|b32,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w0 71 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHLDD ymmreg|mask|z,ymmreg*,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w0 71 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHLDD zmmreg|mask|z,zmmreg*,zmmrm512|b32,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w0 71 /r ib] AVX512VBMI2
|
|
VPSHLDQ xmmreg|mask|z,xmmreg*,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w1 71 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHLDQ ymmreg|mask|z,ymmreg*,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 71 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHLDQ zmmreg|mask|z,zmmreg*,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 71 /r ib] AVX512VBMI2
|
|
VPSHLDVW xmmreg|mask|z,xmmreg*,xmmrm128 [rvmi:fvm: evex.dds.128.66.0f38.w1 70 /r] AVX512VBMI2,AVX512VL
|
|
VPSHLDVW ymmreg|mask|z,ymmreg*,ymmrm256 [rvmi:fvm: evex.dds.256.66.0f38.w1 70 /r] AVX512VBMI2,AVX512VL
|
|
VPSHLDVW zmmreg|mask|z,zmmreg*,zmmrm512 [rvmi:fvm: evex.dds.512.66.0f38.w1 70 /r] AVX512VBMI2
|
|
VPSHLDVD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvmi:fv: evex.dds.128.66.0f38.w0 71 /r] AVX512VBMI2,AVX512VL
|
|
VPSHLDVD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvmi:fv: evex.dds.256.66.0f38.w0 71 /r] AVX512VBMI2,AVX512VL
|
|
VPSHLDVD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvmi:fv: evex.dds.512.66.0f38.w0 71 /r] AVX512VBMI2
|
|
VPSHLDVQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvmi:fv: evex.dds.128.66.0f38.w1 71 /r] AVX512VBMI2,AVX512VL
|
|
VPSHLDVQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvmi:fv: evex.dds.256.66.0f38.w1 71 /r] AVX512VBMI2,AVX512VL
|
|
VPSHLDVQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvmi:fv: evex.dds.512.66.0f38.w1 71 /r] AVX512VBMI2
|
|
VPSHRDW xmmreg|mask|z,xmmreg*,xmmrm128,imm8 [rvmi:fvm: evex.nds.128.66.0f3a.w1 72 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHRDW ymmreg|mask|z,ymmreg*,ymmrm256,imm8 [rvmi:fvm: evex.nds.256.66.0f3a.w1 72 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHRDW zmmreg|mask|z,zmmreg*,zmmrm512,imm8 [rvmi:fvm: evex.nds.512.66.0f3a.w1 72 /r ib] AVX512VBMI2
|
|
VPSHRDD xmmreg|mask|z,xmmreg*,xmmrm128|b32,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w0 73 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHRDD ymmreg|mask|z,ymmreg*,ymmrm256|b32,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w0 73 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHRDD zmmreg|mask|z,zmmreg*,zmmrm512|b32,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w0 73 /r ib] AVX512VBMI2
|
|
VPSHRDQ xmmreg|mask|z,xmmreg*,xmmrm128|b64,imm8 [rvmi:fv: evex.nds.128.66.0f3a.w1 73 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHRDQ ymmreg|mask|z,ymmreg*,ymmrm256|b64,imm8 [rvmi:fv: evex.nds.256.66.0f3a.w1 73 /r ib] AVX512VBMI2,AVX512VL
|
|
VPSHRDQ zmmreg|mask|z,zmmreg*,zmmrm512|b64,imm8 [rvmi:fv: evex.nds.512.66.0f3a.w1 73 /r ib] AVX512VBMI2
|
|
VPSHRDVW xmmreg|mask|z,xmmreg*,xmmrm128 [rvmi:fvm: evex.dds.128.66.0f38.w1 72 /r] AVX512VBMI2,AVX512VL
|
|
VPSHRDVW ymmreg|mask|z,ymmreg*,ymmrm256 [rvmi:fvm: evex.dds.256.66.0f38.w1 72 /r] AVX512VBMI2,AVX512VL
|
|
VPSHRDVW zmmreg|mask|z,zmmreg*,zmmrm512 [rvmi:fvm: evex.dds.512.66.0f38.w1 72 /r] AVX512VBMI2
|
|
VPSHRDVD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvmi:fv: evex.dds.128.66.0f38.w0 73 /r] AVX512VBMI2,AVX512VL
|
|
VPSHRDVD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvmi:fv: evex.dds.256.66.0f38.w0 73 /r] AVX512VBMI2,AVX512VL
|
|
VPSHRDVD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvmi:fv: evex.dds.512.66.0f38.w0 73 /r] AVX512VBMI2
|
|
VPSHRDVQ xmmreg|mask|z,xmmreg*,xmmrm128|b64 [rvmi:fv: evex.dds.128.66.0f38.w1 73 /r] AVX512VBMI2,AVX512VL
|
|
VPSHRDVQ ymmreg|mask|z,ymmreg*,ymmrm256|b64 [rvmi:fv: evex.dds.256.66.0f38.w1 73 /r] AVX512VBMI2,AVX512VL
|
|
VPSHRDVQ zmmreg|mask|z,zmmreg*,zmmrm512|b64 [rvmi:fv: evex.dds.512.66.0f38.w1 73 /r] AVX512VBMI2
|
|
|
|
;# AVX512 VNNI
|
|
VPDPBUSD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.dds.128.66.0f38.w0 50 /r] AVX512VNNI,AVX512VL
|
|
VPDPBUSD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.dds.256.66.0f38.w0 50 /r] AVX512VNNI,AVX512VL
|
|
VPDPBUSD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.dds.512.66.0f38.w0 50 /r] AVX512VNNI
|
|
VPDPBUSDS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.dds.128.66.0f38.w0 51 /r] AVX512VNNI,AVX512VL
|
|
VPDPBUSDS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.dds.256.66.0f38.w0 51 /r] AVX512VNNI,AVX512VL
|
|
VPDPBUSDS zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.dds.512.66.0f38.w0 51 /r] AVX512VNNI
|
|
VPDPWSSD xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.dds.128.66.0f38.w0 52 /r] AVX512VNNI,AVX512VL
|
|
VPDPWSSD ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.dds.256.66.0f38.w0 52 /r] AVX512VNNI,AVX512VL
|
|
VPDPWSSD zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.dds.512.66.0f38.w0 52 /r] AVX512VNNI
|
|
VPDPWSSDS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.dds.128.66.0f38.w0 53 /r] AVX512VNNI,AVX512VL
|
|
VPDPWSSDS ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.dds.256.66.0f38.w0 53 /r] AVX512VNNI,AVX512VL
|
|
VPDPWSSDS zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.dds.512.66.0f38.w0 53 /r] AVX512VNNI
|
|
|
|
;# AVX512 Bit Algorithms
|
|
VPOPCNTB xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.66.0f38.w0 54 /r] AVX512BITALG,AVX512VL
|
|
VPOPCNTB ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.66.0f38.w0 54 /r] AVX512BITALG,AVX512VL
|
|
VPOPCNTB zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.66.0f38.w0 54 /r] AVX512BITALG
|
|
VPOPCNTW xmmreg|mask|z,xmmrm128 [rm:fvm: evex.128.66.0f38.w1 54 /r] AVX512BITALG,AVX512VL
|
|
VPOPCNTW ymmreg|mask|z,ymmrm256 [rm:fvm: evex.256.66.0f38.w1 54 /r] AVX512BITALG,AVX512VL
|
|
VPOPCNTW zmmreg|mask|z,zmmrm512 [rm:fvm: evex.512.66.0f38.w1 54 /r] AVX512BITALG
|
|
VPOPCNTD xmmreg|mask|z,xmmrm128 [rm:fv: evex.128.66.0f38.w0 55 /r] AVX512VPOPCNTDQ,AVX512VL
|
|
VPOPCNTD ymmreg|mask|z,ymmrm256 [rm:fv: evex.256.66.0f38.w0 55 /r] AVX512VPOPCNTDQ,AVX512VL
|
|
VPOPCNTD zmmreg|mask|z,zmmrm512 [rm:fv: evex.512.66.0f38.w0 55 /r] AVX512VPOPCNTDQ
|
|
VPOPCNTQ xmmreg|mask|z,xmmrm128 [rm:fv: evex.128.66.0f38.w1 55 /r] AVX512VPOPCNTDQ,AVX512VL
|
|
VPOPCNTQ ymmreg|mask|z,ymmrm256 [rm:fv: evex.256.66.0f38.w1 55 /r] AVX512VPOPCNTDQ,AVX512VL
|
|
VPOPCNTQ zmmreg|mask|z,zmmrm512 [rm:fv: evex.512.66.0f38.w1 55 /r] AVX512VPOPCNTDQ
|
|
VPSHUFBITQMB kreg|mask,xmmreg,xmmrm128 [rvm:fvm: evex.nds.128.66.0f38.w0 8f /r] AVX512BITALG,AVX512VL
|
|
VPSHUFBITQMB kreg|mask,ymmreg,ymmrm256 [rvm:fvm: evex.nds.256.66.0f38.w0 8f /r] AVX512BITALG,AVX512VL
|
|
VPSHUFBITQMB kreg|mask,zmmreg,zmmrm512 [rvm:fvm: evex.nds.512.66.0f38.w0 8f /r] AVX512BITALG
|
|
|
|
;# AVX512 4-iteration Multiply-Add
|
|
V4FMADDPS zmmreg|mask|z,zmmreg|rs4,mem [rvm:m128:evex.dds.512.f2.0f38.w0 9a /r] AVX5124FMAPS,SO
|
|
V4FNMADDPS zmmreg|mask|z,zmmreg|rs4,mem [rvm:m128:evex.dds.512.f2.0f38.w0 aa /r] AVX5124FMAPS,SO
|
|
V4FMADDSS zmmreg|mask|z,zmmreg|rs4,mem [rvm:m128:evex.dds.lig.f2.0f38.w0 9b /r] AVX5124FMAPS,SO
|
|
V4FNMADDSS zmmreg|mask|z,zmmreg|rs4,mem [rvm:m128:evex.dds.lig.f2.0f38.w0 ab /r] AVX5124FMAPS,SO
|
|
|
|
;# AVX512 4-iteration Dot Product
|
|
V4DPWSSDS zmmreg|mask|z,zmmreg|rs4,mem [rvm:m128:evex.dds.512.f2.0f38.w0 53 /r] AVX5124VNNIW,SO
|
|
V4DPWSSD zmmreg|mask|z,zmmreg|rs4,mem [rvm:m128:evex.dds.512.f2.0f38.w0 52 /r] AVX5124VNNIW,SO
|
|
|
|
;# Intel Software Guard Extensions (SGX)
|
|
ENCLS void [ np 0f 01 cf] SGX
|
|
ENCLU void [ np 0f 01 d7] SGX
|
|
ENCLV void [ np 0f 01 c0] SGX
|
|
|
|
;# Intel Control-Flow Enforcement Technology (CET)
|
|
CLRSSBSY mem64 [m: f3 0f ae /6] CET
|
|
ENDBR32 void [ f3 0f 1e fb] CET
|
|
ENDBR64 void [ f3 0f 1e fa] CET
|
|
INCSSPD reg32 [m: o32 f3 0f ae /5] CET
|
|
INCSSPQ reg64 [m: o64 f3 0f ae /5] CET,LONG
|
|
RDSSPD reg32 [m: o32 f3 0f 1e /1] CET
|
|
RDSSPQ reg64 [m: o64 f3 0f 1e /1] CET,LONG
|
|
RSTORSSP mem64 [m: f3 0f 01 /5] CET
|
|
SAVEPREVSSP void [ f3 0f 01 ea] CET
|
|
SETSSBSY void [ f3 0f 01 e8] CET
|
|
WRUSSD mem32,reg32 [mr: o32 66 0f38 f5 /r] CET
|
|
WRUSSQ mem64,reg64 [mr: o64 66 0f38 f5 /r] CET,LONG
|
|
WRSSD mem32,reg32 [mr: o32 0f38 f6 /r] CET
|
|
WRSSQ mem64,reg64 [mr: o64 0f38 f6 /r] CET,LONG
|
|
|
|
;# Instructions from ISE doc 319433-040, June 2020
|
|
ENQCMD reg16,mem512 [rm: a16 f2 0f38 f8 /r] ENQCMD,SZ,NOLONG
|
|
ENQCMD reg32,mem512 [rm: a16 f2 0f38 f8 /r] ENQCMD,SZ,NOLONG,ND
|
|
ENQCMD reg32,mem512 [rm: a32 f2 0f38 f8 /r] ENQCMD,SZ
|
|
ENQCMD reg64,mem512 [rm: a64 f2 0f38 f8 /r] ENQCMD,SZ,LONG
|
|
ENQCMDS reg16,mem512 [rm: a16 f3 0f38 f8 /r] ENQCMD,SZ,NOLONG,PRIV
|
|
ENQCMDS reg32,mem512 [rm: a16 f3 0f38 f8 /r] ENQCMD,SZ,NOLONG,PRIV,ND
|
|
ENQCMDS reg32,mem512 [rm: a32 f3 0f38 f8 /r] ENQCMD,SZ,PRIV
|
|
ENQCMDS reg64,mem512 [rm: a64 f3 0f38 f8 /r] ENQCMD,SZ,PRIV,LONG
|
|
PCONFIG void [ np 0f 01 c5] PCONFIG,PRIV
|
|
XRESLDTRK void [ f2 0f 01 e9] TSXLDTRK
|
|
XSUSLDTRK void [ f2 0f 01 e8] TSXLDTRK
|
|
|
|
;# AVX512 Bfloat16 instructions
|
|
VCVTNE2PS2BF16 xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.128.f2.0f38.w0 72 /r] AVX512BF16
|
|
VCVTNE2PS2BF16 ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.256.f2.0f38.w0 72 /r] AVX512BF16
|
|
VCVTNE2PS2BF16 zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.512.f2.0f38.w0 72 /r] AVX512BF16
|
|
VCVTNEPS2BF16 xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.128.f3.0f38.w0 72 /r] AVX512BF16
|
|
VCVTNEPS2BF16 ymmreg|mask|z,ymmreg*,ymmrm256|b32 [rvm:fv: evex.256.f3.0f38.w0 72 /r] AVX512BF16
|
|
VCVTNEPS2BF16 zmmreg|mask|z,zmmreg*,zmmrm512|b32 [rvm:fv: evex.512.f3.0f38.w0 72 /r] AVX512BF16
|
|
VDPBF16PS xmmreg|mask|z,xmmreg*,xmmrm128|b32 [rvm:fv: evex.128.f3.0f38.w0 52 /r] AVX512BF16
|
|
VDPBF16PS ymmreg|mask|z,ymmreg*,ymmrm128|b32 [rvm:fv: evex.256.f3.0f38.w0 52 /r] AVX512BF16
|
|
VDPBF16PS zmmreg|mask|z,zmmreg*,zmmrm128|b32 [rvm:fv: evex.512.f3.0f38.w0 52 /r] AVX512BF16
|
|
|
|
;# AVX512 mask intersect instructions
|
|
VP2INTERSECTD kreg|rs2,xmmreg,xmmrm128|b32 [rvm:fv: evex.nds.128.f2.0f38.w0 68 /r] AVX512BF16
|
|
VP2INTERSECTD kreg|rs2,ymmreg,ymmrm128|b32 [rvm:fv: evex.nds.256.f2.0f38.w0 68 /r] AVX512BF16
|
|
VP2INTERSECTD kreg|rs2,zmmreg,zmmrm128|b32 [rvm:fv: evex.nds.512.f2.0f38.w0 68 /r] AVX512BF16
|
|
|
|

;# Intel Advanced Matrix Extensions (AMX)
LDTILECFG	mem512			[m:	vex.128.np.0f38.w0 49 /0]	AMXTILE,SZ,LONG
STTILECFG	mem512			[m:	vex.128.66.0f38.w0 49 /0]	AMXTILE,SZ,LONG
TDPBF16PS	tmmreg,tmmreg,tmmreg	[rmv:	vex.128.f3.0f38.w0 5c /r]	AMXBF16,LONG
TDPBSSD		tmmreg,tmmreg,tmmreg	[rmv:	vex.128.f2.0f38.w0 5e /r]	AMXINT8,LONG
TDPBSUD		tmmreg,tmmreg,tmmreg	[rmv:	vex.128.f3.0f38.w0 5e /r]	AMXINT8,LONG
TDPBUSD		tmmreg,tmmreg,tmmreg	[rmv:	vex.128.66.0f38.w0 5e /r]	AMXINT8,LONG
TDPBUUD		tmmreg,tmmreg,tmmreg	[rmv:	vex.128.np.0f38.w0 5e /r]	AMXINT8,LONG
TILELOADD	tmmreg,mem		[rm:	vex.128.f2.0f38.w0 4b /r]	AMXTILE,MIB,SIB,ANYSIZE,AR1,LONG
TILELOADDT1	tmmreg,mem		[rm:	vex.128.66.0f38.w0 4b /r]	AMXTILE,MIB,SIB,ANYSIZE,AR1,LONG
TILERELEASE	void			[	vex.128.np.0f38.w0 49 c0]	AMXTILE,LONG
TILESTORED	mem,tmmreg		[mr:	vex.128.f3.0f38.w0 4b /r]	AMXTILE,MIB,SIB,ANYSIZE,AR0,LONG
TILEZERO	tmmreg			[r:	vex.128.f2.0f38.w0 49 /3r0]	AMXTILE,LONG
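
; Illustrative usage only (not part of the instruction table): a minimal AMX
; sequence loads a tile configuration, streams tiles in with a SIB-addressed
; stride, multiplies, and stores the accumulator (labels and strides below
; are assumptions).
;
;         ldtilecfg  [tilecfg]            ; 64-byte palette/shape descriptor
;         tileloadd  tmm0, [rsi+rdx]      ; A tile, rdx = row stride in bytes
;         tileloadd  tmm1, [rdi+rdx]      ; B tile
;         tdpbssd    tmm2, tmm0, tmm1     ; C += A * B (signed int8 dot)
;         tilestored [rbx+rdx], tmm2      ; write back the accumulator tile
;         tilerelease                     ; return tiles to INIT state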

;# Intel AVX512-FP16 instructions
VADDPH		xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.np.map5.w0 58 /r]		AVX512FP16,AVX512VL
VADDPH		ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.np.map5.w0 58 /r]		AVX512FP16,AVX512VL
VADDPH		zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.np.map5.w0 58 /r]		AVX512FP16
VADDSH		xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.f3.map5.w0 58 /r]	AVX512FP16
VCMPPH		kreg|mask,xmmreg*,xmmrm128|b16,imm8	[rvmi:fv: evex.nds.128.np.0f3a.w0 C2 /r ib]	AVX512FP16,AVX512VL
VCMPPH		kreg|mask,ymmreg*,ymmrm256|b16,imm8	[rvmi:fv: evex.nds.256.np.0f3a.w0 C2 /r ib]	AVX512FP16,AVX512VL
VCMPPH		kreg|mask,zmmreg*,zmmrm512|b16|sae,imm8	[rvmi:fv: evex.nds.512.np.0f3a.w0 C2 /r ib]	AVX512FP16
VCMPSH		kreg|mask,xmmreg*,xmmrm16|sae,imm8	[rvmi:t1s: evex.nds.lig.f3.0f3a.w0 C2 /r ib]	AVX512FP16
VCOMISH		xmmreg,xmmrm16|sae			[rm:t1s: evex.lig.np.map5.w0 2F /r]		AVX512FP16
VCVTDQ2PH	xmmreg|mask|z,xmmrm128|b32		[rm:fv:	evex.128.np.map5.w0 5B /r]		AVX512FP16,AVX512VL
VCVTDQ2PH	xmmreg|mask|z,ymmrm256|b32		[rm:fv:	evex.256.np.map5.w0 5B /r]		AVX512FP16,AVX512VL
VCVTDQ2PH	ymmreg|mask|z,zmmrm512|b32|er		[rm:fv:	evex.512.np.map5.w0 5B /r]		AVX512FP16
VCVTPD2PH	xmmreg|mask|z,xmmrm128|b64		[rm:fv:	evex.128.66.map5.w1 5A /r]		AVX512FP16,AVX512VL
VCVTPD2PH	xmmreg|mask|z,ymmrm256|b64		[rm:fv:	evex.256.66.map5.w1 5A /r]		AVX512FP16,AVX512VL
VCVTPD2PH	xmmreg|mask|z,zmmrm512|b64|er		[rm:fv:	evex.512.66.map5.w1 5A /r]		AVX512FP16
VCVTPH2DQ	xmmreg|mask|z,xmmrm64|b16		[rm:hv:	evex.128.66.map5.w0 5B /r]		AVX512FP16,AVX512VL
VCVTPH2DQ	ymmreg|mask|z,xmmrm128|b16		[rm:hv:	evex.256.66.map5.w0 5B /r]		AVX512FP16,AVX512VL
VCVTPH2DQ	zmmreg|mask|z,ymmrm256|b16|er		[rm:hv:	evex.512.66.map5.w0 5B /r]		AVX512FP16
VCVTPH2PD	xmmreg|mask|z,xmmrm32|b16		[rm:qvm: evex.128.np.map5.w0 5A /r]		AVX512FP16,AVX512VL
VCVTPH2PD	ymmreg|mask|z,xmmrm64|b16		[rm:qvm: evex.256.np.map5.w0 5A /r]		AVX512FP16,AVX512VL
VCVTPH2PD	zmmreg|mask|z,xmmrm128|b16|sae		[rm:qvm: evex.512.np.map5.w0 5A /r]		AVX512FP16
VCVTPH2PS	xmmreg,xmmrm64			[rm:	vex.128.66.0f38.w0 13 /r]	F16C
VCVTPH2PS	ymmreg,xmmrm128			[rm:	vex.256.66.0f38.w0 13 /r]	F16C
VCVTPH2PS	xmmreg|mask|z,xmmrm64		[rm:hvm:evex.128.66.0f38.w0 13 /r]	AVX512,AVX512VL
VCVTPH2PS	ymmreg|mask|z,xmmrm128		[rm:hvm:evex.256.66.0f38.w0 13 /r]	AVX512,AVX512VL
VCVTPH2PS	zmmreg|mask|z,ymmrm256|sae	[rm:hvm:evex.512.66.0f38.w0 13 /r]	AVX512
VCVTPH2PSX	xmmreg|mask|z,xmmrm64|b16	[rm:hv:	evex.128.66.map6.w0 13 /r]	AVX512FP16,AVX512VL
VCVTPH2PSX	ymmreg|mask|z,xmmrm128|b16	[rm:hv:	evex.256.66.map6.w0 13 /r]	AVX512FP16,AVX512VL
VCVTPH2PSX	zmmreg|mask|z,ymmrm256|b16|sae	[rm:hv:	evex.512.66.map6.w0 13 /r]	AVX512FP16
VCVTPH2QQ	xmmreg|mask|z,xmmrm32|b16	[rm:qvm:evex.128.66.map5.w0 7b /r]	AVX512FP16,AVX512VL
VCVTPH2QQ	ymmreg|mask|z,xmmrm64|b16	[rm:qvm:evex.256.66.map5.w0 7b /r]	AVX512FP16,AVX512VL
VCVTPH2QQ	zmmreg|mask|z,xmmrm128|b16|er	[rm:qvm:evex.512.66.map5.w0 7b /r]	AVX512FP16
VCVTPH2UDQ	xmmreg|mask|z,xmmrm64|b16	[rm:hv:	evex.128.np.map5.w0 79 /r]	AVX512FP16,AVX512VL
VCVTPH2UDQ	ymmreg|mask|z,xmmrm128|b16	[rm:hv:	evex.256.np.map5.w0 79 /r]	AVX512FP16,AVX512VL
VCVTPH2UDQ	zmmreg|mask|z,ymmrm256|b16|er	[rm:hv:	evex.512.np.map5.w0 79 /r]	AVX512FP16
VCVTPH2UQQ	xmmreg|mask|z,xmmrm32|b16	[rm:qvm:evex.128.66.map5.w0 79 /r]	AVX512FP16,AVX512VL
VCVTPH2UQQ	ymmreg|mask|z,xmmrm64|b16	[rm:qvm:evex.256.66.map5.w0 79 /r]	AVX512FP16,AVX512VL
VCVTPH2UQQ	zmmreg|mask|z,xmmrm128|b16|er	[rm:qvm:evex.512.66.map5.w0 79 /r]	AVX512FP16
VCVTPH2UW	xmmreg|mask|z,xmmrm128|b16	[rm:fv:	evex.128.np.map5.w0 7d /r]	AVX512FP16,AVX512VL
VCVTPH2UW	ymmreg|mask|z,ymmrm256|b16	[rm:fv:	evex.256.np.map5.w0 7d /r]	AVX512FP16,AVX512VL
VCVTPH2UW	zmmreg|mask|z,zmmrm512|b16|er	[rm:fv:	evex.512.np.map5.w0 7d /r]	AVX512FP16
VCVTPH2W	xmmreg|mask|z,xmmrm128|b16	[rm:fv:	evex.128.66.map5.w0 7d /r]	AVX512FP16,AVX512VL
VCVTPH2W	ymmreg|mask|z,ymmrm256|b16	[rm:fv:	evex.256.66.map5.w0 7d /r]	AVX512FP16,AVX512VL
VCVTPH2W	zmmreg|mask|z,zmmrm512|b16|er	[rm:fv:	evex.512.66.map5.w0 7d /r]	AVX512FP16
VCVTPS2PH	xmmrm64,xmmreg,imm8		[mri:	vex.128.66.0f3a.w0 1d /r ib]	F16C
VCVTPS2PH	xmmrm128,ymmreg,imm8		[mri:	vex.256.66.0f3a.w0 1d /r ib]	F16C
VCVTPS2PH	xmmreg|mask|z,xmmreg,imm8	[mri:hvm: evex.128.66.0f3a.w0 1d /r ib]	AVX512,AVX512VL
VCVTPS2PH	mem64|mask,xmmreg,imm8		[mri:hvm: evex.128.66.0f3a.w0 1d /r ib]	AVX512,AVX512VL
VCVTPS2PH	xmmreg|mask|z,ymmreg,imm8	[mri:hvm: evex.256.66.0f3a.w0 1d /r ib]	AVX512,AVX512VL
VCVTPS2PH	mem128|mask,ymmreg,imm8		[mri:hvm: evex.256.66.0f3a.w0 1d /r ib]	AVX512,AVX512VL
VCVTPS2PH	ymmreg|mask|z,zmmreg|sae,imm8	[mri:hvm: evex.512.66.0f3a.w0 1d /r ib]	AVX512
VCVTPS2PH	mem256|mask,zmmreg|sae,imm8	[mri:hvm: evex.512.66.0f3a.w0 1d /r ib]	AVX512
VCVTPS2PHX	xmmreg|mask|z,xmmrm128|b32	[rm:fv:	evex.128.66.map5.w0 1d /r]	AVX512FP16,AVX512VL
VCVTPS2PHX	xmmreg|mask|z,ymmrm256|b32	[rm:fv:	evex.256.66.map5.w0 1d /r]	AVX512FP16,AVX512VL
VCVTPS2PHX	ymmreg|mask|z,zmmrm512|b32|er	[rm:fv:	evex.512.66.map5.w0 1d /r]	AVX512FP16
VCVTQQ2PH	xmmreg|mask|z,xmmrm128|b64	[rm:fv:	evex.128.np.map5.w1 5b /r]	AVX512FP16,AVX512VL
VCVTQQ2PH	xmmreg|mask|z,ymmrm256|b64	[rm:fv:	evex.256.np.map5.w1 5b /r]	AVX512FP16,AVX512VL
VCVTQQ2PH	xmmreg|mask|z,zmmrm512|b64|er	[rm:fv:	evex.512.np.map5.w1 5b /r]	AVX512FP16
VCVTSD2SH	xmmreg|mask|z,xmmreg*,xmmrm64|er	[rvm:t1s: evex.nds.lig.f2.map5.w1 5a /r]	AVX512FP16
VCVTSH2SD	xmmreg|mask|z,xmmreg*,xmmrm16|sae	[rvm:t1s: evex.nds.lig.f3.map5.w0 5a /r]	AVX512FP16
VCVTSH2SI	reg32,xmmrm16|er			[rm:t1s:evex.lig.f3.map5.w0 2d /r]		AVX512FP16
VCVTSH2SI	reg64,xmmrm16|er			[rm:t1s:evex.lig.f3.map5.w1 2d /r]		AVX512FP16
VCVTSH2SS	xmmreg|mask|z,xmmreg*,xmmrm16|sae	[rvm:t1s: evex.nds.lig.np.map6.w0 13 /r]	AVX512FP16
VCVTSH2USI	reg32,xmmrm16|er			[rm:t1s:evex.lig.f3.map5.w0 79 /r]		AVX512FP16
VCVTSH2USI	reg64,xmmrm16|er			[rm:t1s:evex.lig.f3.map5.w1 79 /r]		AVX512FP16
VCVTSI2SH	xmmreg,xmmreg*,rm32|er			[rvm:t1s: evex.nds.lig.f3.map5.w0 2a /r]	AVX512FP16
VCVTSI2SH	xmmreg,xmmreg*,rm64|er			[rvm:t1s: evex.nds.lig.f3.map5.w1 2a /r]	AVX512FP16
VCVTSS2SH	xmmreg|mask|z,xmmreg*,xmmrm32|er	[rvm:t1s: evex.nds.lig.np.map5.w0 1d /r]	AVX512FP16
VCVTTPH2DQ	xmmreg|mask|z,xmmrm64|b16	[rm:hv:	evex.128.f3.map5.w0 5b /r]	AVX512FP16,AVX512VL
VCVTTPH2DQ	ymmreg|mask|z,xmmrm128|b16	[rm:hv:	evex.256.f3.map5.w0 5b /r]	AVX512FP16,AVX512VL
VCVTTPH2DQ	zmmreg|mask|z,ymmrm256|b16|sae	[rm:hv:	evex.512.f3.map5.w0 5b /r]	AVX512FP16
VCVTTPH2QQ	xmmreg|mask|z,xmmrm32|b16	[rm:qvm:evex.128.66.map5.w0 7a /r]	AVX512FP16,AVX512VL
VCVTTPH2QQ	ymmreg|mask|z,xmmrm64|b16	[rm:qvm:evex.256.66.map5.w0 7a /r]	AVX512FP16,AVX512VL
VCVTTPH2QQ	zmmreg|mask|z,xmmrm128|b16|sae	[rm:qvm:evex.512.66.map5.w0 7a /r]	AVX512FP16
VCVTTPH2UDQ	xmmreg|mask|z,xmmrm64|b16	[rm:hv:	evex.128.np.map5.w0 78 /r]	AVX512FP16,AVX512VL
VCVTTPH2UDQ	ymmreg|mask|z,xmmrm128|b16	[rm:hv:	evex.256.np.map5.w0 78 /r]	AVX512FP16,AVX512VL
VCVTTPH2UDQ	zmmreg|mask|z,ymmrm256|b16|sae	[rm:hv:	evex.512.np.map5.w0 78 /r]	AVX512FP16
VCVTTPH2UQQ	xmmreg|mask|z,xmmrm32|b16	[rm:qvm: evex.128.66.map5.w0 78 /r]	AVX512FP16,AVX512VL
VCVTTPH2UQQ	ymmreg|mask|z,xmmrm64|b16	[rm:qvm: evex.256.66.map5.w0 78 /r]	AVX512FP16,AVX512VL
VCVTTPH2UQQ	zmmreg|mask|z,xmmrm128|b16|sae	[rm:qvm: evex.512.66.map5.w0 78 /r]	AVX512FP16
VCVTTPH2UW	xmmreg|mask|z,xmmrm128|b16	[rm:fv:	evex.128.np.map5.w0 7c /r]	AVX512FP16,AVX512VL
VCVTTPH2UW	ymmreg|mask|z,ymmrm256|b16	[rm:fv:	evex.256.np.map5.w0 7c /r]	AVX512FP16,AVX512VL
VCVTTPH2UW	zmmreg|mask|z,zmmrm512|b16|sae	[rm:fv:	evex.512.np.map5.w0 7c /r]	AVX512FP16
VCVTTPH2W	xmmreg|mask|z,xmmrm128|b16	[rm:fv:	evex.128.66.map5.w0 7c /r]	AVX512FP16,AVX512VL
VCVTTPH2W	ymmreg|mask|z,ymmrm256|b16	[rm:fv:	evex.256.66.map5.w0 7c /r]	AVX512FP16,AVX512VL
VCVTTPH2W	zmmreg|mask|z,zmmrm512|b16|sae	[rm:fv:	evex.512.66.map5.w0 7c /r]	AVX512FP16
VCVTTSH2SI	reg32,xmmrm16|sae		[rm:t1s:evex.lig.f3.map5.w0 2c /r]	AVX512FP16
VCVTTSH2SI	reg64,xmmrm16|sae		[rm:t1s:evex.lig.f3.map5.w1 2c /r]	AVX512FP16
VCVTTSH2USI	reg32,xmmrm16|sae		[rm:t1s:evex.lig.f3.map5.w0 78 /r]	AVX512FP16
VCVTTSH2USI	reg64,xmmrm16|sae		[rm:t1s:evex.lig.f3.map5.w1 78 /r]	AVX512FP16
VCVTUDQ2PH	xmmreg|mask|z,xmmrm128|b32	[rm:fv:	evex.128.f2.map5.w0 7a /r]	AVX512FP16,AVX512VL
VCVTUDQ2PH	xmmreg|mask|z,ymmrm256|b32	[rm:fv:	evex.256.f2.map5.w0 7a /r]	AVX512FP16,AVX512VL
VCVTUDQ2PH	ymmreg|mask|z,zmmrm512|b32|er	[rm:fv:	evex.512.f2.map5.w0 7a /r]	AVX512FP16
VCVTUQQ2PH	xmmreg|mask|z,xmmrm128|b64	[rm:fv:	evex.128.f2.map5.w1 7a /r]	AVX512FP16,AVX512VL
VCVTUQQ2PH	xmmreg|mask|z,ymmrm256|b64	[rm:fv:	evex.256.f2.map5.w1 7a /r]	AVX512FP16,AVX512VL
VCVTUQQ2PH	xmmreg|mask|z,zmmrm512|b64|er	[rm:fv:	evex.512.f2.map5.w1 7a /r]	AVX512FP16
VCVTUSI2SH	xmmreg,xmmreg*,rm32|er		[rvm:t1s: evex.nds.lig.f3.map5.w0 7b /r]	AVX512FP16
VCVTUSI2SH	xmmreg,xmmreg*,rm64|er		[rvm:t1s: evex.nds.lig.f3.map5.w1 7b /r]	AVX512FP16
VCVTUW2PH	xmmreg|mask|z,xmmrm128|b16	[rm:fv:	evex.128.f2.map5.w0 7d /r]	AVX512FP16,AVX512VL
VCVTUW2PH	ymmreg|mask|z,ymmrm256|b16	[rm:fv:	evex.256.f2.map5.w0 7d /r]	AVX512FP16,AVX512VL
VCVTUW2PH	zmmreg|mask|z,zmmrm512|b16|er	[rm:fv:	evex.512.f2.map5.w0 7d /r]	AVX512FP16
VCVTW2PH	xmmreg|mask|z,xmmrm128|b16	[rm:fv:	evex.128.f3.map5.w0 7d /r]	AVX512FP16,AVX512VL
VCVTW2PH	ymmreg|mask|z,ymmrm256|b16	[rm:fv:	evex.256.f3.map5.w0 7d /r]	AVX512FP16,AVX512VL
VCVTW2PH	zmmreg|mask|z,zmmrm512|b16|er	[rm:fv:	evex.512.f3.map5.w0 7d /r]	AVX512FP16
VDIVPH		xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.np.map5.w0 5e /r]		AVX512FP16,AVX512VL
VDIVPH		ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.np.map5.w0 5e /r]		AVX512FP16,AVX512VL
VDIVPH		zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.np.map5.w0 5e /r]		AVX512FP16
VDIVSH		xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.f3.map5.w0 5e /r]	AVX512FP16
VFCMADDCPH	xmmreg|mask|z,xmmreg*,xmmrm128|b32	[rvm:fv: evex.nds.128.f2.map6.w0 56 /r]		AVX512FP16,AVX512VL
VFCMADDCPH	ymmreg|mask|z,ymmreg*,ymmrm256|b32	[rvm:fv: evex.nds.256.f2.map6.w0 56 /r]		AVX512FP16,AVX512VL
VFCMADDCPH	zmmreg|mask|z,zmmreg*,zmmrm512|b32|er	[rvm:fv: evex.nds.512.f2.map6.w0 56 /r]		AVX512FP16
VFMADDCPH	xmmreg|mask|z,xmmreg*,xmmrm128|b32	[rvm:fv: evex.nds.128.f3.map6.w0 56 /r]		AVX512FP16,AVX512VL
VFMADDCPH	ymmreg|mask|z,ymmreg*,ymmrm256|b32	[rvm:fv: evex.nds.256.f3.map6.w0 56 /r]		AVX512FP16,AVX512VL
VFMADDCPH	zmmreg|mask|z,zmmreg*,zmmrm512|b32|er	[rvm:fv: evex.nds.512.f3.map6.w0 56 /r]		AVX512FP16
VFCMADDCSH	xmmreg|mask|z,xmmreg*,xmmrm32|er	[rvm:t1s: evex.nds.lig.f2.map6.w0 57 /r]	AVX512FP16
VFMADDCSH	xmmreg|mask|z,xmmreg*,xmmrm32|er	[rvm:t1s: evex.nds.lig.f3.map6.w0 57 /r]	AVX512FP16
VFCMULCPH	xmmreg|mask|z,xmmreg*,xmmrm128|b32	[rvm:fv: evex.nds.128.f2.map6.w0 d6 /r]		AVX512FP16,AVX512VL
VFCMULCPH	ymmreg|mask|z,ymmreg*,ymmrm256|b32	[rvm:fv: evex.nds.256.f2.map6.w0 d6 /r]		AVX512FP16,AVX512VL
VFCMULCPH	zmmreg|mask|z,zmmreg*,zmmrm512|b32|er	[rvm:fv: evex.nds.512.f2.map6.w0 d6 /r]		AVX512FP16
VFMULCPH	xmmreg|mask|z,xmmreg*,xmmrm128|b32	[rvm:fv: evex.nds.128.f3.map6.w0 d6 /r]		AVX512FP16,AVX512VL
VFMULCPH	ymmreg|mask|z,ymmreg*,ymmrm256|b32	[rvm:fv: evex.nds.256.f3.map6.w0 d6 /r]		AVX512FP16,AVX512VL
VFMULCPH	zmmreg|mask|z,zmmreg*,zmmrm512|b32|er	[rvm:fv: evex.nds.512.f3.map6.w0 d6 /r]		AVX512FP16
VFCMULCSH	xmmreg|mask|z,xmmreg*,xmmrm32|er	[rvm:t1s: evex.nds.lig.f2.map6.w0 d7 /r]	AVX512FP16
VFMULCSH	xmmreg|mask|z,xmmreg*,xmmrm32|er	[rvm:t1s: evex.nds.lig.f3.map6.w0 d7 /r]	AVX512FP16
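
; Illustrative usage only (not part of the instruction table): the complex
; FP16 multiply/FMA forms treat each dword as a (real,imag) FP16 pair, so a
; complex dot-product step could be sketched as follows (register choices
; are assumptions).
;
;         vfmulcph   zmm0, zmm1, zmm2      ; packed complex multiply
;         vfcmaddcph zmm3, zmm1, zmm2      ; conjugate complex multiply-add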
VFMADDSUB132PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 96 /r]	AVX512FP16,AVX512VL
VFMADDSUB132PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 96 /r]	AVX512FP16,AVX512VL
VFMADDSUB132PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 96 /r]	AVX512FP16
VFMADDSUB213PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 a6 /r]	AVX512FP16,AVX512VL
VFMADDSUB213PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 a6 /r]	AVX512FP16,AVX512VL
VFMADDSUB213PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 a6 /r]	AVX512FP16
VFMADDSUB231PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 b6 /r]	AVX512FP16,AVX512VL
VFMADDSUB231PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 b6 /r]	AVX512FP16,AVX512VL
VFMADDSUB231PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 b6 /r]	AVX512FP16
VFMSUBADD132PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 97 /r]	AVX512FP16,AVX512VL
VFMSUBADD132PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 97 /r]	AVX512FP16,AVX512VL
VFMSUBADD132PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 97 /r]	AVX512FP16
VFMSUBADD213PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 a7 /r]	AVX512FP16,AVX512VL
VFMSUBADD213PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 a7 /r]	AVX512FP16,AVX512VL
VFMSUBADD213PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 a7 /r]	AVX512FP16
VFMSUBADD231PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 b7 /r]	AVX512FP16,AVX512VL
VFMSUBADD231PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 b7 /r]	AVX512FP16,AVX512VL
VFMSUBADD231PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 b7 /r]	AVX512FP16
VFMADD132PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 98 /r]	AVX512FP16,AVX512VL
VFMADD132PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 98 /r]	AVX512FP16,AVX512VL
VFMADD132PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 98 /r]	AVX512FP16
VFMADD213PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 a8 /r]	AVX512FP16,AVX512VL
VFMADD213PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 a8 /r]	AVX512FP16,AVX512VL
VFMADD213PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 a8 /r]	AVX512FP16
VFMADD231PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 b8 /r]	AVX512FP16,AVX512VL
VFMADD231PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 b8 /r]	AVX512FP16,AVX512VL
VFMADD231PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 b8 /r]	AVX512FP16
VFNMADD132PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 9c /r]	AVX512FP16,AVX512VL
VFNMADD132PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 9c /r]	AVX512FP16,AVX512VL
VFNMADD132PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 9c /r]	AVX512FP16
VFNMADD213PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 ac /r]	AVX512FP16,AVX512VL
VFNMADD213PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 ac /r]	AVX512FP16,AVX512VL
VFNMADD213PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 ac /r]	AVX512FP16
VFNMADD231PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 bc /r]	AVX512FP16,AVX512VL
VFNMADD231PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 bc /r]	AVX512FP16,AVX512VL
VFNMADD231PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 bc /r]	AVX512FP16
VFMADD132SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 99 /r]	AVX512FP16
VFMADD213SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 a9 /r]	AVX512FP16
VFMADD231SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 b9 /r]	AVX512FP16
VFNMADD132SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 9d /r]	AVX512FP16
VFNMADD213SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 ad /r]	AVX512FP16
VFNMADD231SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 bd /r]	AVX512FP16
VFMSUB132PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 9a /r]	AVX512FP16,AVX512VL
VFMSUB132PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 9a /r]	AVX512FP16,AVX512VL
VFMSUB132PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 9a /r]	AVX512FP16
VFMSUB213PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 aa /r]	AVX512FP16,AVX512VL
VFMSUB213PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 aa /r]	AVX512FP16,AVX512VL
VFMSUB213PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 aa /r]	AVX512FP16
VFMSUB231PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 ba /r]	AVX512FP16,AVX512VL
VFMSUB231PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 ba /r]	AVX512FP16,AVX512VL
VFMSUB231PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 ba /r]	AVX512FP16
VFNMSUB132PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 9e /r]	AVX512FP16,AVX512VL
VFNMSUB132PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 9e /r]	AVX512FP16,AVX512VL
VFNMSUB132PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 9e /r]	AVX512FP16
VFNMSUB213PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 ae /r]	AVX512FP16,AVX512VL
VFNMSUB213PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 ae /r]	AVX512FP16,AVX512VL
VFNMSUB213PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 ae /r]	AVX512FP16
VFNMSUB231PH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 be /r]	AVX512FP16,AVX512VL
VFNMSUB231PH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 be /r]	AVX512FP16,AVX512VL
VFNMSUB231PH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 be /r]	AVX512FP16
VFMSUB132SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 9b /r]	AVX512FP16
VFMSUB213SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 ab /r]	AVX512FP16
VFMSUB231SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 bb /r]	AVX512FP16
VFNMSUB132SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 9f /r]	AVX512FP16
VFNMSUB213SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 af /r]	AVX512FP16
VFNMSUB231SH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 bf /r]	AVX512FP16
VFPCLASSPH	kreg|mask,xmmrm128|b16,imm8		[rmi:fv: evex.128.np.0f3a.w0 66 /r ib]	AVX512FP16,AVX512VL
VFPCLASSPH	kreg|mask,ymmrm256|b16,imm8		[rmi:fv: evex.256.np.0f3a.w0 66 /r ib]	AVX512FP16,AVX512VL
VFPCLASSPH	kreg|mask,zmmrm512|b16,imm8		[rmi:fv: evex.512.np.0f3a.w0 66 /r ib]	AVX512FP16
VFPCLASSSH	kreg|mask,xmmrm16,imm8			[rmi:t1s: evex.lig.np.0f3a.w0 67 /r ib]	AVX512FP16
VGETEXPPH	xmmreg|mask|z,xmmrm128|b16		[rm:fv:	evex.128.66.map6.w0 42 /r]	AVX512FP16,AVX512VL
VGETEXPPH	ymmreg|mask|z,ymmrm256|b16		[rm:fv:	evex.256.66.map6.w0 42 /r]	AVX512FP16,AVX512VL
VGETEXPPH	zmmreg|mask|z,zmmrm512|b16|sae		[rm:fv:	evex.512.66.map6.w0 42 /r]	AVX512FP16
VGETEXPSH	xmmreg|mask|z,xmmreg*,xmmrm16|sae	[rvm:t1s: evex.nds.lig.66.map6.w0 43 /r]	AVX512FP16
VGETMANTPH	xmmreg|mask|z,xmmrm128|b16,imm8		[rmi:fv: evex.128.np.0f3a.w0 25 /r ib]	AVX512FP16,AVX512VL
VGETMANTPH	ymmreg|mask|z,ymmrm256|b16,imm8		[rmi:fv: evex.256.np.0f3a.w0 25 /r ib]	AVX512FP16,AVX512VL
VGETMANTPH	zmmreg|mask|z,zmmrm512|b16|sae,imm8	[rmi:fv: evex.512.np.0f3a.w0 25 /r ib]	AVX512FP16
VGETMANTSH	xmmreg|mask|z,xmmreg*,xmmrm16|sae,imm8	[rvmi:t1s: evex.nds.lig.np.0f3a.w0 27 /r ib]	AVX512FP16
VMAXPH		xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.np.map5.w0 5f /r]		AVX512FP16,AVX512VL
VMAXPH		ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.np.map5.w0 5f /r]		AVX512FP16,AVX512VL
VMAXPH		zmmreg|mask|z,zmmreg*,zmmrm512|b16|sae	[rvm:fv: evex.nds.512.np.map5.w0 5f /r]		AVX512FP16
VMAXSH		xmmreg|mask|z,xmmreg*,xmmrm16|sae	[rvm:t1s: evex.nds.lig.f3.map5.w0 5f /r]	AVX512FP16
VMINPH		xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.np.map5.w0 5d /r]		AVX512FP16,AVX512VL
VMINPH		ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.np.map5.w0 5d /r]		AVX512FP16,AVX512VL
VMINPH		zmmreg|mask|z,zmmreg*,zmmrm512|b16|sae	[rvm:fv: evex.nds.512.np.map5.w0 5d /r]		AVX512FP16
VMINSH		xmmreg|mask|z,xmmreg*,xmmrm16|sae	[rvm:t1s: evex.nds.lig.f3.map5.w0 5d /r]	AVX512FP16
VMOVSH		xmmreg|mask|z,mem16			[rm:t1s: evex.lig.f3.map5.w0 10 /r]		AVX512FP16
VMOVSH		mem16|mask,xmmreg			[mr:t1s: evex.lig.f3.map5.w0 11 /r]		AVX512FP16
VMOVSH		xmmreg|mask|z,xmmreg*,xmmreg		[rvm:	 evex.nds.lig.f3.map5.w0 10 /r]		AVX512FP16
VMOVSH		xmmreg|mask|z,xmmreg*,xmmreg		[mvr:	 evex.nds.lig.f3.map5.w0 11 /r]		AVX512FP16
VMOVW		xmmreg|mask|z,rm16			[rm:t1s: evex.128.66.map5.wig 6e /r]		AVX512FP16
VMOVW		rm16,xmmreg				[mr:t1s: evex.128.66.map5.wig 7e /r]		AVX512FP16
VMULPH		xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.np.map5.w0 59 /r]		AVX512FP16,AVX512VL
VMULPH		ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.np.map5.w0 59 /r]		AVX512FP16,AVX512VL
VMULPH		zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.np.map5.w0 59 /r]		AVX512FP16
VMULSH		xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.f3.map5.w0 59 /r]	AVX512FP16
VRCPPH		xmmreg|mask|z,xmmrm128|b16		[rm:fv:	evex.128.66.map6.w0 4c /r]		AVX512FP16,AVX512VL
VRCPPH		ymmreg|mask|z,ymmrm256|b16		[rm:fv:	evex.256.66.map6.w0 4c /r]		AVX512FP16,AVX512VL
VRCPPH		zmmreg|mask|z,zmmrm512|b16		[rm:fv:	evex.512.66.map6.w0 4c /r]		AVX512FP16
VRCPSH		xmmreg|mask|z,xmmreg*,xmmrm16		[rvm:t1s: evex.nds.lig.66.map6.w0 4d /r]	AVX512FP16
VREDUCEPH	xmmreg|mask|z,xmmrm128|b16,imm8		[rmi:fv: evex.128.np.0f3a.w0 56 /r ib]		AVX512FP16,AVX512VL
VREDUCEPH	ymmreg|mask|z,ymmrm256|b16,imm8		[rmi:fv: evex.256.np.0f3a.w0 56 /r ib]		AVX512FP16,AVX512VL
VREDUCEPH	zmmreg|mask|z,zmmrm512|b16|sae,imm8	[rmi:fv: evex.512.np.0f3a.w0 56 /r ib]		AVX512FP16
VREDUCESH	xmmreg|mask|z,xmmreg*,xmmrm16|sae,imm8	[rvmi:t1s: evex.nds.lig.np.0f3a.w0 57 /r ib]	AVX512FP16
VRNDSCALEPH	xmmreg|mask|z,xmmrm128|b16,imm8		[rmi:fv: evex.128.np.0f3a.w0 08 /r ib]		AVX512FP16,AVX512VL
VRNDSCALEPH	ymmreg|mask|z,ymmrm256|b16,imm8		[rmi:fv: evex.256.np.0f3a.w0 08 /r ib]		AVX512FP16,AVX512VL
VRNDSCALEPH	zmmreg|mask|z,zmmrm512|b16|sae,imm8	[rmi:fv: evex.512.np.0f3a.w0 08 /r ib]		AVX512FP16
VRNDSCALESH	xmmreg|mask|z,xmmreg*,xmmrm16|sae,imm8	[rvmi:t1s: evex.nds.lig.np.0f3a.w0 0a /r ib]	AVX512FP16
VRSQRTPH	xmmreg|mask|z,xmmrm128|b16		[rm:fv:	evex.128.66.map6.w0 4e /r]		AVX512FP16,AVX512VL
VRSQRTPH	ymmreg|mask|z,ymmrm256|b16		[rm:fv:	evex.256.66.map6.w0 4e /r]		AVX512FP16,AVX512VL
VRSQRTPH	zmmreg|mask|z,zmmrm512|b16		[rm:fv:	evex.512.66.map6.w0 4e /r]		AVX512FP16
VRSQRTSH	xmmreg|mask|z,xmmreg*,xmmrm16		[rvm:t1s: evex.nds.lig.66.map6.w0 4f /r]	AVX512FP16
VSCALEFPH	xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.66.map6.w0 2c /r]		AVX512FP16,AVX512VL
VSCALEFPH	ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.66.map6.w0 2c /r]		AVX512FP16,AVX512VL
VSCALEFPH	zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.66.map6.w0 2c /r]		AVX512FP16
VSCALEFSH	xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.66.map6.w0 2d /r]	AVX512FP16
VSQRTPH		xmmreg|mask|z,xmmrm128|b16		[rm:fv:	evex.128.np.map5.w0 51 /r]		AVX512FP16,AVX512VL
VSQRTPH		ymmreg|mask|z,ymmrm256|b16		[rm:fv:	evex.256.np.map5.w0 51 /r]		AVX512FP16,AVX512VL
VSQRTPH		zmmreg|mask|z,zmmrm512|b16|er		[rm:fv:	evex.512.np.map5.w0 51 /r]		AVX512FP16
VSQRTSH		xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.f3.map5.w0 51 /r]	AVX512FP16
VSUBPH		xmmreg|mask|z,xmmreg*,xmmrm128|b16	[rvm:fv: evex.nds.128.np.map5.w0 5c /r]		AVX512FP16,AVX512VL
VSUBPH		ymmreg|mask|z,ymmreg*,ymmrm256|b16	[rvm:fv: evex.nds.256.np.map5.w0 5c /r]		AVX512FP16,AVX512VL
VSUBPH		zmmreg|mask|z,zmmreg*,zmmrm512|b16|er	[rvm:fv: evex.nds.512.np.map5.w0 5c /r]		AVX512FP16
VSUBSH		xmmreg|mask|z,xmmreg*,xmmrm16|er	[rvm:t1s: evex.nds.lig.f3.map5.w0 5c /r]	AVX512FP16
VUCOMISH	xmmreg,xmmrm16|sae			[rm:t1s: evex.lig.np.map5.w0 2e /r]		AVX512FP16
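
; Illustrative usage only (not part of the instruction table): AVX512-FP16
; arithmetic composes with masking, 16-bit broadcast and embedded rounding
; like the other AVX-512 families; register choices below are assumptions.
;
;         vaddph      zmm0{k1}{z}, zmm1, [rsi]{1to32}   ; masked add with bcst
;         vfmadd231ph zmm2, zmm3, zmm4, {rn-sae}        ; fused multiply-add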

;# RAO-INT weakly ordered atomic operations
AADD		mem32,reg32	[mr:	norexw np 0f38 fc /r ]	RAOINT,SD
AADD		mem64,reg64	[mr:	o64 np 0f38 fc /r ]	RAOINT,SQ,LONG
AAND		mem32,reg32	[mr:	norexw 66 0f38 fc /r ]	RAOINT,SD
AAND		mem64,reg64	[mr:	o64 66 0f38 fc /r ]	RAOINT,SQ,LONG
AXOR		mem32,reg32	[mr:	norexw f3 0f38 fc /r ]	RAOINT,SD
AXOR		mem64,reg64	[mr:	o64 f3 0f38 fc /r ]	RAOINT,SQ,LONG
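
; Illustrative usage only (not part of the instruction table): the RAO-INT
; operations update memory atomically with weak ordering and return neither
; the old value nor flags, e.g. for a contended statistics counter (the
; symbol name is assumed).
;
;         aadd    [counter], rax          ; fire-and-forget remote atomic add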

;# User interrupts
CLUI		void	[	f3 0f 01 ee ]		UINTR,LONG
SENDUIPI	reg64	[m:	o64nw f3 0f c7 /6 ]	UINTR,LONG
STUI		void	[	f3 0f 01 ef ]		UINTR,LONG
TESTUI		void	[	f3 0f 01 ed ]		UINTR,LONG
UIRET		void	[	f3 0f 01 ec ]		UINTR,LONG
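
; Illustrative usage only (not part of the instruction table): user-interrupt
; delivery is gated by UIF, which these instructions toggle; using RAX as the
; UITT index below is an assumption.
;
;         stui                    ; set UIF, allow user-interrupt delivery
;         senduipi rax            ; post a user IPI via UITT entry RAX
;         clui                    ; clear UIF around a critical section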

;# Flexible Return and Exception Delivery

;# History reset
HRESET		imm,reg_eax	[i-:	f3 0f3a f0 c0 ib ]	HRESET,PRIV,SB
HRESET		imm		[i:	f3 0f3a f0 c0 ib ]	HRESET,PRIV,SB,ND
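
; Illustrative usage only (not part of the instruction table): HRESET takes an
; immediate (currently reserved as 0) and reads the reset-request bitmap from
; EAX; treating bit 0 as the Intel Thread Director history request is an
; assumption based on current documentation.
;
;         mov     eax, 1          ; request history reset, bit 0
;         hreset  0               ; immediate must be 0 today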

;# Systematic names for the hinting nop instructions
; These should be last in the file
$wdq NOP	imm,reg#,rm#	[irm:	o# 0f ibn /r]	P6,UNDOC,SM1-2
$wdq HINT_NOP	imm,reg#,rm#	[irm:	o# 0f ibn /r]	P6,UNDOC,SM1-2,ND
$hint_nops