#BEGIN_LEGAL
#
#Copyright (c) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#END_LEGAL

AVX_INSTRUCTIONS()::

{
ICLASS : PDEP
CPL : 3
CATEGORY : BMI2
EXTENSION : BMI2
#32b
PATTERN : VV1 0xF5 V0F38 VF2 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d MEM0:r:d
PATTERN : VV1 0xF5 V0F38 VF2 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d MEM0:r:d
PATTERN : VV1 0xF5 V0F38 VF2 not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
PATTERN : VV1 0xF5 V0F38 VF2 W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
# 64b
PATTERN : VV1 0xF5 V0F38 VF2 W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_N():r:q MEM0:r:q
PATTERN : VV1 0xF5 V0F38 VF2 W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_N():r:q REG2=VGPR64_B():r:q
}

{
ICLASS : PEXT
CPL : 3
CATEGORY : BMI2
EXTENSION : BMI2
#32b
PATTERN : VV1 0xF5 V0F38 VF3 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d MEM0:r:d
PATTERN : VV1 0xF5 V0F38 VF3 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d MEM0:r:d
PATTERN : VV1 0xF5 V0F38 VF3 not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
PATTERN : VV1 0xF5 V0F38 VF3 W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
# 64b
PATTERN : VV1 0xF5 V0F38 VF3 W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_N():r:q MEM0:r:q
PATTERN : VV1 0xF5 V0F38 VF3 W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_N():r:q REG2=VGPR64_B():r:q
}

{
ICLASS : ANDN
CPL : 3
CATEGORY : BMI1
EXTENSION : BMI1
FLAGS : MUST [ of-0 sf-mod zf-mod af-u pf-u cf-0 ]
# 32b
PATTERN : VV1 0xF2 V0F38 VNP not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d MEM0:r:d
PATTERN : VV1 0xF2 V0F38 VNP W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d MEM0:r:d
PATTERN : VV1 0xF2 V0F38 VNP not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
PATTERN : VV1 0xF2 V0F38 VNP W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():r:d REG2=VGPR32_B():r:d
# 64b
PATTERN : VV1 0xF2 V0F38 VNP W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_N():r:q MEM0:r:q
PATTERN : VV1 0xF2 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_N():r:q REG2=VGPR64_B():r:q
}
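
# BLSR (clear lowest set bit), BLSMSK (mask up to and including the lowest set
# bit) and BLSI (isolate lowest set bit) below share opcode 0xF3 in map 0F38
# and are distinguished by ModRM.reg (/1, /2, /3); the destination GPR is
# encoded in VEX.vvvv (VGPR32_N / VGPR64_N).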
{
ICLASS : BLSR
CPL : 3
CATEGORY : BMI1
EXTENSION : BMI1
FLAGS : MUST [ of-0 sf-mod zf-mod af-u pf-u cf-mod ]
# 32b
PATTERN : VV1 0xF3 V0F38 VNP not64 VL128 MOD[mm] MOD!=3 REG[0b001] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_N():w:d MEM0:r:d
PATTERN : VV1 0xF3 V0F38 VNP W0 mode64 VL128 MOD[mm] MOD!=3 REG[0b001] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_N():w:d MEM0:r:d
PATTERN : VV1 0xF3 V0F38 VNP not64 VL128 MOD[0b11] MOD=3 REG[0b001] RM[nnn]
OPERANDS : REG0=VGPR32_N():w:d REG1=VGPR32_B():r:d
PATTERN : VV1 0xF3 V0F38 VNP W0 mode64 VL128 MOD[0b11] MOD=3 REG[0b001] RM[nnn]
OPERANDS : REG0=VGPR32_N():w:d REG1=VGPR32_B():r:d
# 64b
PATTERN : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[mm] MOD!=3 REG[0b001] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_N():w:q MEM0:r:q
PATTERN : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[0b001] RM[nnn]
OPERANDS : REG0=VGPR64_N():w:q REG1=VGPR64_B():r:q
}

{
ICLASS : BLSMSK
CPL : 3
CATEGORY : BMI1
EXTENSION : BMI1
FLAGS : MUST [ of-0 sf-mod zf-0 af-u pf-u cf-mod ]
#32b
PATTERN : VV1 0xF3 V0F38 VNP not64 VL128 MOD[mm] MOD!=3 REG[0b010] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_N():w:d MEM0:r:d
PATTERN : VV1 0xF3 V0F38 VNP W0 mode64 VL128 MOD[mm] MOD!=3 REG[0b010] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_N():w:d MEM0:r:d
PATTERN : VV1 0xF3 V0F38 VNP not64 VL128 MOD[0b11] MOD=3 REG[0b010] RM[nnn]
OPERANDS : REG0=VGPR32_N():w:d REG1=VGPR32_B():r:d
PATTERN : VV1 0xF3 V0F38 VNP W0 mode64 VL128 MOD[0b11] MOD=3 REG[0b010] RM[nnn]
OPERANDS : REG0=VGPR32_N():w:d REG1=VGPR32_B():r:d
#64b
PATTERN : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[mm] MOD!=3 REG[0b010] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_N():w:q MEM0:r:q
PATTERN : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[0b010] RM[nnn]
OPERANDS : REG0=VGPR64_N():w:q REG1=VGPR64_B():r:q
}

{
ICLASS : BLSI
CPL : 3
CATEGORY : BMI1
EXTENSION : BMI1
# CF is set if the source is nonzero (Intel SDM), hence cf-mod.
FLAGS : MUST [ of-0 sf-mod zf-mod af-u pf-u cf-mod ]
# 32b
PATTERN : VV1 0xF3 V0F38 VNP not64 VL128 MOD[mm] MOD!=3 REG[0b011] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_N():w:d MEM0:r:d
PATTERN : VV1 0xF3 V0F38 VNP W0 mode64 VL128 MOD[mm] MOD!=3 REG[0b011] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_N():w:d MEM0:r:d
PATTERN : VV1 0xF3 V0F38 VNP not64 VL128 MOD[0b11] MOD=3 REG[0b011] RM[nnn]
OPERANDS : REG0=VGPR32_N():w:d REG1=VGPR32_B():r:d
PATTERN : VV1 0xF3 V0F38 VNP W0 mode64 VL128 MOD[0b11] MOD=3 REG[0b011] RM[nnn]
OPERANDS : REG0=VGPR32_N():w:d REG1=VGPR32_B():r:d
# 64b
PATTERN : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[mm] MOD!=3 REG[0b011] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_N():w:q MEM0:r:q
PATTERN : VV1 0xF3 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[0b011] RM[nnn]
OPERANDS : REG0=VGPR64_N():w:q REG1=VGPR64_B():r:q
}
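
# BZHI dst, src, index: copies src into dst and zeroes every bit at position
# index[7:0] and above; the index operand is the GPR encoded in VEX.vvvv.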
{
ICLASS : BZHI
CPL : 3
CATEGORY : BMI2
EXTENSION : BMI2
FLAGS : MUST [ of-0 sf-mod zf-mod af-u pf-u cf-mod ]
# 32b
PATTERN : VV1 0xF5 V0F38 VNP not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
PATTERN : VV1 0xF5 V0F38 VNP W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
PATTERN : VV1 0xF5 V0F38 VNP not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
PATTERN : VV1 0xF5 V0F38 VNP W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
# 64b
PATTERN : VV1 0xF5 V0F38 VNP W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_R():w:q MEM0:r:q REG1=VGPR64_N():r:q
PATTERN : VV1 0xF5 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q REG2=VGPR64_N():r:q
}

{
ICLASS : BEXTR
CPL : 3
CATEGORY : BMI1
EXTENSION : BMI1
FLAGS : MUST [ of-u sf-u zf-mod af-u pf-u cf-u ]
# 32b
PATTERN : VV1 0xF7 V0F38 VNP not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 VNP W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 VNP not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 VNP W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
# 64b
PATTERN : VV1 0xF7 V0F38 VNP W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_R():w:q MEM0:r:q REG1=VGPR64_N():r:q
PATTERN : VV1 0xF7 V0F38 VNP W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q REG2=VGPR64_N():r:q
}

{
ICLASS : SHLX
CPL : 3
CATEGORY : BMI2
EXTENSION : BMI2
# 32b
PATTERN : VV1 0xF7 V0F38 V66 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 V66 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 V66 not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 V66 W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
# 64b
PATTERN : VV1 0xF7 V0F38 V66 W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_R():w:q MEM0:r:q REG1=VGPR64_N():r:q
PATTERN : VV1 0xF7 V0F38 V66 W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q REG2=VGPR64_N():r:q
}

{
ICLASS : SARX
CPL : 3
CATEGORY : BMI2
EXTENSION : BMI2
# 32b
PATTERN : VV1 0xF7 V0F38 VF3 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 VF3 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 VF3 not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 VF3 W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
# 64b
PATTERN : VV1 0xF7 V0F38 VF3 W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_R():w:q MEM0:r:q REG1=VGPR64_N():r:q
PATTERN : VV1 0xF7 V0F38 VF3 W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q REG2=VGPR64_N():r:q
}
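
# SHRX dst, src, count: logical right shift, the counterpart of SHLX and SARX
# above; the shift count comes from the GPR encoded in VEX.vvvv and no flags
# are modified.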
{
ICLASS : SHRX
CPL : 3
CATEGORY : BMI2
EXTENSION : BMI2
# 32b
PATTERN : VV1 0xF7 V0F38 VF2 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 VF2 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d REG1=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 VF2 not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
PATTERN : VV1 0xF7 V0F38 VF2 W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d REG2=VGPR32_N():r:d
# 64b
PATTERN : VV1 0xF7 V0F38 VF2 W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_R():w:q MEM0:r:q REG1=VGPR64_N():r:q
PATTERN : VV1 0xF7 V0F38 VF2 W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q REG2=VGPR64_N():r:q
}

{
ICLASS : MULX
CPL : 3
CATEGORY : BMI2
EXTENSION : BMI2
# reg:w vvvv:w rm:r rdx:r
# 32b
PATTERN : VV1 0xF6 VF2 V0F38 not64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():w:d REG2=VGPR32_B():r:d REG3=XED_REG_EDX:r:SUPP
PATTERN : VV1 0xF6 VF2 V0F38 W0 mode64 VL128 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():w:d REG2=VGPR32_B():r:d REG3=XED_REG_EDX:r:SUPP
PATTERN : VV1 0xF6 VF2 V0F38 not64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():w:d MEM0:r:d REG2=XED_REG_EDX:r:SUPP
PATTERN : VV1 0xF6 VF2 V0F38 W0 mode64 VL128 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_N():w:d MEM0:r:d REG2=XED_REG_EDX:r:SUPP
# 64b
PATTERN : VV1 0xF6 VF2 V0F38 W1 VL128 mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn]
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_N():w:q REG2=VGPR64_B():r:q REG3=XED_REG_RDX:r:SUPP
PATTERN : VV1 0xF6 VF2 V0F38 W1 VL128 mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM()
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_N():w:q MEM0:r:q REG2=XED_REG_RDX:r:SUPP
}

{
ICLASS : RORX
CPL : 3
CATEGORY : BMI2
EXTENSION : BMI2
# reg(w) rm(r) / vvvv must be 1111. / 2010-01-08 CART change
# 32b
PATTERN : VV1 0xF0 VF2 V0F3A not64 VL128 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d IMM0:r:b
PATTERN : VV1 0xF0 VF2 V0F3A W0 mode64 VL128 NOVSR MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
OPERANDS : REG0=VGPR32_R():w:d REG1=VGPR32_B():r:d IMM0:r:b
PATTERN : VV1 0xF0 VF2 V0F3A not64 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d IMM0:r:b
PATTERN : VV1 0xF0 VF2 V0F3A W0 mode64 VL128 NOVSR MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
OPERANDS : REG0=VGPR32_R():w:d MEM0:r:d IMM0:r:b
# 64b
PATTERN : VV1 0xF0 VF2 V0F3A W1 VL128 NOVSR mode64 MOD[0b11] MOD=3 REG[rrr] RM[nnn] UIMM8()
OPERANDS : REG0=VGPR64_R():w:q REG1=VGPR64_B():r:q IMM0:r:b
PATTERN : VV1 0xF0 VF2 V0F3A W1 VL128 NOVSR mode64 MOD[mm] MOD!=3 REG[rrr] RM[nnn] MODRM() UIMM8()
OPERANDS : REG0=VGPR64_R():w:q MEM0:r:q IMM0:r:b
}